@aztec/end-to-end 0.0.1-commit.d3ec352c → 0.0.1-commit.fcb71a6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (97)
  1. package/dest/bench/client_flows/client_flows_benchmark.d.ts +3 -3
  2. package/dest/bench/client_flows/client_flows_benchmark.d.ts.map +1 -1
  3. package/dest/bench/client_flows/client_flows_benchmark.js +5 -3
  4. package/dest/bench/utils.d.ts +2 -2
  5. package/dest/bench/utils.d.ts.map +1 -1
  6. package/dest/bench/utils.js +10 -6
  7. package/dest/e2e_cross_chain_messaging/cross_chain_messaging_test.d.ts +5 -4
  8. package/dest/e2e_cross_chain_messaging/cross_chain_messaging_test.d.ts.map +1 -1
  9. package/dest/e2e_cross_chain_messaging/cross_chain_messaging_test.js +2 -1
  10. package/dest/e2e_epochs/epochs_test.d.ts +3 -2
  11. package/dest/e2e_epochs/epochs_test.d.ts.map +1 -1
  12. package/dest/e2e_epochs/epochs_test.js +11 -9
  13. package/dest/e2e_fees/bridging_race.notest.js +1 -1
  14. package/dest/e2e_fees/fees_test.d.ts +6 -3
  15. package/dest/e2e_fees/fees_test.d.ts.map +1 -1
  16. package/dest/e2e_fees/fees_test.js +4 -1
  17. package/dest/e2e_multi_validator/utils.js +1 -1
  18. package/dest/e2e_p2p/inactivity_slash_test.d.ts +2 -2
  19. package/dest/e2e_p2p/inactivity_slash_test.d.ts.map +1 -1
  20. package/dest/e2e_p2p/inactivity_slash_test.js +3 -6
  21. package/dest/e2e_p2p/p2p_network.d.ts +7 -6
  22. package/dest/e2e_p2p/p2p_network.d.ts.map +1 -1
  23. package/dest/e2e_p2p/p2p_network.js +10 -8
  24. package/dest/e2e_p2p/shared.d.ts +2 -2
  25. package/dest/e2e_p2p/shared.d.ts.map +1 -1
  26. package/dest/fixtures/e2e_prover_test.d.ts +3 -5
  27. package/dest/fixtures/e2e_prover_test.d.ts.map +1 -1
  28. package/dest/fixtures/e2e_prover_test.js +6 -9
  29. package/dest/fixtures/fixtures.d.ts +2 -3
  30. package/dest/fixtures/fixtures.d.ts.map +1 -1
  31. package/dest/fixtures/fixtures.js +1 -2
  32. package/dest/fixtures/get_acvm_config.js +1 -1
  33. package/dest/fixtures/l1_to_l2_messaging.d.ts +4 -3
  34. package/dest/fixtures/l1_to_l2_messaging.d.ts.map +1 -1
  35. package/dest/fixtures/l1_to_l2_messaging.js +2 -2
  36. package/dest/fixtures/setup_p2p_test.js +3 -3
  37. package/dest/fixtures/snapshot_manager.d.ts +6 -8
  38. package/dest/fixtures/snapshot_manager.d.ts.map +1 -1
  39. package/dest/fixtures/snapshot_manager.js +34 -46
  40. package/dest/fixtures/utils.d.ts +18 -462
  41. package/dest/fixtures/utils.d.ts.map +1 -1
  42. package/dest/fixtures/utils.js +50 -78
  43. package/dest/fixtures/web3signer.js +1 -1
  44. package/dest/fixtures/with_telemetry_utils.d.ts +2 -2
  45. package/dest/fixtures/with_telemetry_utils.d.ts.map +1 -1
  46. package/dest/fixtures/with_telemetry_utils.js +2 -2
  47. package/dest/shared/cross_chain_test_harness.d.ts +3 -2
  48. package/dest/shared/cross_chain_test_harness.d.ts.map +1 -1
  49. package/dest/shared/cross_chain_test_harness.js +1 -1
  50. package/dest/shared/gas_portal_test_harness.d.ts +2 -2
  51. package/dest/shared/gas_portal_test_harness.d.ts.map +1 -1
  52. package/dest/shared/uniswap_l1_l2.d.ts +4 -3
  53. package/dest/shared/uniswap_l1_l2.d.ts.map +1 -1
  54. package/dest/shared/uniswap_l1_l2.js +4 -2
  55. package/dest/simulators/lending_simulator.d.ts +2 -2
  56. package/dest/simulators/lending_simulator.d.ts.map +1 -1
  57. package/dest/simulators/lending_simulator.js +1 -1
  58. package/dest/spartan/setup_test_wallets.d.ts +1 -1
  59. package/dest/spartan/setup_test_wallets.d.ts.map +1 -1
  60. package/dest/spartan/setup_test_wallets.js +2 -1
  61. package/dest/spartan/tx_metrics.d.ts +39 -0
  62. package/dest/spartan/tx_metrics.d.ts.map +1 -0
  63. package/dest/spartan/tx_metrics.js +95 -0
  64. package/dest/spartan/utils.d.ts +40 -8
  65. package/dest/spartan/utils.d.ts.map +1 -1
  66. package/dest/spartan/utils.js +139 -29
  67. package/package.json +38 -38
  68. package/src/bench/client_flows/client_flows_benchmark.ts +7 -4
  69. package/src/bench/utils.ts +11 -7
  70. package/src/e2e_cross_chain_messaging/cross_chain_messaging_test.ts +13 -9
  71. package/src/e2e_epochs/epochs_test.ts +24 -11
  72. package/src/e2e_fees/bridging_race.notest.ts +1 -1
  73. package/src/e2e_fees/fees_test.ts +7 -2
  74. package/src/e2e_multi_validator/utils.ts +1 -1
  75. package/src/e2e_p2p/inactivity_slash_test.ts +4 -7
  76. package/src/e2e_p2p/p2p_network.ts +9 -16
  77. package/src/e2e_p2p/shared.ts +5 -1
  78. package/src/fixtures/e2e_prover_test.ts +8 -10
  79. package/src/fixtures/fixtures.ts +1 -3
  80. package/src/fixtures/get_acvm_config.ts +1 -1
  81. package/src/fixtures/l1_to_l2_messaging.ts +4 -2
  82. package/src/fixtures/setup_p2p_test.ts +3 -3
  83. package/src/fixtures/snapshot_manager.ts +51 -65
  84. package/src/fixtures/utils.ts +77 -136
  85. package/src/fixtures/web3signer.ts +1 -1
  86. package/src/fixtures/with_telemetry_utils.ts +2 -2
  87. package/src/shared/cross_chain_test_harness.ts +3 -1
  88. package/src/shared/gas_portal_test_harness.ts +1 -1
  89. package/src/shared/uniswap_l1_l2.ts +8 -10
  90. package/src/simulators/lending_simulator.ts +2 -2
  91. package/src/spartan/setup_test_wallets.ts +2 -1
  92. package/src/spartan/tx_metrics.ts +130 -0
  93. package/src/spartan/utils.ts +196 -21
  94. package/dest/fixtures/setup_l1_contracts.d.ts +0 -477
  95. package/dest/fixtures/setup_l1_contracts.d.ts.map +0 -1
  96. package/dest/fixtures/setup_l1_contracts.js +0 -17
  97. package/src/fixtures/setup_l1_contracts.ts +0 -26
@@ -1,6 +1,7 @@
1
1
  import { createLogger } from '@aztec/aztec.js/log';
2
2
  import type { RollupCheatCodes } from '@aztec/aztec/testing';
3
- import type { L1ContractAddresses, ViemPublicClient } from '@aztec/ethereum';
3
+ import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses';
4
+ import type { ViemPublicClient } from '@aztec/ethereum/types';
4
5
  import type { CheckpointNumber } from '@aztec/foundation/branded-types';
5
6
  import type { Logger } from '@aztec/foundation/log';
6
7
  import { promiseWithResolvers } from '@aztec/foundation/promise';
@@ -31,6 +32,7 @@ const testConfigSchema = z.object({
31
32
  L1_RPC_URLS_JSON: z.string().optional(),
32
33
  L1_ACCOUNT_MNEMONIC: z.string().optional(),
33
34
  AZTEC_SLOT_DURATION: z.coerce.number().optional().default(24),
35
+ AZTEC_EPOCH_DURATION: z.coerce.number().optional().default(32),
34
36
  AZTEC_PROOF_SUBMISSION_WINDOW: z.coerce.number().optional().default(5),
35
37
  });
36
38
 
@@ -243,11 +245,11 @@ export async function deleteResourceByLabel({
243
245
  timeout?: string;
244
246
  force?: boolean;
245
247
  }) {
246
- // Check if the resource type exists before attempting to delete
247
248
  try {
248
- await execAsync(
249
- `kubectl api-resources --api-group="" --no-headers -o name | grep -q "^${resource}$" || kubectl api-resources --no-headers -o name | grep -q "^${resource}$"`,
250
- );
249
+ // Match both plain and group-qualified names (e.g., "podchaos" or "podchaos.chaos-mesh.org")
250
+ const escaped = resource.replace(/[-/\\^$*+?.()|[\]{}]/g, '\\$&');
251
+ const regex = `(^|\\.)${escaped}(\\.|$)`;
252
+ await execAsync(`kubectl api-resources --no-headers -o name | grep -Eq '${regex}'`);
251
253
  } catch (error) {
252
254
  logger.warn(`Resource type '${resource}' not found in cluster, skipping deletion ${error}`);
253
255
  return '';
@@ -280,6 +282,58 @@ export async function waitForResourceByLabel({
280
282
  return stdout;
281
283
  }
282
284
 
285
+ export async function waitForResourceByName({
286
+ resource,
287
+ name,
288
+ namespace,
289
+ condition = 'Ready',
290
+ timeout = '10m',
291
+ }: {
292
+ resource: string;
293
+ name: string;
294
+ namespace: string;
295
+ condition?: string;
296
+ timeout?: string;
297
+ }) {
298
+ const command = `kubectl wait ${resource}/${name} --for=condition=${condition} -n ${namespace} --timeout=${timeout}`;
299
+ logger.info(`command: ${command}`);
300
+ const { stdout } = await execAsync(command);
301
+ return stdout;
302
+ }
303
+
304
+ export async function waitForResourcesByName({
305
+ resource,
306
+ names,
307
+ namespace,
308
+ condition = 'Ready',
309
+ timeout = '10m',
310
+ }: {
311
+ resource: string;
312
+ names: string[];
313
+ namespace: string;
314
+ condition?: string;
315
+ timeout?: string;
316
+ }) {
317
+ if (!names.length) {
318
+ throw new Error(`No ${resource} names provided to waitForResourcesByName`);
319
+ }
320
+
321
+ // Wait all in parallel; if any fails, surface which one.
322
+ await Promise.all(
323
+ names.map(async name => {
324
+ try {
325
+ await waitForResourceByName({ resource, name, namespace, condition, timeout });
326
+ } catch (err) {
327
+ throw new Error(
328
+ `Failed waiting for ${resource}/${name} condition=${condition} timeout=${timeout} namespace=${namespace}: ${String(
329
+ err,
330
+ )}`,
331
+ );
332
+ }
333
+ }),
334
+ );
335
+ }
336
+
283
337
  export function getChartDir(spartanDir: string, chartName: string) {
284
338
  return path.join(spartanDir.trim(), chartName);
285
339
  }
@@ -330,7 +384,36 @@ async function execHelmCommand(args: Parameters<typeof createHelmCommand>[0]) {
330
384
  return stdout;
331
385
  }
332
386
 
333
- export async function cleanHelm(instanceName: string, namespace: string, logger: Logger) {
387
+ async function getHelmReleaseStatus(instanceName: string, namespace: string): Promise<string | undefined> {
388
+ try {
389
+ const { stdout } = await execAsync(
390
+ `helm list --namespace ${namespace} --all --filter '^${instanceName}$' --output json | cat`,
391
+ );
392
+ const parsed = JSON.parse(stdout) as Array<{ name?: string; status?: string }>;
393
+ const row = parsed.find(r => r.name === instanceName);
394
+ return row?.status;
395
+ } catch {
396
+ return undefined;
397
+ }
398
+ }
399
+
400
+ async function forceDeleteHelmReleaseRecord(instanceName: string, namespace: string, logger: Logger) {
401
+ const labelSelector = `owner=helm,name=${instanceName}`;
402
+ const cmd = `kubectl delete secret -n ${namespace} -l ${labelSelector} --ignore-not-found=true`;
403
+ logger.warn(`Force deleting Helm release record: ${cmd}`);
404
+ await execAsync(cmd).catch(() => undefined);
405
+ }
406
+
407
+ async function hasDeployedHelmRelease(instanceName: string, namespace: string): Promise<boolean> {
408
+ try {
409
+ const status = await getHelmReleaseStatus(instanceName, namespace);
410
+ return status?.toLowerCase() === 'deployed';
411
+ } catch {
412
+ return false;
413
+ }
414
+ }
415
+
416
+ export async function uninstallChaosMesh(instanceName: string, namespace: string, logger: Logger) {
334
417
  // uninstall the helm chart if it exists
335
418
  logger.info(`Uninstalling helm chart ${instanceName}`);
336
419
  await execAsync(`helm uninstall ${instanceName} --namespace ${namespace} --wait --ignore-not-found`);
@@ -394,7 +477,7 @@ export async function installChaosMeshChart({
394
477
  logger: Logger;
395
478
  }) {
396
479
  if (clean) {
397
- await cleanHelm(instanceName, targetNamespace, logger);
480
+ await uninstallChaosMesh(instanceName, targetNamespace, logger);
398
481
  }
399
482
 
400
483
  return execHelmCommand({
@@ -430,22 +513,49 @@ export function applyProverFailure({
430
513
  });
431
514
  }
432
515
 
516
+ export function applyValidatorFailure({
517
+ namespace,
518
+ spartanDir,
519
+ logger,
520
+ values,
521
+ instanceName,
522
+ }: {
523
+ namespace: string;
524
+ spartanDir: string;
525
+ logger: Logger;
526
+ values?: Record<string, string | number>;
527
+ instanceName?: string;
528
+ }) {
529
+ return installChaosMeshChart({
530
+ instanceName: instanceName ?? 'validator-failure',
531
+ targetNamespace: namespace,
532
+ valuesFile: 'validator-failure.yaml',
533
+ helmChartDir: getChartDir(spartanDir, 'aztec-chaos-scenarios'),
534
+ values,
535
+ logger,
536
+ });
537
+ }
538
+
433
539
  export function applyProverKill({
434
540
  namespace,
435
541
  spartanDir,
436
542
  logger,
543
+ values,
437
544
  }: {
438
545
  namespace: string;
439
546
  spartanDir: string;
440
547
  logger: Logger;
548
+ values?: Record<string, string | number>;
441
549
  }) {
442
550
  return installChaosMeshChart({
443
551
  instanceName: 'prover-kill',
444
552
  targetNamespace: namespace,
445
553
  valuesFile: 'prover-kill.yaml',
446
554
  helmChartDir: getChartDir(spartanDir, 'aztec-chaos-scenarios'),
555
+ chaosMeshNamespace: namespace,
447
556
  clean: true,
448
557
  logger,
558
+ values,
449
559
  });
450
560
  }
451
561
 
@@ -453,10 +563,12 @@ export function applyProverBrokerKill({
453
563
  namespace,
454
564
  spartanDir,
455
565
  logger,
566
+ values,
456
567
  }: {
457
568
  namespace: string;
458
569
  spartanDir: string;
459
570
  logger: Logger;
571
+ values?: Record<string, string | number>;
460
572
  }) {
461
573
  return installChaosMeshChart({
462
574
  instanceName: 'prover-broker-kill',
@@ -465,66 +577,79 @@ export function applyProverBrokerKill({
465
577
  helmChartDir: getChartDir(spartanDir, 'aztec-chaos-scenarios'),
466
578
  clean: true,
467
579
  logger,
580
+ values,
468
581
  });
469
582
  }
470
583
 
471
584
  export function applyBootNodeFailure({
585
+ instanceName = 'boot-node-failure',
472
586
  namespace,
473
587
  spartanDir,
474
588
  durationSeconds,
475
589
  logger,
590
+ values,
476
591
  }: {
592
+ instanceName?: string;
477
593
  namespace: string;
478
594
  spartanDir: string;
479
595
  durationSeconds: number;
480
596
  logger: Logger;
597
+ values?: Record<string, string | number>;
481
598
  }) {
482
599
  return installChaosMeshChart({
483
- instanceName: 'boot-node-failure',
600
+ instanceName,
484
601
  targetNamespace: namespace,
485
602
  valuesFile: 'boot-node-failure.yaml',
486
603
  helmChartDir: getChartDir(spartanDir, 'aztec-chaos-scenarios'),
487
604
  values: {
488
605
  'bootNodeFailure.duration': `${durationSeconds}s`,
606
+ ...(values ?? {}),
489
607
  },
490
608
  logger,
491
609
  });
492
610
  }
493
611
 
494
612
  export function applyValidatorKill({
613
+ instanceName = 'validator-kill',
495
614
  namespace,
496
615
  spartanDir,
497
616
  logger,
498
617
  values,
618
+ clean = true,
499
619
  }: {
620
+ instanceName?: string;
500
621
  namespace: string;
501
622
  spartanDir: string;
502
623
  logger: Logger;
503
624
  values?: Record<string, string | number>;
625
+ clean?: boolean;
504
626
  }) {
505
627
  return installChaosMeshChart({
506
- instanceName: 'validator-kill',
628
+ instanceName: instanceName ?? 'validator-kill',
507
629
  targetNamespace: namespace,
508
630
  valuesFile: 'validator-kill.yaml',
509
631
  helmChartDir: getChartDir(spartanDir, 'aztec-chaos-scenarios'),
632
+ clean,
510
633
  logger,
511
634
  values,
512
635
  });
513
636
  }
514
637
 
515
638
  export function applyNetworkShaping({
639
+ instanceName = 'network-shaping',
516
640
  valuesFile,
517
641
  namespace,
518
642
  spartanDir,
519
643
  logger,
520
644
  }: {
645
+ instanceName?: string;
521
646
  valuesFile: string;
522
647
  namespace: string;
523
648
  spartanDir: string;
524
649
  logger: Logger;
525
650
  }) {
526
651
  return installChaosMeshChart({
527
- instanceName: 'network-shaping',
652
+ instanceName,
528
653
  targetNamespace: namespace,
529
654
  valuesFile,
530
655
  helmChartDir: getChartDir(spartanDir, 'aztec-chaos-scenarios'),
@@ -623,6 +748,12 @@ export async function installTransferBot({
623
748
  'bot.node.env.ETHEREUM_HOSTS': `http://${namespace}-eth-execution.${namespace}.svc.cluster.local:8545`,
624
749
  // Provide L1 mnemonic for bridging (falls back to labs mnemonic)
625
750
  'bot.node.env.BOT_L1_MNEMONIC': mnemonic,
751
+
752
+ // The bot does not need Kubernetes API access. Disable RBAC + ServiceAccount creation so the chart
753
+ // can be installed by users without cluster-scoped RBAC permissions.
754
+ 'bot.rbac.create': false,
755
+ 'bot.serviceAccount.create': false,
756
+ 'bot.serviceAccount.name': 'default',
626
757
  };
627
758
  // Ensure we derive a funded L1 key (index 0 is funded on anvil default mnemonic)
628
759
  if (mnemonicStartIndex === undefined) {
@@ -647,7 +778,7 @@ export async function installTransferBot({
647
778
  if (!repository || !tag) {
648
779
  try {
649
780
  const { stdout } = await execAsync(
650
- `kubectl get pods -l app.kubernetes.io/component=validator -n ${namespace} -o jsonpath='{.items[0].spec.containers[?(@.name=="aztec")].image}' | cat`,
781
+ `kubectl get pods -l app.kubernetes.io/name=validator -n ${namespace} -o jsonpath='{.items[0].spec.containers[?(@.name=="aztec")].image}' | cat`,
651
782
  );
652
783
  const image = stdout.trim().replace(/^'|'$/g, '');
653
784
  if (image && image.includes(':')) {
@@ -668,6 +799,26 @@ export async function installTransferBot({
668
799
  typeof mnemonicStartIndex === 'string' ? mnemonicStartIndex : Number(mnemonicStartIndex);
669
800
  }
670
801
 
802
+ // If a previous install attempt left the release in a non-deployed state (e.g. FAILED),
803
+ // `helm upgrade --install` can error with "has no deployed releases".
804
+ // In that case, clear the release record and do a clean install.
805
+ const existingStatus = await getHelmReleaseStatus(instanceName, namespace);
806
+ if (existingStatus && existingStatus.toLowerCase() !== 'deployed') {
807
+ logger.warn(`Transfer bot release ${instanceName} is in status '${existingStatus}'. Reinstalling cleanly.`);
808
+ await execAsync(`helm uninstall ${instanceName} --namespace ${namespace} --wait --ignore-not-found`).catch(
809
+ () => undefined,
810
+ );
811
+ // If helm left the release in `uninstalling`, force-delete the record so we can reinstall.
812
+ const afterUninstallStatus = await getHelmReleaseStatus(instanceName, namespace);
813
+ if (afterUninstallStatus?.toLowerCase() === 'uninstalling') {
814
+ await forceDeleteHelmReleaseRecord(instanceName, namespace, logger);
815
+ }
816
+ }
817
+
818
+ // `--reuse-values` fails if the release has never successfully deployed (e.g. first install, or a previous failed install).
819
+ // Only reuse values when we have a deployed release to reuse from.
820
+ const effectiveReuseValues = reuseValues && (await hasDeployedHelmRelease(instanceName, namespace));
821
+
671
822
  await execHelmCommand({
672
823
  instanceName,
673
824
  helmChartDir,
@@ -675,7 +826,7 @@ export async function installTransferBot({
675
826
  valuesFile: undefined,
676
827
  timeout,
677
828
  values: values as unknown as Record<string, string | number | boolean>,
678
- reuseValues,
829
+ reuseValues: effectiveReuseValues,
679
830
  });
680
831
 
681
832
  if (replicas > 0) {
@@ -720,7 +871,7 @@ export async function setValidatorTxDrop({
720
871
  const drop = enabled ? 'true' : 'false';
721
872
  const prob = String(probability);
722
873
 
723
- const selectors = ['app=validator', 'app.kubernetes.io/component=validator'];
874
+ const selectors = ['app.kubernetes.io/name=validator', 'app.kubernetes.io/component=validator', 'app=validator'];
724
875
  let updated = false;
725
876
  for (const selector of selectors) {
726
877
  try {
@@ -751,7 +902,7 @@ export async function setValidatorTxDrop({
751
902
  }
752
903
 
753
904
  export async function restartValidators(namespace: string, logger: Logger) {
754
- const selectors = ['app=validator', 'app.kubernetes.io/component=validator'];
905
+ const selectors = ['app.kubernetes.io/name=validator', 'app.kubernetes.io/component=validator', 'app=validator'];
755
906
  let any = false;
756
907
  for (const selector of selectors) {
757
908
  try {
@@ -806,11 +957,33 @@ export async function enableValidatorDynamicBootNode(
806
957
  }
807
958
 
808
959
  export async function getSequencers(namespace: string) {
809
- const command = `kubectl get pods -l app.kubernetes.io/component=validator -n ${namespace} -o jsonpath='{.items[*].metadata.name}'`;
810
- const { stdout } = await execAsync(command);
811
- const sequencers = stdout.split(' ');
812
- logger.verbose(`Found sequencer pods ${sequencers.join(', ')}`);
813
- return sequencers;
960
+ const selectors = [
961
+ 'app.kubernetes.io/name=validator',
962
+ 'app.kubernetes.io/component=validator',
963
+ 'app.kubernetes.io/component=sequencer-node',
964
+ 'app=validator',
965
+ ];
966
+ for (const selector of selectors) {
967
+ try {
968
+ const command = `kubectl get pods -l ${selector} -n ${namespace} -o jsonpath='{.items[*].metadata.name}'`;
969
+ const { stdout } = await execAsync(command);
970
+ const sequencers = stdout
971
+ .split(' ')
972
+ .map(s => s.trim())
973
+ .filter(Boolean);
974
+ if (sequencers.length > 0) {
975
+ logger.verbose(`Found sequencer pods ${sequencers.join(', ')} (selector=${selector})`);
976
+ return sequencers;
977
+ }
978
+ } catch {
979
+ // try next selector
980
+ }
981
+ }
982
+
983
+ // Fail fast instead of returning [''] which leads to attempts to port-forward `pod/`.
984
+ throw new Error(
985
+ `No sequencer/validator pods found in namespace ${namespace}. Tried selectors: ${selectors.join(', ')}`,
986
+ );
814
987
  }
815
988
 
816
989
  export function updateSequencersConfig(env: TestConfig, config: Partial<AztecNodeAdminConfig>) {
@@ -871,7 +1044,7 @@ export async function getPublicViemClient(
871
1044
  containerPort: 8545,
872
1045
  });
873
1046
  const url = `http://127.0.0.1:${port}`;
874
- const client: ViemPublicClient = createPublicClient({ transport: fallback([http(url)]) });
1047
+ const client: ViemPublicClient = createPublicClient({ transport: fallback([http(url, { batch: false })]) });
875
1048
  if (processes) {
876
1049
  processes.push(process);
877
1050
  }
@@ -881,7 +1054,9 @@ export async function getPublicViemClient(
881
1054
  if (!L1_RPC_URLS_JSON) {
882
1055
  throw new Error(`L1_RPC_URLS_JSON is not defined`);
883
1056
  }
884
- const client: ViemPublicClient = createPublicClient({ transport: fallback([http(L1_RPC_URLS_JSON)]) });
1057
+ const client: ViemPublicClient = createPublicClient({
1058
+ transport: fallback([http(L1_RPC_URLS_JSON, { batch: false })]),
1059
+ });
885
1060
  return { url: L1_RPC_URLS_JSON, client };
886
1061
  }
887
1062
  }