@aztec/foundation 0.67.1 → 0.68.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59):
  1. package/dest/abi/abi.d.ts +18 -18
  2. package/dest/blob/index.d.ts +41 -0
  3. package/dest/blob/index.d.ts.map +1 -0
  4. package/dest/blob/index.js +118 -0
  5. package/dest/config/env_var.d.ts +1 -1
  6. package/dest/config/env_var.d.ts.map +1 -1
  7. package/dest/index.d.ts +1 -0
  8. package/dest/index.d.ts.map +1 -1
  9. package/dest/index.js +2 -1
  10. package/dest/log/pino-logger.d.ts.map +1 -1
  11. package/dest/log/pino-logger.js +38 -3
  12. package/dest/noir/noir_package_config.d.ts +4 -4
  13. package/dest/promise/running-promise.d.ts +2 -1
  14. package/dest/promise/running-promise.d.ts.map +1 -1
  15. package/dest/promise/running-promise.js +10 -3
  16. package/dest/retry/index.js +2 -2
  17. package/dest/serialize/field_reader.d.ts +5 -0
  18. package/dest/serialize/field_reader.d.ts.map +1 -1
  19. package/dest/serialize/field_reader.js +8 -1
  20. package/dest/sleep/index.d.ts +1 -1
  21. package/dest/sleep/index.d.ts.map +1 -1
  22. package/dest/sleep/index.js +1 -1
  23. package/dest/string/index.d.ts +1 -0
  24. package/dest/string/index.d.ts.map +1 -1
  25. package/dest/string/index.js +4 -1
  26. package/dest/testing/files/index.d.ts +18 -0
  27. package/dest/testing/files/index.d.ts.map +1 -0
  28. package/dest/testing/files/index.js +57 -0
  29. package/dest/testing/index.d.ts +1 -1
  30. package/dest/testing/index.d.ts.map +1 -1
  31. package/dest/testing/index.js +2 -2
  32. package/dest/testing/test_data.d.ts +0 -22
  33. package/dest/testing/test_data.d.ts.map +1 -1
  34. package/dest/testing/test_data.js +1 -67
  35. package/dest/timer/date.d.ts +13 -0
  36. package/dest/timer/date.d.ts.map +1 -0
  37. package/dest/timer/date.js +22 -0
  38. package/dest/timer/index.d.ts +3 -2
  39. package/dest/timer/index.d.ts.map +1 -1
  40. package/dest/timer/index.js +4 -3
  41. package/dest/timer/timeout.d.ts +2 -3
  42. package/dest/timer/timeout.d.ts.map +1 -1
  43. package/dest/timer/timeout.js +10 -10
  44. package/package.json +8 -6
  45. package/src/blob/index.ts +152 -0
  46. package/src/config/env_var.ts +3 -4
  47. package/src/index.ts +1 -0
  48. package/src/log/pino-logger.ts +41 -2
  49. package/src/promise/running-promise.ts +11 -2
  50. package/src/retry/index.ts +1 -1
  51. package/src/serialize/field_reader.ts +8 -0
  52. package/src/sleep/index.ts +2 -2
  53. package/src/string/index.ts +4 -0
  54. package/src/testing/files/index.ts +63 -0
  55. package/src/testing/index.ts +1 -1
  56. package/src/testing/test_data.ts +0 -75
  57. package/src/timer/date.ts +24 -0
  58. package/src/timer/index.ts +3 -2
  59. package/src/timer/timeout.ts +11 -20
@@ -0,0 +1,152 @@
1
+ import cKzg from 'c-kzg';
2
+ import type { Blob as BlobBuffer } from 'c-kzg';
3
+
4
+ import { poseidon2Hash, sha256 } from '../crypto/index.js';
5
+ import { Fr } from '../fields/index.js';
6
+ import { serializeToBuffer } from '../serialize/index.js';
7
+
8
+ // Importing directly from 'c-kzg' does not work, ignoring import/no-named-as-default-member err:
9
+ /* eslint-disable import/no-named-as-default-member */
10
+
11
+ const {
12
+ BYTES_PER_BLOB,
13
+ FIELD_ELEMENTS_PER_BLOB,
14
+ blobToKzgCommitment,
15
+ computeKzgProof,
16
+ loadTrustedSetup,
17
+ verifyKzgProof,
18
+ } = cKzg;
19
+
20
+ try {
21
+ loadTrustedSetup();
22
+ } catch (error: any) {
23
+ if (error.message.includes('trusted setup is already loaded')) {
24
+ // NB: The c-kzg lib has no way of checking whether the setup is loaded or not,
25
+ // and it throws an error if it's already loaded, even though nothing is wrong.
26
+ // This is a rudimentary way of ensuring we load the trusted setup if we need it.
27
+ } else {
28
+ throw new Error(error);
29
+ }
30
+ }
31
+
32
+ // The prefix to the EVM blobHash, defined here: https://eips.ethereum.org/EIPS/eip-4844#specification
33
+ export const VERSIONED_HASH_VERSION_KZG = 0x01;
34
+
35
+ /**
36
+ * A class to create, manage, and prove EVM blobs.
37
+ */
38
+ export class Blob {
39
+ /** The blob to be broadcast on L1 in bytes form. */
40
+ public readonly data: BlobBuffer;
41
+ /** The hash of all tx effects inside the blob. Used in generating the challenge z and proving that we have included all required effects. */
42
+ public readonly fieldsHash: Fr;
43
+ /** Challenge point z (= H(H(tx_effects), kzgCommmitment). Used such that p(z) = y. */
44
+ public readonly challengeZ: Fr;
45
+ /** Evaluation y = p(z), where p() is the blob polynomial. BLS12 field element, rep. as BigNum in nr, bigint in ts. */
46
+ public readonly evaluationY: Buffer;
47
+ /** Commitment to the blob C. Used in compressed BLS12 point format (48 bytes). */
48
+ public readonly commitment: Buffer;
49
+ /** KZG opening proof for y = p(z). The commitment to quotient polynomial Q, used in compressed BLS12 point format (48 bytes). */
50
+ public readonly proof: Buffer;
51
+
52
+ constructor(
53
+ /** All fields to be broadcast in the blob. */
54
+ fields: Fr[],
55
+ /** If we want to broadcast more fields than fit into a blob, we hash those and used it as the fieldsHash across all blobs.
56
+ * This is much simpler and cheaper in the circuit to do, but MUST BE CHECKED before injecting here.
57
+ */
58
+ multiBlobFieldsHash?: Fr,
59
+ ) {
60
+ if (fields.length > FIELD_ELEMENTS_PER_BLOB) {
61
+ throw new Error(
62
+ `Attempted to overfill blob with ${fields.length} elements. The maximum is ${FIELD_ELEMENTS_PER_BLOB}`,
63
+ );
64
+ }
65
+ this.data = Buffer.concat([serializeToBuffer(fields)], BYTES_PER_BLOB);
66
+ // This matches the output of SpongeBlob.squeeze() in the blob circuit
67
+ this.fieldsHash = multiBlobFieldsHash ? multiBlobFieldsHash : poseidon2Hash(fields);
68
+ this.commitment = Buffer.from(blobToKzgCommitment(this.data));
69
+ this.challengeZ = poseidon2Hash([this.fieldsHash, ...this.commitmentToFields()]);
70
+ const res = computeKzgProof(this.data, this.challengeZ.toBuffer());
71
+ if (!verifyKzgProof(this.commitment, this.challengeZ.toBuffer(), res[1], res[0])) {
72
+ throw new Error(`KZG proof did not verify.`);
73
+ }
74
+ this.proof = Buffer.from(res[0]);
75
+ this.evaluationY = Buffer.from(res[1]);
76
+ }
77
+
78
+ // 48 bytes encoded in fields as [Fr, Fr] = [0->31, 31->48]
79
+ commitmentToFields(): [Fr, Fr] {
80
+ return [new Fr(this.commitment.subarray(0, 31)), new Fr(this.commitment.subarray(31, 48))];
81
+ }
82
+
83
+ // Returns ethereum's versioned blob hash, following kzg_to_versioned_hash: https://eips.ethereum.org/EIPS/eip-4844#helpers
84
+ getEthVersionedBlobHash(): Buffer {
85
+ const hash = sha256(this.commitment);
86
+ hash[0] = VERSIONED_HASH_VERSION_KZG;
87
+ return hash;
88
+ }
89
+
90
+ static getEthVersionedBlobHash(commitment: Buffer): Buffer {
91
+ const hash = sha256(commitment);
92
+ hash[0] = VERSIONED_HASH_VERSION_KZG;
93
+ return hash;
94
+ }
95
+
96
+ // Returns a proof of opening of the blob to verify on L1 using the point evaluation precompile:
97
+ // * input[:32] - versioned_hash
98
+ // * input[32:64] - z
99
+ // * input[64:96] - y
100
+ // * input[96:144] - commitment C
101
+ // * input[144:192] - proof (a commitment to the quotient polynomial q(X))
102
+ // See https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
103
+ getEthBlobEvaluationInputs(): `0x${string}` {
104
+ const buf = Buffer.concat([
105
+ this.getEthVersionedBlobHash(),
106
+ this.challengeZ.toBuffer(),
107
+ this.evaluationY,
108
+ this.commitment,
109
+ this.proof,
110
+ ]);
111
+ return `0x${buf.toString('hex')}`;
112
+ }
113
+
114
+ static getEthBlobEvaluationInputs(blobs: Blob[]): `0x${string}` {
115
+ let buf = Buffer.alloc(0);
116
+ blobs.forEach(blob => {
117
+ buf = Buffer.concat([
118
+ buf,
119
+ blob.getEthVersionedBlobHash(),
120
+ blob.challengeZ.toBuffer(),
121
+ blob.evaluationY,
122
+ blob.commitment,
123
+ blob.proof,
124
+ ]);
125
+ });
126
+ // For multiple blobs, we prefix the number of blobs:
127
+ const lenBuf = Buffer.alloc(1);
128
+ lenBuf.writeUint8(blobs.length);
129
+ buf = Buffer.concat([lenBuf, buf]);
130
+ return `0x${buf.toString('hex')}`;
131
+ }
132
+
133
+ static getViemKzgInstance() {
134
+ return {
135
+ blobToKzgCommitment: cKzg.blobToKzgCommitment,
136
+ computeBlobKzgProof: cKzg.computeBlobKzgProof,
137
+ };
138
+ }
139
+
140
+ // Returns as many blobs as we require to broadcast the given fields
141
+ // Assumes we share the fields hash between all blobs
142
+ static getBlobs(fields: Fr[]): Blob[] {
143
+ const numBlobs = Math.max(Math.ceil(fields.length / FIELD_ELEMENTS_PER_BLOB), 1);
144
+ const multiBlobFieldsHash = poseidon2Hash(fields);
145
+ const res = [];
146
+ for (let i = 0; i < numBlobs; i++) {
147
+ const end = fields.length < (i + 1) * FIELD_ELEMENTS_PER_BLOB ? fields.length : (i + 1) * FIELD_ELEMENTS_PER_BLOB;
148
+ res.push(new Blob(fields.slice(i * FIELD_ELEMENTS_PER_BLOB, end), multiBlobFieldsHash));
149
+ }
150
+ return res;
151
+ }
152
+ }
@@ -52,6 +52,7 @@ export type EnvVar =
52
52
  | 'GOVERNANCE_PROPOSER_PAYLOAD_ADDRESS'
53
53
  | 'INBOX_CONTRACT_ADDRESS'
54
54
  | 'L1_CHAIN_ID'
55
+ | 'L1_CONSENSUS_CLIENT_URL'
55
56
  | 'L1_PRIVATE_KEY'
56
57
  | 'L2_QUEUE_SIZE'
57
58
  | 'LOG_ELAPSED_TIME'
@@ -125,7 +126,6 @@ export type EnvVar =
125
126
  | 'PROVER_REAL_PROOFS'
126
127
  | 'PROVER_REQUIRED_CONFIRMATIONS'
127
128
  | 'PROVER_TEST_DELAY_MS'
128
- | 'PROVER_CACHE_DIR'
129
129
  | 'PXE_L2_STARTING_BLOCK'
130
130
  | 'PXE_PROVER_ENABLED'
131
131
  | 'QUOTE_PROVIDER_BASIS_POINT_FEE'
@@ -136,17 +136,15 @@ export type EnvVar =
136
136
  | 'REGISTRY_CONTRACT_ADDRESS'
137
137
  | 'ROLLUP_CONTRACT_ADDRESS'
138
138
  | 'SEQ_ALLOWED_SETUP_FN'
139
- | 'SEQ_ALLOWED_TEARDOWN_FN'
140
139
  | 'SEQ_MAX_BLOCK_SIZE_IN_BYTES'
141
- | 'SEQ_MAX_SECONDS_BETWEEN_BLOCKS'
142
140
  | 'SEQ_MAX_TX_PER_BLOCK'
143
- | 'SEQ_MIN_SECONDS_BETWEEN_BLOCKS'
144
141
  | 'SEQ_MIN_TX_PER_BLOCK'
145
142
  | 'SEQ_PUBLISH_RETRY_INTERVAL_MS'
146
143
  | 'SEQ_PUBLISHER_PRIVATE_KEY'
147
144
  | 'SEQ_REQUIRED_CONFIRMATIONS'
148
145
  | 'SEQ_TX_POLLING_INTERVAL_MS'
149
146
  | 'SEQ_ENFORCE_TIME_TABLE'
147
+ | 'SEQ_MAX_L1_TX_INCLUSION_TIME_INTO_SLOT'
150
148
  | 'STAKING_ASSET_CONTRACT_ADDRESS'
151
149
  | 'REWARD_DISTRIBUTOR_CONTRACT_ADDRESS'
152
150
  | 'TELEMETRY'
@@ -184,6 +182,7 @@ export type EnvVar =
184
182
  | 'L1_TX_MONITOR_CHECK_INTERVAL_MS'
185
183
  | 'L1_TX_MONITOR_STALL_TIME_MS'
186
184
  | 'L1_TX_MONITOR_TX_TIMEOUT_MS'
185
+ | 'L1_TX_PROPAGATION_MAX_QUERY_ATTEMPTS'
187
186
  | 'FAUCET_MNEMONIC_ACCOUNT_INDEX'
188
187
  | 'FAUCET_ETH_AMOUNT'
189
188
  | 'FAUCET_INTERVAL_MS'
package/src/index.ts CHANGED
@@ -3,6 +3,7 @@ export * as abi from './abi/index.js';
3
3
  export * as asyncMap from './async-map/index.js';
4
4
  export * as aztecAddress from './aztec-address/index.js';
5
5
  export * as bigintBuffer from './bigint-buffer/index.js';
6
+ export * as blob from './blob/index.js';
6
7
  export * as collection from './collection/index.js';
7
8
  export * as committable from './committable/index.js';
8
9
  export * as crypto from './crypto/index.js';
@@ -69,7 +69,46 @@ const [logLevel, logFilters] = parseEnv(process.env.LOG_LEVEL, defaultLogLevel);
69
69
 
70
70
  // Define custom logging levels for pino.
71
71
  const customLevels = { verbose: 25 };
72
- const pinoOpts = { customLevels, useOnlyCustomLevels: false, level: logLevel };
72
+
73
+ // inspired by https://github.com/pinojs/pino/issues/726#issuecomment-605814879
74
+ const levelToSeverityFormatter = (label: string, level: number): object => {
75
+ // Severity labels https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity
76
+ let severity: string;
77
+
78
+ switch (label as pino.Level | keyof typeof customLevels) {
79
+ case 'trace':
80
+ case 'debug':
81
+ severity = 'DEBUG';
82
+ break;
83
+ case 'verbose':
84
+ case 'info':
85
+ severity = 'INFO';
86
+ break;
87
+ case 'warn':
88
+ severity = 'WARNING';
89
+ break;
90
+ case 'error':
91
+ severity = 'ERROR';
92
+ break;
93
+ case 'fatal':
94
+ severity = 'CRITICAL';
95
+ break;
96
+ default:
97
+ severity = 'DEFAULT';
98
+ break;
99
+ }
100
+
101
+ return { severity, level };
102
+ };
103
+
104
+ const pinoOpts: pino.LoggerOptions<keyof typeof customLevels> = {
105
+ customLevels,
106
+ useOnlyCustomLevels: false,
107
+ level: logLevel,
108
+ formatters: {
109
+ level: levelToSeverityFormatter,
110
+ },
111
+ };
73
112
 
74
113
  export const levels = {
75
114
  labels: { ...pino.levels.labels, ...Object.fromEntries(Object.entries(customLevels).map(e => e.reverse())) },
@@ -83,7 +122,7 @@ export const pinoPrettyOpts = {
83
122
  destination: 2,
84
123
  sync: true,
85
124
  colorize: useColor,
86
- ignore: 'module,pid,hostname,trace_id,span_id,trace_flags',
125
+ ignore: 'module,pid,hostname,trace_id,span_id,trace_flags,severity',
87
126
  messageFormat: `${bold('{module}')} ${reset('{msg}')}`,
88
127
  customLevels: 'fatal:60,error:50,warn:40,info:30,verbose:25,debug:20,trace:10',
89
128
  customColors: 'fatal:bgRed,error:red,warn:yellow,info:green,verbose:magenta,debug:blue,trace:gray',
@@ -1,3 +1,4 @@
1
+ import { createLogger } from '../log/pino-logger.js';
1
2
  import { InterruptibleSleep } from '../sleep/index.js';
2
3
  import { type PromiseWithResolvers, promiseWithResolvers } from './utils.js';
3
4
 
@@ -12,7 +13,11 @@ export class RunningPromise {
12
13
  private interruptibleSleep = new InterruptibleSleep();
13
14
  private requested: PromiseWithResolvers<void> | undefined = undefined;
14
15
 
15
- constructor(private fn: () => void | Promise<void>, private pollingIntervalMS = 10000) {}
16
+ constructor(
17
+ private fn: () => void | Promise<void>,
18
+ private logger = createLogger('running-promise'),
19
+ private pollingIntervalMS = 10000,
20
+ ) {}
16
21
 
17
22
  /**
18
23
  * Starts the running promise.
@@ -23,7 +28,11 @@ export class RunningPromise {
23
28
  const poll = async () => {
24
29
  while (this.running) {
25
30
  const hasRequested = this.requested !== undefined;
26
- await this.fn();
31
+ try {
32
+ await this.fn();
33
+ } catch (err) {
34
+ this.logger.error('Error in running promise', err);
35
+ }
27
36
 
28
37
  // If an immediate run had been requested *before* the function started running, resolve the request.
29
38
  if (hasRequested) {
@@ -64,7 +64,7 @@ export async function retry<Result>(
64
64
  throw err;
65
65
  }
66
66
  log.verbose(`${name} failed. Will retry in ${s}s...`);
67
- !failSilently && log.error(err);
67
+ !failSilently && log.error(`Error while retrying ${name}`, err);
68
68
  await sleep(s * 1000);
69
69
  continue;
70
70
  }
@@ -140,4 +140,12 @@ export class FieldReader {
140
140
  }): T {
141
141
  return deserializer.fromFields(this);
142
142
  }
143
+
144
+ /**
145
+ * Returns whether the reader has finished reading all fields.
146
+ * @returns A bool.
147
+ */
148
+ public isFinished(): boolean {
149
+ return this.index === this.length;
150
+ }
143
151
  }
@@ -70,6 +70,6 @@ export class InterruptibleSleep {
70
70
  * @param returnValue - The return value of the promise.
71
71
  * @returns A Promise that resolves after the specified duration, allowing the use of 'await' to pause execution.
72
72
  */
73
- export function sleep<T>(ms: number, returnValue?: T): Promise<T | undefined> {
74
- return new Promise(resolve => setTimeout(() => resolve(returnValue), ms));
73
+ export function sleep<T>(ms: number, returnValue?: T): Promise<T> {
74
+ return new Promise(resolve => setTimeout(() => resolve(returnValue as T), ms));
75
75
  }
@@ -25,3 +25,7 @@ export function pluralize(str: string, count: number | bigint, plural?: string):
25
25
  export function count(count: number | bigint, str: string, plural?: string): string {
26
26
  return `${count} ${pluralize(str, count, plural)}`;
27
27
  }
28
+
29
+ export function truncate(str: string, length: number = 64): string {
30
+ return str.length > length ? str.slice(0, length) + '...' : str;
31
+ }
@@ -0,0 +1,63 @@
1
+ import { existsSync, readFileSync, writeFileSync } from 'fs';
2
+ import { dirname, join, resolve } from 'path';
3
+
4
+ import { createConsoleLogger } from '../../log/console.js';
5
+ import { fileURLToPath } from '../../url/index.js';
6
+ import { isGenerateTestDataEnabled } from '../test_data.js';
7
+
8
+ /** Writes the contents specified to the target file if test data generation is enabled. */
9
+ export function writeTestData(targetFileFromRepoRoot: string, contents: string | Buffer) {
10
+ if (!isGenerateTestDataEnabled()) {
11
+ return;
12
+ }
13
+ const targetFile = getPathToFile(targetFileFromRepoRoot);
14
+ const toWrite = typeof contents === 'string' ? contents : contents.toString('hex');
15
+ writeFileSync(targetFile, toWrite);
16
+ const logger = createConsoleLogger('aztec:testing:test_data');
17
+ logger(`Wrote test data to ${targetFile}`);
18
+ }
19
+
20
+ /**
21
+ * Looks for a variable assignment in the target file and updates the value, only if test data generation is enabled.
22
+ * Note that a magic inline comment would be a cleaner approach, like `/* TEST-DATA-START *\/` and `/* TEST-DATA-END *\/`,
23
+ * but running nargo fmt on it panics since the comment would be erased, so we roll with this for now.
24
+ * @remarks Requires AZTEC_GENERATE_TEST_DATA=1 to be set
25
+ */
26
+ export function updateInlineTestData(targetFileFromRepoRoot: string, itemName: string, value: string) {
27
+ if (!isGenerateTestDataEnabled()) {
28
+ return;
29
+ }
30
+ const logger = createConsoleLogger('aztec:testing:test_data');
31
+ const targetFile = getPathToFile(targetFileFromRepoRoot);
32
+ const contents = readFileSync(targetFile, 'utf8').toString();
33
+ const regex = new RegExp(`let ${itemName} =[\\s\\S]*?;`, 'g');
34
+ if (!regex.exec(contents)) {
35
+ throw new Error(`Test data marker for ${itemName} not found in ${targetFile}`);
36
+ }
37
+
38
+ const updatedContents = contents.replaceAll(regex, `let ${itemName} = ${value};`);
39
+ writeFileSync(targetFile, updatedContents);
40
+ logger(`Updated test data in ${targetFile} for ${itemName} to ${value}`);
41
+ }
42
+
43
+ /**
44
+ * Updates the sample Prover.toml files in noir-projects/noir-protocol-circuits/crates/.
45
+ * @remarks Requires AZTEC_GENERATE_TEST_DATA=1 & generateProtocolCircuitTestData=true to be set
46
+ * To re-gen, run 'AZTEC_GENERATE_TEST_DATA=1 FAKE_PROOFS=1 yarn workspace @aztec/end-to-end test full.test'
47
+ */
48
+ export function updateProtocolCircuitSampleInputs(circuitName: string, value: string) {
49
+ const logger = createConsoleLogger('aztec:testing:test_data');
50
+ const targetFileFromRepoRoot = `noir-projects/noir-protocol-circuits/crates/${circuitName}/Prover.toml`;
51
+ const targetFile = getPathToFile(targetFileFromRepoRoot);
52
+ writeFileSync(targetFile, value);
53
+ logger(`Updated test data in ${targetFile} for ${circuitName}`);
54
+ }
55
+
56
+ function getPathToFile(targetFileFromRepoRoot: string) {
57
+ const repoRoot = resolve(dirname(fileURLToPath(import.meta.url)), '../../../../../');
58
+ if (!existsSync(join(repoRoot, 'CODEOWNERS'))) {
59
+ throw new Error(`Path to repo root is incorrect (got ${repoRoot})`);
60
+ }
61
+
62
+ return join(repoRoot, targetFileFromRepoRoot);
63
+ }
@@ -1,3 +1,3 @@
1
- export * from './test_data.js';
2
1
  export * from './snapshot_serializer.js';
3
2
  export * from './port_allocator.js';
3
+ export * from './test_data.js';
@@ -1,25 +1,10 @@
1
- import { existsSync, readFileSync, writeFileSync } from 'fs';
2
- import { dirname, join, resolve } from 'path';
3
-
4
- import { createConsoleLogger } from '../log/console.js';
5
- import { fileURLToPath } from '../url/index.js';
6
-
7
1
  const testData: { [key: string]: unknown[] } = {};
8
- let generateProtocolCircuitTestData = false;
9
2
 
10
3
  /** Returns whether test data generation is enabled */
11
4
  export function isGenerateTestDataEnabled() {
12
5
  return ['1', 'true'].includes(process.env.AZTEC_GENERATE_TEST_DATA ?? '') && typeof expect !== 'undefined';
13
6
  }
14
7
 
15
- /**
16
- * This is separate so Prover.tomls don't get edited everytime any test is run,
17
- * Only full.test updates prover tomls, then switches this off.
18
- */
19
- export function switchGenerateProtocolCircuitTestData() {
20
- generateProtocolCircuitTestData = !generateProtocolCircuitTestData;
21
- }
22
-
23
8
  /** Pushes test data with the given name, only if test data generation is enabled. */
24
9
  export function pushTestData<T>(itemName: string, data: T) {
25
10
  if (!isGenerateTestDataEnabled()) {
@@ -49,63 +34,3 @@ export function getTestData(itemName: string): unknown[] {
49
34
  const fullItemName = `${testName} ${itemName}`;
50
35
  return testData[fullItemName];
51
36
  }
52
-
53
- /** Writes the contents specified to the target file if test data generation is enabled. */
54
- export function writeTestData(targetFileFromRepoRoot: string, contents: string | Buffer) {
55
- if (!isGenerateTestDataEnabled()) {
56
- return;
57
- }
58
- const targetFile = getPathToFile(targetFileFromRepoRoot);
59
- const toWrite = typeof contents === 'string' ? contents : contents.toString('hex');
60
- writeFileSync(targetFile, toWrite);
61
- const logger = createConsoleLogger('aztec:testing:test_data');
62
- logger(`Wrote test data to ${targetFile}`);
63
- }
64
-
65
- /**
66
- * Looks for a variable assignment in the target file and updates the value, only if test data generation is enabled.
67
- * Note that a magic inline comment would be a cleaner approach, like `/* TEST-DATA-START *\/` and `/* TEST-DATA-END *\/`,
68
- * but running nargo fmt on it panics since the comment would be erased, so we roll with this for now.
69
- * @remarks Requires AZTEC_GENERATE_TEST_DATA=1 to be set
70
- */
71
- export function updateInlineTestData(targetFileFromRepoRoot: string, itemName: string, value: string) {
72
- if (!isGenerateTestDataEnabled()) {
73
- return;
74
- }
75
- const logger = createConsoleLogger('aztec:testing:test_data');
76
- const targetFile = getPathToFile(targetFileFromRepoRoot);
77
- const contents = readFileSync(targetFile, 'utf8').toString();
78
- const regex = new RegExp(`let ${itemName} =[\\s\\S]*?;`, 'g');
79
- if (!regex.exec(contents)) {
80
- throw new Error(`Test data marker for ${itemName} not found in ${targetFile}`);
81
- }
82
-
83
- const updatedContents = contents.replaceAll(regex, `let ${itemName} = ${value};`);
84
- writeFileSync(targetFile, updatedContents);
85
- logger(`Updated test data in ${targetFile} for ${itemName} to ${value}`);
86
- }
87
-
88
- /**
89
- * Updates the sample Prover.toml files in noir-projects/noir-protocol-circuits/crates/.
90
- * @remarks Requires AZTEC_GENERATE_TEST_DATA=1 & generateProtocolCircuitTestData=true to be set
91
- * To re-gen, run 'AZTEC_GENERATE_TEST_DATA=1 FAKE_PROOFS=1 yarn workspace @aztec/end-to-end test full.test'
92
- */
93
- export function updateProtocolCircuitSampleInputs(circuitName: string, value: string) {
94
- if (!isGenerateTestDataEnabled() || !generateProtocolCircuitTestData) {
95
- return;
96
- }
97
- const logger = createConsoleLogger('aztec:testing:test_data');
98
- const targetFileFromRepoRoot = `noir-projects/noir-protocol-circuits/crates/${circuitName}/Prover.toml`;
99
- const targetFile = getPathToFile(targetFileFromRepoRoot);
100
- writeFileSync(targetFile, value);
101
- logger(`Updated test data in ${targetFile} for ${circuitName}`);
102
- }
103
-
104
- function getPathToFile(targetFileFromRepoRoot: string) {
105
- const repoRoot = resolve(dirname(fileURLToPath(import.meta.url)), '../../../../');
106
- if (!existsSync(join(repoRoot, 'CODEOWNERS'))) {
107
- throw new Error(`Path to repo root is incorrect (got ${repoRoot})`);
108
- }
109
-
110
- return join(repoRoot, targetFileFromRepoRoot);
111
- }
@@ -0,0 +1,24 @@
1
+ import { createLogger } from '../log/pino-logger.js';
2
+
3
+ /** Returns current datetime. */
4
+ export class DateProvider {
5
+ public now(): number {
6
+ return Date.now();
7
+ }
8
+ }
9
+
10
+ /** Returns current datetime and allows to override it. */
11
+ export class TestDateProvider implements DateProvider {
12
+ private offset = 0;
13
+
14
+ constructor(private readonly logger = createLogger('foundation:test-date-provider')) {}
15
+
16
+ public now(): number {
17
+ return Date.now() + this.offset;
18
+ }
19
+
20
+ public setTime(timeMs: number) {
21
+ this.offset = timeMs - Date.now();
22
+ this.logger.warn(`Time set to ${new Date(timeMs).toISOString()}`, { offset: this.offset, timeMs });
23
+ }
24
+ }
@@ -1,3 +1,4 @@
1
- export { TimeoutTask, executeTimeoutWithCustomError } from './timeout.js';
2
- export { Timer } from './timer.js';
1
+ export * from './date.js';
3
2
  export { elapsed, elapsedSync } from './elapsed.js';
3
+ export { TimeoutTask, executeTimeout } from './timeout.js';
4
+ export { Timer } from './timer.js';
@@ -1,3 +1,5 @@
1
+ import { TimeoutError } from '../error/index.js';
2
+
1
3
  /**
2
4
  * TimeoutTask class creates an instance for managing and executing a given asynchronous function with a specified timeout duration.
3
5
  * The task will be automatically interrupted if it exceeds the given timeout duration, and will throw a custom error message.
@@ -10,14 +12,9 @@ export class TimeoutTask<T> {
10
12
  private interrupt = () => {};
11
13
  private totalTime = 0;
12
14
 
13
- constructor(
14
- private fn: () => Promise<T>,
15
- private timeout = 0,
16
- fnName = '',
17
- error = () => new Error(`Timeout${fnName ? ` running ${fnName}` : ''} after ${timeout}ms.`),
18
- ) {
15
+ constructor(private fn: () => Promise<T>, private timeout: number, errorFn: () => any) {
19
16
  this.interruptPromise = new Promise<T>((_, reject) => {
20
- this.interrupt = () => reject(error());
17
+ this.interrupt = () => reject(errorFn());
21
18
  });
22
19
  }
23
20
 
@@ -63,17 +60,11 @@ export class TimeoutTask<T> {
63
60
  }
64
61
  }
65
62
 
66
- export const executeTimeout = async <T>(fn: () => Promise<T>, timeout = 0, fnName = '') => {
67
- const task = new TimeoutTask(fn, timeout, fnName);
68
- return await task.exec();
69
- };
70
-
71
- export const executeTimeoutWithCustomError = async <T>(
72
- fn: () => Promise<T>,
73
- timeout = 0,
74
- error = () => new Error('No custom error provided'),
75
- fnName = '',
76
- ) => {
77
- const task = new TimeoutTask(fn, timeout, fnName, error);
63
+ export async function executeTimeout<T>(fn: () => Promise<T>, timeout: number, errorOrFnName?: string | (() => any)) {
64
+ const errorFn =
65
+ typeof errorOrFnName === 'function'
66
+ ? errorOrFnName
67
+ : () => new TimeoutError(`Timeout running ${errorOrFnName ?? 'function'} after ${timeout}ms.`);
68
+ const task = new TimeoutTask(fn, timeout, errorFn);
78
69
  return await task.exec();
79
- };
70
+ }