@aztec/foundation 0.66.0 → 0.67.1-devnet

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (127)
  1. package/dest/abi/abi.d.ts +18 -18
  2. package/dest/abi/abi.d.ts.map +1 -1
  3. package/dest/abi/abi.js +25 -10
  4. package/dest/abi/encoder.js +3 -3
  5. package/dest/abi/event_selector.js +2 -2
  6. package/dest/abi/function_selector.js +2 -2
  7. package/dest/blob/index.d.ts +41 -0
  8. package/dest/blob/index.d.ts.map +1 -0
  9. package/dest/blob/index.js +118 -0
  10. package/dest/collection/array.d.ts +2 -0
  11. package/dest/collection/array.d.ts.map +1 -1
  12. package/dest/collection/array.js +10 -1
  13. package/dest/collection/object.d.ts +6 -0
  14. package/dest/collection/object.d.ts.map +1 -1
  15. package/dest/collection/object.js +15 -1
  16. package/dest/config/env_var.d.ts +1 -1
  17. package/dest/config/env_var.d.ts.map +1 -1
  18. package/dest/crypto/random/randomness_singleton.js +3 -3
  19. package/dest/fields/fields.d.ts +20 -2
  20. package/dest/fields/fields.d.ts.map +1 -1
  21. package/dest/fields/fields.js +37 -3
  22. package/dest/fs/run_in_dir.js +2 -2
  23. package/dest/index.d.ts +1 -0
  24. package/dest/index.d.ts.map +1 -1
  25. package/dest/index.js +2 -1
  26. package/dest/iterable/index.d.ts +1 -0
  27. package/dest/iterable/index.d.ts.map +1 -1
  28. package/dest/iterable/index.js +2 -1
  29. package/dest/iterable/toArray.d.ts +2 -0
  30. package/dest/iterable/toArray.d.ts.map +1 -0
  31. package/dest/iterable/toArray.js +8 -0
  32. package/dest/json-rpc/client/fetch.d.ts +2 -2
  33. package/dest/json-rpc/client/fetch.d.ts.map +1 -1
  34. package/dest/json-rpc/client/fetch.js +3 -3
  35. package/dest/json-rpc/client/safe_json_rpc_client.d.ts.map +1 -1
  36. package/dest/json-rpc/client/safe_json_rpc_client.js +7 -12
  37. package/dest/json-rpc/server/safe_json_rpc_server.d.ts +3 -2
  38. package/dest/json-rpc/server/safe_json_rpc_server.d.ts.map +1 -1
  39. package/dest/json-rpc/server/safe_json_rpc_server.js +5 -5
  40. package/dest/log/log-filters.d.ts.map +1 -1
  41. package/dest/log/log-filters.js +8 -2
  42. package/dest/log/log_fn.d.ts +1 -1
  43. package/dest/log/pino-logger.d.ts +18 -7
  44. package/dest/log/pino-logger.d.ts.map +1 -1
  45. package/dest/log/pino-logger.js +67 -18
  46. package/dest/noir/noir_package_config.d.ts +4 -4
  47. package/dest/promise/running-promise.d.ts +2 -1
  48. package/dest/promise/running-promise.d.ts.map +1 -1
  49. package/dest/promise/running-promise.js +10 -3
  50. package/dest/queue/base_memory_queue.d.ts.map +1 -1
  51. package/dest/queue/base_memory_queue.js +3 -3
  52. package/dest/queue/bounded_serial_queue.d.ts.map +1 -1
  53. package/dest/queue/bounded_serial_queue.js +3 -3
  54. package/dest/queue/fifo_memory_queue.d.ts +2 -2
  55. package/dest/queue/fifo_memory_queue.d.ts.map +1 -1
  56. package/dest/queue/fifo_memory_queue.js +1 -1
  57. package/dest/retry/index.d.ts.map +1 -1
  58. package/dest/retry/index.js +4 -4
  59. package/dest/schemas/utils.d.ts +3 -1
  60. package/dest/schemas/utils.d.ts.map +1 -1
  61. package/dest/schemas/utils.js +8 -3
  62. package/dest/serialize/field_reader.d.ts +5 -0
  63. package/dest/serialize/field_reader.d.ts.map +1 -1
  64. package/dest/serialize/field_reader.js +8 -1
  65. package/dest/string/index.d.ts +1 -0
  66. package/dest/string/index.d.ts.map +1 -1
  67. package/dest/string/index.js +4 -1
  68. package/dest/testing/files/index.d.ts +23 -0
  69. package/dest/testing/files/index.d.ts.map +1 -0
  70. package/dest/testing/files/index.js +68 -0
  71. package/dest/testing/index.d.ts +1 -1
  72. package/dest/testing/index.d.ts.map +1 -1
  73. package/dest/testing/index.js +2 -2
  74. package/dest/testing/test_data.d.ts +0 -11
  75. package/dest/testing/test_data.d.ts.map +1 -1
  76. package/dest/testing/test_data.js +1 -44
  77. package/dest/timer/date.d.ts +13 -0
  78. package/dest/timer/date.d.ts.map +1 -0
  79. package/dest/timer/date.js +22 -0
  80. package/dest/timer/index.d.ts +1 -0
  81. package/dest/timer/index.d.ts.map +1 -1
  82. package/dest/timer/index.js +2 -1
  83. package/dest/transport/dispatch/create_dispatch_fn.d.ts.map +1 -1
  84. package/dest/transport/dispatch/create_dispatch_fn.js +3 -3
  85. package/dest/transport/transport_client.js +3 -3
  86. package/dest/wasm/wasm_module.d.ts.map +1 -1
  87. package/dest/wasm/wasm_module.js +2 -2
  88. package/dest/worker/worker_pool.js +3 -3
  89. package/package.json +12 -3
  90. package/src/abi/abi.ts +30 -11
  91. package/src/abi/encoder.ts +2 -2
  92. package/src/abi/event_selector.ts +1 -1
  93. package/src/abi/function_selector.ts +1 -1
  94. package/src/blob/index.ts +152 -0
  95. package/src/collection/array.ts +10 -0
  96. package/src/collection/object.ts +22 -0
  97. package/src/config/env_var.ts +11 -6
  98. package/src/crypto/random/randomness_singleton.ts +2 -2
  99. package/src/fields/fields.ts +40 -2
  100. package/src/fs/run_in_dir.ts +1 -1
  101. package/src/index.ts +1 -0
  102. package/src/iterable/index.ts +1 -0
  103. package/src/iterable/toArray.ts +7 -0
  104. package/src/jest/setup.mjs +9 -0
  105. package/src/json-rpc/client/fetch.ts +3 -3
  106. package/src/json-rpc/client/safe_json_rpc_client.ts +7 -15
  107. package/src/json-rpc/server/safe_json_rpc_server.ts +5 -5
  108. package/src/log/log-filters.ts +7 -1
  109. package/src/log/log_fn.ts +1 -1
  110. package/src/log/pino-logger.ts +87 -41
  111. package/src/promise/running-promise.ts +11 -2
  112. package/src/queue/base_memory_queue.ts +2 -2
  113. package/src/queue/bounded_serial_queue.ts +2 -2
  114. package/src/queue/fifo_memory_queue.ts +2 -2
  115. package/src/retry/index.ts +3 -3
  116. package/src/schemas/utils.ts +10 -2
  117. package/src/serialize/field_reader.ts +8 -0
  118. package/src/string/index.ts +4 -0
  119. package/src/testing/files/index.ts +76 -0
  120. package/src/testing/index.ts +1 -1
  121. package/src/testing/test_data.ts +0 -50
  122. package/src/timer/date.ts +24 -0
  123. package/src/timer/index.ts +1 -0
  124. package/src/transport/dispatch/create_dispatch_fn.ts +2 -2
  125. package/src/transport/transport_client.ts +2 -2
  126. package/src/wasm/wasm_module.ts +1 -1
  127. package/src/worker/worker_pool.ts +2 -2
@@ -1,7 +1,6 @@
1
1
  import { createColors } from 'colorette';
2
2
  import isNode from 'detect-node';
3
3
  import { pino, symbols } from 'pino';
4
- import pretty from 'pino-pretty';
5
4
  import { type Writable } from 'stream';
6
5
  import { inspect } from 'util';
7
6
 
@@ -10,38 +9,37 @@ import { getLogLevelFromFilters, parseEnv } from './log-filters.js';
10
9
  import { type LogLevel } from './log-levels.js';
11
10
  import { type LogData, type LogFn } from './log_fn.js';
12
11
 
13
- // TODO(palla/log): Rename to createLogger
14
- export function createDebugLogger(module: string): DebugLogger {
15
- // TODO(palla/log): Rename all module names to remove the aztec prefix
16
- const pinoLogger = logger.child(
17
- { module: module.replace(/^aztec:/, '') },
18
- { level: getLogLevelFromFilters(logFilters, module) },
19
- );
12
+ export function createLogger(module: string): Logger {
13
+ module = module.replace(/^aztec:/, '');
14
+ const pinoLogger = logger.child({ module }, { level: getLogLevelFromFilters(logFilters, module) });
20
15
 
21
16
  // We check manually for isLevelEnabled to avoid calling processLogData unnecessarily.
22
17
  // Note that isLevelEnabled is missing from the browser version of pino.
23
- const logFn = (level: LogLevel, msg: string, data?: LogData) =>
24
- isLevelEnabled(pinoLogger, level) && pinoLogger[level](processLogData(data ?? {}), msg);
18
+ const logFn = (level: LogLevel, msg: string, data?: unknown) =>
19
+ isLevelEnabled(pinoLogger, level) && pinoLogger[level](processLogData((data as LogData) ?? {}), msg);
25
20
 
26
21
  return {
27
22
  silent: () => {},
28
23
  // TODO(palla/log): Should we move err to data instead of the text message?
29
24
  /** Log as fatal. Use when an error has brought down the system. */
30
- fatal: (msg: string, err?: unknown, data?: LogData) => logFn('fatal', formatErr(msg, err), data),
25
+ fatal: (msg: string, err?: unknown, data?: unknown) => logFn('fatal', formatErr(msg, err), data),
31
26
  /** Log as error. Use for errors in general. */
32
- error: (msg: string, err?: unknown, data?: LogData) => logFn('error', formatErr(msg, err), data),
27
+ error: (msg: string, err?: unknown, data?: unknown) => logFn('error', formatErr(msg, err), data),
33
28
  /** Log as warn. Use for when we stray from the happy path. */
34
- warn: (msg: string, data?: LogData) => logFn('warn', msg, data),
29
+ warn: (msg: string, data?: unknown) => logFn('warn', msg, data),
35
30
  /** Log as info. Use for providing an operator with info on what the system is doing. */
36
- info: (msg: string, data?: LogData) => logFn('info', msg, data),
31
+ info: (msg: string, data?: unknown) => logFn('info', msg, data),
37
32
  /** Log as verbose. Use for when we need additional insight on what a subsystem is doing. */
38
- verbose: (msg: string, data?: LogData) => logFn('verbose', msg, data),
33
+ verbose: (msg: string, data?: unknown) => logFn('verbose', msg, data),
39
34
  /** Log as debug. Use for when we need debugging info to troubleshoot an issue on a specific component. */
40
- debug: (msg: string, data?: LogData) => logFn('debug', msg, data),
35
+ debug: (msg: string, data?: unknown) => logFn('debug', msg, data),
41
36
  /** Log as trace. Use for when we want to denial-of-service any recipient of the logs. */
42
- trace: (msg: string, data?: LogData) => logFn('trace', msg, data),
37
+ trace: (msg: string, data?: unknown) => logFn('trace', msg, data),
43
38
  level: pinoLogger.level as LogLevel,
39
+ /** Whether the given level is enabled for this logger. */
44
40
  isLevelEnabled: (level: LogLevel) => isLevelEnabled(pinoLogger, level),
41
+ /** Module name for the logger. */
42
+ module,
45
43
  };
46
44
  }
47
45
 
@@ -69,10 +67,58 @@ function isLevelEnabled(logger: pino.Logger<'verbose', boolean>, level: LogLevel
69
67
  const defaultLogLevel = process.env.NODE_ENV === 'test' ? 'silent' : 'info';
70
68
  const [logLevel, logFilters] = parseEnv(process.env.LOG_LEVEL, defaultLogLevel);
71
69
 
70
+ // Define custom logging levels for pino.
71
+ const customLevels = { verbose: 25 };
72
+
73
+ // inspired by https://github.com/pinojs/pino/issues/726#issuecomment-605814879
74
+ const levelToSeverityFormatter = (label: string, level: number): object => {
75
+ // Severity labels https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity
76
+ let severity: string;
77
+
78
+ switch (label as pino.Level | keyof typeof customLevels) {
79
+ case 'trace':
80
+ case 'debug':
81
+ severity = 'DEBUG';
82
+ break;
83
+ case 'verbose':
84
+ case 'info':
85
+ severity = 'INFO';
86
+ break;
87
+ case 'warn':
88
+ severity = 'WARNING';
89
+ break;
90
+ case 'error':
91
+ severity = 'ERROR';
92
+ break;
93
+ case 'fatal':
94
+ severity = 'CRITICAL';
95
+ break;
96
+ default:
97
+ severity = 'DEFAULT';
98
+ break;
99
+ }
100
+
101
+ return { severity, level };
102
+ };
103
+
104
+ const pinoOpts: pino.LoggerOptions<keyof typeof customLevels> = {
105
+ customLevels,
106
+ useOnlyCustomLevels: false,
107
+ level: logLevel,
108
+ formatters: {
109
+ level: levelToSeverityFormatter,
110
+ },
111
+ };
112
+
113
+ export const levels = {
114
+ labels: { ...pino.levels.labels, ...Object.fromEntries(Object.entries(customLevels).map(e => e.reverse())) },
115
+ values: { ...pino.levels.values, ...customLevels },
116
+ };
117
+
72
118
  // Transport options for pretty logging to stderr via pino-pretty.
73
119
  const useColor = true;
74
120
  const { bold, reset } = createColors({ useColor });
75
- const pinoPrettyOpts = {
121
+ export const pinoPrettyOpts = {
76
122
  destination: 2,
77
123
  sync: true,
78
124
  colorize: useColor,
@@ -81,25 +127,20 @@ const pinoPrettyOpts = {
81
127
  customLevels: 'fatal:60,error:50,warn:40,info:30,verbose:25,debug:20,trace:10',
82
128
  customColors: 'fatal:bgRed,error:red,warn:yellow,info:green,verbose:magenta,debug:blue,trace:gray',
83
129
  minimumLevel: 'trace' as const,
130
+ singleLine: !['1', 'true'].includes(process.env.LOG_MULTILINE ?? ''),
84
131
  };
85
- const prettyTransport: pino.TransportSingleOptions = {
132
+
133
+ const prettyTransport: pino.TransportTargetOptions = {
86
134
  target: 'pino-pretty',
87
135
  options: pinoPrettyOpts,
136
+ level: 'trace',
88
137
  };
89
138
 
90
139
  // Transport for vanilla stdio logging as JSON.
91
- const stdioTransport: pino.TransportSingleOptions = {
140
+ const stdioTransport: pino.TransportTargetOptions = {
92
141
  target: 'pino/file',
93
142
  options: { destination: 2 },
94
- };
95
-
96
- // Define custom logging levels for pino.
97
- const customLevels = { verbose: 25 };
98
- const pinoOpts = { customLevels, useOnlyCustomLevels: false, level: logLevel };
99
-
100
- export const levels = {
101
- labels: { ...pino.levels.labels, ...Object.fromEntries(Object.entries(customLevels).map(e => e.reverse())) },
102
- values: { ...pino.levels.values, ...customLevels },
143
+ level: 'trace',
103
144
  };
104
145
 
105
146
  // Transport for OpenTelemetry logging. While defining this here is an abstraction leakage since this
@@ -110,18 +151,21 @@ export const levels = {
110
151
  // since pino will load this transport separately on a worker thread, to minimize disruption to the main loop.
111
152
  const otlpEndpoint = process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT;
112
153
  const otelOpts = { levels };
113
- const otelTransport: pino.TransportSingleOptions = {
154
+ const otelTransport: pino.TransportTargetOptions = {
114
155
  target: '@aztec/telemetry-client/otel-pino-stream',
115
156
  options: otelOpts,
157
+ level: 'trace',
116
158
  };
117
159
 
118
160
  function makeLogger() {
119
161
  if (!isNode) {
120
- // We are on the browser
162
+ // We are on the browser.
121
163
  return pino({ ...pinoOpts, browser: { asObject: false } });
122
164
  } else if (process.env.JEST_WORKER_ID) {
123
- // We are on jest, so we need sync logging. We stream to stderr with pretty.
124
- return pino(pinoOpts, pretty(pinoPrettyOpts));
165
+ // We are on jest, so we need sync logging and stream to stderr.
166
+ // We expect jest/setup.mjs to kick in later and set up a pretty logger,
167
+ // but if for some reason it doesn't, at least we're covered with a default logger.
168
+ return pino(pinoOpts, pino.destination(2));
125
169
  } else {
126
170
  // Regular nodejs with transports on worker thread, using pino-pretty for console logging if LOG_JSON
127
171
  // is not set, and an optional OTLP transport if the OTLP endpoint is provided.
@@ -129,7 +173,7 @@ function makeLogger() {
129
173
  ['1', 'true', 'TRUE'].includes(process.env.LOG_JSON ?? '') ? stdioTransport : prettyTransport,
130
174
  otlpEndpoint ? otelTransport : undefined,
131
175
  ]);
132
- return pino(pinoOpts, pino.transport({ targets }));
176
+ return pino(pinoOpts, pino.transport({ targets, levels: levels.values }));
133
177
  }
134
178
  }
135
179
 
@@ -146,6 +190,14 @@ logger.verbose(
146
190
  : `Browser console logger initialized with level ${logLevel}`,
147
191
  );
148
192
 
193
+ /**
194
+ * Overwrites the logging stream with a different destination.
195
+ * Used by jest/setup.mjs to set up a pretty logger.
196
+ */
197
+ export function overwriteLoggingStream(stream: Writable): void {
198
+ (logger as any)[symbols.streamSym] = stream;
199
+ }
200
+
149
201
  /**
150
202
  * Registers an additional destination to the pino logger.
151
203
  * Use only when working with destinations, not worker transports.
@@ -176,15 +228,9 @@ type ErrorLogFn = (msg: string, err?: Error | unknown, data?: LogData) => void;
176
228
  export type Logger = { [K in LogLevel]: LogFn } & { /** Error log function */ error: ErrorLogFn } & {
177
229
  level: LogLevel;
178
230
  isLevelEnabled: (level: LogLevel) => boolean;
231
+ module: string;
179
232
  };
180
233
 
181
- /**
182
- * Logger that supports multiple severity levels and can be called directly to issue a debug statement.
183
- * Intended as a drop-in replacement for the debug module.
184
- * TODO(palla/log): Remove this alias
185
- */
186
- export type DebugLogger = Logger;
187
-
188
234
  /**
189
235
  * Concatenates a log message and an exception.
190
236
  * @param msg - Log message
@@ -1,3 +1,4 @@
1
+ import { createLogger } from '../log/pino-logger.js';
1
2
  import { InterruptibleSleep } from '../sleep/index.js';
2
3
  import { type PromiseWithResolvers, promiseWithResolvers } from './utils.js';
3
4
 
@@ -12,7 +13,11 @@ export class RunningPromise {
12
13
  private interruptibleSleep = new InterruptibleSleep();
13
14
  private requested: PromiseWithResolvers<void> | undefined = undefined;
14
15
 
15
- constructor(private fn: () => void | Promise<void>, private pollingIntervalMS = 10000) {}
16
+ constructor(
17
+ private fn: () => void | Promise<void>,
18
+ private logger = createLogger('running-promise'),
19
+ private pollingIntervalMS = 10000,
20
+ ) {}
16
21
 
17
22
  /**
18
23
  * Starts the running promise.
@@ -23,7 +28,11 @@ export class RunningPromise {
23
28
  const poll = async () => {
24
29
  while (this.running) {
25
30
  const hasRequested = this.requested !== undefined;
26
- await this.fn();
31
+ try {
32
+ await this.fn();
33
+ } catch (err) {
34
+ this.logger.error('Error in running promise', err);
35
+ }
27
36
 
28
37
  // If an immediate run had been requested *before* the function started running, resolve the request.
29
38
  if (hasRequested) {
@@ -1,11 +1,11 @@
1
1
  import { TimeoutError } from '../error/index.js';
2
- import { createDebugLogger } from '../log/index.js';
2
+ import { createLogger } from '../log/index.js';
3
3
 
4
4
  export abstract class BaseMemoryQueue<T> {
5
5
  private waiting: ((item: T | null) => void)[] = [];
6
6
  private flushing = false;
7
7
 
8
- constructor(private log = createDebugLogger('aztec:foundation:memory_fifo')) {}
8
+ constructor(private log = createLogger('foundation:memory_fifo')) {}
9
9
 
10
10
  protected abstract get items(): {
11
11
  length: number;
@@ -1,4 +1,4 @@
1
- import { createDebugLogger } from '../log/index.js';
1
+ import { createLogger } from '../log/index.js';
2
2
  import { Semaphore } from './semaphore.js';
3
3
  import { SerialQueue } from './serial_queue.js';
4
4
 
@@ -10,7 +10,7 @@ export class BoundedSerialQueue {
10
10
  private readonly queue = new SerialQueue();
11
11
  private semaphore: Semaphore;
12
12
 
13
- constructor(maxQueueSize: number, private log = createDebugLogger('aztec:foundation:bounded_serial_queue')) {
13
+ constructor(maxQueueSize: number, private log = createLogger('foundation:bounded_serial_queue')) {
14
14
  this.semaphore = new Semaphore(maxQueueSize);
15
15
  }
16
16
 
@@ -1,4 +1,4 @@
1
- import { type DebugLogger } from '../log/index.js';
1
+ import { type Logger } from '../log/index.js';
2
2
  import { BaseMemoryQueue } from './base_memory_queue.js';
3
3
 
4
4
  /**
@@ -9,7 +9,7 @@ import { BaseMemoryQueue } from './base_memory_queue.js';
9
9
  export class FifoMemoryQueue<T> extends BaseMemoryQueue<T> {
10
10
  private container = new FifoQueue<T>();
11
11
 
12
- constructor(log?: DebugLogger) {
12
+ constructor(log?: Logger) {
13
13
  super(log);
14
14
  }
15
15
 
@@ -1,4 +1,4 @@
1
- import { createDebugLogger } from '../log/index.js';
1
+ import { createLogger } from '../log/index.js';
2
2
  import { sleep } from '../sleep/index.js';
3
3
  import { Timer } from '../timer/index.js';
4
4
 
@@ -48,7 +48,7 @@ export async function retry<Result>(
48
48
  fn: () => Promise<Result>,
49
49
  name = 'Operation',
50
50
  backoff = backoffGenerator(),
51
- log = createDebugLogger('aztec:foundation:retry'),
51
+ log = createLogger('foundation:retry'),
52
52
  failSilently = false,
53
53
  ) {
54
54
  while (true) {
@@ -64,7 +64,7 @@ export async function retry<Result>(
64
64
  throw err;
65
65
  }
66
66
  log.verbose(`${name} failed. Will retry in ${s}s...`);
67
- !failSilently && log.error(err);
67
+ !failSilently && log.error(`Error while retrying ${name}`, err);
68
68
  await sleep(s * 1000);
69
69
  continue;
70
70
  }
@@ -3,13 +3,16 @@ import {
3
3
  type ParseInput,
4
4
  type ParseReturnType,
5
5
  ZodFirstPartyTypeKind,
6
+ type ZodObject,
6
7
  ZodOptional,
7
8
  ZodParsedType,
9
+ type ZodRawShape,
8
10
  type ZodType,
9
11
  type ZodTypeAny,
10
12
  z,
11
13
  } from 'zod';
12
14
 
15
+ import { pick } from '../collection/object.js';
13
16
  import { isHex, withoutHexPrefix } from '../string/index.js';
14
17
  import { type ZodFor } from './types.js';
15
18
 
@@ -72,10 +75,10 @@ export function hexSchemaFor<TClass extends { fromString(str: string): any } | {
72
75
  string
73
76
  > {
74
77
  const stringSchema = refinement ? z.string().refine(refinement, `Not a valid instance`) : z.string();
75
- const hexSchema = stringSchema.refine(isHex, 'Not a valid hex string').transform(withoutHexPrefix);
78
+ const hexSchema = stringSchema.refine(isHex, 'Not a valid hex string');
76
79
  return 'fromString' in klazz
77
80
  ? hexSchema.transform(klazz.fromString.bind(klazz))
78
- : hexSchema.transform(str => Buffer.from(str, 'hex')).transform(klazz.fromBuffer.bind(klazz));
81
+ : hexSchema.transform(str => Buffer.from(withoutHexPrefix(str), 'hex')).transform(klazz.fromBuffer.bind(klazz));
79
82
  }
80
83
 
81
84
  /**
@@ -102,3 +105,8 @@ export function mapSchema<TKey, TValue>(key: ZodFor<TKey>, value: ZodFor<TValue>
102
105
  export function setSchema<T>(value: ZodFor<T>): ZodFor<Set<T>> {
103
106
  return z.array(value).transform(entries => new Set(entries));
104
107
  }
108
+
109
+ /** Given an already parsed and validated object, extracts the keys defined in the given schema. Does not validate again. */
110
+ export function pickFromSchema<T extends object, S extends ZodObject<ZodRawShape>>(obj: T, schema: S) {
111
+ return pick(obj, ...Object.keys(schema.shape));
112
+ }
@@ -140,4 +140,12 @@ export class FieldReader {
140
140
  }): T {
141
141
  return deserializer.fromFields(this);
142
142
  }
143
+
144
+ /**
145
+ * Returns whether the reader has finished reading all fields.
146
+ * @returns A bool.
147
+ */
148
+ public isFinished(): boolean {
149
+ return this.index === this.length;
150
+ }
143
151
  }
@@ -25,3 +25,7 @@ export function pluralize(str: string, count: number | bigint, plural?: string):
25
25
  export function count(count: number | bigint, str: string, plural?: string): string {
26
26
  return `${count} ${pluralize(str, count, plural)}`;
27
27
  }
28
+
29
+ export function truncate(str: string, length: number = 64): string {
30
+ return str.length > length ? str.slice(0, length) + '...' : str;
31
+ }
@@ -0,0 +1,76 @@
1
+ import { existsSync, readFileSync, writeFileSync } from 'fs';
2
+ import { dirname, join, resolve } from 'path';
3
+
4
+ import { createConsoleLogger } from '../../log/console.js';
5
+ import { fileURLToPath } from '../../url/index.js';
6
+ import { isGenerateTestDataEnabled } from '../test_data.js';
7
+
8
+ let generateProtocolCircuitTestData = false;
9
+
10
+ /**
11
+  * This is separate so Prover.tomls don't get edited every time any test is run,
12
+ * Only full.test updates prover tomls, then switches this off.
13
+ */
14
+ export function switchGenerateProtocolCircuitTestData() {
15
+ generateProtocolCircuitTestData = !generateProtocolCircuitTestData;
16
+ }
17
+
18
+ /** Writes the contents specified to the target file if test data generation is enabled. */
19
+ export function writeTestData(targetFileFromRepoRoot: string, contents: string | Buffer) {
20
+ if (!isGenerateTestDataEnabled()) {
21
+ return;
22
+ }
23
+ const targetFile = getPathToFile(targetFileFromRepoRoot);
24
+ const toWrite = typeof contents === 'string' ? contents : contents.toString('hex');
25
+ writeFileSync(targetFile, toWrite);
26
+ const logger = createConsoleLogger('aztec:testing:test_data');
27
+ logger(`Wrote test data to ${targetFile}`);
28
+ }
29
+
30
+ /**
31
+ * Looks for a variable assignment in the target file and updates the value, only if test data generation is enabled.
32
+ * Note that a magic inline comment would be a cleaner approach, like `/* TEST-DATA-START *\/` and `/* TEST-DATA-END *\/`,
33
+ * but running nargo fmt on it panics since the comment would be erased, so we roll with this for now.
34
+ * @remarks Requires AZTEC_GENERATE_TEST_DATA=1 to be set
35
+ */
36
+ export function updateInlineTestData(targetFileFromRepoRoot: string, itemName: string, value: string) {
37
+ if (!isGenerateTestDataEnabled()) {
38
+ return;
39
+ }
40
+ const logger = createConsoleLogger('aztec:testing:test_data');
41
+ const targetFile = getPathToFile(targetFileFromRepoRoot);
42
+ const contents = readFileSync(targetFile, 'utf8').toString();
43
+ const regex = new RegExp(`let ${itemName} =[\\s\\S]*?;`, 'g');
44
+ if (!regex.exec(contents)) {
45
+ throw new Error(`Test data marker for ${itemName} not found in ${targetFile}`);
46
+ }
47
+
48
+ const updatedContents = contents.replaceAll(regex, `let ${itemName} = ${value};`);
49
+ writeFileSync(targetFile, updatedContents);
50
+ logger(`Updated test data in ${targetFile} for ${itemName} to ${value}`);
51
+ }
52
+
53
+ /**
54
+ * Updates the sample Prover.toml files in noir-projects/noir-protocol-circuits/crates/.
55
+ * @remarks Requires AZTEC_GENERATE_TEST_DATA=1 & generateProtocolCircuitTestData=true to be set
56
+ * To re-gen, run 'AZTEC_GENERATE_TEST_DATA=1 FAKE_PROOFS=1 yarn workspace @aztec/end-to-end test full.test'
57
+ */
58
+ export function updateProtocolCircuitSampleInputs(circuitName: string, value: string) {
59
+ if (!isGenerateTestDataEnabled() || !generateProtocolCircuitTestData) {
60
+ return;
61
+ }
62
+ const logger = createConsoleLogger('aztec:testing:test_data');
63
+ const targetFileFromRepoRoot = `noir-projects/noir-protocol-circuits/crates/${circuitName}/Prover.toml`;
64
+ const targetFile = getPathToFile(targetFileFromRepoRoot);
65
+ writeFileSync(targetFile, value);
66
+ logger(`Updated test data in ${targetFile} for ${circuitName}`);
67
+ }
68
+
69
+ function getPathToFile(targetFileFromRepoRoot: string) {
70
+ const repoRoot = resolve(dirname(fileURLToPath(import.meta.url)), '../../../../../');
71
+ if (!existsSync(join(repoRoot, 'CODEOWNERS'))) {
72
+ throw new Error(`Path to repo root is incorrect (got ${repoRoot})`);
73
+ }
74
+
75
+ return join(repoRoot, targetFileFromRepoRoot);
76
+ }
@@ -1,3 +1,3 @@
1
- export * from './test_data.js';
2
1
  export * from './snapshot_serializer.js';
3
2
  export * from './port_allocator.js';
3
+ export * from './test_data.js';
@@ -1,9 +1,3 @@
1
- import { existsSync, readFileSync, writeFileSync } from 'fs';
2
- import { dirname, join, resolve } from 'path';
3
-
4
- import { createConsoleLogger } from '../log/console.js';
5
- import { fileURLToPath } from '../url/index.js';
6
-
7
1
  const testData: { [key: string]: unknown[] } = {};
8
2
 
9
3
  /** Returns whether test data generation is enabled */
@@ -40,47 +34,3 @@ export function getTestData(itemName: string): unknown[] {
40
34
  const fullItemName = `${testName} ${itemName}`;
41
35
  return testData[fullItemName];
42
36
  }
43
-
44
- /** Writes the contents specified to the target file if test data generation is enabled. */
45
- export function writeTestData(targetFileFromRepoRoot: string, contents: string | Buffer) {
46
- if (!isGenerateTestDataEnabled()) {
47
- return;
48
- }
49
- const targetFile = getPathToFile(targetFileFromRepoRoot);
50
- const toWrite = typeof contents === 'string' ? contents : contents.toString('hex');
51
- writeFileSync(targetFile, toWrite);
52
- const logger = createConsoleLogger('aztec:testing:test_data');
53
- logger(`Wrote test data to ${targetFile}`);
54
- }
55
-
56
- /**
57
- * Looks for a variable assignment in the target file and updates the value, only if test data generation is enabled.
58
- * Note that a magic inline comment would be a cleaner approach, like `/* TEST-DATA-START *\/` and `/* TEST-DATA-END *\/`,
59
- * but running nargo fmt on it panics since the comment would be erased, so we roll with this for now.
60
- * @remarks Requires AZTEC_GENERATE_TEST_DATA=1 to be set
61
- */
62
- export function updateInlineTestData(targetFileFromRepoRoot: string, itemName: string, value: string) {
63
- if (!isGenerateTestDataEnabled()) {
64
- return;
65
- }
66
- const logger = createConsoleLogger('aztec:testing:test_data');
67
- const targetFile = getPathToFile(targetFileFromRepoRoot);
68
- const contents = readFileSync(targetFile, 'utf8').toString();
69
- const regex = new RegExp(`let ${itemName} =[\\s\\S]*?;`, 'g');
70
- if (!regex.exec(contents)) {
71
- throw new Error(`Test data marker for ${itemName} not found in ${targetFile}`);
72
- }
73
-
74
- const updatedContents = contents.replaceAll(regex, `let ${itemName} = ${value};`);
75
- writeFileSync(targetFile, updatedContents);
76
- logger(`Updated test data in ${targetFile} for ${itemName} to ${value}`);
77
- }
78
-
79
- function getPathToFile(targetFileFromRepoRoot: string) {
80
- const repoRoot = resolve(dirname(fileURLToPath(import.meta.url)), '../../../../');
81
- if (!existsSync(join(repoRoot, 'CODEOWNERS'))) {
82
- throw new Error(`Path to repo root is incorrect (got ${repoRoot})`);
83
- }
84
-
85
- return join(repoRoot, targetFileFromRepoRoot);
86
- }
@@ -0,0 +1,24 @@
1
+ import { createLogger } from '../log/pino-logger.js';
2
+
3
+ /** Provides the current datetime. */
4
+ export class DateProvider {
5
+ public now(): number {
6
+ return Date.now();
7
+ }
8
+ }
9
+
10
+ /** Provides the current datetime and allows overriding it. */
11
+ export class TestDateProvider implements DateProvider {
12
+ private offset = 0;
13
+
14
+ constructor(private readonly logger = createLogger('foundation:test-date-provider')) {}
15
+
16
+ public now(): number {
17
+ return Date.now() + this.offset;
18
+ }
19
+
20
+ public setTime(timeMs: number) {
21
+ this.offset = timeMs - Date.now();
22
+ this.logger.warn(`Time set to ${timeMs}`);
23
+ }
24
+ }
@@ -1,3 +1,4 @@
1
1
  export { TimeoutTask, executeTimeoutWithCustomError } from './timeout.js';
2
2
  export { Timer } from './timer.js';
3
3
  export { elapsed, elapsedSync } from './elapsed.js';
4
+ export * from './date.js';
@@ -1,6 +1,6 @@
1
1
  import { format } from 'util';
2
2
 
3
- import { createDebugLogger } from '../../log/index.js';
3
+ import { createLogger } from '../../log/index.js';
4
4
 
5
5
  /**
6
6
  * Represents a message object for dispatching function calls.
@@ -26,7 +26,7 @@ export interface DispatchMsg {
26
26
  * @param log - Optional logging function for debugging purposes.
27
27
  * @returns A dispatch function that accepts a DispatchMsg object and calls the target's method with provided arguments.
28
28
  */
29
- export function createDispatchFn(targetFn: () => any, log = createDebugLogger('aztec:foundation:dispatch')) {
29
+ export function createDispatchFn(targetFn: () => any, log = createLogger('foundation:dispatch')) {
30
30
  return async ({ fn, args }: DispatchMsg) => {
31
31
  const target = targetFn();
32
32
  log.debug(format(`dispatching to ${target}: ${fn}`, args));
@@ -1,12 +1,12 @@
1
1
  import EventEmitter from 'events';
2
2
  import { format } from 'util';
3
3
 
4
- import { createDebugLogger } from '../log/index.js';
4
+ import { createLogger } from '../log/index.js';
5
5
  import { type EventMessage, type ResponseMessage, isEventMessage } from './dispatch/messages.js';
6
6
  import { type Connector } from './interface/connector.js';
7
7
  import { type Socket } from './interface/socket.js';
8
8
 
9
- const log = createDebugLogger('aztec:transport_client');
9
+ const log = createLogger('foundation:transport_client');
10
10
 
11
11
  /**
12
12
  * Represents a pending request in the TransportClient.
@@ -59,7 +59,7 @@ export class WasmModule implements IWasmModule {
59
59
  constructor(
60
60
  private module: WebAssembly.Module | Buffer,
61
61
  private importFn: (module: WasmModule) => any,
62
- loggerName = 'aztec:wasm',
62
+ loggerName = 'wasm',
63
63
  ) {
64
64
  this.debug = createDebugOnlyLogger(loggerName);
65
65
  this.mutexQ.put(true);
@@ -1,7 +1,7 @@
1
- import { createDebugLogger } from '../log/index.js';
1
+ import { createLogger } from '../log/index.js';
2
2
  import { type WasmWorker } from './wasm_worker.js';
3
3
 
4
- const log = createDebugLogger('bb:worker_pool');
4
+ const log = createLogger('foundation:worker_pool');
5
5
 
6
6
  /**
7
7
  * Type of a worker factory.