@aztec/foundation 0.65.2 → 0.67.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (111)
  1. package/dest/abi/abi.d.ts.map +1 -1
  2. package/dest/abi/abi.js +25 -10
  3. package/dest/abi/encoder.js +3 -3
  4. package/dest/abi/event_selector.js +2 -2
  5. package/dest/abi/function_selector.js +2 -2
  6. package/dest/async-pool/index.d.ts +3 -0
  7. package/dest/async-pool/index.d.ts.map +1 -0
  8. package/dest/async-pool/index.js +50 -0
  9. package/dest/collection/array.d.ts +16 -0
  10. package/dest/collection/array.d.ts.map +1 -1
  11. package/dest/collection/array.js +32 -1
  12. package/dest/collection/object.d.ts +6 -0
  13. package/dest/collection/object.d.ts.map +1 -1
  14. package/dest/collection/object.js +15 -1
  15. package/dest/config/env_var.d.ts +1 -1
  16. package/dest/config/env_var.d.ts.map +1 -1
  17. package/dest/crypto/random/randomness_singleton.js +3 -3
  18. package/dest/fields/fields.d.ts +20 -2
  19. package/dest/fields/fields.d.ts.map +1 -1
  20. package/dest/fields/fields.js +37 -3
  21. package/dest/fs/run_in_dir.js +2 -2
  22. package/dest/iterable/index.d.ts +1 -0
  23. package/dest/iterable/index.d.ts.map +1 -1
  24. package/dest/iterable/index.js +2 -1
  25. package/dest/iterable/toArray.d.ts +2 -0
  26. package/dest/iterable/toArray.d.ts.map +1 -0
  27. package/dest/iterable/toArray.js +8 -0
  28. package/dest/json-rpc/client/fetch.d.ts +2 -2
  29. package/dest/json-rpc/client/fetch.d.ts.map +1 -1
  30. package/dest/json-rpc/client/fetch.js +3 -3
  31. package/dest/json-rpc/client/safe_json_rpc_client.d.ts +1 -1
  32. package/dest/json-rpc/client/safe_json_rpc_client.d.ts.map +1 -1
  33. package/dest/json-rpc/client/safe_json_rpc_client.js +7 -12
  34. package/dest/json-rpc/server/safe_json_rpc_server.d.ts +3 -2
  35. package/dest/json-rpc/server/safe_json_rpc_server.d.ts.map +1 -1
  36. package/dest/json-rpc/server/safe_json_rpc_server.js +5 -5
  37. package/dest/log/index.d.ts +1 -1
  38. package/dest/log/index.d.ts.map +1 -1
  39. package/dest/log/index.js +2 -2
  40. package/dest/log/log-filters.d.ts +7 -0
  41. package/dest/log/log-filters.d.ts.map +1 -0
  42. package/dest/log/log-filters.js +49 -0
  43. package/dest/log/log-levels.d.ts +3 -0
  44. package/dest/log/log-levels.d.ts.map +1 -0
  45. package/dest/log/log-levels.js +2 -0
  46. package/dest/log/log_fn.d.ts +1 -1
  47. package/dest/log/pino-logger.d.ts +50 -0
  48. package/dest/log/pino-logger.d.ts.map +1 -0
  49. package/dest/log/pino-logger.js +160 -0
  50. package/dest/queue/base_memory_queue.d.ts +1 -1
  51. package/dest/queue/base_memory_queue.d.ts.map +1 -1
  52. package/dest/queue/base_memory_queue.js +3 -3
  53. package/dest/queue/bounded_serial_queue.d.ts +1 -1
  54. package/dest/queue/bounded_serial_queue.d.ts.map +1 -1
  55. package/dest/queue/bounded_serial_queue.js +3 -3
  56. package/dest/queue/fifo_memory_queue.d.ts +2 -2
  57. package/dest/queue/fifo_memory_queue.d.ts.map +1 -1
  58. package/dest/queue/fifo_memory_queue.js +1 -1
  59. package/dest/retry/index.d.ts +1 -1
  60. package/dest/retry/index.d.ts.map +1 -1
  61. package/dest/retry/index.js +3 -3
  62. package/dest/schemas/utils.d.ts +3 -1
  63. package/dest/schemas/utils.d.ts.map +1 -1
  64. package/dest/schemas/utils.js +8 -3
  65. package/dest/testing/test_data.d.ts +11 -0
  66. package/dest/testing/test_data.d.ts.map +1 -1
  67. package/dest/testing/test_data.js +24 -1
  68. package/dest/transport/dispatch/create_dispatch_fn.d.ts +1 -1
  69. package/dest/transport/dispatch/create_dispatch_fn.d.ts.map +1 -1
  70. package/dest/transport/dispatch/create_dispatch_fn.js +3 -3
  71. package/dest/transport/transport_client.js +3 -3
  72. package/dest/wasm/wasm_module.d.ts.map +1 -1
  73. package/dest/wasm/wasm_module.js +2 -2
  74. package/dest/worker/worker_pool.js +3 -3
  75. package/package.json +13 -3
  76. package/src/abi/abi.ts +30 -11
  77. package/src/abi/encoder.ts +2 -2
  78. package/src/abi/event_selector.ts +1 -1
  79. package/src/abi/function_selector.ts +1 -1
  80. package/src/async-pool/index.ts +50 -0
  81. package/src/collection/array.ts +34 -0
  82. package/src/collection/object.ts +22 -0
  83. package/src/config/env_var.ts +23 -2
  84. package/src/crypto/random/randomness_singleton.ts +2 -2
  85. package/src/fields/fields.ts +40 -2
  86. package/src/fs/run_in_dir.ts +1 -1
  87. package/src/iterable/index.ts +1 -0
  88. package/src/iterable/toArray.ts +7 -0
  89. package/src/jest/setup.mjs +9 -0
  90. package/src/json-rpc/client/fetch.ts +3 -3
  91. package/src/json-rpc/client/safe_json_rpc_client.ts +7 -15
  92. package/src/json-rpc/server/safe_json_rpc_server.ts +5 -5
  93. package/src/log/index.ts +1 -1
  94. package/src/log/log-filters.ts +55 -0
  95. package/src/log/log-levels.ts +3 -0
  96. package/src/log/log_fn.ts +1 -1
  97. package/src/log/pino-logger.ts +203 -0
  98. package/src/queue/base_memory_queue.ts +2 -2
  99. package/src/queue/bounded_serial_queue.ts +2 -2
  100. package/src/queue/fifo_memory_queue.ts +2 -2
  101. package/src/retry/index.ts +2 -2
  102. package/src/schemas/utils.ts +10 -2
  103. package/src/testing/test_data.ts +25 -0
  104. package/src/transport/dispatch/create_dispatch_fn.ts +2 -2
  105. package/src/transport/transport_client.ts +2 -2
  106. package/src/wasm/wasm_module.ts +1 -1
  107. package/src/worker/worker_pool.ts +2 -2
  108. package/dest/log/logger.d.ts +0 -57
  109. package/dest/log/logger.d.ts.map +0 -1
  110. package/dest/log/logger.js +0 -139
  111. package/src/log/logger.ts +0 -179
@@ -8,7 +8,7 @@ import { type AddressInfo } from 'net';
8
8
  import { format, inspect } from 'util';
9
9
  import { ZodError } from 'zod';
10
10
 
11
- import { type DebugLogger, createDebugLogger } from '../../log/index.js';
11
+ import { type Logger, createLogger } from '../../log/index.js';
12
12
  import { promiseWithResolvers } from '../../promise/utils.js';
13
13
  import { type ApiSchema, type ApiSchemaFor, parseWithOptionals, schemaHasMethod } from '../../schemas/index.js';
14
14
  import { jsonStringify } from '../convert.js';
@@ -27,7 +27,7 @@ export class SafeJsonRpcServer {
27
27
  /** Health check function */
28
28
  private readonly healthCheck: StatusCheckFn = () => true,
29
29
  /** Logger */
30
- private log = createDebugLogger('json-rpc:server'),
30
+ private log = createLogger('json-rpc:server'),
31
31
  ) {}
32
32
 
33
33
  public isHealthy(): boolean | Promise<boolean> {
@@ -170,7 +170,7 @@ interface Proxy {
170
170
  * before forwarding calls, and then converts outputs into JSON using default conversions.
171
171
  */
172
172
  export class SafeJsonProxy<T extends object = any> implements Proxy {
173
- private log = createDebugLogger('json-rpc:proxy');
173
+ private log = createLogger('json-rpc:proxy');
174
174
  private schema: ApiSchema;
175
175
 
176
176
  constructor(private handler: T, schema: ApiSchemaFor<T>) {
@@ -233,7 +233,7 @@ export function makeHandler<T extends object>(handler: T, schema: ApiSchemaFor<T
233
233
  return [handler, schema];
234
234
  }
235
235
 
236
- function makeAggregateHealthcheck(namedHandlers: NamespacedApiHandlers, log?: DebugLogger): StatusCheckFn {
236
+ function makeAggregateHealthcheck(namedHandlers: NamespacedApiHandlers, log?: Logger): StatusCheckFn {
237
237
  return async () => {
238
238
  try {
239
239
  const results = await Promise.all(
@@ -259,7 +259,7 @@ function makeAggregateHealthcheck(namedHandlers: NamespacedApiHandlers, log?: De
259
259
  */
260
260
  export function createNamespacedSafeJsonRpcServer(
261
261
  handlers: NamespacedApiHandlers,
262
- log = createDebugLogger('json-rpc:server'),
262
+ log = createLogger('json-rpc:server'),
263
263
  ): SafeJsonRpcServer {
264
264
  const proxy = new NamespacedSafeJsonProxy(handlers);
265
265
  const healthCheck = makeAggregateHealthcheck(handlers, log);
package/src/log/index.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  export * from './console.js';
2
2
  export * from './debug.js';
3
- export * from './logger.js';
3
+ export * from './pino-logger.js';
4
4
  export * from './log_history.js';
5
5
  export * from './log_fn.js';
@@ -0,0 +1,55 @@
1
+ import { type LogLevel, LogLevels } from './log-levels.js';
2
+
3
+ export type LogFilters = [string, LogLevel][];
4
+
5
+ export function getLogLevelFromFilters(filters: LogFilters, module: string): LogLevel | undefined {
6
+ for (const [filterModule, level] of filters) {
7
+ if (module.startsWith(filterModule)) {
8
+ return level as LogLevel;
9
+ }
10
+ }
11
+ return undefined;
12
+ }
13
+
14
+ export function assertLogLevel(level: string): asserts level is LogLevel {
15
+ if (!LogLevels.includes(level as LogLevel)) {
16
+ throw new Error(`Invalid log level: ${level}`);
17
+ }
18
+ }
19
+
20
+ export function parseEnv(env: string | undefined, defaultLevel: LogLevel): [LogLevel, LogFilters] {
21
+ if (!env) {
22
+ return [defaultLevel, []];
23
+ }
24
+ const [level] = env.split(';', 1);
25
+ assertLogLevel(level);
26
+ return [level, parseFilters(env.slice(level.length + 1))];
27
+ }
28
+
29
+ export function parseFilters(definition: string | undefined): LogFilters {
30
+ if (!definition) {
31
+ return [];
32
+ }
33
+
34
+ const statements = definition.split(';');
35
+ const filters: LogFilters = [];
36
+ for (const statement of statements) {
37
+ const [level] = statement.split(':', 1);
38
+ const modules = statement.slice(level.length + 1);
39
+ if (!modules || !level) {
40
+ throw new Error(`Invalid log filter statement: ${statement}`);
41
+ }
42
+ const sanitizedLevel = level.trim().toLowerCase();
43
+ assertLogLevel(sanitizedLevel);
44
+ for (const module of modules.split(',')) {
45
+ filters.push([
46
+ module
47
+ .trim()
48
+ .toLowerCase()
49
+ .replace(/^aztec:/, ''),
50
+ sanitizedLevel as LogLevel | 'silent',
51
+ ]);
52
+ }
53
+ }
54
+ return filters.reverse();
55
+ }
@@ -0,0 +1,3 @@
1
+ export const LogLevels = ['silent', 'fatal', 'error', 'warn', 'info', 'verbose', 'debug', 'trace'] as const;
2
+
3
+ export type LogLevel = (typeof LogLevels)[number];
package/src/log/log_fn.ts CHANGED
@@ -2,4 +2,4 @@
2
2
  export type LogData = Record<string, string | number | bigint | boolean | { toString(): string } | undefined>;
3
3
 
4
4
  /** A callable logger instance. */
5
- export type LogFn = (msg: string, data?: LogData) => void;
5
+ export type LogFn = (msg: string, data?: unknown) => void;
@@ -0,0 +1,203 @@
1
+ import { createColors } from 'colorette';
2
+ import isNode from 'detect-node';
3
+ import { pino, symbols } from 'pino';
4
+ import { type Writable } from 'stream';
5
+ import { inspect } from 'util';
6
+
7
+ import { compactArray } from '../collection/array.js';
8
+ import { getLogLevelFromFilters, parseEnv } from './log-filters.js';
9
+ import { type LogLevel } from './log-levels.js';
10
+ import { type LogData, type LogFn } from './log_fn.js';
11
+
12
+ export function createLogger(module: string): Logger {
13
+ module = module.replace(/^aztec:/, '');
14
+ const pinoLogger = logger.child({ module }, { level: getLogLevelFromFilters(logFilters, module) });
15
+
16
+ // We check manually for isLevelEnabled to avoid calling processLogData unnecessarily.
17
+ // Note that isLevelEnabled is missing from the browser version of pino.
18
+ const logFn = (level: LogLevel, msg: string, data?: unknown) =>
19
+ isLevelEnabled(pinoLogger, level) && pinoLogger[level](processLogData((data as LogData) ?? {}), msg);
20
+
21
+ return {
22
+ silent: () => {},
23
+ // TODO(palla/log): Should we move err to data instead of the text message?
24
+ /** Log as fatal. Use when an error has brought down the system. */
25
+ fatal: (msg: string, err?: unknown, data?: unknown) => logFn('fatal', formatErr(msg, err), data),
26
+ /** Log as error. Use for errors in general. */
27
+ error: (msg: string, err?: unknown, data?: unknown) => logFn('error', formatErr(msg, err), data),
28
+ /** Log as warn. Use for when we stray from the happy path. */
29
+ warn: (msg: string, data?: unknown) => logFn('warn', msg, data),
30
+ /** Log as info. Use for providing an operator with info on what the system is doing. */
31
+ info: (msg: string, data?: unknown) => logFn('info', msg, data),
32
+ /** Log as verbose. Use for when we need additional insight on what a subsystem is doing. */
33
+ verbose: (msg: string, data?: unknown) => logFn('verbose', msg, data),
34
+ /** Log as debug. Use for when we need debugging info to troubleshoot an issue on a specific component. */
35
+ debug: (msg: string, data?: unknown) => logFn('debug', msg, data),
36
+ /** Log as trace. Use for when we want to denial-of-service any recipient of the logs. */
37
+ trace: (msg: string, data?: unknown) => logFn('trace', msg, data),
38
+ level: pinoLogger.level as LogLevel,
39
+ /** Whether the given level is enabled for this logger. */
40
+ isLevelEnabled: (level: LogLevel) => isLevelEnabled(pinoLogger, level),
41
+ /** Module name for the logger. */
42
+ module,
43
+ };
44
+ }
45
+
46
+ // Allow global hooks for processing log data.
47
+ // Used for injecting OTEL trace_id in telemetry client.
48
+ type LogDataHandler = (data: LogData) => LogData;
49
+ const logDataHandlers: LogDataHandler[] = [];
50
+
51
+ export function addLogDataHandler(handler: LogDataHandler): void {
52
+ logDataHandlers.push(handler);
53
+ }
54
+
55
+ function processLogData(data: LogData): LogData {
56
+ return logDataHandlers.reduce((accum, handler) => handler(accum), data);
57
+ }
58
+
59
+ // Patch isLevelEnabled missing from pino/browser.
60
+ function isLevelEnabled(logger: pino.Logger<'verbose', boolean>, level: LogLevel): boolean {
61
+ return typeof logger.isLevelEnabled === 'function'
62
+ ? logger.isLevelEnabled(level)
63
+ : logger.levels.values[level] >= logger.levels.values[logger.level];
64
+ }
65
+
66
+ // Load log levels from environment variables.
67
+ const defaultLogLevel = process.env.NODE_ENV === 'test' ? 'silent' : 'info';
68
+ const [logLevel, logFilters] = parseEnv(process.env.LOG_LEVEL, defaultLogLevel);
69
+
70
+ // Define custom logging levels for pino.
71
+ const customLevels = { verbose: 25 };
72
+ const pinoOpts = { customLevels, useOnlyCustomLevels: false, level: logLevel };
73
+
74
+ export const levels = {
75
+ labels: { ...pino.levels.labels, ...Object.fromEntries(Object.entries(customLevels).map(e => e.reverse())) },
76
+ values: { ...pino.levels.values, ...customLevels },
77
+ };
78
+
79
+ // Transport options for pretty logging to stderr via pino-pretty.
80
+ const useColor = true;
81
+ const { bold, reset } = createColors({ useColor });
82
+ export const pinoPrettyOpts = {
83
+ destination: 2,
84
+ sync: true,
85
+ colorize: useColor,
86
+ ignore: 'module,pid,hostname,trace_id,span_id,trace_flags',
87
+ messageFormat: `${bold('{module}')} ${reset('{msg}')}`,
88
+ customLevels: 'fatal:60,error:50,warn:40,info:30,verbose:25,debug:20,trace:10',
89
+ customColors: 'fatal:bgRed,error:red,warn:yellow,info:green,verbose:magenta,debug:blue,trace:gray',
90
+ minimumLevel: 'trace' as const,
91
+ singleLine: !['1', 'true'].includes(process.env.LOG_MULTILINE ?? ''),
92
+ };
93
+
94
+ const prettyTransport: pino.TransportTargetOptions = {
95
+ target: 'pino-pretty',
96
+ options: pinoPrettyOpts,
97
+ level: 'trace',
98
+ };
99
+
100
+ // Transport for vanilla stdio logging as JSON.
101
+ const stdioTransport: pino.TransportTargetOptions = {
102
+ target: 'pino/file',
103
+ options: { destination: 2 },
104
+ level: 'trace',
105
+ };
106
+
107
+ // Transport for OpenTelemetry logging. While defining this here is an abstraction leakage since this
108
+ // should live in the telemetry-client, it is necessary to ensure that the logger is initialized with
109
+ // the correct transport. Tweaking transports of a live pino instance is tricky, and creating a new instance
110
+ // would mean that all child loggers created before the telemetry-client is initialized would not have
111
+ // this transport configured. Note that the target is defined as the export in the telemetry-client,
112
+ // since pino will load this transport separately on a worker thread, to minimize disruption to the main loop.
113
+ const otlpEndpoint = process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT;
114
+ const otelOpts = { levels };
115
+ const otelTransport: pino.TransportTargetOptions = {
116
+ target: '@aztec/telemetry-client/otel-pino-stream',
117
+ options: otelOpts,
118
+ level: 'trace',
119
+ };
120
+
121
+ function makeLogger() {
122
+ if (!isNode) {
123
+ // We are on the browser.
124
+ return pino({ ...pinoOpts, browser: { asObject: false } });
125
+ } else if (process.env.JEST_WORKER_ID) {
126
+ // We are on jest, so we need sync logging and stream to stderr.
127
+ // We expect jest/setup.mjs to kick in later and set up a pretty logger,
128
+ // but if for some reason it doesn't, at least we're covered with a default logger.
129
+ return pino(pinoOpts, pino.destination(2));
130
+ } else {
131
+ // Regular nodejs with transports on worker thread, using pino-pretty for console logging if LOG_JSON
132
+ // is not set, and an optional OTLP transport if the OTLP endpoint is provided.
133
+ const targets: pino.TransportSingleOptions[] = compactArray([
134
+ ['1', 'true', 'TRUE'].includes(process.env.LOG_JSON ?? '') ? stdioTransport : prettyTransport,
135
+ otlpEndpoint ? otelTransport : undefined,
136
+ ]);
137
+ return pino(pinoOpts, pino.transport({ targets, levels: levels.values }));
138
+ }
139
+ }
140
+
141
+ const logger = makeLogger();
142
+
143
+ // Log the logger configuration.
144
+ logger.verbose(
145
+ {
146
+ module: 'logger',
147
+ ...logFilters.reduce((accum, [module, level]) => ({ ...accum, [`log.${module}`]: level }), {}),
148
+ },
149
+ isNode
150
+ ? `Logger initialized with level ${logLevel}` + (otlpEndpoint ? ` with OTLP exporter to ${otlpEndpoint}` : '')
151
+ : `Browser console logger initialized with level ${logLevel}`,
152
+ );
153
+
154
+ /**
155
+ * Overwrites the logging stream with a different destination.
156
+ * Used by jest/setup.mjs to set up a pretty logger.
157
+ */
158
+ export function overwriteLoggingStream(stream: Writable): void {
159
+ (logger as any)[symbols.streamSym] = stream;
160
+ }
161
+
162
+ /**
163
+ * Registers an additional destination to the pino logger.
164
+ * Use only when working with destinations, not worker transports.
165
+ */
166
+ export function registerLoggingStream(stream: Writable): void {
167
+ logger.verbose({ module: 'logger' }, `Registering additional logging stream`);
168
+ const original = (logger as any)[symbols.streamSym];
169
+ const destination = original
170
+ ? pino.multistream(
171
+ [
172
+ // Set streams to lowest logging level, and control actual logging from the parent logger
173
+ // otherwise streams default to info and refuse to log anything below that.
174
+ { level: 'trace', stream: original },
175
+ { level: 'trace', stream },
176
+ ],
177
+ { levels: levels.values },
178
+ )
179
+ : stream;
180
+ (logger as any)[symbols.streamSym] = destination;
181
+ }
182
+
183
+ /** Log function that accepts an exception object */
184
+ type ErrorLogFn = (msg: string, err?: Error | unknown, data?: LogData) => void;
185
+
186
+ /**
187
+ * Logger that supports multiple severity levels.
188
+ */
189
+ export type Logger = { [K in LogLevel]: LogFn } & { /** Error log function */ error: ErrorLogFn } & {
190
+ level: LogLevel;
191
+ isLevelEnabled: (level: LogLevel) => boolean;
192
+ module: string;
193
+ };
194
+
195
+ /**
196
+ * Concatenates a log message and an exception.
197
+ * @param msg - Log message
198
+ * @param err - Error to log
199
+ * @returns A string with both the log message and the error message.
200
+ */
201
+ function formatErr(msg: string, err?: Error | unknown): string {
202
+ return err ? `${msg}: ${inspect(err)}` : msg;
203
+ }
@@ -1,11 +1,11 @@
1
1
  import { TimeoutError } from '../error/index.js';
2
- import { createDebugLogger } from '../log/index.js';
2
+ import { createLogger } from '../log/index.js';
3
3
 
4
4
  export abstract class BaseMemoryQueue<T> {
5
5
  private waiting: ((item: T | null) => void)[] = [];
6
6
  private flushing = false;
7
7
 
8
- constructor(private log = createDebugLogger('aztec:foundation:memory_fifo')) {}
8
+ constructor(private log = createLogger('foundation:memory_fifo')) {}
9
9
 
10
10
  protected abstract get items(): {
11
11
  length: number;
@@ -1,4 +1,4 @@
1
- import { createDebugLogger } from '../log/index.js';
1
+ import { createLogger } from '../log/index.js';
2
2
  import { Semaphore } from './semaphore.js';
3
3
  import { SerialQueue } from './serial_queue.js';
4
4
 
@@ -10,7 +10,7 @@ export class BoundedSerialQueue {
10
10
  private readonly queue = new SerialQueue();
11
11
  private semaphore: Semaphore;
12
12
 
13
- constructor(maxQueueSize: number, private log = createDebugLogger('aztec:foundation:bounded_serial_queue')) {
13
+ constructor(maxQueueSize: number, private log = createLogger('foundation:bounded_serial_queue')) {
14
14
  this.semaphore = new Semaphore(maxQueueSize);
15
15
  }
16
16
 
@@ -1,4 +1,4 @@
1
- import { type DebugLogger } from '../log/logger.js';
1
+ import { type Logger } from '../log/index.js';
2
2
  import { BaseMemoryQueue } from './base_memory_queue.js';
3
3
 
4
4
  /**
@@ -9,7 +9,7 @@ import { BaseMemoryQueue } from './base_memory_queue.js';
9
9
  export class FifoMemoryQueue<T> extends BaseMemoryQueue<T> {
10
10
  private container = new FifoQueue<T>();
11
11
 
12
- constructor(log?: DebugLogger) {
12
+ constructor(log?: Logger) {
13
13
  super(log);
14
14
  }
15
15
 
@@ -1,4 +1,4 @@
1
- import { createDebugLogger } from '../log/index.js';
1
+ import { createLogger } from '../log/index.js';
2
2
  import { sleep } from '../sleep/index.js';
3
3
  import { Timer } from '../timer/index.js';
4
4
 
@@ -48,7 +48,7 @@ export async function retry<Result>(
48
48
  fn: () => Promise<Result>,
49
49
  name = 'Operation',
50
50
  backoff = backoffGenerator(),
51
- log = createDebugLogger('aztec:foundation:retry'),
51
+ log = createLogger('foundation:retry'),
52
52
  failSilently = false,
53
53
  ) {
54
54
  while (true) {
@@ -3,13 +3,16 @@ import {
3
3
  type ParseInput,
4
4
  type ParseReturnType,
5
5
  ZodFirstPartyTypeKind,
6
+ type ZodObject,
6
7
  ZodOptional,
7
8
  ZodParsedType,
9
+ type ZodRawShape,
8
10
  type ZodType,
9
11
  type ZodTypeAny,
10
12
  z,
11
13
  } from 'zod';
12
14
 
15
+ import { pick } from '../collection/object.js';
13
16
  import { isHex, withoutHexPrefix } from '../string/index.js';
14
17
  import { type ZodFor } from './types.js';
15
18
 
@@ -72,10 +75,10 @@ export function hexSchemaFor<TClass extends { fromString(str: string): any } | {
72
75
  string
73
76
  > {
74
77
  const stringSchema = refinement ? z.string().refine(refinement, `Not a valid instance`) : z.string();
75
- const hexSchema = stringSchema.refine(isHex, 'Not a valid hex string').transform(withoutHexPrefix);
78
+ const hexSchema = stringSchema.refine(isHex, 'Not a valid hex string');
76
79
  return 'fromString' in klazz
77
80
  ? hexSchema.transform(klazz.fromString.bind(klazz))
78
- : hexSchema.transform(str => Buffer.from(str, 'hex')).transform(klazz.fromBuffer.bind(klazz));
81
+ : hexSchema.transform(str => Buffer.from(withoutHexPrefix(str), 'hex')).transform(klazz.fromBuffer.bind(klazz));
79
82
  }
80
83
 
81
84
  /**
@@ -102,3 +105,8 @@ export function mapSchema<TKey, TValue>(key: ZodFor<TKey>, value: ZodFor<TValue>
102
105
  export function setSchema<T>(value: ZodFor<T>): ZodFor<Set<T>> {
103
106
  return z.array(value).transform(entries => new Set(entries));
104
107
  }
108
+
109
+ /** Given an already parsed and validated object, extracts the keys defined in the given schema. Does not validate again. */
110
+ export function pickFromSchema<T extends object, S extends ZodObject<ZodRawShape>>(obj: T, schema: S) {
111
+ return pick(obj, ...Object.keys(schema.shape));
112
+ }
@@ -5,12 +5,21 @@ import { createConsoleLogger } from '../log/console.js';
5
5
  import { fileURLToPath } from '../url/index.js';
6
6
 
7
7
  const testData: { [key: string]: unknown[] } = {};
8
+ let generateProtocolCircuitTestData = false;
8
9
 
9
10
  /** Returns whether test data generation is enabled */
10
11
  export function isGenerateTestDataEnabled() {
11
12
  return ['1', 'true'].includes(process.env.AZTEC_GENERATE_TEST_DATA ?? '') && typeof expect !== 'undefined';
12
13
  }
13
14
 
15
+ /**
16
+ * This is separate so Prover.tomls don't get edited every time any test is run;
17
+ * Only full.test updates prover tomls, then switches this off.
18
+ */
19
+ export function switchGenerateProtocolCircuitTestData() {
20
+ generateProtocolCircuitTestData = !generateProtocolCircuitTestData;
21
+ }
22
+
14
23
  /** Pushes test data with the given name, only if test data generation is enabled. */
15
24
  export function pushTestData<T>(itemName: string, data: T) {
16
25
  if (!isGenerateTestDataEnabled()) {
@@ -76,6 +85,22 @@ export function updateInlineTestData(targetFileFromRepoRoot: string, itemName: s
76
85
  logger(`Updated test data in ${targetFile} for ${itemName} to ${value}`);
77
86
  }
78
87
 
88
+ /**
89
+ * Updates the sample Prover.toml files in noir-projects/noir-protocol-circuits/crates/.
90
+ * @remarks Requires AZTEC_GENERATE_TEST_DATA=1 & generateProtocolCircuitTestData=true to be set
91
+ * To re-gen, run 'AZTEC_GENERATE_TEST_DATA=1 FAKE_PROOFS=1 yarn workspace @aztec/end-to-end test full.test'
92
+ */
93
+ export function updateProtocolCircuitSampleInputs(circuitName: string, value: string) {
94
+ if (!isGenerateTestDataEnabled() || !generateProtocolCircuitTestData) {
95
+ return;
96
+ }
97
+ const logger = createConsoleLogger('aztec:testing:test_data');
98
+ const targetFileFromRepoRoot = `noir-projects/noir-protocol-circuits/crates/${circuitName}/Prover.toml`;
99
+ const targetFile = getPathToFile(targetFileFromRepoRoot);
100
+ writeFileSync(targetFile, value);
101
+ logger(`Updated test data in ${targetFile} for ${circuitName}`);
102
+ }
103
+
79
104
  function getPathToFile(targetFileFromRepoRoot: string) {
80
105
  const repoRoot = resolve(dirname(fileURLToPath(import.meta.url)), '../../../../');
81
106
  if (!existsSync(join(repoRoot, 'CODEOWNERS'))) {
@@ -1,6 +1,6 @@
1
1
  import { format } from 'util';
2
2
 
3
- import { createDebugLogger } from '../../log/index.js';
3
+ import { createLogger } from '../../log/index.js';
4
4
 
5
5
  /**
6
6
  * Represents a message object for dispatching function calls.
@@ -26,7 +26,7 @@ export interface DispatchMsg {
26
26
  * @param log - Optional logging function for debugging purposes.
27
27
  * @returns A dispatch function that accepts a DispatchMsg object and calls the target's method with provided arguments.
28
28
  */
29
- export function createDispatchFn(targetFn: () => any, log = createDebugLogger('aztec:foundation:dispatch')) {
29
+ export function createDispatchFn(targetFn: () => any, log = createLogger('foundation:dispatch')) {
30
30
  return async ({ fn, args }: DispatchMsg) => {
31
31
  const target = targetFn();
32
32
  log.debug(format(`dispatching to ${target}: ${fn}`, args));
@@ -1,12 +1,12 @@
1
1
  import EventEmitter from 'events';
2
2
  import { format } from 'util';
3
3
 
4
- import { createDebugLogger } from '../log/index.js';
4
+ import { createLogger } from '../log/index.js';
5
5
  import { type EventMessage, type ResponseMessage, isEventMessage } from './dispatch/messages.js';
6
6
  import { type Connector } from './interface/connector.js';
7
7
  import { type Socket } from './interface/socket.js';
8
8
 
9
- const log = createDebugLogger('aztec:transport_client');
9
+ const log = createLogger('foundation:transport_client');
10
10
 
11
11
  /**
12
12
  * Represents a pending request in the TransportClient.
@@ -59,7 +59,7 @@ export class WasmModule implements IWasmModule {
59
59
  constructor(
60
60
  private module: WebAssembly.Module | Buffer,
61
61
  private importFn: (module: WasmModule) => any,
62
- loggerName = 'aztec:wasm',
62
+ loggerName = 'wasm',
63
63
  ) {
64
64
  this.debug = createDebugOnlyLogger(loggerName);
65
65
  this.mutexQ.put(true);
@@ -1,7 +1,7 @@
1
- import { createDebugLogger } from '../log/index.js';
1
+ import { createLogger } from '../log/index.js';
2
2
  import { type WasmWorker } from './wasm_worker.js';
3
3
 
4
- const log = createDebugLogger('bb:worker_pool');
4
+ const log = createLogger('foundation:worker_pool');
5
5
 
6
6
  /**
7
7
  * Type of a worker factory.
@@ -1,57 +0,0 @@
1
- import { type LogData, type LogFn } from './log_fn.js';
2
- declare const LogLevels: readonly ["silent", "error", "warn", "info", "verbose", "debug"];
3
- /**
4
- * A valid log severity level.
5
- */
6
- export type LogLevel = (typeof LogLevels)[number];
7
- export declare let currentLevel: "silent" | "error" | "warn" | "info" | "verbose" | "debug";
8
- /** Log function that accepts an exception object */
9
- type ErrorLogFn = (msg: string, err?: Error | unknown, data?: LogData) => void;
10
- /**
11
- * Logger that supports multiple severity levels.
12
- */
13
- export type Logger = {
14
- [K in LogLevel]: LogFn;
15
- } & {
16
- error: ErrorLogFn;
17
- };
18
- /**
19
- * Logger that supports multiple severity levels and can be called directly to issue a debug statement.
20
- * Intended as a drop-in replacement for the debug module.
21
- */
22
- export type DebugLogger = Logger;
23
- /**
24
- * Creates a new DebugLogger for the current module, defaulting to the LOG_LEVEL env var.
25
- * If DEBUG="[module]" env is set, will enable debug logging if the module matches.
26
- * Uses npm debug for debug level and console.error for other levels.
27
- * @param name - Name of the module.
28
- * @param fixedLogData - Additional data to include in the log message.
29
- * @usage createDebugLogger('aztec:validator');
30
- * // will always add the validator address to the log labels
31
- * @returns A debug logger.
32
- */
33
- export declare function createDebugLogger(name: string): DebugLogger;
34
- /**
35
- * A function to create a logger that automatically includes fixed data in each log entry.
36
- * @param debugLogger - The base DebugLogger instance to which we attach fixed log data.
37
- * @param fixedLogData - The data to be included in every log entry.
38
- * @returns A DebugLogger with log level methods (error, warn, info, verbose, debug) that
39
- * automatically attach `fixedLogData` to every log message.
40
- */
41
- export declare function attachedFixedDataToLogger(debugLogger: DebugLogger, fixedLogData: LogData): DebugLogger;
42
- /** A callback to capture all logs. */
43
- export type LogHandler = (level: LogLevel, namespace: string, msg: string, data?: LogData) => void;
44
- /**
45
- * Registers a callback for all logs, whether they are emitted in the current log level or not.
46
- * @param handler - Callback to be called on every log.
47
- */
48
- export declare function onLog(handler: LogHandler): void;
49
- /** Overrides current log level. */
50
- export declare function setLevel(level: LogLevel): void;
51
- /**
52
- * Formats structured log data as a string for console output.
53
- * @param data - Optional log data.
54
- */
55
- export declare function fmtLogData(data?: LogData): string;
56
- export {};
57
- //# sourceMappingURL=logger.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../src/log/logger.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,KAAK,OAAO,EAAE,KAAK,KAAK,EAAE,MAAM,aAAa,CAAC;AAEvD,QAAA,MAAM,SAAS,kEAAmE,CAAC;AAEnF;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG,CAAC,OAAO,SAAS,CAAC,CAAC,MAAM,CAAC,CAAC;AAelD,eAAO,IAAI,YAAY,4DAAgB,CAAC;AAwBxC,oDAAoD;AACpD,KAAK,UAAU,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,KAAK,GAAG,OAAO,EAAE,IAAI,CAAC,EAAE,OAAO,KAAK,IAAI,CAAC;AAE/E;;GAEG;AACH,MAAM,MAAM,MAAM,GAAG;KAAG,CAAC,IAAI,QAAQ,GAAG,KAAK;CAAE,GAAG;IAA4B,KAAK,EAAE,UAAU,CAAA;CAAE,CAAC;AAElG;;;GAGG;AACH,MAAM,MAAM,WAAW,GAAG,MAAM,CAAC;AAEjC;;;;;;;;;GASG;AAEH,wBAAgB,iBAAiB,CAAC,IAAI,EAAE,MAAM,GAAG,WAAW,CAqB3D;AAED;;;;;;GAMG;AACH,wBAAgB,yBAAyB,CAAC,WAAW,EAAE,WAAW,EAAE,YAAY,EAAE,OAAO,GAAG,WAAW,CActG;AAED,sCAAsC;AACtC,MAAM,MAAM,UAAU,GAAG,CAAC,KAAK,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,OAAO,KAAK,IAAI,CAAC;AAInG;;;GAGG;AACH,wBAAgB,KAAK,CAAC,OAAO,EAAE,UAAU,QAExC;AAED,mCAAmC;AACnC,wBAAgB,QAAQ,CAAC,KAAK,EAAE,QAAQ,QAEvC;AAkCD;;;GAGG;AACH,wBAAgB,UAAU,CAAC,IAAI,CAAC,EAAE,OAAO,GAAG,MAAM,CAIjD"}