@aztec/foundation 0.0.1-commit.d431d1c → 0.0.1-commit.e310a4c8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. package/dest/collection/array.d.ts +12 -1
  2. package/dest/collection/array.d.ts.map +1 -1
  3. package/dest/collection/array.js +51 -0
  4. package/dest/config/env_var.d.ts +2 -2
  5. package/dest/config/env_var.d.ts.map +1 -1
  6. package/dest/crypto/poseidon/index.d.ts +1 -2
  7. package/dest/crypto/poseidon/index.d.ts.map +1 -1
  8. package/dest/crypto/poseidon/index.js +0 -9
  9. package/dest/crypto/random/randomness_singleton.d.ts +4 -3
  10. package/dest/crypto/random/randomness_singleton.d.ts.map +1 -1
  11. package/dest/crypto/random/randomness_singleton.js +4 -4
  12. package/dest/crypto/sync/poseidon/index.d.ts +1 -2
  13. package/dest/crypto/sync/poseidon/index.d.ts.map +1 -1
  14. package/dest/crypto/sync/poseidon/index.js +0 -8
  15. package/dest/jest/setup.js +4 -1
  16. package/dest/json-rpc/client/undici.d.ts +1 -1
  17. package/dest/json-rpc/client/undici.d.ts.map +1 -1
  18. package/dest/json-rpc/client/undici.js +21 -4
  19. package/dest/json-rpc/server/safe_json_rpc_server.js +1 -1
  20. package/dest/log/libp2p_logger.d.ts +5 -2
  21. package/dest/log/libp2p_logger.d.ts.map +1 -1
  22. package/dest/log/libp2p_logger.js +14 -4
  23. package/dest/log/pino-logger-server.d.ts +9 -0
  24. package/dest/log/pino-logger-server.d.ts.map +1 -0
  25. package/dest/log/pino-logger-server.js +18 -0
  26. package/dest/log/pino-logger.d.ts +37 -8
  27. package/dest/log/pino-logger.d.ts.map +1 -1
  28. package/dest/log/pino-logger.js +115 -26
  29. package/dest/queue/semaphore.d.ts +5 -1
  30. package/dest/queue/semaphore.d.ts.map +1 -1
  31. package/dest/trees/balanced_merkle_tree_root.d.ts +2 -3
  32. package/dest/trees/balanced_merkle_tree_root.d.ts.map +1 -1
  33. package/dest/trees/balanced_merkle_tree_root.js +2 -3
  34. package/dest/trees/hasher.d.ts +1 -2
  35. package/dest/trees/hasher.d.ts.map +1 -1
  36. package/dest/trees/hasher.js +0 -5
  37. package/dest/trees/membership_witness.d.ts +5 -1
  38. package/dest/trees/membership_witness.d.ts.map +1 -1
  39. package/dest/trees/membership_witness.js +9 -0
  40. package/dest/trees/merkle_tree_calculator.d.ts +1 -1
  41. package/dest/trees/merkle_tree_calculator.d.ts.map +1 -1
  42. package/dest/trees/merkle_tree_calculator.js +2 -2
  43. package/dest/trees/sibling_path.d.ts +1 -1
  44. package/dest/trees/sibling_path.d.ts.map +1 -1
  45. package/dest/trees/sibling_path.js +2 -2
  46. package/dest/types/index.d.ts +3 -1
  47. package/dest/types/index.d.ts.map +1 -1
  48. package/dest/types/index.js +3 -0
  49. package/package.json +3 -2
  50. package/src/collection/array.ts +52 -0
  51. package/src/config/env_var.ts +9 -1
  52. package/src/crypto/poseidon/index.ts +0 -10
  53. package/src/crypto/random/randomness_singleton.ts +9 -5
  54. package/src/crypto/sync/poseidon/index.ts +0 -9
  55. package/src/jest/setup.mjs +4 -1
  56. package/src/json-rpc/client/undici.ts +21 -3
  57. package/src/json-rpc/server/safe_json_rpc_server.ts +1 -1
  58. package/src/log/libp2p_logger.ts +12 -5
  59. package/src/log/pino-logger-server.ts +25 -0
  60. package/src/log/pino-logger.ts +136 -36
  61. package/src/queue/semaphore.ts +5 -0
  62. package/src/trees/balanced_merkle_tree_root.ts +2 -5
  63. package/src/trees/hasher.ts +0 -4
  64. package/src/trees/membership_witness.ts +8 -0
  65. package/src/trees/merkle_tree_calculator.ts +2 -2
  66. package/src/trees/sibling_path.ts +2 -2
  67. package/src/types/index.ts +5 -0
@@ -250,6 +250,58 @@ export function chunk<T>(items: T[], chunkSize: number): T[][] {
250
250
  return chunks;
251
251
  }
252
252
 
253
+ /**
254
+ * Splits the given array into chunks of the given size, wrapping around to the beginning
255
+ * if the last chunk would be smaller than the requested size.
256
+ * Returns empty array for empty input. Returns single chunk with all items if chunkSize <= 0.
257
+ */
258
+ export function chunkWrapAround<T>(items: T[], chunkSize: number): T[][] {
259
+ if (items.length === 0) {
260
+ return [];
261
+ }
262
+ if (chunkSize <= 0 || items.length <= chunkSize) {
263
+ return [items];
264
+ }
265
+ const remainder = items.length % chunkSize;
266
+ if (remainder === 0) {
267
+ return chunk(items, chunkSize);
268
+ }
269
+ const wrapAroundCount = chunkSize - remainder;
270
+ const wrappedItems = [...items, ...items.slice(0, wrapAroundCount)];
271
+ return chunk(wrappedItems, chunkSize);
272
+ }
273
+
274
+ const UNINITIALIZED = Symbol('uninitialized');
275
+
276
+ /**
277
+ * Splits the given iterable into chunks based on the key returned by the given function.
278
+ * Items must be contiguous to be included in the same chunk.
279
+ */
280
+ export function chunkBy<T, U>(items: T[], fn: (item: T) => U): T[][] {
281
+ const chunks: T[][] = [];
282
+ let currentChunk: T[] = [];
283
+ let currentKey: U | typeof UNINITIALIZED = UNINITIALIZED;
284
+
285
+ for (const item of items) {
286
+ const key = fn(item);
287
+ if (currentKey === UNINITIALIZED || key !== currentKey) {
288
+ if (currentChunk.length > 0) {
289
+ chunks.push(currentChunk);
290
+ }
291
+ currentChunk = [item];
292
+ currentKey = key;
293
+ } else {
294
+ currentChunk.push(item);
295
+ }
296
+ }
297
+
298
+ if (currentChunk.length > 0) {
299
+ chunks.push(currentChunk);
300
+ }
301
+
302
+ return chunks;
303
+ }
304
+
253
305
  /** Partitions the given iterable into two arrays based on the predicate. */
254
306
  export function partition<T>(items: T[], predicate: (item: T) => boolean): [T[], T[]] {
255
307
  const pass: T[] = [];
@@ -77,6 +77,7 @@ export type EnvVar =
77
77
  | 'L1_CONSENSUS_HOST_API_KEY_HEADERS'
78
78
  | 'LOG_JSON'
79
79
  | 'LOG_MULTILINE'
80
+ | 'LOG_NO_COLOR_PER_ACTOR'
80
81
  | 'LOG_LEVEL'
81
82
  | 'MNEMONIC'
82
83
  | 'NETWORK'
@@ -94,6 +95,10 @@ export type EnvVar =
94
95
  | 'PUBLIC_OTEL_INCLUDE_METRICS'
95
96
  | 'PUBLIC_OTEL_COLLECT_FROM'
96
97
  | 'PUBLIC_OTEL_OPT_OUT'
98
+ | 'P2P_BATCH_TX_REQUESTER_SMART_PARALLEL_WORKER_COUNT'
99
+ | 'P2P_BATCH_TX_REQUESTER_DUMB_PARALLEL_WORKER_COUNT'
100
+ | 'P2P_BATCH_TX_REQUESTER_TX_BATCH_SIZE'
101
+ | 'P2P_BATCH_TX_REQUESTER_BAD_PEER_THRESHOLD'
97
102
  | 'P2P_BLOCK_CHECK_INTERVAL_MS'
98
103
  | 'P2P_BLOCK_REQUEST_BATCH_SIZE'
99
104
  | 'P2P_BOOTSTRAP_NODE_ENR_VERSION_CHECK'
@@ -151,6 +156,7 @@ export type EnvVar =
151
156
  | 'PROVER_BROKER_BATCH_INTERVAL_MS'
152
157
  | 'PROVER_BROKER_BATCH_SIZE'
153
158
  | 'PROVER_BROKER_MAX_EPOCHS_TO_KEEP_RESULTS_FOR'
159
+ | 'PROVER_CANCEL_JOBS_ON_STOP'
154
160
  | 'PROVER_COORDINATION_NODE_URLS'
155
161
  | 'PROVER_FAILED_PROOF_STORE'
156
162
  | 'PROVER_NODE_FAILED_EPOCH_STORE'
@@ -175,6 +181,7 @@ export type EnvVar =
175
181
  | 'PROVER_TEST_VERIFICATION_DELAY_MS'
176
182
  | 'PXE_L2_BLOCK_BATCH_SIZE'
177
183
  | 'PXE_PROVER_ENABLED'
184
+ | 'PXE_SYNC_CHAIN_TIP'
178
185
  | 'RPC_MAX_BATCH_SIZE'
179
186
  | 'RPC_MAX_BODY_SIZE'
180
187
  | 'RPC_SIMULATE_PUBLIC_MAX_GAS_LIMIT'
@@ -235,6 +242,7 @@ export type EnvVar =
235
242
  | 'TX_COLLECTION_FAST_MAX_PARALLEL_REQUESTS_PER_NODE'
236
243
  | 'TX_COLLECTION_NODE_RPC_MAX_BATCH_SIZE'
237
244
  | 'TX_COLLECTION_NODE_RPC_URLS'
245
+ | 'TX_COLLECTION_PROPOSAL_TX_COLLECTOR_TYPE'
238
246
  | 'TX_PUBLIC_SETUP_ALLOWLIST'
239
247
  | 'TXE_PORT'
240
248
  | 'TRANSACTIONS_DISABLED'
@@ -246,7 +254,6 @@ export type EnvVar =
246
254
  | 'VALIDATOR_ADDRESSES'
247
255
  | 'ROLLUP_VERSION'
248
256
  | 'WS_BLOCK_CHECK_INTERVAL_MS'
249
- | 'WS_PROVEN_BLOCKS_ONLY'
250
257
  | 'WS_BLOCK_REQUEST_BATCH_SIZE'
251
258
  | 'L1_READER_VIEM_POLLING_INTERVAL_MS'
252
259
  | 'WS_DATA_DIRECTORY'
@@ -264,6 +271,7 @@ export type EnvVar =
264
271
  | 'AZTEC_LOCAL_EJECTION_THRESHOLD'
265
272
  | 'AZTEC_MANA_TARGET'
266
273
  | 'AZTEC_PROVING_COST_PER_MANA'
274
+ | 'AZTEC_INITIAL_ETH_PER_FEE_ASSET'
267
275
  | 'AZTEC_SLASHING_QUORUM'
268
276
  | 'AZTEC_SLASHING_ROUND_SIZE_IN_EPOCHS'
269
277
  | 'AZTEC_SLASHING_LIFETIME_IN_ROUNDS'
@@ -35,16 +35,6 @@ export async function poseidon2HashWithSeparator(input: Fieldable[], separator:
35
35
  return Fr.fromBuffer(Buffer.from(response.hash));
36
36
  }
37
37
 
38
- export async function poseidon2HashAccumulate(input: Fieldable[]): Promise<Fr> {
39
- const inputFields = serializeToFields(input);
40
- await BarretenbergSync.initSingleton();
41
- const api = BarretenbergSync.getSingleton();
42
- const response = api.poseidon2HashAccumulate({
43
- inputs: inputFields.map(i => i.toBuffer()),
44
- });
45
- return Fr.fromBuffer(Buffer.from(response.hash));
46
- }
47
-
48
38
  /**
49
39
  * Runs a Poseidon2 permutation.
50
40
  * @param input the input state. Expected to be of size 4.
@@ -1,4 +1,4 @@
1
- import { createLogger } from '../../log/pino-logger.js';
1
+ import { type Logger, type LoggerBindings, createLogger } from '../../log/pino-logger.js';
2
2
 
3
3
  /**
4
4
  * A number generator which is used as a source of randomness in the system. If the SEED env variable is set, the
@@ -12,9 +12,13 @@ export class RandomnessSingleton {
12
12
  private static instance: RandomnessSingleton;
13
13
 
14
14
  private counter = 0;
15
- private readonly log = createLogger('foundation:randomness_singleton');
15
+ private log: Logger;
16
16
 
17
- private constructor(private readonly seed?: number) {
17
+ private constructor(
18
+ private readonly seed?: number,
19
+ bindings?: LoggerBindings,
20
+ ) {
21
+ this.log = createLogger('foundation:randomness_singleton', bindings);
18
22
  if (seed !== undefined) {
19
23
  this.log.debug(`Using pseudo-randomness with seed: ${seed}`);
20
24
  this.counter = seed;
@@ -23,10 +27,10 @@ export class RandomnessSingleton {
23
27
  }
24
28
  }
25
29
 
26
- public static getInstance(): RandomnessSingleton {
30
+ public static getInstance(bindings?: LoggerBindings): RandomnessSingleton {
27
31
  if (!RandomnessSingleton.instance) {
28
32
  const seed = process.env.SEED ? Number(process.env.SEED) : undefined;
29
- RandomnessSingleton.instance = new RandomnessSingleton(seed);
33
+ RandomnessSingleton.instance = new RandomnessSingleton(seed, bindings);
30
34
  }
31
35
 
32
36
  return RandomnessSingleton.instance;
@@ -34,15 +34,6 @@ export function poseidon2HashWithSeparator(input: Fieldable[], separator: number
34
34
  return Fr.fromBuffer(Buffer.from(response.hash));
35
35
  }
36
36
 
37
- export function poseidon2HashAccumulate(input: Fieldable[]): Fr {
38
- const inputFields = serializeToFields(input);
39
- const api = BarretenbergSync.getSingleton();
40
- const response = api.poseidon2HashAccumulate({
41
- inputs: inputFields.map(i => i.toBuffer()),
42
- });
43
- return Fr.fromBuffer(Buffer.from(response.hash));
44
- }
45
-
46
37
  /**
47
38
  * Runs a Poseidon2 permutation.
48
39
  * @param input the input state. Expected to be of size 4.
@@ -1,3 +1,4 @@
1
+ import { parseBooleanEnv } from '@aztec/foundation/config';
1
2
  import { overwriteLoggingStream, pinoPrettyOpts } from '@aztec/foundation/log';
2
3
 
3
4
  import pretty from 'pino-pretty';
@@ -6,4 +7,6 @@ import pretty from 'pino-pretty';
6
7
  // file so we don't mess up with dependencies in non-testing environments,
7
8
  // since pino-pretty messes up with browser bundles.
8
9
  // See also https://www.npmjs.com/package/pino-pretty?activeTab=readme#user-content-usage-with-jest
9
- overwriteLoggingStream(pretty(pinoPrettyOpts));
10
+ if (!parseBooleanEnv(process.env.LOG_JSON)) {
11
+ overwriteLoggingStream(pretty(pinoPrettyOpts));
12
+ }
@@ -1,3 +1,5 @@
1
+ import { promisify } from 'node:util';
2
+ import { gunzip as gunzipCb, gzip as gzipCb } from 'node:zlib';
1
3
  import { Agent, type Dispatcher } from 'undici';
2
4
 
3
5
  import { createLogger } from '../../log/pino-logger.js';
@@ -5,8 +7,14 @@ import { NoRetryError } from '../../retry/index.js';
5
7
  import { jsonStringify } from '../convert.js';
6
8
  import type { JsonRpcFetch } from './fetch.js';
7
9
 
10
+ const gzip = promisify(gzipCb);
11
+ const gunzip = promisify(gunzipCb);
12
+
8
13
  const log = createLogger('json-rpc:json_rpc_client:undici');
9
14
 
15
+ /** Minimum request size in bytes to trigger compression. */
16
+ const COMPRESSION_THRESHOLD = 1024;
17
+
10
18
  export { Agent };
11
19
 
12
20
  export function makeUndiciFetch(client = new Agent()): JsonRpcFetch {
@@ -14,14 +22,18 @@ export function makeUndiciFetch(client = new Agent()): JsonRpcFetch {
14
22
  log.trace(`JsonRpcClient.fetch: ${host}`, { host, body });
15
23
  let resp: Dispatcher.ResponseData;
16
24
  try {
25
+ const jsonBody = Buffer.from(jsonStringify(body));
26
+ const shouldCompress = jsonBody.length >= COMPRESSION_THRESHOLD;
17
27
  resp = await client.request({
18
28
  method: 'POST',
19
29
  origin: new URL(host),
20
30
  path: '/',
21
- body: jsonStringify(body),
31
+ body: shouldCompress ? await gzip(jsonBody) : jsonBody,
22
32
  headers: {
23
33
  ...extraHeaders,
24
34
  'content-type': 'application/json',
35
+ ...(shouldCompress && { 'content-encoding': 'gzip' }),
36
+ 'accept-encoding': 'gzip',
25
37
  },
26
38
  });
27
39
  } catch (err) {
@@ -31,13 +43,19 @@ export function makeUndiciFetch(client = new Agent()): JsonRpcFetch {
31
43
 
32
44
  let responseJson: any;
33
45
  const responseOk = resp.statusCode >= 200 && resp.statusCode <= 299;
46
+ const contentEncoding = resp.headers['content-encoding'];
34
47
  try {
35
- responseJson = await resp.body.json();
48
+ if (contentEncoding === 'gzip') {
49
+ const jsonBuffer = await gunzip(await resp.body.arrayBuffer());
50
+ responseJson = JSON.parse(jsonBuffer.toString('utf-8'));
51
+ } else {
52
+ responseJson = await resp.body.json();
53
+ }
36
54
  } catch {
37
55
  if (!responseOk) {
38
56
  throw new Error('HTTP ' + resp.statusCode);
39
57
  }
40
- throw new Error(`Failed to parse body as JSON: ${await resp.body.text()}`);
58
+ throw new Error(`Failed to parse body as JSON. encoding: ${contentEncoding}, body: ${await resp.body.text()}`);
41
59
  }
42
60
 
43
61
  if (!responseOk) {
@@ -35,7 +35,7 @@ export type SafeJsonRpcServerConfig = {
35
35
  const defaultServerConfig: SafeJsonRpcServerConfig = {
36
36
  http200OnError: false,
37
37
  maxBatchSize: 100,
38
- maxBodySizeBytes: '50mb',
38
+ maxBodySizeBytes: '1mb',
39
39
  };
40
40
 
41
41
  export class SafeJsonRpcServer {
@@ -2,15 +2,17 @@ import type { ComponentLogger, Logger } from '@libp2p/interface';
2
2
 
3
3
  import { getLogLevelFromFilters } from './log-filters.js';
4
4
  import type { LogLevel } from './log-levels.js';
5
- import { logFilters, logger } from './pino-logger.js';
5
+ import { type LoggerBindings, logFilters, logger } from './pino-logger.js';
6
6
 
7
7
  /**
8
8
  * Creates a libp2p compatible logger that wraps our pino logger.
9
9
  * This adapter implements the ComponentLogger interface required by libp2p.
10
+ * @param namespace - Base namespace for the logger
11
+ * @param bindings - Optional bindings to pass to the logger (actor, instanceId)
10
12
  */
11
- export function createLibp2pComponentLogger(namespace: string): ComponentLogger {
13
+ export function createLibp2pComponentLogger(namespace: string, bindings?: LoggerBindings): ComponentLogger {
12
14
  return {
13
- forComponent: (component: string) => createLibp2pLogger(`${namespace}:${component}`),
15
+ forComponent: (component: string) => createLibp2pLogger(`${namespace}:${component}`, bindings),
14
16
  };
15
17
  }
16
18
 
@@ -24,9 +26,14 @@ function replaceFormatting(message: string) {
24
26
  return message.replace(/(%p|%a)/g, '%s');
25
27
  }
26
28
 
27
- function createLibp2pLogger(component: string): Logger {
29
+ function createLibp2pLogger(component: string, bindings?: LoggerBindings): Logger {
28
30
  // Create a direct pino logger instance for libp2p that supports string interpolation
29
- const log = logger.child({ module: component }, { level: getLogLevelFromFilters(logFilters, component) });
31
+ const actor = bindings?.actor;
32
+ const instanceId = bindings?.instanceId;
33
+ const log = logger.child(
34
+ { module: component, ...(actor && { actor }), ...(instanceId && { instanceId }) },
35
+ { level: getLogLevelFromFilters(logFilters, component) },
36
+ );
30
37
 
31
38
  const logIfEnabled = (level: LogLevel, message: string, ...args: unknown[]) => {
32
39
  if (!log.isLevelEnabled(level)) {
@@ -0,0 +1,25 @@
1
+ import { AsyncLocalStorage } from 'node:async_hooks';
2
+
3
+ import { type LoggerBindings, addLogBindingsHandler, removeLogBindingsHandler } from './pino-logger.js';
4
+
5
+ /** AsyncLocalStorage for logger bindings context propagation (Node.js only). */
6
+ const bindingsStorage = new AsyncLocalStorage<LoggerBindings>();
7
+
8
+ /** Returns the current bindings from AsyncLocalStorage, if any. */
9
+ export function getBindings(): LoggerBindings | undefined {
10
+ return bindingsStorage.getStore();
11
+ }
12
+
13
+ /**
14
+ * Runs a callback within a bindings context. All loggers created within the callback
15
+ * will automatically inherit the bindings (actor, instanceId) via the log bindings handler.
16
+ */
17
+ export async function withLoggerBindings<T>(bindings: LoggerBindings, callback: () => Promise<T>): Promise<T> {
18
+ const handler = () => bindingsStorage.getStore();
19
+ addLogBindingsHandler(handler);
20
+ try {
21
+ return await bindingsStorage.run(bindings, callback);
22
+ } finally {
23
+ removeLogBindingsHandler(handler);
24
+ }
25
+ }
@@ -1,4 +1,4 @@
1
- import { createColors, isColorSupported } from 'colorette';
1
+ import { type Color, createColors, isColorSupported } from 'colorette';
2
2
  import isNode from 'detect-node';
3
3
  import { pino, symbols } from 'pino';
4
4
  import type { Writable } from 'stream';
@@ -12,9 +12,51 @@ import { getLogLevelFromFilters, parseEnv } from './log-filters.js';
12
12
  import type { LogLevel } from './log-levels.js';
13
13
  import type { LogData, LogFn } from './log_fn.js';
14
14
 
15
- export function createLogger(module: string): Logger {
16
- module = logNameHandlers.reduce((moduleName, handler) => handler(moduleName), module.replace(/^aztec:/, ''));
17
- const pinoLogger = logger.child({ module }, { level: getLogLevelFromFilters(logFilters, module) });
15
+ /** Optional bindings to pass to createLogger for additional context. */
16
+ export type LoggerBindings = {
17
+ /** Actor label shown in logs (e.g., 'MAIN', 'prover-node'). */
18
+ actor?: string;
19
+ /** Instance identifier for distinguishing multiple instances of the same component. */
20
+ instanceId?: string;
21
+ };
22
+
23
+ // Allow global hooks for providing default bindings.
24
+ // Used by withLoggerBindings in pino-logger-server to propagate bindings via AsyncLocalStorage.
25
+ type LogBindingsHandler = () => LoggerBindings | undefined;
26
+ const logBindingsHandlers: LogBindingsHandler[] = [];
27
+
28
+ export function addLogBindingsHandler(handler: LogBindingsHandler): void {
29
+ logBindingsHandlers.push(handler);
30
+ }
31
+
32
+ export function removeLogBindingsHandler(handler: LogBindingsHandler) {
33
+ const index = logBindingsHandlers.indexOf(handler);
34
+ if (index !== -1) {
35
+ logBindingsHandlers.splice(index, 1);
36
+ }
37
+ }
38
+
39
+ function getBindingsFromHandlers(): LoggerBindings | undefined {
40
+ for (const handler of logBindingsHandlers) {
41
+ const bindings = handler();
42
+ if (bindings) {
43
+ return bindings;
44
+ }
45
+ }
46
+ return undefined;
47
+ }
48
+
49
+ export function createLogger(module: string, bindings?: LoggerBindings): Logger {
50
+ module = module.replace(/^aztec:/, '');
51
+
52
+ const resolvedBindings = { ...getBindingsFromHandlers(), ...bindings };
53
+ const actor = resolvedBindings?.actor;
54
+ const instanceId = resolvedBindings?.instanceId;
55
+
56
+ const pinoLogger = logger.child(
57
+ { module, ...(actor && { actor }), ...(instanceId && { instanceId }) },
58
+ { level: getLogLevelFromFilters(logFilters, module) },
59
+ );
18
60
 
19
61
  // We check manually for isLevelEnabled to avoid calling processLogData unnecessarily.
20
62
  // Note that isLevelEnabled is missing from the browser version of pino.
@@ -44,11 +86,24 @@ export function createLogger(module: string): Logger {
44
86
  isLevelEnabled: (level: LogLevel) => isLevelEnabled(pinoLogger, level),
45
87
  /** Module name for the logger. */
46
88
  module,
47
- /** Creates another logger by extending this logger module name. */
48
- createChild: (childModule: string) => createLogger(`${module}:${childModule}`),
89
+ /** Creates another logger by extending this logger module name and preserving bindings. */
90
+ createChild: (childModule: string) => createLogger(`${module}:${childModule}`, { actor, instanceId }),
91
+ /** Returns the bindings (actor, instanceId) for this logger. */
92
+ getBindings: () => ({ actor, instanceId }),
49
93
  };
50
94
  }
51
95
 
96
+ /**
97
+ * Returns a logger for the given module. If loggerOrBindings is already a Logger, returns it directly.
98
+ * Otherwise, creates a new logger with the given module name and bindings.
99
+ */
100
+ export function resolveLogger(module: string, loggerOrBindings?: Logger | LoggerBindings): Logger {
101
+ if (loggerOrBindings && 'info' in loggerOrBindings) {
102
+ return loggerOrBindings as Logger;
103
+ }
104
+ return createLogger(module, loggerOrBindings);
105
+ }
106
+
52
107
  // Allow global hooks for processing log data.
53
108
  // Used for injecting OTEL trace_id in telemetry client.
54
109
  type LogDataHandler = (data: LogData) => LogData;
@@ -62,31 +117,6 @@ function processLogData(data: LogData): LogData {
62
117
  return logDataHandlers.reduce((accum, handler) => handler(accum), data);
63
118
  }
64
119
 
65
- // Allow global hooks for tweaking module names.
66
- // Used in tests to add a uid to modules, so we can differentiate multiple nodes in the same process.
67
- type LogNameHandler = (module: string) => string;
68
- const logNameHandlers: LogNameHandler[] = [];
69
-
70
- export function addLogNameHandler(handler: LogNameHandler): void {
71
- logNameHandlers.push(handler);
72
- }
73
-
74
- export function removeLogNameHandler(handler: LogNameHandler) {
75
- const index = logNameHandlers.indexOf(handler);
76
- if (index !== -1) {
77
- logNameHandlers.splice(index, 1);
78
- }
79
- }
80
-
81
- /** Creates all loggers within the given callback with the suffix appended to the module name. */
82
- export async function withLogNameSuffix<T>(suffix: string, callback: () => Promise<T>): Promise<T> {
83
- const logNameHandler = (module: string) => `${module}:${suffix}`;
84
- addLogNameHandler(logNameHandler);
85
- const result = await callback();
86
- removeLogNameHandler(logNameHandler);
87
- return result;
88
- }
89
-
90
120
  // Patch isLevelEnabled missing from pino/browser.
91
121
  function isLevelEnabled(logger: pino.Logger<'verbose', boolean>, level: LogLevel): boolean {
92
122
  return typeof logger.isLevelEnabled === 'function'
@@ -146,22 +176,90 @@ export const levels = {
146
176
  // Transport options for pretty logging to stderr via pino-pretty.
147
177
  const colorEnv = process.env['FORCE_COLOR' satisfies EnvVar];
148
178
  const useColor = colorEnv === undefined ? isColorSupported : parseBooleanEnv(colorEnv);
149
- const { bold, reset } = createColors({ useColor });
150
- export const pinoPrettyOpts = {
179
+ const { bold, reset, cyan, magenta, yellow, blue, green, magentaBright, yellowBright, blueBright, greenBright } =
180
+ createColors({ useColor });
181
+
182
+ // Per-actor coloring: each unique actor gets a different color for easier visual distinction.
183
+ // Disabled when LOG_NO_COLOR_PER_ACTOR is set to a truthy value.
184
+ const useColorPerActor = useColor && !parseBooleanEnv(process.env['LOG_NO_COLOR_PER_ACTOR' satisfies EnvVar]);
185
+ const actorColors: Color[] = [yellow, magenta, blue, green, magentaBright, yellowBright, blueBright, greenBright];
186
+ const actorColorMap = new Map<string, Color>();
187
+ let nextColorIndex = 0;
188
+
189
+ /** Returns the color function assigned to a given actor, assigning a new one if needed. */
190
+ export function getActorColor(actor: string): Color {
191
+ let color = actorColorMap.get(actor);
192
+ if (!color) {
193
+ color = actorColors[nextColorIndex % actorColors.length];
194
+ actorColorMap.set(actor, color);
195
+ nextColorIndex++;
196
+ }
197
+ return color;
198
+ }
199
+
200
+ /** Resets the actor-to-color mapping. Useful for testing. */
201
+ export function resetActorColors(): void {
202
+ actorColorMap.clear();
203
+ nextColorIndex = 0;
204
+ }
205
+
206
+ // String template for messageFormat (used in worker threads and when per-actor coloring is disabled).
207
+ const messageFormatString = `${bold('{module}')}{if actor} ${cyan('{actor}')}{end}{if instanceId} ${reset(cyan('{instanceId}'))}{end} ${reset('{msg}')}`;
208
+
209
+ // Function for messageFormat when per-actor coloring is enabled (can only be used in-process, not worker threads).
210
+ type LogObject = { actor?: string; module?: string; instanceId?: string; msg?: string };
211
+
212
+ /** Formats a log message with per-actor coloring. Actor, module, and instanceId share the same color. */
213
+ export function formatLogMessage(log: LogObject, messageKey: string): string {
214
+ const actor = log.actor;
215
+ const module = log.module ?? '';
216
+ const instanceId = log.instanceId;
217
+ const msg = log[messageKey as keyof LogObject] ?? '';
218
+
219
+ // Use actor color for actor, module, and instanceId when actor is present
220
+ const color = actor ? getActorColor(actor) : cyan;
221
+
222
+ let result = bold(color(module));
223
+ if (actor) {
224
+ result += ' ' + color(actor);
225
+ }
226
+ if (instanceId) {
227
+ result += ' ' + reset(color(instanceId));
228
+ }
229
+ result += ' ' + reset(String(msg));
230
+ return result;
231
+ }
232
+
233
+ // Base options for pino-pretty (shared between transport and direct use).
234
+ const pinoPrettyBaseOpts = {
151
235
  destination: 2,
152
236
  sync: true,
153
237
  colorize: useColor,
154
- ignore: 'module,pid,hostname,trace_id,span_id,trace_flags,severity',
155
- messageFormat: `${bold('{module}')} ${reset('{msg}')}`,
238
+ ignore: 'module,actor,instanceId,pid,hostname,trace_id,span_id,trace_flags,severity',
156
239
  customLevels: 'fatal:60,error:50,warn:40,info:30,verbose:25,debug:20,trace:10',
157
240
  customColors: 'fatal:bgRed,error:red,warn:yellow,info:green,verbose:magenta,debug:blue,trace:gray',
158
241
  minimumLevel: 'trace' as const,
159
242
  singleLine: !parseBooleanEnv(process.env['LOG_MULTILINE' satisfies EnvVar]),
160
243
  };
161
244
 
245
+ /**
246
+ * Pino-pretty options for direct use (e.g., jest/setup.mjs).
247
+ * Includes function-based messageFormat for per-actor coloring when enabled.
248
+ */
249
+ export const pinoPrettyOpts = {
250
+ ...pinoPrettyBaseOpts,
251
+ messageFormat: useColorPerActor ? formatLogMessage : messageFormatString,
252
+ };
253
+
254
+ // Transport options use string template only (functions can't be serialized to worker threads).
255
+ const prettyTransportOpts = {
256
+ ...pinoPrettyBaseOpts,
257
+ messageFormat: messageFormatString,
258
+ };
259
+
162
260
  const prettyTransport: pino.TransportTargetOptions = {
163
261
  target: 'pino-pretty',
164
- options: pinoPrettyOpts,
262
+ options: prettyTransportOpts,
165
263
  level: 'trace',
166
264
  };
167
265
 
@@ -262,6 +360,8 @@ export type Logger = { [K in LogLevel]: LogFn } & { /** Error log function */ er
262
360
  isLevelEnabled: (level: LogLevel) => boolean;
263
361
  module: string;
264
362
  createChild: (childModule: string) => Logger;
363
+ /** Returns the bindings (actor, instanceId) for this logger. */
364
+ getBindings: () => LoggerBindings;
265
365
  };
266
366
 
267
367
  /**
@@ -1,5 +1,10 @@
1
1
  import { FifoMemoryQueue } from './fifo_memory_queue.js';
2
2
 
3
+ export interface ISemaphore {
4
+ acquire(): Promise<void>;
5
+ release(): void;
6
+ }
7
+
3
8
  /**
4
9
  * Allows the acquiring of up to `size` tokens before calls to acquire block, waiting for a call to release().
5
10
  */
@@ -1,10 +1,7 @@
1
- import { pedersenMerkleHash, poseidonMerkleHash, shaMerkleHash } from './hasher.js';
1
+ import { poseidonMerkleHash, shaMerkleHash } from './hasher.js';
2
2
 
3
3
  export const computeBalancedShaRoot = (leaves: Buffer[]) => computeBalancedMerkleTreeRoot(leaves);
4
4
 
5
- export const computeBalancedPedersenRoot = async (leaves: Buffer[]) =>
6
- await computeBalancedMerkleTreeRootAsync(leaves, pedersenMerkleHash);
7
-
8
5
  export const computeBalancedPoseidonRoot = async (leaves: Buffer[]) =>
9
6
  await computeBalancedMerkleTreeRootAsync(leaves, poseidonMerkleHash);
10
7
 
@@ -33,7 +30,7 @@ export function computeBalancedMerkleTreeRoot(leaves: Buffer[], hasher = shaMerk
33
30
 
34
31
  /**
35
32
  * Computes the Merkle root with the provided leaves **asynchronously**.
36
- * This method uses an asynchronous hash function (defaults to `pedersenHash`).
33
+ * This method uses an asynchronous hash function (defaults to `poseidon2Hash`).
37
34
  *
38
35
  * @throws If the number of leaves is not a power of two.
39
36
  */
@@ -1,4 +1,3 @@
1
- import { pedersenHash as pedersenHashArray } from '../crypto/pedersen/index.js';
2
1
  import { poseidon2Hash } from '../crypto/poseidon/index.js';
3
2
  import { sha256Trunc } from '../crypto/sha256/index.js';
4
3
 
@@ -45,8 +44,5 @@ export interface AsyncHasher {
45
44
  export const shaMerkleHash: Hasher['hash'] = (left: Buffer, right: Buffer) =>
46
45
  sha256Trunc(Buffer.concat([left, right])) as Buffer<ArrayBuffer>;
47
46
 
48
- export const pedersenMerkleHash: AsyncHasher['hash'] = async (left: Buffer, right: Buffer) =>
49
- (await pedersenHashArray([left, right])).toBuffer() as Buffer<ArrayBuffer>;
50
-
51
47
  export const poseidonMerkleHash: AsyncHasher['hash'] = async (left: Buffer, right: Buffer) =>
52
48
  (await poseidon2Hash([left, right])).toBuffer() as Buffer<ArrayBuffer>;
@@ -38,6 +38,14 @@ export class MembershipWitness<N extends number> {
38
38
  return [new Fr(this.leafIndex), ...this.siblingPath];
39
39
  }
40
40
 
41
+ /**
42
+ * Returns a representation of the membership witness as expected by intrinsic Noir deserialization.
43
+ */
44
+ public toNoirRepresentation(): (string | string[])[] {
45
+ // TODO(#12874): remove the stupid as string conversion by modifying ForeignCallOutput type in acvm.js
46
+ return [new Fr(this.leafIndex).toString() as string, this.siblingPath.map(fr => fr.toString()) as string[]];
47
+ }
48
+
41
49
  static schemaFor<N extends number>(size: N) {
42
50
  return schemas.Buffer.transform(b => MembershipWitness.fromBuffer(b, size));
43
51
  }
@@ -1,4 +1,4 @@
1
- import { pedersenHash } from '@aztec/foundation/crypto/pedersen';
1
+ import { poseidon2Hash } from '@aztec/foundation/crypto/poseidon';
2
2
 
3
3
  import type { AsyncHasher } from './hasher.js';
4
4
  import { MerkleTree } from './merkle_tree.js';
@@ -19,7 +19,7 @@ export class MerkleTreeCalculator {
19
19
  height: number,
20
20
  zeroLeaf: Buffer = Buffer.alloc(32),
21
21
  hasher = async (left: Buffer, right: Buffer) =>
22
- (await pedersenHash([left, right])).toBuffer() as Buffer<ArrayBuffer>,
22
+ (await poseidon2Hash([left, right])).toBuffer() as Buffer<ArrayBuffer>,
23
23
  ) {
24
24
  const zeroHashes = [zeroLeaf];
25
25
  for (let i = 0; i < height; i++) {
@@ -1,5 +1,5 @@
1
1
  import { makeTuple } from '../array/array.js';
2
- import { pedersenHash } from '../crypto/pedersen/index.js';
2
+ import { poseidon2Hash } from '../crypto/poseidon/index.js';
3
3
  import { Fr } from '../curves/bn254/index.js';
4
4
  import { schemas } from '../schemas/index.js';
5
5
  import {
@@ -172,7 +172,7 @@ export async function computeRootFromSiblingPath(
172
172
  leaf: Buffer,
173
173
  siblingPath: Buffer[],
174
174
  index: number,
175
- hasher = async (left: Buffer, right: Buffer) => (await pedersenHash([left, right])).toBuffer(),
175
+ hasher = async (left: Buffer, right: Buffer) => (await poseidon2Hash([left, right])).toBuffer(),
176
176
  ) {
177
177
  let result = leaf;
178
178
  for (const sibling of siblingPath) {
@@ -24,6 +24,11 @@ export function isDefined<T>(value: T | undefined): value is T {
24
24
  return value !== undefined;
25
25
  }
26
26
 
27
+ /** Type guard for error classes */
28
+ export function isErrorClass<T extends Error>(value: unknown, errorClass: new (...args: any[]) => T): value is T {
29
+ return value instanceof errorClass || (value instanceof Error && value.name === errorClass.name);
30
+ }
31
+
27
32
  /** Resolves a record-like type. Lifted from viem. */
28
33
  export type Prettify<T> = {
29
34
  [K in keyof T]: T[K];