@aztec/foundation 0.69.1 → 0.71.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/abi/note_selector.d.ts +5 -2
- package/dest/abi/note_selector.d.ts.map +1 -1
- package/dest/abi/note_selector.js +12 -4
- package/dest/collection/array.d.ts +8 -0
- package/dest/collection/array.d.ts.map +1 -1
- package/dest/collection/array.js +28 -1
- package/dest/config/env_var.d.ts +1 -1
- package/dest/config/env_var.d.ts.map +1 -1
- package/dest/config/index.d.ts +3 -1
- package/dest/config/index.d.ts.map +1 -1
- package/dest/config/index.js +6 -2
- package/dest/json-rpc/server/safe_json_rpc_server.d.ts +7 -2
- package/dest/json-rpc/server/safe_json_rpc_server.d.ts.map +1 -1
- package/dest/json-rpc/server/safe_json_rpc_server.js +18 -16
- package/dest/log/gcloud-logger-config.d.ts +14 -0
- package/dest/log/gcloud-logger-config.d.ts.map +1 -0
- package/dest/log/gcloud-logger-config.js +64 -0
- package/dest/log/pino-logger.d.ts +3 -0
- package/dest/log/pino-logger.d.ts.map +1 -1
- package/dest/log/pino-logger.js +25 -39
- package/dest/noir/noir_package_config.d.ts +2 -2
- package/dest/promise/running-promise.d.ts.map +1 -1
- package/dest/promise/running-promise.js +9 -1
- package/dest/queue/serial_queue.d.ts +1 -0
- package/dest/queue/serial_queue.d.ts.map +1 -1
- package/dest/queue/serial_queue.js +6 -1
- package/dest/testing/files/index.d.ts +2 -1
- package/dest/testing/files/index.d.ts.map +1 -1
- package/dest/testing/files/index.js +6 -2
- package/dest/trees/index.d.ts +2 -1
- package/dest/trees/index.d.ts.map +1 -1
- package/dest/trees/index.js +3 -2
- package/dest/trees/{unbalanced_merkle_root.d.ts → unbalanced_merkle_tree.d.ts} +6 -2
- package/dest/trees/unbalanced_merkle_tree.d.ts.map +1 -0
- package/dest/trees/{unbalanced_merkle_root.js → unbalanced_merkle_tree.js} +40 -2
- package/dest/trees/unbalanced_tree_store.d.ts +19 -0
- package/dest/trees/unbalanced_tree_store.d.ts.map +1 -0
- package/dest/trees/unbalanced_tree_store.js +80 -0
- package/package.json +2 -2
- package/src/abi/note_selector.ts +11 -4
- package/src/collection/array.ts +31 -0
- package/src/config/env_var.ts +7 -3
- package/src/config/index.ts +7 -2
- package/src/json-rpc/server/safe_json_rpc_server.ts +22 -15
- package/src/log/gcloud-logger-config.ts +71 -0
- package/src/log/pino-logger.ts +30 -42
- package/src/promise/running-promise.ts +8 -0
- package/src/queue/serial_queue.ts +5 -0
- package/src/testing/files/index.ts +6 -1
- package/src/trees/index.ts +2 -1
- package/src/trees/unbalanced_merkle_tree.ts +103 -0
- package/src/trees/unbalanced_tree_store.ts +102 -0
- package/dest/trees/unbalanced_merkle_root.d.ts.map +0 -1
- package/src/trees/unbalanced_merkle_root.ts +0 -52
package/src/collection/array.ts
CHANGED
|
@@ -146,6 +146,11 @@ export function maxBy<T>(arr: T[], fn: (x: T) => number): T | undefined {
|
|
|
146
146
|
return arr.reduce((max, x) => (fn(x) > fn(max) ? x : max), arr[0]);
|
|
147
147
|
}
|
|
148
148
|
|
|
149
|
+
/** Computes the sum of a numeric array. */
|
|
150
|
+
export function sum(arr: number[]): number {
|
|
151
|
+
return arr.reduce((a, b) => a + b, 0);
|
|
152
|
+
}
|
|
153
|
+
|
|
149
154
|
/** Computes the median of a numeric array. Returns undefined if array is empty. */
|
|
150
155
|
export function median(arr: number[]) {
|
|
151
156
|
if (arr.length === 0) {
|
|
@@ -155,3 +160,29 @@ export function median(arr: number[]) {
|
|
|
155
160
|
const mid = Math.floor(sorted.length / 2);
|
|
156
161
|
return sorted.length % 2 !== 0 ? sorted[mid] : (sorted[mid - 1] + sorted[mid]) / 2;
|
|
157
162
|
}
|
|
163
|
+
|
|
164
|
+
/** Computes the mean of a numeric array. Returns undefined if the array is empty. */
|
|
165
|
+
export function mean(values: number[]) {
|
|
166
|
+
if (values.length === 0) {
|
|
167
|
+
return undefined;
|
|
168
|
+
}
|
|
169
|
+
return values.reduce((a, b) => a + b, 0) / values.length;
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
/** Computes the variance of a numeric array. Returns undefined if there are less than 2 points. */
|
|
173
|
+
export function variance(values: number[]) {
|
|
174
|
+
if (values.length < 2) {
|
|
175
|
+
return undefined;
|
|
176
|
+
}
|
|
177
|
+
const avg = mean(values)!;
|
|
178
|
+
const points = values.map(value => value * value + avg * avg - 2 * value * avg);
|
|
179
|
+
return sum(points) / (values.length - 1);
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
/** Computes the standard deviation of a numeric array. Returns undefined if there are less than 2 points. */
|
|
183
|
+
export function stdDev(values: number[]) {
|
|
184
|
+
if (values.length < 2) {
|
|
185
|
+
return undefined;
|
|
186
|
+
}
|
|
187
|
+
return Math.sqrt(variance(values)!);
|
|
188
|
+
}
|
package/src/config/env_var.ts
CHANGED
|
@@ -64,6 +64,7 @@ export type EnvVar =
|
|
|
64
64
|
| 'NETWORK'
|
|
65
65
|
| 'NO_PXE'
|
|
66
66
|
| 'COIN_ISSUER_CONTRACT_ADDRESS'
|
|
67
|
+
| 'USE_GCLOUD_OBSERVABILITY'
|
|
67
68
|
| 'OTEL_EXPORTER_OTLP_METRICS_ENDPOINT'
|
|
68
69
|
| 'OTEL_EXPORTER_OTLP_TRACES_ENDPOINT'
|
|
69
70
|
| 'OTEL_EXPORTER_OTLP_LOGS_ENDPOINT'
|
|
@@ -99,6 +100,7 @@ export type EnvVar =
|
|
|
99
100
|
| 'P2P_TX_PROTOCOL'
|
|
100
101
|
| 'P2P_UDP_ANNOUNCE_ADDR'
|
|
101
102
|
| 'P2P_UDP_LISTEN_ADDR'
|
|
103
|
+
| 'P2P_ARCHIVED_TX_LIMIT'
|
|
102
104
|
| 'PEER_ID_PRIVATE_KEY'
|
|
103
105
|
| 'PROVER_BLOB_SINK_URL'
|
|
104
106
|
| 'PROOF_VERIFIER_L1_START_BLOCK'
|
|
@@ -113,9 +115,9 @@ export type EnvVar =
|
|
|
113
115
|
| 'PROVER_BROKER_JOB_TIMEOUT_MS'
|
|
114
116
|
| 'PROVER_BROKER_POLL_INTERVAL_MS'
|
|
115
117
|
| 'PROVER_BROKER_JOB_MAX_RETRIES'
|
|
116
|
-
| 'PROVER_BROKER_DATA_DIRECTORY'
|
|
117
118
|
| 'PROVER_COORDINATION_NODE_URL'
|
|
118
119
|
| 'PROVER_DISABLED'
|
|
120
|
+
| 'PROVER_FAILED_PROOF_STORE'
|
|
119
121
|
| 'PROVER_ID'
|
|
120
122
|
| 'PROVER_JOB_POLL_INTERVAL_MS'
|
|
121
123
|
| 'PROVER_JOB_TIMEOUT_MS'
|
|
@@ -160,7 +162,6 @@ export type EnvVar =
|
|
|
160
162
|
| 'TX_GOSSIP_VERSION'
|
|
161
163
|
| 'TXE_PORT'
|
|
162
164
|
| 'VALIDATOR_ATTESTATIONS_POLLING_INTERVAL_MS'
|
|
163
|
-
| 'VALIDATOR_ATTESTATIONS_WAIT_TIMEOUT_MS'
|
|
164
165
|
| 'VALIDATOR_DISABLED'
|
|
165
166
|
| 'VALIDATOR_PRIVATE_KEY'
|
|
166
167
|
| 'VALIDATOR_REEXECUTE'
|
|
@@ -189,8 +190,10 @@ export type EnvVar =
|
|
|
189
190
|
| 'L1_GAS_LIMIT_BUFFER_FIXED'
|
|
190
191
|
| 'L1_GAS_PRICE_MIN'
|
|
191
192
|
| 'L1_GAS_PRICE_MAX'
|
|
193
|
+
| 'L1_BLOB_FEE_PER_GAS_MAX'
|
|
192
194
|
| 'L1_PRIORITY_FEE_BUMP_PERCENTAGE'
|
|
193
195
|
| 'L1_PRIORITY_FEE_RETRY_BUMP_PERCENTAGE'
|
|
196
|
+
| 'L1_FIXED_PRIORITY_FEE_PER_GAS'
|
|
194
197
|
| 'L1_TX_MONITOR_MAX_ATTEMPTS'
|
|
195
198
|
| 'L1_TX_MONITOR_CHECK_INTERVAL_MS'
|
|
196
199
|
| 'L1_TX_MONITOR_STALL_TIME_MS'
|
|
@@ -201,4 +204,5 @@ export type EnvVar =
|
|
|
201
204
|
| 'FAUCET_INTERVAL_MS'
|
|
202
205
|
| 'FAUCET_L1_ASSETS'
|
|
203
206
|
| 'K8S_POD_NAME'
|
|
204
|
-
| 'K8S_POD_UID'
|
|
207
|
+
| 'K8S_POD_UID'
|
|
208
|
+
| 'K8S_NAMESPACE_NAME';
|
package/src/config/index.ts
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { type EnvVar } from './env_var.js';
|
|
2
2
|
|
|
3
|
-
export { EnvVar } from './env_var.js';
|
|
3
|
+
export { type EnvVar } from './env_var.js';
|
|
4
4
|
|
|
5
5
|
export interface ConfigMapping {
|
|
6
6
|
env?: EnvVar;
|
|
@@ -102,7 +102,7 @@ export function optionalNumberConfigHelper(): Pick<ConfigMapping, 'parseEnv'> {
|
|
|
102
102
|
export function booleanConfigHelper(
|
|
103
103
|
defaultVal = false,
|
|
104
104
|
): Required<Pick<ConfigMapping, 'parseEnv' | 'defaultValue' | 'isBoolean'> & { parseVal: (val: string) => boolean }> {
|
|
105
|
-
const parse = (val: string | boolean) => (typeof val === 'boolean' ? val :
|
|
105
|
+
const parse = (val: string | boolean) => (typeof val === 'boolean' ? val : parseBooleanEnv(val));
|
|
106
106
|
return {
|
|
107
107
|
parseEnv: parse,
|
|
108
108
|
parseVal: parse,
|
|
@@ -111,6 +111,11 @@ export function booleanConfigHelper(
|
|
|
111
111
|
};
|
|
112
112
|
}
|
|
113
113
|
|
|
114
|
+
/** Parses an env var as boolean. Returns true only if value is 1, true, or TRUE. */
|
|
115
|
+
export function parseBooleanEnv(val: string | undefined): boolean {
|
|
116
|
+
return val !== undefined && ['1', 'true', 'TRUE'].includes(val);
|
|
117
|
+
}
|
|
118
|
+
|
|
114
119
|
/**
|
|
115
120
|
* Safely parses a number from a string.
|
|
116
121
|
* If the value is not a number or is not a safe integer, the default value is returned.
|
|
@@ -60,6 +60,12 @@ export class SafeJsonRpcServer {
|
|
|
60
60
|
const message = err.issues.map(e => `${e.message} (${e.path.join('.')})`).join('. ') || 'Validation error';
|
|
61
61
|
ctx.status = 400;
|
|
62
62
|
ctx.body = { jsonrpc: '2.0', id: null, error: { code: -32701, message } };
|
|
63
|
+
} else if (this.http200OnError) {
|
|
64
|
+
ctx.body = {
|
|
65
|
+
jsonrpc: '2.0',
|
|
66
|
+
id: null,
|
|
67
|
+
error: { code: err.code || -32600, data: err.data, message: err.message },
|
|
68
|
+
};
|
|
63
69
|
} else {
|
|
64
70
|
ctx.status = 500;
|
|
65
71
|
ctx.body = { jsonrpc: '2.0', id: null, error: { code: -32600, message: err.message ?? 'Internal error' } };
|
|
@@ -111,16 +117,8 @@ export class SafeJsonRpcServer {
|
|
|
111
117
|
ctx.body = { jsonrpc, id, error: { code: -32601, message: `Method not found: ${method}` } };
|
|
112
118
|
} else {
|
|
113
119
|
ctx.status = 200;
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
ctx.body = { jsonrpc, id, result };
|
|
117
|
-
} catch (err: any) {
|
|
118
|
-
if (this.http200OnError) {
|
|
119
|
-
ctx.body = { jsonrpc, id, error: { code: err.code || -32600, data: err.data, message: err.message } };
|
|
120
|
-
} else {
|
|
121
|
-
throw err;
|
|
122
|
-
}
|
|
123
|
-
}
|
|
120
|
+
const result = await this.proxy.call(method, params);
|
|
121
|
+
ctx.body = { jsonrpc, id, result };
|
|
124
122
|
}
|
|
125
123
|
});
|
|
126
124
|
|
|
@@ -265,6 +263,12 @@ function makeAggregateHealthcheck(namedHandlers: NamespacedApiHandlers, log?: Lo
|
|
|
265
263
|
};
|
|
266
264
|
}
|
|
267
265
|
|
|
266
|
+
type SafeJsonRpcServerOptions = {
|
|
267
|
+
http200OnError: boolean;
|
|
268
|
+
healthCheck?: StatusCheckFn;
|
|
269
|
+
log?: Logger;
|
|
270
|
+
};
|
|
271
|
+
|
|
268
272
|
/**
|
|
269
273
|
* Creates a single SafeJsonRpcServer from multiple handlers.
|
|
270
274
|
* @param servers - List of handlers to be combined.
|
|
@@ -272,9 +276,12 @@ function makeAggregateHealthcheck(namedHandlers: NamespacedApiHandlers, log?: Lo
|
|
|
272
276
|
*/
|
|
273
277
|
export function createNamespacedSafeJsonRpcServer(
|
|
274
278
|
handlers: NamespacedApiHandlers,
|
|
275
|
-
|
|
276
|
-
|
|
279
|
+
options: Omit<SafeJsonRpcServerOptions, 'healthcheck'> = {
|
|
280
|
+
http200OnError: false,
|
|
281
|
+
log: createLogger('json-rpc:server'),
|
|
282
|
+
},
|
|
277
283
|
): SafeJsonRpcServer {
|
|
284
|
+
const { http200OnError, log } = options;
|
|
278
285
|
const proxy = new NamespacedSafeJsonProxy(handlers);
|
|
279
286
|
const healthCheck = makeAggregateHealthcheck(handlers, log);
|
|
280
287
|
return new SafeJsonRpcServer(proxy, http200OnError, healthCheck, log);
|
|
@@ -283,11 +290,11 @@ export function createNamespacedSafeJsonRpcServer(
|
|
|
283
290
|
export function createSafeJsonRpcServer<T extends object = any>(
|
|
284
291
|
handler: T,
|
|
285
292
|
schema: ApiSchemaFor<T>,
|
|
286
|
-
|
|
287
|
-
healthCheck?: StatusCheckFn,
|
|
293
|
+
options: SafeJsonRpcServerOptions = { http200OnError: false },
|
|
288
294
|
) {
|
|
295
|
+
const { http200OnError, log, healthCheck } = options;
|
|
289
296
|
const proxy = new SafeJsonProxy(handler, schema);
|
|
290
|
-
return new SafeJsonRpcServer(proxy, http200OnError, healthCheck);
|
|
297
|
+
return new SafeJsonRpcServer(proxy, http200OnError, healthCheck, log);
|
|
291
298
|
}
|
|
292
299
|
|
|
293
300
|
/**
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { type pino } from 'pino';
|
|
2
|
+
|
|
3
|
+
/* eslint-disable camelcase */
|
|
4
|
+
|
|
5
|
+
const GOOGLE_CLOUD_TRACE_ID = 'logging.googleapis.com/trace';
|
|
6
|
+
const GOOGLE_CLOUD_SPAN_ID = 'logging.googleapis.com/spanId';
|
|
7
|
+
const GOOGLE_CLOUD_TRACE_SAMPLED = 'logging.googleapis.com/trace_sampled';
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* Pino configuration for google cloud observability. Tweaks message and timestamp,
|
|
11
|
+
* adds trace context attributes, and injects severity level.
|
|
12
|
+
* Adapted from https://cloud.google.com/trace/docs/setup/nodejs-ot#config-structured-logging.
|
|
13
|
+
*/
|
|
14
|
+
export const GoogleCloudLoggerConfig = {
|
|
15
|
+
messageKey: 'message',
|
|
16
|
+
// Same as pino.stdTimeFunctions.isoTime but uses "timestamp" key instead of "time"
|
|
17
|
+
timestamp(): string {
|
|
18
|
+
return `,"timestamp":"${new Date(Date.now()).toISOString()}"`;
|
|
19
|
+
},
|
|
20
|
+
formatters: {
|
|
21
|
+
log(object: Record<string, unknown>): Record<string, unknown> {
|
|
22
|
+
// Add trace context attributes following Cloud Logging structured log format described
|
|
23
|
+
// in https://cloud.google.com/logging/docs/structured-logging#special-payload-fields
|
|
24
|
+
const { trace_id, span_id, trace_flags, ...rest } = object;
|
|
25
|
+
|
|
26
|
+
if (trace_id && span_id) {
|
|
27
|
+
return {
|
|
28
|
+
[GOOGLE_CLOUD_TRACE_ID]: trace_id,
|
|
29
|
+
[GOOGLE_CLOUD_SPAN_ID]: span_id,
|
|
30
|
+
[GOOGLE_CLOUD_TRACE_SAMPLED]: trace_flags ? trace_flags === '01' : undefined,
|
|
31
|
+
trace_flags, // Keep the original trace_flags for otel-pino-stream
|
|
32
|
+
...rest,
|
|
33
|
+
};
|
|
34
|
+
}
|
|
35
|
+
return object;
|
|
36
|
+
},
|
|
37
|
+
level(label: string, level: number): object {
|
|
38
|
+
// Inspired by https://github.com/pinojs/pino/issues/726#issuecomment-605814879
|
|
39
|
+
// Severity labels https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity
|
|
40
|
+
let severity: string;
|
|
41
|
+
|
|
42
|
+
switch (label as pino.Level | keyof typeof customLevels) {
|
|
43
|
+
case 'trace':
|
|
44
|
+
case 'debug':
|
|
45
|
+
severity = 'DEBUG';
|
|
46
|
+
break;
|
|
47
|
+
case 'verbose':
|
|
48
|
+
case 'info':
|
|
49
|
+
severity = 'INFO';
|
|
50
|
+
break;
|
|
51
|
+
case 'warn':
|
|
52
|
+
severity = 'WARNING';
|
|
53
|
+
break;
|
|
54
|
+
case 'error':
|
|
55
|
+
severity = 'ERROR';
|
|
56
|
+
break;
|
|
57
|
+
case 'fatal':
|
|
58
|
+
severity = 'CRITICAL';
|
|
59
|
+
break;
|
|
60
|
+
default:
|
|
61
|
+
severity = 'DEFAULT';
|
|
62
|
+
break;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
return { severity, level };
|
|
66
|
+
},
|
|
67
|
+
},
|
|
68
|
+
} satisfies pino.LoggerOptions;
|
|
69
|
+
|
|
70
|
+
// Define custom logging levels for pino. Duplicate from pino-logger.ts.
|
|
71
|
+
const customLevels = { verbose: 25 };
|
package/src/log/pino-logger.ts
CHANGED
|
@@ -5,12 +5,14 @@ import { type Writable } from 'stream';
|
|
|
5
5
|
import { inspect } from 'util';
|
|
6
6
|
|
|
7
7
|
import { compactArray } from '../collection/array.js';
|
|
8
|
+
import { type EnvVar, parseBooleanEnv } from '../config/index.js';
|
|
9
|
+
import { GoogleCloudLoggerConfig } from './gcloud-logger-config.js';
|
|
8
10
|
import { getLogLevelFromFilters, parseEnv } from './log-filters.js';
|
|
9
11
|
import { type LogLevel } from './log-levels.js';
|
|
10
12
|
import { type LogData, type LogFn } from './log_fn.js';
|
|
11
13
|
|
|
12
14
|
export function createLogger(module: string): Logger {
|
|
13
|
-
module = module.replace(/^aztec:/, '');
|
|
15
|
+
module = logNameHandlers.reduce((moduleName, handler) => handler(moduleName), module.replace(/^aztec:/, ''));
|
|
14
16
|
const pinoLogger = logger.child({ module }, { level: getLogLevelFromFilters(logFilters, module) });
|
|
15
17
|
|
|
16
18
|
// We check manually for isLevelEnabled to avoid calling processLogData unnecessarily.
|
|
@@ -56,6 +58,22 @@ function processLogData(data: LogData): LogData {
|
|
|
56
58
|
return logDataHandlers.reduce((accum, handler) => handler(accum), data);
|
|
57
59
|
}
|
|
58
60
|
|
|
61
|
+
// Allow global hooks for tweaking module names.
|
|
62
|
+
// Used in tests to add a uid to modules, so we can differentiate multiple nodes in the same process.
|
|
63
|
+
type LogNameHandler = (module: string) => string;
|
|
64
|
+
const logNameHandlers: LogNameHandler[] = [];
|
|
65
|
+
|
|
66
|
+
export function addLogNameHandler(handler: LogNameHandler): void {
|
|
67
|
+
logNameHandlers.push(handler);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
export function removeLogNameHandler(handler: LogNameHandler) {
|
|
71
|
+
const index = logNameHandlers.indexOf(handler);
|
|
72
|
+
if (index !== -1) {
|
|
73
|
+
logNameHandlers.splice(index, 1);
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
|
|
59
77
|
// Patch isLevelEnabled missing from pino/browser.
|
|
60
78
|
function isLevelEnabled(logger: pino.Logger<'verbose', boolean>, level: LogLevel): boolean {
|
|
61
79
|
return typeof logger.isLevelEnabled === 'function'
|
|
@@ -70,44 +88,14 @@ const [logLevel, logFilters] = parseEnv(process.env.LOG_LEVEL, defaultLogLevel);
|
|
|
70
88
|
// Define custom logging levels for pino.
|
|
71
89
|
const customLevels = { verbose: 25 };
|
|
72
90
|
|
|
73
|
-
//
|
|
74
|
-
const
|
|
75
|
-
// Severity labels https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity
|
|
76
|
-
let severity: string;
|
|
77
|
-
|
|
78
|
-
switch (label as pino.Level | keyof typeof customLevels) {
|
|
79
|
-
case 'trace':
|
|
80
|
-
case 'debug':
|
|
81
|
-
severity = 'DEBUG';
|
|
82
|
-
break;
|
|
83
|
-
case 'verbose':
|
|
84
|
-
case 'info':
|
|
85
|
-
severity = 'INFO';
|
|
86
|
-
break;
|
|
87
|
-
case 'warn':
|
|
88
|
-
severity = 'WARNING';
|
|
89
|
-
break;
|
|
90
|
-
case 'error':
|
|
91
|
-
severity = 'ERROR';
|
|
92
|
-
break;
|
|
93
|
-
case 'fatal':
|
|
94
|
-
severity = 'CRITICAL';
|
|
95
|
-
break;
|
|
96
|
-
default:
|
|
97
|
-
severity = 'DEFAULT';
|
|
98
|
-
break;
|
|
99
|
-
}
|
|
100
|
-
|
|
101
|
-
return { severity, level };
|
|
102
|
-
};
|
|
103
|
-
|
|
91
|
+
// Global pino options, tweaked for google cloud if running there.
|
|
92
|
+
const useGcloudObservability = parseBooleanEnv(process.env['USE_GCLOUD_OBSERVABILITY' satisfies EnvVar]);
|
|
104
93
|
const pinoOpts: pino.LoggerOptions<keyof typeof customLevels> = {
|
|
105
94
|
customLevels,
|
|
95
|
+
messageKey: 'msg',
|
|
106
96
|
useOnlyCustomLevels: false,
|
|
107
97
|
level: logLevel,
|
|
108
|
-
|
|
109
|
-
level: levelToSeverityFormatter,
|
|
110
|
-
},
|
|
98
|
+
...(useGcloudObservability ? GoogleCloudLoggerConfig : {}),
|
|
111
99
|
};
|
|
112
100
|
|
|
113
101
|
export const levels = {
|
|
@@ -127,7 +115,7 @@ export const pinoPrettyOpts = {
|
|
|
127
115
|
customLevels: 'fatal:60,error:50,warn:40,info:30,verbose:25,debug:20,trace:10',
|
|
128
116
|
customColors: 'fatal:bgRed,error:red,warn:yellow,info:green,verbose:magenta,debug:blue,trace:gray',
|
|
129
117
|
minimumLevel: 'trace' as const,
|
|
130
|
-
singleLine: !
|
|
118
|
+
singleLine: !parseBooleanEnv(process.env['LOG_MULTILINE' satisfies EnvVar]),
|
|
131
119
|
};
|
|
132
120
|
|
|
133
121
|
const prettyTransport: pino.TransportTargetOptions = {
|
|
@@ -149,14 +137,14 @@ const stdioTransport: pino.TransportTargetOptions = {
|
|
|
149
137
|
// would mean that all child loggers created before the telemetry-client is initialized would not have
|
|
150
138
|
// this transport configured. Note that the target is defined as the export in the telemetry-client,
|
|
151
139
|
// since pino will load this transport separately on a worker thread, to minimize disruption to the main loop.
|
|
152
|
-
const otlpEndpoint = process.env
|
|
140
|
+
const otlpEndpoint = process.env['OTEL_EXPORTER_OTLP_LOGS_ENDPOINT' satisfies EnvVar];
|
|
141
|
+
const otlpEnabled = !!otlpEndpoint && !useGcloudObservability;
|
|
153
142
|
const otelOpts = { levels };
|
|
154
143
|
const otelTransport: pino.TransportTargetOptions = {
|
|
155
144
|
target: '@aztec/telemetry-client/otel-pino-stream',
|
|
156
145
|
options: otelOpts,
|
|
157
146
|
level: 'trace',
|
|
158
147
|
};
|
|
159
|
-
|
|
160
148
|
function makeLogger() {
|
|
161
149
|
if (!isNode) {
|
|
162
150
|
// We are on the browser.
|
|
@@ -168,10 +156,10 @@ function makeLogger() {
|
|
|
168
156
|
return pino(pinoOpts, pino.destination(2));
|
|
169
157
|
} else {
|
|
170
158
|
// Regular nodejs with transports on worker thread, using pino-pretty for console logging if LOG_JSON
|
|
171
|
-
// is not set, and an optional OTLP transport if the OTLP endpoint is
|
|
159
|
+
// is not set, and an optional OTLP transport if the OTLP endpoint is set.
|
|
172
160
|
const targets: pino.TransportSingleOptions[] = compactArray([
|
|
173
|
-
|
|
174
|
-
|
|
161
|
+
parseBooleanEnv(process.env.LOG_JSON) ? stdioTransport : prettyTransport,
|
|
162
|
+
otlpEnabled ? otelTransport : undefined,
|
|
175
163
|
]);
|
|
176
164
|
return pino(pinoOpts, pino.transport({ targets, levels: levels.values }));
|
|
177
165
|
}
|
|
@@ -186,7 +174,7 @@ logger.verbose(
|
|
|
186
174
|
...logFilters.reduce((accum, [module, level]) => ({ ...accum, [`log.${module}`]: level }), {}),
|
|
187
175
|
},
|
|
188
176
|
isNode
|
|
189
|
-
? `Logger initialized with level ${logLevel}` + (
|
|
177
|
+
? `Logger initialized with level ${logLevel}` + (otlpEnabled ? ` with OTLP exporter to ${otlpEndpoint}` : '')
|
|
190
178
|
: `Browser console logger initialized with level ${logLevel}`,
|
|
191
179
|
);
|
|
192
180
|
|
|
@@ -23,6 +23,10 @@ export class RunningPromise {
|
|
|
23
23
|
* Starts the running promise.
|
|
24
24
|
*/
|
|
25
25
|
public start() {
|
|
26
|
+
if (this.running) {
|
|
27
|
+
this.logger.warn(`Attempted to start running promise that was already started`);
|
|
28
|
+
return;
|
|
29
|
+
}
|
|
26
30
|
this.running = true;
|
|
27
31
|
|
|
28
32
|
const poll = async () => {
|
|
@@ -54,6 +58,10 @@ export class RunningPromise {
|
|
|
54
58
|
* and waits for the currently executing function to complete.
|
|
55
59
|
*/
|
|
56
60
|
async stop(): Promise<void> {
|
|
61
|
+
if (!this.running) {
|
|
62
|
+
this.logger.warn(`Running promise was not started`);
|
|
63
|
+
return;
|
|
64
|
+
}
|
|
57
65
|
this.running = false;
|
|
58
66
|
this.interruptibleSleep.interrupt();
|
|
59
67
|
await this.runningPromise;
|
|
@@ -6,6 +6,7 @@ import { FifoMemoryQueue } from './fifo_memory_queue.js';
|
|
|
6
6
|
export class SerialQueue {
|
|
7
7
|
private readonly queue = new FifoMemoryQueue<() => Promise<void>>();
|
|
8
8
|
private runningPromise!: Promise<void>;
|
|
9
|
+
private started = false;
|
|
9
10
|
|
|
10
11
|
/**
|
|
11
12
|
* Initializes the execution of enqueued functions in the serial queue.
|
|
@@ -14,7 +15,11 @@ export class SerialQueue {
|
|
|
14
15
|
* This method should be called once to start processing the queue.
|
|
15
16
|
*/
|
|
16
17
|
public start() {
|
|
18
|
+
if (this.started) {
|
|
19
|
+
return;
|
|
20
|
+
}
|
|
17
21
|
this.runningPromise = this.queue.process(fn => fn());
|
|
22
|
+
this.started = true;
|
|
18
23
|
}
|
|
19
24
|
|
|
20
25
|
/**
|
|
@@ -17,6 +17,11 @@ export function writeTestData(targetFileFromRepoRoot: string, contents: string |
|
|
|
17
17
|
logger(`Wrote test data to ${targetFile}`);
|
|
18
18
|
}
|
|
19
19
|
|
|
20
|
+
export function readTestData(repoPath: string): Buffer {
|
|
21
|
+
const targetFile = getPathToFile(repoPath);
|
|
22
|
+
return readFileSync(targetFile);
|
|
23
|
+
}
|
|
24
|
+
|
|
20
25
|
/**
|
|
21
26
|
* Looks for a variable assignment in the target file and updates the value, only if test data generation is enabled.
|
|
22
27
|
* Note that a magic inline comment would be a cleaner approach, like `/* TEST-DATA-START *\/` and `/* TEST-DATA-END *\/`,
|
|
@@ -43,7 +48,7 @@ export function updateInlineTestData(targetFileFromRepoRoot: string, itemName: s
|
|
|
43
48
|
/**
|
|
44
49
|
* Updates the sample Prover.toml files in noir-projects/noir-protocol-circuits/crates/.
|
|
45
50
|
* @remarks Requires AZTEC_GENERATE_TEST_DATA=1 to be set
|
|
46
|
-
* To re-gen, run 'AZTEC_GENERATE_TEST_DATA=1 FAKE_PROOFS=1 yarn test:e2e
|
|
51
|
+
* To re-gen, run 'AZTEC_GENERATE_TEST_DATA=1 FAKE_PROOFS=1 yarn test:e2e full.test '
|
|
47
52
|
*/
|
|
48
53
|
export function updateProtocolCircuitSampleInputs(circuitName: string, value: string) {
|
|
49
54
|
const logger = createConsoleLogger('aztec:testing:test_data');
|
package/src/trees/index.ts
CHANGED
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
import { padArrayEnd } from '@aztec/foundation/collection';
import { sha256Trunc } from '@aztec/foundation/crypto';

/**
 * Computes the merkle root for an unbalanced tree.
 *
 * @dev Adapted from unbalanced_tree.ts.
 * Calculates the tree upwards layer by layer until we reach the root.
 * The L1 calculation instead computes the tree from right to left (slightly cheaper gas).
 * TODO: A more thorough investigation of which method is cheaper, then use that method everywhere.
 *
 * @param leaves - Leaf nodes of the tree, left to right.
 * @param emptyLeaf - Optional padding leaf used when fewer than 2 leaves are supplied.
 * @param hasher - Hash combining two sibling buffers into their parent; defaults to sha256Trunc.
 * @returns The computed tree root.
 * @throws If fewer than 2 leaves are given and no emptyLeaf is provided.
 */
export function computeUnbalancedMerkleRoot(leaves: Buffer[], emptyLeaf?: Buffer, hasher = sha256Trunc): Buffer {
  // Pad leaves to 2
  if (leaves.length < 2) {
    if (emptyLeaf === undefined) {
      throw new Error('Cannot compute a Merkle root with less than 2 leaves');
    } else {
      leaves = padArrayEnd(leaves, emptyLeaf, 2);
    }
  }

  const depth = Math.ceil(Math.log2(leaves.length));
  // With an odd number of leaves, hold the last leaf out ("nodeToShift") and merge it back in
  // at the first higher layer that ends up with an odd width. An empty buffer marks "nothing held".
  let [layerWidth, nodeToShift] =
    leaves.length & 1 ? [leaves.length - 1, leaves[leaves.length - 1]] : [leaves.length, Buffer.alloc(0)];
  // Allocate this layer's leaves and init the next layer up
  let thisLayer = leaves.slice(0, layerWidth);
  let nextLayer = [];
  for (let i = 0; i < depth; i++) {
    for (let j = 0; j < layerWidth; j += 2) {
      // Store the hash of each pair one layer up
      nextLayer[j / 2] = hasher(Buffer.concat([thisLayer[j], thisLayer[j + 1]]));
    }
    layerWidth /= 2;
    if (layerWidth & 1) {
      if (nodeToShift.length) {
        // If the next layer has odd length, and we have a node that needs to be shifted up, add it here
        nextLayer.push(nodeToShift);
        layerWidth += 1;
        nodeToShift = Buffer.alloc(0);
      } else {
        // If we don't have a node waiting to be shifted, store the next layer's final node to be shifted
        layerWidth -= 1;
        nodeToShift = nextLayer[layerWidth];
      }
    }
    // reset the layers
    thisLayer = nextLayer;
    nextLayer = [];
  }
  // return the root
  return thisLayer[0];
}
|
|
53
|
+
|
|
54
|
+
function getMaxBalancedTreeDepth(numLeaves: number) {
|
|
55
|
+
return Math.floor(Math.log2(numLeaves));
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
function getMaxUnbalancedTreeDepth(numLeaves: number) {
|
|
59
|
+
return Math.ceil(Math.log2(numLeaves));
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
/**
 * Recursively locates a leaf within an unbalanced tree built as: the largest balanced subtree
 * of the leaves on the left, and the remaining leaves forming another unbalanced tree on the right.
 *
 * @param rootLevel - Level of the current subtree's root (0 for the whole tree's root).
 * @param leafLevel - Deepest level the current subtree may reach.
 * @param numLeaves - Number of leaves in the current subtree.
 * @param indexOffset - Index of the current subtree's root within its level.
 * @param targetIndex - Leaf index being searched for, relative to this subtree.
 * @returns The level and index-at-level where the target leaf sits.
 */
function findPosition(
  rootLevel: number,
  leafLevel: number,
  numLeaves: number,
  indexOffset: number,
  targetIndex: number,
): { level: number; indexAtLevel: number } {
  if (numLeaves <= 1) {
    // Single leaf.
    return { level: rootLevel, indexAtLevel: indexOffset };
  }

  // The largest balanced tree that can be created with the given number of leaves.
  const maxBalancedTreeDepth = getMaxBalancedTreeDepth(numLeaves);
  const numBalancedLeaves = 2 ** maxBalancedTreeDepth;
  const numRemainingLeaves = numLeaves - numBalancedLeaves;

  if (targetIndex < numBalancedLeaves) {
    // Target is in the balanced tree.

    // - If numRemainingLeaves is 0: this balanced tree is grown from the current root.
    // - If numRemainingLeaves is not 0: the remaining leaves will form another tree, which will become the right child of the root.
    // And the balanced tree will be the left child of the root.
    // There will be an extra level between the root of the balanced tree and the current root.
    const extraLevel = numRemainingLeaves ? 1 : 0;

    return { level: rootLevel + maxBalancedTreeDepth + extraLevel, indexAtLevel: indexOffset + targetIndex };
  } else {
    // Target is in the right branch.
    // The right subtree may be shallower than this subtree; account for how far its root is shifted up.
    const rightBranchMaxLevel = getMaxUnbalancedTreeDepth(numRemainingLeaves);
    const shiftedUp = leafLevel - rootLevel - rightBranchMaxLevel - 1;
    const nextLeafLevel = leafLevel - shiftedUp;
    const newIndexOffset = (indexOffset + numBalancedLeaves) >> shiftedUp;
    const shiftedTargetIndex = targetIndex - numBalancedLeaves;
    return findPosition(rootLevel + 1, nextLeafLevel, numRemainingLeaves, newIndexOffset, shiftedTargetIndex);
  }
}
|
|
99
|
+
|
|
100
|
+
export function findLeafLevelAndIndex(numLeaves: number, leafIndex: number) {
|
|
101
|
+
const maxLevel = getMaxUnbalancedTreeDepth(numLeaves);
|
|
102
|
+
return findPosition(0, maxLevel, numLeaves, 0, leafIndex);
|
|
103
|
+
}
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
import { findLeafLevelAndIndex } from './unbalanced_merkle_tree.js';

/** Position of a node in the tree: level (0 = root) and index within that level. */
export interface TreeNodeLocation {
  level: number;
  index: number;
}

/** A stored value together with its position in the tree. */
interface TreeNode<T> {
  value: T;
  location: TreeNodeLocation;
}

/**
 * Sparse store for the nodes of an unbalanced merkle tree with a fixed number of leaves.
 * Nodes are keyed by their (level, index) position; leaf positions are derived via
 * findLeafLevelAndIndex. Getters return undefined for positions never set.
 */
export class UnbalancedTreeStore<T> {
  // Maps "level-index" keys (see #getKey) to stored nodes.
  #nodeMapping: Map<string, TreeNode<T>> = new Map();
  readonly #numLeaves: number;

  constructor(numLeaves: number) {
    this.#numLeaves = numLeaves;
  }

  /**
   * Stores a value at the tree position of the given leaf.
   * @param leafIndex - Index of the leaf, expected in [0, numLeaves).
   * @param value - Value to store.
   * @returns The (level, index) location assigned to the leaf.
   * @throws If leafIndex is out of range.
   */
  setLeaf(leafIndex: number, value: T): TreeNodeLocation {
    if (leafIndex >= this.#numLeaves) {
      throw new Error(`Expected at most ${this.#numLeaves} leaves. Received a leaf at index ${leafIndex}.`);
    }

    const { level, indexAtLevel } = findLeafLevelAndIndex(this.#numLeaves, leafIndex);
    const location = {
      level,
      index: indexAtLevel,
    };
    this.#nodeMapping.set(this.#getKey(location), {
      location,
      value,
    });
    return location;
  }

  /** Stores a value at an arbitrary (level, index) position. */
  setNode({ level, index }: TreeNodeLocation, value: T) {
    const location = {
      level,
      index,
    };
    this.#nodeMapping.set(this.#getKey(location), {
      location,
      value,
    });
  }

  /**
   * Returns the parent position of a node.
   * @throws If called on the root (level 0).
   */
  getParentLocation({ level, index }: TreeNodeLocation): TreeNodeLocation {
    if (level === 0) {
      throw new Error('Tree root does not have a parent.');
    }

    return { level: level - 1, index: Math.floor(index / 2) };
  }

  /**
   * Returns the sibling position of a node (the other child of its parent).
   * @throws If called on the root (level 0).
   */
  getSiblingLocation({ level, index }: TreeNodeLocation): TreeNodeLocation {
    if (level === 0) {
      throw new Error('Tree root does not have a sibling.');
    }

    return { level, index: index % 2 ? index - 1 : index + 1 };
  }

  /** Returns the [left, right] child positions of a node. */
  getChildLocations({ level, index }: TreeNodeLocation): [TreeNodeLocation, TreeNodeLocation] {
    const left = { level: level + 1, index: index * 2 };
    const right = { level: level + 1, index: index * 2 + 1 };
    return [left, right];
  }

  /** Returns the value stored for the given leaf, or undefined if unset. */
  getLeaf(leafIndex: number) {
    const { level, indexAtLevel } = findLeafLevelAndIndex(this.#numLeaves, leafIndex);
    const location = {
      level,
      index: indexAtLevel,
    };
    return this.getNode(location);
  }

  /** Returns the value stored at a position, or undefined if unset. */
  getNode(location: TreeNodeLocation): T | undefined {
    return this.#nodeMapping.get(this.#getKey(location))?.value;
  }

  /** Returns the value stored at a node's parent, or undefined if unset. */
  getParent(location: TreeNodeLocation): T | undefined {
    const parentLocation = this.getParentLocation(location);
    return this.getNode(parentLocation);
  }

  /** Returns the value stored at a node's sibling, or undefined if unset. */
  getSibling(location: TreeNodeLocation): T | undefined {
    const siblingLocation = this.getSiblingLocation(location);
    return this.getNode(siblingLocation);
  }

  /** Returns the values stored at a node's two children; undefined for unset children. */
  getChildren(location: TreeNodeLocation): [T | undefined, T | undefined] {
    const [left, right] = this.getChildLocations(location);
    return [this.getNode(left), this.getNode(right)];
  }

  // Serializes a location into the string key used by the node map.
  #getKey(location: TreeNodeLocation) {
    return `${location.level}-${location.index}`;
  }
}
|