@aztec/foundation 0.0.1-commit.c7c42ec → 0.0.1-commit.cd76b27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (191)
  1. package/dest/array/sorted_array.d.ts +6 -1
  2. package/dest/array/sorted_array.d.ts.map +1 -1
  3. package/dest/array/sorted_array.js +18 -15
  4. package/dest/branded-types/block_number.d.ts +4 -1
  5. package/dest/branded-types/block_number.d.ts.map +1 -1
  6. package/dest/branded-types/block_number.js +3 -0
  7. package/dest/branded-types/checkpoint_number.d.ts +10 -2
  8. package/dest/branded-types/checkpoint_number.d.ts.map +1 -1
  9. package/dest/branded-types/checkpoint_number.js +17 -8
  10. package/dest/branded-types/index.d.ts +3 -2
  11. package/dest/branded-types/index.d.ts.map +1 -1
  12. package/dest/branded-types/index.js +2 -1
  13. package/dest/branded-types/index_within_checkpoint.d.ts +42 -0
  14. package/dest/branded-types/index_within_checkpoint.d.ts.map +1 -0
  15. package/dest/branded-types/index_within_checkpoint.js +59 -0
  16. package/dest/buffer/buffer16.d.ts +4 -1
  17. package/dest/buffer/buffer16.d.ts.map +1 -1
  18. package/dest/buffer/buffer16.js +3 -1
  19. package/dest/buffer/buffer32.d.ts +4 -1
  20. package/dest/buffer/buffer32.d.ts.map +1 -1
  21. package/dest/buffer/buffer32.js +3 -1
  22. package/dest/collection/array.d.ts +12 -1
  23. package/dest/collection/array.d.ts.map +1 -1
  24. package/dest/collection/array.js +51 -0
  25. package/dest/config/env_var.d.ts +2 -2
  26. package/dest/config/env_var.d.ts.map +1 -1
  27. package/dest/config/index.d.ts +6 -3
  28. package/dest/config/index.d.ts.map +1 -1
  29. package/dest/config/index.js +16 -7
  30. package/dest/config/network_name.d.ts +2 -2
  31. package/dest/config/network_name.d.ts.map +1 -1
  32. package/dest/config/network_name.js +2 -0
  33. package/dest/config/parse-env.d.ts +3 -0
  34. package/dest/config/parse-env.d.ts.map +1 -0
  35. package/dest/config/parse-env.js +7 -0
  36. package/dest/config/secret_value.js +3 -1
  37. package/dest/crypto/ecdsa/signature.d.ts +10 -1
  38. package/dest/crypto/ecdsa/signature.d.ts.map +1 -1
  39. package/dest/crypto/poseidon/index.d.ts +1 -2
  40. package/dest/crypto/poseidon/index.d.ts.map +1 -1
  41. package/dest/crypto/poseidon/index.js +0 -9
  42. package/dest/crypto/random/randomness_singleton.d.ts +4 -3
  43. package/dest/crypto/random/randomness_singleton.d.ts.map +1 -1
  44. package/dest/crypto/random/randomness_singleton.js +5 -5
  45. package/dest/crypto/sync/poseidon/index.d.ts +1 -2
  46. package/dest/crypto/sync/poseidon/index.d.ts.map +1 -1
  47. package/dest/crypto/sync/poseidon/index.js +0 -8
  48. package/dest/curves/bls12/field.js +6 -3
  49. package/dest/curves/bls12/point.d.ts +10 -1
  50. package/dest/curves/bls12/point.d.ts.map +1 -1
  51. package/dest/curves/bls12/point.js +3 -1
  52. package/dest/curves/bn254/field.js +5 -2
  53. package/dest/curves/grumpkin/point.d.ts +11 -2
  54. package/dest/curves/grumpkin/point.d.ts.map +1 -1
  55. package/dest/error/index.d.ts +4 -4
  56. package/dest/error/index.d.ts.map +1 -1
  57. package/dest/eth-address/index.js +4 -2
  58. package/dest/eth-signature/eth_signature.d.ts +4 -1
  59. package/dest/eth-signature/eth_signature.d.ts.map +1 -1
  60. package/dest/jest/setup.js +4 -1
  61. package/dest/json-rpc/client/safe_json_rpc_client.d.ts +2 -1
  62. package/dest/json-rpc/client/safe_json_rpc_client.d.ts.map +1 -1
  63. package/dest/json-rpc/client/safe_json_rpc_client.js +1 -1
  64. package/dest/json-rpc/client/undici.d.ts +1 -1
  65. package/dest/json-rpc/client/undici.d.ts.map +1 -1
  66. package/dest/json-rpc/client/undici.js +21 -4
  67. package/dest/json-rpc/fixtures/class_a.d.ts +3 -3
  68. package/dest/json-rpc/fixtures/class_a.d.ts.map +1 -1
  69. package/dest/json-rpc/fixtures/class_b.d.ts +3 -3
  70. package/dest/json-rpc/fixtures/class_b.d.ts.map +1 -1
  71. package/dest/json-rpc/server/api_key_auth.d.ts +19 -0
  72. package/dest/json-rpc/server/api_key_auth.d.ts.map +1 -0
  73. package/dest/json-rpc/server/api_key_auth.js +57 -0
  74. package/dest/json-rpc/server/index.d.ts +2 -1
  75. package/dest/json-rpc/server/index.d.ts.map +1 -1
  76. package/dest/json-rpc/server/index.js +1 -0
  77. package/dest/json-rpc/server/safe_json_rpc_server.js +1 -1
  78. package/dest/log/bigint-utils.d.ts +5 -0
  79. package/dest/log/bigint-utils.d.ts.map +1 -0
  80. package/dest/log/bigint-utils.js +18 -0
  81. package/dest/log/gcloud-logger-config.d.ts +1 -1
  82. package/dest/log/gcloud-logger-config.d.ts.map +1 -1
  83. package/dest/log/gcloud-logger-config.js +3 -0
  84. package/dest/log/libp2p_logger.d.ts +5 -2
  85. package/dest/log/libp2p_logger.d.ts.map +1 -1
  86. package/dest/log/libp2p_logger.js +14 -4
  87. package/dest/log/log-filters.d.ts +17 -4
  88. package/dest/log/log-filters.d.ts.map +1 -1
  89. package/dest/log/log-filters.js +26 -12
  90. package/dest/log/pino-logger-server.d.ts +9 -0
  91. package/dest/log/pino-logger-server.d.ts.map +1 -0
  92. package/dest/log/pino-logger-server.js +18 -0
  93. package/dest/log/pino-logger.d.ts +37 -8
  94. package/dest/log/pino-logger.d.ts.map +1 -1
  95. package/dest/log/pino-logger.js +122 -29
  96. package/dest/queue/base_memory_queue.d.ts +2 -2
  97. package/dest/queue/base_memory_queue.d.ts.map +1 -1
  98. package/dest/queue/semaphore.d.ts +5 -1
  99. package/dest/queue/semaphore.d.ts.map +1 -1
  100. package/dest/retry/index.d.ts +11 -1
  101. package/dest/retry/index.d.ts.map +1 -1
  102. package/dest/retry/index.js +11 -0
  103. package/dest/serialize/buffer_reader.d.ts +14 -4
  104. package/dest/serialize/buffer_reader.d.ts.map +1 -1
  105. package/dest/serialize/buffer_reader.js +26 -4
  106. package/dest/serialize/serialize.d.ts +19 -1
  107. package/dest/serialize/serialize.d.ts.map +1 -1
  108. package/dest/serialize/serialize.js +31 -0
  109. package/dest/timer/date.d.ts +23 -1
  110. package/dest/timer/date.d.ts.map +1 -1
  111. package/dest/timer/date.js +29 -0
  112. package/dest/trees/balanced_merkle_tree_root.d.ts +17 -0
  113. package/dest/trees/balanced_merkle_tree_root.d.ts.map +1 -0
  114. package/dest/trees/{balanced_merkle_tree.js → balanced_merkle_tree_root.js} +2 -17
  115. package/dest/trees/hasher.d.ts +3 -1
  116. package/dest/trees/hasher.d.ts.map +1 -1
  117. package/dest/trees/hasher.js +10 -5
  118. package/dest/trees/index.d.ts +4 -4
  119. package/dest/trees/index.d.ts.map +1 -1
  120. package/dest/trees/index.js +3 -3
  121. package/dest/trees/membership_witness.d.ts +11 -1
  122. package/dest/trees/membership_witness.d.ts.map +1 -1
  123. package/dest/trees/membership_witness.js +9 -0
  124. package/dest/trees/merkle_tree_calculator.d.ts +1 -1
  125. package/dest/trees/merkle_tree_calculator.d.ts.map +1 -1
  126. package/dest/trees/merkle_tree_calculator.js +2 -2
  127. package/dest/trees/sibling_path.d.ts +2 -1
  128. package/dest/trees/sibling_path.d.ts.map +1 -1
  129. package/dest/trees/sibling_path.js +2 -2
  130. package/dest/trees/unbalanced_merkle_tree_calculator.d.ts +2 -3
  131. package/dest/trees/unbalanced_merkle_tree_calculator.d.ts.map +1 -1
  132. package/dest/trees/unbalanced_merkle_tree_calculator.js +1 -5
  133. package/dest/trees/{unbalanced_merkle_tree.d.ts → unbalanced_merkle_tree_root.d.ts} +3 -5
  134. package/dest/trees/unbalanced_merkle_tree_root.d.ts.map +1 -0
  135. package/dest/trees/{unbalanced_merkle_tree.js → unbalanced_merkle_tree_root.js} +11 -49
  136. package/dest/trees/unbalanced_tree_store.d.ts +5 -1
  137. package/dest/trees/unbalanced_tree_store.d.ts.map +1 -1
  138. package/dest/trees/unbalanced_tree_store.js +49 -1
  139. package/dest/types/index.d.ts +4 -2
  140. package/dest/types/index.d.ts.map +1 -1
  141. package/dest/types/index.js +3 -0
  142. package/package.json +25 -4
  143. package/src/array/sorted_array.ts +22 -17
  144. package/src/branded-types/block_number.ts +5 -0
  145. package/src/branded-types/checkpoint_number.ts +22 -7
  146. package/src/branded-types/index.ts +2 -1
  147. package/src/branded-types/index_within_checkpoint.ts +88 -0
  148. package/src/collection/array.ts +52 -0
  149. package/src/config/env_var.ts +55 -8
  150. package/src/config/index.ts +19 -4
  151. package/src/config/network_name.ts +4 -1
  152. package/src/config/parse-env.ts +4 -0
  153. package/src/crypto/poseidon/index.ts +0 -10
  154. package/src/crypto/random/randomness_singleton.ts +6 -4
  155. package/src/crypto/sync/poseidon/index.ts +0 -9
  156. package/src/eth-address/index.ts +1 -1
  157. package/src/jest/setup.mjs +4 -1
  158. package/src/json-rpc/client/safe_json_rpc_client.ts +2 -0
  159. package/src/json-rpc/client/undici.ts +21 -3
  160. package/src/json-rpc/server/api_key_auth.ts +63 -0
  161. package/src/json-rpc/server/index.ts +1 -0
  162. package/src/json-rpc/server/safe_json_rpc_server.ts +1 -1
  163. package/src/log/bigint-utils.ts +22 -0
  164. package/src/log/gcloud-logger-config.ts +5 -0
  165. package/src/log/libp2p_logger.ts +12 -5
  166. package/src/log/log-filters.ts +29 -11
  167. package/src/log/pino-logger-server.ts +25 -0
  168. package/src/log/pino-logger.ts +144 -39
  169. package/src/queue/base_memory_queue.ts +1 -1
  170. package/src/queue/semaphore.ts +5 -0
  171. package/src/retry/index.ts +18 -0
  172. package/src/serialize/buffer_reader.ts +36 -9
  173. package/src/serialize/serialize.ts +32 -0
  174. package/src/timer/date.ts +42 -0
  175. package/src/trees/{balanced_merkle_tree.ts → balanced_merkle_tree_root.ts} +2 -18
  176. package/src/trees/hasher.ts +9 -0
  177. package/src/trees/index.ts +3 -3
  178. package/src/trees/membership_witness.ts +8 -0
  179. package/src/trees/merkle_tree_calculator.ts +2 -2
  180. package/src/trees/sibling_path.ts +2 -2
  181. package/src/trees/unbalanced_merkle_tree_calculator.ts +1 -12
  182. package/src/trees/{unbalanced_merkle_tree.ts → unbalanced_merkle_tree_root.ts} +17 -61
  183. package/src/trees/unbalanced_tree_store.ts +57 -2
  184. package/src/types/index.ts +6 -1
  185. package/dest/json-rpc/server/telemetry.d.ts +0 -2
  186. package/dest/json-rpc/server/telemetry.d.ts.map +0 -1
  187. package/dest/json-rpc/server/telemetry.js +0 -0
  188. package/dest/trees/balanced_merkle_tree.d.ts +0 -22
  189. package/dest/trees/balanced_merkle_tree.d.ts.map +0 -1
  190. package/dest/trees/unbalanced_merkle_tree.d.ts.map +0 -1
  191. package/src/json-rpc/server/telemetry.ts +0 -0
@@ -12,6 +12,9 @@ export type EnvVar =
12
12
  | 'ARCHIVER_VIEM_POLLING_INTERVAL_MS'
13
13
  | 'ARCHIVER_BATCH_SIZE'
14
14
  | 'AZTEC_ADMIN_PORT'
15
+ | 'AZTEC_ADMIN_API_KEY_HASH'
16
+ | 'AZTEC_NO_ADMIN_API_KEY'
17
+ | 'AZTEC_RESET_ADMIN_API_KEY'
15
18
  | 'AZTEC_NODE_ADMIN_URL'
16
19
  | 'AZTEC_NODE_URL'
17
20
  | 'AZTEC_PORT'
@@ -21,15 +24,13 @@ export type EnvVar =
21
24
  | 'BB_NUM_IVC_VERIFIERS'
22
25
  | 'BB_IVC_CONCURRENCY'
23
26
  | 'BOOTSTRAP_NODES'
24
- | 'BLOB_SINK_ARCHIVE_API_URL'
25
- | 'BLOB_SINK_PORT'
26
- | 'BLOB_SINK_URL'
27
+ | 'BLOB_ARCHIVE_API_URL'
27
28
  | 'BLOB_FILE_STORE_URLS'
28
29
  | 'BLOB_FILE_STORE_UPLOAD_URL'
29
30
  | 'BLOB_HEALTHCHECK_UPLOAD_INTERVAL_MINUTES'
30
31
  | 'BOT_DA_GAS_LIMIT'
31
32
  | 'BOT_FEE_PAYMENT_METHOD'
32
- | 'BOT_BASE_FEE_PADDING'
33
+ | 'BOT_MIN_FEE_PADDING'
33
34
  | 'BOT_FLUSH_SETUP_TRANSACTIONS'
34
35
  | 'BOT_FOLLOW_CHAIN'
35
36
  | 'BOT_L2_GAS_LIMIT'
@@ -49,7 +50,10 @@ export type EnvVar =
49
50
  | 'BOT_TX_MINED_WAIT_SECONDS'
50
51
  | 'BOT_MAX_CONSECUTIVE_ERRORS'
51
52
  | 'BOT_STOP_WHEN_UNHEALTHY'
52
- | 'BOT_AMM_TXS'
53
+ | 'BOT_MODE'
54
+ | 'BOT_L2_TO_L1_MESSAGES_PER_TX'
55
+ | 'BOT_L1_TO_L2_SEED_COUNT'
56
+ | 'BOT_L1_TO_L2_SEED_INTERVAL'
53
57
  | 'COINBASE'
54
58
  | 'CRS_PATH'
55
59
  | 'DATA_DIRECTORY'
@@ -66,6 +70,7 @@ export type EnvVar =
66
70
  | 'PUBLIC_DATA_TREE_MAP_SIZE_KB'
67
71
  | 'DEBUG'
68
72
  | 'DEBUG_P2P_DISABLE_COLOCATION_PENALTY'
73
+ | 'ENABLE_PROVER_NODE'
69
74
  | 'ETHEREUM_HOSTS'
70
75
  | 'ETHEREUM_DEBUG_HOSTS'
71
76
  | 'ETHEREUM_ALLOW_NO_DEBUG_HOSTS'
@@ -79,6 +84,7 @@ export type EnvVar =
79
84
  | 'L1_CONSENSUS_HOST_API_KEY_HEADERS'
80
85
  | 'LOG_JSON'
81
86
  | 'LOG_MULTILINE'
87
+ | 'LOG_NO_COLOR_PER_ACTOR'
82
88
  | 'LOG_LEVEL'
83
89
  | 'MNEMONIC'
84
90
  | 'NETWORK'
@@ -96,7 +102,12 @@ export type EnvVar =
96
102
  | 'PUBLIC_OTEL_INCLUDE_METRICS'
97
103
  | 'PUBLIC_OTEL_COLLECT_FROM'
98
104
  | 'PUBLIC_OTEL_OPT_OUT'
105
+ | 'P2P_BATCH_TX_REQUESTER_SMART_PARALLEL_WORKER_COUNT'
106
+ | 'P2P_BATCH_TX_REQUESTER_DUMB_PARALLEL_WORKER_COUNT'
107
+ | 'P2P_BATCH_TX_REQUESTER_TX_BATCH_SIZE'
108
+ | 'P2P_BATCH_TX_REQUESTER_BAD_PEER_THRESHOLD'
99
109
  | 'P2P_BLOCK_CHECK_INTERVAL_MS'
110
+ | 'P2P_SLOT_CHECK_INTERVAL_MS'
100
111
  | 'P2P_BLOCK_REQUEST_BATCH_SIZE'
101
112
  | 'P2P_BOOTSTRAP_NODE_ENR_VERSION_CHECK'
102
113
  | 'P2P_BOOTSTRAP_NODES_AS_FULL_PEERS'
@@ -140,6 +151,7 @@ export type EnvVar =
140
151
  | 'P2P_DROP_TX'
141
152
  | 'P2P_DROP_TX_CHANCE'
142
153
  | 'P2P_TX_POOL_DELETE_TXS_AFTER_REORG'
154
+ | 'P2P_MIN_TX_POOL_AGE_MS'
143
155
  | 'DEBUG_P2P_INSTRUMENT_MESSAGES'
144
156
  | 'PEER_ID_PRIVATE_KEY'
145
157
  | 'PEER_ID_PRIVATE_KEY_PATH'
@@ -153,7 +165,10 @@ export type EnvVar =
153
165
  | 'PROVER_BROKER_BATCH_INTERVAL_MS'
154
166
  | 'PROVER_BROKER_BATCH_SIZE'
155
167
  | 'PROVER_BROKER_MAX_EPOCHS_TO_KEEP_RESULTS_FOR'
168
+ | 'PROVER_BROKER_DEBUG_REPLAY_ENABLED'
169
+ | 'PROVER_CANCEL_JOBS_ON_STOP'
156
170
  | 'PROVER_COORDINATION_NODE_URLS'
171
+ | 'PROVER_PROOF_STORE'
157
172
  | 'PROVER_FAILED_PROOF_STORE'
158
173
  | 'PROVER_NODE_FAILED_EPOCH_STORE'
159
174
  | 'PROVER_NODE_DISABLE_PROOF_PUBLISH'
@@ -177,6 +192,7 @@ export type EnvVar =
177
192
  | 'PROVER_TEST_VERIFICATION_DELAY_MS'
178
193
  | 'PXE_L2_BLOCK_BATCH_SIZE'
179
194
  | 'PXE_PROVER_ENABLED'
195
+ | 'PXE_SYNC_CHAIN_TIP'
180
196
  | 'RPC_MAX_BATCH_SIZE'
181
197
  | 'RPC_MAX_BODY_SIZE'
182
198
  | 'RPC_SIMULATE_PUBLIC_MAX_GAS_LIMIT'
@@ -200,9 +216,11 @@ export type EnvVar =
200
216
  | 'SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT'
201
217
  | 'SEQ_ATTESTATION_PROPAGATION_TIME'
202
218
  | 'SEQ_BLOCK_DURATION_MS'
219
+ | 'SEQ_EXPECTED_BLOCK_PROPOSALS_PER_SLOT'
203
220
  | 'SEQ_BUILD_CHECKPOINT_IF_EMPTY'
204
221
  | 'SEQ_SECONDS_BEFORE_INVALIDATING_BLOCK_AS_COMMITTEE_MEMBER'
205
222
  | 'SEQ_SECONDS_BEFORE_INVALIDATING_BLOCK_AS_NON_COMMITTEE_MEMBER'
223
+ | 'SEQ_SKIP_CHECKPOINT_PUBLISH_PERCENT'
206
224
  | 'SLASH_MIN_PENALTY_PERCENTAGE'
207
225
  | 'SLASH_MAX_PENALTY_PERCENTAGE'
208
226
  | 'SLASH_VALIDATORS_ALWAYS'
@@ -213,6 +231,8 @@ export type EnvVar =
213
231
  | 'SLASH_INACTIVITY_TARGET_PERCENTAGE'
214
232
  | 'SLASH_INACTIVITY_CONSECUTIVE_EPOCH_THRESHOLD'
215
233
  | 'SLASH_INVALID_BLOCK_PENALTY'
234
+ | 'SLASH_DUPLICATE_PROPOSAL_PENALTY'
235
+ | 'SLASH_DUPLICATE_ATTESTATION_PENALTY'
216
236
  | 'SLASH_OVERRIDE_PAYLOAD'
217
237
  | 'SLASH_PROPOSE_INVALID_ATTESTATIONS_PENALTY'
218
238
  | 'SLASH_ATTEST_DESCENDANT_OF_INVALID_PENALTY'
@@ -237,6 +257,20 @@ export type EnvVar =
237
257
  | 'TX_COLLECTION_FAST_MAX_PARALLEL_REQUESTS_PER_NODE'
238
258
  | 'TX_COLLECTION_NODE_RPC_MAX_BATCH_SIZE'
239
259
  | 'TX_COLLECTION_NODE_RPC_URLS'
260
+ | 'TX_COLLECTION_MISSING_TXS_COLLECTOR_TYPE'
261
+ | 'TX_COLLECTION_FILE_STORE_URLS'
262
+ | 'TX_COLLECTION_FILE_STORE_SLOW_DELAY_MS'
263
+ | 'TX_COLLECTION_FILE_STORE_FAST_DELAY_MS'
264
+ | 'TX_COLLECTION_FILE_STORE_FAST_WORKER_COUNT'
265
+ | 'TX_COLLECTION_FILE_STORE_SLOW_WORKER_COUNT'
266
+ | 'TX_COLLECTION_FILE_STORE_FAST_BACKOFF_BASE_MS'
267
+ | 'TX_COLLECTION_FILE_STORE_SLOW_BACKOFF_BASE_MS'
268
+ | 'TX_COLLECTION_FILE_STORE_FAST_BACKOFF_MAX_MS'
269
+ | 'TX_COLLECTION_FILE_STORE_SLOW_BACKOFF_MAX_MS'
270
+ | 'TX_FILE_STORE_URL'
271
+ | 'TX_FILE_STORE_UPLOAD_CONCURRENCY'
272
+ | 'TX_FILE_STORE_MAX_QUEUE_SIZE'
273
+ | 'TX_FILE_STORE_ENABLED'
240
274
  | 'TX_PUBLIC_SETUP_ALLOWLIST'
241
275
  | 'TXE_PORT'
242
276
  | 'TRANSACTIONS_DISABLED'
@@ -248,10 +282,10 @@ export type EnvVar =
248
282
  | 'VALIDATOR_ADDRESSES'
249
283
  | 'ROLLUP_VERSION'
250
284
  | 'WS_BLOCK_CHECK_INTERVAL_MS'
251
- | 'WS_PROVEN_BLOCKS_ONLY'
252
285
  | 'WS_BLOCK_REQUEST_BATCH_SIZE'
253
286
  | 'L1_READER_VIEM_POLLING_INTERVAL_MS'
254
287
  | 'WS_DATA_DIRECTORY'
288
+ | 'WS_NUM_HISTORIC_CHECKPOINTS'
255
289
  | 'WS_NUM_HISTORIC_BLOCKS'
256
290
  | 'ETHEREUM_SLOT_DURATION'
257
291
  | 'AZTEC_SLOT_DURATION'
@@ -266,6 +300,7 @@ export type EnvVar =
266
300
  | 'AZTEC_LOCAL_EJECTION_THRESHOLD'
267
301
  | 'AZTEC_MANA_TARGET'
268
302
  | 'AZTEC_PROVING_COST_PER_MANA'
303
+ | 'AZTEC_INITIAL_ETH_PER_FEE_ASSET'
269
304
  | 'AZTEC_SLASHING_QUORUM'
270
305
  | 'AZTEC_SLASHING_ROUND_SIZE_IN_EPOCHS'
271
306
  | 'AZTEC_SLASHING_LIFETIME_IN_ROUNDS'
@@ -279,6 +314,7 @@ export type EnvVar =
279
314
  | 'AZTEC_SLASHER_FLAVOR'
280
315
  | 'AZTEC_GOVERNANCE_PROPOSER_QUORUM'
281
316
  | 'AZTEC_GOVERNANCE_PROPOSER_ROUND_SIZE'
317
+ | 'AZTEC_GOVERNANCE_VOTING_DURATION'
282
318
  | 'AZTEC_EXIT_DELAY_SECONDS'
283
319
  | 'L1_GAS_LIMIT_BUFFER_PERCENTAGE'
284
320
  | 'L1_GAS_PRICE_MAX'
@@ -305,7 +341,6 @@ export type EnvVar =
305
341
  | 'K8S_POD_UID'
306
342
  | 'K8S_NAMESPACE_NAME'
307
343
  | 'VALIDATOR_REEXECUTE_DEADLINE_MS'
308
- | 'ALWAYS_REEXECUTE_BLOCK_PROPOSALS'
309
344
  | 'AUTO_UPDATE'
310
345
  | 'AUTO_UPDATE_URL'
311
346
  | 'WEB3_SIGNER_URL'
@@ -314,4 +349,16 @@ export type EnvVar =
314
349
  | 'FISHERMAN_MODE'
315
350
  | 'MAX_ALLOWED_ETH_CLIENT_DRIFT_SECONDS'
316
351
  | 'LEGACY_BLS_CLI'
317
- | 'DEBUG_FORCE_TX_PROOF_VERIFICATION';
352
+ | 'DEBUG_FORCE_TX_PROOF_VERIFICATION'
353
+ | 'VALIDATOR_HA_SIGNING_ENABLED'
354
+ | 'VALIDATOR_HA_NODE_ID'
355
+ | 'VALIDATOR_HA_POLLING_INTERVAL_MS'
356
+ | 'VALIDATOR_HA_SIGNING_TIMEOUT_MS'
357
+ | 'VALIDATOR_HA_MAX_STUCK_DUTIES_AGE_MS'
358
+ | 'VALIDATOR_HA_OLD_DUTIES_MAX_AGE_H'
359
+ | 'VALIDATOR_HA_DATABASE_URL'
360
+ | 'VALIDATOR_HA_RUN_MIGRATIONS'
361
+ | 'VALIDATOR_HA_POOL_MAX'
362
+ | 'VALIDATOR_HA_POOL_MIN'
363
+ | 'VALIDATOR_HA_POOL_IDLE_TIMEOUT_MS'
364
+ | 'VALIDATOR_HA_POOL_CONNECTION_TIMEOUT_MS';
@@ -2,6 +2,7 @@ import { Fq, Fr } from '../curves/bn254/field.js';
2
2
  import { createConsoleLogger } from '../log/console.js';
3
3
  import type { EnvVar } from './env_var.js';
4
4
  import { type NetworkNames, getActiveNetworkName } from './network_name.js';
5
+ import { parseBooleanEnv } from './parse-env.js';
5
6
  import { SecretValue } from './secret_value.js';
6
7
 
7
8
  export { SecretValue, getActiveNetworkName };
@@ -148,6 +149,23 @@ export function floatConfigHelper(
148
149
  };
149
150
  }
150
151
 
152
+ /**
153
+ * Parses an environment variable to a 0-1 percentage value
154
+ */
155
+ export function percentageConfigHelper(defaultVal: number): Pick<ConfigMapping, 'parseEnv' | 'defaultValue'> {
156
+ return {
157
+ parseEnv: (val: string): number => {
158
+ const parsed = safeParseFloat(val, defaultVal);
159
+ if (parsed < 0 || parsed > 1) {
160
+ throw new TypeError(`Invalid percentage value: ${parsed} should be between 0 and 1`);
161
+ }
162
+
163
+ return parsed;
164
+ },
165
+ defaultValue: defaultVal,
166
+ };
167
+ }
168
+
151
169
  /**
152
170
  * Generates parseEnv and default values for a numerical config value.
153
171
  * @param defaultVal - The default numerical value to use if the environment variable is not set or is invalid
@@ -231,10 +249,7 @@ export function secretValueConfigHelper<T>(parse: (val: string | undefined) => T
231
249
  };
232
250
  }
233
251
 
234
- /** Parses an env var as boolean. Returns true only if value is 1, true, or TRUE. */
235
- export function parseBooleanEnv(val: string | undefined): boolean {
236
- return val !== undefined && ['1', 'true', 'TRUE'].includes(val);
237
- }
252
+ export { parseBooleanEnv } from './parse-env.js';
238
253
 
239
254
  export function secretStringConfigHelper(): Required<
240
255
  Pick<ConfigMapping, 'parseEnv' | 'defaultValue' | 'isBoolean'> & {
@@ -5,7 +5,8 @@ export type NetworkNames =
5
5
  | 'testnet'
6
6
  | 'mainnet'
7
7
  | 'next-net'
8
- | 'devnet';
8
+ | 'devnet'
9
+ | `v${number}-devnet-${number}`;
9
10
 
10
11
  export function getActiveNetworkName(name?: string): NetworkNames {
11
12
  const network = name || process.env.NETWORK;
@@ -23,6 +24,8 @@ export function getActiveNetworkName(name?: string): NetworkNames {
23
24
  return 'next-net';
24
25
  } else if (network === 'devnet') {
25
26
  return 'devnet';
27
+ } else if (/^v\d+-devnet-\d+$/.test(network)) {
28
+ return network as `v${number}-devnet-${number}`;
26
29
  }
27
30
  throw new Error(`Unknown network: ${network}`);
28
31
  }
@@ -0,0 +1,4 @@
1
+ /** Parses an env var as boolean. Returns true only if value is 1, true, or TRUE. */
2
+ export function parseBooleanEnv(val: string | undefined): boolean {
3
+ return val !== undefined && ['1', 'true', 'TRUE'].includes(val);
4
+ }
@@ -35,16 +35,6 @@ export async function poseidon2HashWithSeparator(input: Fieldable[], separator:
35
35
  return Fr.fromBuffer(Buffer.from(response.hash));
36
36
  }
37
37
 
38
- export async function poseidon2HashAccumulate(input: Fieldable[]): Promise<Fr> {
39
- const inputFields = serializeToFields(input);
40
- await BarretenbergSync.initSingleton();
41
- const api = BarretenbergSync.getSingleton();
42
- const response = api.poseidon2HashAccumulate({
43
- inputs: inputFields.map(i => i.toBuffer()),
44
- });
45
- return Fr.fromBuffer(Buffer.from(response.hash));
46
- }
47
-
48
38
  /**
49
39
  * Runs a Poseidon2 permutation.
50
40
  * @param input the input state. Expected to be of size 4.
@@ -1,4 +1,4 @@
1
- import { createLogger } from '../../log/pino-logger.js';
1
+ import { type Logger, type LoggerBindings, createLogger } from '../../log/pino-logger.js';
2
2
 
3
3
  /**
4
4
  * A number generator which is used as a source of randomness in the system. If the SEED env variable is set, the
@@ -12,11 +12,13 @@ export class RandomnessSingleton {
12
12
  private static instance: RandomnessSingleton;
13
13
 
14
14
  private counter = 0;
15
+ private log: Logger;
15
16
 
16
17
  private constructor(
17
18
  private readonly seed?: number,
18
- private readonly log = createLogger('foundation:randomness_singleton'),
19
+ bindings?: LoggerBindings,
19
20
  ) {
21
+ this.log = createLogger('foundation:randomness_singleton', bindings);
20
22
  if (seed !== undefined) {
21
23
  this.log.debug(`Using pseudo-randomness with seed: ${seed}`);
22
24
  this.counter = seed;
@@ -25,10 +27,10 @@ export class RandomnessSingleton {
25
27
  }
26
28
  }
27
29
 
28
- public static getInstance(): RandomnessSingleton {
30
+ public static getInstance(bindings?: LoggerBindings): RandomnessSingleton {
29
31
  if (!RandomnessSingleton.instance) {
30
32
  const seed = process.env.SEED ? Number(process.env.SEED) : undefined;
31
- RandomnessSingleton.instance = new RandomnessSingleton(seed);
33
+ RandomnessSingleton.instance = new RandomnessSingleton(seed, bindings);
32
34
  }
33
35
 
34
36
  return RandomnessSingleton.instance;
@@ -34,15 +34,6 @@ export function poseidon2HashWithSeparator(input: Fieldable[], separator: number
34
34
  return Fr.fromBuffer(Buffer.from(response.hash));
35
35
  }
36
36
 
37
- export function poseidon2HashAccumulate(input: Fieldable[]): Fr {
38
- const inputFields = serializeToFields(input);
39
- const api = BarretenbergSync.getSingleton();
40
- const response = api.poseidon2HashAccumulate({
41
- inputs: inputFields.map(i => i.toBuffer()),
42
- });
43
- return Fr.fromBuffer(Buffer.from(response.hash));
44
- }
45
-
46
37
  /**
47
38
  * Runs a Poseidon2 permutation.
48
39
  * @param input the input state. Expected to be of size 4.
@@ -249,7 +249,7 @@ export class EthAddress {
249
249
  /** Converts a number into an address. Useful for testing. */
250
250
  static fromNumber(num: bigint | number): EthAddress {
251
251
  const buffer = Buffer.alloc(EthAddress.SIZE_IN_BYTES);
252
- buffer.writeBigUInt64BE(BigInt(num), 0);
252
+ buffer.writeBigUInt64BE(BigInt(num), EthAddress.SIZE_IN_BYTES - 8);
253
253
  return new EthAddress(buffer);
254
254
  }
255
255
 
@@ -1,3 +1,4 @@
1
+ import { parseBooleanEnv } from '@aztec/foundation/config';
1
2
  import { overwriteLoggingStream, pinoPrettyOpts } from '@aztec/foundation/log';
2
3
 
3
4
  import pretty from 'pino-pretty';
@@ -6,4 +7,6 @@ import pretty from 'pino-pretty';
6
7
  // file so we don't mess up with dependencies in non-testing environments,
7
8
  // since pino-pretty messes up with browser bundles.
8
9
  // See also https://www.npmjs.com/package/pino-pretty?activeTab=readme#user-content-usage-with-jest
9
- overwriteLoggingStream(pretty(pinoPrettyOpts));
10
+ if (!parseBooleanEnv(process.env.LOG_JSON)) {
11
+ overwriteLoggingStream(pretty(pinoPrettyOpts));
12
+ }
@@ -24,6 +24,7 @@ export type SafeJsonRpcClientOptions = {
24
24
  batchWindowMS?: number;
25
25
  maxBatchSize?: number;
26
26
  maxRequestBodySize?: number;
27
+ extraHeaders?: Record<string, string>;
27
28
  onResponse?: (res: {
28
29
  response: any;
29
30
  headers: { get: (header: string) => string | null | undefined };
@@ -129,6 +130,7 @@ export function createSafeJsonRpcClient<T extends object>(
129
130
  const { response, headers } = await fetch(
130
131
  host,
131
132
  rpcCalls.map(({ request }) => request),
133
+ config.extraHeaders,
132
134
  );
133
135
 
134
136
  if (config.onResponse) {
@@ -1,3 +1,5 @@
1
+ import { promisify } from 'node:util';
2
+ import { gunzip as gunzipCb, gzip as gzipCb } from 'node:zlib';
1
3
  import { Agent, type Dispatcher } from 'undici';
2
4
 
3
5
  import { createLogger } from '../../log/pino-logger.js';
@@ -5,8 +7,14 @@ import { NoRetryError } from '../../retry/index.js';
5
7
  import { jsonStringify } from '../convert.js';
6
8
  import type { JsonRpcFetch } from './fetch.js';
7
9
 
10
+ const gzip = promisify(gzipCb);
11
+ const gunzip = promisify(gunzipCb);
12
+
8
13
  const log = createLogger('json-rpc:json_rpc_client:undici');
9
14
 
15
+ /** Minimum request size in bytes to trigger compression. */
16
+ const COMPRESSION_THRESHOLD = 1024;
17
+
10
18
  export { Agent };
11
19
 
12
20
  export function makeUndiciFetch(client = new Agent()): JsonRpcFetch {
@@ -14,14 +22,18 @@ export function makeUndiciFetch(client = new Agent()): JsonRpcFetch {
14
22
  log.trace(`JsonRpcClient.fetch: ${host}`, { host, body });
15
23
  let resp: Dispatcher.ResponseData;
16
24
  try {
25
+ const jsonBody = Buffer.from(jsonStringify(body));
26
+ const shouldCompress = jsonBody.length >= COMPRESSION_THRESHOLD;
17
27
  resp = await client.request({
18
28
  method: 'POST',
19
29
  origin: new URL(host),
20
30
  path: '/',
21
- body: jsonStringify(body),
31
+ body: shouldCompress ? await gzip(jsonBody) : jsonBody,
22
32
  headers: {
23
33
  ...extraHeaders,
24
34
  'content-type': 'application/json',
35
+ ...(shouldCompress && { 'content-encoding': 'gzip' }),
36
+ 'accept-encoding': 'gzip',
25
37
  },
26
38
  });
27
39
  } catch (err) {
@@ -31,13 +43,19 @@ export function makeUndiciFetch(client = new Agent()): JsonRpcFetch {
31
43
 
32
44
  let responseJson: any;
33
45
  const responseOk = resp.statusCode >= 200 && resp.statusCode <= 299;
46
+ const contentEncoding = resp.headers['content-encoding'];
34
47
  try {
35
- responseJson = await resp.body.json();
48
+ if (contentEncoding === 'gzip') {
49
+ const jsonBuffer = await gunzip(await resp.body.arrayBuffer());
50
+ responseJson = JSON.parse(jsonBuffer.toString('utf-8'));
51
+ } else {
52
+ responseJson = await resp.body.json();
53
+ }
36
54
  } catch {
37
55
  if (!responseOk) {
38
56
  throw new Error('HTTP ' + resp.statusCode);
39
57
  }
40
- throw new Error(`Failed to parse body as JSON: ${await resp.body.text()}`);
58
+ throw new Error(`Failed to parse body as JSON. encoding: ${contentEncoding}, body: ${await resp.body.text()}`);
41
59
  }
42
60
 
43
61
  if (!responseOk) {
@@ -0,0 +1,63 @@
1
+ import { timingSafeEqual } from 'crypto';
2
+ import type Koa from 'koa';
3
+
4
+ import { sha256 } from '../../crypto/sha256/index.js';
5
+ import { createLogger } from '../../log/index.js';
6
+
7
+ const log = createLogger('json-rpc:api-key-auth');
8
+
9
+ /**
10
+ * Computes the SHA-256 hash of a string and returns it as a Buffer.
11
+ * @param input - The input string to hash.
12
+ * @returns The SHA-256 hash as a Buffer.
13
+ */
14
+ export function sha256Hash(input: string): Buffer {
15
+ return sha256(Buffer.from(input));
16
+ }
17
+
18
+ /**
19
+ * Creates a Koa middleware that enforces API key authentication on all requests
20
+ * except the health check endpoint (GET /status).
21
+ *
22
+ * The API key can be provided via the `x-api-key` header or the `Authorization: Bearer <key>` header.
23
+ * Comparison is done by hashing the provided key with SHA-256 and comparing against the stored hash.
24
+ *
25
+ * @param apiKeyHash - The SHA-256 hash of the expected API key as a Buffer.
26
+ * @returns A Koa middleware that rejects requests without a valid API key.
27
+ */
28
+ export function getApiKeyAuthMiddleware(
29
+ apiKeyHash: Buffer,
30
+ ): (ctx: Koa.Context, next: () => Promise<void>) => Promise<void> {
31
+ return async (ctx: Koa.Context, next: () => Promise<void>) => {
32
+ // Allow health check through without auth
33
+ if (ctx.path === '/status' && ctx.method === 'GET') {
34
+ return next();
35
+ }
36
+
37
+ const providedKey = ctx.get('x-api-key') || ctx.get('authorization')?.replace(/^Bearer\s+/i, '');
38
+ if (!providedKey) {
39
+ log.warn(`Rejected admin RPC request from ${ctx.ip}: missing API key`);
40
+ ctx.status = 401;
41
+ ctx.body = {
42
+ jsonrpc: '2.0',
43
+ id: null,
44
+ error: { code: -32000, message: 'Unauthorized: invalid or missing API key' },
45
+ };
46
+ return;
47
+ }
48
+
49
+ const providedHashBuf = sha256Hash(providedKey);
50
+ if (!timingSafeEqual(apiKeyHash, providedHashBuf)) {
51
+ log.warn(`Rejected admin RPC request from ${ctx.ip}: invalid API key`);
52
+ ctx.status = 401;
53
+ ctx.body = {
54
+ jsonrpc: '2.0',
55
+ id: null,
56
+ error: { code: -32000, message: 'Unauthorized: invalid or missing API key' },
57
+ };
58
+ return;
59
+ }
60
+
61
+ await next();
62
+ };
63
+ }
@@ -1 +1,2 @@
1
+ export * from './api_key_auth.js';
1
2
  export * from './safe_json_rpc_server.js';
@@ -35,7 +35,7 @@ export type SafeJsonRpcServerConfig = {
35
35
  const defaultServerConfig: SafeJsonRpcServerConfig = {
36
36
  http200OnError: false,
37
37
  maxBatchSize: 100,
38
- maxBodySizeBytes: '50mb',
38
+ maxBodySizeBytes: '1mb',
39
39
  };
40
40
 
41
41
  export class SafeJsonRpcServer {
@@ -0,0 +1,22 @@
1
+ /**
2
+ * Converts bigint values to strings recursively in a log object to avoid serialization issues.
3
+ */
4
+ export function convertBigintsToStrings(obj: unknown): unknown {
5
+ if (typeof obj === 'bigint') {
6
+ return String(obj);
7
+ }
8
+
9
+ if (Array.isArray(obj)) {
10
+ return obj.map(item => convertBigintsToStrings(item));
11
+ }
12
+
13
+ if (obj !== null && typeof obj === 'object') {
14
+ const result: Record<string, unknown> = {};
15
+ for (const key in obj) {
16
+ result[key] = convertBigintsToStrings((obj as Record<string, unknown>)[key]);
17
+ }
18
+ return result;
19
+ }
20
+
21
+ return obj;
22
+ }
@@ -1,5 +1,7 @@
1
1
  import type { pino } from 'pino';
2
2
 
3
+ import { convertBigintsToStrings } from './bigint-utils.js';
4
+
3
5
  /* eslint-disable camelcase */
4
6
 
5
7
  const GOOGLE_CLOUD_TRACE_ID = 'logging.googleapis.com/trace';
@@ -15,6 +17,9 @@ export const GoogleCloudLoggerConfig = {
15
17
  messageKey: 'message',
16
18
  formatters: {
17
19
  log(object: Record<string, unknown>): Record<string, unknown> {
20
+ // Convert bigints to strings recursively to avoid serialization issues
21
+ object = convertBigintsToStrings(object) as Record<string, unknown>;
22
+
18
23
  // Add trace context attributes following Cloud Logging structured log format described
19
24
  // in https://cloud.google.com/logging/docs/structured-logging#special-payload-fields
20
25
  const { trace_id, span_id, trace_flags, ...rest } = object;
@@ -2,15 +2,17 @@ import type { ComponentLogger, Logger } from '@libp2p/interface';
2
2
 
3
3
  import { getLogLevelFromFilters } from './log-filters.js';
4
4
  import type { LogLevel } from './log-levels.js';
5
- import { logFilters, logger } from './pino-logger.js';
5
+ import { type LoggerBindings, logFilters, logger } from './pino-logger.js';
6
6
 
7
7
  /**
8
8
  * Creates a libp2p compatible logger that wraps our pino logger.
9
9
  * This adapter implements the ComponentLogger interface required by libp2p.
10
+ * @param namespace - Base namespace for the logger
11
+ * @param bindings - Optional bindings to pass to the logger (actor, instanceId)
10
12
  */
11
- export function createLibp2pComponentLogger(namespace: string): ComponentLogger {
13
+ export function createLibp2pComponentLogger(namespace: string, bindings?: LoggerBindings): ComponentLogger {
12
14
  return {
13
- forComponent: (component: string) => createLibp2pLogger(`${namespace}:${component}`),
15
+ forComponent: (component: string) => createLibp2pLogger(`${namespace}:${component}`, bindings),
14
16
  };
15
17
  }
16
18
 
@@ -24,9 +26,14 @@ function replaceFormatting(message: string) {
24
26
  return message.replace(/(%p|%a)/g, '%s');
25
27
  }
26
28
 
27
- function createLibp2pLogger(component: string): Logger {
29
+ function createLibp2pLogger(component: string, bindings?: LoggerBindings): Logger {
28
30
  // Create a direct pino logger instance for libp2p that supports string interpolation
29
- const log = logger.child({ module: component }, { level: getLogLevelFromFilters(logFilters, component) });
31
+ const actor = bindings?.actor;
32
+ const instanceId = bindings?.instanceId;
33
+ const log = logger.child(
34
+ { module: component, ...(actor && { actor }), ...(instanceId && { instanceId }) },
35
+ { level: getLogLevelFromFilters(logFilters, component) },
36
+ );
30
37
 
31
38
  const logIfEnabled = (level: LogLevel, message: string, ...args: unknown[]) => {
32
39
  if (!log.isLevelEnabled(level)) {
@@ -19,22 +19,40 @@ export function getLogLevelFromFilters(filters: LogFilters, module: string): Log
19
19
  return undefined;
20
20
  }
21
21
 
22
- export function assertLogLevel(level: string): asserts level is LogLevel {
23
- if (!LogLevels.includes(level as LogLevel)) {
24
- throw new Error(`Invalid log level: ${level}`);
22
+ /**
23
+ * Parses the LOG_LEVEL env string into a default level and per-module filter overrides.
24
+ *
25
+ * Format: `<default_level>;<level>:<module1>,<module2>;<level>:<module3>;...`
26
+ * - First segment (before the first `;`) is the default log level for all modules.
27
+ * - Remaining segments are `level:module` pairs: apply the given level to the listed modules (comma-separated).
28
+ * - Later filters override earlier ones for overlapping module matches.
29
+ * - The `aztec:` prefix is stripped from module names; spaces are trimmed.
30
+ *
31
+ * @example
32
+ * ```ts
33
+ * parseLogLevel('debug;warn:module1,module2;error:module3', 'info')
34
+ * // => ['debug', [['module3', 'error'], ['module2', 'warn'], ['module1', 'warn']]]
35
+ * ```
36
+ */
37
+ export function parseLogLevelEnvVar(
38
+ logLevelEnvVar: string | undefined,
39
+ defaultLevel: LogLevel,
40
+ ): [LogLevel, LogFilters] {
41
+ if (!logLevelEnvVar) {
42
+ return [defaultLevel, []];
25
43
  }
44
+ const [level] = logLevelEnvVar.split(';', 1);
45
+ assertValidLogLevel(level);
46
+ return [level, parseFilters(logLevelEnvVar.slice(level.length + 1))];
26
47
  }
27
48
 
28
- export function parseEnv(env: string | undefined, defaultLevel: LogLevel): [LogLevel, LogFilters] {
29
- if (!env) {
30
- return [defaultLevel, []];
49
+ function assertValidLogLevel(level: string): asserts level is LogLevel {
50
+ if (!LogLevels.includes(level as LogLevel)) {
51
+ throw new Error(`Invalid log level: ${level}`);
31
52
  }
32
- const [level] = env.split(';', 1);
33
- assertLogLevel(level);
34
- return [level, parseFilters(env.slice(level.length + 1))];
35
53
  }
36
54
 
37
- export function parseFilters(definition: string | undefined): LogFilters {
55
+ function parseFilters(definition: string | undefined): LogFilters {
38
56
  if (!definition) {
39
57
  return [];
40
58
  }
@@ -48,7 +66,7 @@ export function parseFilters(definition: string | undefined): LogFilters {
48
66
  throw new Error(`Invalid log filter statement: ${statement}`);
49
67
  }
50
68
  const sanitizedLevel = level.trim().toLowerCase();
51
- assertLogLevel(sanitizedLevel);
69
+ assertValidLogLevel(sanitizedLevel);
52
70
  for (const module of modules.split(',')) {
53
71
  filters.push([
54
72
  module
@@ -0,0 +1,25 @@
1
+ import { AsyncLocalStorage } from 'node:async_hooks';
2
+
3
+ import { type LoggerBindings, addLogBindingsHandler, removeLogBindingsHandler } from './pino-logger.js';
4
+
5
+ /** AsyncLocalStorage for logger bindings context propagation (Node.js only). */
6
+ const bindingsStorage = new AsyncLocalStorage<LoggerBindings>();
7
+
8
+ /** Returns the current bindings from AsyncLocalStorage, if any. */
9
+ export function getBindings(): LoggerBindings | undefined {
10
+ return bindingsStorage.getStore();
11
+ }
12
+
13
+ /**
14
+ * Runs a callback within a bindings context. All loggers created within the callback
15
+ * will automatically inherit the bindings (actor, instanceId) via the log bindings handler.
16
+ */
17
+ export async function withLoggerBindings<T>(bindings: LoggerBindings, callback: () => Promise<T>): Promise<T> {
18
+ const handler = () => bindingsStorage.getStore();
19
+ addLogBindingsHandler(handler);
20
+ try {
21
+ return await bindingsStorage.run(bindings, callback);
22
+ } finally {
23
+ removeLogBindingsHandler(handler);
24
+ }
25
+ }