@aztec/node-lib 3.0.0-canary.a9708bd → 3.0.0-devnet.3

@@ -0,0 +1,264 @@
+ import { jsonStringify } from '@aztec/foundation/json-rpc';
+ import { createLogger } from '@aztec/foundation/log';
+ /**
+  * Store for persisting L1 transaction states across all L1TxUtils instances.
+  * Each state is stored individually with a unique ID, and blobs are stored separately.
+  * @remarks This class lives in this package instead of `ethereum` because it depends on `kv-store`.
+  */ export class L1TxStore {
+   store;
+   log;
+   static SCHEMA_VERSION = 2;
+   states;
+   blobs;
+   stateIdCounter;
+   constructor(store, log = createLogger('l1-tx-utils:store')){
+     this.store = store;
+     this.log = log;
+     this.states = store.openMap('l1_tx_states');
+     this.blobs = store.openMap('l1_tx_blobs');
+     this.stateIdCounter = store.openMap('l1_tx_state_id_counter');
+   }
+   /**
+    * Gets the next available state ID for an account.
+    */ consumeNextStateId(account) {
+     return this.store.transactionAsync(async ()=>{
+       const currentId = await this.stateIdCounter.getAsync(account) ?? 0;
+       const nextId = currentId + 1;
+       await this.stateIdCounter.set(account, nextId);
+       return nextId;
+     });
+   }
+   /**
+    * Creates a storage key for state/blob data.
+    */ makeKey(account, stateId) {
+     return `${account}-${stateId.toString().padStart(10, '0')}`;
+   }
+   /**
+    * Saves a single transaction state for a specific account.
+    * Blobs are not stored here, use saveBlobs instead.
+    * @param account - The sender account address
+    * @param state - Transaction state to save
+    */ async saveState(account, state) {
+     const key = this.makeKey(account, state.id);
+     const serializable = this.serializeState(state);
+     await this.states.set(key, jsonStringify(serializable));
+     this.log.debug(`Saved tx state ${state.id} for account ${account} with nonce ${state.nonce}`);
+     return state;
+   }
+   /**
+    * Saves blobs for a given state.
+    * @param account - The sender account address
+    * @param stateId - The state ID
+    * @param blobInputs - Blob inputs to save
+    */ async saveBlobs(account, stateId, blobInputs) {
+     if (!blobInputs) {
+       return;
+     }
+     const key = this.makeKey(account, stateId);
+     const blobData = this.serializeBlobInputs(blobInputs);
+     await this.blobs.set(key, jsonStringify(blobData));
+     this.log.debug(`Saved blobs for state ${stateId} of account ${account}`);
+   }
+   /**
+    * Loads all transaction states for a specific account.
+    * @param account - The sender account address
+    * @returns Array of transaction states with their IDs
+    */ async loadStates(account) {
+     const states = [];
+     const prefix = `${account}-`;
+     for await (const [key, stateJson] of this.states.entriesAsync({
+       start: prefix,
+       end: `${prefix}Z`
+     })){
+       const [keyAccount, stateIdStr] = key.split('-');
+       if (keyAccount !== account) {
+         throw new Error(`Mismatched account in key: expected ${account} but got ${keyAccount}`);
+       }
+       const stateId = parseInt(stateIdStr, 10);
+       try {
+         const serialized = JSON.parse(stateJson);
+         // Load blobs if they exist
+         let blobInputs;
+         if (serialized.hasBlobInputs) {
+           const blobJson = await this.blobs.getAsync(key);
+           if (blobJson) {
+             blobInputs = this.deserializeBlobInputs(JSON.parse(blobJson), serialized.blobMetadata);
+           }
+         }
+         const state = this.deserializeState(serialized, blobInputs);
+         states.push({
+           ...state,
+           id: stateId
+         });
+       } catch (err) {
+         this.log.error(`Failed to deserialize state ${key}`, err);
+       }
+     }
+     // Sort by ID
+     states.sort((a, b)=>a.id - b.id);
+     this.log.debug(`Loaded ${states.length} tx states for account ${account}`);
+     return states;
+   }
+   /**
+    * Loads a single state by ID.
+    * @param account - The sender account address
+    * @param stateId - The state ID
+    * @returns The transaction state or undefined if not found
+    */ async loadState(account, stateId) {
+     const key = this.makeKey(account, stateId);
+     const stateJson = await this.states.getAsync(key);
+     if (!stateJson) {
+       return undefined;
+     }
+     try {
+       const serialized = JSON.parse(stateJson);
+       // Load blobs if they exist
+       let blobInputs;
+       if (serialized.hasBlobInputs) {
+         const blobJson = await this.blobs.getAsync(key);
+         if (blobJson) {
+           blobInputs = this.deserializeBlobInputs(JSON.parse(blobJson), serialized.blobMetadata);
+         }
+       }
+       const state = this.deserializeState(serialized, blobInputs);
+       return {
+         ...state,
+         id: stateId
+       };
+     } catch (err) {
+       this.log.error(`Failed to deserialize state ${key}`, err);
+       return undefined;
+     }
+   }
+   /**
+    * Deletes a specific state and its associated blobs.
+    * @param account - The sender account address
+    * @param stateId - The state ID to delete
+    */ async deleteState(account, stateId) {
+     const key = this.makeKey(account, stateId);
+     await this.states.delete(key);
+     await this.blobs.delete(key);
+     this.log.debug(`Deleted state ${stateId} for account ${account}`);
+   }
+   /**
+    * Clears all transaction states for a specific account.
+    * @param account - The sender account address
+    */ async clearStates(account) {
+     const states = await this.loadStates(account);
+     for (const state of states){
+       await this.deleteState(account, state.id);
+     }
+     await this.stateIdCounter.delete(account);
+     this.log.info(`Cleared all tx states for account ${account}`);
+   }
+   /**
+    * Gets all accounts that have stored states.
+    * @returns Array of account addresses
+    */ async getAllAccounts() {
+     const accounts = new Set();
+     for await (const [key] of this.states.entriesAsync()){
+       const account = key.substring(0, key.lastIndexOf('-'));
+       accounts.add(account);
+     }
+     return Array.from(accounts);
+   }
+   /**
+    * Closes the store.
+    */ async close() {
+     await this.store.close();
+     this.log.info('Closed L1 tx state store');
+   }
+   /**
+    * Serializes an L1TxState for storage.
+    */ serializeState(state) {
+     const txConfigOverrides = {
+       ...state.txConfigOverrides,
+       gasLimit: state.txConfigOverrides.gasLimit?.toString(),
+       txTimeoutAt: state.txConfigOverrides.txTimeoutAt?.getTime()
+     };
+     return {
+       id: state.id,
+       txHashes: state.txHashes,
+       cancelTxHashes: state.cancelTxHashes,
+       gasLimit: state.gasLimit.toString(),
+       gasPrice: {
+         maxFeePerGas: state.gasPrice.maxFeePerGas.toString(),
+         maxPriorityFeePerGas: state.gasPrice.maxPriorityFeePerGas.toString(),
+         maxFeePerBlobGas: state.gasPrice.maxFeePerBlobGas?.toString()
+       },
+       txConfigOverrides,
+       request: {
+         ...state.request,
+         value: state.request.value?.toString()
+       },
+       status: state.status,
+       nonce: state.nonce,
+       sentAt: state.sentAtL1Ts.getTime(),
+       lastSentAt: state.lastSentAtL1Ts.getTime(),
+       receipt: state.receipt,
+       hasBlobInputs: state.blobInputs !== undefined,
+       blobMetadata: state.blobInputs?.maxFeePerBlobGas ? {
+         maxFeePerBlobGas: state.blobInputs.maxFeePerBlobGas.toString()
+       } : undefined
+     };
+   }
+   /**
+    * Deserializes a stored state back to L1TxState.
+    */ deserializeState(stored, blobInputs) {
+     const txConfigOverrides = {
+       ...stored.txConfigOverrides,
+       gasLimit: stored.txConfigOverrides.gasLimit !== undefined ? BigInt(stored.txConfigOverrides.gasLimit) : undefined,
+       txTimeoutAt: stored.txConfigOverrides.txTimeoutAt !== undefined ? new Date(stored.txConfigOverrides.txTimeoutAt) : undefined
+     };
+     const receipt = stored.receipt ? {
+       ...stored.receipt,
+       blockNumber: BigInt(stored.receipt.blockNumber),
+       cumulativeGasUsed: BigInt(stored.receipt.cumulativeGasUsed),
+       effectiveGasPrice: BigInt(stored.receipt.effectiveGasPrice),
+       gasUsed: BigInt(stored.receipt.gasUsed)
+     } : undefined;
+     return {
+       id: stored.id,
+       txHashes: stored.txHashes,
+       cancelTxHashes: stored.cancelTxHashes,
+       gasLimit: BigInt(stored.gasLimit),
+       gasPrice: {
+         maxFeePerGas: BigInt(stored.gasPrice.maxFeePerGas),
+         maxPriorityFeePerGas: BigInt(stored.gasPrice.maxPriorityFeePerGas),
+         maxFeePerBlobGas: stored.gasPrice.maxFeePerBlobGas ? BigInt(stored.gasPrice.maxFeePerBlobGas) : undefined
+       },
+       txConfigOverrides,
+       request: {
+         to: stored.request.to,
+         data: stored.request.data,
+         value: stored.request.value ? BigInt(stored.request.value) : undefined
+       },
+       status: stored.status,
+       nonce: stored.nonce,
+       sentAtL1Ts: new Date(stored.sentAt),
+       lastSentAtL1Ts: new Date(stored.lastSentAt),
+       receipt,
+       blobInputs
+     };
+   }
+   /**
+    * Serializes blob inputs for separate storage.
+    */ serializeBlobInputs(blobInputs) {
+     return {
+       blobs: blobInputs.blobs.map((b)=>Buffer.from(b).toString('base64')),
+       kzg: jsonStringify(blobInputs.kzg)
+     };
+   }
+   /**
+    * Deserializes blob inputs from storage, combining blob data with metadata.
+    */ deserializeBlobInputs(stored, metadata) {
+     const blobInputs = {
+       blobs: stored.blobs.map((b)=>new Uint8Array(Buffer.from(b, 'base64'))),
+       kzg: JSON.parse(stored.kzg)
+     };
+     if (metadata?.maxFeePerBlobGas) {
+       blobInputs.maxFeePerBlobGas = BigInt(metadata.maxFeePerBlobGas);
+     }
+     return blobInputs;
+   }
+ }
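
Below is a minimal usage sketch, not code from the package, showing how a caller might persist and reload one transaction state with the new L1TxStore. It assumes the class is re-exported from the new `./stores` entry point and that `store` is whatever kv-store instance L1TxUtils already uses (something exposing `openMap`, `transactionAsync`, and `close`); the state fields mirror the ones `serializeState` reads above, and all literal values are placeholders.

import { L1TxStore } from '@aztec/node-lib/stores'; // assumed export path

async function persistAndReload(store: any, account: string) {
  const txStore = new L1TxStore(store);

  // IDs are allocated per account inside a kv-store transaction.
  const id = await txStore.consumeNextStateId(account);

  // Field names follow serializeState/deserializeState above; values are dummies.
  await txStore.saveState(account, {
    id,
    txHashes: [],
    cancelTxHashes: [],
    gasLimit: 21000n,
    gasPrice: { maxFeePerGas: 30_000_000_000n, maxPriorityFeePerGas: 1_000_000_000n },
    txConfigOverrides: {},
    request: { to: '0x0000000000000000000000000000000000000000', data: '0x' },
    status: 'pending', // hypothetical status value
    nonce: 0,
    sentAtL1Ts: new Date(),
    lastSentAtL1Ts: new Date(),
    receipt: undefined,
    blobInputs: undefined,
  });

  // loadStates returns every state for the account, blobs rejoined, sorted by id.
  const states = await txStore.loadStates(account);
  return states.find(s => s.id === id);
}
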
package/package.json CHANGED
@@ -1,10 +1,13 @@
  {
    "name": "@aztec/node-lib",
-   "version": "3.0.0-canary.a9708bd",
+   "version": "3.0.0-devnet.3",
    "type": "module",
    "exports": {
      "./actions": "./dest/actions/index.js",
-     "./config": "./dest/config/index.js"
+     "./config": "./dest/config/index.js",
+     "./factories": "./dest/factories/index.js",
+     "./metrics": "./dest/metrics/index.js",
+     "./stores": "./dest/stores/index.js"
    },
    "inherits": [
      "../package.common.json"
@@ -54,27 +57,28 @@
      ]
    },
    "dependencies": {
-     "@aztec/archiver": "3.0.0-canary.a9708bd",
-     "@aztec/bb-prover": "3.0.0-canary.a9708bd",
-     "@aztec/blob-sink": "3.0.0-canary.a9708bd",
-     "@aztec/constants": "3.0.0-canary.a9708bd",
-     "@aztec/epoch-cache": "3.0.0-canary.a9708bd",
-     "@aztec/ethereum": "3.0.0-canary.a9708bd",
-     "@aztec/foundation": "3.0.0-canary.a9708bd",
-     "@aztec/kv-store": "3.0.0-canary.a9708bd",
-     "@aztec/merkle-tree": "3.0.0-canary.a9708bd",
-     "@aztec/p2p": "3.0.0-canary.a9708bd",
-     "@aztec/protocol-contracts": "3.0.0-canary.a9708bd",
-     "@aztec/prover-client": "3.0.0-canary.a9708bd",
-     "@aztec/sequencer-client": "3.0.0-canary.a9708bd",
-     "@aztec/simulator": "3.0.0-canary.a9708bd",
-     "@aztec/stdlib": "3.0.0-canary.a9708bd",
-     "@aztec/telemetry-client": "3.0.0-canary.a9708bd",
-     "@aztec/validator-client": "3.0.0-canary.a9708bd",
-     "@aztec/world-state": "3.0.0-canary.a9708bd",
+     "@aztec/archiver": "3.0.0-devnet.3",
+     "@aztec/bb-prover": "3.0.0-devnet.3",
+     "@aztec/blob-sink": "3.0.0-devnet.3",
+     "@aztec/constants": "3.0.0-devnet.3",
+     "@aztec/epoch-cache": "3.0.0-devnet.3",
+     "@aztec/ethereum": "3.0.0-devnet.3",
+     "@aztec/foundation": "3.0.0-devnet.3",
+     "@aztec/kv-store": "3.0.0-devnet.3",
+     "@aztec/merkle-tree": "3.0.0-devnet.3",
+     "@aztec/p2p": "3.0.0-devnet.3",
+     "@aztec/protocol-contracts": "3.0.0-devnet.3",
+     "@aztec/prover-client": "3.0.0-devnet.3",
+     "@aztec/sequencer-client": "3.0.0-devnet.3",
+     "@aztec/simulator": "3.0.0-devnet.3",
+     "@aztec/stdlib": "3.0.0-devnet.3",
+     "@aztec/telemetry-client": "3.0.0-devnet.3",
+     "@aztec/validator-client": "3.0.0-devnet.3",
+     "@aztec/world-state": "3.0.0-devnet.3",
      "tslib": "^2.4.0"
    },
    "devDependencies": {
+     "@aztec/blob-lib": "3.0.0-devnet.3",
      "@jest/globals": "^30.0.0",
      "@types/jest": "^30.0.0",
      "@types/node": "^22.15.17",
@@ -26,11 +26,12 @@ import type { SharedNodeConfig } from '../config/index.js';
  // Half day worth of L1 blocks
  const MIN_L1_BLOCKS_TO_TRIGGER_REPLACE = 86400 / 2 / 12;

- type SnapshotSyncConfig = Pick<SharedNodeConfig, 'syncMode' | 'snapshotsUrl'> &
+ type SnapshotSyncConfig = Pick<SharedNodeConfig, 'syncMode'> &
    Pick<ChainConfig, 'l1ChainId' | 'rollupVersion'> &
    Pick<ArchiverConfig, 'archiverStoreMapSizeKb' | 'maxLogs'> &
    Required<DataStoreConfig> &
    EthereumClientConfig & {
+     snapshotsUrls?: string[];
      minL1BlocksToTriggerReplace?: number;
    };

@@ -39,14 +40,14 @@ type SnapshotSyncConfig = Pick<SharedNodeConfig, 'syncMode' | 'snapshotsUrl'> &
   * Behaviour depends on syncing mode.
   */
  export async function trySnapshotSync(config: SnapshotSyncConfig, log: Logger) {
-   const { syncMode, snapshotsUrl, dataDirectory, l1ChainId, rollupVersion, l1Contracts } = config;
+   const { syncMode, snapshotsUrls, dataDirectory, l1ChainId, rollupVersion, l1Contracts } = config;
    if (syncMode === 'full') {
      log.debug('Snapshot sync is disabled. Running full sync.', { syncMode: syncMode });
      return false;
    }

-   if (!snapshotsUrl) {
-     log.verbose('Snapshot sync is disabled. No snapshots URL provided.');
+   if (!snapshotsUrls || snapshotsUrls.length === 0) {
+     log.verbose('Snapshot sync is disabled. No snapshots URLs provided.');
      return false;
    }

@@ -55,15 +56,7 @@ export async function trySnapshotSync(config: SnapshotSyncConfig, log: Logger) {
      return false;
    }

-   let fileStore: ReadOnlyFileStore;
-   try {
-     fileStore = await createReadOnlyFileStore(snapshotsUrl, log);
-   } catch (err) {
-     log.error(`Invalid config for downloading snapshots`, err);
-     return false;
-   }
-
-   // Create an archiver store to check the current state
+   // Create an archiver store to check the current state (do this only once)
    log.verbose(`Creating temporary archiver data store`);
    const archiverStore = await createArchiverStore(config);
    let archiverL1BlockNumber: bigint | undefined;
@@ -102,65 +95,111 @@ export async function trySnapshotSync(config: SnapshotSyncConfig, log: Logger) {
      rollupVersion,
      rollupAddress: l1Contracts.rollupAddress,
    };
-   let snapshot: SnapshotMetadata | undefined;
-   try {
-     snapshot = await getLatestSnapshotMetadata(indexMetadata, fileStore);
-   } catch (err) {
-     log.error(`Failed to get latest snapshot metadata. Skipping snapshot sync.`, err, {
-       ...indexMetadata,
-       snapshotsUrl,
-     });
-     return false;
-   }

-   if (!snapshot) {
-     log.verbose(`No snapshot found. Skipping snapshot sync.`, { ...indexMetadata, snapshotsUrl });
-     return false;
-   }
+   // Fetch latest snapshot from each URL
+   type SnapshotCandidate = { snapshot: SnapshotMetadata; url: string; fileStore: ReadOnlyFileStore };
+   const snapshotCandidates: SnapshotCandidate[] = [];

-   if (snapshot.schemaVersions.archiver !== ARCHIVER_DB_VERSION) {
-     log.warn(
-       `Skipping snapshot sync as last snapshot has schema version ${snapshot.schemaVersions.archiver} but expected ${ARCHIVER_DB_VERSION}.`,
-       snapshot,
-     );
-     return false;
-   }
+   for (const snapshotsUrl of snapshotsUrls) {
+     let fileStore: ReadOnlyFileStore;
+     try {
+       fileStore = await createReadOnlyFileStore(snapshotsUrl, log);
+     } catch (err) {
+       log.error(`Invalid config for downloading snapshots from ${snapshotsUrl}`, err);
+       continue;
+     }

-   if (snapshot.schemaVersions.worldState !== WORLD_STATE_DB_VERSION) {
-     log.warn(
-       `Skipping snapshot sync as last snapshot has world state schema version ${snapshot.schemaVersions.worldState} but we expected ${WORLD_STATE_DB_VERSION}.`,
-       snapshot,
-     );
-     return false;
+     let snapshot: SnapshotMetadata | undefined;
+     try {
+       snapshot = await getLatestSnapshotMetadata(indexMetadata, fileStore);
+     } catch (err) {
+       log.error(`Failed to get latest snapshot metadata from ${snapshotsUrl}. Skipping this URL.`, err, {
+         ...indexMetadata,
+         snapshotsUrl,
+       });
+       continue;
+     }
+
+     if (!snapshot) {
+       log.verbose(`No snapshot found at ${snapshotsUrl}. Skipping this URL.`, { ...indexMetadata, snapshotsUrl });
+       continue;
+     }
+
+     if (snapshot.schemaVersions.archiver !== ARCHIVER_DB_VERSION) {
+       log.warn(
+         `Skipping snapshot from ${snapshotsUrl} as it has schema version ${snapshot.schemaVersions.archiver} but expected ${ARCHIVER_DB_VERSION}.`,
+         snapshot,
+       );
+       continue;
+     }
+
+     if (snapshot.schemaVersions.worldState !== WORLD_STATE_DB_VERSION) {
+       log.warn(
+         `Skipping snapshot from ${snapshotsUrl} as it has world state schema version ${snapshot.schemaVersions.worldState} but we expected ${WORLD_STATE_DB_VERSION}.`,
+         snapshot,
+       );
+       continue;
+     }
+
+     if (archiverL1BlockNumber && snapshot.l1BlockNumber < archiverL1BlockNumber) {
+       log.verbose(
+         `Skipping snapshot from ${snapshotsUrl} since local archiver is at L1 block ${archiverL1BlockNumber} which is further than snapshot at ${snapshot.l1BlockNumber}`,
+         { snapshot, archiverL1BlockNumber, snapshotsUrl },
+       );
+       continue;
+     }
+
+     if (archiverL1BlockNumber && snapshot.l1BlockNumber - Number(archiverL1BlockNumber) < minL1BlocksToTriggerReplace) {
+       log.verbose(
+         `Skipping snapshot from ${snapshotsUrl} as archiver is less than ${
+           snapshot.l1BlockNumber - Number(archiverL1BlockNumber)
+         } L1 blocks behind this snapshot.`,
+         { snapshot, archiverL1BlockNumber, snapshotsUrl },
+       );
+       continue;
+     }
+
+     snapshotCandidates.push({ snapshot, url: snapshotsUrl, fileStore });
    }

-   if (archiverL1BlockNumber && snapshot.l1BlockNumber < archiverL1BlockNumber) {
-     log.verbose(
-       `Skipping snapshot sync since local archiver is at L1 block ${archiverL1BlockNumber} which is further than last snapshot at ${snapshot.l1BlockNumber}`,
-       { snapshot, archiverL1BlockNumber },
-     );
+   if (snapshotCandidates.length === 0) {
+     log.verbose(`No valid snapshots found from any URL. Skipping snapshot sync.`, { ...indexMetadata, snapshotsUrls });
      return false;
    }

-   if (archiverL1BlockNumber && snapshot.l1BlockNumber - Number(archiverL1BlockNumber) < minL1BlocksToTriggerReplace) {
-     log.verbose(
-       `Skipping snapshot sync as archiver is less than ${
-         snapshot.l1BlockNumber - Number(archiverL1BlockNumber)
-       } L1 blocks behind latest snapshot.`,
-       { snapshot, archiverL1BlockNumber },
-     );
-     return false;
+   // Sort candidates by L1 block number (highest first)
+   snapshotCandidates.sort((a, b) => b.snapshot.l1BlockNumber - a.snapshot.l1BlockNumber);
+
+   // Try each candidate in order until one succeeds
+   for (const { snapshot, url } of snapshotCandidates) {
+     const { l1BlockNumber, l2BlockNumber } = snapshot;
+     log.info(`Attempting to sync from snapshot at L1 block ${l1BlockNumber} L2 block ${l2BlockNumber}`, {
+       snapshot,
+       snapshotsUrl: url,
+     });
+
+     try {
+       await snapshotSync(snapshot, log, {
+         dataDirectory: config.dataDirectory!,
+         rollupAddress: config.l1Contracts.rollupAddress,
+         snapshotsUrl: url,
+       });
+       log.info(`Snapshot synced to L1 block ${l1BlockNumber} L2 block ${l2BlockNumber}`, {
+         snapshot,
+         snapshotsUrl: url,
+       });
+       return true;
+     } catch (err) {
+       log.error(`Failed to download snapshot from ${url}. Trying next candidate.`, err, {
+         snapshot,
+         snapshotsUrl: url,
+       });
+       continue;
+     }
    }

-   const { l1BlockNumber, l2BlockNumber } = snapshot;
-   log.info(`Syncing from snapshot at L1 block ${l1BlockNumber} L2 block ${l2BlockNumber}`, { snapshot, snapshotsUrl });
-   await snapshotSync(snapshot, log, {
-     dataDirectory: config.dataDirectory!,
-     rollupAddress: config.l1Contracts.rollupAddress,
-     snapshotsUrl,
-   });
-   log.info(`Snapshot synced to L1 block ${l1BlockNumber} L2 block ${l2BlockNumber}`, { snapshot });
-   return true;
+   log.error(`Failed to download snapshot from all URLs.`, { snapshotsUrls });
+   return false;
  }

  /**
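
The refactor above replaces the single-URL early-return flow with a gather/filter/sort/try loop over every configured URL. A condensed restatement of that selection policy follows; it is illustrative only, not code from the package, with names and types simplified.

type Candidate = { url: string; l1BlockNumber: number };

// Given candidates that already passed the schema-version and "far enough ahead of the
// local archiver" checks, prefer the highest L1 block and fall back on download failure.
async function pickAndSync(
  candidates: Candidate[],
  download: (c: Candidate) => Promise<void>, // stand-in for snapshotSync
): Promise<boolean> {
  candidates.sort((a, b) => b.l1BlockNumber - a.l1BlockNumber);
  for (const candidate of candidates) {
    try {
      await download(candidate);
      return true; // first successful download wins
    } catch {
      continue; // try the next-best snapshot
    }
  }
  return false; // nothing usable from any URL
}
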
@@ -7,8 +7,8 @@ export type SharedNodeConfig = {
    sponsoredFPC: boolean;
    /** Sync mode: full to always sync via L1, snapshot to download a snapshot if there is no local data, force-snapshot to download even if there is local data. */
    syncMode: 'full' | 'snapshot' | 'force-snapshot';
-   /** Base URL for snapshots index. Index file will be searched at `SNAPSHOTS_BASE_URL/aztec-L1_CHAIN_ID-VERSION-ROLLUP_ADDRESS/index.json` */
-   snapshotsUrl?: string;
+   /** Base URLs for snapshots index. Index file will be searched at `SNAPSHOTS_BASE_URL/aztec-L1_CHAIN_ID-VERSION-ROLLUP_ADDRESS/index.json` */
+   snapshotsUrls?: string[];

    /** Auto update mode: disabled - to completely ignore remote signals to update the node. enabled - to respect the signals (potentially shutting this node down). log - check for updates but log a warning instead of applying them*/
    autoUpdate?: 'disabled' | 'notify' | 'config' | 'config-and-version';
@@ -36,9 +36,16 @@ export const sharedNodeConfigMappings: ConfigMappingsType<SharedNodeConfig> = {
        'Set sync mode to `full` to always sync via L1, `snapshot` to download a snapshot if there is no local data, `force-snapshot` to download even if there is local data.',
      defaultValue: 'snapshot',
    },
-   snapshotsUrl: {
-     env: 'SYNC_SNAPSHOTS_URL',
-     description: 'Base URL for snapshots index.',
+   snapshotsUrls: {
+     env: 'SYNC_SNAPSHOTS_URLS',
+     description: 'Base URLs for snapshots index, comma-separated.',
+     parseEnv: (val: string) =>
+       val
+         .split(',')
+         .map(url => url.trim())
+         .filter(url => url.length > 0),
+     fallback: ['SYNC_SNAPSHOTS_URL'],
+     defaultValue: [],
    },
    autoUpdate: {
      env: 'AUTO_UPDATE',
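
For reference, the new `snapshotsUrls` mapping reads a comma-separated list from `SYNC_SNAPSHOTS_URLS`, with the old `SYNC_SNAPSHOTS_URL` listed as a fallback env var. The snippet below only illustrates what the `parseEnv` shown above produces; the URLs are made up.

const parseSnapshotsUrls = (val: string) =>
  val
    .split(',')
    .map(url => url.trim())
    .filter(url => url.length > 0);

// e.g. SYNC_SNAPSHOTS_URLS=" https://snapshots.example.com , https://mirror.example.org ,"
// -> ['https://snapshots.example.com', 'https://mirror.example.org']
parseSnapshotsUrls(' https://snapshots.example.com , https://mirror.example.org ,');
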
@@ -0,0 +1 @@
+ export * from './l1_tx_utils.js';