ponder 0.9.2 → 0.9.4-debug.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/ponder.js +2470 -3762
- package/dist/bin/ponder.js.map +1 -1
- package/dist/chunk-6AOFLZJ4.js +1692 -0
- package/dist/chunk-6AOFLZJ4.js.map +1 -0
- package/dist/chunk-DZFRP3KH.js +70 -0
- package/dist/chunk-DZFRP3KH.js.map +1 -0
- package/dist/{chunk-IFTUFVCL.js → chunk-MJKRYIBO.js} +3 -73
- package/dist/chunk-MJKRYIBO.js.map +1 -0
- package/dist/db-in86nyw7.d.ts +625 -0
- package/dist/experimental_unsafe_stores.d.ts +375 -0
- package/dist/experimental_unsafe_stores.js +11 -0
- package/dist/experimental_unsafe_stores.js.map +1 -0
- package/dist/index.d.ts +17 -429
- package/dist/index.js +4 -2
- package/dist/index.js.map +1 -1
- package/package.json +5 -1
- package/src/bin/commands/codegen.ts +8 -10
- package/src/bin/commands/dev.ts +30 -42
- package/src/bin/commands/list.ts +9 -14
- package/src/bin/commands/serve.ts +26 -39
- package/src/bin/commands/start.ts +29 -42
- package/src/bin/utils/{shutdown.ts → exit.ts} +23 -37
- package/src/bin/utils/run.ts +275 -175
- package/src/bin/utils/runServer.ts +1 -5
- package/src/build/configAndIndexingFunctions.ts +547 -512
- package/src/build/index.ts +5 -8
- package/src/build/pre.ts +3 -0
- package/src/config/index.ts +9 -6
- package/src/database/index.ts +72 -72
- package/src/drizzle/kit/index.ts +3 -3
- package/src/experimental_unsafe_stores.ts +4 -0
- package/src/indexing/index.ts +0 -4
- package/src/indexing/service.ts +31 -93
- package/src/indexing-store/historical.ts +2 -4
- package/src/internal/common.ts +2 -0
- package/src/internal/errors.ts +9 -9
- package/src/internal/logger.ts +1 -1
- package/src/internal/metrics.ts +75 -103
- package/src/internal/shutdown.ts +25 -0
- package/src/internal/telemetry.ts +16 -18
- package/src/internal/types.ts +9 -1
- package/src/server/index.ts +3 -5
- package/src/sync/events.ts +4 -4
- package/src/sync/filter.ts +1 -0
- package/src/sync/index.ts +1046 -805
- package/src/sync-historical/index.ts +0 -37
- package/src/sync-realtime/index.ts +48 -48
- package/src/sync-store/encoding.ts +5 -5
- package/src/sync-store/index.ts +5 -23
- package/src/ui/index.ts +2 -11
- package/src/utils/checkpoint.ts +17 -3
- package/src/utils/chunk.ts +7 -0
- package/src/utils/generators.ts +66 -0
- package/src/utils/mutex.ts +34 -0
- package/src/utils/partition.ts +41 -0
- package/src/utils/requestQueue.ts +19 -10
- package/src/utils/zipper.ts +80 -0
- package/dist/chunk-IFTUFVCL.js.map +0 -1
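Two of the new utility modules, src/utils/generators.ts (+66) and src/utils/mutex.ts (+34), underpin the reworked sync loop in src/sync/index.ts below: `mergeAsyncGenerators` interleaves the per-network event generators, and `bufferAsyncGenerator` lets each network's extraction run ahead of its consumer by a bounded number of batches (see `bufferAsyncGenerator(localEventGenerator, 1)` in the diff). Neither file's contents appear in this diff, so the following TypeScript is a minimal sketch of the technique those names and call sites suggest, not the package's actual implementation:

// Sketch only: src/utils/generators.ts is not included in this diff.
// Interleave several async generators, yielding each value as soon as
// any underlying generator produces one.
export async function* mergeAsyncGenerators<T>(
  generators: AsyncGenerator<T>[],
): AsyncGenerator<T> {
  // Track one in-flight `next()` promise per generator, tagged with its index.
  const nextOf = (index: number) =>
    generators[index]!.next().then((result) => ({ result, index }));
  const pending = new Map(generators.map((_, i) => [i, nextOf(i)] as const));

  while (pending.size > 0) {
    // Yield from whichever generator produces a value first.
    const { result, index } = await Promise.race(pending.values());
    if (result.done) {
      pending.delete(index);
    } else {
      pending.set(index, nextOf(index));
      yield result.value;
    }
  }
}

// Sketch only. Let a producer run up to `size` results ahead of its consumer.
export async function* bufferAsyncGenerator<T>(
  generator: AsyncGenerator<T>,
  size: number,
): AsyncGenerator<T> {
  const buffer: T[] = [];
  let done = false;
  let failure: unknown;
  let wakeConsumer: (() => void) | undefined;
  let wakeProducer: (() => void) | undefined;

  // Producer: pull from the source, pausing whenever the buffer is full.
  (async () => {
    try {
      for await (const value of generator) {
        buffer.push(value);
        wakeConsumer?.();
        if (buffer.length >= size) {
          await new Promise<void>((resolve) => (wakeProducer = resolve));
        }
      }
    } catch (error) {
      failure = error;
    } finally {
      done = true;
      wakeConsumer?.();
    }
  })();

  // Consumer: drain the buffer, waking the producer as space frees up.
  while (true) {
    if (buffer.length > 0) {
      const value = buffer.shift()!;
      wakeProducer?.();
      wakeProducer = undefined;
      yield value;
    } else if (done) {
      if (failure !== undefined) throw failure;
      return;
    } else {
      await new Promise<void>((resolve) => (wakeConsumer = resolve));
      wakeConsumer = undefined;
    }
  }
}

In the new `getEvents`, the buffered per-network generators are combined with `mergeAsyncGenerators` under multichain ordering or `mergeAsyncGeneratorsWithEventOrder` under omnichain ordering; the other new helpers (`partition`, `zipperMany`) are imported by the same module.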
package/src/sync/index.ts
CHANGED
|
@@ -1,9 +1,11 @@
|
|
|
1
1
|
import type { Common } from "@/internal/common.js";
|
|
2
2
|
import type {
|
|
3
3
|
Factory,
|
|
4
|
+
Filter,
|
|
4
5
|
IndexingBuild,
|
|
5
6
|
Network,
|
|
6
7
|
RawEvent,
|
|
8
|
+
Seconds,
|
|
7
9
|
Source,
|
|
8
10
|
Status,
|
|
9
11
|
} from "@/internal/types.js";
|
|
@@ -20,14 +22,19 @@ import type { SyncStore } from "@/sync-store/index.js";
|
|
|
20
22
|
import type { LightBlock, SyncBlock } from "@/types/sync.js";
|
|
21
23
|
import {
|
|
22
24
|
type Checkpoint,
|
|
25
|
+
MAX_CHECKPOINT,
|
|
26
|
+
ZERO_CHECKPOINT,
|
|
27
|
+
ZERO_CHECKPOINT_STRING,
|
|
23
28
|
decodeCheckpoint,
|
|
24
29
|
encodeCheckpoint,
|
|
25
|
-
|
|
26
|
-
zeroCheckpoint,
|
|
30
|
+
min,
|
|
27
31
|
} from "@/utils/checkpoint.js";
|
|
28
32
|
import { estimate } from "@/utils/estimate.js";
|
|
29
|
-
import {
|
|
30
|
-
import {
|
|
33
|
+
import { formatPercentage } from "@/utils/format.js";
|
|
34
|
+
import {
|
|
35
|
+
bufferAsyncGenerator,
|
|
36
|
+
mergeAsyncGenerators,
|
|
37
|
+
} from "@/utils/generators.js";
|
|
31
38
|
import {
|
|
32
39
|
type Interval,
|
|
33
40
|
intervalDifference,
|
|
@@ -37,31 +44,23 @@ import {
|
|
|
37
44
|
sortIntervals,
|
|
38
45
|
} from "@/utils/interval.js";
|
|
39
46
|
import { intervalUnion } from "@/utils/interval.js";
|
|
47
|
+
import { createMutex } from "@/utils/mutex.js";
|
|
40
48
|
import { never } from "@/utils/never.js";
|
|
41
|
-
import {
|
|
49
|
+
import { partition } from "@/utils/partition.js";
|
|
50
|
+
import type { RequestQueue } from "@/utils/requestQueue.js";
|
|
51
|
+
import { _eth_getBlockByNumber } from "@/utils/rpc.js";
|
|
42
52
|
import { startClock } from "@/utils/timer.js";
|
|
43
|
-
import {
|
|
44
|
-
import {
|
|
45
|
-
type Address,
|
|
46
|
-
type Hash,
|
|
47
|
-
type Transport,
|
|
48
|
-
hexToBigInt,
|
|
49
|
-
hexToNumber,
|
|
50
|
-
toHex,
|
|
51
|
-
} from "viem";
|
|
52
|
-
import { _eth_getBlockByNumber } from "../utils/rpc.js";
|
|
53
|
+
import { zipperMany } from "@/utils/zipper.js";
|
|
54
|
+
import { type Address, type Hash, hexToBigInt, hexToNumber, toHex } from "viem";
|
|
53
55
|
import { buildEvents } from "./events.js";
|
|
54
56
|
import { isAddressFactory } from "./filter.js";
|
|
55
|
-
import { cachedTransport } from "./transport.js";
|
|
56
57
|
|
|
57
58
|
export type Sync = {
|
|
58
|
-
getEvents(): AsyncGenerator<
|
|
59
|
+
getEvents(): AsyncGenerator<RawEvent[]>;
|
|
59
60
|
startRealtime(): Promise<void>;
|
|
60
61
|
getStatus(): Status;
|
|
61
|
-
|
|
62
|
+
seconds: Seconds;
|
|
62
63
|
getFinalizedCheckpoint(): string;
|
|
63
|
-
getCachedTransport(network: Network): Transport;
|
|
64
|
-
kill(): Promise<void>;
|
|
65
64
|
};
|
|
66
65
|
|
|
67
66
|
export type RealtimeEvent =
|
|
@@ -70,20 +69,22 @@ export type RealtimeEvent =
|
|
|
70
69
|
checkpoint: string;
|
|
71
70
|
status: Status;
|
|
72
71
|
events: RawEvent[];
|
|
72
|
+
network: Network;
|
|
73
73
|
}
|
|
74
74
|
| {
|
|
75
75
|
type: "reorg";
|
|
76
76
|
checkpoint: string;
|
|
77
|
+
network: Network;
|
|
77
78
|
}
|
|
78
79
|
| {
|
|
79
80
|
type: "finalize";
|
|
80
81
|
checkpoint: string;
|
|
82
|
+
network: Network;
|
|
81
83
|
};
|
|
82
84
|
|
|
83
85
|
export type SyncProgress = {
|
|
84
86
|
start: SyncBlock | LightBlock;
|
|
85
87
|
end: SyncBlock | LightBlock | undefined;
|
|
86
|
-
cached: SyncBlock | LightBlock | undefined;
|
|
87
88
|
current: SyncBlock | LightBlock | undefined;
|
|
88
89
|
finalized: SyncBlock | LightBlock;
|
|
89
90
|
};
|
|
@@ -107,7 +108,7 @@ export const blockToCheckpoint = (
|
|
|
107
108
|
rounding: "up" | "down",
|
|
108
109
|
): Checkpoint => {
|
|
109
110
|
return {
|
|
110
|
-
...(rounding === "up" ?
|
|
111
|
+
...(rounding === "up" ? MAX_CHECKPOINT : ZERO_CHECKPOINT),
|
|
111
112
|
blockTimestamp: hexToNumber(block.timestamp),
|
|
112
113
|
chainId: BigInt(chainId),
|
|
113
114
|
blockNumber: hexToBigInt(block.number),
|
|
@@ -153,34 +154,24 @@ const getHistoricalLast = (
|
|
|
153
154
|
: syncProgress.end;
|
|
154
155
|
};
|
|
155
156
|
|
|
156
|
-
/** Compute the minimum checkpoint, filtering out undefined */
|
|
157
|
-
const min = (...checkpoints: (string | undefined)[]) => {
|
|
158
|
-
return checkpoints.reduce((acc, cur) => {
|
|
159
|
-
if (cur === undefined) return acc;
|
|
160
|
-
if (acc === undefined) return cur;
|
|
161
|
-
if (acc < cur) return acc;
|
|
162
|
-
return cur;
|
|
163
|
-
})!;
|
|
164
|
-
};
|
|
165
|
-
|
|
166
157
|
export const splitEvents = (
|
|
167
158
|
events: RawEvent[],
|
|
168
159
|
): { checkpoint: string; events: RawEvent[] }[] => {
|
|
169
|
-
let
|
|
160
|
+
let hash: Hash | undefined;
|
|
170
161
|
const result: { checkpoint: string; events: RawEvent[] }[] = [];
|
|
171
162
|
|
|
172
163
|
for (const event of events) {
|
|
173
|
-
if (
|
|
164
|
+
if (hash === undefined || hash !== event.block.hash) {
|
|
174
165
|
result.push({
|
|
175
166
|
checkpoint: encodeCheckpoint({
|
|
176
|
-
...
|
|
167
|
+
...MAX_CHECKPOINT,
|
|
177
168
|
blockTimestamp: Number(event.block.timestamp),
|
|
178
169
|
chainId: BigInt(event.chainId),
|
|
179
170
|
blockNumber: event.block.number,
|
|
180
171
|
}),
|
|
181
172
|
events: [],
|
|
182
173
|
});
|
|
183
|
-
|
|
174
|
+
hash = event.block.hash;
|
|
184
175
|
}
|
|
185
176
|
|
|
186
177
|
result[result.length - 1]!.events.push(event);
|
|
@@ -219,153 +210,40 @@ export const getChainCheckpoint = ({
|
|
|
219
210
|
);
|
|
220
211
|
};
|
|
221
212
|
|
|
222
|
-
|
|
213
|
+
export const createSync = async (params: {
|
|
223
214
|
common: Common;
|
|
224
215
|
indexingBuild: Pick<IndexingBuild, "sources" | "networks">;
|
|
216
|
+
requestQueues: RequestQueue[];
|
|
225
217
|
syncStore: SyncStore;
|
|
226
218
|
onRealtimeEvent(event: RealtimeEvent): Promise<void>;
|
|
227
219
|
onFatalError(error: Error): void;
|
|
228
220
|
initialCheckpoint: string;
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
export const createSync = async (args: CreateSyncParameters): Promise<Sync> => {
|
|
221
|
+
ordering: "omnichain" | "multichain";
|
|
222
|
+
}): Promise<Sync> => {
|
|
232
223
|
const perNetworkSync = new Map<
|
|
233
224
|
Network,
|
|
234
225
|
{
|
|
235
|
-
requestQueue: RequestQueue;
|
|
236
226
|
syncProgress: SyncProgress;
|
|
237
227
|
historicalSync: HistoricalSync;
|
|
238
228
|
realtimeSync: RealtimeSync;
|
|
239
|
-
realtimeQueue: Queue<void, RealtimeSyncEvent>;
|
|
240
|
-
unfinalizedBlocks: Omit<
|
|
241
|
-
Extract<RealtimeSyncEvent, { type: "block" }>,
|
|
242
|
-
"type"
|
|
243
|
-
>[];
|
|
244
229
|
}
|
|
245
230
|
>();
|
|
246
|
-
/** Events that have been executed but not finalized. */
|
|
247
|
-
let executedEvents: RawEvent[] = [];
|
|
248
|
-
/** Events that have not been executed yet. */
|
|
249
|
-
let pendingEvents: RawEvent[] = [];
|
|
250
|
-
const status: Status = {};
|
|
251
|
-
let isKilled = false;
|
|
252
|
-
// Realtime events across all chains that can't be passed to the parent function
|
|
253
|
-
// because the overall checkpoint hasn't caught up to the events yet.
|
|
254
|
-
|
|
255
|
-
// Instantiate `localSyncData` and `status`
|
|
256
|
-
await Promise.all(
|
|
257
|
-
args.indexingBuild.networks.map(async (network) => {
|
|
258
|
-
const requestQueue = createRequestQueue({
|
|
259
|
-
network,
|
|
260
|
-
common: args.common,
|
|
261
|
-
});
|
|
262
|
-
const sources = args.indexingBuild.sources.filter(
|
|
263
|
-
({ filter }) => filter.chainId === network.chainId,
|
|
264
|
-
);
|
|
265
|
-
|
|
266
|
-
const { start, end, finalized } = await syncDiagnostic({
|
|
267
|
-
common: args.common,
|
|
268
|
-
sources,
|
|
269
|
-
requestQueue,
|
|
270
|
-
network,
|
|
271
|
-
});
|
|
272
|
-
|
|
273
|
-
// Invalidate sync cache for devnet sources
|
|
274
|
-
if (network.disableCache) {
|
|
275
|
-
args.common.logger.warn({
|
|
276
|
-
service: "sync",
|
|
277
|
-
msg: `Deleting cache records for '${network.name}' from block ${hexToNumber(start.number)}`,
|
|
278
|
-
});
|
|
279
|
-
|
|
280
|
-
await args.syncStore.pruneByChain({
|
|
281
|
-
fromBlock: hexToNumber(start.number),
|
|
282
|
-
chainId: network.chainId,
|
|
283
|
-
});
|
|
284
|
-
}
|
|
285
|
-
|
|
286
|
-
const historicalSync = await createHistoricalSync({
|
|
287
|
-
common: args.common,
|
|
288
|
-
sources,
|
|
289
|
-
syncStore: args.syncStore,
|
|
290
|
-
requestQueue,
|
|
291
|
-
network,
|
|
292
|
-
onFatalError: args.onFatalError,
|
|
293
|
-
});
|
|
294
|
-
|
|
295
|
-
const realtimeQueue = createQueue({
|
|
296
|
-
initialStart: true,
|
|
297
|
-
browser: false,
|
|
298
|
-
concurrency: 1,
|
|
299
|
-
worker: async (event: RealtimeSyncEvent) =>
|
|
300
|
-
onRealtimeSyncEvent({ event, network }),
|
|
301
|
-
});
|
|
302
|
-
|
|
303
|
-
const realtimeSync = createRealtimeSync({
|
|
304
|
-
common: args.common,
|
|
305
|
-
sources,
|
|
306
|
-
requestQueue,
|
|
307
|
-
network,
|
|
308
|
-
onEvent: (event) =>
|
|
309
|
-
realtimeQueue.add(event).catch((error) => {
|
|
310
|
-
args.common.logger.error({
|
|
311
|
-
service: "sync",
|
|
312
|
-
msg: `Fatal error: Unable to process ${event.type} event`,
|
|
313
|
-
error,
|
|
314
|
-
});
|
|
315
|
-
args.onFatalError(error);
|
|
316
|
-
}),
|
|
317
|
-
onFatalError: args.onFatalError,
|
|
318
|
-
});
|
|
319
|
-
|
|
320
|
-
const cached = await getCachedBlock({
|
|
321
|
-
sources,
|
|
322
|
-
requestQueue,
|
|
323
|
-
historicalSync,
|
|
324
|
-
});
|
|
325
|
-
|
|
326
|
-
// Update "ponder_sync_block" metric
|
|
327
|
-
if (cached !== undefined) {
|
|
328
|
-
args.common.metrics.ponder_sync_block.set(
|
|
329
|
-
{ network: network.name },
|
|
330
|
-
hexToNumber(cached.number),
|
|
331
|
-
);
|
|
332
|
-
}
|
|
333
|
-
|
|
334
|
-
const syncProgress: SyncProgress = {
|
|
335
|
-
start,
|
|
336
|
-
end,
|
|
337
|
-
finalized,
|
|
338
|
-
cached,
|
|
339
|
-
current: cached,
|
|
340
|
-
};
|
|
341
|
-
|
|
342
|
-
args.common.metrics.ponder_sync_is_realtime.set(
|
|
343
|
-
{ network: network.name },
|
|
344
|
-
0,
|
|
345
|
-
);
|
|
346
|
-
args.common.metrics.ponder_sync_is_complete.set(
|
|
347
|
-
{ network: network.name },
|
|
348
|
-
0,
|
|
349
|
-
);
|
|
350
231
|
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
}),
|
|
361
|
-
);
|
|
232
|
+
const getMultichainCheckpoint = ({
|
|
233
|
+
tag,
|
|
234
|
+
network,
|
|
235
|
+
}: { tag: "start" | "end" | "current" | "finalized"; network: Network }):
|
|
236
|
+
| string
|
|
237
|
+
| undefined => {
|
|
238
|
+
const syncProgress = perNetworkSync.get(network)!.syncProgress;
|
|
239
|
+
return getChainCheckpoint({ syncProgress, network, tag });
|
|
240
|
+
};
|
|
362
241
|
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
): string | undefined => {
|
|
242
|
+
const getOmnichainCheckpoint = ({
|
|
243
|
+
tag,
|
|
244
|
+
}: { tag: "start" | "end" | "current" | "finalized" }):
|
|
245
|
+
| string
|
|
246
|
+
| undefined => {
|
|
369
247
|
const checkpoints = Array.from(perNetworkSync.entries()).map(
|
|
370
248
|
([network, { syncProgress }]) =>
|
|
371
249
|
getChainCheckpoint({ syncProgress, network, tag }),
|
|
@@ -392,30 +270,29 @@ export const createSync = async (args: CreateSyncParameters): Promise<Sync> => {
|
|
|
392
270
|
timestamp: decodeCheckpoint(checkpoint).blockTimestamp,
|
|
393
271
|
number: Number(decodeCheckpoint(checkpoint).blockNumber),
|
|
394
272
|
};
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
while (i >= 0) {
|
|
398
|
-
const event = events[i]!;
|
|
273
|
+
return;
|
|
274
|
+
}
|
|
399
275
|
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
number: Number(decodeCheckpoint(event.checkpoint).blockNumber),
|
|
404
|
-
};
|
|
405
|
-
}
|
|
276
|
+
let i = events.length - 1;
|
|
277
|
+
while (i >= 0) {
|
|
278
|
+
const event = events[i]!;
|
|
406
279
|
|
|
407
|
-
|
|
280
|
+
if (network.chainId === event.chainId) {
|
|
281
|
+
status[network.name]!.block = {
|
|
282
|
+
timestamp: decodeCheckpoint(event.checkpoint).blockTimestamp,
|
|
283
|
+
number: Number(decodeCheckpoint(event.checkpoint).blockNumber),
|
|
284
|
+
};
|
|
285
|
+
return;
|
|
408
286
|
}
|
|
287
|
+
|
|
288
|
+
i--;
|
|
409
289
|
}
|
|
410
290
|
};
|
|
411
291
|
|
|
412
292
|
const updateRealtimeStatus = ({
|
|
413
293
|
checkpoint,
|
|
414
294
|
network,
|
|
415
|
-
}: {
|
|
416
|
-
checkpoint: string;
|
|
417
|
-
network: Network;
|
|
418
|
-
}) => {
|
|
295
|
+
}: { checkpoint: string; network: Network }) => {
|
|
419
296
|
const localBlock = perNetworkSync
|
|
420
297
|
.get(network)!
|
|
421
298
|
.realtimeSync.unfinalizedBlocks.findLast(
|
|
@@ -431,483 +308,501 @@ export const createSync = async (args: CreateSyncParameters): Promise<Sync> => {
|
|
|
431
308
|
}
|
|
432
309
|
};
|
|
433
310
|
|
|
434
|
-
/**
|
|
435
|
-
* Estimate optimal range (seconds) to query at a time, eventually
|
|
436
|
-
* used to determine `to` passed to `getEvents`
|
|
437
|
-
*/
|
|
438
|
-
let estimateSeconds = 1_000;
|
|
439
|
-
/**
|
|
440
|
-
* Omnichain `getEvents`
|
|
441
|
-
*
|
|
442
|
-
* Extract all events across `args.networks` ordered by checkpoint.
|
|
443
|
-
* The generator is "completed" when all event have been extracted
|
|
444
|
-
* before the minimum finalized checkpoint (supremum).
|
|
445
|
-
*
|
|
446
|
-
* Note: `syncStore.getEvents` is used to order between multiple
|
|
447
|
-
* networks. This approach is not future proof.
|
|
448
|
-
*/
|
|
449
311
|
async function* getEvents() {
|
|
450
|
-
let
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
: getOmnichainCheckpoint("start")!;
|
|
460
|
-
|
|
461
|
-
// Cursor used to track progress.
|
|
462
|
-
let from = start;
|
|
463
|
-
|
|
464
|
-
let showLogs = true;
|
|
465
|
-
while (true) {
|
|
466
|
-
const syncGenerator = mergeAsyncGenerators(
|
|
467
|
-
Array.from(perNetworkSync.entries()).map(
|
|
468
|
-
([network, { syncProgress, historicalSync }]) =>
|
|
469
|
-
localHistoricalSyncGenerator({
|
|
470
|
-
common: args.common,
|
|
471
|
-
network,
|
|
472
|
-
syncProgress,
|
|
473
|
-
historicalSync,
|
|
474
|
-
showLogs,
|
|
475
|
-
}),
|
|
476
|
-
),
|
|
477
|
-
);
|
|
478
|
-
|
|
479
|
-
// Only show logs on the first iteration
|
|
480
|
-
showLogs = false;
|
|
481
|
-
|
|
482
|
-
for await (const _ of syncGenerator) {
|
|
483
|
-
/**
|
|
484
|
-
* `current` is used to calculate the `to` checkpoint, if any
|
|
485
|
-
* network hasn't yet ingested a block, run another iteration of this loop.
|
|
486
|
-
* It is an invariant that `latestBlock` will eventually be defined.
|
|
487
|
-
*/
|
|
488
|
-
if (
|
|
489
|
-
Array.from(perNetworkSync.values()).some(
|
|
490
|
-
({ syncProgress }) => syncProgress.current === undefined,
|
|
491
|
-
)
|
|
492
|
-
) {
|
|
493
|
-
continue;
|
|
494
|
-
}
|
|
312
|
+
let cursor =
|
|
313
|
+
params.initialCheckpoint !== ZERO_CHECKPOINT_STRING
|
|
314
|
+
? params.initialCheckpoint
|
|
315
|
+
: getOmnichainCheckpoint({ tag: "start" })!;
|
|
316
|
+
|
|
317
|
+
const to = min(
|
|
318
|
+
getOmnichainCheckpoint({ tag: "end" }),
|
|
319
|
+
getOmnichainCheckpoint({ tag: "finalized" }),
|
|
320
|
+
);
|
|
495
321
|
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
getOmnichainCheckpoint("current"),
|
|
322
|
+
const eventGenerators = Array.from(perNetworkSync.entries()).map(
|
|
323
|
+
([network, { syncProgress, historicalSync }]) => {
|
|
324
|
+
const sources = params.indexingBuild.sources.filter(
|
|
325
|
+
({ filter }) => filter.chainId === network.chainId,
|
|
501
326
|
);
|
|
502
327
|
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
while (true) {
|
|
510
|
-
if (isKilled) return;
|
|
511
|
-
if (from >= to) break;
|
|
512
|
-
const getEventsMaxBatchSize = args.common.options.syncEventsQuerySize;
|
|
513
|
-
|
|
514
|
-
// convert `estimateSeconds` to checkpoint
|
|
515
|
-
const estimatedTo = encodeCheckpoint({
|
|
516
|
-
...zeroCheckpoint,
|
|
517
|
-
blockTimestamp: Math.min(
|
|
518
|
-
decodeCheckpoint(from).blockTimestamp + estimateSeconds,
|
|
519
|
-
maxCheckpoint.blockTimestamp,
|
|
520
|
-
),
|
|
521
|
-
});
|
|
522
|
-
|
|
523
|
-
try {
|
|
524
|
-
const { events, cursor } = await args.syncStore.getEvents({
|
|
525
|
-
filters: args.indexingBuild.sources.map(({ filter }) => filter),
|
|
526
|
-
from,
|
|
527
|
-
to: to < estimatedTo ? to : estimatedTo,
|
|
528
|
-
limit: getEventsMaxBatchSize,
|
|
529
|
-
});
|
|
530
|
-
|
|
531
|
-
args.common.logger.debug({
|
|
532
|
-
service: "sync",
|
|
533
|
-
msg: `Fetched ${events.length} events from the database for a ${formatEta(estimateSeconds * 1000)} range from timestamp ${decodeCheckpoint(from).blockTimestamp}`,
|
|
534
|
-
});
|
|
535
|
-
|
|
536
|
-
for (const network of args.indexingBuild.networks) {
|
|
537
|
-
updateHistoricalStatus({ events, checkpoint: cursor, network });
|
|
538
|
-
}
|
|
539
|
-
|
|
540
|
-
estimateSeconds = estimate({
|
|
541
|
-
from: decodeCheckpoint(from).blockTimestamp,
|
|
542
|
-
to: decodeCheckpoint(cursor).blockTimestamp,
|
|
543
|
-
target: getEventsMaxBatchSize,
|
|
544
|
-
result: events.length,
|
|
545
|
-
min: 10,
|
|
546
|
-
max: 86_400,
|
|
547
|
-
prev: estimateSeconds,
|
|
548
|
-
maxIncrease: 1.08,
|
|
549
|
-
});
|
|
328
|
+
const localSyncGenerator = getLocalSyncGenerator({
|
|
329
|
+
common: params.common,
|
|
330
|
+
network,
|
|
331
|
+
syncProgress,
|
|
332
|
+
historicalSync,
|
|
333
|
+
});
|
|
550
334
|
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
335
|
+
const localEventGenerator = getLocalEventGenerator({
|
|
336
|
+
common: params.common,
|
|
337
|
+
network,
|
|
338
|
+
syncStore: params.syncStore,
|
|
339
|
+
sources,
|
|
340
|
+
localSyncGenerator,
|
|
341
|
+
from:
|
|
342
|
+
params.initialCheckpoint !== ZERO_CHECKPOINT_STRING
|
|
343
|
+
? params.initialCheckpoint
|
|
344
|
+
: getChainCheckpoint({ syncProgress, network, tag: "start" })!,
|
|
345
|
+
to,
|
|
346
|
+
limit: Math.round(
|
|
347
|
+
params.common.options.syncEventsQuerySize /
|
|
348
|
+
(params.indexingBuild.networks.length * 2),
|
|
349
|
+
),
|
|
350
|
+
});
|
|
557
351
|
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
});
|
|
352
|
+
return bufferAsyncGenerator(localEventGenerator, 1);
|
|
353
|
+
},
|
|
354
|
+
);
|
|
562
355
|
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
356
|
+
const mergeAsync =
|
|
357
|
+
params.ordering === "multichain"
|
|
358
|
+
? mergeAsyncGenerators
|
|
359
|
+
: mergeAsyncGeneratorsWithEventOrder;
|
|
360
|
+
|
|
361
|
+
for await (const { events, checkpoint } of mergeAsync(eventGenerators)) {
|
|
362
|
+
if (params.ordering === "multichain") {
|
|
363
|
+
const network = params.indexingBuild.networks.find(
|
|
364
|
+
(network) =>
|
|
365
|
+
network.chainId === Number(decodeCheckpoint(checkpoint).chainId),
|
|
366
|
+
)!;
|
|
367
|
+
params.common.logger.debug({
|
|
368
|
+
service: "sync",
|
|
369
|
+
msg: `Sequenced ${events.length} '${network.name}' events for timestamp range [${decodeCheckpoint(cursor).blockTimestamp}, ${decodeCheckpoint(checkpoint).blockTimestamp}]`,
|
|
370
|
+
});
|
|
371
|
+
} else {
|
|
372
|
+
params.common.logger.debug({
|
|
373
|
+
service: "sync",
|
|
374
|
+
msg: `Sequenced ${events.length} events for timestamp range [${decodeCheckpoint(cursor).blockTimestamp}, ${decodeCheckpoint(checkpoint).blockTimestamp}]`,
|
|
375
|
+
});
|
|
566
376
|
}
|
|
567
377
|
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
const staleSeconds = (Date.now() - latestFinalizedFetch) / 1_000;
|
|
576
|
-
if (staleSeconds <= args.common.options.syncHandoffStaleSeconds) {
|
|
577
|
-
return true;
|
|
578
|
-
}
|
|
579
|
-
|
|
580
|
-
return false;
|
|
581
|
-
});
|
|
582
|
-
|
|
583
|
-
if (allHistoricalSyncExhaustive) break;
|
|
584
|
-
|
|
585
|
-
/** At least one network has a `finalized` block that is considered "stale". */
|
|
586
|
-
|
|
587
|
-
latestFinalizedFetch = Date.now();
|
|
588
|
-
|
|
589
|
-
await Promise.all(
|
|
590
|
-
Array.from(perNetworkSync.entries()).map(
|
|
591
|
-
async ([network, { requestQueue, syncProgress }]) => {
|
|
592
|
-
args.common.logger.debug({
|
|
593
|
-
service: "sync",
|
|
594
|
-
msg: `Refetching '${network.name}' finalized block`,
|
|
595
|
-
});
|
|
596
|
-
|
|
597
|
-
const latestBlock = await _eth_getBlockByNumber(requestQueue, {
|
|
598
|
-
blockTag: "latest",
|
|
599
|
-
});
|
|
600
|
-
|
|
601
|
-
const finalizedBlockNumber = Math.max(
|
|
602
|
-
0,
|
|
603
|
-
hexToNumber(latestBlock.number) - network.finalityBlockCount,
|
|
604
|
-
);
|
|
378
|
+
for (const network of params.indexingBuild.networks) {
|
|
379
|
+
updateHistoricalStatus({ events, checkpoint, network });
|
|
380
|
+
}
|
|
381
|
+
yield events;
|
|
382
|
+
cursor = checkpoint;
|
|
383
|
+
}
|
|
384
|
+
}
|
|
605
385
|
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
386
|
+
/** Events that have been executed but not finalized. */
|
|
387
|
+
let executedEvents: RawEvent[] = [];
|
|
388
|
+
/** Events that have not been executed. */
|
|
389
|
+
let pendingEvents: RawEvent[] = [];
|
|
609
390
|
|
|
610
|
-
|
|
391
|
+
const realtimeMutex = createMutex();
|
|
611
392
|
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
1,
|
|
618
|
-
);
|
|
619
|
-
},
|
|
620
|
-
),
|
|
621
|
-
);
|
|
622
|
-
}
|
|
623
|
-
}
|
|
393
|
+
const checkpoints = {
|
|
394
|
+
// Note: `checkpoints.current` not used in multichain ordering
|
|
395
|
+
current: ZERO_CHECKPOINT_STRING,
|
|
396
|
+
finalized: ZERO_CHECKPOINT_STRING,
|
|
397
|
+
};
|
|
624
398
|
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
*
|
|
628
|
-
* Handle callback events across all `args.networks`, and raising these
|
|
629
|
-
* events to `args.onRealtimeEvent` while maintaining checkpoint ordering.
|
|
630
|
-
*/
|
|
631
|
-
const onRealtimeSyncEvent = async ({
|
|
632
|
-
network,
|
|
633
|
-
event,
|
|
634
|
-
}: { network: Network; event: RealtimeSyncEvent }) => {
|
|
635
|
-
const { syncProgress, realtimeSync, unfinalizedBlocks } =
|
|
636
|
-
perNetworkSync.get(network)!;
|
|
399
|
+
// Note: `latencyTimers` not used in multichain ordering
|
|
400
|
+
const latencyTimers = new Map<string, () => number>();
|
|
637
401
|
|
|
402
|
+
const onRealtimeSyncEvent = (
|
|
403
|
+
event: RealtimeSyncEvent,
|
|
404
|
+
{
|
|
405
|
+
network,
|
|
406
|
+
syncProgress,
|
|
407
|
+
realtimeSync,
|
|
408
|
+
}: {
|
|
409
|
+
network: Network;
|
|
410
|
+
syncProgress: SyncProgress;
|
|
411
|
+
realtimeSync: RealtimeSync;
|
|
412
|
+
},
|
|
413
|
+
): void => {
|
|
638
414
|
switch (event.type) {
|
|
639
|
-
/**
|
|
640
|
-
* Handle a new block being ingested.
|
|
641
|
-
*/
|
|
642
415
|
case "block": {
|
|
643
|
-
|
|
644
|
-
|
|
645
|
-
syncProgress.current = event.block;
|
|
646
|
-
const to = getOmnichainCheckpoint("current")!;
|
|
647
|
-
|
|
648
|
-
// Update "ponder_sync_block" metric
|
|
649
|
-
args.common.metrics.ponder_sync_block.set(
|
|
650
|
-
{ network: network.name },
|
|
651
|
-
hexToNumber(syncProgress.current.number),
|
|
652
|
-
);
|
|
653
|
-
|
|
654
|
-
const newEvents = buildEvents({
|
|
655
|
-
sources: args.indexingBuild.sources,
|
|
416
|
+
const events = buildEvents({
|
|
417
|
+
sources: params.indexingBuild.sources,
|
|
656
418
|
chainId: network.chainId,
|
|
657
419
|
blockWithEventData: event,
|
|
658
420
|
finalizedChildAddresses: realtimeSync.finalizedChildAddresses,
|
|
659
421
|
unfinalizedChildAddresses: realtimeSync.unfinalizedChildAddresses,
|
|
660
422
|
});
|
|
661
423
|
|
|
662
|
-
|
|
663
|
-
|
|
424
|
+
params.common.logger.debug({
|
|
425
|
+
service: "sync",
|
|
426
|
+
msg: `Extracted ${events.length} '${network.name}' events for block ${hexToNumber(event.block.number)}`,
|
|
427
|
+
});
|
|
664
428
|
|
|
665
|
-
if (
|
|
666
|
-
|
|
667
|
-
|
|
668
|
-
|
|
429
|
+
if (params.ordering === "multichain") {
|
|
430
|
+
// Note: `checkpoints.current` not used in multichain ordering
|
|
431
|
+
const checkpoint = getMultichainCheckpoint({
|
|
432
|
+
tag: "current",
|
|
433
|
+
network,
|
|
434
|
+
})!;
|
|
669
435
|
|
|
670
|
-
|
|
436
|
+
status[network.name]!.block = {
|
|
437
|
+
timestamp: hexToNumber(event.block.timestamp),
|
|
438
|
+
number: hexToNumber(event.block.number),
|
|
439
|
+
};
|
|
671
440
|
|
|
672
|
-
const
|
|
673
|
-
|
|
674
|
-
|
|
441
|
+
const readyEvents = events.concat(pendingEvents);
|
|
442
|
+
pendingEvents = [];
|
|
443
|
+
executedEvents = executedEvents.concat(readyEvents);
|
|
675
444
|
|
|
676
|
-
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
445
|
+
params.common.logger.debug({
|
|
446
|
+
service: "sync",
|
|
447
|
+
msg: `Sequenced ${readyEvents.length} '${network.name}' events for block ${hexToNumber(event.block.number)}`,
|
|
448
|
+
});
|
|
680
449
|
|
|
681
|
-
|
|
450
|
+
params
|
|
682
451
|
.onRealtimeEvent({
|
|
683
452
|
type: "block",
|
|
684
|
-
checkpoint
|
|
453
|
+
checkpoint,
|
|
685
454
|
status: structuredClone(status),
|
|
686
|
-
events,
|
|
455
|
+
events: readyEvents.sort((a, b) =>
|
|
456
|
+
a.checkpoint < b.checkpoint ? -1 : 1,
|
|
457
|
+
),
|
|
458
|
+
network,
|
|
687
459
|
})
|
|
688
460
|
.then(() => {
|
|
689
|
-
|
|
690
|
-
|
|
691
|
-
|
|
692
|
-
|
|
693
|
-
|
|
461
|
+
// update `ponder_realtime_latency` metric
|
|
462
|
+
if (event.endClock) {
|
|
463
|
+
params.common.metrics.ponder_realtime_latency.observe(
|
|
464
|
+
{ network: network.name },
|
|
465
|
+
event.endClock(),
|
|
466
|
+
);
|
|
694
467
|
}
|
|
468
|
+
});
|
|
469
|
+
} else {
|
|
470
|
+
const from = checkpoints.current;
|
|
471
|
+
checkpoints.current = getOmnichainCheckpoint({ tag: "current" })!;
|
|
472
|
+
const to = getOmnichainCheckpoint({ tag: "current" })!;
|
|
473
|
+
|
|
474
|
+
if (event.endClock !== undefined) {
|
|
475
|
+
latencyTimers.set(
|
|
476
|
+
encodeCheckpoint(
|
|
477
|
+
blockToCheckpoint(event.block, network.chainId, "up"),
|
|
478
|
+
),
|
|
479
|
+
event.endClock,
|
|
480
|
+
);
|
|
481
|
+
}
|
|
695
482
|
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
|
|
699
|
-
|
|
700
|
-
|
|
701
|
-
|
|
702
|
-
|
|
703
|
-
|
|
704
|
-
|
|
483
|
+
if (to > from) {
|
|
484
|
+
for (const network of params.indexingBuild.networks) {
|
|
485
|
+
updateRealtimeStatus({ checkpoint: to, network });
|
|
486
|
+
}
|
|
487
|
+
|
|
488
|
+
// Move ready events from pending to executed
|
|
489
|
+
|
|
490
|
+
const readyEvents = pendingEvents
|
|
491
|
+
.concat(events)
|
|
492
|
+
.filter(({ checkpoint }) => checkpoint < to);
|
|
493
|
+
pendingEvents = pendingEvents
|
|
494
|
+
.concat(events)
|
|
495
|
+
.filter(({ checkpoint }) => checkpoint > to);
|
|
496
|
+
executedEvents = executedEvents.concat(readyEvents);
|
|
497
|
+
|
|
498
|
+
params.common.logger.debug({
|
|
499
|
+
service: "sync",
|
|
500
|
+
msg: `Sequenced ${readyEvents.length} '${network.name}' events for timestamp range [${decodeCheckpoint(from).blockTimestamp}, ${decodeCheckpoint(to).blockTimestamp}]`,
|
|
501
|
+
});
|
|
502
|
+
|
|
503
|
+
params
|
|
504
|
+
.onRealtimeEvent({
|
|
505
|
+
type: "block",
|
|
506
|
+
checkpoint: to,
|
|
507
|
+
status: structuredClone(status),
|
|
508
|
+
events: readyEvents.sort((a, b) =>
|
|
509
|
+
a.checkpoint < b.checkpoint ? -1 : 1,
|
|
510
|
+
),
|
|
511
|
+
network,
|
|
512
|
+
})
|
|
513
|
+
.then(() => {
|
|
514
|
+
// update `ponder_realtime_latency` metric
|
|
515
|
+
for (const [checkpoint, timer] of latencyTimers) {
|
|
516
|
+
if (checkpoint > from && checkpoint <= to) {
|
|
517
|
+
const chainId = Number(
|
|
518
|
+
decodeCheckpoint(checkpoint).chainId,
|
|
519
|
+
);
|
|
520
|
+
const network = params.indexingBuild.networks.find(
|
|
521
|
+
(network) => network.chainId === chainId,
|
|
522
|
+
)!;
|
|
523
|
+
params.common.metrics.ponder_realtime_latency.observe(
|
|
705
524
|
{ network: network.name },
|
|
706
|
-
|
|
525
|
+
timer(),
|
|
707
526
|
);
|
|
708
527
|
}
|
|
709
528
|
}
|
|
710
|
-
}
|
|
711
|
-
|
|
529
|
+
});
|
|
530
|
+
} else {
|
|
531
|
+
pendingEvents = pendingEvents.concat(events);
|
|
532
|
+
}
|
|
712
533
|
}
|
|
713
534
|
|
|
714
535
|
break;
|
|
715
536
|
}
|
|
716
|
-
/**
|
|
717
|
-
* Handle a new block being finalized.
|
|
718
|
-
*/
|
|
719
|
-
case "finalize": {
|
|
720
|
-
// Newly finalized range
|
|
721
|
-
const interval = [
|
|
722
|
-
hexToNumber(syncProgress.finalized.number),
|
|
723
|
-
hexToNumber(event.block.number),
|
|
724
|
-
] satisfies Interval;
|
|
725
537
|
|
|
726
|
-
|
|
727
|
-
const
|
|
728
|
-
|
|
729
|
-
const
|
|
538
|
+
case "finalize": {
|
|
539
|
+
const from = checkpoints.finalized;
|
|
540
|
+
checkpoints.finalized = getOmnichainCheckpoint({ tag: "finalized" })!;
|
|
541
|
+
const to = getOmnichainCheckpoint({ tag: "finalized" })!;
|
|
730
542
|
|
|
731
543
|
if (
|
|
544
|
+
params.ordering === "omnichain" &&
|
|
732
545
|
getChainCheckpoint({ syncProgress, network, tag: "finalized" })! >
|
|
733
|
-
|
|
546
|
+
getOmnichainCheckpoint({ tag: "current" })!
|
|
734
547
|
) {
|
|
735
|
-
|
|
548
|
+
params.common.logger.warn({
|
|
736
549
|
service: "sync",
|
|
737
|
-
msg: `Finalized
|
|
550
|
+
msg: `Finalized '${network.name}' block has surpassed overall indexing checkpoint`,
|
|
738
551
|
});
|
|
739
552
|
}
|
|
740
553
|
|
|
741
554
|
// Remove all finalized data
|
|
742
555
|
|
|
743
|
-
|
|
744
|
-
({ block }) =>
|
|
745
|
-
hexToNumber(block.number) <= hexToNumber(event.block.number),
|
|
746
|
-
);
|
|
556
|
+
executedEvents = executedEvents.filter((e) => e.checkpoint > to);
|
|
747
557
|
|
|
748
|
-
|
|
749
|
-
|
|
750
|
-
|
|
751
|
-
|
|
752
|
-
|
|
558
|
+
// Raise event to parent function (runtime)
|
|
559
|
+
if (to > from) {
|
|
560
|
+
params.onRealtimeEvent({
|
|
561
|
+
type: "finalize",
|
|
562
|
+
checkpoint: to,
|
|
563
|
+
network,
|
|
564
|
+
});
|
|
565
|
+
}
|
|
566
|
+
|
|
567
|
+
break;
|
|
568
|
+
}
|
|
569
|
+
|
|
570
|
+
case "reorg": {
|
|
571
|
+
// Remove all reorged data
|
|
572
|
+
|
|
573
|
+
let reorgedEvents = 0;
|
|
574
|
+
|
|
575
|
+
const isReorgedEvent = ({ chainId, block }: RawEvent) => {
|
|
576
|
+
if (
|
|
577
|
+
chainId === network.chainId &&
|
|
578
|
+
Number(block.number) > hexToNumber(event.block.number)
|
|
579
|
+
) {
|
|
580
|
+
reorgedEvents++;
|
|
581
|
+
return true;
|
|
582
|
+
}
|
|
583
|
+
return false;
|
|
584
|
+
};
|
|
753
585
|
|
|
586
|
+
pendingEvents = pendingEvents.filter(
|
|
587
|
+
(e) => isReorgedEvent(e) === false,
|
|
588
|
+
);
|
|
754
589
|
executedEvents = executedEvents.filter(
|
|
755
|
-
(e) => e
|
|
590
|
+
(e) => isReorgedEvent(e) === false,
|
|
756
591
|
);
|
|
757
592
|
|
|
758
|
-
|
|
593
|
+
params.common.logger.debug({
|
|
594
|
+
service: "sync",
|
|
595
|
+
msg: `Removed ${reorgedEvents} reorged '${network.name}' events`,
|
|
596
|
+
});
|
|
759
597
|
|
|
760
|
-
|
|
761
|
-
|
|
762
|
-
|
|
763
|
-
|
|
764
|
-
|
|
765
|
-
|
|
766
|
-
}),
|
|
767
|
-
args.syncStore.insertLogs({
|
|
768
|
-
logs: finalizedBlocks.flatMap(({ logs, block }) =>
|
|
769
|
-
logs.map((log) => ({ log, block })),
|
|
770
|
-
),
|
|
771
|
-
shouldUpdateCheckpoint: true,
|
|
772
|
-
chainId: network.chainId,
|
|
773
|
-
}),
|
|
774
|
-
args.syncStore.insertLogs({
|
|
775
|
-
logs: finalizedBlocks.flatMap(({ factoryLogs }) =>
|
|
776
|
-
factoryLogs.map((log) => ({ log })),
|
|
777
|
-
),
|
|
778
|
-
shouldUpdateCheckpoint: false,
|
|
779
|
-
chainId: network.chainId,
|
|
780
|
-
}),
|
|
781
|
-
args.syncStore.insertTransactions({
|
|
782
|
-
transactions: finalizedBlocks.flatMap(({ transactions, block }) =>
|
|
783
|
-
transactions.map((transaction) => ({
|
|
784
|
-
transaction,
|
|
785
|
-
block,
|
|
786
|
-
})),
|
|
787
|
-
),
|
|
788
|
-
chainId: network.chainId,
|
|
789
|
-
}),
|
|
790
|
-
args.syncStore.insertTransactionReceipts({
|
|
791
|
-
transactionReceipts: finalizedBlocks.flatMap(
|
|
792
|
-
({ transactionReceipts }) => transactionReceipts,
|
|
793
|
-
),
|
|
794
|
-
chainId: network.chainId,
|
|
795
|
-
}),
|
|
796
|
-
args.syncStore.insertTraces({
|
|
797
|
-
traces: finalizedBlocks.flatMap(({ traces, block, transactions }) =>
|
|
798
|
-
traces.map((trace) => ({
|
|
799
|
-
trace,
|
|
800
|
-
block,
|
|
801
|
-
transaction: transactions.find(
|
|
802
|
-
(t) => t.hash === trace.transactionHash,
|
|
803
|
-
)!,
|
|
804
|
-
})),
|
|
805
|
-
),
|
|
806
|
-
chainId: network.chainId,
|
|
807
|
-
}),
|
|
808
|
-
]);
|
|
598
|
+
if (params.ordering === "multichain") {
|
|
599
|
+
// Note: `checkpoints.current` not used in multichain ordering
|
|
600
|
+
const checkpoint = getMultichainCheckpoint({
|
|
601
|
+
tag: "current",
|
|
602
|
+
network,
|
|
603
|
+
})!;
|
|
809
604
|
|
|
810
|
-
|
|
811
|
-
// Note: this should happen after so the database doesn't become corrupted
|
|
605
|
+
// Move events from executed to pending
|
|
812
606
|
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
|
|
816
|
-
|
|
817
|
-
|
|
818
|
-
|
|
607
|
+
const events = executedEvents.filter(
|
|
608
|
+
(e) => e.checkpoint > checkpoint,
|
|
609
|
+
);
|
|
610
|
+
executedEvents = executedEvents.filter(
|
|
611
|
+
(e) => e.checkpoint < checkpoint,
|
|
612
|
+
);
|
|
613
|
+
pendingEvents = pendingEvents.concat(events);
|
|
614
|
+
|
|
615
|
+
params.common.logger.debug({
|
|
616
|
+
service: "sync",
|
|
617
|
+
msg: `Rescheduled ${events.length} reorged events`,
|
|
819
618
|
});
|
|
820
|
-
}
|
|
821
619
|
|
|
822
|
-
|
|
823
|
-
|
|
824
|
-
|
|
825
|
-
|
|
620
|
+
params.onRealtimeEvent({ type: "reorg", checkpoint, network });
|
|
621
|
+
} else {
|
|
622
|
+
const from = checkpoints.current;
|
|
623
|
+
checkpoints.current = getOmnichainCheckpoint({ tag: "current" })!;
|
|
624
|
+
const to = getOmnichainCheckpoint({ tag: "current" })!;
|
|
826
625
|
|
|
827
|
-
|
|
828
|
-
|
|
829
|
-
|
|
830
|
-
|
|
831
|
-
|
|
832
|
-
|
|
833
|
-
|
|
834
|
-
0,
|
|
835
|
-
);
|
|
836
|
-
args.common.metrics.ponder_sync_is_complete.set(
|
|
837
|
-
{ network: network.name },
|
|
838
|
-
1,
|
|
839
|
-
);
|
|
840
|
-
args.common.logger.info({
|
|
626
|
+
// Move events from executed to pending
|
|
627
|
+
|
|
628
|
+
const events = executedEvents.filter((e) => e.checkpoint > to);
|
|
629
|
+
executedEvents = executedEvents.filter((e) => e.checkpoint < to);
|
|
630
|
+
pendingEvents = pendingEvents.concat(events);
|
|
631
|
+
|
|
632
|
+
params.common.logger.debug({
|
|
841
633
|
service: "sync",
|
|
842
|
-
msg: `
|
|
634
|
+
msg: `Rescheduled ${events.length} reorged events`,
|
|
843
635
|
});
|
|
844
|
-
|
|
636
|
+
|
|
637
|
+
if (to < from) {
|
|
638
|
+
params.onRealtimeEvent({
|
|
639
|
+
type: "reorg",
|
|
640
|
+
checkpoint: to,
|
|
641
|
+
network,
|
|
642
|
+
});
|
|
643
|
+
}
|
|
845
644
|
}
|
|
645
|
+
|
|
846
646
|
break;
|
|
847
647
|
}
|
|
848
|
-
/**
|
|
849
|
-
* Handle a reorg with a new common ancestor block being found.
|
|
850
|
-
*/
|
|
851
|
-
case "reorg": {
|
|
852
|
-
syncProgress.current = event.block;
|
|
853
|
-
// Note: this checkpoint is <= the previous checkpoint
|
|
854
|
-
const checkpoint = getOmnichainCheckpoint("current")!;
|
|
855
648
|
|
|
856
|
-
|
|
857
|
-
|
|
858
|
-
|
|
859
|
-
|
|
860
|
-
);
|
|
649
|
+
default:
|
|
650
|
+
never(event);
|
|
651
|
+
}
|
|
652
|
+
};
|
|
861
653
|
|
|
862
|
-
|
|
654
|
+
await Promise.all(
|
|
655
|
+
params.indexingBuild.networks.map(async (network, index) => {
|
|
656
|
+
const requestQueue = params.requestQueues[index]!;
|
|
863
657
|
|
|
864
|
-
|
|
865
|
-
|
|
866
|
-
|
|
867
|
-
hexToNumber(block.number) <= hexToNumber(event.block.number),
|
|
868
|
-
);
|
|
658
|
+
const sources = params.indexingBuild.sources.filter(
|
|
659
|
+
({ filter }) => filter.chainId === network.chainId,
|
|
660
|
+
);
|
|
869
661
|
|
|
870
|
-
|
|
871
|
-
|
|
872
|
-
|
|
662
|
+
// Invalidate sync cache for devnet sources
|
|
663
|
+
if (network.disableCache) {
|
|
664
|
+
params.common.logger.warn({
|
|
665
|
+
service: "sync",
|
|
666
|
+
msg: `Deleting cache records for '${network.name}'`,
|
|
667
|
+
});
|
|
873
668
|
|
|
874
|
-
|
|
875
|
-
|
|
876
|
-
);
|
|
877
|
-
|
|
878
|
-
(e) => isReorgedEvent(e) === false,
|
|
879
|
-
);
|
|
669
|
+
await params.syncStore.pruneByChain({
|
|
670
|
+
chainId: network.chainId,
|
|
671
|
+
});
|
|
672
|
+
}
|
|
880
673
|
|
|
881
|
-
|
|
674
|
+
const historicalSync = await createHistoricalSync({
|
|
675
|
+
common: params.common,
|
|
676
|
+
sources,
|
|
677
|
+
syncStore: params.syncStore,
|
|
678
|
+
requestQueue,
|
|
679
|
+
network,
|
|
680
|
+
onFatalError: params.onFatalError,
|
|
681
|
+
});
|
|
882
682
|
|
|
883
|
-
|
|
884
|
-
|
|
885
|
-
|
|
886
|
-
|
|
887
|
-
|
|
683
|
+
const syncProgress = await getLocalSyncProgress({
|
|
684
|
+
common: params.common,
|
|
685
|
+
network,
|
|
686
|
+
sources,
|
|
687
|
+
requestQueue,
|
|
688
|
+
intervalsCache: historicalSync.intervalsCache,
|
|
689
|
+
});
|
|
888
690
|
|
|
889
|
-
|
|
890
|
-
|
|
891
|
-
|
|
892
|
-
|
|
691
|
+
const realtimeSync = createRealtimeSync({
|
|
692
|
+
common: params.common,
|
|
693
|
+
sources,
|
|
694
|
+
requestQueue,
|
|
695
|
+
network,
|
|
696
|
+
onEvent: realtimeMutex((event) =>
|
|
697
|
+
perChainOnRealtimeSyncEvent(event)
|
|
698
|
+
.then((event) => {
|
|
699
|
+
onRealtimeSyncEvent(event, {
|
|
700
|
+
network,
|
|
701
|
+
syncProgress,
|
|
702
|
+
realtimeSync,
|
|
703
|
+
});
|
|
893
704
|
|
|
894
|
-
|
|
895
|
-
|
|
705
|
+
if (isSyncFinalized(syncProgress) && isSyncEnd(syncProgress)) {
|
|
706
|
+
// The realtime service can be killed if `endBlock` is
|
|
707
|
+
// defined has become finalized.
|
|
708
|
+
|
|
709
|
+
params.common.metrics.ponder_sync_is_realtime.set(
|
|
710
|
+
{ network: network.name },
|
|
711
|
+
0,
|
|
712
|
+
);
|
|
713
|
+
params.common.metrics.ponder_sync_is_complete.set(
|
|
714
|
+
{ network: network.name },
|
|
715
|
+
1,
|
|
716
|
+
);
|
|
717
|
+
params.common.logger.info({
|
|
718
|
+
service: "sync",
|
|
719
|
+
msg: `Killing '${network.name}' live indexing because the end block ${hexToNumber(syncProgress.end!.number)} has been finalized`,
|
|
720
|
+
});
|
|
721
|
+
realtimeSync.kill();
|
|
722
|
+
}
|
|
723
|
+
})
|
|
724
|
+
.catch((error) => {
|
|
725
|
+
params.common.logger.error({
|
|
726
|
+
service: "sync",
|
|
727
|
+
msg: `Fatal error: Unable to process ${event.type} event`,
|
|
728
|
+
error,
|
|
729
|
+
});
|
|
730
|
+
params.onFatalError(error);
|
|
731
|
+
}),
|
|
732
|
+
),
|
|
733
|
+
onFatalError: params.onFatalError,
|
|
734
|
+
});
|
|
896
735
|
|
|
897
|
-
|
|
898
|
-
|
|
736
|
+
params.common.metrics.ponder_sync_is_realtime.set(
|
|
737
|
+
{ network: network.name },
|
|
738
|
+
0,
|
|
739
|
+
);
|
|
740
|
+
params.common.metrics.ponder_sync_is_complete.set(
|
|
741
|
+
{ network: network.name },
|
|
742
|
+
0,
|
|
743
|
+
);
|
|
899
744
|
|
|
900
|
-
|
|
901
|
-
|
|
745
|
+
perNetworkSync.set(network, {
|
|
746
|
+
syncProgress,
|
|
747
|
+
historicalSync,
|
|
748
|
+
realtimeSync,
|
|
749
|
+
});
|
|
750
|
+
|
|
751
|
+
const perChainOnRealtimeSyncEvent = getPerChainOnRealtimeSyncEvent({
|
|
752
|
+
common: params.common,
|
|
753
|
+
network,
|
|
754
|
+
sources,
|
|
755
|
+
syncStore: params.syncStore,
|
|
756
|
+
syncProgress,
|
|
757
|
+
});
|
|
758
|
+
}),
|
|
759
|
+
);
|
|
760
|
+
|
|
761
|
+
const status: Status = {};
|
|
762
|
+
const seconds: Seconds = {};
|
|
763
|
+
|
|
764
|
+
for (const network of params.indexingBuild.networks) {
|
|
765
|
+
status[network.name] = { block: null, ready: false };
|
|
766
|
+
}
|
|
767
|
+
|
|
768
|
+
if (params.ordering === "multichain") {
|
|
769
|
+
for (const network of params.indexingBuild.networks) {
|
|
770
|
+
seconds[network.name] = {
|
|
771
|
+
start: decodeCheckpoint(
|
|
772
|
+
getMultichainCheckpoint({ tag: "start", network })!,
|
|
773
|
+
).blockTimestamp,
|
|
774
|
+
end: decodeCheckpoint(
|
|
775
|
+
min(
|
|
776
|
+
getOmnichainCheckpoint({ tag: "end" }),
|
|
777
|
+
getOmnichainCheckpoint({ tag: "finalized" }),
|
|
778
|
+
),
|
|
779
|
+
).blockTimestamp,
|
|
780
|
+
cached: decodeCheckpoint(params.initialCheckpoint).blockTimestamp,
|
|
781
|
+
};
|
|
902
782
|
}
|
|
903
|
-
}
|
|
783
|
+
} else {
|
|
784
|
+
for (const network of params.indexingBuild.networks) {
|
|
785
|
+
seconds[network.name] = {
|
|
786
|
+
start: decodeCheckpoint(getOmnichainCheckpoint({ tag: "start" })!)
|
|
787
|
+
.blockTimestamp,
|
|
788
|
+
end: decodeCheckpoint(
|
|
789
|
+
min(
|
|
790
|
+
getOmnichainCheckpoint({ tag: "end" }),
|
|
791
|
+
getOmnichainCheckpoint({ tag: "finalized" }),
|
|
792
|
+
),
|
|
793
|
+
).blockTimestamp,
|
|
794
|
+
cached: decodeCheckpoint(params.initialCheckpoint).blockTimestamp,
|
|
795
|
+
};
|
|
796
|
+
}
|
|
797
|
+
}
|
|
798
|
+
|
|
904
799
|
return {
|
|
905
800
|
getEvents,
|
|
906
801
|
async startRealtime() {
|
|
907
|
-
for (const network of
|
|
802
|
+
for (const network of params.indexingBuild.networks) {
|
|
908
803
|
const { syncProgress, realtimeSync } = perNetworkSync.get(network)!;
|
|
909
804
|
|
|
910
|
-
const filters =
|
|
805
|
+
const filters = params.indexingBuild.sources
|
|
911
806
|
.filter(({ filter }) => filter.chainId === network.chainId)
|
|
912
807
|
.map(({ filter }) => filter);
|
|
913
808
|
|
|
@@ -921,7 +816,7 @@ export const createSync = async (args: CreateSyncParameters): Promise<Sync> => {
|
|
|
921
816
|
// finalized checkpoint and add them to pendingEvents. These events are synced during
|
|
922
817
|
// the historical phase, but must be indexed in the realtime phase because events
|
|
923
818
|
// synced in realtime on other chains might be ordered before them.
|
|
924
|
-
const from = getOmnichainCheckpoint("finalized")!;
|
|
819
|
+
const from = getOmnichainCheckpoint({ tag: "finalized" })!;
|
|
925
820
|
|
|
926
821
|
const finalized = getChainCheckpoint({
|
|
927
822
|
syncProgress,
|
|
@@ -936,17 +831,27 @@ export const createSync = async (args: CreateSyncParameters): Promise<Sync> => {
|
|
|
936
831
|
const to = min(finalized, end);
|
|
937
832
|
|
|
938
833
|
if (to > from) {
|
|
939
|
-
const events = await
|
|
940
|
-
|
|
834
|
+
const events = await params.syncStore.getEvents({
|
|
835
|
+
filters,
|
|
836
|
+
from,
|
|
837
|
+
to,
|
|
838
|
+
});
|
|
839
|
+
|
|
840
|
+
params.common.logger.debug({
|
|
841
|
+
service: "sync",
|
|
842
|
+
msg: `Extracted and scheduled ${events.events.length} '${network.name}' events`,
|
|
843
|
+
});
|
|
844
|
+
|
|
845
|
+
pendingEvents = pendingEvents.concat(events.events);
|
|
941
846
|
}
|
|
942
847
|
|
|
943
848
|
if (isSyncEnd(syncProgress)) {
|
|
944
|
-
|
|
849
|
+
params.common.metrics.ponder_sync_is_complete.set(
|
|
945
850
|
{ network: network.name },
|
|
946
851
|
1,
|
|
947
852
|
);
|
|
948
853
|
} else {
|
|
949
|
-
|
|
854
|
+
params.common.metrics.ponder_sync_is_realtime.set(
|
|
950
855
|
{ network: network.name },
|
|
951
856
|
1,
|
|
952
857
|
);
|
|
@@ -954,225 +859,389 @@ export const createSync = async (args: CreateSyncParameters): Promise<Sync> => {
|
|
|
954
859
|
const initialChildAddresses = new Map<Factory, Set<Address>>();
|
|
955
860
|
|
|
956
861
|
for (const filter of filters) {
|
|
957
|
-
|
|
958
|
-
|
|
959
|
-
|
|
960
|
-
|
|
862
|
+
switch (filter.type) {
|
|
863
|
+
case "log":
|
|
864
|
+
if (isAddressFactory(filter.address)) {
|
|
865
|
+
const addresses = await params.syncStore.getChildAddresses({
|
|
866
|
+
filter: filter.address,
|
|
867
|
+
});
|
|
868
|
+
|
|
869
|
+
initialChildAddresses.set(filter.address, new Set(addresses));
|
|
870
|
+
}
|
|
871
|
+
break;
|
|
872
|
+
|
|
873
|
+
case "transaction":
|
|
874
|
+
case "transfer":
|
|
875
|
+
case "trace":
|
|
876
|
+
if (isAddressFactory(filter.fromAddress)) {
|
|
877
|
+
const addresses = await params.syncStore.getChildAddresses({
|
|
878
|
+
filter: filter.fromAddress,
|
|
879
|
+
});
|
|
880
|
+
|
|
881
|
+
initialChildAddresses.set(
|
|
882
|
+
filter.fromAddress,
|
|
883
|
+
new Set(addresses),
|
|
884
|
+
);
|
|
885
|
+
}
|
|
886
|
+
|
|
887
|
+
if (isAddressFactory(filter.toAddress)) {
|
|
888
|
+
const addresses = await params.syncStore.getChildAddresses({
|
|
889
|
+
filter: filter.toAddress,
|
|
890
|
+
});
|
|
961
891
|
|
|
962
|
-
|
|
892
|
+
initialChildAddresses.set(
|
|
893
|
+
filter.toAddress,
|
|
894
|
+
new Set(addresses),
|
|
895
|
+
);
|
|
896
|
+
}
|
|
897
|
+
|
|
898
|
+
break;
|
|
963
899
|
}
|
|
964
900
|
}
|
|
965
901
|
|
|
902
|
+
params.common.logger.debug({
|
|
903
|
+
service: "sync",
|
|
904
|
+
msg: `Initialized '${network.name}' realtime sync with ${initialChildAddresses.size} factory child addresses`,
|
|
905
|
+
});
|
|
906
|
+
|
|
966
907
|
realtimeSync.start({ syncProgress, initialChildAddresses });
|
|
967
908
|
}
|
|
968
909
|
}
|
|
969
910
|
},
|
|
970
|
-
getStartCheckpoint() {
|
|
971
|
-
return getOmnichainCheckpoint("start")!;
|
|
972
|
-
},
|
|
973
|
-
getFinalizedCheckpoint() {
|
|
974
|
-
return getOmnichainCheckpoint("finalized")!;
|
|
975
|
-
},
|
|
976
911
|
getStatus() {
|
|
977
912
|
return status;
|
|
978
913
|
},
|
|
979
|
-
|
|
980
|
-
|
|
981
|
-
return
|
|
982
|
-
},
|
|
983
|
-
async kill() {
|
|
984
|
-
isKilled = true;
|
|
985
|
-
const promises: Promise<void>[] = [];
|
|
986
|
-
for (const network of args.indexingBuild.networks) {
|
|
987
|
-
const { historicalSync, realtimeSync, realtimeQueue } =
|
|
988
|
-
perNetworkSync.get(network)!;
|
|
989
|
-
historicalSync.kill();
|
|
990
|
-
realtimeQueue.pause();
|
|
991
|
-
realtimeQueue.clear();
|
|
992
|
-
promises.push(realtimeQueue.onIdle());
|
|
993
|
-
promises.push(realtimeSync.kill());
|
|
994
|
-
}
|
|
995
|
-
await Promise.all(promises);
|
|
914
|
+
seconds,
|
|
915
|
+
getFinalizedCheckpoint() {
|
|
916
|
+
return getOmnichainCheckpoint({ tag: "finalized" })!;
|
|
996
917
|
},
|
|
997
918
|
};
|
|
998
919
|
};
|
|
999
920
|
|
|
1000
|
-
|
|
1001
|
-
export const syncDiagnostic = async ({
|
|
921
|
+
export const getPerChainOnRealtimeSyncEvent = ({
|
|
1002
922
|
common,
|
|
1003
|
-
sources,
|
|
1004
923
|
network,
|
|
1005
|
-
|
|
924
|
+
sources,
|
|
925
|
+
syncStore,
|
|
926
|
+
syncProgress,
|
|
1006
927
|
}: {
|
|
1007
928
|
common: Common;
|
|
1008
|
-
sources: Source[];
|
|
1009
929
|
network: Network;
|
|
1010
|
-
|
|
930
|
+
sources: Source[];
|
|
931
|
+
syncStore: SyncStore;
|
|
932
|
+
syncProgress: SyncProgress;
|
|
1011
933
|
}) => {
|
|
1012
|
-
|
|
1013
|
-
|
|
1014
|
-
|
|
1015
|
-
|
|
1016
|
-
* of the filters doesn't have an `endBlock`.
|
|
1017
|
-
*/
|
|
1018
|
-
const end = sources.some(({ filter }) => filter.toBlock === undefined)
|
|
1019
|
-
? undefined
|
|
1020
|
-
: Math.max(...sources.map(({ filter }) => filter.toBlock!));
|
|
1021
|
-
|
|
1022
|
-
const [remoteChainId, startBlock, latestBlock] = await Promise.all([
|
|
1023
|
-
requestQueue.request({ method: "eth_chainId" }),
|
|
1024
|
-
_eth_getBlockByNumber(requestQueue, { blockNumber: start }),
|
|
1025
|
-
_eth_getBlockByNumber(requestQueue, { blockTag: "latest" }),
|
|
1026
|
-
]);
|
|
1027
|
-
|
|
1028
|
-
const endBlock =
|
|
1029
|
-
end === undefined
|
|
1030
|
-
? undefined
|
|
1031
|
-
: end > hexToBigInt(latestBlock.number)
|
|
1032
|
-
? ({
|
|
1033
|
-
number: toHex(end),
|
|
1034
|
-
hash: "0x",
|
|
1035
|
-
parentHash: "0x",
|
|
1036
|
-
timestamp: toHex(maxCheckpoint.blockTimestamp),
|
|
1037
|
-
} as LightBlock)
|
|
1038
|
-
: await _eth_getBlockByNumber(requestQueue, { blockNumber: end });
|
|
934
|
+
let unfinalizedBlocks: Omit<
|
|
935
|
+
Extract<RealtimeSyncEvent, { type: "block" }>,
|
|
936
|
+
"type"
|
|
937
|
+
>[] = [];
|
|
1039
938
|
|
|
1040
|
-
|
|
1041
|
-
|
|
1042
|
-
|
|
1043
|
-
|
|
1044
|
-
msg: `Remote chain ID (${remoteChainId}) does not match configured chain ID (${network.chainId}) for network "${network.name}"`,
|
|
1045
|
-
});
|
|
1046
|
-
}
|
|
939
|
+
return async (event: RealtimeSyncEvent): Promise<RealtimeSyncEvent> => {
|
|
940
|
+
switch (event.type) {
|
|
941
|
+
case "block": {
|
|
942
|
+
syncProgress.current = event.block;
|
|
1047
943
|
|
|
1048
|
-
|
|
1049
|
-
|
|
1050
|
-
|
|
1051
|
-
|
|
944
|
+
common.logger.debug({
|
|
945
|
+
service: "sync",
|
|
946
|
+
msg: `Updated '${network.name}' current block to ${hexToNumber(event.block.number)}`,
|
|
947
|
+
});
|
|
1052
948
|
|
|
1053
|
-
|
|
1054
|
-
|
|
1055
|
-
|
|
949
|
+
common.metrics.ponder_sync_block.set(
|
|
950
|
+
{ network: network.name },
|
|
951
|
+
hexToNumber(syncProgress.current.number),
|
|
952
|
+
);
|
|
1056
953
|
|
|
1057
|
-
|
|
1058
|
-
|
|
1059
|
-
|
|
1060
|
-
|
|
954
|
+
unfinalizedBlocks.push(event);
|
|
955
|
+
|
|
956
|
+
return event;
|
|
957
|
+
}
|
|
958
|
+
|
|
959
|
+
case "finalize": {
|
|
960
|
+
const finalizedInterval = [
|
|
961
|
+
hexToNumber(syncProgress.finalized.number),
|
|
962
|
+
hexToNumber(event.block.number),
|
|
963
|
+
] satisfies Interval;
|
|
964
|
+
|
|
965
|
+
syncProgress.finalized = event.block;
|
|
966
|
+
|
|
967
|
+
common.logger.debug({
|
|
968
|
+
service: "sync",
|
|
969
|
+
msg: `Updated '${network.name}' finalized block to ${hexToNumber(event.block.number)}`,
|
|
970
|
+
});
|
|
971
|
+
|
|
972
|
+
// Remove all finalized data
|
|
973
|
+
|
|
974
|
+
const finalizedBlocks = unfinalizedBlocks.filter(
|
|
975
|
+
({ block }) =>
|
|
976
|
+
hexToNumber(block.number) <= hexToNumber(event.block.number),
|
|
977
|
+
);
|
|
978
|
+
|
|
979
|
+
unfinalizedBlocks = unfinalizedBlocks.filter(
|
|
980
|
+
({ block }) =>
|
|
981
|
+
hexToNumber(block.number) > hexToNumber(event.block.number),
|
|
982
|
+
);
|
|
983
|
+
|
|
984
|
+
// Add finalized blocks, logs, transactions, receipts, and traces to the sync-store.
|
|
985
|
+
|
|
986
|
+
await Promise.all([
|
|
987
|
+
syncStore.insertBlocks({
|
|
988
|
+
blocks: finalizedBlocks
|
|
989
|
+
.filter(({ hasMatchedFilter }) => hasMatchedFilter)
|
|
990
|
+
.map(({ block }) => block),
|
|
991
|
+
chainId: network.chainId,
|
|
992
|
+
}),
|
|
993
|
+
syncStore.insertLogs({
|
|
994
|
+
logs: finalizedBlocks.flatMap(({ logs, block }) =>
|
|
995
|
+
logs.map((log) => ({ log, block })),
|
|
996
|
+
),
|
|
997
|
+
shouldUpdateCheckpoint: true,
|
|
998
|
+
chainId: network.chainId,
|
|
999
|
+
}),
|
|
1000
|
+
syncStore.insertLogs({
|
|
1001
|
+
logs: finalizedBlocks.flatMap(({ factoryLogs }) =>
|
|
1002
|
+
factoryLogs.map((log) => ({ log })),
|
|
1003
|
+
),
|
|
1004
|
+
shouldUpdateCheckpoint: false,
|
|
1005
|
+
chainId: network.chainId,
|
|
1006
|
+
}),
|
|
1007
|
+
syncStore.insertTransactions({
|
|
1008
|
+
transactions: finalizedBlocks.flatMap(({ transactions, block }) =>
|
|
1009
|
+
transactions.map((transaction) => ({
|
|
1010
|
+
transaction,
|
|
1011
|
+
block,
|
|
1012
|
+
})),
|
|
1013
|
+
),
|
|
1014
|
+
chainId: network.chainId,
|
|
1015
|
+
}),
|
|
1016
|
+
syncStore.insertTransactionReceipts({
|
|
1017
|
+
transactionReceipts: finalizedBlocks.flatMap(
|
|
1018
|
+
({ transactionReceipts }) => transactionReceipts,
|
|
1019
|
+
),
|
|
1020
|
+
chainId: network.chainId,
|
|
1021
|
+
}),
|
|
1022
|
+
syncStore.insertTraces({
|
|
1023
|
+
traces: finalizedBlocks.flatMap(({ traces, block, transactions }) =>
|
|
1024
|
+
traces.map((trace) => ({
|
|
1025
|
+
trace,
|
|
1026
|
+
block,
|
|
1027
|
+
transaction: transactions.find(
|
|
1028
|
+
(t) => t.hash === trace.transactionHash,
|
|
1029
|
+
)!,
|
|
1030
|
+
})),
|
|
1031
|
+
),
|
|
1032
|
+
chainId: network.chainId,
|
|
1033
|
+
}),
|
|
1034
|
+
]);
|
|
1035
|
+
|
|
1036
|
+
// Add corresponding intervals to the sync-store
|
|
1037
|
+
// Note: this should happen after insertion so the database doesn't become corrupted
|
|
1038
|
+
|
|
1039
|
+
if (network.disableCache === false) {
|
|
1040
|
+
const syncedIntervals: {
|
|
1041
|
+
interval: Interval;
|
|
1042
|
+
filter: Filter;
|
|
1043
|
+
}[] = [];
|
|
1044
|
+
|
|
1045
|
+
for (const { filter } of sources) {
|
|
1046
|
+
const intervals = intervalIntersection(
|
|
1047
|
+
[finalizedInterval],
|
|
1048
|
+
[
|
|
1049
|
+
[
|
|
1050
|
+
filter.fromBlock ?? 0,
|
|
1051
|
+
filter.toBlock ?? Number.POSITIVE_INFINITY,
|
|
1052
|
+
],
|
|
1053
|
+
],
|
|
1054
|
+
);
|
|
1055
|
+
|
|
1056
|
+
for (const interval of intervals) {
|
|
1057
|
+
syncedIntervals.push({ interval, filter });
|
|
1058
|
+
}
|
|
1059
|
+
}
|
|
1060
|
+
|
|
1061
|
+
await syncStore.insertIntervals({
|
|
1062
|
+
intervals: syncedIntervals,
|
|
1063
|
+
chainId: network.chainId,
|
|
1064
|
+
});
|
|
1065
|
+
}
|
|
1066
|
+
|
|
1067
|
+
return event;
|
|
1068
|
+
}
|
|
1069
|
+
|
|
1070
|
+
case "reorg": {
|
|
1071
|
+
syncProgress.current = event.block;
|
|
1072
|
+
|
|
1073
|
+
common.logger.debug({
|
|
1074
|
+
service: "sync",
|
|
1075
|
+
msg: `Updated '${network.name}' current block to ${hexToNumber(event.block.number)}`,
|
|
1076
|
+
});
|
|
1077
|
+
|
|
1078
|
+
common.metrics.ponder_sync_block.set(
|
|
1079
|
+
{ network: network.name },
|
|
1080
|
+
hexToNumber(syncProgress.current.number),
|
|
1081
|
+
);
|
|
1082
|
+
|
|
1083
|
+
// Remove all reorged data
|
|
1084
|
+
|
|
1085
|
+
unfinalizedBlocks = unfinalizedBlocks.filter(
|
|
1086
|
+
({ block }) =>
|
|
1087
|
+
hexToNumber(block.number) <= hexToNumber(event.block.number),
|
|
1088
|
+
);
|
|
1089
|
+
|
|
1090
|
+
await syncStore.pruneRpcRequestResult({
|
|
1091
|
+
chainId: network.chainId,
|
|
1092
|
+
blocks: event.reorgedBlocks,
|
|
1093
|
+
});
|
|
1094
|
+
|
|
1095
|
+
return event;
|
|
1096
|
+
}
|
|
1097
|
+
}
|
|
1061
1098
|
};
|
|
1062
1099
|
};
|
|
1063
1100
|
-
-
-
-
-  historicalSync,
-}: {
+export async function* getLocalEventGenerator(params: {
+  common: Common;
+  network: Network;
+  syncStore: SyncStore;
   sources: Source[];
-
-
-
-
-
-
-
-
-
+  localSyncGenerator: AsyncGenerator<string>;
+  from: string;
+  to: string;
+  limit: number;
+}): AsyncGenerator<{ events: RawEvent[]; checkpoint: string }> {
+  let cursor = params.from;
+  // Estimate optimal range (seconds) to query at a time, eventually
+  // used to determine `to` passed to `getEvents`.
+  let estimateSeconds = 1_000;
 
-
-
-
-
-
+  params.common.logger.debug({
+    service: "sync",
+    msg: `Initialized '${params.network.name}' extract query for timestamp range [${decodeCheckpoint(params.from).blockTimestamp}, ${decodeCheckpoint(params.to).blockTimestamp}]`,
+  });
+
+  for await (const syncCheckpoint of bufferAsyncGenerator(
+    params.localSyncGenerator,
+    Number.POSITIVE_INFINITY,
+  )) {
+    let consecutiveErrors = 0;
+    while (cursor < min(syncCheckpoint, params.to)) {
+      const estimateCheckpoint = encodeCheckpoint({
+        ...ZERO_CHECKPOINT,
+        chainId: BigInt(params.network.chainId),
+        blockTimestamp: Math.min(
+          decodeCheckpoint(cursor).blockTimestamp + estimateSeconds,
+          MAX_CHECKPOINT.blockTimestamp,
         ),
-      )
-
+      });
+      const to = min(syncCheckpoint, estimateCheckpoint, params.to);
+      try {
+        const { events, cursor: queryCursor } =
+          await params.syncStore.getEvents({
+            filters: params.sources.map(({ filter }) => filter),
+            from: cursor,
+            to,
+            limit: params.limit,
+          });
 
-
+        params.common.logger.debug({
+          service: "sync",
+          msg: `Extracted ${events.length} '${params.network.name}' events for timestamp range [${decodeCheckpoint(cursor).blockTimestamp}, ${decodeCheckpoint(queryCursor).blockTimestamp}]`,
+        });
 
-
-
-
-
-
-
+        estimateSeconds = estimate({
+          from: decodeCheckpoint(cursor).blockTimestamp,
+          to: decodeCheckpoint(queryCursor).blockTimestamp,
+          target: params.limit,
+          result: events.length,
+          min: 10,
+          max: 86_400,
+          prev: estimateSeconds,
+          maxIncrease: 1.08,
+        });
 
-
-
-
-
-        );
+        params.common.logger.debug({
+          service: "sync",
+          msg: `Updated '${params.network.name}' extract query estimate to ${estimateSeconds} seconds`,
+        });
 
-
-
-
-
-
-
-
-
-
-  )
-) {
-  return _eth_getBlockByNumber(requestQueue, {
-    blockNumber: minCompletedBlock,
-  });
-}
+        consecutiveErrors = 0;
+        cursor = queryCursor;
+        yield { events, checkpoint: cursor };
+      } catch (error) {
+        params.common.logger.warn({
+          service: "sync",
+          msg: `Failed '${params.network.name}' extract query for timestamp range [${decodeCheckpoint(cursor).blockTimestamp}, ${decodeCheckpoint(to).blockTimestamp}]`,
+          error: error as Error,
+        });
 
-
-
+        // Handle errors by reducing the requested range by 10x
+        estimateSeconds = Math.max(10, Math.round(estimateSeconds / 10));
+
+        params.common.logger.debug({
+          service: "sync",
+          msg: `Updated '${params.network.name}' getEvents query estimate to ${estimateSeconds} seconds`,
+        });
 
-
-
+        if (++consecutiveErrors > 4) throw error;
+      }
+    }
+  }
+}
+
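The generator above sizes each `getEvents` query adaptively: `estimate` aims for `limit` events per query, growth is capped at 8% per step (`maxIncrease: 1.08`), and any error shrinks the window tenfold with a floor of 10 seconds. The `estimate` util itself is not part of this hunk; the sketch below is one plausible shape of that proportional heuristic, not the package's actual implementation:

```ts
// A sketch of the adaptive window heuristic, assuming a simple proportional
// rule: scale the previous window by target/result, cap growth at
// `maxIncrease`, and clamp the result to [min, max].
function estimate(params: {
  from: number;
  to: number;
  target: number;
  result: number;
  min: number;
  max: number;
  prev: number;
  maxIncrease: number;
}): number {
  const { from, to, target, result, min, max, prev, maxIncrease } = params;
  const width = to - from;
  // Seconds needed to hit the target count at the observed event density;
  // if nothing came back, grow by the maximum allowed factor instead.
  const scaled = result === 0 ? prev * maxIncrease : (width / result) * target;
  return Math.floor(Math.min(Math.max(scaled, min), max, prev * maxIncrease));
}

// A 1,000-second window returned 2,000 events against a 1,000-event target,
// so the next window shrinks toward ~500 seconds:
console.log(
  estimate({
    from: 0, to: 1_000, target: 1_000, result: 2_000,
    min: 10, max: 86_400, prev: 1_000, maxIncrease: 1.08,
  }),
); // -> 500
```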
}
|
|
1189
|
+
|
|
1190
|
+
export async function* getLocalSyncGenerator({
|
|
1126
1191
|
common,
|
|
1127
1192
|
network,
|
|
1128
1193
|
syncProgress,
|
|
1129
1194
|
historicalSync,
|
|
1130
|
-
showLogs,
|
|
1131
1195
|
}: {
|
|
1132
1196
|
common: Common;
|
|
1133
1197
|
network: Network;
|
|
1134
1198
|
syncProgress: SyncProgress;
|
|
1135
1199
|
historicalSync: HistoricalSync;
|
|
1136
|
-
|
|
1137
|
-
|
|
1138
|
-
|
|
1200
|
+
}): AsyncGenerator<string> {
|
|
1201
|
+
const label = { network: network.name };
|
|
1202
|
+
|
|
1203
|
+
let cursor = hexToNumber(syncProgress.start.number);
|
|
1204
|
+
const last = getHistoricalLast(syncProgress);
|
|
1205
|
+
|
|
1206
|
+
// Estimate optimal range (blocks) to sync at a time, eventually to be used to
|
|
1207
|
+
// determine `interval` passed to `historicalSync.sync()`.
|
|
1208
|
+
let estimateRange = 25;
|
|
1209
|
+
|
|
1210
|
+
// Handle two special cases:
|
|
1211
|
+
// 1. `syncProgress.start` > `syncProgress.finalized`
|
|
1212
|
+
// 2. `cached` is defined
|
|
1213
|
+
|
|
1139
1214
|
if (
|
|
1140
1215
|
hexToNumber(syncProgress.start.number) >
|
|
1141
1216
|
hexToNumber(syncProgress.finalized.number)
|
|
1142
1217
|
) {
|
|
1143
1218
|
syncProgress.current = syncProgress.finalized;
|
|
1144
1219
|
|
|
1145
|
-
|
|
1220
|
+
common.logger.warn({
|
|
1221
|
+
service: "sync",
|
|
1222
|
+
msg: `Skipped '${network.name}' historical sync because the start block is unfinalized`,
|
|
1223
|
+
});
|
|
1224
|
+
|
|
1146
1225
|
common.metrics.ponder_sync_block.set(
|
|
1147
|
-
|
|
1226
|
+
label,
|
|
1148
1227
|
hexToNumber(syncProgress.current.number),
|
|
1149
1228
|
);
|
|
1150
|
-
|
|
1151
|
-
if (showLogs) {
|
|
1152
|
-
common.logger.warn({
|
|
1153
|
-
service: "historical",
|
|
1154
|
-
msg: `Skipped historical sync for '${network.name}' because the start block is not finalized`,
|
|
1155
|
-
});
|
|
1156
|
-
}
|
|
1157
|
-
|
|
1158
|
-
const label = { network: network.name };
|
|
1159
|
-
// Set "ponder_historical_total_blocks"
|
|
1160
1229
|
common.metrics.ponder_historical_total_blocks.set(label, 0);
|
|
1161
|
-
// Set "ponder_historical_sync_cached_blocks"
|
|
1162
1230
|
common.metrics.ponder_historical_cached_blocks.set(label, 0);
|
|
1163
1231
|
|
|
1164
1232
|
return;
|
|
1165
1233
|
}
|
|
1166
1234
|
|
|
1167
|
-
const historicalLast = getHistoricalLast(syncProgress);
|
|
1168
|
-
|
|
1169
|
-
// Intialize metrics
|
|
1170
|
-
|
|
1171
1235
|
const totalInterval = [
|
|
1172
1236
|
hexToNumber(syncProgress.start.number),
|
|
1173
|
-
hexToNumber(
|
|
1237
|
+
hexToNumber(last.number),
|
|
1174
1238
|
] satisfies Interval;
|
|
1175
1239
|
|
|
1240
|
+
common.logger.debug({
|
|
1241
|
+
service: "sync",
|
|
1242
|
+
msg: `Initialized '${network.name}' historical sync for block range [${totalInterval[0]}, ${totalInterval[1]}]`,
|
|
1243
|
+
});
|
|
1244
|
+
|
|
1176
1245
|
const requiredIntervals = Array.from(
|
|
1177
1246
|
historicalSync.intervalsCache.entries(),
|
|
1178
1247
|
).flatMap(([filter, fragmentIntervals]) =>
|
|
@@ -1193,110 +1262,91 @@ export async function* localHistoricalSyncGenerator({
|
|
|
1193
1262
|
);
|
|
1194
1263
|
|
|
1195
1264
|
const required = intervalSum(intervalUnion(requiredIntervals));
|
|
1196
|
-
|
|
1197
1265
|
const total = totalInterval[1] - totalInterval[0] + 1;
|
|
1198
1266
|
|
|
1199
|
-
const label = { network: network.name };
|
|
1200
|
-
// Set "ponder_historical_total_blocks"
|
|
1201
1267
|
common.metrics.ponder_historical_total_blocks.set(label, total);
|
|
1202
|
-
// Set "ponder_historical_sync_cached_blocks"
|
|
1203
1268
|
common.metrics.ponder_historical_cached_blocks.set(label, total - required);
|
|
1204
1269
|
|
|
1205
|
-
|
|
1206
|
-
|
|
1207
|
-
|
|
1208
|
-
|
|
1209
|
-
|
|
1210
|
-
|
|
1211
|
-
});
|
|
1212
|
-
}
|
|
1270
|
+
// Handle cache hit
|
|
1271
|
+
if (syncProgress.current !== undefined) {
|
|
1272
|
+
common.metrics.ponder_sync_block.set(
|
|
1273
|
+
label,
|
|
1274
|
+
hexToNumber(syncProgress.current.number),
|
|
1275
|
+
);
|
|
1213
1276
|
|
|
1214
|
-
/**
|
|
1215
|
-
* Estimate optimal range (blocks) to sync at a time, eventually to be used to
|
|
1216
|
-
* determine `interval` passed to `historicalSync.sync()`.
|
|
1217
|
-
*/
|
|
1218
|
-
let estimateRange = 25;
|
|
1219
|
-
// Cursor to track progress.
|
|
1220
|
-
let fromBlock = hexToNumber(syncProgress.start.number);
|
|
1221
|
-
|
|
1222
|
-
/**
|
|
1223
|
-
* Handle a cache hit by fast forwarding and potentially exiting.
|
|
1224
|
-
* A cache hit can either be: (listed by priority)
|
|
1225
|
-
* 1) recovering progress from earlier invocations with different `finalized` blocks
|
|
1226
|
-
* 2) recovering progress from the interval cache
|
|
1227
|
-
*/
|
|
1228
|
-
if (
|
|
1229
|
-
syncProgress.current !== undefined &&
|
|
1230
|
-
(syncProgress.cached === undefined ||
|
|
1231
|
-
hexToNumber(syncProgress.current.number) >
|
|
1232
|
-
hexToNumber(syncProgress.cached.number))
|
|
1233
|
-
) {
|
|
1234
|
-
fromBlock = hexToNumber(syncProgress.current.number) + 1;
|
|
1235
|
-
} else if (syncProgress.cached !== undefined) {
|
|
1236
1277
|
// `getEvents` can make progress without calling `sync`, so immediately "yield"
|
|
1237
|
-
yield
|
|
1238
|
-
|
|
1239
|
-
|
|
1240
|
-
|
|
1241
|
-
|
|
1242
|
-
|
|
1243
|
-
|
|
1244
|
-
|
|
1245
|
-
|
|
1246
|
-
msg: `Skipped historical sync for '${network.name}' because all blocks are cached.`,
|
|
1247
|
-
});
|
|
1248
|
-
}
|
|
1278
|
+
yield encodeCheckpoint(
|
|
1279
|
+
blockToCheckpoint(syncProgress.current, network.chainId, "up"),
|
|
1280
|
+
);
|
|
1281
|
+
|
|
1282
|
+
if (hexToNumber(syncProgress.current.number) === hexToNumber(last.number)) {
|
|
1283
|
+
common.logger.info({
|
|
1284
|
+
service: "sync",
|
|
1285
|
+
msg: `Skipped '${network.name}' historical sync because all blocks are cached`,
|
|
1286
|
+
});
|
|
1249
1287
|
return;
|
|
1288
|
+
} else {
|
|
1289
|
+
common.logger.info({
|
|
1290
|
+
service: "sync",
|
|
1291
|
+
msg: `Started '${network.name}' historical sync with ${formatPercentage(
|
|
1292
|
+
(total - required) / total,
|
|
1293
|
+
)} cached`,
|
|
1294
|
+
});
|
|
1250
1295
|
}
|
|
1251
1296
|
|
|
1252
|
-
|
|
1297
|
+
cursor = hexToNumber(syncProgress.current.number) + 1;
|
|
1298
|
+
} else {
|
|
1299
|
+
common.logger.info({
|
|
1300
|
+
service: "historical",
|
|
1301
|
+
msg: `Started '${network.name}' historical sync with 0% cached`,
|
|
1302
|
+
});
|
|
1253
1303
|
}
|
|
1254
1304
|
|
|
1255
1305
|
while (true) {
|
|
1256
|
-
|
|
1257
|
-
|
|
1258
|
-
|
|
1259
|
-
|
|
1260
|
-
* time spent syncing ≈ time before indexing function feedback.
|
|
1261
|
-
*/
|
|
1306
|
+
// Select a range of blocks to sync bounded by `finalizedBlock`.
|
|
1307
|
+
// It is important for devEx that the interval is not too large, because
|
|
1308
|
+
// time spent syncing ≈ time before indexing function feedback.
|
|
1309
|
+
|
|
1262
1310
|
const interval: Interval = [
|
|
1263
|
-
Math.min(
|
|
1264
|
-
Math.min(
|
|
1311
|
+
Math.min(cursor, hexToNumber(last.number)),
|
|
1312
|
+
Math.min(cursor + estimateRange, hexToNumber(last.number)),
|
|
1265
1313
|
];
|
|
1266
1314
|
|
|
1267
1315
|
const endClock = startClock();
|
|
1268
1316
|
|
|
1269
|
-
const
|
|
1317
|
+
const synced = await historicalSync.sync(interval);
|
|
1318
|
+
|
|
1319
|
+
common.logger.debug({
|
|
1320
|
+
service: "sync",
|
|
1321
|
+
msg: `Synced ${interval[1] - interval[0] + 1} '${network.name}' blocks in range [${interval[0]}, ${interval[1]}]`,
|
|
1322
|
+
});
|
|
1270
1323
|
|
|
1271
1324
|
// Update cursor to record progress
|
|
1272
|
-
|
|
1273
|
-
|
|
1274
|
-
if
|
|
1275
|
-
|
|
1276
|
-
|
|
1277
|
-
|
|
1278
|
-
|
|
1279
|
-
|
|
1280
|
-
|
|
1281
|
-
syncProgress.current = historicalLast;
|
|
1325
|
+
cursor = interval[1] + 1;
|
|
1326
|
+
|
|
1327
|
+
// `synced` will be undefined if a cache hit occur in `historicalSync.sync()`.
|
|
1328
|
+
|
|
1329
|
+
if (synced === undefined) {
|
|
1330
|
+
// If the all known blocks are synced, then update `syncProgress.current`, else
|
|
1331
|
+
// progress to the next iteration.
|
|
1332
|
+
if (interval[1] === hexToNumber(last.number)) {
|
|
1333
|
+
syncProgress.current = last;
|
|
1282
1334
|
} else {
|
|
1283
1335
|
continue;
|
|
1284
1336
|
}
|
|
1285
1337
|
} else {
|
|
1286
|
-
if (interval[1] === hexToNumber(
|
|
1287
|
-
syncProgress.current =
|
|
1338
|
+
if (interval[1] === hexToNumber(last.number)) {
|
|
1339
|
+
syncProgress.current = last;
|
|
1288
1340
|
} else {
|
|
1289
|
-
syncProgress.current =
|
|
1341
|
+
syncProgress.current = synced;
|
|
1290
1342
|
}
|
|
1291
1343
|
|
|
1292
1344
|
const duration = endClock();
|
|
1293
1345
|
|
|
1294
|
-
// Update "ponder_sync_block" metric
|
|
1295
1346
|
common.metrics.ponder_sync_block.set(
|
|
1296
1347
|
label,
|
|
1297
|
-
hexToNumber(syncProgress.current
|
|
1348
|
+
hexToNumber(syncProgress.current!.number),
|
|
1298
1349
|
);
|
|
1299
|
-
|
|
1300
1350
|
common.metrics.ponder_historical_duration.observe(label, duration);
|
|
1301
1351
|
common.metrics.ponder_historical_completed_blocks.inc(
|
|
1302
1352
|
label,
|
|
@@ -1313,12 +1363,203 @@ export async function* localHistoricalSyncGenerator({
|
|
|
1313
1363
|
estimateRange * 2,
|
|
1314
1364
|
100_000,
|
|
1315
1365
|
);
|
|
1366
|
+
|
|
1367
|
+
common.logger.debug({
|
|
1368
|
+
service: "sync",
|
|
1369
|
+
msg: `Updated '${network.name}' historical sync estimate to ${estimateRange} blocks`,
|
|
1370
|
+
});
|
|
1316
1371
|
}
|
|
1317
1372
|
|
|
1318
|
-
yield
|
|
1373
|
+
yield encodeCheckpoint(
|
|
1374
|
+
blockToCheckpoint(syncProgress.current!, network.chainId, "up"),
|
|
1375
|
+
);
|
|
1319
1376
|
|
|
1320
1377
|
if (isSyncEnd(syncProgress) || isSyncFinalized(syncProgress)) {
|
|
1378
|
+
common.logger.info({
|
|
1379
|
+
service: "sync",
|
|
1380
|
+
msg: `Completed '${network.name}' historical sync`,
|
|
1381
|
+
});
|
|
1321
1382
|
return;
|
|
1322
1383
|
}
|
|
1323
1384
|
}
|
|
1324
1385
|
}
|
|
1386
|
+
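Both generators above communicate progress as encoded checkpoint strings, and expressions such as `cursor < min(syncCheckpoint, params.to)` compare those strings directly. This works because checkpoints are encoded as fixed-width, zero-padded digit strings, so lexicographic order coincides with numeric order. A simplified illustration, assuming a reduced two-field encoding rather than ponder's full checkpoint layout:

```ts
// Sketch: fixed-width, zero-padded encoding makes plain string comparison
// agree with numeric ordering. Field widths here are illustrative only.
const encode = (blockTimestamp: number, chainId: number): string =>
  blockTimestamp.toString().padStart(10, "0") +
  chainId.toString().padStart(16, "0");

const a = encode(1_700_000_000, 1);
const b = encode(1_700_000_001, 1);
console.log(a < b); // true: lexicographic order matches timestamp order

// This property is what makes `min(...checkpoints)` and `cursor < to`
// in the generators above safe to express over encoded strings.
```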
+export const getLocalSyncProgress = async ({
+  common,
+  sources,
+  network,
+  requestQueue,
+  intervalsCache,
+}: {
+  common: Common;
+  sources: Source[];
+  network: Network;
+  requestQueue: RequestQueue;
+  intervalsCache: HistoricalSync["intervalsCache"];
+}): Promise<SyncProgress> => {
+  const syncProgress = {} as SyncProgress;
+  const filters = sources.map(({ filter }) => filter);
+
+  // Earliest `fromBlock` among all `filters`
+  const start = Math.min(...filters.map((filter) => filter.fromBlock ?? 0));
+  const cached = getCachedBlock({ filters, intervalsCache });
+
+  const diagnostics = await Promise.all(
+    cached === undefined
+      ? [
+          requestQueue.request({ method: "eth_chainId" }),
+          _eth_getBlockByNumber(requestQueue, { blockTag: "latest" }),
+          _eth_getBlockByNumber(requestQueue, { blockNumber: start }),
+        ]
+      : [
+          requestQueue.request({ method: "eth_chainId" }),
+          _eth_getBlockByNumber(requestQueue, { blockTag: "latest" }),
+          _eth_getBlockByNumber(requestQueue, { blockNumber: start }),
+          _eth_getBlockByNumber(requestQueue, { blockNumber: cached }),
+        ],
+  );
+
+  const finalized = Math.max(
+    0,
+    hexToNumber(diagnostics[1].number) - network.finalityBlockCount,
+  );
+  syncProgress.finalized = await _eth_getBlockByNumber(requestQueue, {
+    blockNumber: finalized,
+  });
+  syncProgress.start = diagnostics[2];
+  if (diagnostics.length === 4) {
+    syncProgress.current = diagnostics[3];
+  }
+
+  // Warn if the config has a different chainId than the remote.
+  if (hexToNumber(diagnostics[0]) !== network.chainId) {
+    common.logger.warn({
+      service: "sync",
+      msg: `Remote chain ID (${diagnostics[0]}) does not match configured chain ID (${network.chainId}) for network "${network.name}"`,
+    });
+  }
+
+  if (filters.some((filter) => filter.toBlock === undefined)) {
+    return syncProgress;
+  }
+
+  // Latest `toBlock` among all `filters`
+  const end = Math.max(...filters.map((filter) => filter.toBlock!));
+
+  if (end > hexToNumber(diagnostics[1].number)) {
+    syncProgress.end = {
+      number: toHex(end),
+      hash: "0x",
+      parentHash: "0x",
+      timestamp: toHex(MAX_CHECKPOINT.blockTimestamp),
+    } satisfies LightBlock;
+  } else {
+    syncProgress.end = await _eth_getBlockByNumber(requestQueue, {
+      blockNumber: end,
+    });
+  }
+
+  return syncProgress;
+};
+
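`getLocalSyncProgress` derives the finalized block by backing off `finalityBlockCount` blocks from the remote head, floored at zero. A quick worked example, with a hypothetical finality depth of 64 blocks:

```ts
// Worked example of the finalized-block arithmetic above; the values are
// hypothetical, not taken from any particular network config.
const latest = 19_000_000;
const finalityBlockCount = 64;
const finalized = Math.max(0, latest - finalityBlockCount);
console.log(finalized); // 18999936 — blocks at or below this height are
// treated as reorg-immune, so their data can be cached permanently
```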
+/** Returns the closest-to-tip block that has been synced for all `sources`. */
+export const getCachedBlock = ({
+  filters,
+  intervalsCache,
+}: {
+  filters: Filter[];
+  intervalsCache: HistoricalSync["intervalsCache"];
+}): number | undefined => {
+  const latestCompletedBlocks = filters.map((filter) => {
+    const requiredInterval = [
+      filter.fromBlock ?? 0,
+      filter.toBlock ?? Number.POSITIVE_INFINITY,
+    ] satisfies Interval;
+    const fragmentIntervals = intervalsCache.get(filter)!;
+
+    const completedIntervals = sortIntervals(
+      intervalIntersection(
+        [requiredInterval],
+        intervalIntersectionMany(
+          fragmentIntervals.map(({ intervals }) => intervals),
+        ),
+      ),
+    );
+
+    if (completedIntervals.length === 0) return undefined;
+
+    const earliestCompletedInterval = completedIntervals[0]!;
+    if (earliestCompletedInterval[0] !== (filter.fromBlock ?? 0)) {
+      return undefined;
+    }
+    return earliestCompletedInterval[1];
+  });
+
+  const minCompletedBlock = Math.min(
+    ...(latestCompletedBlocks.filter(
+      (block) => block !== undefined,
+    ) as number[]),
+  );
+
+  // Filter i has known progress if a completed interval is found or if
+  // `_latestCompletedBlocks[i]` is undefined but `filters[i].fromBlock`
+  // is > `_minCompletedBlock`.
+
+  if (
+    latestCompletedBlocks.every(
+      (block, i) =>
+        block !== undefined || (filters[i]!.fromBlock ?? 0) > minCompletedBlock,
+    )
+  ) {
+    return minCompletedBlock;
+  }
+
+  return undefined;
+};
+
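`getCachedBlock` only reports a cached block when restarting from it would not skip any filter: it takes the minimum of each filter's contiguously completed progress, and filters with no progress are acceptable only if their `fromBlock` lies beyond that minimum. A toy walkthrough with hypothetical cache state:

```ts
// Toy illustration of `getCachedBlock`'s rule; the data is invented.
// Each filter's cache must be contiguous from its own fromBlock to count.
const filters = [
  { fromBlock: 0, completed: [0, 500] as const }, // progress to block 500
  { fromBlock: 1_000, completed: undefined },     // no progress yet
];
const latest = filters.map((f) => f.completed?.[1]);
const minCompleted = Math.min(
  ...(latest.filter((b) => b !== undefined) as number[]),
); // 500

// Filter 2 has no progress, but its fromBlock (1000) is beyond 500, so a
// restart from block 500 loses nothing — the cached block is 500.
const ok = latest.every(
  (b, i) => b !== undefined || filters[i]!.fromBlock > minCompleted,
);
console.log(ok ? minCompleted : undefined); // 500
```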
+/**
+ * Merges multiple event generators into a single generator while preserving
+ * the order of events.
+ *
+ * @param generators - Generators to merge.
+ * @returns A single generator that yields events from all generators.
+ */
+export async function* mergeAsyncGeneratorsWithEventOrder(
+  generators: AsyncGenerator<{ events: RawEvent[]; checkpoint: string }>[],
+): AsyncGenerator<{ events: RawEvent[]; checkpoint: string }> {
+  const results = await Promise.all(generators.map((gen) => gen.next()));
+
+  while (results.some((res) => res.done !== true)) {
+    const supremum = min(
+      ...results.map((res) => (res.done ? undefined : res.value.checkpoint)),
+    );
+
+    const eventArrays: RawEvent[][] = [];
+
+    for (const result of results) {
+      if (result.done === false) {
+        const [left, right] = partition(
+          result.value.events,
+          (event) => event.checkpoint <= supremum,
+        );
+
+        eventArrays.push(left);
+        result.value.events = right;
+      }
+    }
+
+    const events = zipperMany(eventArrays).sort((a, b) =>
+      a.checkpoint < b.checkpoint ? -1 : 1,
+    );
+
+    const index = results.findIndex(
+      (res) => res.done === false && res.value.checkpoint === supremum,
+    );
+
+    const resultPromise = generators[index]!.next();
+    if (events.length > 0) {
+      yield { events, checkpoint: supremum };
+    }
+    results[index] = await resultPromise;
+  }
+}
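A usage sketch for `mergeAsyncGeneratorsWithEventOrder` with two hypothetical per-network generators. The merge only releases events up to the smallest checkpoint any live source has reached, so the output stays globally ordered even though each source batches independently:

```ts
// Usage sketch: events are reduced to bare checkpoints for brevity (real
// RawEvents carry more fields, hence the casts), and network names are made up.
type Batch = { events: { checkpoint: string }[]; checkpoint: string };

async function* source(batches: Batch[]) {
  for (const batch of batches) yield batch;
}

const mainnet = source([
  { events: [{ checkpoint: "1" }, { checkpoint: "3" }], checkpoint: "3" },
]);
const base = source([{ events: [{ checkpoint: "2" }], checkpoint: "2" }]);

for await (const { events, checkpoint } of mergeAsyncGeneratorsWithEventOrder(
  [mainnet, base] as any,
)) {
  console.log(checkpoint, events.map((e: any) => e.checkpoint));
  // -> "2" ["1", "2"], then "3" ["3"]: event "3" is held back until the
  //    second source is known to have nothing earlier than it.
}
```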