ponder 0.9.5-debug.1 → 0.9.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{types/bin → bin}/ponder.d.ts +6 -5
- package/dist/bin/ponder.js +12933 -0
- package/dist/bin/ponder.js.map +1 -0
- package/dist/chunk-K2TLRLX3.js +163 -0
- package/dist/chunk-K2TLRLX3.js.map +1 -0
- package/dist/chunk-LHCA5XFV.js +257 -0
- package/dist/chunk-LHCA5XFV.js.map +1 -0
- package/dist/{types/drizzle → drizzle}/onchain.d.ts +96 -49
- package/dist/drizzle/onchain.js +19 -0
- package/dist/drizzle/onchain.js.map +1 -0
- package/dist/index.d.ts +819 -0
- package/dist/index.js +2217 -0
- package/dist/index.js.map +1 -0
- package/dist/utils-ceNucOJb.d.ts +14 -0
- package/package.json +17 -17
- package/src/bin/commands/dev.ts +1 -1
- package/src/bin/ponder.ts +1 -1
- package/src/build/configAndIndexingFunctions.ts +1 -1
- package/src/build/factory.ts +1 -1
- package/src/client/index.ts +1 -1
- package/src/indexing-store/historical.ts +1 -1
- package/src/indexing-store/realtime.ts +1 -1
- package/src/internal/telemetry.ts +1 -1
- package/src/sync/fragments.ts +1 -1
- package/src/sync-realtime/index.ts +1 -1
- package/src/utils/generators.ts +1 -1
- package/src/utils/mutex.ts +1 -1
- package/src/utils/requestQueue.ts +1 -1
- package/CHANGELOG.md +0 -2201
- package/dist/esm/bin/commands/codegen.js +0 -37
- package/dist/esm/bin/commands/codegen.js.map +0 -1
- package/dist/esm/bin/commands/dev.js +0 -242
- package/dist/esm/bin/commands/dev.js.map +0 -1
- package/dist/esm/bin/commands/list.js +0 -103
- package/dist/esm/bin/commands/list.js.map +0 -1
- package/dist/esm/bin/commands/serve.js +0 -123
- package/dist/esm/bin/commands/serve.js.map +0 -1
- package/dist/esm/bin/commands/start.js +0 -136
- package/dist/esm/bin/commands/start.js.map +0 -1
- package/dist/esm/bin/ponder.js +0 -118
- package/dist/esm/bin/ponder.js.map +0 -1
- package/dist/esm/bin/utils/codegen.js +0 -26
- package/dist/esm/bin/utils/codegen.js.map +0 -1
- package/dist/esm/bin/utils/exit.js +0 -69
- package/dist/esm/bin/utils/exit.js.map +0 -1
- package/dist/esm/bin/utils/run.js +0 -247
- package/dist/esm/bin/utils/run.js.map +0 -1
- package/dist/esm/bin/utils/runServer.js +0 -8
- package/dist/esm/bin/utils/runServer.js.map +0 -1
- package/dist/esm/build/configAndIndexingFunctions.js +0 -654
- package/dist/esm/build/configAndIndexingFunctions.js.map +0 -1
- package/dist/esm/build/factory.js +0 -43
- package/dist/esm/build/factory.js.map +0 -1
- package/dist/esm/build/index.js +0 -431
- package/dist/esm/build/index.js.map +0 -1
- package/dist/esm/build/plugin.js +0 -43
- package/dist/esm/build/plugin.js.map +0 -1
- package/dist/esm/build/pre.js +0 -112
- package/dist/esm/build/pre.js.map +0 -1
- package/dist/esm/build/schema.js +0 -89
- package/dist/esm/build/schema.js.map +0 -1
- package/dist/esm/build/stacktrace.js +0 -137
- package/dist/esm/build/stacktrace.js.map +0 -1
- package/dist/esm/client/index.js +0 -124
- package/dist/esm/client/index.js.map +0 -1
- package/dist/esm/client/validate.js +0 -1151
- package/dist/esm/client/validate.js.map +0 -1
- package/dist/esm/config/address.js +0 -2
- package/dist/esm/config/address.js.map +0 -1
- package/dist/esm/config/eventFilter.js +0 -2
- package/dist/esm/config/eventFilter.js.map +0 -1
- package/dist/esm/config/index.js +0 -2
- package/dist/esm/config/index.js.map +0 -1
- package/dist/esm/config/networks.js +0 -120
- package/dist/esm/config/networks.js.map +0 -1
- package/dist/esm/config/utilityTypes.js +0 -2
- package/dist/esm/config/utilityTypes.js.map +0 -1
- package/dist/esm/database/index.js +0 -914
- package/dist/esm/database/index.js.map +0 -1
- package/dist/esm/drizzle/bigint.js +0 -36
- package/dist/esm/drizzle/bigint.js.map +0 -1
- package/dist/esm/drizzle/hex.js +0 -38
- package/dist/esm/drizzle/hex.js.map +0 -1
- package/dist/esm/drizzle/index.js +0 -43
- package/dist/esm/drizzle/index.js.map +0 -1
- package/dist/esm/drizzle/kit/index.js +0 -658
- package/dist/esm/drizzle/kit/index.js.map +0 -1
- package/dist/esm/drizzle/onchain.js +0 -102
- package/dist/esm/drizzle/onchain.js.map +0 -1
- package/dist/esm/graphql/index.js +0 -704
- package/dist/esm/graphql/index.js.map +0 -1
- package/dist/esm/graphql/json.js +0 -42
- package/dist/esm/graphql/json.js.map +0 -1
- package/dist/esm/graphql/middleware.js +0 -80
- package/dist/esm/graphql/middleware.js.map +0 -1
- package/dist/esm/index.js +0 -9
- package/dist/esm/index.js.map +0 -1
- package/dist/esm/indexing/addStackTrace.js +0 -54
- package/dist/esm/indexing/addStackTrace.js.map +0 -1
- package/dist/esm/indexing/index.js +0 -10
- package/dist/esm/indexing/index.js.map +0 -1
- package/dist/esm/indexing/ponderActions.js +0 -60
- package/dist/esm/indexing/ponderActions.js.map +0 -1
- package/dist/esm/indexing/service.js +0 -312
- package/dist/esm/indexing/service.js.map +0 -1
- package/dist/esm/indexing-store/historical.js +0 -591
- package/dist/esm/indexing-store/historical.js.map +0 -1
- package/dist/esm/indexing-store/index.js +0 -19
- package/dist/esm/indexing-store/index.js.map +0 -1
- package/dist/esm/indexing-store/metadata.js +0 -46
- package/dist/esm/indexing-store/metadata.js.map +0 -1
- package/dist/esm/indexing-store/realtime.js +0 -295
- package/dist/esm/indexing-store/realtime.js.map +0 -1
- package/dist/esm/internal/common.js +0 -2
- package/dist/esm/internal/common.js.map +0 -1
- package/dist/esm/internal/errors.js +0 -175
- package/dist/esm/internal/errors.js.map +0 -1
- package/dist/esm/internal/logger.js +0 -96
- package/dist/esm/internal/logger.js.map +0 -1
- package/dist/esm/internal/metrics.js +0 -569
- package/dist/esm/internal/metrics.js.map +0 -1
- package/dist/esm/internal/options.js +0 -69
- package/dist/esm/internal/options.js.map +0 -1
- package/dist/esm/internal/shutdown.js +0 -18
- package/dist/esm/internal/shutdown.js.map +0 -1
- package/dist/esm/internal/telemetry.js +0 -199
- package/dist/esm/internal/telemetry.js.map +0 -1
- package/dist/esm/internal/types.js +0 -2
- package/dist/esm/internal/types.js.map +0 -1
- package/dist/esm/server/error.js +0 -55
- package/dist/esm/server/error.js.map +0 -1
- package/dist/esm/server/index.js +0 -107
- package/dist/esm/server/index.js.map +0 -1
- package/dist/esm/sync/abi.js +0 -67
- package/dist/esm/sync/abi.js.map +0 -1
- package/dist/esm/sync/events.js +0 -607
- package/dist/esm/sync/events.js.map +0 -1
- package/dist/esm/sync/filter.js +0 -356
- package/dist/esm/sync/filter.js.map +0 -1
- package/dist/esm/sync/fragments.js +0 -300
- package/dist/esm/sync/fragments.js.map +0 -1
- package/dist/esm/sync/index.js +0 -1001
- package/dist/esm/sync/index.js.map +0 -1
- package/dist/esm/sync/transport.js +0 -94
- package/dist/esm/sync/transport.js.map +0 -1
- package/dist/esm/sync-historical/index.js +0 -590
- package/dist/esm/sync-historical/index.js.map +0 -1
- package/dist/esm/sync-realtime/bloom.js +0 -75
- package/dist/esm/sync-realtime/bloom.js.map +0 -1
- package/dist/esm/sync-realtime/index.js +0 -794
- package/dist/esm/sync-realtime/index.js.map +0 -1
- package/dist/esm/sync-store/encoding.js +0 -157
- package/dist/esm/sync-store/encoding.js.map +0 -1
- package/dist/esm/sync-store/index.js +0 -727
- package/dist/esm/sync-store/index.js.map +0 -1
- package/dist/esm/sync-store/migrations.js +0 -1186
- package/dist/esm/sync-store/migrations.js.map +0 -1
- package/dist/esm/types/db.js +0 -2
- package/dist/esm/types/db.js.map +0 -1
- package/dist/esm/types/eth.js +0 -2
- package/dist/esm/types/eth.js.map +0 -1
- package/dist/esm/types/sync.js +0 -2
- package/dist/esm/types/sync.js.map +0 -1
- package/dist/esm/types/utils.js +0 -2
- package/dist/esm/types/utils.js.map +0 -1
- package/dist/esm/types/virtual.js +0 -2
- package/dist/esm/types/virtual.js.map +0 -1
- package/dist/esm/ui/ProgressBar.js +0 -11
- package/dist/esm/ui/ProgressBar.js.map +0 -1
- package/dist/esm/ui/Table.js +0 -50
- package/dist/esm/ui/Table.js.map +0 -1
- package/dist/esm/ui/app.js +0 -113
- package/dist/esm/ui/app.js.map +0 -1
- package/dist/esm/ui/graphiql.html.js +0 -59
- package/dist/esm/ui/graphiql.html.js.map +0 -1
- package/dist/esm/ui/index.js +0 -21
- package/dist/esm/ui/index.js.map +0 -1
- package/dist/esm/utils/bigint.js +0 -37
- package/dist/esm/utils/bigint.js.map +0 -1
- package/dist/esm/utils/chains.js +0 -3
- package/dist/esm/utils/chains.js.map +0 -1
- package/dist/esm/utils/checkpoint.js +0 -114
- package/dist/esm/utils/checkpoint.js.map +0 -1
- package/dist/esm/utils/chunk.js +0 -8
- package/dist/esm/utils/chunk.js.map +0 -1
- package/dist/esm/utils/date.js +0 -27
- package/dist/esm/utils/date.js.map +0 -1
- package/dist/esm/utils/debug.js +0 -2
- package/dist/esm/utils/debug.js.map +0 -1
- package/dist/esm/utils/dedupe.js +0 -33
- package/dist/esm/utils/dedupe.js.map +0 -1
- package/dist/esm/utils/duplicates.js +0 -19
- package/dist/esm/utils/duplicates.js.map +0 -1
- package/dist/esm/utils/estimate.js +0 -6
- package/dist/esm/utils/estimate.js.map +0 -1
- package/dist/esm/utils/extend.js +0 -28
- package/dist/esm/utils/extend.js.map +0 -1
- package/dist/esm/utils/format.js +0 -20
- package/dist/esm/utils/format.js.map +0 -1
- package/dist/esm/utils/generators.js +0 -77
- package/dist/esm/utils/generators.js.map +0 -1
- package/dist/esm/utils/hash.js +0 -11
- package/dist/esm/utils/hash.js.map +0 -1
- package/dist/esm/utils/interval.js +0 -171
- package/dist/esm/utils/interval.js.map +0 -1
- package/dist/esm/utils/lowercase.js +0 -7
- package/dist/esm/utils/lowercase.js.map +0 -1
- package/dist/esm/utils/mutex.js +0 -25
- package/dist/esm/utils/mutex.js.map +0 -1
- package/dist/esm/utils/never.js +0 -4
- package/dist/esm/utils/never.js.map +0 -1
- package/dist/esm/utils/offset.js +0 -73
- package/dist/esm/utils/offset.js.map +0 -1
- package/dist/esm/utils/order.js +0 -18
- package/dist/esm/utils/order.js.map +0 -1
- package/dist/esm/utils/partition.js +0 -37
- package/dist/esm/utils/partition.js.map +0 -1
- package/dist/esm/utils/pg.js +0 -126
- package/dist/esm/utils/pg.js.map +0 -1
- package/dist/esm/utils/pglite.js +0 -80
- package/dist/esm/utils/pglite.js.map +0 -1
- package/dist/esm/utils/port.js +0 -30
- package/dist/esm/utils/port.js.map +0 -1
- package/dist/esm/utils/print.js +0 -23
- package/dist/esm/utils/print.js.map +0 -1
- package/dist/esm/utils/promiseWithResolvers.js +0 -13
- package/dist/esm/utils/promiseWithResolvers.js.map +0 -1
- package/dist/esm/utils/queue.js +0 -145
- package/dist/esm/utils/queue.js.map +0 -1
- package/dist/esm/utils/range.js +0 -8
- package/dist/esm/utils/range.js.map +0 -1
- package/dist/esm/utils/requestQueue.js +0 -127
- package/dist/esm/utils/requestQueue.js.map +0 -1
- package/dist/esm/utils/result.js +0 -10
- package/dist/esm/utils/result.js.map +0 -1
- package/dist/esm/utils/rpc.js +0 -202
- package/dist/esm/utils/rpc.js.map +0 -1
- package/dist/esm/utils/serialize.js +0 -23
- package/dist/esm/utils/serialize.js.map +0 -1
- package/dist/esm/utils/timer.js +0 -17
- package/dist/esm/utils/timer.js.map +0 -1
- package/dist/esm/utils/wait.js +0 -8
- package/dist/esm/utils/wait.js.map +0 -1
- package/dist/esm/utils/zipper.js +0 -67
- package/dist/esm/utils/zipper.js.map +0 -1
- package/dist/types/bin/commands/codegen.d.ts +0 -5
- package/dist/types/bin/commands/codegen.d.ts.map +0 -1
- package/dist/types/bin/commands/dev.d.ts +0 -5
- package/dist/types/bin/commands/dev.d.ts.map +0 -1
- package/dist/types/bin/commands/list.d.ts +0 -5
- package/dist/types/bin/commands/list.d.ts.map +0 -1
- package/dist/types/bin/commands/serve.d.ts +0 -5
- package/dist/types/bin/commands/serve.d.ts.map +0 -1
- package/dist/types/bin/commands/start.d.ts +0 -5
- package/dist/types/bin/commands/start.d.ts.map +0 -1
- package/dist/types/bin/ponder.d.ts.map +0 -1
- package/dist/types/bin/utils/codegen.d.ts +0 -6
- package/dist/types/bin/utils/codegen.d.ts.map +0 -1
- package/dist/types/bin/utils/exit.d.ts +0 -9
- package/dist/types/bin/utils/exit.d.ts.map +0 -1
- package/dist/types/bin/utils/run.d.ts +0 -14
- package/dist/types/bin/utils/run.d.ts.map +0 -1
- package/dist/types/bin/utils/runServer.d.ts +0 -12
- package/dist/types/bin/utils/runServer.d.ts.map +0 -1
- package/dist/types/build/configAndIndexingFunctions.d.ts +0 -37
- package/dist/types/build/configAndIndexingFunctions.d.ts.map +0 -1
- package/dist/types/build/factory.d.ts +0 -10
- package/dist/types/build/factory.d.ts.map +0 -1
- package/dist/types/build/index.d.ts +0 -70
- package/dist/types/build/index.d.ts.map +0 -1
- package/dist/types/build/plugin.d.ts +0 -4
- package/dist/types/build/plugin.d.ts.map +0 -1
- package/dist/types/build/pre.d.ts +0 -35
- package/dist/types/build/pre.d.ts.map +0 -1
- package/dist/types/build/schema.d.ts +0 -18
- package/dist/types/build/schema.d.ts.map +0 -1
- package/dist/types/build/stacktrace.d.ts +0 -13
- package/dist/types/build/stacktrace.d.ts.map +0 -1
- package/dist/types/client/index.d.ts +0 -27
- package/dist/types/client/index.d.ts.map +0 -1
- package/dist/types/client/validate.d.ts +0 -2
- package/dist/types/client/validate.d.ts.map +0 -1
- package/dist/types/config/address.d.ts +0 -14
- package/dist/types/config/address.d.ts.map +0 -1
- package/dist/types/config/eventFilter.d.ts +0 -18
- package/dist/types/config/eventFilter.d.ts.map +0 -1
- package/dist/types/config/index.d.ts +0 -143
- package/dist/types/config/index.d.ts.map +0 -1
- package/dist/types/config/networks.d.ts +0 -30
- package/dist/types/config/networks.d.ts.map +0 -1
- package/dist/types/config/utilityTypes.d.ts +0 -43
- package/dist/types/config/utilityTypes.d.ts.map +0 -1
- package/dist/types/database/index.d.ts +0 -91
- package/dist/types/database/index.d.ts.map +0 -1
- package/dist/types/drizzle/bigint.d.ts +0 -25
- package/dist/types/drizzle/bigint.d.ts.map +0 -1
- package/dist/types/drizzle/hex.d.ts +0 -25
- package/dist/types/drizzle/hex.d.ts.map +0 -1
- package/dist/types/drizzle/index.d.ts +0 -14
- package/dist/types/drizzle/index.d.ts.map +0 -1
- package/dist/types/drizzle/kit/index.d.ts +0 -49
- package/dist/types/drizzle/kit/index.d.ts.map +0 -1
- package/dist/types/drizzle/onchain.d.ts.map +0 -1
- package/dist/types/graphql/index.d.ts +0 -14
- package/dist/types/graphql/index.d.ts.map +0 -1
- package/dist/types/graphql/json.d.ts +0 -3
- package/dist/types/graphql/json.d.ts.map +0 -1
- package/dist/types/graphql/middleware.d.ts +0 -29
- package/dist/types/graphql/middleware.d.ts.map +0 -1
- package/dist/types/index.d.ts +0 -18
- package/dist/types/index.d.ts.map +0 -1
- package/dist/types/indexing/addStackTrace.d.ts +0 -3
- package/dist/types/indexing/addStackTrace.d.ts.map +0 -1
- package/dist/types/indexing/index.d.ts +0 -575
- package/dist/types/indexing/index.d.ts.map +0 -1
- package/dist/types/indexing/ponderActions.d.ts +0 -47
- package/dist/types/indexing/ponderActions.d.ts.map +0 -1
- package/dist/types/indexing/service.d.ts +0 -73
- package/dist/types/indexing/service.d.ts.map +0 -1
- package/dist/types/indexing-store/historical.d.ts +0 -19
- package/dist/types/indexing-store/historical.d.ts.map +0 -1
- package/dist/types/indexing-store/index.d.ts +0 -10
- package/dist/types/indexing-store/index.d.ts.map +0 -1
- package/dist/types/indexing-store/metadata.d.ts +0 -10
- package/dist/types/indexing-store/metadata.d.ts.map +0 -1
- package/dist/types/indexing-store/realtime.d.ts +0 -10
- package/dist/types/indexing-store/realtime.d.ts.map +0 -1
- package/dist/types/internal/common.d.ts +0 -13
- package/dist/types/internal/common.d.ts.map +0 -1
- package/dist/types/internal/errors.d.ts +0 -55
- package/dist/types/internal/errors.d.ts.map +0 -1
- package/dist/types/internal/logger.d.ts +0 -26
- package/dist/types/internal/logger.d.ts.map +0 -1
- package/dist/types/internal/metrics.d.ts +0 -77
- package/dist/types/internal/metrics.d.ts.map +0 -1
- package/dist/types/internal/options.d.ts +0 -59
- package/dist/types/internal/options.d.ts.map +0 -1
- package/dist/types/internal/shutdown.d.ts +0 -8
- package/dist/types/internal/shutdown.d.ts.map +0 -1
- package/dist/types/internal/telemetry.d.ts +0 -43
- package/dist/types/internal/telemetry.d.ts.map +0 -1
- package/dist/types/internal/types.d.ts +0 -328
- package/dist/types/internal/types.d.ts.map +0 -1
- package/dist/types/server/error.d.ts +0 -5
- package/dist/types/server/error.d.ts.map +0 -1
- package/dist/types/server/index.d.ts +0 -13
- package/dist/types/server/index.d.ts.map +0 -1
- package/dist/types/sync/abi.d.ts +0 -54
- package/dist/types/sync/abi.d.ts.map +0 -1
- package/dist/types/sync/events.d.ts +0 -24
- package/dist/types/sync/events.d.ts.map +0 -1
- package/dist/types/sync/filter.d.ts +0 -71
- package/dist/types/sync/filter.d.ts.map +0 -1
- package/dist/types/sync/fragments.d.ts +0 -21
- package/dist/types/sync/fragments.d.ts.map +0 -1
- package/dist/types/sync/index.d.ts +0 -112
- package/dist/types/sync/index.d.ts.map +0 -1
- package/dist/types/sync/transport.d.ts +0 -8
- package/dist/types/sync/transport.d.ts.map +0 -1
- package/dist/types/sync-historical/index.d.ts +0 -28
- package/dist/types/sync-historical/index.d.ts.map +0 -1
- package/dist/types/sync-realtime/bloom.d.ts +0 -19
- package/dist/types/sync-realtime/bloom.d.ts.map +0 -1
- package/dist/types/sync-realtime/index.d.ts +0 -48
- package/dist/types/sync-realtime/index.d.ts.map +0 -1
- package/dist/types/sync-store/encoding.d.ts +0 -151
- package/dist/types/sync-store/encoding.d.ts.map +0 -1
- package/dist/types/sync-store/index.d.ts +0 -104
- package/dist/types/sync-store/index.d.ts.map +0 -1
- package/dist/types/sync-store/migrations.d.ts +0 -13
- package/dist/types/sync-store/migrations.d.ts.map +0 -1
- package/dist/types/types/db.d.ts +0 -213
- package/dist/types/types/db.d.ts.map +0 -1
- package/dist/types/types/eth.d.ts +0 -196
- package/dist/types/types/eth.d.ts.map +0 -1
- package/dist/types/types/sync.d.ts +0 -15
- package/dist/types/types/sync.d.ts.map +0 -1
- package/dist/types/types/utils.d.ts +0 -22
- package/dist/types/types/utils.d.ts.map +0 -1
- package/dist/types/types/virtual.d.ts +0 -95
- package/dist/types/types/virtual.d.ts.map +0 -1
- package/dist/types/ui/ProgressBar.d.ts +0 -7
- package/dist/types/ui/ProgressBar.d.ts.map +0 -1
- package/dist/types/ui/Table.d.ts +0 -24
- package/dist/types/ui/Table.d.ts.map +0 -1
- package/dist/types/ui/app.d.ts +0 -14
- package/dist/types/ui/app.d.ts.map +0 -1
- package/dist/types/ui/graphiql.html.d.ts +0 -2
- package/dist/types/ui/graphiql.html.d.ts.map +0 -1
- package/dist/types/ui/index.d.ts +0 -5
- package/dist/types/ui/index.d.ts.map +0 -1
- package/dist/types/utils/bigint.d.ts +0 -15
- package/dist/types/utils/bigint.d.ts.map +0 -1
- package/dist/types/utils/chains.d.ts +0 -3
- package/dist/types/utils/chains.d.ts.map +0 -1
- package/dist/types/utils/checkpoint.d.ts +0 -40
- package/dist/types/utils/checkpoint.d.ts.map +0 -1
- package/dist/types/utils/chunk.d.ts +0 -2
- package/dist/types/utils/chunk.d.ts.map +0 -1
- package/dist/types/utils/date.d.ts +0 -7
- package/dist/types/utils/date.d.ts.map +0 -1
- package/dist/types/utils/debug.d.ts +0 -105
- package/dist/types/utils/debug.d.ts.map +0 -1
- package/dist/types/utils/dedupe.d.ts +0 -20
- package/dist/types/utils/dedupe.d.ts.map +0 -1
- package/dist/types/utils/duplicates.d.ts +0 -7
- package/dist/types/utils/duplicates.d.ts.map +0 -1
- package/dist/types/utils/estimate.d.ts +0 -11
- package/dist/types/utils/estimate.d.ts.map +0 -1
- package/dist/types/utils/extend.d.ts +0 -13
- package/dist/types/utils/extend.d.ts.map +0 -1
- package/dist/types/utils/format.d.ts +0 -3
- package/dist/types/utils/format.d.ts.map +0 -1
- package/dist/types/utils/generators.d.ts +0 -23
- package/dist/types/utils/generators.d.ts.map +0 -1
- package/dist/types/utils/hash.d.ts +0 -11
- package/dist/types/utils/hash.d.ts.map +0 -1
- package/dist/types/utils/interval.d.ts +0 -53
- package/dist/types/utils/interval.d.ts.map +0 -1
- package/dist/types/utils/lowercase.d.ts +0 -5
- package/dist/types/utils/lowercase.d.ts.map +0 -1
- package/dist/types/utils/mutex.d.ts +0 -8
- package/dist/types/utils/mutex.d.ts.map +0 -1
- package/dist/types/utils/never.d.ts +0 -2
- package/dist/types/utils/never.d.ts.map +0 -1
- package/dist/types/utils/offset.d.ts +0 -3
- package/dist/types/utils/offset.d.ts.map +0 -1
- package/dist/types/utils/order.d.ts +0 -2
- package/dist/types/utils/order.d.ts.map +0 -1
- package/dist/types/utils/partition.d.ts +0 -22
- package/dist/types/utils/partition.d.ts.map +0 -1
- package/dist/types/utils/pg.d.ts +0 -5
- package/dist/types/utils/pg.d.ts.map +0 -1
- package/dist/types/utils/pglite.d.ts +0 -25
- package/dist/types/utils/pglite.d.ts.map +0 -1
- package/dist/types/utils/port.d.ts +0 -5
- package/dist/types/utils/port.d.ts.map +0 -1
- package/dist/types/utils/print.d.ts +0 -2
- package/dist/types/utils/print.d.ts.map +0 -1
- package/dist/types/utils/promiseWithResolvers.d.ts +0 -10
- package/dist/types/utils/promiseWithResolvers.d.ts.map +0 -1
- package/dist/types/utils/queue.d.ts +0 -33
- package/dist/types/utils/queue.d.ts.map +0 -1
- package/dist/types/utils/range.d.ts +0 -8
- package/dist/types/utils/range.d.ts.map +0 -1
- package/dist/types/utils/requestQueue.d.ts +0 -21
- package/dist/types/utils/requestQueue.d.ts.map +0 -1
- package/dist/types/utils/result.d.ts +0 -17
- package/dist/types/utils/result.d.ts.map +0 -1
- package/dist/types/utils/rpc.d.ts +0 -57
- package/dist/types/utils/rpc.d.ts.map +0 -1
- package/dist/types/utils/serialize.d.ts +0 -19
- package/dist/types/utils/serialize.d.ts.map +0 -1
- package/dist/types/utils/timer.d.ts +0 -11
- package/dist/types/utils/timer.d.ts.map +0 -1
- package/dist/types/utils/wait.d.ts +0 -6
- package/dist/types/utils/wait.d.ts.map +0 -1
- package/dist/types/utils/zipper.d.ts +0 -36
- package/dist/types/utils/zipper.d.ts.map +0 -1
- package/src/utils/dedupe.ts +0 -40
- package/src/utils/promiseWithResolvers.ts +0 -20
- package/src/utils/queue.ts +0 -250
|
@@ -1,794 +0,0 @@
|
|
|
1
|
-
import { ShutdownError } from '../internal/errors.js';
|
|
2
|
-
import { getChildAddress, isAddressFactory, isBlockFilterMatched, isLogFactoryMatched, isLogFilterMatched, isTraceFilterMatched, isTransactionFilterMatched, isTransferFilterMatched, shouldGetTransactionReceipt, } from '../sync/filter.js';
|
|
3
|
-
import { syncBlockToLightBlock } from '../sync/index.js';
|
|
4
|
-
import { mutex } from '../utils/mutex.js';
|
|
5
|
-
import { range } from '../utils/range.js';
|
|
6
|
-
import { _debug_traceBlockByHash, _eth_getBlockByHash, _eth_getBlockByNumber, _eth_getBlockReceipts, _eth_getLogs, _eth_getTransactionReceipt, } from '../utils/rpc.js';
|
|
7
|
-
import { startClock } from '../utils/timer.js';
|
|
8
|
-
import { wait } from '../utils/wait.js';
|
|
9
|
-
import { hexToNumber, zeroHash } from "viem";
|
|
10
|
-
import { isFilterInBloom, zeroLogsBloom } from "./bloom.js";
|
|
11
|
-
const ERROR_TIMEOUT = [
|
|
12
|
-
1, 2, 5, 10, 30, 60, 60, 60, 60, 60, 60, 60, 60, 60,
|
|
13
|
-
];
|
|
14
|
-
const MAX_QUEUED_BLOCKS = 25;
|
|
15
|
-
export const createRealtimeSync = (args) => {
|
|
16
|
-
////////
|
|
17
|
-
// state
|
|
18
|
-
////////
|
|
19
|
-
let isBlockReceipts = true;
|
|
20
|
-
let finalizedBlock;
|
|
21
|
-
let finalizedChildAddresses;
|
|
22
|
-
const unfinalizedChildAddresses = new Map();
|
|
23
|
-
const factoryLogsPerBlock = new Map();
|
|
24
|
-
/**
|
|
25
|
-
* Blocks that have been ingested and are
|
|
26
|
-
* waiting to be finalized. It is an invariant that
|
|
27
|
-
* all blocks are linked to each other,
|
|
28
|
-
* `parentHash` => `hash`.
|
|
29
|
-
*/
|
|
30
|
-
let unfinalizedBlocks = [];
|
|
31
|
-
// let queue: Queue<void, BlockWithEventData & { endClock?: () => number }>;
|
|
32
|
-
let consecutiveErrors = 0;
|
|
33
|
-
let interval;
|
|
34
|
-
const factories = [];
|
|
35
|
-
const logFilters = [];
|
|
36
|
-
const traceFilters = [];
|
|
37
|
-
const transactionFilters = [];
|
|
38
|
-
const transferFilters = [];
|
|
39
|
-
const blockFilters = [];
|
|
40
|
-
for (const source of args.sources) {
|
|
41
|
-
// Collect filters from sources
|
|
42
|
-
if (source.type === "contract") {
|
|
43
|
-
if (source.filter.type === "log") {
|
|
44
|
-
logFilters.push(source.filter);
|
|
45
|
-
}
|
|
46
|
-
else if (source.filter.type === "trace") {
|
|
47
|
-
traceFilters.push(source.filter);
|
|
48
|
-
}
|
|
49
|
-
}
|
|
50
|
-
else if (source.type === "account") {
|
|
51
|
-
if (source.filter.type === "transaction") {
|
|
52
|
-
transactionFilters.push(source.filter);
|
|
53
|
-
}
|
|
54
|
-
else if (source.filter.type === "transfer") {
|
|
55
|
-
transferFilters.push(source.filter);
|
|
56
|
-
}
|
|
57
|
-
}
|
|
58
|
-
else if (source.type === "block") {
|
|
59
|
-
blockFilters.push(source.filter);
|
|
60
|
-
}
|
|
61
|
-
// Collect factories from sources
|
|
62
|
-
switch (source.filter.type) {
|
|
63
|
-
case "trace":
|
|
64
|
-
case "transaction":
|
|
65
|
-
case "transfer": {
|
|
66
|
-
const { fromAddress, toAddress } = source.filter;
|
|
67
|
-
if (isAddressFactory(fromAddress)) {
|
|
68
|
-
factories.push(fromAddress);
|
|
69
|
-
}
|
|
70
|
-
if (isAddressFactory(toAddress)) {
|
|
71
|
-
factories.push(toAddress);
|
|
72
|
-
}
|
|
73
|
-
break;
|
|
74
|
-
}
|
|
75
|
-
case "log": {
|
|
76
|
-
const { address } = source.filter;
|
|
77
|
-
if (isAddressFactory(address)) {
|
|
78
|
-
factories.push(address);
|
|
79
|
-
}
|
|
80
|
-
break;
|
|
81
|
-
}
|
|
82
|
-
}
|
|
83
|
-
}
|
|
84
|
-
for (const factory of factories) {
|
|
85
|
-
unfinalizedChildAddresses.set(factory, new Set());
|
|
86
|
-
}
|
|
87
|
-
/**
|
|
88
|
-
* 1) Determine if a reorg occurred.
|
|
89
|
-
* 2) Insert new event data into the store.
|
|
90
|
-
* 3) Determine if a new range of events has become finalized,
|
|
91
|
-
* if so insert interval to store and remove the finalized data.
|
|
92
|
-
*
|
|
93
|
-
* @param block Block to be injested. Must be exactly
|
|
94
|
-
* 1 block ahead of the local chain.
|
|
95
|
-
* @returns true if a reorg occurred
|
|
96
|
-
*/
|
|
97
|
-
const handleBlock = async ({ block, logs, factoryLogs, traces, transactions, transactionReceipts, endClock, }) => {
|
|
98
|
-
args.common.logger.debug({
|
|
99
|
-
service: "realtime",
|
|
100
|
-
msg: `Started syncing '${args.network.name}' block ${hexToNumber(block.number)}`,
|
|
101
|
-
});
|
|
102
|
-
// Update `unfinalizedChildAddresses`
|
|
103
|
-
for (const log of factoryLogs) {
|
|
104
|
-
for (const filter of factories) {
|
|
105
|
-
if (isLogFactoryMatched({ filter, log })) {
|
|
106
|
-
unfinalizedChildAddresses
|
|
107
|
-
.get(filter)
|
|
108
|
-
.add(getChildAddress({ log, factory: filter }));
|
|
109
|
-
}
|
|
110
|
-
}
|
|
111
|
-
}
|
|
112
|
-
/**
|
|
113
|
-
* `logs` and `callTraces` must be filtered again (already filtered in `extract`)
|
|
114
|
-
* because `extract` doesn't have factory address information.
|
|
115
|
-
*/
|
|
116
|
-
const matchedFilters = new Set();
|
|
117
|
-
// Remove logs that don't match a filter, accounting for factory addresses
|
|
118
|
-
logs = logs.filter((log) => {
|
|
119
|
-
let isMatched = false;
|
|
120
|
-
for (const filter of logFilters) {
|
|
121
|
-
const childAddresses = isAddressFactory(filter.address)
|
|
122
|
-
? [
|
|
123
|
-
finalizedChildAddresses.get(filter.address),
|
|
124
|
-
unfinalizedChildAddresses.get(filter.address),
|
|
125
|
-
]
|
|
126
|
-
: undefined;
|
|
127
|
-
if (isLogFilterMatched({
|
|
128
|
-
filter,
|
|
129
|
-
block,
|
|
130
|
-
log,
|
|
131
|
-
childAddresses,
|
|
132
|
-
})) {
|
|
133
|
-
matchedFilters.add(filter);
|
|
134
|
-
isMatched = true;
|
|
135
|
-
}
|
|
136
|
-
}
|
|
137
|
-
return isMatched;
|
|
138
|
-
});
|
|
139
|
-
traces = traces.filter((trace) => {
|
|
140
|
-
let isMatched = false;
|
|
141
|
-
for (const filter of transferFilters) {
|
|
142
|
-
const fromChildAddresses = isAddressFactory(filter.fromAddress)
|
|
143
|
-
? [
|
|
144
|
-
finalizedChildAddresses.get(filter.fromAddress),
|
|
145
|
-
unfinalizedChildAddresses.get(filter.fromAddress),
|
|
146
|
-
]
|
|
147
|
-
: undefined;
|
|
148
|
-
const toChildAddresses = isAddressFactory(filter.toAddress)
|
|
149
|
-
? [
|
|
150
|
-
finalizedChildAddresses.get(filter.toAddress),
|
|
151
|
-
unfinalizedChildAddresses.get(filter.toAddress),
|
|
152
|
-
]
|
|
153
|
-
: undefined;
|
|
154
|
-
if (isTransferFilterMatched({
|
|
155
|
-
filter,
|
|
156
|
-
block: { number: block.number },
|
|
157
|
-
trace: trace.trace,
|
|
158
|
-
fromChildAddresses,
|
|
159
|
-
toChildAddresses,
|
|
160
|
-
})) {
|
|
161
|
-
matchedFilters.add(filter);
|
|
162
|
-
isMatched = true;
|
|
163
|
-
}
|
|
164
|
-
}
|
|
165
|
-
for (const filter of traceFilters) {
|
|
166
|
-
const fromChildAddresses = isAddressFactory(filter.fromAddress)
|
|
167
|
-
? [
|
|
168
|
-
finalizedChildAddresses.get(filter.fromAddress),
|
|
169
|
-
unfinalizedChildAddresses.get(filter.fromAddress),
|
|
170
|
-
]
|
|
171
|
-
: undefined;
|
|
172
|
-
const toChildAddresses = isAddressFactory(filter.toAddress)
|
|
173
|
-
? [
|
|
174
|
-
finalizedChildAddresses.get(filter.toAddress),
|
|
175
|
-
unfinalizedChildAddresses.get(filter.toAddress),
|
|
176
|
-
]
|
|
177
|
-
: undefined;
|
|
178
|
-
if (isTraceFilterMatched({
|
|
179
|
-
filter,
|
|
180
|
-
block: { number: block.number },
|
|
181
|
-
trace: trace.trace,
|
|
182
|
-
fromChildAddresses,
|
|
183
|
-
toChildAddresses,
|
|
184
|
-
})) {
|
|
185
|
-
matchedFilters.add(filter);
|
|
186
|
-
isMatched = true;
|
|
187
|
-
}
|
|
188
|
-
}
|
|
189
|
-
return isMatched;
|
|
190
|
-
});
|
|
191
|
-
// Remove transactions and transaction receipts that may have been filtered out
|
|
192
|
-
const transactionHashes = new Set();
|
|
193
|
-
for (const log of logs) {
|
|
194
|
-
transactionHashes.add(log.transactionHash);
|
|
195
|
-
}
|
|
196
|
-
for (const trace of traces) {
|
|
197
|
-
transactionHashes.add(trace.transactionHash);
|
|
198
|
-
}
|
|
199
|
-
transactions = transactions.filter((transaction) => {
|
|
200
|
-
let isMatched = transactionHashes.has(transaction.hash);
|
|
201
|
-
for (const filter of transactionFilters) {
|
|
202
|
-
const fromChildAddresses = isAddressFactory(filter.fromAddress)
|
|
203
|
-
? [
|
|
204
|
-
finalizedChildAddresses.get(filter.fromAddress),
|
|
205
|
-
unfinalizedChildAddresses.get(filter.fromAddress),
|
|
206
|
-
]
|
|
207
|
-
: undefined;
|
|
208
|
-
const toChildAddresses = isAddressFactory(filter.toAddress)
|
|
209
|
-
? [
|
|
210
|
-
finalizedChildAddresses.get(filter.toAddress),
|
|
211
|
-
unfinalizedChildAddresses.get(filter.toAddress),
|
|
212
|
-
]
|
|
213
|
-
: undefined;
|
|
214
|
-
if (isTransactionFilterMatched({
|
|
215
|
-
filter,
|
|
216
|
-
block,
|
|
217
|
-
transaction,
|
|
218
|
-
fromChildAddresses,
|
|
219
|
-
toChildAddresses,
|
|
220
|
-
})) {
|
|
221
|
-
matchedFilters.add(filter);
|
|
222
|
-
isMatched = true;
|
|
223
|
-
}
|
|
224
|
-
}
|
|
225
|
-
return isMatched;
|
|
226
|
-
});
|
|
227
|
-
for (const transaction of transactions) {
|
|
228
|
-
transactionHashes.add(transaction.hash);
|
|
229
|
-
}
|
|
230
|
-
transactionReceipts = transactionReceipts.filter((t) => transactionHashes.has(t.transactionHash));
|
|
231
|
-
// Record matched block filters
|
|
232
|
-
for (const filter of blockFilters) {
|
|
233
|
-
if (isBlockFilterMatched({ filter, block })) {
|
|
234
|
-
matchedFilters.add(filter);
|
|
235
|
-
}
|
|
236
|
-
}
|
|
237
|
-
if (logs.length > 0 || traces.length > 0 || transactions.length > 0) {
|
|
238
|
-
const _text = [];
|
|
239
|
-
if (logs.length === 1) {
|
|
240
|
-
_text.push("1 log");
|
|
241
|
-
}
|
|
242
|
-
else if (logs.length > 1) {
|
|
243
|
-
_text.push(`${logs.length} logs`);
|
|
244
|
-
}
|
|
245
|
-
if (traces.length === 1) {
|
|
246
|
-
_text.push("1 trace");
|
|
247
|
-
}
|
|
248
|
-
else if (traces.length > 1) {
|
|
249
|
-
_text.push(`${traces.length} traces`);
|
|
250
|
-
}
|
|
251
|
-
if (transactions.length === 1) {
|
|
252
|
-
_text.push("1 transaction");
|
|
253
|
-
}
|
|
254
|
-
else if (transactions.length > 1) {
|
|
255
|
-
_text.push(`${transactions.length} transactions`);
|
|
256
|
-
}
|
|
257
|
-
const text = _text.filter((t) => t !== undefined).join(" and ");
|
|
258
|
-
args.common.logger.info({
|
|
259
|
-
service: "realtime",
|
|
260
|
-
msg: `Synced ${text} from '${args.network.name}' block ${hexToNumber(block.number)}`,
|
|
261
|
-
});
|
|
262
|
-
}
|
|
263
|
-
else {
|
|
264
|
-
args.common.logger.info({
|
|
265
|
-
service: "realtime",
|
|
266
|
-
msg: `Synced block ${hexToNumber(block.number)} from '${args.network.name}' `,
|
|
267
|
-
});
|
|
268
|
-
}
|
|
269
|
-
unfinalizedBlocks.push(syncBlockToLightBlock(block));
|
|
270
|
-
// Make sure `transactions` can be garbage collected
|
|
271
|
-
// @ts-ignore
|
|
272
|
-
block.transactions = undefined;
|
|
273
|
-
await args.onEvent({
|
|
274
|
-
type: "block",
|
|
275
|
-
hasMatchedFilter: matchedFilters.size > 0,
|
|
276
|
-
block,
|
|
277
|
-
factoryLogs,
|
|
278
|
-
logs,
|
|
279
|
-
traces,
|
|
280
|
-
transactions,
|
|
281
|
-
transactionReceipts,
|
|
282
|
-
endClock,
|
|
283
|
-
});
|
|
284
|
-
// Determine if a new range has become finalized by evaluating if the
|
|
285
|
-
// latest block number is 2 * finalityBlockCount >= finalized block number.
|
|
286
|
-
// Essentially, there is a range the width of finalityBlockCount that is entirely
|
|
287
|
-
// finalized.
|
|
288
|
-
const blockMovesFinality = hexToNumber(block.number) >=
|
|
289
|
-
hexToNumber(finalizedBlock.number) + 2 * args.network.finalityBlockCount;
|
|
290
|
-
if (blockMovesFinality) {
|
|
291
|
-
const pendingFinalizedBlock = unfinalizedBlocks.find((lb) => hexToNumber(lb.number) ===
|
|
292
|
-
hexToNumber(block.number) - args.network.finalityBlockCount);
|
|
293
|
-
args.common.logger.debug({
|
|
294
|
-
service: "realtime",
|
|
295
|
-
msg: `Finalized ${hexToNumber(pendingFinalizedBlock.number) - hexToNumber(finalizedBlock.number) + 1} '${args.network.name}' blocks [${hexToNumber(finalizedBlock.number) + 1}, ${hexToNumber(pendingFinalizedBlock.number)}]`,
|
|
296
|
-
});
|
|
297
|
-
const finalizedBlocks = unfinalizedBlocks.filter((lb) => hexToNumber(lb.number) <= hexToNumber(pendingFinalizedBlock.number));
|
|
298
|
-
unfinalizedBlocks = unfinalizedBlocks.filter((lb) => hexToNumber(lb.number) > hexToNumber(pendingFinalizedBlock.number));
|
|
299
|
-
// add child address from newly finalized blocks to `finalizedChildAddresses`
|
|
300
|
-
for (const filter of factories) {
|
|
301
|
-
for (const { hash } of finalizedBlocks) {
|
|
302
|
-
const factoryLogs = factoryLogsPerBlock.get(hash);
|
|
303
|
-
if (factoryLogs !== undefined) {
|
|
304
|
-
for (const log of factoryLogs) {
|
|
305
|
-
if (isLogFactoryMatched({ filter, log })) {
|
|
306
|
-
finalizedChildAddresses
|
|
307
|
-
.get(filter)
|
|
308
|
-
.add(getChildAddress({ log, factory: filter }));
|
|
309
|
-
}
|
|
310
|
-
}
|
|
311
|
-
}
|
|
312
|
-
}
|
|
313
|
-
}
|
|
314
|
-
// recompute `unfinalizedChildAddresses`
|
|
315
|
-
unfinalizedChildAddresses.clear();
|
|
316
|
-
for (const filter of factories) {
|
|
317
|
-
unfinalizedChildAddresses.set(filter, new Set());
|
|
318
|
-
for (const { hash } of unfinalizedBlocks) {
|
|
319
|
-
const factoryLogs = factoryLogsPerBlock.get(hash);
|
|
320
|
-
if (factoryLogs !== undefined) {
|
|
321
|
-
for (const log of factoryLogs) {
|
|
322
|
-
if (isLogFactoryMatched({ filter, log })) {
|
|
323
|
-
unfinalizedChildAddresses
|
|
324
|
-
.get(filter)
|
|
325
|
-
.add(getChildAddress({ log, factory: filter }));
|
|
326
|
-
}
|
|
327
|
-
}
|
|
328
|
-
}
|
|
329
|
-
}
|
|
330
|
-
}
|
|
331
|
-
// delete finalized blocks from `factoryLogsPerBlock`
|
|
332
|
-
for (const { hash } of finalizedBlocks) {
|
|
333
|
-
factoryLogsPerBlock.delete(hash);
|
|
334
|
-
}
|
|
335
|
-
finalizedBlock = pendingFinalizedBlock;
|
|
336
|
-
await args.onEvent({ type: "finalize", block: pendingFinalizedBlock });
|
|
337
|
-
}
|
|
338
|
-
};
|
|
339
|
-
/**
 * Traverse the remote chain until we find a block that is
 * compatible with our local chain.
 *
 * Walks backwards from `block` via `parentHash`, popping reorged-out tips
 * from `unfinalizedBlocks`, emits a "reorg" event with the common ancestor,
 * then rebuilds `unfinalizedChildAddresses` from the surviving unfinalized
 * blocks and drops reorged entries from `factoryLogsPerBlock`.
 *
 * @param block Block that caused reorg to be detected.
 * Must be at most 1 block ahead of the local chain.
 * @throws Error when no common ancestor exists among the unfinalized
 * blocks, i.e. the reorg reaches past the finalized block.
 */
const handleReorg = async (block) => {
    args.common.logger.warn({
        service: "realtime",
        msg: `Detected forked '${args.network.name}' block at height ${hexToNumber(block.number)}`,
    });
    // Record blocks that have been removed from the local chain.
    const reorgedBlocks = unfinalizedBlocks.filter((lb) => hexToNumber(lb.number) >= hexToNumber(block.number));
    // Prune the local chain of blocks that have been reorged out
    unfinalizedBlocks = unfinalizedBlocks.filter((lb) => hexToNumber(lb.number) < hexToNumber(block.number));
    // Block we are attempting to fit into the local chain.
    let remoteBlock = block;
    while (true) {
        const parentBlock = getLatestUnfinalizedBlock();
        // Found the common ancestor: the local tip is the remote block's parent.
        if (parentBlock.hash === remoteBlock.parentHash)
            break;
        if (unfinalizedBlocks.length === 0) {
            // No compatible block was found in the local chain, must be a deep reorg.
            const msg = `Encountered unrecoverable '${args.network.name}' reorg beyond finalized block ${hexToNumber(finalizedBlock.number)}`;
            args.common.logger.warn({ service: "realtime", msg });
            throw new Error(msg);
        }
        else {
            // Step one block back on the remote chain and retry.
            remoteBlock = await _eth_getBlockByHash(args.requestQueue, {
                hash: remoteBlock.parentHash,
            });
            // Add tip to `reorgedBlocks`
            reorgedBlocks.push(unfinalizedBlocks.pop());
        }
    }
    const commonAncestor = getLatestUnfinalizedBlock();
    await args.onEvent({ type: "reorg", block: commonAncestor, reorgedBlocks });
    args.common.logger.warn({
        service: "realtime",
        msg: `Reconciled ${reorgedBlocks.length}-block '${args.network.name}' reorg with common ancestor block ${hexToNumber(commonAncestor.number)}`,
    });
    // recompute `unfinalizedChildAddresses`
    unfinalizedChildAddresses.clear();
    for (const filter of factories) {
        unfinalizedChildAddresses.set(filter, new Set());
        for (const { hash } of unfinalizedBlocks) {
            const factoryLogs = factoryLogsPerBlock.get(hash);
            if (factoryLogs !== undefined) {
                for (const log of factoryLogs) {
                    if (isLogFactoryMatched({ filter, log })) {
                        unfinalizedChildAddresses
                            .get(filter)
                            .add(getChildAddress({ log, factory: filter }));
                    }
                }
            }
        }
    }
    // delete reorged blocks from `factoryLogsPerBlock`
    for (const { hash } of reorgedBlocks) {
        factoryLogsPerBlock.delete(hash);
    }
};
/**
 * Fetch receipts for the given transactions of a block.
 *
 * Prefers a single eth_getBlockReceipts request. If that request throws,
 * permanently switches to per-transaction eth_getTransactionReceipt
 * requests (via the `isBlockReceipts` flag) and retries.
 *
 * @param blockHash Hash of the block containing the transactions.
 * @param transactionHashes Set of transaction hashes to fetch receipts for.
 * @returns Receipts for exactly the requested transaction hashes.
 * @throws Error when the eth_getBlockReceipts response is missing a
 * required transaction receipt (inconsistent RPC responses).
 */
const syncTransactionReceipts = async (blockHash, transactionHashes) => {
    if (transactionHashes.size === 0) {
        return [];
    }
    // Fallback path: eth_getBlockReceipts previously failed on this network.
    if (isBlockReceipts === false) {
        const transactionReceipts = await Promise.all(Array.from(transactionHashes).map(async (hash) => _eth_getTransactionReceipt(args.requestQueue, { hash })));
        return transactionReceipts;
    }
    let blockReceipts;
    try {
        blockReceipts = await _eth_getBlockReceipts(args.requestQueue, {
            blockHash,
        });
    }
    catch (_error) {
        const error = _error;
        args.common.logger.warn({
            service: "realtime",
            msg: `Caught eth_getBlockReceipts error on '${args.network.name}', switching to eth_getTransactionReceipt method.`,
            error,
        });
        // Remember the failure and retry with the per-transaction method.
        isBlockReceipts = false;
        return syncTransactionReceipts(blockHash, transactionHashes);
    }
    const blockReceiptsTransactionHashes = new Set(blockReceipts.map((r) => r.transactionHash));
    // Validate that block transaction receipts include all required transactions
    for (const hash of Array.from(transactionHashes)) {
        if (blockReceiptsTransactionHashes.has(hash) === false) {
            throw new Error(`Detected inconsistent RPC responses. 'transaction.hash' ${hash} not found in eth_getBlockReceipts response for block '${blockHash}'`);
        }
    }
    const transactionReceipts = blockReceipts.filter((receipt) => transactionHashes.has(receipt.transactionHash));
    return transactionReceipts;
};
/**
 * Fetch all data (logs, traces, receipts) for the specified block required by `args.sources`
 *
 * Note: The data returned by this function may include false positives. This
 * is due to the fact that factory addresses are unknown and are always
 * treated as "matched".
 *
 * @param block Full block (with transactions) to fetch event data for.
 * @returns The block together with filtered logs, factory logs, traces,
 * transactions, and transaction receipts.
 * @throws Error on invalid or inconsistent RPC responses (empty logs with a
 * non-empty bloom, logs/traces referencing unknown transactions, etc.).
 */
const fetchBlockEventData = async (block) => {
    ////////
    // Logs
    ////////
    // "eth_getLogs" calls can be skipped if no filters match `newHeadBlock.logsBloom`.
    const shouldRequestLogs = block.logsBloom === zeroLogsBloom ||
        logFilters.some((filter) => isFilterInBloom({ block, filter }));
    let logs = [];
    if (shouldRequestLogs) {
        logs = await _eth_getLogs(args.requestQueue, { blockHash: block.hash });
        // Protect against RPCs returning empty logs. Known to happen near chain tip.
        if (block.logsBloom !== zeroLogsBloom && logs.length === 0) {
            throw new Error("Detected invalid eth_getLogs response. `block.logsBloom` is not empty but zero logs were returned.");
        }
        // Validate each log: correct block hash, unique log index, and a
        // transaction hash that exists in `block.transactions`.
        const logIds = new Set();
        for (const log of logs) {
            if (log.blockHash !== block.hash) {
                throw new Error(`Detected invalid eth_getLogs response. 'log.blockHash' ${log.blockHash} does not match requested block hash ${block.hash}`);
            }
            const id = `${log.blockHash}-${log.logIndex}`;
            if (logIds.has(id)) {
                args.common.logger.warn({
                    service: "sync",
                    msg: `Detected invalid eth_getLogs response. Duplicate log index ${log.logIndex} for block ${log.blockHash}.`,
                });
            }
            else {
                logIds.add(id);
            }
            if (block.transactions.find((t) => t.hash === log.transactionHash) ===
                undefined) {
                if (log.transactionHash === zeroHash) {
                    args.common.logger.warn({
                        service: "sync",
                        msg: `Detected '${args.network.name}' log with empty transaction hash in block ${block.hash} at log index ${hexToNumber(log.logIndex)}. This is expected for some networks like ZKsync.`,
                    });
                }
                else {
                    throw new Error(`Detected inconsistent '${args.network.name}' RPC responses. 'log.transactionHash' ${log.transactionHash} not found in 'block.transactions' ${block.hash}`);
                }
            }
        }
    }
    if (shouldRequestLogs === false &&
        args.sources.some((s) => s.filter.type === "log")) {
        args.common.logger.debug({
            service: "realtime",
            msg: `Skipped fetching '${args.network.name}' logs for block ${hexToNumber(block.number)} due to bloom filter result`,
        });
    }
    ////////
    // Traces
    ////////
    const shouldRequestTraces = traceFilters.length > 0 || transferFilters.length > 0;
    let traces = [];
    if (shouldRequestTraces) {
        traces = await _debug_traceBlockByHash(args.requestQueue, {
            hash: block.hash,
        });
        // Protect against RPCs returning empty traces. Known to happen near chain tip.
        // Use the fact that any transaction produces a trace.
        if (block.transactions.length !== 0 && traces.length === 0) {
            throw new Error("Detected invalid debug_traceBlock response. `block.transactions` is not empty but zero traces were returned.");
        }
    }
    // Validate that each trace points to a valid transaction in the block
    for (const trace of traces) {
        if (block.transactions.find((t) => t.hash === trace.transactionHash) ===
            undefined) {
            throw new Error(`Detected inconsistent RPC responses. 'trace.txHash' ${trace.transactionHash} not found in 'block' ${block.hash}`);
        }
    }
    ////////
    // Get Matched
    ////////
    // Record `logs` that contain factory child addresses
    const factoryLogs = logs.filter((log) => {
        let isMatched = false;
        for (const filter of factories) {
            if (isLogFactoryMatched({ filter, log })) {
                if (factoryLogsPerBlock.has(block.hash) === false) {
                    factoryLogsPerBlock.set(block.hash, []);
                }
                factoryLogsPerBlock.get(block.hash).push(log);
                isMatched = true;
            }
        }
        return isMatched;
    });
    const requiredTransactions = new Set();
    const requiredTransactionReceipts = new Set();
    // Remove logs that don't match a filter, recording required transactions
    logs = logs.filter((log) => {
        let isMatched = false;
        for (const filter of logFilters) {
            if (isLogFilterMatched({ filter, block, log })) {
                isMatched = true;
                if (log.transactionHash === zeroHash) {
                    args.common.logger.warn({
                        service: "sync",
                        msg: `Detected '${args.network.name}' log with empty transaction hash in block ${block.hash} at log index ${hexToNumber(log.logIndex)}. This is expected for some networks like ZKsync.`,
                    });
                }
                else {
                    requiredTransactions.add(log.transactionHash);
                    if (shouldGetTransactionReceipt(filter)) {
                        requiredTransactionReceipts.add(log.transactionHash);
                        // skip to next log
                        break;
                    }
                }
            }
        }
        return isMatched;
    });
    // Initial weak trace filtering before full filtering with factory addresses in handleBlock
    traces = traces.filter((trace) => {
        let isMatched = false;
        for (const filter of transferFilters) {
            if (isTransferFilterMatched({
                filter,
                block: { number: block.number },
                trace: trace.trace,
            })) {
                requiredTransactions.add(trace.transactionHash);
                isMatched = true;
                if (shouldGetTransactionReceipt(filter)) {
                    requiredTransactionReceipts.add(trace.transactionHash);
                    // skip to next trace
                    break;
                }
            }
        }
        for (const filter of traceFilters) {
            if (isTraceFilterMatched({
                filter,
                block: { number: block.number },
                trace: trace.trace,
            })) {
                requiredTransactions.add(trace.transactionHash);
                isMatched = true;
                if (shouldGetTransactionReceipt(filter)) {
                    requiredTransactionReceipts.add(trace.transactionHash);
                    // skip to next trace
                    break;
                }
            }
        }
        return isMatched;
    });
    ////////
    // Transactions
    ////////
    // Keep transactions required by matched logs/traces, plus any that match
    // a transaction filter directly (those always require a receipt).
    const transactions = block.transactions.filter((transaction) => {
        let isMatched = requiredTransactions.has(transaction.hash);
        for (const filter of transactionFilters) {
            if (isTransactionFilterMatched({ filter, block, transaction })) {
                requiredTransactions.add(transaction.hash);
                requiredTransactionReceipts.add(transaction.hash);
                isMatched = true;
            }
        }
        return isMatched;
    });
    // Validate that filtered logs/callTraces point to valid transaction in the block
    const blockTransactionsHashes = new Set(block.transactions.map((t) => t.hash));
    for (const hash of Array.from(requiredTransactions)) {
        if (blockTransactionsHashes.has(hash) === false) {
            // Fix: this check is against `block.transactions`, not the
            // eth_getBlockReceipts response — the previous message
            // misattributed the source of the inconsistency.
            throw new Error(`Detected inconsistent RPC responses. 'transaction.hash' ${hash} not found in 'block.transactions' for block '${block.hash}'.`);
        }
    }
    ////////
    // Transaction Receipts
    ////////
    const transactionReceipts = await syncTransactionReceipts(block.hash, requiredTransactionReceipts);
    return {
        block,
        logs,
        factoryLogs,
        traces,
        transactions,
        transactionReceipts,
    };
};
/**
 * Tip of the locally tracked chain: the most recently ingested
 * unfinalized block, or the finalized block when none are pending.
 */
const getLatestUnfinalizedBlock = () => {
    const tip = unfinalizedBlocks[unfinalizedBlocks.length - 1];
    return tip === undefined ? finalizedBlock : tip;
};
// Public handle returned to the caller: `start` begins polling; the getters
// expose internal chain state; `kill` stops the polling loop.
return {
    /**
     * Begin polling the RPC for new blocks every
     * `args.network.pollingInterval` ms, seeding finalized state from
     * `startArgs`. Resolves to the internal `processBlock` queue.
     */
    start(startArgs) {
        finalizedBlock = startArgs.syncProgress.finalized;
        finalizedChildAddresses = startArgs.initialChildAddresses;
        /**
         * The queue reacts to a new block. The four states are:
         * 1) Block is the same as the one just processed, no-op.
         * 2) Block is exactly one block ahead of the last processed,
         * handle this new block (happy path).
         * 3) Block is more than one ahead of the last processed,
         * fetch all intermediate blocks and enqueue them again.
         * 4) Block is behind the last processed. This is a sign that
         * a reorg has occurred.
         */
        const processBlock = mutex(async ({ block, ...rest }) => {
            const latestBlock = getLatestUnfinalizedBlock();
            // We already saw and handled this block. No-op.
            if (latestBlock.hash === block.hash) {
                args.common.logger.trace({
                    service: "realtime",
                    msg: `Skipped processing '${args.network.name}' block ${hexToNumber(block.number)}, already synced`,
                });
                return;
            }
            try {
                // Quickly check for a reorg by comparing block numbers. If the block
                // number has not increased, a reorg must have occurred.
                if (hexToNumber(latestBlock.number) >= hexToNumber(block.number)) {
                    await handleReorg(block);
                    processBlock.clear();
                    return;
                }
                // Blocks are missing. They should be fetched and enqueued.
                if (hexToNumber(latestBlock.number) + 1 <
                    hexToNumber(block.number)) {
                    // Retrieve missing blocks, but only fetch a certain amount.
                    const missingBlockRange = range(hexToNumber(latestBlock.number) + 1, Math.min(hexToNumber(block.number), hexToNumber(latestBlock.number) + MAX_QUEUED_BLOCKS));
                    const pendingBlocks = await Promise.all(missingBlockRange.map((blockNumber) => _eth_getBlockByNumber(args.requestQueue, {
                        blockNumber,
                    }).then((block) => fetchBlockEventData(block))));
                    args.common.logger.debug({
                        service: "realtime",
                        msg: `Fetched ${missingBlockRange.length} missing '${args.network.name}' blocks [${hexToNumber(latestBlock.number) + 1}, ${Math.min(hexToNumber(block.number), hexToNumber(latestBlock.number) + MAX_QUEUED_BLOCKS)}]`,
                    });
                    processBlock.clear();
                    // Re-enqueue fetched blocks plus the original; not awaited —
                    // execution order is presumably serialized by `mutex` (verify).
                    for (const pendingBlock of pendingBlocks) {
                        processBlock(pendingBlock);
                    }
                    processBlock({ block, ...rest });
                    return;
                }
                // Check if a reorg occurred by validating the chain of block hashes.
                if (block.parentHash !== latestBlock.hash) {
                    await handleReorg(block);
                    processBlock.clear();
                    return;
                }
                // New block is exactly one block ahead of the local chain.
                // Attempt to ingest it.
                await handleBlock({ block, ...rest });
                // Reset the error state after successfully completing the happy path.
                consecutiveErrors = 0;
                return;
            }
            catch (_error) {
                const error = _error;
                if (args.common.shutdown.isKilled) {
                    throw new ShutdownError();
                }
                args.common.logger.warn({
                    service: "realtime",
                    msg: `Failed to process '${args.network.name}' block ${hexToNumber(block.number)}`,
                    error,
                });
                // Back off with a duration indexed by the consecutive error count.
                const duration = ERROR_TIMEOUT[consecutiveErrors];
                args.common.logger.warn({
                    service: "realtime",
                    msg: `Retrying '${args.network.name}' sync after ${duration} ${duration === 1 ? "second" : "seconds"}.`,
                });
                await wait(duration * 1000);
                // Remove all blocks from the queue. This protects against an
                // erroneous block causing a fatal error.
                processBlock.clear();
                // After a certain number of attempts, emit a fatal error.
                if (++consecutiveErrors === ERROR_TIMEOUT.length) {
                    args.common.logger.error({
                        service: "realtime",
                        msg: `Fatal error: Unable to process '${args.network.name}' block ${hexToNumber(block.number)} after ${ERROR_TIMEOUT.length} attempts.`,
                        error,
                    });
                    args.onFatalError(error);
                }
            }
        });
        // Poll the RPC for the latest block, fetch its event data, and feed
        // it into `processBlock`.
        const enqueue = async () => {
            try {
                const block = await _eth_getBlockByNumber(args.requestQueue, {
                    blockTag: "latest",
                });
                args.common.logger.debug({
                    service: "realtime",
                    msg: `Received latest '${args.network.name}' block ${hexToNumber(block.number)}`,
                });
                const latestBlock = getLatestUnfinalizedBlock();
                // We already saw and handled this block. No-op.
                if (latestBlock.hash === block.hash) {
                    args.common.logger.trace({
                        service: "realtime",
                        msg: `Skipped processing '${args.network.name}' block ${hexToNumber(block.number)}, already synced`,
                    });
                    return;
                }
                const endClock = startClock();
                const blockWithEventData = await fetchBlockEventData(block);
                consecutiveErrors = 0;
                return processBlock({ ...blockWithEventData, endClock });
            }
            catch (_error) {
                const error = _error;
                if (args.common.shutdown.isKilled) {
                    throw new ShutdownError();
                }
                args.common.logger.warn({
                    service: "realtime",
                    msg: `Failed to fetch latest '${args.network.name}' block`,
                    error,
                });
                // After a certain number of attempts, emit a fatal error.
                if (++consecutiveErrors === ERROR_TIMEOUT.length) {
                    args.common.logger.error({
                        service: "realtime",
                        msg: `Fatal error: Unable to fetch latest '${args.network.name}' block after ${ERROR_TIMEOUT.length} attempts.`,
                        error,
                    });
                    args.onFatalError(error);
                }
            }
        };
        interval = setInterval(enqueue, args.network.pollingInterval);
        args.common.shutdown.add(() => {
            clearInterval(interval);
        });
        // Note: this is done just for testing.
        return enqueue().then(() => processBlock);
    },
    // Locally tracked blocks newer than the finalized block.
    get unfinalizedBlocks() {
        return unfinalizedBlocks;
    },
    // Factory child addresses discovered in finalized blocks.
    get finalizedChildAddresses() {
        return finalizedChildAddresses;
    },
    // Factory child addresses discovered in unfinalized blocks.
    get unfinalizedChildAddresses() {
        return unfinalizedChildAddresses;
    },
    // Stop the polling loop.
    async kill() {
        clearInterval(interval);
    },
};
|
|
793
|
-
};
|
|
794
|
-
//# sourceMappingURL=index.js.map
|