envio 3.0.0-alpha.2 → 3.0.0-alpha.4
This diff shows the changes between publicly released versions of the package, as published to the supported registries. It is provided for informational purposes only.
- package/README.md +2 -2
- package/evm.schema.json +44 -34
- package/fuel.schema.json +32 -21
- package/index.d.ts +4 -1
- package/index.js +1 -0
- package/package.json +7 -6
- package/src/Batch.res.mjs +1 -1
- package/src/Benchmark.res +394 -0
- package/src/Benchmark.res.mjs +398 -0
- package/src/ChainFetcher.res +459 -0
- package/src/ChainFetcher.res.mjs +281 -0
- package/src/ChainManager.res +179 -0
- package/src/ChainManager.res.mjs +139 -0
- package/src/Config.res +15 -1
- package/src/Config.res.mjs +28 -5
- package/src/Ecosystem.res +9 -124
- package/src/Ecosystem.res.mjs +19 -160
- package/src/Env.res +0 -1
- package/src/Env.res.mjs +0 -3
- package/src/Envio.gen.ts +9 -1
- package/src/Envio.res +12 -9
- package/src/EventProcessing.res +476 -0
- package/src/EventProcessing.res.mjs +341 -0
- package/src/FetchState.res +54 -29
- package/src/FetchState.res.mjs +62 -35
- package/src/GlobalState.res +1169 -0
- package/src/GlobalState.res.mjs +1196 -0
- package/src/Internal.res +43 -1
- package/src/LoadLayer.res +444 -0
- package/src/LoadLayer.res.mjs +296 -0
- package/src/LoadLayer.resi +32 -0
- package/src/Prometheus.res +8 -8
- package/src/Prometheus.res.mjs +10 -10
- package/src/ReorgDetection.res +6 -10
- package/src/ReorgDetection.res.mjs +6 -6
- package/src/Types.ts +1 -1
- package/src/UserContext.res +356 -0
- package/src/UserContext.res.mjs +238 -0
- package/src/Utils.res +15 -0
- package/src/Utils.res.mjs +18 -0
- package/src/bindings/ClickHouse.res +31 -1
- package/src/bindings/ClickHouse.res.mjs +27 -1
- package/src/bindings/DateFns.res +71 -0
- package/src/bindings/DateFns.res.mjs +22 -0
- package/src/bindings/Ethers.res +27 -63
- package/src/bindings/Ethers.res.mjs +18 -65
- package/src/sources/Evm.res +87 -0
- package/src/sources/Evm.res.mjs +105 -0
- package/src/sources/EvmChain.res +95 -0
- package/src/sources/EvmChain.res.mjs +61 -0
- package/src/sources/Fuel.res +19 -34
- package/src/sources/Fuel.res.mjs +34 -16
- package/src/sources/FuelSDK.res +37 -0
- package/src/sources/FuelSDK.res.mjs +29 -0
- package/src/sources/HyperFuel.res +2 -2
- package/src/sources/HyperFuel.resi +1 -1
- package/src/sources/HyperFuelClient.res +2 -2
- package/src/sources/HyperFuelSource.res +8 -8
- package/src/sources/HyperFuelSource.res.mjs +5 -5
- package/src/sources/HyperSyncHeightStream.res +28 -110
- package/src/sources/HyperSyncHeightStream.res.mjs +30 -63
- package/src/sources/HyperSyncSource.res +16 -18
- package/src/sources/HyperSyncSource.res.mjs +25 -25
- package/src/sources/Rpc.res +43 -0
- package/src/sources/Rpc.res.mjs +31 -0
- package/src/sources/RpcSource.res +13 -8
- package/src/sources/RpcSource.res.mjs +12 -7
- package/src/sources/Source.res +3 -2
- package/src/sources/SourceManager.res +183 -108
- package/src/sources/SourceManager.res.mjs +162 -99
- package/src/sources/SourceManager.resi +4 -5
- package/src/sources/Svm.res +59 -0
- package/src/sources/Svm.res.mjs +79 -0
- package/src/bindings/Ethers.gen.ts +0 -14
package/src/EventProcessing.res
@@ -0,0 +1,476 @@
+open Belt
+
+let allChainsEventsProcessedToEndblock = (chainFetchers: ChainMap.t<ChainFetcher.t>) => {
+  chainFetchers
+  ->ChainMap.values
+  ->Array.every(cf => cf->ChainFetcher.hasProcessedToEndblock)
+}
+
+let computeChainsState = (chainFetchers: ChainMap.t<ChainFetcher.t>): Internal.chains => {
+  let chains = Js.Dict.empty()
+
+  chainFetchers
+  ->ChainMap.entries
+  ->Array.forEach(((chain, chainFetcher)) => {
+    let chainId = chain->ChainMap.Chain.toChainId->Int.toString
+    let isLive = chainFetcher.timestampCaughtUpToHeadOrEndblock !== None
+
+    chains->Js.Dict.set(
+      chainId,
+      {
+        Internal.id: chain->ChainMap.Chain.toChainId,
+        isLive,
+      },
+    )
+  })
+
+  chains
+}
+
+let convertFieldsToJson = (fields: option<dict<unknown>>) => {
+  switch fields {
+  | None => %raw(`{}`)
+  | Some(fields) => {
+      let keys = fields->Js.Dict.keys
+      let new = Js.Dict.empty()
+      for i in 0 to keys->Js.Array2.length - 1 {
+        let key = keys->Js.Array2.unsafe_get(i)
+        let value = fields->Js.Dict.unsafeGet(key)
+        // Skip `undefined` values and convert bigint fields to string
+        // There are not fields with nested bigints, so this is safe
+        new->Js.Dict.set(
+          key,
+          Js.typeof(value) === "bigint" ? value->Utils.magic->BigInt.toString->Utils.magic : value,
+        )
+      }
+      new->(Utils.magic: dict<unknown> => Js.Json.t)
+    }
+  }
+}
+
+let addItemToRawEvents = (
+  eventItem: Internal.eventItem,
+  ~inMemoryStore: InMemoryStore.t,
+  ~config: Config.t,
+) => {
+  let {event, eventConfig, chain, blockNumber, timestamp: blockTimestamp} = eventItem
+  let {block, transaction, params, logIndex, srcAddress} = event
+  let chainId = chain->ChainMap.Chain.toChainId
+  let eventId = EventUtils.packEventIndex(~logIndex, ~blockNumber)
+  let blockFields =
+    block
+    ->(Utils.magic: Internal.eventBlock => option<dict<unknown>>)
+    ->convertFieldsToJson
+  let transactionFields =
+    transaction
+    ->(Utils.magic: Internal.eventTransaction => option<dict<unknown>>)
+    ->convertFieldsToJson
+
+  blockFields->config.ecosystem.cleanUpRawEventFieldsInPlace
+
+  // Serialize to unknown, because serializing to Js.Json.t fails for Bytes Fuel type, since it has unknown schema
+  let params =
+    params
+    ->S.reverseConvertOrThrow(eventConfig.paramsRawEventSchema)
+    ->(Utils.magic: unknown => Js.Json.t)
+  let params = if params === %raw(`null`) {
+    // Should probably make the params field nullable
+    // But this is currently needed to make events
+    // with empty params work
+    %raw(`"null"`)
+  } else {
+    params
+  }
+
+  let rawEvent: InternalTable.RawEvents.t = {
+    chainId,
+    eventId,
+    eventName: eventConfig.name,
+    contractName: eventConfig.contractName,
+    blockNumber,
+    logIndex,
+    srcAddress,
+    blockHash: block->config.ecosystem.getId,
+    blockTimestamp,
+    blockFields,
+    transactionFields,
+    params,
+  }
+
+  let eventIdStr = eventId->BigInt.toString
+
+  inMemoryStore.rawEvents->InMemoryTable.set({chainId, eventId: eventIdStr}, rawEvent)
+}
+
+exception ProcessingError({message: string, exn: exn, item: Internal.item})
+
+let runEventHandlerOrThrow = async (
+  item: Internal.item,
+  ~checkpointId,
+  ~handler,
+  ~inMemoryStore,
+  ~loadManager,
+  ~persistence,
+  ~shouldSaveHistory,
+  ~shouldBenchmark,
+  ~chains: Internal.chains,
+  ~config: Config.t,
+) => {
+  let eventItem = item->Internal.castUnsafeEventItem
+
+  //Include the load in time before handler
+  let timeBeforeHandler = Hrtime.makeTimer()
+
+  try {
+    let contextParams: UserContext.contextParams = {
+      item,
+      checkpointId,
+      inMemoryStore,
+      loadManager,
+      persistence,
+      shouldSaveHistory,
+      isPreload: false,
+      chains,
+      config,
+      isResolved: false,
+    }
+    await handler(
+      (
+        {
+          event: eventItem.event,
+          context: UserContext.getHandlerContext(contextParams),
+        }: Internal.handlerArgs
+      ),
+    )
+    contextParams.isResolved = true
+  } catch {
+  | exn =>
+    raise(
+      ProcessingError({
+        message: "Unexpected error in the event handler. Please handle the error to keep the indexer running smoothly.",
+        item,
+        exn,
+      }),
+    )
+  }
+  if shouldBenchmark {
+    let timeEnd = timeBeforeHandler->Hrtime.timeSince->Hrtime.toMillis->Hrtime.floatFromMillis
+    Benchmark.addSummaryData(
+      ~group="Handlers Per Event",
+      ~label=`${eventItem.eventConfig.contractName} ${eventItem.eventConfig.name} Handler (ms)`,
+      ~value=timeEnd,
+      ~decimalPlaces=4,
+    )
+  }
+}
+
+let runHandlerOrThrow = async (
+  item: Internal.item,
+  ~checkpointId,
+  ~inMemoryStore,
+  ~loadManager,
+  ~indexer: Indexer.t,
+  ~shouldSaveHistory,
+  ~shouldBenchmark,
+  ~chains: Internal.chains,
+) => {
+  switch item {
+  | Block({onBlockConfig: {handler}, blockNumber}) =>
+    try {
+      let contextParams: UserContext.contextParams = {
+        item,
+        inMemoryStore,
+        loadManager,
+        persistence: indexer.persistence,
+        shouldSaveHistory,
+        checkpointId,
+        isPreload: false,
+        chains,
+        config: indexer.config,
+        isResolved: false,
+      }
+      await handler(
+        Ecosystem.makeOnBlockArgs(
+          ~blockNumber,
+          ~ecosystem=indexer.config.ecosystem,
+          ~context=UserContext.getHandlerContext(contextParams),
+        ),
+      )
+      contextParams.isResolved = true
+    } catch {
+    | exn =>
+      raise(
+        ProcessingError({
+          message: "Unexpected error in the block handler. Please handle the error to keep the indexer running smoothly.",
+          item,
+          exn,
+        }),
+      )
+    }
+  | Event({eventConfig}) => {
+      switch eventConfig.handler {
+      | Some(handler) =>
+        await item->runEventHandlerOrThrow(
+          ~handler,
+          ~checkpointId,
+          ~inMemoryStore,
+          ~loadManager,
+          ~persistence=indexer.persistence,
+          ~shouldSaveHistory,
+          ~shouldBenchmark,
+          ~chains,
+          ~config=indexer.config,
+        )
+      | None => ()
+      }
+
+      if indexer.config.enableRawEvents {
+        item
+        ->Internal.castUnsafeEventItem
+        ->addItemToRawEvents(~inMemoryStore, ~config=indexer.config)
+      }
+    }
+  }
+}
+
+let preloadBatchOrThrow = async (
+  batch: Batch.t,
+  ~loadManager,
+  ~persistence,
+  ~config: Config.t,
+  ~inMemoryStore,
+  ~chains: Internal.chains,
+) => {
+  // On the first run of loaders, we don't care about the result,
+  // whether it's an error or a return type.
+  // We'll rerun the loader again right before the handler run,
+  // to avoid having a stale data returned from the loader.
+
+  let promises = []
+  let itemIdx = ref(0)
+
+  for checkpointIdx in 0 to batch.checkpointIds->Array.length - 1 {
+    let checkpointId = batch.checkpointIds->Js.Array2.unsafe_get(checkpointIdx)
+    let checkpointEventsProcessed =
+      batch.checkpointEventsProcessed->Js.Array2.unsafe_get(checkpointIdx)
+
+    for idx in 0 to checkpointEventsProcessed - 1 {
+      let item = batch.items->Js.Array2.unsafe_get(itemIdx.contents + idx)
+      switch item {
+      | Event({eventConfig: {handler}, event}) =>
+        switch handler {
+        | None => ()
+        | Some(handler) =>
+          try {
+            promises->Array.push(
+              handler({
+                event,
+                context: UserContext.getHandlerContext({
+                  item,
+                  inMemoryStore,
+                  loadManager,
+                  persistence,
+                  checkpointId,
+                  isPreload: true,
+                  shouldSaveHistory: false,
+                  chains,
+                  isResolved: false,
+                  config,
+                }),
+              })->Promise.silentCatch,
+              // Must have Promise.catch as well as normal catch,
+              // because if user throws an error before await in the handler,
+              // it won't create a rejected promise
+            )
+          } catch {
+          | _ => ()
+          }
+        }
+      | Block({onBlockConfig: {handler}, blockNumber}) =>
+        try {
+          promises->Array.push(
+            handler({
+              Ecosystem.makeOnBlockArgs(
+                ~blockNumber,
+                ~ecosystem=config.ecosystem,
+                ~context=UserContext.getHandlerContext({
+                  item,
+                  inMemoryStore,
+                  loadManager,
+                  persistence,
+                  checkpointId,
+                  isPreload: true,
+                  shouldSaveHistory: false,
+                  chains,
+                  isResolved: false,
+                  config,
+                }),
+              )
+            })->Promise.silentCatch,
+          )
+        } catch {
+        | _ => ()
+        }
+      }
+    }
+
+    itemIdx := itemIdx.contents + checkpointEventsProcessed
+  }
+
+  let _ = await Promise.all(promises)
+}
+
+let runBatchHandlersOrThrow = async (
+  batch: Batch.t,
+  ~inMemoryStore,
+  ~loadManager,
+  ~indexer,
+  ~shouldSaveHistory,
+  ~shouldBenchmark,
+  ~chains: Internal.chains,
+) => {
+  let itemIdx = ref(0)
+
+  for checkpointIdx in 0 to batch.checkpointIds->Array.length - 1 {
+    let checkpointId = batch.checkpointIds->Js.Array2.unsafe_get(checkpointIdx)
+    let checkpointEventsProcessed =
+      batch.checkpointEventsProcessed->Js.Array2.unsafe_get(checkpointIdx)
+
+    for idx in 0 to checkpointEventsProcessed - 1 {
+      let item = batch.items->Js.Array2.unsafe_get(itemIdx.contents + idx)
+
+      await runHandlerOrThrow(
+        item,
+        ~checkpointId,
+        ~inMemoryStore,
+        ~loadManager,
+        ~indexer,
+        ~shouldSaveHistory,
+        ~shouldBenchmark,
+        ~chains,
+      )
+    }
+    itemIdx := itemIdx.contents + checkpointEventsProcessed
+  }
+}
+
+let registerProcessEventBatchMetrics = (
+  ~logger,
+  ~loadDuration,
+  ~handlerDuration,
+  ~dbWriteDuration,
+) => {
+  logger->Logging.childTrace({
+    "msg": "Finished processing batch",
+    "loader_time_elapsed": loadDuration,
+    "handlers_time_elapsed": handlerDuration,
+    "write_time_elapsed": dbWriteDuration,
+  })
+
+  Prometheus.incrementLoadEntityDurationCounter(~duration=loadDuration)
+  Prometheus.incrementEventRouterDurationCounter(~duration=handlerDuration)
+  Prometheus.incrementExecuteBatchDurationCounter(~duration=dbWriteDuration)
+  Prometheus.incrementStorageWriteTimeCounter(~duration=dbWriteDuration)
+  Prometheus.incrementStorageWriteCounter()
+}
+
+type logPartitionInfo = {
+  batchSize: int,
+  firstItemTimestamp: option<int>,
+  firstItemBlockNumber?: int,
+  lastItemBlockNumber?: int,
+}
+
+let processEventBatch = async (
+  ~batch: Batch.t,
+  ~inMemoryStore: InMemoryStore.t,
+  ~isInReorgThreshold,
+  ~loadManager,
+  ~indexer: Indexer.t,
+  ~chainFetchers: ChainMap.t<ChainFetcher.t>,
+) => {
+  let totalBatchSize = batch.totalBatchSize
+  // Compute chains state for this batch
+  let chains: Internal.chains = chainFetchers->computeChainsState
+
+  let logger = Logging.getLogger()
+  logger->Logging.childTrace({
+    "msg": "Started processing batch",
+    "totalBatchSize": totalBatchSize,
+    "chains": batch.progressedChainsById->Utils.Dict.mapValues(chainAfterBatch => {
+      {
+        "batchSize": chainAfterBatch.batchSize,
+        "progress": chainAfterBatch.progressBlockNumber,
+      }
+    }),
+  })
+
+  try {
+    let timeRef = Hrtime.makeTimer()
+
+    if batch.items->Utils.Array.notEmpty {
+      await batch->preloadBatchOrThrow(
+        ~loadManager,
+        ~persistence=indexer.persistence,
+        ~inMemoryStore,
+        ~chains,
+        ~config=indexer.config,
+      )
+    }
+
+    let elapsedTimeAfterLoaders = timeRef->Hrtime.timeSince->Hrtime.toMillis->Hrtime.intFromMillis
+
+    if batch.items->Utils.Array.notEmpty {
+      await batch->runBatchHandlersOrThrow(
+        ~inMemoryStore,
+        ~loadManager,
+        ~indexer,
+        ~shouldSaveHistory=indexer.config->Config.shouldSaveHistory(~isInReorgThreshold),
+        ~shouldBenchmark=Env.Benchmark.shouldSaveData,
+        ~chains,
+      )
+    }
+
+    let elapsedTimeAfterProcessing =
+      timeRef->Hrtime.timeSince->Hrtime.toMillis->Hrtime.intFromMillis
+
+    try {
+      await indexer.persistence->Persistence.writeBatch(
+        ~batch,
+        ~config=indexer.config,
+        ~inMemoryStore,
+        ~isInReorgThreshold,
+      )
+
+      let elapsedTimeAfterDbWrite = timeRef->Hrtime.timeSince->Hrtime.toMillis->Hrtime.intFromMillis
+      let loaderDuration = elapsedTimeAfterLoaders
+      let handlerDuration = elapsedTimeAfterProcessing - loaderDuration
+      let dbWriteDuration = elapsedTimeAfterDbWrite - elapsedTimeAfterProcessing
+      registerProcessEventBatchMetrics(
+        ~logger,
+        ~loadDuration=loaderDuration,
+        ~handlerDuration,
+        ~dbWriteDuration,
+      )
+      if Env.Benchmark.shouldSaveData {
+        Benchmark.addEventProcessing(
+          ~batchSize=totalBatchSize,
+          ~loadDuration=loaderDuration,
+          ~handlerDuration,
+          ~dbWriteDuration,
+          ~totalTimeElapsed=elapsedTimeAfterDbWrite,
+        )
+      }
+      Ok()
+    } catch {
+    | Persistence.StorageError({message, reason}) =>
+      reason->ErrorHandling.make(~msg=message, ~logger)->Error
+    | exn => exn->ErrorHandling.make(~msg="Failed writing batch to database", ~logger)->Error
+    }
+  } catch {
+  | ProcessingError({message, exn, item}) =>
+    exn
+    ->ErrorHandling.make(~msg=message, ~logger=item->Logging.getItemLogger)
+    ->Error
+  }
+}
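Note on the preload pattern in the hunk above: preloadBatchOrThrow runs every handler once with isPreload: true, discards the results, and guards each call with both a synchronous try/catch and a promise-level Promise.silentCatch, because a user handler that throws before its first await can fail synchronously instead of returning a rejected promise. The sketch below is a minimal, hypothetical illustration of that double guard in plain TypeScript; Handler, silentCatch, and collectPreloadPromises are illustrative names and are not part of the envio API.

// Hypothetical sketch, not envio code.
type Handler = (args: { event: unknown; context: unknown }) => Promise<void>;

// Swallow rejections during preload; preload results are discarded anyway.
const silentCatch = (p: Promise<void>): Promise<void> => p.catch(() => undefined);

function collectPreloadPromises(
  handlers: Handler[],
  args: { event: unknown; context: unknown },
): Promise<void[]> {
  const promises: Promise<void>[] = [];
  for (const handler of handlers) {
    try {
      // The promise-level catch covers rejections raised after the handler's first await...
      promises.push(silentCatch(handler(args)));
    } catch (_err) {
      // ...while try/catch covers a non-async handler that throws before returning a promise.
    }
  }
  return Promise.all(promises);
}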