envio 2.31.0-alpha.0 → 2.31.0-alpha.2
This diff compares publicly available package versions as published to their public registry and is provided for informational purposes only.
- package/package.json +5 -5
- package/src/Batch.res +400 -28
- package/src/Batch.res.js +286 -24
- package/src/EventRegister.res +9 -3
- package/src/EventRegister.res.js +6 -3
- package/src/EventRegister.resi +4 -1
- package/src/FetchState.res +116 -155
- package/src/FetchState.res.js +116 -106
- package/src/Internal.res +49 -0
- package/src/InternalConfig.res +1 -1
- package/src/Persistence.res +16 -1
- package/src/Persistence.res.js +1 -1
- package/src/PgStorage.res +49 -61
- package/src/PgStorage.res.js +44 -37
- package/src/Prometheus.res +7 -1
- package/src/Prometheus.res.js +8 -1
- package/src/ReorgDetection.res +222 -235
- package/src/ReorgDetection.res.js +34 -28
- package/src/SafeCheckpointTracking.res +132 -0
- package/src/SafeCheckpointTracking.res.js +95 -0
- package/src/Utils.res +64 -21
- package/src/Utils.res.js +61 -30
- package/src/db/EntityHistory.res +172 -294
- package/src/db/EntityHistory.res.js +98 -218
- package/src/db/InternalTable.gen.ts +13 -13
- package/src/db/InternalTable.res +286 -77
- package/src/db/InternalTable.res.js +160 -79
- package/src/db/Table.res +1 -0
- package/src/db/Table.res.js +1 -1
- package/src/sources/EventRouter.res +1 -1
- package/src/sources/Source.res +1 -1

package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "envio",
-  "version": "v2.31.0-alpha.0",
+  "version": "v2.31.0-alpha.2",
   "description": "A latency and sync speed optimized, developer friendly blockchain data indexer.",
   "bin": "./bin.js",
   "main": "./index.js",
@@ -25,10 +25,10 @@
   },
   "homepage": "https://envio.dev",
   "optionalDependencies": {
-    "envio-linux-x64": "v2.31.0-alpha.0",
-    "envio-linux-arm64": "v2.31.0-alpha.0",
-    "envio-darwin-x64": "v2.31.0-alpha.0",
-    "envio-darwin-arm64": "v2.31.0-alpha.0"
+    "envio-linux-x64": "v2.31.0-alpha.2",
+    "envio-linux-arm64": "v2.31.0-alpha.2",
+    "envio-darwin-x64": "v2.31.0-alpha.2",
+    "envio-darwin-arm64": "v2.31.0-alpha.2"
   },
   "dependencies": {
     "@envio-dev/hypersync-client": "0.6.6",

package/src/Batch.res
CHANGED

@@ -1,17 +1,34 @@
-
-
+open Belt
+
+@@warning("-44")
+open Utils.UnsafeIntOperators
+
+type chainAfterBatch = {
   batchSize: int,
   progressBlockNumber: int,
-
+  totalEventsProcessed: int,
+  fetchState: FetchState.t,
+  isProgressAtHeadWhenBatchCreated: bool,
+}
+
+type chainBeforeBatch = {
+  fetchState: FetchState.t,
+  reorgDetection: ReorgDetection.t,
+  progressBlockNumber: int,
+  sourceBlockNumber: int,
   totalEventsProcessed: int,
 }
 
 type t = {
+  totalBatchSize: int,
   items: array<Internal.item>,
-
-
-
-
+  progressedChainsById: dict<chainAfterBatch>,
+  // Unnest-like checkpoint fields:
+  checkpointIds: array<int>,
+  checkpointChainIds: array<int>,
+  checkpointBlockNumbers: array<int>,
+  checkpointBlockHashes: array<Js.Null.t<string>>,
+  checkpointEventsProcessed: array<int>,
 }
 
 /**
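
The new Batch.t record above keeps its checkpoints as five parallel arrays that share one index (the "Unnest-like checkpoint fields" comment), rather than as an array of checkpoint records. As a rough illustration, here is a minimal TypeScript sketch of that layout; BatchCheckpoints, CheckpointRow, and zipCheckpoints are hypothetical names introduced only for this sketch and are not part of the package's API:

    // Illustrative shape of the unnest-like checkpoint fields on Batch.t.
    // Field names mirror the ReScript record; the helper types are assumptions.
    interface BatchCheckpoints {
      checkpointIds: number[];
      checkpointChainIds: number[];
      checkpointBlockNumbers: number[];
      checkpointBlockHashes: (string | null)[];
      checkpointEventsProcessed: number[];
    }

    // Hypothetical row view over the parallel arrays.
    interface CheckpointRow {
      id: number;
      chainId: number;
      blockNumber: number;
      blockHash: string | null;
      eventsProcessed: number;
    }

    // All five arrays share one index, so row i is simply the i-th slot of each.
    function zipCheckpoints(b: BatchCheckpoints): CheckpointRow[] {
      return b.checkpointIds.map((id, i) => ({
        id,
        chainId: b.checkpointChainIds[i],
        blockNumber: b.checkpointBlockNumbers[i],
        blockHash: b.checkpointBlockHashes[i],
        eventsProcessed: b.checkpointEventsProcessed[i],
      }));
    }

Keeping one flat array per column avoids allocating a record per checkpoint on the hot path, and it matches the parameter shape of a single multi-row insert (for example a Postgres unnest-style statement, which the related PgStorage.res changes presumably rely on).
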
@@ -22,7 +39,7 @@ let getOrderedNextChain = (fetchStates: ChainMap.t<FetchState.t>, ~batchSizePerC
   let earliestChainTimestamp = ref(0)
   let chainKeys = fetchStates->ChainMap.keys
   for idx in 0 to chainKeys->Array.length - 1 {
-    let chain = chainKeys->Array.
+    let chain = chainKeys->Array.getUnsafe(idx)
     let fetchState = fetchStates->ChainMap.get(chain)
     if fetchState->FetchState.isActivelyIndexing {
       let timestamp = fetchState->FetchState.getTimestampAt(
@@ -75,37 +92,250 @@ let hasMultichainReadyItem = (
   }
 }
 
+let getProgressedChainsById = {
+  let getChainAfterBatchIfProgressed = (
+    ~chainBeforeBatch: chainBeforeBatch,
+    ~progressBlockNumberAfterBatch,
+    ~fetchStateAfterBatch,
+    ~batchSize,
+  ) => {
+    // The check is sufficient, since we guarantee to include a full block in a batch
+    // Also, this might be true even if batchSize is 0,
+    // eg when indexing at the head and chain doesn't have items in a block
+    if chainBeforeBatch.progressBlockNumber < progressBlockNumberAfterBatch {
+      Some(
+        (
+          {
+            batchSize,
+            progressBlockNumber: progressBlockNumberAfterBatch,
+            totalEventsProcessed: chainBeforeBatch.totalEventsProcessed + batchSize,
+            fetchState: fetchStateAfterBatch,
+            isProgressAtHeadWhenBatchCreated: progressBlockNumberAfterBatch >=
+            chainBeforeBatch.sourceBlockNumber,
+          }: chainAfterBatch
+        ),
+      )
+    } else {
+      None
+    }
+  }
+
+  (
+    ~chainsBeforeBatch: ChainMap.t<chainBeforeBatch>,
+    ~batchSizePerChain: dict<int>,
+    ~progressBlockNumberPerChain: dict<int>,
+  ) => {
+    let progressedChainsById = Js.Dict.empty()
+
+    // Needed to:
+    // - Recalculate the computed queue sizes
+    // - Accumulate registered dynamic contracts to store in the db
+    // - Trigger onBlock pointer update
+    chainsBeforeBatch
+    ->ChainMap.values
+    ->Array.forEachU(chainBeforeBatch => {
+      let fetchState = chainBeforeBatch.fetchState
+
+      let progressBlockNumberAfterBatch = switch progressBlockNumberPerChain->Utils.Dict.dangerouslyGetNonOption(
+        fetchState.chainId->Int.toString,
+      ) {
+      | Some(progressBlockNumber) => progressBlockNumber
+      | None => chainBeforeBatch.progressBlockNumber
+      }
+
+      switch switch batchSizePerChain->Utils.Dict.dangerouslyGetNonOption(
+        fetchState.chainId->Int.toString,
+      ) {
+      | Some(batchSize) =>
+        let leftItems = fetchState.buffer->Js.Array2.sliceFrom(batchSize)
+        getChainAfterBatchIfProgressed(
+          ~chainBeforeBatch,
+          ~batchSize,
+          ~fetchStateAfterBatch=fetchState->FetchState.updateInternal(~mutItems=leftItems),
+          ~progressBlockNumberAfterBatch,
+        )
+      // Skip not affected chains
+      | None =>
+        getChainAfterBatchIfProgressed(
+          ~chainBeforeBatch,
+          ~batchSize=0,
+          ~fetchStateAfterBatch=chainBeforeBatch.fetchState,
+          ~progressBlockNumberAfterBatch,
+        )
+      } {
+      | Some(progressedChain) =>
+        progressedChainsById->Utils.Dict.setByInt(
+          chainBeforeBatch.fetchState.chainId,
+          progressedChain,
+        )
+      | None => ()
+      }
+    })
+
+    progressedChainsById
+  }
+}
+
+@inline
+let addReorgCheckpoints = (
+  ~prevCheckpointId,
+  ~reorgDetection: ReorgDetection.t,
+  ~fromBlockExclusive,
+  ~toBlockExclusive,
+  ~chainId,
+  ~mutCheckpointIds,
+  ~mutCheckpointChainIds,
+  ~mutCheckpointBlockNumbers,
+  ~mutCheckpointBlockHashes,
+  ~mutCheckpointEventsProcessed,
+) => {
+  if (
+    reorgDetection.shouldRollbackOnReorg && !(reorgDetection.dataByBlockNumber->Utils.Dict.isEmpty)
+  ) {
+    let prevCheckpointId = ref(prevCheckpointId)
+    for blockNumber in fromBlockExclusive + 1 to toBlockExclusive - 1 {
+      switch reorgDetection->ReorgDetection.getHashByBlockNumber(~blockNumber) {
+      | Js.Null.Value(hash) =>
+        let checkpointId = prevCheckpointId.contents + 1
+        prevCheckpointId := checkpointId
+
+        mutCheckpointIds->Js.Array2.push(checkpointId)->ignore
+        mutCheckpointChainIds->Js.Array2.push(chainId)->ignore
+        mutCheckpointBlockNumbers->Js.Array2.push(blockNumber)->ignore
+        mutCheckpointBlockHashes->Js.Array2.push(Js.Null.Value(hash))->ignore
+        mutCheckpointEventsProcessed->Js.Array2.push(0)->ignore
+      | Js.Null.Null => ()
+      }
+    }
+    prevCheckpointId.contents
+  } else {
+    prevCheckpointId
+  }
+}
+
 let prepareOrderedBatch = (
+  ~checkpointIdBeforeBatch,
+  ~chainsBeforeBatch: ChainMap.t<chainBeforeBatch>,
   ~batchSizeTarget,
-  ~fetchStates: ChainMap.t<FetchState.t>,
-  ~mutBatchSizePerChain: dict<int>,
 ) => {
-  let
+  let totalBatchSize = ref(0)
   let isFinished = ref(false)
+  let prevCheckpointId = ref(checkpointIdBeforeBatch)
+  let mutBatchSizePerChain = Js.Dict.empty()
+  let mutProgressBlockNumberPerChain = Js.Dict.empty()
+
+  let fetchStates = chainsBeforeBatch->ChainMap.map(chainBeforeBatch => chainBeforeBatch.fetchState)
+
   let items = []
+  let checkpointIds = []
+  let checkpointChainIds = []
+  let checkpointBlockNumbers = []
+  let checkpointBlockHashes = []
+  let checkpointEventsProcessed = []
 
-  while
+  while totalBatchSize.contents < batchSizeTarget && !isFinished.contents {
     switch fetchStates->getOrderedNextChain(~batchSizePerChain=mutBatchSizePerChain) {
     | Some(fetchState) => {
+        let chainBeforeBatch =
+          chainsBeforeBatch->ChainMap.get(ChainMap.Chain.makeUnsafe(~chainId=fetchState.chainId))
         let itemsCountBefore = switch mutBatchSizePerChain->Utils.Dict.dangerouslyGetByIntNonOption(
           fetchState.chainId,
         ) {
         | Some(batchSize) => batchSize
         | None => 0
         }
-
-
+
+        let prevBlockNumber = switch mutProgressBlockNumberPerChain->Utils.Dict.dangerouslyGetByIntNonOption(
+          fetchState.chainId,
+        ) {
+        | Some(progressBlockNumber) => progressBlockNumber
+        | None => chainBeforeBatch.progressBlockNumber
+        }
+
+        let newItemsCount = fetchState->FetchState.getReadyItemsCount(
+          // We should get items only for a single block
+          // Since for the ordered mode next block could be after another chain's block
+          ~targetSize=1,
+          ~fromItem=itemsCountBefore,
+        )
 
         if newItemsCount > 0 {
-
-
+          let item0 = fetchState.buffer->Array.getUnsafe(itemsCountBefore)
+          let blockNumber = item0->Internal.getItemBlockNumber
+
+          prevCheckpointId :=
+            addReorgCheckpoints(
+              ~chainId=fetchState.chainId,
+              ~reorgDetection=chainBeforeBatch.reorgDetection,
+              ~prevCheckpointId=prevCheckpointId.contents,
+              ~fromBlockExclusive=prevBlockNumber,
+              ~toBlockExclusive=blockNumber,
+              ~mutCheckpointIds=checkpointIds,
+              ~mutCheckpointChainIds=checkpointChainIds,
+              ~mutCheckpointBlockNumbers=checkpointBlockNumbers,
+              ~mutCheckpointBlockHashes=checkpointBlockHashes,
+              ~mutCheckpointEventsProcessed=checkpointEventsProcessed,
+            )
+
+          let checkpointId = prevCheckpointId.contents + 1
+
+          items
+          ->Js.Array2.push(item0)
+          ->ignore
+          for idx in 1 to newItemsCount - 1 {
+            items
+            ->Js.Array2.push(fetchState.buffer->Belt.Array.getUnsafe(itemsCountBefore + idx))
+            ->ignore
           }
-
+
+          checkpointIds
+          ->Js.Array2.push(checkpointId)
+          ->ignore
+          checkpointChainIds
+          ->Js.Array2.push(fetchState.chainId)
+          ->ignore
+          checkpointBlockNumbers
+          ->Js.Array2.push(blockNumber)
+          ->ignore
+          checkpointBlockHashes
+          ->Js.Array2.push(
+            chainBeforeBatch.reorgDetection->ReorgDetection.getHashByBlockNumber(~blockNumber),
+          )
+          ->ignore
+          checkpointEventsProcessed
+          ->Js.Array2.push(newItemsCount)
+          ->ignore
+
+          prevCheckpointId := checkpointId
+          totalBatchSize := totalBatchSize.contents + newItemsCount
           mutBatchSizePerChain->Utils.Dict.setByInt(
             fetchState.chainId,
             itemsCountBefore + newItemsCount,
           )
+          mutProgressBlockNumberPerChain->Utils.Dict.setByInt(fetchState.chainId, blockNumber)
         } else {
+          let blockNumberAfterBatch = fetchState->FetchState.bufferBlockNumber
+
+          prevCheckpointId :=
+            addReorgCheckpoints(
+              ~chainId=fetchState.chainId,
+              ~reorgDetection=chainBeforeBatch.reorgDetection,
+              ~prevCheckpointId=prevCheckpointId.contents,
+              ~fromBlockExclusive=prevBlockNumber,
+              ~toBlockExclusive=blockNumberAfterBatch + 1, // Make it inclusive
+              ~mutCheckpointIds=checkpointIds,
+              ~mutCheckpointChainIds=checkpointChainIds,
+              ~mutCheckpointBlockNumbers=checkpointBlockNumbers,
+              ~mutCheckpointBlockHashes=checkpointBlockHashes,
+              ~mutCheckpointEventsProcessed=checkpointEventsProcessed,
+            )
+
+          // Since the chain was chosen as next
+          // the fact that it doesn't have new items means that it reached the buffer block number
+          mutProgressBlockNumberPerChain->Utils.Dict.setByInt(
+            fetchState.chainId,
+            blockNumberAfterBatch,
+          )
           isFinished := true
         }
       }
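
The addReorgCheckpoints helper added above walks the open interval between two block numbers and emits a zero-event checkpoint for every block whose hash the reorg detector has recorded, so a later reorg can be rolled back to an exact, hash-verified block. Both call sites in prepareOrderedBatch use it the same way: once before appending a block's items (with that block excluded), and once more when a chain runs out of ready items, passing toBlockExclusive as the buffer block number plus one so the last tracked block is covered. A short TypeScript sketch of the walk, reusing the illustrative CheckpointRow shape from the earlier sketch and assuming a hypothetical getHash lookup in place of ReorgDetection.getHashByBlockNumber:

    // Sketch of the addReorgCheckpoints walk; both block bounds are exclusive.
    // getHash returns null for blocks the reorg detector is not tracking.
    function addReorgCheckpointsSketch(
      prevCheckpointId: number,
      chainId: number,
      fromBlockExclusive: number,
      toBlockExclusive: number,
      getHash: (blockNumber: number) => string | null,
      out: CheckpointRow[], // illustrative row type from the earlier sketch
    ): number {
      let lastId = prevCheckpointId;
      for (let block = fromBlockExclusive + 1; block < toBlockExclusive; block++) {
        const hash = getHash(block);
        if (hash !== null) {
          // A tracked hash with no events: recorded only so a reorg at this
          // block can later be detected and rolled back to.
          lastId += 1;
          out.push({
            id: lastId,
            chainId,
            blockNumber: block,
            blockHash: hash,
            eventsProcessed: 0,
          });
        }
      }
      return lastId; // the caller continues numbering checkpoints from here
    }
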
@@ -114,45 +344,187 @@ let prepareOrderedBatch = (
     }
   }
 
-
+  {
+    totalBatchSize: totalBatchSize.contents,
+    items,
+    progressedChainsById: getProgressedChainsById(
+      ~chainsBeforeBatch,
+      ~batchSizePerChain=mutBatchSizePerChain,
+      ~progressBlockNumberPerChain=mutProgressBlockNumberPerChain,
+    ),
+    checkpointIds,
+    checkpointChainIds,
+    checkpointBlockNumbers,
+    checkpointBlockHashes,
+    checkpointEventsProcessed,
+  }
 }
 
 let prepareUnorderedBatch = (
+  ~checkpointIdBeforeBatch,
+  ~chainsBeforeBatch: ChainMap.t<chainBeforeBatch>,
   ~batchSizeTarget,
-  ~fetchStates: ChainMap.t<FetchState.t>,
-  ~mutBatchSizePerChain: dict<int>,
 ) => {
   let preparedFetchStates =
-
+    chainsBeforeBatch
     ->ChainMap.values
-    ->
+    ->Js.Array2.map(chainBeforeBatch => chainBeforeBatch.fetchState)
+    ->FetchState.sortForUnorderedBatch(~batchSizeTarget)
 
   let chainIdx = ref(0)
   let preparedNumber = preparedFetchStates->Array.length
-  let
+  let totalBatchSize = ref(0)
+
+  let prevCheckpointId = ref(checkpointIdBeforeBatch)
+  let mutBatchSizePerChain = Js.Dict.empty()
+  let mutProgressBlockNumberPerChain = Js.Dict.empty()
 
   let items = []
+  let checkpointIds = []
+  let checkpointChainIds = []
+  let checkpointBlockNumbers = []
+  let checkpointBlockHashes = []
+  let checkpointEventsProcessed = []
 
   // Accumulate items for all actively indexing chains
   // the way to group as many items from a single chain as possible
   // This way the loaders optimisations will hit more often
-  while
+  while totalBatchSize.contents < batchSizeTarget && chainIdx.contents < preparedNumber {
     let fetchState = preparedFetchStates->Js.Array2.unsafe_get(chainIdx.contents)
     let chainBatchSize =
       fetchState->FetchState.getReadyItemsCount(
-        ~targetSize=batchSizeTarget -
+        ~targetSize=batchSizeTarget - totalBatchSize.contents,
         ~fromItem=0,
       )
+    let chainBeforeBatch =
+      chainsBeforeBatch->ChainMap.get(ChainMap.Chain.makeUnsafe(~chainId=fetchState.chainId))
+
+    let prevBlockNumber = ref(chainBeforeBatch.progressBlockNumber)
     if chainBatchSize > 0 {
       for idx in 0 to chainBatchSize - 1 {
-
+        let item = fetchState.buffer->Belt.Array.getUnsafe(idx)
+        let blockNumber = item->Internal.getItemBlockNumber
+
+        // Every new block we should create a new checkpoint
+        if blockNumber !== prevBlockNumber.contents {
+          prevCheckpointId :=
+            addReorgCheckpoints(
+              ~chainId=fetchState.chainId,
+              ~reorgDetection=chainBeforeBatch.reorgDetection,
+              ~prevCheckpointId=prevCheckpointId.contents,
+              ~fromBlockExclusive=prevBlockNumber.contents,
+              ~toBlockExclusive=blockNumber,
+              ~mutCheckpointIds=checkpointIds,
+              ~mutCheckpointChainIds=checkpointChainIds,
+              ~mutCheckpointBlockNumbers=checkpointBlockNumbers,
+              ~mutCheckpointBlockHashes=checkpointBlockHashes,
+              ~mutCheckpointEventsProcessed=checkpointEventsProcessed,
+            )
+
+          let checkpointId = prevCheckpointId.contents + 1
+
+          checkpointIds->Js.Array2.push(checkpointId)->ignore
+          checkpointChainIds->Js.Array2.push(fetchState.chainId)->ignore
+          checkpointBlockNumbers->Js.Array2.push(blockNumber)->ignore
+          checkpointBlockHashes
+          ->Js.Array2.push(
+            chainBeforeBatch.reorgDetection->ReorgDetection.getHashByBlockNumber(~blockNumber),
+          )
+          ->ignore
+          checkpointEventsProcessed->Js.Array2.push(1)->ignore
+
+          prevBlockNumber := blockNumber
+          prevCheckpointId := checkpointId
+        } else {
+          let lastIndex = checkpointEventsProcessed->Array.length - 1
+          checkpointEventsProcessed
+          ->Belt.Array.setUnsafe(
+            lastIndex,
+            checkpointEventsProcessed->Array.getUnsafe(lastIndex) + 1,
+          )
+          ->ignore
+        }
+
+        items->Js.Array2.push(item)->ignore
       }
-
+
+      totalBatchSize := totalBatchSize.contents + chainBatchSize
       mutBatchSizePerChain->Utils.Dict.setByInt(fetchState.chainId, chainBatchSize)
     }
 
+    let progressBlockNumberAfterBatch =
+      fetchState->FetchState.getUnorderedMultichainProgressBlockNumberAt(~index=chainBatchSize)
+
+    prevCheckpointId :=
+      addReorgCheckpoints(
+        ~chainId=fetchState.chainId,
+        ~reorgDetection=chainBeforeBatch.reorgDetection,
+        ~prevCheckpointId=prevCheckpointId.contents,
+        ~fromBlockExclusive=prevBlockNumber.contents,
+        ~toBlockExclusive=progressBlockNumberAfterBatch + 1, // Make it inclusive
+        ~mutCheckpointIds=checkpointIds,
+        ~mutCheckpointChainIds=checkpointChainIds,
+        ~mutCheckpointBlockNumbers=checkpointBlockNumbers,
+        ~mutCheckpointBlockHashes=checkpointBlockHashes,
+        ~mutCheckpointEventsProcessed=checkpointEventsProcessed,
+      )
+
+    mutProgressBlockNumberPerChain->Utils.Dict.setByInt(
+      fetchState.chainId,
+      progressBlockNumberAfterBatch,
+    )
+
     chainIdx := chainIdx.contents + 1
   }
 
-
+  {
+    totalBatchSize: totalBatchSize.contents,
+    items,
+    progressedChainsById: getProgressedChainsById(
+      ~chainsBeforeBatch,
+      ~batchSizePerChain=mutBatchSizePerChain,
+      ~progressBlockNumberPerChain=mutProgressBlockNumberPerChain,
+    ),
+    checkpointIds,
+    checkpointChainIds,
+    checkpointBlockNumbers,
+    checkpointBlockHashes,
+    checkpointEventsProcessed,
+  }
+}
+
+let make = (
+  ~checkpointIdBeforeBatch,
+  ~chainsBeforeBatch: ChainMap.t<chainBeforeBatch>,
+  ~multichain: InternalConfig.multichain,
+  ~batchSizeTarget,
+) => {
+  if (
+    switch multichain {
+    | Unordered => true
+    | Ordered => chainsBeforeBatch->ChainMap.size === 1
+    }
+  ) {
+    prepareUnorderedBatch(~checkpointIdBeforeBatch, ~chainsBeforeBatch, ~batchSizeTarget)
+  } else {
+    prepareOrderedBatch(~checkpointIdBeforeBatch, ~chainsBeforeBatch, ~batchSizeTarget)
+  }
+}
+
+let findFirstEventBlockNumber = (batch: t, ~chainId) => {
+  let idx = ref(0)
+  let result = ref(None)
+  let checkpointsLength = batch.checkpointIds->Array.length
+  while idx.contents < checkpointsLength && result.contents === None {
+    let checkpointChainId = batch.checkpointChainIds->Array.getUnsafe(idx.contents)
+    if (
+      checkpointChainId === chainId &&
+      batch.checkpointEventsProcessed->Array.getUnsafe(idx.contents) > 0
+    ) {
+      result := Some(batch.checkpointBlockNumbers->Array.getUnsafe(idx.contents))
+    } else {
+      idx := idx.contents + 1
+    }
+  }
+  result.contents
 }
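
The last addition shown in this hunk is findFirstEventBlockNumber, a linear scan over the parallel checkpoint arrays for the first checkpoint of a given chain that processed at least one event; reorg-only checkpoints carry eventsProcessed = 0 and are skipped. An equivalent TypeScript sketch, reusing the illustrative BatchCheckpoints shape from the first sketch:

    // Sketch of findFirstEventBlockNumber: return the block number of the first
    // checkpoint for chainId that actually processed events, if any.
    function findFirstEventBlockNumberSketch(
      batch: BatchCheckpoints, // illustrative shape from the first sketch
      chainId: number,
    ): number | undefined {
      for (let i = 0; i < batch.checkpointIds.length; i++) {
        if (
          batch.checkpointChainIds[i] === chainId &&
          batch.checkpointEventsProcessed[i] > 0 // skip reorg-only checkpoints
        ) {
          return batch.checkpointBlockNumbers[i];
        }
      }
      return undefined;
    }
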