envio 2.10.0 → 2.11.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/evm.schema.json +103 -92
- package/package.json +5 -5
- package/src/Internal.gen.ts +47 -0
- package/src/Internal.res +124 -0
- package/src/ReorgDetection.res +432 -0
- package/src/sources/HyperSyncJsonApi.res +376 -0
- /package/src/{bindings → sources}/HyperSyncClient.res +0 -0
package/evm.schema.json
CHANGED
|
@@ -83,7 +83,7 @@
|
|
|
83
83
|
]
|
|
84
84
|
},
|
|
85
85
|
"field_selection": {
|
|
86
|
-
"description": "
|
|
86
|
+
"description": "Select the block and transaction fields to include in all events globally",
|
|
87
87
|
"anyOf": [
|
|
88
88
|
{
|
|
89
89
|
"$ref": "#/$defs/FieldSelection"
|
|
@@ -159,6 +159,17 @@
|
|
|
159
159
|
"string",
|
|
160
160
|
"null"
|
|
161
161
|
]
|
|
162
|
+
},
|
|
163
|
+
"field_selection": {
|
|
164
|
+
"description": "Select the block and transaction fields to include in the specific event",
|
|
165
|
+
"anyOf": [
|
|
166
|
+
{
|
|
167
|
+
"$ref": "#/$defs/FieldSelection"
|
|
168
|
+
},
|
|
169
|
+
{
|
|
170
|
+
"type": "null"
|
|
171
|
+
}
|
|
172
|
+
]
|
|
162
173
|
}
|
|
163
174
|
},
|
|
164
175
|
"additionalProperties": false,
|
|
@@ -166,6 +177,97 @@
|
|
|
166
177
|
"event"
|
|
167
178
|
]
|
|
168
179
|
},
|
|
180
|
+
"FieldSelection": {
|
|
181
|
+
"type": "object",
|
|
182
|
+
"properties": {
|
|
183
|
+
"transaction_fields": {
|
|
184
|
+
"description": "The transaction fields to include in the event, or in all events if applied globally",
|
|
185
|
+
"type": [
|
|
186
|
+
"array",
|
|
187
|
+
"null"
|
|
188
|
+
],
|
|
189
|
+
"items": {
|
|
190
|
+
"$ref": "#/$defs/TransactionField"
|
|
191
|
+
}
|
|
192
|
+
},
|
|
193
|
+
"block_fields": {
|
|
194
|
+
"description": "The block fields to include in the event, or in all events if applied globally",
|
|
195
|
+
"type": [
|
|
196
|
+
"array",
|
|
197
|
+
"null"
|
|
198
|
+
],
|
|
199
|
+
"items": {
|
|
200
|
+
"$ref": "#/$defs/BlockField"
|
|
201
|
+
}
|
|
202
|
+
}
|
|
203
|
+
},
|
|
204
|
+
"additionalProperties": false
|
|
205
|
+
},
|
|
206
|
+
"TransactionField": {
|
|
207
|
+
"type": "string",
|
|
208
|
+
"enum": [
|
|
209
|
+
"transactionIndex",
|
|
210
|
+
"hash",
|
|
211
|
+
"from",
|
|
212
|
+
"to",
|
|
213
|
+
"gas",
|
|
214
|
+
"gasPrice",
|
|
215
|
+
"maxPriorityFeePerGas",
|
|
216
|
+
"maxFeePerGas",
|
|
217
|
+
"cumulativeGasUsed",
|
|
218
|
+
"effectiveGasPrice",
|
|
219
|
+
"gasUsed",
|
|
220
|
+
"input",
|
|
221
|
+
"nonce",
|
|
222
|
+
"value",
|
|
223
|
+
"v",
|
|
224
|
+
"r",
|
|
225
|
+
"s",
|
|
226
|
+
"contractAddress",
|
|
227
|
+
"logsBloom",
|
|
228
|
+
"root",
|
|
229
|
+
"status",
|
|
230
|
+
"yParity",
|
|
231
|
+
"chainId",
|
|
232
|
+
"maxFeePerBlobGas",
|
|
233
|
+
"blobVersionedHashes",
|
|
234
|
+
"kind",
|
|
235
|
+
"l1Fee",
|
|
236
|
+
"l1GasPrice",
|
|
237
|
+
"l1GasUsed",
|
|
238
|
+
"l1FeeScalar",
|
|
239
|
+
"gasUsedForL1"
|
|
240
|
+
]
|
|
241
|
+
},
|
|
242
|
+
"BlockField": {
|
|
243
|
+
"type": "string",
|
|
244
|
+
"enum": [
|
|
245
|
+
"parentHash",
|
|
246
|
+
"nonce",
|
|
247
|
+
"sha3Uncles",
|
|
248
|
+
"logsBloom",
|
|
249
|
+
"transactionsRoot",
|
|
250
|
+
"stateRoot",
|
|
251
|
+
"receiptsRoot",
|
|
252
|
+
"miner",
|
|
253
|
+
"difficulty",
|
|
254
|
+
"totalDifficulty",
|
|
255
|
+
"extraData",
|
|
256
|
+
"size",
|
|
257
|
+
"gasLimit",
|
|
258
|
+
"gasUsed",
|
|
259
|
+
"uncles",
|
|
260
|
+
"baseFeePerGas",
|
|
261
|
+
"blobGasUsed",
|
|
262
|
+
"excessBlobGas",
|
|
263
|
+
"parentBeaconBlockRoot",
|
|
264
|
+
"withdrawalsRoot",
|
|
265
|
+
"l1BlockNumber",
|
|
266
|
+
"sendCount",
|
|
267
|
+
"sendRoot",
|
|
268
|
+
"mixHash"
|
|
269
|
+
]
|
|
270
|
+
},
|
|
169
271
|
"Network": {
|
|
170
272
|
"type": "object",
|
|
171
273
|
"properties": {
|
|
@@ -405,97 +507,6 @@
|
|
|
405
507
|
"viem",
|
|
406
508
|
"hypersync-client"
|
|
407
509
|
]
|
|
408
|
-
},
|
|
409
|
-
"FieldSelection": {
|
|
410
|
-
"type": "object",
|
|
411
|
-
"properties": {
|
|
412
|
-
"transaction_fields": {
|
|
413
|
-
"description": "Fields of a transaction to add to the event passed to handlers",
|
|
414
|
-
"type": [
|
|
415
|
-
"array",
|
|
416
|
-
"null"
|
|
417
|
-
],
|
|
418
|
-
"items": {
|
|
419
|
-
"$ref": "#/$defs/TransactionField"
|
|
420
|
-
}
|
|
421
|
-
},
|
|
422
|
-
"block_fields": {
|
|
423
|
-
"description": "Fields of a block to add to the event passed to handlers",
|
|
424
|
-
"type": [
|
|
425
|
-
"array",
|
|
426
|
-
"null"
|
|
427
|
-
],
|
|
428
|
-
"items": {
|
|
429
|
-
"$ref": "#/$defs/BlockField"
|
|
430
|
-
}
|
|
431
|
-
}
|
|
432
|
-
},
|
|
433
|
-
"additionalProperties": false
|
|
434
|
-
},
|
|
435
|
-
"TransactionField": {
|
|
436
|
-
"type": "string",
|
|
437
|
-
"enum": [
|
|
438
|
-
"transactionIndex",
|
|
439
|
-
"hash",
|
|
440
|
-
"from",
|
|
441
|
-
"to",
|
|
442
|
-
"gas",
|
|
443
|
-
"gasPrice",
|
|
444
|
-
"maxPriorityFeePerGas",
|
|
445
|
-
"maxFeePerGas",
|
|
446
|
-
"cumulativeGasUsed",
|
|
447
|
-
"effectiveGasPrice",
|
|
448
|
-
"gasUsed",
|
|
449
|
-
"input",
|
|
450
|
-
"nonce",
|
|
451
|
-
"value",
|
|
452
|
-
"v",
|
|
453
|
-
"r",
|
|
454
|
-
"s",
|
|
455
|
-
"contractAddress",
|
|
456
|
-
"logsBloom",
|
|
457
|
-
"root",
|
|
458
|
-
"status",
|
|
459
|
-
"yParity",
|
|
460
|
-
"chainId",
|
|
461
|
-
"maxFeePerBlobGas",
|
|
462
|
-
"blobVersionedHashes",
|
|
463
|
-
"kind",
|
|
464
|
-
"l1Fee",
|
|
465
|
-
"l1GasPrice",
|
|
466
|
-
"l1GasUsed",
|
|
467
|
-
"l1FeeScalar",
|
|
468
|
-
"gasUsedForL1"
|
|
469
|
-
]
|
|
470
|
-
},
|
|
471
|
-
"BlockField": {
|
|
472
|
-
"type": "string",
|
|
473
|
-
"enum": [
|
|
474
|
-
"parentHash",
|
|
475
|
-
"nonce",
|
|
476
|
-
"sha3Uncles",
|
|
477
|
-
"logsBloom",
|
|
478
|
-
"transactionsRoot",
|
|
479
|
-
"stateRoot",
|
|
480
|
-
"receiptsRoot",
|
|
481
|
-
"miner",
|
|
482
|
-
"difficulty",
|
|
483
|
-
"totalDifficulty",
|
|
484
|
-
"extraData",
|
|
485
|
-
"size",
|
|
486
|
-
"gasLimit",
|
|
487
|
-
"gasUsed",
|
|
488
|
-
"uncles",
|
|
489
|
-
"baseFeePerGas",
|
|
490
|
-
"blobGasUsed",
|
|
491
|
-
"excessBlobGas",
|
|
492
|
-
"parentBeaconBlockRoot",
|
|
493
|
-
"withdrawalsRoot",
|
|
494
|
-
"l1BlockNumber",
|
|
495
|
-
"sendCount",
|
|
496
|
-
"sendRoot",
|
|
497
|
-
"mixHash"
|
|
498
|
-
]
|
|
499
510
|
}
|
|
500
511
|
}
|
|
501
512
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "envio",
|
|
3
|
-
"version": "v2.
|
|
3
|
+
"version": "v2.11.1",
|
|
4
4
|
"description": "A latency and sync speed optimized, developer friendly blockchain data indexer.",
|
|
5
5
|
"bin": "./bin.js",
|
|
6
6
|
"repository": {
|
|
@@ -23,10 +23,10 @@
|
|
|
23
23
|
},
|
|
24
24
|
"homepage": "https://envio.dev",
|
|
25
25
|
"optionalDependencies": {
|
|
26
|
-
"envio-linux-x64": "v2.
|
|
27
|
-
"envio-linux-arm64": "v2.
|
|
28
|
-
"envio-darwin-x64": "v2.
|
|
29
|
-
"envio-darwin-arm64": "v2.
|
|
26
|
+
"envio-linux-x64": "v2.11.1",
|
|
27
|
+
"envio-linux-arm64": "v2.11.1",
|
|
28
|
+
"envio-darwin-x64": "v2.11.1",
|
|
29
|
+
"envio-darwin-arm64": "v2.11.1"
|
|
30
30
|
},
|
|
31
31
|
"dependencies": {
|
|
32
32
|
"@envio-dev/hypersync-client": "0.6.2",
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
/* TypeScript file generated from Internal.res by genType. */
|
|
2
|
+
|
|
3
|
+
/* eslint-disable */
|
|
4
|
+
/* tslint:disable */
|
|
5
|
+
|
|
6
|
+
import type {t as Address_t} from './Address.gen';
|
|
7
|
+
|
|
8
|
+
export type genericEvent<params,block,transaction> = {
|
|
9
|
+
readonly params: params;
|
|
10
|
+
readonly chainId: number;
|
|
11
|
+
readonly srcAddress: Address_t;
|
|
12
|
+
readonly logIndex: number;
|
|
13
|
+
readonly transaction: transaction;
|
|
14
|
+
readonly block: block
|
|
15
|
+
};
|
|
16
|
+
|
|
17
|
+
export type genericLoaderArgs<event,context> = { readonly event: event; readonly context: context };
|
|
18
|
+
|
|
19
|
+
export type genericLoader<args,loaderReturn> = (_1:args) => Promise<loaderReturn>;
|
|
20
|
+
|
|
21
|
+
export type genericContractRegisterArgs<event,context> = { readonly event: event; readonly context: context };
|
|
22
|
+
|
|
23
|
+
export type genericContractRegister<args> = (_1:args) => void;
|
|
24
|
+
|
|
25
|
+
export type genericHandlerArgs<event,context,loaderReturn> = {
|
|
26
|
+
readonly event: event;
|
|
27
|
+
readonly context: context;
|
|
28
|
+
readonly loaderReturn: loaderReturn
|
|
29
|
+
};
|
|
30
|
+
|
|
31
|
+
export type genericHandler<args> = (_1:args) => Promise<void>;
|
|
32
|
+
|
|
33
|
+
export type genericHandlerWithLoader<loader,handler,eventFilters> = {
|
|
34
|
+
readonly loader: loader;
|
|
35
|
+
readonly handler: handler;
|
|
36
|
+
readonly wildcard?: boolean;
|
|
37
|
+
readonly eventFilters?: eventFilters;
|
|
38
|
+
readonly preRegisterDynamicContracts?: boolean
|
|
39
|
+
};
|
|
40
|
+
|
|
41
|
+
export type fuelSupplyParams = { readonly subId: string; readonly amount: bigint };
|
|
42
|
+
|
|
43
|
+
export type fuelTransferParams = {
|
|
44
|
+
readonly to: Address_t;
|
|
45
|
+
readonly assetId: string;
|
|
46
|
+
readonly amount: bigint
|
|
47
|
+
};
|
package/src/Internal.res
ADDED
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
type eventParams
|
|
2
|
+
type eventBlock
|
|
3
|
+
type eventTransaction
|
|
4
|
+
|
|
5
|
+
@genType
|
|
6
|
+
type genericEvent<'params, 'block, 'transaction> = {
|
|
7
|
+
params: 'params,
|
|
8
|
+
chainId: int,
|
|
9
|
+
srcAddress: Address.t,
|
|
10
|
+
logIndex: int,
|
|
11
|
+
transaction: 'transaction,
|
|
12
|
+
block: 'block,
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
type event = genericEvent<eventParams, eventBlock, eventTransaction>
|
|
16
|
+
|
|
17
|
+
external fromGenericEvent: genericEvent<'a, 'b, 'c> => event = "%identity"
|
|
18
|
+
|
|
19
|
+
type loaderReturn
|
|
20
|
+
|
|
21
|
+
@genType
|
|
22
|
+
type genericLoaderArgs<'event, 'context> = {
|
|
23
|
+
event: 'event,
|
|
24
|
+
context: 'context,
|
|
25
|
+
}
|
|
26
|
+
@genType
|
|
27
|
+
type genericLoader<'args, 'loaderReturn> = 'args => promise<'loaderReturn>
|
|
28
|
+
|
|
29
|
+
type loaderContext
|
|
30
|
+
type loaderArgs = genericLoaderArgs<event, loaderContext>
|
|
31
|
+
type loader = genericLoader<loaderArgs, loaderReturn>
|
|
32
|
+
|
|
33
|
+
@genType
|
|
34
|
+
type genericContractRegisterArgs<'event, 'context> = {
|
|
35
|
+
event: 'event,
|
|
36
|
+
context: 'context,
|
|
37
|
+
}
|
|
38
|
+
@genType
|
|
39
|
+
type genericContractRegister<'args> = 'args => unit
|
|
40
|
+
|
|
41
|
+
type contractRegisterContext
|
|
42
|
+
type contractRegisterArgs = genericContractRegisterArgs<event, contractRegisterContext>
|
|
43
|
+
type contractRegister = genericContractRegister<contractRegisterArgs>
|
|
44
|
+
|
|
45
|
+
@genType
|
|
46
|
+
type genericHandlerArgs<'event, 'context, 'loaderReturn> = {
|
|
47
|
+
event: 'event,
|
|
48
|
+
context: 'context,
|
|
49
|
+
loaderReturn: 'loaderReturn,
|
|
50
|
+
}
|
|
51
|
+
@genType
|
|
52
|
+
type genericHandler<'args> = 'args => promise<unit>
|
|
53
|
+
|
|
54
|
+
type handlerContext
|
|
55
|
+
type handlerArgs = genericHandlerArgs<event, handlerContext, loaderReturn>
|
|
56
|
+
type handler = genericHandler<handlerArgs>
|
|
57
|
+
|
|
58
|
+
@genType
|
|
59
|
+
type genericHandlerWithLoader<'loader, 'handler, 'eventFilters> = {
|
|
60
|
+
loader: 'loader,
|
|
61
|
+
handler: 'handler,
|
|
62
|
+
wildcard?: bool,
|
|
63
|
+
eventFilters?: 'eventFilters,
|
|
64
|
+
preRegisterDynamicContracts?: bool,
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
type eventItem = {
|
|
68
|
+
eventName: string,
|
|
69
|
+
contractName: string,
|
|
70
|
+
loader: option<loader>,
|
|
71
|
+
handler: option<handler>,
|
|
72
|
+
contractRegister: option<contractRegister>,
|
|
73
|
+
timestamp: int,
|
|
74
|
+
chain: ChainMap.Chain.t,
|
|
75
|
+
blockNumber: int,
|
|
76
|
+
logIndex: int,
|
|
77
|
+
event: event,
|
|
78
|
+
paramsRawEventSchema: S.schema<eventParams>,
|
|
79
|
+
//Default to false, if an event needs to
|
|
80
|
+
//be reprocessed after it has loaded dynamic contracts
|
|
81
|
+
//This gets set to true and does not try and reload events
|
|
82
|
+
hasRegisteredDynamicContracts?: bool,
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
type fuelEventKind =
|
|
86
|
+
| LogData({logId: string, decode: string => eventParams})
|
|
87
|
+
| Mint
|
|
88
|
+
| Burn
|
|
89
|
+
| Transfer
|
|
90
|
+
| Call
|
|
91
|
+
type fuelEventConfig = {
|
|
92
|
+
name: string,
|
|
93
|
+
kind: fuelEventKind,
|
|
94
|
+
isWildcard: bool,
|
|
95
|
+
loader: option<loader>,
|
|
96
|
+
handler: option<handler>,
|
|
97
|
+
contractRegister: option<contractRegister>,
|
|
98
|
+
paramsRawEventSchema: S.schema<eventParams>,
|
|
99
|
+
}
|
|
100
|
+
type fuelContractConfig = {
|
|
101
|
+
name: string,
|
|
102
|
+
events: array<fuelEventConfig>,
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
@genType
|
|
106
|
+
type fuelSupplyParams = {
|
|
107
|
+
subId: string,
|
|
108
|
+
amount: bigint,
|
|
109
|
+
}
|
|
110
|
+
let fuelSupplyParamsSchema = S.schema(s => {
|
|
111
|
+
subId: s.matches(S.string),
|
|
112
|
+
amount: s.matches(BigInt.schema),
|
|
113
|
+
})
|
|
114
|
+
@genType
|
|
115
|
+
type fuelTransferParams = {
|
|
116
|
+
to: Address.t,
|
|
117
|
+
assetId: string,
|
|
118
|
+
amount: bigint,
|
|
119
|
+
}
|
|
120
|
+
let fuelTransferParamsSchema = S.schema(s => {
|
|
121
|
+
to: s.matches(Address.schema),
|
|
122
|
+
assetId: s.matches(S.string),
|
|
123
|
+
amount: s.matches(BigInt.schema),
|
|
124
|
+
})
|
|
@@ -0,0 +1,432 @@
|
|
|
1
|
+
type blockNumberAndHash = {
|
|
2
|
+
//Block hash is used for actual comparison to test for reorg
|
|
3
|
+
blockHash: string,
|
|
4
|
+
blockNumber: int,
|
|
5
|
+
}
|
|
6
|
+
|
|
7
|
+
type blockData = {
|
|
8
|
+
...blockNumberAndHash,
|
|
9
|
+
//Timestamp is needed for multichain to action reorgs across chains from given blocks to
|
|
10
|
+
//ensure ordering is kept constant
|
|
11
|
+
blockTimestamp: int,
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
module LastBlockScannedHashes: {
|
|
15
|
+
type t
|
|
16
|
+
/**Instantiat t with existing data*/
|
|
17
|
+
let makeWithData: (array<blockData>, ~confirmedBlockThreshold: int) => t
|
|
18
|
+
|
|
19
|
+
/**Instantiat empty t with no block data*/
|
|
20
|
+
let empty: (~confirmedBlockThreshold: int) => t
|
|
21
|
+
|
|
22
|
+
/**Add the latest scanned block data to t*/
|
|
23
|
+
let addLatestLastBlockData: (t, ~lastBlockScannedData: blockData) => t
|
|
24
|
+
|
|
25
|
+
/**Read the latest last block scanned data at the from the front of the queue*/
|
|
26
|
+
let getLatestLastBlockData: t => option<blockData>
|
|
27
|
+
/** Given the head block number, find the earliest timestamp from the data where the data
|
|
28
|
+
is still within the given block threshold from the head
|
|
29
|
+
*/
|
|
30
|
+
let getEarlistTimestampInThreshold: (~currentHeight: int, t) => option<int>
|
|
31
|
+
|
|
32
|
+
/**
|
|
33
|
+
Prunes the back of the unneeded data on the queue.
|
|
34
|
+
|
|
35
|
+
In the case of a multichain indexer, pass in the earliest needed timestamp that
|
|
36
|
+
occurs within the chains threshold. Ensure that we keep track of one range before that
|
|
37
|
+
as this is that could be the target range block for a reorg
|
|
38
|
+
*/
|
|
39
|
+
let pruneStaleBlockData: (
|
|
40
|
+
~currentHeight: int,
|
|
41
|
+
~earliestMultiChainTimestampInThreshold: int=?,
|
|
42
|
+
t,
|
|
43
|
+
) => t
|
|
44
|
+
|
|
45
|
+
/**
|
|
46
|
+
Return a BlockNumbersAndHashes.t rolled back to where hashes
|
|
47
|
+
match the provided blockNumberAndHashes
|
|
48
|
+
*/
|
|
49
|
+
let rollBackToValidHash: (t, ~blockNumbersAndHashes: array<blockData>) => result<t, exn>
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
A record that holds the current height of a chain and the lastBlockScannedHashes,
|
|
53
|
+
used for passing into getEarliestMultiChainTimestampInThreshold where these values
|
|
54
|
+
need to be zipped
|
|
55
|
+
*/
|
|
56
|
+
type currentHeightAndLastBlockHashes = {
|
|
57
|
+
currentHeight: int,
|
|
58
|
+
lastBlockScannedHashes: t,
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
/**
|
|
62
|
+
Finds the earliest timestamp that is withtin the confirmedBlockThreshold of
|
|
63
|
+
each chain in a multi chain indexer. Returns None if its a single chain or if
|
|
64
|
+
the list is empty
|
|
65
|
+
*/
|
|
66
|
+
let getEarliestMultiChainTimestampInThreshold: array<currentHeightAndLastBlockHashes> => option<
|
|
67
|
+
int,
|
|
68
|
+
>
|
|
69
|
+
|
|
70
|
+
let getAllBlockNumbers: t => Belt.Array.t<int>
|
|
71
|
+
|
|
72
|
+
let hasReorgOccurred: (t, ~firstBlockParentNumberAndHash: option<blockNumberAndHash>) => bool
|
|
73
|
+
|
|
74
|
+
/**
|
|
75
|
+
Return a BlockNumbersAndHashes.t rolled back to where blockData is less
|
|
76
|
+
than the provided blockNumber
|
|
77
|
+
*/
|
|
78
|
+
let rollBackToBlockNumberLt: (~blockNumber: int, t) => t
|
|
79
|
+
} = {
|
|
80
|
+
type t = {
|
|
81
|
+
// Number of blocks behind head, we want to keep track
|
|
82
|
+
// as a threshold for reorgs. If for eg. this is 200,
|
|
83
|
+
// it means we are accounting for reorgs up to 200 blocks
|
|
84
|
+
// behind the head
|
|
85
|
+
confirmedBlockThreshold: int,
|
|
86
|
+
// A cached list of recent blockdata to make comparison checks
|
|
87
|
+
// for reorgs. Should be quite short data set
|
|
88
|
+
// so using built in array for data structure.
|
|
89
|
+
lastBlockScannedDataList: list<blockData>,
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
//Instantiates LastBlockHashes.t
|
|
93
|
+
let makeWithDataInternal = (lastBlockScannedDataList, ~confirmedBlockThreshold) => {
|
|
94
|
+
confirmedBlockThreshold,
|
|
95
|
+
lastBlockScannedDataList,
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
let makeWithData = (lastBlockScannedDataListArr, ~confirmedBlockThreshold) =>
|
|
99
|
+
lastBlockScannedDataListArr
|
|
100
|
+
->Belt.List.fromArray
|
|
101
|
+
->Belt.List.reverse
|
|
102
|
+
->makeWithDataInternal(~confirmedBlockThreshold)
|
|
103
|
+
//Instantiates empty LastBlockHashes
|
|
104
|
+
let empty = (~confirmedBlockThreshold) => makeWithDataInternal(list{}, ~confirmedBlockThreshold)
|
|
105
|
+
|
|
106
|
+
/** Given the head block number, find the earliest timestamp from the data where the data
|
|
107
|
+
is still within the given block threshold from the head
|
|
108
|
+
*/
|
|
109
|
+
let rec getEarlistTimestampInThresholdInternal = (
|
|
110
|
+
// The current block number at the head of the chain
|
|
111
|
+
~currentHeight,
|
|
112
|
+
~confirmedBlockThreshold,
|
|
113
|
+
//reversed so that head to tail is earlist to latest
|
|
114
|
+
reversedLastBlockDataList: list<blockData>,
|
|
115
|
+
): option<int> => {
|
|
116
|
+
switch reversedLastBlockDataList {
|
|
117
|
+
| list{lastBlockScannedData, ...tail} =>
|
|
118
|
+
// If the blocknumber is not in the threshold recurse with given blockdata's
|
|
119
|
+
// timestamp , incrementing the from index
|
|
120
|
+
if lastBlockScannedData.blockNumber >= currentHeight - confirmedBlockThreshold {
|
|
121
|
+
// If it's in the threshold return the last earliest timestamp
|
|
122
|
+
Some(lastBlockScannedData.blockTimestamp)
|
|
123
|
+
} else {
|
|
124
|
+
tail->getEarlistTimestampInThresholdInternal(~currentHeight, ~confirmedBlockThreshold)
|
|
125
|
+
}
|
|
126
|
+
| list{} => None
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
let getEarlistTimestampInThreshold = (
|
|
131
|
+
~currentHeight,
|
|
132
|
+
{lastBlockScannedDataList, confirmedBlockThreshold}: t,
|
|
133
|
+
) =>
|
|
134
|
+
lastBlockScannedDataList
|
|
135
|
+
->Belt.List.reverse
|
|
136
|
+
->getEarlistTimestampInThresholdInternal(~currentHeight, ~confirmedBlockThreshold)
|
|
137
|
+
|
|
138
|
+
/**
|
|
139
|
+
Inserts last scanned blockData in its positional order of blockNumber. Adds would usually
|
|
140
|
+
be always appending to the head with a new last scanned blockData but could be earlier in the
|
|
141
|
+
case of a dynamic contract.
|
|
142
|
+
*/
|
|
143
|
+
let rec addLatestLastBlockDataInternal = (
|
|
144
|
+
~lastBlockScannedData,
|
|
145
|
+
//Default empty, accumRev would be each item part of lastBlockScannedDataList that has
|
|
146
|
+
//a higher blockNumber than lastBlockScannedData
|
|
147
|
+
~accumRev=list{},
|
|
148
|
+
lastBlockScannedDataList,
|
|
149
|
+
) => {
|
|
150
|
+
switch lastBlockScannedDataList {
|
|
151
|
+
| list{head, ...tail} =>
|
|
152
|
+
if head.blockNumber <= lastBlockScannedData.blockNumber {
|
|
153
|
+
Belt.List.reverseConcat(accumRev, list{lastBlockScannedData, ...lastBlockScannedDataList})
|
|
154
|
+
} else {
|
|
155
|
+
tail->addLatestLastBlockDataInternal(
|
|
156
|
+
~lastBlockScannedData,
|
|
157
|
+
~accumRev=list{head, ...accumRev},
|
|
158
|
+
)
|
|
159
|
+
}
|
|
160
|
+
| list{} => Belt.List.reverseConcat(accumRev, list{lastBlockScannedData})
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
// Adds the latest blockData to the head of the list
|
|
165
|
+
let addLatestLastBlockData = (
|
|
166
|
+
{confirmedBlockThreshold, lastBlockScannedDataList}: t,
|
|
167
|
+
~lastBlockScannedData,
|
|
168
|
+
) =>
|
|
169
|
+
lastBlockScannedDataList
|
|
170
|
+
->addLatestLastBlockDataInternal(~lastBlockScannedData)
|
|
171
|
+
->makeWithDataInternal(~confirmedBlockThreshold)
|
|
172
|
+
|
|
173
|
+
let getLatestLastBlockData = (self: t) => self.lastBlockScannedDataList->Belt.List.head
|
|
174
|
+
|
|
175
|
+
let blockDataIsPastThreshold = (
|
|
176
|
+
lastBlockScannedData: blockData,
|
|
177
|
+
~currentHeight: int,
|
|
178
|
+
~confirmedBlockThreshold: int,
|
|
179
|
+
) => lastBlockScannedData.blockNumber < currentHeight - confirmedBlockThreshold
|
|
180
|
+
|
|
181
|
+
type rec trampoline<'a> = Data('a) | Callback(unit => trampoline<'a>)
|
|
182
|
+
|
|
183
|
+
/**
|
|
184
|
+
Trampolines are a method of handling mutual recursions without the risk of hitting stack limits
|
|
185
|
+
|
|
186
|
+
Tail Call Optimization is not possible on mutually recursive functions and so this is a manual optizimation
|
|
187
|
+
|
|
188
|
+
(note: this implementation of "trampoline" uses a tail call and so TCO tranfsorms it to a while loop in JS)
|
|
189
|
+
*/
|
|
190
|
+
let rec trampoline = value =>
|
|
191
|
+
switch value {
|
|
192
|
+
| Data(v) => v
|
|
193
|
+
| Callback(fn) => fn()->trampoline
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
//Prunes the back of the unneeded data on the queue
|
|
197
|
+
let rec pruneStaleBlockDataInternal = (
|
|
198
|
+
~currentHeight,
|
|
199
|
+
~earliestMultiChainTimestampInThreshold,
|
|
200
|
+
~confirmedBlockThreshold,
|
|
201
|
+
lastBlockScannedDataListReversed: list<blockData>,
|
|
202
|
+
) => {
|
|
203
|
+
switch earliestMultiChainTimestampInThreshold {
|
|
204
|
+
// If there is no "earlist multichain timestamp in threshold"
|
|
205
|
+
// simply prune the earliest block in the case that the block is
|
|
206
|
+
// outside of the confirmedBlockThreshold
|
|
207
|
+
| None =>
|
|
208
|
+
Callback(
|
|
209
|
+
() =>
|
|
210
|
+
lastBlockScannedDataListReversed->pruneEarliestBlockData(
|
|
211
|
+
~currentHeight,
|
|
212
|
+
~earliestMultiChainTimestampInThreshold,
|
|
213
|
+
~confirmedBlockThreshold,
|
|
214
|
+
),
|
|
215
|
+
)
|
|
216
|
+
| Some(timestampThresholdNeeded) =>
|
|
217
|
+
switch lastBlockScannedDataListReversed {
|
|
218
|
+
| list{_head, second, ..._tail} =>
|
|
219
|
+
// Ony prune in the case where the second lastBlockScannedData from the back
|
|
220
|
+
// Has an earlier timestamp than the timestampThresholdNeeded (this is
|
|
221
|
+
// the earliest timestamp across all chains where the lastBlockScannedData is
|
|
222
|
+
// still within the confirmedBlockThreshold)
|
|
223
|
+
if second.blockTimestamp < timestampThresholdNeeded {
|
|
224
|
+
Callback(
|
|
225
|
+
() =>
|
|
226
|
+
lastBlockScannedDataListReversed->pruneEarliestBlockData(
|
|
227
|
+
~currentHeight,
|
|
228
|
+
~earliestMultiChainTimestampInThreshold,
|
|
229
|
+
~confirmedBlockThreshold,
|
|
230
|
+
),
|
|
231
|
+
)
|
|
232
|
+
} else {
|
|
233
|
+
Data(lastBlockScannedDataListReversed)
|
|
234
|
+
}
|
|
235
|
+
| list{_} | list{} => Data(lastBlockScannedDataListReversed)
|
|
236
|
+
}
|
|
237
|
+
}
|
|
238
|
+
}
|
|
239
|
+
and pruneEarliestBlockData = (
|
|
240
|
+
lastBlockScannedDataListReversed: list<blockData>,
|
|
241
|
+
~currentHeight,
|
|
242
|
+
~earliestMultiChainTimestampInThreshold,
|
|
243
|
+
~confirmedBlockThreshold,
|
|
244
|
+
) => {
|
|
245
|
+
switch lastBlockScannedDataListReversed {
|
|
246
|
+
| list{earliestLastBlockData, ...tail} =>
|
|
247
|
+
// In the case that back is past the threshold, remove it and
|
|
248
|
+
// recurse
|
|
249
|
+
if earliestLastBlockData->blockDataIsPastThreshold(~currentHeight, ~confirmedBlockThreshold) {
|
|
250
|
+
// Recurse to check the next item
|
|
251
|
+
Callback(
|
|
252
|
+
() =>
|
|
253
|
+
tail->pruneStaleBlockDataInternal(
|
|
254
|
+
~currentHeight,
|
|
255
|
+
~earliestMultiChainTimestampInThreshold,
|
|
256
|
+
~confirmedBlockThreshold,
|
|
257
|
+
),
|
|
258
|
+
)
|
|
259
|
+
} else {
|
|
260
|
+
Data(lastBlockScannedDataListReversed)
|
|
261
|
+
}
|
|
262
|
+
| list{} => Data(list{})
|
|
263
|
+
}
|
|
264
|
+
}
|
|
265
|
+
|
|
266
|
+
//Prunes the back of the unneeded data on the queue
|
|
267
|
+
let pruneStaleBlockData = (
|
|
268
|
+
~currentHeight,
|
|
269
|
+
~earliestMultiChainTimestampInThreshold=?,
|
|
270
|
+
{confirmedBlockThreshold, lastBlockScannedDataList}: t,
|
|
271
|
+
) => {
|
|
272
|
+
trampoline(
|
|
273
|
+
lastBlockScannedDataList
|
|
274
|
+
->Belt.List.reverse
|
|
275
|
+
->pruneStaleBlockDataInternal(
|
|
276
|
+
~confirmedBlockThreshold,
|
|
277
|
+
~currentHeight,
|
|
278
|
+
~earliestMultiChainTimestampInThreshold,
|
|
279
|
+
),
|
|
280
|
+
)
|
|
281
|
+
->Belt.List.reverse
|
|
282
|
+
->makeWithDataInternal(~confirmedBlockThreshold)
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
type blockNumberToHashMap = Belt.Map.Int.t<string>
|
|
286
|
+
exception BlockNotIncludedInMap(int)
|
|
287
|
+
|
|
288
|
+
let doBlockHashesMatch = (lastBlockScannedData, ~latestBlockHashes: blockNumberToHashMap) => {
|
|
289
|
+
let {blockNumber, blockHash} = lastBlockScannedData
|
|
290
|
+
let matchingBlock = latestBlockHashes->Belt.Map.Int.get(blockNumber)
|
|
291
|
+
|
|
292
|
+
switch matchingBlock {
|
|
293
|
+
| None => Error(BlockNotIncludedInMap(blockNumber))
|
|
294
|
+
| Some(latestBlockHash) => Ok(blockHash == latestBlockHash)
|
|
295
|
+
}
|
|
296
|
+
}
|
|
297
|
+
|
|
298
|
+
let rec rollBackToValidHashInternal = (
|
|
299
|
+
latestBlockScannedData: list<blockData>,
|
|
300
|
+
~latestBlockHashes: blockNumberToHashMap,
|
|
301
|
+
) => {
|
|
302
|
+
switch latestBlockScannedData {
|
|
303
|
+
| list{} => Ok(list{}) //Nothing on the front to rollback to
|
|
304
|
+
| list{lastBlockScannedData, ...tail} =>
|
|
305
|
+
lastBlockScannedData
|
|
306
|
+
->doBlockHashesMatch(~latestBlockHashes)
|
|
307
|
+
->Belt.Result.flatMap(blockHashesDoMatch => {
|
|
308
|
+
if blockHashesDoMatch {
|
|
309
|
+
Ok(list{lastBlockScannedData, ...tail})
|
|
310
|
+
} else {
|
|
311
|
+
tail->rollBackToValidHashInternal(~latestBlockHashes)
|
|
312
|
+
}
|
|
313
|
+
})
|
|
314
|
+
}
|
|
315
|
+
}
|
|
316
|
+
|
|
317
|
+
/**
|
|
318
|
+
Return a BlockNumbersAndHashes.t rolled back to where hashes
|
|
319
|
+
match the provided blockNumberAndHashes
|
|
320
|
+
*/
|
|
321
|
+
let rollBackToValidHash = (self: t, ~blockNumbersAndHashes: array<blockData>) => {
|
|
322
|
+
let {confirmedBlockThreshold, lastBlockScannedDataList} = self
|
|
323
|
+
let latestBlockHashes =
|
|
324
|
+
blockNumbersAndHashes
|
|
325
|
+
->Belt.Array.map(({blockNumber, blockHash}) => (blockNumber, blockHash))
|
|
326
|
+
->Belt.Map.Int.fromArray
|
|
327
|
+
|
|
328
|
+
lastBlockScannedDataList
|
|
329
|
+
->rollBackToValidHashInternal(~latestBlockHashes)
|
|
330
|
+
->Belt.Result.map(list => list->makeWithDataInternal(~confirmedBlockThreshold))
|
|
331
|
+
}
|
|
332
|
+
|
|
333
|
+
let min = (arrInt: array<int>) => {
|
|
334
|
+
arrInt->Belt.Array.reduce(None, (current, val) => {
|
|
335
|
+
switch current {
|
|
336
|
+
| None => Some(val)
|
|
337
|
+
| Some(current) => Js.Math.min_int(current, val)->Some
|
|
338
|
+
}
|
|
339
|
+
})
|
|
340
|
+
}
|
|
341
|
+
|
|
342
|
+
let rec rollBackToBlockNumberLtInternal = (
|
|
343
|
+
~blockNumber: int,
|
|
344
|
+
latestBlockScannedData: list<blockData>,
|
|
345
|
+
) => {
|
|
346
|
+
switch latestBlockScannedData {
|
|
347
|
+
| list{} => list{}
|
|
348
|
+
| list{head, ...tail} =>
|
|
349
|
+
if head.blockNumber < blockNumber {
|
|
350
|
+
latestBlockScannedData
|
|
351
|
+
} else {
|
|
352
|
+
tail->rollBackToBlockNumberLtInternal(~blockNumber)
|
|
353
|
+
}
|
|
354
|
+
}
|
|
355
|
+
}
|
|
356
|
+
|
|
357
|
+
/**
|
|
358
|
+
Return a BlockNumbersAndHashes.t rolled back to where blockData is less
|
|
359
|
+
than the provided blockNumber
|
|
360
|
+
*/
|
|
361
|
+
let rollBackToBlockNumberLt = (~blockNumber: int, self: t) => {
|
|
362
|
+
let {confirmedBlockThreshold, lastBlockScannedDataList} = self
|
|
363
|
+
lastBlockScannedDataList
|
|
364
|
+
->rollBackToBlockNumberLtInternal(~blockNumber)
|
|
365
|
+
->makeWithDataInternal(~confirmedBlockThreshold)
|
|
366
|
+
}
|
|
367
|
+
|
|
368
|
+
type currentHeightAndLastBlockHashes = {
|
|
369
|
+
currentHeight: int,
|
|
370
|
+
lastBlockScannedHashes: t,
|
|
371
|
+
}
|
|
372
|
+
|
|
373
|
+
/**
|
|
374
|
+
Find the the earliest block time across multiple instances of self where the block timestamp
|
|
375
|
+
falls within its own confirmed block threshold
|
|
376
|
+
|
|
377
|
+
Return None if there is only one chain (since we don't want to take this val into account for a
|
|
378
|
+
single chain indexer) or if there are no chains (should never be the case)
|
|
379
|
+
*/
|
|
380
|
+
let getEarliestMultiChainTimestampInThreshold = (
|
|
381
|
+
multiSelf: array<currentHeightAndLastBlockHashes>,
|
|
382
|
+
) => {
|
|
383
|
+
switch multiSelf {
|
|
384
|
+
| [_singleVal] =>
|
|
385
|
+
//In the case where there is only one chain, return none as there would be no need to aggregate
|
|
386
|
+
//or keep track of the lowest timestamp. The chain can purge as far back as its confirmed block range
|
|
387
|
+
None
|
|
388
|
+
| multiSelf =>
|
|
389
|
+
multiSelf
|
|
390
|
+
->Belt.Array.keepMap(({currentHeight, lastBlockScannedHashes}) => {
|
|
391
|
+
lastBlockScannedHashes->getEarlistTimestampInThreshold(~currentHeight)
|
|
392
|
+
})
|
|
393
|
+
->min
|
|
394
|
+
}
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
let getAllBlockNumbers = (self: t) =>
|
|
398
|
+
self.lastBlockScannedDataList->Belt.List.reduceReverse([], (acc, v) => {
|
|
399
|
+
Belt.Array.concat(acc, [v.blockNumber])
|
|
400
|
+
})
|
|
401
|
+
|
|
402
|
+
/**
|
|
403
|
+
Checks whether reorg has occured by comparing the parent hash with the last saved block hash.
|
|
404
|
+
*/
|
|
405
|
+
let rec hasReorgOccurredInternal = (
|
|
406
|
+
lastBlockScannedDataList,
|
|
407
|
+
~firstBlockParentNumberAndHash: option<blockNumberAndHash>,
|
|
408
|
+
) => {
|
|
409
|
+
switch (firstBlockParentNumberAndHash, lastBlockScannedDataList) {
|
|
410
|
+
| (Some({blockHash: parentHash, blockNumber: parentBlockNumber}), list{head, ...tail}) =>
|
|
411
|
+
if parentBlockNumber == head.blockNumber {
|
|
412
|
+
parentHash != head.blockHash
|
|
413
|
+
} else {
|
|
414
|
+
//if block numbers do not match, this is a dynamic contract case and should recurse
|
|
415
|
+
//through the list to look for a matching block or nothing to validate
|
|
416
|
+
tail->hasReorgOccurredInternal(~firstBlockParentNumberAndHash)
|
|
417
|
+
}
|
|
418
|
+
| _ => //If parentHash is None, either it's the genesis block (no reorg)
|
|
419
|
+
//Or its already confirmed so no Reorg
|
|
420
|
+
//If recentLastBlockData is None, we have not yet saved blockData to compare against
|
|
421
|
+
false
|
|
422
|
+
}
|
|
423
|
+
}
|
|
424
|
+
|
|
425
|
+
/**
Public entry point for reorg detection: runs the recursive parent-hash
check over the scanned-block history held by `lastBlockScannedHashes`.
*/
let hasReorgOccurred = (
  lastBlockScannedHashes: t,
  ~firstBlockParentNumberAndHash: option<blockNumberAndHash>,
) => {
  let {lastBlockScannedDataList} = lastBlockScannedHashes
  hasReorgOccurredInternal(lastBlockScannedDataList, ~firstBlockParentNumberAndHash)
}
|
|
432
|
+
}
|
|
@@ -0,0 +1,376 @@
|
|
|
1
|
+
type unchecksummedEthAddress = string
|
|
2
|
+
|
|
3
|
+
// Request-side types for the HyperSync JSON API `/query` endpoint.
// Each variant's @as attribute is the snake_case field name the API expects.
module QueryTypes = {
  // Selectable block fields for a query's field_selection.
  type blockFieldOptions =
    | @as("number") Number
    | @as("hash") Hash
    | @as("parent_hash") ParentHash
    | @as("nonce") Nonce
    | @as("sha3_uncles") Sha3Uncles
    | @as("logs_bloom") LogsBloom
    | @as("transactions_root") TransactionsRoot
    | @as("state_root") StateRoot
    | @as("receipts_root") ReceiptsRoot
    | @as("miner") Miner
    | @as("difficulty") Difficulty
    | @as("total_difficulty") TotalDifficulty
    | @as("extra_data") ExtraData
    | @as("size") Size
    | @as("gas_limit") GasLimit
    | @as("gas_used") GasUsed
    | @as("timestamp") Timestamp
    | @as("uncles") Uncles
    | @as("base_fee_per_gas") BaseFeePerGas

  // Enum schema listing every blockFieldOptions variant for (de)serialization.
  let blockFieldOptionsSchema = Utils.Schema.enum([
    Number,
    Hash,
    ParentHash,
    Nonce,
    Sha3Uncles,
    LogsBloom,
    TransactionsRoot,
    StateRoot,
    ReceiptsRoot,
    Miner,
    Difficulty,
    TotalDifficulty,
    ExtraData,
    Size,
    GasLimit,
    GasUsed,
    Timestamp,
    Uncles,
    BaseFeePerGas,
  ])

  type blockFieldSelection = array<blockFieldOptions>

  let blockFieldSelectionSchema = S.array(blockFieldOptionsSchema)

  // Selectable transaction fields for a query's field_selection.
  type transactionFieldOptions =
    | @as("block_hash") BlockHash
    | @as("block_number") BlockNumber
    | @as("from") From
    | @as("gas") Gas
    | @as("gas_price") GasPrice
    | @as("hash") Hash
    | @as("input") Input
    | @as("nonce") Nonce
    | @as("to") To
    | @as("transaction_index") TransactionIndex
    | @as("value") Value
    | @as("v") V
    | @as("r") R
    | @as("s") S
    | @as("max_priority_fee_per_gas") MaxPriorityFeePerGas
    | @as("max_fee_per_gas") MaxFeePerGas
    | @as("chain_id") ChainId
    | @as("cumulative_gas_used") CumulativeGasUsed
    | @as("effective_gas_price") EffectiveGasPrice
    | @as("gas_used") GasUsed
    | @as("contract_address") ContractAddress
    | @as("logs_bloom") LogsBloom
    | @as("type") Type
    | @as("root") Root
    | @as("status") Status
    | @as("sighash") Sighash

  // Enum schema listing every transactionFieldOptions variant.
  let transactionFieldOptionsSchema = Utils.Schema.enum([
    BlockHash,
    BlockNumber,
    From,
    Gas,
    GasPrice,
    Hash,
    Input,
    Nonce,
    To,
    TransactionIndex,
    Value,
    V,
    R,
    S,
    MaxPriorityFeePerGas,
    MaxFeePerGas,
    ChainId,
    CumulativeGasUsed,
    EffectiveGasPrice,
    GasUsed,
    ContractAddress,
    LogsBloom,
    Type,
    Root,
    Status,
    Sighash,
  ])

  type transactionFieldSelection = array<transactionFieldOptions>

  let transactionFieldSelectionSchema = S.array(transactionFieldOptionsSchema)

  // Selectable log fields for a query's field_selection.
  type logFieldOptions =
    | @as("removed") Removed
    | @as("log_index") LogIndex
    | @as("transaction_index") TransactionIndex
    | @as("transaction_hash") TransactionHash
    | @as("block_hash") BlockHash
    | @as("block_number") BlockNumber
    | @as("address") Address
    | @as("data") Data
    | @as("topic0") Topic0
    | @as("topic1") Topic1
    | @as("topic2") Topic2
    | @as("topic3") Topic3

  // Enum schema listing every logFieldOptions variant.
  let logFieldOptionsSchema = Utils.Schema.enum([
    Removed,
    LogIndex,
    TransactionIndex,
    TransactionHash,
    BlockHash,
    BlockNumber,
    Address,
    Data,
    Topic0,
    Topic1,
    Topic2,
    Topic3,
  ])

  type logFieldSelection = array<logFieldOptions>

  let logFieldSelectionSchema = S.array(logFieldOptionsSchema)

  // Which block/transaction/log fields the server should include in responses.
  type fieldSelection = {
    block?: blockFieldSelection,
    transaction?: transactionFieldSelection,
    log?: logFieldSelection,
  }

  let fieldSelectionSchema = S.object(s => {
    block: ?s.field("block", S.option(blockFieldSelectionSchema)),
    transaction: ?s.field("transaction", S.option(transactionFieldSelectionSchema)),
    log: ?s.field("log", S.option(logFieldSelectionSchema)),
  })

  // A log filter: optional address list plus positional topic filters.
  type logParams = {
    address?: array<Address.t>,
    topics: array<array<Ethers.EventFilter.topic>>,
  }

  let logParamsSchema = S.object(s => {
    address: ?s.field("address", S.option(S.array(Address.schema))),
    topics: s.field("topics", S.array(S.array(EvmTypes.Hex.schema))),
  })

  // A transaction filter on from/to addresses and function sighash.
  type transactionParams = {
    from?: array<Address.t>,
    to?: array<Address.t>,
    sighash?: array<string>,
  }

  let transactionParamsSchema = S.object(s => {
    from: ?s.field("from", S.option(S.array(Address.schema))),
    to: ?s.field("to", S.option(S.array(Address.schema))),
    sighash: ?s.field("sighash", S.option(S.array(S.string))),
  })

  // Body of a POST /query request. Note the JSON key is "to_block" while the
  // record field is named toBlockExclusive — presumably the server treats the
  // upper bound as exclusive; TODO(review): confirm against the API docs.
  type postQueryBody = {
    fromBlock: int,
    toBlockExclusive?: int,
    logs?: array<logParams>,
    transactions?: array<transactionParams>,
    fieldSelection: fieldSelection,
    maxNumLogs?: int,
    includeAllBlocks?: bool,
  }

  let postQueryBodySchema = S.object(s => {
    fromBlock: s.field("from_block", S.int),
    toBlockExclusive: ?s.field("to_block", S.option(S.int)),
    logs: ?s.field("logs", S.option(S.array(logParamsSchema))),
    transactions: ?s.field("transactions", S.option(S.array(transactionParamsSchema))),
    fieldSelection: s.field("field_selection", fieldSelectionSchema),
    maxNumLogs: ?s.field("max_num_logs", S.option(S.int)),
    includeAllBlocks: ?s.field("include_all_blocks", S.option(S.bool)),
  })
}
|
|
199
|
+
|
|
200
|
+
// Response-side types for the HyperSync JSON API. Every payload field is
// optional because the server only returns the fields the query's
// field_selection asked for.
module ResponseTypes = {
  // A block as returned by the API.
  type blockData = {
    number?: int,
    hash?: string,
    parentHash?: string,
    nonce?: option<int>,
    sha3Uncles?: string,
    logsBloom?: string,
    transactionsRoot?: string,
    stateRoot?: string,
    receiptsRoot?: string,
    miner?: unchecksummedEthAddress,
    difficulty?: option<bigint>,
    totalDifficulty?: option<bigint>,
    extraData?: string,
    size?: bigint,
    gasLimit?: bigint,
    gasUsed?: bigint,
    timestamp?: bigint,
    uncles?: option<string>,
    baseFeePerGas?: option<bigint>,
  }

  let blockDataSchema = S.object(s => {
    number: ?s.field("number", S.option(S.int)),
    hash: ?s.field("hash", S.option(S.string)),
    parentHash: ?s.field("parent_hash", S.option(S.string)),
    nonce: ?s.field("nonce", S.option(S.null(S.int))),
    sha3Uncles: ?s.field("sha3_uncles", S.option(S.string)),
    logsBloom: ?s.field("logs_bloom", S.option(S.string)),
    transactionsRoot: ?s.field("transactions_root", S.option(S.string)),
    stateRoot: ?s.field("state_root", S.option(S.string)),
    receiptsRoot: ?s.field("receipts_root", S.option(S.string)),
    miner: ?s.field("miner", S.option(S.string)),
    difficulty: ?s.field("difficulty", S.option(S.null(BigInt.schema))),
    totalDifficulty: ?s.field("total_difficulty", S.option(S.null(BigInt.schema))),
    extraData: ?s.field("extra_data", S.option(S.string)),
    size: ?s.field("size", S.option(BigInt.schema)),
    gasLimit: ?s.field("gas_limit", S.option(BigInt.schema)),
    gasUsed: ?s.field("gas_used", S.option(BigInt.schema)),
    timestamp: ?s.field("timestamp", S.option(BigInt.schema)),
    // Fixed: previously read from "unclus" — a typo that can never match the
    // API's "uncles" key (the query side selects it via @as("uncles") in
    // QueryTypes.blockFieldOptions), so the value was always dropped.
    uncles: ?s.field("uncles", S.option(S.null(S.string))),
    baseFeePerGas: ?s.field("base_fee_per_gas", S.option(S.null(BigInt.schema))),
  })

  // A transaction (with receipt fields) as returned by the API.
  // NOTE(review): `logsBoom` is a typo of `logsBloom`, but it is part of this
  // public record type — renaming it would break consumers, so it is kept.
  type transactionData = {
    blockHash?: string,
    blockNumber?: int,
    from?: option<unchecksummedEthAddress>,
    gas?: bigint,
    gasPrice?: option<bigint>,
    hash?: string,
    input?: string,
    nonce?: int,
    to?: option<unchecksummedEthAddress>,
    transactionIndex?: int,
    value?: bigint,
    v?: option<string>,
    r?: option<string>,
    s?: option<string>,
    maxPriorityFeePerGas?: option<bigint>,
    maxFeePerGas?: option<bigint>,
    chainId?: option<int>,
    cumulativeGasUsed?: bigint,
    effectiveGasPrice?: bigint,
    gasUsed?: bigint,
    contractAddress?: option<unchecksummedEthAddress>,
    logsBoom?: string,
    type_?: option<int>,
    root?: option<int>,
    status?: option<int>,
    sighash?: option<string>,
  }

  let transactionDataSchema = S.object(s => {
    blockHash: ?s.field("block_hash", S.option(S.string)),
    blockNumber: ?s.field("block_number", S.option(S.int)),
    from: ?s.field("from", S.option(S.null(S.string))),
    gas: ?s.field("gas", S.option(BigInt.schema)),
    gasPrice: ?s.field("gas_price", S.option(S.null(BigInt.schema))),
    hash: ?s.field("hash", S.option(S.string)),
    input: ?s.field("input", S.option(S.string)),
    nonce: ?s.field("nonce", S.option(S.int)),
    to: ?s.field("to", S.option(S.null(S.string))),
    transactionIndex: ?s.field("transaction_index", S.option(S.int)),
    value: ?s.field("value", S.option(BigInt.schema)),
    v: ?s.field("v", S.option(S.null(S.string))),
    r: ?s.field("r", S.option(S.null(S.string))),
    s: ?s.field("s", S.option(S.null(S.string))),
    maxPriorityFeePerGas: ?s.field("max_priority_fee_per_gas", S.option(S.null(BigInt.schema))),
    maxFeePerGas: ?s.field("max_fee_per_gas", S.option(S.null(BigInt.schema))),
    chainId: ?s.field("chain_id", S.option(S.null(S.int))),
    cumulativeGasUsed: ?s.field("cumulative_gas_used", S.option(BigInt.schema)),
    effectiveGasPrice: ?s.field("effective_gas_price", S.option(BigInt.schema)),
    gasUsed: ?s.field("gas_used", S.option(BigInt.schema)),
    contractAddress: ?s.field("contract_address", S.option(S.null(S.string))),
    logsBoom: ?s.field("logs_bloom", S.option(S.string)),
    type_: ?s.field("type", S.option(S.null(S.int))),
    root: ?s.field("root", S.option(S.null(S.string))),
    status: ?s.field("status", S.option(S.null(S.int))),
    sighash: ?s.field("sighash", S.option(S.null(S.string))),
  })

  // An event log as returned by the API.
  type logData = {
    removed?: option<bool>,
    index?: int,
    transactionIndex?: int,
    transactionHash?: string,
    blockHash?: string,
    blockNumber?: int,
    address?: unchecksummedEthAddress,
    data?: string,
    topic0?: option<Ethers.EventFilter.topic>,
    topic1?: option<Ethers.EventFilter.topic>,
    topic2?: option<Ethers.EventFilter.topic>,
    topic3?: option<Ethers.EventFilter.topic>,
  }

  let logDataSchema = S.object(s => {
    removed: ?s.field("removed", S.option(S.null(S.bool))),
    // JSON key is "log_index"; exposed as `index` on the record.
    index: ?s.field("log_index", S.option(S.int)),
    transactionIndex: ?s.field("transaction_index", S.option(S.int)),
    transactionHash: ?s.field("transaction_hash", S.option(S.string)),
    blockHash: ?s.field("block_hash", S.option(S.string)),
    blockNumber: ?s.field("block_number", S.option(S.int)),
    address: ?s.field("address", S.option(S.string)),
    data: ?s.field("data", S.option(S.string)),
    topic0: ?s.field("topic0", S.option(S.null(EvmTypes.Hex.schema))),
    topic1: ?s.field("topic1", S.option(S.null(EvmTypes.Hex.schema))),
    topic2: ?s.field("topic2", S.option(S.null(EvmTypes.Hex.schema))),
    topic3: ?s.field("topic3", S.option(S.null(EvmTypes.Hex.schema))),
  })

  // One batch of results in a query response.
  type data = {
    blocks?: array<blockData>,
    transactions?: array<transactionData>,
    logs?: array<logData>,
  }

  let dataSchema = S.object(s => {
    blocks: ?s.field("blocks", S.array(blockDataSchema)->S.option),
    transactions: ?s.field("transactions", S.array(transactionDataSchema)->S.option),
    logs: ?s.field("logs", S.array(logDataSchema)->S.option),
  })

  // Top-level /query response envelope.
  type queryResponse = {
    data: array<data>,
    archiveHeight: int,
    nextBlock: int,
    totalTime: int,
  }

  let queryResponseSchema = S.object(s => {
    data: s.field("data", S.array(dataSchema)),
    archiveHeight: s.field("archive_height", S.int),
    nextBlock: s.field("next_block", S.int),
    totalTime: s.field("total_execution_time", S.int),
  })
}
|
|
359
|
+
|
|
360
|
+
// POST /query — sends a postQueryBody and decodes the queryResponse envelope.
let queryRoute = Rest.route(() => {
  path: "/query",
  method: Post,
  variables: s => s.body(QueryTypes.postQueryBodySchema),
  responses: [
    s => s.data(ResponseTypes.queryResponseSchema),
  ]
})
|
|
368
|
+
|
|
369
|
+
// GET /height — takes no variables and reads the current archive height
// from the response's "height" field.
let heightRoute = Rest.route(() => {
  path: "/height",
  method: Get,
  variables: _ => (),
  responses: [
    s => s.field("height", S.int),
  ]
})
|
|
File without changes
|