envio 2.29.2 → 2.30.0
This diff shows the publicly released contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
- package/evm.schema.json +18 -0
- package/package.json +5 -5
- package/src/Address.res +23 -0
- package/src/Address.res.js +14 -0
- package/src/Batch.res +103 -90
- package/src/Batch.res.js +81 -101
- package/src/FetchState.res +73 -129
- package/src/FetchState.res.js +87 -149
- package/src/Hasura.res +178 -124
- package/src/Hasura.res.js +115 -54
- package/src/Persistence.res +1 -13
- package/src/Persistence.res.js +1 -7
- package/src/PgStorage.res +0 -7
- package/src/PgStorage.res.js +1 -5
- package/src/Utils.res +10 -0
- package/src/Utils.res.js +5 -0
- package/src/bindings/Ethers.res +35 -11
- package/src/bindings/Ethers.res.js +21 -1
- package/src/bindings/PromClient.res +10 -0
- package/src/db/InternalTable.res +1 -59
- package/src/db/InternalTable.res.js +2 -34
- package/src/sources/HyperSyncClient.res +8 -2
- package/src/sources/HyperSyncClient.res.js +3 -2
- package/src/sources/HyperSyncSource.res +8 -1
- package/src/sources/HyperSyncSource.res.js +7 -2
- package/src/sources/RpcSource.res +153 -3
- package/src/sources/RpcSource.res.js +195 -73
package/src/Utils.res.js
CHANGED
@@ -214,6 +214,10 @@ function mergeSorted(f, xs, ys) {
   return result;
 }

+var clearInPlace = ((arr) => {
+  arr.length = 0
+});
+
 function setIndexImmutable(arr, index, value) {
   var shallowCopy = arr.slice(0);
   shallowCopy[index] = value;
@@ -312,6 +316,7 @@ function interleave(arr, separator) {

 var $$Array$1 = {
   mergeSorted: mergeSorted,
+  clearInPlace: clearInPlace,
   setIndexImmutable: setIndexImmutable,
   transposeResults: transposeResults,
   includes: includes,
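The new clearInPlace helper empties an array by setting its length to 0, so callers can reuse the same buffer without reallocating it or invalidating existing references. A minimal usage sketch, assuming the helper is exposed as Utils.Array.clearInPlace as the updated exports suggest:

// Illustrative only: reset a reusable buffer in place.
let buffer = [1, 2, 3]
buffer->Utils.Array.clearInPlace
Js.log(Js.Array2.length(buffer)) // 0, the same array instance, now empty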
package/src/bindings/Ethers.res
CHANGED
@@ -147,19 +147,43 @@ module JsonRpcProvider = {
   @send
   external getTransaction: (t, ~transactionHash: string) => promise<transaction> = "getTransaction"

-  let makeGetTransactionFields = (~getTransactionByHash) =>
-
-
-
-
-
-
-
-
-
-
+  let makeGetTransactionFields = (~getTransactionByHash, ~lowercaseAddresses: bool) => async (
+    log: log,
+  ): promise<unknown> => {
+    let transaction = await getTransactionByHash(log.transactionHash)
+    // Mutating should be fine, since the transaction isn't used anywhere else outside the function
+    let fields: {..} = transaction->Obj.magic
+
+    // Make it compatible with HyperSync transaction fields
+    fields["transactionIndex"] = log.transactionIndex
+    fields["input"] = fields["data"]
+
+    // NOTE: this is wasteful if these fields are not selected in the users config.
+    // There might be a better way to do this in the `makeThrowingGetEventTransaction` function rather based on the schema.
+    // However this is not extremely expensive and good enough for now (only on rpc sync also).
+    if lowercaseAddresses {
+      open Js.Nullable
+      switch fields["from"] {
+      | Value(from) => fields["from"] = from->Js.String2.toLowerCase
+      | Undefined => ()
+      | Null => ()
+      }
+      switch fields["to"] {
+      | Value(to) => fields["to"] = to->Js.String2.toLowerCase
+      | Undefined => ()
+      | Null => ()
+      }
+      switch fields["contractAddress"] {
+      | Value(contractAddress) =>
+        fields["contractAddress"] = contractAddress->Js.String2.toLowerCase
+      | Undefined => ()
+      | Null => ()
+      }
     }

+    fields->Obj.magic
+  }
+
   type block = {
     _difficulty: bigint,
     difficulty: int,
package/src/bindings/Ethers.res.js
CHANGED
@@ -78,11 +78,31 @@ function make(rpcUrl, chainId) {
   return makeStatic(rpcUrl, network, undefined, undefined);
 }

-function makeGetTransactionFields(getTransactionByHash) {
+function makeGetTransactionFields(getTransactionByHash, lowercaseAddresses) {
   return async function (log) {
     var transaction = await getTransactionByHash(log.transactionHash);
     transaction.transactionIndex = log.transactionIndex;
     transaction.input = transaction.data;
+    if (lowercaseAddresses) {
+      var from = transaction.from;
+      if (from === null || from === undefined) {
+        from === null;
+      } else {
+        transaction.from = from.toLowerCase();
+      }
+      var to = transaction.to;
+      if (to === null || to === undefined) {
+        to === null;
+      } else {
+        transaction.to = to.toLowerCase();
+      }
+      var contractAddress = transaction.contractAddress;
+      if (contractAddress === null || contractAddress === undefined) {
+        contractAddress === null;
+      } else {
+        transaction.contractAddress = contractAddress.toLowerCase();
+      }
+    }
     return transaction;
   };
 }
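Taken together, these two hunks thread a ~lowercaseAddresses flag through the RPC transaction loader: when the flag is set, the from, to, and contractAddress fields are lowercased in place before the transaction object is returned. A hedged call-site sketch (the provider value and its wrapper are assumptions for illustration, not part of this diff):

// Illustrative only: build a loader that lowercases transaction address fields.
let makeLoader = (provider: Ethers.JsonRpcProvider.t) =>
  Ethers.JsonRpcProvider.makeGetTransactionFields(
    ~getTransactionByHash=transactionHash =>
      provider->Ethers.JsonRpcProvider.getTransaction(~transactionHash),
    ~lowercaseAddresses=true,
  )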
package/src/bindings/PromClient.res
CHANGED
@@ -11,6 +11,14 @@ type registry
 @send external metrics: registry => Promise.t<string> = "metrics"
 @get external getContentType: registry => string = "contentType"

+type metricValue = {
+  value: int,
+  labels: dict<string>,
+}
+type metricInstance = {get: unit => promise<{"values": array<metricValue>}>}
+@send external getSingleMetric: (registry, string) => option<metricInstance> = "getSingleMetric"
+@send external resetMetrics: registry => unit = "resetMetrics"
+
 module Counter = {
   type counter
   @new @module("prom-client") external makeCounter: customMetric<'a> => counter = "Counter"
@@ -36,6 +44,8 @@ module Gauge = {
   @send external setFloat: (gauge, float) => unit = "set"

   @send external labels: (gauge, 'labelsObject) => gauge = "labels"
+
+  @send external get: gauge => promise<{"values": array<dict<string>>}> = "get"
 }

 module Histogram = {
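The added bindings expose prom-client's registry introspection (getSingleMetric, resetMetrics) and the per-metric get, which makes recorded values readable from code, for example in tests. A sketch of reading a metric's values through these bindings (the function and the metric lookup are illustrative, not part of the package API):

// Illustrative only: fetch a metric by name and collect its raw values.
let readMetricValues = async (registry: PromClient.registry, name: string) =>
  switch registry->PromClient.getSingleMetric(name) {
  | Some(metric) => {
      let data = await metric.get()
      data["values"]->Belt.Array.map((v: PromClient.metricValue) => v.value)
    }
  | None => []
  }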
package/src/db/InternalTable.res
CHANGED
@@ -8,7 +8,6 @@ let isIndex = true
 module Chains = {
   type progressFields = [
     | #progress_block
-    | #_progress_log_index
     | #events_processed
   ]

@@ -35,7 +34,6 @@ module Chains = {
     #progress_block,
     #ready_at,
     #events_processed,
-    #_progress_log_index,
     #_is_hyper_sync,
     #_num_batches_fetched,
   ]
@@ -55,7 +53,6 @@ module Chains = {
     @as("start_block") startBlock: int,
     @as("end_block") endBlock: Js.null<int>,
     @as("progress_block") progressBlockNumber: int,
-    @as("_progress_log_index") progressNextBlockLogIndex: Js.null<int>,
     @as("events_processed") numEventsProcessed: int,
     ...metaFields,
   }
@@ -91,14 +88,6 @@ module Chains = {
       mkField((#_is_hyper_sync: field :> string), Boolean, ~fieldSchema=S.bool),
       // Fully processed block number
       mkField((#progress_block: field :> string), Integer, ~fieldSchema=S.int),
-      // Optional log index of the next block after progress block
-      // To correctly resume indexing when we processed half of the block.
-      mkField(
-        (#_progress_log_index: field :> string),
-        Integer,
-        ~fieldSchema=S.null(S.int),
-        ~isNullable,
-      ),
       // TODO: Should deprecate after changing the ETA calculation logic
       mkField((#_num_batches_fetched: field :> string), Integer, ~fieldSchema=S.int),
     ],
@@ -114,7 +103,6 @@ module Chains = {
     latestFetchedBlockNumber: -1,
     timestampCaughtUpToHeadOrEndblock: Js.Null.empty,
     progressBlockNumber: -1,
-    progressNextBlockLogIndex: Js.Null.empty,
     isHyperSync: false,
     numEventsProcessed: 0,
     numBatchesFetched: 0,
@@ -175,11 +163,7 @@ SET ${setClauses->Js.Array2.joinWith(",\n ")}
 WHERE "id" = $1;`
   }

-  let progressFields: array<progressFields> = [
-    #progress_block,
-    #_progress_log_index,
-    #events_processed,
-  ]
+  let progressFields: array<progressFields> = [#progress_block, #events_processed]

   let makeProgressFieldsUpdateQuery = (~pgSchema) => {
     let setClauses = Belt.Array.mapWithIndex(progressFields, (index, field) => {
@@ -234,10 +218,6 @@ WHERE "id" = $1;`
       ->Js.Array2.push(
         switch field {
         | #progress_block => data.progressBlockNumber->(Utils.magic: int => unknown)
-        | #_progress_log_index =>
-          data.progressNextBlockLogIndex
-          ->Js.Null.fromOption
-          ->(Utils.magic: Js.null<int> => unknown)
         | #events_processed => data.totalEventsProcessed->(Utils.magic: int => unknown)
         },
       )
@@ -448,42 +428,4 @@ module DynamicContractRegistry = {
     table,
     entityHistory,
   }->Internal.fromGenericEntityConfig
-
-  let makeCleanUpOnRestartQuery = (~pgSchema, ~chains: array<Chains.t>) => {
-    let query = ref(``)
-
-    chains->Js.Array2.forEach(chain => {
-      if query.contents !== "" {
-        query := query.contents ++ "\n"
-      }
-      query :=
-        query.contents ++
-        `DELETE FROM "${pgSchema}"."${table.tableName}"
-WHERE chain_id = ${chain.id->Belt.Int.toString}${switch chain {
-      | {progressBlockNumber, progressNextBlockLogIndex: Value(progressNextBlockLogIndex)} =>
-        ` AND (
-  registering_event_block_number > ${(progressBlockNumber + 1)->Belt.Int.toString}
-  OR registering_event_block_number = ${(progressBlockNumber + 1)->Belt.Int.toString}
-  AND registering_event_log_index > ${progressNextBlockLogIndex->Belt.Int.toString}
-)`
-      | {progressBlockNumber: -1, progressNextBlockLogIndex: Null} => ``
-      | {progressBlockNumber, progressNextBlockLogIndex: Null} =>
-        ` AND registering_event_block_number > ${progressBlockNumber->Belt.Int.toString}`
-      }};
-DELETE FROM "${pgSchema}"."${table.tableName}_history"
-WHERE entity_history_chain_id = ${chain.id->Belt.Int.toString}${switch chain {
-      | {progressBlockNumber, progressNextBlockLogIndex: Value(progressNextBlockLogIndex)} =>
-        ` AND (
-  entity_history_block_number > ${(progressBlockNumber + 1)->Belt.Int.toString}
-  OR entity_history_block_number = ${(progressBlockNumber + 1)->Belt.Int.toString}
-  AND entity_history_log_index > ${progressNextBlockLogIndex->Belt.Int.toString}
-)`
-      | {progressBlockNumber: -1, progressNextBlockLogIndex: Null} => ``
-      | {progressBlockNumber, progressNextBlockLogIndex: Null} =>
-        ` AND entity_history_block_number > ${progressBlockNumber->Belt.Int.toString}`
-      }};`
-    })
-
-    query.contents
-  }
 }
package/src/db/InternalTable.res.js
CHANGED
@@ -21,7 +21,6 @@ var fields = [
   "progress_block",
   "ready_at",
   "events_processed",
-  "_progress_log_index",
   "_is_hyper_sync",
   "_num_batches_fetched"
 ];
@@ -37,7 +36,6 @@ var table = Table.mkTable("envio_chains", undefined, [
   Table.mkField("events_processed", "INTEGER", S$RescriptSchema.$$int, undefined, undefined, undefined, undefined, undefined, undefined),
   Table.mkField("_is_hyper_sync", "BOOLEAN", S$RescriptSchema.bool, undefined, undefined, undefined, undefined, undefined, undefined),
   Table.mkField("progress_block", "INTEGER", S$RescriptSchema.$$int, undefined, undefined, undefined, undefined, undefined, undefined),
-  Table.mkField("_progress_log_index", "INTEGER", S$RescriptSchema.$$null(S$RescriptSchema.$$int), undefined, undefined, true, undefined, undefined, undefined),
   Table.mkField("_num_batches_fetched", "INTEGER", S$RescriptSchema.$$int, undefined, undefined, undefined, undefined, undefined, undefined)
 ]);

@@ -47,7 +45,6 @@ function initialFromConfig(chainConfig) {
     start_block: chainConfig.startBlock,
     end_block: Js_null.fromOption(chainConfig.endBlock),
     progress_block: -1,
-    _progress_log_index: null,
     events_processed: 0,
     first_event_block: null,
     buffer_block: -1,
@@ -109,7 +106,6 @@ function makeMetaFieldsUpdateQuery(pgSchema) {

 var progressFields = [
   "progress_block",
-  "_progress_log_index",
   "events_processed"
 ];

@@ -143,9 +139,7 @@ function setProgressedChains(sql, pgSchema, progressedChains) {
     var params = [];
     params.push(data.chainId);
     progressFields.forEach(function (field) {
-      params.push(field === "
-      field === "progress_block" ? data.progressBlockNumber : Js_null.fromOption(data.progressNextBlockLogIndex)
-      ));
+      params.push(field === "progress_block" ? data.progressBlockNumber : data.totalEventsProcessed);
     });
     promises.push(sql.unsafe(query, params, {prepare: true}));
   });
@@ -291,31 +285,6 @@ var config = {
   entityHistory: entityHistory
 };

-function makeCleanUpOnRestartQuery(pgSchema, chains) {
-  var query = {
-    contents: ""
-  };
-  chains.forEach(function (chain) {
-    if (query.contents !== "") {
-      query.contents = query.contents + "\n";
-    }
-    var progressBlockNumber = chain.progress_block;
-    var progressNextBlockLogIndex = chain._progress_log_index;
-    var tmp;
-    tmp = progressNextBlockLogIndex === null ? (
-      progressBlockNumber !== -1 ? " AND registering_event_block_number > " + String(progressBlockNumber) : ""
-    ) : " AND (\n registering_event_block_number > " + String(progressBlockNumber + 1 | 0) + "\n OR registering_event_block_number = " + String(progressBlockNumber + 1 | 0) + "\n AND registering_event_log_index > " + String(progressNextBlockLogIndex) + "\n)";
-    var progressBlockNumber$1 = chain.progress_block;
-    var progressNextBlockLogIndex$1 = chain._progress_log_index;
-    var tmp$1;
-    tmp$1 = progressNextBlockLogIndex$1 === null ? (
-      progressBlockNumber$1 !== -1 ? " AND entity_history_block_number > " + String(progressBlockNumber$1) : ""
-    ) : " AND (\n entity_history_block_number > " + String(progressBlockNumber$1 + 1 | 0) + "\n OR entity_history_block_number = " + String(progressBlockNumber$1 + 1 | 0) + "\n AND entity_history_log_index > " + String(progressNextBlockLogIndex$1) + "\n)";
-    query.contents = query.contents + ("DELETE FROM \"" + pgSchema + "\".\"" + table$4.tableName + "\"\nWHERE chain_id = " + String(chain.id) + tmp + ";\nDELETE FROM \"" + pgSchema + "\".\"" + table$4.tableName + "_history\"\nWHERE entity_history_chain_id = " + String(chain.id) + tmp$1 + ";");
-  });
-  return query.contents;
-}
-
 var DynamicContractRegistry = {
   name: name,
   makeId: makeId,
@@ -323,8 +292,7 @@ var DynamicContractRegistry = {
   rowsSchema: rowsSchema,
   table: table$4,
   entityHistory: entityHistory,
-  config: config
-  makeCleanUpOnRestartQuery: makeCleanUpOnRestartQuery
+  config: config
 };

 var isPrimaryKey = true;
package/src/sources/HyperSyncClient.res
CHANGED
@@ -448,10 +448,16 @@ type t = {
 }

 @module("@envio-dev/hypersync-client") @scope("HypersyncClient") external make: cfg => t = "new"
-let make = (
+let make = (
+  ~url,
+  ~apiToken,
+  ~httpReqTimeoutMillis,
+  ~maxNumRetries,
+  ~enableChecksumAddresses=true,
+) =>
   make({
     url,
-    enableChecksumAddresses
+    enableChecksumAddresses,
     bearerToken: apiToken,
     httpReqTimeoutMillis,
     maxNumRetries,
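make now accepts an optional ~enableChecksumAddresses that defaults to true, so existing call sites keep checksummed addresses while the new lowercase mode can opt out. An illustrative call (the URL and token are placeholder values, not part of this diff):

// Illustrative only: a client configured to return lowercase addresses.
let client = HyperSyncClient.make(
  ~url="https://eth.hypersync.xyz",
  ~apiToken="YOUR_API_TOKEN",
  ~httpReqTimeoutMillis=30000,
  ~maxNumRetries=3,
  ~enableChecksumAddresses=false,
)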
package/src/sources/HyperSyncClient.res.js
CHANGED
@@ -57,13 +57,14 @@ var ResponseTypes = {
   authorizationListSchema: authorizationListSchema
 };

-function make(url, apiToken, httpReqTimeoutMillis, maxNumRetries) {
+function make(url, apiToken, httpReqTimeoutMillis, maxNumRetries, enableChecksumAddressesOpt) {
+  var enableChecksumAddresses = enableChecksumAddressesOpt !== undefined ? enableChecksumAddressesOpt : true;
   return HypersyncClient.HypersyncClient.new({
     url: url,
     bearerToken: apiToken,
     httpReqTimeoutMillis: httpReqTimeoutMillis,
     maxNumRetries: maxNumRetries,
-    enableChecksumAddresses:
+    enableChecksumAddresses: enableChecksumAddresses
   });
 }

package/src/sources/HyperSyncSource.res
CHANGED
@@ -154,6 +154,7 @@ type options = {
   apiToken: option<string>,
   clientMaxRetries: int,
   clientTimeoutMillis: int,
+  lowercaseAddresses: bool,
 }

 let make = (
@@ -167,6 +168,7 @@ let make = (
     apiToken,
     clientMaxRetries,
     clientTimeoutMillis,
+    lowercaseAddresses,
   }: options,
 ): t => {
   let name = "HyperSync"
@@ -180,6 +182,7 @@ let make = (
     ~apiToken,
     ~maxNumRetries=clientMaxRetries,
     ~httpReqTimeoutMillis=clientTimeoutMillis,
+    ~enableChecksumAddresses=!lowercaseAddresses,
   )

   let hscDecoder: ref<option<HyperSyncClient.Decoder.t>> = ref(None)
@@ -193,7 +196,11 @@ let make = (
       ~msg="Failed to instantiate a decoder from hypersync client, please double check your ABI or try using 'event_decoder: viem' config option",
     )
   | decoder =>
-
+    if lowercaseAddresses {
+      decoder.disableChecksummedAddresses()
+    } else {
+      decoder.enableChecksummedAddresses()
+    }
     decoder
   }
 }
package/src/sources/HyperSyncSource.res.js
CHANGED
@@ -124,6 +124,7 @@ function memoGetSelectionConfig(chain) {
 }

 function make(param) {
+  var lowercaseAddresses = param.lowercaseAddresses;
   var eventRouter = param.eventRouter;
   var shouldUseHypersyncClientDecoder = param.shouldUseHypersyncClientDecoder;
   var allEventSignatures = param.allEventSignatures;
@@ -131,7 +132,7 @@ function make(param) {
   var chain = param.chain;
   var getSelectionConfig = memoGetSelectionConfig(chain);
   var apiToken = Belt_Option.getWithDefault(param.apiToken, "3dc856dd-b0ea-494f-b27e-017b8b6b7e07");
-  var client = HyperSyncClient.make(endpointUrl, apiToken, param.clientTimeoutMillis, param.clientMaxRetries);
+  var client = HyperSyncClient.make(endpointUrl, apiToken, param.clientTimeoutMillis, param.clientMaxRetries, !lowercaseAddresses);
   var hscDecoder = {
     contents: undefined
   };
@@ -148,7 +149,11 @@ function make(param) {
       var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);
       return ErrorHandling.mkLogAndRaise(undefined, "Failed to instantiate a decoder from hypersync client, please double check your ABI or try using 'event_decoder: viem' config option", exn);
     }
-
+    if (lowercaseAddresses) {
+      decoder$1.disableChecksummedAddresses();
+    } else {
+      decoder$1.enableChecksummedAddresses();
+    }
     return decoder$1;
   };
   var UndefinedValue = /* @__PURE__ */Caml_exceptions.create("UndefinedValue");
package/src/sources/RpcSource.res
CHANGED
@@ -22,6 +22,7 @@ let rec getKnownBlockWithBackoff = async (
   ~chain,
   ~blockNumber,
   ~backoffMsOnFailure,
+  ~lowercaseAddresses: bool,
 ) =>
   switch await getKnownBlock(provider, blockNumber) {
   | exception err =>
@@ -39,8 +40,17 @@ let rec getKnownBlockWithBackoff = async (
       ~chain,
       ~blockNumber,
       ~backoffMsOnFailure=backoffMsOnFailure * 2,
+      ~lowercaseAddresses,
     )
-  | result =>
+  | result =>
+    if lowercaseAddresses {
+      // NOTE: this is wasteful if these fields are not selected in the users config.
+      // There might be a better way to do this based on the block schema.
+      // However this is not extremely expensive and good enough for now (only on rpc sync also).
+      // The in place mutation is cheapest.
+      (result->Obj.magic)["miner"] = result.miner->Address.Evm.fromAddressLowercaseOrThrow
+    }
+    result
   }
 let getSuggestedBlockIntervalFromExn = {
   // Unknown provider: "retry with the range 123-456"
@@ -453,9 +463,24 @@ type options = {
   chain: ChainMap.Chain.t,
   contracts: array<Internal.evmContractConfig>,
   eventRouter: EventRouter.t<Internal.evmEventConfig>,
+  allEventSignatures: array<string>,
+  shouldUseHypersyncClientDecoder: bool,
+  lowercaseAddresses: bool,
 }

-let make = (
+let make = (
+  {
+    sourceFor,
+    syncConfig,
+    url,
+    chain,
+    contracts,
+    eventRouter,
+    allEventSignatures,
+    shouldUseHypersyncClientDecoder,
+    lowercaseAddresses,
+  }: options,
+): t => {
   let urlHost = switch sanitizeUrl(url) {
   | None =>
     Js.Exn.raiseError(
@@ -498,6 +523,7 @@ let make = ({sourceFor, syncConfig, url, chain, contracts, eventRouter}: options
       ~chain,
       ~backoffMsOnFailure=1000,
       ~blockNumber,
+      ~lowercaseAddresses,
     ),
   ~onError=(am, ~exn) => {
     Logging.error({
@@ -522,6 +548,7 @@ let make = ({sourceFor, syncConfig, url, chain, contracts, eventRouter}: options
   let getEventTransactionOrThrow = makeThrowingGetEventTransaction(
     ~getTransactionFields=Ethers.JsonRpcProvider.makeGetTransactionFields(
       ~getTransactionByHash=LazyLoader.get(transactionLoader, _),
+      ~lowercaseAddresses,
     ),
   )

@@ -530,6 +557,32 @@ let make = ({sourceFor, syncConfig, url, chain, contracts, eventRouter}: options
     contractNameAbiMapping->Js.Dict.set(contract.name, contract.abi)
   })

+  let convertEthersLogToHyperSyncEvent = (log: Ethers.log): HyperSyncClient.ResponseTypes.event => {
+    let hyperSyncLog: HyperSyncClient.ResponseTypes.log = {
+      removed: log.removed->Option.getWithDefault(false),
+      index: log.logIndex,
+      transactionIndex: log.transactionIndex,
+      transactionHash: log.transactionHash,
+      blockHash: log.blockHash,
+      blockNumber: log.blockNumber,
+      address: log.address,
+      data: log.data,
+      topics: log.topics->Array.map(topic => Js.Nullable.return(topic)),
+    }
+    {log: hyperSyncLog}
+  }
+
+  let hscDecoder: ref<option<HyperSyncClient.Decoder.t>> = ref(None)
+  let getHscDecoder = () => {
+    switch hscDecoder.contents {
+    | Some(decoder) => decoder
+    | None => {
+        let decoder = HyperSyncClient.Decoder.fromSignatures(allEventSignatures)
+        decoder
+      }
+    }
+  }
+
   let getItemsOrThrow = async (
     ~fromBlock,
     ~toBlock,
@@ -603,10 +656,106 @@ let make = ({sourceFor, syncConfig, url, chain, contracts, eventRouter}: options
       )
     }

-    let parsedQueueItems =
+    let parsedQueueItems = if shouldUseHypersyncClientDecoder {
+      // Convert Ethers logs to HyperSync events
+      let hyperSyncEvents = logs->Belt.Array.map(convertEthersLogToHyperSyncEvent)
+
+      // Decode using HyperSyncClient decoder
+      let parsedEvents = try await getHscDecoder().decodeEvents(hyperSyncEvents) catch {
+      | exn =>
+        raise(
+          Source.GetItemsError(
+            FailedParsingItems({
+              message: "Failed to parse events using hypersync client decoder. Please double-check your ABI.",
+              exn,
+              blockNumber: fromBlock,
+              logIndex: 0,
+            }),
+          ),
+        )
+      }
+
+      await logs
+      ->Array.zip(parsedEvents)
+      ->Array.keepMap(((
+        log: Ethers.log,
+        maybeDecodedEvent: Js.Nullable.t<HyperSyncClient.Decoder.decodedEvent>,
+      )) => {
+        let topic0 = log.topics[0]->Option.getWithDefault("0x0"->EvmTypes.Hex.fromStringUnsafe)
+        let routedAddress = if lowercaseAddresses {
+          log.address->Address.Evm.fromAddressLowercaseOrThrow
+        } else {
+          log.address
+        }
+
+        switch eventRouter->EventRouter.get(
+          ~tag=EventRouter.getEvmEventId(
+            ~sighash=topic0->EvmTypes.Hex.toString,
+            ~topicCount=log.topics->Array.length,
+          ),
+          ~indexingContracts,
+          ~contractAddress=routedAddress,
+          ~blockNumber=log.blockNumber,
+        ) {
+        | None => None
+        | Some(eventConfig) =>
+          switch maybeDecodedEvent {
+          | Js.Nullable.Value(decoded) =>
+            Some(
+              (
+                async () => {
+                  let (block, transaction) = try await Promise.all2((
+                    log->getEventBlockOrThrow,
+                    log->getEventTransactionOrThrow(
+                      ~transactionSchema=eventConfig.transactionSchema,
+                    ),
+                  )) catch {
+                  | exn =>
+                    raise(
+                      Source.GetItemsError(
+                        FailedGettingFieldSelection({
+                          message: "Failed getting selected fields. Please double-check your RPC provider returns correct data.",
+                          exn,
+                          blockNumber: log.blockNumber,
+                          logIndex: log.logIndex,
+                        }),
+                      ),
+                    )
+                  }

+                  Internal.Event({
+                    eventConfig: (eventConfig :> Internal.eventConfig),
+                    timestamp: block.timestamp,
+                    blockNumber: block.number,
+                    chain,
+                    logIndex: log.logIndex,
+                    event: {
+                      chainId: chain->ChainMap.Chain.toChainId,
+                      params: decoded->eventConfig.convertHyperSyncEventArgs,
+                      transaction,
+                      block: block->(
+                        Utils.magic: Ethers.JsonRpcProvider.block => Internal.eventBlock
+                      ),
+                      srcAddress: routedAddress,
+                      logIndex: log.logIndex,
+                    }->Internal.fromGenericEvent,
+                  })
+                }
+              )(),
+            )
+          | Js.Nullable.Null
+          | Js.Nullable.Undefined =>
+            None
+          }
+        }
+      })
+      ->Promise.all
+    } else {
+      // Decode using Viem
       await logs
       ->Belt.Array.keepMap(log => {
         let topic0 = log.topics->Js.Array2.unsafe_get(0)
+
         switch eventRouter->EventRouter.get(
           ~tag=EventRouter.getEvmEventId(
             ~sighash=topic0->EvmTypes.Hex.toString,
@@ -685,6 +834,7 @@ let make = ({sourceFor, syncConfig, url, chain, contracts, eventRouter}: options
         }
       })
       ->Promise.all
+    }

   let optFirstBlockParent = await firstBlockParentPromise

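Net effect of this file: when shouldUseHypersyncClientDecoder is enabled, the RPC source reshapes each ethers log into a hypersync-style event, batch-decodes the whole page with HyperSyncClient.Decoder, and zips the results back onto the original logs, dropping entries the decoder returns as null. A condensed sketch of that flow, reusing the helper names from the hunk above (they are local to make, so this is illustrative only, not an exported API):

// Illustrative only: batch-decode RPC logs and keep the recognized ones.
let decodeRpcLogs = async (logs: array<Ethers.log>, decoder: HyperSyncClient.Decoder.t) => {
  let events = logs->Belt.Array.map(convertEthersLogToHyperSyncEvent)
  let decoded = await decoder.decodeEvents(events)
  logs
  ->Belt.Array.zip(decoded)
  ->Belt.Array.keepMap(((log, maybeDecoded)) =>
    maybeDecoded->Js.Nullable.toOption->Belt.Option.map(d => (log, d))
  )
}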