envio 2.27.5 → 2.28.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +5 -5
- package/rescript.json +3 -0
- package/src/ErrorHandling.res +4 -5
- package/src/ErrorHandling.res.js +7 -7
- package/src/Hasura.res +139 -16
- package/src/Hasura.res.js +99 -18
- package/src/Internal.res +7 -11
- package/src/Internal.res.js +0 -11
- package/src/InternalConfig.res +20 -0
- package/src/InternalConfig.res.js +2 -0
- package/src/Js.shim.ts +11 -0
- package/src/LoadManager.res +13 -7
- package/src/LoadManager.res.js +14 -8
- package/src/Logging.res +1 -1
- package/src/Logging.res.js +2 -2
- package/src/Persistence.res +25 -33
- package/src/Persistence.res.js +18 -20
- package/src/PgStorage.res +158 -106
- package/src/PgStorage.res.js +143 -102
- package/src/Prometheus.res +2 -2
- package/src/Prometheus.res.js +2 -3
- package/src/Time.res +1 -1
- package/src/Time.res.js +1 -2
- package/src/Utils.res +7 -0
- package/src/Utils.res.js +11 -0
- package/src/bindings/Pino.res +1 -1
- package/src/bindings/Pino.res.js +2 -1
- package/src/db/EntityHistory.res +115 -50
- package/src/db/EntityHistory.res.js +43 -26
- package/src/db/InternalTable.gen.ts +43 -0
- package/src/db/InternalTable.res +392 -0
- package/src/db/InternalTable.res.js +295 -0
- package/src/sources/SourceManager.res +3 -3
- package/src/sources/SourceManager.res.js +3 -4
- package/src/vendored/Rest.res +11 -2
- package/src/vendored/Rest.res.js +44 -35
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "envio",
-  "version": "v2.27.5",
+  "version": "v2.28.0-alpha.1",
   "description": "A latency and sync speed optimized, developer friendly blockchain data indexer.",
   "bin": "./bin.js",
   "main": "./index.js",
@@ -25,10 +25,10 @@
   },
   "homepage": "https://envio.dev",
   "optionalDependencies": {
-    "envio-linux-x64": "v2.27.5",
-    "envio-linux-arm64": "v2.27.5",
-    "envio-darwin-x64": "v2.27.5",
-    "envio-darwin-arm64": "v2.27.5"
+    "envio-linux-x64": "v2.28.0-alpha.1",
+    "envio-linux-arm64": "v2.28.0-alpha.1",
+    "envio-darwin-x64": "v2.28.0-alpha.1",
+    "envio-darwin-arm64": "v2.28.0-alpha.1"
   },
   "dependencies": {
     "@envio-dev/hypersync-client": "0.6.5",
package/rescript.json
CHANGED
package/src/ErrorHandling.res
CHANGED
@@ -6,18 +6,17 @@ let make = (exn, ~logger=Logging.getLogger(), ~msg=?) => {

 let log = (self: t) => {
   switch self {
-  | {exn, msg: Some(msg), logger} =>
-    logger->Logging.childErrorWithExn(exn->Internal.prettifyExn, msg)
-  | {exn, msg: None, logger} => logger->Logging.childError(exn->Internal.prettifyExn)
+  | {exn, msg: Some(msg), logger} => logger->Logging.childErrorWithExn(exn->Utils.prettifyExn, msg)
+  | {exn, msg: None, logger} => logger->Logging.childError(exn->Utils.prettifyExn)
   }
 }

 let raiseExn = (self: t) => {
-  self.exn->Internal.prettifyExn->raise
+  self.exn->Utils.prettifyExn->raise
 }

 let mkLogAndRaise = (~logger=?, ~msg=?, exn) => {
-  let exn = exn->Internal.prettifyExn
+  let exn = exn->Utils.prettifyExn
   exn->make(~logger?, ~msg?)->log
   exn->raise
 }
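Note: every call site in this file switches from Internal.prettifyExn to Utils.prettifyExn. The Utils.res side of the move is not shown in this diff (Utils.res is +7 -0 in the file list), but the helper's old body can be seen being removed from Internal.res further down; presumably it is carried over roughly unchanged. For reference, that removed implementation was:

    // Removed from Internal.res in this release; presumably now lives in
    // Utils.res with the same behaviour: unwrap a Js.Exn.Error payload so
    // the original JS error object (with its stack) is what gets logged or raised.
    let prettifyExn = exn => {
      switch exn->Js.Exn.anyToExnInternal {
      | Js.Exn.Error(e) => e->(Utils.magic: Js.Exn.t => exn)
      | exn => exn
      }
    }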
package/src/ErrorHandling.res.js
CHANGED
@@ -1,8 +1,8 @@
 // Generated by ReScript, PLEASE EDIT WITH CARE
 'use strict';

+var Utils = require("./Utils.res.js");
 var Logging = require("./Logging.res.js");
-var Internal = require("./Internal.res.js");

 function make(exn, loggerOpt, msg) {
   var logger = loggerOpt !== undefined ? loggerOpt : Logging.getLogger();
@@ -18,18 +18,18 @@ function log(self) {
   var exn = self.exn;
   var logger = self.logger;
   if (msg !== undefined) {
-    return Logging.childErrorWithExn(logger, Internal.prettifyExn(exn), msg);
+    return Logging.childErrorWithExn(logger, Utils.prettifyExn(exn), msg);
   } else {
-    return Logging.childError(logger, Internal.prettifyExn(exn));
+    return Logging.childError(logger, Utils.prettifyExn(exn));
   }
 }

 function raiseExn(self) {
-  throw Internal.prettifyExn(self.exn);
+  throw Utils.prettifyExn(self.exn);
 }

 function mkLogAndRaise(logger, msg, exn) {
-  var exn$1 = Internal.prettifyExn(exn);
+  var exn$1 = Utils.prettifyExn(exn);
   log(make(exn$1, logger, msg));
   throw exn$1;
 }
@@ -44,7 +44,7 @@ function unwrapLogAndRaise(logger, msg, result) {

 function logAndRaise(self) {
   log(self);
-  throw Internal.prettifyExn(self.exn);
+  throw Utils.prettifyExn(self.exn);
 }

 exports.make = make;
@@ -53,4 +53,4 @@ exports.raiseExn = raiseExn;
 exports.mkLogAndRaise = mkLogAndRaise;
 exports.unwrapLogAndRaise = unwrapLogAndRaise;
 exports.logAndRaise = logAndRaise;
-/* Logging Not a pure module */
+/* Utils Not a pure module */
package/src/Hasura.res
CHANGED
@@ -83,7 +83,7 @@ let clearHasuraMetadata = async (~endpoint, ~auth) => {
   | exn =>
     Logging.error({
       "msg": `EE806: There was an issue clearing metadata in hasura - indexing may still work - but you may have issues querying the data in hasura.`,
-      "err": exn->Internal.prettifyExn,
+      "err": exn->Utils.prettifyExn,
     })
   }
 }
@@ -125,7 +125,7 @@ let trackTables = async (~endpoint, ~auth, ~pgSchema, ~tableNames: array<string>
     Logging.error({
       "msg": `EE807: There was an issue tracking tables in hasura - indexing may still work - but you may have issues querying the data in hasura.`,
       "tableNames": tableNames,
-      "err": exn->Internal.prettifyExn,
+      "err": exn->Utils.prettifyExn,
     })
   }
 }
@@ -172,7 +172,7 @@ let createSelectPermissions = async (
     Logging.error({
       "msg": `EE808: There was an issue setting up view permissions for the ${tableName} table in hasura - indexing may still work - but you may have issues querying the data in hasura.`,
       "tableName": tableName,
-      "err": exn->Internal.prettifyExn,
+      "err": exn->Utils.prettifyExn,
     })
   }
 }
@@ -213,7 +213,113 @@ let createEntityRelationship = async (
     Logging.error({
       "msg": `EE808: There was an issue setting up ${relationshipType} relationship for the ${tableName} table in hasura - indexing may still work - but you may have issues querying the data in hasura.`,
       "tableName": tableName,
-      "err": exn->Internal.prettifyExn,
+      "err": exn->Utils.prettifyExn,
+    })
+  }
+}
+
+let trackMeta = async (~auth, ~endpoint, ~pgSchema) => {
+  try {
+    // Track EnvioMeta logical model with scalar fields
+    let result = await rawBodyRoute->Rest.fetch(
+      {
+        "auth": auth,
+        "bodyString": `{"type": "pg_track_logical_model","args": {"source": "default","name":"EnvioMeta","fields":[{"name":"chainId","type":"int"},{"name":"startBlock","type":"int"},{"name":"endBlock","type":"int","nullable":true},{"name":"bufferBlock","type":"int"},{"name":"readyAt","type":"timestamptz","nullable":true},{"name":"firstEventBlock","type":"int","nullable":true},{"name":"eventsProcessed","type":"int"},{"name":"isReady","type":"bool"}]}}`,
+      },
+      ~client=Rest.client(endpoint),
+    )
+    let msg = switch result {
+    | QuerySucceeded => `Hasura EnvioMeta logical model created`
+    | AlreadyDone => `Hasura EnvioMeta logical model already created`
+    }
+    Logging.trace({
+      "msg": msg,
+    })
+
+    // Update _meta native query to return hardcoded block object
+    let result = await rawBodyRoute->Rest.fetch(
+      {
+        "auth": auth,
+        "bodyString": `{"type":"pg_track_native_query","args":{"type":"query","source":"default","root_field_name":"_meta","arguments":{},"returns":"EnvioMeta","code":"SELECT \\\"${(#id: InternalTable.Chains.field :> string)}\\\" AS \\\"chainId\\\", \\\"${(#start_block: InternalTable.Chains.field :> string)}\\\" AS \\\"startBlock\\\", \\\"${(#end_block: InternalTable.Chains.field :> string)}\\\" AS \\\"endBlock\\\", \\\"${(#buffer_block: InternalTable.Chains.field :> string)}\\\" AS \\\"bufferBlock\\\", \\\"${(#ready_at: InternalTable.Chains.field :> string)}\\\" AS \\\"readyAt\\\", \\\"${(#first_event_block: InternalTable.Chains.field :> string)}\\\" AS \\\"firstEventBlock\\\", \\\"${(#events_processed: InternalTable.Chains.field :> string)}\\\" AS \\\"eventsProcessed\\\", (\\\"${(#ready_at: InternalTable.Chains.field :> string)}\\\" IS NOT NULL) AS \\\"isReady\\\" FROM \\\"${pgSchema}\\\".\\\"${InternalTable.Chains.table.tableName}\\\" ORDER BY \\\"id\\\""}}`,
+      },
+      ~client=Rest.client(endpoint),
+    )
+    let msg = switch result {
+    | QuerySucceeded => `Hasura _meta native query created`
+    | AlreadyDone => `Hasura _meta native query already created`
+    }
+    Logging.trace({
+      "msg": msg,
+    })
+
+    // Add public select permissions for EnvioMeta logical model
+    let result = await rawBodyRoute->Rest.fetch(
+      {
+        "auth": auth,
+        "bodyString": `{"type": "pg_create_logical_model_select_permission", "args": {"source": "default", "name": "EnvioMeta", "role": "public", "permission": {"columns": "*", "filter": {}}}}`,
+      },
+      ~client=Rest.client(endpoint),
+    )
+    let msg = switch result {
+    | QuerySucceeded => `Hasura _meta public select permission created`
+    | AlreadyDone => `Hasura _meta public select permission already exists`
+    }
+    Logging.trace({
+      "msg": msg,
+    })
+
+    let result = await rawBodyRoute->Rest.fetch(
+      {
+        "auth": auth,
+        "bodyString": `{"type": "pg_track_logical_model","args": {"source": "default","name":"chain_metadata","fields":[{"name":"block_height","type":"int"},{"name":"chain_id","type":"int"},{"name":"end_block","type":"int"},{"name":"first_event_block_number","type":"int"},{"name":"is_hyper_sync","type":"boolean"},{"name":"latest_fetched_block_number","type":"int"},{"name":"latest_processed_block","type":"int"},{"name":"num_batches_fetched","type":"int"},{"name":"num_events_processed","type":"int"},{"name":"start_block","type":"int"},{"name":"timestamp_caught_up_to_head_or_endblock","type":"timestamptz"}]}}`,
+      },
+      ~client=Rest.client(endpoint),
+    )
+    let msg = switch result {
+    | QuerySucceeded => `Hasura chain_metadata logical model created`
+    | AlreadyDone => `Hasura chain_metadata logical model already created`
+    }
+    Logging.trace({
+      "msg": msg,
+    })
+
+    // Need this to keep backwards compatibility,
+    // since it's used on Hosted Service
+    let result = await rawBodyRoute->Rest.fetch(
+      {
+        "auth": auth,
+        "bodyString": `{"type":"pg_track_native_query","args":{"type":"query","source":"default","root_field_name":"chain_metadata","arguments":{},"returns":"chain_metadata","code":"SELECT \\\"${(#source_block: InternalTable.Chains.field :> string)}\\\" AS \\\"block_height\\\", \\\"${(#id: InternalTable.Chains.field :> string)}\\\" AS \\\"chain_id\\\", \\\"${(#end_block: InternalTable.Chains.field :> string)}\\\", \\\"${(#first_event_block: InternalTable.Chains.field :> string)}\\\" AS \\\"first_event_block_number\\\", \\\"${(#_is_hyper_sync: InternalTable.Chains.field :> string)}\\\" AS \\\"is_hyper_sync\\\", \\\"${(#buffer_block: InternalTable.Chains.field :> string)}\\\" AS \\\"latest_fetched_block_number\\\", \\\"${(#_latest_processed_block: InternalTable.Chains.field :> string)}\\\" AS \\\"latest_processed_block\\\", \\\"${(#_num_batches_fetched: InternalTable.Chains.field :> string)}\\\" AS \\\"num_batches_fetched\\\", \\\"${(#events_processed: InternalTable.Chains.field :> string)}\\\" AS \\\"num_events_processed\\\", \\\"${(#start_block: InternalTable.Chains.field :> string)}\\\", \\\"${(#ready_at: InternalTable.Chains.field :> string)}\\\" AS \\\"timestamp_caught_up_to_head_or_endblock\\\" FROM \\\"${pgSchema}\\\".\\\"${InternalTable.Chains.table.tableName}\\\""}}`,
+      },
+      ~client=Rest.client(endpoint),
+    )
+    let msg = switch result {
+    | QuerySucceeded => `Hasura chain_metadata native query created`
+    | AlreadyDone => `Hasura chain_metadata native query already created`
+    }
+    Logging.trace({
+      "msg": msg,
+    })
+
+    // Add public select permissions for chain_metadata logical model
+    let result = await rawBodyRoute->Rest.fetch(
+      {
+        "auth": auth,
+        "bodyString": `{"type": "pg_create_logical_model_select_permission", "args": {"source": "default", "name": "chain_metadata", "role": "public", "permission": {"columns": "*", "filter": {}}}}`,
+      },
+      ~client=Rest.client(endpoint),
+    )
+    let msg = switch result {
+    | QuerySucceeded => `Hasura chain_metadata public select permission created`
+    | AlreadyDone => `Hasura chain_metadata public select permission already exists`
+    }
+    Logging.trace({
+      "msg": msg,
+    })
+  } catch {
+  | exn =>
+    Logging.error({
+      "msg": `EE808: There was an issue setting up _meta field in hasura - indexing may still work - but you may have issues querying the data in hasura.`,
+      "err": exn->Utils.prettifyExn,
     })
   }
 }
@@ -222,24 +328,39 @@ let trackDatabase = async (
   ~endpoint,
   ~auth,
   ~pgSchema,
-  ~allStaticTables,
-  ~allEntityTables,
+  ~userEntities: array<Internal.entityConfig>,
   ~aggregateEntities,
   ~responseLimit,
   ~schema,
 ) => {
+  let trackOnlyInternalTableNames = [
+    InternalTable.Chains.table.tableName,
+    InternalTable.EventSyncState.table.tableName,
+    InternalTable.PersistedState.table.tableName,
+    InternalTable.EndOfBlockRangeScannedData.table.tableName,
+    InternalTable.DynamicContractRegistry.table.tableName,
+  ]
+  let exposedInternalTableNames = [InternalTable.RawEvents.table.tableName]
+  let userTableNames = userEntities->Js.Array2.map(entity => entity.table.tableName)
+
   Logging.info("Tracking tables in Hasura")

   let _ = await clearHasuraMetadata(~endpoint, ~auth)
-  let tableNames =
-    [allStaticTables, allEntityTables]
-    ->Belt.Array.concatMany
-    ->Js.Array2.map(({tableName}: Table.table) => tableName)

-  await trackTables(~endpoint, ~auth, ~pgSchema, ~tableNames)
+  await trackTables(
+    ~endpoint,
+    ~auth,
+    ~pgSchema,
+    ~tableNames=[
+      exposedInternalTableNames,
+      trackOnlyInternalTableNames,
+      userTableNames,
+    ]->Belt.Array.concatMany,
+  )

   let _ =
-    await tableNames
+    await [exposedInternalTableNames, userTableNames]
+    ->Belt.Array.concatMany
     ->Js.Array2.map(tableName =>
       createSelectPermissions(
         ~endpoint,
@@ -251,11 +372,11 @@ let trackDatabase = async (
       )
     )
     ->Js.Array2.concatMany(
-      allEntityTables->Js.Array2.map(table => {
-        let {tableName} = table
+      userEntities->Js.Array2.map(entityConfig => {
+        let {tableName} = entityConfig.table
         [
           //Set array relationships
-          table
+          entityConfig.table
           ->Table.getDerivedFromFields
           ->Js.Array2.map(derivedFromField => {
             //determines the actual name of the underlying relational field (if it's an entity mapping then suffixes _id for eg.)
@@ -275,7 +396,7 @@ let trackDatabase = async (
             )
           }),
           //Set object relationships
-          table
+          entityConfig.table
           ->Table.getLinkedEntityFields
           ->Js.Array2.map(((field, linkedEntityName)) => {
             createEntityRelationship(
@@ -294,4 +415,6 @@ let trackDatabase = async (
           }),
     )
     ->Promise.all
+
+  await trackMeta(~auth, ~endpoint, ~pgSchema)
 }
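The new trackMeta function repeats one shape six times: POST a raw Hasura metadata body via rawBodyRoute->Rest.fetch, then trace whether the call was newly applied (QuerySucceeded) or was a no-op (AlreadyDone). A minimal sketch of that shared shape, assuming the same route, client, and result variants shown above; the helper name trackMetadataCall is hypothetical and not part of this release:

    // Hypothetical helper illustrating the pattern used by trackMeta:
    // one Hasura metadata API call, then a trace log of the outcome.
    let trackMetadataCall = async (~auth, ~endpoint, ~bodyString, ~label) => {
      let result = await rawBodyRoute->Rest.fetch(
        {"auth": auth, "bodyString": bodyString},
        ~client=Rest.client(endpoint),
      )
      let msg = switch result {
      | QuerySucceeded => `${label} created`
      | AlreadyDone => `${label} already exists`
      }
      Logging.trace({"msg": msg})
    }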
package/src/Hasura.res.js
CHANGED
@@ -6,8 +6,8 @@ var Table = require("./db/Table.res.js");
 var Utils = require("./Utils.res.js");
 var Schema = require("./db/Schema.res.js");
 var Logging = require("./Logging.res.js");
-var Internal = require("./Internal.res.js");
 var Belt_Array = require("rescript/lib/js/belt_Array.js");
+var InternalTable = require("./db/InternalTable.res.js");
 var Caml_splice_call = require("rescript/lib/js/caml_splice_call.js");
 var S$RescriptSchema = require("rescript-schema/src/S.res.js");
 var Caml_js_exceptions = require("rescript/lib/js/caml_js_exceptions.js");
@@ -102,7 +102,7 @@ async function clearHasuraMetadata(endpoint, auth) {
     var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);
     return Logging.error({
           msg: "EE806: There was an issue clearing metadata in hasura - indexing may still work - but you may have issues querying the data in hasura.",
-          err: Internal.prettifyExn(exn)
+          err: Utils.prettifyExn(exn)
         });
   }
 }
@@ -138,7 +138,7 @@ async function trackTables(endpoint, auth, pgSchema, tableNames) {
     return Logging.error({
           msg: "EE807: There was an issue tracking tables in hasura - indexing may still work - but you may have issues querying the data in hasura.",
           tableNames: tableNames,
-          err: Internal.prettifyExn(exn)
+          err: Utils.prettifyExn(exn)
         });
   }
 }
@@ -174,7 +174,7 @@ async function createSelectPermissions(auth, endpoint, tableName, pgSchema, resp
     return Logging.error({
           msg: "EE808: There was an issue setting up view permissions for the " + tableName + " table in hasura - indexing may still work - but you may have issues querying the data in hasura.",
           tableName: tableName,
-          err: Internal.prettifyExn(exn)
+          err: Utils.prettifyExn(exn)
         });
   }
 }
@@ -199,36 +199,116 @@ async function createEntityRelationship(pgSchema, endpoint, auth, tableName, rel
     return Logging.error({
           msg: "EE808: There was an issue setting up " + relationshipType + " relationship for the " + tableName + " table in hasura - indexing may still work - but you may have issues querying the data in hasura.",
           tableName: tableName,
-          err: Internal.prettifyExn(exn)
+          err: Utils.prettifyExn(exn)
         });
   }
 }

-async function trackDatabase(endpoint, auth, pgSchema, allStaticTables, allEntityTables, aggregateEntities, responseLimit, schema) {
+async function trackMeta(auth, endpoint, pgSchema) {
+  try {
+    var result = await Rest.$$fetch(rawBodyRoute, {
+          auth: auth,
+          bodyString: "{\"type\": \"pg_track_logical_model\",\"args\": {\"source\": \"default\",\"name\":\"EnvioMeta\",\"fields\":[{\"name\":\"chainId\",\"type\":\"int\"},{\"name\":\"startBlock\",\"type\":\"int\"},{\"name\":\"endBlock\",\"type\":\"int\",\"nullable\":true},{\"name\":\"bufferBlock\",\"type\":\"int\"},{\"name\":\"readyAt\",\"type\":\"timestamptz\",\"nullable\":true},{\"name\":\"firstEventBlock\",\"type\":\"int\",\"nullable\":true},{\"name\":\"eventsProcessed\",\"type\":\"int\"},{\"name\":\"isReady\",\"type\":\"bool\"}]}}"
+        }, Rest.client(endpoint, undefined));
+    var msg;
+    msg = result === "QuerySucceeded" ? "Hasura EnvioMeta logical model created" : "Hasura EnvioMeta logical model already created";
+    Logging.trace({
+          msg: msg
+        });
+    var result$1 = await Rest.$$fetch(rawBodyRoute, {
+          auth: auth,
+          bodyString: "{\"type\":\"pg_track_native_query\",\"args\":{\"type\":\"query\",\"source\":\"default\",\"root_field_name\":\"_meta\",\"arguments\":{},\"returns\":\"EnvioMeta\",\"code\":\"SELECT \\\"" + "id" + "\\\" AS \\\"chainId\\\", \\\"" + "start_block" + "\\\" AS \\\"startBlock\\\", \\\"" + "end_block" + "\\\" AS \\\"endBlock\\\", \\\"" + "buffer_block" + "\\\" AS \\\"bufferBlock\\\", \\\"" + "ready_at" + "\\\" AS \\\"readyAt\\\", \\\"" + "first_event_block" + "\\\" AS \\\"firstEventBlock\\\", \\\"" + "events_processed" + "\\\" AS \\\"eventsProcessed\\\", (\\\"" + "ready_at" + "\\\" IS NOT NULL) AS \\\"isReady\\\" FROM \\\"" + pgSchema + "\\\".\\\"" + InternalTable.Chains.table.tableName + "\\\" ORDER BY \\\"id\\\"\"}}"
+        }, Rest.client(endpoint, undefined));
+    var msg$1;
+    msg$1 = result$1 === "QuerySucceeded" ? "Hasura _meta native query created" : "Hasura _meta native query already created";
+    Logging.trace({
+          msg: msg$1
+        });
+    var result$2 = await Rest.$$fetch(rawBodyRoute, {
+          auth: auth,
+          bodyString: "{\"type\": \"pg_create_logical_model_select_permission\", \"args\": {\"source\": \"default\", \"name\": \"EnvioMeta\", \"role\": \"public\", \"permission\": {\"columns\": \"*\", \"filter\": {}}}}"
+        }, Rest.client(endpoint, undefined));
+    var msg$2;
+    msg$2 = result$2 === "QuerySucceeded" ? "Hasura _meta public select permission created" : "Hasura _meta public select permission already exists";
+    Logging.trace({
+          msg: msg$2
+        });
+    var result$3 = await Rest.$$fetch(rawBodyRoute, {
+          auth: auth,
+          bodyString: "{\"type\": \"pg_track_logical_model\",\"args\": {\"source\": \"default\",\"name\":\"chain_metadata\",\"fields\":[{\"name\":\"block_height\",\"type\":\"int\"},{\"name\":\"chain_id\",\"type\":\"int\"},{\"name\":\"end_block\",\"type\":\"int\"},{\"name\":\"first_event_block_number\",\"type\":\"int\"},{\"name\":\"is_hyper_sync\",\"type\":\"boolean\"},{\"name\":\"latest_fetched_block_number\",\"type\":\"int\"},{\"name\":\"latest_processed_block\",\"type\":\"int\"},{\"name\":\"num_batches_fetched\",\"type\":\"int\"},{\"name\":\"num_events_processed\",\"type\":\"int\"},{\"name\":\"start_block\",\"type\":\"int\"},{\"name\":\"timestamp_caught_up_to_head_or_endblock\",\"type\":\"timestamptz\"}]}}"
+        }, Rest.client(endpoint, undefined));
+    var msg$3;
+    msg$3 = result$3 === "QuerySucceeded" ? "Hasura chain_metadata logical model created" : "Hasura chain_metadata logical model already created";
+    Logging.trace({
+          msg: msg$3
+        });
+    var result$4 = await Rest.$$fetch(rawBodyRoute, {
+          auth: auth,
+          bodyString: "{\"type\":\"pg_track_native_query\",\"args\":{\"type\":\"query\",\"source\":\"default\",\"root_field_name\":\"chain_metadata\",\"arguments\":{},\"returns\":\"chain_metadata\",\"code\":\"SELECT \\\"" + "source_block" + "\\\" AS \\\"block_height\\\", \\\"" + "id" + "\\\" AS \\\"chain_id\\\", \\\"" + "end_block" + "\\\", \\\"" + "first_event_block" + "\\\" AS \\\"first_event_block_number\\\", \\\"" + "_is_hyper_sync" + "\\\" AS \\\"is_hyper_sync\\\", \\\"" + "buffer_block" + "\\\" AS \\\"latest_fetched_block_number\\\", \\\"" + "_latest_processed_block" + "\\\" AS \\\"latest_processed_block\\\", \\\"" + "_num_batches_fetched" + "\\\" AS \\\"num_batches_fetched\\\", \\\"" + "events_processed" + "\\\" AS \\\"num_events_processed\\\", \\\"" + "start_block" + "\\\", \\\"" + "ready_at" + "\\\" AS \\\"timestamp_caught_up_to_head_or_endblock\\\" FROM \\\"" + pgSchema + "\\\".\\\"" + InternalTable.Chains.table.tableName + "\\\"\"}}"
+        }, Rest.client(endpoint, undefined));
+    var msg$4;
+    msg$4 = result$4 === "QuerySucceeded" ? "Hasura chain_metadata native query created" : "Hasura chain_metadata native query already created";
+    Logging.trace({
+          msg: msg$4
+        });
+    var result$5 = await Rest.$$fetch(rawBodyRoute, {
+          auth: auth,
+          bodyString: "{\"type\": \"pg_create_logical_model_select_permission\", \"args\": {\"source\": \"default\", \"name\": \"chain_metadata\", \"role\": \"public\", \"permission\": {\"columns\": \"*\", \"filter\": {}}}}"
+        }, Rest.client(endpoint, undefined));
+    var msg$5;
+    msg$5 = result$5 === "QuerySucceeded" ? "Hasura chain_metadata public select permission created" : "Hasura chain_metadata public select permission already exists";
+    return Logging.trace({
+                msg: msg$5
+              });
+  }
+  catch (raw_exn){
+    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);
+    return Logging.error({
+          msg: "EE808: There was an issue setting up _meta field in hasura - indexing may still work - but you may have issues querying the data in hasura.",
+          err: Utils.prettifyExn(exn)
+        });
+  }
+}
+
+async function trackDatabase(endpoint, auth, pgSchema, userEntities, aggregateEntities, responseLimit, schema) {
+  var trackOnlyInternalTableNames = [
+    InternalTable.Chains.table.tableName,
+    InternalTable.EventSyncState.table.tableName,
+    InternalTable.PersistedState.table.tableName,
+    InternalTable.EndOfBlockRangeScannedData.table.tableName,
+    InternalTable.DynamicContractRegistry.table.tableName
+  ];
+  var exposedInternalTableNames = [InternalTable.RawEvents.table.tableName];
+  var userTableNames = userEntities.map(function (entity) {
+        return entity.table.tableName;
+      });
   Logging.info("Tracking tables in Hasura");
   await clearHasuraMetadata(endpoint, auth);
-  var tableNames = Belt_Array.concatMany([
-        allStaticTables,
-        allEntityTables
-      ]).map(function (param) {
-        return param.tableName;
-      });
-  await trackTables(endpoint, auth, pgSchema, tableNames);
-  await Promise.all(Caml_splice_call.spliceObjApply(tableNames.map(function (tableName) {
+  await trackTables(endpoint, auth, pgSchema, Belt_Array.concatMany([
+            exposedInternalTableNames,
+            trackOnlyInternalTableNames,
+            userTableNames
+          ]));
+  await Promise.all(Caml_splice_call.spliceObjApply(Belt_Array.concatMany([
+                exposedInternalTableNames,
+                userTableNames
+              ]).map(function (tableName) {
              return createSelectPermissions(auth, endpoint, tableName, pgSchema, responseLimit, aggregateEntities);
-            }), "concat", [allEntityTables.map(function (table) {
-                var tableName = table.tableName;
+            }), "concat", [userEntities.map(function (entityConfig) {
+                var match = entityConfig.table;
+                var tableName = match.tableName;
                 return [
-                    Table.getDerivedFromFields(table).map(function (derivedFromField) {
+                    Table.getDerivedFromFields(entityConfig.table).map(function (derivedFromField) {
                         var relationalFieldName = Utils.unwrapResultExn(Schema.getDerivedFromFieldName(schema, derivedFromField));
                         return createEntityRelationship(pgSchema, endpoint, auth, tableName, "array", relationalFieldName, derivedFromField.fieldName, derivedFromField.derivedFromEntity, true);
                       }),
-                    Table.getLinkedEntityFields(table).map(function (param) {
+                    Table.getLinkedEntityFields(entityConfig.table).map(function (param) {
                         var field = param[0];
                         return createEntityRelationship(pgSchema, endpoint, auth, tableName, "object", field.fieldName, field.fieldName, param[1], false);
                       })
                   ].flat(1);
              })]));
+  return await trackMeta(auth, endpoint, pgSchema);
 }

 exports.auth = auth;
@@ -241,5 +321,6 @@ exports.clearHasuraMetadata = clearHasuraMetadata;
 exports.trackTables = trackTables;
 exports.createSelectPermissions = createSelectPermissions;
 exports.createEntityRelationship = createEntityRelationship;
+exports.trackMeta = trackMeta;
 exports.trackDatabase = trackDatabase;
 /* Rest Not a pure module */
package/src/Internal.res
CHANGED
@@ -176,13 +176,16 @@ let fuelTransferParamsSchema = S.schema(s => {
 })

 type entity = private {id: string}
-type entityConfig = {
+type genericEntityConfig<'entity> = {
   name: string,
-  schema: S.t<entity>,
-  rowsSchema: S.t<array<entity>>,
+  schema: S.t<'entity>,
+  rowsSchema: S.t<array<'entity>>,
   table: Table.table,
-  entityHistory: EntityHistory.t<entity>,
+  entityHistory: EntityHistory.t<'entity>,
 }
+type entityConfig = genericEntityConfig<entity>
+external fromGenericEntityConfig: genericEntityConfig<'entity> => entityConfig = "%identity"
+
 type enum
 type enumConfig<'enum> = {
   name: string,
@@ -238,10 +241,3 @@ let makeCacheTable = (~effectName) => {

 @genType.import(("./Types.ts", "Invalid"))
 type noEventFilters
-
-let prettifyExn = exn => {
-  switch exn->Js.Exn.anyToExnInternal {
-  | Js.Exn.Error(e) => e->(Utils.magic: Js.Exn.t => exn)
-  | exn => exn
-  }
-}
package/src/Internal.res.js
CHANGED
@@ -8,7 +8,6 @@ var Address = require("./Address.res.js");
 var Belt_Array = require("rescript/lib/js/belt_Array.js");
 var Caml_option = require("rescript/lib/js/caml_option.js");
 var S$RescriptSchema = require("rescript-schema/src/S.res.js");
-var Caml_js_exceptions = require("rescript/lib/js/caml_js_exceptions.js");

 var fuelSupplyParamsSchema = S$RescriptSchema.schema(function (s) {
   return {
@@ -44,19 +43,9 @@ function makeCacheTable(effectName) {
       ]);
 }

-function prettifyExn(exn) {
-  var e = Caml_js_exceptions.internalToOCamlException(exn);
-  if (e.RE_EXN_ID === Js_exn.$$Error) {
-    return e._1;
-  } else {
-    return e;
-  }
-}
-
 exports.fuelSupplyParamsSchema = fuelSupplyParamsSchema;
 exports.fuelTransferParamsSchema = fuelTransferParamsSchema;
 exports.makeEnumConfig = makeEnumConfig;
 exports.cacheTablePrefix = cacheTablePrefix;
 exports.makeCacheTable = makeCacheTable;
-exports.prettifyExn = prettifyExn;
 /* fuelSupplyParamsSchema Not a pure module */
package/src/InternalConfig.res
ADDED
@@ -0,0 +1,20 @@
+// TODO: rename the file to Config.res after finishing the migration from codegen
+// And turn it into PublicConfig instead
+// For internal use we should create Indexer.res with a stateful type
+
+type contract = {
+  name: string,
+  abi: EvmTypes.Abi.t,
+  addresses: array<Address.t>,
+  events: array<Internal.eventConfig>,
+  startBlock: option<int>,
+}
+
+type chain = {
+  id: int,
+  startBlock: int,
+  endBlock?: int,
+  confirmedBlockThreshold: int,
+  contracts: array<contract>,
+  sources: array<Source.t>,
+}
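An illustrative construction of the new InternalConfig records, purely to show their shape; erc20Abi, tokenAddress, transferEventConfig and hypersyncSource are placeholders that do not exist in this diff, and the optional endBlock field is simply omitted:

    // Placeholder values only - shows the shape of InternalConfig.contract
    // and InternalConfig.chain introduced by this file.
    let erc20Contract: InternalConfig.contract = {
      name: "ERC20",
      abi: erc20Abi,
      addresses: [tokenAddress],
      events: [transferEventConfig],
      startBlock: None,
    }

    let mainnet: InternalConfig.chain = {
      id: 1,
      startBlock: 0,
      confirmedBlockThreshold: 200,
      contracts: [erc20Contract],
      sources: [hypersyncSource],
    }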
package/src/Js.shim.ts
ADDED
package/src/LoadManager.res
CHANGED
@@ -66,26 +66,32 @@ let schedule = async loadManager => {
     }
   })

-  if inputsToLoad->Utils.Array.isEmpty->not {
+  let isSuccess = if inputsToLoad->Utils.Array.isEmpty->not {
     try {
       await group.load(inputsToLoad)
+      true
     } catch {
     | exn => {
-        let exn = exn->Internal.prettifyExn
+        let exn = exn->Utils.prettifyExn
         currentInputKeys->Array.forEach(inputKey => {
           let call = calls->Js.Dict.unsafeGet(inputKey)
           call.reject(exn)
         })
+        false
       }
     }
+  } else {
+    true
   }

   if currentInputKeys->Utils.Array.isEmpty->not {
-    currentInputKeys->Js.Array2.forEach(inputKey => {
-      let call = calls->Js.Dict.unsafeGet(inputKey)
-      calls->Utils.Dict.deleteInPlace(inputKey)
-      call.resolve(group.getUnsafeInMemory(inputKey))
-    })
+    if isSuccess {
+      currentInputKeys->Js.Array2.forEach(inputKey => {
+        let call = calls->Js.Dict.unsafeGet(inputKey)
+        calls->Utils.Dict.deleteInPlace(inputKey)
+        call.resolve(group.getUnsafeInMemory(inputKey))
+      })
+    }

     // Clean up executed batch to reset
     // provided load function which