envio 2.31.0-alpha.2 → 2.31.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "envio",
-  "version": "v2.31.0-alpha.2",
+  "version": "v2.31.0",
   "description": "A latency and sync speed optimized, developer friendly blockchain data indexer.",
   "bin": "./bin.js",
   "main": "./index.js",
@@ -25,10 +25,10 @@
   },
   "homepage": "https://envio.dev",
   "optionalDependencies": {
-    "envio-linux-x64": "v2.31.0-alpha.2",
-    "envio-linux-arm64": "v2.31.0-alpha.2",
-    "envio-darwin-x64": "v2.31.0-alpha.2",
-    "envio-darwin-arm64": "v2.31.0-alpha.2"
+    "envio-linux-x64": "v2.31.0",
+    "envio-linux-arm64": "v2.31.0",
+    "envio-darwin-x64": "v2.31.0",
+    "envio-darwin-arm64": "v2.31.0"
   },
   "dependencies": {
     "@envio-dev/hypersync-client": "0.6.6",
package/src/Envio.res CHANGED
@@ -59,7 +59,6 @@ let experimental_createEffect = (
   options: effectOptions<'input, 'output>,
   handler: effectArgs<'input> => promise<'output>,
 ) => {
-  Prometheus.EffectCallsCount.set(~callsCount=0, ~effectName=options.name)
   let outputSchema =
     S.schema(_ => options.output)->(Utils.magic: S.t<S.t<'output>> => S.t<Internal.effectOutput>)
   {
@@ -86,7 +85,7 @@ let experimental_createEffect = (
     })
     Some({
       table: Internal.makeCacheTable(~effectName=options.name),
-      rowsSchema: S.array(itemSchema),
+      outputSchema,
       itemSchema,
     })
   | None
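
Two changes land in experimental_createEffect: the Prometheus calls-count gauge is no longer reset to zero when the effect is defined, and the cache metadata now carries the effect's outputSchema instead of a precomputed rowsSchema. For orientation, a hedged sketch of what defining a cached effect looks like from user code (the name/input/output/cache option shape follows envio's documented effect API; treat the exact import and field names as assumptions):

// Hypothetical usage sketch of experimental_createEffect.
const { experimental_createEffect, S } = require("envio");

const getTokenMetadata = experimental_createEffect(
  {
    name: "getTokenMetadata", // also names the envio_effect_getTokenMetadata cache table
    input: S.string, // token address
    output: S.schema({ symbol: S.string, decimals: S.number }),
    cache: true, // persist results in the effect cache table
  },
  async ({ input }) => {
    // Runs only on cache misses when the cache is enabled.
    return { symbol: "TKN", decimals: 18 };
  },
);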
package/src/Envio.res.js CHANGED
@@ -2,11 +2,9 @@
 'use strict';

 var Internal = require("./Internal.res.js");
-var Prometheus = require("./Prometheus.res.js");
 var S$RescriptSchema = require("rescript-schema/src/S.res.js");

 function experimental_createEffect(options, handler) {
-  Prometheus.EffectCallsCount.set(0, options.name);
   var outputSchema = S$RescriptSchema.schema(function (param) {
     return options.output;
   });
@@ -21,7 +19,7 @@ function experimental_createEffect(options, handler) {
   });
   tmp = {
     itemSchema: itemSchema,
-    rowsSchema: S$RescriptSchema.array(itemSchema),
+    outputSchema: outputSchema,
     table: Internal.makeCacheTable(options.name)
   };
 } else {
@@ -351,8 +351,11 @@ let registerDynamicContracts = (
     switch item->Internal.getItemDcs {
     | None => ()
     | Some(dcs) =>
-      for idx in 0 to dcs->Array.length - 1 {
-        let dc = dcs->Js.Array2.unsafe_get(idx)
+      let idx = ref(0)
+      while idx.contents < dcs->Array.length {
+        let dc = dcs->Js.Array2.unsafe_get(idx.contents)
+
+        let shouldRemove = ref(false)

         switch fetchState.contractConfigs->Utils.Dict.dangerouslyGetNonOption(dc.contractName) {
         | Some({filterByAddresses}) =>
@@ -378,8 +381,7 @@ let registerDynamicContracts = (
             )
             logger->Logging.childWarn(`Skipping contract registration: Contract address is already registered at a later block number. Currently registration of the same contract address is not supported by Envio. Reach out to us if it's a problem for you.`)
           }
-          // Remove the DC from item to prevent it from saving to the db
-          let _ = dcs->Js.Array2.removeCountInPlace(~count=1, ~pos=idx)
+          shouldRemove := true
        | None =>
          let shouldUpdate = switch registeringContracts->Utils.Dict.dangerouslyGetNonOption(
            dc.address->Address.toString,
@@ -401,8 +403,7 @@ let registerDynamicContracts = (
               Pervasives.min(earliestRegisteringEventBlockNumber.contents, dc.startBlock)
             registeringContracts->Js.Dict.set(dc.address->Address.toString, dc)
           } else {
-            // Remove the DC from item to prevent it from saving to the db
-            let _ = dcs->Js.Array2.removeCountInPlace(~count=1, ~pos=idx)
+            shouldRemove := true
           }
         }
       | None => {
@@ -414,9 +415,17 @@ let registerDynamicContracts = (
             },
           )
           logger->Logging.childWarn(`Skipping contract registration: Contract doesn't have any events to fetch.`)
-          let _ = dcs->Js.Array2.removeCountInPlace(~count=1, ~pos=idx)
+          shouldRemove := true
         }
       }
+
+      if shouldRemove.contents {
+        // Remove the DC from item to prevent it from saving to the db
+        let _ = dcs->Js.Array2.removeCountInPlace(~count=1, ~pos=idx.contents)
+        // Don't increment idx - next element shifted into current position
+      } else {
+        idx := idx.contents + 1
+      }
       }
     }
   }
@@ -237,8 +237,10 @@ function registerDynamicContracts(fetchState, items) {
     var item = items[itemIdx];
     var dcs = item.dcs;
     if (dcs !== undefined) {
-      for(var idx = 0 ,idx_finish = dcs.length; idx < idx_finish; ++idx){
+      var idx = 0;
+      while(idx < dcs.length) {
         var dc = dcs[idx];
+        var shouldRemove = false;
         var match = fetchState.contractConfigs[dc.contractName];
         if (match !== undefined) {
           var existingContract = indexingContracts[dc.address];
@@ -254,7 +256,7 @@ function registerDynamicContracts(fetchState, items) {
             });
             Logging.childWarn(logger, "Skipping contract registration: Contract address is already registered at a later block number. Currently registration of the same contract address is not supported by Envio. Reach out to us if it's a problem for you.");
           }
-          dcs.splice(idx, 1);
+          shouldRemove = true;
         } else {
           var registeringContract = registeringContracts[dc.address];
           var shouldUpdate;
@@ -274,7 +276,7 @@ function registerDynamicContracts(fetchState, items) {
             earliestRegisteringEventBlockNumber = earliestRegisteringEventBlockNumber < dc.startBlock ? earliestRegisteringEventBlockNumber : dc.startBlock;
             registeringContracts[dc.address] = dc;
           } else {
-            dcs.splice(idx, 1);
+            shouldRemove = true;
           }
         }
       } else {
@@ -284,9 +286,14 @@ function registerDynamicContracts(fetchState, items) {
           contractName: dc.contractName
         });
         Logging.childWarn(logger$1, "Skipping contract registration: Contract doesn't have any events to fetch.");
+        shouldRemove = true;
+      }
+      if (shouldRemove) {
         dcs.splice(idx, 1);
+      } else {
+        idx = idx + 1 | 0;
       }
-      }
+      };
     }

   }
@@ -331,7 +338,7 @@ function registerDynamicContracts(fetchState, items) {
       addressesByContractName: pendingAddressesByContractName.contents
     });
   };
-  for(var idx$1 = 0 ,idx_finish$1 = Object.keys(addressesByContractName).length; idx$1 < idx_finish$1; ++idx$1){
+  for(var idx$1 = 0 ,idx_finish = Object.keys(addressesByContractName).length; idx$1 < idx_finish; ++idx$1){
    var contractName = Object.keys(addressesByContractName)[idx$1];
    var addresses = addressesByContractName[contractName];
    var contractConfig = fetchState.contractConfigs[contractName];
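
The loop rewrite in both the ReScript source and the compiled JS fixes removal-during-iteration: with a fixed-bound for loop, dcs.splice(idx, 1) shifts the next element into position idx, so ++idx skips it. A standalone demonstration of the bug and the fix (plain JavaScript, not envio code):

// Demo of the bug the diff fixes.
const items = ["a", "skip-me", "drop-me", "b"];
const shouldDrop = (x) => x.startsWith("skip") || x.startsWith("drop");

// Buggy: fixed-bound for loop plus splice skips the element that
// shifts into the current index after each removal.
const buggy = [...items];
for (let i = 0, len = buggy.length; i < len; ++i) {
  const x = buggy[i]; // may be undefined once the array has shrunk
  if (x !== undefined && shouldDrop(x)) {
    buggy.splice(i, 1); // "drop-me" shifts into index i and is never visited
  }
}
console.log(buggy); // [ 'a', 'drop-me', 'b' ] - one element escaped removal

// Fixed (the pattern the new code uses): only advance the index
// when nothing was removed at the current position.
const fixed = [...items];
let i = 0;
while (i < fixed.length) {
  if (shouldDrop(fixed[i])) {
    fixed.splice(i, 1); // next element shifted into position i
  } else {
    i += 1;
  }
}
console.log(fixed); // [ 'a', 'b' ]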
package/src/Internal.res CHANGED
@@ -251,6 +251,7 @@ let fuelTransferParamsSchema = S.schema(s => {
 type entity = private {id: string}
 type genericEntityConfig<'entity> = {
   name: string,
+  index: int,
   schema: S.t<'entity>,
   rowsSchema: S.t<array<'entity>>,
   table: Table.table,
@@ -289,7 +290,7 @@ type effectArgs = {
 type effectCacheItem = {id: string, output: effectOutput}
 type effectCacheMeta = {
   itemSchema: S.t<effectCacheItem>,
-  rowsSchema: S.t<array<effectCacheItem>>,
+  outputSchema: S.t<effectOutput>,
   table: Table.table,
 }
 type effect = {
@@ -301,14 +302,17 @@ type effect = {
   mutable callsCount: int,
 }
 let cacheTablePrefix = "envio_effect_"
+let cacheOutputSchema = S.json(~validate=false)->(Utils.magic: S.t<Js.Json.t> => S.t<effectOutput>)
+let effectCacheItemRowsSchema = S.array(
+  S.schema(s => {id: s.matches(S.string), output: s.matches(cacheOutputSchema)}),
+)
 let makeCacheTable = (~effectName) => {
   Table.mkTable(
     cacheTablePrefix ++ effectName,
     ~fields=[
       Table.mkField("id", Text, ~fieldSchema=S.string, ~isPrimaryKey=true),
-      Table.mkField("output", JsonB, ~fieldSchema=S.json(~validate=false), ~isNullable=true),
+      Table.mkField("output", JsonB, ~fieldSchema=cacheOutputSchema, ~isNullable=true),
     ],
-    ~compositeIndices=[],
   )
 }

@@ -36,10 +36,19 @@ function makeEnumConfig(name, variants) {

 var cacheTablePrefix = "envio_effect_";

+var cacheOutputSchema = S$RescriptSchema.json(false);
+
+var effectCacheItemRowsSchema = S$RescriptSchema.array(S$RescriptSchema.schema(function (s) {
+  return {
+    id: s.m(S$RescriptSchema.string),
+    output: s.m(cacheOutputSchema)
+  };
+}));
+
 function makeCacheTable(effectName) {
-  return Table.mkTable(cacheTablePrefix + effectName, [], [
+  return Table.mkTable(cacheTablePrefix + effectName, undefined, [
     Table.mkField("id", "TEXT", S$RescriptSchema.string, undefined, undefined, undefined, true, undefined, undefined),
-    Table.mkField("output", "JSONB", S$RescriptSchema.json(false), undefined, undefined, true, undefined, undefined, undefined)
+    Table.mkField("output", "JSONB", cacheOutputSchema, undefined, undefined, true, undefined, undefined, undefined)
   ]);
 }

@@ -47,5 +56,7 @@ exports.fuelSupplyParamsSchema = fuelSupplyParamsSchema;
 exports.fuelTransferParamsSchema = fuelTransferParamsSchema;
 exports.makeEnumConfig = makeEnumConfig;
 exports.cacheTablePrefix = cacheTablePrefix;
+exports.cacheOutputSchema = cacheOutputSchema;
+exports.effectCacheItemRowsSchema = effectCacheItemRowsSchema;
 exports.makeCacheTable = makeCacheTable;
 /* fuelSupplyParamsSchema Not a pure module */
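
Each cached effect gets a dedicated envio_effect_<name> table with a TEXT primary key and a nullable JSONB payload, now described by the shared cacheOutputSchema. Roughly the DDL this should produce (illustrative: the exact statement comes from makeCreateTableQuery in PgStorage, and "public" stands in for the configured schema):

// Approximation of the cache-table DDL for an effect named "getTokenMetadata".
const expectedCacheTableDdl = `
CREATE TABLE "public"."envio_effect_getTokenMetadata" (
  "id" TEXT NOT NULL,
  "output" JSONB,
  PRIMARY KEY ("id")
);`;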
@@ -156,13 +156,13 @@ let init = {
       Logging.info(`Found existing indexer storage. Resuming indexing state...`)
       let initialState = await persistence.storage.resumeInitialState()
       persistence.storageStatus = Ready(initialState)
-      let checkpoints = Js.Dict.empty()
+      let progress = Js.Dict.empty()
       initialState.chains->Js.Array2.forEach(c => {
-        checkpoints->Utils.Dict.setByInt(c.id, c.progressBlockNumber)
+        progress->Utils.Dict.setByInt(c.id, c.progressBlockNumber)
       })
       Logging.info({
         "msg": `Successfully resumed indexing state! Continuing from the last checkpoint.`,
-        "checkpoints": checkpoints,
+        "progress": progress,
       })
     }
     resolveRef.contents()
@@ -192,7 +192,12 @@ let getInitializedState = persistence => {
   }
 }

-let setEffectCacheOrThrow = async (persistence, ~effect: Internal.effect, ~items) => {
+let setEffectCacheOrThrow = async (
+  persistence,
+  ~effect: Internal.effect,
+  ~items,
+  ~invalidationsCount,
+) => {
   switch persistence.storageStatus {
   | Unknown
   | Initializing(_) =>
@@ -210,7 +215,8 @@ let setEffectCacheOrThrow = async (persistence, ~effect: Internal.effect, ~items
     }
     let initialize = effectCacheRecord.count === 0
     await storage.setEffectCacheOrThrow(~effect, ~items, ~initialize)
-    effectCacheRecord.count = effectCacheRecord.count + items->Js.Array2.length
+    effectCacheRecord.count =
+      effectCacheRecord.count + items->Js.Array2.length - invalidationsCount
     Prometheus.EffectCacheCount.set(~count=effectCacheRecord.count, ~effectName)
   }
 }
@@ -82,13 +82,13 @@ async function init(persistence, chainConfigs, resetOpt) {
       TAG: "Ready",
       _0: initialState$1
     };
-    var checkpoints = {};
+    var progress = {};
     initialState$1.chains.forEach(function (c) {
-      checkpoints[c.id] = c.progressBlockNumber;
+      progress[c.id] = c.progressBlockNumber;
     });
     Logging.info({
       msg: "Successfully resumed indexing state! Continuing from the last checkpoint.",
-      checkpoints: checkpoints
+      progress: progress
     });
   }

@@ -119,7 +119,7 @@ function getInitializedState(persistence) {
   }
 }

-async function setEffectCacheOrThrow(persistence, effect, items) {
+async function setEffectCacheOrThrow(persistence, effect, items, invalidationsCount) {
   var match = persistence.storageStatus;
   if (typeof match !== "object") {
     return Js_exn.raiseError("Failed to access the indexer storage. The Persistence layer is not initialized.");
@@ -144,7 +144,7 @@ async function setEffectCacheOrThrow(persistence, effect, items) {
   }
   var initialize = effectCacheRecord.count === 0;
   await storage.setEffectCacheOrThrow(effect, items, initialize);
-  effectCacheRecord.count = effectCacheRecord.count + items.length | 0;
+  effectCacheRecord.count = (effectCacheRecord.count + items.length | 0) - invalidationsCount | 0;
   return Prometheus.EffectCacheCount.set(effectCacheRecord.count, effectName);
 }

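setEffectCacheOrThrow now receives invalidationsCount so the in-memory cache counter tracks the table size when a batch overwrites existing rows rather than appending new ones, consistent with the envio_effect_cache_invalidations_count metric added below. A worked sketch of the bookkeeping (standalone, illustrative numbers):

// Suppose the cache holds 100 rows and a batch writes 10 items,
// 3 of which overwrite rows that were already cached (invalidations).
let count = 100;
const itemsLength = 10;
const invalidationsCount = 3;

// Old: count += itemsLength -> 110, overstating the table size by 3,
// since overwritten rows do not grow the table.
// New:
count = count + itemsLength - invalidationsCount; // 107, matching the table
console.log(count);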
package/src/PgStorage.res CHANGED
@@ -22,7 +22,7 @@ let makeCreateTableIndicesQuery = (table: Table.table, ~pgSchema) => {
   compositeIndices->Array.map(createCompositeIndex)->Js.Array2.joinWith("\n")
 }

-let makeCreateTableQuery = (table: Table.table, ~pgSchema) => {
+let makeCreateTableQuery = (table: Table.table, ~pgSchema, ~isNumericArrayAsText) => {
   open Belt
   let fieldsMapped =
     table
@@ -34,6 +34,8 @@ let makeCreateTableQuery = (table: Table.table, ~pgSchema) => {
       {
         `"${fieldName}" ${switch fieldType {
         | Custom(name) if !(name->Js.String2.startsWith("NUMERIC(")) => `"${pgSchema}".${name}`
+        // Workaround for Hasura bug https://github.com/enviodev/hyperindex/issues/788
+        | Numeric if isArray && isNumericArrayAsText => (Table.Text :> string)
         | _ => (fieldType :> string)
         }}${isArray ? "[]" : ""}${switch defaultValue {
         | Some(defaultValue) => ` DEFAULT ${defaultValue}`
@@ -57,6 +59,7 @@ let makeCreateTableQuery = (table: Table.table, ~pgSchema) => {
 let makeInitializeTransaction = (
   ~pgSchema,
   ~pgUser,
+  ~isHasuraEnabled,
   ~chainConfigs=[],
   ~entities=[],
   ~enums=[],
@@ -105,7 +108,10 @@ GRANT ALL ON SCHEMA "${pgSchema}" TO public;`,

   // Batch all table creation first (optimal for PostgreSQL)
   allTables->Js.Array2.forEach((table: Table.table) => {
-    query := query.contents ++ "\n" ++ makeCreateTableQuery(table, ~pgSchema)
+    query :=
+      query.contents ++
+      "\n" ++
+      makeCreateTableQuery(table, ~pgSchema, ~isNumericArrayAsText=isHasuraEnabled)
   })

   // Then batch all indices (better performance when tables exist)
@@ -263,7 +269,7 @@ let makeTableBatchSetQuery = (~pgSchema, ~table: Table.table, ~itemSchema: S.t<'
   // Currently history update table uses S.object with transformation for schema,
   // which is being lossed during conversion to dbSchema.
   // So use simple insert values for now.
-  let isHistoryUpdate = table.tableName->Js.String2.startsWith("envio_history_")
+  let isHistoryUpdate = table.tableName->Js.String2.startsWith(EntityHistory.historyTablePrefix)

   // Should experiment how much it'll affect performance
   // Although, it should be fine not to perform the validation check,
@@ -329,7 +335,7 @@ let removeInvalidUtf8InPlace = entities =>
   // This is unsafe, but we rely that it'll use
   // the mutated reference on retry.
   // TODO: Test it properly after we start using
-  // in-memory PGLite for indexer test framework.
+  // real pg for indexer test framework.
   dict->Js.Dict.set(
     key,
     value
@@ -507,6 +513,7 @@ let make = (
   ~pgUser,
   ~pgDatabase,
   ~pgPassword,
+  ~isHasuraEnabled,
   ~onInitialize=?,
   ~onNewTables=?,
 ): Persistence.storage => {
@@ -552,7 +559,7 @@ let make = (
       let table = Internal.makeCacheTable(~effectName)

       sql
-      ->Postgres.unsafe(makeCreateTableQuery(table, ~pgSchema))
+      ->Postgres.unsafe(makeCreateTableQuery(table, ~pgSchema, ~isNumericArrayAsText=false))
       ->Promise.then(() => {
         let inputFile = NodeJs.Path.join(cacheDirPath, entry)->NodeJs.Path.toString

@@ -645,6 +652,7 @@ let make = (
       ~enums,
       ~chainConfigs,
       ~isEmptyPgSchema=schemaTableNames->Utils.Array.isEmpty,
+      ~isHasuraEnabled,
     )
     // Execute all queries within a single transaction for integrity
     let _ = await sql->Postgres.beginSql(sql => {
@@ -790,7 +798,10 @@ let make = (
     }

     if initialize {
-      let _ = await sql->Postgres.unsafe(makeCreateTableQuery(table, ~pgSchema))
+      let _ =
+        await sql->Postgres.unsafe(
+          makeCreateTableQuery(table, ~pgSchema, ~isNumericArrayAsText=false),
+        )
       // Integration with other tools like Hasura
       switch onNewTables {
       | Some(onNewTables) => await onNewTables(~tableNames=[table.tableName])
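
When Hasura integration is enabled, the flag is threaded through makeInitializeTransaction as isNumericArrayAsText, and array-of-NUMERIC columns are created as TEXT[] to work around the linked Hasura issue; scalar NUMERIC and NUMERIC(p,s) columns are unchanged, as are the effect cache tables (which pass false). Illustrative before/after DDL (made-up table and column names):

// Illustrative mapping for an entity field that is an array of NUMERIC.
const withoutHasura = 'CREATE TABLE "public"."Pool" ("fees" NUMERIC[] NOT NULL);';
const withHasura = 'CREATE TABLE "public"."Pool" ("fees" TEXT[] NOT NULL);';
// Only `Numeric if isArray && isNumericArrayAsText` takes the TEXT branch.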
@@ -15,6 +15,7 @@ var Internal = require("./Internal.res.js");
 var Belt_Array = require("rescript/lib/js/belt_Array.js");
 var Caml_option = require("rescript/lib/js/caml_option.js");
 var Persistence = require("./Persistence.res.js");
+var EntityHistory = require("./db/EntityHistory.res.js");
 var InternalTable = require("./db/InternalTable.res.js");
 var Child_process = require("child_process");
 var Caml_exceptions = require("rescript/lib/js/caml_exceptions.js");
@@ -44,15 +45,20 @@ function makeCreateTableIndicesQuery(table, pgSchema) {
   return Belt_Array.map(singleIndices, createIndex).join("\n") + Belt_Array.map(compositeIndices, createCompositeIndex).join("\n");
 }

-function makeCreateTableQuery(table, pgSchema) {
+function makeCreateTableQuery(table, pgSchema, isNumericArrayAsText) {
   var fieldsMapped = Belt_Array.map(Table.getFields(table), (function (field) {
     var defaultValue = field.defaultValue;
+    var isArray = field.isArray;
     var fieldType = field.fieldType;
     var fieldName = Table.getDbFieldName(field);
     var tmp;
-    tmp = fieldType === "TIMESTAMP" || fieldType === "TIMESTAMP WITH TIME ZONE" || fieldType === "JSONB" || fieldType === "SERIAL" || fieldType === "TEXT" || fieldType === "DOUBLE PRECISION" || fieldType === "NUMERIC" || fieldType === "BOOLEAN" || fieldType === "BIGINT" || fieldType === "INTEGER" || fieldType === "TIMESTAMP WITH TIME ZONE NULL" || fieldType.startsWith("NUMERIC(") ? fieldType : "\"" + pgSchema + "\"." + fieldType;
+    tmp = fieldType === "TIMESTAMP" || fieldType === "TIMESTAMP WITH TIME ZONE" || fieldType === "JSONB" || fieldType === "SERIAL" || fieldType === "TEXT" || fieldType === "DOUBLE PRECISION" || fieldType === "NUMERIC" || fieldType === "BOOLEAN" || fieldType === "BIGINT" || fieldType === "INTEGER" || fieldType === "TIMESTAMP WITH TIME ZONE NULL" ? (
+        fieldType === "NUMERIC" && isArray && isNumericArrayAsText ? "TEXT" : fieldType
+      ) : (
+        fieldType.startsWith("NUMERIC(") ? fieldType : "\"" + pgSchema + "\"." + fieldType
+      );
     return "\"" + fieldName + "\" " + tmp + (
-      field.isArray ? "[]" : ""
+      isArray ? "[]" : ""
     ) + (
       defaultValue !== undefined ? " DEFAULT " + defaultValue : (
         field.isNullable ? "" : " NOT NULL"
@@ -68,7 +74,7 @@ function makeCreateTableQuery(table, pgSchema) {
   ) + ");";
 }

-function makeInitializeTransaction(pgSchema, pgUser, chainConfigsOpt, entitiesOpt, enumsOpt, isEmptyPgSchemaOpt) {
+function makeInitializeTransaction(pgSchema, pgUser, isHasuraEnabled, chainConfigsOpt, entitiesOpt, enumsOpt, isEmptyPgSchemaOpt) {
   var chainConfigs = chainConfigsOpt !== undefined ? chainConfigsOpt : [];
   var entities = entitiesOpt !== undefined ? entitiesOpt : [];
   var enums = enumsOpt !== undefined ? enumsOpt : [];
@@ -99,7 +105,7 @@ function makeInitializeTransaction(pgSchema, pgUser, chainConfigsOpt, entitiesOp
     query.contents = query.contents + "\n" + enumCreateQuery;
   });
   allTables.forEach(function (table) {
-    query.contents = query.contents + "\n" + makeCreateTableQuery(table, pgSchema);
+    query.contents = query.contents + "\n" + makeCreateTableQuery(table, pgSchema, isHasuraEnabled);
   });
   allTables.forEach(function (table) {
     var indices = makeCreateTableIndicesQuery(table, pgSchema);
@@ -194,7 +200,7 @@ function makeInsertValuesSetQuery(pgSchema, table, itemSchema, itemsCount) {
 function makeTableBatchSetQuery(pgSchema, table, itemSchema) {
   var match = Table.toSqlParams(table, itemSchema, pgSchema);
   var isRawEvents = table.tableName === InternalTable.RawEvents.table.tableName;
-  var isHistoryUpdate = table.tableName.startsWith("envio_history_");
+  var isHistoryUpdate = table.tableName.startsWith(EntityHistory.historyTablePrefix);
   if ((isRawEvents || !match.hasArrayField) && !isHistoryUpdate) {
     return {
       query: makeInsertUnnestSetQuery(pgSchema, table, itemSchema, isRawEvents),
@@ -354,7 +360,7 @@ async function getConnectedPsqlExec(pgUser, pgHost, pgDatabase, pgPort) {
   return result;
 }

-function make(sql, pgHost, pgSchema, pgPort, pgUser, pgDatabase, pgPassword, onInitialize, onNewTables) {
+function make(sql, pgHost, pgSchema, pgPort, pgUser, pgDatabase, pgPassword, isHasuraEnabled, onInitialize, onNewTables) {
   var psqlExecOptions_env = Js_dict.fromArray([
     [
       "PGPASSWORD",
@@ -403,7 +409,7 @@ function make(sql, pgHost, pgSchema, pgPort, pgUser, pgDatabase, pgPassword, onI
   await Promise.all(cacheFiles.map(function (entry) {
     var effectName = entry.slice(0, -4);
     var table = Internal.makeCacheTable(effectName);
-    return sql.unsafe(makeCreateTableQuery(table, pgSchema)).then(function () {
+    return sql.unsafe(makeCreateTableQuery(table, pgSchema, false)).then(function () {
       var inputFile = Path.join(cacheDirPath, entry);
       var command = psqlExec$1 + " -c 'COPY \"" + pgSchema + "\".\"" + table.tableName + "\" FROM STDIN WITH (FORMAT text, HEADER);' < " + inputFile;
       return new Promise((function (resolve, reject) {
@@ -461,7 +467,7 @@ function make(sql, pgHost, pgSchema, pgPort, pgUser, pgDatabase, pgPassword, onI
   })) {
     Js_exn.raiseError("Cannot run Envio migrations on PostgreSQL schema \"" + pgSchema + "\" because it contains non-Envio tables. Running migrations would delete all data in this schema.\n\nTo resolve this:\n1. If you want to use this schema, first backup any important data, then drop it with: \"pnpm envio local db-migrate down\"\n2. Or specify a different schema name by setting the \"ENVIO_PG_PUBLIC_SCHEMA\" environment variable\n3. Or manually drop the schema in your database if you're certain the data is not needed.");
   }
-  var queries = makeInitializeTransaction(pgSchema, pgUser, chainConfigs, entities, enums, Utils.$$Array.isEmpty(schemaTableNames));
+  var queries = makeInitializeTransaction(pgSchema, pgUser, isHasuraEnabled, chainConfigs, entities, enums, Utils.$$Array.isEmpty(schemaTableNames));
   await sql.begin(function (sql) {
     return Promise.all(queries.map(function (query) {
       return sql.unsafe(query);
@@ -568,7 +574,7 @@ function make(sql, pgHost, pgSchema, pgPort, pgUser, pgDatabase, pgPassword, onI
   var match = cacheMeta !== undefined ? cacheMeta : Js_exn.raiseError("Failed to set effect cache for \"" + effect.name + "\". Effect has no cache enabled.");
   var table = match.table;
   if (initialize) {
-    await sql.unsafe(makeCreateTableQuery(table, pgSchema));
+    await sql.unsafe(makeCreateTableQuery(table, pgSchema, false));
     if (onNewTables !== undefined) {
       await onNewTables([table.tableName]);
     }
@@ -525,30 +525,6 @@ module RollbackTargetBlockNumber = {
   }
 }

-module ProcessingBlockNumber = {
-  let gauge = SafeGauge.makeOrThrow(
-    ~name="envio_processing_block_number",
-    ~help="The latest item block number included in the currently processing batch for the chain.",
-    ~labelSchema=chainIdLabelsSchema,
-  )
-
-  let set = (~blockNumber, ~chainId) => {
-    gauge->SafeGauge.handleInt(~labels=chainId, ~value=blockNumber)
-  }
-}
-
-module ProcessingBatchSize = {
-  let gauge = SafeGauge.makeOrThrow(
-    ~name="envio_processing_batch_size",
-    ~help="The number of items included in the currently processing batch for the chain.",
-    ~labelSchema=chainIdLabelsSchema,
-  )
-
-  let set = (~batchSize, ~chainId) => {
-    gauge->SafeGauge.handleInt(~labels=chainId, ~value=batchSize)
-  }
-}
-
 module ProcessingMaxBatchSize = {
   let gauge = PromClient.Gauge.makeGauge({
     "name": "envio_processing_max_batch_size",
@@ -593,6 +569,17 @@ module ProgressEventsCount = {
   }
 }

+module ProgressBatchCount = {
+  let counter = PromClient.Counter.makeCounter({
+    "name": "envio_progress_batches_count",
+    "help": "The number of batches processed and reflected in the database.",
+  })
+
+  let increment = () => {
+    counter->PromClient.Counter.inc
+  }
+}
+
 let effectLabelsSchema = S.object(s => {
   s.field("effect", S.string)
 })
@@ -621,6 +608,18 @@ module EffectCacheCount = {
   }
 }

+module EffectCacheInvalidationsCount = {
+  let counter = SafeCounter.makeOrThrow(
+    ~name="envio_effect_cache_invalidations_count",
+    ~help="The number of effect cache invalidations.",
+    ~labelSchema=effectLabelsSchema,
+  )
+
+  let increment = (~effectName) => {
+    counter->SafeCounter.increment(~labels=effectName)
+  }
+}
+
 module StorageLoad = {
   let operationLabelsSchema = S.object(s => s.field("operation", S.string))

@@ -628,51 +628,29 @@ var RollbackTargetBlockNumber = {
   set: set$15
 };

-var gauge$17 = makeOrThrow$1("envio_processing_block_number", "The latest item block number included in the currently processing batch for the chain.", chainIdLabelsSchema);
-
-function set$16(blockNumber, chainId) {
-  handleInt$1(gauge$17, chainId, blockNumber);
-}
-
-var ProcessingBlockNumber = {
-  gauge: gauge$17,
-  set: set$16
-};
-
-var gauge$18 = makeOrThrow$1("envio_processing_batch_size", "The number of items included in the currently processing batch for the chain.", chainIdLabelsSchema);
-
-function set$17(batchSize, chainId) {
-  handleInt$1(gauge$18, chainId, batchSize);
-}
-
-var ProcessingBatchSize = {
-  gauge: gauge$18,
-  set: set$17
-};
-
-var gauge$19 = new PromClient.Gauge({
+var gauge$17 = new PromClient.Gauge({
   name: "envio_processing_max_batch_size",
   help: "The maximum number of items to process in a single batch."
 });

-function set$18(maxBatchSize) {
-  gauge$19.set(maxBatchSize);
+function set$16(maxBatchSize) {
+  gauge$17.set(maxBatchSize);
 }

 var ProcessingMaxBatchSize = {
-  gauge: gauge$19,
-  set: set$18
+  gauge: gauge$17,
+  set: set$16
 };

-var gauge$20 = makeOrThrow$1("envio_progress_block_number", "The block number of the latest block processed and stored in the database.", chainIdLabelsSchema);
+var gauge$18 = makeOrThrow$1("envio_progress_block_number", "The block number of the latest block processed and stored in the database.", chainIdLabelsSchema);

-function set$19(blockNumber, chainId) {
-  handleInt$1(gauge$20, chainId, blockNumber);
+function set$17(blockNumber, chainId) {
+  handleInt$1(gauge$18, chainId, blockNumber);
 }

 var ProgressBlockNumber = {
-  gauge: gauge$20,
-  set: set$19
+  gauge: gauge$18,
+  set: set$17
 };

 var deprecatedGauge$1 = new PromClient.Gauge({
@@ -681,45 +659,70 @@ var deprecatedGauge$1 = new PromClient.Gauge({
   labelNames: ["chainId"]
 });

-var gauge$21 = makeOrThrow$1("envio_progress_events_count", "The number of events processed and reflected in the database.", chainIdLabelsSchema);
+var gauge$19 = makeOrThrow$1("envio_progress_events_count", "The number of events processed and reflected in the database.", chainIdLabelsSchema);

-function set$20(processedCount, chainId) {
+function set$18(processedCount, chainId) {
   deprecatedGauge$1.labels({
     chainId: chainId
   }).set(processedCount);
-  handleInt$1(gauge$21, chainId, processedCount);
+  handleInt$1(gauge$19, chainId, processedCount);
 }

 var ProgressEventsCount = {
   deprecatedGauge: deprecatedGauge$1,
-  gauge: gauge$21,
-  set: set$20
+  gauge: gauge$19,
+  set: set$18
+};
+
+var counter$5 = new PromClient.Counter({
+  name: "envio_progress_batches_count",
+  help: "The number of batches processed and reflected in the database."
+});
+
+function increment$5() {
+  counter$5.inc();
+}
+
+var ProgressBatchCount = {
+  counter: counter$5,
+  increment: increment$5
 };

 var effectLabelsSchema = S$RescriptSchema.object(function (s) {
   return s.f("effect", S$RescriptSchema.string);
 });

-var gauge$22 = makeOrThrow$1("envio_effect_calls_count", "The number of calls to the effect. Including both handler execution and cache hits.", effectLabelsSchema);
+var gauge$20 = makeOrThrow$1("envio_effect_calls_count", "The number of calls to the effect. Including both handler execution and cache hits.", effectLabelsSchema);

-function set$21(callsCount, effectName) {
-  handleInt$1(gauge$22, effectName, callsCount);
+function set$19(callsCount, effectName) {
+  handleInt$1(gauge$20, effectName, callsCount);
 }

 var EffectCallsCount = {
-  gauge: gauge$22,
-  set: set$21
+  gauge: gauge$20,
+  set: set$19
 };

-var gauge$23 = makeOrThrow$1("envio_effect_cache_count", "The number of items in the effect cache.", effectLabelsSchema);
+var gauge$21 = makeOrThrow$1("envio_effect_cache_count", "The number of items in the effect cache.", effectLabelsSchema);

-function set$22(count, effectName) {
-  handleInt$1(gauge$23, effectName, count);
+function set$20(count, effectName) {
+  handleInt$1(gauge$21, effectName, count);
 }

 var EffectCacheCount = {
-  gauge: gauge$23,
-  set: set$22
+  gauge: gauge$21,
+  set: set$20
+};
+
+var counter$6 = makeOrThrow("envio_effect_cache_invalidations_count", "The number of effect cache invalidations.", effectLabelsSchema);
+
+function increment$6(effectName) {
+  increment(counter$6, effectName);
+}
+
+var EffectCacheInvalidationsCount = {
+  counter: counter$6,
+  increment: increment$6
 };

 var operationLabelsSchema = S$RescriptSchema.object(function (s) {
@@ -730,7 +733,7 @@ var timeCounter$2 = makeOrThrow("envio_storage_load_time", "Processing time take

 var totalTimeCounter = makeOrThrow("envio_storage_load_total_time", "Cumulative time spent loading data from storage during the indexing process. (milliseconds)", operationLabelsSchema);

-var counter$5 = makeOrThrow("envio_storage_load_count", "Cumulative number of successful storage load operations during the indexing process.", operationLabelsSchema);
+var counter$7 = makeOrThrow("envio_storage_load_count", "Cumulative number of successful storage load operations during the indexing process.", operationLabelsSchema);

 var whereSizeCounter = makeOrThrow("envio_storage_load_where_size", "Cumulative number of filter conditions ('where' items) used in storage load operations during the indexing process.", operationLabelsSchema);

@@ -759,7 +762,7 @@ function endOperation(timerRef, operation, whereSize, size) {
     Utils.Dict.deleteInPlace(operations, operation);
   }
   handleInt(totalTimeCounter, operation, Hrtime.intFromMillis(Hrtime.toMillis(Hrtime.timeSince(timerRef))));
-  increment(counter$5, operation);
+  increment(counter$7, operation);
   handleInt(whereSizeCounter, operation, whereSize);
   handleInt(sizeCounter, operation, size);
 }
@@ -768,7 +771,7 @@ var StorageLoad = {
   operationLabelsSchema: operationLabelsSchema,
   timeCounter: timeCounter$2,
   totalTimeCounter: totalTimeCounter,
-  counter: counter$5,
+  counter: counter$7,
   whereSizeCounter: whereSizeCounter,
   sizeCounter: sizeCounter,
   operations: operations,
@@ -817,13 +820,13 @@ exports.RollbackEnabled = RollbackEnabled;
 exports.RollbackSuccess = RollbackSuccess;
 exports.RollbackHistoryPrune = RollbackHistoryPrune;
 exports.RollbackTargetBlockNumber = RollbackTargetBlockNumber;
-exports.ProcessingBlockNumber = ProcessingBlockNumber;
-exports.ProcessingBatchSize = ProcessingBatchSize;
 exports.ProcessingMaxBatchSize = ProcessingMaxBatchSize;
 exports.ProgressBlockNumber = ProgressBlockNumber;
 exports.ProgressEventsCount = ProgressEventsCount;
+exports.ProgressBatchCount = ProgressBatchCount;
 exports.effectLabelsSchema = effectLabelsSchema;
 exports.EffectCallsCount = EffectCallsCount;
 exports.EffectCacheCount = EffectCacheCount;
+exports.EffectCacheInvalidationsCount = EffectCacheInvalidationsCount;
 exports.StorageLoad = StorageLoad;
 /* loadEntitiesDurationCounter Not a pure module */
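
Net metrics change in this release: the per-chain gauges envio_processing_block_number and envio_processing_batch_size are removed, and two counters are added, envio_progress_batches_count (global) and envio_effect_cache_invalidations_count (labeled by effect). Dashboards referencing the removed envio_processing_* series should migrate to the envio_progress_* ones. Illustrative scrape output for the new series (sample values made up):

# HELP envio_progress_batches_count The number of batches processed and reflected in the database.
# TYPE envio_progress_batches_count counter
envio_progress_batches_count 1284

# HELP envio_effect_cache_invalidations_count The number of effect cache invalidations.
# TYPE envio_effect_cache_invalidations_count counter
envio_effect_cache_invalidations_count{effect="getTokenMetadata"} 3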
@@ -36,32 +36,31 @@ let make = (
 let getSafeCheckpointId = (safeCheckpointTracking: t, ~sourceBlockNumber: int) => {
   let safeBlockNumber = sourceBlockNumber - safeCheckpointTracking.maxReorgDepth

-  if safeCheckpointTracking.checkpointBlockNumbers->Belt.Array.getUnsafe(0) > safeBlockNumber {
-    0
-  } else {
-    let trackingCheckpointsCount = safeCheckpointTracking.checkpointBlockNumbers->Array.length
-    switch trackingCheckpointsCount {
-    | 1 => safeCheckpointTracking.checkpointIds->Belt.Array.getUnsafe(0)
-    | _ => {
-        let result = ref(None)
-        let idx = ref(1)
+  switch safeCheckpointTracking.checkpointIds {
+  | [] => 0
+  | _
+    if safeCheckpointTracking.checkpointBlockNumbers->Belt.Array.getUnsafe(0) > safeBlockNumber => 0
+  | [checkpointId] => checkpointId
+  | _ => {
+      let trackingCheckpointsCount = safeCheckpointTracking.checkpointIds->Array.length
+      let result = ref(None)
+      let idx = ref(1)

-        while idx.contents < trackingCheckpointsCount && result.contents === None {
-          if (
-            safeCheckpointTracking.checkpointBlockNumbers->Belt.Array.getUnsafe(idx.contents) >
-            safeBlockNumber
-          ) {
-            result :=
-              Some(safeCheckpointTracking.checkpointIds->Belt.Array.getUnsafe(idx.contents - 1))
-          }
-          idx := idx.contents + 1
+      while idx.contents < trackingCheckpointsCount && result.contents === None {
+        if (
+          safeCheckpointTracking.checkpointBlockNumbers->Belt.Array.getUnsafe(idx.contents) >
+          safeBlockNumber
+        ) {
+          result :=
+            Some(safeCheckpointTracking.checkpointIds->Belt.Array.getUnsafe(idx.contents - 1))
        }
+        idx := idx.contents + 1
+      }

-        switch result.contents {
-        | Some(checkpointId) => checkpointId
-        | None =>
-          safeCheckpointTracking.checkpointIds->Belt.Array.getUnsafe(trackingCheckpointsCount - 1)
-        }
+      switch result.contents {
+      | Some(checkpointId) => checkpointId
+      | None =>
+        safeCheckpointTracking.checkpointIds->Belt.Array.getUnsafe(trackingCheckpointsCount - 1)
      }
    }
  }
@@ -21,13 +21,17 @@ function make(maxReorgDepth, shouldRollbackOnReorg, chainReorgCheckpoints) {

 function getSafeCheckpointId(safeCheckpointTracking, sourceBlockNumber) {
   var safeBlockNumber = sourceBlockNumber - safeCheckpointTracking.maxReorgDepth | 0;
+  var match = safeCheckpointTracking.checkpointIds;
+  if (match.length === 0) {
+    return 0;
+  }
   if (safeCheckpointTracking.checkpointBlockNumbers[0] > safeBlockNumber) {
     return 0;
   }
-  var trackingCheckpointsCount = safeCheckpointTracking.checkpointBlockNumbers.length;
-  if (trackingCheckpointsCount === 1) {
-    return safeCheckpointTracking.checkpointIds[0];
+  if (match.length === 1) {
+    return match[0];
   }
+  var trackingCheckpointsCount = safeCheckpointTracking.checkpointIds.length;
   var result;
   var idx = 1;
   while(idx < trackingCheckpointsCount && result === undefined) {
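
Besides restructuring, the rewrite adds a guard for an empty checkpointIds array; previously checkpointBlockNumbers[0] was read unconditionally, which yields undefined when no checkpoints are tracked. A standalone sketch of the selection rule the function implements (plain JavaScript, same shape as the compiled code):

// checkpointBlockNumbers is ascending; pick the id of the last checkpoint
// whose block number is <= safeBlockNumber, or 0 if none qualifies.
function getSafeCheckpointId(checkpointIds, checkpointBlockNumbers, safeBlockNumber) {
  if (checkpointIds.length === 0) return 0;             // new guard in this release
  if (checkpointBlockNumbers[0] > safeBlockNumber) return 0;
  let result = checkpointIds[checkpointIds.length - 1]; // all checkpoints are safe
  for (let idx = 1; idx < checkpointIds.length; idx++) {
    if (checkpointBlockNumbers[idx] > safeBlockNumber) {
      result = checkpointIds[idx - 1];                  // first unsafe -> previous id
      break;
    }
  }
  return result;
}

// e.g. ids [10, 20, 30] at blocks [100, 200, 300], safe block 250 -> 20
console.log(getSafeCheckpointId([10, 20, 30], [100, 200, 300], 250));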
@@ -42,9 +42,20 @@ type t<'entity> = {
   makeGetRollbackRestoredEntitiesQuery: (~pgSchema: string) => string,
 }

-let historyTableName = (~entityName) => "envio_history_" ++ entityName
+let maxPgTableNameLength = 63
+let historyTablePrefix = "envio_history_"
+let historyTableName = (~entityName, ~entityIndex) => {
+  let fullName = historyTablePrefix ++ entityName
+  if fullName->String.length > maxPgTableNameLength {
+    let entityIndexStr = entityIndex->Belt.Int.toString
+    fullName->Js.String.slice(~from=0, ~to_=maxPgTableNameLength - entityIndexStr->String.length) ++
+    entityIndexStr
+  } else {
+    fullName
+  }
+}

-let fromTable = (table: table, ~schema: S.t<'entity>): t<'entity> => {
+let fromTable = (table: table, ~schema: S.t<'entity>, ~entityIndex): t<'entity> => {
   let id = "id"

   let dataFields = table.fields->Belt.Array.keepMap(field =>
@@ -76,10 +87,8 @@ let fromTable = (table: table, ~schema: S.t<'entity>): t<'entity> => {
     ~isPrimaryKey=true,
   )

-  // let dataFieldNames = dataFields->Belt.Array.map(field => field->getFieldName)
-
   let entityTableName = table.tableName
-  let historyTableName = historyTableName(~entityName=entityTableName)
+  let historyTableName = historyTableName(~entityName=entityTableName, ~entityIndex)
   //ignore composite indices
   let table = mkTable(
     historyTableName,
@@ -179,8 +188,8 @@ type safeReorgBlocks = {
 // - Rollbacks will not cross the safe checkpoint id, so rows older than the anchor can never be referenced again.
 // - If nothing changed in reorg threshold (after the safe checkpoint), the current state for that id can be reconstructed from the
 //   origin table; we do not need a pre-safe anchor for it.
-let makePruneStaleEntityHistoryQuery = (~entityName, ~pgSchema) => {
-  let historyTableRef = `"${pgSchema}"."${historyTableName(~entityName)}"`
+let makePruneStaleEntityHistoryQuery = (~entityName, ~entityIndex, ~pgSchema) => {
+  let historyTableRef = `"${pgSchema}"."${historyTableName(~entityName, ~entityIndex)}"`

   `WITH anchors AS (
   SELECT t.id, MAX(t.${checkpointIdFieldName}) AS keep_checkpoint_id
@@ -202,16 +211,23 @@ WHERE d.id = a.id
   );`
 }

-let pruneStaleEntityHistory = (sql, ~entityName, ~pgSchema, ~safeCheckpointId): promise<unit> => {
+let pruneStaleEntityHistory = (
+  sql,
+  ~entityName,
+  ~entityIndex,
+  ~pgSchema,
+  ~safeCheckpointId,
+): promise<unit> => {
   sql->Postgres.preparedUnsafe(
-    makePruneStaleEntityHistoryQuery(~entityName, ~pgSchema),
+    makePruneStaleEntityHistoryQuery(~entityName, ~entityIndex, ~pgSchema),
     [safeCheckpointId]->Utils.magic,
   )
 }

 // If an entity doesn't have a history before the update
 // we create it automatically with checkpoint_id 0
-let makeBackfillHistoryQuery = (~pgSchema, ~entityName) => {
+let makeBackfillHistoryQuery = (~pgSchema, ~entityName, ~entityIndex) => {
+  let historyTableRef = `"${pgSchema}"."${historyTableName(~entityName, ~entityIndex)}"`
   `WITH target_ids AS (
   SELECT UNNEST($1::${(Text: Table.fieldType :> string)}[]) AS id
 ),
@@ -219,17 +235,20 @@ missing_history AS (
   SELECT e.*
   FROM "${pgSchema}"."${entityName}" e
   JOIN target_ids t ON e.id = t.id
-  LEFT JOIN "${pgSchema}"."${historyTableName(~entityName)}" h ON h.id = e.id
+  LEFT JOIN ${historyTableRef} h ON h.id = e.id
   WHERE h.id IS NULL
 )
-INSERT INTO "${pgSchema}"."${historyTableName(~entityName)}"
+INSERT INTO ${historyTableRef}
 SELECT *, 0 AS ${checkpointIdFieldName}, '${(RowAction.SET :> string)}' as ${changeFieldName}
 FROM missing_history;`
 }

-let backfillHistory = (sql, ~pgSchema, ~entityName, ~ids: array<string>) => {
+let backfillHistory = (sql, ~pgSchema, ~entityName, ~entityIndex, ~ids: array<string>) => {
   sql
-  ->Postgres.preparedUnsafe(makeBackfillHistoryQuery(~entityName, ~pgSchema), [ids]->Obj.magic)
+  ->Postgres.preparedUnsafe(
+    makeBackfillHistoryQuery(~entityName, ~entityIndex, ~pgSchema),
+    [ids]->Obj.magic,
+  )
   ->Promise.ignoreValue
 }

@@ -248,11 +267,12 @@ let insertDeleteUpdates = (
   ->Promise.ignoreValue
 }

-let rollback = (sql, ~pgSchema, ~entityName, ~rollbackTargetCheckpointId: int) => {
+let rollback = (sql, ~pgSchema, ~entityName, ~entityIndex, ~rollbackTargetCheckpointId: int) => {
   sql
   ->Postgres.preparedUnsafe(
     `DELETE FROM "${pgSchema}"."${historyTableName(
       ~entityName,
+      ~entityIndex,
     )}" WHERE "${checkpointIdFieldName}" > $1;`,
     [rollbackTargetCheckpointId]->Utils.magic,
   )
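
For reference, makeBackfillHistoryQuery seeds a checkpoint-0 history row for every target id that has no history yet, so later rollbacks always find an anchor row. Rendered with placeholder names ("public"/"Pool"); the checkpoint_id column name is visible elsewhere in this diff, while the action column name below is an assumption standing in for changeFieldName:

// What makeBackfillHistoryQuery roughly evaluates to for entity "Pool".
const backfillSql = `WITH target_ids AS (
  SELECT UNNEST($1::TEXT[]) AS id
),
missing_history AS (
  SELECT e.*
  FROM "public"."Pool" e
  JOIN target_ids t ON e.id = t.id
  LEFT JOIN "public"."envio_history_Pool" h ON h.id = e.id
  WHERE h.id IS NULL
)
INSERT INTO "public"."envio_history_Pool"
SELECT *, 0 AS checkpoint_id, 'SET' as action
FROM missing_history;`;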
@@ -2,6 +2,7 @@
 'use strict';

 var Table = require("./Table.res.js");
+var Js_string = require("rescript/lib/js/js_string.js");
 var Belt_Array = require("rescript/lib/js/belt_Array.js");
 var S$RescriptSchema = require("rescript-schema/src/S.res.js");

@@ -38,11 +39,18 @@ function makeSetUpdateSchema(entitySchema) {
   });
 }

-function historyTableName(entityName) {
-  return "envio_history_" + entityName;
+var historyTablePrefix = "envio_history_";
+
+function historyTableName(entityName, entityIndex) {
+  var fullName = historyTablePrefix + entityName;
+  if (fullName.length <= 63) {
+    return fullName;
+  }
+  var entityIndexStr = String(entityIndex);
+  return Js_string.slice(0, 63 - entityIndexStr.length | 0, fullName) + entityIndexStr;
 }

-function fromTable(table, schema) {
+function fromTable(table, schema, entityIndex) {
   var dataFields = Belt_Array.keepMap(table.fields, (function (field) {
     if (field.TAG !== "Field") {
       return ;
@@ -84,8 +92,8 @@ function fromTable(table, schema) {
   var actionField = Table.mkField(changeFieldName, name, S$RescriptSchema.never, undefined, undefined, undefined, undefined, undefined, undefined);
   var checkpointIdField = Table.mkField(checkpointIdFieldName, "INTEGER", S$RescriptSchema.$$int, undefined, undefined, undefined, true, undefined, undefined);
   var entityTableName = table.tableName;
-  var historyTableName = "envio_history_" + entityTableName;
-  var table$1 = Table.mkTable(historyTableName, undefined, Belt_Array.concat(dataFields, [
+  var historyTableName$1 = historyTableName(entityTableName, entityIndex);
+  var table$1 = Table.mkTable(historyTableName$1, undefined, Belt_Array.concat(dataFields, [
     checkpointIdField,
     actionField
   ]));
@@ -109,7 +117,7 @@ function fromTable(table, schema) {
   }));
   var selectPartsStr = selectParts.join(", ");
   var makeInsertDeleteUpdatesQuery = function (pgSchema) {
-    return "INSERT INTO \"" + pgSchema + "\".\"" + historyTableName + "\" (" + allFieldNamesStr + ")\nSELECT " + selectPartsStr + "\nFROM UNNEST($1::text[], $2::int[]) AS u(id, checkpoint_id)";
+    return "INSERT INTO \"" + pgSchema + "\".\"" + historyTableName$1 + "\" (" + allFieldNamesStr + ")\nSELECT " + selectPartsStr + "\nFROM UNNEST($1::text[], $2::int[]) AS u(id, checkpoint_id)";
   };
   var dataFieldNames = Belt_Array.keep(Belt_Array.map(table$1.fields, (function (field) {
     return Table.getFieldName(field);
@@ -124,10 +132,10 @@ function fromTable(table, schema) {
     return "\"" + name + "\"";
   })).join(", ");
   var makeGetRollbackRemovedIdsQuery = function (pgSchema) {
-    return "SELECT DISTINCT id\nFROM \"" + pgSchema + "\".\"" + historyTableName + "\"\nWHERE \"" + checkpointIdFieldName + "\" > $1\n AND NOT EXISTS (\n SELECT 1\n FROM \"" + pgSchema + "\".\"" + historyTableName + "\" h\n WHERE h.id = \"" + historyTableName + "\".id\n AND h.\"" + checkpointIdFieldName + "\" <= $1\n )";
+    return "SELECT DISTINCT id\nFROM \"" + pgSchema + "\".\"" + historyTableName$1 + "\"\nWHERE \"" + checkpointIdFieldName + "\" > $1\n AND NOT EXISTS (\n SELECT 1\n FROM \"" + pgSchema + "\".\"" + historyTableName$1 + "\" h\n WHERE h.id = \"" + historyTableName$1 + "\".id\n AND h.\"" + checkpointIdFieldName + "\" <= $1\n )";
   };
   var makeGetRollbackRestoredEntitiesQuery = function (pgSchema) {
-    return "SELECT DISTINCT ON (id) " + dataFieldsCommaSeparated + "\nFROM \"" + pgSchema + "\".\"" + historyTableName + "\"\nWHERE \"" + checkpointIdFieldName + "\" <= $1\n AND EXISTS (\n SELECT 1\n FROM \"" + pgSchema + "\".\"" + historyTableName + "\" h\n WHERE h.id = \"" + historyTableName + "\".id\n AND h.\"" + checkpointIdFieldName + "\" > $1\n )\nORDER BY id, \"" + checkpointIdFieldName + "\" DESC";
+    return "SELECT DISTINCT ON (id) " + dataFieldsCommaSeparated + "\nFROM \"" + pgSchema + "\".\"" + historyTableName$1 + "\"\nWHERE \"" + checkpointIdFieldName + "\" <= $1\n AND EXISTS (\n SELECT 1\n FROM \"" + pgSchema + "\".\"" + historyTableName$1 + "\" h\n WHERE h.id = \"" + historyTableName$1 + "\".id\n AND h.\"" + checkpointIdFieldName + "\" > $1\n )\nORDER BY id, \"" + checkpointIdFieldName + "\" DESC";
   };
   return {
     table: table$1,
@@ -139,21 +147,22 @@ function fromTable(table, schema) {
   };
 }

-function makePruneStaleEntityHistoryQuery(entityName, pgSchema) {
-  var historyTableRef = "\"" + pgSchema + "\".\"envio_history_" + entityName + "\"";
+function makePruneStaleEntityHistoryQuery(entityName, entityIndex, pgSchema) {
+  var historyTableRef = "\"" + pgSchema + "\".\"" + historyTableName(entityName, entityIndex) + "\"";
   return "WITH anchors AS (\n SELECT t.id, MAX(t." + checkpointIdFieldName + ") AS keep_checkpoint_id\n FROM " + historyTableRef + " t WHERE t." + checkpointIdFieldName + " <= $1\n GROUP BY t.id\n)\nDELETE FROM " + historyTableRef + " d\nUSING anchors a\nWHERE d.id = a.id\n AND (\n d." + checkpointIdFieldName + " < a.keep_checkpoint_id\n OR (\n d." + checkpointIdFieldName + " = a.keep_checkpoint_id AND\n NOT EXISTS (\n SELECT 1 FROM " + historyTableRef + " ps \n WHERE ps.id = d.id AND ps." + checkpointIdFieldName + " > $1\n ) \n )\n );";
 }

-function pruneStaleEntityHistory(sql, entityName, pgSchema, safeCheckpointId) {
-  return sql.unsafe(makePruneStaleEntityHistoryQuery(entityName, pgSchema), [safeCheckpointId], {prepare: true});
+function pruneStaleEntityHistory(sql, entityName, entityIndex, pgSchema, safeCheckpointId) {
+  return sql.unsafe(makePruneStaleEntityHistoryQuery(entityName, entityIndex, pgSchema), [safeCheckpointId], {prepare: true});
 }

-function makeBackfillHistoryQuery(pgSchema, entityName) {
-  return "WITH target_ids AS (\n SELECT UNNEST($1::TEXT[]) AS id\n),\nmissing_history AS (\n SELECT e.*\n FROM \"" + pgSchema + "\".\"" + entityName + "\" e\n JOIN target_ids t ON e.id = t.id\n LEFT JOIN \"" + pgSchema + "\".\"envio_history_" + entityName + "\" h ON h.id = e.id\n WHERE h.id IS NULL\n)\nINSERT INTO \"" + pgSchema + "\".\"envio_history_" + entityName + "\"\nSELECT *, 0 AS " + checkpointIdFieldName + ", '" + "SET" + "' as " + changeFieldName + "\nFROM missing_history;";
+function makeBackfillHistoryQuery(pgSchema, entityName, entityIndex) {
+  var historyTableRef = "\"" + pgSchema + "\".\"" + historyTableName(entityName, entityIndex) + "\"";
+  return "WITH target_ids AS (\n SELECT UNNEST($1::TEXT[]) AS id\n),\nmissing_history AS (\n SELECT e.*\n FROM \"" + pgSchema + "\".\"" + entityName + "\" e\n JOIN target_ids t ON e.id = t.id\n LEFT JOIN " + historyTableRef + " h ON h.id = e.id\n WHERE h.id IS NULL\n)\nINSERT INTO " + historyTableRef + "\nSELECT *, 0 AS " + checkpointIdFieldName + ", '" + "SET" + "' as " + changeFieldName + "\nFROM missing_history;";
 }

-function backfillHistory(sql, pgSchema, entityName, ids) {
-  return sql.unsafe(makeBackfillHistoryQuery(pgSchema, entityName), [ids], {prepare: true});
+function backfillHistory(sql, pgSchema, entityName, entityIndex, ids) {
+  return sql.unsafe(makeBackfillHistoryQuery(pgSchema, entityName, entityIndex), [ids], {prepare: true});
 }

 function insertDeleteUpdates(sql, pgSchema, entityHistory, batchDeleteEntityIds, batchDeleteCheckpointIds) {
@@ -163,14 +172,18 @@ function insertDeleteUpdates(sql, pgSchema, entityHistory, batchDeleteEntityIds,
   ], {prepare: true});
 }

-function rollback(sql, pgSchema, entityName, rollbackTargetCheckpointId) {
-  return sql.unsafe("DELETE FROM \"" + pgSchema + "\".\"envio_history_" + entityName + "\" WHERE \"" + checkpointIdFieldName + "\" > $1;", [rollbackTargetCheckpointId], {prepare: true});
+function rollback(sql, pgSchema, entityName, entityIndex, rollbackTargetCheckpointId) {
+  return sql.unsafe("DELETE FROM \"" + pgSchema + "\".\"" + historyTableName(entityName, entityIndex) + "\" WHERE \"" + checkpointIdFieldName + "\" > $1;", [rollbackTargetCheckpointId], {prepare: true});
 }

+var maxPgTableNameLength = 63;
+
 exports.RowAction = RowAction;
 exports.changeFieldName = changeFieldName;
 exports.checkpointIdFieldName = checkpointIdFieldName;
 exports.makeSetUpdateSchema = makeSetUpdateSchema;
+exports.maxPgTableNameLength = maxPgTableNameLength;
+exports.historyTablePrefix = historyTablePrefix;
 exports.historyTableName = historyTableName;
 exports.fromTable = fromTable;
 exports.makePruneStaleEntityHistoryQuery = makePruneStaleEntityHistoryQuery;
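
PostgreSQL truncates identifiers longer than 63 bytes, so prefixing "envio_history_" onto two long entity names could previously produce colliding history table names. The new historyTableName trims the name and appends the entity's unique index instead. A standalone mirror of the compiled logic:

// Standalone mirror of the new historyTableName logic.
const MAX_PG_TABLE_NAME_LENGTH = 63;
const HISTORY_TABLE_PREFIX = "envio_history_";

function historyTableName(entityName, entityIndex) {
  const fullName = HISTORY_TABLE_PREFIX + entityName;
  if (fullName.length <= MAX_PG_TABLE_NAME_LENGTH) {
    return fullName;
  }
  const entityIndexStr = String(entityIndex);
  // Keep the name under the Postgres limit while staying unique per entity.
  return fullName.slice(0, MAX_PG_TABLE_NAME_LENGTH - entityIndexStr.length) + entityIndexStr;
}

console.log(historyTableName("Pool", 0));
// "envio_history_Pool"
console.log(historyTableName("A".repeat(60), 12));
// 63 chars: truncated name suffixed with the entity index "12"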
@@ -7,6 +7,7 @@ let isIndex = true

 module DynamicContractRegistry = {
   let name = "dynamic_contract_registry"
+  let index = -1

   let makeId = (~chainId, ~contractAddress) => {
     chainId->Belt.Int.toString ++ "-" ++ contractAddress->Address.toString
   }
@@ -58,12 +59,13 @@ module DynamicContractRegistry = {
     ],
   )

-  let entityHistory = table->EntityHistory.fromTable(~schema)
+  let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index)

   external castToInternal: t => Internal.entity = "%identity"

   let config = {
     name,
+    index,
     schema,
     rowsSchema,
     table,
@@ -606,35 +608,35 @@

 let makeMetaViewQuery = (~pgSchema) => {
   `CREATE VIEW "${pgSchema}"."${metaViewName}" AS
-  SELECT
-    "${(#id: Chains.field :> string)}" AS "chainId",
-    "${(#start_block: Chains.field :> string)}" AS "startBlock",
-    "${(#end_block: Chains.field :> string)}" AS "endBlock",
-    "${(#progress_block: Chains.field :> string)}" AS "progressBlock",
-    "${(#buffer_block: Chains.field :> string)}" AS "bufferBlock",
-    "${(#first_event_block: Chains.field :> string)}" AS "firstEventBlock",
-    "${(#events_processed: Chains.field :> string)}" AS "eventsProcessed",
-    "${(#source_block: Chains.field :> string)}" AS "sourceBlock",
-    "${(#ready_at: Chains.field :> string)}" AS "readyAt",
-    ("${(#ready_at: Chains.field :> string)}" IS NOT NULL) AS "isReady"
-  FROM "${pgSchema}"."${Chains.table.tableName}"
-  ORDER BY "${(#id: Chains.field :> string)}";`
+SELECT
+  "${(#id: Chains.field :> string)}" AS "chainId",
+  "${(#start_block: Chains.field :> string)}" AS "startBlock",
+  "${(#end_block: Chains.field :> string)}" AS "endBlock",
+  "${(#progress_block: Chains.field :> string)}" AS "progressBlock",
+  "${(#buffer_block: Chains.field :> string)}" AS "bufferBlock",
+  "${(#first_event_block: Chains.field :> string)}" AS "firstEventBlock",
+  "${(#events_processed: Chains.field :> string)}" AS "eventsProcessed",
+  "${(#source_block: Chains.field :> string)}" AS "sourceBlock",
+  "${(#ready_at: Chains.field :> string)}" AS "readyAt",
+  ("${(#ready_at: Chains.field :> string)}" IS NOT NULL) AS "isReady"
+FROM "${pgSchema}"."${Chains.table.tableName}"
+ORDER BY "${(#id: Chains.field :> string)}";`
 }

 let makeChainMetadataViewQuery = (~pgSchema) => {
   `CREATE VIEW "${pgSchema}"."${chainMetadataViewName}" AS
-  SELECT
-    "${(#source_block: Chains.field :> string)}" AS "block_height",
-    "${(#id: Chains.field :> string)}" AS "chain_id",
-    "${(#end_block: Chains.field :> string)}" AS "end_block",
-    "${(#first_event_block: Chains.field :> string)}" AS "first_event_block_number",
-    "${(#_is_hyper_sync: Chains.field :> string)}" AS "is_hyper_sync",
-    "${(#buffer_block: Chains.field :> string)}" AS "latest_fetched_block_number",
-    "${(#progress_block: Chains.field :> string)}" AS "latest_processed_block",
-    "${(#_num_batches_fetched: Chains.field :> string)}" AS "num_batches_fetched",
-    "${(#events_processed: Chains.field :> string)}" AS "num_events_processed",
-    "${(#start_block: Chains.field :> string)}" AS "start_block",
-    "${(#ready_at: Chains.field :> string)}" AS "timestamp_caught_up_to_head_or_endblock"
-  FROM "${pgSchema}"."${Chains.table.tableName}";`
+SELECT
+  "${(#source_block: Chains.field :> string)}" AS "block_height",
+  "${(#id: Chains.field :> string)}" AS "chain_id",
+  "${(#end_block: Chains.field :> string)}" AS "end_block",
+  "${(#first_event_block: Chains.field :> string)}" AS "first_event_block_number",
+  "${(#_is_hyper_sync: Chains.field :> string)}" AS "is_hyper_sync",
+  "${(#buffer_block: Chains.field :> string)}" AS "latest_fetched_block_number",
+  "${(#progress_block: Chains.field :> string)}" AS "latest_processed_block",
+  "${(#_num_batches_fetched: Chains.field :> string)}" AS "num_batches_fetched",
+  "${(#events_processed: Chains.field :> string)}" AS "num_events_processed",
+  "${(#start_block: Chains.field :> string)}" AS "start_block",
+  "${(#ready_at: Chains.field :> string)}" AS "timestamp_caught_up_to_head_or_endblock"
+FROM "${pgSchema}"."${Chains.table.tableName}";`
 }
 }
@@ -47,10 +47,11 @@ var table = Table.mkTable(name, undefined, [
   Table.mkField("contract_name", "TEXT", S$RescriptSchema.string, undefined, undefined, undefined, undefined, undefined, undefined)
 ]);

-var entityHistory = EntityHistory.fromTable(table, schema);
+var entityHistory = EntityHistory.fromTable(table, schema, -1);

 var config = {
   name: name,
+  index: -1,
   schema: schema,
   rowsSchema: rowsSchema,
   table: table,
@@ -59,6 +60,7 @@ var config = {

 var DynamicContractRegistry = {
   name: name,
+  index: -1,
   makeId: makeId,
   schema: schema,
   rowsSchema: rowsSchema,
@@ -362,11 +364,11 @@ var metaViewName = "_meta";

 var chainMetadataViewName = "chain_metadata";

 function makeMetaViewQuery(pgSchema) {
-  return "CREATE VIEW \"" + pgSchema + "\".\"" + metaViewName + "\" AS \n SELECT \n \"" + "id" + "\" AS \"chainId\",\n \"" + "start_block" + "\" AS \"startBlock\", \n \"" + "end_block" + "\" AS \"endBlock\",\n \"" + "progress_block" + "\" AS \"progressBlock\",\n \"" + "buffer_block" + "\" AS \"bufferBlock\",\n \"" + "first_event_block" + "\" AS \"firstEventBlock\",\n \"" + "events_processed" + "\" AS \"eventsProcessed\",\n \"" + "source_block" + "\" AS \"sourceBlock\",\n \"" + "ready_at" + "\" AS \"readyAt\",\n (\"" + "ready_at" + "\" IS NOT NULL) AS \"isReady\"\n FROM \"" + pgSchema + "\".\"" + table$1.tableName + "\"\n ORDER BY \"" + "id" + "\";";
+  return "CREATE VIEW \"" + pgSchema + "\".\"" + metaViewName + "\" AS \nSELECT \n \"" + "id" + "\" AS \"chainId\",\n \"" + "start_block" + "\" AS \"startBlock\", \n \"" + "end_block" + "\" AS \"endBlock\",\n \"" + "progress_block" + "\" AS \"progressBlock\",\n \"" + "buffer_block" + "\" AS \"bufferBlock\",\n \"" + "first_event_block" + "\" AS \"firstEventBlock\",\n \"" + "events_processed" + "\" AS \"eventsProcessed\",\n \"" + "source_block" + "\" AS \"sourceBlock\",\n \"" + "ready_at" + "\" AS \"readyAt\",\n (\"" + "ready_at" + "\" IS NOT NULL) AS \"isReady\"\nFROM \"" + pgSchema + "\".\"" + table$1.tableName + "\"\nORDER BY \"" + "id" + "\";";
 }

 function makeChainMetadataViewQuery(pgSchema) {
-  return "CREATE VIEW \"" + pgSchema + "\".\"" + chainMetadataViewName + "\" AS \n SELECT \n \"" + "source_block" + "\" AS \"block_height\",\n \"" + "id" + "\" AS \"chain_id\",\n \"" + "end_block" + "\" AS \"end_block\", \n \"" + "first_event_block" + "\" AS \"first_event_block_number\",\n \"" + "_is_hyper_sync" + "\" AS \"is_hyper_sync\",\n \"" + "buffer_block" + "\" AS \"latest_fetched_block_number\",\n \"" + "progress_block" + "\" AS \"latest_processed_block\",\n \"" + "_num_batches_fetched" + "\" AS \"num_batches_fetched\",\n \"" + "events_processed" + "\" AS \"num_events_processed\",\n \"" + "start_block" + "\" AS \"start_block\",\n \"" + "ready_at" + "\" AS \"timestamp_caught_up_to_head_or_endblock\"\n FROM \"" + pgSchema + "\".\"" + table$1.tableName + "\";";
+  return "CREATE VIEW \"" + pgSchema + "\".\"" + chainMetadataViewName + "\" AS \nSELECT \n \"" + "source_block" + "\" AS \"block_height\",\n \"" + "id" + "\" AS \"chain_id\",\n \"" + "end_block" + "\" AS \"end_block\", \n \"" + "first_event_block" + "\" AS \"first_event_block_number\",\n \"" + "_is_hyper_sync" + "\" AS \"is_hyper_sync\",\n \"" + "buffer_block" + "\" AS \"latest_fetched_block_number\",\n \"" + "progress_block" + "\" AS \"latest_processed_block\",\n \"" + "_num_batches_fetched" + "\" AS \"num_batches_fetched\",\n \"" + "events_processed" + "\" AS \"num_events_processed\",\n \"" + "start_block" + "\" AS \"start_block\",\n \"" + "ready_at" + "\" AS \"timestamp_caught_up_to_head_or_endblock\"\nFROM \"" + pgSchema + "\".\"" + table$1.tableName + "\";";
 }

 var Views = {