envio 2.32.1 → 3.0.0-alpha-main-clickhouse-sink

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/index.d.ts +1 -0
  2. package/package.json +6 -5
  3. package/src/Batch.res +4 -4
  4. package/src/Change.res +9 -0
  5. package/src/Change.res.js +2 -0
  6. package/src/Config.res +5 -5
  7. package/src/Config.res.js +3 -1
  8. package/src/Envio.gen.ts +3 -3
  9. package/src/Envio.res +14 -3
  10. package/src/EventRegister.res +3 -11
  11. package/src/EventRegister.res.js +4 -8
  12. package/src/EventRegister.resi +1 -1
  13. package/src/InMemoryStore.gen.ts +6 -0
  14. package/src/InMemoryStore.res +149 -0
  15. package/src/InMemoryStore.res.js +161 -0
  16. package/src/InMemoryTable.res +50 -35
  17. package/src/InMemoryTable.res.js +52 -84
  18. package/src/Internal.gen.ts +0 -2
  19. package/src/Internal.res +20 -38
  20. package/src/Internal.res.js +2 -16
  21. package/src/LoadManager.res +23 -16
  22. package/src/LoadManager.res.js +17 -15
  23. package/src/Persistence.res +190 -38
  24. package/src/Persistence.res.js +92 -39
  25. package/src/PgStorage.res +700 -14
  26. package/src/PgStorage.res.js +431 -19
  27. package/src/Platform.res +141 -0
  28. package/src/Platform.res.js +170 -0
  29. package/src/Prometheus.res +41 -0
  30. package/src/Prometheus.res.js +45 -0
  31. package/src/SafeCheckpointTracking.res +5 -4
  32. package/src/Sink.res +47 -0
  33. package/src/Sink.res.js +36 -0
  34. package/src/Utils.res +2 -0
  35. package/src/Utils.res.js +3 -0
  36. package/src/bindings/ClickHouse.res +387 -0
  37. package/src/bindings/ClickHouse.res.js +274 -0
  38. package/src/bindings/Postgres.res +15 -0
  39. package/src/bindings/Promise.res +3 -0
  40. package/src/db/EntityHistory.res +33 -156
  41. package/src/db/EntityHistory.res.js +40 -115
  42. package/src/db/InternalTable.res +56 -55
  43. package/src/db/InternalTable.res.js +49 -52
  44. package/src/db/Table.res +86 -22
  45. package/src/db/Table.res.js +77 -10
package/src/db/EntityHistory.res
@@ -5,41 +5,47 @@ module RowAction = {
  let variants = [SET, DELETE]
  let name = "ENVIO_HISTORY_CHANGE"
  let schema = S.enum(variants)
- }
-
- type entityUpdateAction<'entityType> =
- | Set('entityType)
- | Delete
-
- type entityUpdate<'entityType> = {
- entityId: string,
- entityUpdateAction: entityUpdateAction<'entityType>,
- checkpointId: int,
+ let config: Table.enumConfig<t> = {
+ name,
+ variants,
+ schema,
+ }
  }

  // Prefix with envio_ to avoid colleasions
  let changeFieldName = "envio_change"
- let checkpointIdFieldName = "checkpoint_id"
+ let checkpointIdFieldName = "envio_checkpoint_id"
+ let checkpointIdFieldType = Uint32
+ let changeFieldType = Enum({config: RowAction.config->Table.fromGenericEnumConfig})
+
+ let unsafeCheckpointIdSchema =
+ S.string
+ ->S.setName("CheckpointId")
+ ->S.transform(s => {
+ parser: string =>
+ switch string->Belt.Float.fromString {
+ | Some(float) => float
+ | None => s.fail("The string is not valid CheckpointId")
+ },
+ serializer: float => float->Belt.Float.toString,
+ })

- let makeSetUpdateSchema: S.t<'entity> => S.t<entityUpdate<'entity>> = entitySchema => {
+ let makeSetUpdateSchema: S.t<'entity> => S.t<Change.t<'entity>> = entitySchema => {
  S.object(s => {
  s.tag(changeFieldName, RowAction.SET)
- {
- checkpointId: s.field(checkpointIdFieldName, S.int),
- entityId: s.field("id", S.string),
- entityUpdateAction: Set(s.flatten(entitySchema)),
- }
+ Change.Set({
+ checkpointId: s.field(checkpointIdFieldName, unsafeCheckpointIdSchema),
+ entityId: s.field(Table.idFieldName, S.string),
+ entity: s.flatten(entitySchema),
+ })
  })
  }

- type t<'entity> = {
- table: table,
- setUpdateSchema: S.t<entityUpdate<'entity>>,
+ type pgEntityHistory<'entity> = {
+ table: Table.table,
+ setChangeSchema: S.t<Change.t<'entity>>,
  // Used for parsing
- setUpdateSchemaRows: S.t<array<entityUpdate<'entity>>>,
- makeInsertDeleteUpdatesQuery: (~pgSchema: string) => string,
- makeGetRollbackRemovedIdsQuery: (~pgSchema: string) => string,
- makeGetRollbackRestoredEntitiesQuery: (~pgSchema: string) => string,
+ setChangeSchemaRows: S.t<array<Change.t<'entity>>>,
  }

  let maxPgTableNameLength = 63
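The new `Change.Set({...})` constructor above replaces the removed `entityUpdate<'entity>` record. Change.res itself (new in this release, +9 lines) is not shown in this section; judging from the constructor usage here and the `type: "SET"` object emitted in the compiled EntityHistory.res.js further down, its shape is plausibly something like the following sketch; the `Delete` payload in particular is an assumption:

// Hypothetical reconstruction of Change.t, not the published source.
// @tag("type") matches the runtime `type: "SET"` discriminator visible in EntityHistory.res.js;
// the Delete payload shape is a guess.
@tag("type")
type t<'entity> =
  | @as("SET") Set({checkpointId: float, entityId: string, entity: 'entity})
  | @as("DELETE") Delete({checkpointId: float, entityId: string})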
@@ -55,120 +61,6 @@ let historyTableName = (~entityName, ~entityIndex) => {
  }
  }

- let fromTable = (table: table, ~schema: S.t<'entity>, ~entityIndex): t<'entity> => {
- let id = "id"
-
- let dataFields = table.fields->Belt.Array.keepMap(field =>
- switch field {
- | Field(field) =>
- switch field.fieldName {
- //id is not nullable and should be part of the pk
- | "id" => {...field, fieldName: id, isPrimaryKey: true}->Field->Some
- | _ =>
- {
- ...field,
- isNullable: true, //All entity fields are nullable in the case
- isIndex: false, //No need to index any additional entity data fields in entity history
- }
- ->Field
- ->Some
- }
-
- | DerivedFrom(_) => None
- }
- )
-
- let actionField = mkField(changeFieldName, Custom(RowAction.name), ~fieldSchema=S.never)
-
- let checkpointIdField = mkField(
- checkpointIdFieldName,
- Integer,
- ~fieldSchema=S.int,
- ~isPrimaryKey=true,
- )
-
- let entityTableName = table.tableName
- let historyTableName = historyTableName(~entityName=entityTableName, ~entityIndex)
- //ignore composite indices
- let table = mkTable(
- historyTableName,
- ~fields=dataFields->Belt.Array.concat([checkpointIdField, actionField]),
- )
-
- let setUpdateSchema = makeSetUpdateSchema(schema)
-
- let makeInsertDeleteUpdatesQuery = {
- // Get all field names for the INSERT statement
- let allFieldNames = table.fields->Belt.Array.map(field => field->getFieldName)
- let allFieldNamesStr =
- allFieldNames->Belt.Array.map(name => `"${name}"`)->Js.Array2.joinWith(", ")
-
- // Build the SELECT part: id from unnest, checkpoint_id from unnest, 'DELETE' for action, NULL for all other fields
- let selectParts = allFieldNames->Belt.Array.map(fieldName => {
- switch fieldName {
- | "id" => "u.id"
- | field if field == checkpointIdFieldName => "u.checkpoint_id"
- | field if field == changeFieldName => "'DELETE'"
- | _ => "NULL"
- }
- })
- let selectPartsStr = selectParts->Js.Array2.joinWith(", ")
- (~pgSchema) => {
- `INSERT INTO "${pgSchema}"."${historyTableName}" (${allFieldNamesStr})
- SELECT ${selectPartsStr}
- FROM UNNEST($1::text[], $2::int[]) AS u(id, checkpoint_id)`
- }
- }
-
- // Get data field names for rollback queries (exclude changeFieldName and checkpointIdFieldName)
- let dataFieldNames =
- table.fields
- ->Belt.Array.map(field => field->getFieldName)
- ->Belt.Array.keep(fieldName =>
- fieldName != changeFieldName && fieldName != checkpointIdFieldName
- )
- let dataFieldsCommaSeparated =
- dataFieldNames->Belt.Array.map(name => `"${name}"`)->Js.Array2.joinWith(", ")
-
- // Returns entity IDs that were created after the rollback target and have no history before it.
- // These entities should be deleted during rollback.
- let makeGetRollbackRemovedIdsQuery = (~pgSchema) => {
- `SELECT DISTINCT id
- FROM "${pgSchema}"."${historyTableName}"
- WHERE "${checkpointIdFieldName}" > $1
- AND NOT EXISTS (
- SELECT 1
- FROM "${pgSchema}"."${historyTableName}" h
- WHERE h.id = "${historyTableName}".id
- AND h."${checkpointIdFieldName}" <= $1
- )`
- }
-
- // Returns the most recent entity state for IDs that need to be restored during rollback.
- // For each ID modified after the rollback target, retrieves its latest state at or before the target.
- let makeGetRollbackRestoredEntitiesQuery = (~pgSchema) => {
- `SELECT DISTINCT ON (id) ${dataFieldsCommaSeparated}
- FROM "${pgSchema}"."${historyTableName}"
- WHERE "${checkpointIdFieldName}" <= $1
- AND EXISTS (
- SELECT 1
- FROM "${pgSchema}"."${historyTableName}" h
- WHERE h.id = "${historyTableName}".id
- AND h."${checkpointIdFieldName}" > $1
- )
- ORDER BY id, "${checkpointIdFieldName}" DESC`
- }
-
- {
- table,
- setUpdateSchema,
- setUpdateSchemaRows: S.array(setUpdateSchema),
- makeInsertDeleteUpdatesQuery,
- makeGetRollbackRemovedIdsQuery,
- makeGetRollbackRestoredEntitiesQuery,
- }
- }
-
  type safeReorgBlocks = {
  chainIds: array<int>,
  blockNumbers: array<int>,
@@ -225,11 +117,11 @@ let pruneStaleEntityHistory = (
  }

  // If an entity doesn't have a history before the update
- // we create it automatically with checkpoint_id 0
+ // we create it automatically with envio_checkpoint_id 0
  let makeBackfillHistoryQuery = (~pgSchema, ~entityName, ~entityIndex) => {
  let historyTableRef = `"${pgSchema}"."${historyTableName(~entityName, ~entityIndex)}"`
  `WITH target_ids AS (
- SELECT UNNEST($1::${(Text: Table.fieldType :> string)}[]) AS id
+ SELECT UNNEST($1::${(Text: Postgres.columnType :> string)}[]) AS id
  ),
  missing_history AS (
  SELECT e.*
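Note that the backfill query's array cast now comes from the new Postgres.res binding (`Postgres.columnType`) instead of `Table.fieldType`, which the rest of this diff turns into a backend-neutral type. The `(Text: Postgres.columnType :> string)` expression is ordinary variant-to-string coercion; a minimal sketch of the pattern, assuming `Postgres.columnType` keeps payload-less constructors whose runtime representation is the SQL type name:

// Sketch only: the exact runtime strings (e.g. "TEXT") are assumptions about
// Postgres.res, which is not shown in this section.
let unnestCast = (columnType: Postgres.columnType) => `$1::${(columnType :> string)}[]`
// unnestCast(Text) would then interpolate to something like "$1::TEXT[]"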
@@ -252,22 +144,7 @@ let backfillHistory = (sql, ~pgSchema, ~entityName, ~entityIndex, ~ids: array<st
  ->Promise.ignoreValue
  }

- let insertDeleteUpdates = (
- sql,
- ~pgSchema,
- ~entityHistory,
- ~batchDeleteEntityIds,
- ~batchDeleteCheckpointIds,
- ) => {
- sql
- ->Postgres.preparedUnsafe(
- entityHistory.makeInsertDeleteUpdatesQuery(~pgSchema),
- (batchDeleteEntityIds, batchDeleteCheckpointIds)->Obj.magic,
- )
- ->Promise.ignoreValue
- }
-
- let rollback = (sql, ~pgSchema, ~entityName, ~entityIndex, ~rollbackTargetCheckpointId: int) => {
+ let rollback = (sql, ~pgSchema, ~entityName, ~entityIndex, ~rollbackTargetCheckpointId: float) => {
  sql
  ->Postgres.preparedUnsafe(
  `DELETE FROM "${pgSchema}"."${historyTableName(
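`rollback` (like the checkpoint queries in InternalTable.res below) now takes the target checkpoint id as a `float` instead of an `int`, matching the string-to-float `unsafeCheckpointIdSchema` above. An illustrative call site, with made-up values and a `sql` handle assumed to be the usual Postgres connection:

// Usage sketch only; values are illustrative, not taken from the package.
let _ = EntityHistory.rollback(
  sql,
  ~pgSchema="public",
  ~entityName="User",
  ~entityIndex=0,
  ~rollbackTargetCheckpointId=42.,
)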
package/src/db/EntityHistory.res.js
@@ -3,7 +3,7 @@

  var Table = require("./Table.res.js");
  var Js_string = require("rescript/lib/js/js_string.js");
- var Belt_Array = require("rescript/lib/js/belt_Array.js");
+ var Belt_Float = require("rescript/lib/js/belt_Float.js");
  var S$RescriptSchema = require("rescript-schema/src/S.res.js");

  var variants = [
@@ -15,26 +15,52 @@ var name = "ENVIO_HISTORY_CHANGE";

  var schema = S$RescriptSchema.$$enum(variants);

+ var config = {
+ name: name,
+ variants: variants,
+ schema: schema
+ };
+
  var RowAction = {
  variants: variants,
  name: name,
- schema: schema
+ schema: schema,
+ config: config
  };

  var changeFieldName = "envio_change";

- var checkpointIdFieldName = "checkpoint_id";
+ var checkpointIdFieldName = "envio_checkpoint_id";
+
+ var changeFieldType = {
+ type: "Enum",
+ config: config
+ };
+
+ var unsafeCheckpointIdSchema = S$RescriptSchema.transform(S$RescriptSchema.setName(S$RescriptSchema.string, "CheckpointId"), (function (s) {
+ return {
+ p: (function (string) {
+ var $$float = Belt_Float.fromString(string);
+ if ($$float !== undefined) {
+ return $$float;
+ } else {
+ return s.fail("The string is not valid CheckpointId", undefined);
+ }
+ }),
+ s: (function ($$float) {
+ return String($$float);
+ })
+ };
+ }));

  function makeSetUpdateSchema(entitySchema) {
  return S$RescriptSchema.object(function (s) {
  s.tag(changeFieldName, "SET");
  return {
- entityId: s.f("id", S$RescriptSchema.string),
- entityUpdateAction: {
- TAG: "Set",
- _0: s.flatten(entitySchema)
- },
- checkpointId: s.f(checkpointIdFieldName, S$RescriptSchema.$$int)
+ type: "SET",
+ entityId: s.f(Table.idFieldName, S$RescriptSchema.string),
+ entity: s.flatten(entitySchema),
+ checkpointId: s.f(checkpointIdFieldName, unsafeCheckpointIdSchema)
  };
  });
  }
@@ -50,103 +76,6 @@ function historyTableName(entityName, entityIndex) {
  return Js_string.slice(0, 63 - entityIndexStr.length | 0, fullName) + entityIndexStr;
  }

- function fromTable(table, schema, entityIndex) {
- var dataFields = Belt_Array.keepMap(table.fields, (function (field) {
- if (field.TAG !== "Field") {
- return ;
- }
- var field$1 = field._0;
- var match = field$1.fieldName;
- if (match === "id") {
- return {
- TAG: "Field",
- _0: {
- fieldName: "id",
- fieldType: field$1.fieldType,
- fieldSchema: field$1.fieldSchema,
- isArray: field$1.isArray,
- isNullable: field$1.isNullable,
- isPrimaryKey: true,
- isIndex: field$1.isIndex,
- linkedEntity: field$1.linkedEntity,
- defaultValue: field$1.defaultValue
- }
- };
- } else {
- return {
- TAG: "Field",
- _0: {
- fieldName: field$1.fieldName,
- fieldType: field$1.fieldType,
- fieldSchema: field$1.fieldSchema,
- isArray: field$1.isArray,
- isNullable: true,
- isPrimaryKey: field$1.isPrimaryKey,
- isIndex: false,
- linkedEntity: field$1.linkedEntity,
- defaultValue: field$1.defaultValue
- }
- };
- }
- }));
- var actionField = Table.mkField(changeFieldName, name, S$RescriptSchema.never, undefined, undefined, undefined, undefined, undefined, undefined);
- var checkpointIdField = Table.mkField(checkpointIdFieldName, "INTEGER", S$RescriptSchema.$$int, undefined, undefined, undefined, true, undefined, undefined);
- var entityTableName = table.tableName;
- var historyTableName$1 = historyTableName(entityTableName, entityIndex);
- var table$1 = Table.mkTable(historyTableName$1, undefined, Belt_Array.concat(dataFields, [
- checkpointIdField,
- actionField
- ]));
- var setUpdateSchema = makeSetUpdateSchema(schema);
- var allFieldNames = Belt_Array.map(table$1.fields, (function (field) {
- return Table.getFieldName(field);
- }));
- var allFieldNamesStr = Belt_Array.map(allFieldNames, (function (name) {
- return "\"" + name + "\"";
- })).join(", ");
- var selectParts = Belt_Array.map(allFieldNames, (function (fieldName) {
- if (fieldName === "id") {
- return "u.id";
- } else if (fieldName === checkpointIdFieldName) {
- return "u.checkpoint_id";
- } else if (fieldName === changeFieldName) {
- return "'DELETE'";
- } else {
- return "NULL";
- }
- }));
- var selectPartsStr = selectParts.join(", ");
- var makeInsertDeleteUpdatesQuery = function (pgSchema) {
- return "INSERT INTO \"" + pgSchema + "\".\"" + historyTableName$1 + "\" (" + allFieldNamesStr + ")\nSELECT " + selectPartsStr + "\nFROM UNNEST($1::text[], $2::int[]) AS u(id, checkpoint_id)";
- };
- var dataFieldNames = Belt_Array.keep(Belt_Array.map(table$1.fields, (function (field) {
- return Table.getFieldName(field);
- })), (function (fieldName) {
- if (fieldName !== changeFieldName) {
- return fieldName !== checkpointIdFieldName;
- } else {
- return false;
- }
- }));
- var dataFieldsCommaSeparated = Belt_Array.map(dataFieldNames, (function (name) {
- return "\"" + name + "\"";
- })).join(", ");
- var makeGetRollbackRemovedIdsQuery = function (pgSchema) {
- return "SELECT DISTINCT id\nFROM \"" + pgSchema + "\".\"" + historyTableName$1 + "\"\nWHERE \"" + checkpointIdFieldName + "\" > $1\n AND NOT EXISTS (\n SELECT 1\n FROM \"" + pgSchema + "\".\"" + historyTableName$1 + "\" h\n WHERE h.id = \"" + historyTableName$1 + "\".id\n AND h.\"" + checkpointIdFieldName + "\" <= $1\n )";
- };
- var makeGetRollbackRestoredEntitiesQuery = function (pgSchema) {
- return "SELECT DISTINCT ON (id) " + dataFieldsCommaSeparated + "\nFROM \"" + pgSchema + "\".\"" + historyTableName$1 + "\"\nWHERE \"" + checkpointIdFieldName + "\" <= $1\n AND EXISTS (\n SELECT 1\n FROM \"" + pgSchema + "\".\"" + historyTableName$1 + "\" h\n WHERE h.id = \"" + historyTableName$1 + "\".id\n AND h.\"" + checkpointIdFieldName + "\" > $1\n )\nORDER BY id, \"" + checkpointIdFieldName + "\" DESC";
- };
- return {
- table: table$1,
- setUpdateSchema: setUpdateSchema,
- setUpdateSchemaRows: S$RescriptSchema.array(setUpdateSchema),
- makeInsertDeleteUpdatesQuery: makeInsertDeleteUpdatesQuery,
- makeGetRollbackRemovedIdsQuery: makeGetRollbackRemovedIdsQuery,
- makeGetRollbackRestoredEntitiesQuery: makeGetRollbackRestoredEntitiesQuery
- };
- }
-
  function makePruneStaleEntityHistoryQuery(entityName, entityIndex, pgSchema) {
  var historyTableRef = "\"" + pgSchema + "\".\"" + historyTableName(entityName, entityIndex) + "\"";
  return "WITH anchors AS (\n SELECT t.id, MAX(t." + checkpointIdFieldName + ") AS keep_checkpoint_id\n FROM " + historyTableRef + " t WHERE t." + checkpointIdFieldName + " <= $1\n GROUP BY t.id\n)\nDELETE FROM " + historyTableRef + " d\nUSING anchors a\nWHERE d.id = a.id\n AND (\n d." + checkpointIdFieldName + " < a.keep_checkpoint_id\n OR (\n d." + checkpointIdFieldName + " = a.keep_checkpoint_id AND\n NOT EXISTS (\n SELECT 1 FROM " + historyTableRef + " ps \n WHERE ps.id = d.id AND ps." + checkpointIdFieldName + " > $1\n ) \n )\n );";
@@ -165,31 +94,27 @@ function backfillHistory(sql, pgSchema, entityName, entityIndex, ids) {
  return sql.unsafe(makeBackfillHistoryQuery(pgSchema, entityName, entityIndex), [ids], {prepare: true});
  }

- function insertDeleteUpdates(sql, pgSchema, entityHistory, batchDeleteEntityIds, batchDeleteCheckpointIds) {
- return sql.unsafe(entityHistory.makeInsertDeleteUpdatesQuery(pgSchema), [
- batchDeleteEntityIds,
- batchDeleteCheckpointIds
- ], {prepare: true});
- }
-
  function rollback(sql, pgSchema, entityName, entityIndex, rollbackTargetCheckpointId) {
  return sql.unsafe("DELETE FROM \"" + pgSchema + "\".\"" + historyTableName(entityName, entityIndex) + "\" WHERE \"" + checkpointIdFieldName + "\" > $1;", [rollbackTargetCheckpointId], {prepare: true});
  }

+ var checkpointIdFieldType = "Uint32";
+
  var maxPgTableNameLength = 63;

  exports.RowAction = RowAction;
  exports.changeFieldName = changeFieldName;
  exports.checkpointIdFieldName = checkpointIdFieldName;
+ exports.checkpointIdFieldType = checkpointIdFieldType;
+ exports.changeFieldType = changeFieldType;
+ exports.unsafeCheckpointIdSchema = unsafeCheckpointIdSchema;
  exports.makeSetUpdateSchema = makeSetUpdateSchema;
  exports.maxPgTableNameLength = maxPgTableNameLength;
  exports.historyTablePrefix = historyTablePrefix;
  exports.historyTableName = historyTableName;
- exports.fromTable = fromTable;
  exports.makePruneStaleEntityHistoryQuery = makePruneStaleEntityHistoryQuery;
  exports.pruneStaleEntityHistory = pruneStaleEntityHistory;
  exports.makeBackfillHistoryQuery = makeBackfillHistoryQuery;
  exports.backfillHistory = backfillHistory;
- exports.insertDeleteUpdates = insertDeleteUpdates;
  exports.rollback = rollback;
  /* schema Not a pure module */
package/src/db/InternalTable.res
@@ -46,21 +46,19 @@ module DynamicContractRegistry = {
  let table = mkTable(
  name,
  ~fields=[
- mkField("id", Text, ~isPrimaryKey, ~fieldSchema=S.string),
- mkField("chain_id", Integer, ~fieldSchema=S.int),
- mkField("registering_event_block_number", Integer, ~fieldSchema=S.int),
- mkField("registering_event_log_index", Integer, ~fieldSchema=S.int),
- mkField("registering_event_block_timestamp", Integer, ~fieldSchema=S.int),
- mkField("registering_event_contract_name", Text, ~fieldSchema=S.string),
- mkField("registering_event_name", Text, ~fieldSchema=S.string),
- mkField("registering_event_src_address", Text, ~fieldSchema=Address.schema),
- mkField("contract_address", Text, ~fieldSchema=Address.schema),
- mkField("contract_name", Text, ~fieldSchema=S.string),
+ mkField("id", String, ~isPrimaryKey, ~fieldSchema=S.string),
+ mkField("chain_id", Int32, ~fieldSchema=S.int),
+ mkField("registering_event_block_number", Int32, ~fieldSchema=S.int),
+ mkField("registering_event_log_index", Int32, ~fieldSchema=S.int),
+ mkField("registering_event_block_timestamp", Int32, ~fieldSchema=S.int),
+ mkField("registering_event_contract_name", String, ~fieldSchema=S.string),
+ mkField("registering_event_name", String, ~fieldSchema=S.string),
+ mkField("registering_event_src_address", String, ~fieldSchema=Address.schema),
+ mkField("contract_address", String, ~fieldSchema=Address.schema),
+ mkField("contract_name", String, ~fieldSchema=S.string),
  ],
  )

- let entityHistory = table->EntityHistory.fromTable(~schema, ~entityIndex=index)
-
  external castToInternal: t => Internal.entity = "%identity"

  let config = {
@@ -69,7 +67,6 @@ module DynamicContractRegistry = {
  schema,
  rowsSchema,
  table,
- entityHistory,
  }->Internal.fromGenericEntityConfig
  }

@@ -131,19 +128,19 @@ module Chains = {
  let table = mkTable(
  "envio_chains",
  ~fields=[
- mkField((#id: field :> string), Integer, ~fieldSchema=S.int, ~isPrimaryKey),
+ mkField((#id: field :> string), Int32, ~fieldSchema=S.int, ~isPrimaryKey),
  // Values populated from config
- mkField((#start_block: field :> string), Integer, ~fieldSchema=S.int),
- mkField((#end_block: field :> string), Integer, ~fieldSchema=S.null(S.int), ~isNullable),
- mkField((#max_reorg_depth: field :> string), Integer, ~fieldSchema=S.int),
+ mkField((#start_block: field :> string), Int32, ~fieldSchema=S.int),
+ mkField((#end_block: field :> string), Int32, ~fieldSchema=S.null(S.int), ~isNullable),
+ mkField((#max_reorg_depth: field :> string), Int32, ~fieldSchema=S.int),
  // Block number of the latest block that was fetched from the source
- mkField((#buffer_block: field :> string), Integer, ~fieldSchema=S.int),
+ mkField((#buffer_block: field :> string), Int32, ~fieldSchema=S.int),
  // Block number of the currently active source
- mkField((#source_block: field :> string), Integer, ~fieldSchema=S.int),
+ mkField((#source_block: field :> string), Int32, ~fieldSchema=S.int),
  // Block number of the first event that was processed for this chain
  mkField(
  (#first_event_block: field :> string),
- Integer,
+ Int32,
  ~fieldSchema=S.null(S.int),
  ~isNullable,
  ),
@@ -151,17 +148,17 @@ module Chains = {
  // null during historical sync, set to current time when sync is complete
  mkField(
  (#ready_at: field :> string),
- TimestampWithNullTimezone,
+ Date,
  ~fieldSchema=S.null(Utils.Schema.dbDate),
  ~isNullable,
  ),
- mkField((#events_processed: field :> string), Integer, ~fieldSchema=S.int),
+ mkField((#events_processed: field :> string), Int32, ~fieldSchema=S.int),
  // TODO: In the future it should reference a table with sources
  mkField((#_is_hyper_sync: field :> string), Boolean, ~fieldSchema=S.bool),
  // Fully processed block number
- mkField((#progress_block: field :> string), Integer, ~fieldSchema=S.int),
+ mkField((#progress_block: field :> string), Int32, ~fieldSchema=S.int),
  // TODO: Should deprecate after changing the ETA calculation logic
- mkField((#_num_batches_fetched: field :> string), Integer, ~fieldSchema=S.int),
+ mkField((#_num_batches_fetched: field :> string), Int32, ~fieldSchema=S.int),
  ],
  )

@@ -368,11 +365,11 @@ module PersistedState = {
  "persisted_state",
  ~fields=[
  mkField("id", Serial, ~fieldSchema=S.int, ~isPrimaryKey),
- mkField("envio_version", Text, ~fieldSchema=S.string),
- mkField("config_hash", Text, ~fieldSchema=S.string),
- mkField("schema_hash", Text, ~fieldSchema=S.string),
- mkField("handler_files_hash", Text, ~fieldSchema=S.string),
- mkField("abi_files_hash", Text, ~fieldSchema=S.string),
+ mkField("envio_version", String, ~fieldSchema=S.string),
+ mkField("config_hash", String, ~fieldSchema=S.string),
+ mkField("schema_hash", String, ~fieldSchema=S.string),
+ mkField("handler_files_hash", String, ~fieldSchema=S.string),
+ mkField("abi_files_hash", String, ~fieldSchema=S.string),
  ],
  )
  }
@@ -387,7 +384,7 @@ module Checkpoints = {
  ]

  type t = {
- id: int,
+ id: float,
  @as("chain_id")
  chainId: int,
  @as("block_number")
@@ -398,16 +395,16 @@
  eventsProcessed: int,
  }

- let initialCheckpointId = 0
+ let initialCheckpointId = 0.

  let table = mkTable(
  "envio_checkpoints",
  ~fields=[
- mkField((#id: field :> string), Integer, ~fieldSchema=S.int, ~isPrimaryKey),
- mkField((#chain_id: field :> string), Integer, ~fieldSchema=S.int),
- mkField((#block_number: field :> string), Integer, ~fieldSchema=S.int),
- mkField((#block_hash: field :> string), Text, ~fieldSchema=S.null(S.string), ~isNullable),
- mkField((#events_processed: field :> string), Integer, ~fieldSchema=S.int),
+ mkField((#id: field :> string), Int32, ~fieldSchema=S.int, ~isPrimaryKey),
+ mkField((#chain_id: field :> string), Int32, ~fieldSchema=S.int),
+ mkField((#block_number: field :> string), Int32, ~fieldSchema=S.int),
+ mkField((#block_hash: field :> string), String, ~fieldSchema=S.null(S.string), ~isNullable),
+ mkField((#events_processed: field :> string), Int32, ~fieldSchema=S.int),
  ],
  )

@@ -438,12 +435,12 @@ WHERE cp."${(#block_hash: field :> string)}" IS NOT NULL
  }

  let makeCommitedCheckpointIdQuery = (~pgSchema) => {
- `SELECT COALESCE(MAX(${(#id: field :> string)}), ${initialCheckpointId->Belt.Int.toString}) AS id FROM "${pgSchema}"."${table.tableName}";`
+ `SELECT COALESCE(MAX(${(#id: field :> string)}), ${initialCheckpointId->Belt.Float.toString}) AS id FROM "${pgSchema}"."${table.tableName}";`
  }

  let makeInsertCheckpointQuery = (~pgSchema) => {
  `INSERT INTO "${pgSchema}"."${table.tableName}" ("${(#id: field :> string)}", "${(#chain_id: field :> string)}", "${(#block_number: field :> string)}", "${(#block_hash: field :> string)}", "${(#events_processed: field :> string)}")
- SELECT * FROM unnest($1::${(Integer :> string)}[],$2::${(Integer :> string)}[],$3::${(Integer :> string)}[],$4::${(Text :> string)}[],$5::${(Integer :> string)}[]);`
+ SELECT * FROM unnest($1::${(Integer: Postgres.columnType :> string)}[],$2::${(Integer: Postgres.columnType :> string)}[],$3::${(Integer: Postgres.columnType :> string)}[],$4::${(Text: Postgres.columnType :> string)}[],$5::${(Integer: Postgres.columnType :> string)}[]);`
  }

  let insert = (
@@ -468,14 +465,14 @@ SELECT * FROM unnest($1::${(Integer :> string)}[],$2::${(Integer :> string)}[],$
  checkpointEventsProcessed,
  )->(
  Utils.magic: (
- (array<int>, array<int>, array<int>, array<Js.Null.t<string>>, array<int>)
+ (array<float>, array<int>, array<int>, array<Js.Null.t<string>>, array<int>)
  ) => unknown
  ),
  )
  ->Promise.ignoreValue
  }

- let rollback = (sql, ~pgSchema, ~rollbackTargetCheckpointId: int) => {
+ let rollback = (sql, ~pgSchema, ~rollbackTargetCheckpointId: Internal.checkpointId) => {
  sql
  ->Postgres.preparedUnsafe(
  `DELETE FROM "${pgSchema}"."${table.tableName}" WHERE "${(#id: field :> string)}" > $1;`,
@@ -488,7 +485,7 @@ SELECT * FROM unnest($1::${(Integer :> string)}[],$2::${(Integer :> string)}[],$
  `DELETE FROM "${pgSchema}"."${table.tableName}" WHERE "${(#id: field :> string)}" < $1;`
  }

- let pruneStaleCheckpoints = (sql, ~pgSchema, ~safeCheckpointId: int) => {
+ let pruneStaleCheckpoints = (sql, ~pgSchema, ~safeCheckpointId: float) => {
  sql
  ->Postgres.preparedUnsafe(
  makePruneStaleCheckpointsQuery(~pgSchema),
@@ -517,7 +514,7 @@ LIMIT 1;`
  makeGetRollbackTargetCheckpointQuery(~pgSchema),
  (reorgChainId, lastKnownValidBlockNumber)->Obj.magic,
  )
- ->(Utils.magic: promise<unknown> => promise<array<{"id": int}>>)
+ ->(Utils.magic: promise<unknown> => promise<array<{"id": Internal.checkpointId}>>)
  }

  let makeGetRollbackProgressDiffQuery = (~pgSchema) => {
@@ -530,7 +527,11 @@ WHERE "${(#id: field :> string)}" > $1
  GROUP BY "${(#chain_id: field :> string)}";`
  }

- let getRollbackProgressDiff = (sql, ~pgSchema, ~rollbackTargetCheckpointId: int) => {
+ let getRollbackProgressDiff = (
+ sql,
+ ~pgSchema,
+ ~rollbackTargetCheckpointId: Internal.checkpointId,
+ ) => {
  sql
  ->Postgres.preparedUnsafe(
  makeGetRollbackProgressDiffQuery(~pgSchema),
@@ -584,18 +585,18 @@ module RawEvents = {
  let table = mkTable(
  "raw_events",
  ~fields=[
- mkField("chain_id", Integer, ~fieldSchema=S.int),
- mkField("event_id", Numeric, ~fieldSchema=S.bigint),
- mkField("event_name", Text, ~fieldSchema=S.string),
- mkField("contract_name", Text, ~fieldSchema=S.string),
- mkField("block_number", Integer, ~fieldSchema=S.int),
- mkField("log_index", Integer, ~fieldSchema=S.int),
- mkField("src_address", Text, ~fieldSchema=Address.schema),
- mkField("block_hash", Text, ~fieldSchema=S.string),
- mkField("block_timestamp", Integer, ~fieldSchema=S.int),
- mkField("block_fields", JsonB, ~fieldSchema=S.json(~validate=false)),
- mkField("transaction_fields", JsonB, ~fieldSchema=S.json(~validate=false)),
- mkField("params", JsonB, ~fieldSchema=S.json(~validate=false)),
+ mkField("chain_id", Int32, ~fieldSchema=S.int),
+ mkField("event_id", BigInt({}), ~fieldSchema=S.bigint),
+ mkField("event_name", String, ~fieldSchema=S.string),
+ mkField("contract_name", String, ~fieldSchema=S.string),
+ mkField("block_number", Int32, ~fieldSchema=S.int),
+ mkField("log_index", Int32, ~fieldSchema=S.int),
+ mkField("src_address", String, ~fieldSchema=Address.schema),
+ mkField("block_hash", String, ~fieldSchema=S.string),
+ mkField("block_timestamp", Int32, ~fieldSchema=S.int),
+ mkField("block_fields", Json, ~fieldSchema=S.json(~validate=false)),
+ mkField("transaction_fields", Json, ~fieldSchema=S.json(~validate=false)),
+ mkField("params", Json, ~fieldSchema=S.json(~validate=false)),
  mkField("serial", Serial, ~isNullable, ~isPrimaryKey, ~fieldSchema=S.null(S.int)),
  ],
  )
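Across InternalTable.res the Postgres-specific `Table.fieldType` constructors are replaced by backend-neutral ones (Text becomes String, Integer becomes Int32, JsonB becomes Json, Numeric becomes BigInt({}), TimestampWithNullTimezone becomes Date), presumably so the same table definitions can drive both the Postgres storage and the new ClickHouse sink added in this release. A minimal sketch of a table definition against the renamed constructors (field names are illustrative; it assumes `Table` is opened, as this file appears to do):

// Sketch only: not part of the package; mirrors the mkTable/mkField calls above.
let exampleTable = mkTable(
  "example_rows",
  ~fields=[
    mkField("id", String, ~isPrimaryKey=true, ~fieldSchema=S.string),
    mkField("block_number", Int32, ~fieldSchema=S.int),
    mkField("amount", BigInt({}), ~fieldSchema=S.bigint),
    mkField("payload", Json, ~fieldSchema=S.json(~validate=false)),
  ],
)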