envio 2.32.1 → 3.0.0-alpha-main-clickhouse-sink

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/index.d.ts +1 -0
  2. package/package.json +6 -5
  3. package/src/Batch.res +4 -4
  4. package/src/Change.res +9 -0
  5. package/src/Change.res.js +2 -0
  6. package/src/Config.res +5 -5
  7. package/src/Config.res.js +3 -1
  8. package/src/Envio.gen.ts +3 -3
  9. package/src/Envio.res +14 -3
  10. package/src/EventRegister.res +3 -11
  11. package/src/EventRegister.res.js +4 -8
  12. package/src/EventRegister.resi +1 -1
  13. package/src/InMemoryStore.gen.ts +6 -0
  14. package/src/InMemoryStore.res +149 -0
  15. package/src/InMemoryStore.res.js +161 -0
  16. package/src/InMemoryTable.res +50 -35
  17. package/src/InMemoryTable.res.js +52 -84
  18. package/src/Internal.gen.ts +0 -2
  19. package/src/Internal.res +20 -38
  20. package/src/Internal.res.js +2 -16
  21. package/src/LoadManager.res +23 -16
  22. package/src/LoadManager.res.js +17 -15
  23. package/src/Persistence.res +190 -38
  24. package/src/Persistence.res.js +92 -39
  25. package/src/PgStorage.res +700 -14
  26. package/src/PgStorage.res.js +431 -19
  27. package/src/Platform.res +141 -0
  28. package/src/Platform.res.js +170 -0
  29. package/src/Prometheus.res +41 -0
  30. package/src/Prometheus.res.js +45 -0
  31. package/src/SafeCheckpointTracking.res +5 -4
  32. package/src/Sink.res +47 -0
  33. package/src/Sink.res.js +36 -0
  34. package/src/Utils.res +2 -0
  35. package/src/Utils.res.js +3 -0
  36. package/src/bindings/ClickHouse.res +387 -0
  37. package/src/bindings/ClickHouse.res.js +274 -0
  38. package/src/bindings/Postgres.res +15 -0
  39. package/src/bindings/Promise.res +3 -0
  40. package/src/db/EntityHistory.res +33 -156
  41. package/src/db/EntityHistory.res.js +40 -115
  42. package/src/db/InternalTable.res +56 -55
  43. package/src/db/InternalTable.res.js +49 -52
  44. package/src/db/Table.res +86 -22
  45. package/src/db/Table.res.js +77 -10
package/src/Platform.res.js ADDED
@@ -0,0 +1,170 @@
+ // Generated by ReScript, PLEASE EDIT WITH CARE
+ 'use strict';
+
+
+ var cleanUpRawEventFieldsInPlace = (fields => {
+   delete fields.hash
+   delete fields.number
+   delete fields.timestamp
+ });
+
+ var Evm = {
+   cleanUpRawEventFieldsInPlace: cleanUpRawEventFieldsInPlace
+ };
+
+ var evm_blockFields = [
+   "number",
+   "timestamp",
+   "hash",
+   "parentHash",
+   "nonce",
+   "sha3Uncles",
+   "logsBloom",
+   "transactionsRoot",
+   "stateRoot",
+   "receiptsRoot",
+   "miner",
+   "difficulty",
+   "totalDifficulty",
+   "extraData",
+   "size",
+   "gasLimit",
+   "gasUsed",
+   "uncles",
+   "baseFeePerGas",
+   "blobGasUsed",
+   "excessBlobGas",
+   "parentBeaconBlockRoot",
+   "withdrawalsRoot",
+   "l1BlockNumber",
+   "sendCount",
+   "sendRoot",
+   "mixHash"
+ ];
+
+ var evm_transactionFields = [
+   "transactionIndex",
+   "hash",
+   "from",
+   "to",
+   "gas",
+   "gasPrice",
+   "maxPriorityFeePerGas",
+   "maxFeePerGas",
+   "cumulativeGasUsed",
+   "effectiveGasPrice",
+   "gasUsed",
+   "input",
+   "nonce",
+   "value",
+   "v",
+   "r",
+   "s",
+   "contractAddress",
+   "logsBloom",
+   "root",
+   "status",
+   "yParity",
+   "chainId",
+   "maxFeePerBlobGas",
+   "blobVersionedHashes",
+   "kind",
+   "l1Fee",
+   "l1GasPrice",
+   "l1GasUsed",
+   "l1FeeScalar",
+   "gasUsedForL1",
+   "accessList",
+   "authorizationList"
+ ];
+
+ function evm_getNumber(prim) {
+   return prim.number;
+ }
+
+ function evm_getTimestamp(prim) {
+   return prim.timestamp;
+ }
+
+ function evm_getId(prim) {
+   return prim.hash;
+ }
+
+ var evm = {
+   name: "evm",
+   blockFields: evm_blockFields,
+   transactionFields: evm_transactionFields,
+   blockNumberName: "number",
+   blockTimestampName: "timestamp",
+   blockHashName: "hash",
+   getNumber: evm_getNumber,
+   getTimestamp: evm_getTimestamp,
+   getId: evm_getId,
+   cleanUpRawEventFieldsInPlace: cleanUpRawEventFieldsInPlace
+ };
+
+ var cleanUpRawEventFieldsInPlace$1 = (fields => {
+   delete fields.id
+   delete fields.height
+   delete fields.time
+ });
+
+ var Fuel = {
+   cleanUpRawEventFieldsInPlace: cleanUpRawEventFieldsInPlace$1
+ };
+
+ var fuel_blockFields = [
+   "id",
+   "height",
+   "time"
+ ];
+
+ var fuel_transactionFields = ["id"];
+
+ function fuel_getNumber(prim) {
+   return prim.height;
+ }
+
+ function fuel_getTimestamp(prim) {
+   return prim.time;
+ }
+
+ function fuel_getId(prim) {
+   return prim.id;
+ }
+
+ var fuel = {
+   name: "fuel",
+   blockFields: fuel_blockFields,
+   transactionFields: fuel_transactionFields,
+   blockNumberName: "height",
+   blockTimestampName: "time",
+   blockHashName: "id",
+   getNumber: fuel_getNumber,
+   getTimestamp: fuel_getTimestamp,
+   getId: fuel_getId,
+   cleanUpRawEventFieldsInPlace: cleanUpRawEventFieldsInPlace$1
+ };
+
+ function fromName(name) {
+   if (name === "evm") {
+     return evm;
+   } else {
+     return fuel;
+   }
+ }
+
+ function makeBlockEvent(blockNumber, chainId, platform) {
+   var blockEvent = {};
+   blockEvent["chainId"] = chainId;
+   blockEvent[platform.blockNumberName] = blockNumber;
+   return blockEvent;
+ }
+
+ exports.Evm = Evm;
+ exports.evm = evm;
+ exports.Fuel = Fuel;
+ exports.fuel = fuel;
+ exports.fromName = fromName;
+ exports.makeBlockEvent = makeBlockEvent;
+ /* No side effect */
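For orientation: the generated Platform module resolves a chain platform by name and builds minimal block-event objects. A small usage sketch in plain Node.js (the require path is an assumption for illustration, not a documented entry point):

// Illustrative use of the new Platform module; the require path is assumed.
const Platform = require("envio/src/Platform.res.js");

const evm = Platform.fromName("evm");                  // any other name currently falls back to the fuel platform
const blockEvent = Platform.makeBlockEvent(19000000, 1, evm);
// blockEvent => { chainId: 1, number: 19000000 }      // the block-number key comes from platform.blockNumberName

Platform.Evm.cleanUpRawEventFieldsInPlace(blockEvent); // deletes hash/number/timestamp in place => { chainId: 1 }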
package/src/Prometheus.res CHANGED
@@ -16,6 +16,18 @@ let executeBatchDurationCounter = PromClient.Counter.makeCounter({
    "labelNames": [],
  })

+ let storageWriteTimeCounter = PromClient.Counter.makeCounter({
+   "name": "envio_storage_write_time",
+   "help": "Cumulative time spent writing batches to storage in milliseconds",
+   "labelNames": [],
+ })
+
+ let storageWriteCounter = PromClient.Counter.makeCounter({
+   "name": "envio_storage_write_count",
+   "help": "Total number of batch writes to storage",
+   "labelNames": [],
+ })
+
  let allChainsSyncedToHead = PromClient.Gauge.makeGauge({
    "name": "hyperindex_synced_to_head",
    "help": "All chains fully synced",
@@ -213,6 +225,14 @@ let incrementExecuteBatchDurationCounter = (~duration) => {
    executeBatchDurationCounter->PromClient.Counter.incMany(duration)
  }

+ let incrementStorageWriteTimeCounter = (~duration) => {
+   storageWriteTimeCounter->PromClient.Counter.incMany(duration)
+ }
+
+ let incrementStorageWriteCounter = () => {
+   storageWriteCounter->PromClient.Counter.inc
+ }
+
  let setSourceChainHeight = (~blockNumber, ~chainId) => {
    sourceChainHeight
    ->PromClient.Gauge.labels({"chainId": chainId})
@@ -739,3 +759,24 @@ module StorageLoad = {
      sizeCounter->SafeCounter.handleInt(~labels={operation}, ~value=size)
    }
  }
+
+ module SinkWrite = {
+   let sinkLabelsSchema = S.object(s => s.field("sink", S.string))
+
+   let timeCounter = SafeCounter.makeOrThrow(
+     ~name="envio_sink_write_time",
+     ~help="Processing time taken to write data to sink. (milliseconds)",
+     ~labelSchema=sinkLabelsSchema,
+   )
+
+   let counter = SafeCounter.makeOrThrow(
+     ~name="envio_sink_write_count",
+     ~help="Cumulative number of successful sink write operations during the indexing process.",
+     ~labelSchema=sinkLabelsSchema,
+   )
+
+   let increment = (~sinkName, ~timeMillis) => {
+     timeCounter->SafeCounter.handleInt(~labels={sinkName}, ~value=timeMillis)
+     counter->SafeCounter.increment(~labels={sinkName})
+   }
+ }
package/src/Prometheus.res.js CHANGED
@@ -27,6 +27,18 @@ var executeBatchDurationCounter = new PromClient.Counter({
    labelNames: []
  });

+ var storageWriteTimeCounter = new PromClient.Counter({
+   name: "envio_storage_write_time",
+   help: "Cumulative time spent writing batches to storage in milliseconds",
+   labelNames: []
+ });
+
+ var storageWriteCounter = new PromClient.Counter({
+   name: "envio_storage_write_count",
+   help: "Total number of batch writes to storage",
+   labelNames: []
+ });
+
  var allChainsSyncedToHead = new PromClient.Gauge({
    name: "hyperindex_synced_to_head",
    help: "All chains fully synced",
@@ -314,6 +326,14 @@ function incrementExecuteBatchDurationCounter(duration) {
    executeBatchDurationCounter.inc(duration);
  }

+ function incrementStorageWriteTimeCounter(duration) {
+   storageWriteTimeCounter.inc(duration);
+ }
+
+ function incrementStorageWriteCounter() {
+   storageWriteCounter.inc();
+ }
+
  function setSourceChainHeight(blockNumber, chainId) {
    sourceChainHeight.labels({
      chainId: chainId
@@ -808,9 +828,31 @@ var StorageLoad = {
    endOperation: endOperation
  };

+ var sinkLabelsSchema = S$RescriptSchema.object(function (s) {
+   return s.f("sink", S$RescriptSchema.string);
+ });
+
+ var timeCounter$5 = makeOrThrow("envio_sink_write_time", "Processing time taken to write data to sink. (milliseconds)", sinkLabelsSchema);
+
+ var counter$8 = makeOrThrow("envio_sink_write_count", "Cumulative number of successful sink write operations during the indexing process.", sinkLabelsSchema);
+
+ function increment$7(sinkName, timeMillis) {
+   handleInt(timeCounter$5, sinkName, timeMillis);
+   increment(counter$8, sinkName);
+ }
+
+ var SinkWrite = {
+   sinkLabelsSchema: sinkLabelsSchema,
+   timeCounter: timeCounter$5,
+   counter: counter$8,
+   increment: increment$7
+ };
+
  exports.loadEntitiesDurationCounter = loadEntitiesDurationCounter;
  exports.eventRouterDurationCounter = eventRouterDurationCounter;
  exports.executeBatchDurationCounter = executeBatchDurationCounter;
+ exports.storageWriteTimeCounter = storageWriteTimeCounter;
+ exports.storageWriteCounter = storageWriteCounter;
  exports.allChainsSyncedToHead = allChainsSyncedToHead;
  exports.sourceChainHeight = sourceChainHeight;
  exports.Labels = Labels;
@@ -823,6 +865,8 @@ exports.BenchmarkSummaryData = BenchmarkSummaryData;
  exports.incrementLoadEntityDurationCounter = incrementLoadEntityDurationCounter;
  exports.incrementEventRouterDurationCounter = incrementEventRouterDurationCounter;
  exports.incrementExecuteBatchDurationCounter = incrementExecuteBatchDurationCounter;
+ exports.incrementStorageWriteTimeCounter = incrementStorageWriteTimeCounter;
+ exports.incrementStorageWriteCounter = incrementStorageWriteCounter;
  exports.setSourceChainHeight = setSourceChainHeight;
  exports.setAllChainsSyncedToHead = setAllChainsSyncedToHead;
  exports.BenchmarkCounters = BenchmarkCounters;
@@ -860,4 +904,5 @@ exports.EffectCacheCount = EffectCacheCount;
  exports.EffectCacheInvalidationsCount = EffectCacheInvalidationsCount;
  exports.EffectQueueCount = EffectQueueCount;
  exports.StorageLoad = StorageLoad;
+ exports.SinkWrite = SinkWrite;
  /* loadEntitiesDurationCounter Not a pure module */
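The compiled module exposes the new write metrics as plain functions. A minimal sketch of recording them around a storage write, assuming a caller that already has an async write step (the require path and the writeToStorage callback are illustrative, not part of this package):

// Hypothetical instrumentation sketch; the path and callback are assumptions.
const Prometheus = require("envio/src/Prometheus.res.js");

async function timedStorageWrite(writeToStorage) {
  const start = Date.now();
  await writeToStorage();
  const elapsedMs = Date.now() - start;

  Prometheus.incrementStorageWriteTimeCounter(elapsedMs); // envio_storage_write_time
  Prometheus.incrementStorageWriteCounter();              // envio_storage_write_count

  // Per-sink variant, labeled by sink name (labeled ReScript args compile to positional ones):
  Prometheus.SinkWrite.increment("ClickHouse", elapsedMs);
}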
package/src/SafeCheckpointTracking.res CHANGED
@@ -4,7 +4,7 @@
  // The safe checkpoint id can be used to optimize checkpoints traverse logic and
  // make pruning operation super cheap.
  type t = {
-   checkpointIds: array<int>,
+   checkpointIds: array<float>,
    checkpointBlockNumbers: array<int>,
    maxReorgDepth: int,
  }
@@ -37,9 +37,10 @@ let getSafeCheckpointId = (safeCheckpointTracking: t, ~sourceBlockNumber: int) =
    let safeBlockNumber = sourceBlockNumber - safeCheckpointTracking.maxReorgDepth

    switch safeCheckpointTracking.checkpointIds {
-   | [] => 0
+   | [] => 0.
    | _
-     if safeCheckpointTracking.checkpointBlockNumbers->Belt.Array.getUnsafe(0) > safeBlockNumber => 0
+     if safeCheckpointTracking.checkpointBlockNumbers->Belt.Array.getUnsafe(0) >
+       safeBlockNumber => 0.
    | [checkpointId] => checkpointId
    | _ => {
        let trackingCheckpointsCount = safeCheckpointTracking.checkpointIds->Array.length
@@ -70,7 +71,7 @@ let updateOnNewBatch = (
    safeCheckpointTracking: t,
    ~sourceBlockNumber: int,
    ~chainId: int,
-   ~batchCheckpointIds: array<int>,
+   ~batchCheckpointIds: array<float>,
    ~batchCheckpointBlockNumbers: array<int>,
    ~batchCheckpointChainIds: array<int>,
  ) => {
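The only change in this file is widening checkpoint ids from int to float. ReScript's int is a 32-bit signed integer, while float compiles to an ordinary JavaScript number, so a plausible reading is that checkpoint ids can now exceed 2^31 - 1 without wrapping. A small JavaScript illustration (the concrete value is made up, not taken from the indexer):

// Illustration only: the value is hypothetical.
const largeCheckpointId = 3_000_000_000;  // > 2^31 - 1 = 2147483647
console.log(largeCheckpointId | 0);       // -1294967296 after 32-bit wraparound
console.log(largeCheckpointId);           // 3000000000 as a plain JS number (ReScript float)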
package/src/Sink.res ADDED
@@ -0,0 +1,47 @@
+ type t = {
+   name: string,
+   initialize: (
+     ~chainConfigs: array<Config.chain>=?,
+     ~entities: array<Internal.entityConfig>=?,
+     ~enums: array<Table.enumConfig<Table.enum>>=?,
+   ) => promise<unit>,
+   resume: (~checkpointId: float) => promise<unit>,
+   writeBatch: (
+     ~batch: Batch.t,
+     ~updatedEntities: array<Persistence.updatedEntity>,
+   ) => promise<unit>,
+ }
+
+ let makeClickHouse = (~host, ~database, ~username, ~password): t => {
+   let client = ClickHouse.createClient({
+     url: host,
+     username,
+     password,
+   })
+
+   // Don't assign it to client immediately,
+   // since it will fail if the database doesn't exist
+   // Call USE database instead
+   let database = switch database {
+   | Some(database) => database
+   | None => "envio_sink"
+   }
+
+   {
+     name: "ClickHouse",
+     initialize: (~chainConfigs as _=[], ~entities=[], ~enums=[]) => {
+       ClickHouse.initialize(client, ~database, ~entities, ~enums)
+     },
+     resume: (~checkpointId) => {
+       ClickHouse.resume(client, ~database, ~checkpointId)
+     },
+     writeBatch: async (~batch, ~updatedEntities) => {
+       await Promise.all(
+         updatedEntities->Belt.Array.map(({entityConfig, updates}) => {
+           ClickHouse.setUpdatesOrThrow(client, ~updates, ~entityConfig, ~database)
+         }),
+       )->Promise.ignoreValue
+       await ClickHouse.setCheckpointsOrThrow(client, ~batch, ~database)
+     },
+   }
+ }
package/src/Sink.res.js ADDED
@@ -0,0 +1,36 @@
+ // Generated by ReScript, PLEASE EDIT WITH CARE
+ 'use strict';
+
+ var Belt_Array = require("rescript/lib/js/belt_Array.js");
+ var ClickHouse = require("./bindings/ClickHouse.res.js");
+ var Client = require("@clickhouse/client");
+
+ function makeClickHouse(host, database, username, password) {
+   var client = Client.createClient({
+     url: host,
+     username: username,
+     password: password
+   });
+   var database$1 = database !== undefined ? database : "envio_sink";
+   return {
+     name: "ClickHouse",
+     initialize: (function ($staropt$star, $staropt$star$1, $staropt$star$2) {
+       $staropt$star !== undefined;
+       var entities = $staropt$star$1 !== undefined ? $staropt$star$1 : [];
+       var enums = $staropt$star$2 !== undefined ? $staropt$star$2 : [];
+       return ClickHouse.initialize(client, database$1, entities, enums);
+     }),
+     resume: (function (checkpointId) {
+       return ClickHouse.resume(client, database$1, checkpointId);
+     }),
+     writeBatch: (async function (batch, updatedEntities) {
+       await Promise.all(Belt_Array.map(updatedEntities, (function (param) {
+         return ClickHouse.setUpdatesOrThrow(client, param.updates, param.entityConfig, database$1);
+       })));
+       return await ClickHouse.setCheckpointsOrThrow(client, batch, database$1);
+     })
+   };
+ }
+
+ exports.makeClickHouse = makeClickHouse;
+ /* ClickHouse Not a pure module */
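From the compiled output, the sink is constructed with positional arguments and exposes the three operations declared by Sink.t. A hypothetical wiring sketch (the require path and all connection values are placeholders; this diff does not show how envio supplies them):

// Hypothetical construction of the ClickHouse sink; every value here is a placeholder.
const Sink = require("envio/src/Sink.res.js");

const sink = Sink.makeClickHouse(
  "http://localhost:8123", // host
  undefined,               // database: falls back to "envio_sink"
  "default",               // username
  ""                       // password
);

// Per Sink.t: create tables, resume from a checkpoint, then write batches.
// await sink.initialize(undefined, entities, enums);
// await sink.resume(checkpointId);
// await sink.writeBatch(batch, updatedEntities); // entity updates in parallel, then checkpoints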
package/src/Utils.res CHANGED
@@ -199,6 +199,8 @@ module UnsafeIntOperators = {
  }

  module Array = {
+   let immutableEmpty: array<unknown> = []
+
    @send
    external forEachAsync: (array<'a>, 'a => promise<unit>) => unit = "forEach"

package/src/Utils.res.js CHANGED
@@ -199,6 +199,8 @@ var $$Math = {

  var UnsafeIntOperators = {};

+ var immutableEmpty = [];
+
  function mergeSorted(f, xs, ys) {
    if (xs.length === 0) {
      return ys;
@@ -345,6 +347,7 @@ function interleave(arr, separator) {
  }

  var $$Array$1 = {
+   immutableEmpty: immutableEmpty,
    mergeSorted: mergeSorted,
    clearInPlace: clearInPlace,
    setIndexImmutable: setIndexImmutable,