envio 3.0.0-alpha.16 → 3.0.0-alpha.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/bin.mjs +49 -0
  2. package/package.json +8 -6
  3. package/src/Batch.res +7 -7
  4. package/src/Batch.res.mjs +8 -7
  5. package/src/ChainFetcher.res +2 -7
  6. package/src/ChainFetcher.res.mjs +3 -5
  7. package/src/ChainManager.res +11 -11
  8. package/src/ChainManager.res.mjs +9 -10
  9. package/src/Change.res +3 -3
  10. package/src/Env.res +0 -57
  11. package/src/Env.res.mjs +1 -61
  12. package/src/EventProcessing.res +29 -36
  13. package/src/EventProcessing.res.mjs +38 -42
  14. package/src/FetchState.res +1 -1
  15. package/src/FetchState.res.mjs +1 -1
  16. package/src/GlobalState.res +59 -63
  17. package/src/GlobalState.res.mjs +43 -73
  18. package/src/Internal.res +2 -2
  19. package/src/LoadLayer.res +10 -10
  20. package/src/LoadLayer.res.mjs +5 -5
  21. package/src/Main.res +3 -3
  22. package/src/Main.res.mjs +2 -2
  23. package/src/Persistence.res +2 -2
  24. package/src/PgStorage.res +20 -8
  25. package/src/PgStorage.res.mjs +15 -6
  26. package/src/Prometheus.res +253 -220
  27. package/src/Prometheus.res.mjs +268 -300
  28. package/src/SafeCheckpointTracking.res +4 -4
  29. package/src/SafeCheckpointTracking.res.mjs +2 -2
  30. package/src/Sink.res +1 -1
  31. package/src/TestIndexer.res +8 -8
  32. package/src/TestIndexer.res.mjs +5 -7
  33. package/src/TestIndexerProxyStorage.res +3 -3
  34. package/src/UserContext.res +1 -1
  35. package/src/Utils.res +1 -0
  36. package/src/bindings/BigInt.res +10 -0
  37. package/src/bindings/BigInt.res.mjs +15 -0
  38. package/src/bindings/ClickHouse.res +26 -6
  39. package/src/bindings/ClickHouse.res.mjs +34 -9
  40. package/src/bindings/Hrtime.res +13 -0
  41. package/src/bindings/Hrtime.res.mjs +20 -0
  42. package/src/bindings/Hrtime.resi +4 -0
  43. package/src/bindings/NodeJs.res +5 -0
  44. package/src/bindings/Postgres.res +1 -1
  45. package/src/bindings/PromClient.res +0 -9
  46. package/src/bindings/PromClient.res.mjs +0 -3
  47. package/src/db/EntityHistory.res +7 -7
  48. package/src/db/EntityHistory.res.mjs +9 -9
  49. package/src/db/InternalTable.res +48 -36
  50. package/src/db/InternalTable.res.mjs +46 -28
  51. package/src/db/Table.res +6 -0
  52. package/src/db/Table.res.mjs +5 -4
  53. package/src/sources/HyperFuelSource.res +9 -5
  54. package/src/sources/HyperFuelSource.res.mjs +14 -10
  55. package/src/sources/HyperSyncSource.res +9 -9
  56. package/src/sources/HyperSyncSource.res.mjs +17 -12
  57. package/src/sources/RpcSource.res +16 -8
  58. package/src/sources/RpcSource.res.mjs +17 -4
  59. package/src/sources/Source.res +3 -3
  60. package/src/sources/SourceManager.res +2 -8
  61. package/src/sources/SourceManager.res.mjs +1 -6
  62. package/src/sources/Svm.res +6 -2
  63. package/src/sources/Svm.res.mjs +7 -2
  64. package/src/tui/Tui.res +5 -6
  65. package/src/tui/Tui.res.mjs +4 -5
  66. package/src/tui/components/SyncETA.res +4 -23
  67. package/src/tui/components/SyncETA.res.mjs +7 -13
  68. package/src/tui/components/TuiData.res +5 -4
  69. package/src/tui/components/TuiData.res.mjs +5 -0
  70. package/src/Benchmark.res +0 -394
  71. package/src/Benchmark.res.mjs +0 -398
package/bin.mjs ADDED
@@ -0,0 +1,49 @@
1
+ #!/usr/bin/env node
2
+ //@ts-check
3
+
4
+ import { spawnSync } from "child_process";
5
+ import { createRequire } from "module";
6
+
7
+ const require = createRequire(import.meta.url);
8
+
9
+ /**
10
+ * Returns the executable path for envio located inside node_modules.
11
+ * The naming convention is envio-${os}-${arch}
12
+ * @see https://nodejs.org/api/os.html#osarch
13
+ * @see https://nodejs.org/api/os.html#osplatform
14
+ * @example "x/xx/node_modules/envio-darwin-arm64"
15
+ */
16
+ function getExePath() {
17
+ const pkg = `envio-${process.platform}-${process.arch}`;
18
+ const bin = "bin/envio";
19
+
20
+ try {
21
+ return require.resolve(`${pkg}/${bin}`);
22
+ } catch {}
23
+
24
+ throw new Error(
25
+ `Couldn't find envio binary package "${pkg}".\n` +
26
+ `Checked: require.resolve("${pkg}/${bin}")\n` +
27
+ `If you're using pnpm, yarn, or npm with --omit=optional, ensure optional ` +
28
+ `dependencies are installed:\n` +
29
+ ` npm install ${pkg}\n`
30
+ );
31
+ }
32
+
33
+ /**
34
+ * Runs `envio` with args using nodejs spawn
35
+ */
36
+ function runEnvio() {
37
+ const args = process.argv.slice(2);
38
+ const exePath = getExePath();
39
+
40
+ const processResult = spawnSync(exePath, args, { stdio: "inherit" });
41
+
42
+ if (processResult.error) {
43
+ console.error(`Failed to run envio binary at ${exePath}: ${processResult.error.message}`);
44
+ process.exit(1);
45
+ }
46
+ process.exit(processResult.status ?? 1);
47
+ }
48
+
49
+ runEnvio();
package/package.json CHANGED
@@ -1,8 +1,9 @@
1
1
  {
2
2
  "name": "envio",
3
- "version": "3.0.0-alpha.16",
3
+ "version": "3.0.0-alpha.18",
4
4
  "type": "module",
5
5
  "description": "A latency and sync speed optimized, developer friendly blockchain data indexer.",
6
+ "bin": "./bin.mjs",
6
7
  "main": "./index.js",
7
8
  "types": "./index.d.ts",
8
9
  "repository": {
@@ -29,6 +30,7 @@
29
30
  "node": ">=22.0.0"
30
31
  },
31
32
  "files": [
33
+ "bin.mjs",
32
34
  "evm.schema.json",
33
35
  "fuel.schema.json",
34
36
  "svm.schema.json",
@@ -63,9 +65,9 @@
63
65
  "tsx": "4.21.0"
64
66
  },
65
67
  "optionalDependencies": {
66
- "envio-linux-x64": "3.0.0-alpha.16",
67
- "envio-linux-arm64": "3.0.0-alpha.16",
68
- "envio-darwin-x64": "3.0.0-alpha.16",
69
- "envio-darwin-arm64": "3.0.0-alpha.16"
68
+ "envio-linux-x64": "3.0.0-alpha.18",
69
+ "envio-linux-arm64": "3.0.0-alpha.18",
70
+ "envio-darwin-x64": "3.0.0-alpha.18",
71
+ "envio-darwin-arm64": "3.0.0-alpha.18"
70
72
  }
71
- }
73
+ }
package/src/Batch.res CHANGED
@@ -7,7 +7,7 @@ type chainAfterBatch = {
7
7
  batchSize: int,
8
8
  progressBlockNumber: int,
9
9
  sourceBlockNumber: int,
10
- totalEventsProcessed: int,
10
+ totalEventsProcessed: float,
11
11
  fetchState: FetchState.t,
12
12
  isProgressAtHeadWhenBatchCreated: bool,
13
13
  }
@@ -17,7 +17,7 @@ type chainBeforeBatch = {
17
17
  reorgDetection: ReorgDetection.t,
18
18
  progressBlockNumber: int,
19
19
  sourceBlockNumber: int,
20
- totalEventsProcessed: int,
20
+ totalEventsProcessed: float,
21
21
  chainConfig: Config.chain,
22
22
  }
23
23
 
@@ -26,7 +26,7 @@ type t = {
26
26
  items: array<Internal.item>,
27
27
  progressedChainsById: dict<chainAfterBatch>,
28
28
  // Unnest-like checkpoint fields:
29
- checkpointIds: array<float>,
29
+ checkpointIds: array<bigint>,
30
30
  checkpointChainIds: array<int>,
31
31
  checkpointBlockNumbers: array<int>,
32
32
  checkpointBlockHashes: array<Js.Null.t<string>>,
@@ -111,7 +111,7 @@ let getProgressedChainsById = {
111
111
  batchSize,
112
112
  progressBlockNumber: progressBlockNumberAfterBatch,
113
113
  sourceBlockNumber: chainBeforeBatch.sourceBlockNumber,
114
- totalEventsProcessed: chainBeforeBatch.totalEventsProcessed + batchSize,
114
+ totalEventsProcessed: chainBeforeBatch.totalEventsProcessed +. batchSize->Int.toFloat,
115
115
  fetchState: fetchStateAfterBatch,
116
116
  isProgressAtHeadWhenBatchCreated: progressBlockNumberAfterBatch >=
117
117
  chainBeforeBatch.sourceBlockNumber - chainBeforeBatch.chainConfig.blockLag,
@@ -199,7 +199,7 @@ let addReorgCheckpoints = (
199
199
  for blockNumber in fromBlockExclusive + 1 to toBlockExclusive - 1 {
200
200
  switch reorgDetection->ReorgDetection.getHashByBlockNumber(~blockNumber) {
201
201
  | Js.Null.Value(hash) =>
202
- let checkpointId = prevCheckpointId.contents +. 1.
202
+ let checkpointId = prevCheckpointId.contents->BigInt.add(1n)
203
203
  prevCheckpointId := checkpointId
204
204
 
205
205
  mutCheckpointIds->Js.Array2.push(checkpointId)->ignore
@@ -280,7 +280,7 @@ let prepareOrderedBatch = (
280
280
  ~mutCheckpointEventsProcessed=checkpointEventsProcessed,
281
281
  )
282
282
 
283
- let checkpointId = prevCheckpointId.contents +. 1.
283
+ let checkpointId = prevCheckpointId.contents->BigInt.add(1n)
284
284
 
285
285
  items
286
286
  ->Js.Array2.push(item0)
@@ -424,7 +424,7 @@ let prepareUnorderedBatch = (
424
424
  ~mutCheckpointEventsProcessed=checkpointEventsProcessed,
425
425
  )
426
426
 
427
- let checkpointId = prevCheckpointId.contents +. 1.
427
+ let checkpointId = prevCheckpointId.contents->BigInt.add(1n)
428
428
 
429
429
  checkpointIds->Js.Array2.push(checkpointId)->ignore
430
430
  checkpointChainIds->Js.Array2.push(fetchState.chainId)->ignore
package/src/Batch.res.mjs CHANGED
@@ -1,6 +1,7 @@
1
1
  // Generated by ReScript, PLEASE EDIT WITH CARE
2
2
 
3
3
  import * as Utils from "./Utils.res.mjs";
4
+ import * as $$BigInt from "./bindings/BigInt.res.mjs";
4
5
  import * as ChainMap from "./ChainMap.res.mjs";
5
6
  import * as Belt_Array from "rescript/lib/es6/belt_Array.js";
6
7
  import * as FetchState from "./FetchState.res.mjs";
@@ -102,7 +103,7 @@ function addReorgCheckpoints(prevCheckpointId, reorgDetection, fromBlockExclusiv
102
103
  for(var blockNumber = fromBlockExclusive + 1 ,blockNumber_finish = toBlockExclusive - 1; blockNumber <= blockNumber_finish; ++blockNumber){
103
104
  var hash = ReorgDetection.getHashByBlockNumber(reorgDetection, blockNumber);
104
105
  if (hash !== null) {
105
- var checkpointId = prevCheckpointId$1 + 1;
106
+ var checkpointId = $$BigInt.add(prevCheckpointId$1, 1n);
106
107
  prevCheckpointId$1 = checkpointId;
107
108
  mutCheckpointIds.push(checkpointId);
108
109
  mutCheckpointChainIds.push(chainId);
@@ -151,7 +152,7 @@ function prepareOrderedBatch(checkpointIdBeforeBatch, chainsBeforeBatch, batchSi
151
152
  for(var blockNumber$1 = prevBlockNumber + 1 ,blockNumber_finish = blockNumber - 1; blockNumber$1 <= blockNumber_finish; ++blockNumber$1){
152
153
  var hash = ReorgDetection.getHashByBlockNumber(reorgDetection, blockNumber$1);
153
154
  if (hash !== null) {
154
- var checkpointId = prevCheckpointId$2 + 1;
155
+ var checkpointId = $$BigInt.add(prevCheckpointId$2, 1n);
155
156
  prevCheckpointId$2 = checkpointId;
156
157
  checkpointIds.push(checkpointId);
157
158
  checkpointChainIds.push(chainId);
@@ -166,7 +167,7 @@ function prepareOrderedBatch(checkpointIdBeforeBatch, chainsBeforeBatch, batchSi
166
167
  tmp = prevCheckpointId$1;
167
168
  }
168
169
  prevCheckpointId = tmp;
169
- var checkpointId$1 = prevCheckpointId + 1;
170
+ var checkpointId$1 = $$BigInt.add(prevCheckpointId, 1n);
170
171
  items.push(item0);
171
172
  for(var idx = 1 ,idx_finish = newItemsCount - 1; idx <= idx_finish; ++idx){
172
173
  items.push(fetchState.buffer[itemsCountBefore + idx]);
@@ -192,7 +193,7 @@ function prepareOrderedBatch(checkpointIdBeforeBatch, chainsBeforeBatch, batchSi
192
193
  for(var blockNumber$2 = prevBlockNumber + 1 ,blockNumber_finish$1 = toBlockExclusive - 1; blockNumber$2 <= blockNumber_finish$1; ++blockNumber$2){
193
194
  var hash$1 = ReorgDetection.getHashByBlockNumber(reorgDetection$1, blockNumber$2);
194
195
  if (hash$1 !== null) {
195
- var checkpointId$2 = prevCheckpointId$4 + 1;
196
+ var checkpointId$2 = $$BigInt.add(prevCheckpointId$4, 1n);
196
197
  prevCheckpointId$4 = checkpointId$2;
197
198
  checkpointIds.push(checkpointId$2);
198
199
  checkpointChainIds.push(chainId$1);
@@ -262,7 +263,7 @@ function prepareUnorderedBatch(checkpointIdBeforeBatch, chainsBeforeBatch, batch
262
263
  for(var blockNumber$1 = fromBlockExclusive + 1 ,blockNumber_finish = blockNumber - 1; blockNumber$1 <= blockNumber_finish; ++blockNumber$1){
263
264
  var hash = ReorgDetection.getHashByBlockNumber(reorgDetection, blockNumber$1);
264
265
  if (hash !== null) {
265
- var checkpointId = prevCheckpointId$2 + 1;
266
+ var checkpointId = $$BigInt.add(prevCheckpointId$2, 1n);
266
267
  prevCheckpointId$2 = checkpointId;
267
268
  checkpointIds.push(checkpointId);
268
269
  checkpointChainIds.push(chainId);
@@ -277,7 +278,7 @@ function prepareUnorderedBatch(checkpointIdBeforeBatch, chainsBeforeBatch, batch
277
278
  tmp = prevCheckpointId$1;
278
279
  }
279
280
  prevCheckpointId = tmp;
280
- var checkpointId$1 = prevCheckpointId + 1;
281
+ var checkpointId$1 = $$BigInt.add(prevCheckpointId, 1n);
281
282
  checkpointIds.push(checkpointId$1);
282
283
  checkpointChainIds.push(fetchState.chainId);
283
284
  checkpointBlockNumbers.push(blockNumber);
@@ -306,7 +307,7 @@ function prepareUnorderedBatch(checkpointIdBeforeBatch, chainsBeforeBatch, batch
306
307
  for(var blockNumber$2 = fromBlockExclusive$1 + 1 ,blockNumber_finish$1 = toBlockExclusive - 1; blockNumber$2 <= blockNumber_finish$1; ++blockNumber$2){
307
308
  var hash$1 = ReorgDetection.getHashByBlockNumber(reorgDetection$1, blockNumber$2);
308
309
  if (hash$1 !== null) {
309
- var checkpointId$2 = prevCheckpointId$4 + 1;
310
+ var checkpointId$2 = $$BigInt.add(prevCheckpointId$4, 1n);
310
311
  prevCheckpointId$4 = checkpointId$2;
311
312
  checkpointIds.push(checkpointId$2);
312
313
  checkpointChainIds.push(chainId$1);
@@ -15,8 +15,7 @@ type t = {
15
15
  isProgressAtHead: bool,
16
16
  timestampCaughtUpToHeadOrEndblock: option<Js.Date.t>,
17
17
  committedProgressBlockNumber: int,
18
- numEventsProcessed: int,
19
- numBatchesFetched: int,
18
+ numEventsProcessed: float,
20
19
  reorgDetection: ReorgDetection.t,
21
20
  safeCheckpointTracking: option<SafeCheckpointTracking.t>,
22
21
  }
@@ -35,7 +34,6 @@ let make = (
35
34
  ~logger,
36
35
  ~timestampCaughtUpToHeadOrEndblock,
37
36
  ~numEventsProcessed,
38
- ~numBatchesFetched,
39
37
  ~isInReorgThreshold,
40
38
  ~reorgCheckpoints: array<Internal.reorgCheckpoint>,
41
39
  ~maxReorgDepth,
@@ -261,7 +259,6 @@ let make = (
261
259
  committedProgressBlockNumber: progressBlockNumber,
262
260
  timestampCaughtUpToHeadOrEndblock,
263
261
  numEventsProcessed,
264
- numBatchesFetched,
265
262
  }
266
263
  }
267
264
 
@@ -284,8 +281,7 @@ let makeFromConfig = (
284
281
  ~maxReorgDepth=chainConfig.maxReorgDepth,
285
282
  ~progressBlockNumber=-1,
286
283
  ~timestampCaughtUpToHeadOrEndblock=None,
287
- ~numEventsProcessed=0,
288
- ~numBatchesFetched=0,
284
+ ~numEventsProcessed=0.,
289
285
  ~targetBufferSize,
290
286
  ~logger,
291
287
  ~dynamicContracts=[],
@@ -332,7 +328,6 @@ let makeFromDbState = async (
332
328
  ? None
333
329
  : resumedChainState.timestampCaughtUpToHeadOrEndblock,
334
330
  ~numEventsProcessed=resumedChainState.numEventsProcessed,
335
- ~numBatchesFetched=0,
336
331
  ~logger,
337
332
  ~targetBufferSize,
338
333
  ~isInReorgThreshold,
@@ -23,7 +23,7 @@ import * as HyperFuelSource from "./sources/HyperFuelSource.res.mjs";
23
23
  import * as Caml_js_exceptions from "rescript/lib/es6/caml_js_exceptions.js";
24
24
  import * as SafeCheckpointTracking from "./SafeCheckpointTracking.res.mjs";
25
25
 
26
- function make(chainConfig, dynamicContracts, startBlock, endBlock, firstEventBlockOpt, progressBlockNumber, config, registrations, targetBufferSize, logger, timestampCaughtUpToHeadOrEndblock, numEventsProcessed, numBatchesFetched, isInReorgThreshold, reorgCheckpoints, maxReorgDepth, knownHeightOpt) {
26
+ function make(chainConfig, dynamicContracts, startBlock, endBlock, firstEventBlockOpt, progressBlockNumber, config, registrations, targetBufferSize, logger, timestampCaughtUpToHeadOrEndblock, numEventsProcessed, isInReorgThreshold, reorgCheckpoints, maxReorgDepth, knownHeightOpt) {
27
27
  var firstEventBlock = firstEventBlockOpt !== undefined ? Caml_option.valFromOption(firstEventBlockOpt) : undefined;
28
28
  var knownHeight = knownHeightOpt !== undefined ? knownHeightOpt : 0;
29
29
  var eventRouter = EventRouter.empty();
@@ -158,7 +158,6 @@ function make(chainConfig, dynamicContracts, startBlock, endBlock, firstEventBlo
158
158
  timestampCaughtUpToHeadOrEndblock: timestampCaughtUpToHeadOrEndblock,
159
159
  committedProgressBlockNumber: progressBlockNumber,
160
160
  numEventsProcessed: numEventsProcessed,
161
- numBatchesFetched: numBatchesFetched,
162
161
  reorgDetection: ReorgDetection.make(chainReorgCheckpoints, maxReorgDepth, config.shouldRollbackOnReorg),
163
162
  safeCheckpointTracking: SafeCheckpointTracking.make(maxReorgDepth, config.shouldRollbackOnReorg, chainReorgCheckpoints)
164
163
  };
@@ -168,7 +167,7 @@ function makeFromConfig(chainConfig, config, registrations, targetBufferSize, kn
168
167
  var logger = Logging.createChild({
169
168
  chainId: chainConfig.id
170
169
  });
171
- return make(chainConfig, [], chainConfig.startBlock, chainConfig.endBlock, undefined, -1, config, registrations, targetBufferSize, logger, undefined, 0, 0, false, [], chainConfig.maxReorgDepth, knownHeight);
170
+ return make(chainConfig, [], chainConfig.startBlock, chainConfig.endBlock, undefined, -1, config, registrations, targetBufferSize, logger, undefined, 0, false, [], chainConfig.maxReorgDepth, knownHeight);
172
171
  }
173
172
 
174
173
  async function makeFromDbState(chainConfig, resumedChainState, reorgCheckpoints, isInReorgThreshold, config, registrations, targetBufferSize) {
@@ -178,7 +177,7 @@ async function makeFromDbState(chainConfig, resumedChainState, reorgCheckpoints,
178
177
  });
179
178
  Prometheus.ProgressEventsCount.set(resumedChainState.numEventsProcessed, chainId);
180
179
  var progressBlockNumber = resumedChainState.progressBlockNumber >= 0 ? resumedChainState.progressBlockNumber : resumedChainState.startBlock - 1 | 0;
181
- return make(chainConfig, resumedChainState.dynamicContracts, resumedChainState.startBlock, resumedChainState.endBlock, Caml_option.some(resumedChainState.firstEventBlockNumber), progressBlockNumber, config, registrations, targetBufferSize, logger, Env.updateSyncTimeOnRestart ? undefined : resumedChainState.timestampCaughtUpToHeadOrEndblock, resumedChainState.numEventsProcessed, 0, isInReorgThreshold, reorgCheckpoints, resumedChainState.maxReorgDepth, resumedChainState.sourceBlockNumber);
180
+ return make(chainConfig, resumedChainState.dynamicContracts, resumedChainState.startBlock, resumedChainState.endBlock, Caml_option.some(resumedChainState.firstEventBlockNumber), progressBlockNumber, config, registrations, targetBufferSize, logger, Env.updateSyncTimeOnRestart ? undefined : resumedChainState.timestampCaughtUpToHeadOrEndblock, resumedChainState.numEventsProcessed, isInReorgThreshold, reorgCheckpoints, resumedChainState.maxReorgDepth, resumedChainState.sourceBlockNumber);
182
181
  }
183
182
 
184
183
  function getContractStartBlock(config, chain, contractName) {
@@ -264,7 +263,6 @@ function handleQueryResult(chainFetcher, query, newItems, newItemsWithDcs, lates
264
263
  timestampCaughtUpToHeadOrEndblock: chainFetcher.timestampCaughtUpToHeadOrEndblock,
265
264
  committedProgressBlockNumber: chainFetcher.committedProgressBlockNumber,
266
265
  numEventsProcessed: chainFetcher.numEventsProcessed,
267
- numBatchesFetched: chainFetcher.numBatchesFetched,
268
266
  reorgDetection: chainFetcher.reorgDetection,
269
267
  safeCheckpointTracking: chainFetcher.safeCheckpointTracking
270
268
  };
@@ -1,7 +1,7 @@
1
1
  open Belt
2
2
 
3
3
  type t = {
4
- committedCheckpointId: float,
4
+ committedCheckpointId: bigint,
5
5
  chainFetchers: ChainMap.t<ChainFetcher.t>,
6
6
  multichain: Config.multichain,
7
7
  isInReorgThreshold: bool,
@@ -113,11 +113,11 @@ let nextItemIsNone = (chainManager: t): bool => {
113
113
 
114
114
  let createBatch = (chainManager: t, ~batchSizeTarget: int, ~isRollback: bool): Batch.t => {
115
115
  Batch.make(
116
- ~checkpointIdBeforeBatch=chainManager.committedCheckpointId +. (
116
+ ~checkpointIdBeforeBatch=chainManager.committedCheckpointId->BigInt.add(
117
117
  // Since for rollback we have a diff checkpoint id.
118
118
  // This is needed to correctly overwrite old state
119
119
  // in an append-only ClickHouse insert.
120
- isRollback ? 1. : 0.
120
+ isRollback ? 1n : 0n
121
121
  ),
122
122
  ~chainsBeforeBatch=chainManager.chainFetchers->ChainMap.map((cf): Batch.chainBeforeBatch => {
123
123
  fetchState: cf.fetchState,
@@ -145,8 +145,7 @@ let isActivelyIndexing = chainManager =>
145
145
  let getSafeCheckpointId = (chainManager: t) => {
146
146
  let chainFetchers = chainManager.chainFetchers->ChainMap.values
147
147
 
148
- let infinity = (%raw(`Infinity`): float)
149
- let result = ref(infinity)
148
+ let result: ref<option<bigint>> = ref(None)
150
149
 
151
150
  for idx in 0 to chainFetchers->Array.length - 1 {
152
151
  let chainFetcher = chainFetchers->Array.getUnsafe(idx)
@@ -157,16 +156,17 @@ let getSafeCheckpointId = (chainManager: t) => {
157
156
  safeCheckpointTracking->SafeCheckpointTracking.getSafeCheckpointId(
158
157
  ~sourceBlockNumber=chainFetcher.fetchState.knownHeight,
159
158
  )
160
- if safeCheckpointId < result.contents {
161
- result := safeCheckpointId
159
+ switch result.contents {
160
+ | None => result := Some(safeCheckpointId)
161
+ | Some(current) if safeCheckpointId < current => result := Some(safeCheckpointId)
162
+ | _ => ()
162
163
  }
163
164
  }
164
165
  }
165
166
  }
166
167
 
167
- if result.contents === infinity || result.contents === 0. {
168
- None // No safe checkpoint found
169
- } else {
170
- Some(result.contents)
168
+ switch result.contents {
169
+ | Some(id) if id > 0n => Some(id)
170
+ | _ => None // No safe checkpoint found
171
171
  }
172
172
  }
@@ -3,6 +3,7 @@
3
3
  import * as Env from "./Env.res.mjs";
4
4
  import * as Batch from "./Batch.res.mjs";
5
5
  import * as Utils from "./Utils.res.mjs";
6
+ import * as $$BigInt from "./bindings/BigInt.res.mjs";
6
7
  import * as Config from "./Config.res.mjs";
7
8
  import * as ChainMap from "./ChainMap.res.mjs";
8
9
  import * as Belt_Array from "rescript/lib/es6/belt_Array.js";
@@ -85,9 +86,7 @@ function nextItemIsNone(chainManager) {
85
86
  }
86
87
 
87
88
  function createBatch(chainManager, batchSizeTarget, isRollback) {
88
- return Batch.make(chainManager.committedCheckpointId + (
89
- isRollback ? 1 : 0
90
- ), ChainMap.map(chainManager.chainFetchers, (function (cf) {
89
+ return Batch.make($$BigInt.add(chainManager.committedCheckpointId, isRollback ? 1n : 0n), ChainMap.map(chainManager.chainFetchers, (function (cf) {
91
90
  return {
92
91
  fetchState: cf.fetchState,
93
92
  reorgDetection: cf.reorgDetection,
@@ -111,25 +110,25 @@ function isActivelyIndexing(chainManager) {
111
110
 
112
111
  function getSafeCheckpointId(chainManager) {
113
112
  var chainFetchers = ChainMap.values(chainManager.chainFetchers);
114
- var infinity = Infinity;
115
- var result = infinity;
113
+ var result;
116
114
  for(var idx = 0 ,idx_finish = chainFetchers.length; idx < idx_finish; ++idx){
117
115
  var chainFetcher = chainFetchers[idx];
118
116
  var safeCheckpointTracking = chainFetcher.safeCheckpointTracking;
119
117
  if (safeCheckpointTracking !== undefined) {
120
118
  var safeCheckpointId = SafeCheckpointTracking.getSafeCheckpointId(safeCheckpointTracking, chainFetcher.fetchState.knownHeight);
121
- if (safeCheckpointId < result) {
119
+ var current = result;
120
+ if (!(current !== undefined && safeCheckpointId >= current)) {
122
121
  result = safeCheckpointId;
123
122
  }
124
123
 
125
124
  }
126
125
 
127
126
  }
128
- if (result === infinity || result === 0) {
129
- return ;
130
- } else {
131
- return result;
127
+ var id = result;
128
+ if (id !== undefined && id > 0n) {
129
+ return id;
132
130
  }
131
+
133
132
  }
134
133
 
135
134
  export {
package/src/Change.res CHANGED
@@ -1,9 +1,9 @@
1
1
  @tag("type")
2
2
  type t<'entity> =
3
- | @as("SET") Set({entityId: string, entity: 'entity, checkpointId: float})
4
- | @as("DELETE") Delete({entityId: string, checkpointId: float})
3
+ | @as("SET") Set({entityId: string, entity: 'entity, checkpointId: bigint})
4
+ | @as("DELETE") Delete({entityId: string, checkpointId: bigint})
5
5
 
6
6
  @get
7
7
  external getEntityId: t<'entity> => string = "entityId"
8
8
  @get
9
- external getCheckpointId: t<'entity> => float = "checkpointId"
9
+ external getCheckpointId: t<'entity> => bigint = "checkpointId"
package/src/Env.res CHANGED
@@ -64,50 +64,6 @@ let hypersyncClientSerializationFormat =
64
64
  let hypersyncClientEnableQueryCaching =
65
65
  envSafe->EnvSafe.get("ENVIO_HYPERSYNC_CLIENT_ENABLE_QUERY_CACHING", S.bool, ~fallback=true)
66
66
 
67
- module Benchmark = {
68
- module SaveDataStrategy: {
69
- type t
70
- let schema: S.t<t>
71
- let default: t
72
- let shouldSaveJsonFile: t => bool
73
- let shouldSavePrometheus: t => bool
74
- let shouldSaveData: t => bool
75
- } = {
76
- @unboxed
77
- type t = Bool(bool) | @as("json-file") JsonFile | @as("prometheus") Prometheus
78
-
79
- let schema = S.enum([Bool(true), Bool(false), JsonFile, Prometheus])
80
- let default = Bool(false)
81
-
82
- let shouldSaveJsonFile = self =>
83
- switch self {
84
- | JsonFile | Bool(true) => true
85
- | _ => false
86
- }
87
-
88
- let shouldSavePrometheus = _ => true
89
-
90
- let shouldSaveData = self => self->shouldSavePrometheus || self->shouldSaveJsonFile
91
- }
92
-
93
- let saveDataStrategy =
94
- envSafe->EnvSafe.get(
95
- "ENVIO_SAVE_BENCHMARK_DATA",
96
- SaveDataStrategy.schema,
97
- ~fallback=SaveDataStrategy.default,
98
- )
99
-
100
- let shouldSaveData = saveDataStrategy->SaveDataStrategy.shouldSaveData
101
-
102
- /**
103
- StdDev involves saving sum of squares of data points, which could get very large.
104
-
105
- Currently only do this for local runs on json-file and not prometheus.
106
- */
107
- let shouldSaveStdDev =
108
- saveDataStrategy->SaveDataStrategy.shouldSaveJsonFile
109
- }
110
-
111
67
  let logStrategy =
112
68
  envSafe->EnvSafe.get(
113
69
  "LOG_STRATEGY",
@@ -239,19 +195,6 @@ module ThrottleWrites = {
239
195
  ~devFallback=30_000,
240
196
  )
241
197
 
242
- let liveMetricsBenchmarkIntervalMillis =
243
- envSafe->EnvSafe.get(
244
- "ENVIO_THROTTLE_LIVE_METRICS_BENCHMARK_INTERVAL_MILLIS",
245
- S.int,
246
- ~devFallback=1_000,
247
- )
248
-
249
- let jsonFileBenchmarkIntervalMillis =
250
- envSafe->EnvSafe.get(
251
- "ENVIO_THROTTLE_JSON_FILE_BENCHMARK_INTERVAL_MILLIS",
252
- S.int,
253
- ~devFallback=500,
254
- )
255
198
  }
256
199
 
257
200
  // You need to close the envSafe after you're done with it so that it immediately tells you about your misconfigured environment on startup.
package/src/Env.res.mjs CHANGED
@@ -4,7 +4,6 @@ import * as EnvSafe from "rescript-envsafe/src/EnvSafe.res.mjs";
4
4
  import * as Logging from "./Logging.res.mjs";
5
5
  import * as Postgres from "./bindings/Postgres.res.mjs";
6
6
  import * as Belt_Option from "rescript/lib/es6/belt_Option.js";
7
- import * as Caml_option from "rescript/lib/es6/caml_option.js";
8
7
  import * as HyperSyncClient from "./sources/HyperSyncClient.res.mjs";
9
8
  import * as S$RescriptSchema from "rescript-schema/src/S.res.mjs";
10
9
 
@@ -59,58 +58,6 @@ var hypersyncClientSerializationFormat = EnvSafe.get(envSafe, "ENVIO_HYPERSYNC_C
59
58
 
60
59
  var hypersyncClientEnableQueryCaching = EnvSafe.get(envSafe, "ENVIO_HYPERSYNC_CLIENT_ENABLE_QUERY_CACHING", S$RescriptSchema.bool, undefined, true, undefined, undefined);
61
60
 
62
- var schema = S$RescriptSchema.$$enum([
63
- true,
64
- false,
65
- "json-file",
66
- "prometheus"
67
- ]);
68
-
69
- var $$default = false;
70
-
71
- function shouldSaveJsonFile(self) {
72
- if (typeof self !== "boolean") {
73
- if (self === "json-file") {
74
- return true;
75
- } else {
76
- return false;
77
- }
78
- } else if (self) {
79
- return true;
80
- } else {
81
- return false;
82
- }
83
- }
84
-
85
- function shouldSavePrometheus(param) {
86
- return true;
87
- }
88
-
89
- function shouldSaveData(self) {
90
- return true;
91
- }
92
-
93
- var SaveDataStrategy = {
94
- schema: schema,
95
- $$default: $$default,
96
- shouldSaveJsonFile: shouldSaveJsonFile,
97
- shouldSavePrometheus: shouldSavePrometheus,
98
- shouldSaveData: shouldSaveData
99
- };
100
-
101
- var saveDataStrategy = EnvSafe.get(envSafe, "ENVIO_SAVE_BENCHMARK_DATA", schema, undefined, Caml_option.some($$default), undefined, undefined);
102
-
103
- var shouldSaveData$1 = shouldSaveData(saveDataStrategy);
104
-
105
- var shouldSaveStdDev = shouldSaveJsonFile(saveDataStrategy);
106
-
107
- var Benchmark = {
108
- SaveDataStrategy: SaveDataStrategy,
109
- saveDataStrategy: saveDataStrategy,
110
- shouldSaveData: shouldSaveData$1,
111
- shouldSaveStdDev: shouldSaveStdDev
112
- };
113
-
114
61
  var logStrategy = EnvSafe.get(envSafe, "LOG_STRATEGY", S$RescriptSchema.$$enum([
115
62
  "ecs-file",
116
63
  "ecs-console",
@@ -229,15 +176,9 @@ var chainMetadataIntervalMillis = EnvSafe.get(envSafe, "ENVIO_THROTTLE_CHAIN_MET
229
176
 
230
177
  var pruneStaleDataIntervalMillis = EnvSafe.get(envSafe, "ENVIO_THROTTLE_PRUNE_STALE_DATA_INTERVAL_MILLIS", S$RescriptSchema.$$int, undefined, undefined, 30000, undefined);
231
178
 
232
- var liveMetricsBenchmarkIntervalMillis = EnvSafe.get(envSafe, "ENVIO_THROTTLE_LIVE_METRICS_BENCHMARK_INTERVAL_MILLIS", S$RescriptSchema.$$int, undefined, undefined, 1000, undefined);
233
-
234
- var jsonFileBenchmarkIntervalMillis = EnvSafe.get(envSafe, "ENVIO_THROTTLE_JSON_FILE_BENCHMARK_INTERVAL_MILLIS", S$RescriptSchema.$$int, undefined, undefined, 500, undefined);
235
-
236
179
  var ThrottleWrites = {
237
180
  chainMetadataIntervalMillis: chainMetadataIntervalMillis,
238
- pruneStaleDataIntervalMillis: pruneStaleDataIntervalMillis,
239
- liveMetricsBenchmarkIntervalMillis: liveMetricsBenchmarkIntervalMillis,
240
- jsonFileBenchmarkIntervalMillis: jsonFileBenchmarkIntervalMillis
181
+ pruneStaleDataIntervalMillis: pruneStaleDataIntervalMillis
241
182
  };
242
183
 
243
184
  EnvSafe.close(envSafe);
@@ -260,7 +201,6 @@ export {
260
201
  hyperSyncClientMaxRetries ,
261
202
  hypersyncClientSerializationFormat ,
262
203
  hypersyncClientEnableQueryCaching ,
263
- Benchmark ,
264
204
  logStrategy ,
265
205
  Db ,
266
206
  ClickHouseSink ,