envio 3.0.0-alpha.2 → 3.0.0-alpha.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/evm.schema.json +44 -33
  2. package/fuel.schema.json +32 -21
  3. package/index.d.ts +1 -0
  4. package/package.json +7 -6
  5. package/src/Batch.res.mjs +1 -1
  6. package/src/Benchmark.res +394 -0
  7. package/src/Benchmark.res.mjs +398 -0
  8. package/src/ChainFetcher.res +459 -0
  9. package/src/ChainFetcher.res.mjs +281 -0
  10. package/src/ChainManager.res +179 -0
  11. package/src/ChainManager.res.mjs +139 -0
  12. package/src/Config.res +15 -1
  13. package/src/Config.res.mjs +27 -4
  14. package/src/Ecosystem.res +9 -124
  15. package/src/Ecosystem.res.mjs +19 -160
  16. package/src/Env.res +0 -1
  17. package/src/Env.res.mjs +0 -3
  18. package/src/Envio.gen.ts +9 -1
  19. package/src/Envio.res +12 -9
  20. package/src/EventProcessing.res +476 -0
  21. package/src/EventProcessing.res.mjs +341 -0
  22. package/src/FetchState.res +54 -29
  23. package/src/FetchState.res.mjs +62 -35
  24. package/src/GlobalState.res +1169 -0
  25. package/src/GlobalState.res.mjs +1196 -0
  26. package/src/Internal.res +2 -1
  27. package/src/LoadLayer.res +444 -0
  28. package/src/LoadLayer.res.mjs +296 -0
  29. package/src/LoadLayer.resi +32 -0
  30. package/src/Prometheus.res +8 -8
  31. package/src/Prometheus.res.mjs +10 -10
  32. package/src/ReorgDetection.res +6 -10
  33. package/src/ReorgDetection.res.mjs +6 -6
  34. package/src/UserContext.res +356 -0
  35. package/src/UserContext.res.mjs +238 -0
  36. package/src/bindings/DateFns.res +71 -0
  37. package/src/bindings/DateFns.res.mjs +22 -0
  38. package/src/sources/Evm.res +87 -0
  39. package/src/sources/Evm.res.mjs +105 -0
  40. package/src/sources/EvmChain.res +95 -0
  41. package/src/sources/EvmChain.res.mjs +61 -0
  42. package/src/sources/Fuel.res +19 -34
  43. package/src/sources/Fuel.res.mjs +34 -16
  44. package/src/sources/FuelSDK.res +37 -0
  45. package/src/sources/FuelSDK.res.mjs +29 -0
  46. package/src/sources/HyperFuel.res +2 -2
  47. package/src/sources/HyperFuel.resi +1 -1
  48. package/src/sources/HyperFuelClient.res +2 -2
  49. package/src/sources/HyperFuelSource.res +8 -8
  50. package/src/sources/HyperFuelSource.res.mjs +5 -5
  51. package/src/sources/HyperSyncSource.res +5 -5
  52. package/src/sources/HyperSyncSource.res.mjs +5 -5
  53. package/src/sources/RpcSource.res +4 -4
  54. package/src/sources/RpcSource.res.mjs +3 -3
  55. package/src/sources/Solana.res +59 -0
  56. package/src/sources/Solana.res.mjs +79 -0
  57. package/src/sources/Source.res +2 -2
  58. package/src/sources/SourceManager.res +24 -32
  59. package/src/sources/SourceManager.res.mjs +20 -20
  60. package/src/sources/SourceManager.resi +4 -5
package/src/Benchmark.res
@@ -0,0 +1,394 @@
+ module MillisAccum = {
+   type millis = float
+   type t = {counters: dict<millis>, startTime: Js.Date.t, mutable endTime: Js.Date.t}
+   let schema: S.t<t> = S.schema(s => {
+     counters: s.matches(S.dict(S.float)),
+     startTime: s.matches(S.string->S.datetime),
+     endTime: s.matches(S.string->S.datetime),
+   })
+   let make: unit => t = () => {
+     counters: Js.Dict.empty(),
+     startTime: Js.Date.make(),
+     endTime: Js.Date.make(),
+   }
+
+   let increment = (self: t, label, amount) => {
+     self.endTime = Js.Date.make()
+     let amount = amount->Belt.Float.fromInt
+     switch self.counters->Utils.Dict.dangerouslyGetNonOption(label) {
+     | None =>
+       self.counters->Js.Dict.set(label, amount)
+       amount
+     | Some(current) =>
+       let newAmount = current +. amount
+       self.counters->Js.Dict.set(label, newAmount)
+       newAmount
+     }
+   }
+ }
+
+ module SummaryData = {
+   module DataSet = {
+     type t = {
+       count: float,
+       min: float,
+       max: float,
+       sum: BigDecimal.t,
+       sumOfSquares: option<BigDecimal.t>,
+       decimalPlaces: int,
+     }
+
+     let schema = S.schema(s => {
+       count: s.matches(S.float),
+       min: s.matches(S.float),
+       max: s.matches(S.float),
+       sum: s.matches(BigDecimal.schema),
+       sumOfSquares: s.matches(S.option(BigDecimal.schema)),
+       decimalPlaces: s.matches(S.int),
+     })
+
+     let make = (val: float, ~decimalPlaces=2) => {
+       let bigDecimal = val->BigDecimal.fromFloat
+       {
+         count: 1.,
+         min: val,
+         max: val,
+         sum: bigDecimal,
+         sumOfSquares: Env.Benchmark.shouldSaveStdDev
+           ? Some(bigDecimal->BigDecimal.times(bigDecimal))
+           : None,
+         decimalPlaces,
+       }
+     }
+
+     let add = (self: t, val: float) => {
+       let bigDecimal = val->BigDecimal.fromFloat
+       {
+         count: self.count +. 1.,
+         min: Pervasives.min(self.min, val),
+         max: Pervasives.max(self.max, val),
+         sum: self.sum->BigDecimal.plus(bigDecimal),
+         sumOfSquares: self.sumOfSquares->Belt.Option.map(s =>
+           s->BigDecimal.plus(bigDecimal->BigDecimal.times(bigDecimal))
+         ),
+         decimalPlaces: self.decimalPlaces,
+       }
+     }
+   }
+   module Group = {
+     type t = dict<DataSet.t>
+     let schema: S.t<t> = S.dict(DataSet.schema)
+     let make = (): t => Js.Dict.empty()
+
+     /**
+     Adds a value to the data set for the given key. If the key does not exist, it will be created.
+
+     Returns the updated data set.
+     */
+     let add = (self: t, label, value: float, ~decimalPlaces=2) => {
+       switch self->Utils.Dict.dangerouslyGetNonOption(label) {
+       | None =>
+         let new = DataSet.make(value, ~decimalPlaces)
+         self->Js.Dict.set(label, new)
+         new
+       | Some(dataSet) =>
+         let updated = dataSet->DataSet.add(value)
+         self->Js.Dict.set(label, updated)
+         updated
+       }
+     }
+   }
+
+   type t = dict<Group.t>
+   let schema = S.dict(Group.schema)
+   let make = (): t => Js.Dict.empty()
+
+   let add = (self: t, ~group, ~label, ~value, ~decimalPlaces=2) => {
+     let group = switch self->Utils.Dict.dangerouslyGetNonOption(group) {
+     | None =>
+       let newGroup = Group.make()
+       self->Js.Dict.set(group, newGroup)
+       newGroup
+     | Some(group) => group
+     }
+
+     group->Group.add(label, value, ~decimalPlaces)
+   }
+ }
+
+ module Stats = {
+   open Belt
+   type t = {
+     n: float,
+     mean: float,
+     @as("std-dev") stdDev: option<float>,
+     min: float,
+     max: float,
+     sum: float,
+   }
+
+   let round = (float, ~precision=2) => {
+     let factor = Js.Math.pow_float(~base=10.0, ~exp=precision->Int.toFloat)
+     Js.Math.round(float *. factor) /. factor
+   }
+
+   let makeFromDataSet = (dataSet: SummaryData.DataSet.t) => {
+     let n = dataSet.count
+     let countBigDecimal = n->BigDecimal.fromFloat
+     let mean = dataSet.sum->BigDecimal.div(countBigDecimal)
+
+     let roundBigDecimal = bd =>
+       bd->BigDecimal.decimalPlaces(dataSet.decimalPlaces)->BigDecimal.toNumber
+     let roundFloat = float => float->round(~precision=dataSet.decimalPlaces)
+
+     let stdDev = dataSet.sumOfSquares->Option.map(sumOfSquares => {
+       let variance =
+         sumOfSquares
+         ->BigDecimal.div(countBigDecimal)
+         ->BigDecimal.minus(mean->BigDecimal.times(mean))
+       BigDecimal.sqrt(variance)->roundBigDecimal
+     })
+     {
+       n,
+       mean: mean->roundBigDecimal,
+       stdDev,
+       min: dataSet.min->roundFloat,
+       max: dataSet.max->roundFloat,
+       sum: dataSet.sum->roundBigDecimal,
+     }
+   }
+ }
+
+ module Data = {
+   type t = {
+     millisAccum: MillisAccum.t,
+     summaryData: SummaryData.t,
+   }
+
+   let schema = S.schema(s => {
+     millisAccum: s.matches(MillisAccum.schema),
+     summaryData: s.matches(SummaryData.schema),
+   })
+
+   let make = () => {
+     millisAccum: MillisAccum.make(),
+     summaryData: SummaryData.make(),
+   }
+
+   module LiveMetrics = {
+     let addDataSet = if (
+       Env.Benchmark.saveDataStrategy->Env.Benchmark.SaveDataStrategy.shouldSavePrometheus
+     ) {
+       (dataSet: SummaryData.DataSet.t, ~group, ~label) => {
+         let {n, mean, stdDev, min, max, sum} = dataSet->Stats.makeFromDataSet
+         Prometheus.BenchmarkSummaryData.set(~group, ~label, ~n, ~mean, ~stdDev, ~min, ~max, ~sum)
+       }
+     } else {
+       (_dataSet, ~group as _, ~label as _) => ()
+     }
+     let setCounterMillis = if (
+       Env.Benchmark.saveDataStrategy->Env.Benchmark.SaveDataStrategy.shouldSavePrometheus
+     ) {
+       (millisAccum: MillisAccum.t, ~label, ~millis) => {
+         let totalRuntimeMillis =
+           millisAccum.endTime->Js.Date.getTime -. millisAccum.startTime->Js.Date.getTime
+         Prometheus.BenchmarkCounters.set(~label, ~millis, ~totalRuntimeMillis)
+       }
+     } else {
+       (_, ~label as _, ~millis as _) => ()
+     }
+   }
+
+   let incrementMillis = (self: t, ~label, ~amount) => {
+     let nextMillis = self.millisAccum->MillisAccum.increment(label, amount)
+     self.millisAccum->LiveMetrics.setCounterMillis(~label, ~millis=nextMillis)
+   }
+
+   let addSummaryData = (self: t, ~group, ~label, ~value, ~decimalPlaces=2) => {
+     let updatedDataSet = self.summaryData->SummaryData.add(~group, ~label, ~value, ~decimalPlaces)
+     updatedDataSet->LiveMetrics.addDataSet(~group, ~label)
+   }
+ }
+
+ let data = Data.make()
+ let throttler = Throttler.make(
+   ~intervalMillis=Env.ThrottleWrites.jsonFileBenchmarkIntervalMillis,
+   ~logger=Logging.createChild(~params={"context": "Benchmarking framework"}),
+ )
+ let cacheFileName = "BenchmarkCache.json"
+ let cacheFilePath = NodeJs.Path.join(NodeJs.Path.__dirname, cacheFileName)
+
+ let saveToCacheFile = if (
+   Env.Benchmark.saveDataStrategy->Env.Benchmark.SaveDataStrategy.shouldSaveJsonFile
+ ) {
+   //Save to cache file only happens if the strategy is set to json-file
+   data => {
+     let write = () => {
+       let json = data->S.reverseConvertToJsonStringOrThrow(Data.schema)
+       NodeJs.Fs.Promises.writeFile(~filepath=cacheFilePath, ~content=json)
+     }
+     throttler->Throttler.schedule(write)
+   }
+ } else {
+   _ => ()
+ }
+
+ let readFromCacheFile = async () => {
+   switch await NodeJs.Fs.Promises.readFile(~filepath=cacheFilePath, ~encoding=Utf8) {
+   | exception _ => None
+   | content =>
+     try content->S.parseJsonStringOrThrow(Data.schema)->Some catch {
+     | S.Raised(e) =>
+       Logging.error(
+         "Failed to parse benchmark cache file, please delete it and rerun the benchmark",
+       )
+       e->S.Error.raise
+     }
+   }
+ }
+
+ let addSummaryData = (~group, ~label, ~value, ~decimalPlaces=2) => {
+   let _ = data->Data.addSummaryData(~group, ~label, ~value, ~decimalPlaces)
+   data->saveToCacheFile
+ }
+
+ let incrementMillis = (~label, ~amount) => {
+   let _ = data->Data.incrementMillis(~label, ~amount)
+   data->saveToCacheFile
+ }
+
+ let addBlockRangeFetched = (
+   ~totalTimeElapsed: int,
+   ~parsingTimeElapsed: int,
+   ~pageFetchTime: int,
+   ~chainId,
+   ~fromBlock,
+   ~toBlock,
+   ~numEvents,
+   ~numAddresses,
+   ~queryName,
+ ) => {
+   let group = `BlockRangeFetched Summary for Chain ${chainId->Belt.Int.toString} ${queryName}`
+   let add = (label, value) => data->Data.addSummaryData(~group, ~label, ~value=Utils.magic(value))
+
+   add("Total Time Elapsed (ms)", totalTimeElapsed)
+   add("Parsing Time Elapsed (ms)", parsingTimeElapsed)
+   add("Page Fetch Time (ms)", pageFetchTime)
+   add("Num Events", numEvents)
+   add("Num Addresses", numAddresses)
+   add("Block Range Size", toBlock - fromBlock)
+
+   data->Data.incrementMillis(
+     ~label=`Total Time Fetching Chain ${chainId->Belt.Int.toString} ${queryName}`,
+     ~amount=totalTimeElapsed,
+   )
+
+   data->saveToCacheFile
+ }
+
+ let eventProcessingGroup = "EventProcessing Summary"
+ let batchSizeLabel = "Batch Size"
+
+ let addEventProcessing = (
+   ~batchSize,
+   ~loadDuration,
+   ~handlerDuration,
+   ~dbWriteDuration,
+   ~totalTimeElapsed,
+ ) => {
+   let add = (label, value) =>
+     data->Data.addSummaryData(~group=eventProcessingGroup, ~label, ~value=value->Belt.Int.toFloat)
+
+   add(batchSizeLabel, batchSize)
+   add("Load Duration (ms)", loadDuration)
+   add("Handler Duration (ms)", handlerDuration)
+   add("DB Write Duration (ms)", dbWriteDuration)
+   add("Total Time Elapsed (ms)", totalTimeElapsed)
+
+   data->Data.incrementMillis(~label="Total Time Processing", ~amount=totalTimeElapsed)
+
+   data->saveToCacheFile
+ }
+
+ module Summary = {
+   open Belt
+
+   type summaryTable = dict<Stats.t>
+
+   external logSummaryTable: summaryTable => unit = "console.table"
+   external logArrTable: array<'a> => unit = "console.table"
+   external logObjTable: {..} => unit = "console.table"
+   external logDictTable: dict<'a> => unit = "console.table"
+
+   external arrayIntToFloat: array<int> => array<float> = "%identity"
+
+   let printSummary = async () => {
+     let data = await readFromCacheFile()
+     switch data {
+     | None =>
+       Logging.error(
+         "No benchmark cache file found, please use 'ENVIO_SAVE_BENCHMARK_DATA=true' and rerun the benchmark",
+       )
+     | Some({summaryData, millisAccum}) =>
+       Js.log("Time breakdown")
+       let timeBreakdown = [
+         (
+           "Total Runtime",
+           DateFns.intervalToDuration({
+             start: millisAccum.startTime,
+             end: millisAccum.endTime,
+           }),
+         ),
+       ]
+
+       millisAccum.counters
+       ->Js.Dict.entries
+       ->Array.forEach(((label, millis)) =>
+         timeBreakdown
+         ->Js.Array2.push((label, DateFns.durationFromMillis(millis->Belt.Int.fromFloat)))
+         ->ignore
+       )
+
+       timeBreakdown
+       ->Js.Dict.fromArray
+       ->logDictTable
+
+       Js.log("General")
+       let batchSizesSum =
+         summaryData
+         ->Js.Dict.get(eventProcessingGroup)
+         ->Option.flatMap(g => g->Js.Dict.get(batchSizeLabel))
+         ->Option.map(data => data.sum)
+         ->Option.getWithDefault(BigDecimal.zero)
+
+       let totalRuntimeMillis =
+         millisAccum.endTime->Js.Date.getTime -. millisAccum.startTime->Js.Date.getTime
+
+       let totalRuntimeSeconds = totalRuntimeMillis /. 1000.
+
+       let eventsPerSecond =
+         batchSizesSum
+         ->BigDecimal.div(BigDecimal.fromFloat(totalRuntimeSeconds))
+         ->BigDecimal.decimalPlaces(2)
+         ->BigDecimal.toNumber
+
+       logObjTable({
+         "batch sizes sum": batchSizesSum->BigDecimal.toNumber,
+         "total runtime (sec)": totalRuntimeSeconds,
+         "events per second": eventsPerSecond,
+       })
+
+       summaryData
+       ->Js.Dict.entries
+       ->Js.Array2.sortInPlaceWith(((a, _), (b, _)) => a < b ? -1 : 1)
+       ->Array.forEach(((groupName, group)) => {
+         Js.log(groupName)
+         group
+         ->Js.Dict.entries
+         ->Array.map(((label, values)) => (label, values->Stats.makeFromDataSet))
+         ->Js.Dict.fromArray
+         ->logDictTable
+       })
+     }
+   }
+ }
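
For orientation, the sketch below shows how the public entry points of this new Benchmark module might be driven by an indexer and then summarised. It is illustrative only and not part of the package diff above: the call sites, literal values, and the assumption that benchmark saving is enabled (the printSummary error message hints at ENVIO_SAVE_BENCHMARK_DATA=true) are ours; only the function names and labelled arguments come from the diff.

// Hypothetical usage sketch — not part of the published package.
// Assumes the Benchmark module above is compiled into the indexer and that
// a save strategy is configured (e.g. ENVIO_SAVE_BENCHMARK_DATA=true).

// Record timings after fetching one block range (durations in ms).
Benchmark.addBlockRangeFetched(
  ~totalTimeElapsed=1250,
  ~parsingTimeElapsed=180,
  ~pageFetchTime=900,
  ~chainId=1,
  ~fromBlock=19_000_000,
  ~toBlock=19_000_500,
  ~numEvents=742,
  ~numAddresses=3,
  ~queryName="eth_getLogs",
)

// Record timings after processing a batch of events (durations in ms).
Benchmark.addEventProcessing(
  ~batchSize=742,
  ~loadDuration=35,
  ~handlerDuration=210,
  ~dbWriteDuration=95,
  ~totalTimeElapsed=340,
)

// Later (e.g. from a CLI command), read BenchmarkCache.json back and print the tables.
Benchmark.Summary.printSummary()->ignore

Both recording calls throttle their writes to BenchmarkCache.json through the shared Throttler, and printSummary reads that file back to compute per-group Stats (count, mean, optional std-dev, min, max, sum) and log them with console.table.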