envio 2.25.2 → 2.26.0-alpha.2

This diff represents the content of publicly available package versions as released to their respective public registries, and is provided for informational purposes only.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "envio",
- "version": "v2.25.2",
+ "version": "v2.26.0-alpha.2",
  "description": "A latency and sync speed optimized, developer friendly blockchain data indexer.",
  "bin": "./bin.js",
  "main": "./index.js",
@@ -25,10 +25,10 @@
  },
  "homepage": "https://envio.dev",
  "optionalDependencies": {
- "envio-linux-x64": "v2.25.2",
- "envio-linux-arm64": "v2.25.2",
- "envio-darwin-x64": "v2.25.2",
- "envio-darwin-arm64": "v2.25.2"
+ "envio-linux-x64": "v2.26.0-alpha.2",
+ "envio-linux-arm64": "v2.26.0-alpha.2",
+ "envio-darwin-x64": "v2.26.0-alpha.2",
+ "envio-darwin-arm64": "v2.26.0-alpha.2"
  },
  "dependencies": {
  "@envio-dev/hypersync-client": "0.6.5",
package/src/Envio.res CHANGED
@@ -50,6 +50,7 @@ let experimental_createEffect = (
  >
  ),
  callsCount: 0,
+ output: options.output,
  cache: options.cache->Belt.Option.getWithDefault(false),
  }->(Utils.magic: Internal.effect => effect<'input, 'output>)
  }
package/src/Envio.res.js CHANGED
@@ -10,6 +10,7 @@ function experimental_createEffect(options, handler) {
  name: options.name,
  handler: handler,
  cache: Belt_Option.getWithDefault(options.cache, false),
+ output: options.output,
  callsCount: 0
  };
  }
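For context on the change above: the new `output` field carries a rescript-schema value describing the effect's return type, and the persistence layer (further below) uses it to serialize cached results. A minimal usage sketch in JavaScript — hypothetical, assuming the package's public entry re-exports `experimental_createEffect` and the `S` schema builder, and that the handler argument has the shape shown:

```js
// Hypothetical sketch; the effect name, the `input` field, and the handler
// argument shape are illustrative assumptions, not part of this diff.
const { experimental_createEffect, S } = require("envio");

const getTokenSymbol = experimental_createEffect(
  {
    name: "getTokenSymbol",
    input: S.string, // assumed field; only `output` is shown in this diff
    // New in 2.26.0-alpha.x: the output schema is stored on the effect and
    // used by the storage layer to persist cached results as JSONB rows.
    output: S.string,
    cache: true,
  },
  async ({ input }) => {
    // Effect handler: must resolve to a value matching `output`.
    return `SYM-${input}`;
  }
);
```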
package/src/Internal.res CHANGED
@@ -198,6 +198,7 @@ type effect = {
  name: string,
  handler: effectArgs => promise<effectOutput>,
  cache: bool,
+ output: S.t<effectOutput>,
  mutable callsCount: int,
  }

package/src/Persistence.res CHANGED
@@ -5,14 +5,12 @@
  // Currently there are quite many code spread across
  // DbFunctions, Db, Migrations, InMemoryStore modules which use codegen code directly.

- // The type reflects an effect cache table in the db
+ // The type reflects an cache table in the db
  // It might be present even if the effect is not used in the application
- type effectCache = {
- name: string,
+ type effectCacheRecord = {
+ effectName: string,
  // Number of rows in the table
- mutable size: int,
- // Lazily attached table definition when effect is used in the application
- mutable table: option<Table.table>,
+ mutable count: int,
  }

  type operator = [#">" | #"="]
@@ -28,7 +26,6 @@ type storage = {
  ~generalTables: array<Table.table>=?,
  ~enums: array<Internal.enumConfig<Internal.enum>>=?,
  ) => promise<unit>,
- loadEffectCaches: unit => promise<array<effectCache>>,
  @raises("StorageError")
  loadByIdsOrThrow: 'item. (
  ~ids: array<string>,
@@ -50,6 +47,16 @@ type storage = {
  ~table: Table.table,
  ~itemSchema: S.t<'item>,
  ) => promise<unit>,
+ @raises("StorageError")
+ setEffectCacheOrThrow: (
+ ~effectName: string,
+ ~ids: array<string>,
+ ~outputs: array<Internal.effectOutput>,
+ ~outputSchema: S.t<Internal.effectOutput>,
+ ~initialize: bool,
+ ) => promise<unit>,
+ dumpEffectCache: unit => promise<unit>,
+ restoreEffectCache: (~withUpload: bool) => promise<array<effectCacheRecord>>,
  }

  exception StorageError({message: string, reason: exn})
@@ -57,7 +64,7 @@ exception StorageError({message: string, reason: exn})
  type storageStatus =
  | Unknown
  | Initializing(promise<unit>)
- | Ready({cleanRun: bool, effectCaches: dict<effectCache>})
+ | Ready({cleanRun: bool, cache: dict<effectCacheRecord>})

  type t = {
  userEntities: array<Internal.entityConfig>,
@@ -66,7 +73,6 @@ type t = {
  allEnums: array<Internal.enumConfig<Internal.enum>>,
  mutable storageStatus: storageStatus,
  storage: storage,
- onStorageInitialize: option<unit => promise<unit>>,
  }

  let entityHistoryActionEnumConfig: Internal.enumConfig<EntityHistory.RowAction.t> = {
@@ -83,7 +89,6 @@ let make = (
  ~allEnums,
  ~staticTables,
  ~storage,
- ~onStorageInitialize=?,
  ) => {
  let allEntities = userEntities->Js.Array2.concat([dcRegistryEntityConfig])
  let allEnums =
@@ -95,62 +100,70 @@ let make = (
  allEnums,
  storageStatus: Unknown,
  storage,
- onStorageInitialize,
  }
  }

- let init = async (persistence, ~reset=false) => {
- try {
- let shouldRun = switch persistence.storageStatus {
- | Unknown => true
- | Initializing(promise) => {
- await promise
- reset
- }
- | Ready(_) => reset
- }
- if shouldRun {
- let resolveRef = ref(%raw(`null`))
- let promise = Promise.make((resolve, _) => {
- resolveRef := resolve
- })
- persistence.storageStatus = Initializing(promise)
- if reset || !(await persistence.storage.isInitialized()) {
- let _ = await persistence.storage.initialize(
- ~entities=persistence.allEntities,
- ~generalTables=persistence.staticTables,
- ~enums=persistence.allEnums,
- )
+ let init = {
+ let loadInitialCache = async (persistence, ~withUpload) => {
+ let effectCacheRecords = await persistence.storage.restoreEffectCache(~withUpload)
+ let cache = Js.Dict.empty()
+ effectCacheRecords->Js.Array2.forEach(record => {
+ Prometheus.EffectCacheCount.set(~count=record.count, ~effectName=record.effectName)
+ cache->Js.Dict.set(record.effectName, record)
+ })
+ cache
+ }

- persistence.storageStatus = Ready({
- cleanRun: true,
- effectCaches: Js.Dict.empty(),
- })
- switch persistence.onStorageInitialize {
- | Some(onStorageInitialize) => await onStorageInitialize()
- | None => ()
- }
- } else if (
- // In case of a race condition,
- // we want to set the initial status to Ready only once.
- switch persistence.storageStatus {
- | Initializing(_) => true
- | _ => false
+ async (persistence, ~reset=false) => {
+ try {
+ let shouldRun = switch persistence.storageStatus {
+ | Unknown => true
+ | Initializing(promise) => {
+ await promise
+ reset
  }
- ) {
- let effectCaches = Js.Dict.empty()
- (await persistence.storage.loadEffectCaches())->Js.Array2.forEach(effectCache => {
- effectCaches->Js.Dict.set(effectCache.name, effectCache)
- })
- persistence.storageStatus = Ready({
- cleanRun: false,
- effectCaches,
+ | Ready(_) => reset
+ }
+ if shouldRun {
+ let resolveRef = ref(%raw(`null`))
+ let promise = Promise.make((resolve, _) => {
+ resolveRef := resolve
  })
+ persistence.storageStatus = Initializing(promise)
+ if reset || !(await persistence.storage.isInitialized()) {
+ Logging.info(`Initializing the indexer storage...`)
+
+ await persistence.storage.initialize(
+ ~entities=persistence.allEntities,
+ ~generalTables=persistence.staticTables,
+ ~enums=persistence.allEnums,
+ )
+
+ Logging.info(`The indexer storage is ready. Restoring cache...`)
+ persistence.storageStatus = Ready({
+ cleanRun: true,
+ cache: await loadInitialCache(persistence, ~withUpload=true),
+ })
+ } else if (
+ // In case of a race condition,
+ // we want to set the initial status to Ready only once.
+ switch persistence.storageStatus {
+ | Initializing(_) => true
+ | _ => false
+ }
+ ) {
+ Logging.info(`The indexer storage is initialized. Restoring cache...`)
+ persistence.storageStatus = Ready({
+ cleanRun: false,
+ cache: await loadInitialCache(persistence, ~withUpload=false),
+ })
+ }
+ resolveRef.contents()
  }
- resolveRef.contents()
+ } catch {
+ | exn =>
+ exn->ErrorHandling.mkLogAndRaise(~msg=`EE800: Failed to initialize the indexer storage.`)
  }
- } catch {
- | exn => exn->ErrorHandling.mkLogAndRaise(~msg=`EE800: Failed to initialize the indexer storage.`)
  }
  }

@@ -162,3 +175,26 @@ let getInitializedStorageOrThrow = persistence => {
  | Ready(_) => persistence.storage
  }
  }
+
+ let setEffectCacheOrThrow = async (persistence, ~effectName, ~ids, ~outputs, ~outputSchema) => {
+ switch persistence.storageStatus {
+ | Unknown
+ | Initializing(_) =>
+ Js.Exn.raiseError(`Failed to access the indexer storage. The Persistence layer is not initialized.`)
+ | Ready({cache}) => {
+ let storage = persistence.storage
+ let effectCacheRecord = switch cache->Utils.Dict.dangerouslyGetNonOption(effectName) {
+ | Some(c) => c
+ | None => {
+ let c = {effectName, count: 0}
+ cache->Js.Dict.set(effectName, c)
+ c
+ }
+ }
+ let initialize = effectCacheRecord.count === 0
+ await storage.setEffectCacheOrThrow(~effectName, ~ids, ~outputs, ~outputSchema, ~initialize)
+ effectCacheRecord.count = effectCacheRecord.count + ids->Js.Array2.length
+ Prometheus.EffectCacheCount.set(~count=effectCacheRecord.count, ~effectName)
+ }
+ }
+ }
package/src/Persistence.res.js CHANGED
@@ -2,6 +2,8 @@
  'use strict';

  var Js_exn = require("rescript/lib/js/js_exn.js");
+ var Logging = require("./Logging.res.js");
+ var Prometheus = require("./Prometheus.res.js");
  var EntityHistory = require("./db/EntityHistory.res.js");
  var ErrorHandling = require("./ErrorHandling.res.js");
  var Caml_exceptions = require("rescript/lib/js/caml_exceptions.js");
@@ -22,7 +24,7 @@ var entityHistoryActionEnumConfig = {
  default: "SET"
  };

- function make(userEntities, dcRegistryEntityConfig, allEnums, staticTables, storage, onStorageInitialize) {
+ function make(userEntities, dcRegistryEntityConfig, allEnums, staticTables, storage) {
  var allEntities = userEntities.concat([dcRegistryEntityConfig]);
  var allEnums$1 = allEnums.concat([entityHistoryActionEnumConfig]);
  return {
@@ -31,11 +33,20 @@ function make(userEntities, dcRegistryEntityConfig, allEnums, staticTables, stor
  allEntities: allEntities,
  allEnums: allEnums$1,
  storageStatus: "Unknown",
- storage: storage,
- onStorageInitialize: onStorageInitialize
+ storage: storage
  };
  }

+ async function loadInitialCache(persistence, withUpload) {
+ var effectCacheRecords = await persistence.storage.restoreEffectCache(withUpload);
+ var cache = {};
+ effectCacheRecords.forEach(function (record) {
+ Prometheus.EffectCacheCount.set(record.count, record.effectName);
+ cache[record.effectName] = record;
+ });
+ return cache;
+ }
+
  async function init(persistence, resetOpt) {
  var reset = resetOpt !== undefined ? resetOpt : false;
  try {
@@ -63,30 +74,24 @@ async function init(persistence, resetOpt) {
  _0: promise$1
  };
  if (reset || !await persistence.storage.isInitialized()) {
+ Logging.info("Initializing the indexer storage...");
  await persistence.storage.initialize(persistence.allEntities, persistence.staticTables, persistence.allEnums);
+ Logging.info("The indexer storage is ready. Restoring cache...");
  persistence.storageStatus = {
  TAG: "Ready",
  cleanRun: true,
- effectCaches: {}
+ cache: await loadInitialCache(persistence, true)
  };
- var onStorageInitialize = persistence.onStorageInitialize;
- if (onStorageInitialize !== undefined) {
- await onStorageInitialize();
- }
-
  } else {
  var match = persistence.storageStatus;
  var tmp;
  tmp = typeof match !== "object" || match.TAG !== "Initializing" ? false : true;
  if (tmp) {
- var effectCaches = {};
- (await persistence.storage.loadEffectCaches()).forEach(function (effectCache) {
- effectCaches[effectCache.name] = effectCache;
- });
+ Logging.info("The indexer storage is initialized. Restoring cache...");
  persistence.storageStatus = {
  TAG: "Ready",
  cleanRun: false,
- effectCaches: effectCaches
+ cache: await loadInitialCache(persistence, false)
  };
  }

@@ -108,9 +113,38 @@ function getInitializedStorageOrThrow(persistence) {
  }
  }

+ async function setEffectCacheOrThrow(persistence, effectName, ids, outputs, outputSchema) {
+ var match = persistence.storageStatus;
+ if (typeof match !== "object") {
+ return Js_exn.raiseError("Failed to access the indexer storage. The Persistence layer is not initialized.");
+ }
+ if (match.TAG === "Initializing") {
+ return Js_exn.raiseError("Failed to access the indexer storage. The Persistence layer is not initialized.");
+ }
+ var cache = match.cache;
+ var storage = persistence.storage;
+ var c = cache[effectName];
+ var effectCacheRecord;
+ if (c !== undefined) {
+ effectCacheRecord = c;
+ } else {
+ var c$1 = {
+ effectName: effectName,
+ count: 0
+ };
+ cache[effectName] = c$1;
+ effectCacheRecord = c$1;
+ }
+ var initialize = effectCacheRecord.count === 0;
+ await storage.setEffectCacheOrThrow(effectName, ids, outputs, outputSchema, initialize);
+ effectCacheRecord.count = effectCacheRecord.count + ids.length | 0;
+ return Prometheus.EffectCacheCount.set(effectCacheRecord.count, effectName);
+ }
+
  exports.StorageError = StorageError;
  exports.entityHistoryActionEnumConfig = entityHistoryActionEnumConfig;
  exports.make = make;
  exports.init = init;
  exports.getInitializedStorageOrThrow = getInitializedStorageOrThrow;
- /* EntityHistory Not a pure module */
+ exports.setEffectCacheOrThrow = setEffectCacheOrThrow;
+ /* Logging Not a pure module */
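Taken together, `Persistence.storage` now expects three cache-related members in place of the removed `loadEffectCaches`. A hedged, in-memory sketch of the new contract (positional signatures as in the compiled output above; this stub is illustrative and not part of the package):

```js
// Minimal in-memory stand-in for the cache-related part of Persistence.storage.
function makeInMemoryStorage() {
  const tables = new Map(); // effectName -> Map(id -> output)

  return {
    // isInitialized, initialize, loadByIdsOrThrow, loadByFieldOrThrow and
    // setOrThrow are omitted here for brevity.
    async setEffectCacheOrThrow(effectName, ids, outputs, _outputSchema, _initialize) {
      // `initialize` is true on the first write for an effect; PgStorage uses it
      // to create the cache table before inserting.
      if (!tables.has(effectName)) tables.set(effectName, new Map());
      const table = tables.get(effectName);
      ids.forEach((id, i) => table.set(id, outputs[i]));
    },
    async dumpEffectCache() {
      // A real implementation persists every cache table somewhere durable;
      // PgStorage shells out to psql and COPYs each table to a .tsv file.
    },
    async restoreEffectCache(_withUpload) {
      // Must return one record per cache table so Persistence can seed its
      // per-effect counts and the envio_effect_cache_count gauge.
      return [...tables.entries()].map(([effectName, rows]) => ({
        effectName,
        count: rows.size,
      }));
    },
  };
}
```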
package/src/PgStorage.res CHANGED
@@ -128,6 +128,20 @@ GRANT ALL ON SCHEMA "${pgSchema}" TO public;`,
  })
  })

+ // Add cache row count function
+ functionsQuery :=
+ functionsQuery.contents ++
+ "\n" ++
+ `CREATE OR REPLACE FUNCTION get_cache_row_count(table_name text)
+ RETURNS integer AS $$
+ DECLARE
+ result integer;
+ BEGIN
+ EXECUTE format('SELECT COUNT(*) FROM "${pgSchema}".%I', table_name) INTO result;
+ RETURN result;
+ END;
+ $$ LANGUAGE plpgsql;`
+
  [query.contents]->Js.Array2.concat(
  functionsQuery.contents !== "" ? [functionsQuery.contents] : [],
  )
@@ -418,7 +432,86 @@ let makeSchemaTableNamesQuery = (~pgSchema) => {
  `SELECT table_name FROM information_schema.tables WHERE table_schema = '${pgSchema}';`
  }

- let make = (~sql: Postgres.sql, ~pgSchema, ~pgUser): Persistence.storage => {
+ let cacheTablePrefix = "envio_effect_"
+ let cacheTablePrefixLength = cacheTablePrefix->String.length
+
+ type schemaCacheTableInfo = {
+ @as("table_name")
+ tableName: string,
+ @as("count")
+ count: int,
+ }
+
+ let makeSchemaCacheTableInfoQuery = (~pgSchema) => {
+ `SELECT
+ t.table_name,
+ get_cache_row_count(t.table_name) as count
+ FROM information_schema.tables t
+ WHERE t.table_schema = '${pgSchema}'
+ AND t.table_name LIKE '${cacheTablePrefix}%';`
+ }
+
+ type psqlExecState =
+ Unknown | Pending(promise<result<string, string>>) | Resolved(result<string, string>)
+
+ let getPsqlExec = {
+ // For development: We run the indexer process locally,
+ // and there might not be psql installed on the user's machine.
+ // So we use docker-compose to run psql existing in the postgres container.
+ // For production: We expect indexer to be running in a container,
+ // with psql installed. So we can call it directly.
+ let psqlExecState = ref(Unknown)
+ async () => {
+ switch psqlExecState.contents {
+ | Unknown => {
+ let promise = Promise.make((resolve, _reject) => {
+ let binary = "psql"
+ NodeJs.ChildProcess.exec(`${binary} --version`, (~error, ~stdout as _, ~stderr as _) => {
+ switch error {
+ | Value(_) => {
+ let binary = "docker-compose exec -T -u postgres envio-postgres psql"
+ NodeJs.ChildProcess.exec(
+ `${binary} --version`,
+ (~error, ~stdout as _, ~stderr as _) => {
+ switch error {
+ | Value(_) => resolve(Error("Failed to find psql binary"))
+ | Null => resolve(Ok(binary))
+ }
+ },
+ )
+ }
+ | Null => resolve(Ok(binary))
+ }
+ })
+ })
+
+ psqlExecState := Pending(promise)
+ let result = await promise
+ psqlExecState := Resolved(result)
+ result
+ }
+ | Pending(promise) => await promise
+ | Resolved(result) => result
+ }
+ }
+ }
+ let psqlExecMissingErrorMessage = `Please check if "psql" binary is installed or docker-compose is running for the local indexer.`
+
+ let make = (
+ ~sql: Postgres.sql,
+ ~pgSchema,
+ ~pgUser,
+ ~pgDatabase,
+ ~onInitialize=?,
+ ~onNewTables=?,
+ ): Persistence.storage => {
+ let cacheDirPath = NodeJs.Path.resolve([
+ // Right outside of the generated directory
+ "..",
+ ".envio",
+ "cache",
+ ])
+
  let isInitialized = async () => {
  let envioTables =
  await sql->Postgres.unsafe(
@@ -459,26 +552,11 @@ let make = (~sql: Postgres.sql, ~pgSchema, ~pgUser): Persistence.storage => {
  let _ = await sql->Postgres.beginSql(sql => {
  queries->Js.Array2.map(query => sql->Postgres.unsafe(query))
  })
- }

- let loadEffectCaches = async () => {
- let schemaTableNames: array<schemaTableName> =
- await sql->Postgres.unsafe(makeSchemaTableNamesQuery(~pgSchema))
- schemaTableNames->Belt.Array.keepMapU(schemaTableName => {
- if schemaTableName.tableName->Js.String2.startsWith("effect_cache_") {
- Some(
- (
- {
- name: schemaTableName.tableName,
- size: 0,
- table: None,
- }: Persistence.effectCache
- ),
- )
- } else {
- None
- }
- })
+ switch onInitialize {
+ | Some(onInitialize) => await onInitialize()
+ | None => ()
+ }
  }

  let loadByIdsOrThrow = async (~ids, ~table: Table.table, ~rowsSchema) => {
@@ -577,12 +655,199 @@ let make = (~sql: Postgres.sql, ~pgSchema, ~pgUser): Persistence.storage => {
  )
  }

+ let setEffectCacheOrThrow = async (
+ ~effectName: string,
+ ~ids: array<string>,
+ ~outputs: array<Internal.effectOutput>,
+ ~outputSchema: S.t<Internal.effectOutput>,
+ ~initialize: bool,
+ ) => {
+ let table = Table.mkTable(
+ cacheTablePrefix ++ effectName,
+ ~fields=[
+ Table.mkField("id", Text, ~fieldSchema=S.string, ~isPrimaryKey=true),
+ Table.mkField("output", JsonB, ~fieldSchema=outputSchema),
+ ],
+ ~compositeIndices=[],
+ )
+
+ if initialize {
+ let _ = await sql->Postgres.unsafe(makeCreateTableQuery(table, ~pgSchema))
+ switch onNewTables {
+ | Some(onNewTables) => await onNewTables(~tableNames=[table.tableName])
+ | None => ()
+ }
+ }
+
+ let items = []
+ for idx in 0 to outputs->Array.length - 1 {
+ items
+ ->Js.Array2.push({
+ "id": ids[idx],
+ "output": outputs[idx],
+ })
+ ->ignore
+ }
+
+ await setOrThrow(
+ ~items,
+ ~table,
+ ~itemSchema=S.schema(s =>
+ {
+ "id": s.matches(S.string),
+ "output": s.matches(outputSchema),
+ }
+ ),
+ )
+ }
+
+ let dumpEffectCache = async () => {
+ let cacheTableInfo: array<schemaCacheTableInfo> =
+ (await sql
+ ->Postgres.unsafe(makeSchemaCacheTableInfoQuery(~pgSchema)))
+ ->Js.Array2.filter(i => i.count > 0)
+
+ if cacheTableInfo->Utils.Array.notEmpty {
+ // Create .envio/cache directory if it doesn't exist
+ try {
+ await NodeJs.Fs.Promises.access(cacheDirPath)
+ } catch {
+ | _ =>
+ // Create directory if it doesn't exist
+ await NodeJs.Fs.Promises.mkdir(~path=cacheDirPath, ~options={recursive: true})
+ }
+
+ // Command for testing. Run from generated
+ // docker-compose exec -T -u postgres envio-postgres psql -d envio-dev -c 'COPY "public"."envio_effect_getTokenMetadata" TO STDOUT (FORMAT text, HEADER);' > ../.envio/cache/getTokenMetadata.tsv
+
+ switch await getPsqlExec() {
+ | Ok(psqlExec) => {
+ Logging.info(
+ `Dumping cache: ${cacheTableInfo
+ ->Js.Array2.map(({tableName, count}) =>
+ tableName ++ " (" ++ count->Belt.Int.toString ++ " rows)"
+ )
+ ->Js.Array2.joinWith(", ")}`,
+ )
+
+ let promises = cacheTableInfo->Js.Array2.map(async ({tableName}) => {
+ let cacheName = tableName->Js.String2.sliceToEnd(~from=cacheTablePrefixLength)
+ let outputFile =
+ NodeJs.Path.join(cacheDirPath, cacheName ++ ".tsv")->NodeJs.Path.toString
+
+ let command = `${psqlExec} -d ${pgDatabase} -c 'COPY "${pgSchema}"."${tableName}" TO STDOUT WITH (FORMAT text, HEADER);' > ${outputFile}`
+
+ Promise.make((resolve, reject) => {
+ NodeJs.ChildProcess.exec(
+ command,
+ (~error, ~stdout, ~stderr as _) => {
+ switch error {
+ | Value(error) => reject(error)
+ | Null => resolve(stdout)
+ }
+ },
+ )
+ })
+ })
+
+ let _ = await promises->Promise.all
+ Logging.info(`Successfully dumped cache to ${cacheDirPath->NodeJs.Path.toString}`)
+ }
+ | Error(_) => Logging.error(`Failed to dump cache. ${psqlExecMissingErrorMessage}`)
+ }
+ }
+ }
+
+ let restoreEffectCache = async (~withUpload) => {
+ if withUpload {
+ // Try to restore cache tables from binary files
+ let (entries, psqlExecResult) = await Promise.all2((
+ NodeJs.Fs.Promises.readdir(cacheDirPath),
+ getPsqlExec(),
+ ))
+
+ switch psqlExecResult {
+ | Ok(psqlExec) => {
+ let cacheFiles = entries->Js.Array2.filter(entry => {
+ entry->Js.String2.endsWith(".tsv")
+ })
+
+ let _ =
+ await cacheFiles
+ ->Js.Array2.map(entry => {
+ let cacheName = entry->Js.String2.slice(~from=0, ~to_=-4) // Remove .tsv extension
+ let tableName = cacheTablePrefix ++ cacheName
+ let table = Table.mkTable(
+ tableName,
+ ~fields=[
+ Table.mkField("id", Text, ~fieldSchema=S.string, ~isPrimaryKey=true),
+ Table.mkField("output", JsonB, ~fieldSchema=S.json(~validate=false)),
+ ],
+ ~compositeIndices=[],
+ )
+
+ sql
+ ->Postgres.unsafe(makeCreateTableQuery(table, ~pgSchema))
+ ->Promise.then(() => {
+ let inputFile = NodeJs.Path.join(cacheDirPath, entry)->NodeJs.Path.toString
+
+ let command = `${psqlExec} -d ${pgDatabase} -c 'COPY "${pgSchema}"."${tableName}" FROM STDIN WITH (FORMAT text, HEADER);' < ${inputFile}`
+
+ Promise.make(
+ (resolve, reject) => {
+ NodeJs.ChildProcess.exec(
+ command,
+ (~error, ~stdout, ~stderr as _) => {
+ switch error {
+ | Value(error) => reject(error)
+ | Null => resolve(stdout)
+ }
+ },
+ )
+ },
+ )
+ })
+ })
+ ->Promise.all
+ }
+ | Error(_) =>
+ Logging.error(
+ `Failed to restore cache, continuing without it. ${psqlExecMissingErrorMessage}`,
+ )
+ }
+ }
+
+ let cacheTableInfo: array<schemaCacheTableInfo> =
+ await sql->Postgres.unsafe(makeSchemaCacheTableInfoQuery(~pgSchema))
+
+ if withUpload {
+ switch onNewTables {
+ | Some(onNewTables) =>
+ await onNewTables(
+ ~tableNames=cacheTableInfo->Js.Array2.map(info => {
+ info.tableName
+ }),
+ )
+ | None => ()
+ }
+ }
+
+ cacheTableInfo->Js.Array2.map((info): Persistence.effectCacheRecord => {
+ {
+ effectName: info.tableName->Js.String2.sliceToEnd(~from=cacheTablePrefixLength),
+ count: info.count,
+ }
+ })
+ }
+
  {
  isInitialized,
  initialize,
  loadByFieldOrThrow,
- loadEffectCaches,
  loadByIdsOrThrow,
  setOrThrow,
+ setEffectCacheOrThrow,
+ dumpEffectCache,
+ restoreEffectCache,
  }
  }
package/src/PgStorage.res.js CHANGED
@@ -1,15 +1,20 @@
  // Generated by ReScript, PLEASE EDIT WITH CARE
  'use strict';

+ var Fs = require("fs");
+ var Path = require("path");
  var $$Array = require("rescript/lib/js/array.js");
  var Table = require("./db/Table.res.js");
  var Utils = require("./Utils.res.js");
  var Js_exn = require("rescript/lib/js/js_exn.js");
  var Schema = require("./db/Schema.res.js");
+ var Logging = require("./Logging.res.js");
  var Belt_Array = require("rescript/lib/js/belt_Array.js");
+ var Caml_array = require("rescript/lib/js/caml_array.js");
  var Belt_Option = require("rescript/lib/js/belt_Option.js");
  var Caml_option = require("rescript/lib/js/caml_option.js");
  var Persistence = require("./Persistence.res.js");
+ var Child_process = require("child_process");
  var Caml_exceptions = require("rescript/lib/js/caml_exceptions.js");
  var S$RescriptSchema = require("rescript-schema/src/S.res.js");
  var Caml_js_exceptions = require("rescript/lib/js/caml_js_exceptions.js");
@@ -104,6 +109,7 @@ function makeInitializeTransaction(pgSchema, pgUser, generalTablesOpt, entitiesO
  query.contents = query.contents + "\n" + makeCreateIndexQuery(derivedFromField.derivedFromEntity, [indexField], pgSchema);
  });
  });
+ functionsQuery.contents = functionsQuery.contents + "\n" + ("CREATE OR REPLACE FUNCTION get_cache_row_count(table_name text) \n RETURNS integer AS $$\n DECLARE\n result integer;\n BEGIN\n EXECUTE format('SELECT COUNT(*) FROM \"" + pgSchema + "\".%I', table_name) INTO result;\n RETURN result;\n END;\n $$ LANGUAGE plpgsql;");
  return [query.contents].concat(functionsQuery.contents !== "" ? [functionsQuery.contents] : []);
  }

@@ -288,7 +294,68 @@ function makeSchemaTableNamesQuery(pgSchema) {
  return "SELECT table_name FROM information_schema.tables WHERE table_schema = '" + pgSchema + "';";
  }

- function make(sql, pgSchema, pgUser) {
+ var cacheTablePrefix = "envio_effect_";
+
+ var cacheTablePrefixLength = cacheTablePrefix.length;
+
+ function makeSchemaCacheTableInfoQuery(pgSchema) {
+ return "SELECT \n t.table_name,\n get_cache_row_count(t.table_name) as count\n FROM information_schema.tables t\n WHERE t.table_schema = '" + pgSchema + "' \n AND t.table_name LIKE '" + cacheTablePrefix + "%';";
+ }
+
+ var psqlExecState = {
+ contents: "Unknown"
+ };
+
+ async function getPsqlExec() {
+ var promise = psqlExecState.contents;
+ if (typeof promise === "object") {
+ if (promise.TAG === "Pending") {
+ return await promise._0;
+ } else {
+ return promise._0;
+ }
+ }
+ var promise$1 = new Promise((function (resolve, _reject) {
+ var binary = "psql";
+ Child_process.exec(binary + " --version", (function (error, param, param$1) {
+ if (error === null) {
+ return resolve({
+ TAG: "Ok",
+ _0: binary
+ });
+ }
+ var binary$1 = "docker-compose exec -T -u postgres envio-postgres psql";
+ Child_process.exec(binary$1 + " --version", (function (error, param, param$1) {
+ if (error === null) {
+ return resolve({
+ TAG: "Ok",
+ _0: binary$1
+ });
+ } else {
+ return resolve({
+ TAG: "Error",
+ _0: "Failed to find psql binary"
+ });
+ }
+ }));
+ }));
+ }));
+ psqlExecState.contents = {
+ TAG: "Pending",
+ _0: promise$1
+ };
+ var result = await promise$1;
+ psqlExecState.contents = {
+ TAG: "Resolved",
+ _0: result
+ };
+ return result;
+ }
+
+ var psqlExecMissingErrorMessage = "Please check if \"psql\" binary is installed or docker-compose is running for the local indexer.";
+
+ function make(sql, pgSchema, pgUser, pgDatabase, onInitialize, onNewTables) {
+ var cacheDirPath = Path.resolve("..", ".envio", "cache");
  var isInitialized = async function () {
  var envioTables = await sql.unsafe("SELECT table_schema FROM information_schema.tables WHERE table_schema = '" + pgSchema + "' AND table_name = '" + eventSyncStateTableName + "';");
  return Utils.$$Array.notEmpty(envioTables);
@@ -309,19 +376,10 @@ function make(sql, pgSchema, pgUser) {
  return sql.unsafe(query);
  });
  });
- };
- var loadEffectCaches = async function () {
- var schemaTableNames = await sql.unsafe(makeSchemaTableNamesQuery(pgSchema));
- return Belt_Array.keepMapU(schemaTableNames, (function (schemaTableName) {
- if (schemaTableName.table_name.startsWith("effect_cache_")) {
- return {
- name: schemaTableName.table_name,
- size: 0,
- table: undefined
- };
- }
-
- }));
+ if (onInitialize !== undefined) {
+ return await onInitialize();
+ }
+
  };
  var loadByIdsOrThrow = async function (ids, table, rowsSchema) {
  var rows;
@@ -395,13 +453,132 @@ function make(sql, pgSchema, pgUser) {
  var setOrThrow$1 = function (items, table, itemSchema) {
  return setOrThrow(sql, items, table, itemSchema, pgSchema);
  };
+ var setEffectCacheOrThrow = async function (effectName, ids, outputs, outputSchema, initialize) {
+ var table = Table.mkTable(cacheTablePrefix + effectName, [], [
+ Table.mkField("id", "TEXT", S$RescriptSchema.string, undefined, undefined, undefined, true, undefined, undefined),
+ Table.mkField("output", "JSONB", outputSchema, undefined, undefined, undefined, undefined, undefined, undefined)
+ ]);
+ if (initialize) {
+ await sql.unsafe(makeCreateTableQuery(table, pgSchema));
+ if (onNewTables !== undefined) {
+ await onNewTables([table.tableName]);
+ }
+
+ }
+ var items = [];
+ for(var idx = 0 ,idx_finish = outputs.length; idx < idx_finish; ++idx){
+ items.push({
+ id: Caml_array.get(ids, idx),
+ output: Caml_array.get(outputs, idx)
+ });
+ }
+ return await setOrThrow$1(items, table, S$RescriptSchema.schema(function (s) {
+ return {
+ id: s.m(S$RescriptSchema.string),
+ output: s.m(outputSchema)
+ };
+ }));
+ };
+ var dumpEffectCache = async function () {
+ var cacheTableInfo = (await sql.unsafe(makeSchemaCacheTableInfoQuery(pgSchema))).filter(function (i) {
+ return i.count > 0;
+ });
+ if (!Utils.$$Array.notEmpty(cacheTableInfo)) {
+ return ;
+ }
+ try {
+ await Fs.promises.access(cacheDirPath);
+ }
+ catch (exn){
+ await Fs.promises.mkdir(cacheDirPath, {
+ recursive: true
+ });
+ }
+ var psqlExec = await getPsqlExec();
+ if (psqlExec.TAG !== "Ok") {
+ return Logging.error("Failed to dump cache. " + psqlExecMissingErrorMessage);
+ }
+ var psqlExec$1 = psqlExec._0;
+ Logging.info("Dumping cache: " + cacheTableInfo.map(function (param) {
+ return param.table_name + " (" + String(param.count) + " rows)";
+ }).join(", "));
+ var promises = cacheTableInfo.map(async function (param) {
+ var tableName = param.table_name;
+ var cacheName = tableName.slice(cacheTablePrefixLength);
+ var outputFile = Path.join(cacheDirPath, cacheName + ".tsv");
+ var command = psqlExec$1 + " -d " + pgDatabase + " -c 'COPY \"" + pgSchema + "\".\"" + tableName + "\" TO STDOUT WITH (FORMAT text, HEADER);' > " + outputFile;
+ return new Promise((function (resolve, reject) {
+ Child_process.exec(command, (function (error, stdout, param) {
+ if (error === null) {
+ return resolve(stdout);
+ } else {
+ return reject(error);
+ }
+ }));
+ }));
+ });
+ await Promise.all(promises);
+ return Logging.info("Successfully dumped cache to " + cacheDirPath);
+ };
+ var restoreEffectCache = async function (withUpload) {
+ if (withUpload) {
+ var match = await Promise.all([
+ Fs.promises.readdir(cacheDirPath),
+ getPsqlExec()
+ ]);
+ var psqlExecResult = match[1];
+ if (psqlExecResult.TAG === "Ok") {
+ var psqlExec = psqlExecResult._0;
+ var cacheFiles = match[0].filter(function (entry) {
+ return entry.endsWith(".tsv");
+ });
+ await Promise.all(cacheFiles.map(function (entry) {
+ var cacheName = entry.slice(0, -4);
+ var tableName = cacheTablePrefix + cacheName;
+ var table = Table.mkTable(tableName, [], [
+ Table.mkField("id", "TEXT", S$RescriptSchema.string, undefined, undefined, undefined, true, undefined, undefined),
+ Table.mkField("output", "JSONB", S$RescriptSchema.json(false), undefined, undefined, undefined, undefined, undefined, undefined)
+ ]);
+ return sql.unsafe(makeCreateTableQuery(table, pgSchema)).then(function () {
+ var inputFile = Path.join(cacheDirPath, entry);
+ var command = psqlExec + " -d " + pgDatabase + " -c 'COPY \"" + pgSchema + "\".\"" + tableName + "\" FROM STDIN WITH (FORMAT text, HEADER);' < " + inputFile;
+ return new Promise((function (resolve, reject) {
+ Child_process.exec(command, (function (error, stdout, param) {
+ if (error === null) {
+ return resolve(stdout);
+ } else {
+ return reject(error);
+ }
+ }));
+ }));
+ });
+ }));
+ } else {
+ Logging.error("Failed to restore cache, continuing without it. " + psqlExecMissingErrorMessage);
+ }
+ }
+ var cacheTableInfo = await sql.unsafe(makeSchemaCacheTableInfoQuery(pgSchema));
+ if (withUpload && onNewTables !== undefined) {
+ await onNewTables(cacheTableInfo.map(function (info) {
+ return info.table_name;
+ }));
+ }
+ return cacheTableInfo.map(function (info) {
+ return {
+ effectName: info.table_name.slice(cacheTablePrefixLength),
+ count: info.count
+ };
+ });
+ };
  return {
  isInitialized: isInitialized,
  initialize: initialize,
- loadEffectCaches: loadEffectCaches,
  loadByIdsOrThrow: loadByIdsOrThrow,
  loadByFieldOrThrow: loadByFieldOrThrow,
- setOrThrow: setOrThrow$1
+ setOrThrow: setOrThrow$1,
+ setEffectCacheOrThrow: setEffectCacheOrThrow,
+ dumpEffectCache: dumpEffectCache,
+ restoreEffectCache: restoreEffectCache
  };
  }

@@ -428,5 +605,10 @@ exports.setQueryCache = setQueryCache;
  exports.setOrThrow = setOrThrow;
  exports.setEntityHistoryOrThrow = setEntityHistoryOrThrow;
  exports.makeSchemaTableNamesQuery = makeSchemaTableNamesQuery;
+ exports.cacheTablePrefix = cacheTablePrefix;
+ exports.cacheTablePrefixLength = cacheTablePrefixLength;
+ exports.makeSchemaCacheTableInfoQuery = makeSchemaCacheTableInfoQuery;
+ exports.getPsqlExec = getPsqlExec;
+ exports.psqlExecMissingErrorMessage = psqlExecMissingErrorMessage;
  exports.make = make;
  /* pgEncodingErrorSchema Not a pure module */
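To make the dump/restore path concrete, these are the kinds of shell commands the storage layer assembles and runs through `child_process.exec` (example values taken from the testing comment in PgStorage.res above: schema `public`, database `envio-dev`, effect `getTokenMetadata`; the real values come from the indexer configuration):

```js
// Illustration only: command strings as built by dumpEffectCache/restoreEffectCache.
const psqlExec = "docker-compose exec -T -u postgres envio-postgres psql"; // or plain "psql" when available
const dump =
  `${psqlExec} -d envio-dev -c 'COPY "public"."envio_effect_getTokenMetadata" ` +
  `TO STDOUT WITH (FORMAT text, HEADER);' > ../.envio/cache/getTokenMetadata.tsv`;
const restore =
  `${psqlExec} -d envio-dev -c 'COPY "public"."envio_effect_getTokenMetadata" ` +
  `FROM STDIN WITH (FORMAT text, HEADER);' < ../.envio/cache/getTokenMetadata.tsv`;
```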
package/src/Prometheus.res CHANGED
@@ -590,3 +590,15 @@ module EffectCallsCount = {
  gauge->SafeGauge.handleInt(~labels=effectName, ~value=callsCount)
  }
  }
+
+ module EffectCacheCount = {
+ let gauge = SafeGauge.makeOrThrow(
+ ~name="envio_effect_cache_count",
+ ~help="The number of items in the effect cache.",
+ ~labelSchema=effectLabelsSchema,
+ )
+
+ let set = (~count, ~effectName) => {
+ gauge->SafeGauge.handleInt(~labels=effectName, ~value=count)
+ }
+ }
package/src/Prometheus.res.js CHANGED
@@ -700,6 +700,17 @@ var EffectCallsCount = {
  set: set$21
  };

+ var gauge$22 = makeOrThrow$1("envio_effect_cache_count", "The number of items in the effect cache.", effectLabelsSchema);
+
+ function set$22(count, effectName) {
+ handleInt$1(gauge$22, effectName, count);
+ }
+
+ var EffectCacheCount = {
+ gauge: gauge$22,
+ set: set$22
+ };
+
  exports.loadEntitiesDurationCounter = loadEntitiesDurationCounter;
  exports.eventRouterDurationCounter = eventRouterDurationCounter;
  exports.executeBatchDurationCounter = executeBatchDurationCounter;
@@ -747,4 +758,5 @@ exports.ProgressBlockNumber = ProgressBlockNumber;
  exports.ProgressEventsCount = ProgressEventsCount;
  exports.effectLabelsSchema = effectLabelsSchema;
  exports.EffectCallsCount = EffectCallsCount;
+ exports.EffectCacheCount = EffectCacheCount;
  /* loadEntitiesDurationCounter Not a pure module */
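The new gauge follows the same pattern as `EffectCallsCount`. A small sketch of how it is driven (the call matches the compiled signature above; the exact label key comes from `effectLabelsSchema`, which is not shown in this diff, so it is assumed here):

```js
const Prometheus = require("./Prometheus.res.js");

// Called by Persistence after restoring the cache and after each batch of cache writes:
Prometheus.EffectCacheCount.set(42, "getTokenMetadata");
// Exposed on the metrics endpoint roughly as (label key assumed):
//   envio_effect_cache_count{effect="getTokenMetadata"} 42
```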
package/src/Utils.res CHANGED
@@ -516,13 +516,17 @@ module Proxy = {
  }

  module Hash = {
+ // Hash to JSON string. No specific reason for this,
+ // just to stick to at least some sort of spec.
+ // After Sury v11 is out we'll be able to do it with schema
  let rec makeOrThrow = (any: 'a): string => {
  switch any->Js.typeof {
- | "string" => any->magic
+ | "string" => `"${any->magic}"` // Ideally should escape here,
+ // but since we don't parse it back, it's fine to keep it super simple
  | "number" => any->magic->Js.Int.toString
- | "bigint" => any->magic->BigInt.toString
+ | "bigint" => `"${any->magic->BigInt.toString}"`
  | "boolean" => any->magic ? "true" : "false"
- | "undefined" => "undefined"
+ | "undefined" => "null"
  | "object" =>
  if any === %raw(`null`) {
  "null"
@@ -530,7 +534,10 @@ module Hash = {
  let any: array<'a> = any->magic
  let hash = ref("[")
  for i in 0 to any->Js.Array2.length - 1 {
- hash := hash.contents ++ any->Js.Array2.unsafe_get(i)->makeOrThrow ++ ","
+ if i !== 0 {
+ hash := hash.contents ++ ","
+ }
+ hash := hash.contents ++ any->Js.Array2.unsafe_get(i)->makeOrThrow
  }
  hash.contents ++ "]"
  } else {
@@ -541,10 +548,16 @@ module Hash = {
  let keys = any->Js.Dict.keys->Js.Array2.sortInPlace
  for i in 0 to keys->Js.Array2.length - 1 {
  let key = keys->Js.Array2.unsafe_get(i)
- // Ideally should escape and wrap the key in double quotes
- // but since we don't need to decode the hash,
- // it's fine to keep it super simple
- hash := hash.contents ++ key ++ ":" ++ any->Js.Dict.unsafeGet(key)->makeOrThrow ++ ","
+ let value = any->Js.Dict.unsafeGet(key)
+ if i !== 0 {
+ hash := hash.contents ++ ","
+ }
+ if value !== %raw(`undefined`) {
+ // Ideally should escape and wrap the key in double quotes
+ // but since we don't need to decode the hash,
+ // it's fine to keep it super simple
+ hash := hash.contents ++ `"${key}":${any->Js.Dict.unsafeGet(key)->makeOrThrow}`
+ }
  }
  hash.contents ++ "}"
  } else if constructor["name"] === "BigNumber" {
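The net effect of the Hash changes above is that hashes are now JSON-like: strings and bigints are quoted, keys are sorted, `undefined` values are skipped, and there are no trailing commas. A few illustrative inputs and outputs derived from the code (the module path and export name in the `require` are assumed for the sake of the example):

```js
const { Hash } = require("envio/src/Utils.res.js"); // path and export name assumed

Hash.makeOrThrow("abc");            // '"abc"'            strings are now quoted
Hash.makeOrThrow(42n);              // '"42"'             bigints are quoted, plain numbers are not
Hash.makeOrThrow(undefined);        // 'null'             previously 'undefined'
Hash.makeOrThrow([1, "a"]);         // '[1,"a"]'          no trailing comma anymore
Hash.makeOrThrow({ b: 2, a: "x" }); // '{"a":"x","b":2}'  keys sorted, JSON-like output
```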
package/src/Utils.res.js CHANGED
@@ -455,13 +455,14 @@ var $$Proxy = {};
  function makeOrThrow(any) {
  var $$typeof = typeof any;
  switch ($$typeof) {
+ case "bigint" :
+ return "\"" + any.toString() + "\"";
  case "boolean" :
  if (any) {
  return "true";
  } else {
  return "false";
  }
- case "bigint" :
  case "number" :
  return any.toString();
  case "object" :
@@ -471,7 +472,10 @@ function makeOrThrow(any) {
  if (Array.isArray(any)) {
  var hash = "[";
  for(var i = 0 ,i_finish = any.length; i < i_finish; ++i){
- hash = hash + makeOrThrow(any[i]) + ",";
+ if (i !== 0) {
+ hash = hash + ",";
+ }
+ hash = hash + makeOrThrow(any[i]);
  }
  return hash + "]";
  }
@@ -487,16 +491,23 @@ function makeOrThrow(any) {
  var keys = Object.keys(any).sort();
  for(var i$1 = 0 ,i_finish$1 = keys.length; i$1 < i_finish$1; ++i$1){
  var key = keys[i$1];
- hash$1 = hash$1 + key + ":" + makeOrThrow(any[key]) + ",";
+ var value = any[key];
+ if (i$1 !== 0) {
+ hash$1 = hash$1 + ",";
+ }
+ if (value !== undefined) {
+ hash$1 = hash$1 + ("\"" + key + "\":" + makeOrThrow(any[key]));
+ }
+
  }
  return hash$1 + "}";
  case "string" :
- return any;
+ return "\"" + any + "\"";
  case "function" :
  case "symbol" :
  return any.toString();
  case "undefined" :
- return "undefined";
+ return "null";
  default:
  return Js_exn.raiseError("Don't know how to serialize " + $$typeof);
  }
@@ -4,35 +4,23 @@ type t
  type exitCode = | @as(0) Success | @as(1) Failure
  @send external exitWithCode: (t, exitCode) => unit = "exit"

- module Util = {
- @unboxed
- type depth = Int(int) | @as(null) Null
- @unboxed
- type compact = Bool(bool) | Int(int)
- @unboxed
- type sorted = Bool(bool) | Fn((string, string) => int)
- @unboxed
- type getters = | @as(true) True | @as(false) False | @as("get") Get | @as("set") Set
-
- @unbox
- type inspectOptions = {
- showHidden?: bool,
- depth?: depth,
- colors?: bool,
- customInspect?: bool,
- showProxy?: bool,
- maxArrayLength?: int,
- maxStringLength?: int,
- breakLength?: int,
- @as("compact") compact?: compact,
- sorted?: sorted,
- getters?: string,
- numericSeparator?: bool,
- }
+ module Process = {
+ type t = {env: Js.Dict.t<string>}
+ @module external process: t = "process"
+ }

- @module("util") external inspect: ('a, inspectOptions) => string = "inspect"
+ module ChildProcess = {
+ type execOptions = {
+ cwd?: string,
+ env?: Js.Dict.t<string>,
+ shell?: string,
+ }

- let inspectObj = a => inspect(a, {showHidden: false, depth: Null, colors: true})
+ @module("child_process")
+ external exec: (
+ string,
+ (~error: Js.null<exn>, ~stdout: string, ~stderr: string) => unit,
+ ) => unit = "exec"
  }

  module Path = {
@@ -55,6 +43,11 @@ module Fs = {
  encoding?: string,
  }

+ type mkdirOptions = {
+ recursive?: bool,
+ mode?: int,
+ }
+
  module Promises = {
  @module("fs") @scope("promises")
  external writeFile: (
@@ -77,5 +70,11 @@ module Fs = {

  @module("fs") @scope("promises")
  external readFile: (~filepath: Path.t, ~encoding: encoding) => promise<string> = "readFile"
+
+ @module("fs") @scope("promises")
+ external mkdir: (~path: Path.t, ~options: mkdirOptions=?) => Js.Promise.t<unit> = "mkdir"
+
+ @module("fs") @scope("promises")
+ external readdir: Path.t => Js.Promise.t<array<string>> = "readdir"
  }
  }
@@ -1,19 +1,10 @@
  // Generated by ReScript, PLEASE EDIT WITH CARE
  'use strict';

- var Util = require("util");

- function inspectObj(a) {
- return Util.inspect(a, {
- showHidden: false,
- depth: null,
- colors: true
- });
- }
+ var Process = {};

- var Util$1 = {
- inspectObj: inspectObj
- };
+ var ChildProcess = {};

  var Path = {};

@@ -23,7 +14,8 @@ var Fs = {
  Promises: Promises
  };

- exports.Util = Util$1;
+ exports.Process = Process;
+ exports.ChildProcess = ChildProcess;
  exports.Path = Path;
  exports.Fs = Fs;
- /* util Not a pure module */
+ /* No side effect */