envio 2.26.0-rc.1 → 2.26.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +5 -5
- package/src/Envio.res +18 -4
- package/src/Envio.res.js +24 -6
- package/src/Internal.res +18 -1
- package/src/Internal.res.js +12 -0
- package/src/Persistence.res +10 -7
- package/src/Persistence.res.js +4 -3
- package/src/PgStorage.res +30 -56
- package/src/PgStorage.res.js +14 -32
- package/src/Utils.res +14 -5
- package/src/Utils.res.js +13 -5
- package/src/bindings/NodeJs.res +31 -0
- package/src/bindings/NodeJs.res.js +15 -1
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "envio",
-  "version": "v2.26.0-rc.1",
+  "version": "v2.26.0",
   "description": "A latency and sync speed optimized, developer friendly blockchain data indexer.",
   "bin": "./bin.js",
   "main": "./index.js",
@@ -25,10 +25,10 @@
   },
   "homepage": "https://envio.dev",
   "optionalDependencies": {
-    "envio-linux-x64": "v2.26.0-rc.1",
-    "envio-linux-arm64": "v2.26.0-rc.1",
-    "envio-darwin-x64": "v2.26.0-rc.1",
-    "envio-darwin-arm64": "v2.26.0-rc.1"
+    "envio-linux-x64": "v2.26.0",
+    "envio-linux-arm64": "v2.26.0",
+    "envio-darwin-x64": "v2.26.0",
+    "envio-darwin-arm64": "v2.26.0"
   },
   "dependencies": {
     "@envio-dev/hypersync-client": "0.6.5",
package/src/Envio.res
CHANGED
@@ -42,6 +42,8 @@ let experimental_createEffect = (
   handler: effectArgs<'input> => promise<'output>,
 ) => {
   Prometheus.EffectCallsCount.set(~callsCount=0, ~effectName=options.name)
+  let outputSchema =
+    S.schema(_ => options.output)->(Utils.magic: S.t<S.t<'output>> => S.t<Internal.effectOutput>)
   {
     name: options.name,
     handler: handler->(
@@ -57,9 +59,21 @@ let experimental_createEffect = (
     input: S.schema(_ => options.input)->(
       Utils.magic: S.t<S.t<'input>> => S.t<Internal.effectInput>
     ),
-    output:
-
-    )
-
+    output: outputSchema,
+    cache: switch options.cache {
+    | Some(true) =>
+      let itemSchema = S.schema((s): Internal.effectCacheItem => {
+        id: s.matches(S.string),
+        output: s.matches(outputSchema),
+      })
+      Some({
+        table: Internal.makeCacheTable(~effectName=options.name),
+        rowsSchema: S.array(itemSchema),
+        itemSchema,
+      })
+    | None
+    | Some(false) =>
+      None
+    },
   }->(Utils.magic: Internal.effect => effect<'input, 'output>)
 }
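
For context, the new `cache` branch above is driven by a boolean option on `experimental_createEffect`. A minimal TypeScript sketch of how an indexer might opt in, assuming the public surface mirrors the options record in this diff (name, input, output, cache) and that `experimental_createEffect` and the `S` schema helpers are importable from the `envio` package; the effect name and handler body are illustrative only:

```typescript
// Sketch only; not taken verbatim from envio docs.
import { experimental_createEffect, S } from "envio";

export const getEnsName = experimental_createEffect(
  {
    name: "getEnsName", // per Internal.makeCacheTable, cached rows would land in "envio_effect_getEnsName"
    input: S.string,
    output: S.string,
    cache: true, // the Some(true) branch: builds the cache table plus item/rows schemas
  },
  async ({ input }) => {
    // a real indexer would perform an RPC or API call here
    return `name-for-${input}`;
  },
);
```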
package/src/Envio.res.js
CHANGED
@@ -1,19 +1,37 @@
 // Generated by ReScript, PLEASE EDIT WITH CARE
 'use strict';
 
+var Internal = require("./Internal.res.js");
 var Prometheus = require("./Prometheus.res.js");
-var Belt_Option = require("rescript/lib/js/belt_Option.js");
 var S$RescriptSchema = require("rescript-schema/src/S.res.js");
 
 function experimental_createEffect(options, handler) {
   Prometheus.EffectCallsCount.set(0, options.name);
+  var outputSchema = S$RescriptSchema.schema(function (param) {
+    return options.output;
+  });
+  var match = options.cache;
+  var tmp;
+  if (match !== undefined && match) {
+    var itemSchema = S$RescriptSchema.schema(function (s) {
+      return {
+        id: s.m(S$RescriptSchema.string),
+        output: s.m(outputSchema)
+      };
+    });
+    tmp = {
+      itemSchema: itemSchema,
+      rowsSchema: S$RescriptSchema.array(itemSchema),
+      table: Internal.makeCacheTable(options.name)
+    };
+  } else {
+    tmp = undefined;
+  }
   return {
     name: options.name,
     handler: handler,
-    cache:
-    output:
-    return options.output;
-    }),
+    cache: tmp,
+    output: outputSchema,
     input: S$RescriptSchema.schema(function (param) {
       return options.input;
     }),
@@ -22,4 +40,4 @@ function experimental_createEffect(options, handler) {
 }
 
 exports.experimental_createEffect = experimental_createEffect;
-/*
+/* Internal Not a pure module */
package/src/Internal.res
CHANGED
@@ -194,14 +194,31 @@ type effectArgs = {
   context: effectContext,
   cacheKey: string,
 }
+type effectCacheItem = {id: string, output: effectOutput}
+type effectCacheMeta = {
+  itemSchema: S.t<effectCacheItem>,
+  rowsSchema: S.t<array<effectCacheItem>>,
+  table: Table.table,
+}
 type effect = {
   name: string,
   handler: effectArgs => promise<effectOutput>,
-  cache:
+  cache: option<effectCacheMeta>,
   output: S.t<effectOutput>,
   input: S.t<effectInput>,
   mutable callsCount: int,
 }
+let cacheTablePrefix = "envio_effect_"
+let makeCacheTable = (~effectName) => {
+  Table.mkTable(
+    cacheTablePrefix ++ effectName,
+    ~fields=[
+      Table.mkField("id", Text, ~fieldSchema=S.string, ~isPrimaryKey=true),
+      Table.mkField("output", JsonB, ~fieldSchema=S.json(~validate=false)),
+    ],
+    ~compositeIndices=[],
+  )
+}
 
 @genType.import(("./Types.ts", "Invalid"))
 type noEventFilters
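
The cache metadata introduced here pins down a simple storage shape. A small TypeScript sketch of what `makeCacheTable` implies for each effect, derived from the field definitions above (the helper below is illustrative, not part of the package's API):

```typescript
// One row per cached call: "id" is the cache key, "output" is the serialized result.
type EffectCacheItem = { id: string; output: unknown };

const CACHE_TABLE_PREFIX = "envio_effect_";

// e.g. an effect named "getEnsName" is stored in the table "envio_effect_getEnsName"
// with the columns: id TEXT PRIMARY KEY, output JSONB.
const cacheTableName = (effectName: string): string =>
  `${CACHE_TABLE_PREFIX}${effectName}`;
```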
package/src/Internal.res.js
CHANGED
@@ -1,6 +1,7 @@
 // Generated by ReScript, PLEASE EDIT WITH CARE
 'use strict';
 
+var Table = require("./db/Table.res.js");
 var $$BigInt = require("./bindings/BigInt.res.js");
 var Js_exn = require("rescript/lib/js/js_exn.js");
 var Address = require("./Address.res.js");
@@ -34,6 +35,15 @@ function makeEnumConfig(name, variants) {
   };
 }
 
+var cacheTablePrefix = "envio_effect_";
+
+function makeCacheTable(effectName) {
+  return Table.mkTable(cacheTablePrefix + effectName, [], [
+    Table.mkField("id", "TEXT", S$RescriptSchema.string, undefined, undefined, undefined, true, undefined, undefined),
+    Table.mkField("output", "JSONB", S$RescriptSchema.json(false), undefined, undefined, undefined, undefined, undefined, undefined)
+  ]);
+}
+
 function prettifyExn(exn) {
   var e = Caml_js_exceptions.internalToOCamlException(exn);
   if (e.RE_EXN_ID === Js_exn.$$Error) {
@@ -46,5 +56,7 @@ function prettifyExn(exn) {
 exports.fuelSupplyParamsSchema = fuelSupplyParamsSchema;
 exports.fuelTransferParamsSchema = fuelTransferParamsSchema;
 exports.makeEnumConfig = makeEnumConfig;
+exports.cacheTablePrefix = cacheTablePrefix;
+exports.makeCacheTable = makeCacheTable;
 exports.prettifyExn = prettifyExn;
 /* fuelSupplyParamsSchema Not a pure module */
package/src/Persistence.res
CHANGED
@@ -49,13 +49,15 @@ type storage = {
   ) => promise<unit>,
   @raises("StorageError")
   setEffectCacheOrThrow: (
-    ~
-    ~
-    ~outputs: array<Internal.effectOutput>,
-    ~outputSchema: S.t<Internal.effectOutput>,
+    ~effect: Internal.effect,
+    ~items: array<Internal.effectCacheItem>,
     ~initialize: bool,
   ) => promise<unit>,
+  // This is to download cache from the database to .envio/cache
   dumpEffectCache: unit => promise<unit>,
+  // This is not good, but the function does two things:
+  // - Gets info about existing cache tables
+  // - if withUpload is true, it also populates the cache from .envio/cache to the database
   restoreEffectCache: (~withUpload: bool) => promise<array<effectCacheRecord>>,
 }
 
@@ -176,13 +178,14 @@ let getInitializedStorageOrThrow = persistence => {
   }
 }
 
-let setEffectCacheOrThrow = async (persistence, ~
+let setEffectCacheOrThrow = async (persistence, ~effect: Internal.effect, ~items) => {
  switch persistence.storageStatus {
  | Unknown
  | Initializing(_) =>
    Js.Exn.raiseError(`Failed to access the indexer storage. The Persistence layer is not initialized.`)
  | Ready({cache}) => {
      let storage = persistence.storage
+      let effectName = effect.name
      let effectCacheRecord = switch cache->Utils.Dict.dangerouslyGetNonOption(effectName) {
      | Some(c) => c
      | None => {
@@ -192,8 +195,8 @@ let setEffectCacheOrThrow = async (persistence, ~effectName, ~ids, ~outputs, ~ou
        }
      }
      let initialize = effectCacheRecord.count === 0
-      await storage.setEffectCacheOrThrow(~
-      effectCacheRecord.count = effectCacheRecord.count +
+      await storage.setEffectCacheOrThrow(~effect, ~items, ~initialize)
+      effectCacheRecord.count = effectCacheRecord.count + items->Js.Array2.length
      Prometheus.EffectCacheCount.set(~count=effectCacheRecord.count, ~effectName)
    }
  }
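
The storage contract now receives the whole effect (which carries its cache metadata) plus ready-made cache items, instead of parallel id/output arrays and a separate output schema. A hypothetical TypeScript mirror of the new call shape, for orientation only (the real module is ReScript and is not exposed under these names):

```typescript
type EffectCacheItem = { id: string; output: unknown };

interface EffectLike {
  name: string;
  // cache metadata (table + item/rows schemas) travels with the effect, see Internal.effectCacheMeta
}

interface PersistenceStorage {
  setEffectCacheOrThrow(args: {
    effect: EffectLike;
    items: EffectCacheItem[];
    initialize: boolean; // true on the first write for an effect, so the cache table gets created
  }): Promise<void>;
}
```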
package/src/Persistence.res.js
CHANGED
@@ -113,7 +113,7 @@ function getInitializedStorageOrThrow(persistence) {
   }
 }
 
-async function setEffectCacheOrThrow(persistence,
+async function setEffectCacheOrThrow(persistence, effect, items) {
   var match = persistence.storageStatus;
   if (typeof match !== "object") {
     return Js_exn.raiseError("Failed to access the indexer storage. The Persistence layer is not initialized.");
@@ -123,6 +123,7 @@ async function setEffectCacheOrThrow(persistence, effectName, ids, outputs, outp
   }
   var cache = match.cache;
   var storage = persistence.storage;
+  var effectName = effect.name;
   var c = cache[effectName];
   var effectCacheRecord;
   if (c !== undefined) {
@@ -136,8 +137,8 @@ async function setEffectCacheOrThrow(persistence, effectName, ids, outputs, outp
     effectCacheRecord = c$1;
   }
   var initialize = effectCacheRecord.count === 0;
-  await storage.setEffectCacheOrThrow(
-  effectCacheRecord.count = effectCacheRecord.count +
+  await storage.setEffectCacheOrThrow(effect, items, initialize);
+  effectCacheRecord.count = effectCacheRecord.count + items.length | 0;
   return Prometheus.EffectCacheCount.set(effectCacheRecord.count, effectName);
 }
 
package/src/PgStorage.res
CHANGED
@@ -1,3 +1,5 @@
+let getCacheRowCountFnName = "get_cache_row_count"
+
 let makeCreateIndexQuery = (~tableName, ~indexFields, ~pgSchema) => {
   let indexName = tableName ++ "_" ++ indexFields->Js.Array2.joinWith("_")
   let index = indexFields->Belt.Array.map(idx => `"${idx}"`)->Js.Array2.joinWith(", ")
@@ -133,15 +135,15 @@ GRANT ALL ON SCHEMA "${pgSchema}" TO public;`,
   functionsQuery :=
     functionsQuery.contents ++
    "\n" ++
-    `CREATE OR REPLACE FUNCTION
-
-
-
-
-
-
-
-
+    `CREATE OR REPLACE FUNCTION ${getCacheRowCountFnName}(table_name text)
+RETURNS integer AS $$
+DECLARE
+  result integer;
+BEGIN
+  EXECUTE format('SELECT COUNT(*) FROM "${pgSchema}".%I', table_name) INTO result;
+  RETURN result;
+END;
+$$ LANGUAGE plpgsql;`
 
  [query.contents]->Js.Array2.concat(
    functionsQuery.contents !== "" ? [functionsQuery.contents] : [],
@@ -433,8 +435,7 @@ let makeSchemaTableNamesQuery = (~pgSchema) => {
   `SELECT table_name FROM information_schema.tables WHERE table_schema = '${pgSchema}';`
 }
 
-let
-let cacheTablePrefixLength = cacheTablePrefix->String.length
+let cacheTablePrefixLength = Internal.cacheTablePrefix->String.length
 
 type schemaCacheTableInfo = {
   @as("table_name")
@@ -446,10 +447,10 @@ type schemaCacheTableInfo = {
 let makeSchemaCacheTableInfoQuery = (~pgSchema) => {
   `SELECT
     t.table_name,
-
+    ${getCacheRowCountFnName}(t.table_name) as count
   FROM information_schema.tables t
   WHERE t.table_schema = '${pgSchema}'
-    AND t.table_name LIKE '${cacheTablePrefix}%';`
+    AND t.table_name LIKE '${Internal.cacheTablePrefix}%';`
 }
 
 type psqlExecState =
@@ -578,6 +579,7 @@ let make = (
     queries->Js.Array2.map(query => sql->Postgres.unsafe(query))
   })
 
+  // Integration with other tools like Hasura
   switch onInitialize {
   | Some(onInitialize) => await onInitialize()
   | None => ()
@@ -681,49 +683,28 @@
   }
 
   let setEffectCacheOrThrow = async (
-    ~
-    ~
-    ~outputs: array<Internal.effectOutput>,
-    ~outputSchema: S.t<Internal.effectOutput>,
+    ~effect: Internal.effect,
+    ~items: array<Internal.effectCacheItem>,
     ~initialize: bool,
   ) => {
-    let table =
-
-
-
-
-
-
-    )
+    let {table, itemSchema} = switch effect.cache {
+    | Some(cacheMeta) => cacheMeta
+    | None =>
+      Js.Exn.raiseError(
+        `Failed to set effect cache for "${effect.name}". Effect has no cache enabled.`,
+      )
+    }
 
    if initialize {
      let _ = await sql->Postgres.unsafe(makeCreateTableQuery(table, ~pgSchema))
+      // Integration with other tools like Hasura
      switch onNewTables {
      | Some(onNewTables) => await onNewTables(~tableNames=[table.tableName])
      | None => ()
      }
    }

-
-    for idx in 0 to outputs->Array.length - 1 {
-      items
-      ->Js.Array2.push({
-        "id": ids[idx],
-        "output": outputs[idx],
-      })
-      ->ignore
-    }
-
-    await setOrThrow(
-      ~items,
-      ~table,
-      ~itemSchema=S.schema(s =>
-        {
-          "id": s.matches(S.string),
-          "output": s.matches(outputSchema),
-        }
-      ),
-    )
+    await setOrThrow(~items, ~table, ~itemSchema)
  }

  let dumpEffectCache = async () => {
@@ -807,23 +788,15 @@
    let _ =
      await cacheFiles
      ->Js.Array2.map(entry => {
-        let
-        let
-        let table = Table.mkTable(
-          tableName,
-          ~fields=[
-            Table.mkField("id", Text, ~fieldSchema=S.string, ~isPrimaryKey=true),
-            Table.mkField("output", JsonB, ~fieldSchema=S.json(~validate=false)),
-          ],
-          ~compositeIndices=[],
-        )
+        let effectName = entry->Js.String2.slice(~from=0, ~to_=-4) // Remove .tsv extension
+        let table = Internal.makeCacheTable(~effectName)

        sql
        ->Postgres.unsafe(makeCreateTableQuery(table, ~pgSchema))
        ->Promise.then(() => {
          let inputFile = NodeJs.Path.join(cacheDirPath, entry)->NodeJs.Path.toString

-          let command = `${psqlExec} -c 'COPY "${pgSchema}"."${tableName}" FROM STDIN WITH (FORMAT text, HEADER);' < ${inputFile}`
+          let command = `${psqlExec} -c 'COPY "${pgSchema}"."${table.tableName}" FROM STDIN WITH (FORMAT text, HEADER);' < ${inputFile}`

          Promise.make(
            (resolve, reject) => {
@@ -859,6 +832,7 @@ let make = (
      await sql->Postgres.unsafe(makeSchemaCacheTableInfoQuery(~pgSchema))

    if withUpload && cacheTableInfo->Utils.Array.notEmpty {
+      // Integration with other tools like Hasura
      switch onNewTables {
      | Some(onNewTables) =>
        await onNewTables(
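
The new `get_cache_row_count(table_name)` function lets the cache-table info query return per-table counts without building a COUNT statement per table in application code. A rough TypeScript sketch of what the generated query does, run directly through the `postgres` client the package already uses; the schema name ("public") and connection details are assumptions for the example:

```typescript
import postgres from "postgres";

async function listEffectCacheTables() {
  const sql = postgres({ host: "localhost", database: "envio-dev" });
  // One row per "envio_effect_*" table with its current row count,
  // as reported by the plpgsql helper created during initialization.
  const rows = await sql.unsafe(`
    SELECT
      t.table_name,
      get_cache_row_count(t.table_name) as count
    FROM information_schema.tables t
    WHERE t.table_schema = 'public'
      AND t.table_name LIKE 'envio_effect_%';
  `);
  await sql.end();
  return rows;
}
```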
package/src/PgStorage.res.js
CHANGED
@@ -13,7 +13,6 @@ var Logging = require("./Logging.res.js");
 var $$Promise = require("./bindings/Promise.res.js");
 var Internal = require("./Internal.res.js");
 var Belt_Array = require("rescript/lib/js/belt_Array.js");
-var Caml_array = require("rescript/lib/js/caml_array.js");
 var Belt_Option = require("rescript/lib/js/belt_Option.js");
 var Caml_option = require("rescript/lib/js/caml_option.js");
 var Persistence = require("./Persistence.res.js");
@@ -22,6 +21,8 @@ var Caml_exceptions = require("rescript/lib/js/caml_exceptions.js");
 var S$RescriptSchema = require("rescript-schema/src/S.res.js");
 var Caml_js_exceptions = require("rescript/lib/js/caml_js_exceptions.js");
 
+var getCacheRowCountFnName = "get_cache_row_count";
+
 function makeCreateIndexQuery(tableName, indexFields, pgSchema) {
   var indexName = tableName + "_" + indexFields.join("_");
   var index = Belt_Array.map(indexFields, (function (idx) {
@@ -112,7 +113,7 @@ function makeInitializeTransaction(pgSchema, pgUser, generalTablesOpt, entitiesO
       query.contents = query.contents + "\n" + makeCreateIndexQuery(derivedFromField.derivedFromEntity, [indexField], pgSchema);
     });
   });
-  functionsQuery.contents = functionsQuery.contents + "\n" + ("CREATE OR REPLACE FUNCTION
+  functionsQuery.contents = functionsQuery.contents + "\n" + ("CREATE OR REPLACE FUNCTION " + getCacheRowCountFnName + "(table_name text) \nRETURNS integer AS $$\nDECLARE\n  result integer;\nBEGIN\n  EXECUTE format('SELECT COUNT(*) FROM \"" + pgSchema + "\".%I', table_name) INTO result;\n  RETURN result;\nEND;\n$$ LANGUAGE plpgsql;");
   return [query.contents].concat(functionsQuery.contents !== "" ? [functionsQuery.contents] : []);
 }
 
@@ -297,12 +298,10 @@ function makeSchemaTableNamesQuery(pgSchema) {
   return "SELECT table_name FROM information_schema.tables WHERE table_schema = '" + pgSchema + "';";
 }
 
-var
-
-var cacheTablePrefixLength = cacheTablePrefix.length;
+var cacheTablePrefixLength = Internal.cacheTablePrefix.length;
 
 function makeSchemaCacheTableInfoQuery(pgSchema) {
-  return "SELECT \n    t.table_name,\n
+  return "SELECT \n    t.table_name,\n    " + getCacheRowCountFnName + "(t.table_name) as count\n  FROM information_schema.tables t\n  WHERE t.table_schema = '" + pgSchema + "' \n    AND t.table_name LIKE '" + Internal.cacheTablePrefix + "%';";
 }
 
 var psqlExecState = {
@@ -467,11 +466,10 @@ function make(sql, pgHost, pgSchema, pgPort, pgUser, pgDatabase, pgPassword, onI
   var setOrThrow$1 = function (items, table, itemSchema) {
     return setOrThrow(sql, items, table, itemSchema, pgSchema);
   };
-  var setEffectCacheOrThrow = async function (
-    var
-
-
-    ]);
+  var setEffectCacheOrThrow = async function (effect, items, initialize) {
+    var cacheMeta = effect.cache;
+    var match = cacheMeta !== undefined ? cacheMeta : Js_exn.raiseError("Failed to set effect cache for \"" + effect.name + "\". Effect has no cache enabled.");
+    var table = match.table;
    if (initialize) {
      await sql.unsafe(makeCreateTableQuery(table, pgSchema));
      if (onNewTables !== undefined) {
@@ -479,19 +477,7 @@
      }

    }
-
-    for(var idx = 0 ,idx_finish = outputs.length; idx < idx_finish; ++idx){
-      items.push({
-        id: Caml_array.get(ids, idx),
-        output: Caml_array.get(outputs, idx)
-      });
-    }
-    return await setOrThrow$1(items, table, S$RescriptSchema.schema(function (s) {
-      return {
-        id: s.m(S$RescriptSchema.string),
-        output: s.m(outputSchema)
-      };
-    }));
+    return await setOrThrow$1(items, table, match.itemSchema);
  };
  var dumpEffectCache = async function () {
    try {
@@ -568,15 +554,11 @@ function make(sql, pgHost, pgSchema, pgPort, pgUser, pgDatabase, pgPassword, onI
      return entry.endsWith(".tsv");
    });
    await Promise.all(cacheFiles.map(function (entry) {
-      var
-      var
-      var table = Table.mkTable(tableName, [], [
-        Table.mkField("id", "TEXT", S$RescriptSchema.string, undefined, undefined, undefined, true, undefined, undefined),
-        Table.mkField("output", "JSONB", S$RescriptSchema.json(false), undefined, undefined, undefined, undefined, undefined, undefined)
-      ]);
+      var effectName = entry.slice(0, -4);
+      var table = Internal.makeCacheTable(effectName);
      return sql.unsafe(makeCreateTableQuery(table, pgSchema)).then(function () {
        var inputFile = Path.join(cacheDirPath, entry);
-        var command = psqlExec$1 + " -c 'COPY \"" + pgSchema + "\".\"" + tableName + "\" FROM STDIN WITH (FORMAT text, HEADER);' < " + inputFile;
+        var command = psqlExec$1 + " -c 'COPY \"" + pgSchema + "\".\"" + table.tableName + "\" FROM STDIN WITH (FORMAT text, HEADER);' < " + inputFile;
        return new Promise((function (resolve, reject) {
          Child_process.exec(command, psqlExecOptions, (function (error, stdout, param) {
            if (error === null) {
@@ -633,6 +615,7 @@ function make(sql, pgHost, pgSchema, pgPort, pgUser, pgDatabase, pgPassword, onI
 
 var maxItemsPerQuery = 500;
 
+exports.getCacheRowCountFnName = getCacheRowCountFnName;
 exports.makeCreateIndexQuery = makeCreateIndexQuery;
 exports.makeCreateTableIndicesQuery = makeCreateTableIndicesQuery;
 exports.makeCreateTableQuery = makeCreateTableQuery;
@@ -654,7 +637,6 @@ exports.setQueryCache = setQueryCache;
 exports.setOrThrow = setOrThrow;
 exports.setEntityHistoryOrThrow = setEntityHistoryOrThrow;
 exports.makeSchemaTableNamesQuery = makeSchemaTableNamesQuery;
-exports.cacheTablePrefix = cacheTablePrefix;
 exports.cacheTablePrefixLength = cacheTablePrefixLength;
 exports.makeSchemaCacheTableInfoQuery = makeSchemaCacheTableInfoQuery;
 exports.getConnectedPsqlExec = getConnectedPsqlExec;
package/src/Utils.res
CHANGED
@@ -516,6 +516,12 @@ module Proxy = {
 }
 
 module Hash = {
+  let fail = name => {
+    Js.Exn.raiseError(
+      `Failed to get hash for ${name}. If you're using a custom Sury schema make it based on the string type with a decoder: const myTypeSchema = S.transform(S.string, undefined, (yourType) => yourType.toString())`,
+    )
+  }
+
   // Hash to JSON string. No specific reason for this,
   // just to stick to at least some sort of spec.
   // After Sury v11 is out we'll be able to do it with schema
@@ -546,13 +552,16 @@ module Hash = {
     if constructor === %raw(`Object`) {
       let hash = ref("{")
       let keys = any->Js.Dict.keys->Js.Array2.sortInPlace
+      let isFirst = ref(true)
       for i in 0 to keys->Js.Array2.length - 1 {
         let key = keys->Js.Array2.unsafe_get(i)
         let value = any->Js.Dict.unsafeGet(key)
-        if i !== 0 {
-          hash := hash.contents ++ ","
-        }
         if value !== %raw(`undefined`) {
+          if isFirst.contents {
+            isFirst := false
+          } else {
+            hash := hash.contents ++ ","
+          }
           // Ideally should escape and wrap the key in double quotes
           // but since we don't need to decode the hash,
           // it's fine to keep it super simple
@@ -563,13 +572,13 @@
       } else if constructor["name"] === "BigNumber" {
         `"${(any->magic)["toString"]()}"`
       } else {
-
+        fail((constructor->magic)["name"])
      }
    }
  | "symbol"
  | "function" =>
    (any->magic)["toString"]()
-  | typeof =>
+  | typeof => fail(typeof)
  }
 }
}
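
The `isFirst` change fixes comma placement in the object hash: previously the separator was keyed off the loop index, so an object whose first (sorted) key held `undefined` hashed with a stray leading comma. A standalone TypeScript sketch of the fixed behavior; the real code recurses with `makeOrThrow` rather than `JSON.stringify`:

```typescript
// Keys are sorted, undefined values are skipped, and a comma is emitted only
// after at least one field has actually been written.
function hashObject(obj: Record<string, unknown>): string {
  let hash = "{";
  let isFirst = true;
  for (const key of Object.keys(obj).sort()) {
    const value = obj[key];
    if (value !== undefined) {
      if (isFirst) {
        isFirst = false;
      } else {
        hash += ",";
      }
      hash += `"${key}":${JSON.stringify(value)}`;
    }
  }
  return hash + "}";
}

// Before the fix, {a: undefined, b: 1} produced '{,"b":1}'; now it produces '{"b":1}',
// the same hash as {b: 1}.
```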
package/src/Utils.res.js
CHANGED
@@ -452,6 +452,10 @@ var $$Map = {};
 
 var $$Proxy = {};
 
+function fail(name) {
+  return Js_exn.raiseError("Failed to get hash for " + name + ". If you're using a custom Sury schema make it based on the string type with a decoder: const myTypeSchema = S.transform(S.string, undefined, (yourType) => yourType.toString())");
+}
+
 function makeOrThrow(any) {
   var $$typeof = typeof any;
   switch ($$typeof) {
@@ -484,18 +488,21 @@ function makeOrThrow(any) {
       if (constructor.name === "BigNumber") {
         return "\"" + any.toString() + "\"";
       } else {
-        return
+        return fail(constructor.name);
       }
     }
     var hash$1 = "{";
     var keys = Object.keys(any).sort();
+    var isFirst = true;
     for(var i$1 = 0 ,i_finish$1 = keys.length; i$1 < i_finish$1; ++i$1){
       var key = keys[i$1];
       var value = any[key];
-      if (i$1 !== 0) {
-        hash$1 = hash$1 + ",";
-      }
       if (value !== undefined) {
+        if (isFirst) {
+          isFirst = false;
+        } else {
+          hash$1 = hash$1 + ",";
+        }
        hash$1 = hash$1 + ("\"" + key + "\":" + makeOrThrow(any[key]));
      }
 
@@ -509,11 +516,12 @@ function makeOrThrow(any) {
     case "undefined" :
       return "null";
     default:
-      return
+      return fail($$typeof);
   }
 }
 
 var Hash = {
+  fail: fail,
   makeOrThrow: makeOrThrow
 };
 
package/src/bindings/NodeJs.res
CHANGED
@@ -4,6 +4,37 @@ type t
 type exitCode = | @as(0) Success | @as(1) Failure
 @send external exitWithCode: (t, exitCode) => unit = "exit"
 
+module Util = {
+  @unboxed
+  type depth = Int(int) | @as(null) Null
+  @unboxed
+  type compact = Bool(bool) | Int(int)
+  @unboxed
+  type sorted = Bool(bool) | Fn((string, string) => int)
+  @unboxed
+  type getters = | @as(true) True | @as(false) False | @as("get") Get | @as("set") Set
+
+  @unbox
+  type inspectOptions = {
+    showHidden?: bool,
+    depth?: depth,
+    colors?: bool,
+    customInspect?: bool,
+    showProxy?: bool,
+    maxArrayLength?: int,
+    maxStringLength?: int,
+    breakLength?: int,
+    @as("compact") compact?: compact,
+    sorted?: sorted,
+    getters?: string,
+    numericSeparator?: bool,
+  }
+
+  @module("util") external inspect: ('a, inspectOptions) => string = "inspect"
+
+  let inspectObj = a => inspect(a, {showHidden: false, depth: Null, colors: true})
+}
+
 module Process = {
   type t = {env: Js.Dict.t<string>}
   @module external process: t = "process"
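
The new `Util` module binds Node's `util.inspect`, with `inspectObj` as a convenience for full-depth, colorized printing. The plain Node.js equivalent, for reference:

```typescript
import { inspect } from "node:util";

// Same options as NodeJs.Util.inspectObj: no hidden props, unlimited depth, ANSI colors.
const inspectObj = (value: unknown): string =>
  inspect(value, { showHidden: false, depth: null, colors: true });

console.log(inspectObj({ nested: { deeply: { value: "ok" } } }));
```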
package/src/bindings/NodeJs.res.js
CHANGED
@@ -1,6 +1,19 @@
 // Generated by ReScript, PLEASE EDIT WITH CARE
 'use strict';
 
+var Util = require("util");
+
+function inspectObj(a) {
+  return Util.inspect(a, {
+    showHidden: false,
+    depth: null,
+    colors: true
+  });
+}
+
+var Util$1 = {
+  inspectObj: inspectObj
+};
 
 var Process = {};
 
@@ -14,8 +27,9 @@ var Fs = {
   Promises: Promises
 };
 
+exports.Util = Util$1;
 exports.Process = Process;
 exports.ChildProcess = ChildProcess;
 exports.Path = Path;
 exports.Fs = Fs;
-/*
+/* util Not a pure module */