envio 2.26.0-rc.0 → 2.26.0-rc.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/evm.schema.json +7 -0
- package/fuel.schema.json +7 -0
- package/index.d.ts +6 -22
- package/index.js +2 -1
- package/package.json +5 -5
- package/src/Envio.gen.ts +3 -1
- package/src/Envio.res +13 -0
- package/src/Envio.res.js +9 -0
- package/src/FetchState.res +1 -4
- package/src/Internal.res +3 -0
- package/src/Logging.res +8 -0
- package/src/Logging.res.js +29 -0
- package/src/Persistence.res +110 -47
- package/src/Persistence.res.js +61 -17
- package/src/PgStorage.res +519 -86
- package/src/PgStorage.res.js +386 -57
- package/src/Prometheus.res +12 -0
- package/src/Prometheus.res.js +12 -0
- package/src/Utils.res +39 -9
- package/src/Utils.res.js +17 -6
- package/src/bindings/BigInt.gen.ts +10 -0
- package/src/bindings/BigInt.res +1 -0
- package/src/bindings/NodeJs.res +27 -26
- package/src/bindings/NodeJs.res.js +5 -13
- package/src/db/EntityHistory.res +5 -28
- package/src/db/EntityHistory.res.js +4 -23
- package/src/db/Table.res +3 -61
- package/src/db/Table.res.js +3 -42
package/src/PgStorage.res.js
CHANGED
```diff
@@ -1,19 +1,28 @@
 // Generated by ReScript, PLEASE EDIT WITH CARE
 'use strict';

+var Fs = require("fs");
+var Path = require("path");
 var $$Array = require("rescript/lib/js/array.js");
 var Table = require("./db/Table.res.js");
 var Utils = require("./Utils.res.js");
 var Js_exn = require("rescript/lib/js/js_exn.js");
 var Schema = require("./db/Schema.res.js");
+var Js_dict = require("rescript/lib/js/js_dict.js");
+var Logging = require("./Logging.res.js");
+var $$Promise = require("./bindings/Promise.res.js");
+var Internal = require("./Internal.res.js");
 var Belt_Array = require("rescript/lib/js/belt_Array.js");
-var …
+var Caml_array = require("rescript/lib/js/caml_array.js");
+var Belt_Option = require("rescript/lib/js/belt_Option.js");
 var Caml_option = require("rescript/lib/js/caml_option.js");
 var Persistence = require("./Persistence.res.js");
+var Child_process = require("child_process");
+var Caml_exceptions = require("rescript/lib/js/caml_exceptions.js");
 var S$RescriptSchema = require("rescript-schema/src/S.res.js");
 var Caml_js_exceptions = require("rescript/lib/js/caml_js_exceptions.js");

-function …
+function makeCreateIndexQuery(tableName, indexFields, pgSchema) {
 var indexName = tableName + "_" + indexFields.join("_");
 var index = Belt_Array.map(indexFields, (function (idx) {
 return "\"" + idx + "\"";
@@ -21,20 +30,20 @@ function makeCreateIndexSql(tableName, indexFields, pgSchema) {
 return "CREATE INDEX IF NOT EXISTS \"" + indexName + "\" ON \"" + pgSchema + "\".\"" + tableName + "\"(" + index + ");";
 }

-function …
+function makeCreateTableIndicesQuery(table, pgSchema) {
 var tableName = table.tableName;
 var createIndex = function (indexField) {
-return …
+return makeCreateIndexQuery(tableName, [indexField], pgSchema);
 };
 var createCompositeIndex = function (indexFields) {
-return …
+return makeCreateIndexQuery(tableName, indexFields, pgSchema);
 };
 var singleIndices = Table.getSingleIndices(table);
 var compositeIndices = Table.getCompositeIndices(table);
 return Belt_Array.map(singleIndices, createIndex).join("\n") + Belt_Array.map(compositeIndices, createCompositeIndex).join("\n");
 }

-function …
+function makeCreateTableQuery(table, pgSchema) {
 var fieldsMapped = Belt_Array.map(Table.getFields(table), (function (field) {
 var defaultValue = field.defaultValue;
 var fieldType = field.fieldType;
@@ -58,11 +67,11 @@ function makeCreateTableSql(table, pgSchema) {
 ) + ");";
 }

-function makeInitializeTransaction(pgSchema, pgUser, generalTablesOpt, entitiesOpt, enumsOpt, …
+function makeInitializeTransaction(pgSchema, pgUser, generalTablesOpt, entitiesOpt, enumsOpt, isEmptyPgSchemaOpt) {
 var generalTables = generalTablesOpt !== undefined ? generalTablesOpt : [];
 var entities = entitiesOpt !== undefined ? entitiesOpt : [];
 var enums = enumsOpt !== undefined ? enumsOpt : [];
-var …
+var isEmptyPgSchema = isEmptyPgSchemaOpt !== undefined ? isEmptyPgSchemaOpt : false;
 var allTables = $$Array.copy(generalTables);
 var allEntityTables = [];
 entities.forEach(function (entity) {
@@ -73,22 +82,20 @@ function makeInitializeTransaction(pgSchema, pgUser, generalTablesOpt, entitiesO
 var derivedSchema = Schema.make(allEntityTables);
 var query = {
 contents: (
-…
-) + ("GRANT ALL ON SCHEMA \"" + pgSchema + "\" TO " + pgUser + ";\nGRANT ALL ON SCHEMA \"" + pgSchema + "\" TO public;")
+isEmptyPgSchema && pgSchema === "public" ? "" : "DROP SCHEMA IF EXISTS \"" + pgSchema + "\" CASCADE;\nCREATE SCHEMA \"" + pgSchema + "\";\n"
+) + ("GRANT ALL ON SCHEMA \"" + pgSchema + "\" TO \"" + pgUser + "\";\nGRANT ALL ON SCHEMA \"" + pgSchema + "\" TO public;")
 };
 enums.forEach(function (enumConfig) {
 var enumCreateQuery = "CREATE TYPE \"" + pgSchema + "\"." + enumConfig.name + " AS ENUM(" + enumConfig.variants.map(function (v) {
 return "'" + v + "'";
 }).join(", ") + ");";
-query.contents = query.contents + "\n" + …
-cleanRun ? enumCreateQuery : "IF NOT EXISTS (\n SELECT 1 FROM pg_type \n WHERE typname = '" + enumConfig.name.toLowerCase() + "' \n AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = '" + pgSchema + "')\n) THEN \n " + enumCreateQuery + "\nEND IF;"
-);
+query.contents = query.contents + "\n" + enumCreateQuery;
 });
 allTables.forEach(function (table) {
-query.contents = query.contents + "\n" + …
+query.contents = query.contents + "\n" + makeCreateTableQuery(table, pgSchema);
 });
 allTables.forEach(function (table) {
-var indices = …
+var indices = makeCreateTableIndicesQuery(table, pgSchema);
 if (indices !== "") {
 query.contents = query.contents + "\n" + indices;
 return ;
@@ -102,22 +109,27 @@ function makeInitializeTransaction(pgSchema, pgUser, generalTablesOpt, entitiesO
 functionsQuery.contents = functionsQuery.contents + "\n" + entity.entityHistory.createInsertFnQuery;
 Table.getDerivedFromFields(entity.table).forEach(function (derivedFromField) {
 var indexField = Utils.unwrapResultExn(Schema.getDerivedFromFieldName(derivedSchema, derivedFromField));
-query.contents = query.contents + "\n" + …
+query.contents = query.contents + "\n" + makeCreateIndexQuery(derivedFromField.derivedFromEntity, [indexField], pgSchema);
 });
 });
-…
+functionsQuery.contents = functionsQuery.contents + "\n" + ("CREATE OR REPLACE FUNCTION get_cache_row_count(table_name text) \n RETURNS integer AS $$\n DECLARE\n result integer;\n BEGIN\n EXECUTE format('SELECT COUNT(*) FROM \"" + pgSchema + "\".%I', table_name) INTO result;\n RETURN result;\n END;\n $$ LANGUAGE plpgsql;");
+return [query.contents].concat(functionsQuery.contents !== "" ? [functionsQuery.contents] : []);
 }

-function …
+function makeLoadByIdQuery(pgSchema, tableName) {
 return "SELECT * FROM \"" + pgSchema + "\".\"" + tableName + "\" WHERE id = $1 LIMIT 1;";
 }

-function …
+function makeLoadByFieldQuery(pgSchema, tableName, fieldName, operator) {
+return "SELECT * FROM \"" + pgSchema + "\".\"" + tableName + "\" WHERE \"" + fieldName + "\" " + operator + " $1;";
+}
+
+function makeLoadByIdsQuery(pgSchema, tableName) {
 return "SELECT * FROM \"" + pgSchema + "\".\"" + tableName + "\" WHERE id = ANY($1::text[]);";
 }

-function …
-var match = Table.toSqlParams(table, itemSchema);
+function makeInsertUnnestSetQuery(pgSchema, table, itemSchema, isRawEvents) {
+var match = Table.toSqlParams(table, itemSchema, pgSchema);
 var quotedNonPrimaryFieldNames = match.quotedNonPrimaryFieldNames;
 var primaryKeyFieldNames = Table.getPrimaryKeyFieldNames(table);
 return "INSERT INTO \"" + pgSchema + "\".\"" + table.tableName + "\" (" + match.quotedFieldNames.join(", ") + ")\nSELECT * FROM unnest(" + match.arrayFieldTypes.map(function (arrayFieldType, idx) {
@@ -133,8 +145,8 @@ function makeInsertUnnestSetSql(pgSchema, table, itemSchema, isRawEvents) {
 ) + ";";
 }

-function …
-var match = Table.toSqlParams(table, itemSchema);
+function makeInsertValuesSetQuery(pgSchema, table, itemSchema, itemsCount) {
+var match = Table.toSqlParams(table, itemSchema, pgSchema);
 var quotedNonPrimaryFieldNames = match.quotedNonPrimaryFieldNames;
 var quotedFieldNames = match.quotedFieldNames;
 var primaryKeyFieldNames = Table.getPrimaryKeyFieldNames(table);
@@ -166,18 +178,20 @@ function makeInsertValuesSetSql(pgSchema, table, itemSchema, itemsCount) {

 var rawEventsTableName = "raw_events";

+var eventSyncStateTableName = "event_sync_state";
+
 function makeTableBatchSetQuery(pgSchema, table, itemSchema) {
-var match = Table.toSqlParams(table, itemSchema);
+var match = Table.toSqlParams(table, itemSchema, pgSchema);
 var isRawEvents = table.tableName === rawEventsTableName;
 if (isRawEvents || !match.hasArrayField) {
 return {
-…
+query: makeInsertUnnestSetQuery(pgSchema, table, itemSchema, isRawEvents),
 convertOrThrow: S$RescriptSchema.compile(S$RescriptSchema.unnest(match.dbSchema), "Output", "Input", "Sync", false),
 isInsertValues: false
 };
 } else {
 return {
-…
+query: makeInsertValuesSetQuery(pgSchema, table, itemSchema, 500),
 convertOrThrow: S$RescriptSchema.compile(S$RescriptSchema.preprocess(S$RescriptSchema.unnest(itemSchema), (function (param) {
 return {
 s: (function (prim) {
@@ -201,6 +215,24 @@ function chunkArray(arr, chunkSize) {
 return chunks;
 }

+function removeInvalidUtf8InPlace(entities) {
+entities.forEach(function (item) {
+Utils.Dict.forEachWithKey(item, (function (key, value) {
+if (typeof value === "string") {
+item[key] = value.replaceAll("\x00", "");
+return ;
+}
+
+}));
+});
+}
+
+var pgEncodingErrorSchema = S$RescriptSchema.object(function (s) {
+s.tag("message", "invalid byte sequence for encoding \"UTF8\": 0x00");
+});
+
+var PgEncodingError = /* @__PURE__ */Caml_exceptions.create("PgStorage.PgEncodingError");
+
 var setQueryCache = new WeakMap();

 async function setOrThrow(sql, items, table, itemSchema, pgSchema) {
@@ -208,30 +240,24 @@ async function setOrThrow(sql, items, table, itemSchema, pgSchema) {
 return ;
 }
 var cached = setQueryCache.get(table);
-var …
+var data;
 if (cached !== undefined) {
-…
+data = Caml_option.valFromOption(cached);
 } else {
 var newQuery = makeTableBatchSetQuery(pgSchema, table, itemSchema);
 setQueryCache.set(table, newQuery);
-…
+data = newQuery;
 }
-var sqlQuery = query.sql;
 try {
-…
-…
-return await sql.unsafe(sqlQuery, payload, {prepare: true});
+if (!data.isInsertValues) {
+return await sql.unsafe(data.query, data.convertOrThrow(items), {prepare: true});
 }
-var …
-var fieldsCount;
-fieldsCount = typeof match !== "object" || match.TAG !== "object" ? Js_exn.raiseError("Expected an object schema for table") : match.items.length;
-var maxChunkSize = Math.imul(500, fieldsCount);
-var chunks = chunkArray(payload, maxChunkSize);
+var chunks = chunkArray(items, 500);
 var responses = [];
 chunks.forEach(function (chunk) {
 var chunkSize = chunk.length;
-var isFullChunk = chunkSize === …
-var response = sql.unsafe(isFullChunk ? …
+var isFullChunk = chunkSize === 500;
+var response = sql.unsafe(isFullChunk ? data.query : makeInsertValuesSetQuery(pgSchema, table, itemSchema, chunkSize), data.convertOrThrow(chunk), {prepare: true});
 responses.push(response);
 });
 await Promise.all(responses);
@@ -250,34 +276,130 @@ async function setOrThrow(sql, items, table, itemSchema, pgSchema) {
 throw {
 RE_EXN_ID: Persistence.StorageError,
 message: "Failed to insert items into table \"" + table.tableName + "\"",
-reason: exn,
+reason: Internal.prettifyExn(exn),
 Error: new Error()
 };
 }
 }

-function …
+function setEntityHistoryOrThrow(sql, entityHistory, rows, shouldCopyCurrentEntity, shouldRemoveInvalidUtf8Opt) {
+var shouldRemoveInvalidUtf8 = shouldRemoveInvalidUtf8Opt !== undefined ? shouldRemoveInvalidUtf8Opt : false;
+return Promise.all(Belt_Array.map(rows, (function (historyRow) {
+var row = S$RescriptSchema.reverseConvertToJsonOrThrow(historyRow, entityHistory.schema);
+if (shouldRemoveInvalidUtf8) {
+removeInvalidUtf8InPlace([row]);
+}
+return entityHistory.insertFn(sql, row, shouldCopyCurrentEntity !== undefined ? shouldCopyCurrentEntity : !Belt_Option.getWithDefault(historyRow.containsRollbackDiffChange, false));
+})));
+}
+
+function makeSchemaTableNamesQuery(pgSchema) {
+return "SELECT table_name FROM information_schema.tables WHERE table_schema = '" + pgSchema + "';";
+}
+
+var cacheTablePrefix = "envio_effect_";
+
+var cacheTablePrefixLength = cacheTablePrefix.length;
+
+function makeSchemaCacheTableInfoQuery(pgSchema) {
+return "SELECT \n t.table_name,\n get_cache_row_count(t.table_name) as count\n FROM information_schema.tables t\n WHERE t.table_schema = '" + pgSchema + "' \n AND t.table_name LIKE '" + cacheTablePrefix + "%';";
+}
+
+var psqlExecState = {
+contents: "Unknown"
+};
+
+async function getConnectedPsqlExec(pgUser, pgHost, pgDatabase, pgPort) {
+var promise = psqlExecState.contents;
+if (typeof promise === "object") {
+if (promise.TAG === "Pending") {
+return await promise._0;
+} else {
+return promise._0;
+}
+}
+var promise$1 = new Promise((function (resolve, _reject) {
+var binary = "psql";
+Child_process.exec(binary + " --version", (function (error, param, param$1) {
+if (error === null) {
+return resolve({
+TAG: "Ok",
+_0: binary + " -h " + pgHost + " -p " + pgPort.toString() + " -U " + pgUser + " -d " + pgDatabase
+});
+}
+var binary$1 = "docker-compose exec -T -u " + pgUser + " envio-postgres psql";
+Child_process.exec(binary$1 + " --version", (function (error, param, param$1) {
+if (error === null) {
+return resolve({
+TAG: "Ok",
+_0: binary$1 + " -h " + pgHost + " -p " + (5432).toString() + " -U " + pgUser + " -d " + pgDatabase
+});
+} else {
+return resolve({
+TAG: "Error",
+_0: "Please check if \"psql\" binary is installed or docker-compose is running for the local indexer."
+});
+}
+}));
+}));
+}));
+psqlExecState.contents = {
+TAG: "Pending",
+_0: promise$1
+};
+var result = await promise$1;
+psqlExecState.contents = {
+TAG: "Resolved",
+_0: result
+};
+return result;
+}
+
+function make(sql, pgHost, pgSchema, pgPort, pgUser, pgDatabase, pgPassword, onInitialize, onNewTables) {
+var psqlExecOptions_env = Js_dict.fromArray([
+[
+"PGPASSWORD",
+pgPassword
+],
+[
+"PATH",
+process.env.PATH
+]
+]);
+var psqlExecOptions = {
+env: psqlExecOptions_env
+};
+var cacheDirPath = Path.resolve("..", ".envio", "cache");
 var isInitialized = async function () {
-var …
-return Utils.$$Array.notEmpty( …
+var envioTables = await sql.unsafe("SELECT table_schema FROM information_schema.tables WHERE table_schema = '" + pgSchema + "' AND table_name = '" + eventSyncStateTableName + "';");
+return Utils.$$Array.notEmpty(envioTables);
 };
-var initialize = async function (entitiesOpt, generalTablesOpt, enumsOpt …
+var initialize = async function (entitiesOpt, generalTablesOpt, enumsOpt) {
 var entities = entitiesOpt !== undefined ? entitiesOpt : [];
 var generalTables = generalTablesOpt !== undefined ? generalTablesOpt : [];
 var enums = enumsOpt !== undefined ? enumsOpt : [];
-var …
-…
+var schemaTableNames = await sql.unsafe(makeSchemaTableNamesQuery(pgSchema));
+if (Utils.$$Array.notEmpty(schemaTableNames) && !schemaTableNames.some(function (table) {
+return table.table_name === eventSyncStateTableName;
+})) {
+Js_exn.raiseError("Cannot run Envio migrations on PostgreSQL schema \"" + pgSchema + "\" because it contains non-Envio tables. Running migrations would delete all data in this schema.\n\nTo resolve this:\n1. If you want to use this schema, first backup any important data, then drop it with: \"pnpm envio local db-migrate down\"\n2. Or specify a different schema name by setting the \"ENVIO_PG_PUBLIC_SCHEMA\" environment variable\n3. Or manually drop the schema in your database if you're certain the data is not needed.");
+}
+var queries = makeInitializeTransaction(pgSchema, pgUser, generalTables, entities, enums, Utils.$$Array.isEmpty(schemaTableNames));
 await sql.begin(function (sql) {
 return queries.map(function (query) {
 return sql.unsafe(query);
 });
 });
+if (onInitialize !== undefined) {
+return await onInitialize();
+}
+
 };
 var loadByIdsOrThrow = async function (ids, table, rowsSchema) {
 var rows;
 try {
 rows = await (
-ids.length !== 1 ? sql.unsafe( …
+ids.length !== 1 ? sql.unsafe(makeLoadByIdsQuery(pgSchema, table.tableName), [ids], {prepare: true}) : sql.unsafe(makeLoadByIdQuery(pgSchema, table.tableName), ids, {prepare: true})
 );
 }
 catch (raw_exn){
@@ -302,32 +424,239 @@ function make(sql, pgSchema, pgUser) {
 };
 }
 };
+var loadByFieldOrThrow = async function (fieldName, fieldSchema, fieldValue, operator, table, rowsSchema) {
+var params;
+try {
+params = [S$RescriptSchema.reverseConvertToJsonOrThrow(fieldValue, fieldSchema)];
+}
+catch (raw_exn){
+var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);
+throw {
+RE_EXN_ID: Persistence.StorageError,
+message: "Failed loading \"" + table.tableName + "\" from storage by field \"" + fieldName + "\". Couldn't serialize provided value.",
+reason: exn,
+Error: new Error()
+};
+}
+var rows;
+try {
+rows = await sql.unsafe(makeLoadByFieldQuery(pgSchema, table.tableName, fieldName, operator), params, {prepare: true});
+}
+catch (raw_exn$1){
+var exn$1 = Caml_js_exceptions.internalToOCamlException(raw_exn$1);
+throw {
+RE_EXN_ID: Persistence.StorageError,
+message: "Failed loading \"" + table.tableName + "\" from storage by field \"" + fieldName + "\"",
+reason: exn$1,
+Error: new Error()
+};
+}
+try {
+return S$RescriptSchema.parseOrThrow(rows, rowsSchema);
+}
+catch (raw_exn$2){
+var exn$2 = Caml_js_exceptions.internalToOCamlException(raw_exn$2);
+throw {
+RE_EXN_ID: Persistence.StorageError,
+message: "Failed to parse \"" + table.tableName + "\" loaded from storage by ids",
+reason: exn$2,
+Error: new Error()
+};
+}
+};
 var setOrThrow$1 = function (items, table, itemSchema) {
 return setOrThrow(sql, items, table, itemSchema, pgSchema);
 };
+var setEffectCacheOrThrow = async function (effectName, ids, outputs, outputSchema, initialize) {
+var table = Table.mkTable(cacheTablePrefix + effectName, [], [
+Table.mkField("id", "TEXT", S$RescriptSchema.string, undefined, undefined, undefined, true, undefined, undefined),
+Table.mkField("output", "JSONB", S$RescriptSchema.json(false), undefined, undefined, undefined, undefined, undefined, undefined)
+]);
+if (initialize) {
+await sql.unsafe(makeCreateTableQuery(table, pgSchema));
+if (onNewTables !== undefined) {
+await onNewTables([table.tableName]);
+}
+
+}
+var items = [];
+for(var idx = 0 ,idx_finish = outputs.length; idx < idx_finish; ++idx){
+items.push({
+id: Caml_array.get(ids, idx),
+output: Caml_array.get(outputs, idx)
+});
+}
+return await setOrThrow$1(items, table, S$RescriptSchema.schema(function (s) {
+return {
+id: s.m(S$RescriptSchema.string),
+output: s.m(outputSchema)
+};
+}));
+};
+var dumpEffectCache = async function () {
+try {
+var cacheTableInfo = (await sql.unsafe(makeSchemaCacheTableInfoQuery(pgSchema))).filter(function (i) {
+return i.count > 0;
+});
+if (!Utils.$$Array.notEmpty(cacheTableInfo)) {
+return ;
+}
+try {
+await Fs.promises.access(cacheDirPath);
+}
+catch (exn){
+await Fs.promises.mkdir(cacheDirPath, {
+recursive: true
+});
+}
+var psqlExec = await getConnectedPsqlExec(pgUser, pgHost, pgDatabase, pgPort);
+if (psqlExec.TAG !== "Ok") {
+return Logging.error("Failed to dump cache. " + psqlExec._0);
+}
+var psqlExec$1 = psqlExec._0;
+Logging.info("Dumping cache: " + cacheTableInfo.map(function (param) {
+return param.table_name + " (" + String(param.count) + " rows)";
+}).join(", "));
+var promises = cacheTableInfo.map(async function (param) {
+var tableName = param.table_name;
+var cacheName = tableName.slice(cacheTablePrefixLength);
+var outputFile = Path.join(cacheDirPath, cacheName + ".tsv");
+var command = psqlExec$1 + " -c 'COPY \"" + pgSchema + "\".\"" + tableName + "\" TO STDOUT WITH (FORMAT text, HEADER);' > " + outputFile;
+return new Promise((function (resolve, reject) {
+Child_process.exec(command, psqlExecOptions, (function (error, stdout, param) {
+if (error === null) {
+return resolve(stdout);
+} else {
+return reject(error);
+}
+}));
+}));
+});
+await Promise.all(promises);
+return Logging.info("Successfully dumped cache to " + cacheDirPath);
+}
+catch (raw_exn){
+var exn$1 = Caml_js_exceptions.internalToOCamlException(raw_exn);
+return Logging.errorWithExn(Internal.prettifyExn(exn$1), "Failed to dump cache.");
+}
+};
+var restoreEffectCache = async function (withUpload) {
+if (withUpload) {
+var nothingToUploadErrorMessage = "Nothing to upload.";
+var match = await Promise.all([
+$$Promise.$$catch(Fs.promises.readdir(cacheDirPath).then(function (e) {
+return {
+TAG: "Ok",
+_0: e
+};
+}), (function (param) {
+return Promise.resolve({
+TAG: "Error",
+_0: nothingToUploadErrorMessage
+});
+})),
+getConnectedPsqlExec(pgUser, pgHost, pgDatabase, pgPort)
+]);
+var exit = 0;
+var message;
+var entries = match[0];
+if (entries.TAG === "Ok") {
+var psqlExec = match[1];
+if (psqlExec.TAG === "Ok") {
+var psqlExec$1 = psqlExec._0;
+var cacheFiles = entries._0.filter(function (entry) {
+return entry.endsWith(".tsv");
+});
+await Promise.all(cacheFiles.map(function (entry) {
+var cacheName = entry.slice(0, -4);
+var tableName = cacheTablePrefix + cacheName;
+var table = Table.mkTable(tableName, [], [
+Table.mkField("id", "TEXT", S$RescriptSchema.string, undefined, undefined, undefined, true, undefined, undefined),
+Table.mkField("output", "JSONB", S$RescriptSchema.json(false), undefined, undefined, undefined, undefined, undefined, undefined)
+]);
+return sql.unsafe(makeCreateTableQuery(table, pgSchema)).then(function () {
+var inputFile = Path.join(cacheDirPath, entry);
+var command = psqlExec$1 + " -c 'COPY \"" + pgSchema + "\".\"" + tableName + "\" FROM STDIN WITH (FORMAT text, HEADER);' < " + inputFile;
+return new Promise((function (resolve, reject) {
+Child_process.exec(command, psqlExecOptions, (function (error, stdout, param) {
+if (error === null) {
+return resolve(stdout);
+} else {
+return reject(error);
+}
+}));
+}));
+});
+}));
+Logging.info("Successfully uploaded cache.");
+} else {
+message = match[1]._0;
+exit = 1;
+}
+} else {
+message = entries._0;
+exit = 1;
+}
+if (exit === 1) {
+if (message === nothingToUploadErrorMessage) {
+Logging.info("No cache found to upload.");
+} else {
+Logging.error("Failed to upload cache, continuing without it. " + message);
+}
+}
+
+}
+var cacheTableInfo = await sql.unsafe(makeSchemaCacheTableInfoQuery(pgSchema));
+if (withUpload && Utils.$$Array.notEmpty(cacheTableInfo) && onNewTables !== undefined) {
+await onNewTables(cacheTableInfo.map(function (info) {
+return info.table_name;
+}));
+}
+return cacheTableInfo.map(function (info) {
+return {
+effectName: info.table_name.slice(cacheTablePrefixLength),
+count: info.count
+};
+});
+};
 return {
 isInitialized: isInitialized,
 initialize: initialize,
 loadByIdsOrThrow: loadByIdsOrThrow,
-…
+loadByFieldOrThrow: loadByFieldOrThrow,
+setOrThrow: setOrThrow$1,
+setEffectCacheOrThrow: setEffectCacheOrThrow,
+dumpEffectCache: dumpEffectCache,
+restoreEffectCache: restoreEffectCache
 };
 }

 var maxItemsPerQuery = 500;

-exports.…
-exports.…
-exports.…
+exports.makeCreateIndexQuery = makeCreateIndexQuery;
+exports.makeCreateTableIndicesQuery = makeCreateTableIndicesQuery;
+exports.makeCreateTableQuery = makeCreateTableQuery;
 exports.makeInitializeTransaction = makeInitializeTransaction;
-exports.…
-exports.…
-exports.…
-exports.…
+exports.makeLoadByIdQuery = makeLoadByIdQuery;
+exports.makeLoadByFieldQuery = makeLoadByFieldQuery;
+exports.makeLoadByIdsQuery = makeLoadByIdsQuery;
+exports.makeInsertUnnestSetQuery = makeInsertUnnestSetQuery;
+exports.makeInsertValuesSetQuery = makeInsertValuesSetQuery;
 exports.rawEventsTableName = rawEventsTableName;
+exports.eventSyncStateTableName = eventSyncStateTableName;
 exports.maxItemsPerQuery = maxItemsPerQuery;
 exports.makeTableBatchSetQuery = makeTableBatchSetQuery;
 exports.chunkArray = chunkArray;
+exports.removeInvalidUtf8InPlace = removeInvalidUtf8InPlace;
+exports.pgEncodingErrorSchema = pgEncodingErrorSchema;
+exports.PgEncodingError = PgEncodingError;
 exports.setQueryCache = setQueryCache;
 exports.setOrThrow = setOrThrow;
+exports.setEntityHistoryOrThrow = setEntityHistoryOrThrow;
+exports.makeSchemaTableNamesQuery = makeSchemaTableNamesQuery;
+exports.cacheTablePrefix = cacheTablePrefix;
+exports.cacheTablePrefixLength = cacheTablePrefixLength;
+exports.makeSchemaCacheTableInfoQuery = makeSchemaCacheTableInfoQuery;
+exports.getConnectedPsqlExec = getConnectedPsqlExec;
 exports.make = make;
-/* …
+/* pgEncodingErrorSchema Not a pure module */
```

Note: lines ending in "…" were truncated by the diff viewer; only their visible prefix is reproduced here.
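The storage factory in this release takes the full connection details (host, port, database, password) so that the new effect-cache helpers can shell out to `psql` for COPY-based dump and restore. The following is a hypothetical usage sketch, not code from the package: the connection values, require path, and postgres.js client are assumptions, while the `make(...)` argument order and the returned methods are taken from the compiled output above.

```javascript
// Hypothetical sketch: wiring up the reworked PgStorage factory.
const postgres = require("postgres");
const PgStorage = require("envio/src/PgStorage.res.js");

// postgres.js-style client; PgStorage calls sql.unsafe(...) and sql.begin(...).
const sql = postgres({
  host: "localhost",
  port: 5432,
  user: "postgres",
  password: "testing",
  database: "envio-dev",
});

const storage = PgStorage.make(
  sql,
  "localhost",   // pgHost, used to build the psql command
  "public",      // pgSchema
  5432,          // pgPort
  "postgres",    // pgUser
  "envio-dev",   // pgDatabase
  "testing",     // pgPassword, passed to psql via PGPASSWORD
  undefined,     // onInitialize (optional callback)
  undefined      // onNewTables (optional callback)
);

async function demo() {
  // true once the "<pgSchema>.event_sync_state" table exists.
  console.log(await storage.isInitialized());
  // COPY each cached TSV file back into an "envio_effect_*" table and
  // get [{ effectName, count }] for everything that is cached.
  const cached = await storage.restoreEffectCache(true);
  console.log(cached);
  // Later, write the "envio_effect_*" tables out to TSV files via psql COPY.
  await storage.dumpEffectCache();
  await sql.end();
}

demo();
```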
package/src/Prometheus.res
CHANGED
```diff
@@ -590,3 +590,15 @@ module EffectCallsCount = {
     gauge->SafeGauge.handleInt(~labels=effectName, ~value=callsCount)
   }
 }
+
+module EffectCacheCount = {
+  let gauge = SafeGauge.makeOrThrow(
+    ~name="envio_effect_cache_count",
+    ~help="The number of items in the effect cache.",
+    ~labelSchema=effectLabelsSchema,
+  )
+
+  let set = (~count, ~effectName) => {
+    gauge->SafeGauge.handleInt(~labels=effectName, ~value=count)
+  }
+}
```
package/src/Prometheus.res.js
CHANGED
```diff
@@ -700,6 +700,17 @@ var EffectCallsCount = {
 set: set$21
 };

+var gauge$22 = makeOrThrow$1("envio_effect_cache_count", "The number of items in the effect cache.", effectLabelsSchema);
+
+function set$22(count, effectName) {
+handleInt$1(gauge$22, effectName, count);
+}
+
+var EffectCacheCount = {
+gauge: gauge$22,
+set: set$22
+};
+
 exports.loadEntitiesDurationCounter = loadEntitiesDurationCounter;
 exports.eventRouterDurationCounter = eventRouterDurationCounter;
 exports.executeBatchDurationCounter = executeBatchDurationCounter;
@@ -747,4 +758,5 @@ exports.ProgressBlockNumber = ProgressBlockNumber;
 exports.ProgressEventsCount = ProgressEventsCount;
 exports.effectLabelsSchema = effectLabelsSchema;
 exports.EffectCallsCount = EffectCallsCount;
+exports.EffectCacheCount = EffectCacheCount;
 /* loadEntitiesDurationCounter Not a pure module */
```
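On the JavaScript side the new module compiles to `EffectCacheCount.set(count, effectName)`. A hypothetical wiring sketch (the require path and the sample data are illustrative; the signature comes from the compiled output above) that feeds the gauge the per-effect counts returned by `restoreEffectCache`:

```javascript
// Hypothetical sketch: report restored effect-cache sizes through the new gauge.
const Prometheus = require("envio/src/Prometheus.res.js");

function reportCacheCounts(cacheInfo) {
  // cacheInfo has the shape returned by storage.restoreEffectCache:
  // [{ effectName: "getTokenMetadata", count: 1250 }]
  cacheInfo.forEach(function (info) {
    Prometheus.EffectCacheCount.set(info.count, info.effectName);
  });
}

reportCacheCounts([{ effectName: "getTokenMetadata", count: 1250 }]);
```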