envio 2.21.5 → 2.22.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +2 -2
- package/package.json +11 -8
- package/rescript.json +1 -1
- package/src/Address.res.js +30 -0
- package/src/ChainMap.res.js +77 -0
- package/src/Envio.res +2 -0
- package/src/Envio.res.js +16 -0
- package/src/ErrorHandling.res +0 -26
- package/src/ErrorHandling.res.js +56 -0
- package/src/EventUtils.res.js +75 -0
- package/src/EvmTypes.res.js +16 -0
- package/src/FetchState.res.js +969 -0
- package/src/Hasura.res +297 -0
- package/src/Hasura.res.js +245 -0
- package/src/Internal.res +26 -0
- package/src/Internal.res.js +50 -0
- package/src/LazyLoader.res.js +117 -0
- package/src/LoadManager.res.js +124 -0
- package/src/LogSelection.res.js +203 -0
- package/src/Logging.res +31 -31
- package/src/Logging.res.js +247 -0
- package/src/Persistence.res +111 -0
- package/src/Persistence.res.js +90 -0
- package/src/PgStorage.res +165 -0
- package/src/PgStorage.res.js +125 -0
- package/src/Prometheus.res +75 -40
- package/src/Prometheus.res.js +750 -0
- package/src/ReorgDetection.res.js +223 -0
- package/src/Throttler.res.js +60 -0
- package/src/Time.res.js +41 -0
- package/src/TopicFilter.res.js +86 -0
- package/src/Utils.res.js +527 -0
- package/src/bindings/BigDecimal.gen.ts +1 -1
- package/src/bindings/BigDecimal.res.js +41 -0
- package/src/bindings/BigInt.res.js +138 -0
- package/src/bindings/Ethers.gen.ts +1 -1
- package/src/bindings/Ethers.res.js +109 -0
- package/src/bindings/Express.res.js +2 -0
- package/src/bindings/Hrtime.res.js +66 -0
- package/src/bindings/NodeJs.res.js +29 -0
- package/src/bindings/Pino.res.js +95 -0
- package/src/bindings/Postgres.res.js +16 -0
- package/src/bindings/PromClient.res.js +17 -0
- package/src/bindings/Promise.res +5 -0
- package/src/bindings/Promise.res.js +25 -0
- package/src/bindings/SDSL.res.js +8 -0
- package/src/bindings/Viem.res.js +45 -0
- package/src/db/EntityHistory.res +3 -3
- package/src/db/EntityHistory.res.js +307 -0
- package/src/db/Schema.res.js +54 -0
- package/src/db/Table.res.js +365 -0
- package/src/sources/Fuel.res.js +28 -0
- package/src/sources/HyperFuel.res.js +193 -0
- package/src/sources/HyperFuelClient.res.js +19 -0
- package/src/sources/HyperSync.res.js +301 -0
- package/src/sources/HyperSyncClient.res.js +99 -0
- package/src/sources/HyperSyncJsonApi.res.js +259 -0
- package/src/sources/Rpc.res.js +198 -0
- package/src/sources/Source.res.js +9 -0
- package/src/sources/SourceManager.res.js +366 -0
- package/src/vendored/Rest.res.js +574 -0
- package/src/Enum.res +0 -22
package/src/Logging.res.js
@@ -0,0 +1,247 @@
+// Generated by ReScript, PLEASE EDIT WITH CARE
+'use strict';
+
+var Pino = require("./bindings/Pino.res.js");
+var Pino$1 = require("pino");
+var Js_exn = require("rescript/lib/js/js_exn.js");
+var Js_dict = require("rescript/lib/js/js_dict.js");
+var Caml_obj = require("rescript/lib/js/caml_obj.js");
+var Internal = require("./Internal.res.js");
+var Caml_option = require("rescript/lib/js/caml_option.js");
+var EcsPinoFormat = require("@elastic/ecs-pino-format");
+
+var logLevels = Js_dict.fromArray([
+  [
+    "udebug",
+    32
+  ],
+  [
+    "uinfo",
+    34
+  ],
+  [
+    "uwarn",
+    36
+  ],
+  [
+    "uerror",
+    38
+  ],
+  [
+    "trace",
+    10
+  ],
+  [
+    "debug",
+    20
+  ],
+  [
+    "info",
+    30
+  ],
+  [
+    "warn",
+    40
+  ],
+  [
+    "error",
+    50
+  ],
+  [
+    "fatal",
+    60
+  ]
+]);
+
+var logger = {
+  contents: undefined
+};
+
+function makeLogger(logStrategy, logFilePath, defaultFileLogLevel, userLogLevel) {
+  var pinoFile_target = "pino/file";
+  var pinoFile_options = Caml_option.some(Pino.Transport.makeTransportOptions({
+    destination: logFilePath,
+    append: true,
+    mkdir: true
+  }));
+  var pinoFile_level = defaultFileLogLevel;
+  var pinoFile = {
+    target: pinoFile_target,
+    options: pinoFile_options,
+    level: pinoFile_level
+  };
+  var makeMultiStreamLogger = function (none, none$1) {
+    return Pino.MultiStreamLogger.make(userLogLevel, logLevels, none, none$1, defaultFileLogLevel);
+  };
+  switch (logStrategy) {
+    case "ecs-file" :
+      var newrecord = Caml_obj.obj_dup(EcsPinoFormat());
+      return Pino$1((newrecord.customLevels = logLevels, newrecord), Pino$1.transport(pinoFile));
+    case "ecs-console" :
+      var newrecord$1 = Caml_obj.obj_dup(EcsPinoFormat());
+      return Pino$1((newrecord$1.customLevels = logLevels, newrecord$1.level = userLogLevel, newrecord$1));
+    case "ecs-console-multistream" :
+      return makeMultiStreamLogger(undefined, EcsPinoFormat());
+    case "file-only" :
+      return Pino$1({
+        level: defaultFileLogLevel,
+        customLevels: logLevels
+      }, Pino$1.transport(pinoFile));
+    case "console-raw" :
+    case "console-pretty" :
+      return makeMultiStreamLogger(undefined, undefined);
+    case "both-prettyconsole" :
+      return makeMultiStreamLogger(logFilePath, undefined);
+
+  }
+}
+
+function setLogger(l) {
+  logger.contents = l;
+}
+
+function getLogger() {
+  var logger$1 = logger.contents;
+  if (logger$1 !== undefined) {
+    return logger$1;
+  } else {
+    return Js_exn.raiseError("Unreachable code. Logger not initialized");
+  }
+}
+
+function setLogLevel(level) {
+  getLogger().level = level;
+}
+
+function trace(message) {
+  getLogger().trace(Pino.createPinoMessage(message));
+}
+
+function debug(message) {
+  getLogger().debug(Pino.createPinoMessage(message));
+}
+
+function info(message) {
+  getLogger().info(Pino.createPinoMessage(message));
+}
+
+function warn(message) {
+  getLogger().warn(Pino.createPinoMessage(message));
+}
+
+function error(message) {
+  getLogger().error(Pino.createPinoMessage(message));
+}
+
+function errorWithExn(error, message) {
+  getLogger().error(Pino.createPinoMessageWithError(message, error));
+}
+
+function fatal(message) {
+  getLogger().fatal(Pino.createPinoMessage(message));
+}
+
+function childTrace(logger, params) {
+  logger.trace(Pino.createPinoMessage(params));
+}
+
+function childDebug(logger, params) {
+  logger.debug(Pino.createPinoMessage(params));
+}
+
+function childInfo(logger, params) {
+  logger.info(Pino.createPinoMessage(params));
+}
+
+function childWarn(logger, params) {
+  logger.warn(Pino.createPinoMessage(params));
+}
+
+function childError(logger, params) {
+  logger.error(Pino.createPinoMessage(params));
+}
+
+function childErrorWithExn(logger, error, params) {
+  logger.error(Pino.createPinoMessageWithError(params, error));
+}
+
+function childFatal(logger, params) {
+  logger.fatal(Pino.createPinoMessage(params));
+}
+
+function createChild(params) {
+  return getLogger().child(Pino.createChildParams(params));
+}
+
+function createChildFrom(logger, params) {
+  return logger.child(Pino.createChildParams(params));
+}
+
+function getEventLogger(eventItem) {
+  var l = eventItem.loggerCache;
+  if (l !== undefined) {
+    return l;
+  }
+  var l$1 = getLogger().child(Pino.createChildParams({
+    contractName: eventItem.eventConfig.contractName,
+    eventName: eventItem.eventConfig.name,
+    chainId: eventItem.chain,
+    block: eventItem.blockNumber,
+    logIndex: eventItem.logIndex
+  }));
+  eventItem.loggerCache = l$1;
+  return l$1;
+}
+
+function logForItem(eventItem, level, message, params) {
+  return getEventLogger(eventItem)[level](params, message);
+}
+
+function getUserLogger(eventItem) {
+  return {
+    debug: (function (message, params) {
+      getEventLogger(eventItem)["udebug"](params, message);
+    }),
+    info: (function (message, params) {
+      getEventLogger(eventItem)["uinfo"](params, message);
+    }),
+    warn: (function (message, params) {
+      getEventLogger(eventItem)["uwarn"](params, message);
+    }),
+    error: (function (message, params) {
+      getEventLogger(eventItem)["uerror"](params, message);
+    }),
+    errorWithExn: (function (message, exn) {
+      var params = {
+        err: Internal.prettifyExn(exn)
+      };
+      getEventLogger(eventItem)["uerror"](params, message);
+    })
+  };
+}
+
+exports.logLevels = logLevels;
+exports.makeLogger = makeLogger;
+exports.setLogger = setLogger;
+exports.getLogger = getLogger;
+exports.setLogLevel = setLogLevel;
+exports.trace = trace;
+exports.debug = debug;
+exports.info = info;
+exports.warn = warn;
+exports.error = error;
+exports.errorWithExn = errorWithExn;
+exports.fatal = fatal;
+exports.childTrace = childTrace;
+exports.childDebug = childDebug;
+exports.childInfo = childInfo;
+exports.childWarn = childWarn;
+exports.childError = childError;
+exports.childErrorWithExn = childErrorWithExn;
+exports.childFatal = childFatal;
+exports.createChild = createChild;
+exports.createChildFrom = createChildFrom;
+exports.getEventLogger = getEventLogger;
+exports.logForItem = logForItem;
+exports.getUserLogger = getUserLogger;
+/* logLevels Not a pure module */
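
For context on the hunk above: `logLevels` registers the user-facing `udebug`/`uinfo`/`uwarn`/`uerror` levels (32/34/36/38) alongside pino's built-in ones, and `getUserLogger` invokes them with pino's `(mergeObject, message)` calling convention. Below is a minimal standalone sketch of that behaviour against pino directly, outside the package; the level numbers come from the dict above, while the logger options and field names are illustrative assumptions.

```js
// Illustrative only -- not part of the published package.
const pino = require("pino");

// Custom user-facing levels, mirroring the logLevels dict above.
const customLevels = { udebug: 32, uinfo: 34, uwarn: 36, uerror: 38 };

const logger = pino({ customLevels, level: "debug" });

// Same (params, message) calling convention the generated getUserLogger uses.
logger.uinfo({ txHash: "0xabc" }, "processed transfer event");
logger.uerror({ err: new Error("boom") }, "handler failed");
```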
package/src/Persistence.res
@@ -0,0 +1,111 @@
+// A module for the persistence layer
+// This is currently in a WIP state
+// but in the future we should make all DB and in-memory state
+// interactions to this layer with DI and easy for testing.
+// Currently there are quite many code spread across
+// DbFunctions, Db, Migrations, InMemoryStore modules which use codegen code directly.
+
+type storage = {
+  // Should return true if we already have persisted data
+  // and we can skip initialization
+  isInitialized: unit => promise<bool>,
+  // Should initialize the storage so we can start interacting with it
+  // Eg create connection, schema, tables, etc.
+  initialize: (
+    ~entities: array<Internal.entityConfig>,
+    ~staticTables: array<Table.table>,
+    ~enums: array<Internal.enumConfig<Internal.enum>>,
+    // If true, the storage should clear existing data
+    ~cleanRun: bool,
+  ) => promise<unit>,
+}
+
+type storageStatus =
+  | Unknown
+  | Initializing(promise<unit>)
+  | Ready({cleanRun: bool})
+
+type t = {
+  userEntities: array<Internal.entityConfig>,
+  staticTables: array<Table.table>,
+  allEntities: array<Internal.entityConfig>,
+  allEnums: array<Internal.enumConfig<Internal.enum>>,
+  mutable storageStatus: storageStatus,
+  storage: storage,
+  onStorageInitialize: option<unit => promise<unit>>,
+}
+
+let entityHistoryActionEnumConfig: Internal.enumConfig<EntityHistory.RowAction.t> = {
+  name: EntityHistory.RowAction.name,
+  variants: EntityHistory.RowAction.variants,
+  schema: EntityHistory.RowAction.schema,
+  default: SET,
+}
+
+let make = (
+  ~userEntities,
+  ~dcRegistryEntityConfig,
+  // TODO: Should only pass userEnums and create internal config in runtime
+  ~allEnums,
+  ~staticTables,
+  ~storage,
+  ~onStorageInitialize=?,
+) => {
+  let allEntities = userEntities->Js.Array2.concat([dcRegistryEntityConfig])
+  let allEnums =
+    allEnums->Js.Array2.concat([entityHistoryActionEnumConfig->Internal.fromGenericEnumConfig])
+  {
+    userEntities,
+    staticTables,
+    allEntities,
+    allEnums,
+    storageStatus: Unknown,
+    storage,
+    onStorageInitialize,
+  }
+}
+
+let init = async (
+  persistence,
+  // There are not much sense in the option,
+  // but this is how the runUpMigration used to work
+  // and we want to keep the upsert behavior without breaking changes.
+  ~skipIsInitializedCheck=false,
+  ~reset=false,
+) => {
+  try {
+    let shouldRun = switch persistence.storageStatus {
+    | Unknown => true
+    | Initializing(promise) => {
+        await promise
+        reset
+      }
+    | Ready(_) => reset
+    }
+    if shouldRun {
+      let resolveRef = ref(%raw(`null`))
+      let promise = Promise.make((resolve, _) => {
+        resolveRef := resolve
+      })
+      persistence.storageStatus = Initializing(promise)
+      if !(reset || skipIsInitializedCheck) && (await persistence.storage.isInitialized()) {
+        persistence.storageStatus = Ready({cleanRun: false})
+      } else {
+        let _ = await persistence.storage.initialize(
+          ~entities=persistence.allEntities,
+          ~staticTables=persistence.staticTables,
+          ~enums=persistence.allEnums,
+          ~cleanRun=reset || !skipIsInitializedCheck,
+        )
+        persistence.storageStatus = Ready({cleanRun: true})
+        switch persistence.onStorageInitialize {
+        | Some(onStorageInitialize) => await onStorageInitialize()
+        | None => ()
+        }
+      }
+      resolveRef.contents()
+    }
+  } catch {
+  | exn => exn->ErrorHandling.mkLogAndRaise(~msg=`EE800: Failed to initialize the indexer storage.`)
+  }
+}
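
Reading the `init` flags above together: `shouldRun` gates re-entry (a concurrent caller awaits the in-flight `Initializing` promise and only proceeds if it asked for a `reset`), and the `~cleanRun` handed to `storage.initialize` is `reset || !skipIsInitializedCheck`. The sketch below just restates those combinations; it is derived from the code above and is not part of the package.

```js
// Illustrative only: how the init flags above resolve.
const cleanRunFor = (reset, skipIsInitializedCheck) => reset || !skipIsInitializedCheck;

cleanRunFor(false, false); // true  -> fresh schema, but only reached when isInitialized() was false
cleanRunFor(true, false);  // true  -> forced reset, the isInitialized() check is skipped
cleanRunFor(false, true);  // false -> non-destructive "upsert" path, check skipped
```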
package/src/Persistence.res.js
@@ -0,0 +1,90 @@
+// Generated by ReScript, PLEASE EDIT WITH CARE
+'use strict';
+
+var EntityHistory = require("./db/EntityHistory.res.js");
+var ErrorHandling = require("./ErrorHandling.res.js");
+var Caml_js_exceptions = require("rescript/lib/js/caml_js_exceptions.js");
+
+var entityHistoryActionEnumConfig_name = EntityHistory.RowAction.name;
+
+var entityHistoryActionEnumConfig_variants = EntityHistory.RowAction.variants;
+
+var entityHistoryActionEnumConfig_schema = EntityHistory.RowAction.schema;
+
+var entityHistoryActionEnumConfig = {
+  name: entityHistoryActionEnumConfig_name,
+  variants: entityHistoryActionEnumConfig_variants,
+  schema: entityHistoryActionEnumConfig_schema,
+  default: "SET"
+};
+
+function make(userEntities, dcRegistryEntityConfig, allEnums, staticTables, storage, onStorageInitialize) {
+  var allEntities = userEntities.concat([dcRegistryEntityConfig]);
+  var allEnums$1 = allEnums.concat([entityHistoryActionEnumConfig]);
+  return {
+    userEntities: userEntities,
+    staticTables: staticTables,
+    allEntities: allEntities,
+    allEnums: allEnums$1,
+    storageStatus: "Unknown",
+    storage: storage,
+    onStorageInitialize: onStorageInitialize
+  };
+}
+
+async function init(persistence, skipIsInitializedCheckOpt, resetOpt) {
+  var skipIsInitializedCheck = skipIsInitializedCheckOpt !== undefined ? skipIsInitializedCheckOpt : false;
+  var reset = resetOpt !== undefined ? resetOpt : false;
+  try {
+    var promise = persistence.storageStatus;
+    var shouldRun;
+    if (typeof promise !== "object") {
+      shouldRun = true;
+    } else if (promise.TAG === "Initializing") {
+      await promise._0;
+      shouldRun = reset;
+    } else {
+      shouldRun = reset;
+    }
+    if (!shouldRun) {
+      return ;
+    }
+    var resolveRef = {
+      contents: null
+    };
+    var promise$1 = new Promise((function (resolve, param) {
+      resolveRef.contents = resolve;
+    }));
+    persistence.storageStatus = {
+      TAG: "Initializing",
+      _0: promise$1
+    };
+    if (!(reset || skipIsInitializedCheck) && await persistence.storage.isInitialized()) {
+      persistence.storageStatus = {
+        TAG: "Ready",
+        cleanRun: false
+      };
+    } else {
+      await persistence.storage.initialize(persistence.allEntities, persistence.staticTables, persistence.allEnums, reset || !skipIsInitializedCheck);
+      persistence.storageStatus = {
+        TAG: "Ready",
+        cleanRun: true
+      };
+      var onStorageInitialize = persistence.onStorageInitialize;
+      if (onStorageInitialize !== undefined) {
+        await onStorageInitialize();
+      }
+
+    }
+    return resolveRef.contents();
+  }
+  catch (raw_exn){
+    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);
+    return ErrorHandling.mkLogAndRaise(undefined, "EE800: Failed to initialize the indexer storage.", exn);
+  }
+}
+
+exports.entityHistoryActionEnumConfig = entityHistoryActionEnumConfig;
+exports.make = make;
+exports.init = init;
+/* EntityHistory Not a pure module */
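
The compiled output above also shows the calling convention the rest of the package uses: `make` and `init` take the labelled ReScript arguments positionally, and a `storage` is just an object with `isInitialized` and `initialize`. A hypothetical in-memory stub wired through this module might look like the sketch below; the require path, the empty entity/enum arrays, and the stand-in `dcRegistryEntityConfig` are assumptions for illustration only, not an endorsed API.

```js
// Illustrative only -- a hypothetical in-memory storage stub.
const Persistence = require("envio/src/Persistence.res.js"); // path assumed from the file listing

let initialized = false;
const memoryStorage = {
  isInitialized: async () => initialized,
  // Positional order mirrors the compiled call site: (entities, staticTables, enums, cleanRun)
  initialize: async (entities, staticTables, enums, cleanRun) => {
    if (cleanRun) {
      // a real storage would drop and recreate its state here
    }
    initialized = true;
  },
};

// Stand-in for the dynamic-contract-registry entity config produced by codegen.
const dcRegistryEntityConfig = { table: {}, entityHistory: { table: {} } };

const persistence = Persistence.make(
  [],                     // userEntities
  dcRegistryEntityConfig, // dcRegistryEntityConfig
  [],                     // allEnums
  [],                     // staticTables
  memoryStorage,          // storage
  undefined               // onStorageInitialize
);

// init(persistence, skipIsInitializedCheckOpt, resetOpt)
Persistence.init(persistence, undefined, false).then(() => {
  console.log(persistence.storageStatus); // { TAG: "Ready", cleanRun: true }
});
```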
package/src/PgStorage.res
@@ -0,0 +1,165 @@
+let makeCreateIndexSqlUnsafe = (~tableName, ~indexFields, ~pgSchema) => {
+  let indexName = tableName ++ "_" ++ indexFields->Js.Array2.joinWith("_")
+  let index = indexFields->Belt.Array.map(idx => `"${idx}"`)->Js.Array2.joinWith(", ")
+  `CREATE INDEX IF NOT EXISTS "${indexName}" ON "${pgSchema}"."${tableName}"(${index});`
+}
+
+let makeCreateTableIndicesSqlUnsafe = (table: Table.table, ~pgSchema) => {
+  open Belt
+  let tableName = table.tableName
+  let createIndex = indexField =>
+    makeCreateIndexSqlUnsafe(~tableName, ~indexFields=[indexField], ~pgSchema)
+  let createCompositeIndex = indexFields => {
+    makeCreateIndexSqlUnsafe(~tableName, ~indexFields, ~pgSchema)
+  }
+
+  let singleIndices = table->Table.getSingleIndices
+  let compositeIndices = table->Table.getCompositeIndices
+
+  singleIndices->Array.map(createIndex)->Js.Array2.joinWith("\n") ++
+  compositeIndices->Array.map(createCompositeIndex)->Js.Array2.joinWith("\n")
+}
+
+let makeCreateTableSqlUnsafe = (table: Table.table, ~pgSchema) => {
+  open Belt
+  let fieldsMapped =
+    table
+    ->Table.getFields
+    ->Array.map(field => {
+      let {fieldType, isNullable, isArray, defaultValue} = field
+      let fieldName = field->Table.getDbFieldName
+
+      {
+        `"${fieldName}" ${switch fieldType {
+          | Custom(name) if !(name->Js.String2.startsWith("NUMERIC(")) => `"${pgSchema}".${name}`
+          | _ => (fieldType :> string)
+          }}${isArray ? "[]" : ""}${switch defaultValue {
+          | Some(defaultValue) => ` DEFAULT ${defaultValue}`
+          | None => isNullable ? `` : ` NOT NULL`
+          }}`
+      }
+    })
+    ->Js.Array2.joinWith(", ")
+
+  let primaryKeyFieldNames = table->Table.getPrimaryKeyFieldNames
+  let primaryKey =
+    primaryKeyFieldNames
+    ->Array.map(field => `"${field}"`)
+    ->Js.Array2.joinWith(", ")
+
+  `CREATE TABLE IF NOT EXISTS "${pgSchema}"."${table.tableName}"(${fieldsMapped}${primaryKeyFieldNames->Array.length > 0
+      ? `, PRIMARY KEY(${primaryKey})`
+      : ""});`
+}
+
+let makeInitializeTransaction = (~pgSchema, ~staticTables, ~entities, ~enums, ~cleanRun) => {
+  let allTables = staticTables->Array.copy
+  let allEntityTables = []
+  entities->Js.Array2.forEach((entity: Internal.entityConfig) => {
+    allEntityTables->Js.Array2.push(entity.table)->ignore
+    allTables->Js.Array2.push(entity.table)->ignore
+    allTables->Js.Array2.push(entity.entityHistory.table)->ignore
+  })
+  let derivedSchema = Schema.make(allEntityTables)
+
+  let query = ref(
+    (
+      cleanRun
+        ? `DROP SCHEMA IF EXISTS "${pgSchema}" CASCADE;
+CREATE SCHEMA "${pgSchema}";`
+        : `CREATE SCHEMA IF NOT EXISTS "${pgSchema}";`
+    ) ++
+    `GRANT ALL ON SCHEMA "${pgSchema}" TO postgres;
+GRANT ALL ON SCHEMA "${pgSchema}" TO public;`,
+  )
+
+  // Optimized enum creation - direct when cleanRun, conditional otherwise
+  enums->Js.Array2.forEach((enumConfig: Internal.enumConfig<Internal.enum>) => {
+    // Create base enum creation query once
+    let enumCreateQuery = `CREATE TYPE "${pgSchema}".${enumConfig.name} AS ENUM(${enumConfig.variants
+      ->Js.Array2.map(v => `'${v->(Utils.magic: Internal.enum => string)}'`)
+      ->Js.Array2.joinWith(", ")});`
+
+    query :=
+      query.contents ++
+      "\n" ++ if cleanRun {
+        // Direct creation when cleanRunting (faster)
+        enumCreateQuery
+      } else {
+        // Wrap with conditional check only when not cleanRunting
+        `IF NOT EXISTS (
+        SELECT 1 FROM pg_type
+        WHERE typname = '${enumConfig.name->Js.String2.toLowerCase}'
+        AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = '${pgSchema}')
+      ) THEN
+        ${enumCreateQuery}
+      END IF;`
+      }
+  })
+
+  // Batch all table creation first (optimal for PostgreSQL)
+  allTables->Js.Array2.forEach((table: Table.table) => {
+    query := query.contents ++ "\n" ++ makeCreateTableSqlUnsafe(table, ~pgSchema)
+  })
+
+  // Then batch all indices (better performance when tables exist)
+  allTables->Js.Array2.forEach((table: Table.table) => {
+    let indices = makeCreateTableIndicesSqlUnsafe(table, ~pgSchema)
+    if indices !== "" {
+      query := query.contents ++ "\n" ++ indices
+    }
+  })
+
+  let functionsQuery = ref("")
+
+  // Add derived indices
+  entities->Js.Array2.forEach((entity: Internal.entityConfig) => {
+    functionsQuery := functionsQuery.contents ++ "\n" ++ entity.entityHistory.createInsertFnQuery
+
+    entity.table
+    ->Table.getDerivedFromFields
+    ->Js.Array2.forEach(derivedFromField => {
+      let indexField =
+        derivedSchema->Schema.getDerivedFromFieldName(derivedFromField)->Utils.unwrapResultExn
+      query :=
+        query.contents ++
+        "\n" ++
+        makeCreateIndexSqlUnsafe(
+          ~tableName=derivedFromField.derivedFromEntity,
+          ~indexFields=[indexField],
+          ~pgSchema,
+        )
+    })
+  })
+
+  [
+    // Return optimized queries - main DDL in DO block, functions separate
+    // Note: DO $$ BEGIN wrapper is only needed for PL/pgSQL conditionals (IF NOT EXISTS)
+    // Reset case uses direct DDL (faster), non-cleanRun case uses conditionals (safer)
+    cleanRun ? query.contents : `DO $$ BEGIN ${query.contents} END $$;`,
+    // Functions query (separate as they can't be in DO block)
+  ]->Js.Array2.concat(functionsQuery.contents !== "" ? [functionsQuery.contents] : [])
+}
+
+let make = (~sql: Postgres.sql, ~pgSchema): Persistence.storage => {
+  let isInitialized = async () => {
+    let schemas =
+      await sql->Postgres.unsafe(
+        `SELECT schema_name FROM information_schema.schemata WHERE schema_name = '${pgSchema}';`,
+      )
+    schemas->Utils.Array.notEmpty
+  }
+
+  let initialize = async (~entities, ~staticTables, ~enums, ~cleanRun) => {
+    let queries = makeInitializeTransaction(~pgSchema, ~staticTables, ~entities, ~enums, ~cleanRun)
+    // Execute all queries within a single transaction for integrity
+    let _ = await sql->Postgres.beginSql(sql => {
+      queries->Js.Array2.map(query => sql->Postgres.unsafe(query))
+    })
+  }
+
+  {
+    isInitialized,
+    initialize,
+  }
+}
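
To make the string templates above concrete, the index helper interpolates a deterministic index name from the table and field names. Below is a small plain-JS reproduction of `makeCreateIndexSqlUnsafe`; only the template itself comes from the hunk above, while the table, schema, and field names are hypothetical.

```js
// Illustrative only -- plain-JS reproduction of the index SQL template above.
const makeCreateIndexSql = (tableName, indexFields, pgSchema) => {
  const indexName = tableName + "_" + indexFields.join("_");
  const index = indexFields.map((f) => `"${f}"`).join(", ");
  return `CREATE INDEX IF NOT EXISTS "${indexName}" ON "${pgSchema}"."${tableName}"(${index});`;
};

console.log(makeCreateIndexSql("user", ["chain_id", "block_number"], "envio_indexer"));
// -> CREATE INDEX IF NOT EXISTS "user_chain_id_block_number" ON "envio_indexer"."user"("chain_id", "block_number");
```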