envio 2.32.2 → 3.0.0-alpha-main-clickhouse-sink

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,387 @@
+ // ClickHouse client bindings for @clickhouse/client
+
+ type client
+
+ type clientConfig = {
+ url?: string,
+ database?: string,
+ username?: string,
+ password?: string,
+ }
+
+ type execParams = {query: string}
+
+ @module("@clickhouse/client")
+ external createClient: clientConfig => client = "createClient"
+
+ @send
+ external exec: (client, execParams) => promise<unit> = "exec"
+
+ @send
+ external close: client => promise<unit> = "close"
+
+ type insertParams<'a> = {
+ table: string,
+ values: array<'a>,
+ format: string,
+ }
+
+ @send
+ external insert: (client, insertParams<'a>) => promise<unit> = "insert"
+
+ type queryParams = {query: string}
+ type queryResult<'a>
+
+ @send
+ external query: (client, queryParams) => promise<queryResult<'a>> = "query"
+
+ @send
+ external json: queryResult<'a> => promise<'a> = "json"
+
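
For context, these bindings cover only the small slice of the @clickhouse/client API that the sink needs. A rough usage sketch follows (the connection values and database name are placeholders; the query/json pairing mirrors how `resume` below reads SHOW TABLES):

let exampleUsage = async () => {
  let client = createClient({url: "http://localhost:8123", username: "default"})
  await client->exec({query: "CREATE DATABASE IF NOT EXISTS my_sink_db"})
  let result = await client->query({query: "SHOW TABLES FROM my_sink_db"})
  let tables: array<{"name": string}> = await result->json
  Js.log(tables)
  await client->close
}
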
+ let getClickHouseFieldType = (
+ ~fieldType: Table.fieldType,
+ ~isNullable: bool,
+ ~isArray: bool,
+ ): string => {
+ let baseType = switch fieldType {
+ | Int32 => "Int32"
+ | Uint32 => "UInt32"
+ | Serial => "Int32"
+ | BigInt({?precision}) =>
+ switch precision {
+ | None => "String" // Fallback for unbounded BigInt
+ | Some(precision) =>
+ if precision > 38 {
+ "String"
+ } else {
+ `Decimal(${precision->Js.Int.toString},0)`
+ }
+ }
+ | BigDecimal({?config}) =>
+ switch config {
+ | None => "String" // Fallback for unbounded BigDecimal
+ | Some((precision, scale)) =>
+ if precision > 38 || scale > precision {
+ "String"
+ } else {
+ `Decimal(${precision->Js.Int.toString},${scale->Js.Int.toString})`
+ }
+ }
+ | Boolean => "Bool"
+ | Number => "Float64"
+ | String => "String"
+ | Json => "String"
+ | Date => "DateTime64(3, 'UTC')"
+ | Enum({config}) => {
+ let variantsLength = config.variants->Belt.Array.length
+ // Theoretically we could store 256 variants in Enum8,
+ // but it'd probably require explicitly starting from a negative index
+ let enumType = variantsLength <= 127 ? "Enum8" : "Enum16"
+ let enumValues =
+ config.variants
+ ->Belt.Array.map(variant => {
+ let variantStr = variant->(Utils.magic: 'a => string)
+ `'${variantStr}'`
+ })
+ ->Js.Array2.joinWith(", ")
+ `${enumType}(${enumValues})`
+ }
+ | Entity(_) => "String"
+ }
+
+ let baseType = if isArray {
+ `Array(${baseType})`
+ } else {
+ baseType
+ }
+
+ isNullable ? `Nullable(${baseType})` : baseType
+ }
+
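
As a quick, purely illustrative check of the mapping above (the expected results follow directly from the branches of `getClickHouseFieldType`):

// A nullable array of strings: the Array wrapper is applied first, then Nullable.
let _ = getClickHouseFieldType(~fieldType=String, ~isNullable=true, ~isArray=true)
// => "Nullable(Array(String))"

// Timestamps are stored with millisecond precision in UTC.
let _ = getClickHouseFieldType(~fieldType=Date, ~isNullable=false, ~isArray=false)
// => "DateTime64(3, 'UTC')"
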
+ let setCheckpointsOrThrow = async (client, ~batch: Batch.t, ~database: string) => {
+ let checkpointsCount = batch.checkpointIds->Array.length
+ if checkpointsCount === 0 {
+ ()
+ } else {
+ // Convert columnar data to row format for JSONCompactEachRow
+ let checkpointRows = []
+ for idx in 0 to checkpointsCount - 1 {
+ checkpointRows
+ ->Js.Array2.push((
+ batch.checkpointIds->Belt.Array.getUnsafe(idx),
+ batch.checkpointChainIds->Belt.Array.getUnsafe(idx),
+ batch.checkpointBlockNumbers->Belt.Array.getUnsafe(idx),
+ batch.checkpointBlockHashes->Belt.Array.getUnsafe(idx),
+ batch.checkpointEventsProcessed->Belt.Array.getUnsafe(idx),
+ ))
+ ->ignore
+ }
+
+ try {
+ await client->insert({
+ table: `${database}.\`${InternalTable.Checkpoints.table.tableName}\``,
+ values: checkpointRows,
+ format: "JSONCompactEachRow",
+ })
+ } catch {
+ | exn =>
+ raise(
+ Persistence.StorageError({
+ message: `Failed to insert checkpoints into ClickHouse table "${InternalTable.Checkpoints.table.tableName}"`,
+ reason: exn->Utils.prettifyExn,
+ }),
+ )
+ }
+ }
+ }
+
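
JSONCompactEachRow expects each row as a positional array rather than an object, which is why the columnar batch arrays are zipped into tuples above. Purely for illustration, a single checkpoint row would look roughly like this (values made up, order matching the checkpoints table columns id, chain_id, block_number, block_hash, events_processed):

let _exampleCheckpointRow = (125, 1, 19000000, "0xabc123", 42)
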
138
+ let setUpdatesOrThrow = async (
139
+ client,
140
+ ~updates: array<Internal.inMemoryStoreEntityUpdate<Internal.entity>>,
141
+ ~entityConfig: Internal.entityConfig,
142
+ ~database: string,
143
+ ) => {
144
+ if updates->Array.length === 0 {
145
+ ()
146
+ } else {
147
+ let {convertOrThrow, tableName} = switch entityConfig.clickHouseSetUpdatesCache {
148
+ | Some(cache) => cache
149
+ | None =>
150
+ let cache: Internal.clickHouseSetUpdatesCache = {
151
+ tableName: `${database}.\`${EntityHistory.historyTableName(
152
+ ~entityName=entityConfig.name,
153
+ ~entityIndex=entityConfig.index,
154
+ )}\``,
155
+ convertOrThrow: S.compile(
156
+ S.union([
157
+ EntityHistory.makeSetUpdateSchema(entityConfig.schema),
158
+ S.object(s => {
159
+ s.tag(EntityHistory.changeFieldName, EntityHistory.RowAction.DELETE)
160
+ Change.Delete({
161
+ entityId: s.field(Table.idFieldName, S.string),
162
+ checkpointId: s.field(
163
+ EntityHistory.checkpointIdFieldName,
164
+ EntityHistory.unsafeCheckpointIdSchema,
165
+ ),
166
+ })
167
+ }),
168
+ ]),
169
+ ~input=Value,
170
+ ~output=Json,
171
+ ~typeValidation=false,
172
+ ~mode=Sync,
173
+ ),
174
+ }
175
+
176
+ entityConfig.clickHouseSetUpdatesCache = Some(cache)
177
+ cache
178
+ }
179
+
180
+ try {
181
+ // Convert entity updates to ClickHouse row format
182
+ let values = updates->Js.Array2.map(update => {
183
+ update.latestChange->convertOrThrow
184
+ })
185
+
186
+ await client->insert({
187
+ table: tableName,
188
+ values,
189
+ format: "JSONEachRow",
190
+ })
191
+ } catch {
192
+ | exn =>
193
+ raise(
194
+ Persistence.StorageError({
195
+ message: `Failed to insert items into ClickHouse table "${tableName}"`,
196
+ reason: exn->Utils.prettifyExn,
197
+ }),
198
+ )
199
+ }
200
+ }
201
+ }
202
+
203
+ // Generate CREATE TABLE query for entity history table
204
+ let makeCreateHistoryTableQuery = (~entityConfig: Internal.entityConfig, ~database: string) => {
205
+ let fieldDefinitions = entityConfig.table.fields->Belt.Array.keepMap(field => {
206
+ switch field {
207
+ | Field(field) =>
208
+ Some({
209
+ let fieldName = field->Table.getDbFieldName
210
+ let clickHouseType = getClickHouseFieldType(
211
+ ~fieldType=field.fieldType,
212
+ ~isNullable=field.isNullable,
213
+ ~isArray=field.isArray,
214
+ )
215
+ `\`${fieldName}\` ${clickHouseType}`
216
+ })
217
+ | DerivedFrom(_) => None
218
+ }
219
+ })
220
+
221
+ `CREATE TABLE IF NOT EXISTS ${database}.\`${EntityHistory.historyTableName(
222
+ ~entityName=entityConfig.name,
223
+ ~entityIndex=entityConfig.index,
224
+ )}\` (
225
+ ${fieldDefinitions->Js.Array2.joinWith(",\n ")},
226
+ \`${EntityHistory.checkpointIdFieldName}\` ${getClickHouseFieldType(
227
+ ~fieldType=Uint32,
228
+ ~isNullable=false,
229
+ ~isArray=false,
230
+ )},
231
+ \`${EntityHistory.changeFieldName}\` ${getClickHouseFieldType(
232
+ ~fieldType=Enum({config: EntityHistory.RowAction.config->Table.fromGenericEnumConfig}),
233
+ ~isNullable=false,
234
+ ~isArray=false,
235
+ )}
236
+ )
237
+ ENGINE = MergeTree()
238
+ ORDER BY (${Table.idFieldName}, ${EntityHistory.checkpointIdFieldName})`
239
+ }
240
+
241
+ // Generate CREATE TABLE query for checkpoints
242
+ let makeCreateCheckpointsTableQuery = (~database: string) => {
243
+ let idField = (#id: InternalTable.Checkpoints.field :> string)
244
+ let chainIdField = (#chain_id: InternalTable.Checkpoints.field :> string)
245
+ let blockNumberField = (#block_number: InternalTable.Checkpoints.field :> string)
246
+ let blockHashField = (#block_hash: InternalTable.Checkpoints.field :> string)
247
+ let eventsProcessedField = (#events_processed: InternalTable.Checkpoints.field :> string)
248
+
249
+ `CREATE TABLE IF NOT EXISTS ${database}.\`${InternalTable.Checkpoints.table.tableName}\` (
250
+ \`${idField}\` ${getClickHouseFieldType(~fieldType=Int32, ~isNullable=false, ~isArray=false)},
251
+ \`${chainIdField}\` ${getClickHouseFieldType(
252
+ ~fieldType=Int32,
253
+ ~isNullable=false,
254
+ ~isArray=false,
255
+ )},
256
+ \`${blockNumberField}\` ${getClickHouseFieldType(
257
+ ~fieldType=Int32,
258
+ ~isNullable=false,
259
+ ~isArray=false,
260
+ )},
261
+ \`${blockHashField}\` ${getClickHouseFieldType(
262
+ ~fieldType=String,
263
+ ~isNullable=true,
264
+ ~isArray=false,
265
+ )},
266
+ \`${eventsProcessedField}\` ${getClickHouseFieldType(
267
+ ~fieldType=Int32,
268
+ ~isNullable=false,
269
+ ~isArray=false,
270
+ )}
271
+ )
272
+ ENGINE = MergeTree()
273
+ ORDER BY (${idField})`
274
+ }
275
+
276
+ // Generate CREATE VIEW query for entity current state
277
+ let makeCreateViewQuery = (~entityConfig: Internal.entityConfig, ~database: string) => {
278
+ let historyTableName = EntityHistory.historyTableName(
279
+ ~entityName=entityConfig.name,
280
+ ~entityIndex=entityConfig.index,
281
+ )
282
+
283
+ let checkpointsTableName = InternalTable.Checkpoints.table.tableName
284
+ let checkpointIdField = (#id: InternalTable.Checkpoints.field :> string)
285
+
286
+ let entityFields =
287
+ entityConfig.table.fields
288
+ ->Belt.Array.keepMap(field => {
289
+ switch field {
290
+ | Field(field) => {
291
+ let fieldName = field->Table.getDbFieldName
292
+ Some(`\`${fieldName}\``)
293
+ }
294
+ | DerivedFrom(_) => None
295
+ }
296
+ })
297
+ ->Js.Array2.joinWith(", ")
298
+
299
+ `CREATE VIEW IF NOT EXISTS ${database}.\`${entityConfig.name}\` AS
300
+ SELECT ${entityFields}
301
+ FROM (
302
+ SELECT ${entityFields}, \`${EntityHistory.changeFieldName}\`
303
+ FROM ${database}.\`${historyTableName}\`
304
+ WHERE \`${EntityHistory.checkpointIdFieldName}\` <= (SELECT max(${checkpointIdField}) FROM ${database}.\`${checkpointsTableName}\`)
305
+ ORDER BY \`${EntityHistory.checkpointIdFieldName}\` DESC
306
+ LIMIT 1 BY \`${Table.idFieldName}\`
307
+ )
308
+ WHERE \`${EntityHistory.changeFieldName}\` = '${(EntityHistory.RowAction.SET :> string)}'`
309
+ }
310
+
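
The view selects, for each entity id, the most recent history row whose checkpoint is at or below the latest committed checkpoint, and keeps it only if that row is a SET (entities whose latest change is a DELETE drop out). As a sketch of the generated SQL's shape only (the database, table, and column names below are invented placeholders, not the identifiers actually produced by EntityHistory/InternalTable):

let _exampleViewShape = `CREATE VIEW IF NOT EXISTS sink_db.\`User\` AS
SELECT \`id\`, \`name\`
FROM (
  SELECT \`id\`, \`name\`, \`change\`
  FROM sink_db.\`User_history\`
  WHERE \`checkpoint_id\` <= (SELECT max(id) FROM sink_db.\`checkpoints\`)
  ORDER BY \`checkpoint_id\` DESC
  LIMIT 1 BY \`id\`
)
WHERE \`change\` = 'SET'`

Note that LIMIT 1 BY is ClickHouse-specific: after the ORDER BY it keeps a single row per id, i.e. the row with the highest qualifying checkpoint.
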
+ // Initialize ClickHouse tables for entities
+ let initialize = async (
+ client,
+ ~database: string,
+ ~entities: array<Internal.entityConfig>,
+ ~enums as _: array<Table.enumConfig<Table.enum>>,
+ ) => {
+ try {
+ await client->exec({query: `DROP DATABASE IF EXISTS ${database}`})
+ await client->exec({query: `CREATE DATABASE ${database}`})
+ await client->exec({query: `USE ${database}`})
+
+ await Promise.all(
+ entities->Belt.Array.map(entityConfig =>
+ client->exec({query: makeCreateHistoryTableQuery(~entityConfig, ~database)})
+ ),
+ )->Promise.ignoreValue
+ await client->exec({query: makeCreateCheckpointsTableQuery(~database)})
+
+ await Promise.all(
+ entities->Belt.Array.map(entityConfig =>
+ client->exec({query: makeCreateViewQuery(~entityConfig, ~database)})
+ ),
+ )->Promise.ignoreValue
+
+ Logging.trace("ClickHouse sink initialization completed successfully")
+ } catch {
+ | exn => {
+ Logging.errorWithExn(exn, "Failed to initialize ClickHouse sink")
+ Js.Exn.raiseError("ClickHouse initialization failed")
+ }
+ }
+ }
+
+ // Resume ClickHouse sink after reorg by deleting rows with checkpoint IDs higher than target
+ let resume = async (client, ~database: string, ~checkpointId: float) => {
+ try {
+ // Try to use the database - will throw if it doesn't exist
+ try {
+ await client->exec({query: `USE ${database}`})
+ } catch {
+ | exn =>
+ Logging.errorWithExn(
+ exn,
+ `ClickHouse sink database "${database}" not found. Please run 'envio start -r' to reinitialize the indexer (it'll also drop Postgres database).`,
+ )
+ Js.Exn.raiseError("ClickHouse resume failed")
+ }
+
+ // Get all history tables
+ let tablesResult = await client->query({
+ query: `SHOW TABLES FROM ${database} LIKE '${EntityHistory.historyTablePrefix}%'`,
+ })
+ let tables: array<{"name": string}> = await tablesResult->json
+
+ // Delete rows with checkpoint IDs higher than the target for each history table
+ await Promise.all(
+ tables->Belt.Array.map(table => {
+ let tableName = table["name"]
+ client->exec({
+ query: `ALTER TABLE ${database}.\`${tableName}\` DELETE WHERE \`${EntityHistory.checkpointIdFieldName}\` > ${checkpointId->Belt.Float.toString}`,
+ })
+ }),
+ )->Promise.ignoreValue
+
+ // Delete stale checkpoints
+ await client->exec({
+ query: `DELETE FROM ${database}.\`${InternalTable.Checkpoints.table.tableName}\` WHERE \`${Table.idFieldName}\` > ${checkpointId->Belt.Float.toString}`,
+ })
+ } catch {
+ | Persistence.StorageError(_) as exn => raise(exn)
+ | exn => {
+ Logging.errorWithExn(exn, "Failed to resume ClickHouse sink")
+ Js.Exn.raiseError("ClickHouse resume failed")
+ }
+ }
+ }
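
Taken together, the module's entry points are used roughly as follows. This is a sketch only: the real call sites live elsewhere in the indexer, and the URL, database name, and checkpoint id are placeholders (note the `~enums` argument is currently ignored by `initialize`):

let _exampleLifecycle = async (~entities) => {
  let client = createClient({url: "http://localhost:8123"})
  // Fresh start: drop and recreate the sink database, history tables, checkpoints table, and views.
  await client->initialize(~database="envio_sink", ~entities, ~enums=[])
  // Alternatively, after a restart following a reorg: roll history tables and checkpoints back to a checkpoint id.
  await client->resume(~database="envio_sink", ~checkpointId=42.)
  await client->close
}
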
@@ -0,0 +1,274 @@
+ // Generated by ReScript, PLEASE EDIT WITH CARE
+ 'use strict';
+
+ var Table = require("../db/Table.res.js");
+ var Utils = require("../Utils.res.js");
+ var Js_exn = require("rescript/lib/js/js_exn.js");
+ var Logging = require("../Logging.res.js");
+ var Belt_Array = require("rescript/lib/js/belt_Array.js");
+ var Persistence = require("../Persistence.res.js");
+ var EntityHistory = require("../db/EntityHistory.res.js");
+ var InternalTable = require("../db/InternalTable.res.js");
+ var S$RescriptSchema = require("rescript-schema/src/S.res.js");
+ var Caml_js_exceptions = require("rescript/lib/js/caml_js_exceptions.js");
+
+ function getClickHouseFieldType(fieldType, isNullable, isArray) {
+ var baseType;
+ if (typeof fieldType !== "object") {
+ switch (fieldType) {
+ case "Boolean" :
+ baseType = "Bool";
+ break;
+ case "Uint32" :
+ baseType = "UInt32";
+ break;
+ case "Number" :
+ baseType = "Float64";
+ break;
+ case "Int32" :
+ case "Serial" :
+ baseType = "Int32";
+ break;
+ case "String" :
+ case "Json" :
+ baseType = "String";
+ break;
+ case "Date" :
+ baseType = "DateTime64(3, 'UTC')";
+ break;
+
+ }
+ } else {
+ switch (fieldType.type) {
+ case "BigInt" :
+ var precision = fieldType.precision;
+ baseType = precision !== undefined && precision <= 38 ? "Decimal(" + precision.toString() + ",0)" : "String";
+ break;
+ case "BigDecimal" :
+ var config = fieldType.config;
+ if (config !== undefined) {
+ var scale = config[1];
+ var precision$1 = config[0];
+ baseType = precision$1 > 38 || scale > precision$1 ? "String" : "Decimal(" + precision$1.toString() + "," + scale.toString() + ")";
+ } else {
+ baseType = "String";
+ }
+ break;
+ case "Enum" :
+ var config$1 = fieldType.config;
+ var variantsLength = config$1.variants.length;
+ var enumType = variantsLength <= 127 ? "Enum8" : "Enum16";
+ var enumValues = Belt_Array.map(config$1.variants, (function (variant) {
+ return "'" + variant + "'";
+ })).join(", ");
+ baseType = enumType + "(" + enumValues + ")";
+ break;
+ case "Entity" :
+ baseType = "String";
+ break;
+
+ }
+ }
+ var baseType$1 = isArray ? "Array(" + baseType + ")" : baseType;
+ if (isNullable) {
+ return "Nullable(" + baseType$1 + ")";
+ } else {
+ return baseType$1;
+ }
+ }
+
+ async function setCheckpointsOrThrow(client, batch, database) {
+ var checkpointsCount = batch.checkpointIds.length;
+ if (checkpointsCount === 0) {
+ return ;
+ }
+ var checkpointRows = [];
+ for(var idx = 0; idx < checkpointsCount; ++idx){
+ checkpointRows.push([
+ batch.checkpointIds[idx],
+ batch.checkpointChainIds[idx],
+ batch.checkpointBlockNumbers[idx],
+ batch.checkpointBlockHashes[idx],
+ batch.checkpointEventsProcessed[idx]
+ ]);
+ }
+ try {
+ return await client.insert({
+ table: database + ".\`" + InternalTable.Checkpoints.table.tableName + "\`",
+ values: checkpointRows,
+ format: "JSONCompactEachRow"
+ });
+ }
+ catch (raw_exn){
+ var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);
+ throw {
+ RE_EXN_ID: Persistence.StorageError,
+ message: "Failed to insert checkpoints into ClickHouse table \"" + InternalTable.Checkpoints.table.tableName + "\"",
+ reason: Utils.prettifyExn(exn),
+ Error: new Error()
+ };
+ }
+ }
+
+ async function setUpdatesOrThrow(client, updates, entityConfig, database) {
+ if (updates.length === 0) {
+ return ;
+ }
+ var cache = entityConfig.clickHouseSetUpdatesCache;
+ var match;
+ if (cache !== undefined) {
+ match = cache;
+ } else {
+ var cache_tableName = database + ".\`" + EntityHistory.historyTableName(entityConfig.name, entityConfig.index) + "\`";
+ var cache_convertOrThrow = S$RescriptSchema.compile(S$RescriptSchema.union([
+ EntityHistory.makeSetUpdateSchema(entityConfig.schema),
+ S$RescriptSchema.object(function (s) {
+ s.tag(EntityHistory.changeFieldName, "DELETE");
+ return {
+ type: "DELETE",
+ entityId: s.f(Table.idFieldName, S$RescriptSchema.string),
+ checkpointId: s.f(EntityHistory.checkpointIdFieldName, EntityHistory.unsafeCheckpointIdSchema)
+ };
+ })
+ ]), "Output", "Json", "Sync", false);
+ var cache$1 = {
+ tableName: cache_tableName,
+ convertOrThrow: cache_convertOrThrow
+ };
+ entityConfig.clickHouseSetUpdatesCache = cache$1;
+ match = cache$1;
+ }
+ var convertOrThrow = match.convertOrThrow;
+ var tableName = match.tableName;
+ try {
+ var values = updates.map(function (update) {
+ return convertOrThrow(update.latestChange);
+ });
+ return await client.insert({
+ table: tableName,
+ values: values,
+ format: "JSONEachRow"
+ });
+ }
+ catch (raw_exn){
+ var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);
+ throw {
+ RE_EXN_ID: Persistence.StorageError,
+ message: "Failed to insert items into ClickHouse table \"" + tableName + "\"",
+ reason: Utils.prettifyExn(exn),
+ Error: new Error()
+ };
+ }
+ }
+
+ function makeCreateHistoryTableQuery(entityConfig, database) {
+ var fieldDefinitions = Belt_Array.keepMap(entityConfig.table.fields, (function (field) {
+ if (field.TAG !== "Field") {
+ return ;
+ }
+ var field$1 = field._0;
+ var fieldName = Table.getDbFieldName(field$1);
+ var clickHouseType = getClickHouseFieldType(field$1.fieldType, field$1.isNullable, field$1.isArray);
+ return "\`" + fieldName + "\` " + clickHouseType;
+ }));
+ return "CREATE TABLE IF NOT EXISTS " + database + ".\`" + EntityHistory.historyTableName(entityConfig.name, entityConfig.index) + "\` (\n " + fieldDefinitions.join(",\n ") + ",\n \`" + EntityHistory.checkpointIdFieldName + "\` " + getClickHouseFieldType("Uint32", false, false) + ",\n \`" + EntityHistory.changeFieldName + "\` " + getClickHouseFieldType({
+ type: "Enum",
+ config: EntityHistory.RowAction.config
+ }, false, false) + "\n)\nENGINE = MergeTree()\nORDER BY (" + Table.idFieldName + ", " + EntityHistory.checkpointIdFieldName + ")";
+ }
+
+ function makeCreateCheckpointsTableQuery(database) {
+ return "CREATE TABLE IF NOT EXISTS " + database + ".\`" + InternalTable.Checkpoints.table.tableName + "\` (\n \`" + "id" + "\` " + getClickHouseFieldType("Int32", false, false) + ",\n \`" + "chain_id" + "\` " + getClickHouseFieldType("Int32", false, false) + ",\n \`" + "block_number" + "\` " + getClickHouseFieldType("Int32", false, false) + ",\n \`" + "block_hash" + "\` " + getClickHouseFieldType("String", true, false) + ",\n \`" + "events_processed" + "\` " + getClickHouseFieldType("Int32", false, false) + "\n)\nENGINE = MergeTree()\nORDER BY (" + "id" + ")";
+ }
+
+ function makeCreateViewQuery(entityConfig, database) {
+ var historyTableName = EntityHistory.historyTableName(entityConfig.name, entityConfig.index);
+ var checkpointsTableName = InternalTable.Checkpoints.table.tableName;
+ var entityFields = Belt_Array.keepMap(entityConfig.table.fields, (function (field) {
+ if (field.TAG !== "Field") {
+ return ;
+ }
+ var fieldName = Table.getDbFieldName(field._0);
+ return "\`" + fieldName + "\`";
+ })).join(", ");
+ return "CREATE VIEW IF NOT EXISTS " + database + ".\`" + entityConfig.name + "\` AS\nSELECT " + entityFields + "\nFROM (\n SELECT " + entityFields + ", \`" + EntityHistory.changeFieldName + "\`\n FROM " + database + ".\`" + historyTableName + "\`\n WHERE \`" + EntityHistory.checkpointIdFieldName + "\` <= (SELECT max(" + "id" + ") FROM " + database + ".\`" + checkpointsTableName + "\`)\n ORDER BY \`" + EntityHistory.checkpointIdFieldName + "\` DESC\n LIMIT 1 BY \`" + Table.idFieldName + "\`\n)\nWHERE \`" + EntityHistory.changeFieldName + "\` = '" + "SET" + "'";
+ }
+
+ async function initialize(client, database, entities, param) {
+ try {
+ await client.exec({
+ query: "DROP DATABASE IF EXISTS " + database
+ });
+ await client.exec({
+ query: "CREATE DATABASE " + database
+ });
+ await client.exec({
+ query: "USE " + database
+ });
+ await Promise.all(Belt_Array.map(entities, (function (entityConfig) {
+ return client.exec({
+ query: makeCreateHistoryTableQuery(entityConfig, database)
+ });
+ })));
+ await client.exec({
+ query: makeCreateCheckpointsTableQuery(database)
+ });
+ await Promise.all(Belt_Array.map(entities, (function (entityConfig) {
+ return client.exec({
+ query: makeCreateViewQuery(entityConfig, database)
+ });
+ })));
+ return Logging.trace("ClickHouse sink initialization completed successfully");
+ }
+ catch (raw_exn){
+ var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);
+ Logging.errorWithExn(exn, "Failed to initialize ClickHouse sink");
+ return Js_exn.raiseError("ClickHouse initialization failed");
+ }
+ }
+
+ async function resume(client, database, checkpointId) {
+ try {
+ try {
+ await client.exec({
+ query: "USE " + database
+ });
+ }
+ catch (raw_exn){
+ var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);
+ Logging.errorWithExn(exn, "ClickHouse sink database \"" + database + "\" not found. Please run 'envio start -r' to reinitialize the indexer (it'll also drop Postgres database).");
+ Js_exn.raiseError("ClickHouse resume failed");
+ }
+ var tablesResult = await client.query({
+ query: "SHOW TABLES FROM " + database + " LIKE '" + EntityHistory.historyTablePrefix + "%'"
+ });
+ var tables = await tablesResult.json();
+ await Promise.all(Belt_Array.map(tables, (function (table) {
+ var tableName = table.name;
+ return client.exec({
+ query: "ALTER TABLE " + database + ".\`" + tableName + "\` DELETE WHERE \`" + EntityHistory.checkpointIdFieldName + "\` > " + String(checkpointId)
+ });
+ })));
+ return await client.exec({
+ query: "DELETE FROM " + database + ".\`" + InternalTable.Checkpoints.table.tableName + "\` WHERE \`" + Table.idFieldName + "\` > " + String(checkpointId)
+ });
+ }
+ catch (raw_exn$1){
+ var exn$1 = Caml_js_exceptions.internalToOCamlException(raw_exn$1);
+ if (exn$1.RE_EXN_ID === Persistence.StorageError) {
+ throw exn$1;
+ }
+ Logging.errorWithExn(exn$1, "Failed to resume ClickHouse sink");
+ return Js_exn.raiseError("ClickHouse resume failed");
+ }
+ }
+
+ exports.getClickHouseFieldType = getClickHouseFieldType;
+ exports.setCheckpointsOrThrow = setCheckpointsOrThrow;
+ exports.setUpdatesOrThrow = setUpdatesOrThrow;
+ exports.makeCreateHistoryTableQuery = makeCreateHistoryTableQuery;
+ exports.makeCreateCheckpointsTableQuery = makeCreateCheckpointsTableQuery;
+ exports.makeCreateViewQuery = makeCreateViewQuery;
+ exports.initialize = initialize;
+ exports.resume = resume;
+ /* Table Not a pure module */
@@ -99,3 +99,18 @@ external makeSql: (~config: poolConfig) => sql = "postgres"
  @send
  external preparedUnsafe: (sql, string, unknown, @as(json`{prepare: true}`) _) => promise<'a> =
    "unsafe"
+
+ @unboxed
+ type columnType =
+ | @as("INTEGER") Integer
+ | @as("BIGINT") BigInt
+ | @as("BOOLEAN") Boolean
+ | @as("NUMERIC") Numeric
+ | @as("DOUBLE PRECISION") DoublePrecision
+ | @as("TEXT") Text
+ | @as("SERIAL") Serial
+ | @as("JSONB") JsonB
+ | @as("TIMESTAMP WITH TIME ZONE") TimestampWithTimezone
+ | @as("TIMESTAMP WITH TIME ZONE NULL") TimestampWithTimezoneNull
+ | @as("TIMESTAMP") TimestampWithoutTimezone
+ | Custom(string)
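
Because of @unboxed and the @as annotations, values of columnType should compile to plain strings at runtime, with the Custom constructor carrying arbitrary SQL type names unchanged. For illustration (the CITEXT example is hypothetical):

let _textColumn: columnType = Text // runtime value: "TEXT"
let _customColumn: columnType = Custom("CITEXT") // runtime value: "CITEXT"
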