envio 2.8.1 → 2.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,335 @@
1
+ open Table
2
+
3
module RowAction = {
  // Kind of mutation a history row records: an upsert (SET) or a removal (DELETE).
  type t = SET | DELETE
  let variants = [SET, DELETE]
  // Name of the Postgres enum type backing this variant in the DB.
  let name = "ENTITY_HISTORY_ROW_ACTION"
  let enum = Enum.make(~name, ~variants)
}
9
+
10
// Event-ordering coordinates of a change. The type parameter lets the same
// record shape serve both the required "current" fields (int) and the
// possibly-absent "previous" fields (option<int>).
type historyFieldsGeneral<'a> = {
  chain_id: 'a,
  block_timestamp: 'a,
  block_number: 'a,
  log_index: 'a,
}

// Ordering fields of the change being recorded — always fully known.
type historyFields = historyFieldsGeneral<int>

// Minimal payload kept for a DELETE action: only the entity id survives.
type entityIdOnly = {id: string}
let entityIdOnlySchema = S.schema(s => {id: s.matches(S.string)})
// Entity payload of a history row: full entity on Set, id-only on Delete.
type entityData<'entity> = Delete(entityIdOnly) | Set('entity)

// One entity-history record: where this change happened (current), where the
// previous change happened if known (previous), and the entity data/deletion.
type historyRow<'entity> = {
  current: historyFields,
  previous: option<historyFields>,
  entityData: entityData<'entity>,
}

// Same ordering fields but individually nullable — matches the flattened
// previous_entity_history_* DB columns, which may each be NULL.
type previousHistoryFields = historyFieldsGeneral<option<int>>
30
+
31
//For flattening the optional previous fields into their own individual nullable fields
let previousHistoryFieldsSchema = S.object(s => {
  chain_id: s.field("previous_entity_history_chain_id", S.null(S.int)),
  block_timestamp: s.field("previous_entity_history_block_timestamp", S.null(S.int)),
  block_number: s.field("previous_entity_history_block_number", S.null(S.int)),
  log_index: s.field("previous_entity_history_log_index", S.null(S.int)),
})

// Maps the non-null entity_history_* DB columns onto historyFields.
let currentHistoryFieldsSchema = S.object(s => {
  chain_id: s.field("entity_history_chain_id", S.int),
  block_timestamp: s.field("entity_history_block_timestamp", S.int),
  block_number: s.field("entity_history_block_number", S.int),
  log_index: s.field("entity_history_log_index", S.int),
})
45
+
46
// Builds a two-way schema between the nested historyRow<'entity> record and
// the flat DB row layout: current entity_history_* columns, nullable
// previous_entity_history_* columns, the entity's own (nullable) columns,
// and an "action" enum column.
let makeHistoryRowSchema: S.t<'entity> => S.t<historyRow<'entity>> = entitySchema => {
  //Maps a schema object for the given entity with all fields nullable except for the id field
  //Keeps any original nullable fields
  let nullableEntitySchema: S.t<Js.Dict.t<unknown>> = S.schema(s =>
    switch entitySchema->S.classify {
    | Object({items}) =>
      let nulldict = Js.Dict.empty()
      items->Belt.Array.forEach(({location, schema}) => {
        let nullableFieldSchema = switch (location, schema->S.classify) {
        // "id" stays required; already-nullable fields are kept as-is.
        | ("id", _)
        | (_, Null(_)) => schema //TODO double check this works for array types
        | _ => S.null(schema)->S.toUnknown
        }

        nulldict->Js.Dict.set(location, s.matches(nullableFieldSchema))
      })
      nulldict
    | _ =>
      // Entities are always object schemas; anything else is a codegen bug.
      Js.Exn.raiseError(
        "Failed creating nullableEntitySchema. Expected an object schema for entity",
      )
    }
  )

  // Serialized form of previous = None: every previous_* column NULL.
  let previousWithNullFields = {
    chain_id: None,
    block_timestamp: None,
    block_number: None,
    log_index: None,
  }

  S.object(s => {
    {
      "current": s.flatten(currentHistoryFieldsSchema),
      "previous": s.flatten(previousHistoryFieldsSchema),
      "entityData": s.flatten(nullableEntitySchema),
      "action": s.field("action", RowAction.enum.schema),
    }
  })->S.transform(s => {
    parser: v => {
      current: v["current"],
      // previous is Some only when ALL four columns are non-null, None when
      // all four are null; a partial mix is invalid data.
      previous: switch v["previous"] {
      | {
          chain_id: Some(chain_id),
          block_timestamp: Some(block_timestamp),
          block_number: Some(block_number),
          log_index: Some(log_index),
        } =>
        Some({
          chain_id,
          block_timestamp,
          block_number,
          log_index,
        })
      | {chain_id: None, block_timestamp: None, block_number: None, log_index: None} => None
      | _ => s.fail("Unexpected mix of null and non-null values in previous history fields")
      },
      // The action column decides how to interpret the flattened entity data.
      entityData: switch v["action"] {
      | SET => v["entityData"]->(Utils.magic: Js.Dict.t<unknown> => 'entity)->Set
      | DELETE =>
        let {id} = v["entityData"]->(Utils.magic: Js.Dict.t<unknown> => entityIdOnly)
        Delete({id: id})
      },
    },
    serializer: v => {
      let (entityData, action) = switch v.entityData {
      | Set(entityData) => (entityData->(Utils.magic: 'entity => Js.Dict.t<unknown>), RowAction.SET)
      | Delete(entityIdOnly) => (
          entityIdOnly->(Utils.magic: entityIdOnly => Js.Dict.t<unknown>),
          DELETE,
        )
      }

      {
        "current": v.current,
        "entityData": entityData,
        "action": action,
        "previous": switch v.previous {
        | Some(historyFields) =>
          historyFields->(Utils.magic: historyFields => previousHistoryFields) //Cast to previousHistoryFields (with "Some" field values)
        | None => previousWithNullFields
        },
      }
    },
  })
}
132
+
133
// Everything needed to operate one entity's history table.
type t<'entity> = {
  table: table, // the "<origin>_history" table definition
  createInsertFnQuery: string, // DDL creating the plpgsql insert helper
  schema: S.t<historyRow<'entity>>, // row <-> flat DB layout mapping
  schemaRows: S.t<array<historyRow<'entity>>>, // array form of the above
  // Eval'd JS wrapper calling the plpgsql insert function for one row.
  insertFn: (Postgres.sql, Js.Json.t, ~shouldCopyCurrentEntity: bool) => promise<unit>,
}
140
+
141
// Serialize a single history row with this table's schema and hand the flat
// JSON payload to the generated insert function.
let insertRow = (
  self: t<'entity>,
  ~sql,
  ~historyRow: historyRow<'entity>,
  ~shouldCopyCurrentEntity,
) =>
  self.insertFn(
    sql,
    historyRow->S.serializeOrRaiseWith(self.schema),
    ~shouldCopyCurrentEntity,
  )
150
+
151
// Serialize the whole batch at once, then fire one insertFn call per row and
// resolve when every insert has completed.
let batchInsertRows = (
  self: t<'entity>,
  ~sql,
  ~rows: array<historyRow<'entity>>,
  ~shouldCopyCurrentEntity,
) => {
  let serializedRows =
    rows
    ->S.serializeOrRaiseWith(self.schemaRows)
    ->(Utils.magic: Js.Json.t => array<Js.Json.t>)
  serializedRows
  ->Belt.Array.map(serialized => self.insertFn(sql, serialized, ~shouldCopyCurrentEntity))
  ->Promise.all
  ->Promise.thenResolve(_ => ())
}
166
+
167
// Opaque stand-in used to erase the concrete entity type parameter so
// heterogeneous history handles can live in one collection.
type entityInternal

// Type-level cast only — t<'entity> has the same runtime representation
// regardless of 'entity, so %identity is safe here.
external castInternal: t<'entity> => t<entityInternal> = "%identity"
170
+
171
// Derives everything for an entity's history from its origin table: the
// "<origin>_history" table definition, the DDL for a plpgsql insert helper
// that back-fills missing "previous" ordering fields, and an eval'd JS
// wrapper that invokes that helper.
let fromTable = (table: table, ~schema: S.t<'entity>): t<'entity> => {
  let entity_history_block_timestamp = "entity_history_block_timestamp"
  let entity_history_chain_id = "entity_history_chain_id"
  let entity_history_block_number = "entity_history_block_number"
  let entity_history_log_index = "entity_history_log_index"

  //NB: Ordered by hierarchy of event ordering
  let currentChangeFieldNames = [
    entity_history_block_timestamp,
    entity_history_chain_id,
    entity_history_block_number,
    entity_history_log_index,
  ]

  // Current ordering fields form (part of) the history table's primary key.
  let currentHistoryFields =
    currentChangeFieldNames->Belt.Array.map(fieldName =>
      mkField(fieldName, Integer, ~isPrimaryKey=true)
    )

  let previousChangeFieldNames =
    currentChangeFieldNames->Belt.Array.map(fieldName => "previous_" ++ fieldName)

  // Previous ordering fields may be unknown at insert time, hence nullable.
  let previousHistoryFields =
    previousChangeFieldNames->Belt.Array.map(fieldName =>
      mkField(fieldName, Integer, ~isNullable=true)
    )

  let id = "id"

  // Copy the origin table's concrete fields, adapting them for history use.
  let dataFields = table.fields->Belt.Array.keepMap(field =>
    switch field {
    | Field(field) =>
      switch field.fieldName {
      //id is not nullable and should be part of the pk
      | "id" => {...field, fieldName: id, isPrimaryKey: true}->Field->Some
      //db_write_timestamp can be removed for this. TODO: remove this when we deprecate
      //automatic db_write_timestamp creation
      | "db_write_timestamp" => None
      | _ =>
        {
          ...field,
          isNullable: true, //All entity fields are nullable in the history table
          isIndex: false, //No need to index any additional entity data fields in entity history
        }
        ->Field
        ->Some
      }

    | DerivedFrom(_) => None
    }
  )

  let actionFieldName = "action"

  let actionField = mkField(actionFieldName, Custom(RowAction.enum.name))

  // Serial column gives a total insertion order; NULL is passed on insert so
  // Postgres assigns it.
  let serialField = mkField("serial", Serial, ~isNullable=true, ~isIndex=true)

  let dataFieldNames = dataFields->Belt.Array.map(field => field->getFieldName)

  let originTableName = table.tableName
  let historyTableName = originTableName ++ "_history"
  //ignore composite indices
  // NOTE: shadows the origin `table` parameter from here on.
  let table = mkTable(
    historyTableName,
    ~fields=Belt.Array.concatMany([
      currentHistoryFields,
      previousHistoryFields,
      dataFields,
      [actionField, serialField],
    ]),
  )

  let insertFnName = `"insert_${table.tableName}"`
  let historyRowArg = "history_row"
  let historyTablePath = `"public"."${historyTableName}"`
  let originTablePath = `"public"."${originTableName}"`

  // "previous unknown" test: any previous_* field NULL on the incoming row.
  let previousHistoryFieldsAreNullStr =
    previousChangeFieldNames
    ->Belt.Array.map(fieldName => `${historyRowArg}.${fieldName} IS NULL`)
    ->Js.Array2.joinWith(" OR ")

  let currentChangeFieldNamesCommaSeparated = currentChangeFieldNames->Js.Array2.joinWith(", ")

  let dataFieldNamesDoubleQuoted = dataFieldNames->Belt.Array.map(fieldName => `"${fieldName}"`)
  let dataFieldNamesCommaSeparated = dataFieldNamesDoubleQuoted->Js.Array2.joinWith(", ")

  let allFieldNamesDoubleQuoted =
    Belt.Array.concatMany([
      currentChangeFieldNames,
      previousChangeFieldNames,
      dataFieldNames,
      [actionFieldName],
    ])->Belt.Array.map(fieldName => `"${fieldName}"`)

  // plpgsql helper: if the caller didn't supply previous_* values, look them
  // up from the latest history row for the same id; failing that, optionally
  // seed history from the origin table row; then insert the new history row.
  let createInsertFnQuery = {
    `CREATE OR REPLACE FUNCTION ${insertFnName}(${historyRowArg} ${historyTablePath}, should_copy_current_entity BOOLEAN)
  RETURNS void AS $$
  DECLARE
    v_previous_record RECORD;
    v_origin_record RECORD;
  BEGIN
    -- Check if previous values are not provided
    IF ${previousHistoryFieldsAreNullStr} THEN
      -- Find the most recent record for the same id
      SELECT ${currentChangeFieldNamesCommaSeparated} INTO v_previous_record
      FROM ${historyTablePath}
      WHERE ${id} = ${historyRowArg}.${id}
      ORDER BY ${currentChangeFieldNames
      ->Belt.Array.map(fieldName => fieldName ++ " DESC")
      ->Js.Array2.joinWith(", ")}
      LIMIT 1;

      -- If a previous record exists, use its values
      IF FOUND THEN
        ${Belt.Array.zip(currentChangeFieldNames, previousChangeFieldNames)
      ->Belt.Array.map(((currentFieldName, previousFieldName)) => {
        `${historyRowArg}.${previousFieldName} := v_previous_record.${currentFieldName};`
      })
      ->Js.Array2.joinWith(" ")}
      ElSIF should_copy_current_entity THEN
        -- Check if a value for the id exists in the origin table and if so, insert a history row for it.
        SELECT ${dataFieldNamesCommaSeparated} FROM ${originTablePath} WHERE id = ${historyRowArg}.${id} INTO v_origin_record;
        IF FOUND THEN
          INSERT INTO ${historyTablePath} (${currentChangeFieldNamesCommaSeparated}, ${dataFieldNamesCommaSeparated}, "${actionFieldName}")
          -- SET the current change data fields to 0 since we don't know what they were
          -- and it doesn't matter provided they are less than any new values
          VALUES (${currentChangeFieldNames
      ->Belt.Array.map(_ => "0")
      ->Js.Array2.joinWith(", ")}, ${dataFieldNames
      ->Belt.Array.map(fieldName => `v_origin_record."${fieldName}"`)
      ->Js.Array2.joinWith(", ")}, 'SET');

          ${previousChangeFieldNames
      ->Belt.Array.map(previousFieldName => {
        `${historyRowArg}.${previousFieldName} := 0;`
      })
      ->Js.Array2.joinWith(" ")}
        END IF;
      END IF;
    END IF;

    INSERT INTO ${historyTablePath} (${allFieldNamesDoubleQuoted->Js.Array2.joinWith(", ")})
    VALUES (${allFieldNamesDoubleQuoted
      ->Belt.Array.map(fieldName => `${historyRowArg}.${fieldName}`)
      ->Js.Array2.joinWith(", ")});
  END;
  $$ LANGUAGE plpgsql;
  `
  }

  // JS source (eval'd below) that calls the plpgsql function via the postgres
  // tagged-template client, building a ROW(...) from the serialized row args.
  let insertFnString = `(sql, rowArgs, shouldCopyCurrentEntity) =>
      sql\`select ${insertFnName}(ROW(${allFieldNamesDoubleQuoted
    ->Belt.Array.map(fieldNameDoubleQuoted => `\${rowArgs[${fieldNameDoubleQuoted}]\}`)
    ->Js.Array2.joinWith(", ")}, NULL), --NULL argument for SERIAL field
      \${shouldCopyCurrentEntity});\``

  let insertFn: (Postgres.sql, Js.Json.t, ~shouldCopyCurrentEntity: bool) => promise<unit> =
    insertFnString->Table.PostgresInterop.eval

  let schema = makeHistoryRowSchema(schema)

  {table, createInsertFnQuery, schema, schemaRows: S.array(schema), insertFn}
}
@@ -0,0 +1,18 @@
1
+ open Belt
2
// Lookup of table definitions by table name.
type t = dict<Table.table>

// Index the given tables by name for constant-time lookups.
let make = (tables: array<Table.table>) => {
  tables->Array.map(table => (table.tableName, table))->Js.Dict.fromArray
}

// The derivedFrom target entity is not present in the schema map.
exception UndefinedEntity(Table.derivedFromField)
// The target entity exists but lacks the referenced field.
exception UndefinedFieldInEntity(Table.derivedFromField)

// Resolves the actual DB field name that a @derivedFrom field points at,
// returning a result rather than raising (errors are unexpected here since
// the schema is validated at codegen time).
let getDerivedFromFieldName = (schema: t, derivedFromField: Table.derivedFromField) =>
  switch schema->Utils.Dict.dangerouslyGetNonOption(derivedFromField.derivedFromEntity) {
  | Some(entity) =>
    switch entity->Table.getFieldByName(derivedFromField.derivedFromField) {
    | Some(field) => field->Table.getFieldName->Ok
    | None => Error(UndefinedFieldInEntity(derivedFromField)) //Unexpected, schema should be parsed on codegen
    }
  | None => Error(UndefinedEntity(derivedFromField)) //Unexpected, schema should be parsed on codegen
  }
@@ -0,0 +1,251 @@
1
+ open Belt
2
+
3
// Phantom tags (no runtime values) for distinguishing field categories at the
// type level.
type primitive
type derived

// SQL column type. @unboxed + @as means each constructor is represented at
// runtime directly as its SQL type-name string (Custom carries its own name).
@unboxed
type fieldType =
  | @as("INTEGER") Integer
  | @as("BOOLEAN") Boolean
  | @as("NUMERIC") Numeric
  | @as("DOUBLE PRECISION") DoublePrecision
  | @as("TEXT") Text
  | @as("SERIAL") Serial
  | @as("JSONB") JsonB
  | @as("TIMESTAMP WITH TIME ZONE") Timestamp
  | @as("TIMESTAMP") TimestampWithoutTimezone
  | @as("TIMESTAMP WITH TIME ZONE NULL") TimestampWithNullTimezone
  | Custom(string)

// A concrete table column and its DB attributes.
type field = {
  fieldName: string,
  fieldType: fieldType,
  isArray: bool,
  isNullable: bool,
  isPrimaryKey: bool,
  isIndex: bool,
  // Set when this field references another entity (stored as "<name>_id").
  linkedEntity: option<string>,
  defaultValue: option<string>,
}

// A virtual field computed from a field on another entity (@derivedFrom).
type derivedFromField = {
  fieldName: string,
  derivedFromEntity: string,
  derivedFromField: string,
}

// A table member is either a real column or a derived (virtual) field.
type fieldOrDerived = Field(field) | DerivedFrom(derivedFromField)
37
+
38
// Constructs a concrete column wrapped in Field. All attribute flags default
// to false/absent so call sites only name what they need.
let mkField = (
  ~default=?,
  ~isArray=false,
  ~isNullable=false,
  ~isPrimaryKey=false,
  ~isIndex=false,
  ~linkedEntity=?,
  fieldName,
  fieldType,
) =>
  {
    fieldName,
    fieldType,
    isArray,
    isNullable,
    isPrimaryKey,
    isIndex,
    linkedEntity,
    defaultValue: default,
  }->Field

// Constructs a @derivedFrom virtual field wrapped in DerivedFrom.
let mkDerivedFromField = (fieldName, ~derivedFromEntity, ~derivedFromField) =>
  {
    fieldName,
    derivedFromField,
    derivedFromEntity,
  }->DerivedFrom
65
+
66
// The field name exactly as the user wrote it in the schema (no _id suffix).
let getUserDefinedFieldName = fieldOrDerived =>
  switch fieldOrDerived {
  | Field({fieldName}) => fieldName
  | DerivedFrom({fieldName}) => fieldName
  }

// A field references another entity when linkedEntity is set.
let isLinkedEntityField = field =>
  switch field.linkedEntity {
  | Some(_) => true
  | None => false
  }

// Linked-entity fields are stored in the DB under "<name>_id".
let getDbFieldName = field =>
  if field->isLinkedEntityField {
    field.fieldName ++ "_id"
  } else {
    field.fieldName
  }

// Resolve the actual DB column name for either kind of field.
let getFieldName = fieldOrDerived =>
  switch fieldOrDerived {
  | DerivedFrom({fieldName}) => fieldName
  | Field(field) => field->getDbFieldName
  }
82
+
83
// A table definition: its name, its members, and any multi-column indices
// (each inner array lists the user-defined field names of one index).
type table = {
  tableName: string,
  fields: array<fieldOrDerived>,
  compositeIndices: array<array<string>>,
}

// Table constructor; compositeIndices defaults to none.
let mkTable: 'b. (
  ~compositeIndices: array<array<string>>=?,
  ~fields: array<fieldOrDerived>,
  string,
) => 'c = (~compositeIndices=[], ~fields, tableName) => {
  tableName,
  fields,
  compositeIndices,
}
98
+
99
// Concrete (non-derived) field records of the table.
let getFields = table =>
  table.fields->Array.keepMap(entry =>
    switch entry {
    | Field(field) => Some(field)
    | DerivedFrom(_) => None
    }
  )

// User-defined names of every primary-key field.
let getPrimaryKeyFieldNames = table =>
  table
  ->getFields
  ->Array.keepMap(field => field.isPrimaryKey ? Some(field.fieldName) : None)

// Concrete fields that have no DB-side default value.
let getNonDefaultFields = table =>
  table->getFields->Array.keep(field => field.defaultValue->Option.isNone)

// Pairs of (field, target entity name) for fields linking to another entity.
let getLinkedEntityFields = table =>
  table
  ->getFields
  ->Array.keepMap(field =>
    field.linkedEntity->Option.map(linkedEntityName => (field, linkedEntityName))
  )

// All @derivedFrom virtual fields of the table.
let getDerivedFromFields = table =>
  table.fields->Array.keepMap(entry =>
    switch entry {
    | DerivedFrom(derived) => Some(derived)
    | Field(_) => None
    }
  )
140
+
141
// DB column names (with _id suffixes applied) of all concrete fields.
let getFieldNames = table => table->getFields->Array.map(getDbFieldName)

// DB column names of concrete fields lacking a default value.
let getNonDefaultFieldNames = table => table->getNonDefaultFields->Array.map(getDbFieldName)

// Find a field (concrete or derived) by the name the user gave it.
let getFieldByName = (table, fieldNameSearch) =>
  table.fields->Js.Array2.find(candidate =>
    candidate->getUserDefinedFieldName == fieldNameSearch
  )

// Raised when an index references a field that doesn't exist on the table.
exception NonExistingTableField(string)
153
+
154
/*
Gets all composite indices (whether they are single indices or not)
And maps the fields defined to their actual db name (some have _id suffix)
*/
let getUnfilteredCompositeIndicesUnsafe = (table): array<array<string>> => {
  table.compositeIndices->Array.map(compositeIndex =>
    compositeIndex->Array.map(userDefinedFieldName =>
      switch table->getFieldByName(userDefinedFieldName) {
      | Some(field) => field->getFieldName
      | None => raise(NonExistingTableField(userDefinedFieldName)) //Unexpected should be validated in schema parser
      }
    )
  )
}

/*
Gets all single indices
And maps the fields defined to their actual db name (some have _id suffix)
*/
let getSingleIndices = (table): array<string> => {
  // Fields individually marked as indexed.
  let indexFields = table.fields->Array.keepMap(field =>
    switch field {
    | Field(field) if field.isIndex => Some(field->getDbFieldName)
    | _ => None
    }
  )

  table
  ->getUnfilteredCompositeIndicesUnsafe
  //get all composite indices with only 1 field defined
  //this is still a single index
  ->Array.keep(cidx => cidx->Array.length == 1)
  ->Array.concat([indexFields])
  ->Array.concatMany
  // Deduplicate via a string set, then return sorted for deterministic output.
  ->Set.String.fromArray
  ->Set.String.toArray
  ->Js.Array2.sortInPlace
}

/*
Gets all composite indices
And maps the fields defined to their actual db name (some have _id suffix)
*/
let getCompositeIndices = (table): array<array<string>> => {
  table
  ->getUnfilteredCompositeIndicesUnsafe
  ->Array.keep(ind => ind->Array.length > 1)
}
202
+
203
module PostgresInterop = {
  // A query function taking the postgres client plus a payload.
  type pgFn<'payload, 'return> = (Postgres.sql, 'payload) => promise<'return>
  // Batch upsert entry point for rows of type 'a.
  type batchSetFn<'a> = (Postgres.sql, array<'a>) => promise<unit>
  // Evaluates a JS source string into a function. Input is generated locally
  // (below), never user-supplied.
  external eval: string => 'a = "eval"

  // Builds the JS source for a batch INSERT ... ON CONFLICT DO UPDATE upsert
  // over this table's non-default columns, keyed on the primary key.
  let makeBatchSetFnString = (table: table) => {
    let fieldNamesInQuotes =
      table->getNonDefaultFieldNames->Array.map(fieldName => `"${fieldName}"`)
    `(sql, rows) => {
      return sql\`
        INSERT INTO "public"."${table.tableName}"
    \${sql(rows, ${fieldNamesInQuotes->Js.Array2.joinWith(", ")})}
        ON CONFLICT(${table->getPrimaryKeyFieldNames->Js.Array2.joinWith(", ")}) DO UPDATE
        SET
        ${fieldNamesInQuotes
      ->Array.map(fieldNameInQuotes => `${fieldNameInQuotes} = EXCLUDED.${fieldNameInQuotes}`)
      ->Js.Array2.joinWith(", ")};\`
    }`
  }

  // Runs queryToExecute over entityDataArray in chunks of maxItemsPerQuery.
  // Note: chunks are dispatched without awaiting, so they run concurrently;
  // the returned promise resolves once all chunks have completed.
  let chunkBatchQuery = (
    sql,
    entityDataArray: array<'entity>,
    queryToExecute: pgFn<array<'entity>, 'return>,
    ~maxItemsPerQuery=500,
  ): promise<array<'return>> => {
    let responses = []
    let i = ref(0)
    let shouldContinue = () => i.contents < entityDataArray->Array.length
    // Split entityDataArray into chunks of maxItemsPerQuery
    while shouldContinue() {
      let chunk =
        entityDataArray->Js.Array2.slice(~start=i.contents, ~end_=i.contents + maxItemsPerQuery)
      let response = queryToExecute(sql, chunk)
      responses->Js.Array2.push(response)->ignore
      i := i.contents + maxItemsPerQuery
    }
    Promise.all(responses)
  }

  // Builds a batch setter: serializes rows via rowsSchema, then executes the
  // eval'd upsert in chunks.
  let makeBatchSetFn = (~table, ~rowsSchema: S.t<array<'a>>): batchSetFn<'a> => {
    let batchSetFn: pgFn<array<Js.Json.t>, unit> = table->makeBatchSetFnString->eval
    async (sql, rows) => {
      let rowsJson =
        rows->S.serializeOrRaiseWith(rowsSchema)->(Utils.magic: Js.Json.t => array<Js.Json.t>)
      let _res = await chunkBatchQuery(sql, rowsJson, batchSetFn)
    }
  }
}