@event-driven-io/pongo 0.17.0-beta.1 → 0.17.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. package/dist/{chunk-YLV7YIPZ.cjs → chunk-4BL6YWLW.cjs} +3 -7
  2. package/dist/chunk-4BL6YWLW.cjs.map +1 -0
  3. package/dist/chunk-ECQ2CKZE.cjs +330 -0
  4. package/dist/chunk-ECQ2CKZE.cjs.map +1 -0
  5. package/dist/{chunk-5LN762VW.js → chunk-EYQDS752.js} +5 -3
  6. package/dist/chunk-EYQDS752.js.map +1 -0
  7. package/dist/{chunk-DL4E3N6J.js → chunk-NCNRRYVE.js} +3 -7
  8. package/dist/chunk-NCNRRYVE.js.map +1 -0
  9. package/dist/chunk-WH26IXHN.js +10 -0
  10. package/dist/{chunk-7W6X4QGY.cjs → chunk-WKW4LGF6.cjs} +4 -4
  11. package/dist/{chunk-7W6X4QGY.cjs.map → chunk-WKW4LGF6.cjs.map} +1 -1
  12. package/dist/chunk-Y7LRKJLJ.js +330 -0
  13. package/dist/chunk-Y7LRKJLJ.js.map +1 -0
  14. package/dist/{chunk-3KNMMQUV.cjs → chunk-ZPWKWNK2.cjs} +20 -18
  15. package/dist/chunk-ZPWKWNK2.cjs.map +1 -0
  16. package/dist/cli.cjs +38 -21
  17. package/dist/cli.cjs.map +1 -1
  18. package/dist/cli.js +29 -12
  19. package/dist/cli.js.map +1 -1
  20. package/dist/d1.cjs +54 -0
  21. package/dist/d1.cjs.map +1 -0
  22. package/dist/d1.d.cts +11 -0
  23. package/dist/d1.d.ts +11 -0
  24. package/dist/d1.js +54 -0
  25. package/dist/d1.js.map +1 -0
  26. package/dist/index-BJopB-em.d.cts +7 -0
  27. package/dist/index-G5DECNb_.d.ts +7 -0
  28. package/dist/index.cjs +3 -3
  29. package/dist/index.d.cts +4 -4
  30. package/dist/index.d.ts +4 -4
  31. package/dist/index.js +2 -2
  32. package/dist/{pg-WUYRNGST.js → pg-73DOKU64.js} +3 -3
  33. package/dist/pg-I267A7IL.cjs +11 -0
  34. package/dist/{pg-XCWP4FAM.cjs.map → pg-I267A7IL.cjs.map} +1 -1
  35. package/dist/pg.cjs +4 -4
  36. package/dist/pg.d.cts +5 -2
  37. package/dist/pg.d.ts +5 -2
  38. package/dist/pg.js +3 -3
  39. package/dist/{pongoCollectionSchemaComponent-BsHlVyN-.d.cts → pongoCollectionSchemaComponent-t_e9n2Wc.d.cts} +12 -8
  40. package/dist/{pongoCollectionSchemaComponent-BsHlVyN-.d.ts → pongoCollectionSchemaComponent-t_e9n2Wc.d.ts} +12 -8
  41. package/dist/shim.cjs +16 -11
  42. package/dist/shim.cjs.map +1 -1
  43. package/dist/shim.d.cts +4 -3
  44. package/dist/shim.d.ts +4 -3
  45. package/dist/shim.js +12 -7
  46. package/dist/shim.js.map +1 -1
  47. package/dist/sqlite3.cjs +16 -332
  48. package/dist/sqlite3.cjs.map +1 -1
  49. package/dist/sqlite3.d.cts +8 -7
  50. package/dist/sqlite3.d.ts +8 -7
  51. package/dist/sqlite3.js +9 -325
  52. package/dist/sqlite3.js.map +1 -1
  53. package/package.json +16 -2
  54. package/dist/chunk-3KNMMQUV.cjs.map +0 -1
  55. package/dist/chunk-5LN762VW.js.map +0 -1
  56. package/dist/chunk-DL4E3N6J.js.map +0 -1
  57. package/dist/chunk-IBJKZ6TS.js +0 -10
  58. package/dist/chunk-YLV7YIPZ.cjs.map +0 -1
  59. package/dist/pg-XCWP4FAM.cjs +0 -11
  60. /package/dist/{chunk-IBJKZ6TS.js.map → chunk-WH26IXHN.js.map} +0 -0
  61. /package/dist/{pg-WUYRNGST.js.map → pg-73DOKU64.js.map} +0 -0
package/dist/chunk-Y7LRKJLJ.js
@@ -0,0 +1,330 @@
+ import {
+ OperatorMap,
+ QueryOperators,
+ expectedVersionValue,
+ hasOperators,
+ objectEntries
+ } from "./chunk-NCNRRYVE.js";
+
+ // src/storage/sqlite/core/sqlBuilder/index.ts
+ import {
+ isSQL,
+ JSONSerializer as JSONSerializer3,
+ SQL as SQL4,
+ sqlMigration
+ } from "@event-driven-io/dumbo";
+
+ // src/storage/sqlite/core/sqlBuilder/filter/index.ts
+ import { SQL as SQL2 } from "@event-driven-io/dumbo";
+
+ // src/storage/sqlite/core/sqlBuilder/filter/queryOperators.ts
+ import { JSONSerializer, SQL } from "@event-driven-io/dumbo";
+ var handleOperator = (path, operator, value) => {
+ if (path === "_id" || path === "_version") {
+ return handleMetadataOperator(path, operator, value);
+ }
+ switch (operator) {
+ case "$eq": {
+ const jsonPath = buildJsonPath(path);
+ return SQL`(
+ json_extract(data, '${SQL.plain(jsonPath)}') = ${value}
+ OR (
+ json_type(data, '${SQL.plain(jsonPath)}') = 'array'
+ AND EXISTS(
+ SELECT 1 FROM json_each(data, '${SQL.plain(jsonPath)}')
+ WHERE json_each.value = ${value}
+ )
+ )
+ )`;
+ }
+ case "$gt":
+ case "$gte":
+ case "$lt":
+ case "$lte":
+ case "$ne": {
+ const jsonPath = buildJsonPath(path);
+ return SQL`json_extract(data, '${SQL.plain(jsonPath)}') ${SQL.plain(OperatorMap[operator])} ${value}`;
+ }
+ case "$in": {
+ const jsonPath = buildJsonPath(path);
+ const values = value;
+ const inClause = SQL.merge(
+ values.map((v) => SQL`${v}`),
+ ", "
+ );
+ return SQL`json_extract(data, '${SQL.plain(jsonPath)}') IN (${inClause})`;
+ }
+ case "$nin": {
+ const jsonPath = buildJsonPath(path);
+ const values = value;
+ const inClause = SQL.merge(
+ values.map((v) => SQL`${v}`),
+ ", "
+ );
+ return SQL`json_extract(data, '${SQL.plain(jsonPath)}') NOT IN (${inClause})`;
+ }
+ case "$elemMatch": {
+ const subConditions = objectEntries(value).map(([subKey, subValue]) => {
+ const serializedValue = JSONSerializer.serialize(subValue);
+ return `json_extract(value, '$.${subKey}') = json('${serializedValue}')`;
+ }).join(" AND ");
+ const jsonPath = buildJsonPath(path);
+ return SQL`EXISTS(SELECT 1 FROM json_each(data, '${SQL.plain(jsonPath)}') WHERE ${SQL.plain(subConditions)})`;
+ }
+ case "$all": {
+ const jsonPath = buildJsonPath(path);
+ const serializedValue = JSONSerializer.serialize(value);
+ return SQL`(SELECT COUNT(*) FROM json_each(json(${serializedValue})) WHERE json_each.value NOT IN (SELECT value FROM json_each(data, '${SQL.plain(jsonPath)}'))) = 0`;
+ }
+ case "$size": {
+ const jsonPath = buildJsonPath(path);
+ return SQL`json_array_length(json_extract(data, '${SQL.plain(jsonPath)}')) = ${value}`;
+ }
+ default:
+ throw new Error(`Unsupported operator: ${operator}`);
+ }
+ };
+ var handleMetadataOperator = (fieldName, operator, value) => {
+ switch (operator) {
+ case "$eq":
+ return SQL`${SQL.plain(fieldName)} = ${value}`;
+ case "$gt":
+ case "$gte":
+ case "$lt":
+ case "$lte":
+ case "$ne":
+ return SQL`${SQL.plain(fieldName)} ${SQL.plain(OperatorMap[operator])} ${value}`;
+ case "$in": {
+ const values = value;
+ const inClause = SQL.merge(
+ values.map((v) => SQL`${v}`),
+ ", "
+ );
+ return SQL`${SQL.plain(fieldName)} IN (${inClause})`;
+ }
+ case "$nin": {
+ const values = value;
+ const inClause = SQL.merge(
+ values.map((v) => SQL`${v}`),
+ ", "
+ );
+ return SQL`${SQL.plain(fieldName)} NOT IN (${inClause})`;
+ }
+ default:
+ throw new Error(`Unsupported operator: ${operator}`);
+ }
+ };
+ var buildJsonPath = (path) => {
+ return `$.${path}`;
+ };
+
+ // src/storage/sqlite/core/sqlBuilder/filter/index.ts
+ var AND = "AND";
+ var constructFilterQuery = (filter) => SQL2.merge(
+ Object.entries(filter).map(
+ ([key, value]) => isRecord(value) ? constructComplexFilterQuery(key, value) : handleOperator(key, "$eq", value)
+ ),
+ ` ${AND} `
+ );
+ var constructComplexFilterQuery = (key, value) => {
+ const isEquality = !hasOperators(value);
+ return SQL2.merge(
+ objectEntries(value).map(
+ ([nestedKey, val]) => isEquality ? handleOperator(`${key}.${nestedKey}`, QueryOperators.$eq, val) : handleOperator(key, nestedKey, val)
+ ),
+ ` ${AND} `
+ );
+ };
+ var isRecord = (value) => value !== null && typeof value === "object" && !Array.isArray(value);
+
+ // src/storage/sqlite/core/sqlBuilder/update/index.ts
+ import { JSONSerializer as JSONSerializer2, SQL as SQL3 } from "@event-driven-io/dumbo";
+ var buildUpdateQuery = (update) => objectEntries(update).reduce(
+ (currentUpdateQuery, [op, value]) => {
+ switch (op) {
+ case "$set":
+ return buildSetQuery(value, currentUpdateQuery);
+ case "$unset":
+ return buildUnsetQuery(value, currentUpdateQuery);
+ case "$inc":
+ return buildIncQuery(value, currentUpdateQuery);
+ case "$push":
+ return buildPushQuery(value, currentUpdateQuery);
+ default:
+ return currentUpdateQuery;
+ }
+ },
+ SQL3`data`
+ );
+ var buildSetQuery = (set, currentUpdateQuery) => SQL3`json_patch(${currentUpdateQuery}, ${JSONSerializer2.serialize(set)})`;
+ var buildUnsetQuery = (unset, currentUpdateQuery) => {
+ const keys = Object.keys(unset);
+ let query = currentUpdateQuery;
+ for (const key of keys) {
+ query = SQL3`json_remove(${query}, '$.${SQL3.plain(key)}')`;
+ }
+ return query;
+ };
+ var buildIncQuery = (inc, currentUpdateQuery) => {
+ for (const [key, value] of Object.entries(inc)) {
+ currentUpdateQuery = typeof value === "bigint" ? SQL3`json_set(${currentUpdateQuery}, '$.${SQL3.plain(key)}', CAST((COALESCE(json_extract(${currentUpdateQuery}, '$.${SQL3.plain(key)}'), 0) + ${value}) AS TEXT))` : SQL3`json_set(${currentUpdateQuery}, '$.${SQL3.plain(key)}', COALESCE(json_extract(${currentUpdateQuery}, '$.${SQL3.plain(key)}'), 0) + ${value})`;
+ }
+ return currentUpdateQuery;
+ };
+ var buildPushQuery = (push, currentUpdateQuery) => {
+ for (const [key, value] of Object.entries(push)) {
+ const serializedValue = JSONSerializer2.serialize(value);
+ currentUpdateQuery = SQL3`json_set(${currentUpdateQuery}, '$.${SQL3.plain(key)}', CASE
+ WHEN json_type(json_extract(${currentUpdateQuery}, '$.${SQL3.plain(key)}')) = 'array'
+ THEN json_insert(json_extract(${currentUpdateQuery}, '$.${SQL3.plain(key)}'), '$[#]', json(${serializedValue}))
+ ELSE json_array(json(${serializedValue}))
+ END)`;
+ }
+ return currentUpdateQuery;
+ };
+
+ // src/storage/sqlite/core/sqlBuilder/index.ts
+ var createCollection = (collectionName) => SQL4`
+ CREATE TABLE IF NOT EXISTS ${SQL4.identifier(collectionName)} (
+ _id TEXT PRIMARY KEY,
+ data JSON NOT NULL,
+ metadata JSON NOT NULL DEFAULT '{}',
+ _version INTEGER NOT NULL DEFAULT 1,
+ _partition TEXT NOT NULL DEFAULT 'png_global',
+ _archived INTEGER NOT NULL DEFAULT 0,
+ _created TEXT NOT NULL DEFAULT (datetime('now')),
+ _updated TEXT NOT NULL DEFAULT (datetime('now'))
+ )`;
+ var pongoCollectionSQLiteMigrations = (collectionName) => [
+ sqlMigration(`pongoCollection:${collectionName}:001:createtable`, [
+ createCollection(collectionName)
+ ])
+ ];
+ var sqliteSQLBuilder = (collectionName) => ({
+ createCollection: () => createCollection(collectionName),
+ insertOne: (document) => {
+ const serialized = document;
+ const id = document._id;
+ const version = document._version ?? 1n;
+ return SQL4`
+ INSERT OR IGNORE INTO ${SQL4.identifier(collectionName)} (_id, data, _version)
+ VALUES (${id}, ${serialized}, ${version})
+ RETURNING _id;`;
+ },
+ insertMany: (documents) => {
+ const values = SQL4.merge(
+ documents.map(
+ (doc) => SQL4`(${doc._id}, ${JSONSerializer3.serialize(doc)}, ${doc._version ?? 1n})`
+ ),
+ ","
+ );
+ return SQL4`
+ INSERT OR IGNORE INTO ${SQL4.identifier(collectionName)} (_id, data, _version) VALUES ${values}
+ RETURNING _id;`;
+ },
+ updateOne: (filter, update, options) => {
+ const expectedVersion = expectedVersionValue(options?.expectedVersion);
+ const expectedVersionCheck = expectedVersion != null ? SQL4`AND _version = ${expectedVersion}` : SQL4``;
+ const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
+ const updateQuery = isSQL(update) ? update : buildUpdateQuery(update);
+ return SQL4`
+ UPDATE ${SQL4.identifier(collectionName)}
+ SET
+ data = json_patch(${updateQuery}, json_object('_id', _id, '_version', cast(_version + 1 as TEXT))),
+ _version = _version + 1,
+ _updated = datetime('now')
+ WHERE _id = (
+ SELECT _id FROM ${SQL4.identifier(collectionName)}
+ ${where(filterQuery)}
+ LIMIT 1
+ ) ${expectedVersionCheck}
+ RETURNING
+ _id,
+ cast(_version as TEXT) as version,
+ 1 as matched,
+ 1 as modified;`;
+ },
+ replaceOne: (filter, document, options) => {
+ const expectedVersion = expectedVersionValue(options?.expectedVersion);
+ const expectedVersionCheck = expectedVersion != null ? SQL4`AND _version = ${expectedVersion}` : SQL4``;
+ const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
+ return SQL4`
+ UPDATE ${SQL4.identifier(collectionName)}
+ SET
+ data = json_patch(${JSONSerializer3.serialize(document)}, json_object('_id', _id, '_version', cast(_version + 1 as TEXT))),
+ _version = _version + 1,
+ _updated = datetime('now')
+ WHERE _id = (
+ SELECT _id FROM ${SQL4.identifier(collectionName)}
+ ${where(filterQuery)}
+ LIMIT 1
+ ) ${expectedVersionCheck}
+ RETURNING
+ _id,
+ cast(_version as TEXT) AS version,
+ 1 AS matched,
+ 1 AS modified;`;
+ },
+ updateMany: (filter, update) => {
+ const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
+ const updateQuery = isSQL(update) ? update : buildUpdateQuery(update);
+ return SQL4`
+ UPDATE ${SQL4.identifier(collectionName)}
+ SET
+ data = json_patch(${updateQuery}, json_object('_version', cast(_version + 1 as TEXT))),
+ _version = _version + 1,
+ _updated = datetime('now')
+ ${where(filterQuery)}
+ RETURNING _id;`;
+ },
+ deleteOne: (filter, options) => {
+ const expectedVersion = expectedVersionValue(options?.expectedVersion);
+ const expectedVersionCheck = expectedVersion != null ? SQL4`AND _version = ${expectedVersion}` : SQL4``;
+ const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
+ return SQL4`
+ DELETE FROM ${SQL4.identifier(collectionName)}
+ WHERE _id = (
+ SELECT _id FROM ${SQL4.identifier(collectionName)}
+ ${where(filterQuery)}
+ LIMIT 1
+ ) ${expectedVersionCheck}
+ RETURNING
+ _id,
+ 1 AS matched,
+ 1 AS deleted;`;
+ },
+ deleteMany: (filter) => {
+ const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
+ return SQL4`DELETE FROM ${SQL4.identifier(collectionName)} ${where(filterQuery)} RETURNING _id`;
+ },
+ findOne: (filter) => {
+ const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
+ return SQL4`SELECT data FROM ${SQL4.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;
+ },
+ find: (filter, options) => {
+ const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
+ const query = [];
+ query.push(SQL4`SELECT data FROM ${SQL4.identifier(collectionName)}`);
+ query.push(where(filterQuery));
+ if (options?.limit) {
+ query.push(SQL4`LIMIT ${options.limit}`);
+ }
+ if (options?.skip) {
+ query.push(SQL4`OFFSET ${options.skip}`);
+ }
+ return SQL4.merge([...query, SQL4`;`]);
+ },
+ countDocuments: (filter) => {
+ const filterQuery = SQL4.check.isSQL(filter) ? filter : constructFilterQuery(filter);
+ return SQL4`SELECT COUNT(1) as count FROM ${SQL4.identifier(collectionName)} ${where(filterQuery)};`;
+ },
+ rename: (newName) => SQL4`ALTER TABLE ${SQL4.identifier(collectionName)} RENAME TO ${SQL4.identifier(newName)};`,
+ drop: (targetName = collectionName) => SQL4`DROP TABLE IF EXISTS ${SQL4.identifier(targetName)}`
+ });
+ var where = (filterQuery) => SQL4.check.isEmpty(filterQuery) ? SQL4.EMPTY : SQL4.merge([SQL4`WHERE `, filterQuery]);
+
+ export {
+ pongoCollectionSQLiteMigrations,
+ sqliteSQLBuilder
+ };
+ //# sourceMappingURL=chunk-Y7LRKJLJ.js.map
package/dist/chunk-Y7LRKJLJ.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/storage/sqlite/core/sqlBuilder/index.ts","../src/storage/sqlite/core/sqlBuilder/filter/index.ts","../src/storage/sqlite/core/sqlBuilder/filter/queryOperators.ts","../src/storage/sqlite/core/sqlBuilder/update/index.ts"],"sourcesContent":["import {\n isSQL,\n JSONSerializer,\n SQL,\n sqlMigration,\n} from '@event-driven-io/dumbo';\nimport {\n expectedVersionValue,\n type DeleteOneOptions,\n type FindOptions,\n type OptionalUnlessRequiredIdAndVersion,\n type PongoCollectionSQLBuilder,\n type PongoFilter,\n type PongoUpdate,\n type ReplaceOneOptions,\n type UpdateOneOptions,\n type WithoutId,\n} from '../../../../core';\nimport { constructFilterQuery } from './filter';\nimport { buildUpdateQuery } from './update';\n\nconst createCollection = (collectionName: string): SQL =>\n SQL`\n CREATE TABLE IF NOT EXISTS ${SQL.identifier(collectionName)} (\n _id TEXT PRIMARY KEY,\n data JSON NOT NULL,\n metadata JSON NOT NULL DEFAULT '{}',\n _version INTEGER NOT NULL DEFAULT 1,\n _partition TEXT NOT NULL DEFAULT 'png_global',\n _archived INTEGER NOT NULL DEFAULT 0,\n _created TEXT NOT NULL DEFAULT (datetime('now')),\n _updated TEXT NOT NULL DEFAULT (datetime('now'))\n )`;\n\nexport const pongoCollectionSQLiteMigrations = (collectionName: string) => [\n sqlMigration(`pongoCollection:${collectionName}:001:createtable`, [\n createCollection(collectionName),\n ]),\n];\n\nexport const sqliteSQLBuilder = (\n collectionName: string,\n): PongoCollectionSQLBuilder => ({\n createCollection: (): SQL => createCollection(collectionName),\n insertOne: <T>(document: OptionalUnlessRequiredIdAndVersion<T>): SQL => {\n const serialized = document;\n const id = document._id;\n const version = document._version ?? 1n;\n\n return SQL`\n INSERT OR IGNORE INTO ${SQL.identifier(collectionName)} (_id, data, _version)\n VALUES (${id}, ${serialized}, ${version})\n RETURNING _id;`;\n },\n insertMany: <T>(documents: OptionalUnlessRequiredIdAndVersion<T>[]): SQL => {\n const values = SQL.merge(\n documents.map(\n (doc) =>\n SQL`(${doc._id}, ${JSONSerializer.serialize(doc)}, ${doc._version ?? 1n})`,\n ),\n ',',\n );\n\n return SQL`\n INSERT OR IGNORE INTO ${SQL.identifier(collectionName)} (_id, data, _version) VALUES ${values}\n RETURNING _id;`;\n },\n updateOne: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n options?: UpdateOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionCheck =\n expectedVersion != null ? SQL`AND _version = ${expectedVersion}` : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? update : buildUpdateQuery(update);\n\n return SQL`\n UPDATE ${SQL.identifier(collectionName)}\n SET\n data = json_patch(${updateQuery}, json_object('_id', _id, '_version', cast(_version + 1 as TEXT))),\n _version = _version + 1,\n _updated = datetime('now')\n WHERE _id = (\n SELECT _id FROM ${SQL.identifier(collectionName)}\n ${where(filterQuery)}\n LIMIT 1\n ) ${expectedVersionCheck}\n RETURNING\n _id,\n cast(_version as TEXT) as version,\n 1 as matched,\n 1 as modified;`;\n },\n replaceOne: <T>(\n filter: PongoFilter<T> | SQL,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionCheck =\n expectedVersion != null ? SQL`AND _version = ${expectedVersion}` : SQL``;\n\n const filterQuery = isSQL(filter) ? 
filter : constructFilterQuery(filter);\n\n return SQL`\n UPDATE ${SQL.identifier(collectionName)}\n SET\n data = json_patch(${JSONSerializer.serialize(document)}, json_object('_id', _id, '_version', cast(_version + 1 as TEXT))),\n _version = _version + 1,\n _updated = datetime('now')\n WHERE _id = (\n SELECT _id FROM ${SQL.identifier(collectionName)}\n ${where(filterQuery)}\n LIMIT 1\n ) ${expectedVersionCheck}\n RETURNING\n _id,\n cast(_version as TEXT) AS version,\n 1 AS matched,\n 1 AS modified;`;\n },\n updateMany: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n ): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? update : buildUpdateQuery(update);\n\n return SQL`\n UPDATE ${SQL.identifier(collectionName)}\n SET\n data = json_patch(${updateQuery}, json_object('_version', cast(_version + 1 as TEXT))),\n _version = _version + 1,\n _updated = datetime('now')\n ${where(filterQuery)}\n RETURNING _id;`;\n },\n deleteOne: <T>(\n filter: PongoFilter<T> | SQL,\n options?: DeleteOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionCheck =\n expectedVersion != null ? SQL`AND _version = ${expectedVersion}` : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`\n DELETE FROM ${SQL.identifier(collectionName)}\n WHERE _id = (\n SELECT _id FROM ${SQL.identifier(collectionName)}\n ${where(filterQuery)}\n LIMIT 1\n ) ${expectedVersionCheck}\n RETURNING\n _id,\n 1 AS matched,\n 1 AS deleted;`;\n },\n deleteMany: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`DELETE FROM ${SQL.identifier(collectionName)} ${where(filterQuery)} RETURNING _id`;\n },\n findOne: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`SELECT data FROM ${SQL.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;\n },\n find: <T>(filter: PongoFilter<T> | SQL, options?: FindOptions): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const query: SQL[] = [];\n\n query.push(SQL`SELECT data FROM ${SQL.identifier(collectionName)}`);\n\n query.push(where(filterQuery));\n\n if (options?.limit) {\n query.push(SQL`LIMIT ${options.limit}`);\n }\n\n if (options?.skip) {\n query.push(SQL`OFFSET ${options.skip}`);\n }\n\n return SQL.merge([...query, SQL`;`]);\n },\n countDocuments: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = SQL.check.isSQL(filter)\n ? filter\n : constructFilterQuery(filter);\n return SQL`SELECT COUNT(1) as count FROM ${SQL.identifier(collectionName)} ${where(filterQuery)};`;\n },\n rename: (newName: string): SQL =>\n SQL`ALTER TABLE ${SQL.identifier(collectionName)} RENAME TO ${SQL.identifier(newName)};`,\n drop: (targetName: string = collectionName): SQL =>\n SQL`DROP TABLE IF EXISTS ${SQL.identifier(targetName)}`,\n});\n\nconst where = (filterQuery: SQL): SQL =>\n SQL.check.isEmpty(filterQuery)\n ? 
SQL.EMPTY\n : SQL.merge([SQL`WHERE `, filterQuery]);\n","import { SQL } from '@event-driven-io/dumbo';\nimport {\n hasOperators,\n objectEntries,\n QueryOperators,\n type PongoFilter,\n} from '../../../../../core';\nimport { handleOperator } from './queryOperators';\n\nexport * from './queryOperators';\n\nconst AND = 'AND';\n\nexport const constructFilterQuery = <T>(filter: PongoFilter<T>): SQL =>\n SQL.merge(\n Object.entries(filter).map(([key, value]) =>\n isRecord(value)\n ? constructComplexFilterQuery(key, value)\n : handleOperator(key, '$eq', value),\n ),\n ` ${AND} `,\n );\n\nconst constructComplexFilterQuery = (\n key: string,\n value: Record<string, unknown>,\n): SQL => {\n const isEquality = !hasOperators(value);\n\n return SQL.merge(\n objectEntries(value).map(([nestedKey, val]) =>\n isEquality\n ? handleOperator(`${key}.${nestedKey}`, QueryOperators.$eq, val)\n : handleOperator(key, nestedKey, val),\n ),\n ` ${AND} `,\n );\n};\n\nconst isRecord = (value: unknown): value is Record<string, unknown> =>\n value !== null && typeof value === 'object' && !Array.isArray(value);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport { objectEntries, OperatorMap } from '../../../../../core';\n\nexport const handleOperator = (\n path: string,\n operator: string,\n value: unknown,\n): SQL => {\n if (path === '_id' || path === '_version') {\n return handleMetadataOperator(path, operator, value);\n }\n\n switch (operator) {\n case '$eq': {\n const jsonPath = buildJsonPath(path);\n\n return SQL`(\n json_extract(data, '${SQL.plain(jsonPath)}') = ${value}\n OR (\n json_type(data, '${SQL.plain(jsonPath)}') = 'array'\n AND EXISTS(\n SELECT 1 FROM json_each(data, '${SQL.plain(jsonPath)}')\n WHERE json_each.value = ${value}\n )\n )\n )`;\n }\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne': {\n const jsonPath = buildJsonPath(path);\n\n return SQL`json_extract(data, '${SQL.plain(jsonPath)}') ${SQL.plain(OperatorMap[operator])} ${value}`;\n }\n case '$in': {\n const jsonPath = buildJsonPath(path);\n const values = value as unknown[];\n const inClause = SQL.merge(\n values.map((v) => SQL`${v}`),\n ', ',\n );\n\n return SQL`json_extract(data, '${SQL.plain(jsonPath)}') IN (${inClause})`;\n }\n case '$nin': {\n const jsonPath = buildJsonPath(path);\n const values = value as unknown[];\n const inClause = SQL.merge(\n values.map((v) => SQL`${v}`),\n ', ',\n );\n\n return SQL`json_extract(data, '${SQL.plain(jsonPath)}') NOT IN (${inClause})`;\n }\n case '$elemMatch': {\n const subConditions = objectEntries(value as Record<string, unknown>)\n .map(([subKey, subValue]) => {\n const serializedValue = JSONSerializer.serialize(subValue);\n return `json_extract(value, '$.${subKey}') = json('${serializedValue}')`;\n })\n .join(' AND ');\n\n const jsonPath = buildJsonPath(path);\n return SQL`EXISTS(SELECT 1 FROM json_each(data, '${SQL.plain(jsonPath)}') WHERE ${SQL.plain(subConditions)})`;\n }\n case '$all': {\n const jsonPath = buildJsonPath(path);\n const serializedValue = JSONSerializer.serialize(value);\n\n return SQL`(SELECT COUNT(*) FROM json_each(json(${serializedValue})) WHERE json_each.value NOT IN (SELECT value FROM json_each(data, '${SQL.plain(jsonPath)}'))) = 0`;\n }\n case '$size': {\n const jsonPath = buildJsonPath(path);\n\n return SQL`json_array_length(json_extract(data, '${SQL.plain(jsonPath)}')) = ${value}`;\n }\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst handleMetadataOperator = (\n fieldName: string,\n operator: 
string,\n value: unknown,\n): SQL => {\n switch (operator) {\n case '$eq':\n return SQL`${SQL.plain(fieldName)} = ${value}`;\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne':\n return SQL`${SQL.plain(fieldName)} ${SQL.plain(OperatorMap[operator])} ${value}`;\n case '$in': {\n const values = value as unknown[];\n const inClause = SQL.merge(\n values.map((v) => SQL`${v}`),\n ', ',\n );\n return SQL`${SQL.plain(fieldName)} IN (${inClause})`;\n }\n case '$nin': {\n const values = value as unknown[];\n const inClause = SQL.merge(\n values.map((v) => SQL`${v}`),\n ', ',\n );\n return SQL`${SQL.plain(fieldName)} NOT IN (${inClause})`;\n }\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst buildJsonPath = (path: string): string => {\n return `$.${path}`;\n};\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport {\n objectEntries,\n type $inc,\n type $push,\n type $set,\n type $unset,\n type PongoUpdate,\n} from '../../../../../core';\n\nexport const buildUpdateQuery = <T>(update: PongoUpdate<T>): SQL =>\n objectEntries(update).reduce(\n (currentUpdateQuery, [op, value]) => {\n switch (op) {\n case '$set':\n return buildSetQuery(value, currentUpdateQuery);\n case '$unset':\n return buildUnsetQuery(value, currentUpdateQuery);\n case '$inc':\n return buildIncQuery(value, currentUpdateQuery);\n case '$push':\n return buildPushQuery(value, currentUpdateQuery);\n default:\n return currentUpdateQuery;\n }\n },\n SQL`data`,\n );\n\nexport const buildSetQuery = <T>(set: $set<T>, currentUpdateQuery: SQL): SQL =>\n SQL`json_patch(${currentUpdateQuery}, ${JSONSerializer.serialize(set)})`;\n\nexport const buildUnsetQuery = <T>(\n unset: $unset<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n const keys = Object.keys(unset);\n let query = currentUpdateQuery;\n for (const key of keys) {\n query = SQL`json_remove(${query}, '$.${SQL.plain(key)}')`;\n }\n return query;\n};\n\nexport const buildIncQuery = <T>(\n inc: $inc<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(inc)) {\n currentUpdateQuery =\n typeof value === 'bigint'\n ? 
SQL`json_set(${currentUpdateQuery}, '$.${SQL.plain(key)}', CAST((COALESCE(json_extract(${currentUpdateQuery}, '$.${SQL.plain(key)}'), 0) + ${value}) AS TEXT))`\n : SQL`json_set(${currentUpdateQuery}, '$.${SQL.plain(key)}', COALESCE(json_extract(${currentUpdateQuery}, '$.${SQL.plain(key)}'), 0) + ${value})`;\n }\n return currentUpdateQuery;\n};\n\nexport const buildPushQuery = <T>(\n push: $push<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(push)) {\n const serializedValue = JSONSerializer.serialize(value);\n currentUpdateQuery = SQL`json_set(${currentUpdateQuery}, '$.${SQL.plain(key)}', CASE\n WHEN json_type(json_extract(${currentUpdateQuery}, '$.${SQL.plain(key)}')) = 'array'\n THEN json_insert(json_extract(${currentUpdateQuery}, '$.${SQL.plain(key)}'), '$[#]', json(${serializedValue}))\n ELSE json_array(json(${serializedValue}))\n END)`;\n }\n return currentUpdateQuery;\n};\n"],"mappings":";;;;;;;;;AAAA;AAAA,EACE;AAAA,EACA,kBAAAA;AAAA,EACA,OAAAC;AAAA,EACA;AAAA,OACK;;;ACLP,SAAS,OAAAC,YAAW;;;ACApB,SAAS,gBAAgB,WAAW;AAG7B,IAAM,iBAAiB,CAC5B,MACA,UACA,UACQ;AACR,MAAI,SAAS,SAAS,SAAS,YAAY;AACzC,WAAO,uBAAuB,MAAM,UAAU,KAAK;AAAA,EACrD;AAEA,UAAQ,UAAU;AAAA,IAChB,KAAK,OAAO;AACV,YAAM,WAAW,cAAc,IAAI;AAEnC,aAAO;AAAA,8BACiB,IAAI,MAAM,QAAQ,CAAC,QAAQ,KAAK;AAAA;AAAA,6BAEjC,IAAI,MAAM,QAAQ,CAAC;AAAA;AAAA,6CAEH,IAAI,MAAM,QAAQ,CAAC;AAAA,sCAC1B,KAAK;AAAA;AAAA;AAAA;AAAA,IAIvC;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,OAAO;AACV,YAAM,WAAW,cAAc,IAAI;AAEnC,aAAO,0BAA0B,IAAI,MAAM,QAAQ,CAAC,MAAM,IAAI,MAAM,YAAY,QAAQ,CAAC,CAAC,IAAI,KAAK;AAAA,IACrG;AAAA,IACA,KAAK,OAAO;AACV,YAAM,WAAW,cAAc,IAAI;AACnC,YAAM,SAAS;AACf,YAAM,WAAW,IAAI;AAAA,QACnB,OAAO,IAAI,CAAC,MAAM,MAAM,CAAC,EAAE;AAAA,QAC3B;AAAA,MACF;AAEA,aAAO,0BAA0B,IAAI,MAAM,QAAQ,CAAC,UAAU,QAAQ;AAAA,IACxE;AAAA,IACA,KAAK,QAAQ;AACX,YAAM,WAAW,cAAc,IAAI;AACnC,YAAM,SAAS;AACf,YAAM,WAAW,IAAI;AAAA,QACnB,OAAO,IAAI,CAAC,MAAM,MAAM,CAAC,EAAE;AAAA,QAC3B;AAAA,MACF;AAEA,aAAO,0BAA0B,IAAI,MAAM,QAAQ,CAAC,cAAc,QAAQ;AAAA,IAC5E;AAAA,IACA,KAAK,cAAc;AACjB,YAAM,gBAAgB,cAAc,KAAgC,EACjE,IAAI,CAAC,CAAC,QAAQ,QAAQ,MAAM;AAC3B,cAAM,kBAAkB,eAAe,UAAU,QAAQ;AACzD,eAAO,0BAA0B,MAAM,cAAc,eAAe;AAAA,MACtE,CAAC,EACA,KAAK,OAAO;AAEf,YAAM,WAAW,cAAc,IAAI;AACnC,aAAO,4CAA4C,IAAI,MAAM,QAAQ,CAAC,YAAY,IAAI,MAAM,aAAa,CAAC;AAAA,IAC5G;AAAA,IACA,KAAK,QAAQ;AACX,YAAM,WAAW,cAAc,IAAI;AACnC,YAAM,kBAAkB,eAAe,UAAU,KAAK;AAEtD,aAAO,2CAA2C,eAAe,uEAAuE,IAAI,MAAM,QAAQ,CAAC;AAAA,IAC7J;AAAA,IACA,KAAK,SAAS;AACZ,YAAM,WAAW,cAAc,IAAI;AAEnC,aAAO,4CAA4C,IAAI,MAAM,QAAQ,CAAC,SAAS,KAAK;AAAA,IACtF;AAAA,IACA;AACE,YAAM,IAAI,MAAM,yBAAyB,QAAQ,EAAE;AAAA,EACvD;AACF;AAEA,IAAM,yBAAyB,CAC7B,WACA,UACA,UACQ;AACR,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,aAAO,MAAM,IAAI,MAAM,SAAS,CAAC,MAAM,KAAK;AAAA,IAC9C,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,MAAM,IAAI,MAAM,SAAS,CAAC,IAAI,IAAI,MAAM,YAAY,QAAQ,CAAC,CAAC,IAAI,KAAK;AAAA,IAChF,KAAK,OAAO;AACV,YAAM,SAAS;AACf,YAAM,WAAW,IAAI;AAAA,QACnB,OAAO,IAAI,CAAC,MAAM,MAAM,CAAC,EAAE;AAAA,QAC3B;AAAA,MACF;AACA,aAAO,MAAM,IAAI,MAAM,SAAS,CAAC,QAAQ,QAAQ;AAAA,IACnD;AAAA,IACA,KAAK,QAAQ;AACX,YAAM,SAAS;AACf,YAAM,WAAW,IAAI;AAAA,QACnB,OAAO,IAAI,CAAC,MAAM,MAAM,CAAC,EAAE;AAAA,QAC3B;AAAA,MACF;AACA,aAAO,MAAM,IAAI,MAAM,SAAS,CAAC,YAAY,QAAQ;AAAA,IACvD;AAAA,IACA;AACE,YAAM,IAAI,MAAM,yBAAyB,QAAQ,EAAE;AAAA,EACvD;AACF;AAEA,IAAM,gBAAgB,CAAC,SAAyB;AAC9C,SAAO,KAAK,IAAI;AAClB;;;AD7GA,IAAM,MAAM;AAEL,IAAM,uBAAuB,CAAI,WACtCC,KAAI;AAAA,EACF,OAAO,QAAQ,MAAM,EAAE;AAAA,IAAI,CAAC,CAAC,KAAK,KAAK,MACrC,SAAS,KAAK,IACV,4BAA4B,KAAK,KAAK,IACtC,eAAe,KAAK,OAAO,KAAK;AAAA,EACtC;AAAA,EACA,
IAAI,GAAG;AACT;AAEF,IAAM,8BAA8B,CAClC,KACA,UACQ;AACR,QAAM,aAAa,CAAC,aAAa,KAAK;AAEtC,SAAOA,KAAI;AAAA,IACT,cAAc,KAAK,EAAE;AAAA,MAAI,CAAC,CAAC,WAAW,GAAG,MACvC,aACI,eAAe,GAAG,GAAG,IAAI,SAAS,IAAI,eAAe,KAAK,GAAG,IAC7D,eAAe,KAAK,WAAW,GAAG;AAAA,IACxC;AAAA,IACA,IAAI,GAAG;AAAA,EACT;AACF;AAEA,IAAM,WAAW,CAAC,UAChB,UAAU,QAAQ,OAAO,UAAU,YAAY,CAAC,MAAM,QAAQ,KAAK;;;AExCrE,SAAS,kBAAAC,iBAAgB,OAAAC,YAAW;AAU7B,IAAM,mBAAmB,CAAI,WAClC,cAAc,MAAM,EAAE;AAAA,EACpB,CAAC,oBAAoB,CAAC,IAAI,KAAK,MAAM;AACnC,YAAQ,IAAI;AAAA,MACV,KAAK;AACH,eAAO,cAAc,OAAO,kBAAkB;AAAA,MAChD,KAAK;AACH,eAAO,gBAAgB,OAAO,kBAAkB;AAAA,MAClD,KAAK;AACH,eAAO,cAAc,OAAO,kBAAkB;AAAA,MAChD,KAAK;AACH,eAAO,eAAe,OAAO,kBAAkB;AAAA,MACjD;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA,EACAC;AACF;AAEK,IAAM,gBAAgB,CAAI,KAAc,uBAC7CA,kBAAiB,kBAAkB,KAAKC,gBAAe,UAAU,GAAG,CAAC;AAEhE,IAAM,kBAAkB,CAC7B,OACA,uBACQ;AACR,QAAM,OAAO,OAAO,KAAK,KAAK;AAC9B,MAAI,QAAQ;AACZ,aAAW,OAAO,MAAM;AACtB,YAAQD,mBAAkB,KAAK,QAAQA,KAAI,MAAM,GAAG,CAAC;AAAA,EACvD;AACA,SAAO;AACT;AAEO,IAAM,gBAAgB,CAC3B,KACA,uBACQ;AACR,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC9C,yBACE,OAAO,UAAU,WACbA,gBAAe,kBAAkB,QAAQA,KAAI,MAAM,GAAG,CAAC,kCAAkC,kBAAkB,QAAQA,KAAI,MAAM,GAAG,CAAC,YAAY,KAAK,gBAClJA,gBAAe,kBAAkB,QAAQA,KAAI,MAAM,GAAG,CAAC,4BAA4B,kBAAkB,QAAQA,KAAI,MAAM,GAAG,CAAC,YAAY,KAAK;AAAA,EACpJ;AACA,SAAO;AACT;AAEO,IAAM,iBAAiB,CAC5B,MACA,uBACQ;AACR,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,IAAI,GAAG;AAC/C,UAAM,kBAAkBC,gBAAe,UAAU,KAAK;AACtD,yBAAqBD,gBAAe,kBAAkB,QAAQA,KAAI,MAAM,GAAG,CAAC;AAAA,oCAC5C,kBAAkB,QAAQA,KAAI,MAAM,GAAG,CAAC;AAAA,sCACtC,kBAAkB,QAAQA,KAAI,MAAM,GAAG,CAAC,oBAAoB,eAAe;AAAA,6BACpF,eAAe;AAAA;AAAA,EAE1C;AACA,SAAO;AACT;;;AHjDA,IAAM,mBAAmB,CAAC,mBACxBE;AAAA,iCAC+BA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAWxD,IAAM,kCAAkC,CAAC,mBAA2B;AAAA,EACzE,aAAa,mBAAmB,cAAc,oBAAoB;AAAA,IAChE,iBAAiB,cAAc;AAAA,EACjC,CAAC;AACH;AAEO,IAAM,mBAAmB,CAC9B,oBAC+B;AAAA,EAC/B,kBAAkB,MAAW,iBAAiB,cAAc;AAAA,EAC5D,WAAW,CAAI,aAAyD;AACtE,UAAM,aAAa;AACnB,UAAM,KAAK,SAAS;AACpB,UAAM,UAAU,SAAS,YAAY;AAErC,WAAOA;AAAA,8BACmBA,KAAI,WAAW,cAAc,CAAC;AAAA,gBAC5C,EAAE,KAAK,UAAU,KAAK,OAAO;AAAA;AAAA,EAE3C;AAAA,EACA,YAAY,CAAI,cAA4D;AAC1E,UAAM,SAASA,KAAI;AAAA,MACjB,UAAU;AAAA,QACR,CAAC,QACCA,QAAO,IAAI,GAAG,KAAKC,gBAAe,UAAU,GAAG,CAAC,KAAK,IAAI,YAAY,EAAE;AAAA,MAC3E;AAAA,MACA;AAAA,IACF;AAEA,WAAOD;AAAA,8BACmBA,KAAI,WAAW,cAAc,CAAC,iCAAiC,MAAM;AAAA;AAAA,EAEjG;AAAA,EACA,WAAW,CACT,QACA,QACA,YACQ;AACR,UAAM,kBAAkB,qBAAqB,SAAS,eAAe;AACrE,UAAM,uBACJ,mBAAmB,OAAOA,sBAAqB,eAAe,KAAKA;AAErE,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AACxE,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,iBAAiB,MAAM;AAEpE,WAAOA;AAAA,eACIA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA,4BAEjB,WAAW;AAAA;AAAA;AAAA;AAAA,0BAIbA,KAAI,WAAW,cAAc,CAAC;AAAA,UAC9C,MAAM,WAAW,CAAC;AAAA;AAAA,UAElB,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5B;AAAA,EACA,YAAY,CACV,QACA,UACA,YACQ;AACR,UAAM,kBAAkB,qBAAqB,SAAS,eAAe;AACrE,UAAM,uBACJ,mBAAmB,OAAOA,sBAAqB,eAAe,KAAKA;AAErE,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AAExE,WAAOA;AAAA,eACIA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA,4BAEjBC,gBAAe,UAAU,QAAQ,CAAC;AAAA;AAAA;AAAA;AAAA,0BAIpCD,KAAI,WAAW,cAAc,CAAC;AAAA,UAC9C,MAAM,WAAW,CAAC;AAAA;AAAA,UAElB,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAM5B;AAAA,EACA,YAAY,CACV,QACA,WACQ;AACR,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AACxE,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,iBAAiB,MAAM;AAEpE,WAAOA;AAAA,eACIA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA,4BAEjB,WAAW;AAAA;AAAA;AAAA,QAG/B,MAAM,WAAW,CAAC;AAAA;AAAA,EAExB;AAAA,EACA,WAAW,CACT,QACA,YACQ;AACR,UAAM,kBAAkB,qBAAqB,SAAS,eAAe;AACrE,UAAM,uBACJ,mBAAmB,OAAOA,sBAAqB,eAAe,KAAKA;AAErE,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AAExE,WAAOA;AAAA,oBA
CSA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA,0BAExBA,KAAI,WAAW,cAAc,CAAC;AAAA,UAC9C,MAAM,WAAW,CAAC;AAAA;AAAA,UAElB,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA,EAK5B;AAAA,EACA,YAAY,CAAI,WAAsC;AACpD,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AAExE,WAAOA,mBAAkBA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA,EAC/E;AAAA,EACA,SAAS,CAAI,WAAsC;AACjD,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AAExE,WAAOA,wBAAuBA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA,EACpF;AAAA,EACA,MAAM,CAAI,QAA8B,YAA+B;AACrE,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AACxE,UAAM,QAAe,CAAC;AAEtB,UAAM,KAAKA,wBAAuBA,KAAI,WAAW,cAAc,CAAC,EAAE;AAElE,UAAM,KAAK,MAAM,WAAW,CAAC;AAE7B,QAAI,SAAS,OAAO;AAClB,YAAM,KAAKA,aAAY,QAAQ,KAAK,EAAE;AAAA,IACxC;AAEA,QAAI,SAAS,MAAM;AACjB,YAAM,KAAKA,cAAa,QAAQ,IAAI,EAAE;AAAA,IACxC;AAEA,WAAOA,KAAI,MAAM,CAAC,GAAG,OAAOA,OAAM,CAAC;AAAA,EACrC;AAAA,EACA,gBAAgB,CAAI,WAAsC;AACxD,UAAM,cAAcA,KAAI,MAAM,MAAM,MAAM,IACtC,SACA,qBAAqB,MAAM;AAC/B,WAAOA,qCAAoCA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA,EACjG;AAAA,EACA,QAAQ,CAAC,YACPA,mBAAkBA,KAAI,WAAW,cAAc,CAAC,cAAcA,KAAI,WAAW,OAAO,CAAC;AAAA,EACvF,MAAM,CAAC,aAAqB,mBAC1BA,4BAA2BA,KAAI,WAAW,UAAU,CAAC;AACzD;AAEA,IAAM,QAAQ,CAAC,gBACbA,KAAI,MAAM,QAAQ,WAAW,IACzBA,KAAI,QACJA,KAAI,MAAM,CAACA,cAAa,WAAW,CAAC;","names":["JSONSerializer","SQL","SQL","SQL","JSONSerializer","SQL","SQL","JSONSerializer","SQL","JSONSerializer"]}
package/dist/chunk-ZPWKWNK2.cjs
@@ -9,7 +9,7 @@
 
 
 
- var _chunkYLV7YIPZcjs = require('./chunk-YLV7YIPZ.cjs');
+ var _chunk4BL6YWLWcjs = require('./chunk-4BL6YWLW.cjs');
 
  // src/storage/postgresql/pg/index.ts
  var _dumbo = require('@event-driven-io/dumbo');
@@ -51,7 +51,7 @@ var handleOperator = (path, operator, value) => {
  case "$lte":
  case "$ne": {
  const jsonPath = _dumbo.SQL.plain(path.split(".").join(","));
- return _dumbo.SQL`data ->> '${jsonPath}' ${_dumbo.SQL.plain(_chunkYLV7YIPZcjs.OperatorMap[operator])} ${value}`;
+ return _dumbo.SQL`data ->> '${jsonPath}' ${_dumbo.SQL.plain(_chunk4BL6YWLWcjs.OperatorMap[operator])} ${value}`;
  }
  case "$in": {
  const jsonPath = `{${path.split(".").join(",")}}`;
@@ -62,7 +62,7 @@ var handleOperator = (path, operator, value) => {
  return _dumbo.SQL`data #>> ${jsonPath} NOT IN ${value}`;
  }
  case "$elemMatch": {
- const subQuery = _chunkYLV7YIPZcjs.objectEntries.call(void 0, value).map(
+ const subQuery = _chunk4BL6YWLWcjs.objectEntries.call(void 0, value).map(
  ([subKey, subValue]) => `@."${subKey}" == ${_dumbo.JSONSerializer.serialize(subValue)}`
  ).join(" && ");
  return _dumbo.SQL`jsonb_path_exists(data, '$.${_dumbo.SQL.plain(path)}[*] ? (${_dumbo.SQL.plain(subQuery)})')`;
@@ -90,7 +90,7 @@ var handleMetadataOperator = (fieldName, operator, value) => {
  case "$lt":
  case "$lte":
  case "$ne":
- return _dumbo.SQL`${_dumbo.SQL.plain(fieldName)} ${_dumbo.SQL.plain(_chunkYLV7YIPZcjs.OperatorMap[operator])} ${value}`;
+ return _dumbo.SQL`${_dumbo.SQL.plain(fieldName)} ${_dumbo.SQL.plain(_chunk4BL6YWLWcjs.OperatorMap[operator])} ${value}`;
  case "$in":
  return _dumbo.SQL`${_dumbo.SQL.plain(fieldName)} IN ${value}`;
  case "$nin":
@@ -110,10 +110,10 @@ var constructFilterQuery = (filter) => _dumbo.SQL.merge(
  ` ${AND} `
  );
  var constructComplexFilterQuery = (key, value) => {
- const isEquality = !_chunkYLV7YIPZcjs.hasOperators.call(void 0, value);
+ const isEquality = !_chunk4BL6YWLWcjs.hasOperators.call(void 0, value);
  return _dumbo.SQL.merge(
- _chunkYLV7YIPZcjs.objectEntries.call(void 0, value).map(
- ([nestedKey, val]) => isEquality ? handleOperator(`${key}.${nestedKey}`, _chunkYLV7YIPZcjs.QueryOperators.$eq, val) : handleOperator(key, nestedKey, val)
+ _chunk4BL6YWLWcjs.objectEntries.call(void 0, value).map(
+ ([nestedKey, val]) => isEquality ? handleOperator(`${key}.${nestedKey}`, _chunk4BL6YWLWcjs.QueryOperators.$eq, val) : handleOperator(key, nestedKey, val)
  ),
  ` ${AND} `
  );
@@ -122,7 +122,7 @@ var isRecord = (value) => value !== null && typeof value === "object" && !Array.
 
  // src/storage/postgresql/core/sqlBuilder/update/index.ts
 
- var buildUpdateQuery = (update) => _chunkYLV7YIPZcjs.objectEntries.call(void 0, update).reduce(
+ var buildUpdateQuery = (update) => _chunk4BL6YWLWcjs.objectEntries.call(void 0, update).reduce(
  (currentUpdateQuery, [op, value]) => {
  switch (op) {
  case "$set":
@@ -195,7 +195,7 @@ var postgresSQLBuilder = (collectionName) => ({
  RETURNING _id;`;
  },
  updateOne: (filter, update, options) => {
- const expectedVersion = _chunkYLV7YIPZcjs.expectedVersionValue.call(void 0, _optionalChain([options, 'optionalAccess', _ => _.expectedVersion]));
+ const expectedVersion = _chunk4BL6YWLWcjs.expectedVersionValue.call(void 0, _optionalChain([options, 'optionalAccess', _ => _.expectedVersion]));
  const expectedVersionUpdate = expectedVersion != null ? _dumbo.SQL`AND ${_dumbo.SQL.identifier(collectionName)}._version = ${expectedVersion}` : _dumbo.SQL``;
  const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter);
  const updateQuery = _dumbo.isSQL.call(void 0, update) ? update : buildUpdateQuery(update);
@@ -224,7 +224,7 @@ var postgresSQLBuilder = (collectionName) => ({
  ON existing._id = updated._id;`;
  },
  replaceOne: (filter, document, options) => {
- const expectedVersion = _chunkYLV7YIPZcjs.expectedVersionValue.call(void 0, _optionalChain([options, 'optionalAccess', _2 => _2.expectedVersion]));
+ const expectedVersion = _chunk4BL6YWLWcjs.expectedVersionValue.call(void 0, _optionalChain([options, 'optionalAccess', _2 => _2.expectedVersion]));
  const expectedVersionUpdate = expectedVersion != null ? _dumbo.SQL`AND ${_dumbo.SQL.identifier(collectionName)}._version = ${expectedVersion}` : _dumbo.SQL``;
  const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter);
  return _dumbo.SQL`
@@ -262,7 +262,7 @@ var postgresSQLBuilder = (collectionName) => ({
  ${where(filterQuery)};`;
  },
  deleteOne: (filter, options) => {
- const expectedVersion = _chunkYLV7YIPZcjs.expectedVersionValue.call(void 0, _optionalChain([options, 'optionalAccess', _3 => _3.expectedVersion]));
+ const expectedVersion = _chunk4BL6YWLWcjs.expectedVersionValue.call(void 0, _optionalChain([options, 'optionalAccess', _3 => _3.expectedVersion]));
  const expectedVersionUpdate = expectedVersion != null ? _dumbo.SQL`AND ${_dumbo.SQL.identifier(collectionName)}._version = ${expectedVersion}` : _dumbo.SQL``;
  const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter);
  return _dumbo.SQL`
@@ -320,16 +320,16 @@ var pgDatabaseDriver = {
  driverType: _pg.NodePostgresDriverType,
  databaseFactory: (options) => {
  const databaseName = _nullishCoalesce(options.databaseName, () => ( _pg.getDatabaseNameOrDefault.call(void 0, options.connectionString)));
- return _chunkYLV7YIPZcjs.PongoDatabase.call(void 0, {
+ return _chunk4BL6YWLWcjs.PongoDatabase.call(void 0, {
  ...options,
  pool: _dumbo.dumbo.call(void 0, {
  connectionString: options.connectionString,
  driver: _pg.pgDatabaseDriver,
  ...options.connectionOptions
  }),
- schemaComponent: _chunkYLV7YIPZcjs.PongoDatabaseSchemaComponent.call(void 0, {
+ schemaComponent: _chunk4BL6YWLWcjs.PongoDatabaseSchemaComponent.call(void 0, {
  driverType: _pg.NodePostgresDriverType,
- collectionFactory: (schema) => _chunkYLV7YIPZcjs.PongoCollectionSchemaComponent.call(void 0, {
+ collectionFactory: (schema) => _chunk4BL6YWLWcjs.PongoCollectionSchemaComponent.call(void 0, {
  driverType: _pg.NodePostgresDriverType,
  definition: schema,
  migrationsOrSchemaComponents: {
@@ -337,16 +337,18 @@ var pgDatabaseDriver = {
  },
  sqlBuilder: postgresSQLBuilder(schema.name)
  }),
- definition: _nullishCoalesce(_optionalChain([options, 'access', _6 => _6.schema, 'optionalAccess', _7 => _7.definition]), () => ( _chunkYLV7YIPZcjs.pongoSchema.db(databaseName, {})))
+ definition: _nullishCoalesce(_optionalChain([options, 'access', _6 => _6.schema, 'optionalAccess', _7 => _7.definition]), () => ( _chunk4BL6YWLWcjs.pongoSchema.db(databaseName, {})))
  }),
  databaseName
  });
  },
- getDatabaseNameOrDefault: _pg.getDatabaseNameOrDefault,
+ getDatabaseNameOrDefault: (options) => {
+ return _nullishCoalesce(options.databaseName, () => ( _pg.getDatabaseNameOrDefault.call(void 0, options.connectionString)));
+ },
  defaultConnectionString: "postgresql://localhost:5432/postgres"
  };
  var usePgDatabaseDriver = () => {
- _chunkYLV7YIPZcjs.pongoDatabaseDriverRegistry.register(
+ _chunk4BL6YWLWcjs.pongoDatabaseDriverRegistry.register(
  _pg.NodePostgresDriverType,
  pgDatabaseDriver
  );
@@ -359,4 +361,4 @@ usePgDatabaseDriver();
 
 
  exports.pongoCollectionPostgreSQLMigrations = pongoCollectionPostgreSQLMigrations; exports.postgresSQLBuilder = postgresSQLBuilder; exports.pgDatabaseDriver = pgDatabaseDriver; exports.usePgDatabaseDriver = usePgDatabaseDriver;
- //# sourceMappingURL=chunk-3KNMMQUV.cjs.map
+ //# sourceMappingURL=chunk-ZPWKWNK2.cjs.map
package/dist/chunk-ZPWKWNK2.cjs.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/chunk-ZPWKWNK2.cjs","../src/storage/postgresql/pg/index.ts","../src/storage/postgresql/core/sqlBuilder/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/queryOperators.ts","../src/storage/postgresql/core/sqlBuilder/update/index.ts"],"names":["SQL","JSONSerializer"],"mappings":"AAAA;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,wDAA6B;AAC7B;AACA;ACbA,+CAAsB;AACtB;AACE;AACA;AACA;AAAA,+CAEK;AACP,cAAe;ADcf;AACA;AEtBA;AACE;AACA;AACA;AACA;AAAA;AFyBF;AACA;AG9BA;AHgCA;AACA;AIjCA;AAGO,IAAM,eAAA,EAAiB,CAC5B,IAAA,EACA,QAAA,EACA,KAAA,EAAA,GACQ;AACR,EAAA,GAAA,CAAI,KAAA,IAAS,MAAA,GAAS,KAAA,IAAS,UAAA,EAAY;AACzC,IAAA,OAAO,sBAAA,CAAuB,IAAA,EAAM,QAAA,EAAU,KAAK,CAAA;AAAA,EACrD;AAEA,EAAA,OAAA,CAAQ,QAAA,EAAU;AAAA,IAChB,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,WAAA,EAAa,qBAAA,CAAe,SAAA;AAAA,QAChC,iBAAA,CAAkB,IAAA,EAAM,KAAK;AAAA,MAC/B,CAAA;AACA,MAAA,MAAM,gBAAA,EAAkB,qBAAA,CAAe,SAAA,CAAU,KAAK,CAAA;AAEtD,MAAA,OAAO,UAAA,CAAA,SAAA,EAAe,UAAU,CAAA,sCAAA,EAAyC,UAAA,CAAI,KAAA,CAAM,IAAI,CAAC,CAAA,YAAA,EAAe,UAAA,CAAI,KAAA,CAAM,eAAe,CAAC,CAAA,IAAA,CAAA;AAAA,IACnI;AAAA,IACA,KAAK,KAAA;AAAA,IACL,KAAK,MAAA;AAAA,IACL,KAAK,KAAA;AAAA,IACL,KAAK,MAAA;AAAA,IACL,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,SAAA,EAAW,UAAA,CAAI,KAAA,CAAM,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA,CAAE,IAAA,CAAK,GAAG,CAAC,CAAA;AAEpD,MAAA,OAAO,UAAA,CAAA,UAAA,EAAgB,QAAQ,CAAA,EAAA,EAAK,UAAA,CAAI,KAAA,CAAM,6BAAA,CAAY,QAAQ,CAAC,CAAC,CAAA,CAAA,EAAI,KAAK,CAAA,CAAA;AAC/E,IAAA;AACY,IAAA;AACoC,MAAA;AAES,MAAA;AACzD,IAAA;AACa,IAAA;AACmC,MAAA;AAEa,MAAA;AAC7D,IAAA;AACmB,IAAA;AAEd,MAAA;AAEgD,QAAA;AAErC,MAAA;AAC6D,MAAA;AAC7E,IAAA;AACa,IAAA;AACuB,MAAA;AACH,QAAA;AAC/B,MAAA;AAC+B,MAAA;AACjC,IAAA;AACc,IAAA;AACkC,MAAA;AAEc,MAAA;AAC9D,IAAA;AACA,IAAA;AACqD,MAAA;AACvD,EAAA;AACF;AAMU;AACU,EAAA;AACX,IAAA;AACyC,MAAA;AACzC,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AAC2E,MAAA;AAC3E,IAAA;AACuD,MAAA;AACvD,IAAA;AAC2D,MAAA;AAChE,IAAA;AACqD,MAAA;AACvD,EAAA;AACF;AASkB;AJOkE;AACA;AG5FxE;AAGN;AACqB,EAAA;AAGjB,IAAA;AACN,EAAA;AACO,EAAA;AACT;AAKQ;AAC8B,EAAA;AAE3B,EAAA;AACY,IAAA;AAEuB,MAAA;AAE5C,IAAA;AACO,IAAA;AACT,EAAA;AACF;AAGwD;AHiF4B;AACA;AK1HhD;AAWZ;AACiB,EAAA;AACvB,IAAA;AACL,MAAA;AAC2C,QAAA;AAC3C,MAAA;AAC6C,QAAA;AAC7C,MAAA;AAC2C,QAAA;AAC3C,MAAA;AAC4C,QAAA;AACjD,MAAA;AACS,QAAA;AACX,IAAA;AACF,EAAA;AACAA,EAAAA;AACF;AAG+BC;AAMD;AAOtB;AACwC,EAAA;AAGxB,IAAA;AAExB,EAAA;AACO,EAAA;AACT;AAKU;AACyC,EAAA;AACS,IAAA;AACoB,IAAA;AAC9E,EAAA;AACO,EAAA;AACT;AL8FoF;AACA;AEvIlFD;AAC6D,+BAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAAA;AAWgB;AACX,EAAA;AACjC,IAAA;AAChC,EAAA;AACH;AAIiC;AAC6B,EAAA;AACY,EAAA;AAClB,IAAA;AAChC,IAAA;AACiB,IAAA;AAE9BA,IAAAA;AACuC,kBAAA;AACL,cAAA;AAC3C,EAAA;AAC4E,EAAA;AACvD,IAAA;AACP,MAAA;AAEmD,QAAA;AAC7D,MAAA;AACA,MAAA;AACF,IAAA;AAEOA,IAAAA;AACwE,kBAAA;AAAM;AAAA,oBAAA;AAGvF,EAAA;AAKU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAGoD,IAAA;AACJ,IAAA;AAE7DA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIpB,eAAA;AAAA;AAE+B,iBAAA;AAAe;AAAA;AAGxB,cAAA;AACK,kBAAA;AAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAUvF,EAAA;AAKU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAGoD,IAAA;AAEjEA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIpB,eAAA;AAAA;AAEM,iBAAA;AAA+D;AAAA;AAG/C,cAAA;AACK,kBAAA;AAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAUvF,EAAA;AAIU,EAAA;AACgE,IAAA;AACJ,IAAA;AAE7DA,IAAAA;AACkC,aAAA;AAAA;AAEjB,eAAA;AAAA;AAEF,MAAA;AACxB,EAAA;AAIU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAGoD,IAAA;AAEjEA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIf,oBAAA;AAAA;AAEiB,cAAA;AACnB,kBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;A
AAA;AAAA;AAAA,oCAAA;AAShD,EAAA;AACsD,EAAA;AACoB,IAAA;AAEK,IAAA;AAC/E,EAAA;AACmD,EAAA;AACuB,IAAA;AAEF,IAAA;AACxE,EAAA;AACuE,EAAA;AACG,IAAA;AAClD,IAAA;AAE4C,IAAA;AAErC,IAAA;AAET,IAAA;AACoB,MAAA;AACxC,IAAA;AAEmB,IAAA;AACqB,MAAA;AACxC,IAAA;AAEmC,IAAA;AACrC,EAAA;AAC0D,EAAA;AAG/B,IAAA;AACoD,IAAA;AAC/E,EAAA;AAEkD,EAAA;AAEjB,EAAA;AACnC;AAKU;AFqF0E;AACA;AC5PhF;AACU,EAAA;AACkB,EAAA;AAGO,IAAA;AAEd,IAAA;AAChB,MAAA;AACS,MAAA;AACgB,QAAA;AAClB,QAAA;AACG,QAAA;AACZ,MAAA;AAC6C,MAAA;AAChC,QAAA;AAEqB,QAAA;AACjB,UAAA;AACA,UAAA;AACkB,UAAA;AAC+B,YAAA;AAC7D,UAAA;AAC0C,UAAA;AAC3C,QAAA;AAE4D,QAAA;AAChE,MAAA;AACD,MAAA;AACD,IAAA;AACH,EAAA;AACuC,EAAA;AAEsC,IAAA;AAE7E,EAAA;AACyB,EAAA;AAC3B;AAEyC;AACX,EAAA;AAC1B,IAAA;AACA,IAAA;AACF,EAAA;AACF;AAEoB;ADqPgE;AACA;AACA;AACA;AACA;AACA;AACA","file":"/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/chunk-ZPWKWNK2.cjs","sourcesContent":[null,"import { dumbo } from '@event-driven-io/dumbo';\nimport {\n pgDatabaseDriver as dumboDriver,\n getDatabaseNameOrDefault,\n NodePostgresDriverType,\n type NodePostgresConnection,\n} from '@event-driven-io/dumbo/pg';\nimport pg from 'pg';\nimport {\n PongoCollectionSchemaComponent,\n PongoDatabase,\n pongoDatabaseDriverRegistry,\n PongoDatabaseSchemaComponent,\n pongoSchema,\n type PongoDatabaseDriver,\n type PongoDatabaseDriverOptions,\n type PongoDb,\n} from '../../../core';\nimport {\n pongoCollectionPostgreSQLMigrations,\n postgresSQLBuilder,\n} from '../core';\n\nexport type NodePostgresPongoClientOptions =\n | PooledPongoClientOptions\n | NotPooledPongoOptions;\n\nexport type PooledPongoClientOptions =\n | {\n pool: pg.Pool;\n }\n | {\n pooled: true;\n }\n | {\n pool: pg.Pool;\n pooled: true;\n }\n | object;\n\nexport type NotPooledPongoOptions =\n | {\n client: pg.Client;\n }\n | {\n pooled: false;\n }\n | {\n client: pg.Client;\n pooled: false;\n }\n | {\n connection: NodePostgresConnection;\n pooled?: false;\n };\n\ntype NodePostgresDatabaseDriverOptions =\n PongoDatabaseDriverOptions<NodePostgresPongoClientOptions> & {\n databaseName?: string | undefined;\n connectionString: string;\n };\n\nconst pgDatabaseDriver: PongoDatabaseDriver<\n PongoDb<NodePostgresDriverType>,\n NodePostgresDatabaseDriverOptions\n> = {\n driverType: NodePostgresDriverType,\n databaseFactory: (options) => {\n const databaseName =\n options.databaseName ??\n getDatabaseNameOrDefault(options.connectionString);\n\n return PongoDatabase({\n ...options,\n pool: dumbo({\n connectionString: options.connectionString,\n driver: dumboDriver,\n ...options.connectionOptions,\n }),\n schemaComponent: PongoDatabaseSchemaComponent({\n driverType: NodePostgresDriverType,\n collectionFactory: (schema) =>\n PongoCollectionSchemaComponent({\n driverType: NodePostgresDriverType,\n definition: schema,\n migrationsOrSchemaComponents: {\n migrations: pongoCollectionPostgreSQLMigrations(schema.name),\n },\n sqlBuilder: postgresSQLBuilder(schema.name),\n }),\n definition:\n options.schema?.definition ?? pongoSchema.db(databaseName, {}),\n }),\n databaseName,\n });\n },\n getDatabaseNameOrDefault: (options) => {\n return (\n options.databaseName ?? 
getDatabaseNameOrDefault(options.connectionString)\n );\n },\n defaultConnectionString: 'postgresql://localhost:5432/postgres',\n};\n\nexport const usePgDatabaseDriver = () => {\n pongoDatabaseDriverRegistry.register(\n NodePostgresDriverType,\n pgDatabaseDriver,\n );\n};\n\nusePgDatabaseDriver();\n\nexport { pgDatabaseDriver as databaseDriver, pgDatabaseDriver as pgDriver };\n","import {\n isSQL,\n JSONSerializer,\n SQL,\n sqlMigration,\n} from '@event-driven-io/dumbo';\nimport {\n expectedVersionValue,\n type DeleteOneOptions,\n type FindOptions,\n type OptionalUnlessRequiredIdAndVersion,\n type PongoCollectionSQLBuilder,\n type PongoFilter,\n type PongoUpdate,\n type ReplaceOneOptions,\n type UpdateOneOptions,\n type WithoutId,\n} from '../../../../core';\nimport { constructFilterQuery } from './filter';\nimport { buildUpdateQuery } from './update';\n\nconst createCollection = (collectionName: string): SQL =>\n SQL`\n CREATE TABLE IF NOT EXISTS ${SQL.identifier(collectionName)} (\n _id TEXT PRIMARY KEY, \n data JSONB NOT NULL, \n metadata JSONB NOT NULL DEFAULT '{}',\n _version BIGINT NOT NULL DEFAULT 1,\n _partition TEXT NOT NULL DEFAULT 'png_global',\n _archived BOOLEAN NOT NULL DEFAULT FALSE,\n _created TIMESTAMPTZ NOT NULL DEFAULT now(),\n _updated TIMESTAMPTZ NOT NULL DEFAULT now()\n )`;\n\nexport const pongoCollectionPostgreSQLMigrations = (collectionName: string) => [\n sqlMigration(`pongoCollection:${collectionName}:001:createtable`, [\n createCollection(collectionName),\n ]),\n];\n\nexport const postgresSQLBuilder = (\n collectionName: string,\n): PongoCollectionSQLBuilder => ({\n createCollection: (): SQL => createCollection(collectionName),\n insertOne: <T>(document: OptionalUnlessRequiredIdAndVersion<T>): SQL => {\n const serialized = JSONSerializer.serialize(document);\n const id = document._id;\n const version = document._version ?? 1n;\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) \n VALUES (${id}, ${serialized}, ${version}) ON CONFLICT(_id) DO NOTHING;`;\n },\n insertMany: <T>(documents: OptionalUnlessRequiredIdAndVersion<T>[]): SQL => {\n const values = SQL.merge(\n documents.map(\n (doc) =>\n SQL`(${doc._id}, ${JSONSerializer.serialize(doc)}, ${doc._version ?? 1n})`,\n ),\n ',',\n );\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) VALUES ${values}\n ON CONFLICT(_id) DO NOTHING\n RETURNING _id;`;\n },\n updateOne: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n options?: UpdateOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? 
update : buildUpdateQuery(update);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n replaceOne: <T>(\n filter: PongoFilter<T> | SQL,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${JSONSerializer.serialize(document)} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n updateMany: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n ): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? update : buildUpdateQuery(update);\n\n return SQL`\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n ${where(filterQuery)};`;\n },\n deleteOne: <T>(\n filter: PongoFilter<T> | SQL,\n options?: DeleteOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? 
filter : constructFilterQuery(filter);\n\n return SQL`\n WITH existing AS (\n SELECT _id\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n deleted AS (\n DELETE FROM ${SQL.identifier(collectionName)}\n USING existing\n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id\n )\n SELECT \n existing._id,\n COUNT(existing._id) over() AS matched,\n COUNT(deleted._id) over() AS deleted\n FROM existing\n LEFT JOIN deleted \n ON existing._id = deleted._id;`;\n },\n deleteMany: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`DELETE FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}`;\n },\n findOne: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`SELECT data FROM ${SQL.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;\n },\n find: <T>(filter: PongoFilter<T> | SQL, options?: FindOptions): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const query: SQL[] = [];\n\n query.push(SQL`SELECT data FROM ${SQL.identifier(collectionName)}`);\n\n query.push(where(filterQuery));\n\n if (options?.limit) {\n query.push(SQL`LIMIT ${options.limit}`);\n }\n\n if (options?.skip) {\n query.push(SQL`OFFSET ${options.skip}`);\n }\n\n return SQL.merge([...query, SQL`;`]);\n },\n countDocuments: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = SQL.check.isSQL(filter)\n ? filter\n : constructFilterQuery(filter);\n return SQL`SELECT COUNT(1) as count FROM ${SQL.identifier(collectionName)} ${where(filterQuery)};`;\n },\n rename: (newName: string): SQL =>\n SQL`ALTER TABLE ${SQL.identifier(collectionName)} RENAME TO ${SQL.identifier(newName)};`,\n drop: (targetName: string = collectionName): SQL =>\n SQL`DROP TABLE IF EXISTS ${SQL.identifier(targetName)}`,\n});\n\nconst where = (filterQuery: SQL): SQL =>\n SQL.check.isEmpty(filterQuery)\n ? SQL.EMPTY\n : SQL.merge([SQL`WHERE `, filterQuery]);\n","import { SQL } from '@event-driven-io/dumbo';\nimport {\n hasOperators,\n objectEntries,\n QueryOperators,\n type PongoFilter,\n} from '../../../../../core';\nimport { handleOperator } from './queryOperators';\n\nexport * from './queryOperators';\n\nconst AND = 'AND';\n\nexport const constructFilterQuery = <T>(filter: PongoFilter<T>): SQL =>\n SQL.merge(\n Object.entries(filter).map(([key, value]) =>\n isRecord(value)\n ? constructComplexFilterQuery(key, value)\n : handleOperator(key, '$eq', value),\n ),\n ` ${AND} `,\n );\n\nconst constructComplexFilterQuery = (\n key: string,\n value: Record<string, unknown>,\n): SQL => {\n const isEquality = !hasOperators(value);\n\n return SQL.merge(\n objectEntries(value).map(([nestedKey, val]) =>\n isEquality\n ? 
handleOperator(`${key}.${nestedKey}`, QueryOperators.$eq, val)\n : handleOperator(key, nestedKey, val),\n ),\n ` ${AND} `,\n );\n};\n\nconst isRecord = (value: unknown): value is Record<string, unknown> =>\n value !== null && typeof value === 'object' && !Array.isArray(value);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport { objectEntries, OperatorMap } from '../../../../../core';\n\nexport const handleOperator = (\n path: string,\n operator: string,\n value: unknown,\n): SQL => {\n if (path === '_id' || path === '_version') {\n return handleMetadataOperator(path, operator, value);\n }\n\n switch (operator) {\n case '$eq': {\n const nestedPath = JSONSerializer.serialize(\n buildNestedObject(path, value),\n );\n const serializedValue = JSONSerializer.serialize(value);\n\n return SQL`(data @> ${nestedPath}::jsonb OR jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (@ == ${SQL.plain(serializedValue)})'))`;\n }\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne': {\n const jsonPath = SQL.plain(path.split('.').join(','));\n\n return SQL`data ->> '${jsonPath}' ${SQL.plain(OperatorMap[operator])} ${value}`;\n }\n case '$in': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} IN ${value as unknown[]}`;\n }\n case '$nin': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} NOT IN ${value as unknown[]}`;\n }\n case '$elemMatch': {\n const subQuery = objectEntries(value as Record<string, unknown>)\n .map(\n ([subKey, subValue]) =>\n `@.\"${subKey}\" == ${JSONSerializer.serialize(subValue)}`,\n )\n .join(' && ');\n return SQL`jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (${SQL.plain(subQuery)})')`;\n }\n case '$all': {\n const nestedPath = JSONSerializer.serialize(\n buildNestedObject(path, value),\n );\n return SQL`data @> ${nestedPath}::jsonb`;\n }\n case '$size': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`jsonb_array_length(data #> ${jsonPath}) = ${value}`;\n }\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst handleMetadataOperator = (\n fieldName: string,\n operator: string,\n value: unknown,\n): SQL => {\n switch (operator) {\n case '$eq':\n return SQL`${SQL.plain(fieldName)} = ${value}`;\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne':\n return SQL`${SQL.plain(fieldName)} ${SQL.plain(OperatorMap[operator])} ${value}`;\n case '$in':\n return SQL`${SQL.plain(fieldName)} IN ${value as unknown[]}`;\n case '$nin':\n return SQL`${SQL.plain(fieldName)} NOT IN ${value as unknown[]}`;\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst buildNestedObject = (\n path: string,\n value: unknown,\n): Record<string, unknown> =>\n path\n .split('.')\n .reverse()\n .reduce((acc, key) => ({ [key]: acc }), value as Record<string, unknown>);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport {\n objectEntries,\n type $inc,\n type $push,\n type $set,\n type $unset,\n type PongoUpdate,\n} from '../../../../../core';\n\nexport const buildUpdateQuery = <T>(update: PongoUpdate<T>): SQL =>\n objectEntries(update).reduce(\n (currentUpdateQuery, [op, value]) => {\n switch (op) {\n case '$set':\n return buildSetQuery(value, currentUpdateQuery);\n case '$unset':\n return buildUnsetQuery(value, currentUpdateQuery);\n case '$inc':\n return buildIncQuery(value, currentUpdateQuery);\n case '$push':\n return buildPushQuery(value, currentUpdateQuery);\n default:\n 
return currentUpdateQuery;\n }\n },\n SQL`data`,\n );\n\nexport const buildSetQuery = <T>(set: $set<T>, currentUpdateQuery: SQL): SQL =>\n SQL`${currentUpdateQuery} || ${JSONSerializer.serialize(set)}::jsonb`;\n\nexport const buildUnsetQuery = <T>(\n unset: $unset<T>,\n currentUpdateQuery: SQL,\n): SQL =>\n SQL`${currentUpdateQuery} - ${Object.keys(unset)\n .map((k) => `{${k}}`)\n .join(', ')}`;\n\nexport const buildIncQuery = <T>(\n inc: $inc<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(inc)) {\n currentUpdateQuery =\n typeof value === 'bigint'\n ? SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb((COALESCE((data->>'${SQL.plain(key)}')::BIGINT, 0) + ${value})::TEXT), true)`\n : SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb(COALESCE((data->>'${SQL.plain(key)}')::NUMERIC, 0) + ${value}), true)`;\n }\n return currentUpdateQuery;\n};\n\nexport const buildPushQuery = <T>(\n push: $push<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(push)) {\n const serializedValue = JSONSerializer.serialize([value]);\n currentUpdateQuery = SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', (coalesce(data->'${SQL.plain(key)}', '[]'::jsonb) || ${serializedValue}::jsonb), true)`;\n }\n return currentUpdateQuery;\n};\n"]}
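The embedded filter sources above (filter/index.ts and queryOperators.ts) translate Mongo-style filters into PostgreSQL JSONB predicates: top-level keys are joined with AND, `_id`/`_version` hit the dedicated columns directly, and an `$eq` on a dotted path becomes a containment check built from a nested object. A minimal standalone sketch of that reduction, mirroring the buildNestedObject helper in the source (the path and value below are hypothetical examples, not taken from the package):

// Same fold as buildNestedObject in the embedded queryOperators.ts source:
// 'address.city' + 'Oslo'  =>  { address: { city: 'Oslo' } }
const buildNestedObject = (
  path: string,
  value: unknown,
): Record<string, unknown> =>
  path
    .split('.')
    .reverse()
    .reduce((acc, key) => ({ [key]: acc }), value as Record<string, unknown>);

// Serialized, this becomes the right-hand side of the containment predicate
// data @> '{"address":{"city":"Oslo"}}'::jsonb, with a jsonb_path_exists
// fallback so equality also matches elements inside arrays.
console.log(JSON.stringify(buildNestedObject('address.city', 'Oslo')));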
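buildUpdateQuery in the embedded update.ts source folds the update operators over an initial SQL`data` expression: `$set` appends a shallow `||` merge, `$unset` subtracts keys, and `$inc`/`$push` wrap the running expression in jsonb_set. The sketch below is only illustrative of the SET expression that fold yields for a combined `$set` + `$inc`; the input document is hypothetical, and values are inlined for readability where the builder would bind them as parameters:

// Hypothetical input: { $set: { name: 'Anna' }, $inc: { visits: 1 } }
// Rough shape of the resulting `data = ...` expression:
const expectedSetExpression = `
  jsonb_set(
    data || '{"name":"Anna"}'::jsonb,                       -- $set: shallow merge
    '{visits}',
    to_jsonb(COALESCE((data->>'visits')::NUMERIC, 0) + 1),  -- $inc: default to 0, then add
    true                                                    -- create the key if missing
  )`;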
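updateOne, replaceOne, and deleteOne in the builder above use a pair of CTEs (existing plus updated/deleted) so a single round trip reports both matched and modified counts, and an optional expectedVersion narrows the mutation with an extra `_version = ...` predicate. A hypothetical usage sketch follows; the import path and the exact option shape are assumptions inferred from the embedded types, not the package's documented public API:

// Illustrative import path; in the published package this module sits under
// the internal storage sqlBuilder directory rather than a public entry point.
import { postgresSQLBuilder } from './sqlBuilder';

const users = postgresSQLBuilder('users');

// Only applies the $set if the stored _version is still 3; the returned row's
// matched/modified counts let the caller tell "not found" from "version conflict".
const sql = users.updateOne(
  { _id: 'user-123' },
  { $set: { status: 'active' } },
  { expectedVersion: 3n },
);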