@event-driven-io/pongo 0.17.0-beta.1 → 0.17.0-beta.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/dist/{chunk-DL4E3N6J.js → chunk-RBJRJFQY.js} +3 -7
  2. package/dist/chunk-RBJRJFQY.js.map +1 -0
  3. package/dist/chunk-TFE4LVVL.cjs +330 -0
  4. package/dist/chunk-TFE4LVVL.cjs.map +1 -0
  5. package/dist/{chunk-YLV7YIPZ.cjs → chunk-TP73JUIX.cjs} +3 -7
  6. package/dist/chunk-TP73JUIX.cjs.map +1 -0
  7. package/dist/chunk-XOVARYG4.js +330 -0
  8. package/dist/chunk-XOVARYG4.js.map +1 -0
  9. package/dist/cli.cjs +38 -22
  10. package/dist/cli.cjs.map +1 -1
  11. package/dist/cli.js +28 -12
  12. package/dist/cli.js.map +1 -1
  13. package/dist/cloudflare.cjs +53 -0
  14. package/dist/cloudflare.cjs.map +1 -0
  15. package/dist/cloudflare.d.cts +11 -0
  16. package/dist/cloudflare.d.ts +11 -0
  17. package/dist/cloudflare.js +53 -0
  18. package/dist/cloudflare.js.map +1 -0
  19. package/dist/index-BJopB-em.d.cts +7 -0
  20. package/dist/index-G5DECNb_.d.ts +7 -0
  21. package/dist/index.cjs +3 -4
  22. package/dist/index.cjs.map +1 -1
  23. package/dist/index.d.cts +4 -4
  24. package/dist/index.d.ts +4 -4
  25. package/dist/index.js +1 -2
  26. package/dist/pg.cjs +347 -4
  27. package/dist/pg.cjs.map +1 -1
  28. package/dist/pg.d.cts +10 -7
  29. package/dist/pg.d.ts +10 -7
  30. package/dist/pg.js +350 -7
  31. package/dist/pg.js.map +1 -1
  32. package/dist/{pongoCollectionSchemaComponent-BsHlVyN-.d.cts → pongoCollectionSchemaComponent-t_e9n2Wc.d.cts} +12 -8
  33. package/dist/{pongoCollectionSchemaComponent-BsHlVyN-.d.ts → pongoCollectionSchemaComponent-t_e9n2Wc.d.ts} +12 -8
  34. package/dist/shim.cjs +16 -12
  35. package/dist/shim.cjs.map +1 -1
  36. package/dist/shim.d.cts +4 -3
  37. package/dist/shim.d.ts +4 -3
  38. package/dist/shim.js +11 -7
  39. package/dist/shim.js.map +1 -1
  40. package/dist/sqlite3.cjs +15 -332
  41. package/dist/sqlite3.cjs.map +1 -1
  42. package/dist/sqlite3.d.cts +8 -7
  43. package/dist/sqlite3.d.ts +8 -7
  44. package/dist/sqlite3.js +8 -325
  45. package/dist/sqlite3.js.map +1 -1
  46. package/package.json +45 -13
  47. package/dist/chunk-3KNMMQUV.cjs +0 -362
  48. package/dist/chunk-3KNMMQUV.cjs.map +0 -1
  49. package/dist/chunk-5LN762VW.js +0 -362
  50. package/dist/chunk-5LN762VW.js.map +0 -1
  51. package/dist/chunk-7W6X4QGY.cjs +0 -10
  52. package/dist/chunk-7W6X4QGY.cjs.map +0 -1
  53. package/dist/chunk-DL4E3N6J.js.map +0 -1
  54. package/dist/chunk-IBJKZ6TS.js +0 -10
  55. package/dist/chunk-IBJKZ6TS.js.map +0 -1
  56. package/dist/chunk-YLV7YIPZ.cjs.map +0 -1
  57. package/dist/pg-WUYRNGST.js +0 -11
  58. package/dist/pg-WUYRNGST.js.map +0 -1
  59. package/dist/pg-XCWP4FAM.cjs +0 -11
  60. package/dist/pg-XCWP4FAM.cjs.map +0 -1
package/dist/cloudflare.js ADDED
@@ -0,0 +1,53 @@
+ import {
+ pongoCollectionSQLiteMigrations,
+ sqliteSQLBuilder
+ } from "./chunk-XOVARYG4.js";
+ import {
+ PongoCollectionSchemaComponent,
+ PongoDatabase,
+ PongoDatabaseSchemaComponent,
+ pongoDatabaseDriverRegistry,
+ pongoSchema
+ } from "./chunk-RBJRJFQY.js";
+
+ // src/storage/sqlite/d1/index.ts
+ import { D1DriverType, d1Pool } from "@event-driven-io/dumbo/cloudflare";
+ var d1DatabaseDriver = {
+ driverType: D1DriverType,
+ databaseFactory: (options) => {
+ const databaseName = "d1:default";
+ return PongoDatabase({
+ ...options,
+ pool: d1Pool(options),
+ schemaComponent: PongoDatabaseSchemaComponent({
+ driverType: D1DriverType,
+ collectionFactory: (schema) => PongoCollectionSchemaComponent({
+ driverType: D1DriverType,
+ definition: schema,
+ migrationsOrSchemaComponents: {
+ migrations: pongoCollectionSQLiteMigrations(schema.name)
+ },
+ sqlBuilder: sqliteSQLBuilder(schema.name)
+ }),
+ definition: options.schema?.definition ?? pongoSchema.db(databaseName, {})
+ }),
+ databaseName
+ });
+ },
+ getDatabaseNameOrDefault: () => {
+ return "d1://default";
+ },
+ defaultConnectionString: "d1://default"
+ };
+ var useSqlite3DatabaseDriver = () => {
+ pongoDatabaseDriverRegistry.register(D1DriverType, d1DatabaseDriver);
+ };
+ useSqlite3DatabaseDriver();
+ export {
+ d1DatabaseDriver as d1Driver,
+ d1DatabaseDriver as databaseDriver,
+ pongoCollectionSQLiteMigrations,
+ sqliteSQLBuilder,
+ useSqlite3DatabaseDriver
+ };
+ //# sourceMappingURL=cloudflare.js.map
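The added cloudflare.js entry point registers the Cloudflare D1 driver as an import side effect and re-exports it as d1Driver/databaseDriver. A minimal usage sketch follows; the '@event-driven-io/pongo/cloudflare' subpath and the exact option shape accepted by pongoClient are assumptions, since the package.json changes are not expanded in this hunk:

// Hypothetical usage of the new D1 entry point; subpath and option shape are assumed.
import { pongoClient } from '@event-driven-io/pongo';
import { d1Driver } from '@event-driven-io/pongo/cloudflare';

// Importing the subpath already ran useSqlite3DatabaseDriver(), so the driver is
// registered under D1DriverType in pongoDatabaseDriverRegistry before it is used.
const client = pongoClient({
  driver: d1Driver,
  connectionString: d1Driver.defaultConnectionString, // 'd1://default'
  // a real Cloudflare Workers setup would also pass the D1 binding (env.DB);
  // that field comes from dumbo's D1PoolOptions and is not shown in this diff
} as Parameters<typeof pongoClient>[0]);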
package/dist/cloudflare.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/storage/sqlite/d1/index.ts"],"sourcesContent":["import type { D1PoolOptions } from '@event-driven-io/dumbo/cloudflare';\nimport { D1DriverType, d1Pool } from '@event-driven-io/dumbo/cloudflare';\nimport {\n PongoCollectionSchemaComponent,\n PongoDatabase,\n pongoDatabaseDriverRegistry,\n PongoDatabaseSchemaComponent,\n pongoSchema,\n type PongoDatabaseDriver,\n type PongoDatabaseDriverOptions,\n type PongoDb,\n} from '../../../core';\nimport { pongoCollectionSQLiteMigrations, sqliteSQLBuilder } from '../core';\n\nexport type SQLitePongoClientOptions = object;\n\ntype D1DatabaseDriverOptions = PongoDatabaseDriverOptions<never> &\n D1PoolOptions;\n\nconst d1DatabaseDriver: PongoDatabaseDriver<\n PongoDb<D1DriverType>,\n D1DatabaseDriverOptions\n> = {\n driverType: D1DriverType,\n databaseFactory: (options) => {\n const databaseName = 'd1:default';\n\n return PongoDatabase({\n ...options,\n pool: d1Pool(options),\n schemaComponent: PongoDatabaseSchemaComponent({\n driverType: D1DriverType,\n collectionFactory: (schema) =>\n PongoCollectionSchemaComponent({\n driverType: D1DriverType,\n definition: schema,\n migrationsOrSchemaComponents: {\n migrations: pongoCollectionSQLiteMigrations(schema.name),\n },\n sqlBuilder: sqliteSQLBuilder(schema.name),\n }),\n definition:\n options.schema?.definition ?? pongoSchema.db(databaseName, {}),\n }),\n databaseName,\n });\n },\n getDatabaseNameOrDefault: () => {\n return 'd1://default';\n },\n defaultConnectionString: 'd1://default',\n};\n\nexport const useSqlite3DatabaseDriver = () => {\n pongoDatabaseDriverRegistry.register(D1DriverType, d1DatabaseDriver);\n};\n\nuseSqlite3DatabaseDriver();\n\nexport { d1DatabaseDriver as d1Driver, d1DatabaseDriver as databaseDriver };\n"],"mappings":";;;;;;;;;;;;;AACA,SAAS,cAAc,cAAc;AAkBrC,IAAM,mBAGF;AAAA,EACF,YAAY;AAAA,EACZ,iBAAiB,CAAC,YAAY;AAC5B,UAAM,eAAe;AAErB,WAAO,cAAc;AAAA,MACnB,GAAG;AAAA,MACH,MAAM,OAAO,OAAO;AAAA,MACpB,iBAAiB,6BAA6B;AAAA,QAC5C,YAAY;AAAA,QACZ,mBAAmB,CAAC,WAClB,+BAA+B;AAAA,UAC7B,YAAY;AAAA,UACZ,YAAY;AAAA,UACZ,8BAA8B;AAAA,YAC5B,YAAY,gCAAgC,OAAO,IAAI;AAAA,UACzD;AAAA,UACA,YAAY,iBAAiB,OAAO,IAAI;AAAA,QAC1C,CAAC;AAAA,QACH,YACE,QAAQ,QAAQ,cAAc,YAAY,GAAG,cAAc,CAAC,CAAC;AAAA,MACjE,CAAC;AAAA,MACD;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EACA,0BAA0B,MAAM;AAC9B,WAAO;AAAA,EACT;AAAA,EACA,yBAAyB;AAC3B;AAEO,IAAM,2BAA2B,MAAM;AAC5C,8BAA4B,SAAS,cAAc,gBAAgB;AACrE;AAEA,yBAAyB;","names":[]}
package/dist/index-BJopB-em.d.cts ADDED
@@ -0,0 +1,7 @@
+ import * as _event_driven_io_dumbo from '@event-driven-io/dumbo';
+ import { r as PongoCollectionSQLBuilder } from './pongoCollectionSchemaComponent-t_e9n2Wc.cjs';
+
+ declare const pongoCollectionSQLiteMigrations: (collectionName: string) => _event_driven_io_dumbo.SQLMigration[];
+ declare const sqliteSQLBuilder: (collectionName: string) => PongoCollectionSQLBuilder;
+
+ export { pongoCollectionSQLiteMigrations as p, sqliteSQLBuilder as s };
package/dist/index-G5DECNb_.d.ts ADDED
@@ -0,0 +1,7 @@
+ import * as _event_driven_io_dumbo from '@event-driven-io/dumbo';
+ import { r as PongoCollectionSQLBuilder } from './pongoCollectionSchemaComponent-t_e9n2Wc.js';
+
+ declare const pongoCollectionSQLiteMigrations: (collectionName: string) => _event_driven_io_dumbo.SQLMigration[];
+ declare const sqliteSQLBuilder: (collectionName: string) => PongoCollectionSQLBuilder;
+
+ export { pongoCollectionSQLiteMigrations as p, sqliteSQLBuilder as s };
package/dist/index.cjs CHANGED
@@ -1,4 +1,4 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true});require('./chunk-7W6X4QGY.cjs');
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});
 
 
 
@@ -34,8 +34,8 @@
 
 
 
+ var _chunkTP73JUIXcjs = require('./chunk-TP73JUIX.cjs');
 
- var _chunkYLV7YIPZcjs = require('./chunk-YLV7YIPZ.cjs');
 
 
 
@@ -71,6 +71,5 @@ var _chunkYLV7YIPZcjs = require('./chunk-YLV7YIPZ.cjs');
 
 
 
-
- exports.ConcurrencyError = _chunkYLV7YIPZcjs.ConcurrencyError; exports.DOCUMENT_DOES_NOT_EXIST = _chunkYLV7YIPZcjs.DOCUMENT_DOES_NOT_EXIST; exports.DOCUMENT_EXISTS = _chunkYLV7YIPZcjs.DOCUMENT_EXISTS; exports.NO_CONCURRENCY_CHECK = _chunkYLV7YIPZcjs.NO_CONCURRENCY_CHECK; exports.ObjectId = _chunkYLV7YIPZcjs.ObjectId; exports.OperatorMap = _chunkYLV7YIPZcjs.OperatorMap; exports.PongoCollectionSchemaComponent = _chunkYLV7YIPZcjs.PongoCollectionSchemaComponent; exports.PongoDatabase = _chunkYLV7YIPZcjs.PongoDatabase; exports.PongoDatabaseCache = _chunkYLV7YIPZcjs.PongoDatabaseCache; exports.PongoDatabaseDriverRegistry = _chunkYLV7YIPZcjs.PongoDatabaseDriverRegistry; exports.PongoDatabaseSchemaComponent = _chunkYLV7YIPZcjs.PongoDatabaseSchemaComponent; exports.PongoError = _chunkYLV7YIPZcjs.PongoError; exports.QueryOperators = _chunkYLV7YIPZcjs.QueryOperators; exports.deepEquals = _chunkYLV7YIPZcjs.deepEquals; exports.expectedVersion = _chunkYLV7YIPZcjs.expectedVersion; exports.expectedVersionValue = _chunkYLV7YIPZcjs.expectedVersionValue; exports.hasOperators = _chunkYLV7YIPZcjs.hasOperators; exports.isEquatable = _chunkYLV7YIPZcjs.isEquatable; exports.isGeneralExpectedDocumentVersion = _chunkYLV7YIPZcjs.isGeneralExpectedDocumentVersion; exports.isNumber = _chunkYLV7YIPZcjs.isNumber; exports.isOperator = _chunkYLV7YIPZcjs.isOperator; exports.isString = _chunkYLV7YIPZcjs.isString; exports.objectEntries = _chunkYLV7YIPZcjs.objectEntries; exports.operationResult = _chunkYLV7YIPZcjs.operationResult; exports.pongoClient = _chunkYLV7YIPZcjs.pongoClient; exports.pongoCollection = _chunkYLV7YIPZcjs.pongoCollection; exports.pongoDatabaseDriverRegistry = _chunkYLV7YIPZcjs.pongoDatabaseDriverRegistry; exports.pongoSchema = _chunkYLV7YIPZcjs.pongoSchema; exports.pongoSession = _chunkYLV7YIPZcjs.pongoSession; exports.pongoTransaction = _chunkYLV7YIPZcjs.pongoTransaction; exports.proxyClientWithSchema = _chunkYLV7YIPZcjs.proxyClientWithSchema; exports.proxyPongoDbWithSchema = _chunkYLV7YIPZcjs.proxyPongoDbWithSchema; exports.toClientSchemaMetadata = _chunkYLV7YIPZcjs.toClientSchemaMetadata; exports.toDbSchemaMetadata = _chunkYLV7YIPZcjs.toDbSchemaMetadata; exports.transactionExecutorOrDefault = _chunkYLV7YIPZcjs.transactionExecutorOrDefault;
+ exports.ConcurrencyError = _chunkTP73JUIXcjs.ConcurrencyError; exports.DOCUMENT_DOES_NOT_EXIST = _chunkTP73JUIXcjs.DOCUMENT_DOES_NOT_EXIST; exports.DOCUMENT_EXISTS = _chunkTP73JUIXcjs.DOCUMENT_EXISTS; exports.NO_CONCURRENCY_CHECK = _chunkTP73JUIXcjs.NO_CONCURRENCY_CHECK; exports.ObjectId = _chunkTP73JUIXcjs.ObjectId; exports.OperatorMap = _chunkTP73JUIXcjs.OperatorMap; exports.PongoCollectionSchemaComponent = _chunkTP73JUIXcjs.PongoCollectionSchemaComponent; exports.PongoDatabase = _chunkTP73JUIXcjs.PongoDatabase; exports.PongoDatabaseCache = _chunkTP73JUIXcjs.PongoDatabaseCache; exports.PongoDatabaseDriverRegistry = _chunkTP73JUIXcjs.PongoDatabaseDriverRegistry; exports.PongoDatabaseSchemaComponent = _chunkTP73JUIXcjs.PongoDatabaseSchemaComponent; exports.PongoError = _chunkTP73JUIXcjs.PongoError; exports.QueryOperators = _chunkTP73JUIXcjs.QueryOperators; exports.deepEquals = _chunkTP73JUIXcjs.deepEquals; exports.expectedVersion = _chunkTP73JUIXcjs.expectedVersion; exports.expectedVersionValue = _chunkTP73JUIXcjs.expectedVersionValue; exports.hasOperators = _chunkTP73JUIXcjs.hasOperators; exports.isEquatable = _chunkTP73JUIXcjs.isEquatable; exports.isGeneralExpectedDocumentVersion = _chunkTP73JUIXcjs.isGeneralExpectedDocumentVersion; exports.isNumber = _chunkTP73JUIXcjs.isNumber; exports.isOperator = _chunkTP73JUIXcjs.isOperator; exports.isString = _chunkTP73JUIXcjs.isString; exports.objectEntries = _chunkTP73JUIXcjs.objectEntries; exports.operationResult = _chunkTP73JUIXcjs.operationResult; exports.pongoClient = _chunkTP73JUIXcjs.pongoClient; exports.pongoCollection = _chunkTP73JUIXcjs.pongoCollection; exports.pongoDatabaseDriverRegistry = _chunkTP73JUIXcjs.pongoDatabaseDriverRegistry; exports.pongoSchema = _chunkTP73JUIXcjs.pongoSchema; exports.pongoSession = _chunkTP73JUIXcjs.pongoSession; exports.pongoTransaction = _chunkTP73JUIXcjs.pongoTransaction; exports.proxyClientWithSchema = _chunkTP73JUIXcjs.proxyClientWithSchema; exports.proxyPongoDbWithSchema = _chunkTP73JUIXcjs.proxyPongoDbWithSchema; exports.toClientSchemaMetadata = _chunkTP73JUIXcjs.toClientSchemaMetadata; exports.toDbSchemaMetadata = _chunkTP73JUIXcjs.toDbSchemaMetadata; exports.transactionExecutorOrDefault = _chunkTP73JUIXcjs.transactionExecutorOrDefault;
  //# sourceMappingURL=index.cjs.map
package/dist/index.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/index.cjs"],"names":[],"mappings":"AAAA,yGAA6B;AAC7B;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,wDAA6B;AAC7B;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,wtEAAC","file":"/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/index.cjs"}
+ {"version":3,"sources":["/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/index.cjs"],"names":[],"mappings":"AAAA;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,wDAA6B;AAC7B;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,wtEAAC","file":"/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/index.cjs"}
package/dist/index.d.cts CHANGED
@@ -1,6 +1,6 @@
- import { DatabaseDriverType, Dumbo, MigrationStyle, SQLExecutor, DatabaseConnectionString, InferDriverDatabaseType } from '@event-driven-io/dumbo';
- import { P as PongoDb, a as PongoCollectionSchemaComponent, C as CollectionOperationOptions, b as PongoDocument, c as PongoCollection, d as PongoClientSchema, e as PongoDatabaseDriver, f as PongoCollectionSchema, g as PongoDatabaseFactoryOptions, h as PongoDatabaseSchemaComponent, i as PongoDbSchema, A as AnyPongoDb, j as AnyPongoDatabaseDriver, k as PongoClientOptions, l as PongoClient, E as ExtractPongoDatabaseTypeFromDriver, m as PongoClientWithSchema, n as PongoTransactionOptions, o as PongoSession, p as PongoDbTransaction } from './pongoCollectionSchemaComponent-BsHlVyN-.cjs';
- export { aq as $inc, ar as $push, ao as $set, ap as $unset, aj as AlternativeType, z as AnyPongoDatabaseDriverOptions, G as CollectionsMap, ak as Condition, I as DBsMap, aw as DOCUMENT_DOES_NOT_EXIST, av as DOCUMENT_EXISTS, _ as DeleteManyOptions, Z as DeleteOneOptions, ae as Document, aL as DocumentHandler, a3 as EnhancedOmit, au as ExpectedDocumentVersion, as as ExpectedDocumentVersionGeneral, at as ExpectedDocumentVersionValue, B as ExtractPongoDatabaseDriverOptions, $ as FindOptions, X as HandleOptions, a1 as HasId, a2 as InferIdType, U as InsertManyOptions, T as InsertOneOptions, ax as NO_CONCURRENCY_CHECK, ai as NonObjectIdLikeDocument, a0 as ObjectId, ah as ObjectIdLike, aC as OperationResult, af as OptionalId, a4 as OptionalUnlessRequiredId, a6 as OptionalUnlessRequiredIdAndVersion, a5 as OptionalUnlessRequiredVersion, ag as OptionalVersion, O as PongoClientSchemaMetadata, q as PongoCollectionSQLBuilder, t as PongoCollectionSchemaComponentOptions, M as PongoCollectionSchemaMetadata, s as PongoCollectionURN, r as PongoCollectionURNType, y as PongoDatabaseDriverOptions, D as PongoDatabaseDriverRegistry, x as PongoDatabaseSQLBuilder, w as PongoDatabaseSchemaComponentOptions, v as PongoDatabaseURN, u as PongoDatabaseURNType, N as PongoDbSchemaMetadata, H as PongoDbWithSchema, aJ as PongoDeleteManyResult, aI as PongoDeleteResult, al as PongoFilter, an as PongoFilterOperator, aK as PongoHandleResult, aF as PongoInsertManyResult, aE as PongoInsertOneResult, S as PongoSchemaConfig, aB as PongoUpdate, aH as PongoUpdateManyResult, aG as PongoUpdateResult, ad as RegExpOrString, Y as ReplaceOneOptions, am as RootFilterOperators, W as UpdateManyOptions, V as UpdateOneOptions, a7 as WithId, ab as WithIdAndVersion, a9 as WithVersion, a8 as WithoutId, ac as WithoutIdAndVersion, aa as WithoutVersion, aA as expectedVersion, az as expectedVersionValue, ay as isGeneralExpectedDocumentVersion, aD as operationResult, F as pongoDatabaseDriverRegistry, J as pongoSchema, L as proxyClientWithSchema, K as proxyPongoDbWithSchema, R as toClientSchemaMetadata, Q as toDbSchemaMetadata } from './pongoCollectionSchemaComponent-BsHlVyN-.cjs';
+ import { DatabaseDriverType, Dumbo, MigrationStyle, SQLExecutor } from '@event-driven-io/dumbo';
+ import { a as PongoDb, c as PongoCollectionSchemaComponent, C as CollectionOperationOptions, d as PongoDocument, e as PongoCollection, f as PongoClientSchema, P as PongoDatabaseDriver, g as PongoCollectionSchema, h as PongoDatabaseFactoryOptions, i as PongoDatabaseSchemaComponent, j as PongoDbSchema, A as AnyPongoDb, k as AnyPongoDatabaseDriver, l as PongoClientOptions, m as PongoClient, E as ExtractPongoDatabaseTypeFromDriver, n as PongoClientWithSchema, o as PongoTransactionOptions, p as PongoSession, q as PongoDbTransaction } from './pongoCollectionSchemaComponent-t_e9n2Wc.cjs';
+ export { as as $inc, at as $push, aq as $set, ar as $unset, al as AlternativeType, z as AnyPongoDatabaseDriverOptions, I as CollectionsMap, am as Condition, K as DBsMap, ay as DOCUMENT_DOES_NOT_EXIST, ax as DOCUMENT_EXISTS, B as DatabaseDriverOptionsWithConnectionString, D as DatabaseDriverOptionsWithDatabaseName, a0 as DeleteManyOptions, $ as DeleteOneOptions, ag as Document, aN as DocumentHandler, a5 as EnhancedOmit, aw as ExpectedDocumentVersion, au as ExpectedDocumentVersionGeneral, av as ExpectedDocumentVersionValue, F as ExtractPongoDatabaseDriverOptions, a1 as FindOptions, Z as HandleOptions, a3 as HasId, a4 as InferIdType, W as InsertManyOptions, V as InsertOneOptions, az as NO_CONCURRENCY_CHECK, ak as NonObjectIdLikeDocument, a2 as ObjectId, aj as ObjectIdLike, aE as OperationResult, ah as OptionalId, a6 as OptionalUnlessRequiredId, a8 as OptionalUnlessRequiredIdAndVersion, a7 as OptionalUnlessRequiredVersion, ai as OptionalVersion, R as PongoClientSchemaMetadata, r as PongoCollectionSQLBuilder, u as PongoCollectionSchemaComponentOptions, O as PongoCollectionSchemaMetadata, t as PongoCollectionURN, s as PongoCollectionURNType, b as PongoDatabaseDriverOptions, G as PongoDatabaseDriverRegistry, y as PongoDatabaseSQLBuilder, x as PongoDatabaseSchemaComponentOptions, w as PongoDatabaseURN, v as PongoDatabaseURNType, Q as PongoDbSchemaMetadata, J as PongoDbWithSchema, aL as PongoDeleteManyResult, aK as PongoDeleteResult, an as PongoFilter, ap as PongoFilterOperator, aM as PongoHandleResult, aH as PongoInsertManyResult, aG as PongoInsertOneResult, U as PongoSchemaConfig, aD as PongoUpdate, aJ as PongoUpdateManyResult, aI as PongoUpdateResult, af as RegExpOrString, _ as ReplaceOneOptions, ao as RootFilterOperators, Y as UpdateManyOptions, X as UpdateOneOptions, a9 as WithId, ad as WithIdAndVersion, ab as WithVersion, aa as WithoutId, ae as WithoutIdAndVersion, ac as WithoutVersion, aC as expectedVersion, aB as expectedVersionValue, aA as isGeneralExpectedDocumentVersion, aF as operationResult, H as pongoDatabaseDriverRegistry, L as pongoSchema, N as proxyClientWithSchema, M as proxyPongoDbWithSchema, T as toClientSchemaMetadata, S as toDbSchemaMetadata } from './pongoCollectionSchemaComponent-t_e9n2Wc.cjs';
 
  type PongoCollectionOptions<DriverType extends DatabaseDriverType = DatabaseDriverType> = {
  db: PongoDb<DriverType>;
@@ -91,7 +91,7 @@ declare class ConcurrencyError extends PongoError {
  constructor(message?: string);
  }
 
- declare const pongoClient: <DatabaseDriver extends AnyPongoDatabaseDriver, ConnectionString extends DatabaseConnectionString<InferDriverDatabaseType<DatabaseDriver["driverType"]>>, TypedClientSchema extends PongoClientSchema = PongoClientSchema>(options: PongoClientOptions<DatabaseDriver, ConnectionString, TypedClientSchema>) => PongoClient<DatabaseDriver["driverType"], ExtractPongoDatabaseTypeFromDriver<DatabaseDriver>> & PongoClientWithSchema<TypedClientSchema>;
+ declare const pongoClient: <DatabaseDriver extends AnyPongoDatabaseDriver, TypedClientSchema extends PongoClientSchema = PongoClientSchema>(options: PongoClientOptions<DatabaseDriver, TypedClientSchema>) => PongoClient<DatabaseDriver["driverType"], ExtractPongoDatabaseTypeFromDriver<DatabaseDriver>> & PongoClientWithSchema<TypedClientSchema>;
 
  type PongoSessionOptions = {
  explicit?: boolean;
package/dist/index.d.ts CHANGED
@@ -1,6 +1,6 @@
- import { DatabaseDriverType, Dumbo, MigrationStyle, SQLExecutor, DatabaseConnectionString, InferDriverDatabaseType } from '@event-driven-io/dumbo';
- import { P as PongoDb, a as PongoCollectionSchemaComponent, C as CollectionOperationOptions, b as PongoDocument, c as PongoCollection, d as PongoClientSchema, e as PongoDatabaseDriver, f as PongoCollectionSchema, g as PongoDatabaseFactoryOptions, h as PongoDatabaseSchemaComponent, i as PongoDbSchema, A as AnyPongoDb, j as AnyPongoDatabaseDriver, k as PongoClientOptions, l as PongoClient, E as ExtractPongoDatabaseTypeFromDriver, m as PongoClientWithSchema, n as PongoTransactionOptions, o as PongoSession, p as PongoDbTransaction } from './pongoCollectionSchemaComponent-BsHlVyN-.js';
- export { aq as $inc, ar as $push, ao as $set, ap as $unset, aj as AlternativeType, z as AnyPongoDatabaseDriverOptions, G as CollectionsMap, ak as Condition, I as DBsMap, aw as DOCUMENT_DOES_NOT_EXIST, av as DOCUMENT_EXISTS, _ as DeleteManyOptions, Z as DeleteOneOptions, ae as Document, aL as DocumentHandler, a3 as EnhancedOmit, au as ExpectedDocumentVersion, as as ExpectedDocumentVersionGeneral, at as ExpectedDocumentVersionValue, B as ExtractPongoDatabaseDriverOptions, $ as FindOptions, X as HandleOptions, a1 as HasId, a2 as InferIdType, U as InsertManyOptions, T as InsertOneOptions, ax as NO_CONCURRENCY_CHECK, ai as NonObjectIdLikeDocument, a0 as ObjectId, ah as ObjectIdLike, aC as OperationResult, af as OptionalId, a4 as OptionalUnlessRequiredId, a6 as OptionalUnlessRequiredIdAndVersion, a5 as OptionalUnlessRequiredVersion, ag as OptionalVersion, O as PongoClientSchemaMetadata, q as PongoCollectionSQLBuilder, t as PongoCollectionSchemaComponentOptions, M as PongoCollectionSchemaMetadata, s as PongoCollectionURN, r as PongoCollectionURNType, y as PongoDatabaseDriverOptions, D as PongoDatabaseDriverRegistry, x as PongoDatabaseSQLBuilder, w as PongoDatabaseSchemaComponentOptions, v as PongoDatabaseURN, u as PongoDatabaseURNType, N as PongoDbSchemaMetadata, H as PongoDbWithSchema, aJ as PongoDeleteManyResult, aI as PongoDeleteResult, al as PongoFilter, an as PongoFilterOperator, aK as PongoHandleResult, aF as PongoInsertManyResult, aE as PongoInsertOneResult, S as PongoSchemaConfig, aB as PongoUpdate, aH as PongoUpdateManyResult, aG as PongoUpdateResult, ad as RegExpOrString, Y as ReplaceOneOptions, am as RootFilterOperators, W as UpdateManyOptions, V as UpdateOneOptions, a7 as WithId, ab as WithIdAndVersion, a9 as WithVersion, a8 as WithoutId, ac as WithoutIdAndVersion, aa as WithoutVersion, aA as expectedVersion, az as expectedVersionValue, ay as isGeneralExpectedDocumentVersion, aD as operationResult, F as pongoDatabaseDriverRegistry, J as pongoSchema, L as proxyClientWithSchema, K as proxyPongoDbWithSchema, R as toClientSchemaMetadata, Q as toDbSchemaMetadata } from './pongoCollectionSchemaComponent-BsHlVyN-.js';
+ import { DatabaseDriverType, Dumbo, MigrationStyle, SQLExecutor } from '@event-driven-io/dumbo';
+ import { a as PongoDb, c as PongoCollectionSchemaComponent, C as CollectionOperationOptions, d as PongoDocument, e as PongoCollection, f as PongoClientSchema, P as PongoDatabaseDriver, g as PongoCollectionSchema, h as PongoDatabaseFactoryOptions, i as PongoDatabaseSchemaComponent, j as PongoDbSchema, A as AnyPongoDb, k as AnyPongoDatabaseDriver, l as PongoClientOptions, m as PongoClient, E as ExtractPongoDatabaseTypeFromDriver, n as PongoClientWithSchema, o as PongoTransactionOptions, p as PongoSession, q as PongoDbTransaction } from './pongoCollectionSchemaComponent-t_e9n2Wc.js';
+ export { as as $inc, at as $push, aq as $set, ar as $unset, al as AlternativeType, z as AnyPongoDatabaseDriverOptions, I as CollectionsMap, am as Condition, K as DBsMap, ay as DOCUMENT_DOES_NOT_EXIST, ax as DOCUMENT_EXISTS, B as DatabaseDriverOptionsWithConnectionString, D as DatabaseDriverOptionsWithDatabaseName, a0 as DeleteManyOptions, $ as DeleteOneOptions, ag as Document, aN as DocumentHandler, a5 as EnhancedOmit, aw as ExpectedDocumentVersion, au as ExpectedDocumentVersionGeneral, av as ExpectedDocumentVersionValue, F as ExtractPongoDatabaseDriverOptions, a1 as FindOptions, Z as HandleOptions, a3 as HasId, a4 as InferIdType, W as InsertManyOptions, V as InsertOneOptions, az as NO_CONCURRENCY_CHECK, ak as NonObjectIdLikeDocument, a2 as ObjectId, aj as ObjectIdLike, aE as OperationResult, ah as OptionalId, a6 as OptionalUnlessRequiredId, a8 as OptionalUnlessRequiredIdAndVersion, a7 as OptionalUnlessRequiredVersion, ai as OptionalVersion, R as PongoClientSchemaMetadata, r as PongoCollectionSQLBuilder, u as PongoCollectionSchemaComponentOptions, O as PongoCollectionSchemaMetadata, t as PongoCollectionURN, s as PongoCollectionURNType, b as PongoDatabaseDriverOptions, G as PongoDatabaseDriverRegistry, y as PongoDatabaseSQLBuilder, x as PongoDatabaseSchemaComponentOptions, w as PongoDatabaseURN, v as PongoDatabaseURNType, Q as PongoDbSchemaMetadata, J as PongoDbWithSchema, aL as PongoDeleteManyResult, aK as PongoDeleteResult, an as PongoFilter, ap as PongoFilterOperator, aM as PongoHandleResult, aH as PongoInsertManyResult, aG as PongoInsertOneResult, U as PongoSchemaConfig, aD as PongoUpdate, aJ as PongoUpdateManyResult, aI as PongoUpdateResult, af as RegExpOrString, _ as ReplaceOneOptions, ao as RootFilterOperators, Y as UpdateManyOptions, X as UpdateOneOptions, a9 as WithId, ad as WithIdAndVersion, ab as WithVersion, aa as WithoutId, ae as WithoutIdAndVersion, ac as WithoutVersion, aC as expectedVersion, aB as expectedVersionValue, aA as isGeneralExpectedDocumentVersion, aF as operationResult, H as pongoDatabaseDriverRegistry, L as pongoSchema, N as proxyClientWithSchema, M as proxyPongoDbWithSchema, T as toClientSchemaMetadata, S as toDbSchemaMetadata } from './pongoCollectionSchemaComponent-t_e9n2Wc.js';
 
  type PongoCollectionOptions<DriverType extends DatabaseDriverType = DatabaseDriverType> = {
  db: PongoDb<DriverType>;
@@ -91,7 +91,7 @@ declare class ConcurrencyError extends PongoError {
  constructor(message?: string);
  }
 
- declare const pongoClient: <DatabaseDriver extends AnyPongoDatabaseDriver, ConnectionString extends DatabaseConnectionString<InferDriverDatabaseType<DatabaseDriver["driverType"]>>, TypedClientSchema extends PongoClientSchema = PongoClientSchema>(options: PongoClientOptions<DatabaseDriver, ConnectionString, TypedClientSchema>) => PongoClient<DatabaseDriver["driverType"], ExtractPongoDatabaseTypeFromDriver<DatabaseDriver>> & PongoClientWithSchema<TypedClientSchema>;
+ declare const pongoClient: <DatabaseDriver extends AnyPongoDatabaseDriver, TypedClientSchema extends PongoClientSchema = PongoClientSchema>(options: PongoClientOptions<DatabaseDriver, TypedClientSchema>) => PongoClient<DatabaseDriver["driverType"], ExtractPongoDatabaseTypeFromDriver<DatabaseDriver>> & PongoClientWithSchema<TypedClientSchema>;
 
  type PongoSessionOptions = {
  explicit?: boolean;
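Both declaration files drop DatabaseConnectionString and InferDriverDatabaseType from the dumbo import and remove the ConnectionString type parameter from pongoClient, so the client is now generic only over the driver and the typed schema. A type-level sketch of the change (the alias name is made up, and it assumes these types remain exported from the package root):

// beta.1:  pongoClient<Driver, ConnectionString extends DatabaseConnectionString<...>, Schema>(options)
// beta.10: pongoClient<Driver, Schema>(options)
// The connection string is no longer a separate generic parameter; it travels
// inside PongoClientOptions<Driver, Schema> instead.
import type {
  AnyPongoDatabaseDriver,
  PongoClientOptions,
  PongoClientSchema,
} from '@event-driven-io/pongo';

type ClientOptionsOf<
  Driver extends AnyPongoDatabaseDriver,
  Schema extends PongoClientSchema = PongoClientSchema,
> = PongoClientOptions<Driver, Schema>;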
package/dist/index.js CHANGED
@@ -1,4 +1,3 @@
- import "./chunk-IBJKZ6TS.js";
  import {
  ConcurrencyError,
  DOCUMENT_DOES_NOT_EXIST,
@@ -35,7 +34,7 @@ import {
  toClientSchemaMetadata,
  toDbSchemaMetadata,
  transactionExecutorOrDefault
- } from "./chunk-DL4E3N6J.js";
+ } from "./chunk-RBJRJFQY.js";
  export {
  ConcurrencyError,
  DOCUMENT_DOES_NOT_EXIST,
package/dist/pg.cjs CHANGED
@@ -1,16 +1,359 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true});require('./chunk-7W6X4QGY.cjs');
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }
 
 
 
 
 
- var _chunk3KNMMQUVcjs = require('./chunk-3KNMMQUV.cjs');
- require('./chunk-YLV7YIPZ.cjs');
 
 
 
 
 
+ var _chunkTP73JUIXcjs = require('./chunk-TP73JUIX.cjs');
 
- exports.databaseDriver = _chunk3KNMMQUVcjs.pgDatabaseDriver; exports.pgDriver = _chunk3KNMMQUVcjs.pgDatabaseDriver; exports.pongoCollectionPostgreSQLMigrations = _chunk3KNMMQUVcjs.pongoCollectionPostgreSQLMigrations; exports.postgresSQLBuilder = _chunk3KNMMQUVcjs.postgresSQLBuilder; exports.usePgDatabaseDriver = _chunk3KNMMQUVcjs.usePgDatabaseDriver;
14
+ // src/storage/postgresql/core/sqlBuilder/index.ts
15
+
16
+
17
+
18
+
19
+
20
+ var _dumbo = require('@event-driven-io/dumbo');
21
+
22
+ // src/storage/postgresql/core/sqlBuilder/filter/index.ts
23
+
24
+
25
+ // src/storage/postgresql/core/sqlBuilder/filter/queryOperators.ts
26
+
27
+ var handleOperator = (path, operator, value) => {
28
+ if (path === "_id" || path === "_version") {
29
+ return handleMetadataOperator(path, operator, value);
30
+ }
31
+ switch (operator) {
32
+ case "$eq": {
33
+ const nestedPath = _dumbo.JSONSerializer.serialize(
34
+ buildNestedObject(path, value)
35
+ );
36
+ const serializedValue = _dumbo.JSONSerializer.serialize(value);
37
+ return _dumbo.SQL`(data @> ${nestedPath}::jsonb OR jsonb_path_exists(data, '$.${_dumbo.SQL.plain(path)}[*] ? (@ == ${_dumbo.SQL.plain(serializedValue)})'))`;
38
+ }
39
+ case "$gt":
40
+ case "$gte":
41
+ case "$lt":
42
+ case "$lte":
43
+ case "$ne": {
44
+ const jsonPath = _dumbo.SQL.plain(path.split(".").join(","));
45
+ return _dumbo.SQL`data ->> '${jsonPath}' ${_dumbo.SQL.plain(_chunkTP73JUIXcjs.OperatorMap[operator])} ${value}`;
46
+ }
47
+ case "$in": {
48
+ const jsonPath = `{${path.split(".").join(",")}}`;
49
+ return _dumbo.SQL`data #>> ${jsonPath} IN ${value}`;
50
+ }
51
+ case "$nin": {
52
+ const jsonPath = `{${path.split(".").join(",")}}`;
53
+ return _dumbo.SQL`data #>> ${jsonPath} NOT IN ${value}`;
54
+ }
55
+ case "$elemMatch": {
56
+ const subQuery = _chunkTP73JUIXcjs.objectEntries.call(void 0, value).map(
57
+ ([subKey, subValue]) => `@."${subKey}" == ${_dumbo.JSONSerializer.serialize(subValue)}`
58
+ ).join(" && ");
59
+ return _dumbo.SQL`jsonb_path_exists(data, '$.${_dumbo.SQL.plain(path)}[*] ? (${_dumbo.SQL.plain(subQuery)})')`;
60
+ }
61
+ case "$all": {
62
+ const nestedPath = _dumbo.JSONSerializer.serialize(
63
+ buildNestedObject(path, value)
64
+ );
65
+ return _dumbo.SQL`data @> ${nestedPath}::jsonb`;
66
+ }
67
+ case "$size": {
68
+ const jsonPath = `{${path.split(".").join(",")}}`;
69
+ return _dumbo.SQL`jsonb_array_length(data #> ${jsonPath}) = ${value}`;
70
+ }
71
+ default:
72
+ throw new Error(`Unsupported operator: ${operator}`);
73
+ }
74
+ };
75
+ var handleMetadataOperator = (fieldName, operator, value) => {
76
+ switch (operator) {
77
+ case "$eq":
78
+ return _dumbo.SQL`${_dumbo.SQL.plain(fieldName)} = ${value}`;
79
+ case "$gt":
80
+ case "$gte":
81
+ case "$lt":
82
+ case "$lte":
83
+ case "$ne":
84
+ return _dumbo.SQL`${_dumbo.SQL.plain(fieldName)} ${_dumbo.SQL.plain(_chunkTP73JUIXcjs.OperatorMap[operator])} ${value}`;
85
+ case "$in":
86
+ return _dumbo.SQL`${_dumbo.SQL.plain(fieldName)} IN ${value}`;
87
+ case "$nin":
88
+ return _dumbo.SQL`${_dumbo.SQL.plain(fieldName)} NOT IN ${value}`;
89
+ default:
90
+ throw new Error(`Unsupported operator: ${operator}`);
91
+ }
92
+ };
93
+ var buildNestedObject = (path, value) => path.split(".").reverse().reduce((acc, key) => ({ [key]: acc }), value);
94
+
95
+ // src/storage/postgresql/core/sqlBuilder/filter/index.ts
96
+ var AND = "AND";
97
+ var constructFilterQuery = (filter) => _dumbo.SQL.merge(
98
+ Object.entries(filter).map(
99
+ ([key, value]) => isRecord(value) ? constructComplexFilterQuery(key, value) : handleOperator(key, "$eq", value)
100
+ ),
101
+ ` ${AND} `
102
+ );
103
+ var constructComplexFilterQuery = (key, value) => {
104
+ const isEquality = !_chunkTP73JUIXcjs.hasOperators.call(void 0, value);
105
+ return _dumbo.SQL.merge(
106
+ _chunkTP73JUIXcjs.objectEntries.call(void 0, value).map(
107
+ ([nestedKey, val]) => isEquality ? handleOperator(`${key}.${nestedKey}`, _chunkTP73JUIXcjs.QueryOperators.$eq, val) : handleOperator(key, nestedKey, val)
108
+ ),
109
+ ` ${AND} `
110
+ );
111
+ };
112
+ var isRecord = (value) => value !== null && typeof value === "object" && !Array.isArray(value);
113
+
114
+ // src/storage/postgresql/core/sqlBuilder/update/index.ts
115
+
116
+ var buildUpdateQuery = (update) => _chunkTP73JUIXcjs.objectEntries.call(void 0, update).reduce(
117
+ (currentUpdateQuery, [op, value]) => {
118
+ switch (op) {
119
+ case "$set":
120
+ return buildSetQuery(value, currentUpdateQuery);
121
+ case "$unset":
122
+ return buildUnsetQuery(value, currentUpdateQuery);
123
+ case "$inc":
124
+ return buildIncQuery(value, currentUpdateQuery);
125
+ case "$push":
126
+ return buildPushQuery(value, currentUpdateQuery);
127
+ default:
128
+ return currentUpdateQuery;
129
+ }
130
+ },
131
+ _dumbo.SQL`data`
132
+ );
133
+ var buildSetQuery = (set, currentUpdateQuery) => _dumbo.SQL`${currentUpdateQuery} || ${_dumbo.JSONSerializer.serialize(set)}::jsonb`;
134
+ var buildUnsetQuery = (unset, currentUpdateQuery) => _dumbo.SQL`${currentUpdateQuery} - ${Object.keys(unset).map((k) => `{${k}}`).join(", ")}`;
135
+ var buildIncQuery = (inc, currentUpdateQuery) => {
136
+ for (const [key, value] of Object.entries(inc)) {
137
+ currentUpdateQuery = typeof value === "bigint" ? _dumbo.SQL`jsonb_set(${currentUpdateQuery}, '{${_dumbo.SQL.plain(key)}}', to_jsonb((COALESCE((data->>'${_dumbo.SQL.plain(key)}')::BIGINT, 0) + ${value})::TEXT), true)` : _dumbo.SQL`jsonb_set(${currentUpdateQuery}, '{${_dumbo.SQL.plain(key)}}', to_jsonb(COALESCE((data->>'${_dumbo.SQL.plain(key)}')::NUMERIC, 0) + ${value}), true)`;
138
+ }
139
+ return currentUpdateQuery;
140
+ };
141
+ var buildPushQuery = (push, currentUpdateQuery) => {
142
+ for (const [key, value] of Object.entries(push)) {
143
+ const serializedValue = _dumbo.JSONSerializer.serialize([value]);
144
+ currentUpdateQuery = _dumbo.SQL`jsonb_set(${currentUpdateQuery}, '{${_dumbo.SQL.plain(key)}}', (coalesce(data->'${_dumbo.SQL.plain(key)}', '[]'::jsonb) || ${serializedValue}::jsonb), true)`;
145
+ }
146
+ return currentUpdateQuery;
147
+ };
148
+
149
+ // src/storage/postgresql/core/sqlBuilder/index.ts
150
+ var createCollection = (collectionName) => _dumbo.SQL`
151
+ CREATE TABLE IF NOT EXISTS ${_dumbo.SQL.identifier(collectionName)} (
152
+ _id TEXT PRIMARY KEY,
153
+ data JSONB NOT NULL,
154
+ metadata JSONB NOT NULL DEFAULT '{}',
155
+ _version BIGINT NOT NULL DEFAULT 1,
156
+ _partition TEXT NOT NULL DEFAULT 'png_global',
157
+ _archived BOOLEAN NOT NULL DEFAULT FALSE,
158
+ _created TIMESTAMPTZ NOT NULL DEFAULT now(),
159
+ _updated TIMESTAMPTZ NOT NULL DEFAULT now()
160
+ )`;
161
+ var pongoCollectionPostgreSQLMigrations = (collectionName) => [
162
+ _dumbo.sqlMigration.call(void 0, `pongoCollection:${collectionName}:001:createtable`, [
163
+ createCollection(collectionName)
164
+ ])
165
+ ];
166
+ var postgresSQLBuilder = (collectionName) => ({
167
+ createCollection: () => createCollection(collectionName),
168
+ insertOne: (document) => {
169
+ const serialized = _dumbo.JSONSerializer.serialize(document);
170
+ const id = document._id;
171
+ const version = _nullishCoalesce(document._version, () => ( 1n));
172
+ return _dumbo.SQL`
173
+ INSERT INTO ${_dumbo.SQL.identifier(collectionName)} (_id, data, _version)
174
+ VALUES (${id}, ${serialized}, ${version}) ON CONFLICT(_id) DO NOTHING;`;
175
+ },
176
+ insertMany: (documents) => {
177
+ const values = _dumbo.SQL.merge(
178
+ documents.map(
179
+ (doc) => _dumbo.SQL`(${doc._id}, ${_dumbo.JSONSerializer.serialize(doc)}, ${_nullishCoalesce(doc._version, () => ( 1n))})`
180
+ ),
181
+ ","
182
+ );
183
+ return _dumbo.SQL`
184
+ INSERT INTO ${_dumbo.SQL.identifier(collectionName)} (_id, data, _version) VALUES ${values}
185
+ ON CONFLICT(_id) DO NOTHING
186
+ RETURNING _id;`;
187
+ },
188
+ updateOne: (filter, update, options) => {
189
+ const expectedVersion = _chunkTP73JUIXcjs.expectedVersionValue.call(void 0, _optionalChain([options, 'optionalAccess', _ => _.expectedVersion]));
190
+ const expectedVersionUpdate = expectedVersion != null ? _dumbo.SQL`AND ${_dumbo.SQL.identifier(collectionName)}._version = ${expectedVersion}` : _dumbo.SQL``;
191
+ const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter);
192
+ const updateQuery = _dumbo.isSQL.call(void 0, update) ? update : buildUpdateQuery(update);
193
+ return _dumbo.SQL`
194
+ WITH existing AS (
195
+ SELECT _id, _version as current_version
196
+ FROM ${_dumbo.SQL.identifier(collectionName)} ${where(filterQuery)}
197
+ LIMIT 1
198
+ ),
199
+ updated AS (
200
+ UPDATE ${_dumbo.SQL.identifier(collectionName)}
201
+ SET
202
+ data = ${updateQuery} || jsonb_build_object('_id', ${_dumbo.SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),
203
+ _version = _version + 1
204
+ FROM existing
205
+ WHERE ${_dumbo.SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}
206
+ RETURNING ${_dumbo.SQL.identifier(collectionName)}._id, ${_dumbo.SQL.identifier(collectionName)}._version
207
+ )
208
+ SELECT
209
+ existing._id,
210
+ COALESCE(updated._version, existing.current_version) AS version,
211
+ COUNT(existing._id) over() AS matched,
212
+ COUNT(updated._id) over() AS modified
213
+ FROM existing
214
+ LEFT JOIN updated
215
+ ON existing._id = updated._id;`;
216
+ },
217
+ replaceOne: (filter, document, options) => {
218
+ const expectedVersion = _chunkTP73JUIXcjs.expectedVersionValue.call(void 0, _optionalChain([options, 'optionalAccess', _2 => _2.expectedVersion]));
219
+ const expectedVersionUpdate = expectedVersion != null ? _dumbo.SQL`AND ${_dumbo.SQL.identifier(collectionName)}._version = ${expectedVersion}` : _dumbo.SQL``;
220
+ const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter);
221
+ return _dumbo.SQL`
222
+ WITH existing AS (
223
+ SELECT _id, _version as current_version
224
+ FROM ${_dumbo.SQL.identifier(collectionName)} ${where(filterQuery)}
225
+ LIMIT 1
226
+ ),
227
+ updated AS (
228
+ UPDATE ${_dumbo.SQL.identifier(collectionName)}
229
+ SET
230
+ data = ${_dumbo.JSONSerializer.serialize(document)} || jsonb_build_object('_id', ${_dumbo.SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),
231
+ _version = _version + 1
232
+ FROM existing
233
+ WHERE ${_dumbo.SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}
234
+ RETURNING ${_dumbo.SQL.identifier(collectionName)}._id, ${_dumbo.SQL.identifier(collectionName)}._version
235
+ )
236
+ SELECT
237
+ existing._id,
238
+ COALESCE(updated._version, existing.current_version) AS version,
239
+ COUNT(existing._id) over() AS matched,
240
+ COUNT(updated._id) over() AS modified
241
+ FROM existing
242
+ LEFT JOIN updated
243
+ ON existing._id = updated._id;`;
244
+ },
245
+ updateMany: (filter, update) => {
246
+ const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter);
247
+ const updateQuery = _dumbo.isSQL.call(void 0, update) ? update : buildUpdateQuery(update);
248
+ return _dumbo.SQL`
249
+ UPDATE ${_dumbo.SQL.identifier(collectionName)}
250
+ SET
251
+ data = ${updateQuery} || jsonb_build_object('_version', (_version + 1)::text),
252
+ _version = _version + 1
253
+ ${where(filterQuery)};`;
254
+ },
255
+ deleteOne: (filter, options) => {
256
+ const expectedVersion = _chunkTP73JUIXcjs.expectedVersionValue.call(void 0, _optionalChain([options, 'optionalAccess', _3 => _3.expectedVersion]));
257
+ const expectedVersionUpdate = expectedVersion != null ? _dumbo.SQL`AND ${_dumbo.SQL.identifier(collectionName)}._version = ${expectedVersion}` : _dumbo.SQL``;
258
+ const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter);
259
+ return _dumbo.SQL`
260
+ WITH existing AS (
261
+ SELECT _id
262
+ FROM ${_dumbo.SQL.identifier(collectionName)} ${where(filterQuery)}
263
+ LIMIT 1
264
+ ),
265
+ deleted AS (
266
+ DELETE FROM ${_dumbo.SQL.identifier(collectionName)}
267
+ USING existing
268
+ WHERE ${_dumbo.SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}
269
+ RETURNING ${_dumbo.SQL.identifier(collectionName)}._id
270
+ )
271
+ SELECT
272
+ existing._id,
273
+ COUNT(existing._id) over() AS matched,
274
+ COUNT(deleted._id) over() AS deleted
275
+ FROM existing
276
+ LEFT JOIN deleted
277
+ ON existing._id = deleted._id;`;
278
+ },
279
+ deleteMany: (filter) => {
280
+ const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter);
281
+ return _dumbo.SQL`DELETE FROM ${_dumbo.SQL.identifier(collectionName)} ${where(filterQuery)}`;
282
+ },
283
+ findOne: (filter) => {
284
+ const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter);
285
+ return _dumbo.SQL`SELECT data FROM ${_dumbo.SQL.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;
286
+ },
287
+ find: (filter, options) => {
288
+ const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter);
289
+ const query = [];
290
+ query.push(_dumbo.SQL`SELECT data FROM ${_dumbo.SQL.identifier(collectionName)}`);
291
+ query.push(where(filterQuery));
292
+ if (_optionalChain([options, 'optionalAccess', _4 => _4.limit])) {
293
+ query.push(_dumbo.SQL`LIMIT ${options.limit}`);
294
+ }
295
+ if (_optionalChain([options, 'optionalAccess', _5 => _5.skip])) {
296
+ query.push(_dumbo.SQL`OFFSET ${options.skip}`);
297
+ }
298
+ return _dumbo.SQL.merge([...query, _dumbo.SQL`;`]);
299
+ },
300
+ countDocuments: (filter) => {
301
+ const filterQuery = _dumbo.SQL.check.isSQL(filter) ? filter : constructFilterQuery(filter);
302
+ return _dumbo.SQL`SELECT COUNT(1) as count FROM ${_dumbo.SQL.identifier(collectionName)} ${where(filterQuery)};`;
303
+ },
304
+ rename: (newName) => _dumbo.SQL`ALTER TABLE ${_dumbo.SQL.identifier(collectionName)} RENAME TO ${_dumbo.SQL.identifier(newName)};`,
305
+ drop: (targetName = collectionName) => _dumbo.SQL`DROP TABLE IF EXISTS ${_dumbo.SQL.identifier(targetName)}`
306
+ });
307
+ var where = (filterQuery) => _dumbo.SQL.check.isEmpty(filterQuery) ? _dumbo.SQL.EMPTY : _dumbo.SQL.merge([_dumbo.SQL`WHERE `, filterQuery]);
308
+
309
+ // src/storage/postgresql/pg/index.ts
310
+
311
+
312
+
313
+
314
+
315
+ var _pg = require('@event-driven-io/dumbo/pg');
316
+ require('pg');
317
+ var pgDatabaseDriver = {
318
+ driverType: _pg.PgDriverType,
319
+ databaseFactory: (options) => {
320
+ const databaseName = _nullishCoalesce(options.databaseName, () => ( _pg.getDatabaseNameOrDefault.call(void 0, options.connectionString)));
321
+ return _chunkTP73JUIXcjs.PongoDatabase.call(void 0, {
322
+ ...options,
323
+ pool: _dumbo.dumbo.call(void 0, {
324
+ connectionString: options.connectionString,
325
+ driver: _pg.pgDatabaseDriver,
326
+ ...options.connectionOptions
327
+ }),
328
+ schemaComponent: _chunkTP73JUIXcjs.PongoDatabaseSchemaComponent.call(void 0, {
329
+ driverType: _pg.PgDriverType,
330
+ collectionFactory: (schema) => _chunkTP73JUIXcjs.PongoCollectionSchemaComponent.call(void 0, {
331
+ driverType: _pg.PgDriverType,
332
+ definition: schema,
333
+ migrationsOrSchemaComponents: {
334
+ migrations: pongoCollectionPostgreSQLMigrations(schema.name)
335
+ },
336
+ sqlBuilder: postgresSQLBuilder(schema.name)
337
+ }),
338
+ definition: _nullishCoalesce(_optionalChain([options, 'access', _6 => _6.schema, 'optionalAccess', _7 => _7.definition]), () => ( _chunkTP73JUIXcjs.pongoSchema.db(databaseName, {})))
339
+ }),
340
+ databaseName
341
+ });
342
+ },
343
+ getDatabaseNameOrDefault: (options) => {
344
+ return _nullishCoalesce(options.databaseName, () => ( _pg.getDatabaseNameOrDefault.call(void 0, options.connectionString)));
345
+ },
346
+ defaultConnectionString: "postgresql://localhost:5432/postgres"
347
+ };
348
+ var usePgDatabaseDriver = () => {
349
+ _chunkTP73JUIXcjs.pongoDatabaseDriverRegistry.register(_pg.PgDriverType, pgDatabaseDriver);
350
+ };
351
+ usePgDatabaseDriver();
352
+
353
+
354
+
355
+
356
+
357
+
358
+ exports.databaseDriver = pgDatabaseDriver; exports.pgDriver = pgDatabaseDriver; exports.pongoCollectionPostgreSQLMigrations = pongoCollectionPostgreSQLMigrations; exports.postgresSQLBuilder = postgresSQLBuilder; exports.usePgDatabaseDriver = usePgDatabaseDriver;
16
359
  //# sourceMappingURL=pg.cjs.map
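With the shared chunk-3KNMMQUV.cjs removed, pg.cjs now inlines the PostgreSQL SQL builder and migrations and exports them directly. A short sketch of the re-exported helpers; the '@event-driven-io/pongo/pg' subpath is an assumption, and the collection name and documents are made up:

// Sketch only: subpath assumed; the helper names come from the exports line above.
import {
  pongoCollectionPostgreSQLMigrations,
  postgresSQLBuilder,
} from '@event-driven-io/pongo/pg';

const users = postgresSQLBuilder('users');

// Each call returns a SQL object built with @event-driven-io/dumbo's SQL tag,
// matching the inlined createCollection / insertOne / findOne code shown above.
const createTable = users.createCollection();
const insert = users.insertOne({ _id: '1', name: 'Ada' });
const byName = users.findOne({ name: 'Ada' });

// One migration per collection, keyed 'pongoCollection:users:001:createtable'.
const migrations = pongoCollectionPostgreSQLMigrations('users');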
package/dist/pg.cjs.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/pg.cjs"],"names":[],"mappings":"AAAA,yGAA6B;AAC7B;AACE;AACA;AACA;AACA;AACF,wDAA6B;AAC7B,gCAA6B;AAC7B;AACE;AACA;AACA;AACA;AACA;AACF,gWAAC","file":"/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/pg.cjs"}
1
+ {"version":3,"sources":["/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/pg.cjs","../src/storage/postgresql/core/sqlBuilder/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/queryOperators.ts","../src/storage/postgresql/core/sqlBuilder/update/index.ts","../src/storage/postgresql/pg/index.ts"],"names":["SQL","JSONSerializer"],"mappings":"AAAA;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,wDAA6B;AAC7B;AACA;ACbA;AACE;AACA;AACA;AACA;AAAA,+CACK;ADeP;AACA;AErBA;AFuBA;AACA;AGxBA;AAGO,IAAM,eAAA,EAAiB,CAC5B,IAAA,EACA,QAAA,EACA,KAAA,EAAA,GACQ;AACR,EAAA,GAAA,CAAI,KAAA,IAAS,MAAA,GAAS,KAAA,IAAS,UAAA,EAAY;AACzC,IAAA,OAAO,sBAAA,CAAuB,IAAA,EAAM,QAAA,EAAU,KAAK,CAAA;AAAA,EACrD;AAEA,EAAA,OAAA,CAAQ,QAAA,EAAU;AAAA,IAChB,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,WAAA,EAAa,qBAAA,CAAe,SAAA;AAAA,QAChC,iBAAA,CAAkB,IAAA,EAAM,KAAK;AAAA,MAC/B,CAAA;AACA,MAAA,MAAM,gBAAA,EAAkB,qBAAA,CAAe,SAAA,CAAU,KAAK,CAAA;AAEtD,MAAA,OAAO,UAAA,CAAA,SAAA,EAAe,UAAU,CAAA,sCAAA,EAAyC,UAAA,CAAI,KAAA,CAAM,IAAI,CAAC,CAAA,YAAA,EAAe,UAAA,CAAI,KAAA,CAAM,eAAe,CAAC,CAAA,IAAA,CAAA;AAAA,IACnI;AAAA,IACA,KAAK,KAAA;AAAA,IACL,KAAK,MAAA;AAAA,IACL,KAAK,KAAA;AAAA,IACL,KAAK,MAAA;AAAA,IACL,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,SAAA,EAAW,UAAA,CAAI,KAAA,CAAM,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA,CAAE,IAAA,CAAK,GAAG,CAAC,CAAA;AAEpD,MAAA,OAAO,UAAA,CAAA,UAAA,EAAgB,QAAQ,CAAA,EAAA,EAAK,UAAA,CAAI,KAAA,CAAM,6BAAA,CAAY,QAAQ,CAAC,CAAC,CAAA,CAAA,EAAI,KAAK,CAAA,CAAA;AAC/E,IAAA;AACY,IAAA;AACoC,MAAA;AAES,MAAA;AACzD,IAAA;AACa,IAAA;AACmC,MAAA;AAEa,MAAA;AAC7D,IAAA;AACmB,IAAA;AAEd,MAAA;AAEgD,QAAA;AAErC,MAAA;AAC6D,MAAA;AAC7E,IAAA;AACa,IAAA;AACuB,MAAA;AACH,QAAA;AAC/B,MAAA;AAC+B,MAAA;AACjC,IAAA;AACc,IAAA;AACkC,MAAA;AAEc,MAAA;AAC9D,IAAA;AACA,IAAA;AACqD,MAAA;AACvD,EAAA;AACF;AAMU;AACU,EAAA;AACX,IAAA;AACyC,MAAA;AACzC,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AAC2E,MAAA;AAC3E,IAAA;AACuD,MAAA;AACvD,IAAA;AAC2D,MAAA;AAChE,IAAA;AACqD,MAAA;AACvD,EAAA;AACF;AASkB;AHFkE;AACA;AEnFxE;AAGN;AACqB,EAAA;AAGjB,IAAA;AACN,EAAA;AACO,EAAA;AACT;AAKQ;AAC8B,EAAA;AAE3B,EAAA;AACY,IAAA;AAEuB,MAAA;AAE5C,IAAA;AACO,IAAA;AACT,EAAA;AACF;AAGwD;AFwE4B;AACA;AIjHhD;AAWZ;AACiB,EAAA;AACvB,IAAA;AACL,MAAA;AAC2C,QAAA;AAC3C,MAAA;AAC6C,QAAA;AAC7C,MAAA;AAC2C,QAAA;AAC3C,MAAA;AAC4C,QAAA;AACjD,MAAA;AACS,QAAA;AACX,IAAA;AACF,EAAA;AACAA,EAAAA;AACF;AAG+BC;AAMD;AAOtB;AACwC,EAAA;AAGxB,IAAA;AAExB,EAAA;AACO,EAAA;AACT;AAKU;AACyC,EAAA;AACS,IAAA;AACoB,IAAA;AAC9E,EAAA;AACO,EAAA;AACT;AJqFoF;AACA;AC9HlFD;AAC6D,+BAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAAA;AAWgB;AACX,EAAA;AACjC,IAAA;AAChC,EAAA;AACH;AAIiC;AAC6B,EAAA;AACY,EAAA;AAClB,IAAA;AAChC,IAAA;AACiB,IAAA;AAE9BA,IAAAA;AACuC,kBAAA;AACL,cAAA;AAC3C,EAAA;AAC4E,EAAA;AACvD,IAAA;AACP,MAAA;AAEmD,QAAA;AAC7D,MAAA;AACA,MAAA;AACF,IAAA;AAEOA,IAAAA;AACwE,kBAAA;AAAM;AAAA,oBAAA;AAGvF,EAAA;AAKU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAGoD,IAAA;AACJ,IAAA;AAE7DA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIpB,eAAA;AAAA;AAE+B,iBAAA;AAAe;AAAA;AAGxB,cAAA;AACK,kBAAA;AAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAUvF,EAAA;AAKU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAGoD,IAAA;AAEjEA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIpB,eAAA;AAAA;AAEM,iBAAA;AAA+D;AAAA;AAG/C,cAAA;AACK,kBAAA;AAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAUvF,EAAA;AAIU,EAAA;AACgE,IAAA;AACJ,IAAA;AAE7DA,IAAAA;AACkC,aAAA;AAAA;AAEjB,eAAA;AAAA;AAEF,MAAA;AACxB,EAAA;AAIU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAGoD,IAAA;AAEjEA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIf,oBAAA;AAAA;AAEiB,cAAA;AACnB,kBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAShD,EAAA;AACsD,EAAA;AACoB,IAAA;AAEK,IAAA;AAC/E,EAA
A;AACmD,EAAA;AACuB,IAAA;AAEF,IAAA;AACxE,EAAA;AACuE,EAAA;AACG,IAAA;AAClD,IAAA;AAE4C,IAAA;AAErC,IAAA;AAET,IAAA;AACoB,MAAA;AACxC,IAAA;AAEmB,IAAA;AACqB,MAAA;AACxC,IAAA;AAEmC,IAAA;AACrC,EAAA;AAC0D,EAAA;AAG/B,IAAA;AACoD,IAAA;AAC/E,EAAA;AAEkD,EAAA;AAEjB,EAAA;AACnC;AAKU;AD4E0E;AACA;AKpT9D;AACtB;AACsB;AACpB;AACA;AAEK;AACQ;AA0DX;AACU,EAAA;AACkB,EAAA;AAGO,IAAA;AAEd,IAAA;AAChB,MAAA;AACS,MAAA;AACgB,QAAA;AAClB,QAAA;AACG,QAAA;AACZ,MAAA;AAC6C,MAAA;AAChC,QAAA;AAEqB,QAAA;AACjB,UAAA;AACA,UAAA;AACkB,UAAA;AAC+B,YAAA;AAC7D,UAAA;AAC0C,UAAA;AAC3C,QAAA;AAE4D,QAAA;AAChE,MAAA;AACD,MAAA;AACD,IAAA;AACH,EAAA;AACuC,EAAA;AAEsC,IAAA;AAE7E,EAAA;AACyB,EAAA;AAC3B;AAEyC;AAC4B,EAAA;AACrE;AAEoB;ALmPgE;AACA;AACA;AACA;AACA;AACA;AACA","file":"/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/pg.cjs","sourcesContent":[null,"import {\n isSQL,\n JSONSerializer,\n SQL,\n sqlMigration,\n} from '@event-driven-io/dumbo';\nimport {\n expectedVersionValue,\n type DeleteOneOptions,\n type FindOptions,\n type OptionalUnlessRequiredIdAndVersion,\n type PongoCollectionSQLBuilder,\n type PongoFilter,\n type PongoUpdate,\n type ReplaceOneOptions,\n type UpdateOneOptions,\n type WithoutId,\n} from '../../../../core';\nimport { constructFilterQuery } from './filter';\nimport { buildUpdateQuery } from './update';\n\nconst createCollection = (collectionName: string): SQL =>\n SQL`\n CREATE TABLE IF NOT EXISTS ${SQL.identifier(collectionName)} (\n _id TEXT PRIMARY KEY, \n data JSONB NOT NULL, \n metadata JSONB NOT NULL DEFAULT '{}',\n _version BIGINT NOT NULL DEFAULT 1,\n _partition TEXT NOT NULL DEFAULT 'png_global',\n _archived BOOLEAN NOT NULL DEFAULT FALSE,\n _created TIMESTAMPTZ NOT NULL DEFAULT now(),\n _updated TIMESTAMPTZ NOT NULL DEFAULT now()\n )`;\n\nexport const pongoCollectionPostgreSQLMigrations = (collectionName: string) => [\n sqlMigration(`pongoCollection:${collectionName}:001:createtable`, [\n createCollection(collectionName),\n ]),\n];\n\nexport const postgresSQLBuilder = (\n collectionName: string,\n): PongoCollectionSQLBuilder => ({\n createCollection: (): SQL => createCollection(collectionName),\n insertOne: <T>(document: OptionalUnlessRequiredIdAndVersion<T>): SQL => {\n const serialized = JSONSerializer.serialize(document);\n const id = document._id;\n const version = document._version ?? 1n;\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) \n VALUES (${id}, ${serialized}, ${version}) ON CONFLICT(_id) DO NOTHING;`;\n },\n insertMany: <T>(documents: OptionalUnlessRequiredIdAndVersion<T>[]): SQL => {\n const values = SQL.merge(\n documents.map(\n (doc) =>\n SQL`(${doc._id}, ${JSONSerializer.serialize(doc)}, ${doc._version ?? 1n})`,\n ),\n ',',\n );\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) VALUES ${values}\n ON CONFLICT(_id) DO NOTHING\n RETURNING _id;`;\n },\n updateOne: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n options?: UpdateOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? 
update : buildUpdateQuery(update);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n replaceOne: <T>(\n filter: PongoFilter<T> | SQL,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${JSONSerializer.serialize(document)} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n updateMany: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n ): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? update : buildUpdateQuery(update);\n\n return SQL`\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n ${where(filterQuery)};`;\n },\n deleteOne: <T>(\n filter: PongoFilter<T> | SQL,\n options?: DeleteOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? 
filter : constructFilterQuery(filter);\n\n return SQL`\n WITH existing AS (\n SELECT _id\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n deleted AS (\n DELETE FROM ${SQL.identifier(collectionName)}\n USING existing\n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id\n )\n SELECT \n existing._id,\n COUNT(existing._id) over() AS matched,\n COUNT(deleted._id) over() AS deleted\n FROM existing\n LEFT JOIN deleted \n ON existing._id = deleted._id;`;\n },\n deleteMany: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`DELETE FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}`;\n },\n findOne: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`SELECT data FROM ${SQL.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;\n },\n find: <T>(filter: PongoFilter<T> | SQL, options?: FindOptions): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const query: SQL[] = [];\n\n query.push(SQL`SELECT data FROM ${SQL.identifier(collectionName)}`);\n\n query.push(where(filterQuery));\n\n if (options?.limit) {\n query.push(SQL`LIMIT ${options.limit}`);\n }\n\n if (options?.skip) {\n query.push(SQL`OFFSET ${options.skip}`);\n }\n\n return SQL.merge([...query, SQL`;`]);\n },\n countDocuments: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = SQL.check.isSQL(filter)\n ? filter\n : constructFilterQuery(filter);\n return SQL`SELECT COUNT(1) as count FROM ${SQL.identifier(collectionName)} ${where(filterQuery)};`;\n },\n rename: (newName: string): SQL =>\n SQL`ALTER TABLE ${SQL.identifier(collectionName)} RENAME TO ${SQL.identifier(newName)};`,\n drop: (targetName: string = collectionName): SQL =>\n SQL`DROP TABLE IF EXISTS ${SQL.identifier(targetName)}`,\n});\n\nconst where = (filterQuery: SQL): SQL =>\n SQL.check.isEmpty(filterQuery)\n ? SQL.EMPTY\n : SQL.merge([SQL`WHERE `, filterQuery]);\n","import { SQL } from '@event-driven-io/dumbo';\nimport {\n hasOperators,\n objectEntries,\n QueryOperators,\n type PongoFilter,\n} from '../../../../../core';\nimport { handleOperator } from './queryOperators';\n\nexport * from './queryOperators';\n\nconst AND = 'AND';\n\nexport const constructFilterQuery = <T>(filter: PongoFilter<T>): SQL =>\n SQL.merge(\n Object.entries(filter).map(([key, value]) =>\n isRecord(value)\n ? constructComplexFilterQuery(key, value)\n : handleOperator(key, '$eq', value),\n ),\n ` ${AND} `,\n );\n\nconst constructComplexFilterQuery = (\n key: string,\n value: Record<string, unknown>,\n): SQL => {\n const isEquality = !hasOperators(value);\n\n return SQL.merge(\n objectEntries(value).map(([nestedKey, val]) =>\n isEquality\n ? 
handleOperator(`${key}.${nestedKey}`, QueryOperators.$eq, val)\n : handleOperator(key, nestedKey, val),\n ),\n ` ${AND} `,\n );\n};\n\nconst isRecord = (value: unknown): value is Record<string, unknown> =>\n value !== null && typeof value === 'object' && !Array.isArray(value);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport { objectEntries, OperatorMap } from '../../../../../core';\n\nexport const handleOperator = (\n path: string,\n operator: string,\n value: unknown,\n): SQL => {\n if (path === '_id' || path === '_version') {\n return handleMetadataOperator(path, operator, value);\n }\n\n switch (operator) {\n case '$eq': {\n const nestedPath = JSONSerializer.serialize(\n buildNestedObject(path, value),\n );\n const serializedValue = JSONSerializer.serialize(value);\n\n return SQL`(data @> ${nestedPath}::jsonb OR jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (@ == ${SQL.plain(serializedValue)})'))`;\n }\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne': {\n const jsonPath = SQL.plain(path.split('.').join(','));\n\n return SQL`data ->> '${jsonPath}' ${SQL.plain(OperatorMap[operator])} ${value}`;\n }\n case '$in': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} IN ${value as unknown[]}`;\n }\n case '$nin': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} NOT IN ${value as unknown[]}`;\n }\n case '$elemMatch': {\n const subQuery = objectEntries(value as Record<string, unknown>)\n .map(\n ([subKey, subValue]) =>\n `@.\"${subKey}\" == ${JSONSerializer.serialize(subValue)}`,\n )\n .join(' && ');\n return SQL`jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (${SQL.plain(subQuery)})')`;\n }\n case '$all': {\n const nestedPath = JSONSerializer.serialize(\n buildNestedObject(path, value),\n );\n return SQL`data @> ${nestedPath}::jsonb`;\n }\n case '$size': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`jsonb_array_length(data #> ${jsonPath}) = ${value}`;\n }\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst handleMetadataOperator = (\n fieldName: string,\n operator: string,\n value: unknown,\n): SQL => {\n switch (operator) {\n case '$eq':\n return SQL`${SQL.plain(fieldName)} = ${value}`;\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne':\n return SQL`${SQL.plain(fieldName)} ${SQL.plain(OperatorMap[operator])} ${value}`;\n case '$in':\n return SQL`${SQL.plain(fieldName)} IN ${value as unknown[]}`;\n case '$nin':\n return SQL`${SQL.plain(fieldName)} NOT IN ${value as unknown[]}`;\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst buildNestedObject = (\n path: string,\n value: unknown,\n): Record<string, unknown> =>\n path\n .split('.')\n .reverse()\n .reduce((acc, key) => ({ [key]: acc }), value as Record<string, unknown>);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport {\n objectEntries,\n type $inc,\n type $push,\n type $set,\n type $unset,\n type PongoUpdate,\n} from '../../../../../core';\n\nexport const buildUpdateQuery = <T>(update: PongoUpdate<T>): SQL =>\n objectEntries(update).reduce(\n (currentUpdateQuery, [op, value]) => {\n switch (op) {\n case '$set':\n return buildSetQuery(value, currentUpdateQuery);\n case '$unset':\n return buildUnsetQuery(value, currentUpdateQuery);\n case '$inc':\n return buildIncQuery(value, currentUpdateQuery);\n case '$push':\n return buildPushQuery(value, currentUpdateQuery);\n default:\n 
return currentUpdateQuery;\n }\n },\n SQL`data`,\n );\n\nexport const buildSetQuery = <T>(set: $set<T>, currentUpdateQuery: SQL): SQL =>\n SQL`${currentUpdateQuery} || ${JSONSerializer.serialize(set)}::jsonb`;\n\nexport const buildUnsetQuery = <T>(\n unset: $unset<T>,\n currentUpdateQuery: SQL,\n): SQL =>\n SQL`${currentUpdateQuery} - ${Object.keys(unset)\n .map((k) => `{${k}}`)\n .join(', ')}`;\n\nexport const buildIncQuery = <T>(\n inc: $inc<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(inc)) {\n currentUpdateQuery =\n typeof value === 'bigint'\n ? SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb((COALESCE((data->>'${SQL.plain(key)}')::BIGINT, 0) + ${value})::TEXT), true)`\n : SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb(COALESCE((data->>'${SQL.plain(key)}')::NUMERIC, 0) + ${value}), true)`;\n }\n return currentUpdateQuery;\n};\n\nexport const buildPushQuery = <T>(\n push: $push<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(push)) {\n const serializedValue = JSONSerializer.serialize([value]);\n currentUpdateQuery = SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', (coalesce(data->'${SQL.plain(key)}', '[]'::jsonb) || ${serializedValue}::jsonb), true)`;\n }\n return currentUpdateQuery;\n};\n","import { dumbo } from '@event-driven-io/dumbo';\nimport {\n pgDatabaseDriver as dumboDriver,\n getDatabaseNameOrDefault,\n PgDriverType,\n type PgConnection,\n} from '@event-driven-io/dumbo/pg';\nimport pg from 'pg';\nimport {\n PongoCollectionSchemaComponent,\n PongoDatabase,\n pongoDatabaseDriverRegistry,\n PongoDatabaseSchemaComponent,\n pongoSchema,\n type PongoDatabaseDriver,\n type PongoDatabaseDriverOptions,\n type PongoDb,\n} from '../../../core';\nimport {\n pongoCollectionPostgreSQLMigrations,\n postgresSQLBuilder,\n} from '../core';\n\nexport type PgPongoClientOptions =\n | PooledPongoClientOptions\n | NotPooledPongoOptions;\n\nexport type PooledPongoClientOptions =\n | {\n pool: pg.Pool;\n }\n | {\n pooled: true;\n }\n | {\n pool: pg.Pool;\n pooled: true;\n }\n | object;\n\nexport type NotPooledPongoOptions =\n | {\n client: pg.Client;\n }\n | {\n pooled: false;\n }\n | {\n client: pg.Client;\n pooled: false;\n }\n | {\n connection: PgConnection;\n pooled?: false;\n };\n\ntype PgDatabaseDriverOptions =\n PongoDatabaseDriverOptions<PgPongoClientOptions> & {\n databaseName?: string | undefined;\n connectionString: string;\n };\n\nconst pgDatabaseDriver: PongoDatabaseDriver<\n PongoDb<PgDriverType>,\n PgDatabaseDriverOptions\n> = {\n driverType: PgDriverType,\n databaseFactory: (options) => {\n const databaseName =\n options.databaseName ??\n getDatabaseNameOrDefault(options.connectionString);\n\n return PongoDatabase({\n ...options,\n pool: dumbo({\n connectionString: options.connectionString,\n driver: dumboDriver,\n ...options.connectionOptions,\n }),\n schemaComponent: PongoDatabaseSchemaComponent({\n driverType: PgDriverType,\n collectionFactory: (schema) =>\n PongoCollectionSchemaComponent({\n driverType: PgDriverType,\n definition: schema,\n migrationsOrSchemaComponents: {\n migrations: pongoCollectionPostgreSQLMigrations(schema.name),\n },\n sqlBuilder: postgresSQLBuilder(schema.name),\n }),\n definition:\n options.schema?.definition ?? pongoSchema.db(databaseName, {}),\n }),\n databaseName,\n });\n },\n getDatabaseNameOrDefault: (options) => {\n return (\n options.databaseName ?? 
getDatabaseNameOrDefault(options.connectionString)\n );\n },\n defaultConnectionString: 'postgresql://localhost:5432/postgres',\n};\n\nexport const usePgDatabaseDriver = () => {\n pongoDatabaseDriverRegistry.register(PgDriverType, pgDatabaseDriver);\n};\n\nusePgDatabaseDriver();\n\nexport { pgDatabaseDriver as databaseDriver, pgDatabaseDriver as pgDriver };\n"]}
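Note on the sources embedded in this map: they contain the PostgreSQL SQL builder, which translates Mongo-style filters into JSONB containment / jsonb_path_exists checks (constructFilterQuery, handleOperator) and folds update operators ($set, $unset, $inc, $push) into a single jsonb expression (buildUpdateQuery). A rough, non-authoritative illustration of the generated SQL, based only on the code above; the User type and the PongoFilter/PongoUpdate imports from the package root are assumptions, and actual output binds values as parameters via the SQL tag rather than inlining literals:

    import type { PongoFilter, PongoUpdate } from '@event-driven-io/pongo';

    // Hypothetical document shape used only for this sketch.
    type User = { _id: string; name: string; age: number; status?: string; visits?: number };

    const filter: PongoFilter<User> = { name: 'Anita', age: { $gt: 18 } };
    // constructFilterQuery(filter) renders roughly as:
    //   (data @> '{"name":"Anita"}'::jsonb
    //    OR jsonb_path_exists(data, '$.name[*] ? (@ == "Anita")'))
    //   AND data ->> 'age' > 18
    // (scalar and JSON values are passed as bound parameters, not literals).

    const update: PongoUpdate<User> = { $set: { status: 'active' }, $inc: { visits: 1 } };
    // buildUpdateQuery(update) folds both operators into one jsonb expression, roughly:
    //   jsonb_set(data || '{"status":"active"}'::jsonb, '{visits}',
    //             to_jsonb(COALESCE((data->>'visits')::NUMERIC, 0) + 1), true)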
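The last embedded source registers the PostgreSQL driver: importing it runs usePgDatabaseDriver(), which registers pgDatabaseDriver in pongoDatabaseDriverRegistry under PgDriverType, and databaseFactory builds a PongoDatabase backed by a dumbo pool. A minimal usage sketch, assuming the './pg' subpath export implied by the dist/pg.* files in this diff and the PgDatabaseDriverOptions type above:

    // Sketch only: the import path and option shape are assumptions drawn from
    // the dist/pg.* files and the types embedded in this sourcemap.
    import { pgDriver, usePgDatabaseDriver } from '@event-driven-io/pongo/pg';

    // Importing the module already calls usePgDatabaseDriver(); calling it again
    // simply re-registers the same driver in pongoDatabaseDriverRegistry.
    usePgDatabaseDriver();

    // databaseFactory falls back to getDatabaseNameOrDefault(connectionString)
    // when databaseName is not provided and wires up the PostgreSQL schema
    // components and SQL builder for each collection.
    const db = pgDriver.databaseFactory({
      connectionString: 'postgresql://localhost:5432/postgres',
      connectionOptions: {}, // empty object: let the driver default to a pooled pg connection
    });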