@event-driven-io/pongo 0.17.0-beta.33 → 0.17.0-beta.35
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-EQ3T4XHT.cjs +1583 -0
- package/dist/chunk-EQ3T4XHT.cjs.map +1 -0
- package/dist/{chunk-TTZGGAZV.js → chunk-H6YIW5C5.js} +69 -2
- package/dist/chunk-H6YIW5C5.js.map +1 -0
- package/dist/{chunk-A4DCNQJR.cjs → chunk-NTHMSHNG.cjs} +79 -12
- package/dist/chunk-NTHMSHNG.cjs.map +1 -0
- package/dist/{chunk-H637RRXS.js → chunk-NWYI26VT.js} +1310 -632
- package/dist/chunk-NWYI26VT.js.map +1 -0
- package/dist/cli.cjs +13 -13
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.js +2 -2
- package/dist/cli.js.map +1 -1
- package/dist/cloudflare.cjs +10 -10
- package/dist/cloudflare.d.cts +3 -2
- package/dist/cloudflare.d.ts +3 -2
- package/dist/cloudflare.js +2 -2
- package/dist/{index-gHRYr05w.d.ts → index-BfBszcG4.d.ts} +1 -1
- package/dist/{index-DxHXL62G.d.cts → index-a-fxODW1.d.cts} +1 -1
- package/dist/index.cjs +26 -2
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +38 -16
- package/dist/index.d.ts +38 -16
- package/dist/index.js +25 -1
- package/dist/index.js.map +1 -1
- package/dist/pg.cjs +102 -16
- package/dist/pg.cjs.map +1 -1
- package/dist/pg.d.cts +2 -1
- package/dist/pg.d.ts +2 -1
- package/dist/pg.js +87 -1
- package/dist/pg.js.map +1 -1
- package/dist/{pongoCollectionSchemaComponent-B5Oatwu0.d.cts → pongoTransactionCache-CRuPGag3.d.cts} +155 -35
- package/dist/{pongoCollectionSchemaComponent-B5Oatwu0.d.ts → pongoTransactionCache-CRuPGag3.d.ts} +155 -35
- package/dist/shim.cjs +5 -5
- package/dist/shim.d.cts +2 -1
- package/dist/shim.d.ts +2 -1
- package/dist/shim.js +1 -1
- package/dist/sqlite3.cjs +10 -10
- package/dist/sqlite3.d.cts +3 -2
- package/dist/sqlite3.d.ts +3 -2
- package/dist/sqlite3.js +2 -2
- package/package.json +20 -18
- package/dist/chunk-A4DCNQJR.cjs.map +0 -1
- package/dist/chunk-BZRKCNRY.cjs +0 -905
- package/dist/chunk-BZRKCNRY.cjs.map +0 -1
- package/dist/chunk-H637RRXS.js.map +0 -1
- package/dist/chunk-TTZGGAZV.js.map +0 -1
package/dist/index.js
CHANGED
@@ -2,6 +2,7 @@ import {
   ConcurrencyError,
   DOCUMENT_DOES_NOT_EXIST,
   DOCUMENT_EXISTS,
+  DocumentCommandHandler,
   NO_CONCURRENCY_CHECK,
   ObjectId,
   OperatorMap,
@@ -15,26 +16,37 @@ import {
   deepEquals,
   expectedVersion,
   expectedVersionValue,
+  getIdsFromIdOnlyFilter,
   hasOperators,
+  idFromFilter,
+  identityMapCache,
   isEquatable,
   isGeneralExpectedDocumentVersion,
   isNumber,
   isOperator,
   isString,
+  lruCache,
+  mapAsync,
+  mapParallel,
+  mapSequential,
+  noopCacheProvider,
   objectEntries,
   operationResult,
+  pongoCache,
+  pongoCacheWrapper,
   pongoClient,
   pongoCollection,
   pongoDriverRegistry as pongoDriverRegistry2,
   pongoSchema,
   pongoSession,
   pongoTransaction,
+  pongoTransactionCache,
   proxyClientWithSchema,
   proxyPongoDbWithSchema,
   toClientSchemaMetadata,
   toDbSchemaMetadata,
   transactionExecutorOrDefault
-} from "./chunk-
+} from "./chunk-NWYI26VT.js";
 
 // src/index.ts
 pongoDriverRegistry.register(`PostgreSQL:pg`, () => loadPongoClient("pg"));
@@ -63,6 +75,7 @@ export {
   ConcurrencyError,
   DOCUMENT_DOES_NOT_EXIST,
   DOCUMENT_EXISTS,
+  DocumentCommandHandler,
   NO_CONCURRENCY_CHECK,
   ObjectId,
   OperatorMap,
@@ -76,21 +89,32 @@ export {
   deepEquals,
   expectedVersion,
   expectedVersionValue,
+  getIdsFromIdOnlyFilter,
   hasOperators,
+  idFromFilter,
+  identityMapCache,
   isEquatable,
   isGeneralExpectedDocumentVersion,
   isNumber,
   isOperator,
   isString,
   loadPongoClient,
+  lruCache,
+  mapAsync,
+  mapParallel,
+  mapSequential,
+  noopCacheProvider,
   objectEntries,
   operationResult,
+  pongoCache,
+  pongoCacheWrapper,
   pongoClient,
   pongoCollection,
   pongoDriverRegistry2 as pongoDriverRegistry,
   pongoSchema,
   pongoSession,
   pongoTransaction,
+  pongoTransactionCache,
   proxyClientWithSchema,
   proxyPongoDbWithSchema,
   toClientSchemaMetadata,
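Taken together, the index.js changes above re-export a new set of helpers from the package root: DocumentCommandHandler plus the cache utilities (identityMapCache, lruCache, noopCacheProvider, pongoCache, pongoCacheWrapper, pongoTransactionCache), the mapping helpers (mapAsync, mapParallel, mapSequential), and the filter helpers (getIdsFromIdOnlyFilter, idFromFilter). A minimal import sketch; the names come straight from the export list above, but treating the package root entry as re-exporting them is an assumption rather than something this diff documents:

import {
  DocumentCommandHandler,
  getIdsFromIdOnlyFilter,
  idFromFilter,
  identityMapCache,
  lruCache,
  mapAsync,
  mapParallel,
  mapSequential,
  noopCacheProvider,
  pongoCache,
  pongoCacheWrapper,
  pongoTransactionCache,
} from '@event-driven-io/pongo'; // assumed root entry point; names taken from the dist/index.js exports above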
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
- (previous single-line source map for dist/index.js)
+ (regenerated single-line source map: sources and sourcesContent still embed src/index.ts, i.e. the pongoDriverRegistry registrations for `PostgreSQL:pg`, `SQLite:sqlite3`, and `SQLite:d1` plus the loadPongoClient dynamic-import loader, while the mappings field is regenerated)
package/dist/pg.cjs
CHANGED
@@ -9,7 +9,7 @@
 
 
 
-var
+var _chunkEQ3T4XHTcjs = require('./chunk-EQ3T4XHT.cjs');
 
 // src/storage/postgresql/core/sqlBuilder/index.ts
 var _dumbo = require('@event-driven-io/dumbo');
@@ -35,7 +35,7 @@ var handleOperator = (path, operator, value, serializer) => {
 case "$lte":
 case "$ne": {
 const jsonPath = _dumbo.SQL.plain(path.split(".").join(","));
-return _dumbo.SQL`data ->> '${jsonPath}' ${_dumbo.SQL.plain(
+return _dumbo.SQL`data ->> '${jsonPath}' ${_dumbo.SQL.plain(_chunkEQ3T4XHTcjs.OperatorMap[operator])} ${value}`;
 }
 case "$in": {
 const jsonPath = `{${path.split(".").join(",")}}`;
@@ -46,7 +46,7 @@ var handleOperator = (path, operator, value, serializer) => {
 return _dumbo.SQL`data #>> ${jsonPath} != ALL (${value})`;
 }
 case "$elemMatch": {
-const subQuery =
+const subQuery = _chunkEQ3T4XHTcjs.objectEntries.call(void 0, value).map(
 ([subKey, subValue]) => `@."${subKey}" == ${serializer.serialize(subValue)}`
 ).join(" && ");
 return _dumbo.SQL`jsonb_path_exists(data, '$.${_dumbo.SQL.plain(path)}[*] ? (${_dumbo.SQL.plain(subQuery)})')`;
@@ -72,7 +72,7 @@ var handleMetadataOperator = (fieldName, operator, value) => {
 case "$lt":
 case "$lte":
 case "$ne":
-return _dumbo.SQL`${_dumbo.SQL.plain(fieldName)} ${_dumbo.SQL.plain(
+return _dumbo.SQL`${_dumbo.SQL.plain(fieldName)} ${_dumbo.SQL.plain(_chunkEQ3T4XHTcjs.OperatorMap[operator])} ${value}`;
 case "$in":
 return _dumbo.SQL`${_dumbo.SQL.plain(fieldName)} = ANY (${value})`;
 case "$nin":
@@ -92,12 +92,12 @@ var constructFilterQuery = (filter, serializer) => _dumbo.SQL.merge(
 ` ${AND} `
 );
 var constructComplexFilterQuery = (key, value, serializer) => {
-const isEquality = !
+const isEquality = !_chunkEQ3T4XHTcjs.hasOperators.call(void 0, value);
 return _dumbo.SQL.merge(
-
+_chunkEQ3T4XHTcjs.objectEntries.call(void 0, value).map(
 ([nestedKey, val]) => isEquality ? handleOperator(
 `${key}.${nestedKey}`,
-
+_chunkEQ3T4XHTcjs.QueryOperators.$eq,
 val,
 serializer
 ) : handleOperator(key, nestedKey, val, serializer)
@@ -109,7 +109,7 @@ var isRecord = (value) => value !== null && typeof value === "object" && !Array.
 
 // src/storage/postgresql/core/sqlBuilder/update/index.ts
 
-var buildUpdateQuery = (update, serializer) =>
+var buildUpdateQuery = (update, serializer) => _chunkEQ3T4XHTcjs.objectEntries.call(void 0, update).reduce(
 (currentUpdateQuery, [op, value]) => {
 switch (op) {
 case "$set":
@@ -182,7 +182,7 @@ var postgresSQLBuilder = (collectionName, serializer) => ({
 RETURNING _id;`;
 },
 updateOne: (filter, update, options) => {
-const expectedVersion =
+const expectedVersion = _chunkEQ3T4XHTcjs.expectedVersionValue.call(void 0, _optionalChain([options, 'optionalAccess', _ => _.expectedVersion]));
 const expectedVersionUpdate = expectedVersion != null ? _dumbo.SQL`AND ${_dumbo.SQL.identifier(collectionName)}._version = ${expectedVersion}` : _dumbo.SQL``;
 const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter, serializer);
 const updateQuery = _dumbo.isSQL.call(void 0, update) ? update : buildUpdateQuery(update, serializer);
@@ -211,7 +211,7 @@ var postgresSQLBuilder = (collectionName, serializer) => ({
 ON existing._id = updated._id;`;
 },
 replaceOne: (filter, document, options) => {
-const expectedVersion =
+const expectedVersion = _chunkEQ3T4XHTcjs.expectedVersionValue.call(void 0, _optionalChain([options, 'optionalAccess', _2 => _2.expectedVersion]));
 const expectedVersionUpdate = expectedVersion != null ? _dumbo.SQL`AND ${_dumbo.SQL.identifier(collectionName)}._version = ${expectedVersion}` : _dumbo.SQL``;
 const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter, serializer);
 return _dumbo.SQL`
@@ -249,7 +249,7 @@ var postgresSQLBuilder = (collectionName, serializer) => ({
 ${where(filterQuery)};`;
 },
 deleteOne: (filter, options) => {
-const expectedVersion =
+const expectedVersion = _chunkEQ3T4XHTcjs.expectedVersionValue.call(void 0, _optionalChain([options, 'optionalAccess', _3 => _3.expectedVersion]));
 const expectedVersionUpdate = expectedVersion != null ? _dumbo.SQL`AND ${_dumbo.SQL.identifier(collectionName)}._version = ${expectedVersion}` : _dumbo.SQL``;
 const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter, serializer);
 return _dumbo.SQL`
@@ -276,6 +276,92 @@ var postgresSQLBuilder = (collectionName, serializer) => ({
 const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter, serializer);
 return _dumbo.SQL`DELETE FROM ${_dumbo.SQL.identifier(collectionName)} ${where(filterQuery)}`;
 },
+replaceMany: (documents) => {
+const hasVersions = documents.some(
+(d) => "_version" in d && d._version !== void 0
+);
+if (hasVersions) {
+const values2 = _dumbo.SQL.merge(
+documents.map(
+(d) => _dumbo.SQL`(${d._id}::text, ${serializer.serialize(d)}::jsonb, ${_nullishCoalesce(d._version, () => ( 0n))}::bigint)`
+),
+","
+);
+return _dumbo.SQL`
+WITH replacements(_id, data, expected_version) AS (
+VALUES ${values2}
+)
+UPDATE ${_dumbo.SQL.identifier(collectionName)} t
+SET
+data = r.data
+|| jsonb_build_object('_id', t._id)
+|| jsonb_build_object('_version', (t._version + 1)::text),
+_version = t._version + 1
+FROM replacements r
+WHERE t._id = r._id AND t._version = r.expected_version
+RETURNING t._id, t._version AS version;`;
+}
+const values = _dumbo.SQL.merge(
+documents.map(
+(d) => _dumbo.SQL`(${d._id}::text, ${serializer.serialize(d)}::jsonb)`
+),
+","
+);
+return _dumbo.SQL`
+WITH replacements(_id, data) AS (
+VALUES ${values}
+)
+UPDATE ${_dumbo.SQL.identifier(collectionName)} t
+SET
+data = r.data
+|| jsonb_build_object('_id', t._id)
+|| jsonb_build_object('_version', (t._version + 1)::text),
+_version = t._version + 1
+FROM replacements r
+WHERE t._id = r._id
+RETURNING t._id, t._version AS version;`;
+},
+deleteManyByIds: (ids) => {
+const hasVersions = ids.some((d) => d._version !== void 0);
+if (hasVersions) {
+const values2 = _dumbo.SQL.merge(
+ids.map((d) => _dumbo.SQL`(${d._id}::text, ${_nullishCoalesce(d._version, () => ( 0n))}::bigint)`),
+","
+);
+return _dumbo.SQL`
+WITH targets(_id, expected_version) AS (
+VALUES ${values2}
+),
+deleted AS (
+DELETE FROM ${_dumbo.SQL.identifier(collectionName)} t
+USING targets r
+WHERE t._id = r._id AND t._version = r.expected_version
+RETURNING t._id
+)
+SELECT r._id,
+CASE WHEN d._id IS NOT NULL THEN 1 ELSE 0 END as deleted
+FROM targets r
+LEFT JOIN deleted d ON r._id = d._id;`;
+}
+const values = _dumbo.SQL.merge(
+ids.map((d) => _dumbo.SQL`(${d._id}::text)`),
+","
+);
+return _dumbo.SQL`
+WITH targets(_id) AS (
+VALUES ${values}
+),
+deleted AS (
+DELETE FROM ${_dumbo.SQL.identifier(collectionName)} t
+USING targets r
+WHERE t._id = r._id
+RETURNING t._id
+)
+SELECT r._id,
+CASE WHEN d._id IS NOT NULL THEN 1 ELSE 0 END as deleted
+FROM targets r
+LEFT JOIN deleted d ON r._id = d._id;`;
+},
 findOne: (filter) => {
 const filterQuery = _dumbo.isSQL.call(void 0, filter) ? filter : constructFilterQuery(filter, serializer);
 return _dumbo.SQL`SELECT data, _version FROM ${_dumbo.SQL.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;
@@ -315,7 +401,7 @@ var pgPongoDriver = {
 driverType: _pg.PgDriverType,
 databaseFactory: (options) => {
 const databaseName = _nullishCoalesce(_nullishCoalesce(options.databaseName, () => ( _pg.postgreSQLMetadata.parseDatabaseName(options.connectionString))), () => ( _pg.postgreSQLMetadata.defaultDatabaseName));
-return
+return _chunkEQ3T4XHTcjs.PongoDatabase.call(void 0, {
 ...options,
 pool: _dumbo.dumbo.call(void 0, {
 connectionString: options.connectionString,
@@ -323,9 +409,9 @@ var pgPongoDriver = {
 ...options.connectionOptions,
 serialization: { serializer: options.serializer }
 }),
-schemaComponent:
+schemaComponent: _chunkEQ3T4XHTcjs.PongoDatabaseSchemaComponent.call(void 0, {
 driverType: _pg.PgDriverType,
-collectionFactory: (schema) =>
+collectionFactory: (schema) => _chunkEQ3T4XHTcjs.PongoCollectionSchemaComponent.call(void 0, {
 driverType: _pg.PgDriverType,
 definition: schema,
 migrationsOrSchemaComponents: {
@@ -336,14 +422,14 @@ var pgPongoDriver = {
 _nullishCoalesce(_optionalChain([options, 'access', _6 => _6.serialization, 'optionalAccess', _7 => _7.serializer]), () => ( _dumbo.JSONSerializer))
 )
 }),
-definition: _nullishCoalesce(_optionalChain([options, 'access', _8 => _8.schema, 'optionalAccess', _9 => _9.definition]), () => (
+definition: _nullishCoalesce(_optionalChain([options, 'access', _8 => _8.schema, 'optionalAccess', _9 => _9.definition]), () => ( _chunkEQ3T4XHTcjs.pongoSchema.db(databaseName, {})))
 }),
 databaseName
 });
 }
 };
 var usePgPongoDriver = () => {
-
+_chunkEQ3T4XHTcjs.pongoDriverRegistry.register(_pg.PgDriverType, pgPongoDriver);
 };
 usePgPongoDriver();
 
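The large @@ -276,6 +276,92 @@ hunk above adds two batch builders to postgresSQLBuilder: replaceMany builds a VALUES-driven UPDATE ... FROM replacements (guarded by t._version = r.expected_version whenever any document carries a _version), and deleteManyByIds builds a VALUES-driven DELETE ... USING targets with the same optional optimistic-concurrency guard. A rough usage sketch, assuming the builder is reachable from the package's pg entry point (the import path, collection name, and example documents are placeholders, not something this diff confirms):

import { JSONSerializer } from '@event-driven-io/dumbo';
// Assumed import path; the diff only shows postgresSQLBuilder's definition inside dist/pg.cjs.
import { postgresSQLBuilder } from '@event-driven-io/pongo/pg';

const builder = postgresSQLBuilder('users', JSONSerializer);

// Documents carrying _version take the optimistic branch: the generated UPDATE
// only applies where the stored _version equals the expected one.
const replaceSql = builder.replaceMany([
  { _id: 'user-1', name: 'Alice', _version: 3n },
]);

// Ids without _version take the unguarded branch: a plain DELETE ... USING targets,
// reporting per id whether a row was actually deleted.
const deleteSql = builder.deleteManyByIds([{ _id: 'user-1' }, { _id: 'user-2' }]);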
package/dist/pg.cjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/pg.cjs","../src/storage/postgresql/core/sqlBuilder/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/queryOperators.ts","../src/storage/postgresql/core/sqlBuilder/update/index.ts","../src/storage/postgresql/pg/index.ts"],"names":["SQL"],"mappings":"AAAA;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,wDAA6B;AAC7B;AACA;ACZA,+CAAyC;ADczC;AACA;AEfA;AFiBA;AACA;AGlBA;AAGO,IAAM,eAAA,EAAiB,CAC5B,IAAA,EACA,QAAA,EACA,KAAA,EACA,UAAA,EAAA,GACQ;AACR,EAAA,GAAA,CAAI,KAAA,IAAS,MAAA,GAAS,KAAA,IAAS,UAAA,EAAY;AACzC,IAAA,OAAO,sBAAA,CAAuB,IAAA,EAAM,QAAA,EAAU,KAAK,CAAA;AAAA,EACrD;AAEA,EAAA,OAAA,CAAQ,QAAA,EAAU;AAAA,IAChB,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,WAAA,EAAa,UAAA,CAAW,SAAA,CAAU,iBAAA,CAAkB,IAAA,EAAM,KAAK,CAAC,CAAA;AACtE,MAAA,MAAM,gBAAA,EAAkB,UAAA,CAAW,SAAA,CAAU,KAAK,CAAA;AAElD,MAAA,OAAO,UAAA,CAAA,SAAA,EAAe,UAAU,CAAA,sCAAA,EAAyC,UAAA,CAAI,KAAA,CAAM,IAAI,CAAC,CAAA,YAAA,EAAe,UAAA,CAAI,KAAA,CAAM,eAAe,CAAC,CAAA,IAAA,CAAA;AAAA,IACnI;AAAA,IACA,KAAK,KAAA;AAAA,IACL,KAAK,MAAA;AAAA,IACL,KAAK,KAAA;AAAA,IACL,KAAK,MAAA;AAAA,IACL,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,SAAA,EAAW,UAAA,CAAI,KAAA,CAAM,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA,CAAE,IAAA,CAAK,GAAG,CAAC,CAAA;AAEpD,MAAA,OAAO,UAAA,CAAA,UAAA,EAAgB,QAAQ,CAAA,EAAA,EAAK,UAAA,CAAI,KAAA,CAAM,6BAAA,CAAY,QAAQ,CAAC,CAAC,CAAA,CAAA,EAAI,KAAK,CAAA,CAAA;AAC/E,IAAA;AACY,IAAA;AACoC,MAAA;AAEa,MAAA;AAC7D,IAAA;AACa,IAAA;AACmC,MAAA;AAEc,MAAA;AAC9D,IAAA;AACmB,IAAA;AAEd,MAAA;AAEqD,QAAA;AAE1C,MAAA;AAC6D,MAAA;AAC7E,IAAA;AACa,IAAA;AAC2D,MAAA;AACvC,MAAA;AACjC,IAAA;AACc,IAAA;AACkC,MAAA;AAEc,MAAA;AAC9D,IAAA;AACA,IAAA;AACqD,MAAA;AACvD,EAAA;AACF;AAMU;AACU,EAAA;AACX,IAAA;AACyC,MAAA;AACzC,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AAC2E,MAAA;AAC3E,IAAA;AAC2D,MAAA;AAC3D,IAAA;AAC4D,MAAA;AACjE,IAAA;AACqD,MAAA;AACvD,EAAA;AACF;AASkB;AHTkE;AACA;AEzExE;AAMN;AACqB,EAAA;AAEuB,IAAA;AAE9C,EAAA;AACO,EAAA;AACT;AAMQ;AAC8B,EAAA;AAE3B,EAAA;AACY,IAAA;AAEf,MAAA;AACqB,QAAA;AACJ,QAAA;AACf,QAAA;AACA,QAAA;AAE4C,MAAA;AACpD,IAAA;AACO,IAAA;AACT,EAAA;AACF;AAGwD;AF0D4B;AACA;AI5GhE;AAcI;AACiB,EAAA;AACvB,IAAA;AACL,MAAA;AACuD,QAAA;AACvD,MAAA;AAC6C,QAAA;AAC7C,MAAA;AAC2C,QAAA;AAC3C,MAAA;AACwD,QAAA;AAC7D,MAAA;AACS,QAAA;AACX,IAAA;AACF,EAAA;AACAA,EAAAA;AACF;AAMc;AAMgB;AAOtB;AACwC,EAAA;AAGxB,IAAA;AAExB,EAAA;AACO,EAAA;AACT;AAMU;AACyC,EAAA;AACK,IAAA;AACwB,IAAA;AAC9E,EAAA;AACO,EAAA;AACT;AJyEoF;AACA;AC9HlFA;AAC6D,+BAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAAA;AAWgB;AACX,EAAA;AACjC,IAAA;AAChC,EAAA;AACH;AAKiC;AAC6B,EAAA;AACY,EAAA;AACtB,IAAA;AAC5B,IAAA;AACiB,IAAA;AAE9BA,IAAAA;AACuC,kBAAA;AACL,cAAA;AAC3C,EAAA;AAC4E,EAAA;AACvD,IAAA;AACP,MAAA;AAE2D,QAAA;AACrE,MAAA;AACA,MAAA;AACF,IAAA;AAEOA,IAAAA;AACwE,kBAAA;AAAM;AAAA,oBAAA;AAGvF,EAAA;AAKU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAKa,IAAA;AAGM,IAAA;AAEhCA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIpB,eAAA;AAAA;AAE+B,iBAAA;AAAe;AAAA;AAGxB,cAAA;AACK,kBAAA;AAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAUvF,EAAA;AAKU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAKa,IAAA;AAE1BA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIpB,eAAA;AAAA;AAEmCA,iBAAAA;AAA8B;AAAA;AAG3C,cAAA;AACK,kBAAA;AAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAUvF,EAAA;AAIU,EAAA;AAGyB,IAAA;AAGM,IAAA;AAEhCA,IAAAA;AACkC,aAAA;AAAA;AAEjB,eAAA;AAAA;AAEF,MAAA;AACxB,EAAA;AAIU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAKa,IAAA;AAE1BA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIf,oBAAA;AAAA;AAEiB,cAAA;AACnB,kBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAShD,EAAA;AACsD,EAAA;AAGnB,IAAA;AAE4C,IAAA;AAC/E,EAAA;AACmD,EAAA;AA
GhB,IAAA;AAEyC,IAAA;AAC5E,EAAA;AACuE,EAAA;AAGpC,IAAA;AACX,IAAA;AAEhB,IAAA;AAC2D,MAAA;AACjE,IAAA;AAE6B,IAAA;AAET,IAAA;AACoB,MAAA;AACxC,IAAA;AAEmB,IAAA;AACqB,MAAA;AACxC,IAAA;AAEmC,IAAA;AACrC,EAAA;AAC0D,EAAA;AAG/B,IAAA;AACoD,IAAA;AAC/E,EAAA;AAEkD,EAAA;AAEjB,EAAA;AACnC;AAKU;ADyD0E;AACA;AKlT9C;AACtC;AACmB;AACjB;AACA;AAEK;AA0DH;AACU,EAAA;AACkB,EAAA;AAGP,IAAA;AAGA,IAAA;AAChB,MAAA;AACS,MAAA;AACgB,QAAA;AAClB,QAAA;AACG,QAAA;AACqC,QAAA;AACjD,MAAA;AAC6C,MAAA;AAChC,QAAA;AAEqB,QAAA;AACjB,UAAA;AACA,UAAA;AACkB,UAAA;AAC+B,YAAA;AAC7D,UAAA;AACY,UAAA;AACH,YAAA;AAC8B,6CAAA;AACvC,UAAA;AACD,QAAA;AAE4D,QAAA;AAChE,MAAA;AACD,MAAA;AACD,IAAA;AACH,EAAA;AACF;AAEsC;AACoB,EAAA;AAC1D;AAEiB;ALkPmE;AACA;AACA;AACA;AACA;AACA;AACA","file":"/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/pg.cjs","sourcesContent":[null,"import type { JSONSerializer } from '@event-driven-io/dumbo';\nimport { isSQL, SQL, sqlMigration } from '@event-driven-io/dumbo';\nimport {\n expectedVersionValue,\n type DeleteOneOptions,\n type FindOptions,\n type OptionalUnlessRequiredIdAndVersion,\n type PongoCollectionSQLBuilder,\n type PongoFilter,\n type PongoUpdate,\n type ReplaceOneOptions,\n type UpdateOneOptions,\n type WithoutId,\n} from '../../../../core';\nimport { constructFilterQuery } from './filter';\nimport { buildUpdateQuery } from './update';\n\nconst createCollection = (collectionName: string): SQL =>\n SQL`\n CREATE TABLE IF NOT EXISTS ${SQL.identifier(collectionName)} (\n _id TEXT PRIMARY KEY, \n data JSONB NOT NULL, \n metadata JSONB NOT NULL DEFAULT '{}',\n _version BIGINT NOT NULL DEFAULT 1,\n _partition TEXT NOT NULL DEFAULT 'png_global',\n _archived BOOLEAN NOT NULL DEFAULT FALSE,\n _created TIMESTAMPTZ NOT NULL DEFAULT now(),\n _updated TIMESTAMPTZ NOT NULL DEFAULT now()\n )`;\n\nexport const pongoCollectionPostgreSQLMigrations = (collectionName: string) => [\n sqlMigration(`pongoCollection:${collectionName}:001:createtable`, [\n createCollection(collectionName),\n ]),\n];\n\nexport const postgresSQLBuilder = (\n collectionName: string,\n serializer: JSONSerializer,\n): PongoCollectionSQLBuilder => ({\n createCollection: (): SQL => createCollection(collectionName),\n insertOne: <T>(document: OptionalUnlessRequiredIdAndVersion<T>): SQL => {\n const serialized = serializer.serialize(document);\n const id = document._id;\n const version = document._version ?? 1n;\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) \n VALUES (${id}, ${serialized}, ${version}) ON CONFLICT(_id) DO NOTHING;`;\n },\n insertMany: <T>(documents: OptionalUnlessRequiredIdAndVersion<T>[]): SQL => {\n const values = SQL.merge(\n documents.map(\n (doc) =>\n SQL`(${doc._id}, ${serializer.serialize(doc)}, ${doc._version ?? 1n})`,\n ),\n ',',\n );\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) VALUES ${values}\n ON CONFLICT(_id) DO NOTHING\n RETURNING _id;`;\n },\n updateOne: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n options?: UpdateOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n const updateQuery = isSQL(update)\n ? 
update\n : buildUpdateQuery(update, serializer);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n replaceOne: <T>(\n filter: PongoFilter<T> | SQL,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${serializer.serialize(document)} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n updateMany: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n ): SQL => {\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n const updateQuery = isSQL(update)\n ? update\n : buildUpdateQuery(update, serializer);\n\n return SQL`\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n ${where(filterQuery)};`;\n },\n deleteOne: <T>(\n filter: PongoFilter<T> | SQL,\n options?: DeleteOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter)\n ? 
filter\n : constructFilterQuery(filter, serializer);\n\n return SQL`\n WITH existing AS (\n SELECT _id\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n deleted AS (\n DELETE FROM ${SQL.identifier(collectionName)}\n USING existing\n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id\n )\n SELECT \n existing._id,\n COUNT(existing._id) over() AS matched,\n COUNT(deleted._id) over() AS deleted\n FROM existing\n LEFT JOIN deleted \n ON existing._id = deleted._id;`;\n },\n deleteMany: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n\n return SQL`DELETE FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}`;\n },\n findOne: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n\n return SQL`SELECT data, _version FROM ${SQL.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;\n },\n find: <T>(filter: PongoFilter<T> | SQL, options?: FindOptions): SQL => {\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n const query: SQL[] = [];\n\n query.push(\n SQL`SELECT data, _version FROM ${SQL.identifier(collectionName)}`,\n );\n\n query.push(where(filterQuery));\n\n if (options?.limit) {\n query.push(SQL`LIMIT ${options.limit}`);\n }\n\n if (options?.skip) {\n query.push(SQL`OFFSET ${options.skip}`);\n }\n\n return SQL.merge([...query, SQL`;`]);\n },\n countDocuments: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = SQL.check.isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n return SQL`SELECT COUNT(1) as count FROM ${SQL.identifier(collectionName)} ${where(filterQuery)};`;\n },\n rename: (newName: string): SQL =>\n SQL`ALTER TABLE ${SQL.identifier(collectionName)} RENAME TO ${SQL.identifier(newName)};`,\n drop: (targetName: string = collectionName): SQL =>\n SQL`DROP TABLE IF EXISTS ${SQL.identifier(targetName)}`,\n});\n\nconst where = (filterQuery: SQL): SQL =>\n SQL.check.isEmpty(filterQuery)\n ? SQL.EMPTY\n : SQL.merge([SQL`WHERE `, filterQuery]);\n","import type { JSONSerializer } from '@event-driven-io/dumbo';\nimport { SQL } from '@event-driven-io/dumbo';\nimport {\n hasOperators,\n objectEntries,\n QueryOperators,\n type PongoFilter,\n} from '../../../../../core';\nimport { handleOperator } from './queryOperators';\n\nexport * from './queryOperators';\n\nconst AND = 'AND';\n\nexport const constructFilterQuery = <T>(\n filter: PongoFilter<T>,\n serializer: JSONSerializer,\n): SQL =>\n SQL.merge(\n Object.entries(filter).map(([key, value]) =>\n isRecord(value)\n ? constructComplexFilterQuery(key, value, serializer)\n : handleOperator(key, '$eq', value, serializer),\n ),\n ` ${AND} `,\n );\n\nconst constructComplexFilterQuery = (\n key: string,\n value: Record<string, unknown>,\n serializer: JSONSerializer,\n): SQL => {\n const isEquality = !hasOperators(value);\n\n return SQL.merge(\n objectEntries(value).map(([nestedKey, val]) =>\n isEquality\n ? 
handleOperator(\n `${key}.${nestedKey}`,\n QueryOperators.$eq,\n val,\n serializer,\n )\n : handleOperator(key, nestedKey, val, serializer),\n ),\n ` ${AND} `,\n );\n};\n\nconst isRecord = (value: unknown): value is Record<string, unknown> =>\n value !== null && typeof value === 'object' && !Array.isArray(value);\n","import type { JSONSerializer } from '@event-driven-io/dumbo';\nimport { SQL } from '@event-driven-io/dumbo';\nimport { objectEntries, OperatorMap } from '../../../../../core';\n\nexport const handleOperator = (\n path: string,\n operator: string,\n value: unknown,\n serializer: JSONSerializer,\n): SQL => {\n if (path === '_id' || path === '_version') {\n return handleMetadataOperator(path, operator, value);\n }\n\n switch (operator) {\n case '$eq': {\n const nestedPath = serializer.serialize(buildNestedObject(path, value));\n const serializedValue = serializer.serialize(value);\n\n return SQL`(data @> ${nestedPath}::jsonb OR jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (@ == ${SQL.plain(serializedValue)})'))`;\n }\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne': {\n const jsonPath = SQL.plain(path.split('.').join(','));\n\n return SQL`data ->> '${jsonPath}' ${SQL.plain(OperatorMap[operator])} ${value}`;\n }\n case '$in': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} = ANY (${value as unknown[]})`;\n }\n case '$nin': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} != ALL (${value as unknown[]})`;\n }\n case '$elemMatch': {\n const subQuery = objectEntries(value as Record<string, unknown>)\n .map(\n ([subKey, subValue]) =>\n `@.\"${subKey}\" == ${serializer.serialize(subValue)}`,\n )\n .join(' && ');\n return SQL`jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? 
(${SQL.plain(subQuery)})')`;\n }\n case '$all': {\n const nestedPath = serializer.serialize(buildNestedObject(path, value));\n return SQL`data @> ${nestedPath}::jsonb`;\n }\n case '$size': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`jsonb_array_length(data #> ${jsonPath}) = ${value}`;\n }\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst handleMetadataOperator = (\n fieldName: string,\n operator: string,\n value: unknown,\n): SQL => {\n switch (operator) {\n case '$eq':\n return SQL`${SQL.plain(fieldName)} = ${value}`;\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne':\n return SQL`${SQL.plain(fieldName)} ${SQL.plain(OperatorMap[operator])} ${value}`;\n case '$in':\n return SQL`${SQL.plain(fieldName)} = ANY (${value as unknown[]})`;\n case '$nin':\n return SQL`${SQL.plain(fieldName)} != ALL (${value as unknown[]})`;\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst buildNestedObject = (\n path: string,\n value: unknown,\n): Record<string, unknown> =>\n path\n .split('.')\n .reverse()\n .reduce((acc, key) => ({ [key]: acc }), value as Record<string, unknown>);\n","import type { JSONSerializer } from '@event-driven-io/dumbo';\nimport { SQL } from '@event-driven-io/dumbo';\nimport {\n objectEntries,\n type $inc,\n type $push,\n type $set,\n type $unset,\n type PongoUpdate,\n} from '../../../../../core';\n\nexport const buildUpdateQuery = <T>(\n update: PongoUpdate<T>,\n serializer: JSONSerializer,\n): SQL =>\n objectEntries(update).reduce(\n (currentUpdateQuery, [op, value]) => {\n switch (op) {\n case '$set':\n return buildSetQuery(value, currentUpdateQuery, serializer);\n case '$unset':\n return buildUnsetQuery(value, currentUpdateQuery);\n case '$inc':\n return buildIncQuery(value, currentUpdateQuery);\n case '$push':\n return buildPushQuery(value, currentUpdateQuery, serializer);\n default:\n return currentUpdateQuery;\n }\n },\n SQL`data`,\n );\n\nexport const buildSetQuery = <T>(\n set: $set<T>,\n currentUpdateQuery: SQL,\n serializer: JSONSerializer,\n): SQL => SQL`${currentUpdateQuery} || ${serializer.serialize(set)}::jsonb`;\n\nexport const buildUnsetQuery = <T>(\n unset: $unset<T>,\n currentUpdateQuery: SQL,\n): SQL =>\n SQL`${currentUpdateQuery} - ${Object.keys(unset)\n .map((k) => `{${k}}`)\n .join(', ')}`;\n\nexport const buildIncQuery = <T>(\n inc: $inc<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(inc)) {\n currentUpdateQuery =\n typeof value === 'bigint'\n ? 
SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb((COALESCE((data->>'${SQL.plain(key)}')::BIGINT, 0) + ${value})::TEXT), true)`\n : SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb(COALESCE((data->>'${SQL.plain(key)}')::NUMERIC, 0) + ${value}), true)`;\n }\n return currentUpdateQuery;\n};\n\nexport const buildPushQuery = <T>(\n push: $push<T>,\n currentUpdateQuery: SQL,\n serializer: JSONSerializer,\n): SQL => {\n for (const [key, value] of Object.entries(push)) {\n const serializedValue = serializer.serialize([value]);\n currentUpdateQuery = SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', (coalesce(data->'${SQL.plain(key)}', '[]'::jsonb) || ${serializedValue}::jsonb), true)`;\n }\n return currentUpdateQuery;\n};\n","import { dumbo, JSONSerializer } from '@event-driven-io/dumbo';\nimport {\n pgDumboDriver as dumboDriver,\n PgDriverType,\n postgreSQLMetadata,\n type PgConnection,\n} from '@event-driven-io/dumbo/pg';\nimport type pg from 'pg';\nimport {\n PongoCollectionSchemaComponent,\n PongoDatabase,\n PongoDatabaseSchemaComponent,\n pongoDriverRegistry,\n pongoSchema,\n type PongoDb,\n type PongoDriver,\n type PongoDriverOptions,\n} from '../../../core';\nimport {\n pongoCollectionPostgreSQLMigrations,\n postgresSQLBuilder,\n} from '../core';\n\nexport type PgPongoClientOptions =\n | PooledPongoClientOptions\n | NotPooledPongoOptions;\n\nexport type PooledPongoClientOptions =\n | {\n pool: pg.Pool;\n }\n | {\n pooled: true;\n }\n | {\n pool: pg.Pool;\n pooled: true;\n }\n | object;\n\nexport type NotPooledPongoOptions =\n | {\n client: pg.Client;\n }\n | {\n pooled: false;\n }\n | {\n client: pg.Client;\n pooled: false;\n }\n | {\n connection: PgConnection;\n pooled?: false;\n };\n\ntype PgDatabaseDriverOptions = PongoDriverOptions<PgPongoClientOptions> & {\n databaseName?: string | undefined;\n connectionString: string;\n};\n\nconst pgPongoDriver: PongoDriver<\n PongoDb<PgDriverType>,\n PgDatabaseDriverOptions\n> = {\n driverType: PgDriverType,\n databaseFactory: (options) => {\n const databaseName =\n options.databaseName ??\n postgreSQLMetadata.parseDatabaseName(options.connectionString) ??\n postgreSQLMetadata.defaultDatabaseName;\n\n return PongoDatabase({\n ...options,\n pool: dumbo({\n connectionString: options.connectionString,\n driver: dumboDriver,\n ...options.connectionOptions,\n serialization: { serializer: options.serializer },\n }),\n schemaComponent: PongoDatabaseSchemaComponent({\n driverType: PgDriverType,\n collectionFactory: (schema) =>\n PongoCollectionSchemaComponent({\n driverType: PgDriverType,\n definition: schema,\n migrationsOrSchemaComponents: {\n migrations: pongoCollectionPostgreSQLMigrations(schema.name),\n },\n sqlBuilder: postgresSQLBuilder(\n schema.name,\n options.serialization?.serializer ?? JSONSerializer,\n ),\n }),\n definition:\n options.schema?.definition ?? pongoSchema.db(databaseName, {}),\n }),\n databaseName,\n });\n },\n};\n\nexport const usePgPongoDriver = () => {\n pongoDriverRegistry.register(PgDriverType, pgPongoDriver);\n};\n\nusePgPongoDriver();\n\nexport { pgPongoDriver as pgDriver, pgPongoDriver as pongoDriver };\n"]}
|
|
1
|
+
{"version":3,"sources":["/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/pg.cjs","../src/storage/postgresql/core/sqlBuilder/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/queryOperators.ts","../src/storage/postgresql/core/sqlBuilder/update/index.ts","../src/storage/postgresql/pg/index.ts"],"names":["SQL"],"mappings":"AAAA;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,wDAA6B;AAC7B;AACA;ACZA,+CAAyC;ADczC;AACA;AEfA;AFiBA;AACA;AGlBA;AAGO,IAAM,eAAA,EAAiB,CAC5B,IAAA,EACA,QAAA,EACA,KAAA,EACA,UAAA,EAAA,GACQ;AACR,EAAA,GAAA,CAAI,KAAA,IAAS,MAAA,GAAS,KAAA,IAAS,UAAA,EAAY;AACzC,IAAA,OAAO,sBAAA,CAAuB,IAAA,EAAM,QAAA,EAAU,KAAK,CAAA;AAAA,EACrD;AAEA,EAAA,OAAA,CAAQ,QAAA,EAAU;AAAA,IAChB,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,WAAA,EAAa,UAAA,CAAW,SAAA,CAAU,iBAAA,CAAkB,IAAA,EAAM,KAAK,CAAC,CAAA;AACtE,MAAA,MAAM,gBAAA,EAAkB,UAAA,CAAW,SAAA,CAAU,KAAK,CAAA;AAElD,MAAA,OAAO,UAAA,CAAA,SAAA,EAAe,UAAU,CAAA,sCAAA,EAAyC,UAAA,CAAI,KAAA,CAAM,IAAI,CAAC,CAAA,YAAA,EAAe,UAAA,CAAI,KAAA,CAAM,eAAe,CAAC,CAAA,IAAA,CAAA;AAAA,IACnI;AAAA,IACA,KAAK,KAAA;AAAA,IACL,KAAK,MAAA;AAAA,IACL,KAAK,KAAA;AAAA,IACL,KAAK,MAAA;AAAA,IACL,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,SAAA,EAAW,UAAA,CAAI,KAAA,CAAM,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA,CAAE,IAAA,CAAK,GAAG,CAAC,CAAA;AAEpD,MAAA,OAAO,UAAA,CAAA,UAAA,EAAgB,QAAQ,CAAA,EAAA,EAAK,UAAA,CAAI,KAAA,CAAM,6BAAA,CAAY,QAAQ,CAAC,CAAC,CAAA,CAAA,EAAI,KAAK,CAAA,CAAA;AAC/E,IAAA;AACY,IAAA;AACoC,MAAA;AAEa,MAAA;AAC7D,IAAA;AACa,IAAA;AACmC,MAAA;AAEc,MAAA;AAC9D,IAAA;AACmB,IAAA;AAEd,MAAA;AAEqD,QAAA;AAE1C,MAAA;AAC6D,MAAA;AAC7E,IAAA;AACa,IAAA;AAC2D,MAAA;AACvC,MAAA;AACjC,IAAA;AACc,IAAA;AACkC,MAAA;AAEc,MAAA;AAC9D,IAAA;AACA,IAAA;AACqD,MAAA;AACvD,EAAA;AACF;AAMU;AACU,EAAA;AACX,IAAA;AACyC,MAAA;AACzC,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AAC2E,MAAA;AAC3E,IAAA;AAC2D,MAAA;AAC3D,IAAA;AAC4D,MAAA;AACjE,IAAA;AACqD,MAAA;AACvD,EAAA;AACF;AASkB;AHTkE;AACA;AEzExE;AAMN;AACqB,EAAA;AAEuB,IAAA;AAE9C,EAAA;AACO,EAAA;AACT;AAMQ;AAC8B,EAAA;AAE3B,EAAA;AACY,IAAA;AAEf,MAAA;AACqB,QAAA;AACJ,QAAA;AACf,QAAA;AACA,QAAA;AAE4C,MAAA;AACpD,IAAA;AACO,IAAA;AACT,EAAA;AACF;AAGwD;AF0D4B;AACA;AI5GhE;AAcI;AACiB,EAAA;AACvB,IAAA;AACL,MAAA;AACuD,QAAA;AACvD,MAAA;AAC6C,QAAA;AAC7C,MAAA;AAC2C,QAAA;AAC3C,MAAA;AACwD,QAAA;AAC7D,MAAA;AACS,QAAA;AACX,IAAA;AACF,EAAA;AACAA,EAAAA;AACF;AAMc;AAMgB;AAOtB;AACwC,EAAA;AAGxB,IAAA;AAExB,EAAA;AACO,EAAA;AACT;AAMU;AACyC,EAAA;AACK,IAAA;AACwB,IAAA;AAC9E,EAAA;AACO,EAAA;AACT;AJyEoF;AACA;AC5HlFA;AAC6D,+BAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAAA;AAWgB;AACX,EAAA;AACjC,IAAA;AAChC,EAAA;AACH;AAKiC;AAC6B,EAAA;AACY,EAAA;AACtB,IAAA;AAC5B,IAAA;AACiB,IAAA;AAE9BA,IAAAA;AACuC,kBAAA;AACL,cAAA;AAC3C,EAAA;AAC4E,EAAA;AACvD,IAAA;AACP,MAAA;AAE2D,QAAA;AACrE,MAAA;AACA,MAAA;AACF,IAAA;AAEOA,IAAAA;AACwE,kBAAA;AAAM;AAAA,oBAAA;AAGvF,EAAA;AAKU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAKa,IAAA;AAGM,IAAA;AAEhCA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIpB,eAAA;AAAA;AAE+B,iBAAA;AAAe;AAAA;AAGxB,cAAA;AACK,kBAAA;AAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAUvF,EAAA;AAKU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAKa,IAAA;AAE1BA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIpB,eAAA;AAAA;AAEmCA,iBAAAA;AAA8B;AAAA;AAG3C,cAAA;AACK,kBAAA;AAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAUvF,EAAA;AAIU,EAAA;AAGyB,IAAA;AAGM,IAAA;AAEhCA,IAAAA;AACkC,aAAA;AAAA;AAEjB,eAAA;AAAA;AAEF,MAAA;AACxB,EAAA;AAIU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAKa,IAAA;AAE1BA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIf,oBAAA;AAAA;AAEiB,cAAA;AACnB,kBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAShD,EAAA;AACsD,EAAA;AAGnB,IAAA;AAE4C,IAAA;AAC/E,EAAA;AAC4E,EAAA;AA
C5C,IAAA;AACa,MAAA;AAC3C,IAAA;AAEiB,IAAA;AACI,MAAA;AACP,QAAA;AAE+E,UAAA;AACzF,QAAA;AACA,QAAA;AACF,MAAA;AACOA,MAAAA;AAAA;AAEY,iBAAA;AAAA;AAEsB,eAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+CAAA;AAS3C,IAAA;AAEmB,IAAA;AACP,MAAA;AAC6C,QAAA;AACvD,MAAA;AACA,MAAA;AACF,IAAA;AACOA,IAAAA;AAAA;AAEY,eAAA;AAAA;AAEsB,aAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,6CAAA;AAS3C,EAAA;AAC0E,EAAA;AACZ,IAAA;AAE3C,IAAA;AACI,MAAA;AACgD,QAAA;AACjE,QAAA;AACF,MAAA;AAEOA,MAAAA;AAAA;AAEY,iBAAA;AAAA;AAAA;AAG6B,sBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,6CAAA;AASlD,IAAA;AAEmB,IAAA;AACmB,MAAA;AACpC,MAAA;AACF,IAAA;AAEOA,IAAAA;AAAA;AAEY,eAAA;AAAA;AAAA;AAG6B,oBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2CAAA;AASlD,EAAA;AACmD,EAAA;AAGhB,IAAA;AAEyC,IAAA;AAC5E,EAAA;AACuE,EAAA;AAGpC,IAAA;AACX,IAAA;AAEhB,IAAA;AAC2D,MAAA;AACjE,IAAA;AAE6B,IAAA;AAET,IAAA;AACoB,MAAA;AACxC,IAAA;AAEmB,IAAA;AACqB,MAAA;AACxC,IAAA;AAEmC,IAAA;AACrC,EAAA;AAC0D,EAAA;AAG/B,IAAA;AACoD,IAAA;AAC/E,EAAA;AAEkD,EAAA;AAEjB,EAAA;AACnC;AAKU;ADgD0E;AACA;AKxY9C;AACtC;AACmB;AACjB;AACA;AAEK;AA0DH;AACU,EAAA;AACkB,EAAA;AAGP,IAAA;AAGA,IAAA;AAChB,MAAA;AACS,MAAA;AACgB,QAAA;AAClB,QAAA;AACG,QAAA;AACqC,QAAA;AACjD,MAAA;AAC6C,MAAA;AAChC,QAAA;AAEqB,QAAA;AACjB,UAAA;AACA,UAAA;AACkB,UAAA;AAC+B,YAAA;AAC7D,UAAA;AACY,UAAA;AACH,YAAA;AAC8B,6CAAA;AACvC,UAAA;AACD,QAAA;AAE4D,QAAA;AAChE,MAAA;AACD,MAAA;AACD,IAAA;AACH,EAAA;AACF;AAEsC;AACoB,EAAA;AAC1D;AAEiB;ALwUmE;AACA;AACA;AACA;AACA;AACA;AACA","file":"/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/pg.cjs","sourcesContent":[null,"import type { JSONSerializer } from '@event-driven-io/dumbo';\nimport { isSQL, SQL, sqlMigration } from '@event-driven-io/dumbo';\nimport {\n expectedVersionValue,\n type DeleteOneOptions,\n type FindOptions,\n type OptionalUnlessRequiredIdAndVersion,\n type PongoCollectionSQLBuilder,\n type PongoFilter,\n type PongoUpdate,\n type ReplaceOneOptions,\n type UpdateOneOptions,\n type WithId,\n type WithIdAndVersion,\n type WithoutId,\n} from '../../../../core';\nimport { constructFilterQuery } from './filter';\nimport { buildUpdateQuery } from './update';\n\nconst createCollection = (collectionName: string): SQL =>\n SQL`\n CREATE TABLE IF NOT EXISTS ${SQL.identifier(collectionName)} (\n _id TEXT PRIMARY KEY, \n data JSONB NOT NULL, \n metadata JSONB NOT NULL DEFAULT '{}',\n _version BIGINT NOT NULL DEFAULT 1,\n _partition TEXT NOT NULL DEFAULT 'png_global',\n _archived BOOLEAN NOT NULL DEFAULT FALSE,\n _created TIMESTAMPTZ NOT NULL DEFAULT now(),\n _updated TIMESTAMPTZ NOT NULL DEFAULT now()\n )`;\n\nexport const pongoCollectionPostgreSQLMigrations = (collectionName: string) => [\n sqlMigration(`pongoCollection:${collectionName}:001:createtable`, [\n createCollection(collectionName),\n ]),\n];\n\nexport const postgresSQLBuilder = (\n collectionName: string,\n serializer: JSONSerializer,\n): PongoCollectionSQLBuilder => ({\n createCollection: (): SQL => createCollection(collectionName),\n insertOne: <T>(document: OptionalUnlessRequiredIdAndVersion<T>): SQL => {\n const serialized = serializer.serialize(document);\n const id = document._id;\n const version = document._version ?? 1n;\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) \n VALUES (${id}, ${serialized}, ${version}) ON CONFLICT(_id) DO NOTHING;`;\n },\n insertMany: <T>(documents: OptionalUnlessRequiredIdAndVersion<T>[]): SQL => {\n const values = SQL.merge(\n documents.map(\n (doc) =>\n SQL`(${doc._id}, ${serializer.serialize(doc)}, ${doc._version ?? 
1n})`,\n ),\n ',',\n );\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) VALUES ${values}\n ON CONFLICT(_id) DO NOTHING\n RETURNING _id;`;\n },\n updateOne: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n options?: UpdateOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n const updateQuery = isSQL(update)\n ? update\n : buildUpdateQuery(update, serializer);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n replaceOne: <T>(\n filter: PongoFilter<T> | SQL,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${serializer.serialize(document)} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n updateMany: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n ): SQL => {\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n const updateQuery = isSQL(update)\n ? 
update\n : buildUpdateQuery(update, serializer);\n\n return SQL`\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n ${where(filterQuery)};`;\n },\n deleteOne: <T>(\n filter: PongoFilter<T> | SQL,\n options?: DeleteOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n\n return SQL`\n WITH existing AS (\n SELECT _id\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n deleted AS (\n DELETE FROM ${SQL.identifier(collectionName)}\n USING existing\n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id\n )\n SELECT \n existing._id,\n COUNT(existing._id) over() AS matched,\n COUNT(deleted._id) over() AS deleted\n FROM existing\n LEFT JOIN deleted \n ON existing._id = deleted._id;`;\n },\n deleteMany: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n\n return SQL`DELETE FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}`;\n },\n replaceMany: <T>(documents: Array<WithIdAndVersion<T> | WithId<T>>): SQL => {\n const hasVersions = documents.some(\n (d) => '_version' in d && d._version !== undefined,\n );\n\n if (hasVersions) {\n const values = SQL.merge(\n documents.map(\n (d) =>\n SQL`(${d._id}::text, ${serializer.serialize(d)}::jsonb, ${(d as WithIdAndVersion<T>)._version ?? 0n}::bigint)`,\n ),\n ',',\n );\n return SQL`\n WITH replacements(_id, data, expected_version) AS (\n VALUES ${values}\n )\n UPDATE ${SQL.identifier(collectionName)} t\n SET\n data = r.data\n || jsonb_build_object('_id', t._id)\n || jsonb_build_object('_version', (t._version + 1)::text),\n _version = t._version + 1\n FROM replacements r\n WHERE t._id = r._id AND t._version = r.expected_version\n RETURNING t._id, t._version AS version;`;\n }\n\n const values = SQL.merge(\n documents.map(\n (d) => SQL`(${d._id}::text, ${serializer.serialize(d)}::jsonb)`,\n ),\n ',',\n );\n return SQL`\n WITH replacements(_id, data) AS (\n VALUES ${values}\n )\n UPDATE ${SQL.identifier(collectionName)} t\n SET\n data = r.data\n || jsonb_build_object('_id', t._id)\n || jsonb_build_object('_version', (t._version + 1)::text),\n _version = t._version + 1\n FROM replacements r\n WHERE t._id = r._id\n RETURNING t._id, t._version AS version;`;\n },\n deleteManyByIds: (ids: Array<{ _id: string; _version?: bigint }>): SQL => {\n const hasVersions = ids.some((d) => d._version !== undefined);\n\n if (hasVersions) {\n const values = SQL.merge(\n ids.map((d) => SQL`(${d._id}::text, ${d._version ?? 
0n}::bigint)`),\n ',',\n );\n\n return SQL`\n WITH targets(_id, expected_version) AS (\n VALUES ${values}\n ),\n deleted AS (\n DELETE FROM ${SQL.identifier(collectionName)} t\n USING targets r\n WHERE t._id = r._id AND t._version = r.expected_version\n RETURNING t._id\n )\n SELECT r._id,\n CASE WHEN d._id IS NOT NULL THEN 1 ELSE 0 END as deleted\n FROM targets r\n LEFT JOIN deleted d ON r._id = d._id;`;\n }\n\n const values = SQL.merge(\n ids.map((d) => SQL`(${d._id}::text)`),\n ',',\n );\n\n return SQL`\n WITH targets(_id) AS (\n VALUES ${values}\n ),\n deleted AS (\n DELETE FROM ${SQL.identifier(collectionName)} t\n USING targets r\n WHERE t._id = r._id\n RETURNING t._id\n )\n SELECT r._id,\n CASE WHEN d._id IS NOT NULL THEN 1 ELSE 0 END as deleted\n FROM targets r\n LEFT JOIN deleted d ON r._id = d._id;`;\n },\n findOne: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n\n return SQL`SELECT data, _version FROM ${SQL.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;\n },\n find: <T>(filter: PongoFilter<T> | SQL, options?: FindOptions): SQL => {\n const filterQuery = isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n const query: SQL[] = [];\n\n query.push(\n SQL`SELECT data, _version FROM ${SQL.identifier(collectionName)}`,\n );\n\n query.push(where(filterQuery));\n\n if (options?.limit) {\n query.push(SQL`LIMIT ${options.limit}`);\n }\n\n if (options?.skip) {\n query.push(SQL`OFFSET ${options.skip}`);\n }\n\n return SQL.merge([...query, SQL`;`]);\n },\n countDocuments: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = SQL.check.isSQL(filter)\n ? filter\n : constructFilterQuery(filter, serializer);\n return SQL`SELECT COUNT(1) as count FROM ${SQL.identifier(collectionName)} ${where(filterQuery)};`;\n },\n rename: (newName: string): SQL =>\n SQL`ALTER TABLE ${SQL.identifier(collectionName)} RENAME TO ${SQL.identifier(newName)};`,\n drop: (targetName: string = collectionName): SQL =>\n SQL`DROP TABLE IF EXISTS ${SQL.identifier(targetName)}`,\n});\n\nconst where = (filterQuery: SQL): SQL =>\n SQL.check.isEmpty(filterQuery)\n ? SQL.EMPTY\n : SQL.merge([SQL`WHERE `, filterQuery]);\n","import type { JSONSerializer } from '@event-driven-io/dumbo';\nimport { SQL } from '@event-driven-io/dumbo';\nimport {\n hasOperators,\n objectEntries,\n QueryOperators,\n type PongoFilter,\n} from '../../../../../core';\nimport { handleOperator } from './queryOperators';\n\nexport * from './queryOperators';\n\nconst AND = 'AND';\n\nexport const constructFilterQuery = <T>(\n filter: PongoFilter<T>,\n serializer: JSONSerializer,\n): SQL =>\n SQL.merge(\n Object.entries(filter).map(([key, value]) =>\n isRecord(value)\n ? constructComplexFilterQuery(key, value, serializer)\n : handleOperator(key, '$eq', value, serializer),\n ),\n ` ${AND} `,\n );\n\nconst constructComplexFilterQuery = (\n key: string,\n value: Record<string, unknown>,\n serializer: JSONSerializer,\n): SQL => {\n const isEquality = !hasOperators(value);\n\n return SQL.merge(\n objectEntries(value).map(([nestedKey, val]) =>\n isEquality\n ? 
handleOperator(\n `${key}.${nestedKey}`,\n QueryOperators.$eq,\n val,\n serializer,\n )\n : handleOperator(key, nestedKey, val, serializer),\n ),\n ` ${AND} `,\n );\n};\n\nconst isRecord = (value: unknown): value is Record<string, unknown> =>\n value !== null && typeof value === 'object' && !Array.isArray(value);\n","import type { JSONSerializer } from '@event-driven-io/dumbo';\nimport { SQL } from '@event-driven-io/dumbo';\nimport { objectEntries, OperatorMap } from '../../../../../core';\n\nexport const handleOperator = (\n path: string,\n operator: string,\n value: unknown,\n serializer: JSONSerializer,\n): SQL => {\n if (path === '_id' || path === '_version') {\n return handleMetadataOperator(path, operator, value);\n }\n\n switch (operator) {\n case '$eq': {\n const nestedPath = serializer.serialize(buildNestedObject(path, value));\n const serializedValue = serializer.serialize(value);\n\n return SQL`(data @> ${nestedPath}::jsonb OR jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (@ == ${SQL.plain(serializedValue)})'))`;\n }\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne': {\n const jsonPath = SQL.plain(path.split('.').join(','));\n\n return SQL`data ->> '${jsonPath}' ${SQL.plain(OperatorMap[operator])} ${value}`;\n }\n case '$in': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} = ANY (${value as unknown[]})`;\n }\n case '$nin': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} != ALL (${value as unknown[]})`;\n }\n case '$elemMatch': {\n const subQuery = objectEntries(value as Record<string, unknown>)\n .map(\n ([subKey, subValue]) =>\n `@.\"${subKey}\" == ${serializer.serialize(subValue)}`,\n )\n .join(' && ');\n return SQL`jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? 
(${SQL.plain(subQuery)})')`;\n }\n case '$all': {\n const nestedPath = serializer.serialize(buildNestedObject(path, value));\n return SQL`data @> ${nestedPath}::jsonb`;\n }\n case '$size': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`jsonb_array_length(data #> ${jsonPath}) = ${value}`;\n }\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst handleMetadataOperator = (\n fieldName: string,\n operator: string,\n value: unknown,\n): SQL => {\n switch (operator) {\n case '$eq':\n return SQL`${SQL.plain(fieldName)} = ${value}`;\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne':\n return SQL`${SQL.plain(fieldName)} ${SQL.plain(OperatorMap[operator])} ${value}`;\n case '$in':\n return SQL`${SQL.plain(fieldName)} = ANY (${value as unknown[]})`;\n case '$nin':\n return SQL`${SQL.plain(fieldName)} != ALL (${value as unknown[]})`;\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst buildNestedObject = (\n path: string,\n value: unknown,\n): Record<string, unknown> =>\n path\n .split('.')\n .reverse()\n .reduce((acc, key) => ({ [key]: acc }), value as Record<string, unknown>);\n","import type { JSONSerializer } from '@event-driven-io/dumbo';\nimport { SQL } from '@event-driven-io/dumbo';\nimport {\n objectEntries,\n type $inc,\n type $push,\n type $set,\n type $unset,\n type PongoUpdate,\n} from '../../../../../core';\n\nexport const buildUpdateQuery = <T>(\n update: PongoUpdate<T>,\n serializer: JSONSerializer,\n): SQL =>\n objectEntries(update).reduce(\n (currentUpdateQuery, [op, value]) => {\n switch (op) {\n case '$set':\n return buildSetQuery(value, currentUpdateQuery, serializer);\n case '$unset':\n return buildUnsetQuery(value, currentUpdateQuery);\n case '$inc':\n return buildIncQuery(value, currentUpdateQuery);\n case '$push':\n return buildPushQuery(value, currentUpdateQuery, serializer);\n default:\n return currentUpdateQuery;\n }\n },\n SQL`data`,\n );\n\nexport const buildSetQuery = <T>(\n set: $set<T>,\n currentUpdateQuery: SQL,\n serializer: JSONSerializer,\n): SQL => SQL`${currentUpdateQuery} || ${serializer.serialize(set)}::jsonb`;\n\nexport const buildUnsetQuery = <T>(\n unset: $unset<T>,\n currentUpdateQuery: SQL,\n): SQL =>\n SQL`${currentUpdateQuery} - ${Object.keys(unset)\n .map((k) => `{${k}}`)\n .join(', ')}`;\n\nexport const buildIncQuery = <T>(\n inc: $inc<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(inc)) {\n currentUpdateQuery =\n typeof value === 'bigint'\n ? 
SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb((COALESCE((data->>'${SQL.plain(key)}')::BIGINT, 0) + ${value})::TEXT), true)`\n : SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb(COALESCE((data->>'${SQL.plain(key)}')::NUMERIC, 0) + ${value}), true)`;\n }\n return currentUpdateQuery;\n};\n\nexport const buildPushQuery = <T>(\n push: $push<T>,\n currentUpdateQuery: SQL,\n serializer: JSONSerializer,\n): SQL => {\n for (const [key, value] of Object.entries(push)) {\n const serializedValue = serializer.serialize([value]);\n currentUpdateQuery = SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', (coalesce(data->'${SQL.plain(key)}', '[]'::jsonb) || ${serializedValue}::jsonb), true)`;\n }\n return currentUpdateQuery;\n};\n","import { dumbo, JSONSerializer } from '@event-driven-io/dumbo';\nimport {\n pgDumboDriver as dumboDriver,\n PgDriverType,\n postgreSQLMetadata,\n type PgConnection,\n} from '@event-driven-io/dumbo/pg';\nimport type pg from 'pg';\nimport {\n PongoCollectionSchemaComponent,\n PongoDatabase,\n PongoDatabaseSchemaComponent,\n pongoDriverRegistry,\n pongoSchema,\n type PongoDb,\n type PongoDriver,\n type PongoDriverOptions,\n} from '../../../core';\nimport {\n pongoCollectionPostgreSQLMigrations,\n postgresSQLBuilder,\n} from '../core';\n\nexport type PgPongoClientOptions =\n | PooledPongoClientOptions\n | NotPooledPongoOptions;\n\nexport type PooledPongoClientOptions =\n | {\n pool: pg.Pool;\n }\n | {\n pooled: true;\n }\n | {\n pool: pg.Pool;\n pooled: true;\n }\n | object;\n\nexport type NotPooledPongoOptions =\n | {\n client: pg.Client;\n }\n | {\n pooled: false;\n }\n | {\n client: pg.Client;\n pooled: false;\n }\n | {\n connection: PgConnection;\n pooled?: false;\n };\n\ntype PgDatabaseDriverOptions = PongoDriverOptions<PgPongoClientOptions> & {\n databaseName?: string | undefined;\n connectionString: string;\n};\n\nconst pgPongoDriver: PongoDriver<\n PongoDb<PgDriverType>,\n PgDatabaseDriverOptions\n> = {\n driverType: PgDriverType,\n databaseFactory: (options) => {\n const databaseName =\n options.databaseName ??\n postgreSQLMetadata.parseDatabaseName(options.connectionString) ??\n postgreSQLMetadata.defaultDatabaseName;\n\n return PongoDatabase({\n ...options,\n pool: dumbo({\n connectionString: options.connectionString,\n driver: dumboDriver,\n ...options.connectionOptions,\n serialization: { serializer: options.serializer },\n }),\n schemaComponent: PongoDatabaseSchemaComponent({\n driverType: PgDriverType,\n collectionFactory: (schema) =>\n PongoCollectionSchemaComponent({\n driverType: PgDriverType,\n definition: schema,\n migrationsOrSchemaComponents: {\n migrations: pongoCollectionPostgreSQLMigrations(schema.name),\n },\n sqlBuilder: postgresSQLBuilder(\n schema.name,\n options.serialization?.serializer ?? JSONSerializer,\n ),\n }),\n definition:\n options.schema?.definition ?? pongoSchema.db(databaseName, {}),\n }),\n databaseName,\n });\n },\n};\n\nexport const usePgPongoDriver = () => {\n pongoDriverRegistry.register(PgDriverType, pgPongoDriver);\n};\n\nusePgPongoDriver();\n\nexport { pgPongoDriver as pgDriver, pgPongoDriver as pongoDriver };\n"]}
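The embedded source above (taken from the shipped source map) shows the PostgreSQL driver registering itself in pongoDriverRegistry as a side effect of loading the pg entry point: usePgPongoDriver() is invoked at module scope. A rough sketch of relying on that behaviour follows; the './pg' subpath import and the pongoClient factory call are assumptions about the package's public entry points, not something this diff confirms.

// Sketch only, not from this diff: assumes the package maps dist/pg.js to a './pg'
// subpath export and that the root module exposes a pongoClient(connectionString) factory.
import '@event-driven-io/pongo/pg'; // side effect: usePgPongoDriver() registers the pg driver
import { pongoClient } from '@event-driven-io/pongo';

// Hypothetical connection string; the registered driver handles PostgreSQL URLs.
const client = pongoClient('postgresql://localhost:5432/postgres');
const users = client.db().collection('users');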

package/dist/pg.d.cts
CHANGED
@@ -1,8 +1,9 @@
 import * as _event_driven_io_dumbo from '@event-driven-io/dumbo';
 import { JSONSerializer } from '@event-driven-io/dumbo';
-import {
+import { aj as PongoCollectionSQLBuilder, P as PongoDriver, a as PongoDb, b as PongoDriverOptions } from './pongoTransactionCache-CRuPGag3.cjs';
 import { PgConnection, PgDriverType } from '@event-driven-io/dumbo/pg';
 import pg from 'pg';
+import 'lru-cache';

 declare const pongoCollectionPostgreSQLMigrations: (collectionName: string) => _event_driven_io_dumbo.SQLMigration[];
 declare const postgresSQLBuilder: (collectionName: string, serializer: JSONSerializer) => PongoCollectionSQLBuilder;
package/dist/pg.d.ts
CHANGED
@@ -1,8 +1,9 @@
 import * as _event_driven_io_dumbo from '@event-driven-io/dumbo';
 import { JSONSerializer } from '@event-driven-io/dumbo';
-import {
+import { aj as PongoCollectionSQLBuilder, P as PongoDriver, a as PongoDb, b as PongoDriverOptions } from './pongoTransactionCache-CRuPGag3.js';
 import { PgConnection, PgDriverType } from '@event-driven-io/dumbo/pg';
 import pg from 'pg';
+import 'lru-cache';

 declare const pongoCollectionPostgreSQLMigrations: (collectionName: string) => _event_driven_io_dumbo.SQLMigration[];
 declare const postgresSQLBuilder: (collectionName: string, serializer: JSONSerializer) => PongoCollectionSQLBuilder;
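Both declaration files keep the same public surface (pongoCollectionPostgreSQLMigrations and postgresSQLBuilder); what changed is where the shared types come from (the pongoTransactionCache-CRuPGag3 declaration chunk) plus a new bare import of 'lru-cache', so that package's typings must be resolvable wherever these declarations are type-checked. A small consumption sketch, assuming the build exposes dist/pg as a './pg' subpath export (not shown in this diff):

// Sketch only; the './pg' subpath and the collection name 'users' are assumptions.
import { pongoCollectionPostgreSQLMigrations } from '@event-driven-io/pongo/pg';

// PongoCollectionSQLBuilder, PongoDriver, PongoDb, and PongoDriverOptions now resolve
// through the shared pongoTransactionCache-CRuPGag3 declaration chunk; because that chunk
// references 'lru-cache', its type definitions must be installed for this to compile.
const migrations = pongoCollectionPostgreSQLMigrations('users');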
package/dist/pg.js
CHANGED
@@ -9,7 +9,7 @@ import {
   objectEntries,
   pongoDriverRegistry,
   pongoSchema
-} from "./chunk-
+} from "./chunk-NWYI26VT.js";

 // src/storage/postgresql/core/sqlBuilder/index.ts
 import { isSQL, SQL as SQL4, sqlMigration } from "@event-driven-io/dumbo";
@@ -276,6 +276,92 @@ var postgresSQLBuilder = (collectionName, serializer) => ({
     const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter, serializer);
     return SQL4`DELETE FROM ${SQL4.identifier(collectionName)} ${where(filterQuery)}`;
   },
+  replaceMany: (documents) => {
+    const hasVersions = documents.some(
+      (d) => "_version" in d && d._version !== void 0
+    );
+    if (hasVersions) {
+      const values2 = SQL4.merge(
+        documents.map(
+          (d) => SQL4`(${d._id}::text, ${serializer.serialize(d)}::jsonb, ${d._version ?? 0n}::bigint)`
+        ),
+        ","
+      );
+      return SQL4`
+      WITH replacements(_id, data, expected_version) AS (
+        VALUES ${values2}
+      )
+      UPDATE ${SQL4.identifier(collectionName)} t
+      SET
+        data = r.data
+          || jsonb_build_object('_id', t._id)
+          || jsonb_build_object('_version', (t._version + 1)::text),
+        _version = t._version + 1
+      FROM replacements r
+      WHERE t._id = r._id AND t._version = r.expected_version
+      RETURNING t._id, t._version AS version;`;
+    }
+    const values = SQL4.merge(
+      documents.map(
+        (d) => SQL4`(${d._id}::text, ${serializer.serialize(d)}::jsonb)`
+      ),
+      ","
+    );
+    return SQL4`
+    WITH replacements(_id, data) AS (
+      VALUES ${values}
+    )
+    UPDATE ${SQL4.identifier(collectionName)} t
+    SET
+      data = r.data
+        || jsonb_build_object('_id', t._id)
+        || jsonb_build_object('_version', (t._version + 1)::text),
+      _version = t._version + 1
+    FROM replacements r
+    WHERE t._id = r._id
+    RETURNING t._id, t._version AS version;`;
+  },
+  deleteManyByIds: (ids) => {
+    const hasVersions = ids.some((d) => d._version !== void 0);
+    if (hasVersions) {
+      const values2 = SQL4.merge(
+        ids.map((d) => SQL4`(${d._id}::text, ${d._version ?? 0n}::bigint)`),
+        ","
+      );
+      return SQL4`
+      WITH targets(_id, expected_version) AS (
+        VALUES ${values2}
+      ),
+      deleted AS (
+        DELETE FROM ${SQL4.identifier(collectionName)} t
+        USING targets r
+        WHERE t._id = r._id AND t._version = r.expected_version
+        RETURNING t._id
+      )
+      SELECT r._id,
+        CASE WHEN d._id IS NOT NULL THEN 1 ELSE 0 END as deleted
+      FROM targets r
+      LEFT JOIN deleted d ON r._id = d._id;`;
+    }
+    const values = SQL4.merge(
+      ids.map((d) => SQL4`(${d._id}::text)`),
+      ","
+    );
+    return SQL4`
+    WITH targets(_id) AS (
+      VALUES ${values}
+    ),
+    deleted AS (
+      DELETE FROM ${SQL4.identifier(collectionName)} t
+      USING targets r
+      WHERE t._id = r._id
+      RETURNING t._id
+    )
+    SELECT r._id,
+      CASE WHEN d._id IS NOT NULL THEN 1 ELSE 0 END as deleted
+    FROM targets r
+    LEFT JOIN deleted d ON r._id = d._id;`;
+  },
   findOne: (filter) => {
     const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter, serializer);
     return SQL4`SELECT data, _version FROM ${SQL4.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;
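The added replaceMany and deleteManyByIds builders batch the ids (and, when present, expected _version values) into a VALUES list and join the target table against it, so a stale expected version simply leaves that row untouched; the RETURNING/SELECT output tells the caller which ids were applied. A rough usage sketch against the builder shown above; the collection name and documents are illustrative, and the exact parameter types are whatever PongoCollectionSQLBuilder declares.

// Sketch only; 'users' and the documents are made up, './pg' subpath is assumed.
import { JSONSerializer } from '@event-driven-io/dumbo';
import { postgresSQLBuilder } from '@event-driven-io/pongo/pg';

const sqlFor = postgresSQLBuilder('users', JSONSerializer);

// With _version set, rows are replaced only when t._version matches the expected value
// (optimistic concurrency); the returned version column reflects the bumped version.
const replaceSql = sqlFor.replaceMany([
  { _id: 'doc-1', name: 'Anna', _version: 3n },
  { _id: 'doc-2', name: 'Bo', _version: 7n },
]);

// Without versions, deletes match on _id only; the trailing SELECT reports 1/0 per id.
const deleteSql = sqlFor.deleteManyByIds([{ _id: 'doc-1' }, { _id: 'doc-2' }]);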