@event-driven-io/pongo 0.16.3 → 0.16.4-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-HZVM5GBH.cjs +1160 -0
- package/dist/chunk-HZVM5GBH.cjs.map +1 -0
- package/dist/chunk-IXL27BW5.js +1160 -0
- package/dist/{chunk-CYDDN3CZ.js.map → chunk-IXL27BW5.js.map} +1 -1
- package/dist/cli.cjs +436 -14
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.js +436 -14
- package/dist/cli.js.map +1 -1
- package/dist/index.cjs +78 -1
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +78 -1
- package/dist/shim.cjs +332 -1
- package/dist/shim.cjs.map +1 -1
- package/dist/shim.js +332 -1
- package/dist/shim.js.map +1 -1
- package/package.json +3 -3
- package/dist/chunk-CYDDN3CZ.js +0 -76
- package/dist/chunk-PUUNCOTH.cjs +0 -76
- package/dist/chunk-PUUNCOTH.cjs.map +0 -1
|
@@ -0,0 +1,1160 @@
|
|
|
1
|
+
// src/postgres/sqlBuilder/index.ts
|
|
2
|
+
import {
|
|
3
|
+
isSQL,
|
|
4
|
+
JSONSerializer as JSONSerializer4,
|
|
5
|
+
rawSql,
|
|
6
|
+
sql as sql3,
|
|
7
|
+
sqlMigration
|
|
8
|
+
} from "@event-driven-io/dumbo";
|
|
9
|
+
|
|
10
|
+
// src/core/collection/pongoCollection.ts
|
|
11
|
+
import {
|
|
12
|
+
runPostgreSQLMigrations as runPostgreSQLMigrations2,
|
|
13
|
+
schemaComponent as schemaComponent2,
|
|
14
|
+
single
|
|
15
|
+
} from "@event-driven-io/dumbo";
|
|
16
|
+
import { v7 as uuid } from "uuid";
|
|
17
|
+
|
|
18
|
+
// src/postgres/dbClient.ts
|
|
19
|
+
import {
|
|
20
|
+
dumbo,
|
|
21
|
+
getDatabaseNameOrDefault,
|
|
22
|
+
NodePostgresConnectorType,
|
|
23
|
+
runPostgreSQLMigrations,
|
|
24
|
+
schemaComponent
|
|
25
|
+
} from "@event-driven-io/dumbo";
|
|
26
|
+
var isPostgresClientOptions = (options) => options.connectorType === NodePostgresConnectorType;
|
|
27
|
+
var postgresDb = (options) => {
|
|
28
|
+
const { connectionString, dbName } = options;
|
|
29
|
+
const databaseName = dbName ?? getDatabaseNameOrDefault(connectionString);
|
|
30
|
+
const pool = dumbo({
|
|
31
|
+
connectionString,
|
|
32
|
+
...options.connectionOptions
|
|
33
|
+
});
|
|
34
|
+
const collections = /* @__PURE__ */ new Map();
|
|
35
|
+
const command = async (sql4, options2) => (await transactionExecutorOrDefault(db, options2, pool.execute)).command(sql4);
|
|
36
|
+
const query = async (sql4, options2) => (await transactionExecutorOrDefault(db, options2, pool.execute)).query(
|
|
37
|
+
sql4
|
|
38
|
+
);
|
|
39
|
+
const db = {
|
|
40
|
+
connectorType: options.connectorType,
|
|
41
|
+
databaseName,
|
|
42
|
+
connect: () => Promise.resolve(),
|
|
43
|
+
close: () => pool.close(),
|
|
44
|
+
collections: () => [...collections.values()],
|
|
45
|
+
collection: (collectionName) => pongoCollection({
|
|
46
|
+
collectionName,
|
|
47
|
+
db,
|
|
48
|
+
pool,
|
|
49
|
+
sqlBuilder: postgresSQLBuilder(collectionName),
|
|
50
|
+
schema: options.schema ? options.schema : {},
|
|
51
|
+
errors: options.errors ? options.errors : {}
|
|
52
|
+
}),
|
|
53
|
+
transaction: () => pool.transaction(),
|
|
54
|
+
withTransaction: (handle) => pool.withTransaction(handle),
|
|
55
|
+
schema: {
|
|
56
|
+
get component() {
|
|
57
|
+
return schemaComponent("pongoDb", {
|
|
58
|
+
components: [...collections.values()].map((c) => c.schema.component)
|
|
59
|
+
});
|
|
60
|
+
},
|
|
61
|
+
migrate: () => runPostgreSQLMigrations(
|
|
62
|
+
pool,
|
|
63
|
+
[...collections.values()].flatMap(
|
|
64
|
+
(c) => (
|
|
65
|
+
// TODO: This needs to change to support more connectors
|
|
66
|
+
c.schema.component.migrations({ connector: "PostgreSQL:pg" })
|
|
67
|
+
)
|
|
68
|
+
)
|
|
69
|
+
)
|
|
70
|
+
},
|
|
71
|
+
sql: {
|
|
72
|
+
async query(sql4, options2) {
|
|
73
|
+
const result = await query(sql4, options2);
|
|
74
|
+
return result.rows;
|
|
75
|
+
},
|
|
76
|
+
async command(sql4, options2) {
|
|
77
|
+
return command(sql4, options2);
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
};
|
|
81
|
+
const dbsSchema = options?.schema?.definition?.dbs;
|
|
82
|
+
if (dbsSchema) {
|
|
83
|
+
const dbSchema = objectEntries(dbsSchema).map((e) => e[1]).find((db2) => db2.name === dbName || db2.name === databaseName);
|
|
84
|
+
if (dbSchema) return proxyPongoDbWithSchema(db, dbSchema, collections);
|
|
85
|
+
}
|
|
86
|
+
return db;
|
|
87
|
+
};
|
|
88
|
+
var pongoDbSchemaComponent = (collections) => {
|
|
89
|
+
const components = collections.length > 0 && typeof collections[0] === "string" ? collections.map(
|
|
90
|
+
(collectionName) => pongoCollectionSchemaComponent(collectionName)
|
|
91
|
+
) : collections;
|
|
92
|
+
return schemaComponent("pongo:schema_component:db", {
|
|
93
|
+
components
|
|
94
|
+
});
|
|
95
|
+
};
|
|
96
|
+
|
|
97
|
+
// src/core/collection/pongoCollection.ts
|
|
98
|
+
var enlistIntoTransactionIfActive = async (db, options) => {
|
|
99
|
+
const transaction = options?.session?.transaction;
|
|
100
|
+
if (!transaction || !transaction.isActive) return null;
|
|
101
|
+
return await transaction.enlistDatabase(db);
|
|
102
|
+
};
|
|
103
|
+
var transactionExecutorOrDefault = async (db, options, defaultSqlExecutor) => {
|
|
104
|
+
const existingTransaction = await enlistIntoTransactionIfActive(db, options);
|
|
105
|
+
return existingTransaction?.execute ?? defaultSqlExecutor;
|
|
106
|
+
};
|
|
107
|
+
var pongoCollection = ({
|
|
108
|
+
db,
|
|
109
|
+
collectionName,
|
|
110
|
+
pool,
|
|
111
|
+
sqlBuilder: SqlFor,
|
|
112
|
+
schema,
|
|
113
|
+
errors
|
|
114
|
+
}) => {
|
|
115
|
+
const sqlExecutor = pool.execute;
|
|
116
|
+
const command = async (sql4, options) => (await transactionExecutorOrDefault(db, options, sqlExecutor)).command(sql4);
|
|
117
|
+
const query = async (sql4, options) => (await transactionExecutorOrDefault(db, options, sqlExecutor)).query(
|
|
118
|
+
sql4
|
|
119
|
+
);
|
|
120
|
+
let shouldMigrate = schema?.autoMigration !== "None";
|
|
121
|
+
const createCollection2 = (options) => {
|
|
122
|
+
shouldMigrate = false;
|
|
123
|
+
if (options?.session) return command(SqlFor.createCollection(), options);
|
|
124
|
+
else return command(SqlFor.createCollection());
|
|
125
|
+
};
|
|
126
|
+
const ensureCollectionCreated = (options) => {
|
|
127
|
+
if (!shouldMigrate) {
|
|
128
|
+
return Promise.resolve();
|
|
129
|
+
}
|
|
130
|
+
return createCollection2(options);
|
|
131
|
+
};
|
|
132
|
+
const collection = {
|
|
133
|
+
dbName: db.databaseName,
|
|
134
|
+
collectionName,
|
|
135
|
+
createCollection: async (options) => {
|
|
136
|
+
await createCollection2(options);
|
|
137
|
+
},
|
|
138
|
+
insertOne: async (document, options) => {
|
|
139
|
+
await ensureCollectionCreated(options);
|
|
140
|
+
const _id = document._id ?? uuid();
|
|
141
|
+
const _version = document._version ?? 1n;
|
|
142
|
+
const result = await command(
|
|
143
|
+
SqlFor.insertOne({
|
|
144
|
+
...document,
|
|
145
|
+
_id,
|
|
146
|
+
_version
|
|
147
|
+
}),
|
|
148
|
+
options
|
|
149
|
+
);
|
|
150
|
+
const successful = (result.rowCount ?? 0) > 0;
|
|
151
|
+
return operationResult(
|
|
152
|
+
{
|
|
153
|
+
successful,
|
|
154
|
+
insertedId: successful ? _id : null,
|
|
155
|
+
nextExpectedVersion: _version
|
|
156
|
+
},
|
|
157
|
+
{ operationName: "insertOne", collectionName, errors }
|
|
158
|
+
);
|
|
159
|
+
},
|
|
160
|
+
insertMany: async (documents, options) => {
|
|
161
|
+
await ensureCollectionCreated(options);
|
|
162
|
+
const rows = documents.map((doc) => ({
|
|
163
|
+
...doc,
|
|
164
|
+
_id: doc._id ?? uuid(),
|
|
165
|
+
_version: doc._version ?? 1n
|
|
166
|
+
}));
|
|
167
|
+
const result = await command(
|
|
168
|
+
SqlFor.insertMany(rows),
|
|
169
|
+
options
|
|
170
|
+
);
|
|
171
|
+
return operationResult(
|
|
172
|
+
{
|
|
173
|
+
successful: result.rowCount === rows.length,
|
|
174
|
+
insertedCount: result.rowCount ?? 0,
|
|
175
|
+
insertedIds: result.rows.map((d) => d._id)
|
|
176
|
+
},
|
|
177
|
+
{ operationName: "insertMany", collectionName, errors }
|
|
178
|
+
);
|
|
179
|
+
},
|
|
180
|
+
updateOne: async (filter, update, options) => {
|
|
181
|
+
await ensureCollectionCreated(options);
|
|
182
|
+
const result = await command(
|
|
183
|
+
SqlFor.updateOne(filter, update, options),
|
|
184
|
+
options
|
|
185
|
+
);
|
|
186
|
+
return operationResult(
|
|
187
|
+
{
|
|
188
|
+
successful: result.rows.length > 0 && result.rows[0].modified === result.rows[0].matched,
|
|
189
|
+
modifiedCount: Number(result.rows[0]?.modified ?? 0),
|
|
190
|
+
matchedCount: Number(result.rows[0]?.matched ?? 0),
|
|
191
|
+
nextExpectedVersion: result.rows[0]?.version ?? 0n
|
|
192
|
+
},
|
|
193
|
+
{ operationName: "updateOne", collectionName, errors }
|
|
194
|
+
);
|
|
195
|
+
},
|
|
196
|
+
replaceOne: async (filter, document, options) => {
|
|
197
|
+
await ensureCollectionCreated(options);
|
|
198
|
+
const result = await command(
|
|
199
|
+
SqlFor.replaceOne(filter, document, options),
|
|
200
|
+
options
|
|
201
|
+
);
|
|
202
|
+
return operationResult(
|
|
203
|
+
{
|
|
204
|
+
successful: result.rows.length > 0 && result.rows[0].modified > 0,
|
|
205
|
+
modifiedCount: Number(result.rows[0]?.modified ?? 0),
|
|
206
|
+
matchedCount: Number(result.rows[0]?.matched ?? 0),
|
|
207
|
+
nextExpectedVersion: result.rows[0]?.version ?? 0n
|
|
208
|
+
},
|
|
209
|
+
{ operationName: "replaceOne", collectionName, errors }
|
|
210
|
+
);
|
|
211
|
+
},
|
|
212
|
+
updateMany: async (filter, update, options) => {
|
|
213
|
+
await ensureCollectionCreated(options);
|
|
214
|
+
const result = await command(SqlFor.updateMany(filter, update), options);
|
|
215
|
+
return operationResult(
|
|
216
|
+
{
|
|
217
|
+
successful: true,
|
|
218
|
+
modifiedCount: result.rowCount ?? 0,
|
|
219
|
+
matchedCount: result.rowCount ?? 0
|
|
220
|
+
},
|
|
221
|
+
{ operationName: "updateMany", collectionName, errors }
|
|
222
|
+
);
|
|
223
|
+
},
|
|
224
|
+
deleteOne: async (filter, options) => {
|
|
225
|
+
await ensureCollectionCreated(options);
|
|
226
|
+
const result = await command(
|
|
227
|
+
SqlFor.deleteOne(filter ?? {}, options),
|
|
228
|
+
options
|
|
229
|
+
);
|
|
230
|
+
return operationResult(
|
|
231
|
+
{
|
|
232
|
+
successful: result.rows.length > 0 && result.rows[0].deleted > 0,
|
|
233
|
+
deletedCount: Number(result.rows[0]?.deleted ?? 0),
|
|
234
|
+
matchedCount: Number(result.rows[0]?.matched ?? 0)
|
|
235
|
+
},
|
|
236
|
+
{ operationName: "deleteOne", collectionName, errors }
|
|
237
|
+
);
|
|
238
|
+
},
|
|
239
|
+
deleteMany: async (filter, options) => {
|
|
240
|
+
await ensureCollectionCreated(options);
|
|
241
|
+
const result = await command(SqlFor.deleteMany(filter ?? {}), options);
|
|
242
|
+
return operationResult(
|
|
243
|
+
{
|
|
244
|
+
successful: (result.rowCount ?? 0) > 0,
|
|
245
|
+
deletedCount: result.rowCount ?? 0,
|
|
246
|
+
matchedCount: result.rowCount ?? 0
|
|
247
|
+
},
|
|
248
|
+
{ operationName: "deleteMany", collectionName, errors }
|
|
249
|
+
);
|
|
250
|
+
},
|
|
251
|
+
findOne: async (filter, options) => {
|
|
252
|
+
await ensureCollectionCreated(options);
|
|
253
|
+
const result = await query(SqlFor.findOne(filter ?? {}), options);
|
|
254
|
+
return result.rows[0]?.data ?? null;
|
|
255
|
+
},
|
|
256
|
+
findOneAndDelete: async (filter, options) => {
|
|
257
|
+
await ensureCollectionCreated(options);
|
|
258
|
+
const existingDoc = await collection.findOne(filter, options);
|
|
259
|
+
if (existingDoc === null) return null;
|
|
260
|
+
await collection.deleteOne(filter, options);
|
|
261
|
+
return existingDoc;
|
|
262
|
+
},
|
|
263
|
+
findOneAndReplace: async (filter, replacement, options) => {
|
|
264
|
+
await ensureCollectionCreated(options);
|
|
265
|
+
const existingDoc = await collection.findOne(filter, options);
|
|
266
|
+
if (existingDoc === null) return null;
|
|
267
|
+
await collection.replaceOne(filter, replacement, options);
|
|
268
|
+
return existingDoc;
|
|
269
|
+
},
|
|
270
|
+
findOneAndUpdate: async (filter, update, options) => {
|
|
271
|
+
await ensureCollectionCreated(options);
|
|
272
|
+
const existingDoc = await collection.findOne(filter, options);
|
|
273
|
+
if (existingDoc === null) return null;
|
|
274
|
+
await collection.updateOne(filter, update, options);
|
|
275
|
+
return existingDoc;
|
|
276
|
+
},
|
|
277
|
+
handle: async (id, handle, options) => {
|
|
278
|
+
const { expectedVersion: version, ...operationOptions } = options ?? {};
|
|
279
|
+
await ensureCollectionCreated(options);
|
|
280
|
+
const byId = { _id: id };
|
|
281
|
+
const existing = await collection.findOne(
|
|
282
|
+
byId,
|
|
283
|
+
options
|
|
284
|
+
);
|
|
285
|
+
const expectedVersion2 = expectedVersionValue(version);
|
|
286
|
+
if (existing == null && version === "DOCUMENT_EXISTS" || existing == null && expectedVersion2 != null || existing != null && version === "DOCUMENT_DOES_NOT_EXIST" || existing != null && expectedVersion2 !== null && existing._version !== expectedVersion2) {
|
|
287
|
+
return operationResult(
|
|
288
|
+
{
|
|
289
|
+
successful: false,
|
|
290
|
+
document: existing
|
|
291
|
+
},
|
|
292
|
+
{ operationName: "handle", collectionName, errors }
|
|
293
|
+
);
|
|
294
|
+
}
|
|
295
|
+
const result = await handle(
|
|
296
|
+
existing !== null ? { ...existing } : null
|
|
297
|
+
);
|
|
298
|
+
if (deepEquals(existing, result))
|
|
299
|
+
return operationResult(
|
|
300
|
+
{
|
|
301
|
+
successful: true,
|
|
302
|
+
document: existing
|
|
303
|
+
},
|
|
304
|
+
{ operationName: "handle", collectionName, errors }
|
|
305
|
+
);
|
|
306
|
+
if (!existing && result) {
|
|
307
|
+
const newDoc = { ...result, _id: id };
|
|
308
|
+
const insertResult = await collection.insertOne(
|
|
309
|
+
{ ...newDoc, _id: id },
|
|
310
|
+
{
|
|
311
|
+
...operationOptions,
|
|
312
|
+
expectedVersion: "DOCUMENT_DOES_NOT_EXIST"
|
|
313
|
+
}
|
|
314
|
+
);
|
|
315
|
+
return {
|
|
316
|
+
...insertResult,
|
|
317
|
+
document: {
|
|
318
|
+
...newDoc,
|
|
319
|
+
_version: insertResult.nextExpectedVersion
|
|
320
|
+
}
|
|
321
|
+
};
|
|
322
|
+
}
|
|
323
|
+
if (existing && !result) {
|
|
324
|
+
const deleteResult = await collection.deleteOne(byId, {
|
|
325
|
+
...operationOptions,
|
|
326
|
+
expectedVersion: expectedVersion2 ?? "DOCUMENT_EXISTS"
|
|
327
|
+
});
|
|
328
|
+
return { ...deleteResult, document: null };
|
|
329
|
+
}
|
|
330
|
+
if (existing && result) {
|
|
331
|
+
const replaceResult = await collection.replaceOne(byId, result, {
|
|
332
|
+
...operationOptions,
|
|
333
|
+
expectedVersion: expectedVersion2 ?? "DOCUMENT_EXISTS"
|
|
334
|
+
});
|
|
335
|
+
return {
|
|
336
|
+
...replaceResult,
|
|
337
|
+
document: {
|
|
338
|
+
...result,
|
|
339
|
+
_version: replaceResult.nextExpectedVersion
|
|
340
|
+
}
|
|
341
|
+
};
|
|
342
|
+
}
|
|
343
|
+
return operationResult(
|
|
344
|
+
{
|
|
345
|
+
successful: true,
|
|
346
|
+
document: existing
|
|
347
|
+
},
|
|
348
|
+
{ operationName: "handle", collectionName, errors }
|
|
349
|
+
);
|
|
350
|
+
},
|
|
351
|
+
find: async (filter, options) => {
|
|
352
|
+
await ensureCollectionCreated(options);
|
|
353
|
+
const result = await query(SqlFor.find(filter ?? {}));
|
|
354
|
+
return result.rows.map((row) => row.data);
|
|
355
|
+
},
|
|
356
|
+
countDocuments: async (filter, options) => {
|
|
357
|
+
await ensureCollectionCreated(options);
|
|
358
|
+
const { count } = await single(
|
|
359
|
+
query(SqlFor.countDocuments(filter ?? {}))
|
|
360
|
+
);
|
|
361
|
+
return count;
|
|
362
|
+
},
|
|
363
|
+
drop: async (options) => {
|
|
364
|
+
await ensureCollectionCreated(options);
|
|
365
|
+
const result = await command(SqlFor.drop());
|
|
366
|
+
return (result?.rowCount ?? 0) > 0;
|
|
367
|
+
},
|
|
368
|
+
rename: async (newName, options) => {
|
|
369
|
+
await ensureCollectionCreated(options);
|
|
370
|
+
await command(SqlFor.rename(newName));
|
|
371
|
+
collectionName = newName;
|
|
372
|
+
return collection;
|
|
373
|
+
},
|
|
374
|
+
sql: {
|
|
375
|
+
async query(sql4, options) {
|
|
376
|
+
await ensureCollectionCreated(options);
|
|
377
|
+
const result = await query(sql4, options);
|
|
378
|
+
return result.rows;
|
|
379
|
+
},
|
|
380
|
+
async command(sql4, options) {
|
|
381
|
+
await ensureCollectionCreated(options);
|
|
382
|
+
return command(sql4, options);
|
|
383
|
+
}
|
|
384
|
+
},
|
|
385
|
+
schema: {
|
|
386
|
+
get component() {
|
|
387
|
+
return schemaComponent2("pongo:schema_component:collection", {
|
|
388
|
+
migrations: SqlFor.migrations
|
|
389
|
+
});
|
|
390
|
+
},
|
|
391
|
+
migrate: () => runPostgreSQLMigrations2(pool, SqlFor.migrations())
|
|
392
|
+
// TODO: This needs to change to support more connectors
|
|
393
|
+
}
|
|
394
|
+
};
|
|
395
|
+
return collection;
|
|
396
|
+
};
|
|
397
|
+
var pongoCollectionSchemaComponent = (collectionName) => schemaComponent2("pongo:schema_component:collection", {
|
|
398
|
+
migrations: () => pongoCollectionPostgreSQLMigrations(collectionName)
|
|
399
|
+
// TODO: This needs to change to support more connectors
|
|
400
|
+
});
|
|
401
|
+
|
|
402
|
+
// src/core/collection/query.ts
|
|
403
|
+
var QueryOperators = {
|
|
404
|
+
$eq: "$eq",
|
|
405
|
+
$gt: "$gt",
|
|
406
|
+
$gte: "$gte",
|
|
407
|
+
$lt: "$lt",
|
|
408
|
+
$lte: "$lte",
|
|
409
|
+
$ne: "$ne",
|
|
410
|
+
$in: "$in",
|
|
411
|
+
$nin: "$nin",
|
|
412
|
+
$elemMatch: "$elemMatch",
|
|
413
|
+
$all: "$all",
|
|
414
|
+
$size: "$size"
|
|
415
|
+
};
|
|
416
|
+
var OperatorMap = {
|
|
417
|
+
$gt: ">",
|
|
418
|
+
$gte: ">=",
|
|
419
|
+
$lt: "<",
|
|
420
|
+
$lte: "<=",
|
|
421
|
+
$ne: "!="
|
|
422
|
+
};
|
|
423
|
+
var isOperator = (key) => key.startsWith("$");
|
|
424
|
+
var hasOperators = (value) => Object.keys(value).some(isOperator);
|
|
425
|
+
|
|
426
|
+
// src/core/errors/index.ts
|
|
427
|
+
var isNumber = (val) => typeof val === "number" && val === val;
|
|
428
|
+
var isString = (val) => typeof val === "string";
|
|
429
|
+
var PongoError = class _PongoError extends Error {
|
|
430
|
+
errorCode;
|
|
431
|
+
constructor(options) {
|
|
432
|
+
const errorCode = options && typeof options === "object" && "errorCode" in options ? options.errorCode : isNumber(options) ? options : 500;
|
|
433
|
+
const message = options && typeof options === "object" && "message" in options ? options.message : isString(options) ? options : `Error with status code '${errorCode}' ocurred during Pongo processing`;
|
|
434
|
+
super(message);
|
|
435
|
+
this.errorCode = errorCode;
|
|
436
|
+
Object.setPrototypeOf(this, _PongoError.prototype);
|
|
437
|
+
}
|
|
438
|
+
};
|
|
439
|
+
var ConcurrencyError = class _ConcurrencyError extends PongoError {
|
|
440
|
+
constructor(message) {
|
|
441
|
+
super({
|
|
442
|
+
errorCode: 412,
|
|
443
|
+
message: message ?? `Expected document state does not match current one!`
|
|
444
|
+
});
|
|
445
|
+
Object.setPrototypeOf(this, _ConcurrencyError.prototype);
|
|
446
|
+
}
|
|
447
|
+
};
|
|
448
|
+
|
|
449
|
+
// src/core/pongoClient.ts
|
|
450
|
+
import {
|
|
451
|
+
NodePostgresConnectorType as NodePostgresConnectorType2
|
|
452
|
+
} from "@event-driven-io/dumbo";
|
|
453
|
+
import "pg";
|
|
454
|
+
|
|
455
|
+
// src/core/pongoDb.ts
|
|
456
|
+
var getPongoDb = (options) => {
|
|
457
|
+
const { connectorType: type } = options;
|
|
458
|
+
if (!isPostgresClientOptions(options))
|
|
459
|
+
throw new Error(`Unsupported db type: ${type}`);
|
|
460
|
+
return postgresDb(options);
|
|
461
|
+
};
|
|
462
|
+
|
|
463
|
+
// src/core/pongoTransaction.ts
|
|
464
|
+
var pongoTransaction = (options) => {
|
|
465
|
+
let isCommitted = false;
|
|
466
|
+
let isRolledBack = false;
|
|
467
|
+
let databaseName = null;
|
|
468
|
+
let transaction = null;
|
|
469
|
+
return {
|
|
470
|
+
enlistDatabase: async (db) => {
|
|
471
|
+
if (transaction && databaseName !== db.databaseName)
|
|
472
|
+
throw new Error(
|
|
473
|
+
"There's already other database assigned to transaction"
|
|
474
|
+
);
|
|
475
|
+
if (transaction && databaseName === db.databaseName) return transaction;
|
|
476
|
+
databaseName = db.databaseName;
|
|
477
|
+
transaction = db.transaction();
|
|
478
|
+
await transaction.begin();
|
|
479
|
+
return transaction;
|
|
480
|
+
},
|
|
481
|
+
commit: async () => {
|
|
482
|
+
if (!transaction) throw new Error("No database transaction started!");
|
|
483
|
+
if (isCommitted) return;
|
|
484
|
+
if (isRolledBack) throw new Error("Transaction is not active!");
|
|
485
|
+
isCommitted = true;
|
|
486
|
+
await transaction.commit();
|
|
487
|
+
transaction = null;
|
|
488
|
+
},
|
|
489
|
+
rollback: async (error) => {
|
|
490
|
+
if (!transaction) throw new Error("No database transaction started!");
|
|
491
|
+
if (isCommitted) throw new Error("Cannot rollback commited transaction!");
|
|
492
|
+
if (isRolledBack) return;
|
|
493
|
+
isRolledBack = true;
|
|
494
|
+
await transaction.rollback(error);
|
|
495
|
+
transaction = null;
|
|
496
|
+
},
|
|
497
|
+
databaseName,
|
|
498
|
+
isStarting: false,
|
|
499
|
+
isCommitted,
|
|
500
|
+
get isActive() {
|
|
501
|
+
return !isCommitted && !isRolledBack;
|
|
502
|
+
},
|
|
503
|
+
get sqlExecutor() {
|
|
504
|
+
if (transaction === null)
|
|
505
|
+
throw new Error("No database transaction was started");
|
|
506
|
+
return transaction.execute;
|
|
507
|
+
},
|
|
508
|
+
options
|
|
509
|
+
};
|
|
510
|
+
};
|
|
511
|
+
|
|
512
|
+
// src/core/pongoSession.ts
|
|
513
|
+
var isActive = (transaction) => transaction?.isActive === true;
|
|
514
|
+
function assertInActiveTransaction(transaction) {
|
|
515
|
+
if (!isActive(transaction)) throw new Error("No active transaction exists!");
|
|
516
|
+
}
|
|
517
|
+
function assertNotInActiveTransaction(transaction) {
|
|
518
|
+
if (isActive(transaction))
|
|
519
|
+
throw new Error("Active transaction already exists!");
|
|
520
|
+
}
|
|
521
|
+
var pongoSession = (options) => {
|
|
522
|
+
const explicit = options?.explicit === true;
|
|
523
|
+
const defaultTransactionOptions = options?.defaultTransactionOptions ?? {
|
|
524
|
+
get snapshotEnabled() {
|
|
525
|
+
return false;
|
|
526
|
+
}
|
|
527
|
+
};
|
|
528
|
+
let transaction = null;
|
|
529
|
+
let hasEnded = false;
|
|
530
|
+
const startTransaction = (options2) => {
|
|
531
|
+
assertNotInActiveTransaction(transaction);
|
|
532
|
+
transaction = pongoTransaction(options2 ?? defaultTransactionOptions);
|
|
533
|
+
};
|
|
534
|
+
const commitTransaction = async () => {
|
|
535
|
+
assertInActiveTransaction(transaction);
|
|
536
|
+
await transaction.commit();
|
|
537
|
+
};
|
|
538
|
+
const abortTransaction = async () => {
|
|
539
|
+
assertInActiveTransaction(transaction);
|
|
540
|
+
await transaction.rollback();
|
|
541
|
+
};
|
|
542
|
+
const endSession = async () => {
|
|
543
|
+
if (hasEnded) return;
|
|
544
|
+
hasEnded = true;
|
|
545
|
+
if (isActive(transaction)) await transaction.rollback();
|
|
546
|
+
};
|
|
547
|
+
const session = {
|
|
548
|
+
get hasEnded() {
|
|
549
|
+
return hasEnded;
|
|
550
|
+
},
|
|
551
|
+
explicit,
|
|
552
|
+
defaultTransactionOptions: defaultTransactionOptions ?? {
|
|
553
|
+
get snapshotEnabled() {
|
|
554
|
+
return false;
|
|
555
|
+
}
|
|
556
|
+
},
|
|
557
|
+
get transaction() {
|
|
558
|
+
return transaction;
|
|
559
|
+
},
|
|
560
|
+
get snapshotEnabled() {
|
|
561
|
+
return defaultTransactionOptions.snapshotEnabled;
|
|
562
|
+
},
|
|
563
|
+
endSession,
|
|
564
|
+
incrementTransactionNumber: () => {
|
|
565
|
+
},
|
|
566
|
+
inTransaction: () => isActive(transaction),
|
|
567
|
+
startTransaction,
|
|
568
|
+
commitTransaction,
|
|
569
|
+
abortTransaction,
|
|
570
|
+
withTransaction: async (fn, options2) => {
|
|
571
|
+
startTransaction(options2);
|
|
572
|
+
try {
|
|
573
|
+
const result = await fn(session);
|
|
574
|
+
await commitTransaction();
|
|
575
|
+
return result;
|
|
576
|
+
} catch (error) {
|
|
577
|
+
await abortTransaction();
|
|
578
|
+
throw error;
|
|
579
|
+
}
|
|
580
|
+
}
|
|
581
|
+
};
|
|
582
|
+
return session;
|
|
583
|
+
};
|
|
584
|
+
|
|
585
|
+
// src/core/typing/entries.ts
|
|
586
|
+
var objectEntries = (obj) => Object.entries(obj).map(([key, value]) => [key, value]);
|
|
587
|
+
|
|
588
|
+
// src/core/typing/operations.ts
|
|
589
|
+
import {
|
|
590
|
+
JSONSerializer
|
|
591
|
+
} from "@event-driven-io/dumbo";
|
|
592
|
+
import { v7 as uuid2 } from "uuid";
|
|
593
|
+
var ObjectId = (value) => value ?? uuid2();
|
|
594
|
+
var DOCUMENT_EXISTS = "DOCUMENT_EXISTS";
|
|
595
|
+
var DOCUMENT_DOES_NOT_EXIST = "DOCUMENT_DOES_NOT_EXIST";
|
|
596
|
+
var NO_CONCURRENCY_CHECK = "NO_CONCURRENCY_CHECK";
|
|
597
|
+
var isGeneralExpectedDocumentVersion = (version) => version === "DOCUMENT_DOES_NOT_EXIST" || version === "DOCUMENT_EXISTS" || version === "NO_CONCURRENCY_CHECK";
|
|
598
|
+
var expectedVersionValue = (version) => version === void 0 || isGeneralExpectedDocumentVersion(version) ? null : version;
|
|
599
|
+
var expectedVersion = (version) => {
|
|
600
|
+
return version ? BigInt(version) : NO_CONCURRENCY_CHECK;
|
|
601
|
+
};
|
|
602
|
+
var operationResult = (result, options) => {
|
|
603
|
+
const operationResult2 = {
|
|
604
|
+
...result,
|
|
605
|
+
acknowledged: true,
|
|
606
|
+
successful: result.successful,
|
|
607
|
+
assertSuccessful: (errorMessage) => {
|
|
608
|
+
const { successful } = result;
|
|
609
|
+
const { operationName, collectionName } = options;
|
|
610
|
+
if (!successful)
|
|
611
|
+
throw new ConcurrencyError(
|
|
612
|
+
errorMessage ?? `${operationName} on ${collectionName} failed. Expected document state does not match current one! Result: ${JSONSerializer.serialize(result)}!`
|
|
613
|
+
);
|
|
614
|
+
}
|
|
615
|
+
};
|
|
616
|
+
if (options.errors?.throwOnOperationFailures)
|
|
617
|
+
operationResult2.assertSuccessful();
|
|
618
|
+
return operationResult2;
|
|
619
|
+
};
|
|
620
|
+
|
|
621
|
+
// src/core/schema/index.ts
|
|
622
|
+
var pongoCollectionSchema = (name) => ({
|
|
623
|
+
name
|
|
624
|
+
});
|
|
625
|
+
function pongoDbSchema(nameOrCollections, collections) {
|
|
626
|
+
if (collections === void 0) {
|
|
627
|
+
if (typeof nameOrCollections === "string") {
|
|
628
|
+
throw new Error("You need to provide colleciton definition");
|
|
629
|
+
}
|
|
630
|
+
return {
|
|
631
|
+
collections: nameOrCollections
|
|
632
|
+
};
|
|
633
|
+
}
|
|
634
|
+
return nameOrCollections && typeof nameOrCollections === "string" ? {
|
|
635
|
+
name: nameOrCollections,
|
|
636
|
+
collections
|
|
637
|
+
} : { collections };
|
|
638
|
+
}
|
|
639
|
+
var pongoClientSchema = (dbs) => ({
|
|
640
|
+
dbs
|
|
641
|
+
});
|
|
642
|
+
var pongoSchema = {
|
|
643
|
+
client: pongoClientSchema,
|
|
644
|
+
db: pongoDbSchema,
|
|
645
|
+
collection: pongoCollectionSchema
|
|
646
|
+
};
|
|
647
|
+
var proxyPongoDbWithSchema = (pongoDb, dbSchema, collections) => {
|
|
648
|
+
const collectionNames = Object.keys(dbSchema.collections);
|
|
649
|
+
for (const collectionName of collectionNames) {
|
|
650
|
+
collections.set(collectionName, pongoDb.collection(collectionName));
|
|
651
|
+
}
|
|
652
|
+
return new Proxy(
|
|
653
|
+
pongoDb,
|
|
654
|
+
{
|
|
655
|
+
get(target, prop) {
|
|
656
|
+
return collections.get(prop) ?? target[prop];
|
|
657
|
+
}
|
|
658
|
+
}
|
|
659
|
+
);
|
|
660
|
+
};
|
|
661
|
+
var proxyClientWithSchema = (client, schema) => {
|
|
662
|
+
if (!schema) return client;
|
|
663
|
+
const dbNames = Object.keys(schema.dbs);
|
|
664
|
+
return new Proxy(
|
|
665
|
+
client,
|
|
666
|
+
{
|
|
667
|
+
get(target, prop) {
|
|
668
|
+
if (dbNames.includes(prop)) return client.db(schema.dbs[prop]?.name);
|
|
669
|
+
return target[prop];
|
|
670
|
+
}
|
|
671
|
+
}
|
|
672
|
+
);
|
|
673
|
+
};
|
|
674
|
+
var toDbSchemaMetadata = (schema) => ({
|
|
675
|
+
name: schema.name,
|
|
676
|
+
collections: objectEntries(schema.collections).map((c) => ({
|
|
677
|
+
name: c[1].name
|
|
678
|
+
}))
|
|
679
|
+
});
|
|
680
|
+
var toClientSchemaMetadata = (schema) => {
|
|
681
|
+
const databases = objectEntries(schema.dbs).map(
|
|
682
|
+
(e) => toDbSchemaMetadata(e[1])
|
|
683
|
+
);
|
|
684
|
+
return {
|
|
685
|
+
databases,
|
|
686
|
+
database: (name) => databases.find((db) => db.name === name)
|
|
687
|
+
};
|
|
688
|
+
};
|
|
689
|
+
|
|
690
|
+
// src/core/pongoClient.ts
|
|
691
|
+
var pongoClient = (connectionString, options = {}) => {
|
|
692
|
+
const dbClients = /* @__PURE__ */ new Map();
|
|
693
|
+
const dbClient = getPongoDb(
|
|
694
|
+
clientToDbOptions({
|
|
695
|
+
connectionString,
|
|
696
|
+
clientOptions: options
|
|
697
|
+
})
|
|
698
|
+
);
|
|
699
|
+
dbClients.set(dbClient.databaseName, dbClient);
|
|
700
|
+
const pongoClient2 = {
|
|
701
|
+
connect: async () => {
|
|
702
|
+
await dbClient.connect();
|
|
703
|
+
return pongoClient2;
|
|
704
|
+
},
|
|
705
|
+
close: async () => {
|
|
706
|
+
for (const db of dbClients.values()) {
|
|
707
|
+
await db.close();
|
|
708
|
+
}
|
|
709
|
+
},
|
|
710
|
+
db: (dbName) => {
|
|
711
|
+
if (!dbName) return dbClient;
|
|
712
|
+
return dbClients.get(dbName) ?? dbClients.set(
|
|
713
|
+
dbName,
|
|
714
|
+
getPongoDb(
|
|
715
|
+
clientToDbOptions({
|
|
716
|
+
connectionString,
|
|
717
|
+
dbName,
|
|
718
|
+
clientOptions: options
|
|
719
|
+
})
|
|
720
|
+
)
|
|
721
|
+
).get(dbName);
|
|
722
|
+
},
|
|
723
|
+
startSession: pongoSession,
|
|
724
|
+
withSession: async (callback) => {
|
|
725
|
+
const session = pongoSession();
|
|
726
|
+
try {
|
|
727
|
+
return await callback(session);
|
|
728
|
+
} finally {
|
|
729
|
+
await session.endSession();
|
|
730
|
+
}
|
|
731
|
+
}
|
|
732
|
+
};
|
|
733
|
+
return proxyClientWithSchema(pongoClient2, options?.schema?.definition);
|
|
734
|
+
};
|
|
735
|
+
var clientToDbOptions = (options) => {
|
|
736
|
+
const postgreSQLOptions = {
|
|
737
|
+
connectorType: NodePostgresConnectorType2,
|
|
738
|
+
connectionString: options.connectionString,
|
|
739
|
+
dbName: options.dbName,
|
|
740
|
+
...options.clientOptions
|
|
741
|
+
};
|
|
742
|
+
return postgreSQLOptions;
|
|
743
|
+
};
|
|
744
|
+
|
|
745
|
+
// src/core/utils/deepEquals.ts
|
|
746
|
+
var deepEquals = (left, right) => {
|
|
747
|
+
if (isEquatable(left)) {
|
|
748
|
+
return left.equals(right);
|
|
749
|
+
}
|
|
750
|
+
if (Array.isArray(left)) {
|
|
751
|
+
return Array.isArray(right) && left.length === right.length && left.every((val, index) => deepEquals(val, right[index]));
|
|
752
|
+
}
|
|
753
|
+
if (typeof left !== "object" || typeof right !== "object" || left === null || right === null) {
|
|
754
|
+
return left === right;
|
|
755
|
+
}
|
|
756
|
+
if (Array.isArray(right)) return false;
|
|
757
|
+
const keys1 = Object.keys(left);
|
|
758
|
+
const keys2 = Object.keys(right);
|
|
759
|
+
if (keys1.length !== keys2.length || !keys1.every((key) => keys2.includes(key)))
|
|
760
|
+
return false;
|
|
761
|
+
for (const key in left) {
|
|
762
|
+
if (left[key] instanceof Function && right[key] instanceof Function)
|
|
763
|
+
continue;
|
|
764
|
+
const isEqual = deepEquals(left[key], right[key]);
|
|
765
|
+
if (!isEqual) {
|
|
766
|
+
return false;
|
|
767
|
+
}
|
|
768
|
+
}
|
|
769
|
+
return true;
|
|
770
|
+
};
|
|
771
|
+
var isEquatable = (left) => {
|
|
772
|
+
return left && typeof left === "object" && "equals" in left && typeof left["equals"] === "function";
|
|
773
|
+
};
|
|
774
|
+
|
|
775
|
+
// src/postgres/sqlBuilder/filter/queryOperators.ts
|
|
776
|
+
import { JSONSerializer as JSONSerializer2, sql } from "@event-driven-io/dumbo";
|
|
777
|
+
// Renders one Mongo-style filter operator for a dotted document path into a
// PostgreSQL JSONB predicate. `_id`/`_version` are stored in dedicated table
// columns, so they take a different code path than the `data` JSONB payload.
var handleOperator = (path, operator, value) => {
  if (path === "_id" || path === "_version") {
    return handleMetadataOperator(path, operator, value);
  }
  // Text-path form used by the `#>>` / `#>` operators, e.g. "a.b" -> "{a,b}".
  const jsonTextPath = `{${path.split(".").join(",")}}`;
  if (operator === "$eq") {
    // Matches either a direct containment or any array element equal to value.
    return sql(
      `(data @> %L::jsonb OR jsonb_path_exists(data, '$.%s[*] ? (@ == %s)'))`,
      JSONSerializer2.serialize(buildNestedObject(path, value)),
      path,
      JSONSerializer2.serialize(value)
    );
  }
  if (
    operator === "$gt" ||
    operator === "$gte" ||
    operator === "$lt" ||
    operator === "$lte" ||
    operator === "$ne"
  ) {
    // Text extraction + SQL comparison operator looked up from OperatorMap.
    return sql(`data #>> %L ${OperatorMap[operator]} %L`, jsonTextPath, value);
  }
  if (operator === "$in" || operator === "$nin") {
    // Each element is escaped individually, then joined into the IN list.
    const list = value.map((item) => sql("%L", item)).join(", ");
    return sql(
      operator === "$in" ? "data #>> %L IN (%s)" : "data #>> %L NOT IN (%s)",
      jsonTextPath,
      list
    );
  }
  if (operator === "$elemMatch") {
    // AND all sub-conditions inside a single jsonpath array-element filter.
    const conditions = objectEntries(value)
      .map(([subKey, subValue]) => sql(`@."%s" == %s`, subKey, JSONSerializer2.serialize(subValue)))
      .join(" && ");
    return sql(`jsonb_path_exists(data, '$.%s[*] ? (%s)')`, path, conditions);
  }
  if (operator === "$all") {
    return sql(
      "data @> %L::jsonb",
      JSONSerializer2.serialize(buildNestedObject(path, value))
    );
  }
  if (operator === "$size") {
    return sql("jsonb_array_length(data #> %L) = %L", jsonTextPath, value);
  }
  throw new Error(`Unsupported operator: ${operator}`);
};
|
|
832
|
+
// Renders an operator against the metadata columns (_id, _version), which are
// real table columns — no JSONB extraction needed, plain SQL comparisons apply.
var handleMetadataOperator = (fieldName, operator, value) => {
  if (operator === "$eq") {
    return sql(`${fieldName} = %L`, value);
  }
  if (
    operator === "$gt" ||
    operator === "$gte" ||
    operator === "$lt" ||
    operator === "$lte" ||
    operator === "$ne"
  ) {
    return sql(`${fieldName} ${OperatorMap[operator]} %L`, value);
  }
  if (operator === "$in" || operator === "$nin") {
    // Escape each element separately before joining the IN/NOT IN list.
    const list = value.map((item) => sql("%L", item)).join(", ");
    return sql(
      operator === "$in" ? `${fieldName} IN (%s)` : `${fieldName} NOT IN (%s)`,
      list
    );
  }
  throw new Error(`Unsupported operator: ${operator}`);
};
|
|
856
|
+
// Expands a dotted path into a nested object wrapping `value`,
// e.g. ("a.b.c", 1) -> { a: { b: { c: 1 } } }. Used for `@>` containment checks.
var buildNestedObject = (path, value) => {
  const segments = path.split(".");
  let nested = value;
  for (let i = segments.length - 1; i >= 0; i -= 1) {
    nested = { [segments[i]]: nested };
  }
  return nested;
};
|
|
857
|
+
|
|
858
|
+
// src/postgres/sqlBuilder/filter/index.ts
var AND = "AND";
// Builds the WHERE-clause body for a Mongo-style filter document.
// Record values are either nested-equality or operator objects; everything
// else is a plain top-level equality check.
var constructFilterQuery = (filter) => {
  const clauses = Object.entries(filter).map(([key, value]) => {
    if (isRecord(value)) {
      return constructComplexFilterQuery(key, value);
    }
    return handleOperator(key, "$eq", value);
  });
  return clauses.join(` ${AND} `);
};
// A record without any $-operators is treated as nested-field equality
// ({ a: { b: 1 } } -> "a.b" == 1); otherwise each entry is an operator.
var constructComplexFilterQuery = (key, value) => {
  const treatAsEquality = !hasOperators(value);
  const clauses = objectEntries(value).map(([nestedKey, nestedValue]) =>
    treatAsEquality
      ? handleOperator(`${key}.${nestedKey}`, QueryOperators.$eq, nestedValue)
      : handleOperator(key, nestedKey, nestedValue)
  );
  return clauses.join(` ${AND} `);
};
|
|
870
|
+
// True for plain object values only — excludes null, arrays and primitives.
var isRecord = (value) =>
  typeof value === "object" && value !== null && !Array.isArray(value);
|
|
871
|
+
|
|
872
|
+
// src/postgres/sqlBuilder/update/index.ts
|
|
873
|
+
import { JSONSerializer as JSONSerializer3, sql as sql2 } from "@event-driven-io/dumbo";
|
|
874
|
+
// Folds the supported Mongo-style update operators over the `data` column
// expression, producing one composed JSONB expression. Unknown operators are
// silently ignored (the accumulator passes through unchanged).
var buildUpdateQuery = (update) => {
  let query = sql2("data");
  for (const [op, value] of objectEntries(update)) {
    if (op === "$set") {
      query = buildSetQuery(value, query);
    } else if (op === "$unset") {
      query = buildUnsetQuery(value, query);
    } else if (op === "$inc") {
      query = buildIncQuery(value, query);
    } else if (op === "$push") {
      query = buildPushQuery(value, query);
    }
  }
  return query;
};
|
|
888
|
+
// $set: JSONB concatenation merges the serialized patch over current data.
var buildSetQuery = (set, currentUpdateQuery) => {
  const patch = JSONSerializer3.serialize(set);
  return sql2("%s || %L::jsonb", currentUpdateQuery, patch);
};
|
|
889
|
+
// $unset: removes keys from the JSONB expression using the `-` operator.
// NOTE(review): all keys are joined into a single %L literal, so with more
// than one $unset key this renders e.g. `data - '{a}, {b}'` — one odd key
// name rather than one removal per key. Also `-` takes a plain key name,
// while the `{key}` wrapping resembles the `#-` path syntax. Confirm
// multi-key and nested $unset behavior against a live database.
var buildUnsetQuery = (unset, currentUpdateQuery) => sql2(
  "%s - %L",
  currentUpdateQuery,
  Object.keys(unset).map((k) => `{${k}}`).join(", ")
);
|
|
894
|
+
// $inc: increments each named field via jsonb_set, chaining one jsonb_set
// call per key onto the accumulated expression. Missing fields start at 0.
var buildIncQuery = (inc, currentUpdateQuery) => {
  for (const [key, value] of Object.entries(inc)) {
    // bigints are stored as text to avoid JSON number precision loss;
    // everything else goes through NUMERIC arithmetic.
    const template =
      typeof value === "bigint"
        ? "jsonb_set(%s, '{%s}', to_jsonb((COALESCE((data->>'%s')::BIGINT, 0) + %L)::TEXT), true)"
        : "jsonb_set(%s, '{%s}', to_jsonb(COALESCE((data->>'%s')::NUMERIC, 0) + %L), true)";
    currentUpdateQuery = sql2(template, currentUpdateQuery, key, key, value);
  }
  return currentUpdateQuery;
};
|
|
906
|
+
// $push: appends a value to each named array field, creating the array
// ('[]'::jsonb) when the field is absent. Chains one jsonb_set per key.
var buildPushQuery = (push, currentUpdateQuery) => {
  for (const [key, value] of Object.entries(push)) {
    // Serialized as a one-element array so `||` appends a single element.
    const element = JSONSerializer3.serialize([value]);
    currentUpdateQuery = sql2(
      "jsonb_set(%s, '{%s}', (coalesce(data->'%s', '[]'::jsonb) || %L::jsonb), true)",
      currentUpdateQuery,
      key,
      key,
      element
    );
  }
  return currentUpdateQuery;
};
|
|
918
|
+
|
|
919
|
+
// src/postgres/sqlBuilder/index.ts
// DDL for one Pongo collection table: the document lives in `data` (JSONB),
// with metadata columns for optimistic concurrency (_version), logical
// partitioning (_partition), soft delete (_archived) and timestamps.
// Idempotent thanks to IF NOT EXISTS; %I escapes the table identifier.
var createCollection = (collectionName) => sql3(
  `CREATE TABLE IF NOT EXISTS %I (
      _id           TEXT           PRIMARY KEY, 
      data          JSONB          NOT NULL, 
      metadata      JSONB          NOT NULL     DEFAULT '{}',
      _version      BIGINT         NOT NULL     DEFAULT 1,
      _partition    TEXT           NOT NULL     DEFAULT 'png_global',
      _archived     BOOLEAN        NOT NULL     DEFAULT FALSE,
      _created      TIMESTAMPTZ    NOT NULL     DEFAULT now(),
      _updated      TIMESTAMPTZ    NOT NULL     DEFAULT now()
  )`,
  collectionName
);
|
|
933
|
+
// Migration set for a collection: currently a single create-table step,
// keyed by a collection-scoped migration name.
var pongoCollectionPostgreSQLMigrations = (collectionName) => {
  const migrationName = `pongoCollection:${collectionName}:001:createtable`;
  return [sqlMigration(migrationName, [createCollection(collectionName)])];
};
|
|
938
|
+
// SQL builder for a single PostgreSQL-backed Pongo collection. Every method
// only *builds* a SQL object via sql3 — nothing here executes queries.
// Placeholders: %I = identifier, %L = quoted literal, %s = raw SQL fragment.
// The positional argument lists after each template must match the
// placeholder order exactly (note the spread of expectedVersionParams, which
// fills the two extra %I/%L slots only when a version check is requested).
var postgresSQLBuilder = (collectionName) => ({
  // Schema migrations required before the collection can be used.
  migrations: () => pongoCollectionPostgreSQLMigrations(collectionName),
  // CREATE TABLE IF NOT EXISTS statement for this collection.
  createCollection: () => createCollection(collectionName),
  insertOne: (document) => {
    // ON CONFLICT DO NOTHING: re-inserting an existing _id is a silent no-op.
    return sql3(
      "INSERT INTO %I (_id, data, _version) VALUES (%L, %L, %L) ON CONFLICT(_id) DO NOTHING;",
      collectionName,
      document._id,
      JSONSerializer4.serialize(document),
      document._version ?? 1n
    );
  },
  insertMany: (documents) => {
    // One escaped (_id, data, _version) tuple per document, joined into a
    // single multi-row INSERT. RETURNING _id reports which rows were inserted
    // (conflicting ids are skipped by DO NOTHING and not returned).
    const values = documents.map(
      (doc) => sql3(
        "(%L, %L, %L)",
        doc._id,
        JSONSerializer4.serialize(doc),
        doc._version ?? 1n
      )
    ).join(", ");
    return sql3(
      `INSERT INTO %I (_id, data, _version) VALUES %s
      ON CONFLICT(_id) DO NOTHING
      RETURNING _id;`,
      collectionName,
      values
    );
  },
  updateOne: (filter, update, options) => {
    // Optional optimistic-concurrency guard: when an expected version is
    // given, both the extra "AND %I._version = %L" fragment and its two
    // parameters are injected; otherwise both collapse to nothing.
    const expectedVersion2 = expectedVersionValue(options?.expectedVersion);
    const expectedVersionUpdate = expectedVersion2 != null ? "AND %I._version = %L" : "";
    const expectedVersionParams = expectedVersion2 != null ? [collectionName, expectedVersion2] : [];
    // Filters/updates may already be prebuilt SQL; otherwise build them here.
    const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
    const updateQuery = isSQL(update) ? update : buildUpdateQuery(update);
    // CTE shape: `existing` picks at most one matching row; `updated` applies
    // the update (bumping _version and mirroring _id/_version into data);
    // the final SELECT reports matched vs modified counts so a version
    // mismatch shows up as matched=1, modified=0.
    return sql3(
      `WITH existing AS (
        SELECT _id, _version as current_version
        FROM %I %s
        LIMIT 1
      ),
      updated AS (
        UPDATE %I
        SET
          data = %s || jsonb_build_object('_id', %I._id) || jsonb_build_object('_version', (_version + 1)::text),
          _version = _version + 1
        FROM existing
        WHERE %I._id = existing._id ${expectedVersionUpdate}
        RETURNING %I._id, %I._version
      )
      SELECT
        existing._id,
        COALESCE(updated._version, existing.current_version) AS version,
        COUNT(existing._id) over() AS matched,
        COUNT(updated._id) over() AS modified
      FROM existing
      LEFT JOIN updated
        ON existing._id = updated._id;`,
      collectionName,
      where(filterQuery),
      collectionName,
      updateQuery,
      collectionName,
      collectionName,
      ...expectedVersionParams,
      collectionName,
      collectionName
    );
  },
  replaceOne: (filter, document, options) => {
    // Same CTE shape as updateOne, but the whole document is supplied as a
    // %L literal instead of a composed update expression (%s).
    const expectedVersion2 = expectedVersionValue(options?.expectedVersion);
    const expectedVersionUpdate = expectedVersion2 != null ? "AND %I._version = %L" : "";
    const expectedVersionParams = expectedVersion2 != null ? [collectionName, expectedVersion2] : [];
    const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
    return sql3(
      `WITH existing AS (
        SELECT _id, _version as current_version
        FROM %I %s
        LIMIT 1
      ),
      updated AS (
        UPDATE %I
        SET
          data = %L || jsonb_build_object('_id', %I._id) || jsonb_build_object('_version', (_version + 1)::text),
          _version = _version + 1
        FROM existing
        WHERE %I._id = existing._id ${expectedVersionUpdate}
        RETURNING %I._id, %I._version
      )
      SELECT
        existing._id,
        COALESCE(updated._version, existing.current_version) AS version,
        COUNT(existing._id) over() AS matched,
        COUNT(updated._id) over() AS modified
      FROM existing
      LEFT JOIN updated
        ON existing._id = updated._id;`,
      collectionName,
      where(filterQuery),
      collectionName,
      JSONSerializer4.serialize(document),
      collectionName,
      collectionName,
      ...expectedVersionParams,
      collectionName,
      collectionName
    );
  },
  updateMany: (filter, update) => {
    // Bulk update: no LIMIT, no version check; data's embedded _version text
    // is kept in sync with the bumped _version column.
    const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
    const updateQuery = isSQL(update) ? update : buildUpdateQuery(update);
    return sql3(
      `UPDATE %I
      SET
        data = %s || jsonb_build_object('_version', (_version + 1)::text),
        _version = _version + 1
      %s;`,
      collectionName,
      updateQuery,
      where(filterQuery)
    );
  },
  deleteOne: (filter, options) => {
    // Mirrors updateOne's CTE shape for deletes: matched vs deleted counts
    // distinguish "no match" from "version check failed".
    const expectedVersion2 = expectedVersionValue(options?.expectedVersion);
    const expectedVersionUpdate = expectedVersion2 != null ? "AND %I._version = %L" : "";
    const expectedVersionParams = expectedVersion2 != null ? [collectionName, expectedVersion2] : [];
    const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
    return sql3(
      `WITH existing AS (
        SELECT _id
        FROM %I %s
        LIMIT 1
      ),
      deleted AS (
        DELETE FROM %I
        USING existing
        WHERE %I._id = existing._id ${expectedVersionUpdate}
        RETURNING %I._id
      )
      SELECT
        existing._id,
        COUNT(existing._id) over() AS matched,
        COUNT(deleted._id) over() AS deleted
      FROM existing
      LEFT JOIN deleted
        ON existing._id = deleted._id;`,
      collectionName,
      where(filterQuery),
      collectionName,
      collectionName,
      ...expectedVersionParams,
      collectionName
    );
  },
  deleteMany: (filter) => {
    const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
    return sql3("DELETE FROM %I %s", collectionName, where(filterQuery));
  },
  findOne: (filter) => {
    const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
    return sql3(
      "SELECT data FROM %I %s LIMIT 1;",
      collectionName,
      where(filterQuery)
    );
  },
  find: (filter) => {
    const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
    return sql3("SELECT data FROM %I %s;", collectionName, where(filterQuery));
  },
  countDocuments: (filter) => {
    const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);
    return sql3(
      "SELECT COUNT(1) as count FROM %I %s;",
      collectionName,
      where(filterQuery)
    );
  },
  rename: (newName) => sql3("ALTER TABLE %I RENAME TO %I;", collectionName, newName),
  // Defaults to dropping this collection's own table when no target is given.
  drop: (targetName = collectionName) => sql3("DROP TABLE IF EXISTS %I", targetName)
});
|
|
1119
|
+
// Wraps a non-empty filter in a WHERE clause; an empty filter yields
// an empty raw fragment so callers can splice it unconditionally.
var where = (filter) => {
  if (filter.length === 0) {
    return rawSql("");
  }
  return sql3("WHERE %s", filter);
};
|
|
1120
|
+
|
|
1121
|
+
// Public surface of this generated chunk (bundler output): collection/db
// factories, SQL builders, filter/update helpers, session/transaction
// utilities, schema proxies and shared error types.
export {
  pongoCollectionPostgreSQLMigrations,
  postgresSQLBuilder,
  isPostgresClientOptions,
  postgresDb,
  pongoDbSchemaComponent,
  transactionExecutorOrDefault,
  pongoCollection,
  pongoCollectionSchemaComponent,
  QueryOperators,
  OperatorMap,
  isOperator,
  hasOperators,
  isNumber,
  isString,
  PongoError,
  ConcurrencyError,
  getPongoDb,
  pongoTransaction,
  pongoSession,
  objectEntries,
  ObjectId,
  DOCUMENT_EXISTS,
  DOCUMENT_DOES_NOT_EXIST,
  NO_CONCURRENCY_CHECK,
  isGeneralExpectedDocumentVersion,
  expectedVersionValue,
  expectedVersion,
  operationResult,
  pongoSchema,
  proxyPongoDbWithSchema,
  proxyClientWithSchema,
  toDbSchemaMetadata,
  toClientSchemaMetadata,
  pongoClient,
  clientToDbOptions,
  deepEquals,
  isEquatable
};
|
|
1160
|
+
//# sourceMappingURL=chunk-IXL27BW5.js.map
|