@lobb-js/core 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +48 -0
- package/src/Lobb.ts +150 -0
- package/src/LobbError.ts +105 -0
- package/src/TypesGenerator.ts +11 -0
- package/src/api/WebServer.ts +126 -0
- package/src/api/collections/CollectionControllers.ts +485 -0
- package/src/api/collections/CollectionService.ts +162 -0
- package/src/api/collections/collectionRoutes.ts +105 -0
- package/src/api/collections/collectionStore.ts +647 -0
- package/src/api/collections/transactions.ts +166 -0
- package/src/api/collections/utils.ts +73 -0
- package/src/api/errorHandler.ts +73 -0
- package/src/api/events/index.ts +129 -0
- package/src/api/meta/route.ts +66 -0
- package/src/api/meta/service.ts +163 -0
- package/src/api/middlewares.ts +71 -0
- package/src/api/openApiRoute.ts +1017 -0
- package/src/api/schema/SchemaService.ts +71 -0
- package/src/api/schema/schemaRoutes.ts +13 -0
- package/src/config/ConfigManager.ts +252 -0
- package/src/config/validations.ts +49 -0
- package/src/coreCollections/collectionsCollection.ts +56 -0
- package/src/coreCollections/index.ts +14 -0
- package/src/coreCollections/migrationsCollection.ts +36 -0
- package/src/coreCollections/queryCollection.ts +26 -0
- package/src/coreCollections/workflowsCollection.ts +73 -0
- package/src/coreDbSetup/index.ts +72 -0
- package/src/coreMigrations/index.ts +3 -0
- package/src/database/DatabaseService.ts +44 -0
- package/src/database/DatabaseSyncManager.ts +173 -0
- package/src/database/MigrationsManager.ts +95 -0
- package/src/database/drivers/MongoDriver.ts +750 -0
- package/src/database/drivers/pgDriver/PGDriver.ts +655 -0
- package/src/database/drivers/pgDriver/QueryBuilder.ts +474 -0
- package/src/database/drivers/pgDriver/utils.ts +6 -0
- package/src/events/EventSystem.ts +191 -0
- package/src/events/coreEvents/index.ts +218 -0
- package/src/events/studioEvents/index.ts +32 -0
- package/src/extension/ExtensionSystem.ts +236 -0
- package/src/extension/dashboardRoute.ts +35 -0
- package/src/fields/ArrayField.ts +33 -0
- package/src/fields/BoolField.ts +34 -0
- package/src/fields/DateField.ts +13 -0
- package/src/fields/DateTimeField.ts +13 -0
- package/src/fields/DecimalField.ts +13 -0
- package/src/fields/FieldUtils.ts +56 -0
- package/src/fields/FloatField.ts +13 -0
- package/src/fields/IntegerField.ts +13 -0
- package/src/fields/LongField.ts +13 -0
- package/src/fields/ObjectField.ts +15 -0
- package/src/fields/StringField.ts +13 -0
- package/src/fields/TextField.ts +13 -0
- package/src/fields/TimeField.ts +13 -0
- package/src/index.ts +53 -0
- package/src/studio/Studio.ts +108 -0
- package/src/types/CollectionControllers.ts +15 -0
- package/src/types/DatabaseDriver.ts +115 -0
- package/src/types/Extension.ts +46 -0
- package/src/types/Field.ts +29 -0
- package/src/types/apiSchema.ts +12 -0
- package/src/types/collectionServiceSchema.ts +18 -0
- package/src/types/config/collectionFields.ts +85 -0
- package/src/types/config/collectionsConfig.ts +50 -0
- package/src/types/config/config.ts +66 -0
- package/src/types/config/relations.ts +17 -0
- package/src/types/filterSchema.ts +88 -0
- package/src/types/index.ts +38 -0
- package/src/types/migrations.ts +12 -0
- package/src/types/websockets.ts +34 -0
- package/src/types/workflows/processors.ts +1 -0
- package/src/utils/lockCollectionToObject.ts +204 -0
- package/src/utils/utils.ts +310 -0
- package/src/workflows/WorkflowSystem.ts +182 -0
- package/src/workflows/coreWorkflows/collectionsTable/index.ts +118 -0
- package/src/workflows/coreWorkflows/index.ts +18 -0
- package/src/workflows/coreWorkflows/processors/postOperationsWorkflows.ts +46 -0
- package/src/workflows/coreWorkflows/processors/preOperationsWorkflows.ts +27 -0
- package/src/workflows/coreWorkflows/processors/processorForDB.ts +13 -0
- package/src/workflows/coreWorkflows/processors/processors/processor.ts +23 -0
- package/src/workflows/coreWorkflows/processors/processors/processorsFunctions.ts +47 -0
- package/src/workflows/coreWorkflows/processors/utils.ts +102 -0
- package/src/workflows/coreWorkflows/processors/validator/validator.ts +19 -0
- package/src/workflows/coreWorkflows/processors/validator/validatorsFunction.ts +52 -0
- package/src/workflows/coreWorkflows/queryCoreWorkflows.ts +31 -0
- package/src/workflows/coreWorkflows/utilsCoreWorkflows.ts +40 -0
- package/src/workflows/coreWorkflows/workflowsCollection/workflowsCollectionWorkflows.ts +101 -0
|
@@ -0,0 +1,655 @@
|
|
|
1
|
+
import format from "pg-format";
|
|
2
|
+
import type { FindAllParamsOutput } from "../../../types/index.ts";
|
|
3
|
+
import type {
|
|
4
|
+
CollectionConfig,
|
|
5
|
+
CollectionIndex,
|
|
6
|
+
CollectionIndexes,
|
|
7
|
+
CollectionsConfig,
|
|
8
|
+
} from "../../../types/index.ts";
|
|
9
|
+
import type { FindAllResult } from "../../../types/index.ts";
|
|
10
|
+
import type { CollectionField } from "../../../types/index.ts";
|
|
11
|
+
|
|
12
|
+
import pg, { Pool, type PoolClient } from "pg";
|
|
13
|
+
import { DatabaseDriver } from "../../../types/index.ts";
|
|
14
|
+
import { LobbError } from "../../../LobbError.ts";
|
|
15
|
+
import { QueryBuilder } from "./QueryBuilder.ts";
|
|
16
|
+
import { fieldsHasId } from "./utils.ts";
|
|
17
|
+
import { cpus } from "node:os";
|
|
18
|
+
import { Lobb } from "../../../Lobb.ts";
|
|
19
|
+
|
|
20
|
+
// Parse int8 (bigint, Postgres type OID 20) as a plain string to avoid precision
// loss: int8 can exceed Number.MAX_SAFE_INTEGER, so we keep it textual and let
// callers decide how to interpret it.
pg.types.setTypeParser(20, (val: string) => val);
|
|
22
|
+
|
|
23
|
+
// Helper: add Symbol.dispose to a pg PoolClient so `using` syntax works
|
|
24
|
+
function asDisposable<T extends { release(): void }>(client: T): T & Disposable {
|
|
25
|
+
(client as any)[Symbol.dispose] = () => client.release();
|
|
26
|
+
return client as T & Disposable;
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
export class PGDriver extends DatabaseDriver {
|
|
30
|
+
protected pool!: Pool;
|
|
31
|
+
|
|
32
|
+
  /**
   * Reads the database configuration from the Lobb singleton.
   * NOTE(review): this requires Lobb.instance to be fully initialized before
   * any PGDriver is constructed — confirm construction order in Lobb bootstrap.
   */
  constructor() {
    super();
    // Set databaseConfig here instead of at class field initialization
    // This avoids the circular dependency at module load time
    this.databaseConfig = Lobb.instance.configManager.config.database;
  }
|
|
38
|
+
|
|
39
|
+
public async createConnection() {
|
|
40
|
+
const default_pool_connections = (cpus().length * 2) + 4;
|
|
41
|
+
|
|
42
|
+
// Ensure the database exists before connecting
|
|
43
|
+
await this.ensureDatabaseExists();
|
|
44
|
+
|
|
45
|
+
// Now connect to the actual target database
|
|
46
|
+
this.pool = new Pool({
|
|
47
|
+
host: this.databaseConfig.host,
|
|
48
|
+
port: this.databaseConfig.port,
|
|
49
|
+
user: this.databaseConfig.username,
|
|
50
|
+
password: this.databaseConfig.password,
|
|
51
|
+
database: this.databaseConfig.database,
|
|
52
|
+
max: this.databaseConfig.pool_connections ?? default_pool_connections,
|
|
53
|
+
});
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
public getConnection() {
|
|
57
|
+
// Wrap the pool to ensure connections are cleaned IMMEDIATELY after acquiring
|
|
58
|
+
// This protects against dirty connections left by previous users
|
|
59
|
+
const originalPool = this.pool;
|
|
60
|
+
|
|
61
|
+
return {
|
|
62
|
+
connect: async () => {
|
|
63
|
+
const client = await originalPool.connect();
|
|
64
|
+
|
|
65
|
+
// CRITICAL FIX: Clean the connection immediately after acquiring from pool
|
|
66
|
+
// If a previous user left it in a dirty transaction state, we clean it here
|
|
67
|
+
try {
|
|
68
|
+
await client.query("ROLLBACK");
|
|
69
|
+
} catch {
|
|
70
|
+
// Ignore "no transaction in progress" errors - expected for clean connections
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
return asDisposable(client);
|
|
74
|
+
},
|
|
75
|
+
};
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
  /**
   * Shuts down the connection pool, closing all database connections.
   */
  public async close() {
    await this.pool.end();
  }
|
|
81
|
+
|
|
82
|
+
public async query<T>(sql: string) {
|
|
83
|
+
const client = asDisposable(await this.pool.connect());
|
|
84
|
+
using _ = client;
|
|
85
|
+
const result = await this.runQuery<T>(client, sql);
|
|
86
|
+
return result.rows;
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
public async dropDatabase() {
|
|
90
|
+
const tempPool = new Pool({
|
|
91
|
+
host: this.databaseConfig.host,
|
|
92
|
+
port: this.databaseConfig.port,
|
|
93
|
+
user: this.databaseConfig.username,
|
|
94
|
+
password: this.databaseConfig.password,
|
|
95
|
+
database: "postgres",
|
|
96
|
+
max: 1,
|
|
97
|
+
});
|
|
98
|
+
try {
|
|
99
|
+
const client = await tempPool.connect();
|
|
100
|
+
try {
|
|
101
|
+
await client.query(format("DROP DATABASE IF EXISTS %I", this.databaseConfig.database));
|
|
102
|
+
} finally {
|
|
103
|
+
client.release();
|
|
104
|
+
}
|
|
105
|
+
} finally {
|
|
106
|
+
await tempPool.end();
|
|
107
|
+
}
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
public async createCollection(
|
|
111
|
+
collectionName: string,
|
|
112
|
+
collectionConfig: CollectionConfig,
|
|
113
|
+
) {
|
|
114
|
+
const collectionEntries = Object.entries(collectionConfig.fields);
|
|
115
|
+
const createTableBodySql: string = collectionEntries.map(
|
|
116
|
+
([fieldName, fieldConfig]) => {
|
|
117
|
+
return `"${fieldName}" ${
|
|
118
|
+
this.generateFieldSignature(fieldName, fieldConfig)
|
|
119
|
+
}`;
|
|
120
|
+
},
|
|
121
|
+
).join(", ");
|
|
122
|
+
|
|
123
|
+
const query = `CREATE TABLE IF NOT EXISTS ${collectionName} (${createTableBodySql})`;
|
|
124
|
+
|
|
125
|
+
const client = asDisposable(await this.pool.connect());
|
|
126
|
+
using _ = client;
|
|
127
|
+
await this.runQuery(client, query);
|
|
128
|
+
|
|
129
|
+
// creating the indexes
|
|
130
|
+
const indexes = collectionConfig.indexes;
|
|
131
|
+
for (
|
|
132
|
+
const [indexName, value] of Object.entries(indexes)
|
|
133
|
+
) {
|
|
134
|
+
await this.createIndex(
|
|
135
|
+
collectionName,
|
|
136
|
+
indexName,
|
|
137
|
+
value,
|
|
138
|
+
);
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
public async dropCollection(collectionName: string) {
|
|
143
|
+
const client = asDisposable(await this.pool.connect());
|
|
144
|
+
using _ = client;
|
|
145
|
+
const sql = format("DROP TABLE %I", collectionName);
|
|
146
|
+
await this.runQuery(client, sql);
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
public async addField(
|
|
150
|
+
collectionName: string,
|
|
151
|
+
fieldName: string,
|
|
152
|
+
field: CollectionField,
|
|
153
|
+
): Promise<void> {
|
|
154
|
+
const sql = `ALTER TABLE ${collectionName}\n ADD COLUMN ${fieldName} ${
|
|
155
|
+
this.generateFieldSignature(fieldName, field)
|
|
156
|
+
}`;
|
|
157
|
+
const client = asDisposable(await this.pool.connect());
|
|
158
|
+
using _ = client;
|
|
159
|
+
await this.runQuery(client, sql);
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
public async removeField(
|
|
163
|
+
collectionName: string,
|
|
164
|
+
fieldName: string,
|
|
165
|
+
): Promise<void> {
|
|
166
|
+
const client = asDisposable(await this.pool.connect());
|
|
167
|
+
using _ = client;
|
|
168
|
+
const sql = `ALTER TABLE ${collectionName} DROP COLUMN ${fieldName}`;
|
|
169
|
+
await this.runQuery(client, sql);
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
public async getDbSchema(): Promise<CollectionsConfig> {
|
|
173
|
+
const client = asDisposable(await this.pool.connect());
|
|
174
|
+
using _ = client;
|
|
175
|
+
const tablesQuery = `
|
|
176
|
+
SELECT table_name
|
|
177
|
+
FROM information_schema.tables
|
|
178
|
+
WHERE table_schema = 'public'
|
|
179
|
+
`;
|
|
180
|
+
const tablesResult = await this.runQuery<{ table_name: string }>(
|
|
181
|
+
client,
|
|
182
|
+
tablesQuery,
|
|
183
|
+
);
|
|
184
|
+
|
|
185
|
+
const tableNames: string[] = tablesResult.rows.map((item) =>
|
|
186
|
+
item.table_name
|
|
187
|
+
);
|
|
188
|
+
|
|
189
|
+
const schema: CollectionsConfig = {};
|
|
190
|
+
for (let index = 0; index < tableNames.length; index++) {
|
|
191
|
+
const tableName = tableNames[index];
|
|
192
|
+
const fieldsQuery = `
|
|
193
|
+
SELECT column_name, data_type
|
|
194
|
+
FROM information_schema.columns
|
|
195
|
+
WHERE table_name = '${tableName}';
|
|
196
|
+
`;
|
|
197
|
+
const fieldsInfos = await this.runQuery<
|
|
198
|
+
{ column_name: string; data_type: string }
|
|
199
|
+
>(client, fieldsQuery);
|
|
200
|
+
|
|
201
|
+
const localFields: any = {};
|
|
202
|
+
for (let index = 0; index < fieldsInfos.rows.length; index++) {
|
|
203
|
+
const field = fieldsInfos.rows[index];
|
|
204
|
+
const fieldType = this.getLobbTypeFromPgType(field.data_type);
|
|
205
|
+
localFields[field.column_name] = {
|
|
206
|
+
type: fieldType,
|
|
207
|
+
...(await this.getFieldDBProperties(
|
|
208
|
+
fieldType,
|
|
209
|
+
field.column_name,
|
|
210
|
+
tableName,
|
|
211
|
+
)),
|
|
212
|
+
};
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
schema[tableName] = {
|
|
216
|
+
indexes: await this.getIndexes(tableName),
|
|
217
|
+
fields: localFields,
|
|
218
|
+
};
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
return schema;
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
public async getIdsByFilter(
|
|
225
|
+
collectionName: string,
|
|
226
|
+
filter: any,
|
|
227
|
+
localClient?: PoolClient,
|
|
228
|
+
): Promise<string[]> {
|
|
229
|
+
const where = QueryBuilder.getWhere(filter, collectionName);
|
|
230
|
+
const query = format(
|
|
231
|
+
`SELECT id FROM ${collectionName} %s`,
|
|
232
|
+
where,
|
|
233
|
+
);
|
|
234
|
+
|
|
235
|
+
if (localClient) {
|
|
236
|
+
const result = await this.runQuery<{ id: string }>(localClient, query);
|
|
237
|
+
return result.rows.map((row) => row.id);
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
const client = asDisposable(await this.pool.connect());
|
|
241
|
+
using _ = client;
|
|
242
|
+
const result = await this.runQuery<{ id: string }>(client, query);
|
|
243
|
+
return result.rows.map((row) => row.id);
|
|
244
|
+
}
|
|
245
|
+
|
|
246
|
+
////////////////////////
|
|
247
|
+
// Indexes Operations //
|
|
248
|
+
////////////////////////
|
|
249
|
+
public async createIndex(
|
|
250
|
+
collectionName: string,
|
|
251
|
+
indexName: string,
|
|
252
|
+
indexInfo: CollectionIndex,
|
|
253
|
+
) {
|
|
254
|
+
const indexesFields = Object.entries(indexInfo.fields).map(
|
|
255
|
+
([fieldName, fieldOrder]) => {
|
|
256
|
+
return format("%I %s", fieldName, fieldOrder.order.toUpperCase());
|
|
257
|
+
},
|
|
258
|
+
);
|
|
259
|
+
|
|
260
|
+
const uniqe = indexInfo.unique ?? false;
|
|
261
|
+
const query = format(
|
|
262
|
+
`
|
|
263
|
+
CREATE ${uniqe ? "UNIQUE" : ""} INDEX IF NOT EXISTS %I
|
|
264
|
+
ON ${collectionName} (${indexesFields.join(", ")})
|
|
265
|
+
`,
|
|
266
|
+
indexName,
|
|
267
|
+
);
|
|
268
|
+
const client = asDisposable(await this.pool.connect());
|
|
269
|
+
using _ = client;
|
|
270
|
+
await this.runQuery(client, query);
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
public async dropIndex(collectionName: string, indexName: string) {
|
|
274
|
+
const client = asDisposable(await this.pool.connect());
|
|
275
|
+
using _ = client;
|
|
276
|
+
const sql = format("DROP INDEX %I", indexName);
|
|
277
|
+
await this.runQuery(client, sql);
|
|
278
|
+
}
|
|
279
|
+
|
|
280
|
+
public async getIndexes(collectionName: string): Promise<CollectionIndexes> {
|
|
281
|
+
const client = asDisposable(await this.pool.connect());
|
|
282
|
+
using _ = client;
|
|
283
|
+
const indexesQuery = `
|
|
284
|
+
SELECT indexname, indexdef
|
|
285
|
+
FROM pg_indexes
|
|
286
|
+
WHERE tablename = '${collectionName}';
|
|
287
|
+
`;
|
|
288
|
+
const indexesInfos = await this.runQuery<
|
|
289
|
+
{ indexname: string; indexdef: string }
|
|
290
|
+
>(client, indexesQuery);
|
|
291
|
+
|
|
292
|
+
const dbIndexes = indexesInfos.rows.filter(
|
|
293
|
+
(index) => index.indexname !== collectionName + "_pkey",
|
|
294
|
+
);
|
|
295
|
+
|
|
296
|
+
const indexes: CollectionIndexes = {};
|
|
297
|
+
|
|
298
|
+
for (let index = 0; index < dbIndexes.length; index++) {
|
|
299
|
+
const dbIndex = dbIndexes[index];
|
|
300
|
+
indexes[dbIndex.indexname] = this.extractIndexFields(
|
|
301
|
+
dbIndex.indexdef,
|
|
302
|
+
);
|
|
303
|
+
}
|
|
304
|
+
|
|
305
|
+
return indexes;
|
|
306
|
+
}
|
|
307
|
+
|
|
308
|
+
private extractIndexFields(indexdef: string) {
|
|
309
|
+
// Check if the index is unique
|
|
310
|
+
const isUnique = indexdef.includes("UNIQUE");
|
|
311
|
+
|
|
312
|
+
// Regular expression to capture the field names and their order (ASC/DESC)
|
|
313
|
+
const regex = /\(([^)]+)\)/;
|
|
314
|
+
const match = indexdef.match(regex);
|
|
315
|
+
|
|
316
|
+
const fields: any = {};
|
|
317
|
+
|
|
318
|
+
if (match && match[1]) {
|
|
319
|
+
// Split the field list and clean each part
|
|
320
|
+
const fieldList = match[1].split(",").map((field) => field.trim());
|
|
321
|
+
|
|
322
|
+
// Process each field and determine its order (default to 'asc' if no order is specified)
|
|
323
|
+
fieldList.forEach((field) => {
|
|
324
|
+
const parts = field.split(/\s+/); // Split by spaces to handle ASC/DESC
|
|
325
|
+
const fieldName = parts[0];
|
|
326
|
+
const order = parts[1] ? parts[1].toLowerCase() : "asc"; // Default to 'asc' if no order is specified
|
|
327
|
+
fields[fieldName] = { order };
|
|
328
|
+
});
|
|
329
|
+
}
|
|
330
|
+
|
|
331
|
+
return {
|
|
332
|
+
unique: isUnique,
|
|
333
|
+
fields: fields,
|
|
334
|
+
};
|
|
335
|
+
}
|
|
336
|
+
|
|
337
|
+
/////////////////////
|
|
338
|
+
// CRUD OPERATIONS //
|
|
339
|
+
/////////////////////
|
|
340
|
+
public async findAll(
|
|
341
|
+
collectionName: string,
|
|
342
|
+
params: FindAllParamsOutput,
|
|
343
|
+
localClient?: PoolClient,
|
|
344
|
+
): Promise<FindAllResult> {
|
|
345
|
+
const query = QueryBuilder.build(collectionName, params);
|
|
346
|
+
const totalCountQuery = QueryBuilder.buildTotalCountQuery(
|
|
347
|
+
collectionName,
|
|
348
|
+
params,
|
|
349
|
+
);
|
|
350
|
+
|
|
351
|
+
if (localClient) {
|
|
352
|
+
return this.runFindAll(localClient, query, totalCountQuery, params);
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
const client = asDisposable(await this.pool.connect());
|
|
356
|
+
using _ = client;
|
|
357
|
+
return this.runFindAll(client, query, totalCountQuery, params);
|
|
358
|
+
}
|
|
359
|
+
|
|
360
|
+
private async runFindAll(
|
|
361
|
+
client: PoolClient,
|
|
362
|
+
query: string,
|
|
363
|
+
totalCountQuery: string,
|
|
364
|
+
params: FindAllParamsOutput,
|
|
365
|
+
): Promise<FindAllResult> {
|
|
366
|
+
const result = await this.runQuery(client, query);
|
|
367
|
+
const totalCountResult = await this.runQuery<{ count: string }>(
|
|
368
|
+
client,
|
|
369
|
+
totalCountQuery,
|
|
370
|
+
);
|
|
371
|
+
|
|
372
|
+
// remove the id if it wasn't selected
|
|
373
|
+
if (!fieldsHasId(params.fields)) {
|
|
374
|
+
result.rows = result.rows.map((row: any) => {
|
|
375
|
+
const { id, ...rest } = row;
|
|
376
|
+
return rest;
|
|
377
|
+
});
|
|
378
|
+
}
|
|
379
|
+
|
|
380
|
+
return {
|
|
381
|
+
data: result.rows as any,
|
|
382
|
+
meta: {
|
|
383
|
+
totalCount: Number(totalCountResult.rows[0].count) ?? 0,
|
|
384
|
+
},
|
|
385
|
+
};
|
|
386
|
+
}
|
|
387
|
+
|
|
388
|
+
public async createOne(
|
|
389
|
+
collectionName: string,
|
|
390
|
+
data: any,
|
|
391
|
+
localClient?: PoolClient,
|
|
392
|
+
) {
|
|
393
|
+
delete data.id;
|
|
394
|
+
|
|
395
|
+
const query = format(
|
|
396
|
+
`INSERT INTO ${collectionName} (%I) VALUES (%L) RETURNING *`,
|
|
397
|
+
Object.keys(data),
|
|
398
|
+
Object.values(data),
|
|
399
|
+
);
|
|
400
|
+
|
|
401
|
+
if (localClient) {
|
|
402
|
+
const result = await this.runQuery(localClient, query);
|
|
403
|
+
return result.rows[0] as any;
|
|
404
|
+
}
|
|
405
|
+
|
|
406
|
+
const client = asDisposable(await this.pool.connect());
|
|
407
|
+
using _ = client;
|
|
408
|
+
const result = await this.runQuery(client, query);
|
|
409
|
+
return result.rows[0] as any;
|
|
410
|
+
}
|
|
411
|
+
|
|
412
|
+
public async updateOne(
|
|
413
|
+
collectionName: string,
|
|
414
|
+
id: string,
|
|
415
|
+
data: any,
|
|
416
|
+
localClient?: PoolClient,
|
|
417
|
+
) {
|
|
418
|
+
const setClause = Object.keys(data).map((key) =>
|
|
419
|
+
format("%I = %L", key, data[key])
|
|
420
|
+
).join(", ");
|
|
421
|
+
|
|
422
|
+
const filter = { id };
|
|
423
|
+
const where = QueryBuilder.getWhere(filter, collectionName);
|
|
424
|
+
const query = format(
|
|
425
|
+
`UPDATE ${collectionName} SET ${setClause} %s RETURNING *`,
|
|
426
|
+
where,
|
|
427
|
+
);
|
|
428
|
+
|
|
429
|
+
if (localClient) {
|
|
430
|
+
return this.runUpdateOne(localClient, query);
|
|
431
|
+
}
|
|
432
|
+
|
|
433
|
+
const client = asDisposable(await this.pool.connect());
|
|
434
|
+
using _ = client;
|
|
435
|
+
return this.runUpdateOne(client, query);
|
|
436
|
+
}
|
|
437
|
+
|
|
438
|
+
private async runUpdateOne(client: PoolClient, query: string) {
|
|
439
|
+
const result = await this.runQuery(client, query);
|
|
440
|
+
|
|
441
|
+
if (!result.rows.length) {
|
|
442
|
+
throw new LobbError({
|
|
443
|
+
code: "NOT_FOUND",
|
|
444
|
+
message: `The record is not found`,
|
|
445
|
+
});
|
|
446
|
+
}
|
|
447
|
+
|
|
448
|
+
return result.rows[0] as any;
|
|
449
|
+
}
|
|
450
|
+
|
|
451
|
+
public async deleteOne(
|
|
452
|
+
collectionName: string,
|
|
453
|
+
id: string,
|
|
454
|
+
localClient?: PoolClient,
|
|
455
|
+
) {
|
|
456
|
+
const filter = { id };
|
|
457
|
+
const where = QueryBuilder.getWhere(filter, collectionName);
|
|
458
|
+
const query = format(
|
|
459
|
+
`DELETE FROM ${collectionName} %s RETURNING *`,
|
|
460
|
+
where,
|
|
461
|
+
);
|
|
462
|
+
|
|
463
|
+
if (localClient) {
|
|
464
|
+
return this.runDeleteOne(localClient, query);
|
|
465
|
+
}
|
|
466
|
+
|
|
467
|
+
const client = asDisposable(await this.pool.connect());
|
|
468
|
+
using _ = client;
|
|
469
|
+
return this.runDeleteOne(client, query);
|
|
470
|
+
}
|
|
471
|
+
|
|
472
|
+
private async runDeleteOne(client: PoolClient, query: string) {
|
|
473
|
+
const result = await this.runQuery(client, query);
|
|
474
|
+
|
|
475
|
+
if (!result.rows.length) {
|
|
476
|
+
throw new LobbError({
|
|
477
|
+
code: "NOT_FOUND",
|
|
478
|
+
message: `The record is not found`,
|
|
479
|
+
});
|
|
480
|
+
}
|
|
481
|
+
|
|
482
|
+
return result.rows[0] as any;
|
|
483
|
+
}
|
|
484
|
+
|
|
485
|
+
/////////////////////
|
|
486
|
+
// META OPERATIONS //
|
|
487
|
+
/////////////////////
|
|
488
|
+
public async getFieldDBProperties(
|
|
489
|
+
fieldType: string,
|
|
490
|
+
fieldName: string,
|
|
491
|
+
collectionName: string,
|
|
492
|
+
) {
|
|
493
|
+
if (fieldType === "string") {
|
|
494
|
+
const query = format(
|
|
495
|
+
`
|
|
496
|
+
SELECT character_maximum_length
|
|
497
|
+
FROM information_schema.columns
|
|
498
|
+
WHERE table_name = %L
|
|
499
|
+
AND column_name = %L;
|
|
500
|
+
`,
|
|
501
|
+
collectionName,
|
|
502
|
+
fieldName,
|
|
503
|
+
);
|
|
504
|
+
const client = asDisposable(await this.pool.connect());
|
|
505
|
+
using _ = client;
|
|
506
|
+
const result = await this.runQuery<
|
|
507
|
+
{ character_maximum_length: string }
|
|
508
|
+
>(client, query);
|
|
509
|
+
return {
|
|
510
|
+
length: result.rows[0]?.character_maximum_length ?? 255,
|
|
511
|
+
};
|
|
512
|
+
}
|
|
513
|
+
|
|
514
|
+
return {};
|
|
515
|
+
}
|
|
516
|
+
|
|
517
|
+
  ////////////////////
  // HELPER METHODS //
  ////////////////////
  /**
   * Creates the configured database if it does not exist yet.
   *
   * Opens a single-connection temporary pool against the default "postgres"
   * maintenance database (the target may not exist yet), checks pg_database,
   * and issues CREATE DATABASE when missing. The temporary pool is always
   * closed, even on failure.
   */
  private async ensureDatabaseExists() {
    // Connect to the 'postgres' database to check if our target database exists
    const tempPool = new Pool({
      host: this.databaseConfig.host,
      port: this.databaseConfig.port,
      user: this.databaseConfig.username,
      password: this.databaseConfig.password,
      database: "postgres", // Connect to default postgres database
      max: 1, // Only need one connection for this check
    });

    try {
      const client = asDisposable(await tempPool.connect());
      using _ = client;

      // Check if the target database exists
      // %L: the database name is compared as an escaped string literal.
      const existsQuery = format(
        "SELECT EXISTS(SELECT 1 FROM pg_database WHERE datname = %L) as exists",
        this.databaseConfig.database,
      );
      const result = await this.runQuery<{ exists: boolean }>(
        client,
        existsQuery,
      );

      const databaseExists = result.rows[0].exists;

      if (!databaseExists) {
        console.log(
          `Database '${this.databaseConfig.database}' does not exist. Creating it...`,
        );
        // %I: identifier-quote the name — CREATE DATABASE cannot be parameterized.
        const createDbQuery = format(
          "CREATE DATABASE %I",
          this.databaseConfig.database,
        );
        await this.runQuery(client, createDbQuery);
        console.log(
          `Database '${this.databaseConfig.database}' created successfully.`,
        );
      }
    } finally {
      // Close the temporary connection pool
      await tempPool.end();
    }
  }
|
|
565
|
+
|
|
566
|
+
  /**
   * Executes a SQL statement on the given client. On failure the offending
   * statement is logged to stderr for diagnosis, then the original error is
   * rethrown unchanged so callers can handle it.
   */
  private async runQuery<T = Record<string, unknown>>(
    client: PoolClient,
    sql: string,
  ) {
    try {
      return await client.query<T>(sql);
    } catch (error) {
      console.error(`[PGDriver] Failed SQL:\n${sql}`);
      throw error;
    }
  }
|
|
577
|
+
|
|
578
|
+
private getLobbTypeFromPgType(
|
|
579
|
+
pgType: string,
|
|
580
|
+
): CollectionField["type"] {
|
|
581
|
+
if (pgType === "character varying") {
|
|
582
|
+
return "string";
|
|
583
|
+
} else if (pgType === "text") {
|
|
584
|
+
return "text";
|
|
585
|
+
} else if (pgType === "date") {
|
|
586
|
+
return "date";
|
|
587
|
+
} else if (pgType === "time with time zone") {
|
|
588
|
+
return "time";
|
|
589
|
+
} else if (pgType === "timestamp with time zone") {
|
|
590
|
+
return "datetime";
|
|
591
|
+
} else if (pgType === "numeric") {
|
|
592
|
+
return "decimal";
|
|
593
|
+
} else if (pgType === "double precision") {
|
|
594
|
+
return "float";
|
|
595
|
+
} else if (pgType === "integer") {
|
|
596
|
+
return "integer";
|
|
597
|
+
} else if (pgType === "bigint") {
|
|
598
|
+
return "long";
|
|
599
|
+
} else if (pgType === "boolean") {
|
|
600
|
+
return "bool";
|
|
601
|
+
} else {
|
|
602
|
+
throw new LobbError({
|
|
603
|
+
code: "INTERNAL_SERVER_ERROR",
|
|
604
|
+
message:
|
|
605
|
+
`The (${pgType}) PG field type is not implemented in getLobbTypeFromPgType`,
|
|
606
|
+
});
|
|
607
|
+
}
|
|
608
|
+
}
|
|
609
|
+
|
|
610
|
+
private generateFieldSignature(
|
|
611
|
+
fieldName: string,
|
|
612
|
+
fieldConfig: CollectionField,
|
|
613
|
+
) {
|
|
614
|
+
if (fieldName === "id") {
|
|
615
|
+
return `SERIAL PRIMARY KEY`;
|
|
616
|
+
} else if (fieldConfig.type === "string") {
|
|
617
|
+
return `VARCHAR(${fieldConfig.length || 255})`;
|
|
618
|
+
} else if (fieldConfig.type === "text") {
|
|
619
|
+
return `TEXT`;
|
|
620
|
+
} else if (fieldConfig.type === "date") {
|
|
621
|
+
return `DATE`;
|
|
622
|
+
} else if (fieldConfig.type === "time") {
|
|
623
|
+
return `TIMETZ`;
|
|
624
|
+
} else if (fieldConfig.type === "datetime") {
|
|
625
|
+
return `TIMESTAMPTZ`;
|
|
626
|
+
} else if (fieldConfig.type === "decimal") {
|
|
627
|
+
return `NUMERIC`;
|
|
628
|
+
} else if (fieldConfig.type === "float") {
|
|
629
|
+
return `FLOAT`;
|
|
630
|
+
} else if (fieldConfig.type === "integer") {
|
|
631
|
+
return `INTEGER`;
|
|
632
|
+
} else if (fieldConfig.type === "long") {
|
|
633
|
+
return `BIGINT`;
|
|
634
|
+
} else if (fieldConfig.type === "bool") {
|
|
635
|
+
return `BOOLEAN`;
|
|
636
|
+
}
|
|
637
|
+
|
|
638
|
+
throw new LobbError({
|
|
639
|
+
code: "INTERNAL_SERVER_ERROR",
|
|
640
|
+
message:
|
|
641
|
+
`The (${fieldName}) field type is not implemented in generateFieldSignature`,
|
|
642
|
+
});
|
|
643
|
+
}
|
|
644
|
+
|
|
645
|
+
  ///////////////////////////////////////////////////////////
  // parse some types of data to be compatible with the db //
  ///////////////////////////////////////////////////////////
  // Both overrides are identity functions: node-postgres already handles
  // value conversion for the supported types (plus the module-level int8
  // parser above), so no per-field transformation is needed here. They are
  // overridden to satisfy the DatabaseDriver contract explicitly.
  public override encodeFieldValue(fieldType: string, value: any) {
    return value;
  }

  public override decodeFieldValue(fieldType: string, value: any) {
    return value;
  }
|
|
655
|
+
}
|