@loomcore/api 0.1.43 → 0.1.47
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -201
- package/README.md +49 -49
- package/dist/__tests__/postgres-test-migrations/100-create-test-entities-table.migration.js +22 -22
- package/dist/__tests__/postgres-test-migrations/101-create-categories-table.migration.js +12 -12
- package/dist/__tests__/postgres-test-migrations/102-create-products-table.migration.js +21 -21
- package/dist/__tests__/postgres-test-migrations/103-create-test-items-table.migration.js +20 -20
- package/dist/__tests__/postgres.test-database.js +8 -8
- package/dist/databases/db-type.type.d.ts +1 -0
- package/dist/databases/db-type.type.js +1 -0
- package/dist/databases/index.d.ts +4 -3
- package/dist/databases/index.js +4 -3
- package/dist/databases/migrations/migration-runner.d.ts +6 -8
- package/dist/databases/migrations/migration-runner.js +71 -18
- package/dist/databases/mongo-db/utils/build-mongo-url.util.d.ts +2 -0
- package/dist/databases/mongo-db/utils/build-mongo-url.util.js +13 -0
- package/dist/databases/mongo-db/utils/index.d.ts +1 -0
- package/dist/databases/mongo-db/utils/index.js +1 -0
- package/dist/databases/postgres/commands/postgres-batch-update.command.js +7 -7
- package/dist/databases/postgres/commands/postgres-create-many.command.js +10 -10
- package/dist/databases/postgres/commands/postgres-create.command.js +4 -4
- package/dist/databases/postgres/commands/postgres-full-update-by-id.command.js +13 -13
- package/dist/databases/postgres/commands/postgres-partial-update-by-id.command.js +7 -7
- package/dist/databases/postgres/commands/postgres-update.command.js +7 -7
- package/dist/databases/postgres/migrations/001-create-migrations-table.migration.js +12 -12
- package/dist/databases/postgres/migrations/002-create-organizations-table.migration.js +24 -24
- package/dist/databases/postgres/migrations/003-create-users-table.migration.js +26 -26
- package/dist/databases/postgres/migrations/004-create-refresh-tokens-table.migration.js +18 -18
- package/dist/databases/postgres/migrations/005-create-meta-org.migration.js +9 -9
- package/dist/databases/postgres/migrations/006-create-admin-user.migration.js +3 -3
- package/dist/databases/postgres/migrations/007-create-roles-table.migration.js +14 -14
- package/dist/databases/postgres/migrations/008-create-user-roles-table.migration.js +24 -24
- package/dist/databases/postgres/migrations/009-create-features-table.migration.js +14 -14
- package/dist/databases/postgres/migrations/010-create-authorizations-table.migration.js +26 -26
- package/dist/databases/postgres/migrations/011-create-admin-authorization.migration.js +44 -44
- package/dist/databases/postgres/migrations/database-builder.js +4 -4
- package/dist/databases/postgres/postgres.database.js +17 -17
- package/dist/databases/postgres/utils/build-postgres-url.util.d.ts +2 -0
- package/dist/databases/postgres/utils/build-postgres-url.util.js +13 -0
- package/dist/databases/postgres/utils/build-select-clause.js +6 -6
- package/dist/databases/postgres/utils/does-table-exist.util.js +4 -4
- package/dist/databases/postgres/utils/index.d.ts +1 -0
- package/dist/databases/postgres/utils/index.js +1 -0
- package/dist/models/base-api-config.interface.d.ts +6 -0
- package/package.json +88 -88
package/dist/__tests__/postgres-test-migrations/102-create-products-table.migration.js
CHANGED
@@ -8,29 +8,29 @@ export class CreateProductsTableMigration {
     async execute() {
         const _id = randomUUID().toString();
         try {
-            await this.client.query(`
-        CREATE TABLE "products" (
-          "_id" VARCHAR(255) PRIMARY KEY,
-          "_orgId" VARCHAR(255),
-          "name" VARCHAR(255) NOT NULL,
-          "description" TEXT,
-          "internalNumber" VARCHAR(255),
-          "categoryId" VARCHAR(255) NOT NULL REFERENCES "categories"("_id"),
-          "_created" TIMESTAMP NOT NULL,
-          "_createdBy" VARCHAR(255) NOT NULL,
-          "_updated" TIMESTAMP NOT NULL,
-          "_updatedBy" VARCHAR(255) NOT NULL,
-          "_deleted" TIMESTAMP,
-          "_deletedBy" VARCHAR(255)
-        )
+            await this.client.query(`
+        CREATE TABLE "products" (
+          "_id" VARCHAR(255) PRIMARY KEY,
+          "_orgId" VARCHAR(255),
+          "name" VARCHAR(255) NOT NULL,
+          "description" TEXT,
+          "internalNumber" VARCHAR(255),
+          "categoryId" VARCHAR(255) NOT NULL REFERENCES "categories"("_id"),
+          "_created" TIMESTAMP NOT NULL,
+          "_createdBy" VARCHAR(255) NOT NULL,
+          "_updated" TIMESTAMP NOT NULL,
+          "_updatedBy" VARCHAR(255) NOT NULL,
+          "_deleted" TIMESTAMP,
+          "_deletedBy" VARCHAR(255)
+        )
       `);
         }
         catch (error) {
             return { success: false, error: new Error(`Error creating products table: ${error.message}`) };
         }
         try {
-            await this.client.query(`
-        Insert into "migrations" ("_id", "index", "hasRun", "reverted") values ('${_id}', ${this.index}, TRUE, FALSE);
+            await this.client.query(`
+        Insert into "migrations" ("_id", "index", "hasRun", "reverted") values ('${_id}', ${this.index}, TRUE, FALSE);
       `);
         }
         catch (error) {
@@ -40,16 +40,16 @@ export class CreateProductsTableMigration {
     }
     async revert() {
         try {
-            await this.client.query(`
-        DROP TABLE "products";
+            await this.client.query(`
+        DROP TABLE "products";
       `);
         }
         catch (error) {
             return { success: false, error: new Error(`Error dropping products table: ${error.message}`) };
         }
         try {
-            await this.client.query(`
-        Update "migrations" SET "reverted" = TRUE WHERE "index" = '${this.index}';
+            await this.client.query(`
+        Update "migrations" SET "reverted" = TRUE WHERE "index" = '${this.index}';
       `);
         }
         catch (error) {
package/dist/__tests__/postgres-test-migrations/103-create-test-items-table.migration.js
CHANGED
@@ -8,28 +8,28 @@ export class CreateTestItemsTableMigration {
     async execute() {
         const _id = randomUUID().toString();
         try {
-            await this.client.query(`
-        CREATE TABLE "testItems" (
-          "_id" VARCHAR(255) PRIMARY KEY,
-          "_orgId" VARCHAR(255),
-          "name" VARCHAR(255) NOT NULL,
-          "value" INTEGER,
-          "eventDate" TIMESTAMP,
-          "_created" TIMESTAMP NOT NULL,
-          "_createdBy" VARCHAR(255) NOT NULL,
-          "_updated" TIMESTAMP NOT NULL,
-          "_updatedBy" VARCHAR(255) NOT NULL,
-          "_deleted" TIMESTAMP,
-          "_deletedBy" VARCHAR(255)
-        )
+            await this.client.query(`
+        CREATE TABLE "testItems" (
+          "_id" VARCHAR(255) PRIMARY KEY,
+          "_orgId" VARCHAR(255),
+          "name" VARCHAR(255) NOT NULL,
+          "value" INTEGER,
+          "eventDate" TIMESTAMP,
+          "_created" TIMESTAMP NOT NULL,
+          "_createdBy" VARCHAR(255) NOT NULL,
+          "_updated" TIMESTAMP NOT NULL,
+          "_updatedBy" VARCHAR(255) NOT NULL,
+          "_deleted" TIMESTAMP,
+          "_deletedBy" VARCHAR(255)
+        )
       `);
         }
         catch (error) {
             return { success: false, error: new Error(`Error creating test items table: ${error.message}`) };
         }
         try {
-            await this.client.query(`
-        Insert into "migrations" ("_id", "index", "hasRun", "reverted") values ('${_id}', ${this.index}, TRUE, FALSE);
+            await this.client.query(`
+        Insert into "migrations" ("_id", "index", "hasRun", "reverted") values ('${_id}', ${this.index}, TRUE, FALSE);
       `);
         }
         catch (error) {
@@ -39,16 +39,16 @@ export class CreateTestItemsTableMigration {
     }
     async revert() {
         try {
-            await this.client.query(`
-        DROP TABLE "testItems";
+            await this.client.query(`
+        DROP TABLE "testItems";
       `);
         }
         catch (error) {
             return { success: false, error: new Error(`Error dropping test items table: ${error.message}`) };
         }
         try {
-            await this.client.query(`
-        Update "migrations" SET "reverted" = TRUE WHERE "index" = '${this.index}';
+            await this.client.query(`
+        Update "migrations" SET "reverted" = TRUE WHERE "index" = '${this.index}';
       `);
         }
         catch (error) {
package/dist/__tests__/postgres.test-database.js
CHANGED
@@ -39,9 +39,9 @@ export class TestPostgresDatabase {
     }
     async createIndexes(client) {
         try {
-            await client.query(`
-        CREATE INDEX IF NOT EXISTS email_index ON users (LOWER(email));
-        CREATE UNIQUE INDEX IF NOT EXISTS email_unique_index ON users (LOWER(email));
+            await client.query(`
+        CREATE INDEX IF NOT EXISTS email_index ON users (LOWER(email));
+        CREATE UNIQUE INDEX IF NOT EXISTS email_unique_index ON users (LOWER(email));
       `);
         }
         catch (error) {
@@ -52,11 +52,11 @@ export class TestPostgresDatabase {
         if (!this.postgresClient) {
             throw new Error('Database not initialized');
         }
-        const result = await this.postgresClient.query(`
-      SELECT "table_name"
-      FROM information_schema.tables
-      WHERE "table_schema" = 'public'
-      AND "table_type" = 'BASE TABLE'
+        const result = await this.postgresClient.query(`
+      SELECT "table_name"
+      FROM information_schema.tables
+      WHERE "table_schema" = 'public'
+      AND "table_type" = 'BASE TABLE'
       `);
         result.rows.forEach(async (row) => {
             await this.postgresClient?.query(`TRUNCATE TABLE "${row.table_name}" RESTART IDENTITY CASCADE`);
package/dist/databases/db-type.type.d.ts
ADDED
@@ -0,0 +1 @@
+export type DbType = 'postgres' | 'mongodb';
package/dist/databases/db-type.type.js
ADDED
@@ -0,0 +1 @@
+export {};
package/dist/databases/index.d.ts
CHANGED
@@ -1,5 +1,6 @@
-export * from './
-export * from './
+export * from './db-type.type.js';
+export * from './migrations/migration-runner.js';
 export * from './models/index.js';
+export * from './mongo-db/index.js';
 export * from './operations/index.js';
-export * from './
+export * from './postgres/index.js';
package/dist/databases/index.js
CHANGED
@@ -1,5 +1,6 @@
-export * from './
-export * from './
+export * from './db-type.type.js';
+export * from './migrations/migration-runner.js';
 export * from './models/index.js';
+export * from './mongo-db/index.js';
 export * from './operations/index.js';
-export * from './
+export * from './postgres/index.js';
package/dist/databases/migrations/migration-runner.d.ts
CHANGED
@@ -1,16 +1,14 @@
-
-export interface MigrationConfig {
-    dbType: DbType;
-    dbUrl: string;
-    migrationsDir: string;
-}
+import { IBaseApiConfig } from '../../models/base-api-config.interface.js';
 export declare class MigrationRunner {
-    private 
+    private dbType;
+    private dbUrl;
+    private migrationsDir;
     private mongoClient;
-    constructor(config: 
+    constructor(config: IBaseApiConfig);
     private parseSql;
     private getMigrator;
     private wipeDatabase;
     run(command: 'up' | 'down' | 'reset'): Promise<void>;
     private closeConnection;
+    create(name: string): Promise<void>;
 }
package/dist/databases/migrations/migration-runner.js
CHANGED
@@ -3,11 +3,22 @@ import { Pool } from 'pg';
 import { MongoClient } from 'mongodb';
 import fs from 'fs';
 import path from 'path';
+import { buildMongoUrl } from '../mongo-db/utils/build-mongo-url.util.js';
+import { buildPostgresUrl } from '../postgres/utils/build-postgres-url.util.js';
+const getTimestamp = () => {
+    const now = new Date();
+    return now.toISOString().replace(/[-T:.Z]/g, '').slice(0, 14);
+};
 export class MigrationRunner {
-    
+    dbType;
+    dbUrl;
+    migrationsDir;
     mongoClient = null;
     constructor(config) {
-        this.
+        this.dbType = config.app.dbType || 'mongodb';
+        console.log('config', config);
+        this.dbUrl = this.dbType === 'postgres' ? buildPostgresUrl(config) : buildMongoUrl(config);
+        this.migrationsDir = path.join(process.cwd(), 'database', 'migrations');
     }
     parseSql(content) {
         const upMatch = content.match(/--\s*up\s([\s\S]+?)(?=--\s*down|$)/i);
@@ -18,12 +29,11 @@ export class MigrationRunner {
         };
     }
     async getMigrator() {
-        
-        
-        const pool = new Pool({ connectionString: dbUrl });
+        if (this.dbType === 'postgres') {
+            const pool = new Pool({ connectionString: this.dbUrl });
             return new Umzug({
                 migrations: {
-                    glob: path.join(migrationsDir, '*.sql'),
+                    glob: path.join(this.migrationsDir, '*.sql'),
                     resolve: ({ name, path: filePath, context }) => {
                         const content = fs.readFileSync(filePath, 'utf8');
                         const { up, down } = this.parseSql(content);
@@ -51,24 +61,23 @@ export class MigrationRunner {
                 logger: console,
             });
         }
-        else if (dbType === '
-            const client = await MongoClient.connect(dbUrl);
+        else if (this.dbType === 'mongodb') {
+            const client = await MongoClient.connect(this.dbUrl);
             this.mongoClient = client;
             const db = client.db();
             return new Umzug({
-                migrations: { glob: path.join(migrationsDir, '*.ts') },
+                migrations: { glob: path.join(this.migrationsDir, '*.ts') },
                 context: db,
                 storage: new MongoDBStorage({ collection: db.collection('migrations') }),
                 logger: console,
             });
         }
-        throw new Error(`Unsupported DB_TYPE: ${dbType}`);
+        throw new Error(`Unsupported DB_TYPE: ${this.dbType}`);
     }
     async wipeDatabase() {
-        
-        
-        
-        const pool = new Pool({ connectionString: dbUrl });
+        console.log(`⚠️ Wiping ${this.dbType} database...`);
+        if (this.dbType === 'postgres') {
+            const pool = new Pool({ connectionString: this.dbUrl });
             try {
                 await pool.query('DROP SCHEMA public CASCADE; CREATE SCHEMA public;');
             }
@@ -76,8 +85,8 @@ export class MigrationRunner {
                 await pool.end();
             }
         }
-        else if (dbType === '
-            const client = await MongoClient.connect(dbUrl);
+        else if (this.dbType === 'mongodb') {
+            const client = await MongoClient.connect(this.dbUrl);
             await client.db().dropDatabase();
             await client.close();
         }
@@ -113,12 +122,56 @@ export class MigrationRunner {
         }
     }
     async closeConnection(migrator) {
-        if (this.
+        if (this.dbType === 'postgres') {
            await migrator.context.end();
        }
-        else if (this.
+        else if (this.dbType === 'mongodb' && this.mongoClient) {
            await this.mongoClient.close();
            this.mongoClient = null;
        }
    }
+    async create(name) {
+        if (!name) {
+            throw new Error('Migration name is required');
+        }
+        const safeName = name.toLowerCase().replace(/[^a-z0-9]+/g, '_');
+        const filename = `${getTimestamp()}_${safeName}`;
+        let extension = '';
+        let content = '';
+        if (this.dbType === 'postgres') {
+            extension = 'sql';
+            content = `-- Migration: ${safeName}
+-- Created: ${new Date().toISOString()}
+
+-- up
+-- Write your CREATE/ALTER statements here...
+
+
+-- down
+-- Write your DROP/UNDO statements here...
+`;
+        }
+        else {
+            extension = 'ts';
+            content = `import { Db } from 'mongodb';
+
+// Migration: ${safeName}
+// Created: ${new Date().toISOString()}
+
+export const up = async ({ context: db }: { context: Db }) => {
+  // await db.collection('...')....
+};
+
+export const down = async ({ context: db }: { context: Db }) => {
+  // await db.collection('...')....
+};
+`;
+        }
+        const fullPath = path.join(this.migrationsDir, `${filename}.${extension}`);
+        if (!fs.existsSync(this.migrationsDir)) {
+            fs.mkdirSync(this.migrationsDir, { recursive: true });
+        }
+        fs.writeFileSync(fullPath, content);
+        console.log(`✅ Created migration:\n   ${fullPath}`);
+    }
 }
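Note on the reworked runner above: the constructor now derives dbType, dbUrl, and migrationsDir from a single IBaseApiConfig-style object rather than the removed MigrationConfig, and the new create(name) method scaffolds a timestamped .sql or .ts migration file under database/migrations. The sketch below is illustrative only; the import path, the config fields beyond app.dbType and the database block used by the URL builders, and the surrounding script are assumptions, not part of this package diff.

// Hypothetical driver script (import path and full config shape are not shown in this diff).
import { MigrationRunner } from '@loomcore/api';

const config: any = {
  app: { dbType: 'postgres' },                      // read as config.app.dbType || 'mongodb'
  database: { user: 'app', password: 'secret', host: 'localhost', port: 5432, name: 'loomcore' },
};

const runner = new MigrationRunner(config);
await runner.create('add products table');          // writes database/migrations/<timestamp>_add_products_table.sql
await runner.run('up');                              // applies pending migrations via Umzug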
package/dist/databases/mongo-db/utils/build-mongo-url.util.js
ADDED
@@ -0,0 +1,13 @@
+export function buildMongoUrl(config) {
+    const { database } = config;
+    if (!database) {
+        throw new Error("Database configuration is required to build the MongoDB URL.");
+    }
+    const { user, password, host, port, name } = database;
+    if (!user || !password || !host || !port || !name) {
+        throw new Error("Database configuration must include user, password, host, port, and name to build the MongoDB URL.");
+    }
+    const encodedUser = encodeURIComponent(user);
+    const encodedPassword = encodeURIComponent(password);
+    return `mongodb://${encodedUser}:${encodedPassword}@${host}:${port}/${name}`;
+}
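For reference, the new buildMongoUrl helper above requires all five database fields and URL-encodes the credentials before interpolating them. The call below is illustrative only; the import path and the sample values are assumptions.

// Illustrative usage of buildMongoUrl (sample values; import path assumed).
import { buildMongoUrl } from '@loomcore/api';

const url = buildMongoUrl({
  database: { user: 'svc@corp', password: 'p@ss:word', host: 'db.internal', port: 27017, name: 'loomcore' },
});
console.log(url); // mongodb://svc%40corp:p%40ss%3Aword@db.internal:27017/loomcore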
package/dist/databases/postgres/commands/postgres-batch-update.command.js
CHANGED
@@ -26,10 +26,10 @@ export async function batchUpdate(client, entities, operations, queryObject, plu
         const setClause = columns.map((col, index) => `${col} = $${index + 1}`).join(', ');
         queryObject.filters._id = { eq: _id };
         const { whereClause } = buildWhereClause(queryObject, values);
-        const query = `
-      UPDATE "${pluralResourceName}"
-      SET ${setClause}
-      ${whereClause}
+        const query = `
+      UPDATE "${pluralResourceName}"
+      SET ${setClause}
+      ${whereClause}
     `;
         await client.query(query, values);
     }
@@ -39,9 +39,9 @@ export async function batchUpdate(client, entities, operations, queryObject, plu
     const tablePrefix = hasJoins ? pluralResourceName : undefined;
     queryObject.filters._id = { in: entityIds };
     const { whereClause, values } = buildWhereClause(queryObject, [], tablePrefix);
-    const selectQuery = `
-    SELECT * FROM "${pluralResourceName}" ${joinClauses}
-    ${whereClause}
+    const selectQuery = `
+    SELECT * FROM "${pluralResourceName}" ${joinClauses}
+    ${whereClause}
   `;
     const result = await client.query(selectQuery, values);
     return result.rows;
package/dist/databases/postgres/commands/postgres-create-many.command.js
CHANGED
@@ -12,12 +12,12 @@ export async function createMany(client, pluralResourceName, entities) {
         entity._id = entity._id ?? randomUUID().toString();
         return entity;
     });
-    const tableColumns = await client.query(`
-    SELECT column_name, column_default
-    FROM information_schema.columns
-    WHERE table_schema = current_schema()
-    AND table_name = $1
-    ORDER BY ordinal_position
+    const tableColumns = await client.query(`
+    SELECT column_name, column_default
+    FROM information_schema.columns
+    WHERE table_schema = current_schema()
+    AND table_name = $1
+    ORDER BY ordinal_position
   `, [pluralResourceName]);
     if (tableColumns.rows.length === 0) {
         throw new BadRequestError(`Unable to resolve columns for ${pluralResourceName}`);
@@ -39,10 +39,10 @@ export async function createMany(client, pluralResourceName, entities) {
         valueClauses.push(`(${placeholders})`);
         allValues.push(...values);
     });
-    const query = `
-    INSERT INTO "${pluralResourceName}" (${tableColumns.rows.map(column => `"${column.column_name}"`).join(', ')})
-    VALUES ${valueClauses.join(', ')}
-    RETURNING _id
+    const query = `
+    INSERT INTO "${pluralResourceName}" (${tableColumns.rows.map(column => `"${column.column_name}"`).join(', ')})
+    VALUES ${valueClauses.join(', ')}
+    RETURNING _id
   `;
     const result = await client.query(query, allValues);
     if (result.rows.length !== entitiesWithIds.length) {
package/dist/databases/postgres/commands/postgres-create.command.js
CHANGED
@@ -6,10 +6,10 @@ export async function create(client, pluralResourceName, entity) {
     entity._id = entity._id ?? randomUUID().toString();
     const { columns, values } = columnsAndValuesFromEntity(entity);
     const placeholders = columns.map((_, index) => `$${index + 1}`).join(', ');
-    const query = `
-    INSERT INTO "${pluralResourceName}" (${columns.join(', ')})
-    VALUES (${placeholders})
-    RETURNING _id
+    const query = `
+    INSERT INTO "${pluralResourceName}" (${columns.join(', ')})
+    VALUES (${placeholders})
+    RETURNING _id
   `;
     const result = await client.query(query, values);
     if (result.rows.length === 0) {
package/dist/databases/postgres/commands/postgres-full-update-by-id.command.js
CHANGED
@@ -2,12 +2,12 @@ import { BadRequestError, IdNotFoundError } from "../../../errors/index.js";
 import { buildJoinClauses } from '../utils/build-join-clauses.js';
 export async function fullUpdateById(client, operations, id, entity, pluralResourceName) {
     try {
-        const tableColumns = await client.query(`
-      SELECT column_name, column_default
-      FROM information_schema.columns
-      WHERE table_schema = current_schema()
-      AND table_name = $1
-      ORDER BY ordinal_position
+        const tableColumns = await client.query(`
+      SELECT column_name, column_default
+      FROM information_schema.columns
+      WHERE table_schema = current_schema()
+      AND table_name = $1
+      ORDER BY ordinal_position
     `, [pluralResourceName]);
         if (tableColumns.rows.length === 0) {
             throw new BadRequestError(`Unable to resolve columns for ${pluralResourceName}`);
@@ -40,19 +40,19 @@ export async function fullUpdateById(client, operations, id, entity, pluralResou
             throw new BadRequestError('Cannot perform full update with no fields to update');
         }
         const setClause = updateColumns.join(', ');
-        const query = `
-      UPDATE "${pluralResourceName}"
-      SET ${setClause}
-      WHERE "_id" = $${paramIndex}
+        const query = `
+      UPDATE "${pluralResourceName}"
+      SET ${setClause}
+      WHERE "_id" = $${paramIndex}
     `;
         const result = await client.query(query, [...updateValues, id]);
         if (result.rowCount === 0) {
             throw new IdNotFoundError();
         }
         const joinClauses = buildJoinClauses(operations);
-        const selectQuery = `
-      SELECT * FROM "${pluralResourceName}" ${joinClauses}
-      WHERE "_id" = $1 LIMIT 1
+        const selectQuery = `
+      SELECT * FROM "${pluralResourceName}" ${joinClauses}
+      WHERE "_id" = $1 LIMIT 1
     `;
         const selectResult = await client.query(selectQuery, [id]);
         if (selectResult.rows.length === 0) {
package/dist/databases/postgres/commands/postgres-partial-update-by-id.command.js
CHANGED
@@ -10,19 +10,19 @@ export async function partialUpdateById(client, operations, id, entity, pluralRe
         throw new BadRequestError('Cannot perform partial update with no fields to update');
     }
     const setClause = updateColumns.map((col, index) => `${col} = $${index + 1}`).join(', ');
-    const query = `
-    UPDATE "${pluralResourceName}"
-    SET ${setClause}
-    WHERE "_id" = $${updateValues.length + 1}
+    const query = `
+    UPDATE "${pluralResourceName}"
+    SET ${setClause}
+    WHERE "_id" = $${updateValues.length + 1}
   `;
     const result = await client.query(query, [...updateValues, id]);
     if (result.rowCount === 0) {
         throw new IdNotFoundError();
     }
     const joinClauses = buildJoinClauses(operations);
-    const selectQuery = `
-    SELECT * FROM "${pluralResourceName}" ${joinClauses}
-    WHERE "_id" = $1 LIMIT 1
+    const selectQuery = `
+    SELECT * FROM "${pluralResourceName}" ${joinClauses}
+    WHERE "_id" = $1 LIMIT 1
   `;
     const selectResult = await client.query(selectQuery, [id]);
     if (selectResult.rows.length === 0) {
package/dist/databases/postgres/commands/postgres-update.command.js
CHANGED
@@ -17,10 +17,10 @@ export async function update(client, queryObject, entity, operations, pluralReso
         const originalIndex = parseInt(num, 10);
         return `$${originalIndex + updateValues.length}`;
     });
-    const updateQuery = `
-    UPDATE "${pluralResourceName}"
-    SET ${setClause}
-    ${whereClauseWithAdjustedParams}
+    const updateQuery = `
+    UPDATE "${pluralResourceName}"
+    SET ${setClause}
+    ${whereClauseWithAdjustedParams}
   `;
     const allUpdateValues = [...updateValues, ...whereValues];
     const result = await client.query(updateQuery, allUpdateValues);
@@ -29,9 +29,9 @@ export async function update(client, queryObject, entity, operations, pluralReso
     }
     const joinClauses = buildJoinClauses(operations);
     const orderByClause = buildOrderByClause(queryObject);
-    const selectQuery = `
-    SELECT * FROM "${pluralResourceName}" ${joinClauses}
-    ${whereClause} ${orderByClause}
+    const selectQuery = `
+    SELECT * FROM "${pluralResourceName}" ${joinClauses}
+    ${whereClause} ${orderByClause}
   `.trim();
     const selectResult = await client.query(selectQuery, whereValues);
     return selectResult.rows;
package/dist/databases/postgres/migrations/001-create-migrations-table.migration.js
CHANGED
@@ -12,18 +12,18 @@ export class CreateMigrationTableMigration {
         await this.client.query('BEGIN');
         const tableExists = await doesTableExist(this.client, 'migrations');
         if (!tableExists) {
-            await this.client.query(`
-        CREATE TABLE "migrations" (
-          "_id" VARCHAR(255) PRIMARY KEY,
-          "index" INTEGER NOT NULL UNIQUE,
-          "hasRun" BOOLEAN NOT NULL,
-          "reverted" BOOLEAN NOT NULL
-        )
+            await this.client.query(`
+        CREATE TABLE "migrations" (
+          "_id" VARCHAR(255) PRIMARY KEY,
+          "index" INTEGER NOT NULL UNIQUE,
+          "hasRun" BOOLEAN NOT NULL,
+          "reverted" BOOLEAN NOT NULL
+        )
       `);
         }
-        const result = await this.client.query(`
-        INSERT INTO "migrations" ("_id", "index", "hasRun", "reverted")
-        VALUES ('${_id}', ${this.index}, TRUE, FALSE);
+        const result = await this.client.query(`
+        INSERT INTO "migrations" ("_id", "index", "hasRun", "reverted")
+        VALUES ('${_id}', ${this.index}, TRUE, FALSE);
       `);
         if (result.rowCount === 0) {
             await this.client.query('ROLLBACK');
@@ -39,8 +39,8 @@ export class CreateMigrationTableMigration {
     }
     async revert() {
         try {
-            await this.client.query(`
-        DROP TABLE "migrations";
+            await this.client.query(`
+        DROP TABLE "migrations";
       `);
         }
         catch (error) {