pgsql-test 2.1.9 → 2.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/admin.js +37 -24
- package/esm/admin.js +37 -24
- package/esm/manager.js +17 -23
- package/esm/seed/csv.js +9 -8
- package/manager.d.ts +0 -1
- package/manager.js +17 -26
- package/package.json +4 -4
- package/seed/csv.js +9 -8
package/admin.js
CHANGED
@@ -2,9 +2,11 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.DbAdmin = void 0;
 const child_process_1 = require("child_process");
-const types_1 = require("@launchql/types");
 const fs_1 = require("fs");
+const types_1 = require("@launchql/types");
 const stream_1 = require("./stream");
+const server_utils_1 = require("@launchql/server-utils");
+const log = new server_utils_1.Logger('db-admin');
 class DbAdmin {
     config;
     verbose;
@@ -18,26 +20,35 @@ class DbAdmin {
             PGHOST: this.config.host,
             PGPORT: String(this.config.port),
             PGUSER: this.config.user,
-            PGPASSWORD: this.config.password
+            PGPASSWORD: this.config.password
         };
     }
     run(command) {
-        … (7 removed lines not shown)
+        try {
+            (0, child_process_1.execSync)(command, {
+                stdio: this.verbose ? 'inherit' : 'pipe',
+                env: {
+                    ...process.env,
+                    ...this.getEnv()
+                }
+            });
+            if (this.verbose)
+                log.success(`Executed: ${command}`);
+        }
+        catch (err) {
+            log.error(`Command failed: ${command}`);
+            if (this.verbose)
+                log.error(err.message);
+            throw err;
+        }
     }
     safeDropDb(name) {
         try {
             this.run(`dropdb "${name}"`);
         }
         catch (err) {
-            … (2 removed lines not shown)
-            console.warn(`⚠️ Could not drop database ${name}: ${message}`);
+            if (!err.message.includes('does not exist')) {
+                log.warn(`Could not drop database ${name}: ${err.message}`);
             }
         }
     }
@@ -76,7 +87,9 @@ class DbAdmin {
         try {
             this.run(`psql -c "UPDATE pg_database SET datistemplate = false WHERE datname = '${template}'"`);
         }
-        catch {
+        catch {
+            log.warn(`Skipping failed UPDATE of datistemplate for ${template}`);
+        }
         this.safeDropDb(template);
     }
     async grantRole(role, user, dbName) {
@@ -91,16 +104,16 @@ class DbAdmin {
     }
     async createUserRole(user, password, dbName) {
         const sql = `
-        … (8 removed lines not shown)
+      DO $$
+      BEGIN
+        IF NOT EXISTS (SELECT 1 FROM pg_roles WHERE rolname = '${user}') THEN
+          CREATE ROLE ${user} LOGIN PASSWORD '${password}';
+          GRANT anonymous TO ${user};
+          GRANT authenticated TO ${user};
+        END IF;
+      END $$;
 `.trim();
-        this.streamSql(sql, dbName);
+        await this.streamSql(sql, dbName);
     }
     loadSql(file, dbName) {
         if (!(0, fs_1.existsSync)(file)) {
@@ -120,8 +133,8 @@ class DbAdmin {
         await adapter.seed({
             admin: this,
             config: this.config,
-            pg: null, //
-            connect: null
+            pg: null, // placeholder for PgTestClient
+            connect: null // placeholder for connection factory
         });
         this.cleanupTemplate(templateName);
         this.createTemplateFromBase(seedDb, templateName);
package/esm/admin.js
CHANGED
@@ -1,7 +1,9 @@
 import { execSync } from 'child_process';
-import { getPgEnvOptions } from '@launchql/types';
 import { existsSync } from 'fs';
+import { getPgEnvOptions } from '@launchql/types';
 import { streamSql as stream } from './stream';
+import { Logger } from '@launchql/server-utils';
+const log = new Logger('db-admin');
 export class DbAdmin {
     config;
     verbose;
@@ -15,26 +17,35 @@ export class DbAdmin {
             PGHOST: this.config.host,
             PGPORT: String(this.config.port),
             PGUSER: this.config.user,
-            PGPASSWORD: this.config.password
+            PGPASSWORD: this.config.password
         };
     }
     run(command) {
-        … (7 removed lines not shown)
+        try {
+            execSync(command, {
+                stdio: this.verbose ? 'inherit' : 'pipe',
+                env: {
+                    ...process.env,
+                    ...this.getEnv()
+                }
+            });
+            if (this.verbose)
+                log.success(`Executed: ${command}`);
+        }
+        catch (err) {
+            log.error(`Command failed: ${command}`);
+            if (this.verbose)
+                log.error(err.message);
+            throw err;
+        }
     }
     safeDropDb(name) {
         try {
             this.run(`dropdb "${name}"`);
         }
         catch (err) {
-            … (2 removed lines not shown)
-            console.warn(`⚠️ Could not drop database ${name}: ${message}`);
+            if (!err.message.includes('does not exist')) {
+                log.warn(`Could not drop database ${name}: ${err.message}`);
             }
         }
     }
@@ -73,7 +84,9 @@ export class DbAdmin {
         try {
             this.run(`psql -c "UPDATE pg_database SET datistemplate = false WHERE datname = '${template}'"`);
         }
-        catch {
+        catch {
+            log.warn(`Skipping failed UPDATE of datistemplate for ${template}`);
+        }
         this.safeDropDb(template);
     }
     async grantRole(role, user, dbName) {
@@ -88,16 +101,16 @@ export class DbAdmin {
     }
     async createUserRole(user, password, dbName) {
         const sql = `
-        … (8 removed lines not shown)
+      DO $$
+      BEGIN
+        IF NOT EXISTS (SELECT 1 FROM pg_roles WHERE rolname = '${user}') THEN
+          CREATE ROLE ${user} LOGIN PASSWORD '${password}';
+          GRANT anonymous TO ${user};
+          GRANT authenticated TO ${user};
+        END IF;
+      END $$;
 `.trim();
-        this.streamSql(sql, dbName);
+        await this.streamSql(sql, dbName);
     }
     loadSql(file, dbName) {
         if (!existsSync(file)) {
@@ -117,8 +130,8 @@ export class DbAdmin {
         await adapter.seed({
             admin: this,
             config: this.config,
-            pg: null, //
-            connect: null
+            pg: null, // placeholder for PgTestClient
+            connect: null // placeholder for connection factory
         });
         this.cleanupTemplate(templateName);
         this.createTemplateFromBase(seedDb, templateName);
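
The net effect of the admin.js changes: run() now reports through a Logger('db-admin') instance and rethrows on failure, safeDropDb() stays quiet when the error is "does not exist", and createUserRole() issues an idempotent DO $$ block and awaits streamSql. A minimal sketch of how this surfaces to a caller; the deep import path and connection values below are assumptions for illustration, not documented package exports:

// Sketch only: import path and connection values are placeholders, not verified exports.
import { DbAdmin } from 'pgsql-test/esm/admin';

const admin = new DbAdmin(
  { host: 'localhost', port: 5432, user: 'postgres', password: 'password', database: 'postgres' },
  true // verbose: successes are now reported via Logger('db-admin') instead of being silent
);

admin.run('createdb my_test_db');        // logs "Executed: ..." when verbose, rethrows on failure
await admin.createUserRole('app_user', 'secret', 'my_test_db'); // no-op if the role already exists
admin.safeDropDb('missing_db');          // only warns when the error is not "does not exist"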
package/esm/manager.js
CHANGED
@@ -1,19 +1,20 @@
 import { Pool } from 'pg';
-import chalk from 'chalk';
 import { DbAdmin } from './admin';
 import { getPgEnvOptions } from '@launchql/types';
 import { PgTestClient } from './test-client';
+import { Logger } from '@launchql/server-utils';
+const log = new Logger('test-connector');
 const SYS_EVENTS = ['SIGTERM'];
 const end = (pool) => {
     try {
         if (pool.ended || pool.ending) {
-            …
+            log.warn('⚠️ pg pool already ended or ending');
             return;
         }
         pool.end();
     }
     catch (err) {
-        …
+        log.error('❌ pg pool termination error:', err);
     }
 };
 export class PgTestConnector {
@@ -26,7 +27,7 @@ export class PgTestConnector {
         this.verbose = verbose;
         SYS_EVENTS.forEach((event) => {
             process.on(event, () => {
-                …
+                log.info(`⏹ Received ${event}, closing all connections...`);
                 this.closeAll();
             });
         });
@@ -37,10 +38,6 @@ export class PgTestConnector {
         }
         return PgTestConnector.instance;
     }
-    log(...args) {
-        if (this.verbose)
-            console.log(...args);
-    }
     poolKey(config) {
         return `${config.user}@${config.host}:${config.port}/${config.database}`;
     }
@@ -52,7 +49,7 @@ export class PgTestConnector {
         if (!this.pgPools.has(key)) {
             const pool = new Pool(config);
             this.pgPools.set(key, pool);
-            …
+            log.info(`📘 Created new pg pool: ${key}`);
         }
         return this.pgPools.get(key);
     }
@@ -61,53 +58,50 @@ export class PgTestConnector {
         this.clients.add(client);
         const key = this.dbKey(config);
         this.seenDbConfigs.set(key, config);
-        …
+        log.info(`🔌 New PgTestClient connected to ${config.database}`);
         return client;
     }
     async closeAll() {
-        …
+        log.info('\n🧹 Closing all PgTestClients...');
         await Promise.all(Array.from(this.clients).map(async (client) => {
             try {
                 await client.close();
-                …
+                log.success(`✅ Closed client for ${client.config.database}`);
             }
             catch (err) {
-                …
+                log.error(`❌ Error closing PgTestClient for ${client.config.database}:`, err);
             }
         }));
         this.clients.clear();
-        …
+        log.info('\n🧯 Disposing pg pools...');
        for (const [key, pool] of this.pgPools.entries()) {
-            …
+            log.debug(`🧯 Disposing pg pool [${key}]`);
            end(pool);
        }
        this.pgPools.clear();
-        …
+        log.info('\n🗑️ Dropping seen databases...');
        await Promise.all(Array.from(this.seenDbConfigs.values()).map(async (config) => {
            try {
-                // somehow an "admin" db had app_user creds?
                const rootPg = getPgEnvOptions();
                const admin = new DbAdmin({ ...config, user: rootPg.user, password: rootPg.password }, this.verbose);
-                // console.log(config);
                admin.drop();
-                …
+                log.warn(`🧨 Dropped database: ${config.database}`);
            }
            catch (err) {
-                …
+                log.error(`❌ Failed to drop database ${config.database}:`, err);
            }
        }));
        this.seenDbConfigs.clear();
-        …
+        log.success('\n✅ All PgTestClients closed, pools disposed, databases dropped.');
    }
    close() {
        this.closeAll();
    }
    drop(config) {
        const key = this.dbKey(config);
-        // for drop, no need for conn opts
        const admin = new DbAdmin(config, this.verbose);
        admin.drop();
-        …
+        log.warn(`🧨 Dropped database: ${config.database}`);
        this.seenDbConfigs.delete(key);
    }
    kill(client) {
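
manager.js drops its private log(...args) helper and the chalk import in favor of the shared Logger('test-connector'), so pool creation, client registration, and the closeAll() teardown are reported consistently whether or not verbose mode is set. A rough lifecycle sketch; the getInstance() name is an assumption based on the PgTestConnector.instance singleton shown above, and the import path is illustrative:

// Sketch only: getInstance() and the import path are inferred from this diff, not from full API docs.
import { PgTestConnector } from 'pgsql-test/esm/manager';

const connector = PgTestConnector.getInstance(); // singleton; SIGTERM now logs "⏹ Received SIGTERM..." before closeAll()

// While tests run, every new pool and client is announced:
//   "📘 Created new pg pool: user@host:port/db"
//   "🔌 New PgTestClient connected to <database>"

await connector.closeAll();
// Teardown order, each stage logged via Logger('test-connector'):
//   1. close every PgTestClient  -> "✅ Closed client for <database>"
//   2. dispose every pg Pool     -> "🧯 Disposing pg pool [<key>]"
//   3. drop every seen database  -> "🧨 Dropped database: <database>"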
package/esm/seed/csv.js
CHANGED
@@ -1,15 +1,16 @@
-import { createReadStream, existsSync } from 'fs';
+import { createReadStream, existsSync, createWriteStream } from 'fs';
 import { pipeline } from 'node:stream/promises';
 import { from as copyFrom, to as copyTo } from 'pg-copy-streams';
-import { …
+import { Logger } from '@launchql/server-utils';
+const log = new Logger('csv');
 export function csv(tables) {
     return {
         async seed(ctx) {
             for (const [table, filePath] of Object.entries(tables)) {
                 if (!existsSync(filePath)) {
-                    throw new Error( …
+                    throw new Error(`CSV file not found: ${filePath}`);
                 }
-                …
+                log.info(`📥 Seeding "${table}" from ${filePath}`);
                 await copyCsvIntoTable(ctx.pg, table, filePath);
             }
         }
@@ -21,10 +22,10 @@ export async function copyCsvIntoTable(pg, table, filePath) {
     const source = createReadStream(filePath);
     try {
         await pipeline(source, stream);
-        …
+        log.success(`✅ Successfully seeded "${table}"`);
     }
     catch (err) {
-        …
+        log.error(`❌ COPY failed for "${table}": ${err.message}`);
         throw err;
     }
 }
@@ -34,10 +35,10 @@ export async function exportTableToCsv(pg, table, filePath) {
     const target = createWriteStream(filePath);
     try {
         await pipeline(stream, target);
-        …
+        log.success(`✅ Exported "${table}" to ${filePath}`);
     }
     catch (err) {
-        …
+        log.error(`❌ Failed to export "${table}": ${err.message}`);
         throw err;
     }
 }
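
seed/csv.js now fails fast with a concrete "CSV file not found" message and reports COPY progress through Logger('csv'). A brief usage sketch, assuming hypothetical fixture paths and an already-connected PgTestClient passed in as pg:

// Sketch only: fixture paths are hypothetical; `pg` is whatever PgTestClient your test already holds.
import { csv, exportTableToCsv } from 'pgsql-test/esm/seed/csv';

export async function seedFixtures(pg) {
  const seeder = csv({
    users: './fixtures/users.csv',          // table -> CSV file; a missing file now throws "CSV file not found: <path>"
    'app.orders': './fixtures/orders.csv'
  });
  await seeder.seed({ pg });                // logs "📥 Seeding ..." and "✅ Successfully seeded ..." per table
  await exportTableToCsv(pg, 'users', './out/users.csv'); // export relies on the newly imported createWriteStream
}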
package/manager.d.ts
CHANGED
package/manager.js
CHANGED
@@ -1,25 +1,23 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.PgTestConnector = void 0;
 const pg_1 = require("pg");
-const chalk_1 = __importDefault(require("chalk"));
 const admin_1 = require("./admin");
 const types_1 = require("@launchql/types");
 const test_client_1 = require("./test-client");
+const server_utils_1 = require("@launchql/server-utils");
+const log = new server_utils_1.Logger('test-connector');
 const SYS_EVENTS = ['SIGTERM'];
 const end = (pool) => {
     try {
         if (pool.ended || pool.ending) {
-            …
+            log.warn('⚠️ pg pool already ended or ending');
             return;
         }
         pool.end();
     }
     catch (err) {
-        …
+        log.error('❌ pg pool termination error:', err);
     }
 };
 class PgTestConnector {
@@ -32,7 +30,7 @@ class PgTestConnector {
         this.verbose = verbose;
         SYS_EVENTS.forEach((event) => {
             process.on(event, () => {
-                …
+                log.info(`⏹ Received ${event}, closing all connections...`);
                 this.closeAll();
             });
         });
@@ -43,10 +41,6 @@ class PgTestConnector {
         }
         return PgTestConnector.instance;
     }
-    log(...args) {
-        if (this.verbose)
-            console.log(...args);
-    }
     poolKey(config) {
         return `${config.user}@${config.host}:${config.port}/${config.database}`;
     }
@@ -58,7 +52,7 @@ class PgTestConnector {
         if (!this.pgPools.has(key)) {
             const pool = new pg_1.Pool(config);
             this.pgPools.set(key, pool);
-            …
+            log.info(`📘 Created new pg pool: ${key}`);
         }
         return this.pgPools.get(key);
     }
@@ -67,53 +61,50 @@ class PgTestConnector {
         this.clients.add(client);
         const key = this.dbKey(config);
         this.seenDbConfigs.set(key, config);
-        …
+        log.info(`🔌 New PgTestClient connected to ${config.database}`);
         return client;
     }
     async closeAll() {
-        …
+        log.info('\n🧹 Closing all PgTestClients...');
         await Promise.all(Array.from(this.clients).map(async (client) => {
             try {
                 await client.close();
-                …
+                log.success(`✅ Closed client for ${client.config.database}`);
             }
             catch (err) {
-                …
+                log.error(`❌ Error closing PgTestClient for ${client.config.database}:`, err);
             }
         }));
         this.clients.clear();
-        …
+        log.info('\n🧯 Disposing pg pools...');
        for (const [key, pool] of this.pgPools.entries()) {
-            …
+            log.debug(`🧯 Disposing pg pool [${key}]`);
            end(pool);
        }
        this.pgPools.clear();
-        …
+        log.info('\n🗑️ Dropping seen databases...');
        await Promise.all(Array.from(this.seenDbConfigs.values()).map(async (config) => {
            try {
-                // somehow an "admin" db had app_user creds?
                const rootPg = (0, types_1.getPgEnvOptions)();
                const admin = new admin_1.DbAdmin({ ...config, user: rootPg.user, password: rootPg.password }, this.verbose);
-                // console.log(config);
                admin.drop();
-                …
+                log.warn(`🧨 Dropped database: ${config.database}`);
            }
            catch (err) {
-                …
+                log.error(`❌ Failed to drop database ${config.database}:`, err);
            }
        }));
        this.seenDbConfigs.clear();
-        …
+        log.success('\n✅ All PgTestClients closed, pools disposed, databases dropped.');
    }
    close() {
        this.closeAll();
    }
    drop(config) {
        const key = this.dbKey(config);
-        // for drop, no need for conn opts
        const admin = new admin_1.DbAdmin(config, this.verbose);
        admin.drop();
-        …
+        log.warn(`🧨 Dropped database: ${config.database}`);
        this.seenDbConfigs.delete(key);
    }
    kill(client) {
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "pgsql-test",
-  "version": "2.1.9",
+  "version": "2.1.10",
   "author": "Dan Lynch <pyramation@gmail.com>",
   "description": "pgsql-test offers isolated, role-aware, and rollback-friendly PostgreSQL environments for integration tests — giving developers realistic test coverage without external state pollution",
   "main": "index.js",
@@ -59,13 +59,13 @@
     "@types/pg-copy-streams": "^1.2.5"
   },
   "dependencies": {
-    "@launchql/migrate": "^2.1.…
-    "@launchql/server-utils": "^2.1.…
+    "@launchql/migrate": "^2.1.10",
+    "@launchql/server-utils": "^2.1.6",
     "@launchql/types": "^2.1.5",
     "chalk": "^4.1.0",
     "deepmerge": "^4.3.1",
     "pg": "^8.16.0",
     "pg-copy-streams": "^6.0.6"
   },
-  "gitHead": "…
+  "gitHead": "a925fe2ca332f6384b9ae52c89906868b5c9a703"
 }
package/seed/csv.js
CHANGED
@@ -6,15 +6,16 @@ exports.exportTableToCsv = exportTableToCsv;
 const fs_1 = require("fs");
 const promises_1 = require("node:stream/promises");
 const pg_copy_streams_1 = require("pg-copy-streams");
-const …
+const server_utils_1 = require("@launchql/server-utils");
+const log = new server_utils_1.Logger('csv');
 function csv(tables) {
     return {
         async seed(ctx) {
             for (const [table, filePath] of Object.entries(tables)) {
                 if (!(0, fs_1.existsSync)(filePath)) {
-                    throw new Error( …
+                    throw new Error(`CSV file not found: ${filePath}`);
                 }
-                …
+                log.info(`📥 Seeding "${table}" from ${filePath}`);
                 await copyCsvIntoTable(ctx.pg, table, filePath);
             }
         }
@@ -26,23 +27,23 @@ async function copyCsvIntoTable(pg, table, filePath) {
     const source = (0, fs_1.createReadStream)(filePath);
     try {
         await (0, promises_1.pipeline)(source, stream);
-        …
+        log.success(`✅ Successfully seeded "${table}"`);
     }
     catch (err) {
-        …
+        log.error(`❌ COPY failed for "${table}": ${err.message}`);
         throw err;
     }
 }
 async function exportTableToCsv(pg, table, filePath) {
     const client = pg.client;
     const stream = client.query((0, pg_copy_streams_1.to)(`COPY ${table} TO STDOUT WITH CSV HEADER`));
-    const target = (0, …
+    const target = (0, fs_1.createWriteStream)(filePath);
     try {
         await (0, promises_1.pipeline)(stream, target);
-        …
+        log.success(`✅ Exported "${table}" to ${filePath}`);
     }
     catch (err) {
-        …
+        log.error(`❌ Failed to export "${table}": ${err.message}`);
         throw err;
     }
 }