pgsql-seed 0.0.1 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +23 -0
- package/README.md +39 -625
- package/esm/index.d.ts +2 -0
- package/esm/index.js +12 -7
- package/esm/pgpm.d.ts +37 -0
- package/esm/pgpm.js +52 -0
- package/index.d.ts +2 -7
- package/index.js +20 -23
- package/package.json +27 -32
- package/pgpm.d.ts +37 -0
- package/pgpm.js +56 -0
- package/admin.d.ts +0 -26
- package/admin.js +0 -144
- package/connect.d.ts +0 -19
- package/connect.js +0 -95
- package/context-utils.d.ts +0 -8
- package/context-utils.js +0 -28
- package/esm/admin.js +0 -140
- package/esm/connect.js +0 -90
- package/esm/context-utils.js +0 -25
- package/esm/manager.js +0 -138
- package/esm/roles.js +0 -32
- package/esm/seed/adapters.js +0 -23
- package/esm/seed/csv.js +0 -108
- package/esm/seed/index.js +0 -14
- package/esm/seed/json.js +0 -36
- package/esm/seed/pgpm.js +0 -28
- package/esm/seed/sql.js +0 -15
- package/esm/seed/types.js +0 -1
- package/esm/stream.js +0 -96
- package/esm/test-client.js +0 -168
- package/esm/utils.js +0 -91
- package/manager.d.ts +0 -26
- package/manager.js +0 -142
- package/roles.d.ts +0 -17
- package/roles.js +0 -38
- package/seed/adapters.d.ts +0 -4
- package/seed/adapters.js +0 -28
- package/seed/csv.d.ts +0 -15
- package/seed/csv.js +0 -114
- package/seed/index.d.ts +0 -14
- package/seed/index.js +0 -31
- package/seed/json.d.ts +0 -12
- package/seed/json.js +0 -40
- package/seed/pgpm.d.ts +0 -10
- package/seed/pgpm.js +0 -32
- package/seed/sql.d.ts +0 -7
- package/seed/sql.js +0 -18
- package/seed/types.d.ts +0 -13
- package/seed/types.js +0 -2
- package/stream.d.ts +0 -33
- package/stream.js +0 -99
- package/test-client.d.ts +0 -55
- package/test-client.js +0 -172
- package/utils.d.ts +0 -17
- package/utils.js +0 -105
package/esm/admin.js
DELETED
|
@@ -1,140 +0,0 @@
|
|
|
1
|
-
import { generateCreateUserWithGrantsSQL, generateGrantRoleSQL } from '@pgpmjs/core';
|
|
2
|
-
import { Logger } from '@pgpmjs/logger';
|
|
3
|
-
import { execSync } from 'child_process';
|
|
4
|
-
import { existsSync } from 'fs';
|
|
5
|
-
import { getPgEnvOptions } from 'pg-env';
|
|
6
|
-
import { getRoleName } from './roles';
|
|
7
|
-
import { streamSql as stream } from './stream';
|
|
8
|
-
const log = new Logger('db-admin');
|
|
9
|
-
/**
 * Administrative helper around the PostgreSQL command-line tools
 * (createdb, dropdb, psql) plus streamed SQL, used to provision and
 * tear down test/seed databases and template databases.
 */
export class DbAdmin {
    // Resolved pg connection options (host/port/user/password/database).
    config;
    // When true, child-process output is inherited and successes are logged.
    verbose;
    // Optional role-name mapping passed through to getRoleName().
    roleConfig;
    /**
     * @param config - Partial pg options; normalized via getPgEnvOptions()
     * @param verbose - Log command output and successes
     * @param roleConfig - Optional role mapping overrides
     */
    constructor(config, verbose = false, roleConfig) {
        this.config = config;
        this.verbose = verbose;
        this.roleConfig = roleConfig;
        // Re-resolve against environment defaults so partial configs work.
        this.config = getPgEnvOptions(config);
    }
    /**
     * Environment variables used by the pg CLI tools for this config.
     * @returns {{PGHOST: string, PGPORT: string, PGUSER: string, PGPASSWORD: string}}
     */
    getEnv() {
        return {
            PGHOST: this.config.host,
            PGPORT: String(this.config.port),
            PGUSER: this.config.user,
            PGPASSWORD: this.config.password
        };
    }
    /**
     * Run a shell command with the pg environment applied.
     * @throws Rethrows the execSync error after logging the failed command.
     */
    run(command) {
        try {
            execSync(command, {
                stdio: this.verbose ? 'inherit' : 'pipe',
                env: {
                    ...process.env,
                    ...this.getEnv()
                }
            });
            if (this.verbose)
                log.success(`Executed: ${command}`);
        }
        catch (err) {
            log.error(`Command failed: ${command}`);
            if (this.verbose)
                log.error(err.message);
            throw err;
        }
    }
    /**
     * Drop a database, swallowing only the "does not exist" error; any
     * other failure is logged as a warning (best-effort cleanup).
     */
    safeDropDb(name) {
        try {
            this.run(`dropdb "${name}"`);
        }
        catch (err) {
            if (!err.message.includes('does not exist')) {
                log.warn(`Could not drop database ${name}: ${err.message}`);
            }
        }
    }
    /** Drop the given database (defaults to the configured database). */
    drop(dbName) {
        this.safeDropDb(dbName ?? this.config.database);
    }
    /** Clear a database's template flag, then drop it. */
    dropTemplate(dbName) {
        this.run(`psql -c "UPDATE pg_database SET datistemplate='false' WHERE datname='${dbName}';"`);
        this.drop(dbName);
    }
    /** Create a database (defaults to the configured database). */
    create(dbName) {
        const db = dbName ?? this.config.database;
        this.run(`createdb -U ${this.config.user} -h ${this.config.host} -p ${this.config.port} "${db}"`);
    }
    /** Create a database cloned from an existing template database. */
    createFromTemplate(template, dbName) {
        const db = dbName ?? this.config.database;
        this.run(`createdb -U ${this.config.user} -h ${this.config.host} -p ${this.config.port} -e "${db}" -T "${template}"`);
    }
    /**
     * Install one or more extensions into a database.
     * @param extensions - Array of extension names, or a comma-separated string
     * @param dbName - Target database (defaults to the configured database)
     */
    installExtensions(extensions, dbName) {
        const db = dbName ?? this.config.database;
        const extList = typeof extensions === 'string' ? extensions.split(',') : extensions;
        for (const extension of extList) {
            this.run(`psql --dbname "${db}" -c 'CREATE EXTENSION IF NOT EXISTS "${extension}" CASCADE;'`);
        }
    }
    /** Build a postgres:// connection string for this config (or another db). */
    connectionString(dbName) {
        const { user, password, host, port } = this.config;
        const db = dbName ?? this.config.database;
        return `postgres://${user}:${password}@${host}:${port}/${db}`;
    }
    /** Clone a base database and mark the clone as a template. */
    createTemplateFromBase(base, template) {
        this.run(`createdb -T "${base}" "${template}"`);
        this.run(`psql -c "UPDATE pg_database SET datistemplate = true WHERE datname = '${template}';"`);
    }
    /**
     * Best-effort removal of a template database: clear the template flag
     * (ignoring failures, e.g. when the template does not exist) then drop it.
     */
    cleanupTemplate(template) {
        try {
            this.run(`psql -c "UPDATE pg_database SET datistemplate = false WHERE datname = '${template}'"`);
        }
        catch {
            log.warn(`Skipping failed UPDATE of datistemplate for ${template}`);
        }
        this.safeDropDb(template);
    }
    /** Grant a role to a user in the given database. */
    async grantRole(role, user, dbName) {
        const db = dbName ?? this.config.database;
        const sql = generateGrantRoleSQL(role, user);
        await this.streamSql(sql, db);
    }
    /** Grant CONNECT on a database to a role. */
    async grantConnect(role, dbName) {
        const db = dbName ?? this.config.database;
        const sql = `GRANT CONNECT ON DATABASE "${db}" TO ${role};`;
        await this.streamSql(sql, db);
    }
    // ONLY granting admin role for testing purposes, normally the db connection for apps won't have admin role
    // DO NOT USE THIS FOR PRODUCTION
    async createUserRole(user, password, dbName, useLocksForRoles = false) {
        const anonRole = getRoleName('anonymous', this.roleConfig);
        const authRole = getRoleName('authenticated', this.roleConfig);
        const adminRole = getRoleName('administrator', this.roleConfig);
        const sql = generateCreateUserWithGrantsSQL(user, password, [anonRole, authRole, adminRole], useLocksForRoles);
        await this.streamSql(sql, dbName);
    }
    /**
     * Execute a SQL file against a database via psql.
     * @throws {Error} If the file does not exist.
     */
    loadSql(file, dbName) {
        if (!existsSync(file)) {
            throw new Error(`Missing SQL file: ${file}`);
        }
        // Quote both arguments so file paths or database names containing
        // spaces/shell metacharacters survive the shell (the other commands
        // in this class already quote their database arguments).
        this.run(`psql -f "${file}" "${dbName}"`);
    }
    /** Stream raw SQL text into the given database. */
    async streamSql(sql, dbName) {
        await stream({
            ...this.config,
            database: dbName
        }, sql);
    }
    /**
     * Create the configured database, run a seed adapter against it, then
     * snapshot it as a template database and drop the working copy.
     */
    async createSeededTemplate(templateName, adapter) {
        const seedDb = this.config.database;
        this.create(seedDb);
        await adapter.seed({
            admin: this,
            config: this.config,
            pg: null, // placeholder for PgTestClient
            connect: null // placeholder for connection factory
        });
        // Remove any stale template with the same name before snapshotting.
        this.cleanupTemplate(templateName);
        this.createTemplateFromBase(seedDb, templateName);
        this.drop(seedDb);
    }
}
|
package/esm/connect.js
DELETED
|
@@ -1,90 +0,0 @@
|
|
|
1
|
-
import { getConnEnvOptions } from '@pgpmjs/env';
|
|
2
|
-
import { randomUUID } from 'crypto';
|
|
3
|
-
import { teardownPgPools } from 'pg-cache';
|
|
4
|
-
import { getPgEnvOptions, } from 'pg-env';
|
|
5
|
-
import { DbAdmin } from './admin';
|
|
6
|
-
import { PgTestConnector } from './manager';
|
|
7
|
-
import { getDefaultRole } from './roles';
|
|
8
|
-
import { seed } from './seed';
|
|
9
|
-
// Shared singleton connector; assigned on the first getConnections() call.
let manager;
/**
 * Build a DbAdmin bound to the root/maintenance database using the
 * superuser credentials from the given config.
 */
export const getPgRootAdmin = (config, connOpts = {}) => {
    const opts = getPgEnvOptions({
        user: config.user,
        password: config.password,
        host: config.host,
        port: config.port,
        database: connOpts.rootDb
    });
    return new DbAdmin(opts, false, connOpts);
};
/**
 * Normalize caller-supplied options: resolve connection env options and
 * generate a unique database name unless one is explicitly provided.
 */
const getConnOopts = (cn = {}) => {
    const connect = getConnEnvOptions(cn.db);
    const config = getPgEnvOptions({
        database: `${connect.prefix}${randomUUID()}`,
        ...cn.pg
    });
    return { pg: config, db: connect };
};
/**
 * Provision a fresh test database (optionally from a template), run the
 * seed adapters, and return root (`pg`) and app (`db`) clients plus an
 * idempotent teardown function.
 */
export const getConnections = async (cn = {}, seedAdapters = [seed.pgpm()]) => {
    const { pg: config, db: connOpts } = getConnOopts(cn);
    const root = getPgRootAdmin(config, connOpts);
    await root.createUserRole(connOpts.connections.app.user, connOpts.connections.app.password, connOpts.rootDb);
    const admin = new DbAdmin(config, false, connOpts);
    // An explicit TEST_DB wins; otherwise clone a template, or create a
    // fresh database and install the configured extensions.
    if (process.env.TEST_DB) {
        config.database = process.env.TEST_DB;
    }
    else if (connOpts.template) {
        admin.createFromTemplate(connOpts.template, config.database);
    }
    else {
        admin.create(config.database);
        admin.installExtensions(connOpts.extensions);
    }
    await admin.grantConnect(connOpts.connections.app.user, config.database);
    manager = PgTestConnector.getInstance(config);
    const pg = manager.getClient(config);
    // Teardown is idempotent: repeat/concurrent calls share one promise.
    let teardownPromise = null;
    const teardown = async () => {
        if (teardownPromise)
            return teardownPromise;
        teardownPromise = (async () => {
            manager.beginTeardown();
            await teardownPgPools();
            await manager.closeAll();
        })();
        return teardownPromise;
    };
    if (seedAdapters.length) {
        try {
            await seed.compose(seedAdapters).seed({
                connect: connOpts,
                admin,
                config: config,
                pg: manager.getClient(config)
            });
        }
        catch (error) {
            const err = error;
            const msg = err && (err.stack || err.message) ? (err.stack || err.message) : String(err);
            process.stderr.write(`[pgsql-test] Seed error (continuing): ${msg}\n`);
            // continue without teardown to allow caller-managed lifecycle
        }
    }
    // The app-level client connects with restricted application credentials.
    const dbConfig = {
        ...config,
        user: connOpts.connections.app.user,
        password: connOpts.connections.app.password
    };
    const db = manager.getClient(dbConfig, {
        auth: connOpts.auth,
        roles: connOpts.roles
    });
    db.setContext({ role: getDefaultRole(connOpts) });
    return { pg, db, teardown, manager, admin };
};
|
package/esm/context-utils.js
DELETED
|
@@ -1,25 +0,0 @@
|
|
|
1
|
-
/**
 * Generate SQL statements to set PostgreSQL session context variables.
 * Uses SET LOCAL ROLE for the 'role' key and set_config() for other variables.
 * @param context - Context settings to apply (values may be null/undefined to clear)
 * @returns SQL string with SET LOCAL ROLE and set_config() statements, newline-joined
 */
export function generateContextStatements(context) {
    return Object.entries(context)
        .map(([key, val]) => {
        if (key === 'role') {
            if (val === null || val === undefined) {
                return 'SET LOCAL ROLE NONE;';
            }
            // Double embedded quotes so the role is a valid quoted identifier.
            const escapedRole = String(val).replace(/"/g, '""');
            return `SET LOCAL ROLE "${escapedRole}";`;
        }
        // Use set_config for other context variables.
        // Escape single quotes in the key as well as the value: both are
        // interpolated into SQL string literals, and an unescaped quote in
        // the key would break the statement (and permit SQL injection).
        const escapedKey = key.replace(/'/g, "''");
        if (val === null || val === undefined) {
            return `SELECT set_config('${escapedKey}', NULL, true);`;
        }
        const escapedVal = String(val).replace(/'/g, "''");
        return `SELECT set_config('${escapedKey}', '${escapedVal}', true);`;
    })
        .join('\n');
}
|
package/esm/manager.js
DELETED
|
@@ -1,138 +0,0 @@
|
|
|
1
|
-
import { Logger } from '@pgpmjs/logger';
|
|
2
|
-
import { Pool } from 'pg';
|
|
3
|
-
import { getPgEnvOptions } from 'pg-env';
|
|
4
|
-
import { DbAdmin } from './admin';
|
|
5
|
-
import { PgTestClient } from './test-client';
|
|
6
|
-
const log = new Logger('test-connector');
|
|
7
|
-
const SYS_EVENTS = ['SIGTERM'];
/** Safely end a pg pool, tolerating pools that are already closed or closing. */
const end = (pool) => {
    try {
        if (pool.ended || pool.ending) {
            log.warn('⚠️ pg pool already ended or ending');
            return;
        }
        pool.end();
    }
    catch (err) {
        log.error('❌ pg pool termination error:', err);
    }
};
/**
 * Singleton registry of the test clients, pg pools and databases created
 * during a test run. Everything handed out is tracked so that closeAll()
 * can tear the whole environment down: close clients, end pools, and drop
 * every database that was seen.
 */
export class PgTestConnector {
    static instance;
    clients = new Set(); // every PgTestClient handed out
    pgPools = new Map(); // poolKey -> Pool
    seenDbConfigs = new Map(); // dbKey -> config, for dropping at teardown
    pendingConnects = new Set(); // in-flight connect promises
    config;
    verbose = false;
    shuttingDown = false;
    constructor(config, verbose = false) {
        this.verbose = verbose;
        this.config = config;
        // Tear everything down on SIGTERM so test runs don't leak databases.
        SYS_EVENTS.forEach((event) => {
            process.on(event, () => {
                log.info(`⏹ Received ${event}, closing all connections...`);
                this.closeAll();
            });
        });
    }
    /** Lazily create and return the process-wide singleton. */
    static getInstance(config, verbose = false) {
        if (!PgTestConnector.instance) {
            PgTestConnector.instance = new PgTestConnector(config, verbose);
        }
        return PgTestConnector.instance;
    }
    /** Key identifying a pool: user + host + port + database. */
    poolKey(config) {
        return `${config.user}@${config.host}:${config.port}/${config.database}`;
    }
    /** Key identifying a database regardless of connecting user. */
    dbKey(config) {
        return `${config.host}:${config.port}/${config.database}`;
    }
    /** Flag the connector as shutting down; getClient() refuses new clients. */
    beginTeardown() {
        this.shuttingDown = true;
    }
    /** Track an in-flight connect promise until it settles. */
    registerConnect(p) {
        this.pendingConnects.add(p);
        p.finally(() => this.pendingConnects.delete(p));
    }
    /** Wait for all tracked connect promises to settle (either way). */
    async awaitPendingConnects() {
        const pending = Array.from(this.pendingConnects);
        if (pending.length) {
            await Promise.allSettled(pending);
        }
    }
    /** Return the pool for this config, creating and caching it on first use. */
    getPool(config) {
        const key = this.poolKey(config);
        if (!this.pgPools.has(key)) {
            this.pgPools.set(key, new Pool(config));
            log.info(`📘 Created new pg pool: ${key}`);
        }
        return this.pgPools.get(key);
    }
    /**
     * Hand out a new tracked PgTestClient for the given config.
     * @throws {Error} When teardown has already begun.
     */
    getClient(config, opts = {}) {
        if (this.shuttingDown) {
            throw new Error('PgTestConnector is shutting down; no new clients allowed');
        }
        const client = new PgTestClient(config, {
            trackConnect: (p) => this.registerConnect(p),
            ...opts
        });
        this.clients.add(client);
        this.seenDbConfigs.set(this.dbKey(config), config);
        log.info(`🔌 New PgTestClient connected to ${config.database}`);
        return client;
    }
    /**
     * Full teardown: wait for pending connects, close every client, end
     * every pool, then drop every database seen — resetting all tracking
     * state so the connector can be reused.
     */
    async closeAll() {
        this.beginTeardown();
        await this.awaitPendingConnects();
        log.info('🧹 Closing all PgTestClients...');
        await Promise.all(Array.from(this.clients).map(async (client) => {
            try {
                await client.close();
                log.success(`✅ Closed client for ${client.config.database}`);
            }
            catch (err) {
                log.error(`❌ Error closing PgTestClient for ${client.config.database}:`, err);
            }
        }));
        this.clients.clear();
        log.info('🧯 Disposing pg pools...');
        for (const [key, pool] of this.pgPools.entries()) {
            log.debug(`🧯 Disposing pg pool [${key}]`);
            end(pool);
        }
        this.pgPools.clear();
        log.info('🗑️ Dropping seen databases...');
        await Promise.all(Array.from(this.seenDbConfigs.values()).map(async (config) => {
            try {
                // Drop with root credentials, since the seen config may be
                // a restricted application user.
                const rootPg = getPgEnvOptions(this.config);
                const admin = new DbAdmin({ ...config, user: rootPg.user, password: rootPg.password }, this.verbose);
                admin.drop();
                log.warn(`🧨 Dropped database: ${config.database}`);
            }
            catch (err) {
                log.error(`❌ Failed to drop database ${config.database}:`, err);
            }
        }));
        this.seenDbConfigs.clear();
        log.success('✅ All PgTestClients closed, pools disposed, databases dropped.');
        this.pendingConnects.clear();
        this.shuttingDown = false;
    }
    /** Fire-and-forget variant of closeAll(). */
    close() {
        this.closeAll();
    }
    /** Drop a single database and forget it. */
    drop(config) {
        const key = this.dbKey(config);
        const admin = new DbAdmin(config, this.verbose);
        admin.drop();
        log.warn(`🧨 Dropped database: ${config.database}`);
        this.seenDbConfigs.delete(key);
    }
    /** Close one client and drop its database. */
    async kill(client) {
        await client.close();
        this.drop(client.config);
    }
}
|
package/esm/roles.js
DELETED
|
@@ -1,32 +0,0 @@
|
|
|
1
|
-
/**
 * Default role mapping configuration
 */
export const DEFAULT_ROLE_MAPPING = {
    anonymous: 'anonymous',
    authenticated: 'authenticated',
    administrator: 'administrator',
    default: 'anonymous'
};
/**
 * Resolve the effective role mapping: caller overrides layered on defaults.
 */
export const getRoleMapping = (options) => ({
    ...DEFAULT_ROLE_MAPPING,
    ...(options?.roles || {})
});
/**
 * Look up a role name by key, falling back to the default mapping.
 */
export const getRoleName = (roleKey, options) => getRoleMapping(options)[roleKey];
/**
 * Name of the role used when no explicit role is requested.
 */
export const getDefaultRole = (options) => getRoleMapping(options).default;
|
package/esm/seed/adapters.js
DELETED
|
@@ -1,23 +0,0 @@
|
|
|
1
|
-
/**
 * Seed adapter that loads SQL files through the admin's psql-based loader,
 * in the order given.
 * @param files - Paths of SQL files to execute
 */
export function sqlfile(files) {
    return {
        seed(ctx) {
            files.forEach((file) => ctx.admin.loadSql(file, ctx.config.database));
        }
    };
}
/**
 * Wrap a bare seeding function in the adapter interface.
 */
export function fn(fn) {
    return { seed: fn };
}
/**
 * Combine several adapters into one that runs them sequentially, awaiting
 * each before starting the next.
 */
export function compose(adapters) {
    return {
        async seed(ctx) {
            for (const adapter of adapters) {
                await adapter.seed(ctx);
            }
        }
    };
}
|
package/esm/seed/csv.js
DELETED
|
@@ -1,108 +0,0 @@
|
|
|
1
|
-
import { pipeline } from 'node:stream/promises';
|
|
2
|
-
import { Logger } from '@pgpmjs/logger';
|
|
3
|
-
import { parse } from 'csv-parse';
|
|
4
|
-
import { createReadStream, createWriteStream, existsSync } from 'fs';
|
|
5
|
-
import { from as copyFrom, to as copyTo } from 'pg-copy-streams';
|
|
6
|
-
const log = new Logger('csv');
|
|
7
|
-
/**
 * Copy one CSV file into a table on an already-connected pg client using
 * COPY ... FROM STDIN. Column names are read from the CSV header row and
 * quoted as identifiers. Shared implementation behind loadCsvMap and
 * copyCsvIntoTable (previously duplicated verbatim in both).
 * @param client - Raw pg client supporting query(copyFrom(...))
 * @param table - Target table name
 * @param filePath - Path to the CSV file
 * @throws Rethrows any COPY/stream failure after logging it.
 */
async function copyFromCsv(client, table, filePath) {
    const columns = await parseCsvHeader(filePath);
    // Quote each column as an identifier, doubling embedded quotes.
    const quotedColumns = columns.map(col => `"${col.replace(/"/g, '""')}"`);
    const columnList = quotedColumns.join(', ');
    const copyCommand = `COPY ${table} (${columnList}) FROM STDIN WITH CSV HEADER`;
    log.info(`Using columns: ${columnList}`);
    const stream = client.query(copyFrom(copyCommand));
    const source = createReadStream(filePath);
    try {
        await pipeline(source, stream);
        log.success(`✅ Successfully seeded "${table}"`);
    }
    catch (err) {
        log.error(`❌ COPY failed for "${table}": ${err.message}`);
        throw err;
    }
}
/**
 * Standalone helper function to load CSV files into PostgreSQL tables
 * @param client - PostgreSQL client instance
 * @param tables - Map of table names to CSV file paths
 * @throws {Error} If any CSV file does not exist
 */
export async function loadCsvMap(client, tables) {
    for (const [table, filePath] of Object.entries(tables)) {
        if (!existsSync(filePath)) {
            throw new Error(`CSV file not found: ${filePath}`);
        }
        log.info(`📥 Seeding "${table}" from ${filePath}`);
        await copyFromCsv(client, table, filePath);
    }
}
/**
 * Seed adapter that loads CSV files into tables via the test client.
 * @param tables - Map of table names to CSV file paths
 */
export function csv(tables) {
    return {
        async seed(ctx) {
            for (const [table, filePath] of Object.entries(tables)) {
                if (!existsSync(filePath)) {
                    throw new Error(`CSV file not found: ${filePath}`);
                }
                log.info(`📥 Seeding "${table}" from ${filePath}`);
                await copyCsvIntoTable(ctx.pg, table, filePath);
            }
        }
    };
}
/**
 * Read just the header row of a CSV file and resolve with its column names.
 * The parser and file stream are destroyed as soon as the first row is read.
 * @throws {Error} If the header row has no columns, or on parse/read errors.
 */
async function parseCsvHeader(filePath) {
    const file = createReadStream(filePath);
    const parser = parse({
        bom: true,
        to_line: 1,
        skip_empty_lines: true,
    });
    return new Promise((resolve, reject) => {
        const cleanup = (err) => {
            parser.destroy();
            file.destroy();
            if (err)
                reject(err);
        };
        parser.on('readable', () => {
            const row = parser.read();
            if (!row)
                return;
            if (row.length === 0) {
                cleanup(new Error('CSV header has no columns'));
                return;
            }
            cleanup();
            resolve(row);
        });
        parser.on('error', cleanup);
        file.on('error', cleanup);
        file.pipe(parser);
    });
}
/**
 * Copy a CSV file into a table using the test client's underlying raw client.
 */
export async function copyCsvIntoTable(pg, table, filePath) {
    await copyFromCsv(pg.client, table, filePath);
}
/**
 * Export a table to a CSV file (with header) using COPY ... TO STDOUT.
 * @throws Rethrows any COPY/stream failure after logging it.
 */
export async function exportTableToCsv(pg, table, filePath) {
    const client = pg.client;
    const stream = client.query(copyTo(`COPY ${table} TO STDOUT WITH CSV HEADER`));
    const target = createWriteStream(filePath);
    try {
        await pipeline(stream, target);
        log.success(`✅ Exported "${table}" to ${filePath}`);
    }
    catch (err) {
        log.error(`❌ Failed to export "${table}": ${err.message}`);
        throw err;
    }
}
|
package/esm/seed/index.js
DELETED
|
@@ -1,14 +0,0 @@
|
|
|
1
|
-
import { compose, fn, sqlfile } from './adapters';
|
|
2
|
-
import { csv } from './csv';
|
|
3
|
-
import { json } from './json';
|
|
4
|
-
import { pgpm } from './pgpm';
|
|
5
|
-
export * from './csv';
|
|
6
|
-
export * from './types';
|
|
7
|
-
/**
 * All built-in seed adapters gathered into a single namespace so callers
 * can write e.g. `seed.csv({...})`, `seed.json({...})`, or
 * `seed.compose([...])`.
 */
export const seed = {
    pgpm,
    json,
    csv,
    compose,
    fn,
    sqlfile
};
|
package/esm/seed/json.js
DELETED
|
@@ -1,36 +0,0 @@
|
|
|
1
|
-
/**
 * Insert all rows of a table-name → rows map using parameterized INSERTs.
 * Column names are taken from the first row of each table; entries whose
 * value is not a non-empty array are skipped. Shared by insertJson and the
 * json() adapter.
 * @param queryable - Anything exposing query(sql, values)
 * @param data - Map of table names to arrays of row objects
 */
async function insertRows(queryable, data) {
    for (const [table, rows] of Object.entries(data)) {
        if (!Array.isArray(rows) || rows.length === 0)
            continue;
        const columns = Object.keys(rows[0]);
        const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
        const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
        for (const row of rows) {
            await queryable.query(sql, columns.map((c) => row[c]));
        }
    }
}
/**
 * Standalone helper function to insert JSON data into PostgreSQL tables
 * @param client - PostgreSQL client instance
 * @param data - Map of table names to arrays of row objects
 */
export async function insertJson(client, data) {
    await insertRows(client, data);
}
/**
 * Seed adapter that inserts the given JSON data through the test client.
 */
export function json(data) {
    return {
        async seed(ctx) {
            await insertRows(ctx.pg, data);
        }
    };
}
|
package/esm/seed/pgpm.js
DELETED
|
@@ -1,28 +0,0 @@
|
|
|
1
|
-
import { PgpmPackage } from '@pgpmjs/core';
|
|
2
|
-
import { getEnvOptions } from '@pgpmjs/env';
|
|
3
|
-
/**
 * Standalone helper function to deploy a pgpm package into the given database.
 * No-ops when the working directory is not inside a pgpm module.
 * @param config - PostgreSQL configuration
 * @param cwd - Current working directory (defaults to process.cwd())
 * @param cache - Whether to enable caching (defaults to false)
 */
export async function deployPgpm(config, cwd, cache = false) {
    const proj = new PgpmPackage(cwd ?? process.cwd());
    if (!proj.isInModule()) {
        return;
    }
    const options = getEnvOptions({
        pg: config,
        deployment: {
            fast: true,
            usePlan: true,
            cache
        }
    });
    await proj.deploy(options, proj.getModuleName());
}
/**
 * Seed adapter that deploys the surrounding pgpm module into the test
 * database. Falls back to the connect context's cwd when none is given.
 */
export function pgpm(cwd, cache = false) {
    return {
        async seed(ctx) {
            await deployPgpm(ctx.config, cwd ?? ctx.connect.cwd, cache);
        }
    };
}
|
package/esm/seed/sql.js
DELETED
|
@@ -1,15 +0,0 @@
|
|
|
1
|
-
import { existsSync, readFileSync } from 'fs';
|
|
2
|
-
/**
 * Standalone helper function to load SQL files into PostgreSQL
 * @param client - PostgreSQL client instance
 * @param files - Array of SQL file paths to execute, in order
 * @throws {Error} If a file does not exist (files before it are still executed)
 */
export async function loadSqlFiles(client, files) {
    for (const path of files) {
        if (!existsSync(path)) {
            throw new Error(`SQL file not found: ${path}`);
        }
        const contents = readFileSync(path, 'utf-8');
        await client.query(contents);
    }
}
|
package/esm/seed/types.js
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
// Type-only module: every declaration is erased at compile time, so the
// emitted JavaScript intentionally exports nothing.
export {};
|