supabase-test 0.0.1 → 0.0.4
This diff shows the content of publicly available package versions as released to their respective public registries. It is provided for informational purposes only.
- package/README.md +58 -410
- package/connect.d.ts +11 -19
- package/connect.js +62 -73
- package/esm/connect.js +62 -72
- package/esm/index.js +4 -6
- package/index.d.ts +3 -6
- package/index.js +6 -6
- package/package.json +9 -21
- package/admin.d.ts +0 -26
- package/admin.js +0 -182
- package/dist/README.md +0 -557
- package/dist/package.json +0 -72
- package/esm/admin.js +0 -178
- package/esm/manager.js +0 -136
- package/esm/roles.js +0 -32
- package/esm/seed/adapters.js +0 -23
- package/esm/seed/csv.js +0 -44
- package/esm/seed/index.js +0 -16
- package/esm/seed/json.js +0 -18
- package/esm/seed/launchql.js +0 -19
- package/esm/seed/sqitch.js +0 -17
- package/esm/seed/types.js +0 -1
- package/esm/stream.js +0 -43
- package/esm/test-client.js +0 -150
- package/manager.d.ts +0 -25
- package/manager.js +0 -140
- package/roles.d.ts +0 -17
- package/roles.js +0 -38
- package/seed/adapters.d.ts +0 -4
- package/seed/adapters.js +0 -28
- package/seed/csv.d.ts +0 -9
- package/seed/csv.js +0 -49
- package/seed/index.d.ts +0 -16
- package/seed/index.js +0 -33
- package/seed/json.d.ts +0 -6
- package/seed/json.js +0 -21
- package/seed/launchql.d.ts +0 -2
- package/seed/launchql.js +0 -22
- package/seed/sqitch.d.ts +0 -2
- package/seed/sqitch.js +0 -20
- package/seed/types.d.ts +0 -13
- package/seed/types.js +0 -2
- package/stream.d.ts +0 -2
- package/stream.js +0 -46
- package/test-client.d.ts +0 -49
- package/test-client.js +0 -154
package/esm/admin.js
DELETED
@@ -1,178 +0,0 @@
-import { Logger } from '@launchql/logger';
-import { execSync } from 'child_process';
-import { existsSync } from 'fs';
-import { getPgEnvOptions } from 'pg-env';
-import { getRoleName } from './roles';
-import { streamSql as stream } from './stream';
-const log = new Logger('db-admin');
-export class DbAdmin {
-    config;
-    verbose;
-    roleConfig;
-    constructor(config, verbose = false, roleConfig) {
-        this.config = config;
-        this.verbose = verbose;
-        this.roleConfig = roleConfig;
-        this.config = getPgEnvOptions(config);
-    }
-    getEnv() {
-        return {
-            PGHOST: this.config.host,
-            PGPORT: String(this.config.port),
-            PGUSER: this.config.user,
-            PGPASSWORD: this.config.password
-        };
-    }
-    run(command) {
-        try {
-            execSync(command, {
-                stdio: this.verbose ? 'inherit' : 'pipe',
-                env: {
-                    ...process.env,
-                    ...this.getEnv()
-                }
-            });
-            if (this.verbose)
-                log.success(`Executed: ${command}`);
-        }
-        catch (err) {
-            log.error(`Command failed: ${command}`);
-            if (this.verbose)
-                log.error(err.message);
-            throw err;
-        }
-    }
-    safeDropDb(name) {
-        try {
-            this.run(`dropdb "${name}"`);
-        }
-        catch (err) {
-            if (!err.message.includes('does not exist')) {
-                log.warn(`Could not drop database ${name}: ${err.message}`);
-            }
-        }
-    }
-    drop(dbName) {
-        this.safeDropDb(dbName ?? this.config.database);
-    }
-    dropTemplate(dbName) {
-        this.run(`psql -c "UPDATE pg_database SET datistemplate='false' WHERE datname='${dbName}';"`);
-        this.drop(dbName);
-    }
-    create(dbName) {
-        const db = dbName ?? this.config.database;
-        this.run(`createdb -U ${this.config.user} -h ${this.config.host} -p ${this.config.port} "${db}"`);
-    }
-    createFromTemplate(template, dbName) {
-        const db = dbName ?? this.config.database;
-        this.run(`createdb -U ${this.config.user} -h ${this.config.host} -p ${this.config.port} -e "${db}" -T "${template}"`);
-    }
-    installExtensions(extensions, dbName) {
-        const db = dbName ?? this.config.database;
-        const extList = typeof extensions === 'string' ? extensions.split(',') : extensions;
-        for (const extension of extList) {
-            this.run(`psql --dbname "${db}" -c 'CREATE EXTENSION IF NOT EXISTS "${extension}" CASCADE;'`);
-        }
-    }
-    connectionString(dbName) {
-        const { user, password, host, port } = this.config;
-        const db = dbName ?? this.config.database;
-        return `postgres://${user}:${password}@${host}:${port}/${db}`;
-    }
-    createTemplateFromBase(base, template) {
-        this.run(`createdb -T "${base}" "${template}"`);
-        this.run(`psql -c "UPDATE pg_database SET datistemplate = true WHERE datname = '${template}';"`);
-    }
-    cleanupTemplate(template) {
-        try {
-            this.run(`psql -c "UPDATE pg_database SET datistemplate = false WHERE datname = '${template}'"`);
-        }
-        catch {
-            log.warn(`Skipping failed UPDATE of datistemplate for ${template}`);
-        }
-        this.safeDropDb(template);
-    }
-    async grantRole(role, user, dbName) {
-        const db = dbName ?? this.config.database;
-        const sql = `GRANT ${role} TO ${user};`;
-        await this.streamSql(sql, db);
-    }
-    async grantConnect(role, dbName) {
-        const db = dbName ?? this.config.database;
-        const sql = `GRANT CONNECT ON DATABASE "${db}" TO ${role};`;
-        await this.streamSql(sql, db);
-    }
-    // TODO: make adminRole a configurable option
-    // ONLY granting admin role for testing purposes, normally the db connection for apps won't have admin role
-    // DO NOT USE THIS FOR PRODUCTION
-    async createUserRole(user, password, dbName) {
-        const anonRole = getRoleName('anonymous', this.roleConfig);
-        const authRole = getRoleName('authenticated', this.roleConfig);
-        const adminRole = getRoleName('administrator', this.roleConfig);
-        const sql = `
-    DO $$
-    BEGIN
-      -- Create role if it doesn't exist
-      IF NOT EXISTS (SELECT 1 FROM pg_roles WHERE rolname = '${user}') THEN
-        CREATE ROLE ${user} LOGIN PASSWORD '${password}';
-      END IF;
-
-      -- Grant anonymous role if not already granted
-      IF NOT EXISTS (
-        SELECT 1 FROM pg_auth_members am
-        JOIN pg_roles r1 ON am.roleid = r1.oid
-        JOIN pg_roles r2 ON am.member = r2.oid
-        WHERE r1.rolname = '${anonRole}' AND r2.rolname = '${user}'
-      ) THEN
-        GRANT ${anonRole} TO ${user};
-      END IF;
-
-      -- Grant authenticated role if not already granted
-      IF NOT EXISTS (
-        SELECT 1 FROM pg_auth_members am
-        JOIN pg_roles r1 ON am.roleid = r1.oid
-        JOIN pg_roles r2 ON am.member = r2.oid
-        WHERE r1.rolname = '${authRole}' AND r2.rolname = '${user}'
-      ) THEN
-        GRANT ${authRole} TO ${user};
-      END IF;
-
-      -- Grant administrator role if not already granted
-      IF NOT EXISTS (
-        SELECT 1 FROM pg_auth_members am
-        JOIN pg_roles r1 ON am.roleid = r1.oid
-        JOIN pg_roles r2 ON am.member = r2.oid
-        WHERE r1.rolname = '${adminRole}' AND r2.rolname = '${user}'
-      ) THEN
-        GRANT ${adminRole} TO ${user};
-      END IF;
-    END $$;
-    `.trim();
-        await this.streamSql(sql, dbName);
-    }
-    loadSql(file, dbName) {
-        if (!existsSync(file)) {
-            throw new Error(`Missing SQL file: ${file}`);
-        }
-        this.run(`psql -f ${file} ${dbName}`);
-    }
-    async streamSql(sql, dbName) {
-        await stream({
-            ...this.config,
-            database: dbName
-        }, sql);
-    }
-    async createSeededTemplate(templateName, adapter) {
-        const seedDb = this.config.database;
-        this.create(seedDb);
-        await adapter.seed({
-            admin: this,
-            config: this.config,
-            pg: null, // placeholder for PgTestClient
-            connect: null // placeholder for connection factory
-        });
-        this.cleanupTemplate(templateName);
-        this.createTemplateFromBase(seedDb, templateName);
-        this.drop(seedDb);
-    }
-}
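For orientation, a minimal usage sketch of the removed DbAdmin class, written against the deleted code above; the import path, connection settings, and database name are illustrative, not taken from the package:

    import { DbAdmin } from './admin';

    // Hypothetical config; the constructor merges it with PG* env vars via getPgEnvOptions().
    const admin = new DbAdmin({ database: 'my_test_db' }, /* verbose */ true);

    admin.create();                                                  // createdb "my_test_db"
    admin.installExtensions(['uuid-ossp'], 'my_test_db');            // CREATE EXTENSION IF NOT EXISTS ... CASCADE
    await admin.createUserRole('app_user', 'secret', 'my_test_db');  // test-only grants (see warning comments above)
    console.log(admin.connectionString());                           // postgres://user:password@host:port/my_test_db
    admin.drop();                                                    // dropdb, ignoring "does not exist"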
package/esm/manager.js
DELETED
@@ -1,136 +0,0 @@
-import { Logger } from '@launchql/logger';
-import { Pool } from 'pg';
-import { getPgEnvOptions } from 'pg-env';
-import { DbAdmin } from './admin';
-import { PgTestClient } from './test-client';
-const log = new Logger('test-connector');
-const SYS_EVENTS = ['SIGTERM'];
-const end = (pool) => {
-    try {
-        if (pool.ended || pool.ending) {
-            log.warn('⚠️ pg pool already ended or ending');
-            return;
-        }
-        pool.end();
-    }
-    catch (err) {
-        log.error('❌ pg pool termination error:', err);
-    }
-};
-export class PgTestConnector {
-    static instance;
-    clients = new Set();
-    pgPools = new Map();
-    seenDbConfigs = new Map();
-    pendingConnects = new Set();
-    verbose = false;
-    shuttingDown = false;
-    constructor(verbose = false) {
-        this.verbose = verbose;
-        SYS_EVENTS.forEach((event) => {
-            process.on(event, () => {
-                log.info(`⏹ Received ${event}, closing all connections...`);
-                this.closeAll();
-            });
-        });
-    }
-    static getInstance(verbose = false) {
-        if (!PgTestConnector.instance) {
-            PgTestConnector.instance = new PgTestConnector(verbose);
-        }
-        return PgTestConnector.instance;
-    }
-    poolKey(config) {
-        return `${config.user}@${config.host}:${config.port}/${config.database}`;
-    }
-    dbKey(config) {
-        return `${config.host}:${config.port}/${config.database}`;
-    }
-    beginTeardown() {
-        this.shuttingDown = true;
-    }
-    registerConnect(p) {
-        this.pendingConnects.add(p);
-        p.finally(() => this.pendingConnects.delete(p));
-    }
-    async awaitPendingConnects() {
-        const arr = Array.from(this.pendingConnects);
-        if (arr.length) {
-            await Promise.allSettled(arr);
-        }
-    }
-    getPool(config) {
-        const key = this.poolKey(config);
-        if (!this.pgPools.has(key)) {
-            const pool = new Pool(config);
-            this.pgPools.set(key, pool);
-            log.info(`📘 Created new pg pool: ${key}`);
-        }
-        return this.pgPools.get(key);
-    }
-    getClient(config, opts = {}) {
-        if (this.shuttingDown) {
-            throw new Error('PgTestConnector is shutting down; no new clients allowed');
-        }
-        const client = new PgTestClient(config, {
-            trackConnect: (p) => this.registerConnect(p),
-            ...opts
-        });
-        this.clients.add(client);
-        const key = this.dbKey(config);
-        this.seenDbConfigs.set(key, config);
-        log.info(`🔌 New PgTestClient connected to ${config.database}`);
-        return client;
-    }
-    async closeAll() {
-        this.beginTeardown();
-        await this.awaitPendingConnects();
-        log.info('🧹 Closing all PgTestClients...');
-        await Promise.all(Array.from(this.clients).map(async (client) => {
-            try {
-                await client.close();
-                log.success(`✅ Closed client for ${client.config.database}`);
-            }
-            catch (err) {
-                log.error(`❌ Error closing PgTestClient for ${client.config.database}:`, err);
-            }
-        }));
-        this.clients.clear();
-        log.info('🧯 Disposing pg pools...');
-        for (const [key, pool] of this.pgPools.entries()) {
-            log.debug(`🧯 Disposing pg pool [${key}]`);
-            end(pool);
-        }
-        this.pgPools.clear();
-        log.info('🗑️ Dropping seen databases...');
-        await Promise.all(Array.from(this.seenDbConfigs.values()).map(async (config) => {
-            try {
-                const rootPg = getPgEnvOptions();
-                const admin = new DbAdmin({ ...config, user: rootPg.user, password: rootPg.password }, this.verbose);
-                admin.drop();
-                log.warn(`🧨 Dropped database: ${config.database}`);
-            }
-            catch (err) {
-                log.error(`❌ Failed to drop database ${config.database}:`, err);
-            }
-        }));
-        this.seenDbConfigs.clear();
-        log.success('✅ All PgTestClients closed, pools disposed, databases dropped.');
-        this.pendingConnects.clear();
-        this.shuttingDown = false;
-    }
-    close() {
-        this.closeAll();
-    }
-    drop(config) {
-        const key = this.dbKey(config);
-        const admin = new DbAdmin(config, this.verbose);
-        admin.drop();
-        log.warn(`🧨 Dropped database: ${config.database}`);
-        this.seenDbConfigs.delete(key);
-    }
-    async kill(client) {
-        await client.close();
-        this.drop(client.config);
-    }
-}
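A brief sketch of how the removed PgTestConnector singleton was used, assuming an async test setup; the connection config is illustrative:

    import { PgTestConnector } from './manager';

    const manager = PgTestConnector.getInstance();

    // Each getClient() call registers the database so closeAll() can drop it later.
    const pg = manager.getClient({
      host: 'localhost', port: 5432, database: 'test_db_1',
      user: 'postgres', password: 'password'
    });

    await pg.query('SELECT 1');

    // Closes clients, disposes pools, then drops every database it has seen.
    await manager.closeAll();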
package/esm/roles.js
DELETED
@@ -1,32 +0,0 @@
-/**
- * Default role mapping configuration
- */
-export const DEFAULT_ROLE_MAPPING = {
-    anonymous: 'anonymous',
-    authenticated: 'authenticated',
-    administrator: 'administrator',
-    default: 'anonymous'
-};
-/**
- * Get resolved role mapping with defaults
- */
-export const getRoleMapping = (options) => {
-    return {
-        ...DEFAULT_ROLE_MAPPING,
-        ...(options?.roles || {})
-    };
-};
-/**
- * Get role name by key with fallback to default mapping
- */
-export const getRoleName = (roleKey, options) => {
-    const mapping = getRoleMapping(options);
-    return mapping[roleKey];
-};
-/**
- * Get default role name
- */
-export const getDefaultRole = (options) => {
-    const mapping = getRoleMapping(options);
-    return mapping.default;
-};
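A short sketch of the removed role-mapping helpers; the override object is illustrative:

    import { getRoleName, getDefaultRole } from './roles';

    getRoleName('authenticated');                                         // 'authenticated' (built-in mapping)
    getRoleName('administrator', { roles: { administrator: 'admin' } });  // 'admin' (override wins)
    getDefaultRole();                                                     // 'anonymous'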
package/esm/seed/adapters.js
DELETED
@@ -1,23 +0,0 @@
-export function sqlfile(files) {
-    return {
-        seed(ctx) {
-            for (const file of files) {
-                ctx.admin.loadSql(file, ctx.config.database);
-            }
-        }
-    };
-}
-export function fn(fn) {
-    return {
-        seed: fn
-    };
-}
-export function compose(adapters) {
-    return {
-        async seed(ctx) {
-            for (const adapter of adapters) {
-                await adapter.seed(ctx);
-            }
-        }
-    };
-}
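A minimal sketch combining the removed adapter helpers; the file paths and the SQL in the callback are illustrative, and ctx is the object DbAdmin.createSeededTemplate() passes to seed():

    import { compose, fn, sqlfile } from './adapters';

    const adapter = compose([
      // Runs each file through psql -f via ctx.admin.loadSql()
      sqlfile(['./sql/schema.sql', './sql/fixtures.sql']),
      // Arbitrary async seeding logic
      fn(async (ctx) => {
        await ctx.admin.streamSql("INSERT INTO settings (key) VALUES ('seeded');", ctx.config.database);
      })
    ]);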
package/esm/seed/csv.js
DELETED
@@ -1,44 +0,0 @@
-import { pipeline } from 'node:stream/promises';
-import { Logger } from '@launchql/logger';
-import { createReadStream, createWriteStream, existsSync } from 'fs';
-import { from as copyFrom, to as copyTo } from 'pg-copy-streams';
-const log = new Logger('csv');
-export function csv(tables) {
-    return {
-        async seed(ctx) {
-            for (const [table, filePath] of Object.entries(tables)) {
-                if (!existsSync(filePath)) {
-                    throw new Error(`CSV file not found: ${filePath}`);
-                }
-                log.info(`📥 Seeding "${table}" from ${filePath}`);
-                await copyCsvIntoTable(ctx.pg, table, filePath);
-            }
-        }
-    };
-}
-export async function copyCsvIntoTable(pg, table, filePath) {
-    const client = pg.client;
-    const stream = client.query(copyFrom(`COPY ${table} FROM STDIN WITH CSV HEADER`));
-    const source = createReadStream(filePath);
-    try {
-        await pipeline(source, stream);
-        log.success(`✅ Successfully seeded "${table}"`);
-    }
-    catch (err) {
-        log.error(`❌ COPY failed for "${table}": ${err.message}`);
-        throw err;
-    }
-}
-export async function exportTableToCsv(pg, table, filePath) {
-    const client = pg.client;
-    const stream = client.query(copyTo(`COPY ${table} TO STDOUT WITH CSV HEADER`));
-    const target = createWriteStream(filePath);
-    try {
-        await pipeline(stream, target);
-        log.success(`✅ Exported "${table}" to ${filePath}`);
-    }
-    catch (err) {
-        log.error(`❌ Failed to export "${table}": ${err.message}`);
-        throw err;
-    }
-}
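A sketch of the removed csv adapter; the table-to-file mapping is illustrative, and each file needs a header row because the COPY command above uses WITH CSV HEADER:

    import { csv } from './csv';

    const adapter = csv({
      users: './fixtures/users.csv',
      orders: './fixtures/orders.csv'
    });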
package/esm/seed/index.js
DELETED
@@ -1,16 +0,0 @@
-import { compose, fn, sqlfile } from './adapters';
-import { csv } from './csv';
-import { json } from './json';
-import { launchql } from './launchql';
-import { sqitch } from './sqitch';
-export * from './csv';
-export * from './types';
-export const seed = {
-    launchql,
-    sqitch,
-    json,
-    csv,
-    compose,
-    fn,
-    sqlfile
-};
package/esm/seed/json.js
DELETED
@@ -1,18 +0,0 @@
-export function json(data) {
-    return {
-        async seed(ctx) {
-            const { pg } = ctx;
-            for (const [table, rows] of Object.entries(data)) {
-                if (!Array.isArray(rows) || rows.length === 0)
-                    continue;
-                const columns = Object.keys(rows[0]);
-                const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
-                const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
-                for (const row of rows) {
-                    const values = columns.map((c) => row[c]);
-                    await pg.query(sql, values);
-                }
-            }
-        }
-    };
-}
package/esm/seed/launchql.js
DELETED
@@ -1,19 +0,0 @@
-import { LaunchQLPackage } from '@launchql/core';
-import { getEnvOptions } from '@launchql/env';
-export function launchql(cwd, cache = false) {
-    return {
-        async seed(ctx) {
-            const proj = new LaunchQLPackage(cwd ?? ctx.connect.cwd);
-            if (!proj.isInModule())
-                return;
-            await proj.deploy(getEnvOptions({
-                pg: ctx.config,
-                deployment: {
-                    fast: true,
-                    usePlan: true,
-                    cache
-                }
-            }), proj.getModuleName());
-        }
-    };
-}
package/esm/seed/sqitch.js
DELETED
@@ -1,17 +0,0 @@
-import { LaunchQLPackage } from '@launchql/core';
-import { getEnvOptions } from '@launchql/env';
-export function sqitch(cwd) {
-    return {
-        async seed(ctx) {
-            const proj = new LaunchQLPackage(cwd ?? ctx.connect.cwd);
-            if (!proj.isInModule())
-                return;
-            await proj.deploy(getEnvOptions({
-                pg: ctx.config,
-                deployment: {
-                    fast: false
-                }
-            }), proj.getModuleName(), true);
-        }
-    };
-}
package/esm/seed/types.js
DELETED
@@ -1 +0,0 @@
-export {};
package/esm/stream.js
DELETED
@@ -1,43 +0,0 @@
-import { spawn } from 'child_process';
-import { getSpawnEnvWithPg } from 'pg-env';
-import { Readable } from 'stream';
-function setArgs(config) {
-    const args = [
-        '-U', config.user,
-        '-h', config.host,
-        '-d', config.database
-    ];
-    if (config.port) {
-        args.push('-p', String(config.port));
-    }
-    return args;
-}
-// Converts a string to a readable stream (replaces streamify-string)
-function stringToStream(text) {
-    const stream = new Readable({
-        read() {
-            this.push(text);
-            this.push(null);
-        }
-    });
-    return stream;
-}
-export async function streamSql(config, sql) {
-    const args = setArgs(config);
-    return new Promise((resolve, reject) => {
-        const sqlStream = stringToStream(sql);
-        const proc = spawn('psql', args, {
-            env: getSpawnEnvWithPg(config)
-        });
-        sqlStream.pipe(proc.stdin);
-        proc.on('close', (code) => {
-            resolve();
-        });
-        proc.on('error', (error) => {
-            reject(error);
-        });
-        proc.stderr.on('data', (data) => {
-            reject(new Error(data.toString()));
-        });
-    });
-}
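A sketch of the removed streamSql helper, which pipes the SQL text to a spawned psql process over stdin; the connection settings are illustrative:

    import { streamSql } from './stream';

    await streamSql(
      { host: 'localhost', port: 5432, user: 'postgres', password: 'password', database: 'test_db' },
      'CREATE TABLE IF NOT EXISTS ping (id serial PRIMARY KEY);'
    );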
package/esm/test-client.js
DELETED
@@ -1,150 +0,0 @@
-import { Client } from 'pg';
-import { getRoleName } from './roles';
-export class PgTestClient {
-    config;
-    client;
-    opts;
-    ctxStmts = '';
-    contextSettings = {};
-    _ended = false;
-    connectPromise = null;
-    constructor(config, opts = {}) {
-        this.opts = opts;
-        this.config = config;
-        this.client = new Client({
-            host: this.config.host,
-            port: this.config.port,
-            database: this.config.database,
-            user: this.config.user,
-            password: this.config.password
-        });
-        if (!opts.deferConnect) {
-            this.connectPromise = this.client.connect();
-            if (opts.trackConnect)
-                opts.trackConnect(this.connectPromise);
-        }
-    }
-    async ensureConnected() {
-        if (this.connectPromise) {
-            try {
-                await this.connectPromise;
-            }
-            catch { }
-        }
-    }
-    async close() {
-        if (!this._ended) {
-            this._ended = true;
-            await this.ensureConnected();
-            this.client.end();
-        }
-    }
-    async begin() {
-        await this.client.query('BEGIN;');
-    }
-    async savepoint(name = 'lqlsavepoint') {
-        await this.client.query(`SAVEPOINT "${name}";`);
-    }
-    async rollback(name = 'lqlsavepoint') {
-        await this.client.query(`ROLLBACK TO SAVEPOINT "${name}";`);
-    }
-    async commit() {
-        await this.client.query('COMMIT;');
-    }
-    async beforeEach() {
-        await this.begin();
-        await this.savepoint();
-    }
-    async afterEach() {
-        await this.rollback();
-        await this.commit();
-    }
-    setContext(ctx) {
-        Object.assign(this.contextSettings, ctx);
-        this.ctxStmts = Object.entries(this.contextSettings)
-            .map(([key, val]) => val === null
-            ? `SELECT set_config('${key}', NULL, true);`
-            : `SELECT set_config('${key}', '${val}', true);`)
-            .join('\n');
-    }
-    /**
-     * Set authentication context for the current session.
-     * Configures role and user ID using cascading defaults from options → opts.auth → RoleMapping.
-     */
-    auth(options = {}) {
-        const role = options.role ?? this.opts.auth?.role ?? getRoleName('authenticated', this.opts);
-        const userIdKey = options.userIdKey ?? this.opts.auth?.userIdKey ?? 'jwt.claims.user_id';
-        const userId = options.userId ?? this.opts.auth?.userId ?? null;
-        this.setContext({
-            role,
-            [userIdKey]: userId !== null ? String(userId) : null
-        });
-    }
-    /**
-     * Commit current transaction to make data visible to other connections, then start fresh transaction.
-     * Maintains test isolation by creating a savepoint and reapplying session context.
-     */
-    async publish() {
-        await this.commit(); // make data visible to other sessions
-        await this.begin(); // fresh tx
-        await this.savepoint(); // keep rollback harness
-        await this.ctxQuery(); // reapply all setContext()
-    }
-    /**
-     * Clear all session context variables and reset to default anonymous role.
-     */
-    clearContext() {
-        const defaultRole = getRoleName('anonymous', this.opts);
-        const nulledSettings = {};
-        Object.keys(this.contextSettings).forEach(key => {
-            nulledSettings[key] = null;
-        });
-        nulledSettings.role = defaultRole;
-        this.ctxStmts = Object.entries(nulledSettings)
-            .map(([key, val]) => val === null
-            ? `SELECT set_config('${key}', NULL, true);`
-            : `SELECT set_config('${key}', '${val}', true);`)
-            .join('\n');
-        this.contextSettings = { role: defaultRole };
-    }
-    async any(query, values) {
-        const result = await this.query(query, values);
-        return result.rows;
-    }
-    async one(query, values) {
-        const rows = await this.any(query, values);
-        if (rows.length !== 1) {
-            throw new Error('Expected exactly one result');
-        }
-        return rows[0];
-    }
-    async oneOrNone(query, values) {
-        const rows = await this.any(query, values);
-        return rows[0] || null;
-    }
-    async many(query, values) {
-        const rows = await this.any(query, values);
-        if (rows.length === 0)
-            throw new Error('Expected many rows, got none');
-        return rows;
-    }
-    async manyOrNone(query, values) {
-        return this.any(query, values);
-    }
-    async none(query, values) {
-        await this.query(query, values);
-    }
-    async result(query, values) {
-        return this.query(query, values);
-    }
-    async query(query, values) {
-        await this.ctxQuery();
-        const result = await this.client.query(query, values);
-        return result;
-    }
-    async ctxQuery() {
-        if (this.ctxStmts) {
-            await this.client.query(this.ctxStmts);
-        }
-    }
-}
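Finally, a sketch of the removed PgTestClient in a Jest-style test, assuming the savepoint-per-test pattern its beforeEach/afterEach helpers implement; the connection settings, roles, and assertion are illustrative:

    import { PgTestClient } from './test-client';

    const pg = new PgTestClient({
      host: 'localhost', port: 5432, database: 'test_db',
      user: 'postgres', password: 'password'
    });

    beforeEach(() => pg.beforeEach());   // BEGIN + SAVEPOINT
    afterEach(() => pg.afterEach());     // ROLLBACK TO SAVEPOINT + COMMIT
    afterAll(() => pg.close());

    it('runs queries with an authenticated context', async () => {
      pg.auth({ userId: 1 });            // set_config for role + jwt.claims.user_id
      const row = await pg.one("SELECT current_setting('jwt.claims.user_id', true) AS uid");
      expect(row.uid).toBe('1');
    });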