pgsql-test 2.0.1 → 2.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +172 -27
- package/admin.d.ts +1 -1
- package/admin.js +2 -1
- package/connect.d.ts +2 -2
- package/connect.js +12 -35
- package/esm/admin.js +2 -1
- package/esm/connect.js +13 -36
- package/esm/index.js +5 -2
- package/esm/{seed.js → seed/adapters.js} +3 -15
- package/esm/seed/csv.js +43 -0
- package/esm/seed/index.js +15 -0
- package/esm/seed/json.js +18 -0
- package/esm/seed/launchql.js +20 -0
- package/esm/seed/sqitch.js +13 -0
- package/esm/seed/types.js +1 -0
- package/index.d.ts +5 -2
- package/index.js +5 -2
- package/package.json +11 -5
- package/seed/adapters.d.ts +4 -0
- package/{seed.js → seed/adapters.js} +3 -13
- package/seed/csv.d.ts +9 -0
- package/seed/csv.js +48 -0
- package/seed/index.d.ts +15 -0
- package/seed/index.js +32 -0
- package/seed/json.d.ts +6 -0
- package/seed/json.js +21 -0
- package/seed/launchql.d.ts +2 -0
- package/seed/launchql.js +23 -0
- package/seed/sqitch.d.ts +2 -0
- package/seed/sqitch.js +16 -0
- package/seed/types.d.ts +12 -0
- package/seed/types.js +2 -0
- package/test-client.d.ts +2 -2
- package/esm/legacy-connect.js +0 -25
- package/legacy-connect.d.ts +0 -11
- package/legacy-connect.js +0 -30
- package/seed.d.ts +0 -22
package/README.md
CHANGED
|
@@ -17,7 +17,7 @@
|
|
|
17
17
|
</p>
|
|
18
18
|
|
|
19
19
|
|
|
20
|
-
`pgsql-test`
|
|
20
|
+
`pgsql-test` gives you instant, isolated PostgreSQL databases for each test — with automatic transaction rollbacks, context switching, and clean seeding. Forget flaky tests and brittle environments. Write real SQL. Get real coverage. Stay fast.
|
|
21
21
|
|
|
22
22
|
## Install
|
|
23
23
|
|
|
@@ -27,29 +27,37 @@ npm install pgsql-test
|
|
|
27
27
|
|
|
28
28
|
## Features
|
|
29
29
|
|
|
30
|
-
* ⚡
|
|
31
|
-
*
|
|
32
|
-
*
|
|
33
|
-
*
|
|
34
|
-
*
|
|
35
|
-
*
|
|
36
|
-
|
|
37
|
-
|
|
30
|
+
* ⚡ **Instant test DBs** — each one seeded, isolated, and UUID-named
|
|
31
|
+
* 🔄 **Per-test rollback** — every test runs in its own transaction or savepoint
|
|
32
|
+
* 🛡️ **RLS-friendly** — test with role-based auth via `.setContext()`
|
|
33
|
+
* 🌱 **Flexible seeding** — run `.sql` files, programmatic seeds, or even load fixtures
|
|
34
|
+
* 🧪 **Compatible with any async runner** — works with `Jest`, `Mocha`, etc.
|
|
35
|
+
* 🧹 **Auto teardown** — no residue, no reboots, just clean exits
|
|
36
|
+
|
|
37
|
+
### LaunchQL migrations
|
|
38
|
+
|
|
39
|
+
Part of the [LaunchQL](https://github.com/launchql) ecosystem, `pgsql-test` is built to pair seamlessly with our TypeScript-based [Sqitch](https://sqitch.org/) engine rewrite:
|
|
40
|
+
|
|
41
|
+
* 🚀 **Lightning-fast migrations** — powered by LaunchQL’s native deployer (10x faster than legacy Sqitch)
|
|
42
|
+
* 🔧 **Composable test scaffolds** — integrate with full LaunchQL stacks or use standalone
|
|
38
43
|
|
|
39
44
|
|
|
40
45
|
## Table of Contents
|
|
41
46
|
|
|
42
47
|
1. [Install](#install)
|
|
43
48
|
2. [Features](#features)
|
|
44
|
-
3. [Quick Start](
|
|
49
|
+
3. [Quick Start](#-quick-start)
|
|
45
50
|
4. [getConnections() Overview](#getconnections-overview)
|
|
46
51
|
5. [PgTestClient API Overview](#pgtestclient-api-overview)
|
|
47
52
|
6. [Usage Examples](#usage-examples)
|
|
48
|
-
* [Basic Setup](
|
|
49
|
-
* [Role-Based Context](
|
|
50
|
-
* [SQL File Seeding](
|
|
51
|
-
* [Programmatic Seeding](
|
|
52
|
-
* [
|
|
53
|
+
* [Basic Setup](#-basic-setup)
|
|
54
|
+
* [Role-Based Context](#-role-based-context)
|
|
55
|
+
* [SQL File Seeding](#-sql-file-seeding)
|
|
56
|
+
* [Programmatic Seeding](#-programmatic-seeding)
|
|
57
|
+
* [CSV Seeding](#️-csv-seeding)
|
|
58
|
+
* [JSON Seeding](#️-json-seeding)
|
|
59
|
+
* [Sqitch Seeding](#️-sqitch-seeding)
|
|
60
|
+
* [LaunchQL Seeding](#-launchql-seeding)
|
|
53
61
|
7. [Environment Overrides](#environment-overrides)
|
|
54
62
|
8. [Disclaimer](#disclaimer)
|
|
55
63
|
|
|
@@ -202,37 +210,174 @@ beforeAll(async () => {
|
|
|
202
210
|
});
|
|
203
211
|
```
|
|
204
212
|
|
|
205
|
-
|
|
213
|
+
## 🗃️ CSV Seeding
|
|
206
214
|
|
|
207
|
-
|
|
215
|
+
You can load tables from CSV files using `seed.csv({ ... })`. CSV headers must match the table column names exactly. This is useful for loading stable fixture data for integration tests or CI environments.
|
|
208
216
|
|
|
209
217
|
```ts
|
|
210
218
|
import path from 'path';
|
|
211
219
|
import { getConnections, seed } from 'pgsql-test';
|
|
212
220
|
|
|
213
|
-
const
|
|
221
|
+
const csv = (file: string) => path.resolve(__dirname, '../csv', file);
|
|
214
222
|
|
|
215
223
|
let db;
|
|
216
224
|
let teardown;
|
|
217
225
|
|
|
218
226
|
beforeAll(async () => {
|
|
219
|
-
({ db, teardown } = await getConnections({},
|
|
220
|
-
|
|
221
|
-
sql('schema.sql'),
|
|
222
|
-
sql('roles.sql')
|
|
223
|
-
]),
|
|
227
|
+
({ db, teardown } = await getConnections({}, [
|
|
228
|
+
// Create schema
|
|
224
229
|
seed.fn(async ({ pg }) => {
|
|
225
|
-
await pg.query(`
|
|
230
|
+
await pg.query(`
|
|
231
|
+
CREATE TABLE users (
|
|
232
|
+
id SERIAL PRIMARY KEY,
|
|
233
|
+
name TEXT NOT NULL
|
|
234
|
+
);
|
|
235
|
+
|
|
236
|
+
CREATE TABLE posts (
|
|
237
|
+
id SERIAL PRIMARY KEY,
|
|
238
|
+
user_id INT REFERENCES users(id),
|
|
239
|
+
content TEXT NOT NULL
|
|
240
|
+
);
|
|
241
|
+
`);
|
|
242
|
+
}),
|
|
243
|
+
// Load from CSV
|
|
244
|
+
seed.csv({
|
|
245
|
+
users: csv('users.csv'),
|
|
246
|
+
posts: csv('posts.csv')
|
|
247
|
+
}),
|
|
248
|
+
// Adjust SERIAL sequences to avoid conflicts
|
|
249
|
+
seed.fn(async ({ pg }) => {
|
|
250
|
+
await pg.query(`SELECT setval(pg_get_serial_sequence('users', 'id'), (SELECT MAX(id) FROM users));`);
|
|
251
|
+
await pg.query(`SELECT setval(pg_get_serial_sequence('posts', 'id'), (SELECT MAX(id) FROM posts));`);
|
|
226
252
|
})
|
|
227
|
-
]))
|
|
253
|
+
]));
|
|
254
|
+
});
|
|
255
|
+
|
|
256
|
+
afterAll(() => teardown());
|
|
257
|
+
|
|
258
|
+
it('has loaded rows', async () => {
|
|
259
|
+
const res = await db.query('SELECT COUNT(*) FROM users');
|
|
260
|
+
expect(+res.rows[0].count).toBeGreaterThan(0);
|
|
261
|
+
});
|
|
262
|
+
```
|
|
263
|
+
|
|
264
|
+
## 🗃️ JSON Seeding
|
|
265
|
+
|
|
266
|
+
You can seed tables using in-memory JSON objects. This is useful when you want fast, inline fixtures without managing external files.
|
|
267
|
+
|
|
268
|
+
```ts
|
|
269
|
+
import { getConnections, seed } from 'pgsql-test';
|
|
270
|
+
|
|
271
|
+
let db;
|
|
272
|
+
let teardown;
|
|
273
|
+
|
|
274
|
+
beforeAll(async () => {
|
|
275
|
+
({ db, teardown } = await getConnections({}, [
|
|
276
|
+
// Create schema
|
|
277
|
+
seed.fn(async ({ pg }) => {
|
|
278
|
+
await pg.query(`
|
|
279
|
+
CREATE SCHEMA custom;
|
|
280
|
+
CREATE TABLE custom.users (
|
|
281
|
+
id SERIAL PRIMARY KEY,
|
|
282
|
+
name TEXT NOT NULL
|
|
283
|
+
);
|
|
284
|
+
|
|
285
|
+
CREATE TABLE custom.posts (
|
|
286
|
+
id SERIAL PRIMARY KEY,
|
|
287
|
+
user_id INT REFERENCES custom.users(id),
|
|
288
|
+
content TEXT NOT NULL
|
|
289
|
+
);
|
|
290
|
+
`);
|
|
291
|
+
}),
|
|
292
|
+
// Seed with in-memory JSON
|
|
293
|
+
seed.json({
|
|
294
|
+
'custom.users': [
|
|
295
|
+
{ id: 1, name: 'Alice' },
|
|
296
|
+
{ id: 2, name: 'Bob' }
|
|
297
|
+
],
|
|
298
|
+
'custom.posts': [
|
|
299
|
+
{ id: 1, user_id: 1, content: 'Hello world!' },
|
|
300
|
+
{ id: 2, user_id: 2, content: 'Graphile is cool!' }
|
|
301
|
+
]
|
|
302
|
+
}),
|
|
303
|
+
// Fix SERIAL sequences
|
|
304
|
+
seed.fn(async ({ pg }) => {
|
|
305
|
+
await pg.query(`SELECT setval(pg_get_serial_sequence('custom.users', 'id'), (SELECT MAX(id) FROM custom.users));`);
|
|
306
|
+
await pg.query(`SELECT setval(pg_get_serial_sequence('custom.posts', 'id'), (SELECT MAX(id) FROM custom.posts));`);
|
|
307
|
+
})
|
|
308
|
+
]));
|
|
309
|
+
});
|
|
310
|
+
|
|
311
|
+
afterAll(() => teardown());
|
|
312
|
+
|
|
313
|
+
it('has loaded rows', async () => {
|
|
314
|
+
const res = await db.query('SELECT COUNT(*) FROM custom.users');
|
|
315
|
+
expect(+res.rows[0].count).toBeGreaterThan(0);
|
|
228
316
|
});
|
|
229
317
|
```
|
|
230
318
|
|
|
231
|
-
|
|
319
|
+
## 🏗️ Sqitch Seeding
|
|
320
|
+
|
|
321
|
+
*Note: While compatible with Sqitch syntax, LaunchQL uses its own high-performance [TypeScript-based deploy engine.](#-launchql-seeding) that we encourage using for sqitch projects*
|
|
322
|
+
|
|
323
|
+
You can seed your test database using a Sqitch project but with significantly improved performance by leveraging LaunchQL's TypeScript deployment engine:
|
|
324
|
+
|
|
325
|
+
```ts
|
|
326
|
+
import path from 'path';
|
|
327
|
+
import { getConnections, seed } from 'pgsql-test';
|
|
328
|
+
|
|
329
|
+
const cwd = path.resolve(__dirname, '../path/to/sqitch');
|
|
330
|
+
|
|
331
|
+
beforeAll(async () => {
|
|
332
|
+
({ db, teardown } = await getConnections({}, [
|
|
333
|
+
seed.sqitch(cwd)
|
|
334
|
+
]));
|
|
335
|
+
});
|
|
336
|
+
|
|
337
|
+
it('runs a schema query', async () => {
|
|
338
|
+
const res = await db.query('SELECT COUNT(*) FROM myapp.users');
|
|
339
|
+
expect(+res.rows[0].count).toBeGreaterThanOrEqual(0);
|
|
340
|
+
});
|
|
341
|
+
```
|
|
342
|
+
|
|
343
|
+
This works for any Sqitch-compatible module, now accelerated by LaunchQL's deployment tooling.
|
|
344
|
+
|
|
345
|
+
## 🚀 LaunchQL Seeding
|
|
346
|
+
|
|
347
|
+
For LaunchQL modules with precompiled `sqitch.plan`, use `seed.launchql(cwd)` to apply a schema quickly with `deployFast()`:
|
|
348
|
+
For maximum performance with precompiled LaunchQL modules, use `seed.launchql(cwd)` to apply a schema at lightning speed with our TypeScript-powered `deployFast()`:
|
|
349
|
+
|
|
350
|
+
```ts
|
|
351
|
+
import path from 'path';
|
|
352
|
+
import { getConnections, seed } from 'pgsql-test';
|
|
353
|
+
|
|
354
|
+
const cwd = path.resolve(__dirname, '../path/to/launchql');
|
|
355
|
+
|
|
356
|
+
beforeAll(async () => {
|
|
357
|
+
({ db, teardown } = await getConnections({}, [
|
|
358
|
+
seed.launchql(cwd) // uses deployFast() - up to 10x faster than traditional Sqitch!
|
|
359
|
+
]));
|
|
360
|
+
});
|
|
361
|
+
|
|
362
|
+
it('creates user records', async () => {
|
|
363
|
+
await db.query(`INSERT INTO myapp.users (username, email) VALUES ('testuser', 'test@example.com')`);
|
|
364
|
+
const res = await db.query(`SELECT COUNT(*) FROM myapp.users`);
|
|
365
|
+
expect(+res.rows[0].count).toBeGreaterThan(0);
|
|
366
|
+
});
|
|
367
|
+
```
|
|
368
|
+
|
|
369
|
+
This is the fastest way to bring up a ready-to-query schema from a compiled LaunchQL module - perfect for both development and CI environments.
|
|
370
|
+
|
|
371
|
+
## Why LaunchQL's Approach?
|
|
232
372
|
|
|
233
|
-
|
|
373
|
+
LaunchQL provides the best of both worlds:
|
|
234
374
|
|
|
375
|
+
1. **Sqitch Compatibility**: Keep your familiar Sqitch syntax and migration approach
|
|
376
|
+
2. **TypeScript Performance**: Our TS-rewritten deployment engine delivers up to 10x faster schema deployments
|
|
377
|
+
3. **Developer Experience**: Tight feedback loops with near-instant schema setup for tests
|
|
378
|
+
4. **CI Optimization**: Dramatically reduced test suite run times with optimized deployment
|
|
235
379
|
|
|
380
|
+
By maintaining Sqitch compatibility while supercharging performance, LaunchQL enables you to keep your existing migration patterns while enjoying the speed benefits of our TypeScript engine.
|
|
236
381
|
|
|
237
382
|
## Environment Overrides
|
|
238
383
|
|
package/admin.d.ts
CHANGED
package/admin.js
CHANGED
|
@@ -120,7 +120,8 @@ class DbAdmin {
|
|
|
120
120
|
await adapter.seed({
|
|
121
121
|
admin: this,
|
|
122
122
|
config: this.config,
|
|
123
|
-
pg: null // sorry!
|
|
123
|
+
pg: null, // sorry!
|
|
124
|
+
connect: null, // sorry!
|
|
124
125
|
});
|
|
125
126
|
this.cleanupTemplate(templateName);
|
|
126
127
|
this.createTemplateFromBase(seedDb, templateName);
|
package/connect.d.ts
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import { DbAdmin } from './admin';
|
|
2
2
|
import { TestConnectionOptions, PgConfig } from '@launchql/types';
|
|
3
3
|
import { PgTestConnector } from './manager';
|
|
4
|
-
import { SeedAdapter } from './seed';
|
|
4
|
+
import { SeedAdapter } from './seed/types';
|
|
5
5
|
import { PgTestClient } from './test-client';
|
|
6
6
|
export declare const getPgRootAdmin: (connOpts?: TestConnectionOptions) => DbAdmin;
|
|
7
7
|
export interface GetConnectionOpts {
|
|
@@ -15,4 +15,4 @@ export interface GetConnectionResult {
|
|
|
15
15
|
teardown: () => Promise<void>;
|
|
16
16
|
manager: PgTestConnector;
|
|
17
17
|
}
|
|
18
|
-
export declare const getConnections: (cn?: GetConnectionOpts,
|
|
18
|
+
export declare const getConnections: (cn?: GetConnectionOpts, seedAdapters?: SeedAdapter[]) => Promise<GetConnectionResult>;
|
package/connect.js
CHANGED
|
@@ -3,10 +3,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
3
3
|
exports.getConnections = exports.getPgRootAdmin = void 0;
|
|
4
4
|
const admin_1 = require("./admin");
|
|
5
5
|
const types_1 = require("@launchql/types");
|
|
6
|
-
const migrate_1 = require("@launchql/migrate");
|
|
7
6
|
const manager_1 = require("./manager");
|
|
8
7
|
const crypto_1 = require("crypto");
|
|
9
8
|
const server_utils_1 = require("@launchql/server-utils");
|
|
9
|
+
const seed_1 = require("./seed");
|
|
10
10
|
let manager;
|
|
11
11
|
const getPgRootAdmin = (connOpts = {}) => {
|
|
12
12
|
const opts = (0, types_1.getPgEnvOptions)({
|
|
@@ -27,53 +27,30 @@ const getConnOopts = (cn = {}) => {
|
|
|
27
27
|
db: connect
|
|
28
28
|
};
|
|
29
29
|
};
|
|
30
|
-
const getConnections = async (cn = {},
|
|
30
|
+
const getConnections = async (cn = {}, seedAdapters = []) => {
|
|
31
31
|
cn = getConnOopts(cn);
|
|
32
32
|
const config = cn.pg;
|
|
33
33
|
const connOpts = cn.db;
|
|
34
34
|
const root = (0, exports.getPgRootAdmin)(connOpts);
|
|
35
35
|
await root.createUserRole(connOpts.connection.user, connOpts.connection.password, connOpts.rootDb);
|
|
36
36
|
const admin = new admin_1.DbAdmin(config);
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
pg: config
|
|
43
|
-
});
|
|
44
|
-
if (connOpts.deployFast) {
|
|
45
|
-
await (0, migrate_1.deployFast)({
|
|
46
|
-
opts,
|
|
47
|
-
name: proj.getModuleName(),
|
|
48
|
-
database: config.database,
|
|
49
|
-
dir: proj.modulePath,
|
|
50
|
-
usePlan: true,
|
|
51
|
-
verbose: false
|
|
52
|
-
});
|
|
53
|
-
}
|
|
54
|
-
else {
|
|
55
|
-
await (0, migrate_1.deploy)(opts, proj.getModuleName(), config.database, proj.modulePath);
|
|
56
|
-
}
|
|
37
|
+
if (process.env.TEST_DB) {
|
|
38
|
+
config.database = process.env.TEST_DB;
|
|
39
|
+
}
|
|
40
|
+
else if (connOpts.template) {
|
|
41
|
+
admin.createFromTemplate(connOpts.template, config.database);
|
|
57
42
|
}
|
|
58
43
|
else {
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
config.database = process.env.TEST_DB;
|
|
62
|
-
}
|
|
63
|
-
else if (connOpts.template) {
|
|
64
|
-
admin.createFromTemplate(connOpts.template, config.database);
|
|
65
|
-
}
|
|
66
|
-
else {
|
|
67
|
-
admin.create(config.database);
|
|
68
|
-
admin.installExtensions(connOpts.extensions);
|
|
69
|
-
}
|
|
44
|
+
admin.create(config.database);
|
|
45
|
+
admin.installExtensions(connOpts.extensions);
|
|
70
46
|
}
|
|
71
47
|
await admin.grantConnect(connOpts.connection.user, config.database);
|
|
72
48
|
// Main admin client (optional unless needed elsewhere)
|
|
73
49
|
manager = manager_1.PgTestConnector.getInstance();
|
|
74
50
|
const pg = manager.getClient(config);
|
|
75
|
-
if (
|
|
76
|
-
await
|
|
51
|
+
if (seedAdapters.length) {
|
|
52
|
+
await seed_1.seed.compose(seedAdapters).seed({
|
|
53
|
+
connect: connOpts,
|
|
77
54
|
admin,
|
|
78
55
|
config: config,
|
|
79
56
|
pg: manager.getClient(config)
|
package/esm/admin.js
CHANGED
|
@@ -117,7 +117,8 @@ export class DbAdmin {
|
|
|
117
117
|
await adapter.seed({
|
|
118
118
|
admin: this,
|
|
119
119
|
config: this.config,
|
|
120
|
-
pg: null // sorry!
|
|
120
|
+
pg: null, // sorry!
|
|
121
|
+
connect: null, // sorry!
|
|
121
122
|
});
|
|
122
123
|
this.cleanupTemplate(templateName);
|
|
123
124
|
this.createTemplateFromBase(seedDb, templateName);
|
package/esm/connect.js
CHANGED
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
import { DbAdmin } from './admin';
|
|
2
|
-
import {
|
|
3
|
-
import { deploy, deployFast, LaunchQLProject } from '@launchql/migrate';
|
|
2
|
+
import { getPgEnvOptions, getConnEnvOptions } from '@launchql/types';
|
|
4
3
|
import { PgTestConnector } from './manager';
|
|
5
4
|
import { randomUUID } from 'crypto';
|
|
6
5
|
import { teardownPgPools } from '@launchql/server-utils';
|
|
6
|
+
import { seed } from './seed';
|
|
7
7
|
let manager;
|
|
8
8
|
export const getPgRootAdmin = (connOpts = {}) => {
|
|
9
9
|
const opts = getPgEnvOptions({
|
|
@@ -23,53 +23,30 @@ const getConnOopts = (cn = {}) => {
|
|
|
23
23
|
db: connect
|
|
24
24
|
};
|
|
25
25
|
};
|
|
26
|
-
export const getConnections = async (cn = {},
|
|
26
|
+
export const getConnections = async (cn = {}, seedAdapters = []) => {
|
|
27
27
|
cn = getConnOopts(cn);
|
|
28
28
|
const config = cn.pg;
|
|
29
29
|
const connOpts = cn.db;
|
|
30
30
|
const root = getPgRootAdmin(connOpts);
|
|
31
31
|
await root.createUserRole(connOpts.connection.user, connOpts.connection.password, connOpts.rootDb);
|
|
32
32
|
const admin = new DbAdmin(config);
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
pg: config
|
|
39
|
-
});
|
|
40
|
-
if (connOpts.deployFast) {
|
|
41
|
-
await deployFast({
|
|
42
|
-
opts,
|
|
43
|
-
name: proj.getModuleName(),
|
|
44
|
-
database: config.database,
|
|
45
|
-
dir: proj.modulePath,
|
|
46
|
-
usePlan: true,
|
|
47
|
-
verbose: false
|
|
48
|
-
});
|
|
49
|
-
}
|
|
50
|
-
else {
|
|
51
|
-
await deploy(opts, proj.getModuleName(), config.database, proj.modulePath);
|
|
52
|
-
}
|
|
33
|
+
if (process.env.TEST_DB) {
|
|
34
|
+
config.database = process.env.TEST_DB;
|
|
35
|
+
}
|
|
36
|
+
else if (connOpts.template) {
|
|
37
|
+
admin.createFromTemplate(connOpts.template, config.database);
|
|
53
38
|
}
|
|
54
39
|
else {
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
config.database = process.env.TEST_DB;
|
|
58
|
-
}
|
|
59
|
-
else if (connOpts.template) {
|
|
60
|
-
admin.createFromTemplate(connOpts.template, config.database);
|
|
61
|
-
}
|
|
62
|
-
else {
|
|
63
|
-
admin.create(config.database);
|
|
64
|
-
admin.installExtensions(connOpts.extensions);
|
|
65
|
-
}
|
|
40
|
+
admin.create(config.database);
|
|
41
|
+
admin.installExtensions(connOpts.extensions);
|
|
66
42
|
}
|
|
67
43
|
await admin.grantConnect(connOpts.connection.user, config.database);
|
|
68
44
|
// Main admin client (optional unless needed elsewhere)
|
|
69
45
|
manager = PgTestConnector.getInstance();
|
|
70
46
|
const pg = manager.getClient(config);
|
|
71
|
-
if (
|
|
72
|
-
await
|
|
47
|
+
if (seedAdapters.length) {
|
|
48
|
+
await seed.compose(seedAdapters).seed({
|
|
49
|
+
connect: connOpts,
|
|
73
50
|
admin,
|
|
74
51
|
config: config,
|
|
75
52
|
pg: manager.getClient(config)
|
package/esm/index.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
function sqlfile(files) {
|
|
1
|
+
export function sqlfile(files) {
|
|
2
2
|
return {
|
|
3
3
|
seed(ctx) {
|
|
4
4
|
for (const file of files) {
|
|
@@ -7,18 +7,12 @@ function sqlfile(files) {
|
|
|
7
7
|
}
|
|
8
8
|
};
|
|
9
9
|
}
|
|
10
|
-
function fn(fn) {
|
|
10
|
+
export function fn(fn) {
|
|
11
11
|
return {
|
|
12
12
|
seed: fn
|
|
13
13
|
};
|
|
14
14
|
}
|
|
15
|
-
function
|
|
16
|
-
throw new Error('not yet implemented');
|
|
17
|
-
return {
|
|
18
|
-
seed: fn
|
|
19
|
-
};
|
|
20
|
-
}
|
|
21
|
-
function compose(adapters) {
|
|
15
|
+
export function compose(adapters) {
|
|
22
16
|
return {
|
|
23
17
|
async seed(ctx) {
|
|
24
18
|
for (const adapter of adapters) {
|
|
@@ -27,9 +21,3 @@ function compose(adapters) {
|
|
|
27
21
|
}
|
|
28
22
|
};
|
|
29
23
|
}
|
|
30
|
-
export const seed = {
|
|
31
|
-
compose,
|
|
32
|
-
fn,
|
|
33
|
-
csv,
|
|
34
|
-
sqlfile
|
|
35
|
-
};
|
package/esm/seed/csv.js
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import { createReadStream, existsSync } from 'fs';
|
|
2
|
+
import { pipeline } from 'node:stream/promises';
|
|
3
|
+
import { from as copyFrom, to as copyTo } from 'pg-copy-streams';
|
|
4
|
+
import { createWriteStream } from 'node:fs';
|
|
5
|
+
export function csv(tables) {
|
|
6
|
+
return {
|
|
7
|
+
async seed(ctx) {
|
|
8
|
+
for (const [table, filePath] of Object.entries(tables)) {
|
|
9
|
+
if (!existsSync(filePath)) {
|
|
10
|
+
throw new Error(`❌ CSV file not found: ${filePath}`);
|
|
11
|
+
}
|
|
12
|
+
console.log(`📥 Seeding "${table}" from ${filePath}`);
|
|
13
|
+
await copyCsvIntoTable(ctx.pg, table, filePath);
|
|
14
|
+
}
|
|
15
|
+
}
|
|
16
|
+
};
|
|
17
|
+
}
|
|
18
|
+
export async function copyCsvIntoTable(pg, table, filePath) {
|
|
19
|
+
const client = pg.client;
|
|
20
|
+
const stream = client.query(copyFrom(`COPY ${table} FROM STDIN WITH CSV HEADER`));
|
|
21
|
+
const source = createReadStream(filePath);
|
|
22
|
+
try {
|
|
23
|
+
await pipeline(source, stream);
|
|
24
|
+
console.log(`✅ Successfully seeded "${table}"`);
|
|
25
|
+
}
|
|
26
|
+
catch (err) {
|
|
27
|
+
console.error(`❌ COPY failed for "${table}":`, err);
|
|
28
|
+
throw err;
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
export async function exportTableToCsv(pg, table, filePath) {
|
|
32
|
+
const client = pg.client;
|
|
33
|
+
const stream = client.query(copyTo(`COPY ${table} TO STDOUT WITH CSV HEADER`));
|
|
34
|
+
const target = createWriteStream(filePath);
|
|
35
|
+
try {
|
|
36
|
+
await pipeline(stream, target);
|
|
37
|
+
console.log(`✅ Exported "${table}" to ${filePath}`);
|
|
38
|
+
}
|
|
39
|
+
catch (err) {
|
|
40
|
+
console.error(`❌ Failed to export "${table}":`, err);
|
|
41
|
+
throw err;
|
|
42
|
+
}
|
|
43
|
+
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { fn, sqlfile, compose } from './adapters';
|
|
2
|
+
import { csv } from './csv';
|
|
3
|
+
import { json } from './json';
|
|
4
|
+
import { sqitch } from './sqitch';
|
|
5
|
+
import { launchql } from './launchql';
|
|
6
|
+
export * from './types';
|
|
7
|
+
export const seed = {
|
|
8
|
+
launchql,
|
|
9
|
+
sqitch,
|
|
10
|
+
json,
|
|
11
|
+
csv,
|
|
12
|
+
compose,
|
|
13
|
+
fn,
|
|
14
|
+
sqlfile
|
|
15
|
+
};
|
package/esm/seed/json.js
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
export function json(data) {
|
|
2
|
+
return {
|
|
3
|
+
async seed(ctx) {
|
|
4
|
+
const { pg } = ctx;
|
|
5
|
+
for (const [table, rows] of Object.entries(data)) {
|
|
6
|
+
if (!Array.isArray(rows) || rows.length === 0)
|
|
7
|
+
continue;
|
|
8
|
+
const columns = Object.keys(rows[0]);
|
|
9
|
+
const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
|
|
10
|
+
const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
|
|
11
|
+
for (const row of rows) {
|
|
12
|
+
const values = columns.map((c) => row[c]);
|
|
13
|
+
await pg.query(sql, values);
|
|
14
|
+
}
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
};
|
|
18
|
+
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import { getEnvOptions } from '@launchql/types';
|
|
2
|
+
import { LaunchQLProject, deployFast } from '@launchql/migrate';
|
|
3
|
+
export function launchql(cwd) {
|
|
4
|
+
return {
|
|
5
|
+
async seed(ctx) {
|
|
6
|
+
const proj = new LaunchQLProject(cwd ?? ctx.connect.cwd);
|
|
7
|
+
if (!proj.isInModule())
|
|
8
|
+
return;
|
|
9
|
+
const opts = getEnvOptions({ pg: ctx.config });
|
|
10
|
+
await deployFast({
|
|
11
|
+
opts,
|
|
12
|
+
name: proj.getModuleName(),
|
|
13
|
+
database: ctx.config.database,
|
|
14
|
+
dir: proj.modulePath,
|
|
15
|
+
usePlan: true,
|
|
16
|
+
verbose: false
|
|
17
|
+
});
|
|
18
|
+
}
|
|
19
|
+
};
|
|
20
|
+
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { getEnvOptions } from '@launchql/types';
|
|
2
|
+
import { LaunchQLProject, deploy } from '@launchql/migrate';
|
|
3
|
+
export function sqitch(cwd) {
|
|
4
|
+
return {
|
|
5
|
+
async seed(ctx) {
|
|
6
|
+
const proj = new LaunchQLProject(cwd ?? ctx.connect.cwd);
|
|
7
|
+
if (!proj.isInModule())
|
|
8
|
+
return;
|
|
9
|
+
const opts = getEnvOptions({ pg: ctx.config });
|
|
10
|
+
await deploy(opts, proj.getModuleName(), ctx.config.database, proj.modulePath);
|
|
11
|
+
}
|
|
12
|
+
};
|
|
13
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
package/index.d.ts
CHANGED
package/index.js
CHANGED
|
@@ -14,5 +14,8 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
|
|
14
14
|
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
|
15
15
|
};
|
|
16
16
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
17
|
-
__exportStar(require("./
|
|
18
|
-
__exportStar(require("./
|
|
17
|
+
__exportStar(require("./admin"), exports);
|
|
18
|
+
__exportStar(require("./connect"), exports);
|
|
19
|
+
__exportStar(require("./manager"), exports);
|
|
20
|
+
__exportStar(require("./seed"), exports);
|
|
21
|
+
__exportStar(require("./test-client"), exports);
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "pgsql-test",
|
|
3
|
-
"version": "2.0.
|
|
3
|
+
"version": "2.0.3",
|
|
4
4
|
"author": "Dan Lynch <pyramation@gmail.com>",
|
|
5
5
|
"description": "PostgreSQL Testing in TypeScript",
|
|
6
6
|
"main": "index.js",
|
|
@@ -29,12 +29,18 @@
|
|
|
29
29
|
"test": "jest",
|
|
30
30
|
"test:watch": "jest --watch"
|
|
31
31
|
},
|
|
32
|
+
"devDependencies": {
|
|
33
|
+
"@types/pg": "^8.15.2",
|
|
34
|
+
"@types/pg-copy-streams": "^1.2.5"
|
|
35
|
+
},
|
|
32
36
|
"dependencies": {
|
|
33
|
-
"@launchql/migrate": "^2.0.
|
|
34
|
-
"@launchql/server-utils": "^2.0.
|
|
37
|
+
"@launchql/migrate": "^2.0.13",
|
|
38
|
+
"@launchql/server-utils": "^2.0.6",
|
|
35
39
|
"@launchql/types": "^2.0.5",
|
|
36
40
|
"chalk": "^4.1.0",
|
|
37
|
-
"deepmerge": "^4.3.1"
|
|
41
|
+
"deepmerge": "^4.3.1",
|
|
42
|
+
"pg": "^8.16.0",
|
|
43
|
+
"pg-copy-streams": "^6.0.6"
|
|
38
44
|
},
|
|
39
|
-
"gitHead": "
|
|
45
|
+
"gitHead": "5a6be2ac585f936256fe95f9d3ff0690bd768d87"
|
|
40
46
|
}
|
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.
|
|
3
|
+
exports.sqlfile = sqlfile;
|
|
4
|
+
exports.fn = fn;
|
|
5
|
+
exports.compose = compose;
|
|
4
6
|
function sqlfile(files) {
|
|
5
7
|
return {
|
|
6
8
|
seed(ctx) {
|
|
@@ -15,12 +17,6 @@ function fn(fn) {
|
|
|
15
17
|
seed: fn
|
|
16
18
|
};
|
|
17
19
|
}
|
|
18
|
-
function csv(fn) {
|
|
19
|
-
throw new Error('not yet implemented');
|
|
20
|
-
return {
|
|
21
|
-
seed: fn
|
|
22
|
-
};
|
|
23
|
-
}
|
|
24
20
|
function compose(adapters) {
|
|
25
21
|
return {
|
|
26
22
|
async seed(ctx) {
|
|
@@ -30,9 +26,3 @@ function compose(adapters) {
|
|
|
30
26
|
}
|
|
31
27
|
};
|
|
32
28
|
}
|
|
33
|
-
exports.seed = {
|
|
34
|
-
compose,
|
|
35
|
-
fn,
|
|
36
|
-
csv,
|
|
37
|
-
sqlfile
|
|
38
|
-
};
|
package/seed/csv.d.ts
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import { SeedAdapter } from './types';
|
|
2
|
+
import { PgTestClient } from '../test-client';
|
|
3
|
+
interface CsvSeedMap {
|
|
4
|
+
[tableName: string]: string;
|
|
5
|
+
}
|
|
6
|
+
export declare function csv(tables: CsvSeedMap): SeedAdapter;
|
|
7
|
+
export declare function copyCsvIntoTable(pg: PgTestClient, table: string, filePath: string): Promise<void>;
|
|
8
|
+
export declare function exportTableToCsv(pg: PgTestClient, table: string, filePath: string): Promise<void>;
|
|
9
|
+
export {};
|
package/seed/csv.js
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.csv = csv;
|
|
4
|
+
exports.copyCsvIntoTable = copyCsvIntoTable;
|
|
5
|
+
exports.exportTableToCsv = exportTableToCsv;
|
|
6
|
+
const fs_1 = require("fs");
|
|
7
|
+
const promises_1 = require("node:stream/promises");
|
|
8
|
+
const pg_copy_streams_1 = require("pg-copy-streams");
|
|
9
|
+
const node_fs_1 = require("node:fs");
|
|
10
|
+
function csv(tables) {
|
|
11
|
+
return {
|
|
12
|
+
async seed(ctx) {
|
|
13
|
+
for (const [table, filePath] of Object.entries(tables)) {
|
|
14
|
+
if (!(0, fs_1.existsSync)(filePath)) {
|
|
15
|
+
throw new Error(`❌ CSV file not found: ${filePath}`);
|
|
16
|
+
}
|
|
17
|
+
console.log(`📥 Seeding "${table}" from ${filePath}`);
|
|
18
|
+
await copyCsvIntoTable(ctx.pg, table, filePath);
|
|
19
|
+
}
|
|
20
|
+
}
|
|
21
|
+
};
|
|
22
|
+
}
|
|
23
|
+
async function copyCsvIntoTable(pg, table, filePath) {
|
|
24
|
+
const client = pg.client;
|
|
25
|
+
const stream = client.query((0, pg_copy_streams_1.from)(`COPY ${table} FROM STDIN WITH CSV HEADER`));
|
|
26
|
+
const source = (0, fs_1.createReadStream)(filePath);
|
|
27
|
+
try {
|
|
28
|
+
await (0, promises_1.pipeline)(source, stream);
|
|
29
|
+
console.log(`✅ Successfully seeded "${table}"`);
|
|
30
|
+
}
|
|
31
|
+
catch (err) {
|
|
32
|
+
console.error(`❌ COPY failed for "${table}":`, err);
|
|
33
|
+
throw err;
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
async function exportTableToCsv(pg, table, filePath) {
|
|
37
|
+
const client = pg.client;
|
|
38
|
+
const stream = client.query((0, pg_copy_streams_1.to)(`COPY ${table} TO STDOUT WITH CSV HEADER`));
|
|
39
|
+
const target = (0, node_fs_1.createWriteStream)(filePath);
|
|
40
|
+
try {
|
|
41
|
+
await (0, promises_1.pipeline)(stream, target);
|
|
42
|
+
console.log(`✅ Exported "${table}" to ${filePath}`);
|
|
43
|
+
}
|
|
44
|
+
catch (err) {
|
|
45
|
+
console.error(`❌ Failed to export "${table}":`, err);
|
|
46
|
+
throw err;
|
|
47
|
+
}
|
|
48
|
+
}
|
package/seed/index.d.ts
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { fn, sqlfile, compose } from './adapters';
|
|
2
|
+
import { csv } from './csv';
|
|
3
|
+
import { json } from './json';
|
|
4
|
+
import { sqitch } from './sqitch';
|
|
5
|
+
import { launchql } from './launchql';
|
|
6
|
+
export * from './types';
|
|
7
|
+
export declare const seed: {
|
|
8
|
+
launchql: typeof launchql;
|
|
9
|
+
sqitch: typeof sqitch;
|
|
10
|
+
json: typeof json;
|
|
11
|
+
csv: typeof csv;
|
|
12
|
+
compose: typeof compose;
|
|
13
|
+
fn: typeof fn;
|
|
14
|
+
sqlfile: typeof sqlfile;
|
|
15
|
+
};
|
package/seed/index.js
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
|
14
|
+
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
|
15
|
+
};
|
|
16
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
17
|
+
exports.seed = void 0;
|
|
18
|
+
const adapters_1 = require("./adapters");
|
|
19
|
+
const csv_1 = require("./csv");
|
|
20
|
+
const json_1 = require("./json");
|
|
21
|
+
const sqitch_1 = require("./sqitch");
|
|
22
|
+
const launchql_1 = require("./launchql");
|
|
23
|
+
__exportStar(require("./types"), exports);
|
|
24
|
+
exports.seed = {
|
|
25
|
+
launchql: launchql_1.launchql,
|
|
26
|
+
sqitch: sqitch_1.sqitch,
|
|
27
|
+
json: json_1.json,
|
|
28
|
+
csv: csv_1.csv,
|
|
29
|
+
compose: adapters_1.compose,
|
|
30
|
+
fn: adapters_1.fn,
|
|
31
|
+
sqlfile: adapters_1.sqlfile
|
|
32
|
+
};
|
package/seed/json.d.ts
ADDED
package/seed/json.js
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.json = json;
|
|
4
|
+
/**
 * Seed adapter that inserts literal row data, one table at a time.
 *
 * `data` maps a table name to an array of row objects. For every table the
 * column list is taken from the FIRST row; each row is then inserted with a
 * parameterized INSERT (values go through $1..$n placeholders). Entries whose
 * value is not a non-empty array are skipped.
 *
 * NOTE(review): table and column names are interpolated into the SQL text
 * unescaped — this assumes developer-controlled seed data, not untrusted
 * input. Rows after the first are assumed to share the first row's keys;
 * missing keys insert as undefined/NULL.
 */
function json(data) {
    return {
        async seed(ctx) {
            const { pg } = ctx;
            for (const tableName of Object.keys(data)) {
                const records = data[tableName];
                if (!Array.isArray(records) || records.length === 0) {
                    continue;
                }
                // Column order is fixed by the first record.
                const cols = Object.keys(records[0]);
                const params = cols.map((_, idx) => `$${idx + 1}`).join(', ');
                const insertSql = `INSERT INTO ${tableName} (${cols.join(', ')}) VALUES (${params})`;
                // One parameterized INSERT per record, executed sequentially.
                for (const record of records) {
                    await pg.query(insertSql, cols.map((col) => record[col]));
                }
            }
        }
    };
}
|
package/seed/launchql.js
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.launchql = launchql;
|
|
4
|
+
const types_1 = require("@launchql/types");
|
|
5
|
+
const migrate_1 = require("@launchql/migrate");
|
|
6
|
+
/**
 * Seed adapter that deploys a LaunchQL module via the fast deploy path.
 *
 * Resolves the project from `cwd` (falling back to the test connection's cwd),
 * bails out silently when the directory is not inside a LaunchQL module, and
 * otherwise runs `deployFast` against the test database using the plan file.
 */
function launchql(cwd) {
    return {
        async seed(ctx) {
            const project = new migrate_1.LaunchQLProject(cwd ?? ctx.connect.cwd);
            // Not inside a module: nothing to deploy, treat as a no-op seed.
            if (!project.isInModule()) {
                return;
            }
            const envOpts = (0, types_1.getEnvOptions)({ pg: ctx.config });
            await (0, migrate_1.deployFast)({
                opts: envOpts,
                name: project.getModuleName(),
                database: ctx.config.database,
                dir: project.modulePath,
                usePlan: true,
                verbose: false
            });
        }
    };
}
|
package/seed/sqitch.d.ts
ADDED
package/seed/sqitch.js
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.sqitch = sqitch;
|
|
4
|
+
const types_1 = require("@launchql/types");
|
|
5
|
+
const migrate_1 = require("@launchql/migrate");
|
|
6
|
+
/**
 * Seed adapter that deploys a sqitch-style LaunchQL module.
 *
 * Mirrors the `launchql` adapter but uses the regular `deploy` entry point:
 * resolves the project from `cwd` (default: the test connection's cwd),
 * silently skips when not inside a module, then deploys the module into the
 * test database.
 */
function sqitch(cwd) {
    return {
        async seed(ctx) {
            const project = new migrate_1.LaunchQLProject(cwd ?? ctx.connect.cwd);
            // Outside a module there is nothing to deploy — no-op.
            if (!project.isInModule()) {
                return;
            }
            const envOpts = (0, types_1.getEnvOptions)({ pg: ctx.config });
            await (0, migrate_1.deploy)(envOpts, project.getModuleName(), ctx.config.database, project.modulePath);
        }
    };
}
|
package/seed/types.d.ts
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { PgConfig, TestConnectionOptions } from "@launchql/types";
|
|
2
|
+
import { DbAdmin } from "../admin";
|
|
3
|
+
import { PgTestClient } from "../test-client";
|
|
4
|
+
/**
 * Everything a seed adapter receives when its `seed()` hook runs.
 * Assembled by the test harness before handing control to the adapter.
 */
export interface SeedContext {
    /** Options the test connection was opened with (includes a `cwd` used by the launchql/sqitch adapters). */
    connect: TestConnectionOptions;
    /** Administrative handle — presumably for database create/drop/template operations; see ./admin. */
    admin: DbAdmin;
    /** Connection config for the database under test (e.g. `config.database` names the target DB). */
    config: PgConfig;
    /** Live test client; adapters issue SQL through `pg.query(...)`. */
    pg: PgTestClient;
}
|
|
10
|
+
/**
 * Contract every seed adapter implements: populate/prepare the test database
 * using the supplied context. May be synchronous or return a promise that the
 * harness awaits.
 */
export interface SeedAdapter {
    seed(ctx: SeedContext): Promise<void> | void;
}
|
package/seed/types.js
ADDED
package/test-client.d.ts
CHANGED
|
@@ -1,8 +1,8 @@
|
|
|
1
|
-
import { QueryResult } from 'pg';
|
|
1
|
+
import { Client, QueryResult } from 'pg';
|
|
2
2
|
import { PgConfig } from '@launchql/types';
|
|
3
3
|
export declare class PgTestClient {
|
|
4
4
|
config: PgConfig;
|
|
5
|
-
|
|
5
|
+
client: Client;
|
|
6
6
|
private ctxStmts;
|
|
7
7
|
private _ended;
|
|
8
8
|
constructor(config: PgConfig);
|
package/esm/legacy-connect.js
DELETED
|
@@ -1,25 +0,0 @@
|
|
|
1
|
-
import { PgTestConnector } from './manager';
|
|
2
|
-
import { getPgEnvOptions } from '@launchql/types';
|
|
3
|
-
export function connect(config) {
|
|
4
|
-
const manager = PgTestConnector.getInstance();
|
|
5
|
-
return manager.getClient(config);
|
|
6
|
-
}
|
|
7
|
-
export function close(client) {
|
|
8
|
-
client.close();
|
|
9
|
-
}
|
|
10
|
-
const manager = PgTestConnector.getInstance();
|
|
11
|
-
export const Connection = {
|
|
12
|
-
connect(config) {
|
|
13
|
-
const creds = getPgEnvOptions(config);
|
|
14
|
-
return manager.getClient(creds);
|
|
15
|
-
},
|
|
16
|
-
close(client) {
|
|
17
|
-
client.close();
|
|
18
|
-
},
|
|
19
|
-
closeAll() {
|
|
20
|
-
return manager.closeAll();
|
|
21
|
-
},
|
|
22
|
-
getManager() {
|
|
23
|
-
return manager;
|
|
24
|
-
}
|
|
25
|
-
};
|
package/legacy-connect.d.ts
DELETED
|
@@ -1,11 +0,0 @@
|
|
|
1
|
-
import { PgTestClient } from './test-client';
|
|
2
|
-
import { PgTestConnector } from './manager';
|
|
3
|
-
import { PgConfig } from '@launchql/types';
|
|
4
|
-
export declare function connect(config: PgConfig): PgTestClient;
|
|
5
|
-
export declare function close(client: PgTestClient): void;
|
|
6
|
-
export declare const Connection: {
|
|
7
|
-
connect(config: Partial<PgConfig>): PgTestClient;
|
|
8
|
-
close(client: PgTestClient): void;
|
|
9
|
-
closeAll(): Promise<void>;
|
|
10
|
-
getManager(): PgTestConnector;
|
|
11
|
-
};
|
package/legacy-connect.js
DELETED
|
@@ -1,30 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.Connection = void 0;
|
|
4
|
-
exports.connect = connect;
|
|
5
|
-
exports.close = close;
|
|
6
|
-
const manager_1 = require("./manager");
|
|
7
|
-
const types_1 = require("@launchql/types");
|
|
8
|
-
function connect(config) {
|
|
9
|
-
const manager = manager_1.PgTestConnector.getInstance();
|
|
10
|
-
return manager.getClient(config);
|
|
11
|
-
}
|
|
12
|
-
function close(client) {
|
|
13
|
-
client.close();
|
|
14
|
-
}
|
|
15
|
-
const manager = manager_1.PgTestConnector.getInstance();
|
|
16
|
-
exports.Connection = {
|
|
17
|
-
connect(config) {
|
|
18
|
-
const creds = (0, types_1.getPgEnvOptions)(config);
|
|
19
|
-
return manager.getClient(creds);
|
|
20
|
-
},
|
|
21
|
-
close(client) {
|
|
22
|
-
client.close();
|
|
23
|
-
},
|
|
24
|
-
closeAll() {
|
|
25
|
-
return manager.closeAll();
|
|
26
|
-
},
|
|
27
|
-
getManager() {
|
|
28
|
-
return manager;
|
|
29
|
-
}
|
|
30
|
-
};
|
package/seed.d.ts
DELETED
|
@@ -1,22 +0,0 @@
|
|
|
1
|
-
import { PgConfig } from "@launchql/types";
|
|
2
|
-
import { DbAdmin } from "./admin";
|
|
3
|
-
import { PgTestClient } from "./test-client";
|
|
4
|
-
interface SeedContext {
|
|
5
|
-
admin: DbAdmin;
|
|
6
|
-
config: PgConfig;
|
|
7
|
-
pg: PgTestClient;
|
|
8
|
-
}
|
|
9
|
-
export interface SeedAdapter {
|
|
10
|
-
seed(ctx: SeedContext): Promise<void> | void;
|
|
11
|
-
}
|
|
12
|
-
declare function sqlfile(files: string[]): SeedAdapter;
|
|
13
|
-
declare function fn(fn: (ctx: SeedContext) => Promise<void>): SeedAdapter;
|
|
14
|
-
declare function csv(fn: (ctx: SeedContext) => Promise<void>): SeedAdapter;
|
|
15
|
-
declare function compose(adapters: SeedAdapter[]): SeedAdapter;
|
|
16
|
-
export declare const seed: {
|
|
17
|
-
compose: typeof compose;
|
|
18
|
-
fn: typeof fn;
|
|
19
|
-
csv: typeof csv;
|
|
20
|
-
sqlfile: typeof sqlfile;
|
|
21
|
-
};
|
|
22
|
-
export {};
|