pgsql-test 2.0.2 → 2.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -17,7 +17,7 @@
17
17
  </p>
18
18
 
19
19
 
20
- `pgsql-test` provides an isolated PostgreSQL testing environment with per-test transaction rollback, ideal for integration tests involving SQL, roles, simulations, and complex migrations. With automatic rollbacks and isolated contexts, it eliminates test interference while delivering tight feedback loops for happier developers. We made database testing simple so you can focus on writing good tests instead of fighting your environment.
20
+ `pgsql-test` gives you instant, isolated PostgreSQL databases for each test with automatic transaction rollbacks, context switching, and clean seeding. Forget flaky tests and brittle environments. Write real SQL. Get real coverage. Stay fast.
21
21
 
22
22
  ## Install
23
23
 
@@ -27,30 +27,38 @@ npm install pgsql-test
27
27
 
28
28
  ## Features
29
29
 
30
- * ⚡ Quick-start setup with `getConnections()`
31
- * 🧹 Easy teardown and cleanup
32
- * 🔄 Per-test isolation using transactions and savepoints
33
- * 🛡️ Role-based context for RLS testing
34
- * 🌱 Flexible seed support via `.sql` files and programmatic functions
35
- * 🧪 Auto-generated test databases with `UUID` suffix
36
- * 📦 Built for tools like `sqitch`, supporting full schema initialization workflows
37
- * 🧰 Designed for `Jest`, `Mocha`, or any async test runner
30
+ * ⚡ **Instant test DBs** — each one seeded, isolated, and UUID-named
31
+ * 🔄 **Per-test rollback** — every test runs in its own transaction or savepoint
32
+ * 🛡️ **RLS-friendly** — test with role-based auth via `.setContext()`
33
+ * 🌱 **Flexible seeding** — run `.sql` files, programmatic seeds, or even load fixtures
34
+ * 🧪 **Compatible with any async runner** — works with `Jest`, `Mocha`, etc.
35
+ * 🧹 **Auto teardown** — no residue, no reboots, just clean exits
36
+
37
+ ### LaunchQL migrations
38
+
39
+ Part of the [LaunchQL](https://github.com/launchql) ecosystem, `pgsql-test` is built to pair seamlessly with our TypeScript-based [Sqitch](https://sqitch.org/) engine rewrite:
40
+
41
+ * 🚀 **Lightning-fast migrations** — powered by LaunchQL’s native deployer (10x faster than legacy Sqitch)
42
+ * 🔧 **Composable test scaffolds** — integrate with full LaunchQL stacks or use standalone
38
43
 
39
44
 
40
45
  ## Table of Contents
41
46
 
42
47
  1. [Install](#install)
43
48
  2. [Features](#features)
44
- 3. [Quick Start](#quick-start)
45
- 4. [getConnections() Overview](#getconnections-overview)
49
+ 3. [Quick Start](#-quick-start)
50
+ 4. [`getConnections()` Overview](#getconnections-overview)
46
51
  5. [PgTestClient API Overview](#pgtestclient-api-overview)
47
52
  6. [Usage Examples](#usage-examples)
48
- * [Basic Setup](#basic-setup)
49
- * [Role-Based Context](#role-based-context)
50
- * [SQL File Seeding](#sql-file-seeding)
51
- * [Programmatic Seeding](#programmatic-seeding)
52
- * [Composed Seeding](#composed-seeding)
53
- 7. [Environment Overrides](#environment-overrides)
53
+ * [Basic Setup](#-basic-setup)
54
+ * [Role-Based Context](#-role-based-context)
55
+ * [SQL File Seeding](#-sql-file-seeding)
56
+ * [Programmatic Seeding](#-programmatic-seeding)
57
+ * [CSV Seeding](#️-csv-seeding)
58
+ * [JSON Seeding](#️-json-seeding)
59
+ * [Sqitch Seeding](#️-sqitch-seeding)
60
+ * [LaunchQL Seeding](#-launchql-seeding)
61
+ 7. [`getConnections()` Options](#getconnections-options)
54
62
  8. [Disclaimer](#disclaimer)
55
63
 
56
64
 
@@ -202,55 +210,220 @@ beforeAll(async () => {
202
210
  });
203
211
  ```
204
212
 
205
- ### 🧬 Composed Seeding
213
+ ### 🗃️ CSV Seeding
206
214
 
207
- Combine multiple seeders with `seed.compose()`:
215
+ You can load tables from CSV files using `seed.csv({ ... })`. CSV headers must match the table column names exactly. This is useful for loading stable fixture data for integration tests or CI environments.
208
216
 
209
217
  ```ts
210
218
  import path from 'path';
211
219
  import { getConnections, seed } from 'pgsql-test';
212
220
 
213
- const sql = (f: string) => path.join(__dirname, 'sql', f);
221
+ const csv = (file: string) => path.resolve(__dirname, '../csv', file);
214
222
 
215
223
  let db;
216
224
  let teardown;
217
225
 
218
226
  beforeAll(async () => {
219
- ({ db, teardown } = await getConnections({}, seed.compose([
220
- seed.sqlfile([
221
- sql('schema.sql'),
222
- sql('roles.sql')
223
- ]),
227
+ ({ db, teardown } = await getConnections({}, [
228
+ // Create schema
229
+ seed.fn(async ({ pg }) => {
230
+ await pg.query(`
231
+ CREATE TABLE users (
232
+ id SERIAL PRIMARY KEY,
233
+ name TEXT NOT NULL
234
+ );
235
+
236
+ CREATE TABLE posts (
237
+ id SERIAL PRIMARY KEY,
238
+ user_id INT REFERENCES users(id),
239
+ content TEXT NOT NULL
240
+ );
241
+ `);
242
+ }),
243
+ // Load from CSV
244
+ seed.csv({
245
+ users: csv('users.csv'),
246
+ posts: csv('posts.csv')
247
+ }),
248
+ // Adjust SERIAL sequences to avoid conflicts
224
249
  seed.fn(async ({ pg }) => {
225
- await pg.query(`INSERT INTO users (name) VALUES ('Composed');`);
250
+ await pg.query(`SELECT setval(pg_get_serial_sequence('users', 'id'), (SELECT MAX(id) FROM users));`);
251
+ await pg.query(`SELECT setval(pg_get_serial_sequence('posts', 'id'), (SELECT MAX(id) FROM posts));`);
226
252
  })
227
- ])));
253
+ ]));
254
+ });
255
+
256
+ afterAll(() => teardown());
257
+
258
+ it('has loaded rows', async () => {
259
+ const res = await db.query('SELECT COUNT(*) FROM users');
260
+ expect(+res.rows[0].count).toBeGreaterThan(0);
261
+ });
262
+ ```
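+
+ The CSV files themselves are plain fixtures whose header row matches the table columns exactly. As a purely illustrative example (not shipped with this package), a `users.csv` for the schema above might look like:
+
+ ```csv
+ id,name
+ 1,Alice
+ 2,Bob
+ ```
+
+ and `posts.csv` would start with the header row `id,user_id,content`.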
263
+
264
+ ### 🗃️ JSON Seeding
265
+
266
+ You can seed tables using in-memory JSON objects. This is useful when you want fast, inline fixtures without managing external files.
267
+
268
+ ```ts
269
+ import { getConnections, seed } from 'pgsql-test';
270
+
271
+ let db;
272
+ let teardown;
273
+
274
+ beforeAll(async () => {
275
+ ({ db, teardown } = await getConnections({}, [
276
+ // Create schema
277
+ seed.fn(async ({ pg }) => {
278
+ await pg.query(`
279
+ CREATE SCHEMA custom;
280
+ CREATE TABLE custom.users (
281
+ id SERIAL PRIMARY KEY,
282
+ name TEXT NOT NULL
283
+ );
284
+
285
+ CREATE TABLE custom.posts (
286
+ id SERIAL PRIMARY KEY,
287
+ user_id INT REFERENCES custom.users(id),
288
+ content TEXT NOT NULL
289
+ );
290
+ `);
291
+ }),
292
+ // Seed with in-memory JSON
293
+ seed.json({
294
+ 'custom.users': [
295
+ { id: 1, name: 'Alice' },
296
+ { id: 2, name: 'Bob' }
297
+ ],
298
+ 'custom.posts': [
299
+ { id: 1, user_id: 1, content: 'Hello world!' },
300
+ { id: 2, user_id: 2, content: 'Graphile is cool!' }
301
+ ]
302
+ }),
303
+ // Fix SERIAL sequences
304
+ seed.fn(async ({ pg }) => {
305
+ await pg.query(`SELECT setval(pg_get_serial_sequence('custom.users', 'id'), (SELECT MAX(id) FROM custom.users));`);
306
+ await pg.query(`SELECT setval(pg_get_serial_sequence('custom.posts', 'id'), (SELECT MAX(id) FROM custom.posts));`);
307
+ })
308
+ ]));
309
+ });
310
+
311
+ afterAll(() => teardown());
312
+
313
+ it('has loaded rows', async () => {
314
+ const res = await db.query('SELECT COUNT(*) FROM custom.users');
315
+ expect(+res.rows[0].count).toBeGreaterThan(0);
316
+ });
317
+ ```
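+
+ Under the hood, `seed.json` issues one parameterized `INSERT` per row, taking the column list from the keys of the first row. A rough hand-written equivalent of the fixture above (shown only for illustration):
+
+ ```ts
+ // What seed.json effectively runs for each row of custom.users
+ await pg.query(
+   'INSERT INTO custom.users (id, name) VALUES ($1, $2)',
+   [1, 'Alice']
+ );
+ ```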
318
+
319
+ ### 🏗️ Sqitch Seeding
320
+
321
+ *Note: While compatible with Sqitch syntax, LaunchQL uses its own high-performance [TypeScript-based deploy engine](#-launchql-seeding), which we encourage for Sqitch projects.*
322
+
323
+ You can seed your test database from a Sqitch project, with significantly improved performance thanks to LaunchQL's TypeScript deployment engine:
324
+
325
+ ```ts
326
+ import path from 'path';
327
+ import { getConnections, seed } from 'pgsql-test';
328
+
329
+ const cwd = path.resolve(__dirname, '../path/to/sqitch');
330
+
331
+ beforeAll(async () => {
332
+ ({ db, teardown } = await getConnections({}, [
333
+ seed.sqitch(cwd)
334
+ ]));
335
+ });
336
+
+ afterAll(() => teardown());
+
337
+ it('runs a schema query', async () => {
338
+ const res = await db.query('SELECT COUNT(*) FROM myapp.users');
339
+ expect(+res.rows[0].count).toBeGreaterThanOrEqual(0);
228
340
  });
229
341
  ```
230
342
 
231
- ---
343
+ This works for any Sqitch-compatible module, now accelerated by LaunchQL's deployment tooling.
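+
+ The `cwd` you pass should point at the Sqitch project directory, i.e. wherever `sqitch.plan` lives. A typical layout (shown purely for illustration) looks like:
+
+ ```text
+ path/to/sqitch/
+ ├── sqitch.plan
+ ├── deploy/
+ ├── revert/
+ └── verify/
+ ```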
232
344
 
233
- These examples show how flexible `pgsql-test` is for composing repeatable and transactional test database environments.
345
+ ### 🚀 LaunchQL Seeding
234
346
 
347
+ For LaunchQL modules with a precompiled `sqitch.plan`, use `seed.launchql(cwd)` to apply the schema at lightning speed with our TypeScript-powered `deployFast()`:
235
349
 
350
+ ```ts
351
+ import path from 'path';
352
+ import { getConnections, seed } from 'pgsql-test';
236
353
 
237
- ## Environment Overrides
354
+ const cwd = path.resolve(__dirname, '../path/to/launchql');
+
+ let db;
+ let teardown;
238
355
 
239
- `pgsql-test` respects the following env vars for DB connectivity:
356
+ beforeAll(async () => {
357
+ ({ db, teardown } = await getConnections({}, [
358
+ seed.launchql(cwd) // uses deployFast() - up to 10x faster than traditional Sqitch!
359
+ ]));
360
+ });
+
+ afterAll(() => teardown());
240
361
 
241
- * `PGHOST`
242
- * `PGPORT`
243
- * `PGUSER`
244
- * `PGPASSWORD`
362
+ it('creates user records', async () => {
363
+ await db.query(`INSERT INTO myapp.users (username, email) VALUES ('testuser', 'test@example.com')`);
364
+ const res = await db.query(`SELECT COUNT(*) FROM myapp.users`);
365
+ expect(+res.rows[0].count).toBeGreaterThan(0);
366
+ });
367
+ ```
368
+
369
+ This is the fastest way to bring up a ready-to-query schema from a compiled LaunchQL module - perfect for both development and CI environments.
370
+
371
+ ## Why LaunchQL's Approach?
372
+
373
+ LaunchQL provides the best of both worlds:
374
+
375
+ 1. **Sqitch Compatibility**: Keep your familiar Sqitch syntax and migration approach
376
+ 2. **TypeScript Performance**: Our TS-rewritten deployment engine delivers up to 10x faster schema deployments
377
+ 3. **Developer Experience**: Tight feedback loops with near-instant schema setup for tests
378
+ 4. **CI Optimization**: Dramatically reduced test suite run times with optimized deployment
379
+
380
+ By maintaining Sqitch compatibility while supercharging performance, LaunchQL enables you to keep your existing migration patterns while enjoying the speed benefits of our TypeScript engine.
381
+
382
+ ## `getConnections` Options
383
+
384
+ The tables below document the available options for `getConnections`. Options are passed as a combination of `pg` and `db` configuration objects.
245
385
 
246
- Override them in your test runner or CI config:
386
+ ### `db` Options (PgTestConnectionOptions)
247
387
 
248
- ```yaml
249
- env:
250
- PGHOST: localhost
251
- PGPORT: 5432
252
- PGUSER: postgres
253
- PGPASSWORD: password
388
+ | Option | Type | Default | Description |
389
+ | ------------------------ | ---------- | ---------------- | --------------------------------------------------------------------------- |
390
+ | `db.extensions` | `string[]` | `[]` | Array of PostgreSQL extensions to include in the test database |
391
+ | `db.cwd` | `string` | `process.cwd()` | Working directory used for LaunchQL/Sqitch projects |
392
+ | `db.connection.user` | `string` | `'app_user'` | User for simulating RLS via `setContext()` |
393
+ | `db.connection.password` | `string` | `'app_password'` | Password for RLS test user |
394
+ | `db.connection.role` | `string` | `'anonymous'` | Default role used during `setContext()` |
395
+ | `db.template` | `string` | `undefined` | Template database used for faster test DB creation |
396
+ | `db.rootDb` | `string` | `'postgres'` | Root database used for administrative operations (e.g., creating databases) |
397
+ | `db.prefix` | `string` | `'db-'` | Prefix used when generating test database names |
398
+
399
+ ### `pg` Options (PgConfig)
400
+
401
+ Environment variables will override these options when available:
402
+
403
+ * `PGHOST`, `PGPORT`, `PGUSER`, `PGPASSWORD`, `PGDATABASE`
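+
+ Because the environment takes precedence, a CI job can point the whole suite at its own PostgreSQL service without code changes, e.g. (hypothetical values):
+
+ ```sh
+ PGHOST=127.0.0.1 PGPORT=5432 PGUSER=postgres PGPASSWORD=password npm test
+ ```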
404
+
405
+ | Option | Type | Default | Description |
406
+ | ------------- | -------- | ------------- | ----------------------------------------------- |
407
+ | `pg.user` | `string` | `'postgres'` | Superuser for PostgreSQL |
408
+ | `pg.password` | `string` | `'password'` | Password for the PostgreSQL superuser |
409
+ | `pg.host` | `string` | `'localhost'` | Hostname for PostgreSQL |
410
+ | `pg.port` | `number` | `5423` | Port for PostgreSQL |
411
+ | `pg.database` | `string` | `'postgres'` | Default database used when connecting initially |
412
+
413
+ ### Usage
414
+
415
+ ```ts
416
+ const { conn, db, teardown } = await getConnections({
417
+ pg: { user: 'postgres', password: 'secret' },
418
+ db: {
419
+ extensions: ['uuid-ossp'],
420
+ cwd: '/path/to/project',
421
+ connection: { user: 'test_user', password: 'secret', role: 'authenticated' },
422
+ template: 'test_template',
423
+ prefix: 'test_',
424
+ rootDb: 'postgres'
425
+ }
426
+ });
254
427
  ```
255
428
 
256
429
  ## Disclaimer
package/admin.d.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  import { PgConfig } from '@launchql/types';
2
- import { SeedAdapter } from './seed';
2
+ import { SeedAdapter } from './seed/types';
3
3
  export declare class DbAdmin {
4
4
  private config;
5
5
  private verbose;
package/admin.js CHANGED
@@ -120,7 +120,8 @@ class DbAdmin {
120
120
  await adapter.seed({
121
121
  admin: this,
122
122
  config: this.config,
123
- pg: null // sorry!
123
+ pg: null, // sorry!
124
+ connect: null, // sorry!
124
125
  });
125
126
  this.cleanupTemplate(templateName);
126
127
  this.createTemplateFromBase(seedDb, templateName);
package/connect.d.ts CHANGED
@@ -1,12 +1,12 @@
1
1
  import { DbAdmin } from './admin';
2
- import { TestConnectionOptions, PgConfig } from '@launchql/types';
2
+ import { PgTestConnectionOptions, PgConfig } from '@launchql/types';
3
3
  import { PgTestConnector } from './manager';
4
- import { SeedAdapter } from './seed';
4
+ import { SeedAdapter } from './seed/types';
5
5
  import { PgTestClient } from './test-client';
6
- export declare const getPgRootAdmin: (connOpts?: TestConnectionOptions) => DbAdmin;
6
+ export declare const getPgRootAdmin: (connOpts?: PgTestConnectionOptions) => DbAdmin;
7
7
  export interface GetConnectionOpts {
8
8
  pg?: Partial<PgConfig>;
9
- db?: Partial<TestConnectionOptions>;
9
+ db?: Partial<PgTestConnectionOptions>;
10
10
  }
11
11
  export interface GetConnectionResult {
12
12
  pg: PgTestClient;
@@ -15,4 +15,4 @@ export interface GetConnectionResult {
15
15
  teardown: () => Promise<void>;
16
16
  manager: PgTestConnector;
17
17
  }
18
- export declare const getConnections: (cn?: GetConnectionOpts, seedAdapter?: SeedAdapter) => Promise<GetConnectionResult>;
18
+ export declare const getConnections: (cn?: GetConnectionOpts, seedAdapters?: SeedAdapter[]) => Promise<GetConnectionResult>;
package/connect.js CHANGED
@@ -3,10 +3,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.getConnections = exports.getPgRootAdmin = void 0;
4
4
  const admin_1 = require("./admin");
5
5
  const types_1 = require("@launchql/types");
6
- const migrate_1 = require("@launchql/migrate");
7
6
  const manager_1 = require("./manager");
8
7
  const crypto_1 = require("crypto");
9
8
  const server_utils_1 = require("@launchql/server-utils");
9
+ const seed_1 = require("./seed");
10
10
  let manager;
11
11
  const getPgRootAdmin = (connOpts = {}) => {
12
12
  const opts = (0, types_1.getPgEnvOptions)({
@@ -27,53 +27,30 @@ const getConnOopts = (cn = {}) => {
27
27
  db: connect
28
28
  };
29
29
  };
30
- const getConnections = async (cn = {}, seedAdapter) => {
30
+ const getConnections = async (cn = {}, seedAdapters = [seed_1.seed.launchql()]) => {
31
31
  cn = getConnOopts(cn);
32
32
  const config = cn.pg;
33
33
  const connOpts = cn.db;
34
34
  const root = (0, exports.getPgRootAdmin)(connOpts);
35
35
  await root.createUserRole(connOpts.connection.user, connOpts.connection.password, connOpts.rootDb);
36
36
  const admin = new admin_1.DbAdmin(config);
37
- const proj = new migrate_1.LaunchQLProject(connOpts.cwd);
38
- if (proj.isInModule()) {
39
- admin.create(config.database);
40
- admin.installExtensions(connOpts.extensions);
41
- const opts = (0, types_1.getEnvOptions)({
42
- pg: config
43
- });
44
- if (connOpts.deployFast) {
45
- await (0, migrate_1.deployFast)({
46
- opts,
47
- name: proj.getModuleName(),
48
- database: config.database,
49
- dir: proj.modulePath,
50
- usePlan: true,
51
- verbose: false
52
- });
53
- }
54
- else {
55
- await (0, migrate_1.deploy)(opts, proj.getModuleName(), config.database, proj.modulePath);
56
- }
37
+ if (process.env.TEST_DB) {
38
+ config.database = process.env.TEST_DB;
39
+ }
40
+ else if (connOpts.template) {
41
+ admin.createFromTemplate(connOpts.template, config.database);
57
42
  }
58
43
  else {
59
- // Create the test database
60
- if (process.env.TEST_DB) {
61
- config.database = process.env.TEST_DB;
62
- }
63
- else if (connOpts.template) {
64
- admin.createFromTemplate(connOpts.template, config.database);
65
- }
66
- else {
67
- admin.create(config.database);
68
- admin.installExtensions(connOpts.extensions);
69
- }
44
+ admin.create(config.database);
45
+ admin.installExtensions(connOpts.extensions);
70
46
  }
71
47
  await admin.grantConnect(connOpts.connection.user, config.database);
72
48
  // Main admin client (optional unless needed elsewhere)
73
49
  manager = manager_1.PgTestConnector.getInstance();
74
50
  const pg = manager.getClient(config);
75
- if (seedAdapter) {
76
- await seedAdapter.seed({
51
+ if (seedAdapters.length) {
52
+ await seed_1.seed.compose(seedAdapters).seed({
53
+ connect: connOpts,
77
54
  admin,
78
55
  config: config,
79
56
  pg: manager.getClient(config)
package/esm/admin.js CHANGED
@@ -117,7 +117,8 @@ export class DbAdmin {
117
117
  await adapter.seed({
118
118
  admin: this,
119
119
  config: this.config,
120
- pg: null // sorry!
120
+ pg: null, // sorry!
121
+ connect: null, // sorry!
121
122
  });
122
123
  this.cleanupTemplate(templateName);
123
124
  this.createTemplateFromBase(seedDb, templateName);
package/esm/connect.js CHANGED
@@ -1,9 +1,9 @@
1
1
  import { DbAdmin } from './admin';
2
- import { getEnvOptions, getPgEnvOptions, getConnEnvOptions } from '@launchql/types';
3
- import { deploy, deployFast, LaunchQLProject } from '@launchql/migrate';
2
+ import { getPgEnvOptions, getConnEnvOptions } from '@launchql/types';
4
3
  import { PgTestConnector } from './manager';
5
4
  import { randomUUID } from 'crypto';
6
5
  import { teardownPgPools } from '@launchql/server-utils';
6
+ import { seed } from './seed';
7
7
  let manager;
8
8
  export const getPgRootAdmin = (connOpts = {}) => {
9
9
  const opts = getPgEnvOptions({
@@ -23,53 +23,30 @@ const getConnOopts = (cn = {}) => {
23
23
  db: connect
24
24
  };
25
25
  };
26
- export const getConnections = async (cn = {}, seedAdapter) => {
26
+ export const getConnections = async (cn = {}, seedAdapters = [seed.launchql()]) => {
27
27
  cn = getConnOopts(cn);
28
28
  const config = cn.pg;
29
29
  const connOpts = cn.db;
30
30
  const root = getPgRootAdmin(connOpts);
31
31
  await root.createUserRole(connOpts.connection.user, connOpts.connection.password, connOpts.rootDb);
32
32
  const admin = new DbAdmin(config);
33
- const proj = new LaunchQLProject(connOpts.cwd);
34
- if (proj.isInModule()) {
35
- admin.create(config.database);
36
- admin.installExtensions(connOpts.extensions);
37
- const opts = getEnvOptions({
38
- pg: config
39
- });
40
- if (connOpts.deployFast) {
41
- await deployFast({
42
- opts,
43
- name: proj.getModuleName(),
44
- database: config.database,
45
- dir: proj.modulePath,
46
- usePlan: true,
47
- verbose: false
48
- });
49
- }
50
- else {
51
- await deploy(opts, proj.getModuleName(), config.database, proj.modulePath);
52
- }
33
+ if (process.env.TEST_DB) {
34
+ config.database = process.env.TEST_DB;
35
+ }
36
+ else if (connOpts.template) {
37
+ admin.createFromTemplate(connOpts.template, config.database);
53
38
  }
54
39
  else {
55
- // Create the test database
56
- if (process.env.TEST_DB) {
57
- config.database = process.env.TEST_DB;
58
- }
59
- else if (connOpts.template) {
60
- admin.createFromTemplate(connOpts.template, config.database);
61
- }
62
- else {
63
- admin.create(config.database);
64
- admin.installExtensions(connOpts.extensions);
65
- }
40
+ admin.create(config.database);
41
+ admin.installExtensions(connOpts.extensions);
66
42
  }
67
43
  await admin.grantConnect(connOpts.connection.user, config.database);
68
44
  // Main admin client (optional unless needed elsewhere)
69
45
  manager = PgTestConnector.getInstance();
70
46
  const pg = manager.getClient(config);
71
- if (seedAdapter) {
72
- await seedAdapter.seed({
47
+ if (seedAdapters.length) {
48
+ await seed.compose(seedAdapters).seed({
49
+ connect: connOpts,
73
50
  admin,
74
51
  config: config,
75
52
  pg: manager.getClient(config)
@@ -1,4 +1,4 @@
1
- function sqlfile(files) {
1
+ export function sqlfile(files) {
2
2
  return {
3
3
  seed(ctx) {
4
4
  for (const file of files) {
@@ -7,18 +7,12 @@ function sqlfile(files) {
7
7
  }
8
8
  };
9
9
  }
10
- function fn(fn) {
10
+ export function fn(fn) {
11
11
  return {
12
12
  seed: fn
13
13
  };
14
14
  }
15
- function csv(fn) {
16
- throw new Error('not yet implemented');
17
- return {
18
- seed: fn
19
- };
20
- }
21
- function compose(adapters) {
15
+ export function compose(adapters) {
22
16
  return {
23
17
  async seed(ctx) {
24
18
  for (const adapter of adapters) {
@@ -27,9 +21,3 @@ function compose(adapters) {
27
21
  }
28
22
  };
29
23
  }
30
- export const seed = {
31
- compose,
32
- fn,
33
- csv,
34
- sqlfile
35
- };
@@ -0,0 +1,43 @@
1
+ import { createReadStream, existsSync } from 'fs';
2
+ import { pipeline } from 'node:stream/promises';
3
+ import { from as copyFrom, to as copyTo } from 'pg-copy-streams';
4
+ import { createWriteStream } from 'node:fs';
5
+ export function csv(tables) {
6
+ return {
7
+ async seed(ctx) {
8
+ for (const [table, filePath] of Object.entries(tables)) {
9
+ if (!existsSync(filePath)) {
10
+ throw new Error(`❌ CSV file not found: ${filePath}`);
11
+ }
12
+ console.log(`📥 Seeding "${table}" from ${filePath}`);
13
+ await copyCsvIntoTable(ctx.pg, table, filePath);
14
+ }
15
+ }
16
+ };
17
+ }
18
+ export async function copyCsvIntoTable(pg, table, filePath) {
19
+ const client = pg.client;
20
+ const stream = client.query(copyFrom(`COPY ${table} FROM STDIN WITH CSV HEADER`));
21
+ const source = createReadStream(filePath);
22
+ try {
23
+ await pipeline(source, stream);
24
+ console.log(`✅ Successfully seeded "${table}"`);
25
+ }
26
+ catch (err) {
27
+ console.error(`❌ COPY failed for "${table}":`, err);
28
+ throw err;
29
+ }
30
+ }
31
+ export async function exportTableToCsv(pg, table, filePath) {
32
+ const client = pg.client;
33
+ const stream = client.query(copyTo(`COPY ${table} TO STDOUT WITH CSV HEADER`));
34
+ const target = createWriteStream(filePath);
35
+ try {
36
+ await pipeline(stream, target);
37
+ console.log(`✅ Exported "${table}" to ${filePath}`);
38
+ }
39
+ catch (err) {
40
+ console.error(`❌ Failed to export "${table}":`, err);
41
+ throw err;
42
+ }
43
+ }
@@ -0,0 +1,15 @@
1
+ import { fn, sqlfile, compose } from './adapters';
2
+ import { csv } from './csv';
3
+ import { json } from './json';
4
+ import { sqitch } from './sqitch';
5
+ import { launchql } from './launchql';
6
+ export * from './types';
7
+ export const seed = {
8
+ launchql,
9
+ sqitch,
10
+ json,
11
+ csv,
12
+ compose,
13
+ fn,
14
+ sqlfile
15
+ };
@@ -0,0 +1,18 @@
1
+ export function json(data) {
2
+ return {
3
+ async seed(ctx) {
4
+ const { pg } = ctx;
5
+ for (const [table, rows] of Object.entries(data)) {
6
+ if (!Array.isArray(rows) || rows.length === 0)
7
+ continue;
8
+ const columns = Object.keys(rows[0]);
9
+ const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
10
+ const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
11
+ for (const row of rows) {
12
+ const values = columns.map((c) => row[c]);
13
+ await pg.query(sql, values);
14
+ }
15
+ }
16
+ }
17
+ };
18
+ }
@@ -0,0 +1,20 @@
1
+ import { getEnvOptions } from '@launchql/types';
2
+ import { LaunchQLProject, deployFast } from '@launchql/migrate';
3
+ export function launchql(cwd) {
4
+ return {
5
+ async seed(ctx) {
6
+ const proj = new LaunchQLProject(cwd ?? ctx.connect.cwd);
7
+ if (!proj.isInModule())
8
+ return;
9
+ const opts = getEnvOptions({ pg: ctx.config });
10
+ await deployFast({
11
+ opts,
12
+ name: proj.getModuleName(),
13
+ database: ctx.config.database,
14
+ dir: proj.modulePath,
15
+ usePlan: true,
16
+ verbose: false
17
+ });
18
+ }
19
+ };
20
+ }
@@ -0,0 +1,13 @@
1
+ import { getEnvOptions } from '@launchql/types';
2
+ import { LaunchQLProject, deploy } from '@launchql/migrate';
3
+ export function sqitch(cwd) {
4
+ return {
5
+ async seed(ctx) {
6
+ const proj = new LaunchQLProject(cwd ?? ctx.connect.cwd);
7
+ if (!proj.isInModule())
8
+ return;
9
+ const opts = getEnvOptions({ pg: ctx.config });
10
+ await deploy(opts, proj.getModuleName(), ctx.config.database, proj.modulePath);
11
+ }
12
+ };
13
+ }
@@ -0,0 +1 @@
1
+ export {};
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "pgsql-test",
3
- "version": "2.0.2",
3
+ "version": "2.0.4",
4
4
  "author": "Dan Lynch <pyramation@gmail.com>",
5
5
  "description": "PostgreSQL Testing in TypeScript",
6
6
  "main": "index.js",
@@ -29,12 +29,18 @@
29
29
  "test": "jest",
30
30
  "test:watch": "jest --watch"
31
31
  },
32
+ "devDependencies": {
33
+ "@types/pg": "^8.15.2",
34
+ "@types/pg-copy-streams": "^1.2.5"
35
+ },
32
36
  "dependencies": {
33
- "@launchql/migrate": "^2.0.12",
34
- "@launchql/server-utils": "^2.0.5",
35
- "@launchql/types": "^2.0.5",
37
+ "@launchql/migrate": "^2.0.14",
38
+ "@launchql/server-utils": "^2.0.7",
39
+ "@launchql/types": "^2.0.6",
36
40
  "chalk": "^4.1.0",
37
- "deepmerge": "^4.3.1"
41
+ "deepmerge": "^4.3.1",
42
+ "pg": "^8.16.0",
43
+ "pg-copy-streams": "^6.0.6"
38
44
  },
39
- "gitHead": "db786d9e5d7a784c57f2bacbf1eb2f4b541cb21e"
45
+ "gitHead": "b347f01f28aa33c195f78d911eab8757777b0c88"
40
46
  }
@@ -0,0 +1,4 @@
1
+ import { SeedAdapter, SeedContext } from "./types";
2
+ export declare function sqlfile(files: string[]): SeedAdapter;
3
+ export declare function fn(fn: (ctx: SeedContext) => Promise<void>): SeedAdapter;
4
+ export declare function compose(adapters: SeedAdapter[]): SeedAdapter;
@@ -1,6 +1,8 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.seed = void 0;
3
+ exports.sqlfile = sqlfile;
4
+ exports.fn = fn;
5
+ exports.compose = compose;
4
6
  function sqlfile(files) {
5
7
  return {
6
8
  seed(ctx) {
@@ -15,12 +17,6 @@ function fn(fn) {
15
17
  seed: fn
16
18
  };
17
19
  }
18
- function csv(fn) {
19
- throw new Error('not yet implemented');
20
- return {
21
- seed: fn
22
- };
23
- }
24
20
  function compose(adapters) {
25
21
  return {
26
22
  async seed(ctx) {
@@ -30,9 +26,3 @@ function compose(adapters) {
30
26
  }
31
27
  };
32
28
  }
33
- exports.seed = {
34
- compose,
35
- fn,
36
- csv,
37
- sqlfile
38
- };
package/seed/csv.d.ts ADDED
@@ -0,0 +1,9 @@
1
+ import { SeedAdapter } from './types';
2
+ import { PgTestClient } from '../test-client';
3
+ interface CsvSeedMap {
4
+ [tableName: string]: string;
5
+ }
6
+ export declare function csv(tables: CsvSeedMap): SeedAdapter;
7
+ export declare function copyCsvIntoTable(pg: PgTestClient, table: string, filePath: string): Promise<void>;
8
+ export declare function exportTableToCsv(pg: PgTestClient, table: string, filePath: string): Promise<void>;
9
+ export {};
package/seed/csv.js ADDED
@@ -0,0 +1,48 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.csv = csv;
4
+ exports.copyCsvIntoTable = copyCsvIntoTable;
5
+ exports.exportTableToCsv = exportTableToCsv;
6
+ const fs_1 = require("fs");
7
+ const promises_1 = require("node:stream/promises");
8
+ const pg_copy_streams_1 = require("pg-copy-streams");
9
+ const node_fs_1 = require("node:fs");
10
+ function csv(tables) {
11
+ return {
12
+ async seed(ctx) {
13
+ for (const [table, filePath] of Object.entries(tables)) {
14
+ if (!(0, fs_1.existsSync)(filePath)) {
15
+ throw new Error(`❌ CSV file not found: ${filePath}`);
16
+ }
17
+ console.log(`📥 Seeding "${table}" from ${filePath}`);
18
+ await copyCsvIntoTable(ctx.pg, table, filePath);
19
+ }
20
+ }
21
+ };
22
+ }
23
+ async function copyCsvIntoTable(pg, table, filePath) {
24
+ const client = pg.client;
25
+ const stream = client.query((0, pg_copy_streams_1.from)(`COPY ${table} FROM STDIN WITH CSV HEADER`));
26
+ const source = (0, fs_1.createReadStream)(filePath);
27
+ try {
28
+ await (0, promises_1.pipeline)(source, stream);
29
+ console.log(`✅ Successfully seeded "${table}"`);
30
+ }
31
+ catch (err) {
32
+ console.error(`❌ COPY failed for "${table}":`, err);
33
+ throw err;
34
+ }
35
+ }
36
+ async function exportTableToCsv(pg, table, filePath) {
37
+ const client = pg.client;
38
+ const stream = client.query((0, pg_copy_streams_1.to)(`COPY ${table} TO STDOUT WITH CSV HEADER`));
39
+ const target = (0, node_fs_1.createWriteStream)(filePath);
40
+ try {
41
+ await (0, promises_1.pipeline)(stream, target);
42
+ console.log(`✅ Exported "${table}" to ${filePath}`);
43
+ }
44
+ catch (err) {
45
+ console.error(`❌ Failed to export "${table}":`, err);
46
+ throw err;
47
+ }
48
+ }
@@ -0,0 +1,15 @@
1
+ import { fn, sqlfile, compose } from './adapters';
2
+ import { csv } from './csv';
3
+ import { json } from './json';
4
+ import { sqitch } from './sqitch';
5
+ import { launchql } from './launchql';
6
+ export * from './types';
7
+ export declare const seed: {
8
+ launchql: typeof launchql;
9
+ sqitch: typeof sqitch;
10
+ json: typeof json;
11
+ csv: typeof csv;
12
+ compose: typeof compose;
13
+ fn: typeof fn;
14
+ sqlfile: typeof sqlfile;
15
+ };
package/seed/index.js ADDED
@@ -0,0 +1,32 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __exportStar = (this && this.__exportStar) || function(m, exports) {
14
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
15
+ };
16
+ Object.defineProperty(exports, "__esModule", { value: true });
17
+ exports.seed = void 0;
18
+ const adapters_1 = require("./adapters");
19
+ const csv_1 = require("./csv");
20
+ const json_1 = require("./json");
21
+ const sqitch_1 = require("./sqitch");
22
+ const launchql_1 = require("./launchql");
23
+ __exportStar(require("./types"), exports);
24
+ exports.seed = {
25
+ launchql: launchql_1.launchql,
26
+ sqitch: sqitch_1.sqitch,
27
+ json: json_1.json,
28
+ csv: csv_1.csv,
29
+ compose: adapters_1.compose,
30
+ fn: adapters_1.fn,
31
+ sqlfile: adapters_1.sqlfile
32
+ };
package/seed/json.d.ts ADDED
@@ -0,0 +1,6 @@
1
+ import { SeedAdapter } from "./types";
2
+ interface JsonSeedMap {
3
+ [table: string]: Record<string, any>[];
4
+ }
5
+ export declare function json(data: JsonSeedMap): SeedAdapter;
6
+ export {};
package/seed/json.js ADDED
@@ -0,0 +1,21 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.json = json;
4
+ function json(data) {
5
+ return {
6
+ async seed(ctx) {
7
+ const { pg } = ctx;
8
+ for (const [table, rows] of Object.entries(data)) {
9
+ if (!Array.isArray(rows) || rows.length === 0)
10
+ continue;
11
+ const columns = Object.keys(rows[0]);
12
+ const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
13
+ const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
14
+ for (const row of rows) {
15
+ const values = columns.map((c) => row[c]);
16
+ await pg.query(sql, values);
17
+ }
18
+ }
19
+ }
20
+ };
21
+ }
@@ -0,0 +1,2 @@
1
+ import { SeedAdapter } from './types';
2
+ export declare function launchql(cwd?: string): SeedAdapter;
@@ -0,0 +1,23 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.launchql = launchql;
4
+ const types_1 = require("@launchql/types");
5
+ const migrate_1 = require("@launchql/migrate");
6
+ function launchql(cwd) {
7
+ return {
8
+ async seed(ctx) {
9
+ const proj = new migrate_1.LaunchQLProject(cwd ?? ctx.connect.cwd);
10
+ if (!proj.isInModule())
11
+ return;
12
+ const opts = (0, types_1.getEnvOptions)({ pg: ctx.config });
13
+ await (0, migrate_1.deployFast)({
14
+ opts,
15
+ name: proj.getModuleName(),
16
+ database: ctx.config.database,
17
+ dir: proj.modulePath,
18
+ usePlan: true,
19
+ verbose: false
20
+ });
21
+ }
22
+ };
23
+ }
@@ -0,0 +1,2 @@
1
+ import { SeedAdapter } from './types';
2
+ export declare function sqitch(cwd?: string): SeedAdapter;
package/seed/sqitch.js ADDED
@@ -0,0 +1,16 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.sqitch = sqitch;
4
+ const types_1 = require("@launchql/types");
5
+ const migrate_1 = require("@launchql/migrate");
6
+ function sqitch(cwd) {
7
+ return {
8
+ async seed(ctx) {
9
+ const proj = new migrate_1.LaunchQLProject(cwd ?? ctx.connect.cwd);
10
+ if (!proj.isInModule())
11
+ return;
12
+ const opts = (0, types_1.getEnvOptions)({ pg: ctx.config });
13
+ await (0, migrate_1.deploy)(opts, proj.getModuleName(), ctx.config.database, proj.modulePath);
14
+ }
15
+ };
16
+ }
@@ -0,0 +1,12 @@
1
+ import { PgConfig, PgTestConnectionOptions } from "@launchql/types";
2
+ import { DbAdmin } from "../admin";
3
+ import { PgTestClient } from "../test-client";
4
+ export interface SeedContext {
5
+ connect: PgTestConnectionOptions;
6
+ admin: DbAdmin;
7
+ config: PgConfig;
8
+ pg: PgTestClient;
9
+ }
10
+ export interface SeedAdapter {
11
+ seed(ctx: SeedContext): Promise<void> | void;
12
+ }
package/seed/types.js ADDED
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
package/test-client.d.ts CHANGED
@@ -1,8 +1,8 @@
1
- import { QueryResult } from 'pg';
1
+ import { Client, QueryResult } from 'pg';
2
2
  import { PgConfig } from '@launchql/types';
3
3
  export declare class PgTestClient {
4
4
  config: PgConfig;
5
- private client;
5
+ client: Client;
6
6
  private ctxStmts;
7
7
  private _ended;
8
8
  constructor(config: PgConfig);
package/seed.d.ts DELETED
@@ -1,22 +0,0 @@
1
- import { PgConfig } from "@launchql/types";
2
- import { DbAdmin } from "./admin";
3
- import { PgTestClient } from "./test-client";
4
- interface SeedContext {
5
- admin: DbAdmin;
6
- config: PgConfig;
7
- pg: PgTestClient;
8
- }
9
- export interface SeedAdapter {
10
- seed(ctx: SeedContext): Promise<void> | void;
11
- }
12
- declare function sqlfile(files: string[]): SeedAdapter;
13
- declare function fn(fn: (ctx: SeedContext) => Promise<void>): SeedAdapter;
14
- declare function csv(fn: (ctx: SeedContext) => Promise<void>): SeedAdapter;
15
- declare function compose(adapters: SeedAdapter[]): SeedAdapter;
16
- export declare const seed: {
17
- compose: typeof compose;
18
- fn: typeof fn;
19
- csv: typeof csv;
20
- sqlfile: typeof sqlfile;
21
- };
22
- export {};