pgsql-test 2.11.13 → 2.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +142 -25
- package/context-utils.d.ts +8 -0
- package/context-utils.js +28 -0
- package/esm/context-utils.js +25 -0
- package/esm/seed/csv.js +28 -0
- package/esm/seed/index.js +0 -2
- package/esm/seed/json.js +18 -0
- package/esm/seed/launchql.js +20 -11
- package/esm/seed/sql.js +15 -0
- package/esm/test-client.js +32 -14
- package/package.json +7 -7
- package/seed/csv.d.ts +9 -3
- package/seed/csv.js +29 -0
- package/seed/index.d.ts +0 -2
- package/seed/index.js +0 -2
- package/seed/json.d.ts +8 -2
- package/seed/json.js +19 -0
- package/seed/launchql.d.ts +8 -0
- package/seed/launchql.js +21 -11
- package/seed/sql.d.ts +7 -0
- package/seed/sql.js +18 -0
- package/test-client.d.ts +7 -1
- package/test-client.js +32 -14
- package/esm/seed/sqitch.js +0 -17
- package/seed/sqitch.d.ts +0 -2
- package/seed/sqitch.js +0 -20
package/README.md
CHANGED
|
@@ -58,7 +58,6 @@ Part of the [LaunchQL](https://github.com/launchql) ecosystem, `pgsql-test` is b
|
|
|
58
58
|
* [Programmatic Seeding](#-programmatic-seeding)
|
|
59
59
|
* [CSV Seeding](#️-csv-seeding)
|
|
60
60
|
* [JSON Seeding](#️-json-seeding)
|
|
61
|
-
* [Sqitch Seeding](#️-sqitch-seeding)
|
|
62
61
|
* [LaunchQL Seeding](#-launchql-seeding)
|
|
63
62
|
7. [`getConnections() Options` ](#getconnections-options)
|
|
64
63
|
8. [Disclaimer](#disclaimer)
|
|
@@ -240,16 +239,48 @@ This array lets you fully customize how your test database is seeded. You can co
|
|
|
240
239
|
* [`seed.fn()`](#-programmatic-seeding) – Run JavaScript/TypeScript logic to programmatically insert data
|
|
241
240
|
* [`seed.csv()`](#️-csv-seeding) – Load tabular data from CSV files
|
|
242
241
|
* [`seed.json()`](#️-json-seeding) – Use in-memory objects as seed data
|
|
243
|
-
* [`seed.
|
|
244
|
-
* [`seed.launchql()`](#-launchql-seeding) – Apply a LaunchQL module using `deployFast()` (compatible with sqitch)
|
|
242
|
+
* [`seed.launchql()`](#-launchql-seeding) – Apply a LaunchQL project or set of packages (compatible with sqitch)
|
|
245
243
|
|
|
246
244
|
> ✨ **Default Behavior:** If no `SeedAdapter[]` is passed, LaunchQL seeding is assumed. This makes `pgsql-test` zero-config for LaunchQL-based projects.
|
|
247
245
|
|
|
248
246
|
This composable system allows you to mix-and-match data setup strategies for flexible, realistic, and fast database tests.
|
|
249
247
|
|
|
248
|
+
#### Two Seeding Patterns
|
|
249
|
+
|
|
250
|
+
You can seed data using either approach:
|
|
251
|
+
|
|
252
|
+
**1. Adapter Pattern** (setup phase via `getConnections`)
|
|
253
|
+
```ts
|
|
254
|
+
const { db, teardown } = await getConnections({}, [
|
|
255
|
+
seed.json({ 'users': [{ id: 1, name: 'Alice' }] })
|
|
256
|
+
]);
|
|
257
|
+
```
|
|
258
|
+
|
|
259
|
+
**2. Direct Load Methods** (runtime via `PgTestClient`)
|
|
260
|
+
```ts
|
|
261
|
+
await db.loadJson({ 'users': [{ id: 1, name: 'Alice' }] });
|
|
262
|
+
await db.loadCsv({ 'users': '/path/to/users.csv' });
|
|
263
|
+
await db.loadSql(['/path/to/schema.sql']);
|
|
264
|
+
```
|
|
265
|
+
|
|
266
|
+
> **Note:** `loadCsv()` and `loadLaunchql()` do not apply RLS context (PostgreSQL limitation). Use `loadJson()` or `loadSql()` for RLS-aware seeding.
|
|
267
|
+
|
|
250
268
|
### 🔌 SQL File Seeding
|
|
251
269
|
|
|
252
|
-
|
|
270
|
+
**Adapter Pattern:**
|
|
271
|
+
```ts
|
|
272
|
+
const { db, teardown } = await getConnections({}, [
|
|
273
|
+
seed.sqlfile(['schema.sql', 'fixtures.sql'])
|
|
274
|
+
]);
|
|
275
|
+
```
|
|
276
|
+
|
|
277
|
+
**Direct Load Method:**
|
|
278
|
+
```ts
|
|
279
|
+
await db.loadSql(['schema.sql', 'fixtures.sql']);
|
|
280
|
+
```
|
|
281
|
+
|
|
282
|
+
<details>
|
|
283
|
+
<summary>Full example</summary>
|
|
253
284
|
|
|
254
285
|
```ts
|
|
255
286
|
import path from 'path';
|
|
@@ -274,9 +305,27 @@ afterAll(async () => {
|
|
|
274
305
|
});
|
|
275
306
|
```
|
|
276
307
|
|
|
308
|
+
</details>
|
|
309
|
+
|
|
277
310
|
### 🧠 Programmatic Seeding
|
|
278
311
|
|
|
279
|
-
|
|
312
|
+
**Adapter Pattern:**
|
|
313
|
+
```ts
|
|
314
|
+
const { db, teardown } = await getConnections({}, [
|
|
315
|
+
seed.fn(async ({ pg }) => {
|
|
316
|
+
await pg.query(`INSERT INTO users (name) VALUES ('Seeded User')`);
|
|
317
|
+
})
|
|
318
|
+
]);
|
|
319
|
+
```
|
|
320
|
+
|
|
321
|
+
**Direct Load Method:**
|
|
322
|
+
```ts
|
|
323
|
+
// Use any PgTestClient method directly
|
|
324
|
+
await db.query(`INSERT INTO users (name) VALUES ('Seeded User')`);
|
|
325
|
+
```
|
|
326
|
+
|
|
327
|
+
<details>
|
|
328
|
+
<summary>Full example</summary>
|
|
280
329
|
|
|
281
330
|
```ts
|
|
282
331
|
import { getConnections, seed } from 'pgsql-test';
|
|
@@ -295,8 +344,33 @@ beforeAll(async () => {
|
|
|
295
344
|
});
|
|
296
345
|
```
|
|
297
346
|
|
|
347
|
+
</details>
|
|
348
|
+
|
|
298
349
|
## 🗃️ CSV Seeding
|
|
299
350
|
|
|
351
|
+
**Adapter Pattern:**
|
|
352
|
+
```ts
|
|
353
|
+
const { db, teardown } = await getConnections({}, [
|
|
354
|
+
seed.csv({
|
|
355
|
+
'users': '/path/to/users.csv',
|
|
356
|
+
'posts': '/path/to/posts.csv'
|
|
357
|
+
})
|
|
358
|
+
]);
|
|
359
|
+
```
|
|
360
|
+
|
|
361
|
+
**Direct Load Method:**
|
|
362
|
+
```ts
|
|
363
|
+
await db.loadCsv({
|
|
364
|
+
'users': '/path/to/users.csv',
|
|
365
|
+
'posts': '/path/to/posts.csv'
|
|
366
|
+
});
|
|
367
|
+
```
|
|
368
|
+
|
|
369
|
+
> **Note:** CSV loading uses PostgreSQL COPY which does not support RLS context.
|
|
370
|
+
|
|
371
|
+
<details>
|
|
372
|
+
<summary>Full example</summary>
|
|
373
|
+
|
|
300
374
|
You can load tables from CSV files using `seed.csv({ ... })`. CSV headers must match the table column names exactly. This is useful for loading stable fixture data for integration tests or CI environments.
|
|
301
375
|
|
|
302
376
|
```ts
|
|
@@ -346,8 +420,35 @@ it('has loaded rows', async () => {
|
|
|
346
420
|
});
|
|
347
421
|
```
|
|
348
422
|
|
|
423
|
+
</details>
|
|
424
|
+
|
|
349
425
|
## 🗃️ JSON Seeding
|
|
350
426
|
|
|
427
|
+
**Adapter Pattern:**
|
|
428
|
+
```ts
|
|
429
|
+
const { db, teardown } = await getConnections({}, [
|
|
430
|
+
seed.json({
|
|
431
|
+
'custom.users': [
|
|
432
|
+
{ id: 1, name: 'Alice' },
|
|
433
|
+
{ id: 2, name: 'Bob' }
|
|
434
|
+
]
|
|
435
|
+
})
|
|
436
|
+
]);
|
|
437
|
+
```
|
|
438
|
+
|
|
439
|
+
**Direct Load Method:**
|
|
440
|
+
```ts
|
|
441
|
+
await db.loadJson({
|
|
442
|
+
'custom.users': [
|
|
443
|
+
{ id: 1, name: 'Alice' },
|
|
444
|
+
{ id: 2, name: 'Bob' }
|
|
445
|
+
]
|
|
446
|
+
});
|
|
447
|
+
```
|
|
448
|
+
|
|
449
|
+
<details>
|
|
450
|
+
<summary>Full example</summary>
|
|
451
|
+
|
|
351
452
|
You can seed tables using in-memory JSON objects. This is useful when you want fast, inline fixtures without managing external files.
|
|
352
453
|
|
|
353
454
|
```ts
|
|
@@ -401,28 +502,32 @@ it('has loaded rows', async () => {
|
|
|
401
502
|
});
|
|
402
503
|
```
|
|
403
504
|
|
|
404
|
-
|
|
505
|
+
</details>
|
|
405
506
|
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
You can seed your test database using a Sqitch project but with significantly improved performance by leveraging LaunchQL's TypeScript deployment engine:
|
|
507
|
+
## 🚀 LaunchQL Seeding
|
|
409
508
|
|
|
509
|
+
**Zero Configuration (Default):**
|
|
410
510
|
```ts
|
|
411
|
-
|
|
412
|
-
|
|
511
|
+
// LaunchQL migrate is used automatically
|
|
512
|
+
const { db, teardown } = await getConnections();
|
|
513
|
+
```
|
|
413
514
|
|
|
414
|
-
|
|
515
|
+
**Adapter Pattern (Custom Path):**
|
|
516
|
+
```ts
|
|
517
|
+
const { db, teardown } = await getConnections({}, [
|
|
518
|
+
seed.launchql('/path/to/launchql', true) // with cache
|
|
519
|
+
]);
|
|
520
|
+
```
|
|
415
521
|
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
]));
|
|
420
|
-
});
|
|
522
|
+
**Direct Load Method:**
|
|
523
|
+
```ts
|
|
524
|
+
await db.loadLaunchql('/path/to/launchql', true); // with cache
|
|
421
525
|
```
|
|
422
526
|
|
|
423
|
-
|
|
527
|
+
> **Note:** LaunchQL deployment has its own client handling and does not apply RLS context.
|
|
424
528
|
|
|
425
|
-
|
|
529
|
+
<details>
|
|
530
|
+
<summary>Full example</summary>
|
|
426
531
|
|
|
427
532
|
If your project uses LaunchQL modules with a precompiled `launchql.plan`, you can use `pgsql-test` with **zero configuration**. Just call `getConnections()` — and it *just works*:
|
|
428
533
|
|
|
@@ -432,14 +537,13 @@ import { getConnections } from 'pgsql-test';
|
|
|
432
537
|
let db, teardown;
|
|
433
538
|
|
|
434
539
|
beforeAll(async () => {
|
|
435
|
-
({ db, teardown } = await getConnections()); //
|
|
540
|
+
({ db, teardown } = await getConnections()); // LaunchQL module is deployed automatically
|
|
436
541
|
});
|
|
437
542
|
```
|
|
438
543
|
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
If you want to specify a custom path to your LaunchQL module, use `seed.launchql()` explicitly:
|
|
544
|
+
LaunchQL uses Sqitch-compatible syntax with a TypeScript-based migration engine. By default, `pgsql-test` automatically deploys any LaunchQL module found in the current working directory (`process.cwd()`).
|
|
442
545
|
|
|
546
|
+
To specify a custom path to your LaunchQL module, use `seed.launchql()` explicitly:
|
|
443
547
|
|
|
444
548
|
```ts
|
|
445
549
|
import path from 'path';
|
|
@@ -449,11 +553,24 @@ const cwd = path.resolve(__dirname, '../path/to/launchql');
|
|
|
449
553
|
|
|
450
554
|
beforeAll(async () => {
|
|
451
555
|
({ db, teardown } = await getConnections({}, [
|
|
452
|
-
seed.launchql(cwd)
|
|
556
|
+
seed.launchql(cwd)
|
|
453
557
|
]));
|
|
454
558
|
});
|
|
455
559
|
```
|
|
456
560
|
|
|
561
|
+
</details>
|
|
562
|
+
|
|
563
|
+
## Why LaunchQL's Approach?
|
|
564
|
+
|
|
565
|
+
LaunchQL provides the best of both worlds:
|
|
566
|
+
|
|
567
|
+
1. **Sqitch Compatibility**: Keep your familiar Sqitch syntax and migration approach
|
|
568
|
+
2. **TypeScript Performance**: Our TS-rewritten deployment engine delivers up to 10x faster schema deployments
|
|
569
|
+
3. **Developer Experience**: Tight feedback loops with near-instant schema setup for tests
|
|
570
|
+
4. **CI Optimization**: Dramatically reduced test suite run times with optimized deployment
|
|
571
|
+
|
|
572
|
+
By maintaining Sqitch compatibility while supercharging performance, LaunchQL enables you to keep your existing migration patterns while enjoying the speed benefits of our TypeScript engine.
|
|
573
|
+
|
|
457
574
|
## Why LaunchQL's Approach?
|
|
458
575
|
|
|
459
576
|
LaunchQL provides the best of both worlds:
|
|
@@ -474,7 +591,7 @@ This table documents the available options for the `getConnections` function. Th
|
|
|
474
591
|
| Option | Type | Default | Description |
|
|
475
592
|
| ------------------------ | ---------- | ---------------- | --------------------------------------------------------------------------- |
|
|
476
593
|
| `db.extensions` | `string[]` | `[]` | Array of PostgreSQL extensions to include in the test database |
|
|
477
|
-
| `db.cwd` | `string` | `process.cwd()` | Working directory used for LaunchQL
|
|
594
|
+
| `db.cwd` | `string` | `process.cwd()` | Working directory used for LaunchQL or Sqitch projects |
|
|
478
595
|
| `db.connection.user` | `string` | `'app_user'` | User for simulating RLS via `setContext()` |
|
|
479
596
|
| `db.connection.password` | `string` | `'app_password'` | Password for RLS test user |
|
|
480
597
|
| `db.connection.role` | `string` | `'anonymous'` | Default role used during `setContext()` |
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import type { PgTestClientContext } from '@launchql/types';
|
|
2
|
+
/**
|
|
3
|
+
* Generate SQL statements to set PostgreSQL session context variables
|
|
4
|
+
* Uses SET LOCAL ROLE for the 'role' key and set_config() for other variables
|
|
5
|
+
* @param context - Context settings to apply
|
|
6
|
+
* @returns SQL string with SET LOCAL ROLE and set_config() statements
|
|
7
|
+
*/
|
|
8
|
+
export declare function generateContextStatements(context: PgTestClientContext): string;
|
package/context-utils.js
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.generateContextStatements = generateContextStatements;
|
|
4
|
+
/**
|
|
5
|
+
* Generate SQL statements to set PostgreSQL session context variables
|
|
6
|
+
* Uses SET LOCAL ROLE for the 'role' key and set_config() for other variables
|
|
7
|
+
* @param context - Context settings to apply
|
|
8
|
+
* @returns SQL string with SET LOCAL ROLE and set_config() statements
|
|
9
|
+
*/
|
|
10
|
+
function generateContextStatements(context) {
|
|
11
|
+
return Object.entries(context)
|
|
12
|
+
.map(([key, val]) => {
|
|
13
|
+
if (key === 'role') {
|
|
14
|
+
if (val === null || val === undefined) {
|
|
15
|
+
return 'SET LOCAL ROLE NONE;';
|
|
16
|
+
}
|
|
17
|
+
const escapedRole = val.replace(/"/g, '""');
|
|
18
|
+
return `SET LOCAL ROLE "${escapedRole}";`;
|
|
19
|
+
}
|
|
20
|
+
// Use set_config for other context variables
|
|
21
|
+
if (val === null || val === undefined) {
|
|
22
|
+
return `SELECT set_config('${key}', NULL, true);`;
|
|
23
|
+
}
|
|
24
|
+
const escapedVal = val.replace(/'/g, "''");
|
|
25
|
+
return `SELECT set_config('${key}', '${escapedVal}', true);`;
|
|
26
|
+
})
|
|
27
|
+
.join('\n');
|
|
28
|
+
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Generate SQL statements to set PostgreSQL session context variables
|
|
3
|
+
* Uses SET LOCAL ROLE for the 'role' key and set_config() for other variables
|
|
4
|
+
* @param context - Context settings to apply
|
|
5
|
+
* @returns SQL string with SET LOCAL ROLE and set_config() statements
|
|
6
|
+
*/
|
|
7
|
+
export function generateContextStatements(context) {
|
|
8
|
+
return Object.entries(context)
|
|
9
|
+
.map(([key, val]) => {
|
|
10
|
+
if (key === 'role') {
|
|
11
|
+
if (val === null || val === undefined) {
|
|
12
|
+
return 'SET LOCAL ROLE NONE;';
|
|
13
|
+
}
|
|
14
|
+
const escapedRole = val.replace(/"/g, '""');
|
|
15
|
+
return `SET LOCAL ROLE "${escapedRole}";`;
|
|
16
|
+
}
|
|
17
|
+
// Use set_config for other context variables
|
|
18
|
+
if (val === null || val === undefined) {
|
|
19
|
+
return `SELECT set_config('${key}', NULL, true);`;
|
|
20
|
+
}
|
|
21
|
+
const escapedVal = val.replace(/'/g, "''");
|
|
22
|
+
return `SELECT set_config('${key}', '${escapedVal}', true);`;
|
|
23
|
+
})
|
|
24
|
+
.join('\n');
|
|
25
|
+
}
|
package/esm/seed/csv.js
CHANGED
|
@@ -4,6 +4,34 @@ import { parse } from 'csv-parse';
|
|
|
4
4
|
import { createReadStream, createWriteStream, existsSync } from 'fs';
|
|
5
5
|
import { from as copyFrom, to as copyTo } from 'pg-copy-streams';
|
|
6
6
|
const log = new Logger('csv');
|
|
7
|
+
/**
|
|
8
|
+
* Standalone helper function to load CSV files into PostgreSQL tables
|
|
9
|
+
* @param client - PostgreSQL client instance
|
|
10
|
+
* @param tables - Map of table names to CSV file paths
|
|
11
|
+
*/
|
|
12
|
+
export async function loadCsvMap(client, tables) {
|
|
13
|
+
for (const [table, filePath] of Object.entries(tables)) {
|
|
14
|
+
if (!existsSync(filePath)) {
|
|
15
|
+
throw new Error(`CSV file not found: ${filePath}`);
|
|
16
|
+
}
|
|
17
|
+
log.info(`📥 Seeding "${table}" from ${filePath}`);
|
|
18
|
+
const columns = await parseCsvHeader(filePath);
|
|
19
|
+
const quotedColumns = columns.map(col => `"${col.replace(/"/g, '""')}"`);
|
|
20
|
+
const columnList = quotedColumns.join(', ');
|
|
21
|
+
const copyCommand = `COPY ${table} (${columnList}) FROM STDIN WITH CSV HEADER`;
|
|
22
|
+
log.info(`Using columns: ${columnList}`);
|
|
23
|
+
const stream = client.query(copyFrom(copyCommand));
|
|
24
|
+
const source = createReadStream(filePath);
|
|
25
|
+
try {
|
|
26
|
+
await pipeline(source, stream);
|
|
27
|
+
log.success(`✅ Successfully seeded "${table}"`);
|
|
28
|
+
}
|
|
29
|
+
catch (err) {
|
|
30
|
+
log.error(`❌ COPY failed for "${table}": ${err.message}`);
|
|
31
|
+
throw err;
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
}
|
|
7
35
|
export function csv(tables) {
|
|
8
36
|
return {
|
|
9
37
|
async seed(ctx) {
|
package/esm/seed/index.js
CHANGED
|
@@ -2,12 +2,10 @@ import { compose, fn, sqlfile } from './adapters';
|
|
|
2
2
|
import { csv } from './csv';
|
|
3
3
|
import { json } from './json';
|
|
4
4
|
import { launchql } from './launchql';
|
|
5
|
-
import { sqitch } from './sqitch';
|
|
6
5
|
export * from './csv';
|
|
7
6
|
export * from './types';
|
|
8
7
|
export const seed = {
|
|
9
8
|
launchql,
|
|
10
|
-
sqitch,
|
|
11
9
|
json,
|
|
12
10
|
csv,
|
|
13
11
|
compose,
|
package/esm/seed/json.js
CHANGED
|
@@ -1,3 +1,21 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Standalone helper function to insert JSON data into PostgreSQL tables
|
|
3
|
+
* @param client - PostgreSQL client instance
|
|
4
|
+
* @param data - Map of table names to arrays of row objects
|
|
5
|
+
*/
|
|
6
|
+
export async function insertJson(client, data) {
|
|
7
|
+
for (const [table, rows] of Object.entries(data)) {
|
|
8
|
+
if (!Array.isArray(rows) || rows.length === 0)
|
|
9
|
+
continue;
|
|
10
|
+
const columns = Object.keys(rows[0]);
|
|
11
|
+
const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
|
|
12
|
+
const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
|
|
13
|
+
for (const row of rows) {
|
|
14
|
+
const values = columns.map((c) => row[c]);
|
|
15
|
+
await client.query(sql, values);
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
}
|
|
1
19
|
export function json(data) {
|
|
2
20
|
return {
|
|
3
21
|
async seed(ctx) {
|
package/esm/seed/launchql.js
CHANGED
|
@@ -1,19 +1,28 @@
|
|
|
1
1
|
import { LaunchQLPackage } from '@launchql/core';
|
|
2
2
|
import { getEnvOptions } from '@launchql/env';
|
|
3
|
+
/**
|
|
4
|
+
* Standalone helper function to deploy LaunchQL package
|
|
5
|
+
* @param config - PostgreSQL configuration
|
|
6
|
+
* @param cwd - Current working directory (defaults to process.cwd())
|
|
7
|
+
* @param cache - Whether to enable caching (defaults to false)
|
|
8
|
+
*/
|
|
9
|
+
export async function deployLaunchql(config, cwd, cache = false) {
|
|
10
|
+
const proj = new LaunchQLPackage(cwd ?? process.cwd());
|
|
11
|
+
if (!proj.isInModule())
|
|
12
|
+
return;
|
|
13
|
+
await proj.deploy(getEnvOptions({
|
|
14
|
+
pg: config,
|
|
15
|
+
deployment: {
|
|
16
|
+
fast: true,
|
|
17
|
+
usePlan: true,
|
|
18
|
+
cache
|
|
19
|
+
}
|
|
20
|
+
}), proj.getModuleName());
|
|
21
|
+
}
|
|
3
22
|
export function launchql(cwd, cache = false) {
|
|
4
23
|
return {
|
|
5
24
|
async seed(ctx) {
|
|
6
|
-
|
|
7
|
-
if (!proj.isInModule())
|
|
8
|
-
return;
|
|
9
|
-
await proj.deploy(getEnvOptions({
|
|
10
|
-
pg: ctx.config,
|
|
11
|
-
deployment: {
|
|
12
|
-
fast: true,
|
|
13
|
-
usePlan: true,
|
|
14
|
-
cache
|
|
15
|
-
}
|
|
16
|
-
}), proj.getModuleName());
|
|
25
|
+
await deployLaunchql(ctx.config, cwd ?? ctx.connect.cwd, cache);
|
|
17
26
|
}
|
|
18
27
|
};
|
|
19
28
|
}
|
package/esm/seed/sql.js
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { existsSync, readFileSync } from 'fs';
|
|
2
|
+
/**
|
|
3
|
+
* Standalone helper function to load SQL files into PostgreSQL
|
|
4
|
+
* @param client - PostgreSQL client instance
|
|
5
|
+
* @param files - Array of SQL file paths to execute
|
|
6
|
+
*/
|
|
7
|
+
export async function loadSqlFiles(client, files) {
|
|
8
|
+
for (const file of files) {
|
|
9
|
+
if (!existsSync(file)) {
|
|
10
|
+
throw new Error(`SQL file not found: ${file}`);
|
|
11
|
+
}
|
|
12
|
+
const sql = readFileSync(file, 'utf-8');
|
|
13
|
+
await client.query(sql);
|
|
14
|
+
}
|
|
15
|
+
}
|
package/esm/test-client.js
CHANGED
|
@@ -1,5 +1,10 @@
|
|
|
1
1
|
import { Client } from 'pg';
|
|
2
2
|
import { getRoleName } from './roles';
|
|
3
|
+
import { generateContextStatements } from './context-utils';
|
|
4
|
+
import { insertJson } from './seed/json';
|
|
5
|
+
import { loadCsvMap } from './seed/csv';
|
|
6
|
+
import { loadSqlFiles } from './seed/sql';
|
|
7
|
+
import { deployLaunchql } from './seed/launchql';
|
|
3
8
|
export class PgTestClient {
|
|
4
9
|
config;
|
|
5
10
|
client;
|
|
@@ -61,11 +66,7 @@ export class PgTestClient {
|
|
|
61
66
|
}
|
|
62
67
|
setContext(ctx) {
|
|
63
68
|
Object.assign(this.contextSettings, ctx);
|
|
64
|
-
this.ctxStmts =
|
|
65
|
-
.map(([key, val]) => val === null
|
|
66
|
-
? `SELECT set_config('${key}', NULL, true);`
|
|
67
|
-
: `SELECT set_config('${key}', '${val}', true);`)
|
|
68
|
-
.join('\n');
|
|
69
|
+
this.ctxStmts = generateContextStatements(this.contextSettings);
|
|
69
70
|
}
|
|
70
71
|
/**
|
|
71
72
|
* Set authentication context for the current session.
|
|
@@ -100,11 +101,7 @@ export class PgTestClient {
|
|
|
100
101
|
nulledSettings[key] = null;
|
|
101
102
|
});
|
|
102
103
|
nulledSettings.role = defaultRole;
|
|
103
|
-
this.ctxStmts =
|
|
104
|
-
.map(([key, val]) => val === null
|
|
105
|
-
? `SELECT set_config('${key}', NULL, true);`
|
|
106
|
-
: `SELECT set_config('${key}', '${val}', true);`)
|
|
107
|
-
.join('\n');
|
|
104
|
+
this.ctxStmts = generateContextStatements(nulledSettings);
|
|
108
105
|
this.contextSettings = { role: defaultRole };
|
|
109
106
|
}
|
|
110
107
|
async any(query, values) {
|
|
@@ -137,14 +134,35 @@ export class PgTestClient {
|
|
|
137
134
|
async result(query, values) {
|
|
138
135
|
return this.query(query, values);
|
|
139
136
|
}
|
|
137
|
+
async ctxQuery() {
|
|
138
|
+
if (this.ctxStmts) {
|
|
139
|
+
await this.client.query(this.ctxStmts);
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
// NOTE: all queries should call ctxQuery() before executing the query
|
|
140
143
|
async query(query, values) {
|
|
141
144
|
await this.ctxQuery();
|
|
142
145
|
const result = await this.client.query(query, values);
|
|
143
146
|
return result;
|
|
144
147
|
}
|
|
145
|
-
async
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
148
|
+
async loadJson(data) {
|
|
149
|
+
await this.ctxQuery();
|
|
150
|
+
await insertJson(this.client, data);
|
|
151
|
+
}
|
|
152
|
+
async loadSql(files) {
|
|
153
|
+
await this.ctxQuery();
|
|
154
|
+
await loadSqlFiles(this.client, files);
|
|
155
|
+
}
|
|
156
|
+
// NON-RLS load/seed methods:
|
|
157
|
+
async loadCsv(tables) {
|
|
158
|
+
// await this.ctxQuery(); // no point to call ctxQuery() here
|
|
159
|
+
// because POSTGRES doesn't support row-level security on COPY FROM...
|
|
160
|
+
await loadCsvMap(this.client, tables);
|
|
161
|
+
}
|
|
162
|
+
async loadLaunchql(cwd, cache = false) {
|
|
163
|
+
// await this.ctxQuery(); // no point to call ctxQuery() here
|
|
164
|
+
// because deployLaunchql() has its own way of getting the client...
|
|
165
|
+
// so for now, we'll expose this but it's limited
|
|
166
|
+
await deployLaunchql(this.config, cwd, cache);
|
|
149
167
|
}
|
|
150
168
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "pgsql-test",
|
|
3
|
-
"version": "2.
|
|
3
|
+
"version": "2.12.0",
|
|
4
4
|
"author": "Dan Lynch <pyramation@gmail.com>",
|
|
5
5
|
"description": "pgsql-test offers isolated, role-aware, and rollback-friendly PostgreSQL environments for integration tests — giving developers realistic test coverage without external state pollution",
|
|
6
6
|
"main": "index.js",
|
|
@@ -61,16 +61,16 @@
|
|
|
61
61
|
"@types/pg-copy-streams": "^1.2.5"
|
|
62
62
|
},
|
|
63
63
|
"dependencies": {
|
|
64
|
-
"@launchql/core": "^2.12.
|
|
65
|
-
"@launchql/env": "^2.4.
|
|
64
|
+
"@launchql/core": "^2.12.2",
|
|
65
|
+
"@launchql/env": "^2.4.5",
|
|
66
66
|
"@launchql/logger": "^1.1.2",
|
|
67
|
-
"@launchql/server-utils": "^2.4.
|
|
68
|
-
"@launchql/types": "^2.
|
|
67
|
+
"@launchql/server-utils": "^2.4.5",
|
|
68
|
+
"@launchql/types": "^2.7.0",
|
|
69
69
|
"csv-parse": "^5.5.5",
|
|
70
70
|
"pg": "^8.16.0",
|
|
71
|
-
"pg-cache": "^1.3.
|
|
71
|
+
"pg-cache": "^1.3.6",
|
|
72
72
|
"pg-copy-streams": "^6.0.6",
|
|
73
73
|
"pg-env": "^1.1.1"
|
|
74
74
|
},
|
|
75
|
-
"gitHead": "
|
|
75
|
+
"gitHead": "7a71d63b0ac5b0ea13f2c55cbb09d5fc0957a4f5"
|
|
76
76
|
}
|
package/seed/csv.d.ts
CHANGED
|
@@ -1,9 +1,15 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { Client } from 'pg';
|
|
2
|
+
import type { PgTestClient } from '../test-client';
|
|
2
3
|
import { SeedAdapter } from './types';
|
|
3
|
-
interface CsvSeedMap {
|
|
4
|
+
export interface CsvSeedMap {
|
|
4
5
|
[tableName: string]: string;
|
|
5
6
|
}
|
|
7
|
+
/**
|
|
8
|
+
* Standalone helper function to load CSV files into PostgreSQL tables
|
|
9
|
+
* @param client - PostgreSQL client instance
|
|
10
|
+
* @param tables - Map of table names to CSV file paths
|
|
11
|
+
*/
|
|
12
|
+
export declare function loadCsvMap(client: Client, tables: CsvSeedMap): Promise<void>;
|
|
6
13
|
export declare function csv(tables: CsvSeedMap): SeedAdapter;
|
|
7
14
|
export declare function copyCsvIntoTable(pg: PgTestClient, table: string, filePath: string): Promise<void>;
|
|
8
15
|
export declare function exportTableToCsv(pg: PgTestClient, table: string, filePath: string): Promise<void>;
|
|
9
|
-
export {};
|
package/seed/csv.js
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.loadCsvMap = loadCsvMap;
|
|
3
4
|
exports.csv = csv;
|
|
4
5
|
exports.copyCsvIntoTable = copyCsvIntoTable;
|
|
5
6
|
exports.exportTableToCsv = exportTableToCsv;
|
|
@@ -9,6 +10,34 @@ const csv_parse_1 = require("csv-parse");
|
|
|
9
10
|
const fs_1 = require("fs");
|
|
10
11
|
const pg_copy_streams_1 = require("pg-copy-streams");
|
|
11
12
|
const log = new logger_1.Logger('csv');
|
|
13
|
+
/**
|
|
14
|
+
* Standalone helper function to load CSV files into PostgreSQL tables
|
|
15
|
+
* @param client - PostgreSQL client instance
|
|
16
|
+
* @param tables - Map of table names to CSV file paths
|
|
17
|
+
*/
|
|
18
|
+
async function loadCsvMap(client, tables) {
|
|
19
|
+
for (const [table, filePath] of Object.entries(tables)) {
|
|
20
|
+
if (!(0, fs_1.existsSync)(filePath)) {
|
|
21
|
+
throw new Error(`CSV file not found: ${filePath}`);
|
|
22
|
+
}
|
|
23
|
+
log.info(`📥 Seeding "${table}" from ${filePath}`);
|
|
24
|
+
const columns = await parseCsvHeader(filePath);
|
|
25
|
+
const quotedColumns = columns.map(col => `"${col.replace(/"/g, '""')}"`);
|
|
26
|
+
const columnList = quotedColumns.join(', ');
|
|
27
|
+
const copyCommand = `COPY ${table} (${columnList}) FROM STDIN WITH CSV HEADER`;
|
|
28
|
+
log.info(`Using columns: ${columnList}`);
|
|
29
|
+
const stream = client.query((0, pg_copy_streams_1.from)(copyCommand));
|
|
30
|
+
const source = (0, fs_1.createReadStream)(filePath);
|
|
31
|
+
try {
|
|
32
|
+
await (0, promises_1.pipeline)(source, stream);
|
|
33
|
+
log.success(`✅ Successfully seeded "${table}"`);
|
|
34
|
+
}
|
|
35
|
+
catch (err) {
|
|
36
|
+
log.error(`❌ COPY failed for "${table}": ${err.message}`);
|
|
37
|
+
throw err;
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
}
|
|
12
41
|
function csv(tables) {
|
|
13
42
|
return {
|
|
14
43
|
async seed(ctx) {
|
package/seed/index.d.ts
CHANGED
|
@@ -2,12 +2,10 @@ import { compose, fn, sqlfile } from './adapters';
|
|
|
2
2
|
import { csv } from './csv';
|
|
3
3
|
import { json } from './json';
|
|
4
4
|
import { launchql } from './launchql';
|
|
5
|
-
import { sqitch } from './sqitch';
|
|
6
5
|
export * from './csv';
|
|
7
6
|
export * from './types';
|
|
8
7
|
export declare const seed: {
|
|
9
8
|
launchql: typeof launchql;
|
|
10
|
-
sqitch: typeof sqitch;
|
|
11
9
|
json: typeof json;
|
|
12
10
|
csv: typeof csv;
|
|
13
11
|
compose: typeof compose;
|
package/seed/index.js
CHANGED
|
@@ -19,12 +19,10 @@ const adapters_1 = require("./adapters");
|
|
|
19
19
|
const csv_1 = require("./csv");
|
|
20
20
|
const json_1 = require("./json");
|
|
21
21
|
const launchql_1 = require("./launchql");
|
|
22
|
-
const sqitch_1 = require("./sqitch");
|
|
23
22
|
__exportStar(require("./csv"), exports);
|
|
24
23
|
__exportStar(require("./types"), exports);
|
|
25
24
|
exports.seed = {
|
|
26
25
|
launchql: launchql_1.launchql,
|
|
27
|
-
sqitch: sqitch_1.sqitch,
|
|
28
26
|
json: json_1.json,
|
|
29
27
|
csv: csv_1.csv,
|
|
30
28
|
compose: adapters_1.compose,
|
package/seed/json.d.ts
CHANGED
|
@@ -1,6 +1,12 @@
|
|
|
1
|
+
import type { Client } from 'pg';
|
|
1
2
|
import { SeedAdapter } from './types';
|
|
2
|
-
interface JsonSeedMap {
|
|
3
|
+
export interface JsonSeedMap {
|
|
3
4
|
[table: string]: Record<string, any>[];
|
|
4
5
|
}
|
|
6
|
+
/**
|
|
7
|
+
* Standalone helper function to insert JSON data into PostgreSQL tables
|
|
8
|
+
* @param client - PostgreSQL client instance
|
|
9
|
+
* @param data - Map of table names to arrays of row objects
|
|
10
|
+
*/
|
|
11
|
+
export declare function insertJson(client: Client, data: JsonSeedMap): Promise<void>;
|
|
5
12
|
export declare function json(data: JsonSeedMap): SeedAdapter;
|
|
6
|
-
export {};
|
package/seed/json.js
CHANGED
|
@@ -1,6 +1,25 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.insertJson = insertJson;
|
|
3
4
|
exports.json = json;
|
|
5
|
+
/**
|
|
6
|
+
* Standalone helper function to insert JSON data into PostgreSQL tables
|
|
7
|
+
* @param client - PostgreSQL client instance
|
|
8
|
+
* @param data - Map of table names to arrays of row objects
|
|
9
|
+
*/
|
|
10
|
+
async function insertJson(client, data) {
|
|
11
|
+
for (const [table, rows] of Object.entries(data)) {
|
|
12
|
+
if (!Array.isArray(rows) || rows.length === 0)
|
|
13
|
+
continue;
|
|
14
|
+
const columns = Object.keys(rows[0]);
|
|
15
|
+
const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
|
|
16
|
+
const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
|
|
17
|
+
for (const row of rows) {
|
|
18
|
+
const values = columns.map((c) => row[c]);
|
|
19
|
+
await client.query(sql, values);
|
|
20
|
+
}
|
|
21
|
+
}
|
|
22
|
+
}
|
|
4
23
|
function json(data) {
|
|
5
24
|
return {
|
|
6
25
|
async seed(ctx) {
|
package/seed/launchql.d.ts
CHANGED
|
@@ -1,2 +1,10 @@
|
|
|
1
|
+
import type { PgConfig } from 'pg-env';
|
|
1
2
|
import { SeedAdapter } from './types';
|
|
3
|
+
/**
|
|
4
|
+
* Standalone helper function to deploy LaunchQL package
|
|
5
|
+
* @param config - PostgreSQL configuration
|
|
6
|
+
* @param cwd - Current working directory (defaults to process.cwd())
|
|
7
|
+
* @param cache - Whether to enable caching (defaults to false)
|
|
8
|
+
*/
|
|
9
|
+
export declare function deployLaunchql(config: PgConfig, cwd?: string, cache?: boolean): Promise<void>;
|
|
2
10
|
export declare function launchql(cwd?: string, cache?: boolean): SeedAdapter;
|
package/seed/launchql.js
CHANGED
|
@@ -1,22 +1,32 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.deployLaunchql = deployLaunchql;
|
|
3
4
|
exports.launchql = launchql;
|
|
4
5
|
const core_1 = require("@launchql/core");
|
|
5
6
|
const env_1 = require("@launchql/env");
|
|
7
|
+
/**
|
|
8
|
+
* Standalone helper function to deploy LaunchQL package
|
|
9
|
+
* @param config - PostgreSQL configuration
|
|
10
|
+
* @param cwd - Current working directory (defaults to process.cwd())
|
|
11
|
+
* @param cache - Whether to enable caching (defaults to false)
|
|
12
|
+
*/
|
|
13
|
+
async function deployLaunchql(config, cwd, cache = false) {
|
|
14
|
+
const proj = new core_1.LaunchQLPackage(cwd ?? process.cwd());
|
|
15
|
+
if (!proj.isInModule())
|
|
16
|
+
return;
|
|
17
|
+
await proj.deploy((0, env_1.getEnvOptions)({
|
|
18
|
+
pg: config,
|
|
19
|
+
deployment: {
|
|
20
|
+
fast: true,
|
|
21
|
+
usePlan: true,
|
|
22
|
+
cache
|
|
23
|
+
}
|
|
24
|
+
}), proj.getModuleName());
|
|
25
|
+
}
|
|
6
26
|
function launchql(cwd, cache = false) {
|
|
7
27
|
return {
|
|
8
28
|
async seed(ctx) {
|
|
9
|
-
|
|
10
|
-
if (!proj.isInModule())
|
|
11
|
-
return;
|
|
12
|
-
await proj.deploy((0, env_1.getEnvOptions)({
|
|
13
|
-
pg: ctx.config,
|
|
14
|
-
deployment: {
|
|
15
|
-
fast: true,
|
|
16
|
-
usePlan: true,
|
|
17
|
-
cache
|
|
18
|
-
}
|
|
19
|
-
}), proj.getModuleName());
|
|
29
|
+
await deployLaunchql(ctx.config, cwd ?? ctx.connect.cwd, cache);
|
|
20
30
|
}
|
|
21
31
|
};
|
|
22
32
|
}
|
package/seed/sql.d.ts
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import type { Client } from 'pg';
|
|
2
|
+
/**
|
|
3
|
+
* Standalone helper function to load SQL files into PostgreSQL
|
|
4
|
+
* @param client - PostgreSQL client instance
|
|
5
|
+
* @param files - Array of SQL file paths to execute
|
|
6
|
+
*/
|
|
7
|
+
export declare function loadSqlFiles(client: Client, files: string[]): Promise<void>;
|
package/seed/sql.js
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.loadSqlFiles = loadSqlFiles;
|
|
4
|
+
const fs_1 = require("fs");
|
|
5
|
+
/**
|
|
6
|
+
* Standalone helper function to load SQL files into PostgreSQL
|
|
7
|
+
* @param client - PostgreSQL client instance
|
|
8
|
+
* @param files - Array of SQL file paths to execute
|
|
9
|
+
*/
|
|
10
|
+
async function loadSqlFiles(client, files) {
|
|
11
|
+
for (const file of files) {
|
|
12
|
+
if (!(0, fs_1.existsSync)(file)) {
|
|
13
|
+
throw new Error(`SQL file not found: ${file}`);
|
|
14
|
+
}
|
|
15
|
+
const sql = (0, fs_1.readFileSync)(file, 'utf-8');
|
|
16
|
+
await client.query(sql);
|
|
17
|
+
}
|
|
18
|
+
}
|
package/test-client.d.ts
CHANGED
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
import { Client, QueryResult } from 'pg';
|
|
2
2
|
import { PgConfig } from 'pg-env';
|
|
3
3
|
import { AuthOptions, PgTestConnectionOptions } from '@launchql/types';
|
|
4
|
+
import { type JsonSeedMap } from './seed/json';
|
|
5
|
+
import { type CsvSeedMap } from './seed/csv';
|
|
4
6
|
export type PgTestClientOpts = {
|
|
5
7
|
deferConnect?: boolean;
|
|
6
8
|
trackConnect?: (p: Promise<any>) => void;
|
|
@@ -44,6 +46,10 @@ export declare class PgTestClient {
|
|
|
44
46
|
manyOrNone<T = any>(query: string, values?: any[]): Promise<T[]>;
|
|
45
47
|
none(query: string, values?: any[]): Promise<void>;
|
|
46
48
|
result(query: string, values?: any[]): Promise<import('pg').QueryResult>;
|
|
47
|
-
query<T = any>(query: string, values?: any[]): Promise<QueryResult<T>>;
|
|
48
49
|
ctxQuery(): Promise<void>;
|
|
50
|
+
query<T = any>(query: string, values?: any[]): Promise<QueryResult<T>>;
|
|
51
|
+
loadJson(data: JsonSeedMap): Promise<void>;
|
|
52
|
+
loadSql(files: string[]): Promise<void>;
|
|
53
|
+
loadCsv(tables: CsvSeedMap): Promise<void>;
|
|
54
|
+
loadLaunchql(cwd?: string, cache?: boolean): Promise<void>;
|
|
49
55
|
}
|
package/test-client.js
CHANGED
|
@@ -3,6 +3,11 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
3
3
|
exports.PgTestClient = void 0;
|
|
4
4
|
const pg_1 = require("pg");
|
|
5
5
|
const roles_1 = require("./roles");
|
|
6
|
+
const context_utils_1 = require("./context-utils");
|
|
7
|
+
const json_1 = require("./seed/json");
|
|
8
|
+
const csv_1 = require("./seed/csv");
|
|
9
|
+
const sql_1 = require("./seed/sql");
|
|
10
|
+
const launchql_1 = require("./seed/launchql");
|
|
6
11
|
class PgTestClient {
|
|
7
12
|
config;
|
|
8
13
|
client;
|
|
@@ -64,11 +69,7 @@ class PgTestClient {
|
|
|
64
69
|
}
|
|
65
70
|
setContext(ctx) {
|
|
66
71
|
Object.assign(this.contextSettings, ctx);
|
|
67
|
-
this.ctxStmts =
|
|
68
|
-
.map(([key, val]) => val === null
|
|
69
|
-
? `SELECT set_config('${key}', NULL, true);`
|
|
70
|
-
: `SELECT set_config('${key}', '${val}', true);`)
|
|
71
|
-
.join('\n');
|
|
72
|
+
this.ctxStmts = (0, context_utils_1.generateContextStatements)(this.contextSettings);
|
|
72
73
|
}
|
|
73
74
|
/**
|
|
74
75
|
* Set authentication context for the current session.
|
|
@@ -103,11 +104,7 @@ class PgTestClient {
|
|
|
103
104
|
nulledSettings[key] = null;
|
|
104
105
|
});
|
|
105
106
|
nulledSettings.role = defaultRole;
|
|
106
|
-
this.ctxStmts =
|
|
107
|
-
.map(([key, val]) => val === null
|
|
108
|
-
? `SELECT set_config('${key}', NULL, true);`
|
|
109
|
-
: `SELECT set_config('${key}', '${val}', true);`)
|
|
110
|
-
.join('\n');
|
|
107
|
+
this.ctxStmts = (0, context_utils_1.generateContextStatements)(nulledSettings);
|
|
111
108
|
this.contextSettings = { role: defaultRole };
|
|
112
109
|
}
|
|
113
110
|
async any(query, values) {
|
|
@@ -140,15 +137,36 @@ class PgTestClient {
|
|
|
140
137
|
async result(query, values) {
|
|
141
138
|
return this.query(query, values);
|
|
142
139
|
}
|
|
140
|
+
async ctxQuery() {
|
|
141
|
+
if (this.ctxStmts) {
|
|
142
|
+
await this.client.query(this.ctxStmts);
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
// NOTE: all queries should call ctxQuery() before executing the query
|
|
143
146
|
async query(query, values) {
|
|
144
147
|
await this.ctxQuery();
|
|
145
148
|
const result = await this.client.query(query, values);
|
|
146
149
|
return result;
|
|
147
150
|
}
|
|
148
|
-
async
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
151
|
+
async loadJson(data) {
|
|
152
|
+
await this.ctxQuery();
|
|
153
|
+
await (0, json_1.insertJson)(this.client, data);
|
|
154
|
+
}
|
|
155
|
+
async loadSql(files) {
|
|
156
|
+
await this.ctxQuery();
|
|
157
|
+
await (0, sql_1.loadSqlFiles)(this.client, files);
|
|
158
|
+
}
|
|
159
|
+
// NON-RLS load/seed methods:
|
|
160
|
+
async loadCsv(tables) {
|
|
161
|
+
// await this.ctxQuery(); // no point to call ctxQuery() here
|
|
162
|
+
// because POSTGRES doesn't support row-level security on COPY FROM...
|
|
163
|
+
await (0, csv_1.loadCsvMap)(this.client, tables);
|
|
164
|
+
}
|
|
165
|
+
async loadLaunchql(cwd, cache = false) {
|
|
166
|
+
// await this.ctxQuery(); // no point to call ctxQuery() here
|
|
167
|
+
// because deployLaunchql() has it's own way of getting the client...
|
|
168
|
+
// so for now, we'll expose this but it's limited
|
|
169
|
+
await (0, launchql_1.deployLaunchql)(this.config, cwd, cache);
|
|
152
170
|
}
|
|
153
171
|
}
|
|
154
172
|
exports.PgTestClient = PgTestClient;
|
package/esm/seed/sqitch.js
DELETED
|
@@ -1,17 +0,0 @@
|
|
|
1
|
-
import { LaunchQLPackage } from '@launchql/core';
|
|
2
|
-
import { getEnvOptions } from '@launchql/env';
|
|
3
|
-
export function sqitch(cwd) {
|
|
4
|
-
return {
|
|
5
|
-
async seed(ctx) {
|
|
6
|
-
const proj = new LaunchQLPackage(cwd ?? ctx.connect.cwd);
|
|
7
|
-
if (!proj.isInModule())
|
|
8
|
-
return;
|
|
9
|
-
await proj.deploy(getEnvOptions({
|
|
10
|
-
pg: ctx.config,
|
|
11
|
-
deployment: {
|
|
12
|
-
fast: false
|
|
13
|
-
}
|
|
14
|
-
}), proj.getModuleName(), true);
|
|
15
|
-
}
|
|
16
|
-
};
|
|
17
|
-
}
|
package/seed/sqitch.d.ts
DELETED
package/seed/sqitch.js
DELETED
|
@@ -1,20 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.sqitch = sqitch;
|
|
4
|
-
const core_1 = require("@launchql/core");
|
|
5
|
-
const env_1 = require("@launchql/env");
|
|
6
|
-
function sqitch(cwd) {
|
|
7
|
-
return {
|
|
8
|
-
async seed(ctx) {
|
|
9
|
-
const proj = new core_1.LaunchQLPackage(cwd ?? ctx.connect.cwd);
|
|
10
|
-
if (!proj.isInModule())
|
|
11
|
-
return;
|
|
12
|
-
await proj.deploy((0, env_1.getEnvOptions)({
|
|
13
|
-
pg: ctx.config,
|
|
14
|
-
deployment: {
|
|
15
|
-
fast: false
|
|
16
|
-
}
|
|
17
|
-
}), proj.getModuleName(), true);
|
|
18
|
-
}
|
|
19
|
-
};
|
|
20
|
-
}
|