pgsql-test 2.18.18 → 2.19.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/seed/csv.js +2 -100
- package/esm/seed/index.js +0 -1
- package/esm/seed/json.js +2 -30
- package/esm/seed/pgpm.js +1 -21
- package/esm/test-client.js +5 -5
- package/package.json +5 -6
- package/seed/csv.d.ts +1 -13
- package/seed/csv.js +2 -103
- package/seed/index.d.ts +0 -1
- package/seed/index.js +0 -1
- package/seed/json.d.ts +1 -10
- package/seed/json.js +2 -31
- package/seed/pgpm.d.ts +0 -8
- package/seed/pgpm.js +2 -23
- package/test-client.d.ts +2 -2
- package/test-client.js +8 -8
- package/esm/seed/sql.js +0 -15
- package/seed/sql.d.ts +0 -7
- package/seed/sql.js +0 -18
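Taken together, the change set is a single refactor: the standalone seeding helpers (CSV COPY loading, JSON row insertion, SQL file execution, and pgpm deployment) move out of pgsql-test into a new pgsql-seed dependency, and pgsql-test becomes a set of thin wrappers around it. A migration sketch in TypeScript; the 2.18.x deep-import path is an assumption based on the file list above, and only helper names this diff actually shows are used:

    // 2.18.x (assumed): helpers shipped inside pgsql-test
    // import { loadCsvMap, insertJson, loadSqlFiles } from 'pgsql-test/seed';

    // 2.19.x: the same functionality is imported from pgsql-seed
    import { Client } from 'pg';
    import { loadCsvMap, insertJsonMap, loadSqlFiles, deployPgpm } from 'pgsql-seed';

    const client = new Client();
    await client.connect();
    await loadCsvMap(client, { users: './fixtures/users.csv' });      // table -> CSV path
    await insertJsonMap(client, { posts: [{ id: 1, title: 'hi' }] }); // table -> rows
    await loadSqlFiles(client, ['./sql/schema.sql']);                 // executed in order
    await client.end();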
package/esm/seed/csv.js
CHANGED
@@ -1,108 +1,10 @@
-import { pipeline } from 'node:stream/promises';
-import { Logger } from '@pgpmjs/logger';
-import { parse } from 'csv-parse';
-import { createReadStream, createWriteStream, existsSync } from 'fs';
-import { from as copyFrom, to as copyTo } from 'pg-copy-streams';
-const log = new Logger('csv');
-/**
- * Standalone helper function to load CSV files into PostgreSQL tables
- * @param client - PostgreSQL client instance
- * @param tables - Map of table names to CSV file paths
- */
-export async function loadCsvMap(client, tables) {
-    for (const [table, filePath] of Object.entries(tables)) {
-        if (!existsSync(filePath)) {
-            throw new Error(`CSV file not found: ${filePath}`);
-        }
-        log.info(`📥 Seeding "${table}" from ${filePath}`);
-        const columns = await parseCsvHeader(filePath);
-        const quotedColumns = columns.map(col => `"${col.replace(/"/g, '""')}"`);
-        const columnList = quotedColumns.join(', ');
-        const copyCommand = `COPY ${table} (${columnList}) FROM STDIN WITH CSV HEADER`;
-        log.info(`Using columns: ${columnList}`);
-        const stream = client.query(copyFrom(copyCommand));
-        const source = createReadStream(filePath);
-        try {
-            await pipeline(source, stream);
-            log.success(`✅ Successfully seeded "${table}"`);
-        }
-        catch (err) {
-            log.error(`❌ COPY failed for "${table}": ${err.message}`);
-            throw err;
-        }
-    }
-}
+import { loadCsv } from 'pgsql-seed';
 export function csv(tables) {
     return {
         async seed(ctx) {
             for (const [table, filePath] of Object.entries(tables)) {
-                if (!existsSync(filePath)) {
-                    throw new Error(`CSV file not found: ${filePath}`);
-                }
-                log.info(`📥 Seeding "${table}" from ${filePath}`);
-                await copyCsvIntoTable(ctx.pg, table, filePath);
+                await loadCsv(ctx.pg, table, filePath);
             }
         }
     };
 }
-async function parseCsvHeader(filePath) {
-    const file = createReadStream(filePath);
-    const parser = parse({
-        bom: true,
-        to_line: 1,
-        skip_empty_lines: true,
-    });
-    return new Promise((resolve, reject) => {
-        const cleanup = (err) => {
-            parser.destroy();
-            file.destroy();
-            if (err)
-                reject(err);
-        };
-        parser.on('readable', () => {
-            const row = parser.read();
-            if (!row)
-                return;
-            if (row.length === 0) {
-                cleanup(new Error('CSV header has no columns'));
-                return;
-            }
-            cleanup();
-            resolve(row);
-        });
-        parser.on('error', cleanup);
-        file.on('error', cleanup);
-        file.pipe(parser);
-    });
-}
-export async function copyCsvIntoTable(pg, table, filePath) {
-    const client = pg.client;
-    const columns = await parseCsvHeader(filePath);
-    const quotedColumns = columns.map(col => `"${col.replace(/"/g, '""')}"`);
-    const columnList = quotedColumns.join(', ');
-    const copyCommand = `COPY ${table} (${columnList}) FROM STDIN WITH CSV HEADER`;
-    log.info(`Using columns: ${columnList}`);
-    const stream = client.query(copyFrom(copyCommand));
-    const source = createReadStream(filePath);
-    try {
-        await pipeline(source, stream);
-        log.success(`✅ Successfully seeded "${table}"`);
-    }
-    catch (err) {
-        log.error(`❌ COPY failed for "${table}": ${err.message}`);
-        throw err;
-    }
-}
-export async function exportTableToCsv(pg, table, filePath) {
-    const client = pg.client;
-    const stream = client.query(copyTo(`COPY ${table} TO STDOUT WITH CSV HEADER`));
-    const target = createWriteStream(filePath);
-    try {
-        await pipeline(stream, target);
-        log.success(`✅ Exported "${table}" to ${filePath}`);
-    }
-    catch (err) {
-        log.error(`❌ Failed to export "${table}": ${err.message}`);
-        throw err;
-    }
-}
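The csv() adapter body shrinks to a single call: the existence check, header parsing, column quoting, and the COPY FROM STDIN pipeline now live behind pgsql-seed's loadCsv(). A minimal shape sketch; whether ctx.pg is a raw pg Client or the PgTestClient wrapper (the deleted copyCsvIntoTable expected the wrapper and read its .client) is not visible in this diff:

    import { csv } from 'pgsql-test'; // export location assumed
    const adapter = csv({
      users: './fixtures/users.csv',
      posts: './fixtures/posts.csv',
    });
    // The harness invokes adapter.seed(ctx); per this diff the adapter forwards
    // ctx.pg to loadCsv() once per [table, filePath] entry, in object-key order.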
package/esm/seed/index.js
CHANGED
package/esm/seed/json.js
CHANGED
@@ -1,36 +1,8 @@
-/**
- * Standalone helper function to insert JSON data into PostgreSQL tables
- * @param client - PostgreSQL client instance
- * @param data - Map of table names to arrays of row objects
- */
-export async function insertJson(client, data) {
-    for (const [table, rows] of Object.entries(data)) {
-        if (!Array.isArray(rows) || rows.length === 0)
-            continue;
-        const columns = Object.keys(rows[0]);
-        const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
-        const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
-        for (const row of rows) {
-            const values = columns.map((c) => row[c]);
-            await client.query(sql, values);
-        }
-    }
-}
+import { insertJsonMap } from 'pgsql-seed';
 export function json(data) {
     return {
         async seed(ctx) {
-            const pg = ctx.pg;
-            for (const [table, rows] of Object.entries(data)) {
-                if (!Array.isArray(rows) || rows.length === 0)
-                    continue;
-                const columns = Object.keys(rows[0]);
-                const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
-                const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
-                for (const row of rows) {
-                    const values = columns.map((c) => row[c]);
-                    await pg.query(sql, values);
-                }
-            }
+            await insertJsonMap(ctx.pg, data);
         }
     };
 }
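For reference, the deleted inline logic (presumably preserved inside pgsql-seed's insertJsonMap) derived one parameterized INSERT per table from the first row's keys and ran it row by row. A worked example of what that construction produces:

    const rows = [{ id: 1, name: 'Ada' }, { id: 2, name: 'Alan' }];
    const columns = Object.keys(rows[0]);                               // ['id', 'name']
    const placeholders = columns.map((_, i) => `$${i + 1}`).join(', '); // '$1, $2'
    const sql = `INSERT INTO users (${columns.join(', ')}) VALUES (${placeholders})`;
    // => 'INSERT INTO users (id, name) VALUES ($1, $2)', executed once per row
    // with [1, 'Ada'] and then [2, 'Alan']. Values are parameterized, but table
    // and column names were interpolated as-is, so fixture keys had to be trusted.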
package/esm/seed/pgpm.js
CHANGED
@@ -1,24 +1,4 @@
-import { PgpmPackage } from '@pgpmjs/core';
-import { getEnvOptions } from '@pgpmjs/env';
-/**
- * Standalone helper function to deploy pgpm package
- * @param config - PostgreSQL configuration
- * @param cwd - Current working directory (defaults to process.cwd())
- * @param cache - Whether to enable caching (defaults to false)
- */
-export async function deployPgpm(config, cwd, cache = false) {
-    const proj = new PgpmPackage(cwd ?? process.cwd());
-    if (!proj.isInModule())
-        return;
-    await proj.deploy(getEnvOptions({
-        pg: config,
-        deployment: {
-            fast: true,
-            usePlan: true,
-            cache
-        }
-    }), proj.getModuleName());
-}
+import { deployPgpm } from 'pgsql-seed';
 export function pgpm(cwd, cache = false) {
     return {
         async seed(ctx) {
package/esm/test-client.js
CHANGED
@@ -1,10 +1,10 @@
 import { Client } from 'pg';
 import { getRoleName } from './roles';
 import { generateContextStatements } from './context-utils';
-import { insertJson } from './seed/json';
-import { loadCsvMap } from './seed/csv';
-import { loadSqlFiles } from './seed/sql';
-import { deployPgpm } from './seed/pgpm';
+import { insertJsonMap } from 'pgsql-seed';
+import { loadCsvMap } from 'pgsql-seed';
+import { loadSqlFiles } from 'pgsql-seed';
+import { deployPgpm } from 'pgsql-seed';
 export class PgTestClient {
     config;
     client;
@@ -147,7 +147,7 @@ export class PgTestClient {
     }
     async loadJson(data) {
         await this.ctxQuery();
-        await insertJson(this.client, data);
+        await insertJsonMap(this.client, data);
     }
     async loadSql(files) {
         await this.ctxQuery();
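Because every public PgTestClient method keeps its signature, suites written against 2.18.x should run unchanged; only the internals were rerouted. A sketch of the seeding surface, with client construction elided since this diff does not show the constructor:

    // pg: PgTestClient, created however your suite already does it
    await pg.loadSql(['./sql/schema.sql']);                 // runs ctxQuery() first (RLS context)
    await pg.loadJson({ users: [{ id: 1, name: 'Ada' }] }); // ditto
    await pg.loadCsv({ posts: './fixtures/posts.csv' });    // COPY path, no RLS context
    await pg.loadPgpm();                                    // deploys with this.config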
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "pgsql-test",
-  "version": "2.18.18",
+  "version": "2.19.1",
   "author": "Constructive <developers@constructive.io>",
   "description": "pgsql-test offers isolated, role-aware, and rollback-friendly PostgreSQL environments for integration tests — giving developers realistic test coverage without external state pollution",
   "main": "index.js",
@@ -60,16 +60,15 @@
     "makage": "^0.1.9"
   },
   "dependencies": {
-    "@pgpmjs/core": "^4.…
+    "@pgpmjs/core": "^4.4.0",
     "@pgpmjs/env": "^2.8.11",
     "@pgpmjs/logger": "^1.3.5",
     "@pgpmjs/server-utils": "^2.8.11",
     "@pgpmjs/types": "^2.12.8",
-    "csv-parse": "^6.1.0",
     "pg": "^8.16.3",
     "pg-cache": "^1.6.11",
-    "pg-…
-    "…
+    "pg-env": "^1.2.4",
+    "pgsql-seed": "^0.2.1"
   },
-  "gitHead": "…
+  "gitHead": "bda56442f70c77c98276bc7bab0450308c975df8"
 }
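Dependency-wise, csv-parse drops out (its header parsing moved into pgsql-seed) along with what the truncated lines suggest was pg-copy-streams, while pgsql-seed ^0.2.1 arrives and pg-env lands at ^1.2.4. A quick post-upgrade smoke check, a hypothetical snippet rather than anything this package documents:

    // smoke.ts: verify the new dependency resolves and carries the expected surface
    import { loadCsvMap, insertJsonMap, loadSqlFiles, deployPgpm } from 'pgsql-seed';
    for (const fn of [loadCsvMap, insertJsonMap, loadSqlFiles, deployPgpm]) {
      console.log(fn.name, typeof fn); // each should print '... function'
    }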
package/seed/csv.d.ts
CHANGED
@@ -1,15 +1,3 @@
-import { Client } from 'pg';
-import type { PgTestClient } from '../test-client';
+import { type CsvSeedMap } from 'pgsql-seed';
 import { SeedAdapter } from './types';
-export interface CsvSeedMap {
-    [tableName: string]: string;
-}
-/**
- * Standalone helper function to load CSV files into PostgreSQL tables
- * @param client - PostgreSQL client instance
- * @param tables - Map of table names to CSV file paths
- */
-export declare function loadCsvMap(client: Client, tables: CsvSeedMap): Promise<void>;
 export declare function csv(tables: CsvSeedMap): SeedAdapter;
-export declare function copyCsvIntoTable(pg: PgTestClient, table: string, filePath: string): Promise<void>;
-export declare function exportTableToCsv(pg: PgTestClient, table: string, filePath: string): Promise<void>;
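The deleted interface documents what CsvSeedMap is, and given the re-export presumably still is: a plain map from table name to CSV file path. For example:

    import { type CsvSeedMap } from 'pgsql-seed';
    const tables: CsvSeedMap = {
      users: './fixtures/users.csv',
      'app.events': './fixtures/events.csv', // keys are arbitrary strings
    };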
package/seed/csv.js
CHANGED
@@ -1,114 +1,13 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.loadCsvMap = loadCsvMap;
 exports.csv = csv;
-exports.copyCsvIntoTable = copyCsvIntoTable;
-exports.exportTableToCsv = exportTableToCsv;
-const promises_1 = require("node:stream/promises");
-const logger_1 = require("@pgpmjs/logger");
-const csv_parse_1 = require("csv-parse");
-const fs_1 = require("fs");
-const pg_copy_streams_1 = require("pg-copy-streams");
-const log = new logger_1.Logger('csv');
-/**
- * Standalone helper function to load CSV files into PostgreSQL tables
- * @param client - PostgreSQL client instance
- * @param tables - Map of table names to CSV file paths
- */
-async function loadCsvMap(client, tables) {
-    for (const [table, filePath] of Object.entries(tables)) {
-        if (!(0, fs_1.existsSync)(filePath)) {
-            throw new Error(`CSV file not found: ${filePath}`);
-        }
-        log.info(`📥 Seeding "${table}" from ${filePath}`);
-        const columns = await parseCsvHeader(filePath);
-        const quotedColumns = columns.map(col => `"${col.replace(/"/g, '""')}"`);
-        const columnList = quotedColumns.join(', ');
-        const copyCommand = `COPY ${table} (${columnList}) FROM STDIN WITH CSV HEADER`;
-        log.info(`Using columns: ${columnList}`);
-        const stream = client.query((0, pg_copy_streams_1.from)(copyCommand));
-        const source = (0, fs_1.createReadStream)(filePath);
-        try {
-            await (0, promises_1.pipeline)(source, stream);
-            log.success(`✅ Successfully seeded "${table}"`);
-        }
-        catch (err) {
-            log.error(`❌ COPY failed for "${table}": ${err.message}`);
-            throw err;
-        }
-    }
-}
+const pgsql_seed_1 = require("pgsql-seed");
 function csv(tables) {
     return {
         async seed(ctx) {
             for (const [table, filePath] of Object.entries(tables)) {
-                if (!(0, fs_1.existsSync)(filePath)) {
-                    throw new Error(`CSV file not found: ${filePath}`);
-                }
-                log.info(`📥 Seeding "${table}" from ${filePath}`);
-                await copyCsvIntoTable(ctx.pg, table, filePath);
+                await (0, pgsql_seed_1.loadCsv)(ctx.pg, table, filePath);
             }
         }
     };
 }
-async function parseCsvHeader(filePath) {
-    const file = (0, fs_1.createReadStream)(filePath);
-    const parser = (0, csv_parse_1.parse)({
-        bom: true,
-        to_line: 1,
-        skip_empty_lines: true,
-    });
-    return new Promise((resolve, reject) => {
-        const cleanup = (err) => {
-            parser.destroy();
-            file.destroy();
-            if (err)
-                reject(err);
-        };
-        parser.on('readable', () => {
-            const row = parser.read();
-            if (!row)
-                return;
-            if (row.length === 0) {
-                cleanup(new Error('CSV header has no columns'));
-                return;
-            }
-            cleanup();
-            resolve(row);
-        });
-        parser.on('error', cleanup);
-        file.on('error', cleanup);
-        file.pipe(parser);
-    });
-}
-async function copyCsvIntoTable(pg, table, filePath) {
-    const client = pg.client;
-    const columns = await parseCsvHeader(filePath);
-    const quotedColumns = columns.map(col => `"${col.replace(/"/g, '""')}"`);
-    const columnList = quotedColumns.join(', ');
-    const copyCommand = `COPY ${table} (${columnList}) FROM STDIN WITH CSV HEADER`;
-    log.info(`Using columns: ${columnList}`);
-    const stream = client.query((0, pg_copy_streams_1.from)(copyCommand));
-    const source = (0, fs_1.createReadStream)(filePath);
-    try {
-        await (0, promises_1.pipeline)(source, stream);
-        log.success(`✅ Successfully seeded "${table}"`);
-    }
-    catch (err) {
-        log.error(`❌ COPY failed for "${table}": ${err.message}`);
-        throw err;
-    }
-}
-async function exportTableToCsv(pg, table, filePath) {
-    const client = pg.client;
-    const stream = client.query((0, pg_copy_streams_1.to)(`COPY ${table} TO STDOUT WITH CSV HEADER`));
-    const target = (0, fs_1.createWriteStream)(filePath);
-    try {
-        await (0, promises_1.pipeline)(stream, target);
-        log.success(`✅ Exported "${table}" to ${filePath}`);
-    }
-    catch (err) {
-        log.error(`❌ Failed to export "${table}": ${err.message}`);
-        throw err;
-    }
-}
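A side note on the (0, pgsql_seed_1.loadCsv)(...) spelling that recurs throughout the CommonJS output: it is how TypeScript emits a call to an imported function so that the callee receives `this` as undefined rather than the module namespace object. A standalone illustration:

    const mod = {
      whoAmI(this: unknown) { return this; },
    };
    mod.whoAmI();      // property call: `this` is mod
    (0, mod.whoAmI)(); // comma expression detaches the receiver: `this` is undefined in strict mode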
package/seed/index.d.ts
CHANGED
package/seed/index.js
CHANGED
@@ -19,7 +19,6 @@ const adapters_1 = require("./adapters");
 const csv_1 = require("./csv");
 const json_1 = require("./json");
 const pgpm_1 = require("./pgpm");
-__exportStar(require("./csv"), exports);
 __exportStar(require("./types"), exports);
 exports.seed = {
     pgpm: pgpm_1.pgpm,
package/seed/json.d.ts
CHANGED
@@ -1,12 +1,3 @@
-import type { Client } from 'pg';
+import { type JsonSeedMap } from 'pgsql-seed';
 import { SeedAdapter } from './types';
-export interface JsonSeedMap {
-    [table: string]: Record<string, any>[];
-}
-/**
- * Standalone helper function to insert JSON data into PostgreSQL tables
- * @param client - PostgreSQL client instance
- * @param data - Map of table names to arrays of row objects
- */
-export declare function insertJson(client: Client, data: JsonSeedMap): Promise<void>;
 export declare function json(data: JsonSeedMap): SeedAdapter;
package/seed/json.js
CHANGED
@@ -1,40 +1,11 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.insertJson = insertJson;
 exports.json = json;
-/**
- * Standalone helper function to insert JSON data into PostgreSQL tables
- * @param client - PostgreSQL client instance
- * @param data - Map of table names to arrays of row objects
- */
-async function insertJson(client, data) {
-    for (const [table, rows] of Object.entries(data)) {
-        if (!Array.isArray(rows) || rows.length === 0)
-            continue;
-        const columns = Object.keys(rows[0]);
-        const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
-        const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
-        for (const row of rows) {
-            const values = columns.map((c) => row[c]);
-            await client.query(sql, values);
-        }
-    }
-}
+const pgsql_seed_1 = require("pgsql-seed");
 function json(data) {
     return {
         async seed(ctx) {
-            const pg = ctx.pg;
-            for (const [table, rows] of Object.entries(data)) {
-                if (!Array.isArray(rows) || rows.length === 0)
-                    continue;
-                const columns = Object.keys(rows[0]);
-                const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
-                const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
-                for (const row of rows) {
-                    const values = columns.map((c) => row[c]);
-                    await pg.query(sql, values);
-                }
-            }
+            await (0, pgsql_seed_1.insertJsonMap)(ctx.pg, data);
         }
     };
 }
package/seed/pgpm.d.ts
CHANGED
@@ -1,10 +1,2 @@
-import type { PgConfig } from 'pg-env';
 import { SeedAdapter } from './types';
-/**
- * Standalone helper function to deploy pgpm package
- * @param config - PostgreSQL configuration
- * @param cwd - Current working directory (defaults to process.cwd())
- * @param cache - Whether to enable caching (defaults to false)
- */
-export declare function deployPgpm(config: PgConfig, cwd?: string, cache?: boolean): Promise<void>;
 export declare function pgpm(cwd?: string, cache?: boolean): SeedAdapter;
package/seed/pgpm.js
CHANGED
@@ -1,32 +1,11 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.deployPgpm = deployPgpm;
 exports.pgpm = pgpm;
-const core_1 = require("@pgpmjs/core");
-const env_1 = require("@pgpmjs/env");
-/**
- * Standalone helper function to deploy pgpm package
- * @param config - PostgreSQL configuration
- * @param cwd - Current working directory (defaults to process.cwd())
- * @param cache - Whether to enable caching (defaults to false)
- */
-async function deployPgpm(config, cwd, cache = false) {
-    const proj = new core_1.PgpmPackage(cwd ?? process.cwd());
-    if (!proj.isInModule())
-        return;
-    await proj.deploy((0, env_1.getEnvOptions)({
-        pg: config,
-        deployment: {
-            fast: true,
-            usePlan: true,
-            cache
-        }
-    }), proj.getModuleName());
-}
+const pgsql_seed_1 = require("pgsql-seed");
 function pgpm(cwd, cache = false) {
     return {
         async seed(ctx) {
-            await deployPgpm(ctx.config, cwd ?? ctx.connect.cwd, cache);
+            await (0, pgsql_seed_1.deployPgpm)(ctx.config, cwd ?? ctx.connect.cwd, cache);
         }
     };
 }
package/test-client.d.ts
CHANGED
@@ -1,8 +1,8 @@
 import { Client, QueryResult } from 'pg';
 import { PgConfig } from 'pg-env';
 import { AuthOptions, PgTestConnectionOptions } from '@pgpmjs/types';
-import { type JsonSeedMap } from './seed/json';
-import { type CsvSeedMap } from './seed/csv';
+import { type JsonSeedMap } from 'pgsql-seed';
+import { type CsvSeedMap } from 'pgsql-seed';
 export type PgTestClientOpts = {
     deferConnect?: boolean;
     trackConnect?: (p: Promise<any>) => void;
package/test-client.js
CHANGED
@@ -4,10 +4,10 @@ exports.PgTestClient = void 0;
 const pg_1 = require("pg");
 const roles_1 = require("./roles");
 const context_utils_1 = require("./context-utils");
-const json_1 = require("./seed/json");
-const csv_1 = require("./seed/csv");
-const sql_1 = require("./seed/sql");
-const pgpm_1 = require("./seed/pgpm");
+const pgsql_seed_1 = require("pgsql-seed");
+const pgsql_seed_2 = require("pgsql-seed");
+const pgsql_seed_3 = require("pgsql-seed");
+const pgsql_seed_4 = require("pgsql-seed");
 class PgTestClient {
     config;
     client;
@@ -150,23 +150,23 @@ class PgTestClient {
     }
     async loadJson(data) {
         await this.ctxQuery();
-        await (0, json_1.insertJson)(this.client, data);
+        await (0, pgsql_seed_1.insertJsonMap)(this.client, data);
     }
     async loadSql(files) {
         await this.ctxQuery();
-        await (0, sql_1.loadSqlFiles)(this.client, files);
+        await (0, pgsql_seed_3.loadSqlFiles)(this.client, files);
     }
     // NON-RLS load/seed methods:
     async loadCsv(tables) {
         // await this.ctxQuery(); // no point to call ctxQuery() here
         // because POSTGRES doesn't support row-level security on COPY FROM...
-        await (0, csv_1.loadCsvMap)(this.client, tables);
+        await (0, pgsql_seed_2.loadCsvMap)(this.client, tables);
     }
     async loadPgpm(cwd, cache = false) {
         // await this.ctxQuery(); // no point to call ctxQuery() here
         // because deployPgpm() has it's own way of getting the client...
         // so for now, we'll expose this but it's limited
-        await (0, pgpm_1.deployPgpm)(this.config, cwd, cache);
+        await (0, pgsql_seed_4.deployPgpm)(this.config, cwd, cache);
     }
 }
 exports.PgTestClient = PgTestClient;
package/esm/seed/sql.js
DELETED
@@ -1,15 +0,0 @@
-import { existsSync, readFileSync } from 'fs';
-/**
- * Standalone helper function to load SQL files into PostgreSQL
- * @param client - PostgreSQL client instance
- * @param files - Array of SQL file paths to execute
- */
-export async function loadSqlFiles(client, files) {
-    for (const file of files) {
-        if (!existsSync(file)) {
-            throw new Error(`SQL file not found: ${file}`);
-        }
-        const sql = readFileSync(file, 'utf-8');
-        await client.query(sql);
-    }
-}
package/seed/sql.d.ts
DELETED
@@ -1,7 +0,0 @@
-import type { Client } from 'pg';
-/**
- * Standalone helper function to load SQL files into PostgreSQL
- * @param client - PostgreSQL client instance
- * @param files - Array of SQL file paths to execute
- */
-export declare function loadSqlFiles(client: Client, files: string[]): Promise<void>;
package/seed/sql.js
DELETED
@@ -1,18 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.loadSqlFiles = loadSqlFiles;
-const fs_1 = require("fs");
-/**
- * Standalone helper function to load SQL files into PostgreSQL
- * @param client - PostgreSQL client instance
- * @param files - Array of SQL file paths to execute
- */
-async function loadSqlFiles(client, files) {
-    for (const file of files) {
-        if (!(0, fs_1.existsSync)(file)) {
-            throw new Error(`SQL file not found: ${file}`);
-        }
-        const sql = (0, fs_1.readFileSync)(file, 'utf-8');
-        await client.query(sql);
-    }
-}
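The three deleted sql modules close the loop: loadSqlFiles survives under the same name and (client, files) signature in pgsql-seed, as the test-client diff above already showed, so direct callers can swap just the import:

    import { Client } from 'pg';
    import { loadSqlFiles } from 'pgsql-seed'; // same contract as the deleted helper
    const client = new Client();
    await client.connect();
    await loadSqlFiles(client, ['./sql/schema.sql', './sql/fixtures.sql']); // throws if a file is missing
    await client.end();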