pgsql-test 2.11.10 → 2.11.11
This diff shows the contents of publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
- package/esm/seed/csv.js +37 -1
- package/package.json +3 -2
- package/seed/csv.js +37 -1
package/esm/seed/csv.js CHANGED

@@ -1,5 +1,6 @@
 import { pipeline } from 'node:stream/promises';
 import { Logger } from '@launchql/logger';
+import { parse } from 'csv-parse';
 import { createReadStream, createWriteStream, existsSync } from 'fs';
 import { from as copyFrom, to as copyTo } from 'pg-copy-streams';
 const log = new Logger('csv');
@@ -16,9 +17,44 @@ export function csv(tables) {
         }
     };
 }
+async function parseCsvHeader(filePath) {
+    const file = createReadStream(filePath);
+    const parser = parse({
+        bom: true,
+        to_line: 1,
+        skip_empty_lines: true,
+    });
+    return new Promise((resolve, reject) => {
+        const cleanup = (err) => {
+            parser.destroy();
+            file.destroy();
+            if (err)
+                reject(err);
+        };
+        parser.on('readable', () => {
+            const row = parser.read();
+            if (!row)
+                return;
+            if (row.length === 0) {
+                cleanup(new Error('CSV header has no columns'));
+                return;
+            }
+            cleanup();
+            resolve(row);
+        });
+        parser.on('error', cleanup);
+        file.on('error', cleanup);
+        file.pipe(parser);
+    });
+}
 export async function copyCsvIntoTable(pg, table, filePath) {
     const client = pg.client;
-    const
+    const columns = await parseCsvHeader(filePath);
+    const quotedColumns = columns.map(col => `"${col.replace(/"/g, '""')}"`);
+    const columnList = quotedColumns.join(', ');
+    const copyCommand = `COPY ${table} (${columnList}) FROM STDIN WITH CSV HEADER`;
+    log.info(`Using columns: ${columnList}`);
+    const stream = client.query(copyFrom(copyCommand));
     const source = createReadStream(filePath);
     try {
         await pipeline(source, stream);
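The substantive change here (mirrored in the CJS build below) is that COPY no longer assumes the CSV's column order matches the table definition: the file's header row is parsed first, and the resulting names become an explicit, quoted column list in the COPY command. A minimal sketch of that quoting rule, with illustrative header values that are not from this package:

// Each header cell becomes a quoted PostgreSQL identifier; embedded
// double quotes are doubled, which is PostgreSQL's escape rule.
const columns = ['id', 'user name', 'size "large"'];
const columnList = columns
    .map(col => `"${col.replace(/"/g, '""')}"`)
    .join(', ');
console.log(columnList);
// -> "id", "user name", "size ""large"""
console.log(`COPY users (${columnList}) FROM STDIN WITH CSV HEADER`);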
package/package.json CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "pgsql-test",
-  "version": "2.11.10",
+  "version": "2.11.11",
   "author": "Dan Lynch <pyramation@gmail.com>",
   "description": "pgsql-test offers isolated, role-aware, and rollback-friendly PostgreSQL environments for integration tests — giving developers realistic test coverage without external state pollution",
   "main": "index.js",
@@ -64,10 +64,11 @@
     "@launchql/env": "^2.4.3",
     "@launchql/server-utils": "^2.4.3",
     "@launchql/types": "^2.6.2",
+    "csv-parse": "^6.1.0",
     "pg": "^8.16.0",
     "pg-cache": "^1.3.4",
     "pg-copy-streams": "^6.0.6",
     "pg-env": "^1.1.0"
   },
-  "gitHead": "
+  "gitHead": "4f242233161fd1101d4de871ee2f1dbb7c6b9cff"
 }
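The only dependency change is the new csv-parse requirement that backs parseCsvHeader. For reference, the same option set can be exercised standalone; this sketch assumes csv-parse's synchronous entry point, which the package itself does not use (it streams instead), and 'data.csv' is a placeholder path:

import { readFileSync } from 'fs';
import { parse } from 'csv-parse/sync';

// bom strips a UTF-8 byte-order mark, to_line: 1 stops after the first
// record, and skip_empty_lines ignores blank lines before the header.
const [header] = parse(readFileSync('data.csv'), {
    bom: true,
    to_line: 1,
    skip_empty_lines: true,
});
console.log(header); // e.g. ['id', 'name', 'email']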
package/seed/csv.js CHANGED

@@ -5,6 +5,7 @@ exports.copyCsvIntoTable = copyCsvIntoTable;
 exports.exportTableToCsv = exportTableToCsv;
 const promises_1 = require("node:stream/promises");
 const logger_1 = require("@launchql/logger");
+const csv_parse_1 = require("csv-parse");
 const fs_1 = require("fs");
 const pg_copy_streams_1 = require("pg-copy-streams");
 const log = new logger_1.Logger('csv');
@@ -21,9 +22,44 @@ function csv(tables) {
         }
     };
 }
+async function parseCsvHeader(filePath) {
+    const file = (0, fs_1.createReadStream)(filePath);
+    const parser = (0, csv_parse_1.parse)({
+        bom: true,
+        to_line: 1,
+        skip_empty_lines: true,
+    });
+    return new Promise((resolve, reject) => {
+        const cleanup = (err) => {
+            parser.destroy();
+            file.destroy();
+            if (err)
+                reject(err);
+        };
+        parser.on('readable', () => {
+            const row = parser.read();
+            if (!row)
+                return;
+            if (row.length === 0) {
+                cleanup(new Error('CSV header has no columns'));
+                return;
+            }
+            cleanup();
+            resolve(row);
+        });
+        parser.on('error', cleanup);
+        file.on('error', cleanup);
+        file.pipe(parser);
+    });
+}
 async function copyCsvIntoTable(pg, table, filePath) {
     const client = pg.client;
-    const
+    const columns = await parseCsvHeader(filePath);
+    const quotedColumns = columns.map(col => `"${col.replace(/"/g, '""')}"`);
+    const columnList = quotedColumns.join(', ');
+    const copyCommand = `COPY ${table} (${columnList}) FROM STDIN WITH CSV HEADER`;
+    log.info(`Using columns: ${columnList}`);
+    const stream = client.query((0, pg_copy_streams_1.from)(copyCommand));
     const source = (0, fs_1.createReadStream)(filePath);
     try {
         await (0, promises_1.pipeline)(source, stream);
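For consumers the call signature is unchanged; the difference is that the header row now decides which columns COPY targets. A hypothetical usage sketch, assuming a pgsql-test context `pg` that exposes a connected `client`, and assuming the deep import path matches the file layout above (the table and path are illustrative):

import { copyCsvIntoTable } from 'pgsql-test/seed/csv';

// As of 2.11.11, users.csv may list its columns in a different order
// than the table definition; the header determines the COPY column list.
await copyCsvIntoTable(pg, 'public.users', './seed/users.csv');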