pgsql-seed 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/utils.js ADDED
@@ -0,0 +1,91 @@
1
// Matches a canonical 8-4-4-4-12 hex UUID (case-insensitive).
const UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;

// Map an id value to a snapshot placeholder. With an idHash lookup table the
// placeholder is numbered ([ID-n]) so related rows stay correlated across a
// snapshot; otherwise every id collapses to plain [ID]. Falsy values pass
// through untouched.
const idReplacement = (value, idHash) => {
    if (!value) {
        return value;
    }
    if (!idHash) {
        return '[ID]';
    }
    const mapped = idHash[String(value)];
    return mapped === undefined ? '[ID]' : `[ID-${mapped}]`;
};

// Build a new object by applying fn(value, key) to each own enumerable entry.
function mapValues(obj, fn) {
    const result = {};
    for (const [key, value] of Object.entries(obj)) {
        result[key] = fn(value, key);
    }
    return result;
}

// Replace Date instances, and ISO-looking (20xx-xx-xx...) strings stored under
// *_at / *At keys, with the [DATE] marker.
export const pruneDates = (row) => mapValues(row, (value, key) => {
    if (!value) {
        return value;
    }
    if (value instanceof Date) {
        return '[DATE]';
    }
    const isTimestampKey = /(_at|At)$/.test(key);
    const looksLikeIsoDate = typeof value === 'string' && /^20[0-9]{2}-[0-9]{2}-[0-9]{2}/.test(value);
    return isTimestampKey && looksLikeIsoDate ? '[DATE]' : value;
});

// Replace string/number values stored under `id` or *_id keys with [ID]
// placeholders (numbered via idHash when provided).
export const pruneIds = (row, idHash) => mapValues(row, (value, key) => {
    const isIdKey = key === 'id' || (typeof key === 'string' && key.endsWith('_id'));
    const isScalar = typeof value === 'string' || typeof value === 'number';
    return isIdKey && isScalar ? idReplacement(value, idHash) : value;
});

// Collapse arrays stored under *_ids keys to a length marker.
export const pruneIdArrays = (row) => mapValues(row, (value, key) => {
    if (typeof key === 'string' && key.endsWith('_ids') && Array.isArray(value)) {
        return `[UUIDs-${value.length}]`;
    }
    return value;
});

// Replace UUID-shaped strings under known keys, and 32-hex gravatar digests.
export const pruneUUIDs = (row) => mapValues(row, (value, key) => {
    if (typeof value !== 'string') {
        return value;
    }
    const isUuidKey = key === 'uuid' || key === 'queue_name';
    if (isUuidKey && UUID_RE.test(value)) {
        return '[UUID]';
    }
    if (key === 'gravatar' && /^[0-9a-f]{32}$/i.test(value)) {
        return '[gUUID]';
    }
    return value;
});

// Replace $-prefixed digest strings stored under *_hash keys.
export const pruneHashes = (row) => mapValues(row, (value, key) => {
    const isHashValue = typeof key === 'string'
        && key.endsWith('_hash')
        && typeof value === 'string'
        && value.startsWith('$');
    return isHashValue ? '[hash]' : value;
});

// Replace generated schema names (zz-*) with a stable marker.
export const pruneSchemas = (row) => mapValues(row, (value) => (typeof value === 'string' && /^zz-/.test(value) ? '[schemahash]' : value));

// Replace author-tracking strings stored under *_by keys.
export const prunePeoplestamps = (row) => mapValues(row, (value, key) => (key.endsWith('_by') && typeof value === 'string' ? '[peoplestamp]' : value));

// Replace secrets stored under `token` or *_token keys.
export const pruneTokens = (row) => mapValues(row, (value, key) => {
    if ((key === 'token' || key.endsWith('_token')) && typeof value === 'string') {
        return '[token]';
    }
    return value;
});

// Left-to-right composition of pruners into a single row transform.
export const composePruners = (...pruners) => (row) => pruners.reduce((acc, pruner) => pruner(acc), row);

// Default pruners used by prune/snapshot. pruneIds is applied separately so an
// IdHash can be threaded through.
export const defaultPruners = [
    pruneTokens,
    prunePeoplestamps,
    pruneDates,
    pruneIdArrays,
    pruneUUIDs,
    pruneHashes
];

// Apply the default pruners, then id replacement with an optional IdHash.
export const prune = (row, idHash) => pruneIds(composePruners(...defaultPruners)(row), idHash);

// Build a deep snapshot function from a custom pruner list. The returned
// function recurses through arrays and plain objects, pruning every level.
export const createSnapshot = (pruners) => {
    const applyPruners = composePruners(...pruners);
    const snap = (value, idHash) => {
        if (Array.isArray(value)) {
            return value.map((item) => snap(item, idHash));
        }
        if (value && typeof value === 'object') {
            const withIds = pruneIds(applyPruners(value), idHash);
            return mapValues(withIds, (child) => snap(child, idHash));
        }
        return value;
    };
    return snap;
};

// Deep snapshot using the default pruner set.
export const snapshot = createSnapshot(defaultPruners);
package/index.d.ts ADDED
@@ -0,0 +1,7 @@
1
/**
 * Public type surface of pgsql-seed: re-exports every submodule's
 * declarations, plus the `snapshot` helper from utils.
 */
export * from './admin';
export * from './connect';
export * from './manager';
export * from './roles';
export * from './seed';
export * from './test-client';
export { snapshot } from './utils';
package/index.js ADDED
@@ -0,0 +1,25 @@
1
"use strict";
// --- TypeScript-emitted CommonJS interop helpers ---
// Binds export `k` of module `m` onto `o` as `k2` (defaults to `k`),
// preserving live-getter semantics where the property descriptor allows it;
// falls back to a plain copy on engines without Object.create.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Copies every named export of `m` (except `default`) onto `exports`,
// skipping names the target already defines.
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.snapshot = void 0;
// Package entry point: re-export every submodule, plus `snapshot` from utils.
__exportStar(require("./admin"), exports);
__exportStar(require("./connect"), exports);
__exportStar(require("./manager"), exports);
__exportStar(require("./roles"), exports);
__exportStar(require("./seed"), exports);
__exportStar(require("./test-client"), exports);
var utils_1 = require("./utils");
Object.defineProperty(exports, "snapshot", { enumerable: true, get: function () { return utils_1.snapshot; } });
package/manager.d.ts ADDED
@@ -0,0 +1,26 @@
1
import { Pool } from 'pg';
import { PgConfig } from 'pg-env';
import { PgTestClient, PgTestClientOpts } from './test-client';
/**
 * Process-wide singleton that hands out pg Pools and PgTestClients for tests,
 * tracks every database it has touched, and tears everything down (closing
 * clients, ending pools, dropping databases) on shutdown or SIGTERM.
 */
export declare class PgTestConnector {
    private static instance;
    /** Every PgTestClient created through getClient(). */
    private readonly clients;
    /** Cached pools, keyed by user@host:port/database. */
    private readonly pgPools;
    /** Configs of databases seen (keyed by host:port/database); dropped in closeAll(). */
    private readonly seenDbConfigs;
    /** In-flight client connect promises awaited before teardown. */
    private readonly pendingConnects;
    private config;
    private verbose;
    private shuttingDown;
    private constructor();
    /** Get (or lazily create) the singleton. `config`/`verbose` only apply on first call. */
    static getInstance(config: PgConfig, verbose?: boolean): PgTestConnector;
    private poolKey;
    private dbKey;
    /** Mark the connector as shutting down; getClient() refuses new clients afterwards. */
    beginTeardown(): void;
    private registerConnect;
    private awaitPendingConnects;
    /** Return the cached Pool for this config, creating one on first use. */
    getPool(config: PgConfig): Pool;
    /** Create and track a new PgTestClient. Throws during teardown. */
    getClient(config: PgConfig, opts?: Partial<PgTestClientOpts>): PgTestClient;
    /** Close all clients, end all pools, and drop every seen database. */
    closeAll(): Promise<void>;
    /** Synchronous fire-and-forget wrapper around closeAll(). */
    close(): void;
    /** Drop a single database and forget it. */
    drop(config: PgConfig): void;
    /** Close one client, then drop its database. */
    kill(client: PgTestClient): Promise<void>;
}
package/manager.js ADDED
@@ -0,0 +1,142 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.PgTestConnector = void 0;
const logger_1 = require("@pgpmjs/logger");
const pg_1 = require("pg");
const pg_env_1 = require("pg-env");
const admin_1 = require("./admin");
const test_client_1 = require("./test-client");
const log = new logger_1.Logger('test-connector');
const SYS_EVENTS = ['SIGTERM'];
// Best-effort pool shutdown: skip pools that are already ended/ending, and
// log (rather than throw) on failure so teardown of the rest can continue.
const end = (pool) => {
    try {
        if (pool.ended || pool.ending) {
            log.warn('⚠️ pg pool already ended or ending');
            return;
        }
        pool.end();
    }
    catch (err) {
        log.error('❌ pg pool termination error:', err);
    }
};
/**
 * Process-wide singleton managing pg pools and PgTestClients for tests.
 * Tracks every database it touches so closeAll() can close clients, end
 * pools, and drop all test databases; also hooks SIGTERM for cleanup.
 */
class PgTestConnector {
    static instance;
    clients = new Set();
    pgPools = new Map();
    seenDbConfigs = new Map();
    pendingConnects = new Set();
    config;
    verbose = false;
    shuttingDown = false;
    constructor(config, verbose = false) {
        this.verbose = verbose;
        this.config = config;
        SYS_EVENTS.forEach((event) => {
            process.on(event, () => {
                log.info(`⏹ Received ${event}, closing all connections...`);
                // closeAll() is async: report failures via the logger instead of
                // letting them escape as unhandled promise rejections during
                // process shutdown.
                this.closeAll().catch((err) => log.error('❌ closeAll failed:', err));
            });
        });
    }
    /** Lazily create the singleton; `config`/`verbose` only apply on first call. */
    static getInstance(config, verbose = false) {
        if (!PgTestConnector.instance) {
            PgTestConnector.instance = new PgTestConnector(config, verbose);
        }
        return PgTestConnector.instance;
    }
    // Pool identity includes the user: different users need distinct pools.
    poolKey(config) {
        return `${config.user}@${config.host}:${config.port}/${config.database}`;
    }
    // Database identity ignores the user: one drop per physical database.
    dbKey(config) {
        return `${config.host}:${config.port}/${config.database}`;
    }
    /** After this, getClient() throws; used to fence off teardown. */
    beginTeardown() {
        this.shuttingDown = true;
    }
    // Track an in-flight connect so closeAll() can wait for it. The trailing
    // .catch(() => {}) is required: .finally() returns a NEW promise that
    // would otherwise reject unhandled when the connect fails (the caller
    // still observes the failure through the original `p`).
    registerConnect(p) {
        this.pendingConnects.add(p);
        p.finally(() => this.pendingConnects.delete(p)).catch(() => { });
    }
    async awaitPendingConnects() {
        const arr = Array.from(this.pendingConnects);
        if (arr.length) {
            await Promise.allSettled(arr);
        }
    }
    /** Return the cached Pool for this config, creating one on first use. */
    getPool(config) {
        const key = this.poolKey(config);
        if (!this.pgPools.has(key)) {
            const pool = new pg_1.Pool(config);
            this.pgPools.set(key, pool);
            log.info(`📘 Created new pg pool: ${key}`);
        }
        return this.pgPools.get(key);
    }
    /** Create and track a new PgTestClient; refused while shutting down. */
    getClient(config, opts = {}) {
        if (this.shuttingDown) {
            throw new Error('PgTestConnector is shutting down; no new clients allowed');
        }
        const client = new test_client_1.PgTestClient(config, {
            trackConnect: (p) => this.registerConnect(p),
            ...opts
        });
        this.clients.add(client);
        const key = this.dbKey(config);
        this.seenDbConfigs.set(key, config);
        log.info(`🔌 New PgTestClient connected to ${config.database}`);
        return client;
    }
    /**
     * Full teardown: wait for in-flight connects, close every client, end
     * every pool, then drop every database seen via getClient(). Failures on
     * individual resources are logged and do not abort the rest.
     */
    async closeAll() {
        this.beginTeardown();
        await this.awaitPendingConnects();
        log.info('🧹 Closing all PgTestClients...');
        await Promise.all(Array.from(this.clients).map(async (client) => {
            try {
                await client.close();
                log.success(`✅ Closed client for ${client.config.database}`);
            }
            catch (err) {
                log.error(`❌ Error closing PgTestClient for ${client.config.database}:`, err);
            }
        }));
        this.clients.clear();
        log.info('🧯 Disposing pg pools...');
        for (const [key, pool] of this.pgPools.entries()) {
            log.debug(`🧯 Disposing pg pool [${key}]`);
            end(pool);
        }
        this.pgPools.clear();
        log.info('🗑️ Dropping seen databases...');
        await Promise.all(Array.from(this.seenDbConfigs.values()).map(async (config) => {
            try {
                // Drop using root credentials resolved from the connector-level config.
                const rootPg = (0, pg_env_1.getPgEnvOptions)(this.config);
                const admin = new admin_1.DbAdmin({ ...config, user: rootPg.user, password: rootPg.password }, this.verbose);
                admin.drop();
                log.warn(`🧨 Dropped database: ${config.database}`);
            }
            catch (err) {
                log.error(`❌ Failed to drop database ${config.database}:`, err);
            }
        }));
        this.seenDbConfigs.clear();
        log.success('✅ All PgTestClients closed, pools disposed, databases dropped.');
        this.pendingConnects.clear();
        // Re-arm the connector so a subsequent run can create clients again.
        this.shuttingDown = false;
    }
    /** Synchronous fire-and-forget wrapper around closeAll(). */
    close() {
        // Without the .catch, a closeAll() failure here would surface as an
        // unhandled promise rejection (fatal by default in modern Node).
        this.closeAll().catch((err) => log.error('❌ closeAll failed:', err));
    }
    /** Drop a single database (using `config`'s own credentials) and forget it. */
    drop(config) {
        const key = this.dbKey(config);
        const admin = new admin_1.DbAdmin(config, this.verbose);
        admin.drop();
        log.warn(`🧨 Dropped database: ${config.database}`);
        this.seenDbConfigs.delete(key);
    }
    /** Close one client, then drop its database. */
    async kill(client) {
        await client.close();
        this.drop(client.config);
    }
}
exports.PgTestConnector = PgTestConnector;
package/package.json ADDED
@@ -0,0 +1,61 @@
1
+ {
2
+ "name": "pgsql-seed",
3
+ "version": "0.0.1",
4
+ "author": "Constructive <developers@constructive.io>",
5
+ "description": "pgsql-seed offers isolated, role-aware, and rollback-friendly PostgreSQL environments for integration tests — giving developers realistic test coverage without external state pollution",
6
+ "main": "index.js",
7
+ "module": "esm/index.js",
8
+ "types": "index.d.ts",
9
+ "homepage": "https://github.com/constructive-io/constructive",
10
+ "license": "MIT",
11
+ "publishConfig": {
12
+ "access": "public",
13
+ "directory": "dist"
14
+ },
15
+ "repository": {
16
+ "type": "git",
17
+ "url": "https://github.com/constructive-io/constructive"
18
+ },
19
+ "bugs": {
20
+ "url": "https://github.com/constructive-io/constructive/issues"
21
+ },
22
+ "keywords": [
23
+ "postgres",
24
+ "postgresql",
25
+ "testing",
26
+ "integration-tests",
27
+ "database-testing",
28
+ "pg",
29
+ "rls",
30
+ "role-based-access",
31
+ "test-database",
32
+ "test-runner",
33
+ "jest",
34
+ "mocha",
35
+ "sqitch",
36
+ "constructive",
37
+ "graphile",
38
+ "typeorm",
39
+ "knex",
40
+ "seed",
41
+ "fixtures",
42
+ "transactions",
43
+ "rollback",
44
+ "node-postgres",
45
+ "pg-pool",
46
+ "pg-client"
47
+ ],
48
+ "dependencies": {
49
+ "@pgpmjs/core": "^4.2.0",
50
+ "@pgpmjs/env": "^2.8.11",
51
+ "@pgpmjs/logger": "^1.3.5",
52
+ "@pgpmjs/server-utils": "^2.8.11",
53
+ "@pgpmjs/types": "^2.12.8",
54
+ "csv-parse": "^6.1.0",
55
+ "pg": "^8.16.3",
56
+ "pg-cache": "^1.6.11",
57
+ "pg-copy-streams": "^7.0.0",
58
+ "pg-env": "^1.2.4"
59
+ },
60
+ "gitHead": "9b68e2d19937ad4e2a81a5de110a197a8572e3d9"
61
+ }
package/roles.d.ts ADDED
@@ -0,0 +1,17 @@
1
import { PgTestConnectionOptions, RoleMapping } from '@pgpmjs/types';
/**
 * Default role mapping configuration
 */
export declare const DEFAULT_ROLE_MAPPING: Required<RoleMapping>;
/**
 * Get resolved role mapping with defaults
 * @param options - optional connection options whose `roles` entries override the defaults
 */
export declare const getRoleMapping: (options?: PgTestConnectionOptions) => Required<RoleMapping>;
/**
 * Get role name by key with fallback to default mapping
 * @param roleKey - one of the named role keys (excluding `default`)
 */
export declare const getRoleName: (roleKey: keyof Omit<RoleMapping, "default">, options?: PgTestConnectionOptions) => string;
/**
 * Get default role name
 */
export declare const getDefaultRole: (options?: PgTestConnectionOptions) => string;
package/roles.js ADDED
@@ -0,0 +1,38 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getDefaultRole = exports.getRoleName = exports.getRoleMapping = exports.DEFAULT_ROLE_MAPPING = void 0;
/**
 * Role names used when the caller supplies no overrides.
 */
const DEFAULT_ROLE_MAPPING = {
    anonymous: 'anonymous',
    authenticated: 'authenticated',
    administrator: 'administrator',
    default: 'anonymous'
};
exports.DEFAULT_ROLE_MAPPING = DEFAULT_ROLE_MAPPING;
/**
 * Resolve the effective role mapping: caller-supplied `roles` entries
 * override the defaults key-by-key.
 */
const getRoleMapping = (options) => {
    const overrides = options?.roles || {};
    return { ...DEFAULT_ROLE_MAPPING, ...overrides };
};
exports.getRoleMapping = getRoleMapping;
/**
 * Look up a concrete role name by key, falling back to the default mapping.
 */
const getRoleName = (roleKey, options) => getRoleMapping(options)[roleKey];
exports.getRoleName = getRoleName;
/**
 * The role used when none is specified explicitly.
 */
const getDefaultRole = (options) => getRoleMapping(options).default;
exports.getDefaultRole = getDefaultRole;
@@ -0,0 +1,4 @@
1
import { SeedAdapter, SeedContext } from './types';
/** Seed adapter that loads each SQL file into the target database via the admin connection. */
export declare function sqlfile(files: string[]): SeedAdapter;
/** Wrap an arbitrary seed function as a SeedAdapter. */
export declare function fn(fn: (ctx: SeedContext) => Promise<void>): SeedAdapter;
/** Combine several adapters into one that runs them sequentially. */
export declare function compose(adapters: SeedAdapter[]): SeedAdapter;
@@ -0,0 +1,28 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.sqlfile = sqlfile;
exports.fn = fn;
exports.compose = compose;
/**
 * Seed adapter that loads each SQL file into the target database via the
 * admin connection on the seed context.
 * NOTE(review): seed() is synchronous and does not await loadSql — this
 * assumes DbAdmin.loadSql is synchronous; confirm against its implementation.
 */
function sqlfile(files) {
    const seed = (ctx) => {
        files.forEach((file) => ctx.admin.loadSql(file, ctx.config.database));
    };
    return { seed };
}
/**
 * Wrap a plain seed function as a SeedAdapter.
 */
function fn(seedFn) {
    return { seed: seedFn };
}
/**
 * Combine adapters into one that runs them in order, awaiting each.
 */
function compose(adapters) {
    const seed = async (ctx) => {
        for (const adapter of adapters) {
            await adapter.seed(ctx);
        }
    };
    return { seed };
}
package/seed/csv.d.ts ADDED
@@ -0,0 +1,15 @@
1
import { Client } from 'pg';
import type { PgTestClient } from '../test-client';
import { SeedAdapter } from './types';
/** Map of table name -> CSV file path. */
export interface CsvSeedMap {
    [tableName: string]: string;
}
/**
 * Standalone helper function to load CSV files into PostgreSQL tables
 * @param client - PostgreSQL client instance
 * @param tables - Map of table names to CSV file paths
 */
export declare function loadCsvMap(client: Client, tables: CsvSeedMap): Promise<void>;
/** Seed adapter form of loadCsvMap, driven by the seed context's test client. */
export declare function csv(tables: CsvSeedMap): SeedAdapter;
/** COPY one CSV file (header row required) into `table` via the test client. */
export declare function copyCsvIntoTable(pg: PgTestClient, table: string, filePath: string): Promise<void>;
/** COPY `table` out to a CSV file (with header row) at `filePath`. */
export declare function exportTableToCsv(pg: PgTestClient, table: string, filePath: string): Promise<void>;
package/seed/csv.js ADDED
@@ -0,0 +1,114 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.loadCsvMap = loadCsvMap;
exports.csv = csv;
exports.copyCsvIntoTable = copyCsvIntoTable;
exports.exportTableToCsv = exportTableToCsv;
const promises_1 = require("node:stream/promises");
const logger_1 = require("@pgpmjs/logger");
const csv_parse_1 = require("csv-parse");
const fs_1 = require("fs");
const pg_copy_streams_1 = require("pg-copy-streams");
const log = new logger_1.Logger('csv');
/**
 * Read the first line of a CSV file and resolve with its column names.
 * Rejects — rather than hanging forever — when the file is empty or has no
 * header row (the original never settled in that case because only the
 * 'readable' event was handled).
 */
function parseCsvHeader(filePath) {
    const file = (0, fs_1.createReadStream)(filePath);
    const parser = (0, csv_parse_1.parse)({
        bom: true,
        to_line: 1,
        skip_empty_lines: true,
    });
    return new Promise((resolve, reject) => {
        let settled = false;
        // Settle exactly once, then tear down both streams.
        const finish = (err, row) => {
            if (settled)
                return;
            settled = true;
            parser.destroy();
            file.destroy();
            if (err)
                reject(err);
            else
                resolve(row);
        };
        parser.on('readable', () => {
            const row = parser.read();
            if (!row)
                return;
            if (row.length === 0) {
                finish(new Error('CSV header has no columns'));
                return;
            }
            finish(null, row);
        });
        // Fires when the parser finishes without emitting a row (empty file).
        parser.on('end', () => finish(new Error(`CSV file has no header row: ${filePath}`)));
        parser.on('error', (err) => finish(err));
        file.on('error', (err) => finish(err));
        file.pipe(parser);
    });
}
/**
 * Shared COPY FROM STDIN implementation used by loadCsvMap and
 * copyCsvIntoTable (previously duplicated verbatim in both).
 * Column names from the CSV header are double-quote escaped; `table` is
 * interpolated into the SQL unescaped, so callers must pass trusted table
 * names (test fixtures only).
 */
async function streamCsvIntoTable(client, table, filePath) {
    const columns = await parseCsvHeader(filePath);
    const quotedColumns = columns.map(col => `"${col.replace(/"/g, '""')}"`);
    const columnList = quotedColumns.join(', ');
    const copyCommand = `COPY ${table} (${columnList}) FROM STDIN WITH CSV HEADER`;
    log.info(`Using columns: ${columnList}`);
    const stream = client.query((0, pg_copy_streams_1.from)(copyCommand));
    const source = (0, fs_1.createReadStream)(filePath);
    try {
        await (0, promises_1.pipeline)(source, stream);
        log.success(`✅ Successfully seeded "${table}"`);
    }
    catch (err) {
        log.error(`❌ COPY failed for "${table}": ${err.message}`);
        throw err;
    }
}
/**
 * Standalone helper function to load CSV files into PostgreSQL tables
 * @param client - PostgreSQL client instance
 * @param tables - Map of table names to CSV file paths
 */
async function loadCsvMap(client, tables) {
    for (const [table, filePath] of Object.entries(tables)) {
        if (!(0, fs_1.existsSync)(filePath)) {
            throw new Error(`CSV file not found: ${filePath}`);
        }
        log.info(`📥 Seeding "${table}" from ${filePath}`);
        await streamCsvIntoTable(client, table, filePath);
    }
}
/**
 * Seed adapter form of loadCsvMap, driven by the test client on the seed
 * context.
 */
function csv(tables) {
    return {
        async seed(ctx) {
            for (const [table, filePath] of Object.entries(tables)) {
                if (!(0, fs_1.existsSync)(filePath)) {
                    throw new Error(`CSV file not found: ${filePath}`);
                }
                log.info(`📥 Seeding "${table}" from ${filePath}`);
                await copyCsvIntoTable(ctx.pg, table, filePath);
            }
        }
    };
}
/**
 * COPY one CSV file (header row required) into `table` via the test client.
 */
async function copyCsvIntoTable(pg, table, filePath) {
    await streamCsvIntoTable(pg.client, table, filePath);
}
/**
 * COPY `table` out to a CSV file (with header row) at `filePath`.
 */
async function exportTableToCsv(pg, table, filePath) {
    const client = pg.client;
    const stream = client.query((0, pg_copy_streams_1.to)(`COPY ${table} TO STDOUT WITH CSV HEADER`));
    const target = (0, fs_1.createWriteStream)(filePath);
    try {
        await (0, promises_1.pipeline)(stream, target);
        log.success(`✅ Exported "${table}" to ${filePath}`);
    }
    catch (err) {
        log.error(`❌ Failed to export "${table}": ${err.message}`);
        throw err;
    }
}
@@ -0,0 +1,14 @@
1
import { compose, fn, sqlfile } from './adapters';
import { csv } from './csv';
import { json } from './json';
import { pgpm } from './pgpm';
export * from './csv';
export * from './types';
/** Namespace object bundling every built-in seed adapter factory. */
export declare const seed: {
    pgpm: typeof pgpm;
    json: typeof json;
    csv: typeof csv;
    compose: typeof compose;
    fn: typeof fn;
    sqlfile: typeof sqlfile;
};
package/seed/index.js ADDED
@@ -0,0 +1,31 @@
1
"use strict";
// --- TypeScript-emitted CommonJS interop helpers ---
// Binds export `k` of module `m` onto `o` as `k2` (defaults to `k`),
// preserving live-getter semantics where the property descriptor allows it.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Copies every named export of `m` (except `default`) onto `exports`.
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.seed = void 0;
const adapters_1 = require("./adapters");
const csv_1 = require("./csv");
const json_1 = require("./json");
const pgpm_1 = require("./pgpm");
__exportStar(require("./csv"), exports);
__exportStar(require("./types"), exports);
// Namespace object bundling every built-in seed adapter factory.
exports.seed = {
    pgpm: pgpm_1.pgpm,
    json: json_1.json,
    csv: csv_1.csv,
    compose: adapters_1.compose,
    fn: adapters_1.fn,
    sqlfile: adapters_1.sqlfile
};
package/seed/json.d.ts ADDED
@@ -0,0 +1,12 @@
1
import type { Client } from 'pg';
import { SeedAdapter } from './types';
/** Map of table name -> array of row objects to insert. */
export interface JsonSeedMap {
    [table: string]: Record<string, any>[];
}
/**
 * Standalone helper function to insert JSON data into PostgreSQL tables
 * @param client - PostgreSQL client instance
 * @param data - Map of table names to arrays of row objects
 */
export declare function insertJson(client: Client, data: JsonSeedMap): Promise<void>;
/** Seed adapter form of insertJson, driven by the seed context's test client. */
export declare function json(data: JsonSeedMap): SeedAdapter;
package/seed/json.js ADDED
@@ -0,0 +1,40 @@
1
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.insertJson = insertJson;
exports.json = json;
/**
 * Standalone helper function to insert JSON data into PostgreSQL tables.
 *
 * Columns are derived from the FIRST row of each table's array: keys that
 * appear only in later rows are silently dropped, and keys missing from a
 * later row insert as NULL (undefined is passed as the parameter value).
 * Tables with no rows are skipped.
 *
 * NOTE(review): table and column names are interpolated unquoted into the
 * SQL — pass trusted identifiers only (test fixtures), and note that
 * camelCase keys will fold to lowercase in Postgres.
 *
 * @param client - PostgreSQL client instance
 * @param data - Map of table names to arrays of row objects
 */
async function insertJson(client, data) {
    for (const [table, rows] of Object.entries(data)) {
        if (!Array.isArray(rows) || rows.length === 0)
            continue;
        const columns = Object.keys(rows[0]);
        const placeholders = columns.map((_, i) => `$${i + 1}`).join(', ');
        const sql = `INSERT INTO ${table} (${columns.join(', ')}) VALUES (${placeholders})`;
        // One parameterized INSERT per row (values go through placeholders).
        for (const row of rows) {
            const values = columns.map((c) => row[c]);
            await client.query(sql, values);
        }
    }
}
/**
 * Seed adapter form of insertJson, driven by the test client on the seed
 * context. (Previously this duplicated insertJson's body verbatim; it now
 * delegates so the two cannot drift apart.)
 */
function json(data) {
    return {
        async seed(ctx) {
            await insertJson(ctx.pg, data);
        }
    };
}