schema-seed-adapter-postgres 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +5 -0
- package/dist/index.cjs +178 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +24 -0
- package/dist/index.d.ts +24 -0
- package/dist/index.js +144 -0
- package/dist/index.js.map +1 -0
- package/package.json +58 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 schema-seed contributors
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
"use strict";
// --- Bundler (esbuild) CommonJS interop helpers ---------------------------
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;

// Defines every entry of `all` on `target` as an enumerable lazy getter,
// so exports are resolved only when first read.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};

// Copies the own properties of `from` onto `to` as forwarding getters,
// skipping `except` and anything `to` already owns; enumerability of the
// source property is preserved. Returns `to`.
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (let key of __getOwnPropNames(from)) {
      if (!__hasOwnProp.call(to, key) && key !== except) {
        desc = __getOwnPropDesc(from, key);
        __defProp(to, key, { get: () => from[key], enumerable: !desc || desc.enumerable });
      }
    }
  }
  return to;
};

// Wraps a CommonJS module so it can be consumed like an ES module namespace.
// When the module is not a transpiled ES module (no `__esModule` flag) — or
// when node compatibility mode is forced — the whole `module.exports` value
// is additionally exposed as `default`.
var __toESM = (mod, isNodeMode, target) => {
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  const base = isNodeMode || !mod || !mod.__esModule
    ? __defProp(target, "default", { value: mod, enumerable: true })
    : target;
  return __copyProps(base, mod);
};

// Marks an export object as an ES module and copies its members onto a
// fresh object suitable for assignment to `module.exports`.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/index.ts
// CommonJS module shell generated by the bundler: exports are registered as
// lazy getters first, then the dependencies are loaded.
var index_exports = {};
__export(index_exports, {
  PostgresAdapter: () => PostgresAdapter,
  createPostgresAdapter: () => createPostgresAdapter
});
module.exports = __toCommonJS(index_exports);
// `pg` is CommonJS, so it goes through the ESM interop shim (node mode).
var import_pg = __toESM(require("pg"), 1);
var import_schema_seed_core = require("schema-seed-core");
// The source did `import pg from 'pg'; const { Client } = pg`.
var { Client } = import_pg.default;
|
|
40
|
+
var PostgresAdapter = class {
  // Single dedicated `pg.Client` connection; all queries (including the
  // BEGIN/COMMIT/ROLLBACK pair) must share one session.
  client;
  /**
   * @param {string} connectionString - PostgreSQL connection string, passed
   *   straight to `pg.Client`.
   */
  constructor(connectionString) {
    this.client = new Client({ connectionString });
  }
  /** Opens the underlying client connection. */
  async connect() {
    await this.client.connect();
  }
  /** Closes the underlying client connection. */
  async disconnect() {
    await this.client.end();
  }
  /** Starts a transaction on this connection. */
  async begin() {
    await this.client.query("BEGIN");
  }
  /** Commits the current transaction. */
  async commit() {
    await this.client.query("COMMIT");
  }
  /** Rolls back the current transaction. */
  async rollback() {
    await this.client.query("ROLLBACK");
  }
  /**
   * Builds a schema graph from information_schema: user tables, their
   * columns, foreign keys and primary keys.
   *
   * NOTE(review): tables are keyed by bare table_name, so identically named
   * tables in different schemas overwrite each other, and the FK/PK queries
   * are not filtered by table_schema either. Kept as-is for compatibility
   * with existing consumers — confirm whether multi-schema support is needed.
   */
  async introspectSchema() {
    const schema = { tables: {} };
    // 1) Base tables outside the system schemas.
    const tablesRes = await this.client.query(`
      SELECT table_name, table_schema
      FROM information_schema.tables
      WHERE table_schema NOT IN ('information_schema', 'pg_catalog')
      AND table_type = 'BASE TABLE'
    `);
    for (const row of tablesRes.rows) {
      const tableName = row.table_name;
      schema.tables[tableName] = {
        name: tableName,
        schema: row.table_schema,
        columns: {},
        foreignKeys: [],
        uniqueConstraints: []
      };
    }
    // 2) Columns; rows whose table was not collected above are skipped.
    const columnsRes = await this.client.query(`
      SELECT table_name, column_name, data_type, is_nullable, column_default, udt_name
      FROM information_schema.columns
      WHERE table_schema NOT IN ('information_schema', 'pg_catalog')
    `);
    for (const row of columnsRes.rows) {
      if (!schema.tables[row.table_name]) continue;
      const col = {
        name: row.column_name,
        type: this.mapPostgresType(row.data_type, row.udt_name),
        rawType: row.data_type,
        nullable: row.is_nullable === "YES",
        defaultValue: row.column_default,
        // serial/identity columns show up as a nextval(...) default.
        isAutoIncrement: row.column_default?.includes("nextval") || false
      };
      schema.tables[row.table_name].columns[row.column_name] = col;
    }
    // 3) Foreign keys (single-column per constraint row).
    const fkRes = await this.client.query(`
      SELECT
        tc.table_name, kcu.column_name,
        ccu.table_name AS foreign_table_name,
        ccu.column_name AS foreign_column_name
      FROM
        information_schema.table_constraints AS tc
        JOIN information_schema.key_column_usage AS kcu
          ON tc.constraint_name = kcu.constraint_name
        JOIN information_schema.constraint_column_usage AS ccu
          ON ccu.constraint_name = tc.constraint_name
      WHERE tc.constraint_type = 'FOREIGN KEY'
    `);
    for (const row of fkRes.rows) {
      if (!schema.tables[row.table_name]) continue;
      schema.tables[row.table_name].foreignKeys.push({
        columns: [row.column_name],
        referencedTable: row.foreign_table_name,
        referencedColumns: [row.foreign_column_name]
      });
    }
    // 4) Primary keys; composite keys accumulate one column per row.
    const pkRes = await this.client.query(`
      SELECT tc.table_name, kcu.column_name
      FROM information_schema.table_constraints tc
      JOIN information_schema.key_column_usage kcu ON tc.constraint_name = kcu.constraint_name
      WHERE tc.constraint_type = 'PRIMARY KEY'
    `);
    for (const row of pkRes.rows) {
      if (!schema.tables[row.table_name]) continue;
      if (!schema.tables[row.table_name].primaryKey) {
        schema.tables[row.table_name].primaryKey = { columns: [] };
      }
      schema.tables[row.table_name].primaryKey.columns.push(row.column_name);
    }
    return schema;
  }
  /**
   * Inserts all rows of a batch with one multi-row parameterized INSERT.
   * Column order is taken from the first row; every row is assumed to carry
   * the same keys (a missing key inserts NULL via `undefined`).
   */
  async insertBatch(batch) {
    const { tableName, rows } = batch;
    if (rows.length === 0) return;
    const columns = Object.keys(rows[0]);
    const placeholders = rows.map(
      (_, rowIndex) => `(${columns.map((_2, colIndex) => `$${rowIndex * columns.length + colIndex + 1}`).join(", ")})`
    ).join(", ");
    const values = rows.flatMap((row) => columns.map((col) => row[col]));
    // Values go through bind parameters; identifiers cannot, so they are
    // quoted with embedded double quotes doubled (fixes breakage/injection
    // via identifiers that contain a `"`).
    const query = `INSERT INTO ${this.quoteIdentifier(tableName)} (${columns.map((c) => this.quoteIdentifier(c)).join(", ")}) VALUES ${placeholders}`;
    await this.client.query(query, values);
  }
  /**
   * TRUNCATE ... RESTART IDENTITY CASCADE on the given tables.
   * No-op for an empty list.
   */
  async truncateTables(tableNames) {
    if (tableNames.length === 0) return;
    const quotedNames = tableNames.map((t) => this.quoteIdentifier(t)).join(", ");
    await this.client.query(`TRUNCATE ${quotedNames} RESTART IDENTITY CASCADE`);
  }
  // Static feature flags advertised to schema-seed-core.
  capabilities = {
    enums: true,
    deferrableConstraints: true,
    returning: true,
    identityInsert: false
  };
  /** Double-quotes a SQL identifier, escaping embedded double quotes. */
  quoteIdentifier(name) {
    return `"${String(name).replace(/"/g, '""')}"`;
  }
  /**
   * Maps an information_schema data_type/udt_name pair to a normalized type.
   * Integer detection is exact-match: the previous `includes("int")` check
   * wrongly classified `interval` and `point` columns as INT.
   * Unrecognized types fall back to STRING.
   */
  mapPostgresType(pgType, udtName) {
    const type = pgType.toLowerCase();
    const udt = udtName.toLowerCase();
    if (type === "bigint" || udt === "int8") return import_schema_seed_core.NormalizedSqlType.BIGINT;
    if (type === "integer" || type === "smallint" || type.includes("serial") || udt === "int4" || udt === "int2") {
      return import_schema_seed_core.NormalizedSqlType.INT;
    }
    if (type.includes("char") || type.includes("text")) return import_schema_seed_core.NormalizedSqlType.STRING;
    if (type.includes("bool")) return import_schema_seed_core.NormalizedSqlType.BOOLEAN;
    if (type.includes("timestamp") || type === "date") return import_schema_seed_core.NormalizedSqlType.DATETIME;
    if (type.includes("json")) return import_schema_seed_core.NormalizedSqlType.JSON;
    if (type.includes("numeric") || type.includes("decimal")) return import_schema_seed_core.NormalizedSqlType.DECIMAL;
    // "double precision" (float8) previously fell through to STRING.
    if (type.includes("float") || type.includes("real") || type === "double precision") {
      return import_schema_seed_core.NormalizedSqlType.FLOAT;
    }
    if (udt === "uuid") return import_schema_seed_core.NormalizedSqlType.UUID;
    return import_schema_seed_core.NormalizedSqlType.STRING;
  }
};
|
|
170
|
+
/**
 * Factory wrapper around `new PostgresAdapter(...)`.
 * @param {string} connectionString - PostgreSQL connection string.
 * @returns {PostgresAdapter} a fresh, not-yet-connected adapter.
 */
function createPostgresAdapter(connectionString) {
  const adapter = new PostgresAdapter(connectionString);
  return adapter;
}
|
|
173
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
174
|
+
0 && (module.exports = {
|
|
175
|
+
PostgresAdapter,
|
|
176
|
+
createPostgresAdapter
|
|
177
|
+
});
|
|
178
|
+
//# sourceMappingURL=index.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts"],"sourcesContent":["import pg from 'pg'\nconst { Client } = pg\nimport {\n SqlAdapter,\n SchemaGraph,\n SeedBatch,\n NormalizedSqlType,\n TableSchema,\n ColumnSchema\n} from 'schema-seed-core'\n\nexport class PostgresAdapter implements SqlAdapter {\n private client: pg.Client\n\n constructor(connectionString: string) {\n this.client = new Client({ connectionString })\n }\n\n async connect(): Promise<void> {\n await this.client.connect()\n }\n\n async disconnect(): Promise<void> {\n await this.client.end()\n }\n\n async begin(): Promise<void> {\n await this.client.query('BEGIN')\n }\n\n async commit(): Promise<void> {\n await this.client.query('COMMIT')\n }\n\n async rollback(): Promise<void> {\n await this.client.query('ROLLBACK')\n }\n\n async introspectSchema(): Promise<SchemaGraph> {\n const schema: SchemaGraph = { tables: {} }\n\n // Get tables\n const tablesRes = await this.client.query(`\n SELECT table_name, table_schema\n FROM information_schema.tables\n WHERE table_schema NOT IN ('information_schema', 'pg_catalog')\n AND table_type = 'BASE TABLE'\n `)\n\n for (const row of tablesRes.rows) {\n const tableName = row.table_name\n schema.tables[tableName] = {\n name: tableName,\n schema: row.table_schema,\n columns: {},\n foreignKeys: [],\n uniqueConstraints: []\n }\n }\n\n // Get columns\n const columnsRes = await this.client.query(`\n SELECT table_name, column_name, data_type, is_nullable, column_default, udt_name\n FROM information_schema.columns\n WHERE table_schema NOT IN ('information_schema', 'pg_catalog')\n `)\n\n for (const row of columnsRes.rows) {\n if (!schema.tables[row.table_name]) continue\n\n const col: ColumnSchema = {\n name: row.column_name,\n type: this.mapPostgresType(row.data_type, row.udt_name),\n rawType: row.data_type,\n nullable: row.is_nullable === 'YES',\n defaultValue: row.column_default,\n isAutoIncrement: row.column_default?.includes('nextval') || false\n }\n 
schema.tables[row.table_name].columns[row.column_name] = col\n }\n\n // Get Foreign Keys\n const fkRes = await this.client.query(`\n SELECT\n tc.table_name, kcu.column_name, \n ccu.table_name AS foreign_table_name,\n ccu.column_name AS foreign_column_name \n FROM \n information_schema.table_constraints AS tc \n JOIN information_schema.key_column_usage AS kcu\n ON tc.constraint_name = kcu.constraint_name\n JOIN information_schema.constraint_column_usage AS ccu\n ON ccu.constraint_name = tc.constraint_name\n WHERE tc.constraint_type = 'FOREIGN KEY'\n `)\n\n for (const row of fkRes.rows) {\n if (!schema.tables[row.table_name]) continue\n schema.tables[row.table_name].foreignKeys.push({\n columns: [row.column_name],\n referencedTable: row.foreign_table_name,\n referencedColumns: [row.foreign_column_name]\n })\n }\n\n // Get Primary Keys\n const pkRes = await this.client.query(`\n SELECT tc.table_name, kcu.column_name\n FROM information_schema.table_constraints tc\n JOIN information_schema.key_column_usage kcu ON tc.constraint_name = kcu.constraint_name\n WHERE tc.constraint_type = 'PRIMARY KEY'\n `)\n\n for (const row of pkRes.rows) {\n if (!schema.tables[row.table_name]) continue\n if (!schema.tables[row.table_name].primaryKey) {\n schema.tables[row.table_name].primaryKey = { columns: [] }\n }\n schema.tables[row.table_name].primaryKey!.columns.push(row.column_name)\n }\n\n return schema\n }\n\n async insertBatch(batch: SeedBatch): Promise<void> {\n const { tableName, rows } = batch\n if (rows.length === 0) return\n\n const columns = Object.keys(rows[0])\n const placeholders = rows.map((_, rowIndex) =>\n `(${columns.map((_, colIndex) => `$${rowIndex * columns.length + colIndex + 1}`).join(', ')})`\n ).join(', ')\n\n const values = rows.flatMap(row => columns.map(col => row[col]))\n const query = `INSERT INTO \"${tableName}\" (${columns.map(c => `\"${c}\"`).join(', ')}) VALUES ${placeholders}`\n\n await this.client.query(query, values)\n }\n\n async 
truncateTables(tableNames: string[]): Promise<void> {\n if (tableNames.length === 0) return\n const quotedNames = tableNames.map(t => `\"${t}\"`).join(', ')\n await this.client.query(`TRUNCATE ${quotedNames} RESTART IDENTITY CASCADE`)\n }\n\n readonly capabilities = {\n enums: true,\n deferrableConstraints: true,\n returning: true,\n identityInsert: false\n }\n\n private mapPostgresType(pgType: string, udtName: string): NormalizedSqlType {\n const type = pgType.toLowerCase()\n const udt = udtName.toLowerCase()\n\n if (type.includes('int') || type.includes('serial')) {\n if (type.includes('big') || udt.includes('int8')) return NormalizedSqlType.BIGINT\n return NormalizedSqlType.INT\n }\n if (type.includes('char') || type.includes('text') || type === 'character varying') return NormalizedSqlType.STRING\n if (type.includes('bool')) return NormalizedSqlType.BOOLEAN\n if (type.includes('timestamp') || type.includes('date')) return NormalizedSqlType.DATETIME\n if (type.includes('json')) return NormalizedSqlType.JSON\n if (type.includes('numeric') || type.includes('decimal')) return NormalizedSqlType.DECIMAL\n if (type.includes('float') || type.includes('real')) return NormalizedSqlType.FLOAT\n if (udt === 'uuid') return NormalizedSqlType.UUID\n\n return NormalizedSqlType.STRING\n }\n}\n\nexport function createPostgresAdapter(connectionString: string): PostgresAdapter {\n return new 
PostgresAdapter(connectionString)\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAe;AAEf,8BAOO;AARP,IAAM,EAAE,OAAO,IAAI,UAAAA;AAUZ,IAAM,kBAAN,MAA4C;AAAA,EACvC;AAAA,EAER,YAAY,kBAA0B;AAClC,SAAK,SAAS,IAAI,OAAO,EAAE,iBAAiB,CAAC;AAAA,EACjD;AAAA,EAEA,MAAM,UAAyB;AAC3B,UAAM,KAAK,OAAO,QAAQ;AAAA,EAC9B;AAAA,EAEA,MAAM,aAA4B;AAC9B,UAAM,KAAK,OAAO,IAAI;AAAA,EAC1B;AAAA,EAEA,MAAM,QAAuB;AACzB,UAAM,KAAK,OAAO,MAAM,OAAO;AAAA,EACnC;AAAA,EAEA,MAAM,SAAwB;AAC1B,UAAM,KAAK,OAAO,MAAM,QAAQ;AAAA,EACpC;AAAA,EAEA,MAAM,WAA0B;AAC5B,UAAM,KAAK,OAAO,MAAM,UAAU;AAAA,EACtC;AAAA,EAEA,MAAM,mBAAyC;AAC3C,UAAM,SAAsB,EAAE,QAAQ,CAAC,EAAE;AAGzC,UAAM,YAAY,MAAM,KAAK,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA,KAK7C;AAEG,eAAW,OAAO,UAAU,MAAM;AAC9B,YAAM,YAAY,IAAI;AACtB,aAAO,OAAO,SAAS,IAAI;AAAA,QACvB,MAAM;AAAA,QACN,QAAQ,IAAI;AAAA,QACZ,SAAS,CAAC;AAAA,QACV,aAAa,CAAC;AAAA,QACd,mBAAmB,CAAC;AAAA,MACxB;AAAA,IACJ;AAGA,UAAM,aAAa,MAAM,KAAK,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA,KAI9C;AAEG,eAAW,OAAO,WAAW,MAAM;AAC/B,UAAI,CAAC,OAAO,OAAO,IAAI,UAAU,EAAG;AAEpC,YAAM,MAAoB;AAAA,QACtB,MAAM,IAAI;AAAA,QACV,MAAM,KAAK,gBAAgB,IAAI,WAAW,IAAI,QAAQ;AAAA,QACtD,SAAS,IAAI;AAAA,QACb,UAAU,IAAI,gBAAgB;AAAA,QAC9B,cAAc,IAAI;AAAA,QAClB,iBAAiB,IAAI,gBAAgB,SAAS,SAAS,KAAK;AAAA,MAChE;AACA,aAAO,OAAO,IAAI,UAAU,EAAE,QAAQ,IAAI,WAAW,IAAI;AAAA,IAC7D;AAGA,UAAM,QAAQ,MAAM,KAAK,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAYzC;AAEG,eAAW,OAAO,MAAM,MAAM;AAC1B,UAAI,CAAC,OAAO,OAAO,IAAI,UAAU,EAAG;AACpC,aAAO,OAAO,IAAI,UAAU,EAAE,YAAY,KAAK;AAAA,QAC3C,SAAS,CAAC,IAAI,WAAW;AAAA,QACzB,iBAAiB,IAAI;AAAA,QACrB,mBAAmB,CAAC,IAAI,mBAAmB;AAAA,MAC/C,CAAC;AAAA,IACL;AAGA,UAAM,QAAQ,MAAM,KAAK,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA,KAKzC;AAEG,eAAW,OAAO,MAAM,MAAM;AAC1B,UAAI,CAAC,OAAO,OAAO,IAAI,UAAU,EAAG;AACpC,UAAI,CAAC,OAAO,OAAO,IAAI,UAAU,EAAE,YAAY;AAC3C,eAAO,OAAO,IAAI,UAAU,EAAE,aAAa,EAAE,SAAS,CAAC,EAAE;AAAA,MAC7D;AACA,aAAO,OAAO,IAAI,UAAU,EAAE,WAAY,QAAQ,KAAK,IAAI,WAAW;AAAA,IAC1E;AAEA,WAAO;AAAA,EACX;AAAA,EAEA,MAAM,YAAY,OAAiC;AAC/C,UAAM,EAAE,WAAW,KAAK
,IAAI;AAC5B,QAAI,KAAK,WAAW,EAAG;AAEvB,UAAM,UAAU,OAAO,KAAK,KAAK,CAAC,CAAC;AACnC,UAAM,eAAe,KAAK;AAAA,MAAI,CAAC,GAAG,aAC9B,IAAI,QAAQ,IAAI,CAACC,IAAG,aAAa,IAAI,WAAW,QAAQ,SAAS,WAAW,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC;AAAA,IAC/F,EAAE,KAAK,IAAI;AAEX,UAAM,SAAS,KAAK,QAAQ,SAAO,QAAQ,IAAI,SAAO,IAAI,GAAG,CAAC,CAAC;AAC/D,UAAM,QAAQ,gBAAgB,SAAS,MAAM,QAAQ,IAAI,OAAK,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC,YAAY,YAAY;AAE1G,UAAM,KAAK,OAAO,MAAM,OAAO,MAAM;AAAA,EACzC;AAAA,EAEA,MAAM,eAAe,YAAqC;AACtD,QAAI,WAAW,WAAW,EAAG;AAC7B,UAAM,cAAc,WAAW,IAAI,OAAK,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI;AAC3D,UAAM,KAAK,OAAO,MAAM,YAAY,WAAW,2BAA2B;AAAA,EAC9E;AAAA,EAES,eAAe;AAAA,IACpB,OAAO;AAAA,IACP,uBAAuB;AAAA,IACvB,WAAW;AAAA,IACX,gBAAgB;AAAA,EACpB;AAAA,EAEQ,gBAAgB,QAAgB,SAAoC;AACxE,UAAM,OAAO,OAAO,YAAY;AAChC,UAAM,MAAM,QAAQ,YAAY;AAEhC,QAAI,KAAK,SAAS,KAAK,KAAK,KAAK,SAAS,QAAQ,GAAG;AACjD,UAAI,KAAK,SAAS,KAAK,KAAK,IAAI,SAAS,MAAM,EAAG,QAAO,0CAAkB;AAC3E,aAAO,0CAAkB;AAAA,IAC7B;AACA,QAAI,KAAK,SAAS,MAAM,KAAK,KAAK,SAAS,MAAM,KAAK,SAAS,oBAAqB,QAAO,0CAAkB;AAC7G,QAAI,KAAK,SAAS,MAAM,EAAG,QAAO,0CAAkB;AACpD,QAAI,KAAK,SAAS,WAAW,KAAK,KAAK,SAAS,MAAM,EAAG,QAAO,0CAAkB;AAClF,QAAI,KAAK,SAAS,MAAM,EAAG,QAAO,0CAAkB;AACpD,QAAI,KAAK,SAAS,SAAS,KAAK,KAAK,SAAS,SAAS,EAAG,QAAO,0CAAkB;AACnF,QAAI,KAAK,SAAS,OAAO,KAAK,KAAK,SAAS,MAAM,EAAG,QAAO,0CAAkB;AAC9E,QAAI,QAAQ,OAAQ,QAAO,0CAAkB;AAE7C,WAAO,0CAAkB;AAAA,EAC7B;AACJ;AAEO,SAAS,sBAAsB,kBAA2C;AAC7E,SAAO,IAAI,gBAAgB,gBAAgB;AAC/C;","names":["pg","_"]}
|
package/dist/index.d.cts
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { SqlAdapter, SchemaGraph, SeedBatch } from 'schema-seed-core';

/**
 * schema-seed SqlAdapter backed by a single `pg` Client connection to a
 * PostgreSQL database.
 */
declare class PostgresAdapter implements SqlAdapter {
    private client;
    /** @param connectionString PostgreSQL connection string passed to `pg.Client`. */
    constructor(connectionString: string);
    /** Opens the underlying client connection. */
    connect(): Promise<void>;
    /** Closes the underlying client connection. */
    disconnect(): Promise<void>;
    /** Issues BEGIN on this connection. */
    begin(): Promise<void>;
    /** Issues COMMIT on this connection. */
    commit(): Promise<void>;
    /** Issues ROLLBACK on this connection. */
    rollback(): Promise<void>;
    /**
     * Reads tables, columns, foreign keys and primary keys from
     * information_schema (all schemas except pg_catalog/information_schema).
     */
    introspectSchema(): Promise<SchemaGraph>;
    /** Inserts a batch of rows into one table with a single multi-row INSERT. */
    insertBatch(batch: SeedBatch): Promise<void>;
    /** TRUNCATE ... RESTART IDENTITY CASCADE; no-op for an empty list. */
    truncateTables(tableNames: string[]): Promise<void>;
    /** Static feature flags advertised to schema-seed-core. */
    readonly capabilities: {
        enums: boolean;
        deferrableConstraints: boolean;
        returning: boolean;
        identityInsert: boolean;
    };
    private mapPostgresType;
}
/** Convenience factory: equivalent to `new PostgresAdapter(connectionString)`. */
declare function createPostgresAdapter(connectionString: string): PostgresAdapter;

export { PostgresAdapter, createPostgresAdapter };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { SqlAdapter, SchemaGraph, SeedBatch } from 'schema-seed-core';

/**
 * schema-seed SqlAdapter backed by a single `pg` Client connection to a
 * PostgreSQL database.
 */
declare class PostgresAdapter implements SqlAdapter {
    private client;
    /** @param connectionString PostgreSQL connection string passed to `pg.Client`. */
    constructor(connectionString: string);
    /** Opens the underlying client connection. */
    connect(): Promise<void>;
    /** Closes the underlying client connection. */
    disconnect(): Promise<void>;
    /** Issues BEGIN on this connection. */
    begin(): Promise<void>;
    /** Issues COMMIT on this connection. */
    commit(): Promise<void>;
    /** Issues ROLLBACK on this connection. */
    rollback(): Promise<void>;
    /**
     * Reads tables, columns, foreign keys and primary keys from
     * information_schema (all schemas except pg_catalog/information_schema).
     */
    introspectSchema(): Promise<SchemaGraph>;
    /** Inserts a batch of rows into one table with a single multi-row INSERT. */
    insertBatch(batch: SeedBatch): Promise<void>;
    /** TRUNCATE ... RESTART IDENTITY CASCADE; no-op for an empty list. */
    truncateTables(tableNames: string[]): Promise<void>;
    /** Static feature flags advertised to schema-seed-core. */
    readonly capabilities: {
        enums: boolean;
        deferrableConstraints: boolean;
        returning: boolean;
        identityInsert: boolean;
    };
    private mapPostgresType;
}
/** Convenience factory: equivalent to `new PostgresAdapter(connectionString)`. */
declare function createPostgresAdapter(connectionString: string): PostgresAdapter;

export { PostgresAdapter, createPostgresAdapter };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
// src/index.ts
|
|
2
|
+
import pg from "pg";
|
|
3
|
+
import {
|
|
4
|
+
NormalizedSqlType
|
|
5
|
+
} from "schema-seed-core";
|
|
6
|
+
var { Client } = pg;
|
|
7
|
+
var PostgresAdapter = class {
  // Single dedicated `pg.Client` connection; all queries (including the
  // BEGIN/COMMIT/ROLLBACK pair) must share one session.
  client;
  /**
   * @param {string} connectionString - PostgreSQL connection string, passed
   *   straight to `pg.Client`.
   */
  constructor(connectionString) {
    this.client = new Client({ connectionString });
  }
  /** Opens the underlying client connection. */
  async connect() {
    await this.client.connect();
  }
  /** Closes the underlying client connection. */
  async disconnect() {
    await this.client.end();
  }
  /** Starts a transaction on this connection. */
  async begin() {
    await this.client.query("BEGIN");
  }
  /** Commits the current transaction. */
  async commit() {
    await this.client.query("COMMIT");
  }
  /** Rolls back the current transaction. */
  async rollback() {
    await this.client.query("ROLLBACK");
  }
  /**
   * Builds a schema graph from information_schema: user tables, their
   * columns, foreign keys and primary keys.
   *
   * NOTE(review): tables are keyed by bare table_name, so identically named
   * tables in different schemas overwrite each other, and the FK/PK queries
   * are not filtered by table_schema either. Kept as-is for compatibility
   * with existing consumers — confirm whether multi-schema support is needed.
   */
  async introspectSchema() {
    const schema = { tables: {} };
    // 1) Base tables outside the system schemas.
    const tablesRes = await this.client.query(`
      SELECT table_name, table_schema
      FROM information_schema.tables
      WHERE table_schema NOT IN ('information_schema', 'pg_catalog')
      AND table_type = 'BASE TABLE'
    `);
    for (const row of tablesRes.rows) {
      const tableName = row.table_name;
      schema.tables[tableName] = {
        name: tableName,
        schema: row.table_schema,
        columns: {},
        foreignKeys: [],
        uniqueConstraints: []
      };
    }
    // 2) Columns; rows whose table was not collected above are skipped.
    const columnsRes = await this.client.query(`
      SELECT table_name, column_name, data_type, is_nullable, column_default, udt_name
      FROM information_schema.columns
      WHERE table_schema NOT IN ('information_schema', 'pg_catalog')
    `);
    for (const row of columnsRes.rows) {
      if (!schema.tables[row.table_name]) continue;
      const col = {
        name: row.column_name,
        type: this.mapPostgresType(row.data_type, row.udt_name),
        rawType: row.data_type,
        nullable: row.is_nullable === "YES",
        defaultValue: row.column_default,
        // serial/identity columns show up as a nextval(...) default.
        isAutoIncrement: row.column_default?.includes("nextval") || false
      };
      schema.tables[row.table_name].columns[row.column_name] = col;
    }
    // 3) Foreign keys (single-column per constraint row).
    const fkRes = await this.client.query(`
      SELECT
        tc.table_name, kcu.column_name,
        ccu.table_name AS foreign_table_name,
        ccu.column_name AS foreign_column_name
      FROM
        information_schema.table_constraints AS tc
        JOIN information_schema.key_column_usage AS kcu
          ON tc.constraint_name = kcu.constraint_name
        JOIN information_schema.constraint_column_usage AS ccu
          ON ccu.constraint_name = tc.constraint_name
      WHERE tc.constraint_type = 'FOREIGN KEY'
    `);
    for (const row of fkRes.rows) {
      if (!schema.tables[row.table_name]) continue;
      schema.tables[row.table_name].foreignKeys.push({
        columns: [row.column_name],
        referencedTable: row.foreign_table_name,
        referencedColumns: [row.foreign_column_name]
      });
    }
    // 4) Primary keys; composite keys accumulate one column per row.
    const pkRes = await this.client.query(`
      SELECT tc.table_name, kcu.column_name
      FROM information_schema.table_constraints tc
      JOIN information_schema.key_column_usage kcu ON tc.constraint_name = kcu.constraint_name
      WHERE tc.constraint_type = 'PRIMARY KEY'
    `);
    for (const row of pkRes.rows) {
      if (!schema.tables[row.table_name]) continue;
      if (!schema.tables[row.table_name].primaryKey) {
        schema.tables[row.table_name].primaryKey = { columns: [] };
      }
      schema.tables[row.table_name].primaryKey.columns.push(row.column_name);
    }
    return schema;
  }
  /**
   * Inserts all rows of a batch with one multi-row parameterized INSERT.
   * Column order is taken from the first row; every row is assumed to carry
   * the same keys (a missing key inserts NULL via `undefined`).
   */
  async insertBatch(batch) {
    const { tableName, rows } = batch;
    if (rows.length === 0) return;
    const columns = Object.keys(rows[0]);
    const placeholders = rows.map(
      (_, rowIndex) => `(${columns.map((_2, colIndex) => `$${rowIndex * columns.length + colIndex + 1}`).join(", ")})`
    ).join(", ");
    const values = rows.flatMap((row) => columns.map((col) => row[col]));
    // Values go through bind parameters; identifiers cannot, so they are
    // quoted with embedded double quotes doubled (fixes breakage/injection
    // via identifiers that contain a `"`).
    const query = `INSERT INTO ${this.quoteIdentifier(tableName)} (${columns.map((c) => this.quoteIdentifier(c)).join(", ")}) VALUES ${placeholders}`;
    await this.client.query(query, values);
  }
  /**
   * TRUNCATE ... RESTART IDENTITY CASCADE on the given tables.
   * No-op for an empty list.
   */
  async truncateTables(tableNames) {
    if (tableNames.length === 0) return;
    const quotedNames = tableNames.map((t) => this.quoteIdentifier(t)).join(", ");
    await this.client.query(`TRUNCATE ${quotedNames} RESTART IDENTITY CASCADE`);
  }
  // Static feature flags advertised to schema-seed-core.
  capabilities = {
    enums: true,
    deferrableConstraints: true,
    returning: true,
    identityInsert: false
  };
  /** Double-quotes a SQL identifier, escaping embedded double quotes. */
  quoteIdentifier(name) {
    return `"${String(name).replace(/"/g, '""')}"`;
  }
  /**
   * Maps an information_schema data_type/udt_name pair to a normalized type.
   * Integer detection is exact-match: the previous `includes("int")` check
   * wrongly classified `interval` and `point` columns as INT.
   * Unrecognized types fall back to STRING.
   */
  mapPostgresType(pgType, udtName) {
    const type = pgType.toLowerCase();
    const udt = udtName.toLowerCase();
    if (type === "bigint" || udt === "int8") return NormalizedSqlType.BIGINT;
    if (type === "integer" || type === "smallint" || type.includes("serial") || udt === "int4" || udt === "int2") {
      return NormalizedSqlType.INT;
    }
    if (type.includes("char") || type.includes("text")) return NormalizedSqlType.STRING;
    if (type.includes("bool")) return NormalizedSqlType.BOOLEAN;
    if (type.includes("timestamp") || type === "date") return NormalizedSqlType.DATETIME;
    if (type.includes("json")) return NormalizedSqlType.JSON;
    if (type.includes("numeric") || type.includes("decimal")) return NormalizedSqlType.DECIMAL;
    // "double precision" (float8) previously fell through to STRING.
    if (type.includes("float") || type.includes("real") || type === "double precision") {
      return NormalizedSqlType.FLOAT;
    }
    if (udt === "uuid") return NormalizedSqlType.UUID;
    return NormalizedSqlType.STRING;
  }
};
|
|
137
|
+
/**
 * Factory wrapper around `new PostgresAdapter(...)`.
 * @param {string} connectionString - PostgreSQL connection string.
 * @returns {PostgresAdapter} a fresh, not-yet-connected adapter.
 */
function createPostgresAdapter(connectionString) {
  const adapter = new PostgresAdapter(connectionString);
  return adapter;
}
|
|
140
|
+
export {
|
|
141
|
+
PostgresAdapter,
|
|
142
|
+
createPostgresAdapter
|
|
143
|
+
};
|
|
144
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts"],"sourcesContent":["import pg from 'pg'\nconst { Client } = pg\nimport {\n SqlAdapter,\n SchemaGraph,\n SeedBatch,\n NormalizedSqlType,\n TableSchema,\n ColumnSchema\n} from 'schema-seed-core'\n\nexport class PostgresAdapter implements SqlAdapter {\n private client: pg.Client\n\n constructor(connectionString: string) {\n this.client = new Client({ connectionString })\n }\n\n async connect(): Promise<void> {\n await this.client.connect()\n }\n\n async disconnect(): Promise<void> {\n await this.client.end()\n }\n\n async begin(): Promise<void> {\n await this.client.query('BEGIN')\n }\n\n async commit(): Promise<void> {\n await this.client.query('COMMIT')\n }\n\n async rollback(): Promise<void> {\n await this.client.query('ROLLBACK')\n }\n\n async introspectSchema(): Promise<SchemaGraph> {\n const schema: SchemaGraph = { tables: {} }\n\n // Get tables\n const tablesRes = await this.client.query(`\n SELECT table_name, table_schema\n FROM information_schema.tables\n WHERE table_schema NOT IN ('information_schema', 'pg_catalog')\n AND table_type = 'BASE TABLE'\n `)\n\n for (const row of tablesRes.rows) {\n const tableName = row.table_name\n schema.tables[tableName] = {\n name: tableName,\n schema: row.table_schema,\n columns: {},\n foreignKeys: [],\n uniqueConstraints: []\n }\n }\n\n // Get columns\n const columnsRes = await this.client.query(`\n SELECT table_name, column_name, data_type, is_nullable, column_default, udt_name\n FROM information_schema.columns\n WHERE table_schema NOT IN ('information_schema', 'pg_catalog')\n `)\n\n for (const row of columnsRes.rows) {\n if (!schema.tables[row.table_name]) continue\n\n const col: ColumnSchema = {\n name: row.column_name,\n type: this.mapPostgresType(row.data_type, row.udt_name),\n rawType: row.data_type,\n nullable: row.is_nullable === 'YES',\n defaultValue: row.column_default,\n isAutoIncrement: row.column_default?.includes('nextval') || false\n }\n 
schema.tables[row.table_name].columns[row.column_name] = col\n }\n\n // Get Foreign Keys\n const fkRes = await this.client.query(`\n SELECT\n tc.table_name, kcu.column_name, \n ccu.table_name AS foreign_table_name,\n ccu.column_name AS foreign_column_name \n FROM \n information_schema.table_constraints AS tc \n JOIN information_schema.key_column_usage AS kcu\n ON tc.constraint_name = kcu.constraint_name\n JOIN information_schema.constraint_column_usage AS ccu\n ON ccu.constraint_name = tc.constraint_name\n WHERE tc.constraint_type = 'FOREIGN KEY'\n `)\n\n for (const row of fkRes.rows) {\n if (!schema.tables[row.table_name]) continue\n schema.tables[row.table_name].foreignKeys.push({\n columns: [row.column_name],\n referencedTable: row.foreign_table_name,\n referencedColumns: [row.foreign_column_name]\n })\n }\n\n // Get Primary Keys\n const pkRes = await this.client.query(`\n SELECT tc.table_name, kcu.column_name\n FROM information_schema.table_constraints tc\n JOIN information_schema.key_column_usage kcu ON tc.constraint_name = kcu.constraint_name\n WHERE tc.constraint_type = 'PRIMARY KEY'\n `)\n\n for (const row of pkRes.rows) {\n if (!schema.tables[row.table_name]) continue\n if (!schema.tables[row.table_name].primaryKey) {\n schema.tables[row.table_name].primaryKey = { columns: [] }\n }\n schema.tables[row.table_name].primaryKey!.columns.push(row.column_name)\n }\n\n return schema\n }\n\n async insertBatch(batch: SeedBatch): Promise<void> {\n const { tableName, rows } = batch\n if (rows.length === 0) return\n\n const columns = Object.keys(rows[0])\n const placeholders = rows.map((_, rowIndex) =>\n `(${columns.map((_, colIndex) => `$${rowIndex * columns.length + colIndex + 1}`).join(', ')})`\n ).join(', ')\n\n const values = rows.flatMap(row => columns.map(col => row[col]))\n const query = `INSERT INTO \"${tableName}\" (${columns.map(c => `\"${c}\"`).join(', ')}) VALUES ${placeholders}`\n\n await this.client.query(query, values)\n }\n\n async 
truncateTables(tableNames: string[]): Promise<void> {\n if (tableNames.length === 0) return\n const quotedNames = tableNames.map(t => `\"${t}\"`).join(', ')\n await this.client.query(`TRUNCATE ${quotedNames} RESTART IDENTITY CASCADE`)\n }\n\n readonly capabilities = {\n enums: true,\n deferrableConstraints: true,\n returning: true,\n identityInsert: false\n }\n\n private mapPostgresType(pgType: string, udtName: string): NormalizedSqlType {\n const type = pgType.toLowerCase()\n const udt = udtName.toLowerCase()\n\n if (type.includes('int') || type.includes('serial')) {\n if (type.includes('big') || udt.includes('int8')) return NormalizedSqlType.BIGINT\n return NormalizedSqlType.INT\n }\n if (type.includes('char') || type.includes('text') || type === 'character varying') return NormalizedSqlType.STRING\n if (type.includes('bool')) return NormalizedSqlType.BOOLEAN\n if (type.includes('timestamp') || type.includes('date')) return NormalizedSqlType.DATETIME\n if (type.includes('json')) return NormalizedSqlType.JSON\n if (type.includes('numeric') || type.includes('decimal')) return NormalizedSqlType.DECIMAL\n if (type.includes('float') || type.includes('real')) return NormalizedSqlType.FLOAT\n if (udt === 'uuid') return NormalizedSqlType.UUID\n\n return NormalizedSqlType.STRING\n }\n}\n\nexport function createPostgresAdapter(connectionString: string): PostgresAdapter {\n return new 
PostgresAdapter(connectionString)\n}\n"],"mappings":";AAAA,OAAO,QAAQ;AAEf;AAAA,EAII;AAAA,OAGG;AARP,IAAM,EAAE,OAAO,IAAI;AAUZ,IAAM,kBAAN,MAA4C;AAAA,EACvC;AAAA,EAER,YAAY,kBAA0B;AAClC,SAAK,SAAS,IAAI,OAAO,EAAE,iBAAiB,CAAC;AAAA,EACjD;AAAA,EAEA,MAAM,UAAyB;AAC3B,UAAM,KAAK,OAAO,QAAQ;AAAA,EAC9B;AAAA,EAEA,MAAM,aAA4B;AAC9B,UAAM,KAAK,OAAO,IAAI;AAAA,EAC1B;AAAA,EAEA,MAAM,QAAuB;AACzB,UAAM,KAAK,OAAO,MAAM,OAAO;AAAA,EACnC;AAAA,EAEA,MAAM,SAAwB;AAC1B,UAAM,KAAK,OAAO,MAAM,QAAQ;AAAA,EACpC;AAAA,EAEA,MAAM,WAA0B;AAC5B,UAAM,KAAK,OAAO,MAAM,UAAU;AAAA,EACtC;AAAA,EAEA,MAAM,mBAAyC;AAC3C,UAAM,SAAsB,EAAE,QAAQ,CAAC,EAAE;AAGzC,UAAM,YAAY,MAAM,KAAK,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA,KAK7C;AAEG,eAAW,OAAO,UAAU,MAAM;AAC9B,YAAM,YAAY,IAAI;AACtB,aAAO,OAAO,SAAS,IAAI;AAAA,QACvB,MAAM;AAAA,QACN,QAAQ,IAAI;AAAA,QACZ,SAAS,CAAC;AAAA,QACV,aAAa,CAAC;AAAA,QACd,mBAAmB,CAAC;AAAA,MACxB;AAAA,IACJ;AAGA,UAAM,aAAa,MAAM,KAAK,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA,KAI9C;AAEG,eAAW,OAAO,WAAW,MAAM;AAC/B,UAAI,CAAC,OAAO,OAAO,IAAI,UAAU,EAAG;AAEpC,YAAM,MAAoB;AAAA,QACtB,MAAM,IAAI;AAAA,QACV,MAAM,KAAK,gBAAgB,IAAI,WAAW,IAAI,QAAQ;AAAA,QACtD,SAAS,IAAI;AAAA,QACb,UAAU,IAAI,gBAAgB;AAAA,QAC9B,cAAc,IAAI;AAAA,QAClB,iBAAiB,IAAI,gBAAgB,SAAS,SAAS,KAAK;AAAA,MAChE;AACA,aAAO,OAAO,IAAI,UAAU,EAAE,QAAQ,IAAI,WAAW,IAAI;AAAA,IAC7D;AAGA,UAAM,QAAQ,MAAM,KAAK,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAYzC;AAEG,eAAW,OAAO,MAAM,MAAM;AAC1B,UAAI,CAAC,OAAO,OAAO,IAAI,UAAU,EAAG;AACpC,aAAO,OAAO,IAAI,UAAU,EAAE,YAAY,KAAK;AAAA,QAC3C,SAAS,CAAC,IAAI,WAAW;AAAA,QACzB,iBAAiB,IAAI;AAAA,QACrB,mBAAmB,CAAC,IAAI,mBAAmB;AAAA,MAC/C,CAAC;AAAA,IACL;AAGA,UAAM,QAAQ,MAAM,KAAK,OAAO,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA,KAKzC;AAEG,eAAW,OAAO,MAAM,MAAM;AAC1B,UAAI,CAAC,OAAO,OAAO,IAAI,UAAU,EAAG;AACpC,UAAI,CAAC,OAAO,OAAO,IAAI,UAAU,EAAE,YAAY;AAC3C,eAAO,OAAO,IAAI,UAAU,EAAE,aAAa,EAAE,SAAS,CAAC,EAAE;AAAA,MAC7D;AACA,aAAO,OAAO,IAAI,UAAU,EAAE,WAAY,QAAQ,KAAK,IAAI,WAAW;AAAA,IAC1E;AAEA,WAAO;AAAA,EACX;AAAA,EAEA,MAAM,YAAY,OAAiC;AAC/C,UAAM,EAAE,WAAW,KAAK,IAAI;AAC5B,QAAI,KAAK,WAAW,EAAG;AAEvB,UAAM,UAAU
,OAAO,KAAK,KAAK,CAAC,CAAC;AACnC,UAAM,eAAe,KAAK;AAAA,MAAI,CAAC,GAAG,aAC9B,IAAI,QAAQ,IAAI,CAACA,IAAG,aAAa,IAAI,WAAW,QAAQ,SAAS,WAAW,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC;AAAA,IAC/F,EAAE,KAAK,IAAI;AAEX,UAAM,SAAS,KAAK,QAAQ,SAAO,QAAQ,IAAI,SAAO,IAAI,GAAG,CAAC,CAAC;AAC/D,UAAM,QAAQ,gBAAgB,SAAS,MAAM,QAAQ,IAAI,OAAK,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC,YAAY,YAAY;AAE1G,UAAM,KAAK,OAAO,MAAM,OAAO,MAAM;AAAA,EACzC;AAAA,EAEA,MAAM,eAAe,YAAqC;AACtD,QAAI,WAAW,WAAW,EAAG;AAC7B,UAAM,cAAc,WAAW,IAAI,OAAK,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI;AAC3D,UAAM,KAAK,OAAO,MAAM,YAAY,WAAW,2BAA2B;AAAA,EAC9E;AAAA,EAES,eAAe;AAAA,IACpB,OAAO;AAAA,IACP,uBAAuB;AAAA,IACvB,WAAW;AAAA,IACX,gBAAgB;AAAA,EACpB;AAAA,EAEQ,gBAAgB,QAAgB,SAAoC;AACxE,UAAM,OAAO,OAAO,YAAY;AAChC,UAAM,MAAM,QAAQ,YAAY;AAEhC,QAAI,KAAK,SAAS,KAAK,KAAK,KAAK,SAAS,QAAQ,GAAG;AACjD,UAAI,KAAK,SAAS,KAAK,KAAK,IAAI,SAAS,MAAM,EAAG,QAAO,kBAAkB;AAC3E,aAAO,kBAAkB;AAAA,IAC7B;AACA,QAAI,KAAK,SAAS,MAAM,KAAK,KAAK,SAAS,MAAM,KAAK,SAAS,oBAAqB,QAAO,kBAAkB;AAC7G,QAAI,KAAK,SAAS,MAAM,EAAG,QAAO,kBAAkB;AACpD,QAAI,KAAK,SAAS,WAAW,KAAK,KAAK,SAAS,MAAM,EAAG,QAAO,kBAAkB;AAClF,QAAI,KAAK,SAAS,MAAM,EAAG,QAAO,kBAAkB;AACpD,QAAI,KAAK,SAAS,SAAS,KAAK,KAAK,SAAS,SAAS,EAAG,QAAO,kBAAkB;AACnF,QAAI,KAAK,SAAS,OAAO,KAAK,KAAK,SAAS,MAAM,EAAG,QAAO,kBAAkB;AAC9E,QAAI,QAAQ,OAAQ,QAAO,kBAAkB;AAE7C,WAAO,kBAAkB;AAAA,EAC7B;AACJ;AAEO,SAAS,sBAAsB,kBAA2C;AAC7E,SAAO,IAAI,gBAAgB,gBAAgB;AAC/C;","names":["_"]}
|
package/package.json
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "schema-seed-adapter-postgres",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"description": "PostgreSQL adapter for schema-seed",
|
|
6
|
+
"author": "Ali Nazar",
|
|
7
|
+
"license": "MIT",
|
|
8
|
+
"private": false,
|
|
9
|
+
"sideEffects": false,
|
|
10
|
+
"repository": {
|
|
11
|
+
"type": "git",
|
|
12
|
+
"url": "https://github.com/alinazar-111/schema-seed.git"
|
|
13
|
+
},
|
|
14
|
+
"homepage": "https://github.com/alinazar-111/schema-seed#readme",
|
|
15
|
+
"bugs": {
|
|
16
|
+
"url": "https://github.com/alinazar-111/schema-seed/issues"
|
|
17
|
+
},
|
|
18
|
+
"keywords": [
|
|
19
|
+
"database",
|
|
20
|
+
"seeding",
|
|
21
|
+
"test-data",
|
|
22
|
+
"adapter",
|
|
23
|
+
"schema-seed"
|
|
24
|
+
],
|
|
25
|
+
"main": "./dist/index.cjs",
|
|
26
|
+
"module": "./dist/index.js",
|
|
27
|
+
"types": "./dist/index.d.ts",
|
|
28
|
+
"exports": {
|
|
29
|
+
".": {
|
|
30
|
+
"types": "./dist/index.d.ts",
|
|
31
|
+
"import": "./dist/index.js",
|
|
32
|
+
"require": "./dist/index.cjs"
|
|
33
|
+
}
|
|
34
|
+
},
|
|
35
|
+
"files": [
|
|
36
|
+
"dist",
|
|
37
|
+
"README.md",
|
|
38
|
+
"LICENSE"
|
|
39
|
+
],
|
|
40
|
+
"dependencies": {
|
|
41
|
+
"pg": "^8.13.1",
|
|
42
|
+
"schema-seed-core": "0.1.0"
|
|
43
|
+
},
|
|
44
|
+
"devDependencies": {
|
|
45
|
+
"@types/pg": "^8.11.10",
|
|
46
|
+
"tsup": "^8.3.5",
|
|
47
|
+
"typescript": "^5.7.2",
|
|
48
|
+
"vitest": "^2.1.8"
|
|
49
|
+
},
|
|
50
|
+
"scripts": {
|
|
51
|
+
"build": "tsup",
|
|
52
|
+
"clean": "rm -rf dist",
|
|
53
|
+
"dev": "tsup --watch",
|
|
54
|
+
"lint": "eslint .",
|
|
55
|
+
"test": "vitest run --passWithNoTests",
|
|
56
|
+
"typecheck": "tsc --noEmit"
|
|
57
|
+
}
|
|
58
|
+
}
|