@toichubek/pg-ddl-extractor 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +417 -0
- package/dist/cli/diff.d.ts +2 -0
- package/dist/cli/diff.js +4 -0
- package/dist/cli/extract.d.ts +2 -0
- package/dist/cli/extract.js +4 -0
- package/dist/cli/migrate.d.ts +2 -0
- package/dist/cli/migrate.js +4 -0
- package/dist/compare.d.ts +22 -0
- package/dist/compare.js +802 -0
- package/dist/config.d.ts +9 -0
- package/dist/config.js +66 -0
- package/dist/diff.d.ts +1 -0
- package/dist/diff.js +100 -0
- package/dist/extract.d.ts +1 -0
- package/dist/extract.js +190 -0
- package/dist/extractor.d.ts +23 -0
- package/dist/extractor.js +438 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.js +23 -0
- package/dist/migrate.d.ts +1 -0
- package/dist/migrate.js +90 -0
- package/dist/migration-generator.d.ts +21 -0
- package/dist/migration-generator.js +287 -0
- package/dist/tunnel.d.ts +26 -0
- package/dist/tunnel.js +148 -0
- package/dist/writer.d.ts +22 -0
- package/dist/writer.js +156 -0
- package/package.json +65 -0
package/dist/config.d.ts
ADDED
package/dist/config.js
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.getDbConfig = getDbConfig;
|
|
37
|
+
const dotenv = __importStar(require("dotenv"));
|
|
38
|
+
// ─── Load .env ────────────────────────────────────────────────────
|
|
39
|
+
dotenv.config();
|
|
40
|
+
/**
 * Builds a node-postgres connection config for the given environment.
 *
 * Reads `<ENV>_DB_HOST`, `<ENV>_DB_PORT`, `<ENV>_DB_NAME`, `<ENV>_DB_USER`,
 * and `<ENV>_DB_PASSWORD` from process.env (prefix is `env` upper-cased,
 * e.g. DEV or PROD).
 *
 * @param {string} env - environment name, e.g. "dev" or "prod"
 * @returns {object} pg client config with safe timeout defaults
 * @throws {Error} when host/name/user are missing, or the port is not 1–65535
 */
function getDbConfig(env) {
    const prefix = env.toUpperCase(); // DEV or PROD
    const readVar = (suffix) => process.env[`${prefix}_DB_${suffix}`];
    const host = readVar("HOST");
    const rawPort = readVar("PORT");
    const database = readVar("NAME");
    const user = readVar("USER");
    const password = readVar("PASSWORD");
    // Host, database and user are mandatory; password and port have defaults.
    if (!host || !database || !user) {
        throw new Error(`Missing DB config for env "${env}". ` +
            `Expected ${prefix}_DB_HOST, ${prefix}_DB_NAME, ${prefix}_DB_USER in .env`);
    }
    // Default PostgreSQL port when the variable is unset/empty.
    const portNumber = rawPort ? parseInt(rawPort, 10) : 5432;
    const portOutOfRange = isNaN(portNumber) || portNumber < 1 || portNumber > 65535;
    if (rawPort && portOutOfRange) {
        throw new Error(`Invalid port number in ${prefix}_DB_PORT: "${rawPort}". Port must be between 1 and 65535`);
    }
    return {
        host,
        port: portNumber,
        database,
        user,
        password: password || "",
        // safe defaults
        connectionTimeoutMillis: 10000,
        query_timeout: 30000,
    };
}
|
package/dist/diff.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
package/dist/diff.js
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
const fs = __importStar(require("fs"));
|
|
37
|
+
const path = __importStar(require("path"));
|
|
38
|
+
const commander_1 = require("commander");
|
|
39
|
+
const compare_1 = require("./compare");
|
|
40
|
+
/**
 * Defines the pg-ddl-diff CLI surface and parses process.argv via commander.
 *
 * Note: commander camel-cases flags, so `--sql-dir` is read back as
 * `options.sqlDir` by the caller.
 *
 * @returns parsed options object: { report?, sqlDir?, dev?, prod? }
 */
function parseArgs() {
    commander_1.program
        .name("pg-ddl-diff")
        .description("Compare dev and prod PostgreSQL schemas")
        .version("1.0.0")
        .option("--report", "Generate markdown and HTML reports")
        .option("--sql-dir <path>", "Path to SQL directory (default: ../../sql)")
        .option("--dev <path>", "Path to dev schema directory")
        .option("--prod <path>", "Path to prod schema directory")
        .parse(process.argv);
    return commander_1.program.opts();
}
|
|
52
|
+
// ─── Main ─────────────────────────────────────────────────────────
|
|
53
|
+
/**
 * CLI entry point for pg-ddl-diff.
 *
 * Resolves the SQL snapshot root and the per-environment directories,
 * runs the dev-vs-prod DDL comparison, always prints a console report,
 * and — when --report is passed — writes date-stamped Markdown and HTML
 * reports under <sqlRoot>/reports/. Exits with code 1 when a required
 * directory is missing or the comparison throws.
 */
function main() {
    const options = parseArgs();
    // SQL root precedence: --sql-dir flag, then SQL_OUTPUT_DIR env var,
    // then ../../sql relative to this compiled file.
    let sqlRoot;
    if (options.sqlDir) {
        sqlRoot = path.resolve(options.sqlDir);
    }
    else if (process.env.SQL_OUTPUT_DIR) {
        sqlRoot = path.resolve(process.env.SQL_OUTPUT_DIR);
    }
    else {
        sqlRoot = path.resolve(__dirname, "..", "..", "sql");
    }
    if (!fs.existsSync(sqlRoot)) {
        console.error(`❌ sql/ folder not found at: ${sqlRoot}`);
        console.error(" Run extract:dev and extract:prod first.");
        process.exit(1);
    }
    // Per-environment snapshot dirs, overridable via --dev / --prod.
    const pickDir = (override, name) => (override ? path.resolve(override) : path.join(sqlRoot, name));
    const devDir = pickDir(options.dev, "dev");
    const prodDir = pickDir(options.prod, "prod");
    if (!fs.existsSync(devDir)) {
        console.error("❌ sql/dev/ not found. Run: npm run extract:dev");
        process.exit(1);
    }
    if (!fs.existsSync(prodDir)) {
        console.error("❌ sql/prod/ not found. Run: npm run extract:prod");
        process.exit(1);
    }
    try {
        const summary = (0, compare_1.compareDdl)(sqlRoot);
        // The console report is printed unconditionally.
        console.log((0, compare_1.formatConsoleReport)(summary));
        if (options.report) {
            // Reports are named by today's date (YYYY-MM-DD).
            const reportDir = path.join(sqlRoot, "reports");
            fs.mkdirSync(reportDir, { recursive: true });
            const dateStamp = new Date().toISOString().slice(0, 10);
            const mdPath = path.join(reportDir, `diff_${dateStamp}.md`);
            const htmlPath = path.join(reportDir, `diff_${dateStamp}.html`);
            fs.writeFileSync(mdPath, (0, compare_1.formatMarkdownReport)(summary), "utf-8");
            fs.writeFileSync(htmlPath, (0, compare_1.formatHtmlReport)(summary), "utf-8");
            console.log(`\n📄 Markdown: ${mdPath}`);
            console.log(`🌐 HTML: ${htmlPath}`);
            console.log(`\n Open in browser: open ${htmlPath}`);
        }
    }
    catch (err) {
        console.error(`❌ ${err.message}`);
        process.exit(1);
    }
}
main();
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
package/dist/extract.js
ADDED
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
const path = __importStar(require("path"));
|
|
37
|
+
const dotenv = __importStar(require("dotenv"));
|
|
38
|
+
const pg_1 = require("pg");
|
|
39
|
+
const commander_1 = require("commander");
|
|
40
|
+
const config_1 = require("./config");
|
|
41
|
+
const writer_1 = require("./writer");
|
|
42
|
+
const extractor_1 = require("./extractor");
|
|
43
|
+
const tunnel_1 = require("./tunnel");
|
|
44
|
+
// ─── Load .env ────────────────────────────────────────────────────
|
|
45
|
+
dotenv.config();
|
|
46
|
+
/**
 * Defines the pg-ddl-extract CLI surface, parses process.argv via commander,
 * and rejects any --env value other than "dev" or "prod" (exit code 1).
 *
 * @returns parsed options: { env, host?, port?, database?, user?, password?, output? }
 */
function parseArgs() {
    commander_1.program
        .name("pg-ddl-extract")
        .description("Extract PostgreSQL DDL into organized folder structure")
        .version("1.0.0")
        .option("--env <environment>", "Environment (dev or prod)", "dev")
        .option("--host <host>", "Database host")
        .option("--port <port>", "Database port")
        .option("--database <database>", "Database name")
        .option("--user <user>", "Database user")
        .option("--password <password>", "Database password")
        .option("--output <path>", "Output directory path")
        .parse(process.argv);
    const options = commander_1.program.opts();
    // Validate env if provided (commander defaults it to "dev", so this
    // effectively always runs).
    if (options.env && !["dev", "prod"].includes(options.env)) {
        console.error(`❌ Invalid env: "${options.env}". Use --env dev or --env prod`);
        process.exit(1);
    }
    return options;
}
|
|
67
|
+
// ─── Main ─────────────────────────────────────────────────────────
|
|
68
|
+
/**
 * CLI entry point for pg-ddl-extract.
 *
 * Resolves the output directory, builds the database connection config
 * (explicit CLI flags win over the .env-based per-environment config),
 * optionally opens an SSH tunnel, connects with node-postgres, runs the
 * full DDL extraction via DdlExtractor/SqlFileWriter, and prints a
 * created/updated/unchanged summary. Exits with code 1 on any failure.
 */
async function main() {
    const options = parseArgs();
    const env = options.env || "dev";
    // Output precedence: --output flag, then SQL_OUTPUT_DIR/<env>,
    // then ../../sql/<env> relative to this compiled file.
    const outputDir = options.output
        ? path.resolve(options.output)
        : process.env.SQL_OUTPUT_DIR
            ? path.resolve(process.env.SQL_OUTPUT_DIR, env)
            : path.resolve(__dirname, "..", "..", "sql", env);
    console.log("═══════════════════════════════════════════════════");
    console.log(` PostgreSQL DDL Extractor`);
    console.log(` Environment: ${env.toUpperCase()}`);
    console.log(` Output: ${outputDir}`);
    console.log("═══════════════════════════════════════════════════");
    // Check if an SSH tunnel is configured for this environment.
    const sshConfig = (0, tunnel_1.getSshConfig)(env);
    let tunnel = null;
    // DB config: use CLI options if any connection flag was provided,
    // otherwise the env-based config from ./config (may throw — see the
    // .catch on the main() call below).
    let pgConfig = options.host || options.database || options.user
        ? {
            host: options.host || "localhost",
            port: options.port ? parseInt(options.port, 10) : 5432,
            database: options.database,
            user: options.user,
            password: options.password || "",
            connectionTimeoutMillis: 10000,
            query_timeout: 30000,
        }
        : (0, config_1.getDbConfig)(env);
    // When any CLI connection flag is used, --database and --user are mandatory.
    if (options.host || options.database || options.user) {
        if (!options.database || !options.user) {
            console.error("❌ When using CLI flags, --database and --user are required");
            process.exit(1);
        }
        // Validate port number
        if (options.port) {
            const port = parseInt(options.port, 10);
            if (isNaN(port) || port < 1 || port > 65535) {
                console.error(`❌ Invalid port number: "${options.port}". Port must be between 1 and 65535`);
                process.exit(1);
            }
        }
    }
    if (sshConfig) {
        console.log(`\n🔒 SSH tunnel: ${sshConfig.sshUser}@${sshConfig.sshHost}:${sshConfig.sshPort}`);
        console.log(` Remote DB: ${sshConfig.remoteHost}:${sshConfig.remotePort}`);
        try {
            tunnel = await (0, tunnel_1.createSshTunnel)(sshConfig);
            console.log(` Local port: 127.0.0.1:${tunnel.localPort}`);
            // Redirect the pg connection through the local tunnel endpoint.
            pgConfig = {
                ...pgConfig,
                host: "127.0.0.1",
                port: tunnel.localPort,
            };
        }
        catch (err) {
            console.error(`\n❌ SSH tunnel failed: ${err.message}`);
            if (err.message.includes("Authentication")) {
                console.error(" → Check SSH_USER, SSH_PASSWORD or SSH_KEY_PATH in .env");
            }
            if (err.message.includes("ECONNREFUSED")) {
                console.error(" → SSH server not reachable");
            }
            process.exit(1);
        }
    }
    console.log(`\n🔌 Connecting to ${pgConfig.host}:${pgConfig.port}/${pgConfig.database}...`);
    const client = new pg_1.Client(pgConfig);
    try {
        await client.connect();
        console.log("✅ Connected\n");
        // Print the server version line for operator context.
        const { rows } = await client.query("SELECT version();");
        console.log(` DB: ${rows[0].version.split(",")[0]}\n`);
        // Run the full extraction into the per-environment output directory.
        const writer = new writer_1.SqlFileWriter(outputDir);
        const extractor = new extractor_1.DdlExtractor(client, writer);
        await extractor.extractAll();
        // Summary of written objects and what changed since the last run.
        const summary = writer.getSummary();
        const total = Object.values(summary).reduce((a, b) => a + b, 0);
        const stats = writer.getChangeStats();
        console.log("\n═══════════════════════════════════════════════════");
        console.log(` ✅ Done! Extracted ${total} objects into sql/${env}/`);
        console.log("═══════════════════════════════════════════════════");
        console.log(`\n 📁 ${outputDir}`);
        console.log(` 📄 Full dump: sql/${env}/_full_dump.sql`);
        console.log("\n Change Summary:");
        console.log(` 🆕 Created: ${stats.created}`);
        console.log(` 🔄 Updated: ${stats.updated}`);
        console.log(` ✅ Unchanged: ${stats.unchanged}`);
        if (stats.created === 0 && stats.updated === 0) {
            console.log(`\n 🎉 No changes - database structure is unchanged!\n`);
        }
        else {
            console.log(`\n Ready to commit to Git! 🎉\n`);
        }
    }
    catch (err) {
        console.error(`\n❌ Error: ${err.message}`);
        if (err.code === "ECONNREFUSED") {
            console.error(" → Check that the database server is running");
        }
        if (err.code === "28P01") {
            console.error(" → Invalid username or password");
        }
        if (err.code === "3D000") {
            console.error(" → Database does not exist");
        }
        process.exit(1);
    }
    finally {
        await client.end();
        // Close SSH tunnel if it was opened
        if (tunnel) {
            await tunnel.close();
            console.log("🔒 SSH tunnel closed");
        }
    }
}
// FIX: the original bare `main();` left rejections from code that throws
// before the try block (notably getDbConfig when .env vars are missing)
// as unhandled promise rejections. Report them cleanly and exit(1) like
// every other failure path.
main().catch((err) => {
    console.error(`❌ ${err.message}`);
    process.exit(1);
});
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { Client } from "pg";
|
|
2
|
+
import { SqlFileWriter } from "./writer";
|
|
3
|
+
export declare class DdlExtractor {
|
|
4
|
+
private client;
|
|
5
|
+
private writer;
|
|
6
|
+
private allDdl;
|
|
7
|
+
constructor(client: Client, writer: SqlFileWriter);
|
|
8
|
+
/** Run full extraction */
|
|
9
|
+
extractAll(): Promise<void>;
|
|
10
|
+
private extractSchemas;
|
|
11
|
+
private extractTypes;
|
|
12
|
+
private extractSequences;
|
|
13
|
+
private extractTables;
|
|
14
|
+
private buildTableDdl;
|
|
15
|
+
private buildColumnType;
|
|
16
|
+
private extractViews;
|
|
17
|
+
private extractMaterializedViews;
|
|
18
|
+
private extractFunctions;
|
|
19
|
+
private extractTriggers;
|
|
20
|
+
private extractIndexes;
|
|
21
|
+
private save;
|
|
22
|
+
private log;
|
|
23
|
+
}
|