@dbml/cli 5.5.1 → 5.6.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. package/__tests__/db2dbml/mssql/dbml-error.log +22 -0
  2. package/__tests__/db2dbml/mysql/dbml-error.log +6 -0
  3. package/__tests__/db2dbml/oracle/dbml-error.log +28 -0
  4. package/__tests__/db2dbml/postgres/dbml-error.log +14 -0
  5. package/__tests__/dbml2sql/filename --mysql --out-file/dbml-error.log +0 -0
  6. package/__tests__/dbml2sql/filename --mysql --out-file/out-files/schema.sql +65 -0
  7. package/__tests__/dbml2sql/filename --mysql stdout/dbml-error.log +0 -0
  8. package/__tests__/dbml2sql/filename --oracle --out-file/dbml-error.log +0 -0
  9. package/__tests__/dbml2sql/filename --oracle --out-file/out-files/schema.sql +61 -0
  10. package/__tests__/dbml2sql/filename --oracle stdout/dbml-error.log +0 -0
  11. package/__tests__/dbml2sql/filename --out-file/dbml-error.log +0 -0
  12. package/__tests__/dbml2sql/filename --out-file/out-files/schema.sql +77 -0
  13. package/__tests__/dbml2sql/filename --postgres --out-file/dbml-error.log +0 -0
  14. package/__tests__/dbml2sql/filename --postgres --out-file/out-files/schema.sql +77 -0
  15. package/__tests__/dbml2sql/filename --postgres stdout/dbml-error.log +0 -0
  16. package/__tests__/dbml2sql/filename stdout/dbml-error.log +0 -0
  17. package/__tests__/dbml2sql/filenames --mysql --out-file/dbml-error.log +0 -0
  18. package/__tests__/dbml2sql/filenames --mysql --out-file/out-files/schema.sql +172 -0
  19. package/__tests__/dbml2sql/filenames --mysql stdout/dbml-error.log +0 -0
  20. package/__tests__/dbml2sql/filenames --oracle --out-file/dbml-error.log +0 -0
  21. package/__tests__/dbml2sql/filenames --oracle --out-file/out-files/schema.sql +172 -0
  22. package/__tests__/dbml2sql/filenames --oracle stdout/dbml-error.log +0 -0
  23. package/__tests__/dbml2sql/filenames --out-file/dbml-error.log +0 -0
  24. package/__tests__/dbml2sql/filenames --out-file/out-files/schema.sql +172 -0
  25. package/__tests__/dbml2sql/filenames --postgres --out-file/dbml-error.log +0 -0
  26. package/__tests__/dbml2sql/filenames --postgres --out-file/out-files/schema.sql +172 -0
  27. package/__tests__/dbml2sql/filenames --postgres stdout/dbml-error.log +0 -0
  28. package/__tests__/dbml2sql/filenames stdout/dbml-error.log +0 -0
  29. package/__tests__/dbml2sql/multiple_schema_mssql/dbml-error.log +0 -0
  30. package/__tests__/dbml2sql/multiple_schema_mssql/out-files/multiple_schema.out.sql +62 -0
  31. package/__tests__/dbml2sql/multiple_schema_mysql/dbml-error.log +0 -0
  32. package/__tests__/dbml2sql/multiple_schema_mysql/out-files/multiple_schema.out.sql +50 -0
  33. package/__tests__/dbml2sql/multiple_schema_oracle/dbml-error.log +0 -0
  34. package/__tests__/dbml2sql/multiple_schema_oracle/out-files/multiple_schema.out.sql +88 -0
  35. package/__tests__/dbml2sql/multiple_schema_pg/dbml-error.log +0 -0
  36. package/__tests__/dbml2sql/multiple_schema_pg/out-files/multiple_schema.out.sql +67 -0
  37. package/__tests__/dbml2sql/syntax-error/dbml-error.log +6 -0
  38. package/__tests__/sql2dbml/custom-error-alter-table-column-not-found --oracle/dbml-error.log +6 -0
  39. package/__tests__/sql2dbml/custom-error-alter-table-table-not-found --oracle/dbml-error.log +6 -0
  40. package/__tests__/sql2dbml/custom-error-column-comment-column-not-found --oracle/dbml-error.log +6 -0
  41. package/__tests__/sql2dbml/custom-error-column-comment-table-not-found --oracle/dbml-error.log +6 -0
  42. package/__tests__/sql2dbml/custom-error-create-index-table-not-found --oracle/dbml-error.log +6 -0
  43. package/__tests__/sql2dbml/custom-error-table-comment-table-not-found --oracle/dbml-error.log +6 -0
  44. package/__tests__/sql2dbml/filename --mssql --out-file/dbml-error.log +0 -0
  45. package/__tests__/sql2dbml/filename --mssql --out-file/out-files/schema.dbml +25 -0
  46. package/__tests__/sql2dbml/filename --mysql --out-file/dbml-error.log +0 -0
  47. package/__tests__/sql2dbml/filename --mysql --out-file/out-files/schema.dbml +74 -0
  48. package/__tests__/sql2dbml/filename --mysql stdout/dbml-error.log +0 -0
  49. package/__tests__/sql2dbml/filename --oracle --out-file/dbml-error.log +0 -0
  50. package/__tests__/sql2dbml/filename --oracle --out-file/out-files/schema.dbml +83 -0
  51. package/__tests__/sql2dbml/filename --out-file/dbml-error.log +0 -0
  52. package/__tests__/sql2dbml/filename --out-file/out-files/schema.dbml +74 -0
  53. package/__tests__/sql2dbml/filename --postgres --out-file/dbml-error.log +0 -0
  54. package/__tests__/sql2dbml/filename --postgres --out-file/out-files/schema.dbml +74 -0
  55. package/__tests__/sql2dbml/filename --postgres stdout/dbml-error.log +0 -0
  56. package/__tests__/sql2dbml/filename --snowflake stdout/dbml-error.log +0 -0
  57. package/__tests__/sql2dbml/filename stdout/dbml-error.log +0 -0
  58. package/__tests__/sql2dbml/filenames --mysql --out-file/dbml-error.log +0 -0
  59. package/__tests__/sql2dbml/filenames --mysql --out-file/out-files/schema.dbml +170 -0
  60. package/__tests__/sql2dbml/filenames --mysql stdout/dbml-error.log +0 -0
  61. package/__tests__/sql2dbml/filenames --out-file/dbml-error.log +0 -0
  62. package/__tests__/sql2dbml/filenames --out-file/out-files/schema.dbml +170 -0
  63. package/__tests__/sql2dbml/filenames --postgres --out-file/dbml-error.log +0 -0
  64. package/__tests__/sql2dbml/filenames --postgres --out-file/out-files/schema.dbml +170 -0
  65. package/__tests__/sql2dbml/filenames --postgres stdout/dbml-error.log +0 -0
  66. package/__tests__/sql2dbml/filenames stdout/dbml-error.log +0 -0
  67. package/__tests__/sql2dbml/multiple_schema_mssql/dbml-error.log +0 -0
  68. package/__tests__/sql2dbml/multiple_schema_mssql/out-files/multiple_schema.out.dbml +58 -0
  69. package/__tests__/sql2dbml/multiple_schema_mysql/dbml-error.log +0 -0
  70. package/__tests__/sql2dbml/multiple_schema_mysql/out-files/multiple_schema.out.dbml +136 -0
  71. package/__tests__/sql2dbml/multiple_schema_pg/dbml-error.log +0 -0
  72. package/__tests__/sql2dbml/multiple_schema_pg/out-files/multiple_schema.out.dbml +101 -0
  73. package/__tests__/sql2dbml/syntax-error/dbml-error.log +6 -0
  74. package/__tests__/sql2dbml/syntax-error-duplicate-endpoints --mssql/dbml-error.log +6 -0
  75. package/__tests__/sql2dbml/syntax-error-duplicate-endpoints --mysql/dbml-error.log +6 -0
  76. package/eslint.config.ts +13 -6
  77. package/lib/index.js +374 -20
  78. package/package.json +9 -26
  79. package/tsconfig.json +2 -1
  80. package/vite.config.ts +50 -0
  81. package/.babelrc +0 -19
  82. package/jest.config.ts +0 -14
  83. package/lib/cli/config.js +0 -29
  84. package/lib/cli/connector.js +0 -38
  85. package/lib/cli/export.js +0 -37
  86. package/lib/cli/import.js +0 -36
  87. package/lib/cli/index.js +0 -76
  88. package/lib/cli/outputPlugins/outputConsolePlugin.js +0 -12
  89. package/lib/cli/outputPlugins/outputFilePlugin.js +0 -28
  90. package/lib/cli/utils.js +0 -81
  91. package/lib/cli/validatePlugins/validatePlugins.js +0 -14
  92. package/lib/errors/domainError.js +0 -15
  93. package/lib/errors/index.js +0 -20
  94. package/lib/errors/syntaxError.js +0 -23
  95. package/lib/helpers/logger.js +0 -74
package/lib/index.js CHANGED
@@ -1,24 +1,378 @@
- "use strict";
-
- Object.defineProperty(exports, "__esModule", {
- value: true
+ Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
+ key = keys[i];
+ if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
+ get: ((k) => from[k]).bind(null, key),
+ enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
+ });
+ }
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
+ value: mod,
+ enumerable: true
+ }) : target, mod));
+ let commander = require("commander");
+ commander = __toESM(commander);
+ let __dbml_core = require("@dbml/core");
+ let figures = require("figures");
+ figures = __toESM(figures);
+ let chalk = require("chalk");
+ chalk = __toESM(chalk);
+ let path = require("path");
+ path = __toESM(path);
+ let fs = require("fs");
+ fs = __toESM(fs);
+ let lodash = require("lodash");
+ lodash = __toESM(lodash);
+ let winston = require("winston");
+ let __dbml_connector = require("@dbml/connector");
+ var config_default = {
+ mysql: { name: "MySQL" },
+ mysqlLegacy: { name: "MySQL" },
+ postgres: { name: "PostgreSQL" },
+ postgresLegacy: { name: "PostgreSQL" },
+ mssql: { name: "SQL Server" },
+ oracle: { name: "Oracle" },
+ snowflake: { name: "Snowflake" }
+ };
+ function resolvePaths(paths) {
+ if (!Array.isArray(paths)) return path.default.resolve(process.cwd(), paths);
+ return paths.map((_path) => path.default.resolve(process.cwd(), _path));
+ }
+ function validateInputFilePaths(paths, validatePlugin) {
+ return paths.every((_path) => validatePlugin(_path));
+ }
+ function getFormatOpt(opts) {
+ const formatOpts = Object.keys(opts).filter((opt) => {
+ return [
+ "postgres",
+ "mysql",
+ "mssql",
+ "postgresLegacy",
+ "mysqlLegacy",
+ "mssqlLegacy",
+ "oracle",
+ "snowflake"
+ ].includes(opt);
+ });
+ let format$1 = "postgres";
+ let cnt = 0;
+ formatOpts.forEach((opt) => {
+ if (opts[opt]) {
+ cnt += 1;
+ if (cnt > 1) throw new Error("Too many format options");
+ format$1 = opt;
+ }
+ });
+ return format$1;
+ }
+ function getConnectionOpt(args) {
+ const supportedDatabases = [
+ "postgres",
+ "mysql",
+ "mssql",
+ "snowflake",
+ "bigquery",
+ "oracle"
+ ];
+ return (0, lodash.reduce)(args, (connectionOpt, arg) => {
+ if (supportedDatabases.includes(arg)) connectionOpt.databaseType = arg;
+ if (/^.*[:;]/.test(arg)) connectionOpt.connection = arg;
+ if (/^[a-zA-Z]:[\\/](?:[^<>:"/\\|?*\n\r]+[\\/])*[^<>:"/\\|?*\n\r]*$/.test(arg) || /^(\/|\.\/|~\/|\.\.\/)([^<>:"|?*\n\r]*\/?)*[^<>:"|?*\n\r]*$/.test(arg)) connectionOpt.connection = arg;
+ return connectionOpt;
+ }, {
+ connection: args[0],
+ databaseType: "unknown"
+ });
+ }
+ function generate(inputPaths, transform, outputPlugin) {
+ inputPaths.forEach((_path) => {
+ const source = fs.default.readFileSync(_path, "utf-8");
+ try {
+ const content = transform(source);
+ outputPlugin.write(content);
+ } catch (e) {
+ if (e instanceof __dbml_core.CompilerError) throw e.map((diag) => ({
+ ...diag,
+ message: diag.message,
+ filepath: path.default.basename(_path),
+ stack: diag.stack
+ }));
+ throw e;
+ }
+ });
+ }
+ function validateFilePlugin(_path) {
+ if (fs.default.statSync(_path).isDirectory(_path)) throw new Error("Expect input to be files");
+ }
+ var OutputConsolePlugin = class {
+ static write(content) {
+ console.log(content);
+ }
+ };
+ var outputConsolePlugin_default = OutputConsolePlugin;
+ var OutputFilePlugin = class {
+ constructor(filePath, header) {
+ this.filePath = filePath;
+ this.header = header;
+ this.isWrite = false;
+ }
+ start() {
+ fs.default.writeFileSync(this.filePath, "");
+ this.stream = fs.default.createWriteStream(this.filePath, { flags: "a" });
+ if (this.header) this.stream.write(this.header);
+ this.isWrite = true;
+ }
+ write(content) {
+ if (!this.isWrite) this.start();
+ this.stream.write(content);
+ }
+ };
+ var outputFilePlugin_default = OutputFilePlugin;
+ var { combine, timestamp, printf } = winston.format;
+ var consoleFormat = printf((info) => {
+ const { level, message } = info;
+ return ` ${chalk.default.red(level.toUpperCase())}: ${message}\n
+ A complete log can be found in:
+ ${path.default.resolve(process.cwd(), "dbml-error.log")}`;
  });
- Object.defineProperty(exports, "db2dbml", {
- enumerable: true,
- get: function () {
- return _cli.db2dbml;
- }
+ var fileFormat = printf((info) => {
+ const { timestamp: timestamp$1, stack, rootError } = info;
+ let logContent = `${timestamp$1}\n${stack}\n`;
+ if (rootError) {
+ logContent += "\nROOT_ERROR:";
+ logContent += `\n${rootError.stack}`;
+ if (rootError.location) logContent += `\n${JSON.stringify(rootError.location)}`;
+ logContent += "\n";
+ }
+ return logContent;
  });
- Object.defineProperty(exports, "dbml2sql", {
- enumerable: true,
- get: function () {
- return _cli.dbml2sql;
- }
+ var consoleLogger = (0, winston.createLogger)({
+ format: combine(consoleFormat),
+ transports: [new winston.transports.Console({ level: "error" })]
  });
- Object.defineProperty(exports, "sql2dbml", {
- enumerable: true,
- get: function () {
- return _cli.sql2dbml;
- }
+ var fileLogger = (0, winston.createLogger)({
+ format: combine(timestamp(), fileFormat),
+ transports: [new winston.transports.File({
+ filename: "dbml-error.log",
+ level: "error"
+ })]
  });
- var _cli = require("./cli");
+ var logger = {
+ debug(msg) {
+ consoleLogger.debug(msg);
+ },
+ info(msg) {
+ consoleLogger.info(msg);
+ },
+ warn(msg) {
+ consoleLogger.warn(msg);
+ },
+ error(msg) {
+ consoleLogger.error(msg);
+ fileLogger.error(msg);
+ },
+ log(level, msg) {
+ const lvl = exports[level];
+ lvl(msg);
+ }
+ };
+ var logger_default = logger;
+ var DomainError = class extends Error {
+ constructor(message, rootError = {}) {
+ super(message);
+ this.name = this.constructor.name;
+ this.rootError = rootError;
+ Error.captureStackTrace(this, this.constructor);
+ }
+ };
+ var domainError_default = DomainError;
+ var SyntaxError = class extends domainError_default {
+ constructor(fileName, rootError = {}) {
+ let message = `You have a syntax error at "${fileName}"`;
+ if (rootError.location) message += ` line ${rootError.location.start.line} column ${rootError.location.start.column}`;
+ message += ".";
+ if (!lodash.default.isEmpty(rootError)) message += ` ${rootError.message}`;
+ super(message, rootError);
+ }
+ };
+ var syntaxError_default = SyntaxError;
+ async function importHandler(program$1) {
+ try {
+ const inputPaths = resolvePaths(program$1.args);
+ validateInputFilePaths(inputPaths, validateFilePlugin);
+ const opts = program$1.opts();
+ const format$1 = getFormatOpt(opts);
+ if (!opts.outFile && !opts.outDir) generate(inputPaths, (sql) => __dbml_core.importer.import(sql, format$1), outputConsolePlugin_default);
+ else if (opts.outFile) {
+ generate(inputPaths, (sql) => __dbml_core.importer.import(sql, format$1), new outputFilePlugin_default(resolvePaths(opts.outFile)));
+ console.log(` ${chalk.default.green(figures.default.main.tick)} Generated DBML file from SQL file (${config_default[format$1].name}): ${path.default.basename(opts.outFile)}`);
+ }
+ } catch (error) {
+ logger_default.error(`\n ${error.diags.map((diag) => new syntaxError_default(diag.filepath, diag)).map(({ message }) => message).join("\n ")}`);
+ }
+ }
+ async function exportHandler(program$1) {
+ try {
+ const inputPaths = resolvePaths(program$1.args);
+ validateInputFilePaths(inputPaths, validateFilePlugin);
+ const opts = program$1.opts();
+ const format$1 = getFormatOpt(opts);
+ if (!opts.outFile && !opts.outDir) generate(inputPaths, (dbml) => __dbml_core.exporter.export(dbml, format$1), outputConsolePlugin_default);
+ else if (opts.outFile) {
+ const header = [
+ "-- SQL dump generated using DBML (dbml.dbdiagram.io)\n",
+ `-- Database: ${config_default[format$1].name}\n`,
+ `-- Generated at: ${(/* @__PURE__ */ new Date()).toISOString()}\n\n`
+ ].join("");
+ generate(inputPaths, (dbml) => __dbml_core.exporter.export(dbml, format$1), new outputFilePlugin_default(resolvePaths(opts.outFile), header));
+ console.log(` ${chalk.default.green(figures.default.main.tick)} Generated SQL dump file (${config_default[format$1].name}): ${path.default.basename(opts.outFile)}`);
+ }
+ } catch (error) {
+ logger_default.error(`\n ${error.diags.map((diag) => new syntaxError_default(diag.filepath, diag)).map(({ message }) => message).join("\n ")}`);
+ }
+ }
+ async function connectionHandler(program$1) {
+ try {
+ const { connection, databaseType } = getConnectionOpt(program$1.args);
+ const opts = program$1.opts();
+ const schemaJson = await __dbml_connector.connector.fetchSchemaJson(connection, databaseType);
+ if (!opts.outFile && !opts.outDir) {
+ const res = __dbml_core.importer.generateDbml(schemaJson);
+ outputConsolePlugin_default.write(res);
+ } else if (opts.outFile) {
+ const res = __dbml_core.importer.generateDbml(schemaJson);
+ new outputFilePlugin_default(resolvePaths(opts.outFile)).write(res);
+ console.log(` ${chalk.default.green(figures.default.main.tick)} Generated DBML file from database's connection: ${path.default.basename(opts.outFile)}`);
+ }
+ } catch (error) {
+ logger_default.error(error);
+ }
+ }
+ const $schema = "https://json.schemastore.org/package";
+ const name = "@dbml/cli";
+ const version = "5.6.0-alpha.0";
+ const description = "";
+ const main = "lib/index.js";
+ const license = "Apache-2.0";
+ const scripts = {
+ "test": "vitest",
+ "test:watch": "vitest tests",
+ "coverage": "vitest --coverage",
+ "build": "rm -rf ./lib && tsc --noEmit && vite build",
+ "dev": "vite build --watch",
+ "prepublish": "npm run build",
+ "lint": "eslint .",
+ "lint:fix": "eslint --fix ."
+ };
+ const publishConfig = { "access": "public" };
+ const bin = {
+ "dbml2sql": "bin/dbml2sql.js",
+ "sql2dbml": "bin/sql2dbml.js",
+ "db2dbml": "bin/db2dbml.js"
+ };
+ const author = "Holistics <dev@holistics.io>";
+ const homepage = "https://dbml.dbdiagram.io";
+ const repository = "https://github.com/holistics/dbml/tree/master/packages/dbml-cli";
+ const keywords = ["dbml", "dbml-cli"];
+ const dependencies = {
+ "@dbml/connector": "^5.6.0-alpha.0",
+ "@dbml/core": "^5.6.0-alpha.0",
+ "bluebird": "^3.5.5",
+ "chalk": "^2.4.2",
+ "commander": "^2.20.0",
+ "esm": "^3.2.25",
+ "figures": "^3.2.0",
+ "lodash": "^4.17.15",
+ "pegjs-require-import": "^0.0.2",
+ "strip-ansi": "^5.2.0",
+ "winston": "^3.2.1"
+ };
+ const gitHead = "e0cd0b00099344c05af0a83e30c2c9c2ccc325cc";
+ const engines = { "node": ">=18" };
+ var package_default = {
+ $schema,
+ name,
+ version,
+ description: "",
+ main,
+ license,
+ scripts,
+ publishConfig,
+ bin,
+ author,
+ homepage,
+ repository,
+ keywords,
+ dependencies,
+ gitHead,
+ engines
+ };
+ function showHelp(args) {
+ if (args.length < 3) commander.default.help();
+ }
+ function dbml2sql(args) {
+ commander.default.version(package_default.version);
+ commander.default.usage("[options] <files...>").option("--mysql").option("--postgres").option("--mssql").option("--oracle").option("-o, --out-file <pathspec>", "compile all input files into a single files");
+ showHelp(args);
+ commander.default.parse(args);
+ exportHandler(commander.default);
+ }
+ function sql2dbml(args) {
+ commander.default.version(package_default.version);
+ commander.default.usage("[options] <files...>").option("--mysql").option("--mysql-legacy").option("--postgres").option("--postgres-legacy").option("--mssql").option("--mssql-legacy").option("--snowflake").option("--oracle").option("-o, --out-file <pathspec>", "compile all input files into a single files");
+ showHelp(args);
+ commander.default.parse(args);
+ importHandler(commander.default);
+ }
+ function db2dbml(args) {
+ commander.default.version(package_default.version);
+ commander.default.usage("<database-type> <connection-string> [options]").description(`Generate DBML directly from a database
+ <database-type> your database format (postgres, mysql, mssql, snowflake, bigquery)
+ <connection-string> your database connection string:
+ - postgres: 'postgresql://user:password@localhost:5432/dbname?schemas=schema1,schema2,schema3'
+ - mysql: 'mysql://user:password@localhost:3306/dbname'
+ - mssql: 'Server=localhost,1433;Database=master;User Id=sa;Password=your_password;Encrypt=true;TrustServerCertificate=true;Schemas=schema1,schema2,schema3;'
+ - oracle: 'username/password@[//]host[:port][/service_name]'
+ - snowflake:
+ - password-based authentication: 'SERVER=<account_identifier>.<region>;UID=<your_username>;PWD=<your_password>;DATABASE=<your_database>;WAREHOUSE=<your_warehouse>;ROLE=<your_role>;SCHEMAS=schema1,schema2,schema3;'
+ - key pair authentication: 'SERVER=<account_identifier>.<region>;UID=<your_username>;AUTHENTICATOR=SNOWFLAKE_JWT;PRIVATE_KEY_PATH=<path_to_your_private_key.p8>;PASSPHRASE=<your_private_key_passphrase>;DATABASE=<your_database>;WAREHOUSE=<your_warehouse>;ROLE=<your_role>;SCHEMAS=schema1,schema2,schema3;'
+
+ Note: If you did not use passphrase to encrypt your private key, you can leave the "PASSPHRASE" empty.
+
+ - bigquery: /path_to_json_credential.json
+
+ For BigQuery, the credential file supports flexible authentication:
+
+ 1. Application Default Credentials (ADC):
+ - Empty file: {} - uses environment authentication
+ - Override specific fields: {"project_id": "my-project", "datasets": [...]}
+
+ For more information about ADC, see https://cloud.google.com/docs/authentication/application-default-credentials
+
+ 2. Explicit Service Account (bypasses ADC):
+ {
+ "project_id": "your-project-id",
+ "client_email": "your-client-email",
+ "private_key": "your-private-key",
+ "datasets": ["dataset_1", "dataset_2", ...]
+ }
+ Note: Both client_email and private_key must be provided together.
+
+ If "datasets" is not specified or is empty, all accessible datasets will be fetched.
+ `).option("-o, --out-file <pathspec>", "compile all input files into a single files");
+ showHelp(args);
+ commander.default.parse(args);
+ connectionHandler(commander.default);
+ }
+ exports.db2dbml = db2dbml;
+ exports.dbml2sql = dbml2sql;
+ exports.sql2dbml = sql2dbml;
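
Note: the bin entry points declared in the embedded package manifest above (bin/dbml2sql.js, bin/sql2dbml.js, bin/db2dbml.js) are unchanged in this release and not shown in the diff. A minimal sketch of such a wrapper, assuming it simply forwards process.argv to the matching export of the new single-file bundle (consistent with showHelp(args) checking args.length < 3), would be:

#!/usr/bin/env node
// Hypothetical bin/dbml2sql.js wrapper (illustration only; not part of this diff).
// Forwards the raw CLI arguments to the bundled entry point in lib/index.js.
const { dbml2sql } = require('../lib/index.js');
dbml2sql(process.argv);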
package/package.json CHANGED
@@ -1,14 +1,16 @@
  {
  "$schema": "https://json.schemastore.org/package",
  "name": "@dbml/cli",
- "version": "5.5.1",
+ "version": "5.6.0-alpha.0",
  "description": "",
  "main": "lib/index.js",
  "license": "Apache-2.0",
  "scripts": {
- "test": "jest --coverage=false",
- "coverage": "jest --coverage",
- "build": "babel src --out-dir lib --copy-files",
+ "test": "vitest",
+ "test:watch": "vitest tests",
+ "coverage": "vitest --coverage",
+ "build": "rm -rf ./lib && tsc --noEmit && vite build",
+ "dev": "vite build --watch",
  "prepublish": "npm run build",
  "lint": "eslint .",
  "lint:fix": "eslint --fix ."
@@ -29,9 +31,8 @@
  "dbml-cli"
  ],
  "dependencies": {
- "@babel/cli": "^7.21.0",
- "@dbml/connector": "^5.5.1",
- "@dbml/core": "^5.5.1",
+ "@dbml/connector": "^5.6.0-alpha.0",
+ "@dbml/core": "^5.6.0-alpha.0",
  "bluebird": "^3.5.5",
  "chalk": "^2.4.2",
  "commander": "^2.20.0",
@@ -42,25 +43,7 @@
  "strip-ansi": "^5.2.0",
  "winston": "^3.2.1"
  },
- "devDependencies": {
- "@babel/core": "^7.21.4",
- "@babel/node": "^7.20.7",
- "@babel/plugin-transform-runtime": "^7.21.4",
- "@babel/preset-env": "^7.21.4",
- "@babel/runtime": "^7.21.0",
- "@stylistic/eslint-plugin": "^5.5.0",
- "@typescript-eslint/eslint-plugin": "^8.46.3",
- "@typescript-eslint/parser": "^8.46.3",
- "babel-jest": "^29.5.0",
- "eslint": "^9.39.1",
- "eslint-config-airbnb-base": "^15.0.0",
- "eslint-plugin-jest": "^29.0.1",
- "jest": "^29.5.0",
- "ts-jest": "^29.4.5",
- "typescript": "^5.9.3",
- "typescript-eslint": "^8.46.3"
- },
- "gitHead": "4519233511e253390842ffe363dd881e3413fd0a",
+ "gitHead": "e0cd0b00099344c05af0a83e30c2c9c2ccc325cc",
  "engines": {
  "node": ">=18"
  }
package/tsconfig.json CHANGED
@@ -6,7 +6,8 @@
  "./"
  ], /* Allow multiple folders to be treated as one when resolving modules. */
  "baseUrl": "./src", /* Specify the base directory to resolve non-relative module names. */
- "outDir": "./lib" /* Specify an output folder for all emitted files. */
+ "outDir": "./lib", /* Specify an output folder for all emitted files. */
+ "types": ["vitest/globals", "node"]
  },
  "files": [
  "package.json"
package/vite.config.ts ADDED
@@ -0,0 +1,50 @@
+ /// <reference types="vitest" />
+
+ import path from 'path';
+ import { defineConfig } from 'vite';
+
+ export default defineConfig({
+ resolve: {
+ alias: {
+ "@": path.resolve(__dirname, "src/"),
+ },
+ },
+ build: {
+ target: 'node18',
+ outDir: 'lib',
+ minify: false,
+ lib: {
+ entry: path.resolve(__dirname, "src/index.js"),
+ fileName: 'index',
+ formats: ['cjs'],
+ },
+ rollupOptions: {
+ output: {
+ exports: 'named',
+ },
+ external: [
+ '@dbml/connector',
+ '@dbml/core',
+ 'bluebird',
+ 'chalk',
+ 'commander',
+ 'esm',
+ 'figures',
+ 'lodash',
+ 'pegjs-require-import',
+ 'strip-ansi',
+ 'winston',
+ 'path',
+ 'fs',
+ /^node:.*/,
+ ],
+ },
+ },
+ test: {
+ globals: true,
+ coverage: {
+ provider: 'v8',
+ reporter: ['json', 'json-summary', 'html', 'text'],
+ },
+ },
+ });
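
With test.globals enabled here and "vitest/globals" added to tsconfig.json above, spec files can use describe/it/expect without explicit imports. A minimal sketch of such a spec, hypothetical and not part of this diff, exercising the bundled entry point:

// Hypothetical __tests__/exports.spec.js (illustration only).
// Relies on the vitest globals configured in vite.config.ts above.
import * as cli from '../lib/index.js';

describe('@dbml/cli bundle', () => {
  it('exposes the three CLI entry functions', () => {
    expect(typeof cli.dbml2sql).toBe('function');
    expect(typeof cli.sql2dbml).toBe('function');
    expect(typeof cli.db2dbml).toBe('function');
  });
});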
package/.babelrc DELETED
@@ -1,19 +0,0 @@
- {
- "presets": [
- [
- "@babel/preset-env",
- {
- "targets": {
- "node": "current"
- }
- }
- ]
- ],
- "env": {
- "test": {
- "plugins": [
- "@babel/plugin-transform-runtime"
- ]
- }
- }
- }
package/jest.config.ts DELETED
@@ -1,14 +0,0 @@
- import { type Config } from 'jest';
-
- const config: Config = {
- testMatch: ["**/?(*.)+(spec|test).?([mc])[jt]s?(x)"],
- preset: 'ts-jest',
- transform: {
- "^.+\\.js$": "babel-jest",
- },
- collectCoverage: true,
- coverageReporters: ["json", "json-summary", "html", "text"],
- coverageDirectory: "coverage",
- };
-
- export default config;
package/lib/cli/config.js DELETED
@@ -1,29 +0,0 @@
- "use strict";
-
- Object.defineProperty(exports, "__esModule", {
- value: true
- });
- exports.default = void 0;
- var _default = exports.default = {
- mysql: {
- name: 'MySQL'
- },
- mysqlLegacy: {
- name: 'MySQL'
- },
- postgres: {
- name: 'PostgreSQL'
- },
- postgresLegacy: {
- name: 'PostgreSQL'
- },
- mssql: {
- name: 'SQL Server'
- },
- oracle: {
- name: 'Oracle'
- },
- snowflake: {
- name: 'Snowflake'
- }
- };
package/lib/cli/connector.js DELETED
@@ -1,38 +0,0 @@
- "use strict";
-
- Object.defineProperty(exports, "__esModule", {
- value: true
- });
- exports.default = connectionHandler;
- var _core = require("@dbml/core");
- var _connector = require("@dbml/connector");
- var _figures = _interopRequireDefault(require("figures"));
- var _chalk = _interopRequireDefault(require("chalk"));
- var _path = _interopRequireDefault(require("path"));
- var _utils = require("./utils");
- var _outputConsolePlugin = _interopRequireDefault(require("./outputPlugins/outputConsolePlugin"));
- var _outputFilePlugin = _interopRequireDefault(require("./outputPlugins/outputFilePlugin"));
- var _logger = _interopRequireDefault(require("../helpers/logger"));
- function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
- async function connectionHandler(program) {
- try {
- const {
- connection,
- databaseType
- } = (0, _utils.getConnectionOpt)(program.args);
- const opts = program.opts();
- const schemaJson = await _connector.connector.fetchSchemaJson(connection, databaseType);
- if (!opts.outFile && !opts.outDir) {
- const res = _core.importer.generateDbml(schemaJson);
- _outputConsolePlugin.default.write(res);
- } else if (opts.outFile) {
- const res = _core.importer.generateDbml(schemaJson);
- new _outputFilePlugin.default((0, _utils.resolvePaths)(opts.outFile)).write(res);
-
- // bearer:disable javascript_lang_logger
- console.log(` ${_chalk.default.green(_figures.default.main.tick)} Generated DBML file from database's connection: ${_path.default.basename(opts.outFile)}`);
- }
- } catch (error) {
- _logger.default.error(error);
- }
- }
package/lib/cli/export.js DELETED
@@ -1,37 +0,0 @@
- "use strict";
-
- Object.defineProperty(exports, "__esModule", {
- value: true
- });
- exports.default = exportHandler;
- var _figures = _interopRequireDefault(require("figures"));
- var _chalk = _interopRequireDefault(require("chalk"));
- var _path = _interopRequireDefault(require("path"));
- var _core = require("@dbml/core");
- var _utils = require("./utils");
- var _validatePlugins = require("./validatePlugins/validatePlugins");
- var _outputConsolePlugin = _interopRequireDefault(require("./outputPlugins/outputConsolePlugin"));
- var _outputFilePlugin = _interopRequireDefault(require("./outputPlugins/outputFilePlugin"));
- var _config = _interopRequireDefault(require("./config"));
- var _logger = _interopRequireDefault(require("../helpers/logger"));
- var _errors = require("../errors");
- function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
- async function exportHandler(program) {
- try {
- const inputPaths = (0, _utils.resolvePaths)(program.args);
- (0, _utils.validateInputFilePaths)(inputPaths, _validatePlugins.validateFilePlugin);
- const opts = program.opts();
- const format = (0, _utils.getFormatOpt)(opts);
- if (!opts.outFile && !opts.outDir) {
- (0, _utils.generate)(inputPaths, dbml => _core.exporter.export(dbml, format), _outputConsolePlugin.default);
- } else if (opts.outFile) {
- const header = ['-- SQL dump generated using DBML (dbml.dbdiagram.io)\n', `-- Database: ${_config.default[format].name}\n`, `-- Generated at: ${new Date().toISOString()}\n\n`].join('');
- (0, _utils.generate)(inputPaths, dbml => _core.exporter.export(dbml, format), new _outputFilePlugin.default((0, _utils.resolvePaths)(opts.outFile), header));
- console.log(` ${_chalk.default.green(_figures.default.main.tick)} Generated SQL dump file (${_config.default[format].name}): ${_path.default.basename(opts.outFile)}`);
- }
- } catch (error) {
- _logger.default.error(`\n ${error.diags.map(diag => new _errors.SyntaxError(diag.filepath, diag)).map(({
- message
- }) => message).join('\n ')}`);
- }
- }