schema2md-cli 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,123 @@
1
+ # schema2md-cli
2
+
3
+ Generate database documentation (Markdown or LaTeX) with an optional ER diagram from your schema.
4
+
5
+ - TypeScript/Node.js CLI
6
+ - Supported sources: SQLite, PostgreSQL, MySQL/MariaDB
7
+ - Outputs: Markdown (.md) or LaTeX (.tex), with optional PDF compile
8
+ - ER diagram: Mermaid.js → rendered to image and embedded (Markdown/LaTeX)
9
+
10
+ ## Installation
11
+
12
+ - Requirements: Node.js 18+
13
+ - Global install:
14
+ - `npm install -g schema2md-cli`
15
+ - Run: `db-doc --help`
16
+ - Without install: `npx schema2md-cli@latest --help`
17
+
18
+ ## CLI
19
+
20
+ Required:
21
+ - `--url` Database URL (sqlite://, postgres://, mysql://)
22
+ - `--output` Output path (.md or .tex)
23
+
24
+ Common:
25
+ - `--exclude` Comma-separated glob patterns to ignore tables, e.g. `migrations,temp_*`
26
+ - `--title` Document title (default: `Database Documentation`)
27
+
28
+ Formats:
29
+ - `--format md` (default)
30
+ - `--format latex`
31
+
32
+ LaTeX options:
33
+ - `--summary` Generate a condensed LaTeX (counts + index + relations). Omit to include full per-table details.
34
+ - `--compile` Compile `.tex` into `.pdf` (requires local LaTeX or Docker)
35
+ - `--cleanup aux|all` Clean LaTeX aux files; `all` also deletes the `.tex`
36
+ - `--docker` Use Docker to compile (when no local LaTeX): recommend `--docker-image blang/latex:ctanfull`
37
+ - `--docker-image <image>` LaTeX Docker image (default: `paperist/alpine-texlive`; `blang/latex:ctanfull` is larger but more complete, and therefore safer)
38
+
39
+ ER diagram options (Mermaid):
40
+ - `--er` / `--no-er` Include/exclude ER diagram in LaTeX (default: include)
41
+ - `--diagram-format svg|png|pdf` Render format for ER (default: `png` for LaTeX/pdflatex compatibility)
42
+ - `--er-docker-image <image>` Mermaid CLI Docker image (e.g., `minlag/mermaid-cli:latest`)
43
+
44
+ Notes on Mermaid rendering:
45
+ - The CLI first tries Docker (Mermaid CLI). If Docker is unavailable or the render fails, it falls back to Kroki (https://kroki.io) to render the image. To disable this fallback, point the `KROKI_URL` environment variable at an unreachable URL or run without network access.
46
+
47
+ ## Examples
48
+
49
+ Project ships with ready SQLite examples:
50
+ - `examples/sqlite/shop/`
51
+ - `examples/sqlite/university/`
52
+ - `examples/sqlite/org/`
53
+
54
+ Each folder contains:
55
+ - `*.sqlite` sample DB
56
+ - `*.tex` LaTeX source
57
+ - `*.pdf` compiled output
58
+ - `*-er.mmd` Mermaid source of the ER diagram
59
+ - `*-er.png` rendered ER diagram image
60
+
61
+ Example command (Shop, LaTeX + PDF + ER diagram via Docker):
62
+
63
+ ```bash
64
+ # Windows paths: ensure sqlite URL uses an absolute path
65
+ # Use PNG diagram for pdflatex compatibility
66
+
67
+ db-doc \
68
+ --url "sqlite:///C:/ABS/PATH/examples/sqlite/shop/shop.sqlite" \
69
+ --output "examples/sqlite/shop/shop.tex" \
70
+ --format latex \
71
+ --summary \
72
+ --er --diagram-format png \
73
+ --compile --cleanup aux \
74
+ --docker --docker-image "blang/latex:ctanfull"
75
+ ```
76
+
77
+ PostgreSQL example:
78
+
79
+ ```bash
80
+ db-doc \
81
+ --url "postgres://user:pass@host:5432/db?schema=public" \
82
+ --output out.tex \
83
+ --format latex \
84
+ --er --diagram-format png \
85
+ --compile --cleanup aux \
86
+ --docker --docker-image "blang/latex:ctanfull"
87
+ ```
88
+
89
+ Markdown example (no PDF):
90
+
91
+ ```bash
92
+ db-doc --url "sqlite:///C:/ABS/PATH/db.sqlite" --output README.md --format md --exclude "migrations,temp_*"
93
+ ```
94
+
95
+ ## Configuration (.dbdoc.json)
96
+
97
+ Place at repo root to persist exclusions and future options:
98
+
99
+ ```json
100
+ {
101
+ "exclude": ["migrations*", "sqlite_%"]
102
+ }
103
+ ```
104
+
105
+ CLI `--exclude` merges with config values (deduped).
106
+
107
+ ## Engine specifics
108
+
109
+ - SQLite: PRAGMA introspection; no native comments → Description shows `-`.
110
+ - PostgreSQL: information_schema + pg_description for comments; select schema via `?schema=`.
111
+ - MySQL/MariaDB: information_schema; comments supported when present.
112
+
113
+ ## Development
114
+
115
+ ```bash
116
+ npm install
117
+ npm run build
118
+ node dist/cli.js --url "sqlite:///C:/ABS/PATH/examples/sqlite/shop/shop.sqlite" \
119
+ --output "examples/sqlite/shop/shop.tex" --format latex --summary --er \
120
+ --diagram-format png --compile --cleanup aux --docker --docker-image blang/latex:ctanfull
121
+ ```
122
+
123
+
package/dist/cli.js ADDED
@@ -0,0 +1,116 @@
1
+ #!/usr/bin/env node
2
+ "use strict";
3
+ Object.defineProperty(exports, "__esModule", { value: true });
4
+ const node_util_1 = require("node:util");
5
+ const node_fs_1 = require("node:fs");
6
+ const node_path_1 = require("node:path");
7
+ const config_1 = require("./utils/config");
8
+ const filters_1 = require("./utils/filters");
9
+ const sqliteExtractor_1 = require("./extractors/sqliteExtractor");
10
+ const postgresExtractor_1 = require("./extractors/postgresExtractor");
11
+ const mysqlExtractor_1 = require("./extractors/mysqlExtractor");
12
+ const markdown_1 = require("./emitters/markdown");
13
+ const latex_1 = require("./emitters/latex");
14
+ const mermaidExport_1 = require("./utils/mermaidExport");
15
+ const latexCompile_1 = require("./utils/latexCompile");
16
/**
 * Convert a sqlite:// URL into a filesystem path.
 * @param {string} urlStr - URL of the form sqlite:///absolute/path.db
 * @returns {string} the decoded filesystem path
 * @throws {Error} when the URL scheme is not sqlite:
 */
function parseUrlToPath(urlStr) {
    const u = new URL(urlStr);
    if (u.protocol !== 'sqlite:')
        throw new Error('Only sqlite URLs are supported (sqlite:///absolute/path.db)');
    // WHATWG URL percent-encodes path characters (e.g. space -> %20);
    // decode so the filesystem sees the real path.
    let p = decodeURIComponent(u.pathname);
    // Windows drive paths arrive as "/C:/..." — strip the leading slash.
    if (process.platform === 'win32' && p.startsWith('/') && p[2] === ':')
        p = p.slice(1);
    return p;
}
25
/**
 * Load a schema model from a database URL, applying exclude patterns.
 * Supported schemes: sqlite://, postgres:// (schema via ?schema=), mysql://.
 * @param {string} url - database connection URL
 * @param {string[]} patterns - wildcard patterns of tables to exclude
 * @returns {Promise<object>} the (possibly filtered) schema model
 * @throws {Error} for unsupported schemes or a missing SQLite file
 */
async function loadSchema(url, patterns) {
    const proto = new URL(url).protocol;
    if (proto.startsWith('sqlite')) {
        const p = parseUrlToPath(url);
        if (!(0, node_fs_1.existsSync)(p))
            throw new Error(`SQLite file not found: ${p}`);
        return loadFilteredSchema(new sqliteExtractor_1.SQLiteExtractor(p), patterns);
    }
    if (proto.startsWith('postgres')) {
        const schema = new URL(url).searchParams.get('schema') || 'public';
        return loadFilteredSchema(new postgresExtractor_1.PostgresExtractor(url, schema), patterns);
    }
    if (proto.startsWith('mysql')) {
        return loadFilteredSchema(new mysqlExtractor_1.MySQLExtractor(url), patterns);
    }
    throw new Error('Unsupported URL scheme. Use sqlite://, postgres://, or mysql://');
}
/**
 * Shared load/filter step for every extractor: load once, drop excluded
 * table names, and re-load with the kept subset only when something was
 * actually excluded (extractors introspect less work that way).
 * @param {{load: (names?: string[]) => Promise<object>}} ex - schema extractor
 * @param {string[]} patterns - wildcard exclude patterns
 */
async function loadFilteredSchema(ex, patterns) {
    const all = await ex.load();
    const names = all.tables.map(t => t.name);
    const kept = (0, filters_1.excludeNames)(names, patterns);
    return (kept.length === names.length) ? all : ex.load(kept);
}
54
/**
 * CLI entry point: parse flags, load the schema from --url, render
 * Markdown or LaTeX to --output, optionally render an ER diagram and
 * compile the LaTeX to PDF.
 */
async function main() {
    const { values } = (0, node_util_1.parseArgs)({
        options: {
            url: { type: 'string' },
            output: { type: 'string' },
            exclude: { type: 'string', default: '' },
            title: { type: 'string', default: 'Database Documentation' },
            format: { type: 'string', default: 'md' },
            summary: { type: 'boolean', default: false },
            compile: { type: 'boolean', default: false },
            cleanup: { type: 'string', default: 'aux' },
            docker: { type: 'boolean', default: false },
            'docker-image': { type: 'string', default: 'paperist/alpine-texlive' },
            er: { type: 'boolean', default: true },
            'er-docker-image': { type: 'string', default: 'minlag/mermaid-cli:latest' },
            'diagram-format': { type: 'string', default: 'png' },
            'diagram-only': { type: 'boolean', default: false }
        },
        allowPositionals: false
    });
    if (!values.url || !values.output)
        throw new Error('--url and --output are required');
    // CLI --exclude patterns are merged (config first, deduped) with .dbdoc.json.
    const conf = (0, config_1.loadConfig)();
    const excludeCli = String(values.exclude || '').split(',').map((s) => s.trim()).filter(Boolean);
    const patterns = (0, config_1.mergePatterns)(excludeCli, conf.exclude);
    const schema = await loadSchema(String(values.url), patterns);
    const outPath = String(values.output);
    // Ensure the output directory exists before writing.
    const dir = (0, node_path_1.dirname)(outPath);
    if (dir && !(0, node_fs_1.existsSync)(dir))
        (0, node_fs_1.mkdirSync)(dir, { recursive: true });
    const fmt = String(values.format || 'md').toLowerCase();
    // --diagram-only: emit just the ER diagram image and exit.
    if (Boolean(values['diagram-only'])) {
        const desiredOut = String(values.output);
        // Prefer the image format implied by the output extension; otherwise
        // fall back to --diagram-format (default png).
        const fmtFromExt = /\.(png|svg|pdf)$/i.test(desiredOut) ? desiredOut.replace(/^.*\./, '').toLowerCase() : String(values['diagram-format'] || 'png');
        const diag = await (0, mermaidExport_1.renderMermaidDiagram)(schema, desiredOut, { dockerImage: String(values['er-docker-image'] || ''), format: fmtFromExt });
        if (!diag.ok || !diag.outPath) {
            console.error(diag.log || 'Diagram generation failed');
            process.exit(1);
        }
        console.log(`Wrote diagram: ${diag.outPath}`);
        return;
    }
    const isSummary = Boolean(values.summary);
    let figure;
    // NOTE(review): util.parseArgs does not synthesize a --no-er negation and
    // 'er' defaults to true, so `values.er !== false` appears to always hold;
    // confirm how the README's --no-er flag is meant to work.
    if (fmt === 'latex' && values.er !== false) {
        const diag = await (0, mermaidExport_1.renderMermaidDiagram)(schema, outPath, { dockerImage: String(values['er-docker-image'] || ''), format: String(values['diagram-format'] || 'png') });
        if (diag.ok && diag.outPath)
            // Only the basename is embedded; the image sits next to the .tex.
            figure = require('node:path').basename(diag.outPath);
    }
    const content = fmt === 'latex' ? (0, latex_1.renderLatex)(schema, String(values.title), isSummary, figure) : (0, markdown_1.renderMarkdown)(schema, String(values.title));
    (0, node_fs_1.writeFileSync)(outPath, content, 'utf-8');
    // Optionally compile the LaTeX and clean up auxiliary files.
    if (fmt === 'latex' && values.compile) {
        const res = (0, latexCompile_1.compileLatex)(outPath, { docker: Boolean(values.docker), dockerImage: String(values['docker-image'] || 'paperist/alpine-texlive') });
        if (!res.ok) {
            console.error(res.log || 'LaTeX compile failed');
            process.exit(1);
        }
        const mode = String(values.cleanup || 'aux');
        (0, latexCompile_1.cleanupLatex)(outPath, mode);
    }
    console.log(`Wrote: ${outPath}`);
}
// Top-level runner: print the message only and exit non-zero on failure.
main().catch(err => { console.error(err.message); process.exit(1); });
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
@@ -0,0 +1,23 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.toGeneric = toGeneric;
4
// Lookup table from concrete engine type names to coarse generic categories.
const GENERIC = {
    'INT': 'Integer', 'INTEGER': 'Integer', 'TINYINT': 'Integer', 'SMALLINT': 'Integer',
    'BIGINT': 'Integer', 'UNSIGNED BIG INT': 'Integer',
    'REAL': 'Float', 'DOUBLE': 'Float', 'DOUBLE PRECISION': 'Float', 'FLOAT': 'Float',
    'NUMERIC': 'Numeric', 'DECIMAL': 'Numeric', 'BOOLEAN': 'Boolean',
    'DATE': 'Date', 'DATETIME': 'Datetime', 'TIMESTAMP': 'Datetime',
    'TEXT': 'String', 'CLOB': 'String', 'CHAR': 'String', 'VARCHAR': 'String', 'NVARCHAR': 'String',
    'BLOB': 'Blob'
};
/**
 * Map a database column type (e.g. "VARCHAR(255)") to a generic category
 * ("String", "Integer", ...). Unknown types fall back to a capitalized
 * form of their base name; empty/missing input yields "Unknown".
 */
function toGeneric(dbType) {
    if (!dbType)
        return 'Unknown';
    const upper = String(dbType).trim().toUpperCase();
    // Keep only the leading word(s): "VARCHAR(255)" -> "VARCHAR".
    const match = upper.match(/^([A-Z ]+)/);
    const base = match ? match[1].trim() : upper;
    const norm = base.replace(/\s+/g, ' ');
    if (norm in GENERIC)
        return GENERIC[norm];
    // Any *CHAR / *TEXT variant is string-like.
    if (norm.includes('CHAR') || norm.includes('TEXT'))
        return 'String';
    return capitalize(norm);
}
/** Title-case each whitespace-separated word of `s`. */
function capitalize(s) {
    return s
        .toLowerCase()
        .replace(/(^|\s)([a-z])/g, (_, pre, ch) => pre + ch.toUpperCase());
}
@@ -0,0 +1,12 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.setupCounter = setupCounter;
4
/**
 * Wire a click counter onto `element`: shows "Count is N" and increments
 * on every click.
 * @param {HTMLElement} element - the button to decorate
 */
function setupCounter(element) {
    let counter = 0;
    const setCounter = (count) => {
        counter = count;
        // textContent renders the same plain text as innerHTML here but
        // skips HTML parsing entirely (safer and faster for text-only).
        element.textContent = `Count is ${counter}`;
    };
    element.addEventListener('click', () => setCounter(counter + 1));
    setCounter(0);
}
@@ -0,0 +1,105 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.renderLatex = renderLatex;
4
/**
 * Escape LaTeX special characters in a single pass.
 * A single pass is required: the original chain of sequential replace()
 * calls re-escaped the braces of its own replacement text
 * ('\' -> '\textbackslash{}' -> '\textbackslash\{\}', which renders as a
 * backslash followed by literal braces).
 */
function esc(s) {
    const REPLACEMENTS = {
        '\\': '\\textbackslash{}',
        '#': '\\#',
        '%': '\\%',
        '&': '\\&',
        '_': '\\_',
        '{': '\\{',
        '}': '\\}',
        '$': '\\$',
        '^': '\\textasciicircum{}',
        '~': '\\textasciitilde{}'
    };
    return s.replace(/[\\#%&_{}$^~]/g, (ch) => REPLACEMENTS[ch]);
}
/** Document preamble + title page lines. */
function header(title) {
    return [
        '\\documentclass[11pt]{article}',
        '\\usepackage[margin=2.5cm]{geometry}',
        '\\usepackage[T1]{fontenc}',
        '\\usepackage{graphicx}',
        '\\usepackage{hyperref}',
        '\\usepackage{longtable}',
        '\\usepackage{booktabs}',
        '\\title{' + esc(title) + '}',
        '\\begin{document}',
        '\\maketitle',
    ];
}
/** Alphabetical itemized list of all table names. */
function tableIndex(schema) {
    const lines = [];
    lines.push('\\section*{Table Index}');
    lines.push('\\begin{itemize}');
    for (const t of [...schema.tables].sort((a, b) => a.name.localeCompare(b.name))) {
        lines.push(' \\item ' + esc(t.name));
    }
    lines.push('\\end{itemize}');
    return lines;
}
/** Itemized list of every foreign-key relation, or a no-FK notice. */
function relationships(schema) {
    const lines = [];
    lines.push('\\section*{Relations (FK)}');
    if (!schema.tables.some(t => t.fks.length)) {
        lines.push('No foreign keys detected.');
        return lines;
    }
    lines.push('\\begin{itemize}');
    for (const t of schema.tables) {
        for (const fk of t.fks) {
            lines.push(' \\item ' + esc(`${fk.from_table}.${fk.from_column} -> ${fk.to_table}.${fk.to_column}`));
        }
    }
    lines.push('\\end{itemize}');
    return lines;
}
/** Aggregate counts: tables, columns, foreign keys. */
function tableSummary(schema) {
    const lines = [];
    lines.push('\\section*{Summary}');
    const tableCount = schema.tables.length;
    const colCount = schema.tables.reduce((n, t) => n + t.columns.length, 0);
    const fkCount = schema.tables.reduce((n, t) => n + t.fks.length, 0);
    lines.push(`Total tables: ${tableCount}\\\\`);
    lines.push(`Total columns: ${colCount}\\\\`);
    lines.push(`Total foreign keys: ${fkCount}`);
    return lines;
}
/** Per-table longtable with one row per column (PK-first ordering). */
function tableDetails(schema) {
    const lines = [];
    for (const t of [...schema.tables].sort((a, b) => a.name.localeCompare(b.name))) {
        lines.push('\\section*{Table: ' + esc(t.name) + '}');
        if (t.comment)
            lines.push(esc(t.comment));
        lines.push('\\begin{longtable}{@{}llllll@{}}');
        lines.push('\\toprule');
        lines.push('Column & Type & Attr & Null & Default & Description \\\\');
        lines.push('\\midrule');
        // Primary keys first, then alphabetical.
        const cols = [...t.columns].sort((a, b) => (Number(b.is_pk) - Number(a.is_pk)) || a.name.localeCompare(b.name));
        for (const c of cols) {
            const attrs = [c.is_pk ? 'PK' : '', c.is_fk ? 'FK' : ''].filter(Boolean).join('/') || '-';
            const nullable = c.nullable ? 'Yes' : 'No';
            const defv = c.default != null && String(c.default).trim() !== '' ? esc(String(c.default)) : '-';
            const desc = c.comment?.trim() ? esc(c.comment) : '-';
            lines.push(`${esc(c.name)} & ${esc(c.type)} & ${esc(attrs)} & ${nullable} & ${defv} & ${desc} \\\\`);
        }
        lines.push('\\bottomrule');
        lines.push('\\end{longtable}');
    }
    return lines;
}
/**
 * Render the full LaTeX document for a schema.
 * @param {object} schema - { tables: [...] } model from an extractor
 * @param {string} title - document title
 * @param {boolean} summaryOnly - omit per-table detail sections when true
 * @param {string} [figurePath] - ER diagram image path to embed, if any
 * @returns {string} the complete .tex source
 */
function renderLatex(schema, title = 'Database Documentation', summaryOnly = false, figurePath) {
    const out = [];
    out.push(...header(title));
    out.push(...tableSummary(schema));
    out.push('');
    out.push(...tableIndex(schema));
    out.push('');
    if (figurePath) {
        out.push('\\section*{ER Diagram}');
        out.push('\\begin{center}');
        // Normalize Windows separators for \includegraphics. The original
        // /\\\\/g only matched doubled backslashes and left single '\' intact.
        out.push('\\includegraphics[width=\\textwidth]{' + figurePath.replace(/\\/g, '/') + '}');
        out.push('\\end{center}');
    }
    out.push(...relationships(schema));
    if (!summaryOnly) {
        out.push('');
        out.push(...tableDetails(schema));
    }
    out.push('\\end{document}');
    return out.join('\n');
}
@@ -0,0 +1,71 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.renderMarkdown = renderMarkdown;
4
+ const mermaid_1 = require("./mermaid");
5
/** Build a GitHub-style anchor slug from a table name. */
function slug(name) {
    return name
        .toLowerCase()
        .replace(/[^a-z0-9]+/g, '-')
        .replace(/^-+|-+$/g, '');
}
/** Copy of `cols` sorted with primary keys first, then alphabetically. */
function sortColumns(cols) {
    const copy = [...cols];
    copy.sort((x, y) => {
        const pkDelta = Number(y.is_pk) - Number(x.is_pk);
        return pkDelta !== 0 ? pkDelta : x.name.localeCompare(y.name);
    });
    return copy;
}
/** Markdown table header for the per-table column listing. */
function header() {
    return '| Column | Type | Attr | Null | Default | Description |\n|---|---|---|---|---|---|';
}
/** One Markdown table row describing a single column. */
function row(c) {
    const flags = [];
    if (c.is_pk)
        flags.push('PK');
    if (c.is_fk)
        flags.push('FK');
    const attrs = flags.length ? flags.join('/') : '-';
    const nullable = c.nullable ? 'Yes' : 'No';
    const hasDefault = c.default != null && String(c.default).trim() !== '';
    const defv = hasDefault ? `\`${String(c.default)}\`` : '-';
    const desc = c.comment?.trim() ? c.comment : '-';
    return `| ${c.name} | \`${c.type}\` | ${attrs} | ${nullable} | ${defv} | ${desc} |`;
}
/** All Markdown lines for one table: heading, anchor, stats, columns, FKs. */
function sectionForTable(t) {
    const id = slug(t.name);
    const pkCount = t.columns.filter((c) => c.is_pk).length;
    const lines = [`### ${t.name}`, `<a id="table-${id}"></a>`];
    if (t.comment && t.comment.trim()) {
        lines.push('', `_${t.comment.trim()}_`);
    }
    lines.push('', `Columns: ${t.columns.length} / PK: ${pkCount} / FKs: ${t.fks.length}`, '');
    lines.push(header());
    lines.push(...sortColumns(t.columns).map(row));
    if (t.fks.length) {
        lines.push('', 'Foreign Keys', '');
        for (const fk of t.fks) {
            lines.push(`- \`${fk.from_table}.${fk.from_column}\` -> \`${fk.to_table}.${fk.to_column}\` (\`${fk.name}\`)`);
        }
    }
    lines.push('', '[Back to index](#table-index)', '');
    return lines;
}
/**
 * Render a full Markdown document for a schema: title, table index,
 * global Mermaid ER diagram, and one section per table (alphabetical).
 */
function renderMarkdown(schema, title = 'Database Documentation') {
    const tables = [...schema.tables].sort((a, b) => a.name.localeCompare(b.name));
    const out = [`# ${title}`, ''];
    out.push('## Table Index', '<a id="table-index"></a>');
    for (const t of tables) {
        out.push(`- [${t.name}](#table-${slug(t.name)})`);
    }
    out.push('', '## Global ER Diagram');
    out.push('```mermaid', (0, mermaid_1.erDiagram)(schema), '```');
    out.push('', '## Tables', '');
    for (const t of tables) {
        out.push(...sectionForTable(t));
    }
    return out.join('\n');
}
@@ -0,0 +1,22 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.erDiagram = erDiagram;
4
/**
 * Render a Mermaid `erDiagram` definition for a schema: one entity block
 * per table (lower-cased type, column name, optional PK marker) followed
 * by a one-to-many relation line per foreign key.
 */
function erDiagram(schema) {
    const out = ['erDiagram'];
    for (const table of schema.tables) {
        out.push(`  ${table.name} {`);
        for (const col of table.columns) {
            const suffix = col.is_pk ? ' PK' : '';
            out.push(`    ${col.type.toLowerCase()} ${col.name}${suffix}`);
        }
        out.push('  }');
    }
    // Relations come after all entity blocks, labelled with the FK name.
    for (const table of schema.tables) {
        for (const fk of table.fks) {
            out.push(`  ${fk.to_table} ||--o{ ${fk.from_table} : "${fk.name}"`);
        }
    }
    return out.join('\n');
}
@@ -0,0 +1,75 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.MySQLExtractor = void 0;
7
+ const promise_1 = __importDefault(require("mysql2/promise"));
8
+ const typeMapping_1 = require("../core/typeMapping");
9
/**
 * Schema extractor for MySQL/MariaDB backed by information_schema.
 * Produces { tables: [{ name, comment, columns, fks }] }.
 */
class MySQLExtractor {
    constructor(url, database) {
        this.url = url;
        // NOTE(review): `database` is stored but never read below — the active
        // schema is resolved via SELECT DATABASE() instead; confirm intent.
        this.database = database;
    }
    /**
     * Load the schema model. When `includeTables` is provided, only those
     * tables are introspected (used to re-load after exclude filtering).
     */
    async load(includeTables) {
        const conn = await promise_1.default.createConnection(this.url);
        try {
            // Resolve the connection's current schema name.
            const [dbRow] = await conn.query("SELECT DATABASE() AS db");
            // @ts-ignore
            const dbName = (Array.isArray(dbRow) ? dbRow[0].db : dbRow.db);
            const [tablesRows] = await conn.query("SELECT table_name, table_comment FROM information_schema.tables WHERE table_type='BASE TABLE' AND table_schema=? ORDER BY table_name", [dbName]);
            let tableNames = tablesRows.map(r => r.table_name);
            const tableComment = new Map(tablesRows.map(r => [r.table_name, r.table_comment ?? null]));
            if (includeTables)
                tableNames = includeTables;
            // NOTE(review): if `tableNames` is empty, `IN (?)` expands to invalid
            // SQL under mysql2 — confirm empty schemas cannot reach this point.
            const [colsRows] = await conn.query("SELECT table_name, column_name, data_type, is_nullable, column_default, column_comment FROM information_schema.columns WHERE table_schema=? AND table_name IN (?)", [dbName, tableNames]);
            const [pkRows] = await conn.query("SELECT kcu.table_name, kcu.column_name FROM information_schema.table_constraints tc JOIN information_schema.key_column_usage kcu ON tc.constraint_name=kcu.constraint_name AND tc.table_schema=kcu.table_schema WHERE tc.constraint_type='PRIMARY KEY' AND tc.table_schema=? AND kcu.table_name IN (?)", [dbName, tableNames]);
            const pkSet = new Set(pkRows.map(r => `${r.table_name}.${r.column_name}`));
            const [fkRows] = await conn.query("SELECT kcu.constraint_name, kcu.table_name AS from_table, kcu.column_name AS from_column, kcu.referenced_table_name AS to_table, kcu.referenced_column_name AS to_column FROM information_schema.key_column_usage kcu WHERE kcu.table_schema=? AND kcu.referenced_table_schema=? AND kcu.table_name IN (?) AND kcu.referenced_table_name IS NOT NULL", [dbName, dbName, tableNames]);
            // NOTE(review): `colComment` is never populated or read — column
            // comments come straight from colsRows below; looks like dead code.
            const colComment = new Map();
            // Group column rows by table, mapping engine types to generic ones.
            const columnsByTable = new Map();
            for (const r of colsRows) {
                const key = r.table_name;
                const id = `${r.table_name}.${r.column_name}`;
                const cols = columnsByTable.get(key) || [];
                cols.push({
                    name: r.column_name,
                    type: (0, typeMapping_1.toGeneric)(r.data_type),
                    nullable: String(r.is_nullable).toUpperCase() === 'YES',
                    default: r.column_default ?? null,
                    is_pk: pkSet.has(id),
                    is_fk: false,
                    comment: (r.column_comment ?? null)
                });
                columnsByTable.set(key, cols);
            }
            const fks = fkRows.map(r => ({
                name: r.constraint_name,
                from_table: r.from_table,
                from_column: r.from_column,
                to_table: r.to_table,
                to_column: r.to_column
            }));
            // Flag each referencing column as an FK member.
            for (const fk of fks) {
                const cols = columnsByTable.get(fk.from_table) || [];
                const c = cols.find(x => x.name === fk.from_column);
                if (c)
                    c.is_fk = true;
            }
            const tables = [];
            for (const t of tableNames) {
                tables.push({
                    name: t,
                    comment: tableComment.get(t) ?? null,
                    columns: columnsByTable.get(t) || [],
                    fks: fks.filter(f => f.from_table === t)
                });
            }
            return { tables };
        }
        finally {
            await conn.end();
        }
    }
}
75
+ exports.MySQLExtractor = MySQLExtractor;
@@ -0,0 +1,107 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.PostgresExtractor = void 0;
4
+ const pg_1 = require("pg");
5
+ const typeMapping_1 = require("../core/typeMapping");
6
/**
 * Schema extractor for PostgreSQL using information_schema plus the
 * pg_catalog comment helpers (obj_description / col_description).
 * Produces { tables: [{ name, comment, columns, fks }] }.
 */
class PostgresExtractor {
    // url: postgres:// connection string; schema: namespace to introspect.
    constructor(url, schema = 'public') {
        this.url = url;
        this.schema = schema;
    }
    /**
     * Load the schema model. When `includeTables` is provided, only those
     * tables are kept (used to re-load after exclude filtering).
     */
    async load(includeTables) {
        const client = new pg_1.Client({ connectionString: this.url });
        await client.connect();
        try {
            const tablesRes = await client.query(`SELECT table_name FROM information_schema.tables
WHERE table_type='BASE TABLE' AND table_schema=$1
ORDER BY table_name`, [this.schema]);
            let tableNames = tablesRes.rows.map((r) => r.table_name);
            if (includeTables)
                tableNames = includeTables;
            // Columns
            const colsRes = await client.query(`SELECT table_name, column_name, data_type, is_nullable, column_default
FROM information_schema.columns
WHERE table_schema=$1 AND table_name = ANY($2::text[])`, [this.schema, tableNames]);
            // PKs
            const pksRes = await client.query(`SELECT kcu.table_name, kcu.column_name
FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu
ON tc.constraint_name = kcu.constraint_name
AND tc.table_schema = kcu.table_schema
WHERE tc.constraint_type = 'PRIMARY KEY'
AND tc.table_schema = $1
AND kcu.table_name = ANY($2::text[])`, [this.schema, tableNames]);
            const pkSet = new Set(pksRes.rows.map((r) => `${r.table_name}.${r.column_name}`));
            // FKs
            // NOTE(review): joining constraint_column_usage this way can pair
            // columns incorrectly for composite (multi-column) foreign keys —
            // confirm only single-column FKs are expected here.
            const fksRes = await client.query(`SELECT tc.constraint_name,
kcu.table_name AS from_table,
kcu.column_name AS from_column,
ccu.table_name AS to_table,
ccu.column_name AS to_column
FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu
ON tc.constraint_name = kcu.constraint_name AND tc.table_schema = kcu.table_schema
JOIN information_schema.constraint_column_usage ccu
ON ccu.constraint_name = tc.constraint_name AND ccu.table_schema = tc.table_schema
WHERE tc.constraint_type = 'FOREIGN KEY'
AND tc.table_schema = $1
AND kcu.table_name = ANY($2::text[])`, [this.schema, tableNames]);
            // Comments (optional)
            const commentsRes = await client.query(`SELECT c.relname AS table_name, obj_description(c.oid) AS table_comment
FROM pg_class c
JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE n.nspname = $1 AND c.relkind='r'`, [this.schema]);
            const tableComment = new Map(commentsRes.rows.map((r) => [r.table_name, r.table_comment]));
            const colCommentsRes = await client.query(`SELECT c.relname AS table_name, a.attname AS column_name, col_description(c.oid, a.attnum) AS column_comment
FROM pg_class c
JOIN pg_namespace n ON n.oid = c.relnamespace
JOIN pg_attribute a ON a.attrelid = c.oid AND a.attnum > 0 AND NOT a.attisdropped
WHERE n.nspname = $1`, [this.schema]);
            const colComment = new Map(colCommentsRes.rows.map((r) => [`${r.table_name}.${r.column_name}`, r.column_comment]));
            // Group column rows by table, mapping engine types to generic ones.
            const columnsByTable = new Map();
            for (const row of colsRes.rows) {
                const key = `${row.table_name}`;
                const cols = columnsByTable.get(key) || [];
                const id = `${row.table_name}.${row.column_name}`;
                cols.push({
                    name: row.column_name,
                    type: (0, typeMapping_1.toGeneric)(row.data_type),
                    nullable: String(row.is_nullable).toUpperCase() === 'YES',
                    default: row.column_default ?? null,
                    is_pk: pkSet.has(id),
                    is_fk: false,
                    comment: colComment.get(id) ?? null
                });
                columnsByTable.set(key, cols);
            }
            const fks = fksRes.rows.map((r) => ({
                name: r.constraint_name,
                from_table: r.from_table,
                from_column: r.from_column,
                to_table: r.to_table,
                to_column: r.to_column
            }));
            // mark FK flags
            for (const fk of fks) {
                const cols = columnsByTable.get(fk.from_table) || [];
                const c = cols.find(x => x.name === fk.from_column);
                if (c)
                    c.is_fk = true;
            }
            const tables = [];
            for (const t of tableNames) {
                tables.push({
                    name: t,
                    comment: tableComment.get(t) ?? null,
                    columns: columnsByTable.get(t) || [],
                    fks: fks.filter(f => f.from_table === t)
                });
            }
            return { tables };
        }
        finally {
            await client.end();
        }
    }
}
+ exports.PostgresExtractor = PostgresExtractor;
@@ -0,0 +1,63 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.SQLiteExtractor = void 0;
7
+ const node_fs_1 = require("node:fs");
8
+ const sql_js_1 = __importDefault(require("sql.js"));
9
+ const typeMapping_1 = require("../core/typeMapping");
10
/**
 * Convert a sql.js exec() result ({ columns, values }) into an array of
 * plain row objects keyed by column name. Missing/empty results yield [].
 */
function rows(res) {
    if (!res)
        return [];
    const out = [];
    for (const record of res.values) {
        const obj = {};
        res.columns.forEach((col, i) => { obj[col] = record[i]; });
        out.push(obj);
    }
    return out;
}
16
/**
 * Schema extractor for SQLite files using sql.js (WASM, no native deps),
 * introspecting via PRAGMA table_info / foreign_key_list.
 * SQLite has no table/column comments, so `comment` is always null.
 */
class SQLiteExtractor {
    constructor(path) {
        this.path = path;
    }
    /**
     * Load the schema model. When `includeTables` is provided, only those
     * tables are introspected (used to re-load after exclude filtering).
     */
    async load(includeTables) {
        const SQL = await (0, sql_js_1.default)();
        const data = (0, node_fs_1.readFileSync)(this.path);
        const db = new SQL.Database(new Uint8Array(data));
        try {
            // User tables only; sqlite_% objects are internal.
            const result = db.exec("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name")[0];
            let tableNames = rows(result).map(r => String(r.name));
            if (includeTables)
                tableNames = includeTables;
            const tables = [];
            for (const tname of tableNames) {
                // PRAGMA table_info columns: cid, name, type, notnull, dflt_value, pk.
                // Single quotes in the name are doubled (SQL string escaping).
                const info = db.exec(`PRAGMA table_info('${tname.replace(/'/g, "''")}')`)[0];
                const cols = rows(info).map(r => ({
                    name: String(r.name),
                    type: (0, typeMapping_1.toGeneric)(String(r.type ?? '')),
                    nullable: Number(r.notnull) === 0,
                    default: r.dflt_value == null ? null : String(r.dflt_value),
                    is_pk: Number(r.pk) > 0, // pk is a 1-based position within the PK
                    is_fk: false,
                    comment: null // SQLite has no native column comments
                }));
                const fkRes = db.exec(`PRAGMA foreign_key_list('${tname.replace(/'/g, "''")}')`)[0];
                // SQLite FKs are unnamed here, so a synthetic name is built.
                const fks = rows(fkRes).map(r => ({
                    name: `fk_${tname}_${r.from}_to_${r.table}_${r.to}`,
                    from_table: tname,
                    from_column: String(r.from),
                    to_table: String(r.table),
                    to_column: String(r.to)
                }));
                // Flag each referencing column as an FK member.
                for (const fk of fks) {
                    const c = cols.find(x => x.name === fk.from_column);
                    if (c)
                        c.is_fk = true;
                }
                tables.push({ name: tname, columns: cols, fks });
            }
            return { tables };
        }
        finally {
            db.close();
        }
    }
}
63
+ exports.SQLiteExtractor = SQLiteExtractor;
package/dist/main.js ADDED
@@ -0,0 +1,63 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ require("./style.css");
7
+ const typescript_svg_1 = __importDefault(require("./assets/typescript.svg"));
8
+ const vite_svg_1 = __importDefault(require("./assets/vite.svg"));
9
+ const hero_png_1 = __importDefault(require("./assets/hero.png"));
10
+ const counter_ts_1 = require("./counter.ts");
11
// NOTE(review): this file appears to be unrelated Vite starter-template UI
// code bundled into the published CLI package by mistake — it requires a DOM
// ('document') and browser assets the package does not ship. Confirm whether
// it should be removed from the build output entirely.
document.querySelector('#app').innerHTML = `
<section id="center">
<div class="hero">
<img src="${hero_png_1.default}" class="base" width="170" height="179">
<img src="${typescript_svg_1.default}" class="framework" alt="TypeScript logo"/>
<img src=${vite_svg_1.default} class="vite" alt="Vite logo" />
</div>
<div>
<h1>Get started</h1>
<p>Edit <code>src/main.ts</code> and save to test <code>HMR</code></p>
</div>
<button id="counter" type="button" class="counter"></button>
</section>

<div class="ticks"></div>

<section id="next-steps">
<div id="docs">
<svg class="icon" role="presentation" aria-hidden="true"><use href="/icons.svg#documentation-icon"></use></svg>
<h2>Documentation</h2>
<p>Your questions, answered</p>
<ul>
<li>
<a href="https://vite.dev/" target="_blank">
<img class="logo" src=${vite_svg_1.default} alt="" />
Explore Vite
</a>
</li>
<li>
<a href="https://www.typescriptlang.org" target="_blank">
<img class="button-icon" src="${typescript_svg_1.default}" alt="">
Learn more
</a>
</li>
</ul>
</div>
<div id="social">
<svg class="icon" role="presentation" aria-hidden="true"><use href="/icons.svg#social-icon"></use></svg>
<h2>Connect with us</h2>
<p>Join the Vite community</p>
<ul>
<li><a href="https://github.com/vitejs/vite" target="_blank"><svg class="button-icon" role="presentation" aria-hidden="true"><use href="/icons.svg#github-icon"></use></svg>GitHub</a></li>
<li><a href="https://chat.vite.dev/" target="_blank"><svg class="button-icon" role="presentation" aria-hidden="true"><use href="/icons.svg#discord-icon"></use></svg>Discord</a></li>
<li><a href="https://x.com/vite_js" target="_blank"><svg class="button-icon" role="presentation" aria-hidden="true"><use href="/icons.svg#x-icon"></use></svg>X.com</a></li>
<li><a href="https://bsky.app/profile/vite.dev" target="_blank"><svg class="button-icon" role="presentation" aria-hidden="true"><use href="/icons.svg#bluesky-icon"></use></svg>Bluesky</a></li>
</ul>
</div>
</section>

<div class="ticks"></div>
<section id="spacer"></section>
`;
// Wire the demo click counter onto the #counter button.
(0, counter_ts_1.setupCounter)(document.querySelector('#counter'));
@@ -0,0 +1,35 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.loadConfig = loadConfig;
4
+ exports.mergePatterns = mergePatterns;
5
+ const node_fs_1 = require("node:fs");
6
+ const node_path_1 = require("node:path");
7
+ const DEFAULT = { exclude: [] };
8
+ const FILENAME = '.dbdoc.json';
9
+ function loadConfig(cwd = process.cwd()) {
10
+ const path = (0, node_path_1.join)(cwd, FILENAME);
11
+ if (!(0, node_fs_1.existsSync)(path))
12
+ return { ...DEFAULT };
13
+ try {
14
+ const raw = (0, node_fs_1.readFileSync)(path, 'utf-8');
15
+ const obj = JSON.parse(raw);
16
+ const out = { ...DEFAULT };
17
+ if (obj && Array.isArray(obj.exclude))
18
+ out.exclude = obj.exclude.map(String);
19
+ return out;
20
+ }
21
+ catch {
22
+ return { ...DEFAULT };
23
+ }
24
+ }
25
+ function mergePatterns(cli, conf) {
26
+ const list = [...(conf || []), ...(cli || [])];
27
+ const seen = new Set();
28
+ const out = [];
29
+ for (const p of list)
30
+ if (!seen.has(p)) {
31
+ seen.add(p);
32
+ out.push(p);
33
+ }
34
+ return out;
35
+ }
@@ -0,0 +1,21 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.excludeNames = excludeNames;
4
+ exports.matchPattern = matchPattern;
5
+ function excludeNames(names, patterns) {
6
+ if (!patterns || patterns.length === 0)
7
+ return Array.from(names);
8
+ const out = [];
9
+ for (const n of names) {
10
+ if (patterns.some((p) => matchPattern(n, p)))
11
+ continue;
12
+ out.push(n);
13
+ }
14
+ return out;
15
+ }
16
+ // Simple wildcard: * and ? only
17
+ function matchPattern(text, pattern) {
18
+ const esc = pattern.replace(/[.+^${}()|\[\]\\]/g, '\\$&');
19
+ const rx = '^' + esc.replace(/\*/g, '.*').replace(/\?/g, '.') + '$';
20
+ return new RegExp(rx).test(text);
21
+ }
@@ -0,0 +1,64 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.compileLatex = compileLatex;
4
+ exports.cleanupLatex = cleanupLatex;
5
+ const node_child_process_1 = require("node:child_process");
6
+ const node_path_1 = require("node:path");
7
+ const node_fs_1 = require("node:fs");
8
+ function which(cmd) {
9
+ const r = (0, node_child_process_1.spawnSync)(process.platform === 'win32' ? 'where' : 'which', [cmd], { stdio: 'ignore' });
10
+ return r.status === 0;
11
+ }
12
+ function hasDocker() { return which('docker'); }
13
+ function compileLatex(texPath, opts = {}) {
14
+ const texAbs = (0, node_path_1.resolve)(texPath);
15
+ const dir = (0, node_path_1.dirname)(texAbs);
16
+ const pdfPath = texAbs.replace(/\.tex$/i, '.pdf');
17
+ const localEngine = which('pdflatex') ? 'pdflatex' : (which('xelatex') ? 'xelatex' : null);
18
+ const wantDocker = !!opts.docker || !localEngine;
19
+ if (!wantDocker) {
20
+ const args = ['-interaction=nonstopmode', '-halt-on-error', texAbs];
21
+ const r = (0, node_child_process_1.spawnSync)(localEngine, args, { cwd: dir, encoding: 'utf-8' });
22
+ const ok = r.status === 0 && (0, node_fs_1.existsSync)(pdfPath);
23
+ return { ok, pdfPath, log: r.stdout + (r.stderr || '') };
24
+ }
25
+ if (!hasDocker()) {
26
+ return { ok: false, log: 'No LaTeX engine (pdflatex/xelatex) and Docker not found.' };
27
+ }
28
+ const image = opts.dockerImage || 'tectonicapp/tectonic:latest';
29
+ const fileBase = texAbs.replace(/^.*[\\\/]/, '');
30
+ let args;
31
+ if ((image || '').includes('tectonic')) {
32
+ args = ['run', '--rm', '-v', `${dir}:/work`, '-w', '/work', image, 'tectonic', '-X', 'compile', fileBase];
33
+ }
34
+ else {
35
+ args = ['run', '--rm', '-v', `${dir}:/work`, '-w', '/work', image, 'latexmk', '-pdf', '-interaction=nonstopmode', '-halt-on-error', fileBase];
36
+ }
37
+ const r = (0, node_child_process_1.spawnSync)('docker', args, { encoding: 'utf-8' });
38
+ const ok = r.status === 0 && (0, node_fs_1.existsSync)(pdfPath);
39
+ return { ok, pdfPath, log: r.stdout + (r.stderr || '') };
40
+ }
41
+ function cleanupLatex(texPath, mode) {
42
+ const fs = require('node:fs');
43
+ const path = require('node:path');
44
+ const abs = path.resolve(texPath);
45
+ const base = abs.replace(/\.tex$/i, '');
46
+ const exts = ['.aux', '.log', '.out', '.toc', '.synctex.gz'];
47
+ if (mode === 'aux' || mode === 'all') {
48
+ for (const ext of exts) {
49
+ const p = base + ext;
50
+ try {
51
+ if (fs.existsSync(p))
52
+ fs.unlinkSync(p);
53
+ }
54
+ catch { }
55
+ }
56
+ }
57
+ if (mode === 'all') {
58
+ try {
59
+ if (fs.existsSync(abs))
60
+ fs.unlinkSync(abs);
61
+ }
62
+ catch { }
63
+ }
64
+ }
@@ -0,0 +1,56 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.renderMermaidDiagram = renderMermaidDiagram;
4
+ const node_fs_1 = require("node:fs");
5
+ const node_path_1 = require("node:path");
6
+ const node_child_process_1 = require("node:child_process");
7
+ const mermaid_1 = require("../emitters/mermaid");
8
+ function which(cmd) {
9
+ const r = (0, node_child_process_1.spawnSync)(process.platform === 'win32' ? 'where' : 'which', [cmd], { stdio: 'ignore' });
10
+ return r.status === 0;
11
+ }
12
+ async function tryKroki(diagram, outPath, format) {
13
+ const urlBase = process.env.KROKI_URL || 'https://kroki.io';
14
+ const endpoint = `${urlBase}/mermaid/${format}`;
15
+ const resp = await fetch(endpoint, {
16
+ method: 'POST',
17
+ headers: { 'Content-Type': 'text/plain' },
18
+ body: diagram
19
+ });
20
+ if (!resp.ok)
21
+ return false;
22
+ const buf = new Uint8Array(await resp.arrayBuffer());
23
+ (0, node_fs_1.writeFileSync)(outPath, buf);
24
+ return (0, node_fs_1.existsSync)(outPath);
25
+ }
26
+ async function renderMermaidDiagram(schema, outPath, opts) {
27
+ const format = (opts?.format || 'png');
28
+ const isImage = /\.(png|svg|pdf)$/i.test(outPath);
29
+ const baseNoExt = outPath.replace(/\.[^.]+$/, '');
30
+ const base = isImage ? baseNoExt : (baseNoExt + '-er');
31
+ const imgPath = isImage ? outPath : `${base}.${format}`;
32
+ const mmdPath = `${base}.mmd`;
33
+ const mmd = (0, mermaid_1.erDiagram)(schema);
34
+ (0, node_fs_1.writeFileSync)(mmdPath, mmd, 'utf-8');
35
+ // 1) Try Docker Mermaid CLI if provided
36
+ const image = opts?.dockerImage;
37
+ if (image && which('docker')) {
38
+ const wd = (0, node_path_1.resolve)((0, node_path_1.dirname)(outPath));
39
+ const input = (0, node_path_1.basename)(mmdPath);
40
+ const output = (0, node_path_1.basename)(imgPath);
41
+ const args = ['run', '--rm', '-v', `${wd}:/data`, '-w', '/data', image, 'mmdc', '-i', input, '-o', output, '-b', 'transparent'];
42
+ const r = (0, node_child_process_1.spawnSync)('docker', args, { encoding: 'utf-8' });
43
+ if (r.status === 0 && (0, node_fs_1.existsSync)(imgPath))
44
+ return { ok: true, outPath: imgPath };
45
+ }
46
+ // 2) Try Kroki HTTP fallback
47
+ try {
48
+ const ok = await tryKroki(mmd, imgPath, format);
49
+ if (ok)
50
+ return { ok: true, outPath: imgPath };
51
+ }
52
+ catch (e) {
53
+ return { ok: false, log: e?.message || String(e) };
54
+ }
55
+ return { ok: false, log: 'Mermaid render failed (Docker+Kroki)' };
56
+ }
package/package.json ADDED
@@ -0,0 +1,38 @@
1
+ {
2
+ "name": "schema2md-cli",
3
+ "version": "0.3.0",
4
+ "description": "Generate Markdown/LaTeX + Mermaid ER (CLI for DB schemas)",
5
+ "private": false,
6
+ "type": "commonjs",
7
+ "bin": {
8
+ "db-doc": "dist/cli.js"
9
+ },
10
+ "scripts": {
11
+ "build": "tsc",
12
+ "start": "node dist/cli.js",
13
+ "clean": "node -e \"require(\u0027fs\u0027).rmSync(\u0027dist\u0027,{recursive:true,force:true})\"",
14
+ "prepublishOnly": "npm run build"
15
+ },
16
+ "dependencies": {
17
+ "sql.js": "^1.9.0",
18
+ "pg": "^8.11.5",
19
+ "mysql2": "^3.11.0"
20
+ },
21
+ "devDependencies": {
22
+ "typescript": "^5.4.0",
23
+ "@types/node": "^20.11.30"
24
+ },
25
+ "license": "MIT",
26
+ "repository": {
27
+ "type": "git",
28
+ "url": ""
29
+ },
30
+ "files": [
31
+ "dist",
32
+ "README.md",
33
+ "LICENSE"
34
+ ],
35
+ "engines": {
36
+
37
+ }
38
+ }