metal-orm 1.0.48 → 1.0.50

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "metal-orm",
3
- "version": "1.0.48",
3
+ "version": "1.0.50",
4
4
  "type": "module",
5
5
  "types": "./dist/index.d.ts",
6
6
  "engines": {
@@ -0,0 +1,198 @@
1
+ import fs from 'node:fs';
2
+ import process from 'node:process';
3
+ import path from 'node:path';
4
+ import { createRequire } from 'node:module';
5
+ import { parseArgs as parseCliArgs } from 'node:util';
6
+
7
// Database dialects the generator can introspect.
const DIALECTS = new Set(['postgres', 'mysql', 'sqlite', 'mssql']);
// compilerOptions values that imply NodeNext-style module resolution.
const NODE_NEXT_MODULE_RESOLUTIONS = new Set(['node16', 'nodenext']);

// Well-known tsconfig file names probed before scanning the directory.
const TS_CONFIG_BASE_NAMES = ['tsconfig.json', 'tsconfig.base.json', 'tsconfig.app.json', 'tsconfig.build.json'];
const nodeRequire = createRequire(import.meta.url);

// Normalize a compilerOptions entry for comparison; non-strings become ''.
const normalizeCompilerOption = value => {
  if (typeof value !== 'string') return '';
  return value.trim().toLowerCase();
};

// True when the given compilerOptions request node16/nodenext resolution,
// either via `moduleResolution` or via `module` itself.
const hasNodeNextModuleResolution = compilerOptions => {
  if (!compilerOptions || typeof compilerOptions !== 'object') return false;
  const candidates = [compilerOptions.moduleResolution, compilerOptions.module];
  return candidates.some(option => NODE_NEXT_MODULE_RESOLUTIONS.has(normalizeCompilerOption(option)));
};
23
+
24
// Resolve a tsconfig `extends` target to an absolute path.
// Tries baseDir-relative candidates first (with and without an implied
// .json extension), then falls back to Node module resolution so package
// specifiers like "@tsconfig/node20/tsconfig.json" work too.
const resolveExtendsPath = (extendsValue, baseDir) => {
  if (!extendsValue || typeof extendsValue !== 'string') return undefined;
  const spec = extendsValue.trim();
  const resolved = path.resolve(baseDir, spec);
  const candidates = path.extname(spec) ? [resolved] : [resolved, `${resolved}.json`];
  const existing = candidates.find(candidate => fs.existsSync(candidate));
  if (existing) return existing;
  try {
    return nodeRequire.resolve(spec, { paths: [baseDir] });
  } catch {
    return undefined;
  }
};
41
+
42
// Return true when the tsconfig at `configPath` — or anything it `extends`,
// followed recursively — requests node16/nodenext module resolution.
// `visited` guards against extends cycles; missing, unreadable, or
// unparsable configs are treated as "no".
const inspectTsConfig = (configPath, visited) => {
  if (!configPath) return false;
  const normalized = path.resolve(configPath);
  if (visited.has(normalized)) return false;
  if (!fs.existsSync(normalized)) return false;
  visited.add(normalized);
  let raw;
  try {
    raw = fs.readFileSync(normalized, 'utf8');
  } catch {
    return false;
  }
  let parsed;
  try {
    // NOTE(review): tsconfig files may legally contain comments/trailing
    // commas (JSONC); those fail strict JSON.parse and count as "no".
    // TODO: consider a JSONC-tolerant parse if that proves too strict.
    parsed = JSON.parse(raw);
  } catch {
    return false;
  }
  // BUG FIX: JSON.parse can yield null or a primitive (a file containing
  // just `null`); the previous code then crashed on `parsed.compilerOptions`.
  if (!parsed || typeof parsed !== 'object') {
    return false;
  }
  if (hasNodeNextModuleResolution(parsed.compilerOptions)) {
    return true;
  }
  if (parsed.extends) {
    // `extends` may be a single specifier or (TypeScript >= 5.0) an array.
    const extendsList = Array.isArray(parsed.extends) ? parsed.extends : [parsed.extends];
    for (const entry of extendsList) {
      const extended = resolveExtendsPath(entry, path.dirname(normalized));
      if (extended && inspectTsConfig(extended, visited)) {
        return true;
      }
    }
  }
  return false;
};
71
+
72
// Collect candidate tsconfig paths inside cwd: the well-known base names
// plus any other tsconfig*.json found in the directory listing.
const discoverTsConfigPaths = cwd => {
  const found = new Set();
  const baseNames = ['tsconfig.json', 'tsconfig.base.json', 'tsconfig.app.json', 'tsconfig.build.json'];
  for (const baseName of baseNames) {
    const candidate = path.join(cwd, baseName);
    if (fs.existsSync(candidate)) found.add(candidate);
  }
  let entries = [];
  try {
    entries = fs.readdirSync(cwd);
  } catch {
    // Unreadable directory: fall through with whatever we already found.
  }
  for (const entry of entries) {
    const lowered = entry.toLowerCase();
    if (lowered.startsWith('tsconfig') && lowered.endsWith('.json')) {
      found.add(path.join(cwd, entry));
    }
  }
  return [...found];
};
93
+
94
// Decide whether generated imports need explicit `.js` extensions: true when
// any tsconfig discoverable from cwd opts into node16/nodenext resolution.
const shouldUseJsImportExtensions = cwd => {
  const configPaths = discoverTsConfigPaths(cwd);
  if (configPaths.length === 0) return false;
  const visited = new Set();
  return configPaths.some(configPath => inspectTsConfig(configPath, visited));
};
105
+
106
/**
 * Parse CLI arguments/environment into a command descriptor.
 *
 * @param {string[]} argv CLI args (defaults to process.argv minus node/script).
 * @param {object} env Environment map; DATABASE_URL backs the --url flag.
 * @param {string} cwd Base directory for resolving output/override paths.
 * @returns {{kind:'help'}|{kind:'version'}|{kind:'generate', options: object}}
 * @throws {Error} On positional args, unknown flags/dialects, or a missing
 *   connection string for network dialects.
 */
export const parseOptions = (argv = process.argv.slice(2), env = process.env, cwd = process.cwd()) => {
  const parser = {
    options: {
      dialect: { type: 'string' },
      url: { type: 'string' },
      db: { type: 'string' },
      schema: { type: 'string' },
      include: { type: 'string' },
      exclude: { type: 'string' },
      out: { type: 'string' },
      locale: { type: 'string' },
      'naming-overrides': { type: 'string' },
      'dry-run': { type: 'boolean' },
      'out-dir': { type: 'string' },
      help: { type: 'boolean', short: 'h' },
      version: { type: 'boolean' }
    },
    strict: true,
    // Collect positionals so we can reject them with the descriptive error
    // below instead of parseArgs' generic strict-mode failure.
    allowPositionals: true
  };

  // BUG FIX: util.parseArgs takes a single config object. The previous call,
  // parseCliArgs(parser, { argv }), silently ignored the second argument and
  // always parsed process.argv — the `argv` parameter never took effect.
  const { values, positionals } = parseCliArgs({ ...parser, args: argv });

  if (values.help) {
    return { kind: 'help' };
  }

  if (values.version) {
    return { kind: 'version' };
  }

  if (positionals.length) {
    throw new Error(`Unexpected positional args: ${positionals.join(' ')}`);
  }

  // "a, b , c" -> ['a', 'b', 'c'] with empties dropped.
  const splitList = value => value.split(',').map(v => v.trim()).filter(Boolean);

  const opts = {
    dialect: (values.dialect || 'postgres').toLowerCase(),
    url: values.url || env.DATABASE_URL,
    dbPath: values.db,
    schema: values.schema,
    include: values.include ? splitList(values.include) : undefined,
    exclude: values.exclude ? splitList(values.exclude) : undefined,
    out: values.out ? path.resolve(cwd, values.out) : undefined,
    outDir: values['out-dir'] ? path.resolve(cwd, values['out-dir']) : undefined,
    locale: (values.locale || 'en').toLowerCase(),
    namingOverrides: values['naming-overrides'] ? path.resolve(cwd, values['naming-overrides']) : undefined,
    dryRun: Boolean(values['dry-run'])
  };

  // Emit `.js` import extensions when the project's tsconfig uses
  // node16/nodenext module resolution.
  opts.useJsImportExtensions = shouldUseJsImportExtensions(cwd);

  if (!DIALECTS.has(opts.dialect)) {
    throw new Error(`Unsupported dialect "${opts.dialect}". Supported: ${Array.from(DIALECTS).join(', ')}`);
  }

  // SQLite can run fully in-memory when no database file is given.
  if (opts.dialect === 'sqlite' && !opts.dbPath) {
    opts.dbPath = ':memory:';
  }

  if (opts.dialect !== 'sqlite' && !opts.url) {
    throw new Error('Missing connection string. Provide --url or set DATABASE_URL.');
  }

  // Default output: the index inside --out-dir, or a single file in cwd.
  if (!opts.out) {
    opts.out = opts.outDir ? path.join(opts.outDir, 'index.ts') : path.join(cwd, 'generated-entities.ts');
  }

  return { kind: 'generate', options: opts };
};
174
+
175
// Print the generator's usage/help text to stdout.
// NOTE(review): the template literal below is runtime output; its inner
// indentation was lost in the published diff — confirm spacing against the
// shipped file before reformatting.
export const printUsage = () => {
  console.log(
    `
MetalORM decorator generator
---------------------------
Usage:
node scripts/generate-entities.mjs --dialect=postgres --url=<connection> --schema=public --include=users,orders [--out=src/entities.ts]
node scripts/generate-entities.mjs --dialect=mysql --url=<connection> --schema=mydb --exclude=archived [--out=src/entities.ts]
node scripts/generate-entities.mjs --dialect=sqlite --db=./my.db [--out=src/entities.ts]
node scripts/generate-entities.mjs --dialect=mssql --url=mssql://user:pass@host/db [--out=src/entities.ts]
node scripts/generate-entities.mjs --dialect=postgres --url=<connection> --schema=public --out-dir=src/entities

Flags:
--include=tbl1,tbl2 Only include these tables
--exclude=tbl3,tbl4 Exclude these tables
--locale=pt-BR Naming locale for class/relation names (default: en)
--naming-overrides Path to JSON map of irregular plurals { "singular": "plural" }
--dry-run Print to stdout instead of writing a file
--out=<file> Override the generated file (defaults to generated-entities.ts or the index inside --out-dir)
--out-dir=<dir> Emit one file per entity inside this directory plus the shared index
--help Show this help
`
  );
};
@@ -0,0 +1,183 @@
1
+ import { createPostgresExecutor, createMysqlExecutor, createSqliteExecutor, createMssqlExecutor } from '../../dist/index.js';
2
+
3
// Coerce a connection-string query value: integer-looking strings become
// numbers, "true"/"false" (any casing) become booleans, and everything
// else (including empty/undefined) passes through untouched.
const parseSqlServerOptionValue = value => {
  if (!value) return value;
  if (/^-?\d+$/.test(value)) return Number(value);
  const lowered = value.toLowerCase();
  if (lowered === 'true' || lowered === 'false') return lowered === 'true';
  return value;
};

// Translate an mssql:// URL into a tedious connection config. Credentials
// are percent-decoded, the first path segment becomes the database, and the
// port plus every query parameter land in `options` (query values are
// coerced via parseSqlServerOptionValue).
const parseSqlServerConnectionConfig = connectionString => {
  if (!connectionString) {
    throw new Error('Missing connection string for SQL Server');
  }
  const parsedUrl = new URL(connectionString);
  const options = {};

  const databaseName = parsedUrl.pathname ? parsedUrl.pathname.replace(/^\//, '') : '';
  if (databaseName) {
    options.database = databaseName;
  }
  if (parsedUrl.port) {
    options.port = Number(parsedUrl.port);
  }
  for (const [key, rawValue] of parsedUrl.searchParams) {
    options[key] = parseSqlServerOptionValue(rawValue);
  }

  return {
    server: parsedUrl.hostname,
    authentication: {
      type: 'default',
      options: {
        userName: decodeURIComponent(parsedUrl.username || ''),
        password: decodeURIComponent(parsedUrl.password || '')
      }
    },
    options
  };
};
41
+
42
// Map a JS parameter value to the closest tedious TYPES entry.
// null/undefined and strings map to NVarChar (tedious sends NULL fine);
// numbers split into Int / BigInt / Float; Buffers become VarBinary.
const getTediousParameterType = (value, TYPES) => {
  if (value === null || value === undefined) {
    return TYPES.NVarChar;
  }
  if (typeof value === 'number') {
    if (Number.isInteger(value)) {
      // BUG FIX: tedious TYPES.Int is a signed 32-bit integer; integers
      // outside that range previously overflowed it. Send them as BigInt.
      return value >= -2147483648 && value <= 2147483647 ? TYPES.Int : TYPES.BigInt;
    }
    return TYPES.Float;
  }
  if (typeof value === 'bigint') {
    return TYPES.BigInt;
  }
  if (typeof value === 'boolean') {
    return TYPES.Bit;
  }
  if (value instanceof Date) {
    return TYPES.DateTime;
  }
  if (Buffer.isBuffer(value)) {
    return TYPES.VarBinary;
  }
  return TYPES.NVarChar;
};
63
+
64
// Lazily load the driver for `dialect`, open a connection, and wrap it in a
// MetalORM executor. Returns { executor, cleanup } where cleanup closes the
// underlying connection. Driver packages (pg, mysql2, sqlite3, tedious) are
// dynamic imports so only the one actually used needs to be installed.
export const loadDriver = async (dialect, url, dbPath) => {
  switch (dialect) {
    case 'postgres': {
      const mod = await import('pg');
      const { Client } = mod;
      const client = new Client({ connectionString: url });
      await client.connect();
      const executor = createPostgresExecutor(client);
      return { executor, cleanup: async () => client.end() };
    }
    case 'mysql': {
      const mod = await import('mysql2/promise');
      const conn = await mod.createConnection(url);
      const executor = createMysqlExecutor({
        // execute() uses prepared statements; args pass through untouched.
        query: (...args) => conn.execute(...args),
        beginTransaction: () => conn.beginTransaction(),
        commit: () => conn.commit(),
        rollback: () => conn.rollback()
      });
      return { executor, cleanup: async () => conn.end() };
    }
    case 'sqlite': {
      const mod = await import('sqlite3');
      const sqlite3 = mod.default || mod;
      const db = new sqlite3.Database(dbPath);
      // Promisified wrapper over sqlite3's callback-style db.all(); used for
      // both queries and the transaction-control statements below.
      const execAll = (sql, params) =>
        new Promise((resolve, reject) => {
          db.all(sql, params || [], (err, rows) => {
            if (err) return reject(err);
            resolve(rows);
          });
        });
      const executor = createSqliteExecutor({
        all: execAll,
        beginTransaction: () => execAll('BEGIN'),
        commitTransaction: () => execAll('COMMIT'),
        rollbackTransaction: () => execAll('ROLLBACK')
      });
      const cleanup = async () =>
        new Promise((resolve, reject) => db.close(err => (err ? reject(err) : resolve())));
      return { executor, cleanup };
    }
    case 'mssql': {
      const mod = await import('tedious');
      const { Connection, Request, TYPES } = mod;
      const config = parseSqlServerConnectionConfig(url);
      const connection = new Connection(config);

      // Wait for the connect/error event pair; whichever fires first wins
      // and the other listener is detached to avoid double settlement.
      await new Promise((resolve, reject) => {
        const onConnect = err => {
          connection.removeListener('error', onError);
          if (err) return reject(err);
          resolve();
        };
        const onError = err => {
          connection.removeListener('connect', onConnect);
          reject(err);
        };
        connection.once('connect', onConnect);
        connection.once('error', onError);
        connection.connect();
      });

      // Run a SQL batch, collecting each row as a plain object keyed by
      // column name. Positional params bind as @p1, @p2, ... with types
      // inferred from the JS values via getTediousParameterType.
      const execQuery = (sql, params) =>
        new Promise((resolve, reject) => {
          const rows = [];
          const request = new Request(sql, err => {
            if (err) return reject(err);
            resolve({ recordset: rows });
          });
          request.on('row', columns => {
            const row = {};
            for (const column of columns) {
              row[column.metadata.colName] = column.value;
            }
            rows.push(row);
          });
          params?.forEach((value, index) => {
            request.addParameter(`p${index + 1}`, getTediousParameterType(value, TYPES), value);
          });
          connection.execSql(request);
        });

      const executor = createMssqlExecutor({
        query: execQuery,
        beginTransaction: () =>
          new Promise((resolve, reject) => {
            connection.beginTransaction(err => (err ? reject(err) : resolve()));
          }),
        commit: () =>
          new Promise((resolve, reject) => {
            connection.commitTransaction(err => (err ? reject(err) : resolve()));
          }),
        rollback: () =>
          new Promise((resolve, reject) => {
            connection.rollbackTransaction(err => (err ? reject(err) : resolve()));
          })
      });

      // Close the socket; resolves on 'end', rejects if 'error' fires first
      // (mirrors the connect handshake above).
      const cleanup = async () =>
        new Promise((resolve, reject) => {
          const onEnd = () => {
            connection.removeListener('error', onError);
            resolve();
          };
          const onError = err => {
            connection.removeListener('end', onEnd);
            reject(err);
          };
          connection.once('end', onEnd);
          connection.once('error', onError);
          connection.close();
        });

      return { executor, cleanup };
    }
    default:
      throw new Error(`Unsupported dialect ${dialect}`);
  }
};
@@ -0,0 +1,24 @@
1
+ import path from 'node:path';
2
+
3
// Ensure the output's parent directory exists, then write the generated code.
// `fsPromises` is injected (fs/promises-compatible) so tests can stub it.
export const writeSingleFile = async (fsPromises, out, code) => {
  const parentDir = path.dirname(out);
  await fsPromises.mkdir(parentDir, { recursive: true });
  await fsPromises.writeFile(out, code, 'utf8');
};
7
+
8
// Write one file per entity, then the shared index file.
// `fsPromises` is injected (fs/promises-compatible) so tests can stub it.
export const writeSplitFiles = async (fsPromises, tableFiles, outDir, indexOut, indexCode) => {
  await fsPromises.mkdir(outDir, { recursive: true });
  // The per-entity files are independent; previously they were awaited
  // one-by-one — run the writes concurrently instead.
  await Promise.all(tableFiles.map(file => fsPromises.writeFile(file.path, file.code, 'utf8')));
  await fsPromises.mkdir(path.dirname(indexOut), { recursive: true });
  await fsPromises.writeFile(indexOut, indexCode, 'utf8');
};
16
+
17
// Dump every generated file (entities first, then the index) to the logger
// instead of writing to disk. Each file is announced with a "==> path" line.
export const printDryRun = (logger, tableFiles, indexCode, indexPath) => {
  const emit = (header, body) => {
    logger.log(`\n==> ${header}\n`);
    logger.log(body);
  };
  for (const file of tableFiles) {
    emit(file.path, file.code);
  }
  emit(indexPath, indexCode);
};
@@ -0,0 +1,68 @@
1
+ import fs from 'node:fs/promises';
2
+ import { introspectSchema } from '../../dist/index.js';
3
+ import { createNamingStrategy } from '../naming-strategy.mjs';
4
+ import { loadDriver } from './drivers.mjs';
5
+ import { renderEntityFile, renderSplitEntityFiles, renderSplitIndexFile } from './render.mjs';
6
+ import { printDryRun, writeSingleFile, writeSplitFiles } from './emit.mjs';
7
+
8
// Load the irregular-plural overrides JSON. Accepts either a bare map
// { "singular": "plural" } or a wrapper { "irregulars": { ... } }.
// `fsPromises` is injected so tests can stub file access.
const loadIrregulars = async (filePath, fsPromises) => {
  const raw = await fsPromises.readFile(filePath, 'utf8');
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch (err) {
    throw new Error(`Failed to parse naming overrides at ${filePath}: ${err.message || err}`);
  }
  const isPlainObject = candidate =>
    Boolean(candidate) && typeof candidate === 'object' && !Array.isArray(candidate);
  if (!isPlainObject(parsed)) {
    throw new Error(`Naming overrides at ${filePath} must be an object or { "irregulars": { ... } }`);
  }
  // Prefer the wrapped form when present and well-shaped; otherwise the
  // whole document is the map.
  return isPlainObject(parsed.irregulars) ? parsed.irregulars : parsed;
};
27
+
28
// Introspect the live database and emit decorator-based entity source code.
// `context` allows injecting an fs/promises-compatible module and a logger
// (used by tests); defaults are the real fs and console.
export const generateEntities = async (opts, context = {}) => {
  const { fs: fsPromises = fs, logger = console } = context;
  const irregulars = opts.namingOverrides ? await loadIrregulars(opts.namingOverrides, fsPromises) : undefined;
  const naming = createNamingStrategy(opts.locale, irregulars);

  const { executor, cleanup } = await loadDriver(opts.dialect, opts.url, opts.dbPath);
  let schema;
  try {
    schema = await introspectSchema(executor, opts.dialect, {
      schema: opts.schema,
      includeTables: opts.include,
      excludeTables: opts.exclude
    });
  } finally {
    // Always release the connection, even if introspection throws.
    await cleanup?.();
  }

  if (opts.outDir) {
    // Split mode: one file per table plus a shared index written to opts.out.
    const { tableFiles, metadata } = renderSplitEntityFiles(schema, { ...opts, naming });
    const indexCode = renderSplitIndexFile(metadata, { ...opts, naming });

    if (opts.dryRun) {
      printDryRun(logger, tableFiles, indexCode, opts.out);
      return;
    }

    await writeSplitFiles(fsPromises, tableFiles, opts.outDir, opts.out, indexCode);
    logger.log(`Wrote ${tableFiles.length} entity files to ${opts.outDir} and index ${opts.out} (${schema.tables.length} tables)`);
    return;
  }

  // Single-file mode: everything rendered into one module at opts.out.
  const code = renderEntityFile(schema, { ...opts, naming });

  if (opts.dryRun) {
    logger.log(code);
    return;
  }

  await writeSingleFile(fsPromises, opts.out, code);
  logger.log(`Wrote ${opts.out} (${schema.tables.length} tables)`);
};