@neupgroup/mapper 1.4.3 → 1.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -35,6 +35,7 @@ export declare class APIAdapter implements DbAdapter {
     updateDocument(collectionName: string, docId: string, data: DocumentData): Promise<void>;
     deleteDocument(collectionName: string, docId: string): Promise<void>;
     request(method: string, endpoint: string, data?: any, customHeaders?: Record<string, string | string[]>): Promise<any>;
+    raw(query: string, params?: any[]): Promise<any>;
 }
 /**
  * Factory function to create API adapter
@@ -168,6 +168,10 @@ export class APIAdapter {
             headers: customHeaders,
         });
     }
+    async raw(query, params) {
+        // API adapter raw could be a generic request or just return null
+        return null;
+    }
 }
 /**
  * Factory function to create API adapter
@@ -49,6 +49,7 @@ export declare class MongoDBAdapter implements DbAdapter {
      * Get collection statistics
      */
     getStats(collectionName: string): Promise<any>;
+    raw(query: string, params?: any[]): Promise<any>;
 }
 /**
  * Factory function to create MongoDB adapter
@@ -218,6 +218,19 @@ export class MongoDBAdapter {
         const collection = this.db.collection(collectionName);
         return await collection.stats();
     }
+    async raw(query, params) {
+        await this.ensureConnected();
+        try {
+            // Try to parse query as a command object if it's a string
+            const command = typeof query === 'string' ? JSON.parse(query) : query;
+            return await this.db.command(command);
+        }
+        catch (e) {
+            // For migrations, if it's not JSON, we might not be able to do much
+            // but we can at least return the database object for custom logic if params are provided
+            return this.db;
+        }
+    }
 }
 /**
  * Factory function to create MongoDB adapter
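
Note on the MongoDB implementation above: there is no SQL to run, so raw() treats a string query as a JSON-serialized command document and hands it to db.command(); anything that fails to parse falls into the catch branch and resolves to the underlying Db handle. A minimal sketch of what callers would pass (the command documents are illustrative, not part of the package):

    // Sketch only: command strings consumed by MongoDBAdapter.raw(), per the JSON.parse logic above.
    const pingCommand = JSON.stringify({ ping: 1 });             // -> db.command({ ping: 1 })
    const statsCommand = JSON.stringify({ collStats: 'users' }); // -> db.command({ collStats: 'users' })
    // A non-JSON string (e.g. SQL emitted by TableMigrator) is caught and resolves to the Db object.
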
@@ -2,10 +2,28 @@
 import * as fs from 'fs';
 import * as path from 'path';
 const args = process.argv.slice(2);
+if (args.includes('--help') || args.includes('-h')) {
+    console.log(`
+Usage: npm run create-connection <connectionName> [type]
+
+Arguments:
+  connectionName   Name for your connection (e.g., 'primary_db')
+  type             Database type (default: 'api').
+                   Supported types: mysql, sqlite, postgres, mongodb, api
+
+Options:
+  --help, -h       Show this help message
+
+Example:
+  npm run create-connection my_db mysql
+`);
+    process.exit(0);
+}
 const connectionName = args[0];
 const type = args[1] || 'api';
 if (!connectionName) {
-    console.error('Usage: npm run create-connection <connectionName> [type]');
+    console.error('Error: Connection name is required.');
+    console.log('Usage: npm run create-connection <connectionName> [type]');
     process.exit(1);
 }
 const connectionDir = path.resolve(process.cwd(), 'src/connection');
@@ -2,10 +2,27 @@
 import * as fs from 'fs';
 import * as path from 'path';
 const args = process.argv.slice(2);
+if (args.includes('--help') || args.includes('-h')) {
+    console.log(`
+Usage: npm run create-migration <tableName> [remarks]
+
+Arguments:
+  tableName   The name of the database table (will be used for schema and migration file)
+  remarks     Optional description of the migration (e.g., 'add_index')
+
+Options:
+  --help, -h  Show this help message
+
+Example:
+  npm run create-migration users initial_schema
+`);
+    process.exit(0);
+}
 const tableName = args[0];
 const remarks = args[1] || '';
 if (!tableName) {
-    console.error('Usage: npm run create-migration <tableName> [remarks]');
+    console.error('Error: Table name is required.');
+    console.log('Usage: npm run create-migration <tableName> [remarks]');
     process.exit(1);
 }
 // Ensure directories exist
@@ -28,8 +45,10 @@ import { Mapper, TableMigrator } from '@neupgroup/mapper';
 
 export async function up() {
     // const table = Mapper.schemas().table('${tableName}');
-    // table.addColumn('id').type('int').isPrimary().autoIncrement().exec();
+    // table.useConnection('default');
+    // table.addColumn('id').type('int').isPrimary().autoIncrement();
     // ... add more columns
+    // await table.exec();
     console.log('Migrating up: ${tableName}');
 }
 
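
The regenerated migration template above now separates column definition from execution: useConnection() selects the target connection and exec() is awaited on its own. A filled-in migration following that scaffold might look like the sketch below (the 'users' table, its columns, and the down() shape are illustrative; only methods shown in the template and the TableMigrator declaration are used):

    import { Mapper } from '@neupgroup/mapper';

    // Illustrative sketch of a migration written against the new template.
    export async function up() {
        const table = Mapper.schemas().table('users');
        table.useConnection('default');
        table.addColumn('id').type('int').isPrimary().autoIncrement();
        table.addColumn('email').type('string');
        await table.exec(); // writes src/schemas/users.ts and runs CREATE TABLE via adapter.raw()
        console.log('Migrating up: users');
    }

    export async function down() {
        const table = Mapper.schemas().table('users');
        table.useConnection('default');
        await table.drop(); // deletes the schema file and issues DROP TABLE IF EXISTS
    }
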
@@ -2,6 +2,24 @@
 import * as fs from 'fs';
 import * as path from 'path';
 const args = process.argv.slice(2);
+if (args.includes('--help') || args.includes('-h')) {
+    console.log(`
+Usage: npm run migrate [command]
+
+Commands:
+  up     Run all pending migrations (default)
+  down   Roll back the last completed migration
+
+Options:
+  --help, -h  Show this help message
+
+Description:
+  This command will look for migration files in src/migration,
+  load database connections from src/connection, and execute
+  pending changes on your database while updating local schema files.
+`);
+    process.exit(0);
+}
 const command = args[0] || 'up'; // 'up' or 'down'
 const migrationDir = path.resolve(process.cwd(), 'src/migration');
 const indexFilePath = path.join(migrationDir, 'index.ts');
@@ -10,6 +28,27 @@ if (!fs.existsSync(indexFilePath)) {
     process.exit(0);
 }
 async function run() {
+    // Load connections
+    const connectionDir = path.resolve(process.cwd(), 'src/connection');
+    if (fs.existsSync(connectionDir)) {
+        const { StaticMapper } = await import('../fluent-mapper.js');
+        const connFiles = fs.readdirSync(connectionDir).filter(f => f.endsWith('.ts'));
+        for (const file of connFiles) {
+            const name = file.replace('.ts', '');
+            const filePath = path.resolve(connectionDir, file);
+            try {
+                const mod = await import('file://' + filePath);
+                const config = mod.config;
+                if (config) {
+                    console.log(`Loading connection: ${name}`);
+                    StaticMapper.makeConnection(name, config.type, config);
+                }
+            }
+            catch (e) {
+                console.warn(`Failed to load connection ${name}: ${e.message}`);
+            }
+        }
+    }
     const content = fs.readFileSync(indexFilePath, 'utf-8');
     const matchMigrations = content.match(/migrations = \[(.*?)\]/s);
     const matchCompleted = content.match(/completed = \[(.*?)\]/s);
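
The connection loader added above only requires that each .ts file in src/connection export a config object whose type names the adapter; the whole object is then passed to StaticMapper.makeConnection(name, config.type, config). A sketch of such a file (fields beyond type are placeholders for whatever the chosen adapter expects):

    // src/connection/default.ts -- illustrative; only the exported `config` and its `type` are assumed by the loader.
    export const config = {
        type: 'mysql',
        host: 'localhost',
        port: 3306,
        user: 'root',
        password: '',
        database: 'app',
    };
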
@@ -17,9 +17,13 @@ export declare class ColumnBuilder {
 export declare class TableMigrator {
     private name;
     private columns;
+    private connectionName;
     constructor(name: string);
+    useConnection(name: string): this;
     addColumn(name: string): ColumnBuilder;
     getColumns(): any[];
+    private getAdapter;
+    private generateCreateSql;
     exec(): Promise<void>;
     drop(): Promise<void>;
     dropColumn(columnName: string): Promise<void>;
package/dist/migrator.js CHANGED
@@ -56,6 +56,11 @@ export class TableMigrator {
     constructor(name) {
         this.name = name;
         this.columns = [];
+        this.connectionName = 'default';
+    }
+    useConnection(name) {
+        this.connectionName = name;
+        return this;
     }
     addColumn(name) {
         const col = new ColumnBuilder(name);
@@ -65,28 +70,78 @@ export class TableMigrator {
     getColumns() {
         return this.columns.map(c => c.getDefinition());
     }
+    async getAdapter() {
+        const { StaticMapper } = await import('./fluent-mapper.js');
+        try {
+            const conn = StaticMapper.connection(this.connectionName);
+            // Accessing internal mapper instance safely
+            const adapter = conn.mapper.getConnections().getAdapter(this.connectionName);
+            const config = conn.mapper.getConnections().get(this.connectionName);
+            return { adapter, config };
+        }
+        catch (e) {
+            console.error(`Failed to get adapter for connection: ${this.connectionName}`);
+            return { adapter: null, config: null };
+        }
+    }
+    generateCreateSql(type) {
+        const columns = this.getColumns();
+        let sql = `CREATE TABLE IF NOT EXISTS \`${this.name}\` (\n`;
+        const columnDefs = columns.map(col => {
+            let def = ` \`${col.name}\` `;
+            // Map types
+            let dbType = 'VARCHAR(255)';
+            if (col.type === 'int')
+                dbType = 'INT';
+            else if (col.type === 'number')
+                dbType = 'DECIMAL(10,2)';
+            else if (col.type === 'boolean')
+                dbType = 'TINYINT(1)';
+            else if (col.type === 'date')
+                dbType = 'DATETIME';
+            def += dbType;
+            if (col.notNull)
+                def += ' NOT NULL';
+            if (col.isPrimary)
+                def += ' PRIMARY KEY';
+            if (col.autoIncrement) {
+                if (type === 'mysql')
+                    def += ' AUTO_INCREMENT';
+                else if (type === 'sqlite')
+                    def += ' AUTOINCREMENT';
+                else if (type === 'sql')
+                    def += ' SERIAL'; // Postgres handled differently usually but okay for simple
+            }
+            if (col.defaultValue !== undefined) {
+                def += ` DEFAULT ${typeof col.defaultValue === 'string' ? `'${col.defaultValue}'` : col.defaultValue}`;
+            }
+            if (col.isUnique && !col.isPrimary)
+                def += ' UNIQUE';
+            return def;
+        });
+        sql += columnDefs.join(',\n');
+        // Foreign keys
+        columns.filter(c => c.foreignKey).forEach(c => {
+            sql += `,\n FOREIGN KEY (\`${c.name}\`) REFERENCES \`${c.foreignKey.table}\`(\`${c.foreignKey.column}\`)`;
+        });
+        sql += '\n)';
+        if (type === 'postgres' || type === 'sql') {
+            // Replace backticks with double quotes for Postgres
+            sql = sql.replace(/`/g, '"');
+        }
+        return sql;
+    }
     async exec() {
-        // This is where we trigger schema file update
+        // 1. Update schema file
         const fs = await import('fs');
         const path = await import('path');
         const schemasDir = path.resolve(process.cwd(), 'src/schemas');
+        if (!fs.existsSync(schemasDir))
+            fs.mkdirSync(schemasDir, { recursive: true });
         const schemaFilePath = path.join(schemasDir, `${this.name}.ts`);
         const columns = this.getColumns();
-        let existingSchema = {
-            fields: [],
-            insertableFields: [],
-            updatableFields: [],
-            massUpdateable: false,
-            massDeletable: false,
-            usesConnection: 'default'
-        };
-        if (fs.existsSync(schemaFilePath)) {
-            // Very basic parser/updater.
-            // Real implementation would use AST, but for now we'll do simple string replacement or overwrite if it's the first time.
-            // For the sake of this demo, we will generate the fields array.
-        }
         const fieldsContent = columns.map(col => {
-            return ` { name: '${col.name}', type: '${col.type}'${col.isPrimary ? ', isPrimary: true' : ''}${col.autoIncrement ? ', autoIncrement: true' : ''}${col.notNull ? ', notNull: true' : ''}${col.isUnique ? ', isUnique: true' : ''} }`;
+            return ` { name: '${col.name}', type: '${col.type}'${col.isPrimary ? ', isPrimary: true' : ''}${col.autoIncrement ? ', autoIncrement: true' : ''}${col.notNull ? ', notNull: true' : ''}${col.isUnique ? ', isUnique: true' : ''}${col.defaultValue !== undefined ? `, defaultValue: ${JSON.stringify(col.defaultValue)}` : ''} }`;
         }).join(',\n');
         const schemaContent = `
 export const ${this.name} = {
@@ -97,51 +152,107 @@ ${fieldsContent}
   updatableFields: [${columns.filter(c => !c.autoIncrement && !c.isPrimary).map(c => `'${c.name}'`).join(', ')}],
   massUpdateable: false,
   massDeletable: false,
-  usesConnection: 'default'
+  usesConnection: '${this.connectionName}'
 };
 `;
         fs.writeFileSync(schemaFilePath, schemaContent.trim() + '\n');
-        console.log(`Updated schema: ${schemaFilePath}`);
+        console.log(`Updated schema file: ${schemaFilePath}`);
+        // 2. Execute on Database
+        const { adapter, config } = await this.getAdapter();
+        if (adapter && config) {
+            console.log(`Executing migration on database (${config.type})...`);
+            const sql = this.generateCreateSql(config.type);
+            try {
+                await adapter.raw(sql);
+                console.log(`Successfully executed SQL on database.`);
+            }
+            catch (err) {
+                console.error(`Database execution failed: ${err.message}`);
+                throw err;
+            }
+        }
+        else {
+            console.log(`Skipping database execution: Connection "${this.connectionName}" not found or adapter not attached.`);
+        }
     }
     async drop() {
+        // 1. Remove schema file
        const fs = await import('fs');
         const path = await import('path');
         const schemasDir = path.resolve(process.cwd(), 'src/schemas');
         const schemaFilePath = path.join(schemasDir, `${this.name}.ts`);
         if (fs.existsSync(schemaFilePath)) {
             fs.unlinkSync(schemaFilePath);
-            console.log(`Deleted schema: ${schemaFilePath}`);
+            console.log(`Deleted schema file: ${schemaFilePath}`);
+        }
+        // 2. Execute on Database
+        const { adapter, config } = await this.getAdapter();
+        if (adapter && config) {
+            const quote = (config.type === 'postgres' || config.type === 'sql') ? '"' : '`';
+            const sql = `DROP TABLE IF EXISTS ${quote}${this.name}${quote}`;
+            try {
+                await adapter.raw(sql);
+                console.log(`Dropped table ${this.name} from database.`);
+            }
+            catch (err) {
+                console.error(`Failed to drop table from database: ${err.message}`);
+            }
         }
     }
     async dropColumn(columnName) {
+        // 1. Update schema file
         const fs = await import('fs');
         const path = await import('path');
         const schemasDir = path.resolve(process.cwd(), 'src/schemas');
         const schemaFilePath = path.join(schemasDir, `${this.name}.ts`);
         if (fs.existsSync(schemaFilePath)) {
-            // Very simple implementation: filter out the line with the column
             const content = fs.readFileSync(schemaFilePath, 'utf-8');
             const lines = content.split('\n');
             const filteredLines = lines.filter(line => !line.includes(`name: '${columnName}'`));
             fs.writeFileSync(schemaFilePath, filteredLines.join('\n'));
-            console.log(`Dropped column ${columnName} from schema ${this.name}`);
+            console.log(`Dropped column ${columnName} from schema file.`);
+        }
+        // 2. Execute on Database
+        const { adapter, config } = await this.getAdapter();
+        if (adapter && config) {
+            const quote = (config.type === 'postgres' || config.type === 'sql') ? '"' : '`';
+            const sql = `ALTER TABLE ${quote}${this.name}${quote} DROP COLUMN ${quote}${columnName}${quote}`;
+            try {
+                await adapter.raw(sql);
+                console.log(`Dropped column ${columnName} from database.`);
+            }
+            catch (err) {
+                console.error(`Failed to drop column from database: ${err.message}`);
+            }
         }
     }
     async dropUnique(columnName) {
+        // 1. Update schema file
         const fs = await import('fs');
         const path = await import('path');
         const schemasDir = path.resolve(process.cwd(), 'src/schemas');
         const schemaFilePath = path.join(schemasDir, `${this.name}.ts`);
         if (fs.existsSync(schemaFilePath)) {
             let content = fs.readFileSync(schemaFilePath, 'utf-8');
-            // Remove ", isUnique: true" from the line containing the column
             const regex = new RegExp(`({ name: '${columnName}', .*?), isUnique: true(.*})`);
             content = content.replace(regex, '$1$2');
             fs.writeFileSync(schemaFilePath, content);
-            console.log(`Dropped unique constraint from ${columnName} in schema ${this.name}`);
+            console.log(`Dropped unique constraint from ${columnName} in schema file.`);
+        }
+        // 2. Execute on Database (Database specific, simplified for MySQL)
+        const { adapter, config } = await this.getAdapter();
+        if (adapter && config && config.type === 'mysql') {
+            try {
+                await adapter.raw(`ALTER TABLE \`${this.name}\` DROP INDEX \`${columnName}\``);
+                console.log(`Dropped unique index ${columnName} from database.`);
+            }
+            catch (err) {
+                console.error(`Failed to drop unique index from database: ${err.message}`);
+            }
         }
     }
     async dropPrimaryKey(columnName) {
+        // 1. Update schema file
         const fs = await import('fs');
         const path = await import('path');
         const schemasDir = path.resolve(process.cwd(), 'src/schemas');
@@ -151,7 +262,19 @@ ${fieldsContent}
             const regex = new RegExp(`({ name: '${columnName}', .*?), isPrimary: true(.*})`);
             content = content.replace(regex, '$1$2');
             fs.writeFileSync(schemaFilePath, content);
-            console.log(`Dropped primary key constraint from ${columnName} in schema ${this.name}`);
+            console.log(`Dropped primary key constraint from ${columnName} in schema file.`);
+        }
+        // 2. Execute on Database
+        const { adapter, config } = await this.getAdapter();
+        if (adapter && config) {
+            const quote = (config.type === 'postgres' || config.type === 'sql') ? '"' : '`';
+            try {
+                await adapter.raw(`ALTER TABLE ${quote}${this.name}${quote} DROP PRIMARY KEY`);
+                console.log(`Dropped primary key from database.`);
+            }
+            catch (err) {
+                console.warn(`Failed to drop primary key: ${err.message}. (Some databases require more complex PK drops)`);
+            }
         }
     }
 }
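
Since generateCreateSql() above is deterministic, the DDL it produces for the illustrative 'users' table from the migration sketch earlier (id: int, primary, auto-increment; email: default type) on a MySQL connection can be written out directly. On a 'postgres' or 'sql' connection the backticks become double quotes (and for 'sql', auto-increment renders as SERIAL):

    // Sketch: the string generateCreateSql('mysql') would return for the example columns above.
    const expectedSql =
        'CREATE TABLE IF NOT EXISTS `users` (\n' +
        ' `id` INT PRIMARY KEY AUTO_INCREMENT,\n' +
        ' `email` VARCHAR(255)\n' +
        ')';
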
@@ -22,4 +22,5 @@ export interface DbAdapter {
     addDocument(collectionName: string, data: DocumentData): Promise<string>;
     updateDocument(collectionName: string, docId: string, data: DocumentData): Promise<void>;
     deleteDocument(collectionName: string, docId: string): Promise<void>;
+    raw(query: string, params?: any[]): Promise<any>;
 }
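
With raw() now part of the DbAdapter contract, every adapter (including custom ones) must expose it; this is what lets TableMigrator.exec()/drop() issue DDL without knowing the backend. A consumer-side sketch, assuming the interface is re-exported from the package root:

    import type { DbAdapter } from '@neupgroup/mapper'; // assumption: the root re-exports DbAdapter

    // Sketch: generic helper relying on the new contract. SQL adapters execute the statement,
    // the MongoDB adapter interprets JSON command strings, and the API adapter resolves to null.
    async function runRaw(adapter: DbAdapter, statement: string, params?: any[]): Promise<any> {
        return adapter.raw(statement, params);
    }
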
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@neupgroup/mapper",
-  "version": "1.4.3",
   "description": "Neup.Mapper core library for schema and mapping utilities",
+  "version": "1.5.2",
   "type": "module",
   "main": "dist/index.js",
   "module": "dist/index.js",