@neupgroup/mapper 1.4.2 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -35,6 +35,7 @@ export declare class APIAdapter implements DbAdapter {
  updateDocument(collectionName: string, docId: string, data: DocumentData): Promise<void>;
  deleteDocument(collectionName: string, docId: string): Promise<void>;
  request(method: string, endpoint: string, data?: any, customHeaders?: Record<string, string | string[]>): Promise<any>;
+ raw(query: string, params?: any[]): Promise<any>;
  }
  /**
  * Factory function to create API adapter
@@ -168,6 +168,10 @@ export class APIAdapter {
  headers: customHeaders,
  });
  }
+ async raw(query, params) {
+ // API adapter raw could be a generic request or just return null
+ return null;
+ }
  }
  /**
  * Factory function to create API adapter
@@ -49,6 +49,7 @@ export declare class MongoDBAdapter implements DbAdapter {
  * Get collection statistics
  */
  getStats(collectionName: string): Promise<any>;
+ raw(query: string, params?: any[]): Promise<any>;
  }
  /**
  * Factory function to create MongoDB adapter
@@ -218,6 +218,19 @@ export class MongoDBAdapter {
  const collection = this.db.collection(collectionName);
  return await collection.stats();
  }
+ async raw(query, params) {
+ await this.ensureConnected();
+ try {
+ // Try to parse query as a command object if it's a string
+ const command = typeof query === 'string' ? JSON.parse(query) : query;
+ return await this.db.command(command);
+ }
+ catch (e) {
+ // For migrations, if it's not JSON, we might not be able to do much
+ // but we can at least return the database object for custom logic if params are provided
+ return this.db;
+ }
+ }
  }
  /**
  * Factory function to create MongoDB adapter
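
Usage sketch for the new raw() method (not part of the diff): it assumes an already-connected MongoDBAdapter instance named adapter; the diff only shows that string queries are JSON-parsed and forwarded to db.command().

  // `adapter` is assumed to be a connected MongoDBAdapter; collStats is just an example command.
  // raw() JSON-parses the string and runs it via db.command().
  const stats = await adapter.raw(JSON.stringify({ collStats: 'users' }));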
@@ -10,6 +10,27 @@ if (!fs.existsSync(indexFilePath)) {
  process.exit(0);
  }
  async function run() {
+ // Load connections
+ const connectionDir = path.resolve(process.cwd(), 'src/connection');
+ if (fs.existsSync(connectionDir)) {
+ const { StaticMapper } = await import('../fluent-mapper.js');
+ const connFiles = fs.readdirSync(connectionDir).filter(f => f.endsWith('.ts'));
+ for (const file of connFiles) {
+ const name = file.replace('.ts', '');
+ const filePath = path.resolve(connectionDir, file);
+ try {
+ const mod = await import('file://' + filePath);
+ const config = mod.config;
+ if (config) {
+ console.log(`Loading connection: ${name}`);
+ StaticMapper.makeConnection(name, config.type, config);
+ }
+ }
+ catch (e) {
+ console.warn(`Failed to load connection ${name}: ${e.message}`);
+ }
+ }
+ }
  const content = fs.readFileSync(indexFilePath, 'utf-8');
  const matchMigrations = content.match(/migrations = \[(.*?)\]/s);
  const matchCompleted = content.match(/completed = \[(.*?)\]/s);
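
Sketch of a connection module the loader above would pick up from src/connection/ (not part of the diff): only the exported config name and the type field are implied by the code; the host, user, password, and database keys below are illustrative.

  // src/connection/default.ts  (registered as connection "default", i.e. the file name minus .ts)
  export const config = {
    type: 'mysql',      // read by the loader and passed to StaticMapper.makeConnection()
    host: 'localhost',  // illustrative driver options, passed through as the config object
    user: 'root',
    password: '',
    database: 'app'
  };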
@@ -0,0 +1,3 @@
+ export declare const migrations: string[];
+ export declare const completed: never[];
+ export declare const currentVersion = -1;
@@ -0,0 +1,5 @@
+ export const migrations = [
+ '20260131_132407_users_initial schema'
+ ];
+ export const completed = [];
+ export const currentVersion = -1;
@@ -17,9 +17,13 @@ export declare class ColumnBuilder {
  export declare class TableMigrator {
  private name;
  private columns;
+ private connectionName;
  constructor(name: string);
+ useConnection(name: string): this;
  addColumn(name: string): ColumnBuilder;
  getColumns(): any[];
+ private getAdapter;
+ private generateCreateSql;
  exec(): Promise<void>;
  drop(): Promise<void>;
  dropColumn(columnName: string): Promise<void>;
package/dist/migrator.js CHANGED
@@ -56,6 +56,11 @@ export class TableMigrator {
  constructor(name) {
  this.name = name;
  this.columns = [];
+ this.connectionName = 'default';
+ }
+ useConnection(name) {
+ this.connectionName = name;
+ return this;
  }
  addColumn(name) {
  const col = new ColumnBuilder(name);
@@ -65,28 +70,78 @@ export class TableMigrator {
  getColumns() {
  return this.columns.map(c => c.getDefinition());
  }
+ async getAdapter() {
+ const { StaticMapper } = await import('./fluent-mapper.js');
+ try {
+ const conn = StaticMapper.connection(this.connectionName);
+ // Accessing internal mapper instance safely
+ const adapter = conn.mapper.getConnections().getAdapter(this.connectionName);
+ const config = conn.mapper.getConnections().get(this.connectionName);
+ return { adapter, config };
+ }
+ catch (e) {
+ console.error(`Failed to get adapter for connection: ${this.connectionName}`);
+ return { adapter: null, config: null };
+ }
+ }
+ generateCreateSql(type) {
+ const columns = this.getColumns();
+ let sql = `CREATE TABLE IF NOT EXISTS \`${this.name}\` (\n`;
+ const columnDefs = columns.map(col => {
+ let def = ` \`${col.name}\` `;
+ // Map types
+ let dbType = 'VARCHAR(255)';
+ if (col.type === 'int')
+ dbType = 'INT';
+ else if (col.type === 'number')
+ dbType = 'DECIMAL(10,2)';
+ else if (col.type === 'boolean')
+ dbType = 'TINYINT(1)';
+ else if (col.type === 'date')
+ dbType = 'DATETIME';
+ def += dbType;
+ if (col.notNull)
+ def += ' NOT NULL';
+ if (col.isPrimary)
+ def += ' PRIMARY KEY';
+ if (col.autoIncrement) {
+ if (type === 'mysql')
+ def += ' AUTO_INCREMENT';
+ else if (type === 'sqlite')
+ def += ' AUTOINCREMENT';
+ else if (type === 'sql')
+ def += ' SERIAL'; // Postgres handled differently usually but okay for simple
+ }
+ if (col.defaultValue !== undefined) {
+ def += ` DEFAULT ${typeof col.defaultValue === 'string' ? `'${col.defaultValue}'` : col.defaultValue}`;
+ }
+ if (col.isUnique && !col.isPrimary)
+ def += ' UNIQUE';
+ return def;
+ });
+ sql += columnDefs.join(',\n');
+ // Foreign keys
+ columns.filter(c => c.foreignKey).forEach(c => {
+ sql += `,\n FOREIGN KEY (\`${c.name}\`) REFERENCES \`${c.foreignKey.table}\`(\`${c.foreignKey.column}\`)`;
+ });
+ sql += '\n)';
+ if (type === 'postgres' || type === 'sql') {
+ // Replace backticks with double quotes for Postgres
+ sql = sql.replace(/`/g, '"');
+ }
+ return sql;
+ }
  async exec() {
- // This is where we trigger schema file update
+ // 1. Update schema file
  const fs = await import('fs');
  const path = await import('path');
  const schemasDir = path.resolve(process.cwd(), 'src/schemas');
+ if (!fs.existsSync(schemasDir))
+ fs.mkdirSync(schemasDir, { recursive: true });
  const schemaFilePath = path.join(schemasDir, `${this.name}.ts`);
  const columns = this.getColumns();
- let existingSchema = {
- fields: [],
- insertableFields: [],
- updatableFields: [],
- massUpdateable: false,
- massDeletable: false,
- usesConnection: 'default'
- };
- if (fs.existsSync(schemaFilePath)) {
- // Very basic parser/updater.
- // Real implementation would use AST, but for now we'll do simple string replacement or overwrite if it's the first time.
- // For the sake of this demo, we will generate the fields array.
- }
  const fieldsContent = columns.map(col => {
- return ` { name: '${col.name}', type: '${col.type}'${col.isPrimary ? ', isPrimary: true' : ''}${col.autoIncrement ? ', autoIncrement: true' : ''}${col.notNull ? ', notNull: true' : ''}${col.isUnique ? ', isUnique: true' : ''} }`;
+ return ` { name: '${col.name}', type: '${col.type}'${col.isPrimary ? ', isPrimary: true' : ''}${col.autoIncrement ? ', autoIncrement: true' : ''}${col.notNull ? ', notNull: true' : ''}${col.isUnique ? ', isUnique: true' : ''}${col.defaultValue !== undefined ? `, defaultValue: ${JSON.stringify(col.defaultValue)}` : ''} }`;
  }).join(',\n');
  const schemaContent = `
  export const ${this.name} = {
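
For reference, given two illustrative columns (an auto-incrementing int primary key id and a not-null, unique email), the new generateCreateSql('mysql') above would return the following string; the column set is an example, not taken from the diff:

  CREATE TABLE IF NOT EXISTS `users` (
   `id` INT PRIMARY KEY AUTO_INCREMENT,
   `email` VARCHAR(255) NOT NULL UNIQUE
  )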
@@ -97,51 +152,107 @@ ${fieldsContent}
  updatableFields: [${columns.filter(c => !c.autoIncrement && !c.isPrimary).map(c => `'${c.name}'`).join(', ')}],
  massUpdateable: false,
  massDeletable: false,
- usesConnection: 'default'
+ usesConnection: '${this.connectionName}'
  };
  `;
  fs.writeFileSync(schemaFilePath, schemaContent.trim() + '\n');
- console.log(`Updated schema: ${schemaFilePath}`);
+ console.log(`Updated schema file: ${schemaFilePath}`);
+ // 2. Execute on Database
+ const { adapter, config } = await this.getAdapter();
+ if (adapter && config) {
+ console.log(`Executing migration on database (${config.type})...`);
+ const sql = this.generateCreateSql(config.type);
+ try {
+ await adapter.raw(sql);
+ console.log(`Successfully executed SQL on database.`);
+ }
+ catch (err) {
+ console.error(`Database execution failed: ${err.message}`);
+ throw err;
+ }
+ }
+ else {
+ console.log(`Skipping database execution: Connection "${this.connectionName}" not found or adapter not attached.`);
+ }
  }
  async drop() {
+ // 1. Remove schema file
  const fs = await import('fs');
  const path = await import('path');
  const schemasDir = path.resolve(process.cwd(), 'src/schemas');
  const schemaFilePath = path.join(schemasDir, `${this.name}.ts`);
  if (fs.existsSync(schemaFilePath)) {
  fs.unlinkSync(schemaFilePath);
- console.log(`Deleted schema: ${schemaFilePath}`);
+ console.log(`Deleted schema file: ${schemaFilePath}`);
+ }
+ // 2. Execute on Database
+ const { adapter, config } = await this.getAdapter();
+ if (adapter && config) {
+ const quote = (config.type === 'postgres' || config.type === 'sql') ? '"' : '`';
+ const sql = `DROP TABLE IF EXISTS ${quote}${this.name}${quote}`;
+ try {
+ await adapter.raw(sql);
+ console.log(`Dropped table ${this.name} from database.`);
+ }
+ catch (err) {
+ console.error(`Failed to drop table from database: ${err.message}`);
+ }
  }
  }
  async dropColumn(columnName) {
+ // 1. Update schema file
  const fs = await import('fs');
  const path = await import('path');
  const schemasDir = path.resolve(process.cwd(), 'src/schemas');
  const schemaFilePath = path.join(schemasDir, `${this.name}.ts`);
  if (fs.existsSync(schemaFilePath)) {
- // Very simple implementation: filter out the line with the column
  const content = fs.readFileSync(schemaFilePath, 'utf-8');
  const lines = content.split('\n');
  const filteredLines = lines.filter(line => !line.includes(`name: '${columnName}'`));
  fs.writeFileSync(schemaFilePath, filteredLines.join('\n'));
- console.log(`Dropped column ${columnName} from schema ${this.name}`);
+ console.log(`Dropped column ${columnName} from schema file.`);
+ }
+ // 2. Execute on Database
+ const { adapter, config } = await this.getAdapter();
+ if (adapter && config) {
+ const quote = (config.type === 'postgres' || config.type === 'sql') ? '"' : '`';
+ const sql = `ALTER TABLE ${quote}${this.name}${quote} DROP COLUMN ${quote}${columnName}${quote}`;
+ try {
+ await adapter.raw(sql);
+ console.log(`Dropped column ${columnName} from database.`);
+ }
+ catch (err) {
+ console.error(`Failed to drop column from database: ${err.message}`);
+ }
  }
  }
  async dropUnique(columnName) {
+ // 1. Update schema file
  const fs = await import('fs');
  const path = await import('path');
  const schemasDir = path.resolve(process.cwd(), 'src/schemas');
  const schemaFilePath = path.join(schemasDir, `${this.name}.ts`);
  if (fs.existsSync(schemaFilePath)) {
  let content = fs.readFileSync(schemaFilePath, 'utf-8');
- // Remove ", isUnique: true" from the line containing the column
  const regex = new RegExp(`({ name: '${columnName}', .*?), isUnique: true(.*})`);
  content = content.replace(regex, '$1$2');
  fs.writeFileSync(schemaFilePath, content);
- console.log(`Dropped unique constraint from ${columnName} in schema ${this.name}`);
+ console.log(`Dropped unique constraint from ${columnName} in schema file.`);
+ }
+ // 2. Execute on Database (Database specific, simplified for MySQL)
+ const { adapter, config } = await this.getAdapter();
+ if (adapter && config && config.type === 'mysql') {
+ try {
+ await adapter.raw(`ALTER TABLE \`${this.name}\` DROP INDEX \`${columnName}\``);
+ console.log(`Dropped unique index ${columnName} from database.`);
+ }
+ catch (err) {
+ console.error(`Failed to drop unique index from database: ${err.message}`);
+ }
  }
  }
  async dropPrimaryKey(columnName) {
+ // 1. Update schema file
  const fs = await import('fs');
  const path = await import('path');
  const schemasDir = path.resolve(process.cwd(), 'src/schemas');
@@ -151,7 +262,19 @@ ${fieldsContent}
  const regex = new RegExp(`({ name: '${columnName}', .*?), isPrimary: true(.*})`);
  content = content.replace(regex, '$1$2');
  fs.writeFileSync(schemaFilePath, content);
- console.log(`Dropped primary key constraint from ${columnName} in schema ${this.name}`);
+ console.log(`Dropped primary key constraint from ${columnName} in schema file.`);
+ }
+ // 2. Execute on Database
+ const { adapter, config } = await this.getAdapter();
+ if (adapter && config) {
+ const quote = (config.type === 'postgres' || config.type === 'sql') ? '"' : '`';
+ try {
+ await adapter.raw(`ALTER TABLE ${quote}${this.name}${quote} DROP PRIMARY KEY`);
+ console.log(`Dropped primary key from database.`);
+ }
+ catch (err) {
+ console.warn(`Failed to drop primary key: ${err.message}. (Some databases require more complex PK drops)`);
+ }
  }
  }
  }
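
Putting the migrator changes together, a migration using the new connection-aware API might look like this sketch (not part of the diff; the exact column configuration is an assumption, since the diff only shows that addColumn() returns a ColumnBuilder):

  // Hypothetical migration body using the new useConnection() API.
  const table = new TableMigrator('users').useConnection('default');
  table.addColumn('id');     // returns a ColumnBuilder; further column setup is not shown in the diff
  table.addColumn('email');
  await table.exec();        // writes src/schemas/users.ts and runs CREATE TABLE via adapter.raw()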
@@ -22,4 +22,5 @@ export interface DbAdapter {
  addDocument(collectionName: string, data: DocumentData): Promise<string>;
  updateDocument(collectionName: string, docId: string, data: DocumentData): Promise<void>;
  deleteDocument(collectionName: string, docId: string): Promise<void>;
+ raw(query: string, params?: any[]): Promise<any>;
  }
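
The diff does not show how the SQL adapters implement the new raw() member of DbAdapter; a MySQL-flavored version would presumably look something like this sketch (this.pool as a mysql2 promise pool is an assumption, not taken from the diff):

  async raw(query: string, params?: any[]): Promise<any> {
    // Forward the raw SQL and optional bind parameters to the driver (mysql2 promise API).
    const [rows] = await this.pool.query(query, params ?? []);
    return rows;
  }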
@@ -0,0 +1,8 @@
+ export declare const users: {
+ fields: never[];
+ insertableFields: never[];
+ updatableFields: never[];
+ massUpdateable: boolean;
+ massDeletable: boolean;
+ usesConnection: string;
+ };
@@ -0,0 +1,10 @@
+ export const users = {
+ fields: [
+ // { name: 'id', type: 'int', isPrimary: true, autoIncrement: true }
+ ],
+ insertableFields: [],
+ updatableFields: [],
+ massUpdateable: false,
+ massDeletable: false,
+ usesConnection: 'default' // Update this
+ };
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@neupgroup/mapper",
- "version": "1.4.2",
+ "version": "1.5.0",
  "description": "Neup.Mapper core library for schema and mapping utilities",
  "type": "module",
  "main": "dist/index.js",
@@ -31,5 +31,11 @@
  "devDependencies": {
  "@types/node": "^24.10.1",
  "typescript": "^5.9.3"
+ },
+ "dependencies": {
+ "mongodb": "^7.0.0",
+ "mysql2": "^3.16.2",
+ "pg": "^8.18.0",
+ "sqlite3": "^5.1.7"
  }
  }