lamix 4.2.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +5 -0
- package/README.md +118 -0
- package/artisan.js +613 -0
- package/examples/Alternative.md +23 -0
- package/examples/CRUD.md +51 -0
- package/examples/Post.md +28 -0
- package/examples/PostController.md +93 -0
- package/examples/Query Builder.md +55 -0
- package/examples/Relations.md +16 -0
- package/examples/Role.md +26 -0
- package/examples/RoleController.md +65 -0
- package/examples/Usages.md +132 -0
- package/examples/User.md +40 -0
- package/examples/UserController.md +98 -0
- package/index.d.ts +580 -0
- package/index.js +3718 -0
- package/package.json +63 -0
package/artisan.js
ADDED
@@ -0,0 +1,613 @@
#!/usr/bin/env node

const fs = require('fs');
const path = require('path');
require('dotenv').config();
const { DB } = require('lamix');
const chalk = require('chalk');
const args = process.argv.slice(2);

// ------------------ LOGGER ------------------

const log = {
  success: (msg) => console.log(chalk.green(msg)),
  warn: (msg) => console.log(chalk.yellow(msg)),
  error: (msg) => console.log(chalk.red(msg)),
  info: (msg) => console.log(chalk.blue(msg))
};

// ------------------ CONSTANTS ------------------

const MIGRATIONS_DIR = './database/migrations';
const MIGRATIONS_TABLE = 'migrations';
const SEEDERS_DIR = './database/seeders';
const SEEDS_TABLE = 'seeds';

function getTimestamp() {
  const now = new Date();
  return now.toISOString().replace(/[-:T.Z]/g, '').slice(0, 14);
}

// ------------------ SCHEMA BUILDER ------------------

DB.schema = {
  createTable: async (tableName, callback) => {
    // --- Check if table exists dynamically ---
    async function tableExists(name) {
      if (DB.driver === 'mysql') {
        const rows = await DB.raw('SHOW TABLES LIKE ?', [name]);
        return rows.length > 0;
      } else if (DB.driver === 'sqlite') {
        const rows = await DB.raw(
          'SELECT name FROM sqlite_master WHERE type="table" AND name=?',
          [name]
        );
        return rows.length > 0;
      } else if (DB.driver === 'pg') {
        const rows = await DB.raw(
          `SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_name=$1`,
          [name]
        );
        return rows.length > 0;
      }
      return false;
    }

    if (await tableExists(tableName)) {
      log.warn(`⚠️ Skipping: table '${tableName}' already exists.`);
      return;
    }

    // --- Columns collection ---
    const columns = [];

    // --- Column wrapper for chainable modifiers ---
    function wrapColumn(name, type) {
      const col = { name, sql: `${DB.driver === 'pg' ? `"${name}"` : `\`${name}\``} ${type}` };
      columns.push(col);

      return {
        notNullable() {
          col.sql += ' NOT NULL';
          return this;
        },
        nullable() {
          col.sql += ' NULL';
          return this;
        },
        defaultTo(value) {
          if (value === null) col.sql += ' DEFAULT NULL';
          else if (typeof value === 'string' && !/^CURRENT_TIMESTAMP$/i.test(value)) col.sql += ` DEFAULT '${value}'`;
          else col.sql += ` DEFAULT ${value}`;
          return this;
        },
        unsigned() {
          if (DB.driver === 'mysql') col.sql += ' UNSIGNED';
          return this;
        },
        unique() {
          col.sql += ' UNIQUE';
          return this;
        },
        primary() {
          col.sql += ' PRIMARY KEY';
          return this;
        },
        autoIncrement() {
          if (DB.driver === 'mysql') col.sql += ' AUTO_INCREMENT';
          if (DB.driver === 'pg') col.sql = `"${name}" SERIAL`;
          if (DB.driver === 'sqlite') col.sql = `"${name}" INTEGER PRIMARY KEY AUTOINCREMENT`;
          return this;
        },
        comment(text) {
          if (DB.driver === 'mysql') col.sql += ` COMMENT '${text}'`;
          return this;
        },
        after(columnName) {
          if (DB.driver === 'mysql') col.sql += ` AFTER \`${columnName}\``;
          return this;
        }
      };
    }


    // --- Column type helper ---
    function typeMapping(type) {
      switch (DB.driver) {
        case 'mysql':
          return {
            increments: 'INT AUTO_INCREMENT PRIMARY KEY',
            string: 'VARCHAR(255)',
            integer: 'INT',
            boolean: 'TINYINT(1)',
            text: 'TEXT',
            float: 'FLOAT',
            decimal: 'DECIMAL(10,2)',
            date: 'DATE',
            dateTime: 'DATETIME',
            json: 'JSON',
            binary: 'BLOB'
          }[type];
        case 'sqlite':
          return {
            increments: 'INTEGER PRIMARY KEY AUTOINCREMENT',
            string: 'TEXT',
            integer: 'INTEGER',
            boolean: 'INTEGER',
            text: 'TEXT',
            float: 'REAL',
            decimal: 'REAL',
            date: 'TEXT',
            dateTime: 'TEXT',
            json: 'TEXT',
            binary: 'BLOB'
          }[type];
        case 'pg':
          return {
            increments: 'SERIAL PRIMARY KEY',
            string: 'VARCHAR(255)',
            integer: 'INTEGER',
            boolean: 'BOOLEAN',
            text: 'TEXT',
            float: 'REAL',
            decimal: 'NUMERIC(10,2)',
            date: 'DATE',
            dateTime: 'TIMESTAMP',
            json: 'JSONB',
            binary: 'BYTEA'
          }[type];
      }
    }

    // --- Table object with types and timestamps ---
    const table = {
      increments: (name) => wrapColumn(name, typeMapping('increments')),
      string: (name) => wrapColumn(name, typeMapping('string')),
      integer: (name) => wrapColumn(name, typeMapping('integer')),
      boolean: (name) => wrapColumn(name, typeMapping('boolean')),
      text: (name) => wrapColumn(name, typeMapping('text')),
      float: (name) => wrapColumn(name, typeMapping('float')),
      decimal: (name) => wrapColumn(name, typeMapping('decimal')),
      date: (name) => wrapColumn(name, typeMapping('date')),
      dateTime: (name) => wrapColumn(name, typeMapping('dateTime')),
      json: (name) => wrapColumn(name, typeMapping('json')),
      binary: (name) => wrapColumn(name, typeMapping('binary')),
      timestamps: () => {
        if (DB.driver === 'pg') {
          columns.push({ sql: `"created_at" TIMESTAMP DEFAULT NOW()` });
          columns.push({ sql: `"updated_at" TIMESTAMP DEFAULT NOW()` });
        } else if (DB.driver === 'sqlite') {
          columns.push({ sql: `"created_at" TEXT DEFAULT (DATETIME('now'))` });
          columns.push({ sql: `"updated_at" TEXT DEFAULT (DATETIME('now'))` });
        } else {
          columns.push({ sql: '`created_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP' });
          columns.push({ sql: '`updated_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP' });
        }
      }
    };

    await callback(table);

    // --- Build SQL ---
    const sql = `CREATE TABLE ${DB.driver === 'pg' ? `"${tableName}"` : `\`${tableName}\``} (\n  ${columns.map(c => c.sql).join(',\n  ')}\n)`;

    return await DB.raw(sql);
  },

  dropTableIfExists: async (tableName) => {
    const sql = DB.driver === 'pg' || DB.driver === 'sqlite'
      ? `DROP TABLE IF EXISTS "${tableName}"`
      : `DROP TABLE IF EXISTS \`${tableName}\``;
    return await DB.raw(sql);
  }
};

// ------------------ MIGRATION GENERATOR ------------------

function parseFields(fieldArgs) {
  const typeMap = {
    string: 'string',
    integer: 'integer',
    boolean: 'boolean',
    text: 'text',
    date: 'date',
    datetime: 'dateTime',
    float: 'float',
    decimal: 'decimal',
    json: 'json',
    binary: 'binary'
  };

  return fieldArgs.map(arg => {
    const [name, type] = arg.split(':');
    const fn = typeMap[type];
    if (!name || !fn) throw new Error(`Invalid field: ${arg}`);
    return `table.${fn}('${name}');`;
  });
}

function generateMigration(table, fields) {
  const timestamp = getTimestamp();
  const fileName = `${timestamp}_create_${table}_table.js`;
  const filePath = path.join(MIGRATIONS_DIR, fileName);

  const content = `
// Migration: Create ${table} table

module.exports = {
  up: async function (db) {
    await db.schema.createTable('${table}', (table) => {
      table.increments('id');
${fields.map(f => `      ${f}`).join('\n')}
      table.timestamps();
    });
  },

  down: async function (db) {
    await db.schema.dropTableIfExists('${table}');
  }
};
`;

  fs.mkdirSync(MIGRATIONS_DIR, { recursive: true });
  fs.writeFileSync(filePath, content.trimStart());
  log.success(`✅ Migration generated: ${filePath}`);
}

// ------------------ MIGRATIONS: UP / ROLLBACK ------------------

async function ensureMigrationsTable() {
  await DB.raw(`CREATE TABLE IF NOT EXISTS \`${MIGRATIONS_TABLE}\` (
    id INT AUTO_INCREMENT PRIMARY KEY,
    name VARCHAR(255) UNIQUE,
    run_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
  )`);
}

async function getAppliedMigrations() {
  await ensureMigrationsTable();
  const rows = await DB.raw(`SELECT name FROM \`${MIGRATIONS_TABLE}\``);
  return new Set(rows.map(r => r.name));
}

async function runMigrations() {
  DB.initFromEnv();
  await DB.connect();

  const applied = await getAppliedMigrations();
  const files = fs.readdirSync(MIGRATIONS_DIR)
    .filter(f => f.endsWith('.js'))
    .sort();

  // Helper to extract table name from migration file
  const extractTableName = (filePath) => {
    const content = fs.readFileSync(filePath, 'utf-8');
    const match = content.match(/db\.schema\.createTable\s*\(\s*['"]([^'"]+)['"]/);
    return match ? match[1] : null;
  };

  for (const file of files) {
    const migrationPath = path.resolve(MIGRATIONS_DIR, file);
    const migration = require(migrationPath);

    // Auto-detect tableName if not defined
    if (!migration.tableName) {
      migration.tableName = extractTableName(migrationPath);
    }

    let shouldRun = true;

    if (applied.has(file)) {
      if (migration.tableName) {
        const exists = await tableExists(migration.tableName);
        if (!exists) {
          log.warn(`⚠️ Migration '${file}' applied but table '${migration.tableName}' missing. Re-running...`);
        } else {
          log.info(`✅ Migration '${file}' already applied and table exists. Skipping.`);
          shouldRun = false;
        }
      } else {
        log.warn(`⚠️ Migration '${file}' applied but table unknown. Running migration.`);
      }
    }

    if (!shouldRun) continue;

    log.info(`⏳ Running migration: ${file}`);
    await migration.up(DB);

    if (!applied.has(file)) {
      await DB.raw(`INSERT INTO \`${MIGRATIONS_TABLE}\` (name) VALUES (?)`, [file]);
    }

    log.success(`✅ Applied: ${file}`);
  }

  await DB.end();
}

// ------------------ helper ------------------

async function tableExists(tableName) {
  if (DB.driver === 'mysql') {
    const rows = await DB.raw('SHOW TABLES LIKE ?', [tableName]);
    return rows.length > 0;
  } else if (DB.driver === 'sqlite') {
    const rows = await DB.raw(
      'SELECT name FROM sqlite_master WHERE type="table" AND name=?',
      [tableName]
    );
    return rows.length > 0;
  } else if (DB.driver === 'pg') {
    const rows = await DB.raw(
      `SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_name=$1`,
      [tableName]
    );
    return rows.length > 0;
  }
  return false;
}


async function rollbackLastMigration() {
  DB.initFromEnv();
  await DB.connect();

  const [last] = await DB.raw(`SELECT name FROM \`${MIGRATIONS_TABLE}\` ORDER BY run_at DESC LIMIT 1`);

  if (!last) {
    log.info('ℹ️ No migrations to rollback.');
    return;
  }

  const file = last.name;
  const migration = require(path.resolve(MIGRATIONS_DIR, file));

  log.info(`⏳ Rolling back: ${file}`);
  await migration.down(DB);
  await DB.raw(`DELETE FROM \`${MIGRATIONS_TABLE}\` WHERE name = ?`, [file]);
  log.success(`✅ Rolled back: ${file}`);

  await DB.end();
}

// ------------------ SEEDERS ------------------

function generateSeeder(name) {
  const timestamp = getTimestamp();
  const fileName = `${timestamp}_${name}.js`;
  const filePath = path.join(SEEDERS_DIR, fileName);

  const content = `
// Seeder: ${name}

module.exports = {
  seed: async function (db) {
    // Example:
    // await db.raw("INSERT INTO users (name) VALUES (?)", ['Example']);
  }
};
`;

  fs.mkdirSync(SEEDERS_DIR, { recursive: true });
  fs.writeFileSync(filePath, content.trimStart());
  log.success(`✅ Seeder generated: ${filePath}`);
}

async function ensureSeedsTable() {
  await DB.raw(`CREATE TABLE IF NOT EXISTS \`${SEEDS_TABLE}\` (
    id INT AUTO_INCREMENT PRIMARY KEY,
    name VARCHAR(255) UNIQUE,
    run_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
  )`);
}

async function getAppliedSeeds() {
  const rows = await DB.raw(`SELECT name FROM \`${SEEDS_TABLE}\``);
  return new Set(rows.map(r => r.name));
}

async function runSeeders({ refresh = false, only = null } = {}) {
  DB.initFromEnv();
  await DB.connect();
  await ensureSeedsTable();

  const files = fs.readdirSync(SEEDERS_DIR).filter(f => f.endsWith('.js')).sort();
  let targets = files;

  if (only) {
    if (!files.includes(only)) {
      log.warn(`⚠️ Seed file not found: ${only}`);
      await DB.end();
      return;
    }
    targets = [only];
  }

  if (refresh) {
    await DB.raw(`DELETE FROM \`${SEEDS_TABLE}\``);
    log.info(`🔁 Refreshing all seeds...`);
  } else {
    const applied = await getAppliedSeeds();
    targets = targets.filter(f => !applied.has(f));
  }

  if (!targets.length) {
    log.success('✅ No seeders to run.');
    await DB.end();
    return;
  }

  for (const file of targets) {
    const seeder = require(path.resolve(SEEDERS_DIR, file));

    if (typeof seeder.seed !== 'function') {
      log.warn(`⚠️ Skipping invalid seeder: ${file}`);
      continue;
    }

    try {
      log.info(`🌱 Running seeder: ${file}`);
      await seeder.seed(DB);
      await DB.raw(`INSERT INTO \`${SEEDS_TABLE}\` (name) VALUES (?)`, [file]);
      log.success(`✅ Seeded: ${file}`);
    } catch (err) {
      log.error(`❌ Seeder failed: ${file}\n${err}`);
      break;
    }
  }

  await DB.end();
}

// ------------------ MODEL GENERATOR ------------------

function generateModel(name, fieldArgs = []) {
  try {
    if (!name || typeof name !== 'string') {
      log.error('❌ Invalid model name.');
      process.exit(1);
    }

    const modelDir = './models';
    fs.mkdirSync(modelDir, { recursive: true });

    let tableName = name.toLowerCase() + 's';

    const tableOptionIndex = fieldArgs.findIndex(arg => arg.startsWith('--table='));

    if (tableOptionIndex !== -1) {
      const parts = fieldArgs[tableOptionIndex].split('=');

      if (!parts[1]) {
        log.error('❌ Invalid --table option. Usage: --table=table_name');
        process.exit(1);
      }

      tableName = parts[1];
      fieldArgs.splice(tableOptionIndex, 1);
    }

    const fields = fieldArgs
      .filter(arg => arg.includes(':'))
      .map(arg => arg.split(':')[0])
      .filter(Boolean);

    const fillableArray = fields.length ? `['${fields.join("', '")}']` : `[]`;

    const className = name.charAt(0).toUpperCase() + name.slice(1);
    const filePath = path.join(modelDir, `${className}.js`);

    const content = `
// ${className} Model
const { BaseModel } = require('lamix');

class ${className} extends BaseModel {
  static table = '${tableName}';
  static primaryKey = 'id';
  static timestamps = true;
  static fillable = ${fillableArray};
}

module.exports = ${className};
`;

    fs.writeFileSync(filePath, content.trimStart());
    log.success(`✅ Model generated successfully: ${filePath}`);

  } catch (err) {
    log.error(`❌ Failed to generate model: ${err.message}`);
    process.exit(1);
  }
}

// ------------------ CLI HANDLER ------------------

(async () => {
  const command = args[0];

  switch (command) {
    case 'make:migration': {
      const table = args[1];
      const fields = args.slice(2);
      if (!table || fields.length === 0) {
        log.error('❌ Usage: make:migration <table> <field:type>...');
        process.exit(1);
      }
      generateMigration(table, parseFields(fields));
      break;
    }

    case 'migrate':
      await runMigrations();
      break;

    case 'migrate:rollback':
      await rollbackLastMigration();
      break;

    case 'make:seeder': {
      const seedName = args[1];
      if (!seedName) {
        log.error('❌ Usage: make:seeder <name>');
        process.exit(1);
      }
      generateSeeder(seedName);
      break;
    }

    case 'db:seed':
      await runSeeders();
      break;

    case 'db:seed:refresh':
      await runSeeders({ refresh: true });
      break;

    case 'db:seed:only': {
      const onlySeed = args[1];
      if (!onlySeed) {
        log.error('❌ Usage: db:seed:only <filename.js>');
        process.exit(1);
      }
      await runSeeders({ only: onlySeed });
      break;
    }

    case 'make:model': {
      const modelName = args[1];
      const modelFields = args.slice(2);

      if (!modelName) {
        log.error('❌ Usage: make:model <ModelName> [field:type ...]');
        process.exit(1);
      }

      generateModel(modelName, modelFields);
      break;
    }

    default:
      log.info(`
📦 CLI Migration & Seeder Tool

Usage:
  npm run artisan --
🔹 Migration Commands:
  npm run artisan -- make:migration <TableName> [columns]
  npm run artisan -- migrate
  npm run artisan -- migrate:rollback

🔹 Seeder Commands:
  npm run artisan -- make:seeder <name>
  npm run artisan -- db:seed
  npm run artisan -- db:seed:refresh
  npm run artisan -- db:seed:only <filename.js>

🔹 Model Generator:
  npm run artisan -- make:model <name>
  npm run artisan -- make:model <ModelName> [field:type ...]
  npm run artisan -- make:model <name> name:string price:decimal stock:integer
`);
  }
})();
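
For reference, running `npm run artisan -- make:migration posts title:string body:text user_id:integer` against the generator above would emit a file roughly like the sketch below (assembled from the `generateMigration` template in artisan.js; the timestamped filename and the chosen fields are illustrative, not part of this package diff):

```js
// database/migrations/20240101120000_create_posts_table.js (illustrative name)
// Migration: Create posts table

module.exports = {
  up: async function (db) {
    await db.schema.createTable('posts', (table) => {
      table.increments('id');   // mapped to the driver-specific auto-increment type
      table.string('title');
      table.text('body');
      table.integer('user_id');
      table.timestamps();       // adds created_at / updated_at with driver-specific defaults
    });
  },

  down: async function (db) {
    await db.schema.dropTableIfExists('posts');
  }
};
```

On `migrate`, `runMigrations()` requires this file, calls `up(DB)`, and records the filename in the `migrations` table so it is skipped on subsequent runs.
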
package/examples/Alternative.md
ADDED
@@ -0,0 +1,23 @@
const { name, status } = req.body;

const category = await BlogCategory.find(req.params.id);

const existingCategory = await BlogCategory.query()
  .where('name', name)
  .whereNot('id', category.id)
  .first();

if (existingCategory) {
  req.flash('error', 'Category name already exists.');
  return res.redirect('/blogcategory');
}

const { value } = req.body;

const categorry = await Category.find(req.params.id);
const existCategory = await Category.findBy('name', value);

if (existCategory && existCategory.id !== categorry.id) {
  req.flash('error', 'Category name already exists.');
  return res.redirect('/category');
}
package/examples/CRUD.md
ADDED
@@ -0,0 +1,51 @@
const User = require('./models/User');

// Create
const user = await User.create({
  name: 'Alice',
  email: 'alice@example.com',
  password: 'secret'
});

// Update
user.name = 'Alice Smith';
await user.update();

// Find
const user1 = await User.find(1);
const user2 = await User.findOrFail(param.id);

// Query with where
const someUsers = await User.where('email', 'alice@example.com').get();

// First / firstOrFail
const firstUser = await User.where('name', 'Alice').first();
const firstOrFail = await User.where('name', 'Alice').firstOrFail();

// Find by field
const user = await User.findBy('email', 'jane@example.com');

// Check hashed password
const isValid = await user.checkPassword('password');

// Delete / soft delete
await user.delete();

// Destroy multiple
await user.destroy();

// Update using fill
await user.fill({ body });
await user.save();

// OR update without using fill
await User.update({ body });

// Restore
await user.restore();

// List all
const allUsers = await User.all();

// With trashed
const withTrashed = await User.withTrashed().where('id', 1).first();
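
The `param.id` and bare `body` references above suggest these calls are meant to live inside a web request handler. A minimal sketch, assuming an Express-style controller (Express, the route wiring, and the `/users` redirect are assumptions, not part of this diff):

```js
// Hypothetical Express-style handler wrapping the model calls shown above.
const User = require('./models/User');

async function updateUser(req, res) {
  // findOrFail with the route parameter spelled out explicitly.
  const user = await User.findOrFail(req.params.id);

  // fill() + save(), as in the "Update using fill" example, with an explicit payload.
  await user.fill({ name: req.body.name, email: req.body.email });
  await user.save();

  return res.redirect('/users');
}

module.exports = { updateUser };
```
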
package/examples/Post.md
ADDED
@@ -0,0 +1,28 @@
const { BaseModel } = require('lamix');

class Post extends BaseModel {
  static table = 'posts';
  static primaryKey = 'id';
  static timestamps = true;
  static softDeletes = true; // Optional: omit if you don't need soft deletes
  static fillable = ['title', 'body', 'user_id'];
  static rules = {
    name: 'required|string',
    body: 'nullable|string',
    user_id: 'nullable|integer'
  };

  user() {
    const User = require('./User');
    return this.belongsTo(User, 'user_id', 'id').onDelete('detach');
  }

  categories() {
    const Category = require('./Category');
    return this.belongsToMany(Category)
      .withPivot("featured")
      .withTimestamps().onDelete('detach');
  }
}

module.exports = Post;
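
To round out the workflow, a seeder created by `make:seeder` and filled in by hand might look like the sketch below. It simply uncomments the `db.raw` insert that the generated stub in artisan.js itself suggests; the timestamped filename is illustrative.

```js
// database/seeders/20240101120000_demo_users.js (illustrative name)
// Seeder: demo_users

module.exports = {
  seed: async function (db) {
    // Same raw-insert pattern as the commented example in the generated stub.
    await db.raw("INSERT INTO users (name) VALUES (?)", ['Example']);
  }
};
```

`db:seed` then runs it once and records the filename in the `seeds` table, while `db:seed:refresh` clears that table and replays every seeder.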