offbyt 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/cli/index.js +2 -0
- package/cli.js +206 -0
- package/core/detector/detectAxios.js +107 -0
- package/core/detector/detectFetch.js +148 -0
- package/core/detector/detectForms.js +55 -0
- package/core/detector/detectSocket.js +341 -0
- package/core/generator/generateControllers.js +17 -0
- package/core/generator/generateModels.js +25 -0
- package/core/generator/generateRoutes.js +17 -0
- package/core/generator/generateServer.js +18 -0
- package/core/generator/generateSocket.js +160 -0
- package/core/index.js +14 -0
- package/core/ir/IRTypes.js +25 -0
- package/core/ir/buildIR.js +83 -0
- package/core/parser/parseJS.js +26 -0
- package/core/parser/parseTS.js +27 -0
- package/core/rules/relationRules.js +38 -0
- package/core/rules/resourceRules.js +32 -0
- package/core/rules/schemaInference.js +26 -0
- package/core/scanner/scanProject.js +58 -0
- package/deploy/cloudflare.js +41 -0
- package/deploy/cloudflareWorker.js +122 -0
- package/deploy/connect.js +198 -0
- package/deploy/flyio.js +51 -0
- package/deploy/index.js +322 -0
- package/deploy/netlify.js +29 -0
- package/deploy/railway.js +215 -0
- package/deploy/render.js +195 -0
- package/deploy/utils.js +383 -0
- package/deploy/vercel.js +29 -0
- package/index.js +18 -0
- package/lib/generator/advancedCrudGenerator.js +475 -0
- package/lib/generator/crudCodeGenerator.js +486 -0
- package/lib/generator/irBasedGenerator.js +360 -0
- package/lib/ir-builder/index.js +16 -0
- package/lib/ir-builder/irBuilder.js +330 -0
- package/lib/ir-builder/rulesEngine.js +353 -0
- package/lib/ir-builder/templateEngine.js +193 -0
- package/lib/ir-builder/templates/index.js +14 -0
- package/lib/ir-builder/templates/model.template.js +47 -0
- package/lib/ir-builder/templates/routes-generic.template.js +66 -0
- package/lib/ir-builder/templates/routes-user.template.js +105 -0
- package/lib/ir-builder/templates/routes.template.js +102 -0
- package/lib/ir-builder/templates/validation.template.js +15 -0
- package/lib/ir-integration.js +349 -0
- package/lib/modes/benchmark.js +162 -0
- package/lib/modes/configBasedGenerator.js +2258 -0
- package/lib/modes/connect.js +1125 -0
- package/lib/modes/doctorAi.js +172 -0
- package/lib/modes/generateApi.js +435 -0
- package/lib/modes/interactiveSetup.js +548 -0
- package/lib/modes/offline.clean.js +14 -0
- package/lib/modes/offline.enhanced.js +787 -0
- package/lib/modes/offline.js +295 -0
- package/lib/modes/offline.v2.js +13 -0
- package/lib/modes/sync.js +629 -0
- package/lib/scanner/apiEndpointExtractor.js +387 -0
- package/lib/scanner/authPatternDetector.js +54 -0
- package/lib/scanner/frontendScanner.js +642 -0
- package/lib/utils/apiClientGenerator.js +242 -0
- package/lib/utils/apiScanner.js +95 -0
- package/lib/utils/codeInjector.js +350 -0
- package/lib/utils/doctor.js +381 -0
- package/lib/utils/envGenerator.js +36 -0
- package/lib/utils/loadTester.js +61 -0
- package/lib/utils/performanceAnalyzer.js +298 -0
- package/lib/utils/resourceDetector.js +281 -0
- package/package.json +20 -0
- package/templates/.env.template +31 -0
- package/templates/advanced.model.template.js +201 -0
- package/templates/advanced.route.template.js +341 -0
- package/templates/auth.middleware.template.js +87 -0
- package/templates/auth.routes.template.js +238 -0
- package/templates/auth.user.model.template.js +78 -0
- package/templates/cache.middleware.js +34 -0
- package/templates/chat.models.template.js +260 -0
- package/templates/chat.routes.template.js +478 -0
- package/templates/compression.middleware.js +19 -0
- package/templates/database.config.js +74 -0
- package/templates/errorHandler.middleware.js +54 -0
- package/templates/express/controller.ejs +26 -0
- package/templates/express/model.ejs +9 -0
- package/templates/express/route.ejs +18 -0
- package/templates/express/server.ejs +16 -0
- package/templates/frontend.env.template +14 -0
- package/templates/model.template.js +86 -0
- package/templates/package.production.json +51 -0
- package/templates/package.template.json +41 -0
- package/templates/pagination.utility.js +110 -0
- package/templates/production.server.template.js +233 -0
- package/templates/rateLimiter.middleware.js +36 -0
- package/templates/requestLogger.middleware.js +19 -0
- package/templates/response.helper.js +179 -0
- package/templates/route.template.js +130 -0
- package/templates/security.middleware.js +78 -0
- package/templates/server.template.js +91 -0
- package/templates/socket.server.template.js +433 -0
- package/templates/utils.helper.js +157 -0
- package/templates/validation.middleware.js +63 -0
- package/templates/validation.schema.js +128 -0
- package/utils/fileWriter.js +15 -0
- package/utils/logger.js +18 -0
|
@@ -0,0 +1,629 @@
|
|
|
1
|
+
// sync.js - offbyt Sync Mode
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import ora from 'ora';
|
|
5
|
+
import fs from 'fs';
|
|
6
|
+
import { scanFrontendCode } from '../scanner/frontendScanner.js';
|
|
7
|
+
import { extractAllApiEndpoints } from '../scanner/apiEndpointExtractor.js';
|
|
8
|
+
import { generateAdvancedCrudModel, generateAdvancedCrudRoutes } from '../generator/advancedCrudGenerator.js';
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* Sync backend with frontend changes
|
|
12
|
+
* Only adds/updates backend files for new/changed endpoints
|
|
13
|
+
*/
|
|
14
|
+
/**
 * Sync backend with frontend changes.
 * Only adds/updates backend files for new/changed endpoints.
 *
 * Flow:
 *   1. Scan the frontend source tree for API calls and group them by resource.
 *   2. For each resource, create missing model/route files (Sequelize for SQL
 *      databases, Mongoose for MongoDB), or merge newly-seen fields into an
 *      existing model file via regex-based insertion.
 *   3. Register newly created routes in server.js.
 *   4. For SQL databases, regenerate the helper SQL scripts in backend/sql/.
 *
 * @param {string} projectPath - Project root; must contain a `backend/` folder
 *   previously produced by `offbyt generate`.
 * @returns {Promise<void>} Resolves when all files are written; returns early
 *   (after logging) when `backend/` is missing.
 */
export async function syncBackendWithFrontend(projectPath) {
  const backendPath = path.join(projectPath, 'backend');
  if (!fs.existsSync(backendPath)) {
    console.log(chalk.red('⌠Backend folder not found. Please run `offbyt generate` first.'));
    return;
  }

  // Detect database type from backend config (defaults to MongoDB).
  const dbType = detectDatabaseType(backendPath);
  const isSQL = ['mysql', 'postgresql', 'sqlite'].includes(dbType);

  const spinner = ora('🔄 Scanning frontend for API changes...').start();
  const apiCalls = scanFrontendCode(projectPath);
  spinner.succeed(`✅ Found ${apiCalls.length} API calls in frontend`);

  // Extract endpoints grouped by resource.
  // NOTE(review): first argument is an empty path — presumably the pre-scanned
  // apiCalls make it unnecessary; confirm against extractAllApiEndpoints.
  const endpointsByResource = extractAllApiEndpoints('', apiCalls);

  // resources: Map<resourceName, {name, fields, hasAuth}> fed to SQL generation.
  // createdResources: names whose model and/or route file was newly written.
  const resources = new Map();
  const createdResources = [];

  // For each resource, check if model/route exists, else create or update/merge
  for (const resourceName in endpointsByResource) {
    const resource = endpointsByResource[resourceName];
    // resolveModelName prefers an existing model file (singular or plural form).
    const modelName = resolveModelName(backendPath, resourceName);
    const modelFile = path.join(backendPath, 'models', `${modelName}.js`);
    const routeFile = path.join(backendPath, 'routes', `${resourceName}.routes.js`);

    // Store resource info for SQL generation
    resources.set(resourceName, {
      name: resourceName,
      fields: Array.from(resource.fields),
      hasAuth: resource.hasAuth
    });

    // --- MODEL SYNC ---
    if (!fs.existsSync(modelFile)) {
      if (isSQL) {
        // Generate Sequelize model for SQL databases
        const modelCode = generateSequelizeModel(resourceName, Array.from(resource.fields), resource.hasAuth);
        fs.writeFileSync(modelFile, modelCode, 'utf8');
      } else {
        // Generate Mongoose model for MongoDB
        const modelCode = generateAdvancedCrudModel(resourceName, Array.from(resource.fields), resource.hasAuth);
        fs.writeFileSync(modelFile, modelCode, 'utf8');
      }
      console.log(chalk.green(`🆕 Model created: models/${modelName}.js`));
      createdResources.push(resourceName);
    } else {
      // Smart merge: add missing fields to schema at a stable insertion point.
      let modelContent = fs.readFileSync(modelFile, 'utf8');
      // A field is "missing" when no `fieldName:` key appears anywhere in the file.
      const missingFields = Array.from(resource.fields).filter(
        (field) => !new RegExp(`\\b${escapeRegex(field)}\\s*:`).test(modelContent)
      );

      if (missingFields.length > 0) {
        if (isSQL) {
          // Add fields to Sequelize model (nullable STRING is the safe default).
          const fieldBlock = missingFields
            .map((field) => ` ${field}: {\n type: DataTypes.STRING,\n allowNull: true\n },`)
            .join('\n');

          // Find insertion point before timestamps or closing brace.
          // NOTE(review): when neither anchor matches, the fields are silently
          // skipped — consider logging that case.
          if (/isActive:/.test(modelContent)) {
            modelContent = modelContent.replace(/(\n\s*)isActive:/, `\n${fieldBlock}\n$1isActive:`);
          } else if (/},\s*\{\s*timestamps:/.test(modelContent)) {
            modelContent = modelContent.replace(/(},\s*\{\s*timestamps:)/, `\n${fieldBlock}\n $1`);
          }
        } else {
          // Add fields to Mongoose model
          const fieldBlock = missingFields
            .map((field) => ` ${field}: { type: String, trim: true },`)
            .join('\n');

          // Anchors tried from most to least specific; the final fallback
          // inserts right after the schema opening brace.
          if (/\n\s*\/\/ Metadata/.test(modelContent)) {
            modelContent = modelContent.replace(/\n\s*\/\/ Metadata/, `\n${fieldBlock}\n\n // Metadata`);
          } else if (/\n\s*isActive\s*:/.test(modelContent)) {
            modelContent = modelContent.replace(/\n\s*isActive\s*:/, `\n${fieldBlock}\n\n isActive:`);
          } else {
            modelContent = modelContent.replace(
              /new mongoose\.Schema\s*\(\s*\{\s*/,
              (match) => `${match}\n${fieldBlock}\n`
            );
          }
        }

        fs.writeFileSync(modelFile, modelContent, 'utf8');
        missingFields.forEach((field) => {
          console.log(chalk.yellow(`âž• Field '${field}' added to models/${modelName}.js`));
        });
      }
    }

    // --- ROUTE SYNC ---
    if (!fs.existsSync(routeFile)) {
      if (isSQL) {
        // Generate Sequelize-compatible routes
        const routeCode = generateSequelizeRoutes(resourceName, Array.from(resource.fields), resource.hasAuth);
        fs.writeFileSync(routeFile, routeCode, 'utf8');
      } else {
        // Generate Mongoose routes
        const routeCode = generateAdvancedCrudRoutes(resourceName, Array.from(resource.fields), resource.hasAuth);
        fs.writeFileSync(routeFile, routeCode, 'utf8');
      }
      console.log(chalk.green(`🆕 Route created: routes/${resourceName}.routes.js`));
      if (!createdResources.includes(resourceName)) {
        createdResources.push(resourceName);
      }
    } else {
      // Keep existing routes stable; sync only scaffolds missing route files.
      // Route-level AST merging will be handled in a future hardening pass.
    }
  }

  // --- UPDATE SERVER.JS WITH ROUTES ---
  if (createdResources.length > 0) {
    updateServerWithRoutes(backendPath, createdResources);
    console.log(chalk.green(`✅ Updated server.js with ${createdResources.length} route(s)`));
  }

  // --- GENERATE SQL FILES FOR SQL DATABASES ---
  if (isSQL && resources.size > 0) {
    generateSQLScripts(backendPath, resources, dbType);
    console.log(chalk.green(`✅ Generated SQL scripts in backend/sql/`));
  }

  console.log(chalk.cyan('\n✅ Backend sync complete!'));
}
|
|
143
|
+
|
|
144
|
+
/**
 * Upper-case only the first character of `str`; the remainder is untouched.
 * An empty string comes back unchanged.
 *
 * @param {string} str
 * @returns {string}
 */
function capitalize(str) {
  const head = str.charAt(0);
  const tail = str.slice(1);
  return `${head.toUpperCase()}${tail}`;
}
|
|
147
|
+
|
|
148
|
+
/**
 * Best-effort English singularization of a resource name.
 * Handles the common plural suffixes in priority order:
 *   'ies' -> 'y' (categories -> category), 'ses' -> drop 'es'
 *   (classes -> class), trailing 's' dropped unless the word ends in 'ss'
 *   (boss stays boss). Falsy input is returned as-is.
 *
 * @param {string} str
 * @returns {string}
 */
function singularize(str) {
  if (!str) return str;

  const rules = [
    ['ies', (s) => `${s.slice(0, -3)}y`],
    ['ses', (s) => s.slice(0, -2)],
  ];
  for (const [suffix, transform] of rules) {
    if (str.endsWith(suffix)) {
      return transform(str);
    }
  }

  const plainPlural = str.endsWith('s') && !str.endsWith('ss');
  return plainPlural ? str.slice(0, -1) : str;
}
|
|
155
|
+
|
|
156
|
+
/**
 * Pick the model file base name for a resource.
 * Prefers an existing singular model file (User.js), then an existing plural
 * one (Users.js); when neither exists, the singular name is returned so new
 * model files follow the singular convention.
 *
 * @param {string} backendPath - Path to the generated backend folder.
 * @param {string} resourceName - Plural resource name from the frontend scan.
 * @returns {string} Capitalized base name (no `.js` extension).
 */
function resolveModelName(backendPath, resourceName) {
  const modelsDir = path.join(backendPath, 'models');
  const singularName = capitalize(singularize(resourceName));
  const pluralName = capitalize(resourceName);

  for (const candidate of [singularName, pluralName]) {
    if (fs.existsSync(path.join(modelsDir, `${candidate}.js`))) {
      return candidate;
    }
  }

  return singularName;
}
|
|
169
|
+
|
|
170
|
+
/**
 * Escape every regex metacharacter in `string` so it can be embedded in a
 * `new RegExp(...)` pattern as a literal match.
 *
 * @param {string} string
 * @returns {string}
 */
function escapeRegex(string) {
  const metachars = /[.*+?^${}()|[\]\\]/g;
  return string.replace(metachars, (ch) => `\\${ch}`);
}
|
|
173
|
+
|
|
174
|
+
/**
 * Infer the database type from the generated backend's config file.
 *
 * Reads backend/config/database.js and keyword-sniffs its text. Checks run in
 * priority order: mongoose first, then the Sequelize dialects. The previous
 * version also tested `dialect: 'mysql'` / `dialect: 'sqlite'` — redundant,
 * since the plain substring check already covers them — and missed
 * double-quoted `dialect: "postgres"`; a bare 'postgres' substring check
 * covers both quote styles and 'postgresql'.
 *
 * @param {string} backendPath - Path to the generated backend folder.
 * @returns {'mongodb'|'mysql'|'postgresql'|'sqlite'} Detected type; 'mongodb'
 *   when the config file is missing or inconclusive.
 */
function detectDatabaseType(backendPath) {
  const dbConfigPath = path.join(backendPath, 'config', 'database.js');

  if (!fs.existsSync(dbConfigPath)) {
    return 'mongodb'; // Default
  }

  const dbConfig = fs.readFileSync(dbConfigPath, 'utf8');

  if (dbConfig.includes('mongoose')) {
    return 'mongodb';
  } else if (dbConfig.includes('mysql')) {
    return 'mysql';
  } else if (dbConfig.includes('postgres')) {
    // Matches 'postgresql' and any quoting of dialect: 'postgres'.
    return 'postgresql';
  } else if (dbConfig.includes('sqlite')) {
    return 'sqlite';
  }

  return 'mongodb'; // Default
}
|
|
195
|
+
|
|
196
|
+
/**
 * Generate Sequelize model source for a resource.
 *
 * The model name is derived with the same `capitalize(singularize(...))` rules
 * used by resolveModelName, so the generated content matches the file name
 * sync writes. The previous `replace(/s$/, '')` produced broken names for
 * '-ies' plurals (categories -> 'Categorie' while the file was Category.js).
 *
 * @param {string} resourceName - Plural resource name; also used as tableName.
 * @param {string[]} [fields] - Field names observed in frontend payloads; each
 *   is emitted as a nullable STRING column.
 * @param {boolean} [hasAuth] - Accepted for interface parity with the Mongoose
 *   generator; currently unused by the Sequelize template.
 * @returns {string} Complete ES-module source for the model file.
 */
function generateSequelizeModel(resourceName, fields = [], hasAuth = false) {
  const modelName = capitalize(singularize(resourceName));

  // Nullable STRING is the safe default for fields inferred from the frontend.
  const fieldDefinitions = fields
    .map(field => `    ${field}: {\n      type: DataTypes.STRING,\n      allowNull: true\n    },`)
    .join('\n');

  return `import { DataTypes } from 'sequelize';
import { sequelize } from '../config/database.js';

const ${modelName} = sequelize.define(
  '${modelName}',
  {
    id: {
      type: DataTypes.INTEGER,
      primaryKey: true,
      autoIncrement: true
    },
${fieldDefinitions}
    isActive: {
      type: DataTypes.BOOLEAN,
      defaultValue: true,
      allowNull: false
    },
    isDeleted: {
      type: DataTypes.BOOLEAN,
      defaultValue: false,
      allowNull: false
    },
    version: {
      type: DataTypes.INTEGER,
      defaultValue: 1
    }
  },
  {
    tableName: '${resourceName}',
    timestamps: true,
    indexes: [
      { fields: ['isActive'] },
      { fields: ['isDeleted'] },
      { fields: ['createdAt'] }
    ],
    hooks: {
      beforeUpdate: (instance) => {
        if (instance.changed()) {
          instance.version = (instance.version || 0) + 1;
        }
      }
    }
  }
);

// Static method for soft delete
${modelName}.softDelete = async function(id) {
  return this.update(
    { isDeleted: true },
    { where: { id } }
  );
};

// Static method to find all active
${modelName}.findAllActive = async function(options = {}) {
  return this.findAll({
    where: { isActive: true, isDeleted: false },
    order: options.sort || [['createdAt', 'DESC']],
    limit: options.limit || 100,
    offset: options.skip || 0
  });
};

export default ${modelName};
`;
}
|
|
269
|
+
|
|
270
|
+
/**
 * Generate an Express CRUD router (Sequelize-backed) for a resource.
 *
 * The model name now uses the same `capitalize(singularize(...))` rules as
 * resolveModelName, so the generated `import ... from '../models/X.js'` points
 * at the file sync actually wrote. The old `replace(/s$/, '')` broke '-ies'
 * plurals (categories -> import of non-existent 'Categorie.js').
 *
 * @param {string} resourceName - Plural resource name; used for the /api path
 *   and user-facing messages.
 * @param {string[]} [fields] - Observed field names (currently unused by the
 *   route template; kept for interface parity with the model generator).
 * @param {boolean} [hasAuth] - Accepted for interface parity; currently unused.
 * @returns {string} Complete ES-module source for the routes file.
 */
function generateSequelizeRoutes(resourceName, fields = [], hasAuth = false) {
  const modelName = capitalize(singularize(resourceName));

  return `import express from 'express';
import { query, body, param } from 'express-validator';
import { validateErrors } from '../middleware/validation.js';
import { ResponseHelper } from '../utils/helper.js';
import ${modelName} from '../models/${modelName}.js';
import { Op } from 'sequelize';

const router = express.Router();

// GET ALL - With pagination
router.get(
  '/',
  [
    query('page').optional().isInt({ min: 1 }).toInt(),
    query('limit').optional().isInt({ min: 1, max: 100 }).toInt(),
    validateErrors
  ],
  async (req, res, next) => {
    try {
      const page = parseInt(req.query.page) || 1;
      const limit = parseInt(req.query.limit) || 10;
      const offset = (page - 1) * limit;

      const { count, rows } = await ${modelName}.findAndCountAll({
        where: { isDeleted: false },
        limit,
        offset,
        order: [['createdAt', 'DESC']]
      });

      const pagination = {
        page,
        limit,
        total: count,
        totalPages: Math.ceil(count / limit)
      };

      return ResponseHelper.paginated(res, rows, pagination, '${resourceName} loaded successfully');
    } catch (error) {
      next(error);
    }
  }
);

// GET BY ID
router.get(
  '/:id',
  [param('id').isInt(), validateErrors],
  async (req, res, next) => {
    try {
      const item = await ${modelName}.findOne({
        where: { id: req.params.id, isDeleted: false }
      });

      if (!item) {
        return ResponseHelper.notFound(res, '${modelName}');
      }

      return ResponseHelper.success(res, item, 'Item retrieved successfully');
    } catch (error) {
      next(error);
    }
  }
);

// CREATE
router.post(
  '/',
  async (req, res, next) => {
    try {
      const saved = await ${modelName}.create(req.body);
      return ResponseHelper.success(res, saved, '${modelName} created successfully', 201);
    } catch (error) {
      if (error.name === 'SequelizeValidationError') {
        const errors = error.errors.map(e => e.message);
        return ResponseHelper.validationError(res, errors);
      }
      next(error);
    }
  }
);

// UPDATE
router.put(
  '/:id',
  [param('id').isInt(), validateErrors],
  async (req, res, next) => {
    try {
      const [updatedCount] = await ${modelName}.update(
        req.body,
        { where: { id: req.params.id } }
      );

      if (updatedCount === 0) {
        return ResponseHelper.notFound(res, '${modelName}');
      }

      const updated = await ${modelName}.findByPk(req.params.id);
      return ResponseHelper.success(res, updated, '${modelName} updated successfully');
    } catch (error) {
      next(error);
    }
  }
);

// DELETE (Soft Delete)
router.delete(
  '/:id',
  [param('id').isInt(), validateErrors],
  async (req, res, next) => {
    try {
      const [updatedCount] = await ${modelName}.update(
        { isDeleted: true },
        { where: { id: req.params.id } }
      );

      if (updatedCount === 0) {
        return ResponseHelper.notFound(res, '${modelName}');
      }

      return ResponseHelper.success(res, null, '${modelName} deleted successfully');
    } catch (error) {
      next(error);
    }
  }
);

export default router;
`;
}
|
|
403
|
+
|
|
404
|
+
/**
 * Register newly created routers in backend/server.js.
 *
 * Fixes two defects in the previous version:
 *   1. When the '// Auto-generated routes' marker already existed, import
 *      statements for new routers were computed but never inserted, producing
 *      `app.use(...)` lines that referenced undefined identifiers.
 *   2. The old section-replace regex overwrote the whole auto-generated
 *      section with only the new routes, dropping earlier registrations.
 * New behavior: imports and registrations are both added idempotently (skipped
 * when already present), and new routes are appended under the existing marker.
 *
 * @param {string} backendPath - Path to the generated backend folder.
 * @param {string[]} resources - Resource names whose route files were created.
 * @returns {void} No-op when server.js does not exist.
 */
function updateServerWithRoutes(backendPath, resources) {
  const serverPath = path.join(backendPath, 'server.js');

  if (!fs.existsSync(serverPath)) {
    return;
  }

  let serverCode = fs.readFileSync(serverPath, 'utf8');

  // Add an import for each router that is not already imported.
  const importLines = resources
    .filter(r => !serverCode.includes(`import ${r}Router `))
    .map(r => `import ${r}Router from './routes/${r}.routes.js';`)
    .join('\n');

  if (importLines) {
    const importMatches = serverCode.match(/import .* from .*;\n/g);
    if (importMatches && importMatches.length > 0) {
      const lastImport = importMatches[importMatches.length - 1];
      const insertAt = serverCode.lastIndexOf(lastImport) + lastImport.length;
      serverCode = serverCode.slice(0, insertAt) + '\n' + importLines + serverCode.slice(insertAt);
    }
  }

  // Mount each router that is not already registered.
  const routeLines = resources
    .filter(r => !serverCode.includes(`app.use('/api/${r}'`))
    .map(r => `app.use('/api/${r}', ${r}Router);`)
    .join('\n');

  if (routeLines) {
    if (serverCode.includes('// Auto-generated routes')) {
      // Append under the existing marker; never drop earlier registrations.
      serverCode = serverCode.replace(
        '// Auto-generated routes\n',
        `// Auto-generated routes\n${routeLines}\n`
      );
    } else {
      // First run: insert marker + registrations right after the health check.
      const healthCheckRegex = /app\.get\('\/api\/health',\s*\(req,\s*res\)\s*=>\s*\{[^}]*\}\);/;
      const match = healthCheckRegex.exec(serverCode);
      if (match) {
        const insertPoint = match.index + match[0].length;
        serverCode =
          serverCode.slice(0, insertPoint) +
          '\n\n// Auto-generated routes\n' +
          routeLines +
          serverCode.slice(insertPoint);
      }
    }
  }

  fs.writeFileSync(serverPath, serverCode);
}
|
|
456
|
+
|
|
457
|
+
/**
 * Write auto-generated SQL artifacts into backend/sql/.
 *
 * Produces two files: 01_schema.sql (CREATE TABLE statements from
 * generateSchemaSQL) and a README.md explaining how to apply the schema for
 * each supported SQL engine. The sql/ directory is created when absent.
 *
 * @param {string} backendPath - Path to the generated backend folder.
 * @param {Map<string, {name: string, fields: string[], hasAuth: boolean}>} resources
 * @param {'mysql'|'postgresql'|'sqlite'} dbType
 * @returns {void}
 */
function generateSQLScripts(backendPath, resources, dbType) {
  const sqlPath = path.join(backendPath, 'sql');
  if (!fs.existsSync(sqlPath)) {
    fs.mkdirSync(sqlPath, { recursive: true });
  }

  // Schema first — the README below tells the user to run it.
  const schemaSQL = generateSchemaSQL(resources, dbType);
  fs.writeFileSync(path.join(sqlPath, '01_schema.sql'), schemaSQL);

  // Generate README. Note: DB_NAME is read from the *generator's* environment
  // at generation time, falling back to 'offbyt'.
  const readmeContent = `# SQL Scripts for ${dbType.toUpperCase()}

These SQL scripts were auto-generated by offbyt based on your frontend API calls.

## Files

1. **01_schema.sql** - Database schema (CREATE TABLE statements)
   - Run this first to create all tables
   - Contains indexes for optimal performance
   - Includes foreign key relationships

## How to Use

### MySQL
\`\`\`bash
# Option 1: MySQL Workbench
1. Open MySQL Workbench
2. Connect to your database
3. File > Run SQL Script
4. Select 01_schema.sql
5. Click Run

# Option 2: Command Line
mysql -u root -p ${process.env.DB_NAME || 'offbyt'} < sql/01_schema.sql
\`\`\`

### PostgreSQL
\`\`\`bash
# Option 1: pgAdmin
1. Open pgAdmin
2. Connect to your database
3. Tools > Query Tool
4. Open and run 01_schema.sql

# Option 2: Command Line
psql -U postgres -d ${process.env.DB_NAME || 'offbyt'} -a -f sql/01_schema.sql
\`\`\`

### SQLite
\`\`\`bash
sqlite3 database.db < sql/01_schema.sql
\`\`\`

## Important Notes

- Review the schema before running in production
- Backup your database before making changes
- Update your .env file with correct database credentials
- The schema includes soft delete functionality (isDeleted field)
`;

  fs.writeFileSync(path.join(sqlPath, 'README.md'), readmeContent);
}
|
|
521
|
+
|
|
522
|
+
/**
 * Build the CREATE TABLE script for every scanned resource.
 *
 * Per-dialect differences: Postgres uses SERIAL ids and quoted camelCase
 * timestamp columns; SQLite uses AUTOINCREMENT and CREATE TABLE IF NOT EXISTS
 * (its DROP section is omitted); MySQL gets AUTO_INCREMENT, an ON UPDATE
 * timestamp, and an InnoDB/utf8mb4 table suffix. Every table also receives
 * the soft-delete bookkeeping columns (isActive, isDeleted, version) plus
 * commented-out DROP statements and basic indexes.
 *
 * @param {Map<string, {fields?: string[]}>} resources - Keyed by table name.
 * @param {'mysql'|'postgresql'|'sqlite'} dbType
 * @returns {string} Full SQL script text.
 */
function generateSchemaSQL(resources, dbType) {
  const isPostgres = dbType === 'postgresql';
  const isSQLite = dbType === 'sqlite';
  const isMySQL = dbType === 'mysql';

  const parts = [];
  parts.push(`-- ============================================\n`);
  parts.push(`-- offbyt Auto-Generated SQL Schema\n`);
  parts.push(`-- Database: ${dbType.toUpperCase()}\n`);
  parts.push(`-- Generated: ${new Date().toISOString()}\n`);
  parts.push(`-- ============================================\n\n`);

  if (!isSQLite) {
    parts.push(`-- Drop existing tables (commented for safety)\n`);
    for (const resourceName of resources.keys()) {
      parts.push(`-- DROP TABLE IF EXISTS ${resourceName}${isPostgres ? ' CASCADE' : ''};\n`);
    }
    parts.push(`\n`);
  }

  // Generate CREATE TABLE for each resource.
  for (const [resourceName, resourceInfo] of resources.entries()) {
    const fields = resourceInfo.fields || [];

    parts.push(`-- Table: ${resourceName}\n`);
    const columns = [];

    if (isPostgres) {
      columns.push('  id SERIAL PRIMARY KEY');
    } else if (isSQLite) {
      columns.push('  id INTEGER PRIMARY KEY AUTOINCREMENT');
    } else {
      columns.push('  id INT AUTO_INCREMENT PRIMARY KEY');
    }

    for (const field of fields) {
      columns.push(`  ${field} ${getSQLFieldType(field, dbType)}`);
    }

    columns.push('  isActive BOOLEAN DEFAULT TRUE');
    columns.push('  isDeleted BOOLEAN DEFAULT FALSE');
    columns.push('  version INT DEFAULT 1');

    if (isPostgres) {
      columns.push('  "createdAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP');
      columns.push('  "updatedAt" TIMESTAMP DEFAULT CURRENT_TIMESTAMP');
    } else if (isSQLite) {
      columns.push('  createdAt DATETIME DEFAULT CURRENT_TIMESTAMP');
      columns.push('  updatedAt DATETIME DEFAULT CURRENT_TIMESTAMP');
    } else {
      columns.push('  createdAt DATETIME DEFAULT CURRENT_TIMESTAMP');
      columns.push('  updatedAt DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP');
    }

    const createClause = isSQLite ? 'CREATE TABLE IF NOT EXISTS' : 'CREATE TABLE';
    parts.push(`${createClause} ${resourceName} (\n${columns.join(',\n')}\n`);
    parts.push(
      isMySQL
        ? `) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;\n\n`
        : `);\n\n`
    );

    // Add indexes for commonly queried columns.
    if (fields.includes('email')) {
      parts.push(`CREATE INDEX idx_${resourceName}_email ON ${resourceName}(email);\n`);
    }
    if (fields.includes('name')) {
      parts.push(`CREATE INDEX idx_${resourceName}_name ON ${resourceName}(name);\n`);
    }
    parts.push(`CREATE INDEX idx_${resourceName}_isDeleted ON ${resourceName}(isDeleted);\n`);
    parts.push(`\n`);
  }

  parts.push(`-- ============================================\n`);
  parts.push(`-- Schema created successfully!\n`);
  parts.push(`-- ============================================\n`);

  return parts.join('');
}
|
|
605
|
+
|
|
606
|
+
/**
 * Heuristically map a frontend field name to a SQL column type.
 *
 * Fix: boolean detection previously used `includes('is')`/`includes('has')`,
 * which mis-typed names like 'history', 'wishlist', or 'chassis' as BOOLEAN.
 * It now requires the camelCase flag prefix (isActive, hasItems, or a bare
 * 'is'/'has'). All other rules are unchanged.
 *
 * @param {string} fieldName - Field name observed in frontend payloads.
 * @param {'mysql'|'postgresql'|'sqlite'} dbType
 * @returns {string} SQL column type fragment (may include a DEFAULT clause).
 */
function getSQLFieldType(fieldName, dbType) {
  const isPostgres = dbType === 'postgresql';
  const isSQLite = dbType === 'sqlite';

  if (fieldName.endsWith('Id') || fieldName.includes('_id')) {
    // Foreign-key style names (userId, owner_id) become integer columns.
    return isPostgres || isSQLite ? 'INTEGER' : 'INT';
  } else if (fieldName.includes('email')) {
    return 'VARCHAR(255)';
  } else if (fieldName.includes('price') || fieldName.includes('amount')) {
    return 'DECIMAL(10, 2)';
  } else if (fieldName.includes('count') || fieldName.includes('quantity')) {
    return isPostgres || isSQLite ? 'INTEGER DEFAULT 0' : 'INT DEFAULT 0';
  } else if (fieldName.includes('description') || fieldName.includes('content')) {
    return 'TEXT';
  } else if (/^(is|has)([A-Z]|$)/.test(fieldName)) {
    // Boolean flags anchored to the camelCase prefix (isActive, hasItems).
    return 'BOOLEAN DEFAULT FALSE';
  } else if (fieldName.includes('date') || fieldName.includes('time')) {
    return isPostgres ? 'TIMESTAMP' : 'DATETIME';
  } else {
    return 'VARCHAR(255)';
  }
}
|
|
628
|
+
|
|
629
|
+
|