chadstart 1.0.1 → 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,484 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs');
4
+ const path = require('path');
5
+ const { execFileSync } = require('child_process');
6
+ const YAML = require('yaml');
7
+ const logger = require('../utils/logger');
8
+
9
+ const { buildCore, toSnakeCase } = require('./entity-engine');
10
+
11
+ // ─── Git helpers ──────────────────────────────────────────────────────────────
12
+
13
/**
 * Retrieve the last committed version of a file using git.
 * Returns null if the file has no committed history (brand-new / untracked)
 * or if any git invocation fails.
 */
function getLastCommittedYaml(yamlPath) {
  try {
    const absolute = path.resolve(yamlPath);

    // Locate the repository root so we can build a HEAD-relative path.
    const repoRoot = execFileSync('git', ['rev-parse', '--show-toplevel'], {
      cwd: path.dirname(absolute),
      stdio: ['pipe', 'pipe', 'pipe'],
    })
      .toString()
      .trim();

    const relative = path.relative(repoRoot, absolute);

    // Ask git for the file content as of the last commit.
    const committed = execFileSync('git', ['show', `HEAD:${relative}`], {
      cwd: repoRoot,
      stdio: ['pipe', 'pipe', 'pipe'],
    }).toString();

    return YAML.parse(committed);
  } catch {
    // Not a git repo, file not in HEAD, or git missing — treat all the same.
    return null;
  }
}
37
+
38
/**
 * Load the current YAML file from disk and return the parsed object.
 *
 * @param {string} yamlPath Path to the YAML config file.
 * @returns {*} Parsed YAML content.
 * @throws {Error} When the resolved path does not exist.
 */
function loadCurrentYaml(yamlPath) {
  const resolved = path.resolve(yamlPath);
  if (fs.existsSync(resolved)) {
    return YAML.parse(fs.readFileSync(resolved, 'utf8'));
  }
  throw new Error(`YAML config not found: ${resolved}`);
}
48
+
49
// ─── SQL generation helpers ───────────────────────────────────────────────────

// Active database engine, chosen via the DB_ENGINE env var at module load.
// Values this module distinguishes: 'sqlite' (default), 'postgres', 'mysql'.
const DB_ENGINE = (process.env.DB_ENGINE || 'sqlite').toLowerCase();

// Property-type → column-type lookup tables, one per engine. Any property
// type missing from a table falls back to TEXT (see sqlType()).

// SQLite: booleans stored as INTEGER 0/1; dates/timestamps stored as TEXT.
const SQL_TYPE_SQLITE = {
  text: 'TEXT', string: 'TEXT', richText: 'TEXT',
  integer: 'INTEGER', int: 'INTEGER',
  number: 'REAL', float: 'REAL', real: 'REAL', money: 'REAL',
  boolean: 'INTEGER', bool: 'INTEGER',
  date: 'TEXT', timestamp: 'TEXT', email: 'TEXT', link: 'TEXT',
  password: 'TEXT', choice: 'TEXT', location: 'TEXT',
  file: 'TEXT', image: 'TEXT', group: 'TEXT', json: 'TEXT',
};

// Postgres: NUMERIC for all fractional types; native BOOLEAN.
const SQL_TYPE_PG = {
  text: 'TEXT', string: 'TEXT', richText: 'TEXT',
  integer: 'INTEGER', int: 'INTEGER',
  number: 'NUMERIC', float: 'NUMERIC', real: 'NUMERIC', money: 'NUMERIC',
  boolean: 'BOOLEAN', bool: 'BOOLEAN',
  date: 'TEXT', timestamp: 'TEXT', email: 'TEXT', link: 'TEXT',
  password: 'TEXT', choice: 'TEXT', location: 'TEXT',
  file: 'TEXT', image: 'TEXT', group: 'TEXT', json: 'TEXT',
};

// MySQL: fixed-precision DECIMAL(15,4) for fractional types; TINYINT(1)
// for booleans (MySQL's conventional boolean representation).
const SQL_TYPE_MYSQL = {
  text: 'TEXT', string: 'TEXT', richText: 'TEXT',
  integer: 'INT', int: 'INT',
  number: 'DECIMAL(15,4)', float: 'DECIMAL(15,4)', real: 'DECIMAL(15,4)', money: 'DECIMAL(15,4)',
  boolean: 'TINYINT(1)', bool: 'TINYINT(1)',
  date: 'TEXT', timestamp: 'TEXT', email: 'TEXT', link: 'TEXT',
  password: 'TEXT', choice: 'TEXT', location: 'TEXT',
  file: 'TEXT', image: 'TEXT', group: 'TEXT', json: 'TEXT',
};
82
+
83
/**
 * Map a chadstart property type to the column type for the active engine.
 * Unknown property types fall back to TEXT.
 */
function sqlType(type) {
  let lookup;
  switch (DB_ENGINE) {
    case 'postgres':
      lookup = SQL_TYPE_PG;
      break;
    case 'mysql':
      lookup = SQL_TYPE_MYSQL;
      break;
    default:
      lookup = SQL_TYPE_SQLITE;
  }
  return lookup[type] || 'TEXT';
}
88
+
89
/**
 * Column type used for id primary/foreign keys.
 * MySQL gets a sized VARCHAR(36) — presumably sized for UUID strings
 * (TODO confirm id format against the id generator); others use TEXT.
 */
function idColType() {
  if (DB_ENGINE === 'mysql') {
    return 'VARCHAR(36)';
  }
  return 'TEXT';
}
92
+
93
/**
 * Column type for auth string columns (email/password).
 * MySQL gets VARCHAR(191) — 191 is presumably chosen for utf8mb4
 * index-length limits (TODO confirm); others use TEXT.
 */
function authStrType() {
  if (DB_ENGINE === 'mysql') {
    return 'VARCHAR(191)';
  }
  return 'TEXT';
}
96
+
97
/**
 * Quote an SQL identifier for the active engine:
 * backticks on MySQL, double quotes everywhere else.
 */
function q(name) {
  const quote = DB_ENGINE === 'mysql' ? '`' : '"';
  return `${quote}${name}${quote}`;
}
101
+
102
+ // ─── Diff engine ──────────────────────────────────────────────────────────────
103
+
104
/**
 * Compare two core objects and return structured diff describing schema changes.
 *
 * Only ADDITIONS are detected: new entities, new columns (properties,
 * belongsTo foreign keys, auth columns) and new belongsToMany junction
 * tables. Removals/renames are not diffed by this function.
 *
 * @param {Object|null} oldCore Core built from the last committed YAML (null when none).
 * @param {Object} newCore Core built from the current YAML.
 * @returns {{ newEntities: Array, newColumns: Array<{entity, prop}>, newJunctionTables: Array }}
 *   newColumns props may carry sentinel types '__fk__', '__auth_email__',
 *   '__auth_password__' consumed by generateAddColumnSql().
 */
function diffCores(oldCore, newCore) {
  const newEntities = [];
  const newColumns = [];
  const newJunctionTables = [];

  // With no previous core, every entity compares against an empty map.
  const oldEntityMap = oldCore ? oldCore.entities : {};

  for (const [name, entity] of Object.entries(newCore.entities)) {
    const oldEntity = oldEntityMap[name];

    if (!oldEntity) {
      // Entirely new entity
      newEntities.push(entity);
    } else {
      // Entity already exists — look for new properties
      const oldPropNames = new Set(oldEntity.properties.map((p) => p.name));
      // Relations may be declared as a plain string or as an object with
      // an `entity` (or `name`) field; normalize to the target entity name.
      const oldBelongsToNames = new Set(
        (oldEntity.belongsTo || []).map((r) =>
          typeof r === 'string' ? r : (r.entity || r.name)
        )
      );

      // New properties
      for (const prop of entity.properties) {
        // email/password on authenticable entities are handled separately below.
        if (entity.authenticable && (prop.name === 'email' || prop.name === 'password')) continue;
        if (!oldPropNames.has(prop.name)) {
          newColumns.push({ entity, prop });
        }
      }

      // New belongsTo relations
      for (const rel of entity.belongsTo || []) {
        const relName = typeof rel === 'string' ? rel : (rel.entity || rel.name);
        if (!oldBelongsToNames.has(relName)) {
          // Relations pointing at unknown entities are silently skipped.
          const refEntity = newCore.entities[relName];
          if (refEntity) {
            newColumns.push({
              entity,
              prop: { name: `${refEntity.tableName}_id`, type: '__fk__', refTable: refEntity.tableName },
            });
          }
        }
      }

      // New authenticable flag (adds email + password columns)
      if (entity.authenticable && !oldEntity.authenticable) {
        // Only add the columns when the old schema didn't already declare
        // them as ordinary properties.
        if (!oldPropNames.has('email')) {
          newColumns.push({ entity, prop: { name: 'email', type: '__auth_email__' } });
        }
        if (!oldPropNames.has('password')) {
          newColumns.push({ entity, prop: { name: 'password', type: '__auth_password__' } });
        }
      }
    }

    // New belongsToMany junction tables
    // (runs for brand-new entities too — their junctions are new by definition).
    for (const rel of entity.belongsToMany || []) {
      const relName = typeof rel === 'string' ? rel : (rel.entity || rel.name);
      const relEntity = newCore.entities[relName];
      if (!relEntity) continue;

      // Canonical junction name: both table names sorted alphabetically,
      // so A→B and B→A name the same table.
      const [a, b] = [entity.tableName, relEntity.tableName].sort();
      const jt = `${a}_${b}`;

      // Check if old core had this junction
      const oldJt = oldCore && oldEntityMap[name] &&
        (oldEntityMap[name].belongsToMany || []).some((oldRel) => {
          const oldRelName = typeof oldRel === 'string' ? oldRel : (oldRel.entity || oldRel.name);
          return oldRelName === relName;
        });

      if (!oldJt) {
        // Avoid duplicates (A→B and B→A produce the same junction)
        if (!newJunctionTables.some((j) => j.tableName === jt)) {
          newJunctionTables.push({
            tableName: jt,
            tableA: a,
            tableB: b,
          });
        }
      }
    }
  }

  return { newEntities, newColumns, newJunctionTables };
}
195
+
196
+ // ─── SQL statement generation ─────────────────────────────────────────────────
197
+
198
/**
 * Generate a CREATE TABLE SQL statement for a new entity.
 *
 * Emits: id/createdAt/updatedAt bookkeeping columns, email/password with
 * auth constraints when the entity is authenticable, one column per
 * declared property, and one FK column per belongsTo relation that
 * resolves to a known entity.
 *
 * @param {Object} entity Core entity (tableName, properties, belongsTo, authenticable).
 * @param {Object} allEntities Map of entity name → entity, used to resolve belongsTo targets.
 * @returns {string} A single `CREATE TABLE IF NOT EXISTS …;` statement.
 */
function generateCreateTableSql(entity, allEntities) {
  const cols = [
    `${q('id')} ${idColType()} PRIMARY KEY`,
    `${q('createdAt')} TEXT`,
    `${q('updatedAt')} TEXT`,
  ];

  if (entity.authenticable) {
    cols.push(`${q('email')} ${authStrType()} NOT NULL UNIQUE`);
    cols.push(`${q('password')} ${authStrType()} NOT NULL`);
  }

  for (const p of entity.properties) {
    // email/password were already emitted above with auth constraints.
    if (entity.authenticable && (p.name === 'email' || p.name === 'password')) continue;
    cols.push(`${q(p.name)} ${sqlType(p.type)}`);
  }

  for (const rel of entity.belongsTo || []) {
    const relName = typeof rel === 'string' ? rel : (rel.entity || rel.name);
    const ref = allEntities[relName];
    // Relations to unknown entities are skipped (consistent with diffCores).
    if (ref) {
      const fk = `${ref.tableName}_id`;
      // Fix: quote the referenced "id" column like every other identifier
      // in this module (previously emitted unquoted as `(id)`).
      cols.push(`${q(fk)} ${idColType()} REFERENCES ${q(ref.tableName)}(${q('id')})`);
    }
  }

  return `CREATE TABLE IF NOT EXISTS ${q(entity.tableName)} (${cols.join(', ')});`;
}
229
+
230
/**
 * Generate a DROP TABLE SQL statement for an entity.
 *
 * @param {Object} entity Core entity whose tableName is dropped.
 * @returns {string} `DROP TABLE IF EXISTS …;` statement.
 */
function generateDropTableSql(entity) {
  const table = q(entity.tableName);
  return `DROP TABLE IF EXISTS ${table};`;
}
236
+
237
/**
 * Generate ALTER TABLE ADD COLUMN SQL for a new column.
 *
 * Sentinel prop types produced by diffCores get dedicated column types:
 * '__fk__' uses the id column type, '__auth_email__' / '__auth_password__'
 * use the auth string type; everything else goes through sqlType().
 *
 * @param {Object} entity Entity receiving the column.
 * @param {{ name: string, type: string }} prop Column descriptor.
 * @returns {string} `ALTER TABLE … ADD COLUMN …;` statement.
 */
function generateAddColumnSql(entity, prop) {
  let columnType;
  switch (prop.type) {
    case '__fk__':
      columnType = idColType();
      break;
    case '__auth_email__':
    case '__auth_password__':
      columnType = authStrType();
      break;
    default:
      columnType = sqlType(prop.type);
  }
  return `ALTER TABLE ${q(entity.tableName)} ADD COLUMN ${q(prop.name)} ${columnType};`;
}
253
+
254
/**
 * Generate CREATE TABLE SQL for a junction (many-to-many) table.
 *
 * One FK column per side plus a composite primary key over both columns,
 * which also prevents duplicate links between the same pair of rows.
 *
 * @param {{ tableName: string, tableA: string, tableB: string }} junction
 * @returns {string} `CREATE TABLE IF NOT EXISTS …;` statement.
 */
function generateCreateJunctionSql(junction) {
  const { tableName, tableA, tableB } = junction;
  // Fix: quote the referenced "id" column like every other identifier in
  // this module (previously emitted unquoted as `(id)`).
  const aCol = `${q(`${tableA}_id`)} ${idColType()} REFERENCES ${q(tableA)}(${q('id')})`;
  const bCol = `${q(`${tableB}_id`)} ${idColType()} REFERENCES ${q(tableB)}(${q('id')})`;
  return `CREATE TABLE IF NOT EXISTS ${q(tableName)} (${aCol}, ${bCol}, PRIMARY KEY (${q(`${tableA}_id`)}, ${q(`${tableB}_id`)}));`;
}
263
+
264
/**
 * Generate DROP TABLE SQL for a junction table.
 *
 * @param {{ tableName: string }} junction
 * @returns {string} `DROP TABLE IF EXISTS …;` statement.
 */
function generateDropJunctionSql(junction) {
  const table = q(junction.tableName);
  return `DROP TABLE IF EXISTS ${table};`;
}
270
+
271
+ // ─── Migration file generation ────────────────────────────────────────────────
272
+
273
/**
 * Given a diff, generate the "do" (up) and "undo" (down) SQL scripts.
 *
 * Fix: the undo script now runs in REVERSE order of the do script (LIFO),
 * so junction tables — which hold foreign keys into entity tables — are
 * dropped before the entity tables they reference. Emitting undo in the
 * same order as do can fail on engines that enforce foreign-key
 * constraints at DROP time (e.g. Postgres without CASCADE).
 *
 * @param {{ newEntities, newColumns, newJunctionTables }} diff Diff from diffCores().
 * @param {Object} allEntities Map of entity name → entity (resolves FK targets).
 * @returns {{ do: string, undo: string }} Newline-joined SQL scripts.
 */
function generateMigrationScripts(diff, allEntities) {
  const doStatements = [];
  const undoStatements = [];

  // New entities: CREATE on the way up, DROP on the way down.
  for (const entity of diff.newEntities) {
    doStatements.push(generateCreateTableSql(entity, allEntities));
    undoStatements.push(generateDropTableSql(entity));
  }

  // New columns
  for (const { entity, prop } of diff.newColumns) {
    doStatements.push(generateAddColumnSql(entity, prop));
    // Most databases don't support DROP COLUMN easily (especially SQLite),
    // so undo for columns is a comment placeholder.
    undoStatements.push(`-- ALTER TABLE ${q(entity.tableName)} DROP COLUMN ${q(prop.name)};`);
  }

  // New junction tables
  for (const jt of diff.newJunctionTables) {
    doStatements.push(generateCreateJunctionSql(jt));
    undoStatements.push(generateDropJunctionSql(jt));
  }

  return {
    do: doStatements.join('\n'),
    // Reverse so the down script mirrors the up script in reverse order.
    undo: undoStatements.reverse().join('\n'),
  };
}
305
+
306
/**
 * Determine the next migration version number from files in a directory.
 *
 * Only files named "<digits>.<rest>" are considered; the highest existing
 * version plus one is returned, or 1 when the directory is missing or has
 * no matching files.
 */
function getNextVersion(migrationsDir) {
  if (!fs.existsSync(migrationsDir)) {
    return 1;
  }

  let highest = 0;
  for (const fileName of fs.readdirSync(migrationsDir)) {
    if (!/^\d+\./.test(fileName)) continue;
    // parseInt consumes the leading digits and stops at the first '.'.
    const version = parseInt(fileName, 10);
    if (version > highest) highest = version;
  }
  return highest + 1;
}
318
+
319
/**
 * Write migration SQL files to the migrations directory.
 *
 * Creates the directory when needed. File names follow the postgrator
 * convention "<version>.do[.<description>].sql" / "<version>.undo[...].sql",
 * with the version zero-padded to three digits.
 *
 * @returns {{ doPath: string, undoPath: string, version: number }} Paths written.
 */
function writeMigrationFiles(migrationsDir, doSql, undoSql, description) {
  fs.mkdirSync(migrationsDir, { recursive: true });

  const version = String(getNextVersion(migrationsDir)).padStart(3, '0');
  // Sanitize the description so it is safe inside a file name.
  const suffix = description ? `.${description.replace(/[^a-zA-Z0-9_-]/g, '-')}` : '';

  const doPath = path.join(migrationsDir, `${version}.do${suffix}.sql`);
  const undoPath = path.join(migrationsDir, `${version}.undo${suffix}.sql`);

  fs.writeFileSync(doPath, doSql, 'utf8');
  fs.writeFileSync(undoPath, undoSql, 'utf8');

  return { doPath, undoPath, version: parseInt(version, 10) };
}
340
+
341
+ // ─── Postgrator integration ──────────────────────────────────────────────────
342
+
343
/**
 * Build an execQuery function suitable for postgrator from the db module.
 *
 * Postgrator calls execQuery for ALL queries (SELECT, CREATE, INSERT, ALTER, etc.)
 * and always expects `{ rows: [...] }` back. For non-SELECT statements on SQLite,
 * better-sqlite3's `.prepare().all()` throws, so we catch and return `{ rows: [] }`.
 */
function buildExecQueryFn(dbModule) {
  return async function execQuery(query) {
    try {
      return { rows: await dbModule.queryAll(query) };
    } catch {
      // queryAll rejected — assume a statement with no result set
      // (CREATE TABLE, INSERT, ALTER TABLE, DELETE, etc.) and run it raw.
      await dbModule.exec(query);
      return { rows: [] };
    }
  };
}
362
+
363
/**
 * Create a Postgrator instance configured for the current database engine.
 * Uses dynamic import because postgrator is an ES module.
 */
async function createPostgrator(migrationsDir, execQueryFn) {
  const { default: Postgrator } = await import('postgrator');

  // Map our engine names onto postgrator driver names; sqlite3 is the default.
  const driverByEngine = { postgres: 'pg', mysql: 'mysql' };
  const driver = driverByEngine[DB_ENGINE] || 'sqlite3';

  return new Postgrator({
    migrationPattern: path.join(migrationsDir, '*'),
    driver,
    database: process.env.DB_DATABASE || 'chadstart',
    schemaTable: '_cs_migrations',
    execQuery: execQueryFn,
    validateChecksum: true,
  });
}
383
+
384
/**
 * Run all pending migrations up to the latest version.
 *
 * @returns {Promise<Array>} Migrations applied ([] when there is no
 *   migrations directory).
 */
async function runMigrations(migrationsDir, execQueryFn) {
  if (!fs.existsSync(migrationsDir)) {
    logger.info('No migrations directory found — nothing to run.');
    return [];
  }

  const postgrator = await createPostgrator(migrationsDir, execQueryFn);
  return postgrator.migrate();
}
397
+
398
/**
 * Get the current migration version recorded in the schema table.
 */
async function getMigrationVersion(migrationsDir, execQueryFn) {
  const postgrator = await createPostgrator(migrationsDir, execQueryFn);
  const version = await postgrator.getDatabaseVersion();
  return version;
}
405
+
406
/**
 * Get all migrations and their status.
 *
 * @returns {Promise<{ currentVersion: number, pending: Array, applied: Array }>}
 *   Only "do" migrations are listed; applied = version <= current,
 *   pending = version > current.
 */
async function getMigrationStatus(migrationsDir, execQueryFn) {
  if (!fs.existsSync(migrationsDir)) {
    return { currentVersion: 0, pending: [], applied: [] };
  }

  const postgrator = await createPostgrator(migrationsDir, execQueryFn);
  const currentVersion = await postgrator.getDatabaseVersion();
  const allMigrations = await postgrator.getMigrations();

  const applied = [];
  const pending = [];
  for (const migration of allMigrations) {
    if (migration.action !== 'do') continue;
    if (migration.version <= currentVersion) {
      applied.push(migration);
    } else {
      pending.push(migration);
    }
  }

  return { currentVersion, pending, applied };
}
424
+
425
+ // ─── High-level commands ──────────────────────────────────────────────────────
426
+
427
/**
 * Generate a migration by diffing the current YAML against the last committed
 * version in git. Writes numbered SQL files to the migrations directory.
 *
 * @param {string} yamlPath Path to the chadstart YAML config file.
 * @param {string} migrationsDir Path to the migrations directory.
 * @param {string} [description] Optional description for the migration.
 * @returns {{ doPath, undoPath, version, isEmpty } | null}
 */
function generateMigration(yamlPath, migrationsDir, description) {
  // Current state from disk; previous state from git HEAD (null if untracked).
  const currentConfig = loadCurrentYaml(yamlPath);
  const previousConfig = getLastCommittedYaml(yamlPath);

  const newCore = buildCore(currentConfig);
  const oldCore = previousConfig ? buildCore(previousConfig) : null;

  const diff = diffCores(oldCore, newCore);

  const changeCount =
    diff.newEntities.length +
    diff.newColumns.length +
    diff.newJunctionTables.length;

  if (changeCount === 0) {
    // Nothing to migrate; signal the caller without touching the filesystem.
    return { isEmpty: true };
  }

  const scripts = generateMigrationScripts(diff, newCore.entities);
  const written = writeMigrationFiles(migrationsDir, scripts.do, scripts.undo, description);

  return { ...written, isEmpty: false };
}
459
+
460
// Public API of the migration engine, grouped by concern.
module.exports = {
  // Git helpers
  getLastCommittedYaml,
  loadCurrentYaml,
  // Diff engine
  diffCores,
  // SQL generation
  generateCreateTableSql,
  generateDropTableSql,
  generateAddColumnSql,
  generateCreateJunctionSql,
  generateDropJunctionSql,
  generateMigrationScripts,
  // File operations
  getNextVersion,
  writeMigrationFiles,
  // Postgrator integration
  buildExecQueryFn,
  createPostgrator,
  runMigrations,
  getMigrationVersion,
  getMigrationStatus,
  // High-level
  generateMigration,
};