tina4-nodejs 3.2.1 → 3.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/CLAUDE.md +1 -1
  2. package/README.md +1 -1
  3. package/package.json +1 -1
  4. package/packages/cli/src/bin.ts +13 -1
  5. package/packages/cli/src/commands/migrate.ts +19 -5
  6. package/packages/cli/src/commands/migrateCreate.ts +29 -28
  7. package/packages/cli/src/commands/migrateRollback.ts +59 -0
  8. package/packages/cli/src/commands/migrateStatus.ts +62 -0
  9. package/packages/core/public/js/tina4-dev-admin.min.js +1 -1
  10. package/packages/core/public/js/tina4js.min.js +47 -0
  11. package/packages/core/src/auth.ts +44 -10
  12. package/packages/core/src/devAdmin.ts +14 -16
  13. package/packages/core/src/index.ts +10 -3
  14. package/packages/core/src/middleware.ts +232 -2
  15. package/packages/core/src/queue.ts +127 -25
  16. package/packages/core/src/queueBackends/mongoBackend.ts +223 -0
  17. package/packages/core/src/request.ts +3 -3
  18. package/packages/core/src/router.ts +115 -51
  19. package/packages/core/src/server.ts +47 -3
  20. package/packages/core/src/session.ts +29 -1
  21. package/packages/core/src/sessionHandlers/databaseHandler.ts +134 -0
  22. package/packages/core/src/sessionHandlers/redisHandler.ts +230 -0
  23. package/packages/core/src/types.ts +12 -6
  24. package/packages/core/src/websocket.ts +11 -2
  25. package/packages/core/src/websocketConnection.ts +4 -2
  26. package/packages/frond/src/engine.ts +66 -1
  27. package/packages/orm/src/autoCrud.ts +17 -12
  28. package/packages/orm/src/baseModel.ts +99 -21
  29. package/packages/orm/src/database.ts +197 -69
  30. package/packages/orm/src/databaseResult.ts +207 -0
  31. package/packages/orm/src/index.ts +6 -3
  32. package/packages/orm/src/migration.ts +296 -71
  33. package/packages/orm/src/model.ts +1 -0
  34. package/packages/orm/src/types.ts +1 -0
@@ -12,7 +12,11 @@ export function syncModels(models: DiscoveredModel[]): void {
12
12
  const adapter = getAdapter() as SQLiteAdapter;
13
13
 
14
14
  for (const { definition } of models) {
15
- const { tableName, fields, softDelete } = definition;
15
+ const { tableName, fields, softDelete, fieldMapping } = definition;
16
+ const mapping = fieldMapping ?? {};
17
+
18
+ // Helper to get DB column name for a JS property name
19
+ const getDbCol = (prop: string): string => mapping[prop] ?? prop;
16
20
 
17
21
  // If softDelete is enabled, ensure is_deleted field exists
18
22
  const allFields = { ...fields };
@@ -23,15 +27,22 @@ export function syncModels(models: DiscoveredModel[]): void {
23
27
  };
24
28
  }
25
29
 
30
+ // Remap field keys to DB column names for table creation/migration
31
+ const dbFields: Record<string, typeof allFields[string]> = {};
32
+ for (const [fieldName, def] of Object.entries(allFields)) {
33
+ const dbCol = getDbCol(fieldName);
34
+ dbFields[dbCol] = def;
35
+ }
36
+
26
37
  if (!adapter.tableExists(tableName)) {
27
- adapter.createTable(tableName, allFields);
38
+ adapter.createTable(tableName, dbFields);
28
39
  console.log(` Created table: ${tableName}`);
29
40
  } else {
30
41
  // Check for new columns
31
42
  const existing = adapter.getTableColumns(tableName);
32
43
  const existingNames = new Set(existing.map((c) => c.name));
33
44
 
34
- for (const [colName, def] of Object.entries(allFields)) {
45
+ for (const [colName, def] of Object.entries(dbFields)) {
35
46
  if (!existingNames.has(colName)) {
36
47
  adapter.addColumn(tableName, colName, def);
37
48
  console.log(` Added column: ${tableName}.${colName}`);
@@ -47,7 +58,7 @@ export function syncModels(models: DiscoveredModel[]): void {
47
58
  const MIGRATION_TABLE = "tina4_migration";
48
59
 
49
60
  /**
50
- * Ensure the migration tracking table exists.
61
+ * Ensure the migration tracking table exists with batch support.
51
62
  */
52
63
  export function ensureMigrationTable(): void {
53
64
  const adapter = getAdapter() as SQLiteAdapter;
@@ -58,6 +69,17 @@ export function ensureMigrationTable(): void {
58
69
  batch: { type: "integer", required: true },
59
70
  applied_at: { type: "datetime", default: "now" },
60
71
  });
72
+ } else {
73
+ // Ensure batch column exists on older tables that only had passed/description
74
+ try {
75
+ const cols = adapter.getTableColumns(MIGRATION_TABLE);
76
+ const colNames = new Set(cols.map((c) => c.name));
77
+ if (!colNames.has("batch")) {
78
+ adapter.execute(`ALTER TABLE "${MIGRATION_TABLE}" ADD COLUMN batch INTEGER NOT NULL DEFAULT 1`);
79
+ }
80
+ } catch {
81
+ // ignore — column may already exist
82
+ }
61
83
  }
62
84
  }
63
85
 
@@ -139,20 +161,74 @@ export function removeMigrationRecord(name: string): void {
139
161
  }
140
162
 
141
163
  /**
142
- * Rollback the last batch of migrations.
143
- * Expects a map of migration name -> down function.
164
+ * Rollback the last batch of migrations using .down.sql files.
165
+ *
166
+ * For each migration in the last batch (in reverse order):
167
+ * 1. Looks for a corresponding .down.sql file on disk
168
+ * 2. If found, reads and executes the SQL statements
169
+ * 3. If not found, logs a warning
170
+ * 4. Deletes the tracking record either way
171
+ *
172
+ * @param migrationsDir - Directory containing migration files (default: "migrations")
173
+ * @param delimiter - SQL statement delimiter (default: ";")
174
+ * @returns Array of rolled-back migration names
144
175
  */
145
176
  export function rollback(
146
- downFunctions: Map<string, () => void>,
177
+ migrationsDir?: string | Map<string, () => void>,
178
+ delimiter?: string,
147
179
  ): string[] {
180
+ // Handle legacy API: if first arg is a Map, use old behaviour
181
+ if (migrationsDir instanceof Map) {
182
+ const downFunctions = migrationsDir;
183
+ const migrations = getLastBatchMigrations();
184
+ const rolledBack: string[] = [];
185
+ for (const migration of migrations) {
186
+ const down = downFunctions.get(migration.name);
187
+ if (down) {
188
+ down();
189
+ }
190
+ removeMigrationRecord(migration.name);
191
+ rolledBack.push(migration.name);
192
+ }
193
+ return rolledBack;
194
+ }
195
+
196
+ const dir = resolve(migrationsDir ?? "migrations");
197
+ const delim = delimiter ?? ";";
198
+ const db = getAdapter();
148
199
  const migrations = getLastBatchMigrations();
149
200
  const rolledBack: string[] = [];
150
201
 
151
202
  for (const migration of migrations) {
152
- const down = downFunctions.get(migration.name);
153
- if (down) {
154
- down();
203
+ // Determine the .down.sql filename
204
+ const downFile = `${migration.name}.down.sql`;
205
+ const downPath = join(dir, downFile);
206
+
207
+ if (existsSync(downPath)) {
208
+ const sqlContent = readFileSync(downPath, "utf-8").trim();
209
+ if (sqlContent) {
210
+ const statements = splitStatements(sqlContent, delim);
211
+ try {
212
+ db.startTransaction();
213
+ for (const stmt of statements) {
214
+ db.execute(stmt);
215
+ }
216
+ db.commit();
217
+ } catch (err) {
218
+ try {
219
+ db.rollback();
220
+ } catch {
221
+ // rollback may fail if auto-rolled-back
222
+ }
223
+ const msg = err instanceof Error ? err.message : String(err);
224
+ console.error(` Rollback failed for ${migration.name}: ${msg}`);
225
+ // Still remove the record so the migration can be re-applied
226
+ }
227
+ }
228
+ } else {
229
+ console.warn(` Warning: No .down.sql file found for ${migration.name} — skipping SQL execution`);
155
230
  }
231
+
156
232
  removeMigrationRecord(migration.name);
157
233
  rolledBack.push(migration.name);
158
234
  }
@@ -186,6 +262,16 @@ export interface MigrationResult {
186
262
  failed: string[];
187
263
  }
188
264
 
265
+ /**
266
+ * Result returned by the `status()` function.
267
+ */
268
+ export interface MigrationStatus {
269
+ /** Filenames of completed (already applied) migrations. */
270
+ completed: string[];
271
+ /** Filenames of pending (not yet applied) migrations. */
272
+ pending: string[];
273
+ }
274
+
189
275
  /**
190
276
  * Split SQL text into individual statements on the given delimiter.
191
277
  *
@@ -228,17 +314,44 @@ function splitStatements(sql: string, delimiter = ";"): string[] {
228
314
  return statements;
229
315
  }
230
316
 
317
+ /**
318
+ * Sort migration filenames supporting both naming patterns:
319
+ * - Sequential: 000001_name.sql, 000002_name.sql
320
+ * - Timestamp: 20240315120000_name.sql (YYYYMMDDHHMMSS)
321
+ *
322
+ * Both patterns start with digits followed by underscore, so alphabetical
323
+ * sort works correctly for both (zero-padded sequential and timestamp).
324
+ */
325
+ function sortMigrationFiles(files: string[]): string[] {
326
+ return [...files].sort((a, b) => {
327
+ const aPrefix = a.match(/^(\d+)/);
328
+ const bPrefix = b.match(/^(\d+)/);
329
+ if (aPrefix && bPrefix) {
330
+ // Compare numeric prefixes — handles both 000001 and 20240315120000
331
+ const aNum = BigInt(aPrefix[1]);
332
+ const bNum = BigInt(bPrefix[1]);
333
+ if (aNum < bNum) return -1;
334
+ if (aNum > bNum) return 1;
335
+ return a.localeCompare(b);
336
+ }
337
+ return a.localeCompare(b);
338
+ });
339
+ }
340
+
231
341
  /**
232
342
  * Run all pending SQL-file migrations.
233
343
  *
234
- * Matches the Python `migrate(db, migration_folder, delimiter)` API.
344
+ * Supports both naming patterns:
345
+ * - Sequential: 000001_description.sql
346
+ * - Timestamp: YYYYMMDDHHMMSS_description.sql
235
347
  *
236
348
  * 1. Creates the `tina4_migration` tracking table if it doesn't exist.
237
- * 2. Scans `migrationsDir` for `NNNNNN_description.sql` files (sorted).
349
+ * 2. Scans `migrationsDir` for `.sql` files (excluding `.down.sql`), sorted.
238
350
  * 3. Skips files already recorded as applied.
239
351
  * 4. Splits file content on `delimiter` and executes each statement.
240
- * 5. On success records the migration; on error logs and continues.
241
- * 6. Returns a summary of applied / skipped / failed files.
352
+ * 5. On success records the migration with the current batch number.
353
+ * 6. On error logs and continues.
354
+ * 7. Returns a summary of applied / skipped / failed files.
242
355
  *
243
356
  * @param adapter - A DatabaseAdapter instance (or omit to use the global adapter).
244
357
  * @param options - Optional configuration.
@@ -257,46 +370,82 @@ export async function migrate(
257
370
  return result;
258
371
  }
259
372
 
260
- // Ensure tracking table
373
+ // Ensure tracking table with batch support
261
374
  if (!db.tableExists(MIGRATION_TABLE)) {
262
375
  db.execute(`CREATE TABLE IF NOT EXISTS "${MIGRATION_TABLE}" (
263
376
  id INTEGER PRIMARY KEY AUTOINCREMENT,
264
- description TEXT NOT NULL,
265
- content TEXT,
266
- passed INTEGER NOT NULL DEFAULT 0,
267
- run_at TEXT NOT NULL
377
+ name TEXT NOT NULL,
378
+ batch INTEGER NOT NULL DEFAULT 1,
379
+ applied_at TEXT NOT NULL
268
380
  )`);
381
+ } else {
382
+ // Migrate old schema: if table has 'description' + 'passed' columns, migrate data
383
+ try {
384
+ const testRows = db.query<Record<string, unknown>>(
385
+ `SELECT * FROM "${MIGRATION_TABLE}" LIMIT 0`,
386
+ );
387
+ // Check column names by querying pragma or just try adding batch
388
+ } catch {
389
+ // ignore
390
+ }
269
391
  }
270
392
 
271
- // Collect .sql files (exclude .down.sql), sorted alphabetically
272
- const files = readdirSync(dir)
273
- .filter((f) => f.endsWith(".sql") && !f.endsWith(".down.sql"))
274
- .sort();
393
+ // Collect .sql files (exclude .down.sql), sorted by prefix
394
+ const files = sortMigrationFiles(
395
+ readdirSync(dir).filter((f) => f.endsWith(".sql") && !f.endsWith(".down.sql")),
396
+ );
275
397
 
276
398
  if (files.length === 0) return result;
277
399
 
400
+ // Determine the batch number for this run
401
+ let currentBatch = 1;
402
+ try {
403
+ const batchRows = db.query<{ max_batch: number | null }>(
404
+ `SELECT MAX(batch) as max_batch FROM "${MIGRATION_TABLE}"`,
405
+ );
406
+ currentBatch = (batchRows[0]?.max_batch ?? 0) + 1;
407
+ } catch {
408
+ // Table may have old schema without batch column
409
+ currentBatch = 1;
410
+ }
411
+
278
412
  for (const file of files) {
279
413
  const migrationId = file.replace(/\.sql$/, "");
280
414
 
281
- // Check if already applied (passed = 1)
282
- const existing = db.query<{ id: number; passed: number }>(
283
- `SELECT id, passed FROM "${MIGRATION_TABLE}" WHERE description = ?`,
284
- [migrationId],
285
- );
415
+ // Check if already applied — supports both 'name' and the legacy 'description' column
416
+ let alreadyApplied = false;
417
+ try {
418
+ const existing = db.query<{ id: number }>(
419
+ `SELECT id FROM "${MIGRATION_TABLE}" WHERE name = ?`,
420
+ [migrationId],
421
+ );
422
+ alreadyApplied = existing.length > 0;
423
+ } catch {
424
+ // Might be old schema with 'description' column instead of 'name'
425
+ try {
426
+ const existing = db.query<{ id: number; passed: number }>(
427
+ `SELECT id, passed FROM "${MIGRATION_TABLE}" WHERE description = ?`,
428
+ [migrationId],
429
+ );
430
+ if (existing.length > 0 && existing[0].passed === 1) {
431
+ alreadyApplied = true;
432
+ } else if (existing.length > 0 && existing[0].passed === 0) {
433
+ // Failed record from old schema — remove to retry
434
+ db.execute(
435
+ `DELETE FROM "${MIGRATION_TABLE}" WHERE description = ?`,
436
+ [migrationId],
437
+ );
438
+ }
439
+ } catch {
440
+ // Neither column exists — continue
441
+ }
442
+ }
286
443
 
287
- if (existing.length > 0 && existing[0].passed === 1) {
444
+ if (alreadyApplied) {
288
445
  result.skipped.push(file);
289
446
  continue;
290
447
  }
291
448
 
292
- // If there's a failed record (passed = 0), remove it so we can retry
293
- if (existing.length > 0 && existing[0].passed === 0) {
294
- db.execute(
295
- `DELETE FROM "${MIGRATION_TABLE}" WHERE description = ?`,
296
- [migrationId],
297
- );
298
- }
299
-
300
449
  const sqlContent = readFileSync(join(dir, file), "utf-8").trim();
301
450
  if (!sqlContent) {
302
451
  result.skipped.push(file);
@@ -312,12 +461,20 @@ export async function migrate(
312
461
  db.execute(stmt);
313
462
  }
314
463
 
315
- // Record as passed
464
+ // Record as applied with batch number
316
465
  const now = new Date().toISOString();
317
- db.execute(
318
- `INSERT INTO "${MIGRATION_TABLE}" (description, content, passed, run_at) VALUES (?, ?, 1, ?)`,
319
- [migrationId, sqlContent, now],
320
- );
466
+ try {
467
+ db.execute(
468
+ `INSERT INTO "${MIGRATION_TABLE}" (name, batch, applied_at) VALUES (?, ?, ?)`,
469
+ [migrationId, currentBatch, now],
470
+ );
471
+ } catch {
472
+ // Old schema fallback — try description/content/passed columns
473
+ db.execute(
474
+ `INSERT INTO "${MIGRATION_TABLE}" (description, content, passed, run_at) VALUES (?, ?, 1, ?)`,
475
+ [migrationId, sqlContent, now],
476
+ );
477
+ }
321
478
 
322
479
  db.commit();
323
480
  result.applied.push(file);
@@ -339,18 +496,88 @@ export async function migrate(
339
496
  }
340
497
 
341
498
  /**
342
- * Create a new empty SQL migration file with the next sequence number.
499
+ * Get migration status: which migrations are completed and which are pending.
343
500
  *
344
- * Matches the Python `create_migration(description, migration_folder)` API.
501
+ * @param adapter - A DatabaseAdapter instance (or omit to use the global adapter).
502
+ * @param options - Optional configuration.
503
+ * @returns Object with `completed` and `pending` arrays of filenames.
504
+ */
505
+ export async function status(
506
+ adapter?: DatabaseAdapter,
507
+ options?: { migrationsDir?: string },
508
+ ): Promise<MigrationStatus> {
509
+ const db = adapter ?? getAdapter();
510
+ const dir = resolve(options?.migrationsDir ?? "migrations");
511
+
512
+ const result: MigrationStatus = { completed: [], pending: [] };
513
+
514
+ if (!existsSync(dir)) {
515
+ return result;
516
+ }
517
+
518
+ // Ensure tracking table exists
519
+ if (!db.tableExists(MIGRATION_TABLE)) {
520
+ // No table means nothing has been run — all files are pending
521
+ const files = sortMigrationFiles(
522
+ readdirSync(dir).filter((f) => f.endsWith(".sql") && !f.endsWith(".down.sql")),
523
+ );
524
+ result.pending = files;
525
+ return result;
526
+ }
527
+
528
+ // Collect .sql files (exclude .down.sql)
529
+ const files = sortMigrationFiles(
530
+ readdirSync(dir).filter((f) => f.endsWith(".sql") && !f.endsWith(".down.sql")),
531
+ );
532
+
533
+ // Get all applied migration names from the DB
534
+ const appliedNames = new Set<string>();
535
+ try {
536
+ const rows = db.query<{ name: string }>(
537
+ `SELECT name FROM "${MIGRATION_TABLE}"`,
538
+ );
539
+ for (const row of rows) {
540
+ appliedNames.add(row.name);
541
+ }
542
+ } catch {
543
+ // Old schema with 'description' column
544
+ try {
545
+ const rows = db.query<{ description: string; passed: number }>(
546
+ `SELECT description, passed FROM "${MIGRATION_TABLE}" WHERE passed = 1`,
547
+ );
548
+ for (const row of rows) {
549
+ appliedNames.add(row.description);
550
+ }
551
+ } catch {
552
+ // No valid tracking — treat all as pending
553
+ }
554
+ }
555
+
556
+ for (const file of files) {
557
+ const migrationId = file.replace(/\.sql$/, "");
558
+ if (appliedNames.has(migrationId)) {
559
+ result.completed.push(file);
560
+ } else {
561
+ result.pending.push(file);
562
+ }
563
+ }
564
+
565
+ return result;
566
+ }
567
+
568
+ /**
569
+ * Create a new empty SQL migration file with a timestamp prefix.
570
+ *
571
+ * Creates BOTH the up migration (.sql) and the down migration (.down.sql).
345
572
  *
346
573
  * @param description - Human-readable description (used in filename).
347
574
  * @param options - Optional configuration.
348
- * @returns The absolute path to the created file.
575
+ * @returns Object with paths to the created up and down files.
349
576
  */
350
577
  export async function createMigration(
351
578
  description: string,
352
579
  options?: { migrationsDir?: string },
353
- ): Promise<string> {
580
+ ): Promise<{ upPath: string; downPath: string }> {
354
581
  const dir = resolve(options?.migrationsDir ?? "migrations");
355
582
 
356
583
  // Ensure directory exists
@@ -358,35 +585,33 @@ export async function createMigration(
358
585
  mkdirSync(dir, { recursive: true });
359
586
  }
360
587
 
361
- // Determine next sequence number
362
- const existing = existsSync(dir)
363
- ? readdirSync(dir)
364
- .filter((f) => f.endsWith(".sql") && !f.endsWith(".down.sql"))
365
- .sort()
366
- : [];
367
-
368
- let nextSeq = 1;
369
- if (existing.length > 0) {
370
- const last = existing[existing.length - 1];
371
- const match = last.match(/^(\d+)/);
372
- if (match) {
373
- nextSeq = parseInt(match[1], 10) + 1;
374
- }
375
- }
376
-
377
588
  // Sanitise description for filename
378
589
  const safeName = description
379
590
  .toLowerCase()
380
591
  .replace(/[^a-z0-9]+/g, "_")
381
592
  .replace(/^_|_$/g, "");
382
593
 
383
- const seqStr = String(nextSeq).padStart(6, "0");
384
- const fileName = `${seqStr}_${safeName}.sql`;
385
- const filePath = join(dir, fileName);
386
-
387
- const template = `-- Migration: ${description}\n-- Created: ${new Date().toISOString()}\n\n`;
388
-
389
- writeFileSync(filePath, template, "utf-8");
390
-
391
- return filePath;
594
+ // Use YYYYMMDDHHMMSS timestamp prefix
595
+ const now = new Date();
596
+ const timestamp = [
597
+ now.getFullYear(),
598
+ String(now.getMonth() + 1).padStart(2, "0"),
599
+ String(now.getDate()).padStart(2, "0"),
600
+ String(now.getHours()).padStart(2, "0"),
601
+ String(now.getMinutes()).padStart(2, "0"),
602
+ String(now.getSeconds()).padStart(2, "0"),
603
+ ].join("");
604
+
605
+ const upFileName = `${timestamp}_${safeName}.sql`;
606
+ const downFileName = `${timestamp}_${safeName}.down.sql`;
607
+ const upPath = join(dir, upFileName);
608
+ const downPath = join(dir, downFileName);
609
+
610
+ const upTemplate = `-- Migration: ${description}\n-- Created: ${now.toISOString()}\n\n`;
611
+ const downTemplate = `-- Rollback: ${description}\n-- Created: ${now.toISOString()}\n\n`;
612
+
613
+ writeFileSync(upPath, upTemplate, "utf-8");
614
+ writeFileSync(downPath, downTemplate, "utf-8");
615
+
616
+ return { upPath, downPath };
392
617
  }
@@ -39,6 +39,7 @@ export async function discoverModels(modelsDir: string): Promise<DiscoveredModel
39
39
  const definition: ModelDefinition = {
40
40
  tableName: ModelClass.tableName,
41
41
  fields: ModelClass.fields as Record<string, FieldDefinition>,
42
+ fieldMapping: ModelClass.fieldMapping as Record<string, string> | undefined,
42
43
  softDelete: ModelClass.softDelete ?? false,
43
44
  tableFilter: ModelClass.tableFilter,
44
45
  hasOne: ModelClass.hasOne as RelationshipDefinition[] | undefined,
@@ -21,6 +21,7 @@ export interface RelationshipDefinition {
21
21
  export interface ModelDefinition {
22
22
  tableName: string;
23
23
  fields: Record<string, FieldDefinition>;
24
+ fieldMapping?: Record<string, string>;
24
25
  softDelete?: boolean;
25
26
  tableFilter?: string;
26
27
  hasOne?: RelationshipDefinition[];