millas 0.2.19 → 0.2.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,7 @@
1
1
  'use strict';
2
2
 
3
3
  const { tableFromClass, modelNameToTable, isSnakeCase } = require('./utils');
4
+ const { indexName } = require('./operations/indexes');
4
5
 
5
6
  /**
6
7
  * ProjectState
@@ -29,7 +30,7 @@ class ProjectState {
29
30
 
30
31
  // ─── Mutation (called by operations during replay) ────────────────────────
31
32
 
32
- createModel(table, fields) {
33
+ createModel(table, fields, indexes = [], uniqueTogether = []) {
33
34
  if (this.models.has(table)) {
34
35
  throw new Error(`ProjectState: table "${table}" already exists`);
35
36
  }
@@ -37,13 +38,39 @@ class ProjectState {
37
38
  for (const [name, def] of Object.entries(fields)) {
38
39
  fieldMap.set(name, normaliseField(def));
39
40
  }
40
- this.models.set(table, { table, fields: fieldMap });
41
+ this.models.set(table, { table, fields: fieldMap, indexes: [...indexes], uniqueTogether: [...uniqueTogether] });
41
42
  }
42
43
 
43
44
  deleteModel(table) {
44
45
  this.models.delete(table);
45
46
  }
46
47
 
48
+ addIndex(table, index) {
49
+ const model = this._requireModel(table);
50
+ model.indexes = model.indexes || [];
51
+ model.indexes.push(index);
52
+ }
53
+
54
+ removeIndex(table, index) {
55
+ const model = this._requireModel(table);
56
+ model.indexes = (model.indexes || []).filter(
57
+ i => JSON.stringify(i) !== JSON.stringify(index)
58
+ );
59
+ }
60
+
61
+ renameIndex(table, oldName, newName) {
62
+ const model = this._requireModel(table);
63
+ model.indexes = (model.indexes || []).map(i => {
64
+ const iName = i.name || indexName(model.table, i.fields, i.unique);
65
+ return iName === oldName ? { ...i, name: newName } : i;
66
+ });
67
+ }
68
+
69
+ alterUniqueTogether(table, uniqueTogether) {
70
+ const model = this._requireModel(table);
71
+ model.uniqueTogether = uniqueTogether;
72
+ }
73
+
47
74
  addField(table, column, fieldDef) {
48
75
  const model = this._requireModel(table);
49
76
  if (model.fields.has(column)) {
@@ -91,9 +118,13 @@ class ProjectState {
91
118
  toSchema() {
92
119
  const schema = {};
93
120
  for (const [table, model] of this.models) {
94
- schema[table] = {};
121
+ schema[table] = {
122
+ fields: {},
123
+ indexes: model.indexes || [],
124
+ uniqueTogether: model.uniqueTogether || [],
125
+ };
95
126
  for (const [col, def] of model.fields) {
96
- schema[table][col] = { ...def };
127
+ schema[table].fields[col] = { ...def };
97
128
  }
98
129
  }
99
130
  return schema;
@@ -107,7 +138,12 @@ class ProjectState {
107
138
  for (const [col, def] of model.fields) {
108
139
  fieldMap.set(col, { ...def });
109
140
  }
110
- copy.models.set(table, { table, fields: fieldMap });
141
+ copy.models.set(table, {
142
+ table,
143
+ fields: fieldMap,
144
+ indexes: JSON.parse(JSON.stringify(model.indexes || [])),
145
+ uniqueTogether: JSON.parse(JSON.stringify(model.uniqueTogether || [])),
146
+ });
111
147
  }
112
148
  return copy;
113
149
  }
@@ -1,81 +1,49 @@
1
1
  'use strict';
2
2
 
3
- /**
4
- * column.js
5
- *
6
- * Knex column builder helpers shared across all field-level operations.
7
- *
8
- * Having these in one place means:
9
- * - The type → knex method mapping is never duplicated
10
- * - AlterField reuses the same logic as AddField, with `.alter()` appended
11
- * - FK constraint attachment is explicit and separated from column creation
12
- *
13
- * Exports:
14
- * applyColumn(t, name, def) — add a new column to a table builder
15
- * alterColumn(t, name, def) — modify an existing column (.alter())
16
- * attachFKConstraints(db, table, fields) — attach FK constraints via ALTER TABLE
17
- * after all tables in a migration exist
18
- */
19
-
20
3
  // ─── Core column builder ──────────────────────────────────────────────────────
21
4
 
22
- /**
23
- * Add a single column to a knex table builder.
24
- *
25
- * Handles all supported field types, nullability, uniqueness, defaults,
26
- * and inline FK constraints (references).
27
- *
28
- * Pass `{ ...def, references: null }` to suppress FK constraint creation
29
- * when deferring constraints to a later ALTER TABLE pass.
30
- *
31
- * @param {object} t — knex table builder (from createTable / table callback)
32
- * @param {string} name — column name
33
- * @param {object} def — normalised field definition from ProjectState.normaliseField()
34
- */
35
- function applyColumn(t, name, def) {
5
+ function applyColumn(t, name, def, tableName) {
36
6
  const col = _buildColumn(t, name, def);
37
- if (!col) return; // 'id' handled internally by _buildColumn
7
+ if (!col) return;
38
8
 
39
9
  _applyModifiers(col, def);
10
+
11
+ // Postgres enum: stored as text + separate CHECK constraint
12
+ if (def.type === 'enum' && def.enumValues?.length) {
13
+ const client = t.client?.config?.client || '';
14
+ if (client.includes('pg') || client.includes('postgres')) {
15
+ const values = def.enumValues.map(v => `'${v}'`).join(', ');
16
+ const constraintName = `${tableName || 'tbl'}_${name}_check`;
17
+ t.check(`"${name}" in (${values})`, [], constraintName);
18
+ }
19
+ }
40
20
  }
41
21
 
42
- /**
43
- * Modify an existing column in a knex alterTable builder.
44
- * Identical to applyColumn but appends `.alter()` — required by knex to
45
- * signal that this is a column modification, not a new column addition.
46
- *
47
- * Note: FK constraints are NOT altered here use attachFKConstraints()
48
- * to manage them separately. Most DBs require DROP CONSTRAINT + re-add
49
- * for FK changes, which is safer to do explicitly.
50
- *
51
- * @param {object} t knex table builder (from alterTable callback)
52
- * @param {string} name — column name
53
- * @param {object} def normalised field definition
54
- */
55
- function alterColumn(t, name, def) {
22
+ function alterColumn(t, name, def, tableName) {
23
+ const client = t.client?.config?.client || '';
24
+ const isPg = client.includes('pg') || client.includes('postgres');
25
+
26
+ if (isPg && def.type === 'enum' && def.enumValues?.length) {
27
+ // Postgres: ALTER COLUMN TYPE with inline CHECK is invalid.
28
+ // Drop old CHECK constraint, add new one.
29
+ const constraintName = `${tableName || 'tbl'}_${name}_check`;
30
+ const values = def.enumValues.map(v => `'${v}'`).join(', ');
31
+ try { t.dropChecks(constraintName); } catch {}
32
+ t.check(`"${name}" in (${values})`, [], constraintName);
33
+ if (def.nullable) t.setNullable(name);
34
+ else t.dropNullable(name);
35
+ return;
36
+ }
37
+
56
38
  const col = _buildColumn(t, name, def, { forAlter: true });
57
39
  if (!col) return;
58
40
 
59
- _applyModifiers(col, def, { skipFK: true }); // FKs not altered inline
41
+ _applyModifiers(col, def, { skipFK: true });
60
42
  col.alter();
61
43
  }
62
44
 
63
- /**
64
- * Attach FK constraints for a set of fields on a table.
65
- *
66
- * Called by MigrationRunner AFTER all tables in a migration have been
67
- * created — this guarantees all referenced tables exist.
68
- *
69
- * All FK columns for a given table are batched into a single ALTER TABLE
70
- * statement, not one per column.
71
- *
72
- * @param {import('knex').Knex} db
73
- * @param {string} table — table name
74
- * @param {object} fields — { columnName: normalisedDef, ... }
75
- */
76
45
  async function attachFKConstraints(db, table, fields) {
77
46
  const fkEntries = Object.entries(fields).filter(([, def]) => def.references);
78
-
79
47
  if (fkEntries.length === 0) return;
80
48
 
81
49
  await db.schema.alterTable(table, (t) => {
@@ -91,38 +59,27 @@ async function attachFKConstraints(db, table, fields) {
91
59
 
92
60
  // ─── Internal helpers ─────────────────────────────────────────────────────────
93
61
 
94
- /**
95
- * Build a knex column builder for a given field type.
96
- * Returns null for 'id' fields (handled by t.increments which returns void).
97
- *
98
- * @param {object} t
99
- * @param {string} name
100
- * @param {object} def
101
- * @param {object} [opts]
102
- * @param {boolean} [opts.forAlter] — if true, skip t.increments (can't alter PK)
103
- * @returns {object|null} knex column builder
104
- */
105
62
  function _buildColumn(t, name, def, opts = {}) {
106
63
  switch (def.type) {
107
64
  case 'id':
108
65
  if (!opts.forAlter) t.increments(name).primary();
109
- return null; // increments() doesn't return a chainable column builder
66
+ return null;
110
67
 
111
68
  case 'string':
69
+ case 'email':
70
+ case 'url':
71
+ case 'slug':
72
+ case 'ipAddress':
112
73
  return t.string(name, def.max || 255);
113
74
 
114
75
  case 'text':
115
76
  return t.text(name);
116
77
 
117
78
  case 'integer':
118
- return def.unsigned
119
- ? t.integer(name).unsigned()
120
- : t.integer(name);
79
+ return def.unsigned ? t.integer(name).unsigned() : t.integer(name);
121
80
 
122
81
  case 'bigInteger':
123
- return def.unsigned
124
- ? t.bigInteger(name).unsigned()
125
- : t.bigInteger(name);
82
+ return def.unsigned ? t.bigInteger(name).unsigned() : t.bigInteger(name);
126
83
 
127
84
  case 'float':
128
85
  return t.float(name);
@@ -142,43 +99,36 @@ function _buildColumn(t, name, def, opts = {}) {
142
99
  case 'timestamp':
143
100
  return t.timestamp(name, { useTz: false });
144
101
 
145
- case 'enum':
102
+ case 'enum': {
103
+ const client = t.client?.config?.client || '';
104
+ if (client.includes('pg') || client.includes('postgres')) {
105
+ // Store as text — CHECK constraint added separately in applyColumn
106
+ return t.text(name);
107
+ }
146
108
  return t.enu(name, def.enumValues || []);
109
+ }
147
110
 
148
111
  case 'uuid':
149
112
  return t.uuid(name);
150
113
 
151
114
  default:
152
- return t.string(name); // safe fallback
115
+ return t.string(name);
153
116
  }
154
117
  }
155
118
 
156
- /**
157
- * Apply nullability, uniqueness, default, and FK constraint modifiers
158
- * to an already-built knex column builder.
159
- *
160
- * @param {object} col — knex column builder
161
- * @param {object} def — normalised field def
162
- * @param {object} [opts]
163
- * @param {boolean} [opts.skipFK] — skip FK constraint (used by alterColumn)
164
- */
165
119
  function _applyModifiers(col, def, opts = {}) {
166
- // Nullability
167
120
  if (def.nullable) {
168
121
  col.nullable();
169
122
  } else if (def.type !== 'id') {
170
123
  col.notNullable();
171
124
  }
172
125
 
173
- // Uniqueness
174
126
  if (def.unique) col.unique();
175
127
 
176
- // Default value
177
128
  if (def.default !== null && def.default !== undefined) {
178
129
  col.defaultTo(def.default);
179
130
  }
180
131
 
181
- // Inline FK constraint — skipped when deferring to attachFKConstraints()
182
132
  if (!opts.skipFK && def.references) {
183
133
  const ref = def.references;
184
134
  col
@@ -188,4 +138,4 @@ function _applyModifiers(col, def, opts = {}) {
188
138
  }
189
139
  }
190
140
 
191
- module.exports = { applyColumn, alterColumn, attachFKConstraints };
141
+ module.exports = { applyColumn, alterColumn, attachFKConstraints };
@@ -53,7 +53,7 @@ class AddField extends BaseOperation {
53
53
  await this._safeBackfill(db, def);
54
54
  } else {
55
55
  await db.schema.table(this.table, (t) => {
56
- applyColumn(t, this.column, def);
56
+ applyColumn(t, this.column, def, this.table);
57
57
  });
58
58
  }
59
59
  }
@@ -94,7 +94,7 @@ class AddField extends BaseOperation {
94
94
 
95
95
  // Step 1: add as nullable
96
96
  await db.schema.table(this.table, (t) => {
97
- applyColumn(t, this.column, { ...def, nullable: true, default: null });
97
+ applyColumn(t, this.column, { ...def, nullable: true, default: null }, this.table);
98
98
  });
99
99
 
100
100
  // Step 2: backfill
@@ -115,7 +115,7 @@ class AddField extends BaseOperation {
115
115
 
116
116
  // Step 3: tighten to NOT NULL
117
117
  await db.schema.alterTable(this.table, (t) => {
118
- alterColumn(t, this.column, { ...def, nullable: false });
118
+ alterColumn(t, this.column, { ...def, nullable: false }, this.table);
119
119
  });
120
120
  }
121
121
  }
@@ -148,7 +148,7 @@ class RemoveField extends BaseOperation {
148
148
 
149
149
  async down(db) {
150
150
  await db.schema.table(this.table, (t) => {
151
- applyColumn(t, this.column, normaliseField(this.field));
151
+ applyColumn(t, this.column, normaliseField(this.field), this.table);
152
152
  });
153
153
  }
154
154
 
@@ -186,13 +186,13 @@ class AlterField extends BaseOperation {
186
186
 
187
187
  async up(db) {
188
188
  await db.schema.alterTable(this.table, (t) => {
189
- alterColumn(t, this.column, normaliseField(this.field));
189
+ alterColumn(t, this.column, normaliseField(this.field), this.table);
190
190
  });
191
191
  }
192
192
 
193
193
  async down(db) {
194
194
  await db.schema.alterTable(this.table, (t) => {
195
- alterColumn(t, this.column, normaliseField(this.previousField));
195
+ alterColumn(t, this.column, normaliseField(this.previousField), this.table);
196
196
  });
197
197
  }
198
198
 
@@ -22,34 +22,17 @@ const { applyColumn, alterColumn,
22
22
  const { CreateModel, DeleteModel, RenameModel } = require('./models');
23
23
  const { AddField, RemoveField,
24
24
  AlterField, RenameField } = require('./fields');
25
+ const { AddIndex, RemoveIndex,
26
+ AlterUniqueTogether, RenameIndex } = require('./indexes');
25
27
  const { RunSQL } = require('./special');
26
28
  const { deserialise, migrations, _tableFromName } = require('./registry');
27
29
 
28
30
  module.exports = {
29
- // Base
30
31
  BaseOperation,
31
-
32
- // Column helpers
33
- applyColumn,
34
- alterColumn,
35
- attachFKConstraints,
36
-
37
- // Table-level ops
38
- CreateModel,
39
- DeleteModel,
40
- RenameModel,
41
-
42
- // Field-level ops
43
- AddField,
44
- RemoveField,
45
- AlterField,
46
- RenameField,
47
-
48
- // Escape hatch
32
+ applyColumn, alterColumn, attachFKConstraints,
33
+ CreateModel, DeleteModel, RenameModel,
34
+ AddField, RemoveField, AlterField, RenameField,
35
+ AddIndex, RemoveIndex, AlterUniqueTogether, RenameIndex,
49
36
  RunSQL,
50
-
51
- // Registry
52
- deserialise,
53
- migrations,
54
- _tableFromName,
37
+ deserialise, migrations, _tableFromName,
55
38
  };
@@ -0,0 +1,197 @@
1
+ 'use strict';
2
+
3
+ const { BaseOperation } = require('./base');
4
+
5
+ // ─── helpers ──────────────────────────────────────────────────────────────────
6
+
7
+ function indexName(table, fields, unique = false) {
8
+ const fieldPart = fields.map(f => f.replace(/^-/, '')).join('_');
9
+ return `${table}_${fieldPart}_${unique ? 'unique' : 'index'}`;
10
+ }
11
+
12
/**
 * Apply one index descriptor to a knex table builder.
 *
 * The original computed a `colsSql` string for descending columns that was
 * never used, and its `hasDesc` branch performed exactly the same call as the
 * plain branch — both removed here as dead code. Behavior is unchanged.
 *
 * @param {object} t — knex table builder
 * @param {string} table — table name (used to derive a default index name)
 * @param {object} idx — { fields: string[], name?: string, unique?: boolean }
 */
function _applyIndex(t, table, idx) {
  const { fields, name, unique } = idx;
  const idxName = name || indexName(table, fields, unique);
  // Strip the '-' descending marker — knex index()/unique() take bare column names.
  const columns = fields.map((f) => f.replace(/^-/, ''));

  if (unique) {
    t.unique(columns, { indexName: idxName });
  } else {
    // NOTE: knex cannot express per-column DESC in t.index(); a '-field'
    // marker affects only name derivation. A true DESC index would need raw SQL.
    t.index(columns, idxName);
  }
}
33
+
34
+ // ─── AddIndex ─────────────────────────────────────────────────────────────────
35
+
36
/**
 * AddIndex — creates an index (or unique constraint) on an existing table.
 * down() drops it again. Defect fixed: up() destructured `fields`/`name`/
 * `unique` and computed `idxName` without ever using them (the real work is
 * delegated to _applyIndex); that dead code is removed.
 */
class AddIndex extends BaseOperation {
  /**
   * @param {string} table
   * @param {object} index — { fields: string[], name?: string, unique?: boolean }
   */
  constructor(table, index) {
    super();
    this.type = 'AddIndex';
    this.table = table;
    this.index = index;
  }

  applyState(state) {
    state.addIndex(this.table, this.index);
  }

  // Delegates entirely to _applyIndex, which derives the name itself.
  async up(db) {
    await db.schema.table(this.table, (t) => _applyIndex(t, this.table, this.index));
  }

  async down(db) {
    const { fields, name, unique } = this.index;
    const idxName = name || indexName(this.table, fields, unique);
    const columns = fields.map((f) => f.replace(/^-/, ''));
    await db.schema.table(this.table, (t) => {
      if (unique) t.dropUnique(columns, idxName);
      else t.dropIndex(columns, idxName);
    });
  }

  toJSON() {
    return { type: 'AddIndex', table: this.table, index: this.index };
  }
}
72
+
73
+ // ─── RemoveIndex ──────────────────────────────────────────────────────────────
74
+
75
/**
 * RemoveIndex — drops an index previously created on `table`.
 * The full index descriptor is stored so down() can recreate it.
 */
class RemoveIndex extends BaseOperation {
  /**
   * @param {string} table
   * @param {object} index — { fields: string[], name?: string, unique?: boolean }
   */
  constructor(table, index) {
    super();
    this.type = 'RemoveIndex';
    this.table = table;
    this.index = index;
  }

  applyState(state) {
    state.removeIndex(this.table, this.index);
  }

  async up(db) {
    const { fields, name, unique } = this.index;
    const idxName = name || indexName(this.table, fields, unique);
    const columns = fields.map((f) => f.replace(/^-/, ''));
    await db.schema.table(this.table, (t) => {
      if (!unique) {
        t.dropIndex(columns, idxName);
      } else {
        t.dropUnique(columns, idxName);
      }
    });
  }

  // Reversal: recreate the index exactly as described.
  async down(db) {
    await db.schema.table(this.table, (t) => _applyIndex(t, this.table, this.index));
  }

  toJSON() {
    return { type: 'RemoveIndex', table: this.table, index: this.index };
  }
}
+ }
105
+
106
+ // ─── AlterUniqueTogether ──────────────────────────────────────────────────────
107
+
108
/**
 * AlterUniqueTogether — replaces a table's composite unique constraints.
 * up() drops the old sets and creates the new; down() does the inverse.
 */
class AlterUniqueTogether extends BaseOperation {
  /**
   * @param {string} table
   * @param {string[][]} newUnique — desired uniqueTogether sets
   * @param {string[][]} [oldUnique] — previous sets, needed to reverse in down()
   */
  constructor(table, newUnique, oldUnique = []) {
    super();
    this.type = 'AlterUniqueTogether';
    this.table = table;
    this.newUnique = newUnique;
    this.oldUnique = oldUnique;
  }

  applyState(state) {
    state.alterUniqueTogether(this.table, this.newUnique);
  }

  async up(db) {
    await this._apply(db, this.oldUnique, this.newUnique);
  }

  async down(db) {
    await this._apply(db, this.newUnique, this.oldUnique);
  }

  // Drop every constraint in `remove`, then create every constraint in `add`.
  async _apply(db, remove, add) {
    const nameFor = (fields) => `${this.table}_${fields.join('_')}_unique`;
    await db.schema.table(this.table, (t) => {
      for (const fields of remove) {
        // NOTE(review): schema-builder calls are usually deferred until the
        // statements run, so this catch may never fire — confirm against the
        // knex version in use before relying on it.
        try { t.dropUnique(fields, nameFor(fields)); } catch { /* already gone */ }
      }
      for (const fields of add) {
        t.unique(fields, { indexName: nameFor(fields) });
      }
    });
  }

  toJSON() {
    return {
      type: 'AlterUniqueTogether',
      table: this.table,
      newUnique: this.newUnique,
      oldUnique: this.oldUnique,
    };
  }
}
+ }
156
+
157
+ // ─── RenameIndex ─────────────────────────────────────────────────────────────
158
+
159
/**
 * RenameIndex — renames an index by dropping it and recreating it under the
 * new name (knex has no renameIndex primitive).
 *
 * Bug fixed: the original read `this.fields` in up(), down(), and toJSON(),
 * but no code ever assigned it — the recreated index always had an empty
 * column list and toJSON() serialised `fields: undefined`. The column list is
 * now an explicit, optional (backward-compatible) constructor argument.
 */
class RenameIndex extends BaseOperation {
  /**
   * @param {string} table
   * @param {string} oldName — current index name
   * @param {string} newName — replacement index name
   * @param {string[]} [fields] — columns the index covers, needed to recreate it
   */
  constructor(table, oldName, newName, fields = []) {
    super();
    this.type = 'RenameIndex';
    this.table = table;
    this.oldName = oldName;
    this.newName = newName;
    this.fields = fields;
  }

  applyState(state) {
    state.renameIndex(this.table, this.oldName, this.newName);
  }

  async up(db) {
    await this._recreate(db, this.oldName, this.newName);
  }

  async down(db) {
    await this._recreate(db, this.newName, this.oldName);
  }

  // Drop the index under `from`, then create it again under `to`.
  async _recreate(db, from, to) {
    await db.schema.table(this.table, (t) => {
      t.dropIndex([], from);
    });
    await db.schema.table(this.table, (t) => {
      t.index(this.fields || [], to);
    });
  }

  toJSON() {
    return {
      type: 'RenameIndex',
      table: this.table,
      oldName: this.oldName,
      newName: this.newName,
      fields: this.fields,
    };
  }
}
196
+
197
+ module.exports = { AddIndex, RemoveIndex, AlterUniqueTogether, RenameIndex, indexName, _applyIndex };
@@ -28,25 +28,30 @@ class CreateModel extends BaseOperation {
28
28
  /**
29
29
  * @param {string} table
30
30
  * @param {object} fields — { columnName: normalisedFieldDef }
31
+ * @param {Array} [indexes]
32
+ * @param {Array} [uniqueTogether]
31
33
  */
32
- constructor(table, fields) {
34
+ constructor(table, fields, indexes = [], uniqueTogether = []) {
33
35
  super();
34
- this.type = 'CreateModel';
35
- this.table = table;
36
- this.fields = fields;
36
+ this.type = 'CreateModel';
37
+ this.table = table;
38
+ this.fields = fields;
39
+ this.indexes = indexes;
40
+ this.uniqueTogether = uniqueTogether;
37
41
  }
38
42
 
39
43
  applyState(state) {
40
- state.createModel(this.table, this.fields);
44
+ state.createModel(this.table, this.fields, this.indexes, this.uniqueTogether);
41
45
  }
42
46
 
43
47
  // Standard up() — inline FKs. Safe when only one CreateModel in a migration.
44
48
  async up(db) {
45
49
  await db.schema.createTable(this.table, (t) => {
46
50
  for (const [name, def] of Object.entries(this.fields)) {
47
- applyColumn(t, name, normaliseField(def));
51
+ applyColumn(t, name, normaliseField(def), this.table);
48
52
  }
49
53
  });
54
+ await this._applyIndexes(db);
50
55
  }
51
56
 
52
57
  /**
@@ -56,9 +61,10 @@ class CreateModel extends BaseOperation {
56
61
  async upWithoutFKs(db) {
57
62
  await db.schema.createTable(this.table, (t) => {
58
63
  for (const [name, def] of Object.entries(this.fields)) {
59
- applyColumn(t, name, { ...normaliseField(def), references: null });
64
+ applyColumn(t, name, { ...normaliseField(def), references: null }, this.table);
60
65
  }
61
66
  });
67
+ await this._applyIndexes(db);
62
68
  }
63
69
 
64
70
  /**
@@ -76,8 +82,28 @@ class CreateModel extends BaseOperation {
76
82
  await db.schema.dropTableIfExists(this.table);
77
83
  }
78
84
 
85
+ async _applyIndexes(db) {
86
+ const { indexName } = require('./indexes');
87
+ const all = [
88
+ ...(this.indexes || []),
89
+ ];
90
+ const ut = this.uniqueTogether || [];
91
+ if (!all.length && !ut.length) return;
92
+ await db.schema.table(this.table, (t) => {
93
+ for (const idx of all) {
94
+ const name = idx.name || indexName(this.table, idx.fields, idx.unique);
95
+ if (idx.unique) t.unique(idx.fields, { indexName: name });
96
+ else t.index(idx.fields, name);
97
+ }
98
+ for (const fields of ut) {
99
+ const name = `${this.table}_${fields.join('_')}_unique`;
100
+ t.unique(fields, { indexName: name });
101
+ }
102
+ });
103
+ }
104
+
79
105
  toJSON() {
80
- return { type: 'CreateModel', table: this.table, fields: this.fields };
106
+ return { type: 'CreateModel', table: this.table, fields: this.fields, indexes: this.indexes, uniqueTogether: this.uniqueTogether };
81
107
  }
82
108
  }
83
109
 
@@ -108,7 +134,7 @@ class DeleteModel extends BaseOperation {
108
134
  async down(db) {
109
135
  await db.schema.createTable(this.table, (t) => {
110
136
  for (const [name, def] of Object.entries(this.fields)) {
111
- applyColumn(t, name, normaliseField(def));
137
+ applyColumn(t, name, normaliseField(def), this.table);
112
138
  }
113
139
  });
114
140
  }