odac 1.4.1 → 1.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.agent/rules/memory.md +5 -0
- package/.releaserc.js +9 -2
- package/CHANGELOG.md +64 -0
- package/README.md +1 -1
- package/bin/odac.js +3 -2
- package/client/odac.js +124 -28
- package/docs/ai/skills/backend/database.md +19 -0
- package/docs/ai/skills/backend/forms.md +107 -13
- package/docs/ai/skills/backend/migrations.md +8 -2
- package/docs/ai/skills/backend/validation.md +132 -32
- package/docs/ai/skills/frontend/forms.md +43 -15
- package/docs/backend/08-database/02-basics.md +49 -9
- package/docs/backend/08-database/04-migrations.md +1 -0
- package/package.json +1 -1
- package/src/Auth.js +15 -2
- package/src/Database/ConnectionFactory.js +1 -0
- package/src/Database/Migration.js +26 -1
- package/src/Database/nanoid.js +30 -0
- package/src/Database.js +122 -11
- package/src/Ipc.js +37 -0
- package/src/Odac.js +1 -1
- package/src/Route/Cron.js +11 -0
- package/src/Route.js +49 -30
- package/src/Server.js +77 -23
- package/src/Storage.js +15 -1
- package/src/Validator.js +22 -20
- package/test/{Auth.test.js → Auth/check.test.js} +91 -5
- package/test/Client/data.test.js +91 -0
- package/test/Client/get.test.js +90 -0
- package/test/Client/storage.test.js +87 -0
- package/test/Client/token.test.js +82 -0
- package/test/Client/ws.test.js +118 -0
- package/test/Config/deepMerge.test.js +14 -0
- package/test/Config/init.test.js +66 -0
- package/test/Config/interpolate.test.js +35 -0
- package/test/Database/ConnectionFactory/buildConnectionConfig.test.js +13 -0
- package/test/Database/ConnectionFactory/buildConnections.test.js +31 -0
- package/test/Database/ConnectionFactory/resolveClient.test.js +12 -0
- package/test/Database/Migration/migrate_column.test.js +52 -0
- package/test/Database/Migration/migrate_files.test.js +70 -0
- package/test/Database/Migration/migrate_index.test.js +89 -0
- package/test/Database/Migration/migrate_nanoid.test.js +160 -0
- package/test/Database/Migration/migrate_seed.test.js +77 -0
- package/test/Database/Migration/migrate_table.test.js +88 -0
- package/test/Database/Migration/rollback.test.js +61 -0
- package/test/Database/Migration/snapshot.test.js +38 -0
- package/test/Database/Migration/status.test.js +41 -0
- package/test/Database/autoNanoid.test.js +215 -0
- package/test/Database/nanoid.test.js +19 -0
- package/test/Lang/constructor.test.js +25 -0
- package/test/Lang/get.test.js +65 -0
- package/test/Lang/set.test.js +49 -0
- package/test/Odac/init.test.js +42 -0
- package/test/Odac/instance.test.js +58 -0
- package/test/Route/{Middleware.test.js → Middleware/chaining.test.js} +5 -29
- package/test/Route/Middleware/use.test.js +35 -0
- package/test/{Route.test.js → Route/check.test.js} +100 -50
- package/test/Route/set.test.js +52 -0
- package/test/Route/ws.test.js +23 -0
- package/test/View/EarlyHints/cache.test.js +32 -0
- package/test/View/EarlyHints/extractFromHtml.test.js +143 -0
- package/test/View/EarlyHints/formatLinkHeader.test.js +33 -0
- package/test/View/EarlyHints/send.test.js +99 -0
- package/test/View/{Form.test.js → Form/generateFieldHtml.test.js} +2 -2
- package/test/View/constructor.test.js +22 -0
- package/test/View/print.test.js +19 -0
- package/test/WebSocket/Client/limits.test.js +55 -0
- package/test/WebSocket/Server/broadcast.test.js +33 -0
- package/test/WebSocket/Server/route.test.js +37 -0
- package/test/Client.test.js +0 -197
- package/test/Config.test.js +0 -119
- package/test/Database/ConnectionFactory.test.js +0 -80
- package/test/Lang.test.js +0 -92
- package/test/Migration.test.js +0 -943
- package/test/Odac.test.js +0 -88
- package/test/View/EarlyHints.test.js +0 -282
- package/test/WebSocket.test.js +0 -238
|
'use strict'

const path = require('node:path')
const fs = require('node:fs')
const os = require('node:os')
const knex = require('knex')
const Migration = require('../../../src/Database/Migration')

// Shared fixtures: an in-memory sqlite connection plus a scratch directory
// that Migration reads its schema files from.
let db
let tmpDir

beforeEach(async () => {
    db = knex({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'odac-migration-index-'))
    Migration.init(tmpDir, {default: db})
})

afterEach(async () => {
    await db.destroy()
    fs.rmSync(tmpDir, {recursive: true, force: true})
})

// Persist a schema definition as <tmpDir>/schema/<name>.js so Migration picks it up.
const saveSchema = (name, definition) => {
    const schemaDir = path.join(tmpDir, 'schema')
    fs.mkdirSync(schemaDir, {recursive: true})
    fs.writeFileSync(path.join(schemaDir, `${name}.js`), `module.exports = ${JSON.stringify(definition, null, 2)}`)
}

describe('Migration.migrate() - Index Diff', () => {
    it('should add a new index to an existing table', async () => {
        saveSchema('articles', {
            columns: {id: {type: 'increments'}, slug: {type: 'string', length: 255}, status: {type: 'string', length: 50}},
            indexes: []
        })
        await Migration.migrate()

        saveSchema('articles', {
            columns: {id: {type: 'increments'}, slug: {type: 'string', length: 255}, status: {type: 'string', length: 50}},
            indexes: [{columns: ['slug'], unique: true}]
        })

        const result = await Migration.migrate()
        const added = result.default.schema.filter(operation => operation.type === 'add_index')
        expect(added).toHaveLength(1)
    })

    it('should drop an index removed from schema', async () => {
        saveSchema('tags', {
            columns: {id: {type: 'increments'}, name: {type: 'string', length: 100}},
            indexes: [{columns: ['name'], unique: false}]
        })
        await Migration.migrate()

        saveSchema('tags', {
            columns: {id: {type: 'increments'}, name: {type: 'string', length: 100}},
            indexes: []
        })

        const result = await Migration.migrate()
        const dropped = result.default.schema.filter(operation => operation.type === 'drop_index')
        expect(dropped.length).toBeGreaterThanOrEqual(1)
    })

    it('should normalize column-level unique into indexes and be idempotent', async () => {
        saveSchema('apps', {
            columns: {id: {type: 'increments'}, name: {type: 'string', length: 100, unique: true}},
            indexes: []
        })

        await Migration.migrate()
        const rerun = await Migration.migrate()
        const indexChanges = rerun.default.schema.filter(operation => operation.type === 'add_index' || operation.type === 'drop_index')
        expect(indexChanges).toHaveLength(0)
    })

    it('should survive add_index when constraint already exists (idempotent)', async () => {
        // Pre-create the table with the unique constraint already in place,
        // then declare the same constraint in the schema file.
        await db.schema.createTable('apps_ext', table => {
            table.increments('id')
            table.string('name', 100).notNullable()
            table.unique(['name'])
        })

        saveSchema('apps_ext', {
            columns: {id: {type: 'increments'}, name: {type: 'string', length: 100, nullable: false, unique: true}},
            indexes: []
        })

        await expect(Migration.migrate()).resolves.toBeDefined()
    })
})
'use strict'

const path = require('node:path')
const fs = require('node:fs')
const os = require('node:os')
const knex = require('knex')
const Migration = require('../../../src/Database/Migration')

// Shared fixtures: an in-memory sqlite connection plus a scratch directory
// that Migration reads its schema files from.
let db
let tmpDir

beforeEach(async () => {
    db = knex({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'odac-migration-nanoid-'))
    Migration.init(tmpDir, {default: db})
})

afterEach(async () => {
    await db.destroy()
    fs.rmSync(tmpDir, {recursive: true, force: true})
})

// Persist a schema definition as <tmpDir>/schema/<name>.js so Migration picks it up.
const saveSchema = (name, definition) => {
    const schemaDir = path.join(tmpDir, 'schema')
    fs.mkdirSync(schemaDir, {recursive: true})
    fs.writeFileSync(path.join(schemaDir, `${name}.js`), `module.exports = ${JSON.stringify(definition, null, 2)}`)
}

describe('Migration - NanoID Column Type', () => {
    it('should create a nanoid column as string(21) by default', async () => {
        saveSchema('tokens', {
            columns: {
                id: {type: 'nanoid', primary: true},
                name: {type: 'string', length: 100}
            }
        })

        await Migration.migrate()
        expect(await db.schema.hasTable('tokens')).toBe(true)

        const info = await db('tokens').columnInfo()
        expect(info).toHaveProperty('id')
        expect(info.id.type).toBe('varchar')
        expect(Number(info.id.maxLength)).toBe(21)
    })

    it('should support custom length for nanoid column', async () => {
        saveSchema('short_ids', {
            columns: {
                id: {type: 'nanoid', length: 12, primary: true},
                label: {type: 'string'}
            }
        })

        await Migration.migrate()
        const info = await db('short_ids').columnInfo()
        expect(Number(info.id.maxLength)).toBe(12)
    })

    it('should auto-generate nanoid for seed data', async () => {
        saveSchema('articles', {
            columns: {
                id: {type: 'nanoid', primary: true},
                title: {type: 'string', length: 255},
                slug: {type: 'string', length: 255}
            },
            seed: [{title: 'Hello World', slug: 'hello-world'}],
            seedKey: 'slug'
        })

        await Migration.migrate()
        const row = await db('articles').where('slug', 'hello-world').first()

        expect(row).toBeDefined()
        expect(row.id).toBeDefined()
        expect(typeof row.id).toBe('string')
        expect(row.id.length).toBe(21)
        expect(row.id).toMatch(/^[a-zA-Z0-9]+$/)
    })

    it('should not overwrite nanoid if seed data provides it explicitly', async () => {
        saveSchema('tags', {
            columns: {
                id: {type: 'nanoid', primary: true},
                name: {type: 'string', length: 100}
            },
            seed: [{id: 'EXPLICIT_ID_12345678', name: 'featured'}],
            seedKey: 'name'
        })

        await Migration.migrate()
        const row = await db('tags').where('name', 'featured').first()

        expect(row.id).toBe('EXPLICIT_ID_12345678')
    })

    it('should preserve nanoid value on subsequent seed runs (idempotent)', async () => {
        saveSchema('categories', {
            columns: {
                id: {type: 'nanoid', primary: true},
                name: {type: 'string', length: 100}
            },
            seed: [{name: 'tech'}],
            seedKey: 'name'
        })

        await Migration.migrate()
        const before = await db('categories').where('name', 'tech').first()
        const originalId = before.id

        // Run migrate again — should not change the ID
        await Migration.migrate()
        const after = await db('categories').where('name', 'tech').first()

        expect(after.id).toBe(originalId)
    })

    it('should respect custom length in seed nanoid generation', async () => {
        saveSchema('codes', {
            columns: {
                code: {type: 'nanoid', length: 8, primary: true},
                label: {type: 'string', length: 50}
            },
            seed: [{label: 'discount-10'}],
            seedKey: 'label'
        })

        await Migration.migrate()
        const row = await db('codes').where('label', 'discount-10').first()

        expect(row.code).toBeDefined()
        expect(row.code.length).toBe(8)
    })

    it('should add nanoid column to existing table via diff', async () => {
        saveSchema('events', {
            columns: {
                id: {type: 'increments'},
                name: {type: 'string'}
            }
        })
        await Migration.migrate()

        saveSchema('events', {
            columns: {
                id: {type: 'increments'},
                public_id: {type: 'nanoid', length: 16},
                name: {type: 'string'}
            }
        })

        const result = await Migration.migrate()
        const added = result.default.schema.filter(operation => operation.type === 'add_column')
        expect(added).toEqual(expect.arrayContaining([expect.objectContaining({type: 'add_column', column: 'public_id'})]))

        const info = await db('events').columnInfo()
        expect(info).toHaveProperty('public_id')
        expect(Number(info.public_id.maxLength)).toBe(16)
    })
})
'use strict'

const path = require('node:path')
const fs = require('node:fs')
const os = require('node:os')
const knex = require('knex')
const Migration = require('../../../src/Database/Migration')

// Shared fixtures: an in-memory sqlite connection plus a scratch directory
// that Migration reads its schema files from.
let db
let tmpDir

beforeEach(async () => {
    db = knex({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'odac-migration-seed-'))
    Migration.init(tmpDir, {default: db})
})

afterEach(async () => {
    await db.destroy()
    fs.rmSync(tmpDir, {recursive: true, force: true})
})

// Persist a schema definition as <tmpDir>/schema/<name>.js so Migration picks it up.
const saveSchema = (name, definition) => {
    const schemaDir = path.join(tmpDir, 'schema')
    fs.mkdirSync(schemaDir, {recursive: true})
    fs.writeFileSync(path.join(schemaDir, `${name}.js`), `module.exports = ${JSON.stringify(definition, null, 2)}`)
}

describe('Migration.migrate() - Seed Data', () => {
    it('should insert seed data on first migrate', async () => {
        saveSchema('roles', {
            columns: {id: {type: 'increments'}, name: {type: 'string', length: 50}, level: {type: 'integer', default: 0}},
            seed: [
                {name: 'admin', level: 100},
                {name: 'user', level: 1}
            ],
            seedKey: 'name'
        })

        const result = await Migration.migrate()
        expect(result.default.seeds).toHaveLength(2)

        const rows = await db('roles').select()
        expect(rows).toHaveLength(2)
    })

    it('should update seed data if values changed', async () => {
        saveSchema('settings', {
            columns: {id: {type: 'increments'}, key: {type: 'string', length: 100}, value: {type: 'string', length: 255}},
            seed: [{key: 'site_name', value: 'My App'}],
            seedKey: 'key'
        })

        await Migration.migrate()

        saveSchema('settings', {
            columns: {id: {type: 'increments'}, key: {type: 'string', length: 100}, value: {type: 'string', length: 255}},
            seed: [{key: 'site_name', value: 'New App Name'}],
            seedKey: 'key'
        })

        await Migration.migrate()
        const row = await db('settings').where('key', 'site_name').first()
        expect(row.value).toBe('New App Name')
    })

    it('should handle JSON/JSONB seeds without false positives', async () => {
        saveSchema('apps_json', {
            columns: {id: {type: 'increments'}, name: {type: 'string', length: 100}, config: {type: 'json'}},
            seed: [{name: 'myapp', config: JSON.stringify({host: 'data'})}],
            seedKey: 'name'
        })

        await Migration.migrate()
        const rerun = await Migration.migrate()
        expect(rerun.default.seeds.filter(entry => entry.type === 'seed_update')).toHaveLength(0)
    })
})
'use strict'

const path = require('node:path')
const fs = require('node:fs')
const os = require('node:os')
const knex = require('knex')
const Migration = require('../../../src/Database/Migration')

// Shared fixtures: an in-memory sqlite connection plus a scratch directory
// that Migration reads its schema files from.
let db
let tmpDir

beforeEach(async () => {
    db = knex({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'odac-migration-table-'))
    Migration.init(tmpDir, {default: db})
})

afterEach(async () => {
    await db.destroy()
    fs.rmSync(tmpDir, {recursive: true, force: true})
})

// Persist a schema definition as <tmpDir>/schema/<name>.js so Migration picks it up.
const saveSchema = (name, definition) => {
    const schemaDir = path.join(tmpDir, 'schema')
    fs.mkdirSync(schemaDir, {recursive: true})
    fs.writeFileSync(path.join(schemaDir, `${name}.js`), `module.exports = ${JSON.stringify(definition, null, 2)}`)
}

describe('Migration.migrate() - Table Creation', () => {
    it('should create a new table from a schema file', async () => {
        saveSchema('products', {
            columns: {
                id: {type: 'increments'},
                name: {type: 'string', length: 100, nullable: false},
                price: {type: 'decimal', precision: 10, scale: 2},
                is_active: {type: 'boolean', default: true}
            }
        })

        await Migration.migrate()
        expect(await db.schema.hasTable('products')).toBe(true)

        const info = await db('products').columnInfo()
        expect(info).toHaveProperty('id')
        expect(info).toHaveProperty('name')
    })

    it('should create a table with timestamps virtual type', async () => {
        saveSchema('logs', {
            columns: {
                id: {type: 'increments'},
                message: {type: 'text'},
                timestamps: {type: 'timestamps'}
            }
        })

        await Migration.migrate()
        const info = await db('logs').columnInfo()
        expect(info).toHaveProperty('created_at')
        expect(info).toHaveProperty('updated_at')
    })

    it('should create a table with indexes', async () => {
        saveSchema('users', {
            columns: {
                id: {type: 'increments'},
                email: {type: 'string', length: 255, nullable: false},
                role: {type: 'string', length: 50}
            },
            indexes: [{columns: ['email'], unique: true}, {columns: ['role']}]
        })

        await Migration.migrate()
        expect(await db.schema.hasTable('users')).toBe(true)
    })

    it('should skip creation if table already exists', async () => {
        saveSchema('existing', {
            columns: {id: {type: 'increments'}, name: {type: 'string'}}
        })

        await Migration.migrate()
        const rerun = await Migration.migrate()
        const creations = rerun.default.schema.filter(operation => operation.type === 'create_table')
        expect(creations).toHaveLength(0)
    })
})
'use strict'

const path = require('node:path')
const fs = require('node:fs')
const os = require('node:os')
const knex = require('knex')
const Migration = require('../../../src/Database/Migration')

// Shared fixtures: an in-memory sqlite connection plus a scratch directory
// that Migration reads its migration files from.
let db
let tmpDir

beforeEach(async () => {
    db = knex({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'odac-migration-rollback-'))
    Migration.init(tmpDir, {default: db})
})

afterEach(async () => {
    await db.destroy()
    fs.rmSync(tmpDir, {recursive: true, force: true})
})

// Emit a runnable migration module into <tmpDir>/migration, serializing the
// given up/down functions into its source.
const saveMigrationFile = (name, upFn, downFn) => {
    const migrationDir = path.join(tmpDir, 'migration')
    fs.mkdirSync(migrationDir, {recursive: true})

    const source = `
module.exports = {
    up: ${upFn.toString()},
    down: ${downFn ? downFn.toString() : 'undefined'}
}
`
    fs.writeFileSync(path.join(migrationDir, name), source)
}

describe('Migration.rollback()', () => {
    it('should rollback the last batch', async () => {
        await db.schema.createTable('entries', table => {
            table.increments('id')
            table.string('name')
        })

        saveMigrationFile(
            '20260225_001_add_entry.js',
            async db => {
                await db('entries').insert({name: 'first'})
            },
            async db => {
                await db('entries').where('name', 'first').del()
            }
        )

        await Migration.migrate()
        const result = await Migration.rollback()
        expect(result.default).toEqual(
            expect.arrayContaining([expect.objectContaining({type: 'rolled_back', name: '20260225_001_add_entry.js'})])
        )

        const rows = await db('entries').select()
        expect(rows).toHaveLength(0)
    })
})
'use strict'

const path = require('node:path')
const fs = require('node:fs')
const os = require('node:os')
const knex = require('knex')
const Migration = require('../../../src/Database/Migration')

// Shared fixtures: an in-memory sqlite connection plus a scratch directory
// that Migration writes its snapshot schema files into.
let db
let tmpDir

beforeEach(async () => {
    db = knex({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'odac-migration-snapshot-'))
    Migration.init(tmpDir, {default: db})
})

afterEach(async () => {
    await db.destroy()
    fs.rmSync(tmpDir, {recursive: true, force: true})
})

describe('Migration.snapshot()', () => {
    it('should reverse-engineer existing tables into schema files', async () => {
        await db.schema.createTable('customers', table => {
            table.increments('id')
            table.string('name', 100)
            table.boolean('vip').defaultTo(false)
        })

        const result = await Migration.snapshot()
        const files = result.default

        expect(files.length).toBeGreaterThanOrEqual(1)
        const customerFile = files.find(file => file.includes('customers'))
        expect(customerFile).toBeDefined()
        expect(fs.existsSync(customerFile)).toBe(true)
    })
})
'use strict'

const path = require('node:path')
const fs = require('node:fs')
const os = require('node:os')
const knex = require('knex')
const Migration = require('../../../src/Database/Migration')

// Shared fixtures: an in-memory sqlite connection plus a scratch directory
// that Migration reads its schema files from.
let db
let tmpDir

beforeEach(async () => {
    db = knex({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'odac-migration-status-'))
    Migration.init(tmpDir, {default: db})
})

afterEach(async () => {
    await db.destroy()
    fs.rmSync(tmpDir, {recursive: true, force: true})
})

// Persist a schema definition as <tmpDir>/schema/<name>.js so Migration picks it up.
const saveSchema = (name, definition) => {
    const schemaDir = path.join(tmpDir, 'schema')
    fs.mkdirSync(schemaDir, {recursive: true})
    fs.writeFileSync(path.join(schemaDir, `${name}.js`), `module.exports = ${JSON.stringify(definition, null, 2)}`)
}

describe('Migration.status()', () => {
    it('should show pending changes without applying them (dry run)', async () => {
        saveSchema('preview', {
            columns: {id: {type: 'increments'}, name: {type: 'string'}},
            indexes: []
        })

        const result = await Migration.status()
        expect(result.default.schema).toEqual(expect.arrayContaining([expect.objectContaining({type: 'create_table', table: 'preview'})]))

        // A status call is a pure preview: the table must not have been created.
        expect(await db.schema.hasTable('preview')).toBe(false)
    })
})