odac 1.4.0 → 1.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.agent/rules/memory.md +8 -0
- package/.github/workflows/release.yml +1 -1
- package/.releaserc.js +9 -2
- package/CHANGELOG.md +61 -0
- package/README.md +10 -0
- package/bin/odac.js +193 -2
- package/client/odac.js +32 -13
- package/docs/ai/skills/SKILL.md +4 -3
- package/docs/ai/skills/backend/authentication.md +7 -0
- package/docs/ai/skills/backend/config.md +7 -0
- package/docs/ai/skills/backend/controllers.md +7 -0
- package/docs/ai/skills/backend/cron.md +9 -2
- package/docs/ai/skills/backend/database.md +37 -2
- package/docs/ai/skills/backend/forms.md +112 -11
- package/docs/ai/skills/backend/ipc.md +7 -0
- package/docs/ai/skills/backend/mail.md +7 -0
- package/docs/ai/skills/backend/migrations.md +86 -0
- package/docs/ai/skills/backend/request_response.md +7 -0
- package/docs/ai/skills/backend/routing.md +7 -0
- package/docs/ai/skills/backend/storage.md +7 -0
- package/docs/ai/skills/backend/streaming.md +7 -0
- package/docs/ai/skills/backend/structure.md +8 -1
- package/docs/ai/skills/backend/translations.md +7 -0
- package/docs/ai/skills/backend/utilities.md +7 -0
- package/docs/ai/skills/backend/validation.md +138 -31
- package/docs/ai/skills/backend/views.md +7 -0
- package/docs/ai/skills/frontend/core.md +7 -0
- package/docs/ai/skills/frontend/forms.md +48 -13
- package/docs/ai/skills/frontend/navigation.md +7 -0
- package/docs/ai/skills/frontend/realtime.md +7 -0
- package/docs/backend/08-database/02-basics.md +49 -9
- package/docs/backend/08-database/04-migrations.md +259 -37
- package/package.json +1 -1
- package/src/Auth.js +82 -43
- package/src/Config.js +1 -1
- package/src/Database/ConnectionFactory.js +70 -0
- package/src/Database/Migration.js +1228 -0
- package/src/Database/nanoid.js +30 -0
- package/src/Database.js +157 -46
- package/src/Ipc.js +37 -0
- package/src/Odac.js +1 -1
- package/src/Route/Cron.js +11 -0
- package/src/Route.js +8 -0
- package/src/Server.js +77 -23
- package/src/Storage.js +15 -1
- package/src/Validator.js +22 -20
- package/template/schema/users.js +23 -0
- package/test/{Auth.test.js → Auth/check.test.js} +153 -6
- package/test/Client/data.test.js +91 -0
- package/test/Client/get.test.js +90 -0
- package/test/Client/storage.test.js +87 -0
- package/test/Client/token.test.js +82 -0
- package/test/Client/ws.test.js +86 -0
- package/test/Config/deepMerge.test.js +14 -0
- package/test/Config/init.test.js +66 -0
- package/test/Config/interpolate.test.js +35 -0
- package/test/Database/ConnectionFactory/buildConnectionConfig.test.js +13 -0
- package/test/Database/ConnectionFactory/buildConnections.test.js +31 -0
- package/test/Database/ConnectionFactory/resolveClient.test.js +12 -0
- package/test/Database/Migration/migrate_column.test.js +52 -0
- package/test/Database/Migration/migrate_files.test.js +70 -0
- package/test/Database/Migration/migrate_index.test.js +89 -0
- package/test/Database/Migration/migrate_nanoid.test.js +160 -0
- package/test/Database/Migration/migrate_seed.test.js +77 -0
- package/test/Database/Migration/migrate_table.test.js +88 -0
- package/test/Database/Migration/rollback.test.js +61 -0
- package/test/Database/Migration/snapshot.test.js +38 -0
- package/test/Database/Migration/status.test.js +41 -0
- package/test/Database/autoNanoid.test.js +215 -0
- package/test/Database/nanoid.test.js +19 -0
- package/test/Lang/constructor.test.js +25 -0
- package/test/Lang/get.test.js +65 -0
- package/test/Lang/set.test.js +49 -0
- package/test/Odac/init.test.js +42 -0
- package/test/Odac/instance.test.js +58 -0
- package/test/Route/{Middleware.test.js → Middleware/chaining.test.js} +5 -29
- package/test/Route/Middleware/use.test.js +35 -0
- package/test/{Route.test.js → Route/check.test.js} +4 -55
- package/test/Route/set.test.js +52 -0
- package/test/Route/ws.test.js +23 -0
- package/test/View/EarlyHints/cache.test.js +32 -0
- package/test/View/EarlyHints/extractFromHtml.test.js +143 -0
- package/test/View/EarlyHints/formatLinkHeader.test.js +33 -0
- package/test/View/EarlyHints/send.test.js +99 -0
- package/test/View/{Form.test.js → Form/generateFieldHtml.test.js} +2 -2
- package/test/View/constructor.test.js +22 -0
- package/test/View/print.test.js +19 -0
- package/test/WebSocket/Client/limits.test.js +55 -0
- package/test/WebSocket/Server/broadcast.test.js +33 -0
- package/test/WebSocket/Server/route.test.js +37 -0
- package/test/Client.test.js +0 -197
- package/test/Config.test.js +0 -112
- package/test/Lang.test.js +0 -92
- package/test/Odac.test.js +0 -88
- package/test/View/EarlyHints.test.js +0 -282
- package/test/WebSocket.test.js +0 -238
package/test/Config/init.test.js
@@ -0,0 +1,66 @@
+const fs = require('fs')
+const os = require('os')
+const Config = require('../../src/Config')
+
+jest.mock('fs')
+jest.mock('os')
+
+describe('Config.init()', () => {
+    beforeEach(() => {
+        jest.clearAllMocks()
+        global.__dir = '/mock/project'
+        Config.system = undefined
+        Config.encrypt.key = 'odac'
+    })
+
+    it('should load system config from home directory', () => {
+        os.homedir.mockReturnValue('/home/user')
+        fs.readFileSync.mockImplementation(path => {
+            if (path === '/home/user/.odac/config.json') {
+                return JSON.stringify({deviceId: '123'})
+            }
+            return '{}'
+        })
+        fs.existsSync.mockReturnValue(false)
+
+        Config.init()
+
+        expect(Config.system).toEqual({deviceId: '123'})
+        expect(fs.readFileSync).toHaveBeenCalledWith('/home/user/.odac/config.json')
+    })
+
+    it('should load project config and merge it', () => {
+        os.homedir.mockReturnValue('/home/user')
+        fs.existsSync.mockImplementation(path => {
+            if (path === '/mock/project/odac.json') return true
+            return false
+        })
+        fs.readFileSync.mockImplementation(path => {
+            if (path === '/mock/project/odac.json') {
+                return JSON.stringify({encrypt: {key: 'secret'}})
+            }
+            return '{}'
+        })
+
+        Config.init()
+
+        // The key gets hashed in init(), so it won't be 'secret' anymore
+        expect(Config.encrypt.key).not.toBe('secret')
+        expect(Config.encrypt.key).toBeInstanceOf(Buffer)
+    })
+
+    it('should interpolate variables in config', () => {
+        process.env.TEST_VAR = 'env_value'
+        os.homedir.mockReturnValue('/home/user')
+        fs.existsSync.mockReturnValue(true)
+        fs.readFileSync.mockReturnValue(
+            JSON.stringify({
+                custom: 'value-${TEST_VAR}'
+            })
+        )
+
+        Config.init()
+
+        expect(Config.custom).toBe('value-env_value')
+    })
+})
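The mocks above model the two config sources Config.init() reads: a per-user file at ~/.odac/config.json and a project-level odac.json merged over it, with ${VAR} placeholders resolved from the environment. A sketch of those shapes, using only the values the mocks return; real files will carry more keys than shown here.

```js
// Illustrative config shapes only, taken from the mocked values above.
const systemConfig = {deviceId: '123'}   // ~/.odac/config.json
const projectConfig = {                  // <project>/odac.json, merged over systemConfig
    encrypt: {key: 'secret'},            // hashed into a Buffer during Config.init()
    custom: 'value-${TEST_VAR}'          // ${VAR} placeholders resolved from process.env
}
```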
package/test/Config/interpolate.test.js
@@ -0,0 +1,35 @@
+const Config = require('../../src/Config')
+
+describe('Config._interpolate()', () => {
+    it('should replace ${VAR} with environment variables', () => {
+        process.env.FOO = 'bar'
+        const result = Config._interpolate('hello-${FOO}')
+        expect(result).toBe('hello-bar')
+    })
+
+    it('should replace ${VAR} when variable name includes hyphen', () => {
+        process.env['MY-VAR'] = 'hyphen-value'
+        const result = Config._interpolate('hello-${MY-VAR}')
+        expect(result).toBe('hello-hyphen-value')
+        delete process.env['MY-VAR']
+    })
+
+    it('should replace ${odac} with client path', () => {
+        // __dirname in Config.js is /.../src, so it replaces /src with /client
+        const result = Config._interpolate('path-${odac}')
+        expect(result).toMatch(/\/client$/)
+    })
+
+    it('should handle nested objects and arrays', () => {
+        process.env.VAR = 'x'
+        const obj = {
+            a: ['${VAR}'],
+            b: {c: '${VAR}'}
+        }
+        const result = Config._interpolate(obj)
+        expect(result).toEqual({
+            a: ['x'],
+            b: {c: 'x'}
+        })
+    })
+})
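The behaviour exercised here, ${VAR} resolved from process.env, ${odac} resolved to the bundled client directory, and recursion through arrays and objects, could be implemented roughly as below. This is a sketch inferred from the tests, not the body of Config._interpolate() in src/Config.js; the real regex and fallback rules may differ.

```js
// Sketch only, inferred from the assertions above.
const path = require('node:path')

function interpolate(value) {
    if (Array.isArray(value)) return value.map(interpolate)
    if (value && typeof value === 'object') {
        const out = {}
        for (const [key, val] of Object.entries(value)) out[key] = interpolate(val)
        return out
    }
    if (typeof value !== 'string') return value
    return value.replace(/\$\{([^}]+)\}/g, (match, name) => {
        if (name === 'odac') return path.join(__dirname, '..', 'client') // assumed layout: client/ next to src/
        return process.env[name] !== undefined ? process.env[name] : match
    })
}
```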
package/test/Database/ConnectionFactory/buildConnectionConfig.test.js
@@ -0,0 +1,13 @@
+const {buildConnectionConfig} = require('../../../src/Database/ConnectionFactory')
+
+describe('ConnectionFactory.buildConnectionConfig()', () => {
+    it('should create sqlite filename config', () => {
+        const config = buildConnectionConfig({database: 'db.sqlite3'}, 'sqlite3')
+        expect(config).toEqual({filename: 'db.sqlite3'})
+    })
+
+    it('should create host based config for non-sqlite', () => {
+        const config = buildConnectionConfig({user: 'root', password: 'secret', database: 'app', port: 3306}, 'mysql2')
+        expect(config).toEqual({host: '127.0.0.1', user: 'root', password: 'secret', database: 'app', port: 3306})
+    })
+})
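Read together, the two assertions imply that buildConnectionConfig() maps a schema entry onto a knex connection object: sqlite gets a bare filename, everything else gets a host-based object defaulting to 127.0.0.1. A minimal sketch under that assumption; the shipped ConnectionFactory.js may support more options.

```js
// Minimal sketch consistent with the two expectations above.
function buildConnectionConfig(cfg, client) {
    if (client === 'sqlite3') {
        return {filename: cfg.filename || cfg.database}
    }
    return {
        host: cfg.host || '127.0.0.1',
        user: cfg.user,
        password: cfg.password,
        database: cfg.database,
        port: cfg.port
    }
}
```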
package/test/Database/ConnectionFactory/buildConnections.test.js
@@ -0,0 +1,31 @@
+const mockKnex = jest.fn()
+jest.mock(
+    'knex',
+    () =>
+        (...args) =>
+            mockKnex(...args)
+)
+
+const {buildConnections} = require('../../../src/Database/ConnectionFactory')
+
+describe('ConnectionFactory.buildConnections()', () => {
+    beforeEach(() => {
+        mockKnex.mockReset()
+        mockKnex.mockImplementation(options => ({options, raw: jest.fn()}))
+    })
+
+    it('should support single database config', () => {
+        const connections = buildConnections({type: 'mysql', user: 'root', database: 'app'})
+        expect(Object.keys(connections)).toEqual(['default'])
+        expect(mockKnex).toHaveBeenCalledTimes(1)
+    })
+
+    it('should support multi database config', () => {
+        const connections = buildConnections({
+            analytics: {type: 'postgres', user: 'u', database: 'a'},
+            default: {type: 'sqlite', filename: './dev.sqlite3'}
+        })
+        expect(Object.keys(connections).sort()).toEqual(['analytics', 'default'])
+        expect(mockKnex).toHaveBeenCalledTimes(2)
+    })
+})
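The two call shapes above mirror the database configuration a project can declare: either one flat connection, exposed as default, or a map of named connections, each backed by its own knex instance. Illustrative values only; any keys beyond those used in the tests are assumptions.

```js
// Single connection: buildConnections() exposes it as connections.default
const single = {type: 'mysql', user: 'root', password: 'secret', database: 'app'}

// Named connections: each key gets its own knex instance
const multi = {
    default: {type: 'sqlite', filename: './dev.sqlite3'},
    analytics: {type: 'postgres', user: 'u', password: 'p', database: 'a'}
}
```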
package/test/Database/ConnectionFactory/resolveClient.test.js
@@ -0,0 +1,12 @@
+const {resolveClient} = require('../../../src/Database/ConnectionFactory')
+
+describe('ConnectionFactory.resolveClient()', () => {
+    it('should map known database aliases', () => {
+        expect(resolveClient('postgres')).toBe('pg')
+        expect(resolveClient('postgresql')).toBe('pg')
+        expect(resolveClient('pg')).toBe('pg')
+        expect(resolveClient('sqlite')).toBe('sqlite3')
+        expect(resolveClient('sqlite3')).toBe('sqlite3')
+        expect(resolveClient('mysql')).toBe('mysql2')
+    })
+})
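The mapping asserted here suggests resolveClient() is essentially an alias table onto knex client package names. A sketch under that assumption; behaviour for unknown aliases is not covered by the test and is guessed here.

```js
// Alias table inferred from the assertions above.
function resolveClient(type) {
    const aliases = {
        postgres: 'pg',
        postgresql: 'pg',
        pg: 'pg',
        sqlite: 'sqlite3',
        sqlite3: 'sqlite3',
        mysql: 'mysql2',
        mysql2: 'mysql2'
    }
    return aliases[type] || type // fallback is an assumption
}
```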
package/test/Database/Migration/migrate_column.test.js
@@ -0,0 +1,52 @@
+'use strict'
+
+const path = require('node:path')
+const fs = require('node:fs')
+const os = require('node:os')
+const knex = require('knex')
+const Migration = require('../../../src/Database/Migration')
+
+let db, tmpDir
+
+beforeEach(async () => {
+    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'odac-migration-column-'))
+    db = knex({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
+    Migration.init(tmpDir, {default: db})
+})
+
+afterEach(async () => {
+    await db.destroy()
+    fs.rmSync(tmpDir, {recursive: true, force: true})
+})
+
+function writeSchema(name, content) {
+    const dir = path.join(tmpDir, 'schema')
+    fs.mkdirSync(dir, {recursive: true})
+    fs.writeFileSync(path.join(dir, `${name}.js`), `module.exports = ${JSON.stringify(content, null, 2)}`)
+}
+
+describe('Migration.migrate() - Column Diff', () => {
+    it('should add a new column to an existing table', async () => {
+        writeSchema('posts', {columns: {id: {type: 'increments'}, title: {type: 'string'}}})
+        await Migration.migrate()
+
+        writeSchema('posts', {columns: {id: {type: 'increments'}, title: {type: 'string'}, body: {type: 'text', nullable: true}}})
+        const result = await Migration.migrate()
+        const addOps = result.default.schema.filter(op => op.type === 'add_column')
+
+        expect(addOps).toEqual(expect.arrayContaining([expect.objectContaining({type: 'add_column', column: 'body', table: 'posts'})]))
+        const info = await db('posts').columnInfo()
+        expect(info).toHaveProperty('body')
+    })
+
+    it('should drop a column removed from schema', async () => {
+        writeSchema('items', {columns: {id: {type: 'increments'}, name: {type: 'string'}, obsolete: {type: 'string'}}})
+        await Migration.migrate()
+
+        writeSchema('items', {columns: {id: {type: 'increments'}, name: {type: 'string'}}})
+        const result = await Migration.migrate()
+        const dropOps = result.default.schema.filter(op => op.type === 'drop_column')
+
+        expect(dropOps).toEqual(expect.arrayContaining([expect.objectContaining({type: 'drop_column', column: 'obsolete', table: 'items'})]))
+    })
+})
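The writeSchema() helper used throughout these Migration tests writes plain module.exports objects into a schema/ directory, which is the declarative format Migration.migrate() diffs against the live database. A consolidated illustration of that shape, combining the columns, indexes and seed options the tests exercise; the file name and field values are examples only.

```js
// schema/posts.js, illustrative only.
module.exports = {
    columns: {
        id: {type: 'increments'},
        title: {type: 'string', length: 255},
        slug: {type: 'string', length: 255},
        body: {type: 'text', nullable: true}
    },
    indexes: [
        {columns: ['slug'], unique: true}
    ],
    seed: [
        {title: 'Hello World', slug: 'hello-world'}
    ],
    seedKey: 'slug'
}
```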
package/test/Database/Migration/migrate_files.test.js
@@ -0,0 +1,70 @@
+'use strict'
+
+const path = require('node:path')
+const fs = require('node:fs')
+const os = require('node:os')
+const knex = require('knex')
+const Migration = require('../../../src/Database/Migration')
+
+let db, tmpDir
+
+beforeEach(async () => {
+    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'odac-migration-files-'))
+    db = knex({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
+    Migration.init(tmpDir, {default: db})
+})
+
+afterEach(async () => {
+    await db.destroy()
+    fs.rmSync(tmpDir, {recursive: true, force: true})
+})
+
+function writeMigrationFile(name, upFn, downFn) {
+    const dir = path.join(tmpDir, 'migration')
+    fs.mkdirSync(dir, {recursive: true})
+
+    const content = `
+module.exports = {
+    up: ${upFn.toString()},
+    down: ${downFn ? downFn.toString() : 'undefined'}
+}
+`
+    fs.writeFileSync(path.join(dir, name), content)
+}
+
+describe('Migration.migrate() - Migration Files', () => {
+    it('should run pending migration files in order', async () => {
+        await db.schema.createTable('counters', t => {
+            t.increments('id')
+            t.string('name')
+            t.integer('value').defaultTo(0)
+        })
+
+        writeMigrationFile('20260225_001_init.js', async db => {
+            await db('counters').insert({name: 'visits', value: 0})
+        })
+        writeMigrationFile('20260225_002_add.js', async db => {
+            await db('counters').insert({name: 'signups', value: 0})
+        })
+
+        const result = await Migration.migrate()
+        expect(result.default.files).toHaveLength(2)
+
+        const rows = await db('counters').select()
+        expect(rows).toHaveLength(2)
+    })
+
+    it('should not re-run already applied migration files', async () => {
+        await db.schema.createTable('data', t => {
+            t.increments('id')
+            t.string('value')
+        })
+
+        writeMigrationFile('20260225_001_insert.js', async db => {
+            await db('data').insert({value: 'test'})
+        })
+        await Migration.migrate()
+        const result2 = await Migration.migrate()
+        expect(result2.default.files).toHaveLength(0)
+    })
+})
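Besides schema diffing, Migration.migrate() also runs one-off files from a migration/ directory, each exporting an up function and optionally a down function that receive the knex connection, which is exactly the shape writeMigrationFile() generates above. An illustrative file; the name and table are examples only.

```js
// migration/20260225_003_backfill_counters.js, illustrative only.
module.exports = {
    up: async db => {
        await db('counters').insert({name: 'errors', value: 0})
    },
    down: async db => {
        await db('counters').where({name: 'errors'}).del()
    }
}
```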
package/test/Database/Migration/migrate_index.test.js
@@ -0,0 +1,89 @@
+'use strict'
+
+const path = require('node:path')
+const fs = require('node:fs')
+const os = require('node:os')
+const knex = require('knex')
+const Migration = require('../../../src/Database/Migration')
+
+let db, tmpDir
+
+beforeEach(async () => {
+    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'odac-migration-index-'))
+    db = knex({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
+    Migration.init(tmpDir, {default: db})
+})
+
+afterEach(async () => {
+    await db.destroy()
+    fs.rmSync(tmpDir, {recursive: true, force: true})
+})
+
+function writeSchema(name, content) {
+    const dir = path.join(tmpDir, 'schema')
+    fs.mkdirSync(dir, {recursive: true})
+    fs.writeFileSync(path.join(dir, `${name}.js`), `module.exports = ${JSON.stringify(content, null, 2)}`)
+}
+
+describe('Migration.migrate() - Index Diff', () => {
+    it('should add a new index to an existing table', async () => {
+        writeSchema('articles', {
+            columns: {id: {type: 'increments'}, slug: {type: 'string', length: 255}, status: {type: 'string', length: 50}},
+            indexes: []
+        })
+        await Migration.migrate()
+
+        writeSchema('articles', {
+            columns: {id: {type: 'increments'}, slug: {type: 'string', length: 255}, status: {type: 'string', length: 50}},
+            indexes: [{columns: ['slug'], unique: true}]
+        })
+
+        const result = await Migration.migrate()
+        const indexOps = result.default.schema.filter(op => op.type === 'add_index')
+        expect(indexOps).toHaveLength(1)
+    })
+
+    it('should drop an index removed from schema', async () => {
+        writeSchema('tags', {
+            columns: {id: {type: 'increments'}, name: {type: 'string', length: 100}},
+            indexes: [{columns: ['name'], unique: false}]
+        })
+        await Migration.migrate()
+
+        writeSchema('tags', {
+            columns: {id: {type: 'increments'}, name: {type: 'string', length: 100}},
+            indexes: []
+        })
+
+        const result = await Migration.migrate()
+        const dropIndexOps = result.default.schema.filter(op => op.type === 'drop_index')
+        expect(dropIndexOps.length).toBeGreaterThanOrEqual(1)
+    })
+
+    it('should normalize column-level unique into indexes and be idempotent', async () => {
+        writeSchema('apps', {
+            columns: {id: {type: 'increments'}, name: {type: 'string', length: 100, unique: true}},
+            indexes: []
+        })
+
+        await Migration.migrate()
+        const result2 = await Migration.migrate()
+        const indexOps2 = result2.default.schema.filter(op => op.type === 'add_index' || op.type === 'drop_index')
+        expect(indexOps2).toHaveLength(0)
+    })
+
+    it('should survive add_index when constraint already exists (idempotent)', async () => {
+        await db.schema.createTable('apps_ext', t => {
+            t.increments('id')
+            t.string('name', 100).notNullable()
+            t.unique(['name'])
+        })
+
+        writeSchema('apps_ext', {
+            columns: {id: {type: 'increments'}, name: {type: 'string', length: 100, nullable: false, unique: true}},
+            indexes: []
+        })
+
+        await expect(Migration.migrate()).resolves.toBeDefined()
+    })
+})
package/test/Database/Migration/migrate_nanoid.test.js
@@ -0,0 +1,160 @@
+'use strict'
+
+const path = require('node:path')
+const fs = require('node:fs')
+const os = require('node:os')
+const knex = require('knex')
+const Migration = require('../../../src/Database/Migration')
+
+let db, tmpDir
+
+beforeEach(async () => {
+    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'odac-migration-nanoid-'))
+    db = knex({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
+    Migration.init(tmpDir, {default: db})
+})
+
+afterEach(async () => {
+    await db.destroy()
+    fs.rmSync(tmpDir, {recursive: true, force: true})
+})
+
+function writeSchema(name, content) {
+    const dir = path.join(tmpDir, 'schema')
+    fs.mkdirSync(dir, {recursive: true})
+    fs.writeFileSync(path.join(dir, `${name}.js`), `module.exports = ${JSON.stringify(content, null, 2)}`)
+}
+
+describe('Migration - NanoID Column Type', () => {
+    it('should create a nanoid column as string(21) by default', async () => {
+        writeSchema('tokens', {
+            columns: {
+                id: {type: 'nanoid', primary: true},
+                name: {type: 'string', length: 100}
+            }
+        })
+
+        await Migration.migrate()
+        const exists = await db.schema.hasTable('tokens')
+        expect(exists).toBe(true)
+
+        const info = await db('tokens').columnInfo()
+        expect(info).toHaveProperty('id')
+        expect(info.id.type).toBe('varchar')
+        expect(Number(info.id.maxLength)).toBe(21)
+    })
+
+    it('should support custom length for nanoid column', async () => {
+        writeSchema('short_ids', {
+            columns: {
+                id: {type: 'nanoid', length: 12, primary: true},
+                label: {type: 'string'}
+            }
+        })
+
+        await Migration.migrate()
+        const info = await db('short_ids').columnInfo()
+        expect(Number(info.id.maxLength)).toBe(12)
+    })
+
+    it('should auto-generate nanoid for seed data', async () => {
+        writeSchema('articles', {
+            columns: {
+                id: {type: 'nanoid', primary: true},
+                title: {type: 'string', length: 255},
+                slug: {type: 'string', length: 255}
+            },
+            seed: [{title: 'Hello World', slug: 'hello-world'}],
+            seedKey: 'slug'
+        })
+
+        await Migration.migrate()
+        const row = await db('articles').where('slug', 'hello-world').first()
+
+        expect(row).toBeDefined()
+        expect(row.id).toBeDefined()
+        expect(typeof row.id).toBe('string')
+        expect(row.id.length).toBe(21)
+        expect(row.id).toMatch(/^[a-zA-Z0-9]+$/)
+    })
+
+    it('should not overwrite nanoid if seed data provides it explicitly', async () => {
+        writeSchema('tags', {
+            columns: {
+                id: {type: 'nanoid', primary: true},
+                name: {type: 'string', length: 100}
+            },
+            seed: [{id: 'EXPLICIT_ID_12345678', name: 'featured'}],
+            seedKey: 'name'
+        })
+
+        await Migration.migrate()
+        const row = await db('tags').where('name', 'featured').first()
+
+        expect(row.id).toBe('EXPLICIT_ID_12345678')
+    })
+
+    it('should preserve nanoid value on subsequent seed runs (idempotent)', async () => {
+        writeSchema('categories', {
+            columns: {
+                id: {type: 'nanoid', primary: true},
+                name: {type: 'string', length: 100}
+            },
+            seed: [{name: 'tech'}],
+            seedKey: 'name'
+        })
+
+        await Migration.migrate()
+        const firstRow = await db('categories').where('name', 'tech').first()
+        const firstId = firstRow.id
+
+        // Run migrate again — should not change the ID
+        await Migration.migrate()
+        const secondRow = await db('categories').where('name', 'tech').first()
+
+        expect(secondRow.id).toBe(firstId)
+    })
+
+    it('should respect custom length in seed nanoid generation', async () => {
+        writeSchema('codes', {
+            columns: {
+                code: {type: 'nanoid', length: 8, primary: true},
+                label: {type: 'string', length: 50}
+            },
+            seed: [{label: 'discount-10'}],
+            seedKey: 'label'
+        })
+
+        await Migration.migrate()
+        const row = await db('codes').where('label', 'discount-10').first()
+
+        expect(row.code).toBeDefined()
+        expect(row.code.length).toBe(8)
+    })
+
+    it('should add nanoid column to existing table via diff', async () => {
+        writeSchema('events', {
+            columns: {
+                id: {type: 'increments'},
+                name: {type: 'string'}
+            }
+        })
+        await Migration.migrate()
+
+        writeSchema('events', {
+            columns: {
+                id: {type: 'increments'},
+                public_id: {type: 'nanoid', length: 16},
+                name: {type: 'string'}
+            }
+        })
+
+        const result = await Migration.migrate()
+        const addOps = result.default.schema.filter(op => op.type === 'add_column')
+        expect(addOps).toEqual(expect.arrayContaining([expect.objectContaining({type: 'add_column', column: 'public_id'})]))
+
+        const info = await db('events').columnInfo()
+        expect(info).toHaveProperty('public_id')
+        expect(Number(info.public_id.maxLength)).toBe(16)
+    })
+})
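The assertions above (21 characters by default, custom lengths honoured, ids matching /^[a-zA-Z0-9]+$/) imply an alphanumeric ID generator along these lines. This is a sketch, not the contents of src/Database/nanoid.js, whose alphabet and RNG may differ.

```js
// Sketch of an alphanumeric ID generator satisfying the assertions above.
const crypto = require('node:crypto')

const ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'

function nanoid(length = 21) {
    const bytes = crypto.randomBytes(length)
    let id = ''
    for (let i = 0; i < length; i++) {
        id += ALPHABET[bytes[i] % ALPHABET.length] // modulo bias is acceptable for a sketch
    }
    return id
}
```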
package/test/Database/Migration/migrate_seed.test.js
@@ -0,0 +1,77 @@
+'use strict'
+
+const path = require('node:path')
+const fs = require('node:fs')
+const os = require('node:os')
+const knex = require('knex')
+const Migration = require('../../../src/Database/Migration')
+
+let db, tmpDir
+
+beforeEach(async () => {
+    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'odac-migration-seed-'))
+    db = knex({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
+    Migration.init(tmpDir, {default: db})
+})
+
+afterEach(async () => {
+    await db.destroy()
+    fs.rmSync(tmpDir, {recursive: true, force: true})
+})
+
+function writeSchema(name, content) {
+    const dir = path.join(tmpDir, 'schema')
+    fs.mkdirSync(dir, {recursive: true})
+    fs.writeFileSync(path.join(dir, `${name}.js`), `module.exports = ${JSON.stringify(content, null, 2)}`)
+}
+
+describe('Migration.migrate() - Seed Data', () => {
+    it('should insert seed data on first migrate', async () => {
+        writeSchema('roles', {
+            columns: {id: {type: 'increments'}, name: {type: 'string', length: 50}, level: {type: 'integer', default: 0}},
+            seed: [
+                {name: 'admin', level: 100},
+                {name: 'user', level: 1}
+            ],
+            seedKey: 'name'
+        })
+
+        const result = await Migration.migrate()
+        expect(result.default.seeds).toHaveLength(2)
+
+        const rows = await db('roles').select()
+        expect(rows).toHaveLength(2)
+    })
+
+    it('should update seed data if values changed', async () => {
+        writeSchema('settings', {
+            columns: {id: {type: 'increments'}, key: {type: 'string', length: 100}, value: {type: 'string', length: 255}},
+            seed: [{key: 'site_name', value: 'My App'}],
+            seedKey: 'key'
+        })
+
+        await Migration.migrate()
+
+        writeSchema('settings', {
+            columns: {id: {type: 'increments'}, key: {type: 'string', length: 100}, value: {type: 'string', length: 255}},
+            seed: [{key: 'site_name', value: 'New App Name'}],
+            seedKey: 'key'
+        })
+
+        await Migration.migrate()
+        const row = await db('settings').where('key', 'site_name').first()
+        expect(row.value).toBe('New App Name')
+    })
+
+    it('should handle JSON/JSONB seeds without false positives', async () => {
+        writeSchema('apps_json', {
+            columns: {id: {type: 'increments'}, name: {type: 'string', length: 100}, config: {type: 'json'}},
+            seed: [{name: 'myapp', config: JSON.stringify({host: 'data'})}],
+            seedKey: 'name'
+        })
+
+        await Migration.migrate()
+        const result2 = await Migration.migrate()
+        expect(result2.default.seeds.filter(s => s.type === 'seed_update')).toHaveLength(0)
+    })
+})