@sqldoc/cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +37 -0
- package/src/__tests__/binary-entry.test.ts +19 -0
- package/src/__tests__/codegen.test.ts +103 -0
- package/src/__tests__/destructive.test.ts +132 -0
- package/src/__tests__/migration-formats.test.ts +480 -0
- package/src/__tests__/migrations.test.ts +129 -0
- package/src/__tests__/pretty-changes.test.ts +153 -0
- package/src/__tests__/rename-detection.test.ts +142 -0
- package/src/__tests__/validate.test.ts +110 -0
- package/src/commands/codegen.ts +127 -0
- package/src/commands/doctor.ts +175 -0
- package/src/commands/lint.ts +102 -0
- package/src/commands/migrate.ts +345 -0
- package/src/commands/schema.ts +329 -0
- package/src/commands/validate.ts +100 -0
- package/src/errors.ts +24 -0
- package/src/index.ts +103 -0
- package/src/runtime.ts +17 -0
- package/src/utils/auto-install.ts +116 -0
- package/src/utils/destructive.ts +99 -0
- package/src/utils/discover.ts +35 -0
- package/src/utils/format.ts +23 -0
- package/src/utils/generate-config-types.ts +73 -0
- package/src/utils/migration-formats.ts +347 -0
- package/src/utils/migrations.ts +74 -0
- package/src/utils/pipeline.ts +194 -0
- package/src/utils/pretty-changes.ts +149 -0
|
// Tests for renderChanges: turns a flat AtlasChange list into indented,
// colorized summary lines (+ added, - dropped, ~ renamed/modified), with
// column/index changes nested under their table.
import type { AtlasChange } from '@sqldoc/atlas'
import { describe, expect, it } from 'vitest'
import { renderChanges } from '../utils/pretty-changes.ts'

// Strip ANSI escape sequences for assertion
function stripAnsi(s: string): string {
  // biome-ignore lint/suspicious/noControlCharactersInRegex: matching ANSI escape sequences
  return s.replace(/\x1b\[[0-9;]*m/g, '')
}

describe('renderChanges', () => {
  it('returns empty array for no changes', () => {
    expect(renderChanges([])).toEqual([])
  })

  it('renders add_table', () => {
    const changes: AtlasChange[] = [{ type: 'add_table', table: 'users' }]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(1)
    expect(lines[0]).toBe(' + users (new table)')
  })

  it('renders drop_table', () => {
    const changes: AtlasChange[] = [{ type: 'drop_table', table: 'old_logs' }]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(1)
    expect(lines[0]).toBe(' - old_logs (dropped)')
  })

  it('renders rename_table', () => {
    // `detail` carries the "old -> new" pair; the rendered line shows the old name.
    const changes: AtlasChange[] = [{ type: 'rename_table', table: 'accounts', detail: 'users -> accounts' }]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(1)
    expect(lines[0]).toBe(' ~ accounts (renamed from users)')
  })

  it('renders add_view', () => {
    const changes: AtlasChange[] = [{ type: 'add_view', table: 'active_posts' }]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(1)
    expect(lines[0]).toBe(' + active_posts (view)')
  })

  it('renders drop_view', () => {
    const changes: AtlasChange[] = [{ type: 'drop_view', table: 'old_view' }]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(1)
    expect(lines[0]).toBe(' - old_view (view dropped)')
  })

  it('renders add_function', () => {
    const changes: AtlasChange[] = [{ type: 'add_function', table: 'calculate_total' }]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(1)
    expect(lines[0]).toBe(' + calculate_total (function)')
  })

  it('renders drop_function', () => {
    const changes: AtlasChange[] = [{ type: 'drop_function', table: 'old_func' }]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(1)
    expect(lines[0]).toBe(' - old_func (function dropped)')
  })

  it('renders column changes nested under table', () => {
    // Column changes with no table-level change get a plain table-name header line.
    const changes: AtlasChange[] = [
      { type: 'add_column', table: 'users', name: 'age', detail: 'integer' },
      { type: 'drop_column', table: 'users', name: 'legacy_id' },
    ]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(3)
    expect(lines[0]).toBe(' users') // table header
    expect(lines[1]).toBe(' + age (integer)')
    expect(lines[2]).toBe(' - legacy_id')
  })

  it('renders rename_column', () => {
    const changes: AtlasChange[] = [
      { type: 'rename_column', table: 'users', name: 'email_address', detail: 'email -> email_address' },
    ]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(2) // table header + change
    expect(lines[1]).toBe(' ~ email -> email_address (renamed)')
  })

  it('renders modify_column', () => {
    const changes: AtlasChange[] = [
      { type: 'modify_column', table: 'users', name: 'email', detail: 'varchar(100) -> varchar(255)' },
    ]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(2) // table header + change
    expect(lines[1]).toBe(' ~ email (varchar(100) -> varchar(255))')
  })

  it('renders add_index nested under table', () => {
    const changes: AtlasChange[] = [{ type: 'add_index', table: 'users', name: 'idx_users_email' }]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(2)
    expect(lines[1]).toBe(' + idx_users_email (index)')
  })

  it('renders drop_index nested under table', () => {
    const changes: AtlasChange[] = [{ type: 'drop_index', table: 'users', name: 'idx_old' }]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(2)
    expect(lines[1]).toBe(' - idx_old (index)')
  })

  it('nests sub-changes under table-level change', () => {
    // When a table-level change is present, its line doubles as the header:
    // no separate plain header line is emitted.
    const changes: AtlasChange[] = [
      { type: 'add_table', table: 'users' },
      { type: 'add_column', table: 'users', name: 'id', detail: 'bigint' },
      { type: 'add_column', table: 'users', name: 'email', detail: 'text' },
    ]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(3)
    expect(lines[0]).toBe(' + users (new table)')
    expect(lines[1]).toBe(' + id (bigint)')
    expect(lines[2]).toBe(' + email (text)')
  })

  it('renders mixed table-level and column changes', () => {
    const changes: AtlasChange[] = [
      { type: 'add_table', table: 'posts' },
      { type: 'rename_table', table: 'accounts', detail: 'users -> accounts' },
      { type: 'add_column', table: 'accounts', name: 'age', detail: 'integer' },
      { type: 'drop_column', table: 'orders', name: 'legacy_id' },
      { type: 'drop_table', table: 'old_logs' },
    ]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(6)
    expect(lines[0]).toBe(' + posts (new table)')
    expect(lines[1]).toBe(' ~ accounts (renamed from users)')
    expect(lines[2]).toBe(' + age (integer)')
    expect(lines[3]).toBe(' - old_logs (dropped)')
    expect(lines[4]).toBe(' orders') // table header for sub-changes
    expect(lines[5]).toBe(' - legacy_id')
  })

  it('handles add_column without detail', () => {
    // Without `detail` the parenthesized type suffix is omitted entirely.
    const changes: AtlasChange[] = [{ type: 'add_column', table: 'users', name: 'status' }]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(2)
    expect(lines[1]).toBe(' + status')
  })

  it('handles modify_column without detail', () => {
    const changes: AtlasChange[] = [{ type: 'modify_column', table: 'users', name: 'email' }]
    const lines = renderChanges(changes).map(stripAnsi)
    expect(lines).toHaveLength(2)
    expect(lines[1]).toBe(' ~ email')
  })
})
// Tests for buildRenamesFromPreviously: extracts table/column rename hints
// from `@docs.previously` tags attached to compiler outputs, so migrations
// can be generated as RENAMEs instead of drop+add.
import type { CompilerOutput } from '@sqldoc/core'
import { describe, expect, it } from 'vitest'
import { buildRenamesFromPreviously } from '../commands/migrate.ts'

// Build a minimal CompilerOutput fixture; only `fileTags` matters to the
// function under test, the rest are empty defaults overridable via `overrides`.
function makeOutput(
  fileTags: CompilerOutput['fileTags'] = [],
  overrides: Partial<CompilerOutput> = {},
): CompilerOutput {
  return {
    sourceFile: 'schema.sql',
    mergedSql: '',
    sqlOutputs: [],
    codeOutputs: [],
    errors: [],
    docsMeta: [],
    fileTags,
    ...overrides,
  }
}

describe('buildRenamesFromPreviously', () => {
  it('extracts column renames from @docs.previously tags', () => {
    // Column tags use a dotted "table.column" objectName; the tag arg is the old name.
    const outputs = [
      makeOutput([
        {
          objectName: 'users.email',
          target: 'column',
          tags: [{ namespace: 'docs', tag: 'previously', args: ['email_address'] }],
        },
      ]),
    ]

    const renames = buildRenamesFromPreviously(outputs)

    expect(renames).toHaveLength(1)
    expect(renames[0]).toEqual({
      type: 'column',
      table: 'users',
      oldName: 'email_address',
      newName: 'email',
    })
  })

  it('extracts table renames from @docs.previously tags', () => {
    const outputs = [
      makeOutput([
        {
          objectName: 'accounts',
          target: 'table',
          tags: [{ namespace: 'docs', tag: 'previously', args: ['users'] }],
        },
      ]),
    ]

    const renames = buildRenamesFromPreviously(outputs)

    expect(renames).toHaveLength(1)
    expect(renames[0]).toEqual({
      type: 'table',
      table: 'accounts',
      oldName: 'users',
      newName: 'accounts',
    })
  })

  it('returns empty array when no @docs.previously tags', () => {
    // Tags from other namespaces (here @audit.track) must be ignored.
    const outputs = [
      makeOutput([
        {
          objectName: 'users',
          target: 'table',
          tags: [{ namespace: 'audit', tag: 'track', args: { on: ['delete'] } }],
        },
      ]),
    ]

    const renames = buildRenamesFromPreviously(outputs)

    expect(renames).toHaveLength(0)
  })

  it('handles multiple renames across files', () => {
    // Renames are collected across every CompilerOutput, in order.
    const outputs = [
      makeOutput([
        {
          objectName: 'users.email',
          target: 'column',
          tags: [{ namespace: 'docs', tag: 'previously', args: ['email_address'] }],
        },
      ]),
      makeOutput([
        {
          objectName: 'posts.title',
          target: 'column',
          tags: [{ namespace: 'docs', tag: 'previously', args: ['subject'] }],
        },
      ]),
    ]

    const renames = buildRenamesFromPreviously(outputs)

    expect(renames).toHaveLength(2)
    expect(renames[0].table).toBe('users')
    expect(renames[0].oldName).toBe('email_address')
    expect(renames[0].newName).toBe('email')
    expect(renames[1].table).toBe('posts')
    expect(renames[1].oldName).toBe('subject')
    expect(renames[1].newName).toBe('title')
  })

  it('ignores tags with non-string args', () => {
    // NOTE(review): despite the title, this fixture uses an *empty* args list;
    // either way a @docs.previously tag without a usable string arg yields no rename.
    const outputs = [
      makeOutput([
        {
          objectName: 'users.email',
          target: 'column',
          tags: [{ namespace: 'docs', tag: 'previously', args: [] }],
        },
      ]),
    ]

    const renames = buildRenamesFromPreviously(outputs)

    expect(renames).toHaveLength(0)
  })

  it('ignores @docs tags that are not "previously"', () => {
    const outputs = [
      makeOutput([
        {
          objectName: 'users.email',
          target: 'column',
          tags: [{ namespace: 'docs', tag: 'description', args: ['User email'] }],
        },
      ]),
    ]

    const renames = buildRenamesFromPreviously(outputs)

    expect(renames).toHaveLength(0)
  })
})
@@ -0,0 +1,110 @@
|
|
|
1
|
+
import * as fs from 'node:fs'
|
|
2
|
+
import * as os from 'node:os'
|
|
3
|
+
import * as path from 'node:path'
|
|
4
|
+
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
|
5
|
+
|
|
6
|
+
describe('validateCommand', () => {
|
|
7
|
+
let tmpDir: string
|
|
8
|
+
|
|
9
|
+
beforeEach(() => {
|
|
10
|
+
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'sqldoc-cli-validate-'))
|
|
11
|
+
})
|
|
12
|
+
|
|
13
|
+
afterEach(() => {
|
|
14
|
+
fs.rmSync(tmpDir, { recursive: true, force: true })
|
|
15
|
+
process.exitCode = undefined as any
|
|
16
|
+
})
|
|
17
|
+
|
|
18
|
+
it('returns exit code 0 when validation passes', async () => {
|
|
19
|
+
// Create a valid namespace
|
|
20
|
+
const nsFile = path.join(tmpDir, 'ns-valid.ts')
|
|
21
|
+
fs.writeFileSync(
|
|
22
|
+
nsFile,
|
|
23
|
+
`
|
|
24
|
+
export default {
|
|
25
|
+
name: 'valid',
|
|
26
|
+
tags: {
|
|
27
|
+
ok: { description: 'valid tag' },
|
|
28
|
+
},
|
|
29
|
+
}
|
|
30
|
+
`,
|
|
31
|
+
'utf-8',
|
|
32
|
+
)
|
|
33
|
+
|
|
34
|
+
const sqlFile = path.join(tmpDir, 'good.sql')
|
|
35
|
+
fs.writeFileSync(
|
|
36
|
+
sqlFile,
|
|
37
|
+
`-- @import './ns-valid.ts'
|
|
38
|
+
|
|
39
|
+
-- @valid.ok
|
|
40
|
+
CREATE TABLE good (
|
|
41
|
+
id serial PRIMARY KEY
|
|
42
|
+
);
|
|
43
|
+
`,
|
|
44
|
+
'utf-8',
|
|
45
|
+
)
|
|
46
|
+
|
|
47
|
+
const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {})
|
|
48
|
+
|
|
49
|
+
const { validateCommand } = await import('../commands/validate.ts')
|
|
50
|
+
process.exitCode = undefined as any
|
|
51
|
+
await validateCommand(sqlFile, {})
|
|
52
|
+
|
|
53
|
+
// No errors = no exitCode set (remains undefined or 0)
|
|
54
|
+
expect(process.exitCode).not.toBe(1)
|
|
55
|
+
|
|
56
|
+
consoleSpy.mockRestore()
|
|
57
|
+
})
|
|
58
|
+
|
|
59
|
+
it('throws CliError when errors found', async () => {
|
|
60
|
+
// SQL file referencing unknown namespace (no import for it)
|
|
61
|
+
const sqlFile = path.join(tmpDir, 'bad.sql')
|
|
62
|
+
fs.writeFileSync(
|
|
63
|
+
sqlFile,
|
|
64
|
+
`-- @unknown.tag
|
|
65
|
+
CREATE TABLE bad (
|
|
66
|
+
id serial PRIMARY KEY
|
|
67
|
+
);
|
|
68
|
+
`,
|
|
69
|
+
'utf-8',
|
|
70
|
+
)
|
|
71
|
+
|
|
72
|
+
const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {})
|
|
73
|
+
|
|
74
|
+
const { validateCommand } = await import('../commands/validate.ts')
|
|
75
|
+
const { CliError } = await import('../errors.ts')
|
|
76
|
+
await expect(validateCommand(sqlFile, {})).rejects.toThrow(CliError)
|
|
77
|
+
|
|
78
|
+
consoleSpy.mockRestore()
|
|
79
|
+
})
|
|
80
|
+
|
|
81
|
+
it('formats diagnostics as file:line:col: severity: message', async () => {
|
|
82
|
+
const sqlFile = path.join(tmpDir, 'diag.sql')
|
|
83
|
+
fs.writeFileSync(
|
|
84
|
+
sqlFile,
|
|
85
|
+
`-- @unknown.tag
|
|
86
|
+
CREATE TABLE diag (
|
|
87
|
+
id serial PRIMARY KEY
|
|
88
|
+
);
|
|
89
|
+
`,
|
|
90
|
+
'utf-8',
|
|
91
|
+
)
|
|
92
|
+
|
|
93
|
+
const logOutput: string[] = []
|
|
94
|
+
const consoleSpy = vi.spyOn(console, 'log').mockImplementation((...args: any[]) => {
|
|
95
|
+
logOutput.push(args.map(String).join(' '))
|
|
96
|
+
})
|
|
97
|
+
|
|
98
|
+
const { validateCommand } = await import('../commands/validate.ts')
|
|
99
|
+
const { CliError } = await import('../errors.ts')
|
|
100
|
+
await expect(validateCommand(sqlFile, {})).rejects.toThrow(CliError)
|
|
101
|
+
|
|
102
|
+
// Check that at least one output line matches the file:line:col format
|
|
103
|
+
const diagLine = logOutput.find((l) => l.includes(sqlFile) && l.includes(':1:'))
|
|
104
|
+
expect(diagLine).toBeDefined()
|
|
105
|
+
expect(diagLine).toContain('error')
|
|
106
|
+
expect(diagLine).toContain("Unknown namespace '@unknown'")
|
|
107
|
+
|
|
108
|
+
consoleSpy.mockRestore()
|
|
109
|
+
})
|
|
110
|
+
})
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
import * as fs from 'node:fs'
|
|
2
|
+
import * as path from 'node:path'
|
|
3
|
+
import { createRunner } from '@sqldoc/atlas'
|
|
4
|
+
import type { DocsMeta, ProjectContext, ResolvedConfig } from '@sqldoc/core'
|
|
5
|
+
import { loadConfig, resolveProject } from '@sqldoc/core'
|
|
6
|
+
import pc from 'picocolors'
|
|
7
|
+
import { CliError, formatPipelineError } from '../errors.ts'
|
|
8
|
+
import { generateConfigTypes } from '../utils/generate-config-types.ts'
|
|
9
|
+
import { runCompilePipeline } from '../utils/pipeline.ts'
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* compile command: reads SQL files, runs the full pipeline
|
|
13
|
+
* (parse, load namespaces, AST, compile), runs afterCompile hooks,
|
|
14
|
+
* THEN outputs merged SQL.
|
|
15
|
+
*/
|
|
16
|
+
export async function codegenCommand(
|
|
17
|
+
inputPath: string | undefined,
|
|
18
|
+
options: { config?: string; plugins?: string; project?: string },
|
|
19
|
+
): Promise<void> {
|
|
20
|
+
const configRoot = options.config
|
|
21
|
+
? path.dirname(path.resolve(options.config))
|
|
22
|
+
: process.env.SQLDOC_PROJECT_ROOT || process.cwd()
|
|
23
|
+
const { config: rawConfig, configPath } = await loadConfig(configRoot, options.config)
|
|
24
|
+
const config: ResolvedConfig = resolveProject(rawConfig, options.project)
|
|
25
|
+
|
|
26
|
+
// Resolve input path: explicit arg > config.schema > error
|
|
27
|
+
const resolvedInput = inputPath ?? config.schema
|
|
28
|
+
if (!resolvedInput) {
|
|
29
|
+
throw new CliError('No input path provided. Specify a path argument or set "schema" in sqldoc.config.ts')
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
// Regenerate .sqldoc/config.d.ts for typed config
|
|
33
|
+
const sqldocDir = path.join(configRoot, '.sqldoc')
|
|
34
|
+
if (fs.existsSync(sqldocDir)) {
|
|
35
|
+
generateConfigTypes(sqldocDir)
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
const pluginFilter = options.plugins ? new Set(options.plugins.split(',').map((s) => s.trim())) : null
|
|
39
|
+
|
|
40
|
+
let result
|
|
41
|
+
try {
|
|
42
|
+
result = await runCompilePipeline(resolvedInput, config, configRoot)
|
|
43
|
+
} catch (err: any) {
|
|
44
|
+
throw formatPipelineError(err, config)
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
const { mergedSql, outputs, plugins, totalErrors } = result
|
|
48
|
+
|
|
49
|
+
// Run afterCompile hooks BEFORE outputting SQL
|
|
50
|
+
const projectPlugins = [...plugins.entries()].filter(
|
|
51
|
+
([name, p]) =>
|
|
52
|
+
(typeof p.afterCompile === 'function' || typeof p.generateProject === 'function') &&
|
|
53
|
+
(pluginFilter === null || pluginFilter.has(name)),
|
|
54
|
+
)
|
|
55
|
+
|
|
56
|
+
if (projectPlugins.length > 0) {
|
|
57
|
+
const dialect = config.dialect ?? 'postgres'
|
|
58
|
+
const allFileTags = outputs.map((o) => ({
|
|
59
|
+
sourceFile: o.sourceFile,
|
|
60
|
+
objects: o.fileTags.map((ft) => ({
|
|
61
|
+
objectName: ft.objectName,
|
|
62
|
+
target: ft.target,
|
|
63
|
+
tags: ft.tags,
|
|
64
|
+
})),
|
|
65
|
+
}))
|
|
66
|
+
const allDocsMeta: DocsMeta[] = outputs.flatMap((o) => o.docsMeta)
|
|
67
|
+
|
|
68
|
+
// Check if plugins generated any additional SQL
|
|
69
|
+
const hasGeneratedSql = outputs.some((o) => o.sqlOutputs.length > 0)
|
|
70
|
+
let postCompileRealm = result.atlasRealm
|
|
71
|
+
|
|
72
|
+
if (hasGeneratedSql) {
|
|
73
|
+
// Re-inspect with the merged SQL (includes generated tables like audit_log)
|
|
74
|
+
const freshRunner = await createRunner({ dialect, devUrl: config.devUrl, sqlFiles: [mergedSql] })
|
|
75
|
+
const postCompileResult = await freshRunner.inspect([mergedSql], {
|
|
76
|
+
schema: dialect === 'postgres' ? 'public' : undefined,
|
|
77
|
+
dialect,
|
|
78
|
+
})
|
|
79
|
+
await freshRunner.close()
|
|
80
|
+
|
|
81
|
+
if (postCompileResult.error) {
|
|
82
|
+
throw new CliError(`Post-compile schema inspect failed: ${postCompileResult.error}`)
|
|
83
|
+
}
|
|
84
|
+
postCompileRealm = postCompileResult.schema
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
for (const [nsName, plugin] of projectPlugins) {
|
|
88
|
+
const hook = plugin.afterCompile ?? plugin.generateProject
|
|
89
|
+
const ctx: ProjectContext = {
|
|
90
|
+
outputs,
|
|
91
|
+
mergedSql,
|
|
92
|
+
allFileTags,
|
|
93
|
+
docsMeta: allDocsMeta,
|
|
94
|
+
config: (config.namespaces?.[nsName] ?? {}) as Record<string, unknown>,
|
|
95
|
+
projectRoot: configRoot,
|
|
96
|
+
atlasRealm: postCompileRealm,
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
try {
|
|
100
|
+
const result = await hook!(ctx)
|
|
101
|
+
if (result?.files) {
|
|
102
|
+
for (const file of result.files) {
|
|
103
|
+
const outPath = path.resolve(configRoot, file.filePath)
|
|
104
|
+
try {
|
|
105
|
+
fs.mkdirSync(path.dirname(outPath), { recursive: true })
|
|
106
|
+
fs.writeFileSync(outPath, file.content, 'utf-8')
|
|
107
|
+
} catch (writeErr: any) {
|
|
108
|
+
throw new CliError(`Failed to write ${outPath}: ${writeErr?.message ?? writeErr}`)
|
|
109
|
+
}
|
|
110
|
+
const label = (file as any).source ? `${nsName}/${(file as any).source}` : nsName
|
|
111
|
+
console.error(pc.green(`[${label}] wrote ${path.relative(process.cwd(), outPath)}`))
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
} catch (err: any) {
|
|
115
|
+
if (err instanceof CliError) throw err
|
|
116
|
+
if (configPath) {
|
|
117
|
+
console.error(pc.dim(`config: ${configPath}`))
|
|
118
|
+
}
|
|
119
|
+
throw new CliError(`[${nsName}] ${err?.message ?? err}`)
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
if (totalErrors > 0) {
|
|
125
|
+
throw new CliError(`${totalErrors} error(s) encountered`)
|
|
126
|
+
}
|
|
127
|
+
}
|