@sqldoc/cli 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,194 @@
1
+ import * as fs from 'node:fs'
2
+ import * as path from 'node:path'
3
+ import { createRunner } from '@sqldoc/atlas'
4
+ import type { CompilerOutput, NamespacePlugin, ResolvedConfig, SqlStatement } from '@sqldoc/core'
5
+ import { compile, loadImports, parse, SqlparserTsAdapter, validate } from '@sqldoc/core'
6
+ import pc from 'picocolors'
7
+ import { promptAndInstallMissing } from './auto-install.ts'
8
+ import { discoverSqlFiles } from './discover.ts'
9
+ import { formatDiagnostic } from './format.ts'
10
+
11
/** Result from running the compile pipeline (see {@link runCompilePipeline}) */
export interface PipelineResult {
  /** Per-file mergedSql outputs joined with a single newline */
  mergedSql: string
  /** All per-file CompilerOutput objects, in discovery order */
  outputs: CompilerOutput[]
  /** All loaded plugins keyed by namespace name (first occurrence wins) */
  plugins: Map<string, NamespacePlugin>
  /** Count of errors encountered (load, validation, and compile errors) */
  totalErrors: number
  /** Atlas realm from initial inspect (pre-compile schema); unset when no files were found */
  atlasRealm?: unknown
}
24
+
25
+ /**
26
+ * Run the full sqldoc compile pipeline: discover files, parse, validate, compile.
27
+ *
28
+ * This is the core logic extracted from the codegen command for reuse by
29
+ * both `codegen` and `migrate` commands. It does NOT handle:
30
+ * - afterCompile hooks
31
+ * - process.exit
32
+ *
33
+ * Those concerns remain in the caller.
34
+ */
35
+ export async function runCompilePipeline(
36
+ inputPath: string,
37
+ config: ResolvedConfig,
38
+ _configRoot: string,
39
+ ): Promise<PipelineResult> {
40
+ // Discover SQL files
41
+ const sqlFiles = await discoverSqlFiles(inputPath, config.include)
42
+ if (sqlFiles.length === 0) {
43
+ console.error(pc.yellow('No SQL files found'))
44
+ return { mergedSql: '', outputs: [], plugins: new Map(), totalErrors: 0 }
45
+ }
46
+
47
+ // Initialize AST adapter once
48
+ const adapter = new SqlparserTsAdapter()
49
+ await adapter.init()
50
+
51
+ // ── Atlas -- required for compilation ──────────────────────────────
52
+ const dialect = config.dialect ?? 'postgres'
53
+ const allRawContents = sqlFiles.map((f) => fs.readFileSync(f, 'utf-8'))
54
+
55
+ // Detect goose migration format and warn once
56
+ if (allRawContents.some((sql) => /^--\s*\+goose\s+(Up|Down)/m.test(sql))) {
57
+ console.error(pc.yellow('Warning: detected goose migration format. Down scripts will be stripped.'))
58
+ }
59
+
60
+ const allSqlContents = allRawContents.map(stripMigrationDown)
61
+
62
+ // Pass SQL files to createRunner so it can detect CREATE EXTENSION
63
+ // and load the right extensions into pglite (or validate on real postgres)
64
+ const atlasRunner = await createRunner({ dialect, devUrl: config.devUrl, sqlFiles: allSqlContents })
65
+
66
+ const mergedOutputs: string[] = []
67
+ const allOutputs: CompilerOutput[] = []
68
+ const allPlugins = new Map<string, NamespacePlugin>()
69
+ let totalErrors = 0
70
+ let atlasRealm: unknown
71
+
72
+ try {
73
+ const relFiles = sqlFiles.map((f) => path.relative(process.cwd(), f))
74
+ const inspectResult = await atlasRunner.inspect(allSqlContents, {
75
+ schema: dialect === 'postgres' ? 'public' : undefined,
76
+ dialect,
77
+ fileNames: relFiles,
78
+ })
79
+ if (!inspectResult.schema) {
80
+ throw new Error(inspectResult.error ?? 'Atlas failed to parse schema')
81
+ }
82
+ if (inspectResult.error) {
83
+ console.error(pc.yellow(inspectResult.error))
84
+ }
85
+ atlasRealm = inspectResult.schema
86
+
87
+ for (const filePath of sqlFiles) {
88
+ const rel = path.relative(process.cwd(), filePath)
89
+ console.log(pc.cyan(`── ${rel}`))
90
+ const source = fs.readFileSync(filePath, 'utf-8')
91
+
92
+ // Parse tags and imports
93
+ const { imports, tags } = parse(source)
94
+
95
+ // Load namespace plugins (with auto-install for missing packages)
96
+ let { namespaces, errors: loadErrors } = await loadImports(
97
+ imports.map((i) => i.path),
98
+ filePath,
99
+ )
100
+
101
+ if (loadErrors.length > 0) {
102
+ // Try auto-installing missing packages
103
+ const retryResult = await promptAndInstallMissing(
104
+ loadErrors,
105
+ imports.map((i) => i.path),
106
+ filePath,
107
+ )
108
+ if (retryResult) {
109
+ namespaces = retryResult.namespaces
110
+ loadErrors = retryResult.errors
111
+ }
112
+
113
+ // Report any remaining errors
114
+ if (loadErrors.length > 0) {
115
+ for (const err of loadErrors) {
116
+ console.error(pc.red(`Error loading ${err.importPath}: ${err.message}`))
117
+ }
118
+ totalErrors += loadErrors.length
119
+ }
120
+ }
121
+
122
+ // Cast TagNamespace to NamespacePlugin (plugins extend TagNamespace)
123
+ const plugins = new Map<string, NamespacePlugin>([...namespaces].map(([k, v]) => [k, v as NamespacePlugin]))
124
+
125
+ // Parse SQL AST
126
+ let statements: SqlStatement[] = []
127
+ try {
128
+ statements = adapter.parseStatements(source)
129
+ } catch {
130
+ // AST parse failure is non-fatal
131
+ }
132
+
133
+ // Validate before compiling
134
+ const diagnostics = validate(tags, namespaces, source, statements)
135
+ for (const d of diagnostics) {
136
+ console.error(formatDiagnostic(filePath, d))
137
+ if (d.severity === 'error') totalErrors++
138
+ }
139
+
140
+ // Abort this file if validation errors found
141
+ if (diagnostics.some((d) => d.severity === 'error')) {
142
+ continue
143
+ }
144
+
145
+ // Compile with Atlas schema
146
+ const output = compile({ source, filePath, plugins, statements, adapter, config, atlasRealm })
147
+
148
+ mergedOutputs.push(output.mergedSql)
149
+ allOutputs.push(output)
150
+ for (const [name, plugin] of plugins) {
151
+ if (!allPlugins.has(name)) allPlugins.set(name, plugin)
152
+ }
153
+
154
+ if (output.errors.length > 0) {
155
+ for (const err of output.errors) {
156
+ console.error(pc.red(`[${err.namespace}] ${err.message}`))
157
+ }
158
+ totalErrors += output.errors.length
159
+ }
160
+
161
+ // Write code outputs if any
162
+ if (output.codeOutputs.length > 0) {
163
+ const codeOutDir = config.codeOutDir ?? './sqldoc-out'
164
+ for (const codeOutput of output.codeOutputs) {
165
+ const outPath = path.resolve(codeOutDir, codeOutput.filePath)
166
+ fs.mkdirSync(path.dirname(outPath), { recursive: true })
167
+ fs.writeFileSync(outPath, codeOutput.content, 'utf-8')
168
+ }
169
+ }
170
+ }
171
+ } finally {
172
+ await atlasRunner.close()
173
+ }
174
+
175
+ return {
176
+ mergedSql: mergedOutputs.join('\n'),
177
+ outputs: allOutputs,
178
+ plugins: allPlugins,
179
+ totalErrors,
180
+ atlasRealm,
181
+ }
182
+ }
183
+
184
+ /**
185
+ * Strip everything after -- +goose Down (or similar migration tool markers).
186
+ * Only the "up" portion is relevant for schema inspection.
187
+ */
188
+ function stripMigrationDown(sql: string): string {
189
+ // Remove -- +goose Up/Down markers and everything after Down
190
+ const downIdx = sql.search(/^--\s*\+goose\s+Down/m)
191
+ const stripped = downIdx === -1 ? sql : sql.substring(0, downIdx).trimEnd()
192
+ // Remove the -- +goose Up marker itself
193
+ return stripped.replace(/^--\s*\+goose\s+Up\s*$/gm, '').trimStart()
194
+ }
@@ -0,0 +1,149 @@
1
+ import type { AtlasChange } from '@sqldoc/atlas'
2
+ import pc from 'picocolors'
3
+
4
+ /**
5
+ * Render structured schema changes with colors and grouping.
6
+ *
7
+ * Table-level changes (add/drop/rename table/view/function) get their own line.
8
+ * Column/index changes are nested under their parent table.
9
+ *
10
+ * Color coding:
11
+ * - Green (+) for additions
12
+ * - Red (-) for drops (destructive)
13
+ * - Yellow (~) for modifications/renames
14
+ */
15
+ export function renderChanges(changes: AtlasChange[]): string[] {
16
+ if (changes.length === 0) return []
17
+
18
+ // Separate table-level changes from sub-changes
19
+ const tableLevelTypes = new Set([
20
+ 'add_table',
21
+ 'drop_table',
22
+ 'rename_table',
23
+ 'add_view',
24
+ 'drop_view',
25
+ 'add_function',
26
+ 'drop_function',
27
+ ])
28
+
29
+ const tableLevel: AtlasChange[] = []
30
+ const subChanges: AtlasChange[] = []
31
+
32
+ for (const c of changes) {
33
+ if (tableLevelTypes.has(c.type)) {
34
+ tableLevel.push(c)
35
+ } else {
36
+ subChanges.push(c)
37
+ }
38
+ }
39
+
40
+ // Group sub-changes by table
41
+ const byTable = new Map<string, AtlasChange[]>()
42
+ for (const c of subChanges) {
43
+ const existing = byTable.get(c.table) ?? []
44
+ existing.push(c)
45
+ byTable.set(c.table, existing)
46
+ }
47
+
48
+ // Tables that have table-level changes (so we don't print a header for them)
49
+ const tableLevelNames = new Set(tableLevel.map((c) => c.table))
50
+
51
+ const lines: string[] = []
52
+
53
+ // Render table-level changes first
54
+ for (const c of tableLevel) {
55
+ lines.push(formatTableChange(c))
56
+
57
+ // If this table also has sub-changes, render them nested
58
+ const nested = byTable.get(c.table)
59
+ if (nested) {
60
+ for (const sc of nested) {
61
+ lines.push(formatSubChange(sc))
62
+ }
63
+ byTable.delete(c.table)
64
+ }
65
+ }
66
+
67
+ // Render remaining sub-changes grouped by table
68
+ for (const [table, tChanges] of byTable) {
69
+ if (!tableLevelNames.has(table)) {
70
+ lines.push(pc.dim(` ${table}`))
71
+ }
72
+ for (const sc of tChanges) {
73
+ lines.push(formatSubChange(sc))
74
+ }
75
+ }
76
+
77
+ return lines
78
+ }
79
+
80
+ /**
81
+ * Print rendered changes to stderr.
82
+ */
83
+ export function printChanges(changes: AtlasChange[], header?: string): void {
84
+ const lines = renderChanges(changes)
85
+ if (lines.length === 0) return
86
+
87
+ if (header) {
88
+ console.error(header)
89
+ console.error('')
90
+ }
91
+ for (const line of lines) {
92
+ console.error(line)
93
+ }
94
+ console.error('')
95
+ }
96
+
97
+ function formatTableChange(c: AtlasChange): string {
98
+ switch (c.type) {
99
+ case 'add_table':
100
+ return pc.green(` + ${c.table} (new table)`)
101
+ case 'drop_table':
102
+ return pc.red(` - ${c.table} (dropped)`)
103
+ case 'rename_table':
104
+ return pc.yellow(` ~ ${c.table} (renamed from ${oldName(c.detail)})`)
105
+ case 'add_view':
106
+ return pc.green(` + ${c.table} (view)`)
107
+ case 'drop_view':
108
+ return pc.red(` - ${c.table} (view dropped)`)
109
+ case 'add_function':
110
+ return pc.green(` + ${c.table} (function)`)
111
+ case 'drop_function':
112
+ return pc.red(` - ${c.table} (function dropped)`)
113
+ default:
114
+ return ` ${c.table}`
115
+ }
116
+ }
117
+
118
+ function formatSubChange(c: AtlasChange): string {
119
+ switch (c.type) {
120
+ case 'add_column': {
121
+ const detail = c.detail ? ` (${c.detail})` : ''
122
+ return pc.green(` + ${c.name}${detail}`)
123
+ }
124
+ case 'drop_column':
125
+ return pc.red(` - ${c.name}`)
126
+ case 'rename_column':
127
+ return pc.yellow(` ~ ${oldName(c.detail)} -> ${c.name} (renamed)`)
128
+ case 'modify_column': {
129
+ const detail = c.detail ? ` (${c.detail})` : ''
130
+ return pc.yellow(` ~ ${c.name}${detail}`)
131
+ }
132
+ case 'add_index':
133
+ return pc.green(` + ${c.name} (index)`)
134
+ case 'drop_index':
135
+ return pc.red(` - ${c.name} (index)`)
136
+ default:
137
+ return ` ${c.name ?? c.type}`
138
+ }
139
+ }
140
+
141
+ /**
142
+ * Extract the old name from a "old -> new" detail string.
143
+ */
144
+ function oldName(detail?: string): string {
145
+ if (!detail) return '?'
146
+ const arrow = detail.indexOf(' -> ')
147
+ if (arrow === -1) return detail
148
+ return detail.substring(0, arrow)
149
+ }