@sqldoc/cli 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,175 @@
1
+ import * as fs from 'node:fs'
2
+ import * as path from 'node:path'
3
+ import { fileURLToPath } from 'node:url'
4
+ import { findSqldocDir, loadConfig } from '@sqldoc/core'
5
+ import pc from 'picocolors'
6
+
7
+ const CHECK = pc.green('\u2713')
8
+ const CROSS = pc.red('\u2717')
9
+
10
/** Outcome of a single doctor check, rendered as one line of output. */
interface CheckResult {
  // Human-readable description printed next to the check/cross icon.
  label: string
  // Whether the check passed.
  ok: boolean
  // Extra context: the resolved path on success, or remediation advice on failure.
  detail?: string
}
15
+
16
+ /**
17
+ * sqldoc doctor: checks the project setup and reports status.
18
+ */
19
+ export async function doctorCommand(): Promise<void> {
20
+ const projectRoot = process.env.SQLDOC_PROJECT_ROOT || process.cwd()
21
+ const results: CheckResult[] = []
22
+
23
+ // 1. .sqldoc/ directory exists
24
+ const sqldocDir = findSqldocDir(projectRoot)
25
+ results.push({
26
+ label: '.sqldoc/ directory exists',
27
+ ok: sqldocDir !== null,
28
+ detail: sqldocDir ? sqldocDir : 'Not found. Run: sqldoc init',
29
+ })
30
+
31
+ // 2. package.json has @sqldoc/cli dependency
32
+ let hasPkgJson = false
33
+ let hasCliDep = false
34
+ if (sqldocDir) {
35
+ const pkgJsonPath = path.join(sqldocDir, 'package.json')
36
+ if (fs.existsSync(pkgJsonPath)) {
37
+ hasPkgJson = true
38
+ try {
39
+ const pkg = JSON.parse(fs.readFileSync(pkgJsonPath, 'utf-8'))
40
+ const deps = { ...pkg.dependencies, ...pkg.devDependencies }
41
+ hasCliDep = '@sqldoc/cli' in deps
42
+ } catch {}
43
+ }
44
+ }
45
+ results.push({
46
+ label: 'package.json has @sqldoc/cli dependency',
47
+ ok: hasCliDep,
48
+ detail: !sqldocDir
49
+ ? 'No .sqldoc/ directory'
50
+ : !hasPkgJson
51
+ ? 'No package.json in .sqldoc/'
52
+ : hasCliDep
53
+ ? undefined
54
+ : '@sqldoc/cli not listed in dependencies',
55
+ })
56
+
57
+ // 3. node_modules exists and has @sqldoc/cli
58
+ let hasNodeModules = false
59
+ let hasCliInstalled = false
60
+ if (sqldocDir) {
61
+ const nmDir = path.join(sqldocDir, 'node_modules')
62
+ hasNodeModules = fs.existsSync(nmDir)
63
+ if (hasNodeModules) {
64
+ hasCliInstalled = fs.existsSync(path.join(nmDir, '@sqldoc', 'cli'))
65
+ }
66
+ }
67
+ results.push({
68
+ label: 'node_modules/ has @sqldoc/cli installed',
69
+ ok: hasCliInstalled,
70
+ detail: !sqldocDir
71
+ ? 'No .sqldoc/ directory'
72
+ : !hasNodeModules
73
+ ? 'node_modules/ missing. Run install in .sqldoc/'
74
+ : hasCliInstalled
75
+ ? undefined
76
+ : '@sqldoc/cli not found in node_modules/',
77
+ })
78
+
79
+ // 4. atlas.wasm is findable
80
+ let wasmFound = false
81
+ let wasmPath: string | undefined
82
+ if (process.env.ATLAS_WASM_PATH && fs.existsSync(process.env.ATLAS_WASM_PATH)) {
83
+ wasmFound = true
84
+ wasmPath = process.env.ATLAS_WASM_PATH
85
+ } else {
86
+ // Walk up from current file's directory looking for atlas.wasm (same logic as @sqldoc/atlas)
87
+ let dir = path.dirname(fileURLToPath(import.meta.url))
88
+ const searched: string[] = []
89
+ outer: while (true) {
90
+ for (const candidate of [
91
+ path.join(dir, 'wasm', 'atlas.wasm'),
92
+ path.join(dir, '..', 'wasm', 'atlas.wasm'),
93
+ path.join(dir, 'node_modules', '@sqldoc', 'atlas', 'wasm', 'atlas.wasm'),
94
+ path.join(dir, 'packages', 'atlas', 'wasm', 'atlas.wasm'),
95
+ ]) {
96
+ searched.push(candidate)
97
+ if (fs.existsSync(candidate)) {
98
+ wasmFound = true
99
+ wasmPath = candidate
100
+ break outer
101
+ }
102
+ }
103
+ const parent = path.dirname(dir)
104
+ if (parent === dir) break
105
+ dir = parent
106
+ }
107
+ }
108
+ results.push({
109
+ label: 'atlas.wasm is findable',
110
+ ok: wasmFound,
111
+ detail: wasmFound ? wasmPath : 'atlas.wasm not found. Set ATLAS_WASM_PATH or check installation.',
112
+ })
113
+
114
+ // 5. pglite loads successfully
115
+ let pgliteOk = false
116
+ let pgliteDetail: string | undefined
117
+ try {
118
+ const { createPgliteAdapter } = await import('@sqldoc/atlas')
119
+ const adapter = await createPgliteAdapter()
120
+ const result = await adapter.query('SELECT 1 AS ok')
121
+ pgliteOk = result.rows.length > 0
122
+ await adapter.close()
123
+ } catch (err: any) {
124
+ pgliteDetail = err?.message ?? String(err)
125
+ }
126
+ results.push({
127
+ label: 'pglite loads successfully',
128
+ ok: pgliteOk,
129
+ detail: pgliteOk ? undefined : pgliteDetail,
130
+ })
131
+
132
+ // 6. Config file exists and is parseable
133
+ let configOk = false
134
+ let configDetail: string | undefined
135
+ try {
136
+ const { configPath } = await loadConfig(projectRoot)
137
+ if (configPath) {
138
+ configOk = true
139
+ configDetail = path.relative(projectRoot, configPath)
140
+ } else {
141
+ // No config file found — this is acceptable (defaults used)
142
+ configOk = true
143
+ configDetail = 'No config file found (using defaults)'
144
+ }
145
+ } catch (err: any) {
146
+ configDetail = err?.message ?? String(err)
147
+ }
148
+ results.push({
149
+ label: 'Config file is parseable',
150
+ ok: configOk,
151
+ detail: configDetail,
152
+ })
153
+
154
+ // Print results
155
+ console.error('')
156
+ console.error(pc.bold('sqldoc doctor'))
157
+ console.error('')
158
+ let allOk = true
159
+ for (const r of results) {
160
+ const icon = r.ok ? CHECK : CROSS
161
+ console.error(` ${icon} ${r.label}`)
162
+ if (r.detail && !r.ok) {
163
+ console.error(` ${pc.dim(r.detail)}`)
164
+ }
165
+ if (!r.ok) allOk = false
166
+ }
167
+ console.error('')
168
+
169
+ if (allOk) {
170
+ console.error(pc.green('All checks passed!'))
171
+ } else {
172
+ console.error(pc.yellow('Some checks failed. See details above.'))
173
+ process.exitCode = 1
174
+ }
175
+ }
@@ -0,0 +1,102 @@
1
+ import * as path from 'node:path'
2
+ import type { LintResult, ResolvedConfig } from '@sqldoc/core'
3
+ import { lint, loadConfig, resolveProject } from '@sqldoc/core'
4
+ import pc from 'picocolors'
5
+ import { CliError, formatPipelineError } from '../errors.ts'
6
+ import { runCompilePipeline } from '../utils/pipeline.ts'
7
+
8
+ /**
9
+ * lint command: runs lint rules from loaded namespace plugins against
10
+ * compiled SQL files and reports results with colors.
11
+ */
12
+ export async function lintCommand(
13
+ inputPath: string | undefined,
14
+ options: { config?: string; verbose?: boolean; project?: string },
15
+ ): Promise<void> {
16
+ const configRoot = options.config
17
+ ? path.dirname(path.resolve(options.config))
18
+ : process.env.SQLDOC_PROJECT_ROOT || process.cwd()
19
+ const { config: rawConfig } = await loadConfig(configRoot, options.config)
20
+ const config: ResolvedConfig = resolveProject(rawConfig, options.project)
21
+
22
+ // Resolve input path: explicit arg > config.schema > error
23
+ const resolvedInput = inputPath ?? config.schema
24
+ if (!resolvedInput) {
25
+ throw new CliError('No input path provided. Specify a path argument or set "schema" in sqldoc.config.ts')
26
+ }
27
+
28
+ let result
29
+ try {
30
+ result = await runCompilePipeline(resolvedInput, config, configRoot)
31
+ } catch (err: any) {
32
+ throw formatPipelineError(err, config)
33
+ }
34
+
35
+ const { outputs, plugins, totalErrors } = result
36
+
37
+ if (totalErrors > 0) {
38
+ throw new CliError(`${totalErrors} compilation error(s) — fix before linting`)
39
+ }
40
+
41
+ // Run the lint engine
42
+ const results = lint(outputs, plugins, config)
43
+
44
+ if (results.length === 0) {
45
+ console.log(pc.green('No lint issues found'))
46
+ return
47
+ }
48
+
49
+ // Format and print results
50
+ let errorCount = 0
51
+ let warnCount = 0
52
+ let skipCount = 0
53
+
54
+ for (const r of results) {
55
+ if (r.severity === 'skip') {
56
+ skipCount++
57
+ if (options.verbose) console.log(formatLintResult(r))
58
+ } else {
59
+ console.log(formatLintResult(r))
60
+ if (r.severity === 'error') errorCount++
61
+ else if (r.severity === 'warn') warnCount++
62
+ }
63
+ }
64
+
65
+ // Summary line
66
+ console.log('')
67
+ const parts: string[] = []
68
+ if (errorCount > 0) parts.push(pc.red(`${errorCount} error(s)`))
69
+ if (warnCount > 0) parts.push(pc.yellow(`${warnCount} warning(s)`))
70
+ if (skipCount > 0) parts.push(pc.dim(`${skipCount} ignored`))
71
+ console.log(parts.join(', '))
72
+
73
+ if (errorCount > 0) {
74
+ throw new CliError(`${errorCount} lint error(s)`)
75
+ }
76
+ }
77
+
78
+ /** Format a single lint result for terminal display */
79
+ function formatLintResult(r: LintResult): string {
80
+ const file = path.relative(process.cwd(), r.sourceFile)
81
+ const severity = formatSeverity(r.severity)
82
+ const rule = pc.dim(r.ruleName)
83
+
84
+ if (r.severity === 'skip') {
85
+ return `${file} ${severity} ${rule.padEnd(35)} ${r.message} ${pc.dim(`(${r.ignoreReason})`)}`
86
+ }
87
+
88
+ return `${file} ${severity} ${rule.padEnd(35)} ${r.message}`
89
+ }
90
+
91
+ function formatSeverity(severity: LintResult['severity']): string {
92
+ switch (severity) {
93
+ case 'error':
94
+ return pc.red('error')
95
+ case 'warn':
96
+ return pc.yellow('warn ')
97
+ case 'skip':
98
+ return pc.dim('skip ')
99
+ default:
100
+ return severity
101
+ }
102
+ }
@@ -0,0 +1,345 @@
1
+ import * as path from 'node:path'
2
+ import * as readline from 'node:readline'
3
+ import type { AtlasRename, AtlasRenameCandidate } from '@sqldoc/atlas'
4
+ import { createRunner } from '@sqldoc/atlas'
5
+ import type { CompilerOutput, ResolvedConfig } from '@sqldoc/core'
6
+ import { loadConfig, resolveProject } from '@sqldoc/core'
7
+ import pc from 'picocolors'
8
+ import { CliError, formatPipelineError } from '../errors.ts'
9
+ import { detectDestructiveChanges } from '../utils/destructive.ts'
10
+ import { concatUpScripts, readMigrations, writeMigration } from '../utils/migration-formats.ts'
11
+ import { runCompilePipeline } from '../utils/pipeline.ts'
12
+ import { printChanges } from '../utils/pretty-changes.ts'
13
+
14
/**
 * migrate command: generate migration files or check for schema drift.
 *
 * Flow:
 * 1. Read config's migrations.dir -> parse up scripts based on migrations.format
 * 2. Apply up scripts in order to dev DB -> get "current" realm
 * 3. Compile schema files through pipeline -> get "desired" realm
 * 4. Diff current vs desired -> up SQL
 * 5. Diff desired vs current -> down SQL
 * 6. Detect renames via @docs.previously and Atlas-side candidate detection
 * 7. Check for destructive changes (unless --force)
 * 8. If --check: exit 0 if no diff, non-zero if drift
 * 9. Otherwise: write migration file in configured format
 */
export async function migrateCommand(options: {
  config?: string
  project?: string
  check?: boolean
  name?: string
  force?: boolean
}): Promise<void> {
  // Config root: directory of --config if given, otherwise env override or cwd.
  const configRoot = options.config
    ? path.dirname(path.resolve(options.config))
    : process.env.SQLDOC_PROJECT_ROOT || process.cwd()
  const { config: rawConfig } = await loadConfig(configRoot, options.config)
  const config: ResolvedConfig = resolveProject(rawConfig, options.project)

  if (!config.schema) {
    throw new CliError('No "schema" configured. Set "schema" in sqldoc.config.ts')
  }

  if (!config.migrations?.dir) {
    throw new CliError('No "migrations.dir" configured. Set "migrations.dir" in sqldoc.config.ts')
  }

  const dialect = config.dialect ?? 'postgres'
  const format = config.migrations.format ?? 'plain'
  const namingConfig = config.migrations.naming ?? 'timestamp'

  // Resolve naming strategy — AI naming falls back to provided --name or generic
  let naming: 'timestamp' | 'sequential'
  if (typeof namingConfig === 'object' && namingConfig.provider === 'claude-code') {
    naming = 'timestamp' // AI naming still uses timestamp prefix, name comes from AI
  } else {
    // NOTE(review): assumes a non-object naming config is one of the two
    // plain strategy strings — an object with an unknown provider would be
    // mis-cast here; confirm config validation upstream.
    naming = namingConfig as 'timestamp' | 'sequential'
  }

  // ── Step 1: Read existing migrations ────────────────────────────────
  const migrationsDir = path.resolve(configRoot, config.migrations.dir)
  const existingMigrations = readMigrations(migrationsDir, format)
  // Concatenated up-scripts define the "current" schema state.
  const currentSql = concatUpScripts(existingMigrations)

  console.error(pc.cyan(`Found ${existingMigrations.length} existing migration(s) in ${config.migrations.dir}`))

  // ── Step 2: Compile schema files -> "desired" state ────────────────
  let desiredSql: string
  let pipelineResult: Awaited<ReturnType<typeof runCompilePipeline>>
  try {
    pipelineResult = await runCompilePipeline(path.resolve(configRoot, config.schema), config, configRoot)
    if (pipelineResult.totalErrors > 0) {
      throw new CliError(`${pipelineResult.totalErrors} compilation error(s) — fix before migrating`)
    }
    desiredSql = pipelineResult.mergedSql
  } catch (err: any) {
    // Re-throw our own CliError untouched; wrap everything else.
    if (err instanceof CliError) throw err
    throw formatPipelineError(err, config)
  }

  // ── Step 3: Build known renames from @docs.previously tags ─────────
  const knownRenames = buildRenamesFromPreviously(pipelineResult.outputs)

  if (knownRenames.length > 0) {
    for (const r of knownRenames) {
      const desc = r.type === 'column' ? `${r.table}.${r.oldName} -> ${r.newName}` : `${r.oldName} -> ${r.newName}`
      console.error(pc.cyan(`  Rename (via @docs.previously): ${desc}`))
    }
  }

  // ── Step 4: Diff current -> desired (up migration) ─────────────────
  const schemaOpt = dialect === 'postgres' ? 'public' : undefined

  // We need a runner that has both SQL contents loaded for extension detection
  const allSql = [currentSql, desiredSql].filter(Boolean)
  const runner = await createRunner({ dialect, devUrl: config.devUrl, sqlFiles: allSql })

  let upStatements: string[]
  let upChanges: import('@sqldoc/atlas').AtlasChange[] | undefined
  let downStatements: string[]

  try {
    // First diff: pass known renames, get back SQL + candidates
    const upResult = await runner.diff(currentSql ? [currentSql] : [], [desiredSql], {
      schema: schemaOpt,
      dialect,
      renames: knownRenames.length > 0 ? knownRenames : undefined,
    })

    if (upResult.error) {
      throw new CliError(`Schema diff error: ${upResult.error}`)
    }

    upStatements = upResult.statements ?? []
    upChanges = upResult.changes
    const candidates = upResult.renameCandidates ?? []

    // ── Step 4a: Interactive rename prompting (TTY only) ──────────────
    if (candidates.length > 0 && process.stdin.isTTY) {
      const accepted = await promptRenameCandidates(candidates)
      if (accepted.length > 0) {
        // Re-diff with the accepted renames added to the known set
        const allRenames = [...knownRenames, ...accepted]
        const rediffResult = await runner.diff(currentSql ? [currentSql] : [], [desiredSql], {
          schema: schemaOpt,
          dialect,
          renames: allRenames,
        })

        if (rediffResult.error) {
          throw new CliError(`Schema diff (with renames) error: ${rediffResult.error}`)
        }

        upStatements = rediffResult.statements ?? []
        upChanges = rediffResult.changes
      }
    }

    // ── Step 5: Diff desired -> current (down migration) ──────────────
    const downResult = await runner.diff([desiredSql], currentSql ? [currentSql] : [], { schema: schemaOpt, dialect })

    if (downResult.error) {
      throw new CliError(`Schema diff (reverse) error: ${downResult.error}`)
    }

    downStatements = downResult.statements ?? []
  } finally {
    // Always release the dev-DB runner, even when a diff above threw.
    await runner.close()
  }

  // ── Step 6: Handle --check mode ────────────────────────────────────
  if (options.check) {
    if (upStatements.length === 0) {
      console.error(pc.green('Schema is in sync with migrations. No drift detected.'))
      return
    }

    console.error(pc.red(pc.bold('Schema drift detected!')))
    console.error('')
    for (const stmt of upStatements) {
      console.error(pc.yellow(`  ${stmt};`))
    }
    console.error('')
    console.error(`  ${upStatements.length} statement(s) needed to reach desired state`)
    throw new CliError('Schema drift detected', 1)
  }

  // ── Step 7: Generate migration file ────────────────────────────────
  if (upStatements.length === 0) {
    console.error(pc.green('No schema changes detected. Nothing to migrate.'))
    return
  }

  // Show structured change summary if available
  if (upChanges && upChanges.length > 0) {
    printChanges(upChanges, pc.cyan(pc.bold('Changes:')))
  }

  // ── Step 7a: Destructive change detection ──────────────────────────
  const destructiveChanges = detectDestructiveChanges(upStatements)

  if (destructiveChanges.length > 0 && !options.force) {
    console.error(pc.red(pc.bold('Error: Migration contains destructive changes:')))
    for (const change of destructiveChanges) {
      console.error(pc.red(`  - ${change.description}`))
    }
    console.error('')
    console.error('Use --force to generate the migration anyway.')
    throw new CliError('Migration contains destructive changes. Use --force to proceed.', 1)
  }

  if (destructiveChanges.length > 0 && options.force) {
    console.error(pc.yellow(`Warning: ${destructiveChanges.length} destructive change(s) included (--force)`))
  }

  // Determine migration name: explicit --name wins, then AI, then generic.
  let migrationName = options.name ?? ''

  // AI naming: pipe diff SQL to claude-code for a name
  if (typeof namingConfig === 'object' && namingConfig.provider === 'claude-code' && !options.name) {
    migrationName = await aiMigrationName(upStatements)
  }

  if (!migrationName) {
    migrationName = 'migration'
  }

  // Build up/down SQL content
  const upSql = upStatements.map((s) => `${s};`).join('\n')
  const downSql = downStatements.length > 0 ? downStatements.map((s) => `${s};`).join('\n') : undefined

  const writtenFiles = writeMigration({
    dir: migrationsDir,
    name: migrationName,
    up: upSql,
    down: downSql,
    format,
    naming,
    existing: existingMigrations,
  })

  for (const file of writtenFiles) {
    const rel = path.relative(process.cwd(), file)
    console.error(pc.green(`Created ${rel}`))
  }

  console.error('')
  console.error(`  ${upStatements.length} statement(s) in up migration`)
  if (downStatements.length > 0) {
    console.error(`  ${downStatements.length} statement(s) in down migration`)
  }
}
234
+
235
+ // ── Internal helpers ──────────────────────────────────────────────────
236
+
237
+ /**
238
+ * Build a list of known renames from @docs.previously tags in compiled outputs.
239
+ * Scans fileTags for tags with namespace "docs" and tag "previously".
240
+ */
241
+ export function buildRenamesFromPreviously(outputs: CompilerOutput[]): AtlasRename[] {
242
+ const renames: AtlasRename[] = []
243
+
244
+ for (const output of outputs) {
245
+ for (const fileTag of output.fileTags) {
246
+ for (const tag of fileTag.tags) {
247
+ if (tag.namespace === 'docs' && tag.tag === 'previously') {
248
+ const args = tag.args as unknown[]
249
+ if (args.length > 0 && typeof args[0] === 'string') {
250
+ const oldName = args[0]
251
+
252
+ if (fileTag.target === 'column') {
253
+ // objectName is "table.column" for columns
254
+ const dotIdx = fileTag.objectName.lastIndexOf('.')
255
+ if (dotIdx !== -1) {
256
+ const tableName = fileTag.objectName.substring(0, dotIdx)
257
+ const newColName = fileTag.objectName.substring(dotIdx + 1)
258
+ renames.push({
259
+ type: 'column',
260
+ table: tableName,
261
+ oldName,
262
+ newName: newColName,
263
+ })
264
+ }
265
+ } else if (fileTag.target === 'table') {
266
+ renames.push({
267
+ type: 'table',
268
+ table: fileTag.objectName,
269
+ oldName,
270
+ newName: fileTag.objectName,
271
+ })
272
+ }
273
+ }
274
+ }
275
+ }
276
+ }
277
+ }
278
+
279
+ return renames
280
+ }
281
+
282
+ /**
283
+ * Prompt the user interactively for rename candidates detected by Atlas.
284
+ * Returns the list of accepted renames as AtlasRename objects.
285
+ */
286
+ async function promptRenameCandidates(candidates: AtlasRenameCandidate[]): Promise<AtlasRename[]> {
287
+ const rl = readline.createInterface({
288
+ input: process.stdin,
289
+ output: process.stderr,
290
+ })
291
+
292
+ const accepted: AtlasRename[] = []
293
+
294
+ for (const candidate of candidates) {
295
+ const typeLabel = candidate.type === 'column' ? 'Column' : 'Table'
296
+ const typeInfo = candidate.colType ? ` (${candidate.colType})` : ''
297
+ const context =
298
+ candidate.type === 'column'
299
+ ? `${typeLabel} '${candidate.oldName}' was removed and '${candidate.newName}'${typeInfo} was added on table '${candidate.table}'.`
300
+ : `${typeLabel} '${candidate.oldName}' was removed and '${candidate.newName}' was added.`
301
+
302
+ const answer = await new Promise<string>((resolve) => {
303
+ rl.question(`${context} Is this a rename? (Y/n) `, resolve)
304
+ })
305
+
306
+ const normalized = answer.trim().toLowerCase()
307
+ if (normalized === '' || normalized === 'y' || normalized === 'yes') {
308
+ accepted.push({
309
+ type: candidate.type,
310
+ table: candidate.table,
311
+ oldName: candidate.oldName,
312
+ newName: candidate.newName,
313
+ })
314
+ }
315
+ }
316
+
317
+ rl.close()
318
+ return accepted
319
+ }
320
+
321
+ /**
322
+ * Use claude-code CLI to generate a migration name from diff SQL.
323
+ * Falls back to 'migration' if claude-code is not available.
324
+ */
325
+ async function aiMigrationName(statements: string[]): Promise<string> {
326
+ try {
327
+ const { execSync } = await import('node:child_process')
328
+ const diffSql = statements.join(';\n')
329
+ const result = execSync(
330
+ `echo ${JSON.stringify(diffSql)} | claude -p "Name this migration in 3-5 words, snake_case, no prefix. Output ONLY the name, nothing else."`,
331
+ { encoding: 'utf-8', timeout: 30000, stdio: ['pipe', 'pipe', 'pipe'] },
332
+ )
333
+ const name = result
334
+ .trim()
335
+ .replace(/[^a-z0-9_]/gi, '_')
336
+ .replace(/^_+|_+$/g, '')
337
+ .toLowerCase()
338
+ if (name && name.length > 2 && name.length < 60) {
339
+ return name
340
+ }
341
+ } catch {
342
+ // claude-code not available or failed — fall back silently
343
+ }
344
+ return 'migration'
345
+ }