@sqldoc/cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +37 -0
- package/src/__tests__/binary-entry.test.ts +19 -0
- package/src/__tests__/codegen.test.ts +103 -0
- package/src/__tests__/destructive.test.ts +132 -0
- package/src/__tests__/migration-formats.test.ts +480 -0
- package/src/__tests__/migrations.test.ts +129 -0
- package/src/__tests__/pretty-changes.test.ts +153 -0
- package/src/__tests__/rename-detection.test.ts +142 -0
- package/src/__tests__/validate.test.ts +110 -0
- package/src/commands/codegen.ts +127 -0
- package/src/commands/doctor.ts +175 -0
- package/src/commands/lint.ts +102 -0
- package/src/commands/migrate.ts +345 -0
- package/src/commands/schema.ts +329 -0
- package/src/commands/validate.ts +100 -0
- package/src/errors.ts +24 -0
- package/src/index.ts +103 -0
- package/src/runtime.ts +17 -0
- package/src/utils/auto-install.ts +116 -0
- package/src/utils/destructive.ts +99 -0
- package/src/utils/discover.ts +35 -0
- package/src/utils/format.ts +23 -0
- package/src/utils/generate-config-types.ts +73 -0
- package/src/utils/migration-formats.ts +347 -0
- package/src/utils/migrations.ts +74 -0
- package/src/utils/pipeline.ts +194 -0
- package/src/utils/pretty-changes.ts +149 -0
|
@@ -0,0 +1,329 @@
|
|
|
1
|
+
import * as fs from 'node:fs'
|
|
2
|
+
import * as path from 'node:path'
|
|
3
|
+
import type { AtlasResult } from '@sqldoc/atlas'
|
|
4
|
+
import { createRunner } from '@sqldoc/atlas'
|
|
5
|
+
import type { ResolvedConfig } from '@sqldoc/core'
|
|
6
|
+
import { loadConfig, resolveProject } from '@sqldoc/core'
|
|
7
|
+
import pc from 'picocolors'
|
|
8
|
+
import { CliError } from '../errors.ts'
|
|
9
|
+
import { runCompilePipeline } from '../utils/pipeline.ts'
|
|
10
|
+
import { printChanges } from '../utils/pretty-changes.ts'
|
|
11
|
+
|
|
12
|
+
type Format = 'sql' | 'json' | 'pretty'
|
|
13
|
+
|
|
14
|
+
// ── Source resolution ────────────────────────────────────────────────
|
|
15
|
+
|
|
16
|
+
interface ResolvedSource {
|
|
17
|
+
type: 'file' | 'directory' | 'database'
|
|
18
|
+
/** For database: connection URL. For file/directory: merged compiled SQL */
|
|
19
|
+
value: string
|
|
20
|
+
/** Atlas realm from the pipeline (reuse instead of re-inspecting) */
|
|
21
|
+
atlasRealm?: unknown
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
async function resolveSource(source: string, config: ResolvedConfig, configRoot: string): Promise<ResolvedSource> {
|
|
25
|
+
if (
|
|
26
|
+
source.startsWith('postgres://') ||
|
|
27
|
+
source.startsWith('postgresql://') ||
|
|
28
|
+
source.startsWith('mysql://') ||
|
|
29
|
+
source.startsWith('sqlite://')
|
|
30
|
+
) {
|
|
31
|
+
return { type: 'database', value: source }
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
const resolved = path.resolve(source)
|
|
35
|
+
const result = await runCompilePipeline(resolved, config, configRoot)
|
|
36
|
+
if (result.totalErrors > 0) {
|
|
37
|
+
throw new Error(`${result.totalErrors} compilation error(s)`)
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
return {
|
|
41
|
+
type: fs.statSync(resolved).isDirectory() ? 'directory' : 'file',
|
|
42
|
+
value: result.mergedSql,
|
|
43
|
+
atlasRealm: result.atlasRealm,
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
// ── schema inspect ───────────────────────────────────────────────────
|
|
48
|
+
|
|
49
|
+
/**
 * `sqldoc schema inspect` — print the schema derived from SQL files, a
 * directory, or a live database URL.
 *
 * Source precedence: explicit argument > config.schema (errors out when
 * neither is set). `--dev-url` overrides the configured dev database URL.
 *
 * @param source  SQL file/directory path or database URL (optional).
 * @param options CLI flags: config path, output format, dev URL, project name.
 * @throws CliError on missing source, inspect failures, or wrapped low-level errors.
 */
export async function schemaInspectCommand(
  source: string | undefined,
  options: { config?: string; format?: string; devUrl?: string; project?: string },
): Promise<void> {
  // Config root: directory of an explicit --config file, else env override, else cwd.
  const configRoot = options.config
    ? path.dirname(path.resolve(options.config))
    : process.env.SQLDOC_PROJECT_ROOT || process.cwd()
  const { config: rawConfig } = await loadConfig(configRoot, options.config)
  const config = resolveProject(rawConfig, options.project)
  // CLI flag wins over the configured dev database URL.
  if (options.devUrl) config.devUrl = options.devUrl

  // Resolve source: explicit arg > config.schema > error
  const resolvedSource = source ?? config.schema
  if (!resolvedSource) {
    throw new CliError('No source provided. Specify a path argument or set "schema" in sqldoc.config.ts')
  }
  const dialect = (config.dialect ?? 'postgres') as 'postgres' | 'mysql' | 'sqlite'
  const format = (options.format ?? 'sql') as Format

  try {
    const resolved = await resolveSource(resolvedSource, config, configRoot)

    if (resolved.type === 'database') {
      // Live database — inspect directly, no compilation needed
      const runner = await createRunner({ dialect, devUrl: resolved.value })
      try {
        const result = await runner.inspect([], {
          // Postgres inspection defaults to the 'public' schema; other dialects pass no filter.
          schema: dialect === 'postgres' ? 'public' : undefined,
          dialect,
        })
        if (result.error) {
          throw new CliError(`Inspect error: ${result.error}`)
        }
        outputInspect(result, format)
      } finally {
        // Always release the runner (it may hold a dev database/container).
        await runner.close()
      }
    } else if (resolved.atlasRealm) {
      // Pipeline already inspected — reuse the realm
      outputInspect({ schema: resolved.atlasRealm } as AtlasResult, format)
    } else {
      throw new CliError('No schema available')
    }
  } catch (err: any) {
    // Re-throw CliErrors untouched; wrap anything else in a friendly message.
    if (err instanceof CliError) throw err
    throw new CliError(friendlyError(err, config))
  }
}
|
|
97
|
+
|
|
98
|
+
function outputInspect(result: AtlasResult, format: Format): void {
|
|
99
|
+
if (format === 'json') {
|
|
100
|
+
console.log(JSON.stringify(result.schema, null, 2))
|
|
101
|
+
} else {
|
|
102
|
+
if (result.statements?.length) {
|
|
103
|
+
console.log(`${result.statements.join(';\n')};`)
|
|
104
|
+
} else if (result.schema) {
|
|
105
|
+
console.log(JSON.stringify(result.schema, null, 2))
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
// ── schema diff ──────────────────────────────────────────────────────
|
|
111
|
+
|
|
112
|
+
/**
 * `sqldoc schema diff` — compare two schema states (SQL file/directory or live DB).
 *
 * Defaults: when both --from and --to are omitted and the config defines both a
 * schema and a migrations dir, diffs migrations -> schema; otherwise --to falls
 * back to config.schema and a missing --from means "empty state". With --check,
 * any difference makes the command exit non-zero (CI mode).
 *
 * @throws CliError when --to cannot be resolved, on diff failures, or on drift in check mode.
 */
export async function schemaDiffCommand(options: {
  config?: string
  from?: string
  to?: string
  format?: string
  devUrl?: string
  check?: boolean
  project?: string
}): Promise<void> {
  // Config root: directory of an explicit --config file, else env override, else cwd.
  const configRoot = options.config
    ? path.dirname(path.resolve(options.config))
    : process.env.SQLDOC_PROJECT_ROOT || process.cwd()
  const { config: rawConfig } = await loadConfig(configRoot, options.config)
  const config = resolveProject(rawConfig, options.project)
  // CLI flag wins over the configured dev database URL.
  if (options.devUrl) config.devUrl = options.devUrl
  const dialect = (config.dialect ?? 'postgres') as 'postgres' | 'mysql' | 'sqlite'
  const format = (options.format ?? 'sql') as Format

  // Default --to to config.schema, --from to config.migrations.dir when both omitted
  let toSource = options.to
  let fromSource = options.from

  if (!toSource && !fromSource && config.schema && config.migrations?.dir) {
    // Smart default: diff migrations -> schema
    fromSource = path.resolve(configRoot, config.migrations.dir)
    toSource = config.schema
  } else {
    toSource = toSource ?? config.schema
  }

  if (!toSource) {
    throw new CliError('--to is required. Usage: sqldoc schema diff --to <source> [--from <source>]')
  }

  try {
    const toResolved = await resolveSource(toSource, config, configRoot)
    const fromResolved = fromSource
      ? await resolveSource(fromSource, config, configRoot)
      : { type: 'file' as const, value: '' } // empty = no existing schema

    // Live databases contribute no inline SQL — they get inspected over the wire.
    const fromSql: string[] = fromResolved.type === 'database' ? [] : [fromResolved.value]
    const toSql: string[] = toResolved.type === 'database' ? [] : [toResolved.value]

    if (fromResolved.type === 'database' || toResolved.type === 'database') {
      // At least one side is a live DB — delegate to the live-diff path.
      await diffWithLiveDb(fromResolved, toResolved, config, dialect, format, options.check ?? false)
      return
    }

    // Pass both from + to SQL so extensions are detected and loaded
    const allSql = [...fromSql, ...toSql].filter(Boolean)
    const runner = await createRunner({ dialect, devUrl: config.devUrl, sqlFiles: allSql })
    try {
      const result = await runner.diff(fromSql, toSql, {
        schema: dialect === 'postgres' ? 'public' : undefined,
        dialect,
      })
      outputDiff(result, format, options.check ?? false)
    } finally {
      // Always release the runner (it may hold a dev database/container).
      await runner.close()
    }
  } catch (err: any) {
    // Re-throw CliErrors untouched; wrap anything else in a friendly message.
    if (err instanceof CliError) throw err
    throw new CliError(friendlyError(err, config))
  }
}
|
|
177
|
+
|
|
178
|
+
/**
 * Diff path used when at least one side is a live database.
 *
 * Inspects the live side over its own connection and the SQL side via the dev
 * database, then performs the actual diff on a third (dev) runner. For the
 * live -> SQL direction with json/pretty output, the two inspected realms are
 * compared directly instead of diffing statements.
 *
 * NOTE(review): when `to` is the live side (or format is 'sql'), the two realm
 * inspections below are computed but unused by the diff call — presumably kept
 * for symmetry/error surfacing; confirm before relying on this.
 */
async function diffWithLiveDb(
  from: ResolvedSource,
  to: ResolvedSource,
  config: ResolvedConfig,
  dialect: 'postgres' | 'mysql' | 'sqlite',
  format: Format,
  check: boolean,
): Promise<void> {
  const schemaOpt = dialect === 'postgres' ? 'public' : undefined

  // Exactly one side is expected to be a live database here.
  const liveSource = from.type === 'database' ? from : to
  const sqlSource = from.type === 'database' ? to : from

  // Inspect the live database over its own connection.
  const liveRunner = await createRunner({ dialect, devUrl: liveSource.value })
  let liveRealm
  try {
    const liveResult = await liveRunner.inspect([], { schema: schemaOpt, dialect })
    if (liveResult.error) throw new Error(`Live DB inspect: ${liveResult.error}`)
    liveRealm = liveResult.schema
  } finally {
    await liveRunner.close()
  }

  // Inspect the compiled SQL by loading it into the dev database.
  const devRunner = await createRunner({ dialect, devUrl: config.devUrl, sqlFiles: [sqlSource.value] })
  let sqlRealm
  try {
    const sqlResult = await devRunner.inspect([sqlSource.value], { schema: schemaOpt, dialect })
    if (sqlResult.error) throw new Error(`SQL inspect: ${sqlResult.error}`)
    sqlRealm = sqlResult.schema
  } finally {
    await devRunner.close()
  }

  // A fresh runner for the diff itself, seeded with whichever side(s) are SQL.
  const diffRunner = await createRunner({
    dialect,
    devUrl: config.devUrl,
    sqlFiles: [...(from.type !== 'database' ? [from.value] : []), ...(to.type !== 'database' ? [to.value] : [])],
  })
  try {
    const fromSql = from.type === 'database' ? [] : [from.value]
    const toSql = to.type === 'database' ? [] : [to.value]

    if (from.type === 'database' && to.type !== 'database') {
      // live -> SQL with structured output: compare the inspected realms directly.
      if (format === 'json') {
        console.log(JSON.stringify({ from: liveRealm, to: sqlRealm }, null, 2))
        return
      } else if (format === 'pretty') {
        prettyCompareRealms(liveRealm, sqlRealm, check)
        return
      }
    }
    const result = await diffRunner.diff(fromSql, toSql, { schema: schemaOpt, dialect })
    outputDiff(result, format, check)
  } finally {
    await diffRunner.close()
  }
}
|
|
235
|
+
|
|
236
|
+
/**
 * Print a diff result in the requested format and enforce --check semantics.
 *
 * sql: one terminated statement per line on stdout; json: `{ statements }`
 * payload; pretty: colored change list (structured changes when available,
 * otherwise a statement-level pseudo-diff) on stderr.
 *
 * @throws CliError when the diff itself failed, or when `check` is set and any
 *   difference exists.
 */
function outputDiff(result: AtlasResult, format: Format, check: boolean): void {
  if (result.error) {
    throw new CliError(`Diff error: ${result.error}`)
  }

  const stmts = result.statements ?? []

  if (stmts.length === 0) {
    // No drift: stay silent for plain sql/json output, but tell humans/CI.
    if (format === 'pretty' || check) {
      console.error(pc.green('Schemas are identical. No changes detected.'))
    }
    return
  }

  switch (format) {
    case 'sql':
      for (const stmt of stmts) {
        console.log(`${stmt};`)
      }
      break
    case 'json':
      console.log(JSON.stringify({ statements: stmts }, null, 2))
      break
    case 'pretty':
      // Prefer structured changes (richer rendering) over raw statements.
      if (result.changes && result.changes.length > 0) {
        const header = check ? pc.red(pc.bold('Schema drift detected!')) : pc.yellow(pc.bold('Schema differences:'))
        printChanges(result.changes, header)
        console.error(` ${stmts.length} statement(s)`)
      } else {
        prettyDiff(stmts, check)
      }
      break
  }

  // In --check (CI) mode any difference is a failure.
  if (check) {
    throw new CliError('Schema drift detected')
  }
}
|
|
274
|
+
|
|
275
|
+
function prettyDiff(statements: string[], check: boolean): void {
|
|
276
|
+
const header = check ? pc.red(pc.bold('Schema drift detected!')) : pc.yellow(pc.bold('Schema differences:'))
|
|
277
|
+
console.error(header)
|
|
278
|
+
console.error('')
|
|
279
|
+
|
|
280
|
+
for (const stmt of statements) {
|
|
281
|
+
const upper = stmt.trimStart().toUpperCase()
|
|
282
|
+
if (upper.startsWith('CREATE')) {
|
|
283
|
+
console.error(pc.green(` + ${stmt}`))
|
|
284
|
+
} else if (upper.startsWith('DROP')) {
|
|
285
|
+
console.error(pc.red(` - ${stmt}`))
|
|
286
|
+
} else if (upper.startsWith('ALTER')) {
|
|
287
|
+
if (upper.includes('ADD')) {
|
|
288
|
+
console.error(pc.green(` ~ ${stmt}`))
|
|
289
|
+
} else if (upper.includes('DROP')) {
|
|
290
|
+
console.error(pc.red(` ~ ${stmt}`))
|
|
291
|
+
} else {
|
|
292
|
+
console.error(pc.yellow(` ~ ${stmt}`))
|
|
293
|
+
}
|
|
294
|
+
} else {
|
|
295
|
+
console.error(` ${stmt}`)
|
|
296
|
+
}
|
|
297
|
+
}
|
|
298
|
+
|
|
299
|
+
console.error('')
|
|
300
|
+
console.error(` ${statements.length} statement(s)`)
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
function prettyCompareRealms(from: any, to: any, check: boolean): void {
|
|
304
|
+
const fromTables = new Set((from?.schemas?.[0]?.tables ?? []).map((t: any) => t.name))
|
|
305
|
+
const toTables = new Set((to?.schemas?.[0]?.tables ?? []).map((t: any) => t.name))
|
|
306
|
+
|
|
307
|
+
const stmts: string[] = []
|
|
308
|
+
for (const name of toTables) {
|
|
309
|
+
if (!fromTables.has(name)) stmts.push(`CREATE TABLE ${name} (new)`)
|
|
310
|
+
}
|
|
311
|
+
for (const name of fromTables) {
|
|
312
|
+
if (!toTables.has(name)) stmts.push(`DROP TABLE ${name}`)
|
|
313
|
+
}
|
|
314
|
+
|
|
315
|
+
if (stmts.length === 0) {
|
|
316
|
+
console.error(pc.green('Schemas are identical. No changes detected.'))
|
|
317
|
+
return
|
|
318
|
+
}
|
|
319
|
+
|
|
320
|
+
prettyDiff(stmts, check)
|
|
321
|
+
if (check) throw new CliError('Schema drift detected')
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
function friendlyError(err: any, config: ResolvedConfig): string {
|
|
325
|
+
if (err?.code === 'ECONNREFUSED') {
|
|
326
|
+
return `Cannot connect to database${config.devUrl ? ` at ${config.devUrl}` : ''}. Is it running?`
|
|
327
|
+
}
|
|
328
|
+
return err?.message ?? String(err)
|
|
329
|
+
}
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import * as fs from 'node:fs'
|
|
2
|
+
import * as path from 'node:path'
|
|
3
|
+
import type { Diagnostic, ResolvedConfig, SqlStatement } from '@sqldoc/core'
|
|
4
|
+
import { loadConfig, loadImports, parse, resolveProject, SqlparserTsAdapter, validate } from '@sqldoc/core'
|
|
5
|
+
import pc from 'picocolors'
|
|
6
|
+
import { CliError } from '../errors.ts'
|
|
7
|
+
import { promptAndInstallMissing } from '../utils/auto-install.ts'
|
|
8
|
+
import { discoverSqlFiles } from '../utils/discover.ts'
|
|
9
|
+
import { formatDiagnostic, formatSummary } from '../utils/format.ts'
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* validate command: checks tags in SQL files and reports diagnostics
|
|
13
|
+
* with file:line:col format. Exits non-zero when errors are found.
|
|
14
|
+
*/
|
|
15
|
+
export async function validateCommand(
|
|
16
|
+
inputPath: string | undefined,
|
|
17
|
+
options: { config?: string; project?: string },
|
|
18
|
+
): Promise<void> {
|
|
19
|
+
// Load project config
|
|
20
|
+
const configRoot = options.config
|
|
21
|
+
? path.dirname(path.resolve(options.config))
|
|
22
|
+
: process.env.SQLDOC_PROJECT_ROOT || process.cwd()
|
|
23
|
+
const { config: rawConfig } = await loadConfig(configRoot, options.config)
|
|
24
|
+
const config: ResolvedConfig = resolveProject(rawConfig, options.project)
|
|
25
|
+
|
|
26
|
+
// Resolve input path: explicit arg > config.schema > error
|
|
27
|
+
const resolvedInput = inputPath ?? config.schema
|
|
28
|
+
if (!resolvedInput) {
|
|
29
|
+
throw new CliError('No input path provided. Specify a path argument or set "schema" in sqldoc.config.ts')
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
// Discover SQL files
|
|
33
|
+
const sqlFiles = await discoverSqlFiles(resolvedInput, config.include)
|
|
34
|
+
if (sqlFiles.length === 0) {
|
|
35
|
+
console.log(pc.yellow('No SQL files found'))
|
|
36
|
+
return
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
let totalErrors = 0
|
|
40
|
+
let totalWarnings = 0
|
|
41
|
+
|
|
42
|
+
for (const filePath of sqlFiles) {
|
|
43
|
+
const source = fs.readFileSync(filePath, 'utf-8')
|
|
44
|
+
|
|
45
|
+
// Parse tags and imports
|
|
46
|
+
const { imports, tags } = parse(source)
|
|
47
|
+
|
|
48
|
+
// Load namespace definitions (with auto-install for missing packages)
|
|
49
|
+
let { namespaces, errors: loadErrors } = await loadImports(
|
|
50
|
+
imports.map((i) => i.path),
|
|
51
|
+
filePath,
|
|
52
|
+
)
|
|
53
|
+
|
|
54
|
+
if (loadErrors.length > 0) {
|
|
55
|
+
const retryResult = await promptAndInstallMissing(
|
|
56
|
+
loadErrors,
|
|
57
|
+
imports.map((i) => i.path),
|
|
58
|
+
filePath,
|
|
59
|
+
)
|
|
60
|
+
if (retryResult) {
|
|
61
|
+
namespaces = retryResult.namespaces
|
|
62
|
+
loadErrors = retryResult.errors
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
if (loadErrors.length > 0) {
|
|
66
|
+
for (const err of loadErrors) {
|
|
67
|
+
console.log(pc.red(`Error loading ${err.importPath}: ${err.message}`))
|
|
68
|
+
}
|
|
69
|
+
totalErrors += loadErrors.length
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
// Parse SQL AST for enriched validation
|
|
74
|
+
let statements: SqlStatement[] = []
|
|
75
|
+
try {
|
|
76
|
+
const adapter = new SqlparserTsAdapter()
|
|
77
|
+
await adapter.init()
|
|
78
|
+
statements = adapter.parseStatements(source)
|
|
79
|
+
} catch {
|
|
80
|
+
// AST parse failure is non-fatal
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
// Validate: signature is validate(tags, namespaces, docText, stmts?)
|
|
84
|
+
const diagnostics: Diagnostic[] = validate(tags, namespaces, source, statements)
|
|
85
|
+
|
|
86
|
+
// Format and print diagnostics
|
|
87
|
+
for (const d of diagnostics) {
|
|
88
|
+
console.log(formatDiagnostic(filePath, d))
|
|
89
|
+
if (d.severity === 'error') totalErrors++
|
|
90
|
+
else totalWarnings++
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
// Print summary
|
|
95
|
+
console.log(formatSummary(totalErrors, totalWarnings))
|
|
96
|
+
|
|
97
|
+
if (totalErrors > 0) {
|
|
98
|
+
throw new CliError(`${totalErrors} validation error(s)`)
|
|
99
|
+
}
|
|
100
|
+
}
|
package/src/errors.ts
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* CLI error with an exit code and user-friendly message.
|
|
3
|
+
* Thrown by command functions, caught at the entry point.
|
|
4
|
+
*/
|
|
5
|
+
export class CliError extends Error {
|
|
6
|
+
exitCode: number
|
|
7
|
+
constructor(message: string, exitCode: number = 1) {
|
|
8
|
+
super(message)
|
|
9
|
+
this.name = 'CliError'
|
|
10
|
+
this.exitCode = exitCode
|
|
11
|
+
}
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* Format a pipeline error into a user-friendly CliError.
|
|
16
|
+
* Handles ECONNREFUSED (database not reachable) and falls back to the raw message.
|
|
17
|
+
*/
|
|
18
|
+
export function formatPipelineError(err: any, config: { devUrl?: string }): CliError {
|
|
19
|
+
const msg =
|
|
20
|
+
err?.code === 'ECONNREFUSED'
|
|
21
|
+
? `Cannot connect to database${config.devUrl ? ` at ${config.devUrl}` : ''}. Is it running?`
|
|
22
|
+
: (err?.message ?? String(err))
|
|
23
|
+
return new CliError(msg)
|
|
24
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
// Suppress Node experimental warnings (WASI)
process.removeAllListeners('warning')

import { createRequire } from 'node:module'
import { Command } from 'commander'
import pc from 'picocolors'
import { codegenCommand } from './commands/codegen.ts'
import { doctorCommand } from './commands/doctor.ts'
import { lintCommand } from './commands/lint.ts'
import { migrateCommand } from './commands/migrate.ts'
import { schemaDiffCommand, schemaInspectCommand } from './commands/schema.ts'
import { validateCommand } from './commands/validate.ts'
import { CliError } from './errors.ts'

// Read our own version from package.json (createRequire gives us require() under ESM).
const req = createRequire(import.meta.url)
const version: string = req('../package.json').version

const program = new Command()

program.name('sqldoc').description('SQL documentation and code generation tool').version(version)

// ── top-level commands ──────────────────────────────────────────────

program
  .command('codegen')
  .description('Run code generation plugins (templates, docs, etc.)')
  .argument('[path]', 'Path to SQL files or directory (defaults to config schema)')
  .option('-c, --config <path>', 'Path to sqldoc.config.ts')
  .option('-p, --plugins <names>', 'Comma-separated project-level plugin names to run (default: all)')
  .option('--project <name>', 'Select a named project from multi-project config')
  .action(codegenCommand)

program
  .command('validate')
  .description('Validate tags in SQL files')
  .argument('[path]', 'Path to SQL files or directory (defaults to config schema)')
  .option('-c, --config <path>', 'Path to sqldoc.config.ts')
  .option('--project <name>', 'Select a named project from multi-project config')
  .action(validateCommand)

program
  .command('lint')
  .description('Run lint rules from namespace plugins against SQL files')
  .argument('[path]', 'Path to SQL files or directory (defaults to config schema)')
  .option('-c, --config <path>', 'Path to sqldoc.config.ts')
  .option('-v, --verbose', 'Show ignored rules')
  .option('--project <name>', 'Select a named project from multi-project config')
  .action(lintCommand)

// ── schema subcommands (inspect / diff) ─────────────────────────────

const schema = program.command('schema').description('Schema inspection and comparison')

schema
  .command('inspect')
  .description('Inspect schema from SQL files, directory, or database')
  .argument('[source]', 'SQL file, directory, or database URL (defaults to config schema)')
  .option('-c, --config <path>', 'Path to sqldoc.config.ts')
  .option('-f, --format <format>', 'Output format: sql, json', 'sql')
  .option('--dev-url <url>', 'Dev database URL (pglite, docker://<image>, dockerfile://<path>, postgres://...)')
  .option('--project <name>', 'Select a named project from multi-project config')
  .action(schemaInspectCommand)

schema
  .command('diff')
  .description('Compare two schema states')
  .option('--from <source>', 'Source state: SQL file, directory, or database URL (default: empty)')
  .option('--to <source>', 'Target state: SQL file, directory, or database URL')
  .option('-c, --config <path>', 'Path to sqldoc.config.ts')
  .option('-f, --format <format>', 'Output format: sql, json, pretty', 'sql')
  .option('--dev-url <url>', 'Dev database URL (pglite, docker://<image>, dockerfile://<path>, postgres://...)')
  .option('--check', 'Exit non-zero if schemas differ (CI mode)')
  .option('--project <name>', 'Select a named project from multi-project config')
  .action(schemaDiffCommand)

program
  .command('migrate')
  .description('Generate migration files or check for schema drift')
  .option('-c, --config <path>', 'Path to sqldoc.config.ts')
  .option('--project <name>', 'Select a named project from multi-project config')
  .option('--check', 'Exit non-zero if schema differs from migrations (CI mode)')
  .option('--name <name>', 'Custom migration name')
  .option('--force', 'Allow destructive changes (DROP TABLE, DROP COLUMN, etc.)')
  .action(migrateCommand)

program.command('doctor').description('Check project setup and report status').action(doctorCommand)

// Global handler — force exit on both success and error
// (pglite/WASI worker threads keep the process alive otherwise)
program
  .parseAsync()
  .then(() => {
    process.exit(0)
  })
  .catch((err) => {
    // CliError carries its own exit code and a pre-formatted message.
    if (err instanceof CliError) {
      console.error(pc.red(err.message))
      process.exit(err.exitCode)
    }
    // Common infrastructure failure gets a friendlier message, no stack.
    if (err?.code === 'ECONNREFUSED') {
      console.error(pc.red('Cannot connect to database. Is it running?'))
      process.exit(1)
    }
    // Unexpected errors: message plus dimmed stack for debugging.
    console.error(pc.red(err?.message ?? String(err)))
    if (err?.stack) console.error(pc.dim(err.stack))
    process.exit(1)
  })
|
package/src/runtime.ts
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Runtime detection helpers for the sqldoc CLI.
|
|
3
|
+
* Used by binary-entry.ts to determine execution context.
|
|
4
|
+
*
|
|
5
|
+
* In Node.js (dev mode), both return false.
|
|
6
|
+
* In a Bun-compiled binary, both return true.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
/** Whether we're running inside a Bun runtime (compiled or not) */
|
|
10
|
+
export function isBunRuntime(): boolean {
|
|
11
|
+
return typeof (globalThis as any).Bun !== 'undefined'
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
/** Whether we're running as a Bun-compiled binary (not just bun CLI) */
|
|
15
|
+
export function isCompiledBinary(): boolean {
|
|
16
|
+
return isBunRuntime() && process.execPath.includes('sqldoc')
|
|
17
|
+
}
|
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
import { spawnSync } from 'node:child_process'
|
|
2
|
+
import * as fs from 'node:fs'
|
|
3
|
+
import * as path from 'node:path'
|
|
4
|
+
import * as readline from 'node:readline'
|
|
5
|
+
import type { ImportError } from '@sqldoc/core'
|
|
6
|
+
import { findSqldocDir, loadImports } from '@sqldoc/core'
|
|
7
|
+
import pc from 'picocolors'
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* Detect missing npm packages from loadImport errors.
|
|
11
|
+
* Returns an array of package names that look like "Cannot find module/package"
|
|
12
|
+
* for npm scoped/unscoped packages (not relative paths).
|
|
13
|
+
*/
|
|
14
|
+
export function extractMissingPackages(errors: ImportError[]): string[] {
|
|
15
|
+
const missing: string[] = []
|
|
16
|
+
for (const err of errors) {
|
|
17
|
+
if (
|
|
18
|
+
/cannot find (module|package)/i.test(err.message) &&
|
|
19
|
+
!err.importPath.startsWith('.') &&
|
|
20
|
+
!err.importPath.startsWith('/')
|
|
21
|
+
) {
|
|
22
|
+
missing.push(err.importPath)
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
return [...new Set(missing)]
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Prompt the user via stdin whether to install missing packages.
|
|
30
|
+
* Returns true if user agrees (Enter or 'y'/'Y').
|
|
31
|
+
*/
|
|
32
|
+
export async function promptInstall(packages: string[]): Promise<boolean> {
|
|
33
|
+
// Non-interactive (piped stdin) — skip prompt
|
|
34
|
+
if (!process.stdin.isTTY) return false
|
|
35
|
+
|
|
36
|
+
const names = packages.map((p) => pc.cyan(p)).join(', ')
|
|
37
|
+
const rl = readline.createInterface({ input: process.stdin, output: process.stderr })
|
|
38
|
+
const answer = await new Promise<string>((resolve) =>
|
|
39
|
+
rl.question(`Package ${names} is not installed. Install it? (Y/n) `, resolve),
|
|
40
|
+
)
|
|
41
|
+
rl.close()
|
|
42
|
+
return answer === '' || answer.toLowerCase() === 'y'
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
/**
|
|
46
|
+
* Find the .sqldoc/ directory, checking SQLDOC_PROJECT_ROOT first, then walking up from cwd.
|
|
47
|
+
*/
|
|
48
|
+
function findSqldocDirWithEnv(): string | null {
|
|
49
|
+
const envRoot = process.env.SQLDOC_PROJECT_ROOT
|
|
50
|
+
if (envRoot) {
|
|
51
|
+
const candidate = path.join(envRoot, '.sqldoc')
|
|
52
|
+
if (fs.existsSync(candidate)) return candidate
|
|
53
|
+
}
|
|
54
|
+
return findSqldocDir()
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
/**
 * Install packages into .sqldoc/node_modules using the same mechanism as `sqldoc add`.
 * Returns true if install succeeded.
 *
 * @param sqldocDir Absolute path to the project's .sqldoc/ directory (used as install cwd).
 * @param packages  npm package names to install.
 */
export function installPackages(sqldocDir: string, packages: string[]): boolean {
  // Detect if running as compiled Bun binary:
  // a Bun global exists but execPath does not look like a plain bun/node interpreter.
  const isBunBinary = typeof (globalThis as any).Bun !== 'undefined' && !process.execPath.match(/\/(bun|node)(\.exe)?$/)

  let installArgs: string[]
  const env: Record<string, string> = { ...process.env } as Record<string, string>

  if (isBunBinary) {
    // Re-invoke our own executable as the package manager: BUN_BE_BUN=1 makes
    // a Bun-compiled binary behave like the bun CLI, so `<self> install` works.
    installArgs = [process.execPath, 'install', ...packages]
    env.BUN_BE_BUN = '1'
  } else {
    // Detect package manager from lockfiles
    // (checked in the project root, i.e. the parent of .sqldoc/).
    const projectRoot = path.dirname(sqldocDir)
    let pm = 'npm'
    if (fs.existsSync(path.join(projectRoot, 'pnpm-lock.yaml'))) pm = 'pnpm'
    else if (fs.existsSync(path.join(projectRoot, 'yarn.lock'))) pm = 'yarn'
    else if (fs.existsSync(path.join(projectRoot, 'bun.lockb')) || fs.existsSync(path.join(projectRoot, 'bun.lock')))
      pm = 'bun'

    // yarn uses `add`; npm/pnpm/bun all accept `install <pkg>`.
    installArgs = pm === 'yarn' ? ['yarn', 'add', ...packages] : [pm, 'install', ...packages]
  }

  console.error(pc.dim(`Installing ${packages.join(', ')}...`))
  // stdio: 'inherit' streams the package manager's own output straight to the user.
  const result = spawnSync(installArgs[0], installArgs.slice(1), {
    cwd: sqldocDir,
    stdio: 'inherit',
    env,
  })

  // status is null when the process could not be spawned — also treated as failure.
  return result.status === 0
}
|
|
92
|
+
|
|
93
|
+
/**
|
|
94
|
+
* Check for missing package errors, prompt the user to install, and retry loadImports.
|
|
95
|
+
* Returns updated namespaces and remaining errors.
|
|
96
|
+
*/
|
|
97
|
+
export async function promptAndInstallMissing(
|
|
98
|
+
loadErrors: ImportError[],
|
|
99
|
+
importPaths: string[],
|
|
100
|
+
filePath: string,
|
|
101
|
+
): Promise<{ namespaces: Map<string, any>; errors: ImportError[] } | null> {
|
|
102
|
+
const missingPackages = extractMissingPackages(loadErrors)
|
|
103
|
+
if (missingPackages.length === 0) return null
|
|
104
|
+
|
|
105
|
+
const sqldocDir = findSqldocDirWithEnv()
|
|
106
|
+
if (!sqldocDir) return null
|
|
107
|
+
|
|
108
|
+
if (await promptInstall(missingPackages)) {
|
|
109
|
+
if (installPackages(sqldocDir, missingPackages)) {
|
|
110
|
+
// Retry loading after install
|
|
111
|
+
return await loadImports(importPaths, filePath)
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
return null
|
|
116
|
+
}
|