@sqldoc/cli 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,99 @@
1
+ /**
2
+ * Detect destructive SQL statements in migration diff output.
3
+ *
4
+ * Scans individual SQL statements for patterns that would destroy data:
5
+ * - DROP TABLE
6
+ * - ALTER TABLE ... DROP COLUMN
7
+ * - DROP INDEX
8
+ * - DROP VIEW
9
+ * - DROP FUNCTION
10
+ * - TRUNCATE
11
+ */
12
+
13
/** A single destructive operation detected in a SQL statement. */
export interface DestructiveChange {
  /** Human-readable description of the destructive change (e.g. "DROP TABLE users") */
  description: string
  /** The full, original SQL statement that matched */
  statement: string
}
19
+
20
+ /**
21
+ * Scan an array of SQL statements for destructive operations.
22
+ * Returns a list of destructive changes found (empty if none).
23
+ */
24
+ export function detectDestructiveChanges(statements: string[]): DestructiveChange[] {
25
+ const changes: DestructiveChange[] = []
26
+
27
+ for (const stmt of statements) {
28
+ // Normalize whitespace but preserve original case for names
29
+ const normalized = stmt.replace(/\s+/g, ' ').trim()
30
+
31
+ // DROP TABLE
32
+ const dropTable = normalized.match(/DROP\s+TABLE\s+(?:IF\s+EXISTS\s+)?("?[\w.]+"?)/i)
33
+ if (dropTable) {
34
+ changes.push({
35
+ description: `DROP TABLE ${unquote(dropTable[1])}`,
36
+ statement: stmt,
37
+ })
38
+ continue
39
+ }
40
+
41
+ // ALTER TABLE ... DROP COLUMN
42
+ const dropColumn = normalized.match(
43
+ /ALTER\s+TABLE\s+(?:ONLY\s+)?("?[\w.]+"?)\s+DROP\s+COLUMN\s+(?:IF\s+EXISTS\s+)?("?[\w.]+"?)/i,
44
+ )
45
+ if (dropColumn) {
46
+ changes.push({
47
+ description: `ALTER TABLE ${unquote(dropColumn[1])} DROP COLUMN ${unquote(dropColumn[2])}`,
48
+ statement: stmt,
49
+ })
50
+ continue
51
+ }
52
+
53
+ // DROP INDEX
54
+ const dropIndex = normalized.match(/DROP\s+INDEX\s+(?:CONCURRENTLY\s+)?(?:IF\s+EXISTS\s+)?("?[\w.]+"?)/i)
55
+ if (dropIndex) {
56
+ changes.push({
57
+ description: `DROP INDEX ${unquote(dropIndex[1])}`,
58
+ statement: stmt,
59
+ })
60
+ continue
61
+ }
62
+
63
+ // DROP VIEW
64
+ const dropView = normalized.match(/DROP\s+VIEW\s+(?:IF\s+EXISTS\s+)?("?[\w.]+"?)/i)
65
+ if (dropView) {
66
+ changes.push({
67
+ description: `DROP VIEW ${unquote(dropView[1])}`,
68
+ statement: stmt,
69
+ })
70
+ continue
71
+ }
72
+
73
+ // DROP FUNCTION
74
+ const dropFunction = normalized.match(/DROP\s+FUNCTION\s+(?:IF\s+EXISTS\s+)?("?[\w.]+"?)/i)
75
+ if (dropFunction) {
76
+ changes.push({
77
+ description: `DROP FUNCTION ${unquote(dropFunction[1])}`,
78
+ statement: stmt,
79
+ })
80
+ continue
81
+ }
82
+
83
+ // TRUNCATE
84
+ const truncate = normalized.match(/TRUNCATE\s+(?:TABLE\s+)?("?[\w.]+"?)/i)
85
+ if (truncate) {
86
+ changes.push({
87
+ description: `TRUNCATE ${unquote(truncate[1])}`,
88
+ statement: stmt,
89
+ })
90
+ }
91
+ }
92
+
93
+ return changes
94
+ }
95
+
96
+ /** Remove surrounding double quotes from an identifier */
97
+ function unquote(name: string): string {
98
+ return name.replace(/^"(.*)"$/, '$1')
99
+ }
@@ -0,0 +1,35 @@
1
+ import * as fs from 'node:fs'
2
+ import * as path from 'node:path'
3
+ import fg from 'fast-glob'
4
+
5
+ /**
6
+ * Discover SQL files from a path. If inputPath is a .sql file, returns it directly.
7
+ * If a directory, globs for SQL files within it.
8
+ * Returns absolute paths sorted lexicographically for deterministic ordering.
9
+ */
10
+ export async function discoverSqlFiles(inputPath: string, includePatterns?: string[]): Promise<string[]> {
11
+ const resolved = path.resolve(inputPath)
12
+
13
+ // Single file
14
+ if (resolved.endsWith('.sql')) {
15
+ if (!fs.existsSync(resolved)) {
16
+ throw new Error(`SQL file not found: ${resolved}`)
17
+ }
18
+ return [resolved]
19
+ }
20
+
21
+ // Directory
22
+ if (!fs.existsSync(resolved) || !fs.statSync(resolved).isDirectory()) {
23
+ throw new Error(`Path is not a file or directory: ${resolved}`)
24
+ }
25
+
26
+ const patterns = includePatterns ?? ['**/*.sql']
27
+ const files = await fg(patterns, {
28
+ cwd: resolved,
29
+ absolute: true,
30
+ onlyFiles: true,
31
+ ignore: ['**/node_modules/**', '**/__tests__/**', '**/test/**', '**/*.test.*', '**/dist/**', '**/.sqldoc/**'],
32
+ })
33
+
34
+ return files.sort()
35
+ }
@@ -0,0 +1,23 @@
1
+ import type { Diagnostic } from '@sqldoc/core'
2
+ import pc from 'picocolors'
3
+
4
+ /**
5
+ * Format a diagnostic for terminal display.
6
+ * Output: filePath:line:col: severity: message
7
+ * Lines and columns are 1-based for editor compatibility.
8
+ */
9
+ export function formatDiagnostic(filePath: string, d: Diagnostic): string {
10
+ const location = `${filePath}:${d.line + 1}:${d.startCol + 1}`
11
+ const severity =
12
+ d.severity === 'error' ? pc.red(d.severity) : d.severity === 'warning' ? pc.yellow(d.severity) : pc.blue(d.severity)
13
+ return `${location}: ${severity}: ${d.message}`
14
+ }
15
+
16
+ /**
17
+ * Format a summary line with error and warning counts.
18
+ */
19
+ export function formatSummary(errorCount: number, warningCount: number): string {
20
+ const errors = errorCount > 0 ? pc.red(`${errorCount} error(s)`) : `${errorCount} error(s)`
21
+ const warnings = warningCount > 0 ? pc.yellow(`${warningCount} warning(s)`) : `${warningCount} warning(s)`
22
+ return `${errors}, ${warnings}`
23
+ }
@@ -0,0 +1,73 @@
1
+ import * as fs from 'node:fs'
2
+ import * as path from 'node:path'
3
+
4
/**
 * Scan .sqldoc/node_modules for installed @sqldoc namespace plugins,
 * and generate .sqldoc/config.d.ts with a typed SqldocConfig interface.
 *
 * Base config fields are imported from @sqldoc/core so they stay in sync.
 * Only the namespaces block is generated based on installed plugins.
 *
 * No-op when .sqldoc/node_modules/@sqldoc does not exist.
 */
export function generateConfigTypes(sqldocDir: string): void {
  const nodeModules = path.join(sqldocDir, 'node_modules', '@sqldoc')
  if (!fs.existsSync(nodeModules)) return

  const imports: string[] = ["import type { ProjectConfig as BaseProjectConfig } from '@sqldoc/core'"]
  const namespaceFields: string[] = []

  // Namespace plugins follow the @sqldoc/ns-<name> convention; sort the
  // directory listing so the generated output is deterministic.
  const dirs = fs.readdirSync(nodeModules).sort()
  for (const dir of dirs) {
    if (!dir.startsWith('ns-')) continue

    const pkgPath = path.join(nodeModules, dir)
    const pkgJsonPath = path.join(pkgPath, 'package.json')
    // Skip directories that are not real installed packages.
    if (!fs.existsSync(pkgJsonPath)) continue

    const nsName = dir.replace('ns-', '')
    const pkgName = `@sqldoc/${dir}`

    // Best-effort scan of the plugin's entry file for an exported *Config type.
    const configTypeName = findConfigTypeName(pkgPath, nsName)

    if (configTypeName) {
      imports.push(`import type { ${configTypeName} } from '${pkgName}'`)
      namespaceFields.push(` ${nsName}?: Partial<${configTypeName}>`)
    } else {
      // No typed config found: fall back to an untyped record.
      namespaceFields.push(` ${nsName}?: Record<string, unknown>`)
    }
  }

  // Template whitespace below is deliberate: it is emitted verbatim into
  // the generated config.d.ts.
  const namespacesBlock =
    namespaceFields.length > 0
      ? ` namespaces?: {
${namespaceFields.join('\n')}
}`
      : ` namespaces?: Record<string, Record<string, unknown>>`

  const content = `// Auto-generated by sqldoc -- DO NOT EDIT
// Regenerated on: sqldoc init, sqldoc add, sqldoc codegen
${imports.join('\n')}

interface ProjectConfig extends BaseProjectConfig {
${namespacesBlock}
}

export type SqldocConfig = ProjectConfig | ProjectConfig[]
export type Config = SqldocConfig
`

  fs.writeFileSync(path.join(sqldocDir, 'config.d.ts'), content)
}
60
+
61
+ function findConfigTypeName(pkgPath: string, _nsName: string): string | null {
62
+ const candidates = ['src/index.ts', 'index.ts', 'src/index.js', 'index.js']
63
+ for (const candidate of candidates) {
64
+ const filePath = path.join(pkgPath, candidate)
65
+ if (!fs.existsSync(filePath)) continue
66
+
67
+ const content = fs.readFileSync(filePath, 'utf-8')
68
+ const match = content.match(/export\s+(?:type\s+)?{\s*[^}]*\b(\w+Config)\b/)
69
+ if (match) return match[1]
70
+ }
71
+
72
+ return null
73
+ }
@@ -0,0 +1,347 @@
1
+ /**
2
+ * Format-aware migration file reading and writing.
3
+ *
4
+ * Each format has its own conventions for:
5
+ * - File naming and patterns
6
+ * - Up/down script extraction from file content
7
+ * - Writing new migration files with up+down sections
8
+ *
9
+ * Supported formats:
10
+ * - plain: NNN_name.sql (entire file = up, no down)
11
+ * - atlas: YYYYMMDDHHMMSS_name.sql (entire file = up, no down)
12
+ * - goose: NNN_name.sql (-- +goose Up / -- +goose Down markers)
13
+ * - golang-migrate: NNN_name.up.sql / NNN_name.down.sql (separate files)
14
+ * - flyway: V1__name.sql (up), U1__name.sql (down/undo)
15
+ * - dbmate: NNN_name.sql (-- migrate:up / -- migrate:down markers)
16
+ */
17
+
18
+ import * as fs from 'node:fs'
19
+ import * as path from 'node:path'
20
+ import type { MigrationFormat } from '@sqldoc/core'
21
+
22
+ // ── Types ────────────────────────────────────────────────────────────
23
+
24
/** A parsed migration with its up and optional down SQL. */
export interface ParsedMigration {
  /** Filename of the primary file (e.g. "001_init.sql" or "001_init.up.sql") */
  filename: string
  /** The up (forward) SQL, whitespace-trimmed by the parsers */
  up: string
  /** The down (rollback) SQL, if available; undefined when the format or file has none */
  down?: string
  /** Sort key extracted from the filename (numeric/timestamp prefix) used for ordering */
  sortKey: string
}
35
+
36
/** Options for writing a new migration file */
export interface WriteMigrationOptions {
  /** Directory to write to (created recursively if missing) */
  dir: string
  /** Migration name (e.g. "add_users"); sanitized before use in filenames */
  name: string
  /** Up SQL content */
  up: string
  /** Down SQL content (optional; ignored by formats without down support, e.g. plain/atlas) */
  down?: string
  /** Format to write in */
  format: MigrationFormat
  /** Naming strategy: current-time prefix vs incrementing counter */
  naming: 'timestamp' | 'sequential'
  /** Existing migrations, consulted only for sequential naming */
  existing?: ParsedMigration[]
}
53
+
54
+ // ── Format-specific parsers ──────────────────────────────────────────
55
+
56
+ /** Parse a single migration file based on format */
57
+ function parsePlain(filename: string, content: string): ParsedMigration {
58
+ const sortKey = filename.split('_')[0]
59
+ return { filename, up: content.trim(), sortKey }
60
+ }
61
+
62
+ function parseAtlas(filename: string, content: string): ParsedMigration {
63
+ const sortKey = filename.split('_')[0]
64
+ return { filename, up: content.trim(), sortKey }
65
+ }
66
+
67
+ function parseGoose(filename: string, content: string): ParsedMigration {
68
+ const sortKey = filename.split('_')[0]
69
+
70
+ // Find the positions of -- +goose Up and -- +goose Down markers
71
+ const upIdx = content.search(/^--\s*\+goose\s+Up\s*$/m)
72
+ const downIdx = content.search(/^--\s*\+goose\s+Down\s*$/m)
73
+
74
+ let up: string
75
+ let down: string | undefined
76
+
77
+ if (upIdx !== -1) {
78
+ // Find end of the Up marker line
79
+ const afterUp = content.indexOf('\n', upIdx)
80
+ if (downIdx !== -1) {
81
+ up = content.substring(afterUp + 1, downIdx).trim()
82
+ const afterDown = content.indexOf('\n', downIdx)
83
+ down = content.substring(afterDown + 1).trim() || undefined
84
+ } else {
85
+ up = content.substring(afterUp + 1).trim()
86
+ }
87
+ } else {
88
+ up = content.trim()
89
+ }
90
+
91
+ return { filename, up, down, sortKey }
92
+ }
93
+
94
+ function parseDbmate(filename: string, content: string): ParsedMigration {
95
+ const sortKey = filename.split('_')[0]
96
+
97
+ // Find the positions of -- migrate:up and -- migrate:down markers
98
+ const upIdx = content.search(/^--\s*migrate:up\s*$/m)
99
+ const downIdx = content.search(/^--\s*migrate:down\s*$/m)
100
+
101
+ let up: string
102
+ let down: string | undefined
103
+
104
+ if (upIdx !== -1) {
105
+ const afterUp = content.indexOf('\n', upIdx)
106
+ if (downIdx !== -1) {
107
+ up = content.substring(afterUp + 1, downIdx).trim()
108
+ const afterDown = content.indexOf('\n', downIdx)
109
+ down = content.substring(afterDown + 1).trim() || undefined
110
+ } else {
111
+ up = content.substring(afterUp + 1).trim()
112
+ }
113
+ } else {
114
+ up = content.trim()
115
+ }
116
+
117
+ return { filename, up, down, sortKey }
118
+ }
119
+
120
+ // ── Directory readers ────────────────────────────────────────────────
121
+
122
+ /**
123
+ * Read and parse all migrations from a directory in the given format.
124
+ * Returns migrations sorted by their sort key (chronological order).
125
+ */
126
+ export function readMigrations(dir: string, format: MigrationFormat): ParsedMigration[] {
127
+ const absDir = path.resolve(dir)
128
+ if (!fs.existsSync(absDir)) return []
129
+
130
+ const allFiles = fs
131
+ .readdirSync(absDir)
132
+ .filter((f) => f.endsWith('.sql'))
133
+ .sort()
134
+
135
+ switch (format) {
136
+ case 'plain':
137
+ case 'atlas':
138
+ return allFiles.map((f) => {
139
+ const content = fs.readFileSync(path.join(absDir, f), 'utf-8')
140
+ return format === 'atlas' ? parseAtlas(f, content) : parsePlain(f, content)
141
+ })
142
+
143
+ case 'goose':
144
+ return allFiles.map((f) => {
145
+ const content = fs.readFileSync(path.join(absDir, f), 'utf-8')
146
+ return parseGoose(f, content)
147
+ })
148
+
149
+ case 'golang-migrate':
150
+ return readGolangMigrate(absDir, allFiles)
151
+
152
+ case 'flyway':
153
+ return readFlyway(absDir, allFiles)
154
+
155
+ case 'dbmate':
156
+ return allFiles.map((f) => {
157
+ const content = fs.readFileSync(path.join(absDir, f), 'utf-8')
158
+ return parseDbmate(f, content)
159
+ })
160
+ }
161
+ }
162
+
163
+ /** golang-migrate: pair .up.sql and .down.sql files */
164
+ function readGolangMigrate(absDir: string, files: string[]): ParsedMigration[] {
165
+ const upFiles = files.filter((f) => f.includes('.up.sql'))
166
+ return upFiles.map((upFile) => {
167
+ const downFile = upFile.replace('.up.sql', '.down.sql')
168
+ const upContent = fs.readFileSync(path.join(absDir, upFile), 'utf-8').trim()
169
+ const downContent = files.includes(downFile)
170
+ ? fs.readFileSync(path.join(absDir, downFile), 'utf-8').trim()
171
+ : undefined
172
+
173
+ // Sort key: everything before .up.sql
174
+ const sortKey = upFile.replace('.up.sql', '').split('_')[0]
175
+ return { filename: upFile, up: upContent, down: downContent || undefined, sortKey }
176
+ })
177
+ }
178
+
179
+ /** flyway: V{n}__{name}.sql (up) and U{n}__{name}.sql (undo/down) */
180
+ function readFlyway(absDir: string, files: string[]): ParsedMigration[] {
181
+ const vFiles = files.filter((f) => /^V\d+/.test(f))
182
+ return vFiles.map((vFile) => {
183
+ const upContent = fs.readFileSync(path.join(absDir, vFile), 'utf-8').trim()
184
+
185
+ // Find matching U file: V1__name.sql -> U1__name.sql
186
+ const versionMatch = vFile.match(/^V(\d+)/)
187
+ const version = versionMatch ? versionMatch[1] : ''
188
+ const uFile = files.find((f) => f.startsWith(`U${version}`))
189
+ const downContent = uFile ? fs.readFileSync(path.join(absDir, uFile), 'utf-8').trim() : undefined
190
+
191
+ return { filename: vFile, up: upContent, down: downContent || undefined, sortKey: version.padStart(6, '0') }
192
+ })
193
+ }
194
+
195
+ // ── File writers ─────────────────────────────────────────────────────
196
+
197
+ /**
198
+ * Write a new migration file in the given format.
199
+ * Returns the absolute path(s) of written files.
200
+ */
201
+ export function writeMigration(opts: WriteMigrationOptions): string[] {
202
+ const absDir = path.resolve(opts.dir)
203
+ fs.mkdirSync(absDir, { recursive: true })
204
+
205
+ const prefix = generatePrefix(opts)
206
+ const safeName = sanitizeName(opts.name)
207
+ const written: string[] = []
208
+
209
+ switch (opts.format) {
210
+ case 'plain':
211
+ case 'atlas': {
212
+ const filename = `${prefix}_${safeName}.sql`
213
+ const content = `${opts.up.trim()}\n`
214
+ const filePath = path.join(absDir, filename)
215
+ fs.writeFileSync(filePath, content, 'utf-8')
216
+ written.push(filePath)
217
+ break
218
+ }
219
+
220
+ case 'goose': {
221
+ const filename = `${prefix}_${safeName}.sql`
222
+ let content = `-- +goose Up\n${opts.up.trim()}\n`
223
+ if (opts.down) {
224
+ content += `\n-- +goose Down\n${opts.down.trim()}\n`
225
+ }
226
+ const filePath = path.join(absDir, filename)
227
+ fs.writeFileSync(filePath, content, 'utf-8')
228
+ written.push(filePath)
229
+ break
230
+ }
231
+
232
+ case 'golang-migrate': {
233
+ const upFilename = `${prefix}_${safeName}.up.sql`
234
+ const upPath = path.join(absDir, upFilename)
235
+ fs.writeFileSync(upPath, `${opts.up.trim()}\n`, 'utf-8')
236
+ written.push(upPath)
237
+
238
+ if (opts.down) {
239
+ const downFilename = `${prefix}_${safeName}.down.sql`
240
+ const downPath = path.join(absDir, downFilename)
241
+ fs.writeFileSync(downPath, `${opts.down.trim()}\n`, 'utf-8')
242
+ written.push(downPath)
243
+ }
244
+ break
245
+ }
246
+
247
+ case 'flyway': {
248
+ const version = prefix
249
+ const upFilename = `V${version}__${safeName}.sql`
250
+ const upPath = path.join(absDir, upFilename)
251
+ fs.writeFileSync(upPath, `${opts.up.trim()}\n`, 'utf-8')
252
+ written.push(upPath)
253
+
254
+ if (opts.down) {
255
+ const downFilename = `U${version}__${safeName}.sql`
256
+ const downPath = path.join(absDir, downFilename)
257
+ fs.writeFileSync(downPath, `${opts.down.trim()}\n`, 'utf-8')
258
+ written.push(downPath)
259
+ }
260
+ break
261
+ }
262
+
263
+ case 'dbmate': {
264
+ const filename = `${prefix}_${safeName}.sql`
265
+ let content = `-- migrate:up\n${opts.up.trim()}\n`
266
+ if (opts.down) {
267
+ content += `\n-- migrate:down\n${opts.down.trim()}\n`
268
+ }
269
+ const filePath = path.join(absDir, filename)
270
+ fs.writeFileSync(filePath, content, 'utf-8')
271
+ written.push(filePath)
272
+ break
273
+ }
274
+ }
275
+
276
+ return written
277
+ }
278
+
279
+ // ── Helpers ──────────────────────────────────────────────────────────
280
+
281
+ /** Generate a filename prefix based on naming strategy */
282
+ function generatePrefix(opts: WriteMigrationOptions): string {
283
+ if (opts.naming === 'sequential') {
284
+ return nextSequentialPrefix(opts.existing ?? [], opts.format)
285
+ }
286
+ return timestampPrefix()
287
+ }
288
+
289
+ /** Generate a YYYYMMDDHHMMSS timestamp prefix */
290
+ function timestampPrefix(): string {
291
+ const now = new Date()
292
+ return [
293
+ now.getFullYear(),
294
+ String(now.getMonth() + 1).padStart(2, '0'),
295
+ String(now.getDate()).padStart(2, '0'),
296
+ String(now.getHours()).padStart(2, '0'),
297
+ String(now.getMinutes()).padStart(2, '0'),
298
+ String(now.getSeconds()).padStart(2, '0'),
299
+ ].join('')
300
+ }
301
+
302
+ /** Generate the next sequential prefix based on existing migrations */
303
+ function nextSequentialPrefix(existing: ParsedMigration[], format: MigrationFormat): string {
304
+ if (format === 'flyway') {
305
+ // Flyway uses V1, V2, etc. Extract the max version number.
306
+ let max = 0
307
+ for (const m of existing) {
308
+ const match = m.filename.match(/^V(\d+)/)
309
+ if (match) {
310
+ const n = parseInt(match[1], 10)
311
+ if (n > max) max = n
312
+ }
313
+ }
314
+ return String(max + 1)
315
+ }
316
+
317
+ // For other formats, find the highest numeric prefix and increment
318
+ let max = 0
319
+ for (const m of existing) {
320
+ const match = m.sortKey.match(/^(\d+)/)
321
+ if (match) {
322
+ const n = parseInt(match[1], 10)
323
+ if (n > max) max = n
324
+ }
325
+ }
326
+ return String(max + 1).padStart(3, '0')
327
+ }
328
+
329
+ /** Sanitize a migration name for use in filenames */
330
+ export function sanitizeName(name: string): string {
331
+ const safe = name
332
+ .toLowerCase()
333
+ .replace(/[^a-z0-9]+/g, '_')
334
+ .replace(/^_+|_+$/g, '')
335
+ return safe || 'migration'
336
+ }
337
+
338
+ /**
339
+ * Extract all up scripts from parsed migrations and concatenate them.
340
+ * This gives the "current" schema state from migrations.
341
+ */
342
+ export function concatUpScripts(migrations: ParsedMigration[]): string {
343
+ return migrations
344
+ .map((m) => m.up)
345
+ .filter(Boolean)
346
+ .join('\n\n')
347
+ }
@@ -0,0 +1,74 @@
1
+ import * as fs from 'node:fs'
2
+ import * as path from 'node:path'
3
+
4
/** A migration file read from the migrations directory */
export interface MigrationFile {
  /** Filename (e.g. "20260321143000_initial.sql") */
  filename: string
  /** Full file content (read as UTF-8) */
  content: string
  /** Timestamp prefix extracted from the filename (text before the first underscore) */
  timestamp: string
}
13
+
14
+ /**
15
+ * Read all migration files from a directory, sorted lexicographically (chronological).
16
+ * Returns empty array if directory doesn't exist.
17
+ */
18
+ export function readMigrationDir(dir: string): MigrationFile[] {
19
+ const absDir = path.resolve(dir)
20
+ if (!fs.existsSync(absDir)) return []
21
+
22
+ const files = fs
23
+ .readdirSync(absDir)
24
+ .filter((f) => f.endsWith('.sql'))
25
+ .sort()
26
+
27
+ return files.map((filename) => ({
28
+ filename,
29
+ content: fs.readFileSync(path.join(absDir, filename), 'utf-8'),
30
+ timestamp: filename.split('_')[0],
31
+ }))
32
+ }
33
+
34
+ /**
35
+ * Generate a timestamped migration filename.
36
+ * Format: YYYYMMDDHHMMSS_name.sql
37
+ *
38
+ * Name is sanitized: lowercased, non-alphanumeric replaced with underscore,
39
+ * leading/trailing underscores stripped. Defaults to "migration" if empty.
40
+ */
41
+ export function migrationFilename(name: string): string {
42
+ const now = new Date()
43
+ const ts = [
44
+ now.getFullYear(),
45
+ String(now.getMonth() + 1).padStart(2, '0'),
46
+ String(now.getDate()).padStart(2, '0'),
47
+ String(now.getHours()).padStart(2, '0'),
48
+ String(now.getMinutes()).padStart(2, '0'),
49
+ String(now.getSeconds()).padStart(2, '0'),
50
+ ].join('')
51
+
52
+ const safeName = name
53
+ .toLowerCase()
54
+ .replace(/[^a-z0-9]+/g, '_')
55
+ .replace(/^_+|_+$/g, '')
56
+
57
+ return `${ts}_${safeName || 'migration'}.sql`
58
+ }
59
+
60
+ /**
61
+ * Write a migration file to the migrations directory.
62
+ * Creates the directory recursively if it doesn't exist.
63
+ * Statements are joined with ";\n" and a trailing ";\n" is appended.
64
+ * Returns the absolute path of the written file.
65
+ */
66
+ export function writeMigrationFile(dir: string, filename: string, statements: string[]): string {
67
+ const absDir = path.resolve(dir)
68
+ fs.mkdirSync(absDir, { recursive: true })
69
+
70
+ const content = `${statements.join(';\n')};\n`
71
+ const filePath = path.join(absDir, filename)
72
+ fs.writeFileSync(filePath, content, 'utf-8')
73
+ return filePath
74
+ }