@take-out/postgres 0.0.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (186)
  1. package/LICENSE +21 -0
  2. package/README.md +242 -0
  3. package/cli.cjs +3 -0
  4. package/dist/cjs/build.cjs +26 -0
  5. package/dist/cjs/build.js +21 -0
  6. package/dist/cjs/build.js.map +6 -0
  7. package/dist/cjs/build.native.js +29 -0
  8. package/dist/cjs/build.native.js.map +1 -0
  9. package/dist/cjs/cli.cjs +156 -0
  10. package/dist/cjs/cli.js +128 -0
  11. package/dist/cjs/cli.js.map +6 -0
  12. package/dist/cjs/cli.native.js +164 -0
  13. package/dist/cjs/cli.native.js.map +1 -0
  14. package/dist/cjs/createServerHelpers.cjs +37 -0
  15. package/dist/cjs/createServerHelpers.js +28 -0
  16. package/dist/cjs/createServerHelpers.js.map +6 -0
  17. package/dist/cjs/createServerHelpers.native.js +43 -0
  18. package/dist/cjs/createServerHelpers.native.js.map +1 -0
  19. package/dist/cjs/helpers/chunkedQuery.cjs +60 -0
  20. package/dist/cjs/helpers/chunkedQuery.js +51 -0
  21. package/dist/cjs/helpers/chunkedQuery.js.map +6 -0
  22. package/dist/cjs/helpers/chunkedQuery.native.js +73 -0
  23. package/dist/cjs/helpers/chunkedQuery.native.js.map +1 -0
  24. package/dist/cjs/helpers/getDBClient.cjs +172 -0
  25. package/dist/cjs/helpers/getDBClient.js +179 -0
  26. package/dist/cjs/helpers/getDBClient.js.map +6 -0
  27. package/dist/cjs/helpers/getDBClient.native.js +189 -0
  28. package/dist/cjs/helpers/getDBClient.native.js.map +1 -0
  29. package/dist/cjs/index.cjs +59 -0
  30. package/dist/cjs/index.js +45 -0
  31. package/dist/cjs/index.js.map +6 -0
  32. package/dist/cjs/index.native.js +64 -0
  33. package/dist/cjs/index.native.js.map +1 -0
  34. package/dist/cjs/migrate.cjs +117 -0
  35. package/dist/cjs/migrate.js +106 -0
  36. package/dist/cjs/migrate.js.map +6 -0
  37. package/dist/cjs/migrate.native.js +185 -0
  38. package/dist/cjs/migrate.native.js.map +1 -0
  39. package/dist/cjs/scripts/build-migrations.cjs +81 -0
  40. package/dist/cjs/scripts/build-migrations.js +69 -0
  41. package/dist/cjs/scripts/build-migrations.js.map +6 -0
  42. package/dist/cjs/scripts/build-migrations.native.js +86 -0
  43. package/dist/cjs/scripts/build-migrations.native.js.map +1 -0
  44. package/dist/cjs/scripts/drizzle-migrations-sync.cjs +111 -0
  45. package/dist/cjs/scripts/drizzle-migrations-sync.js +101 -0
  46. package/dist/cjs/scripts/drizzle-migrations-sync.js.map +6 -0
  47. package/dist/cjs/scripts/drizzle-migrations-sync.native.js +209 -0
  48. package/dist/cjs/scripts/drizzle-migrations-sync.native.js.map +1 -0
  49. package/dist/cjs/scripts/migration-add.cjs +53 -0
  50. package/dist/cjs/scripts/migration-add.js +40 -0
  51. package/dist/cjs/scripts/migration-add.js.map +6 -0
  52. package/dist/cjs/scripts/migration-add.native.js +72 -0
  53. package/dist/cjs/scripts/migration-add.native.js.map +1 -0
  54. package/dist/cjs/scripts/pg_dump.cjs +49 -0
  55. package/dist/cjs/scripts/pg_dump.js +36 -0
  56. package/dist/cjs/scripts/pg_dump.js.map +6 -0
  57. package/dist/cjs/scripts/pg_dump.native.js +55 -0
  58. package/dist/cjs/scripts/pg_dump.native.js.map +1 -0
  59. package/dist/cjs/scripts/psql.cjs +50 -0
  60. package/dist/cjs/scripts/psql.js +37 -0
  61. package/dist/cjs/scripts/psql.js.map +6 -0
  62. package/dist/cjs/scripts/psql.native.js +56 -0
  63. package/dist/cjs/scripts/psql.native.js.map +1 -0
  64. package/dist/cjs/sql.cjs +40 -0
  65. package/dist/cjs/sql.js +35 -0
  66. package/dist/cjs/sql.js.map +6 -0
  67. package/dist/cjs/sql.native.js +49 -0
  68. package/dist/cjs/sql.native.js.map +1 -0
  69. package/dist/esm/build.js +5 -0
  70. package/dist/esm/build.js.map +6 -0
  71. package/dist/esm/build.mjs +3 -0
  72. package/dist/esm/build.mjs.map +1 -0
  73. package/dist/esm/build.native.js +3 -0
  74. package/dist/esm/build.native.js.map +1 -0
  75. package/dist/esm/cli.js +134 -0
  76. package/dist/esm/cli.js.map +6 -0
  77. package/dist/esm/cli.mjs +157 -0
  78. package/dist/esm/cli.mjs.map +1 -0
  79. package/dist/esm/cli.native.js +162 -0
  80. package/dist/esm/cli.native.js.map +1 -0
  81. package/dist/esm/createServerHelpers.js +13 -0
  82. package/dist/esm/createServerHelpers.js.map +6 -0
  83. package/dist/esm/createServerHelpers.mjs +14 -0
  84. package/dist/esm/createServerHelpers.mjs.map +1 -0
  85. package/dist/esm/createServerHelpers.native.js +17 -0
  86. package/dist/esm/createServerHelpers.native.js.map +1 -0
  87. package/dist/esm/helpers/chunkedQuery.js +35 -0
  88. package/dist/esm/helpers/chunkedQuery.js.map +6 -0
  89. package/dist/esm/helpers/chunkedQuery.mjs +36 -0
  90. package/dist/esm/helpers/chunkedQuery.mjs.map +1 -0
  91. package/dist/esm/helpers/chunkedQuery.native.js +46 -0
  92. package/dist/esm/helpers/chunkedQuery.native.js.map +1 -0
  93. package/dist/esm/helpers/getDBClient.js +155 -0
  94. package/dist/esm/helpers/getDBClient.js.map +6 -0
  95. package/dist/esm/helpers/getDBClient.mjs +136 -0
  96. package/dist/esm/helpers/getDBClient.mjs.map +1 -0
  97. package/dist/esm/helpers/getDBClient.native.js +150 -0
  98. package/dist/esm/helpers/getDBClient.native.js.map +1 -0
  99. package/dist/esm/index.js +29 -0
  100. package/dist/esm/index.js.map +6 -0
  101. package/dist/esm/index.mjs +18 -0
  102. package/dist/esm/index.mjs.map +1 -0
  103. package/dist/esm/index.native.js +20 -0
  104. package/dist/esm/index.native.js.map +1 -0
  105. package/dist/esm/migrate.js +91 -0
  106. package/dist/esm/migrate.js.map +6 -0
  107. package/dist/esm/migrate.mjs +94 -0
  108. package/dist/esm/migrate.mjs.map +1 -0
  109. package/dist/esm/migrate.native.js +159 -0
  110. package/dist/esm/migrate.native.js.map +1 -0
  111. package/dist/esm/scripts/build-migrations.js +46 -0
  112. package/dist/esm/scripts/build-migrations.js.map +6 -0
  113. package/dist/esm/scripts/build-migrations.mjs +47 -0
  114. package/dist/esm/scripts/build-migrations.mjs.map +1 -0
  115. package/dist/esm/scripts/build-migrations.native.js +49 -0
  116. package/dist/esm/scripts/build-migrations.native.js.map +1 -0
  117. package/dist/esm/scripts/drizzle-migrations-sync.js +87 -0
  118. package/dist/esm/scripts/drizzle-migrations-sync.js.map +6 -0
  119. package/dist/esm/scripts/drizzle-migrations-sync.mjs +88 -0
  120. package/dist/esm/scripts/drizzle-migrations-sync.mjs.map +1 -0
  121. package/dist/esm/scripts/drizzle-migrations-sync.native.js +183 -0
  122. package/dist/esm/scripts/drizzle-migrations-sync.native.js.map +1 -0
  123. package/dist/esm/scripts/migration-add.js +25 -0
  124. package/dist/esm/scripts/migration-add.js.map +6 -0
  125. package/dist/esm/scripts/migration-add.mjs +30 -0
  126. package/dist/esm/scripts/migration-add.mjs.map +1 -0
  127. package/dist/esm/scripts/migration-add.native.js +46 -0
  128. package/dist/esm/scripts/migration-add.native.js.map +1 -0
  129. package/dist/esm/scripts/pg_dump.js +20 -0
  130. package/dist/esm/scripts/pg_dump.js.map +6 -0
  131. package/dist/esm/scripts/pg_dump.mjs +26 -0
  132. package/dist/esm/scripts/pg_dump.mjs.map +1 -0
  133. package/dist/esm/scripts/pg_dump.native.js +29 -0
  134. package/dist/esm/scripts/pg_dump.native.js.map +1 -0
  135. package/dist/esm/scripts/psql.js +21 -0
  136. package/dist/esm/scripts/psql.js.map +6 -0
  137. package/dist/esm/scripts/psql.mjs +27 -0
  138. package/dist/esm/scripts/psql.mjs.map +1 -0
  139. package/dist/esm/scripts/psql.native.js +30 -0
  140. package/dist/esm/scripts/psql.native.js.map +1 -0
  141. package/dist/esm/sql.js +19 -0
  142. package/dist/esm/sql.js.map +6 -0
  143. package/dist/esm/sql.mjs +15 -0
  144. package/dist/esm/sql.mjs.map +1 -0
  145. package/dist/esm/sql.native.js +21 -0
  146. package/dist/esm/sql.native.js.map +1 -0
  147. package/package.json +67 -0
  148. package/src/build.ts +2 -0
  149. package/src/cli.ts +153 -0
  150. package/src/createServerHelpers.ts +20 -0
  151. package/src/helpers/chunkedQuery.ts +91 -0
  152. package/src/helpers/getDBClient.ts +264 -0
  153. package/src/index.ts +36 -0
  154. package/src/migrate.ts +192 -0
  155. package/src/scripts/build-migrations.ts +66 -0
  156. package/src/scripts/drizzle-migrations-sync.ts +179 -0
  157. package/src/scripts/migration-add.ts +54 -0
  158. package/src/scripts/pg_dump.ts +46 -0
  159. package/src/scripts/psql.ts +51 -0
  160. package/src/sql.ts +36 -0
  161. package/types/build.d.ts +2 -0
  162. package/types/build.d.ts.map +1 -0
  163. package/types/cli.d.ts +3 -0
  164. package/types/cli.d.ts.map +1 -0
  165. package/types/createServerHelpers.d.ts +9 -0
  166. package/types/createServerHelpers.d.ts.map +1 -0
  167. package/types/helpers/chunkedQuery.d.ts +17 -0
  168. package/types/helpers/chunkedQuery.d.ts.map +1 -0
  169. package/types/helpers/getDBClient.d.ts +11 -0
  170. package/types/helpers/getDBClient.d.ts.map +1 -0
  171. package/types/index.d.ts +10 -0
  172. package/types/index.d.ts.map +1 -0
  173. package/types/migrate.d.ts +25 -0
  174. package/types/migrate.d.ts.map +1 -0
  175. package/types/scripts/build-migrations.d.ts +8 -0
  176. package/types/scripts/build-migrations.d.ts.map +1 -0
  177. package/types/scripts/drizzle-migrations-sync.d.ts +11 -0
  178. package/types/scripts/drizzle-migrations-sync.d.ts.map +1 -0
  179. package/types/scripts/migration-add.d.ts +6 -0
  180. package/types/scripts/migration-add.d.ts.map +1 -0
  181. package/types/scripts/pg_dump.d.ts +11 -0
  182. package/types/scripts/pg_dump.d.ts.map +1 -0
  183. package/types/scripts/psql.d.ts +11 -0
  184. package/types/scripts/psql.d.ts.map +1 -0
  185. package/types/sql.d.ts +9 -0
  186. package/types/sql.d.ts.map +1 -0
package/src/migrate.ts ADDED
@@ -0,0 +1,192 @@
+ /**
+  * Custom migration script - why?
+  * Migration scripts aren't very complex, and we haven't found a library simple enough
+  * to justify a new dependency. We like this setup because we can customize it easily
+  * and it's easy to understand.
+  *
+  * This migration system handles both custom TypeScript migrations and
+  * SQL migrations generated by Drizzle.
+  */
+
+ import { basename } from 'node:path'
+ import type { PoolClient } from 'pg'
+ import { getDBClient } from './helpers/getDBClient'
+
+ export type Migration = {
+   name: string
+   up?: (client: PoolClient) => Promise<void>
+ }
+
+ export type MigrateOptions = {
+   connectionString: string
+   migrationsGlob: Record<string, () => Promise<unknown>>
+   createDatabases?: string[]
+   onMigrationComplete?: () => Promise<void>
+   gitSha?: string
+   cvrDb?: string
+   changeDb?: string
+ }
+
+ // check if we're running in AWS Lambda or another serverless environment
+ const isServerless = !!(
+   process.env.AWS_LAMBDA_FUNCTION_NAME ||
+   process.env.AWS_LAMBDA_RUNTIME_API ||
+   process.env.LAMBDA_RUNTIME_DIR ||
+   process.env.IS_SERVERLESS
+ )
+
+ export async function migrate(options: MigrateOptions) {
+   const {
+     connectionString,
+     migrationsGlob,
+     createDatabases = [],
+     onMigrationComplete,
+     gitSha,
+     cvrDb,
+     changeDb,
+   } = options
+
+   console.info(`Running migrations${gitSha ? ` for git version: ${gitSha}` : ''}`)
+
+   const client = await getDBClient({ connectionString })
+
+   const hasDB = async (name: string) => {
+     const result = !!(
+       await client.query(`
+         SELECT 1 FROM pg_database WHERE datname = '${name}'
+       `)
+     ).rows.length
+
+     console.info(result ? `${name} db exists` : `creating ${name} db`)
+
+     return result
+   }
+
+   // create zero databases if specified
+   if (cvrDb || changeDb) {
+     if (!cvrDb) {
+       throw new Error(`Missing cvrDb`)
+     }
+
+     const zeroDBNames = [basename(cvrDb || ''), basename(changeDb || '')].filter(Boolean)
+
+     for (const name of zeroDBNames) {
+       if (!(await hasDB(name))) {
+         await client.query(`CREATE DATABASE ${name};`)
+       }
+     }
+   }
+
+   // create additional databases if specified
+   for (const dbUrl of createDatabases) {
+     const name = basename(dbUrl)
+     if (!(await hasDB(name))) {
+       await client.query(`CREATE DATABASE ${name};`)
+     }
+   }
+
+   try {
+     await client.query('BEGIN')
+     await client.query(`
+       CREATE TABLE IF NOT EXISTS migrations (
+         id SERIAL PRIMARY KEY,
+         name VARCHAR(255) NOT NULL,
+         run_on TIMESTAMP NOT NULL DEFAULT NOW()
+       )
+     `)
+
+     const appliedMigrations = await client.query('SELECT name FROM migrations')
+     const appliedMigrationNames = new Set(appliedMigrations.rows.map((row) => row.name))
+
+     // get TypeScript migrations
+     const tsMigrationsSorted = Object.entries(migrationsGlob)
+       .sort(([a], [b]) => a.localeCompare(b))
+       .map(([file, run]) => ({
+         name: basename(file).replace('.ts', ''),
+         run,
+       }))
+       .filter(({ name }) => /^[\d]+/.test(name))
+
+     console.info(`Found ${tsMigrationsSorted.length} TypeScript migrations`)
+
+     // process TypeScript migrations
+     const tsMigrations: Migration[] = await Promise.all(
+       tsMigrationsSorted.map(async ({ name, run }) => {
+         if (appliedMigrationNames.has(name)) {
+           console.info(`TypeScript migration applied already: ${name}`)
+           return null
+         }
+         try {
+           const migration = (await run()) as object
+           return { ...migration, name }
+         } catch (error) {
+           console.error(`Failed to load TypeScript migration ${name}:`, error)
+           throw error
+         }
+       })
+     ).then((migrations) => migrations.filter(Boolean) as Migration[])
+
+     // combine TypeScript and SQL migrations, sorted by name to ensure correct order
+     const migrations = [...tsMigrations].sort((a, b) => a.name.localeCompare(b.name))
+
+     if (!migrations.length) {
+       console.info(`No migrations to apply!`)
+       await client.query('COMMIT')
+     } else {
+       for (const migration of migrations) {
+         // no try/catch here: we want to exit and roll back all migrations if one fails
+         console.info(`Migrating: ${migration.name}`)
+
+         if (migration.up) {
+           // TypeScript migration
+           console.info(`Applying migration: ${migration.name}`)
+           await migration.up(client)
+         }
+
+         await client.query('INSERT INTO migrations (name) VALUES ($1)', [migration.name])
+         console.info(`Successfully applied migration: ${migration.name}`)
+       }
+
+       await client.query('COMMIT')
+       console.info(`Successfully committed all migrations`)
+     }
+   } catch (e) {
+     console.error(`Migration failed, rolling back:`, e)
+     await client.query('ROLLBACK')
+     console.info(`Releasing client connection...`)
+     try {
+       client.release(false)
+     } catch (releaseErr) {
+       console.error(`Error releasing connection after rollback:`, releaseErr)
+     }
+     throw e
+   }
+
+   if (onMigrationComplete) {
+     await onMigrationComplete()
+   }
+
+   console.info(`Releasing client connection...`)
+   try {
+     // don't destroy the connection forcefully - let it return to the pool gracefully
+     client.release(false)
+   } catch (err) {
+     console.error(`Error releasing connection gracefully, trying to destroy:`, err)
+     try {
+       client.release(true)
+     } catch (destroyErr) {
+       console.error(`Error destroying connection:`, destroyErr)
+     }
+   }
+
+   console.info(`🙌 Done migrating`)
+
+   exitProcess()
+ }
+
+ function exitProcess() {
+   if (typeof process === 'undefined') return
+   // lambda was complaining about process.exit
+   if (isServerless) return
+   process.exit(0)
+ }
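For orientation, here is a minimal usage sketch for `migrate()`. It assumes the migration files are gathered with a Vite-style `import.meta.glob` (which matches the `migrationsGlob` shape above) and that `migrate` is re-exported from the package root; the paths and env var names are placeholders.

```ts
// Hypothetical migration entry point: paths, env vars, and the root re-export are assumptions.
import { migrate } from '@take-out/postgres'

await migrate({
  connectionString: process.env.DATABASE_URL!, // placeholder
  // lazy glob: Record<string, () => Promise<unknown>>, matching MigrateOptions
  migrationsGlob: import.meta.glob('./migrations/*.ts'),
  gitSha: process.env.GIT_SHA,
})
```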
package/src/scripts/build-migrations.ts ADDED
@@ -0,0 +1,66 @@
+ import { execSync } from 'node:child_process'
+ import path from 'node:path'
+
+ export type BuildMigrationsOptions = {
+   migrationsDir: string
+   outFile?: string
+   target?: string
+   aliases?: Record<string, string>
+ }
+
+ export async function buildMigrations(options: BuildMigrationsOptions) {
+   const {
+     migrationsDir,
+     outFile = 'migrate-dist.js',
+     target = 'node22',
+     aliases = {},
+   } = options
+
+   const { build } = await import('vite')
+
+   const migrateFile = path.join(migrationsDir, '..', 'migrate.ts')
+
+   const result = await build({
+     configFile: false,
+     resolve: {
+       alias: aliases,
+     },
+     define: {
+       'process.env.GIT_SHA': JSON.stringify(
+         execSync('git rev-parse HEAD').toString().trim()
+       ),
+     },
+     build: {
+       outDir: path.dirname(migrateFile),
+       target,
+       minify: false,
+       emptyOutDir: false,
+       copyPublicDir: false,
+       lib: {
+         name: 'migrate',
+         formats: ['es'],
+         entry: migrateFile,
+       },
+       rollupOptions: {
+         external: (id) => {
+           // externalize node: builtins and pg
+           if (id.startsWith('node:') || id === 'pg') return true
+           // keep absolute paths (like resolved aliases) in the bundle
+           if (id.startsWith('/')) return false
+           // externalize node_modules
+           if (!id.startsWith('.') && !id.startsWith('/')) return true
+           return false
+         },
+         output: {
+           format: 'es',
+           inlineDynamicImports: true,
+           exports: 'named',
+           entryFileNames: outFile,
+         },
+       },
+     },
+   })
+
+   console.info(`✓ Built migration bundle: ${outFile}`)
+   return result
+ }
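A hedged sketch of invoking `buildMigrations`: it bundles `migrate.ts`, which is expected one directory above `migrationsDir`, into a single ES module. The directory layout, the alias, and the import specifier below are illustrative assumptions.

```ts
// Hypothetical build step: paths, the alias, and the subpath import are assumptions.
import { buildMigrations } from '@take-out/postgres/build'

await buildMigrations({
  migrationsDir: 'src/migrations', // migrate.ts is then expected at src/migrate.ts
  outFile: 'migrate-dist.js',
  target: 'node22',
  aliases: { '@app': '/abs/path/to/src' }, // absolute alias targets stay bundled (see external() above)
})
```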
package/src/scripts/drizzle-migrations-sync.ts ADDED
@@ -0,0 +1,179 @@
+ #!/usr/bin/env bun
+
+ /**
+  * This script scans for SQL files in the migrations directory,
+  * creates corresponding TypeScript migration files,
+  * and imports the SQL with ?raw
+  */
+
+ import { readdir, writeFile, stat, rename } from 'node:fs/promises'
+ import { join, basename, extname } from 'node:path'
+ import { existsSync } from 'node:fs'
+
+ export type DrizzleSyncOptions = {
+   migrationsDir: string
+ }
+
+ /**
+  * Extracts the numeric prefix from a migration filename
+  */
+ function getMigrationNumber(filename: string): number | null {
+   const match = filename.match(/^(\d+)/)
+   return match && match[1] ? Number.parseInt(match[1], 10) : null
+ }
+
+ /**
+  * Finds the highest migration number in the directory
+  */
+ async function getHighestMigrationNumber(dir: string): Promise<number> {
+   const files = await readdir(dir)
+   let highest = -1
+
+   for (const file of files) {
+     const num = getMigrationNumber(file)
+     if (num !== null && num > highest) {
+       highest = num
+     }
+   }
+
+   return highest
+ }
+
+ /**
+  * Renames newly generated drizzle migrations to use correct sequential numbering
+  */
+ async function renameNewDrizzleMigrations(
+   migrationsPath: string,
+   sqlFiles: string[]
+ ): Promise<string[]> {
+   const drizzlePattern = /^(\d{4})_[a-z]+_[a-z_]+\.sql$/
+   const files = await readdir(migrationsPath)
+
+   // get the highest existing migration number
+   const highestNumber = await getHighestMigrationNumber(migrationsPath)
+
+   // find new drizzle SQL files (without corresponding .ts files yet)
+   const newDrizzleSqlFiles = sqlFiles.filter((file) => {
+     if (!drizzlePattern.test(file)) return false
+     const tsFile = file.replace('.sql', '.ts')
+     return !files.includes(tsFile)
+   })
+
+   if (newDrizzleSqlFiles.length === 0) return sqlFiles
+
+   // group by migration number
+   const migrationGroups = new Map<string, string[]>()
+   for (const file of newDrizzleSqlFiles) {
+     const num = file.substring(0, 4)
+     if (!migrationGroups.has(num)) {
+       migrationGroups.set(num, [])
+     }
+     migrationGroups.get(num)!.push(file)
+   }
+
+   let nextNumber = highestNumber + 1
+   const renamedFiles: string[] = []
+
+   // process each group of new migrations
+   for (const [originalNum, groupFiles] of migrationGroups) {
+     const drizzleNum = Number.parseInt(originalNum, 10)
+
+     // if drizzle's number is less than or equal to our highest, we need to renumber
+     if (drizzleNum <= highestNumber) {
+       const newNumStr = nextNumber.toString().padStart(4, '0')
+
+       console.info(`Renumbering new drizzle migration ${originalNum} to ${newNumStr}`)
+
+       for (const file of groupFiles) {
+         const newName = file.replace(/^\d{4}/, newNumStr)
+         const oldPath = join(migrationsPath, file)
+         const newPath = join(migrationsPath, newName)
+
+         await rename(oldPath, newPath)
+         console.info(` Renamed ${file} -> ${newName}`)
+         renamedFiles.push(newName)
+       }
+
+       // also rename the meta snapshot if it exists
+       const metaDir = join(migrationsPath, 'meta')
+       if (existsSync(metaDir)) {
+         const metaFiles = await readdir(metaDir)
+         const snapshotFile = `${originalNum}_snapshot.json`
+         if (metaFiles.includes(snapshotFile)) {
+           const newSnapshotName = `${newNumStr}_snapshot.json`
+           await rename(join(metaDir, snapshotFile), join(metaDir, newSnapshotName))
+           console.info(` Renamed meta/${snapshotFile} -> meta/${newSnapshotName}`)
+         }
+       }
+
+       nextNumber++
+     } else {
+       // keep files that don't need renaming
+       renamedFiles.push(...groupFiles)
+     }
+   }
+
+   // return updated list of SQL files (with renamed files + unchanged files)
+   return sqlFiles.map((file) => {
+     const idx = newDrizzleSqlFiles.indexOf(file)
+     if (idx !== -1) {
+       // find what this file was renamed to
+       for (const renamed of renamedFiles) {
+         if (renamed.includes(file.substring(5))) {
+           // match by the descriptive part after the number
+           return renamed
+         }
+       }
+     }
+     return file
+   })
+ }
+
+ export async function syncDrizzleMigrations(options: DrizzleSyncOptions) {
+   const { migrationsDir } = options
+
+   // get all sql files in the migrations directory
+   const files = await readdir(migrationsDir)
+   let sqlFiles = files.filter((file) => extname(file) === '.sql')
+
+   console.info(`Found ${sqlFiles.length} SQL files to convert to migrations.`)
+
+   // rename any new drizzle migrations to continue from the highest number
+   sqlFiles = await renameNewDrizzleMigrations(migrationsDir, sqlFiles)
+
+   // for each sql file, create a typescript migration
+   for (const sqlFile of sqlFiles) {
+     const baseName = basename(sqlFile, '.sql')
+     const tsFileName = `${baseName}.ts`
+     const tsFilePath = join(migrationsDir, tsFileName)
+
+     // skip if the typescript file already exists and is newer than the sql file
+     if (existsSync(tsFilePath)) {
+       const sqlStat = await stat(join(migrationsDir, sqlFile))
+       const tsStat = await stat(tsFilePath)
+
+       if (tsStat.mtimeMs > sqlStat.mtimeMs) {
+         continue
+       }
+
+       console.info(`Updating ${tsFileName} as SQL file has been modified.`)
+     } else {
+       console.info(`Creating ${tsFileName}`)
+     }
+
+     // generate the migration content
+     const migrationContent = `import type { PoolClient } from 'pg'
+ import sql from './${sqlFile}?raw'
+
+ export async function up(client: PoolClient) {
+   await client.query(sql)
+ }
+ `
+
+     // write the typescript file
+     await writeFile(tsFilePath, migrationContent)
+     console.info(`Successfully created ${tsFileName}`)
+   }
+
+   console.info('Migration sync completed.')
+ }
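A hedged invocation sketch for the sync step; the directory and the root re-export are assumptions. Each `NNNN_*.sql` file generated by Drizzle ends up with a sibling `NNNN_*.ts` wrapper built from the template above.

```ts
// Hypothetical call: the migrations directory and the root re-export are assumptions.
import { syncDrizzleMigrations } from '@take-out/postgres'

await syncDrizzleMigrations({ migrationsDir: 'src/migrations' })
// e.g. 0007_flat_wolverine.sql -> 0007_flat_wolverine.ts (imports the SQL via ?raw)
```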
package/src/scripts/migration-add.ts ADDED
@@ -0,0 +1,54 @@
+ import { join } from 'node:path'
+ import { readdirSync, writeFileSync } from 'node:fs'
+
+ export type MigrationAddOptions = {
+   migrationsDir: string
+   name?: string
+ }
+
+ const commonWords = ['sapphire', 'emerald', 'ruby', 'amber', 'topaz', 'onyx', 'pearl']
+
+ export function addMigration(options: MigrationAddOptions): string {
+   const { migrationsDir, name } = options
+
+   // if no name is provided, pick a random one from common words
+   const migrationName =
+     name || commonWords[Math.floor(Math.random() * commonWords.length)]!
+
+   // read all files in the migrations directory
+   const files = readdirSync(migrationsDir)
+
+   // find the highest migration number from both .ts and .sql files
+   const migrationRegex = /^(\d+)[-_].*\.(ts|sql)$/
+   let maxNumber = 0
+   for (const file of files) {
+     const match = file.match(migrationRegex)
+     if (match?.[1]) {
+       const num = Number.parseInt(match[1], 10)
+       if (!Number.isNaN(num) && num > maxNumber) {
+         maxNumber = num
+       }
+     }
+   }
+
+   // calculate the next migration number and pad to 4 digits
+   const nextNumber = (maxNumber + 1).toString().padStart(4, '0')
+   const newFilename = `${nextNumber}-${migrationName}.ts`
+   const newFilePath = join(migrationsDir, newFilename)
+
+   // create a template for custom TypeScript migrations
+   const templateContent = `import type { PoolClient } from 'pg'
+
+ export async function up(client: PoolClient) {
+   // implementation for applying this migration
+ }
+ `
+
+   // write the template to the new file
+   writeFileSync(newFilePath, templateContent)
+
+   console.info(`Created custom migration: ${newFilePath}`)
+   console.info(`For Drizzle schema migrations, run 'drizzle-kit generate' instead`)
+
+   return newFilePath
+ }
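A hedged sketch of `addMigration`; the directory and name are assumptions. With an existing highest prefix of `0006`, this would write `0007-add-users-table.ts` from the template above.

```ts
// Hypothetical call: directory, name, and the root re-export are assumptions.
import { addMigration } from '@take-out/postgres'

const filePath = addMigration({ migrationsDir: 'src/migrations', name: 'add-users-table' })
console.info(filePath) // e.g. src/migrations/0007-add-users-table.ts
```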
package/src/scripts/pg_dump.ts ADDED
@@ -0,0 +1,46 @@
+ export type PgDumpOptions = {
+   connectionString?: string
+   host?: string
+   port?: number
+   database?: string
+   user?: string
+   password?: string
+   args?: string[]
+ }
+
+ export function runPgDump(options: PgDumpOptions = {}) {
+   const { connectionString, host, port, database, user, password, args = [] } = options
+
+   const env: Record<string, string> = {
+     ...process.env,
+   } as any
+
+   if (connectionString) {
+     // parse connection string and set individual env vars
+     const url = new URL(connectionString)
+     env.PGHOST = url.hostname
+     env.PGPORT = url.port || '5432'
+     env.PGDATABASE = url.pathname.slice(1)
+     env.PGUSER = url.username
+     if (url.password) {
+       env.PGPASSWORD = url.password
+     }
+   } else {
+     if (host) env.PGHOST = host
+     if (port) env.PGPORT = port.toString()
+     if (database) env.PGDATABASE = database
+     if (user) env.PGUSER = user
+     if (password) env.PGPASSWORD = password
+   }
+
+   console.info(`Running pg_dump on postgres ${env.PGHOST}/${env.PGDATABASE}`)
+
+   const result = Bun.spawnSync(['pg_dump', ...args], {
+     stdin: 'ignore',
+     stdout: 'inherit',
+     stderr: 'inherit',
+     env,
+   })
+
+   return result.exitCode
+ }
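A hedged sketch of `runPgDump`; it requires the Bun runtime since it shells out via `Bun.spawnSync`, and the connection string and flags below are assumptions.

```ts
// Hypothetical call: connection string and pg_dump flags are assumptions.
import { runPgDump } from '@take-out/postgres'

const exitCode = runPgDump({
  connectionString: 'postgres://user:pass@localhost:5432/app',
  args: ['--schema-only', '--no-owner'],
})
if (exitCode !== 0) process.exit(exitCode ?? 1)
```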
package/src/scripts/psql.ts ADDED
@@ -0,0 +1,51 @@
+ export type PsqlOptions = {
+   connectionString?: string
+   host?: string
+   port?: number
+   database?: string
+   user?: string
+   password?: string
+   query?: string
+ }
+
+ export function runPsql(options: PsqlOptions = {}) {
+   const { connectionString, host, port, database, user, password, query } = options
+
+   const env: Record<string, string> = {
+     ...process.env,
+   } as any
+
+   if (connectionString) {
+     // parse connection string and set individual env vars
+     const url = new URL(connectionString)
+     env.PGHOST = url.hostname
+     env.PGPORT = url.port || '5432'
+     env.PGDATABASE = url.pathname.slice(1)
+     env.PGUSER = url.username
+     if (url.password) {
+       env.PGPASSWORD = url.password
+     }
+   } else {
+     if (host) env.PGHOST = host
+     if (port) env.PGPORT = port.toString()
+     if (database) env.PGDATABASE = database
+     if (user) env.PGUSER = user
+     if (password) env.PGPASSWORD = password
+   }
+
+   const args: string[] = []
+   if (query) {
+     args.push('-c', query)
+   }
+
+   console.info(`Connecting to postgres ${env.PGHOST}/${env.PGDATABASE}`)
+
+   const result = Bun.spawnSync(['psql', ...args], {
+     stdin: 'ignore',
+     stdout: 'inherit',
+     stderr: 'inherit',
+     env,
+   })
+
+   return result.exitCode
+ }
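And the equivalent hedged sketch for `runPsql`, again assuming the Bun runtime and illustrative connection details.

```ts
// Hypothetical call: runs a one-off statement through psql.
import { runPsql } from '@take-out/postgres'

runPsql({
  connectionString: 'postgres://user:pass@localhost:5432/app',
  query: 'select count(*) from migrations',
})
```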
package/src/sql.ts ADDED
@@ -0,0 +1,36 @@
+ import type { Pool, QueryResult } from 'pg'
+ import { ellipsis } from '@take-out/helpers'
+
+ export type SqlQuery = {
+   text: string
+   values: any[]
+ }
+
+ export const createSql = (pool: Pool) => {
+   return (strings: TemplateStringsArray, ...values: any[]): Promise<QueryResult<any>> => {
+     const text = strings.reduce((result, str, i) => {
+       return result + str + (i < values.length ? `$${i + 1}` : '')
+     }, '')
+
+     console.info(`sql: ${ellipsis(text, 80)}`)
+
+     return pool.query(text.trim(), values)
+   }
+ }
+
+ // default export for backward compatibility
+ let defaultPool: Pool | null = null
+
+ export const setDefaultPool = (pool: Pool) => {
+   defaultPool = pool
+ }
+
+ export const sql = (
+   strings: TemplateStringsArray,
+   ...values: any[]
+ ): Promise<QueryResult<any>> => {
+   if (!defaultPool) {
+     throw new Error('No default pool set. Call setDefaultPool() first or use createSql()')
+   }
+   return createSql(defaultPool)(strings, ...values)
+ }
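The tagged-template helper turns interpolations into `$1, $2, ...` placeholders and passes the values as bind parameters. A minimal sketch, assuming these exports are available from the package root and a locally configured pool:

```ts
// A minimal sketch: pool config and root re-exports are assumptions.
import pg from 'pg'
import { createSql, setDefaultPool, sql } from '@take-out/postgres'

const pool = new pg.Pool({ connectionString: process.env.DATABASE_URL })

// per-pool helper: values are sent as parameters, never concatenated into the SQL text
const query = createSql(pool)
const { rows } = await query`SELECT id, name FROM users WHERE id = ${42}`

// module-level helper, once a default pool has been registered
setDefaultPool(pool)
await sql`UPDATE users SET name = ${'Ada'} WHERE id = ${42}`
```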
package/types/build.d.ts ADDED
@@ -0,0 +1,2 @@
+ export { buildMigrations } from './scripts/build-migrations';
+ //# sourceMappingURL=build.d.ts.map
package/types/build.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"build.d.ts","sourceRoot":"","sources":["../src/build.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAA"}
package/types/cli.d.ts ADDED
@@ -0,0 +1,3 @@
+ #!/usr/bin/env node
+ export {};
+ //# sourceMappingURL=cli.d.ts.map
package/types/cli.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"cli.d.ts","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":""}
package/types/createServerHelpers.d.ts ADDED
@@ -0,0 +1,9 @@
+ import type { Pool } from 'pg';
+ import { createSql } from './sql';
+ import { getDBClient, type GetDBClientOptions } from './helpers/getDBClient';
+ export type ServerHelpers = {
+     sql: ReturnType<typeof createSql>;
+     getDBClient: (options?: Omit<GetDBClientOptions, 'pool' | 'connectionString'>) => ReturnType<typeof getDBClient>;
+ };
+ export declare function createServerHelpers(pool: Pool): ServerHelpers;
+ //# sourceMappingURL=createServerHelpers.d.ts.map
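A minimal sketch against the `ServerHelpers` shape declared above; the pool setup and the root re-export are assumptions.

```ts
// Hypothetical wiring: pool options are assumptions; the helpers bind the pool for you.
import pg from 'pg'
import { createServerHelpers } from '@take-out/postgres'

const pool = new pg.Pool({ connectionString: process.env.DATABASE_URL })
const { sql, getDBClient } = createServerHelpers(pool)

const users = await sql`SELECT id FROM users LIMIT ${10}`
const client = await getDBClient({ retries: 3 }) // pool/connectionString are already provided
try {
  await client.query('SELECT 1')
} finally {
  client.release()
}
```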
package/types/createServerHelpers.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"createServerHelpers.d.ts","sourceRoot":"","sources":["../src/createServerHelpers.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,IAAI,CAAA;AAC9B,OAAO,EAAE,SAAS,EAAkB,MAAM,OAAO,CAAA;AACjD,OAAO,EAAE,WAAW,EAAE,KAAK,kBAAkB,EAAE,MAAM,uBAAuB,CAAA;AAE5E,MAAM,MAAM,aAAa,GAAG;IAC1B,GAAG,EAAE,UAAU,CAAC,OAAO,SAAS,CAAC,CAAA;IACjC,WAAW,EAAE,CACX,OAAO,CAAC,EAAE,IAAI,CAAC,kBAAkB,EAAE,MAAM,GAAG,kBAAkB,CAAC,KAC5D,UAAU,CAAC,OAAO,WAAW,CAAC,CAAA;CACpC,CAAA;AAED,wBAAgB,mBAAmB,CAAC,IAAI,EAAE,IAAI,GAAG,aAAa,CAQ7D"}
package/types/helpers/chunkedQuery.d.ts ADDED
@@ -0,0 +1,17 @@
+ import type { PoolClient, QueryResultRow } from 'pg';
+ interface ChunkedQueryOptions {
+     chunkSize?: number;
+     onProgress?: (processed: number, total: number) => void;
+ }
+ /**
+  * Process database records in chunks to avoid memory issues with large datasets
+  */
+ export declare function processInChunks<T extends QueryResultRow = QueryResultRow>(client: PoolClient, query: string, processor: (rows: T[]) => Promise<void>, options?: ChunkedQueryOptions): Promise<void>;
+ /**
+  * Update records in chunks with a transformer function
+  */
+ export declare function updateInChunks<T extends QueryResultRow & {
+     id: string;
+ }>(client: PoolClient, tableName: string, selectQuery: string, transformer: (row: T) => Promise<Partial<T> | null>, options?: ChunkedQueryOptions): Promise<number>;
+ export {};
+ //# sourceMappingURL=chunkedQuery.d.ts.map
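A hedged sketch against the chunked-query declarations above; the table, columns, chunk size, and root re-exports are assumptions, and the behavior is inferred from the signatures only.

```ts
// Hypothetical usage: identifiers are assumptions; signatures follow the .d.ts above.
import { getDBClient, processInChunks, updateInChunks } from '@take-out/postgres'

const client = await getDBClient({ connectionString: process.env.DATABASE_URL })

// read a large result set in manageable chunks
await processInChunks<{ id: string; email: string }>(
  client,
  'SELECT id, email FROM users ORDER BY id',
  async (rows) => {
    for (const row of rows) console.info(row.email)
  },
  { chunkSize: 500, onProgress: (done, total) => console.info(`${done}/${total}`) }
)

// transform rows chunk by chunk; per the signature, the transformer may also return null
const updatedCount = await updateInChunks<{ id: string; email: string }>(
  client,
  'users',
  'SELECT id, email FROM users',
  async (row) => ({ email: row.email.toLowerCase() })
)
console.info(`updated ${updatedCount} rows`)
client.release()
```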
package/types/helpers/chunkedQuery.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"chunkedQuery.d.ts","sourceRoot":"","sources":["../../src/helpers/chunkedQuery.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,cAAc,EAAE,MAAM,IAAI,CAAA;AAEpD,UAAU,mBAAmB;IAC3B,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,UAAU,CAAC,EAAE,CAAC,SAAS,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,KAAK,IAAI,CAAA;CACxD;AAED;;GAEG;AACH,wBAAsB,eAAe,CAAC,CAAC,SAAS,cAAc,GAAG,cAAc,EAC7E,MAAM,EAAE,UAAU,EAClB,KAAK,EAAE,MAAM,EACb,SAAS,EAAE,CAAC,IAAI,EAAE,CAAC,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,EACvC,OAAO,GAAE,mBAAwB,GAChC,OAAO,CAAC,IAAI,CAAC,CAoCf;AAED;;GAEG;AACH,wBAAsB,cAAc,CAAC,CAAC,SAAS,cAAc,GAAG;IAAE,EAAE,EAAE,MAAM,CAAA;CAAE,EAC5E,MAAM,EAAE,UAAU,EAClB,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,MAAM,EACnB,WAAW,EAAE,CAAC,GAAG,EAAE,CAAC,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,EACnD,OAAO,GAAE,mBAAwB,GAChC,OAAO,CAAC,MAAM,CAAC,CA4BjB"}
package/types/helpers/getDBClient.d.ts ADDED
@@ -0,0 +1,11 @@
+ import pg, { type Pool, type PoolClient } from 'pg';
+ export type GetDBClientOptions = {
+     pool?: Pool;
+     connectionString?: string;
+     retries?: number;
+     onRetry?: (error: Error, attempt: number) => void;
+ };
+ export declare function getDBClient(options?: GetDBClientOptions): Promise<PoolClient>;
+ export declare function queryDb(queryText: string, params?: any[], options?: GetDBClientOptions): Promise<pg.QueryResult<any>>;
+ export declare function getNewClient(options?: GetDBClientOptions): Promise<pg.Client>;
+ //# sourceMappingURL=getDBClient.d.ts.map
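Finally, a minimal sketch for the connection helpers declared above; the retry settings, query text, and root re-exports are assumptions.

```ts
// Hypothetical usage: options follow the GetDBClientOptions declaration above.
import { getDBClient, queryDb } from '@take-out/postgres'

const client = await getDBClient({
  connectionString: process.env.DATABASE_URL,
  retries: 3,
  onRetry: (err, attempt) => console.warn(`connect attempt ${attempt} failed: ${err.message}`),
})
try {
  const { rows } = await client.query('SELECT NOW()')
  console.info(rows[0])
} finally {
  client.release()
}

// one-shot query helper, per the declared signature
const result = await queryDb('SELECT 1 AS ok')
console.info(result.rows)
```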