@take-out/postgres 0.0.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +242 -0
- package/cli.cjs +3 -0
- package/dist/cjs/build.cjs +26 -0
- package/dist/cjs/build.js +21 -0
- package/dist/cjs/build.js.map +6 -0
- package/dist/cjs/build.native.js +29 -0
- package/dist/cjs/build.native.js.map +1 -0
- package/dist/cjs/cli.cjs +156 -0
- package/dist/cjs/cli.js +128 -0
- package/dist/cjs/cli.js.map +6 -0
- package/dist/cjs/cli.native.js +164 -0
- package/dist/cjs/cli.native.js.map +1 -0
- package/dist/cjs/createServerHelpers.cjs +37 -0
- package/dist/cjs/createServerHelpers.js +28 -0
- package/dist/cjs/createServerHelpers.js.map +6 -0
- package/dist/cjs/createServerHelpers.native.js +43 -0
- package/dist/cjs/createServerHelpers.native.js.map +1 -0
- package/dist/cjs/helpers/chunkedQuery.cjs +60 -0
- package/dist/cjs/helpers/chunkedQuery.js +51 -0
- package/dist/cjs/helpers/chunkedQuery.js.map +6 -0
- package/dist/cjs/helpers/chunkedQuery.native.js +73 -0
- package/dist/cjs/helpers/chunkedQuery.native.js.map +1 -0
- package/dist/cjs/helpers/getDBClient.cjs +172 -0
- package/dist/cjs/helpers/getDBClient.js +179 -0
- package/dist/cjs/helpers/getDBClient.js.map +6 -0
- package/dist/cjs/helpers/getDBClient.native.js +189 -0
- package/dist/cjs/helpers/getDBClient.native.js.map +1 -0
- package/dist/cjs/index.cjs +59 -0
- package/dist/cjs/index.js +45 -0
- package/dist/cjs/index.js.map +6 -0
- package/dist/cjs/index.native.js +64 -0
- package/dist/cjs/index.native.js.map +1 -0
- package/dist/cjs/migrate.cjs +117 -0
- package/dist/cjs/migrate.js +106 -0
- package/dist/cjs/migrate.js.map +6 -0
- package/dist/cjs/migrate.native.js +185 -0
- package/dist/cjs/migrate.native.js.map +1 -0
- package/dist/cjs/scripts/build-migrations.cjs +81 -0
- package/dist/cjs/scripts/build-migrations.js +69 -0
- package/dist/cjs/scripts/build-migrations.js.map +6 -0
- package/dist/cjs/scripts/build-migrations.native.js +86 -0
- package/dist/cjs/scripts/build-migrations.native.js.map +1 -0
- package/dist/cjs/scripts/drizzle-migrations-sync.cjs +111 -0
- package/dist/cjs/scripts/drizzle-migrations-sync.js +101 -0
- package/dist/cjs/scripts/drizzle-migrations-sync.js.map +6 -0
- package/dist/cjs/scripts/drizzle-migrations-sync.native.js +209 -0
- package/dist/cjs/scripts/drizzle-migrations-sync.native.js.map +1 -0
- package/dist/cjs/scripts/migration-add.cjs +53 -0
- package/dist/cjs/scripts/migration-add.js +40 -0
- package/dist/cjs/scripts/migration-add.js.map +6 -0
- package/dist/cjs/scripts/migration-add.native.js +72 -0
- package/dist/cjs/scripts/migration-add.native.js.map +1 -0
- package/dist/cjs/scripts/pg_dump.cjs +49 -0
- package/dist/cjs/scripts/pg_dump.js +36 -0
- package/dist/cjs/scripts/pg_dump.js.map +6 -0
- package/dist/cjs/scripts/pg_dump.native.js +55 -0
- package/dist/cjs/scripts/pg_dump.native.js.map +1 -0
- package/dist/cjs/scripts/psql.cjs +50 -0
- package/dist/cjs/scripts/psql.js +37 -0
- package/dist/cjs/scripts/psql.js.map +6 -0
- package/dist/cjs/scripts/psql.native.js +56 -0
- package/dist/cjs/scripts/psql.native.js.map +1 -0
- package/dist/cjs/sql.cjs +40 -0
- package/dist/cjs/sql.js +35 -0
- package/dist/cjs/sql.js.map +6 -0
- package/dist/cjs/sql.native.js +49 -0
- package/dist/cjs/sql.native.js.map +1 -0
- package/dist/esm/build.js +5 -0
- package/dist/esm/build.js.map +6 -0
- package/dist/esm/build.mjs +3 -0
- package/dist/esm/build.mjs.map +1 -0
- package/dist/esm/build.native.js +3 -0
- package/dist/esm/build.native.js.map +1 -0
- package/dist/esm/cli.js +134 -0
- package/dist/esm/cli.js.map +6 -0
- package/dist/esm/cli.mjs +157 -0
- package/dist/esm/cli.mjs.map +1 -0
- package/dist/esm/cli.native.js +162 -0
- package/dist/esm/cli.native.js.map +1 -0
- package/dist/esm/createServerHelpers.js +13 -0
- package/dist/esm/createServerHelpers.js.map +6 -0
- package/dist/esm/createServerHelpers.mjs +14 -0
- package/dist/esm/createServerHelpers.mjs.map +1 -0
- package/dist/esm/createServerHelpers.native.js +17 -0
- package/dist/esm/createServerHelpers.native.js.map +1 -0
- package/dist/esm/helpers/chunkedQuery.js +35 -0
- package/dist/esm/helpers/chunkedQuery.js.map +6 -0
- package/dist/esm/helpers/chunkedQuery.mjs +36 -0
- package/dist/esm/helpers/chunkedQuery.mjs.map +1 -0
- package/dist/esm/helpers/chunkedQuery.native.js +46 -0
- package/dist/esm/helpers/chunkedQuery.native.js.map +1 -0
- package/dist/esm/helpers/getDBClient.js +155 -0
- package/dist/esm/helpers/getDBClient.js.map +6 -0
- package/dist/esm/helpers/getDBClient.mjs +136 -0
- package/dist/esm/helpers/getDBClient.mjs.map +1 -0
- package/dist/esm/helpers/getDBClient.native.js +150 -0
- package/dist/esm/helpers/getDBClient.native.js.map +1 -0
- package/dist/esm/index.js +29 -0
- package/dist/esm/index.js.map +6 -0
- package/dist/esm/index.mjs +18 -0
- package/dist/esm/index.mjs.map +1 -0
- package/dist/esm/index.native.js +20 -0
- package/dist/esm/index.native.js.map +1 -0
- package/dist/esm/migrate.js +91 -0
- package/dist/esm/migrate.js.map +6 -0
- package/dist/esm/migrate.mjs +94 -0
- package/dist/esm/migrate.mjs.map +1 -0
- package/dist/esm/migrate.native.js +159 -0
- package/dist/esm/migrate.native.js.map +1 -0
- package/dist/esm/scripts/build-migrations.js +46 -0
- package/dist/esm/scripts/build-migrations.js.map +6 -0
- package/dist/esm/scripts/build-migrations.mjs +47 -0
- package/dist/esm/scripts/build-migrations.mjs.map +1 -0
- package/dist/esm/scripts/build-migrations.native.js +49 -0
- package/dist/esm/scripts/build-migrations.native.js.map +1 -0
- package/dist/esm/scripts/drizzle-migrations-sync.js +87 -0
- package/dist/esm/scripts/drizzle-migrations-sync.js.map +6 -0
- package/dist/esm/scripts/drizzle-migrations-sync.mjs +88 -0
- package/dist/esm/scripts/drizzle-migrations-sync.mjs.map +1 -0
- package/dist/esm/scripts/drizzle-migrations-sync.native.js +183 -0
- package/dist/esm/scripts/drizzle-migrations-sync.native.js.map +1 -0
- package/dist/esm/scripts/migration-add.js +25 -0
- package/dist/esm/scripts/migration-add.js.map +6 -0
- package/dist/esm/scripts/migration-add.mjs +30 -0
- package/dist/esm/scripts/migration-add.mjs.map +1 -0
- package/dist/esm/scripts/migration-add.native.js +46 -0
- package/dist/esm/scripts/migration-add.native.js.map +1 -0
- package/dist/esm/scripts/pg_dump.js +20 -0
- package/dist/esm/scripts/pg_dump.js.map +6 -0
- package/dist/esm/scripts/pg_dump.mjs +26 -0
- package/dist/esm/scripts/pg_dump.mjs.map +1 -0
- package/dist/esm/scripts/pg_dump.native.js +29 -0
- package/dist/esm/scripts/pg_dump.native.js.map +1 -0
- package/dist/esm/scripts/psql.js +21 -0
- package/dist/esm/scripts/psql.js.map +6 -0
- package/dist/esm/scripts/psql.mjs +27 -0
- package/dist/esm/scripts/psql.mjs.map +1 -0
- package/dist/esm/scripts/psql.native.js +30 -0
- package/dist/esm/scripts/psql.native.js.map +1 -0
- package/dist/esm/sql.js +19 -0
- package/dist/esm/sql.js.map +6 -0
- package/dist/esm/sql.mjs +15 -0
- package/dist/esm/sql.mjs.map +1 -0
- package/dist/esm/sql.native.js +21 -0
- package/dist/esm/sql.native.js.map +1 -0
- package/package.json +67 -0
- package/src/build.ts +2 -0
- package/src/cli.ts +153 -0
- package/src/createServerHelpers.ts +20 -0
- package/src/helpers/chunkedQuery.ts +91 -0
- package/src/helpers/getDBClient.ts +264 -0
- package/src/index.ts +36 -0
- package/src/migrate.ts +192 -0
- package/src/scripts/build-migrations.ts +66 -0
- package/src/scripts/drizzle-migrations-sync.ts +179 -0
- package/src/scripts/migration-add.ts +54 -0
- package/src/scripts/pg_dump.ts +46 -0
- package/src/scripts/psql.ts +51 -0
- package/src/sql.ts +36 -0
- package/types/build.d.ts +2 -0
- package/types/build.d.ts.map +1 -0
- package/types/cli.d.ts +3 -0
- package/types/cli.d.ts.map +1 -0
- package/types/createServerHelpers.d.ts +9 -0
- package/types/createServerHelpers.d.ts.map +1 -0
- package/types/helpers/chunkedQuery.d.ts +17 -0
- package/types/helpers/chunkedQuery.d.ts.map +1 -0
- package/types/helpers/getDBClient.d.ts +11 -0
- package/types/helpers/getDBClient.d.ts.map +1 -0
- package/types/index.d.ts +10 -0
- package/types/index.d.ts.map +1 -0
- package/types/migrate.d.ts +25 -0
- package/types/migrate.d.ts.map +1 -0
- package/types/scripts/build-migrations.d.ts +8 -0
- package/types/scripts/build-migrations.d.ts.map +1 -0
- package/types/scripts/drizzle-migrations-sync.d.ts +11 -0
- package/types/scripts/drizzle-migrations-sync.d.ts.map +1 -0
- package/types/scripts/migration-add.d.ts +6 -0
- package/types/scripts/migration-add.d.ts.map +1 -0
- package/types/scripts/pg_dump.d.ts +11 -0
- package/types/scripts/pg_dump.d.ts.map +1 -0
- package/types/scripts/psql.d.ts +11 -0
- package/types/scripts/psql.d.ts.map +1 -0
- package/types/sql.d.ts +9 -0
- package/types/sql.d.ts.map +1 -0
package/package.json
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@take-out/postgres",
|
|
3
|
+
"version": "0.0.28",
|
|
4
|
+
"sideEffects": false,
|
|
5
|
+
"source": "src/index.ts",
|
|
6
|
+
"main": "dist/cjs",
|
|
7
|
+
"module": "dist/esm",
|
|
8
|
+
"type": "module",
|
|
9
|
+
"types": "./src/index.ts",
|
|
10
|
+
"bin": {
|
|
11
|
+
"postgres": "./cli.cjs"
|
|
12
|
+
},
|
|
13
|
+
"files": [
|
|
14
|
+
"src",
|
|
15
|
+
"types",
|
|
16
|
+
"dist",
|
|
17
|
+
"cli.cjs"
|
|
18
|
+
],
|
|
19
|
+
"publishConfig": {
|
|
20
|
+
"access": "public"
|
|
21
|
+
},
|
|
22
|
+
"scripts": {
|
|
23
|
+
"build": "tamagui-build",
|
|
24
|
+
"watch": "tamagui-build --watch",
|
|
25
|
+
"lint": "biome check src",
|
|
26
|
+
"lint:fix": "biome check --write src",
|
|
27
|
+
"clean": "tamagui-build clean",
|
|
28
|
+
"clean:build": "tamagui-build clean:build"
|
|
29
|
+
},
|
|
30
|
+
"exports": {
|
|
31
|
+
"./package.json": "./package.json",
|
|
32
|
+
".": {
|
|
33
|
+
"types": "./src/index.ts",
|
|
34
|
+
"import": "./dist/esm/index.mjs",
|
|
35
|
+
"require": "./dist/cjs/index.cjs",
|
|
36
|
+
"default": "./dist/esm/index.mjs"
|
|
37
|
+
},
|
|
38
|
+
"./migrate": {
|
|
39
|
+
"types": "./src/migrate.ts",
|
|
40
|
+
"import": "./dist/esm/migrate.mjs",
|
|
41
|
+
"require": "./dist/cjs/migrate.cjs"
|
|
42
|
+
},
|
|
43
|
+
"./build": {
|
|
44
|
+
"types": "./src/build.ts",
|
|
45
|
+
"import": "./dist/esm/build.mjs",
|
|
46
|
+
"require": "./dist/cjs/build.cjs"
|
|
47
|
+
}
|
|
48
|
+
},
|
|
49
|
+
"dependencies": {
|
|
50
|
+
"@take-out/helpers": "0.0.28",
|
|
51
|
+
"async-retry": "^1.3.3",
|
|
52
|
+
"citty": "^0.1.6",
|
|
53
|
+
"vite": "^6.0.11"
|
|
54
|
+
},
|
|
55
|
+
"peerDependencies": {
|
|
56
|
+
"drizzle-orm": "*",
|
|
57
|
+
"pg": "^8.16.3"
|
|
58
|
+
},
|
|
59
|
+
"devDependencies": {
|
|
60
|
+
"@biomejs/biome": "^2.1.2",
|
|
61
|
+
"@tamagui/build": "*",
|
|
62
|
+
"@types/async-retry": "^1.4.8",
|
|
63
|
+
"@types/node": "24.0.3",
|
|
64
|
+
"drizzle-kit": "^0.30.5",
|
|
65
|
+
"drizzle-orm": "^0.40.0"
|
|
66
|
+
}
|
|
67
|
+
}
|
package/src/build.ts
ADDED
package/src/cli.ts
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { defineCommand, runMain } from 'citty'
|
|
3
|
+
import { join } from 'node:path'
|
|
4
|
+
import { buildMigrations as buildMigrationsRun } from './scripts/build-migrations'
|
|
5
|
+
import { syncDrizzleMigrations } from './scripts/drizzle-migrations-sync'
|
|
6
|
+
import { addMigration } from './scripts/migration-add'
|
|
7
|
+
import { runPgDump } from './scripts/pg_dump'
|
|
8
|
+
import { runPsql } from './scripts/psql'
|
|
9
|
+
|
|
10
|
+
// `sync-drizzle` subcommand: regenerates TypeScript wrapper modules from the
// Drizzle-generated SQL migrations in the given directory.
const syncDrizzle = defineCommand({
  meta: {
    name: 'sync-drizzle',
    description: 'Sync Drizzle SQL migrations to TypeScript wrappers',
  },
  args: {
    dir: {
      type: 'string',
      description: 'Migrations directory',
      required: false,
      default: './src/database/migrations',
    },
  },
  async run({ args }) {
    // resolve the migrations directory relative to the caller's cwd
    const migrationsDir = join(process.cwd(), args.dir)
    console.info(`Syncing migrations in ${migrationsDir}`)
    await syncDrizzleMigrations({ migrationsDir })
  },
})
|
|
29
|
+
|
|
30
|
+
// `build-migrations` subcommand: bundles the migrations directory into a
// single deployable file (default name `migrate-dist.js`).
const buildMigrations = defineCommand({
  meta: {
    name: 'build-migrations',
    description: 'Build migration bundle for deployment',
  },
  args: {
    dir: {
      type: 'string',
      description: 'Migrations directory',
      required: false,
      default: './src/database/migrations',
    },
    out: {
      type: 'string',
      description: 'Output file name',
      required: false,
      default: 'migrate-dist.js',
    },
  },
  async run({ args }) {
    // resolve the migrations directory relative to the caller's cwd
    const migrationsDir = join(process.cwd(), args.dir)
    await buildMigrationsRun({
      migrationsDir,
      outFile: args.out,
    })
  },
})
|
|
57
|
+
|
|
58
|
+
// `migrate:add` subcommand: scaffolds a new custom TypeScript migration file
// in the migrations directory. The positional name is optional.
const migrationAdd = defineCommand({
  meta: {
    name: 'migrate:add',
    description: 'Create a new custom TypeScript migration',
  },
  args: {
    name: {
      type: 'positional',
      description: 'Migration name',
      required: false,
    },
    dir: {
      type: 'string',
      description: 'Migrations directory',
      required: false,
      default: './src/database/migrations',
    },
  },
  async run({ args }) {
    const migrationsDir = join(process.cwd(), args.dir)
    // NOTE(review): addMigration is not awaited — confirm it is synchronous,
    // otherwise the process may report success before the file is written.
    addMigration({ migrationsDir, name: args.name })
  },
})
|
|
81
|
+
|
|
82
|
+
// `psql` subcommand: opens a psql session (or runs a single --query) against
// the database given by --connectionString or the ZERO_UPSTREAM_DB env var.
const psql = defineCommand({
  meta: {
    name: 'psql',
    description: 'Connect to PostgreSQL database with psql',
  },
  args: {
    connectionString: {
      type: 'string',
      description: 'PostgreSQL connection string',
      required: false,
    },
    query: {
      type: 'string',
      description: 'Query to execute',
      required: false,
    },
  },
  async run({ args }) {
    // explicit flag wins over the environment fallback
    const connectionString = args.connectionString || process.env.ZERO_UPSTREAM_DB
    if (!connectionString) {
      console.error(
        'No connection string provided. Set ZERO_UPSTREAM_DB or pass --connectionString'
      )
      process.exit(1)
    }
    // NOTE(review): runPsql is not awaited — this assumes it runs
    // synchronously and returns a numeric exit code; confirm against its impl.
    const exitCode = runPsql({ connectionString, query: args.query })
    process.exit(exitCode || 0)
  },
})
|
|
111
|
+
|
|
112
|
+
// `pg_dump` subcommand: forwards remaining CLI arguments to pg_dump against
// the database given by --connectionString or the ZERO_UPSTREAM_DB env var.
const pgDump = defineCommand({
  meta: {
    name: 'pg_dump',
    description: 'Dump PostgreSQL database using pg_dump',
  },
  args: {
    connectionString: {
      type: 'string',
      description: 'PostgreSQL connection string',
      required: false,
    },
  },
  async run({ args }) {
    // explicit flag wins over the environment fallback
    const connectionString = args.connectionString || process.env.ZERO_UPSTREAM_DB
    if (!connectionString) {
      console.error(
        'No connection string provided. Set ZERO_UPSTREAM_DB or pass --connectionString'
      )
      process.exit(1)
    }
    // NOTE(review): slicing at index 3 assumes invocation `node <cli> pg_dump …`
    // and will also forward a --connectionString flag to pg_dump — verify.
    const cliArgs = process.argv.slice(3) // get args after command name
    const exitCode = runPgDump({ connectionString, args: cliArgs })
    process.exit(exitCode || 0)
  },
})
|
|
137
|
+
|
|
138
|
+
const main = defineCommand({
|
|
139
|
+
meta: {
|
|
140
|
+
name: 'postgres',
|
|
141
|
+
description: 'PostgreSQL database utilities and migration tools',
|
|
142
|
+
version: '0.0.1',
|
|
143
|
+
},
|
|
144
|
+
subCommands: {
|
|
145
|
+
'sync-drizzle': syncDrizzle,
|
|
146
|
+
'build-migrations': buildMigrations,
|
|
147
|
+
'migrate:add': migrationAdd,
|
|
148
|
+
psql,
|
|
149
|
+
pg_dump: pgDump,
|
|
150
|
+
},
|
|
151
|
+
})
|
|
152
|
+
|
|
153
|
+
runMain(main)
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import type { Pool } from 'pg'
|
|
2
|
+
import { createSql, setDefaultPool } from './sql'
|
|
3
|
+
import { getDBClient, type GetDBClientOptions } from './helpers/getDBClient'
|
|
4
|
+
|
|
5
|
+
export type ServerHelpers = {
|
|
6
|
+
sql: ReturnType<typeof createSql>
|
|
7
|
+
getDBClient: (
|
|
8
|
+
options?: Omit<GetDBClientOptions, 'pool' | 'connectionString'>
|
|
9
|
+
) => ReturnType<typeof getDBClient>
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
export function createServerHelpers(pool: Pool): ServerHelpers {
|
|
13
|
+
const sql = createSql(pool)
|
|
14
|
+
setDefaultPool(pool)
|
|
15
|
+
|
|
16
|
+
return {
|
|
17
|
+
sql,
|
|
18
|
+
getDBClient: (options = {}) => getDBClient({ pool, ...options }),
|
|
19
|
+
}
|
|
20
|
+
}
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
import type { PoolClient, QueryResultRow } from 'pg'
|
|
2
|
+
|
|
3
|
+
// Options shared by the chunked-query helpers.
interface ChunkedQueryOptions {
  // rows fetched per LIMIT/OFFSET page (default 1000)
  chunkSize?: number
  // invoked after each processed chunk with (rowsProcessedSoFar, totalRowCount)
  onProgress?: (processed: number, total: number) => void
}
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Process database records in chunks to avoid memory issues with large datasets
|
|
10
|
+
*/
|
|
11
|
+
export async function processInChunks<T extends QueryResultRow = QueryResultRow>(
|
|
12
|
+
client: PoolClient,
|
|
13
|
+
query: string,
|
|
14
|
+
processor: (rows: T[]) => Promise<void>,
|
|
15
|
+
options: ChunkedQueryOptions = {}
|
|
16
|
+
): Promise<void> {
|
|
17
|
+
const { chunkSize = 1000, onProgress } = options
|
|
18
|
+
let offset = 0
|
|
19
|
+
let hasMore = true
|
|
20
|
+
let totalProcessed = 0
|
|
21
|
+
|
|
22
|
+
// first get total count for progress reporting
|
|
23
|
+
const countQuery = query
|
|
24
|
+
.replace(/SELECT .+ FROM/, 'SELECT COUNT(*) FROM')
|
|
25
|
+
.replace(/ORDER BY .+/, '')
|
|
26
|
+
const countResult = await client.query(countQuery)
|
|
27
|
+
const totalCount = Number.parseInt(countResult.rows[0].count, 10)
|
|
28
|
+
|
|
29
|
+
while (hasMore) {
|
|
30
|
+
const paginatedQuery = `${query} LIMIT ${chunkSize} OFFSET ${offset}`
|
|
31
|
+
const result = await client.query<T>(paginatedQuery)
|
|
32
|
+
|
|
33
|
+
if (result.rows.length === 0) {
|
|
34
|
+
hasMore = false
|
|
35
|
+
break
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
await processor(result.rows)
|
|
39
|
+
|
|
40
|
+
totalProcessed += result.rows.length
|
|
41
|
+
offset += chunkSize
|
|
42
|
+
|
|
43
|
+
if (onProgress) {
|
|
44
|
+
onProgress(totalProcessed, totalCount)
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
// check if we've processed all records
|
|
48
|
+
if (result.rows.length < chunkSize) {
|
|
49
|
+
hasMore = false
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
/**
|
|
55
|
+
* Update records in chunks with a transformer function
|
|
56
|
+
*/
|
|
57
|
+
export async function updateInChunks<T extends QueryResultRow & { id: string }>(
|
|
58
|
+
client: PoolClient,
|
|
59
|
+
tableName: string,
|
|
60
|
+
selectQuery: string,
|
|
61
|
+
transformer: (row: T) => Promise<Partial<T> | null>,
|
|
62
|
+
options: ChunkedQueryOptions = {}
|
|
63
|
+
): Promise<number> {
|
|
64
|
+
let totalUpdated = 0
|
|
65
|
+
|
|
66
|
+
await processInChunks<T>(
|
|
67
|
+
client,
|
|
68
|
+
selectQuery,
|
|
69
|
+
async (rows) => {
|
|
70
|
+
for (const row of rows) {
|
|
71
|
+
const updates = await transformer(row)
|
|
72
|
+
|
|
73
|
+
if (updates && Object.keys(updates).length > 0) {
|
|
74
|
+
// build update query dynamically
|
|
75
|
+
const setClause = Object.keys(updates)
|
|
76
|
+
.map((key, index) => `${key} = $${index + 2}`)
|
|
77
|
+
.join(', ')
|
|
78
|
+
|
|
79
|
+
const values = [row.id, ...Object.values(updates)]
|
|
80
|
+
|
|
81
|
+
await client.query(`UPDATE ${tableName} SET ${setClause} WHERE id = $1`, values)
|
|
82
|
+
|
|
83
|
+
totalUpdated++
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
},
|
|
87
|
+
options
|
|
88
|
+
)
|
|
89
|
+
|
|
90
|
+
return totalUpdated
|
|
91
|
+
}
|
|
@@ -0,0 +1,264 @@
|
|
|
1
|
+
import pg, { type Pool, type PoolClient } from 'pg'
|
|
2
|
+
|
|
3
|
+
// some of this file retry logic taken from:
|
|
4
|
+
// https://github.com/brianc/node-postgres/issues/2718#issuecomment-1074019993
|
|
5
|
+
|
|
6
|
+
export type GetDBClientOptions = {
  // reuse an existing pool instead of creating/caching one per connection string
  pool?: Pool
  // used to create (and cache) a pool when no `pool` is given
  connectionString?: string
  // max retry attempts when acquiring a client (default 8)
  retries?: number
  // NOTE(review): declared but not referenced anywhere in the visible code —
  // confirm whether it should be wired into the retry loop
  onRetry?: (error: Error, attempt: number) => void
}
|
|
12
|
+
|
|
13
|
+
const cache = new Map<
|
|
14
|
+
string,
|
|
15
|
+
{
|
|
16
|
+
pool: pg.Pool
|
|
17
|
+
maxConnections: number | null
|
|
18
|
+
reservedConnections: number | null
|
|
19
|
+
openedConnections: number | null
|
|
20
|
+
openedConnectionsLastUpdate: number | null
|
|
21
|
+
}
|
|
22
|
+
>()
|
|
23
|
+
|
|
24
|
+
const createPoolKey = (connectionString: string) => connectionString
|
|
25
|
+
|
|
26
|
+
const getOrCreatePoolCache = (connectionString: string, config: pg.PoolConfig) => {
|
|
27
|
+
const key = createPoolKey(connectionString)
|
|
28
|
+
|
|
29
|
+
if (!cache.has(key)) {
|
|
30
|
+
cache.set(key, {
|
|
31
|
+
pool: new pg.Pool(config),
|
|
32
|
+
maxConnections: null,
|
|
33
|
+
reservedConnections: null,
|
|
34
|
+
openedConnections: null,
|
|
35
|
+
openedConnectionsLastUpdate: null,
|
|
36
|
+
})
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
return cache.get(key)!
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
export async function getDBClient(options: GetDBClientOptions = {}): Promise<PoolClient> {
|
|
43
|
+
const { pool, connectionString, retries = 8 } = options
|
|
44
|
+
|
|
45
|
+
if (!pool && !connectionString) {
|
|
46
|
+
throw new Error('Either pool or connectionString must be provided')
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
let client: PoolClient | null = null
|
|
50
|
+
|
|
51
|
+
try {
|
|
52
|
+
client = await tryToGetNewClientFromPool(pool, connectionString, retries)
|
|
53
|
+
return client
|
|
54
|
+
} catch (error) {
|
|
55
|
+
console.error(`Failed to get DB client:`, error)
|
|
56
|
+
throw error
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
async function tryToGetNewClientFromPool(
|
|
61
|
+
providedPool: Pool | undefined,
|
|
62
|
+
connectionString: string | undefined,
|
|
63
|
+
retries: number
|
|
64
|
+
): Promise<PoolClient> {
|
|
65
|
+
const { default: retry } = await import('async-retry')
|
|
66
|
+
const clientFromPool = await retry(
|
|
67
|
+
async () => {
|
|
68
|
+
if (providedPool) {
|
|
69
|
+
console.info(`Connecting to provided pool...`)
|
|
70
|
+
const client = await providedPool.connect()
|
|
71
|
+
console.info(`Connected to pool`)
|
|
72
|
+
return client
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
if (!connectionString) {
|
|
76
|
+
throw new Error('No connection string provided')
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
const configurations: pg.PoolConfig = {
|
|
80
|
+
connectionString,
|
|
81
|
+
connectionTimeoutMillis: 5_000,
|
|
82
|
+
// idle_session_timeout set to 35s on server, client timeout at 30s
|
|
83
|
+
// fix via https://github.com/brianc/node-postgres/issues/2718#issuecomment-2094885323
|
|
84
|
+
idleTimeoutMillis: 30_000,
|
|
85
|
+
allowExitOnIdle: true,
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
const poolCache = getOrCreatePoolCache(connectionString, configurations)
|
|
89
|
+
|
|
90
|
+
console.info(`Connecting to pool ${connectionString}...`)
|
|
91
|
+
const client = await poolCache.pool.connect()
|
|
92
|
+
console.info(`Connected to pool`)
|
|
93
|
+
return client
|
|
94
|
+
},
|
|
95
|
+
{
|
|
96
|
+
retries,
|
|
97
|
+
minTimeout: 300,
|
|
98
|
+
factor: 2,
|
|
99
|
+
maxTimeout: 8000,
|
|
100
|
+
}
|
|
101
|
+
)
|
|
102
|
+
|
|
103
|
+
return clientFromPool
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
/**
 * Acquire a client, run a single query, and always release the client.
 *
 * When connected via a cached connection-string pool, the finally block also
 * samples server connection pressure and, if too high, ends and evicts the
 * cached pool so new work starts with a fresh one.
 *
 * @param queryText - SQL text (may use $1-style placeholders)
 * @param params - optional positional query parameters
 * @param options - pool / connectionString / retries (default 8)
 * @throws rethrows any connection or query error after logging it
 */
export async function queryDb(
  queryText: string,
  params?: any[],
  options: GetDBClientOptions = {}
): Promise<pg.QueryResult<any>> {
  let client: PoolClient | null = null

  try {
    client = await tryToGetNewClientFromPool(
      options.pool,
      options.connectionString,
      options.retries || 8
    )
    return await client.query(queryText, params)
  } catch (error) {
    console.error(`Database query failed:`, {
      query: queryText,
      error: error instanceof Error ? error.message : String(error),
    })
    throw error
  } finally {
    // only connection-string connections use the module cache, so the
    // "too many connections" teardown applies only on that path
    if (client && options.connectionString) {
      const tooManyConnections = await checkForTooManyConnections(
        client,
        options.connectionString
      )

      if (tooManyConnections) {
        // release this client first, then close and evict the cached pool
        const poolCache = cache.get(createPoolKey(options.connectionString))
        client.release()
        await poolCache?.pool.end()
        if (poolCache) {
          cache.delete(createPoolKey(options.connectionString))
        }
      } else {
        client.release()
      }
    } else if (client) {
      client.release()
    }
  }
}
|
|
148
|
+
|
|
149
|
+
/**
 * Heuristic check for server-side connection pressure.
 *
 * Returns true when the database's opened connections exceed 90% of
 * (max_connections - superuser_reserved_connections). Server limits are
 * fetched once per cached pool; the opened-connection count is re-sampled
 * at most every 10 seconds.
 */
async function checkForTooManyConnections(
  client: PoolClient,
  connectionString: string
): Promise<boolean> {
  const poolCache = cache.get(createPoolKey(connectionString))
  if (!poolCache) return false

  const currentTime = Date.now()
  const openedConnectionsMaxAge = 10000 // ms before re-sampling opened connections
  const maxConnectionsTolerance = 0.9 // trip at 90% of usable connections

  // lazily fetch and cache the server limits (assumed stable per pool)
  if (poolCache.maxConnections === null || poolCache.reservedConnections === null) {
    const [maxConnections, reservedConnections] = await getConnectionLimits(client)
    console.info(
      `Max connections: ${maxConnections}, Reserved connections: ${reservedConnections}`
    )
    poolCache.maxConnections = maxConnections
    poolCache.reservedConnections = reservedConnections
  }

  // refresh the opened-connection sample when missing or stale
  if (
    poolCache.openedConnections === null ||
    poolCache.openedConnectionsLastUpdate === null ||
    currentTime - poolCache.openedConnectionsLastUpdate > openedConnectionsMaxAge
  ) {
    const openedConnections = await getOpenedConnections(client, connectionString)
    poolCache.openedConnections = openedConnections
    poolCache.openedConnectionsLastUpdate = currentTime
  }

  if (
    poolCache.openedConnections >
    (poolCache.maxConnections - poolCache.reservedConnections) * maxConnectionsTolerance
  ) {
    console.warn(
      `Too many connections detected: ${poolCache.openedConnections}/${poolCache.maxConnections - poolCache.reservedConnections}`
    )
    return true
  }

  return false
}
|
|
191
|
+
|
|
192
|
+
async function getConnectionLimits(client: PoolClient): Promise<[number, number]> {
|
|
193
|
+
console.info(`Getting connection limits...`)
|
|
194
|
+
const maxConnectionsResult = await client.query('SHOW max_connections')
|
|
195
|
+
const reservedConnectionResult = await client.query(
|
|
196
|
+
'SHOW superuser_reserved_connections'
|
|
197
|
+
)
|
|
198
|
+
|
|
199
|
+
return [
|
|
200
|
+
Number.parseInt(maxConnectionsResult.rows[0].max_connections, 10),
|
|
201
|
+
Number.parseInt(reservedConnectionResult.rows[0].superuser_reserved_connections, 10),
|
|
202
|
+
]
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
async function getOpenedConnections(
|
|
206
|
+
client: PoolClient,
|
|
207
|
+
connectionString: string
|
|
208
|
+
): Promise<number> {
|
|
209
|
+
// For Aurora/RDS, we need to get the database name from connection string
|
|
210
|
+
const dbName = new URL(connectionString).pathname.slice(1)
|
|
211
|
+
console.info(`Getting opened connections...`)
|
|
212
|
+
const openConnectionsResult = await client.query(
|
|
213
|
+
'SELECT numbackends as opened_connections FROM pg_stat_database WHERE datname = $1',
|
|
214
|
+
[dbName]
|
|
215
|
+
)
|
|
216
|
+
const result = Number.parseInt(
|
|
217
|
+
openConnectionsResult.rows[0]?.opened_connections || 0,
|
|
218
|
+
10
|
|
219
|
+
)
|
|
220
|
+
console.info(`Opened connections: ${result}`)
|
|
221
|
+
return result
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
export async function getNewClient(options: GetDBClientOptions = {}): Promise<pg.Client> {
|
|
225
|
+
const { connectionString } = options
|
|
226
|
+
|
|
227
|
+
if (!connectionString) {
|
|
228
|
+
throw new Error('connectionString is required for getNewClient')
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
try {
|
|
232
|
+
const client = await tryToGetNewClient(connectionString)
|
|
233
|
+
return client
|
|
234
|
+
} catch (error) {
|
|
235
|
+
console.error(`Failed to get new client:`, error)
|
|
236
|
+
throw error
|
|
237
|
+
}
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
async function tryToGetNewClient(connectionString: string): Promise<pg.Client> {
|
|
241
|
+
const configurations: pg.PoolConfig = {
|
|
242
|
+
connectionString,
|
|
243
|
+
connectionTimeoutMillis: 5_000,
|
|
244
|
+
idleTimeoutMillis: 30_000,
|
|
245
|
+
allowExitOnIdle: true,
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
const { default: retry } = await import('async-retry')
|
|
249
|
+
const client = await retry(
|
|
250
|
+
async () => {
|
|
251
|
+
const newClient = new pg.Client(configurations)
|
|
252
|
+
await newClient.connect()
|
|
253
|
+
return newClient
|
|
254
|
+
},
|
|
255
|
+
{
|
|
256
|
+
retries: 10,
|
|
257
|
+
minTimeout: 100,
|
|
258
|
+
factor: 2,
|
|
259
|
+
maxTimeout: 5000,
|
|
260
|
+
}
|
|
261
|
+
)
|
|
262
|
+
|
|
263
|
+
return client
|
|
264
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import type { NodePgDatabase } from 'drizzle-orm/node-postgres'
|
|
2
|
+
import { drizzle } from 'drizzle-orm/node-postgres'
|
|
3
|
+
import type { Pool } from 'pg'
|
|
4
|
+
import pg from 'pg'
|
|
5
|
+
|
|
6
|
+
export const createPool = (connectionString: string): Pool => {
|
|
7
|
+
return new pg.Pool({
|
|
8
|
+
connectionString,
|
|
9
|
+
})
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
export const createDb = <TSchema extends Record<string, unknown>>(
|
|
13
|
+
connectionString: string,
|
|
14
|
+
schema: TSchema
|
|
15
|
+
): NodePgDatabase<TSchema> => {
|
|
16
|
+
const pool = createPool(connectionString)
|
|
17
|
+
return drizzle(pool, {
|
|
18
|
+
schema,
|
|
19
|
+
logger: false,
|
|
20
|
+
}) as NodePgDatabase<TSchema>
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
export type { NodePgDatabase }
|
|
24
|
+
|
|
25
|
+
// re-export for convenience
|
|
26
|
+
export { createSql, type SqlQuery } from './sql'
|
|
27
|
+
|
|
28
|
+
export { createServerHelpers, type ServerHelpers } from './createServerHelpers'
|
|
29
|
+
|
|
30
|
+
export {
|
|
31
|
+
getDBClient,
|
|
32
|
+
queryDb,
|
|
33
|
+
type GetDBClientOptions,
|
|
34
|
+
} from './helpers/getDBClient'
|
|
35
|
+
|
|
36
|
+
export { processInChunks, updateInChunks } from './helpers/chunkedQuery'
|