@juit/pgproxy-utils 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/extract.ts ADDED
@@ -0,0 +1,137 @@
1
+ import { PGClient } from '@juit/pgproxy-client'
2
+ import '@juit/pgproxy-client-psql'
3
+
4
+ import type { Schema } from './index'
5
+
6
+ /* ========================================================================== *
7
+ * INTERNALS *
8
+ * ========================================================================== */
9
+
10
/**
 * Master query to analyse the structure of our database.
 *
 * Returns one row per column in the targeted schemas (`$1`, an array of
 * schema names), joining `pg_catalog` tables to resolve each column's type
 * OID, its enum labels (when the type is an enum) and its comment.
 *
 * NOTE(review): `information_schema.columns.is_nullable` is the string
 * 'YES'/'NO'; the `::bool` cast relies on PostgreSQL accepting those
 * literals as booleans — confirm against the target server version.
 */
const query = `
  SELECT
    "c"."table_schema" AS "schema",
    "c"."table_name" AS "table",
    "c"."column_name" AS "column",
    "c"."is_nullable"::bool AS "isNullable",
    CASE WHEN "c"."column_default" IS NULL
      THEN false
      ELSE true
    END AS "hasDefault",
    "t"."oid" AS "oid",
    "e"."enumValues" AS "enumValues",
    "d"."description" AS "description"
  FROM
    "information_schema"."columns" AS "c"
  -- join the pg_catalog.pg_type to bring in the OIDs
  INNER JOIN
    "pg_catalog"."pg_type" AS "t"
  ON
    "c"."udt_name" = "t"."typname"
  -- join the pg_catalog.pg_statio_all_tables to bring in the OBJIDs
  INNER JOIN
    "pg_catalog"."pg_statio_all_tables" AS "s"
  ON
    "c"."table_schema" = "s"."schemaname" AND
    "c"."table_name" = "s"."relname"
  -- join the pg_catalog.pg_enum to bring in the ENUM VALUESs
  LEFT JOIN (
    SELECT
      "enumtypid",
      "array_agg"("enumlabel")::varchar[] AS "enumValues"
    FROM
      "pg_catalog"."pg_enum"
    GROUP BY
      "enumtypid"
  ) "e"
  ON
    "t"."oid" = "e"."enumtypid"
  -- join the pg_catalog.pg_description to bring in the column DESCRIPTIONs
  LEFT JOIN
    "pg_catalog"."pg_description" AS "d"
  ON
    "d"."objoid" = "s"."relid" AND
    "d"."objsubid" = "c"."ordinal_position"
  -- restrict to our schemas
  WHERE
    "c"."table_schema" = ANY($1)
  -- sanity in ordering
  ORDER BY
    "c"."table_schema",
    "c"."table_name",
    "c"."ordinal_position"`
63
+
64
/** Interface describing the result from our query above */
interface ResultRow {
  /** Schema name the column belongs to */
  schema: string,
  /** Table name the column belongs to */
  table: string,
  /** Column name */
  column: string,
  /** Whether the column is nullable */
  isNullable: boolean,
  /** Whether the column declares a default value */
  hasDefault: boolean,
  /** OID of the column's PostgreSQL type */
  oid: number,
  /** Labels of the enum type, or `null` when the type is not an enum */
  enumValues: string[] | null,
  /** Comment associated with the column, or `null` when none */
  description: string | null,
}
75
+
76
+ /** Strip all undefined values from a record */
77
+ function stripUndefined<T extends Record<string, any>>(object: T): T {
78
+ for (const key of Object.keys(object)) {
79
+ if (object[key] === undefined) delete object[key]
80
+ }
81
+ return object
82
+ }
83
+
84
+ /* ========================================================================== *
85
+ * EXPORTED *
86
+ * ========================================================================== */
87
+
88
+ /**
89
+ * Extract a {@link Schema} from an array of PosgreSQL schema names.
90
+ *
91
+ * When the `schemas` parameter is undefined (or an empty array), then the
92
+ * single `public` schema will be targeted for extraction.
93
+ *
94
+ * Furthermore, unless the `extractAll` flag is set to `true`, only tables and
95
+ * columns starting with a _latin letter_ (a...z) will be included in the
96
+ * resulting {@link Schema}.
97
+ *
98
+ * @param url - The URL of the database to connect to.
99
+ * @param schemas - The array of schema names to target for extraction.
100
+ * @param extractAll - Extract all tables and column definitions.
101
+ */
102
+ export async function extractSchema(
103
+ url: URL | string,
104
+ schemas: string[] = [],
105
+ extractAll: boolean = false,
106
+ ): Promise<Schema> {
107
+ if (schemas.length === 0) schemas.push('public')
108
+
109
+ const client = new PGClient(url)
110
+ let rows: ResultRow[]
111
+ try {
112
+ const result = await client.query<ResultRow>(query, [ schemas ])
113
+ rows = result.rows
114
+ } finally {
115
+ await client.destroy()
116
+ }
117
+
118
+ const schemaDef: Schema = {}
119
+
120
+ for (const row of rows) {
121
+ const { schema, table, column, description, enumValues, ...def } = row
122
+
123
+ if (! extractAll) {
124
+ if ((table.match(/^[^a-z]/i)) || (column.match(/^[^a-z]/i))) continue
125
+ }
126
+
127
+ const name = schema === 'public' ? `${table}` : `${schema}.${table}`
128
+ const tableDef = schemaDef[name] || (schemaDef[name] = {})
129
+
130
+ tableDef[column] = stripUndefined({ ...def,
131
+ description: description?.trim() ? description.trim() : undefined,
132
+ enumValues: (enumValues && enumValues.length) ? enumValues as any : undefined,
133
+ })
134
+ }
135
+
136
+ return schemaDef
137
+ }
package/src/helpers.ts ADDED
@@ -0,0 +1,30 @@
1
+ import ts from 'typescript'
2
+
3
+ /**
4
+ * Create a PosgreSQL array type for the given type, that is, given the type
5
+ * `T`, return `(T | null)[]`
6
+ */
7
+ export function makePostgresArrayType(type: ts.TypeNode): ts.ArrayTypeNode {
8
+ const nullable = ts.factory.createLiteralTypeNode(ts.factory.createNull())
9
+ const union = ts.factory.createUnionTypeNode([ type, nullable ])
10
+ const array = ts.factory.createArrayTypeNode(union)
11
+ return array
12
+ }
13
+
14
+ /**
15
+ * Create an _import_ type, like `import('module').Name<arg0, arg1, ...>`.
16
+ */
17
+ export function makeImportType(
18
+ module: string,
19
+ name: string,
20
+ args: ts.TypeNode | ts.TypeNode[] = [],
21
+ ): ts.ImportTypeNode {
22
+ if (! Array.isArray(args)) args = [ args ]
23
+
24
+ return ts.factory.createImportTypeNode( // ..................... "import"
25
+ ts.factory.createLiteralTypeNode(
26
+ ts.factory.createStringLiteral(module)), // ............ "('module')"
27
+ undefined, // import assertions
28
+ ts.factory.createIdentifier(name), // ........................ ".Type"
29
+ args) // ................................................... "<Arg, ...>"
30
+ }
package/src/index.ts ADDED
@@ -0,0 +1,22 @@
1
/** Schema definition extracted from PostgreSQL */
export interface Schema {
  /* Tables keyed by name: bare name for "public", "schema.table" otherwise */
  [ table: string ] : {
    /* Columns keyed by their column name */
    [ column: string ] : {
      /** OID of the column's PostgreSQL type */
      oid: number,
      /** Whether the column is nullable */
      isNullable?: boolean,
      /** Whether the column declares a default value */
      hasDefault?: boolean,
      /** Comment associated with the column (omitted when blank) */
      description?: string,
      /** Labels of the enum type (omitted when the type is not an enum) */
      enumValues?: readonly [ string, ...string[] ],
    }
  }
}
13
+
14
+ export * from './database'
15
+ export * from './extract'
16
+ export * from './migrate'
17
+ export * from './serialize'
18
+
19
+ /** Helper functions for serializing schemas */
20
+ export * as helpers from './helpers'
21
+ /** Known/basic types for serializing schemas */
22
+ export * as types from './types'
package/src/migrate.ts ADDED
@@ -0,0 +1,177 @@
1
+ import crypto from 'node:crypto'
2
+ import { basename } from 'node:path'
3
+
4
+ import { Persister } from '@juit/pgproxy-persister'
5
+ import { $blu, $grn, $gry, $ms, $und, $ylw, find, fs, log, merge, resolve } from '@plugjs/plug'
6
+
7
+ import type { InferSelectType } from '@juit/pgproxy-persister'
8
+
9
+ /* ========================================================================== *
10
+ * INTERNALS *
11
+ * ========================================================================== */
12
+
13
/* Migration file names look like "<number><separator><name>.sql", where the
 * separator is a single non-word character (e.g. "001-initial.sql") */
const migrationsExpression = /^([0-9]+)[^\w](.*)\.(sql)$/i

/** A migration to be applied, parsed from a matching ".sql" file */
type Migration = {
  /** SHA-256 digest of the raw file contents */
  sha256sum: Buffer,
  /** The SQL statements to run, as UTF-8 text */
  contents: string,
  /** The migration number, parsed from the file name */
  number: number,
  /** The migration name, parsed from the file name */
  name: string,
}

/** Schema of the "$migrations" bookkeeping table created by `migrate` */
interface MigrationSchema {
  $migrations: {
    group: { type: string, hasDefault: true },
    number: { type: number },
    name: { type: string },
    timestamp: { type: Date, hasDefault: true },
    sha256sum: { type: Buffer },
  },
}
31
+
32
+ /* ========================================================================== *
33
+ * EXPORTS *
34
+ * ========================================================================== */
35
+
36
export type MigrationOptions = {
  /** The directory where migrations SQL files reside (default: `./sql`) */
  migrations?: string,
  /**
   * The directory (or directories) where _additional_ migrations SQL files
   * reside (default: _undefined_).
   */
  additional?: string | string[],
  /** The group identifier for these migrations (default: `default`) */
  group?: string,
}
47
+
48
+ /** Migrate a database, applying all changes from a set of SQL files */
49
+ export async function migrate(
50
+ url: string | URL,
51
+ options?: MigrationOptions,
52
+ ): Promise<number> {
53
+ const {
54
+ /* Default to our "../sql" migrations directory */
55
+ migrations: migrationsDirectory = resolve('sql'),
56
+ /* Our default group name is "default" */
57
+ group = 'default',
58
+ /* Optional additional directory for migrations */
59
+ additional,
60
+ } = { ...options }
61
+
62
+ /* Read our directory containing all our migrations files */
63
+ let entries = await find('*.sql', { directory: migrationsDirectory })
64
+
65
+ /* If we have additional paths configured, read those too */
66
+ if (additional) {
67
+ for (const addition of [ additional ].flat()) {
68
+ const additional = await find('*.sql', { directory: addition })
69
+ entries = await merge([ entries, additional ])
70
+ }
71
+ }
72
+
73
+ /* For each entry, map it to null or a migration entry */
74
+ const promises = [ ...entries.absolutePaths() ].map(async (file) => {
75
+ /* Match our file name, the groups identify our variables */
76
+ const match = migrationsExpression.exec(basename(file))
77
+ if (! match) return // no match, no migration
78
+
79
+ /* Extract file, number and name from match */
80
+ const [ , number, name ] = match
81
+
82
+ /* Read up our source file (additions have "source" in the dirent) */
83
+ const contents = await fs.readFile(file)
84
+
85
+ /* Return our migration entry */
86
+ return {
87
+ sha256sum: crypto.createHash('sha256').update(contents).digest(),
88
+ contents: contents.toString('utf8'),
89
+ number: parseInt(number!),
90
+ name: name!,
91
+ }
92
+ })
93
+
94
+ /* Filter unmatched migrations and sort them by migration _number_ */
95
+ const migrationFiles = (await Promise.all(promises))
96
+ .filter((migration): migration is Migration => !! migration)
97
+ .sort((a, b) => a!.number - b!.number)
98
+
99
+ /* Start our gigantic migrations transaction */
100
+ const now = Date.now()
101
+ const persister = new Persister<MigrationSchema>(url)
102
+ return await persister.connect(async (connection) => {
103
+ const info = await connection.query<{ name: string }>('SELECT current_database() AS name')
104
+ log.notice(`Migrating database ${$ylw((info.rows[0]!.name))}`)
105
+
106
+ const model = connection.in('$migrations')
107
+
108
+ log.info('Beginning migrations transaction')
109
+ await connection.begin()
110
+
111
+ /* First of all, make sure we have our "$migrations" table */
112
+ log.info(`Ensuring presence of ${$blu('$migrations')} table`)
113
+ await connection.query(`
114
+ SET LOCAL client_min_messages TO WARNING;
115
+ CREATE TABLE IF NOT EXISTS "$migrations" (
116
+ "group" VARCHAR(32) NOT NULL DEFAULT 'default',
117
+ "number" INTEGER NOT NULL,
118
+ "name" TEXT NOT NULL,
119
+ "timestamp" TIMESTAMPTZ NOT NULL DEFAULT NOW(),
120
+ "sha256sum" BYTEA NOT NULL,
121
+ PRIMARY KEY ("group", "number")
122
+ );`)
123
+
124
+ /* Lock our migrations table */
125
+ log.info(`Lock exclusive use of ${$blu('$migrations')} table`)
126
+ await connection.query('LOCK TABLE "$migrations"')
127
+
128
+ /* Gather all applied migrations */
129
+ log.info(`Looking for entries in ${$blu('$migrations')} table`)
130
+ const result = await model.read({ group })
131
+
132
+ /* Reduce all existing migration, keying them by number */
133
+ const applied = result.reduce((applied, row) => {
134
+ const { group, number, name, timestamp, sha256sum } = row
135
+ applied[number] = { group, number, name, timestamp, sha256sum }
136
+ return applied
137
+ }, {} as Record<number, InferSelectType<MigrationSchema['$migrations']>>)
138
+
139
+ /* Apply our migrations */
140
+ let count = 0
141
+ for (const { number, name, contents, sha256sum } of migrationFiles) {
142
+ const num = `${number}`.padStart(3, '0')
143
+ const prev = applied[number]
144
+ if (prev) {
145
+ if (sha256sum.equals(prev.sha256sum)) {
146
+ const timestamp = prev.timestamp.toISOString().substring(0, 19).replace('T', ' ')
147
+ log.notice(`Skipping migration ${$gry(`${group}@`)}${$grn(num)}: ${$blu(name)}`, $gry(`applied on ${$und(timestamp)}`))
148
+ } else {
149
+ log.error(`Failed migration ${$gry(`${group}@`)}${$grn(num)}: ${$ylw(name)}`)
150
+ const currHash = sha256sum.toString('hex').substring(0, 6)
151
+ const prevHash = Buffer.from(prev.sha256sum).toString('hex').substring(0, 6)
152
+ throw new Error(`Migration ${group}@${num} (${name}) has checksum "${currHash}" but was recorded as "${prevHash}"`)
153
+ }
154
+ } else {
155
+ try {
156
+ log.notice(`Applying migration ${$gry(`${group}@`)}${$grn(num)}: ${$blu(name)}`)
157
+ await connection.query(contents)
158
+ await model.create({ group, number, name, sha256sum })
159
+ count ++
160
+ } catch (error: any) {
161
+ log.error(`Failed migration ${$gry(`${group}@`)}${$grn(num)}: ${$ylw(name)}`)
162
+ const message = error.message.split('\n').map((s: string) => ` ${s}`).join('\n')
163
+ error.message = `Failed migration ${group}@${num} (${name}):\n${message}`
164
+ throw error
165
+ }
166
+ }
167
+ }
168
+
169
+ /* Commit our migrations */
170
+ log.info('Committing migrations transaction')
171
+ await connection.commit()
172
+
173
+ /* All done */
174
+ log.notice(`Applied ${$ylw(count)} migrations ${$ms(Date.now() - now)}`)
175
+ return count
176
+ }).finally(() => persister.destroy())
177
+ }
@@ -0,0 +1,217 @@
1
+ import '@juit/pgproxy-client-psql'
2
+ import { PGOIDs } from '@juit/pgproxy-types'
3
+ import ts from 'typescript'
4
+
5
+ import * as types from './types'
6
+
7
+ import type { Schema } from './index'
8
+
9
+ /* ========================================================================== *
10
+ * TYPES AND CONSTANTS *
11
+ * ========================================================================== */
12
+
13
+ const exportModifier = ts.factory.createModifier(ts.SyntaxKind.ExportKeyword)
14
+ const endOfFileToken = ts.factory.createToken(ts.SyntaxKind.EndOfFileToken)
15
+
16
+ const oidTypes = {
17
+ /* Basic known types |_oid__|_typname______| */
18
+ [PGOIDs.bool]: types.booleanType, /* | 16 | bool | */
19
+ [PGOIDs.bytea]: types.uint8ArrayType, /* | 17 | bytea | */
20
+ [PGOIDs.int8]: types.bigintType, /* | 20 | int8 | */
21
+ [PGOIDs.int2]: types.numberType, /* | 21 | int2 | */
22
+ [PGOIDs.int4]: types.numberType, /* | 23 | int4 | */
23
+ [PGOIDs.oid]: types.numberType, /* | 26 | oid | */
24
+ [PGOIDs.json]: types.anyType, /* | 114 | json | */
25
+ [PGOIDs.point]: types.pgPointType, /* | 600 | point | */
26
+ [PGOIDs.float4]: types.numberType, /* | 700 | float4 | */
27
+ [PGOIDs.float8]: types.numberType, /* | 701 | float8 | */
28
+ [PGOIDs.circle]: types.pgCircleType, /* | 718 | circle | */
29
+ [PGOIDs.varchar]: types.stringType, /* | 1043 | varchar | */
30
+ [PGOIDs.timestamp]: types.dateType, /* | 1114 | timestamp | */
31
+ [PGOIDs.timestamptz]: types.dateType, /* | 1184 | timestamptz | */
32
+ [PGOIDs.interval]: types.pgIntervalType, /* | 1186 | interval | */
33
+ [PGOIDs.numeric]: types.stringType, /* | 1700 | numeric | */
34
+ [PGOIDs.jsonb]: types.anyType, /* | 3802 | jsonb | */
35
+
36
+ /* Special types |_oid__|_typname______| */
37
+ [PGOIDs.void]: types.voidType, /* | 2278 | void | */
38
+ [PGOIDs.xid]: types.numberType, /* | 28 | xid | */
39
+ [PGOIDs.xid8]: types.bigintType, /* | 5069 | xid8 | */
40
+ [PGOIDs._xid]: types.numberArrayType, /* | 1011 | _xid | */
41
+ [PGOIDs._xid8]: types.bigintArrayType, /* | 271 | _xid8 | */
42
+
43
+ /* Native array types of the above |_oid__|_typname______| */
44
+ [PGOIDs._bool]: types.booleanArrayType, /* | 1000 | _bool | */
45
+ [PGOIDs._bytea]: types.uint8ArrayArrayType, /* | 1001 | _bytea | */
46
+ [PGOIDs._int8]: types.bigintArrayType, /* | 1016 | _int8 | */
47
+ [PGOIDs._int2]: types.numberArrayType, /* | 1005 | _int2 | */
48
+ [PGOIDs._int4]: types.numberArrayType, /* | 1007 | _int4 | */
49
+ [PGOIDs._oid]: types.numberArrayType, /* | 1028 | _oid | */
50
+ [PGOIDs._json]: types.anyArrayType, /* | 199 | _json | */
51
+ [PGOIDs._point]: types.pgPointArrayType, /* | 1017 | _point | */
52
+ [PGOIDs._float4]: types.numberArrayType, /* | 1021 | _float4 | */
53
+ [PGOIDs._float8]: types.numberArrayType, /* | 1022 | _float8 | */
54
+ [PGOIDs._circle]: types.pgCircleArrayType, /* | 719 | _circle | */
55
+ [PGOIDs._timestamp]: types.dateArrayType, /* | 1115 | _timestamp | */
56
+ [PGOIDs._timestamptz]: types.dateArrayType, /* | 1185 | _timestamptz | */
57
+ [PGOIDs._interval]: types.pgIntervalArrayType, /* | 1187 | _interval | */
58
+ [PGOIDs._numeric]: types.stringArrayType, /* | 1231 | _numeric | */
59
+ [PGOIDs._jsonb]: types.anyArrayType, /* | 3807 | _jsonb | */
60
+
61
+ /* Other known array types |_oid__|_typname______| */
62
+ [PGOIDs._cidr]: types.stringArrayType, /* | 651 | _cidr | */
63
+ [PGOIDs._money]: types.stringArrayType, /* | 791 | _money | */
64
+ [PGOIDs._regproc]: types.stringArrayType, /* | 1008 | _regproc | */
65
+ [PGOIDs._text]: types.stringArrayType, /* | 1009 | _text | */
66
+ [PGOIDs._bpchar]: types.stringArrayType, /* | 1014 | _bpchar | */
67
+ [PGOIDs._varchar]: types.stringArrayType, /* | 1015 | _varchar | */
68
+ [PGOIDs._macaddr]: types.stringArrayType, /* | 1040 | _macaddr | */
69
+ [PGOIDs._inet]: types.stringArrayType, /* | 1041 | _inet | */
70
+ [PGOIDs._date]: types.stringArrayType, /* | 1182 | _date | */
71
+ [PGOIDs._time]: types.stringArrayType, /* | 1183 | _time | */
72
+ [PGOIDs._timetz]: types.stringArrayType, /* | 1270 | _timetz | */
73
+ [PGOIDs._uuid]: types.stringArrayType, /* | 2951 | _uuid | */
74
+
75
+ /* Range types |_oid__|_typname______| */
76
+ [PGOIDs.int4range]: types.numberRangeType, /* | 3904 | int4range | */
77
+ [PGOIDs.numrange]: types.numberRangeType, /* | 3906 | numrange | */
78
+ [PGOIDs.tsrange]: types.dateRangeType, /* | 3908 | tsrange | */
79
+ [PGOIDs.tstzrange]: types.dateRangeType, /* | 3910 | tstzrange | */
80
+ [PGOIDs.daterange]: types.stringRangeType, /* | 3912 | daterange | */
81
+ [PGOIDs.int8range]: types.bigintRangeType, /* | 3926 | int8range | */
82
+
83
+ /* Array of range types |_oid__|_typname______| */
84
+ [PGOIDs._int4range]: types.numberRangeArrayType, /* | 3905 | _int4range | */
85
+ [PGOIDs._numrange]: types.numberRangeArrayType, /* | 3907 | _numrange | */
86
+ [PGOIDs._tsrange]: types.dateRangeArrayType, /* | 3909 | _tsrange | */
87
+ [PGOIDs._tstzrange]: types.dateRangeArrayType, /* | 3911 | _tstzrange | */
88
+ [PGOIDs._daterange]: types.stringRangeArrayType, /* | 3913 | _daterange | */
89
+ [PGOIDs._int8range]: types.bigintRangeArrayType, /* | 3927 | _int8range | */
90
+ } satisfies Record<PGOIDs[keyof PGOIDs], ts.TypeNode>
91
+
92
+ const trueLiteralTypeNode = ts.factory.createLiteralTypeNode(
93
+ ts.factory.createToken(ts.SyntaxKind.TrueKeyword))
94
+
95
+ const isNullableSignature = ts.factory.createPropertySignature(
96
+ undefined, // no modifiers
97
+ 'isNullable',
98
+ undefined, // no question mark
99
+ trueLiteralTypeNode)
100
+
101
+ const hasDefaultSignature = ts.factory.createPropertySignature(
102
+ undefined, // no modifiers
103
+ 'hasDefault',
104
+ undefined, // no question mark
105
+ trueLiteralTypeNode)
106
+
107
+ /* ========================================================================== *
108
+ * EXPORTED *
109
+ * ========================================================================== */
110
+
111
+ /**
112
+ * Serialize the specified `Schema` as a TypeScript source file.
113
+ *
114
+ * If the `id` is unspecified, the default name `Schema` will be used.
115
+ */
116
+ export function serializeSchema(
117
+ schema: Schema,
118
+ id: string = 'Schema',
119
+ overrides: Record<number, ts.TypeNode> = {},
120
+ ): string {
121
+ /* Property signatures of all tables */
122
+ const tables: ts.PropertySignature[] = []
123
+
124
+ /* Iterate through our tables */
125
+ for (const [ tableName, table ] of Object.entries(schema)) {
126
+ /* Property signatures of all columns in the current table */
127
+ const columns: ts.PropertySignature[] = []
128
+
129
+ /* Iterate through our table's columns */
130
+ for (const [ columnName, column ] of Object.entries(table)) {
131
+ let typeNode: ts.TypeNode
132
+
133
+ /* First look at any type overridden when calling this */
134
+ if (column.oid in overrides) {
135
+ typeNode = overrides[column.oid]!
136
+
137
+ /* Then look at our well-known types */
138
+ } else if (column.oid in oidTypes) {
139
+ typeNode = oidTypes[column.oid as keyof typeof oidTypes]
140
+
141
+ /* Still nothing? Maybe it's an enum (a union type) */
142
+ } else if (column.enumValues) {
143
+ typeNode = ts.factory.createUnionTypeNode(
144
+ column.enumValues.map((value) =>
145
+ ts.factory.createLiteralTypeNode(
146
+ ts.factory.createStringLiteral(value),
147
+ )))
148
+
149
+ /* Anything else is a string... */
150
+ } else {
151
+ typeNode = types.stringType
152
+ }
153
+
154
+ /* Create the _type_ signature for this column */
155
+ const typeSignature = ts.factory.createPropertySignature(
156
+ undefined, // no modifiers
157
+ 'type',
158
+ undefined, // no question mark
159
+ typeNode,
160
+ )
161
+
162
+ /* Create the property signature for this column */
163
+ const definition: ts.PropertySignature[] = [ typeSignature ]
164
+ if (column.hasDefault) definition.push(hasDefaultSignature)
165
+ if (column.isNullable) definition.push(isNullableSignature)
166
+
167
+ const columnSignature = ts.factory.createPropertySignature(
168
+ undefined, // no modifiers
169
+ ts.factory.createStringLiteral(columnName),
170
+ undefined, // no question mark
171
+ ts.factory.createTypeLiteralNode(definition),
172
+ )
173
+
174
+ /* If we have a description, add it as a JSDoc comment */
175
+ if (column.description) {
176
+ ts.addSyntheticLeadingComment(
177
+ columnSignature,
178
+ ts.SyntaxKind.MultiLineCommentTrivia,
179
+ `* ${column.description} `,
180
+ true, // trailing newline!
181
+ )
182
+ }
183
+
184
+ /* All done with this column, push its signature */
185
+ columns.push(columnSignature)
186
+ }
187
+
188
+ /* Create the table signature from all the columns */
189
+ const tableSignature = ts.factory.createPropertySignature(
190
+ undefined, // modifiers
191
+ ts.factory.createStringLiteral(tableName),
192
+ undefined, // question mark
193
+ ts.factory.createTypeLiteralNode(columns), // as any,
194
+ )
195
+ tables.push(tableSignature)
196
+ }
197
+
198
+ /* Create our schema declaration, as an exported interface */
199
+ const declaration = ts.factory.createInterfaceDeclaration(
200
+ [ exportModifier ], // export modifier
201
+ id, // the name of the schema, "Schema" or whatever we were given
202
+ undefined, // no type parameters
203
+ undefined, // no heritage clause
204
+ tables, // all our tables signatures
205
+ )
206
+
207
+ /* Wrap our source interface declaration in a source file */
208
+ const source = ts.factory.createSourceFile(
209
+ [ declaration ],
210
+ endOfFileToken,
211
+ ts.NodeFlags.None,
212
+ )
213
+
214
+ /* Create a printer, and stringify our source file */
215
+ const content = ts.createPrinter().printFile(source)
216
+ return content
217
+ }
package/src/types.ts ADDED
@@ -0,0 +1,51 @@
1
+ import ts from 'typescript'
2
+
3
+ import { makeImportType, makePostgresArrayType } from './helpers'
4
+
5
+ /* Null and void */
6
+ export const nullType = ts.factory.createLiteralTypeNode(ts.factory.createNull())
7
+ export const voidType = ts.factory.createKeywordTypeNode(ts.SyntaxKind.VoidKeyword)
8
+
9
+ /* Basic types and primitives */
10
+ export const anyType = ts.factory.createKeywordTypeNode(ts.SyntaxKind.AnyKeyword)
11
+ export const bigintType = ts.factory.createKeywordTypeNode(ts.SyntaxKind.BigIntKeyword)
12
+ export const booleanType = ts.factory.createKeywordTypeNode(ts.SyntaxKind.BooleanKeyword)
13
+ export const numberType = ts.factory.createKeywordTypeNode(ts.SyntaxKind.NumberKeyword)
14
+ export const stringType = ts.factory.createKeywordTypeNode(ts.SyntaxKind.StringKeyword)
15
+
16
+ /* Objects */
17
+ export const dateType = ts.factory.createTypeReferenceNode('Date')
18
+ export const uint8ArrayType = ts.factory.createTypeReferenceNode('Uint8Array')
19
+
20
+ /* Basic arrays */
21
+ export const anyArrayType = makePostgresArrayType(anyType)
22
+ export const bigintArrayType = makePostgresArrayType(bigintType)
23
+ export const booleanArrayType = makePostgresArrayType(booleanType)
24
+ export const numberArrayType = makePostgresArrayType(numberType)
25
+ export const stringArrayType = makePostgresArrayType(stringType)
26
+
27
+ /* Object arrays */
28
+ export const dateArrayType = makePostgresArrayType(dateType)
29
+ export const uint8ArrayArrayType = makePostgresArrayType(uint8ArrayType)
30
+
31
+ /* Imported */
32
+ export const pgCircleType: ts.TypeNode = makeImportType('@juit/pgproxy-types', 'PGCircle')
33
+ export const pgIntervalType: ts.TypeNode = makeImportType('@juit/pgproxy-types', 'PGInterval')
34
+ export const pgPointType: ts.TypeNode = makeImportType('@juit/pgproxy-types', 'PGPoint')
35
+
36
+ /* Imported arrays */
37
+ export const pgCircleArrayType = makePostgresArrayType(pgCircleType)
38
+ export const pgIntervalArrayType = makePostgresArrayType(pgIntervalType)
39
+ export const pgPointArrayType = makePostgresArrayType(pgPointType)
40
+
41
+ /* Ranges */
42
+ export const bigintRangeType = makeImportType('@juit/pgproxy-types', 'PGRange', bigintType)
43
+ export const numberRangeType = makeImportType('@juit/pgproxy-types', 'PGRange', numberType)
44
+ export const stringRangeType = makeImportType('@juit/pgproxy-types', 'PGRange', stringType)
45
+ export const dateRangeType = makeImportType('@juit/pgproxy-types', 'PGRange', dateType)
46
+
47
+ /* Range arrays */
48
+ export const bigintRangeArrayType = makePostgresArrayType(bigintRangeType)
49
+ export const numberRangeArrayType = makePostgresArrayType(numberRangeType)
50
+ export const stringRangeArrayType = makePostgresArrayType(stringRangeType)
51
+ export const dateRangeArrayType = makePostgresArrayType(dateRangeType)