@juit/pgproxy-utils 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,119 @@
1
+ # PostgreSQL Proxy Utilities
2
+
3
+ This package provides a number of different utilities for developing and testing
4
+ with `PGProxy`.
5
+
6
+ * [Test Databases](#test-databases)
7
+ * [Database Migrations](#database-migrations)
8
+ * [Persister Schema Generation](#persister-schema-generation)
9
+ * [PGProxy](https://github.com/juitnow/juit-pgproxy/blob/main/README.md)
10
+ * [Copyright Notice](https://github.com/juitnow/juit-pgproxy/blob/main/NOTICE.md)
11
+ * [License](https://github.com/juitnow/juit-pgproxy/blob/main/NOTICE.md)
12
+
13
+ ### Test Databases
14
+
15
+ A few helpers are available to create and drop test databases while developing:
16
+
17
+ * `testdb(...)`: return a test database _name_. An optional parameter can be
18
+ used to specify the database name _prefix_ (defaults to `test`).
19
+ * `createdb(name, url)`: actually _create_ a new database, and return its name.
20
+ * `name`: the name of the database to create, defaults to the value returned
21
+ by calling `testdb()`.
22
+ * `url`: the URL to connect to for creating the database, defaults to
23
+ `psql:///postgres` (the local PostgreSQL instance via `libpq`)
24
+ * `dropdb(name, url)`: drop the specified database.
25
+ * `name`: the name of the database to drop, _required_.
26
+ * `url`: the URL to connect to for dropping the database, defaults to
27
+ `psql:///postgres` (the local PostgreSQL instance via `libpq`)
28
+
29
+ Normally, those methods are used when running tests, in a pattern similar to
30
+ the following:
31
+
32
+ ```ts
33
+ let databaseName: string
34
+
35
+ beforeAll(async () => {
36
+ databaseName = await createdb()
37
+ })
38
+
39
+ afterAll(async () => {
40
+   await dropdb(databaseName)
41
+ })
42
+
43
+ it('should run a test', async () => {
44
+ const client = new PGClient(databaseName)
45
+ /// ... use the client to test
46
+ })
47
+ ```
48
+
49
+
50
+ ### Database Migrations
51
+
52
+ The `migrate(...)` function provides an extremely simplistic way to migrate
53
+ databases.
54
+
55
+ Migration files should have names like `001-initial.sql`, `002-second.sql`,
56
+ basically stating the migration _order_ followed by a simple name describing it.
57
+
58
+ All migrations will be recorded in the database using the `$migrations` table.
59
+
60
+ The `migrate(...)` function requires two arguments:
61
+
62
+ * `url`: the URL of the database to migrate, _required_.
63
+ * `options`: an optional set of options including:
64
+ * `migrations`: the _directory_ where migration files reside, relative to the
65
+ current working directory, defaults to `./sql`.
66
+   * `additional`: an additional set of migrations to be run (for example,
67
+     migrations required to run unit tests), defaults to _undefined_.
68
+ * `group`: a logical name grouping migrations together, when multiple sources
69
+ of database migrations exist in the same database, defaults to `default`.
70
+
71
+ In unit tests, for example, migrations can be applied in the following way:
72
+
73
+ ```ts
74
+ let databaseName: string
75
+
76
+ beforeAll(async () => {
77
+ databaseName = await createdb()
78
+ await migrate(databaseName, {
79
+ migrations: './migrations',
80
+ additional: './test/migrations',
81
+ })
82
+ })
83
+
84
+ // run your tests, all migrations will be applied beforehand
85
+ ```
86
+
87
+
88
+ ### Persister Schema Generation
89
+
90
+ Schema definitions for our `Persister` models (see `@juit/pgproxy-persister`)
91
+ can be generated using a couple of functions:
92
+
93
+ * `extractSchema(...)`: extract the `Schema` definition from a database.
94
+ * `serializeSchema(...)`: serialize the extracted `Schema` as a Typescript DTS.
95
+
96
+ The `extractSchema(...)` function takes a couple of arguments:
97
+
98
+ * `url`: the URL of the database whose schemas are to be extracted, _required_.
99
+ * `schemas`: an array of _database schema names_ to extract, defaulting to the
100
+ single `['public']` schema.
101
+
102
+ The `serializeSchema(...)` function takes the following arguments:
103
+
104
+ * `schema`: the `Schema` for which the DTS should be generated, _required_.
105
+ * `id`: the exported identifier of the schema, optional, defaults to `Schema`.
106
+ * `overrides`: A `Record` mapping OID numbers to TypeScript types, in case
107
+ the registry used by the client is capable of handling them. All known OIDs
108
+ from the `@juit/pgproxy-types` library are already covered.
109
+
110
+ An extra couple of utilities are available for the schema extractor:
111
+
112
+ * `types`: a collection of TypeScript types representing the common, well known
113
+ types converted by `PGProxy` (e.g. _strings_, _numbers_, _arrays_, ...).
114
+ * `helpers`: helper functions to generate extra types for `serializeSchema`:
115
+ * `makePostgresArrayType(...)`: given a type `T`, it'll return a type
116
+ representing a postgres array, that is `(T | null)[]`.
117
+ * `makeImportType(module, name, args)`: generate a type imported from the
118
+ specified module, using the specified type arguments, for example:
119
+ `import('myModule').MyType<MyArg1, MyArg2>`
@@ -0,0 +1,60 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
+ // database.ts
21
+ var database_exports = {};
22
+ __export(database_exports, {
23
+ createdb: () => createdb,
24
+ dropdb: () => dropdb,
25
+ testdb: () => testdb
26
+ });
27
+ module.exports = __toCommonJS(database_exports);
28
+ var import_pgproxy_persister = require("@juit/pgproxy-persister");
29
+ var import_plug = require("@plugjs/plug");
30
+ var NAME_EXPR = /^[-\w]{4,}$/;
31
+ function testdb(prefix = "test") {
32
+ if (!NAME_EXPR.test(prefix))
33
+ throw new Error(`Invalid database name prefix "${prefix}"`);
34
+ const random = String(Math.floor(Math.random() * 1e4)).padStart(4, "0");
35
+ return `${prefix}-${(/* @__PURE__ */ new Date()).toISOString().replace(/[^\d]/g, "")}-${random}`;
36
+ }
37
+ async function createdb(name = testdb(), url = "psql:///postgres") {
38
+ if (!NAME_EXPR.test(name))
39
+ throw new Error(`Invalid database name "${name}"`);
40
+ import_plug.log.notice(`Creating database ${(0, import_plug.$ylw)(name)}`);
41
+ const persister = new import_pgproxy_persister.Persister(url);
42
+ await persister.query(`CREATE DATABASE ${(0, import_pgproxy_persister.escape)(name)}`);
43
+ await persister.destroy();
44
+ return name;
45
+ }
46
+ async function dropdb(name, url = "psql:///postgres") {
47
+ if (!NAME_EXPR.test(name))
48
+ throw new Error(`Invalid database name "${name}"`);
49
+ import_plug.log.notice(`Dropping database ${(0, import_plug.$ylw)(name)}`);
50
+ const persister = new import_pgproxy_persister.Persister(url);
51
+ await persister.query(`DROP DATABASE IF EXISTS ${(0, import_pgproxy_persister.escape)(name)}`);
52
+ await persister.destroy();
53
+ }
54
+ // Annotate the CommonJS export names for ESM import in node:
55
+ 0 && (module.exports = {
56
+ createdb,
57
+ dropdb,
58
+ testdb
59
+ });
60
+ //# sourceMappingURL=database.cjs.map
@@ -0,0 +1,6 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../src/database.ts"],
4
+ "mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BAAkC;AAClC,kBAA0B;AAE1B,IAAM,YAAY;AAGX,SAAS,OAAO,SAAS,QAAgB;AAC9C,MAAI,CAAE,UAAU,KAAK,MAAM;AAAG,UAAM,IAAI,MAAM,iCAAiC,MAAM,GAAG;AACxF,QAAM,SAAS,OAAO,KAAK,MAAM,KAAK,OAAO,IAAI,GAAK,CAAC,EAAE,SAAS,GAAG,GAAG;AACxE,SAAO,GAAG,MAAM,KAAI,oBAAI,KAAK,GAAE,YAAY,EAAE,QAAQ,UAAU,EAAE,CAAC,IAAI,MAAM;AAC9E;AASA,eAAsB,SAClB,OAAO,OAAO,GACd,MAAoB,oBACL;AACjB,MAAI,CAAE,UAAU,KAAK,IAAI;AAAG,UAAM,IAAI,MAAM,0BAA0B,IAAI,GAAG;AAC7E,kBAAI,OAAO,yBAAqB,kBAAK,IAAI,CAAC,EAAE;AAE5C,QAAM,YAAY,IAAI,mCAAU,GAAG;AACnC,QAAM,UAAU,MAAM,uBAAmB,iCAAO,IAAI,CAAC,EAAE;AACvD,QAAM,UAAU,QAAQ;AACxB,SAAO;AACT;AAOA,eAAsB,OAClB,MACA,MAAoB,oBACP;AACf,MAAI,CAAE,UAAU,KAAK,IAAI;AAAG,UAAM,IAAI,MAAM,0BAA0B,IAAI,GAAG;AAC7E,kBAAI,OAAO,yBAAqB,kBAAK,IAAI,CAAC,EAAE;AAE5C,QAAM,YAAY,IAAI,mCAAU,GAAG;AACnC,QAAM,UAAU,MAAM,+BAA2B,iCAAO,IAAI,CAAC,EAAE;AAC/D,QAAM,UAAU,QAAQ;AAC1B;",
5
+ "names": []
6
+ }
@@ -0,0 +1,17 @@
1
+ /// <reference types="node" />
2
+ /** Create a test database name from a prefix and some randomness */
3
+ export declare function testdb(prefix?: string): string;
4
+ /**
5
+ * Create a database with the specified name (or a test database).
6
+ *
7
+ * The default database name to use is the result of calling {@link testdb()}.
8
+ *
9
+ * The default URL to use when creating the database is `psql:///postgres`.
10
+ */
11
+ export declare function createdb(name?: string, url?: string | URL): Promise<string>;
12
+ /**
13
+ * Drop the database with the specified name.
14
+ *
15
+ * The default URL to use when creating the database is `psql:///postgres`.
16
+ */
17
+ export declare function dropdb(name: string, url?: string | URL): Promise<void>;
@@ -0,0 +1,33 @@
1
+ // database.ts
2
+ import { Persister, escape } from "@juit/pgproxy-persister";
3
+ import { $ylw, log } from "@plugjs/plug";
4
+ var NAME_EXPR = /^[-\w]{4,}$/;
5
+ function testdb(prefix = "test") {
6
+ if (!NAME_EXPR.test(prefix))
7
+ throw new Error(`Invalid database name prefix "${prefix}"`);
8
+ const random = String(Math.floor(Math.random() * 1e4)).padStart(4, "0");
9
+ return `${prefix}-${(/* @__PURE__ */ new Date()).toISOString().replace(/[^\d]/g, "")}-${random}`;
10
+ }
11
+ async function createdb(name = testdb(), url = "psql:///postgres") {
12
+ if (!NAME_EXPR.test(name))
13
+ throw new Error(`Invalid database name "${name}"`);
14
+ log.notice(`Creating database ${$ylw(name)}`);
15
+ const persister = new Persister(url);
16
+ await persister.query(`CREATE DATABASE ${escape(name)}`);
17
+ await persister.destroy();
18
+ return name;
19
+ }
20
+ async function dropdb(name, url = "psql:///postgres") {
21
+ if (!NAME_EXPR.test(name))
22
+ throw new Error(`Invalid database name "${name}"`);
23
+ log.notice(`Dropping database ${$ylw(name)}`);
24
+ const persister = new Persister(url);
25
+ await persister.query(`DROP DATABASE IF EXISTS ${escape(name)}`);
26
+ await persister.destroy();
27
+ }
28
+ export {
29
+ createdb,
30
+ dropdb,
31
+ testdb
32
+ };
33
+ //# sourceMappingURL=database.mjs.map
@@ -0,0 +1,6 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../src/database.ts"],
4
+ "mappings": ";AAAA,SAAS,WAAW,cAAc;AAClC,SAAS,MAAM,WAAW;AAE1B,IAAM,YAAY;AAGX,SAAS,OAAO,SAAS,QAAgB;AAC9C,MAAI,CAAE,UAAU,KAAK,MAAM;AAAG,UAAM,IAAI,MAAM,iCAAiC,MAAM,GAAG;AACxF,QAAM,SAAS,OAAO,KAAK,MAAM,KAAK,OAAO,IAAI,GAAK,CAAC,EAAE,SAAS,GAAG,GAAG;AACxE,SAAO,GAAG,MAAM,KAAI,oBAAI,KAAK,GAAE,YAAY,EAAE,QAAQ,UAAU,EAAE,CAAC,IAAI,MAAM;AAC9E;AASA,eAAsB,SAClB,OAAO,OAAO,GACd,MAAoB,oBACL;AACjB,MAAI,CAAE,UAAU,KAAK,IAAI;AAAG,UAAM,IAAI,MAAM,0BAA0B,IAAI,GAAG;AAC7E,MAAI,OAAO,qBAAqB,KAAK,IAAI,CAAC,EAAE;AAE5C,QAAM,YAAY,IAAI,UAAU,GAAG;AACnC,QAAM,UAAU,MAAM,mBAAmB,OAAO,IAAI,CAAC,EAAE;AACvD,QAAM,UAAU,QAAQ;AACxB,SAAO;AACT;AAOA,eAAsB,OAClB,MACA,MAAoB,oBACP;AACf,MAAI,CAAE,UAAU,KAAK,IAAI;AAAG,UAAM,IAAI,MAAM,0BAA0B,IAAI,GAAG;AAC7E,MAAI,OAAO,qBAAqB,KAAK,IAAI,CAAC,EAAE;AAE5C,QAAM,YAAY,IAAI,UAAU,GAAG;AACnC,QAAM,UAAU,MAAM,2BAA2B,OAAO,IAAI,CAAC,EAAE;AAC/D,QAAM,UAAU,QAAQ;AAC1B;",
5
+ "names": []
6
+ }
@@ -0,0 +1,119 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
+ // extract.ts
21
+ var extract_exports = {};
22
+ __export(extract_exports, {
23
+ extractSchema: () => extractSchema
24
+ });
25
+ module.exports = __toCommonJS(extract_exports);
26
+ var import_pgproxy_client = require("@juit/pgproxy-client");
27
+ var import_pgproxy_client_psql = require("@juit/pgproxy-client-psql");
28
+ var query = `
29
+ SELECT
30
+ "c"."table_schema" AS "schema",
31
+ "c"."table_name" AS "table",
32
+ "c"."column_name" AS "column",
33
+ "c"."is_nullable"::bool AS "isNullable",
34
+ CASE WHEN "c"."column_default" IS NULL
35
+ THEN false
36
+ ELSE true
37
+ END AS "hasDefault",
38
+ "t"."oid" AS "oid",
39
+ "e"."enumValues" AS "enumValues",
40
+ "d"."description" AS "description"
41
+ FROM
42
+ "information_schema"."columns" AS "c"
43
+ -- join the pg_catalog.pg_type to bring in the OIDs
44
+ INNER JOIN
45
+ "pg_catalog"."pg_type" AS "t"
46
+ ON
47
+ "c"."udt_name" = "t"."typname"
48
+ -- join the pg_catalog.pg_statio_all_tables to bring in the OBJIDs
49
+ INNER JOIN
50
+ "pg_catalog"."pg_statio_all_tables" AS "s"
51
+ ON
52
+ "c"."table_schema" = "s"."schemaname" AND
53
+ "c"."table_name" = "s"."relname"
54
+ -- join the pg_catalog.pg_enum to bring in the ENUM VALUESs
55
+ LEFT JOIN (
56
+ SELECT
57
+ "enumtypid",
58
+ "array_agg"("enumlabel")::varchar[] AS "enumValues"
59
+ FROM
60
+ "pg_catalog"."pg_enum"
61
+ GROUP BY
62
+ "enumtypid"
63
+ ) "e"
64
+ ON
65
+ "t"."oid" = "e"."enumtypid"
66
+ -- join the pg_catalog.pg_description to bring in the column DESCRIPTIONs
67
+ LEFT JOIN
68
+ "pg_catalog"."pg_description" AS "d"
69
+ ON
70
+ "d"."objoid" = "s"."relid" AND
71
+ "d"."objsubid" = "c"."ordinal_position"
72
+ -- restrict to our schemas
73
+ WHERE
74
+ "c"."table_schema" = ANY($1)
75
+ -- sanity in ordering
76
+ ORDER BY
77
+ "c"."table_schema",
78
+ "c"."table_name",
79
+ "c"."ordinal_position"`;
80
+ function stripUndefined(object) {
81
+ for (const key of Object.keys(object)) {
82
+ if (object[key] === void 0)
83
+ delete object[key];
84
+ }
85
+ return object;
86
+ }
87
+ async function extractSchema(url, schemas = [], extractAll = false) {
88
+ if (schemas.length === 0)
89
+ schemas.push("public");
90
+ const client = new import_pgproxy_client.PGClient(url);
91
+ let rows;
92
+ try {
93
+ const result = await client.query(query, [schemas]);
94
+ rows = result.rows;
95
+ } finally {
96
+ await client.destroy();
97
+ }
98
+ const schemaDef = {};
99
+ for (const row of rows) {
100
+ const { schema, table, column, description, enumValues, ...def } = row;
101
+ if (!extractAll) {
102
+ if (table.match(/^[^a-z]/i) || column.match(/^[^a-z]/i))
103
+ continue;
104
+ }
105
+ const name = schema === "public" ? `${table}` : `${schema}.${table}`;
106
+ const tableDef = schemaDef[name] || (schemaDef[name] = {});
107
+ tableDef[column] = stripUndefined({
108
+ ...def,
109
+ description: description?.trim() ? description.trim() : void 0,
110
+ enumValues: enumValues && enumValues.length ? enumValues : void 0
111
+ });
112
+ }
113
+ return schemaDef;
114
+ }
115
+ // Annotate the CommonJS export names for ESM import in node:
116
+ 0 && (module.exports = {
117
+ extractSchema
118
+ });
119
+ //# sourceMappingURL=extract.cjs.map
@@ -0,0 +1,6 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../src/extract.ts"],
4
+ "mappings": ";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAAyB;AACzB,iCAAO;AASP,IAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAkEd,SAAS,eAA8C,QAAc;AACnE,aAAW,OAAO,OAAO,KAAK,MAAM,GAAG;AACrC,QAAI,OAAO,GAAG,MAAM;AAAW,aAAO,OAAO,GAAG;AAAA,EAClD;AACA,SAAO;AACT;AAoBA,eAAsB,cAClB,KACA,UAAoB,CAAC,GACrB,aAAsB,OACP;AACjB,MAAI,QAAQ,WAAW;AAAG,YAAQ,KAAK,QAAQ;AAE/C,QAAM,SAAS,IAAI,+BAAS,GAAG;AAC/B,MAAI;AACJ,MAAI;AACF,UAAM,SAAS,MAAM,OAAO,MAAiB,OAAO,CAAE,OAAQ,CAAC;AAC/D,WAAO,OAAO;AAAA,EAChB,UAAE;AACA,UAAM,OAAO,QAAQ;AAAA,EACvB;AAEA,QAAM,YAAoB,CAAC;AAE3B,aAAW,OAAO,MAAM;AACtB,UAAM,EAAE,QAAQ,OAAO,QAAQ,aAAa,YAAY,GAAG,IAAI,IAAI;AAEnE,QAAI,CAAE,YAAY;AAChB,UAAK,MAAM,MAAM,UAAU,KAAO,OAAO,MAAM,UAAU;AAAI;AAAA,IAC/D;AAEA,UAAM,OAAO,WAAW,WAAW,GAAG,KAAK,KAAK,GAAG,MAAM,IAAI,KAAK;AAClE,UAAM,WAAW,UAAU,IAAI,MAAM,UAAU,IAAI,IAAI,CAAC;AAExD,aAAS,MAAM,IAAI,eAAe;AAAA,MAAE,GAAG;AAAA,MACrC,aAAa,aAAa,KAAK,IAAI,YAAY,KAAK,IAAI;AAAA,MACxD,YAAa,cAAc,WAAW,SAAU,aAAoB;AAAA,IACtE,CAAC;AAAA,EACH;AAEA,SAAO;AACT;",
5
+ "names": []
6
+ }
@@ -0,0 +1,18 @@
1
+ /// <reference types="node" />
2
+ import '@juit/pgproxy-client-psql';
3
+ import type { Schema } from './index';
4
+ /**
5
+  * Extract a {@link Schema} from an array of PostgreSQL schema names.
6
+ *
7
+ * When the `schemas` parameter is undefined (or an empty array), then the
8
+ * single `public` schema will be targeted for extraction.
9
+ *
10
+ * Furthermore, unless the `extractAll` flag is set to `true`, only tables and
11
+ * columns starting with a _latin letter_ (a...z) will be included in the
12
+ * resulting {@link Schema}.
13
+ *
14
+ * @param url - The URL of the database to connect to.
15
+ * @param schemas - The array of schema names to target for extraction.
16
+ * @param extractAll - Extract all tables and column definitions.
17
+ */
18
+ export declare function extractSchema(url: URL | string, schemas?: string[], extractAll?: boolean): Promise<Schema>;
@@ -0,0 +1,94 @@
1
+ // extract.ts
2
+ import { PGClient } from "@juit/pgproxy-client";
3
+ import "@juit/pgproxy-client-psql";
4
+ var query = `
5
+ SELECT
6
+ "c"."table_schema" AS "schema",
7
+ "c"."table_name" AS "table",
8
+ "c"."column_name" AS "column",
9
+ "c"."is_nullable"::bool AS "isNullable",
10
+ CASE WHEN "c"."column_default" IS NULL
11
+ THEN false
12
+ ELSE true
13
+ END AS "hasDefault",
14
+ "t"."oid" AS "oid",
15
+ "e"."enumValues" AS "enumValues",
16
+ "d"."description" AS "description"
17
+ FROM
18
+ "information_schema"."columns" AS "c"
19
+ -- join the pg_catalog.pg_type to bring in the OIDs
20
+ INNER JOIN
21
+ "pg_catalog"."pg_type" AS "t"
22
+ ON
23
+ "c"."udt_name" = "t"."typname"
24
+ -- join the pg_catalog.pg_statio_all_tables to bring in the OBJIDs
25
+ INNER JOIN
26
+ "pg_catalog"."pg_statio_all_tables" AS "s"
27
+ ON
28
+ "c"."table_schema" = "s"."schemaname" AND
29
+ "c"."table_name" = "s"."relname"
30
+ -- join the pg_catalog.pg_enum to bring in the ENUM VALUESs
31
+ LEFT JOIN (
32
+ SELECT
33
+ "enumtypid",
34
+ "array_agg"("enumlabel")::varchar[] AS "enumValues"
35
+ FROM
36
+ "pg_catalog"."pg_enum"
37
+ GROUP BY
38
+ "enumtypid"
39
+ ) "e"
40
+ ON
41
+ "t"."oid" = "e"."enumtypid"
42
+ -- join the pg_catalog.pg_description to bring in the column DESCRIPTIONs
43
+ LEFT JOIN
44
+ "pg_catalog"."pg_description" AS "d"
45
+ ON
46
+ "d"."objoid" = "s"."relid" AND
47
+ "d"."objsubid" = "c"."ordinal_position"
48
+ -- restrict to our schemas
49
+ WHERE
50
+ "c"."table_schema" = ANY($1)
51
+ -- sanity in ordering
52
+ ORDER BY
53
+ "c"."table_schema",
54
+ "c"."table_name",
55
+ "c"."ordinal_position"`;
56
+ function stripUndefined(object) {
57
+ for (const key of Object.keys(object)) {
58
+ if (object[key] === void 0)
59
+ delete object[key];
60
+ }
61
+ return object;
62
+ }
63
+ async function extractSchema(url, schemas = [], extractAll = false) {
64
+ if (schemas.length === 0)
65
+ schemas.push("public");
66
+ const client = new PGClient(url);
67
+ let rows;
68
+ try {
69
+ const result = await client.query(query, [schemas]);
70
+ rows = result.rows;
71
+ } finally {
72
+ await client.destroy();
73
+ }
74
+ const schemaDef = {};
75
+ for (const row of rows) {
76
+ const { schema, table, column, description, enumValues, ...def } = row;
77
+ if (!extractAll) {
78
+ if (table.match(/^[^a-z]/i) || column.match(/^[^a-z]/i))
79
+ continue;
80
+ }
81
+ const name = schema === "public" ? `${table}` : `${schema}.${table}`;
82
+ const tableDef = schemaDef[name] || (schemaDef[name] = {});
83
+ tableDef[column] = stripUndefined({
84
+ ...def,
85
+ description: description?.trim() ? description.trim() : void 0,
86
+ enumValues: enumValues && enumValues.length ? enumValues : void 0
87
+ });
88
+ }
89
+ return schemaDef;
90
+ }
91
+ export {
92
+ extractSchema
93
+ };
94
+ //# sourceMappingURL=extract.mjs.map
@@ -0,0 +1,6 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../src/extract.ts"],
4
+ "mappings": ";AAAA,SAAS,gBAAgB;AACzB,OAAO;AASP,IAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAkEd,SAAS,eAA8C,QAAc;AACnE,aAAW,OAAO,OAAO,KAAK,MAAM,GAAG;AACrC,QAAI,OAAO,GAAG,MAAM;AAAW,aAAO,OAAO,GAAG;AAAA,EAClD;AACA,SAAO;AACT;AAoBA,eAAsB,cAClB,KACA,UAAoB,CAAC,GACrB,aAAsB,OACP;AACjB,MAAI,QAAQ,WAAW;AAAG,YAAQ,KAAK,QAAQ;AAE/C,QAAM,SAAS,IAAI,SAAS,GAAG;AAC/B,MAAI;AACJ,MAAI;AACF,UAAM,SAAS,MAAM,OAAO,MAAiB,OAAO,CAAE,OAAQ,CAAC;AAC/D,WAAO,OAAO;AAAA,EAChB,UAAE;AACA,UAAM,OAAO,QAAQ;AAAA,EACvB;AAEA,QAAM,YAAoB,CAAC;AAE3B,aAAW,OAAO,MAAM;AACtB,UAAM,EAAE,QAAQ,OAAO,QAAQ,aAAa,YAAY,GAAG,IAAI,IAAI;AAEnE,QAAI,CAAE,YAAY;AAChB,UAAK,MAAM,MAAM,UAAU,KAAO,OAAO,MAAM,UAAU;AAAI;AAAA,IAC/D;AAEA,UAAM,OAAO,WAAW,WAAW,GAAG,KAAK,KAAK,GAAG,MAAM,IAAI,KAAK;AAClE,UAAM,WAAW,UAAU,IAAI,MAAM,UAAU,IAAI,IAAI,CAAC;AAExD,aAAS,MAAM,IAAI,eAAe;AAAA,MAAE,GAAG;AAAA,MACrC,aAAa,aAAa,KAAK,IAAI,YAAY,KAAK,IAAI;AAAA,MACxD,YAAa,cAAc,WAAW,SAAU,aAAoB;AAAA,IACtE,CAAC;AAAA,EACH;AAEA,SAAO;AACT;",
5
+ "names": []
6
+ }
@@ -0,0 +1,65 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __export = (target, all) => {
9
+ for (var name in all)
10
+ __defProp(target, name, { get: all[name], enumerable: true });
11
+ };
12
+ var __copyProps = (to, from, except, desc) => {
13
+ if (from && typeof from === "object" || typeof from === "function") {
14
+ for (let key of __getOwnPropNames(from))
15
+ if (!__hasOwnProp.call(to, key) && key !== except)
16
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
17
+ }
18
+ return to;
19
+ };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
28
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
+
30
+ // helpers.ts
31
+ var helpers_exports = {};
32
+ __export(helpers_exports, {
33
+ makeImportType: () => makeImportType,
34
+ makePostgresArrayType: () => makePostgresArrayType
35
+ });
36
+ module.exports = __toCommonJS(helpers_exports);
37
+ var import_typescript = __toESM(require("typescript"));
38
+ function makePostgresArrayType(type) {
39
+ const nullable = import_typescript.default.factory.createLiteralTypeNode(import_typescript.default.factory.createNull());
40
+ const union = import_typescript.default.factory.createUnionTypeNode([type, nullable]);
41
+ const array = import_typescript.default.factory.createArrayTypeNode(union);
42
+ return array;
43
+ }
44
+ function makeImportType(module2, name, args = []) {
45
+ if (!Array.isArray(args))
46
+ args = [args];
47
+ return import_typescript.default.factory.createImportTypeNode(
48
+ // ..................... "import"
49
+ import_typescript.default.factory.createLiteralTypeNode(
50
+ import_typescript.default.factory.createStringLiteral(module2)
51
+ ),
52
+ // ............ "('module')"
53
+ void 0,
54
+ // import assertions
55
+ import_typescript.default.factory.createIdentifier(name),
56
+ // ........................ ".Type"
57
+ args
58
+ );
59
+ }
60
+ // Annotate the CommonJS export names for ESM import in node:
61
+ 0 && (module.exports = {
62
+ makeImportType,
63
+ makePostgresArrayType
64
+ });
65
+ //# sourceMappingURL=helpers.cjs.map
@@ -0,0 +1,6 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../src/helpers.ts"],
4
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAe;AAMR,SAAS,sBAAsB,MAAqC;AACzE,QAAM,WAAW,kBAAAA,QAAG,QAAQ,sBAAsB,kBAAAA,QAAG,QAAQ,WAAW,CAAC;AACzE,QAAM,QAAQ,kBAAAA,QAAG,QAAQ,oBAAoB,CAAE,MAAM,QAAS,CAAC;AAC/D,QAAM,QAAQ,kBAAAA,QAAG,QAAQ,oBAAoB,KAAK;AAClD,SAAO;AACT;AAKO,SAAS,eACZC,SACA,MACA,OAAoC,CAAC,GACpB;AACnB,MAAI,CAAE,MAAM,QAAQ,IAAI;AAAG,WAAO,CAAE,IAAK;AAEzC,SAAO,kBAAAD,QAAG,QAAQ;AAAA;AAAA,IACd,kBAAAA,QAAG,QAAQ;AAAA,MACP,kBAAAA,QAAG,QAAQ,oBAAoBC,OAAM;AAAA,IAAC;AAAA;AAAA,IAC1C;AAAA;AAAA,IACA,kBAAAD,QAAG,QAAQ,iBAAiB,IAAI;AAAA;AAAA,IAChC;AAAA,EAAI;AACV;",
5
+ "names": ["ts", "module"]
6
+ }
@@ -0,0 +1,10 @@
1
+ import ts from 'typescript';
2
+ /**
3
+ * Create a PosgreSQL array type for the given type, that is, given the type
4
+ * `T`, return `(T | null)[]`
5
+ */
6
+ export declare function makePostgresArrayType(type: ts.TypeNode): ts.ArrayTypeNode;
7
+ /**
8
+ * Create an _import_ type, like `import('module').Name<arg0, arg1, ...>`.
9
+ */
10
+ export declare function makeImportType(module: string, name: string, args?: ts.TypeNode | ts.TypeNode[]): ts.ImportTypeNode;
@@ -0,0 +1,29 @@
1
+ // helpers.ts
2
+ import ts from "typescript";
3
+ function makePostgresArrayType(type) {
4
+ const nullable = ts.factory.createLiteralTypeNode(ts.factory.createNull());
5
+ const union = ts.factory.createUnionTypeNode([type, nullable]);
6
+ const array = ts.factory.createArrayTypeNode(union);
7
+ return array;
8
+ }
9
+ function makeImportType(module, name, args = []) {
10
+ if (!Array.isArray(args))
11
+ args = [args];
12
+ return ts.factory.createImportTypeNode(
13
+ // ..................... "import"
14
+ ts.factory.createLiteralTypeNode(
15
+ ts.factory.createStringLiteral(module)
16
+ ),
17
+ // ............ "('module')"
18
+ void 0,
19
+ // import assertions
20
+ ts.factory.createIdentifier(name),
21
+ // ........................ ".Type"
22
+ args
23
+ );
24
+ }
25
+ export {
26
+ makeImportType,
27
+ makePostgresArrayType
28
+ };
29
+ //# sourceMappingURL=helpers.mjs.map
@@ -0,0 +1,6 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../src/helpers.ts"],
4
+ "mappings": ";AAAA,OAAO,QAAQ;AAMR,SAAS,sBAAsB,MAAqC;AACzE,QAAM,WAAW,GAAG,QAAQ,sBAAsB,GAAG,QAAQ,WAAW,CAAC;AACzE,QAAM,QAAQ,GAAG,QAAQ,oBAAoB,CAAE,MAAM,QAAS,CAAC;AAC/D,QAAM,QAAQ,GAAG,QAAQ,oBAAoB,KAAK;AAClD,SAAO;AACT;AAKO,SAAS,eACZ,QACA,MACA,OAAoC,CAAC,GACpB;AACnB,MAAI,CAAE,MAAM,QAAQ,IAAI;AAAG,WAAO,CAAE,IAAK;AAEzC,SAAO,GAAG,QAAQ;AAAA;AAAA,IACd,GAAG,QAAQ;AAAA,MACP,GAAG,QAAQ,oBAAoB,MAAM;AAAA,IAAC;AAAA;AAAA,IAC1C;AAAA;AAAA,IACA,GAAG,QAAQ,iBAAiB,IAAI;AAAA;AAAA,IAChC;AAAA,EAAI;AACV;",
5
+ "names": []
6
+ }