introspeql 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +35 -0
- package/dist/append-schema.d.ts +2 -0
- package/dist/append-schema.js +92 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +1 -0
- package/dist/introspect-columns.d.ts +22 -0
- package/dist/introspect-columns.js +46 -0
- package/dist/introspect-database.d.ts +2 -0
- package/dist/introspect-database.js +100 -0
- package/dist/introspect-enum.d.ts +10 -0
- package/dist/introspect-enum.js +17 -0
- package/dist/introspect-procedures.d.ts +45 -0
- package/dist/introspect-procedures.js +95 -0
- package/dist/introspect-tables.d.ts +15 -0
- package/dist/introspect-tables.js +32 -0
- package/dist/introspeql-config.d.ts +43 -0
- package/dist/introspeql-config.js +65 -0
- package/dist/prepare-data-for-writing.d.ts +27 -0
- package/dist/prepare-data-for-writing.js +134 -0
- package/dist/snake-case-to-pascal-case.d.ts +1 -0
- package/dist/snake-case-to-pascal-case.js +10 -0
- package/dist/write-header.d.ts +2 -0
- package/dist/write-header.js +10 -0
- package/package.json +33 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 Joseph Dvorak
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,35 @@
+# IntrospeQL
+
+IntrospeQL is a simple package for introspecting PostgreSQL tables and functions
+and generating TypeScript types from them. The aim of the package is to
+make the capturing of PostgreSQL types and the generation of TypeScript types
+easy, comprehensive, and accurate.
+
+## Not Production Ready
+
+This project is currently in alpha. Do not use it in production at this time.
+Full documentation will be available in subsequent releases.
+
+## License
+
+MIT License
+
+Copyright (c) 2025 Joseph Dvorak
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
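Since the alpha README ships without usage documentation, the sketch below shows how the package appears to be invoked, inferred from dist/index.js (which re-exports introspectDatabase as introspeql) and the config shape in dist/introspeql-config.js further down in this diff. It is not official documentation: the connection string, paths, and override values are placeholders, and the diff does not show how (or whether) the config is validated before use.

```ts
import { introspeql } from "introspeql";

async function main() {
  await introspeql({
    // Either dbConnectionString or dbConnectionParams may be supplied.
    dbConnectionString: "postgres://user:password@localhost:5432/app_db",
    // Either outFile or outDir; with outDir the file is named introspeql-types.ts.
    outFile: "./src/generated/introspeql-types.ts",
    schemas: ["public"],
    // Optional overrides, keyed by "<type schema>.<type name>".
    types: { "pg_catalog.numeric": "string" },
    ignoreTables: [{ schema: "public", name: "schema_migrations" }],
    ignoreProcedures: [],
  });
}

main().catch(console.error);
```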
package/dist/append-schema.js
ADDED
@@ -0,0 +1,92 @@
+import fs from "node:fs";
+export function appendSchema(outputPath, schema) {
+    let schemaTypeDefinitions = `export namespace ${schema.formattedName} {\n`;
+    schemaTypeDefinitions += `  export const SchemaName = '${schema.rawName}';\n`;
+    if (schema.enums.length > 0) {
+        schemaTypeDefinitions = appendEnums(schemaTypeDefinitions, schema);
+    }
+    if (schema.tables.length > 0) {
+        schemaTypeDefinitions = appendTables(schemaTypeDefinitions, schema);
+    }
+    if (schema.procedures.length > 0) {
+        schemaTypeDefinitions = appendProcedures(schemaTypeDefinitions, schema);
+    }
+    schemaTypeDefinitions += `};`;
+    fs.appendFileSync(outputPath, schemaTypeDefinitions, "utf-8");
+}
+function appendEnums(typeDefs, schema) {
+    typeDefs += "\n" + " ".repeat(2) + "export namespace Enums {";
+    for (const e of schema.enums) {
+        typeDefs += "\n" + " ".repeat(4) + `export enum ${e.name} {\n`;
+        typeDefs += e.values
+            .map((v) => " ".repeat(6) + `${v} = '${v}',\n`)
+            .join("");
+        typeDefs += " ".repeat(4) + "};\n";
+    }
+    typeDefs += " ".repeat(2) + `};\n`;
+    return typeDefs;
+}
+function appendTables(typeDefs, schema) {
+    typeDefs += "\n" + " ".repeat(2) + "export namespace Tables {";
+    for (const t of schema.tables) {
+        typeDefs +=
+            "\n" + " ".repeat(4) + `export namespace ${t.formattedName} {\n`;
+        typeDefs += " ".repeat(6) + `export const TableName = '${t.rawName}';\n`;
+        if (Object.entries(t.columnNames).length > 0) {
+            typeDefs += "\n" + " ".repeat(6) + "export enum ColumnNames {\n";
+            for (const [formattedName, rawName] of Object.entries(t.columnNames)) {
+                typeDefs += " ".repeat(8) + `${formattedName} = '${rawName}',\n`;
+            }
+            typeDefs += " ".repeat(6) + "};\n";
+        }
+        if (Object.entries(t.rowType).length > 0) {
+            typeDefs += "\n" + " ".repeat(6) + "export interface RowType {\n";
+            for (const [rawName, type] of Object.entries(t.rowType)) {
+                const columnName = Object.entries(t.columnNames).find((entry) => {
+                    return entry[1] === rawName;
+                })[0];
+                typeDefs += " ".repeat(8) + `[ColumnNames.${columnName}]: ${type};\n`;
+            }
+            typeDefs += " ".repeat(6) + "};\n";
+        }
+        typeDefs += " ".repeat(4) + "};\n";
+    }
+    typeDefs += " ".repeat(2) + "};\n";
+    return typeDefs;
+}
+function appendProcedures(typeDefs, schema) {
+    typeDefs += "\n" + " ".repeat(2) + "export namespace Procedures {";
+    for (const proc of schema.procedures) {
+        typeDefs +=
+            "\n" + " ".repeat(4) + `export namespace ${proc.formattedName} {\n`;
+        typeDefs +=
+            " ".repeat(6) + `export const ProcedureName = '${proc.rawName}';\n`;
+        typeDefs +=
+            "\n" +
+                " ".repeat(6) +
+                `export const ArgNames = [\n` +
+                proc.argNames
+                    .map((n) => {
+                    return " ".repeat(8) + `'${n}',\n`;
+                })
+                    .join("") +
+                " ".repeat(6) +
+                "] as const;\n";
+        typeDefs +=
+            "\n" +
+                " ".repeat(6) +
+                `export type ArgTypes = [\n` +
+                proc.argTypes
+                    .map((t) => {
+                    return " ".repeat(8) + t + ",\n";
+                })
+                    .join("") +
+                " ".repeat(6) +
+                "];\n";
+        typeDefs +=
+            "\n" + " ".repeat(6) + `export type ReturnType = ${proc.returnType};\n`;
+        typeDefs += " ".repeat(4) + "};\n";
+    }
+    typeDefs += " ".repeat(2) + "};\n";
+    return typeDefs;
+}
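To make the string building in appendSchema concrete, this is approximately what the generator emits for a hypothetical `public` schema containing one enum (`user_role`) and one table (`users`). The schema, table, column, and enum names here are invented for illustration; the shape follows the template literals above.

```ts
export namespace Public {
  export const SchemaName = 'public';

  export namespace Enums {
    export enum UserRole {
      admin = 'admin',
      member = 'member',
    };
  };

  export namespace Tables {
    export namespace Users {
      export const TableName = 'users';

      export enum ColumnNames {
        Id = 'id',
        Role = 'role',
      };

      export interface RowType {
        [ColumnNames.Id]: number;
        [ColumnNames.Role]: Enums.UserRole;
      };
    };
  };
};
```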
package/dist/index.d.ts
ADDED
package/dist/index.js
ADDED
@@ -0,0 +1 @@
+export { introspectDatabase as introspeql } from "./introspect-database";
package/dist/introspect-columns.d.ts
ADDED
@@ -0,0 +1,22 @@
+import { z } from "zod";
+import type { Client } from "pg";
+declare const columnDataSchema: z.ZodObject<{
+    column_name: z.ZodString;
+    column_type_schema: z.ZodString;
+    column_type_id: z.ZodNumber;
+    column_type: z.ZodString;
+    is_enum: z.ZodBoolean;
+    num_dimensions: z.ZodNumber;
+    nullable: z.ZodBoolean;
+}, z.core.$strip>;
+export type ColumnData = z.infer<typeof columnDataSchema>;
+export declare function introspectColumns(client: Client, tableId: number): Promise<{
+    column_name: string;
+    column_type_schema: string;
+    column_type_id: number;
+    column_type: string;
+    is_enum: boolean;
+    num_dimensions: number;
+    nullable: boolean;
+}[]>;
+export {};
package/dist/introspect-columns.js
ADDED
@@ -0,0 +1,46 @@
+import { z } from "zod";
+const columnDataSchema = z.object({
+    column_name: z.string(),
+    column_type_schema: z.string(),
+    column_type_id: z.number(),
+    column_type: z.string(),
+    is_enum: z.boolean(),
+    num_dimensions: z.number(),
+    nullable: z.boolean(),
+});
+export async function introspectColumns(client, tableId) {
+    const result = await client.query(`
+    SELECT
+      a.attname AS column_name,
+      (
+        SELECT nspname
+        FROM pg_catalog.pg_namespace
+        WHERE oid = t.typnamespace
+      ) AS column_type_schema,
+      CASE WHEN a.attndims > 0 THEN (
+        SELECT base_type.oid
+        FROM pg_catalog.pg_type AS array_type, pg_catalog.pg_type as base_type
+        WHERE array_type.oid = a.atttypid AND base_type.typarray = array_type.oid
+      ) ELSE a.atttypid END AS column_type_id,
+      CASE WHEN a.attndims > 0 THEN (
+        SELECT base_type.typname
+        FROM pg_catalog.pg_type AS array_type, pg_catalog.pg_type as base_type
+        WHERE array_type.oid = a.atttypid AND base_type.typarray = array_type.oid
+      ) ELSE t.typname END AS column_type,
+      CASE WHEN a.attndims > 0 THEN (
+        SELECT t2.typtype = 'e' AS is_enum
+        FROM pg_catalog.pg_type AS t1, pg_catalog.pg_type as t2
+        INNER JOIN pg_catalog.pg_namespace AS n ON t2.typnamespace = n.oid
+        WHERE t1.oid = a.atttypid AND t2.typarray = t1.oid
+      ) ELSE t.typtype = 'e' END AS is_enum,
+      a.attndims AS num_dimensions,
+      NOT a.attnotnull As nullable
+    FROM pg_catalog.pg_class AS c
+    INNER JOIN pg_catalog.pg_namespace AS n ON c.relnamespace = n.oid
+    INNER JOIN pg_catalog.pg_attribute AS a ON c.oid = a.attrelid
+    INNER JOIN pg_catalog.pg_type AS t ON a.atttypid = t.oid
+    WHERE c.oid = $1
+      AND a.attnum >= 1;`, [tableId]);
+    const columnData = columnDataSchema.array().parse(result.rows);
+    return columnData;
+}
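As a rough illustration of what this query feeds into the rest of the pipeline: for a hypothetical table with columns `id int4 NOT NULL`, `role user_role` (a user-defined enum), and `tags text[]`, the parsed rows would look something like the sketch below. Table and column names are invented; 23 and 25 are the built-in OIDs for int4 and text, while enum OIDs are installation-specific.

```ts
const exampleColumnData = [
  { column_name: "id",   column_type_schema: "pg_catalog", column_type_id: 23,
    column_type: "int4", is_enum: false, num_dimensions: 0, nullable: false },
  { column_name: "role", column_type_schema: "public",     column_type_id: 16442,
    column_type: "user_role", is_enum: true, num_dimensions: 0, nullable: true },
  // Array columns are resolved to their base type; attndims records the dimensions.
  { column_name: "tags", column_type_schema: "pg_catalog", column_type_id: 25,
    column_type: "text", is_enum: false, num_dimensions: 1, nullable: true },
];
```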
package/dist/introspect-database.js
ADDED
@@ -0,0 +1,100 @@
+import path from "node:path";
+import { Client } from "pg";
+import { introspectTables } from "./introspect-tables";
+import { introspectColumns } from "./introspect-columns";
+import { introspectProcedures } from "./introspect-procedures";
+import { introspectEnum, } from "./introspect-enum";
+import { writeHeader } from "./write-header";
+import { prepareDataForWriting } from "./prepare-data-for-writing";
+import { appendSchema } from "./append-schema";
+export async function introspectDatabase(config) {
+    const client = "dbConnectionString" in config
+        ? new Client({
+            connectionString: config.dbConnectionString,
+        })
+        : new Client({
+            ...config.dbConnectionParams,
+        });
+    try {
+        await client.connect();
+    }
+    catch (e) {
+        throw new Error("Failed to connect to the database.", { cause: e });
+    }
+    // Read table data
+    let tableDataObjects;
+    try {
+        tableDataObjects = await introspectTables(client, config);
+    }
+    catch (e) {
+        throw new Error("Failed to introspect tables.", { cause: e });
+    }
+    // Read column data and update enums object when enum types are found
+    const columnDataObjectsByTableId = {};
+    const partialEnumDataObjects = [];
+    for (const tableDataObj of tableDataObjects) {
+        try {
+            const columnDataObjects = await introspectColumns(client, tableDataObj.id);
+            columnDataObjectsByTableId[tableDataObj.id] = columnDataObjects;
+            for (const columnDataObj of columnDataObjects) {
+                if (columnDataObj.is_enum &&
+                    !partialEnumDataObjects.find((d) => d.id === columnDataObj.column_type_id)) {
+                    partialEnumDataObjects.push({
+                        id: columnDataObj.column_type_id,
+                        schema: columnDataObj.column_type_schema,
+                        name: columnDataObj.column_type,
+                    });
+                }
+            }
+        }
+        catch (e) {
+            throw new Error("Failed to introspect columns.", { cause: e });
+        }
+    }
+    // Read procedure data and update enums object when enum types are found
+    let procedureDataObjects;
+    try {
+        procedureDataObjects = await introspectProcedures(client, config);
+    }
+    catch (e) {
+        throw new Error("Failed to introspect procedures.", { cause: e });
+    }
+    for (const procedureDataObject of procedureDataObjects) {
+        for (const argType of procedureDataObject.arg_types) {
+            if (argType.is_enum &&
+                !partialEnumDataObjects.find((d) => d.id === argType.id)) {
+                partialEnumDataObjects.push({
+                    id: argType.id,
+                    schema: argType.schema,
+                    name: argType.name,
+                });
+            }
+        }
+        if (procedureDataObject.return_type.is_enum &&
+            !partialEnumDataObjects.find((d) => d.id === procedureDataObject.return_type.id)) {
+            partialEnumDataObjects.push({
+                id: procedureDataObject.return_type.id,
+                schema: procedureDataObject.return_type.schema,
+                name: procedureDataObject.return_type.name,
+            });
+        }
+    }
+    // introspect enums
+    const enumDataObjects = [];
+    for (const partialEnumDataObj of partialEnumDataObjects) {
+        try {
+            const enumData = await introspectEnum(client, partialEnumDataObj);
+            enumDataObjects.push(enumData);
+        }
+        catch (e) {
+            throw new Error("Failed to introspect enum.", { cause: e });
+        }
+    }
+    await client.end();
+    const outputPath = "outFile" in config
+        ? config.outFile
+        : path.join(config.outDir, "introspeql-types.ts");
+    writeHeader(outputPath, config);
+    const schemas = prepareDataForWriting(enumDataObjects, tableDataObjects, columnDataObjectsByTableId, procedureDataObjects, config);
+    schemas.forEach((schema) => appendSchema(outputPath, schema));
+}
package/dist/introspect-enum.d.ts
ADDED
@@ -0,0 +1,10 @@
+import type { Client } from "pg";
+export type PartialEnumData = {
+    id: number;
+    schema: string;
+    name: string;
+};
+export type EnumData = PartialEnumData & {
+    values: string[];
+};
+export declare function introspectEnum(client: Client, data: PartialEnumData): Promise<EnumData>;
package/dist/introspect-enum.js
ADDED
@@ -0,0 +1,17 @@
+import { z } from "zod";
+export async function introspectEnum(client, data) {
+    const query = `
+    SELECT ARRAY(SELECT enumlabel FROM pg_enum WHERE enumtypid = $1)::text[] AS enum_values;
+  `;
+    const parameters = [data.id];
+    const result = await client.query(query, parameters);
+    const { enum_values: enumValues } = z
+        .object({
+        enum_values: z.string().array(),
+    })
+        .parse(result.rows[0]);
+    return {
+        ...data,
+        values: enumValues,
+    };
+}
package/dist/introspect-procedures.d.ts
ADDED
@@ -0,0 +1,45 @@
+import { z } from "zod";
+import type { Client } from "pg";
+import type { ParsedIntrospeQLConfig } from "./introspeql-config";
+declare const procedureDataSchema: z.ZodObject<{
+    id: z.ZodNumber;
+    schema: z.ZodString;
+    name: z.ZodString;
+    arg_names: z.ZodPipe<z.ZodNullable<z.ZodArray<z.ZodString>>, z.ZodTransform<string[], string[]>>;
+    arg_types: z.ZodArray<z.ZodObject<{
+        id: z.ZodCoercedNumber<unknown>;
+        schema: z.ZodString;
+        name: z.ZodString;
+        is_array: z.ZodBoolean;
+        is_enum: z.ZodBoolean;
+    }, z.core.$strip>>;
+    return_type: z.ZodObject<{
+        id: z.ZodCoercedNumber<unknown>;
+        schema: z.ZodString;
+        name: z.ZodString;
+        is_array: z.ZodBoolean;
+        is_enum: z.ZodBoolean;
+    }, z.core.$strip>;
+}, z.core.$strip>;
+export type ProcedureData = z.infer<typeof procedureDataSchema>;
+export declare function introspectProcedures(client: Client, config: ParsedIntrospeQLConfig): Promise<{
+    id: number;
+    schema: string;
+    name: string;
+    arg_types: {
+        id: number;
+        schema: string;
+        name: string;
+        is_array: boolean;
+        is_enum: boolean;
+    }[];
+    return_type: {
+        id: number;
+        schema: string;
+        name: string;
+        is_array: boolean;
+        is_enum: boolean;
+    };
+    arg_names?: string[];
+}[]>;
+export {};
package/dist/introspect-procedures.js
ADDED
@@ -0,0 +1,95 @@
+import { z } from "zod";
+const typeSchema = z.object({
+    id: z.coerce.number(),
+    schema: z.string(),
+    name: z.string(),
+    is_array: z.boolean(),
+    is_enum: z.boolean(),
+});
+const procedureDataSchema = z.object({
+    id: z.number(),
+    schema: z.string(),
+    name: z.string(),
+    arg_names: z
+        .string()
+        .array()
+        .nullable()
+        .transform((v) => (v ? v : [])),
+    arg_types: typeSchema.array(),
+    return_type: typeSchema,
+});
+export async function introspectProcedures(client, config) {
+    const schemaPlaceholders = config.schemas
+        .map((_, i) => `$${i + 1}`)
+        .join(", ");
+    let query = `
+    SELECT
+      p.oid AS id,
+      n.nspname AS schema,
+      p.proname AS name,
+      p.proargnames AS arg_names,
+      ARRAY(
+        SELECT jsonb_build_object(
+          'id',
+          t.oid,
+          'schema',
+          n.nspname,
+          'name',
+          t.typname,
+          'is_array',
+          EXISTS(SELECT 1 FROM pg_catalog.pg_type AS t WHERE t.typarray = pt.*),
+          'is_enum',
+          t.typtype = 'e'
+        ) FROM UNNEST(p.proargtypes) AS pt
+        INNER JOIN pg_catalog.pg_type as t
+        ON
+          CASE
+            WHEN EXISTS(SELECT 1 FROM pg_catalog.pg_type AS t WHERE t.typarray = pt.*)
+            THEN pt.* = t.typarray
+            ELSE pt.* = t.oid
+          END
+        INNER JOIN pg_catalog.pg_namespace AS n
+        ON t.typnamespace = n.oid
+      ) AS arg_types,
+      (
+        SELECT jsonb_build_object(
+          'id',
+          t.oid,
+          'schema',
+          n.nspname,
+          'name',
+          t.typname,
+          'is_array',
+          EXISTS(SELECT 1 FROM pg_catalog.pg_type AS t WHERE t.typarray = p.prorettype),
+          'is_enum',
+          t.typtype = 'e'
+        ) FROM pg_catalog.pg_type AS t
+        INNER JOIN pg_catalog.pg_namespace as n
+        ON t.typnamespace = n.oid
+        WHERE
+          CASE
+            WHEN EXISTS(SELECT 1 FROM pg_catalog.pg_type AS t WHERE t.typarray = p.prorettype)
+            THEN t.typarray = p.prorettype
+            ELSE t.oid = p.prorettype
+          END
+      ) AS return_type
+    FROM pg_catalog.pg_proc AS p
+    INNER JOIN pg_catalog.pg_namespace AS n ON p.pronamespace = n.oid
+    WHERE n.nspname IN (${schemaPlaceholders})
+  `;
+    const parameters = [...config.schemas];
+    if (config.ignoreProcedures.length > 0) {
+        let offset = schemaPlaceholders.length;
+        const filters = [];
+        for (const procedureToIgnore of config.ignoreProcedures) {
+            filters.push(`(n.nspname = $${offset++} AND p.proname = $${offset++})`);
+            parameters.push(procedureToIgnore.schema);
+            parameters.push(procedureToIgnore.name);
+        }
+        query += `AND NOT (${filters.join(" OR\n")})`;
+    }
+    query += ";";
+    const result = await client.query(query, parameters);
+    const procedureData = procedureDataSchema.array().parse(result.rows);
+    return procedureData;
+}
package/dist/introspect-tables.d.ts
ADDED
@@ -0,0 +1,15 @@
+import { z } from "zod";
+import type { Client } from "pg";
+import type { ParsedIntrospeQLConfig } from "./introspeql-config";
+declare const tableDataSchema: z.ZodObject<{
+    id: z.ZodNumber;
+    schema: z.ZodString;
+    name: z.ZodString;
+}, z.core.$strip>;
+export type TableData = z.infer<typeof tableDataSchema>;
+export declare function introspectTables(client: Client, config: ParsedIntrospeQLConfig): Promise<{
+    id: number;
+    schema: string;
+    name: string;
+}[]>;
+export {};
package/dist/introspect-tables.js
ADDED
@@ -0,0 +1,32 @@
+import { z } from "zod";
+const tableDataSchema = z.object({
+    id: z.number(),
+    schema: z.string(),
+    name: z.string(),
+});
+export async function introspectTables(client, config) {
+    const schemaPlaceholders = config.schemas
+        .map((_, i) => `$${i + 1}`)
+        .join(", ");
+    let query = `
+    SELECT c.oid AS id, n.nspname AS schema, c.relname AS name
+    FROM pg_catalog.pg_class AS c
+    INNER JOIN pg_catalog.pg_namespace AS n ON c.relnamespace = n.oid
+    WHERE c.relkind = 'r' AND n.nspname IN (${schemaPlaceholders})
+  `;
+    const parameters = [...config.schemas];
+    if (config.ignoreTables.length > 0) {
+        let offset = schemaPlaceholders.length;
+        const filters = [];
+        for (const tableToIgnore of config.ignoreTables) {
+            filters.push(`(n.nspname = $${offset++} AND c.relname = $${offset++})`);
+            parameters.push(tableToIgnore.schema);
+            parameters.push(tableToIgnore.name);
+        }
+        query += `AND NOT (${filters.join(" OR\n")})`;
+    }
+    query += ";";
+    const result = await client.query(query, parameters);
+    const tableData = tableDataSchema.array().parse(result.rows);
+    return tableData;
+}
package/dist/introspeql-config.d.ts
ADDED
@@ -0,0 +1,43 @@
+import { z } from "zod";
+declare const introspeqlConfig: z.ZodIntersection<z.ZodIntersection<z.ZodUnion<readonly [z.ZodObject<{
+    dbConnectionString: z.ZodString;
+}, z.core.$strip>, z.ZodObject<{
+    dbConnectionParams: z.ZodObject<{
+        user: z.ZodOptional<z.ZodString>;
+        password: z.ZodOptional<z.ZodString>;
+        host: z.ZodOptional<z.ZodString>;
+        port: z.ZodOptional<z.ZodNumber>;
+        database: z.ZodOptional<z.ZodString>;
+    }, z.core.$strip>;
+}, z.core.$strip>]>, z.ZodUnion<readonly [z.ZodObject<{
+    outDir: z.ZodString;
+}, z.core.$strip>, z.ZodObject<{
+    outFile: z.ZodString;
+}, z.core.$strip>]>>, z.ZodObject<{
+    schemas: z.ZodDefault<z.ZodOptional<z.ZodArray<z.ZodString>>>;
+    header: z.ZodOptional<z.ZodString>;
+    types: z.ZodPipe<z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodString>>, z.ZodTransform<{
+        "pg_catalog.bool": string;
+        "pg_catalog.int2": string;
+        "pg_catalog.int4": string;
+        "pg_catalog.float4": string;
+        "pg_catalog.float8": string;
+        "pg_catalog.json": string;
+        "pg_catalog.jsonb": string;
+        "pg_catalog.date": string;
+        "pg_catalog.timestamp": string;
+        "pg_catalog.timestamptz": string;
+        "pg_catalog.void": string;
+    }, Record<string, string>>>;
+    ignoreTables: z.ZodDefault<z.ZodOptional<z.ZodArray<z.ZodObject<{
+        schema: z.ZodString;
+        name: z.ZodString;
+    }, z.core.$strip>>>>;
+    ignoreProcedures: z.ZodDefault<z.ZodOptional<z.ZodArray<z.ZodObject<{
+        schema: z.ZodString;
+        name: z.ZodString;
+    }, z.core.$strip>>>>;
+}, z.core.$strip>>;
+type IntrospeQLConfig = z.input<typeof introspeqlConfig>;
+type ParsedIntrospeQLConfig = z.infer<typeof introspeqlConfig>;
+export { introspeqlConfig, type IntrospeQLConfig, type ParsedIntrospeQLConfig };
package/dist/introspeql-config.js
ADDED
@@ -0,0 +1,65 @@
+import { z } from "zod";
+const databaseConnectionConfig = z.union([
+    z.object({
+        dbConnectionString: z.string(),
+    }),
+    z.object({
+        dbConnectionParams: z.object({
+            user: z.string().optional(),
+            password: z.string().optional(),
+            host: z.string().optional(),
+            port: z.number().optional(),
+            database: z.string().optional(),
+        }),
+    }),
+]);
+const outputConfig = z.union([
+    z.object({
+        outDir: z.string(),
+    }),
+    z.object({
+        outFile: z.string(),
+    }),
+]);
+const entityData = z.object({
+    schema: z.string(),
+    name: z.string(),
+});
+const otherConfig = z.object({
+    schemas: z
+        .string()
+        .array()
+        .nonempty({
+        message: "Please provide at least one schema.",
+    })
+        .optional()
+        .default(() => ["public"]),
+    header: z.string().optional(),
+    types: z
+        .record(z.string(), z.string())
+        .optional()
+        .transform((t) => ({
+        "pg_catalog.bool": "boolean",
+        "pg_catalog.int2": "number",
+        "pg_catalog.int4": "number",
+        "pg_catalog.float4": "number",
+        "pg_catalog.float8": "number",
+        "pg_catalog.json": "object",
+        "pg_catalog.jsonb": "object",
+        "pg_catalog.date": "Date",
+        "pg_catalog.timestamp": "Date",
+        "pg_catalog.timestamptz": "Date",
+        "pg_catalog.void": "void",
+        ...t,
+    })),
+    ignoreTables: entityData
+        .array()
+        .optional()
+        .default(() => []),
+    ignoreProcedures: entityData
+        .array()
+        .optional()
+        .default(() => []),
+});
+const introspeqlConfig = z.intersection(z.intersection(databaseConnectionConfig, outputConfig), otherConfig);
+export { introspeqlConfig };
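For reference, this is roughly what the zod schema above fills in when a minimal config is parsed. The deep import path is hypothetical (the diff does not show introspeqlConfig being re-exported from the package entry point), and introspect-database.js is not shown invoking the parse step itself.

```ts
import { introspeqlConfig } from "introspeql/dist/introspeql-config";

const parsed = introspeqlConfig.parse({
  dbConnectionString: "postgres://localhost:5432/app_db",
  outDir: "./generated",
});

// parsed.schemas          -> ["public"]
// parsed.types            -> { "pg_catalog.bool": "boolean", "pg_catalog.int4": "number", ... }
// parsed.ignoreTables     -> []
// parsed.ignoreProcedures -> []
```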
package/dist/prepare-data-for-writing.d.ts
ADDED
@@ -0,0 +1,27 @@
+import type { EnumData } from "./introspect-enum";
+import type { TableData } from "./introspect-tables";
+import type { ColumnData } from "./introspect-columns";
+import type { ProcedureData } from "./introspect-procedures";
+import type { ParsedIntrospeQLConfig } from "./introspeql-config";
+export interface Schema {
+    formattedName: string;
+    rawName: string;
+    enums: Array<{
+        name: string;
+        values: string[];
+    }>;
+    tables: Array<{
+        formattedName: string;
+        rawName: string;
+        columnNames: Record<string, string>;
+        rowType: Record<string, string>;
+    }>;
+    procedures: Array<{
+        formattedName: string;
+        rawName: string;
+        argNames: string[];
+        argTypes: string[];
+        returnType: string;
+    }>;
+}
+export declare function prepareDataForWriting(enumDataObjects: EnumData[], tableDataObjects: TableData[], columnDataObjectsByTableId: Record<number, ColumnData[]>, procedureDataObjects: ProcedureData[], config: ParsedIntrospeQLConfig): Schema[];
package/dist/prepare-data-for-writing.js
ADDED
@@ -0,0 +1,134 @@
+import { snakeCaseToPascalCase } from "./snake-case-to-pascal-case";
+export function prepareDataForWriting(enumDataObjects, tableDataObjects, columnDataObjectsByTableId, procedureDataObjects, config) {
+    const schemas = [];
+    prepareEnums(enumDataObjects, schemas, config);
+    prepareTables(tableDataObjects, columnDataObjectsByTableId, schemas, config);
+    prepareProcedures(procedureDataObjects, schemas, config);
+    return schemas;
+}
+function prepareEnums(enumDataObjects, schemas, config) {
+    for (const enumDataObj of enumDataObjects) {
+        // If the enum is overridden by configuration options, skip it.
+        const typeMappingKey = `${enumDataObj.schema}.${enumDataObj.name}`;
+        if (typeMappingKey in config.types)
+            continue;
+        const schema = findOrInsertSchema(schemas, enumDataObj.schema);
+        const formattedEnumName = snakeCaseToPascalCase(enumDataObj.name);
+        schema.enums.push({
+            name: formattedEnumName,
+            values: enumDataObj.values,
+        });
+    }
+}
+function prepareTables(tableDataObjects, columnDataObjectsByTableId, schemas, config) {
+    for (const tableDataObj of tableDataObjects) {
+        const schema = findOrInsertSchema(schemas, tableDataObj.schema);
+        const tableData = {
+            formattedName: snakeCaseToPascalCase(tableDataObj.name),
+            rawName: tableDataObj.name,
+            columnNames: columnDataObjectsByTableId[tableDataObj.id].reduce((acc, current) => {
+                const formattedColumnName = snakeCaseToPascalCase(current.column_name);
+                acc[formattedColumnName] = current.column_name;
+                return acc;
+            }, {}),
+            rowType: columnDataObjectsByTableId[tableDataObj.id].reduce((acc, current) => {
+                let type;
+                const typeMappingKey = `${current.column_type_schema}.${current.column_type}`;
+                if (typeMappingKey in config.types) {
+                    type = config.types[typeMappingKey];
+                }
+                else if (current.is_enum) {
+                    const formattedEnumName = snakeCaseToPascalCase(current.column_type);
+                    type = `Enums.${formattedEnumName}`;
+                    if (current.column_type_schema !== schema.rawName) {
+                        const formattedEnumSchemaName = snakeCaseToPascalCase(current.column_type_schema);
+                        type = `${formattedEnumSchemaName}.${type}`;
+                    }
+                }
+                else {
+                    type = "string";
+                }
+                type += "[]".repeat(current.num_dimensions);
+                if (current.nullable) {
+                    type += " | null";
+                }
+                acc[current.column_name] = type;
+                return acc;
+            }, {}),
+        };
+        schema.tables.push(tableData);
+    }
+}
+function prepareProcedures(procedureDataObjects, schemas, config) {
+    for (const procDataObj of procedureDataObjects) {
+        const schema = findOrInsertSchema(schemas, procDataObj.schema);
+        const extantProcedureDeclarations = schema.procedures.filter((p) => p.rawName === procDataObj.name).length;
+        const procData = {
+            formattedName: snakeCaseToPascalCase(procDataObj.name) +
+                (extantProcedureDeclarations > 0
+                    ? "_" + extantProcedureDeclarations
+                    : ""),
+            rawName: procDataObj.name,
+            argNames: procDataObj.arg_names,
+            argTypes: procDataObj.arg_types.map((a) => {
+                let type;
+                const typeMappingKey = `${a.schema}.${a.name}`;
+                if (typeMappingKey in config.types) {
+                    type = config.types[typeMappingKey];
+                }
+                else if (a.is_enum) {
+                    const formattedEnumName = snakeCaseToPascalCase(a.name);
+                    type = `Enums.${formattedEnumName}`;
+                    if (a.schema !== schema.rawName) {
+                        const formattedEnumSchemaName = snakeCaseToPascalCase(a.schema);
+                        type = `${formattedEnumSchemaName}.${type}`;
+                    }
+                }
+                else {
+                    type = "string";
+                }
+                if (a.is_array) {
+                    type += "[]";
+                }
+                return type;
+            }),
+            returnType: (() => {
+                let type;
+                const typeMappingKey = `${procDataObj.return_type.schema}.${procDataObj.return_type.name}`;
+                if (typeMappingKey in config.types) {
+                    type = config.types[typeMappingKey];
+                }
+                else if (procDataObj.return_type.is_enum) {
+                    const formattedEnumName = snakeCaseToPascalCase(procDataObj.return_type.name);
+                    type = `Enums.${formattedEnumName}`;
+                    if (procDataObj.return_type.schema !== schema.rawName) {
+                        const formattedEnumSchemaName = snakeCaseToPascalCase(procDataObj.return_type.schema);
+                        type = `${formattedEnumSchemaName}.${type}`;
+                    }
+                }
+                else {
+                    type = "string";
+                }
+                if (procDataObj.return_type.is_array) {
+                    type += "[]";
+                }
+                return type;
+            })(),
+        };
+        schema.procedures.push(procData);
+    }
+}
+function findOrInsertSchema(schemas, rawName) {
+    let schema = schemas.find((s) => s.rawName === rawName);
+    if (schema)
+        return schema;
+    schema = {
+        formattedName: snakeCaseToPascalCase(rawName),
+        rawName,
+        enums: [],
+        tables: [],
+        procedures: [],
+    };
+    schemas.push(schema);
+    return schema;
+}
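One behavior worth noting in prepareProcedures above: when a schema contains several PostgreSQL functions with the same name (overloads), the formatted namespace names are disambiguated with a numeric suffix based on how many declarations of that name have already been collected. With hypothetical overloads of a get_user function:

```ts
// Emitted namespaces for three hypothetical overloads of public.get_user,
// in the order the rows come back from introspectProcedures:
//   Procedures.GetUser      (first declaration)
//   Procedures.GetUser_1    (second declaration)
//   Procedures.GetUser_2    (third declaration)
```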
package/dist/snake-case-to-pascal-case.d.ts
ADDED
@@ -0,0 +1 @@
+export declare function snakeCaseToPascalCase(str: string): string;
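The compiled body of snake-case-to-pascal-case.js is not included in this diff excerpt, only the declaration above. A conventional implementation matching that signature would look something like the following; this is an illustrative sketch, not the package's actual code.

```ts
export function snakeCaseToPascalCase(str: string): string {
  return str
    .split("_")
    .filter((segment) => segment.length > 0)
    // Capitalize the first letter of each segment and keep the rest as-is.
    .map((segment) => segment.charAt(0).toUpperCase() + segment.slice(1))
    .join("");
}
```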
package/dist/write-header.js
ADDED
@@ -0,0 +1,10 @@
+import fs from "node:fs";
+export function writeHeader(outputPath, config) {
+    const comment = `/* This file was generated by IntrospeQL. Do not edit manually. */\n\n`;
+    const header = config.header
+        ? comment +
+            config.header +
+            "\n".repeat(config.header.endsWith("\n") ? 1 : 2)
+        : comment;
+    fs.writeFileSync(outputPath, header, "utf-8");
+}
package/package.json
ADDED
@@ -0,0 +1,33 @@
+{
+  "name": "introspeql",
+  "version": "0.0.0",
+  "description": "Easy Postgres to TypeScript type generation.",
+  "main": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1",
+    "build": "tsc"
+  },
+  "files": [
+    "dist",
+    "README.md"
+  ],
+  "keywords": [
+    "PostgreSQL",
+    "Postgres",
+    "TypeScript"
+  ],
+  "author": "Joseph Dvorak",
+  "license": "ISC",
+  "type": "commonjs",
+  "devDependencies": {
+    "@types/node": "^24.5.2",
+    "tsx": "^4.20.6",
+    "typescript": "^5.9.2"
+  },
+  "dependencies": {
+    "@types/pg": "^8.15.5",
+    "pg": "^8.16.3",
+    "zod": "^4.1.11"
+  }
+}