@quillsql/node 0.3.6 → 0.3.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/assets/pgtypes.js +2785 -0
- package/dist/db/BigQuery.js +189 -0
- package/dist/db/{CachedPools.js → CachedConnection.js} +11 -23
- package/dist/db/DatabaseHelper.js +187 -0
- package/dist/db/Mysql.js +187 -0
- package/dist/db/Postgres.js +156 -0
- package/dist/db/Snowflake.js +179 -0
- package/dist/index.js +47 -18
- package/dist/index.uspec.js +3 -2
- package/dist/models/Client.js +2 -0
- package/dist/utils/RunQueryProcesses.js +4 -4
- package/dist/utils/textProcessing.js +17 -0
- package/examples/node-server/app.ts +6 -8
- package/package.json +5 -1
- package/src/assets/pgtypes.ts +2782 -0
- package/src/db/BigQuery.ts +201 -0
- package/src/db/{CachedPools.ts → CachedConnection.ts} +25 -21
- package/src/db/DatabaseHelper.ts +340 -0
- package/src/db/Mysql.ts +209 -0
- package/src/db/Postgres.ts +178 -0
- package/src/db/Snowflake.ts +191 -0
- package/src/index.ts +69 -18
- package/src/index.uspec.ts +9 -2
- package/src/models/Client.ts +29 -0
- package/src/models/Quill.ts +0 -6
- package/src/utils/RunQueryProcesses.ts +5 -5
- package/src/utils/textProcessing.ts +13 -0
|
@@ -0,0 +1,201 @@
|
|
|
1
|
+
import { Client } from "../models/Client";
|
|
2
|
+
import { BigQuery } from "@google-cloud/bigquery";
|
|
3
|
+
import { QuillQueryResults } from "./DatabaseHelper";
|
|
4
|
+
import { capitalize, depluralize } from "../utils/textProcessing";
|
|
5
|
+
|
|
6
|
+
/**
 * Connection settings for a BigQuery data source, produced by
 * formatBigQueryConfig from the Quill connection string.
 */
export interface BigQueryConfig {
  // BigQuery dataset the integration reads from.
  datasetName: string;
  // GCP project that owns the dataset (service account's project_id).
  projectId: string;
  // Parsed service-account JSON; passed straight through to the BigQuery
  // client constructor. Left as `any` because the full key shape is not
  // modeled here.
  credentials: any;
}
|
|
11
|
+
|
|
12
|
+
export function connectToBigQuery(config: BigQueryConfig) {
|
|
13
|
+
return new BigQuery(config);
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
export async function runQueryBigQuery(
|
|
17
|
+
sql: string,
|
|
18
|
+
bigQuery: BigQuery
|
|
19
|
+
): Promise<QuillQueryResults> {
|
|
20
|
+
const rows = await bigQuery.query(sql);
|
|
21
|
+
if (!rows[0] || rows[0].length === 0) return { fields: [], rows: [] };
|
|
22
|
+
const typedRows = rows[0] as { [fieldName: string]: any }[];
|
|
23
|
+
const fields = Object.keys(typedRows[0]).map((name: string) => ({
|
|
24
|
+
name,
|
|
25
|
+
dataTypeID: 1043,
|
|
26
|
+
}));
|
|
27
|
+
fields.forEach((field) => {
|
|
28
|
+
typedRows.some((row) => {
|
|
29
|
+
if (row[field.name] === null) return false;
|
|
30
|
+
field.dataTypeID = inferType(row[field.name]);
|
|
31
|
+
return true;
|
|
32
|
+
});
|
|
33
|
+
});
|
|
34
|
+
return {
|
|
35
|
+
fields: fields,
|
|
36
|
+
rows: typedRows,
|
|
37
|
+
};
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
export async function getSchemaBigQuery(bigQuery: BigQuery): Promise<string[]> {
|
|
41
|
+
const [datasets] = await bigQuery.getDatasets();
|
|
42
|
+
const definedDatasets = datasets.map((dataset) => dataset.id);
|
|
43
|
+
const filtered: string[] = [];
|
|
44
|
+
definedDatasets.forEach((dataset) => {
|
|
45
|
+
if (dataset !== undefined) {
|
|
46
|
+
filtered.push(dataset);
|
|
47
|
+
}
|
|
48
|
+
});
|
|
49
|
+
return filtered;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
export async function getTablesBySchemaBigQuery(
|
|
53
|
+
bigQuery: BigQuery,
|
|
54
|
+
schemaName: string
|
|
55
|
+
): Promise<string[]> {
|
|
56
|
+
const sql = `SELECT table_name FROM ${schemaName}.INFORMATION_SCHEMA.TABLES WHERE table_type = 'BASE TABLE'`;
|
|
57
|
+
const rows = await bigQuery.query(sql);
|
|
58
|
+
return rows[0].map((row: any) => row.table_name);
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
export async function getColumnsByTableBigQuery(
|
|
62
|
+
bigQuery: BigQuery,
|
|
63
|
+
schemaName: string,
|
|
64
|
+
tableName: string
|
|
65
|
+
): Promise<string[]> {
|
|
66
|
+
const sql = `SELECT column_name FROM ${schemaName}.INFORMATION_SCHEMA.COLUMNS WHERE table_name = '${tableName}'`;
|
|
67
|
+
const rows = await bigQuery.query(sql);
|
|
68
|
+
return rows[0].map((row: any) => row.column_name);
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
export function formatBigQueryConfig(connectionString: string): BigQueryConfig {
|
|
72
|
+
const jsonStartIndex = connectionString.indexOf("{");
|
|
73
|
+
if (jsonStartIndex === -1) {
|
|
74
|
+
throw new Error("Invalid input string. No JSON data found.");
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
const datasetName = connectionString.substring(0, jsonStartIndex).trim();
|
|
78
|
+
const jsonString = connectionString.substring(jsonStartIndex);
|
|
79
|
+
|
|
80
|
+
try {
|
|
81
|
+
const serviceAccount = JSON.parse(jsonString);
|
|
82
|
+
|
|
83
|
+
// Validate if required fields are present
|
|
84
|
+
if (!serviceAccount.project_id || !serviceAccount.private_key) {
|
|
85
|
+
throw new Error(
|
|
86
|
+
"Invalid service account JSON. Required fields are missing."
|
|
87
|
+
);
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
return {
|
|
91
|
+
datasetName,
|
|
92
|
+
projectId: serviceAccount.project_id,
|
|
93
|
+
credentials: serviceAccount,
|
|
94
|
+
};
|
|
95
|
+
} catch (error) {
|
|
96
|
+
throw new Error("Failed to parse JSON string: " + error);
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
export async function getForeignKeysBigQuery(
|
|
101
|
+
connection: BigQuery,
|
|
102
|
+
schemaName: string,
|
|
103
|
+
tableName: string,
|
|
104
|
+
primaryKey: string
|
|
105
|
+
): Promise<string[]> {
|
|
106
|
+
const depluralizedTableName = depluralize(tableName);
|
|
107
|
+
let sql = `SELECT column_name FROM ${schemaName}.INFORMATION_SCHEMA.COLUMNS
|
|
108
|
+
WHERE table_name != '${tableName}'
|
|
109
|
+
and (column_name = '${primaryKey}'
|
|
110
|
+
or column_name = '${depluralizedTableName}_${primaryKey}'
|
|
111
|
+
or column_name = '${depluralizedTableName}${capitalize(primaryKey)}')`;
|
|
112
|
+
const results = await runQueryBigQuery(sql, connection);
|
|
113
|
+
let foreignKeysString = results.rows.map((key) => {
|
|
114
|
+
return key.column_name;
|
|
115
|
+
});
|
|
116
|
+
foreignKeysString = foreignKeysString.filter(
|
|
117
|
+
(key) => key !== "id" && key !== "_id_"
|
|
118
|
+
);
|
|
119
|
+
foreignKeysString = [...new Set(foreignKeysString)];
|
|
120
|
+
if (foreignKeysString.length === 0) {
|
|
121
|
+
sql = `SELECT column_name FROM ${schemaName}.INFORMATION_SCHEMA.COLUMNS
|
|
122
|
+
WHERE table_name != '${tableName}'
|
|
123
|
+
and (column_name like '${depluralizedTableName}%'
|
|
124
|
+
or column_name like '%_id'
|
|
125
|
+
or column_name like '%Id'
|
|
126
|
+
or column_name like '%_${primaryKey}'
|
|
127
|
+
or column_name like '%${capitalize(primaryKey)}')`;
|
|
128
|
+
const results = await runQueryBigQuery(sql, connection);
|
|
129
|
+
foreignKeysString = results.rows.map((key) => {
|
|
130
|
+
return key.column_name;
|
|
131
|
+
});
|
|
132
|
+
foreignKeysString = [...new Set(foreignKeysString)];
|
|
133
|
+
}
|
|
134
|
+
return foreignKeysString;
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
export async function getSchemaColumnInfoBigQuery(
|
|
138
|
+
connection: BigQuery,
|
|
139
|
+
schemaName: string,
|
|
140
|
+
tableNames: string[]
|
|
141
|
+
): Promise<
|
|
142
|
+
{ tableName: string; columns: { columnName: string; dataTypeId: number }[] }[]
|
|
143
|
+
> {
|
|
144
|
+
const allColumns = await Promise.all(
|
|
145
|
+
tableNames.map(async (tableName) => {
|
|
146
|
+
const query = `
|
|
147
|
+
SELECT column_name as columnName, data_type as dataType
|
|
148
|
+
FROM ${schemaName}.INFORMATION_SCHEMA.COLUMNS
|
|
149
|
+
WHERE table_name = '${tableName}'
|
|
150
|
+
ORDER BY ordinal_position;
|
|
151
|
+
`;
|
|
152
|
+
const results = await runQueryBigQuery(query, connection);
|
|
153
|
+
return {
|
|
154
|
+
tableName,
|
|
155
|
+
displayName: tableName,
|
|
156
|
+
columns: results.rows.map((row: any) => ({
|
|
157
|
+
columnName: row.columnName,
|
|
158
|
+
displayName: row.columnName,
|
|
159
|
+
dataTypeId: inferType(convertBigQueryTypeToPostgresOID(row.dataType)),
|
|
160
|
+
fieldType: row.dataType,
|
|
161
|
+
})),
|
|
162
|
+
};
|
|
163
|
+
})
|
|
164
|
+
);
|
|
165
|
+
return allColumns;
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
function convertBigQueryTypeToPostgresOID(type: string): number {
|
|
169
|
+
const typeToOidMap: { [key: string]: number } = {
|
|
170
|
+
VARCHAR: 1043,
|
|
171
|
+
INTEGER: 23,
|
|
172
|
+
FLOAT: 700,
|
|
173
|
+
TIMESTAMP: 1114,
|
|
174
|
+
DATE: 1082,
|
|
175
|
+
};
|
|
176
|
+
|
|
177
|
+
const postgresType = typeToOidMap[type.toUpperCase()] || "VARCHAR"; // Default to 'text' if the type is not recognized
|
|
178
|
+
return typeToOidMap[postgresType] || 1043; // Default to OID for 'text' if the type is not recognized
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
function inferType(elem: any) {
|
|
182
|
+
if (typeof elem === "number") {
|
|
183
|
+
// Check if the number is a float or an integer
|
|
184
|
+
return Number.isInteger(elem) ? 23 : 700; // 23: integer, 700: real
|
|
185
|
+
}
|
|
186
|
+
if (typeof elem === "string") {
|
|
187
|
+
// Attempt to infer date, time, and timestamp formats
|
|
188
|
+
// Date in YYYY-MM-DD format
|
|
189
|
+
if (/^\d{4}-\d{2}-\d{2}$/.test(elem)) return 1082; // date
|
|
190
|
+
// Timestamp in YYYY-MM-DDTHH:MM:SS[.fraction] format
|
|
191
|
+
if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?$/.test(elem)) return 1114; // timestamp without timezone
|
|
192
|
+
// Timestamp in YYYY-MM-DDTHH:MM:SS[.fraction]Z format
|
|
193
|
+
if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/.test(elem))
|
|
194
|
+
return 1184; // timestamp with timezone
|
|
195
|
+
// Time in HH:MM:SS format
|
|
196
|
+
if (/^\d{2}:\d{2}:\d{2}$/.test(elem)) return 1083; // time
|
|
197
|
+
return 1043; // varchar
|
|
198
|
+
}
|
|
199
|
+
// Add more specific cases or different data types as needed
|
|
200
|
+
return 1043; // default or unknown type
|
|
201
|
+
}
|
|
@@ -3,6 +3,13 @@ import { Mapable, CacheCredentials } from "../models/Cache";
|
|
|
3
3
|
import { QuillConfig } from "../models/Quill";
|
|
4
4
|
import { createClient } from "redis";
|
|
5
5
|
import { isSuperset } from "../utils/Error";
|
|
6
|
+
import {
|
|
7
|
+
DatabaseConnection,
|
|
8
|
+
DatabaseType,
|
|
9
|
+
connectToDatabase,
|
|
10
|
+
disconnectFromDatabase,
|
|
11
|
+
runQueryByDatabase,
|
|
12
|
+
} from "./DatabaseHelper";
|
|
6
13
|
|
|
7
14
|
class PgError extends Error {
|
|
8
15
|
code?: string;
|
|
@@ -21,14 +28,20 @@ class PgError extends Error {
|
|
|
21
28
|
/** The TTL for new cache entries (default: 1h) */
|
|
22
29
|
const DEFAULT_CACHE_TTL = 24 * 60 * 60;
|
|
23
30
|
|
|
24
|
-
export class
|
|
25
|
-
public
|
|
31
|
+
export class CachedConnection {
|
|
32
|
+
public databaseType: DatabaseType;
|
|
33
|
+
public pool: DatabaseConnection;
|
|
26
34
|
public orgId: any;
|
|
27
35
|
public ttl: number;
|
|
28
36
|
public cache: Mapable | null;
|
|
29
37
|
|
|
30
|
-
constructor(
|
|
31
|
-
|
|
38
|
+
constructor(
|
|
39
|
+
databaseType: DatabaseType,
|
|
40
|
+
config: any,
|
|
41
|
+
cacheConfig: Partial<CacheCredentials> = {}
|
|
42
|
+
) {
|
|
43
|
+
this.databaseType = databaseType;
|
|
44
|
+
this.pool = connectToDatabase(databaseType, config);
|
|
32
45
|
this.ttl = cacheConfig?.ttl ?? DEFAULT_CACHE_TTL;
|
|
33
46
|
this.cache = this.getCache(cacheConfig);
|
|
34
47
|
}
|
|
@@ -36,30 +49,21 @@ export class CachedPool {
|
|
|
36
49
|
public async query(text: string, values?: any[]): Promise<any> {
|
|
37
50
|
try {
|
|
38
51
|
if (!this.cache) {
|
|
39
|
-
|
|
40
|
-
return {
|
|
41
|
-
fields: results.fields.map((field: any) => ({
|
|
42
|
-
name: field.name,
|
|
43
|
-
dataTypeID: field.dataTypeID,
|
|
44
|
-
})),
|
|
45
|
-
rows: results.rows,
|
|
46
|
-
};
|
|
52
|
+
return await runQueryByDatabase(this.databaseType, this.pool, text);
|
|
47
53
|
}
|
|
48
54
|
const key: string = `${this.orgId}:${text}`;
|
|
49
55
|
const cachedResult: string | null = await this.cache.get(key);
|
|
50
56
|
if (cachedResult) {
|
|
51
57
|
return JSON.parse(cachedResult);
|
|
52
58
|
} else {
|
|
53
|
-
const newResult
|
|
59
|
+
const newResult = await runQueryByDatabase(
|
|
60
|
+
this.databaseType,
|
|
61
|
+
this.pool,
|
|
62
|
+
text
|
|
63
|
+
);
|
|
54
64
|
const newResultString: string = JSON.stringify(newResult);
|
|
55
65
|
await this.cache.set(key, newResultString, "EX", DEFAULT_CACHE_TTL);
|
|
56
|
-
return
|
|
57
|
-
fields: newResult.fields.map((field: any) => ({
|
|
58
|
-
name: field.name,
|
|
59
|
-
dataTypeID: field.dataTypeID,
|
|
60
|
-
})),
|
|
61
|
-
rows: newResult.rows,
|
|
62
|
-
};
|
|
66
|
+
return newResult;
|
|
63
67
|
}
|
|
64
68
|
} catch (err) {
|
|
65
69
|
if (isSuperset(err, PgError)) {
|
|
@@ -92,6 +96,6 @@ export class CachedPool {
|
|
|
92
96
|
}
|
|
93
97
|
|
|
94
98
|
async close() {
|
|
95
|
-
await this.pool
|
|
99
|
+
await disconnectFromDatabase(this.databaseType, this.pool);
|
|
96
100
|
}
|
|
97
101
|
}
|
|
@@ -0,0 +1,340 @@
|
|
|
1
|
+
import { Pool } from "pg";
|
|
2
|
+
import snowflake from "snowflake-sdk";
|
|
3
|
+
import { Client } from "../models/Client";
|
|
4
|
+
import { Pool as MysqlPool } from "mysql2";
|
|
5
|
+
import { BigQuery } from "@google-cloud/bigquery";
|
|
6
|
+
import {
|
|
7
|
+
PostgresConnectionConfig,
|
|
8
|
+
connectToPostgres,
|
|
9
|
+
disconnectFromPostgres,
|
|
10
|
+
formatPostgresConfig,
|
|
11
|
+
getTablesBySchemaPostgres,
|
|
12
|
+
getColumnsByTablePostgres,
|
|
13
|
+
runQueryPostgres,
|
|
14
|
+
getForeignKeysPostgres,
|
|
15
|
+
getSchemaColumnInfoPostgress,
|
|
16
|
+
getSchemasPostgres,
|
|
17
|
+
} from "./Postgres";
|
|
18
|
+
import {
|
|
19
|
+
SnowflakeConnectionConfig,
|
|
20
|
+
connectToSnowflake,
|
|
21
|
+
disconnectFromSnowflake,
|
|
22
|
+
formatSnowflakeConfig,
|
|
23
|
+
getTablesBySchemaSnowflake,
|
|
24
|
+
getColumnsByTableSnowflake,
|
|
25
|
+
runQuerySnowflake,
|
|
26
|
+
getForeignKeysSnowflake,
|
|
27
|
+
getSchemaColumnInfoSnowflake,
|
|
28
|
+
getSchemasSnowflake,
|
|
29
|
+
} from "./Snowflake";
|
|
30
|
+
import {
|
|
31
|
+
formatBigQueryConfig,
|
|
32
|
+
BigQueryConfig,
|
|
33
|
+
connectToBigQuery,
|
|
34
|
+
runQueryBigQuery,
|
|
35
|
+
getTablesBySchemaBigQuery,
|
|
36
|
+
getColumnsByTableBigQuery,
|
|
37
|
+
getForeignKeysBigQuery,
|
|
38
|
+
getSchemaColumnInfoBigQuery,
|
|
39
|
+
getSchemaBigQuery,
|
|
40
|
+
} from "./BigQuery";
|
|
41
|
+
import {
|
|
42
|
+
MysqlConnectionConfig,
|
|
43
|
+
connectToMysql,
|
|
44
|
+
disconnectFromMysql,
|
|
45
|
+
formatMysqlConfig,
|
|
46
|
+
runQueryMysql,
|
|
47
|
+
getTablesBySchemaMysql,
|
|
48
|
+
getColumnsByTableMysql,
|
|
49
|
+
getForeignKeysMysql,
|
|
50
|
+
getSchemaColumnInfoMysql,
|
|
51
|
+
getSchemasMysql,
|
|
52
|
+
} from "./Mysql";
|
|
53
|
+
|
|
54
|
+
/**
 * Supported database backends. The enum values are the exact strings used
 * in stored connection metadata — hence the mixed casing; "postgres" and
 * "PostgreSQL" are both accepted and treated identically by the dispatch
 * helpers below.
 */
export enum DatabaseType {
  postgres = "postgres",
  postgresql = "PostgreSQL",
  snowflake = "Snowflake",
  bigquery = "BigQuery",
  mysql = "MySQL",
}
|
|
61
|
+
|
|
62
|
+
/**
 * Union of every live connection handle the dispatch helpers can operate
 * on (pg pool, Snowflake connection, BigQuery client, MySQL pool).
 */
export type DatabaseConnection =
  | Pool
  | snowflake.Connection
  | BigQuery
  | MysqlPool;
|
|
68
|
+
|
|
69
|
+
/**
 * Database-agnostic query result: pg-style field descriptors (name plus
 * Postgres type OID) and the rows as plain objects keyed by field name.
 */
export interface QuillQueryResults {
  fields: { name: string; dataTypeID: number }[];
  rows: { [fieldName: string]: any }[];
}
|
|
73
|
+
|
|
74
|
+
export function getDatabaseCredentials(
|
|
75
|
+
databaseType: DatabaseType,
|
|
76
|
+
connectionString: string
|
|
77
|
+
):
|
|
78
|
+
| PostgresConnectionConfig
|
|
79
|
+
| SnowflakeConnectionConfig
|
|
80
|
+
| BigQueryConfig
|
|
81
|
+
| MysqlConnectionConfig
|
|
82
|
+
| undefined {
|
|
83
|
+
switch (databaseType) {
|
|
84
|
+
case "postgres":
|
|
85
|
+
return formatPostgresConfig(connectionString);
|
|
86
|
+
case "PostgreSQL":
|
|
87
|
+
return formatPostgresConfig(connectionString);
|
|
88
|
+
case "Snowflake":
|
|
89
|
+
return formatSnowflakeConfig(connectionString);
|
|
90
|
+
case "BigQuery":
|
|
91
|
+
return formatBigQueryConfig(connectionString);
|
|
92
|
+
case "MySQL":
|
|
93
|
+
return formatMysqlConfig(connectionString);
|
|
94
|
+
default:
|
|
95
|
+
return undefined;
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
export function connectToDatabase(
|
|
100
|
+
databaseType: DatabaseType,
|
|
101
|
+
config:
|
|
102
|
+
| PostgresConnectionConfig
|
|
103
|
+
| SnowflakeConnectionConfig
|
|
104
|
+
| BigQueryConfig
|
|
105
|
+
| MysqlConnectionConfig
|
|
106
|
+
): DatabaseConnection {
|
|
107
|
+
switch (databaseType) {
|
|
108
|
+
case "postgres":
|
|
109
|
+
return connectToPostgres(config as PostgresConnectionConfig);
|
|
110
|
+
case "PostgreSQL":
|
|
111
|
+
return connectToPostgres(config as PostgresConnectionConfig);
|
|
112
|
+
case "Snowflake":
|
|
113
|
+
return connectToSnowflake(config as SnowflakeConnectionConfig);
|
|
114
|
+
case "BigQuery":
|
|
115
|
+
return connectToBigQuery(config as BigQueryConfig);
|
|
116
|
+
case "MySQL":
|
|
117
|
+
return connectToMysql(config as MysqlConnectionConfig);
|
|
118
|
+
default:
|
|
119
|
+
return connectToPostgres(config as PostgresConnectionConfig);
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
export function runQueryByDatabase(
|
|
124
|
+
databaseType: DatabaseType,
|
|
125
|
+
connection: DatabaseConnection,
|
|
126
|
+
sql: string
|
|
127
|
+
): Promise<QuillQueryResults> | undefined {
|
|
128
|
+
switch (databaseType) {
|
|
129
|
+
case "postgres":
|
|
130
|
+
return runQueryPostgres(sql, connection as Pool);
|
|
131
|
+
case "PostgreSQL":
|
|
132
|
+
return runQueryPostgres(sql, connection as Pool);
|
|
133
|
+
case "Snowflake":
|
|
134
|
+
return runQuerySnowflake(sql, connection as snowflake.Connection);
|
|
135
|
+
case "BigQuery":
|
|
136
|
+
return runQueryBigQuery(sql, connection as BigQuery);
|
|
137
|
+
case "MySQL":
|
|
138
|
+
return runQueryMysql(sql, connection as MysqlPool);
|
|
139
|
+
default:
|
|
140
|
+
return undefined;
|
|
141
|
+
}
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
export function disconnectFromDatabase(
|
|
145
|
+
databaseType: DatabaseType,
|
|
146
|
+
database: DatabaseConnection
|
|
147
|
+
) {
|
|
148
|
+
switch (databaseType) {
|
|
149
|
+
case "postgres":
|
|
150
|
+
return disconnectFromPostgres(database as Pool);
|
|
151
|
+
case "PostgreSQL":
|
|
152
|
+
return disconnectFromPostgres(database as Pool);
|
|
153
|
+
case "Snowflake":
|
|
154
|
+
return disconnectFromSnowflake(database as snowflake.Connection);
|
|
155
|
+
case "BigQuery":
|
|
156
|
+
return; // BigQuery does not need to be disconnected
|
|
157
|
+
case "MySQL":
|
|
158
|
+
return disconnectFromMysql(database as MysqlPool);
|
|
159
|
+
default:
|
|
160
|
+
return undefined;
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
export async function getSchemasByDatabase(
|
|
165
|
+
databaseType: DatabaseType,
|
|
166
|
+
connection: DatabaseConnection
|
|
167
|
+
): Promise<string[] | undefined> {
|
|
168
|
+
switch (databaseType) {
|
|
169
|
+
case "postgres":
|
|
170
|
+
return getSchemasPostgres(connection as Pool);
|
|
171
|
+
case "PostgreSQL":
|
|
172
|
+
return getSchemasPostgres(connection as Pool);
|
|
173
|
+
case "Snowflake":
|
|
174
|
+
return getSchemasSnowflake(connection as snowflake.Connection);
|
|
175
|
+
case "BigQuery":
|
|
176
|
+
return getSchemaBigQuery(connection as BigQuery);
|
|
177
|
+
case "MySQL":
|
|
178
|
+
return getSchemasMysql(connection as MysqlPool);
|
|
179
|
+
default:
|
|
180
|
+
return undefined;
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
// INFORMATION SCHEMA SELECTS
|
|
185
|
+
export async function getTablesBySchemaByDatabase(
|
|
186
|
+
databaseType: DatabaseType,
|
|
187
|
+
connection: DatabaseConnection,
|
|
188
|
+
schemaName: string
|
|
189
|
+
): Promise<string[] | undefined> {
|
|
190
|
+
switch (databaseType) {
|
|
191
|
+
case "postgres":
|
|
192
|
+
return getTablesBySchemaPostgres(connection as Pool, schemaName);
|
|
193
|
+
case "PostgreSQL":
|
|
194
|
+
return getTablesBySchemaPostgres(connection as Pool, schemaName);
|
|
195
|
+
case "Snowflake":
|
|
196
|
+
return getTablesBySchemaSnowflake(
|
|
197
|
+
connection as snowflake.Connection,
|
|
198
|
+
schemaName
|
|
199
|
+
);
|
|
200
|
+
case "BigQuery":
|
|
201
|
+
return getTablesBySchemaBigQuery(connection as BigQuery, schemaName);
|
|
202
|
+
case "MySQL":
|
|
203
|
+
return getTablesBySchemaMysql(connection as MysqlPool, schemaName);
|
|
204
|
+
default:
|
|
205
|
+
return undefined;
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
// INFORMATION SCHEMA SELECTS
|
|
210
|
+
export async function getColumnsByTableByDatabase(
|
|
211
|
+
databaseType: DatabaseType,
|
|
212
|
+
connection: DatabaseConnection,
|
|
213
|
+
schemaName: string,
|
|
214
|
+
tableName: string
|
|
215
|
+
): Promise<string[] | undefined> {
|
|
216
|
+
switch (databaseType) {
|
|
217
|
+
case "postgres":
|
|
218
|
+
return getColumnsByTablePostgres(
|
|
219
|
+
connection as Pool,
|
|
220
|
+
schemaName,
|
|
221
|
+
tableName
|
|
222
|
+
);
|
|
223
|
+
case "PostgreSQL":
|
|
224
|
+
return getColumnsByTablePostgres(
|
|
225
|
+
connection as Pool,
|
|
226
|
+
schemaName,
|
|
227
|
+
tableName
|
|
228
|
+
);
|
|
229
|
+
case "Snowflake":
|
|
230
|
+
return getColumnsByTableSnowflake(
|
|
231
|
+
connection as snowflake.Connection,
|
|
232
|
+
schemaName,
|
|
233
|
+
tableName
|
|
234
|
+
);
|
|
235
|
+
case "BigQuery":
|
|
236
|
+
return getColumnsByTableBigQuery(
|
|
237
|
+
connection as BigQuery,
|
|
238
|
+
schemaName,
|
|
239
|
+
tableName
|
|
240
|
+
);
|
|
241
|
+
case "MySQL":
|
|
242
|
+
return getColumnsByTableMysql(
|
|
243
|
+
connection as MysqlPool,
|
|
244
|
+
schemaName,
|
|
245
|
+
tableName
|
|
246
|
+
);
|
|
247
|
+
default:
|
|
248
|
+
return undefined;
|
|
249
|
+
}
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
export async function getForiegnKeysByDatabase(
|
|
253
|
+
databaseType: DatabaseType,
|
|
254
|
+
connection: DatabaseConnection,
|
|
255
|
+
schemaName: string,
|
|
256
|
+
tableName: string,
|
|
257
|
+
primaryKey: string
|
|
258
|
+
): Promise<string[] | undefined> {
|
|
259
|
+
switch (databaseType) {
|
|
260
|
+
case "postgres":
|
|
261
|
+
return getForeignKeysPostgres(
|
|
262
|
+
connection as Pool,
|
|
263
|
+
schemaName,
|
|
264
|
+
tableName,
|
|
265
|
+
primaryKey
|
|
266
|
+
);
|
|
267
|
+
case "PostgreSQL":
|
|
268
|
+
return getForeignKeysPostgres(
|
|
269
|
+
connection as Pool,
|
|
270
|
+
schemaName,
|
|
271
|
+
tableName,
|
|
272
|
+
primaryKey
|
|
273
|
+
);
|
|
274
|
+
case "Snowflake":
|
|
275
|
+
return getForeignKeysSnowflake(
|
|
276
|
+
connection as snowflake.Connection,
|
|
277
|
+
schemaName,
|
|
278
|
+
tableName,
|
|
279
|
+
primaryKey
|
|
280
|
+
);
|
|
281
|
+
case "BigQuery":
|
|
282
|
+
return getForeignKeysBigQuery(
|
|
283
|
+
connection as BigQuery,
|
|
284
|
+
schemaName,
|
|
285
|
+
tableName,
|
|
286
|
+
primaryKey
|
|
287
|
+
);
|
|
288
|
+
case "MySQL":
|
|
289
|
+
return getForeignKeysMysql(
|
|
290
|
+
connection as MysqlPool,
|
|
291
|
+
schemaName,
|
|
292
|
+
tableName,
|
|
293
|
+
primaryKey
|
|
294
|
+
);
|
|
295
|
+
default:
|
|
296
|
+
return undefined;
|
|
297
|
+
}
|
|
298
|
+
}
|
|
299
|
+
|
|
300
|
+
export function getColumnInfoBySchemaByDatabase(
|
|
301
|
+
databaseType: DatabaseType,
|
|
302
|
+
connection: DatabaseConnection,
|
|
303
|
+
schemaName: string,
|
|
304
|
+
tables: string[]
|
|
305
|
+
) {
|
|
306
|
+
switch (databaseType) {
|
|
307
|
+
case "postgres":
|
|
308
|
+
return getSchemaColumnInfoPostgress(
|
|
309
|
+
connection as Pool,
|
|
310
|
+
schemaName,
|
|
311
|
+
tables
|
|
312
|
+
);
|
|
313
|
+
case "PostgreSQL":
|
|
314
|
+
return getSchemaColumnInfoPostgress(
|
|
315
|
+
connection as Pool,
|
|
316
|
+
schemaName,
|
|
317
|
+
tables
|
|
318
|
+
);
|
|
319
|
+
case "Snowflake":
|
|
320
|
+
return getSchemaColumnInfoSnowflake(
|
|
321
|
+
connection as snowflake.Connection,
|
|
322
|
+
schemaName,
|
|
323
|
+
tables
|
|
324
|
+
);
|
|
325
|
+
case "BigQuery":
|
|
326
|
+
return getSchemaColumnInfoBigQuery(
|
|
327
|
+
connection as BigQuery,
|
|
328
|
+
schemaName,
|
|
329
|
+
tables
|
|
330
|
+
);
|
|
331
|
+
case "MySQL":
|
|
332
|
+
return getSchemaColumnInfoMysql(
|
|
333
|
+
connection as MysqlPool,
|
|
334
|
+
schemaName,
|
|
335
|
+
tables
|
|
336
|
+
);
|
|
337
|
+
default:
|
|
338
|
+
return undefined;
|
|
339
|
+
}
|
|
340
|
+
}
|