@quillsql/node 0.3.7 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,213 @@
+ import { Client } from "../models/Client";
+ import { BigQuery } from "@google-cloud/bigquery";
+ import { QuillQueryResults } from "./DatabaseHelper";
+ import { capitalize, depluralize } from "../utils/textProcessing";
+
+ export interface BigQueryConfig {
+   datasetName: string;
+   projectId: string;
+   credentials: any;
+ }
+
+ export function formatBigQueryConfig(connectionString: string): BigQueryConfig {
+   const jsonStartIndex = connectionString.indexOf("{");
+   if (jsonStartIndex === -1) {
+     throw new Error("Invalid input string. No JSON data found.");
+   }
+
+   const datasetName = connectionString.substring(0, jsonStartIndex).trim();
+   const jsonString = connectionString.substring(jsonStartIndex);
+
+   try {
+     const serviceAccount = JSON.parse(jsonString);
+
+     // Validate if required fields are present
+     if (!serviceAccount.project_id || !serviceAccount.private_key) {
+       throw new Error(
+         "Invalid service account JSON. Required fields are missing."
+       );
+     }
+
+     return {
+       datasetName,
+       projectId: serviceAccount.project_id,
+       credentials: serviceAccount,
+     };
+   } catch (error) {
+     throw new Error("Failed to parse JSON string: " + error);
+   }
+ }
+
+ export function connectToBigQuery(config: BigQueryConfig) {
+   return new BigQuery(config);
+ }
+
+ export async function runQueryBigQuery(
+   sql: string,
+   bigQuery: BigQuery
+ ): Promise<QuillQueryResults> {
+   const rows = await bigQuery.query(sql);
+   if (!rows[0] || rows[0].length === 0) return { fields: [], rows: [] };
+   const typedRows = rows[0] as { [fieldName: string]: any }[];
+   const fields = Object.keys(typedRows[0]).map((name: string) => ({
+     name,
+     dataTypeID: 1043,
+   }));
+   fields.forEach((field) => {
+     typedRows.some((row) => {
+       if (row[field.name] === null) return false;
+       field.dataTypeID = inferType(row[field.name]);
+       return true;
+     });
+   });
+   return {
+     fields: fields,
+     rows: typedRows,
+   };
+ }
+
+ export async function getSchemaBigQuery(bigQuery: BigQuery): Promise<string[]> {
+   const [datasets] = await bigQuery.getDatasets();
+   const definedDatasets = datasets.map((dataset) => dataset.id);
+   const filtered: string[] = [];
+   definedDatasets.forEach((dataset) => {
+     if (dataset !== undefined) {
+       filtered.push(dataset);
+     }
+   });
+   return filtered;
+ }
+
+ export async function getTablesBySchemaBigQuery(
+   bigQuery: BigQuery,
+   schemaNames: string[]
+ ): Promise<{ tableName: string; schemaName: string }[]> {
+   const allColumns = await Promise.all(
+     schemaNames.map(async (schema) => {
+       const sql = `SELECT table_name FROM ${schema}.INFORMATION_SCHEMA.TABLES WHERE table_type = 'BASE TABLE'`;
+       const rows = await bigQuery.query(sql);
+       return rows[0].map((row) => {
+         return { tableName: row.table_name, schemaName: schema };
+       });
+     })
+   );
+   return allColumns.flat();
+ }
+
+ export async function getColumnsByTableBigQuery(
+   bigQuery: BigQuery,
+   schemaName: string,
+   tableName: string
+ ): Promise<string[]> {
+   const sql = `SELECT column_name FROM ${schemaName}.INFORMATION_SCHEMA.COLUMNS WHERE table_name = '${tableName}'`;
+   const rows = await bigQuery.query(sql);
+   return rows[0].map((row: any) => row.column_name);
+ }
+
+ export async function getForeignKeysBigQuery(
+   connection: BigQuery,
+   schemaName: string,
+   tableName: string,
+   primaryKey: string
+ ): Promise<string[]> {
+   const depluralizedTableName = depluralize(tableName);
+   let sql = `SELECT column_name FROM ${schemaName}.INFORMATION_SCHEMA.COLUMNS
+     WHERE table_name != '${tableName}'
+     and (column_name = '${primaryKey}'
+     or column_name = '${depluralizedTableName}_${primaryKey}'
+     or column_name = '${depluralizedTableName}${capitalize(primaryKey)}')`;
+   const results = await runQueryBigQuery(sql, connection);
+   let foreignKeysString = results.rows.map((key) => {
+     return key.column_name;
+   });
+   foreignKeysString = foreignKeysString.filter(
+     (key) => key !== "id" && key !== "_id_"
+   );
+   foreignKeysString = [...new Set(foreignKeysString)];
+   if (foreignKeysString.length === 0) {
+     sql = `SELECT column_name FROM ${schemaName}.INFORMATION_SCHEMA.COLUMNS
+       WHERE table_name != '${tableName}'
+       and (column_name like '${depluralizedTableName}%'
+       or column_name like '%_id'
+       or column_name like '%Id'
+       or column_name like '%_${primaryKey}'
+       or column_name like '%${capitalize(primaryKey)}')`;
+     const results = await runQueryBigQuery(sql, connection);
+     foreignKeysString = results.rows.map((key) => {
+       return key.column_name;
+     });
+     foreignKeysString = [...new Set(foreignKeysString)];
+   }
+   return foreignKeysString;
+ }
+
+ export async function getSchemaColumnInfoBigQuery(
+   connection: BigQuery,
+   schemaName: string,
+   tableNames: { tableName: string; schemaName: string }[]
+ ): Promise<
+   { tableName: string; columns: { columnName: string; dataTypeID: number }[] }[]
+ > {
+   const allColumns = await Promise.all(
+     tableNames.map(async (tableName) => {
+       const query = `
+         SELECT column_name as columnName, data_type as dataType
+         FROM ${tableName.schemaName}.INFORMATION_SCHEMA.COLUMNS
+         WHERE table_name = '${tableName.tableName}'
+         ORDER BY ordinal_position;
+       `;
+       const results = await runQueryBigQuery(query, connection);
+       return {
+         tableName: `${tableName.schemaName}.${tableName.tableName}`,
+         displayName: `${tableName.schemaName}.${tableName.tableName}`,
+         columns: results.rows.map((row: any) => ({
+           columnName: row.columnName,
+           displayName: row.columnName,
+           dataTypeID: convertBigQueryTypeToPostgresOID(row.dataType),
+           fieldType: row.dataType,
+         })),
+       };
+     })
+   );
+   return allColumns;
+ }
+
+ function convertBigQueryTypeToPostgresOID(type: string): number {
+   const typeToOidMap: { [key: string]: number } = {
+     VARCHAR: 1043,
+     INTEGER: 23,
+     FLOAT: 700,
+     TIMESTAMP: 1114,
+     DATE: 1082,
+   };
+
+   // Default to the OID for 'varchar' (1043) if the type is not recognized
+   return typeToOidMap[type.toUpperCase()] || 1043;
+ }
+
+ function inferType(elem: any) {
+   if (typeof elem === "number") {
+     // Check if the number is a float or an integer
+     return Number.isInteger(elem) ? 23 : 700; // 23: integer, 700: real
+   }
+   if (typeof elem === "object") {
+     if (/^\d{4}-\d{2}-\d{2}$/.test(elem.value)) return 1082; // date
+   }
+   if (typeof elem === "string") {
+     // Attempt to infer date, time, and timestamp formats
+     // Date in YYYY-MM-DD format
+     if (/^\d{4}-\d{2}-\d{2}$/.test(elem)) return 1082; // date
+     // Date in MM/DD/YYYY or MM/DD/YY format
+     if (/^\d{2}\/\d{2}\/\d{2,4}$/.test(elem)) return 1082; // date
+     // Timestamp in YYYY-MM-DDTHH:MM:SS[.fraction] format
+     if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?$/.test(elem)) return 1114; // timestamp without timezone
+     // Timestamp in YYYY-MM-DDTHH:MM:SS[.fraction]Z format
+     if (/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/.test(elem))
+       return 1184; // timestamp with timezone
+     // Time in HH:MM:SS format
+     if (/^\d{2}:\d{2}:\d{2}$/.test(elem)) return 1083; // time
+     return 1043; // varchar
+   }
+   // Add more specific cases or different data types as needed
+   return 1043; // default or unknown type
+ }
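
The hunk above adds the BigQuery adapter. As a rough usage sketch (not taken from the package's documentation; the relative import path, dataset name, and service-account values are illustrative placeholders): the connection string packs a dataset name immediately followed by the raw service-account JSON, formatBigQueryConfig splits and validates it, connectToBigQuery forwards the parsed config to the BigQuery client constructor, and runQueryBigQuery normalizes results into the shared { fields, rows } shape used across adapters.

```ts
import {
  formatBigQueryConfig,
  connectToBigQuery,
  runQueryBigQuery,
} from "./BigQuery"; // illustrative path -- adjust to wherever the module lives

async function demo() {
  // Expected shape: "<datasetName>{...service account JSON...}". All values are placeholders.
  const connectionString =
    'analytics{"project_id":"my-project","private_key":"-----BEGIN PRIVATE KEY-----\\n...","client_email":"svc@my-project.iam.gserviceaccount.com"}';

  // Throws if no "{" is found or if project_id / private_key are missing.
  const config = formatBigQueryConfig(connectionString);

  // connectToBigQuery just does `new BigQuery(config)`, so projectId and
  // credentials are handed straight to the Google SDK.
  const bigQuery = connectToBigQuery(config);

  // Rows come back normalized; each column gets a Postgres-style dataTypeID
  // inferred from its first non-null value (defaulting to 1043, i.e. varchar).
  const results = await runQueryBigQuery(
    "SELECT 1 AS one, CURRENT_DATE() AS today",
    bigQuery
  );
  console.log(results.fields); // e.g. [{ name: "one", dataTypeID: 23 }, ...]
}

demo().catch(console.error);
```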
@@ -3,6 +3,13 @@ import { Mapable, CacheCredentials } from "../models/Cache";
  import { QuillConfig } from "../models/Quill";
  import { createClient } from "redis";
  import { isSuperset } from "../utils/Error";
+ import {
+   DatabaseConnection,
+   DatabaseType,
+   connectToDatabase,
+   disconnectFromDatabase,
+   runQueryByDatabase,
+ } from "./DatabaseHelper";

  class PgError extends Error {
    code?: string;
@@ -21,14 +28,20 @@ class PgError extends Error {
  /** The TTL for new cache entries (default: 1h) */
  const DEFAULT_CACHE_TTL = 24 * 60 * 60;

- export class CachedPool {
-   public pool: Pool;
+ export class CachedConnection {
+   public databaseType: DatabaseType;
+   public pool: DatabaseConnection;
    public orgId: any;
    public ttl: number;
    public cache: Mapable | null;

-   constructor(config: any, cacheConfig: Partial<CacheCredentials> = {}) {
-     this.pool = new Pool(config);
+   constructor(
+     databaseType: DatabaseType,
+     config: any,
+     cacheConfig: Partial<CacheCredentials> = {}
+   ) {
+     this.databaseType = databaseType;
+     this.pool = connectToDatabase(databaseType, config);
      this.ttl = cacheConfig?.ttl ?? DEFAULT_CACHE_TTL;
      this.cache = this.getCache(cacheConfig);
    }
@@ -36,30 +49,21 @@ export class CachedPool {
    public async query(text: string, values?: any[]): Promise<any> {
      try {
        if (!this.cache) {
-         const results = await this.pool.query(text, values);
-         return {
-           fields: results.fields.map((field: any) => ({
-             name: field.name,
-             dataTypeID: field.dataTypeID,
-           })),
-           rows: results.rows,
-         };
+         return await runQueryByDatabase(this.databaseType, this.pool, text);
        }
        const key: string = `${this.orgId}:${text}`;
        const cachedResult: string | null = await this.cache.get(key);
        if (cachedResult) {
          return JSON.parse(cachedResult);
        } else {
-         const newResult: any = await this.pool.query(text, values);
+         const newResult = await runQueryByDatabase(
+           this.databaseType,
+           this.pool,
+           text
+         );
          const newResultString: string = JSON.stringify(newResult);
          await this.cache.set(key, newResultString, "EX", DEFAULT_CACHE_TTL);
-         return {
-           fields: newResult.fields.map((field: any) => ({
-             name: field.name,
-             dataTypeID: field.dataTypeID,
-           })),
-           rows: newResult.rows,
-         };
+         return newResult;
        }
      } catch (err) {
        if (isSuperset(err, PgError)) {
@@ -92,6 +96,6 @@ export class CachedPool {
    }

    async close() {
-     await this.pool.end();
+     await disconnectFromDatabase(this.databaseType, this.pool);
    }
  }
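
The CachedPool → CachedConnection rename above is the consumer-facing side of the same change: the constructor now takes a DatabaseType plus an engine-specific config, opens the connection through connectToDatabase, and query()/close() dispatch through the DatabaseHelper layer instead of calling pg's Pool directly. A minimal sketch, assuming the illustrative import paths below and that omitting cacheConfig leaves the Redis cache unset so queries go straight to the database:

```ts
import { CachedConnection } from "./Cache"; // illustrative path
import { DatabaseType, getDatabaseCredentials } from "./DatabaseHelper";

async function demo() {
  // Placeholder connection string; the exact format is whatever formatPostgresConfig expects.
  const config = getDatabaseCredentials(
    DatabaseType.postgres,
    "postgresql://user:pass@localhost:5432/app"
  );

  // Assumption: with no cacheConfig, this.cache stays null and query() skips Redis.
  const db = new CachedConnection(DatabaseType.postgres, config);

  // Same normalized { fields, rows } shape for every supported engine.
  const results = await db.query("SELECT id, email FROM users LIMIT 10");
  console.log(results.rows);

  await db.close(); // disconnectFromDatabase() under the hood
}

demo().catch(console.error);
```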
@@ -0,0 +1,352 @@
+ import { Pool } from "pg";
+ import snowflake from "snowflake-sdk";
+ import { Client } from "../models/Client";
+ import { Pool as MysqlPool } from "mysql2";
+ import { BigQuery } from "@google-cloud/bigquery";
+ import {
+   PostgresConnectionConfig,
+   connectToPostgres,
+   disconnectFromPostgres,
+   formatPostgresConfig,
+   getTablesBySchemaPostgres,
+   getColumnsByTablePostgres,
+   runQueryPostgres,
+   getForeignKeysPostgres,
+   getSchemaColumnInfoPostgress,
+   getSchemasPostgres,
+ } from "./Postgres";
+ import {
+   SnowflakeConnectionConfig,
+   connectToSnowflake,
+   disconnectFromSnowflake,
+   formatSnowflakeConfig,
+   getTablesBySchemaSnowflake,
+   getColumnsByTableSnowflake,
+   runQuerySnowflake,
+   getForeignKeysSnowflake,
+   getSchemaColumnInfoSnowflake,
+   getSchemasSnowflake,
+ } from "./Snowflake";
+ import {
+   formatBigQueryConfig,
+   BigQueryConfig,
+   connectToBigQuery,
+   runQueryBigQuery,
+   getTablesBySchemaBigQuery,
+   getColumnsByTableBigQuery,
+   getForeignKeysBigQuery,
+   getSchemaColumnInfoBigQuery,
+   getSchemaBigQuery,
+ } from "./BigQuery";
+ import {
+   MysqlConnectionConfig,
+   connectToMysql,
+   disconnectFromMysql,
+   formatMysqlConfig,
+   runQueryMysql,
+   getTablesBySchemaMysql,
+   getColumnsByTableMysql,
+   getForeignKeysMysql,
+   getSchemaColumnInfoMysql,
+   getSchemasMysql,
+ } from "./Mysql";
+
+ export enum DatabaseType {
+   postgres = "postgres",
+   postgresql = "postgresql",
+   snowflake = "snowflake",
+   bigquery = "bigquery",
+   mysql = "mysql",
+ }
+
+ // export all database connection types
+ export type DatabaseConnection =
+   | Pool
+   | snowflake.Connection
+   | BigQuery
+   | MysqlPool;
+
+ export interface QuillQueryResults {
+   fields: { name: string; dataTypeID: number }[];
+   rows: { [fieldName: string]: any }[];
+ }
+
+ export function getDatabaseCredentials(
+   databaseType: DatabaseType,
+   connectionString: string
+ ):
+   | PostgresConnectionConfig
+   | SnowflakeConnectionConfig
+   | BigQueryConfig
+   | MysqlConnectionConfig
+   | undefined {
+   switch (databaseType.toLowerCase()) {
+     case "postgres":
+       return formatPostgresConfig(connectionString);
+     case "postgresql":
+       return formatPostgresConfig(connectionString);
+     case "snowflake":
+       return formatSnowflakeConfig(connectionString);
+     case "bigquery":
+       return formatBigQueryConfig(connectionString);
+     case "mysql":
+       return formatMysqlConfig(connectionString);
+     default:
+       return undefined;
+   }
+ }
+
+ export function connectToDatabase(
+   databaseType: DatabaseType,
+   config:
+     | PostgresConnectionConfig
+     | SnowflakeConnectionConfig
+     | BigQueryConfig
+     | MysqlConnectionConfig
+ ): DatabaseConnection {
+   switch (databaseType.toLowerCase()) {
+     case "postgres":
+       return connectToPostgres(config as PostgresConnectionConfig);
+     case "postgresql":
+       return connectToPostgres(config as PostgresConnectionConfig);
+     case "snowflake":
+       return connectToSnowflake(config as SnowflakeConnectionConfig);
+     case "bigquery":
+       return connectToBigQuery(config as BigQueryConfig);
+     case "mysql":
+       return connectToMysql(config as MysqlConnectionConfig);
+     default:
+       return connectToPostgres(config as PostgresConnectionConfig);
+   }
+ }
+
+ export function runQueryByDatabase(
+   databaseType: DatabaseType,
+   connection: DatabaseConnection,
+   sql: string
+ ): Promise<QuillQueryResults> | undefined {
+   switch (databaseType.toLowerCase()) {
+     case "postgres":
+       return runQueryPostgres(sql, connection as Pool);
+     case "postgresql":
+       return runQueryPostgres(sql, connection as Pool);
+     case "snowflake":
+       return runQuerySnowflake(sql, connection as snowflake.Connection);
+     case "bigquery":
+       return runQueryBigQuery(sql, connection as BigQuery);
+     case "mysql":
+       return runQueryMysql(sql, connection as MysqlPool);
+     default:
+       return undefined;
+   }
+ }
+
+ export function disconnectFromDatabase(
+   databaseType: DatabaseType,
+   database: DatabaseConnection
+ ) {
+   switch (databaseType.toLowerCase()) {
+     case "postgres":
+       return disconnectFromPostgres(database as Pool);
+     case "postgresql":
+       return disconnectFromPostgres(database as Pool);
+     case "snowflake":
+       return disconnectFromSnowflake(database as snowflake.Connection);
+     case "bigquery":
+       return; // BigQuery does not need to be disconnected
+     case "mysql":
+       return disconnectFromMysql(database as MysqlPool);
+     default:
+       return undefined;
+   }
+ }
+
+ export async function getSchemasByDatabase(
+   databaseType: DatabaseType,
+   connection: DatabaseConnection
+ ): Promise<string[] | undefined> {
+   switch (databaseType.toLowerCase()) {
+     case "postgres":
+       return getSchemasPostgres(connection as Pool);
+     case "postgresql":
+       return getSchemasPostgres(connection as Pool);
+     case "snowflake":
+       return getSchemasSnowflake(connection as snowflake.Connection);
+     case "bigquery":
+       return getSchemaBigQuery(connection as BigQuery);
+     case "mysql":
+       return getSchemasMysql(connection as MysqlPool);
+     default:
+       return undefined;
+   }
+ }
+
+ // INFORMATION SCHEMA SELECTS
+ export async function getTablesBySchemaByDatabase(
+   databaseType: DatabaseType,
+   connection: DatabaseConnection,
+   schemaName: string | string[]
+ ): Promise<string[] | { tableName: string; schemaName: string }[] | undefined> {
+   switch (databaseType.toLowerCase()) {
+     case "postgres":
+       return getTablesBySchemaPostgres(
+         connection as Pool,
+         schemaName as string[]
+       );
+     case "postgresql":
+       return getTablesBySchemaPostgres(
+         connection as Pool,
+         schemaName as string[]
+       );
+     case "snowflake":
+       return getTablesBySchemaSnowflake(
+         connection as snowflake.Connection,
+         schemaName as string[]
+       );
+     case "bigquery":
+       return getTablesBySchemaBigQuery(
+         connection as BigQuery,
+         schemaName as string[]
+       );
+     case "mysql":
+       return getTablesBySchemaMysql(
+         connection as MysqlPool,
+         schemaName as string[]
+       );
+     default:
+       return undefined;
+   }
+ }
+
+ // INFORMATION SCHEMA SELECTS
+ export async function getColumnsByTableByDatabase(
+   databaseType: DatabaseType,
+   connection: DatabaseConnection,
+   schemaName: string,
+   tableName: string
+ ): Promise<string[] | undefined> {
+   switch (databaseType.toLowerCase()) {
+     case "postgres":
+       return getColumnsByTablePostgres(
+         connection as Pool,
+         schemaName,
+         tableName
+       );
+     case "postgresql":
+       return getColumnsByTablePostgres(
+         connection as Pool,
+         schemaName,
+         tableName
+       );
+     case "snowflake":
+       return getColumnsByTableSnowflake(
+         connection as snowflake.Connection,
+         schemaName,
+         tableName
+       );
+     case "bigquery":
+       return getColumnsByTableBigQuery(
+         connection as BigQuery,
+         schemaName,
+         tableName
+       );
+     case "mysql":
+       return getColumnsByTableMysql(
+         connection as MysqlPool,
+         schemaName,
+         tableName
+       );
+     default:
+       return undefined;
+   }
+ }
+
+ export async function getForiegnKeysByDatabase(
+   databaseType: DatabaseType,
+   connection: DatabaseConnection,
+   schemaName: string,
+   tableName: string,
+   primaryKey: string
+ ): Promise<string[] | undefined> {
+   switch (databaseType.toLowerCase()) {
+     case "postgres":
+       return getForeignKeysPostgres(
+         connection as Pool,
+         schemaName,
+         tableName,
+         primaryKey
+       );
+     case "postgresql":
+       return getForeignKeysPostgres(
+         connection as Pool,
+         schemaName,
+         tableName,
+         primaryKey
+       );
+     case "snowflake":
+       return getForeignKeysSnowflake(
+         connection as snowflake.Connection,
+         schemaName,
+         tableName,
+         primaryKey
+       );
+     case "bigquery":
+       return getForeignKeysBigQuery(
+         connection as BigQuery,
+         schemaName,
+         tableName,
+         primaryKey
+       );
+     case "mysql":
+       return getForeignKeysMysql(
+         connection as MysqlPool,
+         schemaName,
+         tableName,
+         primaryKey
+       );
+     default:
+       return undefined;
+   }
+ }
+
+ export function getColumnInfoBySchemaByDatabase(
+   databaseType: DatabaseType,
+   connection: DatabaseConnection,
+   schemaName: string,
+   tables: string[] | { tableName: string; schemaName: string }[]
+ ) {
+   switch (databaseType.toLowerCase()) {
+     case "postgres":
+       return getSchemaColumnInfoPostgress(
+         connection as Pool,
+         schemaName,
+         tables as { tableName: string; schemaName: string }[]
+       );
+     case "postgresql":
+       return getSchemaColumnInfoPostgress(
+         connection as Pool,
+         schemaName,
+         tables as { tableName: string; schemaName: string }[]
+       );
+     case "snowflake":
+       return getSchemaColumnInfoSnowflake(
+         connection as snowflake.Connection,
+         schemaName,
+         tables as { tableName: string; schemaName: string }[]
+       );
+     case "bigquery":
+       return getSchemaColumnInfoBigQuery(
+         connection as BigQuery,
+         schemaName,
+         tables as { tableName: string; schemaName: string }[]
+       );
+     case "mysql":
+       return getSchemaColumnInfoMysql(
+         connection as MysqlPool,
+         schemaName,
+         tables as { tableName: string; schemaName: string }[]
+       );
+     default:
+       return undefined;
+   }
+ }
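
The file above is the new dispatch layer: each exported *ByDatabase function switches on DatabaseType and forwards to the matching Postgres, Snowflake, BigQuery, or MySQL helper, returning undefined for unrecognized engines. A hedged end-to-end sketch of the connect → query → introspect → disconnect lifecycle (the relative import path and connection strings are placeholders, not documented API):

```ts
import {
  DatabaseType,
  getDatabaseCredentials,
  connectToDatabase,
  runQueryByDatabase,
  getSchemasByDatabase,
  getTablesBySchemaByDatabase,
  disconnectFromDatabase,
} from "./DatabaseHelper"; // illustrative path

async function introspect(databaseType: DatabaseType, connectionString: string) {
  // Parse engine-specific credentials; undefined means the type isn't supported.
  const config = getDatabaseCredentials(databaseType, connectionString);
  if (!config) throw new Error(`Unsupported database type: ${databaseType}`);

  const connection = connectToDatabase(databaseType, config);
  try {
    // Every dispatcher returns undefined for an unknown type, hence the optional chaining.
    const ping = await runQueryByDatabase(databaseType, connection, "SELECT 1");
    console.log(ping?.rows);

    const schemas = await getSchemasByDatabase(databaseType, connection);
    const tables = await getTablesBySchemaByDatabase(
      databaseType,
      connection,
      schemas ?? []
    );
    console.log(tables);
  } finally {
    // A no-op for BigQuery; pools and connections are closed for the other engines.
    await disconnectFromDatabase(databaseType, connection);
  }
}
```

Note that the foreign-key dispatcher is exported as getForiegnKeysByDatabase (spelling as in the source), so callers have to import it under that name.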