@malloy-publisher/server 0.0.119 → 0.0.121

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. package/dist/app/api-doc.yaml +324 -335
  2. package/dist/app/assets/{HomePage-BxFnfH3M.js → HomePage-z6NLKLPp.js} +1 -1
  3. package/dist/app/assets/{MainPage-D301Y0mT.js → MainPage-C9McOjLb.js} +2 -2
  4. package/dist/app/assets/{ModelPage-Df8ivC1J.js → ModelPage-DjlTuT2G.js} +1 -1
  5. package/dist/app/assets/{PackagePage-CE41SCV_.js → PackagePage-CDh_gnAZ.js} +1 -1
  6. package/dist/app/assets/ProjectPage-vyvZZWAB.js +1 -0
  7. package/dist/app/assets/{RouteError-l_WGtNhS.js → RouteError-FbxztVnz.js} +1 -1
  8. package/dist/app/assets/{WorkbookPage-CY-1oBvt.js → WorkbookPage-DNXFxaeZ.js} +1 -1
  9. package/dist/app/assets/{index-D5BBaLz8.js → index-BMyI9XZS.js} +1 -1
  10. package/dist/app/assets/{index-DlZbNvNc.js → index-DHFp2DLx.js} +1 -1
  11. package/dist/app/assets/{index-DjbXd602.js → index-a6hx_UrL.js} +113 -113
  12. package/dist/app/assets/{index.umd-DQiSWsWe.js → index.umd-Cv1NyZL8.js} +1 -1
  13. package/dist/app/index.html +1 -1
  14. package/dist/server.js +35395 -144722
  15. package/k6-tests/common.ts +12 -3
  16. package/package.json +1 -1
  17. package/src/controller/connection.controller.ts +82 -72
  18. package/src/controller/query.controller.ts +1 -1
  19. package/src/server.ts +6 -48
  20. package/src/service/connection.ts +384 -305
  21. package/src/service/db_utils.ts +416 -301
  22. package/src/service/package.spec.ts +8 -97
  23. package/src/service/package.ts +24 -46
  24. package/src/service/project.ts +8 -24
  25. package/src/service/project_store.ts +0 -1
  26. package/dist/app/assets/ProjectPage-DA66xbmQ.js +0 -1
  27. package/src/controller/schedule.controller.ts +0 -21
  28. package/src/service/scheduler.ts +0 -190
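
The largest source change in this release is package/src/service/db_utils.ts, shown below. Schema and table introspection no longer opens its own pg, mysql2, snowflake-sdk, and trino-client connections; it now reuses the Malloy Connection the server already manages and normalizes results through a shared helper. A minimal sketch of the new call shape follows; the wrapper function and import paths are assumptions for illustration, not part of the package:

    import { Connection } from "@malloydata/malloy";
    import { ApiConnection } from "./service/model";
    import {
        getSchemasForConnection,
        getTablesForSchema,
    } from "./service/db_utils";

    // Hypothetical caller: both helpers now take the live Malloy Connection
    // alongside the stored ApiConnection config (new in this release).
    async function describeConnection(
        apiConnection: ApiConnection,
        malloyConnection: Connection,
    ): Promise<void> {
        const schemas = await getSchemasForConnection(
            apiConnection,
            malloyConnection,
        );
        for (const schema of schemas) {
            // Each table entry now carries a resource path plus a column list.
            const tables = await getTablesForSchema(
                apiConnection,
                schema.name ?? "",
                malloyConnection,
            );
            console.log(schema.name, tables.map((t) => t.resource));
        }
    }
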
package/src/service/db_utils.ts
@@ -1,141 +1,90 @@
  import { BigQuery } from "@google-cloud/bigquery";
- import fs from "fs";
- import os from "os";
- import path from "path";
- import { Pool } from "pg";
- import * as snowflake from "snowflake-sdk";
- import { v4 as uuidv4 } from "uuid";
+ import { Connection, TableSourceDef } from "@malloydata/malloy";
  import { components } from "../api";
+ import { ConnectionError } from "../errors";
  import { logger } from "../logger";
- import {
-     ApiConnection,
-     // @ts-expect-error TODO: Fix missing MysqlConnection type in API
-     MysqlConnection,
-     PostgresConnection,
-     SnowflakeConnection,
-     TrinoConnection,
- } from "./model";
+ import { ApiConnection } from "./model";

- import { BasicAuth, Trino } from "trino-client";
+ type ApiSchema = components["schemas"]["Schema"];
+ type ApiTable = components["schemas"]["Table"];
+ type ApiTableSource = components["schemas"]["TableSource"];

- type ApiSchemaName = components["schemas"]["SchemaName"];
-
- async function getPostgresConnection(
-     apiPostgresConnection: PostgresConnection,
- ): Promise<Pool> {
-     return new Pool({
-         user: apiPostgresConnection.userName,
-         host: apiPostgresConnection.host,
-         database: apiPostgresConnection.databaseName,
-         password: apiPostgresConnection.password,
-         port: apiPostgresConnection.port,
-         connectionString: apiPostgresConnection.connectionString,
-         max: 10,
-         idleTimeoutMillis: 30000,
-     });
- }
+ function createBigQueryClient(connection: ApiConnection): BigQuery {
+     if (!connection.bigqueryConnection) {
+         throw new Error("BigQuery connection is required");
+     }

- async function getMysqlConnection(apiMysqlConnection: MysqlConnection) {
-     // Dynamically import mysql2/promise to avoid import issues if not needed
-     const mysql = await import("mysql2/promise");
-     return mysql.createPool({
-         host: apiMysqlConnection.host,
-         port: apiMysqlConnection.port,
-         user: apiMysqlConnection.user,
-         password: apiMysqlConnection.password,
-         database: apiMysqlConnection.database,
-         waitForConnections: true,
-         connectionLimit: 10,
-         queueLimit: 0,
-     });
- }
+     const config: {
+         projectId: string;
+         credentials?: object;
+         keyFilename?: string;
+     } = {
+         projectId: connection.bigqueryConnection.defaultProjectId || "",
+     };

- function getBigqueryConnection(apiConnection: ApiConnection): BigQuery {
-     if (!apiConnection.bigqueryConnection?.serviceAccountKeyJson) {
-         // Use default credentials
-         return new BigQuery();
-     } else {
-         const tmpKeyPath = getTempServiceKeyPath(apiConnection);
-         if (!tmpKeyPath) {
-             throw new Error(
-                 `Failed to create temporary service key file for connection: ${apiConnection.name}`,
+     // Add service account key if provided
+     if (connection.bigqueryConnection.serviceAccountKeyJson) {
+         try {
+             const credentials = JSON.parse(
+                 connection.bigqueryConnection.serviceAccountKeyJson,
              );
-         }
-         return new BigQuery({ keyFilename: tmpKeyPath });
-     }
- }
+             config.credentials = credentials;

- async function getSnowflakeConnection(
-     apiSnowflakeConnection: SnowflakeConnection,
- ): Promise<snowflake.Connection> {
-     if (!apiSnowflakeConnection.account) {
-         throw new Error("Snowflake account is required");
-     }
-     return new Promise((resolve, reject) => {
-         const connection = snowflake.createConnection({
-             account: apiSnowflakeConnection.account as string,
-             username: apiSnowflakeConnection.username,
-             password: apiSnowflakeConnection.password,
-             database: apiSnowflakeConnection.database,
-             warehouse: apiSnowflakeConnection.warehouse || undefined,
-             role: apiSnowflakeConnection.role || undefined,
-         });
-         connection.connect((err, conn) => {
-             if (err) {
-                 reject(err);
-             } else {
-                 resolve(conn);
+             // Use project_id from credentials if defaultProjectId is not set
+             if (!config.projectId && credentials.project_id) {
+                 config.projectId = credentials.project_id;
              }
-         });
-     });
- }

- function getTrinoClient(trinoConn: TrinoConnection) {
-     let auth: BasicAuth;
-     if (trinoConn.server?.startsWith("https://")) {
-         // HTTPS allows password authentication
-         auth = new BasicAuth(trinoConn?.user || "", trinoConn?.password || "");
+             if (!config.projectId) {
+                 throw new Error(
+                     "BigQuery project ID is required. Either set the defaultProjectId in the connection configuration or the project_id in the service account key JSON.",
+                 );
+             }
+         } catch (error) {
+             logger.warn(
+                 "Failed to parse service account key JSON, using default credentials",
+                 { error },
+             );
+         }
+     } else if (
+         Object.keys(connection.bigqueryConnection).length === 0 &&
+         process.env.GOOGLE_APPLICATION_CREDENTIALS
+     ) {
+         // Note: The BigQuery client will infer the project ID from the ADC file.
+         config.keyFilename = process.env.GOOGLE_APPLICATION_CREDENTIALS || "";
      } else {
-         // HTTP only allows username, no password for security
-         auth = new BasicAuth(trinoConn?.user || "");
+         throw new Error(
+             "BigQuery connection is required, either set the bigqueryConnection in the connection configuration or set the GOOGLE_APPLICATION_CREDENTIALS environment variable.",
+         );
      }

-     return Trino.create({
-         server: trinoConn.server,
-         catalog: trinoConn.catalog,
-         schema: trinoConn.schema,
-         auth,
-     });
+     return new BigQuery(config);
+ }
+
+ function standardizeRunSQLResult(result: unknown): unknown[] {
+     // Handle different result formats from malloyConnection.runSQL
+     return Array.isArray(result)
+         ? result
+         : (result as { rows?: unknown[] }).rows || [];
  }

  export async function getSchemasForConnection(
      connection: ApiConnection,
- ): Promise<ApiSchemaName[]> {
+     malloyConnection: Connection,
+ ): Promise<ApiSchema[]> {
      if (connection.type === "bigquery") {
          if (!connection.bigqueryConnection) {
              throw new Error("BigQuery connection is required");
          }
          try {
-             const bigquery = getBigqueryConnection(connection);
-             const projectId = connection.bigqueryConnection.defaultProjectId;
-             const options = projectId ? { projectId } : {};
-             const [datasets] = await bigquery.getDatasets(options);
-             const schemas = await Promise.all(
-                 datasets
-                     .filter((dataset) => dataset.id)
-                     .map(async (dataset) => {
-                         const [metadata] = await dataset.getMetadata();
-                         return {
-                             name: dataset.id,
-                             isHidden: false,
-                             isDefault: false,
-                             // Include description from dataset metadata if available
-                             description: (metadata as { description?: string })
-                                 ?.description,
-                         };
-                     }),
-             );
-             return schemas;
+             const bigquery = createBigQueryClient(connection);
+             const [datasets] = await bigquery.getDatasets();
+
+             return datasets.map((dataset) => ({
+                 name: dataset.id,
+                 isHidden: false,
+                 isDefault: false,
+             }));
          } catch (error) {
              console.error(
                  `Error getting schemas for BigQuery connection ${connection.name}:`,
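
The hunk above also replaces the temp-file flow (getTempServiceKeyPath wrote the service account key to disk) with an in-memory config. createBigQueryClient resolves credentials in a fixed order: inline serviceAccountKeyJson first, with the key's project_id as a fallback when defaultProjectId is empty; then GOOGLE_APPLICATION_CREDENTIALS when the bigqueryConnection block is empty; otherwise it throws. A sketch of the three branches, with invented config values:

    // 1. Inline key: credentials parsed from JSON; an empty defaultProjectId
    //    falls back to the key's project_id.
    const inlineKey = {
        bigqueryConnection: {
            defaultProjectId: "",
            serviceAccountKeyJson:
                '{"type":"service_account","project_id":"my-proj"}',
        },
    };
    // -> new BigQuery({ projectId: "my-proj", credentials: {...} })

    // 2. Empty bigqueryConnection block plus GOOGLE_APPLICATION_CREDENTIALS:
    //    the client reads the ADC key file and infers the project from it.
    const adcOnly = { bigqueryConnection: {} };
    // -> new BigQuery({ projectId: "", keyFilename: process.env.GOOGLE_APPLICATION_CREDENTIALS })

    // 3. Non-empty block with no usable key and no env var: throws
    //    "BigQuery connection is required, ...".
    const neither = { bigqueryConnection: { defaultProjectId: "" } };
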
@@ -149,67 +98,161 @@ export async function getSchemasForConnection(
          if (!connection.postgresConnection) {
              throw new Error("Postgres connection is required");
          }
-         const pool = await getPostgresConnection(connection.postgresConnection);
-         const res = await pool.query(
-             "SELECT schema_name FROM information_schema.schemata",
-         );
-         return res.rows.map((row) => {
-             return {
-                 name: row.schema_name,
-                 isHidden: ["information_schema", "pg_catalog"].includes(
-                     row.schema_name,
-                 ),
-                 isDefault: row.schema_name === "public",
-             };
-         });
+         try {
+             // Use the connection's runSQL method to query schemas
+             const result = await malloyConnection.runSQL(
+                 "SELECT schema_name as row FROM information_schema.schemata ORDER BY schema_name",
+             );
+
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const schemaName = row as string;
+                 return {
+                     name: schemaName,
+                     isHidden: [
+                         "information_schema",
+                         "pg_catalog",
+                         "pg_toast",
+                     ].includes(schemaName),
+                     isDefault: schemaName === "public",
+                 };
+             });
+         } catch (error) {
+             console.error(
+                 `Error getting schemas for Postgres connection ${connection.name}:`,
+                 error,
+             );
+             throw new Error(
+                 `Failed to get schemas for Postgres connection ${connection.name}: ${(error as Error).message}`,
+             );
+         }
      } else if (connection.type === "mysql") {
          if (!connection.mysqlConnection) {
              throw new Error("Mysql connection is required");
          }
-         return [
-             {
-                 name: connection.mysqlConnection.database || "mysql",
-                 isHidden: false,
-                 isDefault: true,
-             },
-         ];
+         try {
+             // For MySQL, return the database name as the schema
+             return [
+                 {
+                     name: connection.mysqlConnection.database || "mysql",
+                     isHidden: false,
+                     isDefault: true,
+                 },
+             ];
+         } catch (error) {
+             console.error(
+                 `Error getting schemas for MySQL connection ${connection.name}:`,
+                 error,
+             );
+             throw new Error(
+                 `Failed to get schemas for MySQL connection ${connection.name}: ${(error as Error).message}`,
+             );
+         }
      } else if (connection.type === "snowflake") {
          if (!connection.snowflakeConnection) {
              throw new Error("Snowflake connection is required");
          }
-         const snowflakeConn = await getSnowflakeConnection(
-             connection.snowflakeConnection,
-         );
          try {
-             return await getSnowflakeSchemas(snowflakeConn);
-         } finally {
-             snowflakeConn.destroy((error) => {
-                 if (error) {
-                     logger.error(`Error closing SnowflakeConnection: ${error}`);
-                 }
+             // Use the connection's runSQL method to query schemas
+             const result = await malloyConnection.runSQL("SHOW SCHEMAS");
+
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return {
+                     name: typedRow.name as string,
+                     isHidden: ["SNOWFLAKE", ""].includes(typedRow.owner as string),
+                     isDefault: typedRow.isDefault === "Y",
+                 };
              });
+         } catch (error) {
+             console.error(
+                 `Error getting schemas for Snowflake connection ${connection.name}:`,
+                 error,
+             );
+             throw new Error(
+                 `Failed to get schemas for Snowflake connection ${connection.name}: ${(error as Error).message}`,
+             );
          }
      } else if (connection.type === "trino") {
          if (!connection.trinoConnection) {
              throw new Error("Trino connection is required");
          }
-         const client = getTrinoClient(connection.trinoConnection);
-         const result = await client.query(
-             `SHOW SCHEMAS FROM ${connection.trinoConnection.catalog}`,
-         );
-         const rows: string[] = [];
-         let next = await result.next();
-         while (!next.done) {
-             if (next.value.data) {
-                 rows.push(...next.value.data.map((r: string[]) => r[0]));
-             }
-             next = await result.next();
+         try {
+             // Use the connection's runSQL method to query schemas
+             const result = await malloyConnection.runSQL(
+                 `SHOW SCHEMAS FROM ${connection.trinoConnection.catalog}`,
+             );
+
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return {
+                     name: typedRow.Schema as string,
+                     isHidden: ["information_schema", "performance_schema"].includes(
+                         typedRow.Schema as string,
+                     ),
+                     isDefault:
+                         typedRow.Schema === connection.trinoConnection?.schema,
+                 };
+             });
+         } catch (error) {
+             console.error(
+                 `Error getting schemas for Trino connection ${connection.name}:`,
+                 error,
+             );
+             throw new Error(
+                 `Failed to get schemas for Trino connection ${connection.name}: ${(error as Error).message}`,
+             );
+         }
+     } else if (connection.type === "duckdb") {
+         if (!connection.duckdbConnection) {
+             throw new Error("DuckDB connection is required");
+         }
+         try {
+             // Use DuckDB's INFORMATION_SCHEMA.SCHEMATA to list schemas
+             // Use DISTINCT to avoid duplicates from attached databases
+             const result = await malloyConnection.runSQL(
+                 "SELECT DISTINCT schema_name FROM information_schema.schemata ORDER BY schema_name",
+             );
+
+             const rows = standardizeRunSQLResult(result);
+
+             // Check if this DuckDB connection has attached databases
+             const hasAttachedDatabases =
+                 connection.duckdbConnection?.attachedDatabases &&
+                 Array.isArray(connection.duckdbConnection.attachedDatabases) &&
+                 connection.duckdbConnection.attachedDatabases.length > 0;
+
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 let schemaName = typedRow.schema_name as string;
+
+                 // If we have attached databases and this is not the main schema, prepend the attached database name
+                 if (hasAttachedDatabases && schemaName !== "main") {
+                     const attachedDbName = (
+                         connection.duckdbConnection!.attachedDatabases as Array<{
+                             name: string;
+                         }>
+                     )[0].name;
+                     schemaName = `${attachedDbName}.${schemaName}`;
+                 }
+
+                 return {
+                     name: schemaName,
+                     isHidden: false,
+                     isDefault: typedRow.schema_name === "main",
+                 };
+             });
+         } catch (error) {
+             console.error(
+                 `Error getting schemas for DuckDB connection ${connection.name}:`,
+                 error,
+             );
+             throw new Error(
+                 `Failed to get schemas for DuckDB connection ${connection.name}: ${(error as Error).message}`,
+             );
          }
-         return rows.map((r) => ({
-             name: r,
-             isHidden: false,
-             isDefault: r === connection.trinoConnection?.schema,
-         }));
      } else {
          throw new Error(`Unsupported connection type: ${connection.type}`);
      }
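
Every dialect branch above funnels runSQL output through standardizeRunSQLResult (added in the first hunk), because connection implementations differ in whether runSQL resolves to a bare row array or to an object carrying a rows property. An illustration with invented inputs; the sketch also assumes the helper were exported, while in the diff it is module-private:

    standardizeRunSQLResult([{ name: "PUBLIC" }]);           // -> [{ name: "PUBLIC" }]
    standardizeRunSQLResult({ rows: [{ name: "PUBLIC" }] }); // -> [{ name: "PUBLIC" }]
    standardizeRunSQLResult({});                             // -> [] (no rows property)
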
@@ -218,25 +261,113 @@
  export async function getTablesForSchema(
      connection: ApiConnection,
      schemaName: string,
- ): Promise<string[]> {
-     if (connection.type === "bigquery") {
+     malloyConnection: Connection,
+ ): Promise<ApiTable[]> {
+     // First get the list of table names
+     const tableNames = await listTablesForSchema(
+         connection,
+         schemaName,
+         malloyConnection,
+     );
+
+     // Fetch all table sources in parallel
+     const tableSourcePromises = tableNames.map(async (tableName) => {
          try {
-             const bigquery = getBigqueryConnection(connection);
-             const options = connection.bigqueryConnection?.defaultProjectId
-                 ? {
-                       projectId: connection.bigqueryConnection?.defaultProjectId,
-                   }
-                 : {};
-             const dataset = bigquery.dataset(schemaName, options);
-             const [exists] = await dataset.exists();
-             if (!exists) {
-                 throw new Error(
-                     `Dataset ${schemaName} does not exist in connection ${connection.name}`,
-                 );
+             const tablePath = `${schemaName}.${tableName}`;
+
+             logger.info(
+                 `Processing table: ${tableName} in schema: ${schemaName}`,
+                 { tablePath, connectionType: connection.type },
+             );
+             const tableSource = await getConnectionTableSource(
+                 malloyConnection,
+                 tableName,
+                 tablePath,
+             );
+
+             return {
+                 resource: tablePath,
+                 columns: tableSource.columns,
+             };
+         } catch (error) {
+             logger.warn(`Failed to get schema for table ${tableName}`, {
+                 error,
+                 schemaName,
+                 tableName,
+             });
+             // Return table without columns if schema fetch fails
+             return {
+                 resource: `${schemaName}.${tableName}`,
+                 columns: [],
+             };
+         }
+     });
+
+     // Wait for all table sources to be fetched
+     const tableResults = await Promise.all(tableSourcePromises);
+
+     return tableResults;
+ }
+
+ export async function getConnectionTableSource(
+     malloyConnection: Connection,
+     tableKey: string,
+     tablePath: string,
+ ): Promise<ApiTableSource> {
+     try {
+         logger.info(`Attempting to fetch table schema for: ${tablePath}`, {
+             tableKey,
+             tablePath,
+         });
+         const source = await (
+             malloyConnection as Connection & {
+                 fetchTableSchema: (
+                     tableKey: string,
+                     tablePath: string,
+                 ) => Promise<TableSourceDef | undefined>;
              }
+         ).fetchTableSchema(tableKey, tablePath);
+         if (source === undefined) {
+             throw new ConnectionError(`Table ${tablePath} not found`);
+         }
+         const malloyFields = (source as TableSourceDef).fields;
+         const fields = malloyFields.map((field) => {
+             return {
+                 name: field.name,
+                 type: field.type,
+             };
+         });
+         logger.info(`Successfully fetched schema for ${tablePath}`, {
+             fieldCount: fields.length,
+         });
+         return {
+             source: JSON.stringify(source),
+             resource: tablePath,
+             columns: fields,
+         };
+     } catch (error) {
+         logger.error("fetchTableSchema error", { error, tableKey, tablePath });
+         throw new ConnectionError((error as Error).message);
+     }
+ }

+ export async function listTablesForSchema(
+     connection: ApiConnection,
+     schemaName: string,
+     malloyConnection: Connection,
+ ): Promise<string[]> {
+     if (connection.type === "bigquery") {
+         try {
+             // Use BigQuery client directly for efficient table listing
+             // This is much faster than querying all regions
+             const bigquery = createBigQueryClient(connection);
+             const dataset = bigquery.dataset(schemaName);
              const [tables] = await dataset.getTables();
-             return tables.map((table) => table.id).filter((id) => id) as string[];
+
+             // Return table names, filtering out any undefined values
+             return tables
+                 .map((table) => table.id)
+                 .filter((id): id is string => id !== undefined);
          } catch (error) {
              logger.error(
                  `Error getting tables for BigQuery schema ${schemaName} in connection ${connection.name}`,
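
getConnectionTableSource, added above, reaches fetchTableSchema through a structural cast because that method is not part of the public Connection type imported from @malloydata/malloy, and it wraps any failure in ConnectionError. A hypothetical call, assuming an existing malloyConnection and invented table names:

    async function exampleLookup(malloyConnection: Connection): Promise<void> {
        const tableSource = await getConnectionTableSource(
            malloyConnection,
            "flights",      // tableKey, as passed from getTablesForSchema
            "main.flights", // tablePath = `${schemaName}.${tableName}`
        );
        // tableSource.resource -> "main.flights"
        // tableSource.columns  -> e.g. [{ name: "carrier", type: "string" }, ...]
        // tableSource.source   -> the Malloy TableSourceDef, JSON-serialized
    }
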
@@ -250,153 +381,137 @@ export async function getTablesForSchema(
          if (!connection.mysqlConnection) {
              throw new Error("Mysql connection is required");
          }
-         const pool = await getMysqlConnection(connection.mysqlConnection);
-         const [rows] = await pool.query(
-             "SELECT TABLE_NAME FROM information_schema.tables WHERE table_schema = ? AND table_type = 'BASE TABLE'",
-             [schemaName],
-         );
-         return (rows as { TABLE_NAME: string }[]).map((row) => row.TABLE_NAME);
+         try {
+             const result = await malloyConnection.runSQL(
+                 `SELECT TABLE_NAME FROM information_schema.tables WHERE table_schema = '${schemaName}' AND table_type = 'BASE TABLE'`,
+             );
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return typedRow.TABLE_NAME as string;
+             });
+         } catch (error) {
+             logger.error(
+                 `Error getting tables for MySQL schema ${schemaName} in connection ${connection.name}`,
+                 { error },
+             );
+             throw new Error(
+                 `Failed to get tables for MySQL schema ${schemaName} in connection ${connection.name}: ${(error as Error).message}`,
+             );
+         }
      } else if (connection.type === "postgres") {
          if (!connection.postgresConnection) {
              throw new Error("Postgres connection is required");
          }
-         const pool = await getPostgresConnection(connection.postgresConnection);
-         const res = await pool.query(
-             "SELECT table_name FROM information_schema.tables WHERE table_schema = $1",
-             [schemaName],
-         );
-         return res.rows.map((row) => row.table_name);
+         try {
+             const result = await malloyConnection.runSQL(
+                 `SELECT table_name as row FROM information_schema.tables WHERE table_schema = '${schemaName}' ORDER BY table_name`,
+             );
+             const rows = standardizeRunSQLResult(result);
+             return rows as string[];
+         } catch (error) {
+             logger.error(
+                 `Error getting tables for Postgres schema ${schemaName} in connection ${connection.name}`,
+                 { error },
+             );
+             throw new Error(
+                 `Failed to get tables for Postgres schema ${schemaName} in connection ${connection.name}: ${(error as Error).message}`,
+             );
+         }
      } else if (connection.type === "snowflake") {
          if (!connection.snowflakeConnection) {
              throw new Error("Snowflake connection is required");
          }
-         const snowflakeConn = await getSnowflakeConnection(
-             connection.snowflakeConnection,
-         );
          try {
-             return await getSnowflakeTables(
-                 snowflakeConn,
-                 connection.snowflakeConnection,
-                 schemaName,
+             const result = await malloyConnection.runSQL(
+                 `SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '${schemaName}' AND TABLE_TYPE = 'BASE TABLE'`,
              );
-         } finally {
-             snowflakeConn.destroy((error) => {
-                 if (error) {
-                     logger.error(`Error closing SnowflakeConnection`, { error });
-                 }
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return typedRow.TABLE_NAME as string;
              });
+         } catch (error) {
+             logger.error(
+                 `Error getting tables for Snowflake schema ${schemaName} in connection ${connection.name}`,
+                 { error },
+             );
+             throw new Error(
+                 `Failed to get tables for Snowflake schema ${schemaName} in connection ${connection.name}: ${(error as Error).message}`,
+             );
          }
      } else if (connection.type === "trino") {
          if (!connection.trinoConnection) {
              throw new Error("Trino connection is required");
          }
-         const client = getTrinoClient(connection.trinoConnection);
-         const result = await client.query(
-             `SHOW TABLES FROM ${connection.trinoConnection.catalog}.${schemaName}`,
-         );
+         try {
+             const result = await malloyConnection.runSQL(
+                 `SHOW TABLES FROM ${connection.trinoConnection.catalog}.${schemaName}`,
+             );
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return typedRow.Table as string;
+             });
+         } catch (error) {
+             logger.error(
+                 `Error getting tables for Trino schema ${schemaName} in connection ${connection.name}`,
+                 { error },
+             );
+             throw new Error(
+                 `Failed to get tables for Trino schema ${schemaName} in connection ${connection.name}: ${(error as Error).message}`,
+             );
+         }
+     } else if (connection.type === "duckdb") {
+         if (!connection.duckdbConnection) {
+             throw new Error("DuckDB connection is required");
+         }
+         try {
+             // Check if this DuckDB connection has attached databases and if the schema name is prepended
+             const hasAttachedDatabases =
+                 connection.duckdbConnection?.attachedDatabases &&
+                 Array.isArray(connection.duckdbConnection.attachedDatabases) &&
+                 connection.duckdbConnection.attachedDatabases.length > 0;
+
+             let actualSchemaName = schemaName;

-         const rows: string[] = [];
-         let next = await result.next();
-         while (!next.done) {
-             if (next.value.data) {
-                 rows.push(...next.value.data.map((r: string[]) => r[0]));
+             // If we have attached databases and the schema name is prepended, extract the actual schema name
+             if (hasAttachedDatabases && schemaName.includes(".")) {
+                 const attachedDbName = (
+                     connection.duckdbConnection!.attachedDatabases as Array<{
+                         name: string;
+                     }>
+                 )[0].name;
+                 if (schemaName.startsWith(`${attachedDbName}.`)) {
+                     actualSchemaName = schemaName.substring(
+                         attachedDbName.length + 1,
+                     );
+                 }
              }
-             next = await result.next();
+
+             // Use DuckDB's INFORMATION_SCHEMA.TABLES to list tables in the specified schema
+             // This follows the DuckDB documentation for listing tables
+             // For DuckDB, we'll use string interpolation to avoid parameter binding issues
+             const result = await malloyConnection.runSQL(
+                 `SELECT table_name FROM information_schema.tables WHERE table_schema = '${actualSchemaName}' ORDER BY table_name`,
+             );
+
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return typedRow.table_name as string;
+             });
+         } catch (error) {
+             logger.error(
+                 `Error getting tables for DuckDB schema ${schemaName} in connection ${connection.name}`,
+                 { error },
+             );
+             throw new Error(
+                 `Failed to get tables for DuckDB schema ${schemaName} in connection ${connection.name}: ${(error as Error).message}`,
+             );
          }
-         return rows;
      } else {
          // TODO(jjs) - implement
          return [];
      }
  }
-
- function getTempServiceKeyPath(connection: ApiConnection): string {
-     // If the connection is bigquery and the service account key is provided as
-     // JSON, we need to write it to a temporary file for the Malloy compiling to work
-     if (!connection.bigqueryConnection) {
-         throw new Error(
-             `BigQuery connection ${connection.name} is missing bigqueryConnection ${JSON.stringify(connection)}`,
-         );
-     }
-     const keyJson = connection.bigqueryConnection.serviceAccountKeyJson;
-     if (!keyJson) {
-         throw new Error(
-             `BigQuery connection ${connection.name} is missing service account key JSON`,
-         );
-     }
-     const tmpFilepath = path.join(
-         os.tmpdir(),
-         `ms2_connection_${connection.name}_${uuidv4()}_key.json`,
-     );
-     fs.writeFileSync(tmpFilepath, keyJson, "utf8");
-     return tmpFilepath;
- }
-
- async function getSnowflakeTables(
-     connection: snowflake.Connection,
-     connInfo: SnowflakeConnection,
-     schemaName: string,
- ): Promise<string[]> {
-     return new Promise((resolve, reject) => {
-         connection.execute({
-             sqlText: `USE DATABASE ${connInfo?.database} `,
-             complete: (err) => {
-                 if (err) {
-                     logger.error(`Error setting database ${connInfo.database}:`, {
-                         error: err,
-                     });
-                     reject([]);
-                     return;
-                 }
-
-                 const query = `
-                     SELECT TABLE_NAME, TABLE_SCHEMA
-                     FROM INFORMATION_SCHEMA.TABLES
-                     WHERE TABLE_SCHEMA=? AND TABLE_TYPE = 'BASE TABLE';
-                 `;
-
-                 connection.execute({
-                     sqlText: query,
-                     binds: [schemaName],
-                     complete: (err, _, rows) => {
-                         if (err) {
-                             logger.error(
-                                 `Error fetching tables from ${connInfo.database}:`,
-                                 { error: err },
-                             );
-                             reject([]);
-                         } else {
-                             resolve(rows?.map((row) => `${row.TABLE_NAME}`) || []);
-                         }
-                     },
-                 });
-             },
-         });
-     });
- }
-
- async function getSnowflakeSchemas(
-     connection: snowflake.Connection,
- ): Promise<ApiSchemaName[]> {
-     return new Promise((resolve, reject) => {
-         connection.execute({
-             sqlText: "SHOW SCHEMAS",
-             complete: (err, _stmt, rows) => {
-                 if (err) {
-                     reject(err);
-                 } else {
-                     resolve(
-                         rows?.map((row) => {
-                             logger.info("row", { row });
-                             return {
-                                 name: row.name,
-                                 isDefault: row.isDefault === "Y",
-                                 isHidden: ["SNOWFLAKE", ""].includes(row.owner),
-                             };
-                         }) || [],
-                     );
-                 }
-             },
-         });
-     });
- }
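
For DuckDB connections with attached databases, the two new duckdb branches cooperate: getSchemasForConnection (second hunk) prepends the first attached database's name to every non-main schema it reports, and listTablesForSchema (last hunk) strips that same prefix back off before interpolating the schema into its information_schema query. A standalone sketch of the round trip, with an invented attached database named analytics; note that both sides consult only the first entry in attachedDatabases, mirroring the diff:

    const attachedDbName = "analytics";

    // getSchemasForConnection: prefix every non-"main" schema.
    const reported = ["main", "reports"].map((s) =>
        s === "main" ? s : `${attachedDbName}.${s}`,
    ); // -> ["main", "analytics.reports"]

    // listTablesForSchema: strip the prefix before querying.
    const schemaName = "analytics.reports";
    const actualSchemaName = schemaName.startsWith(`${attachedDbName}.`)
        ? schemaName.substring(attachedDbName.length + 1)
        : schemaName; // -> "reports"
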