@malloy-publisher/server 0.0.119 → 0.0.120

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. package/dist/app/api-doc.yaml +324 -335
  2. package/dist/app/assets/{HomePage-BxFnfH3M.js → HomePage-xhvGPTSO.js} +1 -1
  3. package/dist/app/assets/{MainPage-D301Y0mT.js → MainPage-Bq95p8Cl.js} +1 -1
  4. package/dist/app/assets/{ModelPage-Df8ivC1J.js → ModelPage-CbfBNWIi.js} +1 -1
  5. package/dist/app/assets/{PackagePage-CE41SCV_.js → PackagePage-CGS612C4.js} +1 -1
  6. package/dist/app/assets/ProjectPage-Dpn9pqSB.js +1 -0
  7. package/dist/app/assets/{RouteError-l_WGtNhS.js → RouteError-BLPhl1wC.js} +1 -1
  8. package/dist/app/assets/{WorkbookPage-CY-1oBvt.js → WorkbookPage-Dt93gSZ3.js} +1 -1
  9. package/dist/app/assets/{index-D5BBaLz8.js → index-B8wuAjgG.js} +1 -1
  10. package/dist/app/assets/{index-DjbXd602.js → index-CVbROKL7.js} +113 -113
  11. package/dist/app/assets/{index-DlZbNvNc.js → index-DxKW6bXB.js} +1 -1
  12. package/dist/app/assets/{index.umd-DQiSWsWe.js → index.umd-CVy5LWk2.js} +1 -1
  13. package/dist/app/index.html +1 -1
  14. package/dist/server.js +35396 -144724
  15. package/k6-tests/common.ts +12 -3
  16. package/package.json +1 -1
  17. package/src/controller/connection.controller.ts +82 -72
  18. package/src/controller/query.controller.ts +1 -1
  19. package/src/server.ts +6 -48
  20. package/src/service/connection.ts +384 -305
  21. package/src/service/db_utils.ts +407 -303
  22. package/src/service/package.spec.ts +8 -97
  23. package/src/service/package.ts +24 -46
  24. package/src/service/project.ts +8 -24
  25. package/src/service/project_store.ts +0 -1
  26. package/dist/app/assets/ProjectPage-DA66xbmQ.js +0 -1
  27. package/src/controller/schedule.controller.ts +0 -21
  28. package/src/service/scheduler.ts +0 -190
@@ -1,141 +1,76 @@
  import { BigQuery } from "@google-cloud/bigquery";
- import fs from "fs";
- import os from "os";
- import path from "path";
- import { Pool } from "pg";
- import * as snowflake from "snowflake-sdk";
- import { v4 as uuidv4 } from "uuid";
+ import { Connection, TableSourceDef } from "@malloydata/malloy";
  import { components } from "../api";
+ import { ConnectionError } from "../errors";
  import { logger } from "../logger";
- import {
-     ApiConnection,
-     // @ts-expect-error TODO: Fix missing MysqlConnection type in API
-     MysqlConnection,
-     PostgresConnection,
-     SnowflakeConnection,
-     TrinoConnection,
- } from "./model";
-
- import { BasicAuth, Trino } from "trino-client";
-
- type ApiSchemaName = components["schemas"]["SchemaName"];
-
- async function getPostgresConnection(
-     apiPostgresConnection: PostgresConnection,
- ): Promise<Pool> {
-     return new Pool({
-         user: apiPostgresConnection.userName,
-         host: apiPostgresConnection.host,
-         database: apiPostgresConnection.databaseName,
-         password: apiPostgresConnection.password,
-         port: apiPostgresConnection.port,
-         connectionString: apiPostgresConnection.connectionString,
-         max: 10,
-         idleTimeoutMillis: 30000,
-     });
- }
+ import { ApiConnection } from "./model";

- async function getMysqlConnection(apiMysqlConnection: MysqlConnection) {
-     // Dynamically import mysql2/promise to avoid import issues if not needed
-     const mysql = await import("mysql2/promise");
-     return mysql.createPool({
-         host: apiMysqlConnection.host,
-         port: apiMysqlConnection.port,
-         user: apiMysqlConnection.user,
-         password: apiMysqlConnection.password,
-         database: apiMysqlConnection.database,
-         waitForConnections: true,
-         connectionLimit: 10,
-         queueLimit: 0,
-     });
- }
+ type ApiSchema = components["schemas"]["Schema"];
+ type ApiTable = components["schemas"]["Table"];
+ type ApiTableSource = components["schemas"]["TableSource"];

- function getBigqueryConnection(apiConnection: ApiConnection): BigQuery {
-     if (!apiConnection.bigqueryConnection?.serviceAccountKeyJson) {
-         // Use default credentials
-         return new BigQuery();
-     } else {
-         const tmpKeyPath = getTempServiceKeyPath(apiConnection);
-         if (!tmpKeyPath) {
-             throw new Error(
-                 `Failed to create temporary service key file for connection: ${apiConnection.name}`,
-             );
-         }
-         return new BigQuery({ keyFilename: tmpKeyPath });
+ function createBigQueryClient(connection: ApiConnection): BigQuery {
+     if (!connection.bigqueryConnection) {
+         throw new Error("BigQuery connection is required");
      }
- }

- async function getSnowflakeConnection(
-     apiSnowflakeConnection: SnowflakeConnection,
- ): Promise<snowflake.Connection> {
-     if (!apiSnowflakeConnection.account) {
-         throw new Error("Snowflake account is required");
+     if (!connection.bigqueryConnection.defaultProjectId) {
+         throw new Error("BigQuery defaultProjectId is required");
      }
-     return new Promise((resolve, reject) => {
-         const connection = snowflake.createConnection({
-             account: apiSnowflakeConnection.account as string,
-             username: apiSnowflakeConnection.username,
-             password: apiSnowflakeConnection.password,
-             database: apiSnowflakeConnection.database,
-             warehouse: apiSnowflakeConnection.warehouse || undefined,
-             role: apiSnowflakeConnection.role || undefined,
-         });
-         connection.connect((err, conn) => {
-             if (err) {
-                 reject(err);
-             } else {
-                 resolve(conn);
-             }
-         });
-     });
- }

- function getTrinoClient(trinoConn: TrinoConnection) {
-     let auth: BasicAuth;
-     if (trinoConn.server?.startsWith("https://")) {
-         // HTTPS allows password authentication
-         auth = new BasicAuth(trinoConn?.user || "", trinoConn?.password || "");
-     } else {
-         // HTTP only allows username, no password for security
-         auth = new BasicAuth(trinoConn?.user || "");
+     const config: {
+         projectId: string;
+         keyFilename?: string;
+     } = {
+         projectId: connection.bigqueryConnection.defaultProjectId,
+     };
+
+     // Add service account key if provided
+     if (connection.bigqueryConnection.serviceAccountKeyJson) {
+         try {
+             config.keyFilename = JSON.parse(
+                 connection.bigqueryConnection.serviceAccountKeyJson,
+             );
+         } catch (error) {
+             logger.warn(
+                 "Failed to parse service account key JSON, using default credentials",
+                 { error },
+             );
+         }
      }

-     return Trino.create({
-         server: trinoConn.server,
-         catalog: trinoConn.catalog,
-         schema: trinoConn.schema,
-         auth,
-     });
+     return new BigQuery(config);
+ }
+
+ function standardizeRunSQLResult(result: unknown): unknown[] {
+     // Handle different result formats from malloyConnection.runSQL
+     return Array.isArray(result)
+         ? result
+         : (result as { rows?: unknown[] }).rows || [];
  }

  export async function getSchemasForConnection(
      connection: ApiConnection,
- ): Promise<ApiSchemaName[]> {
+     malloyConnection: Connection,
+ ): Promise<ApiSchema[]> {
      if (connection.type === "bigquery") {
          if (!connection.bigqueryConnection) {
              throw new Error("BigQuery connection is required");
          }
          try {
-             const bigquery = getBigqueryConnection(connection);
-             const projectId = connection.bigqueryConnection.defaultProjectId;
-             const options = projectId ? { projectId } : {};
-             const [datasets] = await bigquery.getDatasets(options);
-             const schemas = await Promise.all(
-                 datasets
-                     .filter((dataset) => dataset.id)
-                     .map(async (dataset) => {
-                         const [metadata] = await dataset.getMetadata();
-                         return {
-                             name: dataset.id,
-                             isHidden: false,
-                             isDefault: false,
-                             // Include description from dataset metadata if available
-                             description: (metadata as { description?: string })
-                                 ?.description,
-                         };
-                     }),
-             );
-             return schemas;
+             const projectId = connection.bigqueryConnection?.defaultProjectId;
+             if (!projectId) {
+                 throw new Error("BigQuery project ID is required");
+             }
+
+             const bigquery = createBigQueryClient(connection);
+             const [datasets] = await bigquery.getDatasets();
+
+             return datasets.map((dataset) => ({
+                 name: dataset.id,
+                 isHidden: false,
+                 isDefault: false,
+             }));
          } catch (error) {
              console.error(
                  `Error getting schemas for BigQuery connection ${connection.name}:`,
@@ -149,67 +84,156 @@ export async function getSchemasForConnection(
          if (!connection.postgresConnection) {
              throw new Error("Postgres connection is required");
          }
-         const pool = await getPostgresConnection(connection.postgresConnection);
-         const res = await pool.query(
-             "SELECT schema_name FROM information_schema.schemata",
-         );
-         return res.rows.map((row) => {
-             return {
-                 name: row.schema_name,
-                 isHidden: ["information_schema", "pg_catalog"].includes(
-                     row.schema_name,
-                 ),
-                 isDefault: row.schema_name === "public",
-             };
-         });
+         try {
+             // Use the connection's runSQL method to query schemas
+             const result = await malloyConnection.runSQL(
+                 "SELECT schema_name FROM information_schema.schemata",
+             );
+
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return {
+                     name: typedRow.schema_name as string,
+                     isHidden: ["information_schema", "pg_catalog"].includes(
+                         typedRow.schema_name as string,
+                     ),
+                     isDefault: typedRow.schema_name === "public",
+                 };
+             });
+         } catch (error) {
+             console.error(
+                 `Error getting schemas for Postgres connection ${connection.name}:`,
+                 error,
+             );
+             throw new Error(
+                 `Failed to get schemas for Postgres connection ${connection.name}: ${(error as Error).message}`,
+             );
+         }
      } else if (connection.type === "mysql") {
          if (!connection.mysqlConnection) {
              throw new Error("Mysql connection is required");
          }
-         return [
-             {
-                 name: connection.mysqlConnection.database || "mysql",
-                 isHidden: false,
-                 isDefault: true,
-             },
-         ];
+         try {
+             // For MySQL, return the database name as the schema
+             return [
+                 {
+                     name: connection.mysqlConnection.database || "mysql",
+                     isHidden: false,
+                     isDefault: true,
+                 },
+             ];
+         } catch (error) {
+             console.error(
+                 `Error getting schemas for MySQL connection ${connection.name}:`,
+                 error,
+             );
+             throw new Error(
+                 `Failed to get schemas for MySQL connection ${connection.name}: ${(error as Error).message}`,
+             );
+         }
      } else if (connection.type === "snowflake") {
          if (!connection.snowflakeConnection) {
              throw new Error("Snowflake connection is required");
          }
-         const snowflakeConn = await getSnowflakeConnection(
-             connection.snowflakeConnection,
-         );
          try {
-             return await getSnowflakeSchemas(snowflakeConn);
-         } finally {
-             snowflakeConn.destroy((error) => {
-                 if (error) {
-                     logger.error(`Error closing SnowflakeConnection: ${error}`);
-                 }
+             // Use the connection's runSQL method to query schemas
+             const result = await malloyConnection.runSQL("SHOW SCHEMAS");
+
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return {
+                     name: typedRow.name as string,
+                     isHidden: ["SNOWFLAKE", ""].includes(typedRow.owner as string),
+                     isDefault: typedRow.isDefault === "Y",
+                 };
              });
+         } catch (error) {
+             console.error(
+                 `Error getting schemas for Snowflake connection ${connection.name}:`,
+                 error,
+             );
+             throw new Error(
+                 `Failed to get schemas for Snowflake connection ${connection.name}: ${(error as Error).message}`,
+             );
          }
      } else if (connection.type === "trino") {
          if (!connection.trinoConnection) {
              throw new Error("Trino connection is required");
          }
-         const client = getTrinoClient(connection.trinoConnection);
-         const result = await client.query(
-             `SHOW SCHEMAS FROM ${connection.trinoConnection.catalog}`,
-         );
-         const rows: string[] = [];
-         let next = await result.next();
-         while (!next.done) {
-             if (next.value.data) {
-                 rows.push(...next.value.data.map((r: string[]) => r[0]));
-             }
-             next = await result.next();
+         try {
+             // Use the connection's runSQL method to query schemas
+             const result = await malloyConnection.runSQL(
+                 `SHOW SCHEMAS FROM ${connection.trinoConnection.catalog}`,
+             );
+
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return {
+                     name: typedRow.name as string,
+                     isHidden: false,
+                     isDefault: typedRow.name === connection.trinoConnection?.schema,
+                 };
+             });
+         } catch (error) {
+             console.error(
+                 `Error getting schemas for Trino connection ${connection.name}:`,
+                 error,
+             );
+             throw new Error(
+                 `Failed to get schemas for Trino connection ${connection.name}: ${(error as Error).message}`,
+             );
+         }
+     } else if (connection.type === "duckdb") {
+         if (!connection.duckdbConnection) {
+             throw new Error("DuckDB connection is required");
+         }
+         try {
+             // Use DuckDB's INFORMATION_SCHEMA.SCHEMATA to list schemas
+             // Use DISTINCT to avoid duplicates from attached databases
+             const result = await malloyConnection.runSQL(
+                 "SELECT DISTINCT schema_name FROM information_schema.schemata ORDER BY schema_name",
+             );
+
+             const rows = standardizeRunSQLResult(result);
+
+             // Check if this DuckDB connection has attached databases
+             const hasAttachedDatabases =
+                 connection.duckdbConnection?.attachedDatabases &&
+                 Array.isArray(connection.duckdbConnection.attachedDatabases) &&
+                 connection.duckdbConnection.attachedDatabases.length > 0;
+
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 let schemaName = typedRow.schema_name as string;
+
+                 // If we have attached databases and this is not the main schema, prepend the attached database name
+                 if (hasAttachedDatabases && schemaName !== "main") {
+                     const attachedDbName = (
+                         connection.duckdbConnection!.attachedDatabases as Array<{
+                             name: string;
+                         }>
+                     )[0].name;
+                     schemaName = `${attachedDbName}.${schemaName}`;
+                 }
+
+                 return {
+                     name: schemaName,
+                     isHidden: false,
+                     isDefault: typedRow.schema_name === "main",
+                 };
+             });
+         } catch (error) {
+             console.error(
+                 `Error getting schemas for DuckDB connection ${connection.name}:`,
+                 error,
+             );
+             throw new Error(
+                 `Failed to get schemas for DuckDB connection ${connection.name}: ${(error as Error).message}`,
+             );
          }
-         return rows.map((r) => ({
-             name: r,
-             isHidden: false,
-             isDefault: r === connection.trinoConnection?.schema,
-         }));
      } else {
          throw new Error(`Unsupported connection type: ${connection.type}`);
      }
@@ -218,25 +242,118 @@ export async function getSchemasForConnection(
  export async function getTablesForSchema(
      connection: ApiConnection,
      schemaName: string,
+     malloyConnection: Connection,
+ ): Promise<ApiTable[]> {
+     // First get the list of table names
+     const tableNames = await listTablesForSchema(
+         connection,
+         schemaName,
+         malloyConnection,
+     );
+
+     // Fetch all table sources in parallel
+     const tableSourcePromises = tableNames.map(async (tableName) => {
+         try {
+             const tablePath = `${schemaName}.${tableName}`;
+
+             logger.info(
+                 `Processing table: ${tableName} in schema: ${schemaName}`,
+                 { tablePath, connectionType: connection.type },
+             );
+             const tableSource = await getConnectionTableSource(
+                 malloyConnection,
+                 tableName,
+                 tablePath,
+             );
+
+             return {
+                 resource: tablePath,
+                 columns: tableSource.columns,
+             };
+         } catch (error) {
+             logger.warn(`Failed to get schema for table ${tableName}`, {
+                 error,
+                 schemaName,
+                 tableName,
+             });
+             // Return table without columns if schema fetch fails
+             return {
+                 resource: `${schemaName}.${tableName}`,
+                 columns: [],
+             };
+         }
+     });
+
+     // Wait for all table sources to be fetched
+     const tableResults = await Promise.all(tableSourcePromises);
+
+     return tableResults;
+ }
+
+ export async function getConnectionTableSource(
+     malloyConnection: Connection,
+     tableKey: string,
+     tablePath: string,
+ ): Promise<ApiTableSource> {
+     try {
+         logger.info(`Attempting to fetch table schema for: ${tablePath}`, {
+             tableKey,
+             tablePath,
+         });
+         const source = await (
+             malloyConnection as Connection & {
+                 fetchTableSchema: (
+                     tableKey: string,
+                     tablePath: string,
+                 ) => Promise<TableSourceDef | undefined>;
+             }
+         ).fetchTableSchema(tableKey, tablePath);
+         if (source === undefined) {
+             throw new ConnectionError(`Table ${tablePath} not found`);
+         }
+         const malloyFields = (source as TableSourceDef).fields;
+         const fields = malloyFields.map((field) => {
+             return {
+                 name: field.name,
+                 type: field.type,
+             };
+         });
+         logger.info(`Successfully fetched schema for ${tablePath}`, {
+             fieldCount: fields.length,
+         });
+         return {
+             source: JSON.stringify(source),
+             resource: tablePath,
+             columns: fields,
+         };
+     } catch (error) {
+         logger.error("fetchTableSchema error", { error, tableKey, tablePath });
+         throw new ConnectionError((error as Error).message);
+     }
+ }
+
+ export async function listTablesForSchema(
+     connection: ApiConnection,
+     schemaName: string,
+     malloyConnection: Connection,
  ): Promise<string[]> {
      if (connection.type === "bigquery") {
          try {
-             const bigquery = getBigqueryConnection(connection);
-             const options = connection.bigqueryConnection?.defaultProjectId
-                 ? {
-                       projectId: connection.bigqueryConnection?.defaultProjectId,
-                   }
-                 : {};
-             const dataset = bigquery.dataset(schemaName, options);
-             const [exists] = await dataset.exists();
-             if (!exists) {
-                 throw new Error(
-                     `Dataset ${schemaName} does not exist in connection ${connection.name}`,
-                 );
+             const projectId = connection.bigqueryConnection?.defaultProjectId;
+             if (!projectId) {
+                 throw new Error("BigQuery project ID is required");
              }

+             // Use BigQuery client directly for efficient table listing
+             // This is much faster than querying all regions
+             const bigquery = createBigQueryClient(connection);
+             const dataset = bigquery.dataset(schemaName);
              const [tables] = await dataset.getTables();
-             return tables.map((table) => table.id).filter((id) => id) as string[];
+
+             // Return table names, filtering out any undefined values
+             return tables
+                 .map((table) => table.id)
+                 .filter((id): id is string => id !== undefined);
          } catch (error) {
              logger.error(
                  `Error getting tables for BigQuery schema ${schemaName} in connection ${connection.name}`,
@@ -250,153 +367,140 @@ export async function getTablesForSchema(
          if (!connection.mysqlConnection) {
              throw new Error("Mysql connection is required");
          }
-         const pool = await getMysqlConnection(connection.mysqlConnection);
-         const [rows] = await pool.query(
-             "SELECT TABLE_NAME FROM information_schema.tables WHERE table_schema = ? AND table_type = 'BASE TABLE'",
-             [schemaName],
-         );
-         return (rows as { TABLE_NAME: string }[]).map((row) => row.TABLE_NAME);
+         try {
+             const result = await malloyConnection.runSQL(
+                 `SELECT TABLE_NAME FROM information_schema.tables WHERE table_schema = '${schemaName}' AND table_type = 'BASE TABLE'`,
+             );
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return typedRow.TABLE_NAME as string;
+             });
+         } catch (error) {
+             logger.error(
+                 `Error getting tables for MySQL schema ${schemaName} in connection ${connection.name}`,
+                 { error },
+             );
+             throw new Error(
+                 `Failed to get tables for MySQL schema ${schemaName} in connection ${connection.name}: ${(error as Error).message}`,
+             );
+         }
      } else if (connection.type === "postgres") {
          if (!connection.postgresConnection) {
              throw new Error("Postgres connection is required");
          }
-         const pool = await getPostgresConnection(connection.postgresConnection);
-         const res = await pool.query(
-             "SELECT table_name FROM information_schema.tables WHERE table_schema = $1",
-             [schemaName],
-         );
-         return res.rows.map((row) => row.table_name);
+         try {
+             const result = await malloyConnection.runSQL(
+                 `SELECT table_name FROM information_schema.tables WHERE table_schema = '${schemaName}'`,
+             );
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return typedRow.table_name as string;
+             });
+         } catch (error) {
+             logger.error(
+                 `Error getting tables for Postgres schema ${schemaName} in connection ${connection.name}`,
+                 { error },
+             );
+             throw new Error(
+                 `Failed to get tables for Postgres schema ${schemaName} in connection ${connection.name}: ${(error as Error).message}`,
+             );
+         }
      } else if (connection.type === "snowflake") {
          if (!connection.snowflakeConnection) {
              throw new Error("Snowflake connection is required");
          }
-         const snowflakeConn = await getSnowflakeConnection(
-             connection.snowflakeConnection,
-         );
          try {
-             return await getSnowflakeTables(
-                 snowflakeConn,
-                 connection.snowflakeConnection,
-                 schemaName,
+             const result = await malloyConnection.runSQL(
+                 `SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '${schemaName}' AND TABLE_TYPE = 'BASE TABLE'`,
              );
-         } finally {
-             snowflakeConn.destroy((error) => {
-                 if (error) {
-                     logger.error(`Error closing SnowflakeConnection`, { error });
-                 }
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return typedRow.TABLE_NAME as string;
              });
+         } catch (error) {
+             logger.error(
+                 `Error getting tables for Snowflake schema ${schemaName} in connection ${connection.name}`,
+                 { error },
+             );
+             throw new Error(
+                 `Failed to get tables for Snowflake schema ${schemaName} in connection ${connection.name}: ${(error as Error).message}`,
+             );
          }
      } else if (connection.type === "trino") {
          if (!connection.trinoConnection) {
              throw new Error("Trino connection is required");
          }
-         const client = getTrinoClient(connection.trinoConnection);
-         const result = await client.query(
-             `SHOW TABLES FROM ${connection.trinoConnection.catalog}.${schemaName}`,
-         );
-
-         const rows: string[] = [];
-         let next = await result.next();
-         while (!next.done) {
-             if (next.value.data) {
-                 rows.push(...next.value.data.map((r: string[]) => r[0]));
-             }
-             next = await result.next();
+         try {
+             const result = await malloyConnection.runSQL(
+                 `SHOW TABLES FROM ${connection.trinoConnection.catalog}.${schemaName}`,
+             );
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return typedRow.name as string;
+             });
+         } catch (error) {
+             logger.error(
+                 `Error getting tables for Trino schema ${schemaName} in connection ${connection.name}`,
+                 { error },
+             );
+             throw new Error(
+                 `Failed to get tables for Trino schema ${schemaName} in connection ${connection.name}: ${(error as Error).message}`,
+             );
          }
-         return rows;
-     } else {
-         // TODO(jjs) - implement
-         return [];
-     }
- }
+     } else if (connection.type === "duckdb") {
+         if (!connection.duckdbConnection) {
+             throw new Error("DuckDB connection is required");
+         }
+         try {
+             // Check if this DuckDB connection has attached databases and if the schema name is prepended
+             const hasAttachedDatabases =
+                 connection.duckdbConnection?.attachedDatabases &&
+                 Array.isArray(connection.duckdbConnection.attachedDatabases) &&
+                 connection.duckdbConnection.attachedDatabases.length > 0;

- function getTempServiceKeyPath(connection: ApiConnection): string {
-     // If the connection is bigquery and the service account key is provided as
-     // JSON, we need to write it to a temporary file for the Malloy compiling to work
-     if (!connection.bigqueryConnection) {
-         throw new Error(
-             `BigQuery connection ${connection.name} is missing bigqueryConnection ${JSON.stringify(connection)}`,
-         );
-     }
-     const keyJson = connection.bigqueryConnection.serviceAccountKeyJson;
-     if (!keyJson) {
-         throw new Error(
-             `BigQuery connection ${connection.name} is missing service account key JSON`,
-         );
-     }
-     const tmpFilepath = path.join(
-         os.tmpdir(),
-         `ms2_connection_${connection.name}_${uuidv4()}_key.json`,
-     );
-     fs.writeFileSync(tmpFilepath, keyJson, "utf8");
-     return tmpFilepath;
- }
+             let actualSchemaName = schemaName;

- async function getSnowflakeTables(
-     connection: snowflake.Connection,
-     connInfo: SnowflakeConnection,
-     schemaName: string,
- ): Promise<string[]> {
-     return new Promise((resolve, reject) => {
-         connection.execute({
-             sqlText: `USE DATABASE ${connInfo?.database} `,
-             complete: (err) => {
-                 if (err) {
-                     logger.error(`Error setting database ${connInfo.database}:`, {
-                         error: err,
-                     });
-                     reject([]);
-                     return;
+             // If we have attached databases and the schema name is prepended, extract the actual schema name
+             if (hasAttachedDatabases && schemaName.includes(".")) {
+                 const attachedDbName = (
+                     connection.duckdbConnection!.attachedDatabases as Array<{
+                         name: string;
+                     }>
+                 )[0].name;
+                 if (schemaName.startsWith(`${attachedDbName}.`)) {
+                     actualSchemaName = schemaName.substring(
+                         attachedDbName.length + 1,
+                     );
                  }
+             }

-                 const query = `
-                     SELECT TABLE_NAME, TABLE_SCHEMA
-                     FROM INFORMATION_SCHEMA.TABLES
-                     WHERE TABLE_SCHEMA=? AND TABLE_TYPE = 'BASE TABLE';
-                 `;
-
-                 connection.execute({
-                     sqlText: query,
-                     binds: [schemaName],
-                     complete: (err, _, rows) => {
-                         if (err) {
-                             logger.error(
-                                 `Error fetching tables from ${connInfo.database}:`,
-                                 { error: err },
-                             );
-                             reject([]);
-                         } else {
-                             resolve(rows?.map((row) => `${row.TABLE_NAME}`) || []);
-                         }
-                     },
-                 });
-             },
-         });
-     });
- }
+             // Use DuckDB's INFORMATION_SCHEMA.TABLES to list tables in the specified schema
+             // This follows the DuckDB documentation for listing tables
+             // For DuckDB, we'll use string interpolation to avoid parameter binding issues
+             const result = await malloyConnection.runSQL(
+                 `SELECT table_name FROM information_schema.tables WHERE table_schema = '${actualSchemaName}' ORDER BY table_name`,
+             );

- async function getSnowflakeSchemas(
-     connection: snowflake.Connection,
- ): Promise<ApiSchemaName[]> {
-     return new Promise((resolve, reject) => {
-         connection.execute({
-             sqlText: "SHOW SCHEMAS",
-             complete: (err, _stmt, rows) => {
-                 if (err) {
-                     reject(err);
-                 } else {
-                     resolve(
-                         rows?.map((row) => {
-                             logger.info("row", { row });
-                             return {
-                                 name: row.name,
-                                 isDefault: row.isDefault === "Y",
-                                 isHidden: ["SNOWFLAKE", ""].includes(row.owner),
-                             };
-                         }) || [],
-                     );
-                 }
-             },
-         });
-     });
+             const rows = standardizeRunSQLResult(result);
+             return rows.map((row: unknown) => {
+                 const typedRow = row as Record<string, unknown>;
+                 return typedRow.table_name as string;
+             });
+         } catch (error) {
+             logger.error(
+                 `Error getting tables for DuckDB schema ${schemaName} in connection ${connection.name}`,
+                 { error },
+             );
+             throw new Error(
+                 `Failed to get tables for DuckDB schema ${schemaName} in connection ${connection.name}: ${(error as Error).message}`,
+             );
+         }
+     } else {
+         // TODO(jjs) - implement
+         return [];
+     }
  }
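
The net effect of this file's rewrite is that the per-driver clients (pg, mysql2, snowflake-sdk, trino-client) are gone: schema and table introspection now reuses the Malloy Connection already configured for the project, with standardizeRunSQLResult smoothing over the two result shapes runSQL can return. Below is a minimal sketch, not part of the diff, of how the new entry points might be driven. The import paths, the DuckDBConnection constructor arguments, and the ApiConnection literal are illustrative assumptions.

// Sketch only: paths, constructor arguments, and config shape are assumptions.
import { DuckDBConnection } from "@malloydata/db-duckdb";

import { ApiConnection } from "./model";
import { getSchemasForConnection, listTablesForSchema } from "./db_utils"; // path assumed

async function printCatalog(): Promise<void> {
    // Hypothetical connection config; only fields the helpers inspect are set.
    const apiConnection = {
        name: "local",
        type: "duckdb",
        duckdbConnection: {},
    } as ApiConnection;

    // One Malloy connection now backs both schema and table introspection.
    const malloyConnection = new DuckDBConnection("local"); // ctor args assumed

    const schemas = await getSchemasForConnection(apiConnection, malloyConnection);
    for (const schema of schemas) {
        const tables = await listTablesForSchema(
            apiConnection,
            schema.name as string,
            malloyConnection,
        );
        console.log(schema.name, tables.join(", "));
    }
}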