@malloy-publisher/server 0.0.122 → 0.0.124

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/server.js CHANGED
@@ -131921,138 +131921,211 @@ function validateAndBuildTrinoConfig(trinoConfig) {
  throw new Error(`Invalid Trino connection: expected "http://server:port" (no password) or "https://server:port" (with username and password).`);
  }
  }
+ async function installAndLoadExtension(connection, extensionName, fromCommunity = false) {
+ try {
+ const installCommand = fromCommunity ? `FORCE INSTALL '${extensionName}' FROM community;` : `INSTALL ${extensionName};`;
+ await connection.runSQL(installCommand);
+ logger2.info(`${extensionName} extension installed`);
+ } catch (error) {
+ logger2.info(`${extensionName} extension already installed or install skipped`, { error });
+ }
+ await connection.runSQL(`LOAD ${extensionName};`);
+ logger2.info(`${extensionName} extension loaded`);
+ }
+ async function isDatabaseAttached(connection, dbName) {
+ try {
+ const existingDatabases = await connection.runSQL("SHOW DATABASES");
+ const rows = Array.isArray(existingDatabases) ? existingDatabases : existingDatabases.rows || [];
+ logger2.debug(`Existing databases:`, rows);
+ return rows.some((row) => Object.values(row).some((value) => typeof value === "string" && value === dbName));
+ } catch (error) {
+ logger2.warn(`Failed to check existing databases:`, error);
+ return false;
+ }
+ }
+ function sanitizeSecretName(name) {
+ return `secret_${name.replace(/[^a-zA-Z0-9_]/g, "_")}`;
+ }
+ function escapeSQL(value) {
+ return value.replace(/'/g, "''");
+ }
+ function handleAlreadyAttachedError(error, dbName) {
+ if (error instanceof Error && error.message.includes("already exists")) {
+ logger2.info(`Database ${dbName} is already attached, skipping`);
+ } else {
+ throw error;
+ }
+ }
+ async function attachBigQuery(connection, attachedDb) {
+ if (!attachedDb.bigqueryConnection) {
+ throw new Error(`BigQuery connection configuration missing for: ${attachedDb.name}`);
+ }
+ const config = attachedDb.bigqueryConnection;
+ let projectId = config.defaultProjectId;
+ let serviceAccountJson;
+ if (config.serviceAccountKeyJson) {
+ const keyData = JSON.parse(config.serviceAccountKeyJson);
+ const requiredFields = [
+ "type",
+ "project_id",
+ "private_key",
+ "client_email"
+ ];
+ for (const field of requiredFields) {
+ if (!keyData[field]) {
+ throw new Error(`Invalid service account key: missing "${field}" field`);
+ }
+ }
+ if (keyData.type !== "service_account") {
+ throw new Error('Invalid service account key: incorrect "type" field');
+ }
+ projectId = keyData.project_id || config.defaultProjectId;
+ serviceAccountJson = config.serviceAccountKeyJson;
+ logger2.info(`Using service account: ${keyData.client_email}`);
+ }
+ if (!projectId || !serviceAccountJson) {
+ throw new Error(`BigQuery project_id and service account key required for: ${attachedDb.name}`);
+ }
+ await installAndLoadExtension(connection, "bigquery", true);
+ const secretName = sanitizeSecretName(`bigquery_${attachedDb.name}`);
+ const escapedJson = escapeSQL(serviceAccountJson);
+ const createSecretCommand = `
+ CREATE OR REPLACE SECRET ${secretName} (
+ TYPE BIGQUERY,
+ SCOPE 'bq://${projectId}',
+ SERVICE_ACCOUNT_JSON '${escapedJson}'
+ );
+ `;
+ await connection.runSQL(createSecretCommand);
+ logger2.info(`Created BigQuery secret: ${secretName} for project: ${projectId}`);
+ const attachCommand = `ATTACH 'project=${projectId}' AS ${attachedDb.name} (TYPE bigquery, READ_ONLY);`;
+ await connection.runSQL(attachCommand);
+ logger2.info(`Successfully attached BigQuery database: ${attachedDb.name}`);
+ }
+ async function attachSnowflake(connection, attachedDb) {
+ if (!attachedDb.snowflakeConnection) {
+ throw new Error(`Snowflake connection configuration missing for: ${attachedDb.name}`);
+ }
+ const config = attachedDb.snowflakeConnection;
+ const requiredFields = {
+ account: config.account,
+ username: config.username,
+ password: config.password
+ };
+ for (const [field, value] of Object.entries(requiredFields)) {
+ if (!value) {
+ throw new Error(`Snowflake ${field} is required for: ${attachedDb.name}`);
+ }
+ }
+ await installAndLoadExtension(connection, "snowflake", true);
+ try {
+ const version = await connection.runSQL("SELECT snowflake_version();");
+ logger2.info(`Snowflake ADBC driver verified with version:`, version.rows);
+ } catch (error) {
+ throw new Error(`Snowflake ADBC driver verification failed: ${error instanceof Error ? error.message : String(error)}`);
+ }
+ const params = {
+ account: escapeSQL(config.account || ""),
+ user: escapeSQL(config.username || ""),
+ password: escapeSQL(config.password || ""),
+ database: config.database ? escapeSQL(config.database) : undefined,
+ warehouse: config.warehouse ? escapeSQL(config.warehouse) : undefined,
+ schema: config.schema ? escapeSQL(config.schema) : undefined,
+ role: config.role ? escapeSQL(config.role) : undefined
+ };
+ const attachParts = [
+ `account=${params.account}`,
+ `user=${params.user}`,
+ `password=${params.password}`
+ ];
+ if (params.database)
+ attachParts.push(`database=${params.database}`);
+ if (params.warehouse)
+ attachParts.push(`warehouse=${params.warehouse}`);
+ const secretString = `CREATE OR REPLACE SECRET ${attachedDb.name}_secret (
+ TYPE snowflake,
+ ACCOUNT '${params.account}',
+ USER '${params.user}',
+ PASSWORD '${params.password}',
+ DATABASE '${params.database}',
+ WAREHOUSE '${params.warehouse}'
+ );`;
+ await connection.runSQL(secretString);
+ const testresult = await connection.runSQL(`SELECT * FROM snowflake_scan('SELECT 1', '${attachedDb.name}_secret');`);
+ logger2.info(`Testing Snowflake connection:`, testresult.rows);
+ const attachCommand = `ATTACH '${attachedDb.name}' AS ${attachedDb.name} (TYPE snowflake, SECRET ${attachedDb.name}_secret, READ_ONLY);`;
+ await connection.runSQL(attachCommand);
+ logger2.info(`Successfully attached Snowflake database: ${attachedDb.name}`);
+ }
+ async function attachPostgres(connection, attachedDb) {
+ if (!attachedDb.postgresConnection) {
+ throw new Error(`PostgreSQL connection configuration missing for: ${attachedDb.name}`);
+ }
+ await installAndLoadExtension(connection, "postgres");
+ const config = attachedDb.postgresConnection;
+ let attachString;
+ if (config.connectionString) {
+ attachString = config.connectionString;
+ } else {
+ const parts = [];
+ if (config.host)
+ parts.push(`host=${config.host}`);
+ if (config.port)
+ parts.push(`port=${config.port}`);
+ if (config.databaseName)
+ parts.push(`dbname=${config.databaseName}`);
+ if (config.userName)
+ parts.push(`user=${config.userName}`);
+ if (config.password)
+ parts.push(`password=${config.password}`);
+ if (process.env.PGSSLMODE === "no-verify")
+ parts.push(`sslmode=disable`);
+ attachString = parts.join(" ");
+ }
+ const attachCommand = `ATTACH '${attachString}' AS ${attachedDb.name} (TYPE postgres, READ_ONLY);`;
+ await connection.runSQL(attachCommand);
+ logger2.info(`Successfully attached PostgreSQL database: ${attachedDb.name}`);
+ }
+ async function attachMotherDuck(connection, attachedDb) {
+ if (!attachedDb.motherDuckConnection) {
+ throw new Error(`MotherDuck connection configuration missing for: ${attachedDb.name}`);
+ }
+ const config = attachedDb.motherDuckConnection;
+ if (!config.database) {
+ throw new Error(`MotherDuck database name is required for: ${attachedDb.name}`);
+ }
+ await installAndLoadExtension(connection, "motherduck");
+ if (config.accessToken) {
+ const escapedToken = escapeSQL(config.accessToken);
+ await connection.runSQL(`SET motherduck_token = '${escapedToken}';`);
+ }
+ const connectionString = `md:${config.database}`;
+ logger2.info(`Connecting to MotherDuck database: ${config.database} as ${attachedDb.name}`);
+ const attachCommand = `ATTACH '${connectionString}' AS ${attachedDb.name} (TYPE motherduck, READ_ONLY);`;
+ await connection.runSQL(attachCommand);
+ logger2.info(`Successfully attached MotherDuck database: ${attachedDb.name}`);
+ }
  async function attachDatabasesToDuckDB(duckdbConnection, attachedDatabases) {
+ const attachHandlers = {
+ bigquery: attachBigQuery,
+ snowflake: attachSnowflake,
+ postgres: attachPostgres,
+ motherduck: attachMotherDuck
+ };
  for (const attachedDb of attachedDatabases) {
  try {
+ if (await isDatabaseAttached(duckdbConnection, attachedDb.name || "")) {
+ logger2.info(`Database ${attachedDb.name} is already attached, skipping`);
+ continue;
+ }
+ const handler = attachHandlers[attachedDb.type];
+ if (!handler) {
+ throw new Error(`Unsupported database type: ${attachedDb.type}`);
+ }
  try {
- const checkQuery = `SHOW DATABASES`;
- const existingDatabases = await duckdbConnection.runSQL(checkQuery);
- const rows = Array.isArray(existingDatabases) ? existingDatabases : existingDatabases.rows || [];
- logger2.debug(`Existing databases:`, rows);
- const isAlreadyAttached = rows.some((row) => {
- return Object.values(row).some((value) => typeof value === "string" && value === attachedDb.name);
- });
- if (isAlreadyAttached) {
- logger2.info(`Database ${attachedDb.name} is already attached, skipping`);
- continue;
- }
- } catch (error) {
- logger2.warn(`Failed to check existing databases, proceeding with attachment:`, error);
- }
- switch (attachedDb.type) {
- case "bigquery": {
- if (!attachedDb.bigqueryConnection) {
- throw new Error(`BigQuery connection configuration is missing for attached database: ${attachedDb.name}`);
- }
- await duckdbConnection.runSQL("INSTALL bigquery FROM community;");
- await duckdbConnection.runSQL("LOAD bigquery;");
- const bigqueryConfig = attachedDb.bigqueryConnection;
- const attachParams = new URLSearchParams;
- if (!bigqueryConfig.defaultProjectId) {
- throw new Error(`BigQuery defaultProjectId is required for attached database: ${attachedDb.name}`);
- }
- attachParams.set("project", bigqueryConfig.defaultProjectId);
- if (bigqueryConfig.serviceAccountKeyJson) {
- const serviceAccountKeyPath = import_path2.default.join(TEMP_DIR_PATH, `duckdb-${attachedDb.name}-${v4_default()}-service-account-key.json`);
- await import_promises.default.writeFile(serviceAccountKeyPath, bigqueryConfig.serviceAccountKeyJson);
- attachParams.set("service_account_key", serviceAccountKeyPath);
- }
- const attachCommand = `ATTACH '${attachParams.toString()}' AS ${attachedDb.name} (TYPE bigquery, READ_ONLY);`;
- try {
- await duckdbConnection.runSQL(attachCommand);
- logger2.info(`Successfully attached BigQuery database: ${attachedDb.name}`);
- } catch (attachError) {
- if (attachError instanceof Error && attachError.message && attachError.message.includes("already exists")) {
- logger2.info(`BigQuery database ${attachedDb.name} is already attached, skipping`);
- } else {
- throw attachError;
- }
- }
- break;
- }
- case "snowflake": {
- if (!attachedDb.snowflakeConnection) {
- throw new Error(`Snowflake connection configuration is missing for attached database: ${attachedDb.name}`);
- }
- await duckdbConnection.runSQL("INSTALL snowflake FROM community;");
- await duckdbConnection.runSQL("LOAD snowflake;");
- const snowflakeConfig = attachedDb.snowflakeConnection;
- const attachParams = new URLSearchParams;
- if (snowflakeConfig.account) {
- attachParams.set("account", snowflakeConfig.account);
- }
- if (snowflakeConfig.username) {
- attachParams.set("username", snowflakeConfig.username);
- }
- if (snowflakeConfig.password) {
- attachParams.set("password", snowflakeConfig.password);
- }
- if (snowflakeConfig.database) {
- attachParams.set("database", snowflakeConfig.database);
- }
- if (snowflakeConfig.warehouse) {
- attachParams.set("warehouse", snowflakeConfig.warehouse);
- }
- if (snowflakeConfig.role) {
- attachParams.set("role", snowflakeConfig.role);
- }
- const attachCommand = `ATTACH '${attachParams.toString()}' AS ${attachedDb.name} (TYPE snowflake, READ_ONLY);`;
- try {
- await duckdbConnection.runSQL(attachCommand);
- logger2.info(`Successfully attached Snowflake database: ${attachedDb.name}`);
- } catch (attachError) {
- if (attachError instanceof Error && attachError.message && attachError.message.includes("already exists")) {
- logger2.info(`Snowflake database ${attachedDb.name} is already attached, skipping`);
- } else {
- throw attachError;
- }
- }
- break;
- }
- case "postgres": {
- if (!attachedDb.postgresConnection) {
- throw new Error(`PostgreSQL connection configuration is missing for attached database: ${attachedDb.name}`);
- }
- await duckdbConnection.runSQL("INSTALL postgres FROM community;");
- await duckdbConnection.runSQL("LOAD postgres;");
- const postgresConfig = attachedDb.postgresConnection;
- let attachString;
- if (postgresConfig.connectionString) {
- attachString = postgresConfig.connectionString;
- } else {
- const params = new URLSearchParams;
- if (postgresConfig.host) {
- params.set("host", postgresConfig.host);
- }
- if (postgresConfig.port) {
- params.set("port", postgresConfig.port.toString());
- }
- if (postgresConfig.databaseName) {
- params.set("dbname", postgresConfig.databaseName);
- }
- if (postgresConfig.userName) {
- params.set("user", postgresConfig.userName);
- }
- if (postgresConfig.password) {
- params.set("password", postgresConfig.password);
- }
- attachString = params.toString();
- }
- const attachCommand = `ATTACH '${attachString}' AS ${attachedDb.name} (TYPE postgres, READ_ONLY);`;
- try {
- await duckdbConnection.runSQL(attachCommand);
- logger2.info(`Successfully attached PostgreSQL database: ${attachedDb.name}`);
- } catch (attachError) {
- if (attachError instanceof Error && attachError.message && attachError.message.includes("already exists")) {
- logger2.info(`PostgreSQL database ${attachedDb.name} is already attached, skipping`);
- } else {
- throw attachError;
- }
- }
- break;
- }
- default:
- throw new Error(`Unsupported attached database type: ${attachedDb.type}`);
+ await handler(duckdbConnection, attachedDb);
+ } catch (attachError) {
+ handleAlreadyAttachedError(attachError, attachedDb.name || "");
  }
  } catch (error) {
  logger2.error(`Failed to attach database ${attachedDb.name}:`, error);
@@ -132060,7 +132133,7 @@ async function attachDatabasesToDuckDB(duckdbConnection, attachedDatabases) {
  }
  }
  }
- async function createProjectConnections(connections = []) {
+ async function createProjectConnections(connections = [], projectPath = "") {
  const connectionMap = new Map;
  const processedConnections = new Set;
  const apiConnections = [];
@@ -132177,6 +132250,15 @@ async function createProjectConnections(connections = []) {
  break;
  }
  case "duckdb": {
+ if (!connection.duckdbConnection) {
+ throw new Error("DuckDB connection configuration is missing.");
+ }
+ const duckdbConnection = new import_db_duckdb.DuckDBConnection(connection.name, ":memory:", projectPath);
+ if (connection.duckdbConnection.attachedDatabases && Array.isArray(connection.duckdbConnection.attachedDatabases) && connection.duckdbConnection.attachedDatabases.length > 0) {
+ await attachDatabasesToDuckDB(duckdbConnection, connection.duckdbConnection.attachedDatabases);
+ }
+ connectionMap.set(connection.name, duckdbConnection);
+ connection.attributes = getConnectionAttributes(duckdbConnection);
  break;
  }
  default: {
@@ -132255,6 +132337,12 @@ async function testConnectionConfig(connectionConfig) {
  if (!connectionConfig.name) {
  throw new Error("Connection name is required");
  }
+ if (connectionConfig.type === "duckdb") {
+ return {
+ status: "ok",
+ errorMessage: ""
+ };
+ }
  const { malloyConnections } = await createProjectConnections([connectionConfig]);
  const connection = malloyConnections.get(connectionConfig.name);
  if (!connection) {
@@ -132318,11 +132406,16 @@ async function getSchemasForConnection(connection, malloyConnection) {
  try {
  const bigquery = createBigQueryClient(connection);
  const [datasets] = await bigquery.getDatasets();
- return datasets.map((dataset) => ({
- name: dataset.id,
- isHidden: false,
- isDefault: false
+ const schemas = await Promise.all(datasets.map(async (dataset) => {
+ const [metadata] = await dataset.getMetadata();
+ return {
+ name: dataset.id,
+ isHidden: false,
+ isDefault: false,
+ description: metadata?.description
+ };
  }));
+ return schemas;
  } catch (error) {
  console.error(`Error getting schemas for BigQuery connection ${connection.name}:`, error);
  throw new Error(`Failed to get schemas for BigQuery connection ${connection.name}: ${error.message}`);
@@ -132409,20 +132502,24 @@ async function getSchemasForConnection(connection, malloyConnection) {
  throw new Error("DuckDB connection is required");
  }
  try {
- const result = await malloyConnection.runSQL("SELECT DISTINCT schema_name FROM information_schema.schemata ORDER BY schema_name");
+ const result = await malloyConnection.runSQL("SELECT DISTINCT schema_name,catalog_name FROM information_schema.schemata ORDER BY catalog_name,schema_name", { rowLimit: 1000 });
  const rows = standardizeRunSQLResult(result);
- const hasAttachedDatabases = connection.duckdbConnection?.attachedDatabases && Array.isArray(connection.duckdbConnection.attachedDatabases) && connection.duckdbConnection.attachedDatabases.length > 0;
  return rows.map((row) => {
  const typedRow = row;
- let schemaName = typedRow.schema_name;
- if (hasAttachedDatabases && schemaName !== "main") {
- const attachedDbName = connection.duckdbConnection.attachedDatabases[0].name;
- schemaName = `${attachedDbName}.${schemaName}`;
- }
+ const schemaName = typedRow.schema_name;
+ const catalogName = typedRow.catalog_name;
  return {
- name: schemaName,
- isHidden: false,
- isDefault: typedRow.schema_name === "main"
+ name: `${catalogName}.${schemaName}`,
+ isHidden: [
+ "information_schema",
+ "performance_schema",
+ "",
+ "SNOWFLAKE",
+ "information_schema",
+ "pg_catalog",
+ "pg_toast"
+ ].includes(schemaName) || ["md_information_schema", "system"].includes(catalogName),
+ isDefault: catalogName === "main"
  };
  });
  } catch (error) {
@@ -132568,15 +132665,9 @@ async function listTablesForSchema(connection, schemaName, malloyConnection) {
  throw new Error("DuckDB connection is required");
  }
  try {
- const hasAttachedDatabases = connection.duckdbConnection?.attachedDatabases && Array.isArray(connection.duckdbConnection.attachedDatabases) && connection.duckdbConnection.attachedDatabases.length > 0;
- let actualSchemaName = schemaName;
- if (hasAttachedDatabases && schemaName.includes(".")) {
- const attachedDbName = connection.duckdbConnection.attachedDatabases[0].name;
- if (schemaName.startsWith(`${attachedDbName}.`)) {
- actualSchemaName = schemaName.substring(attachedDbName.length + 1);
- }
- }
- const result = await malloyConnection.runSQL(`SELECT table_name FROM information_schema.tables WHERE table_schema = '${actualSchemaName}' ORDER BY table_name`);
+ const catalogName = schemaName.split(".")[0];
+ schemaName = schemaName.split(".")[1];
+ const result = await malloyConnection.runSQL(`SELECT table_name FROM information_schema.tables WHERE table_schema = '${schemaName}' and table_catalog = '${catalogName}' ORDER BY table_name`, { rowLimit: 1000 });
  const rows = standardizeRunSQLResult(result);
  return rows.map((row) => {
  const typedRow = row;
@@ -132603,7 +132694,7 @@ class ConnectionController {
  if (connection.type === "duckdb") {
  const packages = await project.listPackages();
  if (packages.length === 0) {
- throw new ConnectionError("No packages found for DuckDB connection");
+ return project.getMalloyConnection(connectionName);
  }
  const packageName = packages[0].name;
  if (!packageName) {
@@ -132657,7 +132748,8 @@ class ConnectionController {
  const tableSource = await getConnectionTableSource(malloyConnection, tablePath.split(".").pop(), tablePath);
  return {
  resource: tablePath,
- columns: tableSource.columns
+ columns: tableSource.columns,
+ source: tableSource.source
  };
  }
  async getConnectionQueryData(projectName, connectionName, sqlStatement, options) {
@@ -139347,7 +139439,7 @@ class Project {
  }
  if (payload.connections) {
  logger2.info(`Updating ${payload.connections.length} connections for project ${this.projectName}`);
- const { malloyConnections, apiConnections } = await createProjectConnections(payload.connections);
+ const { malloyConnections, apiConnections } = await createProjectConnections(payload.connections, this.projectPath);
  this.malloyConnections = malloyConnections;
  this.apiConnections = apiConnections;
  logger2.info(`Successfully updated connections for project ${this.projectName}`, {
@@ -139363,7 +139455,7 @@ class Project {
  throw new ProjectNotFoundError(`Project path ${projectPath} not found`);
  }
  logger2.info(`Creating project with connection configuration`);
- const { malloyConnections, apiConnections } = await createProjectConnections(connections);
+ const { malloyConnections, apiConnections } = await createProjectConnections(connections, projectPath);
  logger2.info(`Loaded ${malloyConnections.size + apiConnections.length} connections for project ${projectName}`, {
  malloyConnections,
  apiConnections
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@malloy-publisher/server",
  "description": "Malloy Publisher Server",
- "version": "0.0.122",
+ "version": "0.0.124",
  "main": "dist/server.js",
  "bin": {
  "malloy-publisher": "dist/server.js"
@@ -40,9 +40,7 @@ export class ConnectionController {
  if (connection.type === "duckdb") {
  const packages = await project.listPackages();
  if (packages.length === 0) {
- throw new ConnectionError(
- "No packages found for DuckDB connection",
- );
+ return project.getMalloyConnection(connectionName);
  }
  // For now, use the first package's DuckDB connection
  const packageName = packages[0].name;
@@ -165,6 +163,7 @@ export class ConnectionController {
  return {
  resource: tablePath,
  columns: tableSource.columns,
+ source: tableSource.source,
  };
  }