@malloy-publisher/server 0.0.176 → 0.0.177

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/server.js CHANGED
@@ -212003,6 +212003,7 @@ function httpMetricsMiddleware(req, res, next) {
212003
212003
  // src/server.ts
212004
212004
  var exports_server = {};
212005
212005
  __export(exports_server, {
212006
+ normalizeQueryArray: () => normalizeQueryArray,
212006
212007
  mcpApp: () => mcpApp
212007
212008
  });
212008
212009
  module.exports = __toCommonJS(exports_server);
@@ -221720,6 +221721,31 @@ async function listCloudDirectorySchemas(credentials) {
221720
221721
  }
221721
221722
 
221722
221723
  // src/service/db_utils.ts
221724
+ function sqlInFilter(columnName, values) {
221725
+ if (!values || values.length === 0)
221726
+ return "";
221727
+ const escaped = values.map((v) => `'${v.replace(/'/g, "''")}'`);
221728
+ return `AND ${columnName} IN (${escaped.join(", ")})`;
221729
+ }
221730
+ function groupColumnRowsIntoTables(rows, buildResource) {
221731
+ const tableMap = new Map;
221732
+ for (const row of rows) {
221733
+ const r = row;
221734
+ const tableName = String(r.TABLE_NAME ?? r.table_name ?? "");
221735
+ const columnName = String(r.COLUMN_NAME ?? r.column_name ?? "");
221736
+ const dataType = String(r.DATA_TYPE ?? r.data_type ?? "").toLowerCase();
221737
+ if (!tableName)
221738
+ continue;
221739
+ if (!tableMap.has(tableName))
221740
+ tableMap.set(tableName, []);
221741
+ tableMap.get(tableName).push({ name: columnName, type: dataType });
221742
+ }
221743
+ const tables = [];
221744
+ for (const [tableName, columns] of tableMap) {
221745
+ tables.push({ resource: buildResource(tableName), columns });
221746
+ }
221747
+ return tables;
221748
+ }
221723
221749
  function createBigQueryClient(connection) {
221724
221750
  if (!connection.bigqueryConnection) {
221725
221751
  throw new Error("BigQuery connection is required");
@@ -221728,17 +221754,18 @@ function createBigQueryClient(connection) {
221728
221754
  projectId: connection.bigqueryConnection.defaultProjectId || ""
221729
221755
  };
221730
221756
  if (connection.bigqueryConnection.serviceAccountKeyJson) {
221757
+ let credentials;
221731
221758
  try {
221732
- const credentials = JSON.parse(connection.bigqueryConnection.serviceAccountKeyJson);
221733
- config.credentials = credentials;
221734
- if (!config.projectId && credentials.project_id) {
221735
- config.projectId = credentials.project_id;
221736
- }
221737
- if (!config.projectId) {
221738
- throw new Error("BigQuery project ID is required. Either set the defaultProjectId in the connection configuration or the project_id in the service account key JSON.");
221739
- }
221740
- } catch (error) {
221741
- logger.warn("Failed to parse service account key JSON, using default credentials", { error });
221759
+ credentials = JSON.parse(connection.bigqueryConnection.serviceAccountKeyJson);
221760
+ } catch (parseError) {
221761
+ throw new Error(`Failed to parse BigQuery service account key JSON: ${parseError.message}`);
221762
+ }
221763
+ config.credentials = credentials;
221764
+ if (!config.projectId && credentials.project_id) {
221765
+ config.projectId = credentials.project_id;
221766
+ }
221767
+ if (!config.projectId) {
221768
+ throw new Error("BigQuery project ID is required. Either set the defaultProjectId in the connection configuration or the project_id in the service account key JSON.");
221742
221769
  }
221743
221770
  } else if (Object.keys(connection.bigqueryConnection).length === 0 && process.env.GOOGLE_APPLICATION_CREDENTIALS) {
221744
221771
  config.keyFilename = process.env.GOOGLE_APPLICATION_CREDENTIALS || "";
@@ -221761,238 +221788,264 @@ function getCloudCredentialsFromAttachedDatabases(attachedDatabases, storageType
221761
221788
  }
221762
221789
  return null;
221763
221790
  }
221764
- async function getSchemasForConnection(connection, malloyConnection) {
221765
- if (connection.type === "bigquery") {
221766
- if (!connection.bigqueryConnection) {
221767
- throw new Error("BigQuery connection is required");
221768
- }
221769
- try {
221770
- const bigquery = createBigQueryClient(connection);
221771
- const [datasets] = await bigquery.getDatasets();
221772
- const schemas = await Promise.all(datasets.map(async (dataset) => {
221773
- const [metadata] = await dataset.getMetadata();
221774
- return {
221775
- name: dataset.id,
221776
- isHidden: false,
221777
- isDefault: false,
221778
- description: metadata?.description
221779
- };
221780
- }));
221781
- return schemas;
221782
- } catch (error) {
221783
- console.error(`Error getting schemas for BigQuery connection ${connection.name}:`, error);
221784
- throw new Error(`Failed to get schemas for BigQuery connection ${connection.name}: ${error.message}`);
221785
- }
221786
- } else if (connection.type === "postgres") {
221787
- if (!connection.postgresConnection) {
221788
- throw new Error("Postgres connection is required");
221789
- }
221790
- try {
221791
- const result = await malloyConnection.runSQL("SELECT schema_name as row FROM information_schema.schemata ORDER BY schema_name");
221792
- const rows = standardizeRunSQLResult2(result);
221793
- return rows.map((row) => {
221794
- const schemaName = row;
221795
- return {
221796
- name: schemaName,
221797
- isHidden: [
221798
- "information_schema",
221799
- "pg_catalog",
221800
- "pg_toast"
221801
- ].includes(schemaName),
221802
- isDefault: schemaName === "public"
221803
- };
221804
- });
221805
- } catch (error) {
221806
- console.error(`Error getting schemas for Postgres connection ${connection.name}:`, error);
221807
- throw new Error(`Failed to get schemas for Postgres connection ${connection.name}: ${error.message}`);
221808
- }
221809
- } else if (connection.type === "mysql") {
221810
- if (!connection.mysqlConnection) {
221811
- throw new Error("Mysql connection is required");
221812
- }
221813
- try {
221814
- return [
221815
- {
221816
- name: connection.mysqlConnection.database || "mysql",
221817
- isHidden: false,
221818
- isDefault: true
221819
- }
221820
- ];
221821
- } catch (error) {
221822
- console.error(`Error getting schemas for MySQL connection ${connection.name}:`, error);
221823
- throw new Error(`Failed to get schemas for MySQL connection ${connection.name}: ${error.message}`);
221824
- }
221825
- } else if (connection.type === "snowflake") {
221826
- if (!connection.snowflakeConnection) {
221827
- throw new Error("Snowflake connection is required");
221828
- }
221829
- try {
221830
- const result = await malloyConnection.runSQL("SHOW SCHEMAS");
221831
- const rows = standardizeRunSQLResult2(result);
221832
- return rows.map((row) => {
221833
- const typedRow = row;
221834
- const databaseName = String(typedRow.database_name ?? typedRow.DATABASE_NAME ?? "");
221835
- const name = String(typedRow.name ?? typedRow.NAME ?? "");
221836
- const owner = String(typedRow.owner ?? typedRow.OWNER ?? "");
221837
- const isDefaultVal = typedRow.is_default ?? typedRow.isDefault ?? typedRow.IS_DEFAULT;
221838
- return {
221839
- name: `${databaseName}.${name}`,
221840
- isHidden: ["SNOWFLAKE", ""].includes(owner),
221841
- isDefault: isDefaultVal === "Y"
221842
- };
221843
- });
221844
- } catch (error) {
221845
- console.error(`Error getting schemas for Snowflake connection ${connection.name}:`, error);
221846
- throw new Error(`Failed to get schemas for Snowflake connection ${connection.name}: ${error.message}`);
221847
- }
221848
- } else if (connection.type === "trino") {
221849
- if (!connection.trinoConnection) {
221850
- throw new Error("Trino connection is required");
221791
+ async function getSchemasForBigQuery(connection) {
221792
+ if (!connection.bigqueryConnection) {
221793
+ throw new Error("BigQuery connection is required");
221794
+ }
221795
+ try {
221796
+ const bigquery = createBigQueryClient(connection);
221797
+ const [datasets] = await bigquery.getDatasets();
221798
+ return await Promise.all(datasets.map(async (dataset) => {
221799
+ const [metadata] = await dataset.getMetadata();
221800
+ return {
221801
+ name: dataset.id,
221802
+ isHidden: false,
221803
+ isDefault: false,
221804
+ description: metadata?.description
221805
+ };
221806
+ }));
221807
+ } catch (error) {
221808
+ logger.error(`Error getting schemas for BigQuery connection ${connection.name}`, { error });
221809
+ throw new Error(`Failed to get schemas for BigQuery connection ${connection.name}: ${error.message}`);
221810
+ }
221811
+ }
221812
+ async function getSchemasForPostgres(connection, malloyConnection) {
221813
+ if (!connection.postgresConnection) {
221814
+ throw new Error("Postgres connection is required");
221815
+ }
221816
+ try {
221817
+ const result = await malloyConnection.runSQL("SELECT schema_name FROM information_schema.schemata ORDER BY schema_name");
221818
+ const rows = standardizeRunSQLResult2(result);
221819
+ return rows.map((row) => {
221820
+ const typedRow = row;
221821
+ const schemaName = String(typedRow.schema_name ?? typedRow.SCHEMA_NAME ?? "");
221822
+ return {
221823
+ name: schemaName,
221824
+ isHidden: ["information_schema", "pg_catalog", "pg_toast"].includes(schemaName),
221825
+ isDefault: schemaName === "public"
221826
+ };
221827
+ });
221828
+ } catch (error) {
221829
+ logger.error(`Error getting schemas for Postgres connection ${connection.name}`, { error });
221830
+ throw new Error(`Failed to get schemas for Postgres connection ${connection.name}: ${error.message}`);
221831
+ }
221832
+ }
221833
+ async function getSchemasForMySQL(connection) {
221834
+ if (!connection.mysqlConnection) {
221835
+ throw new Error("Mysql connection is required");
221836
+ }
221837
+ return [
221838
+ {
221839
+ name: connection.mysqlConnection.database || "mysql",
221840
+ isHidden: false,
221841
+ isDefault: true
221851
221842
  }
221852
- try {
221853
- let result;
221854
- if (connection.trinoConnection.catalog) {
221855
- result = await malloyConnection.runSQL(`SHOW SCHEMAS FROM ${connection.trinoConnection.catalog}`);
221856
- } else {
221857
- const catalogs = await malloyConnection.runSQL(`SHOW CATALOGS`);
221858
- console.log("catalogs", catalogs);
221859
- let catalogNames = standardizeRunSQLResult2(catalogs);
221860
- catalogNames = catalogNames.map((catalog) => {
221861
- const typedCatalog = catalog;
221862
- return typedCatalog.Catalog;
221863
- });
221864
- const schemas = [];
221865
- console.log("catalogNames", catalogNames);
221866
- for (const catalog of catalogNames) {
221867
- const schemasResult = await malloyConnection.runSQL(`SHOW SCHEMAS FROM ${catalog}`);
221868
- const schemasResultRows = standardizeRunSQLResult2(schemasResult);
221869
- console.log("schemasResultRows", schemasResultRows);
221870
- const schemasWithCatalog = schemasResultRows.map((row) => {
221871
- const typedRow = row;
221872
- return {
221873
- ...typedRow,
221874
- Schema: `${catalog}.${typedRow.Schema ?? typedRow.schema ?? ""}`
221875
- };
221876
- });
221877
- schemas.push(...schemasWithCatalog);
221878
- console.log("schemas", schemas);
221879
- }
221880
- result = schemas;
221881
- }
221843
+ ];
221844
+ }
221845
+ async function getSchemasForSnowflake(connection, malloyConnection) {
221846
+ if (!connection.snowflakeConnection) {
221847
+ throw new Error("Snowflake connection is required");
221848
+ }
221849
+ try {
221850
+ const database = connection.snowflakeConnection.database;
221851
+ const schema = connection.snowflakeConnection.schema;
221852
+ const filters = [];
221853
+ if (database) {
221854
+ filters.push(`CATALOG_NAME = '${database}'`);
221855
+ }
221856
+ if (schema) {
221857
+ filters.push(`SCHEMA_NAME = '${schema}'`);
221858
+ }
221859
+ const whereClause = filters.length > 0 ? `WHERE ${filters.join(" AND ")}` : "";
221860
+ const result = await malloyConnection.runSQL(`SELECT CATALOG_NAME, SCHEMA_NAME, SCHEMA_OWNER FROM ${database ? `${database}.` : ""}INFORMATION_SCHEMA.SCHEMATA ${whereClause} ORDER BY SCHEMA_NAME`);
221861
+ const rows = standardizeRunSQLResult2(result);
221862
+ return rows.map((row) => {
221863
+ const typedRow = row;
221864
+ const catalogName = String(typedRow.CATALOG_NAME ?? typedRow.catalog_name ?? "");
221865
+ const schemaName = String(typedRow.SCHEMA_NAME ?? typedRow.schema_name ?? "");
221866
+ const owner = String(typedRow.SCHEMA_OWNER ?? typedRow.schema_owner ?? "");
221867
+ return {
221868
+ name: `${catalogName}.${schemaName}`,
221869
+ isHidden: ["SNOWFLAKE", ""].includes(owner) || schemaName === "INFORMATION_SCHEMA",
221870
+ isDefault: schema ? schemaName === schema : false
221871
+ };
221872
+ });
221873
+ } catch (error) {
221874
+ logger.error(`Error getting schemas for Snowflake connection ${connection.name}`, { error });
221875
+ throw new Error(`Failed to get schemas for Snowflake connection ${connection.name}: ${error.message}`);
221876
+ }
221877
+ }
221878
+ async function getSchemasForTrino(connection, malloyConnection) {
221879
+ if (!connection.trinoConnection) {
221880
+ throw new Error("Trino connection is required");
221881
+ }
221882
+ try {
221883
+ const configuredSchema = connection.trinoConnection.schema;
221884
+ let allRows = [];
221885
+ if (connection.trinoConnection.catalog) {
221886
+ const catalog = connection.trinoConnection.catalog;
221887
+ const result = await malloyConnection.runSQL(`SELECT schema_name FROM ${catalog}.information_schema.schemata ORDER BY schema_name`);
221882
221888
  const rows = standardizeRunSQLResult2(result);
221883
- return rows.map((row) => {
221884
- const typedRow = row;
221889
+ allRows = rows.map((row) => {
221890
+ const r = row;
221885
221891
  return {
221886
- name: typedRow.Schema,
221887
- isHidden: ["information_schema", "performance_schema"].includes(typedRow.Schema),
221888
- isDefault: typedRow.Schema === connection.trinoConnection?.schema
221892
+ catalog,
221893
+ schema: String(r.schema_name ?? r.Schema ?? "")
221889
221894
  };
221890
221895
  });
221891
- } catch (error) {
221892
- console.error(`Error getting schemas for Trino connection ${connection.name}:`, error);
221893
- throw new Error(`Failed to get schemas for Trino connection ${connection.name}: ${error.message}`);
221894
- }
221895
- } else if (connection.type === "duckdb") {
221896
- if (!connection.duckdbConnection) {
221897
- throw new Error("DuckDB connection is required");
221898
- }
221899
- try {
221900
- const result = await malloyConnection.runSQL("SELECT DISTINCT schema_name,catalog_name FROM information_schema.schemata ORDER BY catalog_name,schema_name", { rowLimit: 1000 });
221901
- const rows = standardizeRunSQLResult2(result);
221902
- const schemas = rows.map((row) => {
221903
- const typedRow = row;
221904
- const schemaName = typedRow.schema_name;
221905
- const catalogName = typedRow.catalog_name;
221906
- return {
221907
- name: `${catalogName}.${schemaName}`,
221908
- isHidden: [
221909
- "information_schema",
221910
- "performance_schema",
221911
- "",
221912
- "SNOWFLAKE",
221913
- "information_schema",
221914
- "pg_catalog",
221915
- "pg_toast"
221916
- ].includes(schemaName) || ["md_information_schema", "system"].includes(catalogName),
221917
- isDefault: catalogName === "main"
221918
- };
221896
+ } else {
221897
+ const catalogsResult = await malloyConnection.runSQL(`SHOW CATALOGS`);
221898
+ const catalogNames = standardizeRunSQLResult2(catalogsResult).map((row) => {
221899
+ const r = row;
221900
+ return String(r.Catalog ?? r.catalog ?? "");
221919
221901
  });
221920
- const attachedDatabases = connection.duckdbConnection.attachedDatabases || [];
221921
- const cloudDatabases = attachedDatabases.filter((attachedDb) => (attachedDb.type === "gcs" || attachedDb.type === "s3") && (attachedDb.gcsConnection || attachedDb.s3Connection));
221922
- const cloudDbPromises = cloudDatabases.map(async (attachedDb) => {
221923
- const dbType = attachedDb.type;
221924
- const credentials = dbType === "gcs" ? gcsConnectionToCredentials(attachedDb.gcsConnection) : s3ConnectionToCredentials(attachedDb.s3Connection);
221902
+ for (const catalog of catalogNames) {
221925
221903
  try {
221926
- return await listCloudDirectorySchemas(credentials);
221927
- } catch (cloudError) {
221928
- logger.warn(`Failed to list ${dbType.toUpperCase()} directory schemas for ${attachedDb.name}`, { error: cloudError });
221929
- return [];
221930
- }
221931
- });
221932
- const cloudSchemaArrays = await Promise.all(cloudDbPromises);
221933
- for (const cloudSchemas of cloudSchemaArrays) {
221934
- schemas.push(...cloudSchemas);
221935
- }
221936
- const azureDatabases = attachedDatabases.filter((attachedDb) => attachedDb.type === "azure" && attachedDb.azureConnection);
221937
- for (const attachedDb of azureDatabases) {
221938
- if (attachedDb.name) {
221939
- schemas.push({
221940
- name: attachedDb.name,
221941
- isHidden: false,
221942
- isDefault: false
221943
- });
221904
+ const result = await malloyConnection.runSQL(`SELECT schema_name FROM ${catalog}.information_schema.schemata ORDER BY schema_name`);
221905
+ const rows = standardizeRunSQLResult2(result);
221906
+ for (const row of rows) {
221907
+ const r = row;
221908
+ allRows.push({
221909
+ catalog,
221910
+ schema: String(r.schema_name ?? r.Schema ?? "")
221911
+ });
221912
+ }
221913
+ } catch (catalogError) {
221914
+ logger.warn(`Failed to list schemas for Trino catalog ${catalog}`, { error: catalogError });
221944
221915
  }
221945
221916
  }
221946
- return schemas;
221947
- } catch (error) {
221948
- console.error(`Error getting schemas for DuckDB connection ${connection.name}:`, error);
221949
- throw new Error(`Failed to get schemas for DuckDB connection ${connection.name}: ${error.message}`);
221950
- }
221951
- } else if (connection.type === "motherduck") {
221952
- if (!connection.motherduckConnection) {
221953
- throw new Error("MotherDuck connection is required");
221954
221917
  }
221955
- try {
221956
- const result = await malloyConnection.runSQL("SELECT DISTINCT schema_name as row FROM information_schema.schemata ORDER BY schema_name", { rowLimit: 1000 });
221957
- const rows = standardizeRunSQLResult2(result);
221958
- console.log(rows);
221959
- return rows.map((row) => {
221960
- const typedRow = row;
221961
- return {
221962
- name: typedRow.row,
221963
- isHidden: [
221964
- "information_schema",
221965
- "performance_schema",
221966
- ""
221967
- ].includes(typedRow.row),
221968
- isDefault: false
221969
- };
221970
- });
221971
- } catch (error) {
221972
- console.error(`Error getting schemas for MotherDuck connection ${connection.name}:`, error);
221973
- throw new Error(`Failed to get schemas for MotherDuck connection ${connection.name}: ${error.message}`);
221918
+ return allRows.map(({ catalog, schema }) => {
221919
+ const name = connection.trinoConnection?.catalog ? schema : `${catalog}.${schema}`;
221920
+ return {
221921
+ name,
221922
+ isHidden: ["information_schema", "performance_schema"].includes(schema),
221923
+ isDefault: configuredSchema ? schema === configuredSchema : false
221924
+ };
221925
+ });
221926
+ } catch (error) {
221927
+ logger.error(`Error getting schemas for Trino connection ${connection.name}`, { error });
221928
+ throw new Error(`Failed to get schemas for Trino connection ${connection.name}: ${error.message}`);
221929
+ }
221930
+ }
221931
+ async function getSchemasForDuckDB(connection, malloyConnection) {
221932
+ if (!connection.duckdbConnection) {
221933
+ throw new Error("DuckDB connection is required");
221934
+ }
221935
+ try {
221936
+ const result = await malloyConnection.runSQL("SELECT DISTINCT schema_name,catalog_name FROM information_schema.schemata ORDER BY catalog_name,schema_name", { rowLimit: 1000 });
221937
+ const rows = standardizeRunSQLResult2(result);
221938
+ const schemas = rows.map((row) => {
221939
+ const typedRow = row;
221940
+ const schemaName = String(typedRow.schema_name ?? "");
221941
+ const catalogName = String(typedRow.catalog_name ?? "");
221942
+ return {
221943
+ name: `${catalogName}.${schemaName}`,
221944
+ isHidden: [
221945
+ "information_schema",
221946
+ "performance_schema",
221947
+ "pg_catalog",
221948
+ "pg_toast",
221949
+ ""
221950
+ ].includes(schemaName) || ["md_information_schema", "system"].includes(catalogName),
221951
+ isDefault: catalogName === "main"
221952
+ };
221953
+ });
221954
+ const attachedDatabases = connection.duckdbConnection.attachedDatabases || [];
221955
+ const cloudDatabases = attachedDatabases.filter((attachedDb) => (attachedDb.type === "gcs" || attachedDb.type === "s3") && (attachedDb.gcsConnection || attachedDb.s3Connection));
221956
+ const cloudDbPromises = cloudDatabases.map(async (attachedDb) => {
221957
+ const dbType = attachedDb.type;
221958
+ const credentials = dbType === "gcs" ? gcsConnectionToCredentials(attachedDb.gcsConnection) : s3ConnectionToCredentials(attachedDb.s3Connection);
221959
+ try {
221960
+ return await listCloudDirectorySchemas(credentials);
221961
+ } catch (cloudError) {
221962
+ logger.warn(`Failed to list ${dbType.toUpperCase()} directory schemas for ${attachedDb.name}`, { error: cloudError });
221963
+ return [];
221964
+ }
221965
+ });
221966
+ const cloudSchemaArrays = await Promise.all(cloudDbPromises);
221967
+ for (const cloudSchemas of cloudSchemaArrays) {
221968
+ schemas.push(...cloudSchemas);
221974
221969
  }
221975
- } else if (connection.type === "ducklake") {
221976
- try {
221977
- const catalogName = connection.name;
221978
- const result = await malloyConnection.runSQL(`SELECT schema_name FROM information_schema.schemata WHERE catalog_name = '${catalogName}' ORDER BY schema_name`, { rowLimit: 1000 });
221979
- const rows = standardizeRunSQLResult2(result);
221980
- return rows.map((row) => {
221981
- const typedRow = row;
221982
- const schemaName = typedRow.schema_name;
221983
- const shouldShow = schemaName === "main" || schemaName === "public";
221984
- return {
221985
- name: schemaName,
221986
- isHidden: !shouldShow,
221970
+ const azureDatabases = attachedDatabases.filter((attachedDb) => attachedDb.type === "azure" && attachedDb.azureConnection);
221971
+ for (const attachedDb of azureDatabases) {
221972
+ if (attachedDb.name) {
221973
+ schemas.push({
221974
+ name: attachedDb.name,
221975
+ isHidden: false,
221987
221976
  isDefault: false
221988
- };
221989
- });
221990
- } catch (error) {
221991
- logger.error(`Error getting schemas for DuckLake connection ${connection.name}`, { error });
221992
- throw new Error(`Failed to get schemas for DuckLake connection ${connection.name}: ${error.message}`);
221977
+ });
221978
+ }
221993
221979
  }
221994
- } else {
221995
- throw new Error(`Unsupported connection type: ${connection.type}`);
221980
+ return schemas;
221981
+ } catch (error) {
221982
+ logger.error(`Error getting schemas for DuckDB connection ${connection.name}`, { error });
221983
+ throw new Error(`Failed to get schemas for DuckDB connection ${connection.name}: ${error.message}`);
221984
+ }
221985
+ }
221986
+ async function getSchemasForMotherDuck(connection, malloyConnection) {
221987
+ if (!connection.motherduckConnection) {
221988
+ throw new Error("MotherDuck connection is required");
221989
+ }
221990
+ try {
221991
+ const database = connection.motherduckConnection.database;
221992
+ const whereClause = database ? `WHERE catalog_name = '${database}'` : "";
221993
+ const result = await malloyConnection.runSQL(`SELECT DISTINCT schema_name FROM information_schema.schemata ${whereClause} ORDER BY schema_name`);
221994
+ const rows = standardizeRunSQLResult2(result);
221995
+ return rows.map((row) => {
221996
+ const typedRow = row;
221997
+ const schemaName = String(typedRow.schema_name ?? typedRow.SCHEMA_NAME ?? "");
221998
+ return {
221999
+ name: schemaName,
222000
+ isHidden: ["information_schema", "performance_schema", ""].includes(schemaName),
222001
+ isDefault: schemaName === "main"
222002
+ };
222003
+ });
222004
+ } catch (error) {
222005
+ logger.error(`Error getting schemas for MotherDuck connection ${connection.name}`, { error });
222006
+ throw new Error(`Failed to get schemas for MotherDuck connection ${connection.name}: ${error.message}`);
222007
+ }
222008
+ }
222009
+ async function getSchemasForDuckLake(connection, malloyConnection) {
222010
+ try {
222011
+ const catalogName = connection.name;
222012
+ const result = await malloyConnection.runSQL(`SELECT schema_name FROM information_schema.schemata WHERE catalog_name = '${catalogName}' ORDER BY schema_name`, { rowLimit: 1000 });
222013
+ const rows = standardizeRunSQLResult2(result);
222014
+ return rows.map((row) => {
222015
+ const typedRow = row;
222016
+ const schemaName = typedRow.schema_name;
222017
+ const shouldShow = schemaName === "main" || schemaName === "public";
222018
+ return {
222019
+ name: schemaName,
222020
+ isHidden: !shouldShow,
222021
+ isDefault: false
222022
+ };
222023
+ });
222024
+ } catch (error) {
222025
+ logger.error(`Error getting schemas for DuckLake connection ${connection.name}`, { error });
222026
+ throw new Error(`Failed to get schemas for DuckLake connection ${connection.name}: ${error.message}`);
222027
+ }
222028
+ }
222029
+ async function getSchemasForConnection(connection, malloyConnection) {
222030
+ switch (connection.type) {
222031
+ case "bigquery":
222032
+ return getSchemasForBigQuery(connection);
222033
+ case "postgres":
222034
+ return getSchemasForPostgres(connection, malloyConnection);
222035
+ case "mysql":
222036
+ return getSchemasForMySQL(connection);
222037
+ case "snowflake":
222038
+ return getSchemasForSnowflake(connection, malloyConnection);
222039
+ case "trino":
222040
+ return getSchemasForTrino(connection, malloyConnection);
222041
+ case "duckdb":
222042
+ return getSchemasForDuckDB(connection, malloyConnection);
222043
+ case "motherduck":
222044
+ return getSchemasForMotherDuck(connection, malloyConnection);
222045
+ case "ducklake":
222046
+ return getSchemasForDuckLake(connection, malloyConnection);
222047
+ default:
222048
+ throw new Error(`Unsupported connection type: ${connection.type}`);
221996
222049
  }
221997
222050
  }
221998
222051
  function getFileType2(key) {
@@ -222155,281 +222208,217 @@ async function describeAzureFile(malloyConnection, fileUri, azureConnection) {
222155
222208
  throw new Error(`Failed to describe Azure file: ${error instanceof Error ? error.message : String(error)}`);
222156
222209
  }
222157
222210
  }
222158
- async function getTablesForSchema(connection, schemaName, malloyConnection, fetchTableSchema = true) {
222159
- if (connection.type === "duckdb") {
222160
- const attachedDbs = connection.duckdbConnection?.attachedDatabases || [];
222161
- const azureDb = attachedDbs.find((db) => db.type === "azure" && db.name === schemaName && db.azureConnection);
222162
- if (azureDb) {
222163
- const azureConn = azureDb.azureConnection;
222164
- const fileUrl = azureConn.authType === "sas_token" ? azureConn.sasUrl : azureConn.fileUrl;
222165
- if (fileUrl) {
222166
- return await describeAzureFile(malloyConnection, fileUrl, azureConn);
222211
async function listTablesForSchema(connection, schemaName, malloyConnection, tableNames) {
  // Dispatches table listing to the dialect-specific implementation.
  // tableNames, when provided, restricts the result to those tables.
  const handlers = new Map([
    ["bigquery", listTablesForBigQuery],
    ["mysql", listTablesForMySQL],
    ["postgres", listTablesForPostgres],
    ["snowflake", listTablesForSnowflake],
    ["trino", listTablesForTrino],
    ["duckdb", listTablesForDuckDB],
    ["motherduck", listTablesForMotherDuck],
    ["ducklake", listTablesForDuckLake]
  ]);
  const handler = handlers.get(connection.type);
  if (!handler) {
    throw new Error(`Unsupported connection type: ${connection.type}`);
  }
  return handler(connection, schemaName, malloyConnection, tableNames);
}
222233
async function listTablesForBigQuery(connection, schemaName, malloyConnection, tableNames) {
  // Lists tables in a BigQuery dataset, fetching each table's column schema
  // through the Malloy connection. Per-table schema failures are logged and
  // yield an empty column list rather than failing the whole listing.
  try {
    const client = createBigQueryClient(connection);
    const [datasetTables] = await client.dataset(schemaName).getTables();
    const allowed = tableNames ? new Set(tableNames) : null;
    const selected = datasetTables
      .map((table) => table.id)
      .filter((id) => id !== undefined)
      .filter((id) => allowed === null || allowed.has(id));
    // Fetch one table's schema; degrade to empty columns on failure.
    const fetchOne = async (tableName) => {
      const tablePath = `${schemaName}.${tableName}`;
      try {
        const source = await malloyConnection.fetchTableSchema(tableName, tablePath);
        const columns = source?.fields?.map((field) => ({
          name: field.name,
          type: field.type
        })) || [];
        return { resource: tablePath, columns };
      } catch (error) {
        logger.warn(`Failed to get schema for table ${tableName}`, {
          error: extractErrorDataFromError(error),
          schemaName,
          tableName
        });
        return { resource: tablePath, columns: [] };
      }
    };
    return await Promise.all(selected.map(fetchOne));
  } catch (error) {
    logger.error(`Error getting tables for BigQuery schema ${schemaName} in connection ${connection.name}`, { error });
    throw new Error(`Failed to get tables for BigQuery schema ${schemaName} in connection ${connection.name}: ${error.message}`);
  }
}
222267
async function listTablesForMySQL(connection, schemaName, malloyConnection, tableNames) {
  // Lists tables (with columns) in a MySQL schema via information_schema.
  if (!connection.mysqlConnection) {
    throw new Error("Mysql connection is required");
  }
  try {
    // Escape single quotes so a quoted schema name cannot break out of the
    // SQL string literal (matches the escaping sqlInFilter applies to values).
    const safeSchema = schemaName.replace(/'/g, "''");
    const result = await malloyConnection.runSQL(`SELECT TABLE_NAME, COLUMN_NAME, DATA_TYPE FROM information_schema.columns WHERE table_schema = '${safeSchema}' ${sqlInFilter("TABLE_NAME", tableNames)} ORDER BY TABLE_NAME, ORDINAL_POSITION`);
    const rows = standardizeRunSQLResult2(result);
    return groupColumnRowsIntoTables(rows, (t) => `${schemaName}.${t}`);
  } catch (error) {
    logger.error(`Error getting tables for MySQL schema ${schemaName} in connection ${connection.name}`, { error });
    throw new Error(`Failed to get tables for MySQL schema ${schemaName} in connection ${connection.name}: ${error.message}`);
  }
}
222280
async function listTablesForPostgres(connection, schemaName, malloyConnection, tableNames) {
  // Lists tables (with columns) in a Postgres schema via information_schema.
  if (!connection.postgresConnection) {
    throw new Error("Postgres connection is required");
  }
  try {
    // Escape single quotes so a quoted schema name cannot break out of the
    // SQL string literal (matches the escaping sqlInFilter applies to values).
    const safeSchema = schemaName.replace(/'/g, "''");
    const result = await malloyConnection.runSQL(`SELECT table_name, column_name, data_type FROM information_schema.columns WHERE table_schema = '${safeSchema}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`);
    const rows = standardizeRunSQLResult2(result);
    return groupColumnRowsIntoTables(rows, (t) => `${schemaName}.${t}`);
  } catch (error) {
    logger.error(`Error getting tables for Postgres schema ${schemaName} in connection ${connection.name}`, { error });
    throw new Error(`Failed to get tables for Postgres schema ${schemaName} in connection ${connection.name}: ${error.message}`);
  }
}
222293
async function listTablesForSnowflake(connection, schemaName, malloyConnection, tableNames) {
  // Lists tables (with columns) in a Snowflake schema. schemaName may be
  // "DATABASE.SCHEMA" or a bare schema (database then comes from the
  // connection config).
  if (!connection.snowflakeConnection) {
    throw new Error("Snowflake connection is required");
  }
  try {
    const parts = schemaName.split(".");
    let databaseName;
    let schemaOnly;
    if (parts.length >= 2) {
      databaseName = parts[0];
      schemaOnly = parts[1];
    } else {
      databaseName = connection.snowflakeConnection.database ?? "";
      schemaOnly = parts[0];
    }
    if (!databaseName) {
      // NOTE: thrown inside try, so it is rewrapped by the catch below —
      // preserved existing behavior.
      throw new Error(`Cannot resolve database for schema "${schemaName}": provide DATABASE.SCHEMA or configure a database on the connection`);
    }
    const qualifiedSchema = `${databaseName}.${schemaOnly}`;
    // Escape single quotes in the string-literal position. databaseName sits
    // in identifier position and cannot be quote-escaped this way — TODO
    // confirm upstream validation of database names.
    const safeSchemaOnly = schemaOnly.replace(/'/g, "''");
    const result = await malloyConnection.runSQL(`SELECT TABLE_NAME, COLUMN_NAME, DATA_TYPE FROM ${databaseName}.INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = '${safeSchemaOnly}' ${sqlInFilter("TABLE_NAME", tableNames)} ORDER BY TABLE_NAME, ORDINAL_POSITION`);
    const rows = standardizeRunSQLResult2(result);
    return groupColumnRowsIntoTables(rows, (t) => `${qualifiedSchema}.${t}`);
  } catch (error) {
    logger.error(`Error getting tables for Snowflake schema ${schemaName} in connection ${connection.name}`, { error });
    throw new Error(`Failed to get tables for Snowflake schema ${schemaName} in connection ${connection.name}: ${error.message}`);
  }
}
222320
async function listTablesForTrino(connection, schemaName, malloyConnection, tableNames) {
  // Lists tables (with columns) in a Trino schema. If the connection has a
  // configured catalog, schemaName is a bare schema; otherwise schemaName may
  // be "catalog.schema".
  if (!connection.trinoConnection) {
    throw new Error("Trino connection is required");
  }
  try {
    let catalogPrefix;
    let schemaOnly;
    let resourcePrefix;
    if (connection.trinoConnection.catalog) {
      catalogPrefix = `${connection.trinoConnection.catalog}.`;
      schemaOnly = schemaName;
      resourcePrefix = `${connection.trinoConnection.catalog}.${schemaName}`;
    } else {
      const dotIdx = schemaName.indexOf(".");
      if (dotIdx > 0) {
        catalogPrefix = `${schemaName.substring(0, dotIdx)}.`;
        schemaOnly = schemaName.substring(dotIdx + 1);
      } else {
        catalogPrefix = "";
        schemaOnly = schemaName;
      }
      resourcePrefix = schemaName;
    }
    // Escape single quotes in the string-literal position. catalogPrefix is
    // identifier position and cannot be quote-escaped this way — TODO confirm
    // upstream validation of catalog names.
    const safeSchemaOnly = schemaOnly.replace(/'/g, "''");
    const result = await malloyConnection.runSQL(`SELECT table_name, column_name, data_type FROM ${catalogPrefix}information_schema.columns WHERE table_schema = '${safeSchemaOnly}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`);
    const rows = standardizeRunSQLResult2(result);
    return groupColumnRowsIntoTables(rows, (t) => `${resourcePrefix}.${t}`);
  } catch (error) {
    logger.error(`Error getting tables for Trino schema ${schemaName} in connection ${connection.name}`, { error });
    throw new Error(`Failed to get tables for Trino schema ${schemaName} in connection ${connection.name}: ${error.message}`);
  }
}
222351
async function listTablesForDuckDB(connection, schemaName, malloyConnection, tableNames) {
  // Lists tables for a DuckDB "schema", which may actually be:
  //  - an attached Azure database name (describe the remote file),
  //  - a raw Azure URL (abfss:// | https:// | az://),
  //  - a cloud directory URI (credentials come from attached databases), or
  //  - a "catalog.schema" pair resolved via information_schema.
  if (!connection.duckdbConnection) {
    throw new Error("DuckDB connection is required");
  }
  const attachedDbs = connection.duckdbConnection.attachedDatabases || [];
  const azureDb = attachedDbs.find((db) => db.type === "azure" && db.name === schemaName && db.azureConnection);
  if (azureDb) {
    const azureConn = azureDb.azureConnection;
    const fileUrl = azureConn.authType === "sas_token" ? azureConn.sasUrl : azureConn.fileUrl;
    if (fileUrl) {
      return describeAzureFile(malloyConnection, fileUrl, azureConn);
    }
  }
  if (schemaName.startsWith("abfss://") || schemaName.startsWith("https://") || schemaName.startsWith("az://")) {
    return describeAzureFile(malloyConnection, schemaName);
  }
  const parsedUri = parseCloudUri(schemaName);
  if (parsedUri) {
    const {
      type: cloudType,
      bucket: bucketName,
      path: directoryPath
    } = parsedUri;
    const credentials = getCloudCredentialsFromAttachedDatabases(attachedDbs, cloudType);
    if (!credentials) {
      throw new Error(`${cloudType.toUpperCase()} credentials not found in attached databases`);
    }
    const fileKeys = await listDataFilesInDirectory(credentials, bucketName, directoryPath);
    return getCloudTablesWithColumns(malloyConnection, credentials, bucketName, fileKeys);
  }
  // Split on the first dot only, so schema names containing dots keep their
  // full remainder as the schema part.
  const dotIdx = schemaName.indexOf(".");
  if (dotIdx < 0) {
    throw new Error(`DuckDB schema name must be qualified as "catalog.schema", got "${schemaName}"`);
  }
  const catalogName = schemaName.substring(0, dotIdx);
  const actualSchemaName = schemaName.substring(dotIdx + 1);
  try {
    // Escape single quotes so quoted names cannot break out of the SQL
    // string literals (matches the escaping sqlInFilter applies to values).
    const esc = (s) => s.replace(/'/g, "''");
    const result = await malloyConnection.runSQL(`SELECT table_name, column_name, data_type FROM information_schema.columns WHERE table_schema = '${esc(actualSchemaName)}' AND table_catalog = '${esc(catalogName)}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`);
    const rows = standardizeRunSQLResult2(result);
    return groupColumnRowsIntoTables(rows, (t) => `${schemaName}.${t}`);
  } catch (error) {
    logger.error(`Error getting tables for DuckDB schema ${schemaName} in connection ${connection.name}`, { error });
    throw new Error(`Failed to get tables for DuckDB schema ${schemaName} in connection ${connection.name}: ${error.message}`);
  }
}
222233
- async function getConnectionTableSource(malloyConnection, tableKey, tablePath) {
222396
async function listTablesForMotherDuck(connection, schemaName, malloyConnection, tableNames) {
  // Lists tables (with columns) in a MotherDuck schema via information_schema.
  if (!connection.motherduckConnection) {
    throw new Error("MotherDuck connection is required");
  }
  try {
    // Escape single quotes so a quoted schema name cannot break out of the
    // SQL string literal (matches the escaping sqlInFilter applies to values).
    const safeSchema = schemaName.replace(/'/g, "''");
    const result = await malloyConnection.runSQL(`SELECT table_name, column_name, data_type FROM information_schema.columns WHERE table_schema = '${safeSchema}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`);
    const rows = standardizeRunSQLResult2(result);
    return groupColumnRowsIntoTables(rows, (t) => `${schemaName}.${t}`);
  } catch (error) {
    logger.error(`Error getting tables for MotherDuck schema ${schemaName} in connection ${connection.name}`, { error });
    throw new Error(`Failed to get tables for MotherDuck schema ${schemaName} in connection ${connection.name}: ${error.message}`);
  }
}
222279
- async function listTablesForSchema(connection, schemaName, malloyConnection) {
222280
- if (connection.type === "bigquery") {
222281
- try {
222282
- const bigquery = createBigQueryClient(connection);
222283
- const dataset = bigquery.dataset(schemaName);
222284
- const [tables] = await dataset.getTables();
222285
- return tables.map((table) => table.id).filter((id) => id !== undefined);
222286
- } catch (error) {
222287
- logger.error(`Error getting tables for BigQuery schema ${schemaName} in connection ${connection.name}`, { error });
222288
- throw new Error(`Failed to get tables for BigQuery schema ${schemaName} in connection ${connection.name}: ${error.message}`);
222289
- }
222290
- } else if (connection.type === "mysql") {
222291
- if (!connection.mysqlConnection) {
222292
- throw new Error("Mysql connection is required");
222293
- }
222294
- try {
222295
- const result = await malloyConnection.runSQL(`SELECT TABLE_NAME FROM information_schema.tables WHERE table_schema = '${schemaName}' AND table_type = 'BASE TABLE'`);
222296
- const rows = standardizeRunSQLResult2(result);
222297
- return rows.map((row) => {
222298
- const typedRow = row;
222299
- return typedRow.TABLE_NAME;
222300
- });
222301
- } catch (error) {
222302
- logger.error(`Error getting tables for MySQL schema ${schemaName} in connection ${connection.name}`, { error });
222303
- throw new Error(`Failed to get tables for MySQL schema ${schemaName} in connection ${connection.name}: ${error.message}`);
222304
- }
222305
- } else if (connection.type === "postgres") {
222306
- if (!connection.postgresConnection) {
222307
- throw new Error("Postgres connection is required");
222308
- }
222309
- try {
222310
- const result = await malloyConnection.runSQL(`SELECT table_name as row FROM information_schema.tables WHERE table_schema = '${schemaName}' ORDER BY table_name`);
222311
- const rows = standardizeRunSQLResult2(result);
222312
- return rows;
222313
- } catch (error) {
222314
- logger.error(`Error getting tables for Postgres schema ${schemaName} in connection ${connection.name}`, { error });
222315
- throw new Error(`Failed to get tables for Postgres schema ${schemaName} in connection ${connection.name}: ${error.message}`);
222316
- }
222317
- } else if (connection.type === "snowflake") {
222318
- if (!connection.snowflakeConnection) {
222319
- throw new Error("Snowflake connection is required");
222320
- }
222321
- try {
222322
- const tablesResult = await malloyConnection.runSQL(`SHOW TABLES IN SCHEMA ${schemaName} LIMIT 1000`);
222323
- const viewsResult = await malloyConnection.runSQL(`SHOW VIEWS IN SCHEMA ${schemaName} LIMIT 1000`);
222324
- const tableRows = standardizeRunSQLResult2(tablesResult);
222325
- const viewRows = standardizeRunSQLResult2(viewsResult);
222326
- logger.debug("Snowflake Tables Listed", { tableRows });
222327
- logger.debug("Snowflake Views Listed", { viewRows });
222328
- const rows = [...tableRows, ...viewRows];
222329
- return rows.map((row) => {
222330
- const typedRow = row;
222331
- const name = typedRow.name ?? typedRow.NAME;
222332
- return typeof name === "string" ? name : String(name);
222333
- }).filter((id) => id.length > 0);
222334
- } catch (error) {
222335
- logger.error(`Error getting tables for Snowflake schema ${schemaName} in connection ${connection.name}`, { error });
222336
- throw new Error(`Failed to get tables for Snowflake schema ${schemaName} in connection ${connection.name}: ${error.message}`);
222337
- }
222338
- } else if (connection.type === "trino") {
222339
- if (!connection.trinoConnection) {
222340
- throw new Error("Trino connection is required");
222341
- }
222342
- try {
222343
- let result;
222344
- if (connection.trinoConnection?.catalog) {
222345
- result = await malloyConnection.runSQL(`SHOW TABLES FROM ${connection.trinoConnection.catalog}.${schemaName}`);
222346
- } else {
222347
- result = await malloyConnection.runSQL(`SHOW TABLES FROM ${schemaName}`);
222348
- }
222349
- const rows = standardizeRunSQLResult2(result);
222350
- return rows.map((row) => {
222351
- const typedRow = row;
222352
- return typedRow.Table;
222353
- });
222354
- } catch (error) {
222355
- logger.error(`Error getting tables for Trino schema ${schemaName} in connection ${connection.name}`, { error });
222356
- throw new Error(`Failed to get tables for Trino schema ${schemaName} in connection ${connection.name}: ${error.message}`);
222357
- }
222358
- } else if (connection.type === "duckdb") {
222359
- if (!connection.duckdbConnection) {
222360
- throw new Error("DuckDB connection is required");
222361
- }
222362
- const parsedUri = parseCloudUri(schemaName);
222363
- if (parsedUri) {
222364
- const {
222365
- type: cloudType,
222366
- bucket: bucketName,
222367
- path: directoryPath
222368
- } = parsedUri;
222369
- const attachedDatabases = connection.duckdbConnection.attachedDatabases || [];
222370
- const credentials = getCloudCredentialsFromAttachedDatabases(attachedDatabases, cloudType);
222371
- if (!credentials) {
222372
- throw new Error(`${cloudType.toUpperCase()} credentials not found in attached databases`);
222373
- }
222374
- try {
222375
- const fileKeys = await listDataFilesInDirectory(credentials, bucketName, directoryPath);
222376
- return fileKeys.map((key) => {
222377
- const lastSlash = key.lastIndexOf("/");
222378
- return lastSlash > 0 ? key.substring(lastSlash + 1) : key;
222379
- });
222380
- } catch (error) {
222381
- logger.error(`Error listing ${cloudType.toUpperCase()} objects in ${schemaName}`, {
222382
- error
222383
- });
222384
- throw new Error(`Failed to list files in ${schemaName}: ${error.message}`);
222385
- }
222386
- }
222387
- const catalogName = schemaName.split(".")[0];
222388
- const actualSchemaName = schemaName.split(".")[1];
222389
- try {
222390
- const result = await malloyConnection.runSQL(`SELECT table_name FROM information_schema.tables WHERE table_schema = '${actualSchemaName}' and table_catalog = '${catalogName}' ORDER BY table_name`, { rowLimit: 1000 });
222391
- const rows = standardizeRunSQLResult2(result);
222392
- return rows.map((row) => {
222393
- const typedRow = row;
222394
- return typedRow.table_name;
222395
- });
222396
- } catch (error) {
222397
- logger.error(`Error getting tables for DuckDB schema ${schemaName} in connection ${connection.name}`, { error });
222398
- throw new Error(`Failed to get tables for DuckDB schema ${schemaName} in connection ${connection.name}: ${error.message}`);
222399
- }
222400
- } else if (connection.type === "motherduck") {
222401
- if (!connection.motherduckConnection) {
222402
- throw new Error("MotherDuck connection is required");
222403
- }
222404
- try {
222405
- const result = await malloyConnection.runSQL(`SELECT table_name as row FROM information_schema.tables WHERE table_schema = '${schemaName}' ORDER BY table_name`, { rowLimit: 1000 });
222406
- const rows = standardizeRunSQLResult2(result);
222407
- return rows.map((row) => {
222408
- const typedRow = row;
222409
- return typedRow.row;
222410
- });
222411
- } catch (error) {
222412
- logger.error(`Error getting tables for MotherDuck schema ${schemaName} in connection ${connection.name}`, { error });
222413
- throw new Error(`Failed to get tables for MotherDuck schema ${schemaName} in connection ${connection.name}: ${error.message}`);
222414
- }
222415
- } else if (connection.type === "ducklake") {
222416
- const catalogName = schemaName.split(".")[0];
222417
- const actualSchemaName = schemaName.split(".")[1];
222418
- console.error("catalogName", catalogName);
222419
- console.error("actualSchemaName", actualSchemaName);
222420
- try {
222421
- const result = await malloyConnection.runSQL(`SELECT table_name FROM information_schema.tables WHERE table_schema = '${actualSchemaName}' AND table_catalog = '${catalogName}' ORDER BY table_name`, { rowLimit: 1000 });
222422
- const rows = standardizeRunSQLResult2(result);
222423
- return rows.map((row) => {
222424
- const typedRow = row;
222425
- return typedRow.table_name;
222426
- });
222427
- } catch (error) {
222428
- logger.error(`Error getting tables for DuckLake schema ${schemaName} in connection ${connection.name}`, { error });
222429
- throw new Error(`Failed to get tables for DuckLake schema ${schemaName} in connection ${connection.name}: ${error.message}`);
222430
- }
222431
- } else {
222432
- throw new Error(`Unsupported connection type: ${connection.type}`);
222409
async function listTablesForDuckLake(connection, schemaName, malloyConnection, tableNames) {
  // Lists tables (with columns) in a DuckLake schema. A bare schema name is
  // qualified with the connection (catalog) name first.
  if (!schemaName.includes(".")) {
    schemaName = `${connection.name}.${schemaName}`;
  }
  // Split on the FIRST dot only, matching the DuckDB handler; the previous
  // split(".")[1] silently truncated schema names containing extra dots.
  const dotIdx = schemaName.indexOf(".");
  const catalogName = schemaName.substring(0, dotIdx);
  const actualSchemaName = schemaName.substring(dotIdx + 1);
  try {
    // Escape single quotes so quoted names cannot break out of the SQL
    // string literals (matches the escaping sqlInFilter applies to values).
    const esc = (s) => s.replace(/'/g, "''");
    const result = await malloyConnection.runSQL(`SELECT table_name, column_name, data_type FROM information_schema.columns WHERE table_schema = '${esc(actualSchemaName)}' AND table_catalog = '${esc(catalogName)}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`);
    const rows = standardizeRunSQLResult2(result);
    return groupColumnRowsIntoTables(rows, (t) => `${schemaName}.${t}`);
  } catch (error) {
    logger.error(`Error getting tables for DuckLake schema ${schemaName} in connection ${connection.name}`, { error });
    throw new Error(`Failed to get tables for DuckLake schema ${schemaName} in connection ${connection.name}: ${error.message}`);
  }
}
222435
222424
  function extractErrorDataFromError(error) {
@@ -222455,23 +222444,6 @@ var AZURE_DATA_EXTENSIONS = [
222455
222444
  ".jsonl",
222456
222445
  ".ndjson"
222457
222446
  ];
222458
- function parseFetchTableSchemaQueryParam(raw) {
222459
- if (raw === undefined || raw === null) {
222460
- return true;
222461
- }
222462
- const v = Array.isArray(raw) ? raw[0] : raw;
222463
- if (v === "" || v === undefined) {
222464
- return true;
222465
- }
222466
- if (typeof v === "boolean") {
222467
- return v;
222468
- }
222469
- const s = String(v).trim().toLowerCase();
222470
- if (s === "false" || s === "0") {
222471
- return false;
222472
- }
222473
- return true;
222474
- }
222475
222447
  function validateAzureUrl(url2, fieldName) {
222476
222448
  if (!AZURE_SUPPORTED_SCHEMES.some((s) => url2.startsWith(s))) {
222477
222449
  throw new BadRequestError(`Azure ${fieldName} must use one of: ${AZURE_SUPPORTED_SCHEMES.join(", ")}`);
@@ -222535,6 +222507,30 @@ class ConnectionController {
222535
222507
  return project.getMalloyConnection(connectionName);
222536
222508
  }
222537
222509
  }
222510
+ async fetchTable(malloyConnection, tableKey, tablePath) {
222511
+ try {
222512
+ const source = await malloyConnection.fetchTableSchema(tableKey, tablePath);
222513
+ if (!source) {
222514
+ throw new ConnectionError(`Table ${tablePath} not found`);
222515
+ }
222516
+ return {
222517
+ source: JSON.stringify(source),
222518
+ resource: tablePath,
222519
+ columns: (source.fields || []).map((f) => ({
222520
+ name: f.name,
222521
+ type: f.type
222522
+ }))
222523
+ };
222524
+ } catch (error) {
222525
+ const errorMessage = error instanceof Error ? error.message : typeof error === "string" ? error : JSON.stringify(error);
222526
+ logger.error("fetchTableSchema error", {
222527
+ error,
222528
+ tableKey,
222529
+ tablePath
222530
+ });
222531
+ throw new ConnectionError(errorMessage);
222532
+ }
222533
+ }
222538
222534
  async getConnection(projectName, connectionName) {
222539
222535
  if (!projectName || !connectionName) {
222540
222536
  throw new BadRequestError("Connection payload is required");
@@ -222552,11 +222548,11 @@ class ConnectionController {
222552
222548
  const malloyConnection = await this.getMalloyConnection(projectName, connectionName);
222553
222549
  return getSchemasForConnection(connection, malloyConnection);
222554
222550
  }
222555
- async listTables(projectName, connectionName, schemaName, fetchTableSchema = true) {
222551
+ async listTables(projectName, connectionName, schemaName, tableNames) {
222556
222552
  const project = await this.projectStore.getProject(projectName, false);
222557
222553
  const connection = project.getApiConnection(connectionName);
222558
222554
  const malloyConnection = await this.getMalloyConnection(projectName, connectionName);
222559
- return getTablesForSchema(connection, schemaName, malloyConnection, fetchTableSchema);
222555
+ return listTablesForSchema(connection, schemaName, malloyConnection, tableNames);
222560
222556
  }
222561
222557
  async getConnectionSqlSource(projectName, connectionName, sqlStatement) {
222562
222558
  const malloyConnection = await this.getMalloyConnection(projectName, connectionName);
@@ -222571,10 +222567,6 @@ class ConnectionController {
222571
222567
  throw new ConnectionError(error.message);
222572
222568
  }
222573
222569
  }
222574
- async getConnectionTableSource(projectName, connectionName, tableKey, tablePath) {
222575
- const malloyConnection = await this.getMalloyConnection(projectName, connectionName);
222576
- return getConnectionTableSource(malloyConnection, tableKey, tablePath);
222577
- }
222578
222570
  async getTable(projectName, connectionName, schemaName, tablePath) {
222579
222571
  const malloyConnection = await this.getMalloyConnection(projectName, connectionName);
222580
222572
  const project = await this.projectStore.getProject(projectName, false);
@@ -222599,12 +222591,8 @@ class ConnectionController {
222599
222591
  const queryString = urlParts[1] ? `?${urlParts[1]}` : "";
222600
222592
  const dirPath = basePath.substring(0, basePath.lastIndexOf("/") + 1);
222601
222593
  const fullFileUrl = `${dirPath}${fileName}${queryString}`;
222602
- const tableSource2 = await getConnectionTableSource(malloyConnection, fileName, fullFileUrl);
222603
- return {
222604
- resource: tablePath,
222605
- columns: tableSource2.columns,
222606
- source: tableSource2.source
222607
- };
222594
+ const table = await this.fetchTable(malloyConnection, fileName, fullFileUrl);
222595
+ return { ...table, resource: tablePath };
222608
222596
  }
222609
222597
  }
222610
222598
  }
@@ -222612,12 +222600,7 @@ class ConnectionController {
222612
222600
  if (!tableKey) {
222613
222601
  throw new Error(`Invalid tablePath: ${tablePath}`);
222614
222602
  }
222615
- const tableSource = await getConnectionTableSource(malloyConnection, tableKey, tablePath);
222616
- return {
222617
- resource: tablePath,
222618
- columns: tableSource.columns,
222619
- source: tableSource.source
222620
- };
222603
+ return this.fetchTable(malloyConnection, tableKey, tablePath);
222621
222604
  }
222622
222605
  async getConnectionQueryData(projectName, connectionName, sqlStatement, options) {
222623
222606
  const malloyConnection = await this.getMalloyConnection(projectName, connectionName);
@@ -230423,7 +230406,7 @@ class Project {
230423
230406
  return this;
230424
230407
  }
230425
230408
  static async create(projectName, projectPath, connections) {
230426
- if (!(await fs6.promises.stat(projectPath)).isDirectory()) {
230409
+ if (!(await fs6.promises.stat(projectPath))?.isDirectory()) {
230427
230410
  throw new ProjectNotFoundError(`Project path ${projectPath} not found`);
230428
230411
  }
230429
230412
  logger.info(`Creating project with connection configuration`);
@@ -230575,7 +230558,7 @@ ${source}` : source;
230575
230558
  }
230576
230559
  async addPackage(packageName) {
230577
230560
  const packagePath = path7.join(this.projectPath, packageName);
230578
- if (!await fs6.promises.access(packagePath).then(() => true).catch(() => false) || !(await fs6.promises.stat(packagePath)).isDirectory()) {
230561
+ if (!await fs6.promises.access(packagePath).then(() => true).catch(() => false) || !(await fs6.promises.stat(packagePath))?.isDirectory()) {
230579
230562
  throw new PackageNotFoundError(`Package ${packageName} not found`);
230580
230563
  }
230581
230564
  logger.info(`Adding package ${packageName} to project ${this.projectName}`, {
@@ -231070,18 +231053,7 @@ class ProjectStore {
231070
231053
  }
231071
231054
  async cleanupAndCreatePublisherPath() {
231072
231055
  const reInit = process.env.INITIALIZE_STORAGE === "true";
231073
- try {
231074
- const stats = await fs7.promises.stat(this.serverRootPath);
231075
- if (!stats.isDirectory()) {
231076
- throw new Error(`Server root path ${this.serverRootPath} exists but is not a directory`);
231077
- }
231078
- } catch (error) {
231079
- if (error.code === "ENOENT") {
231080
- await fs7.promises.mkdir(this.serverRootPath, { recursive: true });
231081
- } else {
231082
- throw error;
231083
- }
231084
- }
231056
+ await fs7.promises.mkdir(this.serverRootPath, { recursive: true });
231085
231057
  if (reInit) {
231086
231058
  const uploadDocsPath2 = path8.join(this.serverRootPath, PUBLISHER_DATA_DIR);
231087
231059
  logger.info(`Reinitialization mode: Cleaning up upload documents path ${uploadDocsPath2}`);
@@ -231491,7 +231463,7 @@ class ProjectStore {
231491
231463
  if (projectPath.endsWith(".zip")) {
231492
231464
  projectPath = await this.unzipProject(projectPath);
231493
231465
  }
231494
- const projectDirExists = (await fs7.promises.stat(projectPath)).isDirectory();
231466
+ const projectDirExists = (await fs7.promises.stat(projectPath))?.isDirectory() ?? false;
231495
231467
  if (projectDirExists) {
231496
231468
  await fs7.promises.rm(absoluteTargetPath, {
231497
231469
  recursive: true,
@@ -235629,6 +235601,13 @@ function initializeMcpServer(projectStore) {
235629
235601
 
235630
235602
  // src/server.ts
235631
235603
  var __filename = "/home/runner/work/publisher/publisher/packages/server/src/server.ts";
235604
/**
 * Normalize an Express query-string value into an array of strings.
 *
 * Express may deliver a repeated query parameter as an array and a single
 * occurrence as a scalar; this collapses both shapes into one.
 *
 * @param {unknown} value - Raw `req.query` entry (scalar, array, or absent).
 * @returns {string[]|undefined} Array of stringified values, or `undefined`
 *   when the parameter was not supplied (null/undefined), so callers can
 *   distinguish "absent" from "empty list".
 */
function normalizeQueryArray(value) {
  // Loose equality deliberately matches both null and undefined.
  if (value == null) {
    return;
  }
  const items = Array.isArray(value) ? value : [value];
  return items.map((item) => String(item));
}
235632
235611
  function parseArgs() {
235633
235612
  const args = process.argv.slice(2);
235634
235613
  for (let i = 0;i < args.length; i++) {
@@ -235922,7 +235901,7 @@ app.get(`${API_PREFIX2}/projects/:projectName/connections/:connectionName/schema
235922
235901
  app.get(`${API_PREFIX2}/projects/:projectName/connections/:connectionName/schemas/:schemaName/tables`, async (req, res) => {
235923
235902
  logger.info("req.params", { params: req.params });
235924
235903
  try {
235925
- const results = await connectionController.listTables(req.params.projectName, req.params.connectionName, req.params.schemaName, parseFetchTableSchemaQueryParam(req.query.fetchTableSchema));
235904
+ const results = await connectionController.listTables(req.params.projectName, req.params.connectionName, req.params.schemaName, normalizeQueryArray(req.query.tableNames));
235926
235905
  res.status(200).json(results);
235927
235906
  } catch (error) {
235928
235907
  logger.error(error);
@@ -235959,15 +235938,6 @@ app.post(`${API_PREFIX2}/projects/:projectName/connections/:connectionName/sqlSo
235959
235938
  res.status(status).json(json2);
235960
235939
  }
235961
235940
  });
235962
- app.get(`${API_PREFIX2}/projects/:projectName/connections/:connectionName/tableSource`, async (req, res) => {
235963
- try {
235964
- res.status(200).json(await connectionController.getConnectionTableSource(req.params.projectName, req.params.connectionName, req.query.tableKey, req.query.tablePath));
235965
- } catch (error) {
235966
- logger.error(error);
235967
- const { json: json2, status } = internalErrorToHttpError(error);
235968
- res.status(status).json(json2);
235969
- }
235970
- });
235971
235941
  app.get(`${API_PREFIX2}/projects/:projectName/connections/:connectionName/queryData`, async (req, res) => {
235972
235942
  try {
235973
235943
  res.status(200).json(await connectionController.getConnectionQueryData(req.params.projectName, req.params.connectionName, req.query.sqlStatement, req.query.options));