@malloy-publisher/server 0.0.176 → 0.0.178
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/app/api-doc.yaml +7 -58
- package/dist/app/assets/{HomePage-_GXfGtrF.js → HomePage-CwUkFsA8.js} +1 -1
- package/dist/app/assets/{MainPage-UqGzQjP1.js → MainPage-JYvDXOkC.js} +1 -1
- package/dist/app/assets/{ModelPage-CnwgkZU6.js → ModelPage-TEQrhaqq.js} +1 -1
- package/dist/app/assets/{PackagePage-CqmgyERX.js → PackagePage-CgE-izLw.js} +1 -1
- package/dist/app/assets/{ProjectPage-5jrEp4pr.js → ProjectPage-PiMPpFX8.js} +1 -1
- package/dist/app/assets/{RouteError-CjrNNYGo.js → RouteError-DnSZEzkT.js} +1 -1
- package/dist/app/assets/{WorkbookPage-Cy2UHLkL.js → WorkbookPage-DjQ8u5DD.js} +1 -1
- package/dist/app/assets/{index-DfOHc8G8.js → index--80Q7qw1.js} +1 -1
- package/dist/app/assets/{index-ZpoRvcIi.js → index-BJUsHnGO.js} +40 -40
- package/dist/app/assets/{index-_xQHc-7Q.js → index-CZ4G_NMp.js} +1 -1
- package/dist/app/assets/{index.umd-Bq6CbCSn.js → index.umd-Cf-wqh-R.js} +1 -1
- package/dist/app/index.html +1 -1
- package/dist/server.js +518 -545
- package/package.json +1 -1
- package/src/controller/connection.controller.ts +56 -60
- package/src/server.ts +10 -25
- package/src/service/connection.ts +3 -0
- package/src/service/db_utils.spec.ts +712 -0
- package/src/service/db_utils.ts +786 -755
- package/src/service/project.ts +2 -2
- package/src/service/project_store.ts +6 -19
- package/tests/unit/ducklake/ducklake.test.ts +3 -3
package/dist/server.js
CHANGED
|
@@ -212003,6 +212003,7 @@ function httpMetricsMiddleware(req, res, next) {
|
|
|
212003
212003
|
// src/server.ts
|
|
212004
212004
|
var exports_server = {};
|
|
212005
212005
|
__export(exports_server, {
|
|
212006
|
+
normalizeQueryArray: () => normalizeQueryArray,
|
|
212006
212007
|
mcpApp: () => mcpApp
|
|
212007
212008
|
});
|
|
212008
212009
|
module.exports = __toCommonJS(exports_server);
|
|
@@ -221170,7 +221171,10 @@ async function createProjectConnections(connections = [], projectPath = "", isUp
|
|
|
221170
221171
|
},
|
|
221171
221172
|
poolOptions: {
|
|
221172
221173
|
min: 1,
|
|
221173
|
-
max: 5
|
|
221174
|
+
max: 5,
|
|
221175
|
+
testOnBorrow: false,
|
|
221176
|
+
testOnReturn: false,
|
|
221177
|
+
testWhileIdle: true
|
|
221174
221178
|
}
|
|
221175
221179
|
};
|
|
221176
221180
|
const snowflakeConnection = new import_db_snowflake.SnowflakeConnection(connection.name, snowflakeConnectionOptions);
|
|
@@ -221720,6 +221724,31 @@ async function listCloudDirectorySchemas(credentials) {
|
|
|
221720
221724
|
}
|
|
221721
221725
|
|
|
221722
221726
|
// src/service/db_utils.ts
|
|
221727
|
+
function sqlInFilter(columnName, values) {
|
|
221728
|
+
if (!values || values.length === 0)
|
|
221729
|
+
return "";
|
|
221730
|
+
const escaped = values.map((v) => `'${v.replace(/'/g, "''")}'`);
|
|
221731
|
+
return `AND ${columnName} IN (${escaped.join(", ")})`;
|
|
221732
|
+
}
|
|
221733
|
+
function groupColumnRowsIntoTables(rows, buildResource) {
|
|
221734
|
+
const tableMap = new Map;
|
|
221735
|
+
for (const row of rows) {
|
|
221736
|
+
const r = row;
|
|
221737
|
+
const tableName = String(r.TABLE_NAME ?? r.table_name ?? "");
|
|
221738
|
+
const columnName = String(r.COLUMN_NAME ?? r.column_name ?? "");
|
|
221739
|
+
const dataType = String(r.DATA_TYPE ?? r.data_type ?? "").toLowerCase();
|
|
221740
|
+
if (!tableName)
|
|
221741
|
+
continue;
|
|
221742
|
+
if (!tableMap.has(tableName))
|
|
221743
|
+
tableMap.set(tableName, []);
|
|
221744
|
+
tableMap.get(tableName).push({ name: columnName, type: dataType });
|
|
221745
|
+
}
|
|
221746
|
+
const tables = [];
|
|
221747
|
+
for (const [tableName, columns] of tableMap) {
|
|
221748
|
+
tables.push({ resource: buildResource(tableName), columns });
|
|
221749
|
+
}
|
|
221750
|
+
return tables;
|
|
221751
|
+
}
|
|
221723
221752
|
function createBigQueryClient(connection) {
|
|
221724
221753
|
if (!connection.bigqueryConnection) {
|
|
221725
221754
|
throw new Error("BigQuery connection is required");
|
|
@@ -221728,17 +221757,18 @@ function createBigQueryClient(connection) {
|
|
|
221728
221757
|
projectId: connection.bigqueryConnection.defaultProjectId || ""
|
|
221729
221758
|
};
|
|
221730
221759
|
if (connection.bigqueryConnection.serviceAccountKeyJson) {
|
|
221760
|
+
let credentials;
|
|
221731
221761
|
try {
|
|
221732
|
-
|
|
221733
|
-
|
|
221734
|
-
|
|
221735
|
-
|
|
221736
|
-
|
|
221737
|
-
|
|
221738
|
-
|
|
221739
|
-
|
|
221740
|
-
|
|
221741
|
-
|
|
221762
|
+
credentials = JSON.parse(connection.bigqueryConnection.serviceAccountKeyJson);
|
|
221763
|
+
} catch (parseError) {
|
|
221764
|
+
throw new Error(`Failed to parse BigQuery service account key JSON: ${parseError.message}`);
|
|
221765
|
+
}
|
|
221766
|
+
config.credentials = credentials;
|
|
221767
|
+
if (!config.projectId && credentials.project_id) {
|
|
221768
|
+
config.projectId = credentials.project_id;
|
|
221769
|
+
}
|
|
221770
|
+
if (!config.projectId) {
|
|
221771
|
+
throw new Error("BigQuery project ID is required. Either set the defaultProjectId in the connection configuration or the project_id in the service account key JSON.");
|
|
221742
221772
|
}
|
|
221743
221773
|
} else if (Object.keys(connection.bigqueryConnection).length === 0 && process.env.GOOGLE_APPLICATION_CREDENTIALS) {
|
|
221744
221774
|
config.keyFilename = process.env.GOOGLE_APPLICATION_CREDENTIALS || "";
|
|
@@ -221761,238 +221791,264 @@ function getCloudCredentialsFromAttachedDatabases(attachedDatabases, storageType
|
|
|
221761
221791
|
}
|
|
221762
221792
|
return null;
|
|
221763
221793
|
}
|
|
221764
|
-
async function
|
|
221765
|
-
if (connection.
|
|
221766
|
-
|
|
221767
|
-
|
|
221768
|
-
|
|
221769
|
-
|
|
221770
|
-
|
|
221771
|
-
|
|
221772
|
-
const
|
|
221773
|
-
|
|
221774
|
-
|
|
221775
|
-
|
|
221776
|
-
|
|
221777
|
-
|
|
221778
|
-
|
|
221779
|
-
|
|
221780
|
-
|
|
221781
|
-
|
|
221782
|
-
|
|
221783
|
-
|
|
221784
|
-
|
|
221785
|
-
|
|
221786
|
-
|
|
221787
|
-
|
|
221788
|
-
|
|
221789
|
-
|
|
221790
|
-
|
|
221791
|
-
|
|
221792
|
-
|
|
221793
|
-
|
|
221794
|
-
|
|
221795
|
-
|
|
221796
|
-
|
|
221797
|
-
|
|
221798
|
-
|
|
221799
|
-
|
|
221800
|
-
|
|
221801
|
-
|
|
221802
|
-
|
|
221803
|
-
|
|
221804
|
-
|
|
221805
|
-
|
|
221806
|
-
|
|
221807
|
-
|
|
221808
|
-
|
|
221809
|
-
}
|
|
221810
|
-
|
|
221811
|
-
|
|
221812
|
-
|
|
221813
|
-
|
|
221814
|
-
|
|
221815
|
-
{
|
|
221816
|
-
name: connection.mysqlConnection.database || "mysql",
|
|
221817
|
-
isHidden: false,
|
|
221818
|
-
isDefault: true
|
|
221819
|
-
}
|
|
221820
|
-
];
|
|
221821
|
-
} catch (error) {
|
|
221822
|
-
console.error(`Error getting schemas for MySQL connection ${connection.name}:`, error);
|
|
221823
|
-
throw new Error(`Failed to get schemas for MySQL connection ${connection.name}: ${error.message}`);
|
|
221824
|
-
}
|
|
221825
|
-
} else if (connection.type === "snowflake") {
|
|
221826
|
-
if (!connection.snowflakeConnection) {
|
|
221827
|
-
throw new Error("Snowflake connection is required");
|
|
221828
|
-
}
|
|
221829
|
-
try {
|
|
221830
|
-
const result = await malloyConnection.runSQL("SHOW SCHEMAS");
|
|
221831
|
-
const rows = standardizeRunSQLResult2(result);
|
|
221832
|
-
return rows.map((row) => {
|
|
221833
|
-
const typedRow = row;
|
|
221834
|
-
const databaseName = String(typedRow.database_name ?? typedRow.DATABASE_NAME ?? "");
|
|
221835
|
-
const name = String(typedRow.name ?? typedRow.NAME ?? "");
|
|
221836
|
-
const owner = String(typedRow.owner ?? typedRow.OWNER ?? "");
|
|
221837
|
-
const isDefaultVal = typedRow.is_default ?? typedRow.isDefault ?? typedRow.IS_DEFAULT;
|
|
221838
|
-
return {
|
|
221839
|
-
name: `${databaseName}.${name}`,
|
|
221840
|
-
isHidden: ["SNOWFLAKE", ""].includes(owner),
|
|
221841
|
-
isDefault: isDefaultVal === "Y"
|
|
221842
|
-
};
|
|
221843
|
-
});
|
|
221844
|
-
} catch (error) {
|
|
221845
|
-
console.error(`Error getting schemas for Snowflake connection ${connection.name}:`, error);
|
|
221846
|
-
throw new Error(`Failed to get schemas for Snowflake connection ${connection.name}: ${error.message}`);
|
|
221847
|
-
}
|
|
221848
|
-
} else if (connection.type === "trino") {
|
|
221849
|
-
if (!connection.trinoConnection) {
|
|
221850
|
-
throw new Error("Trino connection is required");
|
|
221794
|
+
async function getSchemasForBigQuery(connection) {
|
|
221795
|
+
if (!connection.bigqueryConnection) {
|
|
221796
|
+
throw new Error("BigQuery connection is required");
|
|
221797
|
+
}
|
|
221798
|
+
try {
|
|
221799
|
+
const bigquery = createBigQueryClient(connection);
|
|
221800
|
+
const [datasets] = await bigquery.getDatasets();
|
|
221801
|
+
return await Promise.all(datasets.map(async (dataset) => {
|
|
221802
|
+
const [metadata] = await dataset.getMetadata();
|
|
221803
|
+
return {
|
|
221804
|
+
name: dataset.id,
|
|
221805
|
+
isHidden: false,
|
|
221806
|
+
isDefault: false,
|
|
221807
|
+
description: metadata?.description
|
|
221808
|
+
};
|
|
221809
|
+
}));
|
|
221810
|
+
} catch (error) {
|
|
221811
|
+
logger.error(`Error getting schemas for BigQuery connection ${connection.name}`, { error });
|
|
221812
|
+
throw new Error(`Failed to get schemas for BigQuery connection ${connection.name}: ${error.message}`);
|
|
221813
|
+
}
|
|
221814
|
+
}
|
|
221815
|
+
async function getSchemasForPostgres(connection, malloyConnection) {
|
|
221816
|
+
if (!connection.postgresConnection) {
|
|
221817
|
+
throw new Error("Postgres connection is required");
|
|
221818
|
+
}
|
|
221819
|
+
try {
|
|
221820
|
+
const result = await malloyConnection.runSQL("SELECT schema_name FROM information_schema.schemata ORDER BY schema_name");
|
|
221821
|
+
const rows = standardizeRunSQLResult2(result);
|
|
221822
|
+
return rows.map((row) => {
|
|
221823
|
+
const typedRow = row;
|
|
221824
|
+
const schemaName = String(typedRow.schema_name ?? typedRow.SCHEMA_NAME ?? "");
|
|
221825
|
+
return {
|
|
221826
|
+
name: schemaName,
|
|
221827
|
+
isHidden: ["information_schema", "pg_catalog", "pg_toast"].includes(schemaName),
|
|
221828
|
+
isDefault: schemaName === "public"
|
|
221829
|
+
};
|
|
221830
|
+
});
|
|
221831
|
+
} catch (error) {
|
|
221832
|
+
logger.error(`Error getting schemas for Postgres connection ${connection.name}`, { error });
|
|
221833
|
+
throw new Error(`Failed to get schemas for Postgres connection ${connection.name}: ${error.message}`);
|
|
221834
|
+
}
|
|
221835
|
+
}
|
|
221836
|
+
async function getSchemasForMySQL(connection) {
|
|
221837
|
+
if (!connection.mysqlConnection) {
|
|
221838
|
+
throw new Error("Mysql connection is required");
|
|
221839
|
+
}
|
|
221840
|
+
return [
|
|
221841
|
+
{
|
|
221842
|
+
name: connection.mysqlConnection.database || "mysql",
|
|
221843
|
+
isHidden: false,
|
|
221844
|
+
isDefault: true
|
|
221851
221845
|
}
|
|
221852
|
-
|
|
221853
|
-
|
|
221854
|
-
|
|
221855
|
-
|
|
221856
|
-
|
|
221857
|
-
|
|
221858
|
-
|
|
221859
|
-
|
|
221860
|
-
|
|
221861
|
-
|
|
221862
|
-
|
|
221863
|
-
|
|
221864
|
-
|
|
221865
|
-
|
|
221866
|
-
|
|
221867
|
-
|
|
221868
|
-
|
|
221869
|
-
|
|
221870
|
-
|
|
221871
|
-
|
|
221872
|
-
|
|
221873
|
-
|
|
221874
|
-
|
|
221875
|
-
|
|
221876
|
-
|
|
221877
|
-
|
|
221878
|
-
|
|
221879
|
-
|
|
221880
|
-
|
|
221881
|
-
|
|
221846
|
+
];
|
|
221847
|
+
}
|
|
221848
|
+
async function getSchemasForSnowflake(connection, malloyConnection) {
|
|
221849
|
+
if (!connection.snowflakeConnection) {
|
|
221850
|
+
throw new Error("Snowflake connection is required");
|
|
221851
|
+
}
|
|
221852
|
+
try {
|
|
221853
|
+
const database = connection.snowflakeConnection.database;
|
|
221854
|
+
const schema = connection.snowflakeConnection.schema;
|
|
221855
|
+
const filters = [];
|
|
221856
|
+
if (database) {
|
|
221857
|
+
filters.push(`CATALOG_NAME = '${database}'`);
|
|
221858
|
+
}
|
|
221859
|
+
if (schema) {
|
|
221860
|
+
filters.push(`SCHEMA_NAME = '${schema}'`);
|
|
221861
|
+
}
|
|
221862
|
+
const whereClause = filters.length > 0 ? `WHERE ${filters.join(" AND ")}` : "";
|
|
221863
|
+
const result = await malloyConnection.runSQL(`SELECT CATALOG_NAME, SCHEMA_NAME, SCHEMA_OWNER FROM ${database ? `${database}.` : ""}INFORMATION_SCHEMA.SCHEMATA ${whereClause} ORDER BY SCHEMA_NAME`);
|
|
221864
|
+
const rows = standardizeRunSQLResult2(result);
|
|
221865
|
+
return rows.map((row) => {
|
|
221866
|
+
const typedRow = row;
|
|
221867
|
+
const catalogName = String(typedRow.CATALOG_NAME ?? typedRow.catalog_name ?? "");
|
|
221868
|
+
const schemaName = String(typedRow.SCHEMA_NAME ?? typedRow.schema_name ?? "");
|
|
221869
|
+
const owner = String(typedRow.SCHEMA_OWNER ?? typedRow.schema_owner ?? "");
|
|
221870
|
+
return {
|
|
221871
|
+
name: `${catalogName}.${schemaName}`,
|
|
221872
|
+
isHidden: ["SNOWFLAKE", ""].includes(owner) || schemaName === "INFORMATION_SCHEMA",
|
|
221873
|
+
isDefault: schema ? schemaName === schema : false
|
|
221874
|
+
};
|
|
221875
|
+
});
|
|
221876
|
+
} catch (error) {
|
|
221877
|
+
logger.error(`Error getting schemas for Snowflake connection ${connection.name}`, { error });
|
|
221878
|
+
throw new Error(`Failed to get schemas for Snowflake connection ${connection.name}: ${error.message}`);
|
|
221879
|
+
}
|
|
221880
|
+
}
|
|
221881
|
+
async function getSchemasForTrino(connection, malloyConnection) {
|
|
221882
|
+
if (!connection.trinoConnection) {
|
|
221883
|
+
throw new Error("Trino connection is required");
|
|
221884
|
+
}
|
|
221885
|
+
try {
|
|
221886
|
+
const configuredSchema = connection.trinoConnection.schema;
|
|
221887
|
+
let allRows = [];
|
|
221888
|
+
if (connection.trinoConnection.catalog) {
|
|
221889
|
+
const catalog = connection.trinoConnection.catalog;
|
|
221890
|
+
const result = await malloyConnection.runSQL(`SELECT schema_name FROM ${catalog}.information_schema.schemata ORDER BY schema_name`);
|
|
221882
221891
|
const rows = standardizeRunSQLResult2(result);
|
|
221883
|
-
|
|
221884
|
-
const
|
|
221892
|
+
allRows = rows.map((row) => {
|
|
221893
|
+
const r = row;
|
|
221885
221894
|
return {
|
|
221886
|
-
|
|
221887
|
-
|
|
221888
|
-
isDefault: typedRow.Schema === connection.trinoConnection?.schema
|
|
221895
|
+
catalog,
|
|
221896
|
+
schema: String(r.schema_name ?? r.Schema ?? "")
|
|
221889
221897
|
};
|
|
221890
221898
|
});
|
|
221891
|
-
}
|
|
221892
|
-
|
|
221893
|
-
|
|
221894
|
-
|
|
221895
|
-
|
|
221896
|
-
if (!connection.duckdbConnection) {
|
|
221897
|
-
throw new Error("DuckDB connection is required");
|
|
221898
|
-
}
|
|
221899
|
-
try {
|
|
221900
|
-
const result = await malloyConnection.runSQL("SELECT DISTINCT schema_name,catalog_name FROM information_schema.schemata ORDER BY catalog_name,schema_name", { rowLimit: 1000 });
|
|
221901
|
-
const rows = standardizeRunSQLResult2(result);
|
|
221902
|
-
const schemas = rows.map((row) => {
|
|
221903
|
-
const typedRow = row;
|
|
221904
|
-
const schemaName = typedRow.schema_name;
|
|
221905
|
-
const catalogName = typedRow.catalog_name;
|
|
221906
|
-
return {
|
|
221907
|
-
name: `${catalogName}.${schemaName}`,
|
|
221908
|
-
isHidden: [
|
|
221909
|
-
"information_schema",
|
|
221910
|
-
"performance_schema",
|
|
221911
|
-
"",
|
|
221912
|
-
"SNOWFLAKE",
|
|
221913
|
-
"information_schema",
|
|
221914
|
-
"pg_catalog",
|
|
221915
|
-
"pg_toast"
|
|
221916
|
-
].includes(schemaName) || ["md_information_schema", "system"].includes(catalogName),
|
|
221917
|
-
isDefault: catalogName === "main"
|
|
221918
|
-
};
|
|
221899
|
+
} else {
|
|
221900
|
+
const catalogsResult = await malloyConnection.runSQL(`SHOW CATALOGS`);
|
|
221901
|
+
const catalogNames = standardizeRunSQLResult2(catalogsResult).map((row) => {
|
|
221902
|
+
const r = row;
|
|
221903
|
+
return String(r.Catalog ?? r.catalog ?? "");
|
|
221919
221904
|
});
|
|
221920
|
-
const
|
|
221921
|
-
const cloudDatabases = attachedDatabases.filter((attachedDb) => (attachedDb.type === "gcs" || attachedDb.type === "s3") && (attachedDb.gcsConnection || attachedDb.s3Connection));
|
|
221922
|
-
const cloudDbPromises = cloudDatabases.map(async (attachedDb) => {
|
|
221923
|
-
const dbType = attachedDb.type;
|
|
221924
|
-
const credentials = dbType === "gcs" ? gcsConnectionToCredentials(attachedDb.gcsConnection) : s3ConnectionToCredentials(attachedDb.s3Connection);
|
|
221905
|
+
for (const catalog of catalogNames) {
|
|
221925
221906
|
try {
|
|
221926
|
-
|
|
221927
|
-
|
|
221928
|
-
|
|
221929
|
-
|
|
221930
|
-
|
|
221931
|
-
|
|
221932
|
-
|
|
221933
|
-
|
|
221934
|
-
|
|
221935
|
-
|
|
221936
|
-
|
|
221937
|
-
for (const attachedDb of azureDatabases) {
|
|
221938
|
-
if (attachedDb.name) {
|
|
221939
|
-
schemas.push({
|
|
221940
|
-
name: attachedDb.name,
|
|
221941
|
-
isHidden: false,
|
|
221942
|
-
isDefault: false
|
|
221943
|
-
});
|
|
221907
|
+
const result = await malloyConnection.runSQL(`SELECT schema_name FROM ${catalog}.information_schema.schemata ORDER BY schema_name`);
|
|
221908
|
+
const rows = standardizeRunSQLResult2(result);
|
|
221909
|
+
for (const row of rows) {
|
|
221910
|
+
const r = row;
|
|
221911
|
+
allRows.push({
|
|
221912
|
+
catalog,
|
|
221913
|
+
schema: String(r.schema_name ?? r.Schema ?? "")
|
|
221914
|
+
});
|
|
221915
|
+
}
|
|
221916
|
+
} catch (catalogError) {
|
|
221917
|
+
logger.warn(`Failed to list schemas for Trino catalog ${catalog}`, { error: catalogError });
|
|
221944
221918
|
}
|
|
221945
221919
|
}
|
|
221946
|
-
return schemas;
|
|
221947
|
-
} catch (error) {
|
|
221948
|
-
console.error(`Error getting schemas for DuckDB connection ${connection.name}:`, error);
|
|
221949
|
-
throw new Error(`Failed to get schemas for DuckDB connection ${connection.name}: ${error.message}`);
|
|
221950
|
-
}
|
|
221951
|
-
} else if (connection.type === "motherduck") {
|
|
221952
|
-
if (!connection.motherduckConnection) {
|
|
221953
|
-
throw new Error("MotherDuck connection is required");
|
|
221954
221920
|
}
|
|
221955
|
-
|
|
221956
|
-
const
|
|
221957
|
-
|
|
221958
|
-
|
|
221959
|
-
|
|
221960
|
-
|
|
221961
|
-
|
|
221962
|
-
|
|
221963
|
-
|
|
221964
|
-
|
|
221965
|
-
|
|
221966
|
-
|
|
221967
|
-
|
|
221968
|
-
|
|
221969
|
-
|
|
221970
|
-
|
|
221971
|
-
|
|
221972
|
-
|
|
221973
|
-
|
|
221921
|
+
return allRows.map(({ catalog, schema }) => {
|
|
221922
|
+
const name = connection.trinoConnection?.catalog ? schema : `${catalog}.${schema}`;
|
|
221923
|
+
return {
|
|
221924
|
+
name,
|
|
221925
|
+
isHidden: ["information_schema", "performance_schema"].includes(schema),
|
|
221926
|
+
isDefault: configuredSchema ? schema === configuredSchema : false
|
|
221927
|
+
};
|
|
221928
|
+
});
|
|
221929
|
+
} catch (error) {
|
|
221930
|
+
logger.error(`Error getting schemas for Trino connection ${connection.name}`, { error });
|
|
221931
|
+
throw new Error(`Failed to get schemas for Trino connection ${connection.name}: ${error.message}`);
|
|
221932
|
+
}
|
|
221933
|
+
}
|
|
221934
|
+
async function getSchemasForDuckDB(connection, malloyConnection) {
|
|
221935
|
+
if (!connection.duckdbConnection) {
|
|
221936
|
+
throw new Error("DuckDB connection is required");
|
|
221937
|
+
}
|
|
221938
|
+
try {
|
|
221939
|
+
const result = await malloyConnection.runSQL("SELECT DISTINCT schema_name,catalog_name FROM information_schema.schemata ORDER BY catalog_name,schema_name", { rowLimit: 1000 });
|
|
221940
|
+
const rows = standardizeRunSQLResult2(result);
|
|
221941
|
+
const schemas = rows.map((row) => {
|
|
221942
|
+
const typedRow = row;
|
|
221943
|
+
const schemaName = String(typedRow.schema_name ?? "");
|
|
221944
|
+
const catalogName = String(typedRow.catalog_name ?? "");
|
|
221945
|
+
return {
|
|
221946
|
+
name: `${catalogName}.${schemaName}`,
|
|
221947
|
+
isHidden: [
|
|
221948
|
+
"information_schema",
|
|
221949
|
+
"performance_schema",
|
|
221950
|
+
"pg_catalog",
|
|
221951
|
+
"pg_toast",
|
|
221952
|
+
""
|
|
221953
|
+
].includes(schemaName) || ["md_information_schema", "system"].includes(catalogName),
|
|
221954
|
+
isDefault: catalogName === "main"
|
|
221955
|
+
};
|
|
221956
|
+
});
|
|
221957
|
+
const attachedDatabases = connection.duckdbConnection.attachedDatabases || [];
|
|
221958
|
+
const cloudDatabases = attachedDatabases.filter((attachedDb) => (attachedDb.type === "gcs" || attachedDb.type === "s3") && (attachedDb.gcsConnection || attachedDb.s3Connection));
|
|
221959
|
+
const cloudDbPromises = cloudDatabases.map(async (attachedDb) => {
|
|
221960
|
+
const dbType = attachedDb.type;
|
|
221961
|
+
const credentials = dbType === "gcs" ? gcsConnectionToCredentials(attachedDb.gcsConnection) : s3ConnectionToCredentials(attachedDb.s3Connection);
|
|
221962
|
+
try {
|
|
221963
|
+
return await listCloudDirectorySchemas(credentials);
|
|
221964
|
+
} catch (cloudError) {
|
|
221965
|
+
logger.warn(`Failed to list ${dbType.toUpperCase()} directory schemas for ${attachedDb.name}`, { error: cloudError });
|
|
221966
|
+
return [];
|
|
221967
|
+
}
|
|
221968
|
+
});
|
|
221969
|
+
const cloudSchemaArrays = await Promise.all(cloudDbPromises);
|
|
221970
|
+
for (const cloudSchemas of cloudSchemaArrays) {
|
|
221971
|
+
schemas.push(...cloudSchemas);
|
|
221974
221972
|
}
|
|
221975
|
-
|
|
221976
|
-
|
|
221977
|
-
|
|
221978
|
-
|
|
221979
|
-
|
|
221980
|
-
|
|
221981
|
-
const typedRow = row;
|
|
221982
|
-
const schemaName = typedRow.schema_name;
|
|
221983
|
-
const shouldShow = schemaName === "main" || schemaName === "public";
|
|
221984
|
-
return {
|
|
221985
|
-
name: schemaName,
|
|
221986
|
-
isHidden: !shouldShow,
|
|
221973
|
+
const azureDatabases = attachedDatabases.filter((attachedDb) => attachedDb.type === "azure" && attachedDb.azureConnection);
|
|
221974
|
+
for (const attachedDb of azureDatabases) {
|
|
221975
|
+
if (attachedDb.name) {
|
|
221976
|
+
schemas.push({
|
|
221977
|
+
name: attachedDb.name,
|
|
221978
|
+
isHidden: false,
|
|
221987
221979
|
isDefault: false
|
|
221988
|
-
};
|
|
221989
|
-
}
|
|
221990
|
-
} catch (error) {
|
|
221991
|
-
logger.error(`Error getting schemas for DuckLake connection ${connection.name}`, { error });
|
|
221992
|
-
throw new Error(`Failed to get schemas for DuckLake connection ${connection.name}: ${error.message}`);
|
|
221980
|
+
});
|
|
221981
|
+
}
|
|
221993
221982
|
}
|
|
221994
|
-
|
|
221995
|
-
|
|
221983
|
+
return schemas;
|
|
221984
|
+
} catch (error) {
|
|
221985
|
+
logger.error(`Error getting schemas for DuckDB connection ${connection.name}`, { error });
|
|
221986
|
+
throw new Error(`Failed to get schemas for DuckDB connection ${connection.name}: ${error.message}`);
|
|
221987
|
+
}
|
|
221988
|
+
}
|
|
221989
|
+
async function getSchemasForMotherDuck(connection, malloyConnection) {
|
|
221990
|
+
if (!connection.motherduckConnection) {
|
|
221991
|
+
throw new Error("MotherDuck connection is required");
|
|
221992
|
+
}
|
|
221993
|
+
try {
|
|
221994
|
+
const database = connection.motherduckConnection.database;
|
|
221995
|
+
const whereClause = database ? `WHERE catalog_name = '${database}'` : "";
|
|
221996
|
+
const result = await malloyConnection.runSQL(`SELECT DISTINCT schema_name FROM information_schema.schemata ${whereClause} ORDER BY schema_name`);
|
|
221997
|
+
const rows = standardizeRunSQLResult2(result);
|
|
221998
|
+
return rows.map((row) => {
|
|
221999
|
+
const typedRow = row;
|
|
222000
|
+
const schemaName = String(typedRow.schema_name ?? typedRow.SCHEMA_NAME ?? "");
|
|
222001
|
+
return {
|
|
222002
|
+
name: schemaName,
|
|
222003
|
+
isHidden: ["information_schema", "performance_schema", ""].includes(schemaName),
|
|
222004
|
+
isDefault: schemaName === "main"
|
|
222005
|
+
};
|
|
222006
|
+
});
|
|
222007
|
+
} catch (error) {
|
|
222008
|
+
logger.error(`Error getting schemas for MotherDuck connection ${connection.name}`, { error });
|
|
222009
|
+
throw new Error(`Failed to get schemas for MotherDuck connection ${connection.name}: ${error.message}`);
|
|
222010
|
+
}
|
|
222011
|
+
}
|
|
222012
|
+
async function getSchemasForDuckLake(connection, malloyConnection) {
|
|
222013
|
+
try {
|
|
222014
|
+
const catalogName = connection.name;
|
|
222015
|
+
const result = await malloyConnection.runSQL(`SELECT schema_name FROM information_schema.schemata WHERE catalog_name = '${catalogName}' ORDER BY schema_name`, { rowLimit: 1000 });
|
|
222016
|
+
const rows = standardizeRunSQLResult2(result);
|
|
222017
|
+
return rows.map((row) => {
|
|
222018
|
+
const typedRow = row;
|
|
222019
|
+
const schemaName = typedRow.schema_name;
|
|
222020
|
+
const shouldShow = schemaName === "main" || schemaName === "public";
|
|
222021
|
+
return {
|
|
222022
|
+
name: schemaName,
|
|
222023
|
+
isHidden: !shouldShow,
|
|
222024
|
+
isDefault: false
|
|
222025
|
+
};
|
|
222026
|
+
});
|
|
222027
|
+
} catch (error) {
|
|
222028
|
+
logger.error(`Error getting schemas for DuckLake connection ${connection.name}`, { error });
|
|
222029
|
+
throw new Error(`Failed to get schemas for DuckLake connection ${connection.name}: ${error.message}`);
|
|
222030
|
+
}
|
|
222031
|
+
}
|
|
222032
|
+
async function getSchemasForConnection(connection, malloyConnection) {
|
|
222033
|
+
switch (connection.type) {
|
|
222034
|
+
case "bigquery":
|
|
222035
|
+
return getSchemasForBigQuery(connection);
|
|
222036
|
+
case "postgres":
|
|
222037
|
+
return getSchemasForPostgres(connection, malloyConnection);
|
|
222038
|
+
case "mysql":
|
|
222039
|
+
return getSchemasForMySQL(connection);
|
|
222040
|
+
case "snowflake":
|
|
222041
|
+
return getSchemasForSnowflake(connection, malloyConnection);
|
|
222042
|
+
case "trino":
|
|
222043
|
+
return getSchemasForTrino(connection, malloyConnection);
|
|
222044
|
+
case "duckdb":
|
|
222045
|
+
return getSchemasForDuckDB(connection, malloyConnection);
|
|
222046
|
+
case "motherduck":
|
|
222047
|
+
return getSchemasForMotherDuck(connection, malloyConnection);
|
|
222048
|
+
case "ducklake":
|
|
222049
|
+
return getSchemasForDuckLake(connection, malloyConnection);
|
|
222050
|
+
default:
|
|
222051
|
+
throw new Error(`Unsupported connection type: ${connection.type}`);
|
|
221996
222052
|
}
|
|
221997
222053
|
}
|
|
221998
222054
|
function getFileType2(key) {
|
|
@@ -222155,281 +222211,217 @@ async function describeAzureFile(malloyConnection, fileUri, azureConnection) {
|
|
|
222155
222211
|
throw new Error(`Failed to describe Azure file: ${error instanceof Error ? error.message : String(error)}`);
|
|
222156
222212
|
}
|
|
222157
222213
|
}
|
|
222158
|
-
async function
|
|
222159
|
-
|
|
222160
|
-
|
|
222161
|
-
|
|
222162
|
-
|
|
222163
|
-
|
|
222164
|
-
|
|
222165
|
-
|
|
222166
|
-
|
|
222214
|
+
async function listTablesForSchema(connection, schemaName, malloyConnection, tableNames) {
|
|
222215
|
+
switch (connection.type) {
|
|
222216
|
+
case "bigquery":
|
|
222217
|
+
return listTablesForBigQuery(connection, schemaName, malloyConnection, tableNames);
|
|
222218
|
+
case "mysql":
|
|
222219
|
+
return listTablesForMySQL(connection, schemaName, malloyConnection, tableNames);
|
|
222220
|
+
case "postgres":
|
|
222221
|
+
return listTablesForPostgres(connection, schemaName, malloyConnection, tableNames);
|
|
222222
|
+
case "snowflake":
|
|
222223
|
+
return listTablesForSnowflake(connection, schemaName, malloyConnection, tableNames);
|
|
222224
|
+
case "trino":
|
|
222225
|
+
return listTablesForTrino(connection, schemaName, malloyConnection, tableNames);
|
|
222226
|
+
case "duckdb":
|
|
222227
|
+
return listTablesForDuckDB(connection, schemaName, malloyConnection, tableNames);
|
|
222228
|
+
case "motherduck":
|
|
222229
|
+
return listTablesForMotherDuck(connection, schemaName, malloyConnection, tableNames);
|
|
222230
|
+
case "ducklake":
|
|
222231
|
+
return listTablesForDuckLake(connection, schemaName, malloyConnection, tableNames);
|
|
222232
|
+
default:
|
|
222233
|
+
throw new Error(`Unsupported connection type: ${connection.type}`);
|
|
222234
|
+
}
|
|
222235
|
+
}
|
|
222236
|
+
async function listTablesForBigQuery(connection, schemaName, malloyConnection, tableNames) {
|
|
222237
|
+
try {
|
|
222238
|
+
const bigquery = createBigQueryClient(connection);
|
|
222239
|
+
const dataset = bigquery.dataset(schemaName);
|
|
222240
|
+
const [tables] = await dataset.getTables();
|
|
222241
|
+
let names = tables.map((table) => table.id).filter((id) => id !== undefined);
|
|
222242
|
+
if (tableNames) {
|
|
222243
|
+
const allowed = new Set(tableNames);
|
|
222244
|
+
names = names.filter((id) => allowed.has(id));
|
|
222245
|
+
}
|
|
222246
|
+
const results = await Promise.all(names.map(async (tableName) => {
|
|
222247
|
+
const tablePath = `${schemaName}.${tableName}`;
|
|
222248
|
+
try {
|
|
222249
|
+
const source = await malloyConnection.fetchTableSchema(tableName, tablePath);
|
|
222250
|
+
const columns = source?.fields?.map((field) => ({
|
|
222251
|
+
name: field.name,
|
|
222252
|
+
type: field.type
|
|
222253
|
+
})) || [];
|
|
222254
|
+
return { resource: tablePath, columns };
|
|
222255
|
+
} catch (error) {
|
|
222256
|
+
logger.warn(`Failed to get schema for table ${tableName}`, {
|
|
222257
|
+
error: extractErrorDataFromError(error),
|
|
222258
|
+
schemaName,
|
|
222259
|
+
tableName
|
|
222260
|
+
});
|
|
222261
|
+
return { resource: tablePath, columns: [] };
|
|
222167
222262
|
}
|
|
222263
|
+
}));
|
|
222264
|
+
return results;
|
|
222265
|
+
} catch (error) {
|
|
222266
|
+
logger.error(`Error getting tables for BigQuery schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222267
|
+
throw new Error(`Failed to get tables for BigQuery schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222268
|
+
}
|
|
222269
|
+
}
|
|
222270
|
+
async function listTablesForMySQL(connection, schemaName, malloyConnection, tableNames) {
|
|
222271
|
+
if (!connection.mysqlConnection) {
|
|
222272
|
+
throw new Error("Mysql connection is required");
|
|
222273
|
+
}
|
|
222274
|
+
try {
|
|
222275
|
+
const result = await malloyConnection.runSQL(`SELECT TABLE_NAME, COLUMN_NAME, DATA_TYPE FROM information_schema.columns WHERE table_schema = '${schemaName}' ${sqlInFilter("TABLE_NAME", tableNames)} ORDER BY TABLE_NAME, ORDINAL_POSITION`);
|
|
222276
|
+
const rows = standardizeRunSQLResult2(result);
|
|
222277
|
+
return groupColumnRowsIntoTables(rows, (t) => `${schemaName}.${t}`);
|
|
222278
|
+
} catch (error) {
|
|
222279
|
+
logger.error(`Error getting tables for MySQL schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222280
|
+
throw new Error(`Failed to get tables for MySQL schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222281
|
+
}
|
|
222282
|
+
}
|
|
222283
|
+
async function listTablesForPostgres(connection, schemaName, malloyConnection, tableNames) {
|
|
222284
|
+
if (!connection.postgresConnection) {
|
|
222285
|
+
throw new Error("Postgres connection is required");
|
|
222286
|
+
}
|
|
222287
|
+
try {
|
|
222288
|
+
const result = await malloyConnection.runSQL(`SELECT table_name, column_name, data_type FROM information_schema.columns WHERE table_schema = '${schemaName}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`);
|
|
222289
|
+
const rows = standardizeRunSQLResult2(result);
|
|
222290
|
+
return groupColumnRowsIntoTables(rows, (t) => `${schemaName}.${t}`);
|
|
222291
|
+
} catch (error) {
|
|
222292
|
+
logger.error(`Error getting tables for Postgres schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222293
|
+
throw new Error(`Failed to get tables for Postgres schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222294
|
+
}
|
|
222295
|
+
}
|
|
222296
|
+
async function listTablesForSnowflake(connection, schemaName, malloyConnection, tableNames) {
|
|
222297
|
+
if (!connection.snowflakeConnection) {
|
|
222298
|
+
throw new Error("Snowflake connection is required");
|
|
222299
|
+
}
|
|
222300
|
+
try {
|
|
222301
|
+
const parts = schemaName.split(".");
|
|
222302
|
+
let databaseName;
|
|
222303
|
+
let schemaOnly;
|
|
222304
|
+
if (parts.length >= 2) {
|
|
222305
|
+
databaseName = parts[0];
|
|
222306
|
+
schemaOnly = parts[1];
|
|
222307
|
+
} else {
|
|
222308
|
+
databaseName = connection.snowflakeConnection.database ?? "";
|
|
222309
|
+
schemaOnly = parts[0];
|
|
222310
|
+
}
|
|
222311
|
+
if (!databaseName) {
|
|
222312
|
+
throw new Error(`Cannot resolve database for schema "${schemaName}": provide DATABASE.SCHEMA or configure a database on the connection`);
|
|
222168
222313
|
}
|
|
222314
|
+
const qualifiedSchema = `${databaseName}.${schemaOnly}`;
|
|
222315
|
+
const result = await malloyConnection.runSQL(`SELECT TABLE_NAME, COLUMN_NAME, DATA_TYPE FROM ${databaseName}.INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = '${schemaOnly}' ${sqlInFilter("TABLE_NAME", tableNames)} ORDER BY TABLE_NAME, ORDINAL_POSITION`);
|
|
222316
|
+
const rows = standardizeRunSQLResult2(result);
|
|
222317
|
+
return groupColumnRowsIntoTables(rows, (t) => `${qualifiedSchema}.${t}`);
|
|
222318
|
+
} catch (error) {
|
|
222319
|
+
logger.error(`Error getting tables for Snowflake schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222320
|
+
throw new Error(`Failed to get tables for Snowflake schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222169
222321
|
}
|
|
222170
|
-
|
|
222171
|
-
|
|
222322
|
+
}
|
|
222323
|
+
async function listTablesForTrino(connection, schemaName, malloyConnection, tableNames) {
|
|
222324
|
+
if (!connection.trinoConnection) {
|
|
222325
|
+
throw new Error("Trino connection is required");
|
|
222326
|
+
}
|
|
222327
|
+
try {
|
|
222328
|
+
let catalogPrefix;
|
|
222329
|
+
let schemaOnly;
|
|
222330
|
+
let resourcePrefix;
|
|
222331
|
+
if (connection.trinoConnection.catalog) {
|
|
222332
|
+
catalogPrefix = `${connection.trinoConnection.catalog}.`;
|
|
222333
|
+
schemaOnly = schemaName;
|
|
222334
|
+
resourcePrefix = `${connection.trinoConnection.catalog}.${schemaName}`;
|
|
222335
|
+
} else {
|
|
222336
|
+
const dotIdx = schemaName.indexOf(".");
|
|
222337
|
+
if (dotIdx > 0) {
|
|
222338
|
+
catalogPrefix = `${schemaName.substring(0, dotIdx)}.`;
|
|
222339
|
+
schemaOnly = schemaName.substring(dotIdx + 1);
|
|
222340
|
+
} else {
|
|
222341
|
+
catalogPrefix = "";
|
|
222342
|
+
schemaOnly = schemaName;
|
|
222343
|
+
}
|
|
222344
|
+
resourcePrefix = schemaName;
|
|
222345
|
+
}
|
|
222346
|
+
const result = await malloyConnection.runSQL(`SELECT table_name, column_name, data_type FROM ${catalogPrefix}information_schema.columns WHERE table_schema = '${schemaOnly}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`);
|
|
222347
|
+
const rows = standardizeRunSQLResult2(result);
|
|
222348
|
+
return groupColumnRowsIntoTables(rows, (t) => `${resourcePrefix}.${t}`);
|
|
222349
|
+
} catch (error) {
|
|
222350
|
+
logger.error(`Error getting tables for Trino schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222351
|
+
throw new Error(`Failed to get tables for Trino schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222352
|
+
}
|
|
222353
|
+
}
|
|
222354
|
+
async function listTablesForDuckDB(connection, schemaName, malloyConnection, tableNames) {
|
|
222355
|
+
if (!connection.duckdbConnection) {
|
|
222356
|
+
throw new Error("DuckDB connection is required");
|
|
222357
|
+
}
|
|
222358
|
+
const attachedDbs = connection.duckdbConnection.attachedDatabases || [];
|
|
222359
|
+
const azureDb = attachedDbs.find((db) => db.type === "azure" && db.name === schemaName && db.azureConnection);
|
|
222360
|
+
if (azureDb) {
|
|
222361
|
+
const azureConn = azureDb.azureConnection;
|
|
222362
|
+
const fileUrl = azureConn.authType === "sas_token" ? azureConn.sasUrl : azureConn.fileUrl;
|
|
222363
|
+
if (fileUrl) {
|
|
222364
|
+
return describeAzureFile(malloyConnection, fileUrl, azureConn);
|
|
222365
|
+
}
|
|
222366
|
+
}
|
|
222367
|
+
if (schemaName.startsWith("abfss://") || schemaName.startsWith("https://") || schemaName.startsWith("az://")) {
|
|
222368
|
+
return describeAzureFile(malloyConnection, schemaName);
|
|
222172
222369
|
}
|
|
222173
222370
|
const parsedUri = parseCloudUri(schemaName);
|
|
222174
|
-
if (parsedUri
|
|
222371
|
+
if (parsedUri) {
|
|
222175
222372
|
const {
|
|
222176
222373
|
type: cloudType,
|
|
222177
222374
|
bucket: bucketName,
|
|
222178
222375
|
path: directoryPath
|
|
222179
222376
|
} = parsedUri;
|
|
222180
|
-
const
|
|
222181
|
-
const credentials = getCloudCredentialsFromAttachedDatabases(attachedDatabases, cloudType);
|
|
222377
|
+
const credentials = getCloudCredentialsFromAttachedDatabases(attachedDbs, cloudType);
|
|
222182
222378
|
if (!credentials) {
|
|
222183
222379
|
throw new Error(`${cloudType.toUpperCase()} credentials not found in attached databases`);
|
|
222184
222380
|
}
|
|
222185
222381
|
const fileKeys = await listDataFilesInDirectory(credentials, bucketName, directoryPath);
|
|
222186
|
-
return
|
|
222187
|
-
}
|
|
222188
|
-
|
|
222189
|
-
|
|
222190
|
-
|
|
222191
|
-
|
|
222192
|
-
|
|
222382
|
+
return getCloudTablesWithColumns(malloyConnection, credentials, bucketName, fileKeys);
|
|
222383
|
+
}
|
|
222384
|
+
const dotIdx = schemaName.indexOf(".");
|
|
222385
|
+
if (dotIdx < 0) {
|
|
222386
|
+
throw new Error(`DuckDB schema name must be qualified as "catalog.schema", got "${schemaName}"`);
|
|
222387
|
+
}
|
|
222388
|
+
const catalogName = schemaName.substring(0, dotIdx);
|
|
222389
|
+
const actualSchemaName = schemaName.substring(dotIdx + 1);
|
|
222390
|
+
try {
|
|
222391
|
+
const result = await malloyConnection.runSQL(`SELECT table_name, column_name, data_type FROM information_schema.columns WHERE table_schema = '${actualSchemaName}' AND table_catalog = '${catalogName}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`);
|
|
222392
|
+
const rows = standardizeRunSQLResult2(result);
|
|
222393
|
+
return groupColumnRowsIntoTables(rows, (t) => `${schemaName}.${t}`);
|
|
222394
|
+
} catch (error) {
|
|
222395
|
+
logger.error(`Error getting tables for DuckDB schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222396
|
+
throw new Error(`Failed to get tables for DuckDB schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222193
222397
|
}
|
|
222194
|
-
const tableNames = await listTablesForSchema(connection, schemaName, malloyConnection);
|
|
222195
|
-
const tableSourcePromises = tableNames.map(async (tableName) => {
|
|
222196
|
-
try {
|
|
222197
|
-
let tablePath;
|
|
222198
|
-
if (connection.type === "trino") {
|
|
222199
|
-
if (connection.trinoConnection?.catalog) {
|
|
222200
|
-
tablePath = `${connection.trinoConnection?.catalog}.${schemaName}.${tableName}`;
|
|
222201
|
-
} else {
|
|
222202
|
-
tablePath = `${schemaName}.${tableName}`;
|
|
222203
|
-
}
|
|
222204
|
-
} else if (connection.type === "ducklake") {
|
|
222205
|
-
tablePath = `${schemaName}.${tableName}`;
|
|
222206
|
-
} else {
|
|
222207
|
-
tablePath = `${schemaName}.${tableName}`;
|
|
222208
|
-
}
|
|
222209
|
-
logger.info(`Processing table: ${tableName} in schema: ${schemaName}`, { tablePath, connectionType: connection.type });
|
|
222210
|
-
let tableSource;
|
|
222211
|
-
if (fetchTableSchema) {
|
|
222212
|
-
tableSource = await getConnectionTableSource(malloyConnection, tableName, tablePath);
|
|
222213
|
-
}
|
|
222214
|
-
return {
|
|
222215
|
-
resource: tablePath,
|
|
222216
|
-
columns: tableSource?.columns || []
|
|
222217
|
-
};
|
|
222218
|
-
} catch (error) {
|
|
222219
|
-
logger.warn(`Failed to get schema for table ${tableName}`, {
|
|
222220
|
-
error: extractErrorDataFromError(error),
|
|
222221
|
-
schemaName,
|
|
222222
|
-
tableName
|
|
222223
|
-
});
|
|
222224
|
-
return {
|
|
222225
|
-
resource: `${schemaName}.${tableName}`,
|
|
222226
|
-
columns: []
|
|
222227
|
-
};
|
|
222228
|
-
}
|
|
222229
|
-
});
|
|
222230
|
-
const tableResults = await Promise.all(tableSourcePromises);
|
|
222231
|
-
return tableResults;
|
|
222232
222398
|
}
|
|
222233
|
-
async function
|
|
222399
|
+
async function listTablesForMotherDuck(connection, schemaName, malloyConnection, tableNames) {
|
|
222400
|
+
if (!connection.motherduckConnection) {
|
|
222401
|
+
throw new Error("MotherDuck connection is required");
|
|
222402
|
+
}
|
|
222234
222403
|
try {
|
|
222235
|
-
|
|
222236
|
-
|
|
222237
|
-
|
|
222238
|
-
});
|
|
222239
|
-
const source = await malloyConnection.fetchTableSchema(tableKey, tablePath);
|
|
222240
|
-
if (source === undefined) {
|
|
222241
|
-
throw new ConnectionError(`Table ${tablePath} not found: ${JSON.stringify(source)}`);
|
|
222242
|
-
}
|
|
222243
|
-
if (!source) {
|
|
222244
|
-
throw new ConnectionError(`Invalid table source returned for ${tablePath}`);
|
|
222245
|
-
} else if (typeof source !== "object") {
|
|
222246
|
-
throw new ConnectionError(JSON.stringify(source));
|
|
222247
|
-
}
|
|
222248
|
-
const malloyFields = source.fields;
|
|
222249
|
-
if (!malloyFields || !Array.isArray(malloyFields)) {
|
|
222250
|
-
throw new ConnectionError(`Table ${tablePath} has no fields or invalid field structure`);
|
|
222251
|
-
}
|
|
222252
|
-
if (malloyFields.length === 0) {
|
|
222253
|
-
throw new ConnectionError(`Table ${tablePath} not found`);
|
|
222254
|
-
}
|
|
222255
|
-
const fields = malloyFields.map((field) => {
|
|
222256
|
-
return {
|
|
222257
|
-
name: field.name,
|
|
222258
|
-
type: field.type
|
|
222259
|
-
};
|
|
222260
|
-
});
|
|
222261
|
-
logger.debug(`Successfully fetched schema for ${tablePath}`, {
|
|
222262
|
-
fieldCount: fields.length
|
|
222263
|
-
});
|
|
222264
|
-
return {
|
|
222265
|
-
source: JSON.stringify(source),
|
|
222266
|
-
resource: tablePath,
|
|
222267
|
-
columns: fields
|
|
222268
|
-
};
|
|
222404
|
+
const result = await malloyConnection.runSQL(`SELECT table_name, column_name, data_type FROM information_schema.columns WHERE table_schema = '${schemaName}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`);
|
|
222405
|
+
const rows = standardizeRunSQLResult2(result);
|
|
222406
|
+
return groupColumnRowsIntoTables(rows, (t) => `${schemaName}.${t}`);
|
|
222269
222407
|
} catch (error) {
|
|
222270
|
-
|
|
222271
|
-
|
|
222272
|
-
error,
|
|
222273
|
-
tableKey,
|
|
222274
|
-
tablePath
|
|
222275
|
-
});
|
|
222276
|
-
throw new ConnectionError(errorMessage);
|
|
222408
|
+
logger.error(`Error getting tables for MotherDuck schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222409
|
+
throw new Error(`Failed to get tables for MotherDuck schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222277
222410
|
}
|
|
222278
222411
|
}
|
|
222279
|
-
async function
|
|
222280
|
-
if (
|
|
222281
|
-
|
|
222282
|
-
|
|
222283
|
-
|
|
222284
|
-
|
|
222285
|
-
|
|
222286
|
-
}
|
|
222287
|
-
|
|
222288
|
-
|
|
222289
|
-
|
|
222290
|
-
|
|
222291
|
-
|
|
222292
|
-
throw new Error("Mysql connection is required");
|
|
222293
|
-
}
|
|
222294
|
-
try {
|
|
222295
|
-
const result = await malloyConnection.runSQL(`SELECT TABLE_NAME FROM information_schema.tables WHERE table_schema = '${schemaName}' AND table_type = 'BASE TABLE'`);
|
|
222296
|
-
const rows = standardizeRunSQLResult2(result);
|
|
222297
|
-
return rows.map((row) => {
|
|
222298
|
-
const typedRow = row;
|
|
222299
|
-
return typedRow.TABLE_NAME;
|
|
222300
|
-
});
|
|
222301
|
-
} catch (error) {
|
|
222302
|
-
logger.error(`Error getting tables for MySQL schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222303
|
-
throw new Error(`Failed to get tables for MySQL schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222304
|
-
}
|
|
222305
|
-
} else if (connection.type === "postgres") {
|
|
222306
|
-
if (!connection.postgresConnection) {
|
|
222307
|
-
throw new Error("Postgres connection is required");
|
|
222308
|
-
}
|
|
222309
|
-
try {
|
|
222310
|
-
const result = await malloyConnection.runSQL(`SELECT table_name as row FROM information_schema.tables WHERE table_schema = '${schemaName}' ORDER BY table_name`);
|
|
222311
|
-
const rows = standardizeRunSQLResult2(result);
|
|
222312
|
-
return rows;
|
|
222313
|
-
} catch (error) {
|
|
222314
|
-
logger.error(`Error getting tables for Postgres schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222315
|
-
throw new Error(`Failed to get tables for Postgres schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222316
|
-
}
|
|
222317
|
-
} else if (connection.type === "snowflake") {
|
|
222318
|
-
if (!connection.snowflakeConnection) {
|
|
222319
|
-
throw new Error("Snowflake connection is required");
|
|
222320
|
-
}
|
|
222321
|
-
try {
|
|
222322
|
-
const tablesResult = await malloyConnection.runSQL(`SHOW TABLES IN SCHEMA ${schemaName} LIMIT 1000`);
|
|
222323
|
-
const viewsResult = await malloyConnection.runSQL(`SHOW VIEWS IN SCHEMA ${schemaName} LIMIT 1000`);
|
|
222324
|
-
const tableRows = standardizeRunSQLResult2(tablesResult);
|
|
222325
|
-
const viewRows = standardizeRunSQLResult2(viewsResult);
|
|
222326
|
-
logger.debug("Snowflake Tables Listed", { tableRows });
|
|
222327
|
-
logger.debug("Snowflake Views Listed", { viewRows });
|
|
222328
|
-
const rows = [...tableRows, ...viewRows];
|
|
222329
|
-
return rows.map((row) => {
|
|
222330
|
-
const typedRow = row;
|
|
222331
|
-
const name = typedRow.name ?? typedRow.NAME;
|
|
222332
|
-
return typeof name === "string" ? name : String(name);
|
|
222333
|
-
}).filter((id) => id.length > 0);
|
|
222334
|
-
} catch (error) {
|
|
222335
|
-
logger.error(`Error getting tables for Snowflake schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222336
|
-
throw new Error(`Failed to get tables for Snowflake schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222337
|
-
}
|
|
222338
|
-
} else if (connection.type === "trino") {
|
|
222339
|
-
if (!connection.trinoConnection) {
|
|
222340
|
-
throw new Error("Trino connection is required");
|
|
222341
|
-
}
|
|
222342
|
-
try {
|
|
222343
|
-
let result;
|
|
222344
|
-
if (connection.trinoConnection?.catalog) {
|
|
222345
|
-
result = await malloyConnection.runSQL(`SHOW TABLES FROM ${connection.trinoConnection.catalog}.${schemaName}`);
|
|
222346
|
-
} else {
|
|
222347
|
-
result = await malloyConnection.runSQL(`SHOW TABLES FROM ${schemaName}`);
|
|
222348
|
-
}
|
|
222349
|
-
const rows = standardizeRunSQLResult2(result);
|
|
222350
|
-
return rows.map((row) => {
|
|
222351
|
-
const typedRow = row;
|
|
222352
|
-
return typedRow.Table;
|
|
222353
|
-
});
|
|
222354
|
-
} catch (error) {
|
|
222355
|
-
logger.error(`Error getting tables for Trino schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222356
|
-
throw new Error(`Failed to get tables for Trino schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222357
|
-
}
|
|
222358
|
-
} else if (connection.type === "duckdb") {
|
|
222359
|
-
if (!connection.duckdbConnection) {
|
|
222360
|
-
throw new Error("DuckDB connection is required");
|
|
222361
|
-
}
|
|
222362
|
-
const parsedUri = parseCloudUri(schemaName);
|
|
222363
|
-
if (parsedUri) {
|
|
222364
|
-
const {
|
|
222365
|
-
type: cloudType,
|
|
222366
|
-
bucket: bucketName,
|
|
222367
|
-
path: directoryPath
|
|
222368
|
-
} = parsedUri;
|
|
222369
|
-
const attachedDatabases = connection.duckdbConnection.attachedDatabases || [];
|
|
222370
|
-
const credentials = getCloudCredentialsFromAttachedDatabases(attachedDatabases, cloudType);
|
|
222371
|
-
if (!credentials) {
|
|
222372
|
-
throw new Error(`${cloudType.toUpperCase()} credentials not found in attached databases`);
|
|
222373
|
-
}
|
|
222374
|
-
try {
|
|
222375
|
-
const fileKeys = await listDataFilesInDirectory(credentials, bucketName, directoryPath);
|
|
222376
|
-
return fileKeys.map((key) => {
|
|
222377
|
-
const lastSlash = key.lastIndexOf("/");
|
|
222378
|
-
return lastSlash > 0 ? key.substring(lastSlash + 1) : key;
|
|
222379
|
-
});
|
|
222380
|
-
} catch (error) {
|
|
222381
|
-
logger.error(`Error listing ${cloudType.toUpperCase()} objects in ${schemaName}`, {
|
|
222382
|
-
error
|
|
222383
|
-
});
|
|
222384
|
-
throw new Error(`Failed to list files in ${schemaName}: ${error.message}`);
|
|
222385
|
-
}
|
|
222386
|
-
}
|
|
222387
|
-
const catalogName = schemaName.split(".")[0];
|
|
222388
|
-
const actualSchemaName = schemaName.split(".")[1];
|
|
222389
|
-
try {
|
|
222390
|
-
const result = await malloyConnection.runSQL(`SELECT table_name FROM information_schema.tables WHERE table_schema = '${actualSchemaName}' and table_catalog = '${catalogName}' ORDER BY table_name`, { rowLimit: 1000 });
|
|
222391
|
-
const rows = standardizeRunSQLResult2(result);
|
|
222392
|
-
return rows.map((row) => {
|
|
222393
|
-
const typedRow = row;
|
|
222394
|
-
return typedRow.table_name;
|
|
222395
|
-
});
|
|
222396
|
-
} catch (error) {
|
|
222397
|
-
logger.error(`Error getting tables for DuckDB schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222398
|
-
throw new Error(`Failed to get tables for DuckDB schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222399
|
-
}
|
|
222400
|
-
} else if (connection.type === "motherduck") {
|
|
222401
|
-
if (!connection.motherduckConnection) {
|
|
222402
|
-
throw new Error("MotherDuck connection is required");
|
|
222403
|
-
}
|
|
222404
|
-
try {
|
|
222405
|
-
const result = await malloyConnection.runSQL(`SELECT table_name as row FROM information_schema.tables WHERE table_schema = '${schemaName}' ORDER BY table_name`, { rowLimit: 1000 });
|
|
222406
|
-
const rows = standardizeRunSQLResult2(result);
|
|
222407
|
-
return rows.map((row) => {
|
|
222408
|
-
const typedRow = row;
|
|
222409
|
-
return typedRow.row;
|
|
222410
|
-
});
|
|
222411
|
-
} catch (error) {
|
|
222412
|
-
logger.error(`Error getting tables for MotherDuck schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222413
|
-
throw new Error(`Failed to get tables for MotherDuck schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222414
|
-
}
|
|
222415
|
-
} else if (connection.type === "ducklake") {
|
|
222416
|
-
const catalogName = schemaName.split(".")[0];
|
|
222417
|
-
const actualSchemaName = schemaName.split(".")[1];
|
|
222418
|
-
console.error("catalogName", catalogName);
|
|
222419
|
-
console.error("actualSchemaName", actualSchemaName);
|
|
222420
|
-
try {
|
|
222421
|
-
const result = await malloyConnection.runSQL(`SELECT table_name FROM information_schema.tables WHERE table_schema = '${actualSchemaName}' AND table_catalog = '${catalogName}' ORDER BY table_name`, { rowLimit: 1000 });
|
|
222422
|
-
const rows = standardizeRunSQLResult2(result);
|
|
222423
|
-
return rows.map((row) => {
|
|
222424
|
-
const typedRow = row;
|
|
222425
|
-
return typedRow.table_name;
|
|
222426
|
-
});
|
|
222427
|
-
} catch (error) {
|
|
222428
|
-
logger.error(`Error getting tables for DuckLake schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222429
|
-
throw new Error(`Failed to get tables for DuckLake schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222430
|
-
}
|
|
222431
|
-
} else {
|
|
222432
|
-
throw new Error(`Unsupported connection type: ${connection.type}`);
|
|
222412
|
+
async function listTablesForDuckLake(connection, schemaName, malloyConnection, tableNames) {
|
|
222413
|
+
if (!schemaName.includes(".")) {
|
|
222414
|
+
schemaName = `${connection.name}.${schemaName}`;
|
|
222415
|
+
}
|
|
222416
|
+
const catalogName = schemaName.split(".")[0];
|
|
222417
|
+
const actualSchemaName = schemaName.split(".")[1];
|
|
222418
|
+
try {
|
|
222419
|
+
const result = await malloyConnection.runSQL(`SELECT table_name, column_name, data_type FROM information_schema.columns WHERE table_schema = '${actualSchemaName}' AND table_catalog = '${catalogName}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`);
|
|
222420
|
+
const rows = standardizeRunSQLResult2(result);
|
|
222421
|
+
return groupColumnRowsIntoTables(rows, (t) => `${schemaName}.${t}`);
|
|
222422
|
+
} catch (error) {
|
|
222423
|
+
logger.error(`Error getting tables for DuckLake schema ${schemaName} in connection ${connection.name}`, { error });
|
|
222424
|
+
throw new Error(`Failed to get tables for DuckLake schema ${schemaName} in connection ${connection.name}: ${error.message}`);
|
|
222433
222425
|
}
|
|
222434
222426
|
}
|
|
222435
222427
|
function extractErrorDataFromError(error) {
|
|
@@ -222455,23 +222447,6 @@ var AZURE_DATA_EXTENSIONS = [
|
|
|
222455
222447
|
".jsonl",
|
|
222456
222448
|
".ndjson"
|
|
222457
222449
|
];
|
|
222458
|
-
function parseFetchTableSchemaQueryParam(raw) {
|
|
222459
|
-
if (raw === undefined || raw === null) {
|
|
222460
|
-
return true;
|
|
222461
|
-
}
|
|
222462
|
-
const v = Array.isArray(raw) ? raw[0] : raw;
|
|
222463
|
-
if (v === "" || v === undefined) {
|
|
222464
|
-
return true;
|
|
222465
|
-
}
|
|
222466
|
-
if (typeof v === "boolean") {
|
|
222467
|
-
return v;
|
|
222468
|
-
}
|
|
222469
|
-
const s = String(v).trim().toLowerCase();
|
|
222470
|
-
if (s === "false" || s === "0") {
|
|
222471
|
-
return false;
|
|
222472
|
-
}
|
|
222473
|
-
return true;
|
|
222474
|
-
}
|
|
222475
222450
|
function validateAzureUrl(url2, fieldName) {
|
|
222476
222451
|
if (!AZURE_SUPPORTED_SCHEMES.some((s) => url2.startsWith(s))) {
|
|
222477
222452
|
throw new BadRequestError(`Azure ${fieldName} must use one of: ${AZURE_SUPPORTED_SCHEMES.join(", ")}`);
|
|
@@ -222535,6 +222510,30 @@ class ConnectionController {
|
|
|
222535
222510
|
return project.getMalloyConnection(connectionName);
|
|
222536
222511
|
}
|
|
222537
222512
|
}
|
|
222513
|
+
async fetchTable(malloyConnection, tableKey, tablePath) {
|
|
222514
|
+
try {
|
|
222515
|
+
const source = await malloyConnection.fetchTableSchema(tableKey, tablePath);
|
|
222516
|
+
if (!source) {
|
|
222517
|
+
throw new ConnectionError(`Table ${tablePath} not found`);
|
|
222518
|
+
}
|
|
222519
|
+
return {
|
|
222520
|
+
source: JSON.stringify(source),
|
|
222521
|
+
resource: tablePath,
|
|
222522
|
+
columns: (source.fields || []).map((f) => ({
|
|
222523
|
+
name: f.name,
|
|
222524
|
+
type: f.type
|
|
222525
|
+
}))
|
|
222526
|
+
};
|
|
222527
|
+
} catch (error) {
|
|
222528
|
+
const errorMessage = error instanceof Error ? error.message : typeof error === "string" ? error : JSON.stringify(error);
|
|
222529
|
+
logger.error("fetchTableSchema error", {
|
|
222530
|
+
error,
|
|
222531
|
+
tableKey,
|
|
222532
|
+
tablePath
|
|
222533
|
+
});
|
|
222534
|
+
throw new ConnectionError(errorMessage);
|
|
222535
|
+
}
|
|
222536
|
+
}
|
|
222538
222537
|
async getConnection(projectName, connectionName) {
|
|
222539
222538
|
if (!projectName || !connectionName) {
|
|
222540
222539
|
throw new BadRequestError("Connection payload is required");
|
|
@@ -222552,11 +222551,11 @@ class ConnectionController {
|
|
|
222552
222551
|
const malloyConnection = await this.getMalloyConnection(projectName, connectionName);
|
|
222553
222552
|
return getSchemasForConnection(connection, malloyConnection);
|
|
222554
222553
|
}
|
|
222555
|
-
async listTables(projectName, connectionName, schemaName,
|
|
222554
|
+
async listTables(projectName, connectionName, schemaName, tableNames) {
|
|
222556
222555
|
const project = await this.projectStore.getProject(projectName, false);
|
|
222557
222556
|
const connection = project.getApiConnection(connectionName);
|
|
222558
222557
|
const malloyConnection = await this.getMalloyConnection(projectName, connectionName);
|
|
222559
|
-
return
|
|
222558
|
+
return listTablesForSchema(connection, schemaName, malloyConnection, tableNames);
|
|
222560
222559
|
}
|
|
222561
222560
|
async getConnectionSqlSource(projectName, connectionName, sqlStatement) {
|
|
222562
222561
|
const malloyConnection = await this.getMalloyConnection(projectName, connectionName);
|
|
@@ -222571,10 +222570,6 @@ class ConnectionController {
  throw new ConnectionError(error.message);
  }
  }
- async getConnectionTableSource(projectName, connectionName, tableKey, tablePath) {
- const malloyConnection = await this.getMalloyConnection(projectName, connectionName);
- return getConnectionTableSource(malloyConnection, tableKey, tablePath);
- }
  async getTable(projectName, connectionName, schemaName, tablePath) {
  const malloyConnection = await this.getMalloyConnection(projectName, connectionName);
  const project = await this.projectStore.getProject(projectName, false);
@@ -222599,12 +222594,8 @@ class ConnectionController {
  const queryString = urlParts[1] ? `?${urlParts[1]}` : "";
  const dirPath = basePath.substring(0, basePath.lastIndexOf("/") + 1);
  const fullFileUrl = `${dirPath}${fileName}${queryString}`;
- const
- return {
- resource: tablePath,
- columns: tableSource2.columns,
- source: tableSource2.source
- };
+ const table = await this.fetchTable(malloyConnection, fileName, fullFileUrl);
+ return { ...table, resource: tablePath };
  }
  }
  }
@@ -222612,12 +222603,7 @@ class ConnectionController {
  if (!tableKey) {
  throw new Error(`Invalid tablePath: ${tablePath}`);
  }
-
- return {
- resource: tablePath,
- columns: tableSource.columns,
- source: tableSource.source
- };
+ return this.fetchTable(malloyConnection, tableKey, tablePath);
  }
  async getConnectionQueryData(projectName, connectionName, sqlStatement, options) {
  const malloyConnection = await this.getMalloyConnection(projectName, connectionName);
@@ -230423,7 +230409,7 @@ class Project {
  return this;
  }
  static async create(projectName, projectPath, connections) {
- if (!(await fs6.promises.stat(projectPath))
+ if (!(await fs6.promises.stat(projectPath))?.isDirectory()) {
  throw new ProjectNotFoundError(`Project path ${projectPath} not found`);
  }
  logger.info(`Creating project with connection configuration`);
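
This hunk, and the similar stat checks in the addPackage and projectDirExists hunks below, rely on fs.promises.stat, which rejects with ENOENT when the path does not exist, so the ?.isDirectory() guard only applies when stat resolves. A minimal sketch of an equivalent check that also treats a missing path as "not a directory"; the helper name is an assumption, not part of the package.

import { promises as fs } from "node:fs";

// Sketch only: returns false both when the path exists but is not a directory
// and when it does not exist, instead of letting the ENOENT rejection escape.
async function isDirectory(p: string): Promise<boolean> {
  try {
    return (await fs.stat(p)).isDirectory();
  } catch (err) {
    if ((err as NodeJS.ErrnoException).code === "ENOENT") return false;
    throw err; // permission errors and the like still propagate
  }
}
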
@@ -230575,7 +230561,7 @@ ${source}` : source;
  }
  async addPackage(packageName) {
  const packagePath = path7.join(this.projectPath, packageName);
- if (!await fs6.promises.access(packagePath).then(() => true).catch(() => false) || !(await fs6.promises.stat(packagePath))
+ if (!await fs6.promises.access(packagePath).then(() => true).catch(() => false) || !(await fs6.promises.stat(packagePath))?.isDirectory()) {
  throw new PackageNotFoundError(`Package ${packageName} not found`);
  }
  logger.info(`Adding package ${packageName} to project ${this.projectName}`, {
@@ -231070,18 +231056,7 @@ class ProjectStore {
  }
  async cleanupAndCreatePublisherPath() {
  const reInit = process.env.INITIALIZE_STORAGE === "true";
- try {
- const stats = await fs7.promises.stat(this.serverRootPath);
- if (!stats.isDirectory()) {
- throw new Error(`Server root path ${this.serverRootPath} exists but is not a directory`);
- }
- } catch (error) {
- if (error.code === "ENOENT") {
- await fs7.promises.mkdir(this.serverRootPath, { recursive: true });
- } else {
- throw error;
- }
- }
+ await fs7.promises.mkdir(this.serverRootPath, { recursive: true });
  if (reInit) {
  const uploadDocsPath2 = path8.join(this.serverRootPath, PUBLISHER_DATA_DIR);
  logger.info(`Reinitialization mode: Cleaning up upload documents path ${uploadDocsPath2}`);
@@ -231491,7 +231466,7 @@ class ProjectStore {
  if (projectPath.endsWith(".zip")) {
  projectPath = await this.unzipProject(projectPath);
  }
- const projectDirExists = (await fs7.promises.stat(projectPath))
+ const projectDirExists = (await fs7.promises.stat(projectPath))?.isDirectory() ?? false;
  if (projectDirExists) {
  await fs7.promises.rm(absoluteTargetPath, {
  recursive: true,
@@ -235629,6 +235604,13 @@ function initializeMcpServer(projectStore) {

  // src/server.ts
  var __filename = "/home/runner/work/publisher/publisher/packages/server/src/server.ts";
+ function normalizeQueryArray(value) {
+ if (value === undefined || value === null)
+ return;
+ if (Array.isArray(value))
+ return value.map(String);
+ return [String(value)];
+ }
  function parseArgs() {
  const args = process.argv.slice(2);
  for (let i = 0;i < args.length; i++) {
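
The new normalizeQueryArray helper collapses the shapes an Express query value can take (absent, scalar, or array) into either undefined or an array of strings, which is what the updated listTables route expects. A self-contained restatement with TypeScript annotations follows; typing the input as unknown is an assumption about how loosely typed req.query values are.

// Restatement of the helper added above, with type annotations for clarity.
function normalizeQueryArray(value: unknown): string[] | undefined {
  if (value === undefined || value === null) return undefined;
  if (Array.isArray(value)) return value.map(String);
  return [String(value)];
}

normalizeQueryArray(undefined);               // -> undefined (param omitted)
normalizeQueryArray("flights");               // -> ["flights"]
normalizeQueryArray(["flights", "airports"]); // -> ["flights", "airports"]
normalizeQueryArray(42);                      // -> ["42"] (scalars are coerced with String)
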
@@ -235922,7 +235904,7 @@ app.get(`${API_PREFIX2}/projects/:projectName/connections/:connectionName/schema
  app.get(`${API_PREFIX2}/projects/:projectName/connections/:connectionName/schemas/:schemaName/tables`, async (req, res) => {
  logger.info("req.params", { params: req.params });
  try {
- const results = await connectionController.listTables(req.params.projectName, req.params.connectionName, req.params.schemaName,
+ const results = await connectionController.listTables(req.params.projectName, req.params.connectionName, req.params.schemaName, normalizeQueryArray(req.query.tableNames));
  res.status(200).json(results);
  } catch (error) {
  logger.error(error);
@@ -235959,15 +235941,6 @@ app.post(`${API_PREFIX2}/projects/:projectName/connections/:connectionName/sqlSo
  res.status(status).json(json2);
  }
  });
- app.get(`${API_PREFIX2}/projects/:projectName/connections/:connectionName/tableSource`, async (req, res) => {
- try {
- res.status(200).json(await connectionController.getConnectionTableSource(req.params.projectName, req.params.connectionName, req.query.tableKey, req.query.tablePath));
- } catch (error) {
- logger.error(error);
- const { json: json2, status } = internalErrorToHttpError(error);
- res.status(status).json(json2);
- }
- });
  app.get(`${API_PREFIX2}/projects/:projectName/connections/:connectionName/queryData`, async (req, res) => {
  try {
  res.status(200).json(await connectionController.getConnectionQueryData(req.params.projectName, req.params.connectionName, req.query.sqlStatement, req.query.options));