@malloy-publisher/server 0.0.122 → 0.0.123
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/app/api-doc.yaml +20 -9
- package/dist/app/assets/{HomePage-z6NLKLPp.js → HomePage-DXKA9tWd.js} +1 -1
- package/dist/app/assets/{MainPage-C9McOjLb.js → MainPage-BdYxYmkS.js} +1 -1
- package/dist/app/assets/{ModelPage-DjlTuT2G.js → ModelPage-mQttYUXZ.js} +1 -1
- package/dist/app/assets/{PackagePage-CDh_gnAZ.js → PackagePage-8dgNIkwK.js} +1 -1
- package/dist/app/assets/{ProjectPage-vyvZZWAB.js → ProjectPage-D3lTBcOF.js} +1 -1
- package/dist/app/assets/{RouteError-FbxztVnz.js → RouteError-B1FrgvdL.js} +1 -1
- package/dist/app/assets/{WorkbookPage-DNXFxaeZ.js → WorkbookPage-uhPZOv8J.js} +1 -1
- package/dist/app/assets/{index-DHFp2DLx.js → index-2wN22fP5.js} +1 -1
- package/dist/app/assets/{index-a6hx_UrL.js → index-CfR2coZN.js} +4 -4
- package/dist/app/assets/{index-BMyI9XZS.js → index-DiPnMvhX.js} +1 -1
- package/dist/app/assets/{index.umd-Cv1NyZL8.js → index.umd-DaPh4mA_.js} +1 -1
- package/dist/app/index.html +1 -1
- package/dist/server.js +234 -150
- package/package.json +1 -1
- package/src/service/connection.ts +333 -213
- package/src/service/db_utils.ts +23 -47
- package/src/service/project.ts +5 -2
- package/tests/integration/mcp/mcp_transport.integration.spec.ts +4 -4
package/dist/server.js
CHANGED
@@ -131921,138 +131921,209 @@ function validateAndBuildTrinoConfig(trinoConfig) {
     throw new Error(`Invalid Trino connection: expected "http://server:port" (no password) or "https://server:port" (with username and password).`);
   }
 }
+async function installAndLoadExtension(connection, extensionName, fromCommunity = false) {
+  try {
+    const installCommand = fromCommunity ? `FORCE INSTALL '${extensionName}' FROM community;` : `INSTALL ${extensionName};`;
+    await connection.runSQL(installCommand);
+    logger2.info(`${extensionName} extension installed`);
+  } catch (error) {
+    logger2.info(`${extensionName} extension already installed or install skipped`, { error });
+  }
+  await connection.runSQL(`LOAD ${extensionName};`);
+  logger2.info(`${extensionName} extension loaded`);
+}
+async function isDatabaseAttached(connection, dbName) {
+  try {
+    const existingDatabases = await connection.runSQL("SHOW DATABASES");
+    const rows = Array.isArray(existingDatabases) ? existingDatabases : existingDatabases.rows || [];
+    logger2.debug(`Existing databases:`, rows);
+    return rows.some((row) => Object.values(row).some((value) => typeof value === "string" && value === dbName));
+  } catch (error) {
+    logger2.warn(`Failed to check existing databases:`, error);
+    return false;
+  }
+}
+function sanitizeSecretName(name) {
+  return `secret_${name.replace(/[^a-zA-Z0-9_]/g, "_")}`;
+}
+function escapeSQL(value) {
+  return value.replace(/'/g, "''");
+}
+function handleAlreadyAttachedError(error, dbName) {
+  if (error instanceof Error && error.message.includes("already exists")) {
+    logger2.info(`Database ${dbName} is already attached, skipping`);
+  } else {
+    throw error;
+  }
+}
+async function attachBigQuery(connection, attachedDb) {
+  if (!attachedDb.bigqueryConnection) {
+    throw new Error(`BigQuery connection configuration missing for: ${attachedDb.name}`);
+  }
+  const config = attachedDb.bigqueryConnection;
+  let projectId = config.defaultProjectId;
+  let serviceAccountJson;
+  if (config.serviceAccountKeyJson) {
+    const keyData = JSON.parse(config.serviceAccountKeyJson);
+    const requiredFields = [
+      "type",
+      "project_id",
+      "private_key",
+      "client_email"
+    ];
+    for (const field of requiredFields) {
+      if (!keyData[field]) {
+        throw new Error(`Invalid service account key: missing "${field}" field`);
+      }
+    }
+    if (keyData.type !== "service_account") {
+      throw new Error('Invalid service account key: incorrect "type" field');
+    }
+    projectId = keyData.project_id || config.defaultProjectId;
+    serviceAccountJson = config.serviceAccountKeyJson;
+    logger2.info(`Using service account: ${keyData.client_email}`);
+  }
+  if (!projectId || !serviceAccountJson) {
+    throw new Error(`BigQuery project_id and service account key required for: ${attachedDb.name}`);
+  }
+  await installAndLoadExtension(connection, "bigquery", true);
+  const secretName = sanitizeSecretName(`bigquery_${attachedDb.name}`);
+  const escapedJson = escapeSQL(serviceAccountJson);
+  const createSecretCommand = `
+    CREATE OR REPLACE SECRET ${secretName} (
+      TYPE BIGQUERY,
+      SCOPE 'bq://${projectId}',
+      SERVICE_ACCOUNT_JSON '${escapedJson}'
+    );
+  `;
+  await connection.runSQL(createSecretCommand);
+  logger2.info(`Created BigQuery secret: ${secretName} for project: ${projectId}`);
+  const attachCommand = `ATTACH 'project=${projectId}' AS ${attachedDb.name} (TYPE bigquery, READ_ONLY);`;
+  await connection.runSQL(attachCommand);
+  logger2.info(`Successfully attached BigQuery database: ${attachedDb.name}`);
+}
+async function attachSnowflake(connection, attachedDb) {
+  if (!attachedDb.snowflakeConnection) {
+    throw new Error(`Snowflake connection configuration missing for: ${attachedDb.name}`);
+  }
+  const config = attachedDb.snowflakeConnection;
+  const requiredFields = {
+    account: config.account,
+    username: config.username,
+    password: config.password
+  };
+  for (const [field, value] of Object.entries(requiredFields)) {
+    if (!value) {
+      throw new Error(`Snowflake ${field} is required for: ${attachedDb.name}`);
+    }
+  }
+  await installAndLoadExtension(connection, "snowflake", true);
+  try {
+    const version = await connection.runSQL("SELECT snowflake_version();");
+    logger2.info(`Snowflake ADBC driver verified with version:`, version.rows);
+  } catch (error) {
+    throw new Error(`Snowflake ADBC driver verification failed: ${error instanceof Error ? error.message : String(error)}`);
+  }
+  const params = {
+    account: escapeSQL(config.account || ""),
+    user: escapeSQL(config.username || ""),
+    password: escapeSQL(config.password || ""),
+    database: config.database ? escapeSQL(config.database) : undefined,
+    warehouse: config.warehouse ? escapeSQL(config.warehouse) : undefined,
+    schema: config.schema ? escapeSQL(config.schema) : undefined,
+    role: config.role ? escapeSQL(config.role) : undefined
+  };
+  const attachParts = [
+    `account=${params.account}`,
+    `user=${params.user}`,
+    `password=${params.password}`
+  ];
+  if (params.database)
+    attachParts.push(`database=${params.database}`);
+  if (params.warehouse)
+    attachParts.push(`warehouse=${params.warehouse}`);
+  const secretString = `CREATE OR REPLACE SECRET ${attachedDb.name}_secret (
+    TYPE snowflake,
+    ACCOUNT '${params.account}',
+    USER '${params.user}',
+    PASSWORD '${params.password}',
+    DATABASE '${params.database}',
+    WAREHOUSE '${params.warehouse}'
+  );`;
+  await connection.runSQL(secretString);
+  const testresult = await connection.runSQL(`SELECT * FROM snowflake_scan('SELECT 1', '${attachedDb.name}_secret');`);
+  logger2.info(`Testing Snowflake connection:`, testresult.rows);
+  const attachCommand = `ATTACH '${attachedDb.name}' AS ${attachedDb.name} (TYPE snowflake, SECRET ${attachedDb.name}_secret, READ_ONLY);`;
+  await connection.runSQL(attachCommand);
+  logger2.info(`Successfully attached Snowflake database: ${attachedDb.name}`);
+}
+async function attachPostgres(connection, attachedDb) {
+  if (!attachedDb.postgresConnection) {
+    throw new Error(`PostgreSQL connection configuration missing for: ${attachedDb.name}`);
+  }
+  await installAndLoadExtension(connection, "postgres");
+  const config = attachedDb.postgresConnection;
+  let attachString;
+  if (config.connectionString) {
+    attachString = config.connectionString;
+  } else {
+    const parts = [];
+    if (config.host)
+      parts.push(`host=${config.host}`);
+    if (config.port)
+      parts.push(`port=${config.port}`);
+    if (config.databaseName)
+      parts.push(`dbname=${config.databaseName}`);
+    if (config.userName)
+      parts.push(`user=${config.userName}`);
+    if (config.password)
+      parts.push(`password=${config.password}`);
+    attachString = parts.join(" ");
+  }
+  const attachCommand = `ATTACH '${attachString}' AS ${attachedDb.name} (TYPE postgres, READ_ONLY);`;
+  await connection.runSQL(attachCommand);
+  logger2.info(`Successfully attached PostgreSQL database: ${attachedDb.name}`);
+}
+async function attachMotherDuck(connection, attachedDb) {
+  if (!attachedDb.motherDuckConnection) {
+    throw new Error(`MotherDuck connection configuration missing for: ${attachedDb.name}`);
+  }
+  const config = attachedDb.motherDuckConnection;
+  if (!config.database) {
+    throw new Error(`MotherDuck database name is required for: ${attachedDb.name}`);
+  }
+  await installAndLoadExtension(connection, "motherduck");
+  if (config.accessToken) {
+    const escapedToken = escapeSQL(config.accessToken);
+    await connection.runSQL(`SET motherduck_token = '${escapedToken}';`);
+  }
+  const connectionString = `md:${config.database}`;
+  logger2.info(`Connecting to MotherDuck database: ${config.database} as ${attachedDb.name}`);
+  const attachCommand = `ATTACH '${connectionString}' AS ${attachedDb.name} (TYPE motherduck, READ_ONLY);`;
+  await connection.runSQL(attachCommand);
+  logger2.info(`Successfully attached MotherDuck database: ${attachedDb.name}`);
+}
 async function attachDatabasesToDuckDB(duckdbConnection, attachedDatabases) {
+  const attachHandlers = {
+    bigquery: attachBigQuery,
+    snowflake: attachSnowflake,
+    postgres: attachPostgres,
+    motherduck: attachMotherDuck
+  };
   for (const attachedDb of attachedDatabases) {
     try {
+      if (await isDatabaseAttached(duckdbConnection, attachedDb.name || "")) {
+        logger2.info(`Database ${attachedDb.name} is already attached, skipping`);
+        continue;
+      }
+      const handler = attachHandlers[attachedDb.type];
+      if (!handler) {
+        throw new Error(`Unsupported database type: ${attachedDb.type}`);
+      }
       try {
-
-
-
-        logger2.debug(`Existing databases:`, rows);
-        const isAlreadyAttached = rows.some((row) => {
-          return Object.values(row).some((value) => typeof value === "string" && value === attachedDb.name);
-        });
-        if (isAlreadyAttached) {
-          logger2.info(`Database ${attachedDb.name} is already attached, skipping`);
-          continue;
-        }
-      } catch (error) {
-        logger2.warn(`Failed to check existing databases, proceeding with attachment:`, error);
-      }
-      switch (attachedDb.type) {
-        case "bigquery": {
-          if (!attachedDb.bigqueryConnection) {
-            throw new Error(`BigQuery connection configuration is missing for attached database: ${attachedDb.name}`);
-          }
-          await duckdbConnection.runSQL("INSTALL bigquery FROM community;");
-          await duckdbConnection.runSQL("LOAD bigquery;");
-          const bigqueryConfig = attachedDb.bigqueryConnection;
-          const attachParams = new URLSearchParams;
-          if (!bigqueryConfig.defaultProjectId) {
-            throw new Error(`BigQuery defaultProjectId is required for attached database: ${attachedDb.name}`);
-          }
-          attachParams.set("project", bigqueryConfig.defaultProjectId);
-          if (bigqueryConfig.serviceAccountKeyJson) {
-            const serviceAccountKeyPath = import_path2.default.join(TEMP_DIR_PATH, `duckdb-${attachedDb.name}-${v4_default()}-service-account-key.json`);
-            await import_promises.default.writeFile(serviceAccountKeyPath, bigqueryConfig.serviceAccountKeyJson);
-            attachParams.set("service_account_key", serviceAccountKeyPath);
-          }
-          const attachCommand = `ATTACH '${attachParams.toString()}' AS ${attachedDb.name} (TYPE bigquery, READ_ONLY);`;
-          try {
-            await duckdbConnection.runSQL(attachCommand);
-            logger2.info(`Successfully attached BigQuery database: ${attachedDb.name}`);
-          } catch (attachError) {
-            if (attachError instanceof Error && attachError.message && attachError.message.includes("already exists")) {
-              logger2.info(`BigQuery database ${attachedDb.name} is already attached, skipping`);
-            } else {
-              throw attachError;
-            }
-          }
-          break;
-        }
-        case "snowflake": {
-          if (!attachedDb.snowflakeConnection) {
-            throw new Error(`Snowflake connection configuration is missing for attached database: ${attachedDb.name}`);
-          }
-          await duckdbConnection.runSQL("INSTALL snowflake FROM community;");
-          await duckdbConnection.runSQL("LOAD snowflake;");
-          const snowflakeConfig = attachedDb.snowflakeConnection;
-          const attachParams = new URLSearchParams;
-          if (snowflakeConfig.account) {
-            attachParams.set("account", snowflakeConfig.account);
-          }
-          if (snowflakeConfig.username) {
-            attachParams.set("username", snowflakeConfig.username);
-          }
-          if (snowflakeConfig.password) {
-            attachParams.set("password", snowflakeConfig.password);
-          }
-          if (snowflakeConfig.database) {
-            attachParams.set("database", snowflakeConfig.database);
-          }
-          if (snowflakeConfig.warehouse) {
-            attachParams.set("warehouse", snowflakeConfig.warehouse);
-          }
-          if (snowflakeConfig.role) {
-            attachParams.set("role", snowflakeConfig.role);
-          }
-          const attachCommand = `ATTACH '${attachParams.toString()}' AS ${attachedDb.name} (TYPE snowflake, READ_ONLY);`;
-          try {
-            await duckdbConnection.runSQL(attachCommand);
-            logger2.info(`Successfully attached Snowflake database: ${attachedDb.name}`);
-          } catch (attachError) {
-            if (attachError instanceof Error && attachError.message && attachError.message.includes("already exists")) {
-              logger2.info(`Snowflake database ${attachedDb.name} is already attached, skipping`);
-            } else {
-              throw attachError;
-            }
-          }
-          break;
-        }
-        case "postgres": {
-          if (!attachedDb.postgresConnection) {
-            throw new Error(`PostgreSQL connection configuration is missing for attached database: ${attachedDb.name}`);
-          }
-          await duckdbConnection.runSQL("INSTALL postgres FROM community;");
-          await duckdbConnection.runSQL("LOAD postgres;");
-          const postgresConfig = attachedDb.postgresConnection;
-          let attachString;
-          if (postgresConfig.connectionString) {
-            attachString = postgresConfig.connectionString;
-          } else {
-            const params = new URLSearchParams;
-            if (postgresConfig.host) {
-              params.set("host", postgresConfig.host);
-            }
-            if (postgresConfig.port) {
-              params.set("port", postgresConfig.port.toString());
-            }
-            if (postgresConfig.databaseName) {
-              params.set("dbname", postgresConfig.databaseName);
-            }
-            if (postgresConfig.userName) {
-              params.set("user", postgresConfig.userName);
-            }
-            if (postgresConfig.password) {
-              params.set("password", postgresConfig.password);
-            }
-            attachString = params.toString();
-          }
-          const attachCommand = `ATTACH '${attachString}' AS ${attachedDb.name} (TYPE postgres, READ_ONLY);`;
-          try {
-            await duckdbConnection.runSQL(attachCommand);
-            logger2.info(`Successfully attached PostgreSQL database: ${attachedDb.name}`);
-          } catch (attachError) {
-            if (attachError instanceof Error && attachError.message && attachError.message.includes("already exists")) {
-              logger2.info(`PostgreSQL database ${attachedDb.name} is already attached, skipping`);
-            } else {
-              throw attachError;
-            }
-          }
-          break;
-        }
-        default:
-          throw new Error(`Unsupported attached database type: ${attachedDb.type}`);
+        await handler(duckdbConnection, attachedDb);
+      } catch (attachError) {
+        handleAlreadyAttachedError(attachError, attachedDb.name || "");
       }
     } catch (error) {
       logger2.error(`Failed to attach database ${attachedDb.name}:`, error);
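
The core of the hunk above is replacing a per-warehouse `switch` with a handler table keyed on the attached database's `type`. A minimal sketch of that dispatch shape, with illustrative types that are not the package's own:

```ts
// Sketch of the handler-table dispatch used above (types are illustrative assumptions).
type AttachedDb = { name?: string; type: string };
type DuckConn = { runSQL(sql: string): Promise<unknown> };
type AttachHandler = (conn: DuckConn, db: AttachedDb) => Promise<void>;

const attachHandlers: Record<string, AttachHandler> = {
  bigquery: async () => { /* CREATE OR REPLACE SECRET ... TYPE BIGQUERY; then ATTACH 'project=...' */ },
  snowflake: async () => { /* CREATE OR REPLACE SECRET ... TYPE snowflake; then ATTACH ... */ },
};

async function attachAll(conn: DuckConn, dbs: AttachedDb[]): Promise<void> {
  for (const db of dbs) {
    const handler = attachHandlers[db.type];
    if (!handler) throw new Error(`Unsupported database type: ${db.type}`);
    // Per the diff, "already exists" failures are downgraded to a log-and-skip by the caller.
    await handler(conn, db);
  }
}
```

Note the credential handling also changed: BigQuery keys are no longer written to a temp file but passed to DuckDB as a `CREATE OR REPLACE SECRET`, and Snowflake/MotherDuck gain dedicated handlers.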
@@ -132060,7 +132131,7 @@ async function attachDatabasesToDuckDB(duckdbConnection, attachedDatabases) {
     }
   }
 }
-async function createProjectConnections(connections = []) {
+async function createProjectConnections(connections = [], projectPath = "") {
   const connectionMap = new Map;
   const processedConnections = new Set;
   const apiConnections = [];
@@ -132177,6 +132248,15 @@ async function createProjectConnections(connections = []) {
         break;
       }
       case "duckdb": {
+        if (!connection.duckdbConnection) {
+          throw new Error("DuckDB connection configuration is missing.");
+        }
+        const duckdbConnection = new import_db_duckdb.DuckDBConnection(connection.name, ":memory:", projectPath);
+        if (connection.duckdbConnection.attachedDatabases && Array.isArray(connection.duckdbConnection.attachedDatabases) && connection.duckdbConnection.attachedDatabases.length > 0) {
+          await attachDatabasesToDuckDB(duckdbConnection, connection.duckdbConnection.attachedDatabases);
+        }
+        connectionMap.set(connection.name, duckdbConnection);
+        connection.attributes = getConnectionAttributes(duckdbConnection);
         break;
       }
       default: {
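
With the new `case "duckdb"` branch, a DuckDB connection can carry attached external databases. A hypothetical connection entry that would exercise it (field names follow the diff; every value here is made up):

```ts
// Hypothetical config for the new DuckDB branch (placeholder values).
const connection = {
  name: "analytics",
  type: "duckdb",
  duckdbConnection: {
    attachedDatabases: [
      {
        name: "pg_main",
        type: "postgres",
        postgresConnection: {
          host: "localhost",
          port: 5432,
          databaseName: "app",
          userName: "reader",
          password: "secret",
        },
      },
    ],
  },
};
// For this entry, attachDatabasesToDuckDB would issue:
//   ATTACH 'host=localhost port=5432 dbname=app user=reader password=secret' AS pg_main (TYPE postgres, READ_ONLY);
```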
@@ -132255,6 +132335,12 @@ async function testConnectionConfig(connectionConfig) {
   if (!connectionConfig.name) {
     throw new Error("Connection name is required");
   }
+  if (connectionConfig.type === "duckdb") {
+    return {
+      status: "ok",
+      errorMessage: ""
+    };
+  }
   const { malloyConnections } = await createProjectConnections([connectionConfig]);
   const connection = malloyConnections.get(connectionConfig.name);
   if (!connection) {
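
An in-memory DuckDB connection has no remote endpoint to probe, so validation now short-circuits for it. A sketch of the resulting behavior (the call shape is taken from the diff, not an exported API):

```ts
// Per the early return above, a DuckDB config is reported healthy without a round trip.
const result = await testConnectionConfig({ name: "local", type: "duckdb" });
// result => { status: "ok", errorMessage: "" }
```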
@@ -132409,20 +132495,24 @@ async function getSchemasForConnection(connection, malloyConnection) {
     throw new Error("DuckDB connection is required");
   }
   try {
-    const result = await malloyConnection.runSQL("SELECT DISTINCT schema_name FROM information_schema.schemata ORDER BY schema_name");
+    const result = await malloyConnection.runSQL("SELECT DISTINCT schema_name,catalog_name FROM information_schema.schemata ORDER BY catalog_name,schema_name", { rowLimit: 1000 });
     const rows = standardizeRunSQLResult(result);
-    const hasAttachedDatabases = connection.duckdbConnection?.attachedDatabases && Array.isArray(connection.duckdbConnection.attachedDatabases) && connection.duckdbConnection.attachedDatabases.length > 0;
     return rows.map((row) => {
       const typedRow = row;
-
-
-        const attachedDbName = connection.duckdbConnection.attachedDatabases[0].name;
-        schemaName = `${attachedDbName}.${schemaName}`;
-      }
+      const schemaName = typedRow.schema_name;
+      const catalogName = typedRow.catalog_name;
       return {
-        name: schemaName
-        isHidden:
-
+        name: `${catalogName}.${schemaName}`,
+        isHidden: [
+          "information_schema",
+          "performance_schema",
+          "",
+          "SNOWFLAKE",
+          "information_schema",
+          "pg_catalog",
+          "pg_toast"
+        ].includes(schemaName) || ["md_information_schema", "system"].includes(catalogName),
+        isDefault: catalogName === "main"
       };
     });
   } catch (error) {
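
Schema listings are now catalog-qualified, so schemas from attached databases no longer collide with the in-memory catalog. A sketch of the new mapping with invented rows:

```ts
// Sketch of the catalog-qualified mapping above (sample rows are invented for illustration).
const rows = [
  { catalog_name: "main", schema_name: "main" },
  { catalog_name: "pg_main", schema_name: "public" },
  { catalog_name: "pg_main", schema_name: "pg_catalog" },
];
const schemas = rows.map(({ catalog_name, schema_name }) => ({
  name: `${catalog_name}.${schema_name}`, // e.g. "pg_main.public"
  isHidden:
    ["information_schema", "performance_schema", "", "SNOWFLAKE", "pg_catalog", "pg_toast"].includes(schema_name) ||
    ["md_information_schema", "system"].includes(catalog_name),
  isDefault: catalog_name === "main", // "main" is the in-memory DuckDB catalog
}));
// => [{ name: "main.main", isHidden: false, isDefault: true },
//     { name: "pg_main.public", isHidden: false, isDefault: false },
//     { name: "pg_main.pg_catalog", isHidden: true, isDefault: false }]
```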
@@ -132568,15 +132658,9 @@ async function listTablesForSchema(connection, schemaName, malloyConnection) {
     throw new Error("DuckDB connection is required");
   }
   try {
-    const
-
-
-      const attachedDbName = connection.duckdbConnection.attachedDatabases[0].name;
-      if (schemaName.startsWith(`${attachedDbName}.`)) {
-        actualSchemaName = schemaName.substring(attachedDbName.length + 1);
-      }
-    }
-    const result = await malloyConnection.runSQL(`SELECT table_name FROM information_schema.tables WHERE table_schema = '${actualSchemaName}' ORDER BY table_name`);
+    const catalogName = schemaName.split(".")[0];
+    schemaName = schemaName.split(".")[1];
+    const result = await malloyConnection.runSQL(`SELECT table_name FROM information_schema.tables WHERE table_schema = '${schemaName}' and table_catalog = '${catalogName}' ORDER BY table_name`, { rowLimit: 1000 });
     const rows = standardizeRunSQLResult(result);
     return rows.map((row) => {
       const typedRow = row;
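
The table lookup now expects the `catalog.schema` names produced by `getSchemasForConnection` and splits on the first dot:

```ts
// Illustrative split, matching the hunk above.
const qualified = "pg_main.public"; // value produced by getSchemasForConnection
const catalogName = qualified.split(".")[0]; // "pg_main"
const schemaName = qualified.split(".")[1]; // "public"
// ...WHERE table_schema = 'public' and table_catalog = 'pg_main'
```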
@@ -139347,7 +139431,7 @@ class Project {
     }
     if (payload.connections) {
       logger2.info(`Updating ${payload.connections.length} connections for project ${this.projectName}`);
-      const { malloyConnections, apiConnections } = await createProjectConnections(payload.connections);
+      const { malloyConnections, apiConnections } = await createProjectConnections(payload.connections, this.projectPath);
       this.malloyConnections = malloyConnections;
       this.apiConnections = apiConnections;
       logger2.info(`Successfully updated connections for project ${this.projectName}`, {
@@ -139363,7 +139447,7 @@ class Project {
       throw new ProjectNotFoundError(`Project path ${projectPath} not found`);
     }
     logger2.info(`Creating project with connection configuration`);
-    const { malloyConnections, apiConnections } = await createProjectConnections(connections);
+    const { malloyConnections, apiConnections } = await createProjectConnections(connections, projectPath);
     logger2.info(`Loaded ${malloyConnections.size + apiConnections.length} connections for project ${projectName}`, {
       malloyConnections,
       apiConnections