@malloy-publisher/server 0.0.193 → 0.0.195
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build.ts +1 -0
- package/dist/app/api-doc.yaml +41 -5
- package/dist/app/assets/{HomePage-Di9MU3lS.js → HomePage-DbZS0N7G.js} +1 -1
- package/dist/app/assets/MainPage-CBuWkbmr.js +2 -0
- package/dist/app/assets/{ModelPage-Dx2mHWeT.js → ModelPage-Bt37smot.js} +1 -1
- package/dist/app/assets/{PackagePage-Q386Py9t.js → PackagePage-DLZe50WG.js} +1 -1
- package/dist/app/assets/{ProjectPage-WR7wPQB-.js → ProjectPage-FQTEPXP4.js} +1 -1
- package/dist/app/assets/{RouteError-stRGU4aW.js → RouteError-DefbDO7F.js} +1 -1
- package/dist/app/assets/{WorkbookPage-D3iX0djH.js → WorkbookPage-CkAo16ar.js} +1 -1
- package/dist/app/assets/{core-QH4HZQVz.es-CqlQLZdl.js → core-BrfQApxh.es-DnvCX4oH.js} +14 -14
- package/dist/app/assets/index-5eLCcNmP.css +1 -0
- package/dist/app/assets/{index-CVHzPJwN.js → index-Bu0ub036.js} +53 -53
- package/dist/app/assets/index-CkzK3JIl.js +40 -0
- package/dist/app/assets/index-CoA6HIGS.js +1742 -0
- package/dist/app/assets/{index.umd-Bp8OIhfV.js → index.umd-B6Ms2PpL.js} +1 -1
- package/dist/app/index.html +2 -2
- package/dist/server.mjs +168 -1
- package/package.json +2 -1
- package/src/config.ts +7 -2
- package/src/dto/connection.dto.spec.ts +51 -0
- package/src/dto/connection.dto.ts +41 -0
- package/src/service/connection.ts +2 -0
- package/src/service/connection_config.spec.ts +168 -0
- package/src/service/connection_config.ts +77 -0
- package/src/service/db_utils.spec.ts +161 -0
- package/src/service/db_utils.ts +131 -0
- package/src/service/project.ts +4 -0
- package/src/service/project_store.spec.ts +75 -0
- package/src/service/project_store.ts +4 -0
- package/dist/app/assets/MainPage-yZQo2HSL.js +0 -2
- package/dist/app/assets/index-CMlGQMcl.css +0 -1
- package/dist/app/assets/index-DavAceYD.js +0 -1276
- package/dist/app/assets/index-Y3Y-VRna.js +0 -676
package/dist/server.mjs
CHANGED
@@ -217105,6 +217105,7 @@ import {
   SnowflakeConnection
 } from "@malloydata/db-snowflake";
 import"@malloydata/db-trino";
+import"@malloydata/db-databricks";
 import {
   contextOverlay,
   MalloyConfig
@@ -220380,6 +220381,7 @@ import fs from "fs/promises";
 import path2 from "path";
 
 // src/service/connection_config.ts
+import { createPrivateKey } from "crypto";
 import path from "path";
 var PUBLISHER_DUCKDB_API_FIELDS = new Set(["attachedDatabases"]);
 function normalizeSnowflakePrivateKey(privateKey) {
@@ -220398,6 +220400,12 @@ function normalizeSnowflakePrivateKey(privateKey) {
     endRegex: /-----END\s+PRIVATE\s+KEY-----/i,
     beginMarker: "-----BEGIN PRIVATE KEY-----",
     endMarker: "-----END PRIVATE KEY-----"
+  },
+  {
+    beginRegex: /-----BEGIN\s+RSA\s+PRIVATE\s+KEY-----/i,
+    endRegex: /-----END\s+RSA\s+PRIVATE\s+KEY-----/i,
+    beginMarker: "-----BEGIN RSA PRIVATE KEY-----",
+    endMarker: "-----END RSA PRIVATE KEY-----"
   }
 ];
 for (const pattern of keyPatterns) {
@@ -220424,6 +220432,21 @@ ${pattern.endMarker}
     privateKeyContent += `
 `;
   }
+  if (/-----BEGIN\s+RSA\s+PRIVATE\s+KEY-----/i.test(privateKeyContent)) {
+    try {
+      privateKeyContent = createPrivateKey({
+        key: privateKeyContent,
+        format: "pem"
+      }).export({ type: "pkcs8", format: "pem" }).toString();
+    } catch (err) {
+      throw new Error(`Failed to convert Snowflake RSA private key (PKCS#1) to PKCS#8: ${err instanceof Error ? err.message : String(err)}`);
+    }
+    if (!privateKeyContent.endsWith(`
+`)) {
+      privateKeyContent += `
+`;
+    }
+  }
   return privateKeyContent;
 }
 function validateDuckdbApiSurface(connection) {
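Context for the hunk above: Snowflake's driver expects keys in PKCS#8 form (`-----BEGIN PRIVATE KEY-----`), while `openssl genrsa` and older tooling emit PKCS#1 (`-----BEGIN RSA PRIVATE KEY-----`). The new branch converts the latter with Node's built-in crypto module. A minimal standalone sketch of the same conversion (the helper name is illustrative, not part of the package):

// toPkcs8.ts — hypothetical helper mirroring the conversion in the hunk above
import { createPrivateKey } from "crypto";

function toPkcs8(pem: string): string {
  // Parse the PEM (PKCS#1 or PKCS#8) and re-export it as PKCS#8.
  return createPrivateKey({ key: pem, format: "pem" })
    .export({ type: "pkcs8", format: "pem" })
    .toString();
}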
@@ -220467,6 +220490,13 @@ function getStaticConnectionAttributes(type) {
         canPersist: true,
         canStream: false
       };
+    case "databricks":
+      return {
+        dialectName: "databricks",
+        isPool: false,
+        canPersist: true,
+        canStream: false
+      };
     case "mysql":
       return {
         dialectName: "mysql",
@@ -220544,6 +220574,28 @@ function validateConnectionShape(connection) {
         throw new Error("Trino connection configuration is missing.");
       }
       break;
+    case "databricks": {
+      const databricks = connection.databricksConnection;
+      if (!databricks) {
+        throw new Error("Databricks connection configuration is missing.");
+      }
+      if (!databricks.host) {
+        throw new Error("Databricks host is required.");
+      }
+      if (!databricks.path) {
+        throw new Error("Databricks SQL warehouse HTTP path is required.");
+      }
+      const hasToken = !!databricks.token;
+      const hasOAuth = !!databricks.oauthClientId && !!databricks.oauthClientSecret;
+      if (!hasToken && !hasOAuth) {
+        throw new Error("Databricks requires either a personal access token or OAuth M2M client ID and secret.");
+      }
+      const hasDefaultCatalog = !!databricks.defaultCatalog;
+      if (!hasDefaultCatalog) {
+        throw new Error("Databricks default catalog is required.");
+      }
+      break;
+    }
     case "snowflake": {
       const snowflakeConnection = connection.snowflakeConnection;
       if (!snowflakeConnection) {
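In short, the new `validateConnectionShape` branch requires a Databricks connection to carry a host, a SQL warehouse HTTP path, a default catalog, and either a personal access token or a complete OAuth M2M pair. Two hypothetical configs that would pass these checks (all values are placeholders taken from the test fixtures later in this diff):

// Personal-access-token auth
const withToken = {
  name: "dbx",
  type: "databricks",
  databricksConnection: {
    host: "dbc-xxxxxxxx-xxxx.cloud.databricks.com",
    path: "/sql/1.0/warehouses/abc123",
    token: "dapiXXXX",
    defaultCatalog: "main",
  },
};

// OAuth M2M auth: both client ID and secret are required
const withOAuth = {
  name: "dbx-oauth",
  type: "databricks",
  databricksConnection: {
    host: "dbc-xxxxxxxx-xxxx.cloud.databricks.com",
    path: "/sql/1.0/warehouses/abc123",
    oauthClientId: "client-id",
    oauthClientSecret: "client-secret",
    defaultCatalog: "main",
  },
};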
@@ -220661,6 +220713,21 @@ function assembleProjectConnections(connections = [], projectPath = "") {
       };
       break;
     }
+    case "databricks": {
+      const databricks = connection.databricksConnection;
+      pojo.connections[connection.name] = {
+        is: "databricks",
+        host: databricks?.host,
+        path: databricks?.path,
+        token: databricks?.token,
+        oauthClientId: databricks?.oauthClientId,
+        oauthClientSecret: databricks?.oauthClientSecret,
+        defaultCatalog: databricks?.defaultCatalog,
+        defaultSchema: databricks?.defaultSchema,
+        setupSQL: databricks?.setupSQL
+      };
+      break;
+    }
     case "duckdb": {
       if (attachedDatabases.some((database) => database.name === connection.name)) {
         throw new Error(`DuckDB attached database names cannot conflict with connection name ${connection.name}`);
@@ -222066,6 +222133,60 @@ async function getSchemasForTrino(connection, malloyConnection) {
     throw new Error(`Failed to get schemas for Trino connection ${connection.name}: ${error.message}`);
   }
 }
+async function getSchemasForDatabricks(connection, malloyConnection) {
+  if (!connection.databricksConnection) {
+    throw new Error("Databricks connection is required");
+  }
+  try {
+    const configuredSchema = connection.databricksConnection.defaultSchema;
+    let allRows = [];
+    if (connection.databricksConnection.defaultCatalog) {
+      const catalog = connection.databricksConnection.defaultCatalog;
+      const result = await malloyConnection.runSQL(`SELECT schema_name FROM ${catalog}.information_schema.schemata ORDER BY schema_name`);
+      const rows = standardizeRunSQLResult2(result);
+      allRows = rows.map((row) => {
+        const r = row;
+        return {
+          catalog,
+          schema: String(r.schema_name ?? r.Schema ?? "")
+        };
+      });
+    } else {
+      const catalogsResult = await malloyConnection.runSQL(`SHOW CATALOGS`);
+      const catalogNames = standardizeRunSQLResult2(catalogsResult).map((row) => {
+        const r = row;
+        return String(r.catalog ?? r.Catalog ?? r.catalog_name ?? "");
+      });
+      for (const catalog of catalogNames) {
+        try {
+          const result = await malloyConnection.runSQL(`SELECT schema_name FROM ${catalog}.information_schema.schemata ORDER BY schema_name`);
+          const rows = standardizeRunSQLResult2(result);
+          for (const row of rows) {
+            const r = row;
+            allRows.push({
+              catalog,
+              schema: String(r.schema_name ?? r.Schema ?? "")
+            });
+          }
+        } catch (catalogError) {
+          logger.warn(`Failed to list schemas for Databricks catalog ${catalog}`, { error: catalogError });
+        }
+      }
+    }
+    logger.info("allRows for Schemas for Databricks", { allRows });
+    return allRows.map(({ catalog, schema }) => {
+      const name = connection.databricksConnection?.defaultCatalog ? schema : `${catalog}.${schema}`;
+      return {
+        name,
+        isHidden: ["information_schema"].includes(schema),
+        isDefault: configuredSchema ? schema === configuredSchema : false
+      };
+    });
+  } catch (error) {
+    logger.error(`Error getting schemas for Databricks connection ${connection.name}`, { error });
+    throw new Error(`Failed to get schemas for Databricks connection ${connection.name}: ${error.message}`);
+  }
+}
 async function getSchemasForDuckDB(connection, malloyConnection) {
   if (!connection.duckdbConnection) {
     throw new Error("DuckDB connection is required");
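Note the naming convention `getSchemasForDatabricks` establishes: with a `defaultCatalog` configured, schema entries use bare names ("default"); without one, every catalog is enumerated and entries are catalog-qualified ("main.default"). A toy illustration of that mapping (not package code):

// Illustrative only: schema naming in the two modes
const rows = [{ catalog: "main", schema: "default" }];
const hasDefaultCatalog = false; // true would yield "default" instead
const names = rows.map(({ catalog, schema }) =>
  hasDefaultCatalog ? schema : `${catalog}.${schema}`,
);
// names === ["main.default"]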
@@ -222176,6 +222297,8 @@ async function getSchemasForConnection(connection, malloyConnection) {
       return getSchemasForSnowflake(connection, malloyConnection);
     case "trino":
       return getSchemasForTrino(connection, malloyConnection);
+    case "databricks":
+      return getSchemasForDatabricks(connection, malloyConnection);
     case "duckdb":
       return getSchemasForDuckDB(connection, malloyConnection);
     case "motherduck":
@@ -222358,6 +222481,8 @@ async function listTablesForSchema(connection, schemaName, malloyConnection, tableNames) {
       return listTablesForSnowflake(connection, schemaName, malloyConnection, tableNames);
     case "trino":
       return listTablesForTrino(connection, schemaName, malloyConnection, tableNames);
+    case "databricks":
+      return listTablesForDatabricks(connection, schemaName, malloyConnection, tableNames);
     case "duckdb":
       return listTablesForDuckDB(connection, schemaName, malloyConnection, tableNames);
     case "motherduck":
@@ -222486,6 +222611,37 @@ async function listTablesForTrino(connection, schemaName, malloyConnection, tableNames) {
     throw new Error(`Failed to get tables for Trino schema ${schemaName} in connection ${connection.name}: ${error.message}`);
   }
 }
+async function listTablesForDatabricks(connection, schemaName, malloyConnection, tableNames) {
+  if (!connection.databricksConnection) {
+    throw new Error("Databricks connection is required");
+  }
+  try {
+    let catalogPrefix;
+    let schemaOnly;
+    let resourcePrefix;
+    if (connection.databricksConnection.defaultCatalog) {
+      catalogPrefix = `${connection.databricksConnection.defaultCatalog}.`;
+      schemaOnly = schemaName;
+      resourcePrefix = `${connection.databricksConnection.defaultCatalog}.${schemaName}`;
+    } else {
+      const dotIdx = schemaName.indexOf(".");
+      if (dotIdx > 0) {
+        catalogPrefix = `${schemaName.substring(0, dotIdx)}.`;
+        schemaOnly = schemaName.substring(dotIdx + 1);
+      } else {
+        catalogPrefix = "";
+        schemaOnly = schemaName;
+      }
+      resourcePrefix = schemaName;
+    }
+    const result = await malloyConnection.runSQL(`SELECT table_name, column_name, data_type FROM ${catalogPrefix}information_schema.columns WHERE table_schema = '${schemaOnly}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`);
+    const rows = standardizeRunSQLResult2(result);
+    return groupColumnRowsIntoTables(rows, (t) => `${resourcePrefix}.${t}`);
+  } catch (error) {
+    logger.error(`Error getting tables for Databricks schema ${schemaName} in connection ${connection.name}`, { error });
+    throw new Error(`Failed to get tables for Databricks schema ${schemaName} in connection ${connection.name}: ${error.message}`);
+  }
+}
 async function listTablesForDuckDB(connection, schemaName, malloyConnection, tableNames) {
   if (!connection.duckdbConnection) {
     throw new Error("DuckDB connection is required");
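`listTablesForDatabricks` reverses that convention: when no `defaultCatalog` is configured, a qualified name such as "main.default" is split at the first dot into a catalog prefix and a bare schema name before querying `information_schema.columns`. A sketch of the split (hypothetical helper, not in the package):

// Hypothetical helper mirroring the split logic in the hunk above
function splitSchemaName(schemaName: string) {
  const dotIdx = schemaName.indexOf(".");
  if (dotIdx > 0) {
    return {
      catalogPrefix: `${schemaName.substring(0, dotIdx)}.`,
      schemaOnly: schemaName.substring(dotIdx + 1),
    };
  }
  return { catalogPrefix: "", schemaOnly: schemaName };
}
// splitSchemaName("main.default") → { catalogPrefix: "main.", schemaOnly: "default" }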
@@ -228884,7 +229040,12 @@ var getPublisherConfig = (serverRoot) => {
     const fileContent = fs2.readFileSync(publisherConfigPath, "utf8");
     rawConfig = JSON.parse(fileContent);
   } catch (error) {
-
+    const message = error instanceof Error ? error.message : String(error);
+    logger.error(`Failed to parse ${publisherConfigPath}: ${message}. Using default empty config.`, {
+      path: publisherConfigPath,
+      error: message,
+      stack: error instanceof Error ? error.stack : undefined
+    });
     return {
       frozenConfig: false,
       projects: []
@@ -231417,6 +231578,9 @@ class Project {
       this.metadata.readme = payload.readme;
       await this.writeProjectReadme(payload.readme);
     }
+    if (payload.materializationStorage !== undefined) {
+      this.metadata.materializationStorage = payload.materializationStorage;
+    }
     if (payload.connections) {
       const payloadConnections = payload.connections;
       await this.runConnectionUpdateExclusive(async () => {
@@ -232268,6 +232432,9 @@ class ProjectStore {
     if (!newProject.metadata)
       newProject.metadata = {};
     newProject.metadata.location = absoluteProjectPath;
+    if (project.materializationStorage !== undefined) {
+      newProject.metadata.materializationStorage = project.materializationStorage;
+    }
     this.projects.set(projectName, newProject);
     project?.packages?.forEach((_package) => {
      if (_package.name) {
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@malloy-publisher/server",
   "description": "Malloy Publisher Server",
-  "version": "0.0.193",
+  "version": "0.0.195",
   "main": "dist/server.mjs",
   "bin": {
     "malloy-publisher": "dist/server.mjs"
@@ -35,6 +35,7 @@
   "@azure/storage-blob": "^12.26.0",
   "@google-cloud/storage": "^7.16.0",
   "@malloydata/db-bigquery": "^0.0.383",
+  "@malloydata/db-databricks": "^0.0.383",
   "@malloydata/db-duckdb": "^0.0.383",
   "@malloydata/db-mysql": "^0.0.383",
   "@malloydata/db-postgres": "^0.0.383",

package/src/config.ts
CHANGED
@@ -90,9 +90,14 @@ export const getPublisherConfig = (serverRoot: string): PublisherConfig => {
     const fileContent = fs.readFileSync(publisherConfigPath, "utf8");
     rawConfig = JSON.parse(fileContent);
   } catch (error) {
+    const message = error instanceof Error ? error.message : String(error);
     logger.error(
-      `Failed to parse ${
-      {
+      `Failed to parse ${publisherConfigPath}: ${message}. Using default empty config.`,
+      {
+        path: publisherConfigPath,
+        error: message,
+        stack: error instanceof Error ? error.stack : undefined,
+      },
     );
     return {
       frozenConfig: false,

package/src/dto/connection.dto.spec.ts
CHANGED

@@ -5,6 +5,7 @@ import { validate } from "class-validator";
 import {
   BigqueryConnectionDto,
   ConnectionDto,
+  DatabricksConnectionDto,
   PostgresConnectionDto,
   SnowflakeConnectionDto,
   MysqlConnectionDto,
@@ -100,6 +101,56 @@ describe("dto/connection", () => {
     expect(errors).toHaveLength(0);
   });
 
+  it("should validate a valid DatabricksConnection object", async () => {
+    const validData = {
+      host: "dbc-xxxxxxxx-xxxx.cloud.databricks.com",
+      path: "/sql/1.0/warehouses/abc123",
+      token: "dapiXXXX",
+      oauthClientId: "client-id",
+      oauthClientSecret: "client-secret",
+      defaultCatalog: "main",
+      defaultSchema: "default",
+      setupSQL: "USE CATALOG main",
+    };
+    const databricksConnection = plainToInstance(
+      DatabricksConnectionDto,
+      validData,
+    );
+
+    const errors = await validate(databricksConnection);
+    expect(errors).toHaveLength(0);
+  });
+
+  it("should return errors for invalid DatabricksConnection object", async () => {
+    const invalidData = {
+      host: 123, // Invalid type
+      path: false, // Invalid type
+    };
+    const databricksConnection = plainToInstance(
+      DatabricksConnectionDto,
+      invalidData,
+    );
+
+    const errors = await validate(databricksConnection);
+    expect(errors).toHaveLength(2);
+  });
+
+  it("should validate a valid Connection object with databricks type", async () => {
+    const validData = {
+      name: "My Databricks Connection",
+      type: "databricks",
+      databricksConnection: {
+        host: "dbc-xxxxxxxx-xxxx.cloud.databricks.com",
+        path: "/sql/1.0/warehouses/abc123",
+        token: "dapiXXXX",
+      },
+    };
+    const connection = plainToInstance(ConnectionDto, validData);
+
+    const errors = await validate(connection);
+    expect(errors).toHaveLength(0);
+  });
+
   it("should validate a valid Connection object with postgres type", async () => {
     const validData = {
       name: "My Postgres Connection",

package/src/dto/connection.dto.ts
CHANGED

@@ -129,6 +129,40 @@ export class SnowflakeConnectionDto {
   responseTimeoutMilliseconds?: number;
 }
 
+export class DatabricksConnectionDto {
+  @IsOptional()
+  @IsString()
+  host?: string;
+
+  @IsOptional()
+  @IsString()
+  path?: string;
+
+  @IsOptional()
+  @IsString()
+  token?: string;
+
+  @IsOptional()
+  @IsString()
+  oauthClientId?: string;
+
+  @IsOptional()
+  @IsString()
+  oauthClientSecret?: string;
+
+  @IsOptional()
+  @IsString()
+  defaultCatalog?: string;
+
+  @IsOptional()
+  @IsString()
+  defaultSchema?: string;
+
+  @IsOptional()
+  @IsString()
+  setupSQL?: string;
+}
+
 export class TrinoConnectionDto {
   @IsOptional()
   @IsString()
@@ -176,6 +210,7 @@ export class ConnectionDto implements ApiConnection {
     "bigquery",
     "snowflake",
     "trino",
+    "databricks",
     "mysql",
     "duckdb",
     "motherduck",
@@ -186,6 +221,7 @@ export class ConnectionDto implements ApiConnection {
     | "bigquery"
     | "snowflake"
     | "trino"
+    | "databricks"
     | "mysql"
     | "duckdb"
     | "motherduck"
@@ -211,6 +247,11 @@ export class ConnectionDto implements ApiConnection {
   @Type(() => TrinoConnectionDto)
   TrinoConnection?: TrinoConnectionDto;
 
+  @IsOptional()
+  @ValidateNested()
+  @Type(() => DatabricksConnectionDto)
+  databricksConnection?: DatabricksConnectionDto;
+
   @IsOptional()
   @ValidateNested()
   @Type(() => DuckdbConnectionDto)

package/src/service/connection.ts
CHANGED

@@ -12,6 +12,7 @@ import {
 } from "@malloydata/db-snowflake";
 import type { TrinoConnection } from "@malloydata/db-trino";
 import "@malloydata/db-trino";
+import "@malloydata/db-databricks";
 import {
   Connection,
   contextOverlay,
@@ -50,6 +51,7 @@ export type InternalConnection = ApiConnection & {
   bigqueryConnection?: components["schemas"]["BigqueryConnection"];
   snowflakeConnection?: components["schemas"]["SnowflakeConnection"];
   trinoConnection?: components["schemas"]["TrinoConnection"];
+  databricksConnection?: components["schemas"]["DatabricksConnection"];
   mysqlConnection?: components["schemas"]["MysqlConnection"];
   duckdbConnection?: components["schemas"]["DuckdbConnection"];
   ducklakeConnection?: components["schemas"]["DucklakeConnection"];

package/src/service/connection_config.spec.ts
ADDED

@@ -0,0 +1,168 @@
+import { generateKeyPairSync } from "crypto";
+import { describe, expect, it } from "bun:test";
+import {
+  assembleProjectConnections,
+  normalizeSnowflakePrivateKey,
+} from "./connection_config";
+import { components } from "../api";
+
+type ApiConnection = components["schemas"]["Connection"];
+
+describe("assembleProjectConnections — databricks", () => {
+  const validBase: ApiConnection = {
+    name: "dbx",
+    type: "databricks",
+    databricksConnection: {
+      host: "dbc.cloud.databricks.com",
+      path: "/sql/1.0/warehouses/abc",
+      token: "dapiXXXX",
+      defaultCatalog: "main",
+      defaultSchema: "default",
+    },
+  };
+
+  it("emits a databricks core entry with all known fields preserved", () => {
+    const { pojo, apiConnections } = assembleProjectConnections([validBase]);
+
+    const entry = pojo.connections["dbx"];
+    expect(entry.is).toBe("databricks");
+    expect(entry.host).toBe("dbc.cloud.databricks.com");
+    expect(entry.path).toBe("/sql/1.0/warehouses/abc");
+    expect(entry.token).toBe("dapiXXXX");
+    expect(entry.defaultCatalog).toBe("main");
+    expect(entry.defaultSchema).toBe("default");
+
+    expect(apiConnections).toHaveLength(1);
+    expect(apiConnections[0].attributes?.dialectName).toBe("databricks");
+  });
+
+  it("accepts OAuth M2M auth (clientId + secret) without a token", () => {
+    const conn: ApiConnection = {
+      name: "dbx-oauth",
+      type: "databricks",
+      databricksConnection: {
+        host: "dbc.cloud.databricks.com",
+        path: "/sql/1.0/warehouses/abc",
+        oauthClientId: "client-id",
+        oauthClientSecret: "client-secret",
+        defaultCatalog: "main",
+      },
+    };
+    const { pojo } = assembleProjectConnections([conn]);
+    const entry = pojo.connections["dbx-oauth"];
+    expect(entry.is).toBe("databricks");
+    expect(entry.oauthClientId).toBe("client-id");
+    expect(entry.oauthClientSecret).toBe("client-secret");
+    expect(entry.token).toBeUndefined();
+  });
+
+  it("rejects connections missing the databricksConnection block", () => {
+    const conn: ApiConnection = {
+      name: "dbx",
+      type: "databricks",
+    };
+    expect(() => assembleProjectConnections([conn])).toThrow(
+      "Databricks connection configuration is missing.",
+    );
+  });
+
+  it("rejects connections with a missing host", () => {
+    const conn: ApiConnection = {
+      ...validBase,
+      databricksConnection: {
+        ...validBase.databricksConnection!,
+        host: undefined,
+      },
+    };
+    expect(() => assembleProjectConnections([conn])).toThrow(
+      "Databricks host is required",
+    );
+  });
+
+  it("rejects connections with a missing path", () => {
+    const conn: ApiConnection = {
+      ...validBase,
+      databricksConnection: {
+        ...validBase.databricksConnection!,
+        path: undefined,
+      },
+    };
+    expect(() => assembleProjectConnections([conn])).toThrow(
+      "Databricks SQL warehouse HTTP path is required",
+    );
+  });
+
+  it("rejects when defaultCatalog is missing", () => {
+    const conn: ApiConnection = {
+      name: "dbx",
+      type: "databricks",
+      databricksConnection: {
+        host: "dbc.cloud.databricks.com",
+        path: "/sql/1.0/warehouses/abc",
+        token: "dapiXXXX",
+        // defaultCatalog deliberately omitted
+      },
+    };
+    expect(() => assembleProjectConnections([conn])).toThrow(
+      "Databricks default catalog is required",
+    );
+  });
+
+  it("rejects when neither token nor full OAuth credentials are provided", () => {
+    const conn: ApiConnection = {
+      name: "dbx",
+      type: "databricks",
+      databricksConnection: {
+        host: "dbc.cloud.databricks.com",
+        path: "/sql/1.0/warehouses/abc",
+        // Only oauthClientId, missing secret → rejected.
+        oauthClientId: "client-id",
+        defaultCatalog: "main",
+      },
+    };
+    expect(() => assembleProjectConnections([conn])).toThrow(
+      "Databricks requires",
+    );
+  });
+});
+
+describe("normalizeSnowflakePrivateKey", () => {
+  const { privateKey: pkcs8Pem } = generateKeyPairSync("rsa", {
+    modulusLength: 2048,
+    privateKeyEncoding: { type: "pkcs8", format: "pem" },
+    publicKeyEncoding: { type: "spki", format: "pem" },
+  });
+  const { privateKey: pkcs1Pem } = generateKeyPairSync("rsa", {
+    modulusLength: 2048,
+    privateKeyEncoding: { type: "pkcs1", format: "pem" },
+    publicKeyEncoding: { type: "pkcs1", format: "pem" },
+  });
+
+  it("passes a multi-line PKCS#8 key through and adds a trailing newline", () => {
+    const trimmed = (pkcs8Pem as string).trimEnd();
+    const result = normalizeSnowflakePrivateKey(trimmed);
+    expect(result).toContain("-----BEGIN PRIVATE KEY-----");
+    expect(result.endsWith("\n")).toBe(true);
+  });
+
+  it("converts a multi-line PKCS#1 RSA key to PKCS#8", () => {
+    const result = normalizeSnowflakePrivateKey(pkcs1Pem as string);
+    expect(result).toContain("-----BEGIN PRIVATE KEY-----");
+    expect(result).not.toContain("BEGIN RSA PRIVATE KEY");
+    expect(result.endsWith("\n")).toBe(true);
+  });
+
+  it("converts a single-line PKCS#1 RSA key to PKCS#8", () => {
+    const singleLine = (pkcs1Pem as string).replace(/\n/g, "");
+    const result = normalizeSnowflakePrivateKey(singleLine);
+    expect(result).toContain("-----BEGIN PRIVATE KEY-----");
+    expect(result).not.toContain("BEGIN RSA PRIVATE KEY");
+  });
+
+  it("reconstructs a single-line PKCS#8 key without conversion", () => {
+    const singleLine = (pkcs8Pem as string).replace(/\n/g, "");
+    const result = normalizeSnowflakePrivateKey(singleLine);
+    expect(result.startsWith("-----BEGIN PRIVATE KEY-----\n")).toBe(true);
+    expect(result.endsWith("-----END PRIVATE KEY-----\n")).toBe(true);
+  });
+});