@malloy-publisher/server 0.0.193 → 0.0.194

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/build.ts +1 -0
  2. package/dist/app/api-doc.yaml +41 -5
  3. package/dist/app/assets/{HomePage-Di9MU3lS.js → HomePage-DbZS0N7G.js} +1 -1
  4. package/dist/app/assets/MainPage-CBuWkbmr.js +2 -0
  5. package/dist/app/assets/{ModelPage-Dx2mHWeT.js → ModelPage-Bt37smot.js} +1 -1
  6. package/dist/app/assets/{PackagePage-Q386Py9t.js → PackagePage-DLZe50WG.js} +1 -1
  7. package/dist/app/assets/{ProjectPage-WR7wPQB-.js → ProjectPage-FQTEPXP4.js} +1 -1
  8. package/dist/app/assets/{RouteError-stRGU4aW.js → RouteError-DefbDO7F.js} +1 -1
  9. package/dist/app/assets/{WorkbookPage-D3iX0djH.js → WorkbookPage-CkAo16ar.js} +1 -1
  10. package/dist/app/assets/{core-QH4HZQVz.es-CqlQLZdl.js → core-BrfQApxh.es-DnvCX4oH.js} +14 -14
  11. package/dist/app/assets/index-5eLCcNmP.css +1 -0
  12. package/dist/app/assets/{index-CVHzPJwN.js → index-Bu0ub036.js} +53 -53
  13. package/dist/app/assets/index-CkzK3JIl.js +40 -0
  14. package/dist/app/assets/index-CoA6HIGS.js +1742 -0
  15. package/dist/app/assets/{index.umd-Bp8OIhfV.js → index.umd-B6Ms2PpL.js} +1 -1
  16. package/dist/app/index.html +2 -2
  17. package/dist/server.mjs +140 -1
  18. package/package.json +2 -1
  19. package/src/config.ts +7 -2
  20. package/src/dto/connection.dto.spec.ts +51 -0
  21. package/src/dto/connection.dto.ts +41 -0
  22. package/src/service/connection.ts +2 -0
  23. package/src/service/connection_config.spec.ts +123 -0
  24. package/src/service/connection_config.ts +48 -0
  25. package/src/service/db_utils.spec.ts +161 -0
  26. package/src/service/db_utils.ts +131 -0
  27. package/dist/app/assets/MainPage-yZQo2HSL.js +0 -2
  28. package/dist/app/assets/index-CMlGQMcl.css +0 -1
  29. package/dist/app/assets/index-DavAceYD.js +0 -1276
  30. package/dist/app/assets/index-Y3Y-VRna.js +0 -676
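
The substantive change in this release is new Databricks connection support; the renamed asset files above are rebuild churn from the frontend bundle. As an orientation aid, here is a minimal sketch of a connection entry that satisfies the new validateConnectionShape rules shown in the diffs below. The field names come from the diffs; the values are invented placeholders:

// Hedged sketch: field names from the diffs below, values are placeholders.
const connection = {
  name: "my-databricks",
  type: "databricks",
  databricksConnection: {
    host: "dbc-xxxxxxxx-xxxx.cloud.databricks.com", // required
    path: "/sql/1.0/warehouses/abc123", // required: SQL warehouse HTTP path
    token: "dapiXXXX", // either a personal access token...
    // ...or both oauthClientId and oauthClientSecret (OAuth M2M)
    defaultCatalog: "main", // required
    defaultSchema: "default", // optional
    setupSQL: "USE CATALOG main", // optional
  },
};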
package/dist/server.mjs CHANGED
@@ -217105,6 +217105,7 @@ import {
   SnowflakeConnection
 } from "@malloydata/db-snowflake";
 import "@malloydata/db-trino";
+import "@malloydata/db-databricks";
 import {
   contextOverlay,
   MalloyConfig
@@ -220467,6 +220468,13 @@ function getStaticConnectionAttributes(type) {
        canPersist: true,
        canStream: false
      };
+    case "databricks":
+      return {
+        dialectName: "databricks",
+        isPool: false,
+        canPersist: true,
+        canStream: false
+      };
    case "mysql":
      return {
        dialectName: "mysql",
@@ -220544,6 +220552,28 @@ function validateConnectionShape(connection) {
        throw new Error("Trino connection configuration is missing.");
      }
      break;
+    case "databricks": {
+      const databricks = connection.databricksConnection;
+      if (!databricks) {
+        throw new Error("Databricks connection configuration is missing.");
+      }
+      if (!databricks.host) {
+        throw new Error("Databricks host is required.");
+      }
+      if (!databricks.path) {
+        throw new Error("Databricks SQL warehouse HTTP path is required.");
+      }
+      const hasToken = !!databricks.token;
+      const hasOAuth = !!databricks.oauthClientId && !!databricks.oauthClientSecret;
+      if (!hasToken && !hasOAuth) {
+        throw new Error("Databricks requires either a personal access token or OAuth M2M client ID and secret.");
+      }
+      const hasDefaultCatalog = !!databricks.defaultCatalog;
+      if (!hasDefaultCatalog) {
+        throw new Error("Databricks default catalog is required.");
+      }
+      break;
+    }
    case "snowflake": {
      const snowflakeConnection = connection.snowflakeConnection;
      if (!snowflakeConnection) {
@@ -220661,6 +220691,21 @@ function assembleProjectConnections(connections = [], projectPath = "") {
      };
      break;
    }
+    case "databricks": {
+      const databricks = connection.databricksConnection;
+      pojo.connections[connection.name] = {
+        is: "databricks",
+        host: databricks?.host,
+        path: databricks?.path,
+        token: databricks?.token,
+        oauthClientId: databricks?.oauthClientId,
+        oauthClientSecret: databricks?.oauthClientSecret,
+        defaultCatalog: databricks?.defaultCatalog,
+        defaultSchema: databricks?.defaultSchema,
+        setupSQL: databricks?.setupSQL
+      };
+      break;
+    }
    case "duckdb": {
      if (attachedDatabases.some((database) => database.name === connection.name)) {
        throw new Error(`DuckDB attached database names cannot conflict with connection name ${connection.name}`);
@@ -222066,6 +222111,60 @@ async function getSchemasForTrino(connection, malloyConnection) {
    throw new Error(`Failed to get schemas for Trino connection ${connection.name}: ${error.message}`);
  }
}
+async function getSchemasForDatabricks(connection, malloyConnection) {
+  if (!connection.databricksConnection) {
+    throw new Error("Databricks connection is required");
+  }
+  try {
+    const configuredSchema = connection.databricksConnection.defaultSchema;
+    let allRows = [];
+    if (connection.databricksConnection.defaultCatalog) {
+      const catalog = connection.databricksConnection.defaultCatalog;
+      const result = await malloyConnection.runSQL(`SELECT schema_name FROM ${catalog}.information_schema.schemata ORDER BY schema_name`);
+      const rows = standardizeRunSQLResult2(result);
+      allRows = rows.map((row) => {
+        const r = row;
+        return {
+          catalog,
+          schema: String(r.schema_name ?? r.Schema ?? "")
+        };
+      });
+    } else {
+      const catalogsResult = await malloyConnection.runSQL(`SHOW CATALOGS`);
+      const catalogNames = standardizeRunSQLResult2(catalogsResult).map((row) => {
+        const r = row;
+        return String(r.catalog ?? r.Catalog ?? r.catalog_name ?? "");
+      });
+      for (const catalog of catalogNames) {
+        try {
+          const result = await malloyConnection.runSQL(`SELECT schema_name FROM ${catalog}.information_schema.schemata ORDER BY schema_name`);
+          const rows = standardizeRunSQLResult2(result);
+          for (const row of rows) {
+            const r = row;
+            allRows.push({
+              catalog,
+              schema: String(r.schema_name ?? r.Schema ?? "")
+            });
+          }
+        } catch (catalogError) {
+          logger.warn(`Failed to list schemas for Databricks catalog ${catalog}`, { error: catalogError });
+        }
+      }
+    }
+    logger.info("allRows for Schemas for Databricks", { allRows });
+    return allRows.map(({ catalog, schema }) => {
+      const name = connection.databricksConnection?.defaultCatalog ? schema : `${catalog}.${schema}`;
+      return {
+        name,
+        isHidden: ["information_schema"].includes(schema),
+        isDefault: configuredSchema ? schema === configuredSchema : false
+      };
+    });
+  } catch (error) {
+    logger.error(`Error getting schemas for Databricks connection ${connection.name}`, { error });
+    throw new Error(`Failed to get schemas for Databricks connection ${connection.name}: ${error.message}`);
+  }
+}
 async function getSchemasForDuckDB(connection, malloyConnection) {
   if (!connection.duckdbConnection) {
     throw new Error("DuckDB connection is required");
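
Worth noting in the hunk above: getSchemasForDatabricks has two modes. With defaultCatalog configured, it queries only that catalog's information_schema.schemata and returns bare schema names; without it, it walks SHOW CATALOGS and returns catalog-qualified names. A minimal sketch of that naming rule (the helper name is ours, not the package's):

// Sketch of the display-name rule applied at the end of getSchemasForDatabricks.
function schemaDisplayName(
  catalog: string,
  schema: string,
  defaultCatalog?: string,
): string {
  // Bare schema when a default catalog is set; otherwise qualify with the catalog.
  return defaultCatalog ? schema : `${catalog}.${schema}`;
}
// schemaDisplayName("main", "sales", "main") === "sales"
// schemaDisplayName("main", "sales")         === "main.sales"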
@@ -222176,6 +222275,8 @@ async function getSchemasForConnection(connection, malloyConnection) {
      return getSchemasForSnowflake(connection, malloyConnection);
    case "trino":
      return getSchemasForTrino(connection, malloyConnection);
+    case "databricks":
+      return getSchemasForDatabricks(connection, malloyConnection);
    case "duckdb":
      return getSchemasForDuckDB(connection, malloyConnection);
    case "motherduck":
@@ -222358,6 +222459,8 @@ async function listTablesForSchema(connection, schemaName, malloyConnection, tab
      return listTablesForSnowflake(connection, schemaName, malloyConnection, tableNames);
    case "trino":
      return listTablesForTrino(connection, schemaName, malloyConnection, tableNames);
+    case "databricks":
+      return listTablesForDatabricks(connection, schemaName, malloyConnection, tableNames);
    case "duckdb":
      return listTablesForDuckDB(connection, schemaName, malloyConnection, tableNames);
    case "motherduck":
@@ -222486,6 +222589,37 @@ async function listTablesForTrino(connection, schemaName, malloyConnection, tabl
    throw new Error(`Failed to get tables for Trino schema ${schemaName} in connection ${connection.name}: ${error.message}`);
  }
}
+async function listTablesForDatabricks(connection, schemaName, malloyConnection, tableNames) {
+  if (!connection.databricksConnection) {
+    throw new Error("Databricks connection is required");
+  }
+  try {
+    let catalogPrefix;
+    let schemaOnly;
+    let resourcePrefix;
+    if (connection.databricksConnection.defaultCatalog) {
+      catalogPrefix = `${connection.databricksConnection.defaultCatalog}.`;
+      schemaOnly = schemaName;
+      resourcePrefix = `${connection.databricksConnection.defaultCatalog}.${schemaName}`;
+    } else {
+      const dotIdx = schemaName.indexOf(".");
+      if (dotIdx > 0) {
+        catalogPrefix = `${schemaName.substring(0, dotIdx)}.`;
+        schemaOnly = schemaName.substring(dotIdx + 1);
+      } else {
+        catalogPrefix = "";
+        schemaOnly = schemaName;
+      }
+      resourcePrefix = schemaName;
+    }
+    const result = await malloyConnection.runSQL(`SELECT table_name, column_name, data_type FROM ${catalogPrefix}information_schema.columns WHERE table_schema = '${schemaOnly}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`);
+    const rows = standardizeRunSQLResult2(result);
+    return groupColumnRowsIntoTables(rows, (t) => `${resourcePrefix}.${t}`);
+  } catch (error) {
+    logger.error(`Error getting tables for Databricks schema ${schemaName} in connection ${connection.name}`, { error });
+    throw new Error(`Failed to get tables for Databricks schema ${schemaName} in connection ${connection.name}: ${error.message}`);
+  }
+}
 async function listTablesForDuckDB(connection, schemaName, malloyConnection, tableNames) {
   if (!connection.duckdbConnection) {
     throw new Error("DuckDB connection is required");
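
listTablesForDatabricks applies the inverse convention: when no defaultCatalog is configured, an incoming schemaName of the form "catalog.schema" is split back into a catalog prefix for the information_schema.columns query and a bare schema for the WHERE clause. A self-contained sketch of that split (helper name is ours):

// Sketch of the catalog/schema split performed in listTablesForDatabricks.
function splitSchemaName(schemaName: string, defaultCatalog?: string) {
  if (defaultCatalog) {
    // Catalog fixed by config; schemaName is already bare.
    return { catalogPrefix: `${defaultCatalog}.`, schemaOnly: schemaName };
  }
  const dotIdx = schemaName.indexOf(".");
  if (dotIdx > 0) {
    return {
      catalogPrefix: `${schemaName.substring(0, dotIdx)}.`,
      schemaOnly: schemaName.substring(dotIdx + 1),
    };
  }
  return { catalogPrefix: "", schemaOnly: schemaName };
}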
@@ -228884,7 +229018,12 @@ var getPublisherConfig = (serverRoot) => {
    const fileContent = fs2.readFileSync(publisherConfigPath, "utf8");
    rawConfig = JSON.parse(fileContent);
  } catch (error) {
-    logger.error(`Failed to parse ${PUBLISHER_CONFIG_NAME}. Using default empty config.`, { error });
+    const message = error instanceof Error ? error.message : String(error);
+    logger.error(`Failed to parse ${publisherConfigPath}: ${message}. Using default empty config.`, {
+      path: publisherConfigPath,
+      error: message,
+      stack: error instanceof Error ? error.stack : undefined
+    });
    return {
      frozenConfig: false,
      projects: []
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@malloy-publisher/server",
   "description": "Malloy Publisher Server",
-  "version": "0.0.193",
+  "version": "0.0.194",
   "main": "dist/server.mjs",
   "bin": {
     "malloy-publisher": "dist/server.mjs"
@@ -35,6 +35,7 @@
     "@azure/storage-blob": "^12.26.0",
     "@google-cloud/storage": "^7.16.0",
     "@malloydata/db-bigquery": "^0.0.383",
+    "@malloydata/db-databricks": "^0.0.383",
     "@malloydata/db-duckdb": "^0.0.383",
     "@malloydata/db-mysql": "^0.0.383",
     "@malloydata/db-postgres": "^0.0.383",
package/src/config.ts CHANGED
@@ -90,9 +90,14 @@ export const getPublisherConfig = (serverRoot: string): PublisherConfig => {
        const fileContent = fs.readFileSync(publisherConfigPath, "utf8");
        rawConfig = JSON.parse(fileContent);
    } catch (error) {
+        const message = error instanceof Error ? error.message : String(error);
        logger.error(
-            `Failed to parse ${PUBLISHER_CONFIG_NAME}. Using default empty config.`,
-            { error },
+            `Failed to parse ${publisherConfigPath}: ${message}. Using default empty config.`,
+            {
+                path: publisherConfigPath,
+                error: message,
+                stack: error instanceof Error ? error.stack : undefined,
+            },
        );
        return {
            frozenConfig: false,
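
The same logging rework appears in both the bundled server.mjs above and this source file: the catch block now reports which config file failed to parse and why, rather than a fixed constant name. A minimal sketch of the new message construction, assuming a hypothetical malformed publisher.config.json path:

// Hedged sketch (hypothetical path) of the reworked error message.
try {
  JSON.parse("{ not valid json");
} catch (error) {
  const message = error instanceof Error ? error.message : String(error);
  console.error(
    `Failed to parse /srv/root/publisher.config.json: ${message}. Using default empty config.`,
  );
}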
package/src/dto/connection.dto.spec.ts CHANGED
@@ -5,6 +5,7 @@ import { validate } from "class-validator";
 import {
     BigqueryConnectionDto,
     ConnectionDto,
+    DatabricksConnectionDto,
     PostgresConnectionDto,
     SnowflakeConnectionDto,
     MysqlConnectionDto,
@@ -100,6 +101,56 @@ describe("dto/connection", () => {
         expect(errors).toHaveLength(0);
     });
 
+    it("should validate a valid DatabricksConnection object", async () => {
+        const validData = {
+            host: "dbc-xxxxxxxx-xxxx.cloud.databricks.com",
+            path: "/sql/1.0/warehouses/abc123",
+            token: "dapiXXXX",
+            oauthClientId: "client-id",
+            oauthClientSecret: "client-secret",
+            defaultCatalog: "main",
+            defaultSchema: "default",
+            setupSQL: "USE CATALOG main",
+        };
+        const databricksConnection = plainToInstance(
+            DatabricksConnectionDto,
+            validData,
+        );
+
+        const errors = await validate(databricksConnection);
+        expect(errors).toHaveLength(0);
+    });
+
+    it("should return errors for invalid DatabricksConnection object", async () => {
+        const invalidData = {
+            host: 123, // Invalid type
+            path: false, // Invalid type
+        };
+        const databricksConnection = plainToInstance(
+            DatabricksConnectionDto,
+            invalidData,
+        );
+
+        const errors = await validate(databricksConnection);
+        expect(errors).toHaveLength(2);
+    });
+
+    it("should validate a valid Connection object with databricks type", async () => {
+        const validData = {
+            name: "My Databricks Connection",
+            type: "databricks",
+            databricksConnection: {
+                host: "dbc-xxxxxxxx-xxxx.cloud.databricks.com",
+                path: "/sql/1.0/warehouses/abc123",
+                token: "dapiXXXX",
+            },
+        };
+        const connection = plainToInstance(ConnectionDto, validData);
+
+        const errors = await validate(connection);
+        expect(errors).toHaveLength(0);
+    });
+
     it("should validate a valid Connection object with postgres type", async () => {
         const validData = {
             name: "My Postgres Connection",
package/src/dto/connection.dto.ts CHANGED
@@ -129,6 +129,40 @@ export class SnowflakeConnectionDto {
     responseTimeoutMilliseconds?: number;
 }
 
+export class DatabricksConnectionDto {
+    @IsOptional()
+    @IsString()
+    host?: string;
+
+    @IsOptional()
+    @IsString()
+    path?: string;
+
+    @IsOptional()
+    @IsString()
+    token?: string;
+
+    @IsOptional()
+    @IsString()
+    oauthClientId?: string;
+
+    @IsOptional()
+    @IsString()
+    oauthClientSecret?: string;
+
+    @IsOptional()
+    @IsString()
+    defaultCatalog?: string;
+
+    @IsOptional()
+    @IsString()
+    defaultSchema?: string;
+
+    @IsOptional()
+    @IsString()
+    setupSQL?: string;
+}
+
 export class TrinoConnectionDto {
     @IsOptional()
     @IsString()
@@ -176,6 +210,7 @@ export class ConnectionDto implements ApiConnection {
         "bigquery",
         "snowflake",
         "trino",
+        "databricks",
         "mysql",
         "duckdb",
         "motherduck",
@@ -186,6 +221,7 @@
         | "bigquery"
         | "snowflake"
         | "trino"
+        | "databricks"
         | "mysql"
         | "duckdb"
         | "motherduck"
@@ -211,6 +247,11 @@
     @Type(() => TrinoConnectionDto)
     TrinoConnection?: TrinoConnectionDto;
 
+    @IsOptional()
+    @ValidateNested()
+    @Type(() => DatabricksConnectionDto)
+    databricksConnection?: DatabricksConnectionDto;
+
     @IsOptional()
     @ValidateNested()
     @Type(() => DuckdbConnectionDto)
package/src/service/connection.ts CHANGED
@@ -12,6 +12,7 @@ import {
 } from "@malloydata/db-snowflake";
 import type { TrinoConnection } from "@malloydata/db-trino";
 import "@malloydata/db-trino";
+import "@malloydata/db-databricks";
 import {
     Connection,
     contextOverlay,
@@ -50,6 +51,7 @@ export type InternalConnection = ApiConnection & {
     bigqueryConnection?: components["schemas"]["BigqueryConnection"];
     snowflakeConnection?: components["schemas"]["SnowflakeConnection"];
     trinoConnection?: components["schemas"]["TrinoConnection"];
+    databricksConnection?: components["schemas"]["DatabricksConnection"];
     mysqlConnection?: components["schemas"]["MysqlConnection"];
     duckdbConnection?: components["schemas"]["DuckdbConnection"];
     ducklakeConnection?: components["schemas"]["DucklakeConnection"];
package/src/service/connection_config.spec.ts ADDED
@@ -0,0 +1,123 @@
+import { describe, expect, it } from "bun:test";
+import { assembleProjectConnections } from "./connection_config";
+import { components } from "../api";
+
+type ApiConnection = components["schemas"]["Connection"];
+
+describe("assembleProjectConnections — databricks", () => {
+    const validBase: ApiConnection = {
+        name: "dbx",
+        type: "databricks",
+        databricksConnection: {
+            host: "dbc.cloud.databricks.com",
+            path: "/sql/1.0/warehouses/abc",
+            token: "dapiXXXX",
+            defaultCatalog: "main",
+            defaultSchema: "default",
+        },
+    };
+
+    it("emits a databricks core entry with all known fields preserved", () => {
+        const { pojo, apiConnections } = assembleProjectConnections([validBase]);
+
+        const entry = pojo.connections["dbx"];
+        expect(entry.is).toBe("databricks");
+        expect(entry.host).toBe("dbc.cloud.databricks.com");
+        expect(entry.path).toBe("/sql/1.0/warehouses/abc");
+        expect(entry.token).toBe("dapiXXXX");
+        expect(entry.defaultCatalog).toBe("main");
+        expect(entry.defaultSchema).toBe("default");
+
+        expect(apiConnections).toHaveLength(1);
+        expect(apiConnections[0].attributes?.dialectName).toBe("databricks");
+    });
+
+    it("accepts OAuth M2M auth (clientId + secret) without a token", () => {
+        const conn: ApiConnection = {
+            name: "dbx-oauth",
+            type: "databricks",
+            databricksConnection: {
+                host: "dbc.cloud.databricks.com",
+                path: "/sql/1.0/warehouses/abc",
+                oauthClientId: "client-id",
+                oauthClientSecret: "client-secret",
+                defaultCatalog: "main",
+            },
+        };
+        const { pojo } = assembleProjectConnections([conn]);
+        const entry = pojo.connections["dbx-oauth"];
+        expect(entry.is).toBe("databricks");
+        expect(entry.oauthClientId).toBe("client-id");
+        expect(entry.oauthClientSecret).toBe("client-secret");
+        expect(entry.token).toBeUndefined();
+    });
+
+    it("rejects connections missing the databricksConnection block", () => {
+        const conn: ApiConnection = {
+            name: "dbx",
+            type: "databricks",
+        };
+        expect(() => assembleProjectConnections([conn])).toThrow(
+            "Databricks connection configuration is missing.",
+        );
+    });
+
+    it("rejects connections with a missing host", () => {
+        const conn: ApiConnection = {
+            ...validBase,
+            databricksConnection: {
+                ...validBase.databricksConnection!,
+                host: undefined,
+            },
+        };
+        expect(() => assembleProjectConnections([conn])).toThrow(
+            "Databricks host is required",
+        );
+    });
+
+    it("rejects connections with a missing path", () => {
+        const conn: ApiConnection = {
+            ...validBase,
+            databricksConnection: {
+                ...validBase.databricksConnection!,
+                path: undefined,
+            },
+        };
+        expect(() => assembleProjectConnections([conn])).toThrow(
+            "Databricks SQL warehouse HTTP path is required",
+        );
+    });
+
+    it("rejects when defaultCatalog is missing", () => {
+        const conn: ApiConnection = {
+            name: "dbx",
+            type: "databricks",
+            databricksConnection: {
+                host: "dbc.cloud.databricks.com",
+                path: "/sql/1.0/warehouses/abc",
+                token: "dapiXXXX",
+                // defaultCatalog deliberately omitted
+            },
+        };
+        expect(() => assembleProjectConnections([conn])).toThrow(
+            "Databricks default catalog is required",
+        );
+    });
+
+    it("rejects when neither token nor full OAuth credentials are provided", () => {
+        const conn: ApiConnection = {
+            name: "dbx",
+            type: "databricks",
+            databricksConnection: {
+                host: "dbc.cloud.databricks.com",
+                path: "/sql/1.0/warehouses/abc",
+                // Only oauthClientId, missing secret → rejected.
+                oauthClientId: "client-id",
+                defaultCatalog: "main",
+            },
+        };
+        expect(() => assembleProjectConnections([conn])).toThrow(
+            "Databricks requires",
+        );
+    });
+});
package/src/service/connection_config.ts CHANGED
@@ -139,6 +139,13 @@ function getStaticConnectionAttributes(
                canPersist: true,
                canStream: false,
            };
+        case "databricks":
+            return {
+                dialectName: "databricks",
+                isPool: false,
+                canPersist: true,
+                canStream: false,
+            };
        case "mysql":
            return {
                dialectName: "mysql",
@@ -245,6 +252,31 @@ function validateConnectionShape(connection: ApiConnection): void {
                throw new Error("Trino connection configuration is missing.");
            }
            break;
+        case "databricks": {
+            const databricks = connection.databricksConnection;
+            if (!databricks) {
+                throw new Error("Databricks connection configuration is missing.");
+            }
+            if (!databricks.host) {
+                throw new Error("Databricks host is required.");
+            }
+            if (!databricks.path) {
+                throw new Error("Databricks SQL warehouse HTTP path is required.");
+            }
+            const hasToken = !!databricks.token;
+            const hasOAuth =
+                !!databricks.oauthClientId && !!databricks.oauthClientSecret;
+            if (!hasToken && !hasOAuth) {
+                throw new Error(
+                    "Databricks requires either a personal access token or OAuth M2M client ID and secret.",
+                );
+            }
+            const hasDefaultCatalog = !!databricks.defaultCatalog;
+            if (!hasDefaultCatalog) {
+                throw new Error("Databricks default catalog is required.");
+            }
+            break;
+        }
        case "snowflake": {
            const snowflakeConnection = connection.snowflakeConnection;
            if (!snowflakeConnection) {
@@ -410,6 +442,22 @@ export function assembleProjectConnections(
            break;
        }
 
+        case "databricks": {
+            const databricks = connection.databricksConnection;
+            pojo.connections[connection.name] = {
+                is: "databricks",
+                host: databricks?.host,
+                path: databricks?.path,
+                token: databricks?.token,
+                oauthClientId: databricks?.oauthClientId,
+                oauthClientSecret: databricks?.oauthClientSecret,
+                defaultCatalog: databricks?.defaultCatalog,
+                defaultSchema: databricks?.defaultSchema,
+                setupSQL: databricks?.setupSQL,
+            };
+            break;
+        }
+
        case "duckdb": {
            if (
                attachedDatabases.some(