@malloy-publisher/server 0.0.192 → 0.0.194

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44):
  1. package/build.ts +1 -0
  2. package/dist/app/api-doc.yaml +558 -1
  3. package/dist/app/assets/{HomePage-H1OH-VW5.js → HomePage-DbZS0N7G.js} +1 -1
  4. package/dist/app/assets/MainPage-CBuWkbmr.js +2 -0
  5. package/dist/app/assets/{ModelPage-Crau5hgZ.js → ModelPage-Bt37smot.js} +1 -1
  6. package/dist/app/assets/{PackagePage-CbubRhgE.js → PackagePage-DLZe50WG.js} +1 -1
  7. package/dist/app/assets/{ProjectPage-DUlJkYJ4.js → ProjectPage-FQTEPXP4.js} +1 -1
  8. package/dist/app/assets/{RouteError-DrNXNihc.js → RouteError-DefbDO7F.js} +1 -1
  9. package/dist/app/assets/{WorkbookPage-CBBv7n5U.js → WorkbookPage-CkAo16ar.js} +1 -1
  10. package/dist/app/assets/{core-Dzx75uJR.es-DwnFZnyO.js → core-BrfQApxh.es-DnvCX4oH.js} +14 -14
  11. package/dist/app/assets/index-5eLCcNmP.css +1 -0
  12. package/dist/app/assets/{index-d5rvmoZ7.js → index-Bu0ub036.js} +119 -119
  13. package/dist/app/assets/index-CkzK3JIl.js +40 -0
  14. package/dist/app/assets/index-CoA6HIGS.js +1742 -0
  15. package/dist/app/assets/{index.umd-CetYIBQY.js → index.umd-B6Ms2PpL.js} +46 -46
  16. package/dist/app/index.html +2 -2
  17. package/dist/server.mjs +1529 -985
  18. package/package.json +11 -10
  19. package/src/config.ts +7 -2
  20. package/src/controller/connection.controller.ts +102 -27
  21. package/src/dto/connection.dto.spec.ts +55 -0
  22. package/src/dto/connection.dto.ts +87 -2
  23. package/src/server.ts +201 -2
  24. package/src/service/connection.spec.ts +250 -4
  25. package/src/service/connection.ts +328 -473
  26. package/src/service/connection_config.spec.ts +123 -0
  27. package/src/service/connection_config.ts +562 -0
  28. package/src/service/connection_service.spec.ts +50 -0
  29. package/src/service/connection_service.ts +125 -32
  30. package/src/service/db_utils.spec.ts +161 -0
  31. package/src/service/db_utils.ts +131 -0
  32. package/src/service/materialization_service.spec.ts +18 -12
  33. package/src/service/materialization_service.ts +54 -7
  34. package/src/service/model.ts +24 -27
  35. package/src/service/package.spec.ts +125 -1
  36. package/src/service/package.ts +86 -44
  37. package/src/service/project.ts +172 -94
  38. package/src/service/project_store.spec.ts +72 -0
  39. package/src/service/project_store.ts +98 -81
  40. package/tests/unit/duckdb/attached_databases.test.ts +1 -19
  41. package/dist/app/assets/MainPage-GL06aMke.js +0 -2
  42. package/dist/app/assets/index-CMlGQMcl.css +0 -1
  43. package/dist/app/assets/index-CzjyS9cx.js +0 -1276
  44. package/dist/app/assets/index-HHdhLUpv.js +0 -676
@@ -611,6 +611,56 @@ describe("service/connection_service", () => {
611
611
  ).toBe(true);
612
612
  });
613
613
 
614
+ it("should defer DuckDB file cleanup until the previous config release callback", async () => {
615
+ const connectionName = "local-file";
616
+ const duckdbConnection: ApiConnection = {
617
+ name: connectionName,
618
+ type: "duckdb",
619
+ duckdbConnection: {
620
+ attachedDatabases: [],
621
+ },
622
+ };
623
+ const mockDbConnection = {
624
+ id: "conn-123",
625
+ name: duckdbConnection.name,
626
+ type: "duckdb",
627
+ config: duckdbConnection,
628
+ };
629
+ const mockProject = {
630
+ listApiConnections: sinon.stub().returns([duckdbConnection]),
631
+ getApiConnection: sinon.stub().returns(duckdbConnection),
632
+ updateConnections: sinon.stub(),
633
+ deleteDuckDBConnection: sinon.stub().resolves(),
634
+ metadata: { location: "/test/path" },
635
+ };
636
+ const getConnectionStub = sinon.stub(
637
+ connectionService,
638
+ "getConnection",
639
+ );
640
+ getConnectionStub.resolves({
641
+ dbConnection: mockDbConnection,
642
+ repository: mockRepository,
643
+ } as unknown as Awaited<
644
+ ReturnType<typeof connectionService.getConnection>
645
+ >);
646
+ (mockProjectStore.getProject as sinon.SinonStub).resolves(mockProject);
647
+
648
+ await connectionService.deleteConnection(
649
+ "test-project",
650
+ connectionName,
651
+ );
652
+
653
+ expect(mockProject.deleteDuckDBConnection.called).toBe(false);
654
+ const releaseCallback = mockProject.updateConnections.getCall(0)
655
+ .args[2] as () => Promise<void>;
656
+
657
+ expect(typeof releaseCallback).toBe("function");
658
+ await releaseCallback();
659
+ expect(
660
+ mockProject.deleteDuckDBConnection.calledWith(connectionName),
661
+ ).toBe(true);
662
+ });
663
+
614
664
  it("should throw FrozenConfigError when config is frozen", async () => {
615
665
  mockProjectStore.publisherConfigIsFrozen = true;
616
666
 
@@ -1,10 +1,66 @@
1
1
  import { components } from "../api";
2
2
  import { ConnectionNotFoundError, FrozenConfigError } from "../errors";
3
3
  import { logger } from "../logger";
4
- import { createProjectConnections } from "./connection";
4
+ import { buildProjectMalloyConfig } from "./connection";
5
5
  import { ProjectStore } from "./project_store";
6
6
 
7
7
  type ApiConnection = components["schemas"]["Connection"];
8
+ type ReleaseCallback = () => Promise<void>;
9
+ type ConnectionUpdateProject = {
10
+ runConnectionUpdateExclusive?: <T>(fn: () => Promise<T>) => Promise<T>;
11
+ updateConnections?: (
12
+ nextMalloyConfig: ReturnType<typeof buildProjectMalloyConfig>,
13
+ apiConnections?: ApiConnection[],
14
+ afterPreviousRelease?: ReleaseCallback,
15
+ ) => void;
16
+ deleteConnection?: (connectionName: string) => Promise<void>;
17
+ deleteDuckDBConnection?: (connectionName: string) => Promise<void>;
18
+ deleteDuckLakeConnection?: (connectionName: string) => Promise<void>;
19
+ };
20
+
21
+ async function runProjectConnectionUpdate<T>(
22
+ project: ConnectionUpdateProject,
23
+ fn: () => Promise<T>,
24
+ ): Promise<T> {
25
+ if (project.runConnectionUpdateExclusive) {
26
+ return project.runConnectionUpdateExclusive(fn);
27
+ }
28
+ return fn();
29
+ }
30
+
31
+ function updateProjectConnections(
32
+ project: ConnectionUpdateProject,
33
+ nextMalloyConfig: ReturnType<typeof buildProjectMalloyConfig>,
34
+ afterPreviousRelease?: ReleaseCallback,
35
+ ): void {
36
+ project.updateConnections?.(
37
+ nextMalloyConfig,
38
+ nextMalloyConfig.apiConnections,
39
+ afterPreviousRelease,
40
+ );
41
+ }
42
+
43
+ function buildDeletedConnectionCleanup(
44
+ project: ConnectionUpdateProject,
45
+ deletedConnection: ApiConnection,
46
+ connectionName: string,
47
+ ): ReleaseCallback | undefined {
48
+ if (
49
+ deletedConnection.type === "duckdb" &&
50
+ typeof project.deleteDuckDBConnection === "function"
51
+ ) {
52
+ return () => project.deleteDuckDBConnection!(connectionName);
53
+ }
54
+
55
+ if (
56
+ deletedConnection.type === "ducklake" &&
57
+ typeof project.deleteDuckLakeConnection === "function"
58
+ ) {
59
+ return () => project.deleteDuckLakeConnection!(connectionName);
60
+ }
61
+
62
+ return undefined;
63
+ }
8
64
 
9
65
  export class ConnectionService {
10
66
  private projectStore: ProjectStore;
@@ -74,21 +130,21 @@ export class ConnectionService {
74
130
 
75
131
  // Update in-memory connections
76
132
  const project = await this.projectStore.getProject(projectName, false);
77
- const existingConnections = project.listApiConnections();
78
-
79
- const { malloyConnections, apiConnections } =
80
- await createProjectConnections(
133
+ await runProjectConnectionUpdate(project, async () => {
134
+ const existingConnections = project.listApiConnections();
135
+ const nextMalloyConfig = buildProjectMalloyConfig(
81
136
  [...existingConnections, connection],
82
137
  project.metadata.location || "",
83
138
  );
84
139
 
85
- project.updateConnections(malloyConnections, apiConnections);
140
+ await this.projectStore.addConnection(
141
+ connection,
142
+ dbProject.id,
143
+ repository,
144
+ );
86
145
 
87
- await this.projectStore.addConnection(
88
- connection,
89
- dbProject.id,
90
- repository,
91
- );
146
+ updateProjectConnections(project, nextMalloyConfig);
147
+ });
92
148
 
93
149
  logger.info(
94
150
  `Successfully added connection "${connection.name}" to project "${projectName}"`,
@@ -117,31 +173,36 @@ export class ConnectionService {
117
173
 
118
174
  // Update in-memory connections
119
175
  const project = await this.projectStore.getProject(projectName, false);
120
- const existingConnections = project.listApiConnections();
176
+ await runProjectConnectionUpdate(project, async () => {
177
+ const existingConnections = project.listApiConnections();
121
178
 
122
- const updatedConnection = {
123
- ...dbConnection.config,
124
- ...connection,
125
- name: connectionName,
126
- };
179
+ const updatedConnection = {
180
+ ...dbConnection.config,
181
+ ...connection,
182
+ name: connectionName,
183
+ };
127
184
 
128
- const updatedConnections = existingConnections.map((conn) =>
129
- conn.name === connectionName ? updatedConnection : conn,
130
- );
185
+ const updatedConnections = existingConnections.map((conn) =>
186
+ conn.name === connectionName ? updatedConnection : conn,
187
+ );
131
188
 
132
- const { malloyConnections, apiConnections } =
133
- await createProjectConnections(
189
+ // Pass isUpdateConnectionRequest=true so the DuckLake wrapper
190
+ // re-attaches against the updated catalog/storage settings instead
191
+ // of trusting the prior generation's persisted attach state.
192
+ const nextMalloyConfig = buildProjectMalloyConfig(
134
193
  updatedConnections,
135
194
  project.metadata.location || "",
195
+ true,
136
196
  );
137
197
 
138
- project.updateConnections(malloyConnections, apiConnections);
198
+ await this.projectStore.updateConnection(
199
+ updatedConnection,
200
+ dbProject.id,
201
+ repository,
202
+ );
139
203
 
140
- await this.projectStore.updateConnection(
141
- updatedConnection,
142
- dbProject.id,
143
- repository,
144
- );
204
+ updateProjectConnections(project, nextMalloyConfig);
205
+ });
145
206
 
146
207
  logger.info(
147
208
  `Successfully updated connection "${connectionName}" in project "${projectName}"`,
@@ -169,10 +230,42 @@ export class ConnectionService {
169
230
 
170
231
  // Update in-memory connections
171
232
  const project = await this.projectStore.getProject(projectName, false);
172
- await project.deleteConnection(connectionName);
173
-
174
- // Delete from database
175
- await repository.deleteConnection(dbConnection.id);
233
+ await runProjectConnectionUpdate(project, async () => {
234
+ if (typeof project.listApiConnections !== "function") {
235
+ if (typeof project.deleteConnection === "function") {
236
+ await project.deleteConnection(connectionName);
237
+ }
238
+ await repository.deleteConnection(dbConnection.id);
239
+ return;
240
+ }
241
+
242
+ const deletedConnection =
243
+ "getApiConnection" in project &&
244
+ typeof project.getApiConnection === "function"
245
+ ? project.getApiConnection(connectionName)
246
+ : dbConnection.config;
247
+ const updatedConnections = project
248
+ .listApiConnections()
249
+ .filter((connection) => connection.name !== connectionName);
250
+ const nextMalloyConfig = buildProjectMalloyConfig(
251
+ updatedConnections,
252
+ project.metadata.location || "",
253
+ );
254
+ const deleteConnectionFilesAfterRelease =
255
+ buildDeletedConnectionCleanup(
256
+ project,
257
+ deletedConnection,
258
+ connectionName,
259
+ );
260
+
261
+ await repository.deleteConnection(dbConnection.id);
262
+
263
+ updateProjectConnections(
264
+ project,
265
+ nextMalloyConfig,
266
+ deleteConnectionFilesAfterRelease,
267
+ );
268
+ });
176
269
 
177
270
  logger.info(
178
271
  `Successfully deleted connection "${connectionName}" from project "${projectName}"`,
@@ -262,6 +262,61 @@ describe("listTablesForSchema", () => {
262
262
  });
263
263
  });
264
264
 
265
+ describe("databricks", () => {
266
+ it("uses defaultCatalog-prefixed information_schema.columns", async () => {
267
+ const conn: ApiConnection = {
268
+ name: "test",
269
+ type: "databricks",
270
+ databricksConnection: {
271
+ host: "dbc.cloud.databricks.com",
272
+ path: "/sql/1.0/warehouses/abc",
273
+ token: "dapi",
274
+ defaultCatalog: "main",
275
+ },
276
+ };
277
+ const m = mockConnection(columnRows);
278
+ const tables = await listTablesForSchema(conn, "default", m.conn);
279
+
280
+ expect(m.lastSQL).toContain("main.information_schema.columns");
281
+ expect(m.lastSQL).toContain("table_schema = 'default'");
282
+ expect(tables[0].resource).toBe("main.default.orders");
283
+ });
284
+
285
+ it("extracts catalog from schemaName when no defaultCatalog", async () => {
286
+ const conn: ApiConnection = {
287
+ name: "test",
288
+ type: "databricks",
289
+ databricksConnection: {
290
+ host: "dbc.cloud.databricks.com",
291
+ path: "/sql/1.0/warehouses/abc",
292
+ token: "dapi",
293
+ },
294
+ };
295
+ const m = mockConnection(columnRows);
296
+ const tables = await listTablesForSchema(conn, "main.default", m.conn);
297
+
298
+ expect(m.lastSQL).toContain("main.information_schema.columns");
299
+ expect(m.lastSQL).toContain("table_schema = 'default'");
300
+ expect(tables[0].resource).toBe("main.default.orders");
301
+ });
302
+
303
+ it("includes IN filter when tableNames provided", async () => {
304
+ const conn: ApiConnection = {
305
+ name: "test",
306
+ type: "databricks",
307
+ databricksConnection: {
308
+ host: "dbc.cloud.databricks.com",
309
+ path: "/sql/1.0/warehouses/abc",
310
+ token: "dapi",
311
+ defaultCatalog: "main",
312
+ },
313
+ };
314
+ const m = mockConnection(columnRows);
315
+ await listTablesForSchema(conn, "default", m.conn, ["orders"]);
316
+ expect(m.lastSQL).toContain("table_name IN ('orders')");
317
+ });
318
+ });
319
+
265
320
  describe("duckdb", () => {
266
321
  const conn: ApiConnection = {
267
322
  name: "test",
@@ -674,6 +729,112 @@ describe("getSchemasForConnection", () => {
674
729
  });
675
730
  });
676
731
 
732
+ describe("databricks", () => {
733
+ it("queries catalog.information_schema.schemata when defaultCatalog is set", async () => {
734
+ const conn: ApiConnection = {
735
+ name: "test",
736
+ type: "databricks",
737
+ databricksConnection: {
738
+ host: "dbc.cloud.databricks.com",
739
+ path: "/sql/1.0/warehouses/abc",
740
+ token: "dapi",
741
+ defaultCatalog: "main",
742
+ defaultSchema: "default",
743
+ },
744
+ };
745
+ const rows = [
746
+ { schema_name: "default" },
747
+ { schema_name: "information_schema" },
748
+ ];
749
+ const m = mockConnection(rows);
750
+ const schemas = await getSchemasForConnection(conn, m.conn);
751
+
752
+ expect(m.lastSQL).toContain("main.information_schema.schemata");
753
+ expect(schemas).toHaveLength(2);
754
+ expect(schemas.find((s) => s.name === "default")?.isDefault).toBe(
755
+ true,
756
+ );
757
+ expect(
758
+ schemas.find((s) => s.name === "information_schema")?.isHidden,
759
+ ).toBe(true);
760
+ });
761
+
762
+ it("falls back to SHOW CATALOGS when defaultCatalog is unset", async () => {
763
+ const conn: ApiConnection = {
764
+ name: "test",
765
+ type: "databricks",
766
+ databricksConnection: {
767
+ host: "dbc.cloud.databricks.com",
768
+ path: "/sql/1.0/warehouses/abc",
769
+ token: "dapi",
770
+ },
771
+ };
772
+ // First runSQL returns catalog list; subsequent runSQL calls (one
773
+ // per catalog) return schema rows. We use a dedicated mock so we
774
+ // can switch behavior across calls.
775
+ let callIndex = 0;
776
+ const calls: string[] = [];
777
+ const fakeConn = {
778
+ runSQL: async (sql: string) => {
779
+ calls.push(sql);
780
+ if (callIndex++ === 0) {
781
+ return {
782
+ rows: [{ catalog: "main" }, { catalog: "samples" }],
783
+ };
784
+ }
785
+ return { rows: [{ schema_name: "default" }] };
786
+ },
787
+ } as unknown as Connection;
788
+
789
+ const schemas = await getSchemasForConnection(conn, fakeConn);
790
+
791
+ expect(calls[0]).toContain("SHOW CATALOGS");
792
+ expect(
793
+ calls.some((c) => c.includes("main.information_schema.schemata")),
794
+ ).toBe(true);
795
+ expect(
796
+ calls.some((c) =>
797
+ c.includes("samples.information_schema.schemata"),
798
+ ),
799
+ ).toBe(true);
800
+ // Two catalogs each contribute one schema → catalog-qualified names.
801
+ expect(schemas.map((s) => s.name)).toEqual([
802
+ "main.default",
803
+ "samples.default",
804
+ ]);
805
+ });
806
+
807
+ it("warns and continues when a catalog rejects information_schema", async () => {
808
+ const conn: ApiConnection = {
809
+ name: "test",
810
+ type: "databricks",
811
+ databricksConnection: {
812
+ host: "dbc.cloud.databricks.com",
813
+ path: "/sql/1.0/warehouses/abc",
814
+ token: "dapi",
815
+ },
816
+ };
817
+ let callIndex = 0;
818
+ const fakeConn = {
819
+ runSQL: async (sql: string) => {
820
+ if (callIndex++ === 0) {
821
+ return { rows: [{ catalog: "denied" }, { catalog: "ok" }] };
822
+ }
823
+ if (sql.includes("denied")) {
824
+ throw new Error("USE CATALOG denied");
825
+ }
826
+ return { rows: [{ schema_name: "default" }] };
827
+ },
828
+ } as unknown as Connection;
829
+
830
+ const schemas = await getSchemasForConnection(conn, fakeConn);
831
+
832
+ // Denied catalog is skipped, ok catalog contributes its schema.
833
+ expect(schemas).toHaveLength(1);
834
+ expect(schemas[0].name).toBe("ok.default");
835
+ });
836
+ });
837
+
677
838
  it("throws for unsupported connection type", async () => {
678
839
  const conn = {
679
840
  name: "test",
@@ -353,6 +353,82 @@ async function getSchemasForTrino(
353
353
  }
354
354
  }
355
355
 
356
+ async function getSchemasForDatabricks(
357
+ connection: ApiConnection,
358
+ malloyConnection: Connection,
359
+ ): Promise<ApiSchema[]> {
360
+ if (!connection.databricksConnection) {
361
+ throw new Error("Databricks connection is required");
362
+ }
363
+ try {
364
+ const configuredSchema = connection.databricksConnection.defaultSchema;
365
+ let allRows: { catalog: string; schema: string }[] = [];
366
+
367
+ if (connection.databricksConnection.defaultCatalog) {
368
+ const catalog = connection.databricksConnection.defaultCatalog;
369
+ const result = await malloyConnection.runSQL(
370
+ `SELECT schema_name FROM ${catalog}.information_schema.schemata ORDER BY schema_name`,
371
+ );
372
+ const rows = standardizeRunSQLResult(result);
373
+ allRows = rows.map((row: unknown) => {
374
+ const r = row as Record<string, unknown>;
375
+ return {
376
+ catalog,
377
+ schema: String(r.schema_name ?? r.Schema ?? ""),
378
+ };
379
+ });
380
+ } else {
381
+ const catalogsResult = await malloyConnection.runSQL(`SHOW CATALOGS`);
382
+ const catalogNames = standardizeRunSQLResult(catalogsResult).map(
383
+ (row: unknown) => {
384
+ const r = row as Record<string, unknown>;
385
+ return String(r.catalog ?? r.Catalog ?? r.catalog_name ?? "");
386
+ },
387
+ );
388
+
389
+ for (const catalog of catalogNames) {
390
+ try {
391
+ const result = await malloyConnection.runSQL(
392
+ `SELECT schema_name FROM ${catalog}.information_schema.schemata ORDER BY schema_name`,
393
+ );
394
+ const rows = standardizeRunSQLResult(result);
395
+ for (const row of rows) {
396
+ const r = row as Record<string, unknown>;
397
+ allRows.push({
398
+ catalog,
399
+ schema: String(r.schema_name ?? r.Schema ?? ""),
400
+ });
401
+ }
402
+ } catch (catalogError) {
403
+ logger.warn(
404
+ `Failed to list schemas for Databricks catalog ${catalog}`,
405
+ { error: catalogError },
406
+ );
407
+ }
408
+ }
409
+ }
410
+ logger.info("allRows for Schemas for Databricks", { allRows });
411
+ return allRows.map(({ catalog, schema }) => {
412
+ const name = connection.databricksConnection?.defaultCatalog
413
+ ? schema
414
+ : `${catalog}.${schema}`;
415
+ return {
416
+ name,
417
+ isHidden: ["information_schema"].includes(schema),
418
+ isDefault: configuredSchema ? schema === configuredSchema : false,
419
+ };
420
+ });
421
+ } catch (error) {
422
+ logger.error(
423
+ `Error getting schemas for Databricks connection ${connection.name}`,
424
+ { error },
425
+ );
426
+ throw new Error(
427
+ `Failed to get schemas for Databricks connection ${connection.name}: ${(error as Error).message}`,
428
+ );
429
+ }
430
+ }
431
+
356
432
  async function getSchemasForDuckDB(
357
433
  connection: ApiConnection,
358
434
  malloyConnection: Connection,
@@ -533,6 +609,8 @@ export async function getSchemasForConnection(
533
609
  return getSchemasForSnowflake(connection, malloyConnection);
534
610
  case "trino":
535
611
  return getSchemasForTrino(connection, malloyConnection);
612
+ case "databricks":
613
+ return getSchemasForDatabricks(connection, malloyConnection);
536
614
  case "duckdb":
537
615
  return getSchemasForDuckDB(connection, malloyConnection);
538
616
  case "motherduck":
@@ -823,6 +901,13 @@ export async function listTablesForSchema(
823
901
  malloyConnection,
824
902
  tableNames,
825
903
  );
904
+ case "databricks":
905
+ return listTablesForDatabricks(
906
+ connection,
907
+ schemaName,
908
+ malloyConnection,
909
+ tableNames,
910
+ );
826
911
  case "duckdb":
827
912
  return listTablesForDuckDB(
828
913
  connection,
@@ -1057,6 +1142,52 @@ async function listTablesForTrino(
1057
1142
  }
1058
1143
  }
1059
1144
 
1145
+ async function listTablesForDatabricks(
1146
+ connection: ApiConnection,
1147
+ schemaName: string,
1148
+ malloyConnection: Connection,
1149
+ tableNames?: string[],
1150
+ ): Promise<ApiTable[]> {
1151
+ if (!connection.databricksConnection) {
1152
+ throw new Error("Databricks connection is required");
1153
+ }
1154
+ try {
1155
+ let catalogPrefix: string;
1156
+ let schemaOnly: string;
1157
+ let resourcePrefix: string;
1158
+
1159
+ if (connection.databricksConnection.defaultCatalog) {
1160
+ catalogPrefix = `${connection.databricksConnection.defaultCatalog}.`;
1161
+ schemaOnly = schemaName;
1162
+ resourcePrefix = `${connection.databricksConnection.defaultCatalog}.${schemaName}`;
1163
+ } else {
1164
+ const dotIdx = schemaName.indexOf(".");
1165
+ if (dotIdx > 0) {
1166
+ catalogPrefix = `${schemaName.substring(0, dotIdx)}.`;
1167
+ schemaOnly = schemaName.substring(dotIdx + 1);
1168
+ } else {
1169
+ catalogPrefix = "";
1170
+ schemaOnly = schemaName;
1171
+ }
1172
+ resourcePrefix = schemaName;
1173
+ }
1174
+
1175
+ const result = await malloyConnection.runSQL(
1176
+ `SELECT table_name, column_name, data_type FROM ${catalogPrefix}information_schema.columns WHERE table_schema = '${schemaOnly}' ${sqlInFilter("table_name", tableNames)} ORDER BY table_name, ordinal_position`,
1177
+ );
1178
+ const rows = standardizeRunSQLResult(result);
1179
+ return groupColumnRowsIntoTables(rows, (t) => `${resourcePrefix}.${t}`);
1180
+ } catch (error) {
1181
+ logger.error(
1182
+ `Error getting tables for Databricks schema ${schemaName} in connection ${connection.name}`,
1183
+ { error },
1184
+ );
1185
+ throw new Error(
1186
+ `Failed to get tables for Databricks schema ${schemaName} in connection ${connection.name}: ${(error as Error).message}`,
1187
+ );
1188
+ }
1189
+ }
1190
+
1060
1191
  async function listTablesForDuckDB(
1061
1192
  connection: ApiConnection,
1062
1193
  schemaName: string,
@@ -480,10 +480,12 @@ describe("MaterializationService", () => {
480
480
  dialectName: "duckdb",
481
481
  runSQL,
482
482
  } as unknown as Connection;
483
- const connections = new Map<string, Connection>([
484
- ["conn", connection],
485
- ]);
486
- const pkg = { getConnections: () => connections };
483
+ const pkg = {
484
+ getMalloyConnection: async (name: string): Promise<Connection> => {
485
+ if (name === "conn") return connection;
486
+ throw new Error(`unknown connection: ${name}`);
487
+ },
488
+ };
487
489
  (ctx.projectStore.getProject as sinon.SinonStub).resolves({
488
490
  getPackage: sinon.stub().resolves(pkg),
489
491
  });
@@ -535,10 +537,12 @@ describe("MaterializationService", () => {
535
537
  // a vanished "ghost_conn", which used to be impossible to tear down.
536
538
  // `teardownPackage` must force-delete the row anyway so teardown
537
539
  // can complete.
538
- const connections = new Map<string, Connection>([
539
- ["live_conn", livingConn],
540
- ]);
541
- const pkg = { getConnections: () => connections };
540
+ const pkg = {
541
+ getMalloyConnection: async (name: string): Promise<Connection> => {
542
+ if (name === "live_conn") return livingConn;
543
+ throw new Error(`unknown connection: ${name}`);
544
+ },
545
+ };
542
546
  (ctx.projectStore.getProject as sinon.SinonStub).resolves({
543
547
  getPackage: sinon.stub().resolves(pkg),
544
548
  });
@@ -576,10 +580,12 @@ describe("MaterializationService", () => {
576
580
  dialectName: "duckdb",
577
581
  runSQL,
578
582
  } as unknown as Connection;
579
- const connections = new Map<string, Connection>([
580
- ["conn", connection],
581
- ]);
582
- const pkg = { getConnections: () => connections };
583
+ const pkg = {
584
+ getMalloyConnection: async (name: string): Promise<Connection> => {
585
+ if (name === "conn") return connection;
586
+ throw new Error(`unknown connection: ${name}`);
587
+ },
588
+ };
583
589
  (ctx.projectStore.getProject as sinon.SinonStub).resolves({
584
590
  getPackage: sinon.stub().resolves(pkg),
585
591
  });