@malloy-publisher/server 0.0.192 → 0.0.194

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/build.ts +1 -0
  2. package/dist/app/api-doc.yaml +558 -1
  3. package/dist/app/assets/{HomePage-H1OH-VW5.js → HomePage-DbZS0N7G.js} +1 -1
  4. package/dist/app/assets/MainPage-CBuWkbmr.js +2 -0
  5. package/dist/app/assets/{ModelPage-Crau5hgZ.js → ModelPage-Bt37smot.js} +1 -1
  6. package/dist/app/assets/{PackagePage-CbubRhgE.js → PackagePage-DLZe50WG.js} +1 -1
  7. package/dist/app/assets/{ProjectPage-DUlJkYJ4.js → ProjectPage-FQTEPXP4.js} +1 -1
  8. package/dist/app/assets/{RouteError-DrNXNihc.js → RouteError-DefbDO7F.js} +1 -1
  9. package/dist/app/assets/{WorkbookPage-CBBv7n5U.js → WorkbookPage-CkAo16ar.js} +1 -1
  10. package/dist/app/assets/{core-Dzx75uJR.es-DwnFZnyO.js → core-BrfQApxh.es-DnvCX4oH.js} +14 -14
  11. package/dist/app/assets/index-5eLCcNmP.css +1 -0
  12. package/dist/app/assets/{index-d5rvmoZ7.js → index-Bu0ub036.js} +119 -119
  13. package/dist/app/assets/index-CkzK3JIl.js +40 -0
  14. package/dist/app/assets/index-CoA6HIGS.js +1742 -0
  15. package/dist/app/assets/{index.umd-CetYIBQY.js → index.umd-B6Ms2PpL.js} +46 -46
  16. package/dist/app/index.html +2 -2
  17. package/dist/server.mjs +1529 -985
  18. package/package.json +11 -10
  19. package/src/config.ts +7 -2
  20. package/src/controller/connection.controller.ts +102 -27
  21. package/src/dto/connection.dto.spec.ts +55 -0
  22. package/src/dto/connection.dto.ts +87 -2
  23. package/src/server.ts +201 -2
  24. package/src/service/connection.spec.ts +250 -4
  25. package/src/service/connection.ts +328 -473
  26. package/src/service/connection_config.spec.ts +123 -0
  27. package/src/service/connection_config.ts +562 -0
  28. package/src/service/connection_service.spec.ts +50 -0
  29. package/src/service/connection_service.ts +125 -32
  30. package/src/service/db_utils.spec.ts +161 -0
  31. package/src/service/db_utils.ts +131 -0
  32. package/src/service/materialization_service.spec.ts +18 -12
  33. package/src/service/materialization_service.ts +54 -7
  34. package/src/service/model.ts +24 -27
  35. package/src/service/package.spec.ts +125 -1
  36. package/src/service/package.ts +86 -44
  37. package/src/service/project.ts +172 -94
  38. package/src/service/project_store.spec.ts +72 -0
  39. package/src/service/project_store.ts +98 -81
  40. package/tests/unit/duckdb/attached_databases.test.ts +1 -19
  41. package/dist/app/assets/MainPage-GL06aMke.js +0 -2
  42. package/dist/app/assets/index-CMlGQMcl.css +0 -1
  43. package/dist/app/assets/index-CzjyS9cx.js +0 -1276
  44. package/dist/app/assets/index-HHdhLUpv.js +0 -676
package/src/service/connection_config.spec.ts
@@ -0,0 +1,123 @@
+ import { describe, expect, it } from "bun:test";
+ import { assembleProjectConnections } from "./connection_config";
+ import { components } from "../api";
+
+ type ApiConnection = components["schemas"]["Connection"];
+
+ describe("assembleProjectConnections — databricks", () => {
+     const validBase: ApiConnection = {
+         name: "dbx",
+         type: "databricks",
+         databricksConnection: {
+             host: "dbc.cloud.databricks.com",
+             path: "/sql/1.0/warehouses/abc",
+             token: "dapiXXXX",
+             defaultCatalog: "main",
+             defaultSchema: "default",
+         },
+     };
+
+     it("emits a databricks core entry with all known fields preserved", () => {
+         const { pojo, apiConnections } = assembleProjectConnections([validBase]);
+
+         const entry = pojo.connections["dbx"];
+         expect(entry.is).toBe("databricks");
+         expect(entry.host).toBe("dbc.cloud.databricks.com");
+         expect(entry.path).toBe("/sql/1.0/warehouses/abc");
+         expect(entry.token).toBe("dapiXXXX");
+         expect(entry.defaultCatalog).toBe("main");
+         expect(entry.defaultSchema).toBe("default");
+
+         expect(apiConnections).toHaveLength(1);
+         expect(apiConnections[0].attributes?.dialectName).toBe("databricks");
+     });
+
+     it("accepts OAuth M2M auth (clientId + secret) without a token", () => {
+         const conn: ApiConnection = {
+             name: "dbx-oauth",
+             type: "databricks",
+             databricksConnection: {
+                 host: "dbc.cloud.databricks.com",
+                 path: "/sql/1.0/warehouses/abc",
+                 oauthClientId: "client-id",
+                 oauthClientSecret: "client-secret",
+                 defaultCatalog: "main",
+             },
+         };
+         const { pojo } = assembleProjectConnections([conn]);
+         const entry = pojo.connections["dbx-oauth"];
+         expect(entry.is).toBe("databricks");
+         expect(entry.oauthClientId).toBe("client-id");
+         expect(entry.oauthClientSecret).toBe("client-secret");
+         expect(entry.token).toBeUndefined();
+     });
+
+     it("rejects connections missing the databricksConnection block", () => {
+         const conn: ApiConnection = {
+             name: "dbx",
+             type: "databricks",
+         };
+         expect(() => assembleProjectConnections([conn])).toThrow(
+             "Databricks connection configuration is missing.",
+         );
+     });
+
+     it("rejects connections with a missing host", () => {
+         const conn: ApiConnection = {
+             ...validBase,
+             databricksConnection: {
+                 ...validBase.databricksConnection!,
+                 host: undefined,
+             },
+         };
+         expect(() => assembleProjectConnections([conn])).toThrow(
+             "Databricks host is required",
+         );
+     });
+
+     it("rejects connections with a missing path", () => {
+         const conn: ApiConnection = {
+             ...validBase,
+             databricksConnection: {
+                 ...validBase.databricksConnection!,
+                 path: undefined,
+             },
+         };
+         expect(() => assembleProjectConnections([conn])).toThrow(
+             "Databricks SQL warehouse HTTP path is required",
+         );
+     });
+
+     it("rejects when defaultCatalog is missing", () => {
+         const conn: ApiConnection = {
+             name: "dbx",
+             type: "databricks",
+             databricksConnection: {
+                 host: "dbc.cloud.databricks.com",
+                 path: "/sql/1.0/warehouses/abc",
+                 token: "dapiXXXX",
+                 // defaultCatalog deliberately omitted
+             },
+         };
+         expect(() => assembleProjectConnections([conn])).toThrow(
+             "Databricks default catalog is required",
+         );
+     });
+
+     it("rejects when neither token nor full OAuth credentials are provided", () => {
+         const conn: ApiConnection = {
+             name: "dbx",
+             type: "databricks",
+             databricksConnection: {
+                 host: "dbc.cloud.databricks.com",
+                 path: "/sql/1.0/warehouses/abc",
+                 // Only oauthClientId, missing secret → rejected.
+                 oauthClientId: "client-id",
+                 defaultCatalog: "main",
+             },
+         };
+         expect(() => assembleProjectConnections([conn])).toThrow(
+             "Databricks requires",
+         );
+     });
+ });
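
For orientation between the two new files: the core entry the first spec asserts on is produced by the databricks case of the implementation hunk below. A sketch of the POJO assembled for the validBase fixture (illustrative only, inferred from that case, and not part of the diff itself; optional fields that were not supplied simply come through as undefined):

    // Sketch (not in the diff): expected shape for the validBase fixture.
    const expectedPojo = {
        connections: {
            dbx: {
                is: "databricks",
                host: "dbc.cloud.databricks.com",
                path: "/sql/1.0/warehouses/abc",
                token: "dapiXXXX",
                defaultCatalog: "main",
                defaultSchema: "default",
                // oauthClientId, oauthClientSecret, setupSQL: undefined here
            },
        },
    };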
package/src/service/connection_config.ts
@@ -0,0 +1,562 @@
+ import path from "path";
+ import { components } from "../api";
+
+ type ApiConnection = components["schemas"]["Connection"];
+ type AttachedDatabase = components["schemas"]["AttachedDatabase"];
+
+ export type CoreConnectionEntry = {
+     is: string;
+     [key: string]: unknown;
+ };
+
+ export type CoreConnectionsPojo = {
+     connections: Record<string, CoreConnectionEntry>;
+ };
+
+ export type ProjectConnectionMetadata = {
+     apiConnection: ApiConnection;
+     attachedDatabases: AttachedDatabase[];
+     hasAzureAttachment: boolean;
+     hasSnowflakePrivateKey: boolean;
+     isDuckLake: boolean;
+     databasePath?: string;
+     workingDirectory: string;
+ };
+
+ export type AssembledProjectConnections = {
+     pojo: CoreConnectionsPojo;
+     metadata: Map<string, ProjectConnectionMetadata>;
+     apiConnections: ApiConnection[];
+ };
+
+ const PUBLISHER_DUCKDB_API_FIELDS = new Set<string>(["attachedDatabases"]);
+
+ export function normalizeSnowflakePrivateKey(privateKey: string): string {
+     let privateKeyContent = privateKey.trim();
+
+     if (!privateKeyContent.includes("\n")) {
+         const keyPatterns = [
+             {
+                 beginRegex: /-----BEGIN\s+ENCRYPTED\s+PRIVATE\s+KEY-----/i,
+                 endRegex: /-----END\s+ENCRYPTED\s+PRIVATE\s+KEY-----/i,
+                 beginMarker: "-----BEGIN ENCRYPTED PRIVATE KEY-----",
+                 endMarker: "-----END ENCRYPTED PRIVATE KEY-----",
+             },
+             {
+                 beginRegex: /-----BEGIN\s+PRIVATE\s+KEY-----/i,
+                 endRegex: /-----END\s+PRIVATE\s+KEY-----/i,
+                 beginMarker: "-----BEGIN PRIVATE KEY-----",
+                 endMarker: "-----END PRIVATE KEY-----",
+             },
+         ];
+
+         for (const pattern of keyPatterns) {
+             const beginMatch = privateKeyContent.match(pattern.beginRegex);
+             const endMatch = privateKeyContent.match(pattern.endRegex);
+
+             if (beginMatch && endMatch) {
+                 const beginPos = beginMatch.index! + beginMatch[0].length;
+                 const endPos = endMatch.index!;
+                 const keyData = privateKeyContent
+                     .substring(beginPos, endPos)
+                     .replace(/\s+/g, "");
+
+                 const lines: string[] = [];
+                 for (let i = 0; i < keyData.length; i += 64) {
+                     lines.push(keyData.slice(i, i + 64));
+                 }
+                 privateKeyContent = `${pattern.beginMarker}\n${lines.join("\n")}\n${pattern.endMarker}\n`;
+                 break;
+             }
+         }
+     } else if (!privateKeyContent.endsWith("\n")) {
+         privateKeyContent += "\n";
+     }
+
+     return privateKeyContent;
+ }
+
+ // NOTE: This narrows the project-author API surface (it rejects securityPolicy,
+ // allowedDirectories, setupSQL, etc.). It is NOT a filesystem isolation
+ // boundary: attachedDatabases[].path is not normalized or constrained to stay
+ // under the project root, and DuckDB's local-file access is unchanged.
+ // Adversarial filesystem isolation is an explicit non-goal of the MalloyConfig
+ // adoption — see PR #682 release notes ("DuckDB hardening knobs are not
+ // exposed", "no adversarial DuckDB filesystem isolation"). Future work owns
+ // any path-traversal/allowlist enforcement.
+ export function validateDuckdbApiSurface(connection: ApiConnection): void {
+     if (connection.type !== "duckdb" || !connection.duckdbConnection) return;
+
+     const unsupportedFields = Object.keys(connection.duckdbConnection).filter(
+         (field) =>
+             !PUBLISHER_DUCKDB_API_FIELDS.has(field) &&
+             (connection.duckdbConnection as Record<string, unknown>)[field] !==
+                 undefined,
+     );
+
+     if (unsupportedFields.length > 0) {
+         throw new Error(
+             `Unsupported DuckDB connection field(s): ${unsupportedFields.join(
+                 ", ",
+             )}. Publisher only supports attachedDatabases for project-authored DuckDB connections.`,
+         );
+     }
+ }
+
+ function cloneApiConnection(connection: ApiConnection): ApiConnection {
+     return { ...connection };
+ }
+
+ function getStaticConnectionAttributes(
+     type: ApiConnection["type"],
+ ): components["schemas"]["ConnectionAttributes"] | undefined {
+     switch (type) {
+         case "postgres":
+             return {
+                 dialectName: "postgres",
+                 isPool: false,
+                 canPersist: true,
+                 canStream: true,
+             };
+         case "bigquery":
+             return {
+                 dialectName: "standardsql",
+                 isPool: false,
+                 canPersist: true,
+                 canStream: true,
+             };
+         case "snowflake":
+             return {
+                 dialectName: "snowflake",
+                 isPool: true,
+                 canPersist: true,
+                 canStream: true,
+             };
+         case "trino":
+             return {
+                 dialectName: "trino",
+                 isPool: false,
+                 canPersist: true,
+                 canStream: false,
+             };
+         case "databricks":
+             return {
+                 dialectName: "databricks",
+                 isPool: false,
+                 canPersist: true,
+                 canStream: false,
+             };
+         case "mysql":
+             return {
+                 dialectName: "mysql",
+                 isPool: false,
+                 canPersist: true,
+                 canStream: false,
+             };
+         case "duckdb":
+         case "motherduck":
+         case "ducklake":
+             return {
+                 dialectName: "duckdb",
+                 isPool: false,
+                 canPersist: true,
+                 canStream: true,
+             };
+         default:
+             return undefined;
+     }
+ }
+
+ type ServiceAccountKey = {
+     type?: string;
+     project_id?: string;
+     private_key?: string;
+     client_email?: string;
+     [key: string]: unknown;
+ };
+
+ function parseServiceAccountKey(json?: string): ServiceAccountKey | undefined {
+     if (!json) return undefined;
+     const keyData = JSON.parse(json) as ServiceAccountKey;
+     const requiredFields = ["type", "project_id", "private_key", "client_email"];
+     for (const field of requiredFields) {
+         if (!keyData[field]) {
+             throw new Error(
+                 `Invalid service account key: missing "${field}" field`,
+             );
+         }
+     }
+     if (keyData.type !== "service_account") {
+         throw new Error('Invalid service account key: incorrect "type" field');
+     }
+     return keyData;
+ }
+
+ function buildPostgresConnectionString(
+     config: components["schemas"]["PostgresConnection"],
+ ): string | undefined {
+     if (config.connectionString || !process.env.PGSSLMODE) {
+         return config.connectionString;
+     }
+
+     const params = new URLSearchParams();
+     params.set("sslmode", process.env.PGSSLMODE);
+     const auth =
+         config.userName && config.password
+             ? `${encodeURIComponent(config.userName)}:${encodeURIComponent(
+                   config.password,
+               )}@`
+             : config.userName
+               ? `${encodeURIComponent(config.userName)}@`
+               : "";
+     const host = config.host ?? "localhost";
+     const port = config.port ? `:${config.port}` : "";
+     const database = config.databaseName
+         ? `/${encodeURIComponent(config.databaseName)}`
+         : "";
+     return `postgresql://${auth}${host}${port}${database}?${params.toString()}`;
+ }
+
+ function buildDuckdbEntry(
+     name: string,
+     projectPath: string,
+     databaseFilename = `${name}.duckdb`,
+ ): CoreConnectionEntry {
+     return {
+         is: "duckdb",
+         databasePath: path.join(projectPath, databaseFilename),
+     };
+ }
+
+ function validateConnectionShape(connection: ApiConnection): void {
+     switch (connection.type) {
+         case "postgres":
+         case "mysql":
+         case "bigquery":
+             break;
+         case "duckdb":
+             if (!connection.duckdbConnection) {
+                 throw new Error("DuckDB connection configuration is missing.");
+             }
+             break;
+         case "motherduck":
+             if (!connection.motherduckConnection) {
+                 throw new Error("MotherDuck connection configuration is missing.");
+             }
+             if (!connection.motherduckConnection.accessToken) {
+                 throw new Error("MotherDuck access token is required.");
+             }
+             break;
+         case "trino":
+             if (!connection.trinoConnection) {
+                 throw new Error("Trino connection configuration is missing.");
+             }
+             break;
+         case "databricks": {
+             const databricks = connection.databricksConnection;
+             if (!databricks) {
+                 throw new Error("Databricks connection configuration is missing.");
+             }
+             if (!databricks.host) {
+                 throw new Error("Databricks host is required.");
+             }
+             if (!databricks.path) {
+                 throw new Error("Databricks SQL warehouse HTTP path is required.");
+             }
+             const hasToken = !!databricks.token;
+             const hasOAuth =
+                 !!databricks.oauthClientId && !!databricks.oauthClientSecret;
+             if (!hasToken && !hasOAuth) {
+                 throw new Error(
+                     "Databricks requires either a personal access token or OAuth M2M client ID and secret.",
+                 );
+             }
+             const hasDefaultCatalog = !!databricks.defaultCatalog;
+             if (!hasDefaultCatalog) {
+                 throw new Error("Databricks default catalog is required.");
+             }
+             break;
+         }
+         case "snowflake": {
+             const snowflakeConnection = connection.snowflakeConnection;
+             if (!snowflakeConnection) {
+                 throw new Error("Snowflake connection configuration is missing.");
+             }
+             if (!snowflakeConnection.account) {
+                 throw new Error("Snowflake account is required.");
+             }
+             if (!snowflakeConnection.username) {
+                 throw new Error("Snowflake username is required.");
+             }
+             if (!snowflakeConnection.password && !snowflakeConnection.privateKey) {
+                 throw new Error(
+                     "Snowflake password or private key or private key path is required.",
+                 );
+             }
+             if (!snowflakeConnection.warehouse) {
+                 throw new Error("Snowflake warehouse is required.");
+             }
+             break;
+         }
+     }
+ }
+
+ export function assembleProjectConnections(
+     connections: ApiConnection[] = [],
+     projectPath = "",
+ ): AssembledProjectConnections {
+     const pojo: CoreConnectionsPojo = { connections: {} };
+     const metadata = new Map<string, ProjectConnectionMetadata>();
+     const apiConnections: ApiConnection[] = [];
+     const processedConnections = new Set<string>();
+
+     for (const connection of connections) {
+         if (!connection.name) {
+             throw new Error("Invalid connection configuration. No name.");
+         }
+
+         if (processedConnections.has(connection.name)) {
+             continue;
+         }
+
+         if (connection.name === "duckdb") {
+             throw new Error(
+                 "DuckDB connection name cannot be 'duckdb'; it is reserved for Publisher package sandboxes.",
+             );
+         }
+
+         processedConnections.add(connection.name);
+         validateDuckdbApiSurface(connection);
+         validateConnectionShape(connection);
+
+         const apiConnection = cloneApiConnection(connection);
+         apiConnection.attributes = getStaticConnectionAttributes(connection.type);
+         const attachedDatabases =
+             connection.duckdbConnection?.attachedDatabases ?? [];
+         const isDuckLake = connection.type === "ducklake";
+         const isDuckdb = connection.type === "duckdb";
+         const databasePath = isDuckLake
+             ? path.join(projectPath, `${connection.name}_ducklake.duckdb`)
+             : isDuckdb
+               ? path.join(projectPath, `${connection.name}.duckdb`)
+               : undefined;
+
+         metadata.set(connection.name, {
+             apiConnection,
+             attachedDatabases,
+             hasAzureAttachment: attachedDatabases.some(
+                 (database) => database.type === "azure",
+             ),
+             hasSnowflakePrivateKey:
+                 connection.type === "snowflake" &&
+                 !!connection.snowflakeConnection?.privateKey,
+             isDuckLake,
+             databasePath,
+             workingDirectory: projectPath,
+         });
+
+         switch (connection.type) {
+             case "postgres": {
+                 const postgresConnection = connection.postgresConnection;
+                 pojo.connections[connection.name] = {
+                     is: "postgres",
+                     host: postgresConnection?.host,
+                     port: postgresConnection?.port,
+                     username: postgresConnection?.userName,
+                     password: postgresConnection?.password,
+                     databaseName: postgresConnection?.databaseName,
+                     connectionString: postgresConnection
+                         ? buildPostgresConnectionString(postgresConnection)
+                         : undefined,
+                 };
+                 break;
+             }
+
+             case "mysql": {
+                 pojo.connections[connection.name] = {
+                     is: "mysql",
+                     host: connection.mysqlConnection?.host,
+                     port: connection.mysqlConnection?.port,
+                     user: connection.mysqlConnection?.user,
+                     password: connection.mysqlConnection?.password,
+                     database: connection.mysqlConnection?.database,
+                 };
+                 break;
+             }
+
+             case "bigquery": {
+                 const serviceAccountKey = parseServiceAccountKey(
+                     connection.bigqueryConnection?.serviceAccountKeyJson as
+                         | string
+                         | undefined,
+                 );
+                 pojo.connections[connection.name] = {
+                     is: "bigquery",
+                     projectId:
+                         connection.bigqueryConnection?.defaultProjectId ??
+                         serviceAccountKey?.project_id,
+                     serviceAccountKey,
+                     location: connection.bigqueryConnection?.location,
+                     maximumBytesBilled:
+                         connection.bigqueryConnection?.maximumBytesBilled,
+                     timeoutMs:
+                         connection.bigqueryConnection?.queryTimeoutMilliseconds,
+                     billingProjectId:
+                         connection.bigqueryConnection?.billingProjectId,
+                 };
+                 break;
+             }
+
+             case "snowflake": {
+                 pojo.connections[connection.name] = {
+                     is: "snowflake",
+                     account: connection.snowflakeConnection?.account,
+                     username: connection.snowflakeConnection?.username,
+                     password: connection.snowflakeConnection?.password,
+                     privateKey: connection.snowflakeConnection?.privateKey
+                         ? normalizeSnowflakePrivateKey(
+                               connection.snowflakeConnection.privateKey,
+                           )
+                         : undefined,
+                     privateKeyPass: connection.snowflakeConnection?.privateKeyPass,
+                     warehouse: connection.snowflakeConnection?.warehouse,
+                     database: connection.snowflakeConnection?.database,
+                     schema: connection.snowflakeConnection?.schema,
+                     role: connection.snowflakeConnection?.role,
+                     timeoutMs:
+                         connection.snowflakeConnection?.responseTimeoutMilliseconds,
+                     // Pool sizing is server-owned policy (it matches the values
+                     // the now-deleted switch on main passed before the
+                     // MalloyConfig adoption). Not exposed through the public API.
+                     poolMin: 1,
+                     poolMax: 20,
+                 };
+                 break;
+             }
+
+             case "trino": {
+                 pojo.connections[connection.name] = {
+                     is: "trino",
+                     ...validateAndBuildTrinoCoreConfig(connection.trinoConnection),
+                 };
+                 break;
+             }
+
+             case "databricks": {
+                 const databricks = connection.databricksConnection;
+                 pojo.connections[connection.name] = {
+                     is: "databricks",
+                     host: databricks?.host,
+                     path: databricks?.path,
+                     token: databricks?.token,
+                     oauthClientId: databricks?.oauthClientId,
+                     oauthClientSecret: databricks?.oauthClientSecret,
+                     defaultCatalog: databricks?.defaultCatalog,
+                     defaultSchema: databricks?.defaultSchema,
+                     setupSQL: databricks?.setupSQL,
+                 };
+                 break;
+             }
+
+             case "duckdb": {
+                 if (
+                     attachedDatabases.some(
+                         (database) => database.name === connection.name,
+                     )
+                 ) {
+                     throw new Error(
+                         `DuckDB attached database names cannot conflict with connection name ${connection.name}`,
+                     );
+                 }
+                 pojo.connections[connection.name] = buildDuckdbEntry(
+                     connection.name,
+                     projectPath,
+                     `${connection.name}.duckdb`,
+                 );
+                 break;
+             }
+
+             case "motherduck": {
+                 if (!connection.motherduckConnection?.accessToken) {
+                     throw new Error("MotherDuck access token is required.");
+                 }
+
+                 pojo.connections[connection.name] = {
+                     is: "duckdb",
+                     databasePath: connection.motherduckConnection.database
+                         ? `md:${connection.motherduckConnection.database}?attach_mode=single`
+                         : "md:",
+                     motherDuckToken: connection.motherduckConnection.accessToken,
+                 };
+                 break;
+             }
+
+             case "ducklake": {
+                 if (!connection.ducklakeConnection) {
+                     throw new Error("DuckLake connection configuration is missing.");
+                 }
+                 if (!connection.ducklakeConnection.catalog?.postgresConnection) {
+                     throw new Error(
+                         `PostgreSQL connection configuration is required for DuckLake catalog: ${connection.name}`,
+                     );
+                 }
+                 pojo.connections[connection.name] = buildDuckdbEntry(
+                     connection.name,
+                     projectPath,
+                     `${connection.name}_ducklake.duckdb`,
+                 );
+                 break;
+             }
+
+             default: {
+                 throw new Error(`Unsupported connection type: ${connection.type}`);
+             }
+         }
+
+         apiConnections.push(apiConnection);
+     }
+
+     return { pojo, metadata, apiConnections };
+ }
+
+ function validateAndBuildTrinoCoreConfig(
+     trinoConfig: components["schemas"]["TrinoConnection"] | undefined,
+ ): Record<string, unknown> {
+     if (!trinoConfig) {
+         return {};
+     }
+
+     const server =
+         trinoConfig.server && trinoConfig.port
+             ? trinoConfig.server.includes(trinoConfig.port.toString())
+                 ? trinoConfig.server
+                 : `${trinoConfig.server}:${trinoConfig.port}`
+             : trinoConfig.server;
+
+     const baseConfig: Record<string, unknown> = {
+         server,
+         port: trinoConfig.port,
+         catalog: trinoConfig.catalog,
+         schema: trinoConfig.schema,
+         user: trinoConfig.user,
+     };
+
+     if (trinoConfig.peakaKey) {
+         baseConfig.extraCredential = {
+             peakaKey: trinoConfig.peakaKey,
+         };
+         return baseConfig;
+     }
+
+     if (server?.startsWith("https://") && trinoConfig.password) {
+         baseConfig.password = trinoConfig.password;
+     }
+
+     if (server?.startsWith("http://") || server?.startsWith("https://")) {
+         return baseConfig;
+     }
+
+     throw new Error(
+         `Invalid Trino connection: expected "http://server:port" or "https://server:port".`,
+     );
+ }
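
Taken together, connection_config.ts gives the server one entry point that turns project-authored API connections into the Malloy core connections POJO plus per-connection metadata. A minimal caller sketch (hypothetical: the import path and the postgres fixture are illustrative, not taken from the diff):

    import { assembleProjectConnections } from "./connection_config";

    const { pojo, metadata, apiConnections } = assembleProjectConnections(
        [
            {
                name: "pg",
                type: "postgres",
                postgresConnection: { host: "db.internal", databaseName: "app" },
            },
        ],
        "/srv/projects/demo", // projectPath: where duckdb/ducklake files are placed
    );

    pojo.connections["pg"].is; // "postgres"
    metadata.get("pg")?.workingDirectory; // "/srv/projects/demo"
    apiConnections[0].attributes?.dialectName; // "postgres"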