@malloy-publisher/server 0.0.192 → 0.0.193

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dist/app/api-doc.yaml +522 -1
  2. package/dist/app/assets/{HomePage-H1OH-VW5.js → HomePage-Di9MU3lS.js} +1 -1
  3. package/dist/app/assets/{MainPage-GL06aMke.js → MainPage-yZQo2HSL.js} +1 -1
  4. package/dist/app/assets/{ModelPage-Crau5hgZ.js → ModelPage-Dx2mHWeT.js} +1 -1
  5. package/dist/app/assets/{PackagePage-CbubRhgE.js → PackagePage-Q386Py9t.js} +1 -1
  6. package/dist/app/assets/{ProjectPage-DUlJkYJ4.js → ProjectPage-WR7wPQB-.js} +1 -1
  7. package/dist/app/assets/{RouteError-DrNXNihc.js → RouteError-stRGU4aW.js} +1 -1
  8. package/dist/app/assets/{WorkbookPage-CBBv7n5U.js → WorkbookPage-D3iX0djH.js} +1 -1
  9. package/dist/app/assets/{core-Dzx75uJR.es-DwnFZnyO.js → core-QH4HZQVz.es-CqlQLZdl.js} +1 -1
  10. package/dist/app/assets/{index-d5rvmoZ7.js → index-CVHzPJwN.js} +119 -119
  11. package/dist/app/assets/{index-CzjyS9cx.js → index-DavAceYD.js} +50 -50
  12. package/dist/app/assets/{index-HHdhLUpv.js → index-Y3Y-VRna.js} +1 -1
  13. package/dist/app/assets/{index.umd-CetYIBQY.js → index.umd-Bp8OIhfV.js} +46 -46
  14. package/dist/app/index.html +1 -1
  15. package/dist/server.mjs +1389 -984
  16. package/package.json +10 -10
  17. package/src/controller/connection.controller.ts +102 -27
  18. package/src/dto/connection.dto.spec.ts +4 -0
  19. package/src/dto/connection.dto.ts +46 -2
  20. package/src/server.ts +201 -2
  21. package/src/service/connection.spec.ts +250 -4
  22. package/src/service/connection.ts +326 -473
  23. package/src/service/connection_config.ts +514 -0
  24. package/src/service/connection_service.spec.ts +50 -0
  25. package/src/service/connection_service.ts +125 -32
  26. package/src/service/materialization_service.spec.ts +18 -12
  27. package/src/service/materialization_service.ts +54 -7
  28. package/src/service/model.ts +24 -27
  29. package/src/service/package.spec.ts +125 -1
  30. package/src/service/package.ts +86 -44
  31. package/src/service/project.ts +172 -94
  32. package/src/service/project_store.spec.ts +72 -0
  33. package/src/service/project_store.ts +98 -81
  34. package/tests/unit/duckdb/attached_databases.test.ts +1 -19
@@ -0,0 +1,514 @@
1
+ import path from "path";
2
+ import { components } from "../api";
3
+
4
// Narrow aliases onto the OpenAPI-generated schema types.
type ApiConnection = components["schemas"]["Connection"];
type AttachedDatabase = components["schemas"]["AttachedDatabase"];

/**
 * One entry in the Malloy core "connections" POJO. `is` selects the core
 * connection factory (e.g. "duckdb", "postgres"); the remaining keys are
 * factory-specific configuration.
 */
export type CoreConnectionEntry = {
    is: string;
    [key: string]: unknown;
};

/** The connection map handed to Malloy core, keyed by connection name. */
export type CoreConnectionsPojo = {
    connections: Record<string, CoreConnectionEntry>;
};

/**
 * Publisher-side bookkeeping derived for each connection while assembling
 * the core POJO (see assembleProjectConnections).
 */
export type ProjectConnectionMetadata = {
    // Shallow clone of the authored connection with server-computed attributes.
    apiConnection: ApiConnection;
    // DuckDB attachments declared on the connection ([] for other types).
    attachedDatabases: AttachedDatabase[];
    // True when any attached database has type "azure".
    hasAzureAttachment: boolean;
    // True for snowflake connections configured with a private key.
    hasSnowflakePrivateKey: boolean;
    // True when the connection type is "ducklake".
    isDuckLake: boolean;
    // Local .duckdb file path for duckdb/ducklake connections; otherwise unset.
    databasePath?: string;
    // The project path, recorded as this connection's working directory.
    workingDirectory: string;
};

/** Result of assembleProjectConnections: core POJO plus per-connection data. */
export type AssembledProjectConnections = {
    pojo: CoreConnectionsPojo;
    metadata: Map<string, ProjectConnectionMetadata>;
    apiConnections: ApiConnection[];
};

// The only duckdbConnection field project authors may set via the public API;
// anything else is rejected by validateDuckdbApiSurface.
const PUBLISHER_DUCKDB_API_FIELDS = new Set<string>(["attachedDatabases"]);
33
+
34
+ export function normalizeSnowflakePrivateKey(privateKey: string): string {
35
+ let privateKeyContent = privateKey.trim();
36
+
37
+ if (!privateKeyContent.includes("\n")) {
38
+ const keyPatterns = [
39
+ {
40
+ beginRegex: /-----BEGIN\s+ENCRYPTED\s+PRIVATE\s+KEY-----/i,
41
+ endRegex: /-----END\s+ENCRYPTED\s+PRIVATE\s+KEY-----/i,
42
+ beginMarker: "-----BEGIN ENCRYPTED PRIVATE KEY-----",
43
+ endMarker: "-----END ENCRYPTED PRIVATE KEY-----",
44
+ },
45
+ {
46
+ beginRegex: /-----BEGIN\s+PRIVATE\s+KEY-----/i,
47
+ endRegex: /-----END\s+PRIVATE\s+KEY-----/i,
48
+ beginMarker: "-----BEGIN PRIVATE KEY-----",
49
+ endMarker: "-----END PRIVATE KEY-----",
50
+ },
51
+ ];
52
+
53
+ for (const pattern of keyPatterns) {
54
+ const beginMatch = privateKeyContent.match(pattern.beginRegex);
55
+ const endMatch = privateKeyContent.match(pattern.endRegex);
56
+
57
+ if (beginMatch && endMatch) {
58
+ const beginPos = beginMatch.index! + beginMatch[0].length;
59
+ const endPos = endMatch.index!;
60
+ const keyData = privateKeyContent
61
+ .substring(beginPos, endPos)
62
+ .replace(/\s+/g, "");
63
+
64
+ const lines: string[] = [];
65
+ for (let i = 0; i < keyData.length; i += 64) {
66
+ lines.push(keyData.slice(i, i + 64));
67
+ }
68
+ privateKeyContent = `${pattern.beginMarker}\n${lines.join("\n")}\n${pattern.endMarker}\n`;
69
+ break;
70
+ }
71
+ }
72
+ } else if (!privateKeyContent.endsWith("\n")) {
73
+ privateKeyContent += "\n";
74
+ }
75
+
76
+ return privateKeyContent;
77
+ }
78
+
79
+ // NOTE: This narrows the project-author API surface (it rejects securityPolicy,
80
+ // allowedDirectories, setupSQL, etc.). It is NOT a filesystem isolation
81
+ // boundary: attachedDatabases[].path is not normalized or constrained to stay
82
+ // under the project root, and DuckDB's local-file access is unchanged.
83
+ // Adversarial filesystem isolation is explicit non-goal of the MalloyConfig
84
+ // adoption — see PR #682 release notes ("DuckDB hardening knobs are not
85
+ // exposed", "no adversarial DuckDB filesystem isolation"). Future work owns
86
+ // any path-traversal/allowlist enforcement.
87
+ export function validateDuckdbApiSurface(connection: ApiConnection): void {
88
+ if (connection.type !== "duckdb" || !connection.duckdbConnection) return;
89
+
90
+ const unsupportedFields = Object.keys(connection.duckdbConnection).filter(
91
+ (field) =>
92
+ !PUBLISHER_DUCKDB_API_FIELDS.has(field) &&
93
+ (connection.duckdbConnection as Record<string, unknown>)[field] !==
94
+ undefined,
95
+ );
96
+
97
+ if (unsupportedFields.length > 0) {
98
+ throw new Error(
99
+ `Unsupported DuckDB connection field(s): ${unsupportedFields.join(
100
+ ", ",
101
+ )}. Publisher only supports attachedDatabases for project-authored DuckDB connections.`,
102
+ );
103
+ }
104
+ }
105
+
106
+ function cloneApiConnection(connection: ApiConnection): ApiConnection {
107
+ return { ...connection };
108
+ }
109
+
110
+ function getStaticConnectionAttributes(
111
+ type: ApiConnection["type"],
112
+ ): components["schemas"]["ConnectionAttributes"] | undefined {
113
+ switch (type) {
114
+ case "postgres":
115
+ return {
116
+ dialectName: "postgres",
117
+ isPool: false,
118
+ canPersist: true,
119
+ canStream: true,
120
+ };
121
+ case "bigquery":
122
+ return {
123
+ dialectName: "standardsql",
124
+ isPool: false,
125
+ canPersist: true,
126
+ canStream: true,
127
+ };
128
+ case "snowflake":
129
+ return {
130
+ dialectName: "snowflake",
131
+ isPool: true,
132
+ canPersist: true,
133
+ canStream: true,
134
+ };
135
+ case "trino":
136
+ return {
137
+ dialectName: "trino",
138
+ isPool: false,
139
+ canPersist: true,
140
+ canStream: false,
141
+ };
142
+ case "mysql":
143
+ return {
144
+ dialectName: "mysql",
145
+ isPool: false,
146
+ canPersist: true,
147
+ canStream: false,
148
+ };
149
+ case "duckdb":
150
+ case "motherduck":
151
+ case "ducklake":
152
+ return {
153
+ dialectName: "duckdb",
154
+ isPool: false,
155
+ canPersist: true,
156
+ canStream: true,
157
+ };
158
+ default:
159
+ return undefined;
160
+ }
161
+ }
162
+
163
+ type ServiceAccountKey = {
164
+ type?: string;
165
+ project_id?: string;
166
+ private_key?: string;
167
+ client_email?: string;
168
+ [key: string]: unknown;
169
+ };
170
+
171
+ function parseServiceAccountKey(json?: string): ServiceAccountKey | undefined {
172
+ if (!json) return undefined;
173
+ const keyData = JSON.parse(json) as ServiceAccountKey;
174
+ const requiredFields = ["type", "project_id", "private_key", "client_email"];
175
+ for (const field of requiredFields) {
176
+ if (!keyData[field]) {
177
+ throw new Error(
178
+ `Invalid service account key: missing "${field}" field`,
179
+ );
180
+ }
181
+ }
182
+ if (keyData.type !== "service_account") {
183
+ throw new Error('Invalid service account key: incorrect "type" field');
184
+ }
185
+ return keyData;
186
+ }
187
+
188
+ function buildPostgresConnectionString(
189
+ config: components["schemas"]["PostgresConnection"],
190
+ ): string | undefined {
191
+ if (config.connectionString || !process.env.PGSSLMODE) {
192
+ return config.connectionString;
193
+ }
194
+
195
+ const params = new URLSearchParams();
196
+ params.set("sslmode", process.env.PGSSLMODE);
197
+ const auth =
198
+ config.userName && config.password
199
+ ? `${encodeURIComponent(config.userName)}:${encodeURIComponent(
200
+ config.password,
201
+ )}@`
202
+ : config.userName
203
+ ? `${encodeURIComponent(config.userName)}@`
204
+ : "";
205
+ const host = config.host ?? "localhost";
206
+ const port = config.port ? `:${config.port}` : "";
207
+ const database = config.databaseName
208
+ ? `/${encodeURIComponent(config.databaseName)}`
209
+ : "";
210
+ return `postgresql://${auth}${host}${port}${database}?${params.toString()}`;
211
+ }
212
+
213
+ function buildDuckdbEntry(
214
+ name: string,
215
+ projectPath: string,
216
+ databaseFilename = `${name}.duckdb`,
217
+ ): CoreConnectionEntry {
218
+ return {
219
+ is: "duckdb",
220
+ databasePath: path.join(projectPath, databaseFilename),
221
+ };
222
+ }
223
+
224
+ function validateConnectionShape(connection: ApiConnection): void {
225
+ switch (connection.type) {
226
+ case "postgres":
227
+ case "mysql":
228
+ case "bigquery":
229
+ break;
230
+ case "duckdb":
231
+ if (!connection.duckdbConnection) {
232
+ throw new Error("DuckDB connection configuration is missing.");
233
+ }
234
+ break;
235
+ case "motherduck":
236
+ if (!connection.motherduckConnection) {
237
+ throw new Error("MotherDuck connection configuration is missing.");
238
+ }
239
+ if (!connection.motherduckConnection.accessToken) {
240
+ throw new Error("MotherDuck access token is required.");
241
+ }
242
+ break;
243
+ case "trino":
244
+ if (!connection.trinoConnection) {
245
+ throw new Error("Trino connection configuration is missing.");
246
+ }
247
+ break;
248
+ case "snowflake": {
249
+ const snowflakeConnection = connection.snowflakeConnection;
250
+ if (!snowflakeConnection) {
251
+ throw new Error("Snowflake connection configuration is missing.");
252
+ }
253
+ if (!snowflakeConnection.account) {
254
+ throw new Error("Snowflake account is required.");
255
+ }
256
+ if (!snowflakeConnection.username) {
257
+ throw new Error("Snowflake username is required.");
258
+ }
259
+ if (!snowflakeConnection.password && !snowflakeConnection.privateKey) {
260
+ throw new Error(
261
+ "Snowflake password or private key or private key path is required.",
262
+ );
263
+ }
264
+ if (!snowflakeConnection.warehouse) {
265
+ throw new Error("Snowflake warehouse is required.");
266
+ }
267
+ break;
268
+ }
269
+ }
270
+ }
271
+
272
/**
 * Translate project-authored API connections into (a) the POJO consumed by
 * Malloy core, (b) per-connection Publisher metadata, and (c) the cloned API
 * connections (with server-computed attributes) to surface back to clients.
 *
 * Duplicate names are silently skipped (first definition wins). A missing
 * name, the reserved name "duckdb", and unsupported connection types throw.
 */
export function assembleProjectConnections(
    connections: ApiConnection[] = [],
    projectPath = "",
): AssembledProjectConnections {
    const pojo: CoreConnectionsPojo = { connections: {} };
    const metadata = new Map<string, ProjectConnectionMetadata>();
    const apiConnections: ApiConnection[] = [];
    const processedConnections = new Set<string>();

    for (const connection of connections) {
        if (!connection.name) {
            throw new Error("Invalid connection configuration. No name.");
        }

        // First definition of a name wins; later duplicates are ignored.
        if (processedConnections.has(connection.name)) {
            continue;
        }

        if (connection.name === "duckdb") {
            throw new Error(
                "DuckDB connection name cannot be 'duckdb'; it is reserved for Publisher package sandboxes.",
            );
        }

        processedConnections.add(connection.name);
        validateDuckdbApiSurface(connection);
        validateConnectionShape(connection);

        // Clone so the attribute assignment below doesn't mutate the input.
        const apiConnection = cloneApiConnection(connection);
        apiConnection.attributes = getStaticConnectionAttributes(connection.type);
        const attachedDatabases =
            connection.duckdbConnection?.attachedDatabases ?? [];
        const isDuckLake = connection.type === "ducklake";
        const isDuckdb = connection.type === "duckdb";
        // duckdb/ducklake connections get a local database file under the
        // project path; other connection types carry no local file.
        const databasePath = isDuckLake
            ? path.join(projectPath, `${connection.name}_ducklake.duckdb`)
            : isDuckdb
              ? path.join(projectPath, `${connection.name}.duckdb`)
              : undefined;

        metadata.set(connection.name, {
            apiConnection,
            attachedDatabases,
            hasAzureAttachment: attachedDatabases.some(
                (database) => database.type === "azure",
            ),
            hasSnowflakePrivateKey:
                connection.type === "snowflake" &&
                !!connection.snowflakeConnection?.privateKey,
            isDuckLake,
            databasePath,
            workingDirectory: projectPath,
        });

        // Per-type translation into the Malloy core connection entry.
        switch (connection.type) {
            case "postgres": {
                const postgresConnection = connection.postgresConnection;
                pojo.connections[connection.name] = {
                    is: "postgres",
                    host: postgresConnection?.host,
                    port: postgresConnection?.port,
                    username: postgresConnection?.userName,
                    password: postgresConnection?.password,
                    databaseName: postgresConnection?.databaseName,
                    connectionString: postgresConnection
                        ? buildPostgresConnectionString(postgresConnection)
                        : undefined,
                };
                break;
            }

            case "mysql": {
                pojo.connections[connection.name] = {
                    is: "mysql",
                    host: connection.mysqlConnection?.host,
                    port: connection.mysqlConnection?.port,
                    user: connection.mysqlConnection?.user,
                    password: connection.mysqlConnection?.password,
                    database: connection.mysqlConnection?.database,
                };
                break;
            }

            case "bigquery": {
                const serviceAccountKey = parseServiceAccountKey(
                    connection.bigqueryConnection?.serviceAccountKeyJson as
                        | string
                        | undefined,
                );
                pojo.connections[connection.name] = {
                    is: "bigquery",
                    // An explicit project id wins; otherwise fall back to the
                    // project embedded in the service-account key.
                    projectId:
                        connection.bigqueryConnection?.defaultProjectId ??
                        serviceAccountKey?.project_id,
                    serviceAccountKey,
                    location: connection.bigqueryConnection?.location,
                    maximumBytesBilled:
                        connection.bigqueryConnection?.maximumBytesBilled,
                    timeoutMs:
                        connection.bigqueryConnection?.queryTimeoutMilliseconds,
                    billingProjectId:
                        connection.bigqueryConnection?.billingProjectId,
                };
                break;
            }

            case "snowflake": {
                pojo.connections[connection.name] = {
                    is: "snowflake",
                    account: connection.snowflakeConnection?.account,
                    username: connection.snowflakeConnection?.username,
                    password: connection.snowflakeConnection?.password,
                    // Single-line keys are re-wrapped into PEM layout first.
                    privateKey: connection.snowflakeConnection?.privateKey
                        ? normalizeSnowflakePrivateKey(
                              connection.snowflakeConnection.privateKey,
                          )
                        : undefined,
                    privateKeyPass: connection.snowflakeConnection?.privateKeyPass,
                    warehouse: connection.snowflakeConnection?.warehouse,
                    database: connection.snowflakeConnection?.database,
                    schema: connection.snowflakeConnection?.schema,
                    role: connection.snowflakeConnection?.role,
                    timeoutMs:
                        connection.snowflakeConnection?.responseTimeoutMilliseconds,
                    // Pool sizing is server-owned policy (matches the values
                    // main's deleted switch passed pre-MalloyConfig adoption).
                    // Not exposed through the public API.
                    poolMin: 1,
                    poolMax: 20,
                };
                break;
            }

            case "trino": {
                pojo.connections[connection.name] = {
                    is: "trino",
                    ...validateAndBuildTrinoCoreConfig(connection.trinoConnection),
                };
                break;
            }

            case "duckdb": {
                // An attached database may not reuse the connection's own name.
                if (
                    attachedDatabases.some(
                        (database) => database.name === connection.name,
                    )
                ) {
                    throw new Error(
                        `DuckDB attached database names cannot conflict with connection name ${connection.name}`,
                    );
                }
                pojo.connections[connection.name] = buildDuckdbEntry(
                    connection.name,
                    projectPath,
                    `${connection.name}.duckdb`,
                );
                break;
            }

            case "motherduck": {
                if (!connection.motherduckConnection?.accessToken) {
                    throw new Error("MotherDuck access token is required.");
                }

                pojo.connections[connection.name] = {
                    is: "duckdb",
                    // attach_mode=single is appended only when a specific
                    // MotherDuck database is named.
                    databasePath: connection.motherduckConnection.database
                        ? `md:${connection.motherduckConnection.database}?attach_mode=single`
                        : "md:",
                    motherDuckToken: connection.motherduckConnection.accessToken,
                };
                break;
            }

            case "ducklake": {
                if (!connection.ducklakeConnection) {
                    throw new Error("DuckLake connection configuration is missing.");
                }
                if (!connection.ducklakeConnection.catalog?.postgresConnection) {
                    throw new Error(
                        `PostgreSQL connection configuration is required for DuckLake catalog: ${connection.name}`,
                    );
                }
                pojo.connections[connection.name] = buildDuckdbEntry(
                    connection.name,
                    projectPath,
                    `${connection.name}_ducklake.duckdb`,
                );
                break;
            }

            default: {
                throw new Error(`Unsupported connection type: ${connection.type}`);
            }
        }

        apiConnections.push(apiConnection);
    }

    return { pojo, metadata, apiConnections };
}
473
+
474
+ function validateAndBuildTrinoCoreConfig(
475
+ trinoConfig: components["schemas"]["TrinoConnection"] | undefined,
476
+ ): Record<string, unknown> {
477
+ if (!trinoConfig) {
478
+ return {};
479
+ }
480
+
481
+ const server =
482
+ trinoConfig.server && trinoConfig.port
483
+ ? trinoConfig.server.includes(trinoConfig.port.toString())
484
+ ? trinoConfig.server
485
+ : `${trinoConfig.server}:${trinoConfig.port}`
486
+ : trinoConfig.server;
487
+
488
+ const baseConfig: Record<string, unknown> = {
489
+ server,
490
+ port: trinoConfig.port,
491
+ catalog: trinoConfig.catalog,
492
+ schema: trinoConfig.schema,
493
+ user: trinoConfig.user,
494
+ };
495
+
496
+ if (trinoConfig.peakaKey) {
497
+ baseConfig.extraCredential = {
498
+ peakaKey: trinoConfig.peakaKey,
499
+ };
500
+ return baseConfig;
501
+ }
502
+
503
+ if (server?.startsWith("https://") && trinoConfig.password) {
504
+ baseConfig.password = trinoConfig.password;
505
+ }
506
+
507
+ if (server?.startsWith("http://") || server?.startsWith("https://")) {
508
+ return baseConfig;
509
+ }
510
+
511
+ throw new Error(
512
+ `Invalid Trino connection: expected "http://server:port" or "https://server:port".`,
513
+ );
514
+ }
@@ -611,6 +611,56 @@ describe("service/connection_service", () => {
611
611
  ).toBe(true);
612
612
  });
613
613
 
614
    // Deleting a DuckDB connection must not remove the on-disk .duckdb file
    // while a previously published config may still be serving queries. The
    // service is expected to hand updateConnections a release callback and
    // perform the file cleanup only when that callback fires.
    it("should defer DuckDB file cleanup until the previous config release callback", async () => {
        const connectionName = "local-file";
        const duckdbConnection: ApiConnection = {
            name: connectionName,
            type: "duckdb",
            duckdbConnection: {
                attachedDatabases: [],
            },
        };
        // Row shape the connection repository returns for this connection.
        const mockDbConnection = {
            id: "conn-123",
            name: duckdbConnection.name,
            type: "duckdb",
            config: duckdbConnection,
        };
        const mockProject = {
            listApiConnections: sinon.stub().returns([duckdbConnection]),
            getApiConnection: sinon.stub().returns(duckdbConnection),
            updateConnections: sinon.stub(),
            deleteDuckDBConnection: sinon.stub().resolves(),
            metadata: { location: "/test/path" },
        };
        const getConnectionStub = sinon.stub(
            connectionService,
            "getConnection",
        );
        getConnectionStub.resolves({
            dbConnection: mockDbConnection,
            repository: mockRepository,
        } as unknown as Awaited<
            ReturnType<typeof connectionService.getConnection>
        >);
        (mockProjectStore.getProject as sinon.SinonStub).resolves(mockProject);

        await connectionService.deleteConnection(
            "test-project",
            connectionName,
        );

        // File cleanup must NOT have happened during deleteConnection itself.
        expect(mockProject.deleteDuckDBConnection.called).toBe(false);
        // The third argument passed to updateConnections is the release hook.
        const releaseCallback = mockProject.updateConnections.getCall(0)
            .args[2] as () => Promise<void>;

        expect(typeof releaseCallback).toBe("function");
        await releaseCallback();
        // Firing the release hook performs the deferred file cleanup.
        expect(
            mockProject.deleteDuckDBConnection.calledWith(connectionName),
        ).toBe(true);
    });
663
+
614
664
  it("should throw FrozenConfigError when config is frozen", async () => {
615
665
  mockProjectStore.publisherConfigIsFrozen = true;
616
666