postgresai 0.14.0-dev.70 → 0.14.0-dev.72
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/postgres-ai.ts +403 -95
- package/dist/bin/postgres-ai.js +1126 -158
- package/lib/init.ts +76 -19
- package/lib/issues.ts +453 -7
- package/lib/mcp-server.ts +180 -3
- package/lib/metrics-embedded.ts +1 -1
- package/lib/supabase.ts +52 -0
- package/package.json +1 -1
- package/test/config-consistency.test.ts +36 -0
- package/test/init.integration.test.ts +78 -70
- package/test/init.test.ts +155 -0
- package/test/issues.cli.test.ts +224 -0
- package/test/mcp-server.test.ts +551 -12
package/lib/mcp-server.ts
CHANGED
@@ -1,6 +1,19 @@
 import pkg from "../package.json";
 import * as config from "./config";
-import { …
+import {
+  fetchIssues,
+  fetchIssueComments,
+  createIssueComment,
+  fetchIssue,
+  createIssue,
+  updateIssue,
+  updateIssueComment,
+  fetchActionItem,
+  fetchActionItems,
+  createActionItem,
+  updateActionItem,
+  type ConfigChange,
+} from "./issues";
 import { resolveBaseUrls } from "./util";

 // MCP SDK imports - Bun handles these directly

@@ -64,7 +77,14 @@ export async function handleToolCall(

   try {
     if (toolName === "list_issues") {
-      const …
+      const orgId = args.org_id !== undefined ? Number(args.org_id) : cfg.orgId ?? undefined;
+      const statusArg = args.status ? String(args.status) : undefined;
+      let status: "open" | "closed" | undefined;
+      if (statusArg === "open") status = "open";
+      else if (statusArg === "closed") status = "closed";
+      const limit = args.limit !== undefined ? Number(args.limit) : undefined;
+      const offset = args.offset !== undefined ? Number(args.offset) : undefined;
+      const issues = await fetchIssues({ apiKey, apiBaseUrl, orgId, status, limit, offset, debug });
       return { content: [{ type: "text", text: JSON.stringify(issues, null, 2) }] };
     }

@@ -154,6 +174,82 @@ export async function handleToolCall(
       return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] };
     }

+    // Action Items Tools
+    if (toolName === "view_action_item") {
+      // Support both single ID and array of IDs
+      let actionItemIds: string[];
+      if (Array.isArray(args.action_item_ids)) {
+        actionItemIds = args.action_item_ids.map((id: unknown) => String(id).trim()).filter((id: string) => id);
+      } else if (args.action_item_id) {
+        actionItemIds = [String(args.action_item_id).trim()];
+      } else {
+        actionItemIds = [];
+      }
+      if (actionItemIds.length === 0) {
+        return { content: [{ type: "text", text: "action_item_id or action_item_ids is required" }], isError: true };
+      }
+      const actionItems = await fetchActionItem({ apiKey, apiBaseUrl, actionItemIds, debug });
+      if (actionItems.length === 0) {
+        return { content: [{ type: "text", text: "Action item(s) not found" }], isError: true };
+      }
+      return { content: [{ type: "text", text: JSON.stringify(actionItems, null, 2) }] };
+    }
+
+    if (toolName === "list_action_items") {
+      const issueId = String(args.issue_id || "").trim();
+      if (!issueId) {
+        return { content: [{ type: "text", text: "issue_id is required" }], isError: true };
+      }
+      const actionItems = await fetchActionItems({ apiKey, apiBaseUrl, issueId, debug });
+      return { content: [{ type: "text", text: JSON.stringify(actionItems, null, 2) }] };
+    }
+
+    if (toolName === "create_action_item") {
+      const issueId = String(args.issue_id || "").trim();
+      const rawTitle = String(args.title || "").trim();
+      if (!issueId) {
+        return { content: [{ type: "text", text: "issue_id is required" }], isError: true };
+      }
+      if (!rawTitle) {
+        return { content: [{ type: "text", text: "title is required" }], isError: true };
+      }
+      const title = interpretEscapes(rawTitle);
+      const rawDescription = args.description ? String(args.description) : undefined;
+      const description = rawDescription ? interpretEscapes(rawDescription) : undefined;
+      const sqlAction = args.sql_action !== undefined ? String(args.sql_action) : undefined;
+      const configs = Array.isArray(args.configs) ? args.configs as ConfigChange[] : undefined;
+      const result = await createActionItem({ apiKey, apiBaseUrl, issueId, title, description, sqlAction, configs, debug });
+      return { content: [{ type: "text", text: JSON.stringify({ id: result }, null, 2) }] };
+    }
+
+    if (toolName === "update_action_item") {
+      const actionItemId = String(args.action_item_id || "").trim();
+      if (!actionItemId) {
+        return { content: [{ type: "text", text: "action_item_id is required" }], isError: true };
+      }
+      const rawTitle = args.title !== undefined ? String(args.title) : undefined;
+      const title = rawTitle !== undefined ? interpretEscapes(rawTitle) : undefined;
+      const rawDescription = args.description !== undefined ? String(args.description) : undefined;
+      const description = rawDescription !== undefined ? interpretEscapes(rawDescription) : undefined;
+      const isDone = args.is_done !== undefined ? Boolean(args.is_done) : undefined;
+      const status = args.status !== undefined ? String(args.status) : undefined;
+      const statusReason = args.status_reason !== undefined ? String(args.status_reason) : undefined;
+
+      // Validate that at least one update field is provided
+      if (title === undefined && description === undefined &&
+          isDone === undefined && status === undefined && statusReason === undefined) {
+        return { content: [{ type: "text", text: "At least one field to update is required (title, description, is_done, status, or status_reason)" }], isError: true };
+      }
+
+      // Validate status value if provided
+      if (status !== undefined && !["waiting_for_approval", "approved", "rejected"].includes(status)) {
+        return { content: [{ type: "text", text: "status must be 'waiting_for_approval', 'approved', or 'rejected'" }], isError: true };
+      }
+
+      await updateActionItem({ apiKey, apiBaseUrl, actionItemId, title, description, isDone, status, statusReason, debug });
+      return { content: [{ type: "text", text: JSON.stringify({ success: true }, null, 2) }] };
+    }
+
     throw new Error(`Unknown tool: ${toolName}`);
   } catch (err) {
     const message = err instanceof Error ? err.message : String(err);

@@ -163,7 +259,11 @@ export async function handleToolCall(

 export async function startMcpServer(rootOpts?: RootOptsLike, extra?: { debug?: boolean }): Promise<void> {
   const server = new Server(
-    { …
+    {
+      name: "postgresai-mcp",
+      version: pkg.version,
+      title: "PostgresAI MCP Server",
+    },
     { capabilities: { tools: {} } }
   );

@@ -176,6 +276,10 @@ export async function startMcpServer(rootOpts?: RootOptsLike, extra?: { debug?:
         inputSchema: {
           type: "object",
           properties: {
+            org_id: { type: "number", description: "Organization ID (optional, falls back to config)" },
+            status: { type: "string", description: "Filter by status: 'open', 'closed', or omit for all" },
+            limit: { type: "number", description: "Max number of issues to return (default: 20)" },
+            offset: { type: "number", description: "Number of issues to skip (default: 0)" },
             debug: { type: "boolean", description: "Enable verbose debug logs" },
           },
           additionalProperties: false,

@@ -265,6 +369,79 @@ export async function startMcpServer(rootOpts?: RootOptsLike, extra?: { debug?:
           additionalProperties: false,
         },
       },
+      // Action Items Tools
+      {
+        name: "view_action_item",
+        description: "View action item(s) with all details. Supports single ID or multiple IDs.",
+        inputSchema: {
+          type: "object",
+          properties: {
+            action_item_id: { type: "string", description: "Single action item ID (UUID)" },
+            action_item_ids: { type: "array", items: { type: "string" }, description: "Multiple action item IDs (UUIDs)" },
+            debug: { type: "boolean", description: "Enable verbose debug logs" },
+          },
+          additionalProperties: false,
+        },
+      },
+      {
+        name: "list_action_items",
+        description: "List action items for an issue",
+        inputSchema: {
+          type: "object",
+          properties: {
+            issue_id: { type: "string", description: "Issue ID (UUID)" },
+            debug: { type: "boolean", description: "Enable verbose debug logs" },
+          },
+          required: ["issue_id"],
+          additionalProperties: false,
+        },
+      },
+      {
+        name: "create_action_item",
+        description: "Create a new action item for an issue",
+        inputSchema: {
+          type: "object",
+          properties: {
+            issue_id: { type: "string", description: "Issue ID (UUID)" },
+            title: { type: "string", description: "Action item title" },
+            description: { type: "string", description: "Detailed description" },
+            sql_action: { type: "string", description: "SQL command to execute, e.g. 'DROP INDEX CONCURRENTLY idx_unused;'" },
+            configs: {
+              type: "array",
+              items: {
+                type: "object",
+                properties: {
+                  parameter: { type: "string" },
+                  value: { type: "string" },
+                },
+                required: ["parameter", "value"],
+              },
+              description: "Configuration parameter changes",
+            },
+            debug: { type: "boolean", description: "Enable verbose debug logs" },
+          },
+          required: ["issue_id", "title"],
+          additionalProperties: false,
+        },
+      },
+      {
+        name: "update_action_item",
+        description: "Update an action item: mark as done/not done, approve/reject, or edit title/description",
+        inputSchema: {
+          type: "object",
+          properties: {
+            action_item_id: { type: "string", description: "Action item ID (UUID)" },
+            title: { type: "string", description: "New title" },
+            description: { type: "string", description: "New description" },
+            is_done: { type: "boolean", description: "Mark as done (true) or not done (false)" },
+            status: { type: "string", description: "Approval status: 'waiting_for_approval', 'approved', or 'rejected'" },
+            status_reason: { type: "string", description: "Reason for approval/rejection" },
+            debug: { type: "boolean", description: "Enable verbose debug logs" },
+          },
+          required: ["action_item_id"],
+          additionalProperties: false,
+        },
+      },
     ],
   };
 });
package/lib/metrics-embedded.ts
CHANGED
package/lib/supabase.ts
CHANGED
@@ -333,6 +333,58 @@ export class SupabaseClient {
   }
 }

+/**
+ * Fetch the database pooler connection string from Supabase Management API.
+ * Returns a postgresql:// URL with username but no password.
+ *
+ * @param config Supabase configuration with projectRef and accessToken
+ * @returns Database URL without password (e.g., "postgresql://postgres.ref@host:port/postgres")
+ */
+export async function fetchPoolerDatabaseUrl(
+  config: SupabaseConfig
+): Promise<string | null> {
+  const url = `${SUPABASE_API_BASE}/v1/projects/${encodeURIComponent(config.projectRef)}/config/database/pooler`;
+
+  try {
+    const response = await fetch(url, {
+      method: "GET",
+      headers: {
+        Authorization: `Bearer ${config.accessToken}`,
+      },
+    });
+
+    if (!response.ok) {
+      return null;
+    }
+
+    const data = await response.json();
+
+    // The API returns an array of pooler configurations
+    // Look for a connection string in the response
+    if (Array.isArray(data) && data.length > 0) {
+      const pooler = data[0];
+      // Build URL from components if available
+      if (pooler.db_host && pooler.db_port && pooler.db_name && pooler.db_user) {
+        return `postgresql://${pooler.db_user}@${pooler.db_host}:${pooler.db_port}/${pooler.db_name}`;
+      }
+      // Fallback: try to extract from connection_string if present
+      if (typeof pooler.connection_string === "string") {
+        try {
+          const connUrl = new URL(pooler.connection_string);
+          // Remove password from URL
+          return `postgresql://${connUrl.username}@${connUrl.hostname}:${connUrl.port}${connUrl.pathname}`;
+        } catch {
+          return null;
+        }
+      }
+    }
+
+    return null;
+  } catch {
+    return null;
+  }
+}
+
 /**
  * Resolve Supabase configuration from options and environment variables.
  */
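The helper above returns null on any failure instead of throwing, so callers can treat a missing pooler configuration as a soft miss. A minimal usage sketch (illustrative only; it assumes SupabaseConfig is satisfied by projectRef and accessToken alone, and the project ref shown is a placeholder):

// Illustrative only; not part of the package.
import { fetchPoolerDatabaseUrl } from "./supabase";

const dbUrl = await fetchPoolerDatabaseUrl({
  projectRef: "abcdefghijklmnopqrst",                   // placeholder project ref
  accessToken: process.env.SUPABASE_ACCESS_TOKEN ?? "", // Management API personal access token
});

if (dbUrl) {
  // Expected shape: postgresql://<db_user>@<pooler_host>:<port>/<db_name> (no password)
  console.log("pooler URL:", dbUrl);
} else {
  console.log("pooler config unavailable (bad token, unknown project, or API error)");
}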
package/package.json
CHANGED
package/test/config-consistency.test.ts
ADDED

@@ -0,0 +1,36 @@
+/**
+ * Tests that config files are consistent with what the CLI expects.
+ * Catches schema mismatches like pg_statistic in wrong schema.
+ */
+import { describe, test, expect } from "bun:test";
+import { readFileSync } from "fs";
+import { resolve } from "path";
+
+const configDir = resolve(import.meta.dir, "../../config");
+
+describe("Config consistency", () => {
+  test("target-db/init.sql creates pg_statistic in postgres_ai schema", () => {
+    const initSql = readFileSync(resolve(configDir, "target-db/init.sql"), "utf8");
+
+    // Must create postgres_ai schema
+    expect(initSql).toMatch(/create\s+schema\s+if\s+not\s+exists\s+postgres_ai/i);
+
+    // Must create view in postgres_ai schema, not public
+    expect(initSql).toMatch(/create\s+or\s+replace\s+view\s+postgres_ai\.pg_statistic/i);
+    expect(initSql).not.toMatch(/create\s+or\s+replace\s+view\s+public\.pg_statistic/i);
+
+    // Must grant on postgres_ai.pg_statistic
+    expect(initSql).toMatch(/grant\s+select\s+on\s+postgres_ai\.pg_statistic/i);
+  });
+
+  test("pgwatch metrics.yml uses postgres_ai.pg_statistic", () => {
+    const metricsYml = readFileSync(
+      resolve(configDir, "pgwatch-prometheus/metrics.yml"),
+      "utf8"
+    );
+
+    // Should reference postgres_ai.pg_statistic, not public.pg_statistic
+    expect(metricsYml).not.toMatch(/public\.pg_statistic/);
+    expect(metricsYml).toMatch(/postgres_ai\.pg_statistic/);
+  });
+});
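The integration-test changes in the next file follow one recurring shape: create the temporary Postgres inside the test body, wrap the assertions in try/finally so cleanup always runs, and pass an explicit timeout as the third argument to test(). A stripped-down sketch of that pattern, using a stand-in resource instead of the package's createTempPostgres helper:

// Illustrative pattern only; acquireFakeResource stands in for createTempPostgres.
import { test, expect } from "bun:test";

async function acquireFakeResource() {
  // Returns something with the same lifecycle: use it, then call cleanup().
  return {
    ping: async () => true,
    cleanup: async () => { /* release the resource */ },
  };
}

test(
  "cleanup runs even if an assertion fails",
  async () => {
    const resource = await acquireFakeResource();
    try {
      expect(await resource.ping()).toBe(true);
    } finally {
      await resource.cleanup();
    }
  },
  { timeout: 15000 }
);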
package/test/init.integration.test.ts
CHANGED

@@ -241,70 +241,76 @@ describe.skipIf(skipTests)("integration: prepare-db", () => {
     }
   });

-  test(
- … (old line 245 not shown)
+  test(
+    "fixes slightly-off permissions idempotently",
+    async () => {
+      pg = await createTempPostgres();

- … (old lines 247-256 not shown)
+      try {
+        // Create monitoring role with wrong password, no grants.
+        {
+          const c = new Client({ connectionString: pg.adminUri });
+          await c.connect();
+          await c.query(
+            "do $$ begin if not exists (select 1 from pg_roles where rolname='postgres_ai_mon') then create role postgres_ai_mon login password 'wrong'; end if; end $$;"
+          );
+          await c.end();
+        }

- … (old lines 258-262 not shown)
+        // Run init (should grant everything).
+        {
+          const r = runCliInit([pg.adminUri, "--password", "correctpw", "--skip-optional-permissions"]);
+          expect(r.status).toBe(0);
+        }

- … (old lines 264-286 not shown)
+        // Verify privileges.
+        {
+          const c = new Client({ connectionString: pg.adminUri });
+          await c.connect();
+          const dbOk = await c.query(
+            "select has_database_privilege('postgres_ai_mon', current_database(), 'CONNECT') as ok"
+          );
+          expect(dbOk.rows[0].ok).toBe(true);
+          const roleOk = await c.query("select pg_has_role('postgres_ai_mon', 'pg_monitor', 'member') as ok");
+          expect(roleOk.rows[0].ok).toBe(true);
+          const idxOk = await c.query(
+            "select has_table_privilege('postgres_ai_mon', 'pg_catalog.pg_index', 'SELECT') as ok"
+          );
+          expect(idxOk.rows[0].ok).toBe(true);
+          const viewOk = await c.query(
+            "select has_table_privilege('postgres_ai_mon', 'postgres_ai.pg_statistic', 'SELECT') as ok"
+          );
+          expect(viewOk.rows[0].ok).toBe(true);
+          const sp = await c.query("select rolconfig from pg_roles where rolname='postgres_ai_mon'");
+          expect(Array.isArray(sp.rows[0].rolconfig)).toBe(true);
+          expect(sp.rows[0].rolconfig.some((v: string) => String(v).includes("search_path="))).toBe(true);
+          await c.end();
+        }

- … (old lines 288-291 not shown)
+        // Run init again (idempotent).
+        {
+          const r = runCliInit([pg.adminUri, "--password", "correctpw", "--skip-optional-permissions"]);
+          expect(r.status).toBe(0);
+        }
+      } finally {
+        await pg.cleanup();
       }
-  }
- … (old lines 294-295 not shown)
-  });
+    },
+    { timeout: 15000 }
+  );

-  test(
- … (old line 299 not shown)
+  test(
+    "reports nicely when lacking permissions",
+    async () => {
+      pg = await createTempPostgres();

- … (old lines 301-307 not shown)
+      try {
+        // Create limited user that can connect but cannot create roles / grant.
+        const limitedPw = "limitedpw";
+        {
+          const c = new Client({ connectionString: pg.adminUri });
+          await c.connect();
+          await c.query(`do $$ begin
 if not exists (select 1 from pg_roles where rolname='limited') then
 begin
 create role limited login password ${sqlLiteral(limitedPw)};

@@ -313,20 +319,22 @@ describe.skipIf(skipTests)("integration: prepare-db", () => {
 end;
 end if;
 end $$;`);
- … (old lines 316-318 not shown)
+          await c.query("grant connect on database testdb to limited");
+          await c.end();
+        }

- … (old lines 320-329 not shown)
+        const limitedUri = `postgresql://limited:${limitedPw}@127.0.0.1:${pg.port}/testdb`;
+        const r = runCliInit([limitedUri, "--password", "monpw", "--skip-optional-permissions"]);
+        expect(r.status).not.toBe(0);
+        expect(r.stderr).toMatch(/Error: prepare-db:/);
+        expect(r.stderr).toMatch(/Failed at step "/);
+        expect(r.stderr).toMatch(/Fix: connect as a superuser/i);
+      } finally {
+        await pg.cleanup();
+      }
+    },
+    { timeout: 15000 }
+  );

   test(
"--verify returns 0 when ok and non-zero when missing",
|