postgresai 0.14.0-beta.12 → 0.14.0-beta.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/README.md +32 -0
  2. package/bin/postgres-ai.ts +1234 -170
  3. package/dist/bin/postgres-ai.js +2480 -410
  4. package/dist/sql/02.extensions.sql +8 -0
  5. package/dist/sql/{02.permissions.sql → 03.permissions.sql} +1 -0
  6. package/dist/sql/sql/02.extensions.sql +8 -0
  7. package/dist/sql/sql/{02.permissions.sql → 03.permissions.sql} +1 -0
  8. package/dist/sql/sql/uninit/01.helpers.sql +5 -0
  9. package/dist/sql/sql/uninit/02.permissions.sql +30 -0
  10. package/dist/sql/sql/uninit/03.role.sql +27 -0
  11. package/dist/sql/uninit/01.helpers.sql +5 -0
  12. package/dist/sql/uninit/02.permissions.sql +30 -0
  13. package/dist/sql/uninit/03.role.sql +27 -0
  14. package/lib/checkup.ts +69 -3
  15. package/lib/init.ts +184 -26
  16. package/lib/issues.ts +453 -7
  17. package/lib/mcp-server.ts +180 -3
  18. package/lib/metrics-embedded.ts +3 -3
  19. package/lib/supabase.ts +824 -0
  20. package/package.json +1 -1
  21. package/sql/02.extensions.sql +8 -0
  22. package/sql/{02.permissions.sql → 03.permissions.sql} +1 -0
  23. package/sql/uninit/01.helpers.sql +5 -0
  24. package/sql/uninit/02.permissions.sql +30 -0
  25. package/sql/uninit/03.role.sql +27 -0
  26. package/test/checkup.test.ts +240 -14
  27. package/test/config-consistency.test.ts +36 -0
  28. package/test/init.integration.test.ts +80 -71
  29. package/test/init.test.ts +501 -2
  30. package/test/issues.cli.test.ts +224 -0
  31. package/test/mcp-server.test.ts +551 -12
  32. package/test/supabase.test.ts +568 -0
  33. package/test/test-utils.ts +6 -0
  34. package/dist/sql/{03.optional_rds.sql → 04.optional_rds.sql} +0 -0
  35. package/dist/sql/{04.optional_self_managed.sql → 05.optional_self_managed.sql} +0 -0
  36. package/dist/sql/{05.helpers.sql → 06.helpers.sql} +0 -0
  37. package/dist/sql/sql/{03.optional_rds.sql → 04.optional_rds.sql} +0 -0
  38. package/dist/sql/sql/{04.optional_self_managed.sql → 05.optional_self_managed.sql} +0 -0
  39. package/dist/sql/sql/{05.helpers.sql → 06.helpers.sql} +0 -0
  40. package/sql/{03.optional_rds.sql → 04.optional_rds.sql} +0 -0
  41. package/sql/{04.optional_self_managed.sql → 05.optional_self_managed.sql} +0 -0
  42. package/sql/{05.helpers.sql → 06.helpers.sql} +0 -0
package/lib/mcp-server.ts CHANGED
@@ -1,6 +1,19 @@
1
1
  import pkg from "../package.json";
2
2
  import * as config from "./config";
3
- import { fetchIssues, fetchIssueComments, createIssueComment, fetchIssue, createIssue, updateIssue, updateIssueComment } from "./issues";
3
+ import {
4
+ fetchIssues,
5
+ fetchIssueComments,
6
+ createIssueComment,
7
+ fetchIssue,
8
+ createIssue,
9
+ updateIssue,
10
+ updateIssueComment,
11
+ fetchActionItem,
12
+ fetchActionItems,
13
+ createActionItem,
14
+ updateActionItem,
15
+ type ConfigChange,
16
+ } from "./issues";
4
17
  import { resolveBaseUrls } from "./util";
5
18
 
6
19
  // MCP SDK imports - Bun handles these directly
@@ -64,7 +77,14 @@ export async function handleToolCall(
64
77
 
65
78
  try {
66
79
  if (toolName === "list_issues") {
67
- const issues = await fetchIssues({ apiKey, apiBaseUrl, debug });
80
+ const orgId = args.org_id !== undefined ? Number(args.org_id) : cfg.orgId ?? undefined;
81
+ const statusArg = args.status ? String(args.status) : undefined;
82
+ let status: "open" | "closed" | undefined;
83
+ if (statusArg === "open") status = "open";
84
+ else if (statusArg === "closed") status = "closed";
85
+ const limit = args.limit !== undefined ? Number(args.limit) : undefined;
86
+ const offset = args.offset !== undefined ? Number(args.offset) : undefined;
87
+ const issues = await fetchIssues({ apiKey, apiBaseUrl, orgId, status, limit, offset, debug });
68
88
  return { content: [{ type: "text", text: JSON.stringify(issues, null, 2) }] };
69
89
  }
70
90
 
@@ -154,6 +174,82 @@ export async function handleToolCall(
154
174
  return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] };
155
175
  }
156
176
 
177
+ // Action Items Tools
178
+ if (toolName === "view_action_item") {
179
+ // Support both single ID and array of IDs
180
+ let actionItemIds: string[];
181
+ if (Array.isArray(args.action_item_ids)) {
182
+ actionItemIds = args.action_item_ids.map((id: unknown) => String(id).trim()).filter((id: string) => id);
183
+ } else if (args.action_item_id) {
184
+ actionItemIds = [String(args.action_item_id).trim()];
185
+ } else {
186
+ actionItemIds = [];
187
+ }
188
+ if (actionItemIds.length === 0) {
189
+ return { content: [{ type: "text", text: "action_item_id or action_item_ids is required" }], isError: true };
190
+ }
191
+ const actionItems = await fetchActionItem({ apiKey, apiBaseUrl, actionItemIds, debug });
192
+ if (actionItems.length === 0) {
193
+ return { content: [{ type: "text", text: "Action item(s) not found" }], isError: true };
194
+ }
195
+ return { content: [{ type: "text", text: JSON.stringify(actionItems, null, 2) }] };
196
+ }
197
+
198
+ if (toolName === "list_action_items") {
199
+ const issueId = String(args.issue_id || "").trim();
200
+ if (!issueId) {
201
+ return { content: [{ type: "text", text: "issue_id is required" }], isError: true };
202
+ }
203
+ const actionItems = await fetchActionItems({ apiKey, apiBaseUrl, issueId, debug });
204
+ return { content: [{ type: "text", text: JSON.stringify(actionItems, null, 2) }] };
205
+ }
206
+
207
+ if (toolName === "create_action_item") {
208
+ const issueId = String(args.issue_id || "").trim();
209
+ const rawTitle = String(args.title || "").trim();
210
+ if (!issueId) {
211
+ return { content: [{ type: "text", text: "issue_id is required" }], isError: true };
212
+ }
213
+ if (!rawTitle) {
214
+ return { content: [{ type: "text", text: "title is required" }], isError: true };
215
+ }
216
+ const title = interpretEscapes(rawTitle);
217
+ const rawDescription = args.description ? String(args.description) : undefined;
218
+ const description = rawDescription ? interpretEscapes(rawDescription) : undefined;
219
+ const sqlAction = args.sql_action !== undefined ? String(args.sql_action) : undefined;
220
+ const configs = Array.isArray(args.configs) ? args.configs as ConfigChange[] : undefined;
221
+ const result = await createActionItem({ apiKey, apiBaseUrl, issueId, title, description, sqlAction, configs, debug });
222
+ return { content: [{ type: "text", text: JSON.stringify({ id: result }, null, 2) }] };
223
+ }
224
+
225
+ if (toolName === "update_action_item") {
226
+ const actionItemId = String(args.action_item_id || "").trim();
227
+ if (!actionItemId) {
228
+ return { content: [{ type: "text", text: "action_item_id is required" }], isError: true };
229
+ }
230
+ const rawTitle = args.title !== undefined ? String(args.title) : undefined;
231
+ const title = rawTitle !== undefined ? interpretEscapes(rawTitle) : undefined;
232
+ const rawDescription = args.description !== undefined ? String(args.description) : undefined;
233
+ const description = rawDescription !== undefined ? interpretEscapes(rawDescription) : undefined;
234
+ const isDone = args.is_done !== undefined ? Boolean(args.is_done) : undefined;
235
+ const status = args.status !== undefined ? String(args.status) : undefined;
236
+ const statusReason = args.status_reason !== undefined ? String(args.status_reason) : undefined;
237
+
238
+ // Validate that at least one update field is provided
239
+ if (title === undefined && description === undefined &&
240
+ isDone === undefined && status === undefined && statusReason === undefined) {
241
+ return { content: [{ type: "text", text: "At least one field to update is required (title, description, is_done, status, or status_reason)" }], isError: true };
242
+ }
243
+
244
+ // Validate status value if provided
245
+ if (status !== undefined && !["waiting_for_approval", "approved", "rejected"].includes(status)) {
246
+ return { content: [{ type: "text", text: "status must be 'waiting_for_approval', 'approved', or 'rejected'" }], isError: true };
247
+ }
248
+
249
+ await updateActionItem({ apiKey, apiBaseUrl, actionItemId, title, description, isDone, status, statusReason, debug });
250
+ return { content: [{ type: "text", text: JSON.stringify({ success: true }, null, 2) }] };
251
+ }
252
+
157
253
  throw new Error(`Unknown tool: ${toolName}`);
158
254
  } catch (err) {
159
255
  const message = err instanceof Error ? err.message : String(err);
@@ -163,7 +259,11 @@ export async function handleToolCall(
163
259
 
164
260
  export async function startMcpServer(rootOpts?: RootOptsLike, extra?: { debug?: boolean }): Promise<void> {
165
261
  const server = new Server(
166
- { name: "postgresai-mcp", version: pkg.version },
262
+ {
263
+ name: "postgresai-mcp",
264
+ version: pkg.version,
265
+ title: "PostgresAI MCP Server",
266
+ },
167
267
  { capabilities: { tools: {} } }
168
268
  );
169
269
 
@@ -176,6 +276,10 @@ export async function startMcpServer(rootOpts?: RootOptsLike, extra?: { debug?:
176
276
  inputSchema: {
177
277
  type: "object",
178
278
  properties: {
279
+ org_id: { type: "number", description: "Organization ID (optional, falls back to config)" },
280
+ status: { type: "string", description: "Filter by status: 'open', 'closed', or omit for all" },
281
+ limit: { type: "number", description: "Max number of issues to return (default: 20)" },
282
+ offset: { type: "number", description: "Number of issues to skip (default: 0)" },
179
283
  debug: { type: "boolean", description: "Enable verbose debug logs" },
180
284
  },
181
285
  additionalProperties: false,
@@ -265,6 +369,79 @@ export async function startMcpServer(rootOpts?: RootOptsLike, extra?: { debug?:
265
369
  additionalProperties: false,
266
370
  },
267
371
  },
372
+ // Action Items Tools
373
+ {
374
+ name: "view_action_item",
375
+ description: "View action item(s) with all details. Supports single ID or multiple IDs.",
376
+ inputSchema: {
377
+ type: "object",
378
+ properties: {
379
+ action_item_id: { type: "string", description: "Single action item ID (UUID)" },
380
+ action_item_ids: { type: "array", items: { type: "string" }, description: "Multiple action item IDs (UUIDs)" },
381
+ debug: { type: "boolean", description: "Enable verbose debug logs" },
382
+ },
383
+ additionalProperties: false,
384
+ },
385
+ },
386
+ {
387
+ name: "list_action_items",
388
+ description: "List action items for an issue",
389
+ inputSchema: {
390
+ type: "object",
391
+ properties: {
392
+ issue_id: { type: "string", description: "Issue ID (UUID)" },
393
+ debug: { type: "boolean", description: "Enable verbose debug logs" },
394
+ },
395
+ required: ["issue_id"],
396
+ additionalProperties: false,
397
+ },
398
+ },
399
+ {
400
+ name: "create_action_item",
401
+ description: "Create a new action item for an issue",
402
+ inputSchema: {
403
+ type: "object",
404
+ properties: {
405
+ issue_id: { type: "string", description: "Issue ID (UUID)" },
406
+ title: { type: "string", description: "Action item title" },
407
+ description: { type: "string", description: "Detailed description" },
408
+ sql_action: { type: "string", description: "SQL command to execute, e.g. 'DROP INDEX CONCURRENTLY idx_unused;'" },
409
+ configs: {
410
+ type: "array",
411
+ items: {
412
+ type: "object",
413
+ properties: {
414
+ parameter: { type: "string" },
415
+ value: { type: "string" },
416
+ },
417
+ required: ["parameter", "value"],
418
+ },
419
+ description: "Configuration parameter changes",
420
+ },
421
+ debug: { type: "boolean", description: "Enable verbose debug logs" },
422
+ },
423
+ required: ["issue_id", "title"],
424
+ additionalProperties: false,
425
+ },
426
+ },
427
+ {
428
+ name: "update_action_item",
429
+ description: "Update an action item: mark as done/not done, approve/reject, or edit title/description",
430
+ inputSchema: {
431
+ type: "object",
432
+ properties: {
433
+ action_item_id: { type: "string", description: "Action item ID (UUID)" },
434
+ title: { type: "string", description: "New title" },
435
+ description: { type: "string", description: "New description" },
436
+ is_done: { type: "boolean", description: "Mark as done (true) or not done (false)" },
437
+ status: { type: "string", description: "Approval status: 'waiting_for_approval', 'approved', or 'rejected'" },
438
+ status_reason: { type: "string", description: "Reason for approval/rejection" },
439
+ debug: { type: "boolean", description: "Enable verbose debug logs" },
440
+ },
441
+ required: ["action_item_id"],
442
+ additionalProperties: false,
443
+ },
444
+ },
268
445
  ],
269
446
  };
270
447
  });
@@ -1,6 +1,6 @@
1
1
  // AUTO-GENERATED FILE - DO NOT EDIT
2
2
  // Generated from config/pgwatch-prometheus/metrics.yml by scripts/embed-metrics.ts
3
- // Generated at: 2025-12-30T08:09:59.566Z
3
+ // Generated at: 2026-01-13T04:02:21.955Z
4
4
 
5
5
  /**
6
6
  * Metric definition from metrics.yml
@@ -45,9 +45,9 @@ export const METRICS: Record<string, MetricDefinition> = {
45
45
  statement_timeout_seconds: 300,
46
46
  },
47
47
  "pg_invalid_indexes": {
48
- description: "This metric identifies invalid indexes in the database. It provides insights into the number of invalid indexes and their details. This metric helps administrators identify and fix invalid indexes to improve database performance.",
48
+ description: "This metric identifies invalid indexes in the database with decision tree data for remediation. It provides insights into whether to DROP (if duplicate exists), RECREATE (if backs constraint), or flag as UNCERTAIN (if additional RCA is needed to check query plans). Decision tree: 1) Valid duplicate exists -> DROP, 2) Backs PK/UNIQUE constraint -> RECREATE, 3) Table < 10K rows -> RECREATE (small tables rebuild quickly, typically under 1 second), 4) Otherwise -> UNCERTAIN (need query plan analysis to assess impact).",
49
49
  sqls: {
50
- 11: "with fk_indexes as ( /* pgwatch_generated */\n select\n schemaname as tag_schema_name,\n (indexrelid::regclass)::text as tag_index_name,\n (relid::regclass)::text as tag_table_name,\n (confrelid::regclass)::text as tag_fk_table_ref,\n array_to_string(indclass, ', ') as tag_opclasses\n from\n pg_stat_all_indexes\n join pg_index using (indexrelid)\n left join pg_constraint\n on array_to_string(indkey, ',') = array_to_string(conkey, ',')\n and schemaname = (connamespace::regnamespace)::text\n and conrelid = relid\n and contype = 'f'\n where idx_scan = 0\n and indisunique is false\n and conkey is not null --conkey is not null then true else false end as is_fk_idx\n), data as (\n select\n pci.relname as tag_index_name,\n pn.nspname as tag_schema_name,\n pct.relname as tag_table_name,\n quote_ident(pn.nspname) as tag_schema_name,\n quote_ident(pci.relname) as tag_index_name,\n quote_ident(pct.relname) as tag_table_name,\n coalesce(nullif(quote_ident(pn.nspname), 'public') || '.', '') || quote_ident(pct.relname) as tag_relation_name,\n pg_get_indexdef(pidx.indexrelid) as index_definition,\n pg_relation_size(pidx.indexrelid) index_size_bytes,\n ((\n select count(1)\n from fk_indexes fi\n where\n fi.tag_fk_table_ref = pct.relname\n and fi.tag_opclasses like (array_to_string(pidx.indclass, ', ') || '%')\n ) > 0)::int as supports_fk\n from pg_index pidx\n join pg_class as pci on pci.oid = pidx.indexrelid\n join pg_class as pct on pct.oid = pidx.indrelid\n left join pg_namespace pn on pn.oid = pct.relnamespace\n where pidx.indisvalid = false\n), data_total as (\n select\n sum(index_size_bytes) as index_size_bytes_sum\n from data\n), num_data as (\n select\n row_number() over () num,\n data.*\n from data\n)\nselect\n (extract(epoch from now()) * 1e9)::int8 as epoch_ns,\n current_database() as tag_datname,\n num_data.*\nfrom num_data\nlimit 1000;\n",
50
+ 11: "with fk_indexes as ( /* pgwatch_generated */\n select\n schemaname as schema_name,\n indexrelid,\n (indexrelid::regclass)::text as index_name,\n (relid::regclass)::text as table_name,\n (confrelid::regclass)::text as fk_table_ref,\n array_to_string(indclass, ', ') as opclasses\n from pg_stat_all_indexes\n join pg_index using (indexrelid)\n left join pg_constraint\n on array_to_string(indkey, ',') = array_to_string(conkey, ',')\n and schemaname = (connamespace::regnamespace)::text\n and conrelid = relid\n and contype = 'f'\n where idx_scan = 0\n and indisunique is false\n and conkey is not null\n),\n-- Find valid indexes that could be duplicates (same table, same columns)\nvalid_duplicates as (\n select\n inv.indexrelid as invalid_indexrelid,\n val.indexrelid as valid_indexrelid,\n (val.indexrelid::regclass)::text as valid_index_name,\n pg_get_indexdef(val.indexrelid) as valid_index_definition\n from pg_index inv\n join pg_index val on inv.indrelid = val.indrelid -- same table\n and inv.indkey = val.indkey -- same columns (in same order)\n and inv.indexrelid != val.indexrelid -- different index\n and val.indisvalid = true -- valid index\n where inv.indisvalid = false\n),\ndata as (\n select\n pci.relname as tag_index_name,\n pn.nspname as tag_schema_name,\n pct.relname as tag_table_name,\n coalesce(nullif(quote_ident(pn.nspname), 'public') || '.', '') || quote_ident(pct.relname) as tag_relation_name,\n pg_get_indexdef(pidx.indexrelid) as index_definition,\n pg_relation_size(pidx.indexrelid) as index_size_bytes,\n -- Constraint info\n pidx.indisprimary as is_pk,\n pidx.indisunique as is_unique,\n con.conname as constraint_name,\n -- Table row estimate\n pct.reltuples::bigint as table_row_estimate,\n -- Valid duplicate check\n (vd.valid_indexrelid is not null) as has_valid_duplicate,\n vd.valid_index_name,\n vd.valid_index_definition,\n -- FK support check\n ((\n select count(1)\n from fk_indexes fi\n where fi.fk_table_ref = pct.relname\n and fi.opclasses like (array_to_string(pidx.indclass, ', ') || '%')\n ) > 0)::int as supports_fk\n from pg_index pidx\n join pg_class pci on pci.oid = pidx.indexrelid\n join pg_class pct on pct.oid = pidx.indrelid\n left join pg_namespace pn on pn.oid = pct.relnamespace\n left join pg_constraint con on con.conindid = pidx.indexrelid\n left join valid_duplicates vd on vd.invalid_indexrelid = pidx.indexrelid\n where pidx.indisvalid = false\n),\nnum_data as (\n select\n row_number() over () as num,\n data.*\n from data\n)\nselect\n (extract(epoch from now()) * 1e9)::int8 as epoch_ns,\n current_database() as tag_datname,\n num_data.*\nfrom num_data\nlimit 1000;\n",
51
51
  },
52
52
  gauges: ["*"],
53
53
  statement_timeout_seconds: 15,