postgresai 0.14.0-dev.81 → 0.14.0-dev.83

This diff shows the changes between publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
@@ -758,7 +758,7 @@ program
758
758
 
759
759
  const supabaseClient = new SupabaseClient(supabaseConfig);
760
760
 
761
- // Fetch database URL for JSON output (non-blocking, best-effort)
761
+ // Fetch database URL for JSON output (best-effort, errors return null)
762
762
  let databaseUrl: string | null = null;
763
763
  if (jsonOutput) {
764
764
  databaseUrl = await fetchPoolerDatabaseUrl(supabaseConfig, opts.monitoringUser);
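
Note on the hunk above: the reworded comment makes the contract explicit, the pooler lookup is best-effort and a failure surfaces as null rather than an exception, so JSON output is never blocked on it. A minimal sketch of that contract (the wrapper name below is illustrative, not part of the package):

```ts
// Sketch only: "best-effort" here means a failed lookup yields null and the
// JSON output path simply omits the database URL instead of aborting.
async function bestEffort<T>(lookup: () => Promise<T | null>): Promise<T | null> {
  try {
    return await lookup();
  } catch {
    return null; // swallow errors; callers treat null as "not available"
  }
}

// e.g. databaseUrl stays null if the Supabase API call fails:
// const databaseUrl = await bestEffort(() => fetchPoolerDatabaseUrl(cfg, user));
```
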
@@ -1648,12 +1648,13 @@ program
1648
1648
  .option("--check-id <id>", `specific check to run (see list below), or ALL`, "ALL")
1649
1649
  .option("--node-name <name>", "node name for reports", "node-01")
1650
1650
  .option("--output <path>", "output directory for JSON files")
1651
- .option("--[no-]upload", "upload JSON results to PostgresAI (default: enabled; requires API key)", undefined)
1651
+ .option("--upload", "upload JSON results to PostgresAI (requires API key)")
1652
+ .option("--no-upload", "disable upload to PostgresAI")
1652
1653
  .option(
1653
1654
  "--project <project>",
1654
1655
  "project name or ID for remote upload (used with --upload; defaults to config defaultProject; auto-generated on first run)"
1655
1656
  )
1656
- .option("--json", "output JSON to stdout (implies --no-upload)")
1657
+ .option("--json", "output JSON to stdout")
1657
1658
  .addHelpText(
1658
1659
  "after",
1659
1660
  [
@@ -1678,7 +1679,9 @@ program
1678
1679
 
1679
1680
  const shouldPrintJson = !!opts.json;
1680
1681
  const uploadExplicitlyRequested = opts.upload === true;
1681
- const uploadExplicitlyDisabled = opts.upload === false || shouldPrintJson;
1682
+ // Note: --json and --upload/--no-upload are independent flags.
1683
+ // Use --no-upload to explicitly disable upload when using --json.
1684
+ const uploadExplicitlyDisabled = opts.upload === false;
1682
1685
  let shouldUpload = !uploadExplicitlyDisabled;
1683
1686
 
1684
1687
  // Preflight: validate/create output directory BEFORE connecting / running checks.
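
The two hunks above split the old `--[no-]upload` definition into separate `--upload` and `--no-upload` options and stop letting `--json` imply `--no-upload`. A minimal sketch of the resulting tri-state handling, assuming Commander-style parsing where `opts.upload` is `true`, `false`, or `undefined` (the helper name and types below are illustrative):

```ts
// Sketch only: mirrors the decoupled logic in the hunk above.
type CheckupOpts = { upload?: boolean; json?: boolean };

function resolveUploadIntent(opts: CheckupOpts): {
  requested: boolean;    // --upload passed explicitly
  disabled: boolean;     // --no-upload passed explicitly
  shouldUpload: boolean; // default is to upload unless explicitly disabled
} {
  const requested = opts.upload === true;
  const disabled = opts.upload === false; // note: --json no longer implies this
  return { requested, disabled, shouldUpload: !disabled };
}

// `checkup --json` alone still uploads; `checkup --json --no-upload` does not.
console.log(resolveUploadIntent({ json: true }));                // shouldUpload: true
console.log(resolveUploadIntent({ json: true, upload: false })); // shouldUpload: false
```

With the flags decoupled, `--json --upload` becomes a valid combination (print JSON locally and upload), which is what the new checkup tests further down exercise.
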
@@ -1830,7 +1833,8 @@ async function resolveOrInitPaths(): Promise<PathResolution> {
1830
1833
  */
1831
1834
  function isDockerRunning(): boolean {
1832
1835
  try {
1833
- const result = spawnSync("docker", ["info"], { stdio: "pipe", timeout: 5000 });
1836
+ // Note: timeout is supported by Bun but not in @types/bun
1837
+ const result = spawnSync("docker", ["info"], { stdio: "pipe", timeout: 5000 } as Parameters<typeof spawnSync>[2]);
1834
1838
  return result.status === 0;
1835
1839
  } catch {
1836
1840
  return false;
@@ -1842,7 +1846,7 @@ function isDockerRunning(): boolean {
1842
1846
  */
1843
1847
  function getComposeCmd(): string[] | null {
1844
1848
  const tryCmd = (cmd: string, args: string[]): boolean =>
1845
- spawnSync(cmd, args, { stdio: "ignore", timeout: 5000 }).status === 0;
1849
+ spawnSync(cmd, args, { stdio: "ignore", timeout: 5000 } as Parameters<typeof spawnSync>[2]).status === 0;
1846
1850
  if (tryCmd("docker-compose", ["version"])) return ["docker-compose"];
1847
1851
  if (tryCmd("docker", ["compose", "version"])) return ["docker", "compose"];
1848
1852
  return null;
@@ -1856,7 +1860,7 @@ function checkRunningContainers(): { running: boolean; containers: string[] } {
1856
1860
  const result = spawnSync(
1857
1861
  "docker",
1858
1862
  ["ps", "--filter", "name=grafana-with-datasources", "--filter", "name=pgwatch", "--format", "{{.Names}}"],
1859
- { stdio: "pipe", encoding: "utf8", timeout: 5000 }
1863
+ { stdio: "pipe", encoding: "utf8", timeout: 5000 } as Parameters<typeof spawnSync>[2]
1860
1864
  );
1861
1865
 
1862
1866
  if (result.status === 0 && result.stdout) {
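
The three `spawnSync` hunks above add the same cast: per the in-diff comment, Bun honors the `timeout` option even though the bundled typings do not declare it, so the options literal is cast to the parameter type that `spawnSync` already declares rather than being retyped by hand. A minimal sketch of the pattern (assuming Node-style `child_process.spawnSync`, as used here):

```ts
// Sketch only: the Parameters<typeof spawnSync>[2] cast keeps the call site
// type-checking when the declared options type lags behind runtime support
// (per the diff comment, Bun honors `timeout` even if the typings omit it).
import { spawnSync } from "child_process";

type SpawnSyncOpts = Parameters<typeof spawnSync>[2]; // declared options type

function dockerInfoOk(): boolean {
  const result = spawnSync("docker", ["info"], {
    stdio: "pipe",
    timeout: 5000, // honored at runtime; the cast only affects the type check
  } as SpawnSyncOpts);
  return result.status === 0;
}
```
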
package/bun.lock CHANGED
@@ -11,12 +11,12 @@
11
11
  "pg": "^8.16.3",
12
12
  },
13
13
  "devDependencies": {
14
- "@types/bun": "^1.1.14",
14
+ "@types/bun": "^1.3.6",
15
15
  "@types/js-yaml": "^4.0.9",
16
16
  "@types/pg": "^8.15.6",
17
17
  "ajv": "^8.17.1",
18
18
  "ajv-formats": "^3.0.1",
19
- "typescript": "^5.3.3",
19
+ "typescript": "^5.9.3",
20
20
  },
21
21
  },
22
22
  },
@@ -25,7 +25,7 @@
25
25
 
26
26
  "@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.25.1", "", { "dependencies": { "@hono/node-server": "^1.19.7", "ajv": "^8.17.1", "ajv-formats": "^3.0.1", "content-type": "^1.0.5", "cors": "^2.8.5", "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "eventsource-parser": "^3.0.0", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "jose": "^6.1.1", "json-schema-typed": "^8.0.2", "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.25 || ^4.0", "zod-to-json-schema": "^3.25.0" }, "peerDependencies": { "@cfworker/json-schema": "^4.1.1", "zod": "^3.25 || ^4.0" }, "optionalPeers": ["@cfworker/json-schema"] }, "sha512-yO28oVFFC7EBoiKdAn+VqRm+plcfv4v0xp6osG/VsCB0NlPZWi87ajbCZZ8f/RvOFLEu7//rSRmuZZ7lMoe3gQ=="],
27
27
 
28
- "@types/bun": ["@types/bun@1.3.5", "", { "dependencies": { "bun-types": "1.3.5" } }, "sha512-RnygCqNrd3srIPEWBd5LFeUYG7plCoH2Yw9WaZGyNmdTEei+gWaHqydbaIRkIkcbXwhBT94q78QljxN0Sk838w=="],
28
+ "@types/bun": ["@types/bun@1.3.6", "", { "dependencies": { "bun-types": "1.3.6" } }, "sha512-uWCv6FO/8LcpREhenN1d1b6fcspAB+cefwD7uti8C8VffIv0Um08TKMn98FynpTiU38+y2dUO55T11NgDt8VAA=="],
29
29
 
30
30
  "@types/js-yaml": ["@types/js-yaml@4.0.9", "", {}, "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg=="],
31
31
 
@@ -43,7 +43,7 @@
43
43
 
44
44
  "body-parser": ["body-parser@2.2.1", "", { "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", "debug": "^4.4.3", "http-errors": "^2.0.0", "iconv-lite": "^0.7.0", "on-finished": "^2.4.1", "qs": "^6.14.0", "raw-body": "^3.0.1", "type-is": "^2.0.1" } }, "sha512-nfDwkulwiZYQIGwxdy0RUmowMhKcFVcYXUU7m4QlKYim1rUtg83xm2yjZ40QjDuc291AJjjeSc9b++AWHSgSHw=="],
45
45
 
46
- "bun-types": ["bun-types@1.3.5", "", { "dependencies": { "@types/node": "*" } }, "sha512-inmAYe2PFLs0SUbFOWSVD24sg1jFlMPxOjOSSCYqUgn4Hsc3rDc7dFvfVYjFPNHtov6kgUeulV4SxbuIV/stPw=="],
46
+ "bun-types": ["bun-types@1.3.6", "", { "dependencies": { "@types/node": "*" } }, "sha512-OlFwHcnNV99r//9v5IIOgQ9Uk37gZqrNMCcqEaExdkVq3Avwqok1bJFmvGMCkCE0FqzdY8VMOZpfpR3lwI+CsQ=="],
47
47
 
48
48
  "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="],
49
49
 
@@ -13064,7 +13064,7 @@ var {
13064
13064
  // package.json
13065
13065
  var package_default = {
13066
13066
  name: "postgresai",
13067
- version: "0.14.0-dev.81",
13067
+ version: "0.14.0-dev.83",
13068
13068
  description: "postgres_ai CLI",
13069
13069
  license: "Apache-2.0",
13070
13070
  private: false,
@@ -13098,7 +13098,7 @@ var package_default = {
13098
13098
  "start:node": "node ./dist/bin/postgres-ai.js --help",
13099
13099
  dev: "bun run embed-all && bun --watch ./bin/postgres-ai.ts",
13100
13100
  test: "bun run embed-all && bun test",
13101
- "test:fast": "bun run embed-all && bun test",
13101
+ "test:fast": "bun run embed-all && bun test --coverage=false",
13102
13102
  "test:coverage": "bun run embed-all && bun test --coverage && echo 'Coverage report: cli/coverage/lcov-report/index.html'",
13103
13103
  typecheck: "bun run embed-all && bunx tsc --noEmit"
13104
13104
  },
@@ -13109,12 +13109,12 @@ var package_default = {
13109
13109
  pg: "^8.16.3"
13110
13110
  },
13111
13111
  devDependencies: {
13112
- "@types/bun": "^1.1.14",
13112
+ "@types/bun": "^1.3.6",
13113
13113
  "@types/js-yaml": "^4.0.9",
13114
13114
  "@types/pg": "^8.15.6",
13115
13115
  ajv: "^8.17.1",
13116
13116
  "ajv-formats": "^3.0.1",
13117
- typescript: "^5.3.3"
13117
+ typescript: "^5.9.3"
13118
13118
  },
13119
13119
  publishConfig: {
13120
13120
  access: "public"
@@ -15889,7 +15889,7 @@ var Result = import_lib.default.Result;
15889
15889
  var TypeOverrides = import_lib.default.TypeOverrides;
15890
15890
  var defaults = import_lib.default.defaults;
15891
15891
  // package.json
15892
- var version = "0.14.0-dev.81";
15892
+ var version = "0.14.0-dev.83";
15893
15893
  var package_default2 = {
15894
15894
  name: "postgresai",
15895
15895
  version,
@@ -15926,7 +15926,7 @@ var package_default2 = {
15926
15926
  "start:node": "node ./dist/bin/postgres-ai.js --help",
15927
15927
  dev: "bun run embed-all && bun --watch ./bin/postgres-ai.ts",
15928
15928
  test: "bun run embed-all && bun test",
15929
- "test:fast": "bun run embed-all && bun test",
15929
+ "test:fast": "bun run embed-all && bun test --coverage=false",
15930
15930
  "test:coverage": "bun run embed-all && bun test --coverage && echo 'Coverage report: cli/coverage/lcov-report/index.html'",
15931
15931
  typecheck: "bun run embed-all && bunx tsc --noEmit"
15932
15932
  },
@@ -15937,12 +15937,12 @@ var package_default2 = {
15937
15937
  pg: "^8.16.3"
15938
15938
  },
15939
15939
  devDependencies: {
15940
- "@types/bun": "^1.1.14",
15940
+ "@types/bun": "^1.3.6",
15941
15941
  "@types/js-yaml": "^4.0.9",
15942
15942
  "@types/pg": "^8.15.6",
15943
15943
  ajv: "^8.17.1",
15944
15944
  "ajv-formats": "^3.0.1",
15945
- typescript: "^5.3.3"
15945
+ typescript: "^5.9.3"
15946
15946
  },
15947
15947
  publishConfig: {
15948
15948
  access: "public"
@@ -25378,6 +25378,9 @@ class SupabaseClient {
25378
25378
  }
25379
25379
  async function fetchPoolerDatabaseUrl(config2, username) {
25380
25380
  const url = `${SUPABASE_API_BASE}/v1/projects/${encodeURIComponent(config2.projectRef)}/config/database/pooler`;
25381
+ const suffix = `.${config2.projectRef}`;
25382
+ const effectiveUsername = username.endsWith(suffix) ? username : `${username}${suffix}`;
25383
+ const encodedUsername = encodeURIComponent(effectiveUsername);
25381
25384
  try {
25382
25385
  const response = await fetch(url, {
25383
25386
  method: "GET",
@@ -25392,13 +25395,13 @@ async function fetchPoolerDatabaseUrl(config2, username) {
25392
25395
  if (Array.isArray(data) && data.length > 0) {
25393
25396
  const pooler = data[0];
25394
25397
  if (pooler.db_host && pooler.db_port && pooler.db_name) {
25395
- return `postgresql://${username}@${pooler.db_host}:${pooler.db_port}/${pooler.db_name}`;
25398
+ return `postgresql://${encodedUsername}@${pooler.db_host}:${pooler.db_port}/${pooler.db_name}`;
25396
25399
  }
25397
25400
  if (typeof pooler.connection_string === "string") {
25398
25401
  try {
25399
25402
  const connUrl = new URL(pooler.connection_string);
25400
25403
  const portPart = connUrl.port ? `:${connUrl.port}` : "";
25401
- return `postgresql://${username}@${connUrl.hostname}${portPart}${connUrl.pathname}`;
25404
+ return `postgresql://${encodedUsername}@${connUrl.hostname}${portPart}${connUrl.pathname}`;
25402
25405
  } catch {
25403
25406
  return null;
25404
25407
  }
@@ -27103,7 +27106,7 @@ var CHECKUP_DICTIONARY_DATA = [
27103
27106
  ];
27104
27107
 
27105
27108
  // lib/checkup-dictionary.ts
27106
- var dictionaryByCode = new Map(CHECKUP_DICTIONARY_DATA.map((entry) => [entry.code, entry]));
27109
+ var dictionaryByCode = new Map(CHECKUP_DICTIONARY_DATA.map((entry) => [entry.code.toUpperCase(), entry]));
27107
27110
  function buildCheckInfoMap() {
27108
27111
  const result = {};
27109
27112
  for (const entry of CHECKUP_DICTIONARY_DATA) {
@@ -27973,7 +27976,7 @@ async function generateAllReports(client, nodeName = "node-01", onProgress) {
27973
27976
  }
27974
27977
 
27975
27978
  // lib/checkup-dictionary.ts
27976
- var dictionaryByCode2 = new Map(CHECKUP_DICTIONARY_DATA.map((entry) => [entry.code, entry]));
27979
+ var dictionaryByCode2 = new Map(CHECKUP_DICTIONARY_DATA.map((entry) => [entry.code.toUpperCase(), entry]));
27977
27980
  function getCheckupEntry(code) {
27978
27981
  return dictionaryByCode2.get(code.toUpperCase()) ?? null;
27979
27982
  }
@@ -29484,7 +29487,7 @@ program2.command("unprepare-db [conn]").description("remove monitoring setup: dr
29484
29487
  closeReadline();
29485
29488
  }
29486
29489
  });
29487
- program2.command("checkup [conn]").description("generate health check reports directly from PostgreSQL (express mode)").option("--check-id <id>", `specific check to run (see list below), or ALL`, "ALL").option("--node-name <name>", "node name for reports", "node-01").option("--output <path>", "output directory for JSON files").option("--[no-]upload", "upload JSON results to PostgresAI (default: enabled; requires API key)", undefined).option("--project <project>", "project name or ID for remote upload (used with --upload; defaults to config defaultProject; auto-generated on first run)").option("--json", "output JSON to stdout (implies --no-upload)").addHelpText("after", [
29490
+ program2.command("checkup [conn]").description("generate health check reports directly from PostgreSQL (express mode)").option("--check-id <id>", `specific check to run (see list below), or ALL`, "ALL").option("--node-name <name>", "node name for reports", "node-01").option("--output <path>", "output directory for JSON files").option("--upload", "upload JSON results to PostgresAI (requires API key)").option("--no-upload", "disable upload to PostgresAI").option("--project <project>", "project name or ID for remote upload (used with --upload; defaults to config defaultProject; auto-generated on first run)").option("--json", "output JSON to stdout").addHelpText("after", [
29488
29491
  "",
29489
29492
  "Available checks:",
29490
29493
  ...Object.entries(CHECK_INFO).map(([id, title]) => ` ${id}: ${title}`),
@@ -29504,7 +29507,7 @@ program2.command("checkup [conn]").description("generate health check reports di
29504
29507
  }
29505
29508
  const shouldPrintJson = !!opts.json;
29506
29509
  const uploadExplicitlyRequested = opts.upload === true;
29507
- const uploadExplicitlyDisabled = opts.upload === false || shouldPrintJson;
29510
+ const uploadExplicitlyDisabled = opts.upload === false;
29508
29511
  let shouldUpload = !uploadExplicitlyDisabled;
29509
29512
  const outputPath = prepareOutputDirectory(opts.output);
29510
29513
  if (outputPath === null) {
@@ -32,9 +32,10 @@ export interface CheckupDictionaryEntry {
32
32
  /**
33
33
  * Module-level cache for O(1) lookups by code.
34
34
  * Initialized at module load time from embedded data.
35
+ * Keys are normalized to uppercase for case-insensitive lookups.
35
36
  */
36
37
  const dictionaryByCode: Map<string, CheckupDictionaryEntry> = new Map(
37
- CHECKUP_DICTIONARY_DATA.map((entry) => [entry.code, entry])
38
+ CHECKUP_DICTIONARY_DATA.map((entry) => [entry.code.toUpperCase(), entry])
38
39
  );
39
40
 
40
41
  /**
@@ -49,7 +50,7 @@ export function getAllCheckupEntries(): CheckupDictionaryEntry[] {
49
50
  /**
50
51
  * Get a checkup dictionary entry by its code.
51
52
  *
52
- * @param code - The check code (e.g., "A001", "H002")
53
+ * @param code - The check code (e.g., "A001", "H002"). Lookup is case-insensitive.
53
54
  * @returns The dictionary entry or null if not found
54
55
  */
55
56
  export function getCheckupEntry(code: string): CheckupDictionaryEntry | null {
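
Both the bundled output and `lib/checkup-dictionary.ts` now uppercase the map keys at module load, matching the `code.toUpperCase()` already applied inside `getCheckupEntry`, so lookups become case-insensitive end to end. A minimal sketch of the normalized-key pattern (the entry shape and sample data below are placeholders, not the package's real dictionary):

```ts
// Sketch only: normalize keys once at module load, then normalize again on lookup.
interface DictEntry { code: string; title: string }

const DATA: DictEntry[] = [
  { code: "A001", title: "Postgres version" },
  { code: "h002", title: "Unused indexes" }, // mixed case in source data is fine
];

const byCode: Map<string, DictEntry> = new Map(
  DATA.map((e) => [e.code.toUpperCase(), e])
);

function getEntry(code: string): DictEntry | null {
  return byCode.get(code.toUpperCase()) ?? null;
}

console.log(getEntry("h002")?.title); // "Unused indexes"
console.log(getEntry("H002")?.title); // "Unused indexes"
```
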
package/lib/mcp-server.ts CHANGED
@@ -447,7 +447,7 @@ export async function startMcpServer(rootOpts?: RootOptsLike, extra?: { debug?:
447
447
  });
448
448
 
449
449
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
450
- server.setRequestHandler(CallToolRequestSchema, async (req: any) => {
450
+ server.setRequestHandler(CallToolRequestSchema, async (req: any): Promise<any> => {
451
451
  return handleToolCall(req, rootOpts, extra);
452
452
  });
453
453
 
package/lib/supabase.ts CHANGED
@@ -337,9 +337,14 @@ export class SupabaseClient {
337
337
  * Fetch the database pooler connection string from Supabase Management API.
338
338
  * Returns a postgresql:// URL with the specified username but no password.
339
339
  *
340
+ * Note: The username will be automatically suffixed with `.<projectRef>` if not
341
+ * already present, as required by Supabase pooler connections.
342
+ *
340
343
  * @param config Supabase configuration with projectRef and accessToken
341
- * @param username Username to include in the URL (e.g., monitoring user)
342
- * @returns Database URL without password (e.g., "postgresql://user@host:port/postgres")
344
+ * @param username Username to include in the URL (e.g., monitoring user).
345
+ * Will be transformed to `<username>.<projectRef>` format.
346
+ * @returns Database URL without password (e.g., "postgresql://user.project@host:port/postgres"),
347
+ * or null if the API call fails or returns no pooler config.
343
348
  */
344
349
  export async function fetchPoolerDatabaseUrl(
345
350
  config: SupabaseConfig,
@@ -347,6 +352,14 @@ export async function fetchPoolerDatabaseUrl(
347
352
  ): Promise<string | null> {
348
353
  const url = `${SUPABASE_API_BASE}/v1/projects/${encodeURIComponent(config.projectRef)}/config/database/pooler`;
349
354
 
355
+ // For Supabase pooler connections, the username must include the project ref:
356
+ // <user>.<project_ref>
357
+ // Example:
358
+ // postgresql://postgres_ai_mon.xhaqmsvczjkkvkgdyast@aws-1-eu-west-1.pooler.supabase.com:6543/postgres
359
+ const suffix = `.${config.projectRef}`;
360
+ const effectiveUsername = username.endsWith(suffix) ? username : `${username}${suffix}`;
361
+ // URL-encode the username to handle special characters safely
362
+ const encodedUsername = encodeURIComponent(effectiveUsername);
350
363
  try {
351
364
  const response = await fetch(url, {
352
365
  method: "GET",
@@ -367,7 +380,7 @@ export async function fetchPoolerDatabaseUrl(
367
380
  const pooler = data[0];
368
381
  // Build URL from components if available
369
382
  if (pooler.db_host && pooler.db_port && pooler.db_name) {
370
- return `postgresql://${username}@${pooler.db_host}:${pooler.db_port}/${pooler.db_name}`;
383
+ return `postgresql://${encodedUsername}@${pooler.db_host}:${pooler.db_port}/${pooler.db_name}`;
371
384
  }
372
385
  // Fallback: try to extract from connection_string if present
373
386
  if (typeof pooler.connection_string === "string") {
@@ -375,7 +388,7 @@ export async function fetchPoolerDatabaseUrl(
375
388
  const connUrl = new URL(pooler.connection_string);
376
389
  // Use provided username; handle empty port for default ports (e.g., 5432)
377
390
  const portPart = connUrl.port ? `:${connUrl.port}` : "";
378
- return `postgresql://${username}@${connUrl.hostname}${portPart}${connUrl.pathname}`;
391
+ return `postgresql://${encodedUsername}@${connUrl.hostname}${portPart}${connUrl.pathname}`;
379
392
  } catch {
380
393
  return null;
381
394
  }
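
The `fetchPoolerDatabaseUrl` changes do two things: append `.<projectRef>` to the username unless it is already present (Supabase pooler connections require the `user.project_ref` form, per the added comment), and URL-encode the result before embedding it in the connection string. A standalone sketch of just that username handling, using only the logic shown in the hunks:

```ts
// Sketch only: mirrors the username handling added in the hunks above.
function poolerUsername(username: string, projectRef: string): string {
  const suffix = `.${projectRef}`;
  const effective = username.endsWith(suffix) ? username : `${username}${suffix}`;
  return encodeURIComponent(effective); // safe to embed in a postgresql:// URL
}

// "postgres_ai_mon" + "xhaqmsvczjkkvkgdyast" -> "postgres_ai_mon.xhaqmsvczjkkvkgdyast";
// an already-suffixed username is left as-is, so the ref is never doubled.
const user = poolerUsername("postgres_ai_mon", "xhaqmsvczjkkvkgdyast");
const url = `postgresql://${user}@aws-1-eu-west-1.pooler.supabase.com:6543/postgres`;
```
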
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "postgresai",
3
- "version": "0.14.0-dev.81",
3
+ "version": "0.14.0-dev.83",
4
4
  "description": "postgres_ai CLI",
5
5
  "license": "Apache-2.0",
6
6
  "private": false,
@@ -34,7 +34,7 @@
34
34
  "start:node": "node ./dist/bin/postgres-ai.js --help",
35
35
  "dev": "bun run embed-all && bun --watch ./bin/postgres-ai.ts",
36
36
  "test": "bun run embed-all && bun test",
37
- "test:fast": "bun run embed-all && bun test",
37
+ "test:fast": "bun run embed-all && bun test --coverage=false",
38
38
  "test:coverage": "bun run embed-all && bun test --coverage && echo 'Coverage report: cli/coverage/lcov-report/index.html'",
39
39
  "typecheck": "bun run embed-all && bunx tsc --noEmit"
40
40
  },
@@ -45,12 +45,12 @@
45
45
  "pg": "^8.16.3"
46
46
  },
47
47
  "devDependencies": {
48
- "@types/bun": "^1.1.14",
48
+ "@types/bun": "^1.3.6",
49
49
  "@types/js-yaml": "^4.0.9",
50
50
  "@types/pg": "^8.15.6",
51
51
  "ajv": "^8.17.1",
52
52
  "ajv-formats": "^3.0.1",
53
- "typescript": "^5.3.3"
53
+ "typescript": "^5.9.3"
54
54
  },
55
55
  "publishConfig": {
56
56
  "access": "public"
@@ -1,5 +1,6 @@
1
1
  import { describe, test, expect } from "bun:test";
2
2
  import { resolve } from "path";
3
+ import type { Client } from "pg";
3
4
 
4
5
  // Import from source directly since we're using Bun
5
6
  import * as checkup from "../lib/checkup";
@@ -947,6 +948,66 @@ describe("CLI tests", () => {
947
948
  expect(r.stdout).toMatch(/available checks/i);
948
949
  expect(r.stdout).toMatch(/A002/);
949
950
  });
951
+
952
+ test("checkup --help shows --upload and --no-upload options", () => {
953
+ const r = runCli(["checkup", "--help"]);
954
+ expect(r.status).toBe(0);
955
+ expect(r.stdout).toMatch(/--upload/);
956
+ expect(r.stdout).toMatch(/--no-upload/);
957
+ });
958
+
959
+ test("checkup --no-upload is recognized as valid option", () => {
960
+ // Should not produce "unknown option" error for --no-upload
961
+ const r = runCli(["checkup", "postgresql://test:test@localhost:5432/test", "--no-upload"]);
962
+ // Connection will fail, but option parsing should succeed
963
+ expect(r.stderr).not.toMatch(/unknown option/i);
964
+ expect(r.stderr).not.toMatch(/did you mean/i);
965
+ });
966
+
967
+ test("checkup --upload is recognized as valid option", () => {
968
+ // Should not produce "unknown option" error for --upload
969
+ const r = runCli(["checkup", "postgresql://test:test@localhost:5432/test", "--upload"]);
970
+ // Connection will fail, but option parsing should succeed
971
+ expect(r.stderr).not.toMatch(/unknown option/i);
972
+ expect(r.stderr).not.toMatch(/did you mean/i);
973
+ });
974
+
975
+ test("checkup --json does not imply --no-upload (decoupled behavior)", () => {
976
+ // Use empty config dir to ensure no API key is configured
977
+ const env = { XDG_CONFIG_HOME: "/tmp/postgresai-test-empty-config" };
978
+ // --json alone should NOT disable upload - when --upload is explicitly requested
979
+ // with --json, it should require API key (proving upload is not disabled)
980
+ const r = runCli(["checkup", "postgresql://test:test@localhost:5432/test", "--json", "--upload"], env);
981
+ // Should fail with "API key is required" because upload is enabled
982
+ expect(r.stderr).toMatch(/API key is required/i);
983
+ expect(r.stderr).not.toMatch(/unknown option/i);
984
+ });
985
+
986
+ test("checkup --json --no-upload explicitly disables upload", () => {
987
+ // Use empty config dir to ensure no API key is configured
988
+ const env = { XDG_CONFIG_HOME: "/tmp/postgresai-test-empty-config" };
989
+ // --json with --no-upload should disable upload (no API key error)
990
+ const r = runCli(["checkup", "postgresql://test:test@localhost:5432/test", "--json", "--no-upload"], env);
991
+ // Should NOT show "API key is required" because upload is disabled
992
+ expect(r.stderr).not.toMatch(/API key is required/i);
993
+ expect(r.stderr).not.toMatch(/unknown option/i);
994
+ });
995
+
996
+ test("checkup --upload requires API key", () => {
997
+ // Use empty config dir to ensure no API key is configured
998
+ const env = { XDG_CONFIG_HOME: "/tmp/postgresai-test-empty-config" };
999
+ // --upload explicitly requests upload, should fail without API key
1000
+ const r = runCli(["checkup", "postgresql://test:test@localhost:5432/test", "--upload"], env);
1001
+ expect(r.stderr).toMatch(/API key is required/i);
1002
+ });
1003
+
1004
+ test("checkup --no-upload does not require API key", () => {
1005
+ // Use empty config dir to ensure no API key is configured
1006
+ const env = { XDG_CONFIG_HOME: "/tmp/postgresai-test-empty-config" };
1007
+ // --no-upload disables upload, should not require API key
1008
+ const r = runCli(["checkup", "postgresql://test:test@localhost:5432/test", "--no-upload"], env);
1009
+ expect(r.stderr).not.toMatch(/API key is required/i);
1010
+ });
950
1011
  });
951
1012
 
952
1013
  // Tests for checkup-api module
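
The new checkup tests above drive the CLI through a `runCli` helper that is defined elsewhere in the test file and not shown in this diff. A hypothetical sketch of such a helper, only to make the assertions readable in isolation; the package's actual helper may differ in signature and behavior:

```ts
// Hypothetical sketch of a runCli-style helper; not part of this diff.
import { spawnSync } from "child_process";

function runCliSketch(
  args: string[],
  extraEnv: Record<string, string> = {}
): { status: number | null; stdout: string; stderr: string } {
  const r = spawnSync("bun", ["./bin/postgres-ai.ts", ...args], {
    encoding: "utf8",
    env: { ...process.env, ...extraEnv }, // e.g. XDG_CONFIG_HOME pointing at an empty config dir
  });
  return { status: r.status, stdout: r.stdout ?? "", stderr: r.stderr ?? "" };
}
```
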
package/test/init.test.ts CHANGED
@@ -68,9 +68,9 @@ describe("init module", () => {
68
68
  expect(plan.database).toBe("mydb");
69
69
  const roleStep = plan.steps.find((s: { name: string }) => s.name === "01.role");
70
70
  expect(roleStep).toBeTruthy();
71
- expect(roleStep.sql).toMatch(/do\s+\$\$/i);
72
- expect(roleStep.sql).toMatch(/create\s+user/i);
73
- expect(roleStep.sql).toMatch(/alter\s+user/i);
71
+ expect(roleStep!.sql).toMatch(/do\s+\$\$/i);
72
+ expect(roleStep!.sql).toMatch(/create\s+user/i);
73
+ expect(roleStep!.sql).toMatch(/alter\s+user/i);
74
74
  expect(plan.steps.some((s: { optional?: boolean }) => s.optional)).toBe(false);
75
75
  });
76
76
 
@@ -86,12 +86,12 @@ describe("init module", () => {
86
86
 
87
87
  const roleStep = plan.steps.find((s: { name: string }) => s.name === "01.role");
88
88
  expect(roleStep).toBeTruthy();
89
- expect(roleStep.sql).toMatch(/create\s+user\s+"user ""with"" quotes ✓"/i);
90
- expect(roleStep.sql).toMatch(/alter\s+user\s+"user ""with"" quotes ✓"/i);
89
+ expect(roleStep!.sql).toMatch(/create\s+user\s+"user ""with"" quotes ✓"/i);
90
+ expect(roleStep!.sql).toMatch(/alter\s+user\s+"user ""with"" quotes ✓"/i);
91
91
 
92
92
  const permStep = plan.steps.find((s: { name: string }) => s.name === "03.permissions");
93
93
  expect(permStep).toBeTruthy();
94
- expect(permStep.sql).toMatch(/grant connect on database "db name ""with"" quotes ✓" to "user ""with"" quotes ✓"/i);
94
+ expect(permStep!.sql).toMatch(/grant connect on database "db name ""with"" quotes ✓" to "user ""with"" quotes ✓"/i);
95
95
  });
96
96
 
97
97
  test("buildInitPlan keeps backslashes in passwords (no unintended escaping)", async () => {
@@ -104,7 +104,7 @@ describe("init module", () => {
104
104
  });
105
105
  const roleStep = plan.steps.find((s: { name: string }) => s.name === "01.role");
106
106
  expect(roleStep).toBeTruthy();
107
- expect(roleStep.sql).toContain(`password '${pw}'`);
107
+ expect(roleStep!.sql).toContain(`password '${pw}'`);
108
108
  });
109
109
 
110
110
  test("buildInitPlan rejects identifiers with null bytes", async () => {
@@ -138,8 +138,8 @@ describe("init module", () => {
138
138
  });
139
139
  const step = plan.steps.find((s: { name: string }) => s.name === "01.role");
140
140
  expect(step).toBeTruthy();
141
- expect(step.sql).toMatch(/password 'pa''ss'/);
142
- expect(step.params).toBeUndefined();
141
+ expect(step!.sql).toMatch(/password 'pa''ss'/);
142
+ expect(step!.params).toBeUndefined();
143
143
  });
144
144
 
145
145
  test("buildInitPlan includes optional steps when enabled", async () => {
@@ -420,7 +420,7 @@ describe("init module", () => {
420
420
  });
421
421
  const step = plan.steps.find((s: { name: string }) => s.name === "01.role");
422
422
  expect(step).toBeTruthy();
423
- const redacted = init.redactPasswordsInSql(step.sql);
423
+ const redacted = init.redactPasswordsInSql(step!.sql);
424
424
  expect(redacted).toMatch(/password '<redacted>'/i);
425
425
  });
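
The init.test.ts edits above replace `roleStep.sql` with `roleStep!.sql` (and similarly for `step` and `permStep`): `Array.prototype.find` returns `T | undefined`, and the unguarded property access presumably stops passing the stricter type check that comes with the TypeScript and @types/bun bumps in this release, even though `expect(roleStep).toBeTruthy()` runs first. A minimal sketch of the pattern:

```ts
// Sketch only: find() returns T | undefined, so property access needs either a
// non-null assertion (as in the hunks above) or an explicit guard.
interface Step { name: string; sql: string }

const steps: Step[] = [{ name: "01.role", sql: "create user ..." }];
const roleStep = steps.find((s) => s.name === "01.role"); // Step | undefined

// roleStep.sql              // error under strict null checks
const sql1 = roleStep!.sql;        // assertion: the preceding expect() already ensured it exists
const sql2 = roleStep?.sql ?? ""; // alternative without an assertion
```
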
426
426
 
@@ -60,7 +60,7 @@ describe("createIssue", () => {
60
60
  headers: { "Content-Type": "application/json" },
61
61
  })
62
62
  )
63
- );
63
+ ) as unknown as typeof fetch;
64
64
 
65
65
  const result = await createIssue({
66
66
  apiKey: "test-key",
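
From here on, every `globalThis.fetch = mock(...)` assignment gains an `as unknown as typeof fetch` cast, presumably because the mock's plain function type is no longer directly assignable to the global `fetch` declaration under the updated bun-types. A minimal sketch of the pattern, including the restore step the new supabase tests below also use:

```ts
// Sketch only: replace globalThis.fetch with a bun:test mock; the double cast
// satisfies the global fetch type, and restoring the original keeps tests isolated.
import { mock, afterEach } from "bun:test";

const originalFetch = globalThis.fetch;
afterEach(() => {
  globalThis.fetch = originalFetch;
});

globalThis.fetch = mock(() =>
  Promise.resolve(new Response(JSON.stringify([]), { status: 200 }))
) as unknown as typeof fetch;
```
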
@@ -93,7 +93,7 @@ describe("createIssue", () => {
93
93
  headers: { "Content-Type": "application/json" },
94
94
  })
95
95
  );
96
- });
96
+ }) as unknown as typeof fetch;
97
97
 
98
98
  const result = await createIssue({
99
99
  apiKey: "test-key",
@@ -127,7 +127,7 @@ describe("createIssue", () => {
127
127
  headers: { "Content-Type": "application/json" },
128
128
  })
129
129
  )
130
- );
130
+ ) as unknown as typeof fetch;
131
131
 
132
132
  await expect(
133
133
  createIssue({
@@ -194,7 +194,7 @@ describe("updateIssue", () => {
194
194
  headers: { "Content-Type": "application/json" },
195
195
  })
196
196
  )
197
- );
197
+ ) as unknown as typeof fetch;
198
198
 
199
199
  const result = await updateIssue({
200
200
  apiKey: "test-key",
@@ -223,7 +223,7 @@ describe("updateIssue", () => {
223
223
  headers: { "Content-Type": "application/json" },
224
224
  })
225
225
  )
226
- );
226
+ ) as unknown as typeof fetch;
227
227
 
228
228
  const result = await updateIssue({
229
229
  apiKey: "test-key",
@@ -252,7 +252,7 @@ describe("updateIssue", () => {
252
252
  headers: { "Content-Type": "application/json" },
253
253
  })
254
254
  )
255
- );
255
+ ) as unknown as typeof fetch;
256
256
 
257
257
  const result = await updateIssue({
258
258
  apiKey: "test-key",
@@ -281,7 +281,7 @@ describe("updateIssue", () => {
281
281
  headers: { "Content-Type": "application/json" },
282
282
  })
283
283
  )
284
- );
284
+ ) as unknown as typeof fetch;
285
285
 
286
286
  const result = await updateIssue({
287
287
  apiKey: "test-key",
@@ -313,7 +313,7 @@ describe("updateIssue", () => {
313
313
  headers: { "Content-Type": "application/json" },
314
314
  })
315
315
  );
316
- });
316
+ }) as unknown as typeof fetch;
317
317
 
318
318
  await updateIssue({
319
319
  apiKey: "test-key",
@@ -345,7 +345,7 @@ describe("updateIssue", () => {
345
345
  headers: { "Content-Type": "application/json" },
346
346
  })
347
347
  )
348
- );
348
+ ) as unknown as typeof fetch;
349
349
 
350
350
  await expect(
351
351
  updateIssue({
@@ -414,7 +414,7 @@ describe("updateIssueComment", () => {
414
414
  headers: { "Content-Type": "application/json" },
415
415
  })
416
416
  );
417
- });
417
+ }) as unknown as typeof fetch;
418
418
 
419
419
  const result = await updateIssueComment({
420
420
  apiKey: "test-key",
@@ -442,7 +442,7 @@ describe("updateIssueComment", () => {
442
442
  headers: { "Content-Type": "application/json" },
443
443
  })
444
444
  )
445
- );
445
+ ) as unknown as typeof fetch;
446
446
 
447
447
  await expect(
448
448
  updateIssueComment({
@@ -96,21 +96,21 @@ describe("MCP Server", () => {
96
96
  });
97
97
 
98
98
  // Mock fetch to verify API key is used
99
- let capturedHeaders: HeadersInit | undefined;
99
+ let capturedHeaders: Record<string, string> | undefined;
100
100
  globalThis.fetch = mock((url: string, options?: RequestInit) => {
101
- capturedHeaders = options?.headers;
101
+ capturedHeaders = options?.headers as Record<string, string> | undefined;
102
102
  return Promise.resolve(
103
103
  new Response(JSON.stringify([]), {
104
104
  status: 200,
105
105
  headers: { "Content-Type": "application/json" },
106
106
  })
107
107
  );
108
- });
108
+ }) as unknown as typeof fetch;
109
109
 
110
110
  await handleToolCall(createRequest("list_issues"), { apiKey: "test-api-key" });
111
111
 
112
112
  expect(capturedHeaders).toBeDefined();
113
- expect((capturedHeaders as Record<string, string>)["access-token"]).toBe("test-api-key");
113
+ expect(capturedHeaders!["access-token"]).toBe("test-api-key");
114
114
 
115
115
  readConfigSpy.mockRestore();
116
116
  });
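
These MCP server test hunks also retype `capturedHeaders` from `HeadersInit` to `Record<string, string> | undefined`: `HeadersInit` can be a `Headers` instance or an array of pairs, so it cannot be indexed with `["access-token"]` directly, and narrowing at capture time lets the later assertions drop their inline casts. A minimal sketch:

```ts
// Sketch only: capture request headers from a mocked fetch in an indexable shape.
import { mock } from "bun:test";

let capturedHeaders: Record<string, string> | undefined;

globalThis.fetch = mock((url: string, options?: RequestInit) => {
  // HeadersInit may be Headers | string[][] | Record<string, string>;
  // these tests always pass a plain object, so the cast is safe here.
  capturedHeaders = options?.headers as Record<string, string> | undefined;
  return Promise.resolve(new Response("[]", { status: 200 }));
}) as unknown as typeof fetch;

// later: expect(capturedHeaders!["access-token"]).toBe("test-api-key");
```
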
@@ -123,21 +123,21 @@ describe("MCP Server", () => {
123
123
  defaultProject: null,
124
124
  });
125
125
 
126
- let capturedHeaders: HeadersInit | undefined;
126
+ let capturedHeaders: Record<string, string> | undefined;
127
127
  globalThis.fetch = mock((url: string, options?: RequestInit) => {
128
- capturedHeaders = options?.headers;
128
+ capturedHeaders = options?.headers as Record<string, string> | undefined;
129
129
  return Promise.resolve(
130
130
  new Response(JSON.stringify([]), {
131
131
  status: 200,
132
132
  headers: { "Content-Type": "application/json" },
133
133
  })
134
134
  );
135
- });
135
+ }) as unknown as typeof fetch;
136
136
 
137
137
  await handleToolCall(createRequest("list_issues"));
138
138
 
139
139
  expect(capturedHeaders).toBeDefined();
140
- expect((capturedHeaders as Record<string, string>)["access-token"]).toBe("config-api-key");
140
+ expect(capturedHeaders!["access-token"]).toBe("config-api-key");
141
141
 
142
142
  readConfigSpy.mockRestore();
143
143
  });
@@ -152,21 +152,21 @@ describe("MCP Server", () => {
152
152
  defaultProject: null,
153
153
  });
154
154
 
155
- let capturedHeaders: HeadersInit | undefined;
155
+ let capturedHeaders: Record<string, string> | undefined;
156
156
  globalThis.fetch = mock((url: string, options?: RequestInit) => {
157
- capturedHeaders = options?.headers;
157
+ capturedHeaders = options?.headers as Record<string, string> | undefined;
158
158
  return Promise.resolve(
159
159
  new Response(JSON.stringify([]), {
160
160
  status: 200,
161
161
  headers: { "Content-Type": "application/json" },
162
162
  })
163
163
  );
164
- });
164
+ }) as unknown as typeof fetch;
165
165
 
166
166
  await handleToolCall(createRequest("list_issues"));
167
167
 
168
168
  expect(capturedHeaders).toBeDefined();
169
- expect((capturedHeaders as Record<string, string>)["access-token"]).toBe("env-api-key");
169
+ expect(capturedHeaders!["access-token"]).toBe("env-api-key");
170
170
 
171
171
  readConfigSpy.mockRestore();
172
172
  });
@@ -193,7 +193,7 @@ describe("MCP Server", () => {
193
193
  headers: { "Content-Type": "application/json" },
194
194
  })
195
195
  )
196
- );
196
+ ) as unknown as typeof fetch;
197
197
 
198
198
  const response = await handleToolCall(createRequest("list_issues"));
199
199
 
@@ -220,7 +220,7 @@ describe("MCP Server", () => {
220
220
  headers: { "Content-Type": "application/json" },
221
221
  })
222
222
  )
223
- );
223
+ ) as unknown as typeof fetch;
224
224
 
225
225
  const response = await handleToolCall(createRequest("list_issues"));
226
226
 
@@ -280,7 +280,7 @@ describe("MCP Server", () => {
280
280
  headers: { "Content-Type": "application/json" },
281
281
  })
282
282
  )
283
- );
283
+ ) as unknown as typeof fetch;
284
284
 
285
285
  const response = await handleToolCall(createRequest("view_issue", { issue_id: "nonexistent-id" }));
286
286
 
@@ -319,7 +319,7 @@ describe("MCP Server", () => {
319
319
  headers: { "Content-Type": "application/json" },
320
320
  })
321
321
  );
322
- });
322
+ }) as unknown as typeof fetch;
323
323
 
324
324
  const response = await handleToolCall(createRequest("view_issue", { issue_id: "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa" }));
325
325
 
@@ -386,7 +386,7 @@ describe("MCP Server", () => {
386
386
  headers: { "Content-Type": "application/json" },
387
387
  })
388
388
  );
389
- });
389
+ }) as unknown as typeof fetch;
390
390
 
391
391
  await handleToolCall(
392
392
  createRequest("post_issue_comment", {
@@ -419,7 +419,7 @@ describe("MCP Server", () => {
419
419
  headers: { "Content-Type": "application/json" },
420
420
  })
421
421
  );
422
- });
422
+ }) as unknown as typeof fetch;
423
423
 
424
424
  const response = await handleToolCall(
425
425
  createRequest("post_issue_comment", {
@@ -504,7 +504,7 @@ describe("MCP Server", () => {
504
504
  headers: { "Content-Type": "application/json" },
505
505
  })
506
506
  );
507
- });
507
+ }) as unknown as typeof fetch;
508
508
 
509
509
  await handleToolCall(createRequest("create_issue", { title: "Test Issue" }));
510
510
 
@@ -532,7 +532,7 @@ describe("MCP Server", () => {
532
532
  headers: { "Content-Type": "application/json" },
533
533
  })
534
534
  );
535
- });
535
+ }) as unknown as typeof fetch;
536
536
 
537
537
  await handleToolCall(
538
538
  createRequest("create_issue", {
@@ -566,7 +566,7 @@ describe("MCP Server", () => {
566
566
  headers: { "Content-Type": "application/json" },
567
567
  })
568
568
  );
569
- });
569
+ }) as unknown as typeof fetch;
570
570
 
571
571
  const response = await handleToolCall(
572
572
  createRequest("create_issue", {
@@ -679,7 +679,7 @@ describe("MCP Server", () => {
679
679
  headers: { "Content-Type": "application/json" },
680
680
  })
681
681
  );
682
- });
682
+ }) as unknown as typeof fetch;
683
683
 
684
684
  await handleToolCall(
685
685
  createRequest("update_issue", {
@@ -712,7 +712,7 @@ describe("MCP Server", () => {
712
712
  headers: { "Content-Type": "application/json" },
713
713
  })
714
714
  )
715
- );
715
+ ) as unknown as typeof fetch;
716
716
 
717
717
  const response = await handleToolCall(
718
718
  createRequest("update_issue", { issue_id: "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", title: "New Title" })
@@ -740,7 +740,7 @@ describe("MCP Server", () => {
740
740
  headers: { "Content-Type": "application/json" },
741
741
  })
742
742
  );
743
- });
743
+ }) as unknown as typeof fetch;
744
744
 
745
745
  const response = await handleToolCall(
746
746
  createRequest("update_issue", { issue_id: "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", status: 1 })
@@ -771,7 +771,7 @@ describe("MCP Server", () => {
771
771
  headers: { "Content-Type": "application/json" },
772
772
  })
773
773
  );
774
- });
774
+ }) as unknown as typeof fetch;
775
775
 
776
776
  const response = await handleToolCall(
777
777
  createRequest("update_issue", { issue_id: "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", labels: ["new-label"] })
@@ -802,7 +802,7 @@ describe("MCP Server", () => {
802
802
  headers: { "Content-Type": "application/json" },
803
803
  })
804
804
  );
805
- });
805
+ }) as unknown as typeof fetch;
806
806
 
807
807
  const response = await handleToolCall(
808
808
  createRequest("update_issue", { issue_id: "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", status: 0 })
@@ -871,7 +871,7 @@ describe("MCP Server", () => {
871
871
  headers: { "Content-Type": "application/json" },
872
872
  })
873
873
  );
874
- });
874
+ }) as unknown as typeof fetch;
875
875
 
876
876
  await handleToolCall(
877
877
  createRequest("update_issue_comment", {
@@ -902,7 +902,7 @@ describe("MCP Server", () => {
902
902
  headers: { "Content-Type": "application/json" },
903
903
  })
904
904
  )
905
- );
905
+ ) as unknown as typeof fetch;
906
906
 
907
907
  const response = await handleToolCall(
908
908
  createRequest("update_issue_comment", {
@@ -999,7 +999,7 @@ describe("MCP Server", () => {
999
999
  headers: { "Content-Type": "application/json" },
1000
1000
  })
1001
1001
  )
1002
- );
1002
+ ) as unknown as typeof fetch;
1003
1003
 
1004
1004
  const response = await handleToolCall(createRequest("view_action_item", { action_item_id: "00000000-0000-0000-0000-000000000000" }));
1005
1005
 
@@ -1036,7 +1036,7 @@ describe("MCP Server", () => {
1036
1036
  headers: { "Content-Type": "application/json" },
1037
1037
  })
1038
1038
  )
1039
- );
1039
+ ) as unknown as typeof fetch;
1040
1040
 
1041
1041
  const response = await handleToolCall(createRequest("view_action_item", { action_item_id: "11111111-1111-1111-1111-111111111111" }));
1042
1042
 
@@ -1072,7 +1072,7 @@ describe("MCP Server", () => {
1072
1072
  headers: { "Content-Type": "application/json" },
1073
1073
  })
1074
1074
  );
1075
- });
1075
+ }) as unknown as typeof fetch;
1076
1076
 
1077
1077
  const response = await handleToolCall(createRequest("view_action_item", { action_item_ids: ["11111111-1111-1111-1111-111111111111", "22222222-2222-2222-2222-222222222222"] }));
1078
1078
 
@@ -1141,7 +1141,7 @@ describe("MCP Server", () => {
1141
1141
  headers: { "Content-Type": "application/json" },
1142
1142
  })
1143
1143
  )
1144
- );
1144
+ ) as unknown as typeof fetch;
1145
1145
 
1146
1146
  const response = await handleToolCall(createRequest("list_action_items", { issue_id: "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa" }));
1147
1147
 
@@ -1208,7 +1208,7 @@ describe("MCP Server", () => {
1208
1208
  headers: { "Content-Type": "application/json" },
1209
1209
  })
1210
1210
  );
1211
- });
1211
+ }) as unknown as typeof fetch;
1212
1212
 
1213
1213
  const response = await handleToolCall(
1214
1214
  createRequest("create_action_item", {
@@ -1243,7 +1243,7 @@ describe("MCP Server", () => {
1243
1243
  headers: { "Content-Type": "application/json" },
1244
1244
  })
1245
1245
  );
1246
- });
1246
+ }) as unknown as typeof fetch;
1247
1247
 
1248
1248
  const response = await handleToolCall(
1249
1249
  createRequest("create_action_item", {
@@ -1284,7 +1284,7 @@ describe("MCP Server", () => {
1284
1284
  headers: { "Content-Type": "application/json" },
1285
1285
  })
1286
1286
  );
1287
- });
1287
+ }) as unknown as typeof fetch;
1288
1288
 
1289
1289
  await handleToolCall(
1290
1290
  createRequest("create_action_item", {
@@ -1375,7 +1375,7 @@ describe("MCP Server", () => {
1375
1375
  headers: { "Content-Type": "application/json" },
1376
1376
  })
1377
1377
  );
1378
- });
1378
+ }) as unknown as typeof fetch;
1379
1379
 
1380
1380
  const response = await handleToolCall(
1381
1381
  createRequest("update_action_item", { action_item_id: "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", title: "New Title" })
@@ -1407,7 +1407,7 @@ describe("MCP Server", () => {
1407
1407
  headers: { "Content-Type": "application/json" },
1408
1408
  })
1409
1409
  );
1410
- });
1410
+ }) as unknown as typeof fetch;
1411
1411
 
1412
1412
  const response = await handleToolCall(
1413
1413
  createRequest("update_action_item", { action_item_id: "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", is_done: true })
@@ -1438,7 +1438,7 @@ describe("MCP Server", () => {
1438
1438
  headers: { "Content-Type": "application/json" },
1439
1439
  })
1440
1440
  );
1441
- });
1441
+ }) as unknown as typeof fetch;
1442
1442
 
1443
1443
  const response = await handleToolCall(
1444
1444
  createRequest("update_action_item", {
@@ -1492,7 +1492,7 @@ describe("MCP Server", () => {
1492
1492
  headers: { "Content-Type": "application/json" },
1493
1493
  })
1494
1494
  )
1495
- );
1495
+ ) as unknown as typeof fetch;
1496
1496
 
1497
1497
  const response = await handleToolCall(
1498
1498
  createRequest("create_issue", { title: "Test Issue" })
@@ -1512,7 +1512,7 @@ describe("MCP Server", () => {
1512
1512
  defaultProject: null,
1513
1513
  });
1514
1514
 
1515
- globalThis.fetch = mock(() => Promise.reject(new Error("Network error")));
1515
+ globalThis.fetch = mock(() => Promise.reject(new Error("Network error"))) as unknown as typeof fetch;
1516
1516
 
1517
1517
  const response = await handleToolCall(
1518
1518
  createRequest("create_issue", { title: "Test Issue" })
@@ -2,6 +2,7 @@ import { describe, expect, test, beforeEach, afterEach, mock } from "bun:test";
2
2
  import {
3
3
  resolveSupabaseConfig,
4
4
  extractProjectRefFromUrl,
5
+ fetchPoolerDatabaseUrl,
5
6
  SupabaseClient,
6
7
  applyInitPlanViaSupabase,
7
8
  verifyInitSetupViaSupabase,
@@ -137,6 +138,146 @@ describe("Supabase module", () => {
137
138
  });
138
139
  });
139
140
 
141
+ describe("fetchPoolerDatabaseUrl", () => {
142
+ const originalFetch = globalThis.fetch;
143
+
144
+ afterEach(() => {
145
+ globalThis.fetch = originalFetch;
146
+ });
147
+
148
+ test("returns pooler db url with username including project ref (db_host/db_port/db_name response)", async () => {
149
+ globalThis.fetch = mock(() =>
150
+ Promise.resolve(
151
+ new Response(
152
+ JSON.stringify([
153
+ {
154
+ db_host: "aws-1-eu-west-1.pooler.supabase.com",
155
+ db_port: 6543,
156
+ db_name: "postgres",
157
+ },
158
+ ]),
159
+ { status: 200 }
160
+ )
161
+ )
162
+ ) as unknown as typeof fetch;
163
+
164
+ const url = await fetchPoolerDatabaseUrl(
165
+ { projectRef: "xhaqmsvczjkkvkgdyast", accessToken: "token" },
166
+ "postgres_ai_mon"
167
+ );
168
+ expect(url).toBe(
169
+ "postgresql://postgres_ai_mon.xhaqmsvczjkkvkgdyast@aws-1-eu-west-1.pooler.supabase.com:6543/postgres"
170
+ );
171
+ });
172
+
173
+ test("does not double-append project ref if username already has it", async () => {
174
+ globalThis.fetch = mock(() =>
175
+ Promise.resolve(
176
+ new Response(
177
+ JSON.stringify([
178
+ {
179
+ db_host: "aws-1-eu-west-1.pooler.supabase.com",
180
+ db_port: 6543,
181
+ db_name: "postgres",
182
+ },
183
+ ]),
184
+ { status: 200 }
185
+ )
186
+ )
187
+ ) as unknown as typeof fetch;
188
+
189
+ const url = await fetchPoolerDatabaseUrl(
190
+ { projectRef: "xhaqmsvczjkkvkgdyast", accessToken: "token" },
191
+ "postgres_ai_mon.xhaqmsvczjkkvkgdyast"
192
+ );
193
+ expect(url).toBe(
194
+ "postgresql://postgres_ai_mon.xhaqmsvczjkkvkgdyast@aws-1-eu-west-1.pooler.supabase.com:6543/postgres"
195
+ );
196
+ });
197
+
198
+ test("returns pooler db url via connection_string fallback path", async () => {
199
+ globalThis.fetch = mock(() =>
200
+ Promise.resolve(
201
+ new Response(
202
+ JSON.stringify([
203
+ {
204
+ // No db_host/db_port/db_name - uses connection_string fallback
205
+ connection_string:
206
+ "postgresql://ignored@aws-1-eu-west-1.pooler.supabase.com:6543/postgres",
207
+ },
208
+ ]),
209
+ { status: 200 }
210
+ )
211
+ )
212
+ ) as unknown as typeof fetch;
213
+
214
+ const url = await fetchPoolerDatabaseUrl(
215
+ { projectRef: "xhaqmsvczjkkvkgdyast", accessToken: "token" },
216
+ "postgres_ai_mon"
217
+ );
218
+ expect(url).toBe(
219
+ "postgresql://postgres_ai_mon.xhaqmsvczjkkvkgdyast@aws-1-eu-west-1.pooler.supabase.com:6543/postgres"
220
+ );
221
+ });
222
+
223
+ test("returns null for invalid connection_string URL", async () => {
224
+ globalThis.fetch = mock(() =>
225
+ Promise.resolve(
226
+ new Response(
227
+ JSON.stringify([
228
+ {
229
+ connection_string: "not-a-valid-url",
230
+ },
231
+ ]),
232
+ { status: 200 }
233
+ )
234
+ )
235
+ ) as unknown as typeof fetch;
236
+
237
+ const url = await fetchPoolerDatabaseUrl(
238
+ { projectRef: "xhaqmsvczjkkvkgdyast", accessToken: "token" },
239
+ "postgres_ai_mon"
240
+ );
241
+ expect(url).toBeNull();
242
+ });
243
+
244
+ test("returns null for empty API response", async () => {
245
+ globalThis.fetch = mock(() =>
246
+ Promise.resolve(new Response(JSON.stringify([]), { status: 200 }))
247
+ ) as unknown as typeof fetch;
248
+
249
+ const url = await fetchPoolerDatabaseUrl(
250
+ { projectRef: "xhaqmsvczjkkvkgdyast", accessToken: "token" },
251
+ "postgres_ai_mon"
252
+ );
253
+ expect(url).toBeNull();
254
+ });
255
+
256
+ test("returns null for API error response", async () => {
257
+ globalThis.fetch = mock(() =>
258
+ Promise.resolve(new Response("Unauthorized", { status: 401 }))
259
+ ) as unknown as typeof fetch;
260
+
261
+ const url = await fetchPoolerDatabaseUrl(
262
+ { projectRef: "xhaqmsvczjkkvkgdyast", accessToken: "token" },
263
+ "postgres_ai_mon"
264
+ );
265
+ expect(url).toBeNull();
266
+ });
267
+
268
+ test("returns null when fetch throws network error", async () => {
269
+ globalThis.fetch = mock(() =>
270
+ Promise.reject(new Error("Network error"))
271
+ ) as unknown as typeof fetch;
272
+
273
+ const url = await fetchPoolerDatabaseUrl(
274
+ { projectRef: "xhaqmsvczjkkvkgdyast", accessToken: "token" },
275
+ "postgres_ai_mon"
276
+ );
277
+ expect(url).toBeNull();
278
+ });
279
+ });
280
+
140
281
  describe("SupabaseClient", () => {
141
282
  test("throws error when project ref is empty", () => {
142
283
  expect(() => new SupabaseClient({ projectRef: "", accessToken: "token" })).toThrow(