postgresai 0.14.0-dev.8 → 0.14.0-dev.81

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96):
  1. package/README.md +161 -61
  2. package/bin/postgres-ai.ts +2596 -428
  3. package/bun.lock +258 -0
  4. package/bunfig.toml +20 -0
  5. package/dist/bin/postgres-ai.js +31277 -1575
  6. package/dist/sql/01.role.sql +16 -0
  7. package/dist/sql/02.extensions.sql +8 -0
  8. package/dist/sql/03.permissions.sql +38 -0
  9. package/dist/sql/04.optional_rds.sql +6 -0
  10. package/dist/sql/05.optional_self_managed.sql +8 -0
  11. package/dist/sql/06.helpers.sql +439 -0
  12. package/dist/sql/sql/01.role.sql +16 -0
  13. package/dist/sql/sql/02.extensions.sql +8 -0
  14. package/dist/sql/sql/03.permissions.sql +38 -0
  15. package/dist/sql/sql/04.optional_rds.sql +6 -0
  16. package/dist/sql/sql/05.optional_self_managed.sql +8 -0
  17. package/dist/sql/sql/06.helpers.sql +439 -0
  18. package/dist/sql/sql/uninit/01.helpers.sql +5 -0
  19. package/dist/sql/sql/uninit/02.permissions.sql +30 -0
  20. package/dist/sql/sql/uninit/03.role.sql +27 -0
  21. package/dist/sql/uninit/01.helpers.sql +5 -0
  22. package/dist/sql/uninit/02.permissions.sql +30 -0
  23. package/dist/sql/uninit/03.role.sql +27 -0
  24. package/lib/auth-server.ts +124 -106
  25. package/lib/checkup-api.ts +386 -0
  26. package/lib/checkup-dictionary.ts +113 -0
  27. package/lib/checkup.ts +1512 -0
  28. package/lib/config.ts +6 -3
  29. package/lib/init.ts +655 -189
  30. package/lib/issues.ts +848 -193
  31. package/lib/mcp-server.ts +391 -91
  32. package/lib/metrics-loader.ts +127 -0
  33. package/lib/supabase.ts +824 -0
  34. package/lib/util.ts +61 -0
  35. package/package.json +22 -10
  36. package/packages/postgres-ai/README.md +26 -0
  37. package/packages/postgres-ai/bin/postgres-ai.js +27 -0
  38. package/packages/postgres-ai/package.json +27 -0
  39. package/scripts/embed-checkup-dictionary.ts +106 -0
  40. package/scripts/embed-metrics.ts +154 -0
  41. package/sql/01.role.sql +16 -0
  42. package/sql/02.extensions.sql +8 -0
  43. package/sql/03.permissions.sql +38 -0
  44. package/sql/04.optional_rds.sql +6 -0
  45. package/sql/05.optional_self_managed.sql +8 -0
  46. package/sql/06.helpers.sql +439 -0
  47. package/sql/uninit/01.helpers.sql +5 -0
  48. package/sql/uninit/02.permissions.sql +30 -0
  49. package/sql/uninit/03.role.sql +27 -0
  50. package/test/auth.test.ts +258 -0
  51. package/test/checkup.integration.test.ts +321 -0
  52. package/test/checkup.test.ts +1116 -0
  53. package/test/config-consistency.test.ts +36 -0
  54. package/test/init.integration.test.ts +508 -0
  55. package/test/init.test.ts +916 -0
  56. package/test/issues.cli.test.ts +538 -0
  57. package/test/issues.test.ts +456 -0
  58. package/test/mcp-server.test.ts +1527 -0
  59. package/test/schema-validation.test.ts +81 -0
  60. package/test/supabase.test.ts +568 -0
  61. package/test/test-utils.ts +128 -0
  62. package/tsconfig.json +12 -20
  63. package/dist/bin/postgres-ai.d.ts +0 -3
  64. package/dist/bin/postgres-ai.d.ts.map +0 -1
  65. package/dist/bin/postgres-ai.js.map +0 -1
  66. package/dist/lib/auth-server.d.ts +0 -31
  67. package/dist/lib/auth-server.d.ts.map +0 -1
  68. package/dist/lib/auth-server.js +0 -263
  69. package/dist/lib/auth-server.js.map +0 -1
  70. package/dist/lib/config.d.ts +0 -45
  71. package/dist/lib/config.d.ts.map +0 -1
  72. package/dist/lib/config.js +0 -181
  73. package/dist/lib/config.js.map +0 -1
  74. package/dist/lib/init.d.ts +0 -64
  75. package/dist/lib/init.d.ts.map +0 -1
  76. package/dist/lib/init.js +0 -399
  77. package/dist/lib/init.js.map +0 -1
  78. package/dist/lib/issues.d.ts +0 -75
  79. package/dist/lib/issues.d.ts.map +0 -1
  80. package/dist/lib/issues.js +0 -336
  81. package/dist/lib/issues.js.map +0 -1
  82. package/dist/lib/mcp-server.d.ts +0 -9
  83. package/dist/lib/mcp-server.d.ts.map +0 -1
  84. package/dist/lib/mcp-server.js +0 -168
  85. package/dist/lib/mcp-server.js.map +0 -1
  86. package/dist/lib/pkce.d.ts +0 -32
  87. package/dist/lib/pkce.d.ts.map +0 -1
  88. package/dist/lib/pkce.js +0 -101
  89. package/dist/lib/pkce.js.map +0 -1
  90. package/dist/lib/util.d.ts +0 -27
  91. package/dist/lib/util.d.ts.map +0 -1
  92. package/dist/lib/util.js +0 -46
  93. package/dist/lib/util.js.map +0 -1
  94. package/dist/package.json +0 -46
  95. package/test/init.integration.test.cjs +0 -269
  96. package/test/init.test.cjs +0 -76
@@ -0,0 +1,81 @@
1
+ /**
2
+ * JSON Schema validation tests for express checkup reports.
3
+ * Validates that generated reports match schemas in reporter/schemas/.
4
+ */
5
+ import { describe, test, expect } from "bun:test";
6
+ import { resolve } from "path";
7
+ import { readFileSync } from "fs";
8
+ import Ajv2020 from "ajv/dist/2020";
9
+
10
+ import * as checkup from "../lib/checkup";
11
+ import { createMockClient } from "./test-utils";
12
+
13
+ const ajv = new Ajv2020({ allErrors: true, strict: false });
14
+ const schemasDir = resolve(import.meta.dir, "../../reporter/schemas");
15
+
16
+ function validateAgainstSchema(report: any, checkId: string): void {
17
+ const schemaPath = resolve(schemasDir, `${checkId}.schema.json`);
18
+ const schema = JSON.parse(readFileSync(schemaPath, "utf8"));
19
+ const validate = ajv.compile(schema);
20
+ const valid = validate(report);
21
+ if (!valid) {
22
+ const errors = validate.errors?.map(e => `${e.instancePath}: ${e.message}`).join(", ");
23
+ throw new Error(`${checkId} schema validation failed: ${errors}`);
24
+ }
25
+ }
26
+
27
+ // Test data for index reports
28
+ const indexTestData = {
29
+ H001: {
30
+ emptyRows: { invalidIndexesRows: [] },
31
+ dataRows: {
32
+ invalidIndexesRows: [
33
+ { schema_name: "public", table_name: "users", index_name: "users_email_idx", relation_name: "users", index_size_bytes: "1048576", index_definition: "CREATE INDEX users_email_idx ON public.users USING btree (email)", supports_fk: false },
34
+ ],
35
+ },
36
+ },
37
+ H002: {
38
+ emptyRows: { unusedIndexesRows: [] },
39
+ dataRows: {
40
+ unusedIndexesRows: [
41
+ { schema_name: "public", table_name: "logs", index_name: "logs_created_idx", index_definition: "CREATE INDEX logs_created_idx ON public.logs USING btree (created_at)", reason: "Never Used Indexes", idx_scan: "0", index_size_bytes: "8388608", idx_is_btree: true, supports_fk: false },
42
+ ],
43
+ },
44
+ },
45
+ H004: {
46
+ emptyRows: { redundantIndexesRows: [] },
47
+ dataRows: {
48
+ redundantIndexesRows: [
49
+ { schema_name: "public", table_name: "orders", index_name: "orders_user_id_idx", relation_name: "orders", access_method: "btree", reason: "public.orders_user_id_created_idx", index_size_bytes: "2097152", table_size_bytes: "16777216", index_usage: "0", supports_fk: false, index_definition: "CREATE INDEX orders_user_id_idx ON public.orders USING btree (user_id)", redundant_to_json: JSON.stringify([{ index_name: "public.orders_user_id_created_idx", index_definition: "CREATE INDEX ...", index_size_bytes: 1048576 }]) },
50
+ ],
51
+ },
52
+ },
53
+ };
54
+
55
+ describe("Schema validation", () => {
56
+ // Index health checks (H001, H002, H004) - test empty and with data
57
+ for (const [checkId, testData] of Object.entries(indexTestData)) {
58
+ const generator = checkup.REPORT_GENERATORS[checkId];
59
+
60
+ test(`${checkId} validates with empty data`, async () => {
61
+ const mockClient = createMockClient(testData.emptyRows);
62
+ const report = await generator(mockClient as any, "node-01");
63
+ validateAgainstSchema(report, checkId);
64
+ });
65
+
66
+ test(`${checkId} validates with sample data`, async () => {
67
+ const mockClient = createMockClient(testData.dataRows);
68
+ const report = await generator(mockClient as any, "node-01");
69
+ validateAgainstSchema(report, checkId);
70
+ });
71
+ }
72
+
73
+ // Settings reports (D004, F001, G001) - single test each
74
+ for (const checkId of ["D004", "F001", "G001"]) {
75
+ test(`${checkId} validates against schema`, async () => {
76
+ const mockClient = createMockClient();
77
+ const report = await checkup.REPORT_GENERATORS[checkId](mockClient as any, "node-01");
78
+ validateAgainstSchema(report, checkId);
79
+ });
80
+ }
81
+ });
@@ -0,0 +1,568 @@
1
+ import { describe, expect, test, beforeEach, afterEach, mock } from "bun:test";
2
+ import {
3
+ resolveSupabaseConfig,
4
+ extractProjectRefFromUrl,
5
+ SupabaseClient,
6
+ applyInitPlanViaSupabase,
7
+ verifyInitSetupViaSupabase,
8
+ type PgCompatibleError,
9
+ } from "../lib/supabase";
10
+
11
+ // Valid project ref for tests (10-30 alphanumeric chars)
12
+ const VALID_PROJECT_REF = "abcdefghij1234567890";
13
+
14
+ describe("Supabase module", () => {
15
+ describe("extractProjectRefFromUrl", () => {
16
+ test("extracts project ref from standard Supabase URL", () => {
17
+ const url =
18
+ "postgresql://postgres:password@db.abcdefghij.supabase.co:5432/postgres";
19
+ expect(extractProjectRefFromUrl(url)).toBe("abcdefghij");
20
+ });
21
+
22
+ test("extracts project ref from Supabase URL without db. prefix", () => {
23
+ const url =
24
+ "postgresql://postgres:password@abcdefghij.supabase.co:5432/postgres";
25
+ expect(extractProjectRefFromUrl(url)).toBe("abcdefghij");
26
+ });
27
+
28
+ test("extracts project ref from legacy pooler URL", () => {
29
+ const url =
30
+ "postgresql://postgres:password@abcdefghij.pooler.supabase.com:6543/postgres";
31
+ expect(extractProjectRefFromUrl(url)).toBe("abcdefghij");
32
+ });
33
+
34
+ test("extracts project ref from modern AWS pooler URL (username format)", () => {
35
+ const url =
36
+ "postgresql://postgres.abcdefghij:password@aws-0-us-east-1.pooler.supabase.com:6543/postgres";
37
+ expect(extractProjectRefFromUrl(url)).toBe("abcdefghij");
38
+ });
39
+
40
+ test("returns undefined for non-Supabase URL", () => {
41
+ const url = "postgresql://postgres:password@localhost:5432/postgres";
42
+ expect(extractProjectRefFromUrl(url)).toBeUndefined();
43
+ });
44
+
45
+ test("returns undefined for RDS URL", () => {
46
+ const url =
47
+ "postgresql://postgres:password@mydb.cluster-xyz.us-east-1.rds.amazonaws.com:5432/postgres";
48
+ expect(extractProjectRefFromUrl(url)).toBeUndefined();
49
+ });
50
+
51
+ test("returns undefined for invalid URL", () => {
52
+ const url = "not a valid url";
53
+ expect(extractProjectRefFromUrl(url)).toBeUndefined();
54
+ });
55
+
56
+ test("handles URL with special characters in password", () => {
57
+ const url =
58
+ "postgresql://postgres:p%40ss%2Fw0rd@db.myprojectref.supabase.co:5432/postgres";
59
+ expect(extractProjectRefFromUrl(url)).toBe("myprojectref");
60
+ });
61
+
62
+ test("returns undefined for AWS regional pooler without username ref", () => {
63
+ // AWS regional URLs without postgres.<ref> format should not extract region as ref
64
+ const url =
65
+ "postgresql://postgres:password@aws-0-us-east-1.pooler.supabase.com:6543/postgres";
66
+ expect(extractProjectRefFromUrl(url)).toBeUndefined();
67
+ });
68
+ });
69
+
70
+ describe("resolveSupabaseConfig", () => {
71
+ const originalEnv = process.env;
72
+
73
+ beforeEach(() => {
74
+ // Reset env before each test
75
+ process.env = { ...originalEnv };
76
+ delete process.env.SUPABASE_ACCESS_TOKEN;
77
+ delete process.env.SUPABASE_PROJECT_REF;
78
+ });
79
+
80
+ afterEach(() => {
81
+ process.env = originalEnv;
82
+ });
83
+
84
+ test("resolves config from options", () => {
85
+ const config = resolveSupabaseConfig({
86
+ accessToken: "my-token",
87
+ projectRef: "myprojectref12",
88
+ });
89
+ expect(config.accessToken).toBe("my-token");
90
+ expect(config.projectRef).toBe("myprojectref12");
91
+ });
92
+
93
+ test("resolves config from environment variables", () => {
94
+ process.env.SUPABASE_ACCESS_TOKEN = "env-token";
95
+ process.env.SUPABASE_PROJECT_REF = "envprojectref1";
96
+
97
+ const config = resolveSupabaseConfig({});
98
+ expect(config.accessToken).toBe("env-token");
99
+ expect(config.projectRef).toBe("envprojectref1");
100
+ });
101
+
102
+ test("options take precedence over environment variables", () => {
103
+ process.env.SUPABASE_ACCESS_TOKEN = "env-token";
104
+ process.env.SUPABASE_PROJECT_REF = "envprojectref1";
105
+
106
+ const config = resolveSupabaseConfig({
107
+ accessToken: "opts-token",
108
+ projectRef: "optsprojectref",
109
+ });
110
+ expect(config.accessToken).toBe("opts-token");
111
+ expect(config.projectRef).toBe("optsprojectref");
112
+ });
113
+
114
+ test("throws error when access token is missing", () => {
115
+ expect(() =>
116
+ resolveSupabaseConfig({
117
+ projectRef: "myprojectref12",
118
+ })
119
+ ).toThrow(/access token is required/i);
120
+ });
121
+
122
+ test("throws error when project ref is missing", () => {
123
+ expect(() =>
124
+ resolveSupabaseConfig({
125
+ accessToken: "my-token",
126
+ })
127
+ ).toThrow(/project reference is required/i);
128
+ });
129
+
130
+ test("trims whitespace from values", () => {
131
+ const config = resolveSupabaseConfig({
132
+ accessToken: " my-token ",
133
+ projectRef: " myprojectref12 ",
134
+ });
135
+ expect(config.accessToken).toBe("my-token");
136
+ expect(config.projectRef).toBe("myprojectref12");
137
+ });
138
+ });
139
+
140
+ describe("SupabaseClient", () => {
141
+ test("throws error when project ref is empty", () => {
142
+ expect(() => new SupabaseClient({ projectRef: "", accessToken: "token" })).toThrow(
143
+ /project reference is required/i
144
+ );
145
+ });
146
+
147
+ test("throws error when access token is empty", () => {
148
+ expect(() => new SupabaseClient({ projectRef: VALID_PROJECT_REF, accessToken: "" })).toThrow(
149
+ /access token is required/i
150
+ );
151
+ });
152
+
153
+ test("throws error for invalid project ref format (too short)", () => {
154
+ expect(() => new SupabaseClient({ projectRef: "short", accessToken: "token" })).toThrow(
155
+ /invalid supabase project reference format/i
156
+ );
157
+ });
158
+
159
+ test("throws error for invalid project ref format (special chars)", () => {
160
+ expect(() => new SupabaseClient({ projectRef: "../admin/hack", accessToken: "token" })).toThrow(
161
+ /invalid supabase project reference format/i
162
+ );
163
+ });
164
+
165
+ test("accepts valid project ref format", () => {
166
+ const client = new SupabaseClient({ projectRef: VALID_PROJECT_REF, accessToken: "token" });
167
+ expect(client).toBeDefined();
168
+ });
169
+
170
+ describe("query method", () => {
171
+ const originalFetch = globalThis.fetch;
172
+ let mockFetch: ReturnType<typeof mock>;
173
+
174
+ beforeEach(() => {
175
+ mockFetch = mock(() =>
176
+ Promise.resolve(new Response(JSON.stringify([{ db: "postgres" }]), { status: 200 }))
177
+ );
178
+ globalThis.fetch = mockFetch as unknown as typeof fetch;
179
+ });
180
+
181
+ afterEach(() => {
182
+ globalThis.fetch = originalFetch;
183
+ });
184
+
185
+ test("makes correct API request", async () => {
186
+ const client = new SupabaseClient({
187
+ projectRef: VALID_PROJECT_REF,
188
+ accessToken: "mytoken",
189
+ });
190
+
191
+ await client.query("SELECT 1", true);
192
+
193
+ expect(mockFetch).toHaveBeenCalledTimes(1);
194
+ const [url, options] = mockFetch.mock.calls[0] as [string, RequestInit];
195
+ expect(url).toBe(
196
+ `https://api.supabase.com/v1/projects/${VALID_PROJECT_REF}/database/query`
197
+ );
198
+ expect(options.method).toBe("POST");
199
+ expect(options.headers).toEqual({
200
+ "Content-Type": "application/json",
201
+ Authorization: "Bearer mytoken",
202
+ });
203
+ const body = JSON.parse(options.body as string);
204
+ expect(body.query).toBe("SELECT 1");
205
+ expect(body.read_only).toBe(true);
206
+ });
207
+
208
+ test("returns rows from successful response", async () => {
209
+ mockFetch = mock(() =>
210
+ Promise.resolve(
211
+ new Response(JSON.stringify([{ id: 1, name: "test" }]), { status: 200 })
212
+ )
213
+ );
214
+ globalThis.fetch = mockFetch as unknown as typeof fetch;
215
+
216
+ const client = new SupabaseClient({
217
+ projectRef: VALID_PROJECT_REF,
218
+ accessToken: "mytoken",
219
+ });
220
+
221
+ const result = await client.query("SELECT * FROM test");
222
+ expect(result.rows).toEqual([{ id: 1, name: "test" }]);
223
+ expect(result.rowCount).toBe(1);
224
+ });
225
+
226
+ test("handles empty result", async () => {
227
+ mockFetch = mock(() =>
228
+ Promise.resolve(new Response(JSON.stringify([]), { status: 200 }))
229
+ );
230
+ globalThis.fetch = mockFetch as unknown as typeof fetch;
231
+
232
+ const client = new SupabaseClient({
233
+ projectRef: VALID_PROJECT_REF,
234
+ accessToken: "mytoken",
235
+ });
236
+
237
+ const result = await client.query("SELECT * FROM empty_table");
238
+ expect(result.rows).toEqual([]);
239
+ expect(result.rowCount).toBe(0);
240
+ });
241
+
242
+ test("throws PgCompatibleError on HTTP error", async () => {
243
+ mockFetch = mock(() =>
244
+ Promise.resolve(
245
+ new Response(
246
+ JSON.stringify({
247
+ error: {
248
+ code: "42501",
249
+ message: "permission denied",
250
+ details: "Not authorized",
251
+ },
252
+ }),
253
+ { status: 403 }
254
+ )
255
+ )
256
+ );
257
+ globalThis.fetch = mockFetch as unknown as typeof fetch;
258
+
259
+ const client = new SupabaseClient({
260
+ projectRef: VALID_PROJECT_REF,
261
+ accessToken: "mytoken",
262
+ });
263
+
264
+ try {
265
+ await client.query("SELECT * FROM secret_table");
266
+ throw new Error("Expected query to throw");
267
+ } catch (e) {
268
+ const err = e as PgCompatibleError;
269
+ expect(err.message).toBe("permission denied");
270
+ expect(err.code).toBe("42501");
271
+ expect(err.detail).toBe("Not authorized");
272
+ expect(err.httpStatus).toBe(403);
273
+ }
274
+ });
275
+
276
+ test("throws error on non-JSON response", async () => {
277
+ mockFetch = mock(() =>
278
+ Promise.resolve(new Response("Internal Server Error", { status: 500 }))
279
+ );
280
+ globalThis.fetch = mockFetch as unknown as typeof fetch;
281
+
282
+ const client = new SupabaseClient({
283
+ projectRef: VALID_PROJECT_REF,
284
+ accessToken: "mytoken",
285
+ });
286
+
287
+ try {
288
+ await client.query("SELECT 1");
289
+ throw new Error("Expected query to throw");
290
+ } catch (e) {
291
+ const err = e as PgCompatibleError;
292
+ expect(err.message).toContain("non-JSON response");
293
+ expect(err.httpStatus).toBe(500);
294
+ }
295
+ });
296
+
297
+ test("maps Supabase error codes to PostgreSQL codes", async () => {
298
+ mockFetch = mock(() =>
299
+ Promise.resolve(
300
+ new Response(
301
+ JSON.stringify({
302
+ error: {
303
+ code: "PGRST200",
304
+ message: "table not found",
305
+ },
306
+ }),
307
+ { status: 404 }
308
+ )
309
+ )
310
+ );
311
+ globalThis.fetch = mockFetch as unknown as typeof fetch;
312
+
313
+ const client = new SupabaseClient({
314
+ projectRef: VALID_PROJECT_REF,
315
+ accessToken: "mytoken",
316
+ });
317
+
318
+ try {
319
+ await client.query("SELECT * FROM nonexistent");
320
+ throw new Error("Expected query to throw");
321
+ } catch (e) {
322
+ const err = e as PgCompatibleError;
323
+ expect(err.code).toBe("42P01"); // undefined_table
324
+ }
325
+ });
326
+ });
327
+
328
+ describe("testConnection method", () => {
329
+ const originalFetch = globalThis.fetch;
330
+
331
+ afterEach(() => {
332
+ globalThis.fetch = originalFetch;
333
+ });
334
+
335
+ test("returns database and version info", async () => {
336
+ globalThis.fetch = mock(() =>
337
+ Promise.resolve(
338
+ new Response(
339
+ JSON.stringify([
340
+ { db: "postgres", version: "PostgreSQL 15.1" },
341
+ ]),
342
+ { status: 200 }
343
+ )
344
+ )
345
+ ) as unknown as typeof fetch;
346
+
347
+ const client = new SupabaseClient({
348
+ projectRef: VALID_PROJECT_REF,
349
+ accessToken: "mytoken",
350
+ });
351
+
352
+ const result = await client.testConnection();
353
+ expect(result.database).toBe("postgres");
354
+ expect(result.version).toBe("PostgreSQL 15.1");
355
+ });
356
+
357
+ test("returns empty strings for empty response", async () => {
358
+ globalThis.fetch = mock(() =>
359
+ Promise.resolve(new Response(JSON.stringify([]), { status: 200 }))
360
+ ) as unknown as typeof fetch;
361
+
362
+ const client = new SupabaseClient({
363
+ projectRef: VALID_PROJECT_REF,
364
+ accessToken: "mytoken",
365
+ });
366
+
367
+ const result = await client.testConnection();
368
+ expect(result.database).toBe("");
369
+ expect(result.version).toBe("");
370
+ });
371
+ });
372
+
373
+ describe("getCurrentDatabase method", () => {
374
+ const originalFetch = globalThis.fetch;
375
+
376
+ afterEach(() => {
377
+ globalThis.fetch = originalFetch;
378
+ });
379
+
380
+ test("returns current database name", async () => {
381
+ globalThis.fetch = mock(() =>
382
+ Promise.resolve(
383
+ new Response(JSON.stringify([{ db: "mydb" }]), { status: 200 })
384
+ )
385
+ ) as unknown as typeof fetch;
386
+
387
+ const client = new SupabaseClient({
388
+ projectRef: VALID_PROJECT_REF,
389
+ accessToken: "mytoken",
390
+ });
391
+
392
+ const result = await client.getCurrentDatabase();
393
+ expect(result).toBe("mydb");
394
+ });
395
+ });
396
+ });
397
+
398
+ describe("applyInitPlanViaSupabase", () => {
399
+ const originalFetch = globalThis.fetch;
400
+
401
+ afterEach(() => {
402
+ globalThis.fetch = originalFetch;
403
+ });
404
+
405
+ test("applies all non-optional steps and returns applied list", async () => {
406
+ globalThis.fetch = mock(() =>
407
+ Promise.resolve(new Response(JSON.stringify([]), { status: 200 }))
408
+ ) as unknown as typeof fetch;
409
+
410
+ const client = new SupabaseClient({
411
+ projectRef: VALID_PROJECT_REF,
412
+ accessToken: "mytoken",
413
+ });
414
+
415
+ const result = await applyInitPlanViaSupabase({
416
+ client,
417
+ plan: {
418
+ monitoringUser: "test_user",
419
+ database: "testdb",
420
+ steps: [
421
+ { name: "step1", sql: "SELECT 1" },
422
+ { name: "step2", sql: "SELECT 2" },
423
+ ],
424
+ },
425
+ });
426
+
427
+ expect(result.applied).toEqual(["step1", "step2"]);
428
+ expect(result.skippedOptional).toEqual([]);
429
+ });
430
+
431
+ test("skips failing optional steps", async () => {
432
+ let callCount = 0;
433
+ globalThis.fetch = mock(() => {
434
+ callCount++;
435
+ if (callCount === 2) {
436
+ // Second call (optional step) fails
437
+ return Promise.resolve(
438
+ new Response(JSON.stringify({ error: { message: "failed" } }), { status: 500 })
439
+ );
440
+ }
441
+ return Promise.resolve(new Response(JSON.stringify([]), { status: 200 }));
442
+ }) as unknown as typeof fetch;
443
+
444
+ const client = new SupabaseClient({
445
+ projectRef: VALID_PROJECT_REF,
446
+ accessToken: "mytoken",
447
+ });
448
+
449
+ const result = await applyInitPlanViaSupabase({
450
+ client,
451
+ plan: {
452
+ monitoringUser: "test_user",
453
+ database: "testdb",
454
+ steps: [
455
+ { name: "required1", sql: "SELECT 1" },
456
+ { name: "optional1", sql: "SELECT 2", optional: true },
457
+ ],
458
+ },
459
+ });
460
+
461
+ expect(result.applied).toEqual(["required1"]);
462
+ expect(result.skippedOptional).toEqual(["optional1"]);
463
+ });
464
+
465
+ test("throws on failing required step with preserved error fields", async () => {
466
+ globalThis.fetch = mock(() =>
467
+ Promise.resolve(
468
+ new Response(
469
+ JSON.stringify({ error: { code: "42501", message: "permission denied" } }),
470
+ { status: 403 }
471
+ )
472
+ )
473
+ ) as unknown as typeof fetch;
474
+
475
+ const client = new SupabaseClient({
476
+ projectRef: VALID_PROJECT_REF,
477
+ accessToken: "mytoken",
478
+ });
479
+
480
+ try {
481
+ await applyInitPlanViaSupabase({
482
+ client,
483
+ plan: {
484
+ monitoringUser: "test_user",
485
+ database: "testdb",
486
+ steps: [{ name: "create_role", sql: "CREATE ROLE test" }],
487
+ },
488
+ });
489
+ throw new Error("Expected to throw");
490
+ } catch (e) {
491
+ const err = e as PgCompatibleError;
492
+ expect(err.message).toContain('Failed at step "create_role"');
493
+ expect(err.code).toBe("42501");
494
+ }
495
+ });
496
+ });
497
+
498
+ describe("verifyInitSetupViaSupabase", () => {
499
+ const originalFetch = globalThis.fetch;
500
+
501
+ afterEach(() => {
502
+ globalThis.fetch = originalFetch;
503
+ });
504
+
505
+ test("throws error for invalid monitoring user name", async () => {
506
+ const client = new SupabaseClient({
507
+ projectRef: VALID_PROJECT_REF,
508
+ accessToken: "mytoken",
509
+ });
510
+
511
+ try {
512
+ await verifyInitSetupViaSupabase({
513
+ client,
514
+ database: "testdb",
515
+ monitoringUser: "invalid-user-name!", // Invalid: contains hyphen and exclamation
516
+ includeOptionalPermissions: false,
517
+ });
518
+ throw new Error("Expected to throw");
519
+ } catch (e) {
520
+ expect((e as Error).message).toContain("Invalid monitoring user name");
521
+ }
522
+ });
523
+
524
+ test("throws error for database name with null bytes (SQL injection prevention)", async () => {
525
+ globalThis.fetch = mock(() =>
526
+ Promise.resolve(new Response(JSON.stringify([{ rolname: "postgres_ai_mon" }]), { status: 200 }))
527
+ ) as unknown as typeof fetch;
528
+
529
+ const client = new SupabaseClient({
530
+ projectRef: VALID_PROJECT_REF,
531
+ accessToken: "mytoken",
532
+ });
533
+
534
+ try {
535
+ await verifyInitSetupViaSupabase({
536
+ client,
537
+ database: "test\0db", // null byte should be rejected
538
+ monitoringUser: "postgres_ai_mon",
539
+ includeOptionalPermissions: false,
540
+ });
541
+ throw new Error("Expected to throw");
542
+ } catch (e) {
543
+ expect((e as Error).message).toContain("null bytes");
544
+ }
545
+ });
546
+
547
+ test("returns missing role when role does not exist", async () => {
548
+ globalThis.fetch = mock(() =>
549
+ Promise.resolve(new Response(JSON.stringify([]), { status: 200 }))
550
+ ) as unknown as typeof fetch;
551
+
552
+ const client = new SupabaseClient({
553
+ projectRef: VALID_PROJECT_REF,
554
+ accessToken: "mytoken",
555
+ });
556
+
557
+ const result = await verifyInitSetupViaSupabase({
558
+ client,
559
+ database: "testdb",
560
+ monitoringUser: "postgres_ai_mon",
561
+ includeOptionalPermissions: false,
562
+ });
563
+
564
+ expect(result.ok).toBe(false);
565
+ expect(result.missingRequired).toContain('role "postgres_ai_mon" does not exist');
566
+ });
567
+ });
568
+ });