postgresai 0.14.0-beta.2 → 0.14.0-beta.4

This diff reflects the content of publicly available package versions as they appear in their respective public registries and is provided for informational purposes only.
Files changed (63)
  1. package/README.md +53 -45
  2. package/bin/postgres-ai.ts +953 -353
  3. package/bun.lock +258 -0
  4. package/bunfig.toml +11 -0
  5. package/dist/bin/postgres-ai.js +27868 -1781
  6. package/lib/auth-server.ts +124 -106
  7. package/lib/checkup-api.ts +386 -0
  8. package/lib/checkup.ts +1327 -0
  9. package/lib/config.ts +3 -0
  10. package/lib/init.ts +283 -158
  11. package/lib/issues.ts +86 -195
  12. package/lib/mcp-server.ts +6 -17
  13. package/lib/metrics-embedded.ts +79 -0
  14. package/lib/metrics-loader.ts +127 -0
  15. package/lib/util.ts +61 -0
  16. package/package.json +18 -10
  17. package/packages/postgres-ai/README.md +26 -0
  18. package/packages/postgres-ai/bin/postgres-ai.js +27 -0
  19. package/packages/postgres-ai/package.json +27 -0
  20. package/scripts/embed-metrics.ts +154 -0
  21. package/sql/02.permissions.sql +9 -5
  22. package/sql/05.helpers.sql +415 -0
  23. package/test/checkup.integration.test.ts +273 -0
  24. package/test/checkup.test.ts +890 -0
  25. package/test/init.integration.test.ts +399 -0
  26. package/test/init.test.ts +345 -0
  27. package/test/schema-validation.test.ts +81 -0
  28. package/test/test-utils.ts +122 -0
  29. package/tsconfig.json +12 -20
  30. package/dist/bin/postgres-ai.d.ts +0 -3
  31. package/dist/bin/postgres-ai.d.ts.map +0 -1
  32. package/dist/bin/postgres-ai.js.map +0 -1
  33. package/dist/lib/auth-server.d.ts +0 -31
  34. package/dist/lib/auth-server.d.ts.map +0 -1
  35. package/dist/lib/auth-server.js +0 -263
  36. package/dist/lib/auth-server.js.map +0 -1
  37. package/dist/lib/config.d.ts +0 -45
  38. package/dist/lib/config.d.ts.map +0 -1
  39. package/dist/lib/config.js +0 -181
  40. package/dist/lib/config.js.map +0 -1
  41. package/dist/lib/init.d.ts +0 -77
  42. package/dist/lib/init.d.ts.map +0 -1
  43. package/dist/lib/init.js +0 -550
  44. package/dist/lib/init.js.map +0 -1
  45. package/dist/lib/issues.d.ts +0 -75
  46. package/dist/lib/issues.d.ts.map +0 -1
  47. package/dist/lib/issues.js +0 -336
  48. package/dist/lib/issues.js.map +0 -1
  49. package/dist/lib/mcp-server.d.ts +0 -9
  50. package/dist/lib/mcp-server.d.ts.map +0 -1
  51. package/dist/lib/mcp-server.js +0 -168
  52. package/dist/lib/mcp-server.js.map +0 -1
  53. package/dist/lib/pkce.d.ts +0 -32
  54. package/dist/lib/pkce.d.ts.map +0 -1
  55. package/dist/lib/pkce.js +0 -101
  56. package/dist/lib/pkce.js.map +0 -1
  57. package/dist/lib/util.d.ts +0 -27
  58. package/dist/lib/util.d.ts.map +0 -1
  59. package/dist/lib/util.js +0 -46
  60. package/dist/lib/util.js.map +0 -1
  61. package/dist/package.json +0 -46
  62. package/test/init.integration.test.cjs +0 -382
  63. package/test/init.test.cjs +0 -323
package/test/init.test.ts ADDED
@@ -0,0 +1,345 @@
+ import { describe, test, expect, beforeAll } from "bun:test";
+ import { resolve } from "path";
+
+ // Import from source directly since we're using Bun
+ import * as init from "../lib/init";
+ const DEFAULT_MONITORING_USER = init.DEFAULT_MONITORING_USER;
+
+ function runCli(args: string[], env: Record<string, string> = {}) {
+   const cliPath = resolve(import.meta.dir, "..", "bin", "postgres-ai.ts");
+   const bunBin = typeof process.execPath === "string" && process.execPath.length > 0 ? process.execPath : "bun";
+   const result = Bun.spawnSync([bunBin, cliPath, ...args], {
+     env: { ...process.env, ...env },
+   });
+   return {
+     status: result.exitCode,
+     stdout: new TextDecoder().decode(result.stdout),
+     stderr: new TextDecoder().decode(result.stderr),
+   };
+ }
+
+ function runPgai(args: string[], env: Record<string, string> = {}) {
+   // For testing, run the CLI directly since pgai is just a thin wrapper
+   // In production, pgai wrapper will properly resolve and spawn the postgresai CLI
+   const cliPath = resolve(import.meta.dir, "..", "bin", "postgres-ai.ts");
+   const bunBin = typeof process.execPath === "string" && process.execPath.length > 0 ? process.execPath : "bun";
+   const result = Bun.spawnSync([bunBin, cliPath, ...args], {
+     env: { ...process.env, ...env },
+   });
+   return {
+     status: result.exitCode,
+     stdout: new TextDecoder().decode(result.stdout),
+     stderr: new TextDecoder().decode(result.stderr),
+   };
+ }
+
+ describe("init module", () => {
+   test("maskConnectionString hides password when present", () => {
+     const masked = init.maskConnectionString("postgresql://user:secret@localhost:5432/mydb");
+     expect(masked).toMatch(/postgresql:\/\/user:\*{5}@localhost:5432\/mydb/);
+     expect(masked).not.toMatch(/secret/);
+   });
+
+   test("parseLibpqConninfo parses basic host/dbname/user/port/password", () => {
+     const cfg = init.parseLibpqConninfo("dbname=mydb host=localhost user=alice port=5432 password=secret");
+     expect(cfg.database).toBe("mydb");
+     expect(cfg.host).toBe("localhost");
+     expect(cfg.user).toBe("alice");
+     expect(cfg.port).toBe(5432);
+     expect(cfg.password).toBe("secret");
+   });
+
+   test("parseLibpqConninfo supports quoted values", () => {
+     const cfg = init.parseLibpqConninfo("dbname='my db' host='local host'");
+     expect(cfg.database).toBe("my db");
+     expect(cfg.host).toBe("local host");
+   });
+
+   test("buildInitPlan includes a race-safe role DO block", async () => {
+     const plan = await init.buildInitPlan({
+       database: "mydb",
+       monitoringUser: DEFAULT_MONITORING_USER,
+       monitoringPassword: "pw",
+       includeOptionalPermissions: false,
+     });
+
+     expect(plan.database).toBe("mydb");
+     const roleStep = plan.steps.find((s: { name: string }) => s.name === "01.role");
+     expect(roleStep).toBeTruthy();
+     expect(roleStep.sql).toMatch(/do\s+\$\$/i);
+     expect(roleStep.sql).toMatch(/create\s+user/i);
+     expect(roleStep.sql).toMatch(/alter\s+user/i);
+     expect(plan.steps.some((s: { optional?: boolean }) => s.optional)).toBe(false);
+   });
+
+   test("buildInitPlan handles special characters in monitoring user and database identifiers", async () => {
+     const monitoringUser = 'user "with" quotes ✓';
+     const database = 'db name "with" quotes ✓';
+     const plan = await init.buildInitPlan({
+       database,
+       monitoringUser,
+       monitoringPassword: "pw",
+       includeOptionalPermissions: false,
+     });
+
+     const roleStep = plan.steps.find((s: { name: string }) => s.name === "01.role");
+     expect(roleStep).toBeTruthy();
+     expect(roleStep.sql).toMatch(/create\s+user\s+"user ""with"" quotes ✓"/i);
+     expect(roleStep.sql).toMatch(/alter\s+user\s+"user ""with"" quotes ✓"/i);
+
+     const permStep = plan.steps.find((s: { name: string }) => s.name === "02.permissions");
+     expect(permStep).toBeTruthy();
+     expect(permStep.sql).toMatch(/grant connect on database "db name ""with"" quotes ✓" to "user ""with"" quotes ✓"/i);
+   });
+
+   test("buildInitPlan keeps backslashes in passwords (no unintended escaping)", async () => {
+     const pw = String.raw`pw\with\backslash`;
+     const plan = await init.buildInitPlan({
+       database: "mydb",
+       monitoringUser: DEFAULT_MONITORING_USER,
+       monitoringPassword: pw,
+       includeOptionalPermissions: false,
+     });
+     const roleStep = plan.steps.find((s: { name: string }) => s.name === "01.role");
+     expect(roleStep).toBeTruthy();
+     expect(roleStep.sql).toContain(`password '${pw}'`);
+   });
+
+   test("buildInitPlan rejects identifiers with null bytes", async () => {
+     await expect(
+       init.buildInitPlan({
+         database: "mydb",
+         monitoringUser: "bad\0user",
+         monitoringPassword: "pw",
+         includeOptionalPermissions: false,
+       })
+     ).rejects.toThrow(/Identifier cannot contain null bytes/);
+   });
+
+   test("buildInitPlan rejects literals with null bytes", async () => {
+     await expect(
+       init.buildInitPlan({
+         database: "mydb",
+         monitoringUser: DEFAULT_MONITORING_USER,
+         monitoringPassword: "pw\0bad",
+         includeOptionalPermissions: false,
+       })
+     ).rejects.toThrow(/Literal cannot contain null bytes/);
+   });
+
+   test("buildInitPlan inlines password safely for CREATE/ALTER ROLE grammar", async () => {
+     const plan = await init.buildInitPlan({
+       database: "mydb",
+       monitoringUser: DEFAULT_MONITORING_USER,
+       monitoringPassword: "pa'ss",
+       includeOptionalPermissions: false,
+     });
+     const step = plan.steps.find((s: { name: string }) => s.name === "01.role");
+     expect(step).toBeTruthy();
+     expect(step.sql).toMatch(/password 'pa''ss'/);
+     expect(step.params).toBeUndefined();
+   });
+
+   test("buildInitPlan includes optional steps when enabled", async () => {
+     const plan = await init.buildInitPlan({
+       database: "mydb",
+       monitoringUser: DEFAULT_MONITORING_USER,
+       monitoringPassword: "pw",
+       includeOptionalPermissions: true,
+     });
+     expect(plan.steps.some((s: { optional?: boolean }) => s.optional)).toBe(true);
+   });
+
+   test("resolveAdminConnection accepts positional URI", () => {
+     const r = init.resolveAdminConnection({ conn: "postgresql://u:p@h:5432/d" });
+     expect(r.clientConfig.connectionString).toBeTruthy();
+     expect(r.display).not.toMatch(/:p@/);
+   });
+
+   test("resolveAdminConnection accepts positional conninfo", () => {
+     const r = init.resolveAdminConnection({ conn: "dbname=mydb host=localhost user=alice" });
+     expect(r.clientConfig.database).toBe("mydb");
+     expect(r.clientConfig.host).toBe("localhost");
+     expect(r.clientConfig.user).toBe("alice");
+   });
+
+   test("resolveAdminConnection rejects invalid psql-like port", () => {
+     expect(() => init.resolveAdminConnection({ host: "localhost", port: "abc", username: "u", dbname: "d" }))
+       .toThrow(/Invalid port value/);
+   });
+
+   test("resolveAdminConnection rejects when only PGPASSWORD is provided (no connection details)", () => {
+     expect(() => init.resolveAdminConnection({ envPassword: "pw" })).toThrow(/Connection is required/);
+   });
+
+   test("resolveAdminConnection rejects when connection is missing", () => {
+     expect(() => init.resolveAdminConnection({})).toThrow(/Connection is required/);
+   });
+
+   test("resolveMonitoringPassword auto-generates a strong, URL-safe password by default", async () => {
+     const r = await init.resolveMonitoringPassword({ monitoringUser: DEFAULT_MONITORING_USER });
+     expect(r.generated).toBe(true);
+     expect(typeof r.password).toBe("string");
+     expect(r.password.length).toBeGreaterThanOrEqual(30);
+     expect(r.password).toMatch(/^[A-Za-z0-9_-]+$/);
+   });
+
+   test("applyInitPlan preserves Postgres error fields on step failures", async () => {
+     const plan = {
+       monitoringUser: DEFAULT_MONITORING_USER,
+       database: "mydb",
+       steps: [{ name: "01.role", sql: "select 1" }],
+     };
+
+     const pgErr = Object.assign(new Error("permission denied to create role"), {
+       code: "42501",
+       detail: "some detail",
+       hint: "some hint",
+       schema: "pg_catalog",
+       table: "pg_roles",
+       constraint: "some_constraint",
+       routine: "aclcheck_error",
+     });
+
+     const calls: string[] = [];
+     const client = {
+       query: async (sql: string) => {
+         calls.push(sql);
+         if (sql === "begin;") return { rowCount: 1 };
+         if (sql === "rollback;") return { rowCount: 1 };
+         if (sql === "select 1") throw pgErr;
+         throw new Error(`unexpected sql: ${sql}`);
+       },
+     };
+
+     try {
+       await init.applyInitPlan({ client: client as any, plan: plan as any });
+       expect(true).toBe(false); // Should not reach here
+     } catch (e: any) {
+       expect(e).toBeInstanceOf(Error);
+       expect(e.message).toMatch(/Failed at step "01\.role":/);
+       expect(e.code).toBe("42501");
+       expect(e.detail).toBe("some detail");
+       expect(e.hint).toBe("some hint");
+       expect(e.schema).toBe("pg_catalog");
+       expect(e.table).toBe("pg_roles");
+       expect(e.constraint).toBe("some_constraint");
+       expect(e.routine).toBe("aclcheck_error");
+     }
+
+     expect(calls).toEqual(["begin;", "select 1", "rollback;"]);
+   });
+
+   test("verifyInitSetup runs inside a repeatable read snapshot and rolls back", async () => {
+     const calls: string[] = [];
+     const client = {
+       query: async (sql: string, params?: any) => {
+         calls.push(String(sql));
+
+         if (String(sql).toLowerCase().startsWith("begin isolation level repeatable read")) {
+           return { rowCount: 1, rows: [] };
+         }
+         if (String(sql).toLowerCase() === "rollback;") {
+           return { rowCount: 1, rows: [] };
+         }
+         if (String(sql).includes("select rolconfig")) {
+           return { rowCount: 1, rows: [{ rolconfig: ['search_path=postgres_ai, "$user", public, pg_catalog'] }] };
+         }
+         if (String(sql).includes("from pg_catalog.pg_roles")) {
+           return { rowCount: 1, rows: [] };
+         }
+         if (String(sql).includes("has_database_privilege")) {
+           return { rowCount: 1, rows: [{ ok: true }] };
+         }
+         if (String(sql).includes("pg_has_role")) {
+           return { rowCount: 1, rows: [{ ok: true }] };
+         }
+         if (String(sql).includes("has_table_privilege") && String(sql).includes("pg_catalog.pg_index")) {
+           return { rowCount: 1, rows: [{ ok: true }] };
+         }
+         if (String(sql).includes("to_regclass('postgres_ai.pg_statistic')")) {
+           return { rowCount: 1, rows: [{ ok: true }] };
+         }
+         if (String(sql).includes("has_table_privilege") && String(sql).includes("postgres_ai.pg_statistic")) {
+           return { rowCount: 1, rows: [{ ok: true }] };
+         }
+         if (String(sql).includes("has_function_privilege")) {
+           return { rowCount: 1, rows: [{ ok: true }] };
+         }
+         if (String(sql).includes("has_schema_privilege")) {
+           return { rowCount: 1, rows: [{ ok: true }] };
+         }
+
+         throw new Error(`unexpected sql: ${sql} params=${JSON.stringify(params)}`);
+       },
+     };
+
+     const r = await init.verifyInitSetup({
+       client: client as any,
+       database: "mydb",
+       monitoringUser: DEFAULT_MONITORING_USER,
+       includeOptionalPermissions: false,
+     });
+     expect(r.ok).toBe(true);
+     expect(r.missingRequired.length).toBe(0);
+
+     expect(calls.length).toBeGreaterThan(2);
+     expect(calls[0].toLowerCase()).toMatch(/^begin isolation level repeatable read/);
+     expect(calls[calls.length - 1].toLowerCase()).toBe("rollback;");
+   });
+
+   test("redactPasswordsInSql redacts password literals with embedded quotes", async () => {
+     const plan = await init.buildInitPlan({
+       database: "mydb",
+       monitoringUser: DEFAULT_MONITORING_USER,
+       monitoringPassword: "pa'ss",
+       includeOptionalPermissions: false,
+     });
+     const step = plan.steps.find((s: { name: string }) => s.name === "01.role");
+     expect(step).toBeTruthy();
+     const redacted = init.redactPasswordsInSql(step.sql);
+     expect(redacted).toMatch(/password '<redacted>'/i);
+   });
+ });
+
+ describe("CLI commands", () => {
+   test("cli: prepare-db with missing connection prints help/options", () => {
+     const r = runCli(["prepare-db"]);
+     expect(r.status).not.toBe(0);
+     expect(r.stderr).toMatch(/--print-sql/);
+     expect(r.stderr).toMatch(/--monitoring-user/);
+   });
+
+   test("cli: prepare-db --print-sql works without connection (offline mode)", () => {
+     const r = runCli(["prepare-db", "--print-sql", "-d", "mydb", "--password", "monpw"]);
+     expect(r.status).toBe(0);
+     expect(r.stdout).toMatch(/SQL plan \(offline; not connected\)/);
+     expect(r.stdout).toMatch(new RegExp(`grant connect on database "mydb" to "${DEFAULT_MONITORING_USER}"`, "i"));
+   });
+
+   test("pgai wrapper forwards to postgresai CLI", () => {
+     const r = runPgai(["--help"]);
+     expect(r.status).toBe(0);
+     expect(r.stdout).toMatch(/postgresai|PostgresAI/i);
+   });
+
+   test("cli: prepare-db command exists and shows help", () => {
+     const r = runCli(["prepare-db", "--help"]);
+     expect(r.status).toBe(0);
+     expect(r.stdout).toMatch(/monitoring user/i);
+     expect(r.stdout).toMatch(/--print-sql/);
+   });
+
+   test("cli: mon local-install command exists and shows help", () => {
+     const r = runCli(["mon", "local-install", "--help"]);
+     expect(r.status).toBe(0);
+     expect(r.stdout).toMatch(/--demo/);
+     expect(r.stdout).toMatch(/--api-key/);
+   });
+
+   test("cli: auth login --help shows --set-key option", () => {
+     const r = runCli(["auth", "login", "--help"]);
+     expect(r.status).toBe(0);
+     expect(r.stdout).toMatch(/--set-key/);
+   });
+ });
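
The tests above pin down the init module's public surface (buildInitPlan, applyInitPlan, verifyInitSetup, resolveMonitoringPassword, redactPasswordsInSql). As a rough sketch only, assuming the signatures exercised in those assertions (the pg client, the database name, and the wrapper function below are illustrative assumptions, not part of the package), a programmatic caller might wire the pieces together like this:

// Sketch only; signatures are inferred from the tests above, not from package docs.
import { Client } from "pg";
import * as init from "../lib/init"; // import path as used by the tests; adjust for your layout

async function prepareMonitoring(adminConnString: string) {
  // Generate a strong, URL-safe monitoring password (tests show >= 30 chars, [A-Za-z0-9_-]).
  const { password } = await init.resolveMonitoringPassword({
    monitoringUser: init.DEFAULT_MONITORING_USER,
  });

  // Build the SQL plan (role, permissions, helper steps) without touching the database yet.
  const plan = await init.buildInitPlan({
    database: "mydb", // illustrative database name
    monitoringUser: init.DEFAULT_MONITORING_USER,
    monitoringPassword: password,
    includeOptionalPermissions: true,
  });

  const client = new Client({ connectionString: adminConnString });
  await client.connect();
  try {
    // Apply the plan's steps; on failure the thrown error keeps Postgres fields
    // (code, detail, hint, ...) as asserted in the tests above.
    await init.applyInitPlan({ client: client as any, plan });

    // Re-check grants inside a repeatable read snapshot that is rolled back afterwards.
    const check = await init.verifyInitSetup({
      client: client as any,
      database: "mydb",
      monitoringUser: init.DEFAULT_MONITORING_USER,
      includeOptionalPermissions: true,
    });
    if (!check.ok) {
      console.error("missing required grants:", check.missingRequired);
    }
  } finally {
    await client.end();
  }
}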
package/test/schema-validation.test.ts ADDED
@@ -0,0 +1,81 @@
+ /**
+  * JSON Schema validation tests for express checkup reports.
+  * Validates that generated reports match schemas in reporter/schemas/.
+  */
+ import { describe, test, expect } from "bun:test";
+ import { resolve } from "path";
+ import { readFileSync } from "fs";
+ import Ajv2020 from "ajv/dist/2020";
+
+ import * as checkup from "../lib/checkup";
+ import { createMockClient } from "./test-utils";
+
+ const ajv = new Ajv2020({ allErrors: true, strict: false });
+ const schemasDir = resolve(import.meta.dir, "../../reporter/schemas");
+
+ function validateAgainstSchema(report: any, checkId: string): void {
+   const schemaPath = resolve(schemasDir, `${checkId}.schema.json`);
+   const schema = JSON.parse(readFileSync(schemaPath, "utf8"));
+   const validate = ajv.compile(schema);
+   const valid = validate(report);
+   if (!valid) {
+     const errors = validate.errors?.map(e => `${e.instancePath}: ${e.message}`).join(", ");
+     throw new Error(`${checkId} schema validation failed: ${errors}`);
+   }
+ }
+
+ // Test data for index reports
+ const indexTestData = {
+   H001: {
+     emptyRows: { invalidIndexesRows: [] },
+     dataRows: {
+       invalidIndexesRows: [
+         { schema_name: "public", table_name: "users", index_name: "users_email_idx", relation_name: "users", index_size_bytes: "1048576", supports_fk: false },
+       ],
+     },
+   },
+   H002: {
+     emptyRows: { unusedIndexesRows: [] },
+     dataRows: {
+       unusedIndexesRows: [
+         { schema_name: "public", table_name: "logs", index_name: "logs_created_idx", index_definition: "CREATE INDEX logs_created_idx ON public.logs USING btree (created_at)", reason: "Never Used Indexes", idx_scan: "0", index_size_bytes: "8388608", idx_is_btree: true, supports_fk: false },
+       ],
+     },
+   },
+   H004: {
+     emptyRows: { redundantIndexesRows: [] },
+     dataRows: {
+       redundantIndexesRows: [
+         { schema_name: "public", table_name: "orders", index_name: "orders_user_id_idx", relation_name: "orders", access_method: "btree", reason: "public.orders_user_id_created_idx", index_size_bytes: "2097152", table_size_bytes: "16777216", index_usage: "0", supports_fk: false, index_definition: "CREATE INDEX orders_user_id_idx ON public.orders USING btree (user_id)", redundant_to_json: JSON.stringify([{ index_name: "public.orders_user_id_created_idx", index_definition: "CREATE INDEX ...", index_size_bytes: 1048576 }]) },
+       ],
+     },
+   },
+ };
+
+ describe("Schema validation", () => {
+   // Index health checks (H001, H002, H004) - test empty and with data
+   for (const [checkId, testData] of Object.entries(indexTestData)) {
+     const generator = checkup.REPORT_GENERATORS[checkId];
+
+     test(`${checkId} validates with empty data`, async () => {
+       const mockClient = createMockClient(testData.emptyRows);
+       const report = await generator(mockClient as any, "node-01");
+       validateAgainstSchema(report, checkId);
+     });
+
+     test(`${checkId} validates with sample data`, async () => {
+       const mockClient = createMockClient(testData.dataRows);
+       const report = await generator(mockClient as any, "node-01");
+       validateAgainstSchema(report, checkId);
+     });
+   }
+
+   // Settings reports (D004, F001, G001) - single test each
+   for (const checkId of ["D004", "F001", "G001"]) {
+     test(`${checkId} validates against schema`, async () => {
+       const mockClient = createMockClient();
+       const report = await checkup.REPORT_GENERATORS[checkId](mockClient as any, "node-01");
+       validateAgainstSchema(report, checkId);
+     });
+   }
+ });
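
The validation flow above is the standard Ajv compile-then-validate pattern for draft 2020-12 schemas. A minimal self-contained sketch of that pattern, with an inline schema and sample report invented for illustration (they do not reflect the real reporter/schemas files):

import Ajv2020 from "ajv/dist/2020";

const ajv = new Ajv2020({ allErrors: true, strict: false });

// Tiny stand-in schema; the real ones live in reporter/schemas/<checkId>.schema.json.
const schema = {
  type: "object",
  required: ["checkId", "results"],
  properties: {
    checkId: { type: "string" },
    results: { type: "object" },
  },
};

const validate = ajv.compile(schema);
const ok = validate({ checkId: "H001", results: {} });
if (!ok) {
  // With allErrors enabled, validate.errors lists every failure, not just the first.
  console.error(validate.errors);
}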
package/test/test-utils.ts ADDED
@@ -0,0 +1,122 @@
+ /**
+  * Shared test utilities for CLI tests.
+  */
+
+ export interface MockClientOptions {
+   /** Database name returned by current_database() queries (default: "testdb") */
+   databaseName?: string;
+   /** Version rows for pg_settings version query (default: PG 16.3) */
+   versionRows?: any[];
+   settingsRows?: any[];
+   databaseSizesRows?: any[];
+   dbStatsRows?: any[];
+   connectionStatesRows?: any[];
+   uptimeRows?: any[];
+   invalidIndexesRows?: any[];
+   unusedIndexesRows?: any[];
+   redundantIndexesRows?: any[];
+ }
+
+ const DEFAULT_VERSION_ROWS = [
+   { name: "server_version", setting: "16.3" },
+   { name: "server_version_num", setting: "160003" },
+ ];
+
+ const defaultSettingsRows = [
+   { tag_setting_name: "shared_buffers", tag_setting_value: "128MB", tag_unit: "", tag_category: "Resource Usage / Memory", tag_vartype: "string", is_default: 1, setting_normalized: null, unit_normalized: null },
+   { tag_setting_name: "work_mem", tag_setting_value: "4MB", tag_unit: "", tag_category: "Resource Usage / Memory", tag_vartype: "string", is_default: 1, setting_normalized: null, unit_normalized: null },
+   { tag_setting_name: "autovacuum", tag_setting_value: "on", tag_unit: "", tag_category: "Autovacuum", tag_vartype: "bool", is_default: 1, setting_normalized: null, unit_normalized: null },
+   { tag_setting_name: "pg_stat_statements.max", tag_setting_value: "5000", tag_unit: "", tag_category: "Custom", tag_vartype: "integer", is_default: 0, setting_normalized: null, unit_normalized: null },
+ ];
+
+ /**
+  * Create a mock PostgreSQL client for testing report generators.
+  * Routes SQL queries to appropriate mock data based on query patterns.
+  */
+ export function createMockClient(options: MockClientOptions = {}) {
+   const {
+     databaseName = "testdb",
+     versionRows = DEFAULT_VERSION_ROWS,
+     settingsRows = defaultSettingsRows,
+     databaseSizesRows = [],
+     dbStatsRows = [],
+     connectionStatesRows = [],
+     uptimeRows = [],
+     invalidIndexesRows = [],
+     unusedIndexesRows = [],
+     redundantIndexesRows = [],
+   } = options;
+
+   return {
+     query: async (sql: string) => {
+       // Version query (simple inline - used by getPostgresVersion)
+       if (sql.includes("server_version") && sql.includes("server_version_num") && sql.includes("pg_settings") && !sql.includes("tag_setting_name")) {
+         return { rows: versionRows };
+       }
+       // Settings metric query (from metrics.yml - has tag_setting_name, tag_setting_value)
+       if (sql.includes("tag_setting_name") && sql.includes("tag_setting_value") && sql.includes("pg_settings")) {
+         return { rows: settingsRows };
+       }
+       // Database sizes (simple inline - lists all databases)
+       if (sql.includes("pg_database") && sql.includes("pg_database_size") && sql.includes("datistemplate")) {
+         return { rows: databaseSizesRows };
+       }
+       // db_size metric (current database size from metrics.yml)
+       if (sql.includes("pg_database_size(current_database())") && sql.includes("size_b")) {
+         return { rows: [{ tag_datname: databaseName, size_b: "1073741824" }] };
+       }
+       // db_stats metric (from metrics.yml)
+       if (sql.includes("pg_stat_database") && sql.includes("xact_commit") && sql.includes("pg_control_system")) {
+         return { rows: dbStatsRows };
+       }
+       // Stats reset metric (from metrics.yml)
+       if (sql.includes("stats_reset") && sql.includes("pg_stat_database") && sql.includes("seconds_since_reset")) {
+         return { rows: [{ tag_database_name: databaseName, stats_reset_epoch: "1704067200", seconds_since_reset: "2592000" }] };
+       }
+       // Postmaster startup time (simple inline - used by getStatsReset)
+       if (sql.includes("pg_postmaster_start_time") && sql.includes("postmaster_startup_epoch")) {
+         return { rows: [{ postmaster_startup_epoch: "1704067200", postmaster_startup_time: "2024-01-01 00:00:00+00" }] };
+       }
+       // Connection states (simple inline)
+       if (sql.includes("pg_stat_activity") && sql.includes("state") && sql.includes("group by")) {
+         return { rows: connectionStatesRows };
+       }
+       // Uptime info (simple inline)
+       if (sql.includes("pg_postmaster_start_time()") && sql.includes("uptime") && !sql.includes("postmaster_startup_epoch")) {
+         return { rows: uptimeRows };
+       }
+       // Invalid indexes (H001) - from metrics.yml
+       if (sql.includes("indisvalid = false") && sql.includes("fk_indexes")) {
+         return { rows: invalidIndexesRows };
+       }
+       // Unused indexes (H002) - from metrics.yml
+       if (sql.includes("Never Used Indexes") && sql.includes("idx_scan = 0")) {
+         return { rows: unusedIndexesRows };
+       }
+       // Redundant indexes (H004) - from metrics.yml
+       if (sql.includes("redundant_indexes_grouped") && sql.includes("columns like")) {
+         return { rows: redundantIndexesRows };
+       }
+       // D004: pg_stat_statements extension check
+       if (sql.includes("pg_extension") && sql.includes("pg_stat_statements")) {
+         return { rows: [] };
+       }
+       // D004: pg_stat_kcache extension check
+       if (sql.includes("pg_extension") && sql.includes("pg_stat_kcache")) {
+         return { rows: [] };
+       }
+       // G001: Memory settings query
+       if (sql.includes("pg_size_bytes") && sql.includes("shared_buffers") && sql.includes("work_mem")) {
+         return { rows: [{
+           shared_buffers_bytes: "134217728",
+           wal_buffers_bytes: "4194304",
+           work_mem_bytes: "4194304",
+           maintenance_work_mem_bytes: "67108864",
+           effective_cache_size_bytes: "4294967296",
+           max_connections: 100,
+         }] };
+       }
+       throw new Error(`Unexpected query: ${sql}`);
+     },
+   };
+ }
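
createMockClient dispatches on substrings of the incoming SQL, so a test overrides only the row sets it cares about, and any query that matches no known pattern throws instead of passing silently. A small hypothetical usage, where the SQL fragment is written only to hit the H001 (invalid indexes) branch above:

// Hypothetical usage of the helper above; the query text is illustrative.
import { createMockClient } from "./test-utils";

const client = createMockClient({
  invalidIndexesRows: [
    { schema_name: "public", table_name: "users", index_name: "users_email_idx",
      relation_name: "users", index_size_bytes: "1048576", supports_fk: false },
  ],
});

// Contains both "indisvalid = false" and "fk_indexes", so it routes to invalidIndexesRows.
const res = await client.query("/* fk_indexes */ select ... where indisvalid = false");
console.log(res.rows.length); // 1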
package/tsconfig.json CHANGED
@@ -1,28 +1,20 @@
  {
    "compilerOptions": {
-     "target": "ES2020",
-     "module": "node16",
-     "lib": ["ES2020"],
-     "outDir": "./dist",
-     "rootDir": "./",
+     "target": "ESNext",
+     "module": "ESNext",
+     "moduleResolution": "bundler",
+     "lib": ["ESNext"],
+     "types": ["bun-types"],
      "strict": true,
      "esModuleInterop": true,
      "skipLibCheck": true,
-     "forceConsistentCasingInFileNames": true,
+     "noEmit": true,
      "resolveJsonModule": true,
-     "declaration": true,
-     "declarationMap": true,
-     "sourceMap": true,
-     "moduleResolution": "node16",
-     "types": ["node"]
+     "allowImportingTsExtensions": true,
+     "verbatimModuleSyntax": false,
+     "allowSyntheticDefaultImports": true,
+     "forceConsistentCasingInFileNames": true
    },
-   "include": [
-     "bin/**/*",
-     "lib/**/*"
-   ],
-   "exclude": [
-     "node_modules",
-     "dist"
-   ]
+   "include": ["bin/**/*", "lib/**/*", "test/**/*"],
+   "exclude": ["node_modules", "dist"]
  }
-
package/dist/bin/postgres-ai.d.ts DELETED
@@ -1,3 +0,0 @@
- #!/usr/bin/env node
- export {};
- //# sourceMappingURL=postgres-ai.d.ts.map
package/dist/bin/postgres-ai.d.ts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"postgres-ai.d.ts","sourceRoot":"","sources":["../../bin/postgres-ai.ts"],"names":[],"mappings":""}