postgresai 0.14.0-dev.53 → 0.14.0-dev.54

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,154 @@
1
+ #!/usr/bin/env bun
2
+ /**
3
+ * Build script to embed metrics.yml into the CLI bundle.
4
+ *
5
+ * This script reads config/pgwatch-prometheus/metrics.yml and generates
6
+ * cli/lib/metrics-embedded.ts with the metrics data embedded as TypeScript.
7
+ *
8
+ * The generated file is NOT committed to git - it's regenerated at build time.
9
+ *
10
+ * Usage: bun run scripts/embed-metrics.ts
11
+ */
12
+
13
+ import * as fs from "fs";
14
+ import * as path from "path";
15
+ import * as yaml from "js-yaml";
16
+
17
// Resolve paths relative to cli/ directory
// (__dirname points at cli/scripts/ when this script runs -- TODO confirm under Bun's module mode)
const CLI_DIR = path.resolve(__dirname, "..");
// Source of truth for metric definitions, shared with the pgwatch/Prometheus config.
const METRICS_YML_PATH = path.resolve(CLI_DIR, "../config/pgwatch-prometheus/metrics.yml");
// Generated output file; regenerated at build time and not committed to git.
const OUTPUT_PATH = path.resolve(CLI_DIR, "lib/metrics-embedded.ts");
21
+
22
// Shape of one metric entry as parsed from metrics.yml.
interface MetricDefinition {
  description?: string;
  // YAML parses numeric keys (e.g., 11:, 14:) as numbers, representing PG major versions
  sqls: Record<number, string>;
  gauges?: string[];  // column names treated as gauges -- presumably may include "*"; verify against metrics.yml
  statement_timeout_seconds?: number;
  is_instance_level?: boolean;  // present in YAML but not emitted by generateTypeScript
  node_status?: string;         // present in YAML but not emitted by generateTypeScript
}

// Top-level structure of metrics.yml: metric name -> definition.
interface MetricsYml {
  metrics: Record<string, MetricDefinition>;
}
35
+
36
// Metrics needed for express mode reports.
// Every name listed here must exist in metrics.yml's `metrics:` section;
// main() fails the build if any are missing.
const REQUIRED_METRICS = [
  // Settings and version (A002, A003, A007, A013)
  "settings",
  // Database stats (A004)
  "db_stats",
  "db_size",
  // Index health (H001, H002, H004)
  "pg_invalid_indexes",
  "unused_indexes",
  "redundant_indexes",
  // Stats reset info (H002)
  "stats_reset",
];
50
+
51
+ function main() {
52
+ console.log(`Reading metrics from: ${METRICS_YML_PATH}`);
53
+
54
+ if (!fs.existsSync(METRICS_YML_PATH)) {
55
+ console.error(`ERROR: metrics.yml not found at ${METRICS_YML_PATH}`);
56
+ process.exit(1);
57
+ }
58
+
59
+ const yamlContent = fs.readFileSync(METRICS_YML_PATH, "utf8");
60
+ const parsed = yaml.load(yamlContent) as MetricsYml;
61
+
62
+ if (!parsed.metrics) {
63
+ console.error("ERROR: No 'metrics' section found in metrics.yml");
64
+ process.exit(1);
65
+ }
66
+
67
+ // Extract only required metrics
68
+ const extractedMetrics: Record<string, MetricDefinition> = {};
69
+ const missingMetrics: string[] = [];
70
+
71
+ for (const metricName of REQUIRED_METRICS) {
72
+ if (parsed.metrics[metricName]) {
73
+ extractedMetrics[metricName] = parsed.metrics[metricName];
74
+ } else {
75
+ missingMetrics.push(metricName);
76
+ }
77
+ }
78
+
79
+ if (missingMetrics.length > 0) {
80
+ console.error(`ERROR: Missing required metrics: ${missingMetrics.join(", ")}`);
81
+ process.exit(1);
82
+ }
83
+
84
+ // Generate TypeScript code
85
+ const tsCode = generateTypeScript(extractedMetrics);
86
+
87
+ // Write output
88
+ fs.writeFileSync(OUTPUT_PATH, tsCode, "utf8");
89
+ console.log(`Generated: ${OUTPUT_PATH}`);
90
+ console.log(`Embedded ${Object.keys(extractedMetrics).length} metrics`);
91
+ }
92
+
93
+ function generateTypeScript(metrics: Record<string, MetricDefinition>): string {
94
+ const lines: string[] = [
95
+ "// AUTO-GENERATED FILE - DO NOT EDIT",
96
+ "// Generated from config/pgwatch-prometheus/metrics.yml by scripts/embed-metrics.ts",
97
+ `// Generated at: ${new Date().toISOString()}`,
98
+ "",
99
+ "/**",
100
+ " * Metric definition from metrics.yml",
101
+ " */",
102
+ "export interface MetricDefinition {",
103
+ " description?: string;",
104
+ " sqls: Record<number, string>; // PG major version -> SQL query",
105
+ " gauges?: string[];",
106
+ " statement_timeout_seconds?: number;",
107
+ "}",
108
+ "",
109
+ "/**",
110
+ " * Embedded metrics for express mode reports.",
111
+ " * Only includes metrics required for CLI checkup reports.",
112
+ " */",
113
+ "export const METRICS: Record<string, MetricDefinition> = {",
114
+ ];
115
+
116
+ for (const [name, metric] of Object.entries(metrics)) {
117
+ lines.push(` ${JSON.stringify(name)}: {`);
118
+
119
+ if (metric.description) {
120
+ // Escape description for TypeScript string
121
+ const desc = metric.description.trim().replace(/\n/g, " ").replace(/\s+/g, " ");
122
+ lines.push(` description: ${JSON.stringify(desc)},`);
123
+ }
124
+
125
+ // sqls keys are PG major versions (numbers in YAML, but Object.entries returns strings)
126
+ lines.push(" sqls: {");
127
+ for (const [versionKey, sql] of Object.entries(metric.sqls)) {
128
+ // YAML numeric keys may be parsed as numbers or strings depending on context;
129
+ // explicitly convert to ensure consistent numeric keys in output
130
+ const versionNum = typeof versionKey === "number" ? versionKey : parseInt(versionKey, 10);
131
+ // Use JSON.stringify for robust escaping of all special characters
132
+ lines.push(` ${versionNum}: ${JSON.stringify(sql)},`);
133
+ }
134
+ lines.push(" },");
135
+
136
+ if (metric.gauges) {
137
+ lines.push(` gauges: ${JSON.stringify(metric.gauges)},`);
138
+ }
139
+
140
+ if (metric.statement_timeout_seconds !== undefined) {
141
+ lines.push(` statement_timeout_seconds: ${metric.statement_timeout_seconds},`);
142
+ }
143
+
144
+ lines.push(" },");
145
+ }
146
+
147
+ lines.push("};");
148
+ lines.push("");
149
+
150
+ return lines.join("\n");
151
+ }
152
+
153
// Run immediately: this file is executed directly via `bun run scripts/embed-metrics.ts`.
main();
154
+
@@ -0,0 +1,273 @@
1
+ /**
2
+ * Integration tests for checkup command (express mode)
3
+ * Validates that CLI-generated reports match JSON schemas used by the Python reporter.
4
+ * This ensures compatibility between "express" and "full" (monitoring) modes.
5
+ */
6
+ import { describe, test, expect, afterAll, beforeAll } from "bun:test";
7
+ import * as fs from "fs";
8
+ import * as os from "os";
9
+ import * as path from "path";
10
+ import * as net from "net";
11
+ import { Client } from "pg";
12
+ import { resolve } from "path";
13
+ import { readFileSync } from "fs";
14
+ import Ajv2020 from "ajv/dist/2020";
15
+
16
+ import * as checkup from "../lib/checkup";
17
+
18
// Draft 2020-12 validator shared by all tests; strict:false presumably because
// the reporter schemas use keywords Ajv's strict mode rejects -- TODO confirm.
const ajv = new Ajv2020({ allErrors: true, strict: false });
// JSON schemas shared with the Python reporter (the compatibility contract).
const schemasDir = resolve(import.meta.dir, "../../reporter/schemas");
20
+
21
+ function findOnPath(cmd: string): string | null {
22
+ const result = Bun.spawnSync(["sh", "-c", `command -v ${cmd}`]);
23
+ if (result.exitCode === 0) {
24
+ return new TextDecoder().decode(result.stdout).trim();
25
+ }
26
+ return null;
27
+ }
28
+
29
+ function findPgBin(cmd: string): string | null {
30
+ const p = findOnPath(cmd);
31
+ if (p) return p;
32
+ const probe = Bun.spawnSync([
33
+ "sh",
34
+ "-c",
35
+ `ls -1 /usr/lib/postgresql/*/bin/${cmd} 2>/dev/null | head -n 1 || true`,
36
+ ]);
37
+ const out = new TextDecoder().decode(probe.stdout).trim();
38
+ if (out) return out;
39
+ return null;
40
+ }
41
+
42
+ function havePostgresBinaries(): boolean {
43
+ return !!(findPgBin("initdb") && findPgBin("postgres"));
44
+ }
45
+
46
+ function isRunningAsRoot(): boolean {
47
+ return process.getuid?.() === 0;
48
+ }
49
+
50
+ async function getFreePort(): Promise<number> {
51
+ return new Promise((resolve, reject) => {
52
+ const srv = net.createServer();
53
+ srv.listen(0, "127.0.0.1", () => {
54
+ const addr = srv.address() as net.AddressInfo;
55
+ srv.close((err) => {
56
+ if (err) return reject(err);
57
+ resolve(addr.port);
58
+ });
59
+ });
60
+ srv.on("error", reject);
61
+ });
62
+ }
63
+
64
+ async function waitFor<T>(
65
+ fn: () => Promise<T>,
66
+ { timeoutMs = 10000, intervalMs = 100 } = {}
67
+ ): Promise<T> {
68
+ const start = Date.now();
69
+ while (true) {
70
+ try {
71
+ return await fn();
72
+ } catch (e) {
73
+ if (Date.now() - start > timeoutMs) throw e;
74
+ await new Promise((r) => setTimeout(r, intervalMs));
75
+ }
76
+ }
77
+ }
78
+
79
// Handle to a throwaway PostgreSQL instance running out of a temp directory.
interface TempPostgres {
  port: number;       // TCP port bound on 127.0.0.1
  socketDir: string;  // Unix-socket directory (passed as `host` to pg's Client)
  cleanup: () => Promise<void>;  // stop the server and remove its temp files
  connect: (database?: string) => Promise<Client>;  // database defaults to "postgres"
}
85
+
86
/**
 * Bootstrap a disposable PostgreSQL instance in a temp directory.
 *
 * Steps: initdb a fresh cluster (superuser "postgres", trust auth), allow
 * trusted local-socket connections, start the server on a free TCP port
 * plus a Unix socket, wait until it accepts connections, and return a
 * handle with connect/cleanup helpers.
 *
 * Throws if the binaries are missing or initdb fails.
 */
async function createTempPostgres(): Promise<TempPostgres> {
  const tmpRoot = fs.mkdtempSync(path.join(os.tmpdir(), "postgresai-checkup-"));
  const dataDir = path.join(tmpRoot, "data");
  const socketDir = path.join(tmpRoot, "sock");
  fs.mkdirSync(socketDir, { recursive: true });

  const initdb = findPgBin("initdb");
  const postgresBin = findPgBin("postgres");
  if (!initdb || !postgresBin) {
    throw new Error("PostgreSQL binaries not found");
  }

  // Create the cluster; -A trust avoids password setup for tests.
  const init = Bun.spawnSync([initdb, "-D", dataDir, "-U", "postgres", "-A", "trust"]);
  if (init.exitCode !== 0) {
    // initdb writes diagnostics to stderr; fall back to stdout if stderr is empty.
    throw new Error(new TextDecoder().decode(init.stderr) || new TextDecoder().decode(init.stdout));
  }

  // Ensure Unix-socket connections are trusted regardless of initdb defaults.
  const hbaPath = path.join(dataDir, "pg_hba.conf");
  fs.appendFileSync(hbaPath, "\nlocal all all trust\n", "utf8");

  // NOTE(review): the free port could in principle be taken between
  // getFreePort() and server start; acceptable race for tests.
  const port = await getFreePort();
  const postgresProc = Bun.spawn(
    [postgresBin, "-D", dataDir, "-k", socketDir, "-h", "127.0.0.1", "-p", String(port)],
    { stdio: ["ignore", "pipe", "pipe"] }
  );

  // Graceful shutdown: SIGTERM, poll up to 5s for exit, SIGKILL as last resort,
  // then delete the whole temp tree.
  const cleanup = async () => {
    postgresProc.kill("SIGTERM");
    try {
      await waitFor(
        async () => {
          if (postgresProc.exitCode === null) throw new Error("still running");
        },
        { timeoutMs: 5000, intervalMs: 100 }
      );
    } catch {
      postgresProc.kill("SIGKILL");
    }
    fs.rmSync(tmpRoot, { recursive: true, force: true });
  };

  // Connect over the Unix socket (pg treats a directory `host` as a socket dir).
  const connect = async (database = "postgres"): Promise<Client> => {
    const c = new Client({ host: socketDir, port, user: "postgres", database });
    await c.connect();
    return c;
  };

  // Wait for Postgres to start
  await waitFor(async () => {
    const c = await connect();
    await c.end();
  });

  return { port, socketDir, cleanup, connect };
}
141
+
142
+ function validateAgainstSchema(report: any, checkId: string): void {
143
+ const schemaPath = resolve(schemasDir, `${checkId}.schema.json`);
144
+ if (!fs.existsSync(schemaPath)) {
145
+ throw new Error(`Schema not found: ${schemaPath}`);
146
+ }
147
+ const schema = JSON.parse(readFileSync(schemaPath, "utf8"));
148
+ const validate = ajv.compile(schema);
149
+ const valid = validate(report);
150
+ if (!valid) {
151
+ const errors = validate.errors?.map(e => `${e.instancePath}: ${e.message}`).join(", ");
152
+ throw new Error(`${checkId} schema validation failed: ${errors}`);
153
+ }
154
+ }
155
+
156
+ // Skip tests if PostgreSQL binaries are not available
157
+ const skipReason = !havePostgresBinaries()
158
+ ? "PostgreSQL binaries not available"
159
+ : isRunningAsRoot()
160
+ ? "Cannot run as root (PostgreSQL refuses)"
161
+ : null;
162
+
163
+ // In CI, warn if integration tests are being skipped (helps catch configuration issues)
164
+ const isCI = process.env.CI === "true" || process.env.GITLAB_CI === "true";
165
+ if (skipReason && isCI) {
166
+ console.warn(`[CI WARNING] Integration tests skipped: ${skipReason}`);
167
+ console.warn("This may indicate a CI configuration issue - PostgreSQL binaries should be available.");
168
+ }
169
+
170
+ describe.skipIf(!!skipReason)("checkup integration: express mode schema compatibility", () => {
171
+ let pg: TempPostgres;
172
+ let client: Client;
173
+
174
+ beforeAll(async () => {
175
+ pg = await createTempPostgres();
176
+ client = await pg.connect();
177
+ });
178
+
179
+ afterAll(async () => {
180
+ if (client) await client.end();
181
+ if (pg) await pg.cleanup();
182
+ });
183
+
184
+ // Test all checks supported by express mode
185
+ const expressChecks = Object.keys(checkup.CHECK_INFO);
186
+
187
+ for (const checkId of expressChecks) {
188
+ test(`${checkId} report validates against shared schema`, async () => {
189
+ const generator = checkup.REPORT_GENERATORS[checkId];
190
+ expect(generator).toBeDefined();
191
+
192
+ const report = await generator(client, "test-node");
193
+
194
+ // Validate basic report structure (matching schema requirements)
195
+ expect(report).toHaveProperty("checkId", checkId);
196
+ expect(report).toHaveProperty("checkTitle");
197
+ expect(report).toHaveProperty("timestamptz");
198
+ expect(report).toHaveProperty("nodes");
199
+ expect(report).toHaveProperty("results");
200
+ expect(report.results).toHaveProperty("test-node");
201
+
202
+ // Validate against JSON schema (same schema used by Python reporter)
203
+ validateAgainstSchema(report, checkId);
204
+ });
205
+ }
206
+
207
+ test("generateAllReports produces valid reports for all checks", async () => {
208
+ const reports = await checkup.generateAllReports(client, "test-node");
209
+
210
+ expect(Object.keys(reports).length).toBe(expressChecks.length);
211
+
212
+ for (const [checkId, report] of Object.entries(reports)) {
213
+ validateAgainstSchema(report, checkId);
214
+ }
215
+ });
216
+
217
+ test("report structure matches Python reporter format", async () => {
218
+ // Generate A003 (settings) report and verify structure matches what Python produces
219
+ const report = await checkup.generateA003(client, "test-node");
220
+
221
+ // Check required fields match Python reporter output structure (per schema)
222
+ expect(report).toHaveProperty("checkId", "A003");
223
+ expect(report).toHaveProperty("checkTitle", "Postgres settings");
224
+ expect(report).toHaveProperty("timestamptz");
225
+ expect(report).toHaveProperty("nodes");
226
+ expect(report.nodes).toHaveProperty("primary");
227
+ expect(report.nodes).toHaveProperty("standbys");
228
+ expect(report).toHaveProperty("results");
229
+
230
+ // Results should have node-specific data
231
+ const nodeResult = report.results["test-node"];
232
+ expect(nodeResult).toHaveProperty("data");
233
+
234
+ // A003 should have settings as keyed object
235
+ expect(typeof nodeResult.data).toBe("object");
236
+
237
+ // Check postgres_version if present
238
+ if (nodeResult.postgres_version) {
239
+ expect(nodeResult.postgres_version).toHaveProperty("version");
240
+ expect(nodeResult.postgres_version).toHaveProperty("server_version_num");
241
+ expect(nodeResult.postgres_version).toHaveProperty("server_major_ver");
242
+ expect(nodeResult.postgres_version).toHaveProperty("server_minor_ver");
243
+ }
244
+ });
245
+
246
+ test("H001 (invalid indexes) has correct data structure", async () => {
247
+ const report = await checkup.generateH001(client, "test-node");
248
+ validateAgainstSchema(report, "H001");
249
+
250
+ const nodeResult = report.results["test-node"];
251
+ expect(nodeResult).toHaveProperty("data");
252
+ // data should be an object with indexes (may be empty on fresh DB)
253
+ expect(typeof nodeResult.data).toBe("object");
254
+ });
255
+
256
+ test("H002 (unused indexes) has correct data structure", async () => {
257
+ const report = await checkup.generateH002(client, "test-node");
258
+ validateAgainstSchema(report, "H002");
259
+
260
+ const nodeResult = report.results["test-node"];
261
+ expect(nodeResult).toHaveProperty("data");
262
+ expect(typeof nodeResult.data).toBe("object");
263
+ });
264
+
265
+ test("H004 (redundant indexes) has correct data structure", async () => {
266
+ const report = await checkup.generateH004(client, "test-node");
267
+ validateAgainstSchema(report, "H004");
268
+
269
+ const nodeResult = report.results["test-node"];
270
+ expect(nodeResult).toHaveProperty("data");
271
+ expect(typeof nodeResult.data).toBe("object");
272
+ });
273
+ });