postgresai 0.14.0-dev.51 → 0.14.0-dev.52

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -13064,7 +13064,7 @@ var {
13064
13064
  // package.json
13065
13065
  var package_default = {
13066
13066
  name: "postgresai",
13067
- version: "0.14.0-dev.51",
13067
+ version: "0.14.0-dev.52",
13068
13068
  description: "postgres_ai CLI",
13069
13069
  license: "Apache-2.0",
13070
13070
  private: false,
@@ -13086,7 +13086,7 @@ var package_default = {
13086
13086
  node: ">=18"
13087
13087
  },
13088
13088
  scripts: {
13089
- build: `bun build ./bin/postgres-ai.ts --outdir ./dist/bin --target node && node -e "const fs=require('fs');const f='./dist/bin/postgres-ai.js';fs.writeFileSync(f,fs.readFileSync(f,'utf8').replace('#!/usr/bin/env bun','#!/usr/bin/env node'))"`,
13089
+ build: `bun build ./bin/postgres-ai.ts --outdir ./dist/bin --target node && node -e "const fs=require('fs');const f='./dist/bin/postgres-ai.js';fs.writeFileSync(f,fs.readFileSync(f,'utf8').replace('#!/usr/bin/env bun','#!/usr/bin/env node'))" && cp -r sql dist/`,
13090
13090
  prepublishOnly: "npm run build",
13091
13091
  start: "bun ./bin/postgres-ai.ts --help",
13092
13092
  "start:node": "node ./dist/bin/postgres-ai.js --help",
@@ -13104,8 +13104,6 @@ var package_default = {
13104
13104
  "@types/bun": "^1.1.14",
13105
13105
  "@types/js-yaml": "^4.0.9",
13106
13106
  "@types/pg": "^8.15.6",
13107
- ajv: "^8.17.1",
13108
- "ajv-formats": "^3.0.1",
13109
13107
  typescript: "^5.3.3"
13110
13108
  },
13111
13109
  publishConfig: {
@@ -13131,8 +13129,7 @@ function readConfig() {
13131
13129
  const config = {
13132
13130
  apiKey: null,
13133
13131
  baseUrl: null,
13134
- orgId: null,
13135
- defaultProject: null
13132
+ orgId: null
13136
13133
  };
13137
13134
  const userConfigPath = getConfigPath();
13138
13135
  if (fs.existsSync(userConfigPath)) {
@@ -13142,7 +13139,6 @@ function readConfig() {
13142
13139
  config.apiKey = parsed.apiKey || null;
13143
13140
  config.baseUrl = parsed.baseUrl || null;
13144
13141
  config.orgId = parsed.orgId || null;
13145
- config.defaultProject = parsed.defaultProject || null;
13146
13142
  return config;
13147
13143
  } catch (err) {
13148
13144
  const message = err instanceof Error ? err.message : String(err);
@@ -15862,10 +15858,9 @@ var safeLoadAll = renamed("safeLoadAll", "loadAll");
15862
15858
  var safeDump = renamed("safeDump", "dump");
15863
15859
 
15864
15860
  // bin/postgres-ai.ts
15865
- import * as fs5 from "fs";
15866
- import * as path5 from "path";
15861
+ import * as fs4 from "fs";
15862
+ import * as path4 from "path";
15867
15863
  import * as os3 from "os";
15868
- import * as crypto2 from "crypto";
15869
15864
 
15870
15865
  // node_modules/pg/esm/index.mjs
15871
15866
  var import_lib = __toESM(require_lib2(), 1);
@@ -15881,10 +15876,9 @@ var Result = import_lib.default.Result;
15881
15876
  var TypeOverrides = import_lib.default.TypeOverrides;
15882
15877
  var defaults = import_lib.default.defaults;
15883
15878
  // package.json
15884
- var version = "0.14.0-dev.51";
15885
15879
  var package_default2 = {
15886
15880
  name: "postgresai",
15887
- version,
15881
+ version: "0.14.0-dev.52",
15888
15882
  description: "postgres_ai CLI",
15889
15883
  license: "Apache-2.0",
15890
15884
  private: false,
@@ -15906,7 +15900,7 @@ var package_default2 = {
15906
15900
  node: ">=18"
15907
15901
  },
15908
15902
  scripts: {
15909
- build: `bun build ./bin/postgres-ai.ts --outdir ./dist/bin --target node && node -e "const fs=require('fs');const f='./dist/bin/postgres-ai.js';fs.writeFileSync(f,fs.readFileSync(f,'utf8').replace('#!/usr/bin/env bun','#!/usr/bin/env node'))"`,
15903
+ build: `bun build ./bin/postgres-ai.ts --outdir ./dist/bin --target node && node -e "const fs=require('fs');const f='./dist/bin/postgres-ai.js';fs.writeFileSync(f,fs.readFileSync(f,'utf8').replace('#!/usr/bin/env bun','#!/usr/bin/env node'))" && cp -r sql dist/`,
15910
15904
  prepublishOnly: "npm run build",
15911
15905
  start: "bun ./bin/postgres-ai.ts --help",
15912
15906
  "start:node": "node ./dist/bin/postgres-ai.js --help",
@@ -15924,8 +15918,6 @@ var package_default2 = {
15924
15918
  "@types/bun": "^1.1.14",
15925
15919
  "@types/js-yaml": "^4.0.9",
15926
15920
  "@types/pg": "^8.15.6",
15927
- ajv: "^8.17.1",
15928
- "ajv-formats": "^3.0.1",
15929
15921
  typescript: "^5.3.3"
15930
15922
  },
15931
15923
  publishConfig: {
@@ -15951,8 +15943,7 @@ function readConfig2() {
15951
15943
  const config = {
15952
15944
  apiKey: null,
15953
15945
  baseUrl: null,
15954
- orgId: null,
15955
- defaultProject: null
15946
+ orgId: null
15956
15947
  };
15957
15948
  const userConfigPath = getConfigPath2();
15958
15949
  if (fs2.existsSync(userConfigPath)) {
@@ -15962,7 +15953,6 @@ function readConfig2() {
15962
15953
  config.apiKey = parsed.apiKey || null;
15963
15954
  config.baseUrl = parsed.baseUrl || null;
15964
15955
  config.orgId = parsed.orgId || null;
15965
- config.defaultProject = parsed.defaultProject || null;
15966
15956
  return config;
15967
15957
  } catch (err) {
15968
15958
  const message = err instanceof Error ? err.message : String(err);
@@ -17114,10 +17104,10 @@ var ksuid = /^[A-Za-z0-9]{27}$/;
17114
17104
  var nanoid = /^[a-zA-Z0-9_-]{21}$/;
17115
17105
  var duration = /^P(?:(\d+W)|(?!.*W)(?=\d|T\d)(\d+Y)?(\d+M)?(\d+D)?(T(?=\d)(\d+H)?(\d+M)?(\d+([.,]\d+)?S)?)?)$/;
17116
17106
  var guid = /^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})$/;
17117
- var uuid = (version2) => {
17118
- if (!version2)
17107
+ var uuid = (version) => {
17108
+ if (!version)
17119
17109
  return /^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-8][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$/;
17120
- return new RegExp(`^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-${version2}[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12})$`);
17110
+ return new RegExp(`^([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-${version}[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12})$`);
17121
17111
  };
17122
17112
  var email = /^(?!\.)(?!.*\.\.)([A-Za-z0-9_'+\-\.]*)[A-Za-z0-9_+-]@([A-Za-z0-9][A-Za-z0-9\-]*\.)+[A-Za-z]{2,}$/;
17123
17113
  var _emoji = `^(\\p{Extended_Pictographic}|\\p{Emoji_Component})+$`;
@@ -17586,7 +17576,7 @@ class Doc {
17586
17576
  }
17587
17577
 
17588
17578
  // node_modules/zod/v4/core/versions.js
17589
- var version2 = {
17579
+ var version = {
17590
17580
  major: 4,
17591
17581
  minor: 2,
17592
17582
  patch: 1
@@ -17598,7 +17588,7 @@ var $ZodType = /* @__PURE__ */ $constructor("$ZodType", (inst, def) => {
17598
17588
  inst ?? (inst = {});
17599
17589
  inst._zod.def = def;
17600
17590
  inst._zod.bag = inst._zod.bag || {};
17601
- inst._zod.version = version2;
17591
+ inst._zod.version = version;
17602
17592
  const checks = [...inst._zod.def.checks ?? []];
17603
17593
  if (inst._zod.traits.has("$ZodCheck")) {
17604
17594
  checks.unshift(inst);
@@ -24054,6 +24044,7 @@ function generatePKCEParams() {
24054
24044
  }
24055
24045
 
24056
24046
  // lib/auth-server.ts
24047
+ import * as http from "http";
24057
24048
  function escapeHtml(str2) {
24058
24049
  if (!str2)
24059
24050
  return "";
@@ -24069,36 +24060,42 @@ function createCallbackServer(port = 0, expectedState = null, timeoutMs = 300000
24069
24060
  resolveCallback = resolve4;
24070
24061
  rejectCallback = reject;
24071
24062
  });
24063
+ const stopServer = () => {
24064
+ if (serverInstance) {
24065
+ serverInstance.close();
24066
+ serverInstance = null;
24067
+ }
24068
+ };
24072
24069
  const timeout = setTimeout(() => {
24073
24070
  if (!resolved) {
24074
24071
  resolved = true;
24075
- if (serverInstance) {
24076
- serverInstance.stop();
24077
- }
24072
+ stopServer();
24078
24073
  rejectCallback(new Error("Authentication timeout. Please try again."));
24079
24074
  }
24080
24075
  }, timeoutMs);
24081
- serverInstance = Bun.serve({
24082
- port,
24083
- hostname: "127.0.0.1",
24084
- fetch(req) {
24085
- if (resolved) {
24086
- return new Response("Already handled", { status: 200 });
24087
- }
24088
- const url = new URL(req.url);
24089
- if (!url.pathname.startsWith("/callback")) {
24090
- return new Response("Not Found", { status: 404 });
24091
- }
24092
- const code = url.searchParams.get("code");
24093
- const state = url.searchParams.get("state");
24094
- const error2 = url.searchParams.get("error");
24095
- const errorDescription = url.searchParams.get("error_description");
24096
- if (error2) {
24097
- resolved = true;
24098
- clearTimeout(timeout);
24099
- setTimeout(() => serverInstance?.stop(), 100);
24100
- rejectCallback(new Error(`OAuth error: ${error2}${errorDescription ? ` - ${errorDescription}` : ""}`));
24101
- return new Response(`
24076
+ serverInstance = http.createServer((req, res) => {
24077
+ if (resolved) {
24078
+ res.writeHead(200, { "Content-Type": "text/plain" });
24079
+ res.end("Already handled");
24080
+ return;
24081
+ }
24082
+ const url = new URL(req.url || "/", `http://127.0.0.1:${actualPort}`);
24083
+ if (!url.pathname.startsWith("/callback")) {
24084
+ res.writeHead(404, { "Content-Type": "text/plain" });
24085
+ res.end("Not Found");
24086
+ return;
24087
+ }
24088
+ const code = url.searchParams.get("code");
24089
+ const state = url.searchParams.get("state");
24090
+ const error2 = url.searchParams.get("error");
24091
+ const errorDescription = url.searchParams.get("error_description");
24092
+ if (error2) {
24093
+ resolved = true;
24094
+ clearTimeout(timeout);
24095
+ setTimeout(() => stopServer(), 100);
24096
+ rejectCallback(new Error(`OAuth error: ${error2}${errorDescription ? ` - ${errorDescription}` : ""}`));
24097
+ res.writeHead(400, { "Content-Type": "text/html" });
24098
+ res.end(`
24102
24099
  <!DOCTYPE html>
24103
24100
  <html>
24104
24101
  <head>
@@ -24119,10 +24116,12 @@ function createCallbackServer(port = 0, expectedState = null, timeoutMs = 300000
24119
24116
  </div>
24120
24117
  </body>
24121
24118
  </html>
24122
- `, { status: 400, headers: { "Content-Type": "text/html" } });
24123
- }
24124
- if (!code || !state) {
24125
- return new Response(`
24119
+ `);
24120
+ return;
24121
+ }
24122
+ if (!code || !state) {
24123
+ res.writeHead(400, { "Content-Type": "text/html" });
24124
+ res.end(`
24126
24125
  <!DOCTYPE html>
24127
24126
  <html>
24128
24127
  <head>
@@ -24141,14 +24140,16 @@ function createCallbackServer(port = 0, expectedState = null, timeoutMs = 300000
24141
24140
  </div>
24142
24141
  </body>
24143
24142
  </html>
24144
- `, { status: 400, headers: { "Content-Type": "text/html" } });
24145
- }
24146
- if (expectedState && state !== expectedState) {
24147
- resolved = true;
24148
- clearTimeout(timeout);
24149
- setTimeout(() => serverInstance?.stop(), 100);
24150
- rejectCallback(new Error("State mismatch (possible CSRF attack)"));
24151
- return new Response(`
24143
+ `);
24144
+ return;
24145
+ }
24146
+ if (expectedState && state !== expectedState) {
24147
+ resolved = true;
24148
+ clearTimeout(timeout);
24149
+ setTimeout(() => stopServer(), 100);
24150
+ rejectCallback(new Error("State mismatch (possible CSRF attack)"));
24151
+ res.writeHead(400, { "Content-Type": "text/html" });
24152
+ res.end(`
24152
24153
  <!DOCTYPE html>
24153
24154
  <html>
24154
24155
  <head>
@@ -24167,13 +24168,15 @@ function createCallbackServer(port = 0, expectedState = null, timeoutMs = 300000
24167
24168
  </div>
24168
24169
  </body>
24169
24170
  </html>
24170
- `, { status: 400, headers: { "Content-Type": "text/html" } });
24171
- }
24172
- resolved = true;
24173
- clearTimeout(timeout);
24174
- resolveCallback({ code, state });
24175
- setTimeout(() => serverInstance?.stop(), 100);
24176
- return new Response(`
24171
+ `);
24172
+ return;
24173
+ }
24174
+ resolved = true;
24175
+ clearTimeout(timeout);
24176
+ resolveCallback({ code, state });
24177
+ setTimeout(() => stopServer(), 100);
24178
+ res.writeHead(200, { "Content-Type": "text/html" });
24179
+ res.end(`
24177
24180
  <!DOCTYPE html>
24178
24181
  <html>
24179
24182
  <head>
@@ -24192,12 +24195,16 @@ function createCallbackServer(port = 0, expectedState = null, timeoutMs = 300000
24192
24195
  </div>
24193
24196
  </body>
24194
24197
  </html>
24195
- `, { status: 200, headers: { "Content-Type": "text/html" } });
24198
+ `);
24199
+ });
24200
+ serverInstance.listen(port, "127.0.0.1", () => {
24201
+ const address = serverInstance?.address();
24202
+ if (address && typeof address === "object") {
24203
+ actualPort = address.port;
24196
24204
  }
24197
24205
  });
24198
- actualPort = serverInstance.port;
24199
24206
  return {
24200
- server: { stop: () => serverInstance?.stop() },
24207
+ server: { stop: stopServer },
24201
24208
  promise: promise2,
24202
24209
  getPort: () => actualPort
24203
24210
  };
@@ -24206,1300 +24213,6 @@ function createCallbackServer(port = 0, expectedState = null, timeoutMs = 300000
24206
24213
  // bin/postgres-ai.ts
24207
24214
  import { createInterface } from "readline";
24208
24215
  import * as childProcess from "child_process";
24209
-
24210
- // lib/checkup.ts
24211
- import * as fs4 from "fs";
24212
- import * as path4 from "path";
24213
-
24214
- // lib/metrics-loader.ts
24215
- var EMBEDDED_SQL = {
24216
- express_version: `
24217
- select
24218
- name,
24219
- setting
24220
- from pg_settings
24221
- where name in ('server_version', 'server_version_num');
24222
- `,
24223
- express_settings: `
24224
- select
24225
- name,
24226
- setting,
24227
- unit,
24228
- category,
24229
- context,
24230
- vartype,
24231
- case when (source <> 'default') then 0 else 1 end as is_default,
24232
- case
24233
- when unit = '8kB' then pg_size_pretty(setting::bigint * 8192)
24234
- when unit = 'kB' then pg_size_pretty(setting::bigint * 1024)
24235
- when unit = 'MB' then pg_size_pretty(setting::bigint * 1024 * 1024)
24236
- when unit = 'B' then pg_size_pretty(setting::bigint)
24237
- when unit = 'ms' then setting || ' ms'
24238
- when unit = 's' then setting || ' s'
24239
- when unit = 'min' then setting || ' min'
24240
- else setting
24241
- end as pretty_value
24242
- from pg_settings
24243
- order by name;
24244
- `,
24245
- express_altered_settings: `
24246
- select
24247
- name,
24248
- setting,
24249
- unit,
24250
- category,
24251
- case
24252
- when unit = '8kB' then pg_size_pretty(setting::bigint * 8192)
24253
- when unit = 'kB' then pg_size_pretty(setting::bigint * 1024)
24254
- when unit = 'MB' then pg_size_pretty(setting::bigint * 1024 * 1024)
24255
- when unit = 'B' then pg_size_pretty(setting::bigint)
24256
- when unit = 'ms' then setting || ' ms'
24257
- when unit = 's' then setting || ' s'
24258
- when unit = 'min' then setting || ' min'
24259
- else setting
24260
- end as pretty_value
24261
- from pg_settings
24262
- where source <> 'default'
24263
- order by name;
24264
- `,
24265
- express_database_sizes: `
24266
- select
24267
- datname,
24268
- pg_database_size(datname) as size_bytes
24269
- from pg_database
24270
- where datistemplate = false
24271
- order by size_bytes desc;
24272
- `,
24273
- express_cluster_stats: `
24274
- select
24275
- sum(numbackends) as total_connections,
24276
- sum(xact_commit) as total_commits,
24277
- sum(xact_rollback) as total_rollbacks,
24278
- sum(blks_read) as blocks_read,
24279
- sum(blks_hit) as blocks_hit,
24280
- sum(tup_returned) as tuples_returned,
24281
- sum(tup_fetched) as tuples_fetched,
24282
- sum(tup_inserted) as tuples_inserted,
24283
- sum(tup_updated) as tuples_updated,
24284
- sum(tup_deleted) as tuples_deleted,
24285
- sum(deadlocks) as total_deadlocks,
24286
- sum(temp_files) as temp_files_created,
24287
- sum(temp_bytes) as temp_bytes_written
24288
- from pg_stat_database
24289
- where datname is not null;
24290
- `,
24291
- express_connection_states: `
24292
- select
24293
- coalesce(state, 'null') as state,
24294
- count(*) as count
24295
- from pg_stat_activity
24296
- group by state;
24297
- `,
24298
- express_uptime: `
24299
- select
24300
- pg_postmaster_start_time() as start_time,
24301
- current_timestamp - pg_postmaster_start_time() as uptime;
24302
- `,
24303
- express_stats_reset: `
24304
- select
24305
- extract(epoch from stats_reset) as stats_reset_epoch,
24306
- stats_reset::text as stats_reset_time,
24307
- extract(day from (now() - stats_reset))::integer as days_since_reset,
24308
- extract(epoch from pg_postmaster_start_time()) as postmaster_startup_epoch,
24309
- pg_postmaster_start_time()::text as postmaster_startup_time
24310
- from pg_stat_database
24311
- where datname = current_database();
24312
- `,
24313
- express_current_database: `
24314
- select
24315
- current_database() as datname,
24316
- pg_database_size(current_database()) as size_bytes;
24317
- `,
24318
- pg_invalid_indexes: `
24319
- with fk_indexes as (
24320
- select
24321
- schemaname as tag_schema_name,
24322
- (indexrelid::regclass)::text as tag_index_name,
24323
- (relid::regclass)::text as tag_table_name,
24324
- (confrelid::regclass)::text as tag_fk_table_ref,
24325
- array_to_string(indclass, ', ') as tag_opclasses
24326
- from
24327
- pg_stat_all_indexes
24328
- join pg_index using (indexrelid)
24329
- left join pg_constraint
24330
- on array_to_string(indkey, ',') = array_to_string(conkey, ',')
24331
- and schemaname = (connamespace::regnamespace)::text
24332
- and conrelid = relid
24333
- and contype = 'f'
24334
- where idx_scan = 0
24335
- and indisunique is false
24336
- and conkey is not null
24337
- ), data as (
24338
- select
24339
- pci.relname as tag_index_name,
24340
- pn.nspname as tag_schema_name,
24341
- pct.relname as tag_table_name,
24342
- quote_ident(pn.nspname) as tag_schema_name,
24343
- quote_ident(pci.relname) as tag_index_name,
24344
- quote_ident(pct.relname) as tag_table_name,
24345
- coalesce(nullif(quote_ident(pn.nspname), 'public') || '.', '') || quote_ident(pct.relname) as tag_relation_name,
24346
- pg_relation_size(pidx.indexrelid) index_size_bytes,
24347
- ((
24348
- select count(1)
24349
- from fk_indexes fi
24350
- where
24351
- fi.tag_fk_table_ref = pct.relname
24352
- and fi.tag_opclasses like (array_to_string(pidx.indclass, ', ') || '%')
24353
- ) > 0)::int as supports_fk
24354
- from pg_index pidx
24355
- join pg_class as pci on pci.oid = pidx.indexrelid
24356
- join pg_class as pct on pct.oid = pidx.indrelid
24357
- left join pg_namespace pn on pn.oid = pct.relnamespace
24358
- where pidx.indisvalid = false
24359
- ), data_total as (
24360
- select
24361
- sum(index_size_bytes) as index_size_bytes_sum
24362
- from data
24363
- ), num_data as (
24364
- select
24365
- row_number() over () num,
24366
- data.*
24367
- from data
24368
- )
24369
- select
24370
- (extract(epoch from now()) * 1e9)::int8 as epoch_ns,
24371
- current_database() as tag_datname,
24372
- num_data.*
24373
- from num_data
24374
- limit 1000;
24375
- `,
24376
- unused_indexes: `
24377
- with fk_indexes as (
24378
- select
24379
- n.nspname as schema_name,
24380
- ci.relname as index_name,
24381
- cr.relname as table_name,
24382
- (confrelid::regclass)::text as fk_table_ref,
24383
- array_to_string(indclass, ', ') as opclasses
24384
- from pg_index i
24385
- join pg_class ci on ci.oid = i.indexrelid and ci.relkind = 'i'
24386
- join pg_class cr on cr.oid = i.indrelid and cr.relkind = 'r'
24387
- join pg_namespace n on n.oid = ci.relnamespace
24388
- join pg_constraint cn on cn.conrelid = cr.oid
24389
- left join pg_stat_all_indexes as si on si.indexrelid = i.indexrelid
24390
- where
24391
- contype = 'f'
24392
- and i.indisunique is false
24393
- and conkey is not null
24394
- and ci.relpages > 5
24395
- and si.idx_scan < 10
24396
- ), table_scans as (
24397
- select relid,
24398
- tables.idx_scan + tables.seq_scan as all_scans,
24399
- ( tables.n_tup_ins + tables.n_tup_upd + tables.n_tup_del ) as writes,
24400
- pg_relation_size(relid) as table_size
24401
- from pg_stat_all_tables as tables
24402
- join pg_class c on c.oid = relid
24403
- where c.relpages > 5
24404
- ), indexes as (
24405
- select
24406
- i.indrelid,
24407
- i.indexrelid,
24408
- n.nspname as schema_name,
24409
- cr.relname as table_name,
24410
- ci.relname as index_name,
24411
- si.idx_scan,
24412
- pg_relation_size(i.indexrelid) as index_bytes,
24413
- ci.relpages,
24414
- (case when a.amname = 'btree' then true else false end) as idx_is_btree,
24415
- array_to_string(i.indclass, ', ') as opclasses
24416
- from pg_index i
24417
- join pg_class ci on ci.oid = i.indexrelid and ci.relkind = 'i'
24418
- join pg_class cr on cr.oid = i.indrelid and cr.relkind = 'r'
24419
- join pg_namespace n on n.oid = ci.relnamespace
24420
- join pg_am a on ci.relam = a.oid
24421
- left join pg_stat_all_indexes as si on si.indexrelid = i.indexrelid
24422
- where
24423
- i.indisunique = false
24424
- and i.indisvalid = true
24425
- and ci.relpages > 5
24426
- ), index_ratios as (
24427
- select
24428
- i.indexrelid as index_id,
24429
- i.schema_name,
24430
- i.table_name,
24431
- i.index_name,
24432
- idx_scan,
24433
- all_scans,
24434
- round(( case when all_scans = 0 then 0.0::numeric
24435
- else idx_scan::numeric/all_scans * 100 end), 2) as index_scan_pct,
24436
- writes,
24437
- round((case when writes = 0 then idx_scan::numeric else idx_scan::numeric/writes end), 2)
24438
- as scans_per_write,
24439
- index_bytes as index_size_bytes,
24440
- table_size as table_size_bytes,
24441
- i.relpages,
24442
- idx_is_btree,
24443
- i.opclasses,
24444
- (
24445
- select count(1)
24446
- from fk_indexes fi
24447
- where fi.fk_table_ref = i.table_name
24448
- and fi.schema_name = i.schema_name
24449
- and fi.opclasses like (i.opclasses || '%')
24450
- ) > 0 as supports_fk
24451
- from indexes i
24452
- join table_scans ts on ts.relid = i.indrelid
24453
- )
24454
- select
24455
- 'Never Used Indexes' as tag_reason,
24456
- current_database() as tag_datname,
24457
- index_id,
24458
- schema_name as tag_schema_name,
24459
- table_name as tag_table_name,
24460
- index_name as tag_index_name,
24461
- pg_get_indexdef(index_id) as index_definition,
24462
- idx_scan,
24463
- all_scans,
24464
- index_scan_pct,
24465
- writes,
24466
- scans_per_write,
24467
- index_size_bytes,
24468
- table_size_bytes,
24469
- relpages,
24470
- idx_is_btree,
24471
- opclasses as tag_opclasses,
24472
- supports_fk
24473
- from index_ratios
24474
- where
24475
- idx_scan = 0
24476
- and idx_is_btree
24477
- order by index_size_bytes desc
24478
- limit 1000;
24479
- `,
24480
- redundant_indexes: `
24481
- with fk_indexes as (
24482
- select
24483
- n.nspname as schema_name,
24484
- ci.relname as index_name,
24485
- cr.relname as table_name,
24486
- (confrelid::regclass)::text as fk_table_ref,
24487
- array_to_string(indclass, ', ') as opclasses
24488
- from pg_index i
24489
- join pg_class ci on ci.oid = i.indexrelid and ci.relkind = 'i'
24490
- join pg_class cr on cr.oid = i.indrelid and cr.relkind = 'r'
24491
- join pg_namespace n on n.oid = ci.relnamespace
24492
- join pg_constraint cn on cn.conrelid = cr.oid
24493
- left join pg_stat_all_indexes as si on si.indexrelid = i.indexrelid
24494
- where
24495
- contype = 'f'
24496
- and i.indisunique is false
24497
- and conkey is not null
24498
- and ci.relpages > 5
24499
- and si.idx_scan < 10
24500
- ),
24501
- index_data as (
24502
- select
24503
- *,
24504
- indkey::text as columns,
24505
- array_to_string(indclass, ', ') as opclasses
24506
- from pg_index i
24507
- join pg_class ci on ci.oid = i.indexrelid and ci.relkind = 'i'
24508
- where indisvalid = true and ci.relpages > 5
24509
- ), redundant_indexes as (
24510
- select
24511
- i2.indexrelid as index_id,
24512
- tnsp.nspname as schema_name,
24513
- trel.relname as table_name,
24514
- pg_relation_size(trel.oid) as table_size_bytes,
24515
- irel.relname as index_name,
24516
- am1.amname as access_method,
24517
- (i1.indexrelid::regclass)::text as reason,
24518
- i1.indexrelid as reason_index_id,
24519
- pg_get_indexdef(i1.indexrelid) main_index_def,
24520
- pg_size_pretty(pg_relation_size(i1.indexrelid)) main_index_size,
24521
- pg_get_indexdef(i2.indexrelid) index_def,
24522
- pg_relation_size(i2.indexrelid) index_size_bytes,
24523
- s.idx_scan as index_usage,
24524
- quote_ident(tnsp.nspname) as formated_schema_name,
24525
- coalesce(nullif(quote_ident(tnsp.nspname), 'public') || '.', '') || quote_ident(irel.relname) as formated_index_name,
24526
- quote_ident(trel.relname) as formated_table_name,
24527
- coalesce(nullif(quote_ident(tnsp.nspname), 'public') || '.', '') || quote_ident(trel.relname) as formated_relation_name,
24528
- i2.opclasses
24529
- from (
24530
- select indrelid, indexrelid, opclasses, indclass, indexprs, indpred, indisprimary, indisunique, columns
24531
- from index_data
24532
- order by indexrelid
24533
- ) as i1
24534
- join index_data as i2 on (
24535
- i1.indrelid = i2.indrelid
24536
- and i1.indexrelid <> i2.indexrelid
24537
- )
24538
- inner join pg_opclass op1 on i1.indclass[0] = op1.oid
24539
- inner join pg_opclass op2 on i2.indclass[0] = op2.oid
24540
- inner join pg_am am1 on op1.opcmethod = am1.oid
24541
- inner join pg_am am2 on op2.opcmethod = am2.oid
24542
- join pg_stat_all_indexes as s on s.indexrelid = i2.indexrelid
24543
- join pg_class as trel on trel.oid = i2.indrelid
24544
- join pg_namespace as tnsp on trel.relnamespace = tnsp.oid
24545
- join pg_class as irel on irel.oid = i2.indexrelid
24546
- where
24547
- not i2.indisprimary
24548
- and not i2.indisunique
24549
- and am1.amname = am2.amname
24550
- and i1.columns like (i2.columns || '%')
24551
- and i1.opclasses like (i2.opclasses || '%')
24552
- and pg_get_expr(i1.indexprs, i1.indrelid) is not distinct from pg_get_expr(i2.indexprs, i2.indrelid)
24553
- and pg_get_expr(i1.indpred, i1.indrelid) is not distinct from pg_get_expr(i2.indpred, i2.indrelid)
24554
- ), redundant_indexes_fk as (
24555
- select
24556
- ri.*,
24557
- ((
24558
- select count(1)
24559
- from fk_indexes fi
24560
- where
24561
- fi.fk_table_ref = ri.table_name
24562
- and fi.opclasses like (ri.opclasses || '%')
24563
- ) > 0)::int as supports_fk
24564
- from redundant_indexes ri
24565
- ),
24566
- redundant_indexes_tmp_num as (
24567
- select row_number() over () num, rig.*
24568
- from redundant_indexes_fk rig
24569
- ), redundant_indexes_tmp_links as (
24570
- select
24571
- ri1.*,
24572
- ri2.num as r_num
24573
- from redundant_indexes_tmp_num ri1
24574
- left join redundant_indexes_tmp_num ri2 on ri2.reason_index_id = ri1.index_id and ri1.reason_index_id = ri2.index_id
24575
- ), redundant_indexes_tmp_cut as (
24576
- select
24577
- *
24578
- from redundant_indexes_tmp_links
24579
- where num < r_num or r_num is null
24580
- ), redundant_indexes_cut_grouped as (
24581
- select
24582
- distinct(num),
24583
- *
24584
- from redundant_indexes_tmp_cut
24585
- order by index_size_bytes desc
24586
- ), redundant_indexes_grouped as (
24587
- select
24588
- index_id,
24589
- schema_name as tag_schema_name,
24590
- table_name,
24591
- table_size_bytes,
24592
- index_name as tag_index_name,
24593
- access_method as tag_access_method,
24594
- string_agg(distinct reason, ', ') as tag_reason,
24595
- index_size_bytes,
24596
- index_usage,
24597
- index_def as index_definition,
24598
- formated_index_name as tag_index_name,
24599
- formated_schema_name as tag_schema_name,
24600
- formated_table_name as tag_table_name,
24601
- formated_relation_name as tag_relation_name,
24602
- supports_fk::int as supports_fk,
24603
- json_agg(
24604
- distinct jsonb_build_object(
24605
- 'index_name', reason,
24606
- 'index_definition', main_index_def
24607
- )
24608
- )::text as covering_indexes_json
24609
- from redundant_indexes_cut_grouped
24610
- group by
24611
- index_id,
24612
- table_size_bytes,
24613
- schema_name,
24614
- table_name,
24615
- index_name,
24616
- access_method,
24617
- index_def,
24618
- index_size_bytes,
24619
- index_usage,
24620
- formated_index_name,
24621
- formated_schema_name,
24622
- formated_table_name,
24623
- formated_relation_name,
24624
- supports_fk
24625
- order by index_size_bytes desc
24626
- )
24627
- select * from redundant_indexes_grouped
24628
- limit 1000;
24629
- `
24630
- };
24631
- function getMetricSql(metricName, _pgMajorVersion = 16) {
24632
- const sql = EMBEDDED_SQL[metricName];
24633
- if (!sql) {
24634
- throw new Error(`Metric "${metricName}" not found. Available metrics: ${Object.keys(EMBEDDED_SQL).join(", ")}`);
24635
- }
24636
- return sql;
24637
- }
24638
- var METRIC_NAMES = {
24639
- H001: "pg_invalid_indexes",
24640
- H002: "unused_indexes",
24641
- H004: "redundant_indexes",
24642
- version: "express_version",
24643
- settings: "express_settings",
24644
- alteredSettings: "express_altered_settings",
24645
- databaseSizes: "express_database_sizes",
24646
- clusterStats: "express_cluster_stats",
24647
- connectionStates: "express_connection_states",
24648
- uptimeInfo: "express_uptime",
24649
- statsReset: "express_stats_reset",
24650
- currentDatabase: "express_current_database"
24651
- };
24652
- function transformMetricRow(row) {
24653
- const result = {};
24654
- for (const [key, value] of Object.entries(row)) {
24655
- if (key === "epoch_ns" || key === "num" || key === "tag_datname") {
24656
- continue;
24657
- }
24658
- const newKey = key.startsWith("tag_") ? key.slice(4) : key;
24659
- result[newKey] = value;
24660
- }
24661
- return result;
24662
- }
24663
-
24664
- // lib/checkup.ts
24665
- var __dirname = "/builds/postgres-ai/postgres_ai/cli/lib";
24666
- function parseVersionNum(versionNum) {
24667
- if (!versionNum || versionNum.length < 6) {
24668
- return { major: "", minor: "" };
24669
- }
24670
- try {
24671
- const num = parseInt(versionNum, 10);
24672
- return {
24673
- major: Math.floor(num / 1e4).toString(),
24674
- minor: (num % 1e4).toString()
24675
- };
24676
- } catch {
24677
- return { major: "", minor: "" };
24678
- }
24679
- }
24680
- function formatBytes(bytes) {
24681
- if (bytes === 0)
24682
- return "0 B";
24683
- const units = ["B", "KiB", "MiB", "GiB", "TiB", "PiB"];
24684
- const i2 = Math.floor(Math.log(bytes) / Math.log(1024));
24685
- return `${(bytes / Math.pow(1024, i2)).toFixed(2)} ${units[i2]}`;
24686
- }
24687
- async function getPostgresVersion(client) {
24688
- const sql = getMetricSql(METRIC_NAMES.version);
24689
- const result = await client.query(sql);
24690
- let version3 = "";
24691
- let serverVersionNum = "";
24692
- for (const row of result.rows) {
24693
- if (row.name === "server_version") {
24694
- version3 = row.setting;
24695
- } else if (row.name === "server_version_num") {
24696
- serverVersionNum = row.setting;
24697
- }
24698
- }
24699
- const { major, minor } = parseVersionNum(serverVersionNum);
24700
- return {
24701
- version: version3,
24702
- server_version_num: serverVersionNum,
24703
- server_major_ver: major,
24704
- server_minor_ver: minor
24705
- };
24706
- }
24707
- async function getSettings(client) {
24708
- const sql = getMetricSql(METRIC_NAMES.settings);
24709
- const result = await client.query(sql);
24710
- const settings = {};
24711
- for (const row of result.rows) {
24712
- settings[row.name] = {
24713
- setting: row.setting,
24714
- unit: row.unit || "",
24715
- category: row.category,
24716
- context: row.context,
24717
- vartype: row.vartype,
24718
- pretty_value: row.pretty_value
24719
- };
24720
- }
24721
- return settings;
24722
- }
24723
/**
 * Load only the settings that differ from their defaults, keyed by name.
 *
 * @param {object} client - Connected PG client.
 * @returns {Promise<object>} name -> {value, unit, category, pretty_value}
 */
async function getAlteredSettings(client) {
  const { rows } = await client.query(getMetricSql(METRIC_NAMES.alteredSettings));
  return Object.fromEntries(rows.map((row) => [row.name, {
    value: row.setting,
    unit: row.unit || "",
    category: row.category,
    pretty_value: row.pretty_value
  }]));
}
24737
/**
 * Fetch per-database sizes, keyed by database name.
 *
 * @param {object} client - Connected PG client.
 * @returns {Promise<Object<string, number>>} datname -> size in bytes
 */
async function getDatabaseSizes(client) {
  const { rows } = await client.query(getMetricSql(METRIC_NAMES.databaseSizes));
  const sizes = {};
  for (const { datname, size_bytes } of rows) {
    sizes[datname] = parseInt(size_bytes, 10);
  }
  return sizes;
}
24746
/**
 * Collect cluster-wide info: pg_stat_database aggregates, connection-state
 * counts, and server start time / uptime. Each entry is a uniform
 * {value, unit, description} record.
 *
 * @param {object} client - Connected PG client.
 * @returns {Promise<object>} metric name -> {value, unit, description}
 */
async function getClusterInfo(client) {
  const info = {};
  // Small factory keeping the metric objects uniform (was repeated inline 16x).
  const metric = (value, unit, description) => ({ value, unit, description });
  const statsResult = await client.query(getMetricSql(METRIC_NAMES.clusterStats));
  if (statsResult.rows.length > 0) {
    const stats = statsResult.rows[0];
    info.total_connections = metric(String(stats.total_connections || 0), "connections", "Total active database connections");
    info.total_commits = metric(String(stats.total_commits || 0), "transactions", "Total committed transactions");
    info.total_rollbacks = metric(String(stats.total_rollbacks || 0), "transactions", "Total rolled back transactions");
    const blocksHit = parseInt(stats.blocks_hit || "0", 10);
    const blocksRead = parseInt(stats.blocks_read || "0", 10);
    const totalBlocks = blocksHit + blocksRead;
    const cacheHitRatio = totalBlocks > 0 ? (blocksHit / totalBlocks * 100).toFixed(2) : "0.00";
    info.cache_hit_ratio = metric(cacheHitRatio, "%", "Buffer cache hit ratio");
    info.blocks_read = metric(String(blocksRead), "blocks", "Total disk blocks read");
    info.blocks_hit = metric(String(blocksHit), "blocks", "Total buffer cache hits");
    info.tuples_returned = metric(String(stats.tuples_returned || 0), "rows", "Total rows returned by queries");
    info.tuples_fetched = metric(String(stats.tuples_fetched || 0), "rows", "Total rows fetched by queries");
    info.tuples_inserted = metric(String(stats.tuples_inserted || 0), "rows", "Total rows inserted");
    info.tuples_updated = metric(String(stats.tuples_updated || 0), "rows", "Total rows updated");
    info.tuples_deleted = metric(String(stats.tuples_deleted || 0), "rows", "Total rows deleted");
    info.total_deadlocks = metric(String(stats.total_deadlocks || 0), "deadlocks", "Total deadlocks detected");
    info.temp_files_created = metric(String(stats.temp_files_created || 0), "files", "Total temporary files created");
    const tempBytes = parseInt(stats.temp_bytes_written || "0", 10);
    info.temp_bytes_written = metric(formatBytes(tempBytes), "bytes", "Total temporary file bytes written");
  }
  const connResult = await client.query(getMetricSql(METRIC_NAMES.connectionStates));
  for (const row of connResult.rows) {
    // Fix: pg_stat_activity may report a NULL state (e.g. background workers),
    // which previously crashed on `.replace`. Bucket those as "unknown".
    const state = row.state ?? "unknown";
    info[`connections_${state.replace(/\s+/g, "_")}`] = metric(String(row.count), "connections", `Connections in '${state}' state`);
  }
  const uptimeResult = await client.query(getMetricSql(METRIC_NAMES.uptimeInfo));
  if (uptimeResult.rows.length > 0) {
    const uptime = uptimeResult.rows[0];
    info.start_time = metric(uptime.start_time.toISOString(), "timestamp", "PostgreSQL server start time");
    info.uptime = metric(uptime.uptime, "interval", "Server uptime");
  }
  return info;
}
24855
/**
 * H001: list invalid indexes in the current database.
 *
 * @param {object} client - Connected PG client.
 * @returns {Promise<Array<object>>} normalized invalid-index records.
 */
async function getInvalidIndexes(client) {
  const { rows } = await client.query(getMetricSql(METRIC_NAMES.H001));
  return rows.map((raw) => {
    const r = transformMetricRow(raw);
    const sizeBytes = parseInt(String(r.index_size_bytes || 0), 10);
    return {
      schema_name: String(r.schema_name || ""),
      table_name: String(r.table_name || ""),
      index_name: String(r.index_name || ""),
      relation_name: String(r.relation_name || ""),
      index_size_bytes: sizeBytes,
      index_size_pretty: formatBytes(sizeBytes),
      supports_fk: r.supports_fk === true || r.supports_fk === 1
    };
  });
}
24872
/**
 * H002: list indexes that appear unused since the last stats reset.
 *
 * @param {object} client - Connected PG client.
 * @returns {Promise<Array<object>>} normalized unused-index records.
 */
async function getUnusedIndexes(client) {
  const { rows } = await client.query(getMetricSql(METRIC_NAMES.H002));
  return rows.map((raw) => {
    const r = transformMetricRow(raw);
    const sizeBytes = parseInt(String(r.index_size_bytes || 0), 10);
    return {
      schema_name: String(r.schema_name || ""),
      table_name: String(r.table_name || ""),
      index_name: String(r.index_name || ""),
      index_definition: String(r.index_definition || ""),
      reason: String(r.reason || ""),
      idx_scan: parseInt(String(r.idx_scan || 0), 10),
      index_size_bytes: sizeBytes,
      // Drivers may return booleans or textual "t" for boolean columns.
      idx_is_btree: r.idx_is_btree === true || r.idx_is_btree === "t",
      supports_fk: r.supports_fk === true || r.supports_fk === 1,
      index_size_pretty: formatBytes(sizeBytes)
    };
  });
}
24892
/**
 * Fetch when cumulative statistics were last reset and when the postmaster started.
 * All fields are null when the server reports no value.
 *
 * @param {object} client - Connected PG client.
 */
async function getStatsReset(client) {
  const result = await client.query(getMetricSql(METRIC_NAMES.statsReset));
  const row = result.rows[0] ?? {};
  const toFloat = (v) => (v ? parseFloat(v) : null);
  return {
    stats_reset_epoch: toFloat(row.stats_reset_epoch),
    stats_reset_time: row.stats_reset_time || null,
    days_since_reset: row.days_since_reset ? parseInt(row.days_since_reset, 10) : null,
    postmaster_startup_epoch: toFloat(row.postmaster_startup_epoch),
    postmaster_startup_time: row.postmaster_startup_time || null
  };
}
24904
/**
 * Return the current database's name and size, defaulting to
 * {"postgres", 0} when the query yields nothing usable.
 *
 * @param {object} client - Connected PG client.
 */
async function getCurrentDatabaseInfo(client) {
  const { rows } = await client.query(getMetricSql(METRIC_NAMES.currentDatabase));
  const first = rows[0] ?? {};
  return {
    datname: first.datname || "postgres",
    size_bytes: parseInt(first.size_bytes, 10) || 0
  };
}
24913
/**
 * H004: list indexes that are redundant because another index covers them.
 * The covering indexes arrive as a JSON string column; malformed JSON is
 * tolerated and yields an empty list.
 *
 * @param {object} client - Connected PG client.
 */
async function getRedundantIndexes(client) {
  const { rows } = await client.query(getMetricSql(METRIC_NAMES.H004));
  // Best-effort decode of the covering_indexes_json column.
  const parseCovering = (jsonStr) => {
    try {
      const parsed = JSON.parse(String(jsonStr || "[]"));
      if (!Array.isArray(parsed))
        return [];
      return parsed.map((item) => ({
        index_name: String(item.index_name || ""),
        index_definition: String(item.index_definition || "")
      }));
    } catch {
      return [];
    }
  };
  return rows.map((raw) => {
    const r = transformMetricRow(raw);
    const indexSizeBytes = parseInt(String(r.index_size_bytes || 0), 10);
    const tableSizeBytes = parseInt(String(r.table_size_bytes || 0), 10);
    return {
      schema_name: String(r.schema_name || ""),
      table_name: String(r.table_name || ""),
      index_name: String(r.index_name || ""),
      relation_name: String(r.relation_name || ""),
      access_method: String(r.access_method || ""),
      reason: String(r.reason || ""),
      index_size_bytes: indexSizeBytes,
      table_size_bytes: tableSizeBytes,
      index_usage: parseInt(String(r.index_usage || 0), 10),
      supports_fk: r.supports_fk === true || r.supports_fk === 1,
      index_definition: String(r.index_definition || ""),
      index_size_pretty: formatBytes(indexSizeBytes),
      table_size_pretty: formatBytes(tableSizeBytes),
      covering_indexes: parseCovering(r.covering_indexes_json)
    };
  });
}
24949
/**
 * Build the skeleton of an express checkup report shared by every check.
 *
 * @param {string} checkId - e.g. "A002".
 * @param {string} checkTitle - Human-readable check title.
 * @param {string} nodeName - Primary node label.
 */
function createBaseReport(checkId, checkTitle, nodeName) {
  return {
    version: version || null,
    build_ts: resolveBuildTs(),
    generation_mode: "express",
    checkId,
    checkTitle,
    timestamptz: new Date().toISOString(),
    nodes: { primary: nodeName, standbys: [] },
    results: {}
  };
}
24965
/**
 * Read a small UTF-8 text file and return its trimmed content.
 * Returns null when the file is missing, unreadable, or blank.
 *
 * @param {string} p - File path.
 * @returns {string|null}
 */
function readTextFileSafe(p) {
  try {
    const trimmed = fs4.readFileSync(p, "utf8").trim();
    return trimmed.length > 0 ? trimmed : null;
  } catch {
    return null;
  }
}
24973
/**
 * Resolve the build timestamp, in priority order:
 *  1. file named by PGAI_BUILD_TS_FILE (default /BUILD_TS),
 *  2. BUILD_TS file next to the package root,
 *  3. package.json mtime,
 *  4. current time as a last resort.
 *
 * @returns {string} ISO-8601 timestamp or raw file content.
 */
function resolveBuildTs() {
  const envPath = process.env.PGAI_BUILD_TS_FILE;
  const primaryPath = envPath && envPath.trim() ? envPath.trim() : "/BUILD_TS";
  const fromFile = readTextFileSafe(primaryPath);
  if (fromFile)
    return fromFile;
  try {
    const fromPkgFile = readTextFileSafe(path4.join(path4.resolve(__dirname, ".."), "BUILD_TS"));
    if (fromPkgFile)
      return fromPkgFile;
  } catch {}
  try {
    const st = fs4.statSync(path4.resolve(__dirname, "..", "package.json"));
    return st.mtime.toISOString();
  } catch {
    return new Date().toISOString();
  }
}
24993
/**
 * A002: report the Postgres major version.
 *
 * @param {object} client - Connected PG client.
 * @param {string} [nodeName="node-01"] - Node label for the results map.
 */
async function generateA002(client, nodeName = "node-01") {
  const report = createBaseReport("A002", "Postgres major version", nodeName);
  const postgresVersion = await getPostgresVersion(client);
  report.results[nodeName] = { data: { version: postgresVersion } };
  return report;
}
25003
/**
 * A003: report all Postgres settings.
 *
 * @param {object} client - Connected PG client.
 * @param {string} [nodeName="node-01"] - Node label for the results map.
 */
async function generateA003(client, nodeName = "node-01") {
  const report = createBaseReport("A003", "Postgres settings", nodeName);
  const settings = await getSettings(client);
  const postgresVersion = await getPostgresVersion(client);
  report.results[nodeName] = { data: settings, postgres_version: postgresVersion };
  return report;
}
25013
/**
 * A004: report cluster-wide info and per-database sizes.
 *
 * @param {object} client - Connected PG client.
 * @param {string} [nodeName="node-01"] - Node label for the results map.
 */
async function generateA004(client, nodeName = "node-01") {
  const report = createBaseReport("A004", "Cluster information", nodeName);
  // Queries stay sequential: a single PG client serves one query at a time.
  const generalInfo = await getClusterInfo(client);
  const databaseSizes = await getDatabaseSizes(client);
  const postgresVersion = await getPostgresVersion(client);
  report.results[nodeName] = {
    data: { general_info: generalInfo, database_sizes: databaseSizes },
    postgres_version: postgresVersion
  };
  return report;
}
25027
/**
 * A007: report settings that differ from their defaults.
 *
 * @param {object} client - Connected PG client.
 * @param {string} [nodeName="node-01"] - Node label for the results map.
 */
async function generateA007(client, nodeName = "node-01") {
  const report = createBaseReport("A007", "Altered settings", nodeName);
  const alteredSettings = await getAlteredSettings(client);
  const postgresVersion = await getPostgresVersion(client);
  report.results[nodeName] = { data: alteredSettings, postgres_version: postgresVersion };
  return report;
}
25037
/**
 * A013: report the Postgres minor version.
 *
 * @param {object} client - Connected PG client.
 * @param {string} [nodeName="node-01"] - Node label for the results map.
 */
async function generateA013(client, nodeName = "node-01") {
  const report = createBaseReport("A013", "Postgres minor version", nodeName);
  const postgresVersion = await getPostgresVersion(client);
  report.results[nodeName] = { data: { version: postgresVersion } };
  return report;
}
25047
/**
 * H001: report invalid indexes with size totals for the current database.
 *
 * @param {object} client - Connected PG client.
 * @param {string} [nodeName="node-01"] - Node label for the results map.
 */
async function generateH001(client, nodeName = "node-01") {
  const report = createBaseReport("H001", "Invalid indexes", nodeName);
  const invalidIndexes = await getInvalidIndexes(client);
  const postgresVersion = await getPostgresVersion(client);
  const { datname: dbName, size_bytes: dbSizeBytes } = await getCurrentDatabaseInfo(client);
  const totalSizeBytes = invalidIndexes.reduce((sum, idx) => sum + idx.index_size_bytes, 0);
  report.results[nodeName] = {
    data: {
      [dbName]: {
        invalid_indexes: invalidIndexes,
        total_count: invalidIndexes.length,
        total_size_bytes: totalSizeBytes,
        total_size_pretty: formatBytes(totalSizeBytes),
        database_size_bytes: dbSizeBytes,
        database_size_pretty: formatBytes(dbSizeBytes)
      }
    },
    postgres_version: postgresVersion
  };
  return report;
}
25069
/**
 * H002: report unused indexes plus the stats-reset context needed to
 * judge how meaningful "unused" is.
 *
 * @param {object} client - Connected PG client.
 * @param {string} [nodeName="node-01"] - Node label for the results map.
 */
async function generateH002(client, nodeName = "node-01") {
  const report = createBaseReport("H002", "Unused indexes", nodeName);
  const unusedIndexes = await getUnusedIndexes(client);
  const postgresVersion = await getPostgresVersion(client);
  const statsReset = await getStatsReset(client);
  const { datname: dbName, size_bytes: dbSizeBytes } = await getCurrentDatabaseInfo(client);
  const totalSizeBytes = unusedIndexes.reduce((sum, idx) => sum + idx.index_size_bytes, 0);
  report.results[nodeName] = {
    data: {
      [dbName]: {
        unused_indexes: unusedIndexes,
        total_count: unusedIndexes.length,
        total_size_bytes: totalSizeBytes,
        total_size_pretty: formatBytes(totalSizeBytes),
        database_size_bytes: dbSizeBytes,
        database_size_pretty: formatBytes(dbSizeBytes),
        stats_reset: statsReset
      }
    },
    postgres_version: postgresVersion
  };
  return report;
}
25093
/**
 * H004: report redundant indexes with size totals for the current database.
 *
 * @param {object} client - Connected PG client.
 * @param {string} [nodeName="node-01"] - Node label for the results map.
 */
async function generateH004(client, nodeName = "node-01") {
  const report = createBaseReport("H004", "Redundant indexes", nodeName);
  const redundantIndexes = await getRedundantIndexes(client);
  const postgresVersion = await getPostgresVersion(client);
  const { datname: dbName, size_bytes: dbSizeBytes } = await getCurrentDatabaseInfo(client);
  const totalSizeBytes = redundantIndexes.reduce((sum, idx) => sum + idx.index_size_bytes, 0);
  report.results[nodeName] = {
    data: {
      [dbName]: {
        redundant_indexes: redundantIndexes,
        total_count: redundantIndexes.length,
        total_size_bytes: totalSizeBytes,
        total_size_pretty: formatBytes(totalSizeBytes),
        database_size_bytes: dbSizeBytes,
        database_size_pretty: formatBytes(dbSizeBytes)
      }
    },
    postgres_version: postgresVersion
  };
  return report;
}
25115
/**
 * D004: report pg_stat_statements / pg_stat_kcache settings, whether each
 * extension is installed, and a few top queries as activity samples.
 * Probe failures (e.g. insufficient privileges) are treated as "not available".
 *
 * @param {object} client - Connected PG client.
 * @param {string} nodeName - Node label for the results map.
 */
async function generateD004(client, nodeName) {
  const report = createBaseReport("D004", "pg_stat_statements and pg_stat_kcache settings", nodeName);
  const postgresVersion = await getPostgresVersion(client);
  const allSettings = await getSettings(client);
  // Only settings owned by the two extensions are included.
  const pgssSettings = Object.fromEntries(Object.entries(allSettings).filter(([name]) => name.startsWith("pg_stat_statements") || name.startsWith("pg_stat_kcache")));
  let pgssAvailable = false;
  let pgssMetricsCount = 0;
  let pgssTotalCalls = 0;
  const pgssSampleQueries = [];
  try {
    const extCheck = await client.query("select 1 from pg_extension where extname = 'pg_stat_statements'");
    if (extCheck.rows.length > 0) {
      pgssAvailable = true;
      const statsResult = await client.query(`
        select count(*) as cnt, coalesce(sum(calls), 0) as total_calls
        from pg_stat_statements
      `);
      pgssMetricsCount = parseInt(statsResult.rows[0]?.cnt || "0", 10);
      pgssTotalCalls = parseInt(statsResult.rows[0]?.total_calls || "0", 10);
      const sampleResult = await client.query(`
        select
          queryid::text as queryid,
          coalesce(usename, 'unknown') as "user",
          coalesce(datname, 'unknown') as database,
          calls
        from pg_stat_statements s
        left join pg_database d on s.dbid = d.oid
        left join pg_user u on s.userid = u.usesysid
        order by calls desc
        limit 5
      `);
      for (const row of sampleResult.rows) {
        pgssSampleQueries.push({
          queryid: row.queryid,
          user: row.user,
          database: row.database,
          calls: parseInt(row.calls, 10)
        });
      }
    }
  } catch {}
  let kcacheAvailable = false;
  let kcacheMetricsCount = 0;
  let kcacheTotalExecTime = 0;
  let kcacheTotalUserTime = 0;
  let kcacheTotalSystemTime = 0;
  const kcacheSampleQueries = [];
  try {
    const extCheck = await client.query("select 1 from pg_extension where extname = 'pg_stat_kcache'");
    if (extCheck.rows.length > 0) {
      kcacheAvailable = true;
      const statsResult = await client.query(`
        select
          count(*) as cnt,
          coalesce(sum(exec_user_time + exec_system_time), 0) as total_exec_time,
          coalesce(sum(exec_user_time), 0) as total_user_time,
          coalesce(sum(exec_system_time), 0) as total_system_time
        from pg_stat_kcache
      `);
      kcacheMetricsCount = parseInt(statsResult.rows[0]?.cnt || "0", 10);
      kcacheTotalExecTime = parseFloat(statsResult.rows[0]?.total_exec_time || "0");
      kcacheTotalUserTime = parseFloat(statsResult.rows[0]?.total_user_time || "0");
      kcacheTotalSystemTime = parseFloat(statsResult.rows[0]?.total_system_time || "0");
      const sampleResult = await client.query(`
        select
          queryid::text as queryid,
          coalesce(usename, 'unknown') as "user",
          (exec_user_time + exec_system_time) as exec_total_time
        from pg_stat_kcache k
        left join pg_user u on k.userid = u.usesysid
        order by (exec_user_time + exec_system_time) desc
        limit 5
      `);
      for (const row of sampleResult.rows) {
        kcacheSampleQueries.push({
          queryid: row.queryid,
          user: row.user,
          exec_total_time: parseFloat(row.exec_total_time)
        });
      }
    }
  } catch {}
  report.results[nodeName] = {
    data: {
      settings: pgssSettings,
      pg_stat_statements_status: {
        extension_available: pgssAvailable,
        metrics_count: pgssMetricsCount,
        total_calls: pgssTotalCalls,
        sample_queries: pgssSampleQueries
      },
      pg_stat_kcache_status: {
        extension_available: kcacheAvailable,
        metrics_count: kcacheMetricsCount,
        total_exec_time: kcacheTotalExecTime,
        total_user_time: kcacheTotalUserTime,
        total_system_time: kcacheTotalSystemTime,
        sample_queries: kcacheSampleQueries
      }
    },
    postgres_version: postgresVersion
  };
  return report;
}
25224
/**
 * F001: report autovacuum-related settings.
 *
 * @param {object} client - Connected PG client.
 * @param {string} nodeName - Node label for the results map.
 */
async function generateF001(client, nodeName) {
  const report = createBaseReport("F001", "Autovacuum: current settings", nodeName);
  const postgresVersion = await getPostgresVersion(client);
  const allSettings = await getSettings(client);
  // Matching "vacuum" also covers every "autovacuum*" setting name.
  const autovacuumSettings = Object.fromEntries(
    Object.entries(allSettings).filter(([name]) => name.includes("vacuum"))
  );
  report.results[nodeName] = { data: autovacuumSettings, postgres_version: postgresVersion };
  return report;
}
25240
/**
 * G001: report memory-related settings and a rough estimate of memory
 * usage (shared memory total and worst-case work_mem consumption).
 * The estimate is best-effort; any query failure leaves it empty.
 *
 * @param {object} client - Connected PG client.
 * @param {string} nodeName - Node label for the results map.
 */
async function generateG001(client, nodeName) {
  const report = createBaseReport("G001", "Memory-related settings", nodeName);
  const postgresVersion = await getPostgresVersion(client);
  const allSettings = await getSettings(client);
  const memorySettingNames = [
    "shared_buffers",
    "work_mem",
    "maintenance_work_mem",
    "effective_cache_size",
    "wal_buffers",
    "temp_buffers",
    "max_connections",
    "autovacuum_work_mem",
    "hash_mem_multiplier",
    "logical_decoding_work_mem",
    "max_stack_depth",
    "max_prepared_transactions",
    "max_locks_per_transaction",
    "max_pred_locks_per_transaction"
  ];
  const memorySettings = Object.fromEntries(
    memorySettingNames.filter((name) => allSettings[name]).map((name) => [name, allSettings[name]])
  );
  let memoryUsage = {};
  try {
    const memQuery = await client.query(`
      select
        pg_size_bytes(current_setting('shared_buffers')) as shared_buffers_bytes,
        pg_size_bytes(current_setting('wal_buffers')) as wal_buffers_bytes,
        pg_size_bytes(current_setting('work_mem')) as work_mem_bytes,
        pg_size_bytes(current_setting('maintenance_work_mem')) as maintenance_work_mem_bytes,
        pg_size_bytes(current_setting('effective_cache_size')) as effective_cache_size_bytes,
        current_setting('max_connections')::int as max_connections
    `);
    if (memQuery.rows.length > 0) {
      const row = memQuery.rows[0];
      const sharedBuffers = parseInt(row.shared_buffers_bytes, 10);
      const walBuffers = parseInt(row.wal_buffers_bytes, 10);
      const workMem = parseInt(row.work_mem_bytes, 10);
      const maintenanceWorkMem = parseInt(row.maintenance_work_mem_bytes, 10);
      const effectiveCacheSize = parseInt(row.effective_cache_size_bytes, 10);
      const sharedTotal = sharedBuffers + walBuffers;
      // Worst case: every connection uses a full work_mem at once.
      const workMemCeiling = workMem * row.max_connections;
      memoryUsage = {
        shared_buffers_bytes: sharedBuffers,
        shared_buffers_pretty: formatBytes(sharedBuffers),
        wal_buffers_bytes: walBuffers,
        wal_buffers_pretty: formatBytes(walBuffers),
        shared_memory_total_bytes: sharedTotal,
        shared_memory_total_pretty: formatBytes(sharedTotal),
        work_mem_per_connection_bytes: workMem,
        work_mem_per_connection_pretty: formatBytes(workMem),
        max_work_mem_usage_bytes: workMemCeiling,
        max_work_mem_usage_pretty: formatBytes(workMemCeiling),
        maintenance_work_mem_bytes: maintenanceWorkMem,
        maintenance_work_mem_pretty: formatBytes(maintenanceWorkMem),
        effective_cache_size_bytes: effectiveCacheSize,
        effective_cache_size_pretty: formatBytes(effectiveCacheSize)
      };
    }
  } catch {}
  report.results[nodeName] = {
    data: {
      settings: memorySettings,
      analysis: { estimated_total_memory_usage: memoryUsage }
    },
    postgres_version: postgresVersion
  };
  return report;
}
25316
// Registry mapping each express checkup ID to its generator function.
// Key order drives the order checks run in generateAllReports.
var REPORT_GENERATORS = {
  A002: generateA002,
  A003: generateA003,
  A004: generateA004,
  A007: generateA007,
  A013: generateA013,
  D004: generateD004,
  F001: generateF001,
  G001: generateG001,
  H001: generateH001,
  H002: generateH002,
  H004: generateH004
};
25329
// Human-readable title for each express checkup ID; used for progress
// display and as the fallback title in generateAllReports.
var CHECK_INFO = {
  A002: "Postgres major version",
  A003: "Postgres settings",
  A004: "Cluster information",
  A007: "Altered settings",
  A013: "Postgres minor version",
  D004: "pg_stat_statements and pg_stat_kcache settings",
  F001: "Autovacuum: current settings",
  G001: "Memory-related settings",
  H001: "Invalid indexes",
  H002: "Unused indexes",
  H004: "Redundant indexes"
};
25342
/**
 * Run every registered express check sequentially and collect the reports
 * keyed by check ID.
 *
 * @param {object} client - Connected PG client.
 * @param {string} [nodeName="node-01"] - Node label.
 * @param {Function} [onProgress] - Invoked before each check with
 *   {checkId, checkTitle, index, total}.
 */
async function generateAllReports(client, nodeName = "node-01", onProgress) {
  const entries = Object.entries(REPORT_GENERATORS);
  const reports = {};
  let position = 0;
  for (const [checkId, generator] of entries) {
    position += 1;
    onProgress?.({
      checkId,
      checkTitle: CHECK_INFO[checkId] || checkId,
      index: position,
      total: entries.length
    });
    reports[checkId] = await generator(client, nodeName);
  }
  return reports;
}
25359
-
25360
- // lib/checkup-api.ts
25361
- import * as https from "https";
25362
- import { URL as URL3 } from "url";
25363
/**
 * Error thrown when a PostgREST RPC call returns a non-2xx status.
 * Carries both the raw response text and (when parseable) its JSON form
 * so callers can render PostgREST's message/details/hint fields.
 */
class RpcError extends Error {
  rpcName;
  statusCode;
  payloadText;
  payloadJson;
  constructor({ rpcName, statusCode, payloadText, payloadJson }) {
    super(`RPC ${rpcName} failed: HTTP ${statusCode}`);
    this.name = "RpcError";
    Object.assign(this, { rpcName, statusCode, payloadText, payloadJson });
  }
}
25378
/**
 * Render an RpcError as display lines: the headline, then PostgREST's
 * message/details/hint if present, else the raw payload text.
 *
 * @param {RpcError} err - The failed RPC error.
 * @returns {string[]} Lines ready to print.
 */
function formatRpcErrorForDisplay(err) {
  const lines = [`Error: RPC ${err.rpcName} failed: HTTP ${err.statusCode}`];
  const payload = err.payloadJson && typeof err.payloadJson === "object" ? err.payloadJson : null;
  const pickString = (key) => (payload && typeof payload[key] === "string" ? payload[key] : "");
  const message = pickString("message");
  const details = pickString("details");
  const hint = pickString("hint");
  if (message)
    lines.push(`Message: ${message}`);
  if (details)
    lines.push(`Details: ${details}`);
  if (hint)
    lines.push(`Hint: ${hint}`);
  // Fall back to the raw body only when no structured fields were found.
  if (!message && !details && !hint) {
    const fallback = (err.payloadText || "").trim();
    if (fallback)
      lines.push(fallback);
  }
  return lines;
}
25398
/**
 * Unwrap PostgREST RPC envelopes: single-element arrays are unwrapped
 * recursively, and objects with a `result` property yield that property.
 * Anything else passes through untouched.
 *
 * @param {*} parsed - Parsed JSON response body.
 * @returns {*} The innermost useful payload.
 */
function unwrapRpcResponse(parsed) {
  if (Array.isArray(parsed)) {
    return parsed.length === 1 ? unwrapRpcResponse(parsed[0]) : parsed;
  }
  if (parsed !== null && typeof parsed === "object" && parsed.result !== undefined) {
    return parsed.result;
  }
  return parsed;
}
25411
/**
 * POST a PostgREST RPC call and resolve with the unwrapped JSON result.
 * Rejects with RpcError on non-2xx responses (carrying the payload) and
 * with a plain Error when the success body is not valid JSON.
 *
 * @param {object} params - {apiKey, apiBaseUrl, rpcName, bodyObj}
 */
async function postRpc(params) {
  const { apiKey, apiBaseUrl, rpcName, bodyObj } = params;
  if (!apiKey)
    throw new Error("API key is required");
  const base = normalizeBaseUrl(apiBaseUrl);
  const url = new URL3(`${base}/rpc/${rpcName}`);
  const body = JSON.stringify(bodyObj);
  const headers = {
    "access-token": apiKey,
    Prefer: "return=representation",
    "Content-Type": "application/json",
    "Content-Length": Buffer.byteLength(body).toString()
  };
  return new Promise((resolveRpc, reject) => {
    const req = https.request(url, { method: "POST", headers }, (res) => {
      let data = "";
      res.on("data", (chunk) => {
        data += chunk;
      });
      res.on("end", () => {
        const ok = res.statusCode && res.statusCode >= 200 && res.statusCode < 300;
        if (ok) {
          try {
            resolveRpc(unwrapRpcResponse(JSON.parse(data)));
          } catch {
            reject(new Error(`Failed to parse RPC response: ${data}`));
          }
          return;
        }
        // Non-2xx: keep the raw text and best-effort parsed JSON for display.
        let payloadJson = null;
        if (data) {
          try {
            payloadJson = JSON.parse(data);
          } catch {
            payloadJson = null;
          }
        }
        reject(new RpcError({ rpcName, statusCode: res.statusCode || 0, payloadText: data, payloadJson }));
      });
    });
    req.on("error", reject);
    req.write(body);
    req.end();
  });
}
25458
/**
 * Create a new checkup report on the server and return its numeric ID.
 * Throws when the RPC response does not contain a positive report_id.
 *
 * @param {object} params - {apiKey, apiBaseUrl, project, status?}
 * @returns {Promise<{reportId: number}>}
 */
async function createCheckupReport(params) {
  const { apiKey, apiBaseUrl, project, status } = params;
  const bodyObj = { access_token: apiKey, project };
  if (status)
    bodyObj.status = status;
  const resp = await postRpc({ apiKey, apiBaseUrl, rpcName: "checkup_report_create", bodyObj });
  const reportId = Number(resp?.report_id);
  if (!Number.isFinite(reportId) || reportId <= 0) {
    throw new Error(`Unexpected checkup_report_create response: ${JSON.stringify(resp)}`);
  }
  return { reportId };
}
25478
/**
 * Upload one JSON report file to an existing checkup report.
 * Accepts both the server's misspelled `report_chunck_id` key and the
 * corrected `report_chunk_id` spelling.
 *
 * @param {object} params - {apiKey, apiBaseUrl, reportId, filename, checkId, jsonText}
 * @returns {Promise<{reportChunkId: number}>}
 */
async function uploadCheckupReportJson(params) {
  const { apiKey, apiBaseUrl, reportId, filename, checkId, jsonText } = params;
  const resp = await postRpc({
    apiKey,
    apiBaseUrl,
    rpcName: "checkup_report_file_post",
    bodyObj: {
      access_token: apiKey,
      checkup_report_id: reportId,
      filename,
      check_id: checkId,
      data: jsonText,
      type: "json",
      generate_issue: true
    }
  });
  const chunkId = Number(resp?.report_chunck_id ?? resp?.report_chunk_id);
  if (!Number.isFinite(chunkId) || chunkId <= 0) {
    throw new Error(`Unexpected checkup_report_file_post response: ${JSON.stringify(resp)}`);
  }
  return { reportChunkId: chunkId };
}
25501
-
25502
- // bin/postgres-ai.ts
25503
24216
  var rl = null;
25504
24217
  function getReadline() {
25505
24218
  if (!rl) {
@@ -25514,27 +24227,27 @@ function closeReadline() {
25514
24227
  }
25515
24228
  }
25516
24229
/**
 * Run a shell command, resolving with {stdout, stderr} or rejecting with
 * an error whose .code carries the exit code (defaulting to 1).
 *
 * @param {string} command - Shell command line.
 */
async function execPromise(command) {
  return new Promise((resolveExec, reject) => {
    childProcess.exec(command, (error2, stdout, stderr) => {
      if (!error2) {
        resolveExec({ stdout, stderr });
        return;
      }
      const err = error2;
      err.code = error2.code ?? 1;
      reject(err);
    });
  });
}
25529
24242
  async function execFilePromise(file, args) {
25530
- return new Promise((resolve6, reject) => {
24243
+ return new Promise((resolve5, reject) => {
25531
24244
  childProcess.execFile(file, args, (error2, stdout, stderr) => {
25532
24245
  if (error2) {
25533
24246
  const err = error2;
25534
24247
  err.code = error2.code ?? 1;
25535
24248
  reject(err);
25536
24249
  } else {
25537
- resolve6({ stdout, stderr });
24250
+ resolve5({ stdout, stderr });
25538
24251
  }
25539
24252
  });
25540
24253
  });
@@ -25575,69 +24288,17 @@ function spawn2(cmd, args, options) {
25575
24288
  };
25576
24289
  }
25577
24290
  async function question(prompt) {
25578
- return new Promise((resolve6) => {
24291
+ return new Promise((resolve5) => {
25579
24292
  getReadline().question(prompt, (answer) => {
25580
- resolve6(answer);
24293
+ resolve5(answer);
25581
24294
  });
25582
24295
  });
25583
24296
  }
25584
- function expandHomePath(p) {
25585
- const s = (p || "").trim();
25586
- if (!s)
25587
- return s;
25588
- if (s === "~")
25589
- return os3.homedir();
25590
- if (s.startsWith("~/") || s.startsWith("~\\")) {
25591
- return path5.join(os3.homedir(), s.slice(2));
25592
- }
25593
- return s;
25594
- }
25595
- function createTtySpinner(enabled, initialText) {
25596
- if (!enabled) {
25597
- return {
25598
- update: () => {},
25599
- stop: () => {}
25600
- };
25601
- }
25602
- const frames = ["|", "/", "-", "\\"];
25603
- const startTs = Date.now();
25604
- let text = initialText;
25605
- let frameIdx = 0;
25606
- let stopped = false;
25607
- const render = () => {
25608
- if (stopped)
25609
- return;
25610
- const elapsedSec = ((Date.now() - startTs) / 1000).toFixed(1);
25611
- const frame = frames[frameIdx % frames.length];
25612
- frameIdx += 1;
25613
- process.stdout.write(`\r\x1B[2K${frame} ${text} (${elapsedSec}s)`);
25614
- };
25615
- const timer = setInterval(render, 120);
25616
- render();
25617
- return {
25618
- update: (t) => {
25619
- text = t;
25620
- render();
25621
- },
25622
- stop: (finalText) => {
25623
- if (stopped)
25624
- return;
25625
- stopped = true;
25626
- clearInterval(timer);
25627
- process.stdout.write("\r\x1B[2K");
25628
- if (finalText && finalText.trim()) {
25629
- process.stdout.write(finalText);
25630
- }
25631
- process.stdout.write(`
25632
- `);
25633
- }
25634
- };
25635
- }
25636
24297
  function getDefaultMonitoringProjectDir() {
25637
24298
  const override = process.env.PGAI_PROJECT_DIR;
25638
24299
  if (override && override.trim())
25639
24300
  return override.trim();
25640
- return path5.join(getConfigDir(), "monitoring");
24301
+ return path4.join(getConfigDir(), "monitoring");
25641
24302
  }
25642
24303
  async function downloadText(url) {
25643
24304
  const controller = new AbortController;
@@ -25654,12 +24315,12 @@ async function downloadText(url) {
25654
24315
  }
25655
24316
  async function ensureDefaultMonitoringProject() {
25656
24317
  const projectDir = getDefaultMonitoringProjectDir();
25657
- const composeFile = path5.resolve(projectDir, "docker-compose.yml");
25658
- const instancesFile = path5.resolve(projectDir, "instances.yml");
25659
- if (!fs5.existsSync(projectDir)) {
25660
- fs5.mkdirSync(projectDir, { recursive: true, mode: 448 });
24318
+ const composeFile = path4.resolve(projectDir, "docker-compose.yml");
24319
+ const instancesFile = path4.resolve(projectDir, "instances.yml");
24320
+ if (!fs4.existsSync(projectDir)) {
24321
+ fs4.mkdirSync(projectDir, { recursive: true, mode: 448 });
25661
24322
  }
25662
- if (!fs5.existsSync(composeFile)) {
24323
+ if (!fs4.existsSync(composeFile)) {
25663
24324
  const refs = [
25664
24325
  process.env.PGAI_PROJECT_REF,
25665
24326
  package_default.version,
@@ -25671,36 +24332,36 @@ async function ensureDefaultMonitoringProject() {
25671
24332
  const url = `https://gitlab.com/postgres-ai/postgres_ai/-/raw/${encodeURIComponent(ref)}/docker-compose.yml`;
25672
24333
  try {
25673
24334
  const text = await downloadText(url);
25674
- fs5.writeFileSync(composeFile, text, { encoding: "utf8", mode: 384 });
24335
+ fs4.writeFileSync(composeFile, text, { encoding: "utf8", mode: 384 });
25675
24336
  break;
25676
24337
  } catch (err) {
25677
24338
  lastErr = err;
25678
24339
  }
25679
24340
  }
25680
- if (!fs5.existsSync(composeFile)) {
24341
+ if (!fs4.existsSync(composeFile)) {
25681
24342
  const msg = lastErr instanceof Error ? lastErr.message : String(lastErr);
25682
24343
  throw new Error(`Failed to bootstrap docker-compose.yml: ${msg}`);
25683
24344
  }
25684
24345
  }
25685
- if (!fs5.existsSync(instancesFile)) {
24346
+ if (!fs4.existsSync(instancesFile)) {
25686
24347
  const header = `# PostgreSQL instances to monitor
25687
24348
  ` + `# Add your instances using: pgai mon targets add <connection-string> <name>
25688
24349
 
25689
24350
  `;
25690
- fs5.writeFileSync(instancesFile, header, { encoding: "utf8", mode: 384 });
24351
+ fs4.writeFileSync(instancesFile, header, { encoding: "utf8", mode: 384 });
25691
24352
  }
25692
- const pgwatchConfig = path5.resolve(projectDir, ".pgwatch-config");
25693
- if (!fs5.existsSync(pgwatchConfig)) {
25694
- fs5.writeFileSync(pgwatchConfig, "", { encoding: "utf8", mode: 384 });
24353
+ const pgwatchConfig = path4.resolve(projectDir, ".pgwatch-config");
24354
+ if (!fs4.existsSync(pgwatchConfig)) {
24355
+ fs4.writeFileSync(pgwatchConfig, "", { encoding: "utf8", mode: 384 });
25695
24356
  }
25696
- const envFile = path5.resolve(projectDir, ".env");
25697
- if (!fs5.existsSync(envFile)) {
24357
+ const envFile = path4.resolve(projectDir, ".env");
24358
+ if (!fs4.existsSync(envFile)) {
25698
24359
  const envText = `PGAI_TAG=${package_default.version}
25699
24360
  # PGAI_REGISTRY=registry.gitlab.com/postgres-ai/postgres_ai
25700
24361
  `;
25701
- fs5.writeFileSync(envFile, envText, { encoding: "utf8", mode: 384 });
24362
+ fs4.writeFileSync(envFile, envText, { encoding: "utf8", mode: 384 });
25702
24363
  }
25703
- return { fs: fs5, path: path5, projectDir, composeFile, instancesFile };
24364
+ return { fs: fs4, path: path4, projectDir, composeFile, instancesFile };
25704
24365
  }
25705
24366
  function getConfig(opts) {
25706
24367
  let apiKey = opts.apiKey || process.env.PGAI_API_KEY || "";
@@ -25732,16 +24393,6 @@ function printResult(result, json2) {
25732
24393
  }
25733
24394
  var program2 = new Command;
25734
24395
  program2.name("postgres-ai").description("PostgresAI CLI").version(package_default.version).option("--api-key <key>", "API key (overrides PGAI_API_KEY)").option("--api-base-url <url>", "API base URL for backend RPC (overrides PGAI_API_BASE_URL)").option("--ui-base-url <url>", "UI base URL for browser routes (overrides PGAI_UI_BASE_URL)");
25735
- program2.command("set-default-project <project>").description("store default project for checkup uploads").action(async (project) => {
25736
- const value = (project || "").trim();
25737
- if (!value) {
25738
- console.error("Error: project is required");
25739
- process.exitCode = 1;
25740
- return;
25741
- }
25742
- writeConfig({ defaultProject: value });
25743
- console.log(`Default project saved: ${value}`);
25744
- });
25745
24396
  program2.command("prepare-db [conn]").description("prepare database for monitoring: create monitoring user, required view(s), and grant permissions (idempotent)").option("--db-url <url>", "PostgreSQL connection URL (admin) to run the setup against (deprecated; pass it as positional arg)").option("-h, --host <host>", "PostgreSQL host (psql-like)").option("-p, --port <port>", "PostgreSQL port (psql-like)").option("-U, --username <username>", "PostgreSQL user (psql-like)").option("-d, --dbname <dbname>", "PostgreSQL database name (psql-like)").option("--admin-password <password>", "Admin connection password (otherwise uses PGPASSWORD if set)").option("--monitoring-user <name>", "Monitoring role name to create/update", DEFAULT_MONITORING_USER).option("--password <password>", "Monitoring role password (overrides PGAI_MON_PASSWORD)").option("--skip-optional-permissions", "Skip optional permissions (RDS/self-managed extras)", false).option("--verify", "Verify that monitoring role/permissions are in place (no changes)", false).option("--reset-password", "Reset monitoring role password only (no other changes)", false).option("--print-sql", "Print SQL plan and exit (no changes applied)", false).option("--print-password", "Print generated monitoring password (DANGEROUS in CI logs)", false).addHelpText("after", [
25746
24397
  "",
25747
24398
  "Examples:",
@@ -26012,206 +24663,16 @@ program2.command("prepare-db [conn]").description("prepare database for monitori
26012
24663
  }
26013
24664
  }
26014
24665
  });
26015
- program2.command("checkup [conn]").description("generate health check reports directly from PostgreSQL (express mode)").option("--check-id <id>", `specific check to run: ${Object.keys(CHECK_INFO).join(", ")}, or ALL`, "ALL").option("--node-name <name>", "node name for reports", "node-01").option("--output <path>", "output directory for JSON files").option("--[no-]upload", "upload JSON results to PostgresAI (default: enabled; requires API key)", undefined).option("--project <project>", "project name or ID for remote upload (used with --upload; defaults to config defaultProject; auto-generated on first run)").option("--json", "output JSON to stdout (implies --no-upload)").addHelpText("after", [
26016
- "",
26017
- "Available checks:",
26018
- ...Object.entries(CHECK_INFO).map(([id, title]) => ` ${id}: ${title}`),
26019
- "",
26020
- "Examples:",
26021
- " postgresai checkup postgresql://user:pass@host:5432/db",
26022
- " postgresai checkup postgresql://user:pass@host:5432/db --check-id A003",
26023
- " postgresai checkup postgresql://user:pass@host:5432/db --output ./reports",
26024
- " postgresai checkup postgresql://user:pass@host:5432/db --project my_project",
26025
- " postgresai set-default-project my_project",
26026
- " postgresai checkup postgresql://user:pass@host:5432/db",
26027
- " postgresai checkup postgresql://user:pass@host:5432/db --no-upload --json"
26028
- ].join(`
26029
- `)).action(async (conn, opts, cmd) => {
26030
- if (!conn) {
26031
- cmd.outputHelp();
26032
- process.exitCode = 1;
26033
- return;
26034
- }
26035
- const shouldPrintJson = !!opts.json;
26036
- const shouldUpload = opts.upload !== false && !shouldPrintJson;
26037
- const generateDefaultProjectName = () => {
26038
- return `project_${crypto2.randomBytes(6).toString("hex")}`;
26039
- };
26040
- let outputPath;
26041
- if (opts.output) {
26042
- const outputDir = expandHomePath(opts.output);
26043
- outputPath = path5.isAbsolute(outputDir) ? outputDir : path5.resolve(process.cwd(), outputDir);
26044
- if (!fs5.existsSync(outputPath)) {
26045
- try {
26046
- fs5.mkdirSync(outputPath, { recursive: true });
26047
- } catch (e) {
26048
- const errAny = e;
26049
- const code = typeof errAny?.code === "string" ? errAny.code : "";
26050
- const msg = errAny instanceof Error ? errAny.message : String(errAny);
26051
- if (code === "EACCES" || code === "EPERM" || code === "ENOENT") {
26052
- console.error(`Error: Failed to create output directory: ${outputPath}`);
26053
- console.error(`Reason: ${msg}`);
26054
- console.error("Tip: choose a writable path, e.g. --output ./reports or --output ~/reports");
26055
- process.exitCode = 1;
26056
- return;
26057
- }
26058
- throw e;
26059
- }
26060
- }
26061
- }
26062
- let uploadCfg;
26063
- let projectWasGenerated = false;
26064
- if (shouldUpload) {
26065
- const rootOpts = program2.opts();
26066
- const { apiKey } = getConfig(rootOpts);
26067
- if (!apiKey) {
26068
- console.error("Error: API key is required for upload");
26069
- console.error("Tip: run 'postgresai auth' or pass --api-key / set PGAI_API_KEY");
26070
- process.exitCode = 1;
26071
- return;
26072
- }
26073
- const cfg = readConfig();
26074
- const { apiBaseUrl } = resolveBaseUrls2(rootOpts, cfg);
26075
- let project = (opts.project || cfg.defaultProject || "").trim();
26076
- if (!project) {
26077
- project = generateDefaultProjectName();
26078
- projectWasGenerated = true;
26079
- try {
26080
- writeConfig({ defaultProject: project });
26081
- } catch (e) {
26082
- const message = e instanceof Error ? e.message : String(e);
26083
- console.error(`Warning: Failed to save generated default project: ${message}`);
26084
- }
26085
- }
26086
- uploadCfg = {
26087
- apiKey,
26088
- apiBaseUrl,
26089
- project
26090
- };
26091
- }
26092
- const adminConn = resolveAdminConnection({
26093
- conn,
26094
- envPassword: process.env.PGPASSWORD
26095
- });
26096
- let client;
26097
- const spinnerEnabled = !!process.stdout.isTTY && !!shouldUpload;
26098
- const spinner = createTtySpinner(spinnerEnabled, "Connecting to Postgres");
26099
- try {
26100
- spinner.update("Connecting to Postgres");
26101
- const connResult = await connectWithSslFallback(Client, adminConn);
26102
- client = connResult.client;
26103
- let reports;
26104
- let uploadSummary;
26105
- if (opts.checkId === "ALL") {
26106
- reports = await generateAllReports(client, opts.nodeName, (p) => {
26107
- spinner.update(`Running ${p.checkId}: ${p.checkTitle} (${p.index}/${p.total})`);
26108
- });
26109
- } else {
26110
- const checkId = opts.checkId.toUpperCase();
26111
- const generator = REPORT_GENERATORS[checkId];
26112
- if (!generator) {
26113
- spinner.stop();
26114
- console.error(`Unknown check ID: ${opts.checkId}`);
26115
- console.error(`Available: ${Object.keys(CHECK_INFO).join(", ")}, ALL`);
26116
- process.exitCode = 1;
26117
- return;
26118
- }
26119
- spinner.update(`Running ${checkId}: ${CHECK_INFO[checkId] || checkId}`);
26120
- reports = { [checkId]: await generator(client, opts.nodeName) };
26121
- }
26122
- if (uploadCfg) {
26123
- spinner.update("Creating remote checkup report");
26124
- const created = await createCheckupReport({
26125
- apiKey: uploadCfg.apiKey,
26126
- apiBaseUrl: uploadCfg.apiBaseUrl,
26127
- project: uploadCfg.project
26128
- });
26129
- const reportId = created.reportId;
26130
- const logUpload = (msg) => {
26131
- if (shouldPrintJson) {
26132
- console.error(msg);
26133
- } else {
26134
- console.log(msg);
26135
- }
26136
- };
26137
- logUpload(`Created remote checkup report: ${reportId}`);
26138
- const uploaded = [];
26139
- for (const [checkId, report] of Object.entries(reports)) {
26140
- spinner.update(`Uploading ${checkId}.json`);
26141
- const jsonText = JSON.stringify(report, null, 2);
26142
- const r = await uploadCheckupReportJson({
26143
- apiKey: uploadCfg.apiKey,
26144
- apiBaseUrl: uploadCfg.apiBaseUrl,
26145
- reportId,
26146
- filename: `${checkId}.json`,
26147
- checkId,
26148
- jsonText
26149
- });
26150
- uploaded.push({ checkId, filename: `${checkId}.json`, chunkId: r.reportChunkId });
26151
- }
26152
- logUpload("Upload completed");
26153
- uploadSummary = { project: uploadCfg.project, reportId, uploaded };
26154
- }
26155
- spinner.stop();
26156
- if (opts.output) {
26157
- const outDir = outputPath || path5.resolve(process.cwd(), expandHomePath(opts.output));
26158
- for (const [checkId, report] of Object.entries(reports)) {
26159
- const filePath = path5.join(outDir, `${checkId}.json`);
26160
- fs5.writeFileSync(filePath, JSON.stringify(report, null, 2), "utf8");
26161
- console.log(`\u2713 ${checkId}: ${filePath}`);
26162
- }
26163
- }
26164
- if (uploadSummary) {
26165
- const out = shouldPrintJson ? console.error : console.log;
26166
- out(`
26167
- Checkup report uploaded`);
26168
- out(`======================
26169
- `);
26170
- if (projectWasGenerated) {
26171
- out(`Project: ${uploadSummary.project} (generated and saved as default)`);
26172
- } else {
26173
- out(`Project: ${uploadSummary.project}`);
26174
- }
26175
- out(`Report ID: ${uploadSummary.reportId}`);
26176
- out("View in Console: console.postgres.ai \u2192 Support \u2192 checkup reports");
26177
- out("");
26178
- out("Files:");
26179
- for (const item of uploadSummary.uploaded) {
26180
- out(`- ${item.checkId}: ${item.filename}`);
26181
- }
26182
- }
26183
- if (shouldPrintJson) {
26184
- console.log(JSON.stringify(reports, null, 2));
26185
- } else if (!shouldUpload && !opts.output) {
26186
- console.log(JSON.stringify(reports, null, 2));
26187
- }
26188
- } catch (error2) {
26189
- spinner.stop();
26190
- if (error2 instanceof RpcError) {
26191
- for (const line of formatRpcErrorForDisplay(error2)) {
26192
- console.error(line);
26193
- }
26194
- } else {
26195
- const message = error2 instanceof Error ? error2.message : String(error2);
26196
- console.error(`Error: ${message}`);
26197
- }
26198
- process.exitCode = 1;
26199
- } finally {
26200
- if (client) {
26201
- await client.end();
26202
- }
26203
- }
26204
- });
26205
24666
  function resolvePaths() {
26206
24667
  const startDir = process.cwd();
26207
24668
  let currentDir = startDir;
26208
24669
  while (true) {
26209
- const composeFile = path5.resolve(currentDir, "docker-compose.yml");
26210
- if (fs5.existsSync(composeFile)) {
26211
- const instancesFile = path5.resolve(currentDir, "instances.yml");
26212
- return { fs: fs5, path: path5, projectDir: currentDir, composeFile, instancesFile };
24670
+ const composeFile = path4.resolve(currentDir, "docker-compose.yml");
24671
+ if (fs4.existsSync(composeFile)) {
24672
+ const instancesFile = path4.resolve(currentDir, "instances.yml");
24673
+ return { fs: fs4, path: path4, projectDir: currentDir, composeFile, instancesFile };
26213
24674
  }
26214
- const parentDir = path5.dirname(currentDir);
24675
+ const parentDir = path4.dirname(currentDir);
26215
24676
  if (parentDir === currentDir)
26216
24677
  break;
26217
24678
  currentDir = parentDir;
@@ -26277,12 +24738,12 @@ async function runCompose(args) {
26277
24738
  return 1;
26278
24739
  }
26279
24740
  const env = { ...process.env };
26280
- const cfgPath = path5.resolve(projectDir, ".pgwatch-config");
26281
- if (fs5.existsSync(cfgPath)) {
24741
+ const cfgPath = path4.resolve(projectDir, ".pgwatch-config");
24742
+ if (fs4.existsSync(cfgPath)) {
26282
24743
  try {
26283
- const stats = fs5.statSync(cfgPath);
24744
+ const stats = fs4.statSync(cfgPath);
26284
24745
  if (!stats.isDirectory()) {
26285
- const content = fs5.readFileSync(cfgPath, "utf8");
24746
+ const content = fs4.readFileSync(cfgPath, "utf8");
26286
24747
  const match = content.match(/^grafana_password=([^\r\n]+)/m);
26287
24748
  if (match) {
26288
24749
  env.GF_SECURITY_ADMIN_PASSWORD = match[1].trim();
@@ -26290,13 +24751,13 @@ async function runCompose(args) {
26290
24751
  }
26291
24752
  } catch (err) {}
26292
24753
  }
26293
- return new Promise((resolve6) => {
24754
+ return new Promise((resolve5) => {
26294
24755
  const child = spawn2(cmd[0], [...cmd.slice(1), "-f", composeFile, ...args], {
26295
24756
  stdio: "inherit",
26296
24757
  env,
26297
24758
  cwd: projectDir
26298
24759
  });
26299
- child.on("close", (code) => resolve6(code || 0));
24760
+ child.on("close", (code) => resolve5(code || 0));
26300
24761
  });
26301
24762
  }
26302
24763
  program2.command("help", { isDefault: true }).description("show help").action(() => {
@@ -26314,17 +24775,17 @@ mon.command("local-install").description("install local monitoring stack (genera
26314
24775
  const { projectDir } = await resolveOrInitPaths();
26315
24776
  console.log(`Project directory: ${projectDir}
26316
24777
  `);
26317
- const envFile = path5.resolve(projectDir, ".env");
24778
+ const envFile = path4.resolve(projectDir, ".env");
26318
24779
  const imageTag = opts.tag || package_default.version;
26319
24780
  const envLines = [`PGAI_TAG=${imageTag}`];
26320
- if (fs5.existsSync(envFile)) {
26321
- const existingEnv = fs5.readFileSync(envFile, "utf8");
24781
+ if (fs4.existsSync(envFile)) {
24782
+ const existingEnv = fs4.readFileSync(envFile, "utf8");
26322
24783
  const pwdMatch = existingEnv.match(/^GF_SECURITY_ADMIN_PASSWORD=(.+)$/m);
26323
24784
  if (pwdMatch) {
26324
24785
  envLines.push(`GF_SECURITY_ADMIN_PASSWORD=${pwdMatch[1]}`);
26325
24786
  }
26326
24787
  }
26327
- fs5.writeFileSync(envFile, envLines.join(`
24788
+ fs4.writeFileSync(envFile, envLines.join(`
26328
24789
  `) + `
26329
24790
  `, { encoding: "utf8", mode: 384 });
26330
24791
  if (opts.tag) {
@@ -26360,7 +24821,7 @@ Use demo mode without API key: postgres-ai mon local-install --demo`);
26360
24821
  if (opts.apiKey) {
26361
24822
  console.log("Using API key provided via --api-key parameter");
26362
24823
  writeConfig({ apiKey: opts.apiKey });
26363
- fs5.writeFileSync(path5.resolve(projectDir, ".pgwatch-config"), `api_key=${opts.apiKey}
24824
+ fs4.writeFileSync(path4.resolve(projectDir, ".pgwatch-config"), `api_key=${opts.apiKey}
26364
24825
  `, {
26365
24826
  encoding: "utf8",
26366
24827
  mode: 384
@@ -26381,7 +24842,7 @@ Use demo mode without API key: postgres-ai mon local-install --demo`);
26381
24842
  const trimmedKey = inputApiKey.trim();
26382
24843
  if (trimmedKey) {
26383
24844
  writeConfig({ apiKey: trimmedKey });
26384
- fs5.writeFileSync(path5.resolve(projectDir, ".pgwatch-config"), `api_key=${trimmedKey}
24845
+ fs4.writeFileSync(path4.resolve(projectDir, ".pgwatch-config"), `api_key=${trimmedKey}
26385
24846
  `, {
26386
24847
  encoding: "utf8",
26387
24848
  mode: 384
@@ -26418,7 +24879,7 @@ Use demo mode without API key: postgres-ai mon local-install --demo`);
26418
24879
  # Add your instances using: postgres-ai mon targets add
26419
24880
 
26420
24881
  `;
26421
- fs5.writeFileSync(instancesPath, emptyInstancesContent, "utf8");
24882
+ fs4.writeFileSync(instancesPath, emptyInstancesContent, "utf8");
26422
24883
  console.log(`Instances file: ${instancesPath}`);
26423
24884
  console.log(`Project directory: ${projectDir2}
26424
24885
  `);
@@ -26450,7 +24911,7 @@ Use demo mode without API key: postgres-ai mon local-install --demo`);
26450
24911
  node_name: ${instanceName}
26451
24912
  sink_type: ~sink_type~
26452
24913
  `;
26453
- fs5.appendFileSync(instancesPath, body, "utf8");
24914
+ fs4.appendFileSync(instancesPath, body, "utf8");
26454
24915
  console.log(`\u2713 Monitoring target '${instanceName}' added
26455
24916
  `);
26456
24917
  console.log("Testing connection to the added instance...");
@@ -26505,7 +24966,7 @@ You can provide either:`);
26505
24966
  node_name: ${instanceName}
26506
24967
  sink_type: ~sink_type~
26507
24968
  `;
26508
- fs5.appendFileSync(instancesPath, body, "utf8");
24969
+ fs4.appendFileSync(instancesPath, body, "utf8");
26509
24970
  console.log(`\u2713 Monitoring target '${instanceName}' added
26510
24971
  `);
26511
24972
  console.log("Testing connection to the added instance...");
@@ -26545,13 +25006,13 @@ You can provide either:`);
26545
25006
  console.log(`\u2713 Configuration updated
26546
25007
  `);
26547
25008
  console.log(opts.demo ? "Step 4: Configuring Grafana security..." : "Step 4: Configuring Grafana security...");
26548
- const cfgPath = path5.resolve(projectDir, ".pgwatch-config");
25009
+ const cfgPath = path4.resolve(projectDir, ".pgwatch-config");
26549
25010
  let grafanaPassword = "";
26550
25011
  try {
26551
- if (fs5.existsSync(cfgPath)) {
26552
- const stats = fs5.statSync(cfgPath);
25012
+ if (fs4.existsSync(cfgPath)) {
25013
+ const stats = fs4.statSync(cfgPath);
26553
25014
  if (!stats.isDirectory()) {
26554
- const content = fs5.readFileSync(cfgPath, "utf8");
25015
+ const content = fs4.readFileSync(cfgPath, "utf8");
26555
25016
  const match = content.match(/^grafana_password=([^\r\n]+)/m);
26556
25017
  if (match) {
26557
25018
  grafanaPassword = match[1].trim();
@@ -26564,15 +25025,15 @@ You can provide either:`);
26564
25025
  '`);
26565
25026
  grafanaPassword = password.trim();
26566
25027
  let configContent = "";
26567
- if (fs5.existsSync(cfgPath)) {
26568
- const stats = fs5.statSync(cfgPath);
25028
+ if (fs4.existsSync(cfgPath)) {
25029
+ const stats = fs4.statSync(cfgPath);
26569
25030
  if (!stats.isDirectory()) {
26570
- configContent = fs5.readFileSync(cfgPath, "utf8");
25031
+ configContent = fs4.readFileSync(cfgPath, "utf8");
26571
25032
  }
26572
25033
  }
26573
25034
  const lines = configContent.split(/\r?\n/).filter((l) => !/^grafana_password=/.test(l));
26574
25035
  lines.push(`grafana_password=${grafanaPassword}`);
26575
- fs5.writeFileSync(cfgPath, lines.filter(Boolean).join(`
25036
+ fs4.writeFileSync(cfgPath, lines.filter(Boolean).join(`
26576
25037
  `) + `
26577
25038
  `, "utf8");
26578
25039
  }
@@ -26686,7 +25147,7 @@ mon.command("health").description("health check for monitoring services").option
26686
25147
  if (attempt > 1) {
26687
25148
  console.log(`Retrying (attempt ${attempt}/${maxAttempts})...
26688
25149
  `);
26689
- await new Promise((resolve6) => setTimeout(resolve6, 5000));
25150
+ await new Promise((resolve5) => setTimeout(resolve5, 5000));
26690
25151
  }
26691
25152
  allHealthy = true;
26692
25153
  for (const service of services) {
@@ -26734,11 +25195,11 @@ mon.command("config").description("show monitoring services configuration").acti
26734
25195
  console.log(`Project Directory: ${projectDir}`);
26735
25196
  console.log(`Docker Compose File: ${composeFile}`);
26736
25197
  console.log(`Instances File: ${instancesFile}`);
26737
- if (fs5.existsSync(instancesFile)) {
25198
+ if (fs4.existsSync(instancesFile)) {
26738
25199
  console.log(`
26739
25200
  Instances configuration:
26740
25201
  `);
26741
- const text = fs5.readFileSync(instancesFile, "utf8");
25202
+ const text = fs4.readFileSync(instancesFile, "utf8");
26742
25203
  process.stdout.write(text);
26743
25204
  if (!/\n$/.test(text))
26744
25205
  console.log();
@@ -26753,8 +25214,8 @@ mon.command("update").description("update monitoring stack").action(async () =>
26753
25214
  console.log(`Updating PostgresAI monitoring stack...
26754
25215
  `);
26755
25216
  try {
26756
- const gitDir = path5.resolve(process.cwd(), ".git");
26757
- if (!fs5.existsSync(gitDir)) {
25217
+ const gitDir = path4.resolve(process.cwd(), ".git");
25218
+ if (!fs4.existsSync(gitDir)) {
26758
25219
  console.error("Not a git repository. Cannot update.");
26759
25220
  process.exitCode = 1;
26760
25221
  return;
@@ -26880,13 +25341,13 @@ mon.command("check").description("monitoring services system readiness check").a
26880
25341
  var targets = mon.command("targets").description("manage databases to monitor");
26881
25342
  targets.command("list").description("list monitoring target databases").action(async () => {
26882
25343
  const { instancesFile: instancesPath, projectDir } = await resolveOrInitPaths();
26883
- if (!fs5.existsSync(instancesPath)) {
25344
+ if (!fs4.existsSync(instancesPath)) {
26884
25345
  console.error(`instances.yml not found in ${projectDir}`);
26885
25346
  process.exitCode = 1;
26886
25347
  return;
26887
25348
  }
26888
25349
  try {
26889
- const content = fs5.readFileSync(instancesPath, "utf8");
25350
+ const content = fs4.readFileSync(instancesPath, "utf8");
26890
25351
  const instances = load(content);
26891
25352
  if (!instances || !Array.isArray(instances) || instances.length === 0) {
26892
25353
  console.log("No monitoring targets configured");
@@ -26935,8 +25396,8 @@ targets.command("add [connStr] [name]").description("add monitoring target datab
26935
25396
  const db = m[5];
26936
25397
  const instanceName = name && name.trim() ? name.trim() : `${host}-${db}`.replace(/[^a-zA-Z0-9-]/g, "-");
26937
25398
  try {
26938
- if (fs5.existsSync(file)) {
26939
- const content2 = fs5.readFileSync(file, "utf8");
25399
+ if (fs4.existsSync(file)) {
25400
+ const content2 = fs4.readFileSync(file, "utf8");
26940
25401
  const instances = load(content2) || [];
26941
25402
  if (Array.isArray(instances)) {
26942
25403
  const exists = instances.some((inst) => inst.name === instanceName);
@@ -26948,7 +25409,7 @@ targets.command("add [connStr] [name]").description("add monitoring target datab
26948
25409
  }
26949
25410
  }
26950
25411
  } catch (err) {
26951
- const content2 = fs5.existsSync(file) ? fs5.readFileSync(file, "utf8") : "";
25412
+ const content2 = fs4.existsSync(file) ? fs4.readFileSync(file, "utf8") : "";
26952
25413
  if (new RegExp(`^- name: ${instanceName}$`, "m").test(content2)) {
26953
25414
  console.error(`Monitoring target '${instanceName}' already exists`);
26954
25415
  process.exitCode = 1;
@@ -26967,20 +25428,20 @@ targets.command("add [connStr] [name]").description("add monitoring target datab
26967
25428
  node_name: ${instanceName}
26968
25429
  sink_type: ~sink_type~
26969
25430
  `;
26970
- const content = fs5.existsSync(file) ? fs5.readFileSync(file, "utf8") : "";
26971
- fs5.appendFileSync(file, (content && !/\n$/.test(content) ? `
25431
+ const content = fs4.existsSync(file) ? fs4.readFileSync(file, "utf8") : "";
25432
+ fs4.appendFileSync(file, (content && !/\n$/.test(content) ? `
26972
25433
  ` : "") + body, "utf8");
26973
25434
  console.log(`Monitoring target '${instanceName}' added`);
26974
25435
  });
26975
25436
  targets.command("remove <name>").description("remove monitoring target database").action(async (name) => {
26976
25437
  const { instancesFile: file } = await resolveOrInitPaths();
26977
- if (!fs5.existsSync(file)) {
25438
+ if (!fs4.existsSync(file)) {
26978
25439
  console.error("instances.yml not found");
26979
25440
  process.exitCode = 1;
26980
25441
  return;
26981
25442
  }
26982
25443
  try {
26983
- const content = fs5.readFileSync(file, "utf8");
25444
+ const content = fs4.readFileSync(file, "utf8");
26984
25445
  const instances = load(content);
26985
25446
  if (!instances || !Array.isArray(instances)) {
26986
25447
  console.error("Invalid instances.yml format");
@@ -26993,7 +25454,7 @@ targets.command("remove <name>").description("remove monitoring target database"
26993
25454
  process.exitCode = 1;
26994
25455
  return;
26995
25456
  }
26996
- fs5.writeFileSync(file, dump(filtered), "utf8");
25457
+ fs4.writeFileSync(file, dump(filtered), "utf8");
26997
25458
  console.log(`Monitoring target '${name}' removed`);
26998
25459
  } catch (err) {
26999
25460
  const message = err instanceof Error ? err.message : String(err);
@@ -27003,13 +25464,13 @@ targets.command("remove <name>").description("remove monitoring target database"
27003
25464
  });
27004
25465
  targets.command("test <name>").description("test monitoring target database connectivity").action(async (name) => {
27005
25466
  const { instancesFile: instancesPath } = await resolveOrInitPaths();
27006
- if (!fs5.existsSync(instancesPath)) {
25467
+ if (!fs4.existsSync(instancesPath)) {
27007
25468
  console.error("instances.yml not found");
27008
25469
  process.exitCode = 1;
27009
25470
  return;
27010
25471
  }
27011
25472
  try {
27012
- const content = fs5.readFileSync(instancesPath, "utf8");
25473
+ const content = fs4.readFileSync(instancesPath, "utf8");
27013
25474
  const instances = load(content);
27014
25475
  if (!instances || !Array.isArray(instances)) {
27015
25476
  console.error("Invalid instances.yml format");
@@ -27053,7 +25514,6 @@ auth.command("login", { isDefault: true }).description("authenticate via browser
27053
25514
  return;
27054
25515
  }
27055
25516
  writeConfig({ apiKey: trimmedKey });
27056
- deleteConfigKeys(["orgId", "defaultProject"]);
27057
25517
  console.log(`API key saved to ${getConfigPath()}`);
27058
25518
  return;
27059
25519
  }
@@ -27071,7 +25531,7 @@ auth.command("login", { isDefault: true }).description("authenticate via browser
27071
25531
  console.log("Starting local callback server...");
27072
25532
  const requestedPort = opts.port || 0;
27073
25533
  const callbackServer = createCallbackServer(requestedPort, params.state, 120000);
27074
- await new Promise((resolve6) => setTimeout(resolve6, 100));
25534
+ await new Promise((resolve5) => setTimeout(resolve5, 100));
27075
25535
  const actualPort = callbackServer.getPort();
27076
25536
  const redirectUri = `http://localhost:${actualPort}/callback`;
27077
25537
  console.log(`Callback server listening on port ${actualPort}`);
@@ -27196,7 +25656,6 @@ Please verify the --api-base-url parameter.`);
27196
25656
  baseUrl: apiBaseUrl,
27197
25657
  orgId
27198
25658
  });
27199
- deleteConfigKeys(["defaultProject"]);
27200
25659
  console.log(`
27201
25660
  Authentication successful!`);
27202
25661
  console.log(`API key saved to: ${getConfigPath()}`);
@@ -27245,15 +25704,15 @@ To authenticate, run: pgai auth`);
27245
25704
  });
27246
25705
  auth.command("remove-key").description("remove API key").action(async () => {
27247
25706
  const newConfigPath = getConfigPath();
27248
- const hasNewConfig = fs5.existsSync(newConfigPath);
25707
+ const hasNewConfig = fs4.existsSync(newConfigPath);
27249
25708
  let legacyPath;
27250
25709
  try {
27251
25710
  const { projectDir } = await resolveOrInitPaths();
27252
- legacyPath = path5.resolve(projectDir, ".pgwatch-config");
25711
+ legacyPath = path4.resolve(projectDir, ".pgwatch-config");
27253
25712
  } catch {
27254
- legacyPath = path5.resolve(process.cwd(), ".pgwatch-config");
25713
+ legacyPath = path4.resolve(process.cwd(), ".pgwatch-config");
27255
25714
  }
27256
- const hasLegacyConfig = fs5.existsSync(legacyPath) && fs5.statSync(legacyPath).isFile();
25715
+ const hasLegacyConfig = fs4.existsSync(legacyPath) && fs4.statSync(legacyPath).isFile();
27257
25716
  if (!hasNewConfig && !hasLegacyConfig) {
27258
25717
  console.log("No API key configured");
27259
25718
  return;
@@ -27263,11 +25722,11 @@ auth.command("remove-key").description("remove API key").action(async () => {
27263
25722
  }
27264
25723
  if (hasLegacyConfig) {
27265
25724
  try {
27266
- const content = fs5.readFileSync(legacyPath, "utf8");
25725
+ const content = fs4.readFileSync(legacyPath, "utf8");
27267
25726
  const filtered = content.split(/\r?\n/).filter((l) => !/^api_key=/.test(l)).join(`
27268
25727
  `).replace(/\n+$/g, `
27269
25728
  `);
27270
- fs5.writeFileSync(legacyPath, filtered, "utf8");
25729
+ fs4.writeFileSync(legacyPath, filtered, "utf8");
27271
25730
  } catch (err) {
27272
25731
  console.warn(`Warning: Could not update legacy config: ${err instanceof Error ? err.message : String(err)}`);
27273
25732
  }
@@ -27278,7 +25737,7 @@ To authenticate again, run: pgai auth`);
27278
25737
  });
27279
25738
  mon.command("generate-grafana-password").description("generate Grafana password for monitoring services").action(async () => {
27280
25739
  const { projectDir } = await resolveOrInitPaths();
27281
- const cfgPath = path5.resolve(projectDir, ".pgwatch-config");
25740
+ const cfgPath = path4.resolve(projectDir, ".pgwatch-config");
27282
25741
  try {
27283
25742
  const { stdout: password } = await execPromise(`openssl rand -base64 12 | tr -d '
27284
25743
  '`);
@@ -27289,17 +25748,17 @@ mon.command("generate-grafana-password").description("generate Grafana password
27289
25748
  return;
27290
25749
  }
27291
25750
  let configContent = "";
27292
- if (fs5.existsSync(cfgPath)) {
27293
- const stats = fs5.statSync(cfgPath);
25751
+ if (fs4.existsSync(cfgPath)) {
25752
+ const stats = fs4.statSync(cfgPath);
27294
25753
  if (stats.isDirectory()) {
27295
25754
  console.error(".pgwatch-config is a directory, expected a file. Skipping read.");
27296
25755
  } else {
27297
- configContent = fs5.readFileSync(cfgPath, "utf8");
25756
+ configContent = fs4.readFileSync(cfgPath, "utf8");
27298
25757
  }
27299
25758
  }
27300
25759
  const lines = configContent.split(/\r?\n/).filter((l) => !/^grafana_password=/.test(l));
27301
25760
  lines.push(`grafana_password=${newPassword}`);
27302
- fs5.writeFileSync(cfgPath, lines.filter(Boolean).join(`
25761
+ fs4.writeFileSync(cfgPath, lines.filter(Boolean).join(`
27303
25762
  `) + `
27304
25763
  `, "utf8");
27305
25764
  console.log("\u2713 New Grafana password generated and saved");
@@ -27321,19 +25780,19 @@ Note: This command requires 'openssl' to be installed`);
27321
25780
  });
27322
25781
  mon.command("show-grafana-credentials").description("show Grafana credentials for monitoring services").action(async () => {
27323
25782
  const { projectDir } = await resolveOrInitPaths();
27324
- const cfgPath = path5.resolve(projectDir, ".pgwatch-config");
27325
- if (!fs5.existsSync(cfgPath)) {
25783
+ const cfgPath = path4.resolve(projectDir, ".pgwatch-config");
25784
+ if (!fs4.existsSync(cfgPath)) {
27326
25785
  console.error("Configuration file not found. Run 'postgres-ai mon local-install' first.");
27327
25786
  process.exitCode = 1;
27328
25787
  return;
27329
25788
  }
27330
- const stats = fs5.statSync(cfgPath);
25789
+ const stats = fs4.statSync(cfgPath);
27331
25790
  if (stats.isDirectory()) {
27332
25791
  console.error(".pgwatch-config is a directory, expected a file. Cannot read credentials.");
27333
25792
  process.exitCode = 1;
27334
25793
  return;
27335
25794
  }
27336
- const content = fs5.readFileSync(cfgPath, "utf8");
25795
+ const content = fs4.readFileSync(cfgPath, "utf8");
27337
25796
  const lines = content.split(/\r?\n/);
27338
25797
  let password = "";
27339
25798
  for (const line of lines) {
@@ -27517,29 +25976,29 @@ mcp.command("install [client]").description("install MCP server configuration fo
27517
25976
  let configDir;
27518
25977
  switch (client) {
27519
25978
  case "cursor":
27520
- configPath = path5.join(homeDir, ".cursor", "mcp.json");
27521
- configDir = path5.dirname(configPath);
25979
+ configPath = path4.join(homeDir, ".cursor", "mcp.json");
25980
+ configDir = path4.dirname(configPath);
27522
25981
  break;
27523
25982
  case "windsurf":
27524
- configPath = path5.join(homeDir, ".windsurf", "mcp.json");
27525
- configDir = path5.dirname(configPath);
25983
+ configPath = path4.join(homeDir, ".windsurf", "mcp.json");
25984
+ configDir = path4.dirname(configPath);
27526
25985
  break;
27527
25986
  case "codex":
27528
- configPath = path5.join(homeDir, ".codex", "mcp.json");
27529
- configDir = path5.dirname(configPath);
25987
+ configPath = path4.join(homeDir, ".codex", "mcp.json");
25988
+ configDir = path4.dirname(configPath);
27530
25989
  break;
27531
25990
  default:
27532
25991
  console.error(`Configuration not implemented for: ${client}`);
27533
25992
  process.exitCode = 1;
27534
25993
  return;
27535
25994
  }
27536
- if (!fs5.existsSync(configDir)) {
27537
- fs5.mkdirSync(configDir, { recursive: true });
25995
+ if (!fs4.existsSync(configDir)) {
25996
+ fs4.mkdirSync(configDir, { recursive: true });
27538
25997
  }
27539
25998
  let config2 = { mcpServers: {} };
27540
- if (fs5.existsSync(configPath)) {
25999
+ if (fs4.existsSync(configPath)) {
27541
26000
  try {
27542
- const content = fs5.readFileSync(configPath, "utf8");
26001
+ const content = fs4.readFileSync(configPath, "utf8");
27543
26002
  config2 = JSON.parse(content);
27544
26003
  if (!config2.mcpServers) {
27545
26004
  config2.mcpServers = {};
@@ -27552,7 +26011,7 @@ mcp.command("install [client]").description("install MCP server configuration fo
27552
26011
  command: pgaiPath,
27553
26012
  args: ["mcp", "start"]
27554
26013
  };
27555
- fs5.writeFileSync(configPath, JSON.stringify(config2, null, 2), "utf8");
26014
+ fs4.writeFileSync(configPath, JSON.stringify(config2, null, 2), "utf8");
27556
26015
  console.log(`\u2713 PostgresAI MCP server configured for ${client}`);
27557
26016
  console.log(` Config file: ${configPath}`);
27558
26017
  console.log("");