postgresai 0.14.0-dev.54 → 0.14.0-dev.56

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -303,17 +303,24 @@ Normalization:
 
  ### Examples
 
- Linux/macOS (bash/zsh):
+ For production (uses default URLs):
 
  ```bash
+ # Production auth - uses console.postgres.ai by default
+ postgresai auth --debug
+ ```
+
+ For staging/development environments:
+
+ ```bash
+ # Linux/macOS (bash/zsh)
  export PGAI_API_BASE_URL=https://v2.postgres.ai/api/general/
  export PGAI_UI_BASE_URL=https://console-dev.postgres.ai
  postgresai auth --debug
  ```
 
- Windows PowerShell:
-
  ```powershell
+ # Windows PowerShell
  $env:PGAI_API_BASE_URL = "https://v2.postgres.ai/api/general/"
  $env:PGAI_UI_BASE_URL = "https://console-dev.postgres.ai"
  postgresai auth --debug
@@ -330,6 +337,27 @@ postgresai auth --debug \
  Notes:
  - If `PGAI_UI_BASE_URL` is not set, the default is `https://console.postgres.ai`.
 
+ ## Development
+
+ ### Testing
+
+ The CLI uses [Bun](https://bun.sh/) as the test runner with built-in coverage reporting.
+
+ ```bash
+ # Run tests with coverage (default)
+ bun run test
+
+ # Run tests without coverage (faster iteration during development)
+ bun run test:fast
+
+ # Run tests with coverage and show report location
+ bun run test:coverage
+ ```
+
+ Coverage configuration is in `bunfig.toml`. Reports are generated in `coverage/` directory:
+ - `coverage/lcov-report/index.html` - HTML coverage report
+ - `coverage/lcov.info` - LCOV format for CI integration
+
  ## Requirements
 
  - Node.js 18 or higher
@@ -1185,17 +1185,33 @@ mon
 
  // Update .env with custom tag if provided
  const envFile = path.resolve(projectDir, ".env");
- const imageTag = opts.tag || pkg.version;
 
- // Build .env content
- const envLines: string[] = [`PGAI_TAG=${imageTag}`];
- // Preserve GF_SECURITY_ADMIN_PASSWORD if it exists
+ // Build .env content, preserving important existing values
+ // Read existing .env first to preserve CI/custom settings
+ let existingTag: string | null = null;
+ let existingRegistry: string | null = null;
+ let existingPassword: string | null = null;
+
  if (fs.existsSync(envFile)) {
  const existingEnv = fs.readFileSync(envFile, "utf8");
+ // Extract existing values
+ const tagMatch = existingEnv.match(/^PGAI_TAG=(.+)$/m);
+ if (tagMatch) existingTag = tagMatch[1].trim();
+ const registryMatch = existingEnv.match(/^PGAI_REGISTRY=(.+)$/m);
+ if (registryMatch) existingRegistry = registryMatch[1].trim();
  const pwdMatch = existingEnv.match(/^GF_SECURITY_ADMIN_PASSWORD=(.+)$/m);
- if (pwdMatch) {
- envLines.push(`GF_SECURITY_ADMIN_PASSWORD=${pwdMatch[1]}`);
- }
+ if (pwdMatch) existingPassword = pwdMatch[1].trim();
+ }
+
+ // Priority: CLI --tag flag > existing .env > package version
+ const imageTag = opts.tag || existingTag || pkg.version;
+
+ const envLines: string[] = [`PGAI_TAG=${imageTag}`];
+ if (existingRegistry) {
+ envLines.push(`PGAI_REGISTRY=${existingRegistry}`);
+ }
+ if (existingPassword) {
+ envLines.push(`GF_SECURITY_ADMIN_PASSWORD=${existingPassword}`);
  }
  fs.writeFileSync(envFile, envLines.join("\n") + "\n", { encoding: "utf8", mode: 0o600 });
 
@@ -2102,7 +2118,8 @@ auth
  }
 
  // Step 3: Open browser
- const authUrl = `${uiBaseUrl}/cli/auth?state=${encodeURIComponent(params.state)}&code_challenge=${encodeURIComponent(params.codeChallenge)}&code_challenge_method=S256&redirect_uri=${encodeURIComponent(redirectUri)}`;
+ // Pass api_url so UI calls oauth_approve on the same backend where oauth_init created the session
+ const authUrl = `${uiBaseUrl}/cli/auth?state=${encodeURIComponent(params.state)}&code_challenge=${encodeURIComponent(params.codeChallenge)}&code_challenge_method=S256&redirect_uri=${encodeURIComponent(redirectUri)}&api_url=${encodeURIComponent(apiBaseUrl)}`;
 
  if (opts.debug) {
  console.log(`Debug: Auth URL: ${authUrl}`);
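The added `api_url` parameter rides alongside the existing PKCE values (`state`, `code_challenge`, method `S256`). For orientation only, here is a minimal sketch of how such values are typically produced; the names and byte sizes are illustrative assumptions, not the package's actual implementation:

```ts
// Hypothetical PKCE value generation (assumption, not postgresai's code)
import { createHash, randomBytes } from "crypto";

// Base64url without padding, as expected for PKCE values
function base64url(buf: Buffer): string {
  return buf.toString("base64").replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, "");
}

const state = base64url(randomBytes(32));        // CSRF token, echoed back by the UI
const codeVerifier = base64url(randomBytes(32)); // kept locally until token exchange
const codeChallenge = base64url(                 // sent as code_challenge with method S256
  createHash("sha256").update(codeVerifier).digest()
);
```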
package/bunfig.toml CHANGED
@@ -6,6 +6,14 @@
  # Integration tests that connect to databases need longer timeouts
  timeout = 30000
 
- # Coverage settings (if needed in future)
- # coverage = true
- # coverageDir = "coverage"
+ # Coverage settings - enabled by default for test runs
+ coverage = true
+ coverageDir = "coverage"
+
+ # Skip coverage for test files and node_modules
+ coverageSkipTestFiles = true
+
+ # Reporter format for CI integration
+ # - text: console output with summary table
+ # - lcov: standard format for coverage tools
+ coverageReporter = ["text", "lcov"]
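The `lcov` reporter writes `coverage/lcov.info`, which CI can consume. As a hypothetical illustration only (not shipped with the package), total line coverage can be derived from the standard LCOV `LF:`/`LH:` records:

```ts
// Hypothetical CI helper: sum line coverage from an LCOV file (assumption, not part of postgresai)
import { readFileSync } from "fs";

function lineCoverage(lcovPath: string): number {
  let found = 0; // sum of LF: records (instrumented lines)
  let hit = 0;   // sum of LH: records (lines executed at least once)
  for (const line of readFileSync(lcovPath, "utf8").split("\n")) {
    if (line.startsWith("LF:")) found += Number(line.slice(3));
    else if (line.startsWith("LH:")) hit += Number(line.slice(3));
  }
  return found === 0 ? 100 : (hit / found) * 100;
}

console.log(`line coverage: ${lineCoverage("coverage/lcov.info").toFixed(1)}%`);
```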
@@ -13064,7 +13064,7 @@ var {
  // package.json
  var package_default = {
  name: "postgresai",
- version: "0.14.0-dev.54",
+ version: "0.14.0-dev.56",
  description: "postgres_ai CLI",
  license: "Apache-2.0",
  private: false,
@@ -13090,12 +13090,14 @@ var package_default = {
  },
  scripts: {
  "embed-metrics": "bun run scripts/embed-metrics.ts",
- build: `bun run embed-metrics && bun build ./bin/postgres-ai.ts --outdir ./dist/bin --target node && node -e "const fs=require('fs');const f='./dist/bin/postgres-ai.js';fs.writeFileSync(f,fs.readFileSync(f,'utf8').replace('#!/usr/bin/env bun','#!/usr/bin/env node'))"`,
+ build: `bun run embed-metrics && bun build ./bin/postgres-ai.ts --outdir ./dist/bin --target node && node -e "const fs=require('fs');const f='./dist/bin/postgres-ai.js';fs.writeFileSync(f,fs.readFileSync(f,'utf8').replace('#!/usr/bin/env bun','#!/usr/bin/env node'))" && cp -r ./sql ./dist/sql`,
  prepublishOnly: "npm run build",
  start: "bun ./bin/postgres-ai.ts --help",
  "start:node": "node ./dist/bin/postgres-ai.js --help",
  dev: "bun run embed-metrics && bun --watch ./bin/postgres-ai.ts",
  test: "bun run embed-metrics && bun test",
+ "test:fast": "bun run embed-metrics && bun test --coverage=false",
+ "test:coverage": "bun run embed-metrics && bun test --coverage && echo 'Coverage report: cli/coverage/lcov-report/index.html'",
  typecheck: "bun run embed-metrics && bunx tsc --noEmit"
  },
  dependencies: {
@@ -15885,7 +15887,7 @@ var Result = import_lib.default.Result;
  var TypeOverrides = import_lib.default.TypeOverrides;
  var defaults = import_lib.default.defaults;
  // package.json
- var version = "0.14.0-dev.54";
+ var version = "0.14.0-dev.56";
  var package_default2 = {
  name: "postgresai",
  version,
@@ -15914,12 +15916,14 @@ var package_default2 = {
  },
  scripts: {
  "embed-metrics": "bun run scripts/embed-metrics.ts",
- build: `bun run embed-metrics && bun build ./bin/postgres-ai.ts --outdir ./dist/bin --target node && node -e "const fs=require('fs');const f='./dist/bin/postgres-ai.js';fs.writeFileSync(f,fs.readFileSync(f,'utf8').replace('#!/usr/bin/env bun','#!/usr/bin/env node'))"`,
+ build: `bun run embed-metrics && bun build ./bin/postgres-ai.ts --outdir ./dist/bin --target node && node -e "const fs=require('fs');const f='./dist/bin/postgres-ai.js';fs.writeFileSync(f,fs.readFileSync(f,'utf8').replace('#!/usr/bin/env bun','#!/usr/bin/env node'))" && cp -r ./sql ./dist/sql`,
  prepublishOnly: "npm run build",
  start: "bun ./bin/postgres-ai.ts --help",
  "start:node": "node ./dist/bin/postgres-ai.js --help",
  dev: "bun run embed-metrics && bun --watch ./bin/postgres-ai.ts",
  test: "bun run embed-metrics && bun test",
+ "test:fast": "bun run embed-metrics && bun test --coverage=false",
+ "test:coverage": "bun run embed-metrics && bun test --coverage && echo 'Coverage report: cli/coverage/lcov-report/index.html'",
  typecheck: "bun run embed-metrics && bunx tsc --noEmit"
  },
  dependencies: {
@@ -23470,10 +23474,9 @@ function resolveBaseUrls2(opts, cfg, defaults2 = {}) {
 
  // lib/init.ts
  import { randomBytes } from "crypto";
- import { URL as URL2 } from "url";
+ import { URL as URL2, fileURLToPath } from "url";
  import * as fs3 from "fs";
  import * as path3 from "path";
- var __dirname = "/builds/postgres-ai/postgres_ai/cli/lib";
  var DEFAULT_MONITORING_USER = "postgres_ai_mon";
  function sslModeToConfig(mode) {
  if (mode.toLowerCase() === "disable")
@@ -23554,9 +23557,11 @@ async function connectWithSslFallback(ClientClass, adminConn, verbose) {
  }
  }
  function sqlDir() {
+ const currentFile = fileURLToPath(import.meta.url);
+ const currentDir = path3.dirname(currentFile);
  const candidates = [
- path3.resolve(__dirname, "..", "sql"),
- path3.resolve(__dirname, "..", "..", "sql")
+ path3.resolve(currentDir, "..", "sql"),
+ path3.resolve(currentDir, "..", "..", "sql")
  ];
  for (const candidate of candidates) {
  if (fs3.existsSync(candidate)) {
@@ -24513,6 +24518,7 @@ where
  quote_ident(pci.relname) as tag_index_name,
  quote_ident(pct.relname) as tag_table_name,
  coalesce(nullif(quote_ident(pn.nspname), 'public') || '.', '') || quote_ident(pct.relname) as tag_relation_name,
+ pg_get_indexdef(pidx.indexrelid) as index_definition,
  pg_relation_size(pidx.indexrelid) index_size_bytes,
  ((
  select count(1)
@@ -25150,6 +25156,7 @@ async function getInvalidIndexes(client, pgMajorVersion = 16) {
  relation_name: String(transformed.relation_name || ""),
  index_size_bytes: indexSizeBytes,
  index_size_pretty: formatBytes(indexSizeBytes),
+ index_definition: String(transformed.index_definition || ""),
  supports_fk: toBool(transformed.supports_fk)
  };
  });
@@ -26723,14 +26730,28 @@ mon.command("local-install").description("install local monitoring stack (genera
  console.log(`Project directory: ${projectDir}
  `);
  const envFile = path5.resolve(projectDir, ".env");
- const imageTag = opts.tag || package_default.version;
- const envLines = [`PGAI_TAG=${imageTag}`];
+ let existingTag = null;
+ let existingRegistry = null;
+ let existingPassword = null;
  if (fs5.existsSync(envFile)) {
  const existingEnv = fs5.readFileSync(envFile, "utf8");
+ const tagMatch = existingEnv.match(/^PGAI_TAG=(.+)$/m);
+ if (tagMatch)
+ existingTag = tagMatch[1].trim();
+ const registryMatch = existingEnv.match(/^PGAI_REGISTRY=(.+)$/m);
+ if (registryMatch)
+ existingRegistry = registryMatch[1].trim();
  const pwdMatch = existingEnv.match(/^GF_SECURITY_ADMIN_PASSWORD=(.+)$/m);
- if (pwdMatch) {
- envLines.push(`GF_SECURITY_ADMIN_PASSWORD=${pwdMatch[1]}`);
- }
+ if (pwdMatch)
+ existingPassword = pwdMatch[1].trim();
+ }
+ const imageTag = opts.tag || existingTag || package_default.version;
+ const envLines = [`PGAI_TAG=${imageTag}`];
+ if (existingRegistry) {
+ envLines.push(`PGAI_REGISTRY=${existingRegistry}`);
+ }
+ if (existingPassword) {
+ envLines.push(`GF_SECURITY_ADMIN_PASSWORD=${existingPassword}`);
  }
  fs5.writeFileSync(envFile, envLines.join(`
  `) + `
@@ -27535,7 +27556,7 @@ Please verify the --api-base-url parameter.`);
  process.exit(1);
  return;
  }
- const authUrl = `${uiBaseUrl}/cli/auth?state=${encodeURIComponent(params.state)}&code_challenge=${encodeURIComponent(params.codeChallenge)}&code_challenge_method=S256&redirect_uri=${encodeURIComponent(redirectUri)}`;
+ const authUrl = `${uiBaseUrl}/cli/auth?state=${encodeURIComponent(params.state)}&code_challenge=${encodeURIComponent(params.codeChallenge)}&code_challenge_method=S256&redirect_uri=${encodeURIComponent(redirectUri)}&api_url=${encodeURIComponent(apiBaseUrl)}`;
  if (opts.debug) {
  console.log(`Debug: Auth URL: ${authUrl}`);
  }
@@ -0,0 +1,16 @@
+ -- Role creation / password update (template-filled by cli/lib/init.ts)
+ --
+ -- Always uses a race-safe pattern (create if missing, then always alter to set the password):
+ -- do $$ begin
+ -- if not exists (select 1 from pg_catalog.pg_roles where rolname = '...') then
+ -- begin
+ -- create user "..." with password '...';
+ -- exception when duplicate_object then
+ -- null;
+ -- end;
+ -- end if;
+ -- alter user "..." with password '...';
+ -- end $$;
+ {{ROLE_STMT}}
+
+
@@ -0,0 +1,37 @@
+ -- Required permissions for postgres_ai monitoring user (template-filled by cli/lib/init.ts)
+
+ -- Allow connect
+ grant connect on database {{DB_IDENT}} to {{ROLE_IDENT}};
+
+ -- Standard monitoring privileges
+ grant pg_monitor to {{ROLE_IDENT}};
+ grant select on pg_catalog.pg_index to {{ROLE_IDENT}};
+
+ -- Create postgres_ai schema for our objects
+ create schema if not exists postgres_ai;
+ grant usage on schema postgres_ai to {{ROLE_IDENT}};
+
+ -- For bloat analysis: expose pg_statistic via a view
+ create or replace view postgres_ai.pg_statistic as
+ select
+ n.nspname as schemaname,
+ c.relname as tablename,
+ a.attname,
+ s.stanullfrac as null_frac,
+ s.stawidth as avg_width,
+ false as inherited
+ from pg_catalog.pg_statistic s
+ join pg_catalog.pg_class c on c.oid = s.starelid
+ join pg_catalog.pg_namespace n on n.oid = c.relnamespace
+ join pg_catalog.pg_attribute a on a.attrelid = s.starelid and a.attnum = s.staattnum
+ where a.attnum > 0 and not a.attisdropped;
+
+ grant select on postgres_ai.pg_statistic to {{ROLE_IDENT}};
+
+ -- Hardened clusters sometimes revoke PUBLIC on schema public
+ grant usage on schema public to {{ROLE_IDENT}};
+
+ -- Keep search_path predictable; postgres_ai first so our objects are found
+ alter user {{ROLE_IDENT}} set search_path = postgres_ai, "$user", public, pg_catalog;
+
+
@@ -0,0 +1,6 @@
+ -- Optional permissions for RDS Postgres / Aurora (best effort)
+
+ create extension if not exists rds_tools;
+ grant execute on function rds_tools.pg_ls_multixactdir() to {{ROLE_IDENT}};
+
+
@@ -0,0 +1,8 @@
+ -- Optional permissions for self-managed Postgres (best effort)
+
+ grant execute on function pg_catalog.pg_stat_file(text) to {{ROLE_IDENT}};
+ grant execute on function pg_catalog.pg_stat_file(text, boolean) to {{ROLE_IDENT}};
+ grant execute on function pg_catalog.pg_ls_dir(text) to {{ROLE_IDENT}};
+ grant execute on function pg_catalog.pg_ls_dir(text, boolean, boolean) to {{ROLE_IDENT}};
+
+
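The new SQL files are templates whose comments say the `{{ROLE_IDENT}}`, `{{DB_IDENT}}`, and `{{ROLE_STMT}}` placeholders are filled by `cli/lib/init.ts`. As a rough sketch of how such substitution could look — the helper names and the inline template below are hypothetical, not the package's API:

```ts
// Hypothetical placeholder substitution for the SQL templates above
// (illustrative only; the real logic lives in cli/lib/init.ts).
function quoteIdent(name: string): string {
  // Double-quote and escape embedded quotes, Postgres-style
  return `"${name.replace(/"/g, '""')}"`;
}

function fillTemplate(sql: string, values: Record<string, string>): string {
  // Replace {{KEY}} tokens; leave unknown tokens untouched
  return sql.replace(/\{\{(\w+)\}\}/g, (match: string, key: string) => values[key] ?? match);
}

// Usage sketch with a tiny inline template and the monitoring role from the diff
const grantTemplateSql = "grant connect on database {{DB_IDENT}} to {{ROLE_IDENT}};";
const rendered = fillTemplate(grantTemplateSql, {
  ROLE_IDENT: quoteIdent("postgres_ai_mon"),
  DB_IDENT: quoteIdent("mydb"), // hypothetical target database name
});
console.log(rendered);
```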