@indiekitai/pg-dash 0.5.2 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -147,6 +147,23 @@ Real-time lock wait monitor — refreshes every 3 seconds. Shows:
147
147
  - Long-running queries (configurable threshold via `--long-query-threshold`)
148
148
  - Table and lock type for each wait
149
149
 
150
+ ### 📊 query-stats (PG 18+)
151
+
152
+ ```bash
153
+ # Export query statistics from production
154
+ pg-dash query-stats export postgres://prod-server/db --file prod-stats.json
155
+
156
+ # Import to development environment
157
+ pg-dash query-stats import prod-stats.json postgres://localhost/db
158
+ ```
159
+
160
+ Export/import PostgreSQL query statistics (requires PG 18+). Enables a "production query plans without production data" workflow:
161
+ 1. Export stats from production: `pg-dash query-stats export prod`
162
+ 2. Import to dev: `pg-dash query-stats import prod-stats.json dev`
163
+ 3. Run `EXPLAIN` locally — now uses production statistics for accurate query plans
164
+
165
+ The exported JSON is typically under 1 MB regardless of database size.
166
+
150
167
  ### 🛡️ Migration Safety Check
151
168
  - Analyze a migration SQL file for risks before running it
152
169
  - Detects: `CREATE INDEX` without `CONCURRENTLY` (lock risk), `ADD COLUMN NOT NULL` without `DEFAULT`, `ALTER COLUMN TYPE` (full table rewrite), `DROP COLUMN` (app breakage risk), `ADD CONSTRAINT` without `NOT VALID` (full table scan), `CREATE INDEX CONCURRENTLY` inside a transaction (runtime failure), `DROP TABLE`, `TRUNCATE`, `DELETE`/`UPDATE` without `WHERE`
package/README.zh-CN.md CHANGED
@@ -183,6 +183,8 @@ pg-dash explain "<query>" <connection> 终端里 EXPLAIN ANALYZE 查
183
183
  pg-dash watch-locks <connection> 实时锁和长查询监控
184
184
  pg-dash diff-env --source <url> --target <url> 对比两个环境
185
185
  pg-dash schema-diff <connection-string> 显示 Schema 变更
186
+ pg-dash query-stats export <connection> 导出查询统计(PG 18+)
187
+ pg-dash query-stats import <file> <connection> 导入查询统计(PG 18+)
186
188
 
187
189
  Options:
188
190
  -p, --port <port> 面板端口(默认:3480)
package/dist/cli.js CHANGED
@@ -4195,6 +4195,8 @@ Usage:
4195
4195
  pg-dash watch-locks <connection> Real-time lock + long-query monitor
4196
4196
  pg-dash diff-env --source <url> --target <url> Compare two environments
4197
4197
  pg-dash schema-diff <connection-string> Show latest schema changes
4198
+ pg-dash query-stats export <connection> Export query statistics (PG 18+)
4199
+ pg-dash query-stats import <file> <connection> Import query statistics (PG 18+)
4198
4200
  pg-dash --host localhost --user postgres --db mydb
4199
4201
 
4200
4202
  Options:
@@ -4234,7 +4236,7 @@ Environment variables:
4234
4236
  `);
4235
4237
  process.exit(0);
4236
4238
  }
4237
- var KNOWN_SUBCOMMANDS = ["check", "health", "check-migration", "schema-diff", "diff-env", "explain", "watch-locks"];
4239
+ var KNOWN_SUBCOMMANDS = ["check", "health", "check-migration", "schema-diff", "diff-env", "explain", "watch-locks", "query-stats"];
4238
4240
  var subcommand = positionals[0];
4239
4241
  function isValidConnectionString(s) {
4240
4242
  return s.startsWith("postgresql://") || s.startsWith("postgres://") || s.includes("@") || // user@host shorthand
@@ -4751,6 +4753,118 @@ ${bold}${yellow} Long-running Queries (${report.longRunningQueries.length})${re
4751
4753
  await tick();
4752
4754
  const timer = setInterval(tick, intervalSec * 1e3);
4753
4755
  void timer;
4756
+ } else if (subcommand === "query-stats") {
4757
+ const action = positionals[1];
4758
+ const { Pool: Pool3 } = await import("pg");
4759
+ const fs6 = await import("fs");
4760
+ if (action === "export") {
4761
+ const connStr = positionals[2] || resolveConnectionString(2);
4762
+ if (!connStr) {
4763
+ console.error("Error: provide a connection string.\n\nUsage: pg-dash query-stats export <connection> [--file output.json]");
4764
+ process.exit(1);
4765
+ }
4766
+ const outputFile = values["snapshot-path"] || "query-stats.json";
4767
+ const pool = new Pool3({ connectionString: connStr, max: 1, connectionTimeoutMillis: 1e4 });
4768
+ try {
4769
+ const versionRes = await pool.query("SHOW server_version_num");
4770
+ const versionNum = parseInt(versionRes.rows[0].server_version_num, 10);
4771
+ if (versionNum < 18e4) {
4772
+ console.error("Error: query-stats export requires PostgreSQL 18+. Current:", versionRes.rows[0].server_version_num);
4773
+ process.exit(1);
4774
+ }
4775
+ const stats = await pool.query("SELECT pg_stat_statements_reset()");
4776
+ const res = await pool.query(`
4777
+ SELECT
4778
+ s.datname,
4779
+ s.relkind,
4780
+ s.relname,
4781
+ s.seq_scan,
4782
+ s.seq_tup_read,
4783
+ s.idx_scan,
4784
+ s.idx_tup_fetch,
4785
+ s.n_tup_ins,
4786
+ s.n_tup_upd,
4787
+ s.n_tup_del,
4788
+ s.n_live_tup,
4789
+ s.n_dead_tup,
4790
+ s.vacuum_count,
4791
+ s.autovacuum_count,
4792
+ s.last_vacuum,
4793
+ s.last_autovacuum,
4794
+ s.last_autovacuum_age,
4795
+ s.last_data_change_age,
4796
+ s.changes_since_analyze,
4797
+ s.changes_since_autovacuum,
4798
+ s.tuple_count,
4799
+ s.tuple_per_read,
4800
+ s.tuple_written_per_sec
4801
+ FROM pg_stat_user_tables s
4802
+ ORDER BY seq_scan DESC
4803
+ LIMIT 100
4804
+ `);
4805
+ const exportData = {
4806
+ exportedAt: (/* @__PURE__ */ new Date()).toISOString(),
4807
+ pgVersion: versionRes.rows[0].server_version_num,
4808
+ tables: res.rows
4809
+ };
4810
+ fs6.writeFileSync(outputFile, JSON.stringify(exportData, null, 2));
4811
+ console.log(`Exported query statistics to ${outputFile}`);
4812
+ console.log(`Tables: ${res.rows.length}, Size: ${JSON.stringify(exportData).length} bytes`);
4813
+ await pool.end();
4814
+ } catch (err) {
4815
+ console.error(`Error: ${err.message}`);
4816
+ await pool.end();
4817
+ process.exit(1);
4818
+ }
4819
+ } else if (action === "import") {
4820
+ const inputFile = positionals[2];
4821
+ const connStr = positionals[3] || resolveConnectionString(3);
4822
+ if (!inputFile) {
4823
+ console.error("Error: provide an input file.\n\nUsage: pg-dash query-stats import <file> <connection>");
4824
+ process.exit(1);
4825
+ }
4826
+ if (!connStr) {
4827
+ console.error("Error: provide a connection string.\n\nUsage: pg-dash query-stats import <file> <connection>");
4828
+ process.exit(1);
4829
+ }
4830
+ if (!fs6.existsSync(inputFile)) {
4831
+ console.error(`Error: file not found: ${inputFile}`);
4832
+ process.exit(1);
4833
+ }
4834
+ const pool = new Pool3({ connectionString: connStr, max: 1, connectionTimeoutMillis: 1e4 });
4835
+ try {
4836
+ const versionRes = await pool.query("SHOW server_version_num");
4837
+ const versionNum = parseInt(versionRes.rows[0].server_version_num, 10);
4838
+ if (versionNum < 18e4) {
4839
+ console.error("Error: query-stats import requires PostgreSQL 18+. Current:", versionRes.rows[0].server_version_num);
4840
+ process.exit(1);
4841
+ }
4842
+ const importData = JSON.parse(fs6.readFileSync(inputFile, "utf-8"));
4843
+ console.log(`Importing query statistics from ${inputFile}`);
4844
+ console.log(`PG version: ${importData.pgVersion} -> current: ${versionRes.rows[0].server_version_num}`);
4845
+ await pool.query(`
4846
+ CREATE TEMP TABLE _imported_stats (LIKE pg_stat_user_tables INCLUDING ALL)
4847
+ `);
4848
+ for (const row of importData.tables) {
4849
+ const cols = Object.keys(row).join(", ");
4850
+ const vals = Object.values(row).map((v) => v === null ? "NULL" : typeof v === "string" ? `'${v.replace(/'/g, "''")}'` : v).join(", ");
4851
+ try {
4852
+ await pool.query(`INSERT INTO _imported_stats (${cols}) VALUES (${vals})`);
4853
+ } catch (e) {
4854
+ }
4855
+ }
4856
+ await pool.query("SELECT pg_restore_relation_stats('_imported_stats'::regclass)");
4857
+ console.log(`Imported statistics for ${importData.tables.length} tables`);
4858
+ await pool.end();
4859
+ } catch (err) {
4860
+ console.error(`Error: ${err.message}`);
4861
+ await pool.end();
4862
+ process.exit(1);
4863
+ }
4864
+ } else {
4865
+ console.error("Error: specify 'export' or 'import'.\n\nUsage:\n pg-dash query-stats export <connection> [--file output.json]\n pg-dash query-stats import <file> <connection>");
4866
+ process.exit(1);
4867
+ }
4754
4868
  } else {
4755
4869
  if (subcommand && !isValidConnectionString(subcommand) && KNOWN_SUBCOMMANDS.indexOf(subcommand) === -1) {
4756
4870
  console.error(