heroku 10.13.2 → 10.13.3-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/commands/pg/backups/cancel.js +4 -4
- package/lib/commands/pg/backups/capture.js +5 -5
- package/lib/commands/pg/backups/delete.js +2 -2
- package/lib/commands/pg/backups/download.js +3 -3
- package/lib/commands/pg/backups/index.js +2 -2
- package/lib/commands/pg/backups/info.js +3 -3
- package/lib/commands/pg/backups/restore.js +5 -5
- package/lib/commands/pg/backups/schedule.js +7 -7
- package/lib/commands/pg/backups/schedules.js +2 -2
- package/lib/commands/pg/backups/unschedule.js +6 -5
- package/lib/commands/pg/backups/url.js +3 -3
- package/lib/commands/pg/bloat.js +5 -4
- package/lib/commands/pg/blocking.js +5 -4
- package/lib/commands/pg/connection-pooling/attach.js +5 -6
- package/lib/commands/pg/copy.js +6 -7
- package/lib/commands/pg/credentials/create.js +4 -4
- package/lib/commands/pg/credentials/destroy.js +4 -4
- package/lib/commands/pg/credentials/repair-default.js +4 -4
- package/lib/commands/pg/credentials/rotate.js +4 -4
- package/lib/commands/pg/credentials/url.js +4 -4
- package/lib/commands/pg/credentials.js +4 -4
- package/lib/commands/pg/diagnose.js +6 -5
- package/lib/commands/pg/info.js +5 -3
- package/lib/commands/pg/kill.js +6 -5
- package/lib/commands/pg/killall.js +4 -4
- package/lib/commands/pg/links/create.js +10 -8
- package/lib/commands/pg/links/destroy.js +4 -4
- package/lib/commands/pg/links/index.js +14 -8
- package/lib/commands/pg/locks.js +5 -4
- package/lib/commands/pg/maintenance/index.js +4 -4
- package/lib/commands/pg/maintenance/run.js +4 -4
- package/lib/commands/pg/maintenance/window.js +4 -4
- package/lib/commands/pg/outliers.d.ts +2 -2
- package/lib/commands/pg/outliers.js +9 -7
- package/lib/commands/pg/promote.js +5 -4
- package/lib/commands/pg/ps.js +6 -5
- package/lib/commands/pg/psql.js +6 -4
- package/lib/commands/pg/pull.d.ts +1 -1
- package/lib/commands/pg/pull.js +4 -4
- package/lib/commands/pg/push.d.ts +1 -1
- package/lib/commands/pg/push.js +4 -4
- package/lib/commands/pg/reset.js +4 -4
- package/lib/commands/pg/settings/index.js +2 -2
- package/lib/commands/pg/unfollow.js +5 -5
- package/lib/commands/pg/upgrade/cancel.js +5 -5
- package/lib/commands/pg/upgrade/dryrun.js +5 -5
- package/lib/commands/pg/upgrade/index.js +5 -5
- package/lib/commands/pg/upgrade/prepare.js +5 -5
- package/lib/commands/pg/upgrade/run.js +5 -5
- package/lib/commands/pg/upgrade/wait.js +9 -4
- package/lib/commands/pg/vacuum-stats.js +5 -4
- package/lib/commands/pg/wait.js +6 -4
- package/lib/commands/run/detached.d.ts +1 -0
- package/lib/commands/run/detached.js +5 -1
- package/lib/commands/run/index.d.ts +1 -0
- package/lib/commands/run/index.js +7 -2
- package/lib/commands/run/inside.js +1 -6
- package/lib/lib/addons/resolve.d.ts +6 -7
- package/lib/lib/pg/backups.js +4 -4
- package/lib/lib/pg/fetcher.d.ts +3 -34
- package/lib/lib/pg/fetcher.js +3 -92
- package/lib/lib/pg/psql.d.ts +5 -36
- package/lib/lib/pg/psql.js +17 -195
- package/lib/lib/pg/push_pull.d.ts +2 -7
- package/lib/lib/pg/push_pull.js +9 -7
- package/lib/lib/pg/setter.js +3 -3
- package/lib/lib/pg/types.d.ts +10 -15
- package/lib/lib/pg/util.d.ts +6 -32
- package/lib/lib/pg/util.js +3 -70
- package/lib/lib/run/helpers.d.ts +10 -0
- package/lib/lib/run/helpers.js +22 -1
- package/oclif.manifest.json +142 -130
- package/package.json +4 -6
- package/lib/lib/pg/bastion.d.ts +0 -29
- package/lib/lib/pg/bastion.js +0 -121
- package/lib/lib/pg/config.d.ts +0 -2
- package/lib/lib/pg/config.js +0 -13
- package/lib/lib/pg/host.d.ts +0 -1
- package/lib/lib/pg/host.js +0 -6
package/lib/lib/pg/fetcher.js
CHANGED
@@ -1,13 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getRelease = exports.
-const api_client_1 = require("@heroku-cli/command/lib/api-client");
+exports.getRelease = exports.all = exports.arbitraryAppDB = void 0;
 const debug_1 = require("debug");
-const
-const bastion_1 = require("./bastion");
-const config_1 = require("./config");
-const color_1 = require("@heroku-cli/color");
-const util_1 = require("./util");
+const lodash_1 = require("lodash");
 const pgDebug = (0, debug_1.default)('pg');
 async function arbitraryAppDB(heroku, app) {
 // Since Postgres backups are tied to the app and not the add-on, but
@@ -28,108 +23,24 @@ function getAttachmentNamesByAddon(attachments) {
 }, {});
 }
 async function all(heroku, app_id) {
-const { uniqBy } = require('lodash');
 pgDebug(`fetching all DBs on ${app_id}`);
 const attachments = await allAttachments(heroku, app_id);
 let addons = attachments.map(a => a.addon);
 // Get the list of attachment names per addon here and add to each addon obj
 const attachmentNamesByAddon = getAttachmentNamesByAddon(attachments);
-addons = uniqBy(addons, 'id');
+addons = (0, lodash_1.uniqBy)(addons, 'id');
 addons.forEach(addon => {
 addon.attachment_names = attachmentNamesByAddon[addon.id];
 });
 return addons;
 }
 exports.all = all;
-async function matchesHelper(heroku, app, db, namespace) {
-var _a;
-(0, debug_1.default)(`fetching ${db} on ${app}`);
-const addonService = process.env.HEROKU_POSTGRESQL_ADDON_NAME || 'heroku-postgresql';
-(0, debug_1.default)(`addon service: ${addonService}`);
-try {
-const attached = await (0, resolve_1.appAttachment)(heroku, app, db, { addon_service: addonService, namespace });
-return ({ matches: [attached] });
-}
-catch (error) {
-if (error instanceof resolve_1.AmbiguousError && ((_a = error.body) === null || _a === void 0 ? void 0 : _a.id) === 'multiple_matches' && error.matches) {
-return { matches: error.matches, error };
-}
-if (error instanceof api_client_1.HerokuAPIError && error.http.statusCode === 404 && error.body && error.body.id === 'not_found') {
-return { matches: null, error };
-}
-throw error;
-}
-}
-async function getAttachment(heroku, app, db = 'DATABASE_URL', namespace = '') {
-var _a;
-const matchesOrError = await matchesHelper(heroku, app, db, namespace);
-let { matches } = matchesOrError;
-const { error } = matchesOrError;
-// happy path where the resolver matches just one
-if (matches && matches.length === 1) {
-return matches[0];
-}
-// case for 404 where there are implicit attachments
-if (!matches) {
-const appConfigMatch = /^(.+?)::(.+)/.exec(db);
-if (appConfigMatch) {
-app = appConfigMatch[1];
-db = appConfigMatch[2];
-}
-if (!db.endsWith('_URL')) {
-db += '_URL';
-}
-const [config = {}, attachments] = await Promise.all([
-(0, config_1.getConfig)(heroku, app),
-allAttachments(heroku, app),
-]);
-if (attachments.length === 0) {
-throw new Error(`${color_1.default.app(app)} has no databases`);
-}
-matches = attachments.filter(attachment => config[db] && config[db] === config[(0, util_1.getConfigVarName)(attachment.config_vars)]);
-if (matches.length === 0) {
-const validOptions = attachments.map(attachment => (0, util_1.getConfigVarName)(attachment.config_vars));
-throw new Error(`Unknown database: ${db}. Valid options are: ${validOptions.join(', ')}`);
-}
-}
-// case for multiple attachments with passedDb
-const first = matches[0];
-// case for 422 where there are ambiguous attachments that are equivalent
-if (matches.every(match => { var _a, _b, _c, _d; return ((_a = first.addon) === null || _a === void 0 ? void 0 : _a.id) === ((_b = match.addon) === null || _b === void 0 ? void 0 : _b.id) && ((_c = first.app) === null || _c === void 0 ? void 0 : _c.id) === ((_d = match.app) === null || _d === void 0 ? void 0 : _d.id); })) {
-const config = (_a = await (0, config_1.getConfig)(heroku, first.app.name)) !== null && _a !== void 0 ? _a : {};
-if (matches.every(match => config[(0, util_1.getConfigVarName)(first.config_vars)] === config[(0, util_1.getConfigVarName)(match.config_vars)])) {
-return first;
-}
-}
-throw error;
-}
-exports.getAttachment = getAttachment;
 async function allAttachments(heroku, app_id) {
 const { body: attachments } = await heroku.get(`/apps/${app_id}/addon-attachments`, {
 headers: { 'Accept-Inclusion': 'addon:plan,config_vars' },
 });
 return attachments.filter((a) => { var _a, _b; return (_b = (_a = a.addon.plan) === null || _a === void 0 ? void 0 : _a.name) === null || _b === void 0 ? void 0 : _b.startsWith('heroku-postgresql'); });
 }
-async function getAddon(heroku, app, db = 'DATABASE_URL') {
-return (await getAttachment(heroku, app, db)).addon;
-}
-exports.getAddon = getAddon;
-async function database(heroku, app, db, namespace) {
-const attached = await getAttachment(heroku, app, db, namespace);
-// would inline this as well but in some cases attachment pulls down config
-// as well, and we would request twice at the same time but I did not want
-// to push this down into attachment because we do not always need config
-const config = await (0, config_1.getConfig)(heroku, attached.app.name);
-const database = (0, util_1.getConnectionDetails)(attached, config);
-if ((0, util_1.bastionKeyPlan)(attached.addon) && !database.bastionKey) {
-const { body: bastionConfig } = await (0, bastion_1.fetchConfig)(heroku, attached.addon);
-const bastionHost = bastionConfig.host;
-const bastionKey = bastionConfig.private_key;
-Object.assign(database, { bastionHost, bastionKey });
-}
-return database;
-}
-exports.database = database;
 async function getRelease(heroku, appName, id) {
 const { body: release } = await heroku.get(`/apps/${appName}/releases/${id}`);
 return release;
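Note on the change above: fetcher.js now imports lodash at module level and keeps only arbitraryAppDB, all, and getRelease; the attachment/connection resolution helpers (matchesHelper, getAttachment, getAddon, database) and the bastion/config imports are gone. A minimal TypeScript sketch of the surviving dedupe step, with the attachment shape assumed for illustration:

import {uniqBy} from 'lodash'

// Assumed minimal shape for illustration; the real code works with Heroku
// add-on attachment objects returned by the Platform API.
type Attachment = {name: string; addon: {id: string; attachment_names?: string[]}}

// Mirrors all(): group attachment names by add-on id, dedupe the add-ons,
// then annotate each add-on with the names of its attachments.
function dedupeAddons(attachments: Attachment[]) {
  const namesByAddon = attachments.reduce<Record<string, string[]>>((acc, att) => {
    (acc[att.addon.id] ||= []).push(att.name)
    return acc
  }, {})
  const addons = uniqBy(attachments.map(a => a.addon), 'id')
  for (const addon of addons) {
    addon.attachment_names = namesByAddon[addon.id]
  }
  return addons
}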
package/lib/lib/pg/psql.d.ts
CHANGED
@@ -1,20 +1,8 @@
 /// <reference types="node" />
 /// <reference types="node" />
-
-
-
-import { SpawnOptions, type SpawnOptionsWithStdioTuple } from 'child_process';
-import type { ChildProcess } from 'node:child_process';
-import { EventEmitter } from 'node:events';
-import type { Server } from 'node:net';
-import { Stream } from 'node:stream';
-import { getConfigs, TunnelConfig } from './bastion';
-import { ConnectionDetails, getConnectionDetails } from './util';
-export declare function psqlQueryOptions(query: string, dbEnv: NodeJS.ProcessEnv, cmdArgs?: string[]): {
-dbEnv: NodeJS.ProcessEnv;
-psqlArgs: string[];
-childProcessOptions: SpawnOptionsWithStdioTuple<"ignore", "pipe", "inherit">;
-};
+import { SpawnOptions } from 'child_process';
+import { ConnectionDetailsWithAttachment } from '@heroku/heroku-cli-util';
+export declare function fetchVersion(db: ConnectionDetailsWithAttachment): Promise<string | undefined>;
 export declare function psqlFileOptions(file: string, dbEnv: NodeJS.ProcessEnv): {
 dbEnv: NodeJS.ProcessEnv;
 psqlArgs: string[];
@@ -25,24 +13,5 @@ export declare function psqlInteractiveOptions(prompt: string, dbEnv: NodeJS.Pro
 psqlArgs: string[];
 childProcessOptions: SpawnOptions;
 };
-export declare function
-
-psqlArgs: string[];
-childProcessOptions: SpawnOptions;
-}): ChildProcess;
-export declare function waitForPSQLExit(psql: EventEmitter): Promise<void>;
-export declare const trapAndForwardSignalsToChildProcess: (childProcess: ChildProcess) => () => void;
-export declare function consumeStream(inputStream: Stream): Promise<unknown>;
-export declare function runWithTunnel(db: ConnectionDetails, tunnelConfig: TunnelConfig, options: Parameters<typeof execPSQL>[0]): Promise<string>;
-export declare class Tunnel {
-private readonly bastionTunnel;
-private readonly events;
-constructor(bastionTunnel: Server);
-waitForClose(): Promise<void>;
-close(): void;
-static connect(db: ConnectionDetails, tunnelConfig: TunnelConfig): Promise<Tunnel>;
-}
-export declare function fetchVersion(db: Parameters<typeof exec>[0]): Promise<string | undefined>;
-export declare function exec(db: ConnectionDetails, query: string, cmdArgs?: string[]): Promise<string>;
-export declare function execFile(db: Parameters<typeof getConfigs>[0], file: string): Promise<string>;
-export declare function interactive(db: ReturnType<typeof getConnectionDetails>): Promise<string>;
+export declare function execFile(db: ConnectionDetailsWithAttachment, file: string): Promise<string>;
+export declare function interactive(db: ConnectionDetailsWithAttachment): Promise<string>;
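The declarations above are now the whole public surface of lib/pg/psql: fetchVersion, psqlFileOptions, psqlInteractiveOptions, execFile, and interactive, all keyed on ConnectionDetailsWithAttachment from @heroku/heroku-cli-util. A hedged caller-side sketch; the relative import path and the way db gets resolved are assumptions:

import type {ConnectionDetailsWithAttachment} from '@heroku/heroku-cli-util'
import {execFile, fetchVersion, interactive} from '../../lib/pg/psql' // path is illustrative

// Sketch only: `db` would come from the shared attachment resolution in
// @heroku/heroku-cli-util, not from the removed local fetcher module.
async function example(db: ConnectionDetailsWithAttachment): Promise<void> {
  const version = await fetchVersion(db)   // e.g. '16.4', parsed from SHOW server_version
  console.log(`Postgres ${version}`)
  await execFile(db, 'scripts/report.sql') // run a SQL file through psql
  await interactive(db)                    // open an interactive psql prompt
}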
package/lib/lib/pg/psql.js
CHANGED
@@ -1,29 +1,19 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.interactive = exports.execFile = exports.
+exports.interactive = exports.execFile = exports.psqlInteractiveOptions = exports.psqlFileOptions = exports.fetchVersion = void 0;
 const core_1 = require("@oclif/core");
-const child_process_1 = require("child_process");
 const debug_1 = require("debug");
 const fs = require("fs");
-const node_events_1 = require("node:events");
 const path = require("node:path");
-const
-
-
-const
-
-
-const psqlArgs = ['-c', query, '--set', 'sslmode=require', ...cmdArgs];
-const childProcessOptions = {
-stdio: ['ignore', 'pipe', 'inherit'],
-};
-return {
-dbEnv,
-psqlArgs,
-childProcessOptions,
-};
+const heroku_cli_util_1 = require("@heroku/heroku-cli-util");
+async function fetchVersion(db) {
+var _a;
+const psqlService = new heroku_cli_util_1.utils.pg.PsqlService(db);
+const output = await psqlService.execQuery('SHOW server_version', ['-X', '-q']);
+return (_a = output.match(/[0-9]{1,}\.[0-9]{1,}/)) === null || _a === void 0 ? void 0 : _a[0];
 }
-exports.
+exports.fetchVersion = fetchVersion;
+const pgDebug = (0, debug_1.default)('pg');
 function psqlFileOptions(file, dbEnv) {
 pgDebug('Running sql file: %s', file.trim());
 const childProcessOptions = {
@@ -65,188 +55,20 @@ function psqlInteractiveOptions(prompt, dbEnv) {
 };
 }
 exports.psqlInteractiveOptions = psqlInteractiveOptions;
-function execPSQL({ dbEnv, psqlArgs, childProcessOptions }) {
-const options = Object.assign({ env: dbEnv }, childProcessOptions);
-pgDebug('opening psql process');
-const psql = (0, child_process_1.spawn)('psql', psqlArgs, options);
-psql.once('spawn', () => pgDebug('psql process spawned'));
-return psql;
-}
-exports.execPSQL = execPSQL;
-async function waitForPSQLExit(psql) {
-let errorToThrow = null;
-try {
-const [exitCode] = await (0, node_events_1.once)(psql, 'close');
-pgDebug(`psql exited with code ${exitCode}`);
-if (exitCode > 0) {
-errorToThrow = new Error(`psql exited with code ${exitCode}`);
-}
-}
-catch (error) {
-pgDebug('psql process error', error);
-const { code } = error;
-if (code === 'ENOENT') {
-errorToThrow = new Error('The local psql command could not be located. For help installing psql, see https://devcenter.heroku.com/articles/heroku-postgresql#local-setup');
-}
-}
-if (errorToThrow) {
-throw errorToThrow;
-}
-}
-exports.waitForPSQLExit = waitForPSQLExit;
-// According to node.js docs, sending a kill to a process won't cause an error
-// but could have unintended consequences if the PID gets reassigned:
-// https://nodejs.org/docs/latest-v14.x/api/child_process.html#child_process_subprocess_kill_signal
-// To be on the safe side, check if the process was already killed before sending the signal
-function kill(childProcess, signal) {
-if (!childProcess.killed) {
-pgDebug('killing psql child process');
-childProcess.kill(signal);
-}
-}
-// trap SIGINT so that ctrl+c can be used by psql without killing the
-// parent node process.
-// you can use ctrl+c in psql to kill running queries
-// while keeping the psql process open.
-// This code is to stop the parent node process (heroku CLI)
-// from exiting. If the parent Heroku CLI node process exits, then psql will exit as it
-// is a child process of the Heroku CLI node process.
-const trapAndForwardSignalsToChildProcess = (childProcess) => {
-const signalsToTrap = ['SIGINT'];
-const signalTraps = signalsToTrap.map(signal => {
-process.removeAllListeners(signal);
-const listener = () => kill(childProcess, signal);
-process.on(signal, listener);
-return [signal, listener];
-});
-// restores the built-in node ctrl+c and other handlers
-return () => {
-signalTraps.forEach(([signal, listener]) => {
-process.removeListener(signal, listener);
-});
-};
-};
-exports.trapAndForwardSignalsToChildProcess = trapAndForwardSignalsToChildProcess;
-function consumeStream(inputStream) {
-let result = '';
-const throughStream = new node_stream_1.Stream.PassThrough();
-// eslint-disable-next-line no-async-promise-executor
-const promise = new Promise(async (resolve, reject) => {
-try {
-await (0, promises_1.finished)(throughStream);
-resolve(result);
-}
-catch (error) {
-reject(error);
-}
-});
-// eslint-disable-next-line no-return-assign
-throughStream.on('data', chunk => result += chunk.toString());
-inputStream.pipe(throughStream);
-return promise;
-}
-exports.consumeStream = consumeStream;
-async function runWithTunnel(db, tunnelConfig, options) {
-const tunnel = await Tunnel.connect(db, tunnelConfig);
-pgDebug('after create tunnel');
-const psql = execPSQL(options);
-// interactive opens with stdio: 'inherit'
-// which gives the child process the same stdin,stdout,stderr of the node process (global `process`)
-// https://nodejs.org/api/child_process.html#child_process_options_stdio
-// psql.stdout will be null in this case
-// return a string for consistency but ideally we should return the child process from this function
-// and let the caller decide what to do with stdin/stdout/stderr
-const stdoutPromise = psql.stdout ? consumeStream(psql.stdout) : Promise.resolve('');
-const cleanupSignalTraps = (0, exports.trapAndForwardSignalsToChildProcess)(psql);
-try {
-pgDebug('waiting for psql or tunnel to exit');
-// wait for either psql or tunnel to exit;
-// the important bit is that we ensure both processes are
-// always cleaned up in the `finally` block below
-await Promise.race([
-waitForPSQLExit(psql),
-tunnel.waitForClose(),
-]);
-}
-catch (error) {
-pgDebug('wait for psql or tunnel error', error);
-throw error;
-}
-finally {
-pgDebug('begin tunnel cleanup');
-cleanupSignalTraps();
-tunnel.close();
-kill(psql, 'SIGKILL');
-pgDebug('end tunnel cleanup');
-}
-return stdoutPromise;
-}
-exports.runWithTunnel = runWithTunnel;
-// a small wrapper around tunnel-ssh
-// so that other code doesn't have to worry about
-// whether there is or is not a tunnel
-class Tunnel {
-constructor(bastionTunnel) {
-this.bastionTunnel = bastionTunnel;
-this.events = new node_events_1.EventEmitter();
-}
-async waitForClose() {
-if (this.bastionTunnel) {
-try {
-pgDebug('wait for tunnel close');
-await (0, node_events_1.once)(this.bastionTunnel, 'close');
-pgDebug('tunnel closed');
-}
-catch (error) {
-pgDebug('tunnel close error', error);
-throw new Error('Secure tunnel to your database failed');
-}
-}
-else {
-pgDebug('no bastion required; waiting for fake close event');
-await (0, node_events_1.once)(this.events, 'close');
-}
-}
-close() {
-if (this.bastionTunnel) {
-pgDebug('close tunnel');
-this.bastionTunnel.close();
-}
-else {
-pgDebug('no tunnel necessary; sending fake close event');
-this.events.emit('close', 0);
-}
-}
-static async connect(db, tunnelConfig) {
-const tunnel = await (0, bastion_1.sshTunnel)(db, tunnelConfig);
-return new Tunnel(tunnel);
-}
-}
-exports.Tunnel = Tunnel;
-async function fetchVersion(db) {
-var _a;
-const output = await exec(db, 'SHOW server_version', ['-X', '-q']);
-return (_a = output.match(/[0-9]{1,}\.[0-9]{1,}/)) === null || _a === void 0 ? void 0 : _a[0];
-}
-exports.fetchVersion = fetchVersion;
-async function exec(db, query, cmdArgs = []) {
-const configs = (0, bastion_1.getConfigs)(db);
-const options = psqlQueryOptions(query, configs.dbEnv, cmdArgs);
-return runWithTunnel(db, configs.dbTunnelConfig, options);
-}
-exports.exec = exec;
 async function execFile(db, file) {
-const
+const psqlService = new heroku_cli_util_1.utils.pg.PsqlService(db);
+const configs = heroku_cli_util_1.utils.pg.psql.getPsqlConfigs(db);
 const options = psqlFileOptions(file, configs.dbEnv);
-return runWithTunnel(
+return psqlService.runWithTunnel(configs.dbTunnelConfig, options);
 }
 exports.execFile = execFile;
 async function interactive(db) {
-const
-const
-const
+const psqlService = new heroku_cli_util_1.utils.pg.PsqlService(db);
+const attachmentName = db.attachment.name;
+const prompt = `${db.attachment.app.name}::${attachmentName}%R%# `;
+const configs = heroku_cli_util_1.utils.pg.psql.getPsqlConfigs(db);
 configs.dbEnv.PGAPPNAME = 'psql interactive'; // default was 'psql non-interactive`
 const options = psqlInteractiveOptions(prompt, configs.dbEnv);
-return runWithTunnel(
+return psqlService.runWithTunnel(configs.dbTunnelConfig, options);
 }
 exports.interactive = interactive;
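The rewritten psql.js delegates process and tunnel management to @heroku/heroku-cli-util: a PsqlService built from the connection details runs one-off queries via execQuery and longer sessions via runWithTunnel, with getPsqlConfigs supplying the environment and tunnel configuration. A rough sketch of that pattern using only the names visible in the diff; exact signatures live in the utility package and are assumed here:

import {utils, type ConnectionDetailsWithAttachment} from '@heroku/heroku-cli-util'

// Roughly what the new fetchVersion above does.
async function serverVersion(db: ConnectionDetailsWithAttachment): Promise<string | undefined> {
  const psqlService = new utils.pg.PsqlService(db)
  // '-X' skips .psqlrc and '-q' quiets psql, matching the flags in the diff.
  const output = await psqlService.execQuery('SHOW server_version', ['-X', '-q'])
  return output.match(/[0-9]+\.[0-9]+/)?.[0]
}

// Roughly what the new execFile above does.
async function runSqlFile(db: ConnectionDetailsWithAttachment, file: string): Promise<string> {
  const psqlService = new utils.pg.PsqlService(db)
  const configs = utils.pg.psql.getPsqlConfigs(db) // provides dbEnv and dbTunnelConfig
  // The real module builds options with psqlFileOptions(file, configs.dbEnv);
  // this literal is a stand-in for illustration only.
  const options = {dbEnv: configs.dbEnv, psqlArgs: ['-f', file], childProcessOptions: {}}
  return psqlService.runWithTunnel(configs.dbTunnelConfig, options)
}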
package/lib/lib/pg/push_pull.d.ts
CHANGED
@@ -1,13 +1,8 @@
-
-import { ConnectionDetails } from './util';
-import { Server } from 'net';
+import { ConnectionDetails } from '@heroku/heroku-cli-util';
 import { ChildProcess } from 'node:child_process';
 export declare const parseExclusions: (rawExcludeList: string | undefined) => string[];
 export declare const prepare: (target: ConnectionDetails) => Promise<void>;
-export declare
-_tunnel?: Server;
-};
-export declare const maybeTunnel: (herokuDb: ConnectionDetails) => Promise<ConnectionDetailsWithOptionalTunnel>;
+export declare const maybeTunnel: (herokuDb: ConnectionDetails) => Promise<ConnectionDetails>;
 export declare const connArgs: (uri: ConnectionDetails, skipDFlag?: boolean) => string[];
 export declare const spawnPipe: (pgDump: ChildProcess, pgRestore: ChildProcess) => Promise<void>;
 export declare const verifyExtensionsMatch: (source: ConnectionDetails, target: ConnectionDetails) => Promise<void>;
package/lib/lib/pg/push_pull.js
CHANGED
@@ -1,10 +1,9 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.verifyExtensionsMatch = exports.spawnPipe = exports.connArgs = exports.maybeTunnel = exports.prepare = exports.parseExclusions = void 0;
-const
+const heroku_cli_util_1 = require("@heroku/heroku-cli-util");
 const core_1 = require("@oclif/core");
 const color_1 = require("@heroku-cli/color");
-const bastion_1 = require("./bastion");
 const node_child_process_1 = require("node:child_process");
 const tsheredoc_1 = require("tsheredoc");
 const debug_1 = require("debug");
@@ -16,6 +15,7 @@ const parseExclusions = (rawExcludeList) => {
 };
 exports.parseExclusions = parseExclusions;
 const prepare = async (target) => {
+const psqlService = new heroku_cli_util_1.utils.pg.PsqlService(target);
 if (target.host === 'localhost' || !target.host) {
 exec(`createdb ${(0, exports.connArgs)(target, true).join(' ')}`);
 }
@@ -26,7 +26,7 @@ const prepare = async (target) => {
 // of --echo-all is set.
 const num = Math.random();
 const emptyMarker = `${num}${num}`;
-const result = await
+const result = await psqlService.execQuery(`SELECT CASE count(*) WHEN 0 THEN '${num}' || '${num}' END FROM pg_stat_user_tables`);
 if (!result.includes(emptyMarker))
 core_1.ux.error(`Remote database is not empty. Please create a new database or use ${color_1.color.cmd('heroku pg:reset')}`);
 }
@@ -35,8 +35,8 @@ exports.prepare = prepare;
 const maybeTunnel = async (herokuDb) => {
 var _a;
 let withTunnel = Object.assign({}, herokuDb);
-const configs =
-const tunnel = await
+const configs = heroku_cli_util_1.utils.pg.psql.getPsqlConfigs(herokuDb);
+const tunnel = await heroku_cli_util_1.utils.pg.psql.sshTunnel(herokuDb, configs.dbTunnelConfig);
 if (tunnel) {
 const tunnelHost = {
 host: configs.dbTunnelConfig.localHost,
@@ -86,6 +86,8 @@ const spawnPipe = async (pgDump, pgRestore) => {
 };
 exports.spawnPipe = spawnPipe;
 const verifyExtensionsMatch = async function (source, target) {
+const psqlSource = new heroku_cli_util_1.utils.pg.PsqlService(source);
+const psqlTarget = new heroku_cli_util_1.utils.pg.PsqlService(target);
 // It's pretty common for local DBs to not have extensions available that
 // are used by the remote app, so take the final precaution of warning if
 // the extensions available in the local database don't match. We don't
@@ -93,8 +95,8 @@ const verifyExtensionsMatch = async function (source, target) {
 // used, though.
 const sql = 'SELECT extname FROM pg_extension ORDER BY extname;';
 const [extensionTarget, extensionSource] = await Promise.all([
-
-
+psqlTarget.execQuery(sql),
+psqlSource.execQuery(sql),
 ]);
 const extensions = {
 target: extensionTarget,
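push_pull.js now builds its tunnel and runs its probe queries through the same utility package: prepare and verifyExtensionsMatch use PsqlService.execQuery, and maybeTunnel combines getPsqlConfigs with sshTunnel. A sketch of the tunnel step, assuming a localPort field alongside the localHost shown in the diff:

import {utils, type ConnectionDetails} from '@heroku/heroku-cli-util'

// If the database needs an SSH bastion, talk to the local end of the tunnel;
// otherwise keep the original connection details unchanged.
async function withTunnel(herokuDb: ConnectionDetails): Promise<ConnectionDetails> {
  const configs = utils.pg.psql.getPsqlConfigs(herokuDb)
  const tunnel = await utils.pg.psql.sshTunnel(herokuDb, configs.dbTunnelConfig)
  if (!tunnel) return herokuDb
  return {
    ...herokuDb,
    host: configs.dbTunnelConfig.localHost,
    port: String(configs.dbTunnelConfig.localPort), // localPort is an assumption
  }
}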
package/lib/lib/pg/setter.js
CHANGED
@@ -4,7 +4,7 @@ exports.numericConverter = exports.booleanConverter = exports.PGSettingsCommand
 const command_1 = require("@heroku-cli/command");
 const core_1 = require("@oclif/core");
 const resolve_1 = require("../addons/resolve");
-const
+const heroku_cli_util_1 = require("@heroku/heroku-cli-util");
 const util_1 = require("./util");
 class PGSettingsCommand extends command_1.Command {
 async run() {
@@ -16,7 +16,7 @@ class PGSettingsCommand extends command_1.Command {
 core_1.ux.error('You can’t perform this operation on Essential-tier databases.');
 if (value) {
 const { body: settings } = await this.heroku.patch(`/postgres/v0/databases/${db.id}/config`, {
-hostname:
+hostname: heroku_cli_util_1.utils.pg.host(),
 body: { [this.settingKey]: this.convertValue(value) },
 });
 const setting = settings[this.settingKey];
@@ -24,7 +24,7 @@ class PGSettingsCommand extends command_1.Command {
 core_1.ux.log(this.explain(setting));
 }
 else {
-const { body: settings } = await this.heroku.get(`/postgres/v0/databases/${db.id}/config`, { hostname:
+const { body: settings } = await this.heroku.get(`/postgres/v0/databases/${db.id}/config`, { hostname: heroku_cli_util_1.utils.pg.host() });
 const setting = settings[this.settingKey];
 core_1.ux.log(`${this.settingKey.replace(/_/g, '-')} is set to ${setting.value} for ${db.name}.`);
 core_1.ux.log(this.explain(setting));
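PGSettingsCommand now targets the Postgres API host returned by utils.pg.host() rather than the removed lib/pg/host module. A sketch of the read path, assuming the APIClient from @heroku-cli/command that the command class already uses:

import {APIClient} from '@heroku-cli/command'
import {utils} from '@heroku/heroku-cli-util'

// Mirrors the GET branch above: fetch the database config and read one setting.
async function readSetting(heroku: APIClient, dbId: string, settingKey: string) {
  const {body: settings} = await heroku.get<Record<string, {value: unknown}>>(
    `/postgres/v0/databases/${dbId}/config`,
    {hostname: utils.pg.host()},
  )
  return settings[settingKey]
}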
package/lib/lib/pg/types.d.ts
CHANGED
@@ -42,11 +42,10 @@ export declare type BackupTransfer = {
 message: string;
 }>;
 };
-export declare type
-
-
-
-};
+export declare type ExtendedAddon = {
+addon_service: Required<Heroku.AddOnService>;
+plan: Required<Heroku.Plan>;
+} & Required<Heroku.AddOn>;
 declare type ServiceInfo = 'Status' | 'Fork/Follow' | 'Rollback' | 'Created' | 'Region' | 'Data Encryption' | 'Continuous Protection' | 'Enhanced Certificates' | 'Upgradable Extensions' | 'Plan' | 'HA Status' | 'Behind By' | 'Data Size' | 'Tables' | 'PG Version' | 'Connections' | 'Connection Pooling' | 'Credentials' | 'Restricted Credentials' | 'Mutual TLS' | 'Customer Encryption Key' | 'Following' | 'Forked From' | 'Followers' | 'Forks' | 'Maintenance' | 'Maintenance window' | 'Infrastructure' | 'Warning';
 export declare type PgDatabaseService = {
 addon_id: string;
@@ -129,19 +128,15 @@ export declare type PgUpgradeError = {
 message: string;
 };
 };
-export declare type AddOnWithPlan = Required<Heroku.AddOnAttachment['addon']> & {
-plan: Required<Heroku.AddOn['plan']>;
-};
-export declare type AddOnAttachmentWithConfigVarsAndPlan = Required<Heroku.AddOnAttachment> & {
-config_vars: Heroku.AddOn['config_vars'];
-addon: AddOnWithRelatedData;
-};
 export declare type Link = {
-
+id: string;
 created_at: string;
-message: string;
 name: string;
-
+remote_name: string;
+remote: {
+name: string;
+attachment_name: string;
+};
 };
 declare type CredentialState = 'enabling' | 'active' | 'revoking' | 'revoked' | 'archived';
 export declare type Credential = {
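The Link type above now nests the remote database under a remote object (name plus attachment_name) and drops the flat message field. An illustrative value; all data below is hypothetical and only shows the new shape, and the import path is illustrative:

import type {Link} from './types' // lib/lib/pg/types

const link: Link = {
  id: '11111111-2222-3333-4444-555555555555',
  created_at: '2025-01-01T00:00:00Z',
  name: 'my_link',
  remote_name: 'postgresql-curved-12345',
  remote: {
    name: 'postgresql-curved-12345',
    attachment_name: 'HEROKU_POSTGRESQL_RED',
  },
}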
package/lib/lib/pg/util.d.ts
CHANGED
@@ -1,37 +1,11 @@
-/// <reference types="node" />
 import type { AddOnAttachment } from '@heroku-cli/schema';
-import type {
-import {
-
-export declare const essentialNumPlan: (addon:
-export declare const legacyEssentialPlan: (addon:
-export declare function essentialPlan(addon:
-export declare function getConfigVarNameFromAttachment(attachment: Required<AddOnAttachment & {
-addon: AddOnAttachmentWithConfigVarsAndPlan;
-}>, config?: Record<string, string>): string;
+import type { ExtendedAddonAttachment } from '@heroku/heroku-cli-util';
+import type { CredentialsInfo } from './types';
+import type { ExtendedAddon } from './types';
+export declare const essentialNumPlan: (addon: ExtendedAddonAttachment['addon'] | ExtendedAddon) => boolean;
+export declare const legacyEssentialPlan: (addon: ExtendedAddonAttachment['addon'] | ExtendedAddon) => boolean;
+export declare function essentialPlan(addon: ExtendedAddonAttachment['addon'] | ExtendedAddon): boolean;
 export declare function formatResponseWithCommands(response: string): string;
 export declare function presentCredentialAttachments(app: string, credAttachments: Required<AddOnAttachment>[], credentials: CredentialsInfo, cred: string): string;
-export declare type ConnectionDetails = {
-user: string;
-password: string;
-database: string;
-host: string;
-port: string;
-pathname: string;
-url: string;
-bastionKey?: string;
-bastionHost?: string;
-_tunnel?: Server;
-};
-export declare type ConnectionDetailsWithAttachment = ConnectionDetails & {
-attachment: Required<AddOnAttachment & {
-addon: AddOnAttachmentWithConfigVarsAndPlan;
-}>;
-};
-export declare const getConnectionDetails: (attachment: Required<AddOnAttachment & {
-addon: AddOnAttachmentWithConfigVarsAndPlan;
-}>, configVars?: Record<string, string>) => ConnectionDetailsWithAttachment;
-export declare const bastionKeyPlan: (a: AddOnAttachmentWithConfigVarsAndPlan) => boolean;
 export declare const configVarNamesFromValue: (config: Record<string, string>, value: string) => string[];
 export declare const databaseNameFromUrl: (uri: string, config: Record<string, string>) => any;
-export declare const parsePostgresConnectionString: (db: string) => ConnectionDetails;
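With this change the ConnectionDetails family of types lives in @heroku/heroku-cli-util, and the plan helpers take an attachment's addon (or an ExtendedAddon) directly. A small usage sketch; the import path for util is illustrative:

import type {ExtendedAddonAttachment} from '@heroku/heroku-cli-util'
import {essentialPlan} from '../../lib/pg/util' // path is illustrative

// essentialPlan accepts ExtendedAddonAttachment['addon'] | ExtendedAddon per the
// declarations above.
function isEssential(attachment: ExtendedAddonAttachment): boolean {
  return essentialPlan(attachment.addon)
}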
|