@heroku/heroku-cli-util 8.0.15 → 9.0.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +15 -3
- package/README.md +44 -241
- package/dist/test-helpers/expect-output.d.ts +2 -0
- package/dist/test-helpers/expect-output.js +16 -0
- package/dist/test-helpers/init.d.ts +1 -0
- package/dist/test-helpers/init.js +18 -0
- package/dist/test-helpers/stub-output.d.ts +2 -0
- package/dist/test-helpers/stub-output.js +33 -0
- package/dist/types/errors/ambiguous.d.ts +15 -0
- package/dist/types/errors/ambiguous.js +14 -0
- package/dist/types/errors/not-found.d.ts +5 -0
- package/dist/types/errors/not-found.js +12 -0
- package/dist/types/pg/data-api.d.ts +17 -0
- package/dist/types/pg/data-api.js +2 -0
- package/dist/types/pg/tunnel.d.ts +22 -0
- package/dist/types/pg/tunnel.js +2 -0
- package/dist/utils/addons/resolve.d.ts +9 -0
- package/dist/utils/addons/resolve.js +39 -0
- package/dist/utils/pg/bastion.d.ts +30 -0
- package/dist/utils/pg/bastion.js +122 -0
- package/dist/utils/pg/config-vars.d.ts +8 -0
- package/dist/utils/pg/config-vars.js +34 -0
- package/dist/utils/pg/databases.d.ts +12 -0
- package/dist/utils/pg/databases.js +137 -0
- package/dist/utils/pg/host.d.ts +1 -0
- package/dist/utils/pg/host.js +7 -0
- package/dist/utils/pg/psql.d.ts +28 -0
- package/dist/utils/pg/psql.js +188 -0
- package/dist/ux/confirm.d.ts +1 -0
- package/dist/ux/confirm.js +7 -0
- package/dist/ux/prompt.d.ts +2 -0
- package/dist/ux/prompt.js +7 -0
- package/dist/ux/styled-header.d.ts +1 -0
- package/dist/ux/styled-header.js +7 -0
- package/dist/ux/styled-json.d.ts +1 -0
- package/dist/ux/styled-json.js +7 -0
- package/dist/ux/styled-object.d.ts +1 -0
- package/dist/ux/styled-object.js +7 -0
- package/dist/ux/table.d.ts +2 -0
- package/dist/ux/table.js +7 -0
- package/dist/ux/wait.d.ts +1 -0
- package/dist/ux/wait.js +7 -0
- package/package.json +54 -55
- package/index.js +0 -40
- package/lib/action.js +0 -54
- package/lib/auth.js +0 -207
- package/lib/command.js +0 -171
- package/lib/console.js +0 -105
- package/lib/date.js +0 -18
- package/lib/errors.js +0 -122
- package/lib/exit.js +0 -42
- package/lib/got.js +0 -153
- package/lib/linewrap.js +0 -783
- package/lib/mutex.js +0 -41
- package/lib/open.js +0 -22
- package/lib/preauth.js +0 -26
- package/lib/process.js +0 -14
- package/lib/prompt.js +0 -150
- package/lib/spinner.js +0 -147
- package/lib/spinners.json +0 -739
- package/lib/styled.js +0 -131
- package/lib/table.js +0 -132
- package/lib/util.js +0 -38
- package/lib/vars.js +0 -29
- package/lib/yubikey.js +0 -14

package/dist/utils/pg/bastion.js ADDED

@@ -0,0 +1,122 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getBastion = exports.env = exports.bastionKeyPlan = void 0;
+exports.fetchConfig = fetchConfig;
+exports.getConfigs = getConfigs;
+exports.sshTunnel = sshTunnel;
+exports.tunnelConfig = tunnelConfig;
+const core_1 = require("@oclif/core");
+const debug_1 = require("debug");
+const EventEmitter = require("node:events");
+const node_util_1 = require("node:util");
+const createTunnel = require("tunnel-ssh");
+const host_1 = require("./host");
+const pgDebug = (0, debug_1.default)('pg');
+const bastionKeyPlan = (a) => Boolean(/private/.test(a.addon.plan.name));
+exports.bastionKeyPlan = bastionKeyPlan;
+const env = (db) => {
+    const baseEnv = Object.assign({ PGAPPNAME: 'psql non-interactive', PGSSLMODE: (!db.host || db.host === 'localhost') ? 'prefer' : 'require' }, process.env);
+    const mapping = {
+        PGDATABASE: 'database',
+        PGHOST: 'host',
+        PGPASSWORD: 'password',
+        PGPORT: 'port',
+        PGUSER: 'user',
+    };
+    for (const envVar of Object.keys(mapping)) {
+        const val = db[mapping[envVar]];
+        if (val) {
+            baseEnv[envVar] = val;
+        }
+    }
+    return baseEnv;
+};
+exports.env = env;
+async function fetchConfig(heroku, db) {
+    return heroku.get(`/client/v11/databases/${encodeURIComponent(db.id)}/bastion`, {
+        hostname: (0, host_1.default)(),
+    });
+}
+const getBastion = function (config, baseName) {
+    // If there are bastions, extract a host and a key
+    // otherwise, return an empty Object
+    // If there are bastions:
+    // * there should be one *_BASTION_KEY
+    // * pick one host from the comma-separated list in *_BASTIONS
+    // We assert that _BASTIONS and _BASTION_KEY always exist together
+    // If either is falsy, pretend neither exist
+    const bastionKey = config[`${baseName}_BASTION_KEY`];
+    const bastions = (config[`${baseName}_BASTIONS`] || '').split(',');
+    const bastionHost = bastions[Math.floor(Math.random() * bastions.length)];
+    return (bastionKey && bastionHost) ? { bastionHost, bastionKey } : {};
+};
+exports.getBastion = getBastion;
+function getConfigs(db) {
+    const dbEnv = (0, exports.env)(db);
+    const dbTunnelConfig = tunnelConfig(db);
+    if (db.bastionKey) {
+        Object.assign(dbEnv, {
+            PGHOST: dbTunnelConfig.localHost,
+            PGPORT: dbTunnelConfig.localPort,
+        });
+    }
+    return {
+        dbEnv,
+        dbTunnelConfig,
+    };
+}
+async function sshTunnel(db, dbTunnelConfig, timeout = 10000) {
+    if (!db.bastionKey) {
+        return null;
+    }
+    const timeoutInstance = new Timeout(timeout, 'Establishing a secure tunnel timed out');
+    const createSSHTunnel = (0, node_util_1.promisify)(createTunnel);
+    try {
+        return await Promise.race([
+            timeoutInstance.promise(),
+            createSSHTunnel(dbTunnelConfig),
+        ]);
+    }
+    catch (error) {
+        pgDebug(error);
+        core_1.ux.error('Unable to establish a secure tunnel to your database.');
+    }
+    finally {
+        timeoutInstance.cancel();
+    }
+}
+function tunnelConfig(db) {
+    const localHost = '127.0.0.1';
+    const localPort = Math.floor((Math.random() * (65535 - 49152)) + 49152);
+    return {
+        dstHost: db.host || undefined,
+        dstPort: (db.port && Number.parseInt(db.port, 10)) || undefined,
+        host: db.bastionHost,
+        localHost,
+        localPort,
+        privateKey: db.bastionKey,
+        username: 'bastion',
+    };
+}
+class Timeout {
+    constructor(timeout, message) {
+        // eslint-disable-next-line unicorn/prefer-event-target
+        this.events = new EventEmitter();
+        this.timeout = timeout;
+        this.message = message;
+    }
+    cancel() {
+        this.events.emit('cancelled');
+    }
+    async promise() {
+        this.timer = setTimeout(() => {
+            this.events.emit('error', new Error(this.message));
+        }, this.timeout);
+        try {
+            await EventEmitter.once(this.events, 'cancelled');
+        }
+        finally {
+            clearTimeout(this.timer);
+        }
+    }
+}
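
The intended call pattern for these helpers can be read off this file: build the psql environment and tunnel settings with getConfigs, then open the tunnel with sshTunnel only when the connection details carry a bastionKey (sshTunnel resolves to null otherwise). A minimal sketch under that reading — the deep dist/utils/pg/bastion import path and the hand-built connection object are assumptions for illustration, not part of this diff:

import {getConfigs, sshTunnel} from '@heroku/heroku-cli-util/dist/utils/pg/bastion' // hypothetical deep import

// Shape mirrors what getConnectionDetails() in databases.js (below) produces; values are made up.
const db = {
  database: 'widgets',
  host: 'ec2-1-2-3-4.compute-1.amazonaws.com',
  port: '5432',
  user: 'u1abc',
  password: 'p2def',
  bastionHost: '10.0.0.5',                         // only present on private-space plans
  bastionKey: '-----BEGIN RSA PRIVATE KEY-----\n…',
}

const {dbEnv, dbTunnelConfig} = getConfigs(db)     // PGHOST/PGPORT now point at the local tunnel endpoint
const tunnel = await sshTunnel(db, dbTunnelConfig) // resolves to null when db.bastionKey is absent
// …spawn psql (or any client) with `env: dbEnv`, then:
tunnel?.close()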

package/dist/utils/pg/config-vars.d.ts ADDED

@@ -0,0 +1,8 @@
+import type { APIClient } from '@heroku-cli/command';
+import type { AddOnAttachment } from '@heroku-cli/schema';
+import type { AddOnAttachmentWithConfigVarsAndPlan } from '../../types/pg/data-api';
+export declare function getConfig(heroku: APIClient, app: string): Promise<Record<string, string> | undefined>;
+export declare function getConfigVarName(configVars: string[]): string;
+export declare function getConfigVarNameFromAttachment(attachment: Required<{
+    addon: AddOnAttachmentWithConfigVarsAndPlan;
+} & AddOnAttachment>, config?: Record<string, string>): string;

package/dist/utils/pg/config-vars.js ADDED

@@ -0,0 +1,34 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getConfig = getConfig;
+exports.getConfigVarName = getConfigVarName;
+exports.getConfigVarNameFromAttachment = getConfigVarNameFromAttachment;
+const color_1 = require("@heroku-cli/color");
+const core_1 = require("@oclif/core");
+const responseByAppId = new Map();
+async function getConfig(heroku, app) {
+    if (!responseByAppId.has(app)) {
+        const promise = heroku.get(`/apps/${app}/config-vars`);
+        responseByAppId.set(app, promise);
+    }
+    const result = await responseByAppId.get(app);
+    return result === null || result === void 0 ? void 0 : result.body;
+}
+function getConfigVarName(configVars) {
+    const connStringVars = configVars.filter(cv => (cv.endsWith('_URL')));
+    if (connStringVars.length === 0)
+        throw new Error('Database URL not found for this addon');
+    return connStringVars[0];
+}
+function getConfigVarNameFromAttachment(attachment, config = {}) {
+    var _a, _b;
+    const configVars = (_b = (_a = attachment.addon.config_vars) === null || _a === void 0 ? void 0 : _a.filter((cv) => { var _a; return (_a = config[cv]) === null || _a === void 0 ? void 0 : _a.startsWith('postgres://'); })) !== null && _b !== void 0 ? _b : [];
+    if (configVars.length === 0) {
+        core_1.ux.error(`No config vars found for ${attachment.name}; perhaps they were removed as a side effect of ${color_1.default.cmd('heroku rollback')}? Use ${color_1.default.cmd('heroku addons:attach')} to create a new attachment and then ${color_1.default.cmd('heroku addons:detach')} to remove the current attachment.`);
+    }
+    const configVarName = `${attachment.name}_URL`;
+    if (configVars.includes(configVarName) && configVarName in config) {
+        return configVarName;
+    }
+    return getConfigVarName(configVars);
+}
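
getConfigVarName returns the first attachment config var ending in _URL and throws when none qualifies, while getConfigVarNameFromAttachment prefers the attachment's own <NAME>_URL when it actually resolves to a postgres:// value. A small illustration of the first helper; the deep import path is an assumption:

import {getConfigVarName} from '@heroku/heroku-cli-util/dist/utils/pg/config-vars' // hypothetical deep import

getConfigVarName(['DATABASE_CONNECTION_POOL_URL', 'DATABASE_URL'])
// => 'DATABASE_CONNECTION_POOL_URL' — the first entry ending in _URL wins

getConfigVarName(['HEROKU_POSTGRESQL_RED'])
// => throws Error('Database URL not found for this addon')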

package/dist/utils/pg/databases.d.ts ADDED

@@ -0,0 +1,12 @@
+import type { AddOnAttachment } from '@heroku-cli/schema';
+import { APIClient } from '@heroku-cli/command';
+import type { AddOnAttachmentWithConfigVarsAndPlan } from '../../types/pg/data-api';
+import type { ConnectionDetails, ConnectionDetailsWithAttachment } from '../../types/pg/tunnel';
+export declare function getAttachment(heroku: APIClient, app: string, db?: string, namespace?: string): Promise<Required<{
+    addon: AddOnAttachmentWithConfigVarsAndPlan;
+} & AddOnAttachment>>;
+export declare const getConnectionDetails: (attachment: Required<{
+    addon: AddOnAttachmentWithConfigVarsAndPlan;
+} & AddOnAttachment>, configVars?: Record<string, string>) => ConnectionDetailsWithAttachment;
+export declare function getDatabase(heroku: APIClient, app: string, db?: string, namespace?: string): Promise<ConnectionDetailsWithAttachment>;
+export declare const parsePostgresConnectionString: (db: string) => ConnectionDetails;

package/dist/utils/pg/databases.js ADDED

@@ -0,0 +1,137 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parsePostgresConnectionString = exports.getConnectionDetails = void 0;
+exports.getAttachment = getAttachment;
+exports.getDatabase = getDatabase;
+const color_1 = require("@heroku-cli/color");
+const api_client_1 = require("@heroku-cli/command/lib/api-client");
+const debug_1 = require("debug");
+const node_process_1 = require("node:process");
+const ambiguous_1 = require("../../types/errors/ambiguous");
+const resolve_1 = require("../addons/resolve");
+const bastion_1 = require("./bastion");
+const config_vars_1 = require("./config-vars");
+const pgDebug = (0, debug_1.default)('pg');
+async function allAttachments(heroku, appId) {
+    const { body: attachments } = await heroku.get(`/apps/${appId}/addon-attachments`, {
+        headers: { 'Accept-Inclusion': 'addon:plan,config_vars' },
+    });
+    return attachments.filter((a) => { var _a, _b; return (_b = (_a = a.addon.plan) === null || _a === void 0 ? void 0 : _a.name) === null || _b === void 0 ? void 0 : _b.startsWith('heroku-postgresql'); });
+}
+async function getAttachment(heroku, app, db = 'DATABASE_URL', namespace = '') {
+    var _a;
+    const matchesOrError = await matchesHelper(heroku, app, db, namespace);
+    let { matches } = matchesOrError;
+    const { error } = matchesOrError;
+    // happy path where the resolver matches just one
+    if (matches && matches.length === 1) {
+        return matches[0];
+    }
+    // case for 404 where there are implicit attachments
+    if (!matches) {
+        const appConfigMatch = /^(.+?)::(.+)/.exec(db);
+        if (appConfigMatch) {
+            app = appConfigMatch[1];
+            db = appConfigMatch[2];
+        }
+        if (!db.endsWith('_URL')) {
+            db += '_URL';
+        }
+        const [config = {}, attachments] = await Promise.all([
+            (0, config_vars_1.getConfig)(heroku, app),
+            allAttachments(heroku, app),
+        ]);
+        if (attachments.length === 0) {
+            throw new Error(`${color_1.default.app(app)} has no databases`);
+        }
+        matches = attachments.filter(attachment => config[db] && config[db] === config[(0, config_vars_1.getConfigVarName)(attachment.config_vars)]);
+        if (matches.length === 0) {
+            const validOptions = attachments.map(attachment => (0, config_vars_1.getConfigVarName)(attachment.config_vars));
+            throw new Error(`Unknown database: ${db}. Valid options are: ${validOptions.join(', ')}`);
+        }
+    }
+    // case for multiple attachments with passedDb
+    const first = matches[0];
+    // case for 422 where there are ambiguous attachments that are equivalent
+    if (matches.every(match => { var _a, _b, _c, _d; return ((_a = first.addon) === null || _a === void 0 ? void 0 : _a.id) === ((_b = match.addon) === null || _b === void 0 ? void 0 : _b.id) && ((_c = first.app) === null || _c === void 0 ? void 0 : _c.id) === ((_d = match.app) === null || _d === void 0 ? void 0 : _d.id); })) {
+        const config = (_a = await (0, config_vars_1.getConfig)(heroku, first.app.name)) !== null && _a !== void 0 ? _a : {};
+        if (matches.every(match => config[(0, config_vars_1.getConfigVarName)(first.addon.config_vars)] === config[(0, config_vars_1.getConfigVarName)(match.config_vars)])) {
+            return first;
+        }
+    }
+    throw error;
+}
+const getConnectionDetails = (attachment, configVars = {}) => {
+    const connStringVar = (0, config_vars_1.getConfigVarNameFromAttachment)(attachment, configVars);
+    // remove _URL from the end of the config var name
+    const baseName = connStringVar.slice(0, -4);
+    // build the default payload for non-bastion dbs
+    pgDebug(`Using "${connStringVar}" to connect to your database…`);
+    const conn = (0, exports.parsePostgresConnectionString)(configVars[connStringVar]);
+    const payload = {
+        attachment,
+        database: conn.database,
+        host: conn.host,
+        password: conn.password,
+        pathname: conn.pathname,
+        port: conn.port,
+        url: conn.url,
+        user: conn.user,
+    };
+    // If bastion creds exist, graft it into the payload
+    const bastion = (0, bastion_1.getBastion)(configVars, baseName);
+    if (bastion) {
+        Object.assign(payload, bastion);
+    }
+    return payload;
+};
+exports.getConnectionDetails = getConnectionDetails;
+async function getDatabase(heroku, app, db, namespace) {
+    const attached = await getAttachment(heroku, app, db, namespace);
+    // would inline this as well but in some cases attachment pulls down config
+    // as well, and we would request twice at the same time but I did not want
+    // to push this down into attachment because we do not always need config
+    const config = await (0, config_vars_1.getConfig)(heroku, attached.app.name);
+    const database = (0, exports.getConnectionDetails)(attached, config);
+    if ((0, bastion_1.bastionKeyPlan)(attached.addon) && !database.bastionKey) {
+        const { body: bastionConfig } = await (0, bastion_1.fetchConfig)(heroku, attached.addon);
+        const bastionHost = bastionConfig.host;
+        const bastionKey = bastionConfig.private_key;
+        Object.assign(database, { bastionHost, bastionKey });
+    }
+    return database;
+}
+async function matchesHelper(heroku, app, db, namespace) {
+    var _a;
+    (0, debug_1.default)(`fetching ${db} on ${app}`);
+    const addonService = process.env.HEROKU_POSTGRESQL_ADDON_NAME || 'heroku-postgresql';
+    (0, debug_1.default)(`addon service: ${addonService}`);
+    try {
+        const attached = await (0, resolve_1.appAttachment)(heroku, app, db, { addon_service: addonService, namespace });
+        return ({ matches: [attached] });
+    }
+    catch (error) {
+        if (error instanceof ambiguous_1.AmbiguousError && ((_a = error.body) === null || _a === void 0 ? void 0 : _a.id) === 'multiple_matches' && error.matches) {
+            return { error, matches: error.matches };
+        }
+        if (error instanceof api_client_1.HerokuAPIError && error.http.statusCode === 404 && error.body && error.body.id === 'not_found') {
+            return { error, matches: null };
+        }
+        throw error;
+    }
+}
+const parsePostgresConnectionString = (db) => {
+    const dbPath = /:\/\//.test(db) ? db : `postgres:///${db}`;
+    const url = new URL(dbPath);
+    const { hostname, password, pathname, port, username } = url;
+    return {
+        database: pathname.charAt(0) === '/' ? pathname.slice(1) : pathname,
+        host: hostname,
+        password,
+        pathname,
+        port: port || node_process_1.env.PGPORT || (hostname && '5432'),
+        url: dbPath,
+        user: username,
+    };
+};
+exports.parsePostgresConnectionString = parsePostgresConnectionString;
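
parsePostgresConnectionString accepts either a full connection URL or a bare database name, which it rewrites to postgres:///<name> before parsing. A quick sketch of both shapes, with illustrative values only and an assumed deep import path:

import {parsePostgresConnectionString} from '@heroku/heroku-cli-util/dist/utils/pg/databases' // hypothetical deep import

parsePostgresConnectionString('postgres://alice:s3cret@db.example.com:6432/widgets')
// => { database: 'widgets', host: 'db.example.com', port: '6432', user: 'alice', password: 's3cret', pathname: '/widgets', … }

parsePostgresConnectionString('widgets')
// => { database: 'widgets', host: '', port: process.env.PGPORT || '', url: 'postgres:///widgets', … }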

package/dist/utils/pg/host.d.ts ADDED

@@ -0,0 +1 @@
+export default function (): string;

package/dist/utils/pg/host.js ADDED

@@ -0,0 +1,7 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.default = default_1;
+function default_1() {
+    const host = process.env.HEROKU_DATA_HOST || process.env.HEROKU_POSTGRESQL_HOST;
+    return host !== null && host !== void 0 ? host : 'api.data.heroku.com';
+}

package/dist/utils/pg/psql.d.ts ADDED

@@ -0,0 +1,28 @@
+import { type ChildProcess, type SpawnOptions, type SpawnOptionsWithStdioTuple } from 'node:child_process';
+import { EventEmitter } from 'node:events';
+import { Server } from 'node:net';
+import { Stream } from 'node:stream';
+import { ConnectionDetails, TunnelConfig } from '../../types/pg/tunnel';
+export declare function consumeStream(inputStream: Stream): Promise<unknown>;
+export declare function exec(db: ConnectionDetails, query: string, cmdArgs?: string[]): Promise<string>;
+export declare function psqlQueryOptions(query: string, dbEnv: NodeJS.ProcessEnv, cmdArgs?: string[]): {
+    childProcessOptions: SpawnOptionsWithStdioTuple<"ignore", "pipe", "inherit">;
+    dbEnv: NodeJS.ProcessEnv;
+    psqlArgs: string[];
+};
+export declare function execPSQL({ childProcessOptions, dbEnv, psqlArgs }: {
+    childProcessOptions: SpawnOptions;
+    dbEnv: NodeJS.ProcessEnv;
+    psqlArgs: string[];
+}): ChildProcess;
+export declare function runWithTunnel(db: ConnectionDetails, tunnelConfig: TunnelConfig, options: Parameters<typeof execPSQL>[0]): Promise<string>;
+export declare const trapAndForwardSignalsToChildProcess: (childProcess: ChildProcess) => () => void;
+export declare function waitForPSQLExit(psql: EventEmitter): Promise<void>;
+export declare class Tunnel {
+    private readonly bastionTunnel;
+    private readonly events;
+    constructor(bastionTunnel: Server);
+    static connect(db: ConnectionDetails, tunnelConfig: TunnelConfig): Promise<Tunnel>;
+    close(): void;
+    waitForClose(): Promise<void>;
+}

package/dist/utils/pg/psql.js ADDED

@@ -0,0 +1,188 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Tunnel = exports.trapAndForwardSignalsToChildProcess = void 0;
+exports.consumeStream = consumeStream;
+exports.exec = exec;
+exports.psqlQueryOptions = psqlQueryOptions;
+exports.execPSQL = execPSQL;
+exports.runWithTunnel = runWithTunnel;
+exports.waitForPSQLExit = waitForPSQLExit;
+const debug_1 = require("debug");
+const node_child_process_1 = require("node:child_process");
+const node_events_1 = require("node:events");
+const node_stream_1 = require("node:stream");
+const promises_1 = require("node:stream/promises");
+const bastion_1 = require("./bastion");
+const pgDebug = (0, debug_1.default)('pg');
+function consumeStream(inputStream) {
+    let result = '';
+    const throughStream = new node_stream_1.Stream.PassThrough();
+    // eslint-disable-next-line no-async-promise-executor
+    const promise = new Promise(async (resolve, reject) => {
+        try {
+            await (0, promises_1.finished)(throughStream);
+            resolve(result);
+        }
+        catch (error) {
+            reject(error);
+        }
+    });
+    // eslint-disable-next-line no-return-assign
+    throughStream.on('data', chunk => result += chunk.toString());
+    inputStream.pipe(throughStream);
+    return promise;
+}
+async function exec(db, query, cmdArgs = []) {
+    const configs = (0, bastion_1.getConfigs)(db);
+    const options = psqlQueryOptions(query, configs.dbEnv, cmdArgs);
+    return runWithTunnel(db, configs.dbTunnelConfig, options);
+}
+function psqlQueryOptions(query, dbEnv, cmdArgs = []) {
+    pgDebug('Running query: %s', query.trim());
+    const psqlArgs = ['-c', query, '--set', 'sslmode=require', ...cmdArgs];
+    const childProcessOptions = {
+        stdio: ['ignore', 'pipe', 'inherit'],
+    };
+    return {
+        childProcessOptions,
+        dbEnv,
+        psqlArgs,
+    };
+}
+function execPSQL({ childProcessOptions, dbEnv, psqlArgs }) {
+    const options = Object.assign({ env: dbEnv }, childProcessOptions);
+    pgDebug('opening psql process');
+    const psql = (0, node_child_process_1.spawn)('psql', psqlArgs, options);
+    psql.once('spawn', () => pgDebug('psql process spawned'));
+    return psql;
+}
+// According to node.js docs, sending a kill to a process won't cause an error
+// but could have unintended consequences if the PID gets reassigned:
+// https://nodejs.org/docs/latest-v14.x/api/child_process.html#child_process_subprocess_kill_signal
+// To be on the safe side, check if the process was already killed before sending the signal
+function kill(childProcess, signal) {
+    if (!childProcess.killed) {
+        pgDebug('killing psql child process');
+        childProcess.kill(signal);
+    }
+}
+async function runWithTunnel(db, tunnelConfig, options) {
+    const tunnel = await Tunnel.connect(db, tunnelConfig);
+    pgDebug('after create tunnel');
+    const psql = execPSQL(options);
+    // interactive opens with stdio: 'inherit'
+    // which gives the child process the same stdin,stdout,stderr of the node process (global `process`)
+    // https://nodejs.org/api/child_process.html#child_process_options_stdio
+    // psql.stdout will be null in this case
+    // return a string for consistency but ideally we should return the child process from this function
+    // and let the caller decide what to do with stdin/stdout/stderr
+    const stdoutPromise = psql.stdout ? consumeStream(psql.stdout) : Promise.resolve('');
+    const cleanupSignalTraps = (0, exports.trapAndForwardSignalsToChildProcess)(psql);
+    try {
+        pgDebug('waiting for psql or tunnel to exit');
+        // wait for either psql or tunnel to exit;
+        // the important bit is that we ensure both processes are
+        // always cleaned up in the `finally` block below
+        await Promise.race([
+            waitForPSQLExit(psql),
+            tunnel.waitForClose(),
+        ]);
+    }
+    catch (error) {
+        pgDebug('wait for psql or tunnel error', error);
+        throw error;
+    }
+    finally {
+        pgDebug('begin tunnel cleanup');
+        cleanupSignalTraps();
+        tunnel.close();
+        kill(psql, 'SIGKILL');
+        pgDebug('end tunnel cleanup');
+    }
+    return stdoutPromise;
+}
+// trap SIGINT so that ctrl+c can be used by psql without killing the
+// parent node process.
+// you can use ctrl+c in psql to kill running queries
+// while keeping the psql process open.
+// This code is to stop the parent node process (heroku CLI)
+// from exiting. If the parent Heroku CLI node process exits, then psql will exit as it
+// is a child process of the Heroku CLI node process.
+const trapAndForwardSignalsToChildProcess = (childProcess) => {
+    const signalsToTrap = ['SIGINT'];
+    const signalTraps = signalsToTrap.map(signal => {
+        process.removeAllListeners(signal);
+        const listener = () => kill(childProcess, signal);
+        process.on(signal, listener);
+        return [signal, listener];
+    });
+    // restores the built-in node ctrl+c and other handlers
+    return () => {
+        for (const [signal, listener] of signalTraps) {
+            process.removeListener(signal, listener);
+        }
+    };
+};
+exports.trapAndForwardSignalsToChildProcess = trapAndForwardSignalsToChildProcess;
+async function waitForPSQLExit(psql) {
+    let errorToThrow = null;
+    try {
+        const [exitCode] = await (0, node_events_1.once)(psql, 'close');
+        pgDebug(`psql exited with code ${exitCode}`);
+        if (exitCode > 0) {
+            errorToThrow = new Error(`psql exited with code ${exitCode}`);
+        }
+    }
+    catch (error) {
+        pgDebug('psql process error', error);
+        const { code } = error;
+        if (code === 'ENOENT') {
+            errorToThrow = new Error('The local psql command could not be located. For help installing psql, see https://devcenter.heroku.com/articles/heroku-postgresql#local-setup');
+        }
+    }
+    if (errorToThrow) {
+        throw errorToThrow;
+    }
+}
+// a small wrapper around tunnel-ssh
+// so that other code doesn't have to worry about
+// whether there is or is not a tunnel
+class Tunnel {
+    constructor(bastionTunnel) {
+        this.bastionTunnel = bastionTunnel;
+        // eslint-disable-next-line unicorn/prefer-event-target
+        this.events = new node_events_1.EventEmitter();
+    }
+    static async connect(db, tunnelConfig) {
+        const tunnel = await (0, bastion_1.sshTunnel)(db, tunnelConfig);
+        return new Tunnel(tunnel);
+    }
+    close() {
+        if (this.bastionTunnel) {
+            pgDebug('close tunnel');
+            this.bastionTunnel.close();
+        }
+        else {
+            pgDebug('no tunnel necessary; sending fake close event');
+            this.events.emit('close', 0);
+        }
+    }
+    async waitForClose() {
+        if (this.bastionTunnel) {
+            try {
+                pgDebug('wait for tunnel close');
+                await (0, node_events_1.once)(this.bastionTunnel, 'close');
+                pgDebug('tunnel closed');
+            }
+            catch (error) {
+                pgDebug('tunnel close error', error);
+                throw new Error('Secure tunnel to your database failed');
+            }
+        }
+        else {
+            pgDebug('no bastion required; waiting for fake close event');
+            await (0, node_events_1.once)(this.events, 'close');
+        }
+    }
+}
+exports.Tunnel = Tunnel;
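
Taken together, databases.js and psql.js give a command a two-step flow: resolve an attachment into connection details, then run a query through a (possibly tunnelled) psql child process. A hedged sketch of that flow — the helper function, app name, and query are invented for illustration, and the deep import paths are assumptions:

import type {APIClient} from '@heroku-cli/command'
import {getDatabase} from '@heroku/heroku-cli-util/dist/utils/pg/databases' // hypothetical deep imports
import {exec} from '@heroku/heroku-cli-util/dist/utils/pg/psql'

// `heroku` is the authenticated APIClient an oclif command already holds.
async function countUsers(heroku: APIClient, app: string): Promise<string> {
  const db = await getDatabase(heroku, app, 'DATABASE_URL') // resolves attachment, config vars and bastion creds when needed
  return exec(db, 'SELECT count(*) FROM users;')            // spawns psql, opening an SSH tunnel first if db.bastionKey is set
}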

package/dist/ux/confirm.d.ts ADDED

@@ -0,0 +1 @@
+export declare function confirm(message: string): Promise<boolean>;

package/dist/ux/styled-header.d.ts ADDED

@@ -0,0 +1 @@
+export declare function styledHeader(header: string): void;

package/dist/ux/styled-json.d.ts ADDED

@@ -0,0 +1 @@
+export declare function styledJson(obj: unknown): void;

package/dist/ux/styled-object.d.ts ADDED

@@ -0,0 +1 @@
+export declare function styledObject(obj: unknown, keys?: string[]): void;

package/dist/ux/table.js ADDED

package/dist/ux/wait.d.ts ADDED

@@ -0,0 +1 @@
+export declare function wait(ms?: number): Promise<void>;
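
The single-line declarations above only pin down signatures for the new ux helpers; a sketch of how a command might combine them, with the deep dist/ux import paths assumed from the file layout rather than documented exports:

import {confirm} from '@heroku/heroku-cli-util/dist/ux/confirm'            // hypothetical deep imports
import {styledHeader} from '@heroku/heroku-cli-util/dist/ux/styled-header'
import {styledObject} from '@heroku/heroku-cli-util/dist/ux/styled-object'
import {wait} from '@heroku/heroku-cli-util/dist/ux/wait'

styledHeader('my-app databases')
styledObject({DATABASE_URL: 'postgres://…', PGBOUNCER_URL: 'postgres://…'}, ['DATABASE_URL'])
if (await confirm('Reset the database?')) {
  await wait(1000) // brief pause before continuing
}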