dbgate-api 5.5.6 → 5.5.7-alpha.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env +2 -0
- package/package.json +6 -5
- package/src/controllers/auth.js +10 -3
- package/src/controllers/config.js +16 -4
- package/src/controllers/connections.js +5 -0
- package/src/controllers/databaseConnections.js +38 -8
- package/src/controllers/runners.js +26 -14
- package/src/controllers/serverConnections.js +2 -1
- package/src/controllers/storage.js +6 -1
- package/src/currentVersion.js +2 -2
- package/src/main.js +11 -0
- package/src/proc/serverConnectionProcess.js +4 -4
- package/src/proc/sshForwardProcess.js +2 -2
- package/src/shell/autoIndexForeignKeysTransform.js +19 -0
- package/src/shell/dataTypeMapperTransform.js +21 -0
- package/src/shell/deployDb.js +48 -11
- package/src/shell/dropAllDbObjects.js +42 -0
- package/src/shell/executeQuery.js +12 -4
- package/src/shell/generateDeploySql.js +30 -4
- package/src/shell/index.js +12 -0
- package/src/shell/sqlTextReplacementTransform.js +32 -0
- package/src/utility/cloudUpgrade.js +61 -0
- package/src/utility/directories.js +3 -0
- package/src/utility/exportDbModelSql.js +80 -0
- package/src/utility/hardwareFingerprint.js +1 -0
- package/src/utility/importDbModel.js +2 -22
- package/src/utility/loadModelFolder.js +27 -0
- package/src/utility/loadModelTransform.js +36 -0
- package/src/utility/platformInfo.js +12 -2
- package/src/utility/sshTunnel.js +2 -1
package/.env
CHANGED
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "dbgate-api",
|
|
3
3
|
"main": "src/index.js",
|
|
4
|
-
"version": "5.5.
|
|
4
|
+
"version": "5.5.7-alpha.25",
|
|
5
5
|
"homepage": "https://dbgate.org/",
|
|
6
6
|
"repository": {
|
|
7
7
|
"type": "git",
|
|
@@ -27,10 +27,10 @@
|
|
|
27
27
|
"compare-versions": "^3.6.0",
|
|
28
28
|
"cors": "^2.8.5",
|
|
29
29
|
"cross-env": "^6.0.3",
|
|
30
|
-
"dbgate-datalib": "^5.5.
|
|
30
|
+
"dbgate-datalib": "^5.5.7-alpha.25",
|
|
31
31
|
"dbgate-query-splitter": "^4.11.2",
|
|
32
|
-
"dbgate-sqltree": "^5.5.
|
|
33
|
-
"dbgate-tools": "^5.5.
|
|
32
|
+
"dbgate-sqltree": "^5.5.7-alpha.25",
|
|
33
|
+
"dbgate-tools": "^5.5.7-alpha.25",
|
|
34
34
|
"debug": "^4.3.4",
|
|
35
35
|
"diff": "^5.0.0",
|
|
36
36
|
"diff2html": "^3.4.13",
|
|
@@ -56,6 +56,7 @@
|
|
|
56
56
|
"pinomin": "^1.0.4",
|
|
57
57
|
"portfinder": "^1.0.28",
|
|
58
58
|
"rimraf": "^3.0.0",
|
|
59
|
+
"semver": "^7.6.3",
|
|
59
60
|
"simple-encryptor": "^4.0.0",
|
|
60
61
|
"ssh2": "^1.11.0",
|
|
61
62
|
"stream-json": "^1.8.0",
|
|
@@ -77,7 +78,7 @@
|
|
|
77
78
|
"devDependencies": {
|
|
78
79
|
"@types/fs-extra": "^9.0.11",
|
|
79
80
|
"@types/lodash": "^4.14.149",
|
|
80
|
-
"dbgate-types": "^5.5.
|
|
81
|
+
"dbgate-types": "^5.5.7-alpha.25",
|
|
81
82
|
"env-cmd": "^10.1.0",
|
|
82
83
|
"node-loader": "^1.0.2",
|
|
83
84
|
"nodemon": "^2.0.2",
|
package/src/controllers/auth.js
CHANGED
|
@@ -36,8 +36,9 @@ function authMiddleware(req, res, next) {
|
|
|
36
36
|
'/auth/login',
|
|
37
37
|
'/auth/redirect',
|
|
38
38
|
'/stream',
|
|
39
|
-
'storage/get-connections-for-login-page',
|
|
40
|
-
'
|
|
39
|
+
'/storage/get-connections-for-login-page',
|
|
40
|
+
'/storage/set-admin-password',
|
|
41
|
+
'/auth/get-providers',
|
|
41
42
|
'/connections/dblogin-web',
|
|
42
43
|
'/connections/dblogin-app',
|
|
43
44
|
'/connections/dblogin-auth',
|
|
@@ -69,6 +70,7 @@ function authMiddleware(req, res, next) {
|
|
|
69
70
|
return next();
|
|
70
71
|
} catch (err) {
|
|
71
72
|
if (skipAuth) {
|
|
73
|
+
req.isInvalidToken = true;
|
|
72
74
|
return next();
|
|
73
75
|
}
|
|
74
76
|
|
|
@@ -89,7 +91,12 @@ module.exports = {
|
|
|
89
91
|
const { amoid, login, password, isAdminPage } = params;
|
|
90
92
|
|
|
91
93
|
if (isAdminPage) {
|
|
92
|
-
|
|
94
|
+
let adminPassword = process.env.ADMIN_PASSWORD;
|
|
95
|
+
if (!adminPassword) {
|
|
96
|
+
const adminConfig = await storage.readConfig({ group: 'admin' });
|
|
97
|
+
adminPassword = adminConfig?.adminPassword;
|
|
98
|
+
}
|
|
99
|
+
if (adminPassword && adminPassword == password) {
|
|
93
100
|
return {
|
|
94
101
|
accessToken: jwt.sign(
|
|
95
102
|
{
|
|
@@ -60,8 +60,16 @@ module.exports = {
|
|
|
60
60
|
const checkedLicense = storageConnectionError ? null : await checkLicense();
|
|
61
61
|
const isLicenseValid = checkedLicense?.status == 'ok';
|
|
62
62
|
const logoutUrl = storageConnectionError ? null : await authProvider.getLogoutUrl();
|
|
63
|
+
const adminConfig = storageConnectionError ? null : await storage.readConfig({ group: 'admin' });
|
|
63
64
|
|
|
64
|
-
|
|
65
|
+
const isAdminPasswordMissing = !!(
|
|
66
|
+
process.env.STORAGE_DATABASE &&
|
|
67
|
+
!process.env.ADMIN_PASSWORD &&
|
|
68
|
+
!process.env.BASIC_AUTH &&
|
|
69
|
+
!adminConfig?.adminPasswordState
|
|
70
|
+
);
|
|
71
|
+
|
|
72
|
+
const configResult = {
|
|
65
73
|
runAsPortal: !!connections.portalConnections,
|
|
66
74
|
singleDbConnection: connections.singleDbConnection,
|
|
67
75
|
singleConnection: singleConnection,
|
|
@@ -83,15 +91,19 @@ module.exports = {
|
|
|
83
91
|
isBasicAuth: !!process.env.BASIC_AUTH,
|
|
84
92
|
isAdminLoginForm: !!(
|
|
85
93
|
process.env.STORAGE_DATABASE &&
|
|
86
|
-
process.env.ADMIN_PASSWORD &&
|
|
87
|
-
!process.env.BASIC_AUTH
|
|
88
|
-
checkedLicense?.type == 'premium'
|
|
94
|
+
(process.env.ADMIN_PASSWORD || adminConfig?.adminPasswordState == 'set') &&
|
|
95
|
+
!process.env.BASIC_AUTH
|
|
89
96
|
),
|
|
97
|
+
isAdminPasswordMissing,
|
|
98
|
+
isInvalidToken: req?.isInvalidToken,
|
|
99
|
+
adminPasswordState: adminConfig?.adminPasswordState,
|
|
90
100
|
storageDatabase: process.env.STORAGE_DATABASE,
|
|
91
101
|
logsFilePath: getLogsFilePath(),
|
|
92
102
|
connectionsFilePath: path.join(datadir(), 'connections.jsonl'),
|
|
93
103
|
...currentVersion,
|
|
94
104
|
};
|
|
105
|
+
|
|
106
|
+
return configResult;
|
|
95
107
|
},
|
|
96
108
|
|
|
97
109
|
logout_meta: {
|
|
@@ -368,6 +368,11 @@ module.exports = {
|
|
|
368
368
|
|
|
369
369
|
get_meta: true,
|
|
370
370
|
async get({ conid }, req) {
|
|
371
|
+
if (conid == '__model') {
|
|
372
|
+
return {
|
|
373
|
+
_id: '__model',
|
|
374
|
+
};
|
|
375
|
+
}
|
|
371
376
|
testConnectionPermission(conid, req);
|
|
372
377
|
return this.getCore({ conid, mask: true });
|
|
373
378
|
},
|
|
@@ -13,6 +13,7 @@ const {
|
|
|
13
13
|
modelCompareDbDiffOptions,
|
|
14
14
|
getLogger,
|
|
15
15
|
extractErrorLogData,
|
|
16
|
+
filterStructureBySchema,
|
|
16
17
|
} = require('dbgate-tools');
|
|
17
18
|
const { html, parse } = require('diff2html');
|
|
18
19
|
const { handleProcessCommunication } = require('../utility/processComm');
|
|
@@ -31,6 +32,8 @@ const { testConnectionPermission } = require('../utility/hasPermission');
|
|
|
31
32
|
const { MissingCredentialsError } = require('../utility/exceptions');
|
|
32
33
|
const pipeForkLogs = require('../utility/pipeForkLogs');
|
|
33
34
|
const crypto = require('crypto');
|
|
35
|
+
const loadModelTransform = require('../utility/loadModelTransform');
|
|
36
|
+
const exportDbModelSql = require('../utility/exportDbModelSql');
|
|
34
37
|
|
|
35
38
|
const logger = getLogger('databaseConnections');
|
|
36
39
|
|
|
@@ -349,6 +352,11 @@ module.exports = {
|
|
|
349
352
|
|
|
350
353
|
syncModel_meta: true,
|
|
351
354
|
async syncModel({ conid, database, isFullRefresh }, req) {
|
|
355
|
+
if (conid == '__model') {
|
|
356
|
+
socket.emitChanged('database-structure-changed', { conid, database });
|
|
357
|
+
return { status: 'ok' };
|
|
358
|
+
}
|
|
359
|
+
|
|
352
360
|
testConnectionPermission(conid, req);
|
|
353
361
|
const conn = await this.ensureOpened(conid, database);
|
|
354
362
|
conn.subprocess.send({ msgtype: 'syncModel', isFullRefresh });
|
|
@@ -392,11 +400,12 @@ module.exports = {
|
|
|
392
400
|
},
|
|
393
401
|
|
|
394
402
|
structure_meta: true,
|
|
395
|
-
async structure({ conid, database }, req) {
|
|
403
|
+
async structure({ conid, database, modelTransFile = null }, req) {
|
|
396
404
|
testConnectionPermission(conid, req);
|
|
397
405
|
if (conid == '__model') {
|
|
398
406
|
const model = await importDbModel(database);
|
|
399
|
-
|
|
407
|
+
const trans = await loadModelTransform(modelTransFile);
|
|
408
|
+
return trans ? trans(model) : model;
|
|
400
409
|
}
|
|
401
410
|
|
|
402
411
|
const opened = await this.ensureOpened(conid, database);
|
|
@@ -432,14 +441,35 @@ module.exports = {
|
|
|
432
441
|
},
|
|
433
442
|
|
|
434
443
|
exportModel_meta: true,
|
|
435
|
-
async exportModel({ conid, database }, req) {
|
|
444
|
+
async exportModel({ conid, database, outputFolder, schema }, req) {
|
|
445
|
+
testConnectionPermission(conid, req);
|
|
446
|
+
|
|
447
|
+
const realFolder = outputFolder.startsWith('archive:')
|
|
448
|
+
? resolveArchiveFolder(outputFolder.substring('archive:'.length))
|
|
449
|
+
: outputFolder;
|
|
450
|
+
|
|
451
|
+
const model = await this.structure({ conid, database });
|
|
452
|
+
const filteredModel = schema ? filterStructureBySchema(model, schema) : model;
|
|
453
|
+
await exportDbModel(extendDatabaseInfo(filteredModel), realFolder);
|
|
454
|
+
|
|
455
|
+
if (outputFolder.startsWith('archive:')) {
|
|
456
|
+
socket.emitChanged(`archive-files-changed`, { folder: outputFolder.substring('archive:'.length) });
|
|
457
|
+
}
|
|
458
|
+
return { status: 'ok' };
|
|
459
|
+
},
|
|
460
|
+
|
|
461
|
+
exportModelSql_meta: true,
|
|
462
|
+
async exportModelSql({ conid, database, outputFolder, outputFile, schema }, req) {
|
|
436
463
|
testConnectionPermission(conid, req);
|
|
437
|
-
|
|
438
|
-
await
|
|
464
|
+
|
|
465
|
+
const connection = await connections.getCore({ conid });
|
|
466
|
+
const driver = requireEngineDriver(connection);
|
|
467
|
+
|
|
439
468
|
const model = await this.structure({ conid, database });
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
469
|
+
const filteredModel = schema ? filterStructureBySchema(model, schema) : model;
|
|
470
|
+
await exportDbModelSql(extendDatabaseInfo(filteredModel), driver, outputFolder, outputFile);
|
|
471
|
+
|
|
472
|
+
return { status: 'ok' };
|
|
443
473
|
},
|
|
444
474
|
|
|
445
475
|
generateDeploySql_meta: true,
|
|
@@ -12,6 +12,7 @@ const {
|
|
|
12
12
|
jsonScriptToJavascript,
|
|
13
13
|
getLogger,
|
|
14
14
|
safeJsonParse,
|
|
15
|
+
pinoLogRecordToMessageRecord,
|
|
15
16
|
} = require('dbgate-tools');
|
|
16
17
|
const { handleProcessCommunication } = require('../utility/processComm');
|
|
17
18
|
const processArgs = require('../utility/processArgs');
|
|
@@ -68,18 +69,20 @@ module.exports = {
|
|
|
68
69
|
|
|
69
70
|
dispatchMessage(runid, message) {
|
|
70
71
|
if (message) {
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
72
|
+
if (_.isPlainObject(message)) logger.log(message);
|
|
73
|
+
else logger.info(message);
|
|
74
|
+
|
|
75
|
+
const toEmit = _.isPlainObject(message)
|
|
76
|
+
? {
|
|
77
|
+
time: new Date(),
|
|
78
|
+
...message,
|
|
79
|
+
}
|
|
80
|
+
: {
|
|
81
|
+
message,
|
|
82
|
+
time: new Date(),
|
|
83
|
+
};
|
|
84
|
+
|
|
85
|
+
if (toEmit.level >= 50) {
|
|
83
86
|
toEmit.severity = 'error';
|
|
84
87
|
}
|
|
85
88
|
|
|
@@ -131,7 +134,16 @@ module.exports = {
|
|
|
131
134
|
}
|
|
132
135
|
);
|
|
133
136
|
const pipeDispatcher = severity => data => {
|
|
134
|
-
|
|
137
|
+
const json = safeJsonParse(data, null);
|
|
138
|
+
|
|
139
|
+
if (json) {
|
|
140
|
+
return this.dispatchMessage(runid, pinoLogRecordToMessageRecord(json));
|
|
141
|
+
} else {
|
|
142
|
+
return this.dispatchMessage(runid, {
|
|
143
|
+
message: json == null ? data.toString().trim() : null,
|
|
144
|
+
severity,
|
|
145
|
+
});
|
|
146
|
+
}
|
|
135
147
|
};
|
|
136
148
|
|
|
137
149
|
byline(subprocess.stdout).on('data', pipeDispatcher('info'));
|
|
@@ -165,7 +177,7 @@ module.exports = {
|
|
|
165
177
|
|
|
166
178
|
start_meta: true,
|
|
167
179
|
async start({ script }) {
|
|
168
|
-
const runid = crypto.randomUUID()
|
|
180
|
+
const runid = crypto.randomUUID();
|
|
169
181
|
|
|
170
182
|
if (script.type == 'json') {
|
|
171
183
|
const js = jsonScriptToJavascript(script);
|
|
@@ -134,6 +134,7 @@ module.exports = {
|
|
|
134
134
|
listDatabases_meta: true,
|
|
135
135
|
async listDatabases({ conid }, req) {
|
|
136
136
|
if (!conid) return [];
|
|
137
|
+
if (conid == '__model') return [];
|
|
137
138
|
testConnectionPermission(conid, req);
|
|
138
139
|
const opened = await this.ensureOpened(conid);
|
|
139
140
|
return opened.databases;
|
|
@@ -172,7 +173,7 @@ module.exports = {
|
|
|
172
173
|
}
|
|
173
174
|
})
|
|
174
175
|
);
|
|
175
|
-
socket.setStreamIdFilter(strmid, { conid: conidArray });
|
|
176
|
+
socket.setStreamIdFilter(strmid, { conid: [...(conidArray ?? []), '__model'] });
|
|
176
177
|
return { status: 'ok' };
|
|
177
178
|
},
|
|
178
179
|
|
package/src/currentVersion.js
CHANGED
package/src/main.js
CHANGED
|
@@ -35,6 +35,7 @@ const getExpressPath = require('./utility/getExpressPath');
|
|
|
35
35
|
const _ = require('lodash');
|
|
36
36
|
const { getLogger } = require('dbgate-tools');
|
|
37
37
|
const { getDefaultAuthProvider } = require('./auth/authProvider');
|
|
38
|
+
const startCloudUpgradeTimer = require('./utility/cloudUpgrade');
|
|
38
39
|
|
|
39
40
|
const logger = getLogger('main');
|
|
40
41
|
|
|
@@ -73,6 +74,8 @@ function start() {
|
|
|
73
74
|
if (platformInfo.isDocker) {
|
|
74
75
|
// server static files inside docker container
|
|
75
76
|
app.use(getExpressPath('/'), express.static('/home/dbgate-docker/public'));
|
|
77
|
+
} else if (platformInfo.isAwsUbuntuLayout) {
|
|
78
|
+
app.use(getExpressPath('/'), express.static('/home/ubuntu/build/public'));
|
|
76
79
|
} else if (platformInfo.isNpmDist) {
|
|
77
80
|
app.use(getExpressPath('/'), express.static(path.join(__dirname, '../../dbgate-web/public')));
|
|
78
81
|
} else if (process.env.DEVWEB) {
|
|
@@ -126,6 +129,10 @@ function start() {
|
|
|
126
129
|
const port = process.env.PORT || 3000;
|
|
127
130
|
logger.info(`DbGate API listening on port ${port} (docker build)`);
|
|
128
131
|
server.listen(port);
|
|
132
|
+
} else if (platformInfo.isAwsUbuntuLayout) {
|
|
133
|
+
const port = process.env.PORT || 3000;
|
|
134
|
+
logger.info(`DbGate API listening on port ${port} (AWS AMI build)`);
|
|
135
|
+
server.listen(port);
|
|
129
136
|
} else if (platformInfo.isNpmDist) {
|
|
130
137
|
getPort({
|
|
131
138
|
port: parseInt(
|
|
@@ -162,6 +169,10 @@ function start() {
|
|
|
162
169
|
process.on('SIGINT', shutdown);
|
|
163
170
|
process.on('SIGTERM', shutdown);
|
|
164
171
|
process.on('SIGBREAK', shutdown);
|
|
172
|
+
|
|
173
|
+
if (process.env.CLOUD_UPGRADE_FILE) {
|
|
174
|
+
startCloudUpgradeTimer();
|
|
175
|
+
}
|
|
165
176
|
}
|
|
166
177
|
|
|
167
178
|
function useAllControllers(app, electron) {
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
const stableStringify = require('json-stable-stringify');
|
|
2
|
-
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger } = require('dbgate-tools');
|
|
2
|
+
const { extractBoolSettingsValue, extractIntSettingsValue, getLogger, extractErrorLogData } = require('dbgate-tools');
|
|
3
3
|
const childProcessChecker = require('../utility/childProcessChecker');
|
|
4
4
|
const requireEngineDriver = require('../utility/requireEngineDriver');
|
|
5
5
|
const connectUtility = require('../utility/connectUtility');
|
|
@@ -39,7 +39,7 @@ async function handleRefresh() {
|
|
|
39
39
|
name: 'error',
|
|
40
40
|
message: err.message,
|
|
41
41
|
});
|
|
42
|
-
|
|
42
|
+
logger.error(extractErrorLogData(err), 'Error refreshing server databases');
|
|
43
43
|
setTimeout(() => process.exit(1), 1000);
|
|
44
44
|
}
|
|
45
45
|
}
|
|
@@ -84,7 +84,7 @@ async function handleConnect(connection) {
|
|
|
84
84
|
name: 'error',
|
|
85
85
|
message: err.message,
|
|
86
86
|
});
|
|
87
|
-
|
|
87
|
+
logger.error(extractErrorLogData(err), 'Error connecting to server');
|
|
88
88
|
setTimeout(() => process.exit(1), 1000);
|
|
89
89
|
}
|
|
90
90
|
|
|
@@ -164,7 +164,6 @@ function start() {
|
|
|
164
164
|
setInterval(async () => {
|
|
165
165
|
const time = new Date().getTime();
|
|
166
166
|
if (time - lastPing > 40 * 1000) {
|
|
167
|
-
|
|
168
167
|
logger.info('Server connection not alive, exiting');
|
|
169
168
|
const driver = requireEngineDriver(storedConnection);
|
|
170
169
|
await driver.close(dbhan);
|
|
@@ -181,6 +180,7 @@ function start() {
|
|
|
181
180
|
name: 'error',
|
|
182
181
|
message: err.message,
|
|
183
182
|
});
|
|
183
|
+
logger.error(extractErrorLogData(err), `Error processing message ${message?.['msgtype']}`);
|
|
184
184
|
}
|
|
185
185
|
});
|
|
186
186
|
}
|
|
@@ -3,7 +3,7 @@ const platformInfo = require('../utility/platformInfo');
|
|
|
3
3
|
const childProcessChecker = require('../utility/childProcessChecker');
|
|
4
4
|
const { handleProcessCommunication } = require('../utility/processComm');
|
|
5
5
|
const { SSHConnection } = require('../utility/SSHConnection');
|
|
6
|
-
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
|
6
|
+
const { getLogger, extractErrorLogData, extractErrorMessage } = require('dbgate-tools');
|
|
7
7
|
|
|
8
8
|
const logger = getLogger('sshProcess');
|
|
9
9
|
|
|
@@ -46,7 +46,7 @@ async function handleStart({ connection, tunnelConfig }) {
|
|
|
46
46
|
msgtype: 'error',
|
|
47
47
|
connection,
|
|
48
48
|
tunnelConfig,
|
|
49
|
-
errorMessage: err.message,
|
|
49
|
+
errorMessage: extractErrorMessage(err.message),
|
|
50
50
|
});
|
|
51
51
|
}
|
|
52
52
|
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
const autoIndexForeignKeysTransform = () => database => {
|
|
2
|
+
return {
|
|
3
|
+
...database,
|
|
4
|
+
tables: database.tables.map(table => {
|
|
5
|
+
return {
|
|
6
|
+
...table,
|
|
7
|
+
indexes: [
|
|
8
|
+
...(table.indexes || []),
|
|
9
|
+
...table.foreignKeys.map(fk => ({
|
|
10
|
+
constraintName: `IX_${fk.constraintName}`,
|
|
11
|
+
columns: fk.columns.map(x => ({ columnName: x.columnName })),
|
|
12
|
+
})),
|
|
13
|
+
],
|
|
14
|
+
};
|
|
15
|
+
}),
|
|
16
|
+
};
|
|
17
|
+
};
|
|
18
|
+
|
|
19
|
+
module.exports = autoIndexForeignKeysTransform;
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
const dataTypeMapperTransform = (oldType, newType) => database => {
|
|
2
|
+
return {
|
|
3
|
+
...database,
|
|
4
|
+
tables: database.tables.map(table => {
|
|
5
|
+
return {
|
|
6
|
+
...table,
|
|
7
|
+
columns: table.columns.map(column => {
|
|
8
|
+
if (column.dataType?.toLowerCase() === oldType?.toLowerCase()) {
|
|
9
|
+
return {
|
|
10
|
+
...column,
|
|
11
|
+
dataType: newType,
|
|
12
|
+
};
|
|
13
|
+
}
|
|
14
|
+
return column;
|
|
15
|
+
}),
|
|
16
|
+
};
|
|
17
|
+
}),
|
|
18
|
+
};
|
|
19
|
+
};
|
|
20
|
+
|
|
21
|
+
module.exports = dataTypeMapperTransform;
|
package/src/shell/deployDb.js
CHANGED
|
@@ -1,17 +1,54 @@
|
|
|
1
1
|
const generateDeploySql = require('./generateDeploySql');
|
|
2
2
|
const executeQuery = require('./executeQuery');
|
|
3
|
+
const { ScriptDrivedDeployer } = require('dbgate-datalib');
|
|
4
|
+
const connectUtility = require('../utility/connectUtility');
|
|
5
|
+
const requireEngineDriver = require('../utility/requireEngineDriver');
|
|
6
|
+
const loadModelFolder = require('../utility/loadModelFolder');
|
|
3
7
|
|
|
4
|
-
async function deployDb({
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
8
|
+
async function deployDb({
|
|
9
|
+
connection,
|
|
10
|
+
systemConnection,
|
|
11
|
+
driver,
|
|
12
|
+
analysedStructure,
|
|
13
|
+
modelFolder,
|
|
14
|
+
loadedDbModel,
|
|
15
|
+
modelTransforms,
|
|
16
|
+
dbdiffOptionsExtra,
|
|
17
|
+
ignoreNameRegex = '',
|
|
18
|
+
targetSchema = null,
|
|
19
|
+
}) {
|
|
20
|
+
if (!driver) driver = requireEngineDriver(connection);
|
|
21
|
+
const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
|
|
22
|
+
|
|
23
|
+
try {
|
|
24
|
+
const scriptDeployer = new ScriptDrivedDeployer(
|
|
25
|
+
dbhan,
|
|
26
|
+
driver,
|
|
27
|
+
loadedDbModel ?? (await loadModelFolder(modelFolder))
|
|
28
|
+
);
|
|
29
|
+
await scriptDeployer.runPre();
|
|
30
|
+
|
|
31
|
+
const { sql } = await generateDeploySql({
|
|
32
|
+
connection,
|
|
33
|
+
systemConnection: dbhan,
|
|
34
|
+
driver,
|
|
35
|
+
analysedStructure,
|
|
36
|
+
modelFolder,
|
|
37
|
+
loadedDbModel,
|
|
38
|
+
modelTransforms,
|
|
39
|
+
dbdiffOptionsExtra,
|
|
40
|
+
ignoreNameRegex,
|
|
41
|
+
targetSchema,
|
|
42
|
+
});
|
|
43
|
+
// console.log('RUNNING DEPLOY SCRIPT:', sql);
|
|
44
|
+
await executeQuery({ connection, systemConnection: dbhan, driver, sql, logScriptItems: true });
|
|
45
|
+
|
|
46
|
+
await scriptDeployer.runPost();
|
|
47
|
+
} finally {
|
|
48
|
+
if (!systemConnection) {
|
|
49
|
+
await driver.close(dbhan);
|
|
50
|
+
}
|
|
51
|
+
}
|
|
15
52
|
}
|
|
16
53
|
|
|
17
54
|
module.exports = deployDb;
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
const executeQuery = require('./executeQuery');
|
|
2
|
+
const requireEngineDriver = require('../utility/requireEngineDriver');
|
|
3
|
+
const connectUtility = require('../utility/connectUtility');
|
|
4
|
+
const { getLogger, extendDatabaseInfo } = require('dbgate-tools');
|
|
5
|
+
|
|
6
|
+
const logger = getLogger('dropAllDbObjects');
|
|
7
|
+
|
|
8
|
+
async function dropAllDbObjects({ connection, systemConnection, driver, analysedStructure }) {
|
|
9
|
+
if (!driver) driver = requireEngineDriver(connection);
|
|
10
|
+
|
|
11
|
+
const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
|
|
12
|
+
|
|
13
|
+
logger.info(`Connected.`);
|
|
14
|
+
|
|
15
|
+
if (!analysedStructure) {
|
|
16
|
+
analysedStructure = await driver.analyseFull(dbhan);
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
analysedStructure = extendDatabaseInfo(analysedStructure);
|
|
20
|
+
|
|
21
|
+
const dmp = driver.createDumper();
|
|
22
|
+
|
|
23
|
+
for (const table of analysedStructure.tables) {
|
|
24
|
+
for (const fk of table.foreignKeys) {
|
|
25
|
+
dmp.dropForeignKey(fk);
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
for (const table of analysedStructure.tables) {
|
|
29
|
+
dmp.dropTable(table);
|
|
30
|
+
}
|
|
31
|
+
for (const field of Object.keys(analysedStructure)) {
|
|
32
|
+
if (dmp.getSqlObjectSqlName(field)) {
|
|
33
|
+
for (const obj of analysedStructure[field]) {
|
|
34
|
+
dmp.dropSqlObject(obj);
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
await executeQuery({ connection, systemConnection, driver, sql: dmp.s, logScriptItems: true });
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
module.exports = dropAllDbObjects;
|
|
@@ -1,11 +1,19 @@
|
|
|
1
1
|
const requireEngineDriver = require('../utility/requireEngineDriver');
|
|
2
2
|
const connectUtility = require('../utility/connectUtility');
|
|
3
|
-
const { getLogger } = require('dbgate-tools');
|
|
3
|
+
const { getLogger, getLimitedQuery } = require('dbgate-tools');
|
|
4
4
|
|
|
5
5
|
const logger = getLogger('execQuery');
|
|
6
6
|
|
|
7
|
-
async function executeQuery({
|
|
8
|
-
|
|
7
|
+
async function executeQuery({
|
|
8
|
+
connection = undefined,
|
|
9
|
+
systemConnection = undefined,
|
|
10
|
+
driver = undefined,
|
|
11
|
+
sql,
|
|
12
|
+
logScriptItems = false,
|
|
13
|
+
}) {
|
|
14
|
+
if (!logScriptItems) {
|
|
15
|
+
logger.info({ sql: getLimitedQuery(sql) }, `Execute query`);
|
|
16
|
+
}
|
|
9
17
|
|
|
10
18
|
if (!driver) driver = requireEngineDriver(connection);
|
|
11
19
|
const dbhan = systemConnection || (await connectUtility(driver, connection, 'script'));
|
|
@@ -13,7 +21,7 @@ async function executeQuery({ connection = undefined, systemConnection = undefin
|
|
|
13
21
|
try {
|
|
14
22
|
logger.info(`Connected.`);
|
|
15
23
|
|
|
16
|
-
await driver.script(dbhan, sql);
|
|
24
|
+
await driver.script(dbhan, sql, { logScriptItems });
|
|
17
25
|
} finally {
|
|
18
26
|
if (!systemConnection) {
|
|
19
27
|
await driver.close(dbhan);
|
|
@@ -6,6 +6,10 @@ const {
|
|
|
6
6
|
extendDatabaseInfo,
|
|
7
7
|
modelCompareDbDiffOptions,
|
|
8
8
|
enrichWithPreloadedRows,
|
|
9
|
+
skipNamesInStructureByRegex,
|
|
10
|
+
replaceSchemaInStructure,
|
|
11
|
+
filterStructureBySchema,
|
|
12
|
+
skipDbGateInternalObjects,
|
|
9
13
|
} = require('dbgate-tools');
|
|
10
14
|
const importDbModel = require('../utility/importDbModel');
|
|
11
15
|
const requireEngineDriver = require('../utility/requireEngineDriver');
|
|
@@ -18,6 +22,10 @@ async function generateDeploySql({
|
|
|
18
22
|
analysedStructure = undefined,
|
|
19
23
|
modelFolder = undefined,
|
|
20
24
|
loadedDbModel = undefined,
|
|
25
|
+
modelTransforms = undefined,
|
|
26
|
+
dbdiffOptionsExtra = {},
|
|
27
|
+
ignoreNameRegex = '',
|
|
28
|
+
targetSchema = null,
|
|
21
29
|
}) {
|
|
22
30
|
if (!driver) driver = requireEngineDriver(connection);
|
|
23
31
|
|
|
@@ -28,9 +36,25 @@ async function generateDeploySql({
|
|
|
28
36
|
analysedStructure = await driver.analyseFull(dbhan);
|
|
29
37
|
}
|
|
30
38
|
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
39
|
+
if (ignoreNameRegex) {
|
|
40
|
+
analysedStructure = skipNamesInStructureByRegex(analysedStructure, new RegExp(ignoreNameRegex, 'i'));
|
|
41
|
+
}
|
|
42
|
+
analysedStructure = skipDbGateInternalObjects(analysedStructure);
|
|
43
|
+
|
|
44
|
+
let deployedModelSource = loadedDbModel
|
|
45
|
+
? databaseInfoFromYamlModel(loadedDbModel)
|
|
46
|
+
: await importDbModel(modelFolder);
|
|
47
|
+
|
|
48
|
+
for (const transform of modelTransforms || []) {
|
|
49
|
+
deployedModelSource = transform(deployedModelSource);
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
if (targetSchema) {
|
|
53
|
+
deployedModelSource = replaceSchemaInStructure(deployedModelSource, targetSchema);
|
|
54
|
+
analysedStructure = filterStructureBySchema(analysedStructure, targetSchema);
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
const deployedModel = generateDbPairingId(extendDatabaseInfo(deployedModelSource));
|
|
34
58
|
const currentModel = generateDbPairingId(extendDatabaseInfo(analysedStructure));
|
|
35
59
|
const opts = {
|
|
36
60
|
...modelCompareDbDiffOptions,
|
|
@@ -41,6 +65,8 @@ async function generateDeploySql({
|
|
|
41
65
|
noDropSqlObject: true,
|
|
42
66
|
noRenameTable: true,
|
|
43
67
|
noRenameColumn: true,
|
|
68
|
+
|
|
69
|
+
...dbdiffOptionsExtra,
|
|
44
70
|
};
|
|
45
71
|
const currentModelPaired = matchPairedObjects(deployedModel, currentModel, opts);
|
|
46
72
|
const currentModelPairedPreloaded = await enrichWithPreloadedRows(deployedModel, currentModelPaired, dbhan, driver);
|
|
@@ -57,7 +83,7 @@ async function generateDeploySql({
|
|
|
57
83
|
deployedModel,
|
|
58
84
|
driver
|
|
59
85
|
);
|
|
60
|
-
|
|
86
|
+
|
|
61
87
|
return res;
|
|
62
88
|
} finally {
|
|
63
89
|
if (!systemConnection) {
|
package/src/shell/index.js
CHANGED
|
@@ -30,6 +30,12 @@ const dataDuplicator = require('./dataDuplicator');
|
|
|
30
30
|
const dbModelToJson = require('./dbModelToJson');
|
|
31
31
|
const jsonToDbModel = require('./jsonToDbModel');
|
|
32
32
|
const jsonReader = require('./jsonReader');
|
|
33
|
+
const dataTypeMapperTransform = require('./dataTypeMapperTransform');
|
|
34
|
+
const sqlTextReplacementTransform = require('./sqlTextReplacementTransform');
|
|
35
|
+
const autoIndexForeignKeysTransform = require('./autoIndexForeignKeysTransform');
|
|
36
|
+
const generateDeploySql = require('./generateDeploySql');
|
|
37
|
+
const dropAllDbObjects = require('./dropAllDbObjects');
|
|
38
|
+
const scriptDrivedDeploy = require('./scriptDrivedDeploy');
|
|
33
39
|
|
|
34
40
|
const dbgateApi = {
|
|
35
41
|
queryReader,
|
|
@@ -63,6 +69,12 @@ const dbgateApi = {
|
|
|
63
69
|
dataDuplicator,
|
|
64
70
|
dbModelToJson,
|
|
65
71
|
jsonToDbModel,
|
|
72
|
+
dataTypeMapperTransform,
|
|
73
|
+
sqlTextReplacementTransform,
|
|
74
|
+
autoIndexForeignKeysTransform,
|
|
75
|
+
generateDeploySql,
|
|
76
|
+
dropAllDbObjects,
|
|
77
|
+
scriptDrivedDeploy,
|
|
66
78
|
};
|
|
67
79
|
|
|
68
80
|
requirePlugin.initializeDbgateApi(dbgateApi);
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
function replaceInText(text, replacements) {
|
|
2
|
+
let result = text;
|
|
3
|
+
for (const key of Object.keys(replacements)) {
|
|
4
|
+
result = result.split(key).join(replacements[key]);
|
|
5
|
+
}
|
|
6
|
+
return result;
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
function replaceInCollection(collection, replacements) {
|
|
10
|
+
if (!collection) return collection;
|
|
11
|
+
return collection.map(item => {
|
|
12
|
+
if (item.createSql) {
|
|
13
|
+
return {
|
|
14
|
+
...item,
|
|
15
|
+
createSql: replaceInText(item.createSql, replacements),
|
|
16
|
+
};
|
|
17
|
+
}
|
|
18
|
+
return item;
|
|
19
|
+
});
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
const sqlTextReplacementTransform = replacements => database => {
|
|
23
|
+
return {
|
|
24
|
+
...database,
|
|
25
|
+
views: replaceInCollection(database.views, replacements),
|
|
26
|
+
matviews: replaceInCollection(database.matviews, replacements),
|
|
27
|
+
procedures: replaceInCollection(database.procedures, replacements),
|
|
28
|
+
functions: replaceInCollection(database.functions, replacements),
|
|
29
|
+
};
|
|
30
|
+
};
|
|
31
|
+
|
|
32
|
+
module.exports = sqlTextReplacementTransform;
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
const axios = require('axios');
|
|
2
|
+
const fs = require('fs');
|
|
3
|
+
const fsp = require('fs/promises');
|
|
4
|
+
const semver = require('semver');
|
|
5
|
+
const currentVersion = require('../currentVersion');
|
|
6
|
+
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
|
7
|
+
|
|
8
|
+
const logger = getLogger('cloudUpgrade');
|
|
9
|
+
|
|
10
|
+
async function checkCloudUpgrade() {
|
|
11
|
+
try {
|
|
12
|
+
const resp = await axios.default.get('https://api.github.com/repos/dbgate/dbgate/releases/latest');
|
|
13
|
+
const json = resp.data;
|
|
14
|
+
const version = json.name.substring(1);
|
|
15
|
+
let cloudDownloadedVersion = null;
|
|
16
|
+
try {
|
|
17
|
+
cloudDownloadedVersion = await fsp.readFile(process.env.CLOUD_UPGRADE_FILE + '.version', 'utf-8');
|
|
18
|
+
} catch (err) {
|
|
19
|
+
cloudDownloadedVersion = null;
|
|
20
|
+
}
|
|
21
|
+
if (
|
|
22
|
+
semver.gt(version, currentVersion.version) &&
|
|
23
|
+
(!cloudDownloadedVersion || semver.gt(version, cloudDownloadedVersion))
|
|
24
|
+
) {
|
|
25
|
+
logger.info(`New version available: ${version}`);
|
|
26
|
+
const zipUrl = json.assets.find(x => x.name == 'cloud-build.zip').browser_download_url;
|
|
27
|
+
|
|
28
|
+
const writer = fs.createWriteStream(process.env.CLOUD_UPGRADE_FILE);
|
|
29
|
+
|
|
30
|
+
const response = await axios.default({
|
|
31
|
+
url: zipUrl,
|
|
32
|
+
method: 'GET',
|
|
33
|
+
responseType: 'stream',
|
|
34
|
+
});
|
|
35
|
+
|
|
36
|
+
response.data.pipe(writer);
|
|
37
|
+
|
|
38
|
+
await new Promise((resolve, reject) => {
|
|
39
|
+
writer.on('finish', resolve);
|
|
40
|
+
writer.on('error', reject);
|
|
41
|
+
});
|
|
42
|
+
await fsp.writeFile(process.env.CLOUD_UPGRADE_FILE + '.version', version);
|
|
43
|
+
|
|
44
|
+
logger.info(`Downloaded new version from ${zipUrl}`);
|
|
45
|
+
} else {
|
|
46
|
+
logger.info(`Checked version ${version} is not newer than ${cloudDownloadedVersion ?? currentVersion.version}, upgrade skippped`);
|
|
47
|
+
}
|
|
48
|
+
} catch (err) {
|
|
49
|
+
logger.error(extractErrorLogData(err), 'Error checking cloud upgrade');
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
function startCloudUpgradeTimer() {
|
|
54
|
+
// at first in 5 seconds
|
|
55
|
+
setTimeout(checkCloudUpgrade, 5000);
|
|
56
|
+
|
|
57
|
+
// hourly
|
|
58
|
+
setInterval(checkCloudUpgrade, 60 * 60 * 1000);
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
module.exports = startCloudUpgradeTimer;
|
|
@@ -77,6 +77,9 @@ function packagedPluginsDir() {
|
|
|
77
77
|
if (platformInfo.isDocker) {
|
|
78
78
|
return '/home/dbgate-docker/plugins';
|
|
79
79
|
}
|
|
80
|
+
if (platformInfo.isAwsUbuntuLayout) {
|
|
81
|
+
return '/home/ubuntu/build/plugins';
|
|
82
|
+
}
|
|
80
83
|
if (platformInfo.isNpmDist) {
|
|
81
84
|
// node_modules
|
|
82
85
|
return global['PLUGINS_DIR'];
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
const fs = require('fs-extra');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const { getSchemasUsedByStructure } = require('dbgate-tools');
|
|
4
|
+
|
|
5
|
+
async function exportDbModelSql(dbModel, driver, outputDir, outputFile) {
  // Exports a database model as SQL DDL, either into a single script
  // (outputFile) and/or as one .sql file per object under a folder tree
  // (outputDir). Both targets are optional and independent.
  const { tables, views, procedures, functions, triggers, matviews } = dbModel;

  // When the model spans more than one schema, nest output under
  // per-schema directories so object names cannot collide.
  const usedSchemas = getSchemasUsedByStructure(dbModel);
  const useSchemaDir = usedSchemas.length > 1;

  // Cache of directories already created, to avoid redundant mkdir calls.
  const createdDirs = new Set();
  async function ensureDir(dir) {
    if (!createdDirs.has(dir)) {
      await fs.mkdir(dir, { recursive: true });
      createdDirs.add(dir);
    }
  }

  // Applies writeList to every SQL-object collection, in a fixed order.
  async function writeLists(writeList) {
    await writeList(views, 'views');
    await writeList(procedures, 'procedures');
    await writeList(functions, 'functions');
    await writeList(triggers, 'triggers');
    await writeList(matviews, 'matviews');
  }

  if (outputFile) {
    const dmp = driver.createDumper();
    // Dump tables first without foreign keys so table creation order
    // does not matter...
    for (const table of tables || []) {
      dmp.createTable({
        ...table,
        foreignKeys: [],
        dependencies: [],
      });
    }
    // ...then add all foreign keys once every referenced table exists.
    for (const table of tables || []) {
      for (const fk of table.foreignKeys || []) {
        dmp.createForeignKey(fk);
      }
    }
    // BUG FIX: writeLists is async and must be awaited. Previously the
    // promise was left floating, so `dmp.s` was captured before
    // procedures/functions/triggers/matviews were dumped, producing an
    // incomplete script file.
    await writeLists((list, folder) => {
      for (const obj of list || []) {
        dmp.createSqlObject(obj);
      }
    });

    const script = dmp.s;
    await fs.writeFile(outputFile, script);
  }

  if (outputDir) {
    // One file per table, each rendered with its own dumper instance.
    for (const table of tables || []) {
      const tablesDir = useSchemaDir
        ? path.join(outputDir, table.schemaName ?? 'default', 'tables')
        : path.join(outputDir, 'tables');
      await ensureDir(tablesDir);
      const dmp = driver.createDumper();
      dmp.createTable({
        ...table,
        foreignKeys: [],
        dependencies: [],
      });
      await fs.writeFile(path.join(tablesDir, `${table.pureName}.sql`), dmp.s);
    }

    // One file per SQL object (view/procedure/function/trigger/matview).
    await writeLists(async (list, folder) => {
      for (const obj of list || []) {
        const objdir = useSchemaDir
          ? path.join(outputDir, obj.schemaName ?? 'default', folder)
          : path.join(outputDir, folder);
        await ensureDir(objdir);
        const dmp = driver.createDumper();
        dmp.createSqlObject(obj);
        await fs.writeFile(path.join(objdir, `${obj.pureName}.sql`), dmp.s);
      }
    });
  }
}
|
|
79
|
+
|
|
80
|
+
module.exports = exportDbModelSql;
|
|
@@ -1,28 +1,8 @@
|
|
|
1
|
-
const fs = require('fs-extra');
|
|
2
|
-
const path = require('path');
|
|
3
|
-
const yaml = require('js-yaml');
|
|
4
1
|
const { databaseInfoFromYamlModel, DatabaseAnalyser } = require('dbgate-tools');
|
|
5
|
-
const
|
|
6
|
-
const { archivedir, resolveArchiveFolder } = require('./directories');
|
|
7
|
-
const loadFilesRecursive = require('./loadFilesRecursive');
|
|
2
|
+
const loadModelFolder = require('./loadModelFolder');
|
|
8
3
|
|
|
9
4
|
async function importDbModel(inputDir) {
  // Load all YAML/SQL model files from the given folder (or archive:
  // pseudo-path) and convert them into a structured database-info object.
  const modelFiles = await loadModelFolder(inputDir);
  return databaseInfoFromYamlModel(modelFiles);
}
|
|
28
8
|
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
const fs = require('fs-extra');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const yaml = require('js-yaml');
|
|
4
|
+
const { resolveArchiveFolder } = require('./directories');
|
|
5
|
+
const loadFilesRecursive = require('./loadFilesRecursive');
|
|
6
|
+
|
|
7
|
+
async function loadModelFolder(inputDir) {
  // Loads model files (*.table.yaml and *.sql) from a folder, returning
  // { name, text, json } entries; json is parsed only for YAML files.
  // An 'archive:<folder>' prefix is resolved to the archive location on disk.
  const ARCHIVE_PREFIX = 'archive:';
  const dir = inputDir.startsWith(ARCHIVE_PREFIX)
    ? resolveArchiveFolder(inputDir.slice(ARCHIVE_PREFIX.length))
    : inputDir;

  const result = [];
  const names = await loadFilesRecursive(dir);
  for (const name of names) {
    // Only table definitions and raw SQL files belong to the model.
    if (!name.endsWith('.table.yaml') && !name.endsWith('.sql')) continue;

    const text = await fs.readFile(path.join(dir, name), { encoding: 'utf-8' });
    result.push({
      name: path.parse(name).base,
      text,
      json: name.endsWith('.yaml') ? yaml.load(text) : null,
    });
  }

  return result;
}
|
|
26
|
+
|
|
27
|
+
module.exports = loadModelFolder;
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
const { filesdir } = require('./directories');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const fs = require('fs-extra');
|
|
4
|
+
const _ = require('lodash');
|
|
5
|
+
const dbgateApi = require('../shell');
|
|
6
|
+
const { getLogger, extractErrorLogData } = require('dbgate-tools');
|
|
7
|
+
const logger = getLogger('loadModelTransform');
|
|
8
|
+
|
|
9
|
+
function modelTransformFromJson(json) {
  // Resolves a { transform, arguments } descriptor to a transform function
  // by looking up a factory on the shell API and invoking it.
  //
  // SECURITY FIX: only accept own, function-valued properties of dbgateApi.
  // The previous truthiness check also matched inherited keys such as
  // 'constructor', letting a crafted transform file reach Function().
  if (!Object.prototype.hasOwnProperty.call(dbgateApi, json.transform)) return null;
  const creator = dbgateApi[json.transform];
  if (typeof creator !== 'function') return null;
  // Tolerate descriptors with no arguments array.
  return creator(...(json.arguments ?? []));
}
|
|
14
|
+
|
|
15
|
+
async function loadModelTransform(file) {
  // Loads a model-transform definition file (JSON) from the 'modtrans'
  // subfolder of the files directory and builds a transform function.
  // Returns null when no file is given, the file cannot be read/parsed,
  // or no usable transform is defined; errors are logged, never thrown.
  if (!file) return null;

  try {
    const fullPath = path.join(filesdir(), 'modtrans', file);
    const text = await fs.readFile(fullPath, { encoding: 'utf-8' });
    const json = JSON.parse(text);

    if (_.isArray(json)) {
      const transforms = _.compact(json.map(item => modelTransformFromJson(item)));
      if (!transforms.length) return null;
      // Compose: pass the structure through each transform left-to-right.
      return structure => transforms.reduce((acc, fn) => fn(acc), structure);
    }

    if (_.isPlainObject(json)) {
      return modelTransformFromJson(json);
    }

    return null;
  } catch (err) {
    logger.error(extractErrorLogData(err), `Error loading model transform ${file}`);
    return null;
  }
}
|
|
35
|
+
|
|
36
|
+
module.exports = loadModelTransform;
|
|
@@ -14,6 +14,7 @@ const isBuiltWebMode = process.env.BUILTWEBMODE == '1';
|
|
|
14
14
|
const isNpmDist = !!global['IS_NPM_DIST'];
|
|
15
15
|
const isDbModel = !!global['IS_DB_MODEL'];
|
|
16
16
|
const isForkedApi = processArgs.isForkedApi;
|
|
17
|
+
const isAwsUbuntuLayout = fs.existsSync('/home/ubuntu/build/public');
|
|
17
18
|
|
|
18
19
|
// function moduleAvailable(name) {
|
|
19
20
|
// try {
|
|
@@ -42,11 +43,20 @@ const platformInfo = {
|
|
|
42
43
|
platform,
|
|
43
44
|
runningInWebpack: !!process.env.WEBPACK_DEV_SERVER_URL,
|
|
44
45
|
allowShellConnection:
|
|
45
|
-
(!processArgs.listenApiChild && !isNpmDist) ||
|
|
46
|
+
(!processArgs.listenApiChild && !isNpmDist) ||
|
|
47
|
+
!!process.env.SHELL_CONNECTION ||
|
|
48
|
+
!!isElectron() ||
|
|
49
|
+
!!isDbModel ||
|
|
50
|
+
isDevMode,
|
|
46
51
|
allowShellScripting:
|
|
47
|
-
(!processArgs.listenApiChild && !isNpmDist) ||
|
|
52
|
+
(!processArgs.listenApiChild && !isNpmDist) ||
|
|
53
|
+
!!process.env.SHELL_SCRIPTING ||
|
|
54
|
+
!!isElectron() ||
|
|
55
|
+
!!isDbModel ||
|
|
56
|
+
isDevMode,
|
|
48
57
|
allowConnectionFromEnvVariables: !!isDbModel,
|
|
49
58
|
defaultKeyfile: path.join(os.homedir(), '.ssh/id_rsa'),
|
|
59
|
+
isAwsUbuntuLayout,
|
|
50
60
|
};
|
|
51
61
|
|
|
52
62
|
module.exports = platformInfo;
|
package/src/utility/sshTunnel.js
CHANGED
|
@@ -50,7 +50,7 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
|
|
|
50
50
|
resolve(subprocess);
|
|
51
51
|
}
|
|
52
52
|
if (msgtype == 'error') {
|
|
53
|
-
reject(errorMessage);
|
|
53
|
+
reject(new Error(errorMessage));
|
|
54
54
|
}
|
|
55
55
|
});
|
|
56
56
|
subprocess.on('exit', code => {
|
|
@@ -91,6 +91,7 @@ async function getSshTunnel(connection) {
|
|
|
91
91
|
};
|
|
92
92
|
return sshTunnelCache[tunnelCacheKey];
|
|
93
93
|
} catch (err) {
|
|
94
|
+
logger.error(extractErrorLogData(err), 'Error creating SSH tunnel:');
|
|
94
95
|
// error is not cached
|
|
95
96
|
return {
|
|
96
97
|
state: 'error',
|