dbgate-api-premium 6.5.6 → 6.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +7 -6
- package/src/auth/authProvider.js +1 -1
- package/src/auth/storageAuthProvider.js +2 -2
- package/src/controllers/archive.js +1 -1
- package/src/controllers/auth.js +1 -1
- package/src/controllers/cloud.js +15 -1
- package/src/controllers/connections.js +4 -4
- package/src/controllers/databaseConnections.js +11 -11
- package/src/controllers/files.js +24 -4
- package/src/controllers/runners.js +7 -6
- package/src/controllers/scheduler.js +1 -1
- package/src/controllers/serverConnections.js +4 -4
- package/src/controllers/sessions.js +4 -4
- package/src/controllers/storage.js +6 -40
- package/src/controllers/storageDb.js +5 -4
- package/src/controllers/uploads.js +2 -2
- package/src/currentVersion.js +2 -2
- package/src/index.js +35 -5
- package/src/main.js +59 -20
- package/src/proc/databaseConnectionProcess.js +21 -12
- package/src/proc/serverConnectionProcess.js +6 -6
- package/src/proc/sessionProcess.js +2 -2
- package/src/proc/sshForwardProcess.js +1 -1
- package/src/shell/archiveWriter.js +1 -1
- package/src/shell/copyStream.js +1 -1
- package/src/shell/executeQuery.js +3 -3
- package/src/shell/importDatabase.js +3 -3
- package/src/shell/jsonLinesReader.js +1 -1
- package/src/shell/jsonLinesWriter.js +1 -1
- package/src/shell/jsonReader.js +1 -1
- package/src/shell/jsonWriter.js +1 -1
- package/src/shell/loadDatabase.js +2 -2
- package/src/shell/modifyJsonLinesReader.js +1 -1
- package/src/shell/queryReader.js +1 -1
- package/src/shell/requirePlugin.js +6 -1
- package/src/shell/runScript.js +1 -1
- package/src/shell/sqlDataWriter.js +1 -1
- package/src/shell/tableReader.js +3 -3
- package/src/shell/tableWriter.js +1 -1
- package/src/shell/unzipDirectory.js +4 -4
- package/src/shell/zipDirectory.js +3 -3
- package/src/shell/zipJsonLinesData.js +3 -3
- package/src/storageModel.js +6 -0
- package/src/utility/DatastoreProxy.js +3 -3
- package/src/utility/JsonLinesDatastore.js +4 -2
- package/src/utility/appLogStore.js +119 -0
- package/src/utility/auditlog.js +1 -1
- package/src/utility/authProxy.js +31 -3
- package/src/utility/checkLicense.js +10 -4
- package/src/utility/childProcessChecker.js +1 -1
- package/src/utility/cloudIntf.js +5 -5
- package/src/utility/cloudUpgrade.js +4 -4
- package/src/utility/connectUtility.js +1 -1
- package/src/utility/crypting.js +13 -11
- package/src/utility/directories.js +2 -2
- package/src/utility/extractSingleFileFromZip.js +3 -3
- package/src/utility/loadModelTransform.js +1 -1
- package/src/utility/sshTunnel.js +7 -7
- package/src/utility/sshTunnelProxy.js +1 -1
- package/src/utility/useController.js +3 -3
package/src/shell/zipDirectory.js
CHANGED

@@ -16,16 +16,16 @@ function zipDirectory(inputDirectory, outputFile) {
 
     // Listen for all archive data to be written
     output.on('close', () => {
-      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
+      logger.info(`DBGM-00072 ZIP file created (${archive.pointer()} total bytes)`);
      resolve();
    });
 
    archive.on('warning', err => {
-      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
+      logger.warn(extractErrorLogData(err), `DBGM-00073 Warning while creating ZIP: ${err.message}`);
    });
 
    archive.on('error', err => {
-      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
+      logger.error(extractErrorLogData(err), `DBGM-00074 Error while creating ZIP: ${err.message}`);
      reject(err);
    });
 
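Nearly every hunk in this release follows the same pattern: an otherwise unchanged `logger.*` call whose message gains a stable `DBGM-xxxxx` code prefix. Since DBGate writes logs as `.ndjson` (one JSON entry per line; see the new appLogStore.js below), the codes make entries greppable regardless of the interpolated values. A minimal sketch, not part of the package:

```js
// Hypothetical helper: filter an .ndjson log file by the new DBGM codes.
// The entry shape ({ time, msg, ... }) follows the fields read by the new
// appLogStore.js further down in this diff.
const fs = require('fs');

function findLogEntriesByCode(ndjsonFile, code) {
  return fs
    .readFileSync(ndjsonFile, 'utf-8')
    .split('\n')
    .filter(line => line.trim())
    .flatMap(line => {
      try {
        return [JSON.parse(line)];
      } catch {
        return []; // tolerate malformed lines, as the package's own readers do
      }
    })
    .filter(entry => typeof entry.msg == 'string' && entry.msg.startsWith(code));
}

// findLogEntriesByCode('2025-01-01.ndjson', 'DBGM-00074');
```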
package/src/shell/zipJsonLinesData.js
CHANGED

@@ -17,16 +17,16 @@ function zipDirectory(jsonDb, outputFile) {
 
    // Listen for all archive data to be written
    output.on('close', () => {
-      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
+      logger.info(`DBGM-00075 ZIP file created (${archive.pointer()} total bytes)`);
      resolve();
    });
 
    archive.on('warning', err => {
-      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
+      logger.warn(extractErrorLogData(err), `DBGM-00076 Warning while creating ZIP: ${err.message}`);
    });
 
    archive.on('error', err => {
-      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
+      logger.error(extractErrorLogData(err), `DBGM-00077 Error while creating ZIP: ${err.message}`);
      reject(err);
    });
 
package/src/storageModel.js
CHANGED
@@ -674,6 +674,12 @@ module.exports = {
       "columnName": "awsRegion",
       "dataType": "varchar(250)",
       "notNull": false
+    },
+    {
+      "pureName": "connections",
+      "columnName": "connectionDefinition",
+      "dataType": "text",
+      "notNull": false
     }
   ],
   "foreignKeys": [],
package/src/utility/DatastoreProxy.js
CHANGED

@@ -61,7 +61,7 @@ class DatastoreProxy {
       this.subprocess = null;
     });
     this.subprocess.on('error', err => {
-      logger.error(extractErrorLogData(err), 'Error in data store subprocess');
+      logger.error(extractErrorLogData(err), 'DBGM-00167 Error in data store subprocess');
       this.subprocess = null;
     });
     this.subprocess.send({ msgtype: 'open', file: this.file });

@@ -77,7 +77,7 @@ class DatastoreProxy {
       try {
         this.subprocess.send({ msgtype: 'read', msgid, offset, limit });
       } catch (err) {
-        logger.error(extractErrorLogData(err), 'Error getting rows');
+        logger.error(extractErrorLogData(err), 'DBGM-00168 Error getting rows');
         this.subprocess = null;
       }
     });

@@ -91,7 +91,7 @@ class DatastoreProxy {
       try {
         this.subprocess.send({ msgtype: 'notify', msgid });
       } catch (err) {
-        logger.error(extractErrorLogData(err), 'Error notifying subprocess');
+        logger.error(extractErrorLogData(err), 'DBGM-00169 Error notifying subprocess');
         this.subprocess = null;
       }
     });
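These hunks expose the proxy's IPC protocol with its data-store subprocess: `{ msgtype: 'open', file }`, `{ msgtype: 'read', msgid, offset, limit }` and `{ msgtype: 'notify', msgid }`, with the handle nulled on any send failure so a later call respawns it. A sketch of what the child side of such a protocol could look like; only the request shapes come from the diff, the reply message names and the datastore itself are assumptions:

```js
// Hypothetical subprocess side of the msgtype protocol above; the actual
// child implementation lives elsewhere in the package and is not shown here.
let rows = []; // would be populated from the opened data file

process.on('message', message => {
  switch (message.msgtype) {
    case 'open':
      // message.file: path of the datastore to load (shape from the diff)
      rows = [];
      break;
    case 'read':
      // reply name 'read-response' is an assumption
      process.send({
        msgtype: 'read-response',
        msgid: message.msgid,
        rows: rows.slice(message.offset, message.offset + message.limit),
      });
      break;
    case 'notify':
      process.send({ msgtype: 'notify-response', msgid: message.msgid }); // assumed reply
      break;
  }
});
```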
package/src/utility/JsonLinesDatastore.js
CHANGED

@@ -7,7 +7,6 @@ const AsyncLock = require('async-lock');
 const lock = new AsyncLock();
 const stableStringify = require('json-stable-stringify');
 const { evaluateCondition } = require('dbgate-sqltree');
-const requirePluginFunction = require('./requirePluginFunction');
 const esort = require('external-sorting');
 const { jsldir } = require('./directories');
 const LineReader = require('./LineReader');

@@ -23,7 +22,10 @@ class JsonLinesDatastore {
     this.notifyChangedCallback = null;
     this.currentFilter = null;
     this.currentSort = null;
-
+    if (formatterFunction) {
+      const requirePluginFunction = require('./requirePluginFunction');
+      this.rowFormatter = requirePluginFunction(formatterFunction);
+    }
     this.sortedFiles = {};
   }
 
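The top-level `require('./requirePluginFunction')` moves into the constructor and runs only when a `formatterFunction` is supplied, so datastores without a row formatter never load the plugin machinery; Node caches the module, so repeated construction stays cheap. The deferred-require pattern in isolation:

```js
// Illustration of the lazy-require pattern from the hunk above
// (class and module names reused from the diff, body stubbed).
class JsonLinesDatastoreSketch {
  constructor(formatterFunction) {
    if (formatterFunction) {
      // loaded on first use only; later constructions hit Node's
      // module cache instead of re-reading the file
      const requirePluginFunction = require('./requirePluginFunction');
      this.rowFormatter = requirePluginFunction(formatterFunction);
    }
  }
}
```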
package/src/utility/appLogStore.js
ADDED

@@ -0,0 +1,119 @@
+const fs = require('fs-extra');
+const path = require('path');
+const { logsdir } = require('./directories');
+const { format, addDays, startOfDay } = require('date-fns');
+const LineReader = require('./LineReader');
+const socket = require('./socket');
+const _ = require('lodash');
+
+async function getLogFiles(timeFrom, timeTo) {
+  const dir = logsdir();
+  const files = await fs.readdir(dir);
+  const startPrefix = format(timeFrom, 'yyyy-MM-dd');
+  const endPrefix = format(addDays(timeTo, 1), 'yyyy-MM-dd');
+  const logFiles = files
+    .filter(file => file.endsWith('.ndjson'))
+    .filter(file => file >= startPrefix && file < endPrefix);
+  return logFiles.sort().map(x => path.join(dir, x));
+}
+
+const RECENT_LOG_LIMIT = 1000;
+
+let recentLogs = null;
+const beforeRecentLogs = [];
+
+function adjustRecentLogs() {
+  if (recentLogs.length > RECENT_LOG_LIMIT) {
+    recentLogs.splice(0, recentLogs.length - RECENT_LOG_LIMIT);
+  }
+}
+
+function prepareEntryForExport(entry, lastEntry) {
+  return {
+    date: format(new Date(entry.time), 'yyyy-MM-dd'),
+    time: format(new Date(entry.time), 'HH:mm:ss'),
+    dtime: lastEntry ? entry.time - lastEntry.time : 0,
+    msgcode: entry.msgcode || '',
+    message: entry.msg || '',
+    ..._.omit(entry, ['time', 'msg', 'msgcode']),
+    conid: entry.conid || '',
+    database: entry.database || '',
+    engine: entry.engine || '',
+    ts: entry.time,
+  };
+}
+
+async function copyAppLogsIntoFile(timeFrom, timeTo, fileName, prepareForExport) {
+  const writeStream = fs.createWriteStream(fileName);
+
+  let lastEntry = null;
+  for (const file of await getLogFiles(timeFrom, timeTo)) {
+    const readStream = fs.createReadStream(file);
+    const reader = new LineReader(readStream);
+    do {
+      const line = await reader.readLine();
+      if (line == null) break;
+      try {
+        const logEntry = JSON.parse(line);
+        if (logEntry.time >= timeFrom && logEntry.time <= timeTo) {
+          writeStream.write(
+            JSON.stringify(prepareForExport ? prepareEntryForExport(logEntry, lastEntry) : logEntry) + '\n'
+          );
+          lastEntry = logEntry;
+        }
+      } catch (e) {
+        continue;
+      }
+    } while (true);
+  }
+}
+
+async function initializeRecentLogProvider() {
+  const logs = [];
+  for (const file of await getLogFiles(startOfDay(new Date()), new Date())) {
+    const fileStream = fs.createReadStream(file);
+    const reader = new LineReader(fileStream);
+    do {
+      const line = await reader.readLine();
+      if (line == null) break;
+      try {
+        const logEntry = JSON.parse(line);
+        logs.push(logEntry);
+        if (logs.length > RECENT_LOG_LIMIT) {
+          logs.shift();
+        }
+      } catch (e) {
+        continue;
+      }
+    } while (true);
+  }
+  recentLogs = logs;
+  recentLogs.push(...beforeRecentLogs);
+}
+
+let counter = 0;
+function pushToRecentLogs(msg) {
+  const finalMsg = {
+    ...msg,
+    counter,
+  };
+  counter += 1;
+  if (recentLogs) {
+    recentLogs.push(finalMsg);
+    adjustRecentLogs();
+    socket.emit('applog-event', finalMsg);
+  } else {
+    beforeRecentLogs.push(finalMsg);
+  }
+}
+
+function getRecentAppLogRecords() {
+  return recentLogs ?? beforeRecentLogs;
+}
+
+module.exports = {
+  initializeRecentLogProvider,
+  getRecentAppLogRecords,
+  pushToRecentLogs,
+  copyAppLogsIntoFile,
+};
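appLogStore.js is the one wholly new module in 6.6.1. It replays today's `.ndjson` log files into an in-memory ring buffer capped at 1000 entries, queues entries arriving before initialization in `beforeRecentLogs`, emits each new entry as an `applog-event` socket message, and can export a time range to a file, optionally flattened via `prepareEntryForExport`. A hedged usage sketch; how the host process actually wires its logger into `pushToRecentLogs` is not shown in this diff:

```js
// Hypothetical wiring around the new module's exported API.
const {
  initializeRecentLogProvider,
  pushToRecentLogs,
  getRecentAppLogRecords,
  copyAppLogsIntoFile,
} = require('./utility/appLogStore');

async function demo() {
  // Replay today's .ndjson files; entries pushed before this resolves are
  // parked in beforeRecentLogs and appended afterwards.
  await initializeRecentLogProvider();

  // A logger sink would forward each structured entry here; once initialized,
  // this also broadcasts an 'applog-event' over the socket.
  pushToRecentLogs({ time: Date.now(), msg: 'DBGM-00072 ZIP file created (123 total bytes)' });

  console.log(getRecentAppLogRecords().length);

  // Export the last hour, flattened into date/time/dtime/msgcode/... rows.
  await copyAppLogsIntoFile(Date.now() - 3600 * 1000, Date.now(), '/tmp/applog-export.ndjson', true);
}

demo();
```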
package/src/utility/auditlog.js
CHANGED
@@ -92,7 +92,7 @@ async function processAuditLogQueue() {
         element.message || null
       );
     } catch (err) {
-      logger.error(extractErrorLogData(err), 'Error processing audit log entry');
+      logger.error(extractErrorLogData(err), 'DBGM-00159 Error processing audit log entry');
     }
   }
 
package/src/utility/authProxy.js
CHANGED
@@ -120,7 +120,7 @@ function startTokenChecking(sid, callback) {
        callback(resp.data.token);
      }
    } catch (err) {
-      logger.error(extractErrorLogData(err), 'Error checking token');
+      logger.error(extractErrorLogData(err), 'DBGM-00160 Error checking token');
    }
  }, 500);
 }

@@ -157,12 +157,12 @@ async function obtainRefreshedLicense() {
 
   const decoded = jwt.decode(licenseKey?.trim());
   if (!decoded?.end) {
-    logger.info('Invalid license found');
+    logger.info('DBGM-00078 Invalid license found');
     return null;
   }
 
   if (Date.now() > decoded.end * 1000) {
-    logger.info('License expired, trying to obtain fresh license');
+    logger.info('DBGM-00079 License expired, trying to obtain fresh license');
 
     try {
       const respToken = await axios.default.post(

@@ -310,6 +310,32 @@ async function callRefactorSqlQueryApi(query, task, structure, dialect) {
   return resp.data;
 }
 
+// async function callChatStream({ input, tools }, res) {
+//   const resp = await axios.default.post(
+//     `${AI_GATEWAY_URL}/chat-stream`,
+//     {
+//       input,
+//       tools,
+//     },
+//     getAxiosParamsWithLicense()
+//   );
+
+//   res.set(resp.headers);
+//   res.status(resp.status);
+//   resp.data.pipe(res);
+// }
+
+function getAiGatewayServer() {
+  return {
+    url: AI_GATEWAY_URL,
+    headers: {
+      Authorization:
+        licenseKey ?? process.env.DBGATE_LICENSE ? `Bearer ${licenseKey ?? process.env.DBGATE_LICENSE}` : undefined,
+      ...getE2ETestHeaders(),
+    },
+  };
+}
+
 module.exports = {
   isAuthProxySupported,
   authProxyGetRedirectUrl,

@@ -325,4 +351,6 @@ module.exports = {
   callRefactorSqlQueryApi,
   getLicenseHttpHeaders,
   tryToGetRefreshedLicense,
+  getAiGatewayServer,
+  // callChatStream,
 };
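The streaming proxy variant (`callChatStream`) ships commented out; instead, `getAiGatewayServer()` returns the gateway URL together with the license-bearing `Authorization` header so the caller can reach the AI gateway directly. A hedged consumer sketch; the `/chat-stream` path is taken from the commented-out code, not from a documented API:

```js
// Hypothetical consumer of the new getAiGatewayServer() export.
const axios = require('axios');
const { getAiGatewayServer } = require('./utility/authProxy');

async function callAiGateway(input, tools) {
  const { url, headers } = getAiGatewayServer();
  // endpoint name reused from the commented-out callChatStream above
  const resp = await axios.post(`${url}/chat-stream`, { input, tools }, { headers });
  return resp.data;
}
```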
package/src/utility/checkLicense.js
CHANGED

@@ -52,6 +52,12 @@ const licenseTypeById = {
     isForWeb: true,
     isForApp: true,
   },
+  '6c734e30-9b66-417d-91a2-0f9aeb739b32': {
+    name: 'Demo',
+    isPremium: true,
+    isForWeb: true,
+    isForApp: true,
+  },
 };
 
 function getLicenseByDecoded(decoded) {

@@ -105,10 +111,10 @@ async function getAwsMetadata() {
     awsMetadata = { amiId, region };
     awsMetadataLoaded = true;
 
-    logger.info(`Loaded AWS metadata, AMIID=${amiId}, region=${region}`);
+    logger.info(`DBGM-00080 Loaded AWS metadata, AMIID=${amiId}, region=${region}`);
     return { amiId, region };
   } catch (error) {
-    logger.error(extractErrorLogData(error), 'Error getting AWS metadata');
+    logger.error(extractErrorLogData(error), 'DBGM-00081 Error getting AWS metadata');
     awsMetadataLoaded = true;
     return null;
   }

@@ -134,7 +140,7 @@ function checkLicenseKey(licenseKey) {
     (platformInfo.isElectron && !licenseTypeObj.isForApp) ||
     (!platformInfo.isElectron && !licenseTypeObj.isForWeb)
   ) {
-    logger.error(`Incorrect license type, found ${licenseTypeObj?.name ?? 'n/a'}`);
+    logger.error(`DBGM-00161 Incorrect license type, found ${licenseTypeObj?.name ?? 'n/a'}`);
     return {
       status: 'error',
       error: `Incorrect license type, found ${licenseTypeObj?.name ?? 'n/a'}`,

@@ -167,7 +173,7 @@ function checkLicenseKey(licenseKey) {
     }
   } catch (err) {}
 
-  logger.error(extractErrorLogData(err), 'License token is invalid');
+  logger.error(extractErrorLogData(err), 'DBGM-00162 License token is invalid');
   return {
     status: 'error',
     error: err.message ?? 'License token is invalid',
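Besides the new message codes, checkLicense.js registers a new 'Demo' license type, premium and valid for both the web build and the Electron app. The platform gate in the `@@ -134` hunk reduces to this check, restated here only to make the two flags explicit:

```js
// Restatement of the gating logic from the hunk above; licenseTypeObj comes
// from the licenseTypeById lookup, isElectron from platformInfo.
function licenseAllowedOnPlatform(licenseTypeObj, isElectron) {
  if (isElectron && !licenseTypeObj.isForApp) return false; // app-only gate
  if (!isElectron && !licenseTypeObj.isForWeb) return false; // web-only gate
  return true; // the new Demo type sets both flags, so it passes everywhere
}
```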
package/src/utility/childProcessChecker.js
CHANGED

@@ -12,7 +12,7 @@ function childProcessChecker() {
       // This will come once parent dies.
       // One way can be to check for error code ERR_IPC_CHANNEL_CLOSED
       // and call process.exit()
-      logger.error(extractErrorLogData(err), 'parent died');
+      logger.error(extractErrorLogData(err), 'DBGM-00163 parent died');
       process.exit(1);
     }
   }, 1000);
package/src/utility/cloudIntf.js
CHANGED
@@ -77,7 +77,7 @@ function startCloudTokenChecking(sid, callback) {
        callback(resp.data);
      }
    } catch (err) {
-      logger.error(extractErrorLogData(err), 'Error checking cloud token');
+      logger.error(extractErrorLogData(err), 'DBGM-00164 Error checking cloud token');
    }
  }, 500);
 }

@@ -125,7 +125,7 @@ async function getCloudUsedEngines() {
     const resp = await callCloudApiGet('content-engines');
     return resp || [];
   } catch (err) {
-    logger.error(extractErrorLogData(err), 'Error getting cloud content list');
+    logger.error(extractErrorLogData(err), 'DBGM-00165 Error getting cloud content list');
     return [];
   }
 }

@@ -208,7 +208,7 @@ async function updateCloudFiles(isRefresh) {
     lastCheckedTm = _.max(cloudFiles.map(x => parseInt(x.modifiedTm)));
   }
 
-  logger.info({ tags, lastCheckedTm }, 'Downloading cloud files');
+  logger.info({ tags, lastCheckedTm }, 'DBGM-00082 Downloading cloud files');
 
   const resp = await axios.default.get(
     `${DBGATE_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${

@@ -223,7 +223,7 @@ async function updateCloudFiles(isRefresh) {
     }
   );
 
-  logger.info(`Downloaded ${resp.data.length} cloud files`);
+  logger.info(`DBGM-00083 Downloaded ${resp.data.length} cloud files`);
 
   const filesByPath = lastCheckedTm == 0 ? {} : _.keyBy(cloudFiles, 'path');
   for (const file of resp.data) {

@@ -269,7 +269,7 @@ async function refreshPublicFiles(isRefresh) {
   try {
     await updateCloudFiles(isRefresh);
   } catch (err) {
-    logger.error(extractErrorLogData(err), 'Error updating cloud files');
+    logger.error(extractErrorLogData(err), 'DBGM-00166 Error updating cloud files');
   }
 }
 
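updateCloudFiles performs an incremental sync: it asks the cloud endpoint only for files modified after the newest `modifiedTm` already cached, then merges the response into the cache keyed by `path`. The merge pattern in isolation (names from the hunks; the HTTP call is stubbed):

```js
// Sketch of the incremental-download pattern from updateCloudFiles.
const _ = require('lodash');

async function incrementalMerge(cloudFiles, fetchUpdates) {
  const lastCheckedTm = _.max(cloudFiles.map(x => parseInt(x.modifiedTm))) ?? 0;
  // corresponds to GET .../public-cloud-updates?lastCheckedTm=...&tags=...
  const updates = await fetchUpdates(lastCheckedTm);
  const filesByPath = lastCheckedTm == 0 ? {} : _.keyBy(cloudFiles, 'path');
  for (const file of updates) {
    filesByPath[file.path] = file; // newer revision wins per path
  }
  return Object.values(filesByPath);
}
```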
package/src/utility/cloudUpgrade.js
CHANGED

@@ -31,7 +31,7 @@ async function checkCloudUpgrade() {
       semver.gt(version, currentVersion.version) &&
       (!cloudDownloadedVersion || semver.gt(version, cloudDownloadedVersion))
     ) {
-      logger.info(`New version available: ${version}`);
+      logger.info(`DBGM-00084 New version available: ${version}`);
       const zipUrl = json.assets.find(x => x.name == 'cloud-build.zip').browser_download_url;
 
       const writer = fs.createWriteStream(process.env.CLOUD_UPGRADE_FILE);

@@ -50,16 +50,16 @@ async function checkCloudUpgrade() {
       });
       await fsp.writeFile(process.env.CLOUD_UPGRADE_FILE + '.version', version);
 
-      logger.info(`Downloaded new version from ${zipUrl}`);
+      logger.info(`DBGM-00085 Downloaded new version from ${zipUrl}`);
     } else {
       logger.info(
-        `Checked version ${version} is not newer than ${
+        `DBGM-00086 Checked version ${version} is not newer than ${
           cloudDownloadedVersion ?? currentVersion.version
         }, upgrade skippped`
       );
     }
   } catch (err) {
-    logger.error(extractErrorLogData(err), 'Error checking cloud upgrade');
+    logger.error(extractErrorLogData(err), 'DBGM-00087 Error checking cloud upgrade');
   }
 }
 
package/src/utility/connectUtility.js
CHANGED

@@ -132,7 +132,7 @@ async function connectUtility(driver, storedConnection, connectionMode, addition
 
   connection.ssl = await extractConnectionSslParams(connection);
 
-  const conn = await driver.connect({ ...connection, ...additionalOptions });
+  const conn = await driver.connect({ conid: connectionLoaded?._id, ...connection, ...additionalOptions });
   return conn;
 }
 
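The driver now receives the stored connection's `_id` as `conid`. Spreading it first means an explicit `conid` inside `connection` or `additionalOptions` would still override the default, since later spread properties win:

```js
// Spread-order illustration for the driver.connect change above;
// the object shapes are assumed for the example.
const connectionLoaded = { _id: 'conn-1' };
const connection = { server: 'localhost', port: 5432 };
const additionalOptions = {};

const args = { conid: connectionLoaded?._id, ...connection, ...additionalOptions };
console.log(args); // { conid: 'conn-1', server: 'localhost', port: 5432 }
```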
package/src/utility/crypting.js
CHANGED
@@ -101,24 +101,26 @@ function decryptObjectPasswordField(obj, field, encryptor = null) {
   return obj;
 }
 
+const fieldsToEncrypt = ['password', 'sshPassword', 'sshKeyfilePassword', 'connectionDefinition'];
+
 function encryptConnection(connection, encryptor = null) {
   if (connection.passwordMode != 'saveRaw') {
-
-
-
+    for (const field of fieldsToEncrypt) {
+      connection = encryptObjectPasswordField(connection, field, encryptor);
+    }
   }
   return connection;
 }
 
 function maskConnection(connection) {
   if (!connection) return connection;
-  return _.omit(connection,
+  return _.omit(connection, fieldsToEncrypt);
 }
 
-function decryptConnection(connection
-
-
-
+function decryptConnection(connection) {
+  for (const field of fieldsToEncrypt) {
+    connection = decryptObjectPasswordField(connection, field);
+  }
   return connection;
 }
 

@@ -188,9 +190,9 @@ function recryptObjectPasswordFieldInPlace(obj, field, decryptEncryptor, encrypt
 }
 
 function recryptConnection(connection, decryptEncryptor, encryptEncryptor) {
-
-
-
+  for (const field of fieldsToEncrypt) {
+    connection = recryptObjectPasswordField(connection, field, decryptEncryptor, encryptEncryptor);
+  }
   return connection;
 }
 
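The hard-coded secret fields give way to a single `fieldsToEncrypt` list, now also covering `connectionDefinition`, matching the new text column added to the `connections` storage table earlier in this diff. Round-trip behaviour as far as the hunks show it:

```js
// Hedged usage sketch built only from the functions visible in the hunks.
const { encryptConnection, decryptConnection, maskConnection } = require('./utility/crypting');

const conn = {
  server: 'localhost',
  password: 'secret',
  connectionDefinition: '{"title":"My DB"}', // newly encrypted in 6.6.1
};

const stored = encryptConnection(conn); // encrypts every fieldsToEncrypt entry unless passwordMode == 'saveRaw'
const restored = decryptConnection(stored); // decrypts the same field list
const safeForClient = maskConnection(conn); // omits every fieldsToEncrypt entry
```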
package/src/utility/directories.js
CHANGED

@@ -14,11 +14,11 @@ const createDirectories = {};
 const ensureDirectory = (dir, clean) => {
   if (!createDirectories[dir]) {
     if (clean && fs.existsSync(dir) && !platformInfo.isForkedApi) {
-      getLogger('directories').info(`Cleaning directory ${dir}`);
+      getLogger('directories').info(`DBGM-00170 Cleaning directory ${dir}`);
       cleanDirectory(dir, _.isNumber(clean) ? clean : null);
     }
     if (!fs.existsSync(dir)) {
-      getLogger('directories').info(`Creating directory ${dir}`);
+      getLogger('directories').info(`DBGM-00171 Creating directory ${dir}`);
       fs.mkdirSync(dir);
     }
     createDirectories[dir] = true;

package/src/utility/extractSingleFileFromZip.js
CHANGED

@@ -42,13 +42,13 @@ function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
 
     // When the file is finished writing, resolve
     writeStream.on('finish', () => {
-      logger.info(`File "${fileInZip}" extracted to "${outputPath}".`);
+      logger.info(`DBGM-00088 File "${fileInZip}" extracted to "${outputPath}".`);
       resolve(true);
     });
 
     // Handle write errors
     writeStream.on('error', writeErr => {
-      logger.error(extractErrorLogData(writeErr), `Error extracting "${fileInZip}" from "${zipPath}".`);
+      logger.error(extractErrorLogData(writeErr), `DBGM-00089 Error extracting "${fileInZip}" from "${zipPath}".`);
       reject(writeErr);
     });
   });

@@ -67,7 +67,7 @@ function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
 
     // Handle general errors
     zipFile.on('error', err => {
-      logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
+      logger.error(extractErrorLogData(err), `DBGM-00172 ZIP file error in ${zipPath}.`);
       reject(err);
     });
   });

package/src/utility/loadModelTransform.js
CHANGED

@@ -28,7 +28,7 @@ async function loadModelTransform(file) {
     }
     return null;
   } catch (err) {
-    logger.error(extractErrorLogData(err), `Error loading model transform ${file}`);
+    logger.error(extractErrorLogData(err), `DBGM-00173 Error loading model transform ${file}`);
     return null;
   }
 }
package/src/utility/sshTunnel.js
CHANGED
@@ -40,7 +40,7 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
       tunnelConfig,
     });
   } catch (err) {
-    logger.error(extractErrorLogData(err), 'Error connecting SSH');
+    logger.error(extractErrorLogData(err), 'DBGM-00174 Error connecting SSH');
   }
   return new Promise((resolve, reject) => {
     let promiseHandled = false;

@@ -57,18 +57,18 @@ function callForwardProcess(connection, tunnelConfig, tunnelCacheKey) {
       }
     });
     subprocess.on('exit', code => {
-      logger.info(`SSH forward process exited with code ${code}`);
+      logger.info(`DBGM-00090 SSH forward process exited with code ${code}`);
       delete sshTunnelCache[tunnelCacheKey];
       if (!promiseHandled) {
         reject(
           new Error(
-            'SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
+            'DBGM-00091 SSH forward process exited, try to change "Local host address for SSH connections" in Settings/Connections'
           )
         );
       }
     });
     subprocess.on('error', error => {
-      logger.error(extractErrorLogData(error), 'SSH forward process error');
+      logger.error(extractErrorLogData(error), 'DBGM-00092 SSH forward process error');
       delete sshTunnelCache[tunnelCacheKey];
       if (!promiseHandled) {
         reject(error);

@@ -97,13 +97,13 @@ async function getSshTunnel(connection) {
   };
   try {
     logger.info(
-      `Creating SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
+      `DBGM-00093 Creating SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
     );
 
     const subprocess = await callForwardProcess(connection, tunnelConfig, tunnelCacheKey);
 
     logger.info(
-      `Created SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
+      `DBGM-00094 Created SSH tunnel to ${connection.sshHost}-${connection.server}:${connection.port}, using local port ${localPort}`
     );
 
     sshTunnelCache[tunnelCacheKey] = {

@@ -114,7 +114,7 @@ async function getSshTunnel(connection) {
     };
     return sshTunnelCache[tunnelCacheKey];
   } catch (err) {
-    logger.error(extractErrorLogData(err), 'Error creating SSH tunnel:');
+    logger.error(extractErrorLogData(err), 'DBGM-00095 Error creating SSH tunnel:');
    // error is not cached
    return {
      state: 'error',
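getSshTunnel keeps one forward subprocess per tunnel key, evicts the cache entry when the subprocess exits or errors, and deliberately leaves error results uncached so the next call retries. The consumer contract implied by the hunks, with the success-entry field assumed from the `localPort` variable in the log messages:

```js
// Hypothetical consumer; only state: 'error' and the caching behaviour are
// visible in the hunks, the localPort field on success is an assumption.
const { getSshTunnel } = require('./utility/sshTunnel'); // export name assumed

async function resolveTunnel(connection) {
  const tunnel = await getSshTunnel(connection);
  if (tunnel.state == 'error') {
    // not cached - calling again spawns a fresh forward process
    throw new Error('SSH tunnel could not be established');
  }
  // connect to the local forward instead of connection.server:connection.port;
  // the local address is configurable per the DBGM-00091 error message
  return { host: '127.0.0.1', port: tunnel.localPort };
}
```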
package/src/utility/sshTunnelProxy.js
CHANGED

@@ -10,7 +10,7 @@ async function handleGetSshTunnelRequest({ msgid, connection }, subprocess) {
   try {
     subprocess.send({ msgtype: 'getsshtunnel-response', msgid, response });
   } catch (err) {
-    logger.error(extractErrorLogData(err), 'Error sending to SSH tunnel');
+    logger.error(extractErrorLogData(err), 'DBGM-00175 Error sending to SSH tunnel');
   }
 }
 

package/src/utility/useController.js
CHANGED

@@ -12,11 +12,11 @@ module.exports = function useController(app, electron, route, controller) {
   const router = express.Router();
 
   if (controller._init) {
-    logger.info(`Calling init controller for controller ${route}`);
+    logger.info(`DBGM-00096 Calling init controller for controller ${route}`);
     try {
       controller._init();
     } catch (err) {
-      logger.error(extractErrorLogData(err), `Error initializing controller, exiting application`);
+      logger.error(extractErrorLogData(err), `DBGM-00097 Error initializing controller, exiting application`);
       process.exit(1);
     }
   }

@@ -78,7 +78,7 @@ module.exports = function useController(app, electron, route, controller) {
       const data = await controller[key]({ ...req.body, ...req.query }, req);
       res.json(data);
     } catch (err) {
-      logger.error(extractErrorLogData(err), `Error when processing route ${route}/${key}`);
+      logger.error(extractErrorLogData(err), `DBGM-00176 Error when processing route ${route}/${key}`);
       if (err instanceof MissingCredentialsError) {
         res.json({
           missingCredentials: true,