dbgate-api-premium 6.4.2 → 6.5.0
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as published in their public registries.
- package/package.json +5 -5
- package/src/controllers/auth.js +11 -0
- package/src/controllers/cloud.js +261 -0
- package/src/controllers/config.js +2 -1
- package/src/controllers/connections.js +20 -0
- package/src/controllers/databaseConnections.js +3 -0
- package/src/controllers/files.js +33 -0
- package/src/controllers/jsldata.js +26 -0
- package/src/controllers/runners.js +12 -0
- package/src/controllers/serverConnections.js +1 -1
- package/src/controllers/sessions.js +7 -2
- package/src/controllers/storage.js +9 -0
- package/src/controllers/uploads.js +4 -0
- package/src/currentVersion.js +2 -2
- package/src/main.js +5 -0
- package/src/proc/connectProcess.js +1 -8
- package/src/proc/sessionProcess.js +2 -2
- package/src/shell/deployDb.js +10 -1
- package/src/shell/executeQuery.js +3 -1
- package/src/shell/generateDeploySql.js +4 -1
- package/src/utility/authProxy.js +16 -1
- package/src/utility/checkLicense.js +11 -13
- package/src/utility/cloudIntf.js +399 -0
- package/src/utility/crypting.js +6 -6
- package/src/utility/handleQueryStream.js +64 -5
- package/src/utility/hardwareFingerprint.js +1 -0
- package/src/utility/security.js +52 -0
package/src/shell/deployDb.js
CHANGED
@@ -20,6 +20,7 @@ const crypto = require('crypto');
  * @param {string} options.ignoreNameRegex - regex for ignoring objects by name
  * @param {string} options.targetSchema - target schema for deployment
  * @param {number} options.maxMissingTablesRatio - maximum ratio of missing tables in database. Safety check, if missing ratio is highe, deploy is stopped (preventing accidental drop of all tables)
+ * @param {boolean} options.useTransaction - run deploy in transaction. If not provided, it will be set to true if driver supports transactions
  */
 async function deployDb({
   connection,
@@ -33,6 +34,7 @@ async function deployDb({
   ignoreNameRegex = '',
   targetSchema = null,
   maxMissingTablesRatio = undefined,
+  useTransaction,
 }) {
   if (!driver) driver = requireEngineDriver(connection);
   const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
@@ -60,7 +62,14 @@
       maxMissingTablesRatio,
     });
     // console.log('RUNNING DEPLOY SCRIPT:', sql);
-    await executeQuery({
+    await executeQuery({
+      connection,
+      systemConnection: dbhan,
+      driver,
+      sql,
+      logScriptItems: true,
+      useTransaction,
+    });
 
     await scriptDeployer.runPost();
   } finally {

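The new `useTransaction` option flows from `deployDb` through `executeQuery` into `driver.script`. Below is a minimal usage sketch of the new flag; the entry point, the `modelFolder` option, and the connection fields are assumptions and not taken from this diff.

```js
// Hypothetical sketch: deploying a model with the 6.5.0 useTransaction flag.
const dbgateApi = require('dbgate-api-premium'); // assumed entry point

async function deploy() {
  await dbgateApi.deployDb({
    connection: {
      engine: 'postgres@dbgate-plugin-postgres', // placeholder connection
      server: 'localhost',
      user: 'postgres',
      password: 'secret',
      database: 'mydb',
    },
    modelFolder: './db-model', // assumed option; this diff only documents useTransaction
    // New in 6.5.0: wrap the generated deploy script in a transaction.
    // When omitted, it is enabled automatically if the driver supports transactions.
    useTransaction: true,
  });
}

deploy();
```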
package/src/shell/executeQuery.js
CHANGED

@@ -14,6 +14,7 @@ const logger = getLogger('execQuery');
  * @param {string} [options.sql] - SQL query
  * @param {string} [options.sqlFile] - SQL file
  * @param {boolean} [options.logScriptItems] - whether to log script items instead of whole script
+ * @param {boolean} [options.useTransaction] - run query in transaction
  * @param {boolean} [options.skipLogging] - whether to skip logging
  */
 async function executeQuery({
@@ -24,6 +25,7 @@ async function executeQuery({
   sqlFile = undefined,
   logScriptItems = false,
   skipLogging = false,
+  useTransaction,
 }) {
   if (!logScriptItems && !skipLogging) {
     logger.info({ sql: getLimitedQuery(sql) }, `Execute query`);
@@ -42,7 +44,7 @@ async function executeQuery({
       logger.debug(`Running SQL query, length: ${sql.length}`);
     }
 
-    await driver.script(dbhan, sql, { logScriptItems });
+    await driver.script(dbhan, sql, { logScriptItems, useTransaction });
   } finally {
     if (!systemConnection) {
       await driver.close(dbhan);

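`executeQuery` forwards the flag unchanged to `driver.script`, so callers can also disable the wrapping transaction per call. A short sketch under the same assumptions as above; paths and the connection are placeholders.

```js
// Hypothetical sketch: running a script file without a wrapping transaction
// (e.g. for statements that cannot execute inside one).
const dbgateApi = require('dbgate-api-premium'); // assumed entry point

async function runMigration(connection) {
  await dbgateApi.executeQuery({
    connection,
    sqlFile: './migrations/001-init.sql',
    logScriptItems: true,  // log each script item instead of the whole script
    useTransaction: false, // forwarded as-is to driver.script(dbhan, sql, { logScriptItems, useTransaction })
  });
}
```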
package/src/shell/generateDeploySql.js
CHANGED

@@ -52,7 +52,10 @@ async function generateDeploySql({
     dbdiffOptionsExtra?.['schemaMode'] !== 'ignore' &&
     dbdiffOptionsExtra?.['schemaMode'] !== 'ignoreImplicit'
   ) {
-
+    if (!driver?.dialect?.defaultSchemaName) {
+      throw new Error('targetSchema is required for databases with multiple schemas');
+    }
+    targetSchema = driver.dialect.defaultSchemaName;
   }
 
   try {

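The effect of the new guard: when `targetSchema` is not given and schema handling is not ignored, drivers that define `dialect.defaultSchemaName` fall back to it, while drivers without a default now fail fast. A hedged sketch of the two outcomes; option names other than `targetSchema` and the return shape are assumptions.

```js
// Hypothetical sketch of the new fallback / error behaviour in generateDeploySql.
const dbgateApi = require('dbgate-api-premium'); // assumed entry point

async function previewDeploy(connection) {
  // targetSchema intentionally omitted:
  //  - if driver.dialect.defaultSchemaName is defined (e.g. 'public' on PostgreSQL),
  //    it is used as the target schema;
  //  - otherwise 6.5.0 throws: 'targetSchema is required for databases with multiple schemas'.
  const result = await dbgateApi.generateDeploySql({
    connection,
    modelFolder: './db-model', // assumed option
  });
  return result;
}
```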
package/src/utility/authProxy.js
CHANGED
@@ -41,6 +41,16 @@ function getAxiosParamsWithLicense() {
   };
 }
 
+function getLicenseHttpHeaders() {
+  const licenseValue = licenseKey ?? process.env.DBGATE_LICENSE;
+  if (!licenseValue) {
+    return {};
+  }
+  return {
+    'x-license': licenseValue,
+  };
+}
+
 async function authProxyGetRedirectUrl({ client, type, state, redirectUri }) {
   const respSession = await axios.default.post(
     `${AUTH_PROXY_URL}/create-session`,
@@ -138,7 +148,11 @@ async function obtainRefreshedLicense() {
     return null;
   }
 
-  const decoded = jwt.decode(licenseKey);
+  const decoded = jwt.decode(licenseKey?.trim());
+  if (!decoded?.end) {
+    logger.info('Invalid license found');
+    return null;
+  }
 
   if (Date.now() > decoded.end * 1000) {
     logger.info('License expired, trying to obtain fresh license');
@@ -281,4 +295,5 @@ module.exports = {
   callTextToSqlApi,
   callCompleteOnCursorApi,
   callRefactorSqlQueryApi,
+  getLicenseHttpHeaders,
 };

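The new `getLicenseHttpHeaders` helper centralizes the `x-license` header that the cloud and identity calls below attach to their requests. A minimal sketch of the intended consumption pattern; the URL is a placeholder.

```js
// Sketch: spreading the license header into an axios request, as cloudIntf.js does.
const axios = require('axios');
const { getLicenseHttpHeaders } = require('./authProxy');

async function callLicensedEndpoint(payload) {
  return axios.default.post('https://example.invalid/api/endpoint', payload, {
    headers: {
      // {} when no license key is configured, otherwise { 'x-license': '<license JWT>' }
      ...getLicenseHttpHeaders(),
      'Content-Type': 'application/json',
    },
  });
}
```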
package/src/utility/checkLicense.js
CHANGED

@@ -116,7 +116,7 @@ function checkLicenseKey(licenseKey) {
   }
 
   try {
-    const decoded = jwt.verify(licenseKey, publicKey, {
+    const decoded = jwt.verify(licenseKey?.trim(), publicKey, {
       algorithms: ['RS256'],
     });
 
@@ -144,7 +144,7 @@ function checkLicenseKey(licenseKey) {
   } catch (err) {
     try {
       // detect expired license
-      const decoded = jwt.decode(licenseKey);
+      const decoded = jwt.decode(licenseKey?.trim());
       if (decoded) {
         const { exp } = decoded;
         if (exp * 1000 < Date.now()) {
@@ -222,18 +222,16 @@ async function checkLicense() {
   }
 
   const datadir = path.join(os.homedir(), '.dbgate');
-
-
-
-
-
-
-
-      return checkLicenseKey(licenseKey);
-    }
-  } catch (err) {
-    logger.warn(extractErrorLogData(err), 'Error loading license key');
+  try {
+    const licenseKey = fs.readFileSync(path.join(datadir, 'license.key'), {
+      encoding: 'utf-8',
+    });
+    setAuthProxyLicense(licenseKey);
+    if (licenseKey) {
+      return checkLicenseKey(licenseKey);
     }
+  } catch (err) {
+    logger.warn(extractErrorLogData(err), 'Error loading license key');
   }
 
   if (isElectron()) {

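`checkLicense` now reads `~/.dbgate/license.key` inside its own try block and calls `setAuthProxyLicense` before validating, and the new `.trim()` calls presumably let a key saved with a trailing newline still verify. A small sketch of installing a key file where this code looks for it; the variable holding the license JWT is a placeholder.

```js
// Hypothetical sketch: placing a license key where checkLicense() reads it.
const fs = require('fs');
const os = require('os');
const path = require('path');

function installLicenseKey(licenseJwt) {
  const dbgateDir = path.join(os.homedir(), '.dbgate');
  fs.mkdirSync(dbgateDir, { recursive: true });
  // checkLicense() reads this file as utf-8; surrounding whitespace is trimmed since 6.5.0.
  fs.writeFileSync(path.join(dbgateDir, 'license.key'), licenseJwt, 'utf-8');
}
```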
package/src/utility/cloudIntf.js
ADDED

@@ -0,0 +1,399 @@
+const axios = require('axios');
+const fs = require('fs-extra');
+const _ = require('lodash');
+const path = require('path');
+const { getLicenseHttpHeaders } = require('./authProxy');
+const { getLogger, extractErrorLogData, jsonLinesParse } = require('dbgate-tools');
+const { datadir } = require('./directories');
+const platformInfo = require('./platformInfo');
+const connections = require('../controllers/connections');
+const { isProApp } = require('./checkLicense');
+const socket = require('./socket');
+const config = require('../controllers/config');
+const simpleEncryptor = require('simple-encryptor');
+const currentVersion = require('../currentVersion');
+const { getPublicIpInfo } = require('./hardwareFingerprint');
+
+const logger = getLogger('cloudIntf');
+
+let cloudFiles = null;
+
+const DBGATE_IDENTITY_URL = process.env.LOCAL_DBGATE_IDENTITY
+  ? 'http://localhost:3103'
+  : process.env.PROD_DBGATE_IDENTITY
+  ? 'https://identity.dbgate.io'
+  : process.env.DEVWEB || process.env.DEVMODE
+  ? 'https://identity.dbgate.udolni.net'
+  : 'https://identity.dbgate.io';
+
+const DBGATE_CLOUD_URL = process.env.LOCAL_DBGATE_CLOUD
+  ? 'http://localhost:3110'
+  : process.env.PROD_DBGATE_CLOUD
+  ? 'https://cloud.dbgate.io'
+  : process.env.DEVWEB || process.env.DEVMODE
+  ? 'https://cloud.dbgate.udolni.net'
+  : 'https://cloud.dbgate.io';
+
+async function createDbGateIdentitySession(client) {
+  const resp = await axios.default.post(
+    `${DBGATE_IDENTITY_URL}/api/create-session`,
+    {
+      client,
+    },
+    {
+      headers: {
+        ...getLicenseHttpHeaders(),
+        'Content-Type': 'application/json',
+      },
+    }
+  );
+  return {
+    sid: resp.data.sid,
+    url: `${DBGATE_IDENTITY_URL}/api/signin/${resp.data.sid}`,
+  };
+}
+
+function startCloudTokenChecking(sid, callback) {
+  const started = Date.now();
+  const interval = setInterval(async () => {
+    if (Date.now() - started > 60 * 1000) {
+      clearInterval(interval);
+      return;
+    }
+
+    try {
+      // console.log(`Checking cloud token for session: ${DBGATE_IDENTITY_URL}/api/get-token/${sid}`);
+      const resp = await axios.default.get(`${DBGATE_IDENTITY_URL}/api/get-token/${sid}`, {
+        headers: {
+          ...getLicenseHttpHeaders(),
+        },
+      });
+      // console.log('CHECK RESP:', resp.data);
+
+      if (resp.data.email) {
+        clearInterval(interval);
+        callback(resp.data);
+      }
+    } catch (err) {
+      logger.error(extractErrorLogData(err), 'Error checking cloud token');
+    }
+  }, 500);
+}
+
+async function loadCloudFiles() {
+  try {
+    const fileContent = await fs.readFile(path.join(datadir(), 'cloud-files.jsonl'), 'utf-8');
+    const parsedJson = jsonLinesParse(fileContent);
+    cloudFiles = _.sortBy(parsedJson, x => `${x.folder}/${x.title}`);
+  } catch (err) {
+    cloudFiles = [];
+  }
+}
+
+async function getCloudUsedEngines() {
+  try {
+    const resp = await callCloudApiGet('content-engines');
+    return resp || [];
+  } catch (err) {
+    logger.error(extractErrorLogData(err), 'Error getting cloud content list');
+    return [];
+  }
+}
+
+async function collectCloudFilesSearchTags() {
+  const res = [];
+  if (platformInfo.isElectron) {
+    res.push('app');
+  } else {
+    res.push('web');
+  }
+  if (platformInfo.isWindows) {
+    res.push('windows');
+  }
+  if (platformInfo.isMac) {
+    res.push('mac');
+  }
+  if (platformInfo.isLinux) {
+    res.push('linux');
+  }
+  if (platformInfo.isAwsUbuntuLayout) {
+    res.push('aws');
+  }
+  if (platformInfo.isAzureUbuntuLayout) {
+    res.push('azure');
+  }
+  if (platformInfo.isSnap) {
+    res.push('snap');
+  }
+  if (platformInfo.isDocker) {
+    res.push('docker');
+  }
+  if (platformInfo.isNpmDist) {
+    res.push('npm');
+  }
+  const engines = await connections.getUsedEngines();
+  const engineTags = engines.map(engine => engine.split('@')[0]);
+  res.push(...engineTags);
+  const cloudEngines = await getCloudUsedEngines();
+  const cloudEngineTags = cloudEngines.map(engine => engine.split('@')[0]);
+  res.push(...cloudEngineTags);
+
+  // team-premium and trials will return the same cloud files as premium - no need to check
+  res.push(isProApp() ? 'premium' : 'community');
+
+  return _.uniq(res);
+}
+
+async function getCloudSigninHolder() {
+  const settingsValue = await config.getSettings();
+  const holder = settingsValue['cloudSigninTokenHolder'];
+  return holder;
+}
+
+async function getCloudSigninHeaders(holder = null) {
+  if (!holder) {
+    holder = await getCloudSigninHolder();
+  }
+  if (holder) {
+    return {
+      'x-cloud-login': holder.token,
+    };
+  }
+  return null;
+}
+
+async function updateCloudFiles(isRefresh) {
+  let lastCloudFilesTags;
+  try {
+    lastCloudFilesTags = await fs.readFile(path.join(datadir(), 'cloud-files-tags.txt'), 'utf-8');
+  } catch (err) {
+    lastCloudFilesTags = '';
+  }
+
+  const ipInfo = await getPublicIpInfo();
+
+  const tags = (await collectCloudFilesSearchTags()).join(',');
+  let lastCheckedTm = 0;
+  if (tags == lastCloudFilesTags && cloudFiles.length > 0) {
+    lastCheckedTm = _.max(cloudFiles.map(x => parseInt(x.modifiedTm)));
+  }
+
+  logger.info({ tags, lastCheckedTm }, 'Downloading cloud files');
+
+  const resp = await axios.default.get(
+    `${DBGATE_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${
+      isRefresh ? 1 : 0
+    }&country=${ipInfo?.country || ''}`,
+    {
+      headers: {
+        ...getLicenseHttpHeaders(),
+        ...(await getCloudSigninHeaders()),
+        'x-app-version': currentVersion.version,
+      },
+    }
+  );
+
+  logger.info(`Downloaded ${resp.data.length} cloud files`);
+
+  const filesByPath = lastCheckedTm == 0 ? {} : _.keyBy(cloudFiles, 'path');
+  for (const file of resp.data) {
+    if (file.isDeleted) {
+      delete filesByPath[file.path];
+    } else {
+      filesByPath[file.path] = file;
+    }
+  }
+
+  cloudFiles = Object.values(filesByPath);
+
+  await fs.writeFile(path.join(datadir(), 'cloud-files.jsonl'), cloudFiles.map(x => JSON.stringify(x)).join('\n'));
+  await fs.writeFile(path.join(datadir(), 'cloud-files-tags.txt'), tags);
+
+  socket.emitChanged(`public-cloud-changed`);
+}
+
+async function startCloudFiles() {
+  loadCloudFiles();
+}
+
+async function getPublicCloudFiles() {
+  if (!loadCloudFiles) {
+    await loadCloudFiles();
+  }
+  return cloudFiles;
+}
+
+async function getPublicFileData(path) {
+  const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/public/${path}`, {
+    headers: {
+      ...getLicenseHttpHeaders(),
+    },
+  });
+  return resp.data;
+}
+
+async function refreshPublicFiles(isRefresh) {
+  if (!cloudFiles) {
+    await loadCloudFiles();
+  }
+  try {
+    await updateCloudFiles(isRefresh);
+  } catch (err) {
+    logger.error(extractErrorLogData(err), 'Error updating cloud files');
+  }
+}
+
+async function callCloudApiGet(endpoint, signinHolder = null, additionalHeaders = {}) {
+  if (!signinHolder) {
+    signinHolder = await getCloudSigninHolder();
+  }
+  if (!signinHolder) {
+    return null;
+  }
+  const signinHeaders = await getCloudSigninHeaders(signinHolder);
+
+  const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/${endpoint}`, {
+    headers: {
+      ...getLicenseHttpHeaders(),
+      ...signinHeaders,
+      ...additionalHeaders,
+    },
+    validateStatus: status => status < 500,
+  });
+  const { errorMessage, isLicenseLimit, limitedLicenseLimits } = resp.data;
+  if (errorMessage) {
+    return {
+      apiErrorMessage: errorMessage,
+      apiErrorIsLicenseLimit: isLicenseLimit,
+      apiErrorLimitedLicenseLimits: limitedLicenseLimits,
+    };
+  }
+  return resp.data;
+}
+
+async function callCloudApiPost(endpoint, body, signinHolder = null) {
+  if (!signinHolder) {
+    signinHolder = await getCloudSigninHolder();
+  }
+  if (!signinHolder) {
+    return null;
+  }
+  const signinHeaders = await getCloudSigninHeaders(signinHolder);
+
+  const resp = await axios.default.post(`${DBGATE_CLOUD_URL}/${endpoint}`, body, {
+    headers: {
+      ...getLicenseHttpHeaders(),
+      ...signinHeaders,
+    },
+    validateStatus: status => status < 500,
+  });
+  const { errorMessage, isLicenseLimit, limitedLicenseLimits } = resp.data;
+  if (errorMessage) {
+    return {
+      apiErrorMessage: errorMessage,
+      apiErrorIsLicenseLimit: isLicenseLimit,
+      apiErrorLimitedLicenseLimits: limitedLicenseLimits,
+    };
+  }
+  return resp.data;
+}
+
+async function getCloudFolderEncryptor(folid) {
+  const { encryptionKey } = await callCloudApiGet(`folder-key/${folid}`);
+  if (!encryptionKey) {
+    throw new Error('No encryption key for folder: ' + folid);
+  }
+  return simpleEncryptor.createEncryptor(encryptionKey);
+}
+
+async function getCloudContent(folid, cntid) {
+  const signinHolder = await getCloudSigninHolder();
+  if (!signinHolder) {
+    throw new Error('No signed in');
+  }
+
+  const encryptor = simpleEncryptor.createEncryptor(signinHolder.encryptionKey);
+
+  const { content, name, type, contentAttributes, apiErrorMessage } = await callCloudApiGet(
+    `content/${folid}/${cntid}`,
+    signinHolder,
+    {
+      'x-kehid': signinHolder.kehid,
+    }
+  );
+
+  if (apiErrorMessage) {
+    return { apiErrorMessage };
+  }
+
+  return {
+    content: encryptor.decrypt(content),
+    name,
+    type,
+    contentAttributes,
+  };
+}
+
+/**
+ *
+ * @returns Promise<{ cntid: string } | { apiErrorMessage: string }>
+ */
+async function putCloudContent(folid, cntid, content, name, type, contentAttributes) {
+  const signinHolder = await getCloudSigninHolder();
+  if (!signinHolder) {
+    throw new Error('No signed in');
+  }
+
+  const encryptor = simpleEncryptor.createEncryptor(signinHolder.encryptionKey);
+
+  const resp = await callCloudApiPost(
+    `put-content`,
+    {
+      folid,
+      cntid,
+      name,
+      type,
+      kehid: signinHolder.kehid,
+      content: encryptor.encrypt(content),
+      contentAttributes,
+    },
+    signinHolder
+  );
+  socket.emitChanged('cloud-content-changed');
+  socket.emit('cloud-content-updated');
+  return resp;
+}
+
+const cloudConnectionCache = {};
+async function loadCachedCloudConnection(folid, cntid) {
+  const cacheKey = `${folid}|${cntid}`;
+  if (!cloudConnectionCache[cacheKey]) {
+    const { content } = await getCloudContent(folid, cntid);
+    cloudConnectionCache[cacheKey] = {
+      ...JSON.parse(content),
+      _id: `cloud://${folid}/${cntid}`,
+    };
+  }
+  return cloudConnectionCache[cacheKey];
+}
+
+function removeCloudCachedConnection(folid, cntid) {
+  const cacheKey = `${folid}|${cntid}`;
+  delete cloudConnectionCache[cacheKey];
+}
+
+module.exports = {
+  createDbGateIdentitySession,
+  startCloudTokenChecking,
+  startCloudFiles,
+  getPublicCloudFiles,
+  getPublicFileData,
+  refreshPublicFiles,
+  callCloudApiGet,
+  callCloudApiPost,
+  getCloudFolderEncryptor,
+  getCloudContent,
+  loadCachedCloudConnection,
+  putCloudContent,
+  removeCloudCachedConnection,
+};

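The new `cloudIntf.js` module ties together sign-in against the identity service, a polled token check, and end-to-end encrypted content storage. A hedged sketch of how its exports fit together; the client name, folder/content ids, the content type, and the assumption that a sign-in token holder has already been stored in settings under `cloudSigninTokenHolder` (handled elsewhere in the controllers) are not taken from this file.

```js
// Hypothetical sketch wiring the new cloudIntf exports together.
const cloudIntf = require('./cloudIntf');

async function signIn() {
  // 1. Create an identity session; the user completes sign-in in a browser at `url`.
  const { sid, url } = await cloudIntf.createDbGateIdentitySession('dbgate-app'); // client name is a placeholder
  console.log('Open to sign in:', url);

  // 2. Poll /api/get-token/<sid> every 500 ms for up to one minute;
  //    the callback fires once the identity service returns an email.
  cloudIntf.startCloudTokenChecking(sid, tokenData => {
    console.log('Signed in as', tokenData.email);
  });
}

async function storeConnection(folid) {
  // Requires a stored cloudSigninTokenHolder; content is encrypted client-side
  // with the holder's encryptionKey before it is uploaded.
  const result = await cloudIntf.putCloudContent(
    folid,              // cloud folder id - placeholder
    'my-connection-id', // content id - placeholder
    JSON.stringify({ server: 'localhost', engine: 'postgres@dbgate-plugin-postgres' }),
    'My connection',    // display name
    'connection',       // content type - assumption
    {}                  // contentAttributes
  );
  if (result?.apiErrorMessage) {
    console.error(result.apiErrorMessage);
  }
}
```

Reads go through `getCloudContent`, which decrypts with the same key, and `loadCachedCloudConnection` adds a simple in-memory cache keyed by `folid|cntid`.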
package/src/utility/crypting.js
CHANGED
@@ -81,11 +81,11 @@ function decryptPasswordString(password) {
   return password;
 }
 
-function encryptObjectPasswordField(obj, field) {
+function encryptObjectPasswordField(obj, field, encryptor = null) {
   if (obj && obj[field] && !obj[field].startsWith('crypt:')) {
     return {
       ...obj,
-      [field]: 'crypt:' + getInternalEncryptor().encrypt(obj[field]),
+      [field]: 'crypt:' + (encryptor || getInternalEncryptor()).encrypt(obj[field]),
     };
   }
   return obj;
@@ -101,11 +101,11 @@ function decryptObjectPasswordField(obj, field) {
   return obj;
 }
 
-function encryptConnection(connection) {
+function encryptConnection(connection, encryptor = null) {
   if (connection.passwordMode != 'saveRaw') {
-    connection = encryptObjectPasswordField(connection, 'password');
-    connection = encryptObjectPasswordField(connection, 'sshPassword');
-    connection = encryptObjectPasswordField(connection, 'sshKeyfilePassword');
+    connection = encryptObjectPasswordField(connection, 'password', encryptor);
+    connection = encryptObjectPasswordField(connection, 'sshPassword', encryptor);
+    connection = encryptObjectPasswordField(connection, 'sshKeyfilePassword', encryptor);
   }
   return connection;
 }