dbgate-api-premium 6.1.6 → 6.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +7 -5
- package/src/auth/storageAuthProvider.js +27 -22
- package/src/controllers/config.js +9 -3
- package/src/controllers/databaseConnections.js +56 -2
- package/src/controllers/sessions.js +24 -3
- package/src/controllers/storage.js +4 -8
- package/src/controllers/storageDb.js +54 -13
- package/src/currentVersion.js +2 -2
- package/src/main.js +6 -0
- package/src/proc/connectProcess.js +1 -1
- package/src/proc/databaseConnectionProcess.js +18 -10
- package/src/proc/serverConnectionProcess.js +1 -1
- package/src/proc/sessionProcess.js +44 -3
- package/src/shell/dataDuplicator.js +1 -1
- package/src/shell/deployDb.js +1 -1
- package/src/shell/dropAllDbObjects.js +1 -1
- package/src/shell/dumpDatabase.js +1 -1
- package/src/shell/executeQuery.js +1 -1
- package/src/shell/generateDeploySql.js +1 -1
- package/src/shell/importDatabase.js +1 -1
- package/src/shell/importDbFromFolder.js +110 -0
- package/src/shell/index.js +2 -0
- package/src/shell/loadDatabase.js +1 -1
- package/src/shell/queryReader.js +1 -1
- package/src/shell/tableReader.js +1 -1
- package/src/shell/tableWriter.js +2 -1
- package/src/utility/authProxy.js +135 -20
- package/src/utility/checkLicense.js +31 -9
- package/src/utility/connectUtility.js +42 -33
- package/src/utility/directories.js +7 -2
- package/src/utility/hardwareFingerprint.js +1 -0
- package/src/utility/platformInfo.js +2 -0
- package/src/utility/processArgs.js +3 -3
- package/src/utility/useController.js +1 -1

package/src/shell/dropAllDbObjects.js CHANGED

@@ -1,6 +1,6 @@
 const executeQuery = require('./executeQuery');
 const requireEngineDriver = require('../utility/requireEngineDriver');
-const connectUtility = require('../utility/connectUtility');
+const { connectUtility } = require('../utility/connectUtility');
 const { getLogger, extendDatabaseInfo } = require('dbgate-tools');

 const logger = getLogger('dropAllDbObjects');

package/src/shell/dumpDatabase.js CHANGED

@@ -1,5 +1,5 @@
 const requireEngineDriver = require('../utility/requireEngineDriver');
-const connectUtility = require('../utility/connectUtility');
+const { connectUtility } = require('../utility/connectUtility');
 const { getLogger } = require('dbgate-tools');

 const logger = getLogger('dumpDb');

package/src/shell/executeQuery.js CHANGED

@@ -1,6 +1,6 @@
 const fs = require('fs-extra');
 const requireEngineDriver = require('../utility/requireEngineDriver');
-const connectUtility = require('../utility/connectUtility');
+const { connectUtility } = require('../utility/connectUtility');
 const { getLogger, getLimitedQuery } = require('dbgate-tools');

 const logger = getLogger('execQuery');

package/src/shell/generateDeploySql.js CHANGED

@@ -13,7 +13,7 @@ const {
 } = require('dbgate-tools');
 const importDbModel = require('../utility/importDbModel');
 const requireEngineDriver = require('../utility/requireEngineDriver');
-const connectUtility = require('../utility/connectUtility');
+const { connectUtility } = require('../utility/connectUtility');

 /**
  * Generates query for deploying model into database

package/src/shell/importDatabase.js CHANGED

@@ -1,6 +1,6 @@
 const fs = require('fs');
 const requireEngineDriver = require('../utility/requireEngineDriver');
-const connectUtility = require('../utility/connectUtility');
+const { connectUtility } = require('../utility/connectUtility');
 const { splitQueryStream } = require('dbgate-query-splitter/lib/splitQueryStream');
 const download = require('./download');
 const stream = require('stream');

package/src/shell/importDbFromFolder.js ADDED

@@ -0,0 +1,110 @@
+const path = require('path');
+const fs = require('fs-extra');
+const executeQuery = require('./executeQuery');
+const { connectUtility } = require('../utility/connectUtility');
+const requireEngineDriver = require('../utility/requireEngineDriver');
+const { getAlterDatabaseScript, DatabaseAnalyser, runCommandOnDriver } = require('dbgate-tools');
+const importDbModel = require('../utility/importDbModel');
+const jsonLinesReader = require('./jsonLinesReader');
+const tableWriter = require('./tableWriter');
+const copyStream = require('./copyStream');
+
+/**
+ * Deploys database model stored in modelFolder (table as yamls) to database
+ * @param {object} options
+ * @param {connectionType} options.connection - connection object
+ * @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
+ * @param {object} options.driver - driver object. If not provided, it will be loaded from connection
+ * @param {string} options.folder - folder with model files (YAML files for tables, SQL files for views, procedures, ...)
+ * @param {function[]} options.modelTransforms - array of functions for transforming model
+ */
+async function importDbFromFolder({ connection, systemConnection, driver, folder, modelTransforms }) {
+  if (!driver) driver = requireEngineDriver(connection);
+  const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
+
+  try {
+    const model = await importDbModel(folder);
+
+    let modelAdapted = {
+      ...model,
+      tables: model.tables.map(table => driver.adaptTableInfo(table)),
+    };
+    for (const transform of modelTransforms || []) {
+      modelAdapted = transform(modelAdapted);
+    }
+
+    const modelNoFk = {
+      ...modelAdapted,
+      tables: modelAdapted.tables.map(table => ({
+        ...table,
+        foreignKeys: [],
+      })),
+    };
+
+    // const plan = createAlterDatabasePlan(
+    //   DatabaseAnalyser.createEmptyStructure(),
+    //   driver.dialect.enableAllForeignKeys ? modelAdapted : modelNoFk,
+    //   {},
+    //   DatabaseAnalyser.createEmptyStructure(),
+    //   driver.dialect.enableAllForeignKeys ? modelAdapted : modelNoFk,
+    //   driver
+    // );
+    // const dmp1 = driver.createDumper({ useHardSeparator: true });
+    // if (driver.dialect.enableAllForeignKeys) {
+    //   dmp1.enableAllForeignKeys(false);
+    // }
+    // plan.run(dmp1);
+    // if (driver.dialect.enableAllForeignKeys) {
+    //   dmp1.enableAllForeignKeys(true);
+    // }
+
+    const { sql } = getAlterDatabaseScript(
+      DatabaseAnalyser.createEmptyStructure(),
+      driver.dialect.enableAllForeignKeys ? modelAdapted : modelNoFk,
+      {},
+      DatabaseAnalyser.createEmptyStructure(),
+      driver.dialect.enableAllForeignKeys ? modelAdapted : modelNoFk,
+      driver
+    );
+    // console.log('CREATING STRUCTURE:', sql);
+    await executeQuery({ connection, systemConnection: dbhan, driver, sql, logScriptItems: true });
+
+    if (driver.dialect.enableAllForeignKeys) {
+      await runCommandOnDriver(dbhan, driver, dmp => dmp.enableAllForeignKeys(false));
+    }
+
+    for (const table of modelAdapted.tables) {
+      const fileName = path.join(folder, `${table.pureName}.jsonl`);
+      if (await fs.exists(fileName)) {
+        const src = await jsonLinesReader({ fileName });
+        const dst = await tableWriter({
+          systemConnection: dbhan,
+          pureName: table.pureName,
+          driver,
+          targetTableStructure: table,
+        });
+        await copyStream(src, dst);
+      }
+    }
+
+    if (driver.dialect.enableAllForeignKeys) {
+      await runCommandOnDriver(dbhan, driver, dmp => dmp.enableAllForeignKeys(true));
+    } else if (driver.dialect.createForeignKey) {
+      const dmp = driver.createDumper();
+      for (const table of modelAdapted.tables) {
+        for (const fk of table.foreignKeys) {
+          dmp.createForeignKey(fk);
+        }
+      }
+
+      // create foreign keys
+      await executeQuery({ connection, systemConnection: dbhan, driver, sql: dmp.s, logScriptItems: true });
+    }
+  } finally {
+    if (!systemConnection) {
+      await driver.close(dbhan);
+    }
+  }
+}
+
+module.exports = importDbFromFolder;
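
For orientation, a minimal usage sketch of the new shell function. The connection values and folder path are illustrative, and dbgateApi.runScript is assumed to be the usual dbgate-api entry point for shell scripts:

  const dbgateApi = require('dbgate-api-premium');

  dbgateApi.runScript(async () => {
    // creates database objects from the YAML/SQL model,
    // then loads any <table>.jsonl data files found in the folder
    await dbgateApi.importDbFromFolder({
      connection: {
        engine: 'postgres@dbgate-plugin-postgres', // hypothetical connection; any supported driver works
        server: 'localhost',
        user: 'postgres',
        password: 'secret',
        database: 'mydb',
      },
      folder: '/path/to/model',
    });
  });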

package/src/shell/index.js CHANGED

@@ -35,6 +35,7 @@ const sqlTextReplacementTransform = require('./sqlTextReplacementTransform');
 const autoIndexForeignKeysTransform = require('./autoIndexForeignKeysTransform');
 const generateDeploySql = require('./generateDeploySql');
 const dropAllDbObjects = require('./dropAllDbObjects');
+const importDbFromFolder = require('./importDbFromFolder');

 const dbgateApi = {
   queryReader,
@@ -73,6 +74,7 @@ const dbgateApi = {
   autoIndexForeignKeysTransform,
   generateDeploySql,
   dropAllDbObjects,
+  importDbFromFolder,
 };

 requirePlugin.initializeDbgateApi(dbgateApi);

package/src/shell/loadDatabase.js CHANGED

@@ -1,5 +1,5 @@
 const requireEngineDriver = require('../utility/requireEngineDriver');
-const connectUtility = require('../utility/connectUtility');
+const { connectUtility } = require('../utility/connectUtility');
 const { getLogger } = require('dbgate-tools');
 const exportDbModel = require('../utility/exportDbModel');


package/src/shell/queryReader.js CHANGED

@@ -1,5 +1,5 @@
 const requireEngineDriver = require('../utility/requireEngineDriver');
-const connectUtility = require('../utility/connectUtility');
+const { connectUtility } = require('../utility/connectUtility');
 const { getLogger } = require('dbgate-tools');
 const logger = getLogger('queryReader');


package/src/shell/tableReader.js CHANGED

@@ -1,6 +1,6 @@
 const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
 const requireEngineDriver = require('../utility/requireEngineDriver');
-const connectUtility = require('../utility/connectUtility');
+const { connectUtility } = require('../utility/connectUtility');
 const logger = getLogger('tableReader');

 /**

package/src/shell/tableWriter.js CHANGED

@@ -1,6 +1,6 @@
 const { fullNameToString, getLogger } = require('dbgate-tools');
 const requireEngineDriver = require('../utility/requireEngineDriver');
-const connectUtility = require('../utility/connectUtility');
+const { connectUtility } = require('../utility/connectUtility');
 const logger = getLogger('tableWriter');

 /**
@@ -15,6 +15,7 @@ const logger = getLogger('tableWriter');
  * @param {boolean} options.truncate - truncate table before insert
  * @param {boolean} options.createIfNotExists - create table if not exists
  * @param {boolean} options.commitAfterInsert - commit transaction after insert
+ * @param {any} options.targetTableStructure - target table structure (don't analyse if given)
  * @returns {Promise<writerType>} - writer object
  */
 async function tableWriter({ connection, schemaName, pureName, driver, systemConnection, ...options }) {

package/src/utility/authProxy.js CHANGED

@@ -2,11 +2,24 @@ const axios = require('axios');
 const { Signer } = require('@aws-sdk/rds-signer');
 const jwt = require('jsonwebtoken');
 const { getLogger, extractErrorLogData } = require('dbgate-tools');
+const stableStringify = require('json-stable-stringify');
+const crypto = require('crypto');
+const _ = require('lodash');
+const processArgs = require('./processArgs');

 const logger = getLogger('authProxy');

-const AUTH_PROXY_URL = process.env.
-
+const AUTH_PROXY_URL = process.env.LOCAL_AUTH_PROXY
+  ? 'http://localhost:3109'
+  : process.env.DEVWEB || process.env.DEVMODE
+  ? 'https://auth-proxy.dbgate.udolni.net'
+  : 'https://auth.dbgate.eu';
+
+const AI_GATEWAY_URL = process.env.LOCAL_AI_GATEWAY
+  ? 'http://localhost:3110'
+  : process.env.DEVWEB || process.env.DEVMODE
+  ? 'https://aigw.dbgate.udolni.net'
+  : 'https://aigw.dbgate.io';

 let licenseKey = null;

@@ -18,6 +31,16 @@ function isAuthProxySupported() {
   return true;
 }

+function getAxiosParamsWithLicense() {
+  return {
+    headers: {
+      'Content-Type': 'application/json',
+      Authorization: `Bearer ${licenseKey ?? process.env.DBGATE_LICENSE}`,
+      'x-api-key': processArgs.runE2eTests ? 'bcf6e1a0-5763-4060-9391-18fda005722d' : null,
+    },
+  };
+}
+
 async function authProxyGetRedirectUrl({ client, type, state, redirectUri }) {
   const respSession = await axios.default.post(
     `${AUTH_PROXY_URL}/create-session`,
@@ -25,12 +48,7 @@ async function authProxyGetRedirectUrl({ client, type, state, redirectUri }) {
       client,
       type,
     },
-
-      headers: {
-        'Content-Type': 'application/json',
-        Authorization: `Bearer ${licenseKey ?? process.env.DBGATE_LICENSE}`,
-      },
-    }
+    getAxiosParamsWithLicense()
   );

   const { sid } = respSession.data;
@@ -55,12 +73,7 @@ async function authProxyGetTokenFromCode({ sid, code }) {
       sid,
       code,
     },
-
-      headers: {
-        'Content-Type': 'application/json',
-        Authorization: `Bearer ${licenseKey ?? process.env.DBGATE_LICENSE}`,
-      },
-    }
+    getAxiosParamsWithLicense()
   );
   return respToken.data.token;
 } catch (err) {
@@ -82,12 +95,7 @@ function startTokenChecking(sid, callback) {
     {
       sid,
     },
-
-      headers: {
-        'Content-Type': 'application/json',
-        Authorization: `Bearer ${licenseKey ?? process.env.DBGATE_LICENSE}`,
-      },
-    }
+    getAxiosParamsWithLicense()
   );

   if (resp.data.status == 'ok') {
@@ -156,6 +164,110 @@ async function obtainRefreshedLicense() {
   }
 }

+/**
+ * @param {import('dbgate-types').DatabaseInfo} structure
+ * @returns {import('dbgate-types').DatabaseInfoTiny}
+ */
+
+function extractTinyStructure(structure) {
+  return {
+    t: _.sortBy(structure.tables, x => x.pureName).map(table => ({
+      n: table.pureName,
+      o: table.objectComment,
+      c: table.columns.map(column => ({ n: column.columnName, t: column.dataType })),
+      p: table.primaryKey
+        ? {
+            c: table.primaryKey?.columns?.map(column => ({ n: column.columnName })),
+          }
+        : undefined,
+      f: _.sortBy(table.foreignKeys, x => x.constraintName).map(fk => ({
+        r: fk.refTableName,
+        c: fk.columns.map(column => ({ n: column.columnName, r: column.refColumnName })),
+      })),
+    })),
+  };
+}
+
+function getSha256Hash(data) {
+  return crypto
+    .createHash('sha256') // choose the hash function
+    .update(data, 'utf8') // feed the data into the hash (as UTF-8)
+    .digest('hex'); // read out the result as a hexadecimal string
+}
+
+async function findModelOnProxy(hash) {
+  const resp = await axios.default.post(
+    `${AI_GATEWAY_URL}/find-db-model`,
+    {
+      hash,
+    },
+    getAxiosParamsWithLicense()
+  );
+  return resp.data.status == 'ok';
+}
+
+async function callTextToSqlApi(text, structure, dialect) {
+  const tinyStructure = extractTinyStructure(structure);
+  const json = stableStringify(tinyStructure);
+  const modelHash = getSha256Hash(json);
+  const isModelOnProxy = await findModelOnProxy(modelHash);
+
+  const resp = await axios.default.post(
+    `${AI_GATEWAY_URL}/text-to-sql`,
+    {
+      text,
+      modelHash,
+      model: isModelOnProxy ? null : tinyStructure,
+      dialect,
+    },
+    getAxiosParamsWithLicense()
+  );
+
+  return resp.data;
+}
+
+async function callCompleteOnCursorApi(text, structure, dialect, line) {
+  const tinyStructure = extractTinyStructure(structure);
+  const json = stableStringify(tinyStructure);
+  const modelHash = getSha256Hash(json);
+  const isModelOnProxy = await findModelOnProxy(modelHash);
+
+  const resp = await axios.default.post(
+    `${AI_GATEWAY_URL}/complete-on-cursor`,
+    {
+      text,
+      modelHash,
+      model: isModelOnProxy ? null : tinyStructure,
+      dialect,
+      line,
+    },
+    getAxiosParamsWithLicense()
+  );
+
+  return resp.data;
+}
+
+async function callRefactorSqlQueryApi(query, task, structure, dialect) {
+  const tinyStructure = extractTinyStructure(structure);
+  const json = stableStringify(tinyStructure);
+  const modelHash = getSha256Hash(json);
+  const isModelOnProxy = await findModelOnProxy(modelHash);
+
+  const resp = await axios.default.post(
+    `${AI_GATEWAY_URL}/refactor-sql-query`,
+    {
+      query,
+      task,
+      modelHash,
+      model: isModelOnProxy ? null : tinyStructure,
+      dialect,
+    },
+    getAxiosParamsWithLicense()
+  );
+
+  return resp.data;
+}
+
 module.exports = {
   isAuthProxySupported,
   authProxyGetRedirectUrl,
@@ -166,4 +278,7 @@ module.exports = {
   supportsAwsIam,
   getAwsIamToken,
   obtainRefreshedLicense,
+  callTextToSqlApi,
+  callCompleteOnCursorApi,
+  callRefactorSqlQueryApi,
 };
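
The three AI gateway calls above share one caching pattern: condense the database structure with extractTinyStructure, hash the stable JSON form, and upload the model only when /find-db-model does not recognize the hash. A standalone sketch of that step, assuming the same json-stable-stringify dependency; hashTinyModel and prepareModelPayload are illustrative names, not part of the package:

  const crypto = require('crypto');
  const stableStringify = require('json-stable-stringify');

  // Stable stringification keeps key order deterministic, so an unchanged
  // schema always produces the same hash.
  function hashTinyModel(tinyStructure) {
    const json = stableStringify(tinyStructure);
    return crypto.createHash('sha256').update(json, 'utf8').digest('hex');
  }

  // Send the full model only when the gateway reports the hash as unknown.
  async function prepareModelPayload(tinyStructure, findModelOnProxy) {
    const modelHash = hashTinyModel(tinyStructure);
    const known = await findModelOnProxy(modelHash);
    return { modelHash, model: known ? null : tinyStructure };
  }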

package/src/utility/checkLicense.js CHANGED

@@ -9,6 +9,7 @@ const { setAuthProxyLicense } = require('./authProxy');
 const axios = require('axios');
 const crypto = require('crypto');
 const platformInfo = require('./platformInfo');
+const processArgs = require('./processArgs');

 const logger = getLogger('checkLicense');

@@ -107,6 +108,13 @@ async function getAwsMetadata() {
 }

 function checkLicenseKey(licenseKey) {
+  if (processArgs.runE2eTests) {
+    return {
+      status: 'ok',
+      type: 'premium',
+    };
+  }
+
   try {
     const decoded = jwt.verify(licenseKey, publicKey, {
       algorithms: ['RS256'],
@@ -157,11 +165,11 @@
 }
 }

-let
-let
-function
-if (
-return
+let cloudTokenLoaded = false;
+let cloudTokenHash = null;
+function getCloudToken() {
+  if (cloudTokenLoaded) {
+    return cloudTokenHash;
   }
   try {
     const token = fs
@@ -169,18 +177,25 @@ function getAwsToken() {
       encoding: 'utf-8',
     })
     .trim();
-
+    cloudTokenHash = crypto.createHash('md5').update(token).digest('hex');
   } catch (err) {}
-
-  return
+  cloudTokenLoaded = true;
+  return cloudTokenHash;
 }

 async function checkLicense() {
+  if (processArgs.runE2eTests) {
+    return {
+      status: 'ok',
+      type: 'premium',
+    };
+  }
+
   if (process.env.DBGATE_LICENSE) {
     return checkLicenseKey(process.env.DBGATE_LICENSE);
   }

-  if (platformInfo.isAwsUbuntuLayout &&
+  if (platformInfo.isAwsUbuntuLayout && getCloudToken() == 'b93c7491890460063003a02de06ec84a') {
     const metadata = await getAwsMetadata();
     if (metadata?.amiId) {
       return {
@@ -190,6 +205,13 @@ async function checkLicense() {
     }
   }

+  if (platformInfo.isAzureUbuntuLayout && getCloudToken() == 'b93c7491890460063003a02de06ec84a') {
+    return {
+      status: 'ok',
+      type: 'premium',
+    };
+  }
+
   if (process.env.STORAGE_DATABASE) {
     const licenseConfig = await storageReadConfig('license');
     const key = licenseConfig?.licenseKey;

package/src/utility/connectUtility.js CHANGED

@@ -47,50 +47,32 @@ async function loadConnection(driver, storedConnection, connectionMode) {
   return storedConnection;
 }

-async function
-
-
-  const connection = {
-    database: connectionLoaded.defaultDatabase,
-    ...decryptConnection(connectionLoaded),
-  };
-
-  if (!connection.port && driver.defaultPort) connection.port = driver.defaultPort.toString();
-
-  if (connection.useSshTunnel) {
-    const tunnel = await getSshTunnelProxy(connection);
-    if (tunnel.state == 'error') {
-      throw new Error(tunnel.message);
-    }
-
-    connection.server = tunnel.localHost;
-    connection.port = tunnel.localPort;
-  }
-
-  // SSL functionality - copied from https://github.com/beekeeper-studio/beekeeper-studio
+async function extractConnectionSslParams(connection) {
+  /** @type {any} */
+  let ssl = undefined;
   if (connection.useSsl) {
-
+    ssl = {};

     if (connection.sslCaFile) {
-
-
+      ssl.ca = await fs.readFile(connection.sslCaFile);
+      ssl.sslCaFile = connection.sslCaFile;
     }

     if (connection.sslCertFile) {
-
-
+      ssl.cert = await fs.readFile(connection.sslCertFile);
+      ssl.sslCertFile = connection.sslCertFile;
     }

     if (connection.sslKeyFile) {
-
-
+      ssl.key = await fs.readFile(connection.sslKeyFile);
+      ssl.sslKeyFile = connection.sslKeyFile;
     }

     if (connection.sslCertFilePassword) {
-
+      ssl.password = connection.sslCertFilePassword;
     }

-    if (!
+    if (!ssl.key && !ssl.ca && !ssl.cert) {
       // TODO: provide this as an option in settings
       // or per-connection as 'reject self-signed certs'
       // How it works:
@@ -98,14 +80,41 @@ async function connectUtility(driver, storedConnection, connectionMode, addition
       // if true, has to be from a public CA
       // Heroku certs are self-signed.
       // if you provide ca/cert/key files, it overrides this
-
+      ssl.rejectUnauthorized = false;
     } else {
-
+      ssl.rejectUnauthorized = connection.sslRejectUnauthorized;
+    }
+  }
+  return ssl;
+}
+
+async function connectUtility(driver, storedConnection, connectionMode, additionalOptions = null) {
+  const connectionLoaded = await loadConnection(driver, storedConnection, connectionMode);
+
+  const connection = {
+    database: connectionLoaded.defaultDatabase,
+    ...decryptConnection(connectionLoaded),
+  };
+
+  if (!connection.port && driver.defaultPort) connection.port = driver.defaultPort.toString();
+
+  if (connection.useSshTunnel) {
+    const tunnel = await getSshTunnelProxy(connection);
+    if (tunnel.state == 'error') {
+      throw new Error(tunnel.message);
     }
+
+    connection.server = tunnel.localHost;
+    connection.port = tunnel.localPort;
   }

+  connection.ssl = await extractConnectionSslParams(connection);
+
   const conn = await driver.connect({ ...connection, ...additionalOptions });
   return conn;
 }

-module.exports =
+module.exports = {
+  extractConnectionSslParams,
+  connectUtility,
+};
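
Note that connectUtility switches from a default export to a named export (with the SSL handling factored out into the new extractConnectionSslParams helper), so any code that required it directly needs a one-line change. A sketch of the migration; the call signature itself is unchanged:

  // before (6.1.6)
  // const connectUtility = require('../utility/connectUtility');

  // after (6.2.1)
  const { connectUtility, extractConnectionSslParams } = require('../utility/connectUtility');

  // usage inside an async function stays the same, e.g.:
  // const dbhan = await connectUtility(driver, connection, 'read');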

package/src/utility/directories.js CHANGED

@@ -58,9 +58,11 @@ const jsldir = dirFunc('jsl', true);
 const rundir = dirFunc('run', true);
 const uploadsdir = dirFunc('uploads', true);
 const pluginsdir = dirFunc('plugins');
-const archivedir =
+const archivedir = processArgs.runE2eTests
+  ? dirFunc('archive-e2etests', false, ['default'])
+  : dirFunc('archive', false, ['default']);
 const appdir = dirFunc('apps');
-const filesdir = dirFunc('files');
+const filesdir = processArgs.runE2eTests ? dirFunc('files-e2etests') : dirFunc('files');
 const logsdir = dirFunc('logs', 3600 * 24 * 7);

 function packagedPluginsDir() {
@@ -80,6 +82,9 @@ function packagedPluginsDir() {
   if (platformInfo.isAwsUbuntuLayout) {
     return '/home/ubuntu/build/plugins';
   }
+  if (platformInfo.isAzureUbuntuLayout) {
+    return '/home/azureuser/build/plugins';
+  }
   if (platformInfo.isNpmDist) {
     // node_modules
     return global['PLUGINS_DIR'];

package/src/utility/hardwareFingerprint.js CHANGED

@@ -73,6 +73,7 @@ async function getPublicHardwareFingerprint() {
     region: fingerprint.region,
     isDocker: platformInfo.isDocker,
     isAwsUbuntuLayout: platformInfo.isAwsUbuntuLayout,
+    isAzureUbuntuLayout: platformInfo.isAzureUbuntuLayout,
     isElectron: platformInfo.isElectron,
   },
 };

package/src/utility/platformInfo.js CHANGED

@@ -15,6 +15,7 @@ const isNpmDist = !!global['IS_NPM_DIST'];
 const isDbModel = !!global['IS_DB_MODEL'];
 const isForkedApi = processArgs.isForkedApi;
 const isAwsUbuntuLayout = fs.existsSync('/home/ubuntu/build/public');
+const isAzureUbuntuLayout = fs.existsSync('/home/azureuser/build/public');

 // function moduleAvailable(name) {
 //   try {
@@ -57,6 +58,7 @@ const platformInfo = {
   allowConnectionFromEnvVariables: !!isDbModel,
   defaultKeyfile: path.join(os.homedir(), '.ssh/id_rsa'),
   isAwsUbuntuLayout,
+  isAzureUbuntuLayout,
 };

 module.exports = platformInfo;

package/src/utility/useController.js CHANGED

@@ -86,7 +86,7 @@ module.exports = function useController(app, electron, route, controller) {
         detail: err.detail,
       });
     } else {
-      res.status(500).json({ apiErrorMessage: err.message });
+      res.status(500).json({ apiErrorMessage: (_.isString(err) ? err : err.message) ?? 'Unknown error' });
     }
   }
 });