dbgate-api-premium 6.3.3 → 6.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/package.json +9 -7
  2. package/src/controllers/archive.js +99 -6
  3. package/src/controllers/config.js +135 -22
  4. package/src/controllers/connections.js +35 -2
  5. package/src/controllers/databaseConnections.js +76 -1
  6. package/src/controllers/files.js +59 -0
  7. package/src/controllers/jsldata.js +9 -0
  8. package/src/controllers/runners.js +25 -5
  9. package/src/controllers/serverConnections.js +17 -2
  10. package/src/controllers/storage.js +51 -1
  11. package/src/controllers/uploads.js +0 -46
  12. package/src/currentVersion.js +2 -2
  13. package/src/proc/connectProcess.js +14 -2
  14. package/src/proc/databaseConnectionProcess.js +70 -5
  15. package/src/proc/serverConnectionProcess.js +7 -1
  16. package/src/proc/sessionProcess.js +15 -178
  17. package/src/shell/archiveReader.js +3 -1
  18. package/src/shell/collectorWriter.js +2 -2
  19. package/src/shell/copyStream.js +1 -0
  20. package/src/shell/dataReplicator.js +96 -0
  21. package/src/shell/download.js +22 -6
  22. package/src/shell/index.js +12 -2
  23. package/src/shell/jsonLinesWriter.js +4 -3
  24. package/src/shell/queryReader.js +10 -3
  25. package/src/shell/unzipDirectory.js +91 -0
  26. package/src/shell/unzipJsonLinesData.js +60 -0
  27. package/src/shell/unzipJsonLinesFile.js +59 -0
  28. package/src/shell/zipDirectory.js +49 -0
  29. package/src/shell/zipJsonLinesData.js +49 -0
  30. package/src/utility/cloudUpgrade.js +14 -1
  31. package/src/utility/crypting.js +56 -5
  32. package/src/utility/extractSingleFileFromZip.js +77 -0
  33. package/src/utility/handleQueryStream.js +186 -0
  34. package/src/utility/listZipEntries.js +41 -0
  35. package/src/utility/storageReplicatorItems.js +88 -0
  36. package/src/shell/dataDuplicator.js +0 -61
package/src/shell/archiveReader.js
@@ -3,7 +3,9 @@ const { archivedir, resolveArchiveFolder } = require('../utility/directories');
 const jsonLinesReader = require('./jsonLinesReader');
 
 function archiveReader({ folderName, fileName, ...other }) {
-  const jsonlFile = path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
+  const jsonlFile = folderName.endsWith('.zip')
+    ? `zip://archive:${folderName}//${fileName}.jsonl`
+    : path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
   const res = jsonLinesReader({ fileName: jsonlFile, ...other });
   return res;
 }
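With this change, a folderName ending in `.zip` no longer resolves to a folder on disk; archiveReader instead hands jsonLinesReader a `zip://` pseudo-URL, which the reworked download.js below knows how to extract. A sketch of the two forms, with hypothetical names:

    // folderName = 'backup.zip' → fileName passed on: 'zip://archive:backup.zip//customers.jsonl'
    // folderName = 'default'    → fileName passed on: '<archivedir>/default/customers.jsonl'
    const reader = archiveReader({ folderName: 'backup.zip', fileName: 'customers' });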
package/src/shell/collectorWriter.js
@@ -15,9 +15,9 @@ class CollectorWriterStream extends stream.Writable {
 
   _final(callback) {
     process.send({
-      msgtype: 'freeData',
+      msgtype: 'dataResult',
       runid: this.runid,
-      freeData: { rows: this.rows, structure: this.structure },
+      dataResult: { rows: this.rows, structure: this.structure },
    });
    callback();
  }
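The freeData → dataResult rename changes the IPC message this writer sends to the parent process, so any listener matching on msgtype has to follow suit. A minimal consumer sketch, where handleResult is a hypothetical callback on the parent side:

    process.on('message', msg => {
      if (msg.msgtype === 'dataResult') {
        // previously: msg.msgtype === 'freeData' with a freeData payload
        handleResult(msg.runid, msg.dataResult.rows, msg.dataResult.structure);
      }
    });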
package/src/shell/copyStream.js
@@ -69,6 +69,7 @@ async function copyStream(input, output, options) {
       msgtype: 'copyStreamError',
       copyStreamError: {
         message: extractErrorMessage(err),
+        progressName,
         ...err,
       },
     });
package/src/shell/dataReplicator.js
@@ -0,0 +1,96 @@
+const stream = require('stream');
+const path = require('path');
+const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
+const requireEngineDriver = require('../utility/requireEngineDriver');
+const { connectUtility } = require('../utility/connectUtility');
+const logger = getLogger('datareplicator');
+const { DataReplicator } = require('dbgate-datalib');
+const { compileCompoudEvalCondition } = require('dbgate-filterparser');
+const copyStream = require('./copyStream');
+const jsonLinesReader = require('./jsonLinesReader');
+const { resolveArchiveFolder } = require('../utility/directories');
+const { evaluateCondition } = require('dbgate-sqltree');
+
+function compileOperationFunction(enabled, condition) {
+  if (!enabled) return _row => false;
+  const conditionCompiled = compileCompoudEvalCondition(condition);
+  if (condition) {
+    return row => evaluateCondition(conditionCompiled, row);
+  }
+  return _row => true;
+}
+
+async function dataReplicator({
+  connection,
+  archive,
+  folder,
+  items,
+  options,
+  analysedStructure = null,
+  driver,
+  systemConnection,
+}) {
+  if (!driver) driver = requireEngineDriver(connection);
+
+  const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
+
+  try {
+    if (!analysedStructure) {
+      analysedStructure = await driver.analyseFull(dbhan);
+    }
+
+    let joinPath;
+
+    if (archive?.endsWith('.zip')) {
+      joinPath = file => `zip://archive:${archive}//${file}`;
+    } else {
+      const sourceDir = archive
+        ? resolveArchiveFolder(archive)
+        : folder?.startsWith('archive:')
+        ? resolveArchiveFolder(folder.substring('archive:'.length))
+        : folder;
+      joinPath = file => path.join(sourceDir, file);
+    }
+
+    const repl = new DataReplicator(
+      dbhan,
+      driver,
+      analysedStructure,
+      items.map(item => {
+        return {
+          name: item.name,
+          matchColumns: item.matchColumns,
+          findExisting: compileOperationFunction(item.findExisting, item.findCondition),
+          createNew: compileOperationFunction(item.createNew, item.createCondition),
+          updateExisting: compileOperationFunction(item.updateExisting, item.updateCondition),
+          deleteMissing: !!item.deleteMissing,
+          deleteRestrictionColumns: item.deleteRestrictionColumns ?? [],
+          openStream: item.openStream
+            ? item.openStream
+            : item.jsonArray
+            ? () => stream.Readable.from(item.jsonArray)
+            : () => jsonLinesReader({ fileName: joinPath(`${item.name}.jsonl`) }),
+        };
+      }),
+      stream,
+      copyStream,
+      options
+    );
+
+    await repl.run();
+    if (options?.runid) {
+      process.send({
+        msgtype: 'dataResult',
+        runid: options?.runid,
+        dataResult: repl.result,
+      });
+    }
+    return repl.result;
+  } finally {
+    if (!systemConnection) {
+      await driver.close(dbhan);
+    }
+  }
+}
+
+module.exports = dataReplicator;
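A hedged usage sketch of the new module; the connection values are hypothetical, and the item fields mirror the mapping above (findExisting/createNew/updateExisting are enabled flags, each optionally narrowed by a *Condition):

    const dataReplicator = require('./dataReplicator');

    await dataReplicator({
      connection: { engine: 'postgres@dbgate-plugin-postgres', server: 'localhost', database: 'mydb' },
      archive: 'my-archive',              // plain folder; use e.g. 'backup.zip' to read via zip:// URLs
      items: [
        {
          name: 'customers',              // reads customers.jsonl resolved through joinPath above
          matchColumns: ['email'],
          findExisting: true,
          createNew: true,
          updateExisting: true,
        },
      ],
      options: {},
    });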
package/src/shell/download.js
@@ -1,14 +1,30 @@
 const crypto = require('crypto');
 const path = require('path');
-const { uploadsdir } = require('../utility/directories');
+const { uploadsdir, archivedir } = require('../utility/directories');
 const { downloadFile } = require('../utility/downloader');
+const extractSingleFileFromZip = require('../utility/extractSingleFileFromZip');
 
-async function download(url) {
-  if (url && url.match(/(^http:\/\/)|(^https:\/\/)/)) {
-    const tmpFile = path.join(uploadsdir(), crypto.randomUUID());
-    await downloadFile(url, tmpFile);
-    return tmpFile;
+async function download(url, options = {}) {
+  const { targetFile } = options || {};
+  if (url) {
+    if (url.match(/(^http:\/\/)|(^https:\/\/)/)) {
+      const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
+      await downloadFile(url, destFile);
+      return destFile;
+    }
+    const zipMatch = url.match(/^zip\:\/\/(.*)\/\/(.*)$/);
+    if (zipMatch) {
+      const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
+      let zipFile = zipMatch[1];
+      if (zipFile.startsWith('archive:')) {
+        zipFile = path.join(archivedir(), zipFile.substring('archive:'.length));
+      }
+
+      await extractSingleFileFromZip(zipFile, zipMatch[2], destFile);
+      return destFile;
+    }
   }
+
   return url;
 }
 
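Anatomy of the zip:// form handled above, with illustrative values:

    //   zip://archive:backup.zip//customers.jsonl
    //        zipMatch[1] = 'archive:backup.zip'   → resolved under archivedir()
    //        zipMatch[2] = 'customers.jsonl'      → entry extracted from inside the ZIP
    const localFile = await download('zip://archive:backup.zip//customers.jsonl');
    // → temp file in uploadsdir() containing the extracted entry (or targetFile, if given)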
package/src/shell/index.js
@@ -25,7 +25,7 @@ const importDatabase = require('./importDatabase');
 const loadDatabase = require('./loadDatabase');
 const generateModelSql = require('./generateModelSql');
 const modifyJsonLinesReader = require('./modifyJsonLinesReader');
-const dataDuplicator = require('./dataDuplicator');
+const dataReplicator = require('./dataReplicator');
 const dbModelToJson = require('./dbModelToJson');
 const jsonToDbModel = require('./jsonToDbModel');
 const jsonReader = require('./jsonReader');
@@ -35,6 +35,11 @@ const autoIndexForeignKeysTransform = require('./autoIndexForeignKeysTransform')
 const generateDeploySql = require('./generateDeploySql');
 const dropAllDbObjects = require('./dropAllDbObjects');
 const importDbFromFolder = require('./importDbFromFolder');
+const zipDirectory = require('./zipDirectory');
+const unzipDirectory = require('./unzipDirectory');
+const zipJsonLinesData = require('./zipJsonLinesData');
+const unzipJsonLinesData = require('./unzipJsonLinesData');
+const unzipJsonLinesFile = require('./unzipJsonLinesFile');
 
 const dbgateApi = {
   queryReader,
@@ -64,7 +69,7 @@ const dbgateApi = {
   loadDatabase,
   generateModelSql,
   modifyJsonLinesReader,
-  dataDuplicator,
+  dataReplicator,
   dbModelToJson,
   jsonToDbModel,
   dataTypeMapperTransform,
@@ -73,6 +78,11 @@ const dbgateApi = {
   generateDeploySql,
   dropAllDbObjects,
   importDbFromFolder,
+  zipDirectory,
+  unzipDirectory,
+  zipJsonLinesData,
+  unzipJsonLinesData,
+  unzipJsonLinesFile,
 };
 
 requirePlugin.initializeDbgateApi(dbgateApi);
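The five new ZIP helpers become reachable through the dbgateApi object alongside the renamed dataReplicator. A sketch, assuming a script executed by the dbgate shell runner where dbgateApi is the initialized API object (how it is obtained in user scripts is an assumption here):

    await dbgateApi.zipDirectory('/tmp/export-folder', 'archive:backup.zip');
    const db = await dbgateApi.unzipJsonLinesData('/tmp/backup.zip');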
package/src/shell/jsonLinesWriter.js
@@ -36,9 +36,10 @@ async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true })
   logger.info(`Writing file ${fileName}`);
   const stringify = new StringifyStream({ header });
   const fileStream = fs.createWriteStream(fileName, encoding);
-  stringify.pipe(fileStream);
-  stringify['finisher'] = fileStream;
-  return stringify;
+  return [stringify, fileStream];
+  // stringify.pipe(fileStream);
+  // stringify['finisher'] = fileStream;
+  // return stringify;
 }
 
 module.exports = jsonLinesWriter;
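jsonLinesWriter now returns a [transform, fileStream] pair instead of a single stream carrying a 'finisher' property, pushing pipeline assembly to the caller (presumably copyStream, also touched in this release). A hedged consumer sketch, where source is a hypothetical readable stream:

    const [stringify, fileStream] = await jsonLinesWriter({ fileName: '/tmp/out.jsonl' });
    source.pipe(stringify).pipe(fileStream);
    fileStream.on('finish', () => console.log('written'));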
package/src/shell/queryReader.js
@@ -7,6 +7,8 @@ const logger = getLogger('queryReader');
  * Returns reader object for {@link copyStream} function. This reader object reads data from query.
  * @param {object} options
  * @param {connectionType} options.connection - connection object
+ * @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
+ * @param {object} options.driver - driver object. If not provided, it will be loaded from connection
  * @param {string} options.query - SQL query
  * @param {string} [options.queryType] - query type
  * @param {string} [options.sql] - SQL query. obsolete; use query instead
@@ -16,6 +18,8 @@ async function queryReader({
   connection,
   query,
   queryType,
+  systemConnection,
+  driver,
   // obsolete; use query instead
   sql,
 }) {
@@ -28,10 +32,13 @@ async function queryReader({
   logger.info({ sql: query || sql }, `Reading query`);
   // else console.log(`Reading query ${JSON.stringify(json)}`);
 
-  const driver = requireEngineDriver(connection);
-  const pool = await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script');
+  if (!driver) {
+    driver = requireEngineDriver(connection);
+  }
+  const dbhan = systemConnection || (await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script'));
+
   const reader =
-    queryType == 'json' ? await driver.readJsonQuery(pool, query) : await driver.readQuery(pool, query || sql);
+    queryType == 'json' ? await driver.readJsonQuery(dbhan, query) : await driver.readQuery(dbhan, query || sql);
   return reader;
 }
 
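The new systemConnection/driver parameters let callers reuse an already-open connection instead of creating one per query. Per the JSDoc above, systemConnection is the result of driver.connect; the connection object is hypothetical:

    const requireEngineDriver = require('../utility/requireEngineDriver');
    const queryReader = require('./queryReader');

    const driver = requireEngineDriver(connection);
    const dbhan = await driver.connect(connection);
    const reader = await queryReader({ systemConnection: dbhan, driver, query: 'SELECT * FROM customers' });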
package/src/shell/unzipDirectory.js
@@ -0,0 +1,91 @@
+const yauzl = require('yauzl');
+const fs = require('fs');
+const path = require('path');
+const { getLogger, extractErrorLogData } = require('dbgate-tools');
+
+const logger = getLogger('unzipDirectory');
+
+/**
+ * Extracts an entire ZIP file, preserving its internal directory layout.
+ *
+ * @param {string} zipPath Path to the ZIP file on disk.
+ * @param {string} outputDirectory Folder to create / overwrite with the contents.
+ * @returns {Promise<boolean>} Resolves `true` on success, rejects on error.
+ */
+function unzipDirectory(zipPath, outputDirectory) {
+  return new Promise((resolve, reject) => {
+    yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
+      if (err) return reject(err);
+
+      /** Pending per-file extractions – we resolve the main promise after they’re all done */
+      const pending = [];
+
+      // kick things off
+      zipFile.readEntry();
+
+      zipFile.on('entry', entry => {
+        const destPath = path.join(outputDirectory, entry.fileName);
+
+        // Handle directories (their names always end with “/” in ZIPs)
+        if (/\/$/.test(entry.fileName)) {
+          // Ensure directory exists, then continue to next entry
+          fs.promises
+            .mkdir(destPath, { recursive: true })
+            .then(() => zipFile.readEntry())
+            .catch(reject);
+          return;
+        }
+
+        // Handle files
+        const filePromise = fs.promises
+          .mkdir(path.dirname(destPath), { recursive: true }) // make sure parent dirs exist
+          .then(
+            () =>
+              new Promise((res, rej) => {
+                zipFile.openReadStream(entry, (err, readStream) => {
+                  if (err) return rej(err);
+
+                  const writeStream = fs.createWriteStream(destPath);
+                  readStream.pipe(writeStream);
+
+                  // proceed to next entry once we’ve consumed *this* one
+                  readStream.on('end', () => zipFile.readEntry());
+
+                  writeStream.on('finish', () => {
+                    logger.info(`Extracted "${entry.fileName}" → "${destPath}".`);
+                    res();
+                  });
+
+                  writeStream.on('error', writeErr => {
+                    logger.error(
+                      extractErrorLogData(writeErr),
+                      `Error extracting "${entry.fileName}" from "${zipPath}".`
+                    );
+                    rej(writeErr);
+                  });
+                });
+              })
+          );
+
+        pending.push(filePromise);
+      });
+
+      // Entire archive enumerated; wait for all streams to finish
+      zipFile.on('end', () => {
+        Promise.all(pending)
+          .then(() => {
+            logger.info(`Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
+            resolve(true);
+          })
+          .catch(reject);
+      });
+
+      zipFile.on('error', err => {
+        logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
+        reject(err);
+      });
+    });
+  });
+}
+
+module.exports = unzipDirectory;
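Note the promise resolves only after every extraction in `pending` has finished writing, not merely when enumeration ends. Usage, with hypothetical paths:

    const unzipDirectory = require('./unzipDirectory');
    await unzipDirectory('/tmp/backup.zip', '/tmp/restored-folder');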
package/src/shell/unzipJsonLinesData.js
@@ -0,0 +1,60 @@
+const yauzl = require('yauzl');
+const fs = require('fs');
+const { jsonLinesParse } = require('dbgate-tools');
+
+function unzipJsonLinesData(zipPath) {
+  return new Promise((resolve, reject) => {
+    // Open the zip file
+    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
+      if (err) {
+        return reject(err);
+      }
+
+      const results = {};
+
+      // Start reading entries
+      zipfile.readEntry();
+
+      zipfile.on('entry', entry => {
+        // Only process .jsonl files
+        if (/\.jsonl$/i.test(entry.fileName)) {
+          zipfile.openReadStream(entry, (err, readStream) => {
+            if (err) {
+              return reject(err);
+            }
+
+            const chunks = [];
+            readStream.on('data', chunk => chunks.push(chunk));
+            readStream.on('end', () => {
+              try {
+                const fileContent = Buffer.concat(chunks).toString('utf-8');
+                const parsedJson = jsonLinesParse(fileContent);
+                results[entry.fileName.replace(/\.jsonl$/, '')] = parsedJson;
+              } catch (parseError) {
+                return reject(parseError);
+              }
+
+              // Move to the next entry
+              zipfile.readEntry();
+            });
+          });
+        } else {
+          // Not a .jsonl file, skip
+          zipfile.readEntry();
+        }
+      });
+
+      // Resolve when no more entries
+      zipfile.on('end', () => {
+        resolve(results);
+      });
+
+      // Catch errors from zipfile
+      zipfile.on('error', zipErr => {
+        reject(zipErr);
+      });
+    });
+  });
+}
+
+module.exports = unzipJsonLinesData;
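Usage sketch: every .jsonl entry is buffered, parsed with jsonLinesParse, and keyed by its file name without the extension (values below are hypothetical):

    const data = await unzipJsonLinesData('/tmp/backup.zip');
    // e.g. { customers: [{ id: 1, ... }], orders: [...] }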
package/src/shell/unzipJsonLinesFile.js
@@ -0,0 +1,59 @@
+const yauzl = require('yauzl');
+const fs = require('fs');
+const { jsonLinesParse } = require('dbgate-tools');
+
+function unzipJsonLinesFile(zipPath, fileInZip) {
+  return new Promise((resolve, reject) => {
+    // Open the zip file
+    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
+      if (err) {
+        return reject(err);
+      }
+
+      let result = null;
+
+      // Start reading entries
+      zipfile.readEntry();
+
+      zipfile.on('entry', entry => {
+        if (entry.fileName == fileInZip) {
+          zipfile.openReadStream(entry, (err, readStream) => {
+            if (err) {
+              return reject(err);
+            }
+
+            const chunks = [];
+            readStream.on('data', chunk => chunks.push(chunk));
+            readStream.on('end', () => {
+              try {
+                const fileContent = Buffer.concat(chunks).toString('utf-8');
+                const parsedJson = jsonLinesParse(fileContent);
+                result = parsedJson;
+              } catch (parseError) {
+                return reject(parseError);
+              }
+
+              // Move to the next entry
+              zipfile.readEntry();
+            });
+          });
+        } else {
+          // Not the requested file, skip
+          zipfile.readEntry();
+        }
+      });
+
+      // Resolve when no more entries
+      zipfile.on('end', () => {
+        resolve(result);
+      });
+
+      // Catch errors from zipfile
+      zipfile.on('error', zipErr => {
+        reject(zipErr);
+      });
+    });
+  });
+}
+
+module.exports = unzipJsonLinesFile;
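Unlike unzipJsonLinesData, this variant scans for a single named entry and resolves with null when the archive does not contain it (paths are hypothetical):

    const rows = await unzipJsonLinesFile('/tmp/backup.zip', 'customers.jsonl');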
package/src/shell/zipDirectory.js
@@ -0,0 +1,49 @@
+const fs = require('fs');
+const path = require('path');
+const archiver = require('archiver');
+const { getLogger, extractErrorLogData } = require('dbgate-tools');
+const { archivedir } = require('../utility/directories');
+const logger = getLogger('compressDirectory');
+
+function zipDirectory(inputDirectory, outputFile) {
+  if (outputFile.startsWith('archive:')) {
+    outputFile = path.join(archivedir(), outputFile.substring('archive:'.length));
+  }
+
+  return new Promise((resolve, reject) => {
+    const output = fs.createWriteStream(outputFile);
+    const archive = archiver('zip', { zlib: { level: 9 } }); // level: 9 => best compression
+
+    // Listen for all archive data to be written
+    output.on('close', () => {
+      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
+      resolve();
+    });
+
+    archive.on('warning', err => {
+      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
+    });
+
+    archive.on('error', err => {
+      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
+      reject(err);
+    });
+
+    // Pipe archive data to the file
+    archive.pipe(output);
+
+    // Append files from a folder
+    archive.directory(inputDirectory, false, entryData => {
+      if (entryData.name.endsWith('.zip')) {
+        return false; // returning false means "do not include"
+      }
+      // otherwise, include it
+      return entryData;
+    });
+
+    // Finalize the archive
+    archive.finalize();
+  });
+}
+
+module.exports = zipDirectory;
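The 'archive:' prefix resolves the output under archivedir(), and the entry filter skips nested .zip files, so a ZIP written into the folder being archived is not packed into itself. Usage with a hypothetical folder:

    const zipDirectory = require('./zipDirectory');
    await zipDirectory('/path/to/archive-folder', 'archive:backup.zip');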
package/src/shell/zipJsonLinesData.js
@@ -0,0 +1,49 @@
+const fs = require('fs');
+const _ = require('lodash');
+const path = require('path');
+const archiver = require('archiver');
+const { getLogger, extractErrorLogData, jsonLinesStringify } = require('dbgate-tools');
+const { archivedir } = require('../utility/directories');
+const logger = getLogger('compressDirectory');
+
+function zipDirectory(jsonDb, outputFile) {
+  if (outputFile.startsWith('archive:')) {
+    outputFile = path.join(archivedir(), outputFile.substring('archive:'.length));
+  }
+
+  return new Promise((resolve, reject) => {
+    const output = fs.createWriteStream(outputFile);
+    const archive = archiver('zip', { zlib: { level: 9 } }); // level: 9 => best compression
+
+    // Listen for all archive data to be written
+    output.on('close', () => {
+      logger.info(`ZIP file created (${archive.pointer()} total bytes)`);
+      resolve();
+    });
+
+    archive.on('warning', err => {
+      logger.warn(extractErrorLogData(err), `Warning while creating ZIP: ${err.message}`);
+    });
+
+    archive.on('error', err => {
+      logger.error(extractErrorLogData(err), `Error while creating ZIP: ${err.message}`);
+      reject(err);
+    });
+
+    // Pipe archive data to the file
+    archive.pipe(output);
+
+    for (const key in jsonDb) {
+      const data = jsonDb[key];
+      if (_.isArray(data)) {
+        const jsonString = jsonLinesStringify(data);
+        archive.append(jsonString, { name: `${key}.jsonl` });
+      }
+    }
+
+    // Finalize the archive
+    archive.finalize();
+  });
+}
+
+module.exports = zipDirectory;
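Despite the internal copy-pasted name zipDirectory, index.js registers this module as zipJsonLinesData; only array-valued keys of the input object are written, one .jsonl entry per key. Usage sketch with hypothetical data:

    const zipJsonLinesData = require('./zipJsonLinesData');
    await zipJsonLinesData({ customers: [{ id: 1, name: 'Alice' }] }, 'archive:export.zip');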
package/src/utility/cloudUpgrade.js
@@ -4,11 +4,20 @@ const fsp = require('fs/promises');
 const semver = require('semver');
 const currentVersion = require('../currentVersion');
 const { getLogger, extractErrorLogData } = require('dbgate-tools');
+const { storageReadConfig } = require('../controllers/storageDb');
 
 const logger = getLogger('cloudUpgrade');
 
 async function checkCloudUpgrade() {
   try {
+    if (process.env.STORAGE_DATABASE) {
+      const settings = await storageReadConfig('settings');
+      if (settings['cloud.useAutoUpgrade'] != 1) {
+        // auto-upgrade not allowed
+        return;
+      }
+    }
+
     const resp = await axios.default.get('https://api.github.com/repos/dbgate/dbgate/releases/latest');
     const json = resp.data;
     const version = json.name.substring(1);
@@ -43,7 +52,11 @@ async function checkCloudUpgrade() {
 
       logger.info(`Downloaded new version from ${zipUrl}`);
     } else {
-      logger.info(`Checked version ${version} is not newer than ${cloudDownloadedVersion ?? currentVersion.version}, upgrade skippped`);
+      logger.info(
+        `Checked version ${version} is not newer than ${
+          cloudDownloadedVersion ?? currentVersion.version
+        }, upgrade skippped`
+      );
     }
   } catch (err) {
     logger.error(extractErrorLogData(err), 'Error checking cloud upgrade');
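With STORAGE_DATABASE set, the upgrade check now bails out unless the stored settings explicitly opt in; the key name is taken from the diff above:

    // settings read via storageReadConfig('settings'); auto-upgrade proceeds only when
    // settings['cloud.useAutoUpgrade'] == 1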