dbgate-api-premium 6.3.3 → 6.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/package.json +9 -7
  2. package/src/controllers/archive.js +99 -6
  3. package/src/controllers/config.js +135 -22
  4. package/src/controllers/connections.js +35 -2
  5. package/src/controllers/databaseConnections.js +76 -1
  6. package/src/controllers/files.js +59 -0
  7. package/src/controllers/jsldata.js +9 -0
  8. package/src/controllers/runners.js +25 -5
  9. package/src/controllers/serverConnections.js +17 -2
  10. package/src/controllers/storage.js +51 -1
  11. package/src/controllers/uploads.js +0 -46
  12. package/src/currentVersion.js +2 -2
  13. package/src/proc/connectProcess.js +14 -2
  14. package/src/proc/databaseConnectionProcess.js +70 -5
  15. package/src/proc/serverConnectionProcess.js +7 -1
  16. package/src/proc/sessionProcess.js +15 -178
  17. package/src/shell/archiveReader.js +3 -1
  18. package/src/shell/collectorWriter.js +2 -2
  19. package/src/shell/copyStream.js +1 -0
  20. package/src/shell/dataReplicator.js +96 -0
  21. package/src/shell/download.js +22 -6
  22. package/src/shell/index.js +12 -2
  23. package/src/shell/jsonLinesWriter.js +4 -3
  24. package/src/shell/queryReader.js +10 -3
  25. package/src/shell/unzipDirectory.js +91 -0
  26. package/src/shell/unzipJsonLinesData.js +60 -0
  27. package/src/shell/unzipJsonLinesFile.js +59 -0
  28. package/src/shell/zipDirectory.js +49 -0
  29. package/src/shell/zipJsonLinesData.js +49 -0
  30. package/src/utility/cloudUpgrade.js +14 -1
  31. package/src/utility/crypting.js +56 -5
  32. package/src/utility/extractSingleFileFromZip.js +77 -0
  33. package/src/utility/handleQueryStream.js +186 -0
  34. package/src/utility/listZipEntries.js +41 -0
  35. package/src/utility/storageReplicatorItems.js +88 -0
  36. package/src/shell/dataDuplicator.js +0 -61
package/src/utility/crypting.js
@@ -59,7 +59,7 @@ async function loadEncryptionKeyFromExternal(storedValue, setStoredValue) {
 
 let _encryptor = null;
 
-function getEncryptor() {
+function getInternalEncryptor() {
   if (_encryptor) {
     return _encryptor;
   }
@@ -69,14 +69,14 @@ function getEncryptor() {
 
 function encryptPasswordString(password) {
   if (password && !password.startsWith('crypt:')) {
-    return 'crypt:' + getEncryptor().encrypt(password);
+    return 'crypt:' + getInternalEncryptor().encrypt(password);
   }
   return password;
 }
 
 function decryptPasswordString(password) {
   if (password && password.startsWith('crypt:')) {
-    return getEncryptor().decrypt(password.substring('crypt:'.length));
+    return getInternalEncryptor().decrypt(password.substring('crypt:'.length));
   }
   return password;
 }
@@ -85,7 +85,7 @@ function encryptObjectPasswordField(obj, field) {
   if (obj && obj[field] && !obj[field].startsWith('crypt:')) {
     return {
       ...obj,
-      [field]: 'crypt:' + getEncryptor().encrypt(obj[field]),
+      [field]: 'crypt:' + getInternalEncryptor().encrypt(obj[field]),
     };
   }
   return obj;
@@ -95,7 +95,7 @@ function decryptObjectPasswordField(obj, field) {
   if (obj && obj[field] && obj[field].startsWith('crypt:')) {
     return {
       ...obj,
-      [field]: getEncryptor().decrypt(obj[field].substring('crypt:'.length)),
+      [field]: getInternalEncryptor().decrypt(obj[field].substring('crypt:'.length)),
     };
   }
   return obj;
@@ -156,6 +156,49 @@ function getEncryptionKey() {
   return _encryptionKey;
 }
 
+function generateTransportEncryptionKey() {
+  const encryptor = simpleEncryptor.createEncryptor(defaultEncryptionKey);
+  const result = {
+    encryptionKey: crypto.randomBytes(32).toString('hex'),
+  };
+  return encryptor.encrypt(result);
+}
+
+function createTransportEncryptor(encryptionData) {
+  const encryptor = simpleEncryptor.createEncryptor(defaultEncryptionKey);
+  const data = encryptor.decrypt(encryptionData);
+  const res = simpleEncryptor.createEncryptor(data['encryptionKey']);
+  return res;
+}
+
+function recryptObjectPasswordField(obj, field, decryptEncryptor, encryptEncryptor) {
+  if (obj && obj[field] && obj[field].startsWith('crypt:')) {
+    return {
+      ...obj,
+      [field]: 'crypt:' + encryptEncryptor.encrypt(decryptEncryptor.decrypt(obj[field].substring('crypt:'.length))),
+    };
+  }
+  return obj;
+}
+
+function recryptObjectPasswordFieldInPlace(obj, field, decryptEncryptor, encryptEncryptor) {
+  if (obj && obj[field] && obj[field].startsWith('crypt:')) {
+    obj[field] = 'crypt:' + encryptEncryptor.encrypt(decryptEncryptor.decrypt(obj[field].substring('crypt:'.length)));
+  }
+}
+
+function recryptConnection(connection, decryptEncryptor, encryptEncryptor) {
+  connection = recryptObjectPasswordField(connection, 'password', decryptEncryptor, encryptEncryptor);
+  connection = recryptObjectPasswordField(connection, 'sshPassword', decryptEncryptor, encryptEncryptor);
+  connection = recryptObjectPasswordField(connection, 'sshKeyfilePassword', decryptEncryptor, encryptEncryptor);
+  return connection;
+}
+
+function recryptUser(user, decryptEncryptor, encryptEncryptor) {
+  user = recryptObjectPasswordField(user, 'password', decryptEncryptor, encryptEncryptor);
+  return user;
+}
+
 module.exports = {
   loadEncryptionKey,
   encryptConnection,
@@ -169,4 +212,12 @@ module.exports = {
   setEncryptionKey,
   encryptPasswordString,
   decryptPasswordString,
+
+  getInternalEncryptor,
+  recryptConnection,
+  recryptUser,
+  generateTransportEncryptionKey,
+  createTransportEncryptor,
+  recryptObjectPasswordField,
+  recryptObjectPasswordFieldInPlace,
 };
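The helpers added above support re-encrypting stored secrets under a one-off transport key. A minimal usage sketch (not part of the diff; storedConnection is a hypothetical connection object whose password fields were encrypted with the internal encryptor, and the require path assumes the package layout shown in the file list):

    const {
      generateTransportEncryptionKey,
      createTransportEncryptor,
      recryptConnection,
      getInternalEncryptor,
    } = require('dbgate-api-premium/src/utility/crypting');

    // random 256-bit key, itself wrapped with the default encryption key
    const transportKeyData = generateTransportEncryptionKey();
    const transportEncryptor = createTransportEncryptor(transportKeyData);

    // re-encrypts password, sshPassword and sshKeyfilePassword for transport
    const exported = recryptConnection(storedConnection, getInternalEncryptor(), transportEncryptor);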
package/src/utility/extractSingleFileFromZip.js
@@ -0,0 +1,77 @@
+const yauzl = require('yauzl');
+const fs = require('fs');
+const { getLogger, extractErrorLogData } = require('dbgate-tools');
+const logger = getLogger('extractSingleFileFromZip');
+/**
+ * Extracts a single file from a ZIP using yauzl.
+ * Stops reading the rest of the archive once the file is found.
+ *
+ * @param {string} zipPath - Path to the ZIP file on disk.
+ * @param {string} fileInZip - The file path *inside* the ZIP to extract.
+ * @param {string} outputPath - Where to write the extracted file on disk.
+ * @returns {Promise<boolean>} - Resolves with true when the file was extracted, false when it was not found.
+ */
+function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
+  return new Promise((resolve, reject) => {
+    yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
+      if (err) return reject(err);
+
+      let fileFound = false;
+
+      // Start reading the first entry
+      zipFile.readEntry();
+
+      zipFile.on('entry', entry => {
+        // Compare the entry name to the file we want
+        if (entry.fileName === fileInZip) {
+          fileFound = true;
+
+          // Open a read stream for this entry
+          zipFile.openReadStream(entry, (err, readStream) => {
+            if (err) return reject(err);
+
+            // Create a write stream to outputPath
+            const writeStream = fs.createWriteStream(outputPath);
+            readStream.pipe(writeStream);
+
+            // When the read stream ends, we can close the zipFile
+            readStream.on('end', () => {
+              // We won't read further entries
+              zipFile.close();
+            });
+
+            // When the file is finished writing, resolve
+            writeStream.on('finish', () => {
+              logger.info(`File "${fileInZip}" extracted to "${outputPath}".`);
+              resolve(true);
+            });
+
+            // Handle write errors
+            writeStream.on('error', writeErr => {
+              logger.error(extractErrorLogData(writeErr), `Error extracting "${fileInZip}" from "${zipPath}".`);
+              reject(writeErr);
+            });
+          });
+        } else {
+          // Not the file we want; skip to the next entry
+          zipFile.readEntry();
+        }
+      });
+
+      // If we reach the end without finding the file
+      zipFile.on('end', () => {
+        if (!fileFound) {
+          resolve(false);
+        }
+      });
+
+      // Handle general errors
+      zipFile.on('error', err => {
+        logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
+        reject(err);
+      });
+    });
+  });
+}
+
+module.exports = extractSingleFileFromZip;
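A brief usage sketch for the new helper (hypothetical paths; the relative require assumes a caller inside package/src):

    const extractSingleFileFromZip = require('./utility/extractSingleFileFromZip');

    async function restoreSingleFile() {
      // resolves true when the entry existed and was written, false when it was not found
      const found = await extractSingleFileFromZip('/tmp/export.zip', 'connections.jsonl', '/tmp/connections.jsonl');
      if (!found) {
        console.log('connections.jsonl is not present in the archive');
      }
    }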
package/src/utility/handleQueryStream.js
@@ -0,0 +1,186 @@
+const crypto = require('crypto');
+const path = require('path');
+const fs = require('fs');
+const _ = require('lodash');
+
+const { jsldir } = require('../utility/directories');
+
+class QueryStreamTableWriter {
+  constructor(sesid = undefined) {
+    this.currentRowCount = 0;
+    this.currentChangeIndex = 1;
+    this.initializedFile = false;
+    this.sesid = sesid;
+  }
+
+  initializeFromQuery(structure, resultIndex) {
+    this.jslid = crypto.randomUUID();
+    this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
+    fs.writeFileSync(
+      this.currentFile,
+      JSON.stringify({
+        ...structure,
+        __isStreamHeader: true,
+      }) + '\n'
+    );
+    this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
+    this.writeCurrentStats(false, false);
+    this.resultIndex = resultIndex;
+    this.initializedFile = true;
+    process.send({ msgtype: 'recordset', jslid: this.jslid, resultIndex, sesid: this.sesid });
+  }
+
+  initializeFromReader(jslid) {
+    this.jslid = jslid;
+    this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
+    this.writeCurrentStats(false, false);
+  }
+
+  row(row) {
+    // console.log('ACCEPT ROW', row);
+    this.currentStream.write(JSON.stringify(row) + '\n');
+    this.currentRowCount += 1;
+
+    if (!this.plannedStats) {
+      this.plannedStats = true;
+      process.nextTick(() => {
+        if (this.currentStream) this.currentStream.uncork();
+        process.nextTick(() => this.writeCurrentStats(false, true));
+        this.plannedStats = false;
+      });
+    }
+  }
+
+  rowFromReader(row) {
+    if (!this.initializedFile) {
+      process.send({ msgtype: 'initializeFile', jslid: this.jslid, sesid: this.sesid });
+      this.initializedFile = true;
+
+      fs.writeFileSync(this.currentFile, JSON.stringify(row) + '\n');
+      this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
+      this.writeCurrentStats(false, false);
+      this.initializedFile = true;
+      return;
+    }
+
+    this.row(row);
+  }
+
+  writeCurrentStats(isFinished = false, emitEvent = false) {
+    const stats = {
+      rowCount: this.currentRowCount,
+      changeIndex: this.currentChangeIndex,
+      isFinished,
+      jslid: this.jslid,
+    };
+    fs.writeFileSync(`${this.currentFile}.stats`, JSON.stringify(stats));
+    this.currentChangeIndex += 1;
+    if (emitEvent) {
+      process.send({ msgtype: 'stats', sesid: this.sesid, ...stats });
+    }
+  }
+
+  close(afterClose) {
+    if (this.currentStream) {
+      this.currentStream.end(() => {
+        this.writeCurrentStats(true, true);
+        if (afterClose) afterClose();
+      });
+    }
+  }
+}
+
+class StreamHandler {
+  constructor(queryStreamInfoHolder, resolve, startLine, sesid = undefined) {
+    this.recordset = this.recordset.bind(this);
+    this.startLine = startLine;
+    this.sesid = sesid;
+    this.row = this.row.bind(this);
+    // this.error = this.error.bind(this);
+    this.done = this.done.bind(this);
+    this.info = this.info.bind(this);
+
+    // use this for cancelling - not implemented
+    // this.stream = null;
+
+    this.plannedStats = false;
+    this.queryStreamInfoHolder = queryStreamInfoHolder;
+    this.resolve = resolve;
+    // currentHandlers = [...currentHandlers, this];
+  }
+
+  closeCurrentWriter() {
+    if (this.currentWriter) {
+      this.currentWriter.close();
+      this.currentWriter = null;
+    }
+  }
+
+  recordset(columns) {
+    this.closeCurrentWriter();
+    this.currentWriter = new QueryStreamTableWriter(this.sesid);
+    this.currentWriter.initializeFromQuery(
+      Array.isArray(columns) ? { columns } : columns,
+      this.queryStreamInfoHolder.resultIndex
+    );
+    this.queryStreamInfoHolder.resultIndex += 1;
+
+    // this.writeCurrentStats();
+
+    // this.onRow = _.throttle((jslid) => {
+    //   if (jslid == this.jslid) {
+    //     this.writeCurrentStats(false, true);
+    //   }
+    // }, 500);
+  }
+  row(row) {
+    if (this.currentWriter) this.currentWriter.row(row);
+    else if (row.message) process.send({ msgtype: 'info', info: { message: row.message }, sesid: this.sesid });
+    // this.onRow(this.jslid);
+  }
+  // error(error) {
+  //   process.send({ msgtype: 'error', error });
+  // }
+  done(result) {
+    this.closeCurrentWriter();
+    // currentHandlers = currentHandlers.filter((x) => x != this);
+    this.resolve();
+  }
+  info(info) {
+    if (info && info.line != null) {
+      info = {
+        ...info,
+        line: this.startLine + info.line,
+      };
+    }
+    if (info.severity == 'error') {
+      this.queryStreamInfoHolder.canceled = true;
+    }
+    process.send({ msgtype: 'info', info, sesid: this.sesid });
+  }
+}
+
+function handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem, sesid = undefined) {
+  return new Promise((resolve, reject) => {
+    const start = sqlItem.trimStart || sqlItem.start;
+    const handler = new StreamHandler(queryStreamInfoHolder, resolve, start && start.line, sesid);
+    driver.stream(dbhan, sqlItem.text, handler);
+  });
+}
+
+function allowExecuteCustomScript(storedConnection, driver) {
+  if (driver.readOnlySessions) {
+    return true;
+  }
+  if (storedConnection.isReadOnly) {
+    return false;
+    // throw new Error('Connection is read only');
+  }
+  return true;
+}
+
+module.exports = {
+  handleQueryStream,
+  QueryStreamTableWriter,
+  allowExecuteCustomScript,
+};
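A rough sketch of how the exported functions might be driven (hypothetical; dbhan, driver, storedConnection and the pre-split sqlItems array of { text, trimStart } objects come from the caller, and the process.send calls imply a forked child process):

    const { handleQueryStream, allowExecuteCustomScript } = require('./utility/handleQueryStream');

    async function runScript(dbhan, driver, storedConnection, sqlItems) {
      if (!allowExecuteCustomScript(storedConnection, driver)) {
        throw new Error('Connection is read only');
      }
      const queryStreamInfoHolder = { resultIndex: 0, canceled: false };
      for (const sqlItem of sqlItems) {
        // an info message with severity 'error' sets canceled and stops the remaining items
        if (queryStreamInfoHolder.canceled) break;
        await handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem);
      }
    }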
package/src/utility/listZipEntries.js
@@ -0,0 +1,41 @@
+const yauzl = require('yauzl');
+const path = require('path');
+
+/**
+ * Lists the files in a ZIP archive using yauzl,
+ * returning an array of { fileName, uncompressedSize } objects.
+ *
+ * @param {string} zipPath - The path to the ZIP file.
+ * @returns {Promise<Array<{fileName: string, uncompressedSize: number}>>}
+ */
+function listZipEntries(zipPath) {
+  return new Promise((resolve, reject) => {
+    yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
+      if (err) return reject(err);
+
+      const entries = [];
+
+      // Start reading entries
+      zipfile.readEntry();
+
+      // Handle each entry
+      zipfile.on('entry', entry => {
+        entries.push({
+          fileName: entry.fileName,
+          uncompressedSize: entry.uncompressedSize,
+        });
+
+        // Move on to the next entry (we’re only listing, not reading file data)
+        zipfile.readEntry();
+      });
+
+      // Finished reading all entries
+      zipfile.on('end', () => resolve(entries));
+
+      // Handle errors
+      zipfile.on('error', err => reject(err));
+    });
+  });
+}
+
+module.exports = listZipEntries;
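Usage sketch (hypothetical archive path; the relative require assumes a caller inside package/src):

    const listZipEntries = require('./utility/listZipEntries');

    async function printZipContents() {
      const entries = await listZipEntries('/tmp/export.zip');
      for (const { fileName, uncompressedSize } of entries) {
        console.log(`${fileName}: ${uncompressedSize} bytes`);
      }
    }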
package/src/utility/storageReplicatorItems.js
@@ -0,0 +1,88 @@
+// *** This file is part of DbGate Premium ***
+
+module.exports = [
+  {
+    name: 'auth_methods',
+    findExisting: true,
+    createNew: true,
+    updateExisting: false,
+    matchColumns: ['amoid'],
+  },
+  {
+    name: 'auth_methods_config',
+    findExisting: true,
+    createNew: true,
+    updateExisting: true,
+    matchColumns: ['auth_method_id', 'key'],
+    deleteMissing: true,
+    deleteRestrictionColumns: ['auth_method_id'],
+  },
+  {
+    name: 'config',
+    findExisting: true,
+    createNew: true,
+    updateExisting: true,
+    matchColumns: ['group', 'key'],
+  },
+  {
+    name: 'connections',
+    findExisting: true,
+    createNew: true,
+    updateExisting: true,
+    matchColumns: ['conid'],
+  },
+  {
+    name: 'role_connections',
+    findExisting: true,
+    createNew: true,
+    matchColumns: ['role_id', 'connection_id'],
+    deleteMissing: true,
+    deleteRestrictionColumns: ['role_id', 'connection_id'],
+  },
+  {
+    name: 'role_permissions',
+    findExisting: true,
+    createNew: true,
+    matchColumns: ['role_id', 'permission'],
+    deleteMissing: true,
+    deleteRestrictionColumns: ['role_id'],
+  },
+  {
+    name: 'roles',
+    findExisting: true,
+    createNew: true,
+    updateExisting: true,
+    matchColumns: ['name'],
+  },
+  {
+    name: 'user_connections',
+    findExisting: true,
+    createNew: true,
+    matchColumns: ['user_id', 'connection_id'],
+    deleteMissing: true,
+    deleteRestrictionColumns: ['user_id', 'connection_id'],
+  },
+  {
+    name: 'user_permissions',
+    findExisting: true,
+    createNew: true,
+    matchColumns: ['user_id', 'permission'],
+    deleteMissing: true,
+    deleteRestrictionColumns: ['user_id', 'permission'],
+  },
+  {
+    name: 'user_roles',
+    findExisting: true,
+    createNew: true,
+    matchColumns: ['user_id', 'role_id'],
+    deleteMissing: true,
+    deleteRestrictionColumns: ['user_id', 'role_id'],
+  },
+  {
+    name: 'users',
+    findExisting: true,
+    createNew: true,
+    updateExisting: true,
+    matchColumns: ['login'],
+  },
+];
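A small sketch of how these declarative rules might be consumed (hypothetical; the likely consumer is the new dataReplicator.js listed above, whose body is not included in this diff):

    const storageReplicatorItems = require('./utility/storageReplicatorItems');

    // rule for the 'users' storage table: rows are matched by 'login',
    // missing rows are created and existing rows are updated
    const usersRule = storageReplicatorItems.find(item => item.name === 'users');
    console.log(usersRule.matchColumns); // ['login']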
package/src/shell/dataDuplicator.js (removed)
@@ -1,61 +0,0 @@
-const stream = require('stream');
-const path = require('path');
-const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
-const requireEngineDriver = require('../utility/requireEngineDriver');
-const { connectUtility } = require('../utility/connectUtility');
-const logger = getLogger('dataDuplicator');
-const { DataDuplicator } = require('dbgate-datalib');
-const copyStream = require('./copyStream');
-const jsonLinesReader = require('./jsonLinesReader');
-const { resolveArchiveFolder } = require('../utility/directories');
-
-async function dataDuplicator({
-  connection,
-  archive,
-  folder,
-  items,
-  options,
-  analysedStructure = null,
-  driver,
-  systemConnection,
-}) {
-  if (!driver) driver = requireEngineDriver(connection);
-
-  const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
-
-  try {
-    if (!analysedStructure) {
-      analysedStructure = await driver.analyseFull(dbhan);
-    }
-
-    const sourceDir = archive
-      ? resolveArchiveFolder(archive)
-      : folder?.startsWith('archive:')
-      ? resolveArchiveFolder(folder.substring('archive:'.length))
-      : folder;
-
-    const dupl = new DataDuplicator(
-      dbhan,
-      driver,
-      analysedStructure,
-      items.map(item => ({
-        name: item.name,
-        operation: item.operation,
-        matchColumns: item.matchColumns,
-        openStream:
-          item.openStream || (() => jsonLinesReader({ fileName: path.join(sourceDir, `${item.name}.jsonl`) })),
-      })),
-      stream,
-      copyStream,
-      options
-    );
-
-    await dupl.run();
-  } finally {
-    if (!systemConnection) {
-      await driver.close(dbhan);
-    }
-  }
-}
-
-module.exports = dataDuplicator;