dbgate-api-premium 5.5.7-alpha.45
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/.env +19 -0
- package/.yarnrc +2 -0
- package/README.md +1 -0
- package/env/dblogin/.env +14 -0
- package/env/portal/.env +70 -0
- package/env/singledb/.env +17 -0
- package/env/storage/.env +43 -0
- package/package.json +89 -0
- package/src/auth/authCommon.js +16 -0
- package/src/auth/authProvider.js +343 -0
- package/src/auth/storageAuthProvider.js +393 -0
- package/src/controllers/apps.js +280 -0
- package/src/controllers/archive.js +217 -0
- package/src/controllers/auth.js +136 -0
- package/src/controllers/config.js +271 -0
- package/src/controllers/connections.js +486 -0
- package/src/controllers/databaseConnections.js +561 -0
- package/src/controllers/files.js +222 -0
- package/src/controllers/jsldata.js +296 -0
- package/src/controllers/metadata.js +47 -0
- package/src/controllers/plugins.js +216 -0
- package/src/controllers/queryHistory.js +54 -0
- package/src/controllers/runners.js +234 -0
- package/src/controllers/scheduler.js +46 -0
- package/src/controllers/serverConnections.js +271 -0
- package/src/controllers/sessions.js +243 -0
- package/src/controllers/storage.js +380 -0
- package/src/controllers/storageDb.js +215 -0
- package/src/controllers/uploads.js +133 -0
- package/src/currentVersion.js +5 -0
- package/src/gistSecret.js +2 -0
- package/src/index.js +139 -0
- package/src/main.js +202 -0
- package/src/packagedPluginsContent.js +1 -0
- package/src/proc/connectProcess.js +38 -0
- package/src/proc/databaseConnectionProcess.js +431 -0
- package/src/proc/index.js +15 -0
- package/src/proc/jslDatastoreProcess.js +60 -0
- package/src/proc/serverConnectionProcess.js +188 -0
- package/src/proc/sessionProcess.js +390 -0
- package/src/proc/sshForwardProcess.js +75 -0
- package/src/shell/archiveReader.js +11 -0
- package/src/shell/archiveWriter.js +22 -0
- package/src/shell/autoIndexForeignKeysTransform.js +19 -0
- package/src/shell/collectorWriter.js +33 -0
- package/src/shell/consoleObjectWriter.js +16 -0
- package/src/shell/copyStream.js +48 -0
- package/src/shell/dataDuplicator.js +63 -0
- package/src/shell/dataTypeMapperTransform.js +21 -0
- package/src/shell/dbModelToJson.js +16 -0
- package/src/shell/deployDb.js +56 -0
- package/src/shell/download.js +15 -0
- package/src/shell/dropAllDbObjects.js +42 -0
- package/src/shell/dumpDatabase.js +49 -0
- package/src/shell/executeQuery.js +39 -0
- package/src/shell/fakeObjectReader.js +35 -0
- package/src/shell/finalizer.js +12 -0
- package/src/shell/generateDeploySql.js +95 -0
- package/src/shell/generateModelSql.js +30 -0
- package/src/shell/importDatabase.js +85 -0
- package/src/shell/index.js +80 -0
- package/src/shell/initializeApiEnvironment.js +9 -0
- package/src/shell/jslDataReader.js +9 -0
- package/src/shell/jsonLinesReader.js +52 -0
- package/src/shell/jsonLinesWriter.js +36 -0
- package/src/shell/jsonReader.js +84 -0
- package/src/shell/jsonToDbModel.js +9 -0
- package/src/shell/jsonWriter.js +97 -0
- package/src/shell/loadDatabase.js +27 -0
- package/src/shell/loadFile.js +10 -0
- package/src/shell/modifyJsonLinesReader.js +148 -0
- package/src/shell/queryReader.js +30 -0
- package/src/shell/registerPlugins.js +9 -0
- package/src/shell/requirePlugin.js +43 -0
- package/src/shell/runScript.js +19 -0
- package/src/shell/sqlDataWriter.js +52 -0
- package/src/shell/sqlTextReplacementTransform.js +32 -0
- package/src/shell/tableReader.js +39 -0
- package/src/shell/tableWriter.js +18 -0
- package/src/storageModel.js +819 -0
- package/src/utility/ColumnMapTransformStream.js +21 -0
- package/src/utility/DatastoreProxy.js +106 -0
- package/src/utility/EnsureStreamHeaderStream.js +31 -0
- package/src/utility/JsonLinesDatabase.js +148 -0
- package/src/utility/JsonLinesDatastore.js +232 -0
- package/src/utility/LineReader.js +88 -0
- package/src/utility/SSHConnection.js +251 -0
- package/src/utility/authProxy.js +133 -0
- package/src/utility/checkLicense.js +186 -0
- package/src/utility/childProcessChecker.js +21 -0
- package/src/utility/cleanDirectory.js +24 -0
- package/src/utility/cloudUpgrade.js +61 -0
- package/src/utility/connectUtility.js +111 -0
- package/src/utility/crypting.js +105 -0
- package/src/utility/diff2htmlPage.js +8 -0
- package/src/utility/directories.js +179 -0
- package/src/utility/downloadPackage.js +51 -0
- package/src/utility/downloader.js +25 -0
- package/src/utility/exceptions.js +9 -0
- package/src/utility/exportDbModel.js +31 -0
- package/src/utility/exportDbModelSql.js +80 -0
- package/src/utility/getChartExport.js +55 -0
- package/src/utility/getDiagramExport.js +25 -0
- package/src/utility/getExpressPath.js +10 -0
- package/src/utility/getJslFileName.js +16 -0
- package/src/utility/getMapExport.js +77 -0
- package/src/utility/hardwareFingerprint.js +89 -0
- package/src/utility/hasPermission.js +101 -0
- package/src/utility/importDbModel.js +9 -0
- package/src/utility/loadFilesRecursive.js +20 -0
- package/src/utility/loadModelFolder.js +29 -0
- package/src/utility/loadModelTransform.js +36 -0
- package/src/utility/pipeForkLogs.js +19 -0
- package/src/utility/platformInfo.js +62 -0
- package/src/utility/processArgs.js +39 -0
- package/src/utility/processComm.js +18 -0
- package/src/utility/requireEngineDriver.js +26 -0
- package/src/utility/requirePluginFunction.js +16 -0
- package/src/utility/socket.js +68 -0
- package/src/utility/sshTunnel.js +106 -0
- package/src/utility/sshTunnelProxy.js +36 -0
- package/src/utility/timingSafeCheckToken.js +9 -0
- package/src/utility/useController.js +99 -0
- package/tsconfig.json +13 -0
- package/webpack.config.js +55 -0

package/src/shell/jsonReader.js
@@ -0,0 +1,84 @@
const fs = require('fs');
const stream = require('stream');
const byline = require('byline');
const { getLogger } = require('dbgate-tools');
const { parser } = require('stream-json');
const { pick } = require('stream-json/filters/Pick');
const { streamArray } = require('stream-json/streamers/StreamArray');
const { streamObject } = require('stream-json/streamers/StreamObject');
const download = require('./download');

const logger = getLogger('jsonReader');


class ParseStream extends stream.Transform {
  constructor({ limitRows, jsonStyle, keyField }) {
    super({ objectMode: true });
    this.wasHeader = false;
    this.limitRows = limitRows;
    this.jsonStyle = jsonStyle;
    this.keyField = keyField || '_key';
    this.rowsWritten = 0;
  }
  _transform(chunk, encoding, done) {
    if (!this.wasHeader) {
      this.push({
        __isStreamHeader: true,
        __isDynamicStructure: true,
      });

      this.wasHeader = true;
    }
    if (!this.limitRows || this.rowsWritten < this.limitRows) {
      if (this.jsonStyle === 'object') {
        this.push({
          ...chunk.value,
          [this.keyField]: chunk.key,
        });
      } else {
        this.push(chunk.value);
      }

      this.rowsWritten += 1;
    }
    done();
  }
}

async function jsonReader({
  fileName,
  jsonStyle,
  keyField = '_key',
  rootField = null,
  encoding = 'utf-8',
  limitRows = undefined,
}) {
  logger.info(`Reading file ${fileName}`);

  const downloadedFile = await download(fileName);
  const fileStream = fs.createReadStream(
    downloadedFile,
    // @ts-ignore
    encoding
  );
  const parseJsonStream = parser();
  fileStream.pipe(parseJsonStream);

  const parseStream = new ParseStream({ limitRows, jsonStyle, keyField });

  const tramsformer = jsonStyle === 'object' ? streamObject() : streamArray();

  if (rootField) {
    const filterStream = pick({ filter: rootField });
    parseJsonStream.pipe(filterStream);
    filterStream.pipe(tramsformer);
  } else {
    parseJsonStream.pipe(tramsformer);
  }

  tramsformer.pipe(parseStream);

  return parseStream;
}

module.exports = jsonReader;

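jsonReader returns an object-mode stream whose first chunk is a stream-header marker. A minimal consumption sketch, not part of the package diff; the file name and row handling are illustrative:

// Hypothetical usage of the jsonReader shell function above.
const jsonReader = require('./jsonReader');

async function printRows() {
  const rows = await jsonReader({ fileName: './cities.json', jsonStyle: 'array', limitRows: 10 });
  rows.on('data', row => {
    if (row.__isStreamHeader) return; // first chunk only describes the stream structure
    console.log(row);
  });
  rows.on('end', () => console.log('done'));
}

printRows();
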
package/src/shell/jsonToDbModel.js
@@ -0,0 +1,9 @@
const exportDbModel = require('../utility/exportDbModel');
const fs = require('fs');

async function jsonToDbModel({ modelFile, outputDir }) {
  const dbInfo = JSON.parse(fs.readFileSync(modelFile, 'utf-8'));
  await exportDbModel(dbInfo, outputDir);
}

module.exports = jsonToDbModel;

package/src/shell/jsonWriter.js
@@ -0,0 +1,97 @@
const { getLogger } = require('dbgate-tools');
const fs = require('fs');
const stream = require('stream');
const _ = require('lodash');

const logger = getLogger('jsonArrayWriter');

class StringifyStream extends stream.Transform {
  constructor({ jsonStyle, keyField, rootField }) {
    super({ objectMode: true });
    this.wasHeader = false;
    this.wasRecord = false;
    this.jsonStyle = jsonStyle;
    this.keyField = keyField || '_key';
    this.rootField = rootField;
  }
  _transform(chunk, encoding, done) {
    let skip = false;

    if (!this.wasHeader) {
      skip = chunk.__isStreamHeader;
      this.wasHeader = true;
    }
    if (!skip) {
      if (!this.wasRecord) {
        if (this.rootField) {
          if (this.jsonStyle === 'object') {
            this.push(`{"${this.rootField}": {\n`);
          } else {
            this.push(`{"${this.rootField}": [\n`);
          }
        } else {
          if (this.jsonStyle === 'object') {
            this.push('{\n');
          } else {
            this.push('[\n');
          }
        }
      } else {
        this.push(',\n');
      }
      this.wasRecord = true;

      if (this.jsonStyle === 'object') {
        const key = chunk[this.keyField] ?? chunk[Object.keys(chunk)[0]];
        this.push(`"${key}": ${JSON.stringify(_.omit(chunk, [this.keyField]))}`);
      } else {
        this.push(JSON.stringify(chunk));
      }
    }
    done();
  }

  _flush(done) {
    if (!this.wasRecord) {
      if (this.rootField) {
        if (this.jsonStyle === 'object') {
          this.push(`{"${this.rootField}": {}}\n`);
        } else {
          this.push(`{"${this.rootField}": []}\n`);
        }
      } else {
        if (this.jsonStyle === 'object') {
          this.push('{}\n');
        } else {
          this.push('[]\n');
        }
      }
    } else {
      if (this.rootField) {
        if (this.jsonStyle === 'object') {
          this.push('\n}}\n');
        } else {
          this.push('\n]}\n');
        }
      } else {
        if (this.jsonStyle === 'object') {
          this.push('\n}\n');
        } else {
          this.push('\n]\n');
        }
      }
    }
    done();
  }
}

async function jsonWriter({ fileName, jsonStyle, keyField = '_key', rootField, encoding = 'utf-8' }) {
  logger.info(`Writing file ${fileName}`);
  const stringify = new StringifyStream({ jsonStyle, keyField, rootField });
  const fileStream = fs.createWriteStream(fileName, encoding);
  stringify.pipe(fileStream);
  stringify['finisher'] = fileStream;
  return stringify;
}

module.exports = jsonWriter;

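Because jsonWriter returns the transform and exposes the underlying file stream as 'finisher', it can be chained directly after jsonReader. A sketch under that assumption; the file paths are examples:

const jsonReader = require('./jsonReader');
const jsonWriter = require('./jsonWriter');

async function copyJson() {
  const source = await jsonReader({ fileName: './input.json', jsonStyle: 'array' });
  const target = await jsonWriter({ fileName: './output.json', jsonStyle: 'array' });
  source.pipe(target);
  // wait for the underlying file stream to flush
  await new Promise((resolve, reject) => target['finisher'].on('finish', resolve).on('error', reject));
}
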
package/src/shell/loadDatabase.js
@@ -0,0 +1,27 @@
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { getLogger } = require('dbgate-tools');
const exportDbModel = require('../utility/exportDbModel');

const logger = getLogger('analyseDb');

async function loadDatabase({ connection = undefined, systemConnection = undefined, driver = undefined, outputDir }) {
  logger.info(`Analysing database`);

  if (!driver) driver = requireEngineDriver(connection);
  const dbhan = systemConnection || (await connectUtility(driver, connection, 'read', { forceRowsAsObjects: true }));
  try {
    logger.info(`Connected.`);

    const dbInfo = await driver.analyseFull(dbhan);
    logger.info(`Analyse finished`);

    await exportDbModel(dbInfo, outputDir);
  } finally {
    if (!systemConnection) {
      await driver.close(dbhan);
    }
  }
}

module.exports = loadDatabase;

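A sketch of calling loadDatabase directly; the connection fields shown are illustrative and depend on the engine plugin in use:

const loadDatabase = require('./loadDatabase');

// Hypothetical connection object; real field names depend on the configured driver plugin.
const connection = {
  engine: 'postgres@dbgate-plugin-postgres',
  server: 'localhost',
  user: 'postgres',
  password: 'secret',
  database: 'mydb',
};

loadDatabase({ connection, outputDir: './model' })
  .then(() => console.log('model exported'))
  .catch(err => console.error(err));
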
package/src/shell/loadFile.js
@@ -0,0 +1,10 @@
const fs = require('fs-extra');
const path = require('path');
const { filesdir } = require('../utility/directories');

async function loadFile(file) {
  const text = await fs.readFile(path.join(filesdir(), file), { encoding: 'utf-8' });
  return text;
}

module.exports = loadFile;

package/src/shell/modifyJsonLinesReader.js
@@ -0,0 +1,148 @@
const fs = require('fs');
const _ = require('lodash');
const stream = require('stream');
const byline = require('byline');
const { getLogger, processJsonDataUpdateCommands, removeTablePairingId } = require('dbgate-tools');
const logger = getLogger('modifyJsonLinesReader');
const stableStringify = require('json-stable-stringify');

class ParseStream extends stream.Transform {
  constructor({ limitRows, changeSet, mergedRows, mergeKey, mergeMode }) {
    super({ objectMode: true });
    this.limitRows = limitRows;
    this.changeSet = changeSet;
    this.wasHeader = false;
    this.currentRowIndex = 0;
    if (mergeMode == 'merge') {
      if (mergedRows && mergeKey) {
        this.mergedRowsDict = {};
        for (const row of mergedRows) {
          const key = stableStringify(_.pick(row, mergeKey));
          this.mergedRowsDict[key] = row;
        }
      }
    }
    this.mergedRowsArray = mergedRows;
    this.mergeKey = mergeKey;
    this.mergeMode = mergeMode;
  }
  _transform(chunk, encoding, done) {
    let obj = JSON.parse(chunk);
    if (obj.__isStreamHeader) {
      if (this.changeSet && this.changeSet.structure) {
        this.push({
          ...removeTablePairingId(this.changeSet.structure),
          __isStreamHeader: true,
        });
      } else {
        this.push(obj);
      }
      this.wasHeader = true;
      done();
      return;
    }

    if (this.changeSet) {
      if (!this.wasHeader && this.changeSet.structure) {
        this.push({
          ...removeTablePairingId(this.changeSet.structure),
          __isStreamHeader: true,
        });
        this.wasHeader = true;
      }

      if (!this.limitRows || this.currentRowIndex < this.limitRows) {
        if (this.changeSet.deletes.find(x => x.existingRowIndex == this.currentRowIndex)) {
          obj = null;
        }

        const update = this.changeSet.updates.find(x => x.existingRowIndex == this.currentRowIndex);
        if (update) {
          if (update.document) {
            obj = update.document;
          } else {
            obj = _.omitBy(
              {
                ...obj,
                ...update.fields,
              },
              (v, k) => v?.$$undefined$$
            );
          }
        }

        if (obj) {
          if (this.changeSet.dataUpdateCommands) {
            obj = processJsonDataUpdateCommands(obj, this.changeSet.dataUpdateCommands);
          }
          this.push(obj);
        }
        this.currentRowIndex += 1;
      }
    } else if (this.mergedRowsArray && this.mergeKey && this.mergeMode) {
      if (this.mergeMode == 'merge') {
        const key = stableStringify(_.pick(obj, this.mergeKey));
        if (this.mergedRowsDict[key]) {
          this.push({ ...obj, ...this.mergedRowsDict[key] });
          delete this.mergedRowsDict[key];
        } else {
          this.push(obj);
        }
      } else if (this.mergeMode == 'append') {
        this.push(obj);
      }
    } else {
      this.push(obj);
    }
    done();
  }

  _flush(done) {
    if (this.changeSet) {
      for (const insert of this.changeSet.inserts) {
        this.push({
          ...insert.document,
          ...insert.fields,
        });
      }
    } else if (this.mergedRowsArray && this.mergeKey) {
      if (this.mergeMode == 'merge') {
        for (const row of this.mergedRowsArray) {
          const key = stableStringify(_.pick(row, this.mergeKey));
          if (this.mergedRowsDict[key]) {
            this.push(row);
          }
        }
      } else {
        for (const row of this.mergedRowsArray) {
          this.push(row);
        }
      }
    }
    done();
  }
}

async function modifyJsonLinesReader({
  fileName,
  encoding = 'utf-8',
  limitRows = undefined,
  changeSet = null,
  mergedRows = null,
  mergeKey = null,
  mergeMode = 'merge',
}) {
  logger.info(`Reading file ${fileName} with change set`);

  const fileStream = fs.createReadStream(
    fileName,
    // @ts-ignore
    encoding
  );
  const liner = byline(fileStream);
  const parser = new ParseStream({ limitRows, changeSet, mergedRows, mergeKey, mergeMode });
  liner.pipe(parser);
  return parser;
}

module.exports = modifyJsonLinesReader;

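The changeSet shape the transform expects can be read off the code above: deletes and updates are matched by existingRowIndex, and inserts are appended in _flush. A minimal sketch with illustrative file name and values:

const modifyJsonLinesReader = require('./modifyJsonLinesReader');

async function applyChanges() {
  const reader = await modifyJsonLinesReader({
    fileName: './rows.jsonl',
    changeSet: {
      deletes: [{ existingRowIndex: 2 }],
      updates: [{ existingRowIndex: 0, fields: { name: 'renamed' } }],
      inserts: [{ document: { id: 100 }, fields: { name: 'new row' } }],
    },
  });
  reader.on('data', row => console.log(row));
}
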
package/src/shell/queryReader.js
@@ -0,0 +1,30 @@
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('queryReader');

async function queryReader({
  connection,
  query,
  queryType,
  // obsolete; use query instead
  sql,
}) {
  // if (sql && json) {
  //   throw new Error('Only one of sql or json could be set');
  // }
  // if (!sql && !json) {
  //   throw new Error('One of sql or json must be set');
  // }
  logger.info({ sql: query || sql }, `Reading query`);
  // else console.log(`Reading query ${JSON.stringify(json)}`);

  const driver = requireEngineDriver(connection);
  const pool = await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script');
  logger.info(`Connected.`);
  const reader =
    queryType == 'json' ? await driver.readJsonQuery(pool, query) : await driver.readQuery(pool, query || sql);
  return reader;
}

module.exports = queryReader;

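Any queryType other than 'json' falls through to driver.readQuery. A usage sketch; the SQL text is an example and the connection object is a placeholder:

const queryReader = require('./queryReader');

async function dumpQuery(connection) {
  const reader = await queryReader({ connection, query: 'select * from cities', queryType: 'sql' });
  reader.on('data', row => {
    if (!row.__isStreamHeader) console.log(row);
  });
}
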
package/src/shell/requirePlugin.js
@@ -0,0 +1,43 @@
const path = require('path');
const fs = require('fs');
const { pluginsdir, packagedPluginsDir, getPluginBackendPath } = require('../utility/directories');
const platformInfo = require('../utility/platformInfo');
const authProxy = require('../utility/authProxy');
const { getLogger } = require('dbgate-tools');
const logger = getLogger('requirePlugin');

const loadedPlugins = {};

const dbgateEnv = {
  dbgateApi: null,
  platformInfo,
  authProxy,
};
function requirePlugin(packageName, requiredPlugin = null) {
  if (!packageName) throw new Error('Missing packageName in plugin');
  if (loadedPlugins[packageName]) return loadedPlugins[packageName];

  if (requiredPlugin == null) {
    let module;
    const modulePath = getPluginBackendPath(packageName);
    logger.info(`Loading module ${packageName} from ${modulePath}`);
    try {
      // @ts-ignore
      module = __non_webpack_require__(modulePath);
    } catch (err) {
      // console.log('Failed load webpacked module', err.message);
      module = require(modulePath);
    }
    requiredPlugin = module.__esModule ? module.default : module;
  }
  loadedPlugins[packageName] = requiredPlugin;
  if (requiredPlugin.initialize) requiredPlugin.initialize(dbgateEnv);

  return requiredPlugin;
}

requirePlugin.initializeDbgateApi = value => {
  dbgateEnv.dbgateApi = value;
};

module.exports = requirePlugin;

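A sketch of the wiring implied above: the host injects its API object once, then plugins are loaded and cached on first use. The plugin name and API object below are illustrative, not taken from this diff:

const requirePlugin = require('./requirePlugin');

// stands in for whatever API object the host exposes to plugins
requirePlugin.initializeDbgateApi({ version: 'example' });

const plugin = requirePlugin('dbgate-plugin-example');
// plugin.initialize(dbgateEnv) has already run inside requirePlugin, if the plugin defines it
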
package/src/shell/runScript.js
@@ -0,0 +1,19 @@
const { getLogger, extractErrorLogData } = require('dbgate-tools');
const childProcessChecker = require('../utility/childProcessChecker');
const processArgs = require('../utility/processArgs');
const logger = getLogger();

async function runScript(func) {
  if (processArgs.checkParent) {
    childProcessChecker();
  }
  try {
    await func();
    process.exit(0);
  } catch (err) {
    logger.error(extractErrorLogData(err), `Error running script`);
    process.exit(1);
  }
}

module.exports = runScript;

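runScript wraps an async function, optionally starts the parent-process check, and exits the process with 0 or 1. A hypothetical entry point combining it with loadDatabase from this diff; the connection values are placeholders:

const runScript = require('./runScript');
const loadDatabase = require('./loadDatabase');

runScript(async () => {
  // illustrative connection; see the loadDatabase sketch above
  const connection = { engine: 'postgres@dbgate-plugin-postgres', server: 'localhost', database: 'mydb' };
  await loadDatabase({ connection, outputDir: './model' });
});
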
package/src/shell/sqlDataWriter.js
@@ -0,0 +1,52 @@
const fs = require('fs');
const stream = require('stream');
const path = require('path');
const { driverBase, getLogger } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const logger = getLogger('sqlDataWriter');

class SqlizeStream extends stream.Transform {
  constructor({ fileName, dataName }) {
    super({ objectMode: true });
    this.wasHeader = false;
    this.tableName = path.parse(fileName).name;
    this.dataName = dataName;
    this.driver = driverBase;
  }
  _transform(chunk, encoding, done) {
    let skip = false;
    if (!this.wasHeader) {
      if (chunk.__isStreamHeader) {
        skip = true;
        this.tableName = chunk.pureName;
        if (chunk.engine) {
          // @ts-ignore
          this.driver = requireEngineDriver(chunk.engine) || driverBase;
        }
      }
      this.wasHeader = true;
    }
    if (!skip) {
      const dmp = this.driver.createDumper();
      dmp.put(
        '^insert ^into %f (%,i) ^values (%,v);\n',
        { pureName: this.dataName || this.tableName },
        Object.keys(chunk),
        Object.values(chunk)
      );
      this.push(dmp.s);
    }
    done();
  }
}

async function sqlDataWriter({ fileName, dataName, driver, encoding = 'utf-8' }) {
  logger.info(`Writing file ${fileName}`);
  const stringify = new SqlizeStream({ fileName, dataName });
  const fileStream = fs.createWriteStream(fileName, encoding);
  stringify.pipe(fileStream);
  stringify['finisher'] = fileStream;
  return stringify;
}

module.exports = sqlDataWriter;

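A sketch of feeding the SQL writer from an in-memory object stream; the header row and values are examples. With no engine field in the header, the generic driverBase dumper is used:

const stream = require('stream');
const sqlDataWriter = require('./sqlDataWriter');

async function writeInserts() {
  const writer = await sqlDataWriter({ fileName: './cities.sql', dataName: 'cities' });
  const source = stream.Readable.from([
    { __isStreamHeader: true, pureName: 'cities' },
    { id: 1, name: 'Prague' },
    { id: 2, name: 'Brno' },
  ]);
  source.pipe(writer);
}
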
package/src/shell/sqlTextReplacementTransform.js
@@ -0,0 +1,32 @@
function replaceInText(text, replacements) {
  let result = text;
  for (const key of Object.keys(replacements)) {
    result = result.split(key).join(replacements[key]);
  }
  return result;
}

function replaceInCollection(collection, replacements) {
  if (!collection) return collection;
  return collection.map(item => {
    if (item.createSql) {
      return {
        ...item,
        createSql: replaceInText(item.createSql, replacements),
      };
    }
    return item;
  });
}

const sqlTextReplacementTransform = replacements => database => {
  return {
    ...database,
    views: replaceInCollection(database.views, replacements),
    matviews: replaceInCollection(database.matviews, replacements),
    procedures: replaceInCollection(database.procedures, replacements),
    functions: replaceInCollection(database.functions, replacements),
  };
};

module.exports = sqlTextReplacementTransform;

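The transform is curried: it takes a replacement map and returns a database-model mapper. A self-contained sketch with an illustrative placeholder token and model:

const sqlTextReplacementTransform = require('./sqlTextReplacementTransform');

const databaseModel = {
  tables: [],
  views: [{ pureName: 'v1', createSql: 'create view v1 as select * from ##SCHEMA##.t1' }],
};

const transform = sqlTextReplacementTransform({ '##SCHEMA##': 'public' });
console.log(transform(databaseModel).views[0].createSql);
// => create view v1 as select * from public.t1
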
package/src/shell/tableReader.js
@@ -0,0 +1,39 @@
const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const logger = getLogger('tableReader');

async function tableReader({ connection, systemConnection, pureName, schemaName }) {
  const driver = requireEngineDriver(connection);
  const dbhan = systemConnection || (await connectUtility(driver, connection, 'read'));
  logger.info(`Connected.`);

  const fullName = { pureName, schemaName };

  if (driver.databaseEngineTypes.includes('document')) {
    // @ts-ignore
    logger.info(`Reading collection ${fullNameToString(fullName)}`);
    // @ts-ignore
    return await driver.readQuery(dbhan, JSON.stringify(fullName));
  }

  const table = await driver.analyseSingleObject(dbhan, fullName, 'tables');
  const query = `select * from ${quoteFullName(driver.dialect, fullName)}`;
  if (table) {
    // @ts-ignore
    logger.info(`Reading table ${fullNameToString(table)}`);
    // @ts-ignore
    return await driver.readQuery(dbhan, query, table);
  }
  const view = await driver.analyseSingleObject(dbhan, fullName, 'views');
  if (view) {
    // @ts-ignore
    logger.info(`Reading view ${fullNameToString(view)}`);
    // @ts-ignore
    return await driver.readQuery(dbhan, query, view);
  }

  return await driver.readQuery(dbhan, query);
}

module.exports = tableReader;

package/src/shell/tableWriter.js
@@ -0,0 +1,18 @@
const { fullNameToString, getLogger } = require('dbgate-tools');
const requireEngineDriver = require('../utility/requireEngineDriver');
const connectUtility = require('../utility/connectUtility');
const logger = getLogger('tableWriter');

async function tableWriter({ connection, schemaName, pureName, driver, systemConnection, ...options }) {
  logger.info(`Writing table ${fullNameToString({ schemaName, pureName })}`);

  if (!driver) {
    driver = requireEngineDriver(connection);
  }
  const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));

  logger.info(`Connected.`);
  return await driver.writeTable(dbhan, { schemaName, pureName }, options);
}

module.exports = tableWriter;

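tableReader and tableWriter return the driver's read and write streams, so a table copy can be sketched as a plain pipe, assuming both ends behave as ordinary object-mode streams (the package also ships a copyStream helper for this, not shown in this diff). Connection objects and names are illustrative:

const tableReader = require('./tableReader');
const tableWriter = require('./tableWriter');

async function copyTable(sourceConnection, targetConnection) {
  const reader = await tableReader({ connection: sourceConnection, schemaName: 'public', pureName: 'cities' });
  const writer = await tableWriter({ connection: targetConnection, schemaName: 'public', pureName: 'cities_copy' });
  reader.pipe(writer); // remaining options are forwarded to driver.writeTable
}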