dbgate-api 5.2.2 → 5.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +6 -4
- package/src/controllers/archive.js +70 -30
- package/src/controllers/databaseConnections.js +2 -2
- package/src/controllers/jsldata.js +26 -9
- package/src/controllers/runners.js +12 -6
- package/src/currentVersion.js +2 -2
- package/src/proc/databaseConnectionProcess.js +3 -3
- package/src/proc/sessionProcess.js +25 -14
- package/src/shell/dataDuplicator.js +50 -0
- package/src/shell/fakeObjectReader.js +17 -9
- package/src/shell/index.js +4 -0
- package/src/shell/jsonLinesReader.js +5 -1
- package/src/shell/jsonLinesWriter.js +3 -1
- package/src/shell/modifyJsonLinesReader.js +145 -0
- package/src/shell/runScript.js +1 -1
- package/src/utility/JsonLinesDatastore.js +68 -33
- package/src/utility/LineReader.js +88 -0
- package/src/utility/connectUtility.js +3 -0
- package/src/utility/directories.js +11 -6
- package/src/utility/freeTableStorage.js +0 -15
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "dbgate-api",
|
|
3
3
|
"main": "src/index.js",
|
|
4
|
-
"version": "5.2.
|
|
4
|
+
"version": "5.2.3",
|
|
5
5
|
"homepage": "https://dbgate.org/",
|
|
6
6
|
"repository": {
|
|
7
7
|
"type": "git",
|
|
@@ -27,8 +27,8 @@
|
|
|
27
27
|
"cors": "^2.8.5",
|
|
28
28
|
"cross-env": "^6.0.3",
|
|
29
29
|
"dbgate-query-splitter": "^4.9.3",
|
|
30
|
-
"dbgate-sqltree": "^5.2.
|
|
31
|
-
"dbgate-tools": "^5.2.
|
|
30
|
+
"dbgate-sqltree": "^5.2.3",
|
|
31
|
+
"dbgate-tools": "^5.2.3",
|
|
32
32
|
"debug": "^4.3.4",
|
|
33
33
|
"diff": "^5.0.0",
|
|
34
34
|
"diff2html": "^3.4.13",
|
|
@@ -36,6 +36,7 @@
|
|
|
36
36
|
"express": "^4.17.1",
|
|
37
37
|
"express-basic-auth": "^1.2.0",
|
|
38
38
|
"express-fileupload": "^1.2.0",
|
|
39
|
+
"external-sorting": "^1.3.1",
|
|
39
40
|
"fs-extra": "^9.1.0",
|
|
40
41
|
"fs-reverse": "^0.0.3",
|
|
41
42
|
"get-port": "^5.1.1",
|
|
@@ -52,6 +53,7 @@
|
|
|
52
53
|
"on-finished": "^2.4.1",
|
|
53
54
|
"pinomin": "^1.0.1",
|
|
54
55
|
"portfinder": "^1.0.28",
|
|
56
|
+
"rimraf": "^3.0.0",
|
|
55
57
|
"simple-encryptor": "^4.0.0",
|
|
56
58
|
"ssh2": "^1.11.0",
|
|
57
59
|
"tar": "^6.0.5",
|
|
@@ -71,7 +73,7 @@
|
|
|
71
73
|
"devDependencies": {
|
|
72
74
|
"@types/fs-extra": "^9.0.11",
|
|
73
75
|
"@types/lodash": "^4.14.149",
|
|
74
|
-
"dbgate-types": "^5.2.
|
|
76
|
+
"dbgate-types": "^5.2.3",
|
|
75
77
|
"env-cmd": "^10.1.0",
|
|
76
78
|
"node-loader": "^1.0.2",
|
|
77
79
|
"nodemon": "^2.0.2",
|
|
@@ -3,10 +3,13 @@ const readline = require('readline');
|
|
|
3
3
|
const path = require('path');
|
|
4
4
|
const { archivedir, clearArchiveLinksCache, resolveArchiveFolder } = require('../utility/directories');
|
|
5
5
|
const socket = require('../utility/socket');
|
|
6
|
-
const { saveFreeTableData } = require('../utility/freeTableStorage');
|
|
7
6
|
const loadFilesRecursive = require('../utility/loadFilesRecursive');
|
|
8
7
|
const getJslFileName = require('../utility/getJslFileName');
|
|
9
8
|
const { getLogger } = require('dbgate-tools');
|
|
9
|
+
const uuidv1 = require('uuid/v1');
|
|
10
|
+
const dbgateApi = require('../shell');
|
|
11
|
+
const jsldata = require('./jsldata');
|
|
12
|
+
const platformInfo = require('../utility/platformInfo');
|
|
10
13
|
|
|
11
14
|
const logger = getLogger('archive');
|
|
12
15
|
|
|
@@ -79,17 +82,20 @@ module.exports = {
|
|
|
79
82
|
refreshFiles_meta: true,
|
|
80
83
|
async refreshFiles({ folder }) {
|
|
81
84
|
socket.emitChanged('archive-files-changed', { folder });
|
|
85
|
+
return true;
|
|
82
86
|
},
|
|
83
87
|
|
|
84
88
|
refreshFolders_meta: true,
|
|
85
89
|
async refreshFolders() {
|
|
86
90
|
socket.emitChanged(`archive-folders-changed`);
|
|
91
|
+
return true;
|
|
87
92
|
},
|
|
88
93
|
|
|
89
94
|
deleteFile_meta: true,
|
|
90
95
|
async deleteFile({ folder, file, fileType }) {
|
|
91
96
|
await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`));
|
|
92
97
|
socket.emitChanged(`archive-files-changed`, { folder });
|
|
98
|
+
return true;
|
|
93
99
|
},
|
|
94
100
|
|
|
95
101
|
renameFile_meta: true,
|
|
@@ -99,6 +105,46 @@ module.exports = {
|
|
|
99
105
|
path.join(resolveArchiveFolder(folder), `${newFile}.${fileType}`)
|
|
100
106
|
);
|
|
101
107
|
socket.emitChanged(`archive-files-changed`, { folder });
|
|
108
|
+
return true;
|
|
109
|
+
},
|
|
110
|
+
|
|
111
|
+
modifyFile_meta: true,
|
|
112
|
+
async modifyFile({ folder, file, changeSet, mergedRows, mergeKey, mergeMode }) {
|
|
113
|
+
await jsldata.closeDataStore(`archive://${folder}/${file}`);
|
|
114
|
+
const changedFilePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
|
|
115
|
+
|
|
116
|
+
if (!fs.existsSync(changedFilePath)) {
|
|
117
|
+
if (!mergedRows) {
|
|
118
|
+
return false;
|
|
119
|
+
}
|
|
120
|
+
const fileStream = fs.createWriteStream(changedFilePath);
|
|
121
|
+
for (const row of mergedRows) {
|
|
122
|
+
await fileStream.write(JSON.stringify(row) + '\n');
|
|
123
|
+
}
|
|
124
|
+
await fileStream.close();
|
|
125
|
+
|
|
126
|
+
socket.emitChanged(`archive-files-changed`, { folder });
|
|
127
|
+
return true;
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
const tmpchangedFilePath = path.join(resolveArchiveFolder(folder), `${file}-${uuidv1()}.jsonl`);
|
|
131
|
+
const reader = await dbgateApi.modifyJsonLinesReader({
|
|
132
|
+
fileName: changedFilePath,
|
|
133
|
+
changeSet,
|
|
134
|
+
mergedRows,
|
|
135
|
+
mergeKey,
|
|
136
|
+
mergeMode,
|
|
137
|
+
});
|
|
138
|
+
const writer = await dbgateApi.jsonLinesWriter({ fileName: tmpchangedFilePath });
|
|
139
|
+
await dbgateApi.copyStream(reader, writer);
|
|
140
|
+
if (platformInfo.isWindows) {
|
|
141
|
+
await fs.copyFile(tmpchangedFilePath, changedFilePath);
|
|
142
|
+
await fs.unlink(tmpchangedFilePath);
|
|
143
|
+
} else {
|
|
144
|
+
await fs.unlink(changedFilePath);
|
|
145
|
+
await fs.rename(tmpchangedFilePath, changedFilePath);
|
|
146
|
+
}
|
|
147
|
+
return true;
|
|
102
148
|
},
|
|
103
149
|
|
|
104
150
|
renameFolder_meta: true,
|
|
@@ -106,6 +152,7 @@ module.exports = {
|
|
|
106
152
|
const uniqueName = await this.getNewArchiveFolder({ database: newFolder });
|
|
107
153
|
await fs.rename(path.join(archivedir(), folder), path.join(archivedir(), uniqueName));
|
|
108
154
|
socket.emitChanged(`archive-folders-changed`);
|
|
155
|
+
return true;
|
|
109
156
|
},
|
|
110
157
|
|
|
111
158
|
deleteFolder_meta: true,
|
|
@@ -117,36 +164,9 @@ module.exports = {
|
|
|
117
164
|
await fs.rmdir(path.join(archivedir(), folder), { recursive: true });
|
|
118
165
|
}
|
|
119
166
|
socket.emitChanged(`archive-folders-changed`);
|
|
120
|
-
},
|
|
121
|
-
|
|
122
|
-
saveFreeTable_meta: true,
|
|
123
|
-
async saveFreeTable({ folder, file, data }) {
|
|
124
|
-
await saveFreeTableData(path.join(resolveArchiveFolder(folder), `${file}.jsonl`), data);
|
|
125
|
-
socket.emitChanged(`archive-files-changed`, { folder });
|
|
126
167
|
return true;
|
|
127
168
|
},
|
|
128
169
|
|
|
129
|
-
loadFreeTable_meta: true,
|
|
130
|
-
async loadFreeTable({ folder, file }) {
|
|
131
|
-
return new Promise((resolve, reject) => {
|
|
132
|
-
const fileStream = fs.createReadStream(path.join(resolveArchiveFolder(folder), `${file}.jsonl`));
|
|
133
|
-
const liner = readline.createInterface({
|
|
134
|
-
input: fileStream,
|
|
135
|
-
});
|
|
136
|
-
let structure = null;
|
|
137
|
-
const rows = [];
|
|
138
|
-
liner.on('line', line => {
|
|
139
|
-
const data = JSON.parse(line);
|
|
140
|
-
if (structure) rows.push(data);
|
|
141
|
-
else structure = data;
|
|
142
|
-
});
|
|
143
|
-
liner.on('close', () => {
|
|
144
|
-
resolve({ structure, rows });
|
|
145
|
-
fileStream.close();
|
|
146
|
-
});
|
|
147
|
-
});
|
|
148
|
-
},
|
|
149
|
-
|
|
150
170
|
saveText_meta: true,
|
|
151
171
|
async saveText({ folder, file, text }) {
|
|
152
172
|
await fs.writeFile(path.join(resolveArchiveFolder(folder), `${file}.jsonl`), text);
|
|
@@ -155,10 +175,30 @@ module.exports = {
|
|
|
155
175
|
},
|
|
156
176
|
|
|
157
177
|
saveJslData_meta: true,
|
|
158
|
-
async saveJslData({ folder, file, jslid }) {
|
|
178
|
+
async saveJslData({ folder, file, jslid, changeSet }) {
|
|
159
179
|
const source = getJslFileName(jslid);
|
|
160
180
|
const target = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
|
|
161
|
-
|
|
181
|
+
if (changeSet) {
|
|
182
|
+
const reader = await dbgateApi.modifyJsonLinesReader({
|
|
183
|
+
fileName: source,
|
|
184
|
+
changeSet,
|
|
185
|
+
});
|
|
186
|
+
const writer = await dbgateApi.jsonLinesWriter({ fileName: target });
|
|
187
|
+
await dbgateApi.copyStream(reader, writer);
|
|
188
|
+
} else {
|
|
189
|
+
await fs.copyFile(source, target);
|
|
190
|
+
socket.emitChanged(`archive-files-changed`, { folder });
|
|
191
|
+
}
|
|
192
|
+
return true;
|
|
193
|
+
},
|
|
194
|
+
|
|
195
|
+
saveRows_meta: true,
|
|
196
|
+
async saveRows({ folder, file, rows }) {
|
|
197
|
+
const fileStream = fs.createWriteStream(path.join(resolveArchiveFolder(folder), `${file}.jsonl`));
|
|
198
|
+
for (const row of rows) {
|
|
199
|
+
await fileStream.write(JSON.stringify(row) + '\n');
|
|
200
|
+
}
|
|
201
|
+
await fileStream.close();
|
|
162
202
|
socket.emitChanged(`archive-files-changed`, { folder });
|
|
163
203
|
return true;
|
|
164
204
|
},
|
|
@@ -171,11 +171,11 @@ module.exports = {
|
|
|
171
171
|
},
|
|
172
172
|
|
|
173
173
|
runScript_meta: true,
|
|
174
|
-
async runScript({ conid, database, sql }, req) {
|
|
174
|
+
async runScript({ conid, database, sql, useTransaction }, req) {
|
|
175
175
|
testConnectionPermission(conid, req);
|
|
176
176
|
logger.info({ conid, database, sql }, 'Processing script');
|
|
177
177
|
const opened = await this.ensureOpened(conid, database);
|
|
178
|
-
const res = await this.sendRequest(opened, { msgtype: 'runScript', sql });
|
|
178
|
+
const res = await this.sendRequest(opened, { msgtype: 'runScript', sql, useTransaction });
|
|
179
179
|
return res;
|
|
180
180
|
},
|
|
181
181
|
|
|
@@ -4,7 +4,6 @@ const lineReader = require('line-reader');
|
|
|
4
4
|
const _ = require('lodash');
|
|
5
5
|
const { __ } = require('lodash/fp');
|
|
6
6
|
const DatastoreProxy = require('../utility/DatastoreProxy');
|
|
7
|
-
const { saveFreeTableData } = require('../utility/freeTableStorage');
|
|
8
7
|
const getJslFileName = require('../utility/getJslFileName');
|
|
9
8
|
const JsonLinesDatastore = require('../utility/JsonLinesDatastore');
|
|
10
9
|
const requirePluginFunction = require('../utility/requirePluginFunction');
|
|
@@ -113,6 +112,14 @@ module.exports = {
|
|
|
113
112
|
return datastore;
|
|
114
113
|
},
|
|
115
114
|
|
|
115
|
+
async closeDataStore(jslid) {
|
|
116
|
+
const datastore = this.datastores[jslid];
|
|
117
|
+
if (datastore) {
|
|
118
|
+
await datastore._closeReader();
|
|
119
|
+
delete this.datastores[jslid];
|
|
120
|
+
}
|
|
121
|
+
},
|
|
122
|
+
|
|
116
123
|
getInfo_meta: true,
|
|
117
124
|
async getInfo({ jslid }) {
|
|
118
125
|
const file = getJslFileName(jslid);
|
|
@@ -135,9 +142,15 @@ module.exports = {
|
|
|
135
142
|
},
|
|
136
143
|
|
|
137
144
|
getRows_meta: true,
|
|
138
|
-
async getRows({ jslid, offset, limit, filters, formatterFunction }) {
|
|
145
|
+
async getRows({ jslid, offset, limit, filters, sort, formatterFunction }) {
|
|
139
146
|
const datastore = await this.ensureDatastore(jslid, formatterFunction);
|
|
140
|
-
return datastore.getRows(offset, limit, _.isEmpty(filters) ? null : filters);
|
|
147
|
+
return datastore.getRows(offset, limit, _.isEmpty(filters) ? null : filters, _.isEmpty(sort) ? null : sort);
|
|
148
|
+
},
|
|
149
|
+
|
|
150
|
+
exists_meta: true,
|
|
151
|
+
async exists({ jslid }) {
|
|
152
|
+
const fileName = getJslFileName(jslid);
|
|
153
|
+
return fs.existsSync(fileName);
|
|
141
154
|
},
|
|
142
155
|
|
|
143
156
|
getStats_meta: true,
|
|
@@ -181,18 +194,22 @@ module.exports = {
|
|
|
181
194
|
// }
|
|
182
195
|
},
|
|
183
196
|
|
|
184
|
-
saveFreeTable_meta: true,
|
|
185
|
-
async saveFreeTable({ jslid, data }) {
|
|
186
|
-
saveFreeTableData(getJslFileName(jslid), data);
|
|
187
|
-
return true;
|
|
188
|
-
},
|
|
189
|
-
|
|
190
197
|
saveText_meta: true,
|
|
191
198
|
async saveText({ jslid, text }) {
|
|
192
199
|
await fs.promises.writeFile(getJslFileName(jslid), text);
|
|
193
200
|
return true;
|
|
194
201
|
},
|
|
195
202
|
|
|
203
|
+
saveRows_meta: true,
|
|
204
|
+
async saveRows({ jslid, rows }) {
|
|
205
|
+
const fileStream = fs.createWriteStream(getJslFileName(jslid));
|
|
206
|
+
for (const row of rows) {
|
|
207
|
+
await fileStream.write(JSON.stringify(row) + '\n');
|
|
208
|
+
}
|
|
209
|
+
await fileStream.close();
|
|
210
|
+
return true;
|
|
211
|
+
},
|
|
212
|
+
|
|
196
213
|
extractTimelineChart_meta: true,
|
|
197
214
|
async extractTimelineChart({ jslid, timestampFunction, aggregateFunction, measures }) {
|
|
198
215
|
const timestamp = requirePluginFunction(timestampFunction);
|
|
@@ -70,15 +70,20 @@ module.exports = {
|
|
|
70
70
|
if (message) {
|
|
71
71
|
const json = safeJsonParse(message.message);
|
|
72
72
|
|
|
73
|
-
if (json) logger.
|
|
73
|
+
if (json) logger.log(json);
|
|
74
74
|
else logger.info(message.message);
|
|
75
75
|
|
|
76
|
-
|
|
76
|
+
const toEmit = {
|
|
77
77
|
time: new Date(),
|
|
78
|
-
severity: 'info',
|
|
79
78
|
...message,
|
|
80
79
|
message: json ? json.msg : message.message,
|
|
81
|
-
}
|
|
80
|
+
};
|
|
81
|
+
|
|
82
|
+
if (json && json.level >= 50) {
|
|
83
|
+
toEmit.severity = 'error';
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
socket.emit(`runner-info-${runid}`, toEmit);
|
|
82
87
|
}
|
|
83
88
|
},
|
|
84
89
|
|
|
@@ -125,8 +130,9 @@ module.exports = {
|
|
|
125
130
|
},
|
|
126
131
|
}
|
|
127
132
|
);
|
|
128
|
-
const pipeDispatcher = severity => data =>
|
|
129
|
-
this.dispatchMessage(runid, { severity, message: data.toString().trim() });
|
|
133
|
+
const pipeDispatcher = severity => data => {
|
|
134
|
+
return this.dispatchMessage(runid, { severity, message: data.toString().trim() });
|
|
135
|
+
};
|
|
130
136
|
|
|
131
137
|
byline(subprocess.stdout).on('data', pipeDispatcher('info'));
|
|
132
138
|
byline(subprocess.stderr).on('data', pipeDispatcher('error'));
|
package/src/currentVersion.js
CHANGED
|
@@ -158,12 +158,12 @@ function resolveAnalysedPromises() {
|
|
|
158
158
|
afterAnalyseCallbacks = [];
|
|
159
159
|
}
|
|
160
160
|
|
|
161
|
-
async function handleRunScript({ msgid, sql }, skipReadonlyCheck = false) {
|
|
161
|
+
async function handleRunScript({ msgid, sql, useTransaction }, skipReadonlyCheck = false) {
|
|
162
162
|
await waitConnected();
|
|
163
163
|
const driver = requireEngineDriver(storedConnection);
|
|
164
164
|
try {
|
|
165
165
|
if (!skipReadonlyCheck) ensureExecuteCustomScript(driver);
|
|
166
|
-
await driver.script(systemConnection, sql);
|
|
166
|
+
await driver.script(systemConnection, sql, { useTransaction });
|
|
167
167
|
process.send({ msgtype: 'response', msgid });
|
|
168
168
|
} catch (err) {
|
|
169
169
|
process.send({ msgtype: 'response', msgid, errorMessage: err.message });
|
|
@@ -271,7 +271,7 @@ async function handleSqlPreview({ msgid, objects, options }) {
|
|
|
271
271
|
process.send({ msgtype: 'response', msgid, sql: dmp.s, isTruncated: generator.isTruncated });
|
|
272
272
|
if (generator.isUnhandledException) {
|
|
273
273
|
setTimeout(() => {
|
|
274
|
-
|
|
274
|
+
logger.error('Exiting because of unhandled exception');
|
|
275
275
|
process.exit(0);
|
|
276
276
|
}, 500);
|
|
277
277
|
}
|
|
@@ -21,6 +21,7 @@ let afterConnectCallbacks = [];
|
|
|
21
21
|
let lastPing = null;
|
|
22
22
|
let lastActivity = null;
|
|
23
23
|
let currentProfiler = null;
|
|
24
|
+
let executingScripts = 0;
|
|
24
25
|
|
|
25
26
|
class TableWriter {
|
|
26
27
|
constructor() {
|
|
@@ -263,20 +264,25 @@ async function handleExecuteQuery({ sql }) {
|
|
|
263
264
|
//process.send({ msgtype: 'error', error: e.message });
|
|
264
265
|
}
|
|
265
266
|
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
267
|
+
executingScripts++;
|
|
268
|
+
try {
|
|
269
|
+
const resultIndexHolder = {
|
|
270
|
+
value: 0,
|
|
271
|
+
};
|
|
272
|
+
for (const sqlItem of splitQuery(sql, {
|
|
273
|
+
...driver.getQuerySplitterOptions('stream'),
|
|
274
|
+
returnRichInfo: true,
|
|
275
|
+
})) {
|
|
276
|
+
await handleStream(driver, resultIndexHolder, sqlItem);
|
|
277
|
+
// const handler = new StreamHandler(resultIndex);
|
|
278
|
+
// const stream = await driver.stream(systemConnection, sqlItem, handler);
|
|
279
|
+
// handler.stream = stream;
|
|
280
|
+
// resultIndex = handler.resultIndex;
|
|
281
|
+
}
|
|
282
|
+
process.send({ msgtype: 'done' });
|
|
283
|
+
} finally {
|
|
284
|
+
executingScripts--;
|
|
278
285
|
}
|
|
279
|
-
process.send({ msgtype: 'done' });
|
|
280
286
|
}
|
|
281
287
|
|
|
282
288
|
async function handleExecuteReader({ jslid, sql, fileName }) {
|
|
@@ -349,7 +355,12 @@ function start() {
|
|
|
349
355
|
storedConnection && storedConnection.globalSettings
|
|
350
356
|
? extractIntSettingsValue(storedConnection.globalSettings, 'session.autoCloseTimeout', 15, 1, 120)
|
|
351
357
|
: 15;
|
|
352
|
-
if (
|
|
358
|
+
if (
|
|
359
|
+
useSessionTimeout &&
|
|
360
|
+
time - lastActivity > sessionTimeout * 60 * 1000 &&
|
|
361
|
+
!currentProfiler &&
|
|
362
|
+
executingScripts == 0
|
|
363
|
+
) {
|
|
353
364
|
logger.info('Session not active, exiting');
|
|
354
365
|
process.exit(0);
|
|
355
366
|
}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
const stream = require('stream');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
|
|
4
|
+
const requireEngineDriver = require('../utility/requireEngineDriver');
|
|
5
|
+
const connectUtility = require('../utility/connectUtility');
|
|
6
|
+
const logger = getLogger('dataDuplicator');
|
|
7
|
+
const { DataDuplicator } = require('dbgate-datalib');
|
|
8
|
+
const copyStream = require('./copyStream');
|
|
9
|
+
const jsonLinesReader = require('./jsonLinesReader');
|
|
10
|
+
const { resolveArchiveFolder } = require('../utility/directories');
|
|
11
|
+
|
|
12
|
+
async function dataDuplicator({
|
|
13
|
+
connection,
|
|
14
|
+
archive,
|
|
15
|
+
items,
|
|
16
|
+
options,
|
|
17
|
+
analysedStructure = null,
|
|
18
|
+
driver,
|
|
19
|
+
systemConnection,
|
|
20
|
+
}) {
|
|
21
|
+
if (!driver) driver = requireEngineDriver(connection);
|
|
22
|
+
const pool = systemConnection || (await connectUtility(driver, connection, 'write'));
|
|
23
|
+
|
|
24
|
+
logger.info(`Connected.`);
|
|
25
|
+
|
|
26
|
+
if (!analysedStructure) {
|
|
27
|
+
analysedStructure = await driver.analyseFull(pool);
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
const dupl = new DataDuplicator(
|
|
31
|
+
pool,
|
|
32
|
+
driver,
|
|
33
|
+
analysedStructure,
|
|
34
|
+
items.map(item => ({
|
|
35
|
+
name: item.name,
|
|
36
|
+
operation: item.operation,
|
|
37
|
+
matchColumns: item.matchColumns,
|
|
38
|
+
openStream:
|
|
39
|
+
item.openStream ||
|
|
40
|
+
(() => jsonLinesReader({ fileName: path.join(resolveArchiveFolder(archive), `${item.name}.jsonl`) })),
|
|
41
|
+
})),
|
|
42
|
+
stream,
|
|
43
|
+
copyStream,
|
|
44
|
+
options
|
|
45
|
+
);
|
|
46
|
+
|
|
47
|
+
await dupl.run();
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
module.exports = dataDuplicator;
|
|
@@ -1,18 +1,26 @@
|
|
|
1
1
|
const stream = require('stream');
|
|
2
2
|
|
|
3
|
-
async function fakeObjectReader({ delay = 0 } = {}) {
|
|
3
|
+
async function fakeObjectReader({ delay = 0, dynamicData = null } = {}) {
|
|
4
4
|
const pass = new stream.PassThrough({
|
|
5
5
|
objectMode: true,
|
|
6
6
|
});
|
|
7
7
|
function doWrite() {
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
8
|
+
if (dynamicData) {
|
|
9
|
+
pass.write({ __isStreamHeader: true, __isDynamicStructure: true });
|
|
10
|
+
for (const item of dynamicData) {
|
|
11
|
+
pass.write(item);
|
|
12
|
+
}
|
|
13
|
+
pass.end();
|
|
14
|
+
} else {
|
|
15
|
+
pass.write({ columns: [{ columnName: 'id' }, { columnName: 'country' }], __isStreamHeader: true });
|
|
16
|
+
pass.write({ id: 1, country: 'Czechia' });
|
|
17
|
+
pass.write({ id: 2, country: 'Austria' });
|
|
18
|
+
pass.write({ country: 'Germany', id: 3 });
|
|
19
|
+
pass.write({ country: 'Romania', id: 4 });
|
|
20
|
+
pass.write({ country: 'Great Britain', id: 5 });
|
|
21
|
+
pass.write({ country: 'Bosna, Hecegovina', id: 6 });
|
|
22
|
+
pass.end();
|
|
23
|
+
}
|
|
16
24
|
}
|
|
17
25
|
|
|
18
26
|
if (delay) {
|
package/src/shell/index.js
CHANGED
|
@@ -25,6 +25,8 @@ const dumpDatabase = require('./dumpDatabase');
|
|
|
25
25
|
const importDatabase = require('./importDatabase');
|
|
26
26
|
const loadDatabase = require('./loadDatabase');
|
|
27
27
|
const generateModelSql = require('./generateModelSql');
|
|
28
|
+
const modifyJsonLinesReader = require('./modifyJsonLinesReader');
|
|
29
|
+
const dataDuplicator = require('./dataDuplicator');
|
|
28
30
|
|
|
29
31
|
const dbgateApi = {
|
|
30
32
|
queryReader,
|
|
@@ -53,6 +55,8 @@ const dbgateApi = {
|
|
|
53
55
|
importDatabase,
|
|
54
56
|
loadDatabase,
|
|
55
57
|
generateModelSql,
|
|
58
|
+
modifyJsonLinesReader,
|
|
59
|
+
dataDuplicator,
|
|
56
60
|
};
|
|
57
61
|
|
|
58
62
|
requirePlugin.initializeDbgateApi(dbgateApi);
|
|
@@ -35,7 +35,11 @@ class ParseStream extends stream.Transform {
|
|
|
35
35
|
async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
|
|
36
36
|
logger.info(`Reading file ${fileName}`);
|
|
37
37
|
|
|
38
|
-
const fileStream = fs.createReadStream(
|
|
38
|
+
const fileStream = fs.createReadStream(
|
|
39
|
+
fileName,
|
|
40
|
+
// @ts-ignore
|
|
41
|
+
encoding
|
|
42
|
+
);
|
|
39
43
|
const liner = byline(fileStream);
|
|
40
44
|
const parser = new ParseStream({ limitRows });
|
|
41
45
|
liner.pipe(parser);
|
|
@@ -12,7 +12,9 @@ class StringifyStream extends stream.Transform {
|
|
|
12
12
|
_transform(chunk, encoding, done) {
|
|
13
13
|
let skip = false;
|
|
14
14
|
if (!this.wasHeader) {
|
|
15
|
-
skip =
|
|
15
|
+
skip =
|
|
16
|
+
(chunk.__isStreamHeader && !this.header) ||
|
|
17
|
+
(chunk.__isStreamHeader && chunk.__isDynamicStructure && !chunk.__keepDynamicStreamHeader);
|
|
16
18
|
this.wasHeader = true;
|
|
17
19
|
}
|
|
18
20
|
if (!skip) {
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const _ = require('lodash');
|
|
3
|
+
const stream = require('stream');
|
|
4
|
+
const byline = require('byline');
|
|
5
|
+
const { getLogger, processJsonDataUpdateCommands, removeTablePairingId } = require('dbgate-tools');
|
|
6
|
+
const logger = getLogger('modifyJsonLinesReader');
|
|
7
|
+
const stableStringify = require('json-stable-stringify');
|
|
8
|
+
|
|
9
|
+
class ParseStream extends stream.Transform {
|
|
10
|
+
constructor({ limitRows, changeSet, mergedRows, mergeKey, mergeMode }) {
|
|
11
|
+
super({ objectMode: true });
|
|
12
|
+
this.limitRows = limitRows;
|
|
13
|
+
this.changeSet = changeSet;
|
|
14
|
+
this.wasHeader = false;
|
|
15
|
+
this.currentRowIndex = 0;
|
|
16
|
+
if (mergeMode == 'merge') {
|
|
17
|
+
if (mergedRows && mergeKey) {
|
|
18
|
+
this.mergedRowsDict = {};
|
|
19
|
+
for (const row of mergedRows) {
|
|
20
|
+
const key = stableStringify(_.pick(row, mergeKey));
|
|
21
|
+
this.mergedRowsDict[key] = row;
|
|
22
|
+
}
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
this.mergedRowsArray = mergedRows;
|
|
26
|
+
this.mergeKey = mergeKey;
|
|
27
|
+
this.mergeMode = mergeMode;
|
|
28
|
+
}
|
|
29
|
+
_transform(chunk, encoding, done) {
|
|
30
|
+
let obj = JSON.parse(chunk);
|
|
31
|
+
if (obj.__isStreamHeader) {
|
|
32
|
+
if (this.changeSet && this.changeSet.structure) {
|
|
33
|
+
this.push({
|
|
34
|
+
...removeTablePairingId(this.changeSet.structure),
|
|
35
|
+
__isStreamHeader: true,
|
|
36
|
+
});
|
|
37
|
+
} else {
|
|
38
|
+
this.push(obj);
|
|
39
|
+
}
|
|
40
|
+
this.wasHeader = true;
|
|
41
|
+
done();
|
|
42
|
+
return;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
if (this.changeSet) {
|
|
46
|
+
if (!this.wasHeader && this.changeSet.structure) {
|
|
47
|
+
this.push({
|
|
48
|
+
...removeTablePairingId(this.changeSet.structure),
|
|
49
|
+
__isStreamHeader: true,
|
|
50
|
+
});
|
|
51
|
+
this.wasHeader = true;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
if (!this.limitRows || this.currentRowIndex < this.limitRows) {
|
|
55
|
+
if (this.changeSet.deletes.find(x => x.existingRowIndex == this.currentRowIndex)) {
|
|
56
|
+
obj = null;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
const update = this.changeSet.updates.find(x => x.existingRowIndex == this.currentRowIndex);
|
|
60
|
+
if (update) {
|
|
61
|
+
if (update.document) {
|
|
62
|
+
obj = update.document;
|
|
63
|
+
} else {
|
|
64
|
+
obj = {
|
|
65
|
+
...obj,
|
|
66
|
+
...update.fields,
|
|
67
|
+
};
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
if (obj) {
|
|
72
|
+
if (this.changeSet.dataUpdateCommands) {
|
|
73
|
+
obj = processJsonDataUpdateCommands(obj, this.changeSet.dataUpdateCommands);
|
|
74
|
+
}
|
|
75
|
+
this.push(obj);
|
|
76
|
+
}
|
|
77
|
+
this.currentRowIndex += 1;
|
|
78
|
+
}
|
|
79
|
+
} else if (this.mergedRowsArray && this.mergeKey && this.mergeMode) {
|
|
80
|
+
if (this.mergeMode == 'merge') {
|
|
81
|
+
const key = stableStringify(_.pick(obj, this.mergeKey));
|
|
82
|
+
if (this.mergedRowsDict[key]) {
|
|
83
|
+
this.push({ ...obj, ...this.mergedRowsDict[key] });
|
|
84
|
+
delete this.mergedRowsDict[key];
|
|
85
|
+
} else {
|
|
86
|
+
this.push(obj);
|
|
87
|
+
}
|
|
88
|
+
} else if (this.mergeMode == 'append') {
|
|
89
|
+
this.push(obj);
|
|
90
|
+
}
|
|
91
|
+
} else {
|
|
92
|
+
this.push(obj);
|
|
93
|
+
}
|
|
94
|
+
done();
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
_flush(done) {
|
|
98
|
+
if (this.changeSet) {
|
|
99
|
+
for (const insert of this.changeSet.inserts) {
|
|
100
|
+
this.push({
|
|
101
|
+
...insert.document,
|
|
102
|
+
...insert.fields,
|
|
103
|
+
});
|
|
104
|
+
}
|
|
105
|
+
} else if (this.mergedRowsArray && this.mergeKey) {
|
|
106
|
+
if (this.mergeMode == 'merge') {
|
|
107
|
+
for (const row of this.mergedRowsArray) {
|
|
108
|
+
const key = stableStringify(_.pick(row, this.mergeKey));
|
|
109
|
+
if (this.mergedRowsDict[key]) {
|
|
110
|
+
this.push(row);
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
} else {
|
|
114
|
+
for (const row of this.mergedRowsArray) {
|
|
115
|
+
this.push(row);
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
done();
|
|
120
|
+
}
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
async function modifyJsonLinesReader({
|
|
124
|
+
fileName,
|
|
125
|
+
encoding = 'utf-8',
|
|
126
|
+
limitRows = undefined,
|
|
127
|
+
changeSet = null,
|
|
128
|
+
mergedRows = null,
|
|
129
|
+
mergeKey = null,
|
|
130
|
+
mergeMode = 'merge',
|
|
131
|
+
}) {
|
|
132
|
+
logger.info(`Reading file ${fileName} with change set`);
|
|
133
|
+
|
|
134
|
+
const fileStream = fs.createReadStream(
|
|
135
|
+
fileName,
|
|
136
|
+
// @ts-ignore
|
|
137
|
+
encoding
|
|
138
|
+
);
|
|
139
|
+
const liner = byline(fileStream);
|
|
140
|
+
const parser = new ParseStream({ limitRows, changeSet, mergedRows, mergeKey, mergeMode });
|
|
141
|
+
liner.pipe(parser);
|
|
142
|
+
return parser;
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
module.exports = modifyJsonLinesReader;
|
package/src/shell/runScript.js
CHANGED
|
@@ -1,26 +1,16 @@
|
|
|
1
|
-
const
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const os = require('os');
|
|
3
|
+
const rimraf = require('rimraf');
|
|
4
|
+
const path = require('path');
|
|
2
5
|
const AsyncLock = require('async-lock');
|
|
3
6
|
const lock = new AsyncLock();
|
|
4
7
|
const stableStringify = require('json-stable-stringify');
|
|
5
8
|
const { evaluateCondition } = require('dbgate-sqltree');
|
|
6
9
|
const requirePluginFunction = require('./requirePluginFunction');
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
resolve(null);
|
|
12
|
-
return;
|
|
13
|
-
}
|
|
14
|
-
|
|
15
|
-
reader.nextLine((err, line) => {
|
|
16
|
-
if (err) {
|
|
17
|
-
reject(err);
|
|
18
|
-
} else {
|
|
19
|
-
resolve(line);
|
|
20
|
-
}
|
|
21
|
-
});
|
|
22
|
-
});
|
|
23
|
-
}
|
|
10
|
+
const esort = require('external-sorting');
|
|
11
|
+
const uuidv1 = require('uuid/v1');
|
|
12
|
+
const { jsldir } = require('./directories');
|
|
13
|
+
const LineReader = require('./LineReader');
|
|
24
14
|
|
|
25
15
|
class JsonLinesDatastore {
|
|
26
16
|
constructor(file, formatterFunction) {
|
|
@@ -32,10 +22,43 @@ class JsonLinesDatastore {
|
|
|
32
22
|
// this.firstRowToBeReturned = null;
|
|
33
23
|
this.notifyChangedCallback = null;
|
|
34
24
|
this.currentFilter = null;
|
|
25
|
+
this.currentSort = null;
|
|
35
26
|
this.rowFormatter = requirePluginFunction(formatterFunction);
|
|
27
|
+
this.sortedFiles = {};
|
|
36
28
|
}
|
|
37
29
|
|
|
38
|
-
|
|
30
|
+
static async sortFile(infile, outfile, sort) {
|
|
31
|
+
const tempDir = path.join(os.tmpdir(), uuidv1());
|
|
32
|
+
fs.mkdirSync(tempDir);
|
|
33
|
+
|
|
34
|
+
await esort
|
|
35
|
+
.default({
|
|
36
|
+
input: fs.createReadStream(infile),
|
|
37
|
+
output: fs.createWriteStream(outfile),
|
|
38
|
+
deserializer: JSON.parse,
|
|
39
|
+
serializer: JSON.stringify,
|
|
40
|
+
tempDir,
|
|
41
|
+
maxHeap: 100,
|
|
42
|
+
comparer: (a, b) => {
|
|
43
|
+
for (const item of sort) {
|
|
44
|
+
const { uniqueName, order } = item;
|
|
45
|
+
if (a[uniqueName] < b[uniqueName]) {
|
|
46
|
+
return order == 'ASC' ? -1 : 1;
|
|
47
|
+
}
|
|
48
|
+
if (a[uniqueName] > b[uniqueName]) {
|
|
49
|
+
return order == 'ASC' ? 1 : -1;
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
return 0;
|
|
53
|
+
},
|
|
54
|
+
})
|
|
55
|
+
.asc();
|
|
56
|
+
|
|
57
|
+
await new Promise(resolve => rimraf(tempDir, resolve));
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
async _closeReader() {
|
|
61
|
+
// console.log('CLOSING READER', this.reader);
|
|
39
62
|
if (!this.reader) return;
|
|
40
63
|
const reader = this.reader;
|
|
41
64
|
this.reader = null;
|
|
@@ -43,7 +66,8 @@ class JsonLinesDatastore {
|
|
|
43
66
|
this.readedSchemaRow = false;
|
|
44
67
|
// this.firstRowToBeReturned = null;
|
|
45
68
|
this.currentFilter = null;
|
|
46
|
-
|
|
69
|
+
this.currentSort = null;
|
|
70
|
+
await reader.close();
|
|
47
71
|
}
|
|
48
72
|
|
|
49
73
|
async notifyChanged(callback) {
|
|
@@ -56,13 +80,12 @@ class JsonLinesDatastore {
|
|
|
56
80
|
if (call) call();
|
|
57
81
|
}
|
|
58
82
|
|
|
59
|
-
async _openReader() {
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
);
|
|
83
|
+
async _openReader(fileName) {
|
|
84
|
+
// console.log('OPENING READER', fileName);
|
|
85
|
+
// console.log(fs.readFileSync(fileName, 'utf-8'));
|
|
86
|
+
|
|
87
|
+
const fileStream = fs.createReadStream(fileName);
|
|
88
|
+
return new LineReader(fileStream);
|
|
66
89
|
}
|
|
67
90
|
|
|
68
91
|
parseLine(line) {
|
|
@@ -77,7 +100,7 @@ class JsonLinesDatastore {
|
|
|
77
100
|
// return res;
|
|
78
101
|
// }
|
|
79
102
|
for (;;) {
|
|
80
|
-
const line = await
|
|
103
|
+
const line = await this.reader.readLine();
|
|
81
104
|
if (!line) {
|
|
82
105
|
// EOF
|
|
83
106
|
return null;
|
|
@@ -140,14 +163,19 @@ class JsonLinesDatastore {
|
|
|
140
163
|
// });
|
|
141
164
|
}
|
|
142
165
|
|
|
143
|
-
async _ensureReader(offset, filter) {
|
|
144
|
-
if (
|
|
166
|
+
async _ensureReader(offset, filter, sort) {
|
|
167
|
+
if (
|
|
168
|
+
this.readedDataRowCount > offset ||
|
|
169
|
+
stableStringify(filter) != stableStringify(this.currentFilter) ||
|
|
170
|
+
stableStringify(sort) != stableStringify(this.currentSort)
|
|
171
|
+
) {
|
|
145
172
|
this._closeReader();
|
|
146
173
|
}
|
|
147
174
|
if (!this.reader) {
|
|
148
|
-
const reader = await this._openReader();
|
|
175
|
+
const reader = await this._openReader(sort ? this.sortedFiles[stableStringify(sort)] : this.file);
|
|
149
176
|
this.reader = reader;
|
|
150
177
|
this.currentFilter = filter;
|
|
178
|
+
this.currentSort = sort;
|
|
151
179
|
}
|
|
152
180
|
// if (!this.readedSchemaRow) {
|
|
153
181
|
// const line = await this._readLine(true); // skip structure
|
|
@@ -179,13 +207,20 @@ class JsonLinesDatastore {
|
|
|
179
207
|
});
|
|
180
208
|
}
|
|
181
209
|
|
|
182
|
-
async getRows(offset, limit, filter) {
|
|
210
|
+
async getRows(offset, limit, filter, sort) {
|
|
183
211
|
const res = [];
|
|
212
|
+
if (sort && !this.sortedFiles[stableStringify(sort)]) {
|
|
213
|
+
const jslid = uuidv1();
|
|
214
|
+
const sortedFile = path.join(jsldir(), `${jslid}.jsonl`);
|
|
215
|
+
await JsonLinesDatastore.sortFile(this.file, sortedFile, sort);
|
|
216
|
+
this.sortedFiles[stableStringify(sort)] = sortedFile;
|
|
217
|
+
}
|
|
184
218
|
await lock.acquire('reader', async () => {
|
|
185
|
-
await this._ensureReader(offset, filter);
|
|
219
|
+
await this._ensureReader(offset, filter, sort);
|
|
186
220
|
// console.log(JSON.stringify(this.currentFilter, undefined, 2));
|
|
187
221
|
for (let i = 0; i < limit; i += 1) {
|
|
188
222
|
const line = await this._readLine(true);
|
|
223
|
+
// console.log('READED LINE', i);
|
|
189
224
|
if (line == null) break;
|
|
190
225
|
res.push(line);
|
|
191
226
|
}
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
const readline = require('readline');
|
|
2
|
+
|
|
3
|
+
// Minimal FIFO queue with O(1) enqueue/dequeue.
// Backed by a Map keyed by a monotonically increasing index rather than a
// plain object: avoids `delete` on object properties (which deoptimizes the
// object's hidden class in V8) and prototype-key collisions.
class Queue {
  constructor() {
    this.elements = new Map();
    this.head = 0; // index of the next element to dequeue
    this.tail = 0; // index where the next element will be stored
  }

  // Appends an element at the tail.
  enqueue(element) {
    this.elements.set(this.tail, element);
    this.tail++;
  }

  // Removes and returns the head element.
  // Fix: on an empty queue, return undefined WITHOUT advancing head —
  // the original incremented head past tail, making getLength() negative
  // and isEmpty() permanently false.
  dequeue() {
    if (this.isEmpty()) return undefined;
    const item = this.elements.get(this.head);
    this.elements.delete(this.head);
    this.head++;
    return item;
  }

  // Returns the head element without removing it (undefined when empty).
  peek() {
    return this.elements.get(this.head);
  }

  // Number of elements currently buffered.
  getLength() {
    return this.tail - this.head;
  }

  isEmpty() {
    return this.getLength() === 0;
  }
}

// Pull-based line reader over a readable stream.
// readline pushes lines; consumers pull them one at a time via readLine(),
// which resolves with the next line, or null once the stream is exhausted.
// The input stream is kept paused except while a readLine() is pending, so
// at most a small burst of lines is buffered in the queue.
class LineReader {
  constructor(input) {
    this.input = input;
    this.queue = new Queue(); // lines received while no readLine() was pending
    this.resolve = null; // resolver of the currently pending readLine(), if any
    this.isEnded = false;
    this.rl = readline.createInterface({
      input,
    });
    // Do not consume the stream until a consumer asks for a line.
    this.input.pause();

    this.rl.on('line', line => {
      // Pause again so we only buffer what this chunk already produced.
      this.input.pause();
      if (this.resolve) {
        const resolve = this.resolve;
        this.resolve = null;
        resolve(line);
        return;
      }
      this.queue.enqueue(line);
    });

    this.rl.on('close', () => {
      if (this.resolve) {
        const resolve = this.resolve;
        this.resolve = null;
        this.isEnded = true;
        resolve(null);
        return;
      }
      // A null in the queue marks EOF for a later readLine() call.
      this.queue.enqueue(null);
    });
  }

  // Resolves with the next line, or null when the stream is exhausted.
  // NOTE: callers are expected to serialize calls (the datastore holds a
  // lock); a second concurrent call would overwrite the pending resolver.
  readLine() {
    if (this.isEnded) {
      return Promise.resolve(null);
    }

    if (!this.queue.isEmpty()) {
      const res = this.queue.dequeue();
      if (res == null) this.isEnded = true;
      return Promise.resolve(res);
    }

    this.input.resume();

    return new Promise(resolve => {
      this.resolve = resolve;
    });
  }

  // Stops reading and releases resources.
  // Fixes two leaks in the original: the readline interface was never
  // closed (its 'line'/'close' listeners stayed attached), and a
  // readLine() pending at close time would never settle.
  close() {
    this.isEnded = true;
    if (this.resolve) {
      const resolve = this.resolve;
      this.resolve = null;
      resolve(null);
    }
    this.rl.close();
    // assumes input is an fs.ReadStream (close(callback) API) — matches
    // _openReader's fs.createReadStream usage
    return new Promise(resolve => this.input.close(resolve));
  }
}
|
|
87
|
+
|
|
88
|
+
module.exports = LineReader;
|
|
@@ -62,14 +62,17 @@ async function connectUtility(driver, storedConnection, connectionMode, addition
|
|
|
62
62
|
|
|
63
63
|
if (connection.sslCaFile) {
|
|
64
64
|
connection.ssl.ca = await fs.readFile(connection.sslCaFile);
|
|
65
|
+
connection.ssl.sslCaFile = connection.sslCaFile;
|
|
65
66
|
}
|
|
66
67
|
|
|
67
68
|
if (connection.sslCertFile) {
|
|
68
69
|
connection.ssl.cert = await fs.readFile(connection.sslCertFile);
|
|
70
|
+
connection.ssl.sslCertFile = connection.sslCertFile;
|
|
69
71
|
}
|
|
70
72
|
|
|
71
73
|
if (connection.sslKeyFile) {
|
|
72
74
|
connection.ssl.key = await fs.readFile(connection.sslKeyFile);
|
|
75
|
+
connection.ssl.sslKeyFile = connection.sslKeyFile;
|
|
73
76
|
}
|
|
74
77
|
|
|
75
78
|
if (connection.sslCertFilePassword) {
|
|
@@ -42,18 +42,23 @@ function datadir() {
|
|
|
42
42
|
return dir;
|
|
43
43
|
}
|
|
44
44
|
|
|
45
|
-
// Builds a lazy accessor for a data subdirectory: each call ensures the
// directory (and any required subdirectories) exists, then returns its path.
const dirFunc =
  (dirname, clean, subdirs = []) =>
  () => {
    const dir = path.join(datadir(), dirname);
    ensureDirectory(dir, clean);
    subdirs.forEach(subdir => ensureDirectory(path.join(dir, subdir), false));
    return dir;
  };
|
|
51
56
|
|
|
52
57
|
const jsldir = dirFunc('jsl', true);
|
|
53
58
|
const rundir = dirFunc('run', true);
|
|
54
59
|
const uploadsdir = dirFunc('uploads', true);
|
|
55
60
|
const pluginsdir = dirFunc('plugins');
|
|
56
|
-
const archivedir = dirFunc('archive');
|
|
61
|
+
const archivedir = dirFunc('archive', false, ['default']);
|
|
57
62
|
const appdir = dirFunc('apps');
|
|
58
63
|
const filesdir = dirFunc('files');
|
|
59
64
|
const logsdir = dirFunc('logs', 3600 * 24 * 7);
|
|
@@ -1,15 +0,0 @@
|
|
|
1
|
-
const fs = require('fs-extra');
|
|
2
|
-
|
|
3
|
-
async function saveFreeTableData(file, data) {
|
|
4
|
-
const { structure, rows } = data;
|
|
5
|
-
const fileStream = fs.createWriteStream(file);
|
|
6
|
-
await fileStream.write(JSON.stringify({ __isStreamHeader: true, ...structure }) + '\n');
|
|
7
|
-
for (const row of rows) {
|
|
8
|
-
await fileStream.write(JSON.stringify(row) + '\n');
|
|
9
|
-
}
|
|
10
|
-
await fileStream.close();
|
|
11
|
-
}
|
|
12
|
-
|
|
13
|
-
module.exports = {
|
|
14
|
-
saveFreeTableData,
|
|
15
|
-
};
|