dbgate-api 6.3.2 → 6.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/package.json +9 -7
  2. package/src/controllers/archive.js +99 -6
  3. package/src/controllers/auth.js +3 -1
  4. package/src/controllers/config.js +135 -22
  5. package/src/controllers/connections.js +35 -2
  6. package/src/controllers/databaseConnections.js +101 -2
  7. package/src/controllers/files.js +59 -0
  8. package/src/controllers/jsldata.js +9 -0
  9. package/src/controllers/runners.js +25 -5
  10. package/src/controllers/serverConnections.js +22 -2
  11. package/src/controllers/storage.js +4 -0
  12. package/src/controllers/uploads.js +0 -46
  13. package/src/currentVersion.js +2 -2
  14. package/src/main.js +7 -1
  15. package/src/proc/connectProcess.js +14 -2
  16. package/src/proc/databaseConnectionProcess.js +70 -5
  17. package/src/proc/serverConnectionProcess.js +7 -1
  18. package/src/proc/sessionProcess.js +15 -178
  19. package/src/shell/archiveReader.js +3 -1
  20. package/src/shell/collectorWriter.js +2 -2
  21. package/src/shell/copyStream.js +1 -0
  22. package/src/shell/dataReplicator.js +96 -0
  23. package/src/shell/download.js +22 -6
  24. package/src/shell/index.js +12 -2
  25. package/src/shell/jsonLinesWriter.js +4 -3
  26. package/src/shell/queryReader.js +10 -3
  27. package/src/shell/unzipDirectory.js +91 -0
  28. package/src/shell/unzipJsonLinesData.js +60 -0
  29. package/src/shell/unzipJsonLinesFile.js +59 -0
  30. package/src/shell/zipDirectory.js +49 -0
  31. package/src/shell/zipJsonLinesData.js +49 -0
  32. package/src/storageModel.js +819 -0
  33. package/src/utility/DatastoreProxy.js +4 -0
  34. package/src/utility/cloudUpgrade.js +1 -59
  35. package/src/utility/connectUtility.js +3 -1
  36. package/src/utility/crypting.js +137 -22
  37. package/src/utility/extractSingleFileFromZip.js +77 -0
  38. package/src/utility/getMapExport.js +2 -0
  39. package/src/utility/handleQueryStream.js +186 -0
  40. package/src/utility/healthStatus.js +12 -1
  41. package/src/utility/listZipEntries.js +41 -0
  42. package/src/utility/processArgs.js +5 -0
  43. package/src/utility/sshTunnel.js +13 -2
  44. package/src/shell/dataDuplicator.js +0 -61
@@ -11,6 +11,7 @@ const { decryptConnection } = require('../utility/crypting');
11
11
  const { connectUtility } = require('../utility/connectUtility');
12
12
  const { handleProcessCommunication } = require('../utility/processComm');
13
13
  const { getLogger, extractIntSettingsValue, extractBoolSettingsValue } = require('dbgate-tools');
14
+ const { handleQueryStream, QueryStreamTableWriter, allowExecuteCustomScript } = require('../utility/handleQueryStream');
14
15
 
15
16
  const logger = getLogger('sessionProcess');
16
17
 
@@ -23,175 +24,6 @@ let lastActivity = null;
23
24
  let currentProfiler = null;
24
25
  let executingScripts = 0;
25
26
 
26
- class TableWriter {
27
- constructor() {
28
- this.currentRowCount = 0;
29
- this.currentChangeIndex = 1;
30
- this.initializedFile = false;
31
- }
32
-
33
- initializeFromQuery(structure, resultIndex) {
34
- this.jslid = crypto.randomUUID();
35
- this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
36
- fs.writeFileSync(
37
- this.currentFile,
38
- JSON.stringify({
39
- ...structure,
40
- __isStreamHeader: true,
41
- }) + '\n'
42
- );
43
- this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
44
- this.writeCurrentStats(false, false);
45
- this.resultIndex = resultIndex;
46
- this.initializedFile = true;
47
- process.send({ msgtype: 'recordset', jslid: this.jslid, resultIndex });
48
- }
49
-
50
- initializeFromReader(jslid) {
51
- this.jslid = jslid;
52
- this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
53
- this.writeCurrentStats(false, false);
54
- }
55
-
56
- row(row) {
57
- // console.log('ACCEPT ROW', row);
58
- this.currentStream.write(JSON.stringify(row) + '\n');
59
- this.currentRowCount += 1;
60
-
61
- if (!this.plannedStats) {
62
- this.plannedStats = true;
63
- process.nextTick(() => {
64
- if (this.currentStream) this.currentStream.uncork();
65
- process.nextTick(() => this.writeCurrentStats(false, true));
66
- this.plannedStats = false;
67
- });
68
- }
69
- }
70
-
71
- rowFromReader(row) {
72
- if (!this.initializedFile) {
73
- process.send({ msgtype: 'initializeFile', jslid: this.jslid });
74
- this.initializedFile = true;
75
-
76
- fs.writeFileSync(this.currentFile, JSON.stringify(row) + '\n');
77
- this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
78
- this.writeCurrentStats(false, false);
79
- this.initializedFile = true;
80
- return;
81
- }
82
-
83
- this.row(row);
84
- }
85
-
86
- writeCurrentStats(isFinished = false, emitEvent = false) {
87
- const stats = {
88
- rowCount: this.currentRowCount,
89
- changeIndex: this.currentChangeIndex,
90
- isFinished,
91
- jslid: this.jslid,
92
- };
93
- fs.writeFileSync(`${this.currentFile}.stats`, JSON.stringify(stats));
94
- this.currentChangeIndex += 1;
95
- if (emitEvent) {
96
- process.send({ msgtype: 'stats', ...stats });
97
- }
98
- }
99
-
100
- close(afterClose) {
101
- if (this.currentStream) {
102
- this.currentStream.end(() => {
103
- this.writeCurrentStats(true, true);
104
- if (afterClose) afterClose();
105
- });
106
- }
107
- }
108
- }
109
-
110
- class StreamHandler {
111
- constructor(resultIndexHolder, resolve, startLine) {
112
- this.recordset = this.recordset.bind(this);
113
- this.startLine = startLine;
114
- this.row = this.row.bind(this);
115
- // this.error = this.error.bind(this);
116
- this.done = this.done.bind(this);
117
- this.info = this.info.bind(this);
118
-
119
- // use this for cancelling - not implemented
120
- // this.stream = null;
121
-
122
- this.plannedStats = false;
123
- this.resultIndexHolder = resultIndexHolder;
124
- this.resolve = resolve;
125
- // currentHandlers = [...currentHandlers, this];
126
- }
127
-
128
- closeCurrentWriter() {
129
- if (this.currentWriter) {
130
- this.currentWriter.close();
131
- this.currentWriter = null;
132
- }
133
- }
134
-
135
- recordset(columns) {
136
- this.closeCurrentWriter();
137
- this.currentWriter = new TableWriter();
138
- this.currentWriter.initializeFromQuery(
139
- Array.isArray(columns) ? { columns } : columns,
140
- this.resultIndexHolder.value
141
- );
142
- this.resultIndexHolder.value += 1;
143
-
144
- // this.writeCurrentStats();
145
-
146
- // this.onRow = _.throttle((jslid) => {
147
- // if (jslid == this.jslid) {
148
- // this.writeCurrentStats(false, true);
149
- // }
150
- // }, 500);
151
- }
152
- row(row) {
153
- if (this.currentWriter) this.currentWriter.row(row);
154
- else if (row.message) process.send({ msgtype: 'info', info: { message: row.message } });
155
- // this.onRow(this.jslid);
156
- }
157
- // error(error) {
158
- // process.send({ msgtype: 'error', error });
159
- // }
160
- done(result) {
161
- this.closeCurrentWriter();
162
- // currentHandlers = currentHandlers.filter((x) => x != this);
163
- this.resolve();
164
- }
165
- info(info) {
166
- if (info && info.line != null) {
167
- info = {
168
- ...info,
169
- line: this.startLine + info.line,
170
- };
171
- }
172
- process.send({ msgtype: 'info', info });
173
- }
174
- }
175
-
176
- function handleStream(driver, resultIndexHolder, sqlItem) {
177
- return new Promise((resolve, reject) => {
178
- const start = sqlItem.trimStart || sqlItem.start;
179
- const handler = new StreamHandler(resultIndexHolder, resolve, start && start.line);
180
- driver.stream(dbhan, sqlItem.text, handler);
181
- });
182
- }
183
-
184
- function allowExecuteCustomScript(driver) {
185
- if (driver.readOnlySessions) {
186
- return true;
187
- }
188
- if (storedConnection.isReadOnly) {
189
- return false;
190
- // throw new Error('Connection is read only');
191
- }
192
- return true;
193
- }
194
-
195
27
  async function handleConnect(connection) {
196
28
  storedConnection = connection;
197
29
 
@@ -222,12 +54,12 @@ async function handleStartProfiler({ jslid }) {
222
54
  await waitConnected();
223
55
  const driver = requireEngineDriver(storedConnection);
224
56
 
225
- if (!allowExecuteCustomScript(driver)) {
57
+ if (!allowExecuteCustomScript(storedConnection, driver)) {
226
58
  process.send({ msgtype: 'done' });
227
59
  return;
228
60
  }
229
61
 
230
- const writer = new TableWriter();
62
+ const writer = new QueryStreamTableWriter();
231
63
  writer.initializeFromReader(jslid);
232
64
 
233
65
  currentProfiler = await driver.startProfiler(dbhan, {
@@ -251,7 +83,7 @@ async function handleExecuteControlCommand({ command }) {
251
83
  await waitConnected();
252
84
  const driver = requireEngineDriver(storedConnection);
253
85
 
254
- if (command == 'commitTransaction' && !allowExecuteCustomScript(driver)) {
86
+ if (command == 'commitTransaction' && !allowExecuteCustomScript(storedConnection, driver)) {
255
87
  process.send({
256
88
  msgtype: 'info',
257
89
  info: {
@@ -291,7 +123,7 @@ async function handleExecuteQuery({ sql, autoCommit }) {
291
123
  await waitConnected();
292
124
  const driver = requireEngineDriver(storedConnection);
293
125
 
294
- if (!allowExecuteCustomScript(driver)) {
126
+ if (!allowExecuteCustomScript(storedConnection, driver)) {
295
127
  process.send({
296
128
  msgtype: 'info',
297
129
  info: {
@@ -306,18 +138,23 @@ async function handleExecuteQuery({ sql, autoCommit }) {
306
138
 
307
139
  executingScripts++;
308
140
  try {
309
- const resultIndexHolder = {
310
- value: 0,
141
+ const queryStreamInfoHolder = {
142
+ resultIndex: 0,
143
+ canceled: false,
311
144
  };
312
145
  for (const sqlItem of splitQuery(sql, {
313
146
  ...driver.getQuerySplitterOptions('stream'),
314
147
  returnRichInfo: true,
315
148
  })) {
316
- await handleStream(driver, resultIndexHolder, sqlItem);
149
+ await handleQueryStream(dbhan, driver, queryStreamInfoHolder, sqlItem);
317
150
  // const handler = new StreamHandler(resultIndex);
318
151
  // const stream = await driver.stream(systemConnection, sqlItem, handler);
319
152
  // handler.stream = stream;
320
153
  // resultIndex = handler.resultIndex;
154
+
155
+ if (queryStreamInfoHolder.canceled) {
156
+ break;
157
+ }
321
158
  }
322
159
  process.send({ msgtype: 'done', autoCommit });
323
160
  } finally {
@@ -335,13 +172,13 @@ async function handleExecuteReader({ jslid, sql, fileName }) {
335
172
  if (fileName) {
336
173
  sql = fs.readFileSync(fileName, 'utf-8');
337
174
  } else {
338
- if (!allowExecuteCustomScript(driver)) {
175
+ if (!allowExecuteCustomScript(storedConnection, driver)) {
339
176
  process.send({ msgtype: 'done' });
340
177
  return;
341
178
  }
342
179
  }
343
180
 
344
- const writer = new TableWriter();
181
+ const writer = new QueryStreamTableWriter();
345
182
  writer.initializeFromReader(jslid);
346
183
 
347
184
  const reader = await driver.readQuery(dbhan, sql);
@@ -3,7 +3,9 @@ const { archivedir, resolveArchiveFolder } = require('../utility/directories');
3
3
  const jsonLinesReader = require('./jsonLinesReader');
4
4
 
5
5
  function archiveReader({ folderName, fileName, ...other }) {
6
- const jsonlFile = path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
6
+ const jsonlFile = folderName.endsWith('.zip')
7
+ ? `zip://archive:${folderName}//${fileName}.jsonl`
8
+ : path.join(resolveArchiveFolder(folderName), `${fileName}.jsonl`);
7
9
  const res = jsonLinesReader({ fileName: jsonlFile, ...other });
8
10
  return res;
9
11
  }
@@ -15,9 +15,9 @@ class CollectorWriterStream extends stream.Writable {
15
15
 
16
16
  _final(callback) {
17
17
  process.send({
18
- msgtype: 'freeData',
18
+ msgtype: 'dataResult',
19
19
  runid: this.runid,
20
- freeData: { rows: this.rows, structure: this.structure },
20
+ dataResult: { rows: this.rows, structure: this.structure },
21
21
  });
22
22
  callback();
23
23
  }
@@ -69,6 +69,7 @@ async function copyStream(input, output, options) {
69
69
  msgtype: 'copyStreamError',
70
70
  copyStreamError: {
71
71
  message: extractErrorMessage(err),
72
+ progressName,
72
73
  ...err,
73
74
  },
74
75
  });
@@ -0,0 +1,96 @@
1
+ const stream = require('stream');
2
+ const path = require('path');
3
+ const { quoteFullName, fullNameToString, getLogger } = require('dbgate-tools');
4
+ const requireEngineDriver = require('../utility/requireEngineDriver');
5
+ const { connectUtility } = require('../utility/connectUtility');
6
+ const logger = getLogger('datareplicator');
7
+ const { DataReplicator } = require('dbgate-datalib');
8
+ const { compileCompoudEvalCondition } = require('dbgate-filterparser');
9
+ const copyStream = require('./copyStream');
10
+ const jsonLinesReader = require('./jsonLinesReader');
11
+ const { resolveArchiveFolder } = require('../utility/directories');
12
+ const { evaluateCondition } = require('dbgate-sqltree');
13
+
14
+ function compileOperationFunction(enabled, condition) {
15
+ if (!enabled) return _row => false;
16
+ const conditionCompiled = compileCompoudEvalCondition(condition);
17
+ if (condition) {
18
+ return row => evaluateCondition(conditionCompiled, row);
19
+ }
20
+ return _row => true;
21
+ }
22
+
23
+ async function dataReplicator({
24
+ connection,
25
+ archive,
26
+ folder,
27
+ items,
28
+ options,
29
+ analysedStructure = null,
30
+ driver,
31
+ systemConnection,
32
+ }) {
33
+ if (!driver) driver = requireEngineDriver(connection);
34
+
35
+ const dbhan = systemConnection || (await connectUtility(driver, connection, 'write'));
36
+
37
+ try {
38
+ if (!analysedStructure) {
39
+ analysedStructure = await driver.analyseFull(dbhan);
40
+ }
41
+
42
+ let joinPath;
43
+
44
+ if (archive?.endsWith('.zip')) {
45
+ joinPath = file => `zip://archive:${archive}//${file}`;
46
+ } else {
47
+ const sourceDir = archive
48
+ ? resolveArchiveFolder(archive)
49
+ : folder?.startsWith('archive:')
50
+ ? resolveArchiveFolder(folder.substring('archive:'.length))
51
+ : folder;
52
+ joinPath = file => path.join(sourceDir, file);
53
+ }
54
+
55
+ const repl = new DataReplicator(
56
+ dbhan,
57
+ driver,
58
+ analysedStructure,
59
+ items.map(item => {
60
+ return {
61
+ name: item.name,
62
+ matchColumns: item.matchColumns,
63
+ findExisting: compileOperationFunction(item.findExisting, item.findCondition),
64
+ createNew: compileOperationFunction(item.createNew, item.createCondition),
65
+ updateExisting: compileOperationFunction(item.updateExisting, item.updateCondition),
66
+ deleteMissing: !!item.deleteMissing,
67
+ deleteRestrictionColumns: item.deleteRestrictionColumns ?? [],
68
+ openStream: item.openStream
69
+ ? item.openStream
70
+ : item.jsonArray
71
+ ? () => stream.Readable.from(item.jsonArray)
72
+ : () => jsonLinesReader({ fileName: joinPath(`${item.name}.jsonl`) }),
73
+ };
74
+ }),
75
+ stream,
76
+ copyStream,
77
+ options
78
+ );
79
+
80
+ await repl.run();
81
+ if (options?.runid) {
82
+ process.send({
83
+ msgtype: 'dataResult',
84
+ runid: options?.runid,
85
+ dataResult: repl.result,
86
+ });
87
+ }
88
+ return repl.result;
89
+ } finally {
90
+ if (!systemConnection) {
91
+ await driver.close(dbhan);
92
+ }
93
+ }
94
+ }
95
+
96
+ module.exports = dataReplicator;
@@ -1,14 +1,30 @@
1
1
  const crypto = require('crypto');
2
2
  const path = require('path');
3
- const { uploadsdir } = require('../utility/directories');
3
+ const { uploadsdir, archivedir } = require('../utility/directories');
4
4
  const { downloadFile } = require('../utility/downloader');
5
+ const extractSingleFileFromZip = require('../utility/extractSingleFileFromZip');
5
6
 
6
- async function download(url) {
7
- if (url && url.match(/(^http:\/\/)|(^https:\/\/)/)) {
8
- const tmpFile = path.join(uploadsdir(), crypto.randomUUID());
9
- await downloadFile(url, tmpFile);
10
- return tmpFile;
7
+ async function download(url, options = {}) {
8
+ const { targetFile } = options || {};
9
+ if (url) {
10
+ if (url.match(/(^http:\/\/)|(^https:\/\/)/)) {
11
+ const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
12
+ await downloadFile(url, destFile);
13
+ return destFile;
14
+ }
15
+ const zipMatch = url.match(/^zip\:\/\/(.*)\/\/(.*)$/);
16
+ if (zipMatch) {
17
+ const destFile = targetFile || path.join(uploadsdir(), crypto.randomUUID());
18
+ let zipFile = zipMatch[1];
19
+ if (zipFile.startsWith('archive:')) {
20
+ zipFile = path.join(archivedir(), zipFile.substring('archive:'.length));
21
+ }
22
+
23
+ await extractSingleFileFromZip(zipFile, zipMatch[2], destFile);
24
+ return destFile;
25
+ }
11
26
  }
27
+
12
28
  return url;
13
29
  }
14
30
 
@@ -25,7 +25,7 @@ const importDatabase = require('./importDatabase');
25
25
  const loadDatabase = require('./loadDatabase');
26
26
  const generateModelSql = require('./generateModelSql');
27
27
  const modifyJsonLinesReader = require('./modifyJsonLinesReader');
28
- const dataDuplicator = require('./dataDuplicator');
28
+ const dataReplicator = require('./dataReplicator');
29
29
  const dbModelToJson = require('./dbModelToJson');
30
30
  const jsonToDbModel = require('./jsonToDbModel');
31
31
  const jsonReader = require('./jsonReader');
@@ -35,6 +35,11 @@ const autoIndexForeignKeysTransform = require('./autoIndexForeignKeysTransform')
35
35
  const generateDeploySql = require('./generateDeploySql');
36
36
  const dropAllDbObjects = require('./dropAllDbObjects');
37
37
  const importDbFromFolder = require('./importDbFromFolder');
38
+ const zipDirectory = require('./zipDirectory');
39
+ const unzipDirectory = require('./unzipDirectory');
40
+ const zipJsonLinesData = require('./zipJsonLinesData');
41
+ const unzipJsonLinesData = require('./unzipJsonLinesData');
42
+ const unzipJsonLinesFile = require('./unzipJsonLinesFile');
38
43
 
39
44
  const dbgateApi = {
40
45
  queryReader,
@@ -64,7 +69,7 @@ const dbgateApi = {
64
69
  loadDatabase,
65
70
  generateModelSql,
66
71
  modifyJsonLinesReader,
67
- dataDuplicator,
72
+ dataReplicator,
68
73
  dbModelToJson,
69
74
  jsonToDbModel,
70
75
  dataTypeMapperTransform,
@@ -73,6 +78,11 @@ const dbgateApi = {
73
78
  generateDeploySql,
74
79
  dropAllDbObjects,
75
80
  importDbFromFolder,
81
+ zipDirectory,
82
+ unzipDirectory,
83
+ zipJsonLinesData,
84
+ unzipJsonLinesData,
85
+ unzipJsonLinesFile,
76
86
  };
77
87
 
78
88
  requirePlugin.initializeDbgateApi(dbgateApi);
@@ -36,9 +36,10 @@ async function jsonLinesWriter({ fileName, encoding = 'utf-8', header = true })
36
36
  logger.info(`Writing file ${fileName}`);
37
37
  const stringify = new StringifyStream({ header });
38
38
  const fileStream = fs.createWriteStream(fileName, encoding);
39
- stringify.pipe(fileStream);
40
- stringify['finisher'] = fileStream;
41
- return stringify;
39
+ return [stringify, fileStream];
40
+ // stringify.pipe(fileStream);
41
+ // stringify['finisher'] = fileStream;
42
+ // return stringify;
42
43
  }
43
44
 
44
45
  module.exports = jsonLinesWriter;
@@ -7,6 +7,8 @@ const logger = getLogger('queryReader');
7
7
  * Returns reader object for {@link copyStream} function. This reader object reads data from query.
8
8
  * @param {object} options
9
9
  * @param {connectionType} options.connection - connection object
10
+ * @param {object} options.systemConnection - system connection (result of driver.connect). If not provided, new connection will be created
11
+ * @param {object} options.driver - driver object. If not provided, it will be loaded from connection
10
12
  * @param {string} options.query - SQL query
11
13
  * @param {string} [options.queryType] - query type
12
14
  * @param {string} [options.sql] - SQL query. obsolete; use query instead
@@ -16,6 +18,8 @@ async function queryReader({
16
18
  connection,
17
19
  query,
18
20
  queryType,
21
+ systemConnection,
22
+ driver,
19
23
  // obsolete; use query instead
20
24
  sql,
21
25
  }) {
@@ -28,10 +32,13 @@ async function queryReader({
28
32
  logger.info({ sql: query || sql }, `Reading query`);
29
33
  // else console.log(`Reading query ${JSON.stringify(json)}`);
30
34
 
31
- const driver = requireEngineDriver(connection);
32
- const pool = await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script');
35
+ if (!driver) {
36
+ driver = requireEngineDriver(connection);
37
+ }
38
+ const dbhan = systemConnection || (await connectUtility(driver, connection, queryType == 'json' ? 'read' : 'script'));
39
+
33
40
  const reader =
34
- queryType == 'json' ? await driver.readJsonQuery(pool, query) : await driver.readQuery(pool, query || sql);
41
+ queryType == 'json' ? await driver.readJsonQuery(dbhan, query) : await driver.readQuery(dbhan, query || sql);
35
42
  return reader;
36
43
  }
37
44
 
@@ -0,0 +1,91 @@
1
+ const yauzl = require('yauzl');
2
+ const fs = require('fs');
3
+ const path = require('path');
4
+ const { getLogger, extractErrorLogData } = require('dbgate-tools');
5
+
6
+ const logger = getLogger('unzipDirectory');
7
+
8
+ /**
9
+ * Extracts an entire ZIP file, preserving its internal directory layout.
10
+ *
11
+ * @param {string} zipPath Path to the ZIP file on disk.
12
+ * @param {string} outputDirectory Folder to create / overwrite with the contents.
13
+ * @returns {Promise<boolean>} Resolves `true` on success, rejects on error.
14
+ */
15
+ function unzipDirectory(zipPath, outputDirectory) {
16
+ return new Promise((resolve, reject) => {
17
+ yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
18
+ if (err) return reject(err);
19
+
20
+ /** Pending per-file extractions – we resolve the main promise after they’re all done */
21
+ const pending = [];
22
+
23
+ // kick things off
24
+ zipFile.readEntry();
25
+
26
+ zipFile.on('entry', entry => {
27
+ const destPath = path.join(outputDirectory, entry.fileName);
28
+
29
+ // Handle directories (their names always end with “/” in ZIPs)
30
+ if (/\/$/.test(entry.fileName)) {
31
+ // Ensure directory exists, then continue to next entry
32
+ fs.promises
33
+ .mkdir(destPath, { recursive: true })
34
+ .then(() => zipFile.readEntry())
35
+ .catch(reject);
36
+ return;
37
+ }
38
+
39
+ // Handle files
40
+ const filePromise = fs.promises
41
+ .mkdir(path.dirname(destPath), { recursive: true }) // make sure parent dirs exist
42
+ .then(
43
+ () =>
44
+ new Promise((res, rej) => {
45
+ zipFile.openReadStream(entry, (err, readStream) => {
46
+ if (err) return rej(err);
47
+
48
+ const writeStream = fs.createWriteStream(destPath);
49
+ readStream.pipe(writeStream);
50
+
51
+ // proceed to next entry once we’ve consumed *this* one
52
+ readStream.on('end', () => zipFile.readEntry());
53
+
54
+ writeStream.on('finish', () => {
55
+ logger.info(`Extracted "${entry.fileName}" → "${destPath}".`);
56
+ res();
57
+ });
58
+
59
+ writeStream.on('error', writeErr => {
60
+ logger.error(
61
+ extractErrorLogData(writeErr),
62
+ `Error extracting "${entry.fileName}" from "${zipPath}".`
63
+ );
64
+ rej(writeErr);
65
+ });
66
+ });
67
+ })
68
+ );
69
+
70
+ pending.push(filePromise);
71
+ });
72
+
73
+ // Entire archive enumerated; wait for all streams to finish
74
+ zipFile.on('end', () => {
75
+ Promise.all(pending)
76
+ .then(() => {
77
+ logger.info(`Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
78
+ resolve(true);
79
+ })
80
+ .catch(reject);
81
+ });
82
+
83
+ zipFile.on('error', err => {
84
+ logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
85
+ reject(err);
86
+ });
87
+ });
88
+ });
89
+ }
90
+
91
+ module.exports = unzipDirectory;
@@ -0,0 +1,60 @@
1
+ const yauzl = require('yauzl');
2
+ const fs = require('fs');
3
+ const { jsonLinesParse } = require('dbgate-tools');
4
+
5
+ function unzipJsonLinesData(zipPath) {
6
+ return new Promise((resolve, reject) => {
7
+ // Open the zip file
8
+ yauzl.open(zipPath, { lazyEntries: true }, (err, zipfile) => {
9
+ if (err) {
10
+ return reject(err);
11
+ }
12
+
13
+ const results = {};
14
+
15
+ // Start reading entries
16
+ zipfile.readEntry();
17
+
18
+ zipfile.on('entry', entry => {
19
+ // Only process .json files
20
+ if (/\.jsonl$/i.test(entry.fileName)) {
21
+ zipfile.openReadStream(entry, (err, readStream) => {
22
+ if (err) {
23
+ return reject(err);
24
+ }
25
+
26
+ const chunks = [];
27
+ readStream.on('data', chunk => chunks.push(chunk));
28
+ readStream.on('end', () => {
29
+ try {
30
+ const fileContent = Buffer.concat(chunks).toString('utf-8');
31
+ const parsedJson = jsonLinesParse(fileContent);
32
+ results[entry.fileName.replace(/\.jsonl$/, '')] = parsedJson;
33
+ } catch (parseError) {
34
+ return reject(parseError);
35
+ }
36
+
37
+ // Move to the next entry
38
+ zipfile.readEntry();
39
+ });
40
+ });
41
+ } else {
42
+ // Not a JSON file, skip
43
+ zipfile.readEntry();
44
+ }
45
+ });
46
+
47
+ // Resolve when no more entries
48
+ zipfile.on('end', () => {
49
+ resolve(results);
50
+ });
51
+
52
+ // Catch errors from zipfile
53
+ zipfile.on('error', zipErr => {
54
+ reject(zipErr);
55
+ });
56
+ });
57
+ });
58
+ }
59
+
60
+ module.exports = unzipJsonLinesData;