dbgate-api-premium 7.1.8 → 7.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "dbgate-api-premium",
3
3
  "main": "src/index.js",
4
- "version": "7.1.8",
4
+ "version": "7.1.9",
5
5
  "homepage": "https://www.dbgate.io/",
6
6
  "repository": {
7
7
  "type": "git",
@@ -30,11 +30,11 @@
30
30
  "compare-versions": "^3.6.0",
31
31
  "cors": "^2.8.5",
32
32
  "cross-env": "^6.0.3",
33
- "dbgate-datalib": "7.1.8",
33
+ "dbgate-datalib": "7.1.9",
34
34
  "dbgate-query-splitter": "^4.12.0",
35
- "dbgate-rest": "7.1.8",
36
- "dbgate-sqltree": "7.1.8",
37
- "dbgate-tools": "7.1.8",
35
+ "dbgate-rest": "7.1.9",
36
+ "dbgate-sqltree": "7.1.9",
37
+ "dbgate-tools": "7.1.9",
38
38
  "debug": "^4.3.4",
39
39
  "diff": "^5.0.0",
40
40
  "diff2html": "^3.4.13",
@@ -88,7 +88,7 @@
88
88
  "devDependencies": {
89
89
  "@types/fs-extra": "^9.0.11",
90
90
  "@types/lodash": "^4.14.149",
91
- "dbgate-types": "7.1.8",
91
+ "dbgate-types": "7.1.9",
92
92
  "env-cmd": "^10.1.0",
93
93
  "jsdoc-to-markdown": "^9.0.5",
94
94
  "node-loader": "^1.0.2",
@@ -19,6 +19,26 @@ const unzipDirectory = require('../shell/unzipDirectory');
19
19
 
20
20
  const logger = getLogger('archive');
21
21
 
22
+ /**
23
+ * Rejects any archive name (folder or file) that contains path-traversal
24
+ * sequences, directory separators, or null bytes. These values are used
25
+ * directly in path.join() calls; allowing traversal would let callers read
26
+ * or write arbitrary files outside the archive directory.
27
+ */
28
+ function assertSafeArchiveName(name, label) {
29
+ if (typeof name !== 'string' || name.length === 0) {
30
+ throw new Error(`DBGM-00000 Invalid ${label}: must be a non-empty string`);
31
+ }
32
+ if (name.includes('\0') || name.includes('..') || name.includes('/') || name.includes('\\')) {
33
+ throw new Error(`DBGM-00000 Invalid ${label}: path traversal not allowed`);
34
+ }
35
+ // Reject names that resolve to the archive root itself (e.g. '.')
36
+ const resolved = path.resolve(archivedir(), name);
37
+ if (resolved === path.resolve(archivedir())) {
38
+ throw new Error(`DBGM-00000 Invalid ${label}: must not resolve to the archive root`);
39
+ }
40
+ }
41
+
22
42
  module.exports = {
23
43
  folders_meta: true,
24
44
  async folders() {
@@ -39,6 +59,7 @@ module.exports = {
39
59
 
40
60
  createFolder_meta: true,
41
61
  async createFolder({ folder }) {
62
+ assertSafeArchiveName(folder, 'folder');
42
63
  await fs.mkdir(path.join(archivedir(), folder));
43
64
  socket.emitChanged('archive-folders-changed');
44
65
  return true;
@@ -46,8 +67,12 @@ module.exports = {
46
67
 
47
68
  createLink_meta: true,
48
69
  async createLink({ linkedFolder }) {
70
+ if (typeof linkedFolder !== 'string' || linkedFolder.length === 0) {
71
+ throw new Error(`DBGM-00000 Invalid linkedFolder: must be a non-empty string`);
72
+ }
73
+ assertSafeArchiveName(path.parse(linkedFolder).name, 'linkedFolder');
49
74
  const folder = await this.getNewArchiveFolder({ database: path.parse(linkedFolder).name + '.link' });
50
- fs.writeFile(path.join(archivedir(), folder), linkedFolder);
75
+ await fs.writeFile(path.join(archivedir(), folder), linkedFolder);
51
76
  clearArchiveLinksCache();
52
77
  socket.emitChanged('archive-folders-changed');
53
78
  return folder;
@@ -71,6 +96,8 @@ module.exports = {
71
96
 
72
97
  files_meta: true,
73
98
  async files({ folder }) {
99
+ if (!folder) return [];
100
+ assertSafeArchiveName(folder, 'folder');
74
101
  try {
75
102
  if (folder.endsWith('.zip')) {
76
103
  if (await fs.exists(path.join(archivedir(), folder))) {
@@ -121,6 +148,9 @@ module.exports = {
121
148
 
122
149
  createFile_meta: true,
123
150
  async createFile({ folder, file, fileType, tableInfo }) {
151
+ assertSafeArchiveName(folder, 'folder');
152
+ assertSafeArchiveName(file, 'file');
153
+ assertSafeArchiveName(fileType, 'fileType');
124
154
  await fs.writeFile(
125
155
  path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
126
156
  tableInfo ? JSON.stringify({ __isStreamHeader: true, tableInfo }) : ''
@@ -131,6 +161,9 @@ module.exports = {
131
161
 
132
162
  deleteFile_meta: true,
133
163
  async deleteFile({ folder, file, fileType }) {
164
+ assertSafeArchiveName(folder, 'folder');
165
+ assertSafeArchiveName(file, 'file');
166
+ assertSafeArchiveName(fileType, 'fileType');
134
167
  await fs.unlink(path.join(resolveArchiveFolder(folder), `${file}.${fileType}`));
135
168
  socket.emitChanged(`archive-files-changed`, { folder });
136
169
  return true;
@@ -138,6 +171,10 @@ module.exports = {
138
171
 
139
172
  renameFile_meta: true,
140
173
  async renameFile({ folder, file, newFile, fileType }) {
174
+ assertSafeArchiveName(folder, 'folder');
175
+ assertSafeArchiveName(file, 'file');
176
+ assertSafeArchiveName(newFile, 'newFile');
177
+ assertSafeArchiveName(fileType, 'fileType');
141
178
  await fs.rename(
142
179
  path.join(resolveArchiveFolder(folder), `${file}.${fileType}`),
143
180
  path.join(resolveArchiveFolder(folder), `${newFile}.${fileType}`)
@@ -148,6 +185,8 @@ module.exports = {
148
185
 
149
186
  modifyFile_meta: true,
150
187
  async modifyFile({ folder, file, changeSet, mergedRows, mergeKey, mergeMode }) {
188
+ assertSafeArchiveName(folder, 'folder');
189
+ assertSafeArchiveName(file, 'file');
151
190
  await jsldata.closeDataStore(`archive://${folder}/${file}`);
152
191
  const changedFilePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
153
192
 
@@ -187,6 +226,8 @@ module.exports = {
187
226
 
188
227
  renameFolder_meta: true,
189
228
  async renameFolder({ folder, newFolder }) {
229
+ assertSafeArchiveName(folder, 'folder');
230
+ assertSafeArchiveName(newFolder, 'newFolder');
190
231
  const uniqueName = await this.getNewArchiveFolder({ database: newFolder });
191
232
  await fs.rename(path.join(archivedir(), folder), path.join(archivedir(), uniqueName));
192
233
  socket.emitChanged(`archive-folders-changed`);
@@ -196,6 +237,7 @@ module.exports = {
196
237
  deleteFolder_meta: true,
197
238
  async deleteFolder({ folder }) {
198
239
  if (!folder) throw new Error('Missing folder parameter');
240
+ assertSafeArchiveName(folder, 'folder');
199
241
  if (folder.endsWith('.link') || folder.endsWith('.zip')) {
200
242
  await fs.unlink(path.join(archivedir(), folder));
201
243
  } else {
@@ -207,6 +249,8 @@ module.exports = {
207
249
 
208
250
  saveText_meta: true,
209
251
  async saveText({ folder, file, text }) {
252
+ assertSafeArchiveName(folder, 'folder');
253
+ assertSafeArchiveName(file, 'file');
210
254
  await fs.writeFile(path.join(resolveArchiveFolder(folder), `${file}.jsonl`), text);
211
255
  socket.emitChanged(`archive-files-changed`, { folder });
212
256
  return true;
@@ -214,6 +258,8 @@ module.exports = {
214
258
 
215
259
  saveJslData_meta: true,
216
260
  async saveJslData({ folder, file, jslid, changeSet }) {
261
+ assertSafeArchiveName(folder, 'folder');
262
+ assertSafeArchiveName(file, 'file');
217
263
  const source = getJslFileName(jslid);
218
264
  const target = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
219
265
  if (changeSet) {
@@ -232,11 +278,20 @@ module.exports = {
232
278
 
233
279
  saveRows_meta: true,
234
280
  async saveRows({ folder, file, rows }) {
235
- const fileStream = fs.createWriteStream(path.join(resolveArchiveFolder(folder), `${file}.jsonl`));
281
+ assertSafeArchiveName(folder, 'folder');
282
+ assertSafeArchiveName(file, 'file');
283
+ const filePath = path.join(resolveArchiveFolder(folder), `${file}.jsonl`);
284
+ const fileStream = fs.createWriteStream(filePath);
236
285
  for (const row of rows) {
237
- await fileStream.write(JSON.stringify(row) + '\n');
286
+ const ok = fileStream.write(JSON.stringify(row) + '\n');
287
+ if (!ok) {
288
+ await new Promise(resolve => fileStream.once('drain', resolve));
289
+ }
238
290
  }
239
- await fileStream.close();
291
+ await new Promise((resolve, reject) => {
292
+ fileStream.end(() => resolve());
293
+ fileStream.on('error', reject);
294
+ });
240
295
  socket.emitChanged(`archive-files-changed`, { folder });
241
296
  return true;
242
297
  },
@@ -256,6 +311,8 @@ module.exports = {
256
311
 
257
312
  getArchiveData_meta: true,
258
313
  async getArchiveData({ folder, file }) {
314
+ assertSafeArchiveName(folder, 'folder');
315
+ assertSafeArchiveName(file, 'file');
259
316
  let rows;
260
317
  if (folder.endsWith('.zip')) {
261
318
  rows = await unzipJsonLinesFile(path.join(archivedir(), folder), `${file}.jsonl`);
@@ -270,7 +327,7 @@ module.exports = {
270
327
  if (!fileName?.endsWith('.zip')) {
271
328
  throw new Error(`${fileName} is not a ZIP file`);
272
329
  }
273
-
330
+ assertSafeArchiveName(fileName.slice(0, -4), 'fileName');
274
331
  const folder = await this.getNewArchiveFolder({ database: fileName });
275
332
  await fs.copyFile(filePath, path.join(archivedir(), folder));
276
333
  socket.emitChanged(`archive-folders-changed`);
@@ -280,6 +337,7 @@ module.exports = {
280
337
 
281
338
  zip_meta: true,
282
339
  async zip({ folder }) {
340
+ assertSafeArchiveName(folder, 'folder');
283
341
  const newFolder = await this.getNewArchiveFolder({ database: folder + '.zip' });
284
342
  await zipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
285
343
  socket.emitChanged(`archive-folders-changed`);
@@ -289,6 +347,7 @@ module.exports = {
289
347
 
290
348
  unzip_meta: true,
291
349
  async unzip({ folder }) {
350
+ assertSafeArchiveName(folder, 'folder');
292
351
  const newFolder = await this.getNewArchiveFolder({ database: folder.slice(0, -4) });
293
352
  await unzipDirectory(path.join(archivedir(), folder), path.join(archivedir(), newFolder));
294
353
  socket.emitChanged(`archive-folders-changed`);
@@ -298,6 +357,7 @@ module.exports = {
298
357
 
299
358
  getZippedPath_meta: true,
300
359
  async getZippedPath({ folder }) {
360
+ assertSafeArchiveName(folder, 'folder');
301
361
  if (folder.endsWith('.zip')) {
302
362
  return { filePath: path.join(archivedir(), folder) };
303
363
  }
@@ -10,6 +10,7 @@ const {
10
10
  extractShellApiPlugins,
11
11
  compileShellApiFunctionName,
12
12
  jsonScriptToJavascript,
13
+ assertValidShellApiFunctionName,
13
14
  getLogger,
14
15
  safeJsonParse,
15
16
  pinoLogRecordToMessageRecord,
@@ -54,19 +55,23 @@ logger.info('DBGM-00014 Finished job script');
54
55
  dbgateApi.runScript(run);
55
56
  `;
56
57
 
57
- const loaderScriptTemplate = (prefix, functionName, props, runid) => `
58
+ const loaderScriptTemplate = (functionName, props, runid) => {
59
+ const plugins = extractShellApiPlugins(functionName, props);
60
+ const prefix = plugins.map(packageName => `// @require ${packageName}\n`).join('');
61
+ return `
58
62
  ${prefix}
59
63
  const dbgateApi = require(process.env.DBGATE_API);
60
64
  dbgateApi.initializeApiEnvironment();
61
- ${requirePluginsTemplate(extractShellApiPlugins(functionName, props))}
65
+ ${requirePluginsTemplate(plugins)}
62
66
  require=null;
63
67
  async function run() {
64
68
  const reader=await ${compileShellApiFunctionName(functionName)}(${JSON.stringify(props)});
65
- const writer=await dbgateApi.collectorWriter({runid: '${runid}'});
69
+ const writer=await dbgateApi.collectorWriter({runid: ${JSON.stringify(runid)}});
66
70
  await dbgateApi.copyStream(reader, writer);
67
71
  }
68
72
  dbgateApi.runScript(run);
69
73
  `;
74
+ };
70
75
 
71
76
  module.exports = {
72
77
  /** @type {import('dbgate-types').OpenedRunner[]} */
@@ -377,14 +382,12 @@ module.exports = {
377
382
  return { errorMessage: 'DBGM-00289 Unallowed file' };
378
383
  }
379
384
  }
380
- const prefix = extractShellApiPlugins(functionName)
381
- .map(packageName => `// @require ${packageName}\n`)
382
- .join('');
383
385
 
384
386
  const promise = new Promise((resolve, reject) => {
387
+ assertValidShellApiFunctionName(functionName);
385
388
  const runid = crypto.randomUUID();
386
389
  this.requests[runid] = { resolve, reject, exitOnStreamError: true };
387
- this.startCore(runid, loaderScriptTemplate(prefix, functionName, props, runid));
390
+ this.startCore(runid, loaderScriptTemplate(functionName, props, runid));
388
391
  });
389
392
  return promise;
390
393
  },
@@ -1,5 +1,5 @@
1
1
 
2
2
  module.exports = {
3
- version: '7.1.8',
4
- buildTime: '2026-04-09T13:37:40.330Z'
3
+ version: '7.1.9',
4
+ buildTime: '2026-04-22T11:39:40.647Z'
5
5
  };
@@ -16,23 +16,53 @@ function unzipDirectory(zipPath, outputDirectory) {
16
16
  return new Promise((resolve, reject) => {
17
17
  yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
18
18
  if (err) return reject(err);
19
-
19
+ let settled = false;
20
+ /** Track active streams so we can destroy them on early abort */
21
+ const activeStreams = new Set();
22
+ const safeReject = rejectErr => {
23
+ if (settled) return;
24
+ settled = true;
25
+ for (const s of activeStreams) {
26
+ s.destroy();
27
+ }
28
+ activeStreams.clear();
29
+ zipFile.close();
30
+ reject(rejectErr);
31
+ };
20
32
  /** Pending per-file extractions – we resolve the main promise after they’re all done */
21
33
  const pending = [];
22
34
 
35
+ // Resolved output boundary used for zip-slip checks on every entry
36
+ const resolvedOutputDir = path.resolve(outputDirectory);
37
+
23
38
  // kick things off
24
39
  zipFile.readEntry();
25
40
 
26
41
  zipFile.on('entry', entry => {
42
+ // Null-byte poison check
43
+ if (entry.fileName.includes('\0')) {
44
+ return safeReject(new Error(`DBGM-00000 ZIP entry with null byte in filename rejected`));
45
+ }
46
+
27
47
  const destPath = path.join(outputDirectory, entry.fileName);
48
+ const resolvedDest = path.resolve(destPath);
49
+
50
+ // Zip-slip protection: every extracted path must stay inside outputDirectory
51
+ if (resolvedDest !== resolvedOutputDir && !resolvedDest.startsWith(resolvedOutputDir + path.sep)) {
52
+ return safeReject(
53
+ new Error(`DBGM-00000 ZIP slip detected: entry "${entry.fileName}" would escape output directory`)
54
+ );
55
+ }
28
56
 
29
57
  // Handle directories (their names always end with “/” in ZIPs)
30
58
  if (/\/$/.test(entry.fileName)) {
31
59
  // Ensure directory exists, then continue to next entry
32
60
  fs.promises
33
61
  .mkdir(destPath, { recursive: true })
34
- .then(() => zipFile.readEntry())
35
- .catch(reject);
62
+ .then(() => {
63
+ if (!settled) zipFile.readEntry();
64
+ })
65
+ .catch(safeReject);
36
66
  return;
37
67
  }
38
68
 
@@ -46,17 +76,29 @@ function unzipDirectory(zipPath, outputDirectory) {
46
76
  if (err) return rej(err);
47
77
 
48
78
  const writeStream = fs.createWriteStream(destPath);
79
+ activeStreams.add(readStream);
80
+ activeStreams.add(writeStream);
49
81
  readStream.pipe(writeStream);
50
82
 
51
- // proceed to next entry once weve consumed *this* one
52
- readStream.on('end', () => zipFile.readEntry());
83
+ // proceed to next entry once we've consumed *this* one
84
+ readStream.on('end', () => {
85
+ activeStreams.delete(readStream);
86
+ if (!settled) zipFile.readEntry();
87
+ });
88
+
89
+ readStream.on('error', readErr => {
90
+ activeStreams.delete(readStream);
91
+ rej(readErr);
92
+ });
53
93
 
54
94
  writeStream.on('finish', () => {
95
+ activeStreams.delete(writeStream);
55
96
  logger.info(`DBGM-00068 Extracted "${entry.fileName}" → "${destPath}".`);
56
97
  res();
57
98
  });
58
99
 
59
100
  writeStream.on('error', writeErr => {
101
+ activeStreams.delete(writeStream);
60
102
  logger.error(
61
103
  extractErrorLogData(writeErr),
62
104
  `DBGM-00069 Error extracting "${entry.fileName}" from "${zipPath}".`
@@ -67,22 +109,29 @@ function unzipDirectory(zipPath, outputDirectory) {
67
109
  })
68
110
  );
69
111
 
112
+ // Immediately abort the whole unzip if this file fails; otherwise the
113
+ // zip would never emit 'end' (lazyEntries won't advance without readEntry).
114
+ filePromise.catch(safeReject);
70
115
  pending.push(filePromise);
71
116
  });
72
117
 
73
118
  // Entire archive enumerated; wait for all streams to finish
74
119
  zipFile.on('end', () => {
120
+ if (settled) return;
75
121
  Promise.all(pending)
76
122
  .then(() => {
123
+ if (settled) return;
124
+ settled = true;
125
+ zipFile.close();
77
126
  logger.info(`DBGM-00070 Archive "${zipPath}" fully extracted to "${outputDirectory}".`);
78
127
  resolve(true);
79
128
  })
80
- .catch(reject);
129
+ .catch(safeReject);
81
130
  });
82
131
 
83
132
  zipFile.on('error', err => {
84
133
  logger.error(extractErrorLogData(err), `DBGM-00071 ZIP file error in ${zipPath}.`);
85
- reject(err);
134
+ safeReject(err);
86
135
  });
87
136
  });
88
137
  });
@@ -1,4 +1,5 @@
1
1
  const axios = require('axios');
2
+ const https = require('https');
2
3
  const crypto = require('crypto');
3
4
  const fs = require('fs-extra');
4
5
  const _ = require('lodash');
@@ -36,6 +37,15 @@ const DBGATE_CLOUD_URL = process.env.LOCAL_DBGATE_CLOUD
36
37
  ? 'https://cloud.dbgate.udolni.net'
37
38
  : 'https://cloud.dbgate.io';
38
39
 
40
+
41
+ const DBGATE_PUBLIC_CLOUD_URL =
42
+ DBGATE_CLOUD_URL === 'https://cloud.dbgate.io' ? 'https://api.dbgate.cloud' : DBGATE_CLOUD_URL;
43
+
44
+ const stageAxiosConfig =
45
+ !process.env.PROD_DBGATE_CLOUD && (process.env.DEVWEB || process.env.DEVMODE)
46
+ ? { httpsAgent: new https.Agent({ rejectUnauthorized: false }) }
47
+ : {};
48
+
39
49
  async function createDbGateIdentitySession(client, redirectUri) {
40
50
  const resp = await axios.default.post(
41
51
  `${DBGATE_IDENTITY_URL}/api/create-session`,
@@ -48,6 +58,7 @@ async function createDbGateIdentitySession(client, redirectUri) {
48
58
  ...getLicenseHttpHeaders(),
49
59
  'Content-Type': 'application/json',
50
60
  },
61
+ ...stageAxiosConfig,
51
62
  }
52
63
  );
53
64
  return {
@@ -70,6 +81,7 @@ function startCloudTokenChecking(sid, callback) {
70
81
  headers: {
71
82
  ...getLicenseHttpHeaders(),
72
83
  },
84
+ ...stageAxiosConfig,
73
85
  });
74
86
  // console.log('CHECK RESP:', resp.data);
75
87
 
@@ -88,6 +100,7 @@ async function readCloudTokenHolder(sid) {
88
100
  headers: {
89
101
  ...getLicenseHttpHeaders(),
90
102
  },
103
+ ...stageAxiosConfig,
91
104
  });
92
105
  if (resp.data?.email) {
93
106
  return resp.data;
@@ -103,6 +116,7 @@ async function readCloudTestTokenHolder(email) {
103
116
  headers: {
104
117
  ...getLicenseHttpHeaders(),
105
118
  },
119
+ ...stageAxiosConfig,
106
120
  }
107
121
  );
108
122
  if (resp.data?.email) {
@@ -210,9 +224,9 @@ async function updateCloudFiles(isRefresh, language) {
210
224
  logger.info({ tags, lastCheckedTm }, 'DBGM-00082 Downloading cloud files');
211
225
 
212
226
  const resp = await axios.default.get(
213
- `${DBGATE_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${
227
+ `${DBGATE_PUBLIC_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${
214
228
  isRefresh ? 1 : 0
215
- }`,
229
+ }`,
216
230
  {
217
231
  headers: {
218
232
  ...getLicenseHttpHeaders(),
@@ -220,6 +234,7 @@ async function updateCloudFiles(isRefresh, language) {
220
234
  'x-app-version': currentVersion.version,
221
235
  'x-app-language': language || 'en',
222
236
  },
237
+ ...stageAxiosConfig,
223
238
  }
224
239
  );
225
240
 
@@ -254,10 +269,11 @@ async function getPublicCloudFiles() {
254
269
  }
255
270
 
256
271
  async function getPublicFileData(path) {
257
- const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/public/${path}`, {
272
+ const resp = await axios.default.get(`${DBGATE_PUBLIC_CLOUD_URL}/public/${path}`, {
258
273
  headers: {
259
274
  ...getLicenseHttpHeaders(),
260
275
  },
276
+ ...stageAxiosConfig,
261
277
  });
262
278
  return resp.data;
263
279
  }
@@ -289,6 +305,7 @@ async function updatePremiumPromoWidget(language) {
289
305
  'x-app-version': currentVersion.version,
290
306
  'x-app-language': language || 'en',
291
307
  },
308
+ ...stageAxiosConfig,
292
309
  }
293
310
  );
294
311
 
@@ -336,6 +353,7 @@ async function callCloudApiGet(endpoint, signinHolder = null, additionalHeaders
336
353
  ...additionalHeaders,
337
354
  },
338
355
  validateStatus: status => status < 500,
356
+ ...stageAxiosConfig,
339
357
  });
340
358
  const { errorMessage, isLicenseLimit, limitedLicenseLimits } = resp.data;
341
359
  if (errorMessage) {
@@ -374,6 +392,7 @@ async function callCloudApiPost(endpoint, body, signinHolder = null) {
374
392
  ...signinHeaders,
375
393
  },
376
394
  validateStatus: status => status < 500,
395
+ ...stageAxiosConfig,
377
396
  });
378
397
  const { errorMessage, isLicenseLimit, limitedLicenseLimits } = resp.data;
379
398
  if (errorMessage) {
@@ -472,7 +491,7 @@ function removeCloudCachedConnection(folid, cntid) {
472
491
 
473
492
  async function getPublicIpInfo() {
474
493
  try {
475
- const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/ipinfo`);
494
+ const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/ipinfo`, stageAxiosConfig);
476
495
  if (!resp.data?.ip) {
477
496
  return { ip: 'unknown-ip' };
478
497
  }
@@ -488,12 +507,15 @@ async function getPromoWidgetData() {
488
507
  }
489
508
 
490
509
  async function getPromoWidgetPreview(campaign, variant) {
491
- const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/premium-promo-widget-preview/${campaign}/${variant}`);
510
+ const resp = await axios.default.get(
511
+ `${DBGATE_CLOUD_URL}/premium-promo-widget-preview/${campaign}/${variant}`,
512
+ stageAxiosConfig
513
+ );
492
514
  return resp.data;
493
515
  }
494
516
 
495
517
  async function getPromoWidgetList() {
496
- const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/promo-widget-list`);
518
+ const resp = await axios.default.get(`${DBGATE_CLOUD_URL}/promo-widget-list`, stageAxiosConfig);
497
519
  return resp.data;
498
520
  }
499
521