dbgate-api 6.6.0 → 6.6.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/package.json +6 -6
  2. package/src/auth/authProvider.js +14 -2
  3. package/src/controllers/archive.js +1 -1
  4. package/src/controllers/auth.js +3 -2
  5. package/src/controllers/cloud.js +1 -1
  6. package/src/controllers/config.js +8 -5
  7. package/src/controllers/connections.js +12 -11
  8. package/src/controllers/databaseConnections.js +148 -83
  9. package/src/controllers/files.js +49 -19
  10. package/src/controllers/plugins.js +7 -4
  11. package/src/controllers/runners.js +10 -6
  12. package/src/controllers/scheduler.js +4 -3
  13. package/src/controllers/serverConnections.js +69 -14
  14. package/src/controllers/sessions.js +8 -5
  15. package/src/controllers/storage.js +0 -4
  16. package/src/controllers/uploads.js +2 -2
  17. package/src/currentVersion.js +2 -2
  18. package/src/index.js +36 -5
  19. package/src/main.js +59 -20
  20. package/src/proc/databaseConnectionProcess.js +45 -13
  21. package/src/proc/serverConnectionProcess.js +32 -6
  22. package/src/proc/sessionProcess.js +2 -2
  23. package/src/proc/sshForwardProcess.js +1 -1
  24. package/src/shell/archiveWriter.js +1 -1
  25. package/src/shell/copyStream.js +1 -1
  26. package/src/shell/executeQuery.js +3 -3
  27. package/src/shell/importDatabase.js +3 -3
  28. package/src/shell/jsonLinesReader.js +1 -1
  29. package/src/shell/jsonLinesWriter.js +1 -1
  30. package/src/shell/jsonReader.js +1 -1
  31. package/src/shell/jsonWriter.js +1 -1
  32. package/src/shell/loadDatabase.js +2 -2
  33. package/src/shell/modifyJsonLinesReader.js +1 -1
  34. package/src/shell/queryReader.js +1 -1
  35. package/src/shell/requirePlugin.js +6 -1
  36. package/src/shell/runScript.js +1 -1
  37. package/src/shell/sqlDataWriter.js +1 -1
  38. package/src/shell/tableReader.js +3 -3
  39. package/src/shell/tableWriter.js +1 -1
  40. package/src/shell/unzipDirectory.js +4 -4
  41. package/src/shell/zipDirectory.js +3 -3
  42. package/src/shell/zipJsonLinesData.js +3 -3
  43. package/src/storageModel.js +726 -105
  44. package/src/utility/DatastoreProxy.js +3 -3
  45. package/src/utility/JsonLinesDatastore.js +4 -2
  46. package/src/utility/appLogStore.js +119 -0
  47. package/src/utility/childProcessChecker.js +1 -1
  48. package/src/utility/cloudIntf.js +5 -5
  49. package/src/utility/connectUtility.js +1 -1
  50. package/src/utility/directories.js +2 -2
  51. package/src/utility/extractSingleFileFromZip.js +3 -3
  52. package/src/utility/hasPermission.js +286 -71
  53. package/src/utility/loadModelTransform.js +1 -1
  54. package/src/utility/sshTunnel.js +7 -7
  55. package/src/utility/sshTunnelProxy.js +1 -1
  56. package/src/utility/useController.js +3 -3
@@ -61,7 +61,7 @@ class DatastoreProxy {
61
61
  this.subprocess = null;
62
62
  });
63
63
  this.subprocess.on('error', err => {
64
- logger.error(extractErrorLogData(err), 'Error in data store subprocess');
64
+ logger.error(extractErrorLogData(err), 'DBGM-00167 Error in data store subprocess');
65
65
  this.subprocess = null;
66
66
  });
67
67
  this.subprocess.send({ msgtype: 'open', file: this.file });
@@ -77,7 +77,7 @@ class DatastoreProxy {
77
77
  try {
78
78
  this.subprocess.send({ msgtype: 'read', msgid, offset, limit });
79
79
  } catch (err) {
80
- logger.error(extractErrorLogData(err), 'Error getting rows');
80
+ logger.error(extractErrorLogData(err), 'DBGM-00168 Error getting rows');
81
81
  this.subprocess = null;
82
82
  }
83
83
  });
@@ -91,7 +91,7 @@ class DatastoreProxy {
91
91
  try {
92
92
  this.subprocess.send({ msgtype: 'notify', msgid });
93
93
  } catch (err) {
94
- logger.error(extractErrorLogData(err), 'Error notifying subprocess');
94
+ logger.error(extractErrorLogData(err), 'DBGM-00169 Error notifying subprocess');
95
95
  this.subprocess = null;
96
96
  }
97
97
  });
@@ -7,7 +7,6 @@ const AsyncLock = require('async-lock');
7
7
  const lock = new AsyncLock();
8
8
  const stableStringify = require('json-stable-stringify');
9
9
  const { evaluateCondition } = require('dbgate-sqltree');
10
- const requirePluginFunction = require('./requirePluginFunction');
11
10
  const esort = require('external-sorting');
12
11
  const { jsldir } = require('./directories');
13
12
  const LineReader = require('./LineReader');
@@ -23,7 +22,10 @@ class JsonLinesDatastore {
23
22
  this.notifyChangedCallback = null;
24
23
  this.currentFilter = null;
25
24
  this.currentSort = null;
26
- this.rowFormatter = requirePluginFunction(formatterFunction);
25
+ if (formatterFunction) {
26
+ const requirePluginFunction = require('./requirePluginFunction');
27
+ this.rowFormatter = requirePluginFunction(formatterFunction);
28
+ }
27
29
  this.sortedFiles = {};
28
30
  }
29
31
 
@@ -0,0 +1,119 @@
1
+ const fs = require('fs-extra');
2
+ const path = require('path');
3
+ const { logsdir } = require('./directories');
4
+ const { format, addDays, startOfDay } = require('date-fns');
5
+ const LineReader = require('./LineReader');
6
+ const socket = require('./socket');
7
+ const _ = require('lodash');
8
+
9
// Collects full paths of daily .ndjson log files whose date-stamped names
// fall between timeFrom and timeTo (the whole of timeTo's day is included).
// Relies on file names starting with a yyyy-MM-dd prefix, so plain string
// comparison against the date prefixes orders them chronologically.
async function getLogFiles(timeFrom, timeTo) {
  const logDirectory = logsdir();
  const allEntries = await fs.readdir(logDirectory);
  const lowerBound = format(timeFrom, 'yyyy-MM-dd');
  const upperBound = format(addDays(timeTo, 1), 'yyyy-MM-dd');
  const selected = allEntries.filter(
    entry => entry.endsWith('.ndjson') && entry >= lowerBound && entry < upperBound
  );
  selected.sort();
  return selected.map(entry => path.join(logDirectory, entry));
}
19
+
20
// Maximum number of entries kept in the in-memory recent-log buffer.
const RECENT_LOG_LIMIT = 1000;

// Recent-log buffer; stays null until initializeRecentLogProvider() runs.
let recentLogs = null;
// Holds entries pushed before initialization completes.
const beforeRecentLogs = [];

// Drops the oldest entries of recentLogs in place so that at most
// RECENT_LOG_LIMIT entries remain. Assumes recentLogs is a non-null array.
function adjustRecentLogs() {
  const excess = recentLogs.length - RECENT_LOG_LIMIT;
  if (excess > 0) {
    recentLogs.splice(0, excess);
  }
}
30
+
31
// Flattens a raw log entry into an export-friendly record: formatted date and
// time columns, delta time (ms) against the previously exported entry, and
// empty-string defaults for optional fields. The remaining properties of the
// entry are spread in between the fixed columns; explicit keys written after
// the spread always win over spread values.
function prepareEntryForExport(entry, lastEntry) {
  const timestamp = new Date(entry.time);
  const rest = _.omit(entry, ['time', 'msg', 'msgcode']);
  return {
    date: format(timestamp, 'yyyy-MM-dd'),
    time: format(timestamp, 'HH:mm:ss'),
    dtime: lastEntry ? entry.time - lastEntry.time : 0,
    msgcode: entry.msgcode || '',
    message: entry.msg || '',
    ...rest,
    conid: entry.conid || '',
    database: entry.database || '',
    engine: entry.engine || '',
    ts: entry.time,
  };
}
45
+
46
// Exports log entries whose `time` lies within [timeFrom, timeTo] from the
// daily ndjson log files into a single ndjson output file.
// @param timeFrom - inclusive lower bound (compared against entry.time)
// @param timeTo - inclusive upper bound
// @param fileName - destination file path
// @param prepareForExport - when truthy, rows are flattened via prepareEntryForExport
async function copyAppLogsIntoFile(timeFrom, timeTo, fileName, prepareForExport) {
  const writeStream = fs.createWriteStream(fileName);
  try {
    let lastEntry = null;
    for (const file of await getLogFiles(timeFrom, timeTo)) {
      const readStream = fs.createReadStream(file);
      const reader = new LineReader(readStream);
      do {
        const line = await reader.readLine();
        if (line == null) break; // EOF of this log file
        try {
          const logEntry = JSON.parse(line);
          if (logEntry.time >= timeFrom && logEntry.time <= timeTo) {
            writeStream.write(
              JSON.stringify(prepareForExport ? prepareEntryForExport(logEntry, lastEntry) : logEntry) + '\n'
            );
            lastEntry = logEntry;
          }
        } catch (e) {
          // Skip lines that are not valid JSON (e.g. partially written records).
          continue;
        }
      } while (true);
    }
  } finally {
    // BUGFIX: the write stream was never ended before, leaking the file handle
    // and risking unflushed buffered data; wait until everything is on disk.
    await new Promise((resolve, reject) => {
      writeStream.on('error', reject);
      writeStream.end(resolve);
    });
  }
}
70
+
71
// Loads today's log files into the in-memory recent-log buffer, keeping only
// the newest RECENT_LOG_LIMIT entries, then appends any entries that were
// pushed via pushToRecentLogs while initialization was still running.
async function initializeRecentLogProvider() {
  const logs = [];
  for (const file of await getLogFiles(startOfDay(new Date()), new Date())) {
    const fileStream = fs.createReadStream(file);
    const reader = new LineReader(fileStream);
    do {
      const line = await reader.readLine();
      if (line == null) break; // EOF of this log file
      try {
        const logEntry = JSON.parse(line);
        logs.push(logEntry);
        if (logs.length > RECENT_LOG_LIMIT) {
          logs.shift(); // keep only the newest RECENT_LOG_LIMIT entries
        }
      } catch (e) {
        // Tolerate malformed lines (e.g. partially written records).
        continue;
      }
    } while (true);
  }
  recentLogs = logs;
  recentLogs.push(...beforeRecentLogs);
  // BUGFIX: the merged buffer could exceed RECENT_LOG_LIMIT and the pre-init
  // buffer was retained forever; enforce the cap and release the merged entries.
  beforeRecentLogs.length = 0;
  adjustRecentLogs();
}
93
+
94
// Monotonic sequence number attached to every pushed log record.
let counter = 0;

// Appends a log record to the recent-log buffer — or to the pre-init buffer
// while initializeRecentLogProvider() has not finished — and broadcasts it to
// connected clients through the 'applog-event' socket event.
function pushToRecentLogs(msg) {
  const finalMsg = { ...msg, counter };
  counter += 1;
  if (!recentLogs) {
    beforeRecentLogs.push(finalMsg);
    return;
  }
  recentLogs.push(finalMsg);
  adjustRecentLogs();
  socket.emit('applog-event', finalMsg);
}
109
+
110
+ function getRecentAppLogRecords() {
111
+ return recentLogs ?? beforeRecentLogs;
112
+ }
113
+
114
+ module.exports = {
115
+ initializeRecentLogProvider,
116
+ getRecentAppLogRecords,
117
+ pushToRecentLogs,
118
+ copyAppLogsIntoFile,
119
+ };
@@ -12,7 +12,7 @@ function childProcessChecker() {
12
12
  // This will come once parent dies.
13
13
  // One way can be to check for error code ERR_IPC_CHANNEL_CLOSED
14
14
  // and call process.exit()
15
- logger.error(extractErrorLogData(err), 'parent died');
15
+ logger.error(extractErrorLogData(err), 'DBGM-00163 parent died');
16
16
  process.exit(1);
17
17
  }
18
18
  }, 1000);
@@ -77,7 +77,7 @@ function startCloudTokenChecking(sid, callback) {
77
77
  callback(resp.data);
78
78
  }
79
79
  } catch (err) {
80
- logger.error(extractErrorLogData(err), 'Error checking cloud token');
80
+ logger.error(extractErrorLogData(err), 'DBGM-00164 Error checking cloud token');
81
81
  }
82
82
  }, 500);
83
83
  }
@@ -125,7 +125,7 @@ async function getCloudUsedEngines() {
125
125
  const resp = await callCloudApiGet('content-engines');
126
126
  return resp || [];
127
127
  } catch (err) {
128
- logger.error(extractErrorLogData(err), 'Error getting cloud content list');
128
+ logger.error(extractErrorLogData(err), 'DBGM-00165 Error getting cloud content list');
129
129
  return [];
130
130
  }
131
131
  }
@@ -208,7 +208,7 @@ async function updateCloudFiles(isRefresh) {
208
208
  lastCheckedTm = _.max(cloudFiles.map(x => parseInt(x.modifiedTm)));
209
209
  }
210
210
 
211
- logger.info({ tags, lastCheckedTm }, 'Downloading cloud files');
211
+ logger.info({ tags, lastCheckedTm }, 'DBGM-00082 Downloading cloud files');
212
212
 
213
213
  const resp = await axios.default.get(
214
214
  `${DBGATE_CLOUD_URL}/public-cloud-updates?lastCheckedTm=${lastCheckedTm}&tags=${tags}&isRefresh=${
@@ -223,7 +223,7 @@ async function updateCloudFiles(isRefresh) {
223
223
  }
224
224
  );
225
225
 
226
- logger.info(`Downloaded ${resp.data.length} cloud files`);
226
+ logger.info(`DBGM-00083 Downloaded ${resp.data.length} cloud files`);
227
227
 
228
228
  const filesByPath = lastCheckedTm == 0 ? {} : _.keyBy(cloudFiles, 'path');
229
229
  for (const file of resp.data) {
@@ -269,7 +269,7 @@ async function refreshPublicFiles(isRefresh) {
269
269
  try {
270
270
  await updateCloudFiles(isRefresh);
271
271
  } catch (err) {
272
- logger.error(extractErrorLogData(err), 'Error updating cloud files');
272
+ logger.error(extractErrorLogData(err), 'DBGM-00166 Error updating cloud files');
273
273
  }
274
274
  }
275
275
 
@@ -132,7 +132,7 @@ async function connectUtility(driver, storedConnection, connectionMode, addition
132
132
 
133
133
  connection.ssl = await extractConnectionSslParams(connection);
134
134
 
135
- const conn = await driver.connect({ ...connection, ...additionalOptions });
135
+ const conn = await driver.connect({ conid: connectionLoaded?._id, ...connection, ...additionalOptions });
136
136
  return conn;
137
137
  }
138
138
 
@@ -14,11 +14,11 @@ const createDirectories = {};
14
14
  const ensureDirectory = (dir, clean) => {
15
15
  if (!createDirectories[dir]) {
16
16
  if (clean && fs.existsSync(dir) && !platformInfo.isForkedApi) {
17
- getLogger('directories').info(`Cleaning directory ${dir}`);
17
+ getLogger('directories').info(`DBGM-00170 Cleaning directory ${dir}`);
18
18
  cleanDirectory(dir, _.isNumber(clean) ? clean : null);
19
19
  }
20
20
  if (!fs.existsSync(dir)) {
21
- getLogger('directories').info(`Creating directory ${dir}`);
21
+ getLogger('directories').info(`DBGM-00171 Creating directory ${dir}`);
22
22
  fs.mkdirSync(dir);
23
23
  }
24
24
  createDirectories[dir] = true;
@@ -42,13 +42,13 @@ function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
42
42
 
43
43
  // When the file is finished writing, resolve
44
44
  writeStream.on('finish', () => {
45
- logger.info(`File "${fileInZip}" extracted to "${outputPath}".`);
45
+ logger.info(`DBGM-00088 File "${fileInZip}" extracted to "${outputPath}".`);
46
46
  resolve(true);
47
47
  });
48
48
 
49
49
  // Handle write errors
50
50
  writeStream.on('error', writeErr => {
51
- logger.error(extractErrorLogData(writeErr), `Error extracting "${fileInZip}" from "${zipPath}".`);
51
+ logger.error(extractErrorLogData(writeErr), `DBGM-00089 Error extracting "${fileInZip}" from "${zipPath}".`);
52
52
  reject(writeErr);
53
53
  });
54
54
  });
@@ -67,7 +67,7 @@ function extractSingleFileFromZip(zipPath, fileInZip, outputPath) {
67
67
 
68
68
  // Handle general errors
69
69
  zipFile.on('error', err => {
70
- logger.error(extractErrorLogData(err), `ZIP file error in ${zipPath}.`);
70
+ logger.error(extractErrorLogData(err), `DBGM-00172 ZIP file error in ${zipPath}.`);
71
71
  reject(err);
72
72
  });
73
73
  });
@@ -1,96 +1,303 @@
1
- const { compilePermissions, testPermission } = require('dbgate-tools');
1
+ const { compilePermissions, testPermission, getPermissionsCacheKey } = require('dbgate-tools');
2
2
  const _ = require('lodash');
3
3
  const { getAuthProviderFromReq } = require('../auth/authProvider');
4
4
 
5
5
  const cachedPermissions = {};
6
6
 
7
- function hasPermission(tested, req) {
7
+ async function loadPermissionsFromRequest(req) {
8
+ const authProvider = getAuthProviderFromReq(req);
8
9
  if (!req) {
9
- // request object not available, allow all
10
- return true;
10
+ return null;
11
11
  }
12
12
 
13
- const permissions = getAuthProviderFromReq(req).getCurrentPermissions(req);
13
+ const loadedPermissions = await authProvider.getCurrentPermissions(req);
14
+ return loadedPermissions;
15
+ }
14
16
 
15
- if (!cachedPermissions[permissions]) {
16
- cachedPermissions[permissions] = compilePermissions(permissions);
17
+ function hasPermission(tested, loadedPermissions) {
18
+ if (!loadedPermissions) {
19
+ // not available, allow all
20
+ return true;
17
21
  }
18
22
 
19
- return testPermission(tested, cachedPermissions[permissions]);
20
-
21
- // const { user } = (req && req.auth) || {};
22
- // const { login } = (process.env.OAUTH_PERMISSIONS && req && req.user) || {};
23
- // const key = user || login || '';
24
- // const logins = getLogins();
23
+ const permissionsKey = getPermissionsCacheKey(loadedPermissions);
24
+ if (!cachedPermissions[permissionsKey]) {
25
+ cachedPermissions[permissionsKey] = compilePermissions(loadedPermissions);
26
+ }
25
27
 
26
- // if (!userPermissions[key]) {
27
- // if (logins) {
28
- // const login = logins.find(x => x.login == user);
29
- // userPermissions[key] = compilePermissions(login ? login.permissions : null);
30
- // } else {
31
- // userPermissions[key] = compilePermissions(process.env.PERMISSIONS);
32
- // }
33
- // }
34
- // return testPermission(tested, userPermissions[key]);
28
+ return testPermission(tested, cachedPermissions[permissionsKey]);
35
29
  }
36
30
 
37
- // let loginsCache = null;
38
- // let loginsLoaded = false;
39
-
40
- // function getLogins() {
41
- // if (loginsLoaded) {
42
- // return loginsCache;
43
- // }
44
-
45
- // const res = [];
46
- // if (process.env.LOGIN && process.env.PASSWORD) {
47
- // res.push({
48
- // login: process.env.LOGIN,
49
- // password: process.env.PASSWORD,
50
- // permissions: process.env.PERMISSIONS,
51
- // });
52
- // }
53
- // if (process.env.LOGINS) {
54
- // const logins = _.compact(process.env.LOGINS.split(',').map(x => x.trim()));
55
- // for (const login of logins) {
56
- // const password = process.env[`LOGIN_PASSWORD_${login}`];
57
- // const permissions = process.env[`LOGIN_PERMISSIONS_${login}`];
58
- // if (password) {
59
- // res.push({
60
- // login,
61
- // password,
62
- // permissions,
63
- // });
64
- // }
65
- // }
66
- // } else if (process.env.OAUTH_PERMISSIONS) {
67
- // const login_permission_keys = Object.keys(process.env).filter(key => _.startsWith(key, 'LOGIN_PERMISSIONS_'));
68
- // for (const permissions_key of login_permission_keys) {
69
- // const login = permissions_key.replace('LOGIN_PERMISSIONS_', '');
70
- // const permissions = process.env[permissions_key];
71
- // userPermissions[login] = compilePermissions(permissions);
72
- // }
73
- // }
74
-
75
- // loginsCache = res.length > 0 ? res : null;
76
- // loginsLoaded = true;
77
- // return loginsCache;
78
- // }
79
-
80
- function connectionHasPermission(connection, req) {
31
+ function connectionHasPermission(connection, loadedPermissions) {
81
32
  if (!connection) {
82
33
  return true;
83
34
  }
84
35
  if (_.isString(connection)) {
85
- return hasPermission(`connections/${connection}`, req);
36
+ return hasPermission(`connections/${connection}`, loadedPermissions);
86
37
  } else {
87
- return hasPermission(`connections/${connection._id}`, req);
38
+ return hasPermission(`connections/${connection._id}`, loadedPermissions);
39
+ }
40
+ }
41
+
42
+ async function testConnectionPermission(connection, req, loadedPermissions) {
43
+ if (!loadedPermissions) {
44
+ loadedPermissions = await loadPermissionsFromRequest(req);
45
+ }
46
+ if (process.env.STORAGE_DATABASE) {
47
+ if (hasPermission(`all-connections`, loadedPermissions)) {
48
+ return;
49
+ }
50
+ const conid = _.isString(connection) ? connection : connection?._id;
51
+ if (hasPermission('internal-storage', loadedPermissions) && conid == '__storage') {
52
+ return;
53
+ }
54
+ const authProvider = getAuthProviderFromReq(req);
55
+ if (!req) {
56
+ return;
57
+ }
58
+ if (!(await authProvider.checkCurrentConnectionPermission(req, conid))) {
59
+ throw new Error('DBGM-00263 Connection permission not granted');
60
+ }
61
+ } else {
62
+ if (!connectionHasPermission(connection, loadedPermissions)) {
63
+ throw new Error('DBGM-00264 Connection permission not granted');
64
+ }
65
+ }
66
+ }
67
+
68
+ async function loadDatabasePermissionsFromRequest(req) {
69
+ const authProvider = getAuthProviderFromReq(req);
70
+ if (!req) {
71
+ return null;
72
+ }
73
+
74
+ const databasePermissions = await authProvider.getCurrentDatabasePermissions(req);
75
+ return databasePermissions;
76
+ }
77
+
78
+ async function loadTablePermissionsFromRequest(req) {
79
+ const authProvider = getAuthProviderFromReq(req);
80
+ if (!req) {
81
+ return null;
82
+ }
83
+
84
+ const tablePermissions = await authProvider.getCurrentTablePermissions(req);
85
+ return tablePermissions;
86
+ }
87
+
88
+ function matchDatabasePermissionRow(conid, database, permissionRow) {
89
+ if (permissionRow.connection_id) {
90
+ if (conid != permissionRow.connection_id) {
91
+ return false;
92
+ }
93
+ }
94
+ if (permissionRow.database_names_list) {
95
+ const items = permissionRow.database_names_list.split('\n');
96
+ if (!items.find(item => item.trim()?.toLowerCase() === database?.toLowerCase())) {
97
+ return false;
98
+ }
99
+ }
100
+ if (permissionRow.database_names_regex) {
101
+ const regex = new RegExp(permissionRow.database_names_regex, 'i');
102
+ if (!regex.test(database)) {
103
+ return false;
104
+ }
105
+ }
106
+ return true;
107
+ }
108
+
109
+ function matchTablePermissionRow(objectTypeField, schemaName, pureName, permissionRow) {
110
+ if (permissionRow.table_names_list) {
111
+ const items = permissionRow.table_names_list.split('\n');
112
+ if (!items.find(item => item.trim()?.toLowerCase() === pureName?.toLowerCase())) {
113
+ return false;
114
+ }
115
+ }
116
+ if (permissionRow.table_names_regex) {
117
+ const regex = new RegExp(permissionRow.table_names_regex, 'i');
118
+ if (!regex.test(pureName)) {
119
+ return false;
120
+ }
121
+ }
122
+ if (permissionRow.schema_names_list) {
123
+ const items = permissionRow.schema_names_list.split('\n');
124
+ if (!items.find(item => item.trim()?.toLowerCase() === schemaName?.toLowerCase())) {
125
+ return false;
126
+ }
127
+ }
128
+ if (permissionRow.schema_names_regex) {
129
+ const regex = new RegExp(permissionRow.schema_names_regex, 'i');
130
+ if (!regex.test(schemaName)) {
131
+ return false;
132
+ }
133
+ }
134
+
135
+ return true;
136
+ }
137
+
138
+ const DATABASE_ROLE_ID_NAMES = {
139
+ '-1': 'view',
140
+ '-2': 'read_content',
141
+ '-3': 'write_data',
142
+ '-4': 'run_script',
143
+ '-5': 'deny',
144
+ };
145
+
146
+ function getDatabaseRoleLevelIndex(roleName) {
147
+ if (!roleName) {
148
+ return 6;
149
+ }
150
+ if (roleName == 'run_script') {
151
+ return 5;
152
+ }
153
+ if (roleName == 'write_data') {
154
+ return 4;
155
+ }
156
+ if (roleName == 'read_content') {
157
+ return 3;
158
+ }
159
+ if (roleName == 'view') {
160
+ return 2;
161
+ }
162
+ if (roleName == 'deny') {
163
+ return 1;
164
+ }
165
+ return 6;
166
+ }
167
+
168
+ function getTablePermissionRoleLevelIndex(roleName) {
169
+ if (!roleName) {
170
+ return 6;
171
+ }
172
+ if (roleName == 'run_script') {
173
+ return 5;
174
+ }
175
+ if (roleName == 'create_update_delete') {
176
+ return 4;
177
+ }
178
+ if (roleName == 'update_only') {
179
+ return 3;
180
+ }
181
+ if (roleName == 'read') {
182
+ return 2;
183
+ }
184
+ if (roleName == 'deny') {
185
+ return 1;
88
186
  }
187
+ return 6;
89
188
  }
90
189
 
91
- function testConnectionPermission(connection, req) {
92
- if (!connectionHasPermission(connection, req)) {
93
- throw new Error('Connection permission not granted');
190
+ function getDatabasePermissionRole(conid, database, loadedDatabasePermissions) {
191
+ let res = 'deny';
192
+ for (const permissionRow of loadedDatabasePermissions) {
193
+ if (!matchDatabasePermissionRow(conid, database, permissionRow)) {
194
+ continue;
195
+ }
196
+ res = DATABASE_ROLE_ID_NAMES[permissionRow.database_permission_role_id];
197
+ }
198
+ return res;
199
+ }
200
+
201
+ const TABLE_ROLE_ID_NAMES = {
202
+ '-1': 'read',
203
+ '-2': 'update_only',
204
+ '-3': 'create_update_delete',
205
+ '-4': 'run_script',
206
+ '-5': 'deny',
207
+ };
208
+
209
+ const TABLE_SCOPE_ID_NAMES = {
210
+ '-1': 'all_objects',
211
+ '-2': 'tables',
212
+ '-3': 'views',
213
+ '-4': 'tables_views_collections',
214
+ '-5': 'procedures',
215
+ '-6': 'functions',
216
+ '-7': 'triggers',
217
+ '-8': 'sql_objects',
218
+ '-9': 'collections',
219
+ };
220
+
221
+ function getTablePermissionRole(
222
+ conid,
223
+ database,
224
+ objectTypeField,
225
+ schemaName,
226
+ pureName,
227
+ loadedTablePermissions,
228
+ databasePermissionRole
229
+ ) {
230
+ let res =
231
+ databasePermissionRole == 'read_content'
232
+ ? 'read'
233
+ : databasePermissionRole == 'write_data'
234
+ ? 'create_update_delete'
235
+ : databasePermissionRole == 'run_script'
236
+ ? 'run_script'
237
+ : 'deny';
238
+ for (const permissionRow of loadedTablePermissions) {
239
+ if (!matchDatabasePermissionRow(conid, database, permissionRow)) {
240
+ continue;
241
+ }
242
+ if (!matchTablePermissionRow(objectTypeField, schemaName, pureName, permissionRow)) {
243
+ continue;
244
+ }
245
+ const scope = TABLE_SCOPE_ID_NAMES[permissionRow.table_permission_scope_id];
246
+ switch (scope) {
247
+ case 'tables':
248
+ if (objectTypeField != 'tables') continue;
249
+ break;
250
+ case 'views':
251
+ if (objectTypeField != 'views') continue;
252
+ break;
253
+ case 'tables_views_collections':
254
+ if (objectTypeField != 'tables' && objectTypeField != 'views' && objectTypeField != 'collections') continue;
255
+ break;
256
+ case 'procedures':
257
+ if (objectTypeField != 'procedures') continue;
258
+ break;
259
+ case 'functions':
260
+ if (objectTypeField != 'functions') continue;
261
+ break;
262
+ case 'triggers':
263
+ if (objectTypeField != 'triggers') continue;
264
+ break;
265
+ case 'sql_objects':
266
+ if (objectTypeField != 'procedures' && objectTypeField != 'functions' && objectTypeField != 'triggers')
267
+ continue;
268
+ break;
269
+ case 'collections':
270
+ if (objectTypeField != 'collections') continue;
271
+ break;
272
+ }
273
+ res = TABLE_ROLE_ID_NAMES[permissionRow.table_permission_role_id];
274
+ }
275
+ return res;
276
+ }
277
+
278
+ async function testStandardPermission(permission, req, loadedPermissions) {
279
+ if (!loadedPermissions) {
280
+ loadedPermissions = await loadPermissionsFromRequest(req);
281
+ }
282
+ if (!hasPermission(permission, loadedPermissions)) {
283
+ throw new Error('DBGM-00265 Permission not granted');
284
+ }
285
+ }
286
+
287
+ async function testDatabaseRolePermission(conid, database, requiredRole, req) {
288
+ if (!process.env.STORAGE_DATABASE) {
289
+ return;
290
+ }
291
+ const loadedPermissions = await loadPermissionsFromRequest(req);
292
+ if (hasPermission(`all-databases`, loadedPermissions)) {
293
+ return;
294
+ }
295
+ const databasePermissions = await loadDatabasePermissionsFromRequest(req);
296
+ const role = getDatabasePermissionRole(conid, database, databasePermissions);
297
+ const requiredIndex = getDatabaseRoleLevelIndex(requiredRole);
298
+ const roleIndex = getDatabaseRoleLevelIndex(role);
299
+ if (roleIndex < requiredIndex) {
300
+ throw new Error('DBGM-00266 Permission not granted');
94
301
  }
95
302
  }
96
303
 
@@ -98,4 +305,12 @@ module.exports = {
98
305
  hasPermission,
99
306
  connectionHasPermission,
100
307
  testConnectionPermission,
308
+ loadPermissionsFromRequest,
309
+ loadDatabasePermissionsFromRequest,
310
+ loadTablePermissionsFromRequest,
311
+ getDatabasePermissionRole,
312
+ getTablePermissionRole,
313
+ testStandardPermission,
314
+ testDatabaseRolePermission,
315
+ getTablePermissionRoleLevelIndex,
101
316
  };
@@ -28,7 +28,7 @@ async function loadModelTransform(file) {
28
28
  }
29
29
  return null;
30
30
  } catch (err) {
31
- logger.error(extractErrorLogData(err), `Error loading model transform ${file}`);
31
+ logger.error(extractErrorLogData(err), `DBGM-00173 Error loading model transform ${file}`);
32
32
  return null;
33
33
  }
34
34
  }