@itentialopensource/adapter-selector_ai 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintignore +5 -0
- package/.eslintrc.js +19 -0
- package/.jshintrc +3 -0
- package/AUTH.md +39 -0
- package/BROKER.md +211 -0
- package/CALLS.md +405 -0
- package/CODE_OF_CONDUCT.md +43 -0
- package/CONTRIBUTING.md +13 -0
- package/ENHANCE.md +69 -0
- package/LICENSE +201 -0
- package/PROPERTIES.md +661 -0
- package/README.md +344 -0
- package/SUMMARY.md +9 -0
- package/SYSTEMINFO.md +14 -0
- package/TAB1.md +8 -0
- package/TAB2.md +314 -0
- package/TROUBLESHOOT.md +56 -0
- package/UTILITIES.md +473 -0
- package/adapter.js +4039 -0
- package/adapterBase.js +1488 -0
- package/entities/.generic/action.json +214 -0
- package/entities/.generic/schema.json +28 -0
- package/entities/.system/action.json +50 -0
- package/entities/.system/mockdatafiles/getToken-default.json +3 -0
- package/entities/.system/mockdatafiles/healthcheck-default.json +3 -0
- package/entities/.system/schema.json +19 -0
- package/entities/.system/schemaTokenReq.json +53 -0
- package/entities/.system/schemaTokenResp.json +53 -0
- package/entities/InventorySchemaCreation/action.json +24 -0
- package/entities/InventorySchemaCreation/schema.json +19 -0
- package/entities/InventorySchemaDelete/action.json +24 -0
- package/entities/InventorySchemaDelete/schema.json +19 -0
- package/entities/InventorySchemaDeleteType/action.json +24 -0
- package/entities/InventorySchemaDeleteType/schema.json +19 -0
- package/entities/InventorySchemaFileDownload/action.json +24 -0
- package/entities/InventorySchemaFileDownload/schema.json +19 -0
- package/entities/InventorySchemaFileUpload/action.json +24 -0
- package/entities/InventorySchemaFileUpload/schema.json +19 -0
- package/entities/InventorySchemaUpdate/action.json +24 -0
- package/entities/InventorySchemaUpdate/schema.json +19 -0
- package/entities/InventoryV2SchemaDeleteAll/action.json +24 -0
- package/entities/InventoryV2SchemaDeleteAll/schema.json +19 -0
- package/entities/InventoryV2SchemaDeleteType/action.json +24 -0
- package/entities/InventoryV2SchemaDeleteType/schema.json +19 -0
- package/entities/InventoryV2SchemaUpdate/action.json +24 -0
- package/entities/InventoryV2SchemaUpdate/schema.json +19 -0
- package/entities/InventoryV2SchemaUpload/action.json +24 -0
- package/entities/InventoryV2SchemaUpload/schema.json +19 -0
- package/entities/MetastoreInventoryBulkDataUpload/action.json +24 -0
- package/entities/MetastoreInventoryBulkDataUpload/schema.json +19 -0
- package/entities/MetastoreInventoryCSVDataUpload/action.json +24 -0
- package/entities/MetastoreInventoryCSVDataUpload/schema.json +19 -0
- package/entities/MetastoreInventoryCsvFileExport/action.json +25 -0
- package/entities/MetastoreInventoryCsvFileExport/schema.json +19 -0
- package/entities/MetastoreInventoryDataDelete/action.json +24 -0
- package/entities/MetastoreInventoryDataDelete/schema.json +19 -0
- package/entities/MetastoreInventoryDataDownload/action.json +46 -0
- package/entities/MetastoreInventoryDataDownload/schema.json +20 -0
- package/entities/MetastoreInventoryDataEdit/action.json +24 -0
- package/entities/MetastoreInventoryDataEdit/schema.json +19 -0
- package/entities/MetastoreInventoryDataUpload/action.json +24 -0
- package/entities/MetastoreInventoryDataUpload/schema.json +19 -0
- package/entities/MetastoreInventoryFilesExport/action.json +24 -0
- package/entities/MetastoreInventoryFilesExport/schema.json +19 -0
- package/entities/MetastoreInventoryFilesImport/action.json +24 -0
- package/entities/MetastoreInventoryFilesImport/schema.json +30 -0
- package/entities/MetastoreInventoryItems/action.json +25 -0
- package/entities/MetastoreInventoryItems/schema.json +19 -0
- package/entities/MetastoreInventoryNameDelete/action.json +24 -0
- package/entities/MetastoreInventoryNameDelete/schema.json +19 -0
- package/entities/MetastoreInventoryNameUpdate/action.json +24 -0
- package/entities/MetastoreInventoryNameUpdate/schema.json +19 -0
- package/entities/MetastoreInventoryNameUpload/action.json +24 -0
- package/entities/MetastoreInventoryNameUpload/schema.json +19 -0
- package/entities/MetastoreInventoryNamesDownload/action.json +25 -0
- package/entities/MetastoreInventoryNamesDownload/schema.json +19 -0
- package/entities/MetastoreInventorySchemaGet/action.json +25 -0
- package/entities/MetastoreInventorySchemaGet/schema.json +19 -0
- package/entities/MetastoreInventorySystem/action.json +130 -0
- package/entities/MetastoreInventorySystem/schema.json +24 -0
- package/entities/MetastoreV2InventorySchemaGet/action.json +25 -0
- package/entities/MetastoreV2InventorySchemaGet/schema.json +19 -0
- package/error.json +190 -0
- package/metadata.json +58 -0
- package/package.json +77 -0
- package/pronghorn.json +2508 -0
- package/propertiesDecorators.json +14 -0
- package/propertiesSchema.json +1635 -0
- package/report/adapterInfo.json +10 -0
- package/report/auto-adapter-openapi.json +1330 -0
- package/report/creationReport.json +765 -0
- package/report/metastore-inventory-manager.yaml-OpenApi3Json.json +2366 -0
- package/sampleProperties.json +260 -0
- package/test/integration/adapterTestBasicGet.js +117 -0
- package/test/integration/adapterTestConnectivity.js +117 -0
- package/test/integration/adapterTestIntegration.js +1295 -0
- package/test/unit/adapterBaseTestUnit.js +1626 -0
- package/test/unit/adapterTestUnit.js +2288 -0
- package/utils/adapterInfo.js +156 -0
- package/utils/argParser.js +44 -0
- package/utils/checkMigrate.js +102 -0
- package/utils/entitiesToDB.js +190 -0
- package/utils/findPath.js +74 -0
- package/utils/logger.js +26 -0
- package/utils/methodDocumentor.js +273 -0
- package/utils/modify.js +153 -0
- package/utils/mongoDbConnection.js +79 -0
- package/utils/mongoUtils.js +162 -0
- package/utils/pre-commit.sh +32 -0
- package/utils/removeHooks.js +20 -0
- package/utils/setup.js +33 -0
- package/utils/taskMover.js +308 -0
- package/utils/tbScript.js +103 -0
- package/utils/tbUtils.js +347 -0
- package/utils/testRunner.js +298 -0
- package/utils/troubleshootingAdapter.js +177 -0
- package/utils/updateAdapterConfig.js +158 -0
|
@@ -0,0 +1,273 @@
|
|
|
1
|
+
/* eslint global-require:warn */
|
|
2
|
+
/* eslint import/no-dynamic-require:warn */
|
|
3
|
+
/* eslint no-param-reassign:warn */
|
|
4
|
+
|
|
5
|
+
const fs = require('fs-extra');
|
|
6
|
+
const acorn = require('acorn');
|
|
7
|
+
|
|
8
|
+
// Getting the base directory:
// __dirname is <adapter-root>/utils when this script lives in the utils
// folder, so strip the trailing '/utils' (6 characters) to get the root.
// NOTE(review): this uses a POSIX separator — on Windows __dirname would end
// with '\utils' and the check would not match; presumably the tooling only
// runs under POSIX shells (see utils/pre-commit.sh) — confirm.
let adaptdir = __dirname;
if (adaptdir.endsWith('/utils')) {
  adaptdir = adaptdir.substring(0, adaptdir.length - 6);
}
|
|
13
|
+
|
|
14
|
+
/**
 * Builds the per-method documentation record used to render a CALLS.md row.
 *
 * @param {String} funcName - method name.
 * @param {Array} funcArgs - printable parameter strings.
 * @param {String|undefined} entityPath - resolved entity path; when undefined
 *   an empty object is returned (nothing to document).
 * @param {String|undefined} description - summary pulled from pronghorn.json.
 * @param {String} workflow - 'Yes'/'No' workflow flag from pronghorn.json.
 * @returns {Object} record with method_signature, path, description, workflow.
 */
function createObjectForFunction(
  funcName,
  funcArgs,
  entityPath,
  description,
  workflow
) {
  // No entity path resolved -> nothing to document for this method.
  if (entityPath === undefined) {
    return {};
  }

  const record = {
    method_signature: `${funcName}(${funcArgs.join(', ')})`,
    path: entityPath
  };

  if (description === undefined) {
    // Method is absent from pronghorn.json - fall back to blanks.
    record.description = '';
    record.workflow = 'No';
  } else {
    record.description = description;
    record.workflow = workflow;
  }
  return record;
}
|
|
36
|
+
|
|
37
|
+
/**
 * Looks up the entity path for an action by reading the entity's action.json.
 *
 * @param {String|undefined} entity - entity directory name.
 * @param {String} funcName - action name to match inside action.json.
 * @returns {String|undefined} the entity path ('key:value' pairs joined with
 *   '<br />' when entitypath is an object), or undefined for generic/missing.
 */
function getPathFromEntity(entity, funcName) {
  // Generic (or missing) entities carry no action.json to inspect.
  if (entity === undefined || entity === '.generic') {
    return undefined;
  }

  let epath;
  // Access the action.json file for the certain entity to get the path
  const actionJSON = require(`${adaptdir}/entities/${entity}/action.json`);
  actionJSON.actions.forEach((action) => {
    if (action.name !== funcName) {
      return;
    }
    if (typeof action.entitypath === 'object') {
      // Multiple paths keyed by variant - render as 'key:path' pairs.
      epath = Object.keys(action.entitypath)
        .map((key) => `${key}:${action.entitypath[key]}`)
        .join(' <br /> ');
    } else {
      epath = action.entitypath;
    }
  });
  return epath;
}
|
|
62
|
+
|
|
63
|
+
/**
 * Recursively collects every CallExpression node in an AST subtree.
 *
 * @param {Object} statement - AST node to scan.
 * @param {Array} callList - accumulator; CallExpression nodes are pushed
 *   in pre-order (parent call before nested calls).
 */
function recurseCallExpressions(statement, callList) {
  if (statement.type === 'CallExpression') {
    callList.push(statement);
  }
  // Walk every object-valued property (arrays included) looking for calls.
  Object.values(statement).forEach((child) => {
    if (child !== null && typeof child === 'object') {
      recurseCallExpressions(child, callList);
    }
  });
}
|
|
73
|
+
|
|
74
|
+
/**
 * Parses adapter.js with acorn and, for every class method that (directly or
 * via one proxy helper) calls identifyRequest('<entity>', '<action>', ...),
 * resolves the entity path and pushes a documentation record to functionList.
 *
 * @param {String} filename - path to the adapter source file.
 * @param {Object} descriptionObj - method name -> description (from pronghorn.json).
 * @param {Object} workflowObj - method name -> 'Yes'/'No' (from pronghorn.json).
 * @param {Array} functionList - accumulator for createObjectForFunction records.
 */
function readFileUsingLib(filename, descriptionObj, workflowObj, functionList) {
  // read the file
  const aFile = fs.readFileSync(filename, 'utf8');
  // parsing the file to get the function and class declarations.
  const aFileFuncArgs = acorn.parse(aFile, { ecmaVersion: 2020 });

  // Name of the request helper to match; starts as 'identifyRequest' and is
  // replaced once a proxy method that forwards its first two params is found.
  let callName = 'identifyRequest';
  // Looping through all the declarations parsed:
  aFileFuncArgs.body.forEach((e) => {
    // Getting only the class declaration as it has our required functions.
    if (e.type === 'ClassDeclaration') {
      const methodDefinition = e.body;
      methodDefinition.body.forEach((method) => {
        // Getting method name and its params in the class.
        // NOTE(review): assumes each member is a MethodDefinition with an
        // Identifier key (computed keys/class fields would break this) — the
        // generated adapter.js presumably only contains plain methods.
        const funcName = method.key.name;
        const funcArgs = [];
        method.value.params.forEach((param) => {
          if (param.type === 'Identifier') {
            funcArgs.push(param.name);
          } else if (param.type === 'RestElement') {
            funcArgs.push(`...${param.argument.name}`);
          } else {
            // AssignmentPattern: render as 'name = defaultLiteral'.
            const args = `${param.left.name} = ${param.right.raw}`;
            funcArgs.push(args);
          }
        });

        // Getting the entity for the method:
        const callList = [];
        method.value.body.body.forEach((statement) => {
          recurseCallExpressions(statement, callList);
        });
        // Keep only calls whose callee property matches the current helper name.
        const requests = [];
        for (let i = 0; i < callList.length; i += 1) {
          if (callList[i].callee.property && callList[i].callee.property.name === callName) {
            requests.push(callList[i]);
          }
        }
        if (requests.length > 0) {
          // Only the first matching call is inspected.
          const expr = requests[0];
          if (expr.arguments.length < 2) {
            throw new Error(`Bad inputs in method ${funcName}`);
          }
          // NOTE(review): .value is read before the Literal type check below;
          // for Identifier arguments these are undefined, which the
          // getPathFromEntity(undefined, ...) call tolerates.
          const entity = expr.arguments[0].value;
          const actionName = expr.arguments[1].value;
          if (expr !== undefined && (expr.arguments[0].type !== 'Literal' || expr.arguments[1].type !== 'Literal')) {
            // Non-literal args: the method must be a proxy that forwards its
            // first two parameters verbatim to the helper.
            const param1 = method.value.params[0];
            const param2 = method.value.params[1];
            if (param1.type !== 'Identifier' || param2.type !== 'Identifier'
              || expr.arguments[0].type !== 'Identifier' || expr.arguments[1].type !== 'Identifier'
              || param1.name !== expr.arguments[0].name || param2.name !== expr.arguments[1].name) {
              throw new Error(`identifyRequest proxy method ${funcName} unknown format`);
            } else if (callName !== 'identifyRequest') {
              // Only one level of proxying is supported.
              throw new Error(`MethodDocumentor not yet programmed to handle multiple helper methods: 1) ${callName}, 2) ${funcName}`);
            }
            // From now on match calls to the proxy instead of identifyRequest.
            callName = funcName;
          }
          const entityPath = getPathFromEntity(entity, actionName);

          // Creating and storing the object for the method.
          if (entityPath !== undefined) {
            functionList.push(
              createObjectForFunction(
                funcName,
                funcArgs,
                entityPath,
                descriptionObj[funcName],
                workflowObj[funcName]
              )
            );
          }
        }
      });
    }
  });
}
|
|
150
|
+
|
|
151
|
+
/**
 * Indexes method descriptions and workflow flags from a pronghorn-style JSON.
 *
 * @param {String} filename - path to pronghorn.json (loaded via require).
 * @param {Object} descriptionObj - mutated: name -> summary (or description).
 * @param {Object} workflowObj - mutated: name -> 'Yes' when task is truthy.
 */
function readJSONFile(filename, descriptionObj, workflowObj) {
  // Accessing the JSON file.
  const phJSON = require(filename);
  // Index every declared method by name.
  phJSON.methods.forEach((entry) => {
    const funcName = entry.name;
    // Prefer the short summary; fall back to the long description.
    descriptionObj[funcName] = entry.summary ? entry.summary : entry.description;
    workflowObj[funcName] = entry.task ? 'Yes' : 'No';
  });
}
|
|
163
|
+
|
|
164
|
+
/**
 * Rewrites the method table in CALLS.md: strips the existing data rows and
 * appends one <tr> per record in functionList, preserving the trailing
 * '</table>' / '<br>' / blank lines.
 *
 * @param {String} filename - path to CALLS.md.
 * @param {Array} functionList - records from createObjectForFunction
 *   ({ method_signature, description, path, workflow }).
 */
function readMDFile(filename, functionList) {
  // Reading in the .md file and creating an array with each line as an element.
  const mdFile = fs.readFileSync(filename, 'utf-8');
  const fileSplit = mdFile.split('\n');
  // Trailing lines (blanks, <br>, </table>) removed now and re-appended after
  // the new rows are inserted.
  const linesToAddLater = [];
  let index = fileSplit.length - 1;

  // Removing all the blank lines at the end of the file.
  while (fileSplit[index] === '') {
    linesToAddLater.push(fileSplit.pop());
    index -= 1;
  }

  // The file is expected to end with '</table>' followed by '<br>'; anything
  // else means the table is corrupted, so refuse to modify the file.
  if (fileSplit[index] === '<br>' || fileSplit[index - 1] === '</table>') {
    // Storing <br> and </table> to add later.
    linesToAddLater.push(fileSplit.pop());
    linesToAddLater.push(fileSplit.pop());
    index -= 2;
  } else {
    console.log('The file has bad content at the end.');
    return;
  }

  // Removing all the lines until the header tags are reached.
  while (!fileSplit[index].includes('<th')) {
    fileSplit.pop();
    index -= 1;
  }
  // Adding </tr> for the header row, because it got removed in the above loop.
  fileSplit.push('    </tr>');

  // Creating the tags for each method to be appended to the file.
  const tdBeginTag = '      <td style="padding:15px">';
  const tdEndTag = '</td>';

  functionList.forEach((func) => {
    const signCommand = `${tdBeginTag}${func.method_signature}${tdEndTag}`;
    const descCommand = `${tdBeginTag}${func.description}${tdEndTag}`;
    const pathCommand = `${tdBeginTag}${func.path}${tdEndTag}`;
    const workflowCommand = `${tdBeginTag}${func.workflow}${tdEndTag}`;
    fileSplit.push('    <tr>');
    fileSplit.push(signCommand);
    fileSplit.push(descCommand);
    fileSplit.push(pathCommand);
    fileSplit.push(workflowCommand);
    fileSplit.push('    </tr>');
  });

  // Adding </table> and <br> at the end of the file to complete the table and the file.
  while (linesToAddLater.length > 0) {
    fileSplit.push(linesToAddLater.pop());
  }

  // Writing all the content back into the file.
  fs.writeFileSync(filename, fileSplit.join('\n'), {
    encoding: 'utf-8',
    flag: 'w'
  });
}
|
|
237
|
+
|
|
238
|
+
/**
 * Entry point: gathers method descriptions from pronghorn.json, method
 * signatures and entity paths from adapter.js, and regenerates the method
 * table in CALLS.md. Logs and returns early when a required file is missing.
 */
function getFileInfo() {
  // If files don't exist:
  // BUG FIX: the message previously said 'utils/adapter.js', but the check is
  // against adapter.js in the adapter root (adaptdir has '/utils' stripped).
  if (!fs.existsSync(`${adaptdir}/adapter.js`)) {
    console.log('Missing - adapter.js');
    return;
  }
  if (!fs.existsSync(`${adaptdir}/pronghorn.json`)) {
    console.log('Missing - pronghorn.json');
    return;
  }
  if (!fs.existsSync(`${adaptdir}/CALLS.md`)) {
    console.log('Missing - CALLS.md');
    return;
  }

  const descriptionObj = {};
  const workflowObj = {};

  // Get the method descriptions and the workflow values from pronghorn.json file.
  readJSONFile(`${adaptdir}/pronghorn.json`, descriptionObj, workflowObj);

  // Get the method signature, entity path and create an object that contains all the info regarding
  // the method and push it to the functionList array.
  const functionList = [];
  readFileUsingLib(
    `${adaptdir}/adapter.js`,
    descriptionObj,
    workflowObj,
    functionList
  );

  // Rewrite the CALLS.md method table from the collected records.
  readMDFile(`${adaptdir}/CALLS.md`, functionList);
}

getFileInfo();
|
package/utils/modify.js
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
const { execSync } = require('child_process');
|
|
2
|
+
const fs = require('fs-extra');
|
|
3
|
+
const rls = require('readline-sync');
|
|
4
|
+
const { existsSync } = require('fs-extra');
|
|
5
|
+
|
|
6
|
+
/**
 * @summary Creates a backup zip file of current adapter
 *
 * The archive is consumed by revertMod(), which deletes everything, unzips
 * it, and reinstalls dependencies with npm install.
 *
 * @function backup
 */
function backup() {
  // zip all files except node_modules (revertMod reinstalls dependencies, so
  // archiving node_modules only bloated the zip and slowed the backup).
  // BUG FIX: the command previously zipped the whole tree despite the comment;
  // package-lock.json is intentionally still included because revertMod runs
  // 'rm package-lock.json' and would fail if the file were absent after unzip.
  const backupCmd = 'zip -r previousVersion.zip . -x "node_modules/*"';
  execSync(backupCmd, { encoding: 'utf-8', maxBuffer: 1024 * 1024 * 2 });
}
|
|
16
|
+
|
|
17
|
+
/**
 * @summary Archives previous modifications and removes the modification package
 *
 * Moves the current adapter_modifications content into a timestamped folder
 * under adapter_modifications/archive and deletes the consumed zip.
 *
 * @function archiveMod
 * @param {String} modType - update(UPD) or migrate(MIG)
 */
function archiveMod(modType) {
  // Ensure the archive directory exists before moving anything into it.
  if (!existsSync('./adapter_modifications/archive')) {
    execSync('mkdir ./adapter_modifications/archive');
  }
  const zipFile = modType === 'UPD' ? 'updatePackage.zip' : 'migrationPackage.zip';
  // Timestamped folder name, e.g. UPD-2024-01-01T00:00:00.000Z
  const archiveName = `${modType}-${new Date().toISOString()}`;
  execSync(`mkdir adapter_modifications/archive/${archiveName}`);
  // Shuffle the archive dir out of the way, sweep the modifications into the
  // new timestamped folder, move the archive back, and drop the package zip.
  const steps = [
    'mv adapter_modifications/archive .',
    `mv adapter_modifications/* archive/${archiveName}`,
    'mv archive adapter_modifications',
    `rm ${zipFile}`
  ];
  execSync(steps.join(' && '), { encoding: 'utf-8' });
}
|
|
37
|
+
|
|
38
|
+
/**
 * @summary Reverts modifications using backup zip file
 *
 * Deletes everything except previousVersion.zip, restores the backup, and
 * reinstalls dependencies from scratch.
 *
 * @function revertMod
 */
function revertMod() {
  // remove all files except previousVersion
  fs.readdirSync('./')
    .filter((entry) => entry !== 'previousVersion.zip')
    .forEach((entry) => fs.removeSync(entry));
  // unzip previousVersion, reinstall dependencies and delete zipfile
  execSync('unzip -o previousVersion.zip && rm -rf node_modules && rm package-lock.json && npm install', { maxBuffer: 1024 * 1024 * 2 });
  execSync('rm previousVersion.zip');
  console.log('Changes have been reverted');
}
|
|
56
|
+
|
|
57
|
+
/**
 * @summary Handle migration logic
 *
 * Backs up the adapter, applies the migration package, offers to revert when
 * lint/tests fail, reinstalls dependencies, and archives the package.
 */
function handleMigration() {
  // The migration package must be present before anything is touched.
  if (!existsSync('migrationPackage.zip')) {
    throw new Error('Migration Package not found. Download and place migrationPackage in the adapter root directory');
  }

  // Snapshot the current adapter so the migration can be undone.
  backup();
  console.log('Migrating adapter and running tests...');
  const migrateCmd = [
    'unzip -o migrationPackage.zip',
    'cd adapter_modifications',
    'node migrate'
  ].join(' && ');
  const migrateOutput = execSync(migrateCmd, { encoding: 'utf-8' });
  console.log(migrateOutput);

  // The migrate script reports lint/test failures in its output.
  const failed = ['Lint exited with code 1', 'Tests exited with code 1']
    .some((marker) => migrateOutput.includes(marker));
  if (failed) {
    if (rls.keyInYN('Adapter failed tests or lint after migrating. Would you like to revert the changes?')) {
      console.log('Reverting changes...');
      revertMod();
      throw new Error('Adapter failed tests or lint after migrating. Changes reverted');
    }
    console.log('Adapter Migration will continue. If you want to revert the changes, run the command npm run adapter:revert');
  }

  console.log('Installing new dependencies..');
  const updatePackageOutput = execSync('rm -rf node_modules && rm package-lock.json && npm install', { encoding: 'utf-8' });
  console.log(updatePackageOutput);
  console.log('New dependencies installed');
  archiveMod('MIG');
}
|
|
89
|
+
|
|
90
|
+
/**
 * @summary Handle update logic
 *
 * Backs up the adapter, unzips the update package and runs its update script
 * once, offers to revert when lint/tests fail, then archives the package.
 */
function handleUpdate() {
  if (!existsSync('updatePackage.zip')) {
    throw new Error('Update Package not found. Download and place updateAdapter.zip in the adapter root directory');
  }

  backup();
  const updateCmd = 'unzip -o updatePackage.zip'
    + ' && cd adapter_modifications'
    + ' && node update.js updateFiles';
  // BUG FIX: the update command was previously executed twice (the first call
  // discarded its output), applying the file modifications a second time.
  // Run it once and capture the output.
  const updateOutput = execSync(updateCmd, { encoding: 'utf-8' });

  if (updateOutput.includes('Lint exited with code 1') || updateOutput.includes('Tests exited with code 1')) {
    if (rls.keyInYN('Adapter failed tests or lint after updating. Would you like to revert the changes?')) {
      console.log('Reverting changes...');
      revertMod();
      throw new Error('Adapter failed tests or lint after updating. Changes reverted');
    }
    console.log('Adapter Update will continue. If you want to revert the changes, run the command npm run adapter:revert');
  }

  console.log(updateOutput);
  console.log('Adapter Successfully Updated. Restart adapter in IAP to apply the changes');
  archiveMod('UPD');
}
|
|
118
|
+
|
|
119
|
+
/**
 * @summary Handle revert logic
 *
 * Restores the adapter from previousVersion.zip; fails when no backup exists.
 */
function handleRevert() {
  const hasBackup = existsSync('previousVersion.zip');
  if (!hasBackup) {
    throw new Error('Previous adapter version not found. There are no changes to revert');
  }
  revertMod();
}
|
|
128
|
+
|
|
129
|
+
/**
 * @summary Entrypoint for the script
 *
 * Dispatches on the first CLI argument: -m migrate, -u update, -r revert.
 * Throws on any other value.
 */
function main() {
  const flags = process.argv[2];

  if (flags === '-m') {
    return handleMigration();
  }
  if (flags === '-u') {
    return handleUpdate();
  }
  if (flags === '-r') {
    return handleRevert();
  }
  throw new Error('Invalid flag. Use -m for migrate, -u for update, or -r for revert.');
}
|
|
146
|
+
|
|
147
|
+
// Run the selected action; exit 0 on success and 1 on failure so the calling
// npm script / shell can detect the outcome.
try {
  main();
  process.exit(0);
} catch (error) {
  // Prefer the message, but fall back to the raw value for non-Error throws.
  console.error(error.message || error);
  process.exit(1);
}
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
// Set globals
|
|
2
|
+
/* global log */
|
|
3
|
+
|
|
4
|
+
const { MongoClient } = require('mongodb');
|
|
5
|
+
const MongoUtils = require('./mongoUtils');
|
|
6
|
+
|
|
7
|
+
class MongoDBConnection {
  /**
   * @summary Builds a MongoDB connection wrapper from adapter properties.
   *
   * @param {Object} properties - connection properties; either a full url or
   *   host/port/database fields (see MongoUtils.generateConnectionObj).
   */
  constructor(properties) {
    this.properties = properties;
    this.initialize(properties);
  }

  /**
   * @summary Derives this.url, this.dbName and this.options from properties.
   *
   * @param {Object} properties - may contain url, database, maxPoolSize,
   *   appname, plus tls/db_ssl blocks consumed by generateTlsSettings.
   */
  initialize(properties) {
    const {
      url, database, maxPoolSize, appname
    } = properties;

    // Handle URL first - if provided, it takes precedence
    if (url) {
      const urlObj = new URL(url);
      // Database name embedded in the URL path (leading '/' stripped).
      const urlDbName = urlObj.pathname.slice(1);
      // An explicit database property wins over the one in the URL.
      this.dbName = database || urlDbName;

      // Update URL if database name is different
      // NOTE(review): assumes the URL pathname setter normalizes the leading
      // '/' for mongodb:// (non-special) schemes — confirm.
      if (this.dbName !== urlDbName) {
        urlObj.pathname = `${this.dbName}`;
      }

      this.url = urlObj.toString();
    } else {
      // No URL: assemble one from the individual host/port/auth properties.
      const connectionObj = MongoUtils.generateConnectionObj(properties);
      this.url = MongoUtils.generateConnectionString(connectionObj);
      this.dbName = database;
    }

    // Set options using generateTlsSettings
    this.options = MongoUtils.generateTlsSettings(properties);

    // Add maxPoolSize if configured (driver accepts 1..65535).
    if (maxPoolSize > 0 && maxPoolSize <= 65535) {
      this.options.maxPoolSize = maxPoolSize;
    }

    // Add application name if provided
    // NOTE(review): the Node driver documents this option as 'appName'
    // (camelCase); confirm 'appname' is actually honored by the driver.
    if (appname) {
      this.options.appname = appname;
    }
  }

  /**
   * @summary Closes the active client connection, logging (not rethrowing)
   *   any failure. No-op when connect() has not been called.
   */
  async closeConnection() {
    if (this.connection && this.connection.close) {
      try {
        await this.connection.close();
      } catch (err) {
        log.error(`Failed to close MongoDB connection - ${err.message}`);
      }
    }
  }

  /**
   * @summary Connects to MongoDB, wires logging for client lifecycle events,
   *   and exposes the selected database as this.db.
   *
   * @returns {MongoDBConnection} this, with db and connection populated.
   * @throws {Error} with a resolved message when the initial connect fails.
   */
  async connect() {
    const client = new MongoClient(this.url, this.options);

    // Route client lifecycle events through the global adapter logger.
    client.on('serverHeartbeatSucceeded', (msg) => log.info(`Connection established and heartbeat succeeded - ${JSON.stringify(msg)}`));
    client.on('connectionClosed', (msg) => log.info(`Connection closed - ${JSON.stringify(msg)}`));
    client.on('error', (msg) => log.error(`Connection error - ${JSON.stringify(msg)}`));
    client.on('commandFailed', (msg) => log.error(`Command failed - ${JSON.stringify(msg)}`));
    client.on('serverHeartbeatFailed', (msg) => log.error(`Connection timeout - ${JSON.stringify(msg)}`));

    // Translate driver errors into user-friendly messages before rethrowing.
    await client.connect().catch((error) => {
      throw new Error(MongoUtils.resolveMongoError(error));
    });

    this.db = client.db(this.dbName);
    this.connection = client;
    return this;
  }
}
|
|
78
|
+
|
|
79
|
+
module.exports = MongoDBConnection;
|
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
const fs = require('fs-extra');
|
|
2
|
+
|
|
3
|
+
class MongoUtils {
  /**
   * Parses a MongoDB connection string and extracts its components.
   * @param {string} connectionString - The MongoDB connection string.
   * @returns {object} Parsed connection details.
   * @throws {Error} when the string is not a valid URL.
   */
  static parseConnectionString(connectionString) {
    try {
      const parsed = new URL(connectionString);
      // Collect query parameters into a plain object (last value wins).
      const options = {};
      parsed.searchParams.forEach((value, key) => {
        options[key] = value;
      });
      return {
        protocol: parsed.protocol.replace(':', ''),
        host: parsed.hostname,
        port: parsed.port || '27017',
        database: parsed.pathname.replace('/', ''),
        username: parsed.username || null,
        password: parsed.password || null,
        options
      };
    } catch (error) {
      throw new Error(`Invalid MongoDB URI: ${error.message}`);
    }
  }

  /**
   * Generates a MongoDB connection string from given properties.
   * @param {object} config - MongoDB connection properties.
   * @returns {string} A valid MongoDB connection string.
   */
  static generateConnectionString(config) {
    // SRV records use the mongodb+srv scheme and carry no explicit port.
    const scheme = config.addSrv ? 'mongodb+srv' : 'mongodb';

    // Credentials only when dbAuth is enabled and both values are present.
    let credentials = '';
    if (config.dbAuth && config.username && config.password) {
      credentials = `${encodeURIComponent(config.username)}:${encodeURIComponent(config.password)}@`;
    }

    const portPart = !config.addSrv && config.port ? `:${config.port}` : '';
    const dbPart = config.database ? `/${config.database}` : '';

    let optionsPart = '';
    if (config.options && Object.keys(config.options).length) {
      optionsPart = `?${new URLSearchParams(config.options).toString()}`;
    }

    return `${scheme}://${credentials}${config.host}${portPart}${dbPart}${optionsPart}`;
  }

  /**
   * Generates TLS settings for MongoDB connection.
   * @param {object} properties - Connection properties containing TLS configuration.
   * @returns {object} TLS settings object.
   */
  static generateTlsSettings(properties) {
    // Explicit tls block takes precedence over the adapter db_ssl format.
    if (properties.tls) {
      return {
        tls: properties.tls.enabled,
        tlsAllowInvalidCertificates: properties.tls.tlsAllowInvalidCertificates || false,
        tlsCAFile: properties.tls.tlsCAFile
      };
    }

    // Handle db_ssl properties (standard adapter format).
    const ssl = properties.db_ssl;
    if (!ssl) {
      return { tls: false };
    }

    const settings = {
      tls: ssl.enabled || false,
      tlsAllowInvalidCertificates: ssl.accept_invalid_cert || false
    };

    // Only attach a CA file when TLS is on and the file actually exists.
    if (ssl.enabled && ssl.ca_file && fs.existsSync(ssl.ca_file)) {
      settings.tlsCAFile = ssl.ca_file;
    }

    return settings;
  }

  /**
   * Generates a connection object from properties.
   * @param {object} properties - Connection properties.
   * @returns {object} Connection object with host, port, database, and credentials.
   * @throws {Error} when dbAuth is enabled without both username and password.
   */
  static generateConnectionObj(properties) {
    try {
      // Replica-set option only applies to non-SRV connections.
      let options;
      if (properties.replSet && !properties.addSrv) {
        options = { replicaSet: properties.replSet };
      }

      const connectionObj = {
        host: properties.host,
        port: properties.port,
        database: properties.database,
        addSrv: properties.addSrv || false,
        dbAuth: properties.dbAuth || false,
        options
      };

      // Only include credentials if dbAuth is true.
      if (properties.dbAuth) {
        if (!properties.username || !properties.password) {
          throw new Error('Both username and password are required when dbAuth is enabled');
        }
        connectionObj.username = properties.username;
        connectionObj.password = properties.password;
      }

      return connectionObj;
    } catch (error) {
      throw new Error(`Invalid MongoDB configuration: ${error.message}`);
    }
  }

  /**
   * Resolves MongoDB error messages to user-friendly format.
   * @param {object} mongoError - The MongoDB error object.
   * @returns {string|object} User-friendly message for known codes, otherwise
   *   the original error.
   */
  static resolveMongoError(mongoError) {
    const code = mongoError ? mongoError.code : undefined;
    if (code === 13) {
      return 'User unauthorized to perform the requested action: ';
    }
    if (code === 18) {
      return `User Authentication failed. Username/Password combination is incorrect ${mongoError}`;
    }
    return mongoError;
  }

  /**
   * Helper function to validate and process MongoDB connection properties.
   * @param {Object} mongoProps - MongoDB connection properties to validate.
   * @returns {Object|undefined} - Validated and processed MongoDB properties or undefined if invalid.
   */
  static getAndValidateMongoProps(mongoProps) {
    if (!mongoProps) return undefined;

    const url = mongoProps.url && mongoProps.url.trim();

    // Host-based connection: require a host and a database name.
    if (!url) {
      const validHostConfig = (mongoProps.hostname || mongoProps.host) && mongoProps.database;
      return validHostConfig ? mongoProps : undefined;
    }

    // URL-based connection: append the database to the URL when missing.
    const props = { ...mongoProps };
    if (props.database && url.includes(`/${props.database}`)) {
      return props;
    }
    return { ...props, url: `${url}/${props.database}` };
  }
}
|
|
161
|
+
|
|
162
|
+
module.exports = MongoUtils;
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
#!/bin/sh
|
|
2
|
+
# @copyright Itential, LLC 2019
|
|
3
|
+
|
|
4
|
+
#exit on any failure in the pipeline
|
|
5
|
+
set -e
|
|
6
|
+
|
|
7
|
+
# --------------------------------------------------
|
|
8
|
+
# pre-commit
|
|
9
|
+
# --------------------------------------------------
|
|
10
|
+
# Contains the standard set of tasks to run before
|
|
11
|
+
# committing changes to the repo. If any tasks fail
|
|
12
|
+
# then the commit will be aborted.
|
|
13
|
+
# --------------------------------------------------
|
|
14
|
+
|
|
15
|
+
printf "%b" "Running pre-commit hooks...\\n"
|
|
16
|
+
|
|
17
|
+
# verify testing script is stubbed and no credentials
|
|
18
|
+
node utils/testRunner.js -r
|
|
19
|
+
|
|
20
|
+
# update the adapter information file
|
|
21
|
+
node utils/adapterInfo.js
|
|
22
|
+
node utils/methodDocumentor.js
|
|
23
|
+
git add CALLS.md report/adapterInfo.json
|
|
24
|
+
|
|
25
|
+
# security audit on the code
|
|
26
|
+
npm audit --registry=https://registry.npmjs.org --audit-level=moderate
|
|
27
|
+
|
|
28
|
+
# lint the code
|
|
29
|
+
npm run lint
|
|
30
|
+
|
|
31
|
+
# test the code
|
|
32
|
+
npm run test
|