@itentialopensource/adapter-microsoft_graph 1.0.2 → 1.1.1
- package/.eslintignore +0 -1
- package/.jshintrc +3 -0
- package/AUTH.md +20 -16
- package/CALLS.md +69 -28
- package/CHANGELOG.md +16 -0
- package/CONTRIBUTING.md +1 -160
- package/ENHANCE.md +2 -2
- package/README.md +31 -22
- package/SUMMARY.md +2 -2
- package/SYSTEMINFO.md +15 -5
- package/adapter.js +248 -330
- package/adapterBase.js +538 -873
- package/changelogs/changelog.md +6 -0
- package/entities/Mail/action.json +20 -0
- package/entities/Mail/schema.json +1 -0
- package/metadata.json +52 -0
- package/package.json +22 -25
- package/pronghorn.json +550 -144
- package/propertiesSchema.json +444 -40
- package/refs?service=git-upload-pack +0 -0
- package/report/adapter-openapi.json +14709 -0
- package/report/adapter-openapi.yaml +9744 -0
- package/report/adapterInfo.json +8 -8
- package/report/updateReport1691507370664.json +120 -0
- package/report/updateReport1692202407231.json +120 -0
- package/report/updateReport1694460353234.json +120 -0
- package/report/updateReport1695667793473.json +120 -0
- package/sampleProperties.json +63 -2
- package/test/integration/adapterTestBasicGet.js +1 -1
- package/test/integration/adapterTestConnectivity.js +91 -42
- package/test/integration/adapterTestIntegration.js +130 -2
- package/test/unit/adapterBaseTestUnit.js +395 -292
- package/test/unit/adapterTestUnit.js +306 -109
- package/utils/adapterInfo.js +1 -1
- package/utils/addAuth.js +1 -1
- package/utils/artifactize.js +1 -1
- package/utils/checkMigrate.js +1 -1
- package/utils/entitiesToDB.js +1 -0
- package/utils/findPath.js +1 -1
- package/utils/methodDocumentor.js +57 -22
- package/utils/modify.js +13 -15
- package/utils/packModificationScript.js +1 -1
- package/utils/taskMover.js +309 -0
- package/utils/tbScript.js +3 -10
- package/utils/tbUtils.js +2 -3
- package/utils/testRunner.js +1 -1
- package/utils/troubleshootingAdapter.js +1 -3
- package/workflows/README.md +0 -3
package/utils/methodDocumentor.js CHANGED
@@ -3,7 +3,7 @@
 /* eslint no-param-reassign:warn */
 
 const fs = require('fs-extra');
-const
+const acorn = require('acorn');
 
 // Getting the base directory:
 let adaptdir = __dirname;
@@ -44,18 +44,38 @@ function getPathFromEntity(entity, funcName) {
     const actionJSON = require(entityPath);
     actionJSON.actions.forEach((action) => {
       if (action.name === funcName) {
-
+        if (typeof action.entitypath === 'object') {
+          epath = '';
+          const keys = Object.keys(action.entitypath);
+          for (let k = 0; k < keys.length; k += 1) {
+            epath += `${keys[k]}:${action.entitypath[keys[k]]} <br /> `;
+          }
+          epath = epath.substring(0, epath.length - 8);
+        } else {
+          epath = action.entitypath;
+        }
       }
     });
   }
   return epath;
 }
 
+function recurseCallExpressions(statement, callList) {
+  // Recursively finds all CallExpressions in the syntax tree
+  if (statement.type === 'CallExpression') callList.push(statement);
+  const keys = Object.keys(statement);
+  for (let k = 0; k < keys.length; k += 1) {
+    if (typeof statement[keys[k]] === 'object' && statement[keys[k]] !== null) {
+      recurseCallExpressions(statement[keys[k]], callList);
+    }
+  }
+}
+
 function readFileUsingLib(filename, descriptionObj, workflowObj, functionList) {
   // read the file
   const aFile = fs.readFileSync(filename, 'utf8');
   // parsing the file to get the function and class declarations.
-  const aFileFuncArgs =
+  const aFileFuncArgs = acorn.parse(aFile, { ecmaVersion: 2020 });
 
   // Looping through all the declarations parsed:
   aFileFuncArgs.body.forEach((e) => {
@@ -76,25 +96,40 @@ function readFileUsingLib(filename, descriptionObj, workflowObj, functionList) {
       });
 
       // Getting the entity for the method:
-
-      method.value.body.body.forEach((
-
-        entity = statementType.block.body[0].argument.arguments[0].value;
-      }
+      const callList = [];
+      method.value.body.body.forEach((statement) => {
+        recurseCallExpressions(statement, callList);
       });
-      const
-
-
-
-
-
-
-
-
-
-
-
+      const requests = [];
+      for (let i = 0; i < callList.length; i += 1) {
+        if (callList[i].callee.property && callList[i].callee.property.name === 'identifyRequest') {
+          requests.push(callList[i]);
+        }
+      }
+      if (requests.length > 0) {
+        const expr = requests[0];
+        if (expr.arguments.length < 2) {
+          throw new Error(`Bad inputs in method ${funcName}`);
+        }
+        const entity = expr.arguments[0].value;
+        const actionName = expr.arguments[1].value;
+        if (expr !== undefined && (expr.arguments[0].type !== 'Literal' || expr.arguments[1].type !== 'Literal')) {
+          throw new Error(`Bad inputs in method ${funcName}`);
+        }
+        const entityPath = getPathFromEntity(entity, actionName);
+
+        // Creating and storing the object for the method.
+        if (entityPath !== undefined) {
+          functionList.push(
+            createObjectForFunction(
+              funcName,
+              funcArgs,
+              entityPath,
+              descriptionObj[funcName],
+              workflowObj[funcName]
+            )
+          );
+        }
      }
    });
 }
@@ -109,7 +144,7 @@ function readJSONFile(filename, descriptionObj, workflowObj) {
   methodArray.forEach((methodName) => {
     // Getting the method description and workflow:
     const funcName = methodName.name;
-    descriptionObj[funcName] = methodName.description;
+    descriptionObj[funcName] = methodName.summary ? methodName.summary : methodName.description;
     workflowObj[funcName] = methodName.task ? 'Yes' : 'No';
   });
 }
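In plain terms, the methodDocumentor.js change swaps the old hard-coded walk of each method body for an acorn parse plus a recursive CallExpression collector, and it documents only the `identifyRequest(entity, action, ...)` calls whose first two arguments are literals. A minimal standalone sketch of that idea (the sample adapter source and method below are hypothetical, used only to exercise the walker):

const acorn = require('acorn');

// Hypothetical adapter method body used only for illustration.
const sample = `
class Adapter {
  getMailFolders(userId, callback) {
    return this.requestHandlerInst.identifyRequest('Mail', 'getMailFolders', null, true);
  }
}
`;

// Same idea as recurseCallExpressions: depth-first walk that records every CallExpression node.
function collectCalls(node, calls) {
  if (node.type === 'CallExpression') calls.push(node);
  Object.keys(node).forEach((key) => {
    const child = node[key];
    if (Array.isArray(child)) {
      child.forEach((c) => { if (c && typeof c.type === 'string') collectCalls(c, calls); });
    } else if (child && typeof child.type === 'string') {
      collectCalls(child, calls);
    }
  });
}

const ast = acorn.parse(sample, { ecmaVersion: 2020 });
const calls = [];
collectCalls(ast, calls);

// Keep only identifyRequest(entity, action, ...) calls with literal arguments,
// mirroring how the documentor resolves the entity path for each method.
const requests = calls.filter((c) => c.callee.type === 'MemberExpression'
  && c.callee.property.name === 'identifyRequest'
  && c.arguments[0].type === 'Literal'
  && c.arguments[1].type === 'Literal');

requests.forEach((r) => console.log(r.arguments[0].value, r.arguments[1].value));
// -> Mail getMailFolders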
package/utils/modify.js CHANGED
@@ -1,7 +1,7 @@
+const { execSync } = require('child_process');
 const fs = require('fs-extra');
 const Ajv = require('ajv');
 const rls = require('readline-sync');
-const { execSync } = require('child_process');
 const { existsSync } = require('fs-extra');
 const { getAdapterConfig } = require('./tbUtils');
 const { name } = require('../package.json');
@@ -21,9 +21,7 @@ async function updateServiceItem() {
   const validate = ajv.compile(propertiesSchema);
   validate(currentProps);
   console.log('Updating Properties...');
-  await database.collection('service_configs').updateOne(
-    { model: name }, { $set: serviceItem }
-  );
+  await database.collection('service_configs').updateOne({ model: name }, { $set: serviceItem });
   console.log('Properties Updated');
 }
 
@@ -35,7 +33,7 @@ async function updateServiceItem() {
 function backup() {
   // zip all files except node_modules and package-lock
   const backupCmd = 'zip -r previousVersion.zip .';
-  execSync(backupCmd, { encoding: 'utf-8' });
+  execSync(backupCmd, { encoding: 'utf-8', maxBuffer: 1024 * 1024 * 2 });
 }
 
 /**
@@ -53,9 +51,9 @@ function archiveMod(modType) {
   const archiveName = `${modType}-${now.toISOString()}`;
   execSync(`mkdir adapter_modifications/archive/${archiveName}`);
   const archiveCmd = 'mv adapter_modifications/archive .'
-
-
-
+    + ` && mv adapter_modifications/* archive/${archiveName}`
+    + ' && mv archive adapter_modifications'
+    + ` && rm ${zipFile}`;
   execSync(archiveCmd, { encoding: 'utf-8' });
 }
 
@@ -73,7 +71,7 @@ function revertMod() {
     }
   });
   // // unzip previousVersion, reinstall dependencies and delete zipfile
-  execSync('unzip -o previousVersion.zip && rm -rf node_modules && rm package-lock.json && npm install');
+  execSync('unzip -o previousVersion.zip && rm -rf node_modules && rm package-lock.json && npm install', { maxBuffer: 1024 * 1024 * 2 });
   execSync('rm previousVersion.zip');
   console.log('Changes have been reverted');
 }
@@ -90,12 +88,12 @@ if (flags === '-m') {
   backup();
   console.log('Migrating adapter and running tests...');
   const migrateCmd = 'unzip -o migrationPackage.zip'
-
-
+    + ' && cd adapter_modifications'
+    + ' && node migrate';
   const migrateOutput = execSync(migrateCmd, { encoding: 'utf-8' });
   console.log(migrateOutput);
   if (migrateOutput.indexOf('Lint exited with code 1') >= 0
-
+    || migrateOutput.indexOf('Tests exited with code 1') >= 0) {
     if (rls.keyInYN('Adapter failed tests or lint after migrating. Would you like to revert the changes?')) {
       console.log('Reverting changes...');
       revertMod();
@@ -125,12 +123,12 @@ if (flags === '-u') {
   // Backup current adapter
   backup();
   const updateCmd = 'unzip -o updatePackage.zip'
-
-
+    + ' && cd adapter_modifications'
+    + ' && node update.js updateFiles';
   execSync(updateCmd, { encoding: 'utf-8' });
   const updateOutput = execSync(updateCmd, { encoding: 'utf-8' });
   if (updateOutput.indexOf('Lint exited with code 1') >= 0
-
+    || updateOutput.indexOf('Tests exited with code 1') >= 0) {
     if (rls.keyInYN('Adapter failed tests or lint after updating. Would you like to revert the changes?')) {
       console.log('Reverting changes...');
      revertMod();
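The recurring `maxBuffer: 1024 * 1024 * 2` option added in modify.js raises execSync's cap on captured child-process output, which keeps the zip/unzip steps from aborting when they print one line per file in a large adapter. A small sketch of the pattern (the command is a placeholder):

const { execSync } = require('child_process');

// Placeholder command; in modify.js this is the zip/unzip invocation.
const cmd = 'ls -la';

// Allow up to 2 MiB of captured output before execSync gives up on the child.
const output = execSync(cmd, { encoding: 'utf-8', maxBuffer: 1024 * 1024 * 2 });
console.log(`${output.length} characters of output captured`);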
package/utils/packModificationScript.js CHANGED
@@ -1,9 +1,9 @@
 #!/usr/bin/env node
 /* @copyright Itential, LLC 2019 */
 
-const fs = require('fs-extra');
 const path = require('path');
 const { spawnSync } = require('child_process');
+const fs = require('fs-extra');
 const { createBundle } = require('./artifactize');
 
 const nodeEntryPath = path.resolve('.');
package/utils/taskMover.js ADDED
@@ -0,0 +1,309 @@
+/* eslint-disable */
+const fs = require('fs');
+
+const blacklistTasks = [
+  'genericAdapterRequest',
+  'genericAdapterRequestNoBasePath',
+  'hasEntities',
+  'healthcheck'
+];
+
+const adapterBaseTasks = [
+  'getDevicesFiltered',
+  'isAlive',
+  'getConfig',
+  'getDevice',
+  'iapUpdateAdapterConfiguration',
+  'iapFindAdapterPath',
+  'iapSuspendAdapter',
+  'iapUnsuspendAdapter',
+  'iapGetAdapterQueue',
+  'iapTroubleshootAdapter',
+  'iapRunAdapterHealthcheck',
+  'iapRunAdapterConnectivity',
+  'iapRunAdapterBasicGet',
+  'iapMoveAdapterEntitiesToDB',
+  'getDevice',
+  'getDevicesFiltered',
+  'isAlive',
+  'getConfig',
+  'iapGetDeviceCount',
+  'iapRunAdapterLint',
+  'iapRunAdapterTests',
+  'iapGetAdapterInventory'
+];
+
+function updatePronghorn(tasks, original, updated) {
+  const originalFile = require(original);
+  const unusedMethods = [];
+  const usedMethods = originalFile.methods.filter((method) => {
+    if (tasks.includes(method.name)) {
+      unusedMethods.push(method);
+      return false;
+    }
+    return true;
+  });
+  //write used and unused to new files
+  let updatedFile;
+  if (!fs.existsSync(updated)) {
+    updatedFile = { ...originalFile, methods: [], src: 'adapter-inactive.js' };
+  } else {
+    updatedFile = require(updated);
+  }
+  updatedFile.methods = updatedFile.methods.concat(unusedMethods);
+  originalFile.methods = usedMethods;
+  fs.writeFileSync(updated, JSON.stringify(updatedFile, null, 2));
+  fs.writeFileSync(original, JSON.stringify(originalFile, null, 2));
+  return 'Done';
+}
+
+function flipTaskFlag(task, pronghornPath, value)
+{
+  const pronghorn = require(pronghornPath);
+  const index = pronghorn.methods.findIndex((method) => method.name === task);
+  pronghorn.methods[index] = { ...pronghorn.methods[index], task: value };
+  fs.writeFileSync(pronghornPath, JSON.stringify(pronghorn, null, 2));
+}
+
+//Return array of relevant paths given adapter directory
+function createPaths(currentAdapter) {
+  const paths = [];
+  const filePaths = [
+    'adapter.js',
+    'pronghorn.json',
+    'test/integration/adapterTestIntegration.js',
+    'test/unit/adapterTestUnit.js',
+    'adapter-inactive.js',
+    'pronghorn-inactive.json',
+  ];
+  filePaths.forEach((file) => {
+    paths.push(`${currentAdapter}/${file}`);
+  });
+  return paths;
+}
+
+function insert(str, index, value) {
+  return str.substr(0, index) + value + str.substr(index);
+}
+
+//modify adapter js
+//original - path to file containing tasks we want to remove
+// updated - path to file we want to move the tasks to
+function updateAdapterJs(tasks, original, updated, adapterDir) {
+  if (!fs.existsSync(original)) {
+    //could do this or just let the error ocurr lower down and catch in warpper
+    throw new Error(`Original file ${original} does not exist.`);
+  }
+  let originalFile = fs.readFileSync(original, 'utf8');
+  let updatedFile;
+  if (!fs.existsSync(updated)) {
+    const adapterExport = require(`${adapterDir}/pronghorn.json`).export;
+    updatedFile = `/* @copyright Itential, LLC 2019 */\n\n/* eslint import/no-dynamic-require: warn */\n/* eslint no-unused-vars: warn */\n/* global log */\n\nconst path = require('path');\n\nconst AdapterBaseCl = require(path.join(__dirname, 'adapterBase.js'));\n\nclass ${adapterExport}Inactive extends AdapterBaseCl {}\n`;
+    //To do handles backup files where og doesn't exist
+  } else {
+    updatedFile = fs.readFileSync(updated, 'utf8');
+  }
+
+  tasks.forEach((method) => {
+    //accounting for different js docs format
+    const comment = originalFile.indexOf(`* @function ${method}`);
+    const start = originalFile.slice(0, comment).lastIndexOf('/**');
+    if (start !== -1) {
+      //next comment block
+      const end = originalFile.indexOf('/**\n', start + 1);
+      let func = end === -1
+        ? originalFile.substring(start - 3, originalFile.lastIndexOf('}'))
+        : originalFile.substring(start, end);
+      originalFile = originalFile.replace(func, '');
+      func = '\n ' + func.trim() + '\n';
+      updatedFile = insert(updatedFile, updatedFile.lastIndexOf('}'), func);
+    } else {
+      console.log(`Task ${method} wasn't found in original file. Skipping.`);
+    }
+  });
+  fs.writeFileSync(original, originalFile, 'utf8');
+  fs.writeFileSync(updated, updatedFile, 'utf8');
+  return 'done';
+}
+
+//Update test file for when we deactivate a task
+function deactivateTest(adapterPath, testPath, tasks) {
+  let unitTest = fs.readFileSync(`${adapterPath}/${testPath}`, 'utf8');
+  tasks.forEach((task) => {
+    const searchStr = `describe('#${task}`;
+    unitTest = unitTest.replace(searchStr, `describe.skip('#${task}`);
+  });
+  fs.writeFileSync(`${adapterPath}/${testPath}`, unitTest, 'utf8');
+}
+
+//Update test file when we activate tasks
+function activateTest(adapterPath, testPath, tasks) {
+  let unitTest = fs.readFileSync(`${adapterPath}/${testPath}`, 'utf8');
+  //tasks ==> toMove
+  tasks.forEach((task) => {
+    const searchStr = `describe.skip('#${task}`;
+    unitTest = unitTest.replace(searchStr, `describe('#${task}`);
+  });
+  fs.writeFileSync(`${adapterPath}/${testPath}`, unitTest, 'utf8');
+}
+
+//backups are not actually being written back
+function rollbackChanges(adapterPath) {
+  const backups = fs.readdirSync(`${adapterPath}/temp`); //this is an array of file names not the full path
+  const filePaths = createPaths(adapterPath);
+  for (let i = 0; i < backups.length; i++) {
+    const file = fs.readFileSync(`${adapterPath}/temp/${backups[i]}`, 'utf8'); //make sure this is getting the file
+    const currentFile = filePaths.find((path) => {
+      const index = path.split('/').length - 1;
+      const fileName = path.split('/')[index];
+      return fileName === backups[i].replace('temp-', '');
+    }); //returns undefined if no match
+
+    if (currentFile) {
+      fs.writeFileSync(currentFile, file, 'utf8');
+    }
+  }
+  //inactive didn't exist before script
+  if (!backups.includes('temp-adapter-inactive.js')) {
+    fs.unlinkSync(`${adapterPath}/pronghorn-inactive.json`);
+    fs.unlinkSync(`${adapterPath}/adapter-inactive.js`);
+  }
+  deleteBackups(adapterPath);
+}
+
+function deleteBackups(adapterPath) {
+  fs.rmSync(`${adapterPath}/temp`, { recursive: true });
+}
+
+function activateTasks(adapterDir, tasks) {
+  const toDelete = [];
+  const backupFiles = [];
+  const filePaths = createPaths(adapterDir);
+  try {
+    //take backup of each file here
+    if (!fs.existsSync(`${adapterDir}/temp`)) {
+      fs.mkdirSync(`${adapterDir}/temp`);
+    }
+    filePaths.forEach((filePath) => {
+      if (fs.existsSync(filePath)) {
+        const index = filePath.split('/').length - 1;
+        const backupName = `temp-${filePath.split('/')[index]}`;
+        backupFiles.push(`${adapterDir}/temp/${backupName}`);
+        fs.copyFileSync(filePath, `${adapterDir}/temp/${backupName}`);
+      } else {
+        //File doesn't exist before script
+        toDelete.push(filePath);
+      }
+    });
+    tasks = tasks.filter((task) => {
+      if (adapterBaseTasks.includes(task)) {
+        flipTaskFlag(task, `${adapterDir}/pronghorn.json`, true);
+        return false;
+      } else {
+        return true;
+      }
+    });
+    updateAdapterJs(
+      tasks,
+      `${adapterDir}/adapter-inactive.js`,
+      `${adapterDir}/adapter.js`,
+      adapterDir
+    );
+    updatePronghorn(
+      tasks,
+      `${adapterDir}/pronghorn-inactive.json`,
+      `${adapterDir}/pronghorn.json`
+    );
+    activateTest(
+      adapterDir,
+      '/test/integration/adapterTestIntegration.js',
+      tasks
+    );
+    activateTest(adapterDir, '/test/unit/adapterTestUnit.js', tasks);
+    return 'success';
+  } catch (e) {
+    console.log(`Error: ${e} ocurred during execution. Rolling back changes.`);
+    for (let i = 0; i < backupFiles.length; i++) {
+      const file = fs.readFileSync(backupFiles[i], 'utf8');
+      fs.writeFileSync(filePaths[i], file, 'utf8');
+    }
+    toDelete.forEach((filePath) => {
+      if (fs.existsSync(filePath)) {
+        fs.unlinkSync(filePath);
+      }
+    });
+    deleteBackups(adapterDir);
+    process.exit(1);
+  }
+}
+
+//moving from adapter.js to adapter-inactive.js
+function deactivateTasks(adapterDir, tasks) {
+  const toDelete = [];
+  const backupFiles = [];
+  const filePaths = createPaths(adapterDir);
+  try {
+    //take backup of each file here
+    if (!fs.existsSync(`${adapterDir}/temp`)) {
+      fs.mkdirSync(`${adapterDir}/temp`);
+    }
+    filePaths.forEach((filePath) => {
+      if (fs.existsSync(filePath)) {
+        const index = filePath.split('/').length - 1;
+        const backupName = `temp-${filePath.split('/')[index]}`;
+        backupFiles.push(`${adapterDir}/temp/${backupName}`);
+        fs.copyFileSync(filePath, `${adapterDir}/temp/${backupName}`);
+      } else {
+        //File doesn't exist before script
+        toDelete.push(filePath);
+      }
+    });
+    //filter tasks for blacklisted tasks or IAP tasks
+    tasks = tasks.filter((task) => {
+      if (blacklistTasks.includes(task)) {
+        console.log(`${task} cannot be deactivated.`);
+        return false;
+      } else if (adapterBaseTasks.includes(task)) {
+        flipTaskFlag(task, `${adapterDir}/pronghorn.json`, false);
+        return false;
+      } else {
+        return true;
+      }
+    });
+    updateAdapterJs(
+      tasks,
+      `${adapterDir}/adapter.js`,
+      `${adapterDir}/adapter-inactive.js`,
+      adapterDir
+    );
+    updatePronghorn(
+      tasks,
+      `${adapterDir}/pronghorn.json`,
+      `${adapterDir}/pronghorn-inactive.json`
+    );
+    deactivateTest(
+      adapterDir,
+      '/test/integration/adapterTestIntegration.js',
+      tasks
+    );
+    deactivateTest(adapterDir, '/test/unit/adapterTestUnit.js', tasks);
+    return 'success';
+  } catch (e) {
+    console.log(`Error: ${e} ocurred during execution. Rolling back changes.`);
+    for (let i = 0; i < backupFiles.length; i++) {
+      const file = fs.readFileSync(backupFiles[i], 'utf8');
+      fs.writeFileSync(filePaths[i], file, 'utf8');
+    }
+    toDelete.forEach((filePath) => {
+      if (fs.existsSync(filePath)) {
+        fs.unlinkSync(filePath);
+      }
+    });
+    deleteBackups(adapterDir);
+    process.exit(1);
+  }
+}
+
+module.exports = {
+  activateTasks, deactivateTasks, rollbackChanges, deleteBackups
+};
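Nothing in this diff wires taskMover.js into an npm script, so here is a hedged sketch of how its exports might be driven from a one-off script sitting next to it in utils/ (the adapter directory layout and task names are hypothetical):

const path = require('path');
// Assumes this script lives in utils/ beside taskMover.js.
const { deactivateTasks, activateTasks } = require('./taskMover');

const adapterDir = path.resolve(__dirname, '..');

// Move rarely used tasks out of adapter.js/pronghorn.json and into the
// adapter-inactive.js / pronghorn-inactive.json pair (task names hypothetical).
deactivateTasks(adapterDir, ['getMailFolders', 'createMessage']);

// Later, bring one of them back and re-enable its skipped tests.
activateTasks(adapterDir, ['getMailFolders']);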
package/utils/tbScript.js CHANGED
@@ -143,9 +143,7 @@ const executeUnderIAPInstallationDirectory = async (command) => {
       const serviceIndex = profileItem.services.indexOf(serviceItem.name);
       profileItem.services.splice(serviceIndex, 1);
       const update = { $set: { services: profileItem.services } };
-      await database.collection(utils.IAP_PROFILES_COLLECTION).updateOne(
-        { id: pronghornProps.id }, update
-      );
+      await database.collection(utils.IAP_PROFILES_COLLECTION).updateOne({ id: pronghornProps.id }, update);
       console.log(`${serviceItem.name} removed from profileItem.services.`);
       console.log(`Rerun the script to reinstall ${serviceItem.name}.`);
       process.exit(0);
@@ -162,18 +160,13 @@ const executeUnderIAPInstallationDirectory = async (command) => {
     utils.runTest();
     if (rls.keyInYN(`Do you want to install ${name} to IAP?`)) {
       console.log('Creating database entries...');
-      const adapter = utils.createAdapter(
-        pronghornProps, profileItem, sampleProperties, adapterPronghorn
-      );
-
+      const adapter = utils.createAdapter(pronghornProps, profileItem, sampleProperties, adapterPronghorn);
       adapter.properties.properties = await addAuthInfo(adapter.properties.properties);
 
       await database.collection(utils.SERVICE_CONFIGS_COLLECTION).insertOne(adapter);
       profileItem.services.push(adapter.name);
       const update = { $set: { services: profileItem.services } };
-      await database.collection(utils.IAP_PROFILES_COLLECTION).updateOne(
-        { id: pronghornProps.id }, update
-      );
+      await database.collection(utils.IAP_PROFILES_COLLECTION).updateOne({ id: pronghornProps.id }, update);
       console.log('Database entry creation complete.');
     }
     console.log('Exiting...');
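The tbScript.js edits (like the matching ones in modify.js and troubleshootingAdapter.js) are purely stylistic — multi-line `updateOne` calls collapsed onto single lines — but the call shape is the same throughout this release: a filter document plus a `$set` update against an IAP Mongo collection. A minimal sketch with the MongoDB Node driver; the connection URI, database, and collection names are placeholders, since the real scripts read them from their utils constants and the discovered pronghorn properties:

const { MongoClient } = require('mongodb');

async function setProfileServices(uri, dbName, collectionName, profileId, services) {
  const client = await MongoClient.connect(uri);
  try {
    const database = client.db(dbName);
    const update = { $set: { services } };
    // Same shape as the collapsed calls above: filter by id, apply a $set update.
    await database.collection(collectionName).updateOne({ id: profileId }, update);
  } finally {
    await client.close();
  }
}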
package/utils/tbUtils.js CHANGED
@@ -6,8 +6,8 @@
 /* eslint-disable no-console */
 
 const path = require('path');
-const fs = require('fs-extra');
 const cp = require('child_process');
+const fs = require('fs-extra');
 
 module.exports = {
   SERVICE_CONFIGS_COLLECTION: 'service_configs',
@@ -158,8 +158,7 @@ module.exports = {
   * @param {Object} healthcheck - {Object} healthcheck - ./entities/.system/action.json object
   */
  getHealthCheckEndpoint: (healthcheck) => {
-    const endpoint = healthcheck.actions[1].entitypath.slice(21,
-      healthcheck.actions[1].entitypath.length - 8);
+    const endpoint = healthcheck.actions[1].entitypath.slice(21, healthcheck.actions[1].entitypath.length - 8);
    return { healthCheckEndpoint: endpoint };
  },
 
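The reflowed `slice(21, length - 8)` in getHealthCheckEndpoint is easier to follow with a concrete value. Assuming the .system action's entitypath follows the usual adapter template `{base_path}/{version}<endpoint>?{query}` (our assumption; the real value lives in ./entities/.system/action.json), the slice drops the 21-character prefix and the 8-character `?{query}` suffix:

// Hypothetical ./entities/.system/action.json entitypath value.
const entitypath = '{base_path}/{version}/me/messages?{query}';

// Mirrors getHealthCheckEndpoint: drop '{base_path}/{version}' (21 chars)
// and '?{query}' (8 chars) to keep only the endpoint itself.
const endpoint = entitypath.slice(21, entitypath.length - 8);
console.log(endpoint); // -> /me/messages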
package/utils/testRunner.js CHANGED
@@ -1,9 +1,9 @@
 #!/usr/bin/env node
 /* @copyright Itential, LLC 2019 */
 
+const execute = require('child_process').exec;
 const fs = require('fs-extra');
 const rl = require('readline-sync');
-const execute = require('child_process').exec;
 
 /**
  * This script will determine the type of integration test to run
package/utils/troubleshootingAdapter.js CHANGED
@@ -160,9 +160,7 @@ const troubleshoot = async (props, scriptFlag, persistFlag, adapter) => {
   if (persistFlag && healthRes) {
     const { database } = await utils.getIAPDatabaseConnection();
     const update = { $set: { properties: updatedAdapter.properties } };
-    await database.collection(utils.SERVICE_CONFIGS_COLLECTION).updateOne(
-      { model: name }, update
-    );
+    await database.collection(utils.SERVICE_CONFIGS_COLLECTION).updateOne({ model: name }, update);
     if (scriptFlag) {
       console.log(`${name} updated.`);
     }
package/workflows/README.md
DELETED