@itentialopensource/adapter-meraki 1.0.4 → 1.2.1
Sign up to get free protection for your applications and to get access to all the features.
- package/AUTH.md +14 -18
- package/CALLS.md +3580 -22
- package/CHANGELOG.md +24 -0
- package/CONTRIBUTING.md +1 -160
- package/ENHANCE.md +2 -2
- package/README.md +32 -23
- package/SYSTEMINFO.md +15 -2
- package/adapter.js +164 -335
- package/adapterBase.js +494 -893
- package/changelogs/changelog.md +198 -0
- package/metadata.json +75 -0
- package/package.json +24 -24
- package/pronghorn.json +470 -138
- package/propertiesSchema.json +422 -31
- package/refs?service=git-upload-pack +0 -0
- package/report/adapter-openapi.json +5460 -0
- package/report/adapter-openapi.yaml +3774 -0
- package/report/adapterInfo.json +8 -8
- package/report/updateReport1690417926405.json +119 -0
- package/report/updateReport1692203092612.json +120 -0
- package/report/updateReport1694439659179.json +120 -0
- package/sampleProperties.json +86 -27
- package/test/integration/adapterTestBasicGet.js +1 -3
- package/test/integration/adapterTestConnectivity.js +90 -41
- package/test/integration/adapterTestIntegration.js +129 -1
- package/test/unit/adapterBaseTestUnit.js +387 -312
- package/test/unit/adapterTestUnit.js +336 -110
- package/utils/entitiesToDB.js +2 -2
- package/utils/methodDocumentor.js +260 -0
- package/utils/modify.js +12 -14
- package/utils/packModificationScript.js +1 -1
- package/utils/pre-commit.sh +2 -0
- package/utils/taskMover.js +309 -0
- package/utils/tbScript.js +89 -34
- package/utils/tbUtils.js +41 -21
- package/utils/troubleshootingAdapter.js +9 -6
- package/versions.json +0 -542
- package/workflows/README.md +0 -3
@@ -0,0 +1,260 @@
|
|
1
|
+
/* eslint global-require:warn */
/* eslint import/no-dynamic-require:warn */
/* eslint no-param-reassign:warn */

const fs = require('fs-extra');
const acorn = require('acorn');

// Resolve the adapter's root directory: this script normally lives in the
// utils/ folder, so strip the trailing '/utils' to reach the adapter root.
let adaptdir = __dirname;
if (adaptdir.endsWith('/utils')) {
  adaptdir = adaptdir.slice(0, -'/utils'.length);
}
|
13
|
+
|
14
|
+
/**
 * Builds the documentation record for one adapter method.
 *
 * @param {string} funcName - method name.
 * @param {string[]} funcArgs - parameter descriptions (e.g. 'opts = {}').
 * @param {string|undefined} entityPath - resolved entity path; when
 *   undefined the method is undocumented and an empty object is returned.
 * @param {string|undefined} description - summary text from pronghorn.json.
 * @param {string|undefined} workflow - 'Yes'/'No' workflow flag.
 * @returns {object} record with method_signature, path, description, workflow
 *   (or {} when entityPath is undefined).
 */
function createObjectForFunction(
  funcName,
  funcArgs,
  entityPath,
  description,
  workflow
) {
  // Without an entity path there is nothing to document.
  if (entityPath === undefined) {
    return {};
  }
  const funcObject = {
    method_signature: `${funcName}(${funcArgs.join(', ')})`,
    path: entityPath
  };
  if (description === undefined) {
    // No pronghorn entry for this method: use safe defaults.
    funcObject.description = '';
    funcObject.workflow = 'No';
  } else {
    funcObject.description = description;
    funcObject.workflow = workflow;
  }
  return funcObject;
}
|
36
|
+
|
37
|
+
/**
 * Resolves the entity path for a given action by reading the entity's
 * action.json file.
 *
 * @param {string|undefined} entity - entity folder name; undefined or
 *   '.generic' yields undefined (no path to document).
 * @param {string} funcName - action name to look up.
 * @returns {string|undefined} the entity path, object-valued paths are
 *   rendered as 'key:path' pairs joined with ' <br /> '.
 */
function getPathFromEntity(entity, funcName) {
  if (entity === undefined || entity === '.generic') {
    return undefined;
  }
  let epath;
  // Load the entity's action.json to find the matching action definition.
  const actionJSON = require(`${adaptdir}/entities/${entity}/action.json`);
  actionJSON.actions.forEach((action) => {
    if (action.name !== funcName) {
      return;
    }
    if (typeof action.entitypath === 'object') {
      // Multiple paths keyed by name: render each pair and join them.
      const pairs = Object.keys(action.entitypath)
        .map((key) => `${key}:${action.entitypath[key]}`);
      epath = pairs.join(' <br /> ');
    } else {
      epath = action.entitypath;
    }
  });
  return epath;
}
|
62
|
+
|
63
|
+
/**
 * Depth-first walk over an acorn AST node collecting every CallExpression.
 *
 * @param {object} statement - current AST node (may contain arrays/objects).
 * @param {object[]} callList - accumulator, mutated in place.
 */
function recurseCallExpressions(statement, callList) {
  if (statement.type === 'CallExpression') {
    callList.push(statement);
  }
  // Arrays and child nodes are both plain objects here, so one key walk
  // covers them all; skip nulls (typeof null === 'object').
  Object.keys(statement).forEach((key) => {
    const child = statement[key];
    if (typeof child === 'object' && child !== null) {
      recurseCallExpressions(child, callList);
    }
  });
}
|
73
|
+
|
74
|
+
/**
 * Parses an adapter source file with acorn and, for every class method that
 * calls identifyRequest(entity, action, ...), records the method's signature,
 * entity path, description and workflow flag into functionList.
 *
 * @param {string} filename - path to the adapter source file.
 * @param {object} descriptionObj - method name -> description (from pronghorn).
 * @param {object} workflowObj - method name -> 'Yes'/'No' (from pronghorn).
 * @param {object[]} functionList - accumulator, mutated in place.
 * @throws {Error} when an identifyRequest call has fewer than two arguments
 *   or non-literal entity/action arguments.
 */
function readFileUsingLib(filename, descriptionObj, workflowObj, functionList) {
  const aFile = fs.readFileSync(filename, 'utf8');
  // Parse the whole file; the adapter class holds the methods we document.
  const aFileFuncArgs = acorn.parse(aFile, { ecmaVersion: 2020 });

  aFileFuncArgs.body.forEach((e) => {
    if (e.type !== 'ClassDeclaration') {
      return;
    }
    e.body.body.forEach((method) => {
      const funcName = method.key.name;
      // Parameter list; defaulted parameters render as 'name = value'.
      const funcArgs = method.value.params.map((param) => (
        param.type === 'Identifier'
          ? param.name
          : `${param.left.name} = ${param.right.value}`
      ));

      // Collect every CallExpression in the method body, then keep only
      // the identifyRequest(...) calls.
      const callList = [];
      method.value.body.body.forEach((statement) => {
        recurseCallExpressions(statement, callList);
      });
      const requests = callList.filter((call) => (
        call.callee.property && call.callee.property.name === 'identifyRequest'
      ));
      if (requests.length === 0) {
        return;
      }

      const expr = requests[0];
      if (expr.arguments.length < 2) {
        throw new Error(`Bad inputs in method ${funcName}`);
      }
      // FIX: validate that both arguments are literals BEFORE reading their
      // values (the original extracted .value first and also carried a
      // redundant `expr !== undefined` check after requests.length > 0).
      if (expr.arguments[0].type !== 'Literal' || expr.arguments[1].type !== 'Literal') {
        throw new Error(`Bad inputs in method ${funcName}`);
      }
      const entity = expr.arguments[0].value;
      const actionName = expr.arguments[1].value;
      const entityPath = getPathFromEntity(entity, actionName);

      // Only methods with a resolvable entity path are documented.
      if (entityPath !== undefined) {
        functionList.push(
          createObjectForFunction(
            funcName,
            funcArgs,
            entityPath,
            descriptionObj[funcName],
            workflowObj[funcName]
          )
        );
      }
    });
  });
}
|
138
|
+
|
139
|
+
/**
 * Loads pronghorn.json and records, per method name, its description
 * (summary preferred over description) and whether it is a workflow task.
 *
 * @param {string} filename - path to the pronghorn.json file.
 * @param {object} descriptionObj - output map, mutated in place.
 * @param {object} workflowObj - output map ('Yes'/'No'), mutated in place.
 */
function readJSONFile(filename, descriptionObj, workflowObj) {
  const phJSON = require(filename);
  phJSON.methods.forEach((method) => {
    const funcName = method.name;
    // Prefer the short summary; fall back to the long description.
    descriptionObj[funcName] = method.summary ? method.summary : method.description;
    workflowObj[funcName] = method.task ? 'Yes' : 'No';
  });
}
|
151
|
+
|
152
|
+
/**
 * Rewrites the method table at the end of CALLS.md: the old rows are
 * removed and a fresh row is appended for every entry in functionList.
 * The file is expected to end with '</table>' then '<br>' (plus optional
 * blank lines); otherwise it is left untouched.
 *
 * @param {string} filename - path to the CALLS.md file.
 * @param {object[]} functionList - records from createObjectForFunction.
 */
function readMDFile(filename, functionList) {
  const mdFile = fs.readFileSync(filename, 'utf-8');
  const fileSplit = mdFile.split('\n');
  // Lines stripped from the tail that must be restored after the rebuild.
  const linesToAddLater = [];
  let index = fileSplit.length - 1;

  // Drop trailing blank lines, remembering them for later.
  while (fileSplit[index] === '') {
    linesToAddLater.push(fileSplit.pop());
    index -= 1;
  }

  // Sanity check on the tail: proceed when the closing '<br>'/'</table>'
  // pair appears where expected; otherwise the file is considered corrupt.
  if (fileSplit[index] === '<br>' || fileSplit[index - 1] === '</table>') {
    linesToAddLater.push(fileSplit.pop());
    linesToAddLater.push(fileSplit.pop());
    index -= 2;
  } else {
    console.log('The file has bad content at the end.');
    return;
  }

  // Remove existing table rows up to (and including) the header cells.
  while (!fileSplit[index].includes('<th')) {
    fileSplit.pop();
    index -= 1;
  }
  // The header row's closing tag was consumed by the loop above.
  fileSplit.push(' </tr>');

  // Append one table row per documented method.
  const tdBeginTag = ' <td style="padding:15px">';
  const tdEndTag = '</td>';
  functionList.forEach((func) => {
    fileSplit.push(' <tr>');
    fileSplit.push(`${tdBeginTag}${func.method_signature}${tdEndTag}`);
    fileSplit.push(`${tdBeginTag}${func.description}${tdEndTag}`);
    fileSplit.push(`${tdBeginTag}${func.path}${tdEndTag}`);
    fileSplit.push(`${tdBeginTag}${func.workflow}${tdEndTag}`);
    fileSplit.push(' </tr>');
  });

  // Restore '</table>', '<br>' and any trailing blank lines.
  while (linesToAddLater.length > 0) {
    fileSplit.push(linesToAddLater.pop());
  }

  fs.writeFileSync(filename, fileSplit.join('\n'), {
    encoding: 'utf-8',
    flag: 'w'
  });
}
|
224
|
+
|
225
|
+
/**
 * Entry point: collects method metadata from pronghorn.json and adapter.js,
 * then rewrites the method table in CALLS.md. Logs and returns early when
 * any required file is missing.
 */
function getFileInfo() {
  // All three files live at the adapter root (adaptdir).
  if (!fs.existsSync(`${adaptdir}/adapter.js`)) {
    // FIX: the message previously said 'utils/adapter.js' although the
    // check is against the adapter root, which was misleading.
    console.log('Missing - adapter.js');
    return;
  }
  if (!fs.existsSync(`${adaptdir}/pronghorn.json`)) {
    console.log('Missing - pronghorn.json');
    return;
  }
  if (!fs.existsSync(`${adaptdir}/CALLS.md`)) {
    console.log('Missing - CALLS.md');
    return;
  }

  const descriptionObj = {};
  const workflowObj = {};

  // Get the method descriptions and workflow flags from pronghorn.json.
  readJSONFile(`${adaptdir}/pronghorn.json`, descriptionObj, workflowObj);

  // Build one record per documented method from the adapter source.
  const functionList = [];
  readFileUsingLib(
    `${adaptdir}/adapter.js`,
    descriptionObj,
    workflowObj,
    functionList
  );

  // Rewrite the method table in CALLS.md.
  readMDFile(`${adaptdir}/CALLS.md`, functionList);
}

getFileInfo();
|
package/utils/modify.js
CHANGED
@@ -21,9 +21,7 @@ async function updateServiceItem() {
|
|
21
21
|
const validate = ajv.compile(propertiesSchema);
|
22
22
|
validate(currentProps);
|
23
23
|
console.log('Updating Properties...');
|
24
|
-
await database.collection('service_configs').updateOne(
|
25
|
-
{ model: name }, { $set: serviceItem }
|
26
|
-
);
|
24
|
+
await database.collection('service_configs').updateOne({ model: name }, { $set: serviceItem });
|
27
25
|
console.log('Properties Updated');
|
28
26
|
}
|
29
27
|
|
@@ -35,7 +33,7 @@ async function updateServiceItem() {
|
|
35
33
|
function backup() {
|
36
34
|
// zip all files except node_modules and package-lock
|
37
35
|
const backupCmd = 'zip -r previousVersion.zip .';
|
38
|
-
execSync(backupCmd, { encoding: 'utf-8' });
|
36
|
+
execSync(backupCmd, { encoding: 'utf-8', maxBuffer: 1024 * 1024 * 2 });
|
39
37
|
}
|
40
38
|
|
41
39
|
/**
|
@@ -53,9 +51,9 @@ function archiveMod(modType) {
|
|
53
51
|
const archiveName = `${modType}-${now.toISOString()}`;
|
54
52
|
execSync(`mkdir adapter_modifications/archive/${archiveName}`);
|
55
53
|
const archiveCmd = 'mv adapter_modifications/archive .'
|
56
|
-
|
57
|
-
|
58
|
-
|
54
|
+
+ ` && mv adapter_modifications/* archive/${archiveName}`
|
55
|
+
+ ' && mv archive adapter_modifications'
|
56
|
+
+ ` && rm ${zipFile}`;
|
59
57
|
execSync(archiveCmd, { encoding: 'utf-8' });
|
60
58
|
}
|
61
59
|
|
@@ -73,7 +71,7 @@ function revertMod() {
|
|
73
71
|
}
|
74
72
|
});
|
75
73
|
// // unzip previousVersion, reinstall dependencies and delete zipfile
|
76
|
-
execSync('unzip -o previousVersion.zip && rm -rf node_modules && rm package-lock.json && npm install');
|
74
|
+
execSync('unzip -o previousVersion.zip && rm -rf node_modules && rm package-lock.json && npm install', { maxBuffer: 1024 * 1024 * 2 });
|
77
75
|
execSync('rm previousVersion.zip');
|
78
76
|
console.log('Changes have been reverted');
|
79
77
|
}
|
@@ -90,12 +88,12 @@ if (flags === '-m') {
|
|
90
88
|
backup();
|
91
89
|
console.log('Migrating adapter and running tests...');
|
92
90
|
const migrateCmd = 'unzip -o migrationPackage.zip'
|
93
|
-
|
94
|
-
|
91
|
+
+ ' && cd adapter_modifications'
|
92
|
+
+ ' && node migrate';
|
95
93
|
const migrateOutput = execSync(migrateCmd, { encoding: 'utf-8' });
|
96
94
|
console.log(migrateOutput);
|
97
95
|
if (migrateOutput.indexOf('Lint exited with code 1') >= 0
|
98
|
-
|
96
|
+
|| migrateOutput.indexOf('Tests exited with code 1') >= 0) {
|
99
97
|
if (rls.keyInYN('Adapter failed tests or lint after migrating. Would you like to revert the changes?')) {
|
100
98
|
console.log('Reverting changes...');
|
101
99
|
revertMod();
|
@@ -125,12 +123,12 @@ if (flags === '-u') {
|
|
125
123
|
// Backup current adapter
|
126
124
|
backup();
|
127
125
|
const updateCmd = 'unzip -o updatePackage.zip'
|
128
|
-
|
129
|
-
|
126
|
+
+ ' && cd adapter_modifications'
|
127
|
+
+ ' && node update.js updateFiles';
|
130
128
|
execSync(updateCmd, { encoding: 'utf-8' });
|
131
129
|
const updateOutput = execSync(updateCmd, { encoding: 'utf-8' });
|
132
130
|
if (updateOutput.indexOf('Lint exited with code 1') >= 0
|
133
|
-
|
131
|
+
|| updateOutput.indexOf('Tests exited with code 1') >= 0) {
|
134
132
|
if (rls.keyInYN('Adapter failed tests or lint after updating. Would you like to revert the changes?')) {
|
135
133
|
console.log('Reverting changes...');
|
136
134
|
revertMod();
|
@@ -1,8 +1,8 @@
|
|
1
1
|
#!/usr/bin/env node
|
2
2
|
/* @copyright Itential, LLC 2019 */
|
3
3
|
|
4
|
-
const { spawnSync } = require('child_process');
|
5
4
|
const path = require('path');
|
5
|
+
const { spawnSync } = require('child_process');
|
6
6
|
const fs = require('fs-extra');
|
7
7
|
const { createBundle } = require('./artifactize');
|
8
8
|
|
package/utils/pre-commit.sh
CHANGED
@@ -19,6 +19,8 @@ node utils/testRunner.js -r
|
|
19
19
|
|
20
20
|
# update the adapter information file
|
21
21
|
node utils/adapterInfo.js
|
22
|
+
node utils/methodDocumentor.js
|
23
|
+
git add CALLS.md report/adapterInfo.json
|
22
24
|
|
23
25
|
# security audit on the code
|
24
26
|
npm audit --registry=https://registry.npmjs.org --audit-level=moderate
|
@@ -0,0 +1,309 @@
|
|
1
|
+
/* eslint-disable */
|
2
|
+
const fs = require('fs');
|
3
|
+
|
4
|
+
// Tasks that must always stay active; deactivating them would break the
// adapter's core request/healthcheck plumbing.
const blacklistTasks = [
  'genericAdapterRequest',
  'genericAdapterRequestNoBasePath',
  'hasEntities',
  'healthcheck'
];

// Tasks implemented in adapterBase.js: these are toggled via the pronghorn
// 'task' flag instead of being moved between adapter files.
// FIX: removed duplicate entries (getDevicesFiltered, isAlive, getConfig and
// getDevice were each listed twice in the original).
const adapterBaseTasks = [
  'getDevicesFiltered',
  'isAlive',
  'getConfig',
  'getDevice',
  'iapUpdateAdapterConfiguration',
  'iapFindAdapterPath',
  'iapSuspendAdapter',
  'iapUnsuspendAdapter',
  'iapGetAdapterQueue',
  'iapTroubleshootAdapter',
  'iapRunAdapterHealthcheck',
  'iapRunAdapterConnectivity',
  'iapRunAdapterBasicGet',
  'iapMoveAdapterEntitiesToDB',
  'iapGetDeviceCount',
  'iapRunAdapterLint',
  'iapRunAdapterTests',
  'iapGetAdapterInventory'
];
|
35
|
+
|
36
|
+
/**
 * Moves the pronghorn method entries named in `tasks` out of the `original`
 * pronghorn file into the `updated` one, creating the target descriptor
 * (with src 'adapter-inactive.js') when it does not exist yet. Both files
 * are rewritten in place.
 *
 * @param {string[]} tasks - method names to move.
 * @param {string} original - path to the pronghorn file to remove them from.
 * @param {string} updated - path to the pronghorn file to add them to.
 * @returns {string} 'Done' on completion.
 */
function updatePronghorn(tasks, original, updated) {
  const originalFile = require(original);
  const movedMethods = [];
  const keptMethods = originalFile.methods.filter((method) => {
    if (tasks.includes(method.name)) {
      movedMethods.push(method);
      return false;
    }
    return true;
  });
  // Start from the existing target, or bootstrap a fresh inactive descriptor.
  const updatedFile = fs.existsSync(updated)
    ? require(updated)
    : { ...originalFile, methods: [], src: 'adapter-inactive.js' };
  updatedFile.methods = updatedFile.methods.concat(movedMethods);
  originalFile.methods = keptMethods;
  fs.writeFileSync(updated, JSON.stringify(updatedFile, null, 2));
  fs.writeFileSync(original, JSON.stringify(originalFile, null, 2));
  return 'Done';
}
|
59
|
+
|
60
|
+
/**
 * Sets the 'task' flag of a named method in a pronghorn file and writes the
 * file back in place.
 *
 * @param {string} task - method name to update.
 * @param {string} pronghornPath - path to the pronghorn JSON file.
 * @param {boolean} value - new value for the method's task flag.
 */
function flipTaskFlag(task, pronghornPath, value) {
  const pronghorn = require(pronghornPath);
  const index = pronghorn.methods.findIndex((method) => method.name === task);
  // FIX: guard the findIndex result; the original indexed with -1 on a
  // missing task and silently wrote a bogus '-1' property into the array.
  if (index === -1) {
    console.log(`Task ${task} not found in ${pronghornPath}. Skipping.`);
    return;
  }
  pronghorn.methods[index] = { ...pronghorn.methods[index], task: value };
  fs.writeFileSync(pronghornPath, JSON.stringify(pronghorn, null, 2));
}
|
67
|
+
|
68
|
+
/**
 * Returns the paths of every file the task mover touches for the given
 * adapter directory (active + inactive adapter/pronghorn files and tests).
 *
 * @param {string} currentAdapter - adapter root directory.
 * @returns {string[]} relevant file paths under that directory.
 */
function createPaths(currentAdapter) {
  const fileNames = [
    'adapter.js',
    'pronghorn.json',
    'test/integration/adapterTestIntegration.js',
    'test/unit/adapterTestUnit.js',
    'adapter-inactive.js',
    'pronghorn-inactive.json',
  ];
  return fileNames.map((file) => `${currentAdapter}/${file}`);
}
|
84
|
+
|
85
|
+
/**
 * Returns `str` with `value` inserted at position `index`.
 * FIX: uses slice() instead of the deprecated String.prototype.substr().
 * Callers pass non-negative indexes (indexOf/lastIndexOf hits).
 *
 * @param {string} str - base string.
 * @param {number} index - insertion position (assumed >= 0).
 * @param {string} value - text to insert.
 * @returns {string} the combined string.
 */
function insert(str, index, value) {
  return str.slice(0, index) + value + str.slice(index);
}
|
88
|
+
|
89
|
+
/**
 * Moves task implementations between adapter source files by splicing the
 * JSDoc-tagged method blocks out of `original` and into `updated` (which is
 * bootstrapped as an empty inactive adapter class when missing).
 *
 * @param {string[]} tasks - method names to move.
 * @param {string} original - path to the file the tasks are removed from.
 * @param {string} updated - path to the file the tasks are moved to.
 * @param {string} adapterDir - adapter root (used to read pronghorn.json).
 * @returns {string} 'done' on completion.
 * @throws {Error} when the original file does not exist.
 */
function updateAdapterJs(tasks, original, updated, adapterDir) {
  if (!fs.existsSync(original)) {
    throw new Error(`Original file ${original} does not exist.`);
  }
  let originalFile = fs.readFileSync(original, 'utf8');
  let updatedFile;
  if (!fs.existsSync(updated)) {
    // Bootstrap an empty inactive adapter class mirroring the export name.
    const adapterExport = require(`${adapterDir}/pronghorn.json`).export;
    updatedFile = `/* @copyright Itential, LLC 2019 */\n\n/* eslint import/no-dynamic-require: warn */\n/* eslint no-unused-vars: warn */\n/* global log */\n\nconst path = require('path');\n\nconst AdapterBaseCl = require(path.join(__dirname, 'adapterBase.js'));\n\nclass ${adapterExport}Inactive extends AdapterBaseCl {}\n`;
  } else {
    updatedFile = fs.readFileSync(updated, 'utf8');
  }

  tasks.forEach((method) => {
    // Locate the JSDoc block tagging this task.
    const comment = originalFile.indexOf(`* @function ${method}`);
    // FIX: when the @function tag is absent, indexOf returns -1 and the
    // original's slice(0, -1).lastIndexOf('/**') could match an unrelated
    // comment block and splice the wrong code; treat that as 'not found'.
    const start = comment === -1
      ? -1
      : originalFile.slice(0, comment).lastIndexOf('/**');
    if (start !== -1) {
      // The method runs until the next JSDoc block, or - for the last
      // method - up to the class's closing brace.
      const end = originalFile.indexOf('/**\n', start + 1);
      let func = end === -1
        ? originalFile.substring(start - 3, originalFile.lastIndexOf('}'))
        : originalFile.substring(start, end);
      originalFile = originalFile.replace(func, '');
      func = '\n ' + func.trim() + '\n';
      // Insert before the destination class's closing brace.
      updatedFile = insert(updatedFile, updatedFile.lastIndexOf('}'), func);
    } else {
      console.log(`Task ${method} wasn't found in original file. Skipping.`);
    }
  });
  fs.writeFileSync(original, originalFile, 'utf8');
  fs.writeFileSync(updated, updatedFile, 'utf8');
  return 'done';
}
|
128
|
+
|
129
|
+
/**
 * Marks the test suites of deactivated tasks as skipped by rewriting
 * describe('#task...) to describe.skip('#task...) in the given test file.
 *
 * @param {string} adapterPath - adapter root directory.
 * @param {string} testPath - test file path relative to the adapter root.
 * @param {string[]} tasks - task names whose suites should be skipped.
 */
function deactivateTest(adapterPath, testPath, tasks) {
  const fullPath = `${adapterPath}/${testPath}`;
  let unitTest = fs.readFileSync(fullPath, 'utf8');
  tasks.forEach((task) => {
    unitTest = unitTest.replace(`describe('#${task}`, `describe.skip('#${task}`);
  });
  fs.writeFileSync(fullPath, unitTest, 'utf8');
}
|
138
|
+
|
139
|
+
/**
 * Re-enables the test suites of activated tasks by rewriting
 * describe.skip('#task...) back to describe('#task...) in the test file.
 *
 * @param {string} adapterPath - adapter root directory.
 * @param {string} testPath - test file path relative to the adapter root.
 * @param {string[]} tasks - task names whose suites should run again.
 */
function activateTest(adapterPath, testPath, tasks) {
  const fullPath = `${adapterPath}/${testPath}`;
  let unitTest = fs.readFileSync(fullPath, 'utf8');
  tasks.forEach((task) => {
    unitTest = unitTest.replace(`describe.skip('#${task}`, `describe('#${task}`);
  });
  fs.writeFileSync(fullPath, unitTest, 'utf8');
}
|
149
|
+
|
150
|
+
/**
 * Restores every file from the temp/ backup directory (matching backups
 * named 'temp-<file>' to their original paths by file name), removes the
 * inactive files when they did not exist before the script ran, and then
 * deletes the backup directory.
 *
 * @param {string} adapterPath - adapter root directory.
 */
function rollbackChanges(adapterPath) {
  // Backup directory entries are bare file names, not full paths.
  const backups = fs.readdirSync(`${adapterPath}/temp`);
  const filePaths = createPaths(adapterPath);
  backups.forEach((backupName) => {
    const contents = fs.readFileSync(`${adapterPath}/temp/${backupName}`, 'utf8');
    // Match 'temp-<name>' to its original path by trailing file name;
    // find() returns undefined when there is no match.
    const target = filePaths.find((filePath) => {
      const parts = filePath.split('/');
      return parts[parts.length - 1] === backupName.replace('temp-', '');
    });
    if (target) {
      fs.writeFileSync(target, contents, 'utf8');
    }
  });
  // No backup of the inactive adapter means it was created by this script;
  // remove the inactive files to return to the pre-script state.
  if (!backups.includes('temp-adapter-inactive.js')) {
    fs.unlinkSync(`${adapterPath}/pronghorn-inactive.json`);
    fs.unlinkSync(`${adapterPath}/adapter-inactive.js`);
  }
  deleteBackups(adapterPath);
}
|
173
|
+
|
174
|
+
/**
 * Deletes the temporary backup directory created by activate/deactivate.
 *
 * @param {string} adapterPath - adapter root directory.
 */
function deleteBackups(adapterPath) {
  const tempDir = `${adapterPath}/temp`;
  fs.rmSync(tempDir, { recursive: true });
}
|
177
|
+
|
178
|
+
/**
 * Activates the given tasks: adapterBase tasks get their pronghorn 'task'
 * flag turned on; all other tasks are moved from the inactive adapter files
 * into the active ones and their test suites re-enabled. Every touched file
 * is backed up to temp/ first; on any error the backups are restored, files
 * created during the run are deleted, and the process exits with code 1.
 *
 * @param {string} adapterDir - adapter root directory.
 * @param {string[]} tasks - task names to activate.
 * @returns {string} 'success' when everything completed.
 */
function activateTasks(adapterDir, tasks) {
  const toDelete = [];
  // FIX: record (original, backup) pairs instead of a bare backup list; the
  // original rollback wrote backupFiles[i] into filePaths[i], and those
  // indices misalign whenever some files did not exist beforehand (missing
  // files are skipped in the backup list but not in filePaths), which could
  // restore content into the wrong files.
  const backups = [];
  const filePaths = createPaths(adapterDir);
  try {
    // Back up each existing file into temp/ before modifying anything.
    if (!fs.existsSync(`${adapterDir}/temp`)) {
      fs.mkdirSync(`${adapterDir}/temp`);
    }
    filePaths.forEach((filePath) => {
      if (fs.existsSync(filePath)) {
        const parts = filePath.split('/');
        const backupPath = `${adapterDir}/temp/temp-${parts[parts.length - 1]}`;
        backups.push({ original: filePath, backup: backupPath });
        fs.copyFileSync(filePath, backupPath);
      } else {
        // File will be created during this run; delete it on rollback.
        toDelete.push(filePath);
      }
    });
    // adapterBase tasks only need their pronghorn flag flipped on.
    tasks = tasks.filter((task) => {
      if (adapterBaseTasks.includes(task)) {
        flipTaskFlag(task, `${adapterDir}/pronghorn.json`, true);
        return false;
      }
      return true;
    });
    updateAdapterJs(
      tasks,
      `${adapterDir}/adapter-inactive.js`,
      `${adapterDir}/adapter.js`,
      adapterDir
    );
    updatePronghorn(
      tasks,
      `${adapterDir}/pronghorn-inactive.json`,
      `${adapterDir}/pronghorn.json`
    );
    activateTest(
      adapterDir,
      '/test/integration/adapterTestIntegration.js',
      tasks
    );
    activateTest(adapterDir, '/test/unit/adapterTestUnit.js', tasks);
    return 'success';
  } catch (e) {
    console.log(`Error: ${e} ocurred during execution. Rolling back changes.`);
    backups.forEach(({ original, backup }) => {
      fs.writeFileSync(original, fs.readFileSync(backup, 'utf8'), 'utf8');
    });
    toDelete.forEach((filePath) => {
      if (fs.existsSync(filePath)) {
        fs.unlinkSync(filePath);
      }
    });
    deleteBackups(adapterDir);
    process.exit(1);
  }
}
|
239
|
+
|
240
|
+
/**
 * Deactivates the given tasks: blacklisted tasks are refused, adapterBase
 * tasks get their pronghorn 'task' flag turned off, and the remaining tasks
 * are moved from adapter.js/pronghorn.json into the inactive files with
 * their test suites skipped. Every touched file is backed up to temp/
 * first; on any error the backups are restored, files created during the
 * run are deleted, and the process exits with code 1.
 *
 * @param {string} adapterDir - adapter root directory.
 * @param {string[]} tasks - task names to deactivate.
 * @returns {string} 'success' when everything completed.
 */
function deactivateTasks(adapterDir, tasks) {
  const toDelete = [];
  // FIX: record (original, backup) pairs instead of a bare backup list; the
  // original rollback wrote backupFiles[i] into filePaths[i], and those
  // indices misalign whenever some files did not exist beforehand (missing
  // files are skipped in the backup list but not in filePaths), which could
  // restore content into the wrong files.
  const backups = [];
  const filePaths = createPaths(adapterDir);
  try {
    // Back up each existing file into temp/ before modifying anything.
    if (!fs.existsSync(`${adapterDir}/temp`)) {
      fs.mkdirSync(`${adapterDir}/temp`);
    }
    filePaths.forEach((filePath) => {
      if (fs.existsSync(filePath)) {
        const parts = filePath.split('/');
        const backupPath = `${adapterDir}/temp/temp-${parts[parts.length - 1]}`;
        backups.push({ original: filePath, backup: backupPath });
        fs.copyFileSync(filePath, backupPath);
      } else {
        // File will be created during this run; delete it on rollback.
        toDelete.push(filePath);
      }
    });
    // Refuse blacklisted tasks; flip the flag for adapterBase tasks; only
    // the rest are physically moved between files.
    tasks = tasks.filter((task) => {
      if (blacklistTasks.includes(task)) {
        console.log(`${task} cannot be deactivated.`);
        return false;
      }
      if (adapterBaseTasks.includes(task)) {
        flipTaskFlag(task, `${adapterDir}/pronghorn.json`, false);
        return false;
      }
      return true;
    });
    updateAdapterJs(
      tasks,
      `${adapterDir}/adapter.js`,
      `${adapterDir}/adapter-inactive.js`,
      adapterDir
    );
    updatePronghorn(
      tasks,
      `${adapterDir}/pronghorn.json`,
      `${adapterDir}/pronghorn-inactive.json`
    );
    deactivateTest(
      adapterDir,
      '/test/integration/adapterTestIntegration.js',
      tasks
    );
    deactivateTest(adapterDir, '/test/unit/adapterTestUnit.js', tasks);
    return 'success';
  } catch (e) {
    console.log(`Error: ${e} ocurred during execution. Rolling back changes.`);
    backups.forEach(({ original, backup }) => {
      fs.writeFileSync(original, fs.readFileSync(backup, 'utf8'), 'utf8');
    });
    toDelete.forEach((filePath) => {
      if (fs.existsSync(filePath)) {
        fs.unlinkSync(filePath);
      }
    });
    deleteBackups(adapterDir);
    process.exit(1);
  }
}
|
306
|
+
|
307
|
+
module.exports = {
|
308
|
+
activateTasks, deactivateTasks, rollbackChanges, deleteBackups
|
309
|
+
};
|