@itentialopensource/adapter-meraki 1.0.3 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
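For reference, a roughly comparable diff can usually be produced locally with npm 7 or later (the registry viewer's formatting may differ slightly):

npm diff --diff=@itentialopensource/adapter-meraki@1.0.3 --diff=@itentialopensource/adapter-meraki@1.1.0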
- package/AUTH.md +14 -18
- package/CALLS.md +4264 -0
- package/CHANGELOG.md +16 -0
- package/CONTRIBUTING.md +1 -160
- package/ENHANCE.md +2 -2
- package/README.md +23 -18
- package/SYSTEMINFO.md +15 -2
- package/adapter.js +164 -335
- package/adapterBase.js +411 -920
- package/changelogs/changelog.md +198 -0
- package/metadata.json +61 -0
- package/package.json +24 -24
- package/pronghorn.json +470 -138
- package/propertiesSchema.json +358 -31
- package/refs?service=git-upload-pack +0 -0
- package/report/adapterInfo.json +8 -8
- package/report/updateReport1690417926405.json +119 -0
- package/sampleProperties.json +74 -27
- package/test/integration/adapterTestBasicGet.js +2 -4
- package/test/integration/adapterTestConnectivity.js +91 -42
- package/test/integration/adapterTestIntegration.js +130 -2
- package/test/unit/adapterBaseTestUnit.js +388 -313
- package/test/unit/adapterTestUnit.js +332 -112
- package/utils/adapterInfo.js +1 -1
- package/utils/addAuth.js +1 -1
- package/utils/artifactize.js +1 -1
- package/utils/checkMigrate.js +1 -1
- package/utils/entitiesToDB.js +2 -2
- package/utils/findPath.js +1 -1
- package/utils/methodDocumentor.js +225 -0
- package/utils/modify.js +13 -15
- package/utils/packModificationScript.js +1 -1
- package/utils/pre-commit.sh +2 -0
- package/utils/taskMover.js +309 -0
- package/utils/tbScript.js +89 -34
- package/utils/tbUtils.js +41 -21
- package/utils/testRunner.js +1 -1
- package/utils/troubleshootingAdapter.js +9 -6
- package/versions.json +0 -542
- package/workflows/README.md +0 -3
package/utils/methodDocumentor.js
ADDED
@@ -0,0 +1,225 @@
+/* eslint global-require:warn */
+/* eslint import/no-dynamic-require:warn */
+/* eslint no-param-reassign:warn */
+
+const fs = require('fs-extra');
+const esprima = require('esprima');
+
+// Getting the base directory:
+let adaptdir = __dirname;
+if (adaptdir.endsWith('/utils')) {
+  adaptdir = adaptdir.substring(0, adaptdir.length - 6);
+}
+
+function createObjectForFunction(
+  funcName,
+  funcArgs,
+  entityPath,
+  description,
+  workflow
+) {
+  const funcObject = {};
+  // if the entity path is not set, then the object is not created.
+  if (entityPath !== undefined) {
+    funcObject.method_signature = `${funcName}(${funcArgs.join(', ')})`;
+    funcObject.path = entityPath;
+    if (description === undefined) {
+      funcObject.description = '';
+      funcObject.workflow = 'No';
+    } else {
+      funcObject.description = description;
+      funcObject.workflow = workflow;
+    }
+  }
+  return funcObject;
+}
+
+function getPathFromEntity(entity, funcName) {
+  let epath;
+  if (entity === undefined || entity === '.generic') {
+    epath = undefined;
+  } else {
+    // Access the action.js file for the certain entity to get the path
+    const entityPath = `${adaptdir}/entities/${entity}/action.json`;
+    const actionJSON = require(entityPath);
+    actionJSON.actions.forEach((action) => {
+      if (action.name === funcName) {
+        epath = action.entitypath;
+      }
+    });
+  }
+  return epath;
+}
+
+function readFileUsingLib(filename, descriptionObj, workflowObj, functionList) {
+  // read the file
+  const aFile = fs.readFileSync(filename, 'utf8');
+  // parsing the file to get the function and class declarations.
+  const aFileFuncArgs = esprima.parseScript(aFile);
+
+  // Looping through all the declarations parsed:
+  aFileFuncArgs.body.forEach((e) => {
+    // Getting only the class declaration as it has our required functions.
+    if (e.type === 'ClassDeclaration') {
+      const methodDefinition = e.body;
+      methodDefinition.body.forEach((method) => {
+        // Getting method name and its params in the class.
+        const funcName = method.key.name;
+        const funcArgs = [];
+        method.value.params.forEach((param) => {
+          if (param.type === 'Identifier') {
+            funcArgs.push(param.name);
+          } else {
+            const args = `${param.left.name} = ${param.right.value}`;
+            funcArgs.push(args);
+          }
+        });
+
+        // Getting the entity for the method:
+        let entity;
+        method.value.body.body.forEach((statementType) => {
+          if (statementType.type === 'TryStatement') {
+            entity = statementType.block.body[0].argument.arguments[0].value;
+          }
+        });
+        const entityPath = getPathFromEntity(entity, funcName);
+
+        // Creating and storing the object for the method.
+        if (entityPath !== undefined) {
+          functionList.push(
+            createObjectForFunction(
+              funcName,
+              funcArgs,
+              entityPath,
+              descriptionObj[funcName],
+              workflowObj[funcName]
+            )
+          );
+        }
+      });
+    }
+  });
+}
+
+function readJSONFile(filename, descriptionObj, workflowObj) {
+  // Accessing the JSON file.
+  const phJSON = require(filename);
+  // Getting the methods array.
+  const methodArray = phJSON.methods;
+  methodArray.forEach((methodName) => {
+    // Getting the method description and workflow:
+    const funcName = methodName.name;
+    descriptionObj[funcName] = methodName.description;
+    workflowObj[funcName] = methodName.task ? 'Yes' : 'No';
+  });
+}
+
+function readMDFile(filename, functionList) {
+  // Reading in the .md file and creating an array with each line as an element.
+  const mdFile = fs.readFileSync(filename, 'utf-8');
+  const fileSplit = mdFile.split('\n');
+  // Storing the data that should added later to the updated data.
+  const linesToAddLater = [];
+  let index = fileSplit.length - 1;
+
+  // Removing all the blank lines at the end of the file.
+  if (fileSplit[index] === '') {
+    while (fileSplit[index] === '') {
+      linesToAddLater.push(fileSplit.pop());
+      index -= 1;
+    }
+  }
+
+  // Checking if the last 2 lines are <br> and </table>. If not, the file is corrupted and the
+  // data at the end of the file should be fixed.
+  if (fileSplit[index] === '<br>' || fileSplit[index - 1] === '</table>') {
+    // Storing <br> and </table> to add later.
+    linesToAddLater.push(fileSplit.pop());
+    linesToAddLater.push(fileSplit.pop());
+    index -= 2;
+  } else {
+    console.log('The file has bad content at the end.');
+    return;
+  }
+  // if (fileSplit[index] !== '<br>' && fileSplit[index - 1] !== '</table>') {
+  //   console.log('The file has bad content at the end.');
+  //   return;
+  // } else {
+  //   // Storing <br> and </table> to add later.
+  //   linesToAddLater.push(fileSplit.pop());
+  //   linesToAddLater.push(fileSplit.pop());
+  //   index -= 2;
+  // }
+
+  // Removing all the lines until the header tags are reached.
+  while (!fileSplit[index].includes('<th')) {
+    fileSplit.pop();
+    index -= 1;
+  }
+  // Adding </tr> for the header row, because it got removed in the above loop.
+  fileSplit.push(' </tr>');
+
+  // Creating the tags for each method to be appended to the file.
+  const tdBeginTag = ' <td style="padding:15px">';
+  const tdEndTag = '</td>';
+  functionList.forEach((func) => {
+    const signCommand = `${tdBeginTag}${func.method_signature}${tdEndTag}`;
+    const descCommand = `${tdBeginTag}${func.description}${tdEndTag}`;
+    const pathCommand = `${tdBeginTag}${func.path}${tdEndTag}`;
+    const workflowCommand = `${tdBeginTag}${func.workflow}${tdEndTag}`;
+    fileSplit.push(' <tr>');
+    fileSplit.push(signCommand);
+    fileSplit.push(descCommand);
+    fileSplit.push(pathCommand);
+    fileSplit.push(workflowCommand);
+    fileSplit.push(' </tr>');
+  });
+
+  // Adding </table> and <br> at the end of the file to complete the table and the file.
+  while (linesToAddLater.length > 0) {
+    fileSplit.push(linesToAddLater.pop());
+  }
+
+  // Writing all the content back into the file.
+  fs.writeFileSync(filename, fileSplit.join('\n'), {
+    encoding: 'utf-8',
+    flag: 'w'
+  });
+}
+
+function getFileInfo() {
+  // If files don't exist:
+  if (!fs.existsSync(`${adaptdir}/adapter.js`)) {
+    console.log('Missing - utils/adapter.js');
+    return;
+  }
+  if (!fs.existsSync(`${adaptdir}/pronghorn.json`)) {
+    console.log('Missing - pronghorn.json');
+    return;
+  }
+  if (!fs.existsSync(`${adaptdir}/CALLS.md`)) {
+    console.log('Missing - CALLS.md');
+    return;
+  }
+
+  const descriptionObj = {};
+  const workflowObj = {};
+
+  // Get the method descriptions and the workflow values from pronghorn.json file.
+  readJSONFile(`${adaptdir}/pronghorn.json`, descriptionObj, workflowObj);
+
+  // Get the method signature, entity path and create an object that contains all the info regarding
+  // the method and push it to the functionList array.
+  const functionList = [];
+  readFileUsingLib(
+    `${adaptdir}/adapter.js`,
+    descriptionObj,
+    workflowObj,
+    functionList
+  );
+
+  // createMarkDown(functionList);
+  readMDFile(`${adaptdir}/CALLS.md`, functionList);
+}
+
+getFileInfo();
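For illustration only (not part of the published package): based on the code above, each entry that methodDocumentor.js pushes onto functionList has roughly the shape sketched below, and readMDFile renders one <tr> with four <td> cells per entry into the table at the end of CALLS.md. The method name, path, and description used here are hypothetical.

// Hypothetical example of one functionList entry built by createObjectForFunction():
const exampleEntry = {
  method_signature: 'getOrganizations(callback)',            // method name plus its parsed parameter list
  path: '/organizations',                                    // entitypath taken from entities/<entity>/action.json
  description: 'Gets the organizations the key can access',  // description copied from pronghorn.json
  workflow: 'Yes'                                            // 'Yes' when the pronghorn.json method has task: true
};
// readMDFile() then appends one row in cell order signature, description, path, workflow:
//   <tr>
//     <td ...>getOrganizations(callback)</td>
//     <td ...>Gets the organizations the key can access</td>
//     <td ...>/organizations</td>
//     <td ...>Yes</td>
//   </tr>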
package/utils/modify.js
CHANGED
@@ -1,7 +1,7 @@
+const { execSync } = require('child_process');
 const fs = require('fs-extra');
 const Ajv = require('ajv');
 const rls = require('readline-sync');
-const { execSync } = require('child_process');
 const { existsSync } = require('fs-extra');
 const { getAdapterConfig } = require('./tbUtils');
 const { name } = require('../package.json');
@@ -21,9 +21,7 @@ async function updateServiceItem() {
   const validate = ajv.compile(propertiesSchema);
   validate(currentProps);
   console.log('Updating Properties...');
-  await database.collection('service_configs').updateOne(
-    { model: name }, { $set: serviceItem }
-  );
+  await database.collection('service_configs').updateOne({ model: name }, { $set: serviceItem });
   console.log('Properties Updated');
 }
 
@@ -35,7 +33,7 @@ async function updateServiceItem() {
 function backup() {
   // zip all files except node_modules and package-lock
   const backupCmd = 'zip -r previousVersion.zip .';
-  execSync(backupCmd, { encoding: 'utf-8' });
+  execSync(backupCmd, { encoding: 'utf-8', maxBuffer: 1024 * 1024 * 2 });
 }
 
 /**
@@ -53,9 +51,9 @@ function archiveMod(modType) {
   const archiveName = `${modType}-${now.toISOString()}`;
   execSync(`mkdir adapter_modifications/archive/${archiveName}`);
   const archiveCmd = 'mv adapter_modifications/archive .'
-
-
-
+    + ` && mv adapter_modifications/* archive/${archiveName}`
+    + ' && mv archive adapter_modifications'
+    + ` && rm ${zipFile}`;
   execSync(archiveCmd, { encoding: 'utf-8' });
 }
 
@@ -73,7 +71,7 @@ function revertMod() {
     }
   });
   // // unzip previousVersion, reinstall dependencies and delete zipfile
-  execSync('unzip -o previousVersion.zip && rm -rf node_modules && rm package-lock.json && npm install');
+  execSync('unzip -o previousVersion.zip && rm -rf node_modules && rm package-lock.json && npm install', { maxBuffer: 1024 * 1024 * 2 });
   execSync('rm previousVersion.zip');
   console.log('Changes have been reverted');
 }
@@ -90,12 +88,12 @@ if (flags === '-m') {
   backup();
   console.log('Migrating adapter and running tests...');
   const migrateCmd = 'unzip -o migrationPackage.zip'
-
-
+    + ' && cd adapter_modifications'
+    + ' && node migrate';
   const migrateOutput = execSync(migrateCmd, { encoding: 'utf-8' });
   console.log(migrateOutput);
   if (migrateOutput.indexOf('Lint exited with code 1') >= 0
-
+    || migrateOutput.indexOf('Tests exited with code 1') >= 0) {
     if (rls.keyInYN('Adapter failed tests or lint after migrating. Would you like to revert the changes?')) {
       console.log('Reverting changes...');
       revertMod();
@@ -125,12 +123,12 @@ if (flags === '-u') {
   // Backup current adapter
   backup();
   const updateCmd = 'unzip -o updatePackage.zip'
-
-
+    + ' && cd adapter_modifications'
+    + ' && node update.js updateFiles';
   execSync(updateCmd, { encoding: 'utf-8' });
   const updateOutput = execSync(updateCmd, { encoding: 'utf-8' });
   if (updateOutput.indexOf('Lint exited with code 1') >= 0
-
+    || updateOutput.indexOf('Tests exited with code 1') >= 0) {
     if (rls.keyInYN('Adapter failed tests or lint after updating. Would you like to revert the changes?')) {
       console.log('Reverting changes...');
       revertMod();
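A minimal sketch (not from the package) of why the maxBuffer additions above matter: execSync buffers the child process output and throws once that buffer limit is exceeded (the default is 1 MiB in recent Node.js releases), which verbose zip/unzip runs over a whole adapter tree can hit. Raising the limit to 2 MiB, as modify.js now does, avoids that failure:

const { execSync } = require('child_process');

// Without the option, a chatty command can fail with a 'maxBuffer length exceeded' error.
// The 2 MiB value below mirrors the one used in modify.js; tune it to the expected output size.
const output = execSync('zip -r previousVersion.zip .', {
  encoding: 'utf-8',
  maxBuffer: 1024 * 1024 * 2
});
console.log(`zip produced ${output.length} characters of output`);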
@@ -1,9 +1,9 @@
 #!/usr/bin/env node
 /* @copyright Itential, LLC 2019 */
 
-const fs = require('fs-extra');
 const path = require('path');
 const { spawnSync } = require('child_process');
+const fs = require('fs-extra');
 const { createBundle } = require('./artifactize');
 
 const nodeEntryPath = path.resolve('.');
package/utils/pre-commit.sh
CHANGED
@@ -19,6 +19,8 @@ node utils/testRunner.js -r
 
 # update the adapter information file
 node utils/adapterInfo.js
+node utils/methodDocumentor.js
+git add CALLS.md report/adapterInfo.json
 
 # security audit on the code
 npm audit --registry=https://registry.npmjs.org --audit-level=moderate
package/utils/taskMover.js
ADDED
@@ -0,0 +1,309 @@
+/* eslint-disable */
+const fs = require('fs');
+
+const blacklistTasks = [
+  'genericAdapterRequest',
+  'genericAdapterRequestNoBasePath',
+  'hasEntities',
+  'healthcheck'
+];
+
+const adapterBaseTasks = [
+  'getDevicesFiltered',
+  'isAlive',
+  'getConfig',
+  'getDevice',
+  'iapUpdateAdapterConfiguration',
+  'iapFindAdapterPath',
+  'iapSuspendAdapter',
+  'iapUnsuspendAdapter',
+  'iapGetAdapterQueue',
+  'iapTroubleshootAdapter',
+  'iapRunAdapterHealthcheck',
+  'iapRunAdapterConnectivity',
+  'iapRunAdapterBasicGet',
+  'iapMoveAdapterEntitiesToDB',
+  'getDevice',
+  'getDevicesFiltered',
+  'isAlive',
+  'getConfig',
+  'iapGetDeviceCount',
+  'iapRunAdapterLint',
+  'iapRunAdapterTests',
+  'iapGetAdapterInventory'
+];
+
+function updatePronghorn(tasks, original, updated) {
+  const originalFile = require(original);
+  const unusedMethods = [];
+  const usedMethods = originalFile.methods.filter((method) => {
+    if (tasks.includes(method.name)) {
+      unusedMethods.push(method);
+      return false;
+    }
+    return true;
+  });
+  //write used and unused to new files
+  let updatedFile;
+  if (!fs.existsSync(updated)) {
+    updatedFile = { ...originalFile, methods: [], src: 'adapter-inactive.js' };
+  } else {
+    updatedFile = require(updated);
+  }
+  updatedFile.methods = updatedFile.methods.concat(unusedMethods);
+  originalFile.methods = usedMethods;
+  fs.writeFileSync(updated, JSON.stringify(updatedFile, null, 2));
+  fs.writeFileSync(original, JSON.stringify(originalFile, null, 2));
+  return 'Done';
+}
+
+function flipTaskFlag(task, pronghornPath, value)
+{
+  const pronghorn = require(pronghornPath);
+  const index = pronghorn.methods.findIndex((method) => method.name === task);
+  pronghorn.methods[index] = { ...pronghorn.methods[index], task: value };
+  fs.writeFileSync(pronghornPath, JSON.stringify(pronghorn, null, 2));
+}
+
+//Return array of relevant paths given adapter directory
+function createPaths(currentAdapter) {
+  const paths = [];
+  const filePaths = [
+    'adapter.js',
+    'pronghorn.json',
+    'test/integration/adapterTestIntegration.js',
+    'test/unit/adapterTestUnit.js',
+    'adapter-inactive.js',
+    'pronghorn-inactive.json',
+  ];
+  filePaths.forEach((file) => {
+    paths.push(`${currentAdapter}/${file}`);
+  });
+  return paths;
+}
+
+function insert(str, index, value) {
+  return str.substr(0, index) + value + str.substr(index);
+}
+
+//modify adapter js
+//original - path to file containing tasks we want to remove
+// updated - path to file we want to move the tasks to
+function updateAdapterJs(tasks, original, updated, adapterDir) {
+  if (!fs.existsSync(original)) {
+    //could do this or just let the error ocurr lower down and catch in warpper
+    throw new Error(`Original file ${original} does not exist.`);
+  }
+  let originalFile = fs.readFileSync(original, 'utf8');
+  let updatedFile;
+  if (!fs.existsSync(updated)) {
+    const adapterExport = require(`${adapterDir}/pronghorn.json`).export;
+    updatedFile = `/* @copyright Itential, LLC 2019 */\n\n/* eslint import/no-dynamic-require: warn */\n/* eslint no-unused-vars: warn */\n/* global log */\n\nconst path = require('path');\n\nconst AdapterBaseCl = require(path.join(__dirname, 'adapterBase.js'));\n\nclass ${adapterExport}Inactive extends AdapterBaseCl {}\n`;
+    //To do handles backup files where og doesn't exist
+  } else {
+    updatedFile = fs.readFileSync(updated, 'utf8');
+  }
+
+  tasks.forEach((method) => {
+    //accounting for different js docs format
+    const comment = originalFile.indexOf(`* @function ${method}`);
+    const start = originalFile.slice(0, comment).lastIndexOf('/**');
+    if (start !== -1) {
+      //next comment block
+      const end = originalFile.indexOf('/**\n', start + 1);
+      let func = end === -1
+        ? originalFile.substring(start - 3, originalFile.lastIndexOf('}'))
+        : originalFile.substring(start, end);
+      originalFile = originalFile.replace(func, '');
+      func = '\n ' + func.trim() + '\n';
+      updatedFile = insert(updatedFile, updatedFile.lastIndexOf('}'), func);
+    } else {
+      console.log(`Task ${method} wasn't found in original file. Skipping.`);
+    }
+  });
+  fs.writeFileSync(original, originalFile, 'utf8');
+  fs.writeFileSync(updated, updatedFile, 'utf8');
+  return 'done';
+}
+
+//Update test file for when we deactivate a task
+function deactivateTest(adapterPath, testPath, tasks) {
+  let unitTest = fs.readFileSync(`${adapterPath}/${testPath}`, 'utf8');
+  tasks.forEach((task) => {
+    const searchStr = `describe('#${task}`;
+    unitTest = unitTest.replace(searchStr, `describe.skip('#${task}`);
+  });
+  fs.writeFileSync(`${adapterPath}/${testPath}`, unitTest, 'utf8');
+}
+
+//Update test file when we activate tasks
+function activateTest(adapterPath, testPath, tasks) {
+  let unitTest = fs.readFileSync(`${adapterPath}/${testPath}`, 'utf8');
+  //tasks ==> toMove
+  tasks.forEach((task) => {
+    const searchStr = `describe.skip('#${task}`;
+    unitTest = unitTest.replace(searchStr, `describe('#${task}`);
+  });
+  fs.writeFileSync(`${adapterPath}/${testPath}`, unitTest, 'utf8');
+}
+
+//backups are not actually being written back
+function rollbackChanges(adapterPath) {
+  const backups = fs.readdirSync(`${adapterPath}/temp`); //this is an array of file names not the full path
+  const filePaths = createPaths(adapterPath);
+  for (let i = 0; i < backups.length; i++) {
+    const file = fs.readFileSync(`${adapterPath}/temp/${backups[i]}`, 'utf8'); //make sure this is getting the file
+    const currentFile = filePaths.find((path) => {
+      const index = path.split('/').length - 1;
+      const fileName = path.split('/')[index];
+      return fileName === backups[i].replace('temp-', '');
+    }); //returns undefined if no match
+
+    if (currentFile) {
+      fs.writeFileSync(currentFile, file, 'utf8');
+    }
+  }
+  //inactive didn't exist before script
+  if (!backups.includes('temp-adapter-inactive.js')) {
+    fs.unlinkSync(`${adapterPath}/pronghorn-inactive.json`);
+    fs.unlinkSync(`${adapterPath}/adapter-inactive.js`);
+  }
+  deleteBackups(adapterPath);
+}
+
+function deleteBackups(adapterPath) {
+  fs.rmSync(`${adapterPath}/temp`, { recursive: true });
+}
+
+function activateTasks(adapterDir, tasks) {
+  const toDelete = [];
+  const backupFiles = [];
+  const filePaths = createPaths(adapterDir);
+  try {
+    //take backup of each file here
+    if (!fs.existsSync(`${adapterDir}/temp`)) {
+      fs.mkdirSync(`${adapterDir}/temp`);
+    }
+    filePaths.forEach((filePath) => {
+      if (fs.existsSync(filePath)) {
+        const index = filePath.split('/').length - 1;
+        const backupName = `temp-${filePath.split('/')[index]}`;
+        backupFiles.push(`${adapterDir}/temp/${backupName}`);
+        fs.copyFileSync(filePath, `${adapterDir}/temp/${backupName}`);
+      } else {
+        //File doesn't exist before script
+        toDelete.push(filePath);
+      }
+    });
+    tasks = tasks.filter((task) => {
+      if (adapterBaseTasks.includes(task)) {
+        flipTaskFlag(task, `${adapterDir}/pronghorn.json`, true);
+        return false;
+      } else {
+        return true;
+      }
+    });
+    updateAdapterJs(
+      tasks,
+      `${adapterDir}/adapter-inactive.js`,
+      `${adapterDir}/adapter.js`,
+      adapterDir
+    );
+    updatePronghorn(
+      tasks,
+      `${adapterDir}/pronghorn-inactive.json`,
+      `${adapterDir}/pronghorn.json`
+    );
+    activateTest(
+      adapterDir,
+      '/test/integration/adapterTestIntegration.js',
+      tasks
+    );
+    activateTest(adapterDir, '/test/unit/adapterTestUnit.js', tasks);
+    return 'success';
+  } catch (e) {
+    console.log(`Error: ${e} ocurred during execution. Rolling back changes.`);
+    for (let i = 0; i < backupFiles.length; i++) {
+      const file = fs.readFileSync(backupFiles[i], 'utf8');
+      fs.writeFileSync(filePaths[i], file, 'utf8');
+    }
+    toDelete.forEach((filePath) => {
+      if (fs.existsSync(filePath)) {
+        fs.unlinkSync(filePath);
+      }
+    });
+    deleteBackups(adapterDir);
+    process.exit(1);
+  }
+}
+
+//moving from adapter.js to adapter-inactive.js
+function deactivateTasks(adapterDir, tasks) {
+  const toDelete = [];
+  const backupFiles = [];
+  const filePaths = createPaths(adapterDir);
+  try {
+    //take backup of each file here
+    if (!fs.existsSync(`${adapterDir}/temp`)) {
+      fs.mkdirSync(`${adapterDir}/temp`);
+    }
+    filePaths.forEach((filePath) => {
+      if (fs.existsSync(filePath)) {
+        const index = filePath.split('/').length - 1;
+        const backupName = `temp-${filePath.split('/')[index]}`;
+        backupFiles.push(`${adapterDir}/temp/${backupName}`);
+        fs.copyFileSync(filePath, `${adapterDir}/temp/${backupName}`);
+      } else {
+        //File doesn't exist before script
+        toDelete.push(filePath);
+      }
+    });
+    //filter tasks for blacklisted tasks or IAP tasks
+    tasks = tasks.filter((task) => {
+      if (blacklistTasks.includes(task)) {
+        console.log(`${task} cannot be deactivated.`);
+        return false;
+      } else if (adapterBaseTasks.includes(task)) {
+        flipTaskFlag(task, `${adapterDir}/pronghorn.json`, false);
+        return false;
+      } else {
+        return true;
+      }
+    });
+    updateAdapterJs(
+      tasks,
+      `${adapterDir}/adapter.js`,
+      `${adapterDir}/adapter-inactive.js`,
+      adapterDir
+    );
+    updatePronghorn(
+      tasks,
+      `${adapterDir}/pronghorn.json`,
+      `${adapterDir}/pronghorn-inactive.json`
+    );
+    deactivateTest(
+      adapterDir,
+      '/test/integration/adapterTestIntegration.js',
+      tasks
+    );
+    deactivateTest(adapterDir, '/test/unit/adapterTestUnit.js', tasks);
+    return 'success';
+  } catch (e) {
+    console.log(`Error: ${e} ocurred during execution. Rolling back changes.`);
+    for (let i = 0; i < backupFiles.length; i++) {
+      const file = fs.readFileSync(backupFiles[i], 'utf8');
+      fs.writeFileSync(filePaths[i], file, 'utf8');
+    }
+    toDelete.forEach((filePath) => {
+      if (fs.existsSync(filePath)) {
+        fs.unlinkSync(filePath);
+      }
+    });
+    deleteBackups(adapterDir);
+    process.exit(1);
+  }
+}
+
+module.exports = {
+  activateTasks, deactivateTasks, rollbackChanges, deleteBackups
+};