@itentialopensource/adapter-microsoft_graph 1.0.0
Sign up to get free protection for your applications and to get access to all the features.
- package/.eslintignore +6 -0
- package/.eslintrc.js +18 -0
- package/.gitlab/.gitkeep +0 -0
- package/.gitlab/issue_templates/.gitkeep +0 -0
- package/.gitlab/issue_templates/Default.md +17 -0
- package/.gitlab/issue_templates/bugReportTemplate.md +42 -0
- package/.gitlab/issue_templates/featureRequestTemplate.md +14 -0
- package/.jshintrc +0 -0
- package/AUTH.md +39 -0
- package/BROKER.md +199 -0
- package/CALLS.md +170 -0
- package/CHANGELOG.md +9 -0
- package/CODE_OF_CONDUCT.md +43 -0
- package/CONTRIBUTING.md +172 -0
- package/ENHANCE.md +69 -0
- package/LICENSE +201 -0
- package/PROPERTIES.md +641 -0
- package/README.md +337 -0
- package/SUMMARY.md +9 -0
- package/SYSTEMINFO.md +11 -0
- package/TROUBLESHOOT.md +47 -0
- package/adapter.js +18798 -0
- package/adapterBase.js +1787 -0
- package/entities/.generic/action.json +214 -0
- package/entities/.generic/schema.json +28 -0
- package/entities/.system/action.json +50 -0
- package/entities/.system/mockdatafiles/getToken-default.json +3 -0
- package/entities/.system/mockdatafiles/healthcheck-default.json +3 -0
- package/entities/.system/schema.json +19 -0
- package/entities/.system/schemaTokenReq.json +53 -0
- package/entities/.system/schemaTokenResp.json +53 -0
- package/entities/Applications/action.json +127 -0
- package/entities/Applications/schema.json +35 -0
- package/entities/AzureADDevice/action.json +106 -0
- package/entities/AzureADDevice/schema.json +45 -0
- package/entities/Batch/action.json +24 -0
- package/entities/Batch/schema.json +19 -0
- package/entities/CaseCreation/action.json +249 -0
- package/entities/CaseCreation/schema.json +30 -0
- package/entities/Catalog/action.json +25 -0
- package/entities/Catalog/schema.json +19 -0
- package/entities/ConnectionSetup/action.json +148 -0
- package/entities/ConnectionSetup/schema.json +25 -0
- package/entities/ContentSync/action.json +65 -0
- package/entities/ContentSync/schema.json +21 -0
- package/entities/CreateDSR/action.json +45 -0
- package/entities/CreateDSR/schema.json +20 -0
- package/entities/Deployment/action.json +106 -0
- package/entities/Deployment/schema.json +34 -0
- package/entities/DeploymentAudience/action.json +66 -0
- package/entities/DeploymentAudience/schema.json +32 -0
- package/entities/Events/action.json +108 -0
- package/entities/Events/schema.json +34 -0
- package/entities/Files/action.json +108 -0
- package/entities/Files/schema.json +23 -0
- package/entities/Groups/action.json +25 -0
- package/entities/Groups/schema.json +19 -0
- package/entities/IdentitySync/action.json +105 -0
- package/entities/IdentitySync/schema.json +23 -0
- package/entities/Insights/action.json +46 -0
- package/entities/Insights/schema.json +20 -0
- package/entities/LabelManagement/action.json +106 -0
- package/entities/LabelManagement/schema.json +23 -0
- package/entities/Mail/action.json +212 -0
- package/entities/Mail/schema.json +72 -0
- package/entities/Memberships/action.json +167 -0
- package/entities/Memberships/schema.json +70 -0
- package/entities/Misc/action.json +66 -0
- package/entities/Misc/schema.json +21 -0
- package/entities/Notebooks/action.json +107 -0
- package/entities/Notebooks/schema.json +34 -0
- package/entities/OpenExtensions/action.json +65 -0
- package/entities/OpenExtensions/schema.json +54 -0
- package/entities/People/action.json +46 -0
- package/entities/People/schema.json +31 -0
- package/entities/SchemaExtensions/action.json +65 -0
- package/entities/SchemaExtensions/schema.json +32 -0
- package/entities/Search/action.json +24 -0
- package/entities/Search/schema.json +19 -0
- package/entities/Security/action.json +151 -0
- package/entities/Security/schema.json +58 -0
- package/entities/SharePoint/action.json +214 -0
- package/entities/SharePoint/schema.json +39 -0
- package/entities/Subscriptions/action.json +65 -0
- package/entities/Subscriptions/schema.json +32 -0
- package/entities/TasksPlanner/action.json +272 -0
- package/entities/TasksPlanner/schema.json +86 -0
- package/entities/TasksTodo/action.json +187 -0
- package/entities/TasksTodo/schema.json +49 -0
- package/entities/Teams/action.json +519 -0
- package/entities/Teams/schema.json +120 -0
- package/entities/TrackDSRStatus/action.json +108 -0
- package/entities/TrackDSRStatus/schema.json +23 -0
- package/entities/TriggerEventForExistingLabel/action.json +108 -0
- package/entities/TriggerEventForExistingLabel/schema.json +23 -0
- package/entities/Users/action.json +213 -0
- package/entities/Users/schema.json +50 -0
- package/entities/WorkflowAutomation/action.json +249 -0
- package/entities/WorkflowAutomation/schema.json +30 -0
- package/error.json +190 -0
- package/package.json +87 -0
- package/pronghorn.json +8654 -0
- package/propertiesDecorators.json +14 -0
- package/propertiesSchema.json +1248 -0
- package/refs?service=git-upload-pack +0 -0
- package/report/creationReport.json +1715 -0
- package/report/graph.json +14709 -0
- package/sampleProperties.json +195 -0
- package/test/integration/adapterTestBasicGet.js +83 -0
- package/test/integration/adapterTestConnectivity.js +93 -0
- package/test/integration/adapterTestIntegration.js +6059 -0
- package/test/unit/adapterBaseTestUnit.js +949 -0
- package/test/unit/adapterTestUnit.js +7492 -0
- package/utils/adapterInfo.js +206 -0
- package/utils/addAuth.js +94 -0
- package/utils/artifactize.js +146 -0
- package/utils/basicGet.js +50 -0
- package/utils/checkMigrate.js +63 -0
- package/utils/entitiesToDB.js +178 -0
- package/utils/findPath.js +74 -0
- package/utils/methodDocumentor.js +225 -0
- package/utils/modify.js +154 -0
- package/utils/packModificationScript.js +35 -0
- package/utils/patches2bundledDeps.js +90 -0
- package/utils/pre-commit.sh +32 -0
- package/utils/removeHooks.js +20 -0
- package/utils/setup.js +33 -0
- package/utils/tbScript.js +246 -0
- package/utils/tbUtils.js +490 -0
- package/utils/testRunner.js +298 -0
- package/utils/troubleshootingAdapter.js +195 -0
- package/workflows/README.md +3 -0
@@ -0,0 +1,178 @@
|
|
1
|
+
/* @copyright Itential, LLC 2021 */
|
2
|
+
|
3
|
+
// Set globals
|
4
|
+
/* global log */
|
5
|
+
|
6
|
+
/* eslint import/no-dynamic-require: warn */
|
7
|
+
/* eslint global-require: warn */
|
8
|
+
/* eslint no-unused-vars: warn */
|
9
|
+
/* eslint import/no-unresolved: warn */
|
10
|
+
|
11
|
+
/**
|
12
|
+
* This script is used to read through an adapter's entities files
|
13
|
+
* and then creates documents and enters them into the IAP mongodb
|
14
|
+
*/
|
15
|
+
|
16
|
+
const fs = require('fs');
|
17
|
+
const path = require('path');
|
18
|
+
const utils = require('./tbUtils');
|
19
|
+
|
20
|
+
/**
 * Load and decrypt the IAP properties.json from the given IAP directory.
 *
 * @param {String} iapDir - root directory of the IAP installation
 * @returns {Promise<Object>} the decrypted pronghorn properties
 */
const getPronghornProps = async (iapDir) => {
  log.trace('Retrieving properties.json file...');
  const encryptedProps = require(path.join(iapDir, 'properties.json'));
  log.trace('Decrypting properties...');
  const { PropertyEncryption } = require('@itential/itential-utils');
  const decryptor = new PropertyEncryption();
  const decryptedProps = await decryptor.decryptProps(encryptedProps);
  log.trace('Found properties.\n');
  return decryptedProps;
};
|
31
|
+
|
32
|
+
/**
 * Function used to take a file path to an entity directory and build
 * a document that corresponds to the entity files.
 *
 * @param {String} pathstring - path to a single entity directory
 * @returns {Object} { actions, schema, mockdatafiles } where actions is the
 *          array from action.json (empty when the file is missing), schema is
 *          a list of { name, schema } for every other .json file, and
 *          mockdatafiles maps mock file names to their parsed contents.
 */
const buildDoc = (pathstring) => {
  let files = fs.readdirSync(pathstring);

  // load the mockdatafiles
  const mockdatafiles = {};
  if (files.includes('mockdatafiles') && fs.lstatSync(`${pathstring}/mockdatafiles`).isDirectory()) {
    fs.readdirSync(`${pathstring}/mockdatafiles`).forEach((file) => {
      if (file.split('.').pop() === 'json') {
        const mockpath = `${pathstring}/mockdatafiles/${file}`;
        const data = JSON.parse(fs.readFileSync(mockpath));
        mockdatafiles[mockpath.split('/').pop()] = data;
      }
    });
  }

  // load the action data
  let actions;
  if (files.includes('action.json')) {
    actions = JSON.parse(fs.readFileSync(`${pathstring}/action.json`));
  }

  // Load schema.json and other schemas in remaining json files
  files = files.filter((f) => (f !== 'action.json') && f.endsWith('.json'));
  const schema = [];
  files.forEach((file) => {
    const data = JSON.parse(fs.readFileSync(`${pathstring}/${file}`));
    schema.push({
      name: file,
      schema: data
    });
  });

  // return the data
  // BUG FIX: guard against a missing action.json - previously this threw
  // "Cannot read properties of undefined (reading 'actions')".
  return {
    actions: actions ? actions.actions : [],
    schema,
    mockdatafiles
  };
};
|
75
|
+
|
76
|
+
/**
 * Function used to get the database from the options or a provided directory.
 *
 * @param {Object} options - may contain pronghornProps (object or JSON string)
 *                           or iapDir (path to an IAP install)
 * @returns {Promise<Object>} resolves with the pronghorn properties.
 *          Always returns a Promise so callers can uniformly chain .then().
 */
const optionsHandler = (options) => {
  // if the database properties were provided in the options - return them
  if (options.pronghornProps) {
    if (typeof options.pronghornProps === 'string') {
      // BUG FIX: this branch previously returned the parsed object directly,
      // so callers doing optionsHandler(...).then(...) crashed with
      // "then is not a function". Wrap it in a resolved Promise.
      return Promise.resolve(JSON.parse(options.pronghornProps));
    }
    return Promise.resolve(options.pronghornProps);
  }

  // if the directory was provided, get the pronghorn props from the directory
  if (options.iapDir) {
    return getPronghornProps(options.iapDir);
  }

  // if nothing was provided, error
  return Promise.reject(new Error('Neither pronghornProps nor iapDir defined in options!'));
};
|
96
|
+
|
97
|
+
/**
 * Function used to put the adapter configuration into the provided database.
 *
 * @param {String} targetPath - adapter root directory (defaults to one level
 *                              above utils/ when not provided)
 * @param {Object|String} options - options object (or JSON string) containing
 *                                  id (required), targetCollection (optional),
 *                                  and pronghornProps or iapDir for db access
 * @returns {Promise} resolves with the insertMany result
 * @throws {Error} when id is missing or required files are absent
 */
const moveEntitiesToDB = async (targetPath, options) => {
  // set local variables
  let myOpts = options;
  let myPath = targetPath;

  // if we got a string parse into a JSON object
  if (typeof myOpts === 'string') {
    myOpts = JSON.parse(myOpts);
  }

  // if there is no target collection - set the collection to the default
  if (!myOpts.targetCollection) {
    myOpts.targetCollection = 'adapter_configs';
  }

  // if there is no id error since we need an id for the entities
  if (!myOpts.id) {
    throw new Error('Adapter ID required!');
  }

  // get the pronghorn database properties
  // BUG FIX: pass the parsed myOpts - the original passed the raw `options`,
  // which is still a JSON string when the caller supplied one.
  return optionsHandler(myOpts).then(async (currentProps) => {
    // Check valid filepath provided
    if (!myPath) {
      // if no path use the current directory without the utils
      myPath = path.join(__dirname, '../');
    } else if (myPath.slice(-1) === '/') {
      myPath = myPath.slice(0, -1);
    }

    // verify the entity path exists
    const entitiesPath = `${myPath}/entities`;
    if (!fs.existsSync(entitiesPath)) {
      throw new Error(`Entities path does not exist in filesystem: ${entitiesPath}`);
    } else {
      log.trace('Target found on filesystem');
    }

    // Get adapter details
    if (!fs.existsSync(`${myPath}/pronghorn.json`)) {
      throw new Error(`pronghorn.json does not exist in path: ${myPath}`);
    } else {
      log.trace('pronghorn.json found on filesystem');
    }
    const adapterData = JSON.parse(fs.readFileSync(`${myPath}/pronghorn.json`));

    // Load files from the filesystem
    const docs = [];
    const entities = fs.readdirSync(entitiesPath);
    entities.forEach((entity) => {
      const entityPath = `${entitiesPath}/${entity}`;
      // BUG FIX: stat the individual entity, not the parent entities dir -
      // the original always returned true and crashed on stray files.
      const isDir = fs.lstatSync(entityPath).isDirectory();

      // Build doc for entity
      if (isDir) {
        const doc = {
          id: myOpts.id,
          type: adapterData.id,
          entity,
          ...buildDoc(entityPath)
        };
        docs.push(doc);
      }
    });

    // Upload documents to db collection
    const db = await utils.connect(currentProps).catch((err) => { console.error(err); throw err; });
    if (!db) {
      console.error('Error occurred when connecting to database', currentProps);
      throw new Error('Database not found');
    }
    const collection = db.collection(myOpts.targetCollection);
    const res = await collection.insertMany(docs, { checkKeys: false }).catch((err) => { console.error(err); throw err; });
    return res;
  });
};

module.exports = { moveEntitiesToDB };
|
@@ -0,0 +1,74 @@
|
|
1
|
+
#!/usr/bin/env node
|
2
|
+
/* @copyright Itential, LLC 2019 */
|
3
|
+
/* eslint global-require:warn */
|
4
|
+
/* eslint import/no-dynamic-require:warn */
|
5
|
+
/* eslint prefer-destructuring:warn */
|
6
|
+
|
7
|
+
const fs = require('fs-extra');
|
8
|
+
const path = require('path');
|
9
|
+
const rls = require('readline-sync');
|
10
|
+
|
11
|
+
/**
|
12
|
+
* This script will determine the type of integration test to run
|
13
|
+
* based on input. If other information is needed, it will solicit
|
14
|
+
* that input and then edit the integration test accordingly.
|
15
|
+
*/
|
16
|
+
|
17
|
+
/**
 * Searches every entity's action.json for actions whose entitypath contains
 * the provided path fragment, printing each match.
 *
 * @param {String} apath - path or partial path to look for
 * @returns {String} 'Done' on completion (even when nothing matched),
 *                   'error' when the entities directory or an action.json
 *                   is missing
 */
function checkActionFiles(apath) {
  // verify the path
  if (!apath) {
    console.log(' NO PATH PROVIDED!');
    return 'Done';
  }

  // make sure the entities directory exists
  const entitydir = path.join(__dirname, '../entities');
  if (!fs.existsSync(entitydir) || !fs.statSync(entitydir).isDirectory()) {
    console.log('Could not find the entities directory');
    return 'error';
  }

  const entities = fs.readdirSync(entitydir);
  let found = false;

  // need to go through each entity in the entities directory
  for (let e = 0; e < entities.length; e += 1) {
    // BUG FIX: per the original intent ("do not care about extra files -
    // only entities (dir)"), stray non-directory entries are skipped
    // instead of aborting the whole scan with 'error'.
    if (!fs.statSync(`${entitydir}/${entities[e]}`).isDirectory()) {
      continue; // eslint-disable-line no-continue
    }

    // see if the action file exists in the entity
    if (!fs.existsSync(`${entitydir}/${entities[e]}/action.json`)) {
      console.log(`Could not find entities ${entities[e]} action.json file`);
      return 'error';
    }

    // Read the entity actions from the file system
    const actions = require(`${entitydir}/${entities[e]}/action.json`);

    // go through all of the actions and report the ones matching apath
    for (let a = 0; a < actions.actions.length; a += 1) {
      if (actions.actions[a].entitypath.indexOf(apath) >= 0) {
        found = true;
        console.log(` Found - entity: ${entities[e]} action: ${actions.actions[a].name}`);
        console.log(`         method: ${actions.actions[a].method} path: ${actions.actions[a].entitypath}`);
        console.log(' ');
      }
    }
  }

  if (!found) {
    console.log(' PATH NOT FOUND!');
  }
  return 'Done';
}
|
71
|
+
|
72
|
+
// Prompt for the path fragment, echo it back, then scan the action files.
const findPath = rls.question('Enter the path/partial path you are looking for: ');
console.log(`PATH: ${findPath}`);
checkActionFiles(findPath);
|
@@ -0,0 +1,225 @@
|
|
1
|
+
/* eslint global-require:warn */
|
2
|
+
/* eslint import/no-dynamic-require:warn */
|
3
|
+
/* eslint no-param-reassign:warn */
|
4
|
+
|
5
|
+
const fs = require('fs-extra');
|
6
|
+
const esprima = require('esprima');
|
7
|
+
|
8
|
+
// Getting the base directory: when this script runs from inside the utils
// folder, strip the trailing '/utils' so adaptdir points at the adapter root.
const UTILS_SUFFIX = '/utils';
let adaptdir = __dirname;
if (adaptdir.endsWith(UTILS_SUFFIX)) {
  // use the suffix length instead of the original magic number 6
  adaptdir = adaptdir.substring(0, adaptdir.length - UTILS_SUFFIX.length);
}
|
13
|
+
|
14
|
+
/**
 * Builds the documentation record for a single adapter method.
 *
 * @param {String} funcName - method name
 * @param {Array} funcArgs - argument strings (defaults rendered as 'x = v')
 * @param {String} entityPath - REST path for the method; when undefined the
 *                              method is undocumented and {} is returned
 * @param {String} description - method description (may be undefined)
 * @param {String} workflow - 'Yes'/'No' workflow flag (used only with description)
 * @returns {Object} record with method_signature, path, description, workflow,
 *                   or an empty object when entityPath is undefined
 */
function createObjectForFunction(
  funcName,
  funcArgs,
  entityPath,
  description,
  workflow
) {
  // no entity path means there is nothing to document
  if (entityPath === undefined) {
    return {};
  }
  const funcObject = {
    method_signature: `${funcName}(${funcArgs.join(', ')})`,
    path: entityPath
  };
  if (description === undefined) {
    funcObject.description = '';
    funcObject.workflow = 'No';
  } else {
    funcObject.description = description;
    funcObject.workflow = workflow;
  }
  return funcObject;
}
|
36
|
+
|
37
|
+
/**
 * Looks up the REST entitypath for a named action inside an entity's
 * action.json. Returns undefined for the '.generic' pseudo-entity or when
 * no entity/action matches. When several actions share the name, the last
 * match wins (matches the original forEach behavior).
 *
 * @param {String} entity - entity directory name (may be undefined)
 * @param {String} funcName - action name to look up
 * @returns {String|undefined} the entitypath, or undefined
 */
function getPathFromEntity(entity, funcName) {
  if (entity === undefined || entity === '.generic') {
    return undefined;
  }
  // Access the action.json file for the certain entity to get the path
  const actionJSON = require(`${adaptdir}/entities/${entity}/action.json`);
  let epath;
  for (const action of actionJSON.actions) {
    if (action.name === funcName) {
      epath = action.entitypath;
    }
  }
  return epath;
}
|
53
|
+
|
54
|
+
/**
 * Parses adapter.js with esprima and collects a documentation record for
 * every class method that calls into an entity, pushing the records onto
 * functionList.
 *
 * @param {String} filename - path to the adapter source file to parse
 * @param {Object} descriptionObj - funcName -> description (from pronghorn.json)
 * @param {Object} workflowObj - funcName -> 'Yes'/'No' (from pronghorn.json)
 * @param {Array} functionList - out-param: records appended here
 */
function readFileUsingLib(filename, descriptionObj, workflowObj, functionList) {
  // read the file
  const aFile = fs.readFileSync(filename, 'utf8');
  // parsing the file to get the function and class declarations.
  const aFileFuncArgs = esprima.parseScript(aFile);

  // Looping through all the declarations parsed:
  aFileFuncArgs.body.forEach((e) => {
    // Getting only the class declaration as it has our required functions.
    if (e.type === 'ClassDeclaration') {
      const methodDefinition = e.body;
      methodDefinition.body.forEach((method) => {
        // Getting method name and its params in the class.
        const funcName = method.key.name;
        const funcArgs = [];
        method.value.params.forEach((param) => {
          if (param.type === 'Identifier') {
            // plain parameter: keep the name as-is
            funcArgs.push(param.name);
          } else {
            // assumes a default parameter (AssignmentPattern) with a literal
            // default - destructured params would not have .left/.right.value
            // TODO confirm adapter.js only uses simple defaults
            const args = `${param.left.name} = ${param.right.value}`;
            funcArgs.push(args);
          }
        });

        // Getting the entity for the method:
        // NOTE(review): assumes the first statement of the try block is a
        // return of a call whose first argument is the entity name string -
        // this matches generated adapter method bodies; verify if the
        // adapter template changes.
        let entity;
        method.value.body.body.forEach((statementType) => {
          if (statementType.type === 'TryStatement') {
            entity = statementType.block.body[0].argument.arguments[0].value;
          }
        });
        const entityPath = getPathFromEntity(entity, funcName);

        // Creating and storing the object for the method.
        // Methods without a resolvable entity path are skipped entirely.
        if (entityPath !== undefined) {
          functionList.push(
            createObjectForFunction(
              funcName,
              funcArgs,
              entityPath,
              descriptionObj[funcName],
              workflowObj[funcName]
            )
          );
        }
      });
    }
  });
}
|
103
|
+
|
104
|
+
/**
 * Reads method metadata from a pronghorn-style JSON file into the provided
 * lookup objects.
 *
 * @param {String} filename - absolute path to the JSON file (pronghorn.json)
 * @param {Object} descriptionObj - out-param: funcName -> description
 * @param {Object} workflowObj - out-param: funcName -> 'Yes' when the method
 *                               has a truthy task flag, else 'No'
 */
function readJSONFile(filename, descriptionObj, workflowObj) {
  // Parse the file directly rather than require() - avoids the module cache
  // and the import/no-dynamic-require lint warning declared at the top of
  // this file.
  const phJSON = JSON.parse(fs.readFileSync(filename, 'utf-8'));
  // Getting the methods array.
  const methodArray = phJSON.methods;
  methodArray.forEach((methodName) => {
    // Getting the method description and workflow:
    const funcName = methodName.name;
    descriptionObj[funcName] = methodName.description;
    workflowObj[funcName] = methodName.task ? 'Yes' : 'No';
  });
}
|
116
|
+
|
117
|
+
/**
 * Appends one HTML table row per documented method to the CALLS.md table,
 * replacing any previously generated rows, and writes the file back.
 *
 * @param {String} filename - path to the markdown file (CALLS.md)
 * @param {Array} functionList - records from createObjectForFunction with
 *                               method_signature, description, path, workflow
 */
function readMDFile(filename, functionList) {
  // Reading in the .md file and creating an array with each line as an element.
  const mdFile = fs.readFileSync(filename, 'utf-8');
  const fileSplit = mdFile.split('\n');
  // Storing the data that should be added back after the new rows.
  const linesToAddLater = [];
  let index = fileSplit.length - 1;

  // Removing all the blank lines at the end of the file.
  while (fileSplit[index] === '') {
    linesToAddLater.push(fileSplit.pop());
    index -= 1;
  }

  // A well-formed file ends with '</table>' followed by '<br>'.
  // BUG FIX: the original tested with ||, which accepted files where only
  // one of the two closing tags was present (matching the commented-out
  // intended logic that required BOTH); a half-corrupted file then crashed
  // in the '<th' scan below. Both tags must be present.
  if (fileSplit[index] === '<br>' && fileSplit[index - 1] === '</table>') {
    // Storing <br> and </table> to add later.
    linesToAddLater.push(fileSplit.pop());
    linesToAddLater.push(fileSplit.pop());
    index -= 2;
  } else {
    console.log('The file has bad content at the end.');
    return;
  }

  // Removing all the lines until the header tags are reached.
  while (!fileSplit[index].includes('<th')) {
    fileSplit.pop();
    index -= 1;
  }
  // Adding </tr> for the header row, because it got removed in the above loop.
  fileSplit.push('  </tr>');

  // Creating the tags for each method to be appended to the file.
  const tdBeginTag = '    <td style="padding:15px">';
  const tdEndTag = '</td>';
  functionList.forEach((func) => {
    const signCommand = `${tdBeginTag}${func.method_signature}${tdEndTag}`;
    const descCommand = `${tdBeginTag}${func.description}${tdEndTag}`;
    const pathCommand = `${tdBeginTag}${func.path}${tdEndTag}`;
    const workflowCommand = `${tdBeginTag}${func.workflow}${tdEndTag}`;
    fileSplit.push('  <tr>');
    fileSplit.push(signCommand);
    fileSplit.push(descCommand);
    fileSplit.push(pathCommand);
    fileSplit.push(workflowCommand);
    fileSplit.push('  </tr>');
  });

  // Adding </table> and <br> (and any trailing blank lines) back on the end.
  while (linesToAddLater.length > 0) {
    fileSplit.push(linesToAddLater.pop());
  }

  // Writing all the content back into the file.
  fs.writeFileSync(filename, fileSplit.join('\n'), {
    encoding: 'utf-8',
    flag: 'w'
  });
}
|
189
|
+
|
190
|
+
/**
 * Entry point: validates that adapter.js, pronghorn.json and CALLS.md exist
 * in the adapter root, gathers method metadata and rewrites the CALLS.md
 * method table.
 */
function getFileInfo() {
  // If files don't exist, say which one and bail out.
  if (!fs.existsSync(`${adaptdir}/adapter.js`)) {
    // BUG FIX: message said 'utils/adapter.js' but the check is on the
    // adapter root, matching the other two messages below.
    console.log('Missing - adapter.js');
    return;
  }
  if (!fs.existsSync(`${adaptdir}/pronghorn.json`)) {
    console.log('Missing - pronghorn.json');
    return;
  }
  if (!fs.existsSync(`${adaptdir}/CALLS.md`)) {
    console.log('Missing - CALLS.md');
    return;
  }

  const descriptionObj = {};
  const workflowObj = {};

  // Get the method descriptions and the workflow values from pronghorn.json file.
  readJSONFile(`${adaptdir}/pronghorn.json`, descriptionObj, workflowObj);

  // Get the method signature, entity path and create an object that contains
  // all the info regarding the method and push it to the functionList array.
  const functionList = [];
  readFileUsingLib(
    `${adaptdir}/adapter.js`,
    descriptionObj,
    workflowObj,
    functionList
  );

  // Rewrite the CALLS.md method table with the collected records.
  readMDFile(`${adaptdir}/CALLS.md`, functionList);
}

getFileInfo();
|
package/utils/modify.js
ADDED
@@ -0,0 +1,154 @@
|
|
1
|
+
const fs = require('fs-extra');
|
2
|
+
const Ajv = require('ajv');
|
3
|
+
const rls = require('readline-sync');
|
4
|
+
const { execSync } = require('child_process');
|
5
|
+
const { existsSync } = require('fs-extra');
|
6
|
+
const { getAdapterConfig } = require('./tbUtils');
|
7
|
+
const { name } = require('../package.json');
|
8
|
+
const propertiesSchema = require('../propertiesSchema.json');
|
9
|
+
|
10
|
+
const flags = process.argv[2];
|
11
|
+
|
12
|
+
/**
 * @summary Updates database instance with new adapter properties
 *
 * @function updateServiceItem
 * @returns {Promise} resolves when the service_configs document is updated
 */
async function updateServiceItem() {
  const { database, serviceItem } = await getAdapterConfig();
  const currentProps = serviceItem.properties.properties;
  // useDefaults mutates currentProps in place, filling missing defaults
  const ajv = new Ajv({ allErrors: true, useDefaults: true });
  const validate = ajv.compile(propertiesSchema);
  // BUG FIX: the validation result was silently discarded; log any schema
  // problems so a bad migration is visible (the update still proceeds, as
  // the original did).
  if (!validate(currentProps)) {
    console.log('Property validation warnings:', validate.errors);
  }
  console.log('Updating Properties...');
  await database.collection('service_configs').updateOne(
    { model: name }, { $set: serviceItem }
  );
  console.log('Properties Updated');
}
|
29
|
+
|
30
|
+
/**
 * @summary Creates a backup zip file of current adapter
 *
 * @function backup
 */
function backup() {
  // BUG FIX: the comment promised node_modules was excluded but the command
  // zipped everything. Exclude node_modules (revertMod reinstalls it anyway);
  // package-lock.json stays in the zip because revertMod removes it after
  // the unzip step.
  const backupCmd = 'zip -r previousVersion.zip . -x "node_modules/*"';
  execSync(backupCmd, { encoding: 'utf-8' });
}
|
40
|
+
|
41
|
+
/**
 * @summary Archives previous modifications and removes the modification package
 *
 * @function archiveMod
 * @param {String} modType - update(UPD) or migrate(MIG)
 */
function archiveMod(modType) {
  // make sure the archive directory exists before moving anything into it
  if (!existsSync('./adapter_modifications/archive')) {
    execSync('mkdir ./adapter_modifications/archive');
  }
  const zipFile = modType === 'UPD' ? 'updatePackage.zip' : 'migrationPackage.zip';
  // timestamped folder name, e.g. UPD-2021-01-01T00:00:00.000Z
  const archiveName = `${modType}-${new Date().toISOString()}`;
  execSync(`mkdir adapter_modifications/archive/${archiveName}`);
  // shuffle the archive dir out, move the modification files into the new
  // timestamped folder, put the archive back, then drop the zip package
  const steps = [
    'mv adapter_modifications/archive .',
    `mv adapter_modifications/* archive/${archiveName}`,
    'mv archive adapter_modifications',
    `rm ${zipFile}`
  ];
  execSync(steps.join(' && '), { encoding: 'utf-8' });
}
|
61
|
+
|
62
|
+
/**
 * @summary Reverts modifications using backup zip file
 *
 * @function revertMod
 */
function revertMod() {
  const files = fs.readdirSync('./');
  // remove all files except previousVersion
  files.forEach((file) => {
    if (file !== 'previousVersion.zip') {
      fs.removeSync(file);
    }
  });
  // unzip previousVersion, reinstall dependencies and delete zipfile.
  // BUG FIX: use rm -f for package-lock.json - a plain rm aborts the whole
  // && chain (and therefore the restore) when the backup zip did not
  // contain a package-lock.json.
  execSync('unzip -o previousVersion.zip && rm -rf node_modules && rm -f package-lock.json && npm install');
  execSync('rm previousVersion.zip');
  console.log('Changes have been reverted');
}
|
80
|
+
|
81
|
+
// Main Script
// flags (process.argv[2]) selects the mode: -m migrate, -u update, -r revert.

// Migrate
if (flags === '-m') {
  if (!fs.existsSync('migrationPackage.zip')) {
    console.log('Migration Package not found. Download and place migrationPackage in the adapter root directory');
    process.exit();
  }
  // Backup current adapter
  backup();
  console.log('Migrating adapter and running tests...');
  const migrateCmd = 'unzip -o migrationPackage.zip'
    + ' && cd adapter_modifications'
    + ' && node migrate';
  const migrateOutput = execSync(migrateCmd, { encoding: 'utf-8' });
  console.log(migrateOutput);
  if (migrateOutput.indexOf('Lint exited with code 1') >= 0
    || migrateOutput.indexOf('Tests exited with code 1') >= 0) {
    if (rls.keyInYN('Adapter failed tests or lint after migrating. Would you like to revert the changes?')) {
      console.log('Reverting changes...');
      revertMod();
      process.exit();
    }
    console.log('Adapter Migration will continue. If you want to revert the changes, run the command npm run adapter:revert');
  }
  console.log('Installing new dependencies..');
  const updatePackageCmd = 'rm -rf node_modules && rm package-lock.json && npm install';
  const updatePackageOutput = execSync(updatePackageCmd, { encoding: 'utf-8' });
  console.log(updatePackageOutput);
  console.log('New dependencies installed');
  console.log('Updating adapter properties..');
  updateServiceItem().then(() => {
    console.log('Adapter Successfully Migrated. Restart adapter in IAP to apply the changes');
    archiveMod('MIG');
    process.exit();
  });
}

// Update
if (flags === '-u') {
  if (!fs.existsSync('updatePackage.zip')) {
    console.log('Update Package not found. Download and place updateAdapter.zip in the adapter root directory');
    process.exit();
  }
  // Backup current adapter
  backup();
  const updateCmd = 'unzip -o updatePackage.zip'
    + ' && cd adapter_modifications'
    + ' && node update.js updateFiles';
  // BUG FIX: the update command was previously executed TWICE (once with
  // the output discarded, then again to capture it), which applied the
  // update package two times. Run it exactly once.
  const updateOutput = execSync(updateCmd, { encoding: 'utf-8' });
  if (updateOutput.indexOf('Lint exited with code 1') >= 0
    || updateOutput.indexOf('Tests exited with code 1') >= 0) {
    if (rls.keyInYN('Adapter failed tests or lint after updating. Would you like to revert the changes?')) {
      console.log('Reverting changes...');
      revertMod();
      process.exit();
    }
    console.log('Adapter Update will continue. If you want to revert the changes, run the command npm run adapter:revert');
  }
  console.log(updateOutput);
  console.log('Adapter Successfully Updated. Restart adapter in IAP to apply the changes');
  archiveMod('UPD');
  process.exit();
}

// Revert
if (flags === '-r') {
  if (!fs.existsSync('previousVersion.zip')) {
    console.log('Previous adapter version not found. There are no changes to revert');
    process.exit();
  }
  revertMod();
}
|
@@ -0,0 +1,35 @@
|
|
1
|
+
#!/usr/bin/env node
|
2
|
+
/* @copyright Itential, LLC 2019 */
|
3
|
+
|
4
|
+
const fs = require('fs-extra');
|
5
|
+
const path = require('path');
|
6
|
+
const { spawnSync } = require('child_process');
|
7
|
+
const { createBundle } = require('./artifactize');
|
8
|
+
|
9
|
+
// Bundle the adapter, force node_modules into the npm pack tarball, and
// clean up the intermediate bundle folder on success.
const nodeEntryPath = path.resolve('.');
createBundle(nodeEntryPath).then((pathObj) => {
  const { bundlePath, bundledAdapterPath } = pathObj;
  const npmIgnorePath = path.join(bundledAdapterPath, '.npmignore');
  const adapterPackagePath = path.join(bundledAdapterPath, 'package.json');
  const artifactPackagePath = path.join(bundlePath, 'package.json');

  // remove node_modules from .npmignore so that node_modules are included in
  // the resulting tar from npm pack
  if (fs.existsSync(npmIgnorePath)) {
    let npmIgnoreString = fs.readFileSync(npmIgnorePath, 'utf8');
    npmIgnoreString = npmIgnoreString.replace('node_modules', '');
    npmIgnoreString = npmIgnoreString.replace('\n\n', '\n');
    fs.writeFileSync(npmIgnorePath, npmIgnoreString);
  }

  // add files to package so that node_modules are included in the resulting
  // tar from npm pack
  const adapterPackage = fs.readJSONSync(adapterPackagePath);
  adapterPackage.files = ['*'];
  fs.writeJSONSync(artifactPackagePath, adapterPackage, { spaces: 2 });
  const npmResult = spawnSync('npm', ['pack', '-q', bundlePath], { cwd: path.resolve(bundlePath, '..') });
  // BUG FIX: 'Script successful' was printed even when npm pack failed;
  // only report success on a zero exit status and surface stderr otherwise.
  if (npmResult.status === 0) {
    fs.removeSync(bundlePath);
    console.log('Bundle folder removed');
    console.log('Script successful');
  } else {
    console.error(`npm pack failed with status ${npmResult.status}`);
    if (npmResult.stderr) {
      console.error(npmResult.stderr.toString());
    }
    process.exitCode = 1;
  }
}).catch((err) => {
  // BUG FIX: the promise chain had no rejection handler, leaving bundle
  // failures as unhandled rejections.
  console.error('Bundling failed:', err);
  process.exitCode = 1;
});
|