@itentialopensource/adapter-netbox 0.6.1 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintignore +1 -0
- package/CHANGELOG.md +32 -0
- package/README.md +182 -24
- package/adapter.js +3252 -137
- package/adapterBase.js +285 -7
- package/entities/.generic/action.json +109 -0
- package/entities/.generic/schema.json +23 -0
- package/entities/.system/action.json +1 -1
- package/entities/Graphql/action.json +25 -0
- package/entities/Graphql/schema.json +19 -0
- package/error.json +6 -0
- package/package.json +40 -19
- package/pronghorn.json +764 -234
- package/propertiesSchema.json +51 -4
- package/refs?service=git-upload-pack +0 -0
- package/report/updateReport1615392569777.json +95 -0
- package/report/updateReport1644854487087.json +95 -0
- package/sampleProperties.json +12 -4
- package/test/integration/adapterTestBasicGet.js +85 -0
- package/test/integration/adapterTestConnectivity.js +93 -0
- package/test/integration/adapterTestIntegration.js +43 -5
- package/test/unit/adapterBaseTestUnit.js +944 -0
- package/test/unit/adapterTestUnit.js +658 -9
- package/utils/addAuth.js +94 -0
- package/utils/basicGet.js +50 -0
- package/utils/checkMigrate.js +63 -0
- package/utils/entitiesToDB.js +224 -0
- package/utils/findPath.js +74 -0
- package/utils/modify.js +154 -0
- package/utils/packModificationScript.js +1 -1
- package/utils/patches2bundledDeps.js +90 -0
- package/utils/removeHooks.js +20 -0
- package/utils/tbScript.js +169 -0
- package/utils/tbUtils.js +451 -0
- package/utils/troubleshootingAdapter.js +190 -0
- package/img/adapter.png +0 -0
package/utils/addAuth.js
ADDED
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
/* eslint-disable no-plusplus */
|
|
2
|
+
/* eslint global-require: warn */
|
|
3
|
+
/* eslint import/no-dynamic-require: warn */
|
|
4
|
+
|
|
5
|
+
const rls = require('readline-sync');
|
|
6
|
+
const path = require('path');
|
|
7
|
+
const fs = require('fs');
|
|
8
|
+
|
|
9
|
+
/**
 * Builds one prompt string per property, showing the current value
 * (if any) in parentheses so the user can keep it by pressing enter.
 *
 * @param {Array} props - property names to prompt for
 * @param {Object} obj - object holding the current values
 * @returns {Array} prompt strings, one per property
 */
function getQuestions(props, obj) {
  return props.map((prop) => {
    const current = (obj[prop] !== undefined) ? `(${obj[prop]})` : '';
    return `${prop}: ${current} `;
  });
}
|
|
13
|
+
|
|
14
|
+
// function outputs each property for user to edit/confirm
|
|
15
|
+
// props are the fields that need to be changed depending on what the user selects
|
|
16
|
+
// obj is the JSON object that's being updated
|
|
17
|
+
// Prompts the user for each listed property and applies any answers.
// props are the fields that need to be changed depending on what the user selects
// obj is the JSON object that's being updated; an empty answer keeps the
// existing value.
function confirm(props, obj) {
  const updatedObj = obj;
  const questions = getQuestions(props, obj);
  questions.forEach((question) => {
    const answer = rls.question(question);
    // an empty answer means "keep the current value" - skip the update
    if (answer) {
      const key = question.split(':')[0].trim();
      updatedObj[key] = answer;
    }
  });
  return updatedObj;
}
|
|
29
|
+
|
|
30
|
+
// Prompts for and applies the basic (username/password) auth properties.
const updateBasicAuth = (auth) => confirm(
  ['username', 'password', 'auth_field', 'auth_field_format'],
  auth
);
|
|
34
|
+
|
|
35
|
+
// Prompts for and applies the static-token auth properties.
const updateStaticTokenAuth = (auth) => confirm(
  ['token', 'auth_field', 'auth_field_format'],
  auth
);
|
|
39
|
+
|
|
40
|
+
/**
 * Rewrites the token request/response schema files so their external
 * field names match what the user configured.
 *
 * @param {String} user - external name for the username request field
 * @param {String} pw - external name for the password request field
 * @param {String} token - external name for the token response field
 */
function updateTokenSchemas(user, pw, token) {
  const reqPath = path.join(__dirname, '..', 'entities/.system/schemaTokenReq.json');
  const reqSchema = require(reqPath);
  reqSchema.properties.username.external_name = user;
  reqSchema.properties.password.external_name = pw;
  fs.writeFileSync(reqPath, JSON.stringify(reqSchema, null, 2));

  const respPath = path.join(__dirname, '..', 'entities/.system/schemaTokenResp.json');
  const respSchema = require(respPath);
  respSchema.properties.token.external_name = token;
  fs.writeFileSync(respPath, JSON.stringify(respSchema, null, 2));
}
|
|
51
|
+
|
|
52
|
+
// Prompts for the request-token auth properties and keeps the token
// schema files in sync with the configured field names.
function updateRequestToken(auth) {
  const newAuth = confirm([
    'username',
    'password',
    'auth_field',
    'auth_field_format',
    'token_user_field',
    'token_password_field',
    'token_result_field',
    'token_URI_path'
  ], auth);
  // the schema files must mirror the configured external field names
  updateTokenSchemas(newAuth.token_user_field, newAuth.token_password_field, newAuth.token_result_field);
  return newAuth;
}
|
|
68
|
+
|
|
69
|
+
// prompt users to pick an auth method from the list above
|
|
70
|
+
// prompt users to pick an auth method from the list above
// Confirms connectivity properties, then walks the user through the
// chosen authentication method's properties. Returns the updated props.
const addAuthInfo = (props) => {
  const authOptions = [
    'basic user_password',
    'static_token',
    'request_token',
    'no_authentication'
  ];
  const newProps = confirm(['host', 'port', 'base_path'], props);

  // keyInSelect returns -1 on cancel, which leaves auth_method undefined
  const choice = rls.keyInSelect(authOptions, 'Which authentication method?');
  const newAuthMethod = authOptions[choice];
  newProps.authentication.auth_method = newAuthMethod;

  switch (newAuthMethod) {
    case 'basic user_password':
      newProps.authentication = updateBasicAuth(newProps.authentication);
      break;
    case 'static_token':
      newProps.authentication = updateStaticTokenAuth(newProps.authentication);
      break;
    case 'request_token':
      newProps.authentication = updateRequestToken(newProps.authentication);
      break;
    default:
      // no_authentication (or cancel): nothing further to collect
      break;
  }
  console.log('Connectivity and authentication properties have been configured');
  console.log('If you want to make changes, rerun this script to reinstall the adapter');
  return newProps;
};
|
|
93
|
+
|
|
94
|
+
module.exports = { addAuthInfo };
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
/* @copyright Itential, LLC 2020 */
|
|
2
|
+
|
|
3
|
+
/* eslint object-shorthand: warn */
|
|
4
|
+
/* eslint import/no-extraneous-dependencies: warn */
|
|
5
|
+
/* eslint global-require: warn */
|
|
6
|
+
/* eslint import/no-unresolved: warn */
|
|
7
|
+
/* eslint import/no-dynamic-require: warn */
|
|
8
|
+
|
|
9
|
+
const winston = require('winston');
|
|
10
|
+
|
|
11
|
+
const logLevel = 'none';
|
|
12
|
+
const myCustomLevels = {
|
|
13
|
+
levels: {
|
|
14
|
+
spam: 6,
|
|
15
|
+
trace: 5,
|
|
16
|
+
debug: 4,
|
|
17
|
+
info: 3,
|
|
18
|
+
warn: 2,
|
|
19
|
+
error: 1,
|
|
20
|
+
none: 0
|
|
21
|
+
}
|
|
22
|
+
};
|
|
23
|
+
|
|
24
|
+
const basicGet = {
  /**
   * @summary create Adapter instance
   *
   * @function getAdapterInstance
   * @param {Object} adapter - adapter configuration object required by IAP
   * @returns {Object} a new, non-stubbed Adapter instance
   */
  getAdapterInstance: (adapter) => {
    const Adapter = require('../adapter');
    // deep-copy so the caller's properties object is never mutated
    const adapterProps = JSON.parse(JSON.stringify(adapter.properties.properties));
    adapterProps.stub = false;
    // the adapter code expects a global winston logger to exist
    global.log = winston.createLogger({
      level: logLevel,
      levels: myCustomLevels.levels,
      transports: [new winston.transports.Console()]
    });
    return new Adapter(adapter.id, adapterProps);
  }
};
|
|
49
|
+
|
|
50
|
+
module.exports = basicGet;
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
const { execSync } = require('child_process');
|
|
2
|
+
const semver = require('semver');
|
|
3
|
+
const axios = require('axios');
|
|
4
|
+
const fs = require('fs');
|
|
5
|
+
const packageJson = require('../package.json');
|
|
6
|
+
|
|
7
|
+
const localEngineVer = packageJson.engineVersion;
|
|
8
|
+
const localUtils = execSync('npm list @itentialopensource/adapter-utils', { encoding: 'utf-8' });
|
|
9
|
+
const localUtilsVer = localUtils.split('@').pop().replace(/(\r\n|\n|\r| )/gm, '');
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* @summary Makes a GET call using axios
|
|
13
|
+
*
|
|
14
|
+
* @function get
|
|
15
|
+
* @param {String} url - url to make the call to
|
|
16
|
+
*/
|
|
17
|
+
/**
 * @summary Makes a GET call using axios
 *
 * @function get
 * @param {String} url - url to make the call to
 * @returns {Promise} axios response promise
 */
function get(url) {
  return axios({ method: 'get', url });
}
|
|
24
|
+
|
|
25
|
+
/**
|
|
26
|
+
* @summary Checks if adapter can be migrated using migration package
|
|
27
|
+
*
|
|
28
|
+
* @function migratePossible
|
|
29
|
+
*/
|
|
30
|
+
/**
 * @summary Checks if adapter can be migrated using migration package
 *
 * Looks for the marker comment in the unit tests and the expected
 * phrases in the README that the migration tooling relies on.
 *
 * @function migratePossible
 * @returns {Boolean} true when all migration markers are present
 */
function migratePossible() {
  const unitTests = fs.readFileSync('./test/unit/adapterTestUnit.js', { encoding: 'utf-8' });
  const readme = fs.readFileSync('./README.md', { encoding: 'utf-8' });
  const markersPresent = unitTests.includes('DO NOT REMOVE THIS COMMENT BLOCK')
    && readme.includes('available at ')
    && readme.includes('You will need to change the credentials and possibly the host information below.');
  return packageJson.keywords !== null && markersPresent;
}
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* @summary Checks if adapter is up-to-date or if migration is needed
|
|
39
|
+
*
|
|
40
|
+
* @function migrateNeeded
|
|
41
|
+
*/
|
|
42
|
+
/**
 * @summary Checks if adapter is up-to-date or if migration is needed
 *
 * Compares the local engine and adapter-utils versions against the
 * latest published versions.
 *
 * @function migrateNeeded
 * @returns {Promise<Boolean>} true when either local version is behind
 */
async function migrateNeeded() {
  const engineUrl = 'https://adapters.itential.io/engineVersion';
  const utilsUrl = 'https://registry.npmjs.org/@itentialopensource/adapter-utils';
  // the two lookups are independent - fetch them in parallel instead of
  // awaiting them one after the other
  const [engineRes, utilsRes] = await Promise.all([get(engineUrl), get(utilsUrl)]);
  const latestEngineVer = engineRes.data;
  const latestUtilsVer = utilsRes.data['dist-tags'].latest;
  return semver.lt(localEngineVer, latestEngineVer) || semver.lt(localUtilsVer, latestUtilsVer);
}
|
|
49
|
+
|
|
50
|
+
// Main Script
|
|
51
|
+
if (!migratePossible()) {
  console.log('Migration is not possible. Please contact Itential support for assistance');
} else {
  migrateNeeded()
    .then((needed) => {
      const msg = needed
        ? 'Migration is needed and possible -- go to dev site to download migration package'
        : 'Migration is possible but not needed at the current time.';
      console.log(msg);
    })
    .catch((error) => {
      console.log('Could not get latest engine or utils version.', error.message);
    });
}
|
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
/* @copyright Itential, LLC 2021 */
|
|
2
|
+
|
|
3
|
+
// Set globals
|
|
4
|
+
/* global log */
|
|
5
|
+
|
|
6
|
+
/* eslint import/no-dynamic-require: warn */
|
|
7
|
+
/* eslint global-require: warn */
|
|
8
|
+
/* eslint no-unused-vars: warn */
|
|
9
|
+
/* eslint import/no-unresolved: warn */
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* This script is used to read through an adapter's entities files
|
|
13
|
+
* and then creates documents and enters them into the IAP mongodb
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
const fs = require('fs');
|
|
17
|
+
const { MongoClient } = require('mongodb');
|
|
18
|
+
const path = require('path');
|
|
19
|
+
// const { argv } = require('process');
|
|
20
|
+
// const { string } = require('yargs');
|
|
21
|
+
|
|
22
|
+
// get the pronghorn database information
|
|
23
|
+
// get the pronghorn database information
// Reads properties.json from the IAP directory and decrypts it with the
// IAP property-encryption utility.
const getPronghornProps = async (iapDir) => {
  log.trace('Retrieving properties.json file...');
  const rawProps = require(path.join(iapDir, 'properties.json'));
  log.trace('Decrypting properties...');
  const { PropertyEncryption } = require('@itential/itential-utils');
  const encryptor = new PropertyEncryption();
  const pronghornProps = await encryptor.decryptProps(rawProps);
  log.trace('Found properties.\n');
  return pronghornProps;
};
|
|
33
|
+
|
|
34
|
+
/**
|
|
35
|
+
* Function used to take a file path to a entity directory and build
|
|
36
|
+
* a document that corresponds to the entity files.
|
|
37
|
+
*/
|
|
38
|
+
/**
 * Function used to take a file path to a entity directory and build
 * a document that corresponds to the entity files.
 *
 * @param {String} pathstring - path to one entity directory
 * @returns {Object} { actions, schema, mockdatafiles }
 */
const buildDoc = (pathstring) => {
  let files = fs.readdirSync(pathstring);

  // load the mockdatafiles
  const mockdatafiles = {};
  if (files.includes('mockdatafiles') && fs.lstatSync(`${pathstring}/mockdatafiles`).isDirectory()) {
    fs.readdirSync(`${pathstring}/mockdatafiles`).forEach((file) => {
      if (file.split('.').pop() === 'json') {
        const mockpath = `${pathstring}/mockdatafiles/${file}`;
        const data = JSON.parse(fs.readFileSync(mockpath));
        mockdatafiles[mockpath.split('/').pop()] = data;
      }
    });
  }

  // load the action data
  let actions;
  if (files.includes('action.json')) {
    actions = JSON.parse(fs.readFileSync(`${pathstring}/action.json`));
  }

  // Load schema.json and other schemas in remaining json files
  files = files.filter((f) => (f !== 'action.json') && f.endsWith('.json'));
  const schema = [];
  files.forEach((file) => {
    const data = JSON.parse(fs.readFileSync(`${pathstring}/${file}`));
    schema.push({
      name: file,
      schema: data
    });
  });

  // return the data
  // BUG FIX: previously `actions.actions` threw a TypeError when the
  // entity directory had no action.json; fall back to an empty list
  return {
    actions: actions ? actions.actions : [],
    schema,
    mockdatafiles
  };
};
|
|
77
|
+
|
|
78
|
+
/**
|
|
79
|
+
* Function used to get the database from the options or a provided directory
|
|
80
|
+
*/
|
|
81
|
+
/**
 * Function used to get the database from the options or a provided directory.
 *
 * Note: when pronghornProps is a string the parsed object is returned
 * synchronously (not wrapped in a Promise) - preserved for compatibility.
 *
 * @param {Object} options - { pronghornProps: Object|String } or { iapDir: String }
 * @returns {Object|Promise} the pronghorn properties, or a rejected Promise
 */
const optionsHandler = (options) => {
  // if the database properties were provided in the options - return them
  if (options.pronghornProps) {
    if (typeof options.pronghornProps === 'string') {
      return JSON.parse(options.pronghornProps);
    }
    // idiom fix: use Promise.resolve instead of the explicit-construction
    // anti-pattern `new Promise((resolve) => resolve(...))`
    return Promise.resolve(options.pronghornProps);
  }

  // if the directory was provided, get the pronghorn props from the directory
  if (options.iapDir) {
    return getPronghornProps(options.iapDir);
  }

  // if nothing was provided, error
  return Promise.reject(new Error('Neither pronghornProps nor iapDir defined in options!'));
};
|
|
98
|
+
|
|
99
|
+
/**
|
|
100
|
+
* Function used to put the adapter configuration into the provided database
|
|
101
|
+
*/
|
|
102
|
+
/**
 * Function used to put the adapter configuration into the provided database.
 *
 * @param {String} targetPath - adapter root path (defaults to this package)
 * @param {Object|String} options - { id, targetCollection?, pronghornProps? | iapDir? }
 * @throws {Error} when no adapter id is provided
 */
const moveEntitiesToDB = (targetPath, options) => {
  // set local variables
  let myOpts = options;
  let myPath = targetPath;

  // if we got a string parse into a JSON object
  if (typeof myOpts === 'string') {
    myOpts = JSON.parse(myOpts);
  }

  // if there is no target collection - set the collection to the default
  if (!myOpts.targetCollection) {
    myOpts.targetCollection = 'adapter_configs';
  }

  // if there is no id error since we need an id for the entities
  if (!myOpts.id) {
    throw new Error('Adapter ID required!');
  }

  // get the pronghorn database properties
  optionsHandler(options).then((currentProps) => {
    let mongoUrl;
    let dbName;

    // find the mongo properties so we can connect
    if (currentProps.mongoProps) {
      mongoUrl = currentProps.mongoProps.url;
      dbName = currentProps.mongoProps.db;
    } else if (currentProps.mongo) {
      if (currentProps.mongo.url) {
        mongoUrl = currentProps.mongo.url;
      } else {
        mongoUrl = `mongodb://${currentProps.mongo.host}:${currentProps.mongo.port}`;
      }
      dbName = currentProps.mongo.database;
    } else {
      throw new Error('Mongo properties are not specified in adapter preferences!');
    }

    // Check valid filepath provided
    if (!myPath) {
      // if no path use the current directory without the utils
      myPath = path.join(__dirname, '../');
    } else if (myPath.slice(-1) === '/') {
      myPath = myPath.slice(0, -1);
    }

    // verify set the entity path
    const entitiesPath = `${myPath}/entities`;
    if (!fs.existsSync(entitiesPath)) {
      throw new Error(`Entities path does not exist in filesystem: ${entitiesPath}`);
    } else {
      log.trace('Target found on filesystem');
    }

    // Get adapter details
    if (!fs.existsSync(`${myPath}/pronghorn.json`)) {
      throw new Error(`pronghorn.json does not exist in path: ${myPath}`);
    } else {
      log.trace('pronghorn.json found on filesystem');
    }
    const adapterData = JSON.parse(fs.readFileSync(`${myPath}/pronghorn.json`));

    // Load files from the filesystem
    const docs = [];
    const entities = fs.readdirSync(entitiesPath);
    entities.forEach((entity) => {
      const entityPath = `${entitiesPath}/${entity}`;
      // BUG FIX: previously this stat'ed entitiesPath (the parent, always a
      // directory), so plain files were never skipped and buildDoc could be
      // called on a non-directory. Stat the entity entry itself.
      const isDir = fs.lstatSync(entityPath).isDirectory();

      // Build doc for entity
      if (isDir) {
        const doc = {
          id: myOpts.id,
          type: adapterData.id,
          entity,
          ...buildDoc(entityPath)
        };
        docs.push(doc);
      }
    });

    // Upload documents to db collection
    MongoClient.connect(mongoUrl, (err, db) => {
      if (err) {
        log.error(JSON.stringify(err));
        throw err;
      }

      // get the proper collection
      const collection = db.db(dbName).collection(myOpts.targetCollection);
      // insert the documents into the collection
      collection.insertMany(docs, { checkKeys: false }, (error, res) => {
        if (error) {
          log.error(JSON.stringify(error));
          throw error;
        }
        // log the insertion, close the database and return
        log.debug(`Inserted ${docs.length} documents to ${dbName}.${myOpts.targetCollection} with response ${JSON.stringify(res)}`);
        db.close();
        return res;
      });
    });
  });
};
|
|
209
|
+
|
|
210
|
+
// const args = process.argv.slice(2);
|
|
211
|
+
|
|
212
|
+
// throw new SyntaxError(args[0]);
|
|
213
|
+
|
|
214
|
+
// if (args.length === 0) {
|
|
215
|
+
// console.error('ERROR: target path not specified!');
|
|
216
|
+
// } else if (args[0] === 'help') {
|
|
217
|
+
// log.trace('node ./entitiesToDB <target path> <options object: {iapDir: string, pronghornProps: string, targetCollection: string}>');
|
|
218
|
+
// } else if (args.length === 1) {
|
|
219
|
+
// console.error('ERROR: IAP directory not specified');
|
|
220
|
+
// } else {
|
|
221
|
+
// moveEntitiesToDB(args[0], args[1]);
|
|
222
|
+
// }
|
|
223
|
+
|
|
224
|
+
module.exports = { moveEntitiesToDB };
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/* @copyright Itential, LLC 2019 */
|
|
3
|
+
/* eslint global-require:warn */
|
|
4
|
+
/* eslint import/no-dynamic-require:warn */
|
|
5
|
+
/* eslint prefer-destructuring:warn */
|
|
6
|
+
|
|
7
|
+
const fs = require('fs-extra');
|
|
8
|
+
const path = require('path');
|
|
9
|
+
const rls = require('readline-sync');
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* This script will determine the type of integration test to run
|
|
13
|
+
* based on input. If other information is needed, it will solicit
|
|
14
|
+
* that input and then edit the integration test accordingly.
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Updates the action files
|
|
19
|
+
*/
|
|
20
|
+
/**
 * Searches every entity's action.json for actions whose entitypath
 * contains the given path fragment, printing each match.
 *
 * @param {String} apath - path or partial path to search for
 * @returns {String} 'Done' on completion, 'error' when entities are malformed
 */
function checkActionFiles(apath) {
  // verify the path
  if (!apath) {
    console.log(' NO PATH PROVIDED!');
    return 'Done';
  }

  // make sure the entities directory exists
  const entitydir = path.join(__dirname, '../entities');
  if (!fs.statSync(entitydir).isDirectory()) {
    console.log('Could not find the entities directory');
    return 'error';
  }

  let found = false;

  // need to go through each entity in the entities directory
  for (const entity of fs.readdirSync(entitydir)) {
    // only entities (dir) - anything else is an error
    if (!fs.statSync(`${entitydir}/${entity}`).isDirectory()) {
      console.log(`Could not find entities ${entity} directory`);
      return 'error';
    }
    // the action file must exist in the entity
    if (!fs.existsSync(`${entitydir}/${entity}/action.json`)) {
      console.log(`Could not find entities ${entity} action.json file`);
      return 'error';
    }

    // Read the entity actions from the file system
    const actions = require(`${entitydir}/${entity}/action.json`);

    // print every action whose entitypath contains the search fragment
    for (const action of actions.actions) {
      if (action.entitypath.indexOf(apath) >= 0) {
        found = true;
        console.log(`  Found - entity: ${entity} action: ${action.name}`);
        console.log(`    method: ${action.method} path: ${action.entitypath}`);
        console.log(' ');
      }
    }
  }

  if (!found) {
    console.log(' PATH NOT FOUND!');
  }
  return 'Done';
}
|
|
71
|
+
|
|
72
|
+
// prompt for the search fragment and run the lookup
const requestedPath = rls.question('Enter the path/partial path you are looking for: ');
console.log(`PATH: ${requestedPath}`);
checkActionFiles(requestedPath);
|
package/utils/modify.js
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
const fs = require('fs-extra');
|
|
2
|
+
const Ajv = require('ajv');
|
|
3
|
+
const rls = require('readline-sync');
|
|
4
|
+
const { execSync } = require('child_process');
|
|
5
|
+
const { existsSync } = require('fs-extra');
|
|
6
|
+
const { getAdapterConfig } = require('./tbUtils');
|
|
7
|
+
const { name } = require('../package.json');
|
|
8
|
+
const propertiesSchema = require('../propertiesSchema.json');
|
|
9
|
+
|
|
10
|
+
const flags = process.argv[2];
|
|
11
|
+
|
|
12
|
+
/**
|
|
13
|
+
* @summary Updates database instance with new adapter properties
|
|
14
|
+
*
|
|
15
|
+
* @function updateServiceItem
|
|
16
|
+
*/
|
|
17
|
+
/**
 * @summary Updates database instance with new adapter properties
 *
 * @function updateServiceItem
 */
async function updateServiceItem() {
  const { database, serviceItem } = await getAdapterConfig();
  // validate() with useDefaults fills in any missing defaulted
  // properties on the object in place before it is written back
  const ajv = new Ajv({ allErrors: true, useDefaults: true });
  const validate = ajv.compile(propertiesSchema);
  validate(serviceItem.properties.properties);
  console.log('Updating Properties...');
  await database.collection('service_configs')
    .updateOne({ model: name }, { $set: serviceItem });
  console.log('Properties Updated');
}
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* @summary Creates a backup zip file of current adapter
|
|
32
|
+
*
|
|
33
|
+
* @function backup
|
|
34
|
+
*/
|
|
35
|
+
/**
 * @summary Creates a backup zip file of current adapter
 *
 * Zips the entire adapter directory so it can be restored if a
 * modification fails.
 *
 * @function backup
 */
function backup() {
  execSync('zip -r previousVersion.zip .', { encoding: 'utf-8' });
}
|
|
40
|
+
|
|
41
|
+
/**
|
|
42
|
+
* @summary Archives previous modifications and removes the modification package
|
|
43
|
+
*
|
|
44
|
+
* @function archiveMod
|
|
45
|
+
* @param {String} modType - update(UPD) or migrate(MIG)
|
|
46
|
+
*/
|
|
47
|
+
/**
 * @summary Archives previous modifications and removes the modification package
 *
 * @function archiveMod
 * @param {String} modType - update(UPD) or migrate(MIG)
 */
function archiveMod(modType) {
  if (!existsSync('./adapter_modifications/archive')) {
    execSync('mkdir ./adapter_modifications/archive');
  }
  const zipFile = modType === 'UPD' ? 'updatePackage.zip' : 'migrationPackage.zip';
  // timestamped directory name so repeated runs never collide
  const archiveName = `${modType}-${new Date().toISOString()}`;
  execSync(`mkdir adapter_modifications/archive/${archiveName}`);
  // move the archive out, stash the modification files in it, move it
  // back, and finally delete the modification zip
  const steps = [
    'mv adapter_modifications/archive .',
    `mv adapter_modifications/* archive/${archiveName}`,
    'mv archive adapter_modifications',
    `rm ${zipFile}`
  ];
  execSync(steps.join(' && '), { encoding: 'utf-8' });
}
|
|
61
|
+
|
|
62
|
+
/**
|
|
63
|
+
* @summary Reverts modifications using backup zip file
|
|
64
|
+
*
|
|
65
|
+
* @function revertMod
|
|
66
|
+
*/
|
|
67
|
+
/**
 * @summary Reverts modifications using backup zip file
 *
 * Deletes everything except the backup zip, restores from it,
 * reinstalls dependencies, and removes the backup.
 *
 * @function revertMod
 */
function revertMod() {
  fs.readdirSync('./')
    .filter((file) => file !== 'previousVersion.zip')
    .forEach((file) => fs.removeSync(file));
  // unzip previousVersion, reinstall dependencies and delete zipfile
  execSync('unzip -o previousVersion.zip && rm -rf node_modules && rm package-lock.json && npm install');
  execSync('rm previousVersion.zip');
  console.log('Changes have been reverted');
}
|
|
80
|
+
|
|
81
|
+
// Main Script
|
|
82
|
+
|
|
83
|
+
// Migrate
|
|
84
|
+
if (flags === '-m') {
  if (!fs.existsSync('migrationPackage.zip')) {
    console.log('Migration Package not found. Download and place migrationPackage in the adapter root directory');
    process.exit();
  }
  // Backup current adapter
  backup();
  console.log('Migrating adapter and running tests...');
  const migrateCmd = ['unzip -o migrationPackage.zip', 'cd adapter_modifications', 'node migrate'].join(' && ');
  const migrateOutput = execSync(migrateCmd, { encoding: 'utf-8' });
  console.log(migrateOutput);
  const migrateFailed = migrateOutput.indexOf('Lint exited with code 1') >= 0
    || migrateOutput.indexOf('Tests exited with code 1') >= 0;
  if (migrateFailed) {
    if (rls.keyInYN('Adapter failed tests or lint after migrating. Would you like to revert the changes?')) {
      console.log('Reverting changes...');
      revertMod();
      process.exit();
    }
    console.log('Adapter Migration will continue. If you want to revert the changes, run the command npm run adapter:revert');
  }
  console.log('Installing new dependencies..');
  const updatePackageOutput = execSync('rm -rf node_modules && rm package-lock.json && npm install', { encoding: 'utf-8' });
  console.log(updatePackageOutput);
  console.log('New dependencies installed');
  console.log('Updating adapter properties..');
  updateServiceItem().then(() => {
    console.log('Adapter Successfully Migrated. Restart adapter in IAP to apply the changes');
    archiveMod('MIG');
    process.exit();
  });
}
|
|
118
|
+
|
|
119
|
+
// Update
|
|
120
|
+
if (flags === '-u') {
  if (!fs.existsSync('updatePackage.zip')) {
    console.log('Update Package not found. Download and place updateAdapter.zip in the adapter root directory');
    process.exit();
  }
  // Backup current adapter
  backup();
  const updateCmd = 'unzip -o updatePackage.zip'
    + ' && cd adapter_modifications'
    + ' && node update.js updateFiles';
  // BUG FIX: updateCmd was previously executed twice (a bare execSync
  // followed by a second one capturing the output), applying the update
  // package twice; run it once and keep the output
  const updateOutput = execSync(updateCmd, { encoding: 'utf-8' });
  if (updateOutput.indexOf('Lint exited with code 1') >= 0
    || updateOutput.indexOf('Tests exited with code 1') >= 0) {
    if (rls.keyInYN('Adapter failed tests or lint after updating. Would you like to revert the changes?')) {
      console.log('Reverting changes...');
      revertMod();
      process.exit();
    }
    console.log('Adapter Update will continue. If you want to revert the changes, run the command npm run adapter:revert');
  }
  console.log(updateOutput);
  console.log('Adapter Successfully Updated. Restart adapter in IAP to apply the changes');
  archiveMod('UPD');
  process.exit();
}
|
|
146
|
+
|
|
147
|
+
// Revert
|
|
148
|
+
// Revert: restore the adapter from the backup zip, if one exists
if (flags === '-r') {
  if (fs.existsSync('previousVersion.zip')) {
    revertMod();
  } else {
    console.log('Previous adapter version not found. There are no changes to revert');
    process.exit();
  }
}
|
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
const fs = require('fs-extra');
|
|
5
5
|
const path = require('path');
|
|
6
6
|
const { spawnSync } = require('child_process');
|
|
7
|
-
const { createBundle } = require('./artifactize
|
|
7
|
+
const { createBundle } = require('./artifactize');
|
|
8
8
|
|
|
9
9
|
const nodeEntryPath = path.resolve('.');
|
|
10
10
|
createBundle(nodeEntryPath).then((pathObj) => {
|