@itentialopensource/adapter-meraki 0.7.3 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/README.md +84 -11
- package/adapter.js +469 -1
- package/adapterBase.js +24 -2
- package/entities/.generic/action.json +5 -5
- package/error.json +6 -0
- package/package.json +18 -13
- package/pronghorn.json +192 -0
- package/propertiesSchema.json +15 -0
- package/refs?service=git-upload-pack +0 -0
- package/report/updateReport1642739939352.json +95 -0
- package/sampleProperties.json +4 -1
- package/test/integration/adapterTestIntegration.js +9 -1
- package/test/unit/adapterBaseTestUnit.js +5 -2
- package/test/unit/adapterTestUnit.js +618 -3
- package/utils/addAuth.js +94 -0
- package/utils/basicGet.js +1 -14
- package/utils/entitiesToDB.js +224 -0
- package/utils/modify.js +1 -1
- package/utils/patches2bundledDeps.js +90 -0
- package/utils/removeHooks.js +20 -0
- package/utils/tbScript.js +14 -8
- package/utils/tbUtils.js +98 -19
- package/utils/troubleshootingAdapter.js +2 -26
package/utils/addAuth.js
ADDED
@@ -0,0 +1,94 @@
+/* eslint-disable no-plusplus */
+/* eslint global-require: warn */
+/* eslint import/no-dynamic-require: warn */
+
+const rls = require('readline-sync');
+const path = require('path');
+const fs = require('fs');
+
+function getQuestions(props, obj) {
+  const questions = props.map((p) => `${p}: ${(obj[p] !== undefined) ? `(${obj[p]})` : ''} `);
+  return questions;
+}
+
+// function outputs each property for user to edit/confirm
+// props are the fields that need to be changed depending on what the user selects
+// obj is the JSON object that's being updated
+function confirm(props, obj) {
+  // create array of questions
+  const updatedObj = obj;
+  getQuestions(props, obj).forEach((q) => {
+    const answer = rls.question(q);
+    // only update the field if the answer is NOT and empty string
+    if (answer) {
+      updatedObj[q.split(':')[0].trim()] = answer;
+    }
+  });
+  return updatedObj;
+}
+
+const updateBasicAuth = (auth) => {
+  const propsToUpdate = ['username', 'password', 'auth_field', 'auth_field_format'];
+  return confirm(propsToUpdate, auth);
+};
+
+const updateStaticTokenAuth = (auth) => {
+  const propsToUpdate = ['token', 'auth_field', 'auth_field_format'];
+  return confirm(propsToUpdate, auth);
+};
+
+function updateTokenSchemas(user, pw, token) {
+  let schemaPath = path.join(__dirname, '..', 'entities/.system/schemaTokenReq.json');
+  const reqSchema = require(schemaPath);
+  reqSchema.properties.username.external_name = user;
+  reqSchema.properties.password.external_name = pw;
+  fs.writeFileSync(schemaPath, JSON.stringify(reqSchema, null, 2));
+  schemaPath = path.join(__dirname, '..', 'entities/.system/schemaTokenResp.json');
+  const respSchema = require(schemaPath);
+  respSchema.properties.token.external_name = token;
+  fs.writeFileSync(schemaPath, JSON.stringify(respSchema, null, 2));
+}
+
+function updateRequestToken(auth) {
+  const propsToUpdate = [
+    'username',
+    'password',
+    'auth_field',
+    'auth_field_format',
+    'token_user_field',
+    'token_password_field',
+    'token_result_field',
+    'token_URI_path'
+  ];
+  const newAuth = confirm(propsToUpdate, auth);
+  updateTokenSchemas(newAuth.token_user_field, newAuth.token_password_field, newAuth.token_result_field);
+
+  return newAuth;
+}
+
+// prompt users to pick an auth method from the list above
+const addAuthInfo = (props) => {
+  const authOptions = [
+    'basic user_password',
+    'static_token',
+    'request_token',
+    'no_authentication'
+  ];
+  const newProps = confirm(['host', 'port', 'base_path'], props);
+
+  const newAuthMethod = authOptions[rls.keyInSelect(authOptions, 'Which authentication method?')];
+  newProps.authentication.auth_method = newAuthMethod;
+
+  if (newAuthMethod === 'basic user_password') {
+    newProps.authentication = updateBasicAuth(newProps.authentication);
+  } else if (newAuthMethod === 'static_token') {
+    newProps.authentication = updateStaticTokenAuth(newProps.authentication);
+  } else if (newAuthMethod === 'request_token') {
+    newProps.authentication = updateRequestToken(newProps.authentication);
+  }
+  console.log('Connectivity and authentication properties have been configured');
+  console.log('If you want to make changes, rerun this script to reinstall the adapter');
+  return newProps;
+};
+
+module.exports = { addAuthInfo };
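
The exported addAuthInfo is the interactive prompt that the installer in utils/tbScript.js (below) now runs after creating the adapter's database entry. A minimal sketch of calling it directly, assuming a properties object shaped like the adapter's sampleProperties.json; the host, port, and auth_field values are placeholders, not the adapter's defaults:

// sketch only - property values below are placeholders
const { addAuthInfo } = require('./utils/addAuth');

const props = {
  host: 'api.example.com',
  port: 443,
  base_path: '/api',
  authentication: {
    auth_method: 'no_authentication',
    auth_field: 'header.headers.X-AuthToken',
    auth_field_format: '{token}'
  }
};

// prompts on stdin for host/port/base_path and an auth method,
// then returns the updated properties object
const updated = addAuthInfo(props);
console.log(JSON.stringify(updated, null, 2));
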
package/utils/basicGet.js
CHANGED
@@ -4,6 +4,7 @@
 /* eslint import/no-extraneous-dependencies: warn */
 /* eslint global-require: warn */
 /* eslint import/no-unresolved: warn */
+/* eslint import/no-dynamic-require: warn */

 const winston = require('winston');

@@ -43,20 +44,6 @@ const basicGet = {
       adapter.id,
       adapterProps
     );
-  },
-
-  /**
-   * @summary connect to mongodb
-   *
-   * @function connect
-   * @param {Object} properties - pronghornProps
-   */
-  connect: async function connect(properties) {
-    // Connect to Mongo
-    const { MongoDBConnection } = require('@itential/database');
-    const connection = new MongoDBConnection(properties.mongoProps);
-    const database = await connection.connect(true);
-    return database;
   }
 };

package/utils/entitiesToDB.js
ADDED
@@ -0,0 +1,224 @@
+/* @copyright Itential, LLC 2021 */
+
+// Set globals
+/* global log */
+
+/* eslint import/no-dynamic-require: warn */
+/* eslint global-require: warn */
+/* eslint no-unused-vars: warn */
+/* eslint import/no-unresolved: warn */
+
+/**
+ * This script is used to read through an adapter's entities files
+ * and then creates documents and enters them into the IAP mongodb
+ */
+
+const fs = require('fs');
+const { MongoClient } = require('mongodb');
+const path = require('path');
+// const { argv } = require('process');
+// const { string } = require('yargs');
+
+// get the pronghorn database information
+const getPronghornProps = async (iapDir) => {
+  log.trace('Retrieving properties.json file...');
+  const rawProps = require(path.join(iapDir, 'properties.json'));
+  log.trace('Decrypting properties...');
+  const { PropertyEncryption } = require('@itential/itential-utils');
+  const propertyEncryption = new PropertyEncryption();
+  const pronghornProps = await propertyEncryption.decryptProps(rawProps);
+  log.trace('Found properties.\n');
+  return pronghornProps;
+};
+
+/**
+ * Function used to take a file path to a entity directory and build
+ * a document that corresponds to the entity files.
+ */
+const buildDoc = (pathstring) => {
+  let files = fs.readdirSync(pathstring);
+
+  // load the mockdatafiles
+  const mockdatafiles = {};
+  if (files.includes('mockdatafiles') && fs.lstatSync(`${pathstring}/mockdatafiles`).isDirectory()) {
+    fs.readdirSync(`${pathstring}/mockdatafiles`).forEach((file) => {
+      if (file.split('.').pop() === 'json') {
+        const mockpath = `${pathstring}/mockdatafiles/${file}`;
+        const data = JSON.parse(fs.readFileSync(mockpath));
+        mockdatafiles[mockpath.split('/').pop()] = data;
+      }
+    });
+  }
+
+  // load the action data
+  let actions;
+  if (files.includes('action.json')) {
+    actions = JSON.parse(fs.readFileSync(`${pathstring}/action.json`));
+  }
+
+  // Load schema.json and other schemas in remaining json files
+  files = files.filter((f) => (f !== 'action.json') && f.endsWith('.json'));
+  const schema = [];
+  files.forEach((file) => {
+    const data = JSON.parse(fs.readFileSync(`${pathstring}/${file}`));
+    schema.push({
+      name: file,
+      schema: data
+    });
+  });
+
+  // return the data
+  return {
+    actions: actions.actions,
+    schema,
+    mockdatafiles
+  };
+};
+
+/**
+ * Function used to get the database from the options or a provided directory
+ */
+const optionsHandler = (options) => {
+  // if the database properties were provided in the options - return them
+  if (options.pronghornProps) {
+    if (typeof options.pronghornProps === 'string') {
+      return JSON.parse(options.pronghornProps);
+    }
+    return new Promise((resolve, reject) => resolve(options.pronghornProps));
+  }
+
+  // if the directory was provided, get the pronghorn props from the directory
+  if (options.iapDir) {
+    return getPronghornProps(options.iapDir);
+  }
+
+  // if nothing was provided, error
+  return new Promise((resolve, reject) => reject(new Error('Neither pronghornProps nor iapDir defined in options!')));
+};
+
+/**
+ * Function used to put the adapter configuration into the provided database
+ */
+const moveEntitiesToDB = (targetPath, options) => {
+  // set local variables
+  let myOpts = options;
+  let myPath = targetPath;
+
+  // if we got a string parse into a JSON object
+  if (typeof myOpts === 'string') {
+    myOpts = JSON.parse(myOpts);
+  }
+
+  // if there is no target collection - set the collection to the default
+  if (!myOpts.targetCollection) {
+    myOpts.targetCollection = 'adapter_configs';
+  }
+
+  // if there is no id error since we need an id for the entities
+  if (!myOpts.id) {
+    throw new Error('Adapter ID required!');
+  }
+
+  // get the pronghorn database properties
+  optionsHandler(options).then((currentProps) => {
+    let mongoUrl;
+    let dbName;
+
+    // find the mongo properties so we can connect
+    if (currentProps.mongoProps) {
+      mongoUrl = currentProps.mongoProps.url;
+      dbName = currentProps.mongoProps.db;
+    } else if (currentProps.mongo) {
+      if (currentProps.mongo.url) {
+        mongoUrl = currentProps.mongo.url;
+      } else {
+        mongoUrl = `mongodb://${currentProps.mongo.host}:${currentProps.mongo.port}`;
+      }
+      dbName = currentProps.mongo.database;
+    } else {
+      throw new Error('Mongo properties are not specified in adapter preferences!');
+    }
+
+    // Check valid filepath provided
+    if (!myPath) {
+      // if no path use the current directory without the utils
+      myPath = path.join(__dirname, '../');
+    } else if (myPath.slice(-1) === '/') {
+      myPath = myPath.slice(0, -1);
+    }
+
+    // verify set the entity path
+    const entitiesPath = `${myPath}/entities`;
+    if (!fs.existsSync(entitiesPath)) {
+      throw new Error(`Entities path does not exist in filesystem: ${entitiesPath}`);
+    } else {
+      log.trace('Target found on filesystem');
+    }
+
+    // Get adapter details
+    if (!fs.existsSync(`${myPath}/pronghorn.json`)) {
+      throw new Error(`pronghorn.json does not exist in path: ${myPath}`);
+    } else {
+      log.trace('pronghorn.json found on filesystem');
+    }
+    const adapterData = JSON.parse(fs.readFileSync(`${myPath}/pronghorn.json`));
+
+    // Load files from the filesystem
+    const docs = [];
+    const entities = fs.readdirSync(entitiesPath);
+    entities.forEach((entity) => {
+      const entityPath = `${entitiesPath}/${entity}`;
+      const isDir = fs.lstatSync(entitiesPath).isDirectory();
+
+      // Build doc for entity
+      if (isDir) {
+        let doc = buildDoc(entityPath);
+        doc = {
+          id: myOpts.id,
+          type: adapterData.id,
+          entity,
+          ...doc
+        };
+        docs.push(doc);
+      }
+    });
+
+    // Upload documents to db collection
+    MongoClient.connect(mongoUrl, (err, db) => {
+      if (err) {
+        log.error(JSON.stringify(err));
+        throw err;
+      }
+
+      // get the proper collection
+      const collection = db.db(dbName).collection(myOpts.targetCollection);
+      // insert the documents into the collection
+      collection.insertMany(docs, { checkKeys: false }, (error, res) => {
+        if (error) {
+          log.error(JSON.stringify(error));
+          throw error;
+        }
+        // log the insertion, close the database and return
+        log.debug(`Inserted ${docs.length} documents to ${dbName}.${myOpts.targetCollection} with response ${JSON.stringify(res)}`);
+        db.close();
+        return res;
+      });
+    });
+  });
+};
+
+// const args = process.argv.slice(2);
+
+// throw new SyntaxError(args[0]);
+
+// if (args.length === 0) {
+// console.error('ERROR: target path not specified!');
+// } else if (args[0] === 'help') {
+// log.trace('node ./entitiesToDB <target path> <options object: {iapDir: string, pronghornProps: string, targetCollection: string}>');
+// } else if (args.length === 1) {
+// console.error('ERROR: IAP directory not specified');
+// } else {
+// moveEntitiesToDB(args[0], args[1]);
+// }
+
+module.exports = { moveEntitiesToDB };
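
moveEntitiesToDB expects the adapter directory and an options object carrying the adapter id plus either pronghornProps or an iapDir to read them from; it then writes one document per entity directory into the target collection. A hedged sketch of a direct call; the path, id, and Mongo settings are placeholders, and the global log object the script assumes (per /* global log */) is supplied here via console as a stand-in:

// sketch only - connection details, path, and id are placeholders
global.log = console; // stand-in for the IAP logger the script expects
const { moveEntitiesToDB } = require('./utils/entitiesToDB');

moveEntitiesToDB('/path/to/adapter-meraki', {
  id: 'meraki',
  targetCollection: 'adapter_configs',
  pronghornProps: { mongoProps: { url: 'mongodb://localhost:27017', db: 'pronghorn' } }
});
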
package/utils/modify.js
CHANGED
@@ -3,7 +3,7 @@ const Ajv = require('ajv');
 const rls = require('readline-sync');
 const { execSync } = require('child_process');
 const { existsSync } = require('fs-extra');
-const { getAdapterConfig } = require('./
+const { getAdapterConfig } = require('./tbUtils');
 const { name } = require('../package.json');
 const propertiesSchema = require('../propertiesSchema.json');

package/utils/patches2bundledDeps.js
ADDED
@@ -0,0 +1,90 @@
+const fs = require('fs');
+const semverSatisfies = require('semver/functions/satisfies');
+const packageJson = require('../package.json');
+
+try {
+  // pattern supplied by semver.org via https://regex101.com/r/vkijKf/1/ but removed gm from end to only match a single semver
+  // const semverPat = /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/;
+  // pattern supplied by semver.org via https://regex101.com/r/Ly7O1x/3/ with following changes
+  // removed P's from before capturing group names and
+  // removed gm from end to only match a single semver
+  // const semverPat = /^(?<major>0|[1-9]\d*)\.(?<minor>0|[1-9]\d*)\.(?<patch>0|[1-9]\d*)(?:-(?<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/;
+
+  const patches = (fs.existsSync('./patches')) ? fs.readdirSync('./patches', { withFileTypes: true }) : [];
+  if (!patches.length) {
+    console.error('\nno patches - nothing to do\n');
+    process.exitCode = 1;
+  }
+
+  const dependencies = packageJson.dependencies || {};
+  if (!Object.keys(dependencies).length) {
+    console.error('\nno dependencies - nothing to do\n');
+    process.exitCode = 1;
+  }
+
+  let changed = false;
+  console.error('\nprocessing patches');
+  const bundledDependencies = packageJson.bundledDependencies || packageJson.bundleDependencies || [];
+
+  patches.forEach((patch) => {
+    if (!patch.isFile()) {
+      console.error(`${patch.name} skipped, is not a regular file`);
+      return;
+    }
+    if (!patch.name.endsWith('.patch')) {
+      console.error(`${patch.name} skipped, does not end with .patch`);
+      return;
+    }
+    const splits = patch.name.slice(0, -6).split('+');
+    if (splits.length > 4) {
+      console.error(`${patch.name} skipped, does not follow the naming convention (cannot use '+' other than to separate scope/package/semver and at most once within semver)`);
+      return;
+    }
+    const scope = splits[0][0] === '@' ? splits.shift() : null;
+    const packageName = splits.shift();
+    const semver = splits.join('+');
+    // const { groups } = semver.match(semverPat);
+    const file = scope ? `${scope}/${packageName}` : packageName;
+    if (dependencies[file] && semverSatisfies(semver, dependencies[file])) {
+      if (!bundledDependencies.includes(file)) {
+        bundledDependencies.push(file);
+        console.error(`added ${file} to bundledDependencies`);
+        changed = true;
+      } else {
+        console.error(`bundledDependencies already has ${file}`);
+      }
+    } else {
+      const depmsg = dependencies[file] ? `version mismatch (${dependencies[file]}) in dependencies` : 'not found in dependencies';
+      console.error(`patch ${patch.name} ${depmsg}`);
+    }
+  });
+
+  if (!packageJson.bundledDependencies && bundledDependencies.length) {
+    delete packageJson.bundleDependencies;
+    packageJson.bundledDependencies = bundledDependencies;
+    console.error('renaming bundleDependencies to bundledDependencies');
+    changed = true;
+  }
+  if (changed) {
+    fs.writeFileSync('./package.json.new', JSON.stringify(packageJson, null, 2));
+    console.error('wrote package.json.new');
+    fs.renameSync('./package.json', './package.json.old');
+    console.error('moved package.json to package.json.old');
+    fs.renameSync('./package.json.new', './package.json');
+    console.error('moved package.json.new to package.json');
+  } else {
+    console.error('no changes\n');
+    process.exitCode = 1;
+  }
+} catch (e) {
+  if (e) {
+    // caught error, exit with status 2 to signify abject failure
+    console.error(`\ncaught exception - ${e}\n`);
+    process.exitCode = 2;
+  } else {
+    // caught false, exit with status 1 to signify nothing done
+    process.exitCode = 1;
+  }
+} finally {
+  console.error('done\n');
+}
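
The script keys off patch-package-style file names under ./patches, splitting on '+' to recover an optional scope, the package name, and the version, and only bundles a dependency when that version satisfies the range declared in package.json. A small sketch of the same parsing, using a hypothetical patch file name:

// hypothetical file name; mirrors the splitting logic in patches2bundledDeps.js
const patchName = '@itential+database+1.2.3.patch';
const splits = patchName.slice(0, -6).split('+');   // ['@itential', 'database', '1.2.3']
const scope = splits[0][0] === '@' ? splits.shift() : null;
const packageName = splits.shift();
const semver = splits.join('+');                    // re-join keeps any '+' used inside the semver
const file = scope ? `${scope}/${packageName}` : packageName;
console.log(file, semver);                          // prints: @itential/database 1.2.3
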
package/utils/removeHooks.js
ADDED
@@ -0,0 +1,20 @@
+const fs = require('fs');
+
+/**
+ * This script will uninstall pre-commit or pre-push hooks in case there's ever a need to
+ * commit/push something that has issues
+ */
+
+const precommitPath = '.git/hooks/pre-commit';
+const prepushPath = '.git/hooks/pre-push';
+fs.unlink(precommitPath, (err) => {
+  if (err && err.code !== 'ENOENT') {
+    console.log(`${err.message}`);
+  }
+});
+
+fs.unlink(prepushPath, (err) => {
+  if (err && err.code !== 'ENOENT') {
+    console.log(`${err.message}`);
+  }
+});
package/utils/tbScript.js
CHANGED
@@ -15,13 +15,18 @@ const basicGet = require('./basicGet');
 const { name } = require('../package.json');
 const sampleProperties = require('../sampleProperties.json');
 const adapterPronghorn = require('../pronghorn.json');
+const { addAuthInfo } = require('./addAuth');

-const { troubleshoot,
+const { troubleshoot, offline } = require('./troubleshootingAdapter');

 const main = async (command) => {
-  const
+  const dirname = utils.getDirname();
+  const iapDir = path.join(dirname, '../../../');
   if (!utils.withinIAP(iapDir)) {
-    if (command === '
+    if (command === 'install') {
+      console.log('Not currently in IAP directory - installation not possible');
+      process.exit(0);
+    } else if (command === 'connectivity') {
       const { host } = sampleProperties.properties;
       console.log(`perform networking diagnositics to ${host}`);
       await utils.runConnectivity(host);
@@ -43,7 +48,7 @@ const main = async (command) => {
   if (command === undefined) {
     await troubleshoot({}, true, true);
   } else if (command === 'install') {
-    const { database, serviceItem, pronghornProps } = await getAdapterConfig();
+    const { database, serviceItem, pronghornProps } = await utils.getAdapterConfig();
     const filter = { id: pronghornProps.id };
     const profileItem = await database.collection(utils.IAP_PROFILES_COLLECTION).findOne(filter);
     if (!profileItem) {
@@ -74,14 +79,16 @@ const main = async (command) => {
       process.exit(0);
     }
   } else {
-    utils.verifyInstallationDir(
-    utils.npmInstall();
+    utils.verifyInstallationDir(dirname, name);
     utils.runTest();
     if (rls.keyInYN(`Do you want to install ${name} to IAP?`)) {
       console.log('Creating database entries...');
       const adapter = utils.createAdapter(
         pronghornProps, profileItem, sampleProperties, adapterPronghorn
       );
+
+      adapter.properties.properties = await addAuthInfo(adapter.properties.properties);
+
       await database.collection(utils.SERVICE_CONFIGS_COLLECTION).insertOne(adapter);
       profileItem.services.push(adapter.name);
       const update = { $set: { services: profileItem.services } };
@@ -94,7 +101,7 @@ const main = async (command) => {
       process.exit(0);
     }
   } else if (['healthcheck', 'basicget', 'connectivity'].includes(command)) {
-    const { serviceItem } = await getAdapterConfig();
+    const { serviceItem } = await utils.getAdapterConfig();
     if (serviceItem) {
       const adapter = serviceItem;
       const a = basicGet.getAdapterInstance(adapter);
@@ -154,7 +161,6 @@ program.parse(process.argv);
 if (process.argv.length < 3) {
   main();
 }
-
 const allowedParams = ['install', 'healthcheck', 'basicget', 'connectivity'];
 if (process.argv.length === 3 && !allowedParams.includes(process.argv[2])) {
   console.log(`unknown parameter ${process.argv[2]}`);
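
Per the allowedParams list above, the troubleshooting script accepts the sub-commands install, healthcheck, basicget, and connectivity, and runs the interactive troubleshoot flow when called with no argument:

node utils/tbScript.js                (no argument: interactive troubleshooting)
node utils/tbScript.js install
node utils/tbScript.js healthcheck
node utils/tbScript.js basicget
node utils/tbScript.js connectivity
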
package/utils/tbUtils.js
CHANGED
@@ -100,8 +100,8 @@ module.exports = {
    *
    * @function decryptProperties
    */
-  decryptProperties: (props,
-    const propertyEncryptionClassPath = path.join(
+  decryptProperties: (props, iapDir, discovery) => {
+    const propertyEncryptionClassPath = path.join(iapDir, 'node_modules/@itential/pronghorn-core/core/PropertyEncryption.js');
     const isEncrypted = props.pathProps.encrypted;
     const PropertyEncryption = discovery.require(propertyEncryptionClassPath, isEncrypted);
     const propertyEncryption = new PropertyEncryption({
@@ -177,12 +177,12 @@ module.exports = {
   verifyInstallationDir: (dirname, name) => {
     const pathArray = dirname.split(path.sep);
     const expectedPath = `node_modules/${name}`;
-    const currentPath = pathArray.slice(pathArray.length -
-    if (
-      throw new Error(`adapter should be installed under ${expectedPath}`);
+    const currentPath = pathArray.slice(pathArray.length - 3, pathArray.length).join('/');
+    if (currentPath.trim() !== expectedPath.trim()) {
+      throw new Error(`adapter should be installed under ${expectedPath} but is installed under ${currentPath}`);
     }

-    const serverFile = path.join(dirname, '
+    const serverFile = path.join(dirname, '../../../', 'server.js');
     if (!fs.existsSync(serverFile)) {
       throw new Error(`adapter should be installed under IAP/${expectedPath}`);
     }
@@ -304,23 +304,72 @@ module.exports = {
    * @param {Object} adapterPronghorn - ./pronghorn.json in adapter dir
    * @param {Object} sampleProperties - './sampleProperties.json' in adapter dir
    */
-  createAdapter: (pronghornProps, profileItem, sampleProperties, adapterPronghorn)
-    const
-
-
-
-
-
-
-
-
-
+  createAdapter: function createAdapter(pronghornProps, profileItem, sampleProperties, adapterPronghorn) {
+    const dirname = this.getDirname();
+    const packagePath = `${dirname.split('node_modules')[0]}package.json`;
+    const info = JSON.parse(fs.readFileSync(packagePath));
+    const version = parseInt(info.version.split('.')[0], 10);
+
+    let adapter = {};
+    if (version >= 2020) {
+      adapter = {
+        isEncrypted: pronghornProps.pathProps.encrypted,
+        model: adapterPronghorn.id,
+        name: sampleProperties.id,
+        type: adapterPronghorn.type,
+        properties: sampleProperties,
+        loggerProps: profileItem.loggerProps
+      };
+    } else {
+      adapter = {
+        mongoProps: pronghornProps.mongoProps,
+        isEncrypted: pronghornProps.pathProps.encrypted,
+        model: adapterPronghorn.id,
+        name: sampleProperties.id,
+        type: adapterPronghorn.type,
+        properties: sampleProperties,
+        redisProps: profileItem.redisProps,
+        loggerProps: profileItem.loggerProps,
+        rabbitmq: profileItem.rabbitmq
+      };
+      adapter.mongoProps.pdb = true;
+    }
+
     adapter.loggerProps.log_filename = `adapter-${adapter.name}.log`;
-    adapter.mongoProps.pdb = true;
     return adapter;
   },

+  getPronghornProps: function getPronghornProps(iapDir) {
+    console.log('Retrieving properties.json file...');
+    const rawProps = require(path.join(iapDir, 'properties.json'));
+    console.log('Decrypting properties...');
+    const { Discovery } = require(path.join(iapDir, 'node_modules/@itential/itential-utils'));
+    const discovery = new Discovery();
+    const pronghornProps = this.decryptProperties(rawProps, iapDir, discovery);
+    console.log('Found properties.\n');
+    return pronghornProps;
+  },
+
+  // get database connection and existing adapter config
+  getAdapterConfig: async function getAdapterConfig() {
+    const newDirname = this.getDirname();
+    let iapDir;
+    if (this.withinIAP(newDirname)) { // when this script is called from IAP
+      iapDir = newDirname;
+    } else {
+      iapDir = path.join(this.getDirname(), 'utils', '../../../../');
+    }
+    const pronghornProps = this.getPronghornProps(iapDir);
+    console.log('Connecting to Database...');
+    const database = await this.connect(iapDir, pronghornProps);
+    console.log('Connection established.');
+    const { name } = require(path.join(__dirname, '..', 'package.json'));
+    const serviceItem = await database.collection(this.SERVICE_CONFIGS_COLLECTION).findOne(
+      { model: name }
+    );
+    return { database, serviceItem, pronghornProps };
+  },
+
   /**
    * @summary return async healthcheck result as a Promise
    *
@@ -368,5 +417,35 @@
     } catch (error) {
       return false;
     }
+  },
+
+  /**
+   * @summary Used to determine the proper dirname to return in case adapter reference is
+   * symlinked withink IAP
+   * @returns the symlinked path (using pwd command) of the adapter in case properties.json
+   * is not found in the original path
+   * @function getDirname
+   */
+  getDirname: function getDirname() {
+    if (this.withinIAP(path.join(__dirname, '../../../../'))) {
+      return __dirname;
+    }
+    const { stdout } = this.systemSync('pwd', true);
+    return stdout.trim();
+  },
+
+  /**
+   * @summary connect to mongodb
+   *
+   * @function connect
+   * @param {Object} properties - pronghornProps
+   */
+  connect: async function connect(iapDir, properties) {
+    // Connect to Mongo
+    const { MongoDBConnection } = require(path.join(iapDir, 'node_modules/@itential/database'));
+    const connection = new MongoDBConnection(properties.mongoProps);
+    const database = await connection.connect(true);
+    return database;
   }
+
 };
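
The helpers added here (getPronghornProps, getAdapterConfig, getDirname, connect) take over the Mongo connection duties removed from basicGet.js and are what tbScript.js now reaches through utils. A condensed sketch of that call path, assuming the adapter is installed under an IAP node_modules tree so withinIAP and properties.json resolve:

// sketch only - requires a reachable IAP installation and its MongoDB
const utils = require('./utils/tbUtils');

(async () => {
  const { database, serviceItem, pronghornProps } = await utils.getAdapterConfig();
  console.log(`adapter config ${serviceItem ? 'found' : 'missing'} for profile ${pronghornProps.id}`);
  // `database` is the @itential/database connection used to update SERVICE_CONFIGS_COLLECTION
})();
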