@itentialopensource/adapter-winston_syslog 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. package/.eslintignore +5 -0
  2. package/.eslintrc.js +18 -0
  3. package/.jshintrc +3 -0
  4. package/CHANGELOG.md +9 -0
  5. package/LICENSE +201 -0
  6. package/README.md +285 -0
  7. package/adapter.js +250 -0
  8. package/adapterBase.js +1782 -0
  9. package/entities/.generic/action.json +214 -0
  10. package/entities/.generic/schema.json +28 -0
  11. package/entities/.system/action.json +50 -0
  12. package/entities/.system/mockdatafiles/getToken-default.json +3 -0
  13. package/entities/.system/mockdatafiles/healthcheck-default.json +3 -0
  14. package/entities/.system/schema.json +19 -0
  15. package/entities/.system/schemaTokenReq.json +53 -0
  16. package/entities/.system/schemaTokenResp.json +53 -0
  17. package/error.json +190 -0
  18. package/package.json +87 -0
  19. package/pronghorn.json +93 -0
  20. package/propertiesDecorators.json +14 -0
  21. package/propertiesSchema.json +1243 -0
  22. package/refs?service=git-upload-pack +0 -0
  23. package/report/adapterInfo.json +10 -0
  24. package/sampleProperties.json +60 -0
  25. package/test/integration/adapterTestIntegration.js +329 -0
  26. package/test/unit/adapterBaseTestUnit.js +949 -0
  27. package/test/unit/adapterTestUnit.js +1207 -0
  28. package/utils/adapterInfo.js +206 -0
  29. package/utils/addAuth.js +94 -0
  30. package/utils/artifactize.js +146 -0
  31. package/utils/basicGet.js +50 -0
  32. package/utils/checkMigrate.js +63 -0
  33. package/utils/entitiesToDB.js +179 -0
  34. package/utils/findPath.js +74 -0
  35. package/utils/modify.js +154 -0
  36. package/utils/packModificationScript.js +35 -0
  37. package/utils/patches2bundledDeps.js +90 -0
  38. package/utils/pre-commit.sh +30 -0
  39. package/utils/removeHooks.js +20 -0
  40. package/utils/setup.js +33 -0
  41. package/utils/tbScript.js +184 -0
  42. package/utils/tbUtils.js +469 -0
  43. package/utils/testRunner.js +298 -0
  44. package/utils/troubleshootingAdapter.js +190 -0
  45. package/workflows/README.md +3 -0
@@ -0,0 +1,179 @@
1
+ /* @copyright Itential, LLC 2021 */
2
+
3
+ // Set globals
4
+ /* global log */
5
+
6
+ /* eslint import/no-dynamic-require: warn */
7
+ /* eslint global-require: warn */
8
+ /* eslint no-unused-vars: warn */
9
+ /* eslint import/no-unresolved: warn */
10
+
11
+ /**
12
+ * This script is used to read through an adapter's entities files
13
+ * and then creates documents and enters them into the IAP mongodb
14
+ */
15
+
16
+ const fs = require('fs');
17
+ const path = require('path');
18
+ const utils = require('./tbUtils');
19
+
20
/**
 * Loads the IAP properties.json from the given directory and decrypts it.
 *
 * @param {String} iapDir - directory containing properties.json
 * @returns {Promise<Object>} the decrypted pronghorn properties
 */
const getPronghornProps = async (iapDir) => {
  log.trace('Retrieving properties.json file...');
  const encryptedProps = require(path.join(iapDir, 'properties.json'));
  log.trace('Decrypting properties...');
  const { PropertyEncryption } = require('@itential/itential-utils');
  const decryptor = new PropertyEncryption();
  const decryptedProps = await decryptor.decryptProps(encryptedProps);
  log.trace('Found properties.\n');
  return decryptedProps;
};
31
+
32
/**
 * Function used to take a file path to an entity directory and build
 * a document that corresponds to the entity files.
 *
 * Reads from `pathstring`:
 *   - mockdatafiles/*.json -> collected into an object keyed by file name
 *   - action.json          -> the entity's actions array
 *   - all other *.json     -> collected as { name, schema } entries
 *
 * @param {String} pathstring - path to the entity directory
 * @returns {Object} { actions, schema, mockdatafiles }; `actions` is
 *          undefined when the entity has no action.json
 */
const buildDoc = (pathstring) => {
  let files = fs.readdirSync(pathstring);

  // load the mockdatafiles
  const mockdatafiles = {};
  if (files.includes('mockdatafiles') && fs.lstatSync(`${pathstring}/mockdatafiles`).isDirectory()) {
    fs.readdirSync(`${pathstring}/mockdatafiles`).forEach((file) => {
      if (file.split('.').pop() === 'json') {
        const mockpath = `${pathstring}/mockdatafiles/${file}`;
        const data = JSON.parse(fs.readFileSync(mockpath));
        mockdatafiles[mockpath.split('/').pop()] = data;
      }
    });
  }

  // load the action data
  let actions;
  if (files.includes('action.json')) {
    actions = JSON.parse(fs.readFileSync(`${pathstring}/action.json`));
  }

  // Load schema.json and other schemas in remaining json files
  files = files.filter((f) => (f !== 'action.json') && f.endsWith('.json'));
  const schema = [];
  files.forEach((file) => {
    const data = JSON.parse(fs.readFileSync(`${pathstring}/${file}`));
    schema.push({
      name: file,
      schema: data
    });
  });

  // return the data; guard `actions` - the original dereferenced
  // actions.actions unconditionally and threw a TypeError for any
  // entity directory without an action.json
  return {
    actions: actions ? actions.actions : undefined,
    schema,
    mockdatafiles
  };
};
75
+
76
/**
 * Function used to get the database properties from the options or a
 * provided directory.
 *
 * @param {Object} options - may carry pronghornProps (object or JSON string)
 *                           or iapDir (directory to load/decrypt props from)
 * @returns {Promise<Object>} always a Promise - the caller chains .then();
 *          the original returned a bare object for the JSON-string case,
 *          which made that chain throw "then is not a function"
 */
const optionsHandler = (options) => {
  // if the database properties were provided in the options - return them
  if (options.pronghornProps) {
    if (typeof options.pronghornProps === 'string') {
      return Promise.resolve(JSON.parse(options.pronghornProps));
    }
    return Promise.resolve(options.pronghornProps);
  }

  // if the directory was provided, get the pronghorn props from the directory
  if (options.iapDir) {
    return getPronghornProps(options.iapDir);
  }

  // if nothing was provided, error
  return Promise.reject(new Error('Neither pronghornProps nor iapDir defined in options!'));
};
96
+
97
/**
 * Function used to put the adapter configuration into the provided database.
 *
 * @param {String} targetPath - adapter root; defaults to the directory above
 *                              utils when falsy
 * @param {Object|String} options - object or JSON string with the adapter id
 *        (required), optional targetCollection, and either pronghornProps or
 *        iapDir so the database can be located
 * @returns {Promise<Object>} result of the insertMany call
 * @throws {Error} when the id is missing, required paths do not exist, or
 *         the database cannot be reached
 */
const moveEntitiesToDB = async (targetPath, options) => {
  // set local variables
  let myOpts = options;
  let myPath = targetPath;

  // if we got a string parse into a JSON object
  if (typeof myOpts === 'string') {
    myOpts = JSON.parse(myOpts);
  }

  // if there is no target collection - set the collection to the default
  if (!myOpts.targetCollection) {
    myOpts.targetCollection = 'adapter_configs';
  }

  // if there is no id error since we need an id for the entities
  if (!myOpts.id) {
    throw new Error('Adapter ID required!');
  }

  // get the pronghorn database properties
  // (pass the parsed myOpts - the original passed the raw `options`,
  // which could still be an unparsed JSON string)
  return optionsHandler(myOpts).then(async (currentProps) => {
    // Check valid filepath provided
    if (!myPath) {
      // if no path use the current directory without the utils
      myPath = path.join(__dirname, '../');
    } else if (myPath.slice(-1) === '/') {
      myPath = myPath.slice(0, -1);
    }

    // verify set the entity path
    const entitiesPath = `${myPath}/entities`;
    if (!fs.existsSync(entitiesPath)) {
      throw new Error(`Entities path does not exist in filesystem: ${entitiesPath}`);
    } else {
      log.trace('Target found on filesystem');
    }

    // Get adapter details
    if (!fs.existsSync(`${myPath}/pronghorn.json`)) {
      throw new Error(`pronghorn.json does not exist in path: ${myPath}`);
    } else {
      log.trace('pronghorn.json found on filesystem');
    }
    const adapterData = JSON.parse(fs.readFileSync(`${myPath}/pronghorn.json`));

    // Load files from the filesystem
    const docs = [];
    const entities = fs.readdirSync(entitiesPath);
    entities.forEach((entity) => {
      const entityPath = `${entitiesPath}/${entity}`;
      // stat the entity itself - the original statted the parent entities
      // directory, so stray plain files in entities/ were also "directories"
      const isDir = fs.lstatSync(entityPath).isDirectory();

      // Build doc for entity
      if (isDir) {
        let doc = buildDoc(entityPath);
        doc = {
          id: myOpts.id,
          type: adapterData.id,
          entity,
          ...doc
        };
        docs.push(doc);
      }
    });

    // Upload documents to db collection
    const iapDir = utils.getIAPHome();
    const db = await utils.connect(iapDir, currentProps).catch((err) => { console.error(err); throw err; });
    if (!db) {
      console.error('Error occurred when connecting to database', currentProps);
      throw new Error('Database not found');
    }
    const collection = db.collection(myOpts.targetCollection);
    const res = await collection.insertMany(docs, { checkKeys: false }).catch((err) => { console.error(err); throw err; });
    return res;
  });
};
178
+
179
+ module.exports = { moveEntitiesToDB };
@@ -0,0 +1,74 @@
1
+ #!/usr/bin/env node
2
+ /* @copyright Itential, LLC 2019 */
3
+ /* eslint global-require:warn */
4
+ /* eslint import/no-dynamic-require:warn */
5
+ /* eslint prefer-destructuring:warn */
6
+
7
+ const fs = require('fs-extra');
8
+ const path = require('path');
9
+ const rls = require('readline-sync');
10
+
11
+ /**
12
+ * This script will determine the type of integration test to run
13
+ * based on input. If other information is needed, it will solicit
14
+ * that input and then edit the integration test accordingly.
15
+ */
16
+
17
/**
 * Scans every entity's action.json for actions whose entitypath contains
 * the provided path fragment and logs each match.
 *
 * @param {String} apath - path/partial path to look for
 * @returns {String} 'Done' when finished (or no path given), 'error' when
 *          an entity is malformed
 */
function checkActionFiles(apath) {
  // verify the path
  if (!apath) {
    console.log(' NO PATH PROVIDED!');
    return 'Done';
  }

  // make sure the entities directory exists
  const entitydir = path.join(__dirname, '../entities');
  if (!fs.statSync(entitydir).isDirectory()) {
    console.log('Could not find the entities directory');
    return 'error';
  }

  let found = false;

  // need to go through each entity in the entities directory
  for (const entity of fs.readdirSync(entitydir)) {
    // make sure the entity is a directory - do not care about extra files
    // only entities (dir)
    if (!fs.statSync(`${entitydir}/${entity}`).isDirectory()) {
      console.log(`Could not find entities ${entity} directory`);
      return 'error';
    }
    // see if the action file exists in the entity
    if (!fs.existsSync(`${entitydir}/${entity}/action.json`)) {
      console.log(`Could not find entities ${entity} action.json file`);
      return 'error';
    }

    // Read the entity actions from the file system
    const actions = require(`${entitydir}/${entity}/action.json`);

    // go through all of the actions and report the ones matching the path
    for (const action of actions.actions) {
      if (action.entitypath.indexOf(apath) >= 0) {
        found = true;
        console.log(`  Found - entity: ${entity} action: ${action.name}`);
        console.log(`      method: ${action.method} path: ${action.entitypath}`);
        console.log(' ');
      }
    }
  }

  if (!found) {
    console.log(' PATH NOT FOUND!');
  }
  return 'Done';
}
71
+
72
// prompt the user for the path fragment to search for, echo it back,
// then scan all entity action files and report the matching actions
const findPath = rls.question('Enter the path/partial path you are looking for: ');
console.log(`PATH: ${findPath}`);
checkActionFiles(findPath);
@@ -0,0 +1,154 @@
1
+ const fs = require('fs-extra');
2
+ const Ajv = require('ajv');
3
+ const rls = require('readline-sync');
4
+ const { execSync } = require('child_process');
5
+ const { existsSync } = require('fs-extra');
6
+ const { getAdapterConfig } = require('./tbUtils');
7
+ const { name } = require('../package.json');
8
+ const propertiesSchema = require('../propertiesSchema.json');
9
+
10
+ const flags = process.argv[2];
11
+
12
/**
 * @summary Updates database instance with new adapter properties
 *
 * Fetches the adapter's service config document, runs its properties
 * through the propertiesSchema (Ajv with useDefaults fills in missing
 * defaults in place), then writes the whole serviceItem back.
 *
 * @function updateServiceItem
 * @returns {Promise<void>} resolves once the service_configs doc is updated
 */
async function updateServiceItem() {
  const { database, serviceItem } = await getAdapterConfig();
  const currentProps = serviceItem.properties.properties;
  const ajv = new Ajv({ allErrors: true, useDefaults: true });
  const validate = ajv.compile(propertiesSchema);
  // NOTE(review): the validation result is discarded - invalid properties
  // are still written to the database. Confirm this is intentional.
  validate(currentProps);
  console.log('Updating Properties...');
  await database.collection('service_configs').updateOne(
    { model: name }, { $set: serviceItem }
  );
  console.log('Properties Updated');
}
29
+
30
/**
 * @summary Creates a backup zip file of current adapter
 *
 * @function backup
 */
function backup() {
  // zip the entire adapter directory into previousVersion.zip
  // NOTE(review): nothing is excluded here - node_modules and
  // package-lock.json end up inside the archive as well
  const backupCmd = 'zip -r previousVersion.zip .';
  execSync(backupCmd, { encoding: 'utf-8' });
}
40
+
41
/**
 * @summary Archives previous modifications and removes the modification package
 *
 * @function archiveMod
 * @param {String} modType - update(UPD) or migrate(MIG)
 */
function archiveMod(modType) {
  if (!existsSync('./adapter_modifications/archive')) {
    execSync('mkdir ./adapter_modifications/archive');
  }
  const zipFile = modType === 'UPD' ? 'updatePackage.zip' : 'migrationPackage.zip';
  const archiveName = `${modType}-${new Date().toISOString()}`;
  execSync(`mkdir adapter_modifications/archive/${archiveName}`);
  // move the archive dir aside, sweep the modification files into the new
  // timestamped folder, move the archive back, then delete the zip package
  const steps = [
    'mv adapter_modifications/archive .',
    `mv adapter_modifications/* archive/${archiveName}`,
    'mv archive adapter_modifications',
    `rm ${zipFile}`
  ];
  execSync(steps.join(' && '), { encoding: 'utf-8' });
}
61
+
62
/**
 * @summary Reverts modifications using backup zip file
 *
 * @function revertMod
 */
function revertMod() {
  // delete everything in the adapter root except the backup zip itself
  for (const entry of fs.readdirSync('./')) {
    if (entry !== 'previousVersion.zip') {
      fs.removeSync(entry);
    }
  }
  // unzip previousVersion, reinstall dependencies and delete zipfile
  execSync('unzip -o previousVersion.zip && rm -rf node_modules && rm package-lock.json && npm install');
  execSync('rm previousVersion.zip');
  console.log('Changes have been reverted');
}
80
+
81
// Main Script

// Migrate: unpack migrationPackage.zip, run the migration, reinstall
// dependencies and push the refreshed properties into the database.
if (flags === '-m') {
  if (!fs.existsSync('migrationPackage.zip')) {
    console.log('Migration Package not found. Download and place migrationPackage in the adapter root directory');
    process.exit();
  }
  // Backup current adapter
  backup();
  console.log('Migrating adapter and running tests...');
  const migrateCmd = 'unzip -o migrationPackage.zip'
    + ' && cd adapter_modifications'
    + ' && node migrate';
  const migrateOutput = execSync(migrateCmd, { encoding: 'utf-8' });
  console.log(migrateOutput);
  // offer a rollback when the migration's lint/test runs reported failure
  if (migrateOutput.indexOf('Lint exited with code 1') >= 0
    || migrateOutput.indexOf('Tests exited with code 1') >= 0) {
    if (rls.keyInYN('Adapter failed tests or lint after migrating. Would you like to revert the changes?')) {
      console.log('Reverting changes...');
      revertMod();
      process.exit();
    }
    console.log('Adapter Migration will continue. If you want to revert the changes, run the command npm run adapter:revert');
  }
  console.log('Installing new dependencies..');
  const updatePackageCmd = 'rm -rf node_modules && rm package-lock.json && npm install';
  const updatePackageOutput = execSync(updatePackageCmd, { encoding: 'utf-8' });
  console.log(updatePackageOutput);
  console.log('New dependencies installed');
  console.log('Updating adapter properties..');
  updateServiceItem().then(() => {
    console.log('Adapter Successfully Migrated. Restart adapter in IAP to apply the changes');
    archiveMod('MIG');
    process.exit();
  }).catch((err) => {
    // the original chain had no rejection handler, so a failed property
    // update surfaced only as an unhandled promise rejection
    console.error(`Failed to update adapter properties: ${err.message}`);
    process.exit(1);
  });
}
118
+
119
// Update: unpack updatePackage.zip and apply the file updates.
if (flags === '-u') {
  if (!fs.existsSync('updatePackage.zip')) {
    console.log('Update Package not found. Download and place updateAdapter.zip in the adapter root directory');
    process.exit();
  }
  // Backup current adapter
  backup();
  const updateCmd = 'unzip -o updatePackage.zip'
    + ' && cd adapter_modifications'
    + ' && node update.js updateFiles';
  // run the update exactly once and keep its output - the original called
  // execSync(updateCmd) twice back to back, applying the whole update a
  // second time and discarding the first run's output
  const updateOutput = execSync(updateCmd, { encoding: 'utf-8' });
  // offer a rollback when the update's lint/test runs reported failure
  if (updateOutput.indexOf('Lint exited with code 1') >= 0
    || updateOutput.indexOf('Tests exited with code 1') >= 0) {
    if (rls.keyInYN('Adapter failed tests or lint after updating. Would you like to revert the changes?')) {
      console.log('Reverting changes...');
      revertMod();
      process.exit();
    }
    console.log('Adapter Update will continue. If you want to revert the changes, run the command npm run adapter:revert');
  }
  console.log(updateOutput);
  console.log('Adapter Successfully Updated. Restart adapter in IAP to apply the changes');
  archiveMod('UPD');
  process.exit();
}
146
+
147
// Revert: restore the adapter from the backup zip created before a
// migrate/update run; a revert is only possible when that backup exists
if (flags === '-r') {
  if (!fs.existsSync('previousVersion.zip')) {
    console.log('Previous adapter version not found. There are no changes to revert');
    process.exit();
  }
  revertMod();
}
@@ -0,0 +1,35 @@
1
+ #!/usr/bin/env node
2
+ /* @copyright Itential, LLC 2019 */
3
+
4
+ const fs = require('fs-extra');
5
+ const path = require('path');
6
+ const { spawnSync } = require('child_process');
7
+ const { createBundle } = require('./artifactize');
8
+
9
// Bundle the adapter (node_modules included) and produce a tar via npm pack.
const nodeEntryPath = path.resolve('.');
createBundle(nodeEntryPath).then((pathObj) => {
  const { bundlePath, bundledAdapterPath } = pathObj;
  const npmIgnorePath = path.join(bundledAdapterPath, '.npmignore');
  const adapterPackagePath = path.join(bundledAdapterPath, 'package.json');
  const artifactPackagePath = path.join(bundlePath, 'package.json');

  // remove node_modules from .npmIgnore so that node_modules are included in the resulting tar from npm pack
  let npmIgnoreString;
  if (fs.existsSync(npmIgnorePath)) {
    npmIgnoreString = fs.readFileSync(npmIgnorePath, 'utf8');
    npmIgnoreString = npmIgnoreString.replace('node_modules', '');
    npmIgnoreString = npmIgnoreString.replace('\n\n', '\n');
    fs.writeFileSync(npmIgnorePath, npmIgnoreString);
  }

  // add files to package so that node_modules are included in the resulting tar from npm pack
  const adapterPackage = fs.readJSONSync(adapterPackagePath);
  adapterPackage.files = ['*'];
  fs.writeJSONSync(artifactPackagePath, adapterPackage, { spaces: 2 });
  const npmResult = spawnSync('npm', ['pack', '-q', bundlePath], { cwd: path.resolve(bundlePath, '..') });
  if (npmResult.status === 0) {
    fs.removeSync(bundlePath);
    console.log('Bundle folder removed');
  }
  console.log('Script successful');
}).catch((err) => {
  // the original chain had no .catch, so any bundling failure surfaced
  // only as an unhandled promise rejection with a success-looking exit
  console.error(`Bundling failed: ${err.message}`);
  process.exitCode = 1;
});
@@ -0,0 +1,90 @@
1
+ const fs = require('fs');
2
+ const semverSatisfies = require('semver/functions/satisfies');
3
+ const packageJson = require('../package.json');
4
+
5
// For every ./patches/<scope>+<package>+<semver>.patch file whose package and
// version match an entry in package.json dependencies, ensure that package is
// listed in bundledDependencies, then rewrite package.json (keeping the old
// copy as package.json.old). Exit codes: 0 changed, 1 nothing to do, 2 error.
try {
  // pattern supplied by semver.org via https://regex101.com/r/vkijKf/1/ but removed gm from end to only match a single semver
  // const semverPat = /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/;
  // pattern supplied by semver.org via https://regex101.com/r/Ly7O1x/3/ with following changes
  // removed P's from before capturing group names and
  // removed gm from end to only match a single semver
  // const semverPat = /^(?<major>0|[1-9]\d*)\.(?<minor>0|[1-9]\d*)\.(?<patch>0|[1-9]\d*)(?:-(?<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/;

  // withFileTypes gives Dirent objects so entries can be checked with isFile()
  const patches = (fs.existsSync('./patches')) ? fs.readdirSync('./patches', { withFileTypes: true }) : [];
  if (!patches.length) {
    console.error('\nno patches - nothing to do\n');
    process.exitCode = 1;
  }

  const dependencies = packageJson.dependencies || {};
  if (!Object.keys(dependencies).length) {
    console.error('\nno dependencies - nothing to do\n');
    process.exitCode = 1;
  }

  let changed = false;
  console.error('\nprocessing patches');
  // accept either spelling; npm historically used bundleDependencies
  const bundledDependencies = packageJson.bundledDependencies || packageJson.bundleDependencies || [];

  patches.forEach((patch) => {
    if (!patch.isFile()) {
      console.error(`${patch.name} skipped, is not a regular file`);
      return;
    }
    if (!patch.name.endsWith('.patch')) {
      console.error(`${patch.name} skipped, does not end with .patch`);
      return;
    }
    // strip the '.patch' suffix and split on '+': [scope?, package, semver...]
    const splits = patch.name.slice(0, -6).split('+');
    if (splits.length > 4) {
      console.error(`${patch.name} skipped, does not follow the naming convention (cannot use '+' other than to separate scope/package/semver and at most once within semver)`);
      return;
    }
    const scope = splits[0][0] === '@' ? splits.shift() : null;
    const packageName = splits.shift();
    // rejoin so a single '+' inside semver build metadata survives
    const semver = splits.join('+');
    // const { groups } = semver.match(semverPat);
    const file = scope ? `${scope}/${packageName}` : packageName;
    if (dependencies[file] && semverSatisfies(semver, dependencies[file])) {
      if (!bundledDependencies.includes(file)) {
        bundledDependencies.push(file);
        console.error(`added ${file} to bundledDependencies`);
        changed = true;
      } else {
        console.error(`bundledDependencies already has ${file}`);
      }
    } else {
      const depmsg = dependencies[file] ? `version mismatch (${dependencies[file]}) in dependencies` : 'not found in dependencies';
      console.error(`patch ${patch.name} ${depmsg}`);
    }
  });

  // normalize to the 'bundledDependencies' spelling when entries exist
  if (!packageJson.bundledDependencies && bundledDependencies.length) {
    delete packageJson.bundleDependencies;
    packageJson.bundledDependencies = bundledDependencies;
    console.error('renaming bundleDependencies to bundledDependencies');
    changed = true;
  }
  if (changed) {
    // write-then-rename so the original survives as package.json.old
    fs.writeFileSync('./package.json.new', JSON.stringify(packageJson, null, 2));
    console.error('wrote package.json.new');
    fs.renameSync('./package.json', './package.json.old');
    console.error('moved package.json to package.json.old');
    fs.renameSync('./package.json.new', './package.json');
    console.error('moved package.json.new to package.json');
  } else {
    console.error('no changes\n');
    process.exitCode = 1;
  }
} catch (e) {
  if (e) {
    // caught error, exit with status 2 to signify abject failure
    console.error(`\ncaught exception - ${e}\n`);
    process.exitCode = 2;
  } else {
    // caught false, exit with status 1 to signify nothing done
    process.exitCode = 1;
  }
} finally {
  console.error('done\n');
}
@@ -0,0 +1,30 @@
1
+ #!/bin/sh
2
+ # @copyright Itential, LLC 2019
3
+
4
+ #exit on any failure in the pipeline
5
+ set -e
6
+
7
+ # --------------------------------------------------
8
+ # pre-commit
9
+ # --------------------------------------------------
10
+ # Contains the standard set of tasks to runbefore
11
+ # committing changes to the repo. If any tasks fail
12
+ # then the commit will be aborted.
13
+ # --------------------------------------------------
14
+
15
+ printf "%b" "Running pre-commit hooks...\\n"
16
+
17
+ # verify testing script is stubbed and no credentials
18
+ node utils/testRunner.js -r
19
+
20
+ # update the adapter information file
21
+ node utils/adapterInfo.js
22
+
23
+ # security audit on the code
24
+ npm audit --registry=https://registry.npmjs.org --audit-level=moderate
25
+
26
+ # lint the code
27
+ npm run lint
28
+
29
+ # test the code
30
+ npm run test
@@ -0,0 +1,20 @@
1
const fs = require('fs');

/**
 * This script will uninstall pre-commit or pre-push hooks in case there's ever a need to
 * commit/push something that has issues
 */

// remove both git hooks; a hook that is already absent (ENOENT) is fine
['.git/hooks/pre-commit', '.git/hooks/pre-push'].forEach((hookPath) => {
  fs.unlink(hookPath, (err) => {
    if (err && err.code !== 'ENOENT') {
      console.log(`${err.message}`);
    }
  });
});
package/utils/setup.js ADDED
@@ -0,0 +1,33 @@
1
#!/usr/bin/env node
/* @copyright Itential, LLC 2019 */

const fs = require('fs');

/**
 * This script will execute before an npm install command. The purpose is to
 * write out some standard git hooks that will enable folks working on this
 * project to benefit from the protections that the hooks provide.
 */

// the hook body is the checked-in pre-commit shell script
const precommit = fs.readFileSync('utils/pre-commit.sh', 'utf8');

// only install the hook when running inside a git working copy
fs.stat('.git', (err) => {
  if (err == null) {
    // git repo, not an npm repo.
    // add pre-commit hook if it doesn't exist
    fs.stat('.git/hooks/pre-commit', (statErr) => {
      // ENOENT means no hook yet; in either case (re)write it as executable
      if (statErr == null || statErr.code === 'ENOENT') {
        fs.writeFile('.git/hooks/pre-commit', precommit, {
          mode: 0o755
        }, (writeErr) => {
          if (writeErr) {
            return console.log(writeErr.message);
          }
          return null;
        });
      } else {
        console.log(statErr.message);
      }
    });
  }
});