@itentialopensource/adapter-netbox_v210 0.1.1 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -356,6 +356,7 @@ describe('[unit] Netbox_v210 Adapter Test', () => {
356
356
  assert.notEqual(undefined, packageDotJson.scripts);
357
357
  assert.notEqual(null, packageDotJson.scripts);
358
358
  assert.notEqual('', packageDotJson.scripts);
359
+ assert.equal('node utils/setup.js && npm install --package-lock-only --ignore-scripts && npx npm-force-resolutions', packageDotJson.scripts.preinstall);
359
360
  assert.equal('node --max_old_space_size=4096 ./node_modules/eslint/bin/eslint.js . --ext .json --ext .js', packageDotJson.scripts.lint);
360
361
  assert.equal('node --max_old_space_size=4096 ./node_modules/eslint/bin/eslint.js . --ext .json --ext .js --quiet', packageDotJson.scripts['lint:errors']);
361
362
  assert.equal('mocha test/unit/adapterBaseTestUnit.js --LOG=error', packageDotJson.scripts['test:baseunit']);
@@ -363,6 +364,8 @@ describe('[unit] Netbox_v210 Adapter Test', () => {
363
364
  assert.equal('mocha test/integration/adapterTestIntegration.js --LOG=error', packageDotJson.scripts['test:integration']);
364
365
  assert.equal('nyc --reporter html --reporter text mocha --reporter dot test/*', packageDotJson.scripts['test:cover']);
365
366
  assert.equal('npm run test:baseunit && npm run test:unit && npm run test:integration', packageDotJson.scripts.test);
367
+ assert.equal('npm publish --registry=https://registry.npmjs.org --access=public', packageDotJson.scripts.deploy);
368
+ assert.equal('npm run deploy', packageDotJson.scripts.build);
366
369
  done();
367
370
  } catch (error) {
368
371
  log.error(`Test Failure: ${error}`);
@@ -375,6 +378,9 @@ describe('[unit] Netbox_v210 Adapter Test', () => {
375
378
  assert.notEqual(undefined, packageDotJson.repository);
376
379
  assert.notEqual(null, packageDotJson.repository);
377
380
  assert.notEqual('', packageDotJson.repository);
381
+ assert.equal('git', packageDotJson.repository.type);
382
+ assert.equal('git@gitlab.com:itentialopensource/adapters/', packageDotJson.repository.url.substring(0, 43));
383
+ assert.equal('https://gitlab.com/itentialopensource/adapters/', packageDotJson.homepage.substring(0, 47));
378
384
  done();
379
385
  } catch (error) {
380
386
  log.error(`Test Failure: ${error}`);
@@ -1126,6 +1132,18 @@ describe('[unit] Netbox_v210 Adapter Test', () => {
1126
1132
  });
1127
1133
  });
1128
1134
 
1135
+ describe('#moveEntitiesToDB', () => {
1136
+ it('should have a moveEntitiesToDB function', (done) => {
1137
+ try {
1138
+ assert.equal(true, typeof a.moveEntitiesToDB === 'function');
1139
+ done();
1140
+ } catch (error) {
1141
+ log.error(`Test Failure: ${error}`);
1142
+ done(error);
1143
+ }
1144
+ });
1145
+ });
1146
+
1129
1147
  describe('#checkActionFiles', () => {
1130
1148
  it('should have a checkActionFiles function', (done) => {
1131
1149
  try {
@@ -1255,6 +1273,90 @@ describe('[unit] Netbox_v210 Adapter Test', () => {
1255
1273
  // }).timeout(attemptTimeout);
1256
1274
  // });
1257
1275
 
1276
+ describe('#hasEntities', () => {
1277
+ it('should have a hasEntities function', (done) => {
1278
+ try {
1279
+ assert.equal(true, typeof a.hasEntities === 'function');
1280
+ done();
1281
+ } catch (error) {
1282
+ log.error(`Test Failure: ${error}`);
1283
+ done(error);
1284
+ }
1285
+ });
1286
+ });
1287
+
1288
+ describe('#hasDevices', () => {
1289
+ it('should have a hasDevices function', (done) => {
1290
+ try {
1291
+ assert.equal(true, typeof a.hasDevices === 'function');
1292
+ done();
1293
+ } catch (error) {
1294
+ log.error(`Test Failure: ${error}`);
1295
+ done(error);
1296
+ }
1297
+ });
1298
+ });
1299
+
1300
+ describe('#getDevice', () => {
1301
+ it('should have a getDevice function', (done) => {
1302
+ try {
1303
+ assert.equal(true, typeof a.getDevice === 'function');
1304
+ done();
1305
+ } catch (error) {
1306
+ log.error(`Test Failure: ${error}`);
1307
+ done(error);
1308
+ }
1309
+ });
1310
+ });
1311
+
1312
+ describe('#getDevicesFiltered', () => {
1313
+ it('should have a getDevicesFiltered function', (done) => {
1314
+ try {
1315
+ assert.equal(true, typeof a.getDevicesFiltered === 'function');
1316
+ done();
1317
+ } catch (error) {
1318
+ log.error(`Test Failure: ${error}`);
1319
+ done(error);
1320
+ }
1321
+ });
1322
+ });
1323
+
1324
+ describe('#isAlive', () => {
1325
+ it('should have a isAlive function', (done) => {
1326
+ try {
1327
+ assert.equal(true, typeof a.isAlive === 'function');
1328
+ done();
1329
+ } catch (error) {
1330
+ log.error(`Test Failure: ${error}`);
1331
+ done(error);
1332
+ }
1333
+ });
1334
+ });
1335
+
1336
+ describe('#getConfig', () => {
1337
+ it('should have a getConfig function', (done) => {
1338
+ try {
1339
+ assert.equal(true, typeof a.getConfig === 'function');
1340
+ done();
1341
+ } catch (error) {
1342
+ log.error(`Test Failure: ${error}`);
1343
+ done(error);
1344
+ }
1345
+ });
1346
+ });
1347
+
1348
+ describe('#getCount', () => {
1349
+ it('should have a getCount function', (done) => {
1350
+ try {
1351
+ assert.equal(true, typeof a.getCount === 'function');
1352
+ done();
1353
+ } catch (error) {
1354
+ log.error(`Test Failure: ${error}`);
1355
+ done(error);
1356
+ }
1357
+ });
1358
+ });
1359
+
1258
1360
  /*
1259
1361
  -----------------------------------------------------------------------
1260
1362
  -----------------------------------------------------------------------
@@ -15935,6 +16037,35 @@ describe('[unit] Netbox_v210 Adapter Test', () => {
15935
16037
  }).timeout(attemptTimeout);
15936
16038
  });
15937
16039
 
16040
+ describe('#getGraphql - errors', () => {
16041
+ it('should have a getGraphql function', (done) => {
16042
+ try {
16043
+ assert.equal(true, typeof a.getGraphql === 'function');
16044
+ done();
16045
+ } catch (error) {
16046
+ log.error(`Test Failure: ${error}`);
16047
+ done(error);
16048
+ }
16049
+ }).timeout(attemptTimeout);
16050
+ it('should error if - missing body', (done) => {
16051
+ try {
16052
+ a.getGraphql(null, (data, error) => {
16053
+ try {
16054
+ const displayE = 'body is required';
16055
+ runErrorAsserts(data, error, 'AD.300', 'Test-netbox_v210-adapter-getGraphql', displayE);
16056
+ done();
16057
+ } catch (err) {
16058
+ log.error(`Test Failure: ${err}`);
16059
+ done(err);
16060
+ }
16061
+ });
16062
+ } catch (error) {
16063
+ log.error(`Adapter Exception: ${error}`);
16064
+ done(error);
16065
+ }
16066
+ }).timeout(attemptTimeout);
16067
+ });
16068
+
15938
16069
  describe('#getTenancyTenantGroups - errors', () => {
15939
16070
  it('should have a getTenancyTenantGroups function', (done) => {
15940
16071
  try {
@@ -0,0 +1,94 @@
1
+ /* eslint-disable no-plusplus */
2
+ /* eslint global-require: warn */
3
+ /* eslint import/no-dynamic-require: warn */
4
+
5
+ const rls = require('readline-sync');
6
+ const path = require('path');
7
+ const fs = require('fs');
8
+
9
// Build one prompt string per property; the current value (if any) is
// shown in parentheses so the user can keep it by answering with nothing.
function getQuestions(props, obj) {
  return props.map((prop) => {
    const current = obj[prop] !== undefined ? `(${obj[prop]})` : '';
    return `${prop}: ${current} `;
  });
}
13
+
14
// Walk the user through each property so it can be edited or confirmed.
// props: the fields that may be changed for the selected option.
// obj: the JSON object being updated (mutated in place and returned).
// An empty answer (just pressing enter) keeps the existing value.
function confirm(props, obj) {
  const result = obj;
  for (const question of getQuestions(props, obj)) {
    const answer = rls.question(question);
    // only update the field if the answer is NOT an empty string
    if (answer) {
      const field = question.split(':')[0].trim();
      result[field] = answer;
    }
  }
  return result;
}
29
+
30
// Prompt for the fields basic (username/password) authentication needs.
const updateBasicAuth = (auth) => confirm(['username', 'password', 'auth_field', 'auth_field_format'], auth);
34
+
35
// Prompt for the fields static-token authentication needs.
const updateStaticTokenAuth = (auth) => confirm(['token', 'auth_field', 'auth_field_format'], auth);
39
+
40
// Rewrite the token request/response schemas so their external_name fields
// match the user/password/token field names the user just configured.
function updateTokenSchemas(user, pw, token) {
  const reqPath = path.join(__dirname, '..', 'entities/.system/schemaTokenReq.json');
  const reqSchema = require(reqPath);
  reqSchema.properties.username.external_name = user;
  reqSchema.properties.password.external_name = pw;
  fs.writeFileSync(reqPath, JSON.stringify(reqSchema, null, 2));

  const respPath = path.join(__dirname, '..', 'entities/.system/schemaTokenResp.json');
  const respSchema = require(respPath);
  respSchema.properties.token.external_name = token;
  fs.writeFileSync(respPath, JSON.stringify(respSchema, null, 2));
}
51
+
52
// Prompt for request-token authentication fields, then sync the token
// request/response schemas with the chosen field names.
function updateRequestToken(auth) {
  const fields = [
    'username',
    'password',
    'auth_field',
    'auth_field_format',
    'token_user_field',
    'token_password_field',
    'token_result_field',
    'token_URI_path'
  ];
  const updated = confirm(fields, auth);
  updateTokenSchemas(updated.token_user_field, updated.token_password_field, updated.token_result_field);

  return updated;
}
68
+
69
// Prompt the user for connectivity properties, then let them pick an auth
// method and fill in its fields. Returns the updated properties object.
const addAuthInfo = (props) => {
  const authOptions = [
    'basic user_password',
    'static_token',
    'request_token',
    'no_authentication'
  ];
  // confirm host/port/base_path first
  const newProps = confirm(['host', 'port', 'base_path'], props);

  // FIX: rls.keyInSelect returns -1 when the user cancels; previously that
  // indexed authOptions[-1] and clobbered auth_method with undefined.
  // Only update the method (and prompt for its fields) when a real choice
  // was made; on cancel the existing auth settings are left untouched.
  const selection = rls.keyInSelect(authOptions, 'Which authentication method?');
  if (selection !== -1) {
    const newAuthMethod = authOptions[selection];
    newProps.authentication.auth_method = newAuthMethod;

    if (newAuthMethod === 'basic user_password') {
      newProps.authentication = updateBasicAuth(newProps.authentication);
    } else if (newAuthMethod === 'static_token') {
      newProps.authentication = updateStaticTokenAuth(newProps.authentication);
    } else if (newAuthMethod === 'request_token') {
      newProps.authentication = updateRequestToken(newProps.authentication);
    }
  }
  console.log('Connectivity and authentication properties have been configured');
  console.log('If you want to make changes, rerun this script to reinstall the adapter');
  return newProps;
};
93
+
94
+ module.exports = { addAuthInfo };
File without changes
package/utils/basicGet.js CHANGED
@@ -4,6 +4,7 @@
4
4
  /* eslint import/no-extraneous-dependencies: warn */
5
5
  /* eslint global-require: warn */
6
6
  /* eslint import/no-unresolved: warn */
7
+ /* eslint import/no-dynamic-require: warn */
7
8
 
8
9
  const winston = require('winston');
9
10
 
@@ -43,20 +44,6 @@ const basicGet = {
43
44
  adapter.id,
44
45
  adapterProps
45
46
  );
46
- },
47
-
48
- /**
49
- * @summary connect to mongodb
50
- *
51
- * @function connect
52
- * @param {Object} properties - pronghornProps
53
- */
54
- connect: async function connect(properties) {
55
- // Connect to Mongo
56
- const { MongoDBConnection } = require('@itential/database');
57
- const connection = new MongoDBConnection(properties.mongoProps);
58
- const database = await connection.connect(true);
59
- return database;
60
47
  }
61
48
  };
62
49
 
@@ -0,0 +1,224 @@
1
+ /* @copyright Itential, LLC 2021 */
2
+
3
+ // Set globals
4
+ /* global log */
5
+
6
+ /* eslint import/no-dynamic-require: warn */
7
+ /* eslint global-require: warn */
8
+ /* eslint no-unused-vars: warn */
9
+ /* eslint import/no-unresolved: warn */
10
+
11
+ /**
12
+ * This script is used to read through an adapter's entities files
13
+ * and then creates documents and enters them into the IAP mongodb
14
+ */
15
+
16
+ const fs = require('fs');
17
+ const { MongoClient } = require('mongodb');
18
+ const path = require('path');
19
+ // const { argv } = require('process');
20
+ // const { string } = require('yargs');
21
+
22
// Load the IAP properties.json from the given directory and decrypt it,
// returning the pronghorn (database) properties.
const getPronghornProps = async (iapDir) => {
  log.trace('Retrieving properties.json file...');
  const rawProps = require(path.join(iapDir, 'properties.json'));
  log.trace('Decrypting properties...');
  const { PropertyEncryption } = require('@itential/itential-utils');
  const decryptor = new PropertyEncryption();
  const decrypted = await decryptor.decryptProps(rawProps);
  log.trace('Found properties.\n');
  return decrypted;
};
33
+
34
/**
 * Build a database document for one entity directory.
 *
 * Reads the entity's action.json (if present), any JSON files under
 * mockdatafiles/, and the remaining JSON schema files, and assembles them
 * into a single document: { actions, schema, mockdatafiles }.
 *
 * @param {String} pathstring - path to the entity directory
 * @returns {Object} the assembled entity document
 */
const buildDoc = (pathstring) => {
  let files = fs.readdirSync(pathstring);

  // load the mockdatafiles
  const mockdatafiles = {};
  if (files.includes('mockdatafiles') && fs.lstatSync(`${pathstring}/mockdatafiles`).isDirectory()) {
    fs.readdirSync(`${pathstring}/mockdatafiles`).forEach((file) => {
      if (file.split('.').pop() === 'json') {
        const mockpath = `${pathstring}/mockdatafiles/${file}`;
        const data = JSON.parse(fs.readFileSync(mockpath));
        mockdatafiles[mockpath.split('/').pop()] = data;
      }
    });
  }

  // load the action data (may be absent for an incomplete entity)
  let actions;
  if (files.includes('action.json')) {
    actions = JSON.parse(fs.readFileSync(`${pathstring}/action.json`));
  }

  // Load schema.json and other schemas in remaining json files
  files = files.filter((f) => (f !== 'action.json') && f.endsWith('.json'));
  const schema = [];
  files.forEach((file) => {
    const data = JSON.parse(fs.readFileSync(`${pathstring}/${file}`));
    schema.push({
      name: file,
      schema: data
    });
  });

  // FIX: previously `actions.actions` threw a TypeError whenever action.json
  // was missing; guard so the document simply carries no actions instead.
  return {
    actions: actions ? actions.actions : undefined,
    schema,
    mockdatafiles
  };
};
77
+
78
/**
 * Resolve the pronghorn (database) properties from the provided options.
 *
 * @param {Object} options - may contain pronghornProps (object or JSON
 *                           string) or iapDir (path to an IAP install)
 * @returns {Promise<Object>} resolves to the pronghorn properties,
 *                            rejects when neither source is provided
 */
const optionsHandler = (options) => {
  // if the database properties were provided in the options - return them
  if (options.pronghornProps) {
    // FIX: always return a Promise so callers can uniformly .then() —
    // previously the JSON-string case returned a plain parsed object,
    // which made the caller's .then() throw a TypeError.
    if (typeof options.pronghornProps === 'string') {
      return Promise.resolve(JSON.parse(options.pronghornProps));
    }
    return Promise.resolve(options.pronghornProps);
  }

  // if the directory was provided, get the pronghorn props from the directory
  if (options.iapDir) {
    return getPronghornProps(options.iapDir);
  }

  // if nothing was provided, error
  return Promise.reject(new Error('Neither pronghornProps nor iapDir defined in options!'));
};
98
+
99
/**
 * Read the adapter's entity files and insert the resulting documents into
 * the IAP mongo database.
 *
 * @param {String} targetPath - path to the adapter (defaults to the adapter root)
 * @param {Object|String} options - options (or a JSON string of them) holding
 *        the adapter id, optional targetCollection (default 'adapter_configs'),
 *        and either pronghornProps or iapDir to locate the database
 * @throws {Error} when no adapter id is provided
 */
const moveEntitiesToDB = (targetPath, options) => {
  // set local variables
  let myOpts = options;
  let myPath = targetPath;

  // if we got a string parse into a JSON object
  if (typeof myOpts === 'string') {
    myOpts = JSON.parse(myOpts);
  }

  // if there is no target collection - set the collection to the default
  if (!myOpts.targetCollection) {
    myOpts.targetCollection = 'adapter_configs';
  }

  // if there is no id error since we need an id for the entities
  if (!myOpts.id) {
    throw new Error('Adapter ID required!');
  }

  // get the pronghorn database properties
  // FIX: pass the parsed myOpts, not the raw `options` argument, so a
  // JSON-string options value still resolves pronghornProps/iapDir.
  optionsHandler(myOpts).then((currentProps) => {
    let mongoUrl;
    let dbName;

    // find the mongo properties so we can connect
    if (currentProps.mongoProps) {
      mongoUrl = currentProps.mongoProps.url;
      dbName = currentProps.mongoProps.db;
    } else if (currentProps.mongo) {
      if (currentProps.mongo.url) {
        mongoUrl = currentProps.mongo.url;
      } else {
        mongoUrl = `mongodb://${currentProps.mongo.host}:${currentProps.mongo.port}`;
      }
      dbName = currentProps.mongo.database;
    } else {
      throw new Error('Mongo properties are not specified in adapter preferences!');
    }

    // Check valid filepath provided
    if (!myPath) {
      // if no path use the current directory without the utils
      myPath = path.join(__dirname, '../');
    } else if (myPath.slice(-1) === '/') {
      myPath = myPath.slice(0, -1);
    }

    // verify set the entity path
    const entitiesPath = `${myPath}/entities`;
    if (!fs.existsSync(entitiesPath)) {
      throw new Error(`Entities path does not exist in filesystem: ${entitiesPath}`);
    } else {
      log.trace('Target found on filesystem');
    }

    // Get adapter details
    if (!fs.existsSync(`${myPath}/pronghorn.json`)) {
      throw new Error(`pronghorn.json does not exist in path: ${myPath}`);
    } else {
      log.trace('pronghorn.json found on filesystem');
    }
    const adapterData = JSON.parse(fs.readFileSync(`${myPath}/pronghorn.json`));

    // Load files from the filesystem
    const docs = [];
    const entities = fs.readdirSync(entitiesPath);
    entities.forEach((entity) => {
      const entityPath = `${entitiesPath}/${entity}`;
      // FIX: stat the entity itself, not the parent directory — the old code
      // checked entitiesPath (always a directory), so stray plain files in
      // entities/ were incorrectly processed as entity directories.
      const isDir = fs.lstatSync(entityPath).isDirectory();

      // Build doc for entity
      if (isDir) {
        let doc = buildDoc(entityPath);
        doc = {
          id: myOpts.id,
          type: adapterData.id,
          entity,
          ...doc
        };
        docs.push(doc);
      }
    });

    // Upload documents to db collection
    MongoClient.connect(mongoUrl, (err, db) => {
      if (err) {
        log.error(JSON.stringify(err));
        throw err;
      }

      // get the proper collection
      const collection = db.db(dbName).collection(myOpts.targetCollection);
      // insert the documents into the collection
      collection.insertMany(docs, { checkKeys: false }, (error, res) => {
        if (error) {
          log.error(JSON.stringify(error));
          throw error;
        }
        // log the insertion, close the database and return
        log.debug(`Inserted ${docs.length} documents to ${dbName}.${myOpts.targetCollection} with response ${JSON.stringify(res)}`);
        db.close();
        return res;
      });
    });
  });
};
209
+
210
+ // const args = process.argv.slice(2);
211
+
212
+ // throw new SyntaxError(args[0]);
213
+
214
+ // if (args.length === 0) {
215
+ // console.error('ERROR: target path not specified!');
216
+ // } else if (args[0] === 'help') {
217
+ // log.trace('node ./entitiesToDB <target path> <options object: {iapDir: string, pronghornProps: string, targetCollection: string}>');
218
+ // } else if (args.length === 1) {
219
+ // console.error('ERROR: IAP directory not specified');
220
+ // } else {
221
+ // moveEntitiesToDB(args[0], args[1]);
222
+ // }
223
+
224
+ module.exports = { moveEntitiesToDB };
package/utils/modify.js CHANGED
@@ -3,7 +3,7 @@ const Ajv = require('ajv');
3
3
  const rls = require('readline-sync');
4
4
  const { execSync } = require('child_process');
5
5
  const { existsSync } = require('fs-extra');
6
- const { getAdapterConfig } = require('./troubleshootingAdapter');
6
+ const { getAdapterConfig } = require('./tbUtils');
7
7
  const { name } = require('../package.json');
8
8
  const propertiesSchema = require('../propertiesSchema.json');
9
9
 
@@ -0,0 +1,90 @@
1
const fs = require('fs');
const semverSatisfies = require('semver/functions/satisfies');
const packageJson = require('../package.json');

try {
  // Scan ./patches for <scope>+<package>+<semver>.patch files and make sure
  // every matching patched dependency is listed in bundledDependencies.
  const patchEntries = fs.existsSync('./patches') ? fs.readdirSync('./patches', { withFileTypes: true }) : [];
  if (!patchEntries.length) {
    console.error('\nno patches - nothing to do\n');
    process.exitCode = 1;
  }

  const dependencies = packageJson.dependencies || {};
  if (!Object.keys(dependencies).length) {
    console.error('\nno dependencies - nothing to do\n');
    process.exitCode = 1;
  }

  let modified = false;
  console.error('\nprocessing patches');
  const bundledDependencies = packageJson.bundledDependencies || packageJson.bundleDependencies || [];

  for (const patch of patchEntries) {
    if (!patch.isFile()) {
      console.error(`${patch.name} skipped, is not a regular file`);
      continue;
    }
    if (!patch.name.endsWith('.patch')) {
      console.error(`${patch.name} skipped, does not end with .patch`);
      continue;
    }
    // strip the '.patch' suffix, then break the name apart on '+'
    const pieces = patch.name.slice(0, -6).split('+');
    if (pieces.length > 4) {
      console.error(`${patch.name} skipped, does not follow the naming convention (cannot use '+' other than to separate scope/package/semver and at most once within semver)`);
      continue;
    }
    const scope = pieces[0][0] === '@' ? pieces.shift() : null;
    const packageName = pieces.shift();
    const semver = pieces.join('+');
    const file = scope ? `${scope}/${packageName}` : packageName;
    if (dependencies[file] && semverSatisfies(semver, dependencies[file])) {
      if (!bundledDependencies.includes(file)) {
        bundledDependencies.push(file);
        console.error(`added ${file} to bundledDependencies`);
        modified = true;
      } else {
        console.error(`bundledDependencies already has ${file}`);
      }
    } else {
      const depmsg = dependencies[file] ? `version mismatch (${dependencies[file]}) in dependencies` : 'not found in dependencies';
      console.error(`patch ${patch.name} ${depmsg}`);
    }
  }

  // normalize the legacy 'bundleDependencies' spelling when needed
  if (!packageJson.bundledDependencies && bundledDependencies.length) {
    delete packageJson.bundleDependencies;
    packageJson.bundledDependencies = bundledDependencies;
    console.error('renaming bundleDependencies to bundledDependencies');
    modified = true;
  }

  if (modified) {
    // write the new file first, then swap it in so the old copy survives
    fs.writeFileSync('./package.json.new', JSON.stringify(packageJson, null, 2));
    console.error('wrote package.json.new');
    fs.renameSync('./package.json', './package.json.old');
    console.error('moved package.json to package.json.old');
    fs.renameSync('./package.json.new', './package.json');
    console.error('moved package.json.new to package.json');
  } else {
    console.error('no changes\n');
    process.exitCode = 1;
  }
} catch (e) {
  if (e) {
    // caught error, exit with status 2 to signify abject failure
    console.error(`\ncaught exception - ${e}\n`);
    process.exitCode = 2;
  } else {
    // caught false, exit with status 1 to signify nothing done
    process.exitCode = 1;
  }
} finally {
  console.error('done\n');
}
@@ -0,0 +1,20 @@
1
const fs = require('fs');

/**
 * This script removes the git pre-commit and pre-push hooks in case there's
 * ever a need to commit/push something that has issues. A missing hook
 * (ENOENT) is not an error - it just means the hook was never installed.
 */

['.git/hooks/pre-commit', '.git/hooks/pre-push'].forEach((hookPath) => {
  fs.unlink(hookPath, (err) => {
    if (err && err.code !== 'ENOENT') {
      console.log(`${err.message}`);
    }
  });
});