@itentialopensource/adapter-microsoft_graph 1.4.7 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.js +1 -0
- package/AUTH.md +4 -4
- package/BROKER.md +4 -4
- package/CALLS.md +9 -9
- package/ENHANCE.md +3 -3
- package/PROPERTIES.md +24 -9
- package/README.md +24 -23
- package/SUMMARY.md +2 -2
- package/SYSTEMINFO.md +1 -1
- package/TAB1.md +2 -2
- package/TAB2.md +17 -11
- package/TROUBLESHOOT.md +10 -1
- package/UTILITIES.md +473 -0
- package/adapter.js +5 -5
- package/adapterBase.js +52 -16
- package/package.json +24 -28
- package/pronghorn.json +15 -13
- package/propertiesSchema.json +68 -7
- package/report/adapterInfo.json +7 -7
- package/report/auto-adapter-openapi.json +8063 -0
- package/report/updateReport1748551976792.json +120 -0
- package/sampleProperties.json +4 -0
- package/test/integration/adapterTestBasicGet.js +88 -54
- package/test/integration/adapterTestConnectivity.js +15 -16
- package/test/integration/adapterTestIntegration.js +1 -38
- package/test/unit/adapterBaseTestUnit.js +641 -39
- package/test/unit/adapterTestUnit.js +17 -54
- package/utils/adapterInfo.js +114 -164
- package/utils/argParser.js +44 -0
- package/utils/checkMigrate.js +77 -38
- package/utils/entitiesToDB.js +53 -42
- package/utils/logger.js +26 -0
- package/utils/modify.js +56 -55
- package/utils/mongoDbConnection.js +79 -0
- package/utils/mongoUtils.js +162 -0
- package/utils/taskMover.js +31 -32
- package/utils/tbScript.js +36 -172
- package/utils/tbUtils.js +84 -226
- package/utils/troubleshootingAdapter.js +68 -84
- package/utils/updateAdapterConfig.js +158 -0
- package/utils/addAuth.js +0 -94
- package/utils/artifactize.js +0 -146
- package/utils/basicGet.js +0 -50
- package/utils/packModificationScript.js +0 -35
- package/utils/patches2bundledDeps.js +0 -90
@@ -0,0 +1,158 @@
|
|
1
|
+
/* @copyright Itential, LLC 2025 */
|
2
|
+
|
3
|
+
// Set globals
|
4
|
+
/* global log */
|
5
|
+
|
6
|
+
const fs = require('fs');
|
7
|
+
const path = require('path');
|
8
|
+
const PropUtilCl = require('@itentialopensource/adapter-utils').PropertyUtility;
|
9
|
+
const MongoDBConnection = require('./mongoDbConnection');
|
10
|
+
|
11
|
+
const propUtil = new PropUtilCl();
|
12
|
+
|
13
|
+
/**
 * Updates (or inserts) the configuration for a named action in the document.
 * Copies both the document and its actions array so the caller's document is
 * never mutated (the original shallow-spread shared the actions array with the
 * caller, so push/index-assignment leaked into the input document).
 * @param {Object} doc - Document to update (left unmodified)
 * @param {string} action - Action name
 * @param {Object} changes - Changes to apply
 * @returns {Object} Updated copy of the document
 */
function updateActionConfig(doc, action, changes) {
  const updateDoc = { ...doc };
  // spread of doc alone is shallow - copy the array too, or mutations leak into the caller's doc
  updateDoc.actions = Array.isArray(doc.actions) ? [...doc.actions] : [];
  const actionIndex = updateDoc.actions.findIndex((a) => a.name === action);
  if (actionIndex >= 0) {
    // merge the changes over the existing action entry
    updateDoc.actions[actionIndex] = propUtil.mergeProperties(changes, updateDoc.actions[actionIndex]);
  } else {
    // no entry for this action yet - append a new one
    updateDoc.actions.push({ name: action, ...changes });
  }
  return updateDoc;
}
|
31
|
+
|
32
|
+
/**
 * Updates (or inserts) the schema entry for a configuration file in the document.
 * Copies the document, the schema array, and the updated entry so the caller's
 * document is never mutated (the original shallow-spread shared the schema array
 * and entry objects with the caller, so the merge branch wrote into the input).
 * @param {Object} doc - Document to update (left unmodified)
 * @param {string} configFile - Configuration file name
 * @param {Object} changes - Changes to apply
 * @returns {Object} Updated copy of the document
 */
function updateSchemaConfig(doc, configFile, changes) {
  const updateDoc = { ...doc };
  // spread of doc alone is shallow - copy the array too, or mutations leak into the caller's doc
  updateDoc.schema = Array.isArray(doc.schema) ? [...doc.schema] : [];
  const schemaIndex = updateDoc.schema.findIndex((s) => s.name === configFile);
  if (schemaIndex >= 0) {
    // replace the entry with an updated copy rather than mutating the shared object
    updateDoc.schema[schemaIndex] = {
      ...updateDoc.schema[schemaIndex],
      schema: propUtil.mergeProperties(changes, updateDoc.schema[schemaIndex].schema)
    };
  } else {
    // no entry for this configuration file yet - append a new one
    updateDoc.schema.push({ name: configFile, schema: changes });
  }
  return updateDoc;
}
|
50
|
+
|
51
|
+
/**
 * Updates (or inserts) the mock data entry for a configuration file in the document.
 * Copies the document and the mockdatafiles map so the caller's document is never
 * mutated (the original shallow-spread shared the map with the caller, so the
 * keyed assignment wrote into the input document).
 * @param {Object} doc - Document to update (left unmodified)
 * @param {string} configFile - Configuration file name
 * @param {Object} changes - Changes to apply
 * @param {boolean} replace - true to replace the entry wholesale, false to merge
 * @returns {Object} Updated copy of the document
 */
function updateMockConfig(doc, configFile, changes, replace) {
  const updateDoc = { ...doc };
  // spread of doc alone is shallow - copy the map too, or the assignment leaks into the caller's doc
  updateDoc.mockdatafiles = { ...(doc.mockdatafiles || {}) };
  updateDoc.mockdatafiles[configFile] = replace
    ? changes
    : propUtil.mergeProperties(changes, updateDoc.mockdatafiles[configFile] || {});
  return updateDoc;
}
|
65
|
+
|
66
|
+
/**
 * Dispatch table mapping an update type to the handler that applies it.
 * Each handler receives (doc, configFile, changes, extra) where `extra` is the
 * action name for type 'action' and the replace flag for type 'mock'.
 */
const updateStrategies = {
  action(doc, configFile, changes, action) {
    // without an action name there is nothing to target - return the doc untouched
    if (!action) {
      return doc;
    }
    return updateActionConfig(doc, action, changes);
  },
  schema(doc, configFile, changes) {
    return updateSchemaConfig(doc, configFile, changes);
  },
  mock(doc, configFile, changes, replace) {
    return updateMockConfig(doc, configFile, changes, replace);
  }
};
|
74
|
+
|
75
|
+
/**
 * Updates MongoDB configuration for an adapter entity.
 * Reads the adapter type from ../pronghorn.json, looks up the existing
 * adapter_configs document for (id, type, entity), applies the type-specific
 * update strategy, and writes the result back with $set. Returns early (no
 * error thrown) when mongoProps is missing, no existing config is found, or
 * the update type is unsupported; connection errors are logged and rethrown.
 * @param {Object} options - Configuration options
 * @param {string} options.id - Adapter ID
 * @param {Object} options.mongoProps - MongoDB connection properties
 * @param {string} options.entity - Entity name
 * @param {string} options.type - Update type (action/schema/mock)
 * @param {string} options.configFile - Configuration file name
 * @param {Object} options.changes - Changes to apply
 * @param {string} options.action - Action name (for action type updates)
 * @param {boolean} options.replace - Whether to replace or merge (for mock type updates)
 * @returns {Promise<void>}
 */
async function updateMongoDBConfig(options) {
  const {
    id,
    mongoProps,
    entity,
    type,
    configFile,
    changes,
    action,
    replace
  } = options;

  // cannot connect without connection properties - log and bail (no throw)
  if (!mongoProps) {
    log.error('MongoDB properties not found');
    return;
  }

  let mongoConnection = null;
  try {
    // Get adapter type from pronghorn.json (the adapter's registered id)
    const pronghornPath = path.join(__dirname, '../pronghorn.json');
    const pronghornData = JSON.parse(fs.readFileSync(pronghornPath, 'utf8'));
    const adapterType = pronghornData.id;

    mongoConnection = new MongoDBConnection(mongoProps);
    await mongoConnection.connect();

    const collection = mongoConnection.db.collection('adapter_configs');
    const query = {
      id,
      type: adapterType,
      entity
    };

    // only update documents that already exist - this function does not insert
    const existingConfig = await collection.findOne(query);
    if (!existingConfig) {
      log.debug(`No existing configuration found for entity ${entity}`);
      return;
    }

    // Update the configuration based on type (action/schema/mock dispatch table)
    const updateStrategy = updateStrategies[type];
    if (!updateStrategy) {
      log.error(`Unsupported update type: ${type}`);
      return;
    }

    // NOTE(review): the 4th argument overloads action (type 'action') and
    // replace (type 'mock'); with type 'action' and no action name, a truthy
    // replace would be passed as the action name - confirm callers never do this
    const updatedDoc = updateStrategy(existingConfig, configFile, changes, action || replace);

    // Remove _id from updateDoc as it can't be modified by updateOne/$set
    const { _id, ...updateDocWithoutId } = updatedDoc;
    const updateResult = await collection.updateOne(
      { id, type: adapterType, entity },
      { $set: updateDocWithoutId }
    );

    // modifiedCount 0 means the document matched but was already identical
    if (updateResult.modifiedCount === 0) {
      log.warn(`No documents were modified for entity ${entity}`);
    }
    log.info(`Successfully updated MongoDB configuration for entity ${entity}`);
  } catch (error) {
    log.error(`Error updating MongoDB configuration: ${error.message}`);
    throw error;
  } finally {
    // always release the connection, even on the error path
    if (mongoConnection) {
      await mongoConnection.closeConnection();
    }
  }
}
|
157
|
+
|
158
|
+
module.exports = { updateMongoDBConfig };
|
package/utils/addAuth.js
DELETED
@@ -1,94 +0,0 @@
|
|
1
|
-
/* eslint-disable no-plusplus */
|
2
|
-
/* eslint global-require: warn */
|
3
|
-
/* eslint import/no-dynamic-require: warn */
|
4
|
-
|
5
|
-
const path = require('path');
|
6
|
-
const fs = require('fs');
|
7
|
-
const rls = require('readline-sync');
|
8
|
-
|
9
|
-
function getQuestions(props, obj) {
|
10
|
-
const questions = props.map((p) => `${p}: ${(obj[p] !== undefined) ? `(${obj[p]})` : ''} `);
|
11
|
-
return questions;
|
12
|
-
}
|
13
|
-
|
14
|
-
// function outputs each property for user to edit/confirm
|
15
|
-
// props are the fields that need to be changed depending on what the user selects
|
16
|
-
// obj is the JSON object that's being updated
|
17
|
-
function confirm(props, obj) {
|
18
|
-
// create array of questions
|
19
|
-
const updatedObj = obj;
|
20
|
-
getQuestions(props, obj).forEach((q) => {
|
21
|
-
const answer = rls.question(q);
|
22
|
-
// only update the field if the answer is NOT and empty string
|
23
|
-
if (answer) {
|
24
|
-
updatedObj[q.split(':')[0].trim()] = answer;
|
25
|
-
}
|
26
|
-
});
|
27
|
-
return updatedObj;
|
28
|
-
}
|
29
|
-
|
30
|
-
const updateBasicAuth = (auth) => {
|
31
|
-
const propsToUpdate = ['username', 'password', 'auth_field', 'auth_field_format'];
|
32
|
-
return confirm(propsToUpdate, auth);
|
33
|
-
};
|
34
|
-
|
35
|
-
const updateStaticTokenAuth = (auth) => {
|
36
|
-
const propsToUpdate = ['token', 'auth_field', 'auth_field_format'];
|
37
|
-
return confirm(propsToUpdate, auth);
|
38
|
-
};
|
39
|
-
|
40
|
-
function updateTokenSchemas(user, pw, token) {
|
41
|
-
let schemaPath = path.join(__dirname, '..', 'entities/.system/schemaTokenReq.json');
|
42
|
-
const reqSchema = require(schemaPath);
|
43
|
-
reqSchema.properties.username.external_name = user;
|
44
|
-
reqSchema.properties.password.external_name = pw;
|
45
|
-
fs.writeFileSync(schemaPath, JSON.stringify(reqSchema, null, 2));
|
46
|
-
schemaPath = path.join(__dirname, '..', 'entities/.system/schemaTokenResp.json');
|
47
|
-
const respSchema = require(schemaPath);
|
48
|
-
respSchema.properties.token.external_name = token;
|
49
|
-
fs.writeFileSync(schemaPath, JSON.stringify(respSchema, null, 2));
|
50
|
-
}
|
51
|
-
|
52
|
-
function updateRequestToken(auth) {
|
53
|
-
const propsToUpdate = [
|
54
|
-
'username',
|
55
|
-
'password',
|
56
|
-
'auth_field',
|
57
|
-
'auth_field_format',
|
58
|
-
'token_user_field',
|
59
|
-
'token_password_field',
|
60
|
-
'token_result_field',
|
61
|
-
'token_URI_path'
|
62
|
-
];
|
63
|
-
const newAuth = confirm(propsToUpdate, auth);
|
64
|
-
updateTokenSchemas(newAuth.token_user_field, newAuth.token_password_field, newAuth.token_result_field);
|
65
|
-
|
66
|
-
return newAuth;
|
67
|
-
}
|
68
|
-
|
69
|
-
// prompt users to pick an auth method from the list above
|
70
|
-
const addAuthInfo = (props) => {
|
71
|
-
const authOptions = [
|
72
|
-
'basic user_password',
|
73
|
-
'static_token',
|
74
|
-
'request_token',
|
75
|
-
'no_authentication'
|
76
|
-
];
|
77
|
-
const newProps = confirm(['host', 'port', 'base_path'], props);
|
78
|
-
|
79
|
-
const newAuthMethod = authOptions[rls.keyInSelect(authOptions, 'Which authentication method?')];
|
80
|
-
newProps.authentication.auth_method = newAuthMethod;
|
81
|
-
|
82
|
-
if (newAuthMethod === 'basic user_password') {
|
83
|
-
newProps.authentication = updateBasicAuth(newProps.authentication);
|
84
|
-
} else if (newAuthMethod === 'static_token') {
|
85
|
-
newProps.authentication = updateStaticTokenAuth(newProps.authentication);
|
86
|
-
} else if (newAuthMethod === 'request_token') {
|
87
|
-
newProps.authentication = updateRequestToken(newProps.authentication);
|
88
|
-
}
|
89
|
-
console.log('Connectivity and authentication properties have been configured');
|
90
|
-
console.log('If you want to make changes, rerun this script to reinstall the adapter');
|
91
|
-
return newProps;
|
92
|
-
};
|
93
|
-
|
94
|
-
module.exports = { addAuthInfo };
|
package/utils/artifactize.js
DELETED
@@ -1,146 +0,0 @@
|
|
1
|
-
#!/usr/bin/env node
|
2
|
-
/* @copyright Itential, LLC 2019 */
|
3
|
-
|
4
|
-
const path = require('path');
|
5
|
-
const fs = require('fs-extra');
|
6
|
-
|
7
|
-
async function createBundle(adapterOldDir) {
|
8
|
-
// set directories
|
9
|
-
const artifactDir = path.join(adapterOldDir, '../artifactTemp');
|
10
|
-
const workflowsDir = path.join(adapterOldDir, 'workflows');
|
11
|
-
|
12
|
-
// read adapter's package and set names
|
13
|
-
const adapterPackage = fs.readJSONSync(path.join(adapterOldDir, 'package.json'));
|
14
|
-
const originalName = adapterPackage.name.substring(adapterPackage.name.lastIndexOf('/') + 1);
|
15
|
-
const shortenedName = originalName.replace('adapter-', '');
|
16
|
-
const artifactName = originalName.replace('adapter', 'bundled-adapter');
|
17
|
-
|
18
|
-
const adapterNewDir = path.join(artifactDir, 'bundles', 'adapters', originalName);
|
19
|
-
fs.ensureDirSync(adapterNewDir);
|
20
|
-
|
21
|
-
const ops = [];
|
22
|
-
|
23
|
-
// copy old adapterDir to bundled hierarchy location
|
24
|
-
ops.push(() => fs.copySync(adapterOldDir, adapterNewDir));
|
25
|
-
|
26
|
-
// copy readme
|
27
|
-
ops.push(() => fs.copySync(path.join(adapterOldDir, 'README.md'), path.join(artifactDir, 'README.md')));
|
28
|
-
|
29
|
-
// copy changelog
|
30
|
-
if (fs.existsSync(path.join(adapterOldDir, 'CHANGELOG.md'))) {
|
31
|
-
ops.push(() => fs.copySync(path.join(adapterOldDir, 'CHANGELOG.md'), path.join(artifactDir, 'CHANGELOG.md')));
|
32
|
-
}
|
33
|
-
|
34
|
-
// copy license
|
35
|
-
if (fs.existsSync(path.join(adapterOldDir, 'LICENSE'))) {
|
36
|
-
ops.push(() => fs.copySync(path.join(adapterOldDir, 'LICENSE'), path.join(artifactDir, 'LICENSE')));
|
37
|
-
}
|
38
|
-
|
39
|
-
// create package
|
40
|
-
const artifactPackage = {
|
41
|
-
name: artifactName,
|
42
|
-
version: adapterPackage.version,
|
43
|
-
description: `A bundled version of the ${originalName} to be used in adapter-artifacts for easy installation`,
|
44
|
-
scripts: {
|
45
|
-
test: 'echo "Error: no test specified" && exit 1',
|
46
|
-
deploy: 'npm publish --registry=http://registry.npmjs.org'
|
47
|
-
},
|
48
|
-
keywords: [
|
49
|
-
'IAP',
|
50
|
-
'artifacts',
|
51
|
-
'Itential',
|
52
|
-
'Pronghorn',
|
53
|
-
'Adapter',
|
54
|
-
'Adapter-Artifacts',
|
55
|
-
shortenedName
|
56
|
-
],
|
57
|
-
author: 'Itential Artifacts',
|
58
|
-
license: 'Apache-2.0',
|
59
|
-
repository: adapterPackage.repository,
|
60
|
-
private: false,
|
61
|
-
devDependencies: {
|
62
|
-
r2: '^2.0.1',
|
63
|
-
ajv: '6.10.0',
|
64
|
-
'better-ajv-errors': '^0.6.1',
|
65
|
-
'fs-extra': '^7.0.1'
|
66
|
-
}
|
67
|
-
};
|
68
|
-
|
69
|
-
ops.push(() => fs.writeJSONSync(path.join(artifactDir, 'package.json'), artifactPackage, { spaces: 2 }));
|
70
|
-
|
71
|
-
// create manifest
|
72
|
-
const manifest = {
|
73
|
-
bundleName: originalName,
|
74
|
-
version: adapterPackage.version,
|
75
|
-
fingerprint: 'Some verifiable token',
|
76
|
-
createdEpoch: '1554836984020',
|
77
|
-
artifacts: [
|
78
|
-
{
|
79
|
-
id: `${shortenedName}-adapter`,
|
80
|
-
name: `${shortenedName}-adapter`,
|
81
|
-
type: 'adapter',
|
82
|
-
location: `/bundles/adapters/${originalName}`,
|
83
|
-
description: artifactPackage.description,
|
84
|
-
properties: {
|
85
|
-
entryPoint: false
|
86
|
-
}
|
87
|
-
}
|
88
|
-
]
|
89
|
-
};
|
90
|
-
|
91
|
-
// add workflows into artifact
|
92
|
-
if (fs.existsSync(workflowsDir)) {
|
93
|
-
const workflowFileNames = fs.readdirSync(workflowsDir);
|
94
|
-
|
95
|
-
// if folder isnt empty and only file is not readme
|
96
|
-
if (workflowFileNames.length !== 0 && (!(workflowFileNames.length === 1 && workflowFileNames[0].split('.')[1] === 'md'))) {
|
97
|
-
// add workflows to correct location in bundle
|
98
|
-
ops.push(() => fs.copySync(workflowsDir, path.join(artifactDir, 'bundles', 'workflows')));
|
99
|
-
|
100
|
-
// add workflows to manifest
|
101
|
-
workflowFileNames.forEach((filename) => {
|
102
|
-
const [filenameNoExt, ext] = filename.split('.');
|
103
|
-
if (ext === 'json') {
|
104
|
-
manifest.artifacts.push({
|
105
|
-
id: `workflow-${filenameNoExt}`,
|
106
|
-
name: filenameNoExt,
|
107
|
-
type: 'workflow',
|
108
|
-
location: `/bundles/workflows/${filename}`,
|
109
|
-
description: 'Main entry point to artifact',
|
110
|
-
properties: {
|
111
|
-
entryPoint: false
|
112
|
-
}
|
113
|
-
});
|
114
|
-
}
|
115
|
-
});
|
116
|
-
}
|
117
|
-
}
|
118
|
-
|
119
|
-
ops.push(() => fs.writeJSONSync(path.join(artifactDir, 'manifest.json'), manifest, { spaces: 2 }));
|
120
|
-
|
121
|
-
// Run the commands in parallel
|
122
|
-
try {
|
123
|
-
await Promise.all(ops.map(async (op) => op()));
|
124
|
-
} catch (e) {
|
125
|
-
throw new Error(e);
|
126
|
-
}
|
127
|
-
|
128
|
-
const pathObj = {
|
129
|
-
bundlePath: artifactDir,
|
130
|
-
bundledAdapterPath: path.join(artifactDir, 'bundles', 'adapters', originalName)
|
131
|
-
};
|
132
|
-
return pathObj;
|
133
|
-
}
|
134
|
-
|
135
|
-
async function artifactize(entryPathToAdapter) {
|
136
|
-
const truePath = path.resolve(entryPathToAdapter);
|
137
|
-
const packagePath = path.join(truePath, 'package');
|
138
|
-
// remove adapter from package and move bundle in
|
139
|
-
const pathObj = await createBundle(packagePath);
|
140
|
-
const { bundlePath } = pathObj;
|
141
|
-
fs.removeSync(packagePath);
|
142
|
-
fs.moveSync(bundlePath, packagePath);
|
143
|
-
return 'Bundle successfully created and old folder system removed';
|
144
|
-
}
|
145
|
-
|
146
|
-
module.exports = { createBundle, artifactize };
|
package/utils/basicGet.js
DELETED
@@ -1,50 +0,0 @@
|
|
1
|
-
/* @copyright Itential, LLC 2020 */
|
2
|
-
|
3
|
-
/* eslint object-shorthand: warn */
|
4
|
-
/* eslint import/no-extraneous-dependencies: warn */
|
5
|
-
/* eslint global-require: warn */
|
6
|
-
/* eslint import/no-unresolved: warn */
|
7
|
-
/* eslint import/no-dynamic-require: warn */
|
8
|
-
|
9
|
-
const winston = require('winston');
|
10
|
-
|
11
|
-
const logLevel = 'none';
|
12
|
-
const myCustomLevels = {
|
13
|
-
levels: {
|
14
|
-
spam: 6,
|
15
|
-
trace: 5,
|
16
|
-
debug: 4,
|
17
|
-
info: 3,
|
18
|
-
warn: 2,
|
19
|
-
error: 1,
|
20
|
-
none: 0
|
21
|
-
}
|
22
|
-
};
|
23
|
-
|
24
|
-
const basicGet = {
|
25
|
-
/**
|
26
|
-
* @summary create Adapter instance
|
27
|
-
*
|
28
|
-
* @function getAdapterInstance
|
29
|
-
* @param {Object} adapter - adaper configuration object required by IAP
|
30
|
-
*/
|
31
|
-
getAdapterInstance: (adapter) => {
|
32
|
-
const Adapter = require('../adapter');
|
33
|
-
const adapterProps = JSON.parse(JSON.stringify(adapter.properties.properties));
|
34
|
-
adapterProps.stub = false;
|
35
|
-
// need to set global logging
|
36
|
-
global.log = winston.createLogger({
|
37
|
-
level: logLevel,
|
38
|
-
levels: myCustomLevels.levels,
|
39
|
-
transports: [
|
40
|
-
new winston.transports.Console()
|
41
|
-
]
|
42
|
-
});
|
43
|
-
return new Adapter(
|
44
|
-
adapter.id,
|
45
|
-
adapterProps
|
46
|
-
);
|
47
|
-
}
|
48
|
-
};
|
49
|
-
|
50
|
-
module.exports = basicGet;
|
@@ -1,35 +0,0 @@
|
|
1
|
-
#!/usr/bin/env node
|
2
|
-
/* @copyright Itential, LLC 2019 */
|
3
|
-
|
4
|
-
const path = require('path');
|
5
|
-
const { spawnSync } = require('child_process');
|
6
|
-
const fs = require('fs-extra');
|
7
|
-
const { createBundle } = require('./artifactize');
|
8
|
-
|
9
|
-
const nodeEntryPath = path.resolve('.');
|
10
|
-
createBundle(nodeEntryPath).then((pathObj) => {
|
11
|
-
const { bundlePath, bundledAdapterPath } = pathObj;
|
12
|
-
const npmIgnorePath = path.join(bundledAdapterPath, '.npmignore');
|
13
|
-
const adapterPackagePath = path.join(bundledAdapterPath, 'package.json');
|
14
|
-
const artifactPackagePath = path.join(bundlePath, 'package.json');
|
15
|
-
|
16
|
-
// remove node_modules from .npmIgnore so that node_modules are included in the resulting tar from npm pack
|
17
|
-
let npmIgnoreString;
|
18
|
-
if (fs.existsSync(npmIgnorePath)) {
|
19
|
-
npmIgnoreString = fs.readFileSync(npmIgnorePath, 'utf8');
|
20
|
-
npmIgnoreString = npmIgnoreString.replace('node_modules', '');
|
21
|
-
npmIgnoreString = npmIgnoreString.replace('\n\n', '\n');
|
22
|
-
fs.writeFileSync(npmIgnorePath, npmIgnoreString);
|
23
|
-
}
|
24
|
-
|
25
|
-
// add files to package so that node_modules are included in the resulting tar from npm pack
|
26
|
-
const adapterPackage = fs.readJSONSync(adapterPackagePath);
|
27
|
-
adapterPackage.files = ['*'];
|
28
|
-
fs.writeJSONSync(artifactPackagePath, adapterPackage, { spaces: 2 });
|
29
|
-
const npmResult = spawnSync('npm', ['pack', '-q', bundlePath], { cwd: path.resolve(bundlePath, '..') });
|
30
|
-
if (npmResult.status === 0) {
|
31
|
-
fs.removeSync(bundlePath);
|
32
|
-
console.log('Bundle folder removed');
|
33
|
-
}
|
34
|
-
console.log('Script successful');
|
35
|
-
});
|
@@ -1,90 +0,0 @@
|
|
1
|
-
const fs = require('fs');
|
2
|
-
const semverSatisfies = require('semver/functions/satisfies');
|
3
|
-
const packageJson = require('../package.json');
|
4
|
-
|
5
|
-
try {
|
6
|
-
// pattern supplied by semver.org via https://regex101.com/r/vkijKf/1/ but removed gm from end to only match a single semver
|
7
|
-
// const semverPat = /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/;
|
8
|
-
// pattern supplied by semver.org via https://regex101.com/r/Ly7O1x/3/ with following changes
|
9
|
-
// removed P's from before capturing group names and
|
10
|
-
// removed gm from end to only match a single semver
|
11
|
-
// const semverPat = /^(?<major>0|[1-9]\d*)\.(?<minor>0|[1-9]\d*)\.(?<patch>0|[1-9]\d*)(?:-(?<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/;
|
12
|
-
|
13
|
-
const patches = (fs.existsSync('./patches')) ? fs.readdirSync('./patches', { withFileTypes: true }) : [];
|
14
|
-
if (!patches.length) {
|
15
|
-
console.error('\nno patches - nothing to do\n');
|
16
|
-
process.exitCode = 1;
|
17
|
-
}
|
18
|
-
|
19
|
-
const dependencies = packageJson.dependencies || {};
|
20
|
-
if (!Object.keys(dependencies).length) {
|
21
|
-
console.error('\nno dependencies - nothing to do\n');
|
22
|
-
process.exitCode = 1;
|
23
|
-
}
|
24
|
-
|
25
|
-
let changed = false;
|
26
|
-
console.error('\nprocessing patches');
|
27
|
-
const bundledDependencies = packageJson.bundledDependencies || packageJson.bundleDependencies || [];
|
28
|
-
|
29
|
-
patches.forEach((patch) => {
|
30
|
-
if (!patch.isFile()) {
|
31
|
-
console.error(`${patch.name} skipped, is not a regular file`);
|
32
|
-
return;
|
33
|
-
}
|
34
|
-
if (!patch.name.endsWith('.patch')) {
|
35
|
-
console.error(`${patch.name} skipped, does not end with .patch`);
|
36
|
-
return;
|
37
|
-
}
|
38
|
-
const splits = patch.name.slice(0, -6).split('+');
|
39
|
-
if (splits.length > 4) {
|
40
|
-
console.error(`${patch.name} skipped, does not follow the naming convention (cannot use '+' other than to separate scope/package/semver and at most once within semver)`);
|
41
|
-
return;
|
42
|
-
}
|
43
|
-
const scope = splits[0][0] === '@' ? splits.shift() : null;
|
44
|
-
const packageName = splits.shift();
|
45
|
-
const semver = splits.join('+');
|
46
|
-
// const { groups } = semver.match(semverPat);
|
47
|
-
const file = scope ? `${scope}/${packageName}` : packageName;
|
48
|
-
if (dependencies[file] && semverSatisfies(semver, dependencies[file])) {
|
49
|
-
if (!bundledDependencies.includes(file)) {
|
50
|
-
bundledDependencies.push(file);
|
51
|
-
console.error(`added ${file} to bundledDependencies`);
|
52
|
-
changed = true;
|
53
|
-
} else {
|
54
|
-
console.error(`bundledDependencies already has ${file}`);
|
55
|
-
}
|
56
|
-
} else {
|
57
|
-
const depmsg = dependencies[file] ? `version mismatch (${dependencies[file]}) in dependencies` : 'not found in dependencies';
|
58
|
-
console.error(`patch ${patch.name} ${depmsg}`);
|
59
|
-
}
|
60
|
-
});
|
61
|
-
|
62
|
-
if (!packageJson.bundledDependencies && bundledDependencies.length) {
|
63
|
-
delete packageJson.bundleDependencies;
|
64
|
-
packageJson.bundledDependencies = bundledDependencies;
|
65
|
-
console.error('renaming bundleDependencies to bundledDependencies');
|
66
|
-
changed = true;
|
67
|
-
}
|
68
|
-
if (changed) {
|
69
|
-
fs.writeFileSync('./package.json.new', JSON.stringify(packageJson, null, 2));
|
70
|
-
console.error('wrote package.json.new');
|
71
|
-
fs.renameSync('./package.json', './package.json.old');
|
72
|
-
console.error('moved package.json to package.json.old');
|
73
|
-
fs.renameSync('./package.json.new', './package.json');
|
74
|
-
console.error('moved package.json.new to package.json');
|
75
|
-
} else {
|
76
|
-
console.error('no changes\n');
|
77
|
-
process.exitCode = 1;
|
78
|
-
}
|
79
|
-
} catch (e) {
|
80
|
-
if (e) {
|
81
|
-
// caught error, exit with status 2 to signify abject failure
|
82
|
-
console.error(`\ncaught exception - ${e}\n`);
|
83
|
-
process.exitCode = 2;
|
84
|
-
} else {
|
85
|
-
// caught false, exit with status 1 to signify nothing done
|
86
|
-
process.exitCode = 1;
|
87
|
-
}
|
88
|
-
} finally {
|
89
|
-
console.error('done\n');
|
90
|
-
}
|