@itentialopensource/adapter-efficientip_solidserver 0.1.1 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AUTH.md +39 -0
- package/BROKER.md +199 -0
- package/CALLS.md +1465 -0
- package/CHANGELOG.md +17 -2
- package/CODE_OF_CONDUCT.md +12 -17
- package/CONTRIBUTING.md +3 -148
- package/ENHANCE.md +69 -0
- package/PROPERTIES.md +641 -0
- package/README.md +235 -576
- package/SUMMARY.md +9 -0
- package/SYSTEMINFO.md +11 -0
- package/TROUBLESHOOT.md +47 -0
- package/adapter.js +383 -263
- package/adapterBase.js +854 -408
- package/changelogs/changelog.md +16 -0
- package/entities/.generic/action.json +110 -5
- package/entities/.generic/schema.json +6 -1
- package/error.json +6 -0
- package/metadata.json +49 -0
- package/package.json +27 -22
- package/pronghorn.json +691 -88
- package/propertiesDecorators.json +14 -0
- package/propertiesSchema.json +828 -7
- package/refs?service=git-upload-pack +0 -0
- package/report/adapter-openapi.json +41906 -0
- package/report/adapter-openapi.yaml +23138 -0
- package/report/adapterInfo.json +10 -0
- package/report/updateReport1653233995404.json +120 -0
- package/report/updateReport1691508450223.json +120 -0
- package/report/updateReport1692202927301.json +120 -0
- package/report/updateReport1694465845842.json +120 -0
- package/report/updateReport1698421858198.json +120 -0
- package/sampleProperties.json +153 -3
- package/test/integration/adapterTestBasicGet.js +3 -5
- package/test/integration/adapterTestConnectivity.js +91 -42
- package/test/integration/adapterTestIntegration.js +155 -106
- package/test/unit/adapterBaseTestUnit.js +388 -308
- package/test/unit/adapterTestUnit.js +484 -243
- package/utils/adapterInfo.js +206 -0
- package/utils/addAuth.js +94 -0
- package/utils/artifactize.js +1 -1
- package/utils/basicGet.js +1 -14
- package/utils/checkMigrate.js +1 -1
- package/utils/entitiesToDB.js +179 -0
- package/utils/findPath.js +1 -1
- package/utils/methodDocumentor.js +273 -0
- package/utils/modify.js +14 -16
- package/utils/packModificationScript.js +1 -1
- package/utils/patches2bundledDeps.js +90 -0
- package/utils/pre-commit.sh +5 -0
- package/utils/removeHooks.js +20 -0
- package/utils/taskMover.js +309 -0
- package/utils/tbScript.js +129 -53
- package/utils/tbUtils.js +125 -25
- package/utils/testRunner.js +17 -17
- package/utils/troubleshootingAdapter.js +10 -31
- package/workflows/README.md +0 -3
package/utils/methodDocumentor.js
ADDED
@@ -0,0 +1,273 @@
+/* eslint global-require:warn */
+/* eslint import/no-dynamic-require:warn */
+/* eslint no-param-reassign:warn */
+
+const fs = require('fs-extra');
+const acorn = require('acorn');
+
+// Getting the base directory:
+let adaptdir = __dirname;
+if (adaptdir.endsWith('/utils')) {
+  adaptdir = adaptdir.substring(0, adaptdir.length - 6);
+}
+
+function createObjectForFunction(
+  funcName,
+  funcArgs,
+  entityPath,
+  description,
+  workflow
+) {
+  const funcObject = {};
+  // if the entity path is not set, then the object is not created.
+  if (entityPath !== undefined) {
+    funcObject.method_signature = `${funcName}(${funcArgs.join(', ')})`;
+    funcObject.path = entityPath;
+    if (description === undefined) {
+      funcObject.description = '';
+      funcObject.workflow = 'No';
+    } else {
+      funcObject.description = description;
+      funcObject.workflow = workflow;
+    }
+  }
+  return funcObject;
+}
+
+function getPathFromEntity(entity, funcName) {
+  let epath;
+  if (entity === undefined || entity === '.generic') {
+    epath = undefined;
+  } else {
+    // Access the action.json file for the given entity to get the path
+    const entityPath = `${adaptdir}/entities/${entity}/action.json`;
+    const actionJSON = require(entityPath);
+    actionJSON.actions.forEach((action) => {
+      if (action.name === funcName) {
+        if (typeof action.entitypath === 'object') {
+          epath = '';
+          const keys = Object.keys(action.entitypath);
+          for (let k = 0; k < keys.length; k += 1) {
+            epath += `${keys[k]}:${action.entitypath[keys[k]]} <br /> `;
+          }
+          epath = epath.substring(0, epath.length - 8);
+        } else {
+          epath = action.entitypath;
+        }
+      }
+    });
+  }
+  return epath;
+}
+
+function recurseCallExpressions(statement, callList) {
+  // Recursively finds all CallExpressions in the syntax tree
+  if (statement.type === 'CallExpression') callList.push(statement);
+  const keys = Object.keys(statement);
+  for (let k = 0; k < keys.length; k += 1) {
+    if (typeof statement[keys[k]] === 'object' && statement[keys[k]] !== null) {
+      recurseCallExpressions(statement[keys[k]], callList);
+    }
+  }
+}
+
+function readFileUsingLib(filename, descriptionObj, workflowObj, functionList) {
+  // read the file
+  const aFile = fs.readFileSync(filename, 'utf8');
+  // parsing the file to get the function and class declarations.
+  const aFileFuncArgs = acorn.parse(aFile, { ecmaVersion: 2020 });
+
+  let callName = 'identifyRequest';
+  // Looping through all the declarations parsed:
+  aFileFuncArgs.body.forEach((e) => {
+    // Getting only the class declaration as it has our required functions.
+    if (e.type === 'ClassDeclaration') {
+      const methodDefinition = e.body;
+      methodDefinition.body.forEach((method) => {
+        // Getting method name and its params in the class.
+        const funcName = method.key.name;
+        const funcArgs = [];
+        method.value.params.forEach((param) => {
+          if (param.type === 'Identifier') {
+            funcArgs.push(param.name);
+          } else if (param.type === 'RestElement') {
+            funcArgs.push(`...${param.argument.name}`);
+          } else {
+            const args = `${param.left.name} = ${param.right.raw}`;
+            funcArgs.push(args);
+          }
+        });
+
+        // Getting the entity for the method:
+        const callList = [];
+        method.value.body.body.forEach((statement) => {
+          recurseCallExpressions(statement, callList);
+        });
+        const requests = [];
+        for (let i = 0; i < callList.length; i += 1) {
+          if (callList[i].callee.property && callList[i].callee.property.name === callName) {
+            requests.push(callList[i]);
+          }
+        }
+        if (requests.length > 0) {
+          const expr = requests[0];
+          if (expr.arguments.length < 2) {
+            throw new Error(`Bad inputs in method ${funcName}`);
+          }
+          const entity = expr.arguments[0].value;
+          const actionName = expr.arguments[1].value;
+          if (expr !== undefined && (expr.arguments[0].type !== 'Literal' || expr.arguments[1].type !== 'Literal')) {
+            const param1 = method.value.params[0];
+            const param2 = method.value.params[1];
+            if (param1.type !== 'Identifier' || param2.type !== 'Identifier'
+              || expr.arguments[0].type !== 'Identifier' || expr.arguments[1].type !== 'Identifier'
+              || param1.name !== expr.arguments[0].name || param2.name !== expr.arguments[1].name) {
+              throw new Error(`identifyRequest proxy method ${funcName} unknown format`);
+            } else if (callName !== 'identifyRequest') {
+              throw new Error(`MethodDocumentor not yet programmed to handle multiple helper methods: 1) ${callName}, 2) ${funcName}`);
+            }
+            callName = funcName;
+          }
+          const entityPath = getPathFromEntity(entity, actionName);
+
+          // Creating and storing the object for the method.
+          if (entityPath !== undefined) {
+            functionList.push(
+              createObjectForFunction(
+                funcName,
+                funcArgs,
+                entityPath,
+                descriptionObj[funcName],
+                workflowObj[funcName]
+              )
+            );
+          }
+        }
+      });
+    }
+  });
+}
+
+function readJSONFile(filename, descriptionObj, workflowObj) {
+  // Accessing the JSON file.
+  const phJSON = require(filename);
+  // Getting the methods array.
+  const methodArray = phJSON.methods;
+  methodArray.forEach((methodName) => {
+    // Getting the method description and workflow:
+    const funcName = methodName.name;
+    descriptionObj[funcName] = methodName.summary ? methodName.summary : methodName.description;
+    workflowObj[funcName] = methodName.task ? 'Yes' : 'No';
+  });
+}
+
+function readMDFile(filename, functionList) {
+  // Reading in the .md file and creating an array with each line as an element.
+  const mdFile = fs.readFileSync(filename, 'utf-8');
+  const fileSplit = mdFile.split('\n');
+  // Storing the data that should be added later to the updated data.
+  const linesToAddLater = [];
+  let index = fileSplit.length - 1;
+
+  // Removing all the blank lines at the end of the file.
+  if (fileSplit[index] === '') {
+    while (fileSplit[index] === '') {
+      linesToAddLater.push(fileSplit.pop());
+      index -= 1;
+    }
+  }
+
+  // Checking if the last 2 lines are <br> and </table>. If not, the file is corrupted and the
+  // data at the end of the file should be fixed.
+  if (fileSplit[index] === '<br>' || fileSplit[index - 1] === '</table>') {
+    // Storing <br> and </table> to add later.
+    linesToAddLater.push(fileSplit.pop());
+    linesToAddLater.push(fileSplit.pop());
+    index -= 2;
+  } else {
+    console.log('The file has bad content at the end.');
+    return;
+  }
+  // if (fileSplit[index] !== '<br>' && fileSplit[index - 1] !== '</table>') {
+  //   console.log('The file has bad content at the end.');
+  //   return;
+  // } else {
+  //   // Storing <br> and </table> to add later.
+  //   linesToAddLater.push(fileSplit.pop());
+  //   linesToAddLater.push(fileSplit.pop());
+  //   index -= 2;
+  // }
+
+  // Removing all the lines until the header tags are reached.
+  while (!fileSplit[index].includes('<th')) {
+    fileSplit.pop();
+    index -= 1;
+  }
+  // Adding </tr> for the header row, because it got removed in the above loop.
+  fileSplit.push('  </tr>');
+
+  // Creating the tags for each method to be appended to the file.
+  const tdBeginTag = '    <td style="padding:15px">';
+  const tdEndTag = '</td>';
+
+  functionList.forEach((func) => {
+    const signCommand = `${tdBeginTag}${func.method_signature}${tdEndTag}`;
+    const descCommand = `${tdBeginTag}${func.description}${tdEndTag}`;
+    const pathCommand = `${tdBeginTag}${func.path}${tdEndTag}`;
+    const workflowCommand = `${tdBeginTag}${func.workflow}${tdEndTag}`;
+    fileSplit.push('  <tr>');
+    fileSplit.push(signCommand);
+    fileSplit.push(descCommand);
+    fileSplit.push(pathCommand);
+    fileSplit.push(workflowCommand);
+    fileSplit.push('  </tr>');
+  });
+
+  // Adding </table> and <br> at the end of the file to complete the table and the file.
+  while (linesToAddLater.length > 0) {
+    fileSplit.push(linesToAddLater.pop());
+  }
+
+  // Writing all the content back into the file.
+  fs.writeFileSync(filename, fileSplit.join('\n'), {
+    encoding: 'utf-8',
+    flag: 'w'
+  });
+}
+
+function getFileInfo() {
+  // If files don't exist:
+  if (!fs.existsSync(`${adaptdir}/adapter.js`)) {
+    console.log('Missing - utils/adapter.js');
+    return;
+  }
+  if (!fs.existsSync(`${adaptdir}/pronghorn.json`)) {
+    console.log('Missing - pronghorn.json');
+    return;
+  }
+  if (!fs.existsSync(`${adaptdir}/CALLS.md`)) {
+    console.log('Missing - CALLS.md');
+    return;
+  }
+
+  const descriptionObj = {};
+  const workflowObj = {};
+
+  // Get the method descriptions and the workflow values from pronghorn.json file.
+  readJSONFile(`${adaptdir}/pronghorn.json`, descriptionObj, workflowObj);
+
+  // Get the method signature, entity path and create an object that contains all the info regarding
+  // the method and push it to the functionList array.
+  const functionList = [];
+  readFileUsingLib(
+    `${adaptdir}/adapter.js`,
+    descriptionObj,
+    workflowObj,
+    functionList
+  );
+
+  // createMarkDown(functionList);
+  readMDFile(`${adaptdir}/CALLS.md`, functionList);
+}
+
+getFileInfo();
package/utils/modify.js
CHANGED
@@ -1,9 +1,9 @@
+const { execSync } = require('child_process');
 const fs = require('fs-extra');
 const Ajv = require('ajv');
 const rls = require('readline-sync');
-const { execSync } = require('child_process');
 const { existsSync } = require('fs-extra');
-const { getAdapterConfig } = require('./
+const { getAdapterConfig } = require('./tbUtils');
 const { name } = require('../package.json');
 const propertiesSchema = require('../propertiesSchema.json');

@@ -21,9 +21,7 @@ async function updateServiceItem() {
   const validate = ajv.compile(propertiesSchema);
   validate(currentProps);
   console.log('Updating Properties...');
-  await database.collection('service_configs').updateOne(
-    { model: name }, { $set: serviceItem }
-  );
+  await database.collection('service_configs').updateOne({ model: name }, { $set: serviceItem });
   console.log('Properties Updated');
 }

@@ -35,7 +33,7 @@ async function updateServiceItem() {
 function backup() {
   // zip all files except node_modules and package-lock
   const backupCmd = 'zip -r previousVersion.zip .';
-  execSync(backupCmd, { encoding: 'utf-8' });
+  execSync(backupCmd, { encoding: 'utf-8', maxBuffer: 1024 * 1024 * 2 });
 }

 /**
@@ -53,9 +51,9 @@ function archiveMod(modType) {
   const archiveName = `${modType}-${now.toISOString()}`;
   execSync(`mkdir adapter_modifications/archive/${archiveName}`);
   const archiveCmd = 'mv adapter_modifications/archive .'
-
-
-
+    + ` && mv adapter_modifications/* archive/${archiveName}`
+    + ' && mv archive adapter_modifications'
+    + ` && rm ${zipFile}`;
   execSync(archiveCmd, { encoding: 'utf-8' });
 }

@@ -73,7 +71,7 @@ function revertMod() {
     }
   });
   // // unzip previousVersion, reinstall dependencies and delete zipfile
-  execSync('unzip -o previousVersion.zip && rm -rf node_modules && rm package-lock.json && npm install');
+  execSync('unzip -o previousVersion.zip && rm -rf node_modules && rm package-lock.json && npm install', { maxBuffer: 1024 * 1024 * 2 });
   execSync('rm previousVersion.zip');
   console.log('Changes have been reverted');
 }
@@ -90,12 +88,12 @@ if (flags === '-m') {
   backup();
   console.log('Migrating adapter and running tests...');
   const migrateCmd = 'unzip -o migrationPackage.zip'
-
-
+    + ' && cd adapter_modifications'
+    + ' && node migrate';
   const migrateOutput = execSync(migrateCmd, { encoding: 'utf-8' });
   console.log(migrateOutput);
   if (migrateOutput.indexOf('Lint exited with code 1') >= 0
-
+    || migrateOutput.indexOf('Tests exited with code 1') >= 0) {
     if (rls.keyInYN('Adapter failed tests or lint after migrating. Would you like to revert the changes?')) {
       console.log('Reverting changes...');
       revertMod();
@@ -125,12 +123,12 @@ if (flags === '-u') {
   // Backup current adapter
   backup();
   const updateCmd = 'unzip -o updatePackage.zip'
-
-
+    + ' && cd adapter_modifications'
+    + ' && node update.js updateFiles';
   execSync(updateCmd, { encoding: 'utf-8' });
   const updateOutput = execSync(updateCmd, { encoding: 'utf-8' });
   if (updateOutput.indexOf('Lint exited with code 1') >= 0
-
+    || updateOutput.indexOf('Tests exited with code 1') >= 0) {
     if (rls.keyInYN('Adapter failed tests or lint after updating. Would you like to revert the changes?')) {
       console.log('Reverting changes...');
       revertMod();
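A recurring fix in modify.js above is passing maxBuffer: 1024 * 1024 * 2 to execSync, doubling Node's default 1 MiB cap on captured stdout/stderr so verbose zip and npm output cannot abort the backup or revert step. A minimal sketch of the pattern, mirroring backup():

const { execSync } = require('child_process');
// Allow up to 2 MiB of combined stdout/stderr before execSync throws
const output = execSync('zip -r previousVersion.zip .', { encoding: 'utf-8', maxBuffer: 1024 * 1024 * 2 });
console.log(output);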
package/utils/packModificationScript.js
CHANGED
@@ -1,9 +1,9 @@
 #!/usr/bin/env node
 /* @copyright Itential, LLC 2019 */

-const fs = require('fs-extra');
 const path = require('path');
 const { spawnSync } = require('child_process');
+const fs = require('fs-extra');
 const { createBundle } = require('./artifactize');

 const nodeEntryPath = path.resolve('.');
package/utils/patches2bundledDeps.js
ADDED
@@ -0,0 +1,90 @@
+const fs = require('fs');
+const semverSatisfies = require('semver/functions/satisfies');
+const packageJson = require('../package.json');
+
+try {
+  // pattern supplied by semver.org via https://regex101.com/r/vkijKf/1/ but removed gm from end to only match a single semver
+  // const semverPat = /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/;
+  // pattern supplied by semver.org via https://regex101.com/r/Ly7O1x/3/ with following changes
+  // removed P's from before capturing group names and
+  // removed gm from end to only match a single semver
+  // const semverPat = /^(?<major>0|[1-9]\d*)\.(?<minor>0|[1-9]\d*)\.(?<patch>0|[1-9]\d*)(?:-(?<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/;
+
+  const patches = (fs.existsSync('./patches')) ? fs.readdirSync('./patches', { withFileTypes: true }) : [];
+  if (!patches.length) {
+    console.error('\nno patches - nothing to do\n');
+    process.exitCode = 1;
+  }
+
+  const dependencies = packageJson.dependencies || {};
+  if (!Object.keys(dependencies).length) {
+    console.error('\nno dependencies - nothing to do\n');
+    process.exitCode = 1;
+  }
+
+  let changed = false;
+  console.error('\nprocessing patches');
+  const bundledDependencies = packageJson.bundledDependencies || packageJson.bundleDependencies || [];
+
+  patches.forEach((patch) => {
+    if (!patch.isFile()) {
+      console.error(`${patch.name} skipped, is not a regular file`);
+      return;
+    }
+    if (!patch.name.endsWith('.patch')) {
+      console.error(`${patch.name} skipped, does not end with .patch`);
+      return;
+    }
+    const splits = patch.name.slice(0, -6).split('+');
+    if (splits.length > 4) {
+      console.error(`${patch.name} skipped, does not follow the naming convention (cannot use '+' other than to separate scope/package/semver and at most once within semver)`);
+      return;
+    }
+    const scope = splits[0][0] === '@' ? splits.shift() : null;
+    const packageName = splits.shift();
+    const semver = splits.join('+');
+    // const { groups } = semver.match(semverPat);
+    const file = scope ? `${scope}/${packageName}` : packageName;
+    if (dependencies[file] && semverSatisfies(semver, dependencies[file])) {
+      if (!bundledDependencies.includes(file)) {
+        bundledDependencies.push(file);
+        console.error(`added ${file} to bundledDependencies`);
+        changed = true;
+      } else {
+        console.error(`bundledDependencies already has ${file}`);
+      }
+    } else {
+      const depmsg = dependencies[file] ? `version mismatch (${dependencies[file]}) in dependencies` : 'not found in dependencies';
+      console.error(`patch ${patch.name} ${depmsg}`);
+    }
+  });
+
+  if (!packageJson.bundledDependencies && bundledDependencies.length) {
+    delete packageJson.bundleDependencies;
+    packageJson.bundledDependencies = bundledDependencies;
+    console.error('renaming bundleDependencies to bundledDependencies');
+    changed = true;
+  }
+  if (changed) {
+    fs.writeFileSync('./package.json.new', JSON.stringify(packageJson, null, 2));
+    console.error('wrote package.json.new');
+    fs.renameSync('./package.json', './package.json.old');
+    console.error('moved package.json to package.json.old');
+    fs.renameSync('./package.json.new', './package.json');
+    console.error('moved package.json.new to package.json');
+  } else {
+    console.error('no changes\n');
+    process.exitCode = 1;
+  }
+} catch (e) {
+  if (e) {
+    // caught error, exit with status 2 to signify abject failure
+    console.error(`\ncaught exception - ${e}\n`);
+    process.exitCode = 2;
+  } else {
+    // caught false, exit with status 1 to signify nothing done
+    process.exitCode = 1;
+  }
+} finally {
+  console.error('done\n');
+}
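The naming convention patches2bundledDeps.js expects is <scope>+<package>+<semver>.patch, where '+' separates the parts and may appear at most once more inside the semver (build metadata), hence the splits.length > 4 guard. A small sketch of the parsing logic above applied to a hypothetical file name:

// Hypothetical patch file: '@itential+adapter-utils+1.2.3.patch'
const splits = '@itential+adapter-utils+1.2.3.patch'.slice(0, -6).split('+');
const scope = splits[0][0] === '@' ? splits.shift() : null; // '@itential'
const packageName = splits.shift();                         // 'adapter-utils'
const semver = splits.join('+');                            // '1.2.3'
console.log(scope ? `${scope}/${packageName}` : packageName, semver);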
package/utils/pre-commit.sh
CHANGED
@@ -17,6 +17,11 @@ printf "%b" "Running pre-commit hooks...\\n"
 # verify testing script is stubbed and no credentials
 node utils/testRunner.js -r

+# update the adapter information file
+node utils/adapterInfo.js
+node utils/methodDocumentor.js
+git add CALLS.md report/adapterInfo.json
+
 # security audit on the code
 npm audit --registry=https://registry.npmjs.org --audit-level=moderate

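With these hook additions, every commit regenerates report/adapterInfo.json and the CALLS.md method table via the two new utilities and stages the results, keeping the generated documentation in sync with adapter.js and pronghorn.json.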
package/utils/removeHooks.js
ADDED
@@ -0,0 +1,20 @@
+const fs = require('fs');
+
+/**
+ * This script will uninstall pre-commit or pre-push hooks in case there's ever a need to
+ * commit/push something that has issues
+ */
+
+const precommitPath = '.git/hooks/pre-commit';
+const prepushPath = '.git/hooks/pre-push';
+fs.unlink(precommitPath, (err) => {
+  if (err && err.code !== 'ENOENT') {
+    console.log(`${err.message}`);
+  }
+});
+
+fs.unlink(prepushPath, (err) => {
+  if (err && err.code !== 'ENOENT') {
+    console.log(`${err.message}`);
+  }
+});