@zohodesk/codestandard-validator 0.0.7 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.babelrc +1 -1
- package/bin/execute.js +1 -1
- package/build/hooks/Precommit/pre-commit.js +89 -28
- package/build/hooks/hook.js +40 -0
- package/build/lib/cli.js +2 -2
- package/build/lib/postinstall.js +3 -3
- package/build/utils/CloneCommonLinterRepo/cloneViaCdt.js +39 -26
- package/build/utils/ConfigFileUtils/createConfigFile.js +63 -0
- package/build/utils/ConfigFileUtils/getLintConfiguration.js +2 -2
- package/build/utils/FileAndFolderOperations/filterFiles.js +27 -2
- package/build/utils/General/getGeneralInfo.js +64 -3
- package/build/utils/General/getNodeModulesPath.js +4 -1
- package/build/utils/General/writeProjectDetailsToJson.js +5 -1
- package/build/utils/PluginsInstallation/arePluginsInstalled.js +2 -6
- package/build/utils/PluginsInstallation/checkIfPluginsAreInstalled.js +32 -59
- package/build/utils/PluginsInstallation/installPlugins.js +12 -65
- package/build/utils/PrecommitUsingGitSetup/update-git-precommithook.js +42 -35
- package/changeLog.md +7 -1
- package/index.js +1 -1
- package/jsonUtils/commonLinterRepoDetails.js +1 -1
- package/package.json +3 -2
- package/build/hooks/PrePush/pre-push.js +0 -247
- package/build/hooks/Precommit/filterUtils.js +0 -42
- package/build/hooks/Precommit/lint.js +0 -211
- package/build/utils/ConfigFileUtils/createConfigFiles.js +0 -68
- package/build/utils/EslintConfigFileUtils/createEslintConfigFile.js +0 -54
- package/build/utils/EslintConfigFileUtils/getEslintExecutablePath.js +0 -24
- package/build/utils/EslintConfigFileUtils/getLintConfiguration.js +0 -52
- package/build/utils/PluginsInstallation/Worker/installPluginsByWoker.js +0 -38
- package/build/utils/PluginsInstallation/Worker/worker.js +0 -33
package/.babelrc
CHANGED
package/bin/execute.js
CHANGED

package/build/hooks/Precommit/pre-commit.js
CHANGED
@@ -1,13 +1,14 @@
 "use strict";
 
 const {
-  exec
+  exec,
+  execSync
 } = require('child_process');
 const fs = require('fs');
 const path = require('path');
 const {
   getEslintExecutablePath
-} = require('../../utils/
+} = require('../../utils/ConfigFileUtils/getEslintExecutablePath');
 const {
   getNodeModulesPath
 } = require('../../utils/General/getNodeModulesPath');
@@ -87,6 +88,27 @@ function filterDeltedFileFromStagedFiles(files) {
     return false;
   });
 }
+async function lintFiles(filePath) {
+  switch (String(path.extname(filePath))) {
+    case '.js':
+    case '.ts':
+    case '.tsx':
+    case '.jsx':
+    case '.properties':
+      {
+        return await findEslintErrors(filePath);
+      }
+    case '.css':
+    case '.scss':
+      {
+        return await findStyleLintErrors(filePath);
+      }
+    default:
+      {
+        return [];
+      }
+  }
+}
 
 /**
  * @function findEslintErrors - method Lint given file based on given configuration
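The new `lintFiles` helper above routes each staged file to a linter based on its extension. As a rough illustration only (the runner functions here are hypothetical stand-ins, not the package's `findEslintErrors`/`findStyleLintErrors`), the dispatch pattern looks like this:

```js
// Sketch of extension-based dispatch, mirroring the switch in lintFiles.
// The runners below are placeholders; the real package shells out to ESLint/Stylelint.
const path = require('path');

const runners = {
  script: file => [`${file}: script linter would run here`],
  style: file => [`${file}: style linter would run here`]
};

function pickRunner(filePath) {
  const ext = path.extname(filePath);
  if (['.js', '.ts', '.tsx', '.jsx', '.properties'].includes(ext)) return runners.script;
  if (['.css', '.scss'].includes(ext)) return runners.style;
  return () => []; // unsupported extensions produce an empty report
}

console.log(pickRunner('src/App.jsx')('src/App.jsx'));
console.log(pickRunner('styles/main.scss')('styles/main.scss'));
console.log(pickRunner('README.md')('README.md'));
```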
@@ -94,7 +116,7 @@ function filterDeltedFileFromStagedFiles(files) {
  * @returns {Array<string>} - array of command line report as a string
  */
 
-
+function findEslintErrors(file) {
   let nodeModulesPathOfProject = `${getNodeModulesPath()}`;
   let eslintExecutablePath = getEslintExecutablePath();
   let eslintConfigurationFilePath = `${nodeModulesPathOfProject}/.eslintrc.js`;
@@ -121,6 +143,39 @@ async function findEslintErrors(file) {
     Logger.log(Logger.INFO_TYPE, 'node_modules not found');
   }
 }
+
+/**
+ *
+ * @param {*} params
+ */
+function findStyleLintErrors(filePath) {
+  const configFilePath = path.resolve(getNodeModulesPath(), '.stylelintrc.js');
+  const absolutePath = path.join(getRootDirectory(), filePath);
+  try {
+    return new Promise((resolve, reject) => {
+      exec(`npx stylelint ${absolutePath} --config ${configFilePath}`, {
+        cwd: getNodeModulesPath()
+      }, (error, stderr, stdout) => {
+        if (stdout) {
+          resolve(stdout.trim().split('\n'));
+        }
+        // if(stderr){
+        //   resolve(stderr.trim().split('\n'))
+        // }
+        else if (error) {
+          Logger.log(Logger.FAILURE_TYPE, error);
+          reject("Error executing stylelint command");
+        } else {
+          resolve([]);
+        }
+      });
+    });
+  } catch (error) {
+    Logger.log(Logger.FAILURE_TYPE, `Issue is lint css files`);
+    return [];
+  }
+}
+
 /**
  * @function {calculateGitDiffForFile} - method calculate diff of file
  * @param {*} branch_name - branch name
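The added `findStyleLintErrors` wraps a Stylelint CLI call in a Promise and returns the report as an array of lines. A minimal standalone sketch of that wrapping, using Node's standard `exec` callback order and an illustrative command rather than the package's exact paths:

```js
// Sketch: run a CLI linter and resolve with its report split into lines.
const { exec } = require('child_process');

function runCliLinter(command) {
  return new Promise(resolve => {
    // Node's exec callback signature is (error, stdout, stderr).
    exec(command, (error, stdout) => {
      if (stdout && stdout.trim()) {
        resolve(stdout.trim().split('\n')); // one entry per report line
      } else if (error) {
        resolve([`linter exited with code ${error.code}`]);
      } else {
        resolve([]); // clean file: empty report
      }
    });
  });
}

// Usage sketch: runCliLinter('npx stylelint src/main.css').then(console.log);
```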
@@ -157,10 +212,10 @@ function isOnlyWarningsPresentInFile(eslintErrorsPresent) {
   let severityOfEachErrorInFile = [];
   eslintErrorsPresent.map(error => {
     let partsInString = error.split(" ");
-    let severityOfError = partsInString.find(word => word === 'error' || word === 'warning');
+    let severityOfError = partsInString.find(word => word === 'error' || word === 'warning' || word === '✖');
     severityOfEachErrorInFile.push(severityOfError);
   });
-  return !severityOfEachErrorInFile.includes('error');
+  return !(severityOfEachErrorInFile.includes('✖') || severityOfEachErrorInFile.includes('error'));
 }
 
 /**
@@ -193,29 +248,35 @@ async function preCommitHook() {
   try {
     staged_files = await getStagedFiles();
     if (!staged_files.length == 0) {
-
+      const {
+        JsFiles: staged_filesJS,
+        CssFiles
+      } = filterFiles(staged_files, eslintConfigFiles, true);
 
-      //
-      // if(
+      // staged_filesJS = filterFiles(staged_filesJS,exemptionFiles) //this is the code for giving exemption to a file during pre commit
+      // if(staged_filesJS.length === 0){
       //   Logger.log(Logger.SUCCESS_TYPE,`Commit Successful`)
       //   process.exit(0)
       // }
+
+      // CssFiles not Enabled For while
       areFilesStaged = true;
-
-
+      var stagedFiles = [...staged_filesJS];
+      for (let file in stagedFiles) {
+        let currentFileName = stagedFiles[file];
         let changedLinesArray = [];
         let eslintErrorsInChangedLines = [];
         let isOnlyEslintWarningsPresentInFile = false;
-        if (getSupportedLanguage().includes(path.extname(
+        if (getSupportedLanguage().includes(path.extname(stagedFiles[file]))) {
          try {
-           var
-           // eslintErrorsInFile = impactBasedPrecommit == false ? filterWarningInFile(
-           if (
-           if (!
+            var errorsInFile = await lintFiles(stagedFiles[file]);
+            // eslintErrorsInFile = impactBasedPrecommit == false ? filterWarningInFile(errorsInFile) : errorsInFile
+            if (stagedFiles[file] && typeof stagedFiles[file] == 'string') {
+              if (!errorsInFile.length == 0) {
                //Calculating changed lines in a file and storing them in respective arrays
                if (impactBasedPrecommit) {
                  //git diff is computed and stored in an array
-                 let git_diff = await calculateGitDiffForFile(current_branch,
+                 let git_diff = await calculateGitDiffForFile(current_branch, stagedFiles[file]);
                  changedLinesArray = git_diff.filter(line => line.startsWith('@@'));
                  let changedLinesStartArray = [];
                  let changedLinesEndArray = [];
@@ -229,15 +290,15 @@ async function preCommitHook() {
                   changedLinesEndArray.push(changesStartLine + parseInt(changesEndLine) - 1);
                 }
               }
-              for (let error = 1; error <
-              //
-              //
-              //
+              for (let error = 1; error < errorsInFile.length - 2; error++) {
+                //errorsInFile[error].trim() - 69:26 error => Do not hardcode content. Use I18N key instead no-hardcoding/no-hardcoding,
+                //errorsInFile[error].trim().split(' ')[0] => 69:26
+                //errorsInFile[error].trim().split(' ')[0].split(':')[0] => 69
 
-              let eslintErrorLineNumber =
+                let eslintErrorLineNumber = errorsInFile[error].trim().split(' ')[0].split(':')[0];
                 for (let lineNumber in changedLinesStartArray) {
                   if (eslintErrorLineNumber >= changedLinesStartArray[lineNumber] && eslintErrorLineNumber <= changedLinesEndArray[lineNumber]) {
-                  eslintErrorsInChangedLines.push(
+                    eslintErrorsInChangedLines.push(errorsInFile[error]);
                   }
                 }
               }
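The impact-based path above maps each reported error's line number against the line ranges announced by `git diff` hunk headers. A small sketch of that range check (the regex and helper names are illustrative, not the package's code):

```js
// Sketch: derive changed-line ranges from "@@ -a,b +c,d @@" headers and test
// whether a reported error line falls inside one of them.
function changedRanges(diffLines) {
  return diffLines
    .filter(line => line.startsWith('@@'))
    .map(header => {
      const [, start, count = '1'] = header.match(/\+(\d+)(?:,(\d+))?/);
      return [Number(start), Number(start) + Number(count) - 1];
    });
}

function isErrorInChangedLines(errorLine, ranges) {
  return ranges.some(([start, end]) => errorLine >= start && errorLine <= end);
}

const ranges = changedRanges(['@@ -10,4 +12,6 @@ function foo() {']);
console.log(ranges);                            // [[12, 17]]
console.log(isErrorInChangedLines(15, ranges)); // true
```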
@@ -258,10 +319,10 @@ async function preCommitHook() {
                   }
                 }
               } else {
-                if (
+                if (errorsInFile.length > 0) {
                   let startIndex = 1;
-                  let endIndex =
-                  let listOsEslintErrors =
+                  let endIndex = errorsInFile.length - 2;
+                  let listOsEslintErrors = errorsInFile.slice(startIndex, endIndex);
                   isOnlyEslintWarningsPresentInFile = isOnlyWarningsPresentInFile(listOsEslintErrors);
                   Logger.log(Logger.FAILURE_TYPE, `\x1b[1m${currentFileName}\x1b[0m`);
                   for (let eslintError of listOsEslintErrors) {
@@ -282,7 +343,7 @@ async function preCommitHook() {
           }
         } catch (err) {
           Logger.log(Logger.FAILURE_TYPE, err);
-          Logger.log(Logger.FAILURE_TYPE, "Error in executing
+          Logger.log(Logger.FAILURE_TYPE, "Error in executing lint command");
         }
       }
     }
@@ -293,14 +354,14 @@ async function preCommitHook() {
       Logger.log(Logger.INFO_TYPE, 'Error executing pre commit hook');
     }
     if (shouldAbortCommit) {
-      Logger.log(Logger.FAILURE_TYPE, `There are
+      Logger.log(Logger.FAILURE_TYPE, `There are linter errors/warnings present. So commit is aborted.`);
       process.exit(1);
     } else if (shouldAbortCommit === false && staged_files.length !== 0) {
       Logger.log(Logger.SUCCESS_TYPE, `Commit Successful`);
       process.exit(0);
     }
   } else {
-    Logger.log(Logger.FAILURE_TYPE, 'Commit failed since some
+    Logger.log(Logger.FAILURE_TYPE, 'Commit failed since some lint plugins are not installed');
     Logger.log(Logger.INFO_TYPE, `Kindly execute the command \x1b[37mnpx ZDPrecommit setupPlugins \x1b[33mfrom the location where package.json is present to install the plugins`);
     Logger.log(Logger.INFO_TYPE, 'Execute the command and kindly try committing again.');
     process.exit(1);

package/build/hooks/hook.js
ADDED
@@ -0,0 +1,40 @@
+"use strict";
+
+const {
+  getLastCommitHash,
+  getStoredCommitHash,
+  updateJsonFile
+} = require("../utils/General/getGeneralInfo");
+const {
+  executeMethodsThatReturnBooleanValue
+} = require("../utils/General/wrapperFunctionToExecuteAFunction");
+const {
+  cloneViaCdt
+} = require("../utils/CloneCommonLinterRepo/cloneViaCdt");
+const {
+  getPluginsToInstall
+} = require("../utils/PluginsInstallation/checkIfPluginsAreInstalled");
+const {
+  installPlugins
+} = require("../utils/PluginsInstallation/installPlugins");
+const path = require("path");
+const {
+  Logger
+} = require("../utils/Logger/Logger");
+async function hooks() {
+  var jsonFilePath = path.join(__dirname, '..', '..', 'jsonUtils', 'fsUtils.json');
+  if (!(getLastCommitHash() == getStoredCommitHash())) {
+    updateJsonFile(jsonFilePath, data => {
+      data.commitHash = getLastCommitHash();
+      return data;
+    });
+    Logger.log(Logger.INFO_TYPE, `Some rules and plugins are being fetched from a remote source and installed...`);
+    await executeMethodsThatReturnBooleanValue("Make sure zgit.csez.zohocorpin.com is accessible", cloneViaCdt, null);
+    const {
+      uninstalledPlugins
+    } = getPluginsToInstall();
+    await executeMethodsThatReturnBooleanValue("Some issue occurred in installing plugins", installPlugins, uninstalledPlugins);
+  }
+  require('./Precommit/pre-commit');
+}
+hooks();
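The new hook.js re-runs the clone-and-install steps only when the remote configuration repo has moved past the commit hash cached in `jsonUtils/fsUtils.json`. A minimal sketch of that gate (the file name and helper names below are illustrative, not the package's API):

```js
// Sketch: refresh only when the cached commit hash differs from the remote one.
const fs = require('fs');

function shouldRefresh(remoteHash, cacheFile) {
  if (!fs.existsSync(cacheFile)) return true; // nothing cached yet
  const { commitHash } = JSON.parse(fs.readFileSync(cacheFile, 'utf-8'));
  return commitHash !== remoteHash;           // refresh on drift
}

function rememberHash(remoteHash, cacheFile) {
  fs.writeFileSync(cacheFile, JSON.stringify({ commitHash: remoteHash }, null, 2), 'utf-8');
}

// Usage sketch:
// if (shouldRefresh(latestHash, 'jsonUtils/fsUtils.json')) {
//   rememberHash(latestHash, 'jsonUtils/fsUtils.json');
//   // ...clone the config repo and install plugins, then run the pre-commit hook
// }
```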
package/build/lib/cli.js
CHANGED
@@ -16,7 +16,7 @@ const {
   installPlugins
 } = require("../utils/PluginsInstallation/installPlugins");
 const {
-
+  getPluginsToInstall
 } = require("../utils/PluginsInstallation/checkIfPluginsAreInstalled");
 const [,, action, ...options] = process.argv;
 async function run() {
@@ -35,7 +35,7 @@ async function run() {
     {
       const {
         uninstalledPlugins
-      } =
+      } = getPluginsToInstall();
       await executeMethodsThatReturnBooleanValue("Some issue occurred in installing plugins", installPlugins, uninstalledPlugins);
       break;
     }
package/build/lib/postinstall.js
CHANGED
@@ -7,8 +7,8 @@ const {
   setupHusky
 } = require("../utils/HuskySetup/setupHusky");
 const {
-
-} = require("../utils/
+  createConfigFile
+} = require("../utils/ConfigFileUtils/createConfigFile");
 const {
   Logger
 } = require("../utils/Logger/Logger");
@@ -34,7 +34,7 @@ async function postInstall() {
     await executeMethodsThatReturnBooleanValue("Some issue in writing node_modules path to json", writeFsPaths, null);
     isGitInitialized() ? await executeMethodsThatReturnBooleanValue("Some issue occurred in setting up husky.", setupHusky, null) : null;
     await executeMethodsThatReturnBooleanValue("Make sure zgit.csez.zohocorpin.com is accessible", cloneViaCdt, null);
-    await executeMethodsThatReturnBooleanValue("Some issue occurred in creating eslint config file.",
+    await executeMethodsThatReturnBooleanValue("Some issue occurred in creating eslint config file.", createConfigFile, null);
     Logger.log(Logger.SUCCESS_TYPE, "Pre commit setup successfull");
     arePluginsInstalled();
   } catch (error) {

package/build/utils/CloneCommonLinterRepo/cloneViaCdt.js
CHANGED
@@ -16,14 +16,6 @@ const {
 const {
   executeSynchronizedCommands
 } = require('../General/executeSyncCommands');
-var {
-  type,
-  endPoint,
-  branch,
-  cacheDirectory,
-  commonLinterRepoName,
-  user
-} = require('../../../jsonUtils/commonLinterRepoDetails');
 const {
   getConfigurationPrecommit,
   getRunningEnv
@@ -34,28 +26,49 @@ const {
 const {
   Logger
 } = require("../Logger/Logger");
+const {
+  type,
+  endPoint,
+  branch,
+  cacheDirectory,
+  commonLinterRepoName,
+  user
+} = require('../../../jsonUtils/commonLinterRepoDetails');
+
 /**
-* @function cloneViaCdt
-* @
+ * @function cloneViaCdt
+ * @description Clones the common linter configuration repo using Clint Development Tool (CDT)
+ * @returns {boolean} Indicates the success or failure of the cloning process
  */
-
 function cloneViaCdt() {
-
-
-
-
-
-
-
-
-
-
+  try {
+    const {
+      token
+    } = getConfigurationPrecommit();
+    const currentEnv = getRunningEnv();
+    Logger.log(Logger.INFO_TYPE, `Application is running in the following environment:${currentEnv}`);
+
+    // Clean up existing folder
+    const deleteDir = getDeleteDirPath();
+    removeFolder(deleteDir);
+
+    // Construct endpoint with credentials if in CI or automation
+    const isAutomatedEnv = currentEnv === "CI" || currentEnv === "DEVAUTOMATION";
+    const authenticatedEndpoint = isAutomatedEnv ? `https://${user}:${decrypt(token, 12)}@${endPoint}` : `https://${endPoint}`;
+    const cloneCommand = ['npx cdt clone', `--clone:type=${type}`, `--clone:url=${authenticatedEndpoint}`, `--clone:branch=${branch}`, `--clone:cacheDir=${cacheDirectory}`, `--clone:proj:name=${commonLinterRepoName}`].join(' ');
+
+    // Execute the CDT command
+    const clonedDir = getClonedDirPath();
+    const successMessage = `Lint Configuration Cloned Successfully - ${getRepoName() || 'common'}`;
+    const errorMessage = 'Could not clone the linters common repo';
+    const result = executeSynchronizedCommands(execSync, [cloneCommand, {
+      cwd: clonedDir
+    }], successMessage, errorMessage, false, true);
+    return result;
+  } catch (error) {
+    Logger.log(Logger.ERROR_TYPE, `cloneViaCdt failed: ${error.message}`);
+    return false;
   }
-  var commandToCloneCommonConfigRepo = `npx cdt clone --clone:type=${type} --clone:url=${absoluteEndPoint} --clone:branch=${process.env.CONFIGURATION_BRANCH || branch} --clone:cacheDir=${cacheDirectory} --clone:proj:name=${commonLinterRepoName}`;
-  let isCommonConfigurationClonedSuccessfully = executeSynchronizedCommands(execSync, [commandToCloneCommonConfigRepo, {
-    cwd: getClonedDirPath()
-  }], `Lint Configuration Cloned Successfully - ${getRepoName() || 'common'}`, 'Could not clone the linters common repo', false, true);
-  return isCommonConfigurationClonedSuccessfully;
 }
 module.exports = {
   cloneViaCdt
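The rewritten `cloneViaCdt` assembles the CDT clone command from its parts and injects credentials only in automated environments. A self-contained sketch of that command construction (all values below are placeholders, not the package's configuration):

```js
// Sketch: compose the `npx cdt clone` command the way cloneViaCdt does.
function buildCloneCommand({ type, endPoint, branch, cacheDirectory, repoName, user, token, env }) {
  const automated = env === 'CI' || env === 'DEVAUTOMATION';
  const url = automated ? `https://${user}:${token}@${endPoint}` : `https://${endPoint}`;
  return [
    'npx cdt clone',
    `--clone:type=${type}`,
    `--clone:url=${url}`,
    `--clone:branch=${branch}`,
    `--clone:cacheDir=${cacheDirectory}`,
    `--clone:proj:name=${repoName}`
  ].join(' ');
}

console.log(buildCloneCommand({
  type: 'git',
  endPoint: 'git.example.com/linters/common.git',
  branch: 'main',
  cacheDirectory: '.cdt-cache',
  repoName: 'common-linter',
  user: 'bot',
  token: '<token>',
  env: 'CI'
}));
```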

package/build/utils/ConfigFileUtils/createConfigFile.js
ADDED
@@ -0,0 +1,63 @@
+"use strict";
+
+const path = require('path');
+const {
+  writeFileSync,
+  existsSync,
+  unlinkSync,
+  readFileSync
+} = require('fs');
+const {
+  getNodeModulesPath
+} = require('../General/getNodeModulesPath.js');
+const {
+  Logger
+} = require('../Logger/Logger.js');
+const {
+  executeSynchronizedCommands
+} = require('../General/executeSyncCommands.js');
+const {
+  getServicePathElseCommon
+} = require('./getLintConfiguration.js');
+
+/**
+ * @function createConfigFile - creates ESLint and/or Stylelint configuration files based on repo config.
+ * @returns {boolean} - true if any config file is created successfully, false otherwise.
+ */
+function createConfigFile() {
+  const nodeModulesPath = getNodeModulesPath();
+  const eslintConfigFilePath = path.join(nodeModulesPath, '.eslintrc.js');
+  const stylelintConfigFilePath = path.join(nodeModulesPath, '.stylelintrc.js');
+  const {
+    pathOfServiceSpecificEslintConfigFile,
+    pathOfServiceSpecificCssConfigFile
+  } = getServicePathElseCommon();
+  try {
+    const eslintConfig = executeSynchronizedCommands(readFileSync, [pathOfServiceSpecificEslintConfigFile, 'utf-8'], '', 'Unable to read content of eslint config file.', true, false);
+    const cssConfig = executeSynchronizedCommands(readFileSync, [pathOfServiceSpecificCssConfigFile, 'utf-8'], '', 'Unable to read content of stylelint config file.', true, false);
+    const cssCreated = handleConfigFile(stylelintConfigFilePath, cssConfig, 'Stylelint');
+    const eslintCreated = handleConfigFile(eslintConfigFilePath, eslintConfig, 'Eslint');
+    return cssCreated || eslintCreated;
+  } catch (error) {
+    Logger.log(Logger.FAILURE_TYPE, error);
+    Logger.log(Logger.FAILURE_TYPE, 'Issue occurred while generating config files.');
+    return false;
+  }
+}
+
+/**
+ * @function handleConfigFile - handles deletion (if exists) and creation of config files.
+ * @param {string} filePath - Path to the config file.
+ * @param {string} configContent - Configuration content.
+ * @param {string} type - Type of configuration (e.g., Eslint, Stylelint).
+ * @returns {boolean}
+ */
+function handleConfigFile(filePath, configContent, type) {
+  if (existsSync(filePath)) {
+    executeSynchronizedCommands(unlinkSync, [filePath], `${type} configuration file removed successfully!`, `Unable to remove the ${type.toLowerCase()} config file.`, false, false);
+  }
+  return executeSynchronizedCommands(writeFileSync, [filePath, configContent, 'utf-8'], `${type} configuration file created successfully!`, `Unable to create and write a ${type.toLowerCase()} configuration file.`, false, true);
+}
+module.exports = {
+  createConfigFile
+};
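The new `handleConfigFile` replaces any stale generated config before writing the fresh one. The underlying pattern, stripped of the package's logging wrapper, is roughly:

```js
// Sketch: remove a stale generated file if present, then write new content.
const { existsSync, unlinkSync, writeFileSync } = require('fs');

function replaceFile(filePath, content) {
  if (existsSync(filePath)) unlinkSync(filePath); // drop the stale copy first
  writeFileSync(filePath, content, 'utf-8');      // then write the fresh one
  return existsSync(filePath);
}

// Usage sketch: replaceFile('node_modules/.eslintrc.js', 'module.exports = { rules: {} };')
```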

package/build/utils/ConfigFileUtils/getLintConfiguration.js
CHANGED
@@ -29,12 +29,12 @@ function getServicePathElseCommon() {
   const _configurationPath = repoName && existsSync(path.join(libraryInstalledLocation, commonLinterRepoName, 'services', repoName, 'index.js')) ? path.join(libraryInstalledLocation, commonLinterRepoName, 'services', repoName, 'index.js') : commonConfigPath;
   let _pathOfServiceSpecificEslintConfigFile = repoName && existsSync(path.join(libraryInstalledLocation, commonLinterRepoName, 'services', repoName, '.eslintrc.js')) ? path.join(libraryInstalledLocation, commonLinterRepoName, 'services', repoName, '.eslintrc.js') : path.join(libraryInstalledLocation, commonLinterRepoName, 'common', '.eslintrc.js');
   let _pluginVersionPath = repoName && existsSync(path.join(libraryInstalledLocation, commonLinterRepoName, 'services', repoName, 'pluginVersion.js')) ? path.join(libraryInstalledLocation, commonLinterRepoName, 'services', repoName, 'pluginVersion.js') : undefined;
-  let
+  let _cssLintConfig = repoName && existsSync(path.join(libraryInstalledLocation, commonLinterRepoName, 'services', repoName, '.stylelintrc.js')) ? path.join(libraryInstalledLocation, commonLinterRepoName, 'services', repoName, '.stylelintrc.js') : path.join(libraryInstalledLocation, commonLinterRepoName, 'common', '.stylelintrc.js');
   return {
     configurationPath: _configurationPath,
     pathOfServiceSpecificEslintConfigFile: _pathOfServiceSpecificEslintConfigFile,
     pluginVersionPath: _pluginVersionPath,
-
+    pathOfServiceSpecificCssConfigFile: _cssLintConfig
   };
 }
 function getLintConfigurationUtil() {

package/build/utils/FileAndFolderOperations/filterFiles.js
CHANGED
@@ -3,7 +3,9 @@
 const {
   Logger
 } = require('../Logger/Logger');
-
+const {
+  existsSync
+} = require("fs");
 /**
  * @function filterFiles - removes certain files from set of source files
  * @param {Array} arrayOfFilesToBeFiltered - array of files which must be filtered
@@ -12,6 +14,29 @@ const {
  * @returns {Array} - containing the resultant set of files after filtering
  */
 function filterFiles(arrayOfFilesToBeFiltered, filesToBeRemoved, isConfigFileNeedToBeRemoved = false) {
+  /**
+   * @function filterFilesByExtension - filter javascript files. omit feature files
+   * @param {Array<String>} lintFiles - linter files as Array
+   * @returns {Array<String>}
+   * */
+  function filterFilesByExtension(lintFiles) {
+    return lintFiles.reduce((files, currentFile) => {
+      if (currentFile.includes('.feature') && existsSync(currentFile)) {
+        files.featureFiles.push(currentFile);
+      }
+      if (currentFile.includes('.js') || currentFile.includes('.ts') || currentFile.includes('.tsx') || currentFile.includes('.jsx') || currentFile.includes('.properties') && existsSync(currentFile)) {
+        files.JsFiles.push(currentFile);
+      }
+      if (currentFile.includes('.css') && existsSync(currentFile)) {
+        files.CssFiles.push(currentFile);
+      }
+      return files;
+    }, {
+      featureFiles: [],
+      JsFiles: [],
+      CssFiles: []
+    });
+  }
   if (filesToBeRemoved.length !== 0) {
     if (isConfigFileNeedToBeRemoved) {
       arrayOfFilesToBeFiltered.filter(file => {
@@ -31,7 +56,7 @@ function filterFiles(arrayOfFilesToBeFiltered, filesToBeRemoved, isConfigFileNee
       return false;
     }
   });
-  return filteredArrayofFilesWithoutConfigFile;
+  return filterFilesByExtension(filteredArrayofFilesWithoutConfigFile);
 } else if (filesToBeRemoved.length === 0) {
   return arrayOfFilesToBeFiltered;
 }
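The new `filterFilesByExtension` buckets the staged files into JS-like, CSS and feature-file groups with a single `reduce`. The same bucketing idea in isolation (the extension test here is a simplified regex, not the package's `includes` checks):

```js
// Sketch: bucket changed files by extension with Array.prototype.reduce.
const buckets = ['a.jsx', 'b.scss', 'c.md', 'd.ts'].reduce(
  (acc, file) => {
    if (/\.(js|jsx|ts|tsx|properties)$/.test(file)) acc.JsFiles.push(file);
    else if (/\.(css|scss)$/.test(file)) acc.CssFiles.push(file);
    return acc;
  },
  { JsFiles: [], CssFiles: [] }
);

console.log(buckets); // { JsFiles: [ 'a.jsx', 'd.ts' ], CssFiles: [ 'b.scss' ] }
```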

package/build/utils/General/getGeneralInfo.js
CHANGED
@@ -6,6 +6,13 @@ const fs = require("fs");
 const {
   execSync
 } = require('child_process');
+const {
+  readOnlyToken,
+  commitHashEndPoint
+} = require('../../../jsonUtils/commonLinterRepoDetails');
+const {
+  Logger
+} = require('../Logger/Logger');
 
 /**
  * @function getTimeStampInfo - to fetch various timestamp details
@@ -57,8 +64,14 @@ function getSupportedLanguage() {
   const {
     supportedExtensions
   } = getConfigurationPrecommit();
-  const _language = supportedExtensions;
-
+  const _language = supportedExtensions || [];
+  _language.push('.js');
+  _language.push('.jsx');
+  _language.push('.ts');
+  _language.push('.tsx');
+  _language.push('.css');
+  _language.push('.scss');
+  return [...new Set(_language)];
 }
 function getRunningEnv() {
   const command = "npm config get lint_env";
@@ -66,10 +79,58 @@ function getRunningEnv() {
     shell: true
   }).toString().trim();
 }
+
+/**
+ * @function getLastCommitHash - Fetches the last commit hash from a GitLab project using its API.
+ *
+ * @note This function assumes access to a specific GitLab instance and a hardcoded token and project ID.
+ * @returns {string} The latest commit hash (SHA) from the specified GitLab repository.
+ * @throws {Error} Will throw an error if the API request fails or returns unexpected data.
+ */
+
+function getLastCommitHash() {
+  try {
+    var _JSON$parse$;
+    const cmd = `curl --header "PRIVATE-TOKEN: ${readOnlyToken}" --url ${commitHashEndPoint}`;
+    return (_JSON$parse$ = JSON.parse(execSync(cmd))[0]) === null || _JSON$parse$ === void 0 ? void 0 : _JSON$parse$.id;
+  } catch (err) {
+    Logger.log(Logger.FAILURE_TYPE, err);
+    return null;
+  }
+}
+
+/**
+ * @function getStoredCommitHash - Loads and returns the stored commit hash from `fsUtils.json`.
+ * @returns {string} The commit hash stored in the `fsUtils.json` file.
+ * @throws {Error} Will throw if the file does not exist or does not contain a `commitHash` key.
+ */
+
+function getStoredCommitHash() {
+  const commitInfoPath = getJsonUtilsPath();
+  return fs.existsSync(commitInfoPath) && require(commitInfoPath).commitHash;
+}
+function getJsonUtilsPath() {
+  return path.join(__dirname, '..', '..', '..', 'jsonUtils', 'fsUtils.json');
+}
+function updateJsonFile(filePath, modifier) {
+  try {
+    const absolutePath = path.resolve(filePath);
+    const rawData = fs.readFileSync(absolutePath, "utf-8");
+    const jsonData = JSON.parse(rawData);
+    const updatedData = modifier(jsonData);
+    fs.writeFileSync(absolutePath, JSON.stringify(updatedData, null, 2), "utf-8");
+  } catch (error) {
+    Logger.log(Logger.FAILURE_TYPE, "Error updating JSON file");
+  }
+}
 module.exports = {
   getSupportedLanguage,
   getTimeStampInfo,
   getEnv,
   getConfigurationPrecommit,
-  getRunningEnv
+  getRunningEnv,
+  getLastCommitHash,
+  getStoredCommitHash,
+  getJsonUtilsPath,
+  updateJsonFile
 };
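The added `getLastCommitHash` shells out to `curl` against a GitLab-style commits endpoint and reads the `id` of the first entry in the response. A hedged sketch of that lookup (the endpoint and token below are placeholders):

```js
// Sketch: fetch the newest commit hash from a commits API via curl + execSync.
const { execSync } = require('child_process');

function latestCommitHash(endpoint, token) {
  try {
    const raw = execSync(`curl --silent --header "PRIVATE-TOKEN: ${token}" --url ${endpoint}`);
    const commits = JSON.parse(raw.toString());
    return Array.isArray(commits) && commits[0] ? commits[0].id : null;
  } catch (err) {
    return null; // network or auth failure: treat the hash as unknown
  }
}

// Usage sketch:
// latestCommitHash('https://git.example.com/api/v4/projects/123/repository/commits', '<read-only token>');
```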

package/build/utils/General/getNodeModulesPath.js
CHANGED
@@ -1,13 +1,16 @@
 "use strict";
 
 const path = require('path');
+const {
+  getJsonUtilsPath
+} = require('./getGeneralInfo');
 
 /**
  * @function getNodeModulesPath - fetches the absolute path where node_modules of project is present
  * @returns {string} - path where node_modules of the project is present
  */
 function getNodeModulesPath() {
-  return require(
+  return require(getJsonUtilsPath()).nodeModulesPath;
 }
 module.exports = {
   getNodeModulesPath

package/build/utils/General/writeProjectDetailsToJson.js
CHANGED
@@ -7,9 +7,13 @@ const {
 const {
   executeSynchronizedCommands
 } = require('./executeSyncCommands');
+const {
+  getLastCommitHash
+} = require('./getGeneralInfo');
 function writeFsPaths() {
   var fileContent = {
-    nodeModulesPath: path.resolve(process.cwd(), '..', '..', '..')
+    nodeModulesPath: path.resolve(process.cwd(), '..', '..', '..'),
+    commitHash: getLastCommitHash()
   };
   return executeSynchronizedCommands(writeFileSync, [path.join(process.cwd(), 'jsonUtils', 'fsUtils.json'), JSON.stringify(fileContent), 'utf-8'], 'node_modules path updated in json file', 'Unable to write node_modules path to json file', false, true);
 }