analyze-codebase 1.1.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/analyze/index.js +1 -2
- package/dist/utils/file.js +95 -7
- package/dist/utils/output.js +4 -42
- package/package.json +3 -2
- package/readme.md +26 -0
package/dist/analyze/index.js
CHANGED

@@ -32,14 +32,13 @@ const analyzeCodebase = async (options) => {
         onFile: (filePath) => {
             fileCount++;
             const { fileNameCase } = (0, file_1.analyzeFile)(filePath, output);
-            //@ts-ignore
             if (fileNameCases[fileNameCase] !== undefined)
                 fileNameCases[fileNameCase] += 1;
             else
                 fileNameCases[fileNameCase] = 0;
         },
     });
-    (0, output_1.logOutput)({
+    await (0, output_1.logOutput)({
         fileCount,
         fileNameCases,
         options,
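The only functional change in this hunk is that `logOutput` is now awaited; as the `output.js` diff below shows, it becomes an `async` function in 1.2.0 because it can write the analysis result to disk. A minimal TypeScript sketch of what awaiting such a call buys the caller (the `writeResult` helper and file name here are hypothetical, not part of the package): write failures surface in the caller's `try/catch` instead of becoming unhandled rejections, and anything logged after the call is guaranteed to run after the file exists.

```ts
import { writeFile } from "fs/promises";

// Hypothetical stand-in for the package's now-async logOutput.
const writeResult = async (result: { fileCount: number }): Promise<string> => {
  const path = "result.json";
  await writeFile(path, JSON.stringify(result, null, 2));
  return path;
};

const run = async (): Promise<void> => {
  try {
    // Awaiting means a failed write is caught here rather than
    // surfacing later as an unhandled promise rejection.
    const outPath = await writeResult({ fileCount: 42 });
    console.log(`Result written to ${outPath}`); // Runs only after the file exists.
  } catch (err) {
    console.error("Could not write result:", err);
  }
};

run();
```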
package/dist/utils/file.js
CHANGED

@@ -22,10 +22,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
     __setModuleDefault(result, mod);
     return result;
 };
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.traverseDirectory = void 0;
+exports.writeAnalyzeResult = exports.makeMigrationFromLastFileSystem = exports.readAnalyzeResult = exports.checkFileExist = exports.traverseDirectory = void 0;
 const fs = __importStar(require("fs/promises"));
 const path = __importStar(require("path"));
+const chalk_1 = __importDefault(require("chalk"));
 const blackList = [
     "node_modules",
     "dist",
@@ -36,16 +40,16 @@ const blackList = [
     "tests",
     "mocks",
 ];
-const traverseDirectory = async ({ directory, onFile, onDirectory, exclude, extensions, checkFileNames = true, }) => {
+const traverseDirectory = async ({ directory, onFile, onDirectory, exclude, extensions, checkFileNames = true, skipHidden = true, }) => {
     const files = await fs.readdir(directory, { withFileTypes: true });
     const tasks = files.map(async (file) => {
         var _a;
         const filePath = path.join(directory, file.name);
-        if (
-            file.name.startsWith(".") ||
-            blackList.includes(file.name))
+        if (skipHidden && file.name.startsWith("."))
             return;
         if (file.isDirectory()) {
+            if (blackList.includes(file.name) || ((_a = exclude === null || exclude === void 0 ? void 0 : exclude.includes) === null || _a === void 0 ? void 0 : _a.call(exclude, file.name)))
+                return;
             onDirectory === null || onDirectory === void 0 ? void 0 : onDirectory(filePath);
             await (0, exports.traverseDirectory)({
                 directory: filePath,
@@ -60,12 +64,96 @@ const traverseDirectory = async ({ directory, onFile, onDirectory, exclude, exte
             const ext = path.extname(file.name);
             if (extensions === null || extensions === void 0 ? void 0 : extensions.length) {
                 if (extensions.includes(ext))
-                    onFile(filePath);
+                    await onFile(filePath);
             }
             else
-                onFile(filePath);
+                await onFile(filePath);
         }
     });
     await Promise.all(tasks);
 };
 exports.traverseDirectory = traverseDirectory;
+const checkFileExist = async (filePath) => {
+    try {
+        await fs.stat(filePath);
+        return true;
+    }
+    catch (_a) {
+        return false;
+    }
+};
+exports.checkFileExist = checkFileExist;
+const readAnalyzeResult = async (directory) => {
+    const filePath = `${directory}/.analyze-codebase/data.json`;
+    if (await (0, exports.checkFileExist)(filePath)) {
+        return JSON.parse(await fs.readFile(filePath, "utf-8"));
+    }
+};
+exports.readAnalyzeResult = readAnalyzeResult;
+const makeMigrationFromLastFileSystem = async (directory) => {
+    const data = await (0, exports.readAnalyzeResult)(directory);
+    if (data)
+        return;
+    console.log(chalk_1.default.yellow("Migrating data from the last file system to the new file system...\n"));
+    let newAnalyzeOutput;
+    const oldAnalyzes = [];
+    await (0, exports.traverseDirectory)({
+        directory,
+        checkFileNames: true,
+        extensions: [".json"],
+        onFile: async (filePath) => {
+            try {
+                const fileContent = JSON.parse(await fs.readFile(filePath, "utf-8"));
+                oldAnalyzes.push(fileContent);
+            }
+            catch (error) {
+                console.error(chalk_1.default.red(`Error reading the file: ${filePath}.`));
+            }
+        },
+    });
+    if (oldAnalyzes.length) {
+        newAnalyzeOutput = {
+            totalAnalyzeCount: oldAnalyzes.length,
+            firstAnalyzeDate: oldAnalyzes[0].date,
+            lastAnalyzeDate: oldAnalyzes[oldAnalyzes.length - 1].date,
+            results: oldAnalyzes,
+        };
+        await fs.writeFile(`${directory}/data.json`, JSON.stringify(newAnalyzeOutput, null, 2));
+    }
+};
+exports.makeMigrationFromLastFileSystem = makeMigrationFromLastFileSystem;
+const writeAnalyzeResult = async (directory, result) => {
+    await (0, exports.makeMigrationFromLastFileSystem)(`${directory}/.analyze-codebase/`);
+    const filePath = `${directory}/.analyze-codebase/data.json`;
+    let data;
+    const existData = await (0, exports.readAnalyzeResult)(directory);
+    if (existData) {
+        data = existData;
+        data.totalAnalyzeCount += 1;
+        data.lastAnalyzeDate = new Date().toISOString();
+    }
+    else {
+        // Create the directory if it doesn't exist
+        await fs.mkdir(`${directory}/.analyze-codebase`, { recursive: true });
+        data = {
+            totalAnalyzeCount: 1,
+            firstAnalyzeDate: new Date().toISOString(),
+            lastAnalyzeDate: new Date().toISOString(),
+            results: [],
+        };
+    }
+    if (data.results.length) {
+        const lastResult = data.results[data.results.length - 1];
+        if (lastResult.fileCount === result.fileCount &&
+            JSON.stringify(lastResult.fileNameCases) ===
+                JSON.stringify(result.fileNameCases) &&
+            JSON.stringify(lastResult.output) === JSON.stringify(result.output)) {
+            result.sameAsBefore = (lastResult.sameAsBefore || 0) + 1;
+            console.log(chalk_1.default.red(`The result is the same as the last analyze. Same as before count: ${result.sameAsBefore}\n`));
+        }
+    }
+    data.results.push(result);
+    await fs.writeFile(filePath, JSON.stringify(data, null, 2));
+    return filePath;
+};
+exports.writeAnalyzeResult = writeAnalyzeResult;
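Taken together, the helpers added above persist a running history of analyses in `<directory>/.analyze-codebase/data.json`: `writeAnalyzeResult` appends each run, bumps the counters, and flags runs whose results match the previous one, while `makeMigrationFromLastFileSystem` folds any older per-run JSON files into the new single-file layout. The sketch below spells out the file's shape as it can be inferred from that code; the interface names (`AnalyzeHistory`, `AnalyzeRun`) and the `loadHistory` helper are illustrative only and are not exported by the package.

```ts
import { readFile } from "fs/promises";

// Shape inferred from writeAnalyzeResult/readAnalyzeResult in the diff above;
// the names are illustrative, the package ships no type declarations for this file.
interface AnalyzeRun {
  date: string;                          // ISO timestamp of the run
  fileCount: number;
  fileNameCases: Record<string, number>; // count of files per naming convention
  options: Record<string, unknown>;      // CLI options the run was started with
  output: unknown;                       // per-file analysis details
  sameAsBefore?: number;                 // set when a run matches the previous one
}

interface AnalyzeHistory {
  totalAnalyzeCount: number;
  firstAnalyzeDate: string;
  lastAnalyzeDate: string;
  results: AnalyzeRun[];
}

// Roughly what readAnalyzeResult does: return the history if the file exists.
const loadHistory = async (directory: string): Promise<AnalyzeHistory | undefined> => {
  try {
    const raw = await readFile(`${directory}/.analyze-codebase/data.json`, "utf-8");
    return JSON.parse(raw) as AnalyzeHistory;
  } catch {
    return undefined; // No history written yet.
  }
};

loadHistory(".").then((history) => console.log(history?.totalAnalyzeCount ?? 0));
```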
package/dist/utils/output.js
CHANGED

@@ -1,58 +1,22 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.logOutput = void 0;
-const fs = __importStar(require("fs"));
 const cli_table3_1 = __importDefault(require("cli-table3"));
 const chalk_1 = __importDefault(require("chalk"));
-const logOutput = ({ fileCount, output, fileNameCases, options, }) => {
-
+const file_1 = require("./file");
+const logOutput = async ({ fileCount, output, fileNameCases, options, }) => {
     const resultObject = {
         date: new Date().toISOString(),
         fileCount,
         fileNameCases,
         options,
         output,
-        // ... other result information ...
     };
-    // Convert the object to a JSON string
-    const resultJson = JSON.stringify(resultObject, null, 2); // The third parameter (2) adds indentation for better readability
     // Log the JSON output to the console
     console.log(chalk_1.default.bold("------------- Result -------------\n"));
-    // console.log(resultJson);
-    if (options.writeJsonOutput) {
-        // Create the directory if it doesn't exist
-        const directoryPath = `${options.directory}/.analyze-codebase`;
-        if (!fs.existsSync(directoryPath))
-            fs.mkdirSync(directoryPath, { recursive: true });
-    }
-    // Write the JSON output to a file (optional)
-    // Continue with your existing console logs (if needed)
     if (fileCount === 0) {
         console.log(chalk_1.default.red(`No files found in ${chalk_1.default.cyan(options.directory)} with extensions ${chalk_1.default.cyan(options.extensions)}\n`));
     }
@@ -91,11 +55,9 @@ const logOutput = ({ fileCount, output, fileNameCases, options, }) => {
             console.log(contentTypeTable.toString());
         }
     }
+    // Write the JSON output to a file (optional)
     if (options.writeJsonOutput) {
-        const writePath =
-        fs.writeFileSync(writePath, resultJson, {
-            flag: "w",
-        });
+        const writePath = await (0, file_1.writeAnalyzeResult)(options.directory, resultObject);
         console.log(chalk_1.default.bold(`\nJSON output written to: ${chalk_1.default.cyan(writePath)}\n`));
     }
     console.log(chalk_1.default.bold("\n----------------------------"));
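With the synchronous `fs` calls gone, `logOutput` now delegates all file writing to `writeAnalyzeResult` and simply reports the path it returns, so every `--writeJsonOutput` run appends to the same history file instead of overwriting a single snapshot. A small, hypothetical consumer script (not part of the package) showing the kind of tracking this enables, assuming the `AnalyzeHistory` shape sketched earlier:

```ts
import { readFile } from "fs/promises";

// Hypothetical report over the history file produced by `analyze-codebase -w`.
const reportTrend = async (directory: string): Promise<void> => {
  const raw = await readFile(`${directory}/.analyze-codebase/data.json`, "utf-8");
  const history = JSON.parse(raw) as {
    totalAnalyzeCount: number;
    results: { date: string; fileCount: number; sameAsBefore?: number }[];
  };

  console.log(`Runs recorded: ${history.totalAnalyzeCount}`);
  for (const run of history.results) {
    const note = run.sameAsBefore ? ` (unchanged x${run.sameAsBefore})` : "";
    console.log(`${run.date}: ${run.fileCount} files${note}`);
  }
};

reportTrend(process.cwd()).catch((err) => console.error(err));
```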
package/package.json
CHANGED

@@ -5,7 +5,7 @@
     "type": "git",
     "url": "https://github.com/mtahagocer/analyze-codebase"
   },
-  "version": "1.1.0",
+  "version": "1.2.0",
   "main": "dist/index.js",
   "license": "MIT",
   "bin": {
@@ -14,7 +14,8 @@
   "scripts": {
     "cli": "ts-node ./src/index.ts",
     "start": "node ./dist/index.js",
-    "compile": "npx rimraf dist && npx tsc"
+    "compile": "npx rimraf dist && npx tsc",
+    "publish": "npm run compile && npm publish"
   },
   "devDependencies": {
     "@types/node": "^20.1.5",
package/readme.md
CHANGED

@@ -46,6 +46,21 @@ analyze-codebase ./MyProject --exclude node_modules dist --extensions .tsx .ts
 
 - --checkFileContent [checkFileContent]: Check file content. Default: true.
 
+- -w or --writeJsonOutput [writeJsonOutput]: Write JSON output for tracking. Default: false.
+
+## Black list
+
+By default, analyze-codebase skips a black list of folder names that are usually not worth analyzing (e.g. build output or static files):
+
+- node_modules
+- dist
+- build
+- coverage
+- public
+- test
+- tests
+- mocks
+
 ## Examples
 
 Analyze a directory with default options:
@@ -78,6 +93,17 @@ Analyze only file content
 analyze-codebase ./src --exclude node_modules dist --checkFileNames=false
 ```
 
+Write JSON output for this analysis:
+
+```bash
+analyze-codebase -w
+```
+
+or
+
+```bash
+analyze-codebase --writeJsonOutput
+```
+
 ## Contribution
 
 We welcome contributions to enhance the functionality and features of Codebase Analyzer. To contribute to the project, please follow these steps: