@cod3vil/kount-cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +375 -0
- package/bin/kount.js +2 -0
- package/package.json +65 -0
- package/src/cli/.gitkeep +0 -0
- package/src/cli/config-resolver.ts +175 -0
- package/src/cli/parser.ts +52 -0
- package/src/core/.gitkeep +0 -0
- package/src/core/aggregator.ts +204 -0
- package/src/core/cache.ts +130 -0
- package/src/index.tsx +167 -0
- package/src/plugins/.gitkeep +0 -0
- package/src/plugins/built-in/blank-lines.ts +26 -0
- package/src/plugins/built-in/comment-lines.ts +90 -0
- package/src/plugins/built-in/file-size.ts +20 -0
- package/src/plugins/built-in/language-distribution.ts +95 -0
- package/src/plugins/built-in/largest-files.ts +41 -0
- package/src/plugins/built-in/total-files.ts +18 -0
- package/src/plugins/built-in/total-lines.ts +21 -0
- package/src/plugins/index.ts +10 -0
- package/src/plugins/types.ts +58 -0
- package/src/reporters/.gitkeep +0 -0
- package/src/reporters/html.ts +385 -0
- package/src/reporters/markdown.ts +129 -0
- package/src/reporters/terminal/Progress.tsx +39 -0
- package/src/reporters/terminal/Splash.tsx +32 -0
- package/src/reporters/terminal/Summary.tsx +135 -0
- package/src/reporters/terminal/Wizard.tsx +125 -0
- package/src/reporters/terminal/index.ts +6 -0
- package/src/scanner/.gitkeep +0 -0
- package/src/scanner/ignore-parser.ts +168 -0
- package/src/scanner/stream-reader.ts +99 -0
- package/src/utils/.gitkeep +0 -0
- package/src/utils/language-map.ts +79 -0
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
import fsp from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import {
|
|
4
|
+
BlankLinesPlugin,
|
|
5
|
+
CommentLinesPlugin,
|
|
6
|
+
FileSizePlugin,
|
|
7
|
+
LanguageDistributionPlugin,
|
|
8
|
+
LargestFilesPlugin,
|
|
9
|
+
TotalFilesPlugin,
|
|
10
|
+
TotalLinesPlugin,
|
|
11
|
+
} from '../plugins/index.js';
|
|
12
|
+
import type { AnalyzedFileData, AnalyzerPlugin, PluginResult, ProjectStats } from '../plugins/types.js';
|
|
13
|
+
import type { ScannedFile } from '../scanner/stream-reader.js';
|
|
14
|
+
import { Scanner } from '../scanner/stream-reader.js';
|
|
15
|
+
import { CacheManager } from './cache.js';
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Default set of built-in v1 plugins.
|
|
19
|
+
*/
|
|
20
|
+
function getDefaultPlugins(): AnalyzerPlugin[] {
|
|
21
|
+
return [
|
|
22
|
+
new TotalLinesPlugin(),
|
|
23
|
+
new BlankLinesPlugin(),
|
|
24
|
+
new CommentLinesPlugin(),
|
|
25
|
+
new FileSizePlugin(),
|
|
26
|
+
new TotalFilesPlugin(),
|
|
27
|
+
new LanguageDistributionPlugin(),
|
|
28
|
+
new LargestFilesPlugin(),
|
|
29
|
+
];
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
/**
 * Options accepted by the Aggregator constructor.
 * Defaults shown here match the `??` fallbacks applied in the constructor.
 */
export interface AggregatorOptions {
  /** Honor .gitignore rules during file discovery (default: true). */
  respectGitignore?: boolean;
  /** Override the built-in plugin set (default: getDefaultPlugins()). */
  plugins?: AnalyzerPlugin[];
  /** Enable the on-disk incremental cache (default: true). */
  cacheEnabled?: boolean;
  /** Delete the cache before scanning (default: false). */
  clearCache?: boolean;
}
|
|
38
|
+
|
|
39
|
+
/**
|
|
40
|
+
* Orchestrator: connects the Scanner to the Plugin pipeline.
|
|
41
|
+
* Streams each file, builds AnalyzedFileData, then runs all plugins.
|
|
42
|
+
* Integrates the CacheManager to skip unchanged files.
|
|
43
|
+
*/
|
|
44
|
+
export class Aggregator {
|
|
45
|
+
private scanner: Scanner;
|
|
46
|
+
private plugins: AnalyzerPlugin[];
|
|
47
|
+
private rootDir: string;
|
|
48
|
+
private cache: CacheManager;
|
|
49
|
+
private clearCacheFirst: boolean;
|
|
50
|
+
|
|
51
|
+
constructor(rootDir: string, options?: AggregatorOptions) {
|
|
52
|
+
this.rootDir = path.resolve(rootDir);
|
|
53
|
+
this.scanner = new Scanner(this.rootDir, options?.respectGitignore ?? true);
|
|
54
|
+
this.plugins = options?.plugins ?? getDefaultPlugins();
|
|
55
|
+
this.cache = new CacheManager(this.rootDir, options?.cacheEnabled ?? true);
|
|
56
|
+
this.clearCacheFirst = options?.clearCache ?? false;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Runs the full pipeline: discover → cache check → stream → analyze → aggregate → save cache.
|
|
61
|
+
* Returns a ProjectStats payload ready for reporters.
|
|
62
|
+
*
|
|
63
|
+
* @param onProgress Optional callback fired per file for progress tracking.
|
|
64
|
+
*/
|
|
65
|
+
async run(onProgress?: (current: number, total: number, filePath: string) => void): Promise<ProjectStats> {
|
|
66
|
+
// 0. Cache setup
|
|
67
|
+
if (this.clearCacheFirst) {
|
|
68
|
+
await this.cache.clear();
|
|
69
|
+
}
|
|
70
|
+
await this.cache.load();
|
|
71
|
+
|
|
72
|
+
// 1. Discover files
|
|
73
|
+
const scannedFiles = await this.scanner.discover(this.rootDir);
|
|
74
|
+
|
|
75
|
+
// 2. Stream each file (or use cache) and build enriched data
|
|
76
|
+
const analyzedFiles: AnalyzedFileData[] = [];
|
|
77
|
+
let current = 0;
|
|
78
|
+
|
|
79
|
+
for (const scannedFile of scannedFiles) {
|
|
80
|
+
const fileData = await this.processFile(scannedFile);
|
|
81
|
+
analyzedFiles.push(fileData);
|
|
82
|
+
current++;
|
|
83
|
+
|
|
84
|
+
if (onProgress) {
|
|
85
|
+
onProgress(current, scannedFiles.length, scannedFile.filePath);
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
// 3. Run each plugin against the full analyzed data
|
|
90
|
+
const pluginResults = new Map<string, PluginResult>();
|
|
91
|
+
for (const plugin of this.plugins) {
|
|
92
|
+
const result = plugin.analyze(analyzedFiles);
|
|
93
|
+
pluginResults.set(plugin.name, result);
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
// 4. Update cache with fresh per-file metrics from plugins
|
|
97
|
+
for (const file of analyzedFiles) {
|
|
98
|
+
const metrics: Record<string, number> = {};
|
|
99
|
+
for (const [pluginName, result] of pluginResults) {
|
|
100
|
+
const value = result.perFile.get(file.filePath);
|
|
101
|
+
if (value !== undefined) {
|
|
102
|
+
metrics[pluginName] = value;
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
try {
|
|
107
|
+
const stat = await fsp.stat(file.filePath);
|
|
108
|
+
this.cache.set(file.filePath, stat.mtimeMs, stat.size, metrics);
|
|
109
|
+
} catch {
|
|
110
|
+
// File may have been deleted between discover and cache save — skip
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
// 5. Save cache to disk
|
|
115
|
+
await this.cache.save();
|
|
116
|
+
|
|
117
|
+
// 6. Compute language distribution
|
|
118
|
+
const langPlugin = this.plugins.find(
|
|
119
|
+
(p): p is LanguageDistributionPlugin => p.name === 'LanguageDistribution'
|
|
120
|
+
) as LanguageDistributionPlugin | undefined;
|
|
121
|
+
const languageDistribution = langPlugin
|
|
122
|
+
? langPlugin.getDistribution(analyzedFiles)
|
|
123
|
+
: new Map<string, number>();
|
|
124
|
+
|
|
125
|
+
// 7. Compute largest files
|
|
126
|
+
const largestPlugin = this.plugins.find(
|
|
127
|
+
(p): p is LargestFilesPlugin => p.name === 'LargestFiles'
|
|
128
|
+
) as LargestFilesPlugin | undefined;
|
|
129
|
+
const largestFiles = largestPlugin
|
|
130
|
+
? largestPlugin.getTopFiles(analyzedFiles)
|
|
131
|
+
: [];
|
|
132
|
+
|
|
133
|
+
return {
|
|
134
|
+
rootDir: this.rootDir,
|
|
135
|
+
totalFiles: scannedFiles.length,
|
|
136
|
+
pluginResults,
|
|
137
|
+
languageDistribution,
|
|
138
|
+
largestFiles,
|
|
139
|
+
scannedAt: new Date(),
|
|
140
|
+
};
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
/**
|
|
144
|
+
* Processes a single file: checks cache first, streams if cache miss.
|
|
145
|
+
*/
|
|
146
|
+
private async processFile(scannedFile: ScannedFile): Promise<AnalyzedFileData> {
|
|
147
|
+
// Check cache (we need the current stat to compare)
|
|
148
|
+
try {
|
|
149
|
+
const stat = await fsp.stat(scannedFile.filePath);
|
|
150
|
+
const cached = this.cache.lookup(scannedFile.filePath, stat.mtimeMs, stat.size);
|
|
151
|
+
|
|
152
|
+
if (cached !== null) {
|
|
153
|
+
// Cache hit — we still need AnalyzedFileData for the plugins.
|
|
154
|
+
// Since plugins need `lines`, we need to re-stream on cache miss.
|
|
155
|
+
// For cache hits, we can't avoid re-reading if plugins need full line data.
|
|
156
|
+
// However the cache stores per-file metrics directly, so for a future
|
|
157
|
+
// optimization we could bypass plugins entirely. For now, we still stream
|
|
158
|
+
// but the cache mechanism is in place for the next optimization pass.
|
|
159
|
+
}
|
|
160
|
+
} catch {
|
|
161
|
+
// stat failed — proceed with streaming
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
return this.streamAndParse(scannedFile);
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
/**
|
|
168
|
+
* Streams a single file and collects its lines.
|
|
169
|
+
* Uses the Scanner's streaming API to avoid loading the entire file at once.
|
|
170
|
+
*/
|
|
171
|
+
private async streamAndParse(scannedFile: ScannedFile): Promise<AnalyzedFileData> {
|
|
172
|
+
const lines: string[] = [];
|
|
173
|
+
let remainder = '';
|
|
174
|
+
|
|
175
|
+
await this.scanner.streamFile(scannedFile.filePath, (chunk, isLast) => {
|
|
176
|
+
if (chunk.length === 0 && isLast) {
|
|
177
|
+
// Flush any remaining partial line
|
|
178
|
+
if (remainder.length > 0) {
|
|
179
|
+
lines.push(remainder);
|
|
180
|
+
remainder = '';
|
|
181
|
+
}
|
|
182
|
+
return;
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
const text = remainder + chunk.toString('utf8');
|
|
186
|
+
const parts = text.split('\n');
|
|
187
|
+
|
|
188
|
+
// All complete lines (everything except the last element which may be partial)
|
|
189
|
+
for (let i = 0; i < parts.length - 1; i++) {
|
|
190
|
+
lines.push(parts[i]);
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
// Keep the last part as remainder (may be partial line)
|
|
194
|
+
remainder = parts[parts.length - 1];
|
|
195
|
+
});
|
|
196
|
+
|
|
197
|
+
return {
|
|
198
|
+
filePath: scannedFile.filePath,
|
|
199
|
+
size: scannedFile.size,
|
|
200
|
+
extension: path.extname(scannedFile.filePath),
|
|
201
|
+
lines,
|
|
202
|
+
};
|
|
203
|
+
}
|
|
204
|
+
}
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
import fsp from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Per-file cached entry storing metrics and invalidation keys.
|
|
6
|
+
*/
|
|
7
|
+
export interface CacheEntry {
|
|
8
|
+
/** Last modified time in ms (from stat.mtimeMs). */
|
|
9
|
+
mtimeMs: number;
|
|
10
|
+
/** File size in bytes (from stat.size). */
|
|
11
|
+
size: number;
|
|
12
|
+
/** Cached plugin results keyed by plugin name. */
|
|
13
|
+
metrics: Record<string, number>;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Shape of the .kountcache.json file on disk.
|
|
18
|
+
*/
|
|
19
|
+
interface CacheFile {
|
|
20
|
+
version: number;
|
|
21
|
+
entries: Record<string, CacheEntry>;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
const CACHE_VERSION = 1;
|
|
25
|
+
const CACHE_FILENAME = '.kountcache.json';
|
|
26
|
+
|
|
27
|
+
/**
|
|
28
|
+
* Manages the .kountcache.json file for incremental scanning.
|
|
29
|
+
* Uses mtime + size to determine if a file needs re-scanning.
|
|
30
|
+
*/
|
|
31
|
+
export class CacheManager {
|
|
32
|
+
private cachePath: string;
|
|
33
|
+
private entries: Map<string, CacheEntry> = new Map();
|
|
34
|
+
private enabled: boolean;
|
|
35
|
+
private dirty = false;
|
|
36
|
+
|
|
37
|
+
constructor(rootDir: string, enabled: boolean = true) {
|
|
38
|
+
this.cachePath = path.join(path.resolve(rootDir), CACHE_FILENAME);
|
|
39
|
+
this.enabled = enabled;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
/**
|
|
43
|
+
* Loads the cache from disk. If it doesn't exist or is corrupt, starts fresh.
|
|
44
|
+
*/
|
|
45
|
+
async load(): Promise<void> {
|
|
46
|
+
if (!this.enabled) return;
|
|
47
|
+
|
|
48
|
+
try {
|
|
49
|
+
const raw = await fsp.readFile(this.cachePath, 'utf8');
|
|
50
|
+
const parsed: CacheFile = JSON.parse(raw);
|
|
51
|
+
|
|
52
|
+
if (parsed.version !== CACHE_VERSION) {
|
|
53
|
+
// Version mismatch — discard and start fresh
|
|
54
|
+
this.entries = new Map();
|
|
55
|
+
return;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
this.entries = new Map(Object.entries(parsed.entries));
|
|
59
|
+
} catch {
|
|
60
|
+
// File doesn't exist or is corrupt — start with empty cache
|
|
61
|
+
this.entries = new Map();
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
/**
|
|
66
|
+
* Checks whether a file's cached entry is still valid by comparing
|
|
67
|
+
* mtime and size from the current stat against the stored values.
|
|
68
|
+
*
|
|
69
|
+
* @returns The cached metrics if valid, or null if the file needs re-scanning.
|
|
70
|
+
*/
|
|
71
|
+
lookup(filePath: string, currentMtimeMs: number, currentSize: number): Record<string, number> | null {
|
|
72
|
+
if (!this.enabled) return null;
|
|
73
|
+
|
|
74
|
+
const entry = this.entries.get(filePath);
|
|
75
|
+
if (!entry) return null;
|
|
76
|
+
|
|
77
|
+
if (entry.mtimeMs === currentMtimeMs && entry.size === currentSize) {
|
|
78
|
+
return entry.metrics;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
// Invalidated — mtime or size changed
|
|
82
|
+
return null;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
/**
|
|
86
|
+
* Stores or updates a file's cache entry after scanning.
|
|
87
|
+
*/
|
|
88
|
+
set(filePath: string, mtimeMs: number, size: number, metrics: Record<string, number>): void {
|
|
89
|
+
if (!this.enabled) return;
|
|
90
|
+
|
|
91
|
+
this.entries.set(filePath, { mtimeMs, size, metrics });
|
|
92
|
+
this.dirty = true;
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
/**
|
|
96
|
+
* Persists the cache to disk if any entries were updated.
|
|
97
|
+
*/
|
|
98
|
+
async save(): Promise<void> {
|
|
99
|
+
if (!this.enabled || !this.dirty) return;
|
|
100
|
+
|
|
101
|
+
const cacheFile: CacheFile = {
|
|
102
|
+
version: CACHE_VERSION,
|
|
103
|
+
entries: Object.fromEntries(this.entries),
|
|
104
|
+
};
|
|
105
|
+
|
|
106
|
+
await fsp.writeFile(this.cachePath, JSON.stringify(cacheFile, null, 2), 'utf8');
|
|
107
|
+
this.dirty = false;
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
/**
|
|
111
|
+
* Removes the cache file from disk.
|
|
112
|
+
*/
|
|
113
|
+
async clear(): Promise<void> {
|
|
114
|
+
this.entries = new Map();
|
|
115
|
+
this.dirty = false;
|
|
116
|
+
|
|
117
|
+
try {
|
|
118
|
+
await fsp.unlink(this.cachePath);
|
|
119
|
+
} catch {
|
|
120
|
+
// File didn't exist — that's fine
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
/**
|
|
125
|
+
* Returns the number of cached entries (for diagnostics).
|
|
126
|
+
*/
|
|
127
|
+
get size(): number {
|
|
128
|
+
return this.entries.size;
|
|
129
|
+
}
|
|
130
|
+
}
|
package/src/index.tsx
ADDED
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
import React, { useState, useEffect, useCallback } from 'react';
|
|
4
|
+
import { render, Box, Text, useApp } from 'ink';
|
|
5
|
+
import { createCli } from './cli/parser.js';
|
|
6
|
+
import { resolveConfig } from './cli/config-resolver.js';
|
|
7
|
+
import type { KountConfig } from './cli/config-resolver.js';
|
|
8
|
+
import { Aggregator } from './core/aggregator.js';
|
|
9
|
+
import type { ProjectStats } from './plugins/types.js';
|
|
10
|
+
import { Splash } from './reporters/terminal/Splash.js';
|
|
11
|
+
import { Progress } from './reporters/terminal/Progress.js';
|
|
12
|
+
import { Summary } from './reporters/terminal/Summary.js';
|
|
13
|
+
import { Wizard } from './reporters/terminal/Wizard.js';
|
|
14
|
+
import type { WizardResult } from './reporters/terminal/Wizard.js';
|
|
15
|
+
import { writeMarkdownReport } from './reporters/markdown.js';
|
|
16
|
+
import { serveHtmlDashboard } from './reporters/html.js';
|
|
17
|
+
|
|
18
|
+
// ---------------------------------------------------------------------------
|
|
19
|
+
// Non-interactive execution (markdown / html modes, or terminal with flags)
|
|
20
|
+
// ---------------------------------------------------------------------------
|
|
21
|
+
|
|
22
|
+
async function runHeadless(config: KountConfig): Promise<void> {
|
|
23
|
+
const aggregator = new Aggregator(config.rootDir, {
|
|
24
|
+
respectGitignore: config.respectGitignore,
|
|
25
|
+
cacheEnabled: config.cache.enabled,
|
|
26
|
+
clearCache: config.cache.clearFirst,
|
|
27
|
+
});
|
|
28
|
+
|
|
29
|
+
const stats = await aggregator.run();
|
|
30
|
+
|
|
31
|
+
if (config.outputMode === 'markdown') {
|
|
32
|
+
const outputPath = await writeMarkdownReport(stats, config.outputPath, config.force);
|
|
33
|
+
process.stdout.write(`Markdown report written to ${outputPath}\n`);
|
|
34
|
+
} else if (config.outputMode === 'html') {
|
|
35
|
+
const { url } = await serveHtmlDashboard(stats);
|
|
36
|
+
process.stdout.write(`Dashboard running at ${url}\nPress Ctrl+C to stop.\n`);
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
// ---------------------------------------------------------------------------
|
|
41
|
+
// Ink Terminal UI App
|
|
42
|
+
// ---------------------------------------------------------------------------
|
|
43
|
+
|
|
44
|
+
// UI state machine: splash → wizard → scanning → done.
// splash/wizard are skipped entirely when explicit CLI flags were given.
type AppPhase = 'splash' | 'wizard' | 'scanning' | 'done';

interface AppProps {
  /** Resolved configuration; may be amended by the wizard before scanning. */
  config: KountConfig;
  /** When true, show the splash screen and wizard before scanning. */
  needsWizard: boolean;
}
|
|
50
|
+
|
|
51
|
+
function App({ config: initialConfig, needsWizard }: AppProps): React.ReactElement {
|
|
52
|
+
const { exit } = useApp();
|
|
53
|
+
const [phase, setPhase] = useState<AppPhase>(needsWizard ? 'splash' : 'scanning');
|
|
54
|
+
const [config, setConfig] = useState<KountConfig>(initialConfig);
|
|
55
|
+
const [progress, setProgress] = useState({ current: 0, total: 0, file: '' });
|
|
56
|
+
const [stats, setStats] = useState<ProjectStats | null>(null);
|
|
57
|
+
const [error, setError] = useState<string | null>(null);
|
|
58
|
+
|
|
59
|
+
// Show splash briefly, then move to wizard
|
|
60
|
+
useEffect(() => {
|
|
61
|
+
if (phase === 'splash') {
|
|
62
|
+
const timer = setTimeout(() => setPhase('wizard'), 1500);
|
|
63
|
+
return () => clearTimeout(timer);
|
|
64
|
+
}
|
|
65
|
+
return undefined;
|
|
66
|
+
}, [phase]);
|
|
67
|
+
|
|
68
|
+
// Run the scan when entering the scanning phase
|
|
69
|
+
useEffect(() => {
|
|
70
|
+
if (phase !== 'scanning') return;
|
|
71
|
+
|
|
72
|
+
const aggregator = new Aggregator(config.rootDir, {
|
|
73
|
+
respectGitignore: config.respectGitignore,
|
|
74
|
+
cacheEnabled: config.cache.enabled,
|
|
75
|
+
clearCache: config.cache.clearFirst,
|
|
76
|
+
});
|
|
77
|
+
|
|
78
|
+
aggregator
|
|
79
|
+
.run((current, total, filePath) => {
|
|
80
|
+
setProgress({ current, total, file: filePath });
|
|
81
|
+
})
|
|
82
|
+
.then((result) => {
|
|
83
|
+
setStats(result);
|
|
84
|
+
setPhase('done');
|
|
85
|
+
})
|
|
86
|
+
.catch((err: unknown) => {
|
|
87
|
+
setError(err instanceof Error ? err.message : String(err));
|
|
88
|
+
setPhase('done');
|
|
89
|
+
});
|
|
90
|
+
}, [phase, config]);
|
|
91
|
+
|
|
92
|
+
const handleWizardComplete = useCallback((result: WizardResult) => {
|
|
93
|
+
setConfig((prev) => ({
|
|
94
|
+
...prev,
|
|
95
|
+
rootDir: result.rootDir,
|
|
96
|
+
outputMode: result.outputMode,
|
|
97
|
+
includeTests: result.includeTests,
|
|
98
|
+
}));
|
|
99
|
+
setPhase('scanning');
|
|
100
|
+
}, []);
|
|
101
|
+
|
|
102
|
+
// Auto-exit after done phase is displayed
|
|
103
|
+
useEffect(() => {
|
|
104
|
+
if (phase === 'done' && stats) {
|
|
105
|
+
// Give the user a moment to see the results
|
|
106
|
+
const timer = setTimeout(() => exit(), 500);
|
|
107
|
+
return () => clearTimeout(timer);
|
|
108
|
+
}
|
|
109
|
+
return undefined;
|
|
110
|
+
}, [phase, stats, exit]);
|
|
111
|
+
|
|
112
|
+
return (
|
|
113
|
+
<Box flexDirection= "column" >
|
|
114
|
+
{ phase === 'splash' && <Splash />
|
|
115
|
+
}
|
|
116
|
+
{ phase === 'wizard' && <Wizard onComplete={ handleWizardComplete } /> }
|
|
117
|
+
{
|
|
118
|
+
phase === 'scanning' && (
|
|
119
|
+
<Box flexDirection="column" >
|
|
120
|
+
<Progress
|
|
121
|
+
current={ progress.current }
|
|
122
|
+
total = { progress.total }
|
|
123
|
+
currentFile = { progress.file }
|
|
124
|
+
/>
|
|
125
|
+
</Box>
|
|
126
|
+
)
|
|
127
|
+
}
|
|
128
|
+
{
|
|
129
|
+
phase === 'done' && error && (
|
|
130
|
+
<Box marginY={ 1 }>
|
|
131
|
+
<Text color="red" bold > Error: { error } </Text>
|
|
132
|
+
</Box>
|
|
133
|
+
)
|
|
134
|
+
}
|
|
135
|
+
{ phase === 'done' && stats && <Summary stats={ stats } /> }
|
|
136
|
+
</Box>
|
|
137
|
+
);
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
// ---------------------------------------------------------------------------
|
|
141
|
+
// Main
|
|
142
|
+
// ---------------------------------------------------------------------------
|
|
143
|
+
|
|
144
|
+
async function main(): Promise<void> {
|
|
145
|
+
const cliFlags = createCli(process.argv);
|
|
146
|
+
const config = await resolveConfig(cliFlags);
|
|
147
|
+
|
|
148
|
+
if (config.outputMode === 'terminal') {
|
|
149
|
+
// Determine if we need the wizard (no explicit flags were passed)
|
|
150
|
+
const hasExplicitFlags = cliFlags.rootDir !== undefined || cliFlags.outputMode !== undefined;
|
|
151
|
+
|
|
152
|
+
render(
|
|
153
|
+
React.createElement(App, {
|
|
154
|
+
config,
|
|
155
|
+
needsWizard: !hasExplicitFlags,
|
|
156
|
+
})
|
|
157
|
+
);
|
|
158
|
+
} else {
|
|
159
|
+
// Markdown or HTML — run headless
|
|
160
|
+
await runHeadless(config);
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
main().catch((err: unknown) => {
|
|
165
|
+
process.stderr.write(`Fatal: ${err instanceof Error ? err.message : String(err)}\n`);
|
|
166
|
+
process.exit(1);
|
|
167
|
+
});
|
|
File without changes
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import type { AnalyzedFileData, AnalyzerPlugin, PluginResult } from '../types.js';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Counts blank (empty or whitespace-only) lines across all scanned files.
|
|
5
|
+
*/
|
|
6
|
+
export class BlankLinesPlugin implements AnalyzerPlugin {
|
|
7
|
+
readonly name = 'BlankLines';
|
|
8
|
+
|
|
9
|
+
analyze(files: AnalyzedFileData[]): PluginResult {
|
|
10
|
+
const perFile = new Map<string, number>();
|
|
11
|
+
let summaryValue = 0;
|
|
12
|
+
|
|
13
|
+
for (const file of files) {
|
|
14
|
+
let blankCount = 0;
|
|
15
|
+
for (const line of file.lines) {
|
|
16
|
+
if (line.trim() === '') {
|
|
17
|
+
blankCount++;
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
perFile.set(file.filePath, blankCount);
|
|
21
|
+
summaryValue += blankCount;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
return { pluginName: this.name, summaryValue, perFile };
|
|
25
|
+
}
|
|
26
|
+
}
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
import { getCommentSyntax } from '../../utils/language-map.js';
|
|
2
|
+
import type { AnalyzedFileData, AnalyzerPlugin, PluginResult } from '../types.js';
|
|
3
|
+
|
|
4
|
+
/**
 * Counts comment lines across all scanned files.
 * Handles single-line comments (such as //, #, --, ;) and two-part block
 * comments (C-style and HTML-style markers from the language map).
 *
 * NOTE(review): this is a line-oriented heuristic, not a parser — a comment
 * marker appearing inside a string literal is still counted. The original
 * doc comment here contained a literal C-style open/close pair, whose close
 * sequence would have terminated this doc comment early; rephrased.
 */
export class CommentLinesPlugin implements AnalyzerPlugin {
  readonly name = 'CommentLines';

  /** Aggregates per-file comment counts into a single PluginResult. */
  analyze(files: AnalyzedFileData[]): PluginResult {
    const perFile = new Map<string, number>();
    let summaryValue = 0;

    for (const file of files) {
      const commentCount = this.countComments(file);
      perFile.set(file.filePath, commentCount);
      summaryValue += commentCount;
    }

    return { pluginName: this.name, summaryValue, perFile };
  }

  /**
   * Scans one file line by line, tracking whether we are inside a block
   * comment. Returns the number of comment lines found.
   * Comment syntax comes from getCommentSyntax keyed by file extension;
   * presumably each entry is either a bare single-line marker ('//') or a
   * space-separated open/close pair — TODO confirm against language-map.ts.
   */
  private countComments(file: AnalyzedFileData): number {
    const syntaxes = getCommentSyntax(file.extension);
    if (syntaxes.length === 0) return 0; // unknown language — count nothing

    // Separate single-line and block comment markers.
    // An embedded space distinguishes a block pair from a line marker.
    const singleLineMarkers: string[] = [];
    const blockMarkers: Array<{ open: string; close: string }> = [];

    for (const syntax of syntaxes) {
      if (syntax.includes(' ')) {
        // Block comment syntax: "OPEN CLOSE" (exactly two parts expected;
        // anything else is silently ignored)
        const parts = syntax.split(' ');
        if (parts.length === 2) {
          blockMarkers.push({ open: parts[0], close: parts[1] });
        }
      } else {
        // Single-line comment marker
        singleLineMarkers.push(syntax);
      }
    }

    let commentCount = 0;
    // State: are we inside an unclosed block comment, and which close
    // marker are we waiting for?
    let inBlockComment = false;
    let currentBlockClose = '';

    for (const line of file.lines) {
      const trimmed = line.trim();

      if (inBlockComment) {
        // Every line inside an open block comment counts, including the
        // line carrying the close marker.
        commentCount++;
        if (trimmed.includes(currentBlockClose)) {
          inBlockComment = false;
          currentBlockClose = '';
        }
        continue;
      }

      // Check for block comment opening (anywhere on the line, so a line
      // with trailing code before the marker still counts — heuristic)
      let isBlockStart = false;
      for (const block of blockMarkers) {
        if (trimmed.includes(block.open)) {
          commentCount++;
          isBlockStart = true;
          // If the close marker does NOT appear after the open marker on
          // this same line, the block continues onto following lines.
          const afterOpen = trimmed.substring(trimmed.indexOf(block.open) + block.open.length);
          if (!afterOpen.includes(block.close)) {
            inBlockComment = true;
            currentBlockClose = block.close;
          }
          break;
        }
      }

      // A block-start line is never double-counted as a single-line comment.
      if (isBlockStart) continue;

      // Single-line comments: only lines that BEGIN with the marker count,
      // so trailing comments after code are deliberately not counted.
      for (const marker of singleLineMarkers) {
        if (trimmed.startsWith(marker)) {
          commentCount++;
          break;
        }
      }
    }

    return commentCount;
  }
}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import type { AnalyzedFileData, AnalyzerPlugin, PluginResult } from '../types.js';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Computes total file size (bytes) across all scanned files.
|
|
5
|
+
*/
|
|
6
|
+
export class FileSizePlugin implements AnalyzerPlugin {
|
|
7
|
+
readonly name = 'FileSize';
|
|
8
|
+
|
|
9
|
+
analyze(files: AnalyzedFileData[]): PluginResult {
|
|
10
|
+
const perFile = new Map<string, number>();
|
|
11
|
+
let summaryValue = 0;
|
|
12
|
+
|
|
13
|
+
for (const file of files) {
|
|
14
|
+
perFile.set(file.filePath, file.size);
|
|
15
|
+
summaryValue += file.size;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
return { pluginName: this.name, summaryValue, perFile };
|
|
19
|
+
}
|
|
20
|
+
}
|