fln 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +330 -0
- package/dist/api/fln.d.ts +3 -0
- package/dist/api/fln.d.ts.map +1 -0
- package/dist/api/fln.js +71 -0
- package/dist/api/index.d.ts +3 -0
- package/dist/api/index.d.ts.map +1 -0
- package/dist/api/index.js +2 -0
- package/dist/api/types.d.ts +34 -0
- package/dist/api/types.d.ts.map +1 -0
- package/dist/api/types.js +1 -0
- package/dist/cli/commandLine.d.ts +2 -0
- package/dist/cli/commandLine.d.ts.map +1 -0
- package/dist/cli/commandLine.js +120 -0
- package/dist/cli/help.d.ts +2 -0
- package/dist/cli/help.d.ts.map +1 -0
- package/dist/cli/help.js +40 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +1 -0
- package/dist/cli/main.d.ts +3 -0
- package/dist/cli/main.d.ts.map +1 -0
- package/dist/cli/main.js +9 -0
- package/dist/cli/output/components/breakdown.d.ts +2 -0
- package/dist/cli/output/components/breakdown.d.ts.map +1 -0
- package/dist/cli/output/components/breakdown.js +21 -0
- package/dist/cli/output/components/errors.d.ts +6 -0
- package/dist/cli/output/components/errors.d.ts.map +1 -0
- package/dist/cli/output/components/errors.js +13 -0
- package/dist/cli/output/components/progressBar.d.ts +7 -0
- package/dist/cli/output/components/progressBar.d.ts.map +1 -0
- package/dist/cli/output/components/progressBar.js +38 -0
- package/dist/cli/output/components/summary.d.ts +8 -0
- package/dist/cli/output/components/summary.d.ts.map +1 -0
- package/dist/cli/output/components/summary.js +12 -0
- package/dist/cli/output/components/warnings.d.ts +6 -0
- package/dist/cli/output/components/warnings.d.ts.map +1 -0
- package/dist/cli/output/components/warnings.js +11 -0
- package/dist/cli/output/formatter.d.ts +5 -0
- package/dist/cli/output/formatter.d.ts.map +1 -0
- package/dist/cli/output/formatter.js +28 -0
- package/dist/cli/output/index.d.ts +8 -0
- package/dist/cli/output/index.d.ts.map +1 -0
- package/dist/cli/output/index.js +4 -0
- package/dist/cli/output/renderer.d.ts +21 -0
- package/dist/cli/output/renderer.d.ts.map +1 -0
- package/dist/cli/output/renderer.js +121 -0
- package/dist/cli/output/styles.d.ts +23 -0
- package/dist/cli/output/styles.d.ts.map +1 -0
- package/dist/cli/output/styles.js +26 -0
- package/dist/config/defaults.d.ts +3 -0
- package/dist/config/defaults.d.ts.map +1 -0
- package/dist/config/defaults.js +2 -0
- package/dist/config/index.d.ts +6 -0
- package/dist/config/index.d.ts.map +1 -0
- package/dist/config/index.js +5 -0
- package/dist/config/loader.d.ts +3 -0
- package/dist/config/loader.d.ts.map +1 -0
- package/dist/config/loader.js +11 -0
- package/dist/config/resolver.d.ts +8 -0
- package/dist/config/resolver.d.ts.map +1 -0
- package/dist/config/resolver.js +66 -0
- package/dist/config/types.d.ts +40 -0
- package/dist/config/types.d.ts.map +1 -0
- package/dist/config/types.js +1 -0
- package/dist/config/utils.d.ts +7 -0
- package/dist/config/utils.d.ts.map +1 -0
- package/dist/config/utils.js +161 -0
- package/dist/core/ignoreMatcher.d.ts +15 -0
- package/dist/core/ignoreMatcher.d.ts.map +1 -0
- package/dist/core/ignoreMatcher.js +97 -0
- package/dist/core/index.d.ts +8 -0
- package/dist/core/index.d.ts.map +1 -0
- package/dist/core/index.js +7 -0
- package/dist/core/renderOutput.d.ts +4 -0
- package/dist/core/renderOutput.d.ts.map +1 -0
- package/dist/core/renderOutput.js +218 -0
- package/dist/core/renderTree.d.ts +3 -0
- package/dist/core/renderTree.d.ts.map +1 -0
- package/dist/core/renderTree.js +23 -0
- package/dist/core/scanTree.d.ts +4 -0
- package/dist/core/scanTree.d.ts.map +1 -0
- package/dist/core/scanTree.js +348 -0
- package/dist/core/size.d.ts +4 -0
- package/dist/core/size.d.ts.map +1 -0
- package/dist/core/size.js +32 -0
- package/dist/core/statsCollector.d.ts +4 -0
- package/dist/core/statsCollector.d.ts.map +1 -0
- package/dist/core/statsCollector.js +28 -0
- package/dist/core/types.d.ts +53 -0
- package/dist/core/types.d.ts.map +1 -0
- package/dist/core/types.js +1 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +3 -0
- package/dist/infra/countTokens.d.ts +2 -0
- package/dist/infra/countTokens.d.ts.map +1 -0
- package/dist/infra/countTokens.js +186 -0
- package/dist/infra/datetime.d.ts +3 -0
- package/dist/infra/datetime.d.ts.map +1 -0
- package/dist/infra/datetime.js +15 -0
- package/dist/infra/index.d.ts +7 -0
- package/dist/infra/index.d.ts.map +1 -0
- package/dist/infra/index.js +6 -0
- package/dist/infra/logger.d.ts +19 -0
- package/dist/infra/logger.d.ts.map +1 -0
- package/dist/infra/logger.js +99 -0
- package/dist/infra/outputWriter.d.ts +15 -0
- package/dist/infra/outputWriter.d.ts.map +1 -0
- package/dist/infra/outputWriter.js +35 -0
- package/dist/infra/terminal.d.ts +91 -0
- package/dist/infra/terminal.d.ts.map +1 -0
- package/dist/infra/terminal.js +189 -0
- package/dist/infra/usageTracker.d.ts +3 -0
- package/dist/infra/usageTracker.d.ts.map +1 -0
- package/dist/infra/usageTracker.js +39 -0
- package/dist/version.d.ts +2 -0
- package/dist/version.d.ts.map +1 -0
- package/dist/version.js +1 -0
- package/package.json +79 -0
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
import { readFile, stat } from "node:fs/promises";
|
|
2
|
+
import { basename, join, parse } from "node:path";
|
|
3
|
+
/**
 * Normalize an arbitrary name/version string into a token safe for use in
 * filenames: strips "@", turns separators and unsafe characters into "-",
 * collapses dash runs, and trims leading/trailing dots and dashes.
 *
 * @param {string} rawValue - Raw project name or version string.
 * @returns {string} Filename-safe token (possibly empty).
 */
export function normalizeFileToken(rawValue) {
    let token = rawValue.trim();
    token = token.replaceAll("@", "");
    token = token.replaceAll("/", "-");
    token = token.replaceAll("\\", "-");
    token = token.replaceAll(" ", "-");
    // Any remaining character outside [A-Za-z0-9_.-] becomes a dash.
    token = token.replaceAll(/[^\w.-]/g, "-");
    token = token.replaceAll(/-+/g, "-");
    return token.replaceAll(/^[.-]+|[.-]+$/g, "");
}
|
|
14
|
+
// Best-effort UTF-8 read: resolves to the file's text, or undefined when
// the file is missing or unreadable.
async function readTextFile(filePath) {
    let content;
    try {
        content = await readFile(filePath, "utf8");
    }
    catch {
        content = undefined;
    }
    return content;
}
|
|
22
|
+
/**
 * Extract a quoted string value for `key` inside `[sectionName]` from
 * minimal TOML content. Only simple `key = "value"` / `key = 'value'`
 * lines are supported; text after "#" is treated as a comment.
 * NOTE(review): a "#" inside a quoted value is also stripped — preexisting
 * limitation of the line-based comment handling.
 *
 * @param {string} content - Raw TOML text.
 * @param {string} sectionName - Exact section header (without brackets).
 * @param {string} key - Key to look up within that section.
 * @returns {string | undefined} The value, or undefined when absent.
 */
function extractTomlValue(content, sectionName, key) {
    // Escape the key so regex metacharacters (e.g. ".") match literally,
    // and build the pattern once instead of once per line.
    const escapedKey = key.replaceAll(/[.*+?^${}()|[\]\\]/g, String.raw`\$&`);
    // Require the opening and closing quotes to be the same character.
    const valuePattern = new RegExp(String.raw`^${escapedKey}\s*=\s*(?:"(.+)"|'(.+)')\s*$`);
    let isInSection = false;
    for (const rawLine of content.split("\n")) {
        const trimmedLine = rawLine.split("#")[0]?.trim() ?? "";
        if (trimmedLine === "")
            continue;
        if (trimmedLine.startsWith("[") && trimmedLine.endsWith("]")) {
            isInSection = trimmedLine === `[${sectionName}]`;
            continue;
        }
        if (!isInSection)
            continue;
        const match = trimmedLine.match(valuePattern);
        if (match)
            return match[1] ?? match[2];
    }
    return undefined;
}
|
|
41
|
+
// Normalize a raw name/version pair into file-safe tokens; returns
// undefined when the name is missing or normalizes to an empty string.
function toNormalizedMetadata(rawName, rawVersion) {
    const name = rawName ? normalizeFileToken(rawName) : "";
    if (!name)
        return undefined;
    return {
        name,
        version: rawVersion ? normalizeFileToken(rawVersion) : ""
    };
}
// Try to read name/version from a JSON manifest (package.json, vcpkg.json).
// Resolves to undefined for missing files, malformed JSON, or empty names.
async function readJsonManifestMetadata(filePath) {
    const content = await readTextFile(filePath);
    if (!content)
        return undefined;
    try {
        const manifest = JSON.parse(content);
        return toNormalizedMetadata(manifest.name, manifest.version);
    }
    catch {
        // Malformed JSON: fall through to the next manifest candidate.
        return undefined;
    }
}
/**
 * Derive a normalized project { name, version } for output-file naming by
 * probing well-known manifests in rootDirectory, in priority order:
 * package.json, vcpkg.json, pyproject.toml, Cargo.toml, go.mod,
 * CMakeLists.txt. Falls back to the directory basename, or "project" when
 * even that normalizes to nothing.
 *
 * @param {string} rootDirectory - Directory to inspect.
 * @returns {Promise<{ name: string, version?: string }>} Normalized metadata;
 *   go.mod results carry no version field (the format has none).
 */
export async function getProjectMetadata(rootDirectory) {
    for (const manifestName of ["package.json", "vcpkg.json"]) {
        const metadata = await readJsonManifestMetadata(join(rootDirectory, manifestName));
        if (metadata)
            return metadata;
    }
    const pyprojectContent = await readTextFile(join(rootDirectory, "pyproject.toml"));
    if (pyprojectContent) {
        // PEP 621 [project] takes precedence over legacy [tool.poetry].
        const metadata = toNormalizedMetadata(extractTomlValue(pyprojectContent, "project", "name") ?? extractTomlValue(pyprojectContent, "tool.poetry", "name"), extractTomlValue(pyprojectContent, "project", "version") ?? extractTomlValue(pyprojectContent, "tool.poetry", "version"));
        if (metadata)
            return metadata;
    }
    const cargoContent = await readTextFile(join(rootDirectory, "Cargo.toml"));
    if (cargoContent) {
        const metadata = toNormalizedMetadata(extractTomlValue(cargoContent, "package", "name"), extractTomlValue(cargoContent, "package", "version"));
        if (metadata)
            return metadata;
    }
    const goModContent = await readTextFile(join(rootDirectory, "go.mod"));
    if (goModContent) {
        const match = goModContent.match(/^module\s+(.+)$/m);
        if (match) {
            // Use the last path segment of the module path as the name.
            const shortName = match[1].trim().split("/").pop();
            const normalizedName = shortName ? normalizeFileToken(shortName) : "";
            if (normalizedName)
                return { name: normalizedName };
        }
    }
    const cmakeContent = await readTextFile(join(rootDirectory, "CMakeLists.txt"));
    if (cmakeContent) {
        const nameMatch = cmakeContent.match(/project\s*\(\s*([\w.-]+)/i);
        const versionMatch = cmakeContent.match(/version\s+([\d.]+)/i);
        const metadata = toNormalizedMetadata(nameMatch?.[1], versionMatch?.[1]);
        if (metadata)
            return metadata;
    }
    return {
        name: normalizeFileToken(basename(rootDirectory)) || "project"
    };
}
|
|
123
|
+
// Best-effort stat: resolves to the Stats object, or undefined when the
// path cannot be inspected (e.g. it does not exist).
async function tryStat(pathValue) {
    let stats;
    try {
        stats = await stat(pathValue);
    }
    catch {
        stats = undefined;
    }
    return stats;
}
|
|
131
|
+
// Return filePath if nothing exists there; otherwise append "-1", "-2", …
// before the extension until an unused path is found.
async function resolveUniquePath(filePath) {
    if (!(await tryStat(filePath)))
        return filePath;
    const { dir, name, ext } = parse(filePath);
    for (let counter = 1; ; counter++) {
        const candidatePath = join(dir, `${name}-${counter}${ext}`);
        if (!(await tryStat(candidatePath)))
            return candidatePath;
    }
}
|
|
145
|
+
/**
 * Resolve the final output file path for a scan.
 * No outputValue → a project-derived filename inside rootDirectory.
 * Null devices pass through untouched. A directory target (existing dir or
 * trailing separator) gets the derived filename appended. All real paths
 * are de-duplicated with a "-N" suffix.
 *
 * @param {string | undefined} outputValue - User-requested output path.
 * @param {string} rootDirectory - Scanned project root.
 * @param {string} format - Output extension ("md", "json", …).
 * @returns {Promise<string>} Path safe to write to.
 */
export async function resolveOutputPath(outputValue, rootDirectory, format) {
    const { name, version } = await getProjectMetadata(rootDirectory);
    const baseFileName = version ? `${name}-${version}.${format}` : `${name}.${format}`;
    if (!outputValue)
        return resolveUniquePath(join(rootDirectory, baseFileName));
    // Null devices (Unix and Windows spellings) are returned as-is.
    if (outputValue === "/dev/null" || outputValue === "nul")
        return outputValue;
    const endsWithSeparator = /[/\\]+$/.test(outputValue);
    const stats = await tryStat(outputValue);
    const isDirectoryTarget = endsWithSeparator || stats?.isDirectory() === true;
    const targetPath = isDirectoryTarget ? join(outputValue, baseFileName) : outputValue;
    return resolveUniquePath(targetPath);
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import type { Logger } from "$infra";
/** Construction options for IgnoreMatcher. */
type IgnoreMatcherOptions = {
    rootDirectory: string;
    excludePatterns: string[];
    useGitignore: boolean;
    logger?: Logger;
};
/**
 * Matches scan-relative paths against built-in defaults, user exclude
 * patterns, and (when enabled) .gitignore files loaded per directory.
 */
export declare class IgnoreMatcher {
    #private;
    constructor(options: IgnoreMatcherOptions);
    /** True when relativePath matches an accumulated ignore pattern. */
    ignores(relativePath: string): boolean;
    /** Folds directoryPath's .gitignore into the matcher (once, if enabled). */
    addGitignoreForDirectory(directoryPath: string): Promise<void>;
}
export {};
//# sourceMappingURL=ignoreMatcher.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ignoreMatcher.d.ts","sourceRoot":"","sources":["../../src/core/ignoreMatcher.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAGrC,KAAK,oBAAoB,GAAG;IAC3B,aAAa,EAAE,MAAM,CAAC;IACtB,eAAe,EAAE,MAAM,EAAE,CAAC;IAC1B,YAAY,EAAE,OAAO,CAAC;IACtB,MAAM,CAAC,EAAE,MAAM,CAAC;CAChB,CAAC;AAmEF,qBAAa,aAAa;;gBAOb,OAAO,EAAE,oBAAoB;IASlC,OAAO,CAAC,YAAY,EAAE,MAAM,GAAG,OAAO;IAMhC,wBAAwB,CAAC,aAAa,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;CA6B3E"}
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
import { constants } from "node:fs";
|
|
2
|
+
import { access, readFile } from "node:fs/promises";
|
|
3
|
+
import { join, relative, sep } from "node:path";
|
|
4
|
+
import ignore from "ignore";
|
|
5
|
+
// Baseline exclusions applied to every scan before user patterns:
// tool config, VCS metadata, OS artifacts, dependency trees, env files,
// and package-manager lockfiles.
const defaultIgnorePatterns = [
    ".fln.json",
    ".git",
    ".DS_Store",
    "Thumbs.db",
    "node_modules",
    ".env",
    "package-lock.json",
    "bun.lock",
    "yarn.lock",
    "pnpm-lock.yaml"
];
|
|
17
|
+
// Convert a platform-relative path to forward slashes and strip a leading
// "./" so it can be compared against ignore patterns.
function normalizeRelativePath(relativePath) {
    const slashPath = relativePath.split(sep).join("/");
    if (!slashPath.startsWith("./"))
        return slashPath;
    return slashPath.slice(2);
}
|
|
23
|
+
// Translate one .gitignore line into a pattern rooted at the scan root.
// Gitignore rules are relative to their own directory, so patterns from
// nested .gitignore files are prefixed with that directory; bare names
// (no interior "/") match at any depth below it via "**/".
// Returns undefined for blank lines and comments.
function convertGitignorePattern(pattern, relativeDirectory) {
    const trimmed = pattern.trim();
    if (trimmed === "" || trimmed.startsWith("#"))
        return undefined;
    // "\!" and "\#" escape a literal leading "!" or "#" (gitignore syntax).
    const isEscaped = trimmed.startsWith("\\") && (trimmed[1] === "!" || trimmed[1] === "#");
    const rawPattern = isEscaped ? trimmed.slice(1) : trimmed;
    const isNegated = !isEscaped && rawPattern.startsWith("!");
    const patternBody = isNegated ? rawPattern.slice(1) : rawPattern;
    const normalizedDirectory = normalizeRelativePath(relativeDirectory);
    const prefix = normalizedDirectory === "" ? "" : `${normalizedDirectory}/`;
    if (patternBody === "")
        return undefined;
    // A trailing "/" (directory-only marker) is ignored when deciding
    // whether the pattern is anchored by an interior slash.
    const trimmedTrailingSlash = patternBody.endsWith("/") ? patternBody.slice(0, -1) : patternBody;
    const hasSlash = trimmedTrailingSlash.includes("/");
    let convertedPattern;
    // Leading "/" anchors to the .gitignore's own directory.
    if (patternBody.startsWith("/"))
        convertedPattern = `${prefix}${patternBody.slice(1)}`;
    // An interior slash also anchors relative to that directory.
    else if (hasSlash)
        convertedPattern = `${prefix}${patternBody}`;
    // No slash: match at any depth beneath the directory.
    else
        convertedPattern = `${prefix}**/${patternBody}`;
    return isNegated ? `!${convertedPattern}` : convertedPattern;
}
|
|
46
|
+
// Rewrite a user-supplied exclude pattern into the matcher's rooted form:
// "/x" anchors at the scan root, patterns with an interior slash stay
// as-is, and bare names match at any depth via a "**/" prefix. A leading
// "!" (negation) is preserved.
function normalizeExcludePattern(pattern) {
    const trimmed = pattern.trim();
    const negationMark = trimmed.startsWith("!") ? "!" : "";
    const body = negationMark === "!" ? trimmed.slice(1) : trimmed;
    if (body.startsWith("/")) {
        // Root-anchored: drop the slash, keep the rest verbatim.
        return `${negationMark}${body.slice(1)}`;
    }
    const withoutTrailingSlash = body.endsWith("/") ? body.slice(0, -1) : body;
    const matchesAnyDepth = !withoutTrailingSlash.includes("/");
    return `${negationMark}${matchesAnyDepth ? `**/${body}` : body}`;
}
|
|
56
|
+
/**
 * Decides which scan-relative paths are excluded. Combines the built-in
 * defaults, the user's exclude patterns, and — when enabled — .gitignore
 * files that are folded in lazily, once per directory.
 */
export class IgnoreMatcher {
    #root;
    #gitignoreEnabled;
    #log;
    #visitedDirectories = new Set();
    #ignoreEngine = ignore();
    constructor(options) {
        this.#root = options.rootDirectory;
        this.#gitignoreEnabled = options.useGitignore;
        this.#log = options.logger;
        const normalize = (pattern) => normalizeExcludePattern(pattern);
        this.#ignoreEngine.add(defaultIgnorePatterns.map(normalize));
        this.#ignoreEngine.add(options.excludePatterns.map(normalize));
    }
    // True when relativePath matches any accumulated ignore pattern.
    // The empty path (the root itself) is never ignored.
    ignores(relativePath) {
        const normalizedPath = normalizeRelativePath(relativePath);
        if (normalizedPath === "")
            return false;
        return this.#ignoreEngine.ignores(normalizedPath);
    }
    // Fold the directory's .gitignore (if present) into the matcher, once.
    async addGitignoreForDirectory(directoryPath) {
        if (!this.#gitignoreEnabled || this.#visitedDirectories.has(directoryPath))
            return;
        this.#visitedDirectories.add(directoryPath);
        const gitignorePath = join(directoryPath, ".gitignore");
        const relativeDirectory = relative(this.#root, directoryPath);
        try {
            await access(gitignorePath, constants.F_OK);
        }
        catch {
            // No .gitignore in this directory; nothing to add.
            return;
        }
        const fileContent = await readFile(gitignorePath, "utf8");
        const patterns = [];
        for (const line of fileContent.split("\n")) {
            const converted = convertGitignorePattern(line, relativeDirectory);
            if (converted !== undefined)
                patterns.push(converted);
        }
        if (patterns.length === 0)
            return;
        this.#ignoreEngine.add(patterns);
        this.#log?.debug(`Loaded ${patterns.length} patterns from ${normalizeRelativePath(relativeDirectory) || "."}/.gitignore`);
    }
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/core/index.ts"],"names":[],"mappings":"AAAA,cAAc,iBAAiB,CAAC;AAChC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,cAAc,CAAC;AAC7B,cAAc,YAAY,CAAC;AAC3B,cAAc,QAAQ,CAAC;AACvB,cAAc,kBAAkB,CAAC;AACjC,cAAc,SAAS,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"renderOutput.d.ts","sourceRoot":"","sources":["../../src/core/renderOutput.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,SAAS,CAAC;AAGzC,OAAO,KAAK,EAAY,UAAU,EAAE,MAAM,SAAS,CAAC;AAsQpD,wBAAsB,WAAW,CAAC,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC,CAKtF"}
|
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
import { createReadStream } from "node:fs";
|
|
2
|
+
import { readFile } from "node:fs/promises";
|
|
3
|
+
import { extname, join } from "node:path";
|
|
4
|
+
import { createOutputWriter, formatDateTime } from "$infra";
|
|
5
|
+
import { VERSION } from "$version";
|
|
6
|
+
import { renderTree } from "./renderTree";
|
|
7
|
+
import { formatByteSize } from "./size";
|
|
8
|
+
// Map a filename to the language tag used on its fenced code block,
// based on the file extension; extensionless files fall back to "txt".
function getLanguageFromFilename(fileName) {
    const extension = extname(fileName).slice(1);
    return extension || "txt";
}
|
|
12
|
+
// Find the longest run of consecutive backticks in a text file, streaming
// so large files are never fully buffered. A run may span chunk
// boundaries because the counter persists across chunks.
async function findMaxBacktickSequenceInFile(filePath) {
    const readStream = createReadStream(filePath, { encoding: "utf8" });
    let maxLength = 0;
    let runLength = 0;
    for await (const chunk of readStream) {
        const text = typeof chunk === "string" ? chunk : chunk.toString("utf8");
        for (const character of text) {
            if (character === "`") {
                runLength += 1;
                if (runLength > maxLength)
                    maxLength = runLength;
            }
            else {
                runLength = 0;
            }
        }
    }
    return maxLength;
}
|
|
39
|
+
// Stream a file's text into the writer, guaranteeing the emitted content
// ends with a newline (empty files emit nothing).
async function writeMarkdownContent(writer, filePath) {
    const readStream = createReadStream(filePath, { encoding: "utf8" });
    let endsWithNewline = true;
    for await (const chunk of readStream) {
        const text = typeof chunk === "string" ? chunk : chunk.toString("utf8");
        await writer.write(text);
        if (text.length > 0)
            endsWithNewline = text.endsWith("\n");
    }
    if (!endsWithNewline)
        await writer.write("\n");
}
|
|
51
|
+
// Depth-first generator over every file node in the tree, in tree order.
// Directories themselves are not yielded, only their file descendants.
function* iterateFileNodes(node) {
    if (node.type === "file")
        yield node;
    for (const child of node.children ?? [])
        yield* iterateFileNodes(child);
}
|
|
58
|
+
// Shallow-copy the tree while dropping every node (and its subtree) that
// carries a skipReason. Returns undefined when the node itself is skipped.
function filterSkippedNodes(node) {
    if (node.skipReason)
        return undefined;
    const copy = { ...node };
    if (node.children && node.children.length > 0) {
        const keptChildren = [];
        for (const child of node.children) {
            const filtered = filterSkippedNodes(child);
            if (filtered !== undefined)
                keptChildren.push(filtered);
        }
        copy.children = keptChildren;
    }
    return copy;
}
|
|
68
|
+
/**
 * Write the scan result as a Markdown snapshot to config.outputFile.
 * Layout: version comment + title + generation metadata, then optional
 * banner, directory tree, per-file contents, and footer. On success the
 * size/token figures returned by writer.close() are stored back into
 * result.stats.
 *
 * @param {object} result - Scan result (root tree, stats, projectName).
 * @param {object} config - Resolved render configuration.
 * @throws {Error} When the root node itself was skipped.
 */
async function writeMarkdown(result, config) {
    const writer = await createOutputWriter(config.outputFile, config.maximumTotalSizeBytes);
    const outputRoot = filterSkippedNodes(result.root);
    if (!outputRoot)
        throw new Error("Root directory was skipped.");
    try {
        await writer.writeLine(`<!-- 🥞 fln ${VERSION} -->`);
        await writer.writeLine("");
        await writer.writeLine(`# Codebase Snapshot: ${result.projectName}`);
        await writer.writeLine("");
        // generatedDate override allows reproducible output (e.g. tests).
        await writer.writeLine(`Generated: ${config.generatedDate ?? formatDateTime()}`);
        await writer.writeLine(`Files: ${result.stats.files} | Directories: ${result.stats.directories}`);
        await writer.writeLine("");
        await writer.writeLine("---");
        await writer.writeLine("");
        if (config.banner) {
            await writer.writeLine(config.banner);
            await writer.writeLine("");
        }
        if (config.includeTree) {
            await writer.writeLine("## Directory Tree");
            await writer.writeLine("```text");
            await writer.write(renderTree(outputRoot));
            await writer.writeLine("```");
            await writer.writeLine("");
            await writer.writeLine("---");
            await writer.writeLine("");
        }
        if (config.includeContents)
            await writeMarkdownFiles(outputRoot, writer, config);
        if (config.footer) {
            await writer.writeLine("");
            await writer.writeLine(config.footer);
        }
        // close() reports the final output size and token count; record them.
        ({
            sizeBytes: result.stats.outputSizeBytes,
            tokenCount: result.stats.outputTokenCount
        } = await writer.close());
    }
    catch (error) {
        // Release the writer before propagating the failure.
        await writer.close();
        throw error;
    }
}
|
|
112
|
+
/**
 * Emit the "## Source Files" section: one fenced code block per file node.
 * The fence grows beyond ``` when the file itself contains a run of 3+
 * backticks, so embedded Markdown cannot terminate the block early.
 *
 * @param {object} rootNode - Filtered tree to traverse.
 * @param {object} outputWriter - Destination writer (write/writeLine).
 * @param {object} renderConfig - Provides rootDirectory for file reads.
 */
async function writeMarkdownFiles(rootNode, outputWriter, renderConfig) {
    await outputWriter.writeLine("## Source Files");
    await outputWriter.writeLine("");
    const fileNodes = Array.from(iterateFileNodes(rootNode));
    for (let i = 0; i < fileNodes.length; i++) {
        const node = fileNodes[i];
        const language = getLanguageFromFilename(node.name);
        const isLastFile = i === fileNodes.length - 1;
        const filePath = join(renderConfig.rootDirectory, node.path);
        let fenceLength = 3;
        // Only text files are scanned; a scan failure keeps the default fence.
        if (!node.isBinary)
            try {
                const maxBackticks = await findMaxBacktickSequenceInFile(filePath);
                if (maxBackticks >= 3)
                    fenceLength = maxBackticks + 1;
            }
            catch { }
        const fence = "`".repeat(fenceLength);
        await outputWriter.writeLine(`### ${node.path}`);
        await outputWriter.writeLine(`${fence}${language}`);
        if (node.isBinary)
            // Binary files get a size placeholder instead of contents.
            await outputWriter.writeLine(`[BINARY FILE: ${formatByteSize(node.size)}]`);
        else
            try {
                await writeMarkdownContent(outputWriter, filePath);
            }
            catch {
                // Unreadable file: mark it rather than aborting the run.
                await outputWriter.writeLine("[READ ERROR]");
            }
        await outputWriter.writeLine(fence);
        if (!isLastFile)
            await outputWriter.writeLine("");
    }
}
|
|
146
|
+
/**
 * Stream the scan result as a single JSON document to config.outputFile.
 * Top-level keys: version, generated, projectName, rootDirectory, stats,
 * options, tree, and (when config.includeContents) files. Contents are
 * read one file at a time so memory stays bounded. On success the
 * size/token figures returned by writer.close() are stored into
 * result.stats.
 *
 * @param {object} result - Scan result (root tree, stats, projectName).
 * @param {object} config - Resolved render configuration.
 * @throws {Error} When the root node itself was skipped.
 */
async function writeJson(result, config) {
    const writer = await createOutputWriter(config.outputFile, config.maximumTotalSizeBytes);
    const outputRoot = filterSkippedNodes(result.root);
    if (!outputRoot)
        throw new Error("Root directory was skipped.");
    try {
        await writer.write("{");
        await writer.write(`"version":${JSON.stringify(VERSION)}`);
        await writer.write(`,"generated":${JSON.stringify(config.generatedDate ?? formatDateTime())}`);
        await writer.write(`,"projectName":${JSON.stringify(result.projectName)}`);
        await writer.write(`,"rootDirectory":${JSON.stringify(config.rootDirectory)}`);
        // Stats are serialized exactly once. (A second, redundant "stats"
        // key used to be written after "tree", duplicating this value.)
        // Note they are captured before writer.close(), so output size and
        // token count are whatever result.stats held at this point.
        await writer.write(`,"stats":${JSON.stringify(result.stats)}`);
        await writer.write(`,"options":${JSON.stringify({
            includeTree: config.includeTree,
            includeContents: config.includeContents,
            format: config.format,
            maximumFileSizeBytes: config.maximumFileSizeBytes,
            maximumTotalSizeBytes: config.maximumTotalSizeBytes,
            includeHidden: config.includeHidden,
            useGitignore: config.useGitignore,
            excludePatterns: config.excludePatterns,
            includePatterns: config.includePatterns,
            followSymlinks: config.followSymlinks,
            banner: config.banner,
            footer: config.footer
        })}`);
        await writer.write(`,"tree":${JSON.stringify(outputRoot)}`);
        if (config.includeContents) {
            await writer.write(",\"files\":[");
            let isFirst = true;
            for (const node of iterateFileNodes(outputRoot)) {
                if (!isFirst)
                    await writer.write(",");
                isFirst = false;
                await writer.write("{");
                await writer.write(`"path":${JSON.stringify(node.path)}`);
                await writer.write(`,"language":${JSON.stringify(getLanguageFromFilename(node.name))}`);
                await writer.write(`,"isBinary":${JSON.stringify(Boolean(node.isBinary))}`);
                if (node.skipReason)
                    await writer.write(`,"skipReason":${JSON.stringify(node.skipReason)}`);
                // Binary or skipped files carry null content.
                if (node.isBinary || node.skipReason)
                    await writer.write(",\"content\":null");
                else
                    try {
                        const filePath = join(config.rootDirectory, node.path);
                        const content = await readFile(filePath, "utf8");
                        await writer.write(`,"content":${JSON.stringify(content)}`);
                    }
                    catch {
                        // Unreadable file: mark it rather than aborting the run.
                        await writer.write(",\"skipReason\":\"readError\",\"content\":null");
                    }
                await writer.write("}");
            }
            await writer.write("]");
        }
        await writer.write("}");
        ({
            sizeBytes: result.stats.outputSizeBytes,
            tokenCount: result.stats.outputTokenCount
        } = await writer.close());
    }
    catch (error) {
        // Release the writer before propagating the failure.
        await writer.close();
        throw error;
    }
}
|
|
213
|
+
/**
 * Dispatch the render to the JSON or Markdown writer based on
 * config.format ("json" selects JSON; anything else gets Markdown).
 *
 * @param {object} result - Scan result to serialize.
 * @param {object} config - Resolved render configuration.
 * @returns {Promise<void>}
 */
export async function writeOutput(result, config) {
    const renderer = config.format === "json" ? writeJson : writeMarkdown;
    await renderer(result, config);
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"renderTree.d.ts","sourceRoot":"","sources":["../../src/core/renderTree.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,SAAS,CAAC;AAUxC,wBAAgB,UAAU,CAAC,IAAI,EAAE,QAAQ,GAAG,MAAM,CAOjD"}
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
// Decoration appended after a node's name in the rendered tree: symlinks
// show their target, or "[unknown]" when no target was resolved.
function formatNodeSuffix(node) {
    if (node.type !== "symlink")
        return "";
    // Truthiness check kept deliberately: an empty target also reads as unknown.
    const target = node.target ? node.target : "[unknown]";
    return ` → ${target}`;
}
|
|
6
|
+
/**
 * Render the scanned tree as an ASCII directory listing. The root node
 * itself is not drawn; only its descendants are. Returns "" for an empty
 * or childless root.
 *
 * @param {object} root - Tree root node.
 * @returns {string} Multi-line tree text (each line newline-terminated).
 */
export function renderTree(root) {
    const children = root.children ?? [];
    let output = "";
    for (let index = 0; index < children.length; index++)
        output += renderTreeNode(children[index], "", index === children.length - 1);
    return output;
}
|
|
13
|
+
// Render one node and, recursively, its children. `prefix` carries the
// accumulated indentation; `isLast` selects the final-sibling connector
// and controls whether the child prefix continues the vertical rule.
function renderTreeNode(node, prefix, isLast) {
    const connector = isLast ? "└── " : "├── ";
    let output = `${prefix}${connector}${node.name}${formatNodeSuffix(node)}\n`;
    const children = node.children ?? [];
    if (children.length === 0)
        return output;
    const childPrefix = `${prefix}${isLast ? " " : "│ "}`;
    for (let index = 0; index < children.length; index++)
        output += renderTreeNode(children[index], childPrefix, index === children.length - 1);
    return output;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"scanTree.d.ts","sourceRoot":"","sources":["../../src/core/scanTree.ts"],"names":[],"mappings":"AAYA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAErC,OAAO,KAAK,EAEX,WAAW,EACX,UAAU,EAGV,MAAM,SAAS,CAAC;AAgHjB,wBAAsB,QAAQ,CAAC,OAAO,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CA4SxF"}
|