flatten-tool 1.4.0 → 1.6.0
- package/README.md +30 -2
- package/index.ts +153 -66
- package/package.json +2 -2
package/README.md
CHANGED
@@ -2,7 +2,7 @@
 
 [](https://www.npmjs.com/package/flatten-tool)
 
-A CLI utility to flatten directory structures.
+A CLI utility to flatten directory structures, with perfect GitHub Flavored Markdown compatibility.
 
 [](https://asciinema.org/a/ThswNC1vrdlK0wdD)
 
@@ -10,13 +10,31 @@ A CLI utility to flatten directory structures.
 
 Requires [Bun](https://bun.sh) runtime (v1.1+).
 
+### Via npm
+
 ```bash
 npm install -g flatten-tool
 ```
 
+### For Development
+
+Clone the repository and install dependencies:
+
+```bash
+git clone https://github.com/MBanucu/flatten-tool.git
+cd flatten-tool
+bun install
+```
+
+Run directly with Bun:
+
+```bash
+bun run index.ts [args]
+```
+
 ## Usage
 
-By default, the tool merges all file contents into a single Markdown file, starting with a project file tree for navigation, followed by each file's content under a header with its relative path, in a code block with appropriate language highlighting based on the file extension. Ignores and filters are applied as usual.
+By default, the tool merges all file contents into a single Markdown file, starting with a project file tree for navigation, followed by each file's content under a header with its full relative path, in a code block with appropriate language highlighting based on the file extension. The tree includes clickable links to file sections using GitHub-compatible anchors. Ignores and filters are applied as usual.
 
 The `<source>` argument is optional and defaults to the current directory (`.`). The `<target>` argument is also optional and defaults to `flattened.md` (or `flattened/` when using `--directory`).
 
@@ -113,6 +131,16 @@ This project uses Bun for runtime, TypeScript for type safety, and follows the g
 
 ## Changelog
 
+### v1.6.0
+- Perfect GitHub compatibility: anchors now exactly match GitHub Flavored Markdown auto-generation using github-slugger.
+- Cleaner directory headers: removed trailing `/` for better readability.
+- Precomputed anchors: ensures no mismatches even with slug collisions.
+- Removed unused treeify dependency.
+
+### v1.5.0
+- Improved navigation: project file tree is now a clickable nested Markdown list with links to each file's content section using standard markdown anchors.
+- Simplified file headers: removed custom anchors from section headers.
+
 ### v1.4.0
 - Added project file tree to the beginning of merged Markdown output for better navigation.
 
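The anchor scheme described in the new README text and the v1.6.0 changelog can be sketched directly with github-slugger, the library this release adds. The snippet below is illustrative only and is not code from the package; the example paths are hypothetical.

```typescript
// Sketch of how GitHub Flavored Markdown anchors are derived with github-slugger.
// The paths below are hypothetical examples, not files from flatten-tool.
import GithubSlugger from 'github-slugger';

const slugger = new GithubSlugger();

// GFM slugging lowercases the text and drops characters such as '/' and '.',
// so a tree entry can link to the header "# src/index.ts" as (#srcindexts).
console.log(slugger.slug('src/index.ts')); // "srcindexts"

// A later header that produces the same base slug gets a numeric suffix.
// Precomputing all slugs in document order keeps the tree links and the
// headers in agreement even when such collisions occur.
console.log(slugger.slug('src/index.ts')); // "srcindexts-1"
```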
package/index.ts
CHANGED
@@ -7,7 +7,7 @@ import yargs from 'yargs';
 import { hideBin } from 'yargs/helpers';
 import { globby } from 'globby';
 import pkg from './package.json' assert { type: 'json' };
-import treeify from 'treeify';
+import GithubSlugger from 'github-slugger';
 
 function escapePathComponent(component: string): string {
   return component.replace(/_/g, '__');
@@ -15,54 +15,22 @@ function escapePathComponent(component: string): string {
 
 function buildTreeObject(relPaths: string[]): any {
   const tree: any = {};
-
-  relPaths.forEach((path) => {
-    // Normalize paths
-    path = path.replace(/\\/g, '/');
-    if (path.startsWith('./')) path = path.slice(2);
-
+  for (const path of relPaths) {
     const parts = path.split('/');
     let node = tree;
-
-    parts.
+    const currentParts: string[] = [];
+    for (const [index, part] of parts.entries()) {
+      currentParts.push(part);
       const isDir = index < parts.length - 1;
       const key = isDir ? `${part}/` : part;
-
-
-        node[key] = isDir ? {} : null;
+      if (node[key] === undefined) {
+        node[key] = isDir ? {} : currentParts.join('/');
       }
-
       if (isDir) {
        node = node[key];
       }
-    }
-  });
-
-  // Recursively sort: directories first, then files, case-insensitive
-  function sortNode(node: any): void {
-    if (node === null || typeof node !== 'object') return;
-
-    const entries = Object.entries(node);
-    entries.sort(([a], [b]) => {
-      const aIsDir = a.endsWith('/');
-      const bIsDir = b.endsWith('/');
-      if (aIsDir !== bIsDir) return aIsDir ? -1 : 1; // dirs before files
-      return a.toLowerCase().localeCompare(b.toLowerCase());
-    });
-
-    // Rebuild node in sorted order (preserves insertion order for rendering)
-    const sorted: any = {};
-    entries.forEach(([key, value]) => {
-      sorted[key] = value;
-      sortNode(value);
-    });
-
-    Object.keys(node).forEach((k) => delete node[k]);
-    Object.assign(node, sorted);
+    }
   }
-
-  sortNode(tree);
-
   return tree;
 }
 
@@ -127,46 +95,165 @@ export async function flattenDirectory(
     if (err.code !== 'ENOENT') throw err;
   }
 
-  const
-
-
-
-  // Sort paths for consistent insertion
-  relPaths.sort((a, b) => a.toLowerCase().localeCompare(b.toLowerCase()));
+  const fileEntries = files.map(srcPath => ({
+    srcPath,
+    relPath: relative(absSource, srcPath).replace(/\\/g, '/')
+  }));
 
+  const relPaths = fileEntries.map(e => e.relPath);
   const treeObj = buildTreeObject(relPaths);
 
-  //
-  const
+  // Map for quick lookup of srcPath by relPath
+  const pathMap = new Map<string, string>();
+  fileEntries.forEach(({ srcPath, relPath }) => {
+    pathMap.set(relPath, srcPath);
+  });
+
+  // === NEW: Precompute anchors in document order ===
+  interface Section {
+    path: string;
+    headerText: string;
+  }
+
+  const sections: Section[] = [];
+
+  function collectSections(node: any, currentPath: string): void {
+    const dirs: { name: string; child: any }[] = [];
+    const files: { relPath: string }[] = [];
+
+    for (const [key, value] of Object.entries(node) as [string, any][]) {
+      if (key.endsWith('/')) {
+        const name = key.slice(0, -1);
+        dirs.push({ name, child: value });
+      } else {
+        files.push({ relPath: value as string });
+      }
+    }
+
+    dirs.sort((a, b) => a.name.toLowerCase().localeCompare(b.name.toLowerCase()));
+    files.sort((a, b) => a.relPath.toLowerCase().localeCompare(b.relPath.toLowerCase()));
+
+    for (const dir of dirs) {
+      const newPath = currentPath ? `${currentPath}/${dir.name}` : dir.name;
+      collectSections(dir.child, newPath);
+    }
+
+    if (currentPath) {
+      sections.push({ path: currentPath, headerText: currentPath }); // no trailing /
+    }
+
+    for (const file of files) {
+      sections.push({ path: file.relPath, headerText: file.relPath });
+    }
+  }
+
+  collectSections(treeObj, '');
+
+  const anchorMap = new Map<string, string>();
+  const anchorSlugger = new GithubSlugger();
+  for (const sec of sections) {
+    anchorMap.set(sec.path, anchorSlugger.slug(sec.headerText));
+  }
+  // === END NEW ===
+
+  // Updated renderMarkdownTree to use precomputed anchors
+  function renderMarkdownTree(
+    node: any,
+    depth: number = 0,
+    prefix: string = '',
+    anchorMap: Map<string, string>
+  ): string {
+    let result = '';
+    const indent = '  '.repeat(depth);
+    const entries: [string, any][] = Object.entries(node);
+
+    entries.sort(([a], [b]) => {
+      const aDir = a.endsWith('/');
+      const bDir = b.endsWith('/');
+      if (aDir !== bDir) return aDir ? -1 : 1;
+      return a.toLowerCase().localeCompare(b.toLowerCase());
+    });
+
+    for (const [key, value] of entries) {
+      const isDir = key.endsWith('/');
+      const name = isDir ? key.slice(0, -1) : key;
+      const display = isDir ? name + '/' : name;
+      const pathHere = prefix ? `${prefix}/${name}` : name;
+      const anchor = anchorMap.get(pathHere) ?? '';
+      result += `${indent}- [${display}](#${anchor})\n`;
+      if (isDir) {
+        result += renderMarkdownTree(value, depth + 1, pathHere, anchorMap);
+      }
+    }
+    return result;
+  }
+
+  // Render global tree with correct anchors
+  let treeMarkdown = "# Project File Tree\n\n- .\n";
+  treeMarkdown += renderMarkdownTree(treeObj, 1, '', anchorMap);
+  treeMarkdown += "\n\n";
 
   const writeStream = createWriteStream(absTarget);
   writeStream.setMaxListeners(0);
+  writeStream.write(treeMarkdown);
+
+  async function writeContentRecursive(
+    node: any,
+    currentPath: string,
+    writeStream: import('node:fs').WriteStream,
+    pathMap: Map<string, string>
+  ): Promise<void> {
+    const dirs: { name: string; child: any }[] = [];
+    const files: { name: string; relPath: string; srcPath: string }[] = [];
+
+    for (const [key, value] of Object.entries(node) as [string, any][]) {
+      if (key.endsWith('/')) {
+        const name = key.slice(0, -1);
+        dirs.push({ name, child: value });
+      } else {
+        const relPath = value as string;
+        const srcPath = pathMap.get(relPath)!;
+        const name = key;
+        files.push({ name, relPath, srcPath });
+      }
+    }
 
-
-
-  writeStream.write(treeify.asTree(rootTree));
-  writeStream.write("```\n\n");
+    dirs.sort((a, b) => a.name.toLowerCase().localeCompare(b.name.toLowerCase()));
+    files.sort((a, b) => a.name.toLowerCase().localeCompare(b.name.toLowerCase()));
 
-
-
-
-
-
+    for (const dir of dirs) {
+      const newPath = currentPath ? `${currentPath}/${dir.name}` : dir.name;
+      await writeContentRecursive(dir.child, newPath, writeStream, pathMap);
+    }
+
+    // Directory section (only for non-root) — no trailing /
+    if (currentPath) {
+      writeStream.write(`# ${currentPath}\n\n`);
+      writeStream.write(`File Tree\n\n`);
+      writeStream.write(`- .\n`);
+      writeStream.write(renderMarkdownTree(node, 1, currentPath, anchorMap));
+      writeStream.write('\n');
+    }
 
-
-
+    for (const file of files) {
+      writeStream.write(`# ${file.relPath}\n\n`);
 
-
-
+      let ext = extname(file.srcPath).slice(1) || 'text';
+      const lang = ext;
+      const isMd = ['md', 'markdown'].includes(ext.toLowerCase());
+      const ticks = isMd ? '````' : '```';
 
-
-    await pipeline(readStream, writeStream, { end: false });
+      writeStream.write(`${ticks}${lang}\n`);
 
-
-
+      const readStream = createReadStream(file.srcPath, { encoding: 'utf8' });
+      await pipeline(readStream, writeStream, { end: false });
+
+      writeStream.write(`\n${ticks}\n\n`);
+    }
   }
 
-
+  await writeContentRecursive(treeObj, '', writeStream, pathMap);
+
   writeStream.end();
   await finished(writeStream);
 
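One detail worth noting in the rewritten write loop above is the fence-width choice: Markdown sources are wrapped in four backticks, presumably so that any triple-backtick blocks they contain cannot close the wrapper early. Below is a minimal standalone sketch of that rule; `pickFence` is a hypothetical helper name used only for illustration and is not part of the package API.

````typescript
// Standalone sketch of the fence-selection rule used when each file's content
// is written into the merged Markdown output. `pickFence` is illustrative only.
import { extname } from 'node:path';

function pickFence(filePath: string): { lang: string; ticks: string } {
  const ext = extname(filePath).slice(1) || 'text';
  const isMd = ['md', 'markdown'].includes(ext.toLowerCase());
  // Four backticks can safely enclose three-backtick blocks found in Markdown files.
  return { lang: ext, ticks: isMd ? '````' : '```' };
}

console.log(pickFence('README.md')); // { lang: 'md', ticks: '````' }
console.log(pickFence('index.ts'));  // { lang: 'ts', ticks: '```' }
````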
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "flatten-tool",
-  "version": "1.4.0",
+  "version": "1.6.0",
   "description": "CLI tool to flatten directory structures: merge files into a single Markdown file (default) or copy/move to a flat directory with escaped filenames. Respects .gitignore, supports move/overwrite, and more.",
   "module": "index.ts",
   "type": "module",
@@ -46,10 +46,10 @@
     "typescript": "^5.9.3"
   },
   "dependencies": {
+    "github-slugger": "^2.0.0",
     "globby": "^16.1.0",
     "ignore": "^7.0.5",
     "minimatch": "^10.1.2",
-    "treeify": "^1.1.0",
     "yargs": "^18.0.0"
   }
 }
|