flatten-tool 1.5.0 → 1.6.1
This diff reflects the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- package/README.md +38 -2
- package/index.ts +141 -54
- package/package.json +2 -2
package/README.md
CHANGED
@@ -2,7 +2,7 @@
 
 [](https://www.npmjs.com/package/flatten-tool)
 
-A CLI utility to flatten directory structures.
+A CLI utility to flatten directory structures, with perfect GitHub Flavored Markdown compatibility.
 
 [](https://asciinema.org/a/ThswNC1vrdlK0wdD)
 
@@ -10,13 +10,40 @@ A CLI utility to flatten directory structures.
 
 Requires [Bun](https://bun.sh) runtime (v1.1+).
 
+### Via npm
+
 ```bash
 npm install -g flatten-tool
 ```
 
+### Run without installation
+
+You can run flatten-tool directly without installing:
+
+```bash
+npx flatten-tool [args]
+bunx flatten-tool [args]
+```
+
+### For Development
+
+Clone the repository and install dependencies:
+
+```bash
+git clone https://github.com/MBanucu/flatten-tool.git
+cd flatten-tool
+bun install
+```
+
+Run directly with Bun:
+
+```bash
+bun run index.ts [args]
+```
+
 ## Usage
 
-By default, the tool merges all file contents into a single Markdown file, starting with a project file tree for navigation, followed by each file's content under a header with its relative path, in a code block with appropriate language highlighting based on the file extension. Ignores and filters are applied as usual.
+By default, the tool merges all file contents into a single Markdown file, starting with a project file tree for navigation, followed by each file's content under a header with its full relative path, in a code block with appropriate language highlighting based on the file extension. The tree includes clickable links to file sections using GitHub-compatible anchors. Ignores and filters are applied as usual.
 
 The `<source>` argument is optional and defaults to the current directory (`.`). The `<target>` argument is also optional and defaults to `flattened.md` (or `flattened/` when using `--directory`).
 
@@ -113,6 +140,15 @@ This project uses Bun for runtime, TypeScript for type safety, and follows the g
 
 ## Changelog
 
+### v1.6.1
+- Added instructions for running flatten-tool directly with npx and bunx.
+
+### v1.6.0
+- Perfect GitHub compatibility: anchors now exactly match GitHub Flavored Markdown auto-generation using github-slugger.
+- Cleaner directory headers: removed trailing `/` for better readability.
+- Precomputed anchors: ensures no mismatches even with slug collisions.
+- Removed unused treeify dependency.
+
 ### v1.5.0
 - Improved navigation: project file tree is now a clickable nested Markdown list with links to each file's content section using standard markdown anchors.
 - Simplified file headers: removed custom anchors from section headers.
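Note: the v1.6.0 changelog entry above hinges on github-slugger reproducing GitHub Flavored Markdown's auto-generated heading anchors. The following is a minimal sketch of that behavior; the heading text and expected slugs shown in the comments are illustrative and not taken from the package.

```ts
import GithubSlugger from 'github-slugger';

// One slugger instance per generated document, mirroring how GitHub
// assigns heading IDs from top to bottom.
const slugger = new GithubSlugger();

// A heading like "# src/index.ts" gets the anchor #srcindexts under GFM
// rules (lowercased, punctuation such as "/" and "." dropped).
console.log(slugger.slug('src/index.ts')); // "srcindexts"

// Repeating the same heading text yields a deduplicated slug, as GitHub does.
console.log(slugger.slug('src/index.ts')); // "srcindexts-1"
```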
package/index.ts
CHANGED
@@ -7,19 +7,12 @@ import yargs from 'yargs';
 import { hideBin } from 'yargs/helpers';
 import { globby } from 'globby';
 import pkg from './package.json' assert { type: 'json' };
+import GithubSlugger from 'github-slugger';
 
 function escapePathComponent(component: string): string {
   return component.replace(/_/g, '__');
 }
 
-function generateMarkdownAnchor(text: string): string {
-  return text
-    .toLowerCase()
-    .replace(/[^\w\s-]/g, '') // Remove punctuation except hyphens and underscores
-    .replace(/\s+/g, '-') // Replace spaces with hyphens
-    .replace(/^-+|-+$/g, ''); // Remove leading/trailing hyphens
-}
-
 function buildTreeObject(relPaths: string[]): any {
   const tree: any = {};
   for (const path of relPaths) {
@@ -41,33 +34,6 @@ function buildTreeObject(relPaths: string[]): any {
   return tree;
 }
 
-function renderMarkdownTree(node: any, depth: number): string {
-  let result = '';
-  const indent = ' '.repeat(depth);
-  const entries: [string, any][] = Object.entries(node);
-
-  entries.sort(([a], [b]) => {
-    const aDir = a.endsWith('/');
-    const bDir = b.endsWith('/');
-    if (aDir !== bDir) return aDir ? -1 : 1;
-    return a.toLowerCase().localeCompare(b.toLowerCase());
-  });
-
-  for (const [key, value] of entries) {
-    const isDir = key.endsWith('/');
-    const display = isDir ? key.slice(0, -1) + '/' : key;
-    if (isDir) {
-      result += `${indent}- ${display}\n`;
-      result += renderMarkdownTree(value, depth + 1);
-    } else {
-      const fullPath = value as string;
-      const anchor = generateMarkdownAnchor(fullPath);
-      result += `${indent}- [${display}](#${anchor})\n`;
-    }
-  }
-  return result;
-}
-
 async function removeEmptyDirs(dir: string, root?: string): Promise<void> {
   const entries = await readdir(dir, { withFileTypes: true });
   for (const entry of entries) {
@@ -129,44 +95,165 @@ export async function flattenDirectory(
     if (err.code !== 'ENOENT') throw err;
   }
 
-  // Sort files for consistent content order
   const fileEntries = files.map(srcPath => ({
     srcPath,
     relPath: relative(absSource, srcPath).replace(/\\/g, '/')
   }));
-  fileEntries.sort((a, b) => a.relPath.toLowerCase().localeCompare(b.relPath.toLowerCase()));
 
   const relPaths = fileEntries.map(e => e.relPath);
-
-  // Build tree structure
   const treeObj = buildTreeObject(relPaths);
 
-  //
+  // Map for quick lookup of srcPath by relPath
+  const pathMap = new Map<string, string>();
+  fileEntries.forEach(({ srcPath, relPath }) => {
+    pathMap.set(relPath, srcPath);
+  });
+
+  // === NEW: Precompute anchors in document order ===
+  interface Section {
+    path: string;
+    headerText: string;
+  }
+
+  const sections: Section[] = [];
+
+  function collectSections(node: any, currentPath: string): void {
+    const dirs: { name: string; child: any }[] = [];
+    const files: { relPath: string }[] = [];
+
+    for (const [key, value] of Object.entries(node) as [string, any][]) {
+      if (key.endsWith('/')) {
+        const name = key.slice(0, -1);
+        dirs.push({ name, child: value });
+      } else {
+        files.push({ relPath: value as string });
+      }
+    }
+
+    dirs.sort((a, b) => a.name.toLowerCase().localeCompare(b.name.toLowerCase()));
+    files.sort((a, b) => a.relPath.toLowerCase().localeCompare(b.relPath.toLowerCase()));
+
+    for (const dir of dirs) {
+      const newPath = currentPath ? `${currentPath}/${dir.name}` : dir.name;
+      collectSections(dir.child, newPath);
+    }
+
+    if (currentPath) {
+      sections.push({ path: currentPath, headerText: currentPath }); // no trailing /
+    }
+
+    for (const file of files) {
+      sections.push({ path: file.relPath, headerText: file.relPath });
+    }
+  }
+
+  collectSections(treeObj, '');
+
+  const anchorMap = new Map<string, string>();
+  const anchorSlugger = new GithubSlugger();
+  for (const sec of sections) {
+    anchorMap.set(sec.path, anchorSlugger.slug(sec.headerText));
+  }
+  // === END NEW ===
+
+  // Updated renderMarkdownTree to use precomputed anchors
+  function renderMarkdownTree(
+    node: any,
+    depth: number = 0,
+    prefix: string = '',
+    anchorMap: Map<string, string>
+  ): string {
+    let result = '';
+    const indent = ' '.repeat(depth);
+    const entries: [string, any][] = Object.entries(node);
+
+    entries.sort(([a], [b]) => {
+      const aDir = a.endsWith('/');
+      const bDir = b.endsWith('/');
+      if (aDir !== bDir) return aDir ? -1 : 1;
+      return a.toLowerCase().localeCompare(b.toLowerCase());
+    });
+
+    for (const [key, value] of entries) {
+      const isDir = key.endsWith('/');
+      const name = isDir ? key.slice(0, -1) : key;
+      const display = isDir ? name + '/' : name;
+      const pathHere = prefix ? `${prefix}/${name}` : name;
+      const anchor = anchorMap.get(pathHere) ?? '';
+      result += `${indent}- [${display}](#${anchor})\n`;
+      if (isDir) {
+        result += renderMarkdownTree(value, depth + 1, pathHere, anchorMap);
+      }
+    }
+    return result;
+  }
+
+  // Render global tree with correct anchors
   let treeMarkdown = "# Project File Tree\n\n- .\n";
-  treeMarkdown += renderMarkdownTree(treeObj, 1);
-  treeMarkdown += "\n";
+  treeMarkdown += renderMarkdownTree(treeObj, 1, '', anchorMap);
+  treeMarkdown += "\n\n";
 
   const writeStream = createWriteStream(absTarget);
   writeStream.setMaxListeners(0);
-
   writeStream.write(treeMarkdown);
 
-
-
-
-
-
-
+  async function writeContentRecursive(
+    node: any,
+    currentPath: string,
+    writeStream: import('node:fs').WriteStream,
+    pathMap: Map<string, string>
+  ): Promise<void> {
+    const dirs: { name: string; child: any }[] = [];
+    const files: { name: string; relPath: string; srcPath: string }[] = [];
+
+    for (const [key, value] of Object.entries(node) as [string, any][]) {
+      if (key.endsWith('/')) {
+        const name = key.slice(0, -1);
+        dirs.push({ name, child: value });
+      } else {
+        const relPath = value as string;
+        const srcPath = pathMap.get(relPath)!;
+        const name = key;
+        files.push({ name, relPath, srcPath });
+      }
+    }
+
+    dirs.sort((a, b) => a.name.toLowerCase().localeCompare(b.name.toLowerCase()));
+    files.sort((a, b) => a.name.toLowerCase().localeCompare(b.name.toLowerCase()));
 
-
+    for (const dir of dirs) {
+      const newPath = currentPath ? `${currentPath}/${dir.name}` : dir.name;
+      await writeContentRecursive(dir.child, newPath, writeStream, pathMap);
+    }
+
+    // Directory section (only for non-root) — no trailing /
+    if (currentPath) {
+      writeStream.write(`# ${currentPath}\n\n`);
+      writeStream.write(`File Tree\n\n`);
+      writeStream.write(`- .\n`);
+      writeStream.write(renderMarkdownTree(node, 1, currentPath, anchorMap));
+      writeStream.write('\n');
+    }
 
-  const
-
+    for (const file of files) {
+      writeStream.write(`# ${file.relPath}\n\n`);
 
-
+      let ext = extname(file.srcPath).slice(1) || 'text';
+      const lang = ext;
+      const isMd = ['md', 'markdown'].includes(ext.toLowerCase());
+      const ticks = isMd ? '````' : '```';
+
+      writeStream.write(`${ticks}${lang}\n`);
+
+      const readStream = createReadStream(file.srcPath, { encoding: 'utf8' });
+      await pipeline(readStream, writeStream, { end: false });
+
+      writeStream.write(`\n${ticks}\n\n`);
+    }
   }
 
-
+  await writeContentRecursive(treeObj, '', writeStream, pathMap);
+
   writeStream.end();
   await finished(writeStream);
 
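Note: the precomputed anchorMap above exists because github-slugger deduplicates repeated slugs, so the tree links and the section headers only agree if both come from a single pass over the headers in document order. A small sketch of the collision case follows; the paths are hypothetical.

```ts
import GithubSlugger from 'github-slugger';

// Two different section headers that reduce to the same base slug:
// the directory "a/b" and the file "ab" both slug to "ab".
const headersInDocumentOrder = ['a/b', 'ab'];

const slugger = new GithubSlugger();
const anchorMap = new Map<string, string>();
for (const header of headersInDocumentOrder) {
  // Slugging in the same order the headers are written keeps each
  // tree link pointing at the slug GitHub will actually assign.
  anchorMap.set(header, slugger.slug(header));
}

console.log(anchorMap); // Map(2) { 'a/b' => 'ab', 'ab' => 'ab-1' }
```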
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "flatten-tool",
-  "version": "1.5.0",
+  "version": "1.6.1",
   "description": "CLI tool to flatten directory structures: merge files into a single Markdown file (default) or copy/move to a flat directory with escaped filenames. Respects .gitignore, supports move/overwrite, and more.",
   "module": "index.ts",
   "type": "module",
@@ -46,10 +46,10 @@
     "typescript": "^5.9.3"
   },
   "dependencies": {
+    "github-slugger": "^2.0.0",
     "globby": "^16.1.0",
     "ignore": "^7.0.5",
     "minimatch": "^10.1.2",
-    "treeify": "^1.1.0",
     "yargs": "^18.0.0"
   }
 }