@leighdinaya/repodocs 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +178 -0
- package/dist/cli.d.mts +1 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.mjs +142 -0
- package/dist/index.d.mts +92 -0
- package/dist/index.d.ts +92 -0
- package/dist/index.mjs +10 -0
- package/dist/shared/repodocs.CfJRqUMt.mjs +828 -0
- package/package.json +56 -0
package/README.md
ADDED
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
# repodocs
|
|
2
|
+
|
|
3
|
+
[![npm version](https://img.shields.io/npm/v/repodocs.svg)](https://www.npmjs.com/package/repodocs)
|
|
4
|
+
[![license](https://img.shields.io/badge/license-MIT-blue.svg)](./LICENSE)
|
|
5
|
+
|
|
6
|
+
Convert GitHub repositories into [Docus](https://docus.dev/) documentation sites.
|
|
7
|
+
|
|
8
|
+
repodocs fetches any GitHub repo (or local directory), analyzes its structure, transforms markdown and source code into documentation pages, and generates a ready-to-run Nuxt 3 + Docus site — all in a single command.
|
|
9
|
+
|
|
10
|
+
## Quick Start
|
|
11
|
+
|
|
12
|
+
```bash
|
|
13
|
+
# Generate docs for any GitHub repo
|
|
14
|
+
npx repodocs init nuxt/nuxt
|
|
15
|
+
|
|
16
|
+
# Preview the generated site
|
|
17
|
+
npx repodocs preview
|
|
18
|
+
```
|
|
19
|
+
|
|
20
|
+
## Installation
|
|
21
|
+
|
|
22
|
+
```bash
|
|
23
|
+
# Global install
|
|
24
|
+
npm install -g repodocs
|
|
25
|
+
|
|
26
|
+
# Or use directly with npx
|
|
27
|
+
npx repodocs <command>
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
## CLI Commands
|
|
31
|
+
|
|
32
|
+
### `repodocs init <source>`
|
|
33
|
+
|
|
34
|
+
Run the full pipeline: fetch → analyze → transform → generate.
|
|
35
|
+
|
|
36
|
+
```bash
|
|
37
|
+
repodocs init <source> [options]
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
**Arguments:**
|
|
41
|
+
|
|
42
|
+
| Argument | Description |
|
|
43
|
+
|----------|-------------|
|
|
44
|
+
| `source` | GitHub URL, `owner/repo` shorthand, or local path |
|
|
45
|
+
|
|
46
|
+
**Options:**
|
|
47
|
+
|
|
48
|
+
| Flag | Alias | Default | Description |
|
|
49
|
+
|------|-------|---------|-------------|
|
|
50
|
+
| `--output` | `-o` | `./docs` | Output directory |
|
|
51
|
+
| `--branch` | `-b` | — | Branch to clone |
|
|
52
|
+
| `--include-code` | — | `true` | Include source code pages |
|
|
53
|
+
| `--install` | — | `false` | Run `npm install` after generation |
|
|
54
|
+
|
|
55
|
+
### `repodocs build`
|
|
56
|
+
|
|
57
|
+
Build the generated Docus site to static HTML.
|
|
58
|
+
|
|
59
|
+
```bash
|
|
60
|
+
repodocs build [options]
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
| Flag | Default | Description |
|
|
64
|
+
|------|---------|-------------|
|
|
65
|
+
| `--dir` | `./docs` | Documentation directory to build |
|
|
66
|
+
|
|
67
|
+
### `repodocs preview`
|
|
68
|
+
|
|
69
|
+
Start the Nuxt dev server for local preview.
|
|
70
|
+
|
|
71
|
+
```bash
|
|
72
|
+
repodocs preview [options]
|
|
73
|
+
```
|
|
74
|
+
|
|
75
|
+
| Flag | Default | Description |
|
|
76
|
+
|------|---------|-------------|
|
|
77
|
+
| `--dir` | `./docs` | Documentation directory to serve |
|
|
78
|
+
|
|
79
|
+
### `repodocs serve`
|
|
80
|
+
|
|
81
|
+
Alias for `repodocs preview`.
|
|
82
|
+
|
|
83
|
+
## Examples
|
|
84
|
+
|
|
85
|
+
```bash
|
|
86
|
+
# GitHub URL
|
|
87
|
+
repodocs init https://github.com/unjs/h3
|
|
88
|
+
|
|
89
|
+
# owner/repo shorthand
|
|
90
|
+
repodocs init nuxt/nuxt
|
|
91
|
+
|
|
92
|
+
# SSH URL
|
|
93
|
+
repodocs init git@github.com:unjs/nitro.git
|
|
94
|
+
|
|
95
|
+
# Local directory
|
|
96
|
+
repodocs init ./my-project
|
|
97
|
+
|
|
98
|
+
# Custom output directory and branch
|
|
99
|
+
repodocs init nuxt/nuxt --output ./nuxt-docs --branch main
|
|
100
|
+
|
|
101
|
+
# Without source code pages
|
|
102
|
+
repodocs init unjs/h3 --no-include-code
|
|
103
|
+
|
|
104
|
+
# Build and preview
|
|
105
|
+
repodocs init nuxt/nuxt --install
|
|
106
|
+
repodocs preview
|
|
107
|
+
```
|
|
108
|
+
|
|
109
|
+
## How It Works
|
|
110
|
+
|
|
111
|
+
repodocs runs a four-stage pipeline:
|
|
112
|
+
|
|
113
|
+
1. **Fetch** — Clones the repository via git (shallow clone), falls back to the GitHub API tarball download, or reads from a local directory.
|
|
114
|
+
2. **Analyze** — Walks all files, categorizes them (markdown, code, config, asset), and builds a navigation tree.
|
|
115
|
+
3. **Transform** — Converts markdown files (parsing frontmatter, rewriting links) and wraps source code in documentation pages. Generates `_dir.yml` navigation files.
|
|
116
|
+
4. **Generate** — Scaffolds a Nuxt 3 + Docus project, writes all content pages, copies image assets, and generates `nuxt.config.ts` and `app.config.ts`.
|
|
117
|
+
|
|
118
|
+
## Programmatic API
|
|
119
|
+
|
|
120
|
+
```typescript
|
|
121
|
+
import {
|
|
122
|
+
fetchRepo,
|
|
123
|
+
analyzeRepo,
|
|
124
|
+
transformContent,
|
|
125
|
+
generateSite,
|
|
126
|
+
} from "repodocs";
|
|
127
|
+
|
|
128
|
+
// Fetch a repository
|
|
129
|
+
const { localPath, metadata, cleanup } = await fetchRepo("nuxt/nuxt", {
|
|
130
|
+
branch: "main",
|
|
131
|
+
token: process.env.GITHUB_TOKEN,
|
|
132
|
+
});
|
|
133
|
+
|
|
134
|
+
// Analyze the repo structure
|
|
135
|
+
const analysis = await analyzeRepo(localPath);
|
|
136
|
+
|
|
137
|
+
// Transform content into documentation pages
|
|
138
|
+
const transformed = await transformContent(analysis, localPath, {
|
|
139
|
+
includeCode: true,
|
|
140
|
+
});
|
|
141
|
+
|
|
142
|
+
// Generate the Docus site
|
|
143
|
+
await generateSite(transformed, metadata, {
|
|
144
|
+
outputDir: "./docs",
|
|
145
|
+
metadata,
|
|
146
|
+
});
|
|
147
|
+
|
|
148
|
+
// Clean up temporary files (for remote repos)
|
|
149
|
+
await cleanup?.();
|
|
150
|
+
```
|
|
151
|
+
|
|
152
|
+
### Type Exports
|
|
153
|
+
|
|
154
|
+
```typescript
|
|
155
|
+
import type {
|
|
156
|
+
FetchResult,
|
|
157
|
+
FetchOptions,
|
|
158
|
+
RepoMetadata,
|
|
159
|
+
AnalysisResult,
|
|
160
|
+
AnalyzedFile,
|
|
161
|
+
NavNode,
|
|
162
|
+
FileCategory,
|
|
163
|
+
TransformResult,
|
|
164
|
+
TransformedPage,
|
|
165
|
+
GenerateOptions,
|
|
166
|
+
SiteConfig,
|
|
167
|
+
} from "repodocs";
|
|
168
|
+
```
|
|
169
|
+
|
|
170
|
+
## Environment Variables
|
|
171
|
+
|
|
172
|
+
| Variable | Description |
|
|
173
|
+
|----------|-------------|
|
|
174
|
+
| `GITHUB_TOKEN` | GitHub personal access token. Used for API-based fetching (higher rate limits). Optional — works unauthenticated with lower rate limits. |
|
|
175
|
+
|
|
176
|
+
## License
|
|
177
|
+
|
|
178
|
+
MIT
|
package/dist/cli.d.mts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
package/dist/cli.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
package/dist/cli.mjs
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { defineCommand, runMain } from 'citty';
|
|
3
|
+
import { D as DEFAULT_OUTPUT_DIR, f as fetchRepo, a as analyzeRepo, t as transformContent, g as generateSite, l as logger } from './shared/repodocs.CfJRqUMt.mjs';
|
|
4
|
+
import path from 'node:path';
|
|
5
|
+
import { execSync } from 'node:child_process';
|
|
6
|
+
import fs from 'fs-extra';
|
|
7
|
+
import 'node:os';
|
|
8
|
+
import 'simple-git';
|
|
9
|
+
import '@octokit/rest';
|
|
10
|
+
import 'fast-glob';
|
|
11
|
+
import 'gray-matter';
|
|
12
|
+
import 'consola';
|
|
13
|
+
|
|
14
|
+
/**
 * `repodocs init <source>` — run the full pipeline:
 * fetch → analyze → transform → generate.
 * Exits with code 1 on any failure; always cleans up temp checkouts.
 */
const initCommand = defineCommand({
  meta: {
    name: "init",
    description: "Fetch a repository and generate a Docus documentation site"
  },
  args: {
    source: {
      type: "positional",
      description: "GitHub URL, owner/repo shorthand, or local path",
      required: true
    },
    output: {
      type: "string",
      alias: "o",
      description: "Output directory",
      default: DEFAULT_OUTPUT_DIR
    },
    branch: {
      type: "string",
      alias: "b",
      description: "Branch to clone"
    },
    "include-code": {
      type: "boolean",
      description: "Include source code pages",
      default: true
    },
    install: {
      type: "boolean",
      description: "Run npm install after generation",
      default: false
    }
  },
  async run({ args }) {
    const { source, output, branch, install } = args;
    // Hyphenated flags are not valid identifiers, so read via index.
    const includeCode = args["include-code"];
    try {
      const { localPath, metadata, cleanup } = await fetchRepo(source, { branch });
      try {
        const analysis = await analyzeRepo(localPath);
        const transformed = await transformContent(analysis, localPath, { includeCode });
        await generateSite(transformed, metadata, {
          outputDir: output,
          metadata,
          install
        });
      } finally {
        // Remote fetches provide a cleanup callback; local ones do not.
        await cleanup?.();
      }
    } catch (error) {
      logger.error(`Failed to generate documentation: ${error}`);
      process.exit(1);
    }
  }
});
|
|
73
|
+
|
|
74
|
+
/**
 * `repodocs build` — build a previously generated Docus site to static
 * HTML by running `npx nuxi generate` inside the docs directory.
 */
const buildCommand = defineCommand({
  meta: {
    name: "build",
    description: "Build the documentation site to static HTML"
  },
  args: {
    dir: {
      type: "string",
      description: "Documentation directory",
      default: DEFAULT_OUTPUT_DIR
    }
  },
  async run({ args }) {
    const dir = path.resolve(args.dir);
    // A generated project always contains nuxt.config.ts; its absence
    // means `repodocs init` has not been run for this directory.
    if (!await fs.pathExists(path.join(dir, "nuxt.config.ts"))) {
      logger.error(`No Docus project found in ${dir}. Run \`repodocs init\` first.`);
      process.exit(1);
    }
    logger.start("Building documentation site...");
    try {
      // stdio: "inherit" streams nuxi's own output (including failure
      // details) straight to the user's terminal.
      execSync("npx nuxi generate", { cwd: dir, stdio: "inherit" });
      logger.success("Build complete! Output in .output/public/");
    } catch {
      // Bare catch: the previous `catch (error)` binding was never used;
      // nuxi has already printed the real error via inherited stdio.
      logger.error("Build failed");
      process.exit(1);
    }
  }
});
|
|
102
|
+
|
|
103
|
+
// `repodocs preview` — start the Nuxt dev server for a previously
// generated docs project (also registered under the `serve` alias).
const previewCommand = defineCommand({
  meta: {
    name: "preview",
    description: "Preview the documentation site locally"
  },
  args: {
    dir: {
      type: "string",
      description: "Documentation directory",
      default: DEFAULT_OUTPUT_DIR
    }
  },
  async run({ args }) {
    const dir = path.resolve(args.dir);
    // Guard: the directory must contain a generated Nuxt project.
    if (!await fs.pathExists(path.join(dir, "nuxt.config.ts"))) {
      logger.error(`No Docus project found in ${dir}. Run \`repodocs init\` first.`);
      process.exit(1);
    }
    logger.start("Starting preview server...");
    try {
      // Blocks until the dev server exits.
      execSync("npx nuxi dev", { cwd: dir, stdio: "inherit" });
    } catch {
      // Intentionally swallowed: stopping the dev server (e.g. Ctrl+C)
      // makes execSync throw, which is a normal way to end a preview.
    }
  }
});
|
|
128
|
+
|
|
129
|
+
// Root CLI command: registers all subcommands. `serve` is a pure alias
// for `preview` (same command object).
const main = defineCommand({
  meta: {
    name: "repodocs",
    version: "0.1.0",
    description: "Convert GitHub repositories into Docus documentation sites"
  },
  subCommands: {
    init: initCommand,
    build: buildCommand,
    preview: previewCommand,
    serve: previewCommand
  }
});
// Parse process.argv and dispatch to the matching subcommand.
runMain(main);
|
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
/** Options accepted by {@link fetchRepo}. */
interface FetchOptions {
  /** Branch to check out; defaults to the repository's default branch. */
  branch?: string;
  /** GitHub token for the API fallback (GITHUB_TOKEN env is also honored). */
  token?: string;
}
/** Descriptive metadata about the fetched repository. */
interface RepoMetadata {
  name: string;
  description: string;
  owner: string;
  repo: string;
  branch: string;
  url: string;
  defaultBranch: string;
  /** True when the source was a local directory rather than a remote repo. */
  isLocal: boolean;
}
/** Result of {@link fetchRepo}. */
interface FetchResult {
  /** Directory containing the checked-out repository. */
  localPath: string;
  metadata: RepoMetadata;
  /** Present for remote fetches only; removes the temporary checkout. */
  cleanup?: () => Promise<void>;
}

/** Fetch a GitHub repo (URL, owner/repo shorthand) or local directory. */
declare function fetchRepo(input: string, options?: FetchOptions): Promise<FetchResult>;

/** Classification assigned to each analyzed file. */
type FileCategory = "markdown" | "code" | "config" | "asset" | "ignored";
/** A single file discovered during analysis. */
interface AnalyzedFile {
  relativePath: string;
  absolutePath: string;
  category: FileCategory;
  /** Syntax-highlight language id; set for code and config files. */
  language?: string;
  /** File size in bytes. */
  size: number;
}
/** Node of the generated navigation tree. */
interface NavNode {
  label: string;
  path: string;
  children: NavNode[];
  isDirectory: boolean;
  order: number;
}
/** Result of {@link analyzeRepo}: all files plus per-category views. */
interface AnalysisResult {
  files: AnalyzedFile[];
  navigation: NavNode;
  markdownFiles: AnalyzedFile[];
  codeFiles: AnalyzedFile[];
  configFiles: AnalyzedFile[];
  assetFiles: AnalyzedFile[];
}

/** Walk and categorize every file in the repo, building navigation. */
declare function analyzeRepo(repoPath: string): Promise<AnalysisResult>;

/** A documentation page produced by the transform stage. */
interface TransformedPage {
  /** Path relative to content/ directory */
  contentPath: string;
  /** Full markdown content with frontmatter */
  content: string;
  /** Original source file path */
  sourcePath: string;
}
/** Result of {@link transformContent}. */
interface TransformResult {
  pages: TransformedPage[];
  images: ImageAsset[];
  navigationFiles: NavigationFile[];
}
/** An image asset copied into the generated site. */
interface ImageAsset {
  sourcePath: string;
  destPath: string;
}
/** A generated `_dir.yml` navigation file. */
interface NavigationFile {
  dirPath: string;
  content: string;
}

/** Options for {@link transformContent}. */
interface TransformOptions {
  /** Whether to generate pages for source-code files. */
  includeCode?: boolean;
}
/** Convert analyzed files into documentation pages and nav files. */
declare function transformContent(analysis: AnalysisResult, repoPath: string, options?: TransformOptions): Promise<TransformResult>;

/** Options for {@link generateSite}. */
interface GenerateOptions {
  outputDir: string;
  metadata: RepoMetadata;
  /** Run `npm install` in the generated project when true. */
  install?: boolean;
}
/** Site-level configuration written into the generated project. */
interface SiteConfig {
  siteName: string;
  description: string;
  githubUrl: string;
  githubOwner: string;
  githubRepo: string;
}

/** Scaffold the Nuxt 3 + Docus project and write all content. */
declare function generateSite(transformResult: TransformResult, metadata: RepoMetadata, options: GenerateOptions): Promise<void>;

export { analyzeRepo, fetchRepo, generateSite, transformContent };
export type { AnalysisResult, AnalyzedFile, FetchOptions, FetchResult, FileCategory, GenerateOptions, NavNode, RepoMetadata, SiteConfig, TransformResult, TransformedPage };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
/** Options accepted by {@link fetchRepo}. */
interface FetchOptions {
  /** Branch to check out; defaults to the repository's default branch. */
  branch?: string;
  /** GitHub token for the API fallback (GITHUB_TOKEN env is also honored). */
  token?: string;
}
/** Descriptive metadata about the fetched repository. */
interface RepoMetadata {
  name: string;
  description: string;
  owner: string;
  repo: string;
  branch: string;
  url: string;
  defaultBranch: string;
  /** True when the source was a local directory rather than a remote repo. */
  isLocal: boolean;
}
/** Result of {@link fetchRepo}. */
interface FetchResult {
  /** Directory containing the checked-out repository. */
  localPath: string;
  metadata: RepoMetadata;
  /** Present for remote fetches only; removes the temporary checkout. */
  cleanup?: () => Promise<void>;
}

/** Fetch a GitHub repo (URL, owner/repo shorthand) or local directory. */
declare function fetchRepo(input: string, options?: FetchOptions): Promise<FetchResult>;

/** Classification assigned to each analyzed file. */
type FileCategory = "markdown" | "code" | "config" | "asset" | "ignored";
/** A single file discovered during analysis. */
interface AnalyzedFile {
  relativePath: string;
  absolutePath: string;
  category: FileCategory;
  /** Syntax-highlight language id; set for code and config files. */
  language?: string;
  /** File size in bytes. */
  size: number;
}
/** Node of the generated navigation tree. */
interface NavNode {
  label: string;
  path: string;
  children: NavNode[];
  isDirectory: boolean;
  order: number;
}
/** Result of {@link analyzeRepo}: all files plus per-category views. */
interface AnalysisResult {
  files: AnalyzedFile[];
  navigation: NavNode;
  markdownFiles: AnalyzedFile[];
  codeFiles: AnalyzedFile[];
  configFiles: AnalyzedFile[];
  assetFiles: AnalyzedFile[];
}

/** Walk and categorize every file in the repo, building navigation. */
declare function analyzeRepo(repoPath: string): Promise<AnalysisResult>;

/** A documentation page produced by the transform stage. */
interface TransformedPage {
  /** Path relative to content/ directory */
  contentPath: string;
  /** Full markdown content with frontmatter */
  content: string;
  /** Original source file path */
  sourcePath: string;
}
/** Result of {@link transformContent}. */
interface TransformResult {
  pages: TransformedPage[];
  images: ImageAsset[];
  navigationFiles: NavigationFile[];
}
/** An image asset copied into the generated site. */
interface ImageAsset {
  sourcePath: string;
  destPath: string;
}
/** A generated `_dir.yml` navigation file. */
interface NavigationFile {
  dirPath: string;
  content: string;
}

/** Options for {@link transformContent}. */
interface TransformOptions {
  /** Whether to generate pages for source-code files. */
  includeCode?: boolean;
}
/** Convert analyzed files into documentation pages and nav files. */
declare function transformContent(analysis: AnalysisResult, repoPath: string, options?: TransformOptions): Promise<TransformResult>;

/** Options for {@link generateSite}. */
interface GenerateOptions {
  outputDir: string;
  metadata: RepoMetadata;
  /** Run `npm install` in the generated project when true. */
  install?: boolean;
}
/** Site-level configuration written into the generated project. */
interface SiteConfig {
  siteName: string;
  description: string;
  githubUrl: string;
  githubOwner: string;
  githubRepo: string;
}

/** Scaffold the Nuxt 3 + Docus project and write all content. */
declare function generateSite(transformResult: TransformResult, metadata: RepoMetadata, options: GenerateOptions): Promise<void>;

export { analyzeRepo, fetchRepo, generateSite, transformContent };
export type { AnalysisResult, AnalyzedFile, FetchOptions, FetchResult, FileCategory, GenerateOptions, NavNode, RepoMetadata, SiteConfig, TransformResult, TransformedPage };
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
export { a as analyzeRepo, f as fetchRepo, g as generateSite, t as transformContent } from './shared/repodocs.CfJRqUMt.mjs';
|
|
2
|
+
import 'node:path';
|
|
3
|
+
import 'fs-extra';
|
|
4
|
+
import 'node:os';
|
|
5
|
+
import 'simple-git';
|
|
6
|
+
import 'node:child_process';
|
|
7
|
+
import '@octokit/rest';
|
|
8
|
+
import 'fast-glob';
|
|
9
|
+
import 'gray-matter';
|
|
10
|
+
import 'consola';
|
|
@@ -0,0 +1,828 @@
|
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
import fs from 'fs-extra';
|
|
3
|
+
import os from 'node:os';
|
|
4
|
+
import simpleGit from 'simple-git';
|
|
5
|
+
import { execSync } from 'node:child_process';
|
|
6
|
+
import { Octokit } from '@octokit/rest';
|
|
7
|
+
import fg from 'fast-glob';
|
|
8
|
+
import matter from 'gray-matter';
|
|
9
|
+
import { createConsola } from 'consola';
|
|
10
|
+
|
|
11
|
+
/**
 * Parse a GitHub reference into `{ owner, repo, branch? }`.
 *
 * Accepts SSH remotes (git@github.com:owner/repo.git), HTTP(S) URLs
 * (optionally with a /tree/<branch> segment), and owner/repo shorthand.
 * Returns null when the input cannot be parsed.
 */
function parseGitHubURL(input) {
  // SSH form. The repo group is `(.+?)` rather than the previous
  // `[^/.]+?`, which rejected repo names containing dots (e.g.
  // vercel/next.js) and let them fall through to a garbled HTTP parse.
  // The non-greedy match still strips an optional trailing ".git".
  const sshMatch = input.match(/^git@github\.com:([^/]+)\/(.+?)(?:\.git)?$/);
  if (sshMatch) {
    return { owner: sshMatch[1], repo: sshMatch[2] };
  }
  // HTTP(S) URL or shorthand: strip scheme, host prefix, trailing
  // ".git" and trailing slash. (The original stripped "github.com/"
  // a second time afterwards — redundant, removed.)
  const cleaned = input
    .replace(/^https?:\/\//, "")
    .replace(/^github\.com\//, "")
    .replace(/\.git$/, "")
    .replace(/\/$/, "");
  const match = cleaned.match(/^([^/]+)\/([^/]+?)(?:\/tree\/(.+))?$/);
  if (!match) return null;
  return {
    owner: match[1],
    repo: match[2],
    branch: match[3] || undefined
  };
}
|
|
26
|
+
/**
 * Heuristic: does `input` look like a GitHub reference?
 * True for anything mentioning github.com, an SSH remote, or a bare
 * `owner/repo` shorthand. Local paths like "./dir" return false.
 */
function isGitHubURL(input) {
  if (input.includes("github.com")) {
    return true;
  }
  if (input.startsWith("git@github.com:")) {
    return true;
  }
  const shorthand = /^[a-zA-Z0-9_-]+\/[a-zA-Z0-9._-]+$/;
  return shorthand.test(input);
}
|
|
29
|
+
/** Build the anonymous HTTPS clone URL for a GitHub repository. */
function buildCloneURL(owner, repo) {
  return ["https://github.com/", owner, "/", repo, ".git"].join("");
}
|
|
32
|
+
|
|
33
|
+
// Shared logger for the whole package: fancy consola reporter with
// timestamps suppressed (cleaner one-shot CLI output).
const logger = createConsola({
  fancy: true,
  formatOptions: {
    date: false
  }
});
|
|
39
|
+
|
|
40
|
+
/**
 * Treat `localPath` as an already-checked-out repository on disk.
 * Name/description are taken from package.json when present, falling
 * back to the directory name. No cleanup callback is returned because
 * the directory is owned by the caller.
 */
async function fetchLocal(localPath) {
  const resolved = path.resolve(localPath);
  if (!await fs.pathExists(resolved)) {
    throw new Error(`Local path does not exist: ${resolved}`);
  }
  const stat = await fs.stat(resolved);
  if (!stat.isDirectory()) {
    throw new Error(`Path is not a directory: ${resolved}`);
  }
  logger.info(`Reading local repository at ${resolved}`);
  let name = path.basename(resolved);
  let description = "";
  const pkgPath = path.join(resolved, "package.json");
  if (await fs.pathExists(pkgPath)) {
    try {
      const pkg = await fs.readJSON(pkgPath);
      name = pkg.name || name;
      description = pkg.description || "";
    } catch {
      // Unparseable package.json: keep the directory-derived defaults.
    }
  }
  const metadata = {
    name,
    description,
    owner: "",
    repo: name,
    branch: "main",
    url: "",
    defaultBranch: "main",
    isLocal: true
  };
  return { localPath: resolved, metadata };
}
|
|
75
|
+
|
|
76
|
+
/**
 * Shallow-clone `owner/repo` into a fresh temp directory.
 * Returns the clone path, metadata (enriched from package.json when
 * present) and a cleanup callback that removes the temp directory.
 */
async function fetchViaGit(owner, repo, options = {}) {
  const cloneUrl = buildCloneURL(owner, repo);
  // Date.now() suffix keeps concurrent runs from colliding in tmpdir.
  const tempDir = path.join(os.tmpdir(), `repodocs-${owner}-${repo}-${Date.now()}`);
  await fs.ensureDir(tempDir);
  logger.start(`Cloning ${owner}/${repo} via git...`);
  const git = simpleGit();
  // Shallow clone: history is not needed for docs generation.
  const cloneArgs = ["--depth", "1"];
  if (options.branch) {
    cloneArgs.push("--branch", options.branch);
  }
  try {
    await git.clone(cloneUrl, tempDir, cloneArgs);
  } catch (error) {
    // Best-effort removal of the half-created dir before re-throwing.
    await fs.remove(tempDir).catch(() => {
    });
    throw error;
  }
  logger.success(`Cloned ${owner}/${repo}`);
  const repoGit = simpleGit(tempDir);
  // Current branch of the fresh clone, falling back to the requested
  // branch, then "main".
  const branch = (await repoGit.branch()).current || options.branch || "main";
  let name = repo;
  let description = "";
  const pkgPath = path.join(tempDir, "package.json");
  if (await fs.pathExists(pkgPath)) {
    try {
      const pkg = await fs.readJSON(pkgPath);
      name = pkg.name || name;
      description = pkg.description || "";
    } catch {
      // Unparseable package.json: keep the repo-derived defaults.
    }
  }
  const metadata = {
    name,
    description,
    owner,
    repo,
    branch,
    url: `https://github.com/${owner}/${repo}`,
    // NOTE(review): this is the branch that was cloned, not necessarily
    // the repository's true default branch — confirm if that matters.
    defaultBranch: branch,
    isLocal: false
  };
  return {
    localPath: tempDir,
    metadata,
    cleanup: async () => {
      await fs.remove(tempDir).catch(() => {
      });
    }
  };
}
|
|
126
|
+
|
|
127
|
+
/**
 * Fallback fetch path: download the repository tarball through the
 * GitHub REST API (used when `git clone` fails or git is unavailable).
 * Honors `options.token` or GITHUB_TOKEN for higher rate limits.
 */
async function fetchViaGitHubAPI(owner, repo, options = {}) {
  const token = options.token || process.env.GITHUB_TOKEN;
  const octokit = new Octokit({ auth: token || void 0 });
  logger.start(`Fetching ${owner}/${repo} via GitHub API...`);
  const { data: repoData } = await octokit.repos.get({ owner, repo });
  const branch = options.branch || repoData.default_branch;
  const { data: tarball } = await octokit.repos.downloadTarballArchive({
    owner,
    repo,
    ref: branch
  });
  const tempDir = path.join(os.tmpdir(), `repodocs-${owner}-${repo}-${Date.now()}`);
  const tarPath = path.join(os.tmpdir(), `repodocs-${owner}-${repo}-${Date.now()}.tar.gz`);
  await fs.ensureDir(tempDir);
  await fs.writeFile(tarPath, Buffer.from(tarball));
  try {
    // --strip-components=1 drops the "<owner>-<repo>-<sha>/" wrapper dir.
    // NOTE(review): relies on a `tar` binary on PATH; paths are quoted but
    // owner/repo originate from user input — confirm they are constrained
    // upstream by parseGitHubURL's character classes.
    execSync(`tar xzf "${tarPath}" -C "${tempDir}" --strip-components=1`, {
      stdio: "pipe"
    });
  } finally {
    // The tarball is no longer needed whether extraction succeeded or not.
    await fs.remove(tarPath).catch(() => {
    });
  }
  logger.success(`Downloaded ${owner}/${repo}`);
  const metadata = {
    name: repoData.name,
    description: repoData.description || "",
    owner,
    repo,
    branch,
    url: repoData.html_url,
    defaultBranch: repoData.default_branch,
    isLocal: false
  };
  return {
    localPath: tempDir,
    metadata,
    cleanup: async () => {
      await fs.remove(tempDir).catch(() => {
      });
    }
  };
}
|
|
170
|
+
|
|
171
|
+
/**
 * Resolve any supported input (existing local directory, GitHub URL,
 * SSH remote, or owner/repo shorthand) into a checked-out repo on disk.
 *
 * Resolution order: local directory → git shallow clone → GitHub API
 * tarball fallback. Throws for inputs that are neither an existing
 * directory nor a recognizable GitHub reference.
 */
async function fetchRepo(input, options = {}) {
  const resolved = path.resolve(input);
  // Local paths win over remote parsing when they actually exist.
  if (await fs.pathExists(resolved)) {
    const stat = await fs.stat(resolved);
    if (stat.isDirectory()) {
      return fetchLocal(resolved);
    }
  }
  if (!isGitHubURL(input)) {
    throw new Error(
      `Invalid input: "${input}". Provide a GitHub URL or a local directory path.`
    );
  }
  const parsed = parseGitHubURL(input);
  if (!parsed) {
    throw new Error(`Could not parse GitHub URL: "${input}"`);
  }
  const { owner, repo } = parsed;
  // An explicit --branch option wins over a /tree/<branch> URL segment.
  const branch = options.branch || parsed.branch;
  try {
    return await fetchViaGit(owner, repo, { ...options, branch });
  } catch (error) {
    logger.warn("Git clone failed, trying GitHub API fallback...");
    logger.debug(String(error));
    return await fetchViaGitHubAPI(owner, repo, { ...options, branch });
  }
}
|
|
198
|
+
|
|
199
|
+
// Glob patterns excluded from the file walk: VCS and dependency dirs,
// build output, lockfiles, minified/bundled artifacts, fonts, media and
// other binaries that never belong in generated documentation.
const IGNORE_PATTERNS = [
  "node_modules/**",
  ".git/**",
  "dist/**",
  "build/**",
  ".output/**",
  ".nuxt/**",
  ".next/**",
  ".cache/**",
  "coverage/**",
  ".DS_Store",
  "*.lock",
  "package-lock.json",
  "yarn.lock",
  "pnpm-lock.yaml",
  "bun.lockb",
  "*.min.js",
  "*.min.css",
  "*.map",
  "*.wasm",
  "*.ico",
  "*.ttf",
  "*.woff",
  "*.woff2",
  "*.eot",
  "*.mp4",
  "*.webm",
  "*.mov",
  "*.zip",
  "*.tar",
  "*.gz",
  "*.exe",
  "*.dll",
  "*.so",
  "*.dylib"
];
// Files larger than this (512 KiB) are skipped by walkFiles.
const MAX_FILE_SIZE = 512 * 1024;
// Extensions treated as documentation pages.
const MARKDOWN_EXTENSIONS = [".md", ".mdx"];
// Extension -> syntax-highlight language id. Keys are lowercase;
// lookups lowercase the extension first (see getLanguageFromExtension).
const CODE_EXTENSIONS = {
  ".ts": "typescript",
  ".tsx": "tsx",
  ".js": "javascript",
  ".jsx": "jsx",
  ".vue": "vue",
  ".svelte": "svelte",
  ".py": "python",
  ".rb": "ruby",
  ".go": "go",
  ".rs": "rust",
  ".java": "java",
  ".kt": "kotlin",
  ".swift": "swift",
  ".c": "c",
  ".cpp": "cpp",
  ".h": "c",
  ".hpp": "cpp",
  ".cs": "csharp",
  ".php": "php",
  ".sh": "bash",
  ".bash": "bash",
  ".zsh": "bash",
  ".fish": "fish",
  ".sql": "sql",
  ".graphql": "graphql",
  ".gql": "graphql",
  ".yaml": "yaml",
  ".yml": "yaml",
  ".toml": "toml",
  ".json": "json",
  ".xml": "xml",
  ".html": "html",
  ".css": "css",
  ".scss": "scss",
  ".sass": "sass",
  ".less": "less",
  ".dockerfile": "dockerfile",
  ".tf": "hcl",
  ".lua": "lua",
  ".r": "r",
  ".dart": "dart",
  ".ex": "elixir",
  ".exs": "elixir",
  ".erl": "erlang",
  ".zig": "zig",
  ".nim": "nim",
  ".ml": "ocaml",
  ".hs": "haskell",
  ".clj": "clojure",
  ".scala": "scala"
};
// Well-known configuration files, matched by exact basename
// (categorized as "config" before the extension-based code check).
const CONFIG_FILES = [
  "package.json",
  "tsconfig.json",
  "nuxt.config.ts",
  "nuxt.config.js",
  "vite.config.ts",
  "vite.config.js",
  "webpack.config.js",
  "rollup.config.js",
  ".eslintrc",
  ".eslintrc.js",
  ".eslintrc.json",
  ".prettierrc",
  ".prettierrc.js",
  ".prettierrc.json",
  "Dockerfile",
  "docker-compose.yml",
  "Makefile",
  ".env.example"
];
// Image types copied into the generated site as static assets.
const IMAGE_EXTENSIONS = [".png", ".jpg", ".jpeg", ".gif", ".svg", ".webp", ".avif"];
// Default location of the generated Docus project.
const DEFAULT_OUTPUT_DIR = "./docs";
|
|
311
|
+
|
|
312
|
+
/**
 * Enumerate candidate files under `repoPath`, excluding dotfiles,
 * IGNORE_PATTERNS matches and anything larger than MAX_FILE_SIZE.
 * Results are sorted by relative path for deterministic output.
 */
async function walkFiles(repoPath) {
  const entries = await fg("**/*", {
    cwd: repoPath,
    dot: false,
    ignore: IGNORE_PATTERNS,
    onlyFiles: true,
    stats: true
  });
  const files = entries
    .map((entry) => ({
      relativePath: entry.path,
      absolutePath: path.join(repoPath, entry.path),
      size: entry.stats?.size ?? 0
    }))
    .filter((file) => file.size <= MAX_FILE_SIZE);
  files.sort((a, b) => a.relativePath.localeCompare(b.relativePath));
  return files;
}
|
|
333
|
+
|
|
334
|
+
/** Map a file's extension to a highlight language id ("text" fallback). */
function getLanguageFromExtension(filePath) {
  const ext = path.extname(filePath).toLowerCase();
  const lang = CODE_EXTENSIONS[ext];
  return lang || "text";
}
|
|
338
|
+
/**
 * Human-readable label for a highlight language id; unknown ids are
 * returned unchanged.
 */
function getLanguageDisplayName(lang) {
  const DISPLAY_NAMES = {
    typescript: "TypeScript",
    javascript: "JavaScript",
    python: "Python",
    ruby: "Ruby",
    go: "Go",
    rust: "Rust",
    java: "Java",
    kotlin: "Kotlin",
    swift: "Swift",
    csharp: "C#",
    cpp: "C++",
    c: "C",
    php: "PHP",
    bash: "Shell",
    sql: "SQL",
    yaml: "YAML",
    json: "JSON",
    html: "HTML",
    css: "CSS",
    vue: "Vue",
    tsx: "TSX",
    jsx: "JSX",
    svelte: "Svelte",
    dockerfile: "Dockerfile",
    graphql: "GraphQL",
    hcl: "HCL",
    dart: "Dart",
    elixir: "Elixir",
    scala: "Scala",
    haskell: "Haskell",
    clojure: "Clojure",
    ocaml: "OCaml",
    text: "Text"
  };
  return Object.hasOwn(DISPLAY_NAMES, lang) ? DISPLAY_NAMES[lang] : lang;
}
|
|
376
|
+
|
|
377
|
+
/**
 * Classify a walked file as markdown / config / code / asset / ignored
 * and attach a highlight language for config and code files.
 * The config check runs before the extension-based code check, so e.g.
 * package.json is categorized "config", not "code".
 */
function categorizeFile(file) {
  const ext = path.extname(file.relativePath).toLowerCase();
  const basename = path.basename(file.relativePath);
  let category = "ignored";
  let language;
  if (MARKDOWN_EXTENSIONS.includes(ext)) {
    category = "markdown";
  } else if (CONFIG_FILES.includes(basename)) {
    category = "config";
    language = getLanguageFromExtension(file.relativePath);
  } else if (ext in CODE_EXTENSIONS) {
    category = "code";
    language = getLanguageFromExtension(file.relativePath);
  } else if (IMAGE_EXTENSIONS.includes(ext)) {
    category = "asset";
  }
  return {
    relativePath: file.relativePath,
    absolutePath: file.absolutePath,
    category,
    language,
    size: file.size
  };
}
|
|
403
|
+
/** Categorize every walked file; input order is preserved. */
function categorizeFiles(files) {
  const categorized = [];
  for (const file of files) {
    categorized.push(categorizeFile(file));
  }
  return categorized;
}
|
|
406
|
+
|
|
407
|
+
/**
 * Map a repo-relative markdown path onto its Docus content path.
 * Backslashes are normalized and a trailing README.md (any case) becomes
 * index.md so each directory gets an index page.
 */
function toContentPath(filePath) {
  const segments = filePath.replace(/\\/g, "/").split("/");
  const fileName = segments.pop();
  segments.push(/^readme\.md$/i.test(fileName) ? "index.md" : fileName);
  return segments.join("/");
}
|
|
416
|
+
/** Content path for a generated code page: "src/a.ts" -> "code/src/a.ts.md". */
function toCodeContentPath(filePath) {
  return `code/${filePath.replace(/\\/g, "/")}.md`;
}
|
|
420
|
+
/**
 * Convert a relative markdown link into a site route.
 *
 * @param {string} linkPath - Link target as written in the markdown; may carry a `#anchor`.
 * @param {string} fromFile - Repo-relative path of the file containing the link.
 * @returns {string} Absolute route (e.g. `/docs/guide#setup`), a bare `#anchor`
 *   for same-page links, or `""` for an empty link.
 */
function mdLinkToRoute(linkPath, fromFile) {
  const [pathPart, anchor] = linkPath.split("#");
  // Pure-anchor links ("#section") stay on the current page.
  if (!pathPart) {
    return anchor ? `#${anchor}` : "";
  }
  const fromDir = path.dirname(fromFile);
  let resolved = path.join(fromDir, pathPart).replace(/\\/g, "/");
  resolved = resolved.replace(/\.md$/i, "");
  // README/index map to their directory's index route.
  resolved = resolved.replace(/\/readme$/i, "").replace(/\/index$/i, "");
  // Links resolving to the repository root (e.g. "README.md" linked from
  // another top-level file) have no leading directory, so the "/readme"
  // form above never matches; without this they produced "/README"
  // instead of "/". Mirrors buildRoutePath's bare-readme handling.
  resolved = resolved.replace(/^readme$/i, "").replace(/^index$/i, "");
  if (!resolved.startsWith("/")) {
    resolved = "/" + resolved;
  }
  if (resolved === "/") {
    return anchor ? `/#${anchor}` : "/";
  }
  return anchor ? `${resolved}#${anchor}` : resolved;
}
|
|
437
|
+
/**
 * Derive a human-readable page title from a file path.
 * "getting-started.md" -> "Getting Started"; README/index -> "Overview".
 *
 * @param {string} fileName - File name or repo-relative path.
 * @returns {string} Title-cased page title.
 */
function fileNameToTitle(fileName) {
  // Strip only the trailing extension. The previous
  // `basename().replace(extname, "")` removed the FIRST occurrence of the
  // extension text anywhere in the name (e.g. "my.mdfile.md" -> "myfile.md").
  const name = path.basename(fileName, path.extname(fileName));
  if (/^readme$/i.test(name)) return "Overview";
  if (/^index$/i.test(name)) return "Overview";
  return name
    .replace(/[-_]/g, " ")
    .replace(/([a-z])([A-Z])/g, "$1 $2") // split camelCase into words
    .replace(/\b\w/g, (c) => c.toUpperCase())
    .trim();
}
|
|
443
|
+
/** Turn a directory name into a navigation label ("api-docs" -> "Api Docs"). */
function dirNameToLabel(dirName) {
  const spaced = dirName
    .replace(/[-_]/g, " ")
    .replace(/([a-z])([A-Z])/g, "$1 $2");
  return spaced.replace(/\b\w/g, (c) => c.toUpperCase()).trim();
}
|
|
446
|
+
|
|
447
|
+
/**
 * Build the site navigation tree from categorized files.
 *
 * Only files with category "markdown" contribute nodes. Each path segment
 * becomes a directory node (created on first encounter, matched thereafter
 * by its display label); the file itself becomes a leaf whose route comes
 * from buildRoutePath. The finished tree is sorted in place by sortNavTree.
 *
 * NOTE(review): directories are matched by LABEL, so two directory names
 * that normalize to the same label (e.g. "api-docs" and "api_docs") would
 * merge into one node — confirm this is intended.
 *
 * @param {Array<{ relativePath: string, category: string }>} files
 * @returns {{ label: string, path: string, children: Array, isDirectory: boolean, order: number }} root node
 */
function buildNavigationTree(files) {
  const root = {
    label: "Root",
    path: "/",
    children: [],
    isDirectory: true,
    order: 0
  };
  const markdownFiles = files.filter((f) => f.category === "markdown");
  for (const file of markdownFiles) {
    const parts = file.relativePath.split("/");
    let current = root;
    // Walk/create directory nodes for every segment except the file name.
    for (let i = 0; i < parts.length - 1; i++) {
      const dirName = parts[i];
      let child = current.children.find(
        (c) => c.isDirectory && c.label === dirNameToLabel(dirName)
      );
      if (!child) {
        child = {
          label: dirNameToLabel(dirName),
          path: "/" + parts.slice(0, i + 1).join("/"),
          children: [],
          isDirectory: true,
          // order reflects insertion position at creation time; final
          // ordering is applied by sortNavTree below.
          order: current.children.length
        };
        current.children.push(child);
      }
      current = child;
    }
    const fileName = parts[parts.length - 1];
    const isReadme = /^readme\.md$/i.test(fileName);
    current.children.push({
      label: fileNameToTitle(fileName),
      path: buildRoutePath(file.relativePath),
      children: [],
      isDirectory: false,
      // READMEs get order -1 so they sort before their siblings.
      order: isReadme ? -1 : current.children.length
    });
  }
  sortNavTree(root);
  return root;
}
|
|
489
|
+
/**
 * Map a markdown file path onto its generated route.
 * "docs/README.md" -> "/docs"; "README.md" -> "/"; "guide/setup.md" -> "/guide/setup".
 */
function buildRoutePath(filePath) {
  const stripped = filePath
    .replace(/\\/g, "/")
    .replace(/\.md$/i, "")
    .replace(/\/readme$/i, "")
    .replace(/^readme$/i, "");
  const route = stripped.startsWith("/") ? stripped : "/" + stripped;
  return route || "/";
}
|
|
496
|
+
/**
 * Recursively sort a navigation node's children in place:
 * directories first, then ascending `order`, then label (locale-aware).
 */
function sortNavTree(node) {
  const byNavPosition = (a, b) => {
    if (a.isDirectory !== b.isDirectory) {
      return a.isDirectory ? -1 : 1;
    }
    return a.order - b.order || a.label.localeCompare(b.label);
  };
  node.children.sort(byNavPosition);
  node.children
    .filter((child) => child.isDirectory)
    .forEach((child) => sortNavTree(child));
}
|
|
510
|
+
|
|
511
|
+
/**
 * Walk and categorize a repository, producing the analysis object consumed
 * by the transform step: all categorized files, per-category lists, and the
 * navigation tree.
 *
 * @param {string} repoPath - Absolute path of the fetched repository.
 * @returns {Promise<object>} analysis result
 */
async function analyzeRepo(repoPath) {
  logger.start("Analyzing repository structure...");
  const files = categorizeFiles(await walkFiles(repoPath));
  const ofCategory = (category) => files.filter((f) => f.category === category);
  const markdownFiles = ofCategory("markdown");
  const codeFiles = ofCategory("code");
  const configFiles = ofCategory("config");
  const assetFiles = ofCategory("asset");
  const navigation = buildNavigationTree(files);
  logger.success(
    `Found ${markdownFiles.length} markdown, ${codeFiles.length} code, ${configFiles.length} config, ${assetFiles.length} asset files`
  );
  return {
    files,
    navigation,
    markdownFiles,
    codeFiles,
    configFiles,
    assetFiles
  };
}
|
|
532
|
+
|
|
533
|
+
/**
 * Transform one markdown source file into a Docus content page.
 *
 * Rewrites relative markdown links to site routes, redirects local image
 * references under `/images/` (recording each for the copy step), and
 * guarantees the frontmatter has a title (frontmatter > first `#` heading
 * > filename).
 *
 * @param {{ relativePath: string }} file - Categorized markdown file entry.
 * @param {string} repoPath - Absolute path of the fetched repository.
 * @returns {Promise<{ page: { contentPath: string, content: string, sourcePath: string }, images: Array<{ sourcePath: string, destPath: string }> }>}
 */
async function transformMarkdownFile(file, repoPath) {
  const fullPath = path.join(repoPath, file.relativePath);
  const raw = await fs.readFile(fullPath, "utf-8");
  const { data: existingFrontmatter, content: bodyContent } = matter(raw);

  // Title precedence: explicit frontmatter, then the first H1, then the filename.
  let title = existingFrontmatter.title;
  if (!title) {
    const headingMatch = bodyContent.match(/^#\s+(.+)$/m);
    title = headingMatch ? headingMatch[1].trim() : fileNameToTitle(file.relativePath);
  }
  // Spreading last preserves every existing frontmatter key (including a
  // pre-existing title/description) while guaranteeing `title` is set.
  const frontmatter = {
    title,
    ...existingFrontmatter
  };

  const images = [];
  // Record a local image for the copy step and return its public URL.
  const recordImage = (href) => {
    const destPath = resolveImagePath(href, file.relativePath);
    images.push({
      sourcePath: path.join(repoPath, path.dirname(file.relativePath), href),
      destPath
    });
    return destPath;
  };

  let processedBody = bodyContent;
  // Plain links only: the (?<!!) lookbehind skips image syntax (`![...]`),
  // which was previously matched by this pass as well as the image pass
  // below — producing duplicate copy records and double-prefixed
  // `/images/.../images/...` sources.
  processedBody = processedBody.replace(
    /(?<!!)\[([^\]]*)\]\(([^)]+)\)/g,
    (match, text, href) => {
      if (href.startsWith("http") || href.startsWith("#") || href.startsWith("mailto:")) {
        return match;
      }
      const ext = path.extname(href.split("#")[0].split("?")[0]).toLowerCase();
      if (IMAGE_EXTENSIONS.includes(ext)) {
        // A plain link targeting an image file: route it through /images/.
        return `[${text}](${recordImage(href)})`;
      }
      if (href.endsWith(".md") || href.includes(".md#")) {
        return `[${text}](${mdLinkToRoute(href, file.relativePath)})`;
      }
      return match;
    }
  );
  // Image embeds: rewrite the src and remember the file for the copy step.
  processedBody = processedBody.replace(
    /!\[([^\]]*)\]\(([^)]+)\)/g,
    (match, alt, src) => {
      if (src.startsWith("http")) return match;
      const ext = path.extname(src.split("?")[0]).toLowerCase();
      if (IMAGE_EXTENSIONS.includes(ext)) {
        return `![${alt}](${recordImage(src)})`;
      }
      return match;
    }
  );

  const contentPath = toContentPath(file.relativePath);
  const finalContent = matter.stringify(processedBody, frontmatter);
  return {
    page: {
      contentPath,
      content: finalContent,
      sourcePath: file.relativePath
    },
    images
  };
}
|
|
600
|
+
/** Resolve a relative image reference to its public `/images/...` URL. */
function resolveImagePath(href, fromFile) {
  const joined = path.join(path.dirname(fromFile), href);
  return `/images/${joined.replace(/\\/g, "/")}`;
}
|
|
605
|
+
|
|
606
|
+
/**
 * Wrap a source-code file in a markdown page containing a fenced,
 * syntax-highlighted block.
 *
 * @param {{ relativePath: string }} file - Categorized code file entry.
 * @param {string} repoPath - Absolute path of the fetched repository.
 * @returns {Promise<{ contentPath: string, content: string, sourcePath: string }>}
 */
async function transformCodeFile(file, repoPath) {
  const fullPath = path.join(repoPath, file.relativePath);
  const code = await fs.readFile(fullPath, "utf-8");
  const language = getLanguageFromExtension(file.relativePath);
  const displayName = getLanguageDisplayName(language);
  const baseName = path.basename(file.relativePath);
  // (Removed a stray `fileNameToTitle(...)` call whose result was discarded.)
  const frontmatter = {
    title: baseName,
    description: `Source code: ${file.relativePath}`,
    navigation: {
      title: baseName
    }
  };
  // NOTE(review): source files that themselves contain a ``` fence would
  // break this block — consider a longer fence if that ever bites.
  const body = `# ${baseName}

> Source: \`${file.relativePath}\` | Language: ${displayName}

\`\`\`${language}
${code}
\`\`\`
`;
  const content = matter.stringify(body, frontmatter);
  const contentPath = toCodeContentPath(file.relativePath);
  return {
    contentPath,
    content,
    sourcePath: file.relativePath
  };
}
|
|
635
|
+
|
|
636
|
+
/** Flatten the navigation tree into the list of `_dir.yml` files to write. */
function generateNavigationFiles(tree) {
  const navFiles = [];
  collectDirYml(tree, "", navFiles);
  return navFiles;
}
|
|
641
|
+
/**
 * Depth-first walk of the navigation tree, pushing one `_dir.yml`
 * descriptor (slugified path + YAML content) per directory node.
 */
function collectDirYml(node, currentPath, files) {
  for (const child of node.children) {
    if (!child.isDirectory) continue;
    const slug = slugify(child.label);
    const dirPath = currentPath ? `${currentPath}/${slug}` : slug;
    files.push({
      dirPath,
      content: `title: "${child.label}"\nnavigation:\n  title: "${child.label}"\n`
    });
    collectDirYml(child, dirPath, files);
  }
}
|
|
657
|
+
/** Reduce a label to a URL-safe slug ("Getting Started" -> "getting-started"). */
function slugify(label) {
  const hyphenated = label.toLowerCase().replace(/\s+/g, "-");
  return hyphenated.replace(/[^a-z0-9-]/g, "");
}
|
|
660
|
+
|
|
661
|
+
/**
 * Run the transform stage: markdown pages, optional code pages, and the
 * `_dir.yml` navigation descriptors. A file that fails to transform is
 * skipped with a warning rather than aborting the whole run.
 *
 * @param {object} analysis - Result of analyzeRepo.
 * @param {string} repoPath - Absolute path of the fetched repository.
 * @param {{ includeCode?: boolean }} [options] - includeCode defaults to true.
 * @returns {Promise<{ pages: Array, images: Array, navigationFiles: Array }>}
 */
async function transformContent(analysis, repoPath, options = {}) {
  const { includeCode = true } = options;
  logger.start("Transforming content...");
  const pages = [];
  const images = [];
  for (const file of analysis.markdownFiles) {
    try {
      const transformed = await transformMarkdownFile(file, repoPath);
      pages.push(transformed.page);
      images.push(...transformed.images);
    } catch (error) {
      logger.warn(`Skipping ${file.relativePath}: ${error}`);
    }
  }
  if (includeCode) {
    for (const file of analysis.codeFiles) {
      try {
        pages.push(await transformCodeFile(file, repoPath));
      } catch (error) {
        logger.warn(`Skipping code file ${file.relativePath}: ${error}`);
      }
    }
  }
  const navigationFiles = generateNavigationFiles(analysis.navigation);
  logger.success(
    `Transformed ${pages.length} pages, ${images.length} images`
  );
  return { pages, images, navigationFiles };
}
|
|
694
|
+
|
|
695
|
+
/**
 * Create the skeleton of the generated Nuxt/Docus project: directory
 * layout, package.json, and .gitignore.
 *
 * @param {string} outputDir - Destination directory (created if missing).
 * @param {{ githubRepo: string }} config - Site configuration.
 */
async function scaffoldDocusProject(outputDir, config) {
  const dirs = [outputDir, path.join(outputDir, "content"), path.join(outputDir, "public")];
  for (const dir of dirs) {
    await fs.ensureDir(dir);
  }
  const pkg = {
    name: `${config.githubRepo}-docs`,
    private: true,
    scripts: {
      dev: "nuxi dev",
      build: "nuxi build",
      generate: "nuxi generate",
      preview: "nuxi preview"
    },
    devDependencies: {
      "@nuxt-themes/docus": "^1.15.0",
      nuxt: "^3.16.0"
    }
  };
  await fs.writeJSON(path.join(outputDir, "package.json"), pkg, { spaces: 2 });
  await fs.writeFile(
    path.join(outputDir, ".gitignore"),
    ["node_modules", ".output", ".nuxt", ".DS_Store", ""].join("\n")
  );
}
|
|
721
|
+
|
|
722
|
+
/**
 * Write the Nuxt and Docus configuration files for the generated site.
 *
 * Emits `nuxt.config.ts` (minimal — everything comes from the Docus theme
 * layer) and `app.config.ts` (site identity plus theme options).
 *
 * @param {string} outputDir - Root of the generated project.
 * @param {{ siteName: string, description: string, githubUrl?: string }} config
 */
async function writeConfigs(outputDir, config) {
  const nuxtConfig = `export default defineNuxtConfig({
  extends: ["@nuxt-themes/docus"],
});
`;
  await fs.writeFile(path.join(outputDir, "nuxt.config.ts"), nuxtConfig);
  // JSON.stringify guards against quotes/backslashes in user-provided
  // names, descriptions, and URLs.
  const appConfig = `export default defineAppConfig({
  docus: {
    title: ${JSON.stringify(config.siteName)},
    description: ${JSON.stringify(config.description)},
    socials: {
      github: ${JSON.stringify(config.githubUrl || "")},
    },
    aside: {
      level: 1,
      collapsed: false,
      exclude: [],
    },
    header: {
      title: ${JSON.stringify(config.siteName)},
      logo: false,
      showLinkIcon: true,
    },
    footer: {
      credits: {
        text: "Generated with repodocs",
        href: "https://github.com/repodocs/repodocs",
        icon: "IconDocus",
      },
    },
  },
});
`;
  await fs.writeFile(path.join(outputDir, "app.config.ts"), appConfig);
}
|
|
757
|
+
|
|
758
|
+
/**
 * Write transformed pages, `_dir.yml` navigation files, and referenced
 * images into the generated project. A missing or uncopyable image is
 * warned about, never fatal.
 *
 * @param {string} outputDir - Root of the generated project.
 * @param {{ pages: Array, navigationFiles: Array, images: Array }} transformResult
 */
async function writeContent(outputDir, transformResult) {
  const contentDir = path.join(outputDir, "content");
  const publicDir = path.join(outputDir, "public");
  // Local helper: make the parent directory exist, then write the file.
  const writeInto = async (filePath, data) => {
    await fs.ensureDir(path.dirname(filePath));
    await fs.writeFile(filePath, data);
  };
  for (const page of transformResult.pages) {
    await writeInto(path.join(contentDir, page.contentPath), page.content);
  }
  for (const navFile of transformResult.navigationFiles) {
    await writeInto(path.join(contentDir, navFile.dirPath, "_dir.yml"), navFile.content);
  }
  for (const image of transformResult.images) {
    try {
      if (await fs.pathExists(image.sourcePath)) {
        const destPath = path.join(publicDir, image.destPath);
        await fs.ensureDir(path.dirname(destPath));
        await fs.copy(image.sourcePath, destPath);
      }
    } catch (error) {
      logger.warn(`Could not copy image ${image.sourcePath}: ${error}`);
    }
  }
}
|
|
783
|
+
|
|
784
|
+
/**
 * Final pipeline stage: scaffold the Docus project, write configs and
 * content, optionally run `npm install`, and print a summary box.
 *
 * @param {{ pages: Array, images: Array, navigationFiles: Array }} transformResult
 * @param {{ name: string, description?: string, url?: string, owner?: string, repo?: string }} metadata
 * @param {{ outputDir: string, install?: boolean }} options
 */
async function generateSite(transformResult, metadata, options) {
  const { outputDir, install } = options;
  const resolvedOutput = path.resolve(outputDir);
  logger.start(`Generating Docus site in ${resolvedOutput}...`);
  const config = {
    siteName: formatSiteName(metadata.name),
    description: metadata.description || `Documentation for ${metadata.name}`,
    githubUrl: metadata.url,
    githubOwner: metadata.owner,
    githubRepo: metadata.repo
  };
  await scaffoldDocusProject(resolvedOutput, config);
  await writeConfigs(resolvedOutput, config);
  await writeContent(resolvedOutput, transformResult);
  if (install) {
    logger.start("Installing dependencies...");
    try {
      // stdio: "pipe" keeps npm's output out of the CLI's own log stream.
      execSync("npm install", { cwd: resolvedOutput, stdio: "pipe" });
      logger.success("Dependencies installed");
    } catch (error) {
      // Best-effort install: failure is reported but does not abort generation.
      logger.warn("Failed to install dependencies. Run `npm install` manually.");
    }
  }
  const pageCount = transformResult.pages.length;
  const imageCount = transformResult.images.length;
  logger.box(
    [
      `Documentation site generated!`,
      ``,
      ` Pages: ${pageCount}`,
      ` Images: ${imageCount}`,
      ` Output: ${resolvedOutput}`,
      ``,
      `Next steps:`,
      ` cd ${path.relative(process.cwd(), resolvedOutput)}`,
      ` npm install`,
      ` npm run dev`
    ].join("\n")
  );
}
|
|
824
|
+
/** Human-friendly site title from a repo/package name ("@scope/my-repo" -> "My Repo"). */
function formatSiteName(name) {
  const bare = name.replace(/^@[^/]+\//, "");
  return bare.replace(/[-_]/g, " ").replace(/\b\w/g, (c) => c.toUpperCase());
}
|
|
827
|
+
|
|
828
|
+
export { DEFAULT_OUTPUT_DIR as D, analyzeRepo as a, fetchRepo as f, generateSite as g, logger as l, transformContent as t };
|
package/package.json
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@leighdinaya/repodocs",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Convert GitHub repositories into Docus documentation sites",
|
|
5
|
+
"author": "leighayanid",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"repository": {
|
|
8
|
+
"type": "git",
|
|
9
|
+
"url": "git+https://github.com/leighayanid/repodocs.git"
|
|
10
|
+
},
|
|
11
|
+
"homepage": "https://github.com/leighayanid/repodocs#readme",
|
|
12
|
+
"bugs": {
|
|
13
|
+
"url": "https://github.com/leighayanid/repodocs/issues"
|
|
14
|
+
},
|
|
15
|
+
"keywords": [
|
|
16
|
+
"documentation",
|
|
17
|
+
"docus",
|
|
18
|
+
"github",
|
|
19
|
+
"docs-generator",
|
|
20
|
+
"nuxt"
|
|
21
|
+
],
|
|
22
|
+
"type": "module",
|
|
23
|
+
"bin": {
|
|
24
|
+
"repodocs": "dist/cli.mjs"
|
|
25
|
+
},
|
|
26
|
+
"exports": {
|
|
27
|
+
".": {
|
|
28
|
+
"import": "./dist/index.mjs",
|
|
29
|
+
"types": "./dist/index.d.mts"
|
|
30
|
+
}
|
|
31
|
+
},
|
|
32
|
+
"files": [
|
|
33
|
+
"dist"
|
|
34
|
+
],
|
|
35
|
+
"scripts": {
|
|
36
|
+
"build": "unbuild",
|
|
37
|
+
"dev": "jiti src/cli.ts",
|
|
38
|
+
"stub": "unbuild --stub"
|
|
39
|
+
},
|
|
40
|
+
"dependencies": {
|
|
41
|
+
"@octokit/rest": "^21.1.1",
|
|
42
|
+
"citty": "^0.1.6",
|
|
43
|
+
"colorette": "^2.0.20",
|
|
44
|
+
"consola": "^3.4.2",
|
|
45
|
+
"fast-glob": "^3.3.3",
|
|
46
|
+
"fs-extra": "^11.3.0",
|
|
47
|
+
"gray-matter": "^4.0.3",
|
|
48
|
+
"simple-git": "^3.27.0"
|
|
49
|
+
},
|
|
50
|
+
"devDependencies": {
|
|
51
|
+
"@types/fs-extra": "^11.0.4",
|
|
52
|
+
"jiti": "^2.4.2",
|
|
53
|
+
"typescript": "^5.7.3",
|
|
54
|
+
"unbuild": "^3.5.0"
|
|
55
|
+
}
|
|
56
|
+
}
|