@tansuasici/docsync 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +171 -0
- package/dist/chunk-72B2YGTF.js +446 -0
- package/dist/chunk-72B2YGTF.js.map +1 -0
- package/dist/cli.js +123 -0
- package/dist/cli.js.map +1 -0
- package/dist/index.d.ts +78 -0
- package/dist/index.js +19 -0
- package/dist/index.js.map +1 -0
- package/package.json +71 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 tansuasici
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
# DocSync
|
|
2
|
+
|
|
3
|
+
[](./LICENSE)
|
|
4
|
+
[](https://www.typescriptlang.org/)
|
|
5
|
+
<!-- [](https://www.npmjs.com/package/@tansuasici/docsync) -->
|
|
6
|
+
|
|
7
|
+
**Write docs once in GitHub Markdown, publish everywhere.**
|
|
8
|
+
|
|
9
|
+
DocSync transforms your GitHub-native markdown (`README.md`, `docs/*.md`) into framework-ready MDX. No more maintaining docs in two places.
|
|
10
|
+
|
|
11
|
+
```
|
|
12
|
+
README.md ──┐
|
|
13
|
+
docs/*.md ──┤── docsync build ──→ .docsync/*.mdx ──→ Fumadocs / Docusaurus / ...
|
|
14
|
+
└── docsync.config.ts
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
## The Problem
|
|
18
|
+
|
|
19
|
+
You write documentation in markdown and it looks great on GitHub. But when you want a modern docs site (Fumadocs, Docusaurus, Nextra, Starlight), every framework demands its own directory structure, MDX format, and frontmatter conventions. You end up maintaining docs in two places.
|
|
20
|
+
|
|
21
|
+
DocSync bridges this gap. Keep your markdown as the single source of truth.
|
|
22
|
+
|
|
23
|
+
## Quick Start
|
|
24
|
+
|
|
25
|
+
```bash
|
|
26
|
+
npm install -D @tansuasici/docsync
|
|
27
|
+
npx docsync init
|
|
28
|
+
```
|
|
29
|
+
|
|
30
|
+
Edit the generated config:
|
|
31
|
+
|
|
32
|
+
```ts
|
|
33
|
+
// docsync.config.ts
|
|
34
|
+
import { defineConfig } from '@tansuasici/docsync'
|
|
35
|
+
|
|
36
|
+
export default defineConfig({
|
|
37
|
+
sources: [
|
|
38
|
+
{ path: 'README.md', slug: 'index', title: 'Introduction' },
|
|
39
|
+
{ path: 'docs/**/*.md' },
|
|
40
|
+
],
|
|
41
|
+
target: 'fumadocs',
|
|
42
|
+
github: { repo: 'your-username/your-repo' },
|
|
43
|
+
})
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
Build:
|
|
47
|
+
|
|
48
|
+
```bash
|
|
49
|
+
npx docsync build
|
|
50
|
+
# ✓ index.mdx
|
|
51
|
+
# ✓ getting-started.mdx
|
|
52
|
+
# ✓ api.mdx
|
|
53
|
+
# Done! 3 files written to .docsync/
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
Point your docs framework to the output:
|
|
57
|
+
|
|
58
|
+
```ts
|
|
59
|
+
// source.config.ts (Fumadocs)
|
|
60
|
+
import { defineDocs } from 'fumadocs-mdx/config'
|
|
61
|
+
|
|
62
|
+
export const docs = defineDocs({
|
|
63
|
+
dir: '.docsync',
|
|
64
|
+
})
|
|
65
|
+
```
|
|
66
|
+
|
|
67
|
+
## What It Does
|
|
68
|
+
|
|
69
|
+
| Input (GFM) | Output (MDX) |
|
|
70
|
+
|---|---|
|
|
71
|
+
| `> [!NOTE]` alerts | `<Callout>` components |
|
|
72
|
+
| `{curly braces}` in text | `\{escaped\}` for MDX |
|
|
73
|
+
| `<!-- HTML comments -->` | `{/* JSX comments */}` |
|
|
74
|
+
| `./docs/guide.md` links | `/docs/guide` site URLs |
|
|
75
|
+
| `./assets/logo.png` | GitHub raw URL |
|
|
76
|
+
| First `# Heading` | `title` frontmatter |
|
|
77
|
+
| First paragraph | `description` frontmatter |
|
|
78
|
+
|
|
79
|
+
It also generates navigation config (`meta.json` for Fumadocs) from your source ordering.
|
|
80
|
+
|
|
81
|
+
## Configuration
|
|
82
|
+
|
|
83
|
+
```ts
|
|
84
|
+
import { defineConfig } from '@tansuasici/docsync'
|
|
85
|
+
|
|
86
|
+
export default defineConfig({
|
|
87
|
+
// Required
|
|
88
|
+
sources: [
|
|
89
|
+
{ path: 'README.md', slug: 'index', title: 'Introduction' },
|
|
90
|
+
{ path: 'docs/**/*.md' },
|
|
91
|
+
{ path: 'CHANGELOG.md', slug: 'changelog', order: 99 },
|
|
92
|
+
],
|
|
93
|
+
target: 'fumadocs',
|
|
94
|
+
|
|
95
|
+
// Optional
|
|
96
|
+
outDir: '.docsync', // default: '.docsync'
|
|
97
|
+
baseUrl: '/docs', // default: '/docs'
|
|
98
|
+
clean: true, // default: true
|
|
99
|
+
github: {
|
|
100
|
+
repo: 'user/repo',
|
|
101
|
+
branch: 'main', // default: 'main'
|
|
102
|
+
},
|
|
103
|
+
})
|
|
104
|
+
```
|
|
105
|
+
|
|
106
|
+
### Source Entry Options
|
|
107
|
+
|
|
108
|
+
| Option | Type | Description |
|
|
109
|
+
|--------|------|-------------|
|
|
110
|
+
| `path` | `string` | File path or glob pattern (required) |
|
|
111
|
+
| `slug` | `string` | Override URL slug |
|
|
112
|
+
| `title` | `string` | Override page title |
|
|
113
|
+
| `description` | `string` | Override page description |
|
|
114
|
+
| `order` | `number` | Sidebar position (lower = higher) |
|
|
115
|
+
|
|
116
|
+
## Integration with Build Pipeline
|
|
117
|
+
|
|
118
|
+
```json
|
|
119
|
+
{
|
|
120
|
+
"scripts": {
|
|
121
|
+
"prebuild": "docsync build",
|
|
122
|
+
"build": "next build",
|
|
123
|
+
"dev": "docsync build && next dev"
|
|
124
|
+
}
|
|
125
|
+
}
|
|
126
|
+
```
|
|
127
|
+
|
|
128
|
+
## Supported Targets
|
|
129
|
+
|
|
130
|
+
| Target | Status |
|
|
131
|
+
|--------|--------|
|
|
132
|
+
| Fumadocs | Full support |
|
|
133
|
+
| Docusaurus | Planned (v0.2) |
|
|
134
|
+
| Nextra | Planned |
|
|
135
|
+
| Starlight | Planned |
|
|
136
|
+
|
|
137
|
+
## How It Works
|
|
138
|
+
|
|
139
|
+
DocSync uses a [unified](https://unifiedjs.com/) / remark pipeline to transform markdown at the AST level:
|
|
140
|
+
|
|
141
|
+
1. **Parse** GFM markdown (tables, task lists, alerts, etc.)
|
|
142
|
+
2. **Transform** GitHub-specific syntax to MDX-compatible output
|
|
143
|
+
3. **Rewrite** relative links and images using the source map
|
|
144
|
+
4. **Inject** frontmatter (title, description) from content analysis
|
|
145
|
+
5. **Generate** framework-specific navigation config
|
|
146
|
+
|
|
147
|
+
This approach is reliable — no regex hacks, no string replacement. The AST handles edge cases like nested code blocks, frontmatter, and mixed content correctly.
|
|
148
|
+
|
|
149
|
+
## Contributing
|
|
150
|
+
|
|
151
|
+
Contributions are welcome! The codebase is TypeScript with a clean architecture:
|
|
152
|
+
|
|
153
|
+
```
|
|
154
|
+
src/
|
|
155
|
+
cli/ CLI commands (citty)
|
|
156
|
+
config/ Config schema (Zod) + loader (c12)
|
|
157
|
+
core/ Build pipeline + source resolver
|
|
158
|
+
transform/ Remark plugins (alerts, escaping, links, images)
|
|
159
|
+
adapters/ Framework-specific output (Fumadocs, ...)
|
|
160
|
+
```
|
|
161
|
+
|
|
162
|
+
```bash
|
|
163
|
+
pnpm install
|
|
164
|
+
pnpm test # 29 tests
|
|
165
|
+
pnpm typecheck # TypeScript verification
|
|
166
|
+
pnpm build # Build with tsup
|
|
167
|
+
```
|
|
168
|
+
|
|
169
|
+
## License
|
|
170
|
+
|
|
171
|
+
MIT
|
|
@@ -0,0 +1,446 @@
|
|
|
1
|
+
// src/config/schema.ts
// Zod schemas describing docsync.config.* — validated at load time by
// loadDocSyncConfig; defaults here are the documented config defaults.
import { z } from "zod";
var sourceEntrySchema = z.object({
  /** File path or glob pattern (repo-relative) */
  path: z.string(),
  /** Override slug (for explicit entries only) */
  slug: z.string().optional(),
  /** Override page title */
  title: z.string().optional(),
  /** Override page description */
  description: z.string().optional(),
  /** Sidebar position (lower = higher) */
  order: z.number().optional()
});
var configSchema = z.object({
  /** Source files to include (at least one entry required) */
  sources: z.array(sourceEntrySchema).min(1),
  /** Target docs framework (only "fumadocs" is implemented) */
  target: z.enum(["fumadocs"]),
  /** Output directory for generated files */
  outDir: z.string().default(".docsync"),
  /** GitHub repository info for external link rewriting (raw image URLs) */
  github: z.object({
    repo: z.string(),
    branch: z.string().default("main")
  }).optional(),
  /** Base URL path for docs */
  baseUrl: z.string().default("/docs"),
  /** Clean output directory before build */
  clean: z.boolean().default(true)
});
|
|
32
|
+
|
|
33
|
+
// src/config/loader.ts
import { loadConfig } from "c12";

/**
 * Load the docsync config (docsync.config.*) from `cwd` via c12 and
 * validate it against `configSchema`.
 *
 * @param cwd directory to search for the config file (default: process.cwd())
 * @returns the parsed, defaulted config object
 * @throws Error listing every invalid field when validation fails
 */
async function loadDocSyncConfig(cwd = process.cwd()) {
  const loaded = await loadConfig({
    name: "docsync",
    cwd
  });
  const parsed = configSchema.safeParse(loaded.config);
  if (parsed.success) {
    return parsed.data;
  }
  // One line per issue, e.g. " - sources.0.path: Required".
  const issueLines = parsed.error.issues.map(
    (issue) => ` - ${issue.path.join(".")}: ${issue.message}`
  );
  throw new Error(`Invalid docsync config:\n${issueLines.join("\n")}`);
}
|
|
48
|
+
|
|
49
|
+
// src/core/source-resolver.ts
import path from "path";
import fs from "fs/promises";
import fg from "fast-glob";

/**
 * Expand configured source entries into concrete page descriptors.
 *
 * For each entry: glob its pattern (warning when nothing matches), verify
 * each file is readable, derive slug/order, then deduplicate by slug
 * (first occurrence wins) and sort by sidebar order.
 *
 * @param sources array of source entries from the config
 * @param cwd     repository root the patterns are relative to
 * @returns pages sorted by `order`, unique by slug
 */
async function resolveSourceFiles(sources, cwd) {
  const collected = [];
  let nextAutoOrder = 0;
  for (const entry of sources) {
    const matches = await fg(entry.path, {
      cwd,
      onlyFiles: true,
      absolute: false
    });
    if (matches.length === 0) {
      console.warn(`[docsync] No files matched: ${entry.path}`);
      continue;
    }
    // Sort so glob expansion is deterministic across platforms.
    matches.sort();
    for (const relative of matches) {
      const absolute = path.resolve(cwd, relative);
      // Fail early (throws) if the file exists but is unreadable.
      await fs.access(absolute, fs.constants.R_OK);
      collected.push({
        filePath: absolute,
        relativePath: relative,
        // Explicit slug overrides apply to every match of the entry.
        slug: entry.slug ?? deriveSlug(relative),
        title: entry.title,
        description: entry.description,
        // Auto order only advances when the entry has no explicit order.
        order: entry.order ?? nextAutoOrder++
      });
    }
  }
  // Deduplicate by slug — the first page registered for a slug wins.
  const bySlug = /* @__PURE__ */ new Map();
  for (const page of collected) {
    if (bySlug.has(page.slug)) {
      console.warn(
        `[docsync] Duplicate slug "${page.slug}" from ${page.relativePath} \u2014 skipped`
      );
    } else {
      bySlug.set(page.slug, page);
    }
  }
  // Stable sort keeps insertion order for equal `order` values.
  return [...bySlug.values()].sort((a, b) => a.order - b.order);
}
|
|
97
|
+
/**
 * Derive a URL slug from a repo-relative file path.
 *
 * Examples:
 *   README.md            -> index
 *   docs/guide.md        -> guide
 *   docs/guides/setup.md -> guides/setup
 */
function deriveSlug(filePath) {
  const parts = path.parse(filePath);
  const baseName = parts.name.toLowerCase();
  const hasDir = Boolean(parts.dir) && parts.dir !== ".";
  const dirPart = hasDir ? stripDocsPrefix(parts.dir) : "";
  // A README becomes the index page of its directory.
  if (baseName === "readme") {
    return hasDir ? `${dirPart}/index` : "index";
  }
  return dirPart ? `${dirPart}/${baseName}` : baseName;
}
|
|
110
|
+
/**
 * Strip a leading "docs" directory segment from a path:
 *   docs/guides/foo -> guides/foo
 *   docs            -> "" (empty string)
 *
 * Only a whole "docs" path segment is stripped. The previous pattern
 * (/^docs\/?/) matched the prefix of any directory starting with "docs",
 * so "docsite/foo" was mangled into "ite/foo".
 */
function stripDocsPrefix(dir) {
  return dir.replace(/^docs(\/|$)/, "");
}
|
|
113
|
+
|
|
114
|
+
// src/core/pipeline.ts
|
|
115
|
+
import fs2 from "fs/promises";
|
|
116
|
+
import path4 from "path";
|
|
117
|
+
|
|
118
|
+
// src/transform/index.ts
|
|
119
|
+
import { unified } from "unified";
|
|
120
|
+
import remarkParse from "remark-parse";
|
|
121
|
+
import remarkGfm from "remark-gfm";
|
|
122
|
+
import remarkFrontmatter from "remark-frontmatter";
|
|
123
|
+
import remarkStringify from "remark-stringify";
|
|
124
|
+
|
|
125
|
+
// src/transform/gfm-alerts.ts
import { visit } from "unist-util-visit";
// Matches a GitHub alert marker ("[!NOTE]" etc., case-insensitive) at the
// start of a text node, including trailing whitespace/newline so removal
// leaves clean content behind.
var ALERT_PATTERN = /^\[!(NOTE|TIP|IMPORTANT|WARNING|CAUTION)\]\s*/i;
// Remark plugin: converts GitHub-style alert blockquotes
//   > [!NOTE]
//   > Content here
// into framework-specific callout nodes via options.adapter.transformAlert.
var remarkGfmAlerts = (options) => {
  return (tree) => {
    visit(tree, "blockquote", (node, index, parent) => {
      // A parent and index are required to splice in the replacement node.
      if (!parent || index === void 0) return;
      // The marker must be the very first text of the first paragraph.
      const firstChild = node.children[0];
      if (!firstChild || firstChild.type !== "paragraph") return;
      const firstInline = firstChild.children[0];
      if (!firstInline || firstInline.type !== "text") return;
      const match = firstInline.value.match(ALERT_PATTERN);
      if (!match) return;
      const alertType = match[1].toLowerCase();
      // Remove the marker, then prune the text node / paragraph if the
      // marker was all they contained (marker on its own line).
      firstInline.value = firstInline.value.replace(ALERT_PATTERN, "");
      if (firstInline.value.trim() === "") {
        firstChild.children.shift();
      }
      if (firstChild.children.length === 0) {
        node.children.shift();
      }
      // The adapter decides the output node; a falsy return keeps the
      // original (now marker-less) blockquote in place.
      const replacement = options.adapter.transformAlert(alertType, node);
      if (replacement) {
        parent.children[index] = replacement;
      }
    });
  };
};
|
|
153
|
+
|
|
154
|
+
// src/transform/escape-mdx.ts
import { visit as visit2 } from "unist-util-visit";

/**
 * Remark plugin that escapes MDX-breaking syntax:
 *  - { } in text            -> \{ \}
 *  - stray < > in text      -> \< \>  (HTML-tag-like sequences are left alone)
 *  - <!-- html comments --> -> {/* jsx comments *\/}
 *  - void HTML tags         -> self-closed (<br> -> <br />), required by JSX
 *
 * Fix: the previous void-tag rewrite did not tolerate tags that were
 * already self-closed, turning "<br />" into the invalid "<br / />".
 * The rewrite is now idempotent.
 */
var remarkEscapeMdx = () => {
  return (tree) => {
    visit2(tree, "text", (node) => {
      node.value = node.value.replace(/([{}])/g, "\\$1");
      // Escape "<" only when not followed by a tag-ish character.
      node.value = node.value.replace(/<(?![a-zA-Z/!])/g, "\\<");
      // Escape ">" only when not preceded by a tag-ish character.
      node.value = node.value.replace(/(?<![a-zA-Z"'/])>/g, "\\>");
    });
    visit2(tree, "html", (node, index, parent) => {
      if (!parent || index === void 0) return;
      // Whole-node HTML comments become JSX comments.
      const commentMatch = node.value.match(/^<!--\s*([\s\S]*?)\s*-->$/);
      if (commentMatch) {
        node.value = `{/* ${commentMatch[1]} */}`;
      }
      // Self-close void elements. An optional trailing "/" is consumed so
      // already-self-closed tags come out unchanged (modulo normalization).
      node.value = node.value.replace(
        /<(br|hr|img|input|meta|link)(\s[^>]*?)?\s*\/?>/gi,
        "<$1$2 />"
      );
    });
  };
};
|
|
176
|
+
|
|
177
|
+
// src/transform/rewrite-links.ts
import path2 from "path";
import { visit as visit3 } from "unist-util-visit";

/**
 * Remark plugin that rewrites relative markdown links (./docs/guide.md)
 * to site URLs (/docs/guide) using the slug map of resolved pages.
 * Links whose target is not in the slug map are left untouched.
 *
 * Fixes over the previous version:
 *  - any URL with a scheme (mailto:, tel:, ftp:, ...) and protocol-relative
 *    URLs (//host/...) are now skipped; before, only http(s) and "#" were,
 *    so "mailto:a@b" was resolved as a relative file path.
 *  - normalized paths are converted to forward slashes so slug-map lookups
 *    also work on Windows, where path.normalize produces backslashes.
 */
var remarkRewriteLinks = (options) => {
  return (tree) => {
    visit3(tree, "link", (node) => {
      const url = node.url;
      if (/^[a-z][a-z0-9+.-]*:/i.test(url) || url.startsWith("#") || url.startsWith("//")) {
        return;
      }
      const [urlPath, anchor] = url.split("#");
      const currentDir = path2.dirname(options.page.relativePath);
      const resolvedPath = path2
        .normalize(path2.join(currentDir, urlPath))
        .split(path2.sep)
        .join("/");
      const slug = options.slugMap.get(resolvedPath) ?? options.slugMap.get(`./${resolvedPath}`);
      if (slug !== void 0) {
        const base = options.baseUrl.replace(/\/$/, "");
        // The index page maps to the bare base URL.
        node.url = slug === "index" ? base : `${base}/${slug}`;
        if (anchor) {
          node.url += `#${anchor}`;
        }
      }
    });
  };
};
|
|
201
|
+
|
|
202
|
+
// src/transform/rewrite-images.ts
import path3 from "path";
import { visit as visit4 } from "unist-util-visit";

/**
 * Remark plugin that rewrites relative image paths to
 * raw.githubusercontent.com URLs so images keep working when the page is
 * served outside the repository. Requires `options.github` ({repo, branch});
 * without it, relative images are left untouched.
 *
 * Fixes over the previous version:
 *  - skips any URL with a scheme (covers data:, http:, https:, and others)
 *    plus protocol-relative URLs (//host/...).
 *  - joins the repo path with forward slashes so the generated URL is valid
 *    on Windows too (path.normalize produces backslashes there).
 */
var remarkRewriteImages = (options) => {
  return (tree) => {
    visit4(tree, "image", (node) => {
      const url = node.url;
      if (/^[a-z][a-z0-9+.-]*:/i.test(url) || url.startsWith("//")) {
        return;
      }
      if (!options.github) return;
      const currentDir = path3.dirname(options.page.relativePath);
      const resolvedPath = path3
        .normalize(path3.join(currentDir, url))
        .split(path3.sep)
        .join("/");
      const { repo, branch } = options.github;
      node.url = `https://raw.githubusercontent.com/${repo}/${branch}/${resolvedPath}`;
    });
  };
};
|
|
220
|
+
|
|
221
|
+
// src/transform/frontmatter.ts
/**
 * Derive frontmatter (title, description) from a markdown source.
 *
 * Title: config override, else the first H1, else derived from the slug.
 * Description: config override, else the first plain paragraph line after
 * the H1. Also returns the source with the H1 (and one trailing blank
 * line) removed, since the title moves into frontmatter.
 *
 * Fixes over the previous version:
 *  - an existing YAML frontmatter block is skipped as a whole. The old
 *    "i === 0 && line === '---'" check was dead code (the preceding
 *    `continue` already consumed "---" lines), so frontmatter bodies were
 *    scanned as content.
 *  - fenced code blocks are ignored while searching for the H1, so a
 *    "# comment" inside ```bash``` can no longer be mistaken for a title.
 *
 * @param source raw markdown text
 * @param page   resolved page (slug, optional title/description overrides)
 * @returns { title, description, contentWithoutH1 }
 */
function extractFrontmatter(source, page) {
  const lines = source.split("\n");
  let title = page.title;
  let description = page.description;
  let h1LineIndex = -1;

  // Skip a leading YAML frontmatter block entirely.
  let scanStart = 0;
  if (lines[0]?.trim() === "---") {
    const fmEnd = lines.indexOf("---", 1);
    if (fmEnd !== -1) {
      scanStart = fmEnd + 1;
    }
  }

  // Locate the first H1 outside fenced code blocks.
  let inFence = false;
  for (let i = scanStart; i < lines.length; i++) {
    const line = lines[i].trim();
    if (line.startsWith("```")) {
      inFence = !inFence;
      continue;
    }
    if (inFence || line === "") continue;
    const h1Match = line.match(/^#\s+(.+)$/);
    if (h1Match) {
      if (!title) {
        title = h1Match[1].trim();
      }
      h1LineIndex = i;
      break;
    }
  }

  if (!title) {
    title = page.slug === "index" ? "Introduction" : slugToTitle(page.slug);
  }

  // Description: first non-empty line after the H1 that is plain prose
  // (not a heading, fence, list item, or blockquote).
  if (!description && h1LineIndex !== -1) {
    for (let i = h1LineIndex + 1; i < lines.length; i++) {
      const line = lines[i].trim();
      if (line === "") continue;
      if (line.startsWith("#") || line.startsWith("```") || line.startsWith("-") || line.startsWith(">")) {
        break;
      }
      description = line;
      break;
    }
  }

  // Strip the H1 line (and the blank line that followed it, if any).
  let contentWithoutH1 = source;
  if (h1LineIndex !== -1) {
    const newLines = [...lines];
    newLines.splice(h1LineIndex, 1);
    if (newLines[h1LineIndex]?.trim() === "") {
      newLines.splice(h1LineIndex, 1);
    }
    contentWithoutH1 = newLines.join("\n");
  }
  return { title, description, contentWithoutH1 };
}
|
|
270
|
+
/**
 * Turn the final segment of a slug into a human-readable title:
 *   "guides/getting-started" -> "Getting Started"
 */
function slugToTitle(slug) {
  const segments = slug.split("/");
  const leaf = segments[segments.length - 1] ?? slug;
  // Hyphens/underscores become spaces; each word is capitalized.
  const spaced = leaf.replace(/[-_]/g, " ");
  return spaced.replace(/\b\w/g, (ch) => ch.toUpperCase());
}
|
|
274
|
+
|
|
275
|
+
// src/transform/index.ts
/**
 * Transform one markdown source into a complete MDX document:
 * YAML frontmatter block, optional adapter imports, then the transformed
 * body produced by the remark pipeline.
 *
 * @param source raw markdown text
 * @param ctx    { page, slugMap, adapter, config }
 * @returns the final MDX file contents
 */
async function transformMarkdown(source, ctx) {
  // Pull title/description out of the content unless the config set them.
  const extracted = extractFrontmatter(source, ctx.page);
  const frontmatter = ctx.adapter.generateFrontmatter({
    ...ctx.page,
    title: extracted.title,
    description: extracted.description
  });

  // GFM in, MDX-safe markdown out.
  const processor = unified()
    .use(remarkParse)
    .use(remarkGfm)
    .use(remarkFrontmatter)
    .use(remarkGfmAlerts, { adapter: ctx.adapter })
    .use(remarkEscapeMdx)
    .use(remarkRewriteLinks, {
      slugMap: ctx.slugMap,
      baseUrl: ctx.config.baseUrl,
      page: ctx.page
    })
    .use(remarkRewriteImages, {
      github: ctx.config.github,
      page: ctx.page
    })
    .use(remarkStringify, {
      bullet: "-",
      emphasis: "*",
      strong: "**",
      rule: "-"
    });
  const file = await processor.process(extracted.contentWithoutH1);
  const body = String(file);

  // Frontmatter block (null/undefined values are omitted).
  const fmLines = Object.entries(frontmatter)
    .filter(([, value]) => value !== void 0 && value !== null)
    .map(([key, value]) => `${key}: ${formatYamlValue(value)}`);
  const output = ["---", ...fmLines, "---", ""];

  // Adapter-provided import statements (e.g. Callout component).
  const imports = ctx.adapter.getImports?.();
  if (imports && imports.length > 0) {
    output.push(...imports, "");
  }

  output.push(body.trim(), "");
  return output.join("\n");
}
|
|
316
|
+
/**
 * Serialize a frontmatter value as a YAML scalar.
 *
 * Strings are always double-quoted, with backslashes, double quotes, and
 * newlines escaped so the output is always valid YAML. (Previously quotes
 * were only escaped when another special character happened to be present
 * — `say "hi"` produced the broken scalar `"say "hi""` — and backslashes
 * and literal newlines were never escaped at all.)
 */
function formatYamlValue(value) {
  if (typeof value === "string") {
    const escaped = value
      .replace(/\\/g, "\\\\")
      .replace(/"/g, '\\"')
      .replace(/\n/g, "\\n");
    return `"${escaped}"`;
  }
  if (typeof value === "number" || typeof value === "boolean") {
    return String(value);
  }
  // Arrays/objects fall back to JSON, which is valid YAML.
  return JSON.stringify(value);
}
|
|
328
|
+
|
|
329
|
+
// src/adapters/fumadocs.ts
import { toString } from "mdast-util-to-string";
// GitHub alert kinds -> Fumadocs <Callout type="..."> values.
var ALERT_TYPE_MAP = {
  note: "info",
  tip: "info",
  important: "info",
  warning: "warn",
  caution: "error"
};
// Target adapter for Fumadocs: callout rendering, meta.json navigation,
// and frontmatter shape.
var fumadocsAdapter = {
  name: "fumadocs",
  // Replace a GitHub alert blockquote with a raw <Callout> HTML node.
  // NOTE(review): toString() flattens the blockquote to plain text, so any
  // inline formatting inside the alert is lost — confirm this is intended.
  transformAlert(type, node) {
    const calloutType = ALERT_TYPE_MAP[type];
    const content = toString(node);
    const htmlNode = {
      type: "html",
      value: `<Callout type="${calloutType}">
${content}
</Callout>`
    };
    return htmlNode;
  },
  // Emit meta.json listing pages in sidebar order. Only the last slug
  // segment is used, so nested slugs with the same leaf name collide here.
  generateNavConfig(pages) {
    const pageNames = pages.map((p) => {
      const parts = p.slug.split("/");
      return parts[parts.length - 1];
    });
    const meta = {
      title: "Documentation",
      pages: pageNames
    };
    return {
      filename: "meta.json",
      content: JSON.stringify(meta, null, 2) + "\n"
    };
  },
  // Fumadocs requires `title`; `description` is included only when set.
  generateFrontmatter(page) {
    const fm = {
      title: page.title ?? "Untitled"
    };
    if (page.description) {
      fm.description = page.description;
    }
    return fm;
  },
  // Import line prepended to every generated MDX file (for <Callout>).
  getImports() {
    return ["import { Callout } from 'fumadocs-ui/components/callout'"];
  }
};
|
|
378
|
+
|
|
379
|
+
// src/adapters/index.ts
/** Registry of framework adapters, keyed by the config `target` value. */
var adapters = {
  fumadocs: fumadocsAdapter
};

/**
 * Look up the adapter for a target framework.
 * @throws Error naming the unknown target and listing the known ones.
 */
function getAdapter(target) {
  const found = adapters[target];
  if (found) {
    return found;
  }
  throw new Error(
    `Unknown target "${target}". Available targets: ${Object.keys(adapters).join(", ")}`
  );
}
|
|
392
|
+
|
|
393
|
+
// src/core/pipeline.ts
/**
 * Run the full build: resolve source pages, transform each to MDX, write
 * the output tree, then emit the adapter's navigation config.
 *
 * Per-page failures are collected in `result.errors` rather than aborting
 * the whole build.
 *
 * @param config validated docsync config
 * @param cwd    repository root
 * @returns { pages: [{slug, outputPath}], errors: [{file, error}] }
 */
async function buildPipeline(config, cwd) {
  const adapter = getAdapter(config.target);
  const pages = await resolveSourceFiles(config.sources, cwd);
  const outDir = path4.resolve(cwd, config.outDir);

  // Start from a clean slate when configured (default: true).
  if (config.clean) {
    await fs2.rm(outDir, { recursive: true, force: true });
  }
  await fs2.mkdir(outDir, { recursive: true });

  const slugMap = buildSlugMap(pages);
  const result = { pages: [], errors: [] };

  for (const page of pages) {
    try {
      const markdown = await fs2.readFile(page.filePath, "utf-8");
      const mdx = await transformMarkdown(markdown, {
        page,
        slugMap,
        adapter,
        config
      });
      const outputPath = path4.join(outDir, `${page.slug}.mdx`);
      // Slugs may contain "/" — create intermediate directories as needed.
      await fs2.mkdir(path4.dirname(outputPath), { recursive: true });
      await fs2.writeFile(outputPath, mdx, "utf-8");
      result.pages.push({ slug: page.slug, outputPath });
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      result.errors.push({ file: page.relativePath, error: message });
    }
  }

  // Navigation config (e.g. meta.json for Fumadocs) is optional.
  const navConfig = adapter.generateNavConfig(pages);
  if (navConfig) {
    const navPath = path4.join(outDir, navConfig.filename);
    await fs2.writeFile(navPath, navConfig.content, "utf-8");
  }
  return result;
}
|
|
431
|
+
/**
 * Build the lookup used for link rewriting: repo-relative path -> slug.
 * Each page is registered twice — bare ("docs/guide.md") and with a "./"
 * prefix ("./docs/guide.md") — to match both link spellings.
 */
function buildSlugMap(pages) {
  const entries = pages.flatMap((page) => [
    [page.relativePath, page.slug],
    [`./${page.relativePath}`, page.slug]
  ]);
  return new Map(entries);
}
|
|
439
|
+
|
|
440
|
+
// Public API of this chunk, re-exported by dist/index.js and the CLI.
export {
  configSchema,
  loadDocSyncConfig,
  resolveSourceFiles,
  buildPipeline
};
//# sourceMappingURL=chunk-72B2YGTF.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/config/schema.ts","../src/config/loader.ts","../src/core/source-resolver.ts","../src/core/pipeline.ts","../src/transform/index.ts","../src/transform/gfm-alerts.ts","../src/transform/escape-mdx.ts","../src/transform/rewrite-links.ts","../src/transform/rewrite-images.ts","../src/transform/frontmatter.ts","../src/adapters/fumadocs.ts","../src/adapters/index.ts"],"sourcesContent":["import { z } from 'zod'\n\nconst sourceEntrySchema = z.object({\n /** File path or glob pattern (repo-relative) */\n path: z.string(),\n /** Override slug (for explicit entries only) */\n slug: z.string().optional(),\n /** Override page title */\n title: z.string().optional(),\n /** Override page description */\n description: z.string().optional(),\n /** Sidebar position (lower = higher) */\n order: z.number().optional(),\n})\n\nexport const configSchema = z.object({\n /** Source files to include */\n sources: z.array(sourceEntrySchema).min(1),\n /** Target docs framework */\n target: z.enum(['fumadocs']),\n /** Output directory for generated files */\n outDir: z.string().default('.docsync'),\n /** GitHub repository info for external link rewriting */\n github: z\n .object({\n repo: z.string(),\n branch: z.string().default('main'),\n })\n .optional(),\n /** Base URL path for docs */\n baseUrl: z.string().default('/docs'),\n /** Clean output directory before build */\n clean: z.boolean().default(true),\n})\n\nexport type DocSyncConfig = z.infer<typeof configSchema>\nexport type SourceEntry = z.infer<typeof sourceEntrySchema>\n","import { loadConfig } from 'c12'\nimport { configSchema, type DocSyncConfig } from './schema.js'\n\nexport async function loadDocSyncConfig(\n cwd: string = process.cwd(),\n): Promise<DocSyncConfig> {\n const { config } = await loadConfig<DocSyncConfig>({\n name: 'docsync',\n cwd,\n })\n\n const result = configSchema.safeParse(config)\n\n if (!result.success) {\n const errors = result.error.issues\n .map((i) => ` - ${i.path.join('.')}: 
${i.message}`)\n .join('\\n')\n throw new Error(`Invalid docsync config:\\n${errors}`)\n }\n\n return result.data\n}\n","import path from 'node:path'\nimport fs from 'node:fs/promises'\nimport fg from 'fast-glob'\nimport type { SourceEntry } from '../config/schema.js'\n\nexport interface ResolvedPage {\n /** Absolute file path */\n filePath: string\n /** Repo-relative file path */\n relativePath: string\n /** URL slug (no leading slash, no extension) */\n slug: string\n /** Page title (from config or extracted later from content) */\n title?: string\n /** Page description (from config or extracted later from content) */\n description?: string\n /** Sidebar order */\n order: number\n}\n\nexport async function resolveSourceFiles(\n sources: SourceEntry[],\n cwd: string,\n): Promise<ResolvedPage[]> {\n const pages: ResolvedPage[] = []\n let autoOrder = 0\n\n for (const source of sources) {\n const matches = await fg(source.path, {\n cwd,\n onlyFiles: true,\n absolute: false,\n })\n\n if (matches.length === 0) {\n console.warn(`[docsync] No files matched: ${source.path}`)\n continue\n }\n\n // Sort for deterministic ordering\n matches.sort()\n\n for (const match of matches) {\n const filePath = path.resolve(cwd, match)\n\n // Verify file exists and is readable\n await fs.access(filePath, fs.constants.R_OK)\n\n const slug = source.slug ?? deriveSlug(match)\n const order = source.order ?? 
autoOrder++\n\n pages.push({\n filePath,\n relativePath: match,\n slug,\n title: source.title,\n description: source.description,\n order,\n })\n }\n }\n\n // Deduplicate by slug (first entry wins)\n const seen = new Set<string>()\n const deduped: ResolvedPage[] = []\n for (const page of pages) {\n if (!seen.has(page.slug)) {\n seen.add(page.slug)\n deduped.push(page)\n } else {\n console.warn(\n `[docsync] Duplicate slug \"${page.slug}\" from ${page.relativePath} — skipped`,\n )\n }\n }\n\n // Sort by order\n deduped.sort((a, b) => a.order - b.order)\n\n return deduped\n}\n\n/**\n * Derive a URL slug from a file path.\n *\n * Examples:\n * README.md → index\n * docs/getting-started.md → getting-started\n * docs/guides/setup.md → guides/setup\n */\nfunction deriveSlug(filePath: string): string {\n const parsed = path.parse(filePath)\n const name = parsed.name.toLowerCase()\n\n // README → index\n if (name === 'readme') {\n // If it's in a subdirectory, use the directory path\n if (parsed.dir && parsed.dir !== '.') {\n return stripDocsPrefix(parsed.dir) + '/index'\n }\n return 'index'\n }\n\n const dir = parsed.dir && parsed.dir !== '.' ? stripDocsPrefix(parsed.dir) : ''\n const slug = dir ? 
`${dir}/${name}` : name\n\n return slug\n}\n\n/**\n * Strip common docs directory prefixes.\n * docs/guides/foo → guides/foo\n */\nfunction stripDocsPrefix(dir: string): string {\n return dir.replace(/^docs\\/?/, '')\n}\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\nimport type { DocSyncConfig } from '../config/schema.js'\nimport { resolveSourceFiles, type ResolvedPage } from './source-resolver.js'\nimport { transformMarkdown } from '../transform/index.js'\nimport { getAdapter } from '../adapters/index.js'\n\nexport interface BuildResult {\n pages: { slug: string; outputPath: string }[]\n errors: { file: string; error: string }[]\n}\n\nexport async function buildPipeline(\n config: DocSyncConfig,\n cwd: string,\n): Promise<BuildResult> {\n const adapter = getAdapter(config.target)\n const pages = await resolveSourceFiles(config.sources, cwd)\n const outDir = path.resolve(cwd, config.outDir)\n\n // Clean output directory if configured\n if (config.clean) {\n await fs.rm(outDir, { recursive: true, force: true })\n }\n await fs.mkdir(outDir, { recursive: true })\n\n const result: BuildResult = { pages: [], errors: [] }\n\n // Build a slug map for link rewriting\n const slugMap = buildSlugMap(pages)\n\n // Transform each page\n for (const page of pages) {\n try {\n const source = await fs.readFile(page.filePath, 'utf-8')\n\n const mdx = await transformMarkdown(source, {\n page,\n slugMap,\n adapter,\n config,\n })\n\n const outputPath = path.join(outDir, `${page.slug}.mdx`)\n await fs.mkdir(path.dirname(outputPath), { recursive: true })\n await fs.writeFile(outputPath, mdx, 'utf-8')\n\n result.pages.push({ slug: page.slug, outputPath })\n } catch (err) {\n result.errors.push({\n file: page.relativePath,\n error: err instanceof Error ? 
err.message : String(err),\n })\n }\n }\n\n // Generate navigation config\n const navConfig = adapter.generateNavConfig(pages)\n if (navConfig) {\n const navPath = path.join(outDir, navConfig.filename)\n await fs.writeFile(navPath, navConfig.content, 'utf-8')\n }\n\n return result\n}\n\n/**\n * Build a map from relative file paths to slugs for link rewriting.\n * Key: repo-relative path (e.g., \"docs/guide.md\")\n * Value: slug (e.g., \"guide\")\n */\nfunction buildSlugMap(pages: ResolvedPage[]): Map<string, string> {\n const map = new Map<string, string>()\n for (const page of pages) {\n map.set(page.relativePath, page.slug)\n // Also map with ./ prefix\n map.set(`./${page.relativePath}`, page.slug)\n }\n return map\n}\n","import { unified } from 'unified'\nimport remarkParse from 'remark-parse'\nimport remarkGfm from 'remark-gfm'\nimport remarkFrontmatter from 'remark-frontmatter'\nimport remarkStringify from 'remark-stringify'\nimport { remarkGfmAlerts } from './gfm-alerts.js'\nimport { remarkEscapeMdx } from './escape-mdx.js'\nimport { remarkRewriteLinks } from './rewrite-links.js'\nimport { remarkRewriteImages } from './rewrite-images.js'\nimport { extractFrontmatter } from './frontmatter.js'\nimport type { ResolvedPage } from '../core/source-resolver.js'\nimport type { DocSyncConfig } from '../config/schema.js'\nimport type { TargetAdapter } from '../adapters/types.js'\n\nexport interface TransformContext {\n page: ResolvedPage\n slugMap: Map<string, string>\n adapter: TargetAdapter\n config: DocSyncConfig\n}\n\nexport async function transformMarkdown(\n source: string,\n ctx: TransformContext,\n): Promise<string> {\n // Extract title and description from content if not set in config\n const { title, description, contentWithoutH1 } = extractFrontmatter(source, ctx.page)\n\n // Generate frontmatter via adapter\n const frontmatter = ctx.adapter.generateFrontmatter({\n ...ctx.page,\n title,\n description,\n })\n\n // eslint-disable-next-line 
@typescript-eslint/no-explicit-any\n const processor = (unified() as any)\n .use(remarkParse)\n .use(remarkGfm)\n .use(remarkFrontmatter)\n .use(remarkGfmAlerts, { adapter: ctx.adapter })\n .use(remarkEscapeMdx)\n .use(remarkRewriteLinks, {\n slugMap: ctx.slugMap,\n baseUrl: ctx.config.baseUrl,\n page: ctx.page,\n })\n .use(remarkRewriteImages, {\n github: ctx.config.github,\n page: ctx.page,\n })\n .use(remarkStringify, {\n bullet: '-',\n emphasis: '*',\n strong: '**',\n rule: '-',\n })\n\n const file = await processor.process(contentWithoutH1)\n const body = String(file)\n\n // Build final MDX output\n const lines: string[] = []\n\n // Frontmatter block\n lines.push('---')\n for (const [key, value] of Object.entries(frontmatter)) {\n if (value !== undefined && value !== null) {\n lines.push(`${key}: ${formatYamlValue(value)}`)\n }\n }\n lines.push('---')\n lines.push('')\n\n // Imports (from adapter)\n const imports = ctx.adapter.getImports?.()\n if (imports && imports.length > 0) {\n lines.push(...imports)\n lines.push('')\n }\n\n // Body\n lines.push(body.trim())\n lines.push('')\n\n return lines.join('\\n')\n}\n\nfunction formatYamlValue(value: unknown): string {\n if (typeof value === 'string') {\n // Quote strings that contain special YAML characters\n if (/[:#{}[\\],&*?|>!%@`]/.test(value) || value.includes('\\n')) {\n return `\"${value.replace(/\"/g, '\\\\\"')}\"`\n }\n return `\"${value}\"`\n }\n if (typeof value === 'number' || typeof value === 'boolean') {\n return String(value)\n }\n return JSON.stringify(value)\n}\n","import { visit } from 'unist-util-visit'\nimport type { Plugin } from 'unified'\nimport type { Blockquote, Root } from 'mdast'\nimport type { TargetAdapter } from '../adapters/types.js'\n\nexport type AlertType = 'note' | 'tip' | 'important' | 'warning' | 'caution'\n\ninterface GfmAlertsOptions {\n adapter: TargetAdapter\n}\n\nconst ALERT_PATTERN = /^\\[!(NOTE|TIP|IMPORTANT|WARNING|CAUTION)\\]\\s*/i\n\n/**\n * Remark plugin that 
transforms GitHub-style alerts in blockquotes:\n * > [!NOTE]\n * > Content here\n *\n * Into framework-specific callout components via the adapter.\n */\nexport const remarkGfmAlerts: Plugin<[GfmAlertsOptions], Root> = (options) => {\n return (tree) => {\n visit(tree, 'blockquote', (node: Blockquote, index, parent) => {\n if (!parent || index === undefined) return\n\n // Check first child for alert marker\n const firstChild = node.children[0]\n if (!firstChild || firstChild.type !== 'paragraph') return\n\n const firstInline = firstChild.children[0]\n if (!firstInline || firstInline.type !== 'text') return\n\n const match = firstInline.value.match(ALERT_PATTERN)\n if (!match) return\n\n const alertType = match[1].toLowerCase() as AlertType\n\n // Remove the alert marker from the text\n firstInline.value = firstInline.value.replace(ALERT_PATTERN, '')\n\n // If the first text node is now empty, remove it\n if (firstInline.value.trim() === '') {\n firstChild.children.shift()\n }\n\n // If the first paragraph is now empty, remove it\n if (firstChild.children.length === 0) {\n node.children.shift()\n }\n\n // Let the adapter transform the alert\n const replacement = options.adapter.transformAlert(alertType, node)\n if (replacement) {\n parent.children[index] = replacement\n }\n })\n }\n}\n","import { visit } from 'unist-util-visit'\nimport type { Plugin } from 'unified'\nimport type { Root, Text, Html } from 'mdast'\n\n/**\n * Remark plugin that escapes MDX-breaking syntax in text nodes:\n * - { } → \\{ \\}\n * - < > in text (not HTML tags) → \\< \\>\n * - <!-- --> HTML comments → {/* * /} JSX comments\n */\nexport const remarkEscapeMdx: Plugin<[], Root> = () => {\n return (tree) => {\n // Escape curly braces and angle brackets in text nodes\n visit(tree, 'text', (node: Text) => {\n // Escape curly braces\n node.value = node.value.replace(/([{}])/g, '\\\\$1')\n\n // Escape standalone angle brackets (not part of HTML tags)\n // Only escape < that isn't followed by a valid 
tag name or /\n node.value = node.value.replace(/<(?![a-zA-Z/!])/g, '\\\\<')\n node.value = node.value.replace(/(?<![a-zA-Z\"'/])>/g, '\\\\>')\n })\n\n // Convert HTML comments to JSX comments\n visit(tree, 'html', (node: Html, index, parent) => {\n if (!parent || index === undefined) return\n\n const commentMatch = node.value.match(/^<!--\\s*([\\s\\S]*?)\\s*-->$/)\n if (commentMatch) {\n node.value = `{/* ${commentMatch[1]} */}`\n }\n\n // Self-close void elements\n node.value = node.value.replace(\n /<(br|hr|img|input|meta|link)(\\s[^>]*)?\\s*>/gi,\n '<$1$2 />',\n )\n })\n }\n}\n","import path from 'node:path'\nimport { visit } from 'unist-util-visit'\nimport type { Plugin } from 'unified'\nimport type { Root, Link } from 'mdast'\nimport type { ResolvedPage } from '../core/source-resolver.js'\n\ninterface RewriteLinksOptions {\n slugMap: Map<string, string>\n baseUrl: string\n page: ResolvedPage\n}\n\n/**\n * Remark plugin that rewrites relative markdown links to docs-site URLs.\n *\n * Examples:\n * ./docs/guide.md → /docs/guide (if in source map)\n * ../README.md → /docs/index (if in source map)\n * ./src/index.ts → https://github.com/user/repo/blob/main/src/index.ts (if not in source map)\n * #section → #section (pass-through)\n * https://example.com → https://example.com (pass-through)\n */\nexport const remarkRewriteLinks: Plugin<[RewriteLinksOptions], Root> = (options) => {\n return (tree) => {\n visit(tree, 'link', (node: Link) => {\n const url = node.url\n\n // Skip external links and anchors\n if (url.startsWith('http://') || url.startsWith('https://') || url.startsWith('#')) {\n return\n }\n\n // Split URL and anchor\n const [urlPath, anchor] = url.split('#')\n\n // Resolve the relative path against the current file's directory\n const currentDir = path.dirname(options.page.relativePath)\n const resolvedPath = path.normalize(path.join(currentDir, urlPath))\n\n // Check if this file is in our source map\n const slug = options.slugMap.get(resolvedPath) ?? 
options.slugMap.get(`./${resolvedPath}`)\n\n if (slug !== undefined) {\n // Rewrite to docs-site URL\n const base = options.baseUrl.replace(/\\/$/, '')\n node.url = slug === 'index' ? base : `${base}/${slug}`\n if (anchor) {\n node.url += `#${anchor}`\n }\n }\n // If not in source map, leave as-is (or could rewrite to GitHub URL)\n })\n }\n}\n","import path from 'node:path'\nimport { visit } from 'unist-util-visit'\nimport type { Plugin } from 'unified'\nimport type { Root, Image } from 'mdast'\nimport type { ResolvedPage } from '../core/source-resolver.js'\n\ninterface RewriteImagesOptions {\n github?: { repo: string; branch: string }\n page: ResolvedPage\n}\n\n/**\n * Remark plugin that rewrites relative image paths to GitHub raw URLs.\n *\n * Examples:\n * ./assets/logo.png → https://raw.githubusercontent.com/user/repo/main/assets/logo.png\n * https://example.com/img.png → https://example.com/img.png (pass-through)\n */\nexport const remarkRewriteImages: Plugin<[RewriteImagesOptions], Root> = (options) => {\n return (tree) => {\n visit(tree, 'image', (node: Image) => {\n const url = node.url\n\n // Skip absolute URLs\n if (url.startsWith('http://') || url.startsWith('https://') || url.startsWith('data:')) {\n return\n }\n\n // If no GitHub config, leave relative paths as-is\n if (!options.github) return\n\n // Resolve relative path against current file's directory\n const currentDir = path.dirname(options.page.relativePath)\n const resolvedPath = path.normalize(path.join(currentDir, url))\n\n const { repo, branch } = options.github\n node.url = `https://raw.githubusercontent.com/${repo}/${branch}/${resolvedPath}`\n })\n }\n}\n","import type { ResolvedPage } from '../core/source-resolver.js'\n\ninterface ExtractedFrontmatter {\n title: string\n description?: string\n /** Content with the first H1 heading removed (since it becomes the title) */\n contentWithoutH1: string\n}\n\n/**\n * Extract title and description from markdown content.\n *\n * - Title: first # 
heading, or config override, or filename\n * - Description: first paragraph after the heading\n * - Strips the first H1 from content (it becomes frontmatter title)\n */\nexport function extractFrontmatter(\n source: string,\n page: ResolvedPage,\n): ExtractedFrontmatter {\n const lines = source.split('\\n')\n\n let title = page.title\n let description = page.description\n let h1LineIndex = -1\n\n // Find first H1\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i].trim()\n\n // Skip empty lines and frontmatter\n if (line === '' || line === '---') continue\n // Stop if we hit frontmatter block\n if (i === 0 && line === '---') {\n const endIndex = lines.indexOf('---', 1)\n if (endIndex !== -1) {\n // Skip existing frontmatter — we'll generate our own\n // TODO: merge existing frontmatter in future version\n continue\n }\n }\n\n const h1Match = line.match(/^#\\s+(.+)$/)\n if (h1Match) {\n if (!title) {\n title = h1Match[1].trim()\n }\n h1LineIndex = i\n break\n }\n }\n\n // Fallback title from filename\n if (!title) {\n title = page.slug === 'index' ? 
'Introduction' : slugToTitle(page.slug)\n }\n\n // Extract description from first paragraph after H1\n if (!description && h1LineIndex !== -1) {\n for (let i = h1LineIndex + 1; i < lines.length; i++) {\n const line = lines[i].trim()\n if (line === '') continue\n // Stop at headings, code blocks, lists, etc.\n if (line.startsWith('#') || line.startsWith('```') || line.startsWith('-') || line.startsWith('>')) {\n break\n }\n description = line\n break\n }\n }\n\n // Remove the first H1 line from content\n let contentWithoutH1 = source\n if (h1LineIndex !== -1) {\n const newLines = [...lines]\n newLines.splice(h1LineIndex, 1)\n // Also remove trailing empty line after H1 if present\n if (newLines[h1LineIndex]?.trim() === '') {\n newLines.splice(h1LineIndex, 1)\n }\n contentWithoutH1 = newLines.join('\\n')\n }\n\n return { title, description, contentWithoutH1 }\n}\n\nfunction slugToTitle(slug: string): string {\n const name = slug.split('/').pop() ?? slug\n return name\n .replace(/[-_]/g, ' ')\n .replace(/\\b\\w/g, (c) => c.toUpperCase())\n}\n","import type { Blockquote, RootContent, Html } from 'mdast'\nimport type { AlertType } from '../transform/gfm-alerts.js'\nimport type { ResolvedPage } from '../core/source-resolver.js'\nimport type { TargetAdapter, NavConfigOutput } from './types.js'\nimport { toString } from 'mdast-util-to-string'\n\n/**\n * Alert type mapping: GFM → Fumadocs Callout type\n *\n * Fumadocs Callout types: info, warn, error\n * GFM alert types: note, tip, important, warning, caution\n */\nconst ALERT_TYPE_MAP: Record<AlertType, string> = {\n note: 'info',\n tip: 'info',\n important: 'info',\n warning: 'warn',\n caution: 'error',\n}\n\nexport const fumadocsAdapter: TargetAdapter = {\n name: 'fumadocs',\n\n transformAlert(type: AlertType, node: Blockquote): RootContent {\n const calloutType = ALERT_TYPE_MAP[type]\n const content = toString(node)\n\n // Generate an HTML node with Callout JSX\n const htmlNode: Html = {\n type: 'html',\n value: 
`<Callout type=\"${calloutType}\">\\n${content}\\n</Callout>`,\n }\n\n return htmlNode\n },\n\n generateNavConfig(pages: ResolvedPage[]): NavConfigOutput {\n const pageNames = pages.map((p) => {\n // meta.json uses filenames without extension\n const parts = p.slug.split('/')\n return parts[parts.length - 1]\n })\n\n const meta = {\n title: 'Documentation',\n pages: pageNames,\n }\n\n return {\n filename: 'meta.json',\n content: JSON.stringify(meta, null, 2) + '\\n',\n }\n },\n\n generateFrontmatter(page: ResolvedPage): Record<string, unknown> {\n const fm: Record<string, unknown> = {\n title: page.title ?? 'Untitled',\n }\n\n if (page.description) {\n fm.description = page.description\n }\n\n return fm\n },\n\n getImports(): string[] {\n return [\"import { Callout } from 'fumadocs-ui/components/callout'\"]\n },\n}\n","import type { TargetAdapter } from './types.js'\nimport { fumadocsAdapter } from './fumadocs.js'\n\nconst adapters: Record<string, TargetAdapter> = {\n fumadocs: fumadocsAdapter,\n}\n\nexport function getAdapter(target: string): TargetAdapter {\n const adapter = adapters[target]\n if (!adapter) {\n throw new Error(\n `Unknown target \"${target}\". 
Available targets: ${Object.keys(adapters).join(', ')}`,\n )\n }\n return adapter\n}\n\nexport type { TargetAdapter, NavConfigOutput } from './types.js'\n"],"mappings":";AAAA,SAAS,SAAS;AAElB,IAAM,oBAAoB,EAAE,OAAO;AAAA;AAAA,EAEjC,MAAM,EAAE,OAAO;AAAA;AAAA,EAEf,MAAM,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE1B,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE3B,aAAa,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAEjC,OAAO,EAAE,OAAO,EAAE,SAAS;AAC7B,CAAC;AAEM,IAAM,eAAe,EAAE,OAAO;AAAA;AAAA,EAEnC,SAAS,EAAE,MAAM,iBAAiB,EAAE,IAAI,CAAC;AAAA;AAAA,EAEzC,QAAQ,EAAE,KAAK,CAAC,UAAU,CAAC;AAAA;AAAA,EAE3B,QAAQ,EAAE,OAAO,EAAE,QAAQ,UAAU;AAAA;AAAA,EAErC,QAAQ,EACL,OAAO;AAAA,IACN,MAAM,EAAE,OAAO;AAAA,IACf,QAAQ,EAAE,OAAO,EAAE,QAAQ,MAAM;AAAA,EACnC,CAAC,EACA,SAAS;AAAA;AAAA,EAEZ,SAAS,EAAE,OAAO,EAAE,QAAQ,OAAO;AAAA;AAAA,EAEnC,OAAO,EAAE,QAAQ,EAAE,QAAQ,IAAI;AACjC,CAAC;;;ACjCD,SAAS,kBAAkB;AAG3B,eAAsB,kBACpB,MAAc,QAAQ,IAAI,GACF;AACxB,QAAM,EAAE,OAAO,IAAI,MAAM,WAA0B;AAAA,IACjD,MAAM;AAAA,IACN;AAAA,EACF,CAAC;AAED,QAAM,SAAS,aAAa,UAAU,MAAM;AAE5C,MAAI,CAAC,OAAO,SAAS;AACnB,UAAM,SAAS,OAAO,MAAM,OACzB,IAAI,CAAC,MAAM,OAAO,EAAE,KAAK,KAAK,GAAG,CAAC,KAAK,EAAE,OAAO,EAAE,EAClD,KAAK,IAAI;AACZ,UAAM,IAAI,MAAM;AAAA,EAA4B,MAAM,EAAE;AAAA,EACtD;AAEA,SAAO,OAAO;AAChB;;;ACrBA,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,OAAO,QAAQ;AAkBf,eAAsB,mBACpB,SACA,KACyB;AACzB,QAAM,QAAwB,CAAC;AAC/B,MAAI,YAAY;AAEhB,aAAW,UAAU,SAAS;AAC5B,UAAM,UAAU,MAAM,GAAG,OAAO,MAAM;AAAA,MACpC;AAAA,MACA,WAAW;AAAA,MACX,UAAU;AAAA,IACZ,CAAC;AAED,QAAI,QAAQ,WAAW,GAAG;AACxB,cAAQ,KAAK,+BAA+B,OAAO,IAAI,EAAE;AACzD;AAAA,IACF;AAGA,YAAQ,KAAK;AAEb,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAW,KAAK,QAAQ,KAAK,KAAK;AAGxC,YAAM,GAAG,OAAO,UAAU,GAAG,UAAU,IAAI;AAE3C,YAAM,OAAO,OAAO,QAAQ,WAAW,KAAK;AAC5C,YAAM,QAAQ,OAAO,SAAS;AAE9B,YAAM,KAAK;AAAA,QACT;AAAA,QACA,cAAc;AAAA,QACd;AAAA,QACA,OAAO,OAAO;AAAA,QACd,aAAa,OAAO;AAAA,QACpB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAGA,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,UAA0B,CAAC;AACjC,aAAW,QAAQ,OAAO;AACxB,QAAI,CAAC,KAAK,IAAI,KAAK,IAAI,GAAG;AACxB,WAAK,IAAI,KAAK,IAAI;AAClB,cAAQ,KAAK,IAAI;AAAA,IACnB,OAAO;AACL,cAAQ;AAAA,Q
ACN,6BAA6B,KAAK,IAAI,UAAU,KAAK,YAAY;AAAA,MACnE;AAAA,IACF;AAAA,EACF;AAGA,UAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAExC,SAAO;AACT;AAUA,SAAS,WAAW,UAA0B;AAC5C,QAAM,SAAS,KAAK,MAAM,QAAQ;AAClC,QAAM,OAAO,OAAO,KAAK,YAAY;AAGrC,MAAI,SAAS,UAAU;AAErB,QAAI,OAAO,OAAO,OAAO,QAAQ,KAAK;AACpC,aAAO,gBAAgB,OAAO,GAAG,IAAI;AAAA,IACvC;AACA,WAAO;AAAA,EACT;AAEA,QAAM,MAAM,OAAO,OAAO,OAAO,QAAQ,MAAM,gBAAgB,OAAO,GAAG,IAAI;AAC7E,QAAM,OAAO,MAAM,GAAG,GAAG,IAAI,IAAI,KAAK;AAEtC,SAAO;AACT;AAMA,SAAS,gBAAgB,KAAqB;AAC5C,SAAO,IAAI,QAAQ,YAAY,EAAE;AACnC;;;ACnHA,OAAOA,SAAQ;AACf,OAAOC,WAAU;;;ACDjB,SAAS,eAAe;AACxB,OAAO,iBAAiB;AACxB,OAAO,eAAe;AACtB,OAAO,uBAAuB;AAC9B,OAAO,qBAAqB;;;ACJ5B,SAAS,aAAa;AAWtB,IAAM,gBAAgB;AASf,IAAM,kBAAoD,CAAC,YAAY;AAC5E,SAAO,CAAC,SAAS;AACf,UAAM,MAAM,cAAc,CAAC,MAAkB,OAAO,WAAW;AAC7D,UAAI,CAAC,UAAU,UAAU,OAAW;AAGpC,YAAM,aAAa,KAAK,SAAS,CAAC;AAClC,UAAI,CAAC,cAAc,WAAW,SAAS,YAAa;AAEpD,YAAM,cAAc,WAAW,SAAS,CAAC;AACzC,UAAI,CAAC,eAAe,YAAY,SAAS,OAAQ;AAEjD,YAAM,QAAQ,YAAY,MAAM,MAAM,aAAa;AACnD,UAAI,CAAC,MAAO;AAEZ,YAAM,YAAY,MAAM,CAAC,EAAE,YAAY;AAGvC,kBAAY,QAAQ,YAAY,MAAM,QAAQ,eAAe,EAAE;AAG/D,UAAI,YAAY,MAAM,KAAK,MAAM,IAAI;AACnC,mBAAW,SAAS,MAAM;AAAA,MAC5B;AAGA,UAAI,WAAW,SAAS,WAAW,GAAG;AACpC,aAAK,SAAS,MAAM;AAAA,MACtB;AAGA,YAAM,cAAc,QAAQ,QAAQ,eAAe,WAAW,IAAI;AAClE,UAAI,aAAa;AACf,eAAO,SAAS,KAAK,IAAI;AAAA,MAC3B;AAAA,IACF,CAAC;AAAA,EACH;AACF;;;ACzDA,SAAS,SAAAC,cAAa;AAUf,IAAM,kBAAoC,MAAM;AACrD,SAAO,CAAC,SAAS;AAEf,IAAAA,OAAM,MAAM,QAAQ,CAAC,SAAe;AAElC,WAAK,QAAQ,KAAK,MAAM,QAAQ,WAAW,MAAM;AAIjD,WAAK,QAAQ,KAAK,MAAM,QAAQ,oBAAoB,KAAK;AACzD,WAAK,QAAQ,KAAK,MAAM,QAAQ,sBAAsB,KAAK;AAAA,IAC7D,CAAC;AAGD,IAAAA,OAAM,MAAM,QAAQ,CAAC,MAAY,OAAO,WAAW;AACjD,UAAI,CAAC,UAAU,UAAU,OAAW;AAEpC,YAAM,eAAe,KAAK,MAAM,MAAM,2BAA2B;AACjE,UAAI,cAAc;AAChB,aAAK,QAAQ,OAAO,aAAa,CAAC,CAAC;AAAA,MACrC;AAGA,WAAK,QAAQ,KAAK,MAAM;AAAA,QACtB;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AACF;;;ACvCA,OAAOC,WAAU;AACjB,SAAS,SAAAC,cAAa;AAqBf,IAAM,qBAA0D,CAAC,YAAY;AAClF,SAAO,CAAC,SAAS;AACf,IAAAA,OAAM,MAAM,QAAQ,CAAC,SAAe;AAClC,YAAM,MAAM,KAAK;AAGjB,UAAI,IAAI,WAAW,S
AAS,KAAK,IAAI,WAAW,UAAU,KAAK,IAAI,WAAW,GAAG,GAAG;AAClF;AAAA,MACF;AAGA,YAAM,CAAC,SAAS,MAAM,IAAI,IAAI,MAAM,GAAG;AAGvC,YAAM,aAAaD,MAAK,QAAQ,QAAQ,KAAK,YAAY;AACzD,YAAM,eAAeA,MAAK,UAAUA,MAAK,KAAK,YAAY,OAAO,CAAC;AAGlE,YAAM,OAAO,QAAQ,QAAQ,IAAI,YAAY,KAAK,QAAQ,QAAQ,IAAI,KAAK,YAAY,EAAE;AAEzF,UAAI,SAAS,QAAW;AAEtB,cAAM,OAAO,QAAQ,QAAQ,QAAQ,OAAO,EAAE;AAC9C,aAAK,MAAM,SAAS,UAAU,OAAO,GAAG,IAAI,IAAI,IAAI;AACpD,YAAI,QAAQ;AACV,eAAK,OAAO,IAAI,MAAM;AAAA,QACxB;AAAA,MACF;AAAA,IAEF,CAAC;AAAA,EACH;AACF;;;ACrDA,OAAOE,WAAU;AACjB,SAAS,SAAAC,cAAa;AAiBf,IAAM,sBAA4D,CAAC,YAAY;AACpF,SAAO,CAAC,SAAS;AACf,IAAAA,OAAM,MAAM,SAAS,CAAC,SAAgB;AACpC,YAAM,MAAM,KAAK;AAGjB,UAAI,IAAI,WAAW,SAAS,KAAK,IAAI,WAAW,UAAU,KAAK,IAAI,WAAW,OAAO,GAAG;AACtF;AAAA,MACF;AAGA,UAAI,CAAC,QAAQ,OAAQ;AAGrB,YAAM,aAAaD,MAAK,QAAQ,QAAQ,KAAK,YAAY;AACzD,YAAM,eAAeA,MAAK,UAAUA,MAAK,KAAK,YAAY,GAAG,CAAC;AAE9D,YAAM,EAAE,MAAM,OAAO,IAAI,QAAQ;AACjC,WAAK,MAAM,qCAAqC,IAAI,IAAI,MAAM,IAAI,YAAY;AAAA,IAChF,CAAC;AAAA,EACH;AACF;;;ACvBO,SAAS,mBACd,QACA,MACsB;AACtB,QAAM,QAAQ,OAAO,MAAM,IAAI;AAE/B,MAAI,QAAQ,KAAK;AACjB,MAAI,cAAc,KAAK;AACvB,MAAI,cAAc;AAGlB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,OAAO,MAAM,CAAC,EAAE,KAAK;AAG3B,QAAI,SAAS,MAAM,SAAS,MAAO;AAEnC,QAAI,MAAM,KAAK,SAAS,OAAO;AAC7B,YAAM,WAAW,MAAM,QAAQ,OAAO,CAAC;AACvC,UAAI,aAAa,IAAI;AAGnB;AAAA,MACF;AAAA,IACF;AAEA,UAAM,UAAU,KAAK,MAAM,YAAY;AACvC,QAAI,SAAS;AACX,UAAI,CAAC,OAAO;AACV,gBAAQ,QAAQ,CAAC,EAAE,KAAK;AAAA,MAC1B;AACA,oBAAc;AACd;AAAA,IACF;AAAA,EACF;AAGA,MAAI,CAAC,OAAO;AACV,YAAQ,KAAK,SAAS,UAAU,iBAAiB,YAAY,KAAK,IAAI;AAAA,EACxE;AAGA,MAAI,CAAC,eAAe,gBAAgB,IAAI;AACtC,aAAS,IAAI,cAAc,GAAG,IAAI,MAAM,QAAQ,KAAK;AACnD,YAAM,OAAO,MAAM,CAAC,EAAE,KAAK;AAC3B,UAAI,SAAS,GAAI;AAEjB,UAAI,KAAK,WAAW,GAAG,KAAK,KAAK,WAAW,KAAK,KAAK,KAAK,WAAW,GAAG,KAAK,KAAK,WAAW,GAAG,GAAG;AAClG;AAAA,MACF;AACA,oBAAc;AACd;AAAA,IACF;AAAA,EACF;AAGA,MAAI,mBAAmB;AACvB,MAAI,gBAAgB,IAAI;AACtB,UAAM,WAAW,CAAC,GAAG,KAAK;AAC1B,aAAS,OAAO,aAAa,CAAC;AAE9B,QAAI,SAAS,WAAW,GAAG,KAAK,MAAM,IAAI;AACxC,eAAS,OAAO,aAAa,CAAC;AAAA,IAChC;AACA,uBAAmB,SAAS,KAAK,IAAI;AAAA,E
ACvC;AAEA,SAAO,EAAE,OAAO,aAAa,iBAAiB;AAChD;AAEA,SAAS,YAAY,MAAsB;AACzC,QAAM,OAAO,KAAK,MAAM,GAAG,EAAE,IAAI,KAAK;AACtC,SAAO,KACJ,QAAQ,SAAS,GAAG,EACpB,QAAQ,SAAS,CAAC,MAAM,EAAE,YAAY,CAAC;AAC5C;;;ALtEA,eAAsB,kBACpB,QACA,KACiB;AAEjB,QAAM,EAAE,OAAO,aAAa,iBAAiB,IAAI,mBAAmB,QAAQ,IAAI,IAAI;AAGpF,QAAM,cAAc,IAAI,QAAQ,oBAAoB;AAAA,IAClD,GAAG,IAAI;AAAA,IACP;AAAA,IACA;AAAA,EACF,CAAC;AAGD,QAAM,YAAa,QAAQ,EACxB,IAAI,WAAW,EACf,IAAI,SAAS,EACb,IAAI,iBAAiB,EACrB,IAAI,iBAAiB,EAAE,SAAS,IAAI,QAAQ,CAAC,EAC7C,IAAI,eAAe,EACnB,IAAI,oBAAoB;AAAA,IACvB,SAAS,IAAI;AAAA,IACb,SAAS,IAAI,OAAO;AAAA,IACpB,MAAM,IAAI;AAAA,EACZ,CAAC,EACA,IAAI,qBAAqB;AAAA,IACxB,QAAQ,IAAI,OAAO;AAAA,IACnB,MAAM,IAAI;AAAA,EACZ,CAAC,EACA,IAAI,iBAAiB;AAAA,IACpB,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,MAAM;AAAA,EACR,CAAC;AAEH,QAAM,OAAO,MAAM,UAAU,QAAQ,gBAAgB;AACrD,QAAM,OAAO,OAAO,IAAI;AAGxB,QAAM,QAAkB,CAAC;AAGzB,QAAM,KAAK,KAAK;AAChB,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,WAAW,GAAG;AACtD,QAAI,UAAU,UAAa,UAAU,MAAM;AACzC,YAAM,KAAK,GAAG,GAAG,KAAK,gBAAgB,KAAK,CAAC,EAAE;AAAA,IAChD;AAAA,EACF;AACA,QAAM,KAAK,KAAK;AAChB,QAAM,KAAK,EAAE;AAGb,QAAM,UAAU,IAAI,QAAQ,aAAa;AACzC,MAAI,WAAW,QAAQ,SAAS,GAAG;AACjC,UAAM,KAAK,GAAG,OAAO;AACrB,UAAM,KAAK,EAAE;AAAA,EACf;AAGA,QAAM,KAAK,KAAK,KAAK,CAAC;AACtB,QAAM,KAAK,EAAE;AAEb,SAAO,MAAM,KAAK,IAAI;AACxB;AAEA,SAAS,gBAAgB,OAAwB;AAC/C,MAAI,OAAO,UAAU,UAAU;AAE7B,QAAI,sBAAsB,KAAK,KAAK,KAAK,MAAM,SAAS,IAAI,GAAG;AAC7D,aAAO,IAAI,MAAM,QAAQ,MAAM,KAAK,CAAC;AAAA,IACvC;AACA,WAAO,IAAI,KAAK;AAAA,EAClB;AACA,MAAI,OAAO,UAAU,YAAY,OAAO,UAAU,WAAW;AAC3D,WAAO,OAAO,KAAK;AAAA,EACrB;AACA,SAAO,KAAK,UAAU,KAAK;AAC7B;;;AMhGA,SAAS,gBAAgB;AAQzB,IAAM,iBAA4C;AAAA,EAChD,MAAM;AAAA,EACN,KAAK;AAAA,EACL,WAAW;AAAA,EACX,SAAS;AAAA,EACT,SAAS;AACX;AAEO,IAAM,kBAAiC;AAAA,EAC5C,MAAM;AAAA,EAEN,eAAe,MAAiB,MAA+B;AAC7D,UAAM,cAAc,eAAe,IAAI;AACvC,UAAM,UAAU,SAAS,IAAI;AAG7B,UAAM,WAAiB;AAAA,MACrB,MAAM;AAAA,MACN,OAAO,kBAAkB,WAAW;AAAA,EAAO,OAAO;AAAA;AAAA,IACpD;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,kBAAkB,OAAwC;AACxD,UAAM,YAAY,MAAM,IAAI,CAAC,MAAM;AAEjC,YAAM,QAAQ,EAAE,KAAK,MAAM,GAAG;AAC9
B,aAAO,MAAM,MAAM,SAAS,CAAC;AAAA,IAC/B,CAAC;AAED,UAAM,OAAO;AAAA,MACX,OAAO;AAAA,MACP,OAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL,UAAU;AAAA,MACV,SAAS,KAAK,UAAU,MAAM,MAAM,CAAC,IAAI;AAAA,IAC3C;AAAA,EACF;AAAA,EAEA,oBAAoB,MAA6C;AAC/D,UAAM,KAA8B;AAAA,MAClC,OAAO,KAAK,SAAS;AAAA,IACvB;AAEA,QAAI,KAAK,aAAa;AACpB,SAAG,cAAc,KAAK;AAAA,IACxB;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,aAAuB;AACrB,WAAO,CAAC,0DAA0D;AAAA,EACpE;AACF;;;AClEA,IAAM,WAA0C;AAAA,EAC9C,UAAU;AACZ;AAEO,SAAS,WAAW,QAA+B;AACxD,QAAM,UAAU,SAAS,MAAM;AAC/B,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI;AAAA,MACR,mBAAmB,MAAM,yBAAyB,OAAO,KAAK,QAAQ,EAAE,KAAK,IAAI,CAAC;AAAA,IACpF;AAAA,EACF;AACA,SAAO;AACT;;;ARHA,eAAsB,cACpB,QACA,KACsB;AACtB,QAAM,UAAU,WAAW,OAAO,MAAM;AACxC,QAAM,QAAQ,MAAM,mBAAmB,OAAO,SAAS,GAAG;AAC1D,QAAM,SAASE,MAAK,QAAQ,KAAK,OAAO,MAAM;AAG9C,MAAI,OAAO,OAAO;AAChB,UAAMC,IAAG,GAAG,QAAQ,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EACtD;AACA,QAAMA,IAAG,MAAM,QAAQ,EAAE,WAAW,KAAK,CAAC;AAE1C,QAAM,SAAsB,EAAE,OAAO,CAAC,GAAG,QAAQ,CAAC,EAAE;AAGpD,QAAM,UAAU,aAAa,KAAK;AAGlC,aAAW,QAAQ,OAAO;AACxB,QAAI;AACF,YAAM,SAAS,MAAMA,IAAG,SAAS,KAAK,UAAU,OAAO;AAEvD,YAAM,MAAM,MAAM,kBAAkB,QAAQ;AAAA,QAC1C;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAED,YAAM,aAAaD,MAAK,KAAK,QAAQ,GAAG,KAAK,IAAI,MAAM;AACvD,YAAMC,IAAG,MAAMD,MAAK,QAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAC5D,YAAMC,IAAG,UAAU,YAAY,KAAK,OAAO;AAE3C,aAAO,MAAM,KAAK,EAAE,MAAM,KAAK,MAAM,WAAW,CAAC;AAAA,IACnD,SAAS,KAAK;AACZ,aAAO,OAAO,KAAK;AAAA,QACjB,MAAM,KAAK;AAAA,QACX,OAAO,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,MACxD,CAAC;AAAA,IACH;AAAA,EACF;AAGA,QAAM,YAAY,QAAQ,kBAAkB,KAAK;AACjD,MAAI,WAAW;AACb,UAAM,UAAUD,MAAK,KAAK,QAAQ,UAAU,QAAQ;AACpD,UAAMC,IAAG,UAAU,SAAS,UAAU,SAAS,OAAO;AAAA,EACxD;AAEA,SAAO;AACT;AAOA,SAAS,aAAa,OAA4C;AAChE,QAAM,MAAM,oBAAI,IAAoB;AACpC,aAAW,QAAQ,OAAO;AACxB,QAAI,IAAI,KAAK,cAAc,KAAK,IAAI;AAEpC,QAAI,IAAI,KAAK,KAAK,YAAY,IAAI,KAAK,IAAI;AAAA,EAC7C;AACA,SAAO;AACT;","names":["fs","path","visit","path","visit","path","visit","path","fs"]}
|
package/dist/cli.js
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import {
|
|
3
|
+
buildPipeline,
|
|
4
|
+
loadDocSyncConfig
|
|
5
|
+
} from "./chunk-72B2YGTF.js";
|
|
6
|
+
|
|
7
|
+
// src/cli/index.ts
|
|
8
|
+
import { defineCommand as defineCommand3, runMain } from "citty";
|
|
9
|
+
|
|
10
|
+
// src/cli/commands/build.ts
|
|
11
|
+
import { defineCommand } from "citty";
|
|
12
|
+
var buildCommand = defineCommand({
|
|
13
|
+
meta: {
|
|
14
|
+
name: "build",
|
|
15
|
+
description: "Transform markdown sources into docs-framework output."
|
|
16
|
+
},
|
|
17
|
+
args: {
|
|
18
|
+
cwd: {
|
|
19
|
+
type: "string",
|
|
20
|
+
description: "Working directory",
|
|
21
|
+
default: "."
|
|
22
|
+
}
|
|
23
|
+
},
|
|
24
|
+
async run({ args }) {
|
|
25
|
+
const cwd = args.cwd === "." ? process.cwd() : args.cwd;
|
|
26
|
+
console.log("[docsync] Loading config...");
|
|
27
|
+
let config;
|
|
28
|
+
try {
|
|
29
|
+
config = await loadDocSyncConfig(cwd);
|
|
30
|
+
} catch (err) {
|
|
31
|
+
console.error(
|
|
32
|
+
`[docsync] ${err instanceof Error ? err.message : String(err)}`
|
|
33
|
+
);
|
|
34
|
+
process.exit(1);
|
|
35
|
+
}
|
|
36
|
+
console.log(`[docsync] Target: ${config.target}`);
|
|
37
|
+
console.log(`[docsync] Output: ${config.outDir}`);
|
|
38
|
+
const result = await buildPipeline(config, cwd);
|
|
39
|
+
for (const page of result.pages) {
|
|
40
|
+
console.log(` \u2713 ${page.slug}.mdx`);
|
|
41
|
+
}
|
|
42
|
+
for (const error of result.errors) {
|
|
43
|
+
console.error(` \u2717 ${error.file}: ${error.error}`);
|
|
44
|
+
}
|
|
45
|
+
console.log(
|
|
46
|
+
`
|
|
47
|
+
[docsync] Done! ${result.pages.length} files written to ${config.outDir}/`
|
|
48
|
+
);
|
|
49
|
+
if (result.errors.length > 0) {
|
|
50
|
+
console.error(`[docsync] ${result.errors.length} error(s) occurred.`);
|
|
51
|
+
process.exit(1);
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
});
|
|
55
|
+
|
|
56
|
+
// src/cli/commands/init.ts
|
|
57
|
+
import fs from "fs/promises";
|
|
58
|
+
import path from "path";
|
|
59
|
+
import { defineCommand as defineCommand2 } from "citty";
|
|
60
|
+
var CONFIG_TEMPLATE = `import { defineConfig } from '@tansuasici/docsync'
|
|
61
|
+
|
|
62
|
+
export default defineConfig({
|
|
63
|
+
sources: [
|
|
64
|
+
{ path: 'README.md', slug: 'index', title: 'Introduction' },
|
|
65
|
+
{ path: 'docs/**/*.md' },
|
|
66
|
+
],
|
|
67
|
+
target: 'fumadocs',
|
|
68
|
+
outDir: '.docsync',
|
|
69
|
+
// github: { repo: 'user/repo' },
|
|
70
|
+
})
|
|
71
|
+
`;
|
|
72
|
+
var initCommand = defineCommand2({
|
|
73
|
+
meta: {
|
|
74
|
+
name: "init",
|
|
75
|
+
description: "Initialize a docsync config file."
|
|
76
|
+
},
|
|
77
|
+
args: {
|
|
78
|
+
cwd: {
|
|
79
|
+
type: "string",
|
|
80
|
+
description: "Working directory",
|
|
81
|
+
default: "."
|
|
82
|
+
}
|
|
83
|
+
},
|
|
84
|
+
async run({ args }) {
|
|
85
|
+
const cwd = args.cwd === "." ? process.cwd() : args.cwd;
|
|
86
|
+
const configPath = path.join(cwd, "docsync.config.ts");
|
|
87
|
+
try {
|
|
88
|
+
await fs.access(configPath);
|
|
89
|
+
console.log("[docsync] Config file already exists: docsync.config.ts");
|
|
90
|
+
return;
|
|
91
|
+
} catch {
|
|
92
|
+
}
|
|
93
|
+
await fs.writeFile(configPath, CONFIG_TEMPLATE, "utf-8");
|
|
94
|
+
console.log("[docsync] Created docsync.config.ts");
|
|
95
|
+
const gitignorePath = path.join(cwd, ".gitignore");
|
|
96
|
+
try {
|
|
97
|
+
const gitignore = await fs.readFile(gitignorePath, "utf-8");
|
|
98
|
+
if (!gitignore.includes(".docsync")) {
|
|
99
|
+
await fs.appendFile(gitignorePath, "\n.docsync/\n");
|
|
100
|
+
console.log("[docsync] Added .docsync/ to .gitignore");
|
|
101
|
+
}
|
|
102
|
+
} catch {
|
|
103
|
+
}
|
|
104
|
+
console.log("\nNext steps:");
|
|
105
|
+
console.log(" 1. Edit docsync.config.ts to configure your sources");
|
|
106
|
+
console.log(" 2. Run: npx docsync build");
|
|
107
|
+
}
|
|
108
|
+
});
|
|
109
|
+
|
|
110
|
+
// src/cli/index.ts
|
|
111
|
+
var main = defineCommand3({
|
|
112
|
+
meta: {
|
|
113
|
+
name: "docsync",
|
|
114
|
+
version: "0.0.1",
|
|
115
|
+
description: "Transform GitHub markdown into docs-framework-compatible output."
|
|
116
|
+
},
|
|
117
|
+
subCommands: {
|
|
118
|
+
build: buildCommand,
|
|
119
|
+
init: initCommand
|
|
120
|
+
}
|
|
121
|
+
});
|
|
122
|
+
runMain(main);
|
|
123
|
+
//# sourceMappingURL=cli.js.map
|
package/dist/cli.js.map
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/cli/index.ts","../src/cli/commands/build.ts","../src/cli/commands/init.ts"],"sourcesContent":["import { defineCommand, runMain } from 'citty'\nimport { buildCommand } from './commands/build.js'\nimport { initCommand } from './commands/init.js'\n\nconst main = defineCommand({\n meta: {\n name: 'docsync',\n version: '0.0.1',\n description: 'Transform GitHub markdown into docs-framework-compatible output.',\n },\n subCommands: {\n build: buildCommand,\n init: initCommand,\n },\n})\n\nrunMain(main)\n","import { defineCommand } from 'citty'\nimport { loadDocSyncConfig } from '../../config/loader.js'\nimport { buildPipeline } from '../../core/pipeline.js'\n\nexport const buildCommand = defineCommand({\n meta: {\n name: 'build',\n description: 'Transform markdown sources into docs-framework output.',\n },\n args: {\n cwd: {\n type: 'string',\n description: 'Working directory',\n default: '.',\n },\n },\n async run({ args }) {\n const cwd = args.cwd === '.' ? process.cwd() : args.cwd\n\n console.log('[docsync] Loading config...')\n\n let config\n try {\n config = await loadDocSyncConfig(cwd)\n } catch (err) {\n console.error(\n `[docsync] ${err instanceof Error ? err.message : String(err)}`,\n )\n process.exit(1)\n }\n\n console.log(`[docsync] Target: ${config.target}`)\n console.log(`[docsync] Output: ${config.outDir}`)\n\n const result = await buildPipeline(config, cwd)\n\n // Report results\n for (const page of result.pages) {\n console.log(` ✓ ${page.slug}.mdx`)\n }\n\n for (const error of result.errors) {\n console.error(` ✗ ${error.file}: ${error.error}`)\n }\n\n console.log(\n `\\n[docsync] Done! 
${result.pages.length} files written to ${config.outDir}/`,\n )\n\n if (result.errors.length > 0) {\n console.error(`[docsync] ${result.errors.length} error(s) occurred.`)\n process.exit(1)\n }\n },\n})\n","import fs from 'node:fs/promises'\nimport path from 'node:path'\nimport { defineCommand } from 'citty'\n\nconst CONFIG_TEMPLATE = `import { defineConfig } from '@tansuasici/docsync'\n\nexport default defineConfig({\n sources: [\n { path: 'README.md', slug: 'index', title: 'Introduction' },\n { path: 'docs/**/*.md' },\n ],\n target: 'fumadocs',\n outDir: '.docsync',\n // github: { repo: 'user/repo' },\n})\n`\n\nexport const initCommand = defineCommand({\n meta: {\n name: 'init',\n description: 'Initialize a docsync config file.',\n },\n args: {\n cwd: {\n type: 'string',\n description: 'Working directory',\n default: '.',\n },\n },\n async run({ args }) {\n const cwd = args.cwd === '.' ? process.cwd() : args.cwd\n const configPath = path.join(cwd, 'docsync.config.ts')\n\n // Check if config already exists\n try {\n await fs.access(configPath)\n console.log('[docsync] Config file already exists: docsync.config.ts')\n return\n } catch {\n // File doesn't exist — good\n }\n\n await fs.writeFile(configPath, CONFIG_TEMPLATE, 'utf-8')\n console.log('[docsync] Created docsync.config.ts')\n\n // Add .docsync/ to .gitignore if it exists\n const gitignorePath = path.join(cwd, '.gitignore')\n try {\n const gitignore = await fs.readFile(gitignorePath, 'utf-8')\n if (!gitignore.includes('.docsync')) {\n await fs.appendFile(gitignorePath, '\\n.docsync/\\n')\n console.log('[docsync] Added .docsync/ to .gitignore')\n }\n } catch {\n // No .gitignore — skip\n }\n\n console.log('\\nNext steps:')\n console.log(' 1. Edit docsync.config.ts to configure your sources')\n console.log(' 2. 
Run: npx docsync build')\n },\n})\n"],"mappings":";;;;;;AAAA,SAAS,iBAAAA,gBAAe,eAAe;;;ACAvC,SAAS,qBAAqB;AAIvB,IAAM,eAAe,cAAc;AAAA,EACxC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,MAAM,KAAK,QAAQ,MAAM,QAAQ,IAAI,IAAI,KAAK;AAEpD,YAAQ,IAAI,6BAA6B;AAEzC,QAAI;AACJ,QAAI;AACF,eAAS,MAAM,kBAAkB,GAAG;AAAA,IACtC,SAAS,KAAK;AACZ,cAAQ;AAAA,QACN,aAAa,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC/D;AACA,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,YAAQ,IAAI,qBAAqB,OAAO,MAAM,EAAE;AAChD,YAAQ,IAAI,qBAAqB,OAAO,MAAM,EAAE;AAEhD,UAAM,SAAS,MAAM,cAAc,QAAQ,GAAG;AAG9C,eAAW,QAAQ,OAAO,OAAO;AAC/B,cAAQ,IAAI,YAAO,KAAK,IAAI,MAAM;AAAA,IACpC;AAEA,eAAW,SAAS,OAAO,QAAQ;AACjC,cAAQ,MAAM,YAAO,MAAM,IAAI,KAAK,MAAM,KAAK,EAAE;AAAA,IACnD;AAEA,YAAQ;AAAA,MACN;AAAA,kBAAqB,OAAO,MAAM,MAAM,qBAAqB,OAAO,MAAM;AAAA,IAC5E;AAEA,QAAI,OAAO,OAAO,SAAS,GAAG;AAC5B,cAAQ,MAAM,aAAa,OAAO,OAAO,MAAM,qBAAqB;AACpE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAAA,EACF;AACF,CAAC;;;ACtDD,OAAO,QAAQ;AACf,OAAO,UAAU;AACjB,SAAS,iBAAAC,sBAAqB;AAE9B,IAAM,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAajB,IAAM,cAAcA,eAAc;AAAA,EACvC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,MAAM,KAAK,QAAQ,MAAM,QAAQ,IAAI,IAAI,KAAK;AACpD,UAAM,aAAa,KAAK,KAAK,KAAK,mBAAmB;AAGrD,QAAI;AACF,YAAM,GAAG,OAAO,UAAU;AAC1B,cAAQ,IAAI,yDAAyD;AACrE;AAAA,IACF,QAAQ;AAAA,IAER;AAEA,UAAM,GAAG,UAAU,YAAY,iBAAiB,OAAO;AACvD,YAAQ,IAAI,qCAAqC;AAGjD,UAAM,gBAAgB,KAAK,KAAK,KAAK,YAAY;AACjD,QAAI;AACF,YAAM,YAAY,MAAM,GAAG,SAAS,eAAe,OAAO;AAC1D,UAAI,CAAC,UAAU,SAAS,UAAU,GAAG;AACnC,cAAM,GAAG,WAAW,eAAe,eAAe;AAClD,gBAAQ,IAAI,yCAAyC;AAAA,MACvD;AAAA,IACF,QAAQ;AAAA,IAER;AAEA,YAAQ,IAAI,eAAe;AAC3B,YAAQ,IAAI,uDAAuD;AACnE,YAAQ,IAAI,6BAA6B;AAAA,EAC3C;AACF,CAAC;;;AFzDD,IAAM,OAAOC,eAAc;AAAA,EACzB,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,EA
Cf;AAAA,EACA,aAAa;AAAA,IACX,OAAO;AAAA,IACP,MAAM;AAAA,EACR;AACF,CAAC;AAED,QAAQ,IAAI;","names":["defineCommand","defineCommand","defineCommand"]}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import { z } from 'zod';
|
|
2
|
+
|
|
3
|
+
declare const sourceEntrySchema: z.ZodObject<{
|
|
4
|
+
path: z.ZodString;
|
|
5
|
+
slug: z.ZodOptional<z.ZodString>;
|
|
6
|
+
title: z.ZodOptional<z.ZodString>;
|
|
7
|
+
description: z.ZodOptional<z.ZodString>;
|
|
8
|
+
order: z.ZodOptional<z.ZodNumber>;
|
|
9
|
+
}, z.core.$strip>;
|
|
10
|
+
declare const configSchema: z.ZodObject<{
|
|
11
|
+
sources: z.ZodArray<z.ZodObject<{
|
|
12
|
+
path: z.ZodString;
|
|
13
|
+
slug: z.ZodOptional<z.ZodString>;
|
|
14
|
+
title: z.ZodOptional<z.ZodString>;
|
|
15
|
+
description: z.ZodOptional<z.ZodString>;
|
|
16
|
+
order: z.ZodOptional<z.ZodNumber>;
|
|
17
|
+
}, z.core.$strip>>;
|
|
18
|
+
target: z.ZodEnum<{
|
|
19
|
+
fumadocs: "fumadocs";
|
|
20
|
+
}>;
|
|
21
|
+
outDir: z.ZodDefault<z.ZodString>;
|
|
22
|
+
github: z.ZodOptional<z.ZodObject<{
|
|
23
|
+
repo: z.ZodString;
|
|
24
|
+
branch: z.ZodDefault<z.ZodString>;
|
|
25
|
+
}, z.core.$strip>>;
|
|
26
|
+
baseUrl: z.ZodDefault<z.ZodString>;
|
|
27
|
+
clean: z.ZodDefault<z.ZodBoolean>;
|
|
28
|
+
}, z.core.$strip>;
|
|
29
|
+
type DocSyncConfig = z.infer<typeof configSchema>;
|
|
30
|
+
type SourceEntry = z.infer<typeof sourceEntrySchema>;
|
|
31
|
+
|
|
32
|
+
declare function loadDocSyncConfig(cwd?: string): Promise<DocSyncConfig>;
|
|
33
|
+
|
|
34
|
+
interface ResolvedPage {
|
|
35
|
+
/** Absolute file path */
|
|
36
|
+
filePath: string;
|
|
37
|
+
/** Repo-relative file path */
|
|
38
|
+
relativePath: string;
|
|
39
|
+
/** URL slug (no leading slash, no extension) */
|
|
40
|
+
slug: string;
|
|
41
|
+
/** Page title (from config or extracted later from content) */
|
|
42
|
+
title?: string;
|
|
43
|
+
/** Page description (from config or extracted later from content) */
|
|
44
|
+
description?: string;
|
|
45
|
+
/** Sidebar order */
|
|
46
|
+
order: number;
|
|
47
|
+
}
|
|
48
|
+
declare function resolveSourceFiles(sources: SourceEntry[], cwd: string): Promise<ResolvedPage[]>;
|
|
49
|
+
|
|
50
|
+
interface BuildResult {
|
|
51
|
+
pages: {
|
|
52
|
+
slug: string;
|
|
53
|
+
outputPath: string;
|
|
54
|
+
}[];
|
|
55
|
+
errors: {
|
|
56
|
+
file: string;
|
|
57
|
+
error: string;
|
|
58
|
+
}[];
|
|
59
|
+
}
|
|
60
|
+
declare function buildPipeline(config: DocSyncConfig, cwd: string): Promise<BuildResult>;
|
|
61
|
+
|
|
62
|
+
/**
|
|
63
|
+
* Helper for type-safe config files.
|
|
64
|
+
*
|
|
65
|
+
* @example
|
|
66
|
+
* ```ts
|
|
67
|
+
* // docsync.config.ts
|
|
68
|
+
* import { defineConfig } from 'docsync'
|
|
69
|
+
*
|
|
70
|
+
* export default defineConfig({
|
|
71
|
+
* sources: [{ path: 'README.md', slug: 'index' }],
|
|
72
|
+
* target: 'fumadocs',
|
|
73
|
+
* })
|
|
74
|
+
* ```
|
|
75
|
+
*/
|
|
76
|
+
declare function defineConfig(config: DocSyncConfig): DocSyncConfig;
|
|
77
|
+
|
|
78
|
+
export { type DocSyncConfig, type ResolvedPage, type SourceEntry, buildPipeline, configSchema, defineConfig, loadDocSyncConfig, resolveSourceFiles };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import {
|
|
2
|
+
buildPipeline,
|
|
3
|
+
configSchema,
|
|
4
|
+
loadDocSyncConfig,
|
|
5
|
+
resolveSourceFiles
|
|
6
|
+
} from "./chunk-72B2YGTF.js";
|
|
7
|
+
|
|
8
|
+
// src/index.ts
|
|
9
|
+
function defineConfig(config) {
|
|
10
|
+
return config;
|
|
11
|
+
}
|
|
12
|
+
export {
|
|
13
|
+
buildPipeline,
|
|
14
|
+
configSchema,
|
|
15
|
+
defineConfig,
|
|
16
|
+
loadDocSyncConfig,
|
|
17
|
+
resolveSourceFiles
|
|
18
|
+
};
|
|
19
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts"],"sourcesContent":["import type { DocSyncConfig } from './config/schema.js'\n\nexport { configSchema } from './config/schema.js'\nexport type { DocSyncConfig, SourceEntry } from './config/schema.js'\nexport { loadDocSyncConfig } from './config/loader.js'\nexport { resolveSourceFiles } from './core/source-resolver.js'\nexport type { ResolvedPage } from './core/source-resolver.js'\nexport { buildPipeline } from './core/pipeline.js'\n\n/**\n * Helper for type-safe config files.\n *\n * @example\n * ```ts\n * // docsync.config.ts\n * import { defineConfig } from 'docsync'\n *\n * export default defineConfig({\n * sources: [{ path: 'README.md', slug: 'index' }],\n * target: 'fumadocs',\n * })\n * ```\n */\nexport function defineConfig(config: DocSyncConfig): DocSyncConfig {\n return config\n}\n"],"mappings":";;;;;;;;AAuBO,SAAS,aAAa,QAAsC;AACjE,SAAO;AACT;","names":[]}
|
package/package.json
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@tansuasici/docsync",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "Transform GitHub markdown into docs-framework-compatible output. Write once in GFM, publish everywhere.",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"docsync": "./dist/cli.js"
|
|
8
|
+
},
|
|
9
|
+
"exports": {
|
|
10
|
+
".": {
|
|
11
|
+
"import": "./dist/index.js",
|
|
12
|
+
"types": "./dist/index.d.ts"
|
|
13
|
+
}
|
|
14
|
+
},
|
|
15
|
+
"files": [
|
|
16
|
+
"dist"
|
|
17
|
+
],
|
|
18
|
+
"scripts": {
|
|
19
|
+
"build": "tsup",
|
|
20
|
+
"dev": "tsup --watch",
|
|
21
|
+
"test": "vitest run",
|
|
22
|
+
"test:watch": "vitest",
|
|
23
|
+
"lint": "eslint src/",
|
|
24
|
+
"format": "prettier --write 'src/**/*.ts'",
|
|
25
|
+
"typecheck": "tsc --noEmit",
|
|
26
|
+
"prepublishOnly": "pnpm run build"
|
|
27
|
+
},
|
|
28
|
+
"keywords": [
|
|
29
|
+
"docs",
|
|
30
|
+
"documentation",
|
|
31
|
+
"markdown",
|
|
32
|
+
"mdx",
|
|
33
|
+
"fumadocs",
|
|
34
|
+
"github",
|
|
35
|
+
"gfm",
|
|
36
|
+
"sync",
|
|
37
|
+
"cli"
|
|
38
|
+
],
|
|
39
|
+
"author": "tansuasici",
|
|
40
|
+
"license": "MIT",
|
|
41
|
+
"packageManager": "pnpm@10.29.3",
|
|
42
|
+
"dependencies": {
|
|
43
|
+
"c12": "4.0.0-beta.3",
|
|
44
|
+
"citty": "^0.2.1",
|
|
45
|
+
"fast-glob": "^3.3.3",
|
|
46
|
+
"mdast-util-to-string": "^4.0.0",
|
|
47
|
+
"remark-frontmatter": "^5.0.0",
|
|
48
|
+
"remark-gfm": "^4.0.1",
|
|
49
|
+
"remark-mdx": "^3.1.1",
|
|
50
|
+
"remark-parse": "^11.0.0",
|
|
51
|
+
"remark-stringify": "^11.0.0",
|
|
52
|
+
"unified": "^11.0.5",
|
|
53
|
+
"unist-util-visit": "^5.1.0",
|
|
54
|
+
"zod": "^4.3.6"
|
|
55
|
+
},
|
|
56
|
+
"devDependencies": {
|
|
57
|
+
"@types/mdast": "^4.0.4",
|
|
58
|
+
"@types/node": "^25.4.0",
|
|
59
|
+
"@types/unist": "^3.0.3",
|
|
60
|
+
"eslint": "^10.0.3",
|
|
61
|
+
"prettier": "^3.8.1",
|
|
62
|
+
"tsup": "^8.5.1",
|
|
63
|
+
"typescript": "^5.9.3",
|
|
64
|
+
"vitest": "^4.0.18"
|
|
65
|
+
},
|
|
66
|
+
"pnpm": {
|
|
67
|
+
"onlyBuiltDependencies": [
|
|
68
|
+
"esbuild"
|
|
69
|
+
]
|
|
70
|
+
}
|
|
71
|
+
}
|