docs-i18n 0.7.5 → 0.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/admin/dist/server/server.js +32 -32
- package/dist/cli.js +1 -1
- package/dist/{upload-XL6KG6S2.js → upload-KYKJVERO.js} +1 -1
- package/package.json +1 -1
- package/template/app/components/BlogArticle.tsx +3 -0
- package/template/app/components/Doc.tsx +4 -0
- package/template/app/components/markdown/MarkdownContent.tsx +6 -2
- package/template/app/site.config.ts +2 -0
- package/template/app/types/index.ts +4 -0
- package/template/app/utils/content-loader.ts +85 -32
- package/template/app/utils/docs.server.ts +38 -6
- package/template/app/utils/markdown/plugins/index.ts +1 -0
- package/template/app/utils/markdown/plugins/mdxJsxToRaw.ts +127 -0
- package/template/app/utils/markdown/processor.ts +14 -1
- package/template/app/utils/sidebar-generator.ts +185 -0
- package/template/app/utils/url-mapper.ts +22 -0
- package/template/content/docs-i18n/en/admin.md +143 -0
- package/template/content/docs-i18n/en/cli.md +324 -0
- package/template/package.json +2 -1
|
@@ -15556,38 +15556,6 @@ async function getStartManifest(matchedRoutes) {
|
|
|
15556
15556
|
//#endregion
|
|
15557
15557
|
//#region \0#tanstack-start-server-fn-resolver
|
|
15558
15558
|
var manifest = {
|
|
15559
|
-
"a3d81974aeece150d4b02be5b91590b8187442ebea56be4a89dcbf053626d22b": {
|
|
15560
|
-
functionName: "fetchVersion_createServerFn_handler",
|
|
15561
|
-
importer: () => import("./assets/misc-y6t3-UOP.js")
|
|
15562
|
-
},
|
|
15563
|
-
"3bf4ba50ca8ccc3c8c60d8f2e53307a320940d68c478df494552066904c5cd74": {
|
|
15564
|
-
functionName: "fetchLlmConfig_createServerFn_handler",
|
|
15565
|
-
importer: () => import("./assets/misc-y6t3-UOP.js")
|
|
15566
|
-
},
|
|
15567
|
-
"e0b4116f6b2c8d096830102e36458acf9c616a056fcdddda956a4d66984ef58c": {
|
|
15568
|
-
functionName: "fetchConfig_createServerFn_handler",
|
|
15569
|
-
importer: () => import("./assets/misc-y6t3-UOP.js")
|
|
15570
|
-
},
|
|
15571
|
-
"a054a04356fe9987891efee8b7a11cd2dedb00f6b2e8f26d1c642e001e553d53": {
|
|
15572
|
-
functionName: "openFile_createServerFn_handler",
|
|
15573
|
-
importer: () => import("./assets/misc-y6t3-UOP.js")
|
|
15574
|
-
},
|
|
15575
|
-
"421de02ce39dde6e27cf4689e837ec072cbd01e63f8cdd5c2a3f42f0bd5ca613": {
|
|
15576
|
-
functionName: "fetchJobs_createServerFn_handler",
|
|
15577
|
-
importer: () => import("./assets/jobs-FXffC7LH.js")
|
|
15578
|
-
},
|
|
15579
|
-
"c08559ac758aa0d315deaca7a0d7d923a9a44d997c8cb811151417c1f221ddd6": {
|
|
15580
|
-
functionName: "createJob_createServerFn_handler",
|
|
15581
|
-
importer: () => import("./assets/jobs-FXffC7LH.js")
|
|
15582
|
-
},
|
|
15583
|
-
"8a56694c9d7b29422a3e7d2f6b803be100d79d3853d92d465cb55ed572781e62": {
|
|
15584
|
-
functionName: "fetchJob_createServerFn_handler",
|
|
15585
|
-
importer: () => import("./assets/jobs-FXffC7LH.js")
|
|
15586
|
-
},
|
|
15587
|
-
"88c2855c84e91504070bfecc50ddfa50339d22c305626800b6d9b05d79385d71": {
|
|
15588
|
-
functionName: "deleteJob_createServerFn_handler",
|
|
15589
|
-
importer: () => import("./assets/jobs-FXffC7LH.js")
|
|
15590
|
-
},
|
|
15591
15559
|
"4e218d79545765572808c7eab33b7663d4496209c15406d0b449366905b6b83f": {
|
|
15592
15560
|
functionName: "fetchStatus_createServerFn_handler",
|
|
15593
15561
|
importer: () => import("./assets/status-CM7Azp4n.js")
|
|
@@ -15611,6 +15579,38 @@ var manifest = {
|
|
|
15611
15579
|
"5080dc3f2f2309ec6981b94c431969637130c657e8a1dfb10400b4614eecc1ea": {
|
|
15612
15580
|
functionName: "fetchModels_createServerFn_handler",
|
|
15613
15581
|
importer: () => import("./assets/models-YNa3F3nn.js")
|
|
15582
|
+
},
|
|
15583
|
+
"421de02ce39dde6e27cf4689e837ec072cbd01e63f8cdd5c2a3f42f0bd5ca613": {
|
|
15584
|
+
functionName: "fetchJobs_createServerFn_handler",
|
|
15585
|
+
importer: () => import("./assets/jobs-FXffC7LH.js")
|
|
15586
|
+
},
|
|
15587
|
+
"c08559ac758aa0d315deaca7a0d7d923a9a44d997c8cb811151417c1f221ddd6": {
|
|
15588
|
+
functionName: "createJob_createServerFn_handler",
|
|
15589
|
+
importer: () => import("./assets/jobs-FXffC7LH.js")
|
|
15590
|
+
},
|
|
15591
|
+
"8a56694c9d7b29422a3e7d2f6b803be100d79d3853d92d465cb55ed572781e62": {
|
|
15592
|
+
functionName: "fetchJob_createServerFn_handler",
|
|
15593
|
+
importer: () => import("./assets/jobs-FXffC7LH.js")
|
|
15594
|
+
},
|
|
15595
|
+
"88c2855c84e91504070bfecc50ddfa50339d22c305626800b6d9b05d79385d71": {
|
|
15596
|
+
functionName: "deleteJob_createServerFn_handler",
|
|
15597
|
+
importer: () => import("./assets/jobs-FXffC7LH.js")
|
|
15598
|
+
},
|
|
15599
|
+
"a3d81974aeece150d4b02be5b91590b8187442ebea56be4a89dcbf053626d22b": {
|
|
15600
|
+
functionName: "fetchVersion_createServerFn_handler",
|
|
15601
|
+
importer: () => import("./assets/misc-y6t3-UOP.js")
|
|
15602
|
+
},
|
|
15603
|
+
"3bf4ba50ca8ccc3c8c60d8f2e53307a320940d68c478df494552066904c5cd74": {
|
|
15604
|
+
functionName: "fetchLlmConfig_createServerFn_handler",
|
|
15605
|
+
importer: () => import("./assets/misc-y6t3-UOP.js")
|
|
15606
|
+
},
|
|
15607
|
+
"e0b4116f6b2c8d096830102e36458acf9c616a056fcdddda956a4d66984ef58c": {
|
|
15608
|
+
functionName: "fetchConfig_createServerFn_handler",
|
|
15609
|
+
importer: () => import("./assets/misc-y6t3-UOP.js")
|
|
15610
|
+
},
|
|
15611
|
+
"a054a04356fe9987891efee8b7a11cd2dedb00f6b2e8f26d1c642e001e553d53": {
|
|
15612
|
+
functionName: "openFile_createServerFn_handler",
|
|
15613
|
+
importer: () => import("./assets/misc-y6t3-UOP.js")
|
|
15614
15614
|
}
|
|
15615
15615
|
};
|
|
15616
15616
|
async function getServerFnById(id) {
|
package/dist/cli.js
CHANGED
|
@@ -72,7 +72,7 @@ async function handleSiteCommand() {
|
|
|
72
72
|
child.on("exit", (code) => process.exit(code ?? 0));
|
|
73
73
|
} else if (subCommand === "upload") {
|
|
74
74
|
console.log("Uploading content to D1...");
|
|
75
|
-
const { collectContentFiles, generateContentSql, collectTranslations, generateTranslationSql } = await import("./upload-
|
|
75
|
+
const { collectContentFiles, generateContentSql, collectTranslations, generateTranslationSql } = await import("./upload-KYKJVERO.js");
|
|
76
76
|
const projectRoot = process.cwd();
|
|
77
77
|
const contentRows = collectContentFiles(projectRoot);
|
|
78
78
|
const contentSql = generateContentSql(contentRows);
|
|
@@ -21,7 +21,7 @@ function walkDir(dir, contentRoot, rows) {
|
|
|
21
21
|
const fullPath = join(dir, entry.name);
|
|
22
22
|
if (entry.isDirectory() && !entry.name.startsWith(".")) {
|
|
23
23
|
walkDir(fullPath, contentRoot, rows);
|
|
24
|
-
} else if (entry.isFile() && (entry.name.endsWith(".md") || entry.name.endsWith(".json"))) {
|
|
24
|
+
} else if (entry.isFile() && (entry.name.endsWith(".md") || entry.name.endsWith(".mdx") || entry.name.endsWith(".json"))) {
|
|
25
25
|
const relativePath = relative(contentRoot, fullPath);
|
|
26
26
|
const body = readFileSync(fullPath, "utf-8");
|
|
27
27
|
const parts = relativePath.split("/");
|
package/package.json
CHANGED
|
@@ -19,6 +19,7 @@ import { MarkdownContent } from '~/components/markdown'
|
|
|
19
19
|
import { Toc } from '~/components/Toc'
|
|
20
20
|
import { Breadcrumbs } from '~/components/Breadcrumbs'
|
|
21
21
|
import { FallbackBanner } from '~/components/FallbackBanner'
|
|
22
|
+
import { useSiteConfig } from '~/utils/site-config'
|
|
22
23
|
import type { LoadedBlogPost } from '~/utils/blog'
|
|
23
24
|
|
|
24
25
|
type BlogArticleProps = {
|
|
@@ -29,6 +30,7 @@ type BlogArticleProps = {
|
|
|
29
30
|
}
|
|
30
31
|
|
|
31
32
|
export function BlogArticle({ post, lang, locale }: BlogArticleProps) {
|
|
33
|
+
const siteConfig = useSiteConfig()
|
|
32
34
|
const { title, content, authors, published, filePath, isFallback } = post
|
|
33
35
|
|
|
34
36
|
// Prepend byline to content (matches tanstack.com pattern)
|
|
@@ -137,6 +139,7 @@ ${content}`
|
|
|
137
139
|
branch=""
|
|
138
140
|
filePath={filePath}
|
|
139
141
|
containerRef={markdownContainerRef}
|
|
142
|
+
customComponents={siteConfig.components}
|
|
140
143
|
/>
|
|
141
144
|
</div>
|
|
142
145
|
{isTocVisible && (
|
|
@@ -11,6 +11,7 @@ import { MarkdownContent } from '~/components/markdown'
|
|
|
11
11
|
import type { DocsConfig, MarkdownHeading } from '~/types'
|
|
12
12
|
import { useLocalCurrentFramework } from './FrameworkSelect'
|
|
13
13
|
import { useParams } from '@tanstack/react-router'
|
|
14
|
+
import { useSiteConfig } from '~/utils/site-config'
|
|
14
15
|
|
|
15
16
|
type DocProps = {
|
|
16
17
|
title: string
|
|
@@ -50,6 +51,8 @@ export function Doc({
|
|
|
50
51
|
isFallback = false,
|
|
51
52
|
locale,
|
|
52
53
|
}: DocProps) {
|
|
54
|
+
const siteConfig = useSiteConfig()
|
|
55
|
+
|
|
53
56
|
// Extract headings synchronously during render to avoid hydration mismatch
|
|
54
57
|
const { headings, markup } = React.useMemo(
|
|
55
58
|
() => renderMarkdown(content),
|
|
@@ -154,6 +157,7 @@ export function Doc({
|
|
|
154
157
|
htmlMarkup={markup}
|
|
155
158
|
containerRef={markdownContainerRef}
|
|
156
159
|
currentFramework={currentFramework}
|
|
160
|
+
customComponents={siteConfig.components}
|
|
157
161
|
titleBarActions={
|
|
158
162
|
setIsFullWidth ? (
|
|
159
163
|
<button
|
|
@@ -3,6 +3,7 @@ import { SquarePen } from 'lucide-react'
|
|
|
3
3
|
import { twMerge } from 'tailwind-merge'
|
|
4
4
|
import { Markdown } from './Markdown'
|
|
5
5
|
import { Button } from '../ui/Button'
|
|
6
|
+
import type { SiteConfig } from '~/types'
|
|
6
7
|
|
|
7
8
|
type MarkdownContentProps = {
|
|
8
9
|
title: string
|
|
@@ -23,6 +24,8 @@ type MarkdownContentProps = {
|
|
|
23
24
|
containerRef?: React.RefObject<HTMLDivElement | null>
|
|
24
25
|
/** Current framework for filtering markdown content */
|
|
25
26
|
currentFramework?: string
|
|
27
|
+
/** Custom components from SiteConfig for rendering MDX JSX elements */
|
|
28
|
+
customComponents?: SiteConfig['components']
|
|
26
29
|
}
|
|
27
30
|
|
|
28
31
|
export function MarkdownContent({
|
|
@@ -36,11 +39,12 @@ export function MarkdownContent({
|
|
|
36
39
|
titleBarActions,
|
|
37
40
|
proseClassName,
|
|
38
41
|
containerRef,
|
|
42
|
+
customComponents,
|
|
39
43
|
}: MarkdownContentProps) {
|
|
40
44
|
const markdownElement = htmlMarkup ? (
|
|
41
|
-
<Markdown htmlMarkup={htmlMarkup} />
|
|
45
|
+
<Markdown htmlMarkup={htmlMarkup} customComponents={customComponents} />
|
|
42
46
|
) : rawContent ? (
|
|
43
|
-
<Markdown rawContent={rawContent} renderMarkdown={renderMarkdown} />
|
|
47
|
+
<Markdown rawContent={rawContent} renderMarkdown={renderMarkdown} customComponents={customComponents} />
|
|
44
48
|
) : null
|
|
45
49
|
|
|
46
50
|
return (
|
|
@@ -36,6 +36,10 @@ export interface ProjectConfig {
|
|
|
36
36
|
badge?: string
|
|
37
37
|
tagline?: string
|
|
38
38
|
description?: string
|
|
39
|
+
/** Transform file path to URL slug. Strips numeric prefixes, extensions, etc. */
|
|
40
|
+
urlMapper?: (filePath: string) => string
|
|
41
|
+
/** How to generate sidebar: 'config' reads docs.config.json, 'filesystem' auto-generates from directory structure */
|
|
42
|
+
sidebarSource?: 'config' | 'filesystem'
|
|
39
43
|
}
|
|
40
44
|
|
|
41
45
|
export interface MarkdownHeading {
|
|
@@ -62,10 +62,16 @@ function getTranslationCache(projectRoot: string): TranslationCache | null {
|
|
|
62
62
|
}
|
|
63
63
|
}
|
|
64
64
|
|
|
65
|
-
export function createFsLoader(
|
|
65
|
+
export function createFsLoader(
|
|
66
|
+
projectRoot: string,
|
|
67
|
+
urlMapper?: (filePath: string) => string,
|
|
68
|
+
): ContentLoader {
|
|
69
|
+
/** Supported markdown extensions, in priority order. */
|
|
70
|
+
const mdExtensions = ['.mdx', '.md']
|
|
71
|
+
|
|
66
72
|
/**
|
|
67
|
-
* Read a raw .md file from one of the candidate base directories.
|
|
68
|
-
*
|
|
73
|
+
* Read a raw .md/.mdx file from one of the candidate base directories.
|
|
74
|
+
* When urlMapper is set, scans directories and matches by mapped slug.
|
|
69
75
|
*/
|
|
70
76
|
function readRawFile(
|
|
71
77
|
project: string,
|
|
@@ -73,14 +79,52 @@ export function createFsLoader(projectRoot: string): ContentLoader {
|
|
|
73
79
|
lang: string,
|
|
74
80
|
slug: string,
|
|
75
81
|
): { raw: string; filePath: string } | null {
|
|
76
|
-
const
|
|
77
|
-
resolve(projectRoot, 'content', project, version, lang
|
|
78
|
-
resolve(projectRoot, 'content', project, lang
|
|
79
|
-
resolve(projectRoot, 'content', lang
|
|
82
|
+
const baseDirs = [
|
|
83
|
+
resolve(projectRoot, 'content', project, version, lang),
|
|
84
|
+
resolve(projectRoot, 'content', project, lang),
|
|
85
|
+
resolve(projectRoot, 'content', lang),
|
|
80
86
|
]
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
87
|
+
|
|
88
|
+
if (!urlMapper) {
|
|
89
|
+
for (const baseDir of baseDirs) {
|
|
90
|
+
for (const ext of mdExtensions) {
|
|
91
|
+
const filePath = resolve(baseDir, `${slug}${ext}`)
|
|
92
|
+
if (existsSync(filePath)) {
|
|
93
|
+
return { raw: readFileSync(filePath, 'utf-8'), filePath }
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
return null
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
// With urlMapper: scan files and match by mapped slug
|
|
101
|
+
for (const baseDir of baseDirs) {
|
|
102
|
+
if (!existsSync(baseDir)) continue
|
|
103
|
+
const match = findFileByMappedSlug(baseDir, baseDir, slug)
|
|
104
|
+
if (match) return match
|
|
105
|
+
}
|
|
106
|
+
return null
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
/** Recursively scan for .md/.mdx files, apply urlMapper, match target slug. */
|
|
110
|
+
function findFileByMappedSlug(
|
|
111
|
+
dir: string,
|
|
112
|
+
baseDir: string,
|
|
113
|
+
targetSlug: string,
|
|
114
|
+
): { raw: string; filePath: string } | null {
|
|
115
|
+
let entries: ReturnType<typeof readdirSync>
|
|
116
|
+
try { entries = readdirSync(dir, { withFileTypes: true }) } catch { return null }
|
|
117
|
+
for (const entry of entries) {
|
|
118
|
+
if (entry.name.startsWith('.')) continue
|
|
119
|
+
const fullPath = join(dir, entry.name)
|
|
120
|
+
if (entry.isFile() && /\.mdx?$/.test(entry.name)) {
|
|
121
|
+
const mapped = urlMapper!(relative(baseDir, fullPath))
|
|
122
|
+
if (mapped === targetSlug) {
|
|
123
|
+
return { raw: readFileSync(fullPath, 'utf-8'), filePath: fullPath }
|
|
124
|
+
}
|
|
125
|
+
} else if (entry.isDirectory()) {
|
|
126
|
+
const result = findFileByMappedSlug(fullPath, baseDir, targetSlug)
|
|
127
|
+
if (result) return result
|
|
84
128
|
}
|
|
85
129
|
}
|
|
86
130
|
return null
|
|
@@ -114,8 +158,9 @@ export function createFsLoader(projectRoot: string): ContentLoader {
|
|
|
114
158
|
try {
|
|
115
159
|
const entries = readdirSync(dir, { withFileTypes: true })
|
|
116
160
|
for (const entry of entries) {
|
|
117
|
-
if (entry.isFile() && entry.name.endsWith('.md')) {
|
|
118
|
-
|
|
161
|
+
if (entry.isFile() && (entry.name.endsWith('.md') || entry.name.endsWith('.mdx'))) {
|
|
162
|
+
const rawSlug = relative(base, join(dir, entry.name)).replace(/\.mdx?$/, '')
|
|
163
|
+
slugs.push(urlMapper ? urlMapper(relative(base, join(dir, entry.name))) : rawSlug)
|
|
119
164
|
} else if (entry.isDirectory() && !entry.name.startsWith('.')) {
|
|
120
165
|
slugs.push(...scanDirectory(join(dir, entry.name), base))
|
|
121
166
|
}
|
|
@@ -149,7 +194,9 @@ export function createFsLoader(projectRoot: string): ContentLoader {
|
|
|
149
194
|
const cache = getTranslationCache(projectRoot)
|
|
150
195
|
if (cache) {
|
|
151
196
|
try {
|
|
152
|
-
|
|
197
|
+
// Derive the extension from the resolved file path for cache lookup
|
|
198
|
+
const ext = en.filePath.endsWith('.mdx') ? '.mdx' : '.md'
|
|
199
|
+
const sourceFilePath = `${slug}${ext}`
|
|
153
200
|
const result = assemble(
|
|
154
201
|
en.raw,
|
|
155
202
|
lang,
|
|
@@ -241,45 +288,51 @@ import type { Db } from '~/db'
|
|
|
241
288
|
import { createDb, schema } from '~/db'
|
|
242
289
|
|
|
243
290
|
export function createD1Loader(db: Db): ContentLoader {
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
291
|
+
/** Try loading a row by slug with .mdx first, then .md. */
|
|
292
|
+
async function loadBySlug(
|
|
293
|
+
prefix: string,
|
|
294
|
+
slug: string,
|
|
295
|
+
): Promise<{ row: { body: string; path: string } } | null> {
|
|
296
|
+
for (const ext of ['.mdx', '.md']) {
|
|
297
|
+
const path = `${prefix}/${slug}${ext}`
|
|
249
298
|
const row = await db
|
|
250
299
|
.select()
|
|
251
300
|
.from(schema.content)
|
|
252
301
|
.where(eq(schema.content.path, path))
|
|
253
302
|
.get()
|
|
303
|
+
if (row) return { row: { body: row.body, path } }
|
|
304
|
+
}
|
|
305
|
+
return null
|
|
306
|
+
}
|
|
254
307
|
|
|
255
|
-
|
|
256
|
-
|
|
308
|
+
return {
|
|
309
|
+
async loadDoc(project, version, lang, slug) {
|
|
310
|
+
// Try requested language
|
|
311
|
+
const result = await loadBySlug(`${project}/${version}/${lang}`, slug)
|
|
312
|
+
|
|
313
|
+
if (result) {
|
|
314
|
+
const { data, content } = matter(result.row.body)
|
|
257
315
|
return {
|
|
258
316
|
content,
|
|
259
317
|
meta: { title: (data.title as string) || '', ...data },
|
|
260
318
|
locale: lang,
|
|
261
319
|
isFallback: false,
|
|
262
|
-
filePath: path,
|
|
320
|
+
filePath: result.row.path,
|
|
263
321
|
}
|
|
264
322
|
}
|
|
265
323
|
|
|
266
324
|
// Fallback to English
|
|
267
325
|
if (lang !== 'en') {
|
|
268
|
-
const
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
.where(eq(schema.content.path, enPath))
|
|
273
|
-
.get()
|
|
274
|
-
|
|
275
|
-
if (enRow) {
|
|
276
|
-
const { data, content } = matter(enRow.body)
|
|
326
|
+
const enResult = await loadBySlug(`${project}/${version}/en`, slug)
|
|
327
|
+
|
|
328
|
+
if (enResult) {
|
|
329
|
+
const { data, content } = matter(enResult.row.body)
|
|
277
330
|
return {
|
|
278
331
|
content,
|
|
279
332
|
meta: { title: (data.title as string) || '', ...data },
|
|
280
333
|
locale: 'en',
|
|
281
334
|
isFallback: true,
|
|
282
|
-
filePath:
|
|
335
|
+
filePath: enResult.row.path,
|
|
283
336
|
}
|
|
284
337
|
}
|
|
285
338
|
}
|
|
@@ -348,7 +401,7 @@ export function createD1Loader(db: Db): ContentLoader {
|
|
|
348
401
|
.all()
|
|
349
402
|
|
|
350
403
|
return rows
|
|
351
|
-
.map((r) => r.path.replace(prefix, '').replace(/\.
|
|
404
|
+
.map((r) => r.path.replace(prefix, '').replace(/\.mdx?$/, ''))
|
|
352
405
|
.filter((s) => s.length > 0)
|
|
353
406
|
},
|
|
354
407
|
}
|
|
@@ -8,8 +8,9 @@
|
|
|
8
8
|
import { readFileSync, existsSync, readdirSync, statSync } from 'node:fs'
|
|
9
9
|
import { resolve, relative, join } from 'node:path'
|
|
10
10
|
import matter from 'gray-matter'
|
|
11
|
-
import type { LoadedDoc, DocsConfig, DocsConfigItem } from '~/types'
|
|
11
|
+
import type { LoadedDoc, DocsConfig, DocsConfigItem, ProjectConfig } from '~/types'
|
|
12
12
|
import { type ContentLoader, createFsLoader } from './content-loader'
|
|
13
|
+
import { generateSidebarFromFilesystem } from './sidebar-generator'
|
|
13
14
|
|
|
14
15
|
/**
|
|
15
16
|
* Get the project root directory.
|
|
@@ -61,25 +62,35 @@ export function setContentLoader(loader: ContentLoader) {
|
|
|
61
62
|
|
|
62
63
|
/**
|
|
63
64
|
* Load a document with i18n fallback.
|
|
64
|
-
*
|
|
65
|
+
* When projectConfig has urlMapper, creates a mapper-aware loader.
|
|
65
66
|
*/
|
|
66
67
|
export async function loadDoc(
|
|
67
68
|
project: string,
|
|
68
69
|
version: string,
|
|
69
70
|
lang: string,
|
|
70
71
|
slug: string,
|
|
72
|
+
projectConfig?: ProjectConfig,
|
|
71
73
|
): Promise<LoadedDoc> {
|
|
74
|
+
if (projectConfig?.urlMapper) {
|
|
75
|
+
const loader = createFsLoader(getProjectRoot(), projectConfig.urlMapper)
|
|
76
|
+
return loader.loadDoc(project, version, lang, slug)
|
|
77
|
+
}
|
|
72
78
|
return getLoader().loadDoc(project, version, lang, slug)
|
|
73
79
|
}
|
|
74
80
|
|
|
75
81
|
/**
|
|
76
|
-
* Load docs config (
|
|
77
|
-
*
|
|
82
|
+
* Load docs config (sidebar structure).
|
|
83
|
+
* When sidebarSource is 'filesystem', generates from directory tree.
|
|
78
84
|
*/
|
|
79
85
|
export async function loadDocsConfig(
|
|
80
86
|
project: string,
|
|
81
87
|
version: string,
|
|
88
|
+
projectConfig?: ProjectConfig,
|
|
82
89
|
): Promise<DocsConfig> {
|
|
90
|
+
if (projectConfig?.sidebarSource === 'filesystem') {
|
|
91
|
+
return loadFilesystemSidebar(project, version, projectConfig)
|
|
92
|
+
}
|
|
93
|
+
|
|
83
94
|
const config = await getLoader().loadDocsConfig(project, version)
|
|
84
95
|
if (config) return config
|
|
85
96
|
|
|
@@ -87,6 +98,27 @@ export async function loadDocsConfig(
|
|
|
87
98
|
return autoScanDocs(getProjectRoot(), project, version)
|
|
88
99
|
}
|
|
89
100
|
|
|
101
|
+
/** Generate sidebar from filesystem using numeric prefix ordering. */
|
|
102
|
+
function loadFilesystemSidebar(
|
|
103
|
+
project: string,
|
|
104
|
+
version: string,
|
|
105
|
+
projectConfig: ProjectConfig,
|
|
106
|
+
): DocsConfig {
|
|
107
|
+
const root = getProjectRoot()
|
|
108
|
+
const candidates = [
|
|
109
|
+
resolve(root, 'content', project, version, 'en'),
|
|
110
|
+
resolve(root, 'content', project, 'en'),
|
|
111
|
+
resolve(root, 'content', 'en'),
|
|
112
|
+
]
|
|
113
|
+
for (const dir of candidates) {
|
|
114
|
+
if (existsSync(dir) && statSync(dir).isDirectory()) {
|
|
115
|
+
const config = generateSidebarFromFilesystem(dir, projectConfig.urlMapper)
|
|
116
|
+
if (config.sections.length > 0) return config
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
return { sections: [] }
|
|
120
|
+
}
|
|
121
|
+
|
|
90
122
|
/**
|
|
91
123
|
* Auto-generate sidebar config by scanning .md files in the content directory.
|
|
92
124
|
*/
|
|
@@ -126,9 +158,9 @@ function scanDirectory(dir: string, base: string): DocsConfigItem[] {
|
|
|
126
158
|
try {
|
|
127
159
|
const entries = readdirSync(dir, { withFileTypes: true })
|
|
128
160
|
for (const entry of entries) {
|
|
129
|
-
if (entry.isFile() && entry.name.endsWith('.md')) {
|
|
161
|
+
if (entry.isFile() && (entry.name.endsWith('.md') || entry.name.endsWith('.mdx'))) {
|
|
130
162
|
const relativePath = relative(base, join(dir, entry.name))
|
|
131
|
-
const slug = relativePath.replace(/\.
|
|
163
|
+
const slug = relativePath.replace(/\.mdx?$/, '')
|
|
132
164
|
// Read frontmatter for title
|
|
133
165
|
const filePath = join(dir, entry.name)
|
|
134
166
|
const raw = readFileSync(filePath, 'utf-8')
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Rehype plugin that converts passed-through MDX JSX AST nodes into
|
|
3
|
+
* raw HTML strings so that rehype-raw can parse them into standard
|
|
4
|
+
* hast element nodes.
|
|
5
|
+
*
|
|
6
|
+
* This is needed because remark-mdx produces mdxJsxFlowElement /
|
|
7
|
+
* mdxJsxTextElement nodes which remark-rehype can pass through (via
|
|
8
|
+
* the `passThrough` option) but rehype-raw does not understand. By
|
|
9
|
+
* serialising them to raw HTML first, the existing pipeline
|
|
10
|
+
* (rehype-raw -> rehype-stringify) handles them naturally as HTML
|
|
11
|
+
* elements, which html-react-parser can then match to custom
|
|
12
|
+
* components.
|
|
13
|
+
*
|
|
14
|
+
* MDX expression nodes (mdxFlowExpression, mdxTextExpression) and
|
|
15
|
+
* ESM nodes (mdxjsEsm) are stripped silently — they have no
|
|
16
|
+
* meaningful HTML representation in a server-rendered pipeline.
|
|
17
|
+
*/
|
|
18
|
+
|
|
19
|
+
import { visit, SKIP } from 'unist-util-visit'
|
|
20
|
+
import type { Root } from 'hast'
|
|
21
|
+
|
|
22
|
+
interface MdxJsxAttribute {
|
|
23
|
+
type: 'mdxJsxAttribute'
|
|
24
|
+
name: string
|
|
25
|
+
value: string | { type: string; value: string } | null | undefined
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
interface MdxJsxNode {
|
|
29
|
+
type: string
|
|
30
|
+
name: string | null
|
|
31
|
+
attributes?: MdxJsxAttribute[]
|
|
32
|
+
children?: any[]
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
function serializeAttributes(attrs?: MdxJsxAttribute[]): string {
|
|
36
|
+
if (!attrs || attrs.length === 0) return ''
|
|
37
|
+
return attrs
|
|
38
|
+
.map((attr) => {
|
|
39
|
+
if (attr.type !== 'mdxJsxAttribute') return ''
|
|
40
|
+
const name = attr.name
|
|
41
|
+
if (attr.value == null) return ` ${name}`
|
|
42
|
+
const val =
|
|
43
|
+
typeof attr.value === 'string'
|
|
44
|
+
? attr.value
|
|
45
|
+
: typeof attr.value === 'object' && attr.value.value
|
|
46
|
+
? attr.value.value
|
|
47
|
+
: ''
|
|
48
|
+
return ` ${name}="${val.replace(/"/g, '"')}"`
|
|
49
|
+
})
|
|
50
|
+
.join('')
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
function serializeChildren(children: any[]): string {
|
|
54
|
+
return children
|
|
55
|
+
.map((child: any) => {
|
|
56
|
+
if (child.type === 'text') return child.value ?? ''
|
|
57
|
+
if (child.type === 'raw') return child.value ?? ''
|
|
58
|
+
if (
|
|
59
|
+
child.type === 'mdxJsxFlowElement' ||
|
|
60
|
+
child.type === 'mdxJsxTextElement'
|
|
61
|
+
) {
|
|
62
|
+
return serializeMdxJsx(child)
|
|
63
|
+
}
|
|
64
|
+
// For standard hast element nodes that ended up as children
|
|
65
|
+
if (child.type === 'element' && child.tagName) {
|
|
66
|
+
const attrs = serializeHastProps(child.properties)
|
|
67
|
+
const inner = child.children ? serializeChildren(child.children) : ''
|
|
68
|
+
return `<${child.tagName}${attrs}>${inner}</${child.tagName}>`
|
|
69
|
+
}
|
|
70
|
+
return ''
|
|
71
|
+
})
|
|
72
|
+
.join('')
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
function serializeHastProps(props?: Record<string, any>): string {
|
|
76
|
+
if (!props) return ''
|
|
77
|
+
return Object.entries(props)
|
|
78
|
+
.map(([key, val]) => {
|
|
79
|
+
if (val === true) return ` ${key}`
|
|
80
|
+
if (val === false || val == null) return ''
|
|
81
|
+
return ` ${key}="${String(val).replace(/"/g, '"')}"`
|
|
82
|
+
})
|
|
83
|
+
.join('')
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
function serializeMdxJsx(node: MdxJsxNode): string {
|
|
87
|
+
const tag = node.name
|
|
88
|
+
if (!tag) {
|
|
89
|
+
// Fragment — just render children
|
|
90
|
+
return node.children ? serializeChildren(node.children) : ''
|
|
91
|
+
}
|
|
92
|
+
const attrs = serializeAttributes(node.attributes as MdxJsxAttribute[])
|
|
93
|
+
const inner = node.children ? serializeChildren(node.children) : ''
|
|
94
|
+
if (!inner && (!node.children || node.children.length === 0)) {
|
|
95
|
+
return `<${tag}${attrs}></${tag}>`
|
|
96
|
+
}
|
|
97
|
+
return `<${tag}${attrs}>${inner}</${tag}>`
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
export function rehypeMdxJsxToRaw() {
|
|
101
|
+
return (tree: Root) => {
|
|
102
|
+
visit(tree, (node: any, index, parent: any) => {
|
|
103
|
+
if (
|
|
104
|
+
node.type === 'mdxJsxFlowElement' ||
|
|
105
|
+
node.type === 'mdxJsxTextElement'
|
|
106
|
+
) {
|
|
107
|
+
const html = serializeMdxJsx(node as MdxJsxNode)
|
|
108
|
+
if (parent && typeof index === 'number') {
|
|
109
|
+
parent.children[index] = { type: 'raw', value: html }
|
|
110
|
+
}
|
|
111
|
+
return SKIP
|
|
112
|
+
}
|
|
113
|
+
// Strip expression / ESM nodes
|
|
114
|
+
if (
|
|
115
|
+
node.type === 'mdxFlowExpression' ||
|
|
116
|
+
node.type === 'mdxTextExpression' ||
|
|
117
|
+
node.type === 'mdxjsEsm'
|
|
118
|
+
) {
|
|
119
|
+
if (parent && typeof index === 'number') {
|
|
120
|
+
parent.children.splice(index, 1)
|
|
121
|
+
return [SKIP, index] as any
|
|
122
|
+
}
|
|
123
|
+
return SKIP
|
|
124
|
+
}
|
|
125
|
+
})
|
|
126
|
+
}
|
|
127
|
+
}
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import { unified } from 'unified'
|
|
2
2
|
import remarkParse from 'remark-parse'
|
|
3
|
+
import remarkMdx from 'remark-mdx'
|
|
3
4
|
import remarkGfm from 'remark-gfm'
|
|
4
5
|
import remarkRehype from 'remark-rehype'
|
|
5
6
|
import rehypeCallouts from 'rehype-callouts'
|
|
@@ -10,6 +11,7 @@ import rehypeStringify from 'rehype-stringify'
|
|
|
10
11
|
|
|
11
12
|
import {
|
|
12
13
|
rehypeCollectHeadings,
|
|
14
|
+
rehypeMdxJsxToRaw,
|
|
13
15
|
rehypeParseCommentComponents,
|
|
14
16
|
rehypeTransformCommentComponents,
|
|
15
17
|
rehypeTransformFrameworkComponents,
|
|
@@ -29,9 +31,20 @@ export function renderMarkdown(content: string): MarkdownRenderResult {
|
|
|
29
31
|
|
|
30
32
|
const processor = unified()
|
|
31
33
|
.use(remarkParse)
|
|
34
|
+
.use(remarkMdx)
|
|
32
35
|
.use(remarkGfm)
|
|
33
|
-
.use(remarkRehype, {
|
|
36
|
+
.use(remarkRehype, {
|
|
37
|
+
allowDangerousHtml: true,
|
|
38
|
+
passThrough: [
|
|
39
|
+
'mdxjsEsm',
|
|
40
|
+
'mdxFlowExpression',
|
|
41
|
+
'mdxJsxFlowElement',
|
|
42
|
+
'mdxJsxTextElement',
|
|
43
|
+
'mdxTextExpression',
|
|
44
|
+
],
|
|
45
|
+
})
|
|
34
46
|
.use(extractCodeMeta)
|
|
47
|
+
.use(rehypeMdxJsxToRaw)
|
|
35
48
|
.use(rehypeRaw)
|
|
36
49
|
.use(rehypeParseCommentComponents)
|
|
37
50
|
.use(rehypeCallouts, {
|
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Filesystem-based sidebar generation.
|
|
3
|
+
*
|
|
4
|
+
* Walks a content directory tree, sorts entries by numeric prefix,
|
|
5
|
+
* and produces a DocsConfig with sections (directories) and items (files).
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { readdirSync, readFileSync, existsSync, statSync } from 'node:fs'
|
|
9
|
+
import { join } from 'node:path'
|
|
10
|
+
import matter from 'gray-matter'
|
|
11
|
+
import type {
|
|
12
|
+
DocsConfig,
|
|
13
|
+
DocsConfigSection,
|
|
14
|
+
DocsConfigItem,
|
|
15
|
+
} from '~/types'
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Convert a kebab-case name to Title Case.
|
|
19
|
+
* 'getting-started' → 'Getting Started'
|
|
20
|
+
*/
|
|
21
|
+
function toTitleCase(name: string): string {
|
|
22
|
+
return name
|
|
23
|
+
.split('-')
|
|
24
|
+
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
|
|
25
|
+
.join(' ')
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Strip a leading numeric prefix from a single name segment.
|
|
30
|
+
* '01-getting-started' → 'getting-started'
|
|
31
|
+
*/
|
|
32
|
+
function stripPrefix(name: string): string {
|
|
33
|
+
return name.replace(/^\d+-/, '')
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* Extract the numeric prefix for sorting. Returns Infinity if none.
|
|
38
|
+
*/
|
|
39
|
+
function sortOrder(name: string): number {
|
|
40
|
+
const match = name.match(/^(\d+)-/)
|
|
41
|
+
return match ? parseInt(match[1], 10) : Infinity
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
/**
|
|
45
|
+
* Try to read the frontmatter title from a markdown file.
|
|
46
|
+
*/
|
|
47
|
+
function readTitle(filePath: string): string | null {
|
|
48
|
+
try {
|
|
49
|
+
const raw = readFileSync(filePath, 'utf-8')
|
|
50
|
+
const { data } = matter(raw)
|
|
51
|
+
if (typeof data.title === 'string' && data.title) return data.title
|
|
52
|
+
} catch {
|
|
53
|
+
// ignore
|
|
54
|
+
}
|
|
55
|
+
return null
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
/**
|
|
59
|
+
* Generate a DocsConfig by walking a content directory.
|
|
60
|
+
*
|
|
61
|
+
* @param contentDir Absolute path to the content directory to scan
|
|
62
|
+
* (e.g. `/path/to/content/latest/docs/en`)
|
|
63
|
+
* @param urlMapper Optional function to transform relative file paths to URL slugs
|
|
64
|
+
*/
|
|
65
|
+
export function generateSidebarFromFilesystem(
|
|
66
|
+
contentDir: string,
|
|
67
|
+
urlMapper?: (path: string) => string,
|
|
68
|
+
): DocsConfig {
|
|
69
|
+
if (!existsSync(contentDir) || !statSync(contentDir).isDirectory()) {
|
|
70
|
+
return { sections: [] }
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
const sections = buildSections(contentDir, contentDir, urlMapper)
|
|
74
|
+
return { sections }
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
/**
|
|
78
|
+
* Recursively build sidebar sections from a directory.
|
|
79
|
+
* Directories become sections, .md/.mdx files become items.
|
|
80
|
+
*/
|
|
81
|
+
function buildSections(
|
|
82
|
+
dir: string,
|
|
83
|
+
baseDir: string,
|
|
84
|
+
urlMapper?: (path: string) => string,
|
|
85
|
+
): DocsConfigSection[] {
|
|
86
|
+
const sections: DocsConfigSection[] = []
|
|
87
|
+
|
|
88
|
+
let entries: ReturnType<typeof readdirSync>
|
|
89
|
+
try {
|
|
90
|
+
entries = readdirSync(dir, { withFileTypes: true })
|
|
91
|
+
} catch {
|
|
92
|
+
return sections
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
// Sort by numeric prefix
|
|
96
|
+
const sorted = [...entries].sort(
|
|
97
|
+
(a, b) => sortOrder(a.name) - sortOrder(b.name),
|
|
98
|
+
)
|
|
99
|
+
|
|
100
|
+
// Collect top-level files (items without a section) — only at root level
|
|
101
|
+
const topLevelItems: DocsConfigItem[] = []
|
|
102
|
+
|
|
103
|
+
for (const entry of sorted) {
|
|
104
|
+
if (entry.name.startsWith('.')) continue
|
|
105
|
+
|
|
106
|
+
if (entry.isDirectory()) {
|
|
107
|
+
const subDir = join(dir, entry.name)
|
|
108
|
+
const cleanName = stripPrefix(entry.name)
|
|
109
|
+
const label = toTitleCase(cleanName)
|
|
110
|
+
|
|
111
|
+
// Collect all file items from this directory (recursively flattened into children)
|
|
112
|
+
const children = collectItems(subDir, baseDir, urlMapper)
|
|
113
|
+
|
|
114
|
+
if (children.length > 0) {
|
|
115
|
+
sections.push({ label, children })
|
|
116
|
+
}
|
|
117
|
+
} else if (
|
|
118
|
+
entry.isFile() &&
|
|
119
|
+
(entry.name.endsWith('.md') || entry.name.endsWith('.mdx'))
|
|
120
|
+
) {
|
|
121
|
+
const filePath = join(dir, entry.name)
|
|
122
|
+
const relativePath = filePath
|
|
123
|
+
.slice(baseDir.length + 1) // remove baseDir prefix + leading slash
|
|
124
|
+
const cleanName = stripPrefix(entry.name).replace(/\.mdx?$/, '')
|
|
125
|
+
const slug = urlMapper ? urlMapper(relativePath) : relativePath.replace(/\.mdx?$/, '')
|
|
126
|
+
const title = readTitle(filePath)
|
|
127
|
+
const label = title || toTitleCase(cleanName)
|
|
128
|
+
|
|
129
|
+
topLevelItems.push({ label, to: slug })
|
|
130
|
+
}
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
// If there are top-level files, add them as a section
|
|
134
|
+
if (topLevelItems.length > 0) {
|
|
135
|
+
sections.unshift({
|
|
136
|
+
label: 'Overview',
|
|
137
|
+
children: topLevelItems,
|
|
138
|
+
})
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
return sections
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
/**
|
|
145
|
+
* Collect all file items from a directory, recursing into subdirectories.
|
|
146
|
+
* Subdirectory files are flattened into a single list of items.
|
|
147
|
+
*/
|
|
148
|
+
function collectItems(
|
|
149
|
+
dir: string,
|
|
150
|
+
baseDir: string,
|
|
151
|
+
urlMapper?: (path: string) => string,
|
|
152
|
+
): DocsConfigItem[] {
|
|
153
|
+
const items: DocsConfigItem[] = []
|
|
154
|
+
|
|
155
|
+
let entries: ReturnType<typeof readdirSync>
|
|
156
|
+
try {
|
|
157
|
+
entries = readdirSync(dir, { withFileTypes: true })
|
|
158
|
+
} catch {
|
|
159
|
+
return items
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
const sorted = [...entries].sort(
|
|
163
|
+
(a, b) => sortOrder(a.name) - sortOrder(b.name),
|
|
164
|
+
)
|
|
165
|
+
|
|
166
|
+
for (const entry of sorted) {
|
|
167
|
+
if (entry.name.startsWith('.')) continue
|
|
168
|
+
|
|
169
|
+
if (entry.isFile() && (entry.name.endsWith('.md') || entry.name.endsWith('.mdx'))) {
|
|
170
|
+
const filePath = join(dir, entry.name)
|
|
171
|
+
const relativePath = filePath.slice(baseDir.length + 1)
|
|
172
|
+
const cleanName = stripPrefix(entry.name).replace(/\.mdx?$/, '')
|
|
173
|
+
const slug = urlMapper ? urlMapper(relativePath) : relativePath.replace(/\.mdx?$/, '')
|
|
174
|
+
const title = readTitle(filePath)
|
|
175
|
+
const label = title || toTitleCase(cleanName)
|
|
176
|
+
|
|
177
|
+
items.push({ label, to: slug })
|
|
178
|
+
} else if (entry.isDirectory() && !entry.name.startsWith('.')) {
|
|
179
|
+
// Recurse into subdirectories
|
|
180
|
+
items.push(...collectItems(join(dir, entry.name), baseDir, urlMapper))
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
return items
|
|
185
|
+
}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Built-in URL mapping helpers for transforming file paths to URL slugs.
|
|
3
|
+
*
|
|
4
|
+
* Projects with numeric-prefixed directories (e.g. nextjs-i18n-docs) can use
|
|
5
|
+
* `stripNumericPrefixes` as their `urlMapper` in ProjectConfig.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Strip numeric prefixes, file extensions, and trailing /index from a file path.
|
|
10
|
+
*
|
|
11
|
+
* Example:
|
|
12
|
+
* '01-app/01-getting-started/01-installation.mdx'
|
|
13
|
+
* → 'app/getting-started/installation'
|
|
14
|
+
*/
|
|
15
|
+
export function stripNumericPrefixes(filePath: string): string {
|
|
16
|
+
return filePath
|
|
17
|
+
.replace(/\.mdx?$/, '') // strip .md or .mdx extension
|
|
18
|
+
.split('/')
|
|
19
|
+
.map((segment) => segment.replace(/^\d+-/, '')) // strip leading numeric prefix per segment
|
|
20
|
+
.join('/')
|
|
21
|
+
.replace(/\/index$/, '') // strip trailing /index
|
|
22
|
+
}
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: Admin Dashboard
|
|
3
|
+
description: Web UI for monitoring translation progress, managing translation jobs, previewing files, and browsing LLM models.
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# Admin Dashboard
|
|
7
|
+
|
|
8
|
+
The docs-i18n admin dashboard is a web-based UI for managing your translations. It is built with TanStack Start and React, and runs as a local development server.
|
|
9
|
+
|
|
10
|
+
## Starting the Dashboard
|
|
11
|
+
|
|
12
|
+
```bash
|
|
13
|
+
npx docs-i18n admin
|
|
14
|
+
```
|
|
15
|
+
|
|
16
|
+
The dashboard opens at `http://localhost:3456`. Use `--port` to change the port:
|
|
17
|
+
|
|
18
|
+
```bash
|
|
19
|
+
npx docs-i18n admin --port 4000
|
|
20
|
+
```
|
|
21
|
+
|
|
22
|
+
The dashboard reads your `docs-i18n.config.ts` to discover projects, versions, and languages. The admin is pre-built and requires no additional dependencies.
|
|
23
|
+
|
|
24
|
+
## Features
|
|
25
|
+
|
|
26
|
+
### Translation Overview
|
|
27
|
+
|
|
28
|
+
The main dashboard page shows a grid of all versions and languages with translation progress. For each version/language pair, you see:
|
|
29
|
+
|
|
30
|
+
- Total number of source nodes (EN content units).
|
|
31
|
+
- Number of translated nodes.
|
|
32
|
+
- Percentage complete.
|
|
33
|
+
- Breakdown by section (e.g., `docs/`, `blog/`, `learn/`), showing file counts and node counts per section.
|
|
34
|
+
|
|
35
|
+
English is always shown as 100% complete since it is the source language.
|
|
36
|
+
|
|
37
|
+
The overview auto-scans source files on load. If source files have not been scanned yet, the dashboard triggers a scan automatically.
|
|
38
|
+
|
|
39
|
+
### File Browser
|
|
40
|
+
|
|
41
|
+
Clicking on a version/language cell opens a file-level coverage list. Each file shows:
|
|
42
|
+
|
|
43
|
+
- File path (relative to the source directory).
|
|
44
|
+
- Total translatable nodes in that file.
|
|
45
|
+
- Number of translated nodes.
|
|
46
|
+
|
|
47
|
+
Files are sorted by path. You can click on any file to open the block-level preview.
|
|
48
|
+
|
|
49
|
+
### File Preview
|
|
50
|
+
|
|
51
|
+
The file preview shows every block (AST node) in a file side by side:
|
|
52
|
+
|
|
53
|
+
- **Source** -- the original English text.
|
|
54
|
+
- **Translation** -- the cached translation for the selected language, or empty if not yet translated.
|
|
55
|
+
|
|
56
|
+
Each block displays its type (heading, paragraph, list, blockquote, frontmatter, code, html) and MD5 key. Non-translatable blocks (code, pure HTML tags, gaps between nodes) are shown but clearly distinguished.
|
|
57
|
+
|
|
58
|
+
### Cache Management
|
|
59
|
+
|
|
60
|
+
From the file preview, you can delete individual cache entries. This is useful when a translation is incorrect and you want to re-translate a specific node. After deleting, run the translate command again to get a fresh translation for that key.
|
|
61
|
+
|
|
62
|
+
The dashboard also supports rescanning source files for a specific version via the UI. This rebuilds the source index and cleans orphaned entries.
|
|
63
|
+
|
|
64
|
+
### Translation Jobs
|
|
65
|
+
|
|
66
|
+
The dashboard includes a job management system for running translations directly from the UI instead of the command line.
|
|
67
|
+
|
|
68
|
+
#### Creating a Job
|
|
69
|
+
|
|
70
|
+
Click the job creation button and configure:
|
|
71
|
+
|
|
72
|
+
- **Language** -- target language code.
|
|
73
|
+
- **Version** -- which version to translate.
|
|
74
|
+
- **Project** -- optionally filter to a specific project.
|
|
75
|
+
- **Model** -- LLM model to use (can be selected from the model browser).
|
|
76
|
+
- **Model rotation** -- optionally provide multiple models to rotate through.
|
|
77
|
+
- **Max chunks** -- limit the number of API call chunks.
|
|
78
|
+
- **Concurrency** -- number of parallel API calls (default: 3).
|
|
79
|
+
- **Files** -- optionally select specific files to translate.
|
|
80
|
+
|
|
81
|
+
#### Job Status
|
|
82
|
+
|
|
83
|
+
Running jobs show:
|
|
84
|
+
|
|
85
|
+
- Status: `running`, `completed`, `failed`, or `cancelled`.
|
|
86
|
+
- Start time and finish time.
|
|
87
|
+
- Number of translated chunks and total chunks.
|
|
88
|
+
- Current chunk being processed.
|
|
89
|
+
- Live log output (last 20 lines displayed, up to 500 lines stored).
|
|
90
|
+
|
|
91
|
+
You can cancel a running job, which sends SIGTERM to the translation process. Completed or failed jobs can be removed from the list.
|
|
92
|
+
|
|
93
|
+
#### How Jobs Work
|
|
94
|
+
|
|
95
|
+
Under the hood, the job manager spawns a child process running the `docs-i18n translate` CLI command with the configured options. It captures stdout and stderr, parses progress information from the output, and exposes it through the dashboard API. The child process inherits the API key from your config or environment variables.
|
|
96
|
+
|
|
97
|
+
### Model Browser
|
|
98
|
+
|
|
99
|
+
The dashboard includes an OpenRouter model browser that fetches the list of available models from the OpenRouter API. For each model, it displays:
|
|
100
|
+
|
|
101
|
+
- Model ID and name.
|
|
102
|
+
- Pricing (prompt and completion per million tokens).
|
|
103
|
+
- Context length and maximum output tokens.
|
|
104
|
+
- Whether the model supports JSON response format and tool use.
|
|
105
|
+
- Provider name.
|
|
106
|
+
- Whether the model is free.
|
|
107
|
+
|
|
108
|
+
The model list is cached for 5 minutes. It only shows text-to-text models (filtered by architecture modality) and excludes models with negative pricing. Models are sorted by prompt price (cheapest first).
|
|
109
|
+
|
|
110
|
+
This is useful for selecting a model when creating a translation job.
|
|
111
|
+
|
|
112
|
+
### Open in Editor
|
|
113
|
+
|
|
114
|
+
The dashboard can open source files in your local editor. It tries the following editors in order:
|
|
115
|
+
|
|
116
|
+
1. The value of the `EDITOR_CMD` environment variable (if set)
|
|
117
|
+
2. `code` (VS Code)
|
|
118
|
+
3. `cursor` (Cursor)
|
|
119
|
+
4. `zed` (Zed)
|
|
120
|
+
|
|
121
|
+
If none are found, it falls back to the system default (`open` on macOS, `xdg-open` on Linux, `start` on Windows).
|
|
122
|
+
|
|
123
|
+
## Architecture
|
|
124
|
+
|
|
125
|
+
The admin dashboard uses:
|
|
126
|
+
|
|
127
|
+
- **TanStack Start** -- Full-stack React framework with server functions.
|
|
128
|
+
- **TanStack React Query** -- For data fetching and cache management.
|
|
129
|
+
- **TanStack Router** -- For client-side routing.
|
|
130
|
+
- **Vite** -- Dev server and build tool.
|
|
131
|
+
- **Hono** -- HTTP server (used by TanStack Start internally).
|
|
132
|
+
|
|
133
|
+
Server functions are defined in `packages/admin/server/functions/` and handle:
|
|
134
|
+
|
|
135
|
+
- `fetchStatus` / `fetchFileCoverage` / `fetchFileBlocks` -- Read translation status from SQLite.
|
|
136
|
+
- `deleteCacheEntry` -- Delete a specific translation from the cache.
|
|
137
|
+
- `rescanVersion` -- Rescan source files for a version.
|
|
138
|
+
- `createJob` / `fetchJobs` / `fetchJob` / `deleteJob` -- Manage translation jobs.
|
|
139
|
+
- `fetchModels` -- Fetch available models from OpenRouter.
|
|
140
|
+
- `fetchVersion` / `fetchConfig` -- Get docs-i18n version and project root.
|
|
141
|
+
- `openFile` -- Open a file in the local editor.
|
|
142
|
+
|
|
143
|
+
The dashboard shares the same `TranslationCache` and `parseMdx` functions as the CLI, ensuring consistent behavior.
|
|
@@ -0,0 +1,324 @@
|
|
|
1
|
+
---
|
|
2
|
+
title: CLI Reference
|
|
3
|
+
description: Complete reference for all docs-i18n CLI commands, flags, and common workflows.
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
# CLI Reference
|
|
7
|
+
|
|
8
|
+
docs-i18n provides a command-line interface for managing documentation translations. All commands read from `docs-i18n.config.ts` in the current working directory by default.
|
|
9
|
+
|
|
10
|
+
```bash
|
|
11
|
+
docs-i18n <command> [options]
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
## Global Options
|
|
15
|
+
|
|
16
|
+
| Flag | Description |
|
|
17
|
+
| --- | --- |
|
|
18
|
+
| `--config <path>` | Path to config file (default: `docs-i18n.config.ts`) |
|
|
19
|
+
| `--version`, `-v` | Print the docs-i18n version |
|
|
20
|
+
| `--help`, `-h` | Show help text |
|
|
21
|
+
|
|
22
|
+
## Commands
|
|
23
|
+
|
|
24
|
+
### `translate`
|
|
25
|
+
|
|
26
|
+
Translate content to a target language. Sends untranslated nodes to the configured LLM and caches the results.
|
|
27
|
+
|
|
28
|
+
```bash
|
|
29
|
+
docs-i18n translate --lang <code> [options]
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
**Required:**
|
|
33
|
+
|
|
34
|
+
| Flag | Description |
|
|
35
|
+
| --- | --- |
|
|
36
|
+
| `--lang <code>` | Target language code (e.g., `zh-hans`, `ja`, `es`) |
|
|
37
|
+
|
|
38
|
+
**Optional:**
|
|
39
|
+
|
|
40
|
+
| Flag | Default | Description |
|
|
41
|
+
| --- | --- | --- |
|
|
42
|
+
| `--project <id>` | all projects | Filter to a specific project |
|
|
43
|
+
| `--version <ver>` | all versions | Filter to a specific version |
|
|
44
|
+
| `--files <paths>` | all files | Comma-separated list of relative file paths to translate |
|
|
45
|
+
| `--max <n>` | 999 | Maximum number of API call chunks to process |
|
|
46
|
+
| `--concurrency <n>` | 3 | Number of parallel API calls |
|
|
47
|
+
| `--model <model>` | config value | Override the LLM model |
|
|
48
|
+
| `--api-key <key>` | config/env | Override the API key |
|
|
49
|
+
| `--max-tokens <n>` | 16384 | Max output tokens per API call |
|
|
50
|
+
| `--context-length <n>` | 32768 | Model context window size |
|
|
51
|
+
| `--dry-run` | false | Preview what would be translated without making API calls |
|
|
52
|
+
|
|
53
|
+
**Examples:**
|
|
54
|
+
|
|
55
|
+
```bash
|
|
56
|
+
# Translate everything to Simplified Chinese
|
|
57
|
+
docs-i18n translate --lang zh-hans
|
|
58
|
+
|
|
59
|
+
# Translate a specific version with dry run
|
|
60
|
+
docs-i18n translate --lang zh-hans --version latest --dry-run
|
|
61
|
+
|
|
62
|
+
# Translate specific files only
|
|
63
|
+
docs-i18n translate --lang zh-hans --files docs/intro.mdx,docs/guide.mdx
|
|
64
|
+
|
|
65
|
+
# Use a different model with higher token limits
|
|
66
|
+
docs-i18n translate --lang ja --model qwen/qwen3.5-flash-02-23 --max-tokens 65536 --context-length 1000000
|
|
67
|
+
|
|
68
|
+
# Limit API calls for testing
|
|
69
|
+
docs-i18n translate --lang es --max 5 --concurrency 1
|
|
70
|
+
```
|
|
71
|
+
|
|
72
|
+
**How it works:**
|
|
73
|
+
|
|
74
|
+
1. Loads the SQLite cache and finds all untranslated keys for the given language and version.
|
|
75
|
+
2. Groups untranslated nodes into chunks that fit within the model's context window, accounting for input tokens, output tokens, and language-specific token multipliers (e.g., Japanese uses 2.5x more output tokens than English).
|
|
76
|
+
3. Sends each chunk to the LLM as structured JSON: an array of typed nodes with MD5 keys.
|
|
77
|
+
4. Parses the JSON response, validates keys, and stores translations in the cache.
|
|
78
|
+
5. Runs chunks in parallel up to the concurrency limit.
|
|
79
|
+
|
|
80
|
+
Translation logs are written to `.logs/` with timestamps for debugging.
|
|
81
|
+
|
|
82
|
+
---
|
|
83
|
+
|
|
84
|
+
### `assemble`
|
|
85
|
+
|
|
86
|
+
Assemble translated files by combining English source content with cached translations. For any untranslated nodes, the original English text is used as a fallback.
|
|
87
|
+
|
|
88
|
+
```bash
|
|
89
|
+
docs-i18n assemble [options]
|
|
90
|
+
```
|
|
91
|
+
|
|
92
|
+
**Optional:**
|
|
93
|
+
|
|
94
|
+
| Flag | Default | Description |
|
|
95
|
+
| --- | --- | --- |
|
|
96
|
+
| `--project <id>` | all projects | Filter to a specific project |
|
|
97
|
+
| `--version <ver>` | all versions | Filter to a specific version |
|
|
98
|
+
| `--lang <code>` | all languages | Filter to a specific language |
|
|
99
|
+
|
|
100
|
+
**Examples:**
|
|
101
|
+
|
|
102
|
+
```bash
|
|
103
|
+
# Assemble all projects, versions, and languages
|
|
104
|
+
docs-i18n assemble
|
|
105
|
+
|
|
106
|
+
# Assemble only Chinese for the latest version
|
|
107
|
+
docs-i18n assemble --lang zh-hans --version latest
|
|
108
|
+
```
|
|
109
|
+
|
|
110
|
+
Output is written to `.cache/content/<version>/<lang>/` by default. Each file mirrors the structure of the English source directory.
|
|
111
|
+
|
|
112
|
+
---
|
|
113
|
+
|
|
114
|
+
### `rescan`
|
|
115
|
+
|
|
116
|
+
Rescan source files and rebuild the source index. Also cleans orphaned translations and sources that no longer exist in the source files.
|
|
117
|
+
|
|
118
|
+
```bash
|
|
119
|
+
docs-i18n rescan [options]
|
|
120
|
+
```
|
|
121
|
+
|
|
122
|
+
**Optional:**
|
|
123
|
+
|
|
124
|
+
| Flag | Default | Description |
|
|
125
|
+
| --- | --- | --- |
|
|
126
|
+
| `--project <id>` | all projects | Filter to a specific project |
|
|
127
|
+
| `--version <ver>` | all versions | Filter to a specific version |
|
|
128
|
+
|
|
129
|
+
**Examples:**
|
|
130
|
+
|
|
131
|
+
```bash
|
|
132
|
+
# Rescan all source files
|
|
133
|
+
docs-i18n rescan
|
|
134
|
+
|
|
135
|
+
# Rescan a specific version only
|
|
136
|
+
docs-i18n rescan --version latest
|
|
137
|
+
```
|
|
138
|
+
|
|
139
|
+
**What it does:**
|
|
140
|
+
|
|
141
|
+
1. Walks all source directories and parses every markdown/MDX file.
|
|
142
|
+
2. Extracts translatable nodes and stores their MD5 keys, source text, node types, file paths, and line numbers in the SQLite cache.
|
|
143
|
+
3. Deletes orphaned translations whose source keys no longer exist in any source file.
|
|
144
|
+
4. Deletes orphaned source entries that are no longer referenced by any file.
|
|
145
|
+
|
|
146
|
+
Run `rescan` after adding, removing, or significantly editing source files.
|
|
147
|
+
|
|
148
|
+
---
|
|
149
|
+
|
|
150
|
+
### `status`
|
|
151
|
+
|
|
152
|
+
Show translation coverage for all projects and versions.
|
|
153
|
+
|
|
154
|
+
```bash
|
|
155
|
+
docs-i18n status [options]
|
|
156
|
+
```
|
|
157
|
+
|
|
158
|
+
**Optional:**
|
|
159
|
+
|
|
160
|
+
| Flag | Default | Description |
|
|
161
|
+
| --- | --- | --- |
|
|
162
|
+
| `--lang <code>` | all languages | Show status for a specific language only |
|
|
163
|
+
|
|
164
|
+
**Examples:**
|
|
165
|
+
|
|
166
|
+
```bash
|
|
167
|
+
# Show status for all languages
|
|
168
|
+
docs-i18n status
|
|
169
|
+
|
|
170
|
+
# Show status for Chinese only
|
|
171
|
+
docs-i18n status --lang zh-hans
|
|
172
|
+
```
|
|
173
|
+
|
|
174
|
+
**Output:**
|
|
175
|
+
|
|
176
|
+
```
|
|
177
|
+
Translation Status
|
|
178
|
+
|
|
179
|
+
latest (1523 keys):
|
|
180
|
+
zh-hans ████████████████████ 100% (1523/1523)
|
|
181
|
+
ja ████████████░░░░░░░░ 62% (944/1523)
|
|
182
|
+
es ██░░░░░░░░░░░░░░░░░░ 10% (152/1523)
|
|
183
|
+
```
|
|
184
|
+
|
|
185
|
+
If a version shows "no source files (run rescan first)", you need to run `docs-i18n rescan` before checking status.
|
|
186
|
+
|
|
187
|
+
---
|
|
188
|
+
|
|
189
|
+
### `admin`
|
|
190
|
+
|
|
191
|
+
Start the admin dashboard web UI for managing translations.
|
|
192
|
+
|
|
193
|
+
```bash
|
|
194
|
+
docs-i18n admin [options]
|
|
195
|
+
```
|
|
196
|
+
|
|
197
|
+
**Optional:**
|
|
198
|
+
|
|
199
|
+
| Flag | Default | Description |
|
|
200
|
+
| --- | --- | --- |
|
|
201
|
+
| `--port <n>` | 3456 | Port to run the dashboard on |
|
|
202
|
+
|
|
203
|
+
**Examples:**
|
|
204
|
+
|
|
205
|
+
```bash
|
|
206
|
+
# Start on default port
|
|
207
|
+
docs-i18n admin
|
|
208
|
+
|
|
209
|
+
# Start on custom port
|
|
210
|
+
docs-i18n admin --port 4000
|
|
211
|
+
```
|
|
212
|
+
|
|
213
|
+
The dashboard opens at `http://localhost:3456` (or your custom port). See the [Admin Dashboard](./admin.md) documentation for details on its features.
|
|
214
|
+
|
|
215
|
+
---
|
|
216
|
+
|
|
217
|
+
### `site`
|
|
218
|
+
|
|
219
|
+
Manage the documentation site. Built with TanStack Start and supports dev server, production build, content upload to D1, and deployment to Cloudflare Workers.
|
|
220
|
+
|
|
221
|
+
```bash
|
|
222
|
+
docs-i18n site [subcommand] [options]
|
|
223
|
+
```
|
|
224
|
+
|
|
225
|
+
**Subcommands:**
|
|
226
|
+
|
|
227
|
+
| Subcommand | Description |
|
|
228
|
+
| --- | --- |
|
|
229
|
+
| `dev` (default) | Start the Vite dev server with hot reload |
|
|
230
|
+
| `build` | Build the site for production |
|
|
231
|
+
| `upload` | Upload content and translations to Cloudflare D1 |
|
|
232
|
+
| `deploy` | Deploy to Cloudflare Workers |
|
|
233
|
+
|
|
234
|
+
**Optional:**
|
|
235
|
+
|
|
236
|
+
| Flag | Default | Description |
|
|
237
|
+
| --- | --- | --- |
|
|
238
|
+
| `--port <n>` | 3000 | Port for the dev server |
|
|
239
|
+
| `--db <name>` | `docs-i18n-db` | D1 database name (for `upload`) |
|
|
240
|
+
|
|
241
|
+
**Examples:**
|
|
242
|
+
|
|
243
|
+
```bash
|
|
244
|
+
# Start dev server
|
|
245
|
+
docs-i18n site
|
|
246
|
+
|
|
247
|
+
# Start on custom port
|
|
248
|
+
docs-i18n site dev --port 4000
|
|
249
|
+
|
|
250
|
+
# Build for production
|
|
251
|
+
docs-i18n site build
|
|
252
|
+
|
|
253
|
+
# Upload content to D1
|
|
254
|
+
docs-i18n site upload --db my-docs-db
|
|
255
|
+
|
|
256
|
+
# Deploy to Cloudflare Workers
|
|
257
|
+
docs-i18n site deploy
|
|
258
|
+
```
|
|
259
|
+
|
|
260
|
+
**How it works:**
|
|
261
|
+
|
|
262
|
+
The `site` command does not require a `docs-i18n.config.ts`. It reads the `site.config.ts` file from your project root to configure the documentation site (project names, colors, locales, etc.).
|
|
263
|
+
|
|
264
|
+
On first run, the template's dependencies are automatically installed. The dev server uses Vite with the consumer's `site.config.ts` aliased into the template for customization.
|
|
265
|
+
|
|
266
|
+
For non-English languages, the site automatically loads translations from the `.cache/translations.db` SQLite cache and assembles them at runtime.
|
|
267
|
+
|
|
268
|
+
---
|
|
269
|
+
|
|
270
|
+
## Common Workflows
|
|
271
|
+
|
|
272
|
+
### Full translation pipeline
|
|
273
|
+
|
|
274
|
+
```bash
|
|
275
|
+
# 1. Scan source files
|
|
276
|
+
docs-i18n rescan
|
|
277
|
+
|
|
278
|
+
# 2. Check what needs translating
|
|
279
|
+
docs-i18n status
|
|
280
|
+
|
|
281
|
+
# 3. Translate
|
|
282
|
+
docs-i18n translate --lang zh-hans
|
|
283
|
+
|
|
284
|
+
# 4. Assemble output
|
|
285
|
+
docs-i18n assemble --lang zh-hans
|
|
286
|
+
```
|
|
287
|
+
|
|
288
|
+
### Translate a single file for review
|
|
289
|
+
|
|
290
|
+
```bash
|
|
291
|
+
docs-i18n translate --lang zh-hans --files docs/getting-started.mdx --max 1
|
|
292
|
+
docs-i18n assemble --lang zh-hans
|
|
293
|
+
```
|
|
294
|
+
|
|
295
|
+
### Preview translation volume before spending API credits
|
|
296
|
+
|
|
297
|
+
```bash
|
|
298
|
+
docs-i18n translate --lang ja --dry-run
|
|
299
|
+
```
|
|
300
|
+
|
|
301
|
+
This reports the number of untranslated keys and chunks without making any API calls.
|
|
302
|
+
|
|
303
|
+
### Multi-project translation
|
|
304
|
+
|
|
305
|
+
```bash
|
|
306
|
+
# Translate only the "query" project
|
|
307
|
+
docs-i18n translate --lang zh-hans --project query
|
|
308
|
+
|
|
309
|
+
# Assemble only the "table" project
|
|
310
|
+
docs-i18n assemble --project table --lang zh-hans
|
|
311
|
+
```
|
|
312
|
+
|
|
313
|
+
### After editing source files
|
|
314
|
+
|
|
315
|
+
```bash
|
|
316
|
+
# Rescan to detect changes and clean orphans
|
|
317
|
+
docs-i18n rescan
|
|
318
|
+
|
|
319
|
+
# Translate only new/changed content (cached translations are reused)
|
|
320
|
+
docs-i18n translate --lang zh-hans
|
|
321
|
+
|
|
322
|
+
# Rebuild output files
|
|
323
|
+
docs-i18n assemble
|
|
324
|
+
```
|
package/template/package.json
CHANGED
|
@@ -12,8 +12,8 @@
|
|
|
12
12
|
"@shikijs/transformers": "^3.0.0",
|
|
13
13
|
"@tailwindcss/typography": "^0.5.16",
|
|
14
14
|
"@tailwindcss/vite": "^4.1.7",
|
|
15
|
-
"@tanstack/react-router": "^1.120.3",
|
|
16
15
|
"@tanstack/react-query": "^5.0.0",
|
|
16
|
+
"@tanstack/react-router": "^1.120.3",
|
|
17
17
|
"@tanstack/react-start": "^1.120.3",
|
|
18
18
|
"drizzle-orm": "^0.38.0",
|
|
19
19
|
"gray-matter": "^4.0.3",
|
|
@@ -32,6 +32,7 @@
|
|
|
32
32
|
"rehype-slug": "^6.0.0",
|
|
33
33
|
"rehype-stringify": "^10.0.1",
|
|
34
34
|
"remark-gfm": "^4.0.1",
|
|
35
|
+
"remark-mdx": "^3.1.1",
|
|
35
36
|
"remark-parse": "^11.0.0",
|
|
36
37
|
"remark-rehype": "^11.1.2",
|
|
37
38
|
"shiki": "^3.0.0",
|