fumadocs-mdx 11.5.6 → 11.5.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-SLCPEEMF.js → chunk-DRVUBK5B.js} +1 -4
- package/dist/{chunk-R6U7CJLB.js → chunk-HFLDWPJA.js} +1 -1
- package/dist/chunk-MK7EXW7O.js +75 -0
- package/dist/{chunk-IZURUUPO.js → chunk-VFALQK6O.js} +3 -9
- package/dist/config/index.cjs +53 -31
- package/dist/config/index.d.cts +1 -1
- package/dist/config/index.d.ts +1 -1
- package/dist/config/index.js +1 -1
- package/dist/{define-DxwgTgV6.d.cts → define-CGHfrlrJ.d.cts} +1 -8
- package/dist/{define-DxwgTgV6.d.ts → define-CGHfrlrJ.d.ts} +1 -8
- package/dist/index.cjs +3 -9
- package/dist/index.d.cts +4 -4
- package/dist/index.d.ts +4 -4
- package/dist/index.js +1 -1
- package/dist/loader-mdx.cjs +54 -35
- package/dist/loader-mdx.js +3 -3
- package/dist/next/index.cjs +5 -7
- package/dist/next/index.js +6 -5
- package/dist/runtime/async.cjs +57 -44
- package/dist/runtime/async.d.cts +2 -2
- package/dist/runtime/async.d.ts +2 -2
- package/dist/runtime/async.js +3 -3
- package/dist/{types-2R3kLFSi.d.ts → types-DCyuz-WB.d.cts} +1 -7
- package/dist/{types-BIA23Xld.d.cts → types-DfVJrYH1.d.ts} +1 -7
- package/package.json +7 -6
- package/dist/chunk-PY2KKTR2.js +0 -53
@@ -0,0 +1,75 @@
+// src/mdx-plugins/remark-include.ts
+import { visit } from "unist-util-visit";
+import * as path from "node:path";
+import * as fs from "node:fs/promises";
+import matter from "gray-matter";
+function flattenNode(node) {
+  if ("children" in node)
+    return node.children.map((child) => flattenNode(child)).join("");
+  if ("value" in node) return node.value;
+  return "";
+}
+function remarkInclude() {
+  const TagName = "include";
+  async function update(tree, file, processor, compiler) {
+    const queue = [];
+    visit(
+      tree,
+      ["mdxJsxFlowElement", "mdxJsxTextElement"],
+      (node, _, parent) => {
+        let specifier;
+        const params = {};
+        if ((node.type === "mdxJsxFlowElement" || node.type === "mdxJsxTextElement") && node.name === TagName) {
+          const value = flattenNode(node);
+          if (value.length > 0) {
+            for (const attr of node.attributes) {
+              if (attr.type === "mdxJsxAttribute" && (typeof attr.value === "string" || attr.value === null)) {
+                params[attr.name] = attr.value;
+              }
+            }
+            specifier = value;
+          }
+        }
+        if (!specifier) return;
+        const targetPath = path.resolve(
+          "cwd" in params ? process.cwd() : path.dirname(file),
+          specifier
+        );
+        const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
+        queue.push(
+          fs.readFile(targetPath).then(async (content) => {
+            compiler?.addDependency(targetPath);
+            if (asCode) {
+              const lang = params.lang ?? path.extname(specifier).slice(1);
+              Object.assign(node, {
+                type: "code",
+                lang,
+                meta: params.meta,
+                value: content.toString(),
+                data: {}
+              });
+              return;
+            }
+            const parsed = processor.parse(matter(content).content);
+            await update(parsed, targetPath, processor, compiler);
+            Object.assign(
+              parent && parent.type === "paragraph" ? parent : node,
+              parsed
+            );
+          }).catch((e) => {
+            console.warn(`failed to read file: ${targetPath}`, e);
+          })
+        );
+        return "skip";
+      }
+    );
+    await Promise.all(queue);
+  }
+  return async (tree, file) => {
+    await update(tree, file.path, this, file.data._compiler);
+  };
+}
+
+export {
+  remarkInclude
+};
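The rewritten plugin resolves `<include>` elements from their MDX JSX attributes: a `cwd` attribute resolves the target against `process.cwd()` instead of the including file's directory, a `lang` attribute (or any target that is not `.md`/`.mdx`) embeds the file as a code block with optional `meta`, and read failures are now caught and logged instead of rejecting the build. A minimal sketch of running the transformer through a plain unified pipeline is shown below; the deep import of the internal chunk and the content paths are illustrative assumptions, not part of this diff.

```ts
// Sketch only: exercising the new remarkInclude with remark + remark-mdx.
// fumadocs-mdx normally wires this plugin up inside its own MDX loader; the
// chunk import below is the internal file from this diff, shown for illustration.
import { remark } from "remark";
import remarkMdx from "remark-mdx";
import { remarkInclude } from "fumadocs-mdx/dist/chunk-MK7EXW7O.js";

const source = [
  "<include>./shared/intro.mdx</include>", // parsed and inlined as MDX
  "",
  '<include lang="ts">../examples/demo.ts</include>', // embedded as a code block
].join("\n");

// file.path is what the plugin uses to resolve relative include targets.
const file = await remark()
  .use(remarkMdx)
  .use(remarkInclude)
  .process({ path: "content/docs/index.mdx", value: source });

console.log(String(file));
```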
@@ -34,21 +34,15 @@ var _runtime = {
 };
 function createMDXSource(docs, meta = []) {
   return {
-    files: (
+    files: () => resolveFiles({
       docs,
-      meta
-      rootDir
+      meta
     })
   };
 }
-function resolveFiles({
-  docs,
-  meta,
-  rootDir = ""
-}) {
+function resolveFiles({ docs, meta }) {
   const outputs = [];
   for (const entry of docs) {
-    if (!entry._file.path.startsWith(rootDir)) continue;
     outputs.push({
       type: "page",
       path: entry._file.path,
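`createMDXSource` now hands fumadocs-core a zero-argument `files` callback, and `resolveFiles` no longer filters entries by `rootDir`. Consumption stays the same; a hedged sketch follows, where the `@/.source` import and the `docs`/`meta` export names stand in for whatever your generated collections module provides.

```ts
// Sketch: passing the updated createMDXSource to fumadocs-core's loader.
// "@/.source" and the docs/meta names are placeholders for the generated
// output in a typical project, not something defined by this diff.
import { loader } from "fumadocs-core/source";
import { createMDXSource } from "fumadocs-mdx";
import { docs, meta } from "@/.source";

export const source = loader({
  baseUrl: "/docs",
  // files is now () => resolveFiles({ docs, meta }); the rootDir-based
  // filtering inside resolveFiles was removed in this release.
  source: createMDXSource(docs, meta),
});
```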
package/dist/config/index.cjs
CHANGED
@@ -209,44 +209,66 @@ var import_unist_util_visit = require("unist-util-visit");
 var path = __toESM(require("path"), 1);
 var fs = __toESM(require("fs/promises"), 1);
 var import_gray_matter = __toESM(require("gray-matter"), 1);
+function flattenNode(node) {
+  if ("children" in node)
+    return node.children.map((child) => flattenNode(child)).join("");
+  if ("value" in node) return node.value;
+  return "";
+}
 function remarkInclude() {
   const TagName = "include";
   async function update(tree, file, processor, compiler) {
     const queue = [];
-    (0, import_unist_util_visit.visit)(tree, ["mdxJsxFlowElement", "paragraph"], (node) => {
-      let specifier;
-      if (node.type === "paragraph" && node.children.length === 3) {
-        const [open, content, closure] = node.children;
-        if (open.type === "html" && open.value === `<${TagName}>` && content.type === "text" && closure.type === "html" && closure.value === `</${TagName}>`) {
-          specifier = content.value.trim();
-        }
-      } else if (node.type === "paragraph" && node.children.length === 1) {
-        const child = node.children[0];
-        if (child.type === "mdxJsxTextElement" && child.name === TagName) {
-          const text = child.children.at(0);
-          if (text && text.type === "text") {
-            specifier = text.value;
+    (0, import_unist_util_visit.visit)(
+      tree,
+      ["mdxJsxFlowElement", "mdxJsxTextElement"],
+      (node, _, parent) => {
+        let specifier;
+        const params = {};
+        if ((node.type === "mdxJsxFlowElement" || node.type === "mdxJsxTextElement") && node.name === TagName) {
+          const value = flattenNode(node);
+          if (value.length > 0) {
+            for (const attr of node.attributes) {
+              if (attr.type === "mdxJsxAttribute" && (typeof attr.value === "string" || attr.value === null)) {
+                params[attr.name] = attr.value;
+              }
+            }
+            specifier = value;
           }
         }
+        if (!specifier) return;
+        const targetPath = path.resolve(
+          "cwd" in params ? process.cwd() : path.dirname(file),
+          specifier
+        );
+        const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
+        queue.push(
+          fs.readFile(targetPath).then(async (content) => {
+            compiler?.addDependency(targetPath);
+            if (asCode) {
+              const lang = params.lang ?? path.extname(specifier).slice(1);
+              Object.assign(node, {
+                type: "code",
+                lang,
+                meta: params.meta,
+                value: content.toString(),
+                data: {}
+              });
+              return;
+            }
+            const parsed = processor.parse((0, import_gray_matter.default)(content).content);
+            await update(parsed, targetPath, processor, compiler);
+            Object.assign(
+              parent && parent.type === "paragraph" ? parent : node,
+              parsed
+            );
+          }).catch((e) => {
+            console.warn(`failed to read file: ${targetPath}`, e);
+          })
+        );
+        return "skip";
       }
-      if (node.type === "mdxJsxFlowElement" && node.name === TagName) {
-        const child = node.children.at(0);
-        if (child && child.type === "text") {
-          specifier = child.value;
-        }
-      }
-      if (!specifier) return;
-      const targetPath = path.resolve(path.dirname(file), specifier);
-      queue.push(
-        fs.readFile(targetPath).then(async (content) => {
-          const parsed = processor.parse((0, import_gray_matter.default)(content).content);
-          compiler?.addDependency(targetPath);
-          await update(parsed, targetPath, processor, compiler);
-          Object.assign(node, parsed);
-        })
-      );
-      return "skip";
-    });
+    );
     await Promise.all(queue);
   }
   return async (tree, file) => {
package/dist/config/index.d.cts
CHANGED
@@ -1,4 +1,4 @@
-export { a as BaseCollection, B as BaseCollectionEntry, C as CollectionSchema, h as DefaultMDXOptions, D as DocCollection, c as DocsCollection, F as FileInfo, G as GlobalConfig, M as MarkdownProps, b as MetaCollection, d as defineCollections, g as defineConfig, e as defineDocs, f as frontmatterSchema, i as getDefaultMDXOptions, m as metaSchema } from '../define-
+export { a as BaseCollection, B as BaseCollectionEntry, C as CollectionSchema, h as DefaultMDXOptions, D as DocCollection, c as DocsCollection, F as FileInfo, G as GlobalConfig, M as MarkdownProps, b as MetaCollection, d as defineCollections, g as defineConfig, e as defineDocs, f as frontmatterSchema, i as getDefaultMDXOptions, m as metaSchema } from '../define-CGHfrlrJ.cjs';
 import { Processor, Transformer } from 'unified';
 import { Root } from 'mdast';
 import '@mdx-js/mdx';
package/dist/config/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-export { a as BaseCollection, B as BaseCollectionEntry, C as CollectionSchema, h as DefaultMDXOptions, D as DocCollection, c as DocsCollection, F as FileInfo, G as GlobalConfig, M as MarkdownProps, b as MetaCollection, d as defineCollections, g as defineConfig, e as defineDocs, f as frontmatterSchema, i as getDefaultMDXOptions, m as metaSchema } from '../define-
+export { a as BaseCollection, B as BaseCollectionEntry, C as CollectionSchema, h as DefaultMDXOptions, D as DocCollection, c as DocsCollection, F as FileInfo, G as GlobalConfig, M as MarkdownProps, b as MetaCollection, d as defineCollections, g as defineConfig, e as defineDocs, f as frontmatterSchema, i as getDefaultMDXOptions, m as metaSchema } from '../define-CGHfrlrJ.js';
 import { Processor, Transformer } from 'unified';
 import { Root } from 'mdast';
 import '@mdx-js/mdx';
package/dist/config/index.js
CHANGED
@@ -34,13 +34,6 @@ interface GlobalConfig {
     * @defaultValue 'none'
     */
    lastModifiedTime?: 'git' | 'none';
-    /**
-     * Generate manifest file on build mode
-     *
-     * @defaultValue false
-     * @deprecated No longer needed, use a route handler to export build time info
-     */
-    generateManifest?: boolean;
 }
 interface FileInfo {
    path: string;
@@ -168,7 +161,7 @@ interface DocsCollection<DocSchema extends StandardSchemaV1 = StandardSchemaV1,
    docs: DocCollection<DocSchema, Async>;
    meta: MetaCollection<MetaSchema>;
 }
-declare function defineCollections<T extends 'doc' | 'meta', Schema extends StandardSchemaV1 = StandardSchemaV1, Async extends boolean = false>(options: {
+declare function defineCollections<T extends 'doc' | 'meta', Schema extends StandardSchemaV1 = StandardSchemaV1<unknown, any>, Async extends boolean = false>(options: {
    type: T;
 } & (T extends 'doc' ? DocCollection<Schema, Async> : MetaCollection<Schema>)): {
    type: T;
@@ -34,13 +34,6 @@ interface GlobalConfig {
     * @defaultValue 'none'
     */
    lastModifiedTime?: 'git' | 'none';
-    /**
-     * Generate manifest file on build mode
-     *
-     * @defaultValue false
-     * @deprecated No longer needed, use a route handler to export build time info
-     */
-    generateManifest?: boolean;
 }
 interface FileInfo {
    path: string;
@@ -168,7 +161,7 @@ interface DocsCollection<DocSchema extends StandardSchemaV1 = StandardSchemaV1,
    docs: DocCollection<DocSchema, Async>;
    meta: MetaCollection<MetaSchema>;
 }
-declare function defineCollections<T extends 'doc' | 'meta', Schema extends StandardSchemaV1 = StandardSchemaV1, Async extends boolean = false>(options: {
+declare function defineCollections<T extends 'doc' | 'meta', Schema extends StandardSchemaV1 = StandardSchemaV1<unknown, any>, Async extends boolean = false>(options: {
    type: T;
 } & (T extends 'doc' ? DocCollection<Schema, Async> : MetaCollection<Schema>)): {
    type: T;
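Besides dropping the deprecated `generateManifest` flag, these declarations relax the default `Schema` type parameter of `defineCollections` from `StandardSchemaV1` to `StandardSchemaV1<unknown, any>`; collections that pass an explicit schema are unaffected. A small sketch, where the collection name, directory, and extra field are made up for illustration:

```ts
// source.config.ts sketch: a doc collection with an explicit Standard Schema.
// zod implements the Standard Schema spec, so the relaxed default parameter
// changes nothing for configs like this one.
import { defineCollections, frontmatterSchema } from "fumadocs-mdx/config";
import { z } from "zod";

export const blog = defineCollections({
  type: "doc",
  dir: "content/blog",
  schema: frontmatterSchema.extend({
    // illustrative extra frontmatter field
    date: z.string().optional(),
  }),
});
```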
package/dist/index.cjs
CHANGED
@@ -62,21 +62,15 @@ var _runtime = {
 };
 function createMDXSource(docs, meta = []) {
   return {
-    files: (
+    files: () => resolveFiles({
       docs,
-      meta
-      rootDir
+      meta
     })
   };
 }
-function resolveFiles({
-  docs,
-  meta,
-  rootDir = ""
-}) {
+function resolveFiles({ docs, meta }) {
   const outputs = [];
   for (const entry of docs) {
-    if (!entry._file.path.startsWith(rootDir)) continue;
     outputs.push({
       type: "page",
       path: entry._file.path,
package/dist/index.d.cts
CHANGED
@@ -1,7 +1,7 @@
 import { PageData, MetaData, Source, VirtualFile } from 'fumadocs-core/source';
-import { B as BaseCollectionEntry } from './define-
-import { R as Runtime } from './types-
-export { a as RuntimeFile } from './types-
+import { B as BaseCollectionEntry } from './define-CGHfrlrJ.cjs';
+import { R as Runtime } from './types-DCyuz-WB.cjs';
+export { a as RuntimeFile } from './types-DCyuz-WB.cjs';
 import '@mdx-js/mdx';
 import 'mdx/types';
 import 'fumadocs-core/mdx-plugins';
@@ -22,6 +22,6 @@ interface ResolveOptions {
    meta: BaseCollectionEntry[];
    rootDir?: string;
 }
-declare function resolveFiles({ docs, meta
+declare function resolveFiles({ docs, meta }: ResolveOptions): VirtualFile[];
 
 export { _runtime, createMDXSource, resolveFiles };
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import { PageData, MetaData, Source, VirtualFile } from 'fumadocs-core/source';
|
|
2
|
-
import { B as BaseCollectionEntry } from './define-
|
|
3
|
-
import { R as Runtime } from './types-
|
|
4
|
-
export { a as RuntimeFile } from './types-
|
|
2
|
+
import { B as BaseCollectionEntry } from './define-CGHfrlrJ.js';
|
|
3
|
+
import { R as Runtime } from './types-DfVJrYH1.js';
|
|
4
|
+
export { a as RuntimeFile } from './types-DfVJrYH1.js';
|
|
5
5
|
import '@mdx-js/mdx';
|
|
6
6
|
import 'mdx/types';
|
|
7
7
|
import 'fumadocs-core/mdx-plugins';
|
|
@@ -22,6 +22,6 @@ interface ResolveOptions {
|
|
|
22
22
|
meta: BaseCollectionEntry[];
|
|
23
23
|
rootDir?: string;
|
|
24
24
|
}
|
|
25
|
-
declare function resolveFiles({ docs, meta
|
|
25
|
+
declare function resolveFiles({ docs, meta }: ResolveOptions): VirtualFile[];
|
|
26
26
|
|
|
27
27
|
export { _runtime, createMDXSource, resolveFiles };
|
package/dist/index.js
CHANGED
package/dist/loader-mdx.cjs
CHANGED
@@ -200,10 +200,7 @@ function buildConfig(config) {
     null,
     {
       global: globalConfig,
-      collections
-      _runtime: {
-        files: /* @__PURE__ */ new Map()
-      }
+      collections
     }
   ];
 }
@@ -263,44 +260,66 @@ var import_unist_util_visit = require("unist-util-visit");
 var path2 = __toESM(require("path"), 1);
 var fs2 = __toESM(require("fs/promises"), 1);
 var import_gray_matter = __toESM(require("gray-matter"), 1);
+function flattenNode(node) {
+  if ("children" in node)
+    return node.children.map((child) => flattenNode(child)).join("");
+  if ("value" in node) return node.value;
+  return "";
+}
 function remarkInclude() {
   const TagName = "include";
   async function update(tree, file, processor, compiler) {
     const queue = [];
-    (0, import_unist_util_visit.visit)(tree, ["mdxJsxFlowElement", "paragraph"], (node) => {
-      let specifier;
-      if (node.type === "paragraph" && node.children.length === 3) {
-        const [open, content, closure] = node.children;
-        if (open.type === "html" && open.value === `<${TagName}>` && content.type === "text" && closure.type === "html" && closure.value === `</${TagName}>`) {
-          specifier = content.value.trim();
-        }
-      } else if (node.type === "paragraph" && node.children.length === 1) {
-        const child = node.children[0];
-        if (child.type === "mdxJsxTextElement" && child.name === TagName) {
-          const text = child.children.at(0);
-          if (text && text.type === "text") {
-            specifier = text.value;
+    (0, import_unist_util_visit.visit)(
+      tree,
+      ["mdxJsxFlowElement", "mdxJsxTextElement"],
+      (node, _, parent) => {
+        let specifier;
+        const params = {};
+        if ((node.type === "mdxJsxFlowElement" || node.type === "mdxJsxTextElement") && node.name === TagName) {
+          const value = flattenNode(node);
+          if (value.length > 0) {
+            for (const attr of node.attributes) {
+              if (attr.type === "mdxJsxAttribute" && (typeof attr.value === "string" || attr.value === null)) {
+                params[attr.name] = attr.value;
+              }
+            }
+            specifier = value;
          }
        }
+        if (!specifier) return;
+        const targetPath = path2.resolve(
+          "cwd" in params ? process.cwd() : path2.dirname(file),
+          specifier
+        );
+        const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
+        queue.push(
+          fs2.readFile(targetPath).then(async (content) => {
+            compiler?.addDependency(targetPath);
+            if (asCode) {
+              const lang = params.lang ?? path2.extname(specifier).slice(1);
+              Object.assign(node, {
+                type: "code",
+                lang,
+                meta: params.meta,
+                value: content.toString(),
+                data: {}
+              });
+              return;
+            }
+            const parsed = processor.parse((0, import_gray_matter.default)(content).content);
+            await update(parsed, targetPath, processor, compiler);
+            Object.assign(
+              parent && parent.type === "paragraph" ? parent : node,
+              parsed
+            );
+          }).catch((e) => {
+            console.warn(`failed to read file: ${targetPath}`, e);
+          })
+        );
+        return "skip";
      }
-      if (node.type === "mdxJsxFlowElement" && node.name === TagName) {
-        const child = node.children.at(0);
-        if (child && child.type === "text") {
-          specifier = child.value;
-        }
-      }
-      if (!specifier) return;
-      const targetPath = path2.resolve(path2.dirname(file), specifier);
-      queue.push(
-        fs2.readFile(targetPath).then(async (content) => {
-          const parsed = processor.parse((0, import_gray_matter.default)(content).content);
-          compiler?.addDependency(targetPath);
-          await update(parsed, targetPath, processor, compiler);
-          Object.assign(node, parsed);
-        })
-      );
-      return "skip";
-    });
+    );
     await Promise.all(queue);
   }
   return async (tree, file) => {
package/dist/loader-mdx.js
CHANGED
@@ -1,14 +1,14 @@
 import {
   getConfigHash,
   loadConfig
-} from "./chunk-
+} from "./chunk-HFLDWPJA.js";
 import {
   validate
 } from "./chunk-KGLACICA.js";
 import {
   remarkInclude
-} from "./chunk-
-import "./chunk-
+} from "./chunk-MK7EXW7O.js";
+import "./chunk-DRVUBK5B.js";
 
 // src/loader-mdx.ts
 import * as path2 from "node:path";
package/dist/next/index.cjs
CHANGED
@@ -110,10 +110,7 @@ function buildConfig(config) {
     null,
     {
       global: globalConfig,
-      collections
-      _runtime: {
-        files: /* @__PURE__ */ new Map()
-      }
+      collections
     }
   ];
 }
@@ -226,7 +223,10 @@ ${issues.map((issue) => ` ${issue.path}: ${issue.message}`).join("\n")}`;
 }
 
 // src/map/file-cache.ts
-var
+var import_lru_cache = require("lru-cache");
+var map = new import_lru_cache.LRUCache({
+  max: 200
+});
 var fileCache = {
   read(namespace, path6) {
     return map.get(`${namespace}.${path6}`);
@@ -298,11 +298,9 @@ async function generateJS(configPath, config, outputPath, configHash) {
       name: "_source"
     })
   ];
-  config._runtime.files.clear();
  const entries = Array.from(config.collections.entries());
  async function getEntries(collectionName, collection, files) {
    const items = files.map(async (file, i) => {
-      config._runtime.files.set(file.absolutePath, collectionName);
      if (collection.type === "meta") {
        const cached = fileCache.read("generate-js", file.absolutePath);
        if (cached) return cached;
package/dist/next/index.js
CHANGED
@@ -2,11 +2,11 @@ import {
   findConfigFile,
   getConfigHash,
   loadConfig
-} from "../chunk-
+} from "../chunk-HFLDWPJA.js";
 import {
   validate
 } from "../chunk-KGLACICA.js";
-import "../chunk-
+import "../chunk-DRVUBK5B.js";
 
 // src/next/create.ts
 import path3 from "node:path";
@@ -31,7 +31,10 @@ function getTypeFromPath(path5) {
 }
 
 // src/map/file-cache.ts
-
+import { LRUCache } from "lru-cache";
+var map = new LRUCache({
+  max: 200
+});
 var fileCache = {
   read(namespace, path5) {
     return map.get(`${namespace}.${path5}`);
@@ -103,11 +106,9 @@ async function generateJS(configPath, config, outputPath, configHash) {
       name: "_source"
     })
   ];
-  config._runtime.files.clear();
  const entries = Array.from(config.collections.entries());
  async function getEntries(collectionName, collection, files) {
    const items = files.map(async (file, i) => {
-      config._runtime.files.set(file.absolutePath, collectionName);
      if (collection.type === "meta") {
        const cached = fileCache.read("generate-js", file.absolutePath);
        if (cached) return cached;
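Both builds of `src/map/file-cache.ts` now back the cache with `lru-cache` (added to `dependencies` further down) capped at 200 entries instead of an unbounded map. A sketch of the resulting shape; the `write` method is an assumption that mirrors `read`, since only `read` is visible in these hunks:

```ts
// Sketch of the lru-cache backed file cache introduced in this release.
import { LRUCache } from "lru-cache";

// Bounded cache keyed by `${namespace}.${path}`, evicting least-recently-used
// entries once 200 are stored.
const map = new LRUCache<string, object>({ max: 200 });

export const fileCache = {
  read<T extends object>(namespace: string, path: string): T | undefined {
    return map.get(`${namespace}.${path}`) as T | undefined;
  },
  // assumed counterpart to read(); only read() appears in the hunks above
  write(namespace: string, path: string, data: object): void {
    map.set(`${namespace}.${path}`, data);
  },
};
```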
package/dist/runtime/async.cjs
CHANGED
@@ -42,44 +42,66 @@ var import_unist_util_visit = require("unist-util-visit");
 var path = __toESM(require("path"), 1);
 var fs = __toESM(require("fs/promises"), 1);
 var import_gray_matter = __toESM(require("gray-matter"), 1);
+function flattenNode(node) {
+  if ("children" in node)
+    return node.children.map((child) => flattenNode(child)).join("");
+  if ("value" in node) return node.value;
+  return "";
+}
 function remarkInclude() {
   const TagName = "include";
   async function update(tree, file, processor, compiler) {
     const queue = [];
-    (0, import_unist_util_visit.visit)(tree, ["mdxJsxFlowElement", "paragraph"], (node) => {
-      let specifier;
-      if (node.type === "paragraph" && node.children.length === 3) {
-        const [open, content, closure] = node.children;
-        if (open.type === "html" && open.value === `<${TagName}>` && content.type === "text" && closure.type === "html" && closure.value === `</${TagName}>`) {
-          specifier = content.value.trim();
-        }
-      } else if (node.type === "paragraph" && node.children.length === 1) {
-        const child = node.children[0];
-        if (child.type === "mdxJsxTextElement" && child.name === TagName) {
-          const text = child.children.at(0);
-          if (text && text.type === "text") {
-            specifier = text.value;
+    (0, import_unist_util_visit.visit)(
+      tree,
+      ["mdxJsxFlowElement", "mdxJsxTextElement"],
+      (node, _, parent) => {
+        let specifier;
+        const params = {};
+        if ((node.type === "mdxJsxFlowElement" || node.type === "mdxJsxTextElement") && node.name === TagName) {
+          const value = flattenNode(node);
+          if (value.length > 0) {
+            for (const attr of node.attributes) {
+              if (attr.type === "mdxJsxAttribute" && (typeof attr.value === "string" || attr.value === null)) {
+                params[attr.name] = attr.value;
+              }
+            }
+            specifier = value;
          }
        }
+        if (!specifier) return;
+        const targetPath = path.resolve(
+          "cwd" in params ? process.cwd() : path.dirname(file),
+          specifier
+        );
+        const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
+        queue.push(
+          fs.readFile(targetPath).then(async (content) => {
+            compiler?.addDependency(targetPath);
+            if (asCode) {
+              const lang = params.lang ?? path.extname(specifier).slice(1);
+              Object.assign(node, {
+                type: "code",
+                lang,
+                meta: params.meta,
+                value: content.toString(),
+                data: {}
+              });
+              return;
+            }
+            const parsed = processor.parse((0, import_gray_matter.default)(content).content);
+            await update(parsed, targetPath, processor, compiler);
+            Object.assign(
+              parent && parent.type === "paragraph" ? parent : node,
+              parsed
+            );
+          }).catch((e) => {
+            console.warn(`failed to read file: ${targetPath}`, e);
+          })
+        );
+        return "skip";
      }
-      if (node.type === "mdxJsxFlowElement" && node.name === TagName) {
-        const child = node.children.at(0);
-        if (child && child.type === "text") {
-          specifier = child.value;
-        }
-      }
-      if (!specifier) return;
-      const targetPath = path.resolve(path.dirname(file), specifier);
-      queue.push(
-        fs.readFile(targetPath).then(async (content) => {
-          const parsed = processor.parse((0, import_gray_matter.default)(content).content);
-          compiler?.addDependency(targetPath);
-          await update(parsed, targetPath, processor, compiler);
-          Object.assign(node, parsed);
-        })
-      );
-      return "skip";
-    });
+    );
     await Promise.all(queue);
   }
   return async (tree, file) => {
@@ -126,21 +148,15 @@ var _runtime = {
 };
 function createMDXSource(docs, meta = []) {
   return {
-    files: (
+    files: () => resolveFiles({
       docs,
-      meta
-      rootDir
+      meta
     })
   };
 }
-function resolveFiles({
-  docs,
-  meta,
-  rootDir = ""
-}) {
+function resolveFiles({ docs, meta }) {
   const outputs = [];
   for (const entry of docs) {
-    if (!entry._file.path.startsWith(rootDir)) continue;
     outputs.push({
       type: "page",
       path: entry._file.path,
@@ -188,10 +204,7 @@ function buildConfig(config) {
     null,
     {
       global: globalConfig,
-      collections
-      _runtime: {
-        files: /* @__PURE__ */ new Map()
-      }
+      collections
     }
   ];
 }
package/dist/runtime/async.d.cts
CHANGED
@@ -1,5 +1,5 @@
-import { L as LoadedConfig, b as RuntimeAsync } from '../types-
-import '../define-
+import { L as LoadedConfig, b as RuntimeAsync } from '../types-DCyuz-WB.cjs';
+import '../define-CGHfrlrJ.cjs';
 import '@mdx-js/mdx';
 import 'mdx/types';
 import 'fumadocs-core/mdx-plugins';
|
package/dist/runtime/async.d.ts
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import { L as LoadedConfig, b as RuntimeAsync } from '../types-
|
|
2
|
-
import '../define-
|
|
1
|
+
import { L as LoadedConfig, b as RuntimeAsync } from '../types-DfVJrYH1.js';
|
|
2
|
+
import '../define-CGHfrlrJ.js';
|
|
3
3
|
import '@mdx-js/mdx';
|
|
4
4
|
import 'mdx/types';
|
|
5
5
|
import 'fumadocs-core/mdx-plugins';
|
package/dist/runtime/async.js
CHANGED
@@ -1,13 +1,13 @@
 import {
   _runtime,
   createMDXSource
-} from "../chunk-
+} from "../chunk-VFALQK6O.js";
 import {
   remarkInclude
-} from "../chunk-
+} from "../chunk-MK7EXW7O.js";
 import {
   buildConfig
-} from "../chunk-
+} from "../chunk-DRVUBK5B.js";
 
 // src/runtime/async.ts
 import { createCompiler } from "@fumadocs/mdx-remote";
@@ -1,4 +1,4 @@
-import { D as DocCollection, b as MetaCollection, c as DocsCollection, G as GlobalConfig, F as FileInfo, M as MarkdownProps, B as BaseCollectionEntry } from './define-
+import { D as DocCollection, b as MetaCollection, c as DocsCollection, G as GlobalConfig, F as FileInfo, M as MarkdownProps, B as BaseCollectionEntry } from './define-CGHfrlrJ.cjs';
 import { StandardSchemaV1 } from '@standard-schema/spec';
 import { Source, PageData, MetaData } from 'fumadocs-core/source';
 import { ProcessorOptions } from '@mdx-js/mdx';
@@ -7,12 +7,6 @@ import { MDXOptions } from '@fumadocs/mdx-remote';
 interface LoadedConfig {
    collections: Map<string, DocCollection | MetaCollection | DocsCollection>;
    global?: GlobalConfig;
-    _runtime: {
-        /**
-         * Absolute file path and their associated collections
-         */
-        files: Map<string, string>;
-    };
    _mdx_loader?: {
        cachedProcessorOptions?: ProcessorOptions;
    };
@@ -1,4 +1,4 @@
-import { D as DocCollection, b as MetaCollection, c as DocsCollection, G as GlobalConfig, F as FileInfo, M as MarkdownProps, B as BaseCollectionEntry } from './define-
+import { D as DocCollection, b as MetaCollection, c as DocsCollection, G as GlobalConfig, F as FileInfo, M as MarkdownProps, B as BaseCollectionEntry } from './define-CGHfrlrJ.js';
 import { StandardSchemaV1 } from '@standard-schema/spec';
 import { Source, PageData, MetaData } from 'fumadocs-core/source';
 import { ProcessorOptions } from '@mdx-js/mdx';
@@ -7,12 +7,6 @@ import { MDXOptions } from '@fumadocs/mdx-remote';
 interface LoadedConfig {
    collections: Map<string, DocCollection | MetaCollection | DocsCollection>;
    global?: GlobalConfig;
-    _runtime: {
-        /**
-         * Absolute file path and their associated collections
-         */
-        files: Map<string, string>;
-    };
    _mdx_loader?: {
        cachedProcessorOptions?: ProcessorOptions;
    };
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "fumadocs-mdx",
-  "version": "11.5.6",
+  "version": "11.5.8",
   "description": "The built-in source for Fumadocs",
   "keywords": [
     "NextJs",
@@ -46,10 +46,11 @@
     "@standard-schema/spec": "^1.0.0",
     "chokidar": "^4.0.3",
     "cross-spawn": "^7.0.6",
-    "esbuild": "^0.25.
+    "esbuild": "^0.25.2",
     "estree-util-value-to-estree": "^3.3.2",
     "fast-glob": "^3.3.3",
     "gray-matter": "^4.0.3",
+    "lru-cache": "^11.1.0",
     "unist-util-visit": "^5.0.0",
     "zod": "^3.24.2"
   },
@@ -57,15 +58,15 @@
     "@types/cross-spawn": "^6.0.6",
     "@types/mdast": "^4.0.3",
     "@types/mdx": "^2.0.13",
-    "@types/react": "^19.0
+    "@types/react": "^19.1.0",
     "mdast-util-mdx-jsx": "^3.2.0",
-    "next": "^15.
+    "next": "^15.2.4",
     "unified": "^11.0.5",
     "vfile": "^6.0.3",
     "webpack": "^5.97.1",
-    "@fumadocs/mdx-remote": "1.
+    "@fumadocs/mdx-remote": "1.3.0",
     "eslint-config-custom": "0.0.0",
-    "fumadocs-core": "15.
+    "fumadocs-core": "15.2.4",
     "tsconfig": "0.0.0"
   },
   "peerDependencies": {
package/dist/chunk-PY2KKTR2.js
DELETED
@@ -1,53 +0,0 @@
-// src/mdx-plugins/remark-include.ts
-import { visit } from "unist-util-visit";
-import * as path from "node:path";
-import * as fs from "node:fs/promises";
-import matter from "gray-matter";
-function remarkInclude() {
-  const TagName = "include";
-  async function update(tree, file, processor, compiler) {
-    const queue = [];
-    visit(tree, ["mdxJsxFlowElement", "paragraph"], (node) => {
-      let specifier;
-      if (node.type === "paragraph" && node.children.length === 3) {
-        const [open, content, closure] = node.children;
-        if (open.type === "html" && open.value === `<${TagName}>` && content.type === "text" && closure.type === "html" && closure.value === `</${TagName}>`) {
-          specifier = content.value.trim();
-        }
-      } else if (node.type === "paragraph" && node.children.length === 1) {
-        const child = node.children[0];
-        if (child.type === "mdxJsxTextElement" && child.name === TagName) {
-          const text = child.children.at(0);
-          if (text && text.type === "text") {
-            specifier = text.value;
-          }
-        }
-      }
-      if (node.type === "mdxJsxFlowElement" && node.name === TagName) {
-        const child = node.children.at(0);
-        if (child && child.type === "text") {
-          specifier = child.value;
-        }
-      }
-      if (!specifier) return;
-      const targetPath = path.resolve(path.dirname(file), specifier);
-      queue.push(
-        fs.readFile(targetPath).then(async (content) => {
-          const parsed = processor.parse(matter(content).content);
-          compiler?.addDependency(targetPath);
-          await update(parsed, targetPath, processor, compiler);
-          Object.assign(node, parsed);
-        })
-      );
-      return "skip";
-    });
-    await Promise.all(queue);
-  }
-  return async (tree, file) => {
-    await update(tree, file.path, this, file.data._compiler);
-  };
-}
-
-export {
-  remarkInclude
-};