fumadocs-mdx 11.5.7 → 11.6.0
This diff compares the published contents of the two package versions as they appear in their public registry, and is provided for informational purposes only.
- package/dist/{chunk-KGLACICA.js → chunk-2ZOW45YZ.js} +25 -5
- package/dist/{chunk-SLCPEEMF.js → chunk-DRVUBK5B.js} +1 -4
- package/dist/{chunk-R6U7CJLB.js → chunk-HFLDWPJA.js} +1 -1
- package/dist/chunk-MK7EXW7O.js +75 -0
- package/dist/{chunk-IZURUUPO.js → chunk-VFALQK6O.js} +3 -9
- package/dist/config/index.cjs +47 -39
- package/dist/config/index.d.cts +1 -1
- package/dist/config/index.d.ts +1 -1
- package/dist/config/index.js +2 -2
- package/dist/{define-P0QTVn7W.d.cts → define-BaW0PQDJ.d.cts} +2 -2
- package/dist/{define-P0QTVn7W.d.ts → define-BaW0PQDJ.d.ts} +2 -2
- package/dist/index.cjs +3 -9
- package/dist/index.d.cts +3 -4
- package/dist/index.d.ts +3 -4
- package/dist/index.js +1 -1
- package/dist/loader-mdx.cjs +72 -49
- package/dist/loader-mdx.js +5 -5
- package/dist/next/index.cjs +110 -108
- package/dist/next/index.js +89 -102
- package/dist/runtime/async.cjs +54 -56
- package/dist/runtime/async.d.cts +2 -2
- package/dist/runtime/async.d.ts +2 -2
- package/dist/runtime/async.js +9 -9
- package/dist/{types-CJRGJLAg.d.ts → types-BNrQHCj5.d.cts} +10 -10
- package/dist/{types-bWXuqsw9.d.cts → types-DEduCvIT.d.ts} +10 -10
- package/package.json +11 -9
- package/dist/chunk-ITGWT23S.js +0 -68

package/dist/{chunk-KGLACICA.js → chunk-2ZOW45YZ.js} RENAMED
@@ -1,5 +1,6 @@
 // src/utils/schema.ts
 import { z } from "zod";
+import picocolors from "picocolors";
 var metaSchema = z.object({
   title: z.string().optional(),
   pages: z.array(z.string()).optional(),
@@ -16,6 +17,28 @@ var frontmatterSchema = z.object({
   // Fumadocs OpenAPI generated
   _openapi: z.object({}).passthrough().optional()
 });
+var ValidationError = class extends Error {
+  constructor(message, issues) {
+    super(message);
+    this.issues = issues;
+  }
+  print() {
+    console.error(
+      [
+        `[MDX] ${this.message}:`,
+        ...this.issues.map(
+          (issue) => picocolors.redBright(
+            `- ${picocolors.bold(issue.path?.join(".") ?? "*")}: ${issue.message}`
+          )
+        )
+      ].join("\n")
+    );
+  }
+  toString() {
+    return `${this.message}:
+${this.issues.map((issue) => ` ${issue.path}: ${issue.message}`).join("\n")}`;
+  }
+};
 async function validate(schema, data, context, errorMessage) {
   if (typeof schema === "function" && !("~standard" in schema)) {
     schema = schema(context);
@@ -25,19 +48,16 @@ async function validate(schema, data, context, errorMessage) {
       data
     );
     if (result.issues) {
-      throw new
+      throw new ValidationError(errorMessage, result.issues);
     }
     return result.value;
   }
   return data;
 }
-function formatError(message, issues) {
-  return `${message}:
-${issues.map((issue) => ` ${issue.path}: ${issue.message}`).join("\n")}`;
-}
 
 export {
   metaSchema,
   frontmatterSchema,
+  ValidationError,
   validate
 };
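
The removed `formatError` helper is replaced by a `ValidationError` class that keeps the structured Standard Schema issues and can pretty-print them with picocolors; `validate` now throws it instead of a plain `Error`. Below is a minimal TypeScript sketch of the same shape, with the issue fields reduced to what `print()` uses; the sample values are illustrative, not output from the package.

```ts
import picocolors from "picocolors";

// Shape of a validation issue, reduced to the fields used by print() above.
interface Issue {
  path?: (string | number)[];
  message: string;
}

class ValidationError extends Error {
  constructor(message: string, readonly issues: readonly Issue[]) {
    super(message);
  }

  // Mirrors print() from the diff: header line, then one red line per issue
  // with the dotted issue path in bold.
  print(): void {
    console.error(
      [
        `[MDX] ${this.message}:`,
        ...this.issues.map((issue) =>
          picocolors.redBright(
            `- ${picocolors.bold(issue.path?.join(".") ?? "*")}: ${issue.message}`
          )
        ),
      ].join("\n")
    );
  }
}

// Illustrative values only, not real package output:
new ValidationError("invalid frontmatter in content/docs/index.mdx", [
  { path: ["title"], message: "Expected string, received number" },
]).print();
```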

package/dist/chunk-MK7EXW7O.js ADDED
@@ -0,0 +1,75 @@
+// src/mdx-plugins/remark-include.ts
+import { visit } from "unist-util-visit";
+import * as path from "node:path";
+import * as fs from "node:fs/promises";
+import matter from "gray-matter";
+function flattenNode(node) {
+  if ("children" in node)
+    return node.children.map((child) => flattenNode(child)).join("");
+  if ("value" in node) return node.value;
+  return "";
+}
+function remarkInclude() {
+  const TagName = "include";
+  async function update(tree, file, processor, compiler) {
+    const queue = [];
+    visit(
+      tree,
+      ["mdxJsxFlowElement", "mdxJsxTextElement"],
+      (node, _, parent) => {
+        let specifier;
+        const params = {};
+        if ((node.type === "mdxJsxFlowElement" || node.type === "mdxJsxTextElement") && node.name === TagName) {
+          const value = flattenNode(node);
+          if (value.length > 0) {
+            for (const attr of node.attributes) {
+              if (attr.type === "mdxJsxAttribute" && (typeof attr.value === "string" || attr.value === null)) {
+                params[attr.name] = attr.value;
+              }
+            }
+            specifier = value;
+          }
+        }
+        if (!specifier) return;
+        const targetPath = path.resolve(
+          "cwd" in params ? process.cwd() : path.dirname(file),
+          specifier
+        );
+        const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
+        queue.push(
+          fs.readFile(targetPath).then(async (content) => {
+            compiler?.addDependency(targetPath);
+            if (asCode) {
+              const lang = params.lang ?? path.extname(specifier).slice(1);
+              Object.assign(node, {
+                type: "code",
+                lang,
+                meta: params.meta,
+                value: content.toString(),
+                data: {}
+              });
+              return;
+            }
+            const parsed = processor.parse(matter(content).content);
+            await update(parsed, targetPath, processor, compiler);
+            Object.assign(
+              parent && parent.type === "paragraph" ? parent : node,
+              parsed
+            );
+          }).catch((e) => {
+            console.warn(`failed to read file: ${targetPath}`, e);
+          })
+        );
+        return "skip";
+      }
+    );
+    await Promise.all(queue);
+  }
+  return async (tree, file) => {
+    await update(tree, file.path, this, file.data._compiler);
+  };
+}
+
+export {
+  remarkInclude
+};
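
The new chunk is the reworked `remarkInclude` plugin: it scans MDX for `<include>` JSX elements, reads the referenced file (registering it as a watch dependency via `compiler?.addDependency`), splices `.md`/`.mdx` content back into the tree after recursively resolving nested includes, and turns any other file, or any include with an explicit `lang` attribute, into a code block; a `cwd` attribute switches path resolution from the including file's directory to `process.cwd()`. The sketch below restates only the resolution rules in TypeScript; the names are illustrative and this is not an API exported by the package.

```ts
import * as path from "node:path";

// Attributes recognised on <include> by the plugin above.
interface IncludeParams {
  cwd?: string | null;  // when present, resolve from process.cwd() instead of the including file
  lang?: string | null; // force embedding as a code block with this language
  meta?: string | null; // meta string attached to the generated code block
}

// How a specifier such as "./shared/setup.mdx" is interpreted.
function resolveInclude(specifier: string, params: IncludeParams, fromFile: string) {
  const targetPath = path.resolve(
    "cwd" in params ? process.cwd() : path.dirname(fromFile),
    specifier
  );
  // .md / .mdx files are parsed and spliced into the tree; everything else
  // (or anything with an explicit lang) becomes a code node.
  const asCode =
    Boolean(params.lang) ||
    (!specifier.endsWith(".md") && !specifier.endsWith(".mdx"));
  const lang = params.lang ?? path.extname(specifier).slice(1);
  return { targetPath, asCode, lang };
}

// e.g. <include>./shared/setup.mdx</include> is inlined as markdown, while
// <include lang="ts">../examples/demo.ts</include> becomes a "ts" code block.
console.log(resolveInclude("../examples/demo.ts", { lang: "ts" }, "content/docs/index.mdx"));
```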

package/dist/{chunk-IZURUUPO.js → chunk-VFALQK6O.js} RENAMED
@@ -34,21 +34,15 @@ var _runtime = {
 };
 function createMDXSource(docs, meta = []) {
   return {
-    files: (
+    files: () => resolveFiles({
       docs,
-      meta
-      rootDir
+      meta
     })
   };
 }
-function resolveFiles({
-  docs,
-  meta,
-  rootDir = ""
-}) {
+function resolveFiles({ docs, meta }) {
   const outputs = [];
   for (const entry of docs) {
-    if (!entry._file.path.startsWith(rootDir)) continue;
     outputs.push({
       type: "page",
       path: entry._file.path,
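
`createMDXSource` now defers to `resolveFiles({ docs, meta })` through a thunk, and the old `rootDir` prefix filter on `entry._file.path` is gone. A sketch of the usual wiring with fumadocs-core follows; the `@/.source` import path is a placeholder for wherever the generated collections live in a project.

```ts
import { loader } from "fumadocs-core/source";
import { createMDXSource } from "fumadocs-mdx";
// placeholder path: wherever the generated docs/meta collections live
import { docs, meta } from "@/.source";

export const source = loader({
  baseUrl: "/docs",
  // files is now a thunk over resolveFiles({ docs, meta }); no rootDir filtering
  source: createMDXSource(docs, meta),
});
```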

package/dist/config/index.cjs CHANGED
@@ -42,6 +42,7 @@ module.exports = __toCommonJS(config_exports);
 
 // src/utils/schema.ts
 var import_zod = require("zod");
+var import_picocolors = __toESM(require("picocolors"), 1);
 var metaSchema = import_zod.z.object({
   title: import_zod.z.string().optional(),
   pages: import_zod.z.array(import_zod.z.string()).optional(),
@@ -219,49 +220,56 @@ function remarkInclude() {
   const TagName = "include";
   async function update(tree, file, processor, compiler) {
     const queue = [];
-    (0, import_unist_util_visit.visit)(
-
-
-
-
-
-
-
-
+    (0, import_unist_util_visit.visit)(
+      tree,
+      ["mdxJsxFlowElement", "mdxJsxTextElement"],
+      (node, _, parent) => {
+        let specifier;
+        const params = {};
+        if ((node.type === "mdxJsxFlowElement" || node.type === "mdxJsxTextElement") && node.name === TagName) {
+          const value = flattenNode(node);
+          if (value.length > 0) {
+            for (const attr of node.attributes) {
+              if (attr.type === "mdxJsxAttribute" && (typeof attr.value === "string" || attr.value === null)) {
+                params[attr.name] = attr.value;
+              }
             }
+            specifier = value;
           }
-          specifier = value;
         }
+        if (!specifier) return;
+        const targetPath = path.resolve(
+          "cwd" in params ? process.cwd() : path.dirname(file),
+          specifier
+        );
+        const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
+        queue.push(
+          fs.readFile(targetPath).then(async (content) => {
+            compiler?.addDependency(targetPath);
+            if (asCode) {
+              const lang = params.lang ?? path.extname(specifier).slice(1);
+              Object.assign(node, {
+                type: "code",
+                lang,
+                meta: params.meta,
+                value: content.toString(),
+                data: {}
+              });
+              return;
+            }
+            const parsed = processor.parse((0, import_gray_matter.default)(content).content);
+            await update(parsed, targetPath, processor, compiler);
+            Object.assign(
+              parent && parent.type === "paragraph" ? parent : node,
+              parsed
+            );
+          }).catch((e) => {
+            console.warn(`failed to read file: ${targetPath}`, e);
+          })
+        );
+        return "skip";
       }
-
-      const targetPath = path.resolve(
-        "cwd" in params ? process.cwd() : path.dirname(file),
-        specifier
-      );
-      const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
-      queue.push(
-        fs.readFile(targetPath).then(async (content) => {
-          compiler?.addDependency(targetPath);
-          if (asCode) {
-            const lang = params.lang ?? path.extname(specifier).slice(1);
-            Object.assign(node, {
-              type: "code",
-              lang,
-              meta: params.meta,
-              value: content.toString(),
-              data: {}
-            });
-            return;
-          }
-          const parsed = processor.parse((0, import_gray_matter.default)(content).content);
-          await update(parsed, targetPath, processor, compiler);
-          Object.assign(node, parsed);
-        }).catch((e) => {
-          console.warn(`failed to read file: ${targetPath}`, e);
-        })
-      );
-      return "skip";
-    });
+    );
     await Promise.all(queue);
   }
   return async (tree, file) => {

package/dist/config/index.d.cts CHANGED
@@ -1,4 +1,4 @@
-export {
+export { c as BaseCollection, B as BaseCollectionEntry, C as CollectionSchema, h as DefaultMDXOptions, D as DocCollection, a as DocsCollection, F as FileInfo, G as GlobalConfig, b as MarkdownProps, M as MetaCollection, d as defineCollections, g as defineConfig, e as defineDocs, f as frontmatterSchema, i as getDefaultMDXOptions, m as metaSchema } from '../define-BaW0PQDJ.cjs';
 import { Processor, Transformer } from 'unified';
 import { Root } from 'mdast';
 import '@mdx-js/mdx';

package/dist/config/index.d.ts CHANGED
@@ -1,4 +1,4 @@
-export {
+export { c as BaseCollection, B as BaseCollectionEntry, C as CollectionSchema, h as DefaultMDXOptions, D as DocCollection, a as DocsCollection, F as FileInfo, G as GlobalConfig, b as MarkdownProps, M as MetaCollection, d as defineCollections, g as defineConfig, e as defineDocs, f as frontmatterSchema, i as getDefaultMDXOptions, m as metaSchema } from '../define-BaW0PQDJ.js';
 import { Processor, Transformer } from 'unified';
 import { Root } from 'mdast';
 import '@mdx-js/mdx';

package/dist/config/index.js CHANGED
@@ -1,10 +1,10 @@
 import {
   frontmatterSchema,
   metaSchema
-} from "../chunk-
+} from "../chunk-2ZOW45YZ.js";
 import {
   remarkInclude
-} from "../chunk-
+} from "../chunk-MK7EXW7O.js";
 import {
   getDefaultMDXOptions
 } from "../chunk-IOENRFUX.js";

package/dist/{define-P0QTVn7W.d.cts → define-BaW0PQDJ.d.cts} RENAMED
@@ -161,7 +161,7 @@ interface DocsCollection<DocSchema extends StandardSchemaV1 = StandardSchemaV1,
     docs: DocCollection<DocSchema, Async>;
     meta: MetaCollection<MetaSchema>;
 }
-declare function defineCollections<T extends 'doc' | 'meta', Schema extends StandardSchemaV1 = StandardSchemaV1, Async extends boolean = false>(options: {
+declare function defineCollections<T extends 'doc' | 'meta', Schema extends StandardSchemaV1 = StandardSchemaV1<unknown, any>, Async extends boolean = false>(options: {
     type: T;
 } & (T extends 'doc' ? DocCollection<Schema, Async> : MetaCollection<Schema>)): {
     type: T;
@@ -198,4 +198,4 @@ declare function defineDocs<DocSchema extends StandardSchemaV1 = typeof frontmat
 };
 declare function defineConfig(config?: GlobalConfig): GlobalConfig;
 
-export { type BaseCollectionEntry as B, type CollectionSchema as C, type DocCollection as D, type FileInfo as F, type GlobalConfig as G, type
+export { type BaseCollectionEntry as B, type CollectionSchema as C, type DocCollection as D, type FileInfo as F, type GlobalConfig as G, type MetaCollection as M, type DocsCollection as a, type MarkdownProps as b, type BaseCollection as c, defineCollections as d, defineDocs as e, frontmatterSchema as f, defineConfig as g, type DefaultMDXOptions as h, getDefaultMDXOptions as i, metaSchema as m };

package/dist/{define-P0QTVn7W.d.ts → define-BaW0PQDJ.d.ts} RENAMED
@@ -161,7 +161,7 @@ interface DocsCollection<DocSchema extends StandardSchemaV1 = StandardSchemaV1,
     docs: DocCollection<DocSchema, Async>;
     meta: MetaCollection<MetaSchema>;
 }
-declare function defineCollections<T extends 'doc' | 'meta', Schema extends StandardSchemaV1 = StandardSchemaV1, Async extends boolean = false>(options: {
+declare function defineCollections<T extends 'doc' | 'meta', Schema extends StandardSchemaV1 = StandardSchemaV1<unknown, any>, Async extends boolean = false>(options: {
     type: T;
 } & (T extends 'doc' ? DocCollection<Schema, Async> : MetaCollection<Schema>)): {
     type: T;
@@ -198,4 +198,4 @@ declare function defineDocs<DocSchema extends StandardSchemaV1 = typeof frontmat
 };
 declare function defineConfig(config?: GlobalConfig): GlobalConfig;
 
-export { type BaseCollectionEntry as B, type CollectionSchema as C, type DocCollection as D, type FileInfo as F, type GlobalConfig as G, type
+export { type BaseCollectionEntry as B, type CollectionSchema as C, type DocCollection as D, type FileInfo as F, type GlobalConfig as G, type MetaCollection as M, type DocsCollection as a, type MarkdownProps as b, type BaseCollection as c, defineCollections as d, defineDocs as e, frontmatterSchema as f, defineConfig as g, type DefaultMDXOptions as h, getDefaultMDXOptions as i, metaSchema as m };
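
The only typing change in the renamed `define-*` declaration files is the looser default schema parameter on `defineCollections` (`StandardSchemaV1<unknown, any>` instead of bare `StandardSchemaV1`); existing call sites are unaffected. A usage sketch, assuming the usual `dir`/`schema` options of a doc collection:

```ts
import { defineCollections, frontmatterSchema } from "fumadocs-mdx/config";
import { z } from "zod";

export const blog = defineCollections({
  type: "doc",
  dir: "content/blog",
  // any Standard Schema (zod here) still works; the relaxed default only
  // affects inference when no schema is supplied
  schema: frontmatterSchema.extend({
    date: z.coerce.date().optional(),
  }),
});
```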

package/dist/index.cjs CHANGED
@@ -62,21 +62,15 @@ var _runtime = {
 };
 function createMDXSource(docs, meta = []) {
   return {
-    files: (
+    files: () => resolveFiles({
       docs,
-      meta
-      rootDir
+      meta
     })
   };
 }
-function resolveFiles({
-  docs,
-  meta,
-  rootDir = ""
-}) {
+function resolveFiles({ docs, meta }) {
   const outputs = [];
   for (const entry of docs) {
-    if (!entry._file.path.startsWith(rootDir)) continue;
     outputs.push({
       type: "page",
       path: entry._file.path,

package/dist/index.d.cts CHANGED
@@ -1,7 +1,6 @@
 import { PageData, MetaData, Source, VirtualFile } from 'fumadocs-core/source';
-import { B as BaseCollectionEntry } from './define-
-import { R as Runtime } from './types-
-export { a as RuntimeFile } from './types-bWXuqsw9.cjs';
+import { B as BaseCollectionEntry } from './define-BaW0PQDJ.cjs';
+import { R as Runtime } from './types-BNrQHCj5.cjs';
 import '@mdx-js/mdx';
 import 'mdx/types';
 import 'fumadocs-core/mdx-plugins';
@@ -22,6 +21,6 @@ interface ResolveOptions {
     meta: BaseCollectionEntry[];
     rootDir?: string;
 }
-declare function resolveFiles({ docs, meta
+declare function resolveFiles({ docs, meta }: ResolveOptions): VirtualFile[];
 
 export { _runtime, createMDXSource, resolveFiles };

package/dist/index.d.ts CHANGED
@@ -1,7 +1,6 @@
 import { PageData, MetaData, Source, VirtualFile } from 'fumadocs-core/source';
-import { B as BaseCollectionEntry } from './define-
-import { R as Runtime } from './types-
-export { a as RuntimeFile } from './types-CJRGJLAg.js';
+import { B as BaseCollectionEntry } from './define-BaW0PQDJ.js';
+import { R as Runtime } from './types-DEduCvIT.js';
 import '@mdx-js/mdx';
 import 'mdx/types';
 import 'fumadocs-core/mdx-plugins';
@@ -22,6 +21,6 @@ interface ResolveOptions {
    meta: BaseCollectionEntry[];
    rootDir?: string;
 }
-declare function resolveFiles({ docs, meta
+declare function resolveFiles({ docs, meta }: ResolveOptions): VirtualFile[];
 
 export { _runtime, createMDXSource, resolveFiles };

package/dist/index.js CHANGED

package/dist/loader-mdx.cjs CHANGED
@@ -200,10 +200,7 @@ function buildConfig(config) {
     null,
     {
       global: globalConfig,
-      collections
-      _runtime: {
-        files: /* @__PURE__ */ new Map()
-      }
+      collections
     }
   ];
 }
@@ -273,49 +270,56 @@ function remarkInclude() {
   const TagName = "include";
   async function update(tree, file, processor, compiler) {
     const queue = [];
-    (0, import_unist_util_visit.visit)(
-
-
-
-
-
-
-
-
+    (0, import_unist_util_visit.visit)(
+      tree,
+      ["mdxJsxFlowElement", "mdxJsxTextElement"],
+      (node, _, parent) => {
+        let specifier;
+        const params = {};
+        if ((node.type === "mdxJsxFlowElement" || node.type === "mdxJsxTextElement") && node.name === TagName) {
+          const value = flattenNode(node);
+          if (value.length > 0) {
+            for (const attr of node.attributes) {
+              if (attr.type === "mdxJsxAttribute" && (typeof attr.value === "string" || attr.value === null)) {
+                params[attr.name] = attr.value;
+              }
            }
+            specifier = value;
          }
-          specifier = value;
        }
+        if (!specifier) return;
+        const targetPath = path2.resolve(
+          "cwd" in params ? process.cwd() : path2.dirname(file),
+          specifier
+        );
+        const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
+        queue.push(
+          fs2.readFile(targetPath).then(async (content) => {
+            compiler?.addDependency(targetPath);
+            if (asCode) {
+              const lang = params.lang ?? path2.extname(specifier).slice(1);
+              Object.assign(node, {
+                type: "code",
+                lang,
+                meta: params.meta,
+                value: content.toString(),
+                data: {}
+              });
+              return;
+            }
+            const parsed = processor.parse((0, import_gray_matter.default)(content).content);
+            await update(parsed, targetPath, processor, compiler);
+            Object.assign(
+              parent && parent.type === "paragraph" ? parent : node,
+              parsed
+            );
+          }).catch((e) => {
+            console.warn(`failed to read file: ${targetPath}`, e);
+          })
+        );
+        return "skip";
      }
-
-      const targetPath = path2.resolve(
-        "cwd" in params ? process.cwd() : path2.dirname(file),
-        specifier
-      );
-      const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
-      queue.push(
-        fs2.readFile(targetPath).then(async (content) => {
-          compiler?.addDependency(targetPath);
-          if (asCode) {
-            const lang = params.lang ?? path2.extname(specifier).slice(1);
-            Object.assign(node, {
-              type: "code",
-              lang,
-              meta: params.meta,
-              value: content.toString(),
-              data: {}
-            });
-            return;
-          }
-          const parsed = processor.parse((0, import_gray_matter.default)(content).content);
-          await update(parsed, targetPath, processor, compiler);
-          Object.assign(node, parsed);
-        }).catch((e) => {
-          console.warn(`failed to read file: ${targetPath}`, e);
-        })
-      );
-      return "skip";
-    });
+    );
     await Promise.all(queue);
   }
   return async (tree, file) => {
@@ -385,6 +389,7 @@ function getGitTimestamp(file) {
 
 // src/utils/schema.ts
 var import_zod = require("zod");
+var import_picocolors = __toESM(require("picocolors"), 1);
 var metaSchema = import_zod.z.object({
   title: import_zod.z.string().optional(),
   pages: import_zod.z.array(import_zod.z.string()).optional(),
@@ -401,6 +406,28 @@ var frontmatterSchema = import_zod.z.object({
   // Fumadocs OpenAPI generated
   _openapi: import_zod.z.object({}).passthrough().optional()
 });
+var ValidationError = class extends Error {
+  constructor(message, issues) {
+    super(message);
+    this.issues = issues;
+  }
+  print() {
+    console.error(
+      [
+        `[MDX] ${this.message}:`,
+        ...this.issues.map(
+          (issue) => import_picocolors.default.redBright(
+            `- ${import_picocolors.default.bold(issue.path?.join(".") ?? "*")}: ${issue.message}`
+          )
+        )
+      ].join("\n")
+    );
+  }
+  toString() {
+    return `${this.message}:
+${this.issues.map((issue) => ` ${issue.path}: ${issue.message}`).join("\n")}`;
+  }
+};
 async function validate(schema, data, context, errorMessage) {
   if (typeof schema === "function" && !("~standard" in schema)) {
     schema = schema(context);
@@ -410,16 +437,12 @@ async function validate(schema, data, context, errorMessage) {
       data
     );
     if (result.issues) {
-      throw new
+      throw new ValidationError(errorMessage, result.issues);
     }
     return result.value;
   }
   return data;
 }
-function formatError(message, issues) {
-  return `${message}:
-${issues.map((issue) => ` ${issue.path}: ${issue.message}`).join("\n")}`;
-}
 
 // src/loader-mdx.ts
 function parseQuery(query) {
@@ -463,7 +486,7 @@ async function loader(source, callback) {
         source,
         path: filePath
       },
-      `invalid frontmatter in ${filePath}
+      `invalid frontmatter in ${filePath}`
     );
   }
   let timestamp;

package/dist/loader-mdx.js CHANGED
@@ -1,14 +1,14 @@
 import {
   getConfigHash,
   loadConfig
-} from "./chunk-
+} from "./chunk-HFLDWPJA.js";
+import "./chunk-DRVUBK5B.js";
 import {
   validate
-} from "./chunk-
+} from "./chunk-2ZOW45YZ.js";
 import {
   remarkInclude
-} from "./chunk-
-import "./chunk-SLCPEEMF.js";
+} from "./chunk-MK7EXW7O.js";
 
 // src/loader-mdx.ts
 import * as path2 from "node:path";
@@ -118,7 +118,7 @@ async function loader(source, callback) {
         source,
         path: filePath
       },
-      `invalid frontmatter in ${filePath}
+      `invalid frontmatter in ${filePath}`
     );
   }
   let timestamp;