fumadocs-mdx 11.6.0 → 13.0.1
This diff represents the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- package/dist/bin.cjs +1728 -0
- package/dist/bin.d.cts +1 -0
- package/dist/bin.d.ts +1 -0
- package/dist/bin.js +16 -0
- package/dist/build-mdx-CCNr86q6.d.ts +53 -0
- package/dist/build-mdx-D-r3_eQL.d.cts +53 -0
- package/dist/bun/index.cjs +857 -0
- package/dist/bun/index.d.cts +13 -0
- package/dist/bun/index.d.ts +13 -0
- package/dist/bun/index.js +62 -0
- package/dist/chunk-3J3WL7WN.js +160 -0
- package/dist/chunk-CXA4JO4Z.js +45 -0
- package/dist/chunk-EELYB2XC.js +207 -0
- package/dist/chunk-FSZMKRVH.js +80 -0
- package/dist/chunk-II3H5ZVZ.js +77 -0
- package/dist/chunk-JVZFH6ND.js +40 -0
- package/dist/chunk-K5ZLPEIQ.js +207 -0
- package/dist/{chunk-VFALQK6O.js → chunk-KILFIBVW.js} +21 -12
- package/dist/chunk-NVRDCY6Z.js +30 -0
- package/dist/chunk-U4MQ44TS.js +53 -0
- package/dist/chunk-VWJKRQZR.js +19 -0
- package/dist/chunk-XQ5O7IPO.js +128 -0
- package/dist/chunk-XZY2AWJI.js +81 -0
- package/dist/chunk-YVCR6FUH.js +82 -0
- package/dist/config/index.cjs +232 -128
- package/dist/config/index.d.cts +4 -6
- package/dist/config/index.d.ts +4 -6
- package/dist/config/index.js +26 -19
- package/dist/core-B6j6Fxse.d.cts +218 -0
- package/dist/core-B6j6Fxse.d.ts +218 -0
- package/dist/index.cjs +0 -76
- package/dist/index.d.cts +73 -20
- package/dist/index.d.ts +73 -20
- package/dist/index.js +0 -10
- package/dist/load-MNG3CLET.js +7 -0
- package/dist/next/index.cjs +567 -314
- package/dist/next/index.d.cts +9 -12
- package/dist/next/index.d.ts +9 -12
- package/dist/next/index.js +238 -211
- package/dist/node/loader.cjs +922 -0
- package/dist/node/loader.d.cts +5 -0
- package/dist/node/loader.d.ts +5 -0
- package/dist/node/loader.js +33 -0
- package/dist/plugins/json-schema.cjs +162 -0
- package/dist/plugins/json-schema.d.cts +24 -0
- package/dist/plugins/json-schema.d.ts +24 -0
- package/dist/plugins/json-schema.js +78 -0
- package/dist/{mdx-options-CAU273O3.js → preset-ZMP6U62C.js} +1 -1
- package/dist/runtime/next/async.cjs +715 -0
- package/dist/runtime/next/async.d.cts +21 -0
- package/dist/runtime/next/async.d.ts +21 -0
- package/dist/runtime/next/async.js +89 -0
- package/dist/runtime/next/index.cjs +136 -0
- package/dist/runtime/next/index.d.cts +33 -0
- package/dist/runtime/next/index.d.ts +33 -0
- package/dist/runtime/next/index.js +11 -0
- package/dist/runtime/vite/browser.cjs +107 -0
- package/dist/runtime/vite/browser.d.cts +59 -0
- package/dist/runtime/vite/browser.d.ts +59 -0
- package/dist/runtime/vite/browser.js +11 -0
- package/dist/runtime/vite/server.cjs +243 -0
- package/dist/runtime/vite/server.d.cts +30 -0
- package/dist/runtime/vite/server.d.ts +30 -0
- package/dist/runtime/vite/server.js +111 -0
- package/dist/types-AGzTfBmf.d.ts +45 -0
- package/dist/types-DKGMoay5.d.cts +45 -0
- package/dist/vite/index.cjs +1185 -0
- package/dist/vite/index.d.cts +45 -0
- package/dist/vite/index.d.ts +45 -0
- package/dist/vite/index.js +297 -0
- package/dist/webpack/index.cjs +957 -0
- package/dist/{loader-mdx.d.cts → webpack/index.d.cts} +3 -6
- package/dist/{loader-mdx.d.ts → webpack/index.d.ts} +3 -6
- package/dist/webpack/index.js +44 -0
- package/loader-mdx.cjs +1 -1
- package/package.json +86 -29
- package/bin.js +0 -5
- package/dist/chunk-2ZOW45YZ.js +0 -63
- package/dist/chunk-DRVUBK5B.js +0 -39
- package/dist/chunk-HFLDWPJA.js +0 -62
- package/dist/chunk-IOENRFUX.js +0 -112
- package/dist/chunk-MK7EXW7O.js +0 -75
- package/dist/define-BaW0PQDJ.d.cts +0 -201
- package/dist/define-BaW0PQDJ.d.ts +0 -201
- package/dist/loader-mdx.cjs +0 -527
- package/dist/loader-mdx.js +0 -162
- package/dist/runtime/async.cjs +0 -269
- package/dist/runtime/async.d.cts +0 -18
- package/dist/runtime/async.d.ts +0 -18
- package/dist/runtime/async.js +0 -73
- package/dist/types-BNrQHCj5.d.cts +0 -100
- package/dist/types-DEduCvIT.d.ts +0 -100
- package/dist/watcher-IAZDSTU7.js +0 -24

@@ -0,0 +1,207 @@
+import {
+  fumaMatter
+} from "./chunk-VWJKRQZR.js";
+
+// src/loaders/mdx/remark-include.ts
+import { unified } from "unified";
+import { visit as visit2 } from "unist-util-visit";
+import * as path from "path";
+import * as fs from "fs/promises";
+import { remarkHeading } from "fumadocs-core/mdx-plugins";
+
+// src/loaders/mdx/remark-unravel.ts
+import { visit } from "unist-util-visit";
+function remarkMarkAndUnravel() {
+  return (tree) => {
+    visit(tree, function(node, index, parent) {
+      let offset = -1;
+      let all = true;
+      let oneOrMore = false;
+      if (parent && typeof index === "number" && node.type === "paragraph") {
+        const children = node.children;
+        while (++offset < children.length) {
+          const child = children[offset];
+          if (child.type === "mdxJsxTextElement" || child.type === "mdxTextExpression") {
+            oneOrMore = true;
+          } else if (child.type === "text" && child.value.trim().length === 0) {
+          } else {
+            all = false;
+            break;
+          }
+        }
+        if (all && oneOrMore) {
+          offset = -1;
+          const newChildren = [];
+          while (++offset < children.length) {
+            const child = children[offset];
+            if (child.type === "mdxJsxTextElement") {
+              child.type = "mdxJsxFlowElement";
+            }
+            if (child.type === "mdxTextExpression") {
+              child.type = "mdxFlowExpression";
+            }
+            if (child.type === "text" && /^[\t\r\n ]+$/.test(String(child.value))) {
+            } else {
+              newChildren.push(child);
+            }
+          }
+          parent.children.splice(index, 1, ...newChildren);
+          return index;
+        }
+      }
+    });
+  };
+}
+
+// src/loaders/mdx/remark-include.ts
+var ElementLikeTypes = [
+  "mdxJsxFlowElement",
+  "mdxJsxTextElement",
+  "containerDirective",
+  "textDirective",
+  "leafDirective"
+];
+function isElementLike(node) {
+  return ElementLikeTypes.includes(node.type);
+}
+function parseElementAttributes(element) {
+  if (Array.isArray(element.attributes)) {
+    const attributes = {};
+    for (const attr of element.attributes) {
+      if (attr.type === "mdxJsxAttribute" && (typeof attr.value === "string" || attr.value === null)) {
+        attributes[attr.name] = attr.value;
+      }
+    }
+    return attributes;
+  }
+  return element.attributes ?? {};
+}
+function flattenNode(node) {
+  if ("children" in node)
+    return node.children.map((child) => flattenNode(child)).join("");
+  if ("value" in node) return node.value;
+  return "";
+}
+function parseSpecifier(specifier) {
+  const idx = specifier.lastIndexOf("#");
+  if (idx === -1) return { file: specifier };
+  return {
+    file: specifier.slice(0, idx),
+    section: specifier.slice(idx + 1)
+  };
+}
+function extractSection(root, section) {
+  let nodes;
+  let capturingHeadingContent = false;
+  visit2(root, (node) => {
+    if (node.type === "heading") {
+      if (capturingHeadingContent) {
+        return false;
+      }
+      if (node.data?.hProperties?.id === section) {
+        capturingHeadingContent = true;
+        nodes = [node];
+        return "skip";
+      }
+      return;
+    }
+    if (capturingHeadingContent) {
+      nodes?.push(node);
+      return "skip";
+    }
+    if (isElementLike(node) && node.name === "section") {
+      const attributes = parseElementAttributes(node);
+      if (attributes.id === section) {
+        nodes = node.children;
+        return false;
+      }
+    }
+  });
+  if (nodes)
+    return {
+      type: "root",
+      children: nodes
+    };
+}
+function remarkInclude() {
+  const TagName = "include";
+  const embedContent = async (file, heading, params, data) => {
+    let content;
+    try {
+      content = (await fs.readFile(file)).toString();
+    } catch (e) {
+      throw new Error(
+        `failed to read file ${file}
+${e instanceof Error ? e.message : String(e)}`,
+        { cause: e }
+      );
+    }
+    const ext = path.extname(file);
+    data._compiler?.addDependency(file);
+    if (params.lang || ext !== ".md" && ext !== ".mdx") {
+      const lang = params.lang ?? ext.slice(1);
+      return {
+        type: "code",
+        lang,
+        meta: params.meta,
+        value: content,
+        data: {}
+      };
+    }
+    const parser = data._getProcessor ? data._getProcessor(ext === ".mdx" ? "mdx" : "md") : this;
+    const parsed = fumaMatter(content);
+    let mdast = parser.parse({
+      path: file,
+      value: parsed.content,
+      data: { frontmatter: parsed.data }
+    });
+    const baseProcessor = unified().use(remarkMarkAndUnravel);
+    if (heading) {
+      const extracted = extractSection(
+        await baseProcessor.use(remarkHeading).run(mdast),
+        heading
+      );
+      if (!extracted)
+        throw new Error(
+          `Cannot find section ${heading} in ${file}, make sure you have encapsulated the section in a <section id="${heading}"> tag, or a :::section directive with remark-directive configured.`
+        );
+      mdast = extracted;
+    } else {
+      mdast = await baseProcessor.run(mdast);
+    }
+    await update(mdast, path.dirname(file), data);
+    return mdast;
+  };
+  async function update(tree, directory, data) {
+    const queue = [];
+    visit2(tree, ElementLikeTypes, (_node, _, parent) => {
+      const node = _node;
+      if (node.name !== TagName) return;
+      const specifier = flattenNode(node);
+      if (specifier.length === 0) return "skip";
+      const attributes = parseElementAttributes(node);
+      const { file: relativePath, section } = parseSpecifier(specifier);
+      const file = path.resolve(
+        "cwd" in attributes ? process.cwd() : directory,
+        relativePath
+      );
+      queue.push(
+        embedContent(file, section, attributes, data).then((replace) => {
+          Object.assign(
+            parent && parent.type === "paragraph" ? parent : node,
+            replace
+          );
+        })
+      );
+      return "skip";
+    });
+    await Promise.all(queue);
+  }
+  return async (tree, file) => {
+    await update(tree, path.dirname(file.path), file.data);
+  };
+}
+
+export {
+  remarkInclude
+};

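Note (not part of the published diff): the `remarkInclude` transformer added above resolves `<include>path#section</include>` elements against the including file's directory and splices the referenced content into the tree. A minimal usage sketch, assuming `remark-parse`/`remark-mdx` for parsing, a hypothetical file layout, and that `remarkInclude` is re-exported from `fumadocs-mdx/config` as in earlier releases:

    // usage-sketch.mjs — illustrative only; paths and the import location are assumptions
    import { unified } from "unified";
    import remarkParse from "remark-parse";
    import remarkMdx from "remark-mdx";
    import { VFile } from "vfile";
    import { remarkInclude } from "fumadocs-mdx/config"; // assumed export path

    const file = new VFile({
      path: "docs/index.mdx",
      value: "<include>./shared/setup.mdx#install</include>",
    });
    const processor = unified().use(remarkParse).use(remarkMdx).use(remarkInclude);
    const tree = await processor.run(processor.parse(file), file);
    // `tree` now contains the "install" section of docs/shared/setup.mdx in place of the tag.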
@@ -1,22 +1,29 @@
-
+import {
+  createDocMethods
+} from "./chunk-NVRDCY6Z.js";
+
+// src/runtime/next/index.ts
 var _runtime = {
   doc(files) {
     return files.map((file) => {
-      const
+      const data = file.data;
       return {
-
-
-
-
-
+        _exports: data,
+        body: data.default,
+        lastModified: data.lastModified,
+        toc: data.toc,
+        structuredData: data.structuredData,
+        extractedReferences: data.extractedReferences,
+        ...data.frontmatter,
+        ...createDocMethods(file.info, () => Promise.resolve(data))
       };
     });
   },
   meta(files) {
     return files.map((file) => {
       return {
-
-
+        info: file.info,
+        ...file.data
       };
     });
   },
@@ -34,7 +41,7 @@ var _runtime = {
 };
 function createMDXSource(docs, meta = []) {
   return {
-    files:
+    files: resolveFiles({
       docs,
       meta
     })
@@ -45,14 +52,16 @@ function resolveFiles({ docs, meta }) {
   for (const entry of docs) {
     outputs.push({
       type: "page",
-
+      absolutePath: entry.info.fullPath,
+      path: entry.info.path,
       data: entry
     });
   }
   for (const entry of meta) {
     outputs.push({
       type: "meta",
-
+      absolutePath: entry.info.fullPath,
+      path: entry.info.path,
      data: entry
    });
  }

@@ -0,0 +1,30 @@
+// src/runtime/shared.ts
+import fs from "fs/promises";
+function createDocMethods(info, load) {
+  return {
+    info,
+    async getText(type) {
+      if (type === "raw") {
+        return (await fs.readFile(info.fullPath)).toString();
+      }
+      const data = await load();
+      if (typeof data._markdown !== "string")
+        throw new Error(
+          "getText('processed') requires `includeProcessedMarkdown` to be enabled in your collection config."
+        );
+      return data._markdown;
+    },
+    async getMDAST() {
+      const data = await load();
+      if (!data._mdast)
+        throw new Error(
+          "getMDAST() requires `includeMDAST` to be enabled in your collection config."
+        );
+      return JSON.parse(data._mdast);
+    }
+  };
+}
+
+export {
+  createDocMethods
+};

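Note (not part of the diff): `createDocMethods` is what gives each page object its lazy text and AST accessors. A hedged sketch of the contract, using a hypothetical collection entry and loader:

    // illustrative only — the info object and the load() result are made-up placeholders
    const methods = createDocMethods(
      { path: "index.mdx", fullPath: "/content/docs/index.mdx" },
      async () => ({ _markdown: "# Hello", _mdast: '{"type":"root","children":[]}' })
    );
    await methods.getText("raw");       // reads /content/docs/index.mdx from disk
    await methods.getText("processed"); // returns _markdown; needs includeProcessedMarkdown
    await methods.getMDAST();           // parses _mdast JSON; needs includeMDAST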
@@ -0,0 +1,53 @@
+// src/config/build.ts
+function buildConfig(config) {
+  const collections = /* @__PURE__ */ new Map();
+  let globalConfig = {};
+  for (const [k, v] of Object.entries(config)) {
+    if (!v) {
+      continue;
+    }
+    if (typeof v === "object" && "type" in v) {
+      if (v.type === "docs") {
+        collections.set(k, v);
+        continue;
+      }
+      if (v.type === "doc" || v.type === "meta") {
+        collections.set(k, v);
+        continue;
+      }
+    }
+    if (k === "default" && v) {
+      globalConfig = v;
+      continue;
+    }
+    throw new Error(
+      `Unknown export "${k}", you can only export collections from source configuration file.`
+    );
+  }
+  const mdxOptionsCache = /* @__PURE__ */ new Map();
+  return {
+    global: globalConfig,
+    collections,
+    async getDefaultMDXOptions(mode = "default") {
+      const cached = mdxOptionsCache.get(mode);
+      if (cached) return cached;
+      const input = this.global.mdxOptions;
+      async function uncached() {
+        const options = typeof input === "function" ? await input() : input;
+        const { getDefaultMDXOptions } = await import("./preset-ZMP6U62C.js");
+        if (options?.preset === "minimal") return options;
+        return getDefaultMDXOptions({
+          ...options,
+          _withoutBundler: mode === "remote"
+        });
+      }
+      const result = uncached();
+      mdxOptionsCache.set(mode, result);
+      return result;
+    }
+  };
+}
+
+export {
+  buildConfig
+};

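Note (not part of the diff): `buildConfig` consumes the exports of a source config module: named collection exports plus an optional `default` export holding global options, with any other export name throwing. A sketch of an input shape under those assumptions:

    // hypothetical source config shape, for illustration only
    const parsed = buildConfig({
      docs: {
        type: "docs",
        docs: { type: "doc", dir: "content/docs" },
        meta: { type: "meta", dir: "content/docs" },
      },
      default: { lastModifiedTime: "git", mdxOptions: { preset: "minimal" } },
    });
    parsed.collections.get("docs"); // the "docs" collection
    parsed.global.lastModifiedTime; // "git"
    // any other export name throws: Unknown export "..."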
@@ -0,0 +1,19 @@
+// src/utils/fuma-matter.ts
+import { load } from "js-yaml";
+var regex = /^---\r?\n(.+?)\r?\n---\r?\n/s;
+function fumaMatter(input) {
+  const output = { matter: "", data: {}, content: input };
+  const match = regex.exec(input);
+  if (!match) {
+    return output;
+  }
+  output.matter = match[0];
+  output.content = input.slice(match[0].length);
+  const loaded = load(match[1]);
+  output.data = loaded ?? {};
+  return output;
+}
+
+export {
+  fumaMatter
+};

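Note (not part of the diff): `fumaMatter` only strips a single leading `---` block and YAML-loads it via `js-yaml`. The return shape, on assumed inputs:

    // assumed inputs, for illustration only
    fumaMatter("---\ntitle: Hello\n---\n# Heading");
    // => { matter: "---\ntitle: Hello\n---\n", data: { title: "Hello" }, content: "# Heading" }
    fumaMatter("# No frontmatter");
    // => { matter: "", data: {}, content: "# No frontmatter" }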
@@ -0,0 +1,128 @@
+import {
+  buildMDX
+} from "./chunk-3J3WL7WN.js";
+import {
+  getGitTimestamp,
+  validate
+} from "./chunk-EELYB2XC.js";
+import {
+  fumaMatter
+} from "./chunk-VWJKRQZR.js";
+
+// src/loaders/mdx/index.ts
+import { z } from "zod";
+import fs from "fs/promises";
+import path from "path";
+import { createHash } from "crypto";
+var querySchema = z.object({
+  only: z.literal(["frontmatter", "all"]).default("all"),
+  collection: z.string().optional()
+}).loose();
+var cacheEntry = z.object({
+  code: z.string(),
+  map: z.any().optional(),
+  hash: z.string().optional()
+});
+function createMdxLoader(configLoader) {
+  return async ({
+    source: value,
+    development: isDevelopment,
+    query,
+    compiler,
+    filePath
+  }) => {
+    const matter = fumaMatter(value);
+    const parsed = querySchema.parse(query);
+    const config = await configLoader.getConfig();
+    let after;
+    if (!isDevelopment && config.global.experimentalBuildCache) {
+      const cacheDir = config.global.experimentalBuildCache;
+      const cacheKey = `${parsed.hash}_${parsed.collection ?? "global"}_${generateCacheHash(filePath)}`;
+      const cached = await fs.readFile(path.join(cacheDir, cacheKey)).then((content) => cacheEntry.parse(JSON.parse(content.toString()))).catch(() => null);
+      if (cached && cached.hash === generateCacheHash(value)) return cached;
+      after = async () => {
+        await fs.mkdir(cacheDir, { recursive: true });
+        await fs.writeFile(
+          path.join(cacheDir, cacheKey),
+          JSON.stringify({
+            ...out,
+            hash: generateCacheHash(value)
+          })
+        );
+      };
+    }
+    const collection = parsed.collection ? config.collections.get(parsed.collection) : void 0;
+    let docCollection;
+    switch (collection?.type) {
+      case "doc":
+        docCollection = collection;
+        break;
+      case "docs":
+        docCollection = collection.docs;
+        break;
+    }
+    if (docCollection?.schema) {
+      matter.data = await validate(
+        docCollection.schema,
+        matter.data,
+        {
+          source: value,
+          path: filePath
+        },
+        `invalid frontmatter in ${filePath}`
+      );
+    }
+    if (parsed.only === "frontmatter") {
+      return {
+        code: `export const frontmatter = ${JSON.stringify(matter.data)}`,
+        map: null
+      };
+    }
+    const data = {};
+    if (config.global.lastModifiedTime === "git") {
+      data.lastModified = (await getGitTimestamp(filePath))?.getTime();
+    }
+    const lineOffset = isDevelopment ? countLines(matter.matter) : 0;
+    const compiled = await buildMDX(
+      `${getConfigHash(config)}:${parsed.collection ?? "global"}`,
+      "\n".repeat(lineOffset) + matter.content,
+      {
+        development: isDevelopment,
+        ...docCollection?.mdxOptions ?? await config.getDefaultMDXOptions(),
+        postprocess: docCollection?.postprocess,
+        data,
+        filePath,
+        frontmatter: matter.data,
+        _compiler: compiler
+      }
+    );
+    const out = {
+      code: String(compiled.value),
+      map: compiled.map
+    };
+    await after?.();
+    return out;
+  };
+}
+var hashes = /* @__PURE__ */ new WeakMap();
+function getConfigHash(config) {
+  let hash = hashes.get(config);
+  if (hash) return hash;
+  hash = Date.now().toString();
+  hashes.set(config, hash);
+  return hash;
+}
+function generateCacheHash(input) {
+  return createHash("md5").update(input).digest("hex");
+}
+function countLines(s) {
+  let num = 0;
+  for (const c of s) {
+    if (c === "\n") num++;
+  }
+  return num;
+}
+
+export {
+  createMdxLoader
+};

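Note (not part of the diff): `createMdxLoader` is bundler-agnostic; the adapters later in the diff feed it a source string, a parsed query, and a compiler shim. A minimal sketch of the `only=frontmatter` fast path, assuming a config with no collections:

    // illustrative only — reuses buildConfig/createMdxLoader from the chunks above
    const load = createMdxLoader({ getConfig: async () => buildConfig({ default: {} }) });
    const result = await load({
      source: "---\ntitle: Hello\n---\n# Hi",
      development: false,
      query: { only: "frontmatter" },
      compiler: { addDependency() {} },
      filePath: "/content/docs/index.mdx",
    });
    // result.code === 'export const frontmatter = {"title":"Hello"}'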
@@ -0,0 +1,81 @@
+// src/utils/collections.ts
+import picomatch from "picomatch";
+import { glob } from "tinyglobby";
+import path from "path";
+var SupportedFormats = {
+  doc: ["mdx", "md"],
+  meta: ["json", "yaml"]
+};
+function getGlobPatterns(collection) {
+  if (collection.files) return collection.files;
+  return [`**/*.{${SupportedFormats[collection.type].join(",")}}`];
+}
+function isFileSupported(filePath, collection) {
+  return SupportedFormats[collection.type].some(
+    (format) => filePath.endsWith(`.${format}`)
+  );
+}
+function createCollectionMatcher(core) {
+  const CacheKey = "collection-matcher";
+  return {
+    scan(config) {
+      const scanned = [];
+      function scan(name, collection) {
+        const patterns = getGlobPatterns(collection);
+        for (const dir of Array.isArray(collection.dir) ? collection.dir : [collection.dir]) {
+          scanned.push({
+            name,
+            collection,
+            matcher: picomatch(patterns, {
+              cwd: dir
+            })
+          });
+        }
+      }
+      for (const [name, collection] of config.collections) {
+        if (collection.type === "docs") {
+          scan(name, collection.meta);
+          scan(name, collection.docs);
+        } else {
+          scan(name, collection);
+        }
+      }
+      return scanned;
+    },
+    getFileCollection(file) {
+      const scanned = core.cache.get(CacheKey) ?? this.scan(core.getConfig());
+      core.cache.set(CacheKey, scanned);
+      for (const item of scanned) {
+        if (isFileSupported(file, item.collection) && item.matcher(file))
+          return { name: item.name, collection: item.collection };
+      }
+    }
+  };
+}
+async function getCollectionFiles(collection) {
+  const files = /* @__PURE__ */ new Map();
+  const dirs = Array.isArray(collection.dir) ? collection.dir : [collection.dir];
+  const patterns = getGlobPatterns(collection);
+  await Promise.all(
+    dirs.map(async (dir) => {
+      const result = await glob(patterns, {
+        cwd: path.resolve(dir)
+      });
+      for (const item of result) {
+        if (!isFileSupported(item, collection)) continue;
+        const fullPath = path.join(dir, item);
+        files.set(fullPath, {
+          path: item,
+          fullPath
+        });
+      }
+    })
+  );
+  return Array.from(files.values());
+}
+
+export {
+  getGlobPatterns,
+  createCollectionMatcher,
+  getCollectionFiles
+};

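Note (not part of the diff): the default glob patterns depend only on the collection type, and an explicit `files` option takes precedence. For illustration, with assumed collection objects:

    getGlobPatterns({ type: "doc" });  // ["**/*.{mdx,md}"]
    getGlobPatterns({ type: "meta" }); // ["**/*.{json,yaml}"]
    getGlobPatterns({ type: "doc", files: ["docs/**/*.mdx"] }); // ["docs/**/*.mdx"]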
@@ -0,0 +1,82 @@
+import {
+  ValidationError
+} from "./chunk-EELYB2XC.js";
+
+// src/loaders/adapter.ts
+import { fileURLToPath } from "url";
+import fs from "fs/promises";
+import { parse } from "querystring";
+import path from "path";
+function toNode(loader, filterByPath) {
+  return async (url, _context, nextLoad) => {
+    if (!url.startsWith("file:///")) return nextLoad(url);
+    const parsedUrl = new URL(url);
+    const filePath = fileURLToPath(parsedUrl);
+    if (filterByPath(filePath)) {
+      const source = (await fs.readFile(filePath)).toString();
+      const result = await loader({
+        filePath,
+        query: Object.fromEntries(parsedUrl.searchParams.entries()),
+        source,
+        development: false,
+        compiler: {
+          addDependency() {
+          }
+        }
+      });
+      return {
+        source: result.code,
+        format: "module",
+        shortCircuit: true
+      };
+    }
+    return nextLoad(url);
+  };
+}
+function toVite(loader) {
+  return async function(file, query, value) {
+    const result = await loader({
+      filePath: file,
+      query: parse(query),
+      source: value,
+      development: this.environment.mode === "dev",
+      compiler: {
+        addDependency: (file2) => {
+          this.addWatchFile(file2);
+        }
+      }
+    });
+    return {
+      code: result.code,
+      map: result.map
+    };
+  };
+}
+function toWebpack(loader) {
+  return async function(source, callback) {
+    try {
+      const result = await loader({
+        filePath: this.resourcePath,
+        query: parse(this.resourceQuery.slice(1)),
+        source,
+        development: this.mode === "development",
+        compiler: this
+      });
+      callback(void 0, result.code, result.map);
+    } catch (error) {
+      if (error instanceof ValidationError) {
+        return callback(new Error(error.toStringFormatted()));
+      }
+      if (!(error instanceof Error)) throw error;
+      const fpath = path.relative(this.context, this.resourcePath);
+      error.message = `${fpath}:${error.name}: ${error.message}`;
+      callback(error);
+    }
+  };
+}
+
+export {
+  toNode,
+  toVite,
+  toWebpack
+};

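Note (not part of the diff): `toNode` returns a function with the signature of a Node.js ESM `load` hook, so it could plausibly be wired up through `node:module`'s `register()`. A sketch under that assumption; the config loader and file filter are hypothetical, and the package's own dist/node/loader entry presumably does this wiring for you:

    // hooks.mjs — illustrative only
    export const load = toNode(
      createMdxLoader({ getConfig: async () => buildConfig({ default: {} }) }),
      (filePath) => /\.mdx?$/.test(filePath) // only intercept .md / .mdx files
    );

    // register.mjs
    import { register } from "node:module";
    register("./hooks.mjs", import.meta.url);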