fumadocs-mdx 12.0.3 → 13.0.1

This diff shows the published contents of these two package versions as they appear in their public registry and is provided for informational purposes only.
Files changed (80)
  1. package/dist/bin.cjs +1116 -359
  2. package/dist/bin.js +4 -4
  3. package/dist/build-mdx-CCNr86q6.d.ts +53 -0
  4. package/dist/build-mdx-D-r3_eQL.d.cts +53 -0
  5. package/dist/bun/index.cjs +196 -52
  6. package/dist/bun/index.d.cts +8 -3
  7. package/dist/bun/index.d.ts +8 -3
  8. package/dist/bun/index.js +19 -10
  9. package/dist/{chunk-QAUWMR5D.js → chunk-3J3WL7WN.js} +23 -5
  10. package/dist/{chunk-6Y5JDZHD.js → chunk-CXA4JO4Z.js} +1 -21
  11. package/dist/chunk-EELYB2XC.js +207 -0
  12. package/dist/{chunk-46UPKP5R.js → chunk-II3H5ZVZ.js} +5 -5
  13. package/dist/{chunk-LGYVNESJ.js → chunk-JVZFH6ND.js} +6 -22
  14. package/dist/{chunk-LMG6UWCL.js → chunk-K5ZLPEIQ.js} +56 -16
  15. package/dist/{chunk-OMAMTKDE.js → chunk-KILFIBVW.js} +3 -12
  16. package/dist/chunk-NVRDCY6Z.js +30 -0
  17. package/dist/{chunk-RMDXSZYE.js → chunk-XQ5O7IPO.js} +31 -24
  18. package/dist/chunk-XZY2AWJI.js +81 -0
  19. package/dist/{chunk-VXEBLM4X.js → chunk-YVCR6FUH.js} +1 -1
  20. package/dist/config/index.cjs +56 -16
  21. package/dist/config/index.d.cts +2 -1
  22. package/dist/config/index.d.ts +2 -1
  23. package/dist/config/index.js +1 -1
  24. package/dist/{define-DJbJduHy.d.ts → core-B6j6Fxse.d.cts} +89 -2
  25. package/dist/{define-DJbJduHy.d.cts → core-B6j6Fxse.d.ts} +89 -2
  26. package/dist/index.cjs +0 -109
  27. package/dist/index.d.cts +75 -9
  28. package/dist/index.d.ts +75 -9
  29. package/dist/index.js +0 -11
  30. package/dist/{load-UUXLUBHL.js → load-MNG3CLET.js} +1 -3
  31. package/dist/next/index.cjs +298 -234
  32. package/dist/next/index.d.cts +2 -11
  33. package/dist/next/index.d.ts +2 -11
  34. package/dist/next/index.js +177 -141
  35. package/dist/node/loader.cjs +228 -85
  36. package/dist/node/loader.js +19 -9
  37. package/dist/plugins/json-schema.cjs +162 -0
  38. package/dist/plugins/json-schema.d.cts +24 -0
  39. package/dist/plugins/json-schema.d.ts +24 -0
  40. package/dist/plugins/json-schema.js +78 -0
  41. package/dist/runtime/next/async.cjs +108 -70
  42. package/dist/runtime/next/async.d.cts +9 -6
  43. package/dist/runtime/next/async.d.ts +9 -6
  44. package/dist/runtime/next/async.js +8 -18
  45. package/dist/runtime/next/index.cjs +25 -14
  46. package/dist/runtime/next/index.d.cts +11 -8
  47. package/dist/runtime/next/index.d.ts +11 -8
  48. package/dist/runtime/next/index.js +2 -2
  49. package/dist/runtime/vite/browser.cjs +7 -3
  50. package/dist/runtime/vite/browser.d.cts +56 -7
  51. package/dist/runtime/vite/browser.d.ts +56 -7
  52. package/dist/runtime/vite/browser.js +2 -1
  53. package/dist/runtime/vite/server.cjs +40 -34
  54. package/dist/runtime/vite/server.d.cts +13 -10
  55. package/dist/runtime/vite/server.d.ts +13 -10
  56. package/dist/runtime/vite/server.js +8 -23
  57. package/dist/{types-TeHjsmja.d.ts → types-AGzTfBmf.d.ts} +3 -10
  58. package/dist/{types-BRx1QsIJ.d.cts → types-DKGMoay5.d.cts} +3 -10
  59. package/dist/vite/index.cjs +443 -249
  60. package/dist/vite/index.d.cts +23 -10
  61. package/dist/vite/index.d.ts +23 -10
  62. package/dist/vite/index.js +213 -36
  63. package/dist/{loader-mdx.cjs → webpack/index.cjs} +268 -82
  64. package/dist/{loader-mdx.d.ts → webpack/index.d.cts} +1 -0
  65. package/dist/{loader-mdx.d.cts → webpack/index.d.ts} +1 -0
  66. package/dist/webpack/index.js +44 -0
  67. package/loader-mdx.cjs +1 -1
  68. package/package.json +30 -16
  69. package/dist/browser-BupUnhpC.d.ts +0 -98
  70. package/dist/browser-R0x9IPaQ.d.cts +0 -98
  71. package/dist/chunk-ADR6R7HM.js +0 -29
  72. package/dist/chunk-IQAEAI4P.js +0 -66
  73. package/dist/chunk-XMFLD5J6.js +0 -30
  74. package/dist/chunk-ZLCSVXCD.js +0 -10
  75. package/dist/chunk-ZX7TM4AR.js +0 -127
  76. package/dist/loader-mdx.js +0 -25
  77. package/dist/postinstall-SCSXM4IM.js +0 -10
  78. package/dist/shared-CfiiRctw.d.ts +0 -70
  79. package/dist/shared-fFqiuWJC.d.cts +0 -70
  80. package/dist/watcher-HGOH3APP.js +0 -22
@@ -38,28 +38,8 @@ function ident(code, tab = 1) {
   return code.split("\n").map((v) => " ".repeat(tab) + v).join("\n");
 }
 
-// src/utils/collections.ts
-function getSupportedFormats(collection) {
-  return {
-    doc: ["mdx", "md"],
-    meta: ["json", "yaml"]
-  }[collection.type];
-}
-function getGlobPatterns(collection) {
-  if (collection.files) return collection.files;
-  return [`**/*.{${getSupportedFormats(collection).join(",")}}`];
-}
-function isFileSupported(filePath, collection) {
-  for (const format of getSupportedFormats(collection)) {
-    if (filePath.endsWith(`.${format}`)) return true;
-  }
-  return false;
-}
-
 export {
   getImportCode,
   toImportPath,
-  ident,
-  getGlobPatterns,
-  isFileSupported
+  ident
 };
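
Illustrative sketch (not part of the diff): the ident() helper kept by this chunk simply prefixes every line of a snippet with `tab` spaces. The declaration below only mirrors the signature visible above.

declare function ident(code: string, tab?: number): string;

const indented = ident("const a = 1;\nconst b = 2;", 2);
// => "  const a = 1;\n  const b = 2;"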
@@ -0,0 +1,207 @@
+// src/loaders/config/index.ts
+import path from "path";
+import fs from "fs/promises";
+function findConfigFile() {
+  return path.resolve("source.config.ts");
+}
+function staticConfig({
+  core,
+  buildConfig
+}) {
+  let cached;
+  async function newConfig() {
+    const { loadConfig } = await import("./load-MNG3CLET.js");
+    await core.init({
+      config: loadConfig(
+        core._options.configPath,
+        core._options.outDir,
+        buildConfig
+      )
+    });
+    return core.getConfig();
+  }
+  return {
+    async getConfig() {
+      return cached ??= newConfig();
+    }
+  };
+}
+function dynamicConfig({
+  core,
+  buildConfig
+}) {
+  let loaded;
+  async function getConfigHash() {
+    const stats = await fs.stat(core._options.configPath).catch(() => void 0);
+    if (stats) {
+      return stats.mtime.getTime().toString();
+    }
+    throw new Error("Cannot find config file");
+  }
+  async function newConfig() {
+    const { loadConfig } = await import("./load-MNG3CLET.js");
+    await core.init({
+      config: loadConfig(
+        core._options.configPath,
+        core._options.outDir,
+        buildConfig
+      )
+    });
+    return core.getConfig();
+  }
+  return {
+    async getConfig() {
+      const hash = await getConfigHash();
+      if (loaded && loaded.hash === hash) return loaded.config;
+      loaded = {
+        hash,
+        config: newConfig()
+      };
+      return loaded.config;
+    }
+  };
+}
+
+// src/utils/validation.ts
+import picocolors from "picocolors";
+var ValidationError = class extends Error {
+  constructor(message, issues) {
+    super(
+      `${message}:
+${issues.map((issue) => ` ${issue.path}: ${issue.message}`).join("\n")}`
+    );
+    this.title = message;
+    this.issues = issues;
+  }
+  toStringFormatted() {
+    return [
+      picocolors.bold(`[MDX] ${this.title}:`),
+      ...this.issues.map(
+        (issue) => picocolors.redBright(
+          `- ${picocolors.bold(issue.path?.join(".") ?? "*")}: ${issue.message}`
+        )
+      )
+    ].join("\n");
+  }
+};
+async function validate(schema, data, context, errorMessage) {
+  if (typeof schema === "function" && !("~standard" in schema)) {
+    schema = schema(context);
+  }
+  if ("~standard" in schema) {
+    const result = await schema["~standard"].validate(
+      data
+    );
+    if (result.issues) {
+      throw new ValidationError(errorMessage, result.issues);
+    }
+    return result.value;
+  }
+  return data;
+}
+
+// src/core.ts
+import path2 from "path";
+import fs2 from "fs/promises";
+function createCore(options, defaultPlugins = []) {
+  let config;
+  let plugins;
+  return {
+    _options: options,
+    getPluginContext() {
+      return {
+        core: this,
+        ...options
+      };
+    },
+    /**
+     * Convenient cache store, reset when config changes
+     */
+    cache: /* @__PURE__ */ new Map(),
+    async init({ config: newConfig }) {
+      config = await newConfig;
+      this.cache.clear();
+      plugins = [];
+      for await (const option of [
+        ...defaultPlugins,
+        ...config.global.plugins ?? []
+      ]) {
+        if (!option) continue;
+        if (Array.isArray(option)) plugins.push(...option);
+        else plugins.push(option);
+      }
+      for (const plugin of plugins) {
+        const out = await plugin.config?.call(this.getPluginContext(), config);
+        if (out) config = out;
+      }
+      return this;
+    },
+    getConfig() {
+      return config;
+    },
+    creatConfigLoader() {
+      return {
+        getConfig() {
+          return config;
+        }
+      };
+    },
+    async initServer(server) {
+      for (const plugin of plugins) {
+        await plugin.configureServer?.call(this.getPluginContext(), server);
+      }
+    },
+    async emitAndWrite({
+      filterPlugin = () => true
+    } = {}) {
+      const start = performance.now();
+      const out = await Promise.all(
+        plugins.map((plugin) => {
+          if (!filterPlugin(plugin) || !plugin.emit) return [];
+          return plugin.emit.call(this.getPluginContext());
+        })
+      );
+      await Promise.all(
+        out.flat().map(async (entry) => {
+          const file = path2.join(options.outDir, entry.path);
+          await fs2.mkdir(path2.dirname(file), { recursive: true });
+          await fs2.writeFile(file, entry.content);
+        })
+      );
+      console.log(`[MDX] generated files in ${performance.now() - start}ms`);
+    }
+  };
+}
+
+// src/utils/git-timestamp.ts
+import path3 from "path";
+import { x } from "tinyexec";
+var cache = /* @__PURE__ */ new Map();
+async function getGitTimestamp(file) {
+  const cached = cache.get(file);
+  if (cached) return cached;
+  try {
+    const out = await x(
+      "git",
+      ["log", "-1", '--pretty="%ai"', path3.relative(process.cwd(), file)],
+      {
+        throwOnError: true
+      }
+    );
+    const time = new Date(out.stdout);
+    cache.set(file, time);
+    return time;
+  } catch {
+    return;
+  }
+}
+
+export {
+  findConfigFile,
+  staticConfig,
+  dynamicConfig,
+  ValidationError,
+  validate,
+  getGitTimestamp,
+  createCore
+};
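
Illustrative sketch (not part of the diff): the new chunk centralizes config loading, validation and plugin handling in createCore(). The hook names (config, configureServer, emit) and the { path, content } entries below are taken from the loops inside createCore(); the type names themselves are invented for illustration.

// A plugin as createCore() drives it; hooks are invoked with the plugin
// context as `this`.
interface EmittedFile {
  path: string; // joined onto options.outDir before being written
  content: string;
}

interface CorePlugin {
  // may return a replacement config
  config?: (config: unknown) => unknown | Promise<unknown>;
  configureServer?: (server: unknown) => void | Promise<void>;
  // files returned here are written to disk by emitAndWrite()
  emit?: () => EmittedFile[] | Promise<EmittedFile[]>;
}

const examplePlugin: CorePlugin = {
  emit() {
    return [{ path: "generated/index.ts", content: "// emitted by a plugin" }];
  }
};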
@@ -1,7 +1,5 @@
-// src/runtime/vite/browser.ts
-import { createElement, lazy } from "react";
-
 // src/runtime/vite/base.ts
+import { createElement, lazy } from "react";
 function fromConfigBase() {
   function normalize(entries, base) {
     const out = {};
@@ -28,8 +26,6 @@ function fromConfigBase() {
     }
   };
 }
-
-// src/runtime/vite/browser.ts
 var loaderStore = /* @__PURE__ */ new Map();
 function createClientLoader(files, options) {
   const { id = "", component } = options;
@@ -63,6 +59,10 @@ function createClientLoader(files, options) {
     getRenderer,
     getComponent(path) {
       return getRenderer()[path];
+    },
+    useContent(path, props) {
+      const Comp = this.getComponent(path);
+      return createElement(Comp, props);
     }
   };
 }
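
Illustrative sketch (not part of the diff): the client loader now exposes useContent(), which wraps getComponent() with createElement(). The `loader` value below stands in for the object returned by createClientLoader(); its public import path is not shown in this diff.

import type { FC, ReactElement } from "react";

// Shape inferred from the code above.
declare const loader: {
  getComponent: (path: string) => FC<Record<string, unknown>>;
  useContent: (path: string, props?: Record<string, unknown>) => ReactElement;
};

function DocContent({ path }: { path: string }) {
  // equivalent to createElement(loader.getComponent(path), props)
  return loader.useContent(path, {});
}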
@@ -3,10 +3,8 @@ import {
 } from "./chunk-U4MQ44TS.js";
 
 // src/loaders/config/load.ts
-import * as fs from "fs/promises";
 import * as path from "path";
 import { pathToFileURL } from "url";
-var cache = null;
 async function compileConfig(configPath, outDir) {
   const { build } = await import("esbuild");
   const transformed = await build({
@@ -27,30 +25,16 @@ async function compileConfig(configPath, outDir) {
     throw new Error("failed to compile configuration file");
   }
 }
-async function loadConfig(configPath, outDir, hash, build = false) {
-  if (cache && cache.hash === hash) {
-    return await cache.config;
-  }
+async function loadConfig(configPath, outDir, build = false) {
   if (build) await compileConfig(configPath, outDir);
   const url = pathToFileURL(path.resolve(outDir, "source.config.mjs"));
-  const config = import(`${url.href}?hash=${hash}`).then((loaded) => {
-    return buildConfig(
-      // every call to `loadConfig` will cause the previous cache to be ignored
-      loaded
-    );
-  });
-  if (hash) cache = { config, hash };
+  url.searchParams.set("hash", Date.now().toString());
+  const config = import(url.href).then(
+    (loaded) => buildConfig(loaded)
+  );
  return await config;
 }
-async function getConfigHash(configPath) {
-  const stats = await fs.stat(configPath).catch(() => void 0);
-  if (stats) {
-    return stats.mtime.getTime().toString();
-  }
-  throw new Error("Cannot find config file");
-}
 
 export {
-  loadConfig,
-  getConfigHash
+  loadConfig
 };
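
Illustrative sketch (not part of the diff): loadConfig() no longer keeps a module-level cache keyed by a caller-supplied hash; instead it busts Node's ESM module cache by appending a fresh query string before importing the compiled config. The trick in isolation, with a hypothetical helper name:

import { pathToFileURL } from "url";

// Node caches dynamic imports per resolved URL (query string included),
// so a changing ?hash= forces the module to be evaluated again.
async function importFresh(file: string): Promise<unknown> {
  const url = pathToFileURL(file);
  url.searchParams.set("hash", Date.now().toString());
  return import(url.href);
}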
@@ -4,12 +4,56 @@ import {
 
 // src/loaders/mdx/remark-include.ts
 import { unified } from "unified";
-import { visit } from "unist-util-visit";
+import { visit as visit2 } from "unist-util-visit";
 import * as path from "path";
 import * as fs from "fs/promises";
-import remarkParse from "remark-parse";
-import remarkMdx from "remark-mdx";
 import { remarkHeading } from "fumadocs-core/mdx-plugins";
+
+// src/loaders/mdx/remark-unravel.ts
+import { visit } from "unist-util-visit";
+function remarkMarkAndUnravel() {
+  return (tree) => {
+    visit(tree, function(node, index, parent) {
+      let offset = -1;
+      let all = true;
+      let oneOrMore = false;
+      if (parent && typeof index === "number" && node.type === "paragraph") {
+        const children = node.children;
+        while (++offset < children.length) {
+          const child = children[offset];
+          if (child.type === "mdxJsxTextElement" || child.type === "mdxTextExpression") {
+            oneOrMore = true;
+          } else if (child.type === "text" && child.value.trim().length === 0) {
+          } else {
+            all = false;
+            break;
+          }
+        }
+        if (all && oneOrMore) {
+          offset = -1;
+          const newChildren = [];
+          while (++offset < children.length) {
+            const child = children[offset];
+            if (child.type === "mdxJsxTextElement") {
+              child.type = "mdxJsxFlowElement";
+            }
+            if (child.type === "mdxTextExpression") {
+              child.type = "mdxFlowExpression";
+            }
+            if (child.type === "text" && /^[\t\r\n ]+$/.test(String(child.value))) {
+            } else {
+              newChildren.push(child);
+            }
+          }
+          parent.children.splice(index, 1, ...newChildren);
+          return index;
+        }
+      }
+    });
+  };
+}
+
+// src/loaders/mdx/remark-include.ts
 var ElementLikeTypes = [
   "mdxJsxFlowElement",
   "mdxJsxTextElement",
@@ -49,7 +93,7 @@ function parseSpecifier(specifier) {
 function extractSection(root, section) {
   let nodes;
   let capturingHeadingContent = false;
-  visit(root, (node) => {
+  visit2(root, (node) => {
     if (node.type === "heading") {
       if (capturingHeadingContent) {
         return false;
@@ -81,7 +125,7 @@ function extractSection(root, section) {
 }
 function remarkInclude() {
   const TagName = "include";
-  async function embedContent(file, heading, params, data) {
+  const embedContent = async (file, heading, params, data) => {
     let content;
     try {
       content = (await fs.readFile(file)).toString();
@@ -104,18 +148,17 @@ ${e instanceof Error ? e.message : String(e)}`,
        data: {}
      };
    }
-    const parser = (data._getProcessor ?? getDefaultProcessor)(
-      ext === ".mdx" ? "mdx" : "md"
-    );
+    const parser = data._getProcessor ? data._getProcessor(ext === ".mdx" ? "mdx" : "md") : this;
    const parsed = fumaMatter(content);
    let mdast = parser.parse({
      path: file,
      value: parsed.content,
      data: { frontmatter: parsed.data }
    });
+    const baseProcessor = unified().use(remarkMarkAndUnravel);
    if (heading) {
      const extracted = extractSection(
-        await unified().use(remarkHeading).run(mdast),
+        await baseProcessor.use(remarkHeading).run(mdast),
        heading
      );
      if (!extracted)
@@ -123,13 +166,15 @@ ${e instanceof Error ? e.message : String(e)}`,
          `Cannot find section ${heading} in ${file}, make sure you have encapsulated the section in a <section id="${heading}"> tag, or a :::section directive with remark-directive configured.`
        );
      mdast = extracted;
+    } else {
+      mdast = await baseProcessor.run(mdast);
    }
    await update(mdast, path.dirname(file), data);
    return mdast;
-  }
+  };
  async function update(tree, directory, data) {
    const queue = [];
-    visit(tree, ElementLikeTypes, (_node, _, parent) => {
+    visit2(tree, ElementLikeTypes, (_node, _, parent) => {
      const node = _node;
      if (node.name !== TagName) return;
      const specifier = flattenNode(node);
@@ -156,11 +201,6 @@ ${e instanceof Error ? e.message : String(e)}`,
    await update(tree, path.dirname(file.path), file.data);
  };
 }
-function getDefaultProcessor(format) {
-  const mdProcessor = unified().use(remarkParse);
-  if (format === "md") return mdProcessor;
-  return mdProcessor.use(remarkMdx);
-}
 
 export {
   remarkInclude
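
Illustrative sketch (not part of the diff): the new remarkMarkAndUnravel step matters for <include> tags written inline. A paragraph whose only children are inline JSX elements or expressions gets unwrapped, and those children are promoted to flow nodes. A data-level before/after, following the node types checked in the code above (the file path is a made-up example):

// Before: the <include> tag was parsed as an inline JSX element inside a paragraph.
const before = {
  type: "paragraph",
  children: [
    {
      type: "mdxJsxTextElement",
      name: "include",
      attributes: [],
      children: [{ type: "text", value: "./shared/setup.mdx" }]
    }
  ]
};

// After remarkMarkAndUnravel: the paragraph is replaced in its parent by the
// element itself, retyped as a block-level node, which is what remarkInclude's
// visit2(tree, ElementLikeTypes, ...) pass looks for.
const after = {
  type: "mdxJsxFlowElement",
  name: "include",
  attributes: [],
  children: [{ type: "text", value: "./shared/setup.mdx" }]
};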
@@ -1,16 +1,13 @@
 import {
-  missingProcessedMarkdown
-} from "./chunk-ZLCSVXCD.js";
+  createDocMethods
+} from "./chunk-NVRDCY6Z.js";
 
 // src/runtime/next/index.ts
-import * as fs from "fs/promises";
 var _runtime = {
   doc(files) {
     return files.map((file) => {
       const data = file.data;
-      const filePath = file.info.fullPath;
       return {
-        info: file.info,
         _exports: data,
         body: data.default,
         lastModified: data.lastModified,
@@ -18,13 +15,7 @@ var _runtime = {
         structuredData: data.structuredData,
         extractedReferences: data.extractedReferences,
         ...data.frontmatter,
-        async getText(type) {
-          if (type === "raw") {
-            return (await fs.readFile(filePath)).toString();
-          }
-          if (typeof data._markdown !== "string") missingProcessedMarkdown();
-          return data._markdown;
-        }
+        ...createDocMethods(file.info, () => Promise.resolve(data))
      };
    });
  },
@@ -0,0 +1,30 @@
+// src/runtime/shared.ts
+import fs from "fs/promises";
+function createDocMethods(info, load) {
+  return {
+    info,
+    async getText(type) {
+      if (type === "raw") {
+        return (await fs.readFile(info.fullPath)).toString();
+      }
+      const data = await load();
+      if (typeof data._markdown !== "string")
+        throw new Error(
+          "getText('processed') requires `includeProcessedMarkdown` to be enabled in your collection config."
+        );
+      return data._markdown;
+    },
+    async getMDAST() {
+      const data = await load();
+      if (!data._mdast)
+        throw new Error(
+          "getMDAST() requires `includeMDAST` to be enabled in your collection config."
+        );
+      return JSON.parse(data._mdast);
+    }
+  };
+}
+
+export {
+  createDocMethods
+};
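
Illustrative sketch (not part of the diff): these shared doc methods replace the per-runtime getText() implementation and add getMDAST(). From the consumer side, `doc` stands in for a single entry produced by the runtime's doc() mapper; the option names come from the error messages above.

declare const doc: {
  getText: (type: "raw" | "processed") => Promise<string>;
  getMDAST: () => Promise<unknown>;
};

const raw = await doc.getText("raw");       // reads the source file from disk
const md = await doc.getText("processed");  // needs includeProcessedMarkdown in the collection config
const tree = await doc.getMDAST();          // needs includeMDAST in the collection config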
@@ -1,10 +1,10 @@
 import {
   buildMDX
-} from "./chunk-QAUWMR5D.js";
+} from "./chunk-3J3WL7WN.js";
 import {
   getGitTimestamp,
   validate
-} from "./chunk-IQAEAI4P.js";
+} from "./chunk-EELYB2XC.js";
 import {
   fumaMatter
 } from "./chunk-VWJKRQZR.js";
@@ -16,10 +16,7 @@ import path from "path";
 import { createHash } from "crypto";
 var querySchema = z.object({
   only: z.literal(["frontmatter", "all"]).default("all"),
-  collection: z.string().optional(),
-  hash: z.string().describe(
-    "the hash of config, used for revalidation on Turbopack/Webpack."
-  ).optional()
+  collection: z.string().optional()
 }).loose();
 var cacheEntry = z.object({
   code: z.string(),
@@ -36,14 +33,25 @@ function createMdxLoader(configLoader) {
   }) => {
     const matter = fumaMatter(value);
     const parsed = querySchema.parse(query);
-    const loaded = await configLoader.getConfig(parsed.hash);
-    const cacheDir = isDevelopment ? void 0 : loaded.global.experimentalBuildCache;
-    const cacheKey = `${parsed.hash}_${parsed.collection ?? "global"}_${generateCacheHash(filePath)}`;
-    if (cacheDir) {
+    const config = await configLoader.getConfig();
+    let after;
+    if (!isDevelopment && config.global.experimentalBuildCache) {
+      const cacheDir = config.global.experimentalBuildCache;
+      const cacheKey = `${parsed.hash}_${parsed.collection ?? "global"}_${generateCacheHash(filePath)}`;
      const cached = await fs.readFile(path.join(cacheDir, cacheKey)).then((content) => cacheEntry.parse(JSON.parse(content.toString()))).catch(() => null);
      if (cached && cached.hash === generateCacheHash(value)) return cached;
+      after = async () => {
+        await fs.mkdir(cacheDir, { recursive: true });
+        await fs.writeFile(
+          path.join(cacheDir, cacheKey),
+          JSON.stringify({
+            ...out,
+            hash: generateCacheHash(value)
+          })
+        );
+      };
    }
-    const collection = parsed.collection ? loaded.collections.get(parsed.collection) : void 0;
+    const collection = parsed.collection ? config.collections.get(parsed.collection) : void 0;
    let docCollection;
    switch (collection?.type) {
      case "doc":
@@ -71,16 +79,16 @@
      };
    }
    const data = {};
-    if (loaded.global.lastModifiedTime === "git") {
+    if (config.global.lastModifiedTime === "git") {
      data.lastModified = (await getGitTimestamp(filePath))?.getTime();
    }
    const lineOffset = isDevelopment ? countLines(matter.matter) : 0;
    const compiled = await buildMDX(
-      `${parsed.hash ?? ""}:${parsed.collection ?? "global"}`,
+      `${getConfigHash(config)}:${parsed.collection ?? "global"}`,
      "\n".repeat(lineOffset) + matter.content,
      {
        development: isDevelopment,
-        ...docCollection?.mdxOptions ?? await loaded.getDefaultMDXOptions(),
+        ...docCollection?.mdxOptions ?? await config.getDefaultMDXOptions(),
        postprocess: docCollection?.postprocess,
        data,
        filePath,
@@ -92,19 +100,18 @@ function createMdxLoader(configLoader) {
      code: String(compiled.value),
      map: compiled.map
    };
-    if (cacheDir) {
-      await fs.mkdir(cacheDir, { recursive: true });
-      await fs.writeFile(
-        path.join(cacheDir, cacheKey),
-        JSON.stringify({
-          ...out,
-          hash: generateCacheHash(value)
-        })
-      );
-    }
+    await after?.();
    return out;
  };
 }
+var hashes = /* @__PURE__ */ new WeakMap();
+function getConfigHash(config) {
+  let hash = hashes.get(config);
+  if (hash) return hash;
+  hash = Date.now().toString();
+  hashes.set(config, hash);
+  return hash;
+}
 function generateCacheHash(input) {
   return createHash("md5").update(input).digest("hex");
 }
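
Illustrative sketch (not part of the diff): with the config hash dropped from the loader query, a build-cache entry is selected by collection plus an md5 of the file path, and freshness is checked with an md5 of the file contents. Roughly, with placeholder values and the leading query-hash segment of the real key omitted:

import { createHash } from "crypto";

const md5 = (input: string) => createHash("md5").update(input).digest("hex");

// Placeholders for a single loader invocation:
const collectionName = "docs";
const filePath = "content/docs/index.mdx";
const source = "# Hello";

const cacheKey = `${collectionName}_${md5(filePath)}`;         // file name inside experimentalBuildCache
const entry = { code: "/* compiled MDX output */", hash: md5(source) };
const isFresh = entry.hash === md5(source);                    // recompile only when this is false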
@@ -0,0 +1,81 @@
+// src/utils/collections.ts
+import picomatch from "picomatch";
+import { glob } from "tinyglobby";
+import path from "path";
+var SupportedFormats = {
+  doc: ["mdx", "md"],
+  meta: ["json", "yaml"]
+};
+function getGlobPatterns(collection) {
+  if (collection.files) return collection.files;
+  return [`**/*.{${SupportedFormats[collection.type].join(",")}}`];
+}
+function isFileSupported(filePath, collection) {
+  return SupportedFormats[collection.type].some(
+    (format) => filePath.endsWith(`.${format}`)
+  );
+}
+function createCollectionMatcher(core) {
+  const CacheKey = "collection-matcher";
+  return {
+    scan(config) {
+      const scanned = [];
+      function scan(name, collection) {
+        const patterns = getGlobPatterns(collection);
+        for (const dir of Array.isArray(collection.dir) ? collection.dir : [collection.dir]) {
+          scanned.push({
+            name,
+            collection,
+            matcher: picomatch(patterns, {
+              cwd: dir
+            })
+          });
+        }
+      }
+      for (const [name, collection] of config.collections) {
+        if (collection.type === "docs") {
+          scan(name, collection.meta);
+          scan(name, collection.docs);
+        } else {
+          scan(name, collection);
+        }
+      }
+      return scanned;
+    },
+    getFileCollection(file) {
+      const scanned = core.cache.get(CacheKey) ?? this.scan(core.getConfig());
+      core.cache.set(CacheKey, scanned);
+      for (const item of scanned) {
+        if (isFileSupported(file, item.collection) && item.matcher(file))
+          return { name: item.name, collection: item.collection };
+      }
+    }
+  };
+}
+async function getCollectionFiles(collection) {
+  const files = /* @__PURE__ */ new Map();
+  const dirs = Array.isArray(collection.dir) ? collection.dir : [collection.dir];
+  const patterns = getGlobPatterns(collection);
+  await Promise.all(
+    dirs.map(async (dir) => {
+      const result = await glob(patterns, {
+        cwd: path.resolve(dir)
+      });
+      for (const item of result) {
+        if (!isFileSupported(item, collection)) continue;
+        const fullPath = path.join(dir, item);
+        files.set(fullPath, {
+          path: item,
+          fullPath
+        });
+      }
+    })
+  );
+  return Array.from(files.values());
+}
+
+export {
+  getGlobPatterns,
+  createCollectionMatcher,
+  getCollectionFiles
+};
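
Illustrative sketch (not part of the diff): the collection helpers now live in their own chunk and are table-driven by SupportedFormats. What they return for typical inputs (the declarations below only mirror the signatures shown above; the file names are examples):

declare function getGlobPatterns(collection: { type: "doc" | "meta"; files?: string[] }): string[];
declare function isFileSupported(filePath: string, collection: { type: "doc" | "meta" }): boolean;

getGlobPatterns({ type: "doc" });                           // ["**/*.{mdx,md}"]
getGlobPatterns({ type: "meta", files: ["**/meta.json"] }); // ["**/meta.json"] — explicit files win
isFileSupported("docs/index.mdx", { type: "doc" });         // true
isFileSupported("docs/meta.json", { type: "doc" });         // false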
@@ -1,6 +1,6 @@
 import {
   ValidationError
-} from "./chunk-IQAEAI4P.js";
+} from "./chunk-EELYB2XC.js";
 
 // src/loaders/adapter.ts
 import { fileURLToPath } from "url";