fumadocs-mdx 11.6.6 → 11.6.8

package/bin.js CHANGED
@@ -2,4 +2,4 @@

  import { postInstall } from './dist/next/index.js';

- void postInstall(process.argv[2]);
+ void postInstall(process.argv[2], process.argv[3]);
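For orientation, a minimal sketch of what the updated bin now does: the second CLI argument is forwarded as the output directory, matching the `postInstall(configPath?, outDir?)` signature further down in this diff. The `fumadocs-mdx/next` import path is an assumption based on bin.js resolving `./dist/next/index.js`; both arguments stay optional.

```ts
// Sketch only: roughly what bin.js does with process.argv[2] and process.argv[3].
// Defaults (per dist/next below): configPath = findConfigFile(), outDir = ".source".
import { postInstall } from 'fumadocs-mdx/next';

void postInstall('source.config.ts', '.source');
```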
@@ -1,8 +1,11 @@
+ import {
+ fumaMatter
+ } from "./chunk-MXACIHNJ.js";
+
  // src/mdx-plugins/remark-include.ts
  import { visit } from "unist-util-visit";
  import * as path from "path";
  import * as fs from "fs/promises";
- import matter from "gray-matter";
  function flattenNode(node) {
  if ("children" in node)
  return node.children.map((child) => flattenNode(child)).join("");
@@ -37,7 +40,7 @@ function remarkInclude() {
  );
  const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
  queue.push(
- fs.readFile(targetPath).then(async (content) => {
+ fs.readFile(targetPath).then((buffer) => buffer.toString()).then(async (content) => {
  compiler?.addDependency(targetPath);
  if (asCode) {
  const lang = params.lang ?? path.extname(specifier).slice(1);
@@ -50,14 +53,17 @@ function remarkInclude() {
  });
  return;
  }
- const parsed = processor.parse(matter(content).content);
+ const parsed = processor.parse(fumaMatter(content).content);
  await update(parsed, targetPath, processor, compiler);
  Object.assign(
  parent && parent.type === "paragraph" ? parent : node,
  parsed
  );
  }).catch((e) => {
- console.warn(`failed to read file: ${targetPath}`, e);
+ throw new Error(
+ `failed to read file ${targetPath}
+ ${e instanceof Error ? e.message : String(e)}`
+ );
  })
  );
  return "skip";
@@ -0,0 +1,40 @@
+ // src/utils/fuma-matter.ts
+ import { LRUCache } from "lru-cache";
+ import { load } from "js-yaml";
+ var cache = new LRUCache({
+ max: 200
+ });
+ function fumaMatter(input) {
+ if (input === "") {
+ return { data: {}, content: input, matter: "" };
+ }
+ const cached = cache.get(input);
+ if (cached) return cached;
+ const result = parseMatter(input);
+ cache.set(input, result);
+ return structuredClone(result);
+ }
+ var delimiter = "---";
+ function parseMatter(str) {
+ const output = { matter: "", data: {}, content: str };
+ const open = delimiter + "\n";
+ const close = "\n" + delimiter;
+ if (!str.startsWith(open)) {
+ return output;
+ }
+ str = str.slice(open.length);
+ const len = str.length;
+ let closeIdx = str.indexOf(close);
+ if (closeIdx === -1) {
+ closeIdx = len;
+ }
+ output.matter = str.slice(0, closeIdx);
+ output.content = str.slice(closeIdx + close.length);
+ const loaded = load(output.matter);
+ output.data = loaded ?? {};
+ return output;
+ }
+
+ export {
+ fumaMatter
+ };
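A behavioural sketch of the new `fumaMatter` helper added in this chunk (an internal replacement for `gray-matter`, not a public export): YAML frontmatter delimited by `---` lines is parsed with `js-yaml`, and results are memoised in a 200-entry LRU cache keyed by the raw input string. The sample input and expected shape below are illustrative only.

```ts
// Illustrative input/output for fumaMatter as implemented above.
const input = '---\ntitle: Hello\n---\n\n# Heading';

// fumaMatter(input) yields roughly:
// {
//   matter: 'title: Hello',     // raw YAML block without the delimiters
//   data: { title: 'Hello' },   // js-yaml load() result, or {} when empty
//   content: '\n\n# Heading',   // everything after the closing ---
// }
// Strings that do not start with "---\n" are returned unchanged with empty data.
```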
@@ -1,20 +1,24 @@
  // src/runtime/index.ts
  import fs from "fs";
+ var cache = /* @__PURE__ */ new Map();
  var _runtime = {
  doc(files) {
  return files.map((file) => {
  const { default: body, frontmatter, ...exports } = file.data;
- let cachedContent;
  return {
  body,
  ...exports,
  ...frontmatter,
- get content() {
- cachedContent ??= fs.readFileSync(file.info.absolutePath).toString();
- return cachedContent;
- },
+ _file: file.info,
  _exports: file.data,
- _file: file.info
+ get content() {
+ const path = this._file.absolutePath;
+ const cached = cache.get(path);
+ if (cached) return cached;
+ const content = fs.readFileSync(path).toString();
+ cache.set(path, content);
+ return content;
+ }
  };
  });
  },
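The runtime hunk above replaces the per-object `cachedContent` closure with a module-level `Map` keyed by absolute path, so doc objects pointing at the same file share a single disk read. A small standalone sketch of that pattern (not the actual export):

```ts
import fs from 'node:fs';

// Shared cache, mirroring `var cache = new Map()` above.
const contentCache = new Map<string, string>();

function lazyContent(absolutePath: string): string {
  const cached = contentCache.get(absolutePath);
  if (cached !== undefined) return cached;
  const content = fs.readFileSync(absolutePath).toString(); // read once per path
  contentCache.set(absolutePath, content);
  return content;
}
```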
@@ -51,6 +55,7 @@ function resolveFiles({ docs, meta }) {
  for (const entry of docs) {
  outputs.push({
  type: "page",
+ absolutePath: entry._file.absolutePath,
  path: entry._file.path,
  data: entry
  });
@@ -58,6 +63,7 @@ function resolveFiles({ docs, meta }) {
  for (const entry of meta) {
  outputs.push({
  type: "meta",
+ absolutePath: entry._file.absolutePath,
  path: entry._file.path,
  data: entry
  });
@@ -30,12 +30,12 @@ async function compileConfig(configPath, outDir) {
  throw new Error("failed to compile configuration file");
  }
  }
- async function loadConfig(configPath, hash, build = false) {
+ async function loadConfig(configPath, outDir, hash, build = false) {
  if (cache && cache.hash === hash) {
  return await cache.config;
  }
- if (build) await compileConfig(configPath, ".source");
- const url = pathToFileURL(path.resolve(".source/source.config.mjs"));
+ if (build) await compileConfig(configPath, outDir);
+ const url = pathToFileURL(path.resolve(outDir, "source.config.mjs"));
  const config = import(`${url.href}?hash=${hash}`).then((loaded) => {
  const [err, config2] = buildConfig(
  // every call to `loadConfig` will cause the previous cache to be ignored
@@ -213,7 +213,45 @@ function getDefaultMDXOptions({
  var import_unist_util_visit = require("unist-util-visit");
  var path = __toESM(require("path"), 1);
  var fs = __toESM(require("fs/promises"), 1);
- var import_gray_matter = __toESM(require("gray-matter"), 1);
+
+ // src/utils/fuma-matter.ts
+ var import_lru_cache = require("lru-cache");
+ var import_js_yaml = require("js-yaml");
+ var cache = new import_lru_cache.LRUCache({
+ max: 200
+ });
+ function fumaMatter(input) {
+ if (input === "") {
+ return { data: {}, content: input, matter: "" };
+ }
+ const cached = cache.get(input);
+ if (cached) return cached;
+ const result = parseMatter(input);
+ cache.set(input, result);
+ return structuredClone(result);
+ }
+ var delimiter = "---";
+ function parseMatter(str) {
+ const output = { matter: "", data: {}, content: str };
+ const open = delimiter + "\n";
+ const close = "\n" + delimiter;
+ if (!str.startsWith(open)) {
+ return output;
+ }
+ str = str.slice(open.length);
+ const len = str.length;
+ let closeIdx = str.indexOf(close);
+ if (closeIdx === -1) {
+ closeIdx = len;
+ }
+ output.matter = str.slice(0, closeIdx);
+ output.content = str.slice(closeIdx + close.length);
+ const loaded = (0, import_js_yaml.load)(output.matter);
+ output.data = loaded ?? {};
+ return output;
+ }
+
+ // src/mdx-plugins/remark-include.ts
  function flattenNode(node) {
  if ("children" in node)
  return node.children.map((child) => flattenNode(child)).join("");
@@ -248,7 +286,7 @@ function remarkInclude() {
  );
  const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
  queue.push(
- fs.readFile(targetPath).then(async (content) => {
+ fs.readFile(targetPath).then((buffer) => buffer.toString()).then(async (content) => {
  compiler?.addDependency(targetPath);
  if (asCode) {
  const lang = params.lang ?? path.extname(specifier).slice(1);
@@ -261,14 +299,17 @@ function remarkInclude() {
  });
  return;
  }
- const parsed = processor.parse((0, import_gray_matter.default)(content).content);
+ const parsed = processor.parse(fumaMatter(content).content);
  await update(parsed, targetPath, processor, compiler);
  Object.assign(
  parent && parent.type === "paragraph" ? parent : node,
  parsed
  );
  }).catch((e) => {
- console.warn(`failed to read file: ${targetPath}`, e);
+ throw new Error(
+ `failed to read file ${targetPath}
+ ${e instanceof Error ? e.message : String(e)}`
+ );
  })
  );
  return "skip";
@@ -4,7 +4,8 @@ import {
  } from "../chunk-OTM6WYMS.js";
  import {
  remarkInclude
- } from "../chunk-3UUEUK4M.js";
+ } from "../chunk-KTLWF7GN.js";
+ import "../chunk-MXACIHNJ.js";
  import {
  getDefaultMDXOptions
  } from "../chunk-VC3Y6FLZ.js";
package/dist/index.cjs CHANGED
@@ -38,21 +38,25 @@ module.exports = __toCommonJS(index_exports);

  // src/runtime/index.ts
  var import_node_fs = __toESM(require("fs"), 1);
+ var cache = /* @__PURE__ */ new Map();
  var _runtime = {
  doc(files) {
  return files.map((file) => {
  const { default: body, frontmatter, ...exports2 } = file.data;
- let cachedContent;
  return {
  body,
  ...exports2,
  ...frontmatter,
- get content() {
- cachedContent ??= import_node_fs.default.readFileSync(file.info.absolutePath).toString();
- return cachedContent;
- },
+ _file: file.info,
  _exports: file.data,
- _file: file.info
+ get content() {
+ const path = this._file.absolutePath;
+ const cached = cache.get(path);
+ if (cached) return cached;
+ const content = import_node_fs.default.readFileSync(path).toString();
+ cache.set(path, content);
+ return content;
+ }
  };
  });
  },
@@ -89,6 +93,7 @@ function resolveFiles({ docs, meta }) {
  for (const entry of docs) {
  outputs.push({
  type: "page",
+ absolutePath: entry._file.absolutePath,
  path: entry._file.path,
  data: entry
  });
@@ -96,6 +101,7 @@ function resolveFiles({ docs, meta }) {
  for (const entry of meta) {
  outputs.push({
  type: "meta",
+ absolutePath: entry._file.absolutePath,
  path: entry._file.path,
  data: entry
  });
package/dist/index.js CHANGED
@@ -2,7 +2,7 @@ import {
  _runtime,
  createMDXSource,
  resolveFiles
- } from "./chunk-7SSA5RCV.js";
+ } from "./chunk-NUDEC6C5.js";
  export {
  _runtime,
  createMDXSource,
@@ -165,7 +165,6 @@ __export(loader_mdx_exports, {
  });
  module.exports = __toCommonJS(loader_mdx_exports);
  var path4 = __toESM(require("path"), 1);
  var import_node_querystring = require("querystring");
- var import_gray_matter2 = __toESM(require("gray-matter"), 1);

  // src/utils/config.ts
  var fs = __toESM(require("fs/promises"), 1);
@@ -230,12 +229,12 @@ async function compileConfig(configPath, outDir) {
  throw new Error("failed to compile configuration file");
  }
  }
- async function loadConfig(configPath, hash, build = false) {
+ async function loadConfig(configPath, outDir, hash, build = false) {
  if (cache && cache.hash === hash) {
  return await cache.config;
  }
- if (build) await compileConfig(configPath, ".source");
- const url = (0, import_node_url.pathToFileURL)(path.resolve(".source/source.config.mjs"));
+ if (build) await compileConfig(configPath, outDir);
+ const url = (0, import_node_url.pathToFileURL)(path.resolve(outDir, "source.config.mjs"));
  const config = import(`${url.href}?hash=${hash}`).then((loaded) => {
  const [err, config2] = buildConfig(
  // every call to `loadConfig` will cause the previous cache to be ignored
@@ -262,7 +261,45 @@ var import_mdx = require("@mdx-js/mdx");
  var import_unist_util_visit = require("unist-util-visit");
  var path2 = __toESM(require("path"), 1);
  var fs2 = __toESM(require("fs/promises"), 1);
- var import_gray_matter = __toESM(require("gray-matter"), 1);
+
+ // src/utils/fuma-matter.ts
+ var import_lru_cache = require("lru-cache");
+ var import_js_yaml = require("js-yaml");
+ var cache2 = new import_lru_cache.LRUCache({
+ max: 200
+ });
+ function fumaMatter(input) {
+ if (input === "") {
+ return { data: {}, content: input, matter: "" };
+ }
+ const cached = cache2.get(input);
+ if (cached) return cached;
+ const result = parseMatter(input);
+ cache2.set(input, result);
+ return structuredClone(result);
+ }
+ var delimiter = "---";
+ function parseMatter(str) {
+ const output = { matter: "", data: {}, content: str };
+ const open = delimiter + "\n";
+ const close = "\n" + delimiter;
+ if (!str.startsWith(open)) {
+ return output;
+ }
+ str = str.slice(open.length);
+ const len = str.length;
+ let closeIdx = str.indexOf(close);
+ if (closeIdx === -1) {
+ closeIdx = len;
+ }
+ output.matter = str.slice(0, closeIdx);
+ output.content = str.slice(closeIdx + close.length);
+ const loaded = (0, import_js_yaml.load)(output.matter);
+ output.data = loaded ?? {};
+ return output;
+ }
+
+ // src/mdx-plugins/remark-include.ts
  function flattenNode(node) {
  if ("children" in node)
  return node.children.map((child) => flattenNode(child)).join("");
@@ -297,7 +334,7 @@ function remarkInclude() {
  );
  const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
  queue.push(
- fs2.readFile(targetPath).then(async (content) => {
+ fs2.readFile(targetPath).then((buffer) => buffer.toString()).then(async (content) => {
  compiler?.addDependency(targetPath);
  if (asCode) {
  const lang = params.lang ?? path2.extname(specifier).slice(1);
@@ -310,14 +347,17 @@ function remarkInclude() {
  });
  return;
  }
- const parsed = processor.parse((0, import_gray_matter.default)(content).content);
+ const parsed = processor.parse(fumaMatter(content).content);
  await update(parsed, targetPath, processor, compiler);
  Object.assign(
  parent && parent.type === "paragraph" ? parent : node,
  parsed
  );
  }).catch((e) => {
- console.warn(`failed to read file: ${targetPath}`, e);
+ throw new Error(
+ `failed to read file ${targetPath}
+ ${e instanceof Error ? e.message : String(e)}`
+ );
  })
  );
  return "skip";
@@ -331,7 +371,7 @@ function remarkInclude() {
  }

  // src/utils/build-mdx.ts
- var cache2 = /* @__PURE__ */ new Map();
+ var cache3 = /* @__PURE__ */ new Map();
  async function buildMDX(cacheKey, source, options) {
  const { filePath, frontmatter, data, ...rest } = options;
  let format = options.format;
@@ -340,7 +380,7 @@ async function buildMDX(cacheKey, source, options) {
  }
  format ??= "mdx";
  const key = `${cacheKey}:${format}`;
- let cached = cache2.get(key);
+ let cached = cache3.get(key);
  if (cached === void 0) {
  cached = (0, import_mdx.createProcessor)({
  outputFormat: "program",
@@ -349,7 +389,7 @@ async function buildMDX(cacheKey, source, options) {
  remarkPlugins: [remarkInclude, ...rest.remarkPlugins ?? []],
  format
  });
- cache2.set(key, cached);
+ cache3.set(key, cached);
  }
  return cached.process({
  value: source,
@@ -365,9 +405,9 @@ async function buildMDX(cacheKey, source, options) {
  // src/utils/git-timestamp.ts
  var import_node_path = __toESM(require("path"), 1);
  var import_tinyexec = require("tinyexec");
- var cache3 = /* @__PURE__ */ new Map();
+ var cache4 = /* @__PURE__ */ new Map();
  async function getGitTimestamp(file) {
- const cached = cache3.get(file);
+ const cached = cache4.get(file);
  if (cached) return cached;
  try {
  const out = await (0, import_tinyexec.x)(
@@ -378,7 +418,7 @@ async function getGitTimestamp(file) {
  }
  );
  const time = new Date(out.stdout);
- cache3.set(file, time);
+ cache4.set(file, time);
  return time;
  } catch {
  return;
@@ -445,24 +485,25 @@ async function loader(source, callback) {
  this.cacheable(true);
  const context = this.context;
  const filePath = this.resourcePath;
- const { configPath } = this.getOptions();
- const matter2 = (0, import_gray_matter2.default)(source);
+ const { configPath, outDir } = this.getOptions();
+ const matter = fumaMatter(source);
  const {
  hash: configHash = await getConfigHash(configPath),
  collection: collectionId
  } = (0, import_node_querystring.parse)(this.resourceQuery.slice(1));
- const config = await loadConfig(configPath, configHash);
+ const config = await loadConfig(configPath, outDir, configHash);
  let collection = collectionId !== void 0 ? config.collections.get(collectionId) : void 0;
  if (collection && collection.type === "docs") collection = collection.docs;
  if (collection && collection.type !== "doc") {
  collection = void 0;
  }
+ let data = matter.data;
  const mdxOptions = collection?.mdxOptions ?? await loadDefaultOptions(config);
  if (collection?.schema) {
  try {
- matter2.data = await validate(
+ data = await validate(
  collection.schema,
- matter2.data,
+ matter.data,
  {
  source,
  path: filePath
@@ -482,16 +523,16 @@ async function loader(source, callback) {
  }
  try {
  const lineOffset = "\n".repeat(
- this.mode === "development" ? lines(matter2.matter) : 0
+ this.mode === "development" ? lines(matter.matter) : 0
  );
  const file = await buildMDX(
  `${configHash}:${collectionId ?? "global"}`,
- lineOffset + matter2.content,
+ lineOffset + matter.content,
  {
  development: this.mode === "development",
  ...mdxOptions,
  filePath,
- frontmatter: matter2.data,
+ frontmatter: data,
  data: {
  lastModified: timestamp
  },
@@ -2,6 +2,7 @@ import { LoaderContext } from 'webpack';

  interface Options {
  configPath: string;
+ outDir: string;
  }
  /**
  * Load MDX/markdown files
@@ -2,6 +2,7 @@ import { LoaderContext } from 'webpack';

  interface Options {
  configPath: string;
+ outDir: string;
  }
  /**
  * Load MDX/markdown files
@@ -2,20 +2,22 @@ import {
  getConfigHash,
  getGitTimestamp,
  loadConfig
- } from "./chunk-GWITJLOZ.js";
+ } from "./chunk-SXOJYWZ3.js";
  import {
  ValidationError,
  validate
  } from "./chunk-OTM6WYMS.js";
  import {
  remarkInclude
- } from "./chunk-3UUEUK4M.js";
+ } from "./chunk-KTLWF7GN.js";
  import "./chunk-DRVUBK5B.js";
+ import {
+ fumaMatter
+ } from "./chunk-MXACIHNJ.js";

  // src/loader-mdx.ts
  import * as path from "path";
  import { parse } from "querystring";
- import grayMatter from "gray-matter";

  // src/utils/build-mdx.ts
  import { createProcessor } from "@mdx-js/mdx";
@@ -55,22 +57,23 @@ async function loader(source, callback) {
  this.cacheable(true);
  const context = this.context;
  const filePath = this.resourcePath;
- const { configPath } = this.getOptions();
- const matter = grayMatter(source);
+ const { configPath, outDir } = this.getOptions();
+ const matter = fumaMatter(source);
  const {
  hash: configHash = await getConfigHash(configPath),
  collection: collectionId
  } = parse(this.resourceQuery.slice(1));
- const config = await loadConfig(configPath, configHash);
+ const config = await loadConfig(configPath, outDir, configHash);
  let collection = collectionId !== void 0 ? config.collections.get(collectionId) : void 0;
  if (collection && collection.type === "docs") collection = collection.docs;
  if (collection && collection.type !== "doc") {
  collection = void 0;
  }
+ let data = matter.data;
  const mdxOptions = collection?.mdxOptions ?? await loadDefaultOptions(config);
  if (collection?.schema) {
  try {
- matter.data = await validate(
+ data = await validate(
  collection.schema,
  matter.data,
  {
@@ -101,7 +104,7 @@ async function loader(source, callback) {
  development: this.mode === "development",
  ...mdxOptions,
  filePath,
- frontmatter: matter.data,
+ frontmatter: data,
  data: {
  lastModified: timestamp
  },
@@ -69,9 +69,6 @@ __export(next_exports, {
  });
  module.exports = __toCommonJS(next_exports);

- // src/next/create.ts
- var import_node_path3 = __toESM(require("path"), 1);
-
  // src/utils/config.ts
  var fs = __toESM(require("fs/promises"), 1);
  var path = __toESM(require("path"), 1);
@@ -118,12 +115,12 @@ function findConfigFile() {
  return path.resolve("source.config.ts");
  }
  var cache = null;
- async function compileConfig(configPath, outDir2) {
+ async function compileConfig(configPath, outDir) {
  const { build } = await import("esbuild");
  const transformed = await build({
  entryPoints: [{ in: configPath, out: "source.config" }],
  bundle: true,
- outdir: outDir2,
+ outdir: outDir,
  target: "node18",
  write: true,
  platform: "node",
@@ -138,12 +135,12 @@ async function compileConfig(configPath, outDir2) {
  throw new Error("failed to compile configuration file");
  }
  }
- async function loadConfig(configPath, hash, build = false) {
+ async function loadConfig(configPath, outDir, hash, build = false) {
  if (cache && cache.hash === hash) {
  return await cache.config;
  }
- if (build) await compileConfig(configPath, ".source");
- const url = (0, import_node_url.pathToFileURL)(path.resolve(".source/source.config.mjs"));
+ if (build) await compileConfig(configPath, outDir);
+ const url = (0, import_node_url.pathToFileURL)(path.resolve(outDir, "source.config.mjs"));
  const config = import(`${url.href}?hash=${hash}`).then((loaded) => {
  const [err, config2] = buildConfig(
  // every call to `loadConfig` will cause the previous cache to be ignored
@@ -176,8 +173,8 @@ var import_tinyglobby = require("tinyglobby");
  var import_node_path = require("path");
  var docTypes = [".mdx", ".md"];
  var metaTypes = [".json", ".yaml"];
- function getTypeFromPath(path7) {
- const ext = (0, import_node_path.extname)(path7);
+ function getTypeFromPath(path6) {
+ const ext = (0, import_node_path.extname)(path6);
  if (docTypes.includes(ext)) return "doc";
  if (metaTypes.includes(ext)) return "meta";
  }
@@ -243,23 +240,22 @@ var map = new import_lru_cache.LRUCache({
  max: 200
  });
  var fileCache = {
- read(namespace, path7) {
- return map.get(`${namespace}.${path7}`);
+ read(namespace, path6) {
+ return map.get(`${namespace}.${path6}`);
  },
- write(namespace, path7, data) {
- map.set(`${namespace}.${path7}`, data);
+ write(namespace, path6, data) {
+ map.set(`${namespace}.${path6}`, data);
  },
- removeCache(path7) {
+ removeCache(path6) {
  for (const key of map.keys()) {
  const keyPath = key.slice(key.indexOf(".") + 1);
- if (keyPath === path7) map.delete(key);
+ if (keyPath === path6) map.delete(key);
  }
  }
  };

  // src/map/generate.ts
- var import_gray_matter = __toESM(require("gray-matter"), 1);
- var import_js_yaml = require("js-yaml");
+ var import_js_yaml2 = require("js-yaml");

  // src/utils/git-timestamp.ts
  var import_node_path2 = __toESM(require("path"), 1);
@@ -284,6 +280,43 @@ async function getGitTimestamp(file) {
  }
  }

+ // src/utils/fuma-matter.ts
+ var import_lru_cache2 = require("lru-cache");
+ var import_js_yaml = require("js-yaml");
+ var cache3 = new import_lru_cache2.LRUCache({
+ max: 200
+ });
+ function fumaMatter(input) {
+ if (input === "") {
+ return { data: {}, content: input, matter: "" };
+ }
+ const cached = cache3.get(input);
+ if (cached) return cached;
+ const result = parseMatter(input);
+ cache3.set(input, result);
+ return structuredClone(result);
+ }
+ var delimiter = "---";
+ function parseMatter(str) {
+ const output = { matter: "", data: {}, content: str };
+ const open = delimiter + "\n";
+ const close = "\n" + delimiter;
+ if (!str.startsWith(open)) {
+ return output;
+ }
+ str = str.slice(open.length);
+ const len = str.length;
+ let closeIdx = str.indexOf(close);
+ if (closeIdx === -1) {
+ closeIdx = len;
+ }
+ output.matter = str.slice(0, closeIdx);
+ output.content = str.slice(closeIdx + close.length);
+ const loaded = (0, import_js_yaml.load)(output.matter);
+ output.data = loaded ?? {};
+ return output;
+ }
+
  // src/map/generate.ts
  async function readFileWithCache(file) {
  const cached = fileCache.read("read-file", file);
@@ -291,7 +324,7 @@ async function readFileWithCache(file) {
  return (await fs2.readFile(file)).toString();
  }
  async function generateJS(configPath, config, outputPath, configHash) {
- const outDir2 = path3.dirname(outputPath);
+ const outDir = path3.dirname(outputPath);
  let asyncInit = false;
  const lines = [
  getImportCode({
@@ -301,7 +334,7 @@ async function generateJS(configPath, config, outputPath, configHash) {
  }),
  getImportCode({
  type: "namespace",
- specifier: toImportPath(configPath, outDir2),
+ specifier: toImportPath(configPath, outDir),
  name: "_source"
  })
  ];
@@ -313,7 +346,7 @@ async function generateJS(configPath, config, outputPath, configHash) {
  getImportCode({
  type: "namespace",
  name: importId,
- specifier: `${toImportPath(file.absolutePath, outDir2)}?collection=${collectionName}&hash=${configHash}`
+ specifier: `${toImportPath(file.absolutePath, outDir)}?collection=${collectionName}&hash=${configHash}`
  })
  );
  return `{ info: ${JSON.stringify(file)}, data: ${importId} }`;
@@ -356,11 +389,12 @@ async function generateJS(configPath, config, outputPath, configHash) {
  asyncInit = true;
  }
  const entries2 = files.map(async (file) => {
- const parsed = (0, import_gray_matter.default)(
+ const parsed = fumaMatter(
  await readFileWithCache(file.absolutePath).catch(() => "")
  );
+ let data = parsed.data;
  if (collection.schema) {
- parsed.data = await validate(
+ data = await validate(
  collection.schema,
  parsed.data,
  { path: file.absolutePath, source: parsed.content },
@@ -374,7 +408,7 @@ async function generateJS(configPath, config, outputPath, configHash) {
  return JSON.stringify({
  info: file,
  lastModified,
- data: parsed.data,
+ data,
  content: parsed.content
  });
  });
@@ -457,7 +491,7 @@ function parseMetaEntry(file, content) {
  const extname3 = path3.extname(file);
  try {
  if (extname3 === ".json") return JSON.parse(content);
- if (extname3 === ".yaml") return (0, import_js_yaml.load)(content);
+ if (extname3 === ".yaml") return (0, import_js_yaml2.load)(content);
  } catch (e) {
  throw new Error(`Failed to parse meta file: ${file}.`, {
  cause: e
@@ -467,10 +501,10 @@ function parseMetaEntry(file, content) {
  }

  // src/map/index.ts
- async function start(dev, configPath, outDir2) {
+ async function start(dev, configPath, outDir) {
  let configHash = await getConfigHash(configPath);
- let config = await loadConfig(configPath, configHash, true);
- const outPath = path4.resolve(outDir2, `index.ts`);
+ let config = await loadConfig(configPath, outDir, configHash, true);
+ const outPath = path4.resolve(outDir, `index.ts`);
  async function updateMapFile() {
  const start2 = performance.now();
  try {
@@ -501,7 +535,7 @@ async function start(dev, configPath, outDir2) {
  const isConfigFile = absolutePath === configPath;
  if (isConfigFile) {
  configHash = await getConfigHash(configPath);
- config = await loadConfig(configPath, configHash, true);
+ config = await loadConfig(configPath, outDir, configHash, true);
  }
  if (event === "change") fileCache.removeCache(absolutePath);
  await updateMapFile();
@@ -516,10 +550,10 @@ async function start(dev, configPath, outDir2) {
  }

  // src/next/create.ts
- var outDir = import_node_path3.default.resolve(".source");
  var defaultPageExtensions = ["mdx", "md", "jsx", "js", "tsx", "ts"];
  function createMDX({
- configPath = findConfigFile()
+ configPath = findConfigFile(),
+ outDir = ".source"
  } = {}) {
  const isDev = process.argv.includes("dev");
  const isBuild = process.argv.includes("build");
@@ -529,7 +563,8 @@ function createMDX({
  }
  return (nextConfig = {}) => {
  const mdxLoaderOptions = {
- configPath
+ configPath,
+ outDir
  };
  return {
  ...nextConfig,
@@ -572,14 +607,14 @@ function createMDX({
  }

  // src/postinstall.ts
- var path6 = __toESM(require("path"), 1);
+ var path5 = __toESM(require("path"), 1);
  var fs4 = __toESM(require("fs/promises"), 1);
- async function postInstall(configPath = findConfigFile()) {
- const jsOut = path6.resolve(".source/index.ts");
+ async function postInstall(configPath = findConfigFile(), outDir = ".source") {
+ const jsOut = path5.resolve(outDir, "index.ts");
  const hash = await getConfigHash(configPath);
- const config = await loadConfig(configPath, hash, true);
- await fs4.rm(path6.dirname(jsOut), { recursive: true });
- await fs4.mkdir(path6.dirname(jsOut), { recursive: true });
+ const config = await loadConfig(configPath, outDir, hash, true);
+ await fs4.rm(path5.dirname(jsOut), { recursive: true });
+ await fs4.mkdir(path5.dirname(jsOut), { recursive: true });
  await fs4.writeFile(jsOut, await generateJS(configPath, config, jsOut, hash));
  console.log("[MDX] types generated");
  }
@@ -12,10 +12,16 @@ interface CreateMDXOptions {
  * Path to source configuration file
  */
  configPath?: string;
+ /**
+ * Directory for output files
+ *
+ * @defaultValue '.source'
+ */
+ outDir?: string;
  }

- declare function createMDX({ configPath, }?: CreateMDXOptions): (nextConfig?: NextConfig) => NextConfig;
+ declare function createMDX({ configPath, outDir, }?: CreateMDXOptions): (nextConfig?: NextConfig) => NextConfig;

- declare function postInstall(configPath?: string): Promise<void>;
+ declare function postInstall(configPath?: string, outDir?: string): Promise<void>;

  export { type CreateMDXOptions, createMDX, postInstall, start };
@@ -12,10 +12,16 @@ interface CreateMDXOptions {
  * Path to source configuration file
  */
  configPath?: string;
+ /**
+ * Directory for output files
+ *
+ * @defaultValue '.source'
+ */
+ outDir?: string;
  }

- declare function createMDX({ configPath, }?: CreateMDXOptions): (nextConfig?: NextConfig) => NextConfig;
+ declare function createMDX({ configPath, outDir, }?: CreateMDXOptions): (nextConfig?: NextConfig) => NextConfig;

- declare function postInstall(configPath?: string): Promise<void>;
+ declare function postInstall(configPath?: string, outDir?: string): Promise<void>;

  export { type CreateMDXOptions, createMDX, postInstall, start };
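The typings above add the new `outDir` option (default `'.source'`) alongside `configPath`. A minimal `next.config.mjs` sketch using it, assuming the usual fumadocs-mdx Next.js setup; the values shown are the defaults:

```ts
import { createMDX } from 'fumadocs-mdx/next';

const withMDX = createMDX({
  configPath: 'source.config.ts', // default: findConfigFile()
  outDir: '.source',              // default: '.source'
});

/** @type {import('next').NextConfig} */
const config = { reactStrictMode: true };

export default withMDX(config);
```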
@@ -3,15 +3,15 @@ import {
  getConfigHash,
  getGitTimestamp,
  loadConfig
- } from "../chunk-GWITJLOZ.js";
+ } from "../chunk-SXOJYWZ3.js";
  import {
  ValidationError,
  validate
  } from "../chunk-OTM6WYMS.js";
  import "../chunk-DRVUBK5B.js";
-
- // src/next/create.ts
- import path3 from "path";
+ import {
+ fumaMatter
+ } from "../chunk-MXACIHNJ.js";

  // src/map/index.ts
  import * as path2 from "path";
@@ -26,8 +26,8 @@ import { glob } from "tinyglobby";
  import { extname } from "path";
  var docTypes = [".mdx", ".md"];
  var metaTypes = [".json", ".yaml"];
- function getTypeFromPath(path5) {
- const ext = extname(path5);
+ function getTypeFromPath(path4) {
+ const ext = extname(path4);
  if (docTypes.includes(ext)) return "doc";
  if (metaTypes.includes(ext)) return "meta";
  }
@@ -38,22 +38,21 @@ var map = new LRUCache({
  max: 200
  });
  var fileCache = {
- read(namespace, path5) {
- return map.get(`${namespace}.${path5}`);
+ read(namespace, path4) {
+ return map.get(`${namespace}.${path4}`);
  },
- write(namespace, path5, data) {
- map.set(`${namespace}.${path5}`, data);
+ write(namespace, path4, data) {
+ map.set(`${namespace}.${path4}`, data);
  },
- removeCache(path5) {
+ removeCache(path4) {
  for (const key of map.keys()) {
  const keyPath = key.slice(key.indexOf(".") + 1);
- if (keyPath === path5) map.delete(key);
+ if (keyPath === path4) map.delete(key);
  }
  }
  };

  // src/map/generate.ts
- import matter from "gray-matter";
  import { load } from "js-yaml";
  async function readFileWithCache(file) {
  const cached = fileCache.read("read-file", file);
@@ -61,7 +60,7 @@ async function readFileWithCache(file) {
  return (await fs.readFile(file)).toString();
  }
  async function generateJS(configPath, config, outputPath, configHash) {
- const outDir2 = path.dirname(outputPath);
+ const outDir = path.dirname(outputPath);
  let asyncInit = false;
  const lines = [
  getImportCode({
@@ -71,7 +70,7 @@ async function generateJS(configPath, config, outputPath, configHash) {
  }),
  getImportCode({
  type: "namespace",
- specifier: toImportPath(configPath, outDir2),
+ specifier: toImportPath(configPath, outDir),
  name: "_source"
  })
  ];
@@ -83,7 +82,7 @@ async function generateJS(configPath, config, outputPath, configHash) {
  getImportCode({
  type: "namespace",
  name: importId,
- specifier: `${toImportPath(file.absolutePath, outDir2)}?collection=${collectionName}&hash=${configHash}`
+ specifier: `${toImportPath(file.absolutePath, outDir)}?collection=${collectionName}&hash=${configHash}`
  })
  );
  return `{ info: ${JSON.stringify(file)}, data: ${importId} }`;
@@ -126,11 +125,12 @@ async function generateJS(configPath, config, outputPath, configHash) {
  asyncInit = true;
  }
  const entries2 = files.map(async (file) => {
- const parsed = matter(
+ const parsed = fumaMatter(
  await readFileWithCache(file.absolutePath).catch(() => "")
  );
+ let data = parsed.data;
  if (collection.schema) {
- parsed.data = await validate(
+ data = await validate(
  collection.schema,
  parsed.data,
  { path: file.absolutePath, source: parsed.content },
@@ -144,7 +144,7 @@ async function generateJS(configPath, config, outputPath, configHash) {
  return JSON.stringify({
  info: file,
  lastModified,
- data: parsed.data,
+ data,
  content: parsed.content
  });
  });
@@ -237,10 +237,10 @@ function parseMetaEntry(file, content) {
  }

  // src/map/index.ts
- async function start(dev, configPath, outDir2) {
+ async function start(dev, configPath, outDir) {
  let configHash = await getConfigHash(configPath);
- let config = await loadConfig(configPath, configHash, true);
- const outPath = path2.resolve(outDir2, `index.ts`);
+ let config = await loadConfig(configPath, outDir, configHash, true);
+ const outPath = path2.resolve(outDir, `index.ts`);
  async function updateMapFile() {
  const start2 = performance.now();
  try {
@@ -271,7 +271,7 @@ async function start(dev, configPath, outDir2) {
  const isConfigFile = absolutePath === configPath;
  if (isConfigFile) {
  configHash = await getConfigHash(configPath);
- config = await loadConfig(configPath, configHash, true);
+ config = await loadConfig(configPath, outDir, configHash, true);
  }
  if (event === "change") fileCache.removeCache(absolutePath);
  await updateMapFile();
@@ -286,10 +286,10 @@ async function start(dev, configPath, outDir2) {
  }

  // src/next/create.ts
- var outDir = path3.resolve(".source");
  var defaultPageExtensions = ["mdx", "md", "jsx", "js", "tsx", "ts"];
  function createMDX({
- configPath = findConfigFile()
+ configPath = findConfigFile(),
+ outDir = ".source"
  } = {}) {
  const isDev = process.argv.includes("dev");
  const isBuild = process.argv.includes("build");
@@ -299,7 +299,8 @@ function createMDX({
  }
  return (nextConfig = {}) => {
  const mdxLoaderOptions = {
- configPath
+ configPath,
+ outDir
  };
  return {
  ...nextConfig,
@@ -342,14 +343,14 @@ function createMDX({
  }

  // src/postinstall.ts
- import * as path4 from "path";
+ import * as path3 from "path";
  import * as fs3 from "fs/promises";
- async function postInstall(configPath = findConfigFile()) {
- const jsOut = path4.resolve(".source/index.ts");
+ async function postInstall(configPath = findConfigFile(), outDir = ".source") {
+ const jsOut = path3.resolve(outDir, "index.ts");
  const hash = await getConfigHash(configPath);
- const config = await loadConfig(configPath, hash, true);
- await fs3.rm(path4.dirname(jsOut), { recursive: true });
- await fs3.mkdir(path4.dirname(jsOut), { recursive: true });
+ const config = await loadConfig(configPath, outDir, hash, true);
+ await fs3.rm(path3.dirname(jsOut), { recursive: true });
+ await fs3.mkdir(path3.dirname(jsOut), { recursive: true });
  await fs3.writeFile(jsOut, await generateJS(configPath, config, jsOut, hash));
  console.log("[MDX] types generated");
  }
@@ -40,7 +40,45 @@ var import_mdx_remote = require("@fumadocs/mdx-remote");
  var import_unist_util_visit = require("unist-util-visit");
  var path = __toESM(require("path"), 1);
  var fs = __toESM(require("fs/promises"), 1);
- var import_gray_matter = __toESM(require("gray-matter"), 1);
+
+ // src/utils/fuma-matter.ts
+ var import_lru_cache = require("lru-cache");
+ var import_js_yaml = require("js-yaml");
+ var cache = new import_lru_cache.LRUCache({
+ max: 200
+ });
+ function fumaMatter(input) {
+ if (input === "") {
+ return { data: {}, content: input, matter: "" };
+ }
+ const cached = cache.get(input);
+ if (cached) return cached;
+ const result = parseMatter(input);
+ cache.set(input, result);
+ return structuredClone(result);
+ }
+ var delimiter = "---";
+ function parseMatter(str) {
+ const output = { matter: "", data: {}, content: str };
+ const open = delimiter + "\n";
+ const close = "\n" + delimiter;
+ if (!str.startsWith(open)) {
+ return output;
+ }
+ str = str.slice(open.length);
+ const len = str.length;
+ let closeIdx = str.indexOf(close);
+ if (closeIdx === -1) {
+ closeIdx = len;
+ }
+ output.matter = str.slice(0, closeIdx);
+ output.content = str.slice(closeIdx + close.length);
+ const loaded = (0, import_js_yaml.load)(output.matter);
+ output.data = loaded ?? {};
+ return output;
+ }
+
+ // src/mdx-plugins/remark-include.ts
  function flattenNode(node) {
  if ("children" in node)
  return node.children.map((child) => flattenNode(child)).join("");
@@ -75,7 +113,7 @@ function remarkInclude() {
  );
  const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
  queue.push(
- fs.readFile(targetPath).then(async (content) => {
+ fs.readFile(targetPath).then((buffer) => buffer.toString()).then(async (content) => {
  compiler?.addDependency(targetPath);
  if (asCode) {
  const lang = params.lang ?? path.extname(specifier).slice(1);
@@ -88,14 +126,17 @@ function remarkInclude() {
  });
  return;
  }
- const parsed = processor.parse((0, import_gray_matter.default)(content).content);
+ const parsed = processor.parse(fumaMatter(content).content);
  await update(parsed, targetPath, processor, compiler);
  Object.assign(
  parent && parent.type === "paragraph" ? parent : node,
  parsed
  );
  }).catch((e) => {
- console.warn(`failed to read file: ${targetPath}`, e);
+ throw new Error(
+ `failed to read file ${targetPath}
+ ${e instanceof Error ? e.message : String(e)}`
+ );
  })
  );
  return "skip";
@@ -113,21 +154,25 @@ var import_mdx_plugins = require("fumadocs-core/mdx-plugins");

  // src/runtime/index.ts
  var import_node_fs = __toESM(require("fs"), 1);
+ var cache2 = /* @__PURE__ */ new Map();
  var _runtime = {
  doc(files) {
  return files.map((file) => {
  const { default: body, frontmatter, ...exports2 } = file.data;
- let cachedContent;
  return {
  body,
  ...exports2,
  ...frontmatter,
- get content() {
- cachedContent ??= import_node_fs.default.readFileSync(file.info.absolutePath).toString();
- return cachedContent;
- },
+ _file: file.info,
  _exports: file.data,
- _file: file.info
+ get content() {
+ const path2 = this._file.absolutePath;
+ const cached = cache2.get(path2);
+ if (cached) return cached;
+ const content = import_node_fs.default.readFileSync(path2).toString();
+ cache2.set(path2, content);
+ return content;
+ }
  };
  });
  },
@@ -164,6 +209,7 @@ function resolveFiles({ docs, meta }) {
  for (const entry of docs) {
  outputs.push({
  type: "page",
+ absolutePath: entry._file.absolutePath,
  path: entry._file.path,
  data: entry
  });
@@ -171,6+217,7 @@ function resolveFiles({ docs, meta }) {
  for (const entry of meta) {
  outputs.push({
  type: "meta",
+ absolutePath: entry._file.absolutePath,
  path: entry._file.path,
  data: entry
  });
@@ -1,13 +1,14 @@
  import {
  _runtime,
  createMDXSource
- } from "../chunk-7SSA5RCV.js";
+ } from "../chunk-NUDEC6C5.js";
  import {
  remarkInclude
- } from "../chunk-3UUEUK4M.js";
+ } from "../chunk-KTLWF7GN.js";
  import {
  buildConfig
  } from "../chunk-DRVUBK5B.js";
+ import "../chunk-MXACIHNJ.js";

  // src/runtime/async.ts
  import { createCompiler } from "@fumadocs/mdx-remote";
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "fumadocs-mdx",
- "version": "11.6.6",
+ "version": "11.6.8",
  "description": "The built-in source for Fumadocs",
  "keywords": [
  "NextJs",
@@ -45,30 +45,29 @@
  "@mdx-js/mdx": "^3.1.0",
  "@standard-schema/spec": "^1.0.0",
  "chokidar": "^4.0.3",
- "esbuild": "^0.25.4",
+ "esbuild": "^0.25.5",
  "estree-util-value-to-estree": "^3.4.0",
- "gray-matter": "^4.0.3",
  "js-yaml": "^4.1.0",
  "lru-cache": "^11.1.0",
  "picocolors": "^1.1.1",
  "tinyexec": "^1.0.1",
- "tinyglobby": "^0.2.13",
+ "tinyglobby": "^0.2.14",
  "unist-util-visit": "^5.0.0",
- "zod": "^3.25.28"
+ "zod": "^3.25.63"
  },
  "devDependencies": {
  "@types/js-yaml": "^4.0.9",
  "@types/mdast": "^4.0.3",
  "@types/mdx": "^2.0.13",
- "@types/react": "^19.1.5",
+ "@types/react": "^19.1.8",
  "mdast-util-mdx-jsx": "^3.2.0",
- "next": "^15.3.2",
+ "next": "^15.3.3",
  "unified": "^11.0.5",
  "vfile": "^6.0.3",
  "webpack": "^5.99.9",
- "@fumadocs/mdx-remote": "1.3.2",
+ "@fumadocs/mdx-remote": "1.3.3",
  "eslint-config-custom": "0.0.0",
- "fumadocs-core": "15.4.0",
+ "fumadocs-core": "15.5.2",
  "tsconfig": "0.0.0"
  },
  "peerDependencies": {