fumadocs-mdx 11.6.7 → 11.6.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,8 +1,11 @@
+ import {
+   fumaMatter
+ } from "./chunk-MXACIHNJ.js";
+
  // src/mdx-plugins/remark-include.ts
  import { visit } from "unist-util-visit";
  import * as path from "path";
  import * as fs from "fs/promises";
- import matter from "gray-matter";
  function flattenNode(node) {
    if ("children" in node)
      return node.children.map((child) => flattenNode(child)).join("");
@@ -37,7 +40,7 @@ function remarkInclude() {
  );
  const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
  queue.push(
-   fs.readFile(targetPath).then(async (content) => {
+   fs.readFile(targetPath).then((buffer) => buffer.toString()).then(async (content) => {
      compiler?.addDependency(targetPath);
      if (asCode) {
        const lang = params.lang ?? path.extname(specifier).slice(1);
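Without an encoding argument, `fs.readFile` resolves to a `Buffer`; the added `.then((buffer) => buffer.toString())` step hands the handler a string, which the new fumaMatter parser requires since it relies on string methods such as `startsWith` and `slice`. A minimal sketch of the difference, using a made-up file path:

```js
import * as fs from "fs/promises";

// Without an encoding, readFile resolves to a Buffer.
const raw = await fs.readFile("./included.md");
console.log(Buffer.isBuffer(raw)); // true

// The extra .then() added in remark-include normalizes it to a string
// before the frontmatter is stripped.
const content = await fs.readFile("./included.md").then((b) => b.toString());
console.log(typeof content); // "string"
```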
@@ -50,7 +53,7 @@ function remarkInclude() {
  });
  return;
  }
- const parsed = processor.parse(matter(content).content);
+ const parsed = processor.parse(fumaMatter(content).content);
  await update(parsed, targetPath, processor, compiler);
  Object.assign(
    parent && parent.type === "paragraph" ? parent : node,
@@ -0,0 +1,40 @@
+ // src/utils/fuma-matter.ts
+ import { LRUCache } from "lru-cache";
+ import { load } from "js-yaml";
+ var cache = new LRUCache({
+   max: 200
+ });
+ function fumaMatter(input) {
+   if (input === "") {
+     return { data: {}, content: input, matter: "" };
+   }
+   const cached = cache.get(input);
+   if (cached) return cached;
+   const result = parseMatter(input);
+   cache.set(input, result);
+   return structuredClone(result);
+ }
+ var delimiter = "---";
+ function parseMatter(str) {
+   const output = { matter: "", data: {}, content: str };
+   const open = delimiter + "\n";
+   const close = "\n" + delimiter;
+   if (!str.startsWith(open)) {
+     return output;
+   }
+   str = str.slice(open.length);
+   const len = str.length;
+   let closeIdx = str.indexOf(close);
+   if (closeIdx === -1) {
+     closeIdx = len;
+   }
+   output.matter = str.slice(0, closeIdx);
+   output.content = str.slice(closeIdx + close.length);
+   const loaded = load(output.matter);
+   output.data = loaded ?? {};
+   return output;
+ }
+
+ export {
+   fumaMatter
+ };
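The new chunk above replaces gray-matter with a small in-house parser: it only treats a leading `---` block as frontmatter, loads it with js-yaml, and memoizes results in a 200-entry LRU cache keyed by the raw input. A rough usage sketch of the exported helper (the sample document is invented; the chunk is internal to the package, not public API):

```js
import { fumaMatter } from "./chunk-MXACIHNJ.js";

const source = [
  "---",
  "title: Hello",
  "description: Example page",
  "---",
  "",
  "# Hello"
].join("\n");

const { data, matter, content } = fumaMatter(source);
// data    -> { title: "Hello", description: "Example page" } (parsed by js-yaml)
// matter  -> the raw YAML text between the two "---" delimiters
// content -> everything after the closing delimiter ("\n\n# Hello" here)
```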
@@ -1,20 +1,24 @@
  // src/runtime/index.ts
  import fs from "fs";
+ var cache = /* @__PURE__ */ new Map();
  var _runtime = {
    doc(files) {
      return files.map((file) => {
        const { default: body, frontmatter, ...exports } = file.data;
-       let cachedContent;
        return {
          body,
          ...exports,
          ...frontmatter,
-         get content() {
-           cachedContent ??= fs.readFileSync(file.info.absolutePath).toString();
-           return cachedContent;
-         },
+         _file: file.info,
          _exports: file.data,
-         _file: file.info
+         get content() {
+           const path = this._file.absolutePath;
+           const cached = cache.get(path);
+           if (cached) return cached;
+           const content = fs.readFileSync(path).toString();
+           cache.set(path, content);
+           return content;
+         }
        };
      });
    },
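In the runtime, the `content` getter used to memoize the file text in a per-document closure variable; it now reads through a module-level `Map` keyed by the file's `absolutePath`, so every document object produced for the same file shares one cached string. A simplified sketch of the new behavior (the `makeDoc` helper and the path are illustrative, not part of the package):

```js
import fs from "fs";

// Module-level cache, mirroring `var cache = new Map()` in src/runtime/index.ts.
const cache = new Map();

function makeDoc(file) {
  return {
    _file: file.info,
    get content() {
      const path = this._file.absolutePath;
      const cached = cache.get(path);
      if (cached) return cached; // hit: no file system access
      const content = fs.readFileSync(path).toString();
      cache.set(path, content); // miss: read once, reuse afterwards
      return content;
    }
  };
}

// Two wrappers over the same file now share the cached text.
const a = makeDoc({ info: { absolutePath: "/docs/index.mdx" } });
const b = makeDoc({ info: { absolutePath: "/docs/index.mdx" } });
console.log(a.content === b.content); // true: the second access is a cache hit
```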
@@ -51,6 +55,7 @@ function resolveFiles({ docs, meta }) {
  for (const entry of docs) {
    outputs.push({
      type: "page",
+     absolutePath: entry._file.absolutePath,
      path: entry._file.path,
      data: entry
    });
@@ -58,6 +63,7 @@ function resolveFiles({ docs, meta }) {
  for (const entry of meta) {
    outputs.push({
      type: "meta",
+     absolutePath: entry._file.absolutePath,
      path: entry._file.path,
      data: entry
    });
@@ -213,7 +213,45 @@ function getDefaultMDXOptions({
  var import_unist_util_visit = require("unist-util-visit");
  var path = __toESM(require("path"), 1);
  var fs = __toESM(require("fs/promises"), 1);
- var import_gray_matter = __toESM(require("gray-matter"), 1);
+
+ // src/utils/fuma-matter.ts
+ var import_lru_cache = require("lru-cache");
+ var import_js_yaml = require("js-yaml");
+ var cache = new import_lru_cache.LRUCache({
+   max: 200
+ });
+ function fumaMatter(input) {
+   if (input === "") {
+     return { data: {}, content: input, matter: "" };
+   }
+   const cached = cache.get(input);
+   if (cached) return cached;
+   const result = parseMatter(input);
+   cache.set(input, result);
+   return structuredClone(result);
+ }
+ var delimiter = "---";
+ function parseMatter(str) {
+   const output = { matter: "", data: {}, content: str };
+   const open = delimiter + "\n";
+   const close = "\n" + delimiter;
+   if (!str.startsWith(open)) {
+     return output;
+   }
+   str = str.slice(open.length);
+   const len = str.length;
+   let closeIdx = str.indexOf(close);
+   if (closeIdx === -1) {
+     closeIdx = len;
+   }
+   output.matter = str.slice(0, closeIdx);
+   output.content = str.slice(closeIdx + close.length);
+   const loaded = (0, import_js_yaml.load)(output.matter);
+   output.data = loaded ?? {};
+   return output;
+ }
+
+ // src/mdx-plugins/remark-include.ts
  function flattenNode(node) {
    if ("children" in node)
      return node.children.map((child) => flattenNode(child)).join("");
@@ -248,7 +286,7 @@ function remarkInclude() {
  );
  const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
  queue.push(
-   fs.readFile(targetPath).then(async (content) => {
+   fs.readFile(targetPath).then((buffer) => buffer.toString()).then(async (content) => {
      compiler?.addDependency(targetPath);
      if (asCode) {
        const lang = params.lang ?? path.extname(specifier).slice(1);
@@ -261,7 +299,7 @@ function remarkInclude() {
  });
  return;
  }
- const parsed = processor.parse((0, import_gray_matter.default)(content).content);
+ const parsed = processor.parse(fumaMatter(content).content);
  await update(parsed, targetPath, processor, compiler);
  Object.assign(
    parent && parent.type === "paragraph" ? parent : node,
@@ -4,7 +4,8 @@ import {
  } from "../chunk-OTM6WYMS.js";
  import {
    remarkInclude
- } from "../chunk-2Z6EJ3GA.js";
+ } from "../chunk-KTLWF7GN.js";
+ import "../chunk-MXACIHNJ.js";
  import {
    getDefaultMDXOptions
  } from "../chunk-VC3Y6FLZ.js";
package/dist/index.cjs CHANGED
@@ -38,21 +38,25 @@ module.exports = __toCommonJS(index_exports);

  // src/runtime/index.ts
  var import_node_fs = __toESM(require("fs"), 1);
+ var cache = /* @__PURE__ */ new Map();
  var _runtime = {
    doc(files) {
      return files.map((file) => {
        const { default: body, frontmatter, ...exports2 } = file.data;
-       let cachedContent;
        return {
          body,
          ...exports2,
          ...frontmatter,
-         get content() {
-           cachedContent ??= import_node_fs.default.readFileSync(file.info.absolutePath).toString();
-           return cachedContent;
-         },
+         _file: file.info,
          _exports: file.data,
-         _file: file.info
+         get content() {
+           const path = this._file.absolutePath;
+           const cached = cache.get(path);
+           if (cached) return cached;
+           const content = import_node_fs.default.readFileSync(path).toString();
+           cache.set(path, content);
+           return content;
+         }
        };
      });
    },
@@ -89,6 +93,7 @@ function resolveFiles({ docs, meta }) {
  for (const entry of docs) {
    outputs.push({
      type: "page",
+     absolutePath: entry._file.absolutePath,
      path: entry._file.path,
      data: entry
    });
@@ -96,6 +101,7 @@ function resolveFiles({ docs, meta }) {
  for (const entry of meta) {
    outputs.push({
      type: "meta",
+     absolutePath: entry._file.absolutePath,
      path: entry._file.path,
      data: entry
    });
package/dist/index.js CHANGED
@@ -2,7 +2,7 @@ import {
  _runtime,
  createMDXSource,
  resolveFiles
- } from "./chunk-7SSA5RCV.js";
+ } from "./chunk-NUDEC6C5.js";
  export {
    _runtime,
    createMDXSource,
@@ -165,7 +165,6 @@ __export(loader_mdx_exports, {
  module.exports = __toCommonJS(loader_mdx_exports);
  var path4 = __toESM(require("path"), 1);
  var import_node_querystring = require("querystring");
- var import_gray_matter2 = __toESM(require("gray-matter"), 1);

  // src/utils/config.ts
  var fs = __toESM(require("fs/promises"), 1);
@@ -262,7 +261,45 @@ var import_mdx = require("@mdx-js/mdx");
  var import_unist_util_visit = require("unist-util-visit");
  var path2 = __toESM(require("path"), 1);
  var fs2 = __toESM(require("fs/promises"), 1);
- var import_gray_matter = __toESM(require("gray-matter"), 1);
+
+ // src/utils/fuma-matter.ts
+ var import_lru_cache = require("lru-cache");
+ var import_js_yaml = require("js-yaml");
+ var cache2 = new import_lru_cache.LRUCache({
+   max: 200
+ });
+ function fumaMatter(input) {
+   if (input === "") {
+     return { data: {}, content: input, matter: "" };
+   }
+   const cached = cache2.get(input);
+   if (cached) return cached;
+   const result = parseMatter(input);
+   cache2.set(input, result);
+   return structuredClone(result);
+ }
+ var delimiter = "---";
+ function parseMatter(str) {
+   const output = { matter: "", data: {}, content: str };
+   const open = delimiter + "\n";
+   const close = "\n" + delimiter;
+   if (!str.startsWith(open)) {
+     return output;
+   }
+   str = str.slice(open.length);
+   const len = str.length;
+   let closeIdx = str.indexOf(close);
+   if (closeIdx === -1) {
+     closeIdx = len;
+   }
+   output.matter = str.slice(0, closeIdx);
+   output.content = str.slice(closeIdx + close.length);
+   const loaded = (0, import_js_yaml.load)(output.matter);
+   output.data = loaded ?? {};
+   return output;
+ }
+
+ // src/mdx-plugins/remark-include.ts
  function flattenNode(node) {
    if ("children" in node)
      return node.children.map((child) => flattenNode(child)).join("");
@@ -297,7 +334,7 @@ function remarkInclude() {
  );
  const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
  queue.push(
-   fs2.readFile(targetPath).then(async (content) => {
+   fs2.readFile(targetPath).then((buffer) => buffer.toString()).then(async (content) => {
      compiler?.addDependency(targetPath);
      if (asCode) {
        const lang = params.lang ?? path2.extname(specifier).slice(1);
@@ -310,7 +347,7 @@ function remarkInclude() {
  });
  return;
  }
- const parsed = processor.parse((0, import_gray_matter.default)(content).content);
+ const parsed = processor.parse(fumaMatter(content).content);
  await update(parsed, targetPath, processor, compiler);
  Object.assign(
    parent && parent.type === "paragraph" ? parent : node,
@@ -334,7 +371,7 @@ ${e instanceof Error ? e.message : String(e)}`
  }

  // src/utils/build-mdx.ts
- var cache2 = /* @__PURE__ */ new Map();
+ var cache3 = /* @__PURE__ */ new Map();
  async function buildMDX(cacheKey, source, options) {
    const { filePath, frontmatter, data, ...rest } = options;
    let format = options.format;
@@ -343,7 +380,7 @@ async function buildMDX(cacheKey, source, options) {
  }
  format ??= "mdx";
  const key = `${cacheKey}:${format}`;
- let cached = cache2.get(key);
+ let cached = cache3.get(key);
  if (cached === void 0) {
    cached = (0, import_mdx.createProcessor)({
      outputFormat: "program",
@@ -352,7 +389,7 @@ async function buildMDX(cacheKey, source, options) {
  remarkPlugins: [remarkInclude, ...rest.remarkPlugins ?? []],
  format
  });
- cache2.set(key, cached);
+ cache3.set(key, cached);
  }
  return cached.process({
    value: source,
@@ -368,9 +405,9 @@ async function buildMDX(cacheKey, source, options) {
  // src/utils/git-timestamp.ts
  var import_node_path = __toESM(require("path"), 1);
  var import_tinyexec = require("tinyexec");
- var cache3 = /* @__PURE__ */ new Map();
+ var cache4 = /* @__PURE__ */ new Map();
  async function getGitTimestamp(file) {
-   const cached = cache3.get(file);
+   const cached = cache4.get(file);
    if (cached) return cached;
    try {
      const out = await (0, import_tinyexec.x)(
@@ -381,7 +418,7 @@ async function getGitTimestamp(file) {
  }
  );
  const time = new Date(out.stdout);
- cache3.set(file, time);
+ cache4.set(file, time);
  return time;
  } catch {
    return;
@@ -449,7 +486,7 @@ async function loader(source, callback) {
  const context = this.context;
  const filePath = this.resourcePath;
  const { configPath, outDir } = this.getOptions();
- const matter2 = (0, import_gray_matter2.default)(source);
+ const matter = fumaMatter(source);
  const {
    hash: configHash = await getConfigHash(configPath),
    collection: collectionId
@@ -460,12 +497,13 @@ async function loader(source, callback) {
  if (collection && collection.type !== "doc") {
    collection = void 0;
  }
+ let data = matter.data;
  const mdxOptions = collection?.mdxOptions ?? await loadDefaultOptions(config);
  if (collection?.schema) {
    try {
-     matter2.data = await validate(
+     data = await validate(
        collection.schema,
-       matter2.data,
+       matter.data,
        {
          source,
          path: filePath
@@ -485,16 +523,16 @@ async function loader(source, callback) {
  }
  try {
    const lineOffset = "\n".repeat(
-     this.mode === "development" ? lines(matter2.matter) : 0
+     this.mode === "development" ? lines(matter.matter) : 0
    );
    const file = await buildMDX(
      `${configHash}:${collectionId ?? "global"}`,
-     lineOffset + matter2.content,
+     lineOffset + matter.content,
      {
        development: this.mode === "development",
        ...mdxOptions,
        filePath,
-       frontmatter: matter2.data,
+       frontmatter: data,
        data: {
          lastModified: timestamp
        },
@@ -9,13 +9,15 @@ import {
  } from "./chunk-OTM6WYMS.js";
  import {
    remarkInclude
- } from "./chunk-2Z6EJ3GA.js";
+ } from "./chunk-KTLWF7GN.js";
  import "./chunk-DRVUBK5B.js";
+ import {
+   fumaMatter
+ } from "./chunk-MXACIHNJ.js";

  // src/loader-mdx.ts
  import * as path from "path";
  import { parse } from "querystring";
- import grayMatter from "gray-matter";

  // src/utils/build-mdx.ts
  import { createProcessor } from "@mdx-js/mdx";
@@ -56,7 +58,7 @@ async function loader(source, callback) {
  const context = this.context;
  const filePath = this.resourcePath;
  const { configPath, outDir } = this.getOptions();
- const matter = grayMatter(source);
+ const matter = fumaMatter(source);
  const {
    hash: configHash = await getConfigHash(configPath),
    collection: collectionId
@@ -67,10 +69,11 @@ async function loader(source, callback) {
  if (collection && collection.type !== "doc") {
    collection = void 0;
  }
+ let data = matter.data;
  const mdxOptions = collection?.mdxOptions ?? await loadDefaultOptions(config);
  if (collection?.schema) {
    try {
-     matter.data = await validate(
+     data = await validate(
        collection.schema,
        matter.data,
        {
@@ -101,7 +104,7 @@ async function loader(source, callback) {
  development: this.mode === "development",
  ...mdxOptions,
  filePath,
- frontmatter: matter.data,
+ frontmatter: data,
  data: {
    lastModified: timestamp
  },
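Across both loader builds, the frontmatter returned by fumaMatter is no longer mutated after schema validation: the validated value is kept in a local `data` variable and passed to buildMDX as `frontmatter`, while `matter.data` stays as parsed, presumably so the object memoized by fumaMatter's cache is never modified in place. A condensed sketch of the pattern (the helpers are the package's own, injected as parameters only to keep the sketch self-contained; the surrounding webpack wiring is omitted):

```js
// Hypothetical reduction of the new loader flow shown in the diff above.
async function parseAndValidate(source, filePath, collection, { fumaMatter, validate }) {
  const matter = fumaMatter(source);

  // 11.6.8 keeps validated frontmatter separate instead of assigning
  // it back onto matter.data (the 11.6.7 behavior).
  let data = matter.data;
  if (collection?.schema) {
    data = await validate(collection.schema, matter.data, {
      source,
      path: filePath
    });
  }

  return { frontmatter: data, content: matter.content, rawMatter: matter.matter };
}
```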
@@ -255,8 +255,7 @@ var fileCache = {
  };

  // src/map/generate.ts
- var import_gray_matter = __toESM(require("gray-matter"), 1);
- var import_js_yaml = require("js-yaml");
+ var import_js_yaml2 = require("js-yaml");

  // src/utils/git-timestamp.ts
  var import_node_path2 = __toESM(require("path"), 1);
@@ -281,6 +280,43 @@ async function getGitTimestamp(file) {
  }
  }

+ // src/utils/fuma-matter.ts
+ var import_lru_cache2 = require("lru-cache");
+ var import_js_yaml = require("js-yaml");
+ var cache3 = new import_lru_cache2.LRUCache({
+   max: 200
+ });
+ function fumaMatter(input) {
+   if (input === "") {
+     return { data: {}, content: input, matter: "" };
+   }
+   const cached = cache3.get(input);
+   if (cached) return cached;
+   const result = parseMatter(input);
+   cache3.set(input, result);
+   return structuredClone(result);
+ }
+ var delimiter = "---";
+ function parseMatter(str) {
+   const output = { matter: "", data: {}, content: str };
+   const open = delimiter + "\n";
+   const close = "\n" + delimiter;
+   if (!str.startsWith(open)) {
+     return output;
+   }
+   str = str.slice(open.length);
+   const len = str.length;
+   let closeIdx = str.indexOf(close);
+   if (closeIdx === -1) {
+     closeIdx = len;
+   }
+   output.matter = str.slice(0, closeIdx);
+   output.content = str.slice(closeIdx + close.length);
+   const loaded = (0, import_js_yaml.load)(output.matter);
+   output.data = loaded ?? {};
+   return output;
+ }
+
  // src/map/generate.ts
  async function readFileWithCache(file) {
    const cached = fileCache.read("read-file", file);
@@ -353,11 +389,12 @@ async function generateJS(configPath, config, outputPath, configHash) {
  asyncInit = true;
  }
  const entries2 = files.map(async (file) => {
-   const parsed = (0, import_gray_matter.default)(
+   const parsed = fumaMatter(
      await readFileWithCache(file.absolutePath).catch(() => "")
    );
+   let data = parsed.data;
    if (collection.schema) {
-     parsed.data = await validate(
+     data = await validate(
        collection.schema,
        parsed.data,
        { path: file.absolutePath, source: parsed.content },
@@ -371,7 +408,7 @@ async function generateJS(configPath, config, outputPath, configHash) {
  return JSON.stringify({
    info: file,
    lastModified,
-   data: parsed.data,
+   data,
    content: parsed.content
  });
  });
@@ -454,7 +491,7 @@ function parseMetaEntry(file, content) {
  const extname3 = path3.extname(file);
  try {
    if (extname3 === ".json") return JSON.parse(content);
-   if (extname3 === ".yaml") return (0, import_js_yaml.load)(content);
+   if (extname3 === ".yaml") return (0, import_js_yaml2.load)(content);
  } catch (e) {
    throw new Error(`Failed to parse meta file: ${file}.`, {
      cause: e
@@ -9,6 +9,9 @@ import {
  validate
  } from "../chunk-OTM6WYMS.js";
  import "../chunk-DRVUBK5B.js";
+ import {
+   fumaMatter
+ } from "../chunk-MXACIHNJ.js";

  // src/map/index.ts
  import * as path2 from "path";
@@ -50,7 +53,6 @@ var fileCache = {
  };

  // src/map/generate.ts
- import matter from "gray-matter";
  import { load } from "js-yaml";
  async function readFileWithCache(file) {
    const cached = fileCache.read("read-file", file);
@@ -123,11 +125,12 @@ async function generateJS(configPath, config, outputPath, configHash) {
  asyncInit = true;
  }
  const entries2 = files.map(async (file) => {
-   const parsed = matter(
+   const parsed = fumaMatter(
      await readFileWithCache(file.absolutePath).catch(() => "")
    );
+   let data = parsed.data;
    if (collection.schema) {
-     parsed.data = await validate(
+     data = await validate(
        collection.schema,
        parsed.data,
        { path: file.absolutePath, source: parsed.content },
@@ -141,7 +144,7 @@ async function generateJS(configPath, config, outputPath, configHash) {
  return JSON.stringify({
    info: file,
    lastModified,
-   data: parsed.data,
+   data,
    content: parsed.content
  });
  });
@@ -40,7 +40,45 @@ var import_mdx_remote = require("@fumadocs/mdx-remote");
  var import_unist_util_visit = require("unist-util-visit");
  var path = __toESM(require("path"), 1);
  var fs = __toESM(require("fs/promises"), 1);
- var import_gray_matter = __toESM(require("gray-matter"), 1);
+
+ // src/utils/fuma-matter.ts
+ var import_lru_cache = require("lru-cache");
+ var import_js_yaml = require("js-yaml");
+ var cache = new import_lru_cache.LRUCache({
+   max: 200
+ });
+ function fumaMatter(input) {
+   if (input === "") {
+     return { data: {}, content: input, matter: "" };
+   }
+   const cached = cache.get(input);
+   if (cached) return cached;
+   const result = parseMatter(input);
+   cache.set(input, result);
+   return structuredClone(result);
+ }
+ var delimiter = "---";
+ function parseMatter(str) {
+   const output = { matter: "", data: {}, content: str };
+   const open = delimiter + "\n";
+   const close = "\n" + delimiter;
+   if (!str.startsWith(open)) {
+     return output;
+   }
+   str = str.slice(open.length);
+   const len = str.length;
+   let closeIdx = str.indexOf(close);
+   if (closeIdx === -1) {
+     closeIdx = len;
+   }
+   output.matter = str.slice(0, closeIdx);
+   output.content = str.slice(closeIdx + close.length);
+   const loaded = (0, import_js_yaml.load)(output.matter);
+   output.data = loaded ?? {};
+   return output;
+ }
+
+ // src/mdx-plugins/remark-include.ts
  function flattenNode(node) {
    if ("children" in node)
      return node.children.map((child) => flattenNode(child)).join("");
@@ -75,7 +113,7 @@ function remarkInclude() {
  );
  const asCode = params.lang || !specifier.endsWith(".md") && !specifier.endsWith(".mdx");
  queue.push(
-   fs.readFile(targetPath).then(async (content) => {
+   fs.readFile(targetPath).then((buffer) => buffer.toString()).then(async (content) => {
      compiler?.addDependency(targetPath);
      if (asCode) {
        const lang = params.lang ?? path.extname(specifier).slice(1);
@@ -88,7 +126,7 @@ function remarkInclude() {
  });
  return;
  }
- const parsed = processor.parse((0, import_gray_matter.default)(content).content);
+ const parsed = processor.parse(fumaMatter(content).content);
  await update(parsed, targetPath, processor, compiler);
  Object.assign(
    parent && parent.type === "paragraph" ? parent : node,
@@ -116,21 +154,25 @@ var import_mdx_plugins = require("fumadocs-core/mdx-plugins");

  // src/runtime/index.ts
  var import_node_fs = __toESM(require("fs"), 1);
+ var cache2 = /* @__PURE__ */ new Map();
  var _runtime = {
    doc(files) {
      return files.map((file) => {
        const { default: body, frontmatter, ...exports2 } = file.data;
-       let cachedContent;
        return {
          body,
          ...exports2,
          ...frontmatter,
-         get content() {
-           cachedContent ??= import_node_fs.default.readFileSync(file.info.absolutePath).toString();
-           return cachedContent;
-         },
+         _file: file.info,
          _exports: file.data,
-         _file: file.info
+         get content() {
+           const path2 = this._file.absolutePath;
+           const cached = cache2.get(path2);
+           if (cached) return cached;
+           const content = import_node_fs.default.readFileSync(path2).toString();
+           cache2.set(path2, content);
+           return content;
+         }
        };
      });
    },
@@ -167,6 +209,7 @@ function resolveFiles({ docs, meta }) {
  for (const entry of docs) {
    outputs.push({
      type: "page",
+     absolutePath: entry._file.absolutePath,
      path: entry._file.path,
      data: entry
    });
@@ -174,6 +217,7 @@ function resolveFiles({ docs, meta }) {
  for (const entry of meta) {
    outputs.push({
      type: "meta",
+     absolutePath: entry._file.absolutePath,
      path: entry._file.path,
      data: entry
    });
@@ -1,13 +1,14 @@
  import {
    _runtime,
    createMDXSource
- } from "../chunk-7SSA5RCV.js";
+ } from "../chunk-NUDEC6C5.js";
  import {
    remarkInclude
- } from "../chunk-2Z6EJ3GA.js";
+ } from "../chunk-KTLWF7GN.js";
  import {
    buildConfig
  } from "../chunk-DRVUBK5B.js";
+ import "../chunk-MXACIHNJ.js";

  // src/runtime/async.ts
  import { createCompiler } from "@fumadocs/mdx-remote";
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "fumadocs-mdx",
-   "version": "11.6.7",
+   "version": "11.6.8",
    "description": "The built-in source for Fumadocs",
    "keywords": [
      "NextJs",
@@ -47,28 +47,27 @@
    "chokidar": "^4.0.3",
    "esbuild": "^0.25.5",
    "estree-util-value-to-estree": "^3.4.0",
-   "gray-matter": "^4.0.3",
    "js-yaml": "^4.1.0",
    "lru-cache": "^11.1.0",
    "picocolors": "^1.1.1",
    "tinyexec": "^1.0.1",
    "tinyglobby": "^0.2.14",
    "unist-util-visit": "^5.0.0",
-   "zod": "^3.25.42"
+   "zod": "^3.25.63"
  },
  "devDependencies": {
    "@types/js-yaml": "^4.0.9",
    "@types/mdast": "^4.0.3",
    "@types/mdx": "^2.0.13",
-   "@types/react": "^19.1.6",
+   "@types/react": "^19.1.8",
    "mdast-util-mdx-jsx": "^3.2.0",
    "next": "^15.3.3",
    "unified": "^11.0.5",
    "vfile": "^6.0.3",
    "webpack": "^5.99.9",
-   "@fumadocs/mdx-remote": "1.3.2",
+   "@fumadocs/mdx-remote": "1.3.3",
    "eslint-config-custom": "0.0.0",
-   "fumadocs-core": "15.5.1",
+   "fumadocs-core": "15.5.2",
    "tsconfig": "0.0.0"
  },
  "peerDependencies": {