fumadocs-mdx 12.0.3 → 13.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. package/dist/bin.cjs +1033 -308
  2. package/dist/bin.js +4 -4
  3. package/dist/build-mdx-BHG-_uxo.d.cts +53 -0
  4. package/dist/build-mdx-CzrQDBRZ.d.ts +53 -0
  5. package/dist/bun/index.cjs +92 -28
  6. package/dist/bun/index.js +6 -7
  7. package/dist/{chunk-QAUWMR5D.js → chunk-3J3WL7WN.js} +23 -5
  8. package/dist/chunk-4MAYA5QX.js +44 -0
  9. package/dist/{chunk-RMDXSZYE.js → chunk-CEA6MYJU.js} +13 -8
  10. package/dist/{chunk-IQAEAI4P.js → chunk-HI62EXSB.js} +63 -2
  11. package/dist/{chunk-46UPKP5R.js → chunk-II3H5ZVZ.js} +5 -5
  12. package/dist/{chunk-LGYVNESJ.js → chunk-JVZFH6ND.js} +6 -22
  13. package/dist/{chunk-LMG6UWCL.js → chunk-K5ZLPEIQ.js} +56 -16
  14. package/dist/{chunk-OMAMTKDE.js → chunk-KILFIBVW.js} +3 -12
  15. package/dist/chunk-NVRDCY6Z.js +30 -0
  16. package/dist/{chunk-VXEBLM4X.js → chunk-XV5Z4BFL.js} +1 -1
  17. package/dist/config/index.cjs +56 -16
  18. package/dist/config/index.d.cts +2 -1
  19. package/dist/config/index.d.ts +2 -1
  20. package/dist/config/index.js +1 -1
  21. package/dist/{define-DJbJduHy.d.ts → define-BCNh3n4O.d.cts} +40 -16
  22. package/dist/{define-DJbJduHy.d.cts → define-bck_EB4t.d.ts} +40 -16
  23. package/dist/index.cjs +0 -109
  24. package/dist/index.d.cts +73 -12
  25. package/dist/index.d.ts +73 -12
  26. package/dist/index.js +0 -11
  27. package/dist/{load-UUXLUBHL.js → load-MNG3CLET.js} +1 -3
  28. package/dist/loader-mdx.cjs +153 -60
  29. package/dist/loader-mdx.d.cts +1 -0
  30. package/dist/loader-mdx.d.ts +1 -0
  31. package/dist/loader-mdx.js +23 -9
  32. package/dist/next/index.cjs +213 -181
  33. package/dist/next/index.d.cts +2 -11
  34. package/dist/next/index.d.ts +2 -11
  35. package/dist/next/index.js +145 -117
  36. package/dist/node/loader.cjs +128 -70
  37. package/dist/node/loader.js +14 -9
  38. package/dist/plugins/index.cjs +78 -0
  39. package/dist/plugins/index.d.cts +7 -0
  40. package/dist/plugins/index.d.ts +7 -0
  41. package/dist/plugins/index.js +6 -0
  42. package/dist/plugins/json-schema.cjs +61 -0
  43. package/dist/plugins/json-schema.d.cts +16 -0
  44. package/dist/plugins/json-schema.d.ts +16 -0
  45. package/dist/plugins/json-schema.js +40 -0
  46. package/dist/remark-postprocess-K233ZVBK.d.cts +22 -0
  47. package/dist/remark-postprocess-K233ZVBK.d.ts +22 -0
  48. package/dist/runtime/next/async.cjs +108 -70
  49. package/dist/runtime/next/async.d.cts +9 -6
  50. package/dist/runtime/next/async.d.ts +9 -6
  51. package/dist/runtime/next/async.js +9 -19
  52. package/dist/runtime/next/index.cjs +25 -14
  53. package/dist/runtime/next/index.d.cts +11 -8
  54. package/dist/runtime/next/index.d.ts +11 -8
  55. package/dist/runtime/next/index.js +2 -2
  56. package/dist/runtime/vite/browser.cjs +7 -3
  57. package/dist/runtime/vite/browser.d.cts +56 -7
  58. package/dist/runtime/vite/browser.d.ts +56 -7
  59. package/dist/runtime/vite/browser.js +2 -1
  60. package/dist/runtime/vite/server.cjs +40 -34
  61. package/dist/runtime/vite/server.d.cts +13 -10
  62. package/dist/runtime/vite/server.d.ts +13 -10
  63. package/dist/runtime/vite/server.js +8 -23
  64. package/dist/{types-TeHjsmja.d.ts → types-1cCFEzWt.d.ts} +3 -10
  65. package/dist/{types-BRx1QsIJ.d.cts → types-D5NhXTJY.d.cts} +3 -10
  66. package/dist/vite/index.cjs +386 -224
  67. package/dist/vite/index.d.cts +23 -10
  68. package/dist/vite/index.d.ts +23 -10
  69. package/dist/vite/index.js +211 -37
  70. package/dist/{watcher-HGOH3APP.js → watcher-WXJDWRZY.js} +1 -1
  71. package/package.json +28 -15
  72. package/dist/browser-BupUnhpC.d.ts +0 -98
  73. package/dist/browser-R0x9IPaQ.d.cts +0 -98
  74. package/dist/chunk-ADR6R7HM.js +0 -29
  75. package/dist/chunk-XMFLD5J6.js +0 -30
  76. package/dist/chunk-ZLCSVXCD.js +0 -10
  77. package/dist/chunk-ZX7TM4AR.js +0 -127
  78. package/dist/postinstall-SCSXM4IM.js +0 -10
  79. package/dist/shared-CfiiRctw.d.ts +0 -70
  80. package/dist/shared-fFqiuWJC.d.cts +0 -70
package/dist/bin.js CHANGED
@@ -3,14 +3,14 @@
 // src/bin.ts
 import { existsSync } from "fs";
 async function start() {
-  const args = process.argv.slice(2);
+  const [configPath] = process.argv.slice(2);
   const isNext = existsSync("next.config.js") || existsSync("next.config.mjs") || existsSync("next.config.ts");
   if (isNext) {
     const { postInstall } = await import("./next/index.js");
-    await postInstall(...args);
+    await postInstall(configPath);
   } else {
-    const { postInstall } = await import("./postinstall-SCSXM4IM.js");
-    await postInstall(...args);
+    const { postInstall } = await import("./vite/index.js");
+    await postInstall(configPath);
   }
 }
 void start();
@@ -0,0 +1,53 @@
+import { createProcessor } from '@mdx-js/mdx';
+import { StructuredData } from 'fumadocs-core/mdx-plugins';
+import { TOCItemType } from 'fumadocs-core/toc';
+import { FC } from 'react';
+import { MDXProps } from 'mdx/types';
+import { E as ExtractedReference } from './remark-postprocess-K233ZVBK.cjs';
+
+type Processor = ReturnType<typeof createProcessor>;
+interface CompilerOptions {
+  addDependency: (file: string) => void;
+}
+interface CompiledMDXProperties<Frontmatter = Record<string, unknown>> {
+  frontmatter: Frontmatter;
+  structuredData: StructuredData;
+  toc: TOCItemType[];
+  default: FC<MDXProps>;
+  /**
+   * Only available when `lastModifiedTime` is enabled on MDX loader
+   */
+  lastModified?: Date;
+  extractedReferences?: ExtractedReference[];
+  _markdown?: string;
+  _mdast?: string;
+}
+interface FumadocsDataMap {
+  /**
+   * [Fumadocs MDX] raw frontmatter, you can modify it
+   */
+  frontmatter?: Record<string, unknown>;
+  /**
+   * [Fumadocs MDX] additional ESM exports to write
+   */
+  'mdx-export'?: {
+    name: string;
+    value: unknown;
+  }[];
+  extractedReferences: ExtractedReference[];
+  /**
+   * [Fumadocs MDX] The compiler object from loader
+   */
+  _compiler?: CompilerOptions;
+  _getProcessor?: (format: 'md' | 'mdx') => Processor;
+  /**
+   * [Fumadocs MDX] Processed Markdown content before `remark-rehype`.
+   */
+  _markdown?: string;
+}
+declare module 'vfile' {
+  interface DataMap extends FumadocsDataMap {
+  }
+}
+
+export type { CompiledMDXProperties as C };
@@ -0,0 +1,53 @@
+import { createProcessor } from '@mdx-js/mdx';
+import { StructuredData } from 'fumadocs-core/mdx-plugins';
+import { TOCItemType } from 'fumadocs-core/toc';
+import { FC } from 'react';
+import { MDXProps } from 'mdx/types';
+import { E as ExtractedReference } from './remark-postprocess-K233ZVBK.js';
+
+type Processor = ReturnType<typeof createProcessor>;
+interface CompilerOptions {
+  addDependency: (file: string) => void;
+}
+interface CompiledMDXProperties<Frontmatter = Record<string, unknown>> {
+  frontmatter: Frontmatter;
+  structuredData: StructuredData;
+  toc: TOCItemType[];
+  default: FC<MDXProps>;
+  /**
+   * Only available when `lastModifiedTime` is enabled on MDX loader
+   */
+  lastModified?: Date;
+  extractedReferences?: ExtractedReference[];
+  _markdown?: string;
+  _mdast?: string;
+}
+interface FumadocsDataMap {
+  /**
+   * [Fumadocs MDX] raw frontmatter, you can modify it
+   */
+  frontmatter?: Record<string, unknown>;
+  /**
+   * [Fumadocs MDX] additional ESM exports to write
+   */
+  'mdx-export'?: {
+    name: string;
+    value: unknown;
+  }[];
+  extractedReferences: ExtractedReference[];
+  /**
+   * [Fumadocs MDX] The compiler object from loader
+   */
+  _compiler?: CompilerOptions;
+  _getProcessor?: (format: 'md' | 'mdx') => Processor;
+  /**
+   * [Fumadocs MDX] Processed Markdown content before `remark-rehype`.
+   */
+  _markdown?: string;
+}
+declare module 'vfile' {
+  interface DataMap extends FumadocsDataMap {
+  }
+}
+
+export type { CompiledMDXProperties as C };
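Because FumadocsDataMap is merged into vfile's DataMap, a custom remark plugin can hand extra ESM exports to the MDX loader through `file.data['mdx-export']`. A minimal sketch of how that could look; the plugin name and the exported value are made up for illustration and are not part of this release:

import type { Root } from 'mdast';
import type { Transformer } from 'unified';

// Hypothetical plugin: asks the loader to emit a `wordCount` export for every compiled file.
export function remarkWordCount(): Transformer<Root> {
  return (tree, file) => {
    const words = String(file.value ?? '').split(/\s+/).filter(Boolean).length;
    file.data['mdx-export'] ??= [];
    file.data['mdx-export'].push({ name: 'wordCount', value: words });
  };
}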
@@ -261,12 +261,56 @@ var import_mdx = require("@mdx-js/mdx");
 
 // src/loaders/mdx/remark-include.ts
 var import_unified = require("unified");
-var import_unist_util_visit = require("unist-util-visit");
+var import_unist_util_visit2 = require("unist-util-visit");
 var path2 = __toESM(require("path"), 1);
 var fs = __toESM(require("fs/promises"), 1);
-var import_remark_parse = __toESM(require("remark-parse"), 1);
-var import_remark_mdx = __toESM(require("remark-mdx"), 1);
 var import_mdx_plugins = require("fumadocs-core/mdx-plugins");
+
+// src/loaders/mdx/remark-unravel.ts
+var import_unist_util_visit = require("unist-util-visit");
+function remarkMarkAndUnravel() {
+  return (tree) => {
+    (0, import_unist_util_visit.visit)(tree, function(node, index, parent) {
+      let offset = -1;
+      let all = true;
+      let oneOrMore = false;
+      if (parent && typeof index === "number" && node.type === "paragraph") {
+        const children = node.children;
+        while (++offset < children.length) {
+          const child = children[offset];
+          if (child.type === "mdxJsxTextElement" || child.type === "mdxTextExpression") {
+            oneOrMore = true;
+          } else if (child.type === "text" && child.value.trim().length === 0) {
+          } else {
+            all = false;
+            break;
+          }
+        }
+        if (all && oneOrMore) {
+          offset = -1;
+          const newChildren = [];
+          while (++offset < children.length) {
+            const child = children[offset];
+            if (child.type === "mdxJsxTextElement") {
+              child.type = "mdxJsxFlowElement";
+            }
+            if (child.type === "mdxTextExpression") {
+              child.type = "mdxFlowExpression";
+            }
+            if (child.type === "text" && /^[\t\r\n ]+$/.test(String(child.value))) {
+            } else {
+              newChildren.push(child);
+            }
+          }
+          parent.children.splice(index, 1, ...newChildren);
+          return index;
+        }
+      }
+    });
+  };
+}
+
+// src/loaders/mdx/remark-include.ts
 var ElementLikeTypes = [
   "mdxJsxFlowElement",
   "mdxJsxTextElement",
@@ -306,7 +350,7 @@ function parseSpecifier(specifier) {
 function extractSection(root, section) {
   let nodes;
   let capturingHeadingContent = false;
-  (0, import_unist_util_visit.visit)(root, (node) => {
+  (0, import_unist_util_visit2.visit)(root, (node) => {
     if (node.type === "heading") {
       if (capturingHeadingContent) {
         return false;
@@ -338,7 +382,7 @@ function extractSection(root, section) {
 }
 function remarkInclude() {
   const TagName = "include";
-  async function embedContent(file, heading, params, data) {
+  const embedContent = async (file, heading, params, data) => {
     let content;
     try {
       content = (await fs.readFile(file)).toString();
@@ -361,18 +405,17 @@ ${e instanceof Error ? e.message : String(e)}`,
         data: {}
       };
    }
-    const parser = (data._getProcessor ?? getDefaultProcessor)(
-      ext === ".mdx" ? "mdx" : "md"
-    );
+    const parser = data._getProcessor ? data._getProcessor(ext === ".mdx" ? "mdx" : "md") : this;
     const parsed = fumaMatter(content);
     let mdast = parser.parse({
       path: file,
       value: parsed.content,
       data: { frontmatter: parsed.data }
     });
+    const baseProcessor = (0, import_unified.unified)().use(remarkMarkAndUnravel);
     if (heading) {
       const extracted = extractSection(
-        await (0, import_unified.unified)().use(import_mdx_plugins.remarkHeading).run(mdast),
+        await baseProcessor.use(import_mdx_plugins.remarkHeading).run(mdast),
         heading
       );
       if (!extracted)
@@ -380,13 +423,15 @@ ${e instanceof Error ? e.message : String(e)}`,
          `Cannot find section ${heading} in ${file}, make sure you have encapsulated the section in a <section id="${heading}"> tag, or a :::section directive with remark-directive configured.`
        );
       mdast = extracted;
+    } else {
+      mdast = await baseProcessor.run(mdast);
     }
     await update(mdast, path2.dirname(file), data);
     return mdast;
-  }
+  };
   async function update(tree, directory, data) {
     const queue = [];
-    (0, import_unist_util_visit.visit)(tree, ElementLikeTypes, (_node, _, parent) => {
+    (0, import_unist_util_visit2.visit)(tree, ElementLikeTypes, (_node, _, parent) => {
       const node = _node;
       if (node.name !== TagName) return;
       const specifier = flattenNode(node);
@@ -413,24 +458,28 @@ ${e instanceof Error ? e.message : String(e)}`,
     await update(tree, path2.dirname(file.path), file.data);
   };
 }
-function getDefaultProcessor(format) {
-  const mdProcessor = (0, import_unified.unified)().use(import_remark_parse.default);
-  if (format === "md") return mdProcessor;
-  return mdProcessor.use(import_remark_mdx.default);
-}
 
 // src/loaders/mdx/remark-postprocess.ts
-var import_unist_util_visit2 = require("unist-util-visit");
+var import_unist_util_visit3 = require("unist-util-visit");
 var import_mdast_util_to_markdown = require("mdast-util-to-markdown");
 var import_estree_util_value_to_estree = require("estree-util-value-to-estree");
+var import_unist_util_remove_position = require("unist-util-remove-position");
+var import_remark_mdx = __toESM(require("remark-mdx"), 1);
 function remarkPostprocess({
+  _format,
   includeProcessedMarkdown = false,
+  includeMDAST = false,
   valueToExport = []
-} = {}) {
+}) {
+  let _stringifyProcessor;
+  const getStringifyProcessor = () => {
+    if (_format === "mdx") return this;
+    return _stringifyProcessor ??= this().use(import_remark_mdx.default).freeze();
+  };
   return (tree, file) => {
     let title;
     const urls = [];
-    (0, import_unist_util_visit2.visit)(tree, ["heading", "link"], (node) => {
+    (0, import_unist_util_visit3.visit)(tree, ["heading", "link"], (node) => {
       if (node.type === "heading" && node.depth === 1) {
         title = flattenNode2(node);
       }
@@ -446,12 +495,19 @@ function remarkPostprocess({
     }
     file.data.extractedReferences = urls;
     if (includeProcessedMarkdown) {
+      const processor = getStringifyProcessor();
       file.data._markdown = (0, import_mdast_util_to_markdown.toMarkdown)(tree, {
-        ...this.data("settings"),
+        ...processor.data("settings"),
        // from https://github.com/remarkjs/remark/blob/main/packages/remark-stringify/lib/index.js
-        extensions: this.data("toMarkdownExtensions") || []
+        extensions: processor.data("toMarkdownExtensions") || []
       });
     }
+    if (includeMDAST) {
+      const options = includeMDAST === true ? {} : includeMDAST;
+      file.data._mdast = JSON.stringify(
+        options.removePosition ? (0, import_unist_util_remove_position.removePosition)(structuredClone(tree)) : tree
+      );
+    }
     for (const { name, value } of file.data["mdx-export"] ?? []) {
       tree.children.unshift(getMdastExport(name, value));
     }
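remarkPostprocess now takes an `includeMDAST` option alongside `includeProcessedMarkdown`: passing `true` or `{ removePosition: true }` attaches the serialized mdast tree as the `_mdast` export (a JSON string, see `CompiledMDXProperties` in the new build-mdx type declarations). A hedged sketch of reading it back on the consuming side; the `page` parameter shape is only what those declarations promise:

import type { Root } from 'mdast';

// `_mdast` is a JSON.stringify'd tree when includeMDAST is enabled, otherwise undefined.
function getMdast(page: { _mdast?: string }): Root | undefined {
  return page._mdast ? (JSON.parse(page._mdast) as Root) : undefined;
}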
@@ -519,6 +575,7 @@ async function buildMDX(cacheKey, source, options) {
       [
         remarkPostprocess,
         {
+          _format: format,
           ...options.postprocess,
           valueToExport: [
             ...options.postprocess?.valueToExport ?? [],
@@ -526,7 +583,8 @@ async function buildMDX(cacheKey, source, options) {
             "extractedReferences",
             "frontmatter",
             "lastModified",
-            "_markdown"
+            "_markdown",
+            "_mdast"
           ]
         }
       ]
@@ -558,16 +616,21 @@ var import_node_path2 = __toESM(require("path"), 1);
 var import_node_crypto = require("crypto");
 var querySchema = import_zod.z.object({
   only: import_zod.z.literal(["frontmatter", "all"]).default("all"),
-  collection: import_zod.z.string().optional(),
-  hash: import_zod.z.string().describe(
-    "the hash of config, used for revalidation on Turbopack/Webpack."
-  ).optional()
+  collection: import_zod.z.string().optional()
 }).loose();
 var cacheEntry = import_zod.z.object({
   code: import_zod.z.string(),
   map: import_zod.z.any().optional(),
   hash: import_zod.z.string().optional()
 });
+var hashes = /* @__PURE__ */ new WeakMap();
+function getConfigHash(config) {
+  let hash = hashes.get(config);
+  if (hash) return hash;
+  hash = Date.now().toString();
+  hashes.set(config, hash);
+  return hash;
+}
 function createMdxLoader(configLoader) {
   return async ({
     source: value,
@@ -578,7 +641,7 @@ function createMdxLoader(configLoader) {
   }) => {
     const matter = fumaMatter(value);
     const parsed = querySchema.parse(query);
-    const loaded = await configLoader.getConfig(parsed.hash);
+    const loaded = await configLoader.getConfig();
     const cacheDir = isDevelopment ? void 0 : loaded.global.experimentalBuildCache;
     const cacheKey = `${parsed.hash}_${parsed.collection ?? "global"}_${generateCacheHash(filePath)}`;
     if (cacheDir) {
@@ -618,7 +681,7 @@ function createMdxLoader(configLoader) {
     }
     const lineOffset = isDevelopment ? countLines(matter.matter) : 0;
     const compiled = await buildMDX(
-      `${parsed.hash ?? ""}:${parsed.collection ?? "global"}`,
+      `${getConfigHash(loaded)}:${parsed.collection ?? "global"}`,
       "\n".repeat(lineOffset) + matter.content,
       {
         development: isDevelopment,
@@ -660,6 +723,7 @@ function countLines(s) {
 
 // src/loaders/config/index.ts
 var import_node_path3 = __toESM(require("path"), 1);
+var import_promises2 = __toESM(require("fs/promises"), 1);
 function findConfigFile() {
   return import_node_path3.default.resolve("source.config.ts");
 }
package/dist/bun/index.js CHANGED
@@ -1,16 +1,15 @@
+import {
+  createMdxLoader
+} from "../chunk-CEA6MYJU.js";
+import "../chunk-3J3WL7WN.js";
+import "../chunk-K5ZLPEIQ.js";
 import {
   buildConfig
 } from "../chunk-U4MQ44TS.js";
-import {
-  createMdxLoader
-} from "../chunk-RMDXSZYE.js";
-import "../chunk-QAUWMR5D.js";
-import "../chunk-LMG6UWCL.js";
-import "../chunk-IQAEAI4P.js";
 import {
   findConfigFile,
   resolvedConfig
-} from "../chunk-XMFLD5J6.js";
+} from "../chunk-HI62EXSB.js";
 import "../chunk-VWJKRQZR.js";
 
 // src/bun/index.ts
@@ -1,6 +1,6 @@
 import {
   remarkInclude
-} from "./chunk-LMG6UWCL.js";
+} from "./chunk-K5ZLPEIQ.js";
 
 // src/loaders/mdx/build-mdx.ts
 import { createProcessor } from "@mdx-js/mdx";
@@ -9,10 +9,19 @@ import { createProcessor } from "@mdx-js/mdx";
 import { visit } from "unist-util-visit";
 import { toMarkdown } from "mdast-util-to-markdown";
 import { valueToEstree } from "estree-util-value-to-estree";
+import { removePosition } from "unist-util-remove-position";
+import remarkMdx from "remark-mdx";
 function remarkPostprocess({
+  _format,
   includeProcessedMarkdown = false,
+  includeMDAST = false,
   valueToExport = []
-} = {}) {
+}) {
+  let _stringifyProcessor;
+  const getStringifyProcessor = () => {
+    if (_format === "mdx") return this;
+    return _stringifyProcessor ??= this().use(remarkMdx).freeze();
+  };
   return (tree, file) => {
     let title;
     const urls = [];
@@ -32,12 +41,19 @@ function remarkPostprocess({
     }
     file.data.extractedReferences = urls;
     if (includeProcessedMarkdown) {
+      const processor = getStringifyProcessor();
       file.data._markdown = toMarkdown(tree, {
-        ...this.data("settings"),
+        ...processor.data("settings"),
        // from https://github.com/remarkjs/remark/blob/main/packages/remark-stringify/lib/index.js
-        extensions: this.data("toMarkdownExtensions") || []
+        extensions: processor.data("toMarkdownExtensions") || []
       });
     }
+    if (includeMDAST) {
+      const options = includeMDAST === true ? {} : includeMDAST;
+      file.data._mdast = JSON.stringify(
+        options.removePosition ? removePosition(structuredClone(tree)) : tree
+      );
+    }
     for (const { name, value } of file.data["mdx-export"] ?? []) {
       tree.children.unshift(getMdastExport(name, value));
     }
@@ -105,6 +121,7 @@ async function buildMDX(cacheKey, source, options) {
       [
         remarkPostprocess,
         {
+          _format: format,
           ...options.postprocess,
           valueToExport: [
             ...options.postprocess?.valueToExport ?? [],
@@ -112,7 +129,8 @@ async function buildMDX(cacheKey, source, options) {
             "extractedReferences",
             "frontmatter",
             "lastModified",
-            "_markdown"
+            "_markdown",
+            "_mdast"
           ]
         }
       ]
@@ -0,0 +1,44 @@
+// src/plugins/index.ts
+import path from "path";
+import fs from "fs/promises";
+function createPluginHandler(context, defaultPlugins = []) {
+  const plugins = [];
+  async function write(entry) {
+    const file = path.join(context.outDir, entry.path);
+    await fs.mkdir(path.dirname(file), { recursive: true });
+    await fs.writeFile(file, entry.content);
+  }
+  return {
+    async init(config) {
+      if (config.global.plugins) {
+        defaultPlugins.push(...config.global.plugins);
+      }
+      for await (const option of defaultPlugins) {
+        if (!option) continue;
+        if (Array.isArray(option)) plugins.push(...option);
+        else plugins.push(option);
+      }
+      for (const plugin of plugins) {
+        const out = await plugin.config?.call(context, config);
+        if (out) config = out;
+      }
+      return config;
+    },
+    async emit() {
+      const out = await Promise.all(
+        plugins.map((plugin) => {
+          return plugin.emit?.call(context) ?? [];
+        })
+      );
+      return out.flat();
+    },
+    async emitAndWrite() {
+      const entries = await this.emit();
+      await Promise.all(entries.map(write));
+    }
+  };
+}
+
+export {
+  createPluginHandler
+};
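createPluginHandler drives the new plugin surface (see the `package/dist/plugins/*` entries in the file list): a plugin may adjust the resolved config through an optional `config` hook and return files to write through an optional `emit` hook, each emitted entry being written relative to the handler's `outDir`. A rough sketch of what a plugin object could look like, inferred from this handler rather than from any documented API:

// Hypothetical plugin: writes a small build manifest next to the generated output.
const manifestPlugin = {
  config(config: unknown) {
    // inspect or tweak the resolved config; returning a value replaces it, returning nothing keeps it
    return config;
  },
  emit() {
    // entries are { path, content } pairs, joined onto the handler's outDir by emitAndWrite()
    return [{ path: 'manifest.json', content: JSON.stringify({ generatedAt: new Date().toISOString() }) }];
  },
};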
@@ -1,10 +1,10 @@
 import {
   buildMDX
-} from "./chunk-QAUWMR5D.js";
+} from "./chunk-3J3WL7WN.js";
 import {
   getGitTimestamp,
   validate
-} from "./chunk-IQAEAI4P.js";
+} from "./chunk-HI62EXSB.js";
 import {
   fumaMatter
 } from "./chunk-VWJKRQZR.js";
@@ -16,16 +16,21 @@ import path from "path";
 import { createHash } from "crypto";
 var querySchema = z.object({
   only: z.literal(["frontmatter", "all"]).default("all"),
-  collection: z.string().optional(),
-  hash: z.string().describe(
-    "the hash of config, used for revalidation on Turbopack/Webpack."
-  ).optional()
+  collection: z.string().optional()
 }).loose();
 var cacheEntry = z.object({
   code: z.string(),
   map: z.any().optional(),
   hash: z.string().optional()
 });
+var hashes = /* @__PURE__ */ new WeakMap();
+function getConfigHash(config) {
+  let hash = hashes.get(config);
+  if (hash) return hash;
+  hash = Date.now().toString();
+  hashes.set(config, hash);
+  return hash;
+}
 function createMdxLoader(configLoader) {
   return async ({
     source: value,
@@ -36,7 +41,7 @@ function createMdxLoader(configLoader) {
   }) => {
     const matter = fumaMatter(value);
     const parsed = querySchema.parse(query);
-    const loaded = await configLoader.getConfig(parsed.hash);
+    const loaded = await configLoader.getConfig();
     const cacheDir = isDevelopment ? void 0 : loaded.global.experimentalBuildCache;
     const cacheKey = `${parsed.hash}_${parsed.collection ?? "global"}_${generateCacheHash(filePath)}`;
     if (cacheDir) {
@@ -76,7 +81,7 @@ function createMdxLoader(configLoader) {
     }
     const lineOffset = isDevelopment ? countLines(matter.matter) : 0;
     const compiled = await buildMDX(
-      `${parsed.hash ?? ""}:${parsed.collection ?? "global"}`,
+      `${getConfigHash(loaded)}:${parsed.collection ?? "global"}`,
      "\n".repeat(lineOffset) + matter.content,
      {
        development: isDevelopment,
@@ -1,3 +1,60 @@
+// src/loaders/config/index.ts
+import path from "path";
+import fs from "fs/promises";
+function findConfigFile() {
+  return path.resolve("source.config.ts");
+}
+function resolvedConfig(loaded) {
+  return {
+    getConfig() {
+      return loaded;
+    }
+  };
+}
+function staticConfig({
+  outDir,
+  configPath,
+  buildConfig
+}) {
+  let cached;
+  return {
+    async getConfig() {
+      if (cached) return cached;
+      cached = import("./load-MNG3CLET.js").then(
+        (mod) => mod.loadConfig(configPath, outDir, buildConfig)
+      );
+      return cached;
+    }
+  };
+}
+function dynamicConfig({
+  outDir,
+  configPath,
+  buildConfig
+}) {
+  let loaded;
+  async function getConfigHash() {
+    const stats = await fs.stat(configPath).catch(() => void 0);
+    if (stats) {
+      return stats.mtime.getTime().toString();
+    }
+    throw new Error("Cannot find config file");
+  }
+  return {
+    async getConfig() {
+      const hash = await getConfigHash();
+      if (loaded && loaded.hash === hash) return loaded.config;
+      loaded = {
+        hash,
+        config: import("./load-MNG3CLET.js").then(
+          (mod) => mod.loadConfig(configPath, outDir, buildConfig)
+        )
+      };
+      return loaded.config;
+    }
+  };
+}
+
 // src/utils/validation.ts
 import picocolors from "picocolors";
 var ValidationError = class extends Error {
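These config-loader factories replace the old query-string `hash` mechanism: `resolvedConfig` wraps an already-built config, `staticConfig` loads `source.config.ts` once, and `dynamicConfig` reloads it whenever the file's mtime changes. All of them feed `createMdxLoader` through a single `getConfig()` method; the contract is small enough to sketch independently. The interface below is an assumption written for illustration, not an export of this package:

// Assumed shape of the loader contract consumed by createMdxLoader in this bundle.
interface ConfigLoader<Config> {
  getConfig: () => Promise<Config> | Config;
}

// Equivalent of resolvedConfig(loaded): hand back a pre-built config object unchanged.
function fromLoaded<Config>(loaded: Config): ConfigLoader<Config> {
  return { getConfig: () => loaded };
}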
@@ -37,7 +94,7 @@ async function validate(schema, data, context, errorMessage) {
 }
 
 // src/utils/git-timestamp.ts
-import path from "path";
+import path2 from "path";
 import { x } from "tinyexec";
 var cache = /* @__PURE__ */ new Map();
 async function getGitTimestamp(file) {
@@ -46,7 +103,7 @@ async function getGitTimestamp(file) {
   try {
     const out = await x(
       "git",
-      ["log", "-1", '--pretty="%ai"', path.relative(process.cwd(), file)],
+      ["log", "-1", '--pretty="%ai"', path2.relative(process.cwd(), file)],
       {
         throwOnError: true
       }
@@ -60,6 +117,10 @@ async function getGitTimestamp(file) {
 }
 
 export {
+  findConfigFile,
+  resolvedConfig,
+  staticConfig,
+  dynamicConfig,
   ValidationError,
   validate,
   getGitTimestamp
@@ -1,7 +1,5 @@
-// src/runtime/vite/browser.ts
-import { createElement, lazy } from "react";
-
 // src/runtime/vite/base.ts
+import { createElement, lazy } from "react";
 function fromConfigBase() {
   function normalize(entries, base) {
     const out = {};
@@ -28,8 +26,6 @@ function fromConfigBase() {
     }
   };
 }
-
-// src/runtime/vite/browser.ts
 var loaderStore = /* @__PURE__ */ new Map();
 function createClientLoader(files, options) {
   const { id = "", component } = options;
@@ -63,6 +59,10 @@ function createClientLoader(files, options) {
     getRenderer,
     getComponent(path) {
       return getRenderer()[path];
+    },
+    useContent(path, props) {
+      const Comp = this.getComponent(path);
+      return createElement(Comp, props);
     }
   };
 }
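createClientLoader gains a useContent helper, which, as the diff shows, is just createElement(getComponent(path), props). A hedged usage sketch in a React component; the loader setup and the props shape are assumptions, only the method itself is new in this release:

import type { ReactNode } from 'react';

// Assumed minimal surface of the object returned by createClientLoader.
interface ClientLoaderLike {
  useContent: (path: string, props: Record<string, unknown>) => ReactNode;
}

// Hypothetical page component: renders the lazily loaded MDX component for a file path.
function DocPage({ loader, path }: { loader: ClientLoaderLike; path: string }) {
  return loader.useContent(path, {});
}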