fumadocs-mdx 14.2.4 → 14.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -66,6 +66,7 @@ function toVite(loader) {
  return !loader.test || loader.test.test(id);
  },
  async transform(value, id) {
+ const environment = this.environment;
  const [file, query = ""] = id.split("?", 2);
  const result = await loader.load({
  filePath: file,
@@ -73,7 +74,7 @@ function toVite(loader) {
  getSource() {
  return value;
  },
- development: this.environment.mode === "dev",
+ development: environment.mode === "dev",
  compiler: { addDependency: (file$1) => {
  this.addWatchFile(file$1);
  } }
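The functional change in this chunk is confined to `toVite`: the `transform` hook now captures `this.environment` in a local `environment` variable at the top and derives the `development` flag from that local, where it previously read `this.environment.mode` inside the object literal passed to `loader.load`. A minimal sketch of the pattern, based only on this diff and the bundled source map (the standalone `transform` signature and simplified return value are illustrative; the real adapter forwards this object to `loader.load`):

```ts
import type { TransformPluginContext } from 'rollup';
import type { Environment } from 'vite';

// Illustrative only: mirrors the shape of the change, not the full adapter.
async function transform(this: TransformPluginContext, value: string, id: string) {
  // Vite doesn't expose `environment` on the typed Rollup context, hence the cast.
  // Capturing it once up front is the new behaviour in 14.2.5.
  const environment = (this as unknown as { environment: Environment }).environment;
  const [file, query = ''] = id.split('?', 2);

  // Previously: development: this.environment.mode === 'dev'
  return {
    filePath: file,
    query,
    getSource: () => value,
    development: environment.mode === 'dev',
  };
}
```

The remaining hunks appear to be re-hashed chunk filenames (e.g. `adapter-DG-viEbG.js` → `adapter-DI4cexsC.js`) and regenerated source maps rather than further logic changes.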
@@ -141,4 +142,4 @@ function toBun(loader) {

  //#endregion
  export { createIntegratedConfigLoader as a, toWebpack as i, toNode as n, createStandaloneConfigLoader as o, toVite as r, toBun as t };
- //# sourceMappingURL=adapter-DG-viEbG.js.map
+ //# sourceMappingURL=adapter-DI4cexsC.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"adapter-DI4cexsC.js","names":["file"],"sources":["../src/loaders/config.ts","../src/loaders/adapter.ts"],"sourcesContent":["import type { Core } from '@/core';\nimport fs from 'node:fs/promises';\n\nexport interface ConfigLoader {\n getCore(): Promise<Core>;\n}\n\nexport function createStandaloneConfigLoader({\n core,\n buildConfig,\n mode,\n}: {\n /**\n * core (not initialized)\n */\n core: Core;\n buildConfig: boolean;\n /**\n * In dev mode, the config file is dynamically re-loaded when it's updated.\n */\n mode: 'dev' | 'production';\n}): ConfigLoader {\n let prev:\n | {\n hash: string;\n init: Promise<void>;\n }\n | undefined;\n\n async function getConfigHash(): Promise<string> {\n if (mode === 'production') return 'static';\n\n const stats = await fs.stat(core.getOptions().configPath).catch(() => {\n throw new Error('Cannot find config file');\n });\n\n return stats.mtime.getTime().toString();\n }\n\n return {\n async getCore() {\n const hash = await getConfigHash();\n if (!prev || hash !== prev.hash) {\n prev = {\n hash,\n init: (async () => {\n const { loadConfig } = await import('../config/load-from-file');\n\n await core.init({\n config: loadConfig(core, buildConfig),\n });\n })(),\n };\n }\n\n await prev.init;\n return core;\n },\n };\n}\n\n/**\n * create config loader from initialized core\n */\nexport function createIntegratedConfigLoader(core: Core): ConfigLoader {\n return {\n async getCore() {\n return core;\n },\n };\n}\n","import type { CompilerOptions } from '@/loaders/mdx/build-mdx';\nimport type { LoadFnOutput, LoadHook } from 'node:module';\nimport { fileURLToPath } from 'node:url';\nimport fs from 'node:fs/promises';\nimport type { TransformPluginContext } from 'rollup';\nimport type { Environment, TransformResult } from 'vite';\nimport { parse } from 'node:querystring';\nimport { ValidationError } from '@/utils/validation';\nimport type { LoaderContext } from 'webpack';\nimport { readFileSync } from 'node:fs';\n\nexport interface LoaderInput {\n development: boolean;\n compiler: CompilerOptions;\n\n filePath: string;\n query: Record<string, string | string[] | undefined>;\n getSource: () => string | Promise<string>;\n}\n\nexport interface LoaderOutput {\n code: string;\n map?: unknown;\n\n /**\n * Only supported in Vite 8.\n *\n * Explicitly define the transformed module type, for unsupported environments, you need to consider the differences between each bundler.\n */\n moduleType?: 'js' | 'json';\n}\n\ntype Awaitable<T> = T | Promise<T>;\n\nexport interface Loader {\n /**\n * Filter file paths, the input can be either a file URL or file path.\n *\n * Must take resource query into consideration.\n */\n test?: RegExp;\n\n /**\n * Transform input into JavaScript.\n *\n * Returns:\n * - `LoaderOutput`: JavaScript code & source map.\n * - `null`: skip the loader. Fallback to default behaviour if possible, otherwise the adapter will try workarounds.\n */\n load: (input: LoaderInput) => Awaitable<LoaderOutput | null>;\n\n bun?: {\n /**\n * 1. Bun doesn't allow `null` in loaders.\n * 2. 
Bun requires sync result to support dynamic require().\n */\n load?: (source: string, input: LoaderInput) => Awaitable<Bun.OnLoadResult>;\n };\n}\n\nexport function toNode(loader: Loader): LoadHook {\n return async (url, _context, nextLoad): Promise<LoadFnOutput> => {\n if (url.startsWith('file:///') && (!loader.test || loader.test.test(url))) {\n const parsedUrl = new URL(url);\n const filePath = fileURLToPath(parsedUrl);\n\n const result = await loader.load({\n filePath,\n query: Object.fromEntries(parsedUrl.searchParams.entries()),\n async getSource() {\n return (await fs.readFile(filePath)).toString();\n },\n development: false,\n compiler: {\n addDependency() {},\n },\n });\n\n if (result) {\n return {\n source: result.code,\n format: 'module',\n shortCircuit: true,\n };\n }\n }\n\n return nextLoad(url);\n };\n}\n\nexport interface ViteLoader {\n filter: (id: string) => boolean;\n\n transform: (\n this: TransformPluginContext,\n value: string,\n id: string,\n ) => Promise<TransformResult | null>;\n}\n\nexport function toVite(loader: Loader): ViteLoader {\n return {\n filter(id) {\n return !loader.test || loader.test.test(id);\n },\n async transform(value, id) {\n // Vite doesn't expose the real context types\n const environment = (this as unknown as { environment: Environment }).environment;\n const [file, query = ''] = id.split('?', 2);\n\n const result = await loader.load({\n filePath: file,\n query: parse(query),\n getSource() {\n return value;\n },\n development: environment.mode === 'dev',\n compiler: {\n addDependency: (file) => {\n this.addWatchFile(file);\n },\n },\n });\n\n if (result === null) return null;\n return {\n code: result.code,\n map: result.map as TransformResult['map'],\n moduleType: result.moduleType,\n };\n },\n };\n}\n\nexport type WebpackLoader = (\n this: LoaderContext<unknown>,\n source: string,\n callback: LoaderContext<unknown>['callback'],\n) => Promise<void>;\n\n/**\n * need to handle the `test` regex in Webpack config instead.\n */\nexport function toWebpack(loader: Loader): WebpackLoader {\n return async function (source, callback) {\n try {\n const result = await loader.load({\n filePath: this.resourcePath,\n query: parse(this.resourceQuery.slice(1)),\n getSource() {\n return source;\n },\n development: this.mode === 'development',\n compiler: this,\n });\n\n if (result === null) {\n callback(undefined, source);\n } else {\n callback(undefined, result.code, result.map as string);\n }\n } catch (error) {\n if (error instanceof ValidationError) {\n return callback(new Error(await error.toStringFormatted()));\n }\n\n if (!(error instanceof Error)) throw error;\n callback(error);\n }\n };\n}\n\nexport function toBun(loader: Loader) {\n function toResult(output: LoaderOutput | null): Bun.OnLoadResult {\n // it errors, treat this as an exception\n if (!output) return;\n\n return {\n contents: output.code,\n loader: output.moduleType ?? 'js',\n };\n }\n\n return (build: Bun.PluginBuilder) => {\n // avoid using async here, because it will cause dynamic require() to fail\n build.onLoad({ filter: loader.test ?? 
/.+/ }, (args) => {\n const [filePath, query = ''] = args.path.split('?', 2);\n const input: LoaderInput = {\n async getSource() {\n return Bun.file(filePath).text();\n },\n query: parse(query),\n filePath,\n development: false,\n compiler: {\n addDependency() {},\n },\n };\n\n if (loader.bun?.load) {\n return loader.bun.load(readFileSync(filePath).toString(), input);\n }\n\n const result = loader.load(input);\n if (result instanceof Promise) {\n return result.then(toResult);\n }\n return toResult(result);\n });\n };\n}\n"],"mappings":";;;;;;;AAOA,SAAgB,6BAA6B,EAC3C,MACA,aACA,QAWe;CACf,IAAI;CAOJ,eAAe,gBAAiC;AAC9C,MAAI,SAAS,aAAc,QAAO;AAMlC,UAJc,MAAM,GAAG,KAAK,KAAK,YAAY,CAAC,WAAW,CAAC,YAAY;AACpE,SAAM,IAAI,MAAM,0BAA0B;IAC1C,EAEW,MAAM,SAAS,CAAC,UAAU;;AAGzC,QAAO,EACL,MAAM,UAAU;EACd,MAAM,OAAO,MAAM,eAAe;AAClC,MAAI,CAAC,QAAQ,SAAS,KAAK,KACzB,QAAO;GACL;GACA,OAAO,YAAY;IACjB,MAAM,EAAE,eAAe,MAAM,OAAO;AAEpC,UAAM,KAAK,KAAK,EACd,QAAQ,WAAW,MAAM,YAAY,EACtC,CAAC;OACA;GACL;AAGH,QAAM,KAAK;AACX,SAAO;IAEV;;;;;AAMH,SAAgB,6BAA6B,MAA0B;AACrE,QAAO,EACL,MAAM,UAAU;AACd,SAAO;IAEV;;;;;ACTH,SAAgB,OAAO,QAA0B;AAC/C,QAAO,OAAO,KAAK,UAAU,aAAoC;AAC/D,MAAI,IAAI,WAAW,WAAW,KAAK,CAAC,OAAO,QAAQ,OAAO,KAAK,KAAK,IAAI,GAAG;GACzE,MAAM,YAAY,IAAI,IAAI,IAAI;GAC9B,MAAM,WAAW,cAAc,UAAU;GAEzC,MAAM,SAAS,MAAM,OAAO,KAAK;IAC/B;IACA,OAAO,OAAO,YAAY,UAAU,aAAa,SAAS,CAAC;IAC3D,MAAM,YAAY;AAChB,aAAQ,MAAM,GAAG,SAAS,SAAS,EAAE,UAAU;;IAEjD,aAAa;IACb,UAAU,EACR,gBAAgB,IACjB;IACF,CAAC;AAEF,OAAI,OACF,QAAO;IACL,QAAQ,OAAO;IACf,QAAQ;IACR,cAAc;IACf;;AAIL,SAAO,SAAS,IAAI;;;AAcxB,SAAgB,OAAO,QAA4B;AACjD,QAAO;EACL,OAAO,IAAI;AACT,UAAO,CAAC,OAAO,QAAQ,OAAO,KAAK,KAAK,GAAG;;EAE7C,MAAM,UAAU,OAAO,IAAI;GAEzB,MAAM,cAAe,KAAiD;GACtE,MAAM,CAAC,MAAM,QAAQ,MAAM,GAAG,MAAM,KAAK,EAAE;GAE3C,MAAM,SAAS,MAAM,OAAO,KAAK;IAC/B,UAAU;IACV,OAAO,MAAM,MAAM;IACnB,YAAY;AACV,YAAO;;IAET,aAAa,YAAY,SAAS;IAClC,UAAU,EACR,gBAAgB,WAAS;AACvB,UAAK,aAAaA,OAAK;OAE1B;IACF,CAAC;AAEF,OAAI,WAAW,KAAM,QAAO;AAC5B,UAAO;IACL,MAAM,OAAO;IACb,KAAK,OAAO;IACZ,YAAY,OAAO;IACpB;;EAEJ;;;;;AAYH,SAAgB,UAAU,QAA+B;AACvD,QAAO,eAAgB,QAAQ,UAAU;AACvC,MAAI;GACF,MAAM,SAAS,MAAM,OAAO,KAAK;IAC/B,UAAU,KAAK;IACf,OAAO,MAAM,KAAK,cAAc,MAAM,EAAE,CAAC;IACzC,YAAY;AACV,YAAO;;IAET,aAAa,KAAK,SAAS;IAC3B,UAAU;IACX,CAAC;AAEF,OAAI,WAAW,KACb,UAAS,QAAW,OAAO;OAE3B,UAAS,QAAW,OAAO,MAAM,OAAO,IAAc;WAEjD,OAAO;AACd,OAAI,iBAAiB,gBACnB,QAAO,SAAS,IAAI,MAAM,MAAM,MAAM,mBAAmB,CAAC,CAAC;AAG7D,OAAI,EAAE,iBAAiB,OAAQ,OAAM;AACrC,YAAS,MAAM;;;;AAKrB,SAAgB,MAAM,QAAgB;CACpC,SAAS,SAAS,QAA+C;AAE/D,MAAI,CAAC,OAAQ;AAEb,SAAO;GACL,UAAU,OAAO;GACjB,QAAQ,OAAO,cAAc;GAC9B;;AAGH,SAAQ,UAA6B;AAEnC,QAAM,OAAO,EAAE,QAAQ,OAAO,QAAQ,MAAM,GAAG,SAAS;GACtD,MAAM,CAAC,UAAU,QAAQ,MAAM,KAAK,KAAK,MAAM,KAAK,EAAE;GACtD,MAAM,QAAqB;IACzB,MAAM,YAAY;AAChB,YAAO,IAAI,KAAK,SAAS,CAAC,MAAM;;IAElC,OAAO,MAAM,MAAM;IACnB;IACA,aAAa;IACb,UAAU,EACR,gBAAgB,IACjB;IACF;AAED,OAAI,OAAO,KAAK,KACd,QAAO,OAAO,IAAI,KAAK,aAAa,SAAS,CAAC,UAAU,EAAE,MAAM;GAGlE,MAAM,SAAS,OAAO,KAAK,MAAM;AACjC,OAAI,kBAAkB,QACpB,QAAO,OAAO,KAAK,SAAS;AAE9B,UAAO,SAAS,OAAO;IACvB"}
@@ -1 +1 @@
- {"version":3,"file":"build-BTTNEFmV.js","names":["matcher: picomatch.Matcher","loaded: GlobalConfig","result: ProcessorOptions | Promise<ProcessorOptions>"],"sources":["../src/config/build.ts"],"sourcesContent":["import type { ProcessorOptions } from '@mdx-js/mdx';\nimport type {\n AnyCollection,\n DocCollection,\n DocsCollection,\n GlobalConfig,\n MetaCollection,\n} from '@/config/define';\nimport picomatch from 'picomatch';\nimport { applyMdxPreset } from '@/config/preset';\nimport path from 'node:path';\n\nexport type BuildEnvironment = 'bundler' | 'runtime';\n\nexport interface LoadedConfig {\n collections: Map<string, CollectionItem>;\n global: GlobalConfig;\n getMDXOptions(\n collection?: DocCollectionItem,\n environment?: BuildEnvironment,\n ): ProcessorOptions | Promise<ProcessorOptions>;\n workspaces: Record<\n string,\n {\n dir: string;\n config: LoadedConfig;\n }\n >;\n}\n\nexport type CollectionItem = MetaCollectionItem | DocCollectionItem | DocsCollectionItem;\n\ninterface PrimitiveCollectionItem {\n name: string;\n cwd: string;\n /**\n * content directory (absolute)\n */\n dir: string;\n hasFile: (filePath: string) => boolean;\n isFileSupported: (filePath: string) => boolean;\n patterns: string[];\n}\n\nexport type MetaCollectionItem = PrimitiveCollectionItem & Omit<MetaCollection, 'files' | 'dir'>;\nexport type DocCollectionItem = PrimitiveCollectionItem & Omit<DocCollection, 'files' | 'dir'>;\n\nexport interface DocsCollectionItem\n extends Omit<DocsCollection, 'dir' | 'meta' | 'docs'>, Omit<PrimitiveCollectionItem, 'patterns'> {\n meta: MetaCollectionItem;\n docs: DocCollectionItem;\n}\n\nconst SupportedFormats = {\n doc: ['mdx', 'md'],\n meta: ['json', 'yaml'],\n};\n\nexport function buildCollection(\n name: string,\n collection: AnyCollection,\n cwd: string,\n): CollectionItem {\n if (collection.type === 'docs') {\n return {\n ...collection,\n type: 'docs',\n get dir() {\n return this.docs.dir;\n },\n name,\n meta: buildCollection(name, collection.meta, cwd) as MetaCollectionItem,\n docs: buildCollection(name, collection.docs, cwd) as DocCollectionItem,\n hasFile(filePath) {\n return this.docs.hasFile(filePath) || this.meta.hasFile(filePath);\n },\n isFileSupported(filePath) {\n return this.docs.isFileSupported(filePath) || this.meta.isFileSupported(filePath);\n },\n cwd,\n };\n }\n\n return {\n ...collection,\n ...buildPrimitiveCollection(name, collection, cwd),\n };\n}\n\nfunction buildPrimitiveCollection(\n name: string,\n config: DocCollection | MetaCollection,\n cwd: string,\n): PrimitiveCollectionItem {\n const supportedFormats = SupportedFormats[config.type];\n const patterns = config.files ?? 
[`**/*.{${supportedFormats.join(',')}}`];\n let matcher: picomatch.Matcher;\n\n return {\n dir: path.resolve(cwd, config.dir),\n cwd,\n name,\n patterns,\n isFileSupported(filePath) {\n return supportedFormats.some((format) => filePath.endsWith(`.${format}`));\n },\n hasFile(filePath) {\n if (!this.isFileSupported(filePath)) return false;\n\n const relativePath = path.relative(this.dir, filePath);\n if (relativePath.startsWith(`..${path.sep}`)) return false;\n\n return (matcher ??= picomatch(patterns))(relativePath);\n },\n };\n}\n\nexport function buildConfig(config: Record<string, unknown>, cwd = process.cwd()): LoadedConfig {\n const collections = new Map<string, CollectionItem>();\n const loaded: GlobalConfig = {};\n\n for (const [k, v] of Object.entries(config)) {\n if (!v) {\n continue;\n }\n\n if (typeof v === 'object' && 'type' in v) {\n if (v.type === 'docs' || v.type === 'doc' || v.type === 'meta') {\n collections.set(k, buildCollection(k, v as AnyCollection, cwd));\n continue;\n }\n }\n\n if (k === 'default' && v) {\n Object.assign(loaded, v);\n continue;\n }\n\n throw new Error(\n `Unknown export \"${k}\", you can only export collections from source configuration file.`,\n );\n }\n\n const mdxOptionsCache = new Map<string, ProcessorOptions | Promise<ProcessorOptions>>();\n return {\n global: loaded,\n collections,\n workspaces: Object.fromEntries(\n Object.entries(loaded.workspaces ?? {}).map(([key, value]) => {\n return [\n key,\n {\n dir: value.dir,\n config: buildConfig(value.config, path.resolve(cwd, value.dir)),\n },\n ];\n }),\n ),\n getMDXOptions(collection, environment = 'bundler') {\n const key = collection ? `${environment}:${collection.name}` : environment;\n const cached = mdxOptionsCache.get(key);\n if (cached) return cached;\n let result: ProcessorOptions | Promise<ProcessorOptions>;\n\n if (collection?.mdxOptions) {\n const optionsFn = collection.mdxOptions;\n result = typeof optionsFn === 'function' ? optionsFn(environment) : optionsFn;\n } else {\n result = (async () => {\n const optionsFn = this.global.mdxOptions;\n const options = typeof optionsFn === 'function' ? 
await optionsFn() : optionsFn;\n\n return applyMdxPreset(options)(environment);\n })();\n }\n\n mdxOptionsCache.set(key, result);\n return result;\n },\n };\n}\n"],"mappings":";;;;;AAqDA,MAAM,mBAAmB;CACvB,KAAK,CAAC,OAAO,KAAK;CAClB,MAAM,CAAC,QAAQ,OAAO;CACvB;AAED,SAAgB,gBACd,MACA,YACA,KACgB;AAChB,KAAI,WAAW,SAAS,OACtB,QAAO;EACL,GAAG;EACH,MAAM;EACN,IAAI,MAAM;AACR,UAAO,KAAK,KAAK;;EAEnB;EACA,MAAM,gBAAgB,MAAM,WAAW,MAAM,IAAI;EACjD,MAAM,gBAAgB,MAAM,WAAW,MAAM,IAAI;EACjD,QAAQ,UAAU;AAChB,UAAO,KAAK,KAAK,QAAQ,SAAS,IAAI,KAAK,KAAK,QAAQ,SAAS;;EAEnE,gBAAgB,UAAU;AACxB,UAAO,KAAK,KAAK,gBAAgB,SAAS,IAAI,KAAK,KAAK,gBAAgB,SAAS;;EAEnF;EACD;AAGH,QAAO;EACL,GAAG;EACH,GAAG,yBAAyB,MAAM,YAAY,IAAI;EACnD;;AAGH,SAAS,yBACP,MACA,QACA,KACyB;CACzB,MAAM,mBAAmB,iBAAiB,OAAO;CACjD,MAAM,WAAW,OAAO,SAAS,CAAC,SAAS,iBAAiB,KAAK,IAAI,CAAC,GAAG;CACzE,IAAIA;AAEJ,QAAO;EACL,KAAK,KAAK,QAAQ,KAAK,OAAO,IAAI;EAClC;EACA;EACA;EACA,gBAAgB,UAAU;AACxB,UAAO,iBAAiB,MAAM,WAAW,SAAS,SAAS,IAAI,SAAS,CAAC;;EAE3E,QAAQ,UAAU;AAChB,OAAI,CAAC,KAAK,gBAAgB,SAAS,CAAE,QAAO;GAE5C,MAAM,eAAe,KAAK,SAAS,KAAK,KAAK,SAAS;AACtD,OAAI,aAAa,WAAW,KAAK,KAAK,MAAM,CAAE,QAAO;AAErD,WAAQ,YAAY,UAAU,SAAS,EAAE,aAAa;;EAEzD;;AAGH,SAAgB,YAAY,QAAiC,MAAM,QAAQ,KAAK,EAAgB;CAC9F,MAAM,8BAAc,IAAI,KAA6B;CACrD,MAAMC,SAAuB,EAAE;AAE/B,MAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ,OAAO,EAAE;AAC3C,MAAI,CAAC,EACH;AAGF,MAAI,OAAO,MAAM,YAAY,UAAU,GACrC;OAAI,EAAE,SAAS,UAAU,EAAE,SAAS,SAAS,EAAE,SAAS,QAAQ;AAC9D,gBAAY,IAAI,GAAG,gBAAgB,GAAG,GAAoB,IAAI,CAAC;AAC/D;;;AAIJ,MAAI,MAAM,aAAa,GAAG;AACxB,UAAO,OAAO,QAAQ,EAAE;AACxB;;AAGF,QAAM,IAAI,MACR,mBAAmB,EAAE,oEACtB;;CAGH,MAAM,kCAAkB,IAAI,KAA2D;AACvF,QAAO;EACL,QAAQ;EACR;EACA,YAAY,OAAO,YACjB,OAAO,QAAQ,OAAO,cAAc,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,WAAW;AAC5D,UAAO,CACL,KACA;IACE,KAAK,MAAM;IACX,QAAQ,YAAY,MAAM,QAAQ,KAAK,QAAQ,KAAK,MAAM,IAAI,CAAC;IAChE,CACF;IACD,CACH;EACD,cAAc,YAAY,cAAc,WAAW;GACjD,MAAM,MAAM,aAAa,GAAG,YAAY,GAAG,WAAW,SAAS;GAC/D,MAAM,SAAS,gBAAgB,IAAI,IAAI;AACvC,OAAI,OAAQ,QAAO;GACnB,IAAIC;AAEJ,OAAI,YAAY,YAAY;IAC1B,MAAM,YAAY,WAAW;AAC7B,aAAS,OAAO,cAAc,aAAa,UAAU,YAAY,GAAG;SAEpE,WAAU,YAAY;IACpB,MAAM,YAAY,KAAK,OAAO;AAG9B,WAAO,eAFS,OAAO,cAAc,aAAa,MAAM,WAAW,GAAG,UAExC,CAAC,YAAY;OACzC;AAGN,mBAAgB,IAAI,KAAK,OAAO;AAChC,UAAO;;EAEV"}
+ {"version":3,"file":"build-BTTNEFmV.js","names":[],"sources":["../src/config/build.ts"],"sourcesContent":["import type { ProcessorOptions } from '@mdx-js/mdx';\nimport type {\n AnyCollection,\n DocCollection,\n DocsCollection,\n GlobalConfig,\n MetaCollection,\n} from '@/config/define';\nimport picomatch from 'picomatch';\nimport { applyMdxPreset } from '@/config/preset';\nimport path from 'node:path';\n\nexport type BuildEnvironment = 'bundler' | 'runtime';\n\nexport interface LoadedConfig {\n collections: Map<string, CollectionItem>;\n global: GlobalConfig;\n getMDXOptions(\n collection?: DocCollectionItem,\n environment?: BuildEnvironment,\n ): ProcessorOptions | Promise<ProcessorOptions>;\n workspaces: Record<\n string,\n {\n dir: string;\n config: LoadedConfig;\n }\n >;\n}\n\nexport type CollectionItem = MetaCollectionItem | DocCollectionItem | DocsCollectionItem;\n\ninterface PrimitiveCollectionItem {\n name: string;\n cwd: string;\n /**\n * content directory (absolute)\n */\n dir: string;\n hasFile: (filePath: string) => boolean;\n isFileSupported: (filePath: string) => boolean;\n patterns: string[];\n}\n\nexport type MetaCollectionItem = PrimitiveCollectionItem & Omit<MetaCollection, 'files' | 'dir'>;\nexport type DocCollectionItem = PrimitiveCollectionItem & Omit<DocCollection, 'files' | 'dir'>;\n\nexport interface DocsCollectionItem\n extends Omit<DocsCollection, 'dir' | 'meta' | 'docs'>, Omit<PrimitiveCollectionItem, 'patterns'> {\n meta: MetaCollectionItem;\n docs: DocCollectionItem;\n}\n\nconst SupportedFormats = {\n doc: ['mdx', 'md'],\n meta: ['json', 'yaml'],\n};\n\nexport function buildCollection(\n name: string,\n collection: AnyCollection,\n cwd: string,\n): CollectionItem {\n if (collection.type === 'docs') {\n return {\n ...collection,\n type: 'docs',\n get dir() {\n return this.docs.dir;\n },\n name,\n meta: buildCollection(name, collection.meta, cwd) as MetaCollectionItem,\n docs: buildCollection(name, collection.docs, cwd) as DocCollectionItem,\n hasFile(filePath) {\n return this.docs.hasFile(filePath) || this.meta.hasFile(filePath);\n },\n isFileSupported(filePath) {\n return this.docs.isFileSupported(filePath) || this.meta.isFileSupported(filePath);\n },\n cwd,\n };\n }\n\n return {\n ...collection,\n ...buildPrimitiveCollection(name, collection, cwd),\n };\n}\n\nfunction buildPrimitiveCollection(\n name: string,\n config: DocCollection | MetaCollection,\n cwd: string,\n): PrimitiveCollectionItem {\n const supportedFormats = SupportedFormats[config.type];\n const patterns = config.files ?? 
[`**/*.{${supportedFormats.join(',')}}`];\n let matcher: picomatch.Matcher;\n\n return {\n dir: path.resolve(cwd, config.dir),\n cwd,\n name,\n patterns,\n isFileSupported(filePath) {\n return supportedFormats.some((format) => filePath.endsWith(`.${format}`));\n },\n hasFile(filePath) {\n if (!this.isFileSupported(filePath)) return false;\n\n const relativePath = path.relative(this.dir, filePath);\n if (relativePath.startsWith(`..${path.sep}`)) return false;\n\n return (matcher ??= picomatch(patterns))(relativePath);\n },\n };\n}\n\nexport function buildConfig(config: Record<string, unknown>, cwd = process.cwd()): LoadedConfig {\n const collections = new Map<string, CollectionItem>();\n const loaded: GlobalConfig = {};\n\n for (const [k, v] of Object.entries(config)) {\n if (!v) {\n continue;\n }\n\n if (typeof v === 'object' && 'type' in v) {\n if (v.type === 'docs' || v.type === 'doc' || v.type === 'meta') {\n collections.set(k, buildCollection(k, v as AnyCollection, cwd));\n continue;\n }\n }\n\n if (k === 'default' && v) {\n Object.assign(loaded, v);\n continue;\n }\n\n throw new Error(\n `Unknown export \"${k}\", you can only export collections from source configuration file.`,\n );\n }\n\n const mdxOptionsCache = new Map<string, ProcessorOptions | Promise<ProcessorOptions>>();\n return {\n global: loaded,\n collections,\n workspaces: Object.fromEntries(\n Object.entries(loaded.workspaces ?? {}).map(([key, value]) => {\n return [\n key,\n {\n dir: value.dir,\n config: buildConfig(value.config, path.resolve(cwd, value.dir)),\n },\n ];\n }),\n ),\n getMDXOptions(collection, environment = 'bundler') {\n const key = collection ? `${environment}:${collection.name}` : environment;\n const cached = mdxOptionsCache.get(key);\n if (cached) return cached;\n let result: ProcessorOptions | Promise<ProcessorOptions>;\n\n if (collection?.mdxOptions) {\n const optionsFn = collection.mdxOptions;\n result = typeof optionsFn === 'function' ? optionsFn(environment) : optionsFn;\n } else {\n result = (async () => {\n const optionsFn = this.global.mdxOptions;\n const options = typeof optionsFn === 'function' ? 
await optionsFn() : optionsFn;\n\n return applyMdxPreset(options)(environment);\n })();\n }\n\n mdxOptionsCache.set(key, result);\n return result;\n },\n };\n}\n"],"mappings":";;;;;AAqDA,MAAM,mBAAmB;CACvB,KAAK,CAAC,OAAO,KAAK;CAClB,MAAM,CAAC,QAAQ,OAAO;CACvB;AAED,SAAgB,gBACd,MACA,YACA,KACgB;AAChB,KAAI,WAAW,SAAS,OACtB,QAAO;EACL,GAAG;EACH,MAAM;EACN,IAAI,MAAM;AACR,UAAO,KAAK,KAAK;;EAEnB;EACA,MAAM,gBAAgB,MAAM,WAAW,MAAM,IAAI;EACjD,MAAM,gBAAgB,MAAM,WAAW,MAAM,IAAI;EACjD,QAAQ,UAAU;AAChB,UAAO,KAAK,KAAK,QAAQ,SAAS,IAAI,KAAK,KAAK,QAAQ,SAAS;;EAEnE,gBAAgB,UAAU;AACxB,UAAO,KAAK,KAAK,gBAAgB,SAAS,IAAI,KAAK,KAAK,gBAAgB,SAAS;;EAEnF;EACD;AAGH,QAAO;EACL,GAAG;EACH,GAAG,yBAAyB,MAAM,YAAY,IAAI;EACnD;;AAGH,SAAS,yBACP,MACA,QACA,KACyB;CACzB,MAAM,mBAAmB,iBAAiB,OAAO;CACjD,MAAM,WAAW,OAAO,SAAS,CAAC,SAAS,iBAAiB,KAAK,IAAI,CAAC,GAAG;CACzE,IAAI;AAEJ,QAAO;EACL,KAAK,KAAK,QAAQ,KAAK,OAAO,IAAI;EAClC;EACA;EACA;EACA,gBAAgB,UAAU;AACxB,UAAO,iBAAiB,MAAM,WAAW,SAAS,SAAS,IAAI,SAAS,CAAC;;EAE3E,QAAQ,UAAU;AAChB,OAAI,CAAC,KAAK,gBAAgB,SAAS,CAAE,QAAO;GAE5C,MAAM,eAAe,KAAK,SAAS,KAAK,KAAK,SAAS;AACtD,OAAI,aAAa,WAAW,KAAK,KAAK,MAAM,CAAE,QAAO;AAErD,WAAQ,YAAY,UAAU,SAAS,EAAE,aAAa;;EAEzD;;AAGH,SAAgB,YAAY,QAAiC,MAAM,QAAQ,KAAK,EAAgB;CAC9F,MAAM,8BAAc,IAAI,KAA6B;CACrD,MAAM,SAAuB,EAAE;AAE/B,MAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ,OAAO,EAAE;AAC3C,MAAI,CAAC,EACH;AAGF,MAAI,OAAO,MAAM,YAAY,UAAU,GACrC;OAAI,EAAE,SAAS,UAAU,EAAE,SAAS,SAAS,EAAE,SAAS,QAAQ;AAC9D,gBAAY,IAAI,GAAG,gBAAgB,GAAG,GAAoB,IAAI,CAAC;AAC/D;;;AAIJ,MAAI,MAAM,aAAa,GAAG;AACxB,UAAO,OAAO,QAAQ,EAAE;AACxB;;AAGF,QAAM,IAAI,MACR,mBAAmB,EAAE,oEACtB;;CAGH,MAAM,kCAAkB,IAAI,KAA2D;AACvF,QAAO;EACL,QAAQ;EACR;EACA,YAAY,OAAO,YACjB,OAAO,QAAQ,OAAO,cAAc,EAAE,CAAC,CAAC,KAAK,CAAC,KAAK,WAAW;AAC5D,UAAO,CACL,KACA;IACE,KAAK,MAAM;IACX,QAAQ,YAAY,MAAM,QAAQ,KAAK,QAAQ,KAAK,MAAM,IAAI,CAAC;IAChE,CACF;IACD,CACH;EACD,cAAc,YAAY,cAAc,WAAW;GACjD,MAAM,MAAM,aAAa,GAAG,YAAY,GAAG,WAAW,SAAS;GAC/D,MAAM,SAAS,gBAAgB,IAAI,IAAI;AACvC,OAAI,OAAQ,QAAO;GACnB,IAAI;AAEJ,OAAI,YAAY,YAAY;IAC1B,MAAM,YAAY,WAAW;AAC7B,aAAS,OAAO,cAAc,aAAa,UAAU,YAAY,GAAG;SAEpE,WAAU,YAAY;IACpB,MAAM,YAAY,KAAK,OAAO;AAG9B,WAAO,eAFS,OAAO,cAAc,aAAa,MAAM,WAAW,GAAG,UAExC,CAAC,YAAY;OACzC;AAGN,mBAAgB,IAAI,KAAK,OAAO;AAChC,UAAO;;EAEV"}
@@ -1 +1 @@
- {"version":3,"file":"build-mdx-BVeBo4jT.js","names":["_stringifyProcessor: Processor | undefined","urls: ExtractedReference[]","postprocessOptions: PostprocessOptions"],"sources":["../src/loaders/mdx/remark-postprocess.ts","../src/loaders/mdx/build-mdx.ts"],"sourcesContent":["import type { Processor, Transformer } from 'unified';\nimport type { Root, RootContent } from 'mdast';\nimport { visit } from 'unist-util-visit';\nimport { toMarkdown } from 'mdast-util-to-markdown';\nimport { valueToEstree } from 'estree-util-value-to-estree';\nimport { removePosition } from 'unist-util-remove-position';\nimport remarkMdx from 'remark-mdx';\nimport { flattenNode } from './mdast-utils';\n\nexport interface ExtractedReference {\n href: string;\n}\n\nexport interface PostprocessOptions {\n _format: 'md' | 'mdx';\n\n /**\n * Properties to export from `vfile.data`\n */\n valueToExport?: string[];\n\n /**\n * stringify MDAST and export via `_markdown`.\n */\n includeProcessedMarkdown?: boolean;\n\n /**\n * extract link references, export via `extractedReferences`.\n */\n extractLinkReferences?: boolean;\n\n /**\n * store MDAST and export via `_mdast`.\n */\n includeMDAST?:\n | boolean\n | {\n removePosition?: boolean;\n };\n}\n\n/**\n * - collect references\n * - write frontmatter (auto-title & description)\n */\nexport function remarkPostprocess(\n this: Processor,\n {\n _format,\n includeProcessedMarkdown = false,\n includeMDAST = false,\n extractLinkReferences = false,\n valueToExport = [],\n }: PostprocessOptions,\n): Transformer<Root, Root> {\n let _stringifyProcessor: Processor | undefined;\n const getStringifyProcessor = () => {\n return (_stringifyProcessor ??=\n _format === 'mdx'\n ? this\n : // force Markdown processor to stringify MDX nodes\n this().use(remarkMdx).freeze());\n };\n\n return (tree, file) => {\n const frontmatter = (file.data.frontmatter ??= {});\n if (!frontmatter.title) {\n visit(tree, 'heading', (node) => {\n if (node.depth === 1) {\n frontmatter.title = flattenNode(node);\n return false;\n }\n });\n }\n\n file.data['mdx-export'] ??= [];\n file.data['mdx-export'].push({\n name: 'frontmatter',\n value: frontmatter,\n });\n\n if (extractLinkReferences) {\n const urls: ExtractedReference[] = [];\n\n visit(tree, 'link', (node) => {\n urls.push({\n href: node.url,\n });\n return 'skip';\n });\n\n file.data['mdx-export'].push({\n name: 'extractedReferences',\n value: urls,\n });\n }\n\n if (includeProcessedMarkdown) {\n const processor = getStringifyProcessor();\n const markdown = toMarkdown(tree, {\n ...processor.data('settings'),\n // from https://github.com/remarkjs/remark/blob/main/packages/remark-stringify/lib/index.js\n extensions: processor.data('toMarkdownExtensions') || [],\n });\n\n file.data['mdx-export'].push({\n name: '_markdown',\n value: markdown,\n });\n }\n\n if (includeMDAST) {\n const options = includeMDAST === true ? {} : includeMDAST;\n const mdast = JSON.stringify(\n options.removePosition ? 
removePosition(structuredClone(tree)) : tree,\n );\n\n file.data['mdx-export'].push({\n name: '_mdast',\n value: mdast,\n });\n }\n\n for (const { name, value } of file.data['mdx-export']) {\n tree.children.unshift(getMdastExport(name, value));\n }\n\n // reset the data to reduce memory usage\n file.data['mdx-export'] = [];\n\n for (const name of valueToExport) {\n if (!(name in file.data)) continue;\n\n tree.children.unshift(getMdastExport(name, file.data[name]));\n }\n };\n}\n\n/**\n * MDX.js first converts javascript (with esm support) into mdast nodes with remark-mdx, then handle the other remark plugins\n *\n * Therefore, if we want to inject an export, we must convert the object into AST, then add the mdast node\n */\nfunction getMdastExport(name: string, value: unknown): RootContent {\n return {\n type: 'mdxjsEsm',\n value: '',\n data: {\n estree: {\n type: 'Program',\n sourceType: 'module',\n body: [\n {\n type: 'ExportNamedDeclaration',\n attributes: [],\n specifiers: [],\n source: null,\n declaration: {\n type: 'VariableDeclaration',\n kind: 'let',\n declarations: [\n {\n type: 'VariableDeclarator',\n id: {\n type: 'Identifier',\n name,\n },\n init: valueToEstree(value),\n },\n ],\n },\n },\n ],\n },\n },\n };\n}\n","import { createProcessor } from '@mdx-js/mdx';\nimport { VFile } from 'vfile';\nimport { remarkInclude } from '@/loaders/mdx/remark-include';\nimport type { StructuredData } from 'fumadocs-core/mdx-plugins';\nimport type { TOCItemType } from 'fumadocs-core/toc';\nimport type { FC } from 'react';\nimport type { MDXProps } from 'mdx/types';\nimport { type PostprocessOptions, remarkPostprocess } from '@/loaders/mdx/remark-postprocess';\nimport type { Core } from '@/core';\nimport type { DocCollectionItem } from '@/config/build';\n\ntype Processor = ReturnType<typeof createProcessor>;\n\ninterface BuildMDXOptions {\n /**\n * Specify a file path for source\n */\n filePath: string;\n source: string;\n frontmatter?: Record<string, unknown>;\n\n environment: 'bundler' | 'runtime';\n isDevelopment: boolean;\n _compiler?: CompilerOptions;\n}\n\nexport interface CompilerOptions {\n addDependency: (file: string) => void;\n}\n\nexport interface CompiledMDXProperties<Frontmatter = Record<string, unknown>> {\n frontmatter: Frontmatter;\n structuredData: StructuredData;\n toc: TOCItemType[];\n default: FC<MDXProps>;\n\n /**\n * Enable from `postprocess` option.\n */\n _markdown?: string;\n /**\n * Enable from `postprocess` option.\n */\n _mdast?: string;\n}\n\nexport interface FumadocsDataMap {\n /**\n * [Fumadocs MDX] raw frontmatter, you can modify it\n */\n frontmatter?: Record<string, unknown>;\n\n /**\n * [Fumadocs MDX] additional ESM exports to write\n */\n 'mdx-export'?: { name: string; value: unknown }[];\n\n /**\n * [Fumadocs MDX] The compiler object from loader\n */\n _compiler?: CompilerOptions;\n\n /**\n * [Fumadocs MDX] get internal processor, do not use this on user land.\n */\n _getProcessor?: (format: 'md' | 'mdx') => Processor;\n}\n\ndeclare module 'vfile' {\n // eslint-disable-next-line @typescript-eslint/no-empty-object-type -- extend data map\n interface DataMap extends FumadocsDataMap {}\n}\n\nexport async function buildMDX(\n core: Core,\n collection: DocCollectionItem | undefined,\n { filePath, frontmatter, source, _compiler, environment, isDevelopment }: BuildMDXOptions,\n): Promise<VFile> {\n const mdxOptions = await core.getConfig().getMDXOptions(collection, environment);\n\n function getProcessor(format: 'md' | 'mdx') {\n const cache = core.cache as 
Map<string, Processor>;\n const key = `build-mdx:${collection?.name ?? 'global'}:${format}`;\n let processor = cache.get(key);\n\n if (!processor) {\n const postprocessOptions: PostprocessOptions = {\n _format: format,\n ...collection?.postprocess,\n };\n\n processor = createProcessor({\n outputFormat: 'program',\n development: isDevelopment,\n ...mdxOptions,\n remarkPlugins: [\n remarkInclude,\n ...(mdxOptions.remarkPlugins ?? []),\n [remarkPostprocess, postprocessOptions],\n ],\n format,\n });\n\n cache.set(key, processor);\n }\n\n return processor;\n }\n\n let vfile = new VFile({\n value: source,\n path: filePath,\n cwd: collection?.cwd,\n data: { frontmatter, _compiler, _getProcessor: getProcessor },\n });\n\n if (collection) {\n vfile = await core.transformVFile({ collection, filePath, source }, vfile);\n }\n\n return getProcessor(filePath.endsWith('.mdx') ? 'mdx' : 'md').process(vfile);\n}\n"],"mappings":";;;;;;;;;;;;;;AA6CA,SAAgB,kBAEd,EACE,SACA,2BAA2B,OAC3B,eAAe,OACf,wBAAwB,OACxB,gBAAgB,EAAE,IAEK;CACzB,IAAIA;CACJ,MAAM,8BAA8B;AAClC,SAAQ,wBACN,YAAY,QACR,OAEA,MAAM,CAAC,IAAI,UAAU,CAAC,QAAQ;;AAGtC,SAAQ,MAAM,SAAS;EACrB,MAAM,cAAe,KAAK,KAAK,gBAAgB,EAAE;AACjD,MAAI,CAAC,YAAY,MACf,OAAM,MAAM,YAAY,SAAS;AAC/B,OAAI,KAAK,UAAU,GAAG;AACpB,gBAAY,QAAQ,YAAY,KAAK;AACrC,WAAO;;IAET;AAGJ,OAAK,KAAK,kBAAkB,EAAE;AAC9B,OAAK,KAAK,cAAc,KAAK;GAC3B,MAAM;GACN,OAAO;GACR,CAAC;AAEF,MAAI,uBAAuB;GACzB,MAAMC,OAA6B,EAAE;AAErC,SAAM,MAAM,SAAS,SAAS;AAC5B,SAAK,KAAK,EACR,MAAM,KAAK,KACZ,CAAC;AACF,WAAO;KACP;AAEF,QAAK,KAAK,cAAc,KAAK;IAC3B,MAAM;IACN,OAAO;IACR,CAAC;;AAGJ,MAAI,0BAA0B;GAC5B,MAAM,YAAY,uBAAuB;GACzC,MAAM,WAAW,WAAW,MAAM;IAChC,GAAG,UAAU,KAAK,WAAW;IAE7B,YAAY,UAAU,KAAK,uBAAuB,IAAI,EAAE;IACzD,CAAC;AAEF,QAAK,KAAK,cAAc,KAAK;IAC3B,MAAM;IACN,OAAO;IACR,CAAC;;AAGJ,MAAI,cAAc;GAChB,MAAM,UAAU,iBAAiB,OAAO,EAAE,GAAG;GAC7C,MAAM,QAAQ,KAAK,UACjB,QAAQ,iBAAiB,eAAe,gBAAgB,KAAK,CAAC,GAAG,KAClE;AAED,QAAK,KAAK,cAAc,KAAK;IAC3B,MAAM;IACN,OAAO;IACR,CAAC;;AAGJ,OAAK,MAAM,EAAE,MAAM,WAAW,KAAK,KAAK,cACtC,MAAK,SAAS,QAAQ,eAAe,MAAM,MAAM,CAAC;AAIpD,OAAK,KAAK,gBAAgB,EAAE;AAE5B,OAAK,MAAM,QAAQ,eAAe;AAChC,OAAI,EAAE,QAAQ,KAAK,MAAO;AAE1B,QAAK,SAAS,QAAQ,eAAe,MAAM,KAAK,KAAK,MAAM,CAAC;;;;;;;;;AAUlE,SAAS,eAAe,MAAc,OAA6B;AACjE,QAAO;EACL,MAAM;EACN,OAAO;EACP,MAAM,EACJ,QAAQ;GACN,MAAM;GACN,YAAY;GACZ,MAAM,CACJ;IACE,MAAM;IACN,YAAY,EAAE;IACd,YAAY,EAAE;IACd,QAAQ;IACR,aAAa;KACX,MAAM;KACN,MAAM;KACN,cAAc,CACZ;MACE,MAAM;MACN,IAAI;OACF,MAAM;OACN;OACD;MACD,MAAM,cAAc,MAAM;MAC3B,CACF;KACF;IACF,CACF;GACF,EACF;EACF;;;;;ACtGH,eAAsB,SACpB,MACA,YACA,EAAE,UAAU,aAAa,QAAQ,WAAW,aAAa,iBACzC;CAChB,MAAM,aAAa,MAAM,KAAK,WAAW,CAAC,cAAc,YAAY,YAAY;CAEhF,SAAS,aAAa,QAAsB;EAC1C,MAAM,QAAQ,KAAK;EACnB,MAAM,MAAM,aAAa,YAAY,QAAQ,SAAS,GAAG;EACzD,IAAI,YAAY,MAAM,IAAI,IAAI;AAE9B,MAAI,CAAC,WAAW;GACd,MAAMC,qBAAyC;IAC7C,SAAS;IACT,GAAG,YAAY;IAChB;AAED,eAAY,gBAAgB;IAC1B,cAAc;IACd,aAAa;IACb,GAAG;IACH,eAAe;KACb;KACA,GAAI,WAAW,iBAAiB,EAAE;KAClC,CAAC,mBAAmB,mBAAmB;KACxC;IACD;IACD,CAAC;AAEF,SAAM,IAAI,KAAK,UAAU;;AAG3B,SAAO;;CAGT,IAAI,QAAQ,IAAI,MAAM;EACpB,OAAO;EACP,MAAM;EACN,KAAK,YAAY;EACjB,MAAM;GAAE;GAAa;GAAW,eAAe;GAAc;EAC9D,CAAC;AAEF,KAAI,WACF,SAAQ,MAAM,KAAK,eAAe;EAAE;EAAY;EAAU;EAAQ,EAAE,MAAM;AAG5E,QAAO,aAAa,SAAS,SAAS,OAAO,GAAG,QAAQ,KAAK,CAAC,QAAQ,MAAM"}
+ {"version":3,"file":"build-mdx-BVeBo4jT.js","names":[],"sources":["../src/loaders/mdx/remark-postprocess.ts","../src/loaders/mdx/build-mdx.ts"],"sourcesContent":["import type { Processor, Transformer } from 'unified';\nimport type { Root, RootContent } from 'mdast';\nimport { visit } from 'unist-util-visit';\nimport { toMarkdown } from 'mdast-util-to-markdown';\nimport { valueToEstree } from 'estree-util-value-to-estree';\nimport { removePosition } from 'unist-util-remove-position';\nimport remarkMdx from 'remark-mdx';\nimport { flattenNode } from './mdast-utils';\n\nexport interface ExtractedReference {\n href: string;\n}\n\nexport interface PostprocessOptions {\n _format: 'md' | 'mdx';\n\n /**\n * Properties to export from `vfile.data`\n */\n valueToExport?: string[];\n\n /**\n * stringify MDAST and export via `_markdown`.\n */\n includeProcessedMarkdown?: boolean;\n\n /**\n * extract link references, export via `extractedReferences`.\n */\n extractLinkReferences?: boolean;\n\n /**\n * store MDAST and export via `_mdast`.\n */\n includeMDAST?:\n | boolean\n | {\n removePosition?: boolean;\n };\n}\n\n/**\n * - collect references\n * - write frontmatter (auto-title & description)\n */\nexport function remarkPostprocess(\n this: Processor,\n {\n _format,\n includeProcessedMarkdown = false,\n includeMDAST = false,\n extractLinkReferences = false,\n valueToExport = [],\n }: PostprocessOptions,\n): Transformer<Root, Root> {\n let _stringifyProcessor: Processor | undefined;\n const getStringifyProcessor = () => {\n return (_stringifyProcessor ??=\n _format === 'mdx'\n ? this\n : // force Markdown processor to stringify MDX nodes\n this().use(remarkMdx).freeze());\n };\n\n return (tree, file) => {\n const frontmatter = (file.data.frontmatter ??= {});\n if (!frontmatter.title) {\n visit(tree, 'heading', (node) => {\n if (node.depth === 1) {\n frontmatter.title = flattenNode(node);\n return false;\n }\n });\n }\n\n file.data['mdx-export'] ??= [];\n file.data['mdx-export'].push({\n name: 'frontmatter',\n value: frontmatter,\n });\n\n if (extractLinkReferences) {\n const urls: ExtractedReference[] = [];\n\n visit(tree, 'link', (node) => {\n urls.push({\n href: node.url,\n });\n return 'skip';\n });\n\n file.data['mdx-export'].push({\n name: 'extractedReferences',\n value: urls,\n });\n }\n\n if (includeProcessedMarkdown) {\n const processor = getStringifyProcessor();\n const markdown = toMarkdown(tree, {\n ...processor.data('settings'),\n // from https://github.com/remarkjs/remark/blob/main/packages/remark-stringify/lib/index.js\n extensions: processor.data('toMarkdownExtensions') || [],\n });\n\n file.data['mdx-export'].push({\n name: '_markdown',\n value: markdown,\n });\n }\n\n if (includeMDAST) {\n const options = includeMDAST === true ? {} : includeMDAST;\n const mdast = JSON.stringify(\n options.removePosition ? 
removePosition(structuredClone(tree)) : tree,\n );\n\n file.data['mdx-export'].push({\n name: '_mdast',\n value: mdast,\n });\n }\n\n for (const { name, value } of file.data['mdx-export']) {\n tree.children.unshift(getMdastExport(name, value));\n }\n\n // reset the data to reduce memory usage\n file.data['mdx-export'] = [];\n\n for (const name of valueToExport) {\n if (!(name in file.data)) continue;\n\n tree.children.unshift(getMdastExport(name, file.data[name]));\n }\n };\n}\n\n/**\n * MDX.js first converts javascript (with esm support) into mdast nodes with remark-mdx, then handle the other remark plugins\n *\n * Therefore, if we want to inject an export, we must convert the object into AST, then add the mdast node\n */\nfunction getMdastExport(name: string, value: unknown): RootContent {\n return {\n type: 'mdxjsEsm',\n value: '',\n data: {\n estree: {\n type: 'Program',\n sourceType: 'module',\n body: [\n {\n type: 'ExportNamedDeclaration',\n attributes: [],\n specifiers: [],\n source: null,\n declaration: {\n type: 'VariableDeclaration',\n kind: 'let',\n declarations: [\n {\n type: 'VariableDeclarator',\n id: {\n type: 'Identifier',\n name,\n },\n init: valueToEstree(value),\n },\n ],\n },\n },\n ],\n },\n },\n };\n}\n","import { createProcessor } from '@mdx-js/mdx';\nimport { VFile } from 'vfile';\nimport { remarkInclude } from '@/loaders/mdx/remark-include';\nimport type { StructuredData } from 'fumadocs-core/mdx-plugins';\nimport type { TOCItemType } from 'fumadocs-core/toc';\nimport type { FC } from 'react';\nimport type { MDXProps } from 'mdx/types';\nimport { type PostprocessOptions, remarkPostprocess } from '@/loaders/mdx/remark-postprocess';\nimport type { Core } from '@/core';\nimport type { DocCollectionItem } from '@/config/build';\n\ntype Processor = ReturnType<typeof createProcessor>;\n\ninterface BuildMDXOptions {\n /**\n * Specify a file path for source\n */\n filePath: string;\n source: string;\n frontmatter?: Record<string, unknown>;\n\n environment: 'bundler' | 'runtime';\n isDevelopment: boolean;\n _compiler?: CompilerOptions;\n}\n\nexport interface CompilerOptions {\n addDependency: (file: string) => void;\n}\n\nexport interface CompiledMDXProperties<Frontmatter = Record<string, unknown>> {\n frontmatter: Frontmatter;\n structuredData: StructuredData;\n toc: TOCItemType[];\n default: FC<MDXProps>;\n\n /**\n * Enable from `postprocess` option.\n */\n _markdown?: string;\n /**\n * Enable from `postprocess` option.\n */\n _mdast?: string;\n}\n\nexport interface FumadocsDataMap {\n /**\n * [Fumadocs MDX] raw frontmatter, you can modify it\n */\n frontmatter?: Record<string, unknown>;\n\n /**\n * [Fumadocs MDX] additional ESM exports to write\n */\n 'mdx-export'?: { name: string; value: unknown }[];\n\n /**\n * [Fumadocs MDX] The compiler object from loader\n */\n _compiler?: CompilerOptions;\n\n /**\n * [Fumadocs MDX] get internal processor, do not use this on user land.\n */\n _getProcessor?: (format: 'md' | 'mdx') => Processor;\n}\n\ndeclare module 'vfile' {\n // eslint-disable-next-line @typescript-eslint/no-empty-object-type -- extend data map\n interface DataMap extends FumadocsDataMap {}\n}\n\nexport async function buildMDX(\n core: Core,\n collection: DocCollectionItem | undefined,\n { filePath, frontmatter, source, _compiler, environment, isDevelopment }: BuildMDXOptions,\n): Promise<VFile> {\n const mdxOptions = await core.getConfig().getMDXOptions(collection, environment);\n\n function getProcessor(format: 'md' | 'mdx') {\n const cache = core.cache as 
Map<string, Processor>;\n const key = `build-mdx:${collection?.name ?? 'global'}:${format}`;\n let processor = cache.get(key);\n\n if (!processor) {\n const postprocessOptions: PostprocessOptions = {\n _format: format,\n ...collection?.postprocess,\n };\n\n processor = createProcessor({\n outputFormat: 'program',\n development: isDevelopment,\n ...mdxOptions,\n remarkPlugins: [\n remarkInclude,\n ...(mdxOptions.remarkPlugins ?? []),\n [remarkPostprocess, postprocessOptions],\n ],\n format,\n });\n\n cache.set(key, processor);\n }\n\n return processor;\n }\n\n let vfile = new VFile({\n value: source,\n path: filePath,\n cwd: collection?.cwd,\n data: { frontmatter, _compiler, _getProcessor: getProcessor },\n });\n\n if (collection) {\n vfile = await core.transformVFile({ collection, filePath, source }, vfile);\n }\n\n return getProcessor(filePath.endsWith('.mdx') ? 'mdx' : 'md').process(vfile);\n}\n"],"mappings":";;;;;;;;;;;;;;AA6CA,SAAgB,kBAEd,EACE,SACA,2BAA2B,OAC3B,eAAe,OACf,wBAAwB,OACxB,gBAAgB,EAAE,IAEK;CACzB,IAAI;CACJ,MAAM,8BAA8B;AAClC,SAAQ,wBACN,YAAY,QACR,OAEA,MAAM,CAAC,IAAI,UAAU,CAAC,QAAQ;;AAGtC,SAAQ,MAAM,SAAS;EACrB,MAAM,cAAe,KAAK,KAAK,gBAAgB,EAAE;AACjD,MAAI,CAAC,YAAY,MACf,OAAM,MAAM,YAAY,SAAS;AAC/B,OAAI,KAAK,UAAU,GAAG;AACpB,gBAAY,QAAQ,YAAY,KAAK;AACrC,WAAO;;IAET;AAGJ,OAAK,KAAK,kBAAkB,EAAE;AAC9B,OAAK,KAAK,cAAc,KAAK;GAC3B,MAAM;GACN,OAAO;GACR,CAAC;AAEF,MAAI,uBAAuB;GACzB,MAAM,OAA6B,EAAE;AAErC,SAAM,MAAM,SAAS,SAAS;AAC5B,SAAK,KAAK,EACR,MAAM,KAAK,KACZ,CAAC;AACF,WAAO;KACP;AAEF,QAAK,KAAK,cAAc,KAAK;IAC3B,MAAM;IACN,OAAO;IACR,CAAC;;AAGJ,MAAI,0BAA0B;GAC5B,MAAM,YAAY,uBAAuB;GACzC,MAAM,WAAW,WAAW,MAAM;IAChC,GAAG,UAAU,KAAK,WAAW;IAE7B,YAAY,UAAU,KAAK,uBAAuB,IAAI,EAAE;IACzD,CAAC;AAEF,QAAK,KAAK,cAAc,KAAK;IAC3B,MAAM;IACN,OAAO;IACR,CAAC;;AAGJ,MAAI,cAAc;GAChB,MAAM,UAAU,iBAAiB,OAAO,EAAE,GAAG;GAC7C,MAAM,QAAQ,KAAK,UACjB,QAAQ,iBAAiB,eAAe,gBAAgB,KAAK,CAAC,GAAG,KAClE;AAED,QAAK,KAAK,cAAc,KAAK;IAC3B,MAAM;IACN,OAAO;IACR,CAAC;;AAGJ,OAAK,MAAM,EAAE,MAAM,WAAW,KAAK,KAAK,cACtC,MAAK,SAAS,QAAQ,eAAe,MAAM,MAAM,CAAC;AAIpD,OAAK,KAAK,gBAAgB,EAAE;AAE5B,OAAK,MAAM,QAAQ,eAAe;AAChC,OAAI,EAAE,QAAQ,KAAK,MAAO;AAE1B,QAAK,SAAS,QAAQ,eAAe,MAAM,KAAK,KAAK,MAAM,CAAC;;;;;;;;;AAUlE,SAAS,eAAe,MAAc,OAA6B;AACjE,QAAO;EACL,MAAM;EACN,OAAO;EACP,MAAM,EACJ,QAAQ;GACN,MAAM;GACN,YAAY;GACZ,MAAM,CACJ;IACE,MAAM;IACN,YAAY,EAAE;IACd,YAAY,EAAE;IACd,QAAQ;IACR,aAAa;KACX,MAAM;KACN,MAAM;KACN,cAAc,CACZ;MACE,MAAM;MACN,IAAI;OACF,MAAM;OACN;OACD;MACD,MAAM,cAAc,MAAM;MAC3B,CACF;KACF;IACF,CACF;GACF,EACF;EACF;;;;;ACtGH,eAAsB,SACpB,MACA,YACA,EAAE,UAAU,aAAa,QAAQ,WAAW,aAAa,iBACzC;CAChB,MAAM,aAAa,MAAM,KAAK,WAAW,CAAC,cAAc,YAAY,YAAY;CAEhF,SAAS,aAAa,QAAsB;EAC1C,MAAM,QAAQ,KAAK;EACnB,MAAM,MAAM,aAAa,YAAY,QAAQ,SAAS,GAAG;EACzD,IAAI,YAAY,MAAM,IAAI,IAAI;AAE9B,MAAI,CAAC,WAAW;GACd,MAAM,qBAAyC;IAC7C,SAAS;IACT,GAAG,YAAY;IAChB;AAED,eAAY,gBAAgB;IAC1B,cAAc;IACd,aAAa;IACb,GAAG;IACH,eAAe;KACb;KACA,GAAI,WAAW,iBAAiB,EAAE;KAClC,CAAC,mBAAmB,mBAAmB;KACxC;IACD;IACD,CAAC;AAEF,SAAM,IAAI,KAAK,UAAU;;AAG3B,SAAO;;CAGT,IAAI,QAAQ,IAAI,MAAM;EACpB,OAAO;EACP,MAAM;EACN,KAAK,YAAY;EACjB,MAAM;GAAE;GAAa;GAAW,eAAe;GAAc;EAC9D,CAAC;AAEF,KAAI,WACF,SAAQ,MAAM,KAAK,eAAe;EAAE;EAAY;EAAU;EAAQ,EAAE,MAAM;AAG5E,QAAO,aAAa,SAAS,SAAS,OAAO,GAAG,QAAQ,KAAK,CAAC,QAAQ,MAAM"}
package/dist/bun/index.js CHANGED
@@ -1,11 +1,11 @@
  import "../fuma-matter-CHgJa_-B.js";
- import { t as createMdxLoader } from "../mdx-DMZ9tsAa.js";
+ import { t as createMdxLoader } from "../mdx-CRT-jSh5.js";
  import "../preset-gmDZnBcg.js";
  import { t as buildConfig } from "../build-BTTNEFmV.js";
  import { n as createCore, t as _Defaults } from "../core-DjldE3H9.js";
  import "../codegen-DleOVLNr.js";
- import { a as createIntegratedConfigLoader, t as toBun } from "../adapter-DG-viEbG.js";
- import { t as createMetaLoader } from "../meta-DyieTM4Z.js";
+ import { a as createIntegratedConfigLoader, t as toBun } from "../adapter-DI4cexsC.js";
+ import { t as createMetaLoader } from "../meta-BKBx8Gab.js";
  import { pathToFileURL } from "node:url";

  //#region src/bun/index.ts
@@ -1 +1 @@
- {"version":3,"file":"codegen-DleOVLNr.js","names":["banner: string[]","code: string","filename: string","path"],"sources":["../src/utils/codegen.ts"],"sourcesContent":["import path from 'node:path';\nimport { glob } from 'tinyglobby';\n\nexport interface GlobImportOptions {\n base: string;\n query?: Record<string, string | undefined>;\n import?: string;\n eager?: boolean;\n}\n\nexport interface CodeGenOptions {\n target: 'default' | 'vite';\n outDir: string;\n /**\n * add .js extenstion to imports\n */\n jsExtension: boolean;\n globCache: Map<string, Promise<string[]>>;\n}\n\nexport type CodeGen = ReturnType<typeof createCodegen>;\n\n/**\n * Code generator (one instance per file)\n */\nexport function createCodegen({\n target = 'default',\n outDir = '',\n jsExtension = false,\n globCache = new Map(),\n}: Partial<CodeGenOptions>) {\n let eagerImportId = 0;\n const banner: string[] = ['// @ts-nocheck'];\n\n if (target === 'vite') {\n banner.push('/// <reference types=\"vite/client\" />');\n }\n\n return {\n options: {\n target,\n outDir,\n } as CodeGenOptions,\n lines: [] as string[],\n addImport(statement: string) {\n this.lines.unshift(statement);\n },\n async pushAsync(insert: Promise<string | undefined>[]) {\n for (const line of await Promise.all(insert)) {\n if (line === undefined) continue;\n\n this.lines.push(line);\n }\n },\n\n async generateGlobImport(\n patterns: string | string[],\n options: GlobImportOptions,\n ): Promise<string> {\n if (target === 'vite') {\n return this.generateViteGlobImport(patterns, options);\n }\n\n return this.generateNodeGlobImport(patterns, options);\n },\n\n generateViteGlobImport(\n patterns: string | string[],\n { base, ...rest }: GlobImportOptions,\n ): string {\n patterns = (typeof patterns === 'string' ? [patterns] : patterns).map(normalizeViteGlobPath);\n\n return `import.meta.glob(${JSON.stringify(patterns)}, ${JSON.stringify(\n {\n base: normalizeViteGlobPath(path.relative(outDir, base)),\n ...rest,\n },\n null,\n 2,\n )})`;\n },\n async generateNodeGlobImport(\n patterns: string | string[],\n { base, eager = false, query = {}, import: importName }: GlobImportOptions,\n ): Promise<string> {\n const cacheKey = JSON.stringify({ patterns, base });\n let files = globCache.get(cacheKey);\n if (!files) {\n files = glob(patterns, {\n cwd: base,\n });\n globCache.set(cacheKey, files);\n }\n\n let code: string = '{';\n for (const item of await files) {\n const fullPath = path.join(base, item);\n const searchParams = new URLSearchParams();\n\n for (const [k, v] of Object.entries(query)) {\n if (v !== undefined) searchParams.set(k, v);\n }\n\n const importPath = this.formatImportPath(fullPath) + '?' + searchParams.toString();\n if (eager) {\n const name = `__fd_glob_${eagerImportId++}`;\n this.lines.unshift(\n importName\n ? 
`import { ${importName} as ${name} } from ${JSON.stringify(importPath)}`\n : `import * as ${name} from ${JSON.stringify(importPath)}`,\n );\n\n code += `${JSON.stringify(item)}: ${name}, `;\n } else {\n let line = `${JSON.stringify(item)}: () => import(${JSON.stringify(importPath)})`;\n if (importName) {\n line += `.then(mod => mod.${importName})`;\n }\n\n code += `${line}, `;\n }\n }\n\n code += '}';\n return code;\n },\n formatImportPath(file: string) {\n const ext = path.extname(file);\n let filename: string;\n\n if (ext === '.ts') {\n filename = file.substring(0, file.length - ext.length);\n if (jsExtension) filename += '.js';\n } else {\n filename = file;\n }\n\n const importPath = slash(path.relative(outDir, filename));\n return importPath.startsWith('.') ? importPath : `./${importPath}`;\n },\n toString() {\n return [...banner, ...this.lines].join('\\n');\n },\n };\n}\n\n/**\n * convert into POSIX & relative file paths, such that Vite can accept it.\n */\nfunction normalizeViteGlobPath(file: string) {\n file = slash(file);\n if (file.startsWith('./')) return file;\n if (file.startsWith('/')) return `.${file}`;\n\n return `./${file}`;\n}\n\nexport function slash(path: string): string {\n const isExtendedLengthPath = path.startsWith('\\\\\\\\?\\\\');\n\n if (isExtendedLengthPath) {\n return path;\n }\n\n return path.replaceAll('\\\\', '/');\n}\n\nexport function ident(code: string, tab: number = 1) {\n return code\n .split('\\n')\n .map((v) => ' '.repeat(tab) + v)\n .join('\\n');\n}\n"],"mappings":";;;;;;;AAyBA,SAAgB,cAAc,EAC5B,SAAS,WACT,SAAS,IACT,cAAc,OACd,4BAAY,IAAI,KAAK,IACK;CAC1B,IAAI,gBAAgB;CACpB,MAAMA,SAAmB,CAAC,iBAAiB;AAE3C,KAAI,WAAW,OACb,QAAO,KAAK,0CAAwC;AAGtD,QAAO;EACL,SAAS;GACP;GACA;GACD;EACD,OAAO,EAAE;EACT,UAAU,WAAmB;AAC3B,QAAK,MAAM,QAAQ,UAAU;;EAE/B,MAAM,UAAU,QAAuC;AACrD,QAAK,MAAM,QAAQ,MAAM,QAAQ,IAAI,OAAO,EAAE;AAC5C,QAAI,SAAS,OAAW;AAExB,SAAK,MAAM,KAAK,KAAK;;;EAIzB,MAAM,mBACJ,UACA,SACiB;AACjB,OAAI,WAAW,OACb,QAAO,KAAK,uBAAuB,UAAU,QAAQ;AAGvD,UAAO,KAAK,uBAAuB,UAAU,QAAQ;;EAGvD,uBACE,UACA,EAAE,MAAM,GAAG,QACH;AACR,eAAY,OAAO,aAAa,WAAW,CAAC,SAAS,GAAG,UAAU,IAAI,sBAAsB;AAE5F,UAAO,oBAAoB,KAAK,UAAU,SAAS,CAAC,IAAI,KAAK,UAC3D;IACE,MAAM,sBAAsB,KAAK,SAAS,QAAQ,KAAK,CAAC;IACxD,GAAG;IACJ,EACD,MACA,EACD,CAAC;;EAEJ,MAAM,uBACJ,UACA,EAAE,MAAM,QAAQ,OAAO,QAAQ,EAAE,EAAE,QAAQ,cAC1B;GACjB,MAAM,WAAW,KAAK,UAAU;IAAE;IAAU;IAAM,CAAC;GACnD,IAAI,QAAQ,UAAU,IAAI,SAAS;AACnC,OAAI,CAAC,OAAO;AACV,YAAQ,KAAK,UAAU,EACrB,KAAK,MACN,CAAC;AACF,cAAU,IAAI,UAAU,MAAM;;GAGhC,IAAIC,OAAe;AACnB,QAAK,MAAM,QAAQ,MAAM,OAAO;IAC9B,MAAM,WAAW,KAAK,KAAK,MAAM,KAAK;IACtC,MAAM,eAAe,IAAI,iBAAiB;AAE1C,SAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ,MAAM,CACxC,KAAI,MAAM,OAAW,cAAa,IAAI,GAAG,EAAE;IAG7C,MAAM,aAAa,KAAK,iBAAiB,SAAS,GAAG,MAAM,aAAa,UAAU;AAClF,QAAI,OAAO;KACT,MAAM,OAAO,aAAa;AAC1B,UAAK,MAAM,QACT,aACI,YAAY,WAAW,MAAM,KAAK,UAAU,KAAK,UAAU,WAAW,KACtE,eAAe,KAAK,QAAQ,KAAK,UAAU,WAAW,GAC3D;AAED,aAAQ,GAAG,KAAK,UAAU,KAAK,CAAC,IAAI,KAAK;WACpC;KACL,IAAI,OAAO,GAAG,KAAK,UAAU,KAAK,CAAC,iBAAiB,KAAK,UAAU,WAAW,CAAC;AAC/E,SAAI,WACF,SAAQ,oBAAoB,WAAW;AAGzC,aAAQ,GAAG,KAAK;;;AAIpB,WAAQ;AACR,UAAO;;EAET,iBAAiB,MAAc;GAC7B,MAAM,MAAM,KAAK,QAAQ,KAAK;GAC9B,IAAIC;AAEJ,OAAI,QAAQ,OAAO;AACjB,eAAW,KAAK,UAAU,GAAG,KAAK,SAAS,IAAI,OAAO;AACtD,QAAI,YAAa,aAAY;SAE7B,YAAW;GAGb,MAAM,aAAa,MAAM,KAAK,SAAS,QAAQ,SAAS,CAAC;AACzD,UAAO,WAAW,WAAW,IAAI,GAAG,aAAa,KAAK;;EAExD,WAAW;AACT,UAAO,CAAC,GAAG,QAAQ,GAAG,KAAK,MAAM,CAAC,KAAK,KAAK;;EAE/C;;;;;AAMH,SAAS,sBAAsB,MAAc;AAC3C,QAAO,MAAM,KAAK;AAClB,KAAI,KAAK,WAAW,KAAK,CAAE,QAAO;AAClC,KAAI,KAAK,WAAW,IAAI,CAAE,QAAO,IAAI;AAErC,QAAO,KAAK;;AAGd,SAAgB,MAAM,QAAsB;AAG1C,
KAF6BC,OAAK,WAAW,UAAU,CAGrD,QAAOA;AAGT,QAAOA,OAAK,WAAW,MAAM,IAAI;;AAGnC,SAAgB,MAAM,MAAc,MAAc,GAAG;AACnD,QAAO,KACJ,MAAM,KAAK,CACX,KAAK,MAAM,KAAK,OAAO,IAAI,GAAG,EAAE,CAChC,KAAK,KAAK"}
+ {"version":3,"file":"codegen-DleOVLNr.js","names":["path"],"sources":["../src/utils/codegen.ts"],"sourcesContent":["import path from 'node:path';\nimport { glob } from 'tinyglobby';\n\nexport interface GlobImportOptions {\n base: string;\n query?: Record<string, string | undefined>;\n import?: string;\n eager?: boolean;\n}\n\nexport interface CodeGenOptions {\n target: 'default' | 'vite';\n outDir: string;\n /**\n * add .js extenstion to imports\n */\n jsExtension: boolean;\n globCache: Map<string, Promise<string[]>>;\n}\n\nexport type CodeGen = ReturnType<typeof createCodegen>;\n\n/**\n * Code generator (one instance per file)\n */\nexport function createCodegen({\n target = 'default',\n outDir = '',\n jsExtension = false,\n globCache = new Map(),\n}: Partial<CodeGenOptions>) {\n let eagerImportId = 0;\n const banner: string[] = ['// @ts-nocheck'];\n\n if (target === 'vite') {\n banner.push('/// <reference types=\"vite/client\" />');\n }\n\n return {\n options: {\n target,\n outDir,\n } as CodeGenOptions,\n lines: [] as string[],\n addImport(statement: string) {\n this.lines.unshift(statement);\n },\n async pushAsync(insert: Promise<string | undefined>[]) {\n for (const line of await Promise.all(insert)) {\n if (line === undefined) continue;\n\n this.lines.push(line);\n }\n },\n\n async generateGlobImport(\n patterns: string | string[],\n options: GlobImportOptions,\n ): Promise<string> {\n if (target === 'vite') {\n return this.generateViteGlobImport(patterns, options);\n }\n\n return this.generateNodeGlobImport(patterns, options);\n },\n\n generateViteGlobImport(\n patterns: string | string[],\n { base, ...rest }: GlobImportOptions,\n ): string {\n patterns = (typeof patterns === 'string' ? [patterns] : patterns).map(normalizeViteGlobPath);\n\n return `import.meta.glob(${JSON.stringify(patterns)}, ${JSON.stringify(\n {\n base: normalizeViteGlobPath(path.relative(outDir, base)),\n ...rest,\n },\n null,\n 2,\n )})`;\n },\n async generateNodeGlobImport(\n patterns: string | string[],\n { base, eager = false, query = {}, import: importName }: GlobImportOptions,\n ): Promise<string> {\n const cacheKey = JSON.stringify({ patterns, base });\n let files = globCache.get(cacheKey);\n if (!files) {\n files = glob(patterns, {\n cwd: base,\n });\n globCache.set(cacheKey, files);\n }\n\n let code: string = '{';\n for (const item of await files) {\n const fullPath = path.join(base, item);\n const searchParams = new URLSearchParams();\n\n for (const [k, v] of Object.entries(query)) {\n if (v !== undefined) searchParams.set(k, v);\n }\n\n const importPath = this.formatImportPath(fullPath) + '?' + searchParams.toString();\n if (eager) {\n const name = `__fd_glob_${eagerImportId++}`;\n this.lines.unshift(\n importName\n ? `import { ${importName} as ${name} } from ${JSON.stringify(importPath)}`\n : `import * as ${name} from ${JSON.stringify(importPath)}`,\n );\n\n code += `${JSON.stringify(item)}: ${name}, `;\n } else {\n let line = `${JSON.stringify(item)}: () => import(${JSON.stringify(importPath)})`;\n if (importName) {\n line += `.then(mod => mod.${importName})`;\n }\n\n code += `${line}, `;\n }\n }\n\n code += '}';\n return code;\n },\n formatImportPath(file: string) {\n const ext = path.extname(file);\n let filename: string;\n\n if (ext === '.ts') {\n filename = file.substring(0, file.length - ext.length);\n if (jsExtension) filename += '.js';\n } else {\n filename = file;\n }\n\n const importPath = slash(path.relative(outDir, filename));\n return importPath.startsWith('.') ? 
importPath : `./${importPath}`;\n },\n toString() {\n return [...banner, ...this.lines].join('\\n');\n },\n };\n}\n\n/**\n * convert into POSIX & relative file paths, such that Vite can accept it.\n */\nfunction normalizeViteGlobPath(file: string) {\n file = slash(file);\n if (file.startsWith('./')) return file;\n if (file.startsWith('/')) return `.${file}`;\n\n return `./${file}`;\n}\n\nexport function slash(path: string): string {\n const isExtendedLengthPath = path.startsWith('\\\\\\\\?\\\\');\n\n if (isExtendedLengthPath) {\n return path;\n }\n\n return path.replaceAll('\\\\', '/');\n}\n\nexport function ident(code: string, tab: number = 1) {\n return code\n .split('\\n')\n .map((v) => ' '.repeat(tab) + v)\n .join('\\n');\n}\n"],"mappings":";;;;;;;AAyBA,SAAgB,cAAc,EAC5B,SAAS,WACT,SAAS,IACT,cAAc,OACd,4BAAY,IAAI,KAAK,IACK;CAC1B,IAAI,gBAAgB;CACpB,MAAM,SAAmB,CAAC,iBAAiB;AAE3C,KAAI,WAAW,OACb,QAAO,KAAK,0CAAwC;AAGtD,QAAO;EACL,SAAS;GACP;GACA;GACD;EACD,OAAO,EAAE;EACT,UAAU,WAAmB;AAC3B,QAAK,MAAM,QAAQ,UAAU;;EAE/B,MAAM,UAAU,QAAuC;AACrD,QAAK,MAAM,QAAQ,MAAM,QAAQ,IAAI,OAAO,EAAE;AAC5C,QAAI,SAAS,OAAW;AAExB,SAAK,MAAM,KAAK,KAAK;;;EAIzB,MAAM,mBACJ,UACA,SACiB;AACjB,OAAI,WAAW,OACb,QAAO,KAAK,uBAAuB,UAAU,QAAQ;AAGvD,UAAO,KAAK,uBAAuB,UAAU,QAAQ;;EAGvD,uBACE,UACA,EAAE,MAAM,GAAG,QACH;AACR,eAAY,OAAO,aAAa,WAAW,CAAC,SAAS,GAAG,UAAU,IAAI,sBAAsB;AAE5F,UAAO,oBAAoB,KAAK,UAAU,SAAS,CAAC,IAAI,KAAK,UAC3D;IACE,MAAM,sBAAsB,KAAK,SAAS,QAAQ,KAAK,CAAC;IACxD,GAAG;IACJ,EACD,MACA,EACD,CAAC;;EAEJ,MAAM,uBACJ,UACA,EAAE,MAAM,QAAQ,OAAO,QAAQ,EAAE,EAAE,QAAQ,cAC1B;GACjB,MAAM,WAAW,KAAK,UAAU;IAAE;IAAU;IAAM,CAAC;GACnD,IAAI,QAAQ,UAAU,IAAI,SAAS;AACnC,OAAI,CAAC,OAAO;AACV,YAAQ,KAAK,UAAU,EACrB,KAAK,MACN,CAAC;AACF,cAAU,IAAI,UAAU,MAAM;;GAGhC,IAAI,OAAe;AACnB,QAAK,MAAM,QAAQ,MAAM,OAAO;IAC9B,MAAM,WAAW,KAAK,KAAK,MAAM,KAAK;IACtC,MAAM,eAAe,IAAI,iBAAiB;AAE1C,SAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ,MAAM,CACxC,KAAI,MAAM,OAAW,cAAa,IAAI,GAAG,EAAE;IAG7C,MAAM,aAAa,KAAK,iBAAiB,SAAS,GAAG,MAAM,aAAa,UAAU;AAClF,QAAI,OAAO;KACT,MAAM,OAAO,aAAa;AAC1B,UAAK,MAAM,QACT,aACI,YAAY,WAAW,MAAM,KAAK,UAAU,KAAK,UAAU,WAAW,KACtE,eAAe,KAAK,QAAQ,KAAK,UAAU,WAAW,GAC3D;AAED,aAAQ,GAAG,KAAK,UAAU,KAAK,CAAC,IAAI,KAAK;WACpC;KACL,IAAI,OAAO,GAAG,KAAK,UAAU,KAAK,CAAC,iBAAiB,KAAK,UAAU,WAAW,CAAC;AAC/E,SAAI,WACF,SAAQ,oBAAoB,WAAW;AAGzC,aAAQ,GAAG,KAAK;;;AAIpB,WAAQ;AACR,UAAO;;EAET,iBAAiB,MAAc;GAC7B,MAAM,MAAM,KAAK,QAAQ,KAAK;GAC9B,IAAI;AAEJ,OAAI,QAAQ,OAAO;AACjB,eAAW,KAAK,UAAU,GAAG,KAAK,SAAS,IAAI,OAAO;AACtD,QAAI,YAAa,aAAY;SAE7B,YAAW;GAGb,MAAM,aAAa,MAAM,KAAK,SAAS,QAAQ,SAAS,CAAC;AACzD,UAAO,WAAW,WAAW,IAAI,GAAG,aAAa,KAAK;;EAExD,WAAW;AACT,UAAO,CAAC,GAAG,QAAQ,GAAG,KAAK,MAAM,CAAC,KAAK,KAAK;;EAE/C;;;;;AAMH,SAAS,sBAAsB,MAAc;AAC3C,QAAO,MAAM,KAAK;AAClB,KAAI,KAAK,WAAW,KAAK,CAAE,QAAO;AAClC,KAAI,KAAK,WAAW,IAAI,CAAE,QAAO,IAAI;AAErC,QAAO,KAAK;;AAGd,SAAgB,MAAM,QAAsB;AAG1C,KAF6BA,OAAK,WAAW,UAAU,CAGrD,QAAOA;AAGT,QAAOA,OAAK,WAAW,MAAM,IAAI;;AAGnC,SAAgB,MAAM,MAAc,MAAc,GAAG;AACnD,QAAO,KACJ,MAAM,KAAK,CACX,KAAK,MAAM,KAAK,OAAO,IAAI,GAAG,EAAE,CAChC,KAAK,KAAK"}
@@ -1 +1 @@
1
- {"version":3,"file":"core-DjldE3H9.js","names":["plugins: Plugin[]","config: LoadedConfig","out: EmitOutput","options","lines: string[]","postprocessOptions: Partial<PostprocessOptions> | undefined"],"sources":["../src/utils/validation.ts","../src/core.ts"],"sourcesContent":["import type { CollectionSchema } from '@/config';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\n\nexport class ValidationError extends Error {\n title: string;\n issues: readonly StandardSchemaV1.Issue[];\n\n constructor(message: string, issues: readonly StandardSchemaV1.Issue[]) {\n super(`${message}:\\n${issues.map((issue) => ` ${issue.path}: ${issue.message}`).join('\\n')}`);\n\n this.title = message;\n this.issues = issues;\n }\n\n async toStringFormatted() {\n // Handle ESM/CJS interop: picocolors is a CJS module that exports via\n // module.exports = createColors(). When dynamically imported in ESM context\n // (e.g., Next.js 16 Turbopack), the exports are wrapped under .default\n const picocolorsModule = await import('picocolors');\n const picocolors = picocolorsModule.default ?? picocolorsModule;\n\n return [\n picocolors.bold(`[MDX] ${this.title}:`),\n ...this.issues.map((issue) =>\n picocolors.redBright(\n `- ${picocolors.bold(issue.path?.join('.') ?? '*')}: ${issue.message}`,\n ),\n ),\n ].join('\\n');\n }\n}\n\nexport async function validate<Schema extends StandardSchemaV1, Context>(\n schema: CollectionSchema<Schema, Context>,\n data: unknown,\n context: Context,\n errorMessage: string,\n): Promise<StandardSchemaV1.InferOutput<Schema>> {\n if (typeof schema === 'function' && !('~standard' in schema)) {\n schema = schema(context);\n }\n\n if ('~standard' in schema) {\n const result = await (schema as StandardSchemaV1)['~standard'].validate(data);\n\n if (result.issues) {\n throw new ValidationError(errorMessage, result.issues);\n }\n\n return result.value;\n }\n\n return data;\n}\n","import type {\n CollectionItem,\n DocCollectionItem,\n LoadedConfig,\n MetaCollectionItem,\n} from '@/config/build';\nimport path from 'node:path';\nimport fs from 'node:fs/promises';\nimport type { FSWatcher } from 'chokidar';\nimport { validate } from './utils/validation';\nimport type { VFile } from 'vfile';\nimport type { IndexFilePlugin } from './plugins/index-file';\nimport type { PostprocessOptions } from './config';\nimport { ident } from './utils/codegen';\n\ntype Awaitable<T> = T | Promise<T>;\n\nexport interface EmitEntry {\n /**\n * path relative to output directory\n */\n path: string;\n content: string;\n}\n\nexport interface PluginContext {\n core: Core;\n}\n\nexport type CompilationContext<Collection> = PluginContext & TransformOptions<Collection>;\n\nexport interface TransformOptions<Collection> {\n collection: Collection;\n filePath: string;\n source: string;\n}\n\nexport interface Plugin extends IndexFilePlugin {\n name?: string;\n\n /**\n * on config loaded/updated\n */\n config?: (this: PluginContext, config: LoadedConfig) => Awaitable<void | LoadedConfig>;\n\n /**\n * Generate files (e.g. 
types, index file, or JSON schemas)\n */\n emit?: (this: PluginContext) => Awaitable<EmitEntry[]>;\n\n /**\n * Configure Fumadocs dev server\n */\n configureServer?: (this: PluginContext, server: ServerContext) => Awaitable<void>;\n\n meta?: {\n /**\n * Transform metadata\n */\n transform?: (\n this: CompilationContext<MetaCollectionItem>,\n data: unknown,\n ) => Awaitable<unknown | void>;\n };\n\n doc?: {\n /**\n * Transform frontmatter\n */\n frontmatter?: (\n this: CompilationContext<DocCollectionItem>,\n data: Record<string, unknown>,\n ) => Awaitable<Record<string, unknown> | void>;\n\n /**\n * Transform `vfile` on compilation stage\n */\n vfile?: (this: CompilationContext<DocCollectionItem>, file: VFile) => Awaitable<VFile | void>;\n };\n}\n\nexport type PluginOption = Awaitable<Plugin | PluginOption[] | false | undefined>;\n\nexport interface ServerContext {\n /**\n * the file watcher, by default all content files are watched, along with other files.\n *\n * make sure to filter when listening to events\n */\n watcher?: FSWatcher;\n}\n\nexport interface CoreOptions {\n environment: string;\n configPath: string;\n outDir: string;\n plugins?: PluginOption[];\n\n /**\n * the workspace info if this instance is created as a workspace\n */\n workspace?: {\n parent: Core;\n name: string;\n dir: string;\n };\n}\n\nexport interface EmitOptions {\n /**\n * filter the plugins to run emit\n */\n filterPlugin?: (plugin: Plugin) => boolean;\n\n /**\n * filter the workspaces to run emit\n */\n filterWorkspace?: (workspace: string) => boolean;\n\n /**\n * write files\n */\n write?: boolean;\n}\n\nexport interface EmitOutput {\n entries: EmitEntry[];\n workspaces: Record<string, EmitEntry[]>;\n}\n\nexport const _Defaults = {\n configPath: 'source.config.ts',\n outDir: '.source',\n};\n\nasync function getPlugins(pluginOptions: PluginOption[]): Promise<Plugin[]> {\n const plugins: Plugin[] = [];\n\n for await (const option of pluginOptions) {\n if (!option) continue;\n if (Array.isArray(option)) plugins.push(...(await getPlugins(option)));\n else plugins.push(option);\n }\n\n return plugins;\n}\n\nexport function createCore(options: CoreOptions) {\n let config: LoadedConfig;\n let plugins: Plugin[];\n const workspaces = new Map<string, Core>();\n\n async function transformMetadata<T>(\n { collection, filePath, source }: TransformOptions<DocCollectionItem | MetaCollectionItem>,\n data: unknown,\n ): Promise<T> {\n if (collection.schema) {\n data = await validate(\n collection.schema,\n data,\n { path: filePath, source },\n collection.type === 'doc'\n ? 
`invalid frontmatter in ${filePath}`\n : `invalid data in ${filePath}`,\n );\n }\n\n return data as T;\n }\n\n return {\n /**\n * Convenient cache store, reset when config changes\n */\n cache: new Map<string, unknown>(),\n async init({ config: newConfig }: { config: Awaitable<LoadedConfig> }) {\n config = await newConfig;\n this.cache.clear();\n workspaces.clear();\n plugins = await getPlugins([postprocessPlugin(), options.plugins, config.global.plugins]);\n\n for (const plugin of plugins) {\n const out = await plugin.config?.call(this.getPluginContext(), config);\n if (out) config = out;\n }\n\n // only support workspaces with max depth 1\n if (!options.workspace) {\n await Promise.all(\n Object.entries(config.workspaces).map(async ([name, workspace]) => {\n const core = createCore({\n ...options,\n outDir: path.join(options.outDir, name),\n workspace: {\n name,\n parent: this,\n dir: workspace.dir,\n },\n });\n await core.init({ config: workspace.config });\n workspaces.set(name, core);\n }),\n );\n }\n },\n getWorkspaces() {\n return workspaces;\n },\n getOptions() {\n return options;\n },\n getConfig(): LoadedConfig {\n return config;\n },\n /**\n * The file path of compiled config file, the file may not exist (e.g. on Vite, or still compiling)\n */\n getCompiledConfigPath(): string {\n return path.join(options.outDir, 'source.config.mjs');\n },\n getPlugins() {\n return plugins;\n },\n getCollections(): CollectionItem[] {\n return Array.from(config.collections.values());\n },\n getCollection(name: string): CollectionItem | undefined {\n return config.collections.get(name);\n },\n getPluginContext(): PluginContext {\n return {\n core: this,\n };\n },\n async initServer(server: ServerContext): Promise<void> {\n const ctx = this.getPluginContext();\n for (const plugin of plugins) {\n await plugin.configureServer?.call(ctx, server);\n }\n for (const workspace of workspaces.values()) {\n await workspace.initServer(server);\n }\n },\n async emit(emitOptions: EmitOptions = {}): Promise<EmitOutput> {\n const { filterPlugin, filterWorkspace, write = false } = emitOptions;\n const start = performance.now();\n const ctx = this.getPluginContext();\n const added = new Set<string>();\n const out: EmitOutput = {\n entries: [],\n workspaces: {},\n };\n\n for (const li of await Promise.all(\n plugins.map((plugin) => {\n if ((filterPlugin && !filterPlugin(plugin)) || !plugin.emit) return;\n return plugin.emit.call(ctx);\n }),\n )) {\n if (!li) continue;\n for (const item of li) {\n if (added.has(item.path)) continue;\n out.entries.push(item);\n added.add(item.path);\n }\n }\n\n if (write) {\n await Promise.all(\n out.entries.map(async (entry) => {\n const file = path.join(options.outDir, entry.path);\n\n await fs.mkdir(path.dirname(file), { recursive: true });\n await fs.writeFile(file, entry.content);\n }),\n );\n\n console.log(\n options.workspace\n ? 
`[MDX: ${options.workspace.name}] generated files in ${performance.now() - start}ms`\n : `[MDX] generated files in ${performance.now() - start}ms`,\n );\n }\n\n for (const [name, workspace] of workspaces) {\n if (filterWorkspace && !filterWorkspace(name)) continue;\n out.workspaces[name] = (await workspace.emit(emitOptions)).entries;\n }\n\n return out;\n },\n async transformMeta(\n options: TransformOptions<MetaCollectionItem>,\n data: unknown,\n ): Promise<unknown> {\n const ctx = {\n ...this.getPluginContext(),\n ...options,\n };\n\n data = await transformMetadata(options, data);\n for (const plugin of plugins) {\n if (plugin.meta?.transform) data = (await plugin.meta.transform.call(ctx, data)) ?? data;\n }\n\n return data;\n },\n async transformFrontmatter(\n options: TransformOptions<DocCollectionItem>,\n data: Record<string, unknown>,\n ): Promise<Record<string, unknown>> {\n const ctx = {\n ...this.getPluginContext(),\n ...options,\n };\n\n data = await transformMetadata(options, data);\n for (const plugin of plugins) {\n if (plugin.doc?.frontmatter) data = (await plugin.doc.frontmatter.call(ctx, data)) ?? data;\n }\n\n return data;\n },\n async transformVFile(\n options: TransformOptions<DocCollectionItem>,\n file: VFile,\n ): Promise<VFile> {\n const ctx = {\n ...this.getPluginContext(),\n ...options,\n };\n\n for (const plugin of plugins) {\n if (plugin.doc?.vfile) file = (await plugin.doc.vfile.call(ctx, file)) ?? file;\n }\n\n return file;\n },\n };\n}\n\nfunction postprocessPlugin(): Plugin {\n const LinkReferenceTypes = `{\n /**\n * extracted references (e.g. hrefs, paths), useful for analyzing relationships between pages.\n */\n extractedReferences: import(\"fumadocs-mdx\").ExtractedReference[];\n}`;\n\n return {\n 'index-file': {\n generateTypeConfig() {\n const lines: string[] = [];\n lines.push('{');\n lines.push(' DocData: {');\n for (const collection of this.core.getCollections()) {\n let postprocessOptions: Partial<PostprocessOptions> | undefined;\n switch (collection.type) {\n case 'doc':\n postprocessOptions = collection.postprocess;\n break;\n case 'docs':\n postprocessOptions = collection.docs.postprocess;\n break;\n }\n\n if (postprocessOptions?.extractLinkReferences) {\n lines.push(ident(`${collection.name}: ${LinkReferenceTypes},`, 2));\n }\n }\n lines.push(' }');\n lines.push('}');\n return lines.join('\\n');\n },\n serverOptions(options) {\n options.doc ??= {};\n options.doc.passthroughs ??= [];\n options.doc.passthroughs.push('extractedReferences');\n },\n },\n };\n}\n\nexport type Core = ReturnType<typeof 
createCore>;\n"],"mappings":";;;;;AAGA,IAAa,kBAAb,cAAqC,MAAM;CAIzC,YAAY,SAAiB,QAA2C;AACtE,QAAM,GAAG,QAAQ,KAAK,OAAO,KAAK,UAAU,KAAK,MAAM,KAAK,IAAI,MAAM,UAAU,CAAC,KAAK,KAAK,GAAG;AAE9F,OAAK,QAAQ;AACb,OAAK,SAAS;;CAGhB,MAAM,oBAAoB;EAIxB,MAAM,mBAAmB,MAAM,OAAO;EACtC,MAAM,aAAa,iBAAiB,WAAW;AAE/C,SAAO,CACL,WAAW,KAAK,SAAS,KAAK,MAAM,GAAG,EACvC,GAAG,KAAK,OAAO,KAAK,UAClB,WAAW,UACT,KAAK,WAAW,KAAK,MAAM,MAAM,KAAK,IAAI,IAAI,IAAI,CAAC,IAAI,MAAM,UAC9D,CACF,CACF,CAAC,KAAK,KAAK;;;AAIhB,eAAsB,SACpB,QACA,MACA,SACA,cAC+C;AAC/C,KAAI,OAAO,WAAW,cAAc,EAAE,eAAe,QACnD,UAAS,OAAO,QAAQ;AAG1B,KAAI,eAAe,QAAQ;EACzB,MAAM,SAAS,MAAO,OAA4B,aAAa,SAAS,KAAK;AAE7E,MAAI,OAAO,OACT,OAAM,IAAI,gBAAgB,cAAc,OAAO,OAAO;AAGxD,SAAO,OAAO;;AAGhB,QAAO;;;;;AC8ET,MAAa,YAAY;CACvB,YAAY;CACZ,QAAQ;CACT;AAED,eAAe,WAAW,eAAkD;CAC1E,MAAMA,UAAoB,EAAE;AAE5B,YAAW,MAAM,UAAU,eAAe;AACxC,MAAI,CAAC,OAAQ;AACb,MAAI,MAAM,QAAQ,OAAO,CAAE,SAAQ,KAAK,GAAI,MAAM,WAAW,OAAO,CAAE;MACjE,SAAQ,KAAK,OAAO;;AAG3B,QAAO;;AAGT,SAAgB,WAAW,SAAsB;CAC/C,IAAIC;CACJ,IAAID;CACJ,MAAM,6BAAa,IAAI,KAAmB;CAE1C,eAAe,kBACb,EAAE,YAAY,UAAU,UACxB,MACY;AACZ,MAAI,WAAW,OACb,QAAO,MAAM,SACX,WAAW,QACX,MACA;GAAE,MAAM;GAAU;GAAQ,EAC1B,WAAW,SAAS,QAChB,0BAA0B,aAC1B,mBAAmB,WACxB;AAGH,SAAO;;AAGT,QAAO;EAIL,uBAAO,IAAI,KAAsB;EACjC,MAAM,KAAK,EAAE,QAAQ,aAAkD;AACrE,YAAS,MAAM;AACf,QAAK,MAAM,OAAO;AAClB,cAAW,OAAO;AAClB,aAAU,MAAM,WAAW;IAAC,mBAAmB;IAAE,QAAQ;IAAS,OAAO,OAAO;IAAQ,CAAC;AAEzF,QAAK,MAAM,UAAU,SAAS;IAC5B,MAAM,MAAM,MAAM,OAAO,QAAQ,KAAK,KAAK,kBAAkB,EAAE,OAAO;AACtE,QAAI,IAAK,UAAS;;AAIpB,OAAI,CAAC,QAAQ,UACX,OAAM,QAAQ,IACZ,OAAO,QAAQ,OAAO,WAAW,CAAC,IAAI,OAAO,CAAC,MAAM,eAAe;IACjE,MAAM,OAAO,WAAW;KACtB,GAAG;KACH,QAAQ,KAAK,KAAK,QAAQ,QAAQ,KAAK;KACvC,WAAW;MACT;MACA,QAAQ;MACR,KAAK,UAAU;MAChB;KACF,CAAC;AACF,UAAM,KAAK,KAAK,EAAE,QAAQ,UAAU,QAAQ,CAAC;AAC7C,eAAW,IAAI,MAAM,KAAK;KAC1B,CACH;;EAGL,gBAAgB;AACd,UAAO;;EAET,aAAa;AACX,UAAO;;EAET,YAA0B;AACxB,UAAO;;EAKT,wBAAgC;AAC9B,UAAO,KAAK,KAAK,QAAQ,QAAQ,oBAAoB;;EAEvD,aAAa;AACX,UAAO;;EAET,iBAAmC;AACjC,UAAO,MAAM,KAAK,OAAO,YAAY,QAAQ,CAAC;;EAEhD,cAAc,MAA0C;AACtD,UAAO,OAAO,YAAY,IAAI,KAAK;;EAErC,mBAAkC;AAChC,UAAO,EACL,MAAM,MACP;;EAEH,MAAM,WAAW,QAAsC;GACrD,MAAM,MAAM,KAAK,kBAAkB;AACnC,QAAK,MAAM,UAAU,QACnB,OAAM,OAAO,iBAAiB,KAAK,KAAK,OAAO;AAEjD,QAAK,MAAM,aAAa,WAAW,QAAQ,CACzC,OAAM,UAAU,WAAW,OAAO;;EAGtC,MAAM,KAAK,cAA2B,EAAE,EAAuB;GAC7D,MAAM,EAAE,cAAc,iBAAiB,QAAQ,UAAU;GACzD,MAAM,QAAQ,YAAY,KAAK;GAC/B,MAAM,MAAM,KAAK,kBAAkB;GACnC,MAAM,wBAAQ,IAAI,KAAa;GAC/B,MAAME,MAAkB;IACtB,SAAS,EAAE;IACX,YAAY,EAAE;IACf;AAED,QAAK,MAAM,MAAM,MAAM,QAAQ,IAC7B,QAAQ,KAAK,WAAW;AACtB,QAAK,gBAAgB,CAAC,aAAa,OAAO,IAAK,CAAC,OAAO,KAAM;AAC7D,WAAO,OAAO,KAAK,KAAK,IAAI;KAC5B,CACH,EAAE;AACD,QAAI,CAAC,GAAI;AACT,SAAK,MAAM,QAAQ,IAAI;AACrB,SAAI,MAAM,IAAI,KAAK,KAAK,CAAE;AAC1B,SAAI,QAAQ,KAAK,KAAK;AACtB,WAAM,IAAI,KAAK,KAAK;;;AAIxB,OAAI,OAAO;AACT,UAAM,QAAQ,IACZ,IAAI,QAAQ,IAAI,OAAO,UAAU;KAC/B,MAAM,OAAO,KAAK,KAAK,QAAQ,QAAQ,MAAM,KAAK;AAElD,WAAM,GAAG,MAAM,KAAK,QAAQ,KAAK,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,WAAM,GAAG,UAAU,MAAM,MAAM,QAAQ;MACvC,CACH;AAED,YAAQ,IACN,QAAQ,YACJ,SAAS,QAAQ,UAAU,KAAK,uBAAuB,YAAY,KAAK,GAAG,MAAM,MACjF,4BAA4B,YAAY,KAAK,GAAG,MAAM,IAC3D;;AAGH,QAAK,MAAM,CAAC,MAAM,cAAc,YAAY;AAC1C,QAAI,mBAAmB,CAAC,gBAAgB,KAAK,CAAE;AAC/C,QAAI,WAAW,SAAS,MAAM,UAAU,KAAK,YAAY,EAAE;;AAG7D,UAAO;;EAET,MAAM,cACJ,WACA,MACkB;GAClB,MAAM,MAAM;IACV,GAAG,KAAK,kBAAkB;IAC1B,GAAGC;IACJ;AAED,UAAO,MAAM,kBAAkBA,WAAS,KAAK;AAC7C,QAAK,MAAM,UAAU,QACnB,KAAI,OAAO,MAAM,UAAW,QAAQ,MAAM,OAAO,KAAK,UAAU,KAAK,KAAK,KAAK,IAAK;AAGtF,UAAO;;EAET,MAAM,qBACJ,WACA,MACkC;GAClC,MAAM,MAAM;IACV,GAAG,KAAK,kBAAkB;IAC1B,GAAGA;IACJ;AAED,UAAO,MAAM,kBAAkBA,WAAS,KAAK;AAC7C,QAAK,MAAM,UAAU,QACnB,KAAI,OAAO,KAAK,YAAa,QAA
Q,MAAM,OAAO,IAAI,YAAY,KAAK,KAAK,KAAK,IAAK;AAGxF,UAAO;;EAET,MAAM,eACJ,WACA,MACgB;GAChB,MAAM,MAAM;IACV,GAAG,KAAK,kBAAkB;IAC1B,GAAGA;IACJ;AAED,QAAK,MAAM,UAAU,QACnB,KAAI,OAAO,KAAK,MAAO,QAAQ,MAAM,OAAO,IAAI,MAAM,KAAK,KAAK,KAAK,IAAK;AAG5E,UAAO;;EAEV;;AAGH,SAAS,oBAA4B;CACnC,MAAM,qBAAqB;;;;;;AAO3B,QAAO,EACL,cAAc;EACZ,qBAAqB;GACnB,MAAMC,QAAkB,EAAE;AAC1B,SAAM,KAAK,IAAI;AACf,SAAM,KAAK,eAAe;AAC1B,QAAK,MAAM,cAAc,KAAK,KAAK,gBAAgB,EAAE;IACnD,IAAIC;AACJ,YAAQ,WAAW,MAAnB;KACE,KAAK;AACH,2BAAqB,WAAW;AAChC;KACF,KAAK;AACH,2BAAqB,WAAW,KAAK;AACrC;;AAGJ,QAAI,oBAAoB,sBACtB,OAAM,KAAK,MAAM,GAAG,WAAW,KAAK,IAAI,mBAAmB,IAAI,EAAE,CAAC;;AAGtE,SAAM,KAAK,MAAM;AACjB,SAAM,KAAK,IAAI;AACf,UAAO,MAAM,KAAK,KAAK;;EAEzB,cAAc,SAAS;AACrB,WAAQ,QAAQ,EAAE;AAClB,WAAQ,IAAI,iBAAiB,EAAE;AAC/B,WAAQ,IAAI,aAAa,KAAK,sBAAsB;;EAEvD,EACF"}
1
+ {"version":3,"file":"core-DjldE3H9.js","names":["options"],"sources":["../src/utils/validation.ts","../src/core.ts"],"sourcesContent":["import type { CollectionSchema } from '@/config';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\n\nexport class ValidationError extends Error {\n title: string;\n issues: readonly StandardSchemaV1.Issue[];\n\n constructor(message: string, issues: readonly StandardSchemaV1.Issue[]) {\n super(`${message}:\\n${issues.map((issue) => ` ${issue.path}: ${issue.message}`).join('\\n')}`);\n\n this.title = message;\n this.issues = issues;\n }\n\n async toStringFormatted() {\n // Handle ESM/CJS interop: picocolors is a CJS module that exports via\n // module.exports = createColors(). When dynamically imported in ESM context\n // (e.g., Next.js 16 Turbopack), the exports are wrapped under .default\n const picocolorsModule = await import('picocolors');\n const picocolors = picocolorsModule.default ?? picocolorsModule;\n\n return [\n picocolors.bold(`[MDX] ${this.title}:`),\n ...this.issues.map((issue) =>\n picocolors.redBright(\n `- ${picocolors.bold(issue.path?.join('.') ?? '*')}: ${issue.message}`,\n ),\n ),\n ].join('\\n');\n }\n}\n\nexport async function validate<Schema extends StandardSchemaV1, Context>(\n schema: CollectionSchema<Schema, Context>,\n data: unknown,\n context: Context,\n errorMessage: string,\n): Promise<StandardSchemaV1.InferOutput<Schema>> {\n if (typeof schema === 'function' && !('~standard' in schema)) {\n schema = schema(context);\n }\n\n if ('~standard' in schema) {\n const result = await (schema as StandardSchemaV1)['~standard'].validate(data);\n\n if (result.issues) {\n throw new ValidationError(errorMessage, result.issues);\n }\n\n return result.value;\n }\n\n return data;\n}\n","import type {\n CollectionItem,\n DocCollectionItem,\n LoadedConfig,\n MetaCollectionItem,\n} from '@/config/build';\nimport path from 'node:path';\nimport fs from 'node:fs/promises';\nimport type { FSWatcher } from 'chokidar';\nimport { validate } from './utils/validation';\nimport type { VFile } from 'vfile';\nimport type { IndexFilePlugin } from './plugins/index-file';\nimport type { PostprocessOptions } from './config';\nimport { ident } from './utils/codegen';\n\ntype Awaitable<T> = T | Promise<T>;\n\nexport interface EmitEntry {\n /**\n * path relative to output directory\n */\n path: string;\n content: string;\n}\n\nexport interface PluginContext {\n core: Core;\n}\n\nexport type CompilationContext<Collection> = PluginContext & TransformOptions<Collection>;\n\nexport interface TransformOptions<Collection> {\n collection: Collection;\n filePath: string;\n source: string;\n}\n\nexport interface Plugin extends IndexFilePlugin {\n name?: string;\n\n /**\n * on config loaded/updated\n */\n config?: (this: PluginContext, config: LoadedConfig) => Awaitable<void | LoadedConfig>;\n\n /**\n * Generate files (e.g. 
types, index file, or JSON schemas)\n */\n emit?: (this: PluginContext) => Awaitable<EmitEntry[]>;\n\n /**\n * Configure Fumadocs dev server\n */\n configureServer?: (this: PluginContext, server: ServerContext) => Awaitable<void>;\n\n meta?: {\n /**\n * Transform metadata\n */\n transform?: (\n this: CompilationContext<MetaCollectionItem>,\n data: unknown,\n ) => Awaitable<unknown | void>;\n };\n\n doc?: {\n /**\n * Transform frontmatter\n */\n frontmatter?: (\n this: CompilationContext<DocCollectionItem>,\n data: Record<string, unknown>,\n ) => Awaitable<Record<string, unknown> | void>;\n\n /**\n * Transform `vfile` on compilation stage\n */\n vfile?: (this: CompilationContext<DocCollectionItem>, file: VFile) => Awaitable<VFile | void>;\n };\n}\n\nexport type PluginOption = Awaitable<Plugin | PluginOption[] | false | undefined>;\n\nexport interface ServerContext {\n /**\n * the file watcher, by default all content files are watched, along with other files.\n *\n * make sure to filter when listening to events\n */\n watcher?: FSWatcher;\n}\n\nexport interface CoreOptions {\n environment: string;\n configPath: string;\n outDir: string;\n plugins?: PluginOption[];\n\n /**\n * the workspace info if this instance is created as a workspace\n */\n workspace?: {\n parent: Core;\n name: string;\n dir: string;\n };\n}\n\nexport interface EmitOptions {\n /**\n * filter the plugins to run emit\n */\n filterPlugin?: (plugin: Plugin) => boolean;\n\n /**\n * filter the workspaces to run emit\n */\n filterWorkspace?: (workspace: string) => boolean;\n\n /**\n * write files\n */\n write?: boolean;\n}\n\nexport interface EmitOutput {\n entries: EmitEntry[];\n workspaces: Record<string, EmitEntry[]>;\n}\n\nexport const _Defaults = {\n configPath: 'source.config.ts',\n outDir: '.source',\n};\n\nasync function getPlugins(pluginOptions: PluginOption[]): Promise<Plugin[]> {\n const plugins: Plugin[] = [];\n\n for await (const option of pluginOptions) {\n if (!option) continue;\n if (Array.isArray(option)) plugins.push(...(await getPlugins(option)));\n else plugins.push(option);\n }\n\n return plugins;\n}\n\nexport function createCore(options: CoreOptions) {\n let config: LoadedConfig;\n let plugins: Plugin[];\n const workspaces = new Map<string, Core>();\n\n async function transformMetadata<T>(\n { collection, filePath, source }: TransformOptions<DocCollectionItem | MetaCollectionItem>,\n data: unknown,\n ): Promise<T> {\n if (collection.schema) {\n data = await validate(\n collection.schema,\n data,\n { path: filePath, source },\n collection.type === 'doc'\n ? 
`invalid frontmatter in ${filePath}`\n : `invalid data in ${filePath}`,\n );\n }\n\n return data as T;\n }\n\n return {\n /**\n * Convenient cache store, reset when config changes\n */\n cache: new Map<string, unknown>(),\n async init({ config: newConfig }: { config: Awaitable<LoadedConfig> }) {\n config = await newConfig;\n this.cache.clear();\n workspaces.clear();\n plugins = await getPlugins([postprocessPlugin(), options.plugins, config.global.plugins]);\n\n for (const plugin of plugins) {\n const out = await plugin.config?.call(this.getPluginContext(), config);\n if (out) config = out;\n }\n\n // only support workspaces with max depth 1\n if (!options.workspace) {\n await Promise.all(\n Object.entries(config.workspaces).map(async ([name, workspace]) => {\n const core = createCore({\n ...options,\n outDir: path.join(options.outDir, name),\n workspace: {\n name,\n parent: this,\n dir: workspace.dir,\n },\n });\n await core.init({ config: workspace.config });\n workspaces.set(name, core);\n }),\n );\n }\n },\n getWorkspaces() {\n return workspaces;\n },\n getOptions() {\n return options;\n },\n getConfig(): LoadedConfig {\n return config;\n },\n /**\n * The file path of compiled config file, the file may not exist (e.g. on Vite, or still compiling)\n */\n getCompiledConfigPath(): string {\n return path.join(options.outDir, 'source.config.mjs');\n },\n getPlugins() {\n return plugins;\n },\n getCollections(): CollectionItem[] {\n return Array.from(config.collections.values());\n },\n getCollection(name: string): CollectionItem | undefined {\n return config.collections.get(name);\n },\n getPluginContext(): PluginContext {\n return {\n core: this,\n };\n },\n async initServer(server: ServerContext): Promise<void> {\n const ctx = this.getPluginContext();\n for (const plugin of plugins) {\n await plugin.configureServer?.call(ctx, server);\n }\n for (const workspace of workspaces.values()) {\n await workspace.initServer(server);\n }\n },\n async emit(emitOptions: EmitOptions = {}): Promise<EmitOutput> {\n const { filterPlugin, filterWorkspace, write = false } = emitOptions;\n const start = performance.now();\n const ctx = this.getPluginContext();\n const added = new Set<string>();\n const out: EmitOutput = {\n entries: [],\n workspaces: {},\n };\n\n for (const li of await Promise.all(\n plugins.map((plugin) => {\n if ((filterPlugin && !filterPlugin(plugin)) || !plugin.emit) return;\n return plugin.emit.call(ctx);\n }),\n )) {\n if (!li) continue;\n for (const item of li) {\n if (added.has(item.path)) continue;\n out.entries.push(item);\n added.add(item.path);\n }\n }\n\n if (write) {\n await Promise.all(\n out.entries.map(async (entry) => {\n const file = path.join(options.outDir, entry.path);\n\n await fs.mkdir(path.dirname(file), { recursive: true });\n await fs.writeFile(file, entry.content);\n }),\n );\n\n console.log(\n options.workspace\n ? 
`[MDX: ${options.workspace.name}] generated files in ${performance.now() - start}ms`\n : `[MDX] generated files in ${performance.now() - start}ms`,\n );\n }\n\n for (const [name, workspace] of workspaces) {\n if (filterWorkspace && !filterWorkspace(name)) continue;\n out.workspaces[name] = (await workspace.emit(emitOptions)).entries;\n }\n\n return out;\n },\n async transformMeta(\n options: TransformOptions<MetaCollectionItem>,\n data: unknown,\n ): Promise<unknown> {\n const ctx = {\n ...this.getPluginContext(),\n ...options,\n };\n\n data = await transformMetadata(options, data);\n for (const plugin of plugins) {\n if (plugin.meta?.transform) data = (await plugin.meta.transform.call(ctx, data)) ?? data;\n }\n\n return data;\n },\n async transformFrontmatter(\n options: TransformOptions<DocCollectionItem>,\n data: Record<string, unknown>,\n ): Promise<Record<string, unknown>> {\n const ctx = {\n ...this.getPluginContext(),\n ...options,\n };\n\n data = await transformMetadata(options, data);\n for (const plugin of plugins) {\n if (plugin.doc?.frontmatter) data = (await plugin.doc.frontmatter.call(ctx, data)) ?? data;\n }\n\n return data;\n },\n async transformVFile(\n options: TransformOptions<DocCollectionItem>,\n file: VFile,\n ): Promise<VFile> {\n const ctx = {\n ...this.getPluginContext(),\n ...options,\n };\n\n for (const plugin of plugins) {\n if (plugin.doc?.vfile) file = (await plugin.doc.vfile.call(ctx, file)) ?? file;\n }\n\n return file;\n },\n };\n}\n\nfunction postprocessPlugin(): Plugin {\n const LinkReferenceTypes = `{\n /**\n * extracted references (e.g. hrefs, paths), useful for analyzing relationships between pages.\n */\n extractedReferences: import(\"fumadocs-mdx\").ExtractedReference[];\n}`;\n\n return {\n 'index-file': {\n generateTypeConfig() {\n const lines: string[] = [];\n lines.push('{');\n lines.push(' DocData: {');\n for (const collection of this.core.getCollections()) {\n let postprocessOptions: Partial<PostprocessOptions> | undefined;\n switch (collection.type) {\n case 'doc':\n postprocessOptions = collection.postprocess;\n break;\n case 'docs':\n postprocessOptions = collection.docs.postprocess;\n break;\n }\n\n if (postprocessOptions?.extractLinkReferences) {\n lines.push(ident(`${collection.name}: ${LinkReferenceTypes},`, 2));\n }\n }\n lines.push(' }');\n lines.push('}');\n return lines.join('\\n');\n },\n serverOptions(options) {\n options.doc ??= {};\n options.doc.passthroughs ??= [];\n options.doc.passthroughs.push('extractedReferences');\n },\n },\n };\n}\n\nexport type Core = ReturnType<typeof 
createCore>;\n"],"mappings":";;;;;AAGA,IAAa,kBAAb,cAAqC,MAAM;CAIzC,YAAY,SAAiB,QAA2C;AACtE,QAAM,GAAG,QAAQ,KAAK,OAAO,KAAK,UAAU,KAAK,MAAM,KAAK,IAAI,MAAM,UAAU,CAAC,KAAK,KAAK,GAAG;AAE9F,OAAK,QAAQ;AACb,OAAK,SAAS;;CAGhB,MAAM,oBAAoB;EAIxB,MAAM,mBAAmB,MAAM,OAAO;EACtC,MAAM,aAAa,iBAAiB,WAAW;AAE/C,SAAO,CACL,WAAW,KAAK,SAAS,KAAK,MAAM,GAAG,EACvC,GAAG,KAAK,OAAO,KAAK,UAClB,WAAW,UACT,KAAK,WAAW,KAAK,MAAM,MAAM,KAAK,IAAI,IAAI,IAAI,CAAC,IAAI,MAAM,UAC9D,CACF,CACF,CAAC,KAAK,KAAK;;;AAIhB,eAAsB,SACpB,QACA,MACA,SACA,cAC+C;AAC/C,KAAI,OAAO,WAAW,cAAc,EAAE,eAAe,QACnD,UAAS,OAAO,QAAQ;AAG1B,KAAI,eAAe,QAAQ;EACzB,MAAM,SAAS,MAAO,OAA4B,aAAa,SAAS,KAAK;AAE7E,MAAI,OAAO,OACT,OAAM,IAAI,gBAAgB,cAAc,OAAO,OAAO;AAGxD,SAAO,OAAO;;AAGhB,QAAO;;;;;AC8ET,MAAa,YAAY;CACvB,YAAY;CACZ,QAAQ;CACT;AAED,eAAe,WAAW,eAAkD;CAC1E,MAAM,UAAoB,EAAE;AAE5B,YAAW,MAAM,UAAU,eAAe;AACxC,MAAI,CAAC,OAAQ;AACb,MAAI,MAAM,QAAQ,OAAO,CAAE,SAAQ,KAAK,GAAI,MAAM,WAAW,OAAO,CAAE;MACjE,SAAQ,KAAK,OAAO;;AAG3B,QAAO;;AAGT,SAAgB,WAAW,SAAsB;CAC/C,IAAI;CACJ,IAAI;CACJ,MAAM,6BAAa,IAAI,KAAmB;CAE1C,eAAe,kBACb,EAAE,YAAY,UAAU,UACxB,MACY;AACZ,MAAI,WAAW,OACb,QAAO,MAAM,SACX,WAAW,QACX,MACA;GAAE,MAAM;GAAU;GAAQ,EAC1B,WAAW,SAAS,QAChB,0BAA0B,aAC1B,mBAAmB,WACxB;AAGH,SAAO;;AAGT,QAAO;EAIL,uBAAO,IAAI,KAAsB;EACjC,MAAM,KAAK,EAAE,QAAQ,aAAkD;AACrE,YAAS,MAAM;AACf,QAAK,MAAM,OAAO;AAClB,cAAW,OAAO;AAClB,aAAU,MAAM,WAAW;IAAC,mBAAmB;IAAE,QAAQ;IAAS,OAAO,OAAO;IAAQ,CAAC;AAEzF,QAAK,MAAM,UAAU,SAAS;IAC5B,MAAM,MAAM,MAAM,OAAO,QAAQ,KAAK,KAAK,kBAAkB,EAAE,OAAO;AACtE,QAAI,IAAK,UAAS;;AAIpB,OAAI,CAAC,QAAQ,UACX,OAAM,QAAQ,IACZ,OAAO,QAAQ,OAAO,WAAW,CAAC,IAAI,OAAO,CAAC,MAAM,eAAe;IACjE,MAAM,OAAO,WAAW;KACtB,GAAG;KACH,QAAQ,KAAK,KAAK,QAAQ,QAAQ,KAAK;KACvC,WAAW;MACT;MACA,QAAQ;MACR,KAAK,UAAU;MAChB;KACF,CAAC;AACF,UAAM,KAAK,KAAK,EAAE,QAAQ,UAAU,QAAQ,CAAC;AAC7C,eAAW,IAAI,MAAM,KAAK;KAC1B,CACH;;EAGL,gBAAgB;AACd,UAAO;;EAET,aAAa;AACX,UAAO;;EAET,YAA0B;AACxB,UAAO;;EAKT,wBAAgC;AAC9B,UAAO,KAAK,KAAK,QAAQ,QAAQ,oBAAoB;;EAEvD,aAAa;AACX,UAAO;;EAET,iBAAmC;AACjC,UAAO,MAAM,KAAK,OAAO,YAAY,QAAQ,CAAC;;EAEhD,cAAc,MAA0C;AACtD,UAAO,OAAO,YAAY,IAAI,KAAK;;EAErC,mBAAkC;AAChC,UAAO,EACL,MAAM,MACP;;EAEH,MAAM,WAAW,QAAsC;GACrD,MAAM,MAAM,KAAK,kBAAkB;AACnC,QAAK,MAAM,UAAU,QACnB,OAAM,OAAO,iBAAiB,KAAK,KAAK,OAAO;AAEjD,QAAK,MAAM,aAAa,WAAW,QAAQ,CACzC,OAAM,UAAU,WAAW,OAAO;;EAGtC,MAAM,KAAK,cAA2B,EAAE,EAAuB;GAC7D,MAAM,EAAE,cAAc,iBAAiB,QAAQ,UAAU;GACzD,MAAM,QAAQ,YAAY,KAAK;GAC/B,MAAM,MAAM,KAAK,kBAAkB;GACnC,MAAM,wBAAQ,IAAI,KAAa;GAC/B,MAAM,MAAkB;IACtB,SAAS,EAAE;IACX,YAAY,EAAE;IACf;AAED,QAAK,MAAM,MAAM,MAAM,QAAQ,IAC7B,QAAQ,KAAK,WAAW;AACtB,QAAK,gBAAgB,CAAC,aAAa,OAAO,IAAK,CAAC,OAAO,KAAM;AAC7D,WAAO,OAAO,KAAK,KAAK,IAAI;KAC5B,CACH,EAAE;AACD,QAAI,CAAC,GAAI;AACT,SAAK,MAAM,QAAQ,IAAI;AACrB,SAAI,MAAM,IAAI,KAAK,KAAK,CAAE;AAC1B,SAAI,QAAQ,KAAK,KAAK;AACtB,WAAM,IAAI,KAAK,KAAK;;;AAIxB,OAAI,OAAO;AACT,UAAM,QAAQ,IACZ,IAAI,QAAQ,IAAI,OAAO,UAAU;KAC/B,MAAM,OAAO,KAAK,KAAK,QAAQ,QAAQ,MAAM,KAAK;AAElD,WAAM,GAAG,MAAM,KAAK,QAAQ,KAAK,EAAE,EAAE,WAAW,MAAM,CAAC;AACvD,WAAM,GAAG,UAAU,MAAM,MAAM,QAAQ;MACvC,CACH;AAED,YAAQ,IACN,QAAQ,YACJ,SAAS,QAAQ,UAAU,KAAK,uBAAuB,YAAY,KAAK,GAAG,MAAM,MACjF,4BAA4B,YAAY,KAAK,GAAG,MAAM,IAC3D;;AAGH,QAAK,MAAM,CAAC,MAAM,cAAc,YAAY;AAC1C,QAAI,mBAAmB,CAAC,gBAAgB,KAAK,CAAE;AAC/C,QAAI,WAAW,SAAS,MAAM,UAAU,KAAK,YAAY,EAAE;;AAG7D,UAAO;;EAET,MAAM,cACJ,WACA,MACkB;GAClB,MAAM,MAAM;IACV,GAAG,KAAK,kBAAkB;IAC1B,GAAGA;IACJ;AAED,UAAO,MAAM,kBAAkBA,WAAS,KAAK;AAC7C,QAAK,MAAM,UAAU,QACnB,KAAI,OAAO,MAAM,UAAW,QAAQ,MAAM,OAAO,KAAK,UAAU,KAAK,KAAK,KAAK,IAAK;AAGtF,UAAO;;EAET,MAAM,qBACJ,WACA,MACkC;GAClC,MAAM,MAAM;IACV,GAAG,KAAK,kBAAkB;IAC1B,GAAGA;IACJ;AAED,UAAO,MAAM,kBAAkBA,WAAS,KAAK;AAC7C,QAAK,MAAM,UAAU,QACnB,KAAI,OAAO,KAAK,YAAa,QAAQ,MA
AM,OAAO,IAAI,YAAY,KAAK,KAAK,KAAK,IAAK;AAGxF,UAAO;;EAET,MAAM,eACJ,WACA,MACgB;GAChB,MAAM,MAAM;IACV,GAAG,KAAK,kBAAkB;IAC1B,GAAGA;IACJ;AAED,QAAK,MAAM,UAAU,QACnB,KAAI,OAAO,KAAK,MAAO,QAAQ,MAAM,OAAO,IAAI,MAAM,KAAK,KAAK,KAAK,IAAK;AAG5E,UAAO;;EAEV;;AAGH,SAAS,oBAA4B;CACnC,MAAM,qBAAqB;;;;;;AAO3B,QAAO,EACL,cAAc;EACZ,qBAAqB;GACnB,MAAM,QAAkB,EAAE;AAC1B,SAAM,KAAK,IAAI;AACf,SAAM,KAAK,eAAe;AAC1B,QAAK,MAAM,cAAc,KAAK,KAAK,gBAAgB,EAAE;IACnD,IAAI;AACJ,YAAQ,WAAW,MAAnB;KACE,KAAK;AACH,2BAAqB,WAAW;AAChC;KACF,KAAK;AACH,2BAAqB,WAAW,KAAK;AACrC;;AAGJ,QAAI,oBAAoB,sBACtB,OAAM,KAAK,MAAM,GAAG,WAAW,KAAK,IAAI,mBAAmB,IAAI,EAAE,CAAC;;AAGtE,SAAM,KAAK,MAAM;AACjB,SAAM,KAAK,IAAI;AACf,UAAO,MAAM,KAAK,KAAK;;EAEzB,cAAc,SAAS;AACrB,WAAQ,QAAQ,EAAE;AAClB,WAAQ,IAAI,iBAAiB,EAAE;AAC/B,WAAQ,IAAI,aAAa,KAAK,sBAAsB;;EAEvD,EACF"}
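Note (editorial sketch, not part of the published diff): the core source map above embeds the Plugin interface and createCore, whose emit({ write: true }) writes each returned EmitEntry under the core's outDir. The minimal plugin object below is shaped after that embedded declaration; the local types mirror it rather than importing from the package, since the internal module paths are not part of this diff.

// Sketch: a minimal plugin that contributes one generated file via `emit`.
interface EmitEntry {
  /** path relative to the output directory */
  path: string;
  content: string;
}

interface MinimalPlugin {
  name?: string;
  emit?: () => EmitEntry[] | Promise<EmitEntry[]>;
}

const helloPlugin: MinimalPlugin = {
  name: 'hello-plugin',
  // entries returned here are written under the core's `outDir` when emit runs with write enabled
  emit() {
    return [{ path: 'hello.ts', content: 'export const hello = "world";\n' }];
  },
};

console.log(helloPlugin.name);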
@@ -1 +1 @@
1
- {"version":3,"file":"fuma-matter-CHgJa_-B.js","names":["output: Output"],"sources":["../src/utils/fuma-matter.ts"],"sourcesContent":["/**\n * Inspired by https://github.com/jonschlinkert/gray-matter\n */\nimport { load } from 'js-yaml';\n\ninterface Output {\n /**\n * The matter section, including the delimiter.\n */\n matter: string;\n content: string;\n data: unknown;\n}\n\nconst regex = /^---\\r?\\n(.+?)\\r?\\n---\\r?\\n?/s;\n\n/**\n * parse frontmatter, it supports only yaml format\n */\nexport function fumaMatter(input: string): Output {\n const output: Output = { matter: '', data: {}, content: input };\n const match = regex.exec(input);\n if (!match) {\n return output;\n }\n\n // get the raw front-matter block\n output.matter = match[0];\n output.content = input.slice(match[0].length);\n\n const loaded = load(match[1]);\n output.data = loaded ?? {};\n\n return output;\n}\n"],"mappings":";;;;;;AAcA,MAAM,QAAQ;;;;AAKd,SAAgB,WAAW,OAAuB;CAChD,MAAMA,SAAiB;EAAE,QAAQ;EAAI,MAAM,EAAE;EAAE,SAAS;EAAO;CAC/D,MAAM,QAAQ,MAAM,KAAK,MAAM;AAC/B,KAAI,CAAC,MACH,QAAO;AAIT,QAAO,SAAS,MAAM;AACtB,QAAO,UAAU,MAAM,MAAM,MAAM,GAAG,OAAO;AAG7C,QAAO,OADQ,KAAK,MAAM,GAAG,IACL,EAAE;AAE1B,QAAO"}
1
+ {"version":3,"file":"fuma-matter-CHgJa_-B.js","names":[],"sources":["../src/utils/fuma-matter.ts"],"sourcesContent":["/**\n * Inspired by https://github.com/jonschlinkert/gray-matter\n */\nimport { load } from 'js-yaml';\n\ninterface Output {\n /**\n * The matter section, including the delimiter.\n */\n matter: string;\n content: string;\n data: unknown;\n}\n\nconst regex = /^---\\r?\\n(.+?)\\r?\\n---\\r?\\n?/s;\n\n/**\n * parse frontmatter, it supports only yaml format\n */\nexport function fumaMatter(input: string): Output {\n const output: Output = { matter: '', data: {}, content: input };\n const match = regex.exec(input);\n if (!match) {\n return output;\n }\n\n // get the raw front-matter block\n output.matter = match[0];\n output.content = input.slice(match[0].length);\n\n const loaded = load(match[1]);\n output.data = loaded ?? {};\n\n return output;\n}\n"],"mappings":";;;;;;AAcA,MAAM,QAAQ;;;;AAKd,SAAgB,WAAW,OAAuB;CAChD,MAAM,SAAiB;EAAE,QAAQ;EAAI,MAAM,EAAE;EAAE,SAAS;EAAO;CAC/D,MAAM,QAAQ,MAAM,KAAK,MAAM;AAC/B,KAAI,CAAC,MACH,QAAO;AAIT,QAAO,SAAS,MAAM;AACtB,QAAO,UAAU,MAAM,MAAM,MAAM,GAAG,OAAO;AAG7C,QAAO,OADQ,KAAK,MAAM,GAAG,IACL,EAAE;AAE1B,QAAO"}
@@ -1 +1 @@
1
- {"version":3,"file":"index-file-D9HsrWU_.js","names":["dynamicCollections: CollectionItem[]","serverOptions: ServerOptions","typeConfigs: string[]","out: Promise<EmitEntry>[]","metaGlob","partialOptions: CoreOptions","path","infoStr: string[]","collection: DocCollectionItem | undefined"],"sources":["../src/utils/fs-cache.ts","../src/plugins/index-file.ts"],"sourcesContent":["import fs from 'node:fs/promises';\nimport path from 'node:path';\n\nconst map = new Map<string, Promise<string>>();\n\nexport function createFSCache() {\n return {\n read(file: string): Promise<string> {\n const fullPath = toFullPath(file);\n const cached = map.get(fullPath);\n if (cached) return cached;\n\n const read = fs.readFile(fullPath).then((s) => s.toString());\n map.set(fullPath, read);\n return read;\n },\n\n delete(file: string) {\n map.delete(toFullPath(file));\n },\n };\n}\n\n/**\n * make file paths relative to cwd\n */\nfunction toFullPath(file: string) {\n if (path.isAbsolute(file)) {\n return path.relative(process.cwd(), file);\n }\n\n return file;\n}\n","import type { Core, CoreOptions, Plugin, PluginContext } from '@/core';\nimport type { CollectionItem, DocCollectionItem, MetaCollectionItem } from '@/config/build';\nimport path from 'path';\nimport { type CodeGen, createCodegen, ident, slash } from '@/utils/codegen';\nimport { glob } from 'tinyglobby';\nimport { createFSCache } from '@/utils/fs-cache';\nimport { createHash } from 'crypto';\nimport type { LazyEntry } from '@/runtime/dynamic';\nimport type { EmitEntry } from '@/core';\nimport { fumaMatter } from '@/utils/fuma-matter';\nimport type { ServerOptions } from '@/runtime/server';\n\nexport interface IndexFilePluginOptions {\n target?: 'default' | 'vite';\n\n /**\n * add `.js` extensions to imports, needed for ESM without bundler resolution\n */\n addJsExtension?: boolean;\n\n /**\n * Generate entry point for browser\n * @defaultValue true\n */\n browser?: boolean;\n\n /**\n * Generate entry point for dynamic compilation\n * @defaultValue true\n */\n dynamic?: boolean;\n}\n\nexport interface IndexFilePlugin {\n ['index-file']?: {\n generateTypeConfig?: (this: PluginContext) => string | void;\n serverOptions?: (this: PluginContext, options: ServerOptions) => void;\n };\n}\n\ninterface FileGenContext {\n core: Core;\n workspace?: string;\n codegen: CodeGen;\n serverOptions: ServerOptions;\n tc: string;\n}\n\nconst indexFileCache = createFSCache();\n\nexport default function indexFile(options: IndexFilePluginOptions = {}): Plugin {\n const { target = 'default', addJsExtension, browser = true, dynamic = true } = options;\n let dynamicCollections: CollectionItem[];\n\n function isDynamic(collection: CollectionItem) {\n return (\n (collection.type === 'docs' && collection.docs.dynamic) ||\n (collection.type === 'doc' && collection.dynamic)\n );\n }\n\n function generateConfigs(core: Core): {\n serverOptions: ServerOptions;\n tc: string;\n } {\n const serverOptions: ServerOptions = {};\n const typeConfigs: string[] = ['import(\"fumadocs-mdx/runtime/types\").InternalTypeConfig'];\n const ctx = core.getPluginContext();\n\n for (const plugin of core.getPlugins()) {\n const indexFilePlugin = plugin['index-file'];\n if (!indexFilePlugin) continue;\n\n indexFilePlugin.serverOptions?.call(ctx, serverOptions);\n const config = indexFilePlugin.generateTypeConfig?.call(ctx);\n if (config) typeConfigs.push(config);\n }\n\n return {\n serverOptions,\n tc: typeConfigs.join(' & '),\n };\n }\n\n return {\n name: 'index-file',\n config() {\n dynamicCollections = 
this.core.getCollections().filter(isDynamic);\n },\n configureServer(server) {\n if (!server.watcher) return;\n\n server.watcher.on('all', async (event, file) => {\n indexFileCache.delete(file);\n\n // dynamic collections always require re-generation on change\n if (dynamicCollections.length === 0) {\n // vite uses `import.meta.glob`, no need to re-generate\n if (target === 'vite') return;\n // only re-generate when adding/deleting entries\n if (target === 'default' && event === 'change') return;\n }\n\n const updatedCollection = this.core\n .getCollections()\n .find((collection) => collection.hasFile(file));\n\n if (!updatedCollection) return;\n if (!isDynamic(updatedCollection)) {\n if (target === 'vite') return;\n if (target === 'default' && event === 'change') return;\n }\n\n await this.core.emit({\n filterPlugin: (plugin) => plugin.name === 'index-file',\n filterWorkspace: () => false,\n write: true,\n });\n });\n },\n async emit() {\n const globCache = new Map<string, Promise<string[]>>();\n const { workspace, outDir } = this.core.getOptions();\n const { serverOptions, tc } = generateConfigs(this.core);\n const toEmitEntry = async (\n path: string,\n content: (ctx: FileGenContext) => Promise<void>,\n ): Promise<EmitEntry> => {\n const codegen = createCodegen({\n target,\n outDir: outDir,\n jsExtension: addJsExtension,\n globCache,\n });\n await content({\n core: this.core,\n codegen,\n serverOptions,\n tc,\n workspace: workspace?.name,\n });\n return {\n path,\n content: codegen.toString(),\n };\n };\n\n const out: Promise<EmitEntry>[] = [toEmitEntry('server.ts', generateServerIndexFile)];\n\n if (dynamic) out.push(toEmitEntry('dynamic.ts', generateDynamicIndexFile));\n\n if (browser) out.push(toEmitEntry('browser.ts', generateBrowserIndexFile));\n\n return await Promise.all(out);\n },\n };\n}\n\nasync function generateServerIndexFile(ctx: FileGenContext) {\n const { core, codegen, serverOptions, tc } = ctx;\n codegen.lines.push(\n `import { server } from 'fumadocs-mdx/runtime/server';`,\n `import type * as Config from '${codegen.formatImportPath(core.getOptions().configPath)}';`,\n '',\n `const create = server<typeof Config, ${tc}>(${JSON.stringify(serverOptions)});`,\n );\n\n async function generateCollectionObject(collection: CollectionItem): Promise<string | undefined> {\n const base = getBase(collection);\n\n switch (collection.type) {\n case 'docs': {\n if (collection.docs.dynamic) return;\n\n if (collection.docs.async) {\n const [metaGlob, headGlob, bodyGlob] = await Promise.all([\n generateMetaCollectionGlob(ctx, collection.meta, true),\n generateDocCollectionFrontmatterGlob(ctx, collection.docs, true),\n generateDocCollectionGlob(ctx, collection.docs),\n ]);\n\n return `await create.docsLazy(\"${collection.name}\", \"${base}\", ${metaGlob}, ${headGlob}, ${bodyGlob})`;\n }\n\n const [metaGlob, docGlob] = await Promise.all([\n generateMetaCollectionGlob(ctx, collection.meta, true),\n generateDocCollectionGlob(ctx, collection.docs, true),\n ]);\n\n return `await create.docs(\"${collection.name}\", \"${base}\", ${metaGlob}, ${docGlob})`;\n }\n case 'doc':\n if (collection.dynamic) return;\n\n if (collection.async) {\n const [headGlob, bodyGlob] = await Promise.all([\n generateDocCollectionFrontmatterGlob(ctx, collection, true),\n generateDocCollectionGlob(ctx, collection),\n ]);\n\n return `await create.docLazy(\"${collection.name}\", \"${base}\", ${headGlob}, ${bodyGlob})`;\n }\n\n return `await create.doc(\"${collection.name}\", \"${base}\", ${await 
generateDocCollectionGlob(\n ctx,\n collection,\n true,\n )})`;\n case 'meta':\n return `await create.meta(\"${collection.name}\", \"${base}\", ${await generateMetaCollectionGlob(\n ctx,\n collection,\n true,\n )})`;\n }\n }\n\n await codegen.pushAsync(\n core.getCollections().map(async (collection) => {\n const obj = await generateCollectionObject(collection);\n if (!obj) return;\n\n return `\\nexport const ${collection.name} = ${obj};`;\n }),\n );\n}\n\nasync function generateDynamicIndexFile(ctx: FileGenContext) {\n const { core, codegen, serverOptions, tc } = ctx;\n const { configPath, environment, outDir } = core.getOptions();\n // serializable config options\n const partialOptions: CoreOptions = {\n configPath,\n environment,\n outDir,\n };\n codegen.lines.push(\n `import { dynamic } from 'fumadocs-mdx/runtime/dynamic';`,\n `import * as Config from '${codegen.formatImportPath(configPath)}';`,\n '',\n `const create = await dynamic<typeof Config, ${tc}>(Config, ${JSON.stringify(partialOptions)}, ${JSON.stringify(serverOptions)});`,\n );\n\n async function generateCollectionObjectEntry(\n collection: DocCollectionItem,\n absolutePath: string,\n ) {\n const fullPath = path.relative(process.cwd(), absolutePath);\n const content = await indexFileCache.read(fullPath).catch(() => '');\n const parsed = fumaMatter(content);\n const data = await core.transformFrontmatter(\n {\n collection,\n filePath: fullPath,\n source: content,\n },\n parsed.data as Record<string, unknown>,\n );\n\n const hash = createHash('md5').update(content).digest('hex');\n const infoStr: string[] = [\n // make sure it's included in vercel/nft\n `absolutePath: path.resolve(${JSON.stringify(fullPath)})`,\n ];\n for (const [k, v] of Object.entries({\n info: {\n fullPath,\n path: path.relative(collection.dir, absolutePath),\n },\n data,\n hash,\n } satisfies LazyEntry)) {\n infoStr.push(`${k}: ${JSON.stringify(v)}`);\n }\n\n return `{ ${infoStr.join(', ')} }`;\n }\n\n async function generateCollectionObject(parent: CollectionItem): Promise<string | undefined> {\n let collection: DocCollectionItem | undefined;\n if (parent.type === 'doc') collection = parent;\n else if (parent.type === 'docs') collection = parent.docs;\n\n if (!collection || !collection.dynamic) return;\n\n const files = await glob(collection.patterns, {\n cwd: collection.dir,\n absolute: true,\n });\n const entries = await Promise.all(\n files.map((file) => generateCollectionObjectEntry(collection, file)),\n );\n\n switch (parent.type) {\n case 'docs': {\n const metaGlob = await generateMetaCollectionGlob(ctx, parent.meta, true);\n\n return `await create.docs(\"${parent.name}\", \"${getBase(parent)}\", ${metaGlob}, ${entries.join(', ')})`;\n }\n case 'doc':\n return `await create.doc(\"${collection.name}\", \"${getBase(collection)}\", ${entries.join(', ')})`;\n }\n }\n\n await codegen.pushAsync(\n core.getCollections().map(async (collection) => {\n const obj = await generateCollectionObject(collection);\n if (!obj) return;\n\n return `\\nexport const ${collection.name} = ${obj};`;\n }),\n );\n}\n\nasync function generateBrowserIndexFile(ctx: FileGenContext) {\n const { core, codegen, tc } = ctx;\n codegen.lines.push(\n `import { browser } from 'fumadocs-mdx/runtime/browser';`,\n `import type * as Config from '${codegen.formatImportPath(core.getOptions().configPath)}';`,\n '',\n `const create = browser<typeof Config, ${tc}>();`,\n );\n\n async function generateCollectionObject(collection: CollectionItem): Promise<string | undefined> {\n switch 
(collection.type) {\n case 'docs': {\n if (collection.docs.dynamic) return;\n\n return generateCollectionObject(collection.docs);\n }\n case 'doc':\n if (collection.dynamic) return;\n\n return `create.doc(\"${collection.name}\", ${await generateDocCollectionGlob(ctx, collection)})`;\n }\n }\n\n codegen.lines.push('const browserCollections = {');\n\n await codegen.pushAsync(\n core.getCollections().map(async (collection) => {\n const obj = await generateCollectionObject(collection);\n if (!obj) return;\n\n return ident(`${collection.name}: ${obj},`);\n }),\n );\n\n codegen.lines.push('};', 'export default browserCollections;');\n}\n\nfunction getBase(collection: CollectionItem) {\n return slash(path.relative(process.cwd(), collection.dir));\n}\n\nfunction generateDocCollectionFrontmatterGlob(\n { codegen, workspace }: FileGenContext,\n collection: DocCollectionItem,\n eager = false,\n) {\n return codegen.generateGlobImport(collection.patterns, {\n query: {\n collection: collection.name,\n only: 'frontmatter',\n workspace,\n },\n import: 'frontmatter',\n base: collection.dir,\n eager,\n });\n}\n\nfunction generateDocCollectionGlob(\n { codegen, workspace }: FileGenContext,\n collection: DocCollectionItem,\n eager = false,\n) {\n return codegen.generateGlobImport(collection.patterns, {\n query: {\n collection: collection.name,\n workspace,\n },\n base: collection.dir,\n eager,\n });\n}\n\nfunction generateMetaCollectionGlob(\n { codegen, workspace }: FileGenContext,\n collection: MetaCollectionItem,\n eager = false,\n) {\n return codegen.generateGlobImport(collection.patterns, {\n query: {\n collection: collection.name,\n workspace,\n },\n import: 'default',\n base: collection.dir,\n eager,\n });\n}\n"],"mappings":";;;;;;;;;AAGA,MAAM,sBAAM,IAAI,KAA8B;AAE9C,SAAgB,gBAAgB;AAC9B,QAAO;EACL,KAAK,MAA+B;GAClC,MAAM,WAAW,WAAW,KAAK;GACjC,MAAM,SAAS,IAAI,IAAI,SAAS;AAChC,OAAI,OAAQ,QAAO;GAEnB,MAAM,OAAO,GAAG,SAAS,SAAS,CAAC,MAAM,MAAM,EAAE,UAAU,CAAC;AAC5D,OAAI,IAAI,UAAU,KAAK;AACvB,UAAO;;EAGT,OAAO,MAAc;AACnB,OAAI,OAAO,WAAW,KAAK,CAAC;;EAE/B;;;;;AAMH,SAAS,WAAW,MAAc;AAChC,KAAI,KAAK,WAAW,KAAK,CACvB,QAAO,KAAK,SAAS,QAAQ,KAAK,EAAE,KAAK;AAG3C,QAAO;;;;;ACiBT,MAAM,iBAAiB,eAAe;AAEtC,SAAwB,UAAU,UAAkC,EAAE,EAAU;CAC9E,MAAM,EAAE,SAAS,WAAW,gBAAgB,UAAU,MAAM,UAAU,SAAS;CAC/E,IAAIA;CAEJ,SAAS,UAAU,YAA4B;AAC7C,SACG,WAAW,SAAS,UAAU,WAAW,KAAK,WAC9C,WAAW,SAAS,SAAS,WAAW;;CAI7C,SAAS,gBAAgB,MAGvB;EACA,MAAMC,gBAA+B,EAAE;EACvC,MAAMC,cAAwB,CAAC,4DAA0D;EACzF,MAAM,MAAM,KAAK,kBAAkB;AAEnC,OAAK,MAAM,UAAU,KAAK,YAAY,EAAE;GACtC,MAAM,kBAAkB,OAAO;AAC/B,OAAI,CAAC,gBAAiB;AAEtB,mBAAgB,eAAe,KAAK,KAAK,cAAc;GACvD,MAAM,SAAS,gBAAgB,oBAAoB,KAAK,IAAI;AAC5D,OAAI,OAAQ,aAAY,KAAK,OAAO;;AAGtC,SAAO;GACL;GACA,IAAI,YAAY,KAAK,MAAM;GAC5B;;AAGH,QAAO;EACL,MAAM;EACN,SAAS;AACP,wBAAqB,KAAK,KAAK,gBAAgB,CAAC,OAAO,UAAU;;EAEnE,gBAAgB,QAAQ;AACtB,OAAI,CAAC,OAAO,QAAS;AAErB,UAAO,QAAQ,GAAG,OAAO,OAAO,OAAO,SAAS;AAC9C,mBAAe,OAAO,KAAK;AAG3B,QAAI,mBAAmB,WAAW,GAAG;AAEnC,SAAI,WAAW,OAAQ;AAEvB,SAAI,WAAW,aAAa,UAAU,SAAU;;IAGlD,MAAM,oBAAoB,KAAK,KAC5B,gBAAgB,CAChB,MAAM,eAAe,WAAW,QAAQ,KAAK,CAAC;AAEjD,QAAI,CAAC,kBAAmB;AACxB,QAAI,CAAC,UAAU,kBAAkB,EAAE;AACjC,SAAI,WAAW,OAAQ;AACvB,SAAI,WAAW,aAAa,UAAU,SAAU;;AAGlD,UAAM,KAAK,KAAK,KAAK;KACnB,eAAe,WAAW,OAAO,SAAS;KAC1C,uBAAuB;KACvB,OAAO;KACR,CAAC;KACF;;EAEJ,MAAM,OAAO;GACX,MAAM,4BAAY,IAAI,KAAgC;GACtD,MAAM,EAAE,WAAW,WAAW,KAAK,KAAK,YAAY;GACpD,MAAM,EAAE,eAAe,OAAO,gBAAgB,KAAK,KAAK;GACxD,MAAM,cAAc,OAClB,QACA,YACuB;IACvB,MAAM,UAAU,cAAc;KAC5B;KACQ;KACR,aAAa;KACb;KACD,CAAC;AACF,UAAM,QAAQ;KACZ,MAAM,KAAK;KACX;KACA;KACA;KACA,WAAW,WAAW;KACvB,CAAC;AACF,WAAO;KACL;KACA,SAAS,QA
AQ,UAAU;KAC5B;;GAGH,MAAMC,MAA4B,CAAC,YAAY,aAAa,wBAAwB,CAAC;AAErF,OAAI,QAAS,KAAI,KAAK,YAAY,cAAc,yBAAyB,CAAC;AAE1E,OAAI,QAAS,KAAI,KAAK,YAAY,cAAc,yBAAyB,CAAC;AAE1E,UAAO,MAAM,QAAQ,IAAI,IAAI;;EAEhC;;AAGH,eAAe,wBAAwB,KAAqB;CAC1D,MAAM,EAAE,MAAM,SAAS,eAAe,OAAO;AAC7C,SAAQ,MAAM,KACZ,yDACA,iCAAiC,QAAQ,iBAAiB,KAAK,YAAY,CAAC,WAAW,CAAC,KACxF,IACA,wCAAwC,GAAG,IAAI,KAAK,UAAU,cAAc,CAAC,IAC9E;CAED,eAAe,yBAAyB,YAAyD;EAC/F,MAAM,OAAO,QAAQ,WAAW;AAEhC,UAAQ,WAAW,MAAnB;GACE,KAAK,QAAQ;AACX,QAAI,WAAW,KAAK,QAAS;AAE7B,QAAI,WAAW,KAAK,OAAO;KACzB,MAAM,CAACC,YAAU,UAAU,YAAY,MAAM,QAAQ,IAAI;MACvD,2BAA2B,KAAK,WAAW,MAAM,KAAK;MACtD,qCAAqC,KAAK,WAAW,MAAM,KAAK;MAChE,0BAA0B,KAAK,WAAW,KAAK;MAChD,CAAC;AAEF,YAAO,0BAA0B,WAAW,KAAK,MAAM,KAAK,KAAKA,WAAS,IAAI,SAAS,IAAI,SAAS;;IAGtG,MAAM,CAAC,UAAU,WAAW,MAAM,QAAQ,IAAI,CAC5C,2BAA2B,KAAK,WAAW,MAAM,KAAK,EACtD,0BAA0B,KAAK,WAAW,MAAM,KAAK,CACtD,CAAC;AAEF,WAAO,sBAAsB,WAAW,KAAK,MAAM,KAAK,KAAK,SAAS,IAAI,QAAQ;;GAEpF,KAAK;AACH,QAAI,WAAW,QAAS;AAExB,QAAI,WAAW,OAAO;KACpB,MAAM,CAAC,UAAU,YAAY,MAAM,QAAQ,IAAI,CAC7C,qCAAqC,KAAK,YAAY,KAAK,EAC3D,0BAA0B,KAAK,WAAW,CAC3C,CAAC;AAEF,YAAO,yBAAyB,WAAW,KAAK,MAAM,KAAK,KAAK,SAAS,IAAI,SAAS;;AAGxF,WAAO,qBAAqB,WAAW,KAAK,MAAM,KAAK,KAAK,MAAM,0BAChE,KACA,YACA,KACD,CAAC;GACJ,KAAK,OACH,QAAO,sBAAsB,WAAW,KAAK,MAAM,KAAK,KAAK,MAAM,2BACjE,KACA,YACA,KACD,CAAC;;;AAIR,OAAM,QAAQ,UACZ,KAAK,gBAAgB,CAAC,IAAI,OAAO,eAAe;EAC9C,MAAM,MAAM,MAAM,yBAAyB,WAAW;AACtD,MAAI,CAAC,IAAK;AAEV,SAAO,kBAAkB,WAAW,KAAK,KAAK,IAAI;GAClD,CACH;;AAGH,eAAe,yBAAyB,KAAqB;CAC3D,MAAM,EAAE,MAAM,SAAS,eAAe,OAAO;CAC7C,MAAM,EAAE,YAAY,aAAa,WAAW,KAAK,YAAY;CAE7D,MAAMC,iBAA8B;EAClC;EACA;EACA;EACD;AACD,SAAQ,MAAM,KACZ,2DACA,4BAA4B,QAAQ,iBAAiB,WAAW,CAAC,KACjE,IACA,+CAA+C,GAAG,YAAY,KAAK,UAAU,eAAe,CAAC,IAAI,KAAK,UAAU,cAAc,CAAC,IAChI;CAED,eAAe,8BACb,YACA,cACA;EACA,MAAM,WAAWC,OAAK,SAAS,QAAQ,KAAK,EAAE,aAAa;EAC3D,MAAM,UAAU,MAAM,eAAe,KAAK,SAAS,CAAC,YAAY,GAAG;EACnE,MAAM,SAAS,WAAW,QAAQ;EAClC,MAAM,OAAO,MAAM,KAAK,qBACtB;GACE;GACA,UAAU;GACV,QAAQ;GACT,EACD,OAAO,KACR;EAED,MAAM,OAAO,WAAW,MAAM,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;EAC5D,MAAMC,UAAoB,CAExB,8BAA8B,KAAK,UAAU,SAAS,CAAC,GACxD;AACD,OAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ;GAClC,MAAM;IACJ;IACA,MAAMD,OAAK,SAAS,WAAW,KAAK,aAAa;IAClD;GACD;GACA;GACD,CAAqB,CACpB,SAAQ,KAAK,GAAG,EAAE,IAAI,KAAK,UAAU,EAAE,GAAG;AAG5C,SAAO,KAAK,QAAQ,KAAK,KAAK,CAAC;;CAGjC,eAAe,yBAAyB,QAAqD;EAC3F,IAAIE;AACJ,MAAI,OAAO,SAAS,MAAO,cAAa;WAC/B,OAAO,SAAS,OAAQ,cAAa,OAAO;AAErD,MAAI,CAAC,cAAc,CAAC,WAAW,QAAS;EAExC,MAAM,QAAQ,MAAM,KAAK,WAAW,UAAU;GAC5C,KAAK,WAAW;GAChB,UAAU;GACX,CAAC;EACF,MAAM,UAAU,MAAM,QAAQ,IAC5B,MAAM,KAAK,SAAS,8BAA8B,YAAY,KAAK,CAAC,CACrE;AAED,UAAQ,OAAO,MAAf;GACE,KAAK,QAAQ;IACX,MAAM,WAAW,MAAM,2BAA2B,KAAK,OAAO,MAAM,KAAK;AAEzE,WAAO,sBAAsB,OAAO,KAAK,MAAM,QAAQ,OAAO,CAAC,KAAK,SAAS,IAAI,QAAQ,KAAK,KAAK,CAAC;;GAEtG,KAAK,MACH,QAAO,qBAAqB,WAAW,KAAK,MAAM,QAAQ,WAAW,CAAC,KAAK,QAAQ,KAAK,KAAK,CAAC;;;AAIpG,OAAM,QAAQ,UACZ,KAAK,gBAAgB,CAAC,IAAI,OAAO,eAAe;EAC9C,MAAM,MAAM,MAAM,yBAAyB,WAAW;AACtD,MAAI,CAAC,IAAK;AAEV,SAAO,kBAAkB,WAAW,KAAK,KAAK,IAAI;GAClD,CACH;;AAGH,eAAe,yBAAyB,KAAqB;CAC3D,MAAM,EAAE,MAAM,SAAS,OAAO;AAC9B,SAAQ,MAAM,KACZ,2DACA,iCAAiC,QAAQ,iBAAiB,KAAK,YAAY,CAAC,WAAW,CAAC,KACxF,IACA,yCAAyC,GAAG,MAC7C;CAED,eAAe,yBAAyB,YAAyD;AAC/F,UAAQ,WAAW,MAAnB;GACE,KAAK;AACH,QAAI,WAAW,KAAK,QAAS;AAE7B,WAAO,yBAAyB,WAAW,KAAK;GAElD,KAAK;AACH,QAAI,WAAW,QAAS;AAExB,WAAO,eAAe,WAAW,KAAK,KAAK,MAAM,0BAA0B,KAAK,WAAW,CAAC;;;AAIlG,SAAQ,MAAM,KAAK,+BAA+B;AAElD,OAAM,QAAQ,UACZ,KAAK,gBAAgB,CAAC,IAAI,OAAO,eAAe;EAC9C,MAAM,MAAM,MAAM,yBAAyB,WAAW;AACtD,MAAI,CAAC,IAAK;AAEV,SAAO,MAAM,GAAG,WAAW,KAAK,IAAI,IAAI,GAAG;GAC3C,CACH;AAED,SAAQ,MAAM,KAAK,MAAM,qCAAqC;;AAGhE,SAAS,QAAQ,YAA4B;AAC3C,QA
AO,MAAMF,OAAK,SAAS,QAAQ,KAAK,EAAE,WAAW,IAAI,CAAC;;AAG5D,SAAS,qCACP,EAAE,SAAS,aACX,YACA,QAAQ,OACR;AACA,QAAO,QAAQ,mBAAmB,WAAW,UAAU;EACrD,OAAO;GACL,YAAY,WAAW;GACvB,MAAM;GACN;GACD;EACD,QAAQ;EACR,MAAM,WAAW;EACjB;EACD,CAAC;;AAGJ,SAAS,0BACP,EAAE,SAAS,aACX,YACA,QAAQ,OACR;AACA,QAAO,QAAQ,mBAAmB,WAAW,UAAU;EACrD,OAAO;GACL,YAAY,WAAW;GACvB;GACD;EACD,MAAM,WAAW;EACjB;EACD,CAAC;;AAGJ,SAAS,2BACP,EAAE,SAAS,aACX,YACA,QAAQ,OACR;AACA,QAAO,QAAQ,mBAAmB,WAAW,UAAU;EACrD,OAAO;GACL,YAAY,WAAW;GACvB;GACD;EACD,QAAQ;EACR,MAAM,WAAW;EACjB;EACD,CAAC"}
1
+ {"version":3,"file":"index-file-D9HsrWU_.js","names":["metaGlob","path"],"sources":["../src/utils/fs-cache.ts","../src/plugins/index-file.ts"],"sourcesContent":["import fs from 'node:fs/promises';\nimport path from 'node:path';\n\nconst map = new Map<string, Promise<string>>();\n\nexport function createFSCache() {\n return {\n read(file: string): Promise<string> {\n const fullPath = toFullPath(file);\n const cached = map.get(fullPath);\n if (cached) return cached;\n\n const read = fs.readFile(fullPath).then((s) => s.toString());\n map.set(fullPath, read);\n return read;\n },\n\n delete(file: string) {\n map.delete(toFullPath(file));\n },\n };\n}\n\n/**\n * make file paths relative to cwd\n */\nfunction toFullPath(file: string) {\n if (path.isAbsolute(file)) {\n return path.relative(process.cwd(), file);\n }\n\n return file;\n}\n","import type { Core, CoreOptions, Plugin, PluginContext } from '@/core';\nimport type { CollectionItem, DocCollectionItem, MetaCollectionItem } from '@/config/build';\nimport path from 'path';\nimport { type CodeGen, createCodegen, ident, slash } from '@/utils/codegen';\nimport { glob } from 'tinyglobby';\nimport { createFSCache } from '@/utils/fs-cache';\nimport { createHash } from 'crypto';\nimport type { LazyEntry } from '@/runtime/dynamic';\nimport type { EmitEntry } from '@/core';\nimport { fumaMatter } from '@/utils/fuma-matter';\nimport type { ServerOptions } from '@/runtime/server';\n\nexport interface IndexFilePluginOptions {\n target?: 'default' | 'vite';\n\n /**\n * add `.js` extensions to imports, needed for ESM without bundler resolution\n */\n addJsExtension?: boolean;\n\n /**\n * Generate entry point for browser\n * @defaultValue true\n */\n browser?: boolean;\n\n /**\n * Generate entry point for dynamic compilation\n * @defaultValue true\n */\n dynamic?: boolean;\n}\n\nexport interface IndexFilePlugin {\n ['index-file']?: {\n generateTypeConfig?: (this: PluginContext) => string | void;\n serverOptions?: (this: PluginContext, options: ServerOptions) => void;\n };\n}\n\ninterface FileGenContext {\n core: Core;\n workspace?: string;\n codegen: CodeGen;\n serverOptions: ServerOptions;\n tc: string;\n}\n\nconst indexFileCache = createFSCache();\n\nexport default function indexFile(options: IndexFilePluginOptions = {}): Plugin {\n const { target = 'default', addJsExtension, browser = true, dynamic = true } = options;\n let dynamicCollections: CollectionItem[];\n\n function isDynamic(collection: CollectionItem) {\n return (\n (collection.type === 'docs' && collection.docs.dynamic) ||\n (collection.type === 'doc' && collection.dynamic)\n );\n }\n\n function generateConfigs(core: Core): {\n serverOptions: ServerOptions;\n tc: string;\n } {\n const serverOptions: ServerOptions = {};\n const typeConfigs: string[] = ['import(\"fumadocs-mdx/runtime/types\").InternalTypeConfig'];\n const ctx = core.getPluginContext();\n\n for (const plugin of core.getPlugins()) {\n const indexFilePlugin = plugin['index-file'];\n if (!indexFilePlugin) continue;\n\n indexFilePlugin.serverOptions?.call(ctx, serverOptions);\n const config = indexFilePlugin.generateTypeConfig?.call(ctx);\n if (config) typeConfigs.push(config);\n }\n\n return {\n serverOptions,\n tc: typeConfigs.join(' & '),\n };\n }\n\n return {\n name: 'index-file',\n config() {\n dynamicCollections = this.core.getCollections().filter(isDynamic);\n },\n configureServer(server) {\n if (!server.watcher) return;\n\n server.watcher.on('all', async (event, file) => {\n indexFileCache.delete(file);\n\n // dynamic 
collections always require re-generation on change\n if (dynamicCollections.length === 0) {\n // vite uses `import.meta.glob`, no need to re-generate\n if (target === 'vite') return;\n // only re-generate when adding/deleting entries\n if (target === 'default' && event === 'change') return;\n }\n\n const updatedCollection = this.core\n .getCollections()\n .find((collection) => collection.hasFile(file));\n\n if (!updatedCollection) return;\n if (!isDynamic(updatedCollection)) {\n if (target === 'vite') return;\n if (target === 'default' && event === 'change') return;\n }\n\n await this.core.emit({\n filterPlugin: (plugin) => plugin.name === 'index-file',\n filterWorkspace: () => false,\n write: true,\n });\n });\n },\n async emit() {\n const globCache = new Map<string, Promise<string[]>>();\n const { workspace, outDir } = this.core.getOptions();\n const { serverOptions, tc } = generateConfigs(this.core);\n const toEmitEntry = async (\n path: string,\n content: (ctx: FileGenContext) => Promise<void>,\n ): Promise<EmitEntry> => {\n const codegen = createCodegen({\n target,\n outDir: outDir,\n jsExtension: addJsExtension,\n globCache,\n });\n await content({\n core: this.core,\n codegen,\n serverOptions,\n tc,\n workspace: workspace?.name,\n });\n return {\n path,\n content: codegen.toString(),\n };\n };\n\n const out: Promise<EmitEntry>[] = [toEmitEntry('server.ts', generateServerIndexFile)];\n\n if (dynamic) out.push(toEmitEntry('dynamic.ts', generateDynamicIndexFile));\n\n if (browser) out.push(toEmitEntry('browser.ts', generateBrowserIndexFile));\n\n return await Promise.all(out);\n },\n };\n}\n\nasync function generateServerIndexFile(ctx: FileGenContext) {\n const { core, codegen, serverOptions, tc } = ctx;\n codegen.lines.push(\n `import { server } from 'fumadocs-mdx/runtime/server';`,\n `import type * as Config from '${codegen.formatImportPath(core.getOptions().configPath)}';`,\n '',\n `const create = server<typeof Config, ${tc}>(${JSON.stringify(serverOptions)});`,\n );\n\n async function generateCollectionObject(collection: CollectionItem): Promise<string | undefined> {\n const base = getBase(collection);\n\n switch (collection.type) {\n case 'docs': {\n if (collection.docs.dynamic) return;\n\n if (collection.docs.async) {\n const [metaGlob, headGlob, bodyGlob] = await Promise.all([\n generateMetaCollectionGlob(ctx, collection.meta, true),\n generateDocCollectionFrontmatterGlob(ctx, collection.docs, true),\n generateDocCollectionGlob(ctx, collection.docs),\n ]);\n\n return `await create.docsLazy(\"${collection.name}\", \"${base}\", ${metaGlob}, ${headGlob}, ${bodyGlob})`;\n }\n\n const [metaGlob, docGlob] = await Promise.all([\n generateMetaCollectionGlob(ctx, collection.meta, true),\n generateDocCollectionGlob(ctx, collection.docs, true),\n ]);\n\n return `await create.docs(\"${collection.name}\", \"${base}\", ${metaGlob}, ${docGlob})`;\n }\n case 'doc':\n if (collection.dynamic) return;\n\n if (collection.async) {\n const [headGlob, bodyGlob] = await Promise.all([\n generateDocCollectionFrontmatterGlob(ctx, collection, true),\n generateDocCollectionGlob(ctx, collection),\n ]);\n\n return `await create.docLazy(\"${collection.name}\", \"${base}\", ${headGlob}, ${bodyGlob})`;\n }\n\n return `await create.doc(\"${collection.name}\", \"${base}\", ${await generateDocCollectionGlob(\n ctx,\n collection,\n true,\n )})`;\n case 'meta':\n return `await create.meta(\"${collection.name}\", \"${base}\", ${await generateMetaCollectionGlob(\n ctx,\n collection,\n true,\n )})`;\n }\n }\n\n await 
codegen.pushAsync(\n core.getCollections().map(async (collection) => {\n const obj = await generateCollectionObject(collection);\n if (!obj) return;\n\n return `\\nexport const ${collection.name} = ${obj};`;\n }),\n );\n}\n\nasync function generateDynamicIndexFile(ctx: FileGenContext) {\n const { core, codegen, serverOptions, tc } = ctx;\n const { configPath, environment, outDir } = core.getOptions();\n // serializable config options\n const partialOptions: CoreOptions = {\n configPath,\n environment,\n outDir,\n };\n codegen.lines.push(\n `import { dynamic } from 'fumadocs-mdx/runtime/dynamic';`,\n `import * as Config from '${codegen.formatImportPath(configPath)}';`,\n '',\n `const create = await dynamic<typeof Config, ${tc}>(Config, ${JSON.stringify(partialOptions)}, ${JSON.stringify(serverOptions)});`,\n );\n\n async function generateCollectionObjectEntry(\n collection: DocCollectionItem,\n absolutePath: string,\n ) {\n const fullPath = path.relative(process.cwd(), absolutePath);\n const content = await indexFileCache.read(fullPath).catch(() => '');\n const parsed = fumaMatter(content);\n const data = await core.transformFrontmatter(\n {\n collection,\n filePath: fullPath,\n source: content,\n },\n parsed.data as Record<string, unknown>,\n );\n\n const hash = createHash('md5').update(content).digest('hex');\n const infoStr: string[] = [\n // make sure it's included in vercel/nft\n `absolutePath: path.resolve(${JSON.stringify(fullPath)})`,\n ];\n for (const [k, v] of Object.entries({\n info: {\n fullPath,\n path: path.relative(collection.dir, absolutePath),\n },\n data,\n hash,\n } satisfies LazyEntry)) {\n infoStr.push(`${k}: ${JSON.stringify(v)}`);\n }\n\n return `{ ${infoStr.join(', ')} }`;\n }\n\n async function generateCollectionObject(parent: CollectionItem): Promise<string | undefined> {\n let collection: DocCollectionItem | undefined;\n if (parent.type === 'doc') collection = parent;\n else if (parent.type === 'docs') collection = parent.docs;\n\n if (!collection || !collection.dynamic) return;\n\n const files = await glob(collection.patterns, {\n cwd: collection.dir,\n absolute: true,\n });\n const entries = await Promise.all(\n files.map((file) => generateCollectionObjectEntry(collection, file)),\n );\n\n switch (parent.type) {\n case 'docs': {\n const metaGlob = await generateMetaCollectionGlob(ctx, parent.meta, true);\n\n return `await create.docs(\"${parent.name}\", \"${getBase(parent)}\", ${metaGlob}, ${entries.join(', ')})`;\n }\n case 'doc':\n return `await create.doc(\"${collection.name}\", \"${getBase(collection)}\", ${entries.join(', ')})`;\n }\n }\n\n await codegen.pushAsync(\n core.getCollections().map(async (collection) => {\n const obj = await generateCollectionObject(collection);\n if (!obj) return;\n\n return `\\nexport const ${collection.name} = ${obj};`;\n }),\n );\n}\n\nasync function generateBrowserIndexFile(ctx: FileGenContext) {\n const { core, codegen, tc } = ctx;\n codegen.lines.push(\n `import { browser } from 'fumadocs-mdx/runtime/browser';`,\n `import type * as Config from '${codegen.formatImportPath(core.getOptions().configPath)}';`,\n '',\n `const create = browser<typeof Config, ${tc}>();`,\n );\n\n async function generateCollectionObject(collection: CollectionItem): Promise<string | undefined> {\n switch (collection.type) {\n case 'docs': {\n if (collection.docs.dynamic) return;\n\n return generateCollectionObject(collection.docs);\n }\n case 'doc':\n if (collection.dynamic) return;\n\n return `create.doc(\"${collection.name}\", ${await 
generateDocCollectionGlob(ctx, collection)})`;\n }\n }\n\n codegen.lines.push('const browserCollections = {');\n\n await codegen.pushAsync(\n core.getCollections().map(async (collection) => {\n const obj = await generateCollectionObject(collection);\n if (!obj) return;\n\n return ident(`${collection.name}: ${obj},`);\n }),\n );\n\n codegen.lines.push('};', 'export default browserCollections;');\n}\n\nfunction getBase(collection: CollectionItem) {\n return slash(path.relative(process.cwd(), collection.dir));\n}\n\nfunction generateDocCollectionFrontmatterGlob(\n { codegen, workspace }: FileGenContext,\n collection: DocCollectionItem,\n eager = false,\n) {\n return codegen.generateGlobImport(collection.patterns, {\n query: {\n collection: collection.name,\n only: 'frontmatter',\n workspace,\n },\n import: 'frontmatter',\n base: collection.dir,\n eager,\n });\n}\n\nfunction generateDocCollectionGlob(\n { codegen, workspace }: FileGenContext,\n collection: DocCollectionItem,\n eager = false,\n) {\n return codegen.generateGlobImport(collection.patterns, {\n query: {\n collection: collection.name,\n workspace,\n },\n base: collection.dir,\n eager,\n });\n}\n\nfunction generateMetaCollectionGlob(\n { codegen, workspace }: FileGenContext,\n collection: MetaCollectionItem,\n eager = false,\n) {\n return codegen.generateGlobImport(collection.patterns, {\n query: {\n collection: collection.name,\n workspace,\n },\n import: 'default',\n base: collection.dir,\n eager,\n });\n}\n"],"mappings":";;;;;;;;;AAGA,MAAM,sBAAM,IAAI,KAA8B;AAE9C,SAAgB,gBAAgB;AAC9B,QAAO;EACL,KAAK,MAA+B;GAClC,MAAM,WAAW,WAAW,KAAK;GACjC,MAAM,SAAS,IAAI,IAAI,SAAS;AAChC,OAAI,OAAQ,QAAO;GAEnB,MAAM,OAAO,GAAG,SAAS,SAAS,CAAC,MAAM,MAAM,EAAE,UAAU,CAAC;AAC5D,OAAI,IAAI,UAAU,KAAK;AACvB,UAAO;;EAGT,OAAO,MAAc;AACnB,OAAI,OAAO,WAAW,KAAK,CAAC;;EAE/B;;;;;AAMH,SAAS,WAAW,MAAc;AAChC,KAAI,KAAK,WAAW,KAAK,CACvB,QAAO,KAAK,SAAS,QAAQ,KAAK,EAAE,KAAK;AAG3C,QAAO;;;;;ACiBT,MAAM,iBAAiB,eAAe;AAEtC,SAAwB,UAAU,UAAkC,EAAE,EAAU;CAC9E,MAAM,EAAE,SAAS,WAAW,gBAAgB,UAAU,MAAM,UAAU,SAAS;CAC/E,IAAI;CAEJ,SAAS,UAAU,YAA4B;AAC7C,SACG,WAAW,SAAS,UAAU,WAAW,KAAK,WAC9C,WAAW,SAAS,SAAS,WAAW;;CAI7C,SAAS,gBAAgB,MAGvB;EACA,MAAM,gBAA+B,EAAE;EACvC,MAAM,cAAwB,CAAC,4DAA0D;EACzF,MAAM,MAAM,KAAK,kBAAkB;AAEnC,OAAK,MAAM,UAAU,KAAK,YAAY,EAAE;GACtC,MAAM,kBAAkB,OAAO;AAC/B,OAAI,CAAC,gBAAiB;AAEtB,mBAAgB,eAAe,KAAK,KAAK,cAAc;GACvD,MAAM,SAAS,gBAAgB,oBAAoB,KAAK,IAAI;AAC5D,OAAI,OAAQ,aAAY,KAAK,OAAO;;AAGtC,SAAO;GACL;GACA,IAAI,YAAY,KAAK,MAAM;GAC5B;;AAGH,QAAO;EACL,MAAM;EACN,SAAS;AACP,wBAAqB,KAAK,KAAK,gBAAgB,CAAC,OAAO,UAAU;;EAEnE,gBAAgB,QAAQ;AACtB,OAAI,CAAC,OAAO,QAAS;AAErB,UAAO,QAAQ,GAAG,OAAO,OAAO,OAAO,SAAS;AAC9C,mBAAe,OAAO,KAAK;AAG3B,QAAI,mBAAmB,WAAW,GAAG;AAEnC,SAAI,WAAW,OAAQ;AAEvB,SAAI,WAAW,aAAa,UAAU,SAAU;;IAGlD,MAAM,oBAAoB,KAAK,KAC5B,gBAAgB,CAChB,MAAM,eAAe,WAAW,QAAQ,KAAK,CAAC;AAEjD,QAAI,CAAC,kBAAmB;AACxB,QAAI,CAAC,UAAU,kBAAkB,EAAE;AACjC,SAAI,WAAW,OAAQ;AACvB,SAAI,WAAW,aAAa,UAAU,SAAU;;AAGlD,UAAM,KAAK,KAAK,KAAK;KACnB,eAAe,WAAW,OAAO,SAAS;KAC1C,uBAAuB;KACvB,OAAO;KACR,CAAC;KACF;;EAEJ,MAAM,OAAO;GACX,MAAM,4BAAY,IAAI,KAAgC;GACtD,MAAM,EAAE,WAAW,WAAW,KAAK,KAAK,YAAY;GACpD,MAAM,EAAE,eAAe,OAAO,gBAAgB,KAAK,KAAK;GACxD,MAAM,cAAc,OAClB,QACA,YACuB;IACvB,MAAM,UAAU,cAAc;KAC5B;KACQ;KACR,aAAa;KACb;KACD,CAAC;AACF,UAAM,QAAQ;KACZ,MAAM,KAAK;KACX;KACA;KACA;KACA,WAAW,WAAW;KACvB,CAAC;AACF,WAAO;KACL;KACA,SAAS,QAAQ,UAAU;KAC5B;;GAGH,MAAM,MAA4B,CAAC,YAAY,aAAa,wBAAwB,CAAC;AAErF,OAAI,QAAS,KAAI,KAAK,YAAY,cAAc,yBAAyB,CAAC;AAE1E,OAAI,QAAS,KAAI,KAAK,YAAY,cAAc,yBAAyB,CAAC;AAE1E,UAAO,MAAM,QAAQ,IAAI,IAAI;;EAEhC;;AAGH,eAAe,wBAAwB,KAAqB;CAC1D,MAAM,EAAE,MAAM,SAA
S,eAAe,OAAO;AAC7C,SAAQ,MAAM,KACZ,yDACA,iCAAiC,QAAQ,iBAAiB,KAAK,YAAY,CAAC,WAAW,CAAC,KACxF,IACA,wCAAwC,GAAG,IAAI,KAAK,UAAU,cAAc,CAAC,IAC9E;CAED,eAAe,yBAAyB,YAAyD;EAC/F,MAAM,OAAO,QAAQ,WAAW;AAEhC,UAAQ,WAAW,MAAnB;GACE,KAAK,QAAQ;AACX,QAAI,WAAW,KAAK,QAAS;AAE7B,QAAI,WAAW,KAAK,OAAO;KACzB,MAAM,CAACA,YAAU,UAAU,YAAY,MAAM,QAAQ,IAAI;MACvD,2BAA2B,KAAK,WAAW,MAAM,KAAK;MACtD,qCAAqC,KAAK,WAAW,MAAM,KAAK;MAChE,0BAA0B,KAAK,WAAW,KAAK;MAChD,CAAC;AAEF,YAAO,0BAA0B,WAAW,KAAK,MAAM,KAAK,KAAKA,WAAS,IAAI,SAAS,IAAI,SAAS;;IAGtG,MAAM,CAAC,UAAU,WAAW,MAAM,QAAQ,IAAI,CAC5C,2BAA2B,KAAK,WAAW,MAAM,KAAK,EACtD,0BAA0B,KAAK,WAAW,MAAM,KAAK,CACtD,CAAC;AAEF,WAAO,sBAAsB,WAAW,KAAK,MAAM,KAAK,KAAK,SAAS,IAAI,QAAQ;;GAEpF,KAAK;AACH,QAAI,WAAW,QAAS;AAExB,QAAI,WAAW,OAAO;KACpB,MAAM,CAAC,UAAU,YAAY,MAAM,QAAQ,IAAI,CAC7C,qCAAqC,KAAK,YAAY,KAAK,EAC3D,0BAA0B,KAAK,WAAW,CAC3C,CAAC;AAEF,YAAO,yBAAyB,WAAW,KAAK,MAAM,KAAK,KAAK,SAAS,IAAI,SAAS;;AAGxF,WAAO,qBAAqB,WAAW,KAAK,MAAM,KAAK,KAAK,MAAM,0BAChE,KACA,YACA,KACD,CAAC;GACJ,KAAK,OACH,QAAO,sBAAsB,WAAW,KAAK,MAAM,KAAK,KAAK,MAAM,2BACjE,KACA,YACA,KACD,CAAC;;;AAIR,OAAM,QAAQ,UACZ,KAAK,gBAAgB,CAAC,IAAI,OAAO,eAAe;EAC9C,MAAM,MAAM,MAAM,yBAAyB,WAAW;AACtD,MAAI,CAAC,IAAK;AAEV,SAAO,kBAAkB,WAAW,KAAK,KAAK,IAAI;GAClD,CACH;;AAGH,eAAe,yBAAyB,KAAqB;CAC3D,MAAM,EAAE,MAAM,SAAS,eAAe,OAAO;CAC7C,MAAM,EAAE,YAAY,aAAa,WAAW,KAAK,YAAY;CAE7D,MAAM,iBAA8B;EAClC;EACA;EACA;EACD;AACD,SAAQ,MAAM,KACZ,2DACA,4BAA4B,QAAQ,iBAAiB,WAAW,CAAC,KACjE,IACA,+CAA+C,GAAG,YAAY,KAAK,UAAU,eAAe,CAAC,IAAI,KAAK,UAAU,cAAc,CAAC,IAChI;CAED,eAAe,8BACb,YACA,cACA;EACA,MAAM,WAAWC,OAAK,SAAS,QAAQ,KAAK,EAAE,aAAa;EAC3D,MAAM,UAAU,MAAM,eAAe,KAAK,SAAS,CAAC,YAAY,GAAG;EACnE,MAAM,SAAS,WAAW,QAAQ;EAClC,MAAM,OAAO,MAAM,KAAK,qBACtB;GACE;GACA,UAAU;GACV,QAAQ;GACT,EACD,OAAO,KACR;EAED,MAAM,OAAO,WAAW,MAAM,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;EAC5D,MAAM,UAAoB,CAExB,8BAA8B,KAAK,UAAU,SAAS,CAAC,GACxD;AACD,OAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ;GAClC,MAAM;IACJ;IACA,MAAMA,OAAK,SAAS,WAAW,KAAK,aAAa;IAClD;GACD;GACA;GACD,CAAqB,CACpB,SAAQ,KAAK,GAAG,EAAE,IAAI,KAAK,UAAU,EAAE,GAAG;AAG5C,SAAO,KAAK,QAAQ,KAAK,KAAK,CAAC;;CAGjC,eAAe,yBAAyB,QAAqD;EAC3F,IAAI;AACJ,MAAI,OAAO,SAAS,MAAO,cAAa;WAC/B,OAAO,SAAS,OAAQ,cAAa,OAAO;AAErD,MAAI,CAAC,cAAc,CAAC,WAAW,QAAS;EAExC,MAAM,QAAQ,MAAM,KAAK,WAAW,UAAU;GAC5C,KAAK,WAAW;GAChB,UAAU;GACX,CAAC;EACF,MAAM,UAAU,MAAM,QAAQ,IAC5B,MAAM,KAAK,SAAS,8BAA8B,YAAY,KAAK,CAAC,CACrE;AAED,UAAQ,OAAO,MAAf;GACE,KAAK,QAAQ;IACX,MAAM,WAAW,MAAM,2BAA2B,KAAK,OAAO,MAAM,KAAK;AAEzE,WAAO,sBAAsB,OAAO,KAAK,MAAM,QAAQ,OAAO,CAAC,KAAK,SAAS,IAAI,QAAQ,KAAK,KAAK,CAAC;;GAEtG,KAAK,MACH,QAAO,qBAAqB,WAAW,KAAK,MAAM,QAAQ,WAAW,CAAC,KAAK,QAAQ,KAAK,KAAK,CAAC;;;AAIpG,OAAM,QAAQ,UACZ,KAAK,gBAAgB,CAAC,IAAI,OAAO,eAAe;EAC9C,MAAM,MAAM,MAAM,yBAAyB,WAAW;AACtD,MAAI,CAAC,IAAK;AAEV,SAAO,kBAAkB,WAAW,KAAK,KAAK,IAAI;GAClD,CACH;;AAGH,eAAe,yBAAyB,KAAqB;CAC3D,MAAM,EAAE,MAAM,SAAS,OAAO;AAC9B,SAAQ,MAAM,KACZ,2DACA,iCAAiC,QAAQ,iBAAiB,KAAK,YAAY,CAAC,WAAW,CAAC,KACxF,IACA,yCAAyC,GAAG,MAC7C;CAED,eAAe,yBAAyB,YAAyD;AAC/F,UAAQ,WAAW,MAAnB;GACE,KAAK;AACH,QAAI,WAAW,KAAK,QAAS;AAE7B,WAAO,yBAAyB,WAAW,KAAK;GAElD,KAAK;AACH,QAAI,WAAW,QAAS;AAExB,WAAO,eAAe,WAAW,KAAK,KAAK,MAAM,0BAA0B,KAAK,WAAW,CAAC;;;AAIlG,SAAQ,MAAM,KAAK,+BAA+B;AAElD,OAAM,QAAQ,UACZ,KAAK,gBAAgB,CAAC,IAAI,OAAO,eAAe;EAC9C,MAAM,MAAM,MAAM,yBAAyB,WAAW;AACtD,MAAI,CAAC,IAAK;AAEV,SAAO,MAAM,GAAG,WAAW,KAAK,IAAI,IAAI,GAAG;GAC3C,CACH;AAED,SAAQ,MAAM,KAAK,MAAM,qCAAqC;;AAGhE,SAAS,QAAQ,YAA4B;AAC3C,QAAO,MAAMA,OAAK,SAAS,QAAQ,KAAK,EAAE,WAAW,IAAI,CAAC;;AAG5D,SAAS,qCACP,EAAE,SAAS,aACX,YACA,QAAQ,OACR;AACA,QAAO,QAAQ,mBAAmB,WAAW,UAAU;EACrD,OAAO;GACL,YAAY,WAAW;GACvB,MAAM;GACN;GACD;EACD,QAAQ;EACR,MAAM,WAAW;EACjB;EACD,CAAC;;AAGJ,SAAS,0BACP,EAAE,SAAS,
aACX,YACA,QAAQ,OACR;AACA,QAAO,QAAQ,mBAAmB,WAAW,UAAU;EACrD,OAAO;GACL,YAAY,WAAW;GACvB;GACD;EACD,MAAM,WAAW;EACjB;EACD,CAAC;;AAGJ,SAAS,2BACP,EAAE,SAAS,aACX,YACA,QAAQ,OACR;AACA,QAAO,QAAQ,mBAAmB,WAAW,UAAU;EACrD,OAAO;GACL,YAAY,WAAW;GACvB;GACD;EACD,QAAQ;EACR,MAAM,WAAW;EACjB;EACD,CAAC"}
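For context, the map added above inlines src/utils/fs-cache.ts (a small promise cache keyed by cwd-relative path) together with the index-file plugin that uses it for frontmatter reads in dynamic collections. The sketch below is a minimal usage illustration based on that embedded source only; the helper is internal to the package (not a public export), and the import path and file name are assumptions.

// Assumes the embedded module is available locally, e.g. copied as ./fs-cache.ts.
import { createFSCache } from './fs-cache';

const cache = createFSCache();

// The first read starts fs.readFile and stores the pending Promise<string>;
// a second read of the same (cwd-relative) path returns the exact same promise.
const a = cache.read('content/docs/index.mdx');
const b = cache.read('content/docs/index.mdx');
console.log(a === b); // true

// The index-file plugin calls delete() from its watcher callback, so a changed
// file is re-read from disk on the next emit.
cache.delete('content/docs/index.mdx');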
@@ -6,11 +6,11 @@ import path from "node:path";
6
6
  import { createHash } from "node:crypto";
7
7
 
8
8
  //#region src/loaders/mdx/index.ts
9
- const querySchema = z.object({
9
+ const querySchema = z.looseObject({
10
10
  only: z.literal(["frontmatter", "all"]).default("all"),
11
11
  collection: z.string().optional(),
12
12
  workspace: z.string().optional()
13
- }).loose();
13
+ });
14
14
  const cacheEntry = z.object({
15
15
  code: z.string(),
16
16
  map: z.any().optional(),
@@ -88,4 +88,4 @@ function countLines(s) {
88
88
 
89
89
  //#endregion
90
90
  export { createMdxLoader as t };
91
- //# sourceMappingURL=mdx-DMZ9tsAa.js.map
91
+ //# sourceMappingURL=mdx-CRT-jSh5.js.map
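Aside from the rotated chunk hashes, the substantive change in the loader chunks above and below is a Zod schema refactor: the resource-query schemas previously written as z.object({ ... }).loose() are now built with z.looseObject({ ... }). Both spellings yield an object schema that passes unknown query keys through. The sketch below is an illustrative reproduction of the mdx loader's query schema (not part of the package) showing that the two forms parse the same input.

import { z } from 'zod';

// Pre-14.2.5 spelling: strict object relaxed through the .loose() method.
const before = z.object({
  only: z.literal(['frontmatter', 'all']).default('all'),
  collection: z.string().optional(),
  workspace: z.string().optional(),
}).loose();

// 14.2.5 spelling: the loose object constructed directly.
const after = z.looseObject({
  only: z.literal(['frontmatter', 'all']).default('all'),
  collection: z.string().optional(),
  workspace: z.string().optional(),
});

// Both accept unknown keys and apply the same default for `only`.
const query = { collection: 'docs', extra: '1' };
console.log(before.parse(query)); // => contains collection: 'docs', extra: '1', only: 'all'
console.log(after.parse(query));  // => same shape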
@@ -0,0 +1 @@
1
+ {"version":3,"file":"mdx-CRT-jSh5.js","names":[],"sources":["../src/loaders/mdx/index.ts"],"sourcesContent":["import { fumaMatter } from '@/utils/fuma-matter';\nimport type { Loader } from '@/loaders/adapter';\nimport { z } from 'zod';\nimport type { DocCollectionItem } from '@/config/build';\nimport fs from 'node:fs/promises';\nimport path from 'node:path';\nimport { createHash } from 'node:crypto';\nimport type { ConfigLoader } from '@/loaders/config';\nimport { mdxLoaderGlob } from '..';\n\nconst querySchema = z.looseObject({\n only: z.literal(['frontmatter', 'all']).default('all'),\n collection: z.string().optional(),\n workspace: z.string().optional(),\n});\n\nconst cacheEntry = z.object({\n code: z.string(),\n map: z.any().optional(),\n hash: z.string().optional(),\n});\n\ntype CacheEntry = z.infer<typeof cacheEntry>;\n\nexport function createMdxLoader({ getCore }: ConfigLoader): Loader {\n return {\n test: mdxLoaderGlob,\n async load({ getSource, development: isDevelopment, query, compiler, filePath }) {\n let core = await getCore();\n const value = await getSource();\n const matter = fumaMatter(value);\n const { collection: collectionName, workspace, only } = querySchema.parse(query);\n if (workspace) {\n core = core.getWorkspaces().get(workspace) ?? core;\n }\n\n let after: (() => Promise<void>) | undefined;\n\n const { experimentalBuildCache = false } = core.getConfig().global;\n if (!isDevelopment && experimentalBuildCache) {\n const cacheDir = experimentalBuildCache;\n const cacheKey = `${collectionName ?? 'global'}_${generateCacheHash(filePath)}`;\n\n const cached = await fs\n .readFile(path.join(cacheDir, cacheKey))\n .then((content) => cacheEntry.parse(JSON.parse(content.toString())))\n .catch(() => null);\n\n if (cached && cached.hash === generateCacheHash(value)) return cached;\n after = async () => {\n await fs.mkdir(cacheDir, { recursive: true });\n await fs.writeFile(\n path.join(cacheDir, cacheKey),\n JSON.stringify({\n ...out,\n hash: generateCacheHash(value),\n } satisfies CacheEntry),\n );\n };\n }\n\n const collection = collectionName ? 
core.getCollection(collectionName) : undefined;\n\n let docCollection: DocCollectionItem | undefined;\n switch (collection?.type) {\n case 'doc':\n docCollection = collection;\n break;\n case 'docs':\n docCollection = collection.docs;\n break;\n }\n\n if (docCollection) {\n matter.data = await core.transformFrontmatter(\n { collection: docCollection, filePath, source: value },\n matter.data as Record<string, unknown>,\n );\n }\n\n if (only === 'frontmatter') {\n return {\n code: `export const frontmatter = ${JSON.stringify(matter.data)}`,\n map: null,\n };\n }\n\n const { buildMDX } = await import('@/loaders/mdx/build-mdx');\n const compiled = await buildMDX(core, docCollection, {\n isDevelopment,\n // ensure the line number is correct in errors\n source: '\\n'.repeat(countLines(matter.matter)) + matter.content,\n filePath,\n frontmatter: matter.data as Record<string, unknown>,\n _compiler: compiler,\n environment: 'bundler',\n });\n\n const out = {\n code: String(compiled.value),\n map: compiled.map,\n };\n\n await after?.();\n return out;\n },\n };\n}\n\nfunction generateCacheHash(input: string): string {\n return createHash('md5').update(input).digest('hex');\n}\n\nfunction countLines(s: string) {\n let num = 0;\n\n for (const c of s) {\n if (c === '\\n') num++;\n }\n\n return num;\n}\n"],"mappings":";;;;;;;;AAUA,MAAM,cAAc,EAAE,YAAY;CAChC,MAAM,EAAE,QAAQ,CAAC,eAAe,MAAM,CAAC,CAAC,QAAQ,MAAM;CACtD,YAAY,EAAE,QAAQ,CAAC,UAAU;CACjC,WAAW,EAAE,QAAQ,CAAC,UAAU;CACjC,CAAC;AAEF,MAAM,aAAa,EAAE,OAAO;CAC1B,MAAM,EAAE,QAAQ;CAChB,KAAK,EAAE,KAAK,CAAC,UAAU;CACvB,MAAM,EAAE,QAAQ,CAAC,UAAU;CAC5B,CAAC;AAIF,SAAgB,gBAAgB,EAAE,WAAiC;AACjE,QAAO;EACL,MAAM;EACN,MAAM,KAAK,EAAE,WAAW,aAAa,eAAe,OAAO,UAAU,YAAY;GAC/E,IAAI,OAAO,MAAM,SAAS;GAC1B,MAAM,QAAQ,MAAM,WAAW;GAC/B,MAAM,SAAS,WAAW,MAAM;GAChC,MAAM,EAAE,YAAY,gBAAgB,WAAW,SAAS,YAAY,MAAM,MAAM;AAChF,OAAI,UACF,QAAO,KAAK,eAAe,CAAC,IAAI,UAAU,IAAI;GAGhD,IAAI;GAEJ,MAAM,EAAE,yBAAyB,UAAU,KAAK,WAAW,CAAC;AAC5D,OAAI,CAAC,iBAAiB,wBAAwB;IAC5C,MAAM,WAAW;IACjB,MAAM,WAAW,GAAG,kBAAkB,SAAS,GAAG,kBAAkB,SAAS;IAE7E,MAAM,SAAS,MAAM,GAClB,SAAS,KAAK,KAAK,UAAU,SAAS,CAAC,CACvC,MAAM,YAAY,WAAW,MAAM,KAAK,MAAM,QAAQ,UAAU,CAAC,CAAC,CAAC,CACnE,YAAY,KAAK;AAEpB,QAAI,UAAU,OAAO,SAAS,kBAAkB,MAAM,CAAE,QAAO;AAC/D,YAAQ,YAAY;AAClB,WAAM,GAAG,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC;AAC7C,WAAM,GAAG,UACP,KAAK,KAAK,UAAU,SAAS,EAC7B,KAAK,UAAU;MACb,GAAG;MACH,MAAM,kBAAkB,MAAM;MAC/B,CAAsB,CACxB;;;GAIL,MAAM,aAAa,iBAAiB,KAAK,cAAc,eAAe,GAAG;GAEzE,IAAI;AACJ,WAAQ,YAAY,MAApB;IACE,KAAK;AACH,qBAAgB;AAChB;IACF,KAAK;AACH,qBAAgB,WAAW;AAC3B;;AAGJ,OAAI,cACF,QAAO,OAAO,MAAM,KAAK,qBACvB;IAAE,YAAY;IAAe;IAAU,QAAQ;IAAO,EACtD,OAAO,KACR;AAGH,OAAI,SAAS,cACX,QAAO;IACL,MAAM,8BAA8B,KAAK,UAAU,OAAO,KAAK;IAC/D,KAAK;IACN;GAGH,MAAM,EAAE,aAAa,MAAM,OAAO;GAClC,MAAM,WAAW,MAAM,SAAS,MAAM,eAAe;IACnD;IAEA,QAAQ,KAAK,OAAO,WAAW,OAAO,OAAO,CAAC,GAAG,OAAO;IACxD;IACA,aAAa,OAAO;IACpB,WAAW;IACX,aAAa;IACd,CAAC;GAEF,MAAM,MAAM;IACV,MAAM,OAAO,SAAS,MAAM;IAC5B,KAAK,SAAS;IACf;AAED,SAAM,SAAS;AACf,UAAO;;EAEV;;AAGH,SAAS,kBAAkB,OAAuB;AAChD,QAAO,WAAW,MAAM,CAAC,OAAO,MAAM,CAAC,OAAO,MAAM;;AAGtD,SAAS,WAAW,GAAW;CAC7B,IAAI,MAAM;AAEV,MAAK,MAAM,KAAK,EACd,KAAI,MAAM,KAAM;AAGlB,QAAO"}
@@ -3,10 +3,10 @@ import { load } from "js-yaml";
3
3
  import { z } from "zod";
4
4
 
5
5
  //#region src/loaders/meta.ts
6
- const querySchema = z.object({
6
+ const querySchema = z.looseObject({
7
7
  collection: z.string().optional(),
8
8
  workspace: z.string().optional()
9
- }).loose();
9
+ });
10
10
  /**
11
11
  * load meta files, fallback to bundler's built-in plugins when ?collection is unspecified.
12
12
  */
@@ -78,4 +78,4 @@ function createMetaLoader({ getCore }, resolve = {}) {
78
78
 
79
79
  //#endregion
80
80
  export { createMetaLoader as t };
81
- //# sourceMappingURL=meta-DyieTM4Z.js.map
81
+ //# sourceMappingURL=meta-BKBx8Gab.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"meta-BKBx8Gab.js","names":[],"sources":["../src/loaders/meta.ts"],"sourcesContent":["import type { Loader, LoaderInput } from '@/loaders/adapter';\nimport type { ConfigLoader } from '@/loaders/config';\nimport { load } from 'js-yaml';\nimport { z } from 'zod';\nimport { metaLoaderGlob } from '.';\nimport type { MetaCollectionItem } from '@/config/build';\n\nconst querySchema = z.looseObject({\n collection: z.string().optional(),\n workspace: z.string().optional(),\n});\n\n/**\n * load meta files, fallback to bundler's built-in plugins when ?collection is unspecified.\n */\nexport function createMetaLoader(\n { getCore }: ConfigLoader,\n resolve: {\n json?: 'json' | 'js';\n yaml?: 'js';\n } = {},\n): Loader {\n const { json: resolveJson = 'js' } = resolve;\n\n function parse(filePath: string, source: string) {\n try {\n if (filePath.endsWith('.json')) return JSON.parse(source);\n if (filePath.endsWith('.yaml')) return load(source);\n } catch (e) {\n throw new Error(`invalid data in ${filePath}`, { cause: e });\n }\n\n throw new Error('Unknown file type ' + filePath);\n }\n\n function onMeta(source: string, { filePath, query }: LoaderInput) {\n const parsed = querySchema.safeParse(query);\n if (!parsed.success || !parsed.data.collection) return null;\n const { collection: collectionName, workspace } = parsed.data;\n\n return async (): Promise<unknown> => {\n let core = await getCore();\n if (workspace) {\n core = core.getWorkspaces().get(workspace) ?? core;\n }\n\n const collection = core.getCollection(collectionName);\n let metaCollection: MetaCollectionItem | undefined;\n\n switch (collection?.type) {\n case 'meta':\n metaCollection = collection;\n break;\n case 'docs':\n metaCollection = collection.meta;\n break;\n }\n\n const data = parse(filePath, source);\n\n if (!metaCollection) return data;\n return core.transformMeta(\n {\n collection: metaCollection,\n filePath,\n source,\n },\n data,\n );\n };\n }\n\n return {\n test: metaLoaderGlob,\n async load(input) {\n const result = onMeta(await input.getSource(), input);\n if (result === null) return null;\n const data = await result();\n\n if (input.filePath.endsWith('.json')) {\n return {\n moduleType: resolveJson,\n code:\n resolveJson === 'json'\n ? 
JSON.stringify(data)\n : `export default ${JSON.stringify(data)}`,\n };\n } else {\n return {\n moduleType: 'js',\n code: `export default ${JSON.stringify(data)}`,\n };\n }\n },\n bun: {\n load(source, input) {\n const result = onMeta(source, input);\n if (result === null)\n return {\n loader: 'object',\n exports: parse(input.filePath, source),\n };\n\n return result().then((data) => ({\n loader: 'object',\n exports: { default: data },\n }));\n },\n },\n };\n}\n"],"mappings":";;;;;AAOA,MAAM,cAAc,EAAE,YAAY;CAChC,YAAY,EAAE,QAAQ,CAAC,UAAU;CACjC,WAAW,EAAE,QAAQ,CAAC,UAAU;CACjC,CAAC;;;;AAKF,SAAgB,iBACd,EAAE,WACF,UAGI,EAAE,EACE;CACR,MAAM,EAAE,MAAM,cAAc,SAAS;CAErC,SAAS,MAAM,UAAkB,QAAgB;AAC/C,MAAI;AACF,OAAI,SAAS,SAAS,QAAQ,CAAE,QAAO,KAAK,MAAM,OAAO;AACzD,OAAI,SAAS,SAAS,QAAQ,CAAE,QAAO,KAAK,OAAO;WAC5C,GAAG;AACV,SAAM,IAAI,MAAM,mBAAmB,YAAY,EAAE,OAAO,GAAG,CAAC;;AAG9D,QAAM,IAAI,MAAM,uBAAuB,SAAS;;CAGlD,SAAS,OAAO,QAAgB,EAAE,UAAU,SAAsB;EAChE,MAAM,SAAS,YAAY,UAAU,MAAM;AAC3C,MAAI,CAAC,OAAO,WAAW,CAAC,OAAO,KAAK,WAAY,QAAO;EACvD,MAAM,EAAE,YAAY,gBAAgB,cAAc,OAAO;AAEzD,SAAO,YAA8B;GACnC,IAAI,OAAO,MAAM,SAAS;AAC1B,OAAI,UACF,QAAO,KAAK,eAAe,CAAC,IAAI,UAAU,IAAI;GAGhD,MAAM,aAAa,KAAK,cAAc,eAAe;GACrD,IAAI;AAEJ,WAAQ,YAAY,MAApB;IACE,KAAK;AACH,sBAAiB;AACjB;IACF,KAAK;AACH,sBAAiB,WAAW;AAC5B;;GAGJ,MAAM,OAAO,MAAM,UAAU,OAAO;AAEpC,OAAI,CAAC,eAAgB,QAAO;AAC5B,UAAO,KAAK,cACV;IACE,YAAY;IACZ;IACA;IACD,EACD,KACD;;;AAIL,QAAO;EACL,MAAM;EACN,MAAM,KAAK,OAAO;GAChB,MAAM,SAAS,OAAO,MAAM,MAAM,WAAW,EAAE,MAAM;AACrD,OAAI,WAAW,KAAM,QAAO;GAC5B,MAAM,OAAO,MAAM,QAAQ;AAE3B,OAAI,MAAM,SAAS,SAAS,QAAQ,CAClC,QAAO;IACL,YAAY;IACZ,MACE,gBAAgB,SACZ,KAAK,UAAU,KAAK,GACpB,kBAAkB,KAAK,UAAU,KAAK;IAC7C;OAED,QAAO;IACL,YAAY;IACZ,MAAM,kBAAkB,KAAK,UAAU,KAAK;IAC7C;;EAGL,KAAK,EACH,KAAK,QAAQ,OAAO;GAClB,MAAM,SAAS,OAAO,QAAQ,MAAM;AACpC,OAAI,WAAW,KACb,QAAO;IACL,QAAQ;IACR,SAAS,MAAM,MAAM,UAAU,OAAO;IACvC;AAEH,UAAO,QAAQ,CAAC,MAAM,UAAU;IAC9B,QAAQ;IACR,SAAS,EAAE,SAAS,MAAM;IAC3B,EAAE;KAEN;EACF"}
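As a rough illustration of the meta loader embedded in the map above (hypothetical file content, and assuming no collection transform rewrites the data): a meta.json containing {"title":"Docs","pages":["index"]} is compiled as follows.

// With the default resolve.json = 'js', the loader returns { moduleType: 'js' } and this code:
export default {"title":"Docs","pages":["index"]}
// With resolve.json = 'json', it instead returns { moduleType: 'json', code: '{"title":"Docs","pages":["index"]}' }.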
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","names":["loaderOptions: WebpackLoaderOptions","path","turbopack: TurbopackOptions"],"sources":["../../src/next/index.ts"],"sourcesContent":["import type { NextConfig } from 'next';\nimport type { Configuration } from 'webpack';\nimport type { WebpackLoaderOptions } from '@/webpack';\nimport type { TurbopackLoaderOptions, TurbopackOptions } from 'next/dist/server/config-shared';\nimport * as path from 'node:path';\nimport { loadConfig } from '@/config/load-from-file';\nimport { _Defaults, type Core, createCore } from '@/core';\nimport { mdxLoaderGlob, metaLoaderGlob } from '@/loaders';\nimport type { IndexFilePluginOptions } from '@/plugins/index-file';\nimport indexFile from '@/plugins/index-file';\n\nexport interface CreateMDXOptions {\n /**\n * Path to source configuration file\n */\n configPath?: string;\n\n /**\n * Directory for output files\n *\n * @defaultValue '.source'\n */\n outDir?: string;\n\n index?: IndexFilePluginOptions | false;\n}\n\nconst defaultPageExtensions = ['mdx', 'md', 'jsx', 'js', 'tsx', 'ts'];\n\nexport function createMDX(createOptions: CreateMDXOptions = {}) {\n const core = createNextCore(applyDefaults(createOptions));\n const isDev = process.env.NODE_ENV === 'development';\n\n if (process.env._FUMADOCS_MDX !== '1') {\n process.env._FUMADOCS_MDX = '1';\n\n void init(isDev, core);\n }\n\n return (nextConfig: NextConfig = {}): NextConfig => {\n const { configPath, outDir } = core.getOptions();\n const loaderOptions: WebpackLoaderOptions = {\n configPath,\n outDir,\n absoluteCompiledConfigPath: path.resolve(core.getCompiledConfigPath()),\n isDev,\n };\n\n const turbopack: TurbopackOptions = {\n ...nextConfig.turbopack,\n rules: {\n ...nextConfig.turbopack?.rules,\n '*.{md,mdx}': {\n loaders: [\n {\n loader: 'fumadocs-mdx/loader-mdx',\n options: loaderOptions as unknown as TurbopackLoaderOptions,\n },\n ],\n as: '*.js',\n },\n '*.json': {\n loaders: [\n {\n loader: 'fumadocs-mdx/loader-meta',\n options: loaderOptions as unknown as TurbopackLoaderOptions,\n },\n ],\n as: '*.json',\n },\n '*.yaml': {\n loaders: [\n {\n loader: 'fumadocs-mdx/loader-meta',\n options: loaderOptions as unknown as TurbopackLoaderOptions,\n },\n ],\n as: '*.js',\n },\n },\n };\n\n return {\n ...nextConfig,\n turbopack,\n pageExtensions: nextConfig.pageExtensions ?? defaultPageExtensions,\n webpack: (config: Configuration, options) => {\n config.resolve ||= {};\n\n config.module ||= {};\n config.module.rules ||= [];\n\n config.module.rules.push(\n {\n test: mdxLoaderGlob,\n use: [\n options.defaultLoaders.babel,\n {\n loader: 'fumadocs-mdx/loader-mdx',\n options: loaderOptions,\n },\n ],\n },\n {\n test: metaLoaderGlob,\n enforce: 'pre',\n use: [\n {\n loader: 'fumadocs-mdx/loader-meta',\n options: loaderOptions,\n },\n ],\n },\n );\n\n config.plugins ||= [];\n\n return nextConfig.webpack?.(config, options) ?? 
config;\n },\n };\n };\n}\n\nasync function init(dev: boolean, core: Core): Promise<void> {\n async function initOrReload() {\n await core.init({\n config: loadConfig(core, true),\n });\n await core.emit({ write: true });\n }\n\n async function devServer() {\n const { FSWatcher } = await import('chokidar');\n const { configPath, outDir } = core.getOptions();\n const watcher = new FSWatcher({\n ignoreInitial: true,\n persistent: true,\n ignored: [outDir],\n });\n\n watcher.add(configPath);\n for (const collection of core.getCollections()) {\n watcher.add(collection.dir);\n }\n for (const workspace of core.getWorkspaces().values()) {\n for (const collection of workspace.getCollections()) {\n watcher.add(collection.dir);\n }\n }\n\n watcher.on('ready', () => {\n console.log('[MDX] started dev server');\n });\n\n const absoluteConfigPath = path.resolve(configPath);\n watcher.on('all', async (_event, file) => {\n if (path.resolve(file) === absoluteConfigPath) {\n // skip plugin listeners\n watcher.removeAllListeners();\n\n await watcher.close();\n await initOrReload();\n console.log('[MDX] restarting dev server');\n await devServer();\n }\n });\n\n process.on('exit', () => {\n if (watcher.closed) return;\n\n console.log('[MDX] closing dev server');\n void watcher.close();\n });\n\n await core.initServer({ watcher });\n }\n\n await initOrReload();\n if (dev) {\n await devServer();\n }\n}\n\nexport async function postInstall(options: CreateMDXOptions) {\n const core = createNextCore(applyDefaults(options));\n await core.init({\n config: loadConfig(core, true),\n });\n await core.emit({ write: true });\n}\n\nfunction applyDefaults(options: CreateMDXOptions): Required<CreateMDXOptions> {\n return {\n index: {},\n outDir: options.outDir ?? _Defaults.outDir,\n configPath: options.configPath ?? 
_Defaults.configPath,\n };\n}\n\nfunction createNextCore(options: Required<CreateMDXOptions>): Core {\n return createCore({\n environment: 'next',\n outDir: options.outDir,\n configPath: options.configPath,\n plugins: [options.index && indexFile(options.index)],\n });\n}\n"],"mappings":";;;;;;;;;;;AA2BA,MAAM,wBAAwB;CAAC;CAAO;CAAM;CAAO;CAAM;CAAO;CAAK;AAErE,SAAgB,UAAU,gBAAkC,EAAE,EAAE;CAC9D,MAAM,OAAO,eAAe,cAAc,cAAc,CAAC;CACzD,MAAM,QAAQ,QAAQ,IAAI,aAAa;AAEvC,KAAI,QAAQ,IAAI,kBAAkB,KAAK;AACrC,UAAQ,IAAI,gBAAgB;AAE5B,EAAK,KAAK,OAAO,KAAK;;AAGxB,SAAQ,aAAyB,EAAE,KAAiB;EAClD,MAAM,EAAE,YAAY,WAAW,KAAK,YAAY;EAChD,MAAMA,gBAAsC;GAC1C;GACA;GACA,4BAA4BC,OAAK,QAAQ,KAAK,uBAAuB,CAAC;GACtE;GACD;EAED,MAAMC,YAA8B;GAClC,GAAG,WAAW;GACd,OAAO;IACL,GAAG,WAAW,WAAW;IACzB,cAAc;KACZ,SAAS,CACP;MACE,QAAQ;MACR,SAAS;MACV,CACF;KACD,IAAI;KACL;IACD,UAAU;KACR,SAAS,CACP;MACE,QAAQ;MACR,SAAS;MACV,CACF;KACD,IAAI;KACL;IACD,UAAU;KACR,SAAS,CACP;MACE,QAAQ;MACR,SAAS;MACV,CACF;KACD,IAAI;KACL;IACF;GACF;AAED,SAAO;GACL,GAAG;GACH;GACA,gBAAgB,WAAW,kBAAkB;GAC7C,UAAU,QAAuB,YAAY;AAC3C,WAAO,YAAY,EAAE;AAErB,WAAO,WAAW,EAAE;AACpB,WAAO,OAAO,UAAU,EAAE;AAE1B,WAAO,OAAO,MAAM,KAClB;KACE,MAAM;KACN,KAAK,CACH,QAAQ,eAAe,OACvB;MACE,QAAQ;MACR,SAAS;MACV,CACF;KACF,EACD;KACE,MAAM;KACN,SAAS;KACT,KAAK,CACH;MACE,QAAQ;MACR,SAAS;MACV,CACF;KACF,CACF;AAED,WAAO,YAAY,EAAE;AAErB,WAAO,WAAW,UAAU,QAAQ,QAAQ,IAAI;;GAEnD;;;AAIL,eAAe,KAAK,KAAc,MAA2B;CAC3D,eAAe,eAAe;AAC5B,QAAM,KAAK,KAAK,EACd,QAAQ,WAAW,MAAM,KAAK,EAC/B,CAAC;AACF,QAAM,KAAK,KAAK,EAAE,OAAO,MAAM,CAAC;;CAGlC,eAAe,YAAY;EACzB,MAAM,EAAE,cAAc,MAAM,OAAO;EACnC,MAAM,EAAE,YAAY,WAAW,KAAK,YAAY;EAChD,MAAM,UAAU,IAAI,UAAU;GAC5B,eAAe;GACf,YAAY;GACZ,SAAS,CAAC,OAAO;GAClB,CAAC;AAEF,UAAQ,IAAI,WAAW;AACvB,OAAK,MAAM,cAAc,KAAK,gBAAgB,CAC5C,SAAQ,IAAI,WAAW,IAAI;AAE7B,OAAK,MAAM,aAAa,KAAK,eAAe,CAAC,QAAQ,CACnD,MAAK,MAAM,cAAc,UAAU,gBAAgB,CACjD,SAAQ,IAAI,WAAW,IAAI;AAI/B,UAAQ,GAAG,eAAe;AACxB,WAAQ,IAAI,2BAA2B;IACvC;EAEF,MAAM,qBAAqBD,OAAK,QAAQ,WAAW;AACnD,UAAQ,GAAG,OAAO,OAAO,QAAQ,SAAS;AACxC,OAAIA,OAAK,QAAQ,KAAK,KAAK,oBAAoB;AAE7C,YAAQ,oBAAoB;AAE5B,UAAM,QAAQ,OAAO;AACrB,UAAM,cAAc;AACpB,YAAQ,IAAI,8BAA8B;AAC1C,UAAM,WAAW;;IAEnB;AAEF,UAAQ,GAAG,cAAc;AACvB,OAAI,QAAQ,OAAQ;AAEpB,WAAQ,IAAI,2BAA2B;AACvC,GAAK,QAAQ,OAAO;IACpB;AAEF,QAAM,KAAK,WAAW,EAAE,SAAS,CAAC;;AAGpC,OAAM,cAAc;AACpB,KAAI,IACF,OAAM,WAAW;;AAIrB,eAAsB,YAAY,SAA2B;CAC3D,MAAM,OAAO,eAAe,cAAc,QAAQ,CAAC;AACnD,OAAM,KAAK,KAAK,EACd,QAAQ,WAAW,MAAM,KAAK,EAC/B,CAAC;AACF,OAAM,KAAK,KAAK,EAAE,OAAO,MAAM,CAAC;;AAGlC,SAAS,cAAc,SAAuD;AAC5E,QAAO;EACL,OAAO,EAAE;EACT,QAAQ,QAAQ,UAAU,UAAU;EACpC,YAAY,QAAQ,cAAc,UAAU;EAC7C;;AAGH,SAAS,eAAe,SAA2C;AACjE,QAAO,WAAW;EAChB,aAAa;EACb,QAAQ,QAAQ;EAChB,YAAY,QAAQ;EACpB,SAAS,CAAC,QAAQ,SAAS,UAAU,QAAQ,MAAM,CAAC;EACrD,CAAC"}
1
+ {"version":3,"file":"index.js","names":["path"],"sources":["../../src/next/index.ts"],"sourcesContent":["import type { NextConfig } from 'next';\nimport type { Configuration } from 'webpack';\nimport type { WebpackLoaderOptions } from '@/webpack';\nimport type { TurbopackLoaderOptions, TurbopackOptions } from 'next/dist/server/config-shared';\nimport * as path from 'node:path';\nimport { loadConfig } from '@/config/load-from-file';\nimport { _Defaults, type Core, createCore } from '@/core';\nimport { mdxLoaderGlob, metaLoaderGlob } from '@/loaders';\nimport type { IndexFilePluginOptions } from '@/plugins/index-file';\nimport indexFile from '@/plugins/index-file';\n\nexport interface CreateMDXOptions {\n /**\n * Path to source configuration file\n */\n configPath?: string;\n\n /**\n * Directory for output files\n *\n * @defaultValue '.source'\n */\n outDir?: string;\n\n index?: IndexFilePluginOptions | false;\n}\n\nconst defaultPageExtensions = ['mdx', 'md', 'jsx', 'js', 'tsx', 'ts'];\n\nexport function createMDX(createOptions: CreateMDXOptions = {}) {\n const core = createNextCore(applyDefaults(createOptions));\n const isDev = process.env.NODE_ENV === 'development';\n\n if (process.env._FUMADOCS_MDX !== '1') {\n process.env._FUMADOCS_MDX = '1';\n\n void init(isDev, core);\n }\n\n return (nextConfig: NextConfig = {}): NextConfig => {\n const { configPath, outDir } = core.getOptions();\n const loaderOptions: WebpackLoaderOptions = {\n configPath,\n outDir,\n absoluteCompiledConfigPath: path.resolve(core.getCompiledConfigPath()),\n isDev,\n };\n\n const turbopack: TurbopackOptions = {\n ...nextConfig.turbopack,\n rules: {\n ...nextConfig.turbopack?.rules,\n '*.{md,mdx}': {\n loaders: [\n {\n loader: 'fumadocs-mdx/loader-mdx',\n options: loaderOptions as unknown as TurbopackLoaderOptions,\n },\n ],\n as: '*.js',\n },\n '*.json': {\n loaders: [\n {\n loader: 'fumadocs-mdx/loader-meta',\n options: loaderOptions as unknown as TurbopackLoaderOptions,\n },\n ],\n as: '*.json',\n },\n '*.yaml': {\n loaders: [\n {\n loader: 'fumadocs-mdx/loader-meta',\n options: loaderOptions as unknown as TurbopackLoaderOptions,\n },\n ],\n as: '*.js',\n },\n },\n };\n\n return {\n ...nextConfig,\n turbopack,\n pageExtensions: nextConfig.pageExtensions ?? defaultPageExtensions,\n webpack: (config: Configuration, options) => {\n config.resolve ||= {};\n\n config.module ||= {};\n config.module.rules ||= [];\n\n config.module.rules.push(\n {\n test: mdxLoaderGlob,\n use: [\n options.defaultLoaders.babel,\n {\n loader: 'fumadocs-mdx/loader-mdx',\n options: loaderOptions,\n },\n ],\n },\n {\n test: metaLoaderGlob,\n enforce: 'pre',\n use: [\n {\n loader: 'fumadocs-mdx/loader-meta',\n options: loaderOptions,\n },\n ],\n },\n );\n\n config.plugins ||= [];\n\n return nextConfig.webpack?.(config, options) ?? 
config;\n },\n };\n };\n}\n\nasync function init(dev: boolean, core: Core): Promise<void> {\n async function initOrReload() {\n await core.init({\n config: loadConfig(core, true),\n });\n await core.emit({ write: true });\n }\n\n async function devServer() {\n const { FSWatcher } = await import('chokidar');\n const { configPath, outDir } = core.getOptions();\n const watcher = new FSWatcher({\n ignoreInitial: true,\n persistent: true,\n ignored: [outDir],\n });\n\n watcher.add(configPath);\n for (const collection of core.getCollections()) {\n watcher.add(collection.dir);\n }\n for (const workspace of core.getWorkspaces().values()) {\n for (const collection of workspace.getCollections()) {\n watcher.add(collection.dir);\n }\n }\n\n watcher.on('ready', () => {\n console.log('[MDX] started dev server');\n });\n\n const absoluteConfigPath = path.resolve(configPath);\n watcher.on('all', async (_event, file) => {\n if (path.resolve(file) === absoluteConfigPath) {\n // skip plugin listeners\n watcher.removeAllListeners();\n\n await watcher.close();\n await initOrReload();\n console.log('[MDX] restarting dev server');\n await devServer();\n }\n });\n\n process.on('exit', () => {\n if (watcher.closed) return;\n\n console.log('[MDX] closing dev server');\n void watcher.close();\n });\n\n await core.initServer({ watcher });\n }\n\n await initOrReload();\n if (dev) {\n await devServer();\n }\n}\n\nexport async function postInstall(options: CreateMDXOptions) {\n const core = createNextCore(applyDefaults(options));\n await core.init({\n config: loadConfig(core, true),\n });\n await core.emit({ write: true });\n}\n\nfunction applyDefaults(options: CreateMDXOptions): Required<CreateMDXOptions> {\n return {\n index: {},\n outDir: options.outDir ?? _Defaults.outDir,\n configPath: options.configPath ?? 
_Defaults.configPath,\n };\n}\n\nfunction createNextCore(options: Required<CreateMDXOptions>): Core {\n return createCore({\n environment: 'next',\n outDir: options.outDir,\n configPath: options.configPath,\n plugins: [options.index && indexFile(options.index)],\n });\n}\n"],"mappings":";;;;;;;;;;;AA2BA,MAAM,wBAAwB;CAAC;CAAO;CAAM;CAAO;CAAM;CAAO;CAAK;AAErE,SAAgB,UAAU,gBAAkC,EAAE,EAAE;CAC9D,MAAM,OAAO,eAAe,cAAc,cAAc,CAAC;CACzD,MAAM,QAAQ,QAAQ,IAAI,aAAa;AAEvC,KAAI,QAAQ,IAAI,kBAAkB,KAAK;AACrC,UAAQ,IAAI,gBAAgB;AAE5B,EAAK,KAAK,OAAO,KAAK;;AAGxB,SAAQ,aAAyB,EAAE,KAAiB;EAClD,MAAM,EAAE,YAAY,WAAW,KAAK,YAAY;EAChD,MAAM,gBAAsC;GAC1C;GACA;GACA,4BAA4BA,OAAK,QAAQ,KAAK,uBAAuB,CAAC;GACtE;GACD;EAED,MAAM,YAA8B;GAClC,GAAG,WAAW;GACd,OAAO;IACL,GAAG,WAAW,WAAW;IACzB,cAAc;KACZ,SAAS,CACP;MACE,QAAQ;MACR,SAAS;MACV,CACF;KACD,IAAI;KACL;IACD,UAAU;KACR,SAAS,CACP;MACE,QAAQ;MACR,SAAS;MACV,CACF;KACD,IAAI;KACL;IACD,UAAU;KACR,SAAS,CACP;MACE,QAAQ;MACR,SAAS;MACV,CACF;KACD,IAAI;KACL;IACF;GACF;AAED,SAAO;GACL,GAAG;GACH;GACA,gBAAgB,WAAW,kBAAkB;GAC7C,UAAU,QAAuB,YAAY;AAC3C,WAAO,YAAY,EAAE;AAErB,WAAO,WAAW,EAAE;AACpB,WAAO,OAAO,UAAU,EAAE;AAE1B,WAAO,OAAO,MAAM,KAClB;KACE,MAAM;KACN,KAAK,CACH,QAAQ,eAAe,OACvB;MACE,QAAQ;MACR,SAAS;MACV,CACF;KACF,EACD;KACE,MAAM;KACN,SAAS;KACT,KAAK,CACH;MACE,QAAQ;MACR,SAAS;MACV,CACF;KACF,CACF;AAED,WAAO,YAAY,EAAE;AAErB,WAAO,WAAW,UAAU,QAAQ,QAAQ,IAAI;;GAEnD;;;AAIL,eAAe,KAAK,KAAc,MAA2B;CAC3D,eAAe,eAAe;AAC5B,QAAM,KAAK,KAAK,EACd,QAAQ,WAAW,MAAM,KAAK,EAC/B,CAAC;AACF,QAAM,KAAK,KAAK,EAAE,OAAO,MAAM,CAAC;;CAGlC,eAAe,YAAY;EACzB,MAAM,EAAE,cAAc,MAAM,OAAO;EACnC,MAAM,EAAE,YAAY,WAAW,KAAK,YAAY;EAChD,MAAM,UAAU,IAAI,UAAU;GAC5B,eAAe;GACf,YAAY;GACZ,SAAS,CAAC,OAAO;GAClB,CAAC;AAEF,UAAQ,IAAI,WAAW;AACvB,OAAK,MAAM,cAAc,KAAK,gBAAgB,CAC5C,SAAQ,IAAI,WAAW,IAAI;AAE7B,OAAK,MAAM,aAAa,KAAK,eAAe,CAAC,QAAQ,CACnD,MAAK,MAAM,cAAc,UAAU,gBAAgB,CACjD,SAAQ,IAAI,WAAW,IAAI;AAI/B,UAAQ,GAAG,eAAe;AACxB,WAAQ,IAAI,2BAA2B;IACvC;EAEF,MAAM,qBAAqBA,OAAK,QAAQ,WAAW;AACnD,UAAQ,GAAG,OAAO,OAAO,QAAQ,SAAS;AACxC,OAAIA,OAAK,QAAQ,KAAK,KAAK,oBAAoB;AAE7C,YAAQ,oBAAoB;AAE5B,UAAM,QAAQ,OAAO;AACrB,UAAM,cAAc;AACpB,YAAQ,IAAI,8BAA8B;AAC1C,UAAM,WAAW;;IAEnB;AAEF,UAAQ,GAAG,cAAc;AACvB,OAAI,QAAQ,OAAQ;AAEpB,WAAQ,IAAI,2BAA2B;AACvC,GAAK,QAAQ,OAAO;IACpB;AAEF,QAAM,KAAK,WAAW,EAAE,SAAS,CAAC;;AAGpC,OAAM,cAAc;AACpB,KAAI,IACF,OAAM,WAAW;;AAIrB,eAAsB,YAAY,SAA2B;CAC3D,MAAM,OAAO,eAAe,cAAc,QAAQ,CAAC;AACnD,OAAM,KAAK,KAAK,EACd,QAAQ,WAAW,MAAM,KAAK,EAC/B,CAAC;AACF,OAAM,KAAK,KAAK,EAAE,OAAO,MAAM,CAAC;;AAGlC,SAAS,cAAc,SAAuD;AAC5E,QAAO;EACL,OAAO,EAAE;EACT,QAAQ,QAAQ,UAAU,UAAU;EACpC,YAAY,QAAQ,cAAc,UAAU;EAC7C;;AAGH,SAAS,eAAe,SAA2C;AACjE,QAAO,WAAW;EAChB,aAAa;EACb,QAAQ,QAAQ;EAChB,YAAY,QAAQ;EACpB,SAAS,CAAC,QAAQ,SAAS,UAAU,QAAQ,MAAM,CAAC;EACrD,CAAC"}
@@ -1,9 +1,9 @@
1
1
  import "../fuma-matter-CHgJa_-B.js";
2
- import { t as createMdxLoader } from "../mdx-DMZ9tsAa.js";
2
+ import { t as createMdxLoader } from "../mdx-CRT-jSh5.js";
3
3
  import { n as createCore, t as _Defaults } from "../core-DjldE3H9.js";
4
4
  import "../codegen-DleOVLNr.js";
5
- import { n as toNode, o as createStandaloneConfigLoader } from "../adapter-DG-viEbG.js";
6
- import { t as createMetaLoader } from "../meta-DyieTM4Z.js";
5
+ import { n as toNode, o as createStandaloneConfigLoader } from "../adapter-DI4cexsC.js";
6
+ import { t as createMetaLoader } from "../meta-BKBx8Gab.js";
7
7
 
8
8
  //#region src/node/loader.ts
9
9
  const configLoader = createStandaloneConfigLoader({
@@ -1 +1 @@
1
- {"version":3,"file":"loader.js","names":["load: LoadHook"],"sources":["../../src/node/loader.ts"],"sourcesContent":["import { _Defaults, createCore } from '@/core';\nimport { createMdxLoader } from '@/loaders/mdx';\nimport { toNode } from '@/loaders/adapter';\nimport { createStandaloneConfigLoader } from '@/loaders/config';\nimport type { LoadHook } from 'node:module';\nimport { createMetaLoader } from '@/loaders/meta';\n\nconst core = createCore({\n environment: 'node',\n configPath: _Defaults.configPath,\n outDir: _Defaults.outDir,\n});\n\nconst configLoader = createStandaloneConfigLoader({\n core,\n buildConfig: true,\n mode: 'production',\n});\n\nconst mdxLoader = toNode(createMdxLoader(configLoader));\nconst metaLoader = toNode(createMetaLoader(configLoader));\n\nexport const load: LoadHook = (url, context, nextLoad) => {\n return mdxLoader(url, context, (v, ctx) => metaLoader(v, { ...context, ...ctx }, nextLoad));\n};\n"],"mappings":";;;;;;;;AAaA,MAAM,eAAe,6BAA6B;CAChD,MAPW,WAAW;EACtB,aAAa;EACb,YAAY,UAAU;EACtB,QAAQ,UAAU;EACnB,CAAC;CAIA,aAAa;CACb,MAAM;CACP,CAAC;AAEF,MAAM,YAAY,OAAO,gBAAgB,aAAa,CAAC;AACvD,MAAM,aAAa,OAAO,iBAAiB,aAAa,CAAC;AAEzD,MAAaA,QAAkB,KAAK,SAAS,aAAa;AACxD,QAAO,UAAU,KAAK,UAAU,GAAG,QAAQ,WAAW,GAAG;EAAE,GAAG;EAAS,GAAG;EAAK,EAAE,SAAS,CAAC"}
1
+ {"version":3,"file":"loader.js","names":[],"sources":["../../src/node/loader.ts"],"sourcesContent":["import { _Defaults, createCore } from '@/core';\nimport { createMdxLoader } from '@/loaders/mdx';\nimport { toNode } from '@/loaders/adapter';\nimport { createStandaloneConfigLoader } from '@/loaders/config';\nimport type { LoadHook } from 'node:module';\nimport { createMetaLoader } from '@/loaders/meta';\n\nconst core = createCore({\n environment: 'node',\n configPath: _Defaults.configPath,\n outDir: _Defaults.outDir,\n});\n\nconst configLoader = createStandaloneConfigLoader({\n core,\n buildConfig: true,\n mode: 'production',\n});\n\nconst mdxLoader = toNode(createMdxLoader(configLoader));\nconst metaLoader = toNode(createMetaLoader(configLoader));\n\nexport const load: LoadHook = (url, context, nextLoad) => {\n return mdxLoader(url, context, (v, ctx) => metaLoader(v, { ...context, ...ctx }, nextLoad));\n};\n"],"mappings":";;;;;;;;AAaA,MAAM,eAAe,6BAA6B;CAChD,MAPW,WAAW;EACtB,aAAa;EACb,YAAY,UAAU;EACtB,QAAQ,UAAU;EACnB,CAAC;CAIA,aAAa;CACb,MAAM;CACP,CAAC;AAEF,MAAM,YAAY,OAAO,gBAAgB,aAAa,CAAC;AACvD,MAAM,aAAa,OAAO,iBAAiB,aAAa,CAAC;AAEzD,MAAa,QAAkB,KAAK,SAAS,aAAa;AACxD,QAAO,UAAU,KAAK,UAAU,GAAG,QAAQ,WAAW,GAAG;EAAE,GAAG;EAAS,GAAG;EAAK,EAAE,SAAS,CAAC"}
@@ -1 +1 @@
1
- {"version":3,"file":"json-schema.js","names":["parent: DocsCollectionItem | undefined","match: MetaCollectionItem | undefined","obj: object","files: EmitEntry[]"],"sources":["../../src/plugins/json-schema.ts"],"sourcesContent":["import type { EmitEntry, Plugin } from '@/core';\nimport type { DocsCollectionItem, MetaCollectionItem } from '@/config/build';\nimport { z } from 'zod';\nimport fs from 'node:fs/promises';\nimport path from 'node:path';\n\nexport interface JSONSchemaOptions {\n /**\n * insert `$schema` field to JSON files on creation.\n *\n * @defaultValue false\n */\n insert?: boolean;\n}\n\n/**\n * Generate JSON schemas locally for collection schemas\n *\n * note: **it only works with Zod**\n */\nexport default function jsonSchema({ insert = false }: JSONSchemaOptions = {}): Plugin {\n function getSchemaPath(name: string) {\n return `json-schema/${name}.json`;\n }\n\n return {\n configureServer(server) {\n const { outDir } = this.core.getOptions();\n if (!server.watcher || !insert) return;\n\n server.watcher.on('add', async (file) => {\n let parent: DocsCollectionItem | undefined;\n let match: MetaCollectionItem | undefined;\n for (const collection of this.core.getCollections()) {\n if (collection.type === 'meta' && collection.hasFile(file)) {\n match = collection;\n break;\n }\n if (collection.type === 'docs' && collection.meta.hasFile(file)) {\n parent = collection;\n match = collection.meta;\n break;\n }\n }\n\n if (!match) return;\n let obj: object;\n try {\n const content = (await fs.readFile(file)).toString();\n obj = content.length > 0 ? JSON.parse(content) : {};\n } catch {\n return;\n }\n\n if ('$schema' in obj) return;\n const schemaPath = path.join(\n outDir,\n getSchemaPath(parent ? `${parent.name}.meta` : match.name),\n );\n const updated = {\n $schema: path.relative(path.dirname(file), schemaPath),\n ...obj,\n };\n\n await fs.writeFile(file, JSON.stringify(updated, null, 2));\n });\n },\n emit() {\n const files: EmitEntry[] = [];\n\n function onSchema(name: string, schema: z.ZodSchema) {\n files.push({\n path: getSchemaPath(name),\n content: JSON.stringify(\n z.toJSONSchema(schema, {\n io: 'input',\n unrepresentable: 'any',\n }),\n ),\n });\n }\n\n for (const collection of this.core.getCollections()) {\n if (collection.type === 'docs') {\n if (collection.meta.schema instanceof z.ZodType) {\n onSchema(`${collection.name}.meta`, collection.meta.schema);\n }\n\n if (collection.docs.schema instanceof z.ZodType) {\n onSchema(`${collection.name}.docs`, collection.docs.schema);\n }\n } else if (collection.schema instanceof z.ZodType) {\n onSchema(collection.name, collection.schema);\n }\n }\n\n return files;\n },\n 
};\n}\n"],"mappings":";;;;;;;;;;AAoBA,SAAwB,WAAW,EAAE,SAAS,UAA6B,EAAE,EAAU;CACrF,SAAS,cAAc,MAAc;AACnC,SAAO,eAAe,KAAK;;AAG7B,QAAO;EACL,gBAAgB,QAAQ;GACtB,MAAM,EAAE,WAAW,KAAK,KAAK,YAAY;AACzC,OAAI,CAAC,OAAO,WAAW,CAAC,OAAQ;AAEhC,UAAO,QAAQ,GAAG,OAAO,OAAO,SAAS;IACvC,IAAIA;IACJ,IAAIC;AACJ,SAAK,MAAM,cAAc,KAAK,KAAK,gBAAgB,EAAE;AACnD,SAAI,WAAW,SAAS,UAAU,WAAW,QAAQ,KAAK,EAAE;AAC1D,cAAQ;AACR;;AAEF,SAAI,WAAW,SAAS,UAAU,WAAW,KAAK,QAAQ,KAAK,EAAE;AAC/D,eAAS;AACT,cAAQ,WAAW;AACnB;;;AAIJ,QAAI,CAAC,MAAO;IACZ,IAAIC;AACJ,QAAI;KACF,MAAM,WAAW,MAAM,GAAG,SAAS,KAAK,EAAE,UAAU;AACpD,WAAM,QAAQ,SAAS,IAAI,KAAK,MAAM,QAAQ,GAAG,EAAE;YAC7C;AACN;;AAGF,QAAI,aAAa,IAAK;IACtB,MAAM,aAAa,KAAK,KACtB,QACA,cAAc,SAAS,GAAG,OAAO,KAAK,SAAS,MAAM,KAAK,CAC3D;IACD,MAAM,UAAU;KACd,SAAS,KAAK,SAAS,KAAK,QAAQ,KAAK,EAAE,WAAW;KACtD,GAAG;KACJ;AAED,UAAM,GAAG,UAAU,MAAM,KAAK,UAAU,SAAS,MAAM,EAAE,CAAC;KAC1D;;EAEJ,OAAO;GACL,MAAMC,QAAqB,EAAE;GAE7B,SAAS,SAAS,MAAc,QAAqB;AACnD,UAAM,KAAK;KACT,MAAM,cAAc,KAAK;KACzB,SAAS,KAAK,UACZ,EAAE,aAAa,QAAQ;MACrB,IAAI;MACJ,iBAAiB;MAClB,CAAC,CACH;KACF,CAAC;;AAGJ,QAAK,MAAM,cAAc,KAAK,KAAK,gBAAgB,CACjD,KAAI,WAAW,SAAS,QAAQ;AAC9B,QAAI,WAAW,KAAK,kBAAkB,EAAE,QACtC,UAAS,GAAG,WAAW,KAAK,QAAQ,WAAW,KAAK,OAAO;AAG7D,QAAI,WAAW,KAAK,kBAAkB,EAAE,QACtC,UAAS,GAAG,WAAW,KAAK,QAAQ,WAAW,KAAK,OAAO;cAEpD,WAAW,kBAAkB,EAAE,QACxC,UAAS,WAAW,MAAM,WAAW,OAAO;AAIhD,UAAO;;EAEV"}
1
+ {"version":3,"file":"json-schema.js","names":[],"sources":["../../src/plugins/json-schema.ts"],"sourcesContent":["import type { EmitEntry, Plugin } from '@/core';\nimport type { DocsCollectionItem, MetaCollectionItem } from '@/config/build';\nimport { z } from 'zod';\nimport fs from 'node:fs/promises';\nimport path from 'node:path';\n\nexport interface JSONSchemaOptions {\n /**\n * insert `$schema` field to JSON files on creation.\n *\n * @defaultValue false\n */\n insert?: boolean;\n}\n\n/**\n * Generate JSON schemas locally for collection schemas\n *\n * note: **it only works with Zod**\n */\nexport default function jsonSchema({ insert = false }: JSONSchemaOptions = {}): Plugin {\n function getSchemaPath(name: string) {\n return `json-schema/${name}.json`;\n }\n\n return {\n configureServer(server) {\n const { outDir } = this.core.getOptions();\n if (!server.watcher || !insert) return;\n\n server.watcher.on('add', async (file) => {\n let parent: DocsCollectionItem | undefined;\n let match: MetaCollectionItem | undefined;\n for (const collection of this.core.getCollections()) {\n if (collection.type === 'meta' && collection.hasFile(file)) {\n match = collection;\n break;\n }\n if (collection.type === 'docs' && collection.meta.hasFile(file)) {\n parent = collection;\n match = collection.meta;\n break;\n }\n }\n\n if (!match) return;\n let obj: object;\n try {\n const content = (await fs.readFile(file)).toString();\n obj = content.length > 0 ? JSON.parse(content) : {};\n } catch {\n return;\n }\n\n if ('$schema' in obj) return;\n const schemaPath = path.join(\n outDir,\n getSchemaPath(parent ? `${parent.name}.meta` : match.name),\n );\n const updated = {\n $schema: path.relative(path.dirname(file), schemaPath),\n ...obj,\n };\n\n await fs.writeFile(file, JSON.stringify(updated, null, 2));\n });\n },\n emit() {\n const files: EmitEntry[] = [];\n\n function onSchema(name: string, schema: z.ZodSchema) {\n files.push({\n path: getSchemaPath(name),\n content: JSON.stringify(\n z.toJSONSchema(schema, {\n io: 'input',\n unrepresentable: 'any',\n }),\n ),\n });\n }\n\n for (const collection of this.core.getCollections()) {\n if (collection.type === 'docs') {\n if (collection.meta.schema instanceof z.ZodType) {\n onSchema(`${collection.name}.meta`, collection.meta.schema);\n }\n\n if (collection.docs.schema instanceof z.ZodType) {\n onSchema(`${collection.name}.docs`, collection.docs.schema);\n }\n } else if (collection.schema instanceof z.ZodType) {\n onSchema(collection.name, collection.schema);\n }\n }\n\n return files;\n },\n 
};\n}\n"],"mappings":";;;;;;;;;;AAoBA,SAAwB,WAAW,EAAE,SAAS,UAA6B,EAAE,EAAU;CACrF,SAAS,cAAc,MAAc;AACnC,SAAO,eAAe,KAAK;;AAG7B,QAAO;EACL,gBAAgB,QAAQ;GACtB,MAAM,EAAE,WAAW,KAAK,KAAK,YAAY;AACzC,OAAI,CAAC,OAAO,WAAW,CAAC,OAAQ;AAEhC,UAAO,QAAQ,GAAG,OAAO,OAAO,SAAS;IACvC,IAAI;IACJ,IAAI;AACJ,SAAK,MAAM,cAAc,KAAK,KAAK,gBAAgB,EAAE;AACnD,SAAI,WAAW,SAAS,UAAU,WAAW,QAAQ,KAAK,EAAE;AAC1D,cAAQ;AACR;;AAEF,SAAI,WAAW,SAAS,UAAU,WAAW,KAAK,QAAQ,KAAK,EAAE;AAC/D,eAAS;AACT,cAAQ,WAAW;AACnB;;;AAIJ,QAAI,CAAC,MAAO;IACZ,IAAI;AACJ,QAAI;KACF,MAAM,WAAW,MAAM,GAAG,SAAS,KAAK,EAAE,UAAU;AACpD,WAAM,QAAQ,SAAS,IAAI,KAAK,MAAM,QAAQ,GAAG,EAAE;YAC7C;AACN;;AAGF,QAAI,aAAa,IAAK;IACtB,MAAM,aAAa,KAAK,KACtB,QACA,cAAc,SAAS,GAAG,OAAO,KAAK,SAAS,MAAM,KAAK,CAC3D;IACD,MAAM,UAAU;KACd,SAAS,KAAK,SAAS,KAAK,QAAQ,KAAK,EAAE,WAAW;KACtD,GAAG;KACJ;AAED,UAAM,GAAG,UAAU,MAAM,KAAK,UAAU,SAAS,MAAM,EAAE,CAAC;KAC1D;;EAEJ,OAAO;GACL,MAAM,QAAqB,EAAE;GAE7B,SAAS,SAAS,MAAc,QAAqB;AACnD,UAAM,KAAK;KACT,MAAM,cAAc,KAAK;KACzB,SAAS,KAAK,UACZ,EAAE,aAAa,QAAQ;MACrB,IAAI;MACJ,iBAAiB;MAClB,CAAC,CACH;KACF,CAAC;;AAGJ,QAAK,MAAM,cAAc,KAAK,KAAK,gBAAgB,CACjD,KAAI,WAAW,SAAS,QAAQ;AAC9B,QAAI,WAAW,KAAK,kBAAkB,EAAE,QACtC,UAAS,GAAG,WAAW,KAAK,QAAQ,WAAW,KAAK,OAAO;AAG7D,QAAI,WAAW,KAAK,kBAAkB,EAAE,QACtC,UAAS,GAAG,WAAW,KAAK,QAAQ,WAAW,KAAK,OAAO;cAEpD,WAAW,kBAAkB,EAAE,QACxC,UAAS,WAAW,MAAM,WAAW,OAAO;AAIhD,UAAO;;EAEV"}
@@ -1 +1 @@
1
- {"version":3,"file":"last-modified.js","names":["fn: VersionControlFn","lines: string[]"],"sources":["../../src/plugins/last-modified.ts"],"sourcesContent":["import path from 'node:path';\nimport { x } from 'tinyexec';\nimport type { Plugin } from '@/core';\nimport { ident } from '@/utils/codegen';\n\nconst cache = new Map<string, Promise<Date | null>>();\ntype VersionControlFn = (filePath: string) => Promise<Date | null | undefined>;\n\nexport interface LastModifiedPluginOptions {\n /**\n * Version control to obtain the last modified time.\n *\n * - `git`: Requires `git` to be installed.\n *\n * If you are using Vercel, please set `VERCEL_DEEP_CLONE` environment variable to `true`.\n *\n * - A function: return the last modified time for given file path.\n *\n * @defaultValue 'git'\n */\n versionControl?: 'git' | VersionControlFn;\n\n /**\n * Filter the collections to include by names\n */\n filter?: (collection: string) => boolean;\n}\n\nconst ExtendTypes = `{\n /**\n * Last modified date of document file, obtained from version control.\n *\n */\n lastModified?: Date;\n}`;\n\n/**\n * Injects `lastModified` property to page exports.\n */\nexport default function lastModified(options: LastModifiedPluginOptions = {}): Plugin {\n const { versionControl = 'git', filter = () => true } = options;\n let fn: VersionControlFn;\n\n return {\n name: 'last-modified',\n 'index-file': {\n generateTypeConfig() {\n const lines: string[] = [];\n lines.push('{');\n lines.push(' DocData: {');\n for (const collection of this.core.getCollections()) {\n if (filter(collection.name)) {\n lines.push(ident(`${collection.name}: ${ExtendTypes},`, 2));\n }\n }\n lines.push(' }');\n lines.push('}');\n return lines.join('\\n');\n },\n serverOptions(options) {\n options.doc ??= {};\n options.doc.passthroughs ??= [];\n options.doc.passthroughs.push('lastModified');\n },\n },\n config() {\n const { workspace } = this.core.getOptions();\n const cwd = workspace ? path.resolve(workspace.dir) : process.cwd();\n\n switch (versionControl) {\n case 'git':\n fn = (v) => getGitTimestamp(v, cwd);\n break;\n default:\n fn = versionControl;\n }\n },\n doc: {\n async vfile(file) {\n if (!filter(this.collection.name)) return;\n\n const timestamp = await fn(this.filePath);\n if (timestamp) {\n file.data['mdx-export'] ??= [];\n file.data['mdx-export'].push({\n name: 'lastModified',\n value: timestamp,\n });\n }\n },\n },\n };\n}\n\nasync function getGitTimestamp(file: string, cwd: string): Promise<Date | null> {\n const cached = cache.get(file);\n if (cached) return cached;\n\n const timePromise = (async () => {\n const out = await x('git', ['log', '-1', '--pretty=\"%ai\"', path.relative(cwd, file)], {\n nodeOptions: {\n cwd,\n },\n });\n\n if (out.exitCode !== 0) return null;\n const date = new Date(out.stdout);\n return isNaN(date.getTime()) ? 
null : date;\n })();\n\n cache.set(file, timePromise);\n return timePromise;\n}\n"],"mappings":";;;;;AAKA,MAAM,wBAAQ,IAAI,KAAmC;AAuBrD,MAAM,cAAc;;;;;;;;;;AAWpB,SAAwB,aAAa,UAAqC,EAAE,EAAU;CACpF,MAAM,EAAE,iBAAiB,OAAO,eAAe,SAAS;CACxD,IAAIA;AAEJ,QAAO;EACL,MAAM;EACN,cAAc;GACZ,qBAAqB;IACnB,MAAMC,QAAkB,EAAE;AAC1B,UAAM,KAAK,IAAI;AACf,UAAM,KAAK,eAAe;AAC1B,SAAK,MAAM,cAAc,KAAK,KAAK,gBAAgB,CACjD,KAAI,OAAO,WAAW,KAAK,CACzB,OAAM,KAAK,MAAM,GAAG,WAAW,KAAK,IAAI,YAAY,IAAI,EAAE,CAAC;AAG/D,UAAM,KAAK,MAAM;AACjB,UAAM,KAAK,IAAI;AACf,WAAO,MAAM,KAAK,KAAK;;GAEzB,cAAc,WAAS;AACrB,cAAQ,QAAQ,EAAE;AAClB,cAAQ,IAAI,iBAAiB,EAAE;AAC/B,cAAQ,IAAI,aAAa,KAAK,eAAe;;GAEhD;EACD,SAAS;GACP,MAAM,EAAE,cAAc,KAAK,KAAK,YAAY;GAC5C,MAAM,MAAM,YAAY,KAAK,QAAQ,UAAU,IAAI,GAAG,QAAQ,KAAK;AAEnE,WAAQ,gBAAR;IACE,KAAK;AACH,WAAM,MAAM,gBAAgB,GAAG,IAAI;AACnC;IACF,QACE,MAAK;;;EAGX,KAAK,EACH,MAAM,MAAM,MAAM;AAChB,OAAI,CAAC,OAAO,KAAK,WAAW,KAAK,CAAE;GAEnC,MAAM,YAAY,MAAM,GAAG,KAAK,SAAS;AACzC,OAAI,WAAW;AACb,SAAK,KAAK,kBAAkB,EAAE;AAC9B,SAAK,KAAK,cAAc,KAAK;KAC3B,MAAM;KACN,OAAO;KACR,CAAC;;KAGP;EACF;;AAGH,eAAe,gBAAgB,MAAc,KAAmC;CAC9E,MAAM,SAAS,MAAM,IAAI,KAAK;AAC9B,KAAI,OAAQ,QAAO;CAEnB,MAAM,eAAe,YAAY;EAC/B,MAAM,MAAM,MAAM,EAAE,OAAO;GAAC;GAAO;GAAM;GAAkB,KAAK,SAAS,KAAK,KAAK;GAAC,EAAE,EACpF,aAAa,EACX,KACD,EACF,CAAC;AAEF,MAAI,IAAI,aAAa,EAAG,QAAO;EAC/B,MAAM,OAAO,IAAI,KAAK,IAAI,OAAO;AACjC,SAAO,MAAM,KAAK,SAAS,CAAC,GAAG,OAAO;KACpC;AAEJ,OAAM,IAAI,MAAM,YAAY;AAC5B,QAAO"}
1
+ {"version":3,"file":"last-modified.js","names":[],"sources":["../../src/plugins/last-modified.ts"],"sourcesContent":["import path from 'node:path';\nimport { x } from 'tinyexec';\nimport type { Plugin } from '@/core';\nimport { ident } from '@/utils/codegen';\n\nconst cache = new Map<string, Promise<Date | null>>();\ntype VersionControlFn = (filePath: string) => Promise<Date | null | undefined>;\n\nexport interface LastModifiedPluginOptions {\n /**\n * Version control to obtain the last modified time.\n *\n * - `git`: Requires `git` to be installed.\n *\n * If you are using Vercel, please set `VERCEL_DEEP_CLONE` environment variable to `true`.\n *\n * - A function: return the last modified time for given file path.\n *\n * @defaultValue 'git'\n */\n versionControl?: 'git' | VersionControlFn;\n\n /**\n * Filter the collections to include by names\n */\n filter?: (collection: string) => boolean;\n}\n\nconst ExtendTypes = `{\n /**\n * Last modified date of document file, obtained from version control.\n *\n */\n lastModified?: Date;\n}`;\n\n/**\n * Injects `lastModified` property to page exports.\n */\nexport default function lastModified(options: LastModifiedPluginOptions = {}): Plugin {\n const { versionControl = 'git', filter = () => true } = options;\n let fn: VersionControlFn;\n\n return {\n name: 'last-modified',\n 'index-file': {\n generateTypeConfig() {\n const lines: string[] = [];\n lines.push('{');\n lines.push(' DocData: {');\n for (const collection of this.core.getCollections()) {\n if (filter(collection.name)) {\n lines.push(ident(`${collection.name}: ${ExtendTypes},`, 2));\n }\n }\n lines.push(' }');\n lines.push('}');\n return lines.join('\\n');\n },\n serverOptions(options) {\n options.doc ??= {};\n options.doc.passthroughs ??= [];\n options.doc.passthroughs.push('lastModified');\n },\n },\n config() {\n const { workspace } = this.core.getOptions();\n const cwd = workspace ? path.resolve(workspace.dir) : process.cwd();\n\n switch (versionControl) {\n case 'git':\n fn = (v) => getGitTimestamp(v, cwd);\n break;\n default:\n fn = versionControl;\n }\n },\n doc: {\n async vfile(file) {\n if (!filter(this.collection.name)) return;\n\n const timestamp = await fn(this.filePath);\n if (timestamp) {\n file.data['mdx-export'] ??= [];\n file.data['mdx-export'].push({\n name: 'lastModified',\n value: timestamp,\n });\n }\n },\n },\n };\n}\n\nasync function getGitTimestamp(file: string, cwd: string): Promise<Date | null> {\n const cached = cache.get(file);\n if (cached) return cached;\n\n const timePromise = (async () => {\n const out = await x('git', ['log', '-1', '--pretty=\"%ai\"', path.relative(cwd, file)], {\n nodeOptions: {\n cwd,\n },\n });\n\n if (out.exitCode !== 0) return null;\n const date = new Date(out.stdout);\n return isNaN(date.getTime()) ? 
null : date;\n })();\n\n cache.set(file, timePromise);\n return timePromise;\n}\n"],"mappings":";;;;;AAKA,MAAM,wBAAQ,IAAI,KAAmC;AAuBrD,MAAM,cAAc;;;;;;;;;;AAWpB,SAAwB,aAAa,UAAqC,EAAE,EAAU;CACpF,MAAM,EAAE,iBAAiB,OAAO,eAAe,SAAS;CACxD,IAAI;AAEJ,QAAO;EACL,MAAM;EACN,cAAc;GACZ,qBAAqB;IACnB,MAAM,QAAkB,EAAE;AAC1B,UAAM,KAAK,IAAI;AACf,UAAM,KAAK,eAAe;AAC1B,SAAK,MAAM,cAAc,KAAK,KAAK,gBAAgB,CACjD,KAAI,OAAO,WAAW,KAAK,CACzB,OAAM,KAAK,MAAM,GAAG,WAAW,KAAK,IAAI,YAAY,IAAI,EAAE,CAAC;AAG/D,UAAM,KAAK,MAAM;AACjB,UAAM,KAAK,IAAI;AACf,WAAO,MAAM,KAAK,KAAK;;GAEzB,cAAc,WAAS;AACrB,cAAQ,QAAQ,EAAE;AAClB,cAAQ,IAAI,iBAAiB,EAAE;AAC/B,cAAQ,IAAI,aAAa,KAAK,eAAe;;GAEhD;EACD,SAAS;GACP,MAAM,EAAE,cAAc,KAAK,KAAK,YAAY;GAC5C,MAAM,MAAM,YAAY,KAAK,QAAQ,UAAU,IAAI,GAAG,QAAQ,KAAK;AAEnE,WAAQ,gBAAR;IACE,KAAK;AACH,WAAM,MAAM,gBAAgB,GAAG,IAAI;AACnC;IACF,QACE,MAAK;;;EAGX,KAAK,EACH,MAAM,MAAM,MAAM;AAChB,OAAI,CAAC,OAAO,KAAK,WAAW,KAAK,CAAE;GAEnC,MAAM,YAAY,MAAM,GAAG,KAAK,SAAS;AACzC,OAAI,WAAW;AACb,SAAK,KAAK,kBAAkB,EAAE;AAC9B,SAAK,KAAK,cAAc,KAAK;KAC3B,MAAM;KACN,OAAO;KACR,CAAC;;KAGP;EACF;;AAGH,eAAe,gBAAgB,MAAc,KAAmC;CAC9E,MAAM,SAAS,MAAM,IAAI,KAAK;AAC9B,KAAI,OAAQ,QAAO;CAEnB,MAAM,eAAe,YAAY;EAC/B,MAAM,MAAM,MAAM,EAAE,OAAO;GAAC;GAAO;GAAM;GAAkB,KAAK,SAAS,KAAK,KAAK;GAAC,EAAE,EACpF,aAAa,EACX,KACD,EACF,CAAC;AAEF,MAAI,IAAI,aAAa,EAAG,QAAO;EAC/B,MAAM,OAAO,IAAI,KAAK,IAAI,OAAO;AACjC,SAAO,MAAM,KAAK,SAAS,CAAC,GAAG,OAAO;KACpC;AAEJ,OAAM,IAAI,MAAM,YAAY;AAC5B,QAAO"}
@@ -1 +1 @@
1
- {"version":3,"file":"remark-include-D3G3mAnv.js","names":["newChildren: RootContent[]","extractedLines: string[]","ElementLikeTypes: ElementLikeContent['type'][]","attributes: Record<string, string | null>","nodes: RootContent[] | undefined","content: string","fs","path","queue: Promise<void>[]"],"sources":["../src/loaders/mdx/remark-unravel.ts","../src/loaders/mdx/mdast-utils.ts","../src/loaders/mdx/remark-include.ts"],"sourcesContent":["// from internal remark plugins in https://github.com/mdx-js/mdx/blob/main/packages/mdx/lib/plugin/remark-mark-and-unravel.js\n// we need to ensure consistency with MDX.js when parsing embed content in `remark-include`\nimport { visit } from 'unist-util-visit';\nimport type { Transformer } from 'unified';\nimport type { Root, RootContent } from 'mdast';\n\nexport function remarkMarkAndUnravel(): Transformer<Root, Root> {\n return (tree) => {\n visit(tree, function (node, index, parent) {\n let offset = -1;\n let all = true;\n let oneOrMore = false;\n\n if (parent && typeof index === 'number' && node.type === 'paragraph') {\n const children = node.children;\n\n while (++offset < children.length) {\n const child = children[offset];\n\n if (child.type === 'mdxJsxTextElement' || child.type === 'mdxTextExpression') {\n oneOrMore = true;\n } else if (child.type === 'text' && child.value.trim().length === 0) {\n // Empty.\n } else {\n all = false;\n break;\n }\n }\n\n if (all && oneOrMore) {\n offset = -1;\n const newChildren: RootContent[] = [];\n\n while (++offset < children.length) {\n const child = children[offset];\n\n if (child.type === 'mdxJsxTextElement') {\n // @ts-expect-error: mutate because it is faster; content model is fine.\n child.type = 'mdxJsxFlowElement';\n }\n\n if (child.type === 'mdxTextExpression') {\n // @ts-expect-error: mutate because it is faster; content model is fine.\n child.type = 'mdxFlowExpression';\n }\n\n if (child.type === 'text' && /^[\\t\\r\\n ]+$/.test(String(child.value))) {\n // Empty.\n } else {\n newChildren.push(child);\n }\n }\n\n parent.children.splice(index, 1, ...newChildren);\n return index;\n }\n }\n });\n };\n}\n","import type { RootContent } from 'mdast';\n\nexport function flattenNode(node: RootContent): string {\n if ('children' in node) return node.children.map((child) => flattenNode(child)).join('');\n\n if ('value' in node) return node.value;\n\n return '';\n}\n","import { type Processor, type Transformer, unified } from 'unified';\nimport { visit } from 'unist-util-visit';\nimport type { Code, Node, Root, RootContent } from 'mdast';\nimport * as path from 'node:path';\nimport * as fs from 'node:fs/promises';\nimport { fumaMatter } from '@/utils/fuma-matter';\nimport type { MdxJsxFlowElement, MdxJsxTextElement } from 'mdast-util-mdx-jsx';\nimport { remarkHeading } from 'fumadocs-core/mdx-plugins';\nimport { VFile } from 'vfile';\nimport type { Directives } from 'mdast-util-directive';\nimport { remarkMarkAndUnravel } from '@/loaders/mdx/remark-unravel';\nimport { flattenNode } from './mdast-utils';\n\n/**\n * VS Code–style region extraction\n * Adapted from VitePress:\n * https://github.com/vuejs/vitepress/blob/main/src/node/markdown/plugins/snippet.ts\n */\n\n// region marker regexes\nconst REGION_MARKERS = [\n {\n start: /^\\s*\\/\\/\\s*#?region\\b\\s*(.*?)\\s*$/,\n end: /^\\s*\\/\\/\\s*#?endregion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*<!--\\s*#?region\\b\\s*(.*?)\\s*-->/,\n end: /^\\s*<!--\\s*#?endregion\\b\\s*(.*?)\\s*-->/,\n },\n {\n start: /^\\s*\\/\\*\\s*#region\\b\\s*(.*?)\\s*\\*\\//,\n end: 
/^\\s*\\/\\*\\s*#endregion\\b\\s*(.*?)\\s*\\*\\//,\n },\n {\n start: /^\\s*#[rR]egion\\b\\s*(.*?)\\s*$/,\n end: /^\\s*#[eE]nd ?[rR]egion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*#\\s*#?region\\b\\s*(.*?)\\s*$/,\n end: /^\\s*#\\s*#?endregion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*(?:--|::|@?REM)\\s*#region\\b\\s*(.*?)\\s*$/,\n end: /^\\s*(?:--|::|@?REM)\\s*#endregion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*#pragma\\s+region\\b\\s*(.*?)\\s*$/,\n end: /^\\s*#pragma\\s+endregion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*\\(\\*\\s*#region\\b\\s*(.*?)\\s*\\*\\)/,\n end: /^\\s*\\(\\*\\s*#endregion\\b\\s*(.*?)\\s*\\*\\)/,\n },\n];\n\nfunction dedent(lines: string[]): string {\n const minIndent = lines.reduce((min, line) => {\n const match = line.match(/^(\\s*)\\S/);\n return match ? Math.min(min, match[1].length) : min;\n }, Infinity);\n\n return minIndent === Infinity\n ? lines.join('\\n')\n : lines.map((l) => l.slice(minIndent)).join('\\n');\n}\n\nfunction extractCodeRegion(content: string, regionName: string): string {\n const lines = content.split('\\n');\n\n for (let i = 0; i < lines.length; i++) {\n for (const re of REGION_MARKERS) {\n let match = re.start.exec(lines[i]);\n if (match?.[1] !== regionName) continue;\n\n let depth = 1;\n const extractedLines: string[] = [];\n for (let j = i + 1; j < lines.length; j++) {\n match = re.start.exec(lines[j]);\n if (match) {\n depth++;\n continue;\n }\n\n match = re.end.exec(lines[j]);\n if (match) {\n if (match[1] === regionName) depth = 0;\n else if (match[1] === '') depth--;\n else continue;\n\n if (depth > 0) continue;\n return dedent(extractedLines);\n } else {\n extractedLines.push(lines[j]);\n }\n }\n }\n }\n throw new Error(`Region \"${regionName}\" not found`);\n}\n\nexport interface Params {\n lang?: string;\n meta?: string;\n}\n\nconst ElementLikeTypes: ElementLikeContent['type'][] = [\n 'mdxJsxFlowElement',\n 'mdxJsxTextElement',\n 'containerDirective',\n 'textDirective',\n 'leafDirective',\n];\ntype ElementLikeContent = MdxJsxFlowElement | MdxJsxTextElement | Directives;\n\nfunction isElementLike(node: Node): node is ElementLikeContent {\n return ElementLikeTypes.includes(node.type as ElementLikeContent['type']);\n}\n\nfunction parseElementAttributes(\n element: ElementLikeContent,\n): Record<string, string | null | undefined> {\n if (Array.isArray(element.attributes)) {\n const attributes: Record<string, string | null> = {};\n\n for (const attr of element.attributes) {\n if (\n attr.type === 'mdxJsxAttribute' &&\n (typeof attr.value === 'string' || attr.value === null)\n ) {\n attributes[attr.name] = attr.value;\n }\n }\n\n return attributes;\n }\n\n return element.attributes ?? 
{};\n}\n\nfunction parseSpecifier(specifier: string): {\n file: string;\n section?: string;\n} {\n const idx = specifier.lastIndexOf('#');\n if (idx === -1) return { file: specifier };\n\n return {\n file: specifier.slice(0, idx),\n section: specifier.slice(idx + 1),\n };\n}\n\nfunction extractSection(root: Root, section: string): Root | undefined {\n let nodes: RootContent[] | undefined;\n let capturingHeadingContent = false;\n\n visit(root, (node) => {\n if (node.type === 'heading') {\n if (capturingHeadingContent) {\n return false;\n }\n\n if (node.data?.hProperties?.id === section) {\n capturingHeadingContent = true;\n nodes = [node];\n return 'skip';\n }\n\n return;\n }\n\n if (capturingHeadingContent) {\n nodes?.push(node as RootContent);\n return 'skip';\n }\n\n if (isElementLike(node) && node.name === 'section') {\n const attributes = parseElementAttributes(node);\n\n if (attributes.id === section) {\n nodes = node.children;\n return false;\n }\n }\n });\n\n if (nodes)\n return {\n type: 'root',\n children: nodes,\n };\n}\n\nexport function remarkInclude(this: Processor): Transformer<Root, Root> {\n const TagName = 'include';\n\n const embedContent = async (\n targetPath: string,\n heading: string | undefined,\n params: Params,\n parent: VFile,\n ) => {\n const { _getProcessor = () => this, _compiler } = parent.data;\n let content: string;\n try {\n content = (await fs.readFile(targetPath)).toString();\n } catch (e) {\n throw new Error(\n `failed to read file ${targetPath}\\n${e instanceof Error ? e.message : String(e)}`,\n { cause: e },\n );\n }\n\n const ext = path.extname(targetPath);\n _compiler?.addDependency(targetPath);\n // For non-Markdown files, support VS Code–style region extraction\n if (params.lang || (ext !== '.md' && ext !== '.mdx')) {\n const lang = params.lang ?? ext.slice(1);\n let value = content;\n if (heading) {\n value = extractCodeRegion(content, heading.trim());\n }\n return {\n type: 'code',\n lang,\n meta: params.meta,\n value,\n data: {},\n } satisfies Code;\n }\n\n const parser = _getProcessor(ext === '.mdx' ? 'mdx' : 'md');\n const parsed = fumaMatter(content);\n const targetFile = new VFile({\n path: targetPath,\n value: parsed.content,\n data: {\n ...parent.data,\n frontmatter: parsed.data as Record<string, unknown>,\n },\n });\n let mdast = parser.parse(targetFile) as Root;\n const baseProcessor = unified().use(remarkMarkAndUnravel);\n\n if (heading) {\n // parse headings before extraction\n const extracted = extractSection(await baseProcessor.use(remarkHeading).run(mdast), heading);\n if (!extracted)\n throw new Error(\n `Cannot find section ${heading} in ${targetPath}, make sure you have encapsulated the section in a <section id=\"${heading}\"> tag, or a :::section directive with remark-directive configured.`,\n );\n\n mdast = extracted;\n } else {\n mdast = await baseProcessor.run(mdast);\n }\n\n await update(mdast, targetFile);\n return mdast;\n };\n\n async function update(tree: Root, file: VFile) {\n const queue: Promise<void>[] = [];\n\n visit(tree, ElementLikeTypes, (_node, _, parent) => {\n const node = _node as ElementLikeContent;\n if (node.name !== TagName) return;\n\n const specifier = flattenNode(node);\n if (specifier.length === 0) return 'skip';\n\n const attributes = parseElementAttributes(node);\n const { file: relativePath, section } = parseSpecifier(specifier);\n const targetPath = path.resolve('cwd' in attributes ? 
file.cwd : file.dirname!, relativePath);\n\n queue.push(\n embedContent(targetPath, section, attributes, file).then((replace) => {\n Object.assign(parent && parent.type === 'paragraph' ? parent : node, replace);\n }),\n );\n\n return 'skip';\n });\n\n await Promise.all(queue);\n }\n\n return async (tree, file) => {\n await update(tree, file);\n };\n}\n"],"mappings":";;;;;;;;;AAMA,SAAgB,uBAAgD;AAC9D,SAAQ,SAAS;AACf,QAAM,MAAM,SAAU,MAAM,OAAO,QAAQ;GACzC,IAAI,SAAS;GACb,IAAI,MAAM;GACV,IAAI,YAAY;AAEhB,OAAI,UAAU,OAAO,UAAU,YAAY,KAAK,SAAS,aAAa;IACpE,MAAM,WAAW,KAAK;AAEtB,WAAO,EAAE,SAAS,SAAS,QAAQ;KACjC,MAAM,QAAQ,SAAS;AAEvB,SAAI,MAAM,SAAS,uBAAuB,MAAM,SAAS,oBACvD,aAAY;cACH,MAAM,SAAS,UAAU,MAAM,MAAM,MAAM,CAAC,WAAW,GAAG,QAE9D;AACL,YAAM;AACN;;;AAIJ,QAAI,OAAO,WAAW;AACpB,cAAS;KACT,MAAMA,cAA6B,EAAE;AAErC,YAAO,EAAE,SAAS,SAAS,QAAQ;MACjC,MAAM,QAAQ,SAAS;AAEvB,UAAI,MAAM,SAAS,oBAEjB,OAAM,OAAO;AAGf,UAAI,MAAM,SAAS,oBAEjB,OAAM,OAAO;AAGf,UAAI,MAAM,SAAS,UAAU,eAAe,KAAK,OAAO,MAAM,MAAM,CAAC,EAAE,OAGrE,aAAY,KAAK,MAAM;;AAI3B,YAAO,SAAS,OAAO,OAAO,GAAG,GAAG,YAAY;AAChD,YAAO;;;IAGX;;;;;;ACvDN,SAAgB,YAAY,MAA2B;AACrD,KAAI,cAAc,KAAM,QAAO,KAAK,SAAS,KAAK,UAAU,YAAY,MAAM,CAAC,CAAC,KAAK,GAAG;AAExF,KAAI,WAAW,KAAM,QAAO,KAAK;AAEjC,QAAO;;;;;;;;;;ACaT,MAAM,iBAAiB;CACrB;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACF;AAED,SAAS,OAAO,OAAyB;CACvC,MAAM,YAAY,MAAM,QAAQ,KAAK,SAAS;EAC5C,MAAM,QAAQ,KAAK,MAAM,WAAW;AACpC,SAAO,QAAQ,KAAK,IAAI,KAAK,MAAM,GAAG,OAAO,GAAG;IAC/C,SAAS;AAEZ,QAAO,cAAc,WACjB,MAAM,KAAK,KAAK,GAChB,MAAM,KAAK,MAAM,EAAE,MAAM,UAAU,CAAC,CAAC,KAAK,KAAK;;AAGrD,SAAS,kBAAkB,SAAiB,YAA4B;CACtE,MAAM,QAAQ,QAAQ,MAAM,KAAK;AAEjC,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,MAAK,MAAM,MAAM,gBAAgB;EAC/B,IAAI,QAAQ,GAAG,MAAM,KAAK,MAAM,GAAG;AACnC,MAAI,QAAQ,OAAO,WAAY;EAE/B,IAAI,QAAQ;EACZ,MAAMC,iBAA2B,EAAE;AACnC,OAAK,IAAI,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACzC,WAAQ,GAAG,MAAM,KAAK,MAAM,GAAG;AAC/B,OAAI,OAAO;AACT;AACA;;AAGF,WAAQ,GAAG,IAAI,KAAK,MAAM,GAAG;AAC7B,OAAI,OAAO;AACT,QAAI,MAAM,OAAO,WAAY,SAAQ;aAC5B,MAAM,OAAO,GAAI;QACrB;AAEL,QAAI,QAAQ,EAAG;AACf,WAAO,OAAO,eAAe;SAE7B,gBAAe,KAAK,MAAM,GAAG;;;AAKrC,OAAM,IAAI,MAAM,WAAW,WAAW,aAAa;;AAQrD,MAAMC,mBAAiD;CACrD;CACA;CACA;CACA;CACA;CACD;AAGD,SAAS,cAAc,MAAwC;AAC7D,QAAO,iBAAiB,SAAS,KAAK,KAAmC;;AAG3E,SAAS,uBACP,SAC2C;AAC3C,KAAI,MAAM,QAAQ,QAAQ,WAAW,EAAE;EACrC,MAAMC,aAA4C,EAAE;AAEpD,OAAK,MAAM,QAAQ,QAAQ,WACzB,KACE,KAAK,SAAS,sBACb,OAAO,KAAK,UAAU,YAAY,KAAK,UAAU,MAElD,YAAW,KAAK,QAAQ,KAAK;AAIjC,SAAO;;AAGT,QAAO,QAAQ,cAAc,EAAE;;AAGjC,SAAS,eAAe,WAGtB;CACA,MAAM,MAAM,UAAU,YAAY,IAAI;AACtC,KAAI,QAAQ,GAAI,QAAO,EAAE,MAAM,WAAW;AAE1C,QAAO;EACL,MAAM,UAAU,MAAM,GAAG,IAAI;EAC7B,SAAS,UAAU,MAAM,MAAM,EAAE;EAClC;;AAGH,SAAS,eAAe,MAAY,SAAmC;CACrE,IAAIC;CACJ,IAAI,0BAA0B;AAE9B,OAAM,OAAO,SAAS;AACpB,MAAI,KAAK,SAAS,WAAW;AAC3B,OAAI,wBACF,QAAO;AAGT,OAAI,KAAK,MAAM,aAAa,OAAO,SAAS;AAC1C,8BAA0B;AAC1B,YAAQ,CAAC,KAAK;AACd,WAAO;;AAGT;;AAGF,MAAI,yBAAyB;AAC3B,UAAO,KAAK,KAAoB;AAChC,UAAO;;AAGT,MAAI,cAAc,KAAK,IAAI,KAAK,SAAS,WAGvC;OAFmB,uBAAuB,KAAK,CAEhC,OAAO,SAAS;AAC7B,YAAQ,KAAK;AACb,WAAO;;;GAGX;AAEF,KAAI,MACF,QAAO;EACL,MAAM;EACN,UAAU;EACX;;AAGL,SAAgB,gBAAwD;CACtE,MAAM,UAAU;CAEhB,MAAM,eAAe,OACnB,YACA,SACA,QACA,WACG;EACH,MAAM,EAAE,sBAAsB,MAAM,cAAc,OAAO;EACzD,IAAIC;AACJ,MAAI;AACF,cAAW,MAAMC,KAAG,SAAS,WAAW,EAAE,UAAU;WAC7C,GAAG;AACV,SAAM,IAAI,MACR,uBAAuB,WAAW,IAAI,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE,IAChF,EAAE,OAAO,GAAG,CACb;;EAGH,MAAM,MAAMC,OAAK,QAAQ,WAAW;AACpC,aAAW,cAAc,WAAW;AAEpC,MAAI,OAAO,QAA
S,QAAQ,SAAS,QAAQ,QAAS;GACpD,MAAM,OAAO,OAAO,QAAQ,IAAI,MAAM,EAAE;GACxC,IAAI,QAAQ;AACZ,OAAI,QACF,SAAQ,kBAAkB,SAAS,QAAQ,MAAM,CAAC;AAEpD,UAAO;IACL,MAAM;IACN;IACA,MAAM,OAAO;IACb;IACA,MAAM,EAAE;IACT;;EAGH,MAAM,SAAS,cAAc,QAAQ,SAAS,QAAQ,KAAK;EAC3D,MAAM,SAAS,WAAW,QAAQ;EAClC,MAAM,aAAa,IAAI,MAAM;GAC3B,MAAM;GACN,OAAO,OAAO;GACd,MAAM;IACJ,GAAG,OAAO;IACV,aAAa,OAAO;IACrB;GACF,CAAC;EACF,IAAI,QAAQ,OAAO,MAAM,WAAW;EACpC,MAAM,gBAAgB,SAAS,CAAC,IAAI,qBAAqB;AAEzD,MAAI,SAAS;GAEX,MAAM,YAAY,eAAe,MAAM,cAAc,IAAI,cAAc,CAAC,IAAI,MAAM,EAAE,QAAQ;AAC5F,OAAI,CAAC,UACH,OAAM,IAAI,MACR,uBAAuB,QAAQ,MAAM,WAAW,kEAAkE,QAAQ,qEAC3H;AAEH,WAAQ;QAER,SAAQ,MAAM,cAAc,IAAI,MAAM;AAGxC,QAAM,OAAO,OAAO,WAAW;AAC/B,SAAO;;CAGT,eAAe,OAAO,MAAY,MAAa;EAC7C,MAAMC,QAAyB,EAAE;AAEjC,QAAM,MAAM,mBAAmB,OAAO,GAAG,WAAW;GAClD,MAAM,OAAO;AACb,OAAI,KAAK,SAAS,QAAS;GAE3B,MAAM,YAAY,YAAY,KAAK;AACnC,OAAI,UAAU,WAAW,EAAG,QAAO;GAEnC,MAAM,aAAa,uBAAuB,KAAK;GAC/C,MAAM,EAAE,MAAM,cAAc,YAAY,eAAe,UAAU;GACjE,MAAM,aAAaD,OAAK,QAAQ,SAAS,aAAa,KAAK,MAAM,KAAK,SAAU,aAAa;AAE7F,SAAM,KACJ,aAAa,YAAY,SAAS,YAAY,KAAK,CAAC,MAAM,YAAY;AACpE,WAAO,OAAO,UAAU,OAAO,SAAS,cAAc,SAAS,MAAM,QAAQ;KAC7E,CACH;AAED,UAAO;IACP;AAEF,QAAM,QAAQ,IAAI,MAAM;;AAG1B,QAAO,OAAO,MAAM,SAAS;AAC3B,QAAM,OAAO,MAAM,KAAK"}
1
+ {"version":3,"file":"remark-include-D3G3mAnv.js","names":["fs","path"],"sources":["../src/loaders/mdx/remark-unravel.ts","../src/loaders/mdx/mdast-utils.ts","../src/loaders/mdx/remark-include.ts"],"sourcesContent":["// from internal remark plugins in https://github.com/mdx-js/mdx/blob/main/packages/mdx/lib/plugin/remark-mark-and-unravel.js\n// we need to ensure consistency with MDX.js when parsing embed content in `remark-include`\nimport { visit } from 'unist-util-visit';\nimport type { Transformer } from 'unified';\nimport type { Root, RootContent } from 'mdast';\n\nexport function remarkMarkAndUnravel(): Transformer<Root, Root> {\n return (tree) => {\n visit(tree, function (node, index, parent) {\n let offset = -1;\n let all = true;\n let oneOrMore = false;\n\n if (parent && typeof index === 'number' && node.type === 'paragraph') {\n const children = node.children;\n\n while (++offset < children.length) {\n const child = children[offset];\n\n if (child.type === 'mdxJsxTextElement' || child.type === 'mdxTextExpression') {\n oneOrMore = true;\n } else if (child.type === 'text' && child.value.trim().length === 0) {\n // Empty.\n } else {\n all = false;\n break;\n }\n }\n\n if (all && oneOrMore) {\n offset = -1;\n const newChildren: RootContent[] = [];\n\n while (++offset < children.length) {\n const child = children[offset];\n\n if (child.type === 'mdxJsxTextElement') {\n // @ts-expect-error: mutate because it is faster; content model is fine.\n child.type = 'mdxJsxFlowElement';\n }\n\n if (child.type === 'mdxTextExpression') {\n // @ts-expect-error: mutate because it is faster; content model is fine.\n child.type = 'mdxFlowExpression';\n }\n\n if (child.type === 'text' && /^[\\t\\r\\n ]+$/.test(String(child.value))) {\n // Empty.\n } else {\n newChildren.push(child);\n }\n }\n\n parent.children.splice(index, 1, ...newChildren);\n return index;\n }\n }\n });\n };\n}\n","import type { RootContent } from 'mdast';\n\nexport function flattenNode(node: RootContent): string {\n if ('children' in node) return node.children.map((child) => flattenNode(child)).join('');\n\n if ('value' in node) return node.value;\n\n return '';\n}\n","import { type Processor, type Transformer, unified } from 'unified';\nimport { visit } from 'unist-util-visit';\nimport type { Code, Node, Root, RootContent } from 'mdast';\nimport * as path from 'node:path';\nimport * as fs from 'node:fs/promises';\nimport { fumaMatter } from '@/utils/fuma-matter';\nimport type { MdxJsxFlowElement, MdxJsxTextElement } from 'mdast-util-mdx-jsx';\nimport { remarkHeading } from 'fumadocs-core/mdx-plugins';\nimport { VFile } from 'vfile';\nimport type { Directives } from 'mdast-util-directive';\nimport { remarkMarkAndUnravel } from '@/loaders/mdx/remark-unravel';\nimport { flattenNode } from './mdast-utils';\n\n/**\n * VS Code–style region extraction\n * Adapted from VitePress:\n * https://github.com/vuejs/vitepress/blob/main/src/node/markdown/plugins/snippet.ts\n */\n\n// region marker regexes\nconst REGION_MARKERS = [\n {\n start: /^\\s*\\/\\/\\s*#?region\\b\\s*(.*?)\\s*$/,\n end: /^\\s*\\/\\/\\s*#?endregion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*<!--\\s*#?region\\b\\s*(.*?)\\s*-->/,\n end: /^\\s*<!--\\s*#?endregion\\b\\s*(.*?)\\s*-->/,\n },\n {\n start: /^\\s*\\/\\*\\s*#region\\b\\s*(.*?)\\s*\\*\\//,\n end: /^\\s*\\/\\*\\s*#endregion\\b\\s*(.*?)\\s*\\*\\//,\n },\n {\n start: /^\\s*#[rR]egion\\b\\s*(.*?)\\s*$/,\n end: /^\\s*#[eE]nd ?[rR]egion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*#\\s*#?region\\b\\s*(.*?)\\s*$/,\n end: 
/^\\s*#\\s*#?endregion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*(?:--|::|@?REM)\\s*#region\\b\\s*(.*?)\\s*$/,\n end: /^\\s*(?:--|::|@?REM)\\s*#endregion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*#pragma\\s+region\\b\\s*(.*?)\\s*$/,\n end: /^\\s*#pragma\\s+endregion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*\\(\\*\\s*#region\\b\\s*(.*?)\\s*\\*\\)/,\n end: /^\\s*\\(\\*\\s*#endregion\\b\\s*(.*?)\\s*\\*\\)/,\n },\n];\n\nfunction dedent(lines: string[]): string {\n const minIndent = lines.reduce((min, line) => {\n const match = line.match(/^(\\s*)\\S/);\n return match ? Math.min(min, match[1].length) : min;\n }, Infinity);\n\n return minIndent === Infinity\n ? lines.join('\\n')\n : lines.map((l) => l.slice(minIndent)).join('\\n');\n}\n\nfunction extractCodeRegion(content: string, regionName: string): string {\n const lines = content.split('\\n');\n\n for (let i = 0; i < lines.length; i++) {\n for (const re of REGION_MARKERS) {\n let match = re.start.exec(lines[i]);\n if (match?.[1] !== regionName) continue;\n\n let depth = 1;\n const extractedLines: string[] = [];\n for (let j = i + 1; j < lines.length; j++) {\n match = re.start.exec(lines[j]);\n if (match) {\n depth++;\n continue;\n }\n\n match = re.end.exec(lines[j]);\n if (match) {\n if (match[1] === regionName) depth = 0;\n else if (match[1] === '') depth--;\n else continue;\n\n if (depth > 0) continue;\n return dedent(extractedLines);\n } else {\n extractedLines.push(lines[j]);\n }\n }\n }\n }\n throw new Error(`Region \"${regionName}\" not found`);\n}\n\nexport interface Params {\n lang?: string;\n meta?: string;\n}\n\nconst ElementLikeTypes: ElementLikeContent['type'][] = [\n 'mdxJsxFlowElement',\n 'mdxJsxTextElement',\n 'containerDirective',\n 'textDirective',\n 'leafDirective',\n];\ntype ElementLikeContent = MdxJsxFlowElement | MdxJsxTextElement | Directives;\n\nfunction isElementLike(node: Node): node is ElementLikeContent {\n return ElementLikeTypes.includes(node.type as ElementLikeContent['type']);\n}\n\nfunction parseElementAttributes(\n element: ElementLikeContent,\n): Record<string, string | null | undefined> {\n if (Array.isArray(element.attributes)) {\n const attributes: Record<string, string | null> = {};\n\n for (const attr of element.attributes) {\n if (\n attr.type === 'mdxJsxAttribute' &&\n (typeof attr.value === 'string' || attr.value === null)\n ) {\n attributes[attr.name] = attr.value;\n }\n }\n\n return attributes;\n }\n\n return element.attributes ?? 
{};\n}\n\nfunction parseSpecifier(specifier: string): {\n file: string;\n section?: string;\n} {\n const idx = specifier.lastIndexOf('#');\n if (idx === -1) return { file: specifier };\n\n return {\n file: specifier.slice(0, idx),\n section: specifier.slice(idx + 1),\n };\n}\n\nfunction extractSection(root: Root, section: string): Root | undefined {\n let nodes: RootContent[] | undefined;\n let capturingHeadingContent = false;\n\n visit(root, (node) => {\n if (node.type === 'heading') {\n if (capturingHeadingContent) {\n return false;\n }\n\n if (node.data?.hProperties?.id === section) {\n capturingHeadingContent = true;\n nodes = [node];\n return 'skip';\n }\n\n return;\n }\n\n if (capturingHeadingContent) {\n nodes?.push(node as RootContent);\n return 'skip';\n }\n\n if (isElementLike(node) && node.name === 'section') {\n const attributes = parseElementAttributes(node);\n\n if (attributes.id === section) {\n nodes = node.children;\n return false;\n }\n }\n });\n\n if (nodes)\n return {\n type: 'root',\n children: nodes,\n };\n}\n\nexport function remarkInclude(this: Processor): Transformer<Root, Root> {\n const TagName = 'include';\n\n const embedContent = async (\n targetPath: string,\n heading: string | undefined,\n params: Params,\n parent: VFile,\n ) => {\n const { _getProcessor = () => this, _compiler } = parent.data;\n let content: string;\n try {\n content = (await fs.readFile(targetPath)).toString();\n } catch (e) {\n throw new Error(\n `failed to read file ${targetPath}\\n${e instanceof Error ? e.message : String(e)}`,\n { cause: e },\n );\n }\n\n const ext = path.extname(targetPath);\n _compiler?.addDependency(targetPath);\n // For non-Markdown files, support VS Code–style region extraction\n if (params.lang || (ext !== '.md' && ext !== '.mdx')) {\n const lang = params.lang ?? ext.slice(1);\n let value = content;\n if (heading) {\n value = extractCodeRegion(content, heading.trim());\n }\n return {\n type: 'code',\n lang,\n meta: params.meta,\n value,\n data: {},\n } satisfies Code;\n }\n\n const parser = _getProcessor(ext === '.mdx' ? 'mdx' : 'md');\n const parsed = fumaMatter(content);\n const targetFile = new VFile({\n path: targetPath,\n value: parsed.content,\n data: {\n ...parent.data,\n frontmatter: parsed.data as Record<string, unknown>,\n },\n });\n let mdast = parser.parse(targetFile) as Root;\n const baseProcessor = unified().use(remarkMarkAndUnravel);\n\n if (heading) {\n // parse headings before extraction\n const extracted = extractSection(await baseProcessor.use(remarkHeading).run(mdast), heading);\n if (!extracted)\n throw new Error(\n `Cannot find section ${heading} in ${targetPath}, make sure you have encapsulated the section in a <section id=\"${heading}\"> tag, or a :::section directive with remark-directive configured.`,\n );\n\n mdast = extracted;\n } else {\n mdast = await baseProcessor.run(mdast);\n }\n\n await update(mdast, targetFile);\n return mdast;\n };\n\n async function update(tree: Root, file: VFile) {\n const queue: Promise<void>[] = [];\n\n visit(tree, ElementLikeTypes, (_node, _, parent) => {\n const node = _node as ElementLikeContent;\n if (node.name !== TagName) return;\n\n const specifier = flattenNode(node);\n if (specifier.length === 0) return 'skip';\n\n const attributes = parseElementAttributes(node);\n const { file: relativePath, section } = parseSpecifier(specifier);\n const targetPath = path.resolve('cwd' in attributes ? 
file.cwd : file.dirname!, relativePath);\n\n queue.push(\n embedContent(targetPath, section, attributes, file).then((replace) => {\n Object.assign(parent && parent.type === 'paragraph' ? parent : node, replace);\n }),\n );\n\n return 'skip';\n });\n\n await Promise.all(queue);\n }\n\n return async (tree, file) => {\n await update(tree, file);\n };\n}\n"],"mappings":";;;;;;;;;AAMA,SAAgB,uBAAgD;AAC9D,SAAQ,SAAS;AACf,QAAM,MAAM,SAAU,MAAM,OAAO,QAAQ;GACzC,IAAI,SAAS;GACb,IAAI,MAAM;GACV,IAAI,YAAY;AAEhB,OAAI,UAAU,OAAO,UAAU,YAAY,KAAK,SAAS,aAAa;IACpE,MAAM,WAAW,KAAK;AAEtB,WAAO,EAAE,SAAS,SAAS,QAAQ;KACjC,MAAM,QAAQ,SAAS;AAEvB,SAAI,MAAM,SAAS,uBAAuB,MAAM,SAAS,oBACvD,aAAY;cACH,MAAM,SAAS,UAAU,MAAM,MAAM,MAAM,CAAC,WAAW,GAAG,QAE9D;AACL,YAAM;AACN;;;AAIJ,QAAI,OAAO,WAAW;AACpB,cAAS;KACT,MAAM,cAA6B,EAAE;AAErC,YAAO,EAAE,SAAS,SAAS,QAAQ;MACjC,MAAM,QAAQ,SAAS;AAEvB,UAAI,MAAM,SAAS,oBAEjB,OAAM,OAAO;AAGf,UAAI,MAAM,SAAS,oBAEjB,OAAM,OAAO;AAGf,UAAI,MAAM,SAAS,UAAU,eAAe,KAAK,OAAO,MAAM,MAAM,CAAC,EAAE,OAGrE,aAAY,KAAK,MAAM;;AAI3B,YAAO,SAAS,OAAO,OAAO,GAAG,GAAG,YAAY;AAChD,YAAO;;;IAGX;;;;;;ACvDN,SAAgB,YAAY,MAA2B;AACrD,KAAI,cAAc,KAAM,QAAO,KAAK,SAAS,KAAK,UAAU,YAAY,MAAM,CAAC,CAAC,KAAK,GAAG;AAExF,KAAI,WAAW,KAAM,QAAO,KAAK;AAEjC,QAAO;;;;;;;;;;ACaT,MAAM,iBAAiB;CACrB;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACF;AAED,SAAS,OAAO,OAAyB;CACvC,MAAM,YAAY,MAAM,QAAQ,KAAK,SAAS;EAC5C,MAAM,QAAQ,KAAK,MAAM,WAAW;AACpC,SAAO,QAAQ,KAAK,IAAI,KAAK,MAAM,GAAG,OAAO,GAAG;IAC/C,SAAS;AAEZ,QAAO,cAAc,WACjB,MAAM,KAAK,KAAK,GAChB,MAAM,KAAK,MAAM,EAAE,MAAM,UAAU,CAAC,CAAC,KAAK,KAAK;;AAGrD,SAAS,kBAAkB,SAAiB,YAA4B;CACtE,MAAM,QAAQ,QAAQ,MAAM,KAAK;AAEjC,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,MAAK,MAAM,MAAM,gBAAgB;EAC/B,IAAI,QAAQ,GAAG,MAAM,KAAK,MAAM,GAAG;AACnC,MAAI,QAAQ,OAAO,WAAY;EAE/B,IAAI,QAAQ;EACZ,MAAM,iBAA2B,EAAE;AACnC,OAAK,IAAI,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACzC,WAAQ,GAAG,MAAM,KAAK,MAAM,GAAG;AAC/B,OAAI,OAAO;AACT;AACA;;AAGF,WAAQ,GAAG,IAAI,KAAK,MAAM,GAAG;AAC7B,OAAI,OAAO;AACT,QAAI,MAAM,OAAO,WAAY,SAAQ;aAC5B,MAAM,OAAO,GAAI;QACrB;AAEL,QAAI,QAAQ,EAAG;AACf,WAAO,OAAO,eAAe;SAE7B,gBAAe,KAAK,MAAM,GAAG;;;AAKrC,OAAM,IAAI,MAAM,WAAW,WAAW,aAAa;;AAQrD,MAAM,mBAAiD;CACrD;CACA;CACA;CACA;CACA;CACD;AAGD,SAAS,cAAc,MAAwC;AAC7D,QAAO,iBAAiB,SAAS,KAAK,KAAmC;;AAG3E,SAAS,uBACP,SAC2C;AAC3C,KAAI,MAAM,QAAQ,QAAQ,WAAW,EAAE;EACrC,MAAM,aAA4C,EAAE;AAEpD,OAAK,MAAM,QAAQ,QAAQ,WACzB,KACE,KAAK,SAAS,sBACb,OAAO,KAAK,UAAU,YAAY,KAAK,UAAU,MAElD,YAAW,KAAK,QAAQ,KAAK;AAIjC,SAAO;;AAGT,QAAO,QAAQ,cAAc,EAAE;;AAGjC,SAAS,eAAe,WAGtB;CACA,MAAM,MAAM,UAAU,YAAY,IAAI;AACtC,KAAI,QAAQ,GAAI,QAAO,EAAE,MAAM,WAAW;AAE1C,QAAO;EACL,MAAM,UAAU,MAAM,GAAG,IAAI;EAC7B,SAAS,UAAU,MAAM,MAAM,EAAE;EAClC;;AAGH,SAAS,eAAe,MAAY,SAAmC;CACrE,IAAI;CACJ,IAAI,0BAA0B;AAE9B,OAAM,OAAO,SAAS;AACpB,MAAI,KAAK,SAAS,WAAW;AAC3B,OAAI,wBACF,QAAO;AAGT,OAAI,KAAK,MAAM,aAAa,OAAO,SAAS;AAC1C,8BAA0B;AAC1B,YAAQ,CAAC,KAAK;AACd,WAAO;;AAGT;;AAGF,MAAI,yBAAyB;AAC3B,UAAO,KAAK,KAAoB;AAChC,UAAO;;AAGT,MAAI,cAAc,KAAK,IAAI,KAAK,SAAS,WAGvC;OAFmB,uBAAuB,KAAK,CAEhC,OAAO,SAAS;AAC7B,YAAQ,KAAK;AACb,WAAO;;;GAGX;AAEF,KAAI,MACF,QAAO;EACL,MAAM;EACN,UAAU;EACX;;AAGL,SAAgB,gBAAwD;CACtE,MAAM,UAAU;CAEhB,MAAM,eAAe,OACnB,YACA,SACA,QACA,WACG;EACH,MAAM,EAAE,sBAAsB,MAAM,cAAc,OAAO;EACzD,IAAI;AACJ,MAAI;AACF,cAAW,MAAMA,KAAG,SAAS,WAAW,EAAE,UAAU;WAC7C,GAAG;AACV,SAAM,IAAI,MACR,uBAAuB,WAAW,IAAI,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE,IAChF,EAAE,OAAO,GAAG,CACb;;EAGH,MAAM,MAAMC,OAAK,QAAQ,WAAW;AACpC,aAAW,cAAc,WAAW;AAEpC,MAAI,OAAO,QAAS,QAAQ
,SAAS,QAAQ,QAAS;GACpD,MAAM,OAAO,OAAO,QAAQ,IAAI,MAAM,EAAE;GACxC,IAAI,QAAQ;AACZ,OAAI,QACF,SAAQ,kBAAkB,SAAS,QAAQ,MAAM,CAAC;AAEpD,UAAO;IACL,MAAM;IACN;IACA,MAAM,OAAO;IACb;IACA,MAAM,EAAE;IACT;;EAGH,MAAM,SAAS,cAAc,QAAQ,SAAS,QAAQ,KAAK;EAC3D,MAAM,SAAS,WAAW,QAAQ;EAClC,MAAM,aAAa,IAAI,MAAM;GAC3B,MAAM;GACN,OAAO,OAAO;GACd,MAAM;IACJ,GAAG,OAAO;IACV,aAAa,OAAO;IACrB;GACF,CAAC;EACF,IAAI,QAAQ,OAAO,MAAM,WAAW;EACpC,MAAM,gBAAgB,SAAS,CAAC,IAAI,qBAAqB;AAEzD,MAAI,SAAS;GAEX,MAAM,YAAY,eAAe,MAAM,cAAc,IAAI,cAAc,CAAC,IAAI,MAAM,EAAE,QAAQ;AAC5F,OAAI,CAAC,UACH,OAAM,IAAI,MACR,uBAAuB,QAAQ,MAAM,WAAW,kEAAkE,QAAQ,qEAC3H;AAEH,WAAQ;QAER,SAAQ,MAAM,cAAc,IAAI,MAAM;AAGxC,QAAM,OAAO,OAAO,WAAW;AAC/B,SAAO;;CAGT,eAAe,OAAO,MAAY,MAAa;EAC7C,MAAM,QAAyB,EAAE;AAEjC,QAAM,MAAM,mBAAmB,OAAO,GAAG,WAAW;GAClD,MAAM,OAAO;AACb,OAAI,KAAK,SAAS,QAAS;GAE3B,MAAM,YAAY,YAAY,KAAK;AACnC,OAAI,UAAU,WAAW,EAAG,QAAO;GAEnC,MAAM,aAAa,uBAAuB,KAAK;GAC/C,MAAM,EAAE,MAAM,cAAc,YAAY,eAAe,UAAU;GACjE,MAAM,aAAaA,OAAK,QAAQ,SAAS,aAAa,KAAK,MAAM,KAAK,SAAU,aAAa;AAE7F,SAAM,KACJ,aAAa,YAAY,SAAS,YAAY,KAAK,CAAC,MAAM,YAAY;AACpE,WAAO,OAAO,UAAU,OAAO,SAAS,cAAc,SAAS,MAAM,QAAQ;KAC7E,CACH;AAED,UAAO;IACP;AAEF,QAAM,QAAQ,IAAI,MAAM;;AAG1B,QAAO,OAAO,MAAM,SAAS;AAC3B,QAAM,OAAO,MAAM,KAAK"}
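The `remark-include.ts` source carried in the updated source map's `sourcesContent` above resolves `include` specifiers of the form `file#section` and, for non-Markdown targets, falls back to VS Code–style `#region` extraction. Below is a minimal, self-contained sketch of that extraction idea, assuming only the `//`-comment marker pair and ignoring the nested-region depth tracking the real `extractCodeRegion` performs; the function and sample file here are illustrative, not package exports.

// Simplified sketch of the region extraction shown in remark-include.ts; the real
// implementation also supports HTML, block-comment, #, --, ::, REM, #pragma and
// (* ... *) markers and tracks nested regions with a depth counter.
const START = /^\s*\/\/\s*#?region\b\s*(.*?)\s*$/;
const END = /^\s*\/\/\s*#?endregion\b\s*(.*?)\s*$/;

function extractRegion(source: string, name: string): string | undefined {
  const collected: string[] = [];
  let inside = false;

  for (const line of source.split('\n')) {
    if (!inside) {
      if (START.exec(line)?.[1] === name) inside = true;
    } else if (END.test(line)) {
      return collected.join('\n');
    } else {
      collected.push(line);
    }
  }
  return undefined; // region not found
}

const file = [
  'export const unrelated = 0;',
  '// #region demo',
  'export const answer = 42;',
  '// #endregion',
].join('\n');

console.log(extractRegion(file, 'demo')); // -> "export const answer = 42;"

In the plugin itself, the region name comes from the part after `#` in the include specifier (see `parseSpecifier`), and the extracted text is emitted as a `code` node whose `lang` defaults to the target file's extension.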
@@ -22,23 +22,23 @@ interface ClientLoaderOptions<Doc, Props> {
22
22
  interface ClientLoader<Doc, Props> {
23
23
  preload: (path: string) => Promise<Doc>;
24
24
  /**
25
- * Get a component that renders content with `React.lazy`.
25
+ * Get a component that renders content with `use()`.
26
26
  */
27
27
  getComponent: (path: string) => FC<Props>;
28
28
  /**
29
- * Get react nodes that renders content with `React.lazy`.
29
+ * Get react nodes that renders content, wraps `<Suspense>` by default.
30
30
  */
31
- useContent: (path: string, props: Props) => ReactNode;
31
+ useContent: undefined extends Props ? (path: string, props?: Props) => ReactNode : (path: string, props: Props) => ReactNode;
32
32
  }
33
33
  type BrowserCreate<Config, TC extends InternalTypeConfig> = ReturnType<typeof browser<Config, TC>>;
34
34
  interface DocCollectionEntry<Name extends string = string, Frontmatter = unknown, TC extends InternalTypeConfig = InternalTypeConfig> {
35
35
  raw: Record<string, () => Promise<CompiledMDXFile<Name, Frontmatter, TC>>>;
36
- createClientLoader: <Props extends object>(options: ClientLoaderOptions<CompiledMDXFile<Name, Frontmatter, TC>, Props>) => ClientLoader<CompiledMDXFile<Name, Frontmatter, TC>, Props>;
36
+ createClientLoader: <Props extends object | undefined = undefined>(options: ClientLoaderOptions<CompiledMDXFile<Name, Frontmatter, TC>, Props>) => ClientLoader<CompiledMDXFile<Name, Frontmatter, TC>, Props>;
37
37
  }
38
38
  declare function browser<Config, TC extends InternalTypeConfig>(): {
39
39
  doc<Name extends keyof Config & string>(_name: Name, glob: Record<string, () => Promise<unknown>>): Config[Name] extends DocCollection<infer Schema> | DocsCollection<infer Schema> ? DocCollectionEntry<Name, StandardSchemaV1.InferOutput<Schema>, TC> : never;
40
40
  };
41
- declare function createClientLoader<Doc = CompiledMDXProperties, Props extends object = object>(globEntries: Record<string, () => Promise<Doc>>, options: ClientLoaderOptions<Doc, Props>): ClientLoader<Doc, Props>;
41
+ declare function createClientLoader<Doc = CompiledMDXProperties, Props extends object | undefined = undefined>(globEntries: Record<string, () => Promise<Doc>>, options: ClientLoaderOptions<Doc, Props>): ClientLoader<Doc, Props>;
42
42
  //#endregion
43
43
  export { BrowserCreate, ClientLoader, ClientLoaderOptions, DocCollectionEntry, browser, createClientLoader };
44
44
  //# sourceMappingURL=browser.d.ts.map
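The hunk above makes `useContent` conditional on `Props` and gives `createClientLoader` a `Props extends object | undefined = undefined` default, so the `props` argument is only required when a props type is actually supplied. A self-contained, type-level sketch of the effect; the `ClientLoader` shape is trimmed from this hunk to the relevant members, `ReactNode` is stubbed, and the document paths and props are made up.

type ReactNode = unknown; // stub so the snippet type-checks on its own

interface ClientLoader<Doc, Props> {
  preload: (path: string) => Promise<Doc>;
  useContent: undefined extends Props
    ? (path: string, props?: Props) => ReactNode
    : (path: string, props: Props) => ReactNode;
}

// Default Props (= undefined): the second argument becomes optional.
declare const simple: ClientLoader<{ title: string }, undefined>;
simple.useContent('docs/index.mdx');

// Explicit Props: the second argument stays required.
declare const styled: ClientLoader<{ title: string }, { className: string }>;
styled.useContent('docs/index.mdx', { className: 'prose' });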
@@ -1 +1 @@
1
- {"version":3,"file":"browser.d.ts","names":[],"sources":["../../src/runtime/browser.ts"],"sourcesContent":[],"mappings":";;;;;;;KAMK,6DAGQ,sBACT,sBAAsB,eAAe,cAAc,QAAQ;UAE9C;EANZ;;;;;;;;AAML;EAYsB,EAAA,CAAA,EAAA,MAAA;EAAY,SAAA,EAAA,CAAA,MAAA,EAAZ,GAAY,EAAA,KAAA,EAAA,KAAA,EAAA,GAAU,SAAV;;AAAmB,UAGpC,YAHoC,CAAA,GAAA,EAAA,KAAA,CAAA,CAAA;EAGpC,OAAA,EAAA,CAAA,IAAA,EAAY,MAAA,EAAA,GACA,OADA,CACQ,GADR,CAAA;EACQ;;;EAIH,YAAA,EAAA,CAAA,IAAA,EAAA,MAAA,EAAA,GAAA,EAAA,CAAG,KAAH,CAAA;EAKE;;;EAGxB,UAAA,EAAA,CAAA,IAAa,EAAA,MAAA,EAAA,KAAA,EAHW,KAGX,EAAA,GAHqB,SAGrB;;AACR,KADL,aACK,CAAA,MAAA,EAAA,WAD4B,kBAC5B,CAAA,GADkD,UAClD,CAAA,OAAR,OAAQ,CAAA,MAAA,EAAQ,EAAR,CAAA,CAAA;AAAQ,UAGR,kBAHQ,CAAA,aAAA,MAAA,GAAA,MAAA,EAAA,cAAA,OAAA,EAAA,WAMZ,kBANY,GAMS,kBANT,CAAA,CAAA;EAAhB,GAAA,EAQF,MARE,CAAA,MAAA,EAAA,GAAA,GAQmB,OARnB,CAQ2B,eAR3B,CAQ2C,IAR3C,EAQiD,WARjD,EAQ8D,EAR9D,CAAA,CAAA,CAAA;EAD0D,kBAAA,EAAA,CAAA,cAAA,MAAA,CAAA,CAAA,OAAA,EAYtD,mBAZsD,CAYlC,eAZkC,CAYlB,IAZkB,EAYZ,WAZY,EAYC,EAZD,CAAA,EAYM,KAZN,CAAA,EAAA,GAa5D,YAb4D,CAa/C,eAb+C,CAa/B,IAb+B,EAazB,WAbyB,EAaZ,EAbY,CAAA,EAaP,KAbO,CAAA;;AAIlD,iBAYD,OAZmB,CAAA,MAAA,EAAA,WAYQ,kBAZR,CAAA,CAAA,CAAA,EAAA;EAGtB,GAAA,CAAA,aAAA,MAWc,MAXd,GAAA,MAAA,CAAA,CAAA,KAAA,EAYA,IAZA,EAAA,IAAA,EAaD,MAbC,CAAA,MAAA,EAAA,GAAA,GAaoB,OAbpB,CAAA,OAAA,CAAA,CAAA,CAAA,EAsBO,MAtBP,CAsBc,IAtBd,CAAA,SAsB4B,aAtB5B,CAAA,KAAA,OAAA,CAAA,GAsB0D,cAtB1D,CAAA,KAAA,OAAA,CAAA,GAuBH,kBAvBG,CAuBgB,IAvBhB,EAuBsB,gBAAA,CAAiB,WAvBvC,CAuBmD,MAvBnD,CAAA,EAuB4D,EAvB5D,CAAA,GAAA,KAAA;CAAqB;AAEkB,iBAkCpC,kBAlCoC,CAAA,MAkCX,qBAlCW,EAAA,cAAA,MAAA,GAAA,MAAA,CAAA,CAAA,WAAA,EAmCrC,MAnCqC,CAAA,MAAA,EAAA,GAAA,GAmChB,OAnCgB,CAmCR,GAnCQ,CAAA,CAAA,EAAA,OAAA,EAoCzC,mBApCyC,CAoCrB,GApCqB,EAoChB,KApCgB,CAAA,CAAA,EAqCjD,YArCiD,CAqCpC,GArCoC,EAqC/B,KArC+B,CAAA"}
1
+ {"version":3,"file":"browser.d.ts","names":[],"sources":["../../src/runtime/browser.tsx"],"sourcesContent":[],"mappings":";;;;;;;KAMK,6DAGQ,sBACT,sBAAsB,eAAe,cAAc,QAAQ;UAE9C;EANZ;;;;;;;;AAML;EAYsB,EAAA,CAAA,EAAA,MAAA;EAAY,SAAA,EAAA,CAAA,MAAA,EAAZ,GAAY,EAAA,KAAA,EAAA,KAAA,EAAA,GAAU,SAAV;;AAAmB,UAGpC,YAHoC,CAAA,GAAA,EAAA,KAAA,CAAA,CAAA;EAGpC,OAAA,EAAA,CAAA,IAAA,EAAY,MAAA,EAAA,GACA,OADA,CACQ,GADR,CAAA;EACQ;;;EAIH,YAAA,EAAA,CAAA,IAAA,EAAA,MAAA,EAAA,GAAA,EAAA,CAAG,KAAH,CAAA;EAIF;;;EAEJ,UAAA,EAAA,SAAA,SAFI,KAEJ,GAAA,CAAA,IAAA,EAAA,MAAA,EAAA,KAAA,CAAA,EADC,KACD,EAAA,GADW,SACX,GAAA,CAAA,IAAA,EAAA,MAAA,EAAA,KAAA,EAAA,KAAA,EAAA,GAAU,SAAV;;AAAmB,KAGnC,aAHmC,CAAA,MAAA,EAAA,WAGF,kBAHE,CAAA,GAGoB,UAHpB,CAAA,OAItC,OAJsC,CAI9B,MAJ8B,EAItB,EAJsB,CAAA,CAAA;AAGnC,UAIK,kBAJQ,CAAA,aAAA,MAAA,GAAA,MAAA,EAAA,cAAA,OAAA,EAAA,WAOZ,kBAPY,GAOS,kBAPT,CAAA,CAAA;EAAoB,GAAA,EAStC,MATsC,CAAA,MAAA,EAAA,GAAA,GASjB,OATiB,CAST,eATS,CASO,IATP,EASa,WATb,EAS0B,EAT1B,CAAA,CAAA,CAAA;EAC5B,kBAAA,EAAA,CAAA,cAAA,MAAA,GAAA,SAAA,GAAA,SAAA,CAAA,CAAA,OAAA,EAWJ,mBAXI,CAWgB,eAXhB,CAWgC,IAXhC,EAWsC,WAXtC,EAWmD,EAXnD,CAAA,EAWwD,KAXxD,CAAA,EAAA,GAYV,YAZU,CAYG,eAZH,CAYmB,IAZnB,EAYyB,WAZzB,EAYsC,EAZtC,CAAA,EAY2C,KAZ3C,CAAA;;AAAR,iBAeO,OAfP,CAAA,MAAA,EAAA,WAekC,kBAflC,CAAA,CAAA,CAAA,EAAA;EAD0D,GAAA,CAAA,aAAA,MAkBxC,MAlBwC,GAAA,MAAA,CAAA,CAAA,KAAA,EAmBtD,IAnBsD,EAAA,IAAA,EAoBvD,MApBuD,CAAA,MAAA,EAAA,GAAA,GAoBlC,OApBkC,CAAA,OAAA,CAAA,CAAA,CAAA,EA6B/C,MA7B+C,CA6BxC,IA7BwC,CAAA,SA6B1B,aA7B0B,CAAA,KAAA,OAAA,CAAA,GA6BI,cA7BJ,CAAA,KAAA,OAAA,CAAA,GA8BzD,kBA9ByD,CA8BtC,IA9BsC,EA8BhC,gBAAA,CAAiB,WA9Be,CA8BH,MA9BG,CAAA,EA8BM,EA9BN,CAAA,GAAA,KAAA;CAAU;AAI5D,iBAuCD,kBAvCmB,CAAA,MAwC3B,qBAxC2B,EAAA,cAAA,MAAA,GAAA,SAAA,GAAA,SAAA,CAAA,CAAA,WAAA,EA2CpB,MA3CoB,CAAA,MAAA,EAAA,GAAA,GA2CC,OA3CD,CA2CS,GA3CT,CAAA,CAAA,EAAA,OAAA,EA4CxB,mBA5CwB,CA4CJ,GA5CI,EA4CC,KA5CD,CAAA,CAAA,EA6ChC,YA7CgC,CA6CnB,GA7CmB,EA6Cd,KA7Cc,CAAA"}
@@ -1,6 +1,7 @@
1
- import { createElement, lazy } from "react";
1
+ import { use } from "react";
2
+ import { jsx } from "react/jsx-runtime";
2
3
 
3
- //#region src/runtime/browser.ts
4
+ //#region src/runtime/browser.tsx
4
5
  function browser() {
5
6
  return { doc(_name, glob) {
6
7
  return {
@@ -16,7 +17,7 @@ function browser() {
16
17
  }
17
18
  const loaderStore = /* @__PURE__ */ new Map();
18
19
  function createClientLoader(globEntries, options) {
19
- const { id = "", component } = options;
20
+ const { id = "", component: useRenderer } = options;
20
21
  const renderers = {};
21
22
  const loaders = /* @__PURE__ */ new Map();
22
23
  const store = loaderStore.get(id) ?? { preloaded: /* @__PURE__ */ new Map() };
@@ -29,16 +30,13 @@ function createClientLoader(globEntries, options) {
29
30
  }
30
31
  function getRenderer(path) {
31
32
  if (path in renderers) return renderers[path];
32
- const OnDemand = lazy(async () => {
33
- const loaded = await getLoader(path)();
34
- return { default: (props) => component(loaded, props) };
35
- });
36
- renderers[path] = (props) => {
37
- const cached = store.preloaded.get(path);
38
- if (!cached) return createElement(OnDemand, props);
39
- return component(cached, props);
40
- };
41
- return renderers[path];
33
+ let promise;
34
+ function Renderer(props) {
35
+ let doc = store.preloaded.get(path);
36
+ doc ??= use(promise ??= getLoader(path)());
37
+ return useRenderer(doc, props);
38
+ }
39
+ return renderers[path] = Renderer;
42
40
  }
43
41
  return {
44
42
  async preload(path) {
@@ -50,7 +48,7 @@ function createClientLoader(globEntries, options) {
50
48
  return getRenderer(path);
51
49
  },
52
50
  useContent(path, props) {
53
- return createElement(this.getComponent(path), props);
51
+ return /* @__PURE__ */ jsx(getRenderer(path), { ...props });
54
52
  }
55
53
  };
56
54
  }
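The runtime change above replaces the `React.lazy` wrapper with a per-path component that caches the loader promise (`promise ??= getLoader(path)()`) and reads it with React 19's `use()`, so repeated renders reuse one promise and preloaded documents skip suspension entirely. A rough standalone sketch of that caching pattern under a caller-provided `<Suspense>` boundary follows; the loader, path, and markup are stand-ins, not the package's `getLoader`/`useRenderer`.

import { Suspense, use } from 'react';

const cache = new Map<string, Promise<string>>();

// Keep one promise per path so `use()` receives a stable promise across re-renders.
function loadOnce(path: string): Promise<string> {
  let p = cache.get(path);
  if (!p) {
    p = fetch(path).then((res) => res.text()); // illustrative loader
    cache.set(path, p);
  }
  return p;
}

function Content({ path }: { path: string }) {
  // Suspends on the first render, returns the resolved value on later renders.
  const text = use(loadOnce(path));
  return <pre>{text}</pre>;
}

export function Page() {
  return (
    <Suspense fallback={<p>Loading…</p>}>
      <Content path="/docs/example.md" />
    </Suspense>
  );
}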
@@ -1 +1 @@
1
- {"version":3,"file":"browser.js","names":["renderers: Record<string, FC<Props>>"],"sources":["../../src/runtime/browser.ts"],"sourcesContent":["import { type ReactNode, type FC, lazy, createElement } from 'react';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { DocCollection, DocsCollection } from '@/config/define';\nimport type { CompiledMDXProperties } from '@/loaders/mdx/build-mdx';\nimport type { InternalTypeConfig } from './types';\n\ntype CompiledMDXFile<\n Name extends string,\n Frontmatter,\n TC extends InternalTypeConfig,\n> = CompiledMDXProperties<Frontmatter> & TC['DocData'][Name] & Record<string, unknown>;\n\nexport interface ClientLoaderOptions<Doc, Props> {\n /**\n * Loader ID (usually your collection name)\n *\n * The code splitting strategy of frameworks like Tanstack Start may duplicate `createClientLoader()` into different chunks.\n *\n * We use loader ID to share cache between multiple instances of client loader.\n *\n * @defaultValue ''\n */\n id?: string;\n\n component: (loaded: Doc, props: Props) => ReactNode;\n}\n\nexport interface ClientLoader<Doc, Props> {\n preload: (path: string) => Promise<Doc>;\n /**\n * Get a component that renders content with `React.lazy`.\n */\n getComponent: (path: string) => FC<Props>;\n\n /**\n * Get react nodes that renders content with `React.lazy`.\n */\n useContent: (path: string, props: Props) => ReactNode;\n}\n\nexport type BrowserCreate<Config, TC extends InternalTypeConfig> = ReturnType<\n typeof browser<Config, TC>\n>;\n\nexport interface DocCollectionEntry<\n Name extends string = string,\n Frontmatter = unknown,\n TC extends InternalTypeConfig = InternalTypeConfig,\n> {\n raw: Record<string, () => Promise<CompiledMDXFile<Name, Frontmatter, TC>>>;\n\n createClientLoader: <Props extends object>(\n options: ClientLoaderOptions<CompiledMDXFile<Name, Frontmatter, TC>, Props>,\n ) => ClientLoader<CompiledMDXFile<Name, Frontmatter, TC>, Props>;\n}\n\nexport function browser<Config, TC extends InternalTypeConfig>() {\n return {\n doc<Name extends keyof Config & string>(\n _name: Name,\n glob: Record<string, () => Promise<unknown>>,\n ) {\n const out: DocCollectionEntry = {\n raw: glob as DocCollectionEntry['raw'],\n createClientLoader({ id = _name as string, ...options }) {\n return createClientLoader(this.raw, { id, ...options });\n },\n };\n\n return out as Config[Name] extends DocCollection<infer Schema> | DocsCollection<infer Schema>\n ? DocCollectionEntry<Name, StandardSchemaV1.InferOutput<Schema>, TC>\n : never;\n },\n };\n}\n\nconst loaderStore = new Map<\n string,\n {\n preloaded: Map<string, CompiledMDXProperties>;\n }\n>();\n\nexport function createClientLoader<Doc = CompiledMDXProperties, Props extends object = object>(\n globEntries: Record<string, () => Promise<Doc>>,\n options: ClientLoaderOptions<Doc, Props>,\n): ClientLoader<Doc, Props> {\n const { id = '', component } = options;\n const renderers: Record<string, FC<Props>> = {};\n const loaders = new Map<string, () => Promise<Doc>>();\n const store = loaderStore.get(id) ?? {\n preloaded: new Map(),\n };\n loaderStore.set(id, store);\n\n for (const k in globEntries) {\n loaders.set(k.startsWith('./') ? 
k.slice(2) : k, globEntries[k]);\n }\n\n function getLoader(path: string) {\n const loader = loaders.get(path);\n if (!loader)\n throw new Error(`[createClientLoader] ${path} does not exist in available entries`);\n return loader;\n }\n\n function getRenderer(path: string): FC<Props> {\n if (path in renderers) return renderers[path];\n\n const OnDemand = lazy(async () => {\n const loaded = await getLoader(path)();\n\n return { default: (props) => component(loaded, props) };\n });\n\n renderers[path] = (props) => {\n const cached = store.preloaded.get(path);\n if (!cached) return createElement(OnDemand, props);\n return component(cached, props);\n };\n return renderers[path];\n }\n\n return {\n async preload(path) {\n const loaded = await getLoader(path)();\n store.preloaded.set(path, loaded);\n return loaded;\n },\n getComponent(path) {\n return getRenderer(path);\n },\n useContent(path, props) {\n const Comp = this.getComponent(path);\n return createElement(Comp, props);\n },\n };\n}\n"],"mappings":";;;AAwDA,SAAgB,UAAiD;AAC/D,QAAO,EACL,IACE,OACA,MACA;AAQA,SAPgC;GAC9B,KAAK;GACL,mBAAmB,EAAE,KAAK,OAAiB,GAAG,WAAW;AACvD,WAAO,mBAAmB,KAAK,KAAK;KAAE;KAAI,GAAG;KAAS,CAAC;;GAE1D;IAMJ;;AAGH,MAAM,8BAAc,IAAI,KAKrB;AAEH,SAAgB,mBACd,aACA,SAC0B;CAC1B,MAAM,EAAE,KAAK,IAAI,cAAc;CAC/B,MAAMA,YAAuC,EAAE;CAC/C,MAAM,0BAAU,IAAI,KAAiC;CACrD,MAAM,QAAQ,YAAY,IAAI,GAAG,IAAI,EACnC,2BAAW,IAAI,KAAK,EACrB;AACD,aAAY,IAAI,IAAI,MAAM;AAE1B,MAAK,MAAM,KAAK,YACd,SAAQ,IAAI,EAAE,WAAW,KAAK,GAAG,EAAE,MAAM,EAAE,GAAG,GAAG,YAAY,GAAG;CAGlE,SAAS,UAAU,MAAc;EAC/B,MAAM,SAAS,QAAQ,IAAI,KAAK;AAChC,MAAI,CAAC,OACH,OAAM,IAAI,MAAM,wBAAwB,KAAK,sCAAsC;AACrF,SAAO;;CAGT,SAAS,YAAY,MAAyB;AAC5C,MAAI,QAAQ,UAAW,QAAO,UAAU;EAExC,MAAM,WAAW,KAAK,YAAY;GAChC,MAAM,SAAS,MAAM,UAAU,KAAK,EAAE;AAEtC,UAAO,EAAE,UAAU,UAAU,UAAU,QAAQ,MAAM,EAAE;IACvD;AAEF,YAAU,SAAS,UAAU;GAC3B,MAAM,SAAS,MAAM,UAAU,IAAI,KAAK;AACxC,OAAI,CAAC,OAAQ,QAAO,cAAc,UAAU,MAAM;AAClD,UAAO,UAAU,QAAQ,MAAM;;AAEjC,SAAO,UAAU;;AAGnB,QAAO;EACL,MAAM,QAAQ,MAAM;GAClB,MAAM,SAAS,MAAM,UAAU,KAAK,EAAE;AACtC,SAAM,UAAU,IAAI,MAAM,OAAO;AACjC,UAAO;;EAET,aAAa,MAAM;AACjB,UAAO,YAAY,KAAK;;EAE1B,WAAW,MAAM,OAAO;AAEtB,UAAO,cADM,KAAK,aAAa,KAAK,EACT,MAAM;;EAEpC"}
1
+ {"version":3,"file":"browser.js","names":[],"sources":["../../src/runtime/browser.tsx"],"sourcesContent":["import { type ReactNode, type FC, use } from 'react';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { DocCollection, DocsCollection } from '@/config/define';\nimport type { CompiledMDXProperties } from '@/loaders/mdx/build-mdx';\nimport type { InternalTypeConfig } from './types';\n\ntype CompiledMDXFile<\n Name extends string,\n Frontmatter,\n TC extends InternalTypeConfig,\n> = CompiledMDXProperties<Frontmatter> & TC['DocData'][Name] & Record<string, unknown>;\n\nexport interface ClientLoaderOptions<Doc, Props> {\n /**\n * Loader ID (usually your collection name)\n *\n * The code splitting strategy of frameworks like Tanstack Start may duplicate `createClientLoader()` into different chunks.\n *\n * We use loader ID to share cache between multiple instances of client loader.\n *\n * @defaultValue ''\n */\n id?: string;\n\n component: (loaded: Doc, props: Props) => ReactNode;\n}\n\nexport interface ClientLoader<Doc, Props> {\n preload: (path: string) => Promise<Doc>;\n /**\n * Get a component that renders content with `use()`.\n */\n getComponent: (path: string) => FC<Props>;\n /**\n * Get react nodes that renders content, wraps `<Suspense>` by default.\n */\n useContent: undefined extends Props\n ? (path: string, props?: Props) => ReactNode\n : (path: string, props: Props) => ReactNode;\n}\n\nexport type BrowserCreate<Config, TC extends InternalTypeConfig> = ReturnType<\n typeof browser<Config, TC>\n>;\n\nexport interface DocCollectionEntry<\n Name extends string = string,\n Frontmatter = unknown,\n TC extends InternalTypeConfig = InternalTypeConfig,\n> {\n raw: Record<string, () => Promise<CompiledMDXFile<Name, Frontmatter, TC>>>;\n\n createClientLoader: <Props extends object | undefined = undefined>(\n options: ClientLoaderOptions<CompiledMDXFile<Name, Frontmatter, TC>, Props>,\n ) => ClientLoader<CompiledMDXFile<Name, Frontmatter, TC>, Props>;\n}\n\nexport function browser<Config, TC extends InternalTypeConfig>() {\n return {\n doc<Name extends keyof Config & string>(\n _name: Name,\n glob: Record<string, () => Promise<unknown>>,\n ) {\n const out: DocCollectionEntry = {\n raw: glob as DocCollectionEntry['raw'],\n createClientLoader({ id = _name as string, ...options }) {\n return createClientLoader(this.raw, { id, ...options });\n },\n };\n\n return out as Config[Name] extends DocCollection<infer Schema> | DocsCollection<infer Schema>\n ? DocCollectionEntry<Name, StandardSchemaV1.InferOutput<Schema>, TC>\n : never;\n },\n };\n}\n\nconst loaderStore = new Map<\n string,\n {\n preloaded: Map<string, CompiledMDXProperties>;\n }\n>();\n\nexport function createClientLoader<\n Doc = CompiledMDXProperties,\n Props extends object | undefined = undefined,\n>(\n globEntries: Record<string, () => Promise<Doc>>,\n options: ClientLoaderOptions<Doc, Props>,\n): ClientLoader<Doc, Props> {\n const { id = '', component: useRenderer } = options;\n const renderers: Record<string, FC<Props>> = {};\n const loaders = new Map<string, () => Promise<Doc>>();\n const store = loaderStore.get(id) ?? {\n preloaded: new Map(),\n };\n loaderStore.set(id, store);\n\n for (const k in globEntries) {\n loaders.set(k.startsWith('./') ? 
k.slice(2) : k, globEntries[k]);\n }\n\n function getLoader(path: string) {\n const loader = loaders.get(path);\n if (!loader)\n throw new Error(`[createClientLoader] ${path} does not exist in available entries`);\n return loader;\n }\n\n function getRenderer(path: string): FC<Props> {\n if (path in renderers) return renderers[path];\n\n let promise: Promise<Doc> | undefined;\n function Renderer(props: Props) {\n let doc = store.preloaded.get(path);\n doc ??= use((promise ??= getLoader(path)()));\n\n return useRenderer(doc, props);\n }\n\n return (renderers[path] = Renderer);\n }\n\n return {\n async preload(path) {\n const loaded = await getLoader(path)();\n store.preloaded.set(path, loaded);\n return loaded;\n },\n getComponent(path) {\n return getRenderer(path);\n },\n useContent(path: string, props: Props & object) {\n const Comp = getRenderer(path);\n return <Comp {...props} />;\n },\n } as ClientLoader<Doc, Props>;\n}\n"],"mappings":";;;;AAyDA,SAAgB,UAAiD;AAC/D,QAAO,EACL,IACE,OACA,MACA;AAQA,SAPgC;GAC9B,KAAK;GACL,mBAAmB,EAAE,KAAK,OAAiB,GAAG,WAAW;AACvD,WAAO,mBAAmB,KAAK,KAAK;KAAE;KAAI,GAAG;KAAS,CAAC;;GAE1D;IAMJ;;AAGH,MAAM,8BAAc,IAAI,KAKrB;AAEH,SAAgB,mBAId,aACA,SAC0B;CAC1B,MAAM,EAAE,KAAK,IAAI,WAAW,gBAAgB;CAC5C,MAAM,YAAuC,EAAE;CAC/C,MAAM,0BAAU,IAAI,KAAiC;CACrD,MAAM,QAAQ,YAAY,IAAI,GAAG,IAAI,EACnC,2BAAW,IAAI,KAAK,EACrB;AACD,aAAY,IAAI,IAAI,MAAM;AAE1B,MAAK,MAAM,KAAK,YACd,SAAQ,IAAI,EAAE,WAAW,KAAK,GAAG,EAAE,MAAM,EAAE,GAAG,GAAG,YAAY,GAAG;CAGlE,SAAS,UAAU,MAAc;EAC/B,MAAM,SAAS,QAAQ,IAAI,KAAK;AAChC,MAAI,CAAC,OACH,OAAM,IAAI,MAAM,wBAAwB,KAAK,sCAAsC;AACrF,SAAO;;CAGT,SAAS,YAAY,MAAyB;AAC5C,MAAI,QAAQ,UAAW,QAAO,UAAU;EAExC,IAAI;EACJ,SAAS,SAAS,OAAc;GAC9B,IAAI,MAAM,MAAM,UAAU,IAAI,KAAK;AACnC,WAAQ,IAAK,YAAY,UAAU,KAAK,EAAE,CAAE;AAE5C,UAAO,YAAY,KAAK,MAAM;;AAGhC,SAAQ,UAAU,QAAQ;;AAG5B,QAAO;EACL,MAAM,QAAQ,MAAM;GAClB,MAAM,SAAS,MAAM,UAAU,KAAK,EAAE;AACtC,SAAM,UAAU,IAAI,MAAM,OAAO;AACjC,UAAO;;EAET,aAAa,MAAM;AACjB,UAAO,YAAY,KAAK;;EAE1B,WAAW,MAAc,OAAuB;AAE9C,UAAO,oBADM,YAAY,KAAK,IACjB,GAAI,QAAS;;EAE7B"}
@@ -1 +1 @@
1
- {"version":3,"file":"dynamic.js","names":["head: Record<string, () => unknown>","body: Record<string, () => Promise<unknown>>","cachedResult: Promise<CompiledMDXProperties> | undefined"],"sources":["../../src/runtime/dynamic.ts"],"sourcesContent":["import { buildConfig, type DocCollectionItem } from '@/config/build';\nimport { buildMDX, type CompiledMDXProperties } from '@/loaders/mdx/build-mdx';\nimport { executeMdx } from '@fumadocs/mdx-remote/client';\nimport { pathToFileURL } from 'node:url';\nimport { fumaMatter } from '@/utils/fuma-matter';\nimport fs from 'node:fs/promises';\nimport { server, type ServerOptions } from './server';\nimport { type CoreOptions, createCore } from '@/core';\nimport type { FileInfo, InternalTypeConfig } from './types';\n\nexport interface LazyEntry<Data = unknown> {\n info: FileInfo;\n data: Data;\n\n hash?: string;\n}\n\nexport type CreateDynamic<Config, TC extends InternalTypeConfig = InternalTypeConfig> = ReturnType<\n typeof dynamic<Config, TC>\n>;\n\nexport async function dynamic<Config, TC extends InternalTypeConfig>(\n configExports: Config,\n coreOptions: CoreOptions,\n serverOptions?: ServerOptions,\n) {\n const core = createCore(coreOptions);\n await core.init({\n config: buildConfig(configExports as Record<string, unknown>),\n });\n\n const create = server<Config, TC>(serverOptions);\n\n function getDocCollection(name: string): DocCollectionItem | undefined {\n const collection = core.getCollection(name);\n if (!collection) return;\n\n if (collection.type === 'docs') return collection.docs;\n else if (collection.type === 'doc') return collection;\n }\n\n function convertLazyEntries(collection: DocCollectionItem, entries: LazyEntry[]) {\n const head: Record<string, () => unknown> = {};\n const body: Record<string, () => Promise<unknown>> = {};\n\n async function compile({ info, data }: LazyEntry<unknown>) {\n let content = (await fs.readFile(info.fullPath)).toString();\n content = fumaMatter(content).content;\n\n const compiled = await buildMDX(core, collection, {\n filePath: info.fullPath,\n source: content,\n frontmatter: data as Record<string, unknown>,\n isDevelopment: false,\n environment: 'runtime',\n });\n\n return (await executeMdx(String(compiled.value), {\n baseUrl: pathToFileURL(info.fullPath),\n })) as CompiledMDXProperties;\n }\n\n for (const entry of entries) {\n head[entry.info.path] = () => entry.data;\n let cachedResult: Promise<CompiledMDXProperties> | undefined;\n body[entry.info.path] = () => (cachedResult ??= compile(entry));\n }\n\n return { head, body };\n }\n\n return {\n async doc<Name extends keyof Config & string>(\n name: Name,\n base: string,\n entries: LazyEntry<unknown>[],\n ) {\n const collection = getDocCollection(name as string);\n if (!collection) throw new Error(`the doc collection ${name as string} doesn't exist.`);\n\n const { head, body } = convertLazyEntries(collection, entries);\n\n return create.docLazy(name, base, head, body);\n },\n async docs<Name extends keyof Config & string>(\n name: Name,\n base: string,\n meta: Record<string, unknown>,\n entries: LazyEntry<unknown>[],\n ) {\n const collection = getDocCollection(name as string);\n if (!collection) throw new Error(`the doc collection ${name as string} doesn't exist.`);\n\n const docs = convertLazyEntries(collection, entries);\n return create.docsLazy(name, base, meta, docs.head, docs.body);\n },\n 
};\n}\n"],"mappings":";;;;;;;;;;;;;AAqBA,eAAsB,QACpB,eACA,aACA,eACA;CACA,MAAM,OAAO,WAAW,YAAY;AACpC,OAAM,KAAK,KAAK,EACd,QAAQ,YAAY,cAAyC,EAC9D,CAAC;CAEF,MAAM,SAAS,OAAmB,cAAc;CAEhD,SAAS,iBAAiB,MAA6C;EACrE,MAAM,aAAa,KAAK,cAAc,KAAK;AAC3C,MAAI,CAAC,WAAY;AAEjB,MAAI,WAAW,SAAS,OAAQ,QAAO,WAAW;WACzC,WAAW,SAAS,MAAO,QAAO;;CAG7C,SAAS,mBAAmB,YAA+B,SAAsB;EAC/E,MAAMA,OAAsC,EAAE;EAC9C,MAAMC,OAA+C,EAAE;EAEvD,eAAe,QAAQ,EAAE,MAAM,QAA4B;GACzD,IAAI,WAAW,MAAM,GAAG,SAAS,KAAK,SAAS,EAAE,UAAU;AAC3D,aAAU,WAAW,QAAQ,CAAC;GAE9B,MAAM,WAAW,MAAM,SAAS,MAAM,YAAY;IAChD,UAAU,KAAK;IACf,QAAQ;IACR,aAAa;IACb,eAAe;IACf,aAAa;IACd,CAAC;AAEF,UAAQ,MAAM,WAAW,OAAO,SAAS,MAAM,EAAE,EAC/C,SAAS,cAAc,KAAK,SAAS,EACtC,CAAC;;AAGJ,OAAK,MAAM,SAAS,SAAS;AAC3B,QAAK,MAAM,KAAK,cAAc,MAAM;GACpC,IAAIC;AACJ,QAAK,MAAM,KAAK,cAAe,iBAAiB,QAAQ,MAAM;;AAGhE,SAAO;GAAE;GAAM;GAAM;;AAGvB,QAAO;EACL,MAAM,IACJ,MACA,MACA,SACA;GACA,MAAM,aAAa,iBAAiB,KAAe;AACnD,OAAI,CAAC,WAAY,OAAM,IAAI,MAAM,sBAAsB,KAAe,iBAAiB;GAEvF,MAAM,EAAE,MAAM,SAAS,mBAAmB,YAAY,QAAQ;AAE9D,UAAO,OAAO,QAAQ,MAAM,MAAM,MAAM,KAAK;;EAE/C,MAAM,KACJ,MACA,MACA,MACA,SACA;GACA,MAAM,aAAa,iBAAiB,KAAe;AACnD,OAAI,CAAC,WAAY,OAAM,IAAI,MAAM,sBAAsB,KAAe,iBAAiB;GAEvF,MAAM,OAAO,mBAAmB,YAAY,QAAQ;AACpD,UAAO,OAAO,SAAS,MAAM,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK;;EAEjE"}
1
+ {"version":3,"file":"dynamic.js","names":[],"sources":["../../src/runtime/dynamic.ts"],"sourcesContent":["import { buildConfig, type DocCollectionItem } from '@/config/build';\nimport { buildMDX, type CompiledMDXProperties } from '@/loaders/mdx/build-mdx';\nimport { executeMdx } from '@fumadocs/mdx-remote/client';\nimport { pathToFileURL } from 'node:url';\nimport { fumaMatter } from '@/utils/fuma-matter';\nimport fs from 'node:fs/promises';\nimport { server, type ServerOptions } from './server';\nimport { type CoreOptions, createCore } from '@/core';\nimport type { FileInfo, InternalTypeConfig } from './types';\n\nexport interface LazyEntry<Data = unknown> {\n info: FileInfo;\n data: Data;\n\n hash?: string;\n}\n\nexport type CreateDynamic<Config, TC extends InternalTypeConfig = InternalTypeConfig> = ReturnType<\n typeof dynamic<Config, TC>\n>;\n\nexport async function dynamic<Config, TC extends InternalTypeConfig>(\n configExports: Config,\n coreOptions: CoreOptions,\n serverOptions?: ServerOptions,\n) {\n const core = createCore(coreOptions);\n await core.init({\n config: buildConfig(configExports as Record<string, unknown>),\n });\n\n const create = server<Config, TC>(serverOptions);\n\n function getDocCollection(name: string): DocCollectionItem | undefined {\n const collection = core.getCollection(name);\n if (!collection) return;\n\n if (collection.type === 'docs') return collection.docs;\n else if (collection.type === 'doc') return collection;\n }\n\n function convertLazyEntries(collection: DocCollectionItem, entries: LazyEntry[]) {\n const head: Record<string, () => unknown> = {};\n const body: Record<string, () => Promise<unknown>> = {};\n\n async function compile({ info, data }: LazyEntry<unknown>) {\n let content = (await fs.readFile(info.fullPath)).toString();\n content = fumaMatter(content).content;\n\n const compiled = await buildMDX(core, collection, {\n filePath: info.fullPath,\n source: content,\n frontmatter: data as Record<string, unknown>,\n isDevelopment: false,\n environment: 'runtime',\n });\n\n return (await executeMdx(String(compiled.value), {\n baseUrl: pathToFileURL(info.fullPath),\n })) as CompiledMDXProperties;\n }\n\n for (const entry of entries) {\n head[entry.info.path] = () => entry.data;\n let cachedResult: Promise<CompiledMDXProperties> | undefined;\n body[entry.info.path] = () => (cachedResult ??= compile(entry));\n }\n\n return { head, body };\n }\n\n return {\n async doc<Name extends keyof Config & string>(\n name: Name,\n base: string,\n entries: LazyEntry<unknown>[],\n ) {\n const collection = getDocCollection(name as string);\n if (!collection) throw new Error(`the doc collection ${name as string} doesn't exist.`);\n\n const { head, body } = convertLazyEntries(collection, entries);\n\n return create.docLazy(name, base, head, body);\n },\n async docs<Name extends keyof Config & string>(\n name: Name,\n base: string,\n meta: Record<string, unknown>,\n entries: LazyEntry<unknown>[],\n ) {\n const collection = getDocCollection(name as string);\n if (!collection) throw new Error(`the doc collection ${name as string} doesn't exist.`);\n\n const docs = convertLazyEntries(collection, entries);\n return create.docsLazy(name, base, meta, docs.head, docs.body);\n },\n 
};\n}\n"],"mappings":";;;;;;;;;;;;;AAqBA,eAAsB,QACpB,eACA,aACA,eACA;CACA,MAAM,OAAO,WAAW,YAAY;AACpC,OAAM,KAAK,KAAK,EACd,QAAQ,YAAY,cAAyC,EAC9D,CAAC;CAEF,MAAM,SAAS,OAAmB,cAAc;CAEhD,SAAS,iBAAiB,MAA6C;EACrE,MAAM,aAAa,KAAK,cAAc,KAAK;AAC3C,MAAI,CAAC,WAAY;AAEjB,MAAI,WAAW,SAAS,OAAQ,QAAO,WAAW;WACzC,WAAW,SAAS,MAAO,QAAO;;CAG7C,SAAS,mBAAmB,YAA+B,SAAsB;EAC/E,MAAM,OAAsC,EAAE;EAC9C,MAAM,OAA+C,EAAE;EAEvD,eAAe,QAAQ,EAAE,MAAM,QAA4B;GACzD,IAAI,WAAW,MAAM,GAAG,SAAS,KAAK,SAAS,EAAE,UAAU;AAC3D,aAAU,WAAW,QAAQ,CAAC;GAE9B,MAAM,WAAW,MAAM,SAAS,MAAM,YAAY;IAChD,UAAU,KAAK;IACf,QAAQ;IACR,aAAa;IACb,eAAe;IACf,aAAa;IACd,CAAC;AAEF,UAAQ,MAAM,WAAW,OAAO,SAAS,MAAM,EAAE,EAC/C,SAAS,cAAc,KAAK,SAAS,EACtC,CAAC;;AAGJ,OAAK,MAAM,SAAS,SAAS;AAC3B,QAAK,MAAM,KAAK,cAAc,MAAM;GACpC,IAAI;AACJ,QAAK,MAAM,KAAK,cAAe,iBAAiB,QAAQ,MAAM;;AAGhE,SAAO;GAAE;GAAM;GAAM;;AAGvB,QAAO;EACL,MAAM,IACJ,MACA,MACA,SACA;GACA,MAAM,aAAa,iBAAiB,KAAe;AACnD,OAAI,CAAC,WAAY,OAAM,IAAI,MAAM,sBAAsB,KAAe,iBAAiB;GAEvF,MAAM,EAAE,MAAM,SAAS,mBAAmB,YAAY,QAAQ;AAE9D,UAAO,OAAO,QAAQ,MAAM,MAAM,MAAM,KAAK;;EAE/C,MAAM,KACJ,MACA,MACA,MACA,SACA;GACA,MAAM,aAAa,iBAAiB,KAAe;AACnD,OAAI,CAAC,WAAY,OAAM,IAAI,MAAM,sBAAsB,KAAe,iBAAiB;GAEvF,MAAM,OAAO,mBAAmB,YAAY,QAAQ;AACpD,UAAO,OAAO,SAAS,MAAM,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK;;EAEjE"}
@@ -1 +1 @@
1
- {"version":3,"file":"server.js","names":["path","data: DocData","data: CompiledMDXProperties","files: VirtualFile<{\n pageData: Page;\n metaData: Meta;\n }>[]"],"sources":["../../src/runtime/server.ts"],"sourcesContent":["import type { MetaData, PageData, Source, VirtualFile } from 'fumadocs-core/source';\nimport * as path from 'node:path';\nimport type { DocCollection, DocsCollection, MetaCollection } from '@/config';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { CompiledMDXProperties } from '@/loaders/mdx/build-mdx';\nimport type { InternalTypeConfig, DocData, DocMethods, FileInfo, MetaMethods } from './types';\n\nexport type MetaCollectionEntry<Data> = Data & MetaMethods;\n\nexport type DocCollectionEntry<\n Name extends string = string,\n Frontmatter = unknown,\n TC extends InternalTypeConfig = InternalTypeConfig,\n> = DocData & DocMethods & Frontmatter & TC['DocData'][Name];\n\nexport type AsyncDocCollectionEntry<\n Name extends string = string,\n Frontmatter = unknown,\n TC extends InternalTypeConfig = InternalTypeConfig,\n> = {\n load: () => Promise<DocData & TC['DocData'][Name]>;\n} & DocMethods &\n Frontmatter;\n\nexport interface DocsCollectionEntry<\n Name extends string = string,\n Frontmatter extends PageData = PageData,\n Meta extends MetaData = MetaData,\n TC extends InternalTypeConfig = InternalTypeConfig,\n> {\n docs: DocCollectionEntry<Name, Frontmatter, TC>[];\n meta: MetaCollectionEntry<Meta>[];\n toFumadocsSource: () => Source<{\n pageData: DocCollectionEntry<Name, Frontmatter, TC>;\n metaData: MetaCollectionEntry<Meta>;\n }>;\n}\n\nexport interface AsyncDocsCollectionEntry<\n Name extends string = string,\n Frontmatter extends PageData = PageData,\n Meta extends MetaData = MetaData,\n TC extends InternalTypeConfig = InternalTypeConfig,\n> {\n docs: AsyncDocCollectionEntry<Name, Frontmatter, TC>[];\n meta: MetaCollectionEntry<Meta>[];\n toFumadocsSource: () => Source<{\n pageData: AsyncDocCollectionEntry<Name, Frontmatter, TC>;\n metaData: MetaCollectionEntry<Meta>;\n }>;\n}\n\ntype AwaitableGlobEntries<T> = Record<string, T | (() => Promise<T>)>;\n\nexport type ServerCreate<Config, TC extends InternalTypeConfig = InternalTypeConfig> = ReturnType<\n typeof server<Config, TC>\n>;\n\nexport interface ServerOptions {\n doc?: {\n passthroughs?: string[];\n };\n}\n\nexport function server<Config, TC extends InternalTypeConfig>(options: ServerOptions = {}) {\n const { doc: { passthroughs: docPassthroughs = [] } = {} } = options;\n\n function fileInfo(file: string, base: string): FileInfo {\n if (file.startsWith('./')) {\n file = file.slice(2);\n }\n\n return {\n path: file,\n fullPath: path.join(base, file),\n };\n }\n\n function mapDocData(entry: CompiledMDXProperties): DocData {\n const data: DocData = {\n body: entry.default,\n toc: entry.toc,\n structuredData: entry.structuredData,\n _exports: entry as unknown as Record<string, unknown>,\n };\n\n for (const key of docPassthroughs) {\n // @ts-expect-error -- handle passthrough properties\n data[key] = entry[key];\n }\n\n return data;\n }\n\n return {\n async doc<Name extends keyof Config & string>(\n _name: Name,\n base: string,\n glob: AwaitableGlobEntries<unknown>,\n ) {\n const out = await Promise.all(\n Object.entries(glob).map(async ([k, v]) => {\n const data: CompiledMDXProperties = typeof v === 'function' ? 
await v() : v;\n\n return {\n ...mapDocData(data),\n ...(data.frontmatter as object),\n ...createDocMethods(fileInfo(k, base), () => data),\n } satisfies DocCollectionEntry;\n }),\n );\n\n return out as unknown as Config[Name] extends\n | DocCollection<infer Schema>\n | DocsCollection<infer Schema>\n ? DocCollectionEntry<Name, StandardSchemaV1.InferOutput<Schema>, TC>[]\n : never;\n },\n async docLazy<Name extends keyof Config & string>(\n _name: Name,\n base: string,\n head: AwaitableGlobEntries<unknown>,\n body: Record<string, () => Promise<unknown>>,\n ) {\n const out = await Promise.all(\n Object.entries(head).map(async ([k, v]) => {\n const data = typeof v === 'function' ? await v() : v;\n const content = body[k] as () => Promise<CompiledMDXProperties>;\n\n return {\n ...data,\n ...createDocMethods(fileInfo(k, base), content),\n async load() {\n return mapDocData(await content());\n },\n } satisfies AsyncDocCollectionEntry;\n }),\n );\n\n return out as unknown as Config[Name] extends\n | DocCollection<infer Schema>\n | DocsCollection<infer Schema>\n ? AsyncDocCollectionEntry<Name, StandardSchemaV1.InferOutput<Schema>, TC>[]\n : never;\n },\n async meta<Name extends keyof Config & string>(\n _name: Name,\n base: string,\n glob: AwaitableGlobEntries<unknown>,\n ) {\n const out = await Promise.all(\n Object.entries(glob).map(async ([k, v]) => {\n const data = typeof v === 'function' ? await v() : v;\n\n return {\n info: fileInfo(k, base),\n ...data,\n } satisfies MetaCollectionEntry<unknown>;\n }),\n );\n\n return out as unknown as Config[Name] extends\n | MetaCollection<infer Schema>\n | DocsCollection<StandardSchemaV1, infer Schema>\n ? MetaCollectionEntry<StandardSchemaV1.InferOutput<Schema>>[]\n : never;\n },\n\n async docs<Name extends keyof Config & string>(\n name: Name,\n base: string,\n metaGlob: AwaitableGlobEntries<unknown>,\n docGlob: AwaitableGlobEntries<unknown>,\n ) {\n const entry = {\n docs: await this.doc(name, base, docGlob),\n meta: await this.meta(name, base, metaGlob),\n toFumadocsSource() {\n return toFumadocsSource(this.docs, this.meta);\n },\n } satisfies DocsCollectionEntry;\n\n return entry as Config[Name] extends DocsCollection<infer Page, infer Meta>\n ? StandardSchemaV1.InferOutput<Page> extends PageData\n ? StandardSchemaV1.InferOutput<Meta> extends MetaData\n ? DocsCollectionEntry<\n Name,\n StandardSchemaV1.InferOutput<Page>,\n StandardSchemaV1.InferOutput<Meta>,\n TC\n >\n : never\n : never\n : never;\n },\n async docsLazy<Name extends keyof Config & string>(\n name: Name,\n base: string,\n metaGlob: AwaitableGlobEntries<unknown>,\n docHeadGlob: AwaitableGlobEntries<unknown>,\n docBodyGlob: Record<string, () => Promise<unknown>>,\n ) {\n const entry = {\n docs: await this.docLazy(name, base, docHeadGlob, docBodyGlob),\n meta: await this.meta(name, base, metaGlob),\n toFumadocsSource() {\n return toFumadocsSource(this.docs, this.meta);\n },\n } satisfies AsyncDocsCollectionEntry;\n\n return entry as Config[Name] extends DocsCollection<infer Page, infer Meta>\n ? StandardSchemaV1.InferOutput<Page> extends PageData\n ? StandardSchemaV1.InferOutput<Meta> extends MetaData\n ? 
AsyncDocsCollectionEntry<\n Name,\n StandardSchemaV1.InferOutput<Page>,\n StandardSchemaV1.InferOutput<Meta>,\n TC\n >\n : never\n : never\n : never;\n },\n };\n}\n\nexport function toFumadocsSource<\n Page extends DocMethods & PageData,\n Meta extends MetaMethods & MetaData,\n>(\n pages: Page[],\n metas: Meta[],\n): Source<{\n pageData: Page;\n metaData: Meta;\n}> {\n const files: VirtualFile<{\n pageData: Page;\n metaData: Meta;\n }>[] = [];\n\n for (const entry of pages) {\n files.push({\n type: 'page',\n path: entry.info.path,\n absolutePath: entry.info.fullPath,\n data: entry,\n });\n }\n\n for (const entry of metas) {\n files.push({\n type: 'meta',\n path: entry.info.path,\n absolutePath: entry.info.fullPath,\n data: entry,\n });\n }\n\n return {\n files,\n };\n}\n\nfunction createDocMethods(\n info: FileInfo,\n load: () => CompiledMDXProperties | Promise<CompiledMDXProperties>,\n): DocMethods {\n return {\n info,\n async getText(type) {\n if (type === 'raw') {\n const fs = await import('node:fs/promises');\n\n return (await fs.readFile(info.fullPath)).toString();\n }\n\n const data = await load();\n if (typeof data._markdown !== 'string')\n throw new Error(\n \"getText('processed') requires `includeProcessedMarkdown` to be enabled in your collection config.\",\n );\n return data._markdown;\n },\n async getMDAST() {\n const data = await load();\n\n if (!data._mdast)\n throw new Error(\n 'getMDAST() requires `includeMDAST` to be enabled in your collection config.',\n );\n return JSON.parse(data._mdast);\n },\n };\n}\n"],"mappings":";;;AAgEA,SAAgB,OAA8C,UAAyB,EAAE,EAAE;CACzF,MAAM,EAAE,KAAK,EAAE,cAAc,kBAAkB,EAAE,KAAK,EAAE,KAAK;CAE7D,SAAS,SAAS,MAAc,MAAwB;AACtD,MAAI,KAAK,WAAW,KAAK,CACvB,QAAO,KAAK,MAAM,EAAE;AAGtB,SAAO;GACL,MAAM;GACN,UAAUA,OAAK,KAAK,MAAM,KAAK;GAChC;;CAGH,SAAS,WAAW,OAAuC;EACzD,MAAMC,OAAgB;GACpB,MAAM,MAAM;GACZ,KAAK,MAAM;GACX,gBAAgB,MAAM;GACtB,UAAU;GACX;AAED,OAAK,MAAM,OAAO,gBAEhB,MAAK,OAAO,MAAM;AAGpB,SAAO;;AAGT,QAAO;EACL,MAAM,IACJ,OACA,MACA,MACA;AAaA,UAZY,MAAM,QAAQ,IACxB,OAAO,QAAQ,KAAK,CAAC,IAAI,OAAO,CAAC,GAAG,OAAO;IACzC,MAAMC,OAA8B,OAAO,MAAM,aAAa,MAAM,GAAG,GAAG;AAE1E,WAAO;KACL,GAAG,WAAW,KAAK;KACnB,GAAI,KAAK;KACT,GAAG,iBAAiB,SAAS,GAAG,KAAK,QAAQ,KAAK;KACnD;KACD,CACH;;EAQH,MAAM,QACJ,OACA,MACA,MACA,MACA;AAgBA,UAfY,MAAM,QAAQ,IACxB,OAAO,QAAQ,KAAK,CAAC,IAAI,OAAO,CAAC,GAAG,OAAO;IACzC,MAAM,OAAO,OAAO,MAAM,aAAa,MAAM,GAAG,GAAG;IACnD,MAAM,UAAU,KAAK;AAErB,WAAO;KACL,GAAG;KACH,GAAG,iBAAiB,SAAS,GAAG,KAAK,EAAE,QAAQ;KAC/C,MAAM,OAAO;AACX,aAAO,WAAW,MAAM,SAAS,CAAC;;KAErC;KACD,CACH;;EAQH,MAAM,KACJ,OACA,MACA,MACA;AAYA,UAXY,MAAM,QAAQ,IACxB,OAAO,QAAQ,KAAK,CAAC,IAAI,OAAO,CAAC,GAAG,OAAO;IACzC,MAAM,OAAO,OAAO,MAAM,aAAa,MAAM,GAAG,GAAG;AAEnD,WAAO;KACL,MAAM,SAAS,GAAG,KAAK;KACvB,GAAG;KACJ;KACD,CACH;;EASH,MAAM,KACJ,MACA,MACA,UACA,SACA;AASA,UARc;IACZ,MAAM,MAAM,KAAK,IAAI,MAAM,MAAM,QAAQ;IACzC,MAAM,MAAM,KAAK,KAAK,MAAM,MAAM,SAAS;IAC3C,mBAAmB;AACjB,YAAO,iBAAiB,KAAK,MAAM,KAAK,KAAK;;IAEhD;;EAeH,MAAM,SACJ,MACA,MACA,UACA,aACA,aACA;AASA,UARc;IACZ,MAAM,MAAM,KAAK,QAAQ,MAAM,MAAM,aAAa,YAAY;IAC9D,MAAM,MAAM,KAAK,KAAK,MAAM,MAAM,SAAS;IAC3C,mBAAmB;AACjB,YAAO,iBAAiB,KAAK,MAAM,KAAK,KAAK;;IAEhD;;EAeJ;;AAGH,SAAgB,iBAId,OACA,OAIC;CACD,MAAMC,QAGC,EAAE;AAET,MAAK,MAAM,SAAS,MAClB,OAAM,KAAK;EACT,MAAM;EACN,MAAM,MAAM,KAAK;EACjB,cAAc,MAAM,KAAK;EACzB,MAAM;EACP,CAAC;AAGJ,MAAK,MAAM,SAAS,MAClB,OAAM,KAAK;EACT,MAAM;EACN,MAAM,MAAM,KAAK;EACjB,cAAc,MAAM,KAAK;EACzB,MAAM;EACP,CAAC;AAGJ,QAAO,EACL,OACD;;AAGH,SAAS,iBACP,MACA,MACY;AACZ,QAAO;EACL;EACA,MAAM,QAAQ,MAAM;AAClB,OAAI,SAAS,MAGX,SAAQ,OAFG,MAAM,OAAO,qBAEP,SAAS,KAAK,SAAS,EAAE,UAAU;GAGtD,MAAM,OAAO,MAAM,M
AAM;AACzB,OAAI,OAAO,KAAK,cAAc,SAC5B,OAAM,IAAI,MACR,oGACD;AACH,UAAO,KAAK;;EAEd,MAAM,WAAW;GACf,MAAM,OAAO,MAAM,MAAM;AAEzB,OAAI,CAAC,KAAK,OACR,OAAM,IAAI,MACR,8EACD;AACH,UAAO,KAAK,MAAM,KAAK,OAAO;;EAEjC"}
+ {"version":3,"file":"server.js","names":["path"],"sources":["../../src/runtime/server.ts"],"sourcesContent":["import type { MetaData, PageData, Source, VirtualFile } from 'fumadocs-core/source';\nimport * as path from 'node:path';\nimport type { DocCollection, DocsCollection, MetaCollection } from '@/config';\nimport type { StandardSchemaV1 } from '@standard-schema/spec';\nimport type { CompiledMDXProperties } from '@/loaders/mdx/build-mdx';\nimport type { InternalTypeConfig, DocData, DocMethods, FileInfo, MetaMethods } from './types';\n\nexport type MetaCollectionEntry<Data> = Data & MetaMethods;\n\nexport type DocCollectionEntry<\n Name extends string = string,\n Frontmatter = unknown,\n TC extends InternalTypeConfig = InternalTypeConfig,\n> = DocData & DocMethods & Frontmatter & TC['DocData'][Name];\n\nexport type AsyncDocCollectionEntry<\n Name extends string = string,\n Frontmatter = unknown,\n TC extends InternalTypeConfig = InternalTypeConfig,\n> = {\n load: () => Promise<DocData & TC['DocData'][Name]>;\n} & DocMethods &\n Frontmatter;\n\nexport interface DocsCollectionEntry<\n Name extends string = string,\n Frontmatter extends PageData = PageData,\n Meta extends MetaData = MetaData,\n TC extends InternalTypeConfig = InternalTypeConfig,\n> {\n docs: DocCollectionEntry<Name, Frontmatter, TC>[];\n meta: MetaCollectionEntry<Meta>[];\n toFumadocsSource: () => Source<{\n pageData: DocCollectionEntry<Name, Frontmatter, TC>;\n metaData: MetaCollectionEntry<Meta>;\n }>;\n}\n\nexport interface AsyncDocsCollectionEntry<\n Name extends string = string,\n Frontmatter extends PageData = PageData,\n Meta extends MetaData = MetaData,\n TC extends InternalTypeConfig = InternalTypeConfig,\n> {\n docs: AsyncDocCollectionEntry<Name, Frontmatter, TC>[];\n meta: MetaCollectionEntry<Meta>[];\n toFumadocsSource: () => Source<{\n pageData: AsyncDocCollectionEntry<Name, Frontmatter, TC>;\n metaData: MetaCollectionEntry<Meta>;\n }>;\n}\n\ntype AwaitableGlobEntries<T> = Record<string, T | (() => Promise<T>)>;\n\nexport type ServerCreate<Config, TC extends InternalTypeConfig = InternalTypeConfig> = ReturnType<\n typeof server<Config, TC>\n>;\n\nexport interface ServerOptions {\n doc?: {\n passthroughs?: string[];\n };\n}\n\nexport function server<Config, TC extends InternalTypeConfig>(options: ServerOptions = {}) {\n const { doc: { passthroughs: docPassthroughs = [] } = {} } = options;\n\n function fileInfo(file: string, base: string): FileInfo {\n if (file.startsWith('./')) {\n file = file.slice(2);\n }\n\n return {\n path: file,\n fullPath: path.join(base, file),\n };\n }\n\n function mapDocData(entry: CompiledMDXProperties): DocData {\n const data: DocData = {\n body: entry.default,\n toc: entry.toc,\n structuredData: entry.structuredData,\n _exports: entry as unknown as Record<string, unknown>,\n };\n\n for (const key of docPassthroughs) {\n // @ts-expect-error -- handle passthrough properties\n data[key] = entry[key];\n }\n\n return data;\n }\n\n return {\n async doc<Name extends keyof Config & string>(\n _name: Name,\n base: string,\n glob: AwaitableGlobEntries<unknown>,\n ) {\n const out = await Promise.all(\n Object.entries(glob).map(async ([k, v]) => {\n const data: CompiledMDXProperties = typeof v === 'function' ? 
await v() : v;\n\n return {\n ...mapDocData(data),\n ...(data.frontmatter as object),\n ...createDocMethods(fileInfo(k, base), () => data),\n } satisfies DocCollectionEntry;\n }),\n );\n\n return out as unknown as Config[Name] extends\n | DocCollection<infer Schema>\n | DocsCollection<infer Schema>\n ? DocCollectionEntry<Name, StandardSchemaV1.InferOutput<Schema>, TC>[]\n : never;\n },\n async docLazy<Name extends keyof Config & string>(\n _name: Name,\n base: string,\n head: AwaitableGlobEntries<unknown>,\n body: Record<string, () => Promise<unknown>>,\n ) {\n const out = await Promise.all(\n Object.entries(head).map(async ([k, v]) => {\n const data = typeof v === 'function' ? await v() : v;\n const content = body[k] as () => Promise<CompiledMDXProperties>;\n\n return {\n ...data,\n ...createDocMethods(fileInfo(k, base), content),\n async load() {\n return mapDocData(await content());\n },\n } satisfies AsyncDocCollectionEntry;\n }),\n );\n\n return out as unknown as Config[Name] extends\n | DocCollection<infer Schema>\n | DocsCollection<infer Schema>\n ? AsyncDocCollectionEntry<Name, StandardSchemaV1.InferOutput<Schema>, TC>[]\n : never;\n },\n async meta<Name extends keyof Config & string>(\n _name: Name,\n base: string,\n glob: AwaitableGlobEntries<unknown>,\n ) {\n const out = await Promise.all(\n Object.entries(glob).map(async ([k, v]) => {\n const data = typeof v === 'function' ? await v() : v;\n\n return {\n info: fileInfo(k, base),\n ...data,\n } satisfies MetaCollectionEntry<unknown>;\n }),\n );\n\n return out as unknown as Config[Name] extends\n | MetaCollection<infer Schema>\n | DocsCollection<StandardSchemaV1, infer Schema>\n ? MetaCollectionEntry<StandardSchemaV1.InferOutput<Schema>>[]\n : never;\n },\n\n async docs<Name extends keyof Config & string>(\n name: Name,\n base: string,\n metaGlob: AwaitableGlobEntries<unknown>,\n docGlob: AwaitableGlobEntries<unknown>,\n ) {\n const entry = {\n docs: await this.doc(name, base, docGlob),\n meta: await this.meta(name, base, metaGlob),\n toFumadocsSource() {\n return toFumadocsSource(this.docs, this.meta);\n },\n } satisfies DocsCollectionEntry;\n\n return entry as Config[Name] extends DocsCollection<infer Page, infer Meta>\n ? StandardSchemaV1.InferOutput<Page> extends PageData\n ? StandardSchemaV1.InferOutput<Meta> extends MetaData\n ? DocsCollectionEntry<\n Name,\n StandardSchemaV1.InferOutput<Page>,\n StandardSchemaV1.InferOutput<Meta>,\n TC\n >\n : never\n : never\n : never;\n },\n async docsLazy<Name extends keyof Config & string>(\n name: Name,\n base: string,\n metaGlob: AwaitableGlobEntries<unknown>,\n docHeadGlob: AwaitableGlobEntries<unknown>,\n docBodyGlob: Record<string, () => Promise<unknown>>,\n ) {\n const entry = {\n docs: await this.docLazy(name, base, docHeadGlob, docBodyGlob),\n meta: await this.meta(name, base, metaGlob),\n toFumadocsSource() {\n return toFumadocsSource(this.docs, this.meta);\n },\n } satisfies AsyncDocsCollectionEntry;\n\n return entry as Config[Name] extends DocsCollection<infer Page, infer Meta>\n ? StandardSchemaV1.InferOutput<Page> extends PageData\n ? StandardSchemaV1.InferOutput<Meta> extends MetaData\n ? 
AsyncDocsCollectionEntry<\n Name,\n StandardSchemaV1.InferOutput<Page>,\n StandardSchemaV1.InferOutput<Meta>,\n TC\n >\n : never\n : never\n : never;\n },\n };\n}\n\nexport function toFumadocsSource<\n Page extends DocMethods & PageData,\n Meta extends MetaMethods & MetaData,\n>(\n pages: Page[],\n metas: Meta[],\n): Source<{\n pageData: Page;\n metaData: Meta;\n}> {\n const files: VirtualFile<{\n pageData: Page;\n metaData: Meta;\n }>[] = [];\n\n for (const entry of pages) {\n files.push({\n type: 'page',\n path: entry.info.path,\n absolutePath: entry.info.fullPath,\n data: entry,\n });\n }\n\n for (const entry of metas) {\n files.push({\n type: 'meta',\n path: entry.info.path,\n absolutePath: entry.info.fullPath,\n data: entry,\n });\n }\n\n return {\n files,\n };\n}\n\nfunction createDocMethods(\n info: FileInfo,\n load: () => CompiledMDXProperties | Promise<CompiledMDXProperties>,\n): DocMethods {\n return {\n info,\n async getText(type) {\n if (type === 'raw') {\n const fs = await import('node:fs/promises');\n\n return (await fs.readFile(info.fullPath)).toString();\n }\n\n const data = await load();\n if (typeof data._markdown !== 'string')\n throw new Error(\n \"getText('processed') requires `includeProcessedMarkdown` to be enabled in your collection config.\",\n );\n return data._markdown;\n },\n async getMDAST() {\n const data = await load();\n\n if (!data._mdast)\n throw new Error(\n 'getMDAST() requires `includeMDAST` to be enabled in your collection config.',\n );\n return JSON.parse(data._mdast);\n },\n };\n}\n"],"mappings":";;;AAgEA,SAAgB,OAA8C,UAAyB,EAAE,EAAE;CACzF,MAAM,EAAE,KAAK,EAAE,cAAc,kBAAkB,EAAE,KAAK,EAAE,KAAK;CAE7D,SAAS,SAAS,MAAc,MAAwB;AACtD,MAAI,KAAK,WAAW,KAAK,CACvB,QAAO,KAAK,MAAM,EAAE;AAGtB,SAAO;GACL,MAAM;GACN,UAAUA,OAAK,KAAK,MAAM,KAAK;GAChC;;CAGH,SAAS,WAAW,OAAuC;EACzD,MAAM,OAAgB;GACpB,MAAM,MAAM;GACZ,KAAK,MAAM;GACX,gBAAgB,MAAM;GACtB,UAAU;GACX;AAED,OAAK,MAAM,OAAO,gBAEhB,MAAK,OAAO,MAAM;AAGpB,SAAO;;AAGT,QAAO;EACL,MAAM,IACJ,OACA,MACA,MACA;AAaA,UAZY,MAAM,QAAQ,IACxB,OAAO,QAAQ,KAAK,CAAC,IAAI,OAAO,CAAC,GAAG,OAAO;IACzC,MAAM,OAA8B,OAAO,MAAM,aAAa,MAAM,GAAG,GAAG;AAE1E,WAAO;KACL,GAAG,WAAW,KAAK;KACnB,GAAI,KAAK;KACT,GAAG,iBAAiB,SAAS,GAAG,KAAK,QAAQ,KAAK;KACnD;KACD,CACH;;EAQH,MAAM,QACJ,OACA,MACA,MACA,MACA;AAgBA,UAfY,MAAM,QAAQ,IACxB,OAAO,QAAQ,KAAK,CAAC,IAAI,OAAO,CAAC,GAAG,OAAO;IACzC,MAAM,OAAO,OAAO,MAAM,aAAa,MAAM,GAAG,GAAG;IACnD,MAAM,UAAU,KAAK;AAErB,WAAO;KACL,GAAG;KACH,GAAG,iBAAiB,SAAS,GAAG,KAAK,EAAE,QAAQ;KAC/C,MAAM,OAAO;AACX,aAAO,WAAW,MAAM,SAAS,CAAC;;KAErC;KACD,CACH;;EAQH,MAAM,KACJ,OACA,MACA,MACA;AAYA,UAXY,MAAM,QAAQ,IACxB,OAAO,QAAQ,KAAK,CAAC,IAAI,OAAO,CAAC,GAAG,OAAO;IACzC,MAAM,OAAO,OAAO,MAAM,aAAa,MAAM,GAAG,GAAG;AAEnD,WAAO;KACL,MAAM,SAAS,GAAG,KAAK;KACvB,GAAG;KACJ;KACD,CACH;;EASH,MAAM,KACJ,MACA,MACA,UACA,SACA;AASA,UARc;IACZ,MAAM,MAAM,KAAK,IAAI,MAAM,MAAM,QAAQ;IACzC,MAAM,MAAM,KAAK,KAAK,MAAM,MAAM,SAAS;IAC3C,mBAAmB;AACjB,YAAO,iBAAiB,KAAK,MAAM,KAAK,KAAK;;IAEhD;;EAeH,MAAM,SACJ,MACA,MACA,UACA,aACA,aACA;AASA,UARc;IACZ,MAAM,MAAM,KAAK,QAAQ,MAAM,MAAM,aAAa,YAAY;IAC9D,MAAM,MAAM,KAAK,KAAK,MAAM,MAAM,SAAS;IAC3C,mBAAmB;AACjB,YAAO,iBAAiB,KAAK,MAAM,KAAK,KAAK;;IAEhD;;EAeJ;;AAGH,SAAgB,iBAId,OACA,OAIC;CACD,MAAM,QAGC,EAAE;AAET,MAAK,MAAM,SAAS,MAClB,OAAM,KAAK;EACT,MAAM;EACN,MAAM,MAAM,KAAK;EACjB,cAAc,MAAM,KAAK;EACzB,MAAM;EACP,CAAC;AAGJ,MAAK,MAAM,SAAS,MAClB,OAAM,KAAK;EACT,MAAM;EACN,MAAM,MAAM,KAAK;EACjB,cAAc,MAAM,KAAK;EACzB,MAAM;EACP,CAAC;AAGJ,QAAO,EACL,OACD;;AAGH,SAAS,iBACP,MACA,MACY;AACZ,QAAO;EACL;EACA,MAAM,QAAQ,MAAM;AAClB,OAAI,SAAS,MAGX,SAAQ,OAFG,MAAM,OAAO,qBAEP,SAAS,KAAK,SAAS,EAAE,UAAU;GAGtD,MAAM,OAAO,MAAM,MAAM
;AACzB,OAAI,OAAO,KAAK,cAAc,SAC5B,OAAM,IAAI,MACR,oGACD;AACH,UAAO,KAAK;;EAEd,MAAM,WAAW;GACf,MAAM,OAAO,MAAM,MAAM;AAEzB,OAAI,CAAC,KAAK,OACR,OAAM,IAAI,MACR,8EACD;AACH,UAAO,KAAK,MAAM,KAAK,OAAO;;EAEjC"}
@@ -1,11 +1,11 @@
  import "../fuma-matter-CHgJa_-B.js";
- import { t as createMdxLoader } from "../mdx-DMZ9tsAa.js";
+ import { t as createMdxLoader } from "../mdx-CRT-jSh5.js";
  import "../preset-gmDZnBcg.js";
  import { t as buildConfig } from "../build-BTTNEFmV.js";
  import { n as createCore, r as ValidationError, t as _Defaults } from "../core-DjldE3H9.js";
  import "../codegen-DleOVLNr.js";
- import { a as createIntegratedConfigLoader, r as toVite } from "../adapter-DG-viEbG.js";
- import { t as createMetaLoader } from "../meta-DyieTM4Z.js";
+ import { a as createIntegratedConfigLoader, r as toVite } from "../adapter-DI4cexsC.js";
+ import { t as createMetaLoader } from "../meta-BKBx8Gab.js";
  import { t as indexFile } from "../index-file-D9HsrWU_.js";
  import { mergeConfig } from "vite";
 
@@ -1,8 +1,8 @@
  import "../fuma-matter-CHgJa_-B.js";
- import { t as createMdxLoader } from "../mdx-DMZ9tsAa.js";
+ import { t as createMdxLoader } from "../mdx-CRT-jSh5.js";
  import "../core-DjldE3H9.js";
  import "../codegen-DleOVLNr.js";
- import { i as toWebpack, o as createStandaloneConfigLoader } from "../adapter-DG-viEbG.js";
+ import { i as toWebpack, o as createStandaloneConfigLoader } from "../adapter-DI4cexsC.js";
  import { t as getCore } from "../webpack--VSEE6Zp.js";
 
  //#region src/webpack/mdx.ts
@@ -1 +1 @@
- {"version":3,"file":"mdx.js","names":["instance: WebpackLoader | undefined"],"sources":["../../src/webpack/mdx.ts"],"sourcesContent":["import { type LoaderContext } from 'webpack';\nimport { createMdxLoader } from '@/loaders/mdx';\nimport { toWebpack, type WebpackLoader } from '@/loaders/adapter';\nimport { createStandaloneConfigLoader } from '@/loaders/config';\nimport { getCore, type WebpackLoaderOptions } from '@/webpack';\n\nlet instance: WebpackLoader | undefined;\n\nexport default async function loader(\n this: LoaderContext<WebpackLoaderOptions>,\n source: string,\n callback: LoaderContext<WebpackLoaderOptions>['callback'],\n): Promise<void> {\n const options = this.getOptions();\n this.cacheable(true);\n this.addDependency(options.absoluteCompiledConfigPath);\n\n if (!instance) {\n instance = toWebpack(\n createMdxLoader(\n createStandaloneConfigLoader({\n core: getCore(options),\n buildConfig: false,\n mode: options.isDev ? 'dev' : 'production',\n }),\n ),\n );\n }\n\n await instance.call(this, source, callback);\n}\n"],"mappings":";;;;;;;;AAMA,IAAIA;AAEJ,eAA8B,OAE5B,QACA,UACe;CACf,MAAM,UAAU,KAAK,YAAY;AACjC,MAAK,UAAU,KAAK;AACpB,MAAK,cAAc,QAAQ,2BAA2B;AAEtD,KAAI,CAAC,SACH,YAAW,UACT,gBACE,6BAA6B;EAC3B,MAAM,QAAQ,QAAQ;EACtB,aAAa;EACb,MAAM,QAAQ,QAAQ,QAAQ;EAC/B,CAAC,CACH,CACF;AAGH,OAAM,SAAS,KAAK,MAAM,QAAQ,SAAS"}
+ {"version":3,"file":"mdx.js","names":[],"sources":["../../src/webpack/mdx.ts"],"sourcesContent":["import { type LoaderContext } from 'webpack';\nimport { createMdxLoader } from '@/loaders/mdx';\nimport { toWebpack, type WebpackLoader } from '@/loaders/adapter';\nimport { createStandaloneConfigLoader } from '@/loaders/config';\nimport { getCore, type WebpackLoaderOptions } from '@/webpack';\n\nlet instance: WebpackLoader | undefined;\n\nexport default async function loader(\n this: LoaderContext<WebpackLoaderOptions>,\n source: string,\n callback: LoaderContext<WebpackLoaderOptions>['callback'],\n): Promise<void> {\n const options = this.getOptions();\n this.cacheable(true);\n this.addDependency(options.absoluteCompiledConfigPath);\n\n if (!instance) {\n instance = toWebpack(\n createMdxLoader(\n createStandaloneConfigLoader({\n core: getCore(options),\n buildConfig: false,\n mode: options.isDev ? 'dev' : 'production',\n }),\n ),\n );\n }\n\n await instance.call(this, source, callback);\n}\n"],"mappings":";;;;;;;;AAMA,IAAI;AAEJ,eAA8B,OAE5B,QACA,UACe;CACf,MAAM,UAAU,KAAK,YAAY;AACjC,MAAK,UAAU,KAAK;AACpB,MAAK,cAAc,QAAQ,2BAA2B;AAEtD,KAAI,CAAC,SACH,YAAW,UACT,gBACE,6BAA6B;EAC3B,MAAM,QAAQ,QAAQ;EACtB,aAAa;EACb,MAAM,QAAQ,QAAQ,QAAQ;EAC/B,CAAC,CACH,CACF;AAGH,OAAM,SAAS,KAAK,MAAM,QAAQ,SAAS"}
@@ -1,7 +1,7 @@
  import "../core-DjldE3H9.js";
  import "../codegen-DleOVLNr.js";
- import { i as toWebpack, o as createStandaloneConfigLoader } from "../adapter-DG-viEbG.js";
- import { t as createMetaLoader } from "../meta-DyieTM4Z.js";
+ import { i as toWebpack, o as createStandaloneConfigLoader } from "../adapter-DI4cexsC.js";
+ import { t as createMetaLoader } from "../meta-BKBx8Gab.js";
  import { t as getCore } from "../webpack--VSEE6Zp.js";
 
  //#region src/webpack/meta.ts
@@ -1 +1 @@
- {"version":3,"file":"meta.js","names":["instance: WebpackLoader | undefined"],"sources":["../../src/webpack/meta.ts"],"sourcesContent":["import { type LoaderContext } from 'webpack';\nimport { toWebpack, type WebpackLoader } from '@/loaders/adapter';\nimport { createStandaloneConfigLoader } from '@/loaders/config';\nimport { createMetaLoader } from '@/loaders/meta';\nimport { getCore, type WebpackLoaderOptions } from '@/webpack';\n\nlet instance: WebpackLoader | undefined;\n\nexport default async function loader(\n this: LoaderContext<WebpackLoaderOptions>,\n source: string,\n callback: LoaderContext<WebpackLoaderOptions>['callback'],\n): Promise<void> {\n const options = this.getOptions();\n this.cacheable(true);\n this.addDependency(options.absoluteCompiledConfigPath);\n\n if (!instance) {\n instance = toWebpack(\n createMetaLoader(\n createStandaloneConfigLoader({\n core: getCore(options),\n buildConfig: false,\n mode: options.isDev ? 'dev' : 'production',\n }),\n {\n json: 'json',\n yaml: 'js',\n },\n ),\n );\n }\n\n await instance.call(this, source, callback);\n}\n"],"mappings":";;;;;;;AAMA,IAAIA;AAEJ,eAA8B,OAE5B,QACA,UACe;CACf,MAAM,UAAU,KAAK,YAAY;AACjC,MAAK,UAAU,KAAK;AACpB,MAAK,cAAc,QAAQ,2BAA2B;AAEtD,KAAI,CAAC,SACH,YAAW,UACT,iBACE,6BAA6B;EAC3B,MAAM,QAAQ,QAAQ;EACtB,aAAa;EACb,MAAM,QAAQ,QAAQ,QAAQ;EAC/B,CAAC,EACF;EACE,MAAM;EACN,MAAM;EACP,CACF,CACF;AAGH,OAAM,SAAS,KAAK,MAAM,QAAQ,SAAS"}
+ {"version":3,"file":"meta.js","names":[],"sources":["../../src/webpack/meta.ts"],"sourcesContent":["import { type LoaderContext } from 'webpack';\nimport { toWebpack, type WebpackLoader } from '@/loaders/adapter';\nimport { createStandaloneConfigLoader } from '@/loaders/config';\nimport { createMetaLoader } from '@/loaders/meta';\nimport { getCore, type WebpackLoaderOptions } from '@/webpack';\n\nlet instance: WebpackLoader | undefined;\n\nexport default async function loader(\n this: LoaderContext<WebpackLoaderOptions>,\n source: string,\n callback: LoaderContext<WebpackLoaderOptions>['callback'],\n): Promise<void> {\n const options = this.getOptions();\n this.cacheable(true);\n this.addDependency(options.absoluteCompiledConfigPath);\n\n if (!instance) {\n instance = toWebpack(\n createMetaLoader(\n createStandaloneConfigLoader({\n core: getCore(options),\n buildConfig: false,\n mode: options.isDev ? 'dev' : 'production',\n }),\n {\n json: 'json',\n yaml: 'js',\n },\n ),\n );\n }\n\n await instance.call(this, source, callback);\n}\n"],"mappings":";;;;;;;AAMA,IAAI;AAEJ,eAA8B,OAE5B,QACA,UACe;CACf,MAAM,UAAU,KAAK,YAAY;AACjC,MAAK,UAAU,KAAK;AACpB,MAAK,cAAc,QAAQ,2BAA2B;AAEtD,KAAI,CAAC,SACH,YAAW,UACT,iBACE,6BAA6B;EAC3B,MAAM,QAAQ,QAAQ;EACtB,aAAa;EACb,MAAM,QAAQ,QAAQ,QAAQ;EAC/B,CAAC,EACF;EACE,MAAM;EACN,MAAM;EACP,CACF,CACF;AAGH,OAAM,SAAS,KAAK,MAAM,QAAQ,SAAS"}
@@ -1 +1 @@
- {"version":3,"file":"webpack--VSEE6Zp.js","names":["core: Core"],"sources":["../src/webpack/index.ts"],"sourcesContent":["import { type Core, createCore } from '@/core';\n\nexport interface WebpackLoaderOptions {\n absoluteCompiledConfigPath: string;\n configPath: string;\n outDir: string;\n isDev: boolean;\n}\n\nlet core: Core;\n\nexport function getCore(options: WebpackLoaderOptions) {\n return (core ??= createCore({\n environment: 'webpack',\n outDir: options.outDir,\n configPath: options.configPath,\n }));\n}\n"],"mappings":";;;AASA,IAAIA;AAEJ,SAAgB,QAAQ,SAA+B;AACrD,QAAQ,SAAS,WAAW;EAC1B,aAAa;EACb,QAAQ,QAAQ;EAChB,YAAY,QAAQ;EACrB,CAAC"}
+ {"version":3,"file":"webpack--VSEE6Zp.js","names":[],"sources":["../src/webpack/index.ts"],"sourcesContent":["import { type Core, createCore } from '@/core';\n\nexport interface WebpackLoaderOptions {\n absoluteCompiledConfigPath: string;\n configPath: string;\n outDir: string;\n isDev: boolean;\n}\n\nlet core: Core;\n\nexport function getCore(options: WebpackLoaderOptions) {\n return (core ??= createCore({\n environment: 'webpack',\n outDir: options.outDir,\n configPath: options.configPath,\n }));\n}\n"],"mappings":";;;AASA,IAAI;AAEJ,SAAgB,QAAQ,SAA+B;AACrD,QAAQ,SAAS,WAAW;EAC1B,aAAa;EACb,QAAQ,QAAQ;EAChB,YAAY,QAAQ;EACrB,CAAC"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "fumadocs-mdx",
- "version": "14.2.4",
+ "version": "14.2.5",
  "description": "The built-in source for Fumadocs",
  "keywords": [
  "Docs",
@@ -76,16 +76,16 @@
  "unist-util-remove-position": "^5.0.0",
  "unist-util-visit": "^5.0.0",
  "vfile": "^6.0.3",
- "zod": "^4.2.1"
+ "zod": "^4.3.5"
  },
  "devDependencies": {
  "@types/bun": "^1.3.5",
  "@types/js-yaml": "^4.0.9",
  "@types/mdast": "^4.0.4",
  "@types/mdx": "^2.0.13",
- "@types/node": "^24.10.2",
+ "@types/node": "^25.0.5",
  "@types/picomatch": "^4.0.2",
- "@types/react": "^19.2.7",
+ "@types/react": "^19.2.8",
  "mdast-util-directive": "^3.1.0",
  "mdast-util-mdx-jsx": "^3.2.0",
  "next": "16.1.1",
@@ -93,13 +93,13 @@
  "remark": "^15.0.1",
  "remark-directive": "^4.0.0",
  "remark-stringify": "^11.0.0",
- "rollup": "^4.54.0",
- "tsdown": "^0.18.3",
- "vite": "^7.3.0",
+ "rollup": "^4.55.1",
+ "tsdown": "^0.19.0",
+ "vite": "^7.3.1",
  "webpack": "^5.104.1",
  "@fumadocs/mdx-remote": "1.4.4",
+ "fumadocs-core": "16.4.7",
  "eslint-config-custom": "0.0.0",
- "fumadocs-core": "16.4.2",
  "tsconfig": "0.0.0"
  },
  "peerDependencies": {
@@ -1 +0,0 @@
- {"version":3,"file":"adapter-DG-viEbG.js","names":["prev:\n | {\n hash: string;\n init: Promise<void>;\n }\n | undefined","file","input: LoaderInput"],"sources":["../src/loaders/config.ts","../src/loaders/adapter.ts"],"sourcesContent":["import type { Core } from '@/core';\nimport fs from 'node:fs/promises';\n\nexport interface ConfigLoader {\n getCore(): Promise<Core>;\n}\n\nexport function createStandaloneConfigLoader({\n core,\n buildConfig,\n mode,\n}: {\n /**\n * core (not initialized)\n */\n core: Core;\n buildConfig: boolean;\n /**\n * In dev mode, the config file is dynamically re-loaded when it's updated.\n */\n mode: 'dev' | 'production';\n}): ConfigLoader {\n let prev:\n | {\n hash: string;\n init: Promise<void>;\n }\n | undefined;\n\n async function getConfigHash(): Promise<string> {\n if (mode === 'production') return 'static';\n\n const stats = await fs.stat(core.getOptions().configPath).catch(() => {\n throw new Error('Cannot find config file');\n });\n\n return stats.mtime.getTime().toString();\n }\n\n return {\n async getCore() {\n const hash = await getConfigHash();\n if (!prev || hash !== prev.hash) {\n prev = {\n hash,\n init: (async () => {\n const { loadConfig } = await import('../config/load-from-file');\n\n await core.init({\n config: loadConfig(core, buildConfig),\n });\n })(),\n };\n }\n\n await prev.init;\n return core;\n },\n };\n}\n\n/**\n * create config loader from initialized core\n */\nexport function createIntegratedConfigLoader(core: Core): ConfigLoader {\n return {\n async getCore() {\n return core;\n },\n };\n}\n","import type { CompilerOptions } from '@/loaders/mdx/build-mdx';\nimport type { LoadFnOutput, LoadHook } from 'node:module';\nimport { fileURLToPath } from 'node:url';\nimport fs from 'node:fs/promises';\nimport type { SourceMap, TransformPluginContext } from 'rollup';\nimport type { TransformResult } from 'vite';\nimport { parse } from 'node:querystring';\nimport { ValidationError } from '@/utils/validation';\nimport type { LoaderContext } from 'webpack';\nimport { readFileSync } from 'node:fs';\n\nexport interface LoaderInput {\n development: boolean;\n compiler: CompilerOptions;\n\n filePath: string;\n query: Record<string, string | string[] | undefined>;\n getSource: () => string | Promise<string>;\n}\n\nexport interface LoaderOutput {\n code: string;\n map?: unknown;\n\n /**\n * Only supported in Vite 8.\n *\n * Explicitly define the transformed module type, for unsupported environments, you need to consider the differences between each bundler.\n */\n moduleType?: 'js' | 'json';\n}\n\ntype Awaitable<T> = T | Promise<T>;\n\nexport interface Loader {\n /**\n * Filter file paths, the input can be either a file URL or file path.\n *\n * Must take resource query into consideration.\n */\n test?: RegExp;\n\n /**\n * Transform input into JavaScript.\n *\n * Returns:\n * - `LoaderOutput`: JavaScript code & source map.\n * - `null`: skip the loader. Fallback to default behaviour if possible, otherwise the adapter will try workarounds.\n */\n load: (input: LoaderInput) => Awaitable<LoaderOutput | null>;\n\n bun?: {\n /**\n * 1. Bun doesn't allow `null` in loaders.\n * 2. 
Bun requires sync result to support dynamic require().\n */\n load?: (source: string, input: LoaderInput) => Awaitable<Bun.OnLoadResult>;\n };\n}\n\nexport function toNode(loader: Loader): LoadHook {\n return async (url, _context, nextLoad): Promise<LoadFnOutput> => {\n if (url.startsWith('file:///') && (!loader.test || loader.test.test(url))) {\n const parsedUrl = new URL(url);\n const filePath = fileURLToPath(parsedUrl);\n\n const result = await loader.load({\n filePath,\n query: Object.fromEntries(parsedUrl.searchParams.entries()),\n async getSource() {\n return (await fs.readFile(filePath)).toString();\n },\n development: false,\n compiler: {\n addDependency() {},\n },\n });\n\n if (result) {\n return {\n source: result.code,\n format: 'module',\n shortCircuit: true,\n };\n }\n }\n\n return nextLoad(url);\n };\n}\n\nexport interface ViteLoader {\n filter: (id: string) => boolean;\n\n transform: (\n this: TransformPluginContext,\n value: string,\n id: string,\n ) => Promise<TransformResult | null>;\n}\n\nexport function toVite(loader: Loader): ViteLoader {\n return {\n filter(id) {\n return !loader.test || loader.test.test(id);\n },\n async transform(value, id) {\n const [file, query = ''] = id.split('?', 2);\n\n const result = await loader.load({\n filePath: file,\n query: parse(query),\n getSource() {\n return value;\n },\n development: this.environment.mode === 'dev',\n compiler: {\n addDependency: (file) => {\n this.addWatchFile(file);\n },\n },\n });\n\n if (result === null) return null;\n return {\n code: result.code,\n map: result.map as SourceMap,\n moduleType: result.moduleType,\n };\n },\n };\n}\n\nexport type WebpackLoader = (\n this: LoaderContext<unknown>,\n source: string,\n callback: LoaderContext<unknown>['callback'],\n) => Promise<void>;\n\n/**\n * need to handle the `test` regex in Webpack config instead.\n */\nexport function toWebpack(loader: Loader): WebpackLoader {\n return async function (source, callback) {\n try {\n const result = await loader.load({\n filePath: this.resourcePath,\n query: parse(this.resourceQuery.slice(1)),\n getSource() {\n return source;\n },\n development: this.mode === 'development',\n compiler: this,\n });\n\n if (result === null) {\n callback(undefined, source);\n } else {\n callback(undefined, result.code, result.map as string);\n }\n } catch (error) {\n if (error instanceof ValidationError) {\n return callback(new Error(await error.toStringFormatted()));\n }\n\n if (!(error instanceof Error)) throw error;\n callback(error);\n }\n };\n}\n\nexport function toBun(loader: Loader) {\n function toResult(output: LoaderOutput | null): Bun.OnLoadResult {\n // it errors, treat this as an exception\n if (!output) return;\n\n return {\n contents: output.code,\n loader: output.moduleType ?? 'js',\n };\n }\n\n return (build: Bun.PluginBuilder) => {\n // avoid using async here, because it will cause dynamic require() to fail\n build.onLoad({ filter: loader.test ?? 
/.+/ }, (args) => {\n const [filePath, query = ''] = args.path.split('?', 2);\n const input: LoaderInput = {\n async getSource() {\n return Bun.file(filePath).text();\n },\n query: parse(query),\n filePath,\n development: false,\n compiler: {\n addDependency() {},\n },\n };\n\n if (loader.bun?.load) {\n return loader.bun.load(readFileSync(filePath).toString(), input);\n }\n\n const result = loader.load(input);\n if (result instanceof Promise) {\n return result.then(toResult);\n }\n return toResult(result);\n });\n };\n}\n"],"mappings":";;;;;;;AAOA,SAAgB,6BAA6B,EAC3C,MACA,aACA,QAWe;CACf,IAAIA;CAOJ,eAAe,gBAAiC;AAC9C,MAAI,SAAS,aAAc,QAAO;AAMlC,UAJc,MAAM,GAAG,KAAK,KAAK,YAAY,CAAC,WAAW,CAAC,YAAY;AACpE,SAAM,IAAI,MAAM,0BAA0B;IAC1C,EAEW,MAAM,SAAS,CAAC,UAAU;;AAGzC,QAAO,EACL,MAAM,UAAU;EACd,MAAM,OAAO,MAAM,eAAe;AAClC,MAAI,CAAC,QAAQ,SAAS,KAAK,KACzB,QAAO;GACL;GACA,OAAO,YAAY;IACjB,MAAM,EAAE,eAAe,MAAM,OAAO;AAEpC,UAAM,KAAK,KAAK,EACd,QAAQ,WAAW,MAAM,YAAY,EACtC,CAAC;OACA;GACL;AAGH,QAAM,KAAK;AACX,SAAO;IAEV;;;;;AAMH,SAAgB,6BAA6B,MAA0B;AACrE,QAAO,EACL,MAAM,UAAU;AACd,SAAO;IAEV;;;;;ACTH,SAAgB,OAAO,QAA0B;AAC/C,QAAO,OAAO,KAAK,UAAU,aAAoC;AAC/D,MAAI,IAAI,WAAW,WAAW,KAAK,CAAC,OAAO,QAAQ,OAAO,KAAK,KAAK,IAAI,GAAG;GACzE,MAAM,YAAY,IAAI,IAAI,IAAI;GAC9B,MAAM,WAAW,cAAc,UAAU;GAEzC,MAAM,SAAS,MAAM,OAAO,KAAK;IAC/B;IACA,OAAO,OAAO,YAAY,UAAU,aAAa,SAAS,CAAC;IAC3D,MAAM,YAAY;AAChB,aAAQ,MAAM,GAAG,SAAS,SAAS,EAAE,UAAU;;IAEjD,aAAa;IACb,UAAU,EACR,gBAAgB,IACjB;IACF,CAAC;AAEF,OAAI,OACF,QAAO;IACL,QAAQ,OAAO;IACf,QAAQ;IACR,cAAc;IACf;;AAIL,SAAO,SAAS,IAAI;;;AAcxB,SAAgB,OAAO,QAA4B;AACjD,QAAO;EACL,OAAO,IAAI;AACT,UAAO,CAAC,OAAO,QAAQ,OAAO,KAAK,KAAK,GAAG;;EAE7C,MAAM,UAAU,OAAO,IAAI;GACzB,MAAM,CAAC,MAAM,QAAQ,MAAM,GAAG,MAAM,KAAK,EAAE;GAE3C,MAAM,SAAS,MAAM,OAAO,KAAK;IAC/B,UAAU;IACV,OAAO,MAAM,MAAM;IACnB,YAAY;AACV,YAAO;;IAET,aAAa,KAAK,YAAY,SAAS;IACvC,UAAU,EACR,gBAAgB,WAAS;AACvB,UAAK,aAAaC,OAAK;OAE1B;IACF,CAAC;AAEF,OAAI,WAAW,KAAM,QAAO;AAC5B,UAAO;IACL,MAAM,OAAO;IACb,KAAK,OAAO;IACZ,YAAY,OAAO;IACpB;;EAEJ;;;;;AAYH,SAAgB,UAAU,QAA+B;AACvD,QAAO,eAAgB,QAAQ,UAAU;AACvC,MAAI;GACF,MAAM,SAAS,MAAM,OAAO,KAAK;IAC/B,UAAU,KAAK;IACf,OAAO,MAAM,KAAK,cAAc,MAAM,EAAE,CAAC;IACzC,YAAY;AACV,YAAO;;IAET,aAAa,KAAK,SAAS;IAC3B,UAAU;IACX,CAAC;AAEF,OAAI,WAAW,KACb,UAAS,QAAW,OAAO;OAE3B,UAAS,QAAW,OAAO,MAAM,OAAO,IAAc;WAEjD,OAAO;AACd,OAAI,iBAAiB,gBACnB,QAAO,SAAS,IAAI,MAAM,MAAM,MAAM,mBAAmB,CAAC,CAAC;AAG7D,OAAI,EAAE,iBAAiB,OAAQ,OAAM;AACrC,YAAS,MAAM;;;;AAKrB,SAAgB,MAAM,QAAgB;CACpC,SAAS,SAAS,QAA+C;AAE/D,MAAI,CAAC,OAAQ;AAEb,SAAO;GACL,UAAU,OAAO;GACjB,QAAQ,OAAO,cAAc;GAC9B;;AAGH,SAAQ,UAA6B;AAEnC,QAAM,OAAO,EAAE,QAAQ,OAAO,QAAQ,MAAM,GAAG,SAAS;GACtD,MAAM,CAAC,UAAU,QAAQ,MAAM,KAAK,KAAK,MAAM,KAAK,EAAE;GACtD,MAAMC,QAAqB;IACzB,MAAM,YAAY;AAChB,YAAO,IAAI,KAAK,SAAS,CAAC,MAAM;;IAElC,OAAO,MAAM,MAAM;IACnB;IACA,aAAa;IACb,UAAU,EACR,gBAAgB,IACjB;IACF;AAED,OAAI,OAAO,KAAK,KACd,QAAO,OAAO,IAAI,KAAK,aAAa,SAAS,CAAC,UAAU,EAAE,MAAM;GAGlE,MAAM,SAAS,OAAO,KAAK,MAAM;AACjC,OAAI,kBAAkB,QACpB,QAAO,OAAO,KAAK,SAAS;AAE9B,UAAO,SAAS,OAAO;IACvB"}
@@ -1 +0,0 @@
- {"version":3,"file":"mdx-DMZ9tsAa.js","names":["after: (() => Promise<void>) | undefined","docCollection: DocCollectionItem | undefined"],"sources":["../src/loaders/mdx/index.ts"],"sourcesContent":["import { fumaMatter } from '@/utils/fuma-matter';\nimport type { SourceMap } from 'rollup';\nimport type { Loader } from '@/loaders/adapter';\nimport { z } from 'zod';\nimport type { DocCollectionItem } from '@/config/build';\nimport fs from 'node:fs/promises';\nimport path from 'node:path';\nimport { createHash } from 'node:crypto';\nimport type { ConfigLoader } from '@/loaders/config';\nimport { mdxLoaderGlob } from '..';\n\nconst querySchema = z\n .object({\n only: z.literal(['frontmatter', 'all']).default('all'),\n collection: z.string().optional(),\n workspace: z.string().optional(),\n })\n .loose();\n\nconst cacheEntry = z.object({\n code: z.string(),\n map: z.any().optional(),\n hash: z.string().optional(),\n});\n\ntype CacheEntry = z.infer<typeof cacheEntry>;\n\nexport function createMdxLoader({ getCore }: ConfigLoader): Loader {\n return {\n test: mdxLoaderGlob,\n async load({ getSource, development: isDevelopment, query, compiler, filePath }) {\n let core = await getCore();\n const value = await getSource();\n const matter = fumaMatter(value);\n const { collection: collectionName, workspace, only } = querySchema.parse(query);\n if (workspace) {\n core = core.getWorkspaces().get(workspace) ?? core;\n }\n\n let after: (() => Promise<void>) | undefined;\n\n const { experimentalBuildCache = false } = core.getConfig().global;\n if (!isDevelopment && experimentalBuildCache) {\n const cacheDir = experimentalBuildCache;\n const cacheKey = `${collectionName ?? 'global'}_${generateCacheHash(filePath)}`;\n\n const cached = await fs\n .readFile(path.join(cacheDir, cacheKey))\n .then((content) => cacheEntry.parse(JSON.parse(content.toString())))\n .catch(() => null);\n\n if (cached && cached.hash === generateCacheHash(value)) return cached;\n after = async () => {\n await fs.mkdir(cacheDir, { recursive: true });\n await fs.writeFile(\n path.join(cacheDir, cacheKey),\n JSON.stringify({\n ...out,\n hash: generateCacheHash(value),\n } satisfies CacheEntry),\n );\n };\n }\n\n const collection = collectionName ? 
core.getCollection(collectionName) : undefined;\n\n let docCollection: DocCollectionItem | undefined;\n switch (collection?.type) {\n case 'doc':\n docCollection = collection;\n break;\n case 'docs':\n docCollection = collection.docs;\n break;\n }\n\n if (docCollection) {\n matter.data = await core.transformFrontmatter(\n { collection: docCollection, filePath, source: value },\n matter.data as Record<string, unknown>,\n );\n }\n\n if (only === 'frontmatter') {\n return {\n code: `export const frontmatter = ${JSON.stringify(matter.data)}`,\n map: null,\n };\n }\n\n const { buildMDX } = await import('@/loaders/mdx/build-mdx');\n const compiled = await buildMDX(core, docCollection, {\n isDevelopment,\n // ensure the line number is correct in errors\n source: '\\n'.repeat(countLines(matter.matter)) + matter.content,\n filePath,\n frontmatter: matter.data as Record<string, unknown>,\n _compiler: compiler,\n environment: 'bundler',\n });\n\n const out = {\n code: String(compiled.value),\n map: compiled.map as SourceMap,\n };\n\n await after?.();\n return out;\n },\n };\n}\n\nfunction generateCacheHash(input: string): string {\n return createHash('md5').update(input).digest('hex');\n}\n\nfunction countLines(s: string) {\n let num = 0;\n\n for (const c of s) {\n if (c === '\\n') num++;\n }\n\n return num;\n}\n"],"mappings":";;;;;;;;AAWA,MAAM,cAAc,EACjB,OAAO;CACN,MAAM,EAAE,QAAQ,CAAC,eAAe,MAAM,CAAC,CAAC,QAAQ,MAAM;CACtD,YAAY,EAAE,QAAQ,CAAC,UAAU;CACjC,WAAW,EAAE,QAAQ,CAAC,UAAU;CACjC,CAAC,CACD,OAAO;AAEV,MAAM,aAAa,EAAE,OAAO;CAC1B,MAAM,EAAE,QAAQ;CAChB,KAAK,EAAE,KAAK,CAAC,UAAU;CACvB,MAAM,EAAE,QAAQ,CAAC,UAAU;CAC5B,CAAC;AAIF,SAAgB,gBAAgB,EAAE,WAAiC;AACjE,QAAO;EACL,MAAM;EACN,MAAM,KAAK,EAAE,WAAW,aAAa,eAAe,OAAO,UAAU,YAAY;GAC/E,IAAI,OAAO,MAAM,SAAS;GAC1B,MAAM,QAAQ,MAAM,WAAW;GAC/B,MAAM,SAAS,WAAW,MAAM;GAChC,MAAM,EAAE,YAAY,gBAAgB,WAAW,SAAS,YAAY,MAAM,MAAM;AAChF,OAAI,UACF,QAAO,KAAK,eAAe,CAAC,IAAI,UAAU,IAAI;GAGhD,IAAIA;GAEJ,MAAM,EAAE,yBAAyB,UAAU,KAAK,WAAW,CAAC;AAC5D,OAAI,CAAC,iBAAiB,wBAAwB;IAC5C,MAAM,WAAW;IACjB,MAAM,WAAW,GAAG,kBAAkB,SAAS,GAAG,kBAAkB,SAAS;IAE7E,MAAM,SAAS,MAAM,GAClB,SAAS,KAAK,KAAK,UAAU,SAAS,CAAC,CACvC,MAAM,YAAY,WAAW,MAAM,KAAK,MAAM,QAAQ,UAAU,CAAC,CAAC,CAAC,CACnE,YAAY,KAAK;AAEpB,QAAI,UAAU,OAAO,SAAS,kBAAkB,MAAM,CAAE,QAAO;AAC/D,YAAQ,YAAY;AAClB,WAAM,GAAG,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC;AAC7C,WAAM,GAAG,UACP,KAAK,KAAK,UAAU,SAAS,EAC7B,KAAK,UAAU;MACb,GAAG;MACH,MAAM,kBAAkB,MAAM;MAC/B,CAAsB,CACxB;;;GAIL,MAAM,aAAa,iBAAiB,KAAK,cAAc,eAAe,GAAG;GAEzE,IAAIC;AACJ,WAAQ,YAAY,MAApB;IACE,KAAK;AACH,qBAAgB;AAChB;IACF,KAAK;AACH,qBAAgB,WAAW;AAC3B;;AAGJ,OAAI,cACF,QAAO,OAAO,MAAM,KAAK,qBACvB;IAAE,YAAY;IAAe;IAAU,QAAQ;IAAO,EACtD,OAAO,KACR;AAGH,OAAI,SAAS,cACX,QAAO;IACL,MAAM,8BAA8B,KAAK,UAAU,OAAO,KAAK;IAC/D,KAAK;IACN;GAGH,MAAM,EAAE,aAAa,MAAM,OAAO;GAClC,MAAM,WAAW,MAAM,SAAS,MAAM,eAAe;IACnD;IAEA,QAAQ,KAAK,OAAO,WAAW,OAAO,OAAO,CAAC,GAAG,OAAO;IACxD;IACA,aAAa,OAAO;IACpB,WAAW;IACX,aAAa;IACd,CAAC;GAEF,MAAM,MAAM;IACV,MAAM,OAAO,SAAS,MAAM;IAC5B,KAAK,SAAS;IACf;AAED,SAAM,SAAS;AACf,UAAO;;EAEV;;AAGH,SAAS,kBAAkB,OAAuB;AAChD,QAAO,WAAW,MAAM,CAAC,OAAO,MAAM,CAAC,OAAO,MAAM;;AAGtD,SAAS,WAAW,GAAW;CAC7B,IAAI,MAAM;AAEV,MAAK,MAAM,KAAK,EACd,KAAI,MAAM,KAAM;AAGlB,QAAO"}
@@ -1 +0,0 @@
- {"version":3,"file":"meta-DyieTM4Z.js","names":["metaCollection: MetaCollectionItem | undefined"],"sources":["../src/loaders/meta.ts"],"sourcesContent":["import type { Loader, LoaderInput } from '@/loaders/adapter';\nimport type { ConfigLoader } from '@/loaders/config';\nimport { load } from 'js-yaml';\nimport { z } from 'zod';\nimport { metaLoaderGlob } from '.';\nimport type { MetaCollectionItem } from '@/config/build';\n\nconst querySchema = z\n .object({\n collection: z.string().optional(),\n workspace: z.string().optional(),\n })\n .loose();\n\n/**\n * load meta files, fallback to bundler's built-in plugins when ?collection is unspecified.\n */\nexport function createMetaLoader(\n { getCore }: ConfigLoader,\n resolve: {\n json?: 'json' | 'js';\n yaml?: 'js';\n } = {},\n): Loader {\n const { json: resolveJson = 'js' } = resolve;\n\n function parse(filePath: string, source: string) {\n try {\n if (filePath.endsWith('.json')) return JSON.parse(source);\n if (filePath.endsWith('.yaml')) return load(source);\n } catch (e) {\n throw new Error(`invalid data in ${filePath}`, { cause: e });\n }\n\n throw new Error('Unknown file type ' + filePath);\n }\n\n function onMeta(source: string, { filePath, query }: LoaderInput) {\n const parsed = querySchema.safeParse(query);\n if (!parsed.success || !parsed.data.collection) return null;\n const { collection: collectionName, workspace } = parsed.data;\n\n return async (): Promise<unknown> => {\n let core = await getCore();\n if (workspace) {\n core = core.getWorkspaces().get(workspace) ?? core;\n }\n\n const collection = core.getCollection(collectionName);\n let metaCollection: MetaCollectionItem | undefined;\n\n switch (collection?.type) {\n case 'meta':\n metaCollection = collection;\n break;\n case 'docs':\n metaCollection = collection.meta;\n break;\n }\n\n const data = parse(filePath, source);\n\n if (!metaCollection) return data;\n return core.transformMeta(\n {\n collection: metaCollection,\n filePath,\n source,\n },\n data,\n );\n };\n }\n\n return {\n test: metaLoaderGlob,\n async load(input) {\n const result = onMeta(await input.getSource(), input);\n if (result === null) return null;\n const data = await result();\n\n if (input.filePath.endsWith('.json')) {\n return {\n moduleType: resolveJson,\n code:\n resolveJson === 'json'\n ? 
JSON.stringify(data)\n : `export default ${JSON.stringify(data)}`,\n };\n } else {\n return {\n moduleType: 'js',\n code: `export default ${JSON.stringify(data)}`,\n };\n }\n },\n bun: {\n load(source, input) {\n const result = onMeta(source, input);\n if (result === null)\n return {\n loader: 'object',\n exports: parse(input.filePath, source),\n };\n\n return result().then((data) => ({\n loader: 'object',\n exports: { default: data },\n }));\n },\n },\n };\n}\n"],"mappings":";;;;;AAOA,MAAM,cAAc,EACjB,OAAO;CACN,YAAY,EAAE,QAAQ,CAAC,UAAU;CACjC,WAAW,EAAE,QAAQ,CAAC,UAAU;CACjC,CAAC,CACD,OAAO;;;;AAKV,SAAgB,iBACd,EAAE,WACF,UAGI,EAAE,EACE;CACR,MAAM,EAAE,MAAM,cAAc,SAAS;CAErC,SAAS,MAAM,UAAkB,QAAgB;AAC/C,MAAI;AACF,OAAI,SAAS,SAAS,QAAQ,CAAE,QAAO,KAAK,MAAM,OAAO;AACzD,OAAI,SAAS,SAAS,QAAQ,CAAE,QAAO,KAAK,OAAO;WAC5C,GAAG;AACV,SAAM,IAAI,MAAM,mBAAmB,YAAY,EAAE,OAAO,GAAG,CAAC;;AAG9D,QAAM,IAAI,MAAM,uBAAuB,SAAS;;CAGlD,SAAS,OAAO,QAAgB,EAAE,UAAU,SAAsB;EAChE,MAAM,SAAS,YAAY,UAAU,MAAM;AAC3C,MAAI,CAAC,OAAO,WAAW,CAAC,OAAO,KAAK,WAAY,QAAO;EACvD,MAAM,EAAE,YAAY,gBAAgB,cAAc,OAAO;AAEzD,SAAO,YAA8B;GACnC,IAAI,OAAO,MAAM,SAAS;AAC1B,OAAI,UACF,QAAO,KAAK,eAAe,CAAC,IAAI,UAAU,IAAI;GAGhD,MAAM,aAAa,KAAK,cAAc,eAAe;GACrD,IAAIA;AAEJ,WAAQ,YAAY,MAApB;IACE,KAAK;AACH,sBAAiB;AACjB;IACF,KAAK;AACH,sBAAiB,WAAW;AAC5B;;GAGJ,MAAM,OAAO,MAAM,UAAU,OAAO;AAEpC,OAAI,CAAC,eAAgB,QAAO;AAC5B,UAAO,KAAK,cACV;IACE,YAAY;IACZ;IACA;IACD,EACD,KACD;;;AAIL,QAAO;EACL,MAAM;EACN,MAAM,KAAK,OAAO;GAChB,MAAM,SAAS,OAAO,MAAM,MAAM,WAAW,EAAE,MAAM;AACrD,OAAI,WAAW,KAAM,QAAO;GAC5B,MAAM,OAAO,MAAM,QAAQ;AAE3B,OAAI,MAAM,SAAS,SAAS,QAAQ,CAClC,QAAO;IACL,YAAY;IACZ,MACE,gBAAgB,SACZ,KAAK,UAAU,KAAK,GACpB,kBAAkB,KAAK,UAAU,KAAK;IAC7C;OAED,QAAO;IACL,YAAY;IACZ,MAAM,kBAAkB,KAAK,UAAU,KAAK;IAC7C;;EAGL,KAAK,EACH,KAAK,QAAQ,OAAO;GAClB,MAAM,SAAS,OAAO,QAAQ,MAAM;AACpC,OAAI,WAAW,KACb,QAAO;IACL,QAAQ;IACR,SAAS,MAAM,MAAM,UAAU,OAAO;IACvC;AAEH,UAAO,QAAQ,CAAC,MAAM,UAAU;IAC9B,QAAQ;IACR,SAAS,EAAE,SAAS,MAAM;IAC3B,EAAE;KAEN;EACF"}