@hanzo/docs-mdx 14.2.6 → 14.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.
Files changed (73)
  1. package/dist/{config-s4QVhqzA.js → adapter-Jwap8N8Y.js} +36 -36
  2. package/dist/{config-s4QVhqzA.js.map → adapter-Jwap8N8Y.js.map} +1 -1
  3. package/dist/{build-DbdeZyKK.js → build-DP5xMGSd.js} +2 -2
  4. package/dist/{build-DbdeZyKK.js.map → build-DP5xMGSd.js.map} +1 -1
  5. package/dist/{build-mdx-BTwSGUFs.js → build-mdx-B98VCkri.js} +2 -2
  6. package/dist/{build-mdx-BTwSGUFs.js.map → build-mdx-B98VCkri.js.map} +1 -1
  7. package/dist/build-mdx-CKU70o5p.js +5 -0
  8. package/dist/{build-mdx-DyJk_jWV.d.ts → build-mdx-DWx1HtNY.d.ts} +7 -7
  9. package/dist/build-mdx-DWx1HtNY.d.ts.map +1 -0
  10. package/dist/bun/index.d.ts +1 -1
  11. package/dist/bun/index.js +8 -8
  12. package/dist/{codegen-IYa8t8xV.js → codegen-DleOVLNr.js} +1 -1
  13. package/dist/{codegen-IYa8t8xV.js.map → codegen-DleOVLNr.js.map} +1 -1
  14. package/dist/config/index.d.ts +1 -1
  15. package/dist/config/index.js +3 -3
  16. package/dist/{core-CCsY8cxS.js → core-BJualF84.js} +3 -3
  17. package/dist/core-BJualF84.js.map +1 -0
  18. package/dist/{core-CgLkZ4NQ.d.ts → core-INx0uFn9.d.ts} +4 -14
  19. package/dist/core-INx0uFn9.d.ts.map +1 -0
  20. package/dist/{fuma-matter-BWdOvvCf.js → frontmatter-Cq6XTjTb.js} +6 -6
  21. package/dist/frontmatter-Cq6XTjTb.js.map +1 -0
  22. package/dist/{index-file-DsOEtYCF.js → index-file-BVTnUkr9.js} +5 -5
  23. package/dist/index-file-BVTnUkr9.js.map +1 -0
  24. package/dist/index.d.ts +2 -2
  25. package/dist/load-from-file-BD2SdoUT.js +5 -0
  26. package/dist/{load-from-file-Bx1TjN0x.js → load-from-file-CSjo1gN1.js} +2 -2
  27. package/dist/{load-from-file-Bx1TjN0x.js.map → load-from-file-CSjo1gN1.js.map} +1 -1
  28. package/dist/{loaders-BjG6ii1-.js → loaders-BVwYfelL.js} +1 -1
  29. package/dist/{loaders-BjG6ii1-.js.map → loaders-BVwYfelL.js.map} +1 -1
  30. package/dist/{mdx-DBBqLhht.js → mdx-B8Ub7Bro.js} +7 -7
  31. package/dist/mdx-B8Ub7Bro.js.map +1 -0
  32. package/dist/{meta-u6oJ9ej1.js → meta-DyieTM4Z.js} +2 -2
  33. package/dist/{meta-u6oJ9ej1.js.map → meta-DyieTM4Z.js.map} +1 -1
  34. package/dist/next/index.cjs +5 -5
  35. package/dist/next/index.d.ts +1 -1
  36. package/dist/next/index.js +8 -8
  37. package/dist/node/loader.js +6 -6
  38. package/dist/plugins/index-file.d.ts +1 -1
  39. package/dist/plugins/index-file.js +3 -3
  40. package/dist/plugins/json-schema.d.ts +1 -1
  41. package/dist/plugins/json-schema.js +2 -2
  42. package/dist/plugins/last-modified.d.ts +1 -1
  43. package/dist/plugins/last-modified.js +1 -1
  44. package/dist/{preset-D_quDsKp.js → preset-DtKimtBh.js} +1 -1
  45. package/dist/{preset-D_quDsKp.js.map → preset-DtKimtBh.js.map} +1 -1
  46. package/dist/{remark-include-BnRd6OBo.js → remark-include-CIiVrABN.js} +4 -4
  47. package/dist/remark-include-CIiVrABN.js.map +1 -0
  48. package/dist/runtime/browser.d.ts +2 -2
  49. package/dist/runtime/dynamic.d.ts +1 -1
  50. package/dist/runtime/dynamic.js +9 -9
  51. package/dist/runtime/dynamic.js.map +1 -1
  52. package/dist/runtime/server.d.ts +2 -2
  53. package/dist/runtime/server.js +4 -10
  54. package/dist/runtime/server.js.map +1 -1
  55. package/dist/vite/index.d.ts +2 -2
  56. package/dist/vite/index.js +10 -10
  57. package/dist/vite/index.js.map +1 -1
  58. package/dist/webpack/mdx.d.ts +1 -1
  59. package/dist/webpack/mdx.js +6 -6
  60. package/dist/webpack/meta.d.ts +1 -1
  61. package/dist/webpack/meta.js +5 -5
  62. package/dist/{webpack-fHsy5QQI.js → webpack-DPxDgZAx.js} +2 -2
  63. package/dist/{webpack-fHsy5QQI.js.map → webpack-DPxDgZAx.js.map} +1 -1
  64. package/package.json +1 -1
  65. package/dist/build-mdx-Cg4YpCKX.js +0 -5
  66. package/dist/build-mdx-DyJk_jWV.d.ts.map +0 -1
  67. package/dist/core-CCsY8cxS.js.map +0 -1
  68. package/dist/core-CgLkZ4NQ.d.ts.map +0 -1
  69. package/dist/fuma-matter-BWdOvvCf.js.map +0 -1
  70. package/dist/index-file-DsOEtYCF.js.map +0 -1
  71. package/dist/load-from-file-DPPJI83O.js +0 -5
  72. package/dist/mdx-DBBqLhht.js.map +0 -1
  73. package/dist/remark-include-BnRd6OBo.js.map +0 -1
@@ -0,0 +1 @@
+ {"version":3,"file":"index-file-BVTnUkr9.js","names":["dynamicCollections: CollectionItem[]","serverOptions: ServerOptions","typeConfigs: string[]","out: Promise<EmitEntry>[]","metaGlob","partialOptions: CoreOptions","path","infoStr: string[]","collection: DocCollectionItem | undefined"],"sources":["../src/utils/fs-cache.ts","../src/plugins/index-file.ts"],"sourcesContent":["import fs from 'node:fs/promises';\nimport path from 'node:path';\n\nconst map = new Map<string, Promise<string>>();\n\nexport function createFSCache() {\n return {\n read(file: string): Promise<string> {\n const fullPath = toFullPath(file);\n const cached = map.get(fullPath);\n if (cached) return cached;\n\n const read = fs.readFile(fullPath).then((s) => s.toString());\n map.set(fullPath, read);\n return read;\n },\n\n delete(file: string) {\n map.delete(toFullPath(file));\n },\n };\n}\n\n/**\n * make file paths relative to cwd\n */\nfunction toFullPath(file: string) {\n if (path.isAbsolute(file)) {\n return path.relative(process.cwd(), file);\n }\n\n return file;\n}\n","import type { Core, CoreOptions, Plugin, PluginContext } from '@/core';\nimport type { CollectionItem, DocCollectionItem, MetaCollectionItem } from '@/config/build';\nimport path from 'path';\nimport { type CodeGen, createCodegen, ident, slash } from '@/utils/codegen';\nimport { glob } from 'tinyglobby';\nimport { createFSCache } from '@/utils/fs-cache';\nimport { createHash } from 'crypto';\nimport type { LazyEntry } from '@/runtime/dynamic';\nimport type { EmitEntry } from '@/core';\nimport { parseFrontmatter } from '@/utils/frontmatter';\nimport type { ServerOptions } from '@/runtime/server';\n\nexport interface IndexFilePluginOptions {\n target?: 'default' | 'vite';\n\n /**\n * add `.js` extensions to imports, needed for ESM without bundler resolution\n */\n addJsExtension?: boolean;\n\n /**\n * Generate entry point for browser\n * @defaultValue true\n */\n browser?: boolean;\n\n /**\n * Generate entry point for dynamic compilation\n * @defaultValue true\n */\n dynamic?: boolean;\n}\n\nexport interface IndexFilePlugin {\n ['index-file']?: {\n generateTypeConfig?: (this: PluginContext) => string | void;\n serverOptions?: (this: PluginContext, options: ServerOptions) => void;\n };\n}\n\ninterface FileGenContext {\n core: Core;\n workspace?: string;\n codegen: CodeGen;\n serverOptions: ServerOptions;\n tc: string;\n}\n\nconst indexFileCache = createFSCache();\n\nexport default function indexFile(options: IndexFilePluginOptions = {}): Plugin {\n const { target = 'default', addJsExtension, browser = true, dynamic = true } = options;\n let dynamicCollections: CollectionItem[];\n\n function isDynamic(collection: CollectionItem) {\n return (\n (collection.type === 'docs' && collection.docs.dynamic) ||\n (collection.type === 'doc' && collection.dynamic)\n );\n }\n\n function generateConfigs(core: Core): {\n serverOptions: ServerOptions;\n tc: string;\n } {\n const serverOptions: ServerOptions = {};\n const typeConfigs: string[] = ['import(\"@hanzo/docs-mdx/runtime/types\").InternalTypeConfig'];\n const ctx = core.getPluginContext();\n\n for (const plugin of core.getPlugins()) {\n const indexFilePlugin = plugin['index-file'];\n if (!indexFilePlugin) continue;\n\n indexFilePlugin.serverOptions?.call(ctx, serverOptions);\n const config = indexFilePlugin.generateTypeConfig?.call(ctx);\n if (config) typeConfigs.push(config);\n }\n\n return {\n serverOptions,\n tc: typeConfigs.join(' & '),\n };\n }\n\n return {\n name: 'index-file',\n config() {\n 
dynamicCollections = this.core.getCollections().filter(isDynamic);\n },\n configureServer(server) {\n if (!server.watcher) return;\n\n server.watcher.on('all', async (event, file) => {\n indexFileCache.delete(file);\n\n // dynamic collections always require re-generation on change\n if (dynamicCollections.length === 0) {\n // vite uses `import.meta.glob`, no need to re-generate\n if (target === 'vite') return;\n // only re-generate when adding/deleting entries\n if (target === 'default' && event === 'change') return;\n }\n\n const updatedCollection = this.core\n .getCollections()\n .find((collection) => collection.hasFile(file));\n\n if (!updatedCollection) return;\n if (!isDynamic(updatedCollection)) {\n if (target === 'vite') return;\n if (target === 'default' && event === 'change') return;\n }\n\n await this.core.emit({\n filterPlugin: (plugin) => plugin.name === 'index-file',\n filterWorkspace: () => false,\n write: true,\n });\n });\n },\n async emit() {\n const globCache = new Map<string, Promise<string[]>>();\n const { workspace, outDir } = this.core.getOptions();\n const { serverOptions, tc } = generateConfigs(this.core);\n const toEmitEntry = async (\n path: string,\n content: (ctx: FileGenContext) => Promise<void>,\n ): Promise<EmitEntry> => {\n const codegen = createCodegen({\n target,\n outDir: outDir,\n jsExtension: addJsExtension,\n globCache,\n });\n await content({\n core: this.core,\n codegen,\n serverOptions,\n tc,\n workspace: workspace?.name,\n });\n return {\n path,\n content: codegen.toString(),\n };\n };\n\n const out: Promise<EmitEntry>[] = [toEmitEntry('server.ts', generateServerIndexFile)];\n\n if (dynamic) out.push(toEmitEntry('dynamic.ts', generateDynamicIndexFile));\n\n if (browser) out.push(toEmitEntry('browser.ts', generateBrowserIndexFile));\n\n return await Promise.all(out);\n },\n };\n}\n\nasync function generateServerIndexFile(ctx: FileGenContext) {\n const { core, codegen, serverOptions, tc } = ctx;\n codegen.lines.push(\n `import { server } from '@hanzo/docs-mdx/runtime/server';`,\n `import type * as Config from '${codegen.formatImportPath(core.getOptions().configPath)}';`,\n '',\n `const create = server<typeof Config, ${tc}>(${JSON.stringify(serverOptions)});`,\n );\n\n async function generateCollectionObject(collection: CollectionItem): Promise<string | undefined> {\n const base = getBase(collection);\n\n switch (collection.type) {\n case 'docs': {\n if (collection.docs.dynamic) return;\n\n if (collection.docs.async) {\n const [metaGlob, headGlob, bodyGlob] = await Promise.all([\n generateMetaCollectionGlob(ctx, collection.meta, true),\n generateDocCollectionFrontmatterGlob(ctx, collection.docs, true),\n generateDocCollectionGlob(ctx, collection.docs),\n ]);\n\n return `await create.docsLazy(\"${collection.name}\", \"${base}\", ${metaGlob}, ${headGlob}, ${bodyGlob})`;\n }\n\n const [metaGlob, docGlob] = await Promise.all([\n generateMetaCollectionGlob(ctx, collection.meta, true),\n generateDocCollectionGlob(ctx, collection.docs, true),\n ]);\n\n return `await create.docs(\"${collection.name}\", \"${base}\", ${metaGlob}, ${docGlob})`;\n }\n case 'doc':\n if (collection.dynamic) return;\n\n if (collection.async) {\n const [headGlob, bodyGlob] = await Promise.all([\n generateDocCollectionFrontmatterGlob(ctx, collection, true),\n generateDocCollectionGlob(ctx, collection),\n ]);\n\n return `await create.docLazy(\"${collection.name}\", \"${base}\", ${headGlob}, ${bodyGlob})`;\n }\n\n return `await create.doc(\"${collection.name}\", \"${base}\", ${await 
generateDocCollectionGlob(\n ctx,\n collection,\n true,\n )})`;\n case 'meta':\n return `await create.meta(\"${collection.name}\", \"${base}\", ${await generateMetaCollectionGlob(\n ctx,\n collection,\n true,\n )})`;\n }\n }\n\n await codegen.pushAsync(\n core.getCollections().map(async (collection) => {\n const obj = await generateCollectionObject(collection);\n if (!obj) return;\n\n return `\\nexport const ${collection.name} = ${obj};`;\n }),\n );\n}\n\nasync function generateDynamicIndexFile(ctx: FileGenContext) {\n const { core, codegen, serverOptions, tc } = ctx;\n const { configPath, environment, outDir } = core.getOptions();\n // serializable config options\n const partialOptions: CoreOptions = {\n configPath,\n environment,\n outDir,\n };\n codegen.lines.push(\n `import { dynamic } from '@hanzo/docs-mdx/runtime/dynamic';`,\n `import * as Config from '${codegen.formatImportPath(configPath)}';`,\n '',\n `const create = await dynamic<typeof Config, ${tc}>(Config, ${JSON.stringify(partialOptions)}, ${JSON.stringify(serverOptions)});`,\n );\n\n async function generateCollectionObjectEntry(\n collection: DocCollectionItem,\n absolutePath: string,\n ) {\n const fullPath = path.relative(process.cwd(), absolutePath);\n const content = await indexFileCache.read(fullPath).catch(() => '');\n const parsed = parseFrontmatter(content);\n const data = await core.transformFrontmatter(\n {\n collection,\n filePath: fullPath,\n source: content,\n },\n parsed.data as Record<string, unknown>,\n );\n\n const hash = createHash('md5').update(content).digest('hex');\n const infoStr: string[] = [\n // make sure it's included in vercel/nft\n `absolutePath: path.resolve(${JSON.stringify(fullPath)})`,\n ];\n for (const [k, v] of Object.entries({\n info: {\n fullPath,\n path: path.relative(collection.dir, absolutePath),\n },\n data,\n hash,\n } satisfies LazyEntry)) {\n infoStr.push(`${k}: ${JSON.stringify(v)}`);\n }\n\n return `{ ${infoStr.join(', ')} }`;\n }\n\n async function generateCollectionObject(parent: CollectionItem): Promise<string | undefined> {\n let collection: DocCollectionItem | undefined;\n if (parent.type === 'doc') collection = parent;\n else if (parent.type === 'docs') collection = parent.docs;\n\n if (!collection || !collection.dynamic) return;\n\n const files = await glob(collection.patterns, {\n cwd: collection.dir,\n absolute: true,\n });\n const entries = await Promise.all(\n files.map((file) => generateCollectionObjectEntry(collection, file)),\n );\n\n switch (parent.type) {\n case 'docs': {\n const metaGlob = await generateMetaCollectionGlob(ctx, parent.meta, true);\n\n return `await create.docs(\"${parent.name}\", \"${getBase(parent)}\", ${metaGlob}, ${entries.join(', ')})`;\n }\n case 'doc':\n return `await create.doc(\"${collection.name}\", \"${getBase(collection)}\", ${entries.join(', ')})`;\n }\n }\n\n await codegen.pushAsync(\n core.getCollections().map(async (collection) => {\n const obj = await generateCollectionObject(collection);\n if (!obj) return;\n\n return `\\nexport const ${collection.name} = ${obj};`;\n }),\n );\n}\n\nasync function generateBrowserIndexFile(ctx: FileGenContext) {\n const { core, codegen, tc } = ctx;\n codegen.lines.push(\n `import { browser } from '@hanzo/docs-mdx/runtime/browser';`,\n `import type * as Config from '${codegen.formatImportPath(core.getOptions().configPath)}';`,\n '',\n `const create = browser<typeof Config, ${tc}>();`,\n );\n\n async function generateCollectionObject(collection: CollectionItem): Promise<string | undefined> {\n switch 
(collection.type) {\n case 'docs': {\n if (collection.docs.dynamic) return;\n\n return generateCollectionObject(collection.docs);\n }\n case 'doc':\n if (collection.dynamic) return;\n\n return `create.doc(\"${collection.name}\", ${await generateDocCollectionGlob(ctx, collection)})`;\n }\n }\n\n codegen.lines.push('const browserCollections = {');\n\n await codegen.pushAsync(\n core.getCollections().map(async (collection) => {\n const obj = await generateCollectionObject(collection);\n if (!obj) return;\n\n return ident(`${collection.name}: ${obj},`);\n }),\n );\n\n codegen.lines.push('};', 'export default browserCollections;');\n}\n\nfunction getBase(collection: CollectionItem) {\n return slash(path.relative(process.cwd(), collection.dir));\n}\n\nfunction generateDocCollectionFrontmatterGlob(\n { codegen, workspace }: FileGenContext,\n collection: DocCollectionItem,\n eager = false,\n) {\n return codegen.generateGlobImport(collection.patterns, {\n query: {\n collection: collection.name,\n only: 'frontmatter',\n workspace,\n },\n import: 'frontmatter',\n base: collection.dir,\n eager,\n });\n}\n\nfunction generateDocCollectionGlob(\n { codegen, workspace }: FileGenContext,\n collection: DocCollectionItem,\n eager = false,\n) {\n return codegen.generateGlobImport(collection.patterns, {\n query: {\n collection: collection.name,\n workspace,\n },\n base: collection.dir,\n eager,\n });\n}\n\nfunction generateMetaCollectionGlob(\n { codegen, workspace }: FileGenContext,\n collection: MetaCollectionItem,\n eager = false,\n) {\n return codegen.generateGlobImport(collection.patterns, {\n query: {\n collection: collection.name,\n workspace,\n },\n import: 'default',\n base: collection.dir,\n eager,\n });\n}\n"],"mappings":";;;;;;;;;AAGA,MAAM,sBAAM,IAAI,KAA8B;AAE9C,SAAgB,gBAAgB;AAC9B,QAAO;EACL,KAAK,MAA+B;GAClC,MAAM,WAAW,WAAW,KAAK;GACjC,MAAM,SAAS,IAAI,IAAI,SAAS;AAChC,OAAI,OAAQ,QAAO;GAEnB,MAAM,OAAO,GAAG,SAAS,SAAS,CAAC,MAAM,MAAM,EAAE,UAAU,CAAC;AAC5D,OAAI,IAAI,UAAU,KAAK;AACvB,UAAO;;EAGT,OAAO,MAAc;AACnB,OAAI,OAAO,WAAW,KAAK,CAAC;;EAE/B;;;;;AAMH,SAAS,WAAW,MAAc;AAChC,KAAI,KAAK,WAAW,KAAK,CACvB,QAAO,KAAK,SAAS,QAAQ,KAAK,EAAE,KAAK;AAG3C,QAAO;;;;;ACiBT,MAAM,iBAAiB,eAAe;AAEtC,SAAwB,UAAU,UAAkC,EAAE,EAAU;CAC9E,MAAM,EAAE,SAAS,WAAW,gBAAgB,UAAU,MAAM,UAAU,SAAS;CAC/E,IAAIA;CAEJ,SAAS,UAAU,YAA4B;AAC7C,SACG,WAAW,SAAS,UAAU,WAAW,KAAK,WAC9C,WAAW,SAAS,SAAS,WAAW;;CAI7C,SAAS,gBAAgB,MAGvB;EACA,MAAMC,gBAA+B,EAAE;EACvC,MAAMC,cAAwB,CAAC,+DAA6D;EAC5F,MAAM,MAAM,KAAK,kBAAkB;AAEnC,OAAK,MAAM,UAAU,KAAK,YAAY,EAAE;GACtC,MAAM,kBAAkB,OAAO;AAC/B,OAAI,CAAC,gBAAiB;AAEtB,mBAAgB,eAAe,KAAK,KAAK,cAAc;GACvD,MAAM,SAAS,gBAAgB,oBAAoB,KAAK,IAAI;AAC5D,OAAI,OAAQ,aAAY,KAAK,OAAO;;AAGtC,SAAO;GACL;GACA,IAAI,YAAY,KAAK,MAAM;GAC5B;;AAGH,QAAO;EACL,MAAM;EACN,SAAS;AACP,wBAAqB,KAAK,KAAK,gBAAgB,CAAC,OAAO,UAAU;;EAEnE,gBAAgB,QAAQ;AACtB,OAAI,CAAC,OAAO,QAAS;AAErB,UAAO,QAAQ,GAAG,OAAO,OAAO,OAAO,SAAS;AAC9C,mBAAe,OAAO,KAAK;AAG3B,QAAI,mBAAmB,WAAW,GAAG;AAEnC,SAAI,WAAW,OAAQ;AAEvB,SAAI,WAAW,aAAa,UAAU,SAAU;;IAGlD,MAAM,oBAAoB,KAAK,KAC5B,gBAAgB,CAChB,MAAM,eAAe,WAAW,QAAQ,KAAK,CAAC;AAEjD,QAAI,CAAC,kBAAmB;AACxB,QAAI,CAAC,UAAU,kBAAkB,EAAE;AACjC,SAAI,WAAW,OAAQ;AACvB,SAAI,WAAW,aAAa,UAAU,SAAU;;AAGlD,UAAM,KAAK,KAAK,KAAK;KACnB,eAAe,WAAW,OAAO,SAAS;KAC1C,uBAAuB;KACvB,OAAO;KACR,CAAC;KACF;;EAEJ,MAAM,OAAO;GACX,MAAM,4BAAY,IAAI,KAAgC;GACtD,MAAM,EAAE,WAAW,WAAW,KAAK,KAAK,YAAY;GACpD,MAAM,EAAE,eAAe,OAAO,gBAAgB,KAAK,KAAK;GACxD,MAAM,cAAc,OAClB,QACA,YACuB;IACvB,MAAM,UAAU,cAAc;KAC5B;KACQ;KACR,aAAa;KACb;KACD,CAAC;AACF,UAAM,QAAQ;KACZ,MAAM,KAAK;KACX;KACA;KACA;KACA,WAAW,WAAW;KACvB,CAAC;AACF,WAAO;KACL;KACA,SAAS,QA
AQ,UAAU;KAC5B;;GAGH,MAAMC,MAA4B,CAAC,YAAY,aAAa,wBAAwB,CAAC;AAErF,OAAI,QAAS,KAAI,KAAK,YAAY,cAAc,yBAAyB,CAAC;AAE1E,OAAI,QAAS,KAAI,KAAK,YAAY,cAAc,yBAAyB,CAAC;AAE1E,UAAO,MAAM,QAAQ,IAAI,IAAI;;EAEhC;;AAGH,eAAe,wBAAwB,KAAqB;CAC1D,MAAM,EAAE,MAAM,SAAS,eAAe,OAAO;AAC7C,SAAQ,MAAM,KACZ,4DACA,iCAAiC,QAAQ,iBAAiB,KAAK,YAAY,CAAC,WAAW,CAAC,KACxF,IACA,wCAAwC,GAAG,IAAI,KAAK,UAAU,cAAc,CAAC,IAC9E;CAED,eAAe,yBAAyB,YAAyD;EAC/F,MAAM,OAAO,QAAQ,WAAW;AAEhC,UAAQ,WAAW,MAAnB;GACE,KAAK,QAAQ;AACX,QAAI,WAAW,KAAK,QAAS;AAE7B,QAAI,WAAW,KAAK,OAAO;KACzB,MAAM,CAACC,YAAU,UAAU,YAAY,MAAM,QAAQ,IAAI;MACvD,2BAA2B,KAAK,WAAW,MAAM,KAAK;MACtD,qCAAqC,KAAK,WAAW,MAAM,KAAK;MAChE,0BAA0B,KAAK,WAAW,KAAK;MAChD,CAAC;AAEF,YAAO,0BAA0B,WAAW,KAAK,MAAM,KAAK,KAAKA,WAAS,IAAI,SAAS,IAAI,SAAS;;IAGtG,MAAM,CAAC,UAAU,WAAW,MAAM,QAAQ,IAAI,CAC5C,2BAA2B,KAAK,WAAW,MAAM,KAAK,EACtD,0BAA0B,KAAK,WAAW,MAAM,KAAK,CACtD,CAAC;AAEF,WAAO,sBAAsB,WAAW,KAAK,MAAM,KAAK,KAAK,SAAS,IAAI,QAAQ;;GAEpF,KAAK;AACH,QAAI,WAAW,QAAS;AAExB,QAAI,WAAW,OAAO;KACpB,MAAM,CAAC,UAAU,YAAY,MAAM,QAAQ,IAAI,CAC7C,qCAAqC,KAAK,YAAY,KAAK,EAC3D,0BAA0B,KAAK,WAAW,CAC3C,CAAC;AAEF,YAAO,yBAAyB,WAAW,KAAK,MAAM,KAAK,KAAK,SAAS,IAAI,SAAS;;AAGxF,WAAO,qBAAqB,WAAW,KAAK,MAAM,KAAK,KAAK,MAAM,0BAChE,KACA,YACA,KACD,CAAC;GACJ,KAAK,OACH,QAAO,sBAAsB,WAAW,KAAK,MAAM,KAAK,KAAK,MAAM,2BACjE,KACA,YACA,KACD,CAAC;;;AAIR,OAAM,QAAQ,UACZ,KAAK,gBAAgB,CAAC,IAAI,OAAO,eAAe;EAC9C,MAAM,MAAM,MAAM,yBAAyB,WAAW;AACtD,MAAI,CAAC,IAAK;AAEV,SAAO,kBAAkB,WAAW,KAAK,KAAK,IAAI;GAClD,CACH;;AAGH,eAAe,yBAAyB,KAAqB;CAC3D,MAAM,EAAE,MAAM,SAAS,eAAe,OAAO;CAC7C,MAAM,EAAE,YAAY,aAAa,WAAW,KAAK,YAAY;CAE7D,MAAMC,iBAA8B;EAClC;EACA;EACA;EACD;AACD,SAAQ,MAAM,KACZ,8DACA,4BAA4B,QAAQ,iBAAiB,WAAW,CAAC,KACjE,IACA,+CAA+C,GAAG,YAAY,KAAK,UAAU,eAAe,CAAC,IAAI,KAAK,UAAU,cAAc,CAAC,IAChI;CAED,eAAe,8BACb,YACA,cACA;EACA,MAAM,WAAWC,OAAK,SAAS,QAAQ,KAAK,EAAE,aAAa;EAC3D,MAAM,UAAU,MAAM,eAAe,KAAK,SAAS,CAAC,YAAY,GAAG;EACnE,MAAM,SAAS,iBAAiB,QAAQ;EACxC,MAAM,OAAO,MAAM,KAAK,qBACtB;GACE;GACA,UAAU;GACV,QAAQ;GACT,EACD,OAAO,KACR;EAED,MAAM,OAAO,WAAW,MAAM,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;EAC5D,MAAMC,UAAoB,CAExB,8BAA8B,KAAK,UAAU,SAAS,CAAC,GACxD;AACD,OAAK,MAAM,CAAC,GAAG,MAAM,OAAO,QAAQ;GAClC,MAAM;IACJ;IACA,MAAMD,OAAK,SAAS,WAAW,KAAK,aAAa;IAClD;GACD;GACA;GACD,CAAqB,CACpB,SAAQ,KAAK,GAAG,EAAE,IAAI,KAAK,UAAU,EAAE,GAAG;AAG5C,SAAO,KAAK,QAAQ,KAAK,KAAK,CAAC;;CAGjC,eAAe,yBAAyB,QAAqD;EAC3F,IAAIE;AACJ,MAAI,OAAO,SAAS,MAAO,cAAa;WAC/B,OAAO,SAAS,OAAQ,cAAa,OAAO;AAErD,MAAI,CAAC,cAAc,CAAC,WAAW,QAAS;EAExC,MAAM,QAAQ,MAAM,KAAK,WAAW,UAAU;GAC5C,KAAK,WAAW;GAChB,UAAU;GACX,CAAC;EACF,MAAM,UAAU,MAAM,QAAQ,IAC5B,MAAM,KAAK,SAAS,8BAA8B,YAAY,KAAK,CAAC,CACrE;AAED,UAAQ,OAAO,MAAf;GACE,KAAK,QAAQ;IACX,MAAM,WAAW,MAAM,2BAA2B,KAAK,OAAO,MAAM,KAAK;AAEzE,WAAO,sBAAsB,OAAO,KAAK,MAAM,QAAQ,OAAO,CAAC,KAAK,SAAS,IAAI,QAAQ,KAAK,KAAK,CAAC;;GAEtG,KAAK,MACH,QAAO,qBAAqB,WAAW,KAAK,MAAM,QAAQ,WAAW,CAAC,KAAK,QAAQ,KAAK,KAAK,CAAC;;;AAIpG,OAAM,QAAQ,UACZ,KAAK,gBAAgB,CAAC,IAAI,OAAO,eAAe;EAC9C,MAAM,MAAM,MAAM,yBAAyB,WAAW;AACtD,MAAI,CAAC,IAAK;AAEV,SAAO,kBAAkB,WAAW,KAAK,KAAK,IAAI;GAClD,CACH;;AAGH,eAAe,yBAAyB,KAAqB;CAC3D,MAAM,EAAE,MAAM,SAAS,OAAO;AAC9B,SAAQ,MAAM,KACZ,8DACA,iCAAiC,QAAQ,iBAAiB,KAAK,YAAY,CAAC,WAAW,CAAC,KACxF,IACA,yCAAyC,GAAG,MAC7C;CAED,eAAe,yBAAyB,YAAyD;AAC/F,UAAQ,WAAW,MAAnB;GACE,KAAK;AACH,QAAI,WAAW,KAAK,QAAS;AAE7B,WAAO,yBAAyB,WAAW,KAAK;GAElD,KAAK;AACH,QAAI,WAAW,QAAS;AAExB,WAAO,eAAe,WAAW,KAAK,KAAK,MAAM,0BAA0B,KAAK,WAAW,CAAC;;;AAIlG,SAAQ,MAAM,KAAK,+BAA+B;AAElD,OAAM,QAAQ,UACZ,KAAK,gBAAgB,CAAC,IAAI,OAAO,eAAe;EAC9C,MAAM,MAAM,MAAM,yBAAyB,WAAW;AACtD,MAAI,CAAC,IAAK;AAEV,SAAO,MAAM,GAAG,WAAW,KAAK,IAAI,IAAI,GAAG;GAC3C,CACH;AAED,SAAQ,MAAM,KAAK,MAAM,qCAAqC;;AAGhE,SAAS,QAAQ,YAA4B;AAC3C,
QAAO,MAAMF,OAAK,SAAS,QAAQ,KAAK,EAAE,WAAW,IAAI,CAAC;;AAG5D,SAAS,qCACP,EAAE,SAAS,aACX,YACA,QAAQ,OACR;AACA,QAAO,QAAQ,mBAAmB,WAAW,UAAU;EACrD,OAAO;GACL,YAAY,WAAW;GACvB,MAAM;GACN;GACD;EACD,QAAQ;EACR,MAAM,WAAW;EACjB;EACD,CAAC;;AAGJ,SAAS,0BACP,EAAE,SAAS,aACX,YACA,QAAQ,OACR;AACA,QAAO,QAAQ,mBAAmB,WAAW,UAAU;EACrD,OAAO;GACL,YAAY,WAAW;GACvB;GACD;EACD,MAAM,WAAW;EACjB;EACD,CAAC;;AAGJ,SAAS,2BACP,EAAE,SAAS,aACX,YACA,QAAQ,OACR;AACA,QAAO,QAAQ,mBAAmB,WAAW,UAAU;EACrD,OAAO;GACL,YAAY,WAAW;GACvB;GACD;EACD,QAAQ;EACR,MAAM,WAAW;EACjB;EACD,CAAC"}
package/dist/index.d.ts CHANGED
@@ -1,3 +1,3 @@
- import { a as EmitOptions, c as PluginContext, d as TransformOptions, f as _Defaults, i as EmitEntry, l as PluginOption, n as Core, o as EmitOutput, p as createCore, r as CoreOptions, s as Plugin, t as CompilationContext, u as ServerContext, z as ExtractedReference } from "./core-CgLkZ4NQ.js";
- import { t as CompiledMDXProperties } from "./build-mdx-DyJk_jWV.js";
+ import { a as EmitOptions, c as PluginContext, d as TransformOptions, f as _Defaults, i as EmitEntry, l as PluginOption, n as Core, o as EmitOutput, p as createCore, r as CoreOptions, s as Plugin, t as CompilationContext, u as ServerContext, z as ExtractedReference } from "./core-INx0uFn9.js";
+ import { t as CompiledMDXProperties } from "./build-mdx-DWx1HtNY.js";
  export { CompilationContext, type CompiledMDXProperties, Core, CoreOptions, EmitEntry, EmitOptions, EmitOutput, type ExtractedReference, Plugin, PluginContext, PluginOption, ServerContext, TransformOptions, _Defaults, createCore };
@@ -0,0 +1,5 @@
+ import "./preset-DtKimtBh.js";
+ import "./build-DP5xMGSd.js";
+ import { t as loadConfig } from "./load-from-file-CSjo1gN1.js";
+
+ export { loadConfig };
@@ -1,4 +1,4 @@
- import { t as buildConfig } from "./build-DbdeZyKK.js";
+ import { t as buildConfig } from "./build-DP5xMGSd.js";
  import { pathToFileURL } from "node:url";
 
  //#region src/config/load-from-file.ts
@@ -35,4 +35,4 @@ async function loadConfig(core, build = false) {
 
  //#endregion
  export { loadConfig as t };
- //# sourceMappingURL=load-from-file-Bx1TjN0x.js.map
+ //# sourceMappingURL=load-from-file-CSjo1gN1.js.map
@@ -1 +1 @@
- {"version":3,"file":"load-from-file-Bx1TjN0x.js","names":[],"sources":["../src/config/load-from-file.ts"],"sourcesContent":["import { pathToFileURL } from 'node:url';\nimport type { LoadedConfig } from '@/config/build';\nimport { buildConfig } from '@/config/build';\nimport type { Core } from '@/core';\n\nasync function compileConfig(core: Core) {\n const { build } = await import('esbuild');\n const { configPath, outDir } = core.getOptions();\n\n const transformed = await build({\n entryPoints: [{ in: configPath, out: 'source.config' }],\n bundle: true,\n outdir: outDir,\n target: 'node20',\n write: true,\n platform: 'node',\n format: 'esm',\n packages: 'external',\n outExtension: {\n '.js': '.mjs',\n },\n allowOverwrite: true,\n });\n\n if (transformed.errors.length > 0) {\n throw new Error('failed to compile configuration file');\n }\n}\n\n/**\n * Load config\n *\n * @param build - By default, it assumes the config file has been compiled. Set this `true` to compile the config first.\n */\nexport async function loadConfig(core: Core, build = false): Promise<LoadedConfig> {\n if (build) await compileConfig(core);\n\n const url = pathToFileURL(core.getCompiledConfigPath());\n // always return a new config\n url.searchParams.set('hash', Date.now().toString());\n\n const config = import(url.href).then((loaded) => buildConfig(loaded as Record<string, unknown>));\n\n return await config;\n}\n"],"mappings":";;;;AAKA,eAAe,cAAc,MAAY;CACvC,MAAM,EAAE,UAAU,MAAM,OAAO;CAC/B,MAAM,EAAE,YAAY,WAAW,KAAK,YAAY;AAiBhD,MAfoB,MAAM,MAAM;EAC9B,aAAa,CAAC;GAAE,IAAI;GAAY,KAAK;GAAiB,CAAC;EACvD,QAAQ;EACR,QAAQ;EACR,QAAQ;EACR,OAAO;EACP,UAAU;EACV,QAAQ;EACR,UAAU;EACV,cAAc,EACZ,OAAO,QACR;EACD,gBAAgB;EACjB,CAAC,EAEc,OAAO,SAAS,EAC9B,OAAM,IAAI,MAAM,uCAAuC;;;;;;;AAS3D,eAAsB,WAAW,MAAY,QAAQ,OAA8B;AACjF,KAAI,MAAO,OAAM,cAAc,KAAK;CAEpC,MAAM,MAAM,cAAc,KAAK,uBAAuB,CAAC;AAEvD,KAAI,aAAa,IAAI,QAAQ,KAAK,KAAK,CAAC,UAAU,CAAC;AAInD,QAAO,MAFQ,OAAO,IAAI,MAAM,MAAM,WAAW,YAAY,OAAkC,CAAC"}
+ {"version":3,"file":"load-from-file-CSjo1gN1.js","names":[],"sources":["../src/config/load-from-file.ts"],"sourcesContent":["import { pathToFileURL } from 'node:url';\nimport type { LoadedConfig } from '@/config/build';\nimport { buildConfig } from '@/config/build';\nimport type { Core } from '@/core';\n\nasync function compileConfig(core: Core) {\n const { build } = await import('esbuild');\n const { configPath, outDir } = core.getOptions();\n\n const transformed = await build({\n entryPoints: [{ in: configPath, out: 'source.config' }],\n bundle: true,\n outdir: outDir,\n target: 'node20',\n write: true,\n platform: 'node',\n format: 'esm',\n packages: 'external',\n outExtension: {\n '.js': '.mjs',\n },\n allowOverwrite: true,\n });\n\n if (transformed.errors.length > 0) {\n throw new Error('failed to compile configuration file');\n }\n}\n\n/**\n * Load config\n *\n * @param build - By default, it assumes the config file has been compiled. Set this `true` to compile the config first.\n */\nexport async function loadConfig(core: Core, build = false): Promise<LoadedConfig> {\n if (build) await compileConfig(core);\n\n const url = pathToFileURL(core.getCompiledConfigPath());\n // always return a new config\n url.searchParams.set('hash', Date.now().toString());\n\n const config = import(url.href).then((loaded) => buildConfig(loaded as Record<string, unknown>));\n\n return await config;\n}\n"],"mappings":";;;;AAKA,eAAe,cAAc,MAAY;CACvC,MAAM,EAAE,UAAU,MAAM,OAAO;CAC/B,MAAM,EAAE,YAAY,WAAW,KAAK,YAAY;AAiBhD,MAfoB,MAAM,MAAM;EAC9B,aAAa,CAAC;GAAE,IAAI;GAAY,KAAK;GAAiB,CAAC;EACvD,QAAQ;EACR,QAAQ;EACR,QAAQ;EACR,OAAO;EACP,UAAU;EACV,QAAQ;EACR,UAAU;EACV,cAAc,EACZ,OAAO,QACR;EACD,gBAAgB;EACjB,CAAC,EAEc,OAAO,SAAS,EAC9B,OAAM,IAAI,MAAM,uCAAuC;;;;;;;AAS3D,eAAsB,WAAW,MAAY,QAAQ,OAA8B;AACjF,KAAI,MAAO,OAAM,cAAc,KAAK;CAEpC,MAAM,MAAM,cAAc,KAAK,uBAAuB,CAAC;AAEvD,KAAI,aAAa,IAAI,QAAQ,KAAK,KAAK,CAAC,UAAU,CAAC;AAInD,QAAO,MAFQ,OAAO,IAAI,MAAM,MAAM,WAAW,YAAY,OAAkC,CAAC"}
@@ -4,4 +4,4 @@ const mdxLoaderGlob = /\.mdx?(\?.+?)?$/;
 
  //#endregion
  export { metaLoaderGlob as n, mdxLoaderGlob as t };
- //# sourceMappingURL=loaders-BjG6ii1-.js.map
+ //# sourceMappingURL=loaders-BVwYfelL.js.map
@@ -1 +1 @@
- {"version":3,"file":"loaders-BjG6ii1-.js","names":[],"sources":["../src/loaders/index.ts"],"sourcesContent":["export const metaLoaderGlob = /\\.(json|yaml)(\\?.+?)?$/;\nexport const mdxLoaderGlob = /\\.mdx?(\\?.+?)?$/;\n"],"mappings":";AAAA,MAAa,iBAAiB;AAC9B,MAAa,gBAAgB"}
+ {"version":3,"file":"loaders-BVwYfelL.js","names":[],"sources":["../src/loaders/index.ts"],"sourcesContent":["export const metaLoaderGlob = /\\.(json|yaml)(\\?.+?)?$/;\nexport const mdxLoaderGlob = /\\.mdx?(\\?.+?)?$/;\n"],"mappings":";AAAA,MAAa,iBAAiB;AAC9B,MAAa,gBAAgB"}
@@ -1,8 +1,8 @@
- import { t as mdxLoaderGlob } from "./loaders-BjG6ii1-.js";
- import { t as fumaMatter } from "./fuma-matter-BWdOvvCf.js";
- import path from "node:path";
- import fs from "node:fs/promises";
+ import { t as parseFrontmatter } from "./frontmatter-Cq6XTjTb.js";
+ import { t as mdxLoaderGlob } from "./loaders-BVwYfelL.js";
  import { z } from "zod";
+ import fs from "node:fs/promises";
+ import path from "node:path";
  import { createHash } from "node:crypto";
 
  //#region src/loaders/mdx/index.ts
@@ -22,7 +22,7 @@ function createMdxLoader({ getCore }) {
  async load({ getSource, development: isDevelopment, query, compiler, filePath }) {
  let core = await getCore();
  const value = await getSource();
- const matter = fumaMatter(value);
+ const matter = parseFrontmatter(value);
  const { collection: collectionName, workspace, only } = querySchema.parse(query);
  if (workspace) core = core.getWorkspaces().get(workspace) ?? core;
  let after;
@@ -59,7 +59,7 @@ function createMdxLoader({ getCore }) {
  code: `export const frontmatter = ${JSON.stringify(matter.data)}`,
  map: null
  };
- const { buildMDX } = await import("./build-mdx-Cg4YpCKX.js");
+ const { buildMDX } = await import("./build-mdx-CKU70o5p.js");
  const compiled = await buildMDX(core, docCollection, {
  isDevelopment,
  source: "\n".repeat(countLines(matter.matter)) + matter.content,
@@ -88,4 +88,4 @@ function countLines(s) {
 
  //#endregion
  export { createMdxLoader as t };
- //# sourceMappingURL=mdx-DBBqLhht.js.map
+ //# sourceMappingURL=mdx-B8Ub7Bro.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"mdx-B8Ub7Bro.js","names":["after: (() => Promise<void>) | undefined","docCollection: DocCollectionItem | undefined"],"sources":["../src/loaders/mdx/index.ts"],"sourcesContent":["import { parseFrontmatter } from '@/utils/frontmatter';\nimport type { SourceMap } from 'rollup';\nimport type { Loader } from '@/loaders/adapter';\nimport { z } from 'zod';\nimport type { DocCollectionItem } from '@/config/build';\nimport fs from 'node:fs/promises';\nimport path from 'node:path';\nimport { createHash } from 'node:crypto';\nimport type { ConfigLoader } from '@/loaders/config';\nimport { mdxLoaderGlob } from '..';\n\nconst querySchema = z\n .object({\n only: z.literal(['frontmatter', 'all']).default('all'),\n collection: z.string().optional(),\n workspace: z.string().optional(),\n })\n .loose();\n\nconst cacheEntry = z.object({\n code: z.string(),\n map: z.any().optional(),\n hash: z.string().optional(),\n});\n\ntype CacheEntry = z.infer<typeof cacheEntry>;\n\nexport function createMdxLoader({ getCore }: ConfigLoader): Loader {\n return {\n test: mdxLoaderGlob,\n async load({ getSource, development: isDevelopment, query, compiler, filePath }) {\n let core = await getCore();\n const value = await getSource();\n const matter = parseFrontmatter(value);\n const { collection: collectionName, workspace, only } = querySchema.parse(query);\n if (workspace) {\n core = core.getWorkspaces().get(workspace) ?? core;\n }\n\n let after: (() => Promise<void>) | undefined;\n\n const { experimentalBuildCache = false } = core.getConfig().global;\n if (!isDevelopment && experimentalBuildCache) {\n const cacheDir = experimentalBuildCache;\n const cacheKey = `${collectionName ?? 'global'}_${generateCacheHash(filePath)}`;\n\n const cached = await fs\n .readFile(path.join(cacheDir, cacheKey))\n .then((content) => cacheEntry.parse(JSON.parse(content.toString())))\n .catch(() => null);\n\n if (cached && cached.hash === generateCacheHash(value)) return cached;\n after = async () => {\n await fs.mkdir(cacheDir, { recursive: true });\n await fs.writeFile(\n path.join(cacheDir, cacheKey),\n JSON.stringify({\n ...out,\n hash: generateCacheHash(value),\n } satisfies CacheEntry),\n );\n };\n }\n\n const collection = collectionName ? 
core.getCollection(collectionName) : undefined;\n\n let docCollection: DocCollectionItem | undefined;\n switch (collection?.type) {\n case 'doc':\n docCollection = collection;\n break;\n case 'docs':\n docCollection = collection.docs;\n break;\n }\n\n if (docCollection) {\n matter.data = await core.transformFrontmatter(\n { collection: docCollection, filePath, source: value },\n matter.data as Record<string, unknown>,\n );\n }\n\n if (only === 'frontmatter') {\n return {\n code: `export const frontmatter = ${JSON.stringify(matter.data)}`,\n map: null,\n };\n }\n\n const { buildMDX } = await import('@/loaders/mdx/build-mdx');\n const compiled = await buildMDX(core, docCollection, {\n isDevelopment,\n // ensure the line number is correct in errors\n source: '\\n'.repeat(countLines(matter.matter)) + matter.content,\n filePath,\n frontmatter: matter.data as Record<string, unknown>,\n _compiler: compiler,\n environment: 'bundler',\n });\n\n const out = {\n code: String(compiled.value),\n map: compiled.map as SourceMap,\n };\n\n await after?.();\n return out;\n },\n };\n}\n\nfunction generateCacheHash(input: string): string {\n return createHash('md5').update(input).digest('hex');\n}\n\nfunction countLines(s: string) {\n let num = 0;\n\n for (const c of s) {\n if (c === '\\n') num++;\n }\n\n return num;\n}\n"],"mappings":";;;;;;;;AAWA,MAAM,cAAc,EACjB,OAAO;CACN,MAAM,EAAE,QAAQ,CAAC,eAAe,MAAM,CAAC,CAAC,QAAQ,MAAM;CACtD,YAAY,EAAE,QAAQ,CAAC,UAAU;CACjC,WAAW,EAAE,QAAQ,CAAC,UAAU;CACjC,CAAC,CACD,OAAO;AAEV,MAAM,aAAa,EAAE,OAAO;CAC1B,MAAM,EAAE,QAAQ;CAChB,KAAK,EAAE,KAAK,CAAC,UAAU;CACvB,MAAM,EAAE,QAAQ,CAAC,UAAU;CAC5B,CAAC;AAIF,SAAgB,gBAAgB,EAAE,WAAiC;AACjE,QAAO;EACL,MAAM;EACN,MAAM,KAAK,EAAE,WAAW,aAAa,eAAe,OAAO,UAAU,YAAY;GAC/E,IAAI,OAAO,MAAM,SAAS;GAC1B,MAAM,QAAQ,MAAM,WAAW;GAC/B,MAAM,SAAS,iBAAiB,MAAM;GACtC,MAAM,EAAE,YAAY,gBAAgB,WAAW,SAAS,YAAY,MAAM,MAAM;AAChF,OAAI,UACF,QAAO,KAAK,eAAe,CAAC,IAAI,UAAU,IAAI;GAGhD,IAAIA;GAEJ,MAAM,EAAE,yBAAyB,UAAU,KAAK,WAAW,CAAC;AAC5D,OAAI,CAAC,iBAAiB,wBAAwB;IAC5C,MAAM,WAAW;IACjB,MAAM,WAAW,GAAG,kBAAkB,SAAS,GAAG,kBAAkB,SAAS;IAE7E,MAAM,SAAS,MAAM,GAClB,SAAS,KAAK,KAAK,UAAU,SAAS,CAAC,CACvC,MAAM,YAAY,WAAW,MAAM,KAAK,MAAM,QAAQ,UAAU,CAAC,CAAC,CAAC,CACnE,YAAY,KAAK;AAEpB,QAAI,UAAU,OAAO,SAAS,kBAAkB,MAAM,CAAE,QAAO;AAC/D,YAAQ,YAAY;AAClB,WAAM,GAAG,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC;AAC7C,WAAM,GAAG,UACP,KAAK,KAAK,UAAU,SAAS,EAC7B,KAAK,UAAU;MACb,GAAG;MACH,MAAM,kBAAkB,MAAM;MAC/B,CAAsB,CACxB;;;GAIL,MAAM,aAAa,iBAAiB,KAAK,cAAc,eAAe,GAAG;GAEzE,IAAIC;AACJ,WAAQ,YAAY,MAApB;IACE,KAAK;AACH,qBAAgB;AAChB;IACF,KAAK;AACH,qBAAgB,WAAW;AAC3B;;AAGJ,OAAI,cACF,QAAO,OAAO,MAAM,KAAK,qBACvB;IAAE,YAAY;IAAe;IAAU,QAAQ;IAAO,EACtD,OAAO,KACR;AAGH,OAAI,SAAS,cACX,QAAO;IACL,MAAM,8BAA8B,KAAK,UAAU,OAAO,KAAK;IAC/D,KAAK;IACN;GAGH,MAAM,EAAE,aAAa,MAAM,OAAO;GAClC,MAAM,WAAW,MAAM,SAAS,MAAM,eAAe;IACnD;IAEA,QAAQ,KAAK,OAAO,WAAW,OAAO,OAAO,CAAC,GAAG,OAAO;IACxD;IACA,aAAa,OAAO;IACpB,WAAW;IACX,aAAa;IACd,CAAC;GAEF,MAAM,MAAM;IACV,MAAM,OAAO,SAAS,MAAM;IAC5B,KAAK,SAAS;IACf;AAED,SAAM,SAAS;AACf,UAAO;;EAEV;;AAGH,SAAS,kBAAkB,OAAuB;AAChD,QAAO,WAAW,MAAM,CAAC,OAAO,MAAM,CAAC,OAAO,MAAM;;AAGtD,SAAS,WAAW,GAAW;CAC7B,IAAI,MAAM;AAEV,MAAK,MAAM,KAAK,EACd,KAAI,MAAM,KAAM;AAGlB,QAAO"}
@@ -1,4 +1,4 @@
- import { n as metaLoaderGlob } from "./loaders-BjG6ii1-.js";
+ import { n as metaLoaderGlob } from "./loaders-BVwYfelL.js";
  import { load } from "js-yaml";
  import { z } from "zod";
 
@@ -78,4 +78,4 @@ function createMetaLoader({ getCore }, resolve = {}) {
 
  //#endregion
  export { createMetaLoader as t };
- //# sourceMappingURL=meta-u6oJ9ej1.js.map
+ //# sourceMappingURL=meta-DyieTM4Z.js.map
@@ -1 +1 @@
- {"version":3,"file":"meta-u6oJ9ej1.js","names":["metaCollection: MetaCollectionItem | undefined"],"sources":["../src/loaders/meta.ts"],"sourcesContent":["import type { Loader, LoaderInput } from '@/loaders/adapter';\nimport type { ConfigLoader } from '@/loaders/config';\nimport { load } from 'js-yaml';\nimport { z } from 'zod';\nimport { metaLoaderGlob } from '.';\nimport type { MetaCollectionItem } from '@/config/build';\n\nconst querySchema = z\n .object({\n collection: z.string().optional(),\n workspace: z.string().optional(),\n })\n .loose();\n\n/**\n * load meta files, fallback to bundler's built-in plugins when ?collection is unspecified.\n */\nexport function createMetaLoader(\n { getCore }: ConfigLoader,\n resolve: {\n json?: 'json' | 'js';\n yaml?: 'js';\n } = {},\n): Loader {\n const { json: resolveJson = 'js' } = resolve;\n\n function parse(filePath: string, source: string) {\n try {\n if (filePath.endsWith('.json')) return JSON.parse(source);\n if (filePath.endsWith('.yaml')) return load(source);\n } catch (e) {\n throw new Error(`invalid data in ${filePath}`, { cause: e });\n }\n\n throw new Error('Unknown file type ' + filePath);\n }\n\n function onMeta(source: string, { filePath, query }: LoaderInput) {\n const parsed = querySchema.safeParse(query);\n if (!parsed.success || !parsed.data.collection) return null;\n const { collection: collectionName, workspace } = parsed.data;\n\n return async (): Promise<unknown> => {\n let core = await getCore();\n if (workspace) {\n core = core.getWorkspaces().get(workspace) ?? core;\n }\n\n const collection = core.getCollection(collectionName);\n let metaCollection: MetaCollectionItem | undefined;\n\n switch (collection?.type) {\n case 'meta':\n metaCollection = collection;\n break;\n case 'docs':\n metaCollection = collection.meta;\n break;\n }\n\n const data = parse(filePath, source);\n\n if (!metaCollection) return data;\n return core.transformMeta(\n {\n collection: metaCollection,\n filePath,\n source,\n },\n data,\n );\n };\n }\n\n return {\n test: metaLoaderGlob,\n async load(input) {\n const result = onMeta(await input.getSource(), input);\n if (result === null) return null;\n const data = await result();\n\n if (input.filePath.endsWith('.json')) {\n return {\n moduleType: resolveJson,\n code:\n resolveJson === 'json'\n ? 
JSON.stringify(data)\n : `export default ${JSON.stringify(data)}`,\n };\n } else {\n return {\n moduleType: 'js',\n code: `export default ${JSON.stringify(data)}`,\n };\n }\n },\n bun: {\n load(source, input) {\n const result = onMeta(source, input);\n if (result === null)\n return {\n loader: 'object',\n exports: parse(input.filePath, source),\n };\n\n return result().then((data) => ({\n loader: 'object',\n exports: { default: data },\n }));\n },\n },\n };\n}\n"],"mappings":";;;;;AAOA,MAAM,cAAc,EACjB,OAAO;CACN,YAAY,EAAE,QAAQ,CAAC,UAAU;CACjC,WAAW,EAAE,QAAQ,CAAC,UAAU;CACjC,CAAC,CACD,OAAO;;;;AAKV,SAAgB,iBACd,EAAE,WACF,UAGI,EAAE,EACE;CACR,MAAM,EAAE,MAAM,cAAc,SAAS;CAErC,SAAS,MAAM,UAAkB,QAAgB;AAC/C,MAAI;AACF,OAAI,SAAS,SAAS,QAAQ,CAAE,QAAO,KAAK,MAAM,OAAO;AACzD,OAAI,SAAS,SAAS,QAAQ,CAAE,QAAO,KAAK,OAAO;WAC5C,GAAG;AACV,SAAM,IAAI,MAAM,mBAAmB,YAAY,EAAE,OAAO,GAAG,CAAC;;AAG9D,QAAM,IAAI,MAAM,uBAAuB,SAAS;;CAGlD,SAAS,OAAO,QAAgB,EAAE,UAAU,SAAsB;EAChE,MAAM,SAAS,YAAY,UAAU,MAAM;AAC3C,MAAI,CAAC,OAAO,WAAW,CAAC,OAAO,KAAK,WAAY,QAAO;EACvD,MAAM,EAAE,YAAY,gBAAgB,cAAc,OAAO;AAEzD,SAAO,YAA8B;GACnC,IAAI,OAAO,MAAM,SAAS;AAC1B,OAAI,UACF,QAAO,KAAK,eAAe,CAAC,IAAI,UAAU,IAAI;GAGhD,MAAM,aAAa,KAAK,cAAc,eAAe;GACrD,IAAIA;AAEJ,WAAQ,YAAY,MAApB;IACE,KAAK;AACH,sBAAiB;AACjB;IACF,KAAK;AACH,sBAAiB,WAAW;AAC5B;;GAGJ,MAAM,OAAO,MAAM,UAAU,OAAO;AAEpC,OAAI,CAAC,eAAgB,QAAO;AAC5B,UAAO,KAAK,cACV;IACE,YAAY;IACZ;IACA;IACD,EACD,KACD;;;AAIL,QAAO;EACL,MAAM;EACN,MAAM,KAAK,OAAO;GAChB,MAAM,SAAS,OAAO,MAAM,MAAM,WAAW,EAAE,MAAM;AACrD,OAAI,WAAW,KAAM,QAAO;GAC5B,MAAM,OAAO,MAAM,QAAQ;AAE3B,OAAI,MAAM,SAAS,SAAS,QAAQ,CAClC,QAAO;IACL,YAAY;IACZ,MACE,gBAAgB,SACZ,KAAK,UAAU,KAAK,GACpB,kBAAkB,KAAK,UAAU,KAAK;IAC7C;OAED,QAAO;IACL,YAAY;IACZ,MAAM,kBAAkB,KAAK,UAAU,KAAK;IAC7C;;EAGL,KAAK,EACH,KAAK,QAAQ,OAAO;GAClB,MAAM,SAAS,OAAO,QAAQ,MAAM;AACpC,OAAI,WAAW,KACb,QAAO;IACL,QAAQ;IACR,SAAS,MAAM,MAAM,UAAU,OAAO;IACvC;AAEH,UAAO,QAAQ,CAAC,MAAM,UAAU;IAC9B,QAAQ;IACR,SAAS,EAAE,SAAS,MAAM;IAC3B,EAAE;KAEN;EACF"}
+ {"version":3,"file":"meta-DyieTM4Z.js","names":["metaCollection: MetaCollectionItem | undefined"],"sources":["../src/loaders/meta.ts"],"sourcesContent":["import type { Loader, LoaderInput } from '@/loaders/adapter';\nimport type { ConfigLoader } from '@/loaders/config';\nimport { load } from 'js-yaml';\nimport { z } from 'zod';\nimport { metaLoaderGlob } from '.';\nimport type { MetaCollectionItem } from '@/config/build';\n\nconst querySchema = z\n .object({\n collection: z.string().optional(),\n workspace: z.string().optional(),\n })\n .loose();\n\n/**\n * load meta files, fallback to bundler's built-in plugins when ?collection is unspecified.\n */\nexport function createMetaLoader(\n { getCore }: ConfigLoader,\n resolve: {\n json?: 'json' | 'js';\n yaml?: 'js';\n } = {},\n): Loader {\n const { json: resolveJson = 'js' } = resolve;\n\n function parse(filePath: string, source: string) {\n try {\n if (filePath.endsWith('.json')) return JSON.parse(source);\n if (filePath.endsWith('.yaml')) return load(source);\n } catch (e) {\n throw new Error(`invalid data in ${filePath}`, { cause: e });\n }\n\n throw new Error('Unknown file type ' + filePath);\n }\n\n function onMeta(source: string, { filePath, query }: LoaderInput) {\n const parsed = querySchema.safeParse(query);\n if (!parsed.success || !parsed.data.collection) return null;\n const { collection: collectionName, workspace } = parsed.data;\n\n return async (): Promise<unknown> => {\n let core = await getCore();\n if (workspace) {\n core = core.getWorkspaces().get(workspace) ?? core;\n }\n\n const collection = core.getCollection(collectionName);\n let metaCollection: MetaCollectionItem | undefined;\n\n switch (collection?.type) {\n case 'meta':\n metaCollection = collection;\n break;\n case 'docs':\n metaCollection = collection.meta;\n break;\n }\n\n const data = parse(filePath, source);\n\n if (!metaCollection) return data;\n return core.transformMeta(\n {\n collection: metaCollection,\n filePath,\n source,\n },\n data,\n );\n };\n }\n\n return {\n test: metaLoaderGlob,\n async load(input) {\n const result = onMeta(await input.getSource(), input);\n if (result === null) return null;\n const data = await result();\n\n if (input.filePath.endsWith('.json')) {\n return {\n moduleType: resolveJson,\n code:\n resolveJson === 'json'\n ? 
JSON.stringify(data)\n : `export default ${JSON.stringify(data)}`,\n };\n } else {\n return {\n moduleType: 'js',\n code: `export default ${JSON.stringify(data)}`,\n };\n }\n },\n bun: {\n load(source, input) {\n const result = onMeta(source, input);\n if (result === null)\n return {\n loader: 'object',\n exports: parse(input.filePath, source),\n };\n\n return result().then((data) => ({\n loader: 'object',\n exports: { default: data },\n }));\n },\n },\n };\n}\n"],"mappings":";;;;;AAOA,MAAM,cAAc,EACjB,OAAO;CACN,YAAY,EAAE,QAAQ,CAAC,UAAU;CACjC,WAAW,EAAE,QAAQ,CAAC,UAAU;CACjC,CAAC,CACD,OAAO;;;;AAKV,SAAgB,iBACd,EAAE,WACF,UAGI,EAAE,EACE;CACR,MAAM,EAAE,MAAM,cAAc,SAAS;CAErC,SAAS,MAAM,UAAkB,QAAgB;AAC/C,MAAI;AACF,OAAI,SAAS,SAAS,QAAQ,CAAE,QAAO,KAAK,MAAM,OAAO;AACzD,OAAI,SAAS,SAAS,QAAQ,CAAE,QAAO,KAAK,OAAO;WAC5C,GAAG;AACV,SAAM,IAAI,MAAM,mBAAmB,YAAY,EAAE,OAAO,GAAG,CAAC;;AAG9D,QAAM,IAAI,MAAM,uBAAuB,SAAS;;CAGlD,SAAS,OAAO,QAAgB,EAAE,UAAU,SAAsB;EAChE,MAAM,SAAS,YAAY,UAAU,MAAM;AAC3C,MAAI,CAAC,OAAO,WAAW,CAAC,OAAO,KAAK,WAAY,QAAO;EACvD,MAAM,EAAE,YAAY,gBAAgB,cAAc,OAAO;AAEzD,SAAO,YAA8B;GACnC,IAAI,OAAO,MAAM,SAAS;AAC1B,OAAI,UACF,QAAO,KAAK,eAAe,CAAC,IAAI,UAAU,IAAI;GAGhD,MAAM,aAAa,KAAK,cAAc,eAAe;GACrD,IAAIA;AAEJ,WAAQ,YAAY,MAApB;IACE,KAAK;AACH,sBAAiB;AACjB;IACF,KAAK;AACH,sBAAiB,WAAW;AAC5B;;GAGJ,MAAM,OAAO,MAAM,UAAU,OAAO;AAEpC,OAAI,CAAC,eAAgB,QAAO;AAC5B,UAAO,KAAK,cACV;IACE,YAAY;IACZ;IACA;IACD,EACD,KACD;;;AAIL,QAAO;EACL,MAAM;EACN,MAAM,KAAK,OAAO;GAChB,MAAM,SAAS,OAAO,MAAM,MAAM,WAAW,EAAE,MAAM;AACrD,OAAI,WAAW,KAAM,QAAO;GAC5B,MAAM,OAAO,MAAM,QAAQ;AAE3B,OAAI,MAAM,SAAS,SAAS,QAAQ,CAClC,QAAO;IACL,YAAY;IACZ,MACE,gBAAgB,SACZ,KAAK,UAAU,KAAK,GACpB,kBAAkB,KAAK,UAAU,KAAK;IAC7C;OAED,QAAO;IACL,YAAY;IACZ,MAAM,kBAAkB,KAAK,UAAU,KAAK;IAC7C;;EAGL,KAAK,EACH,KAAK,QAAQ,OAAO;GAClB,MAAM,SAAS,OAAO,QAAQ,MAAM;AACpC,OAAI,WAAW,KACb,QAAO;IACL,QAAQ;IACR,SAAS,MAAM,MAAM,UAAU,OAAO;IACvC;AAEH,UAAO,QAAQ,CAAC,MAAM,UAAU;IAC9B,QAAQ;IACR,SAAS,EAAE,SAAS,MAAM;IAC3B,EAAE;KAEN;EACF"}
@@ -557,15 +557,15 @@ function toFullPath(file) {
  }
 
  //#endregion
- //#region src/utils/fuma-matter.ts
+ //#region src/utils/frontmatter.ts
  /**
- * Inspired by https://github.com/jonschlinkert/gray-matter
+ * Frontmatter parser - inspired by gray-matter
  */
  const regex = /^---\r?\n(.+?)\r?\n---\r?\n?/s;
  /**
- * parse frontmatter, it supports only yaml format
+ * Parse YAML frontmatter from markdown content
  */
- function fumaMatter(input) {
+ function parseFrontmatter(input) {
  const output = {
  matter: "",
  data: {},
@@ -707,7 +707,7 @@ async function generateDynamicIndexFile(ctx) {
  async function generateCollectionObjectEntry(collection, absolutePath) {
  const fullPath = path.default.relative(process.cwd(), absolutePath);
  const content = await indexFileCache.read(fullPath).catch(() => "");
- const parsed = fumaMatter(content);
+ const parsed = parseFrontmatter(content);
  const data = await core.transformFrontmatter({
  collection,
  filePath: fullPath,
@@ -1,4 +1,4 @@
- import { h as IndexFilePluginOptions } from "../core-CgLkZ4NQ.js";
+ import { h as IndexFilePluginOptions } from "../core-INx0uFn9.js";
  import { NextConfig } from "next";
 
  //#region src/next/index.d.ts
@@ -1,11 +1,11 @@
- import "../preset-D_quDsKp.js";
- import "../build-DbdeZyKK.js";
- import { t as loadConfig } from "../load-from-file-Bx1TjN0x.js";
- import { n as createCore, t as _Defaults } from "../core-CCsY8cxS.js";
- import "../codegen-IYa8t8xV.js";
- import { n as metaLoaderGlob, t as mdxLoaderGlob } from "../loaders-BjG6ii1-.js";
- import { t as indexFile } from "../index-file-DsOEtYCF.js";
- import "../fuma-matter-BWdOvvCf.js";
+ import "../frontmatter-Cq6XTjTb.js";
+ import { n as metaLoaderGlob, t as mdxLoaderGlob } from "../loaders-BVwYfelL.js";
+ import "../preset-DtKimtBh.js";
+ import "../build-DP5xMGSd.js";
+ import { n as createCore, t as _Defaults } from "../core-BJualF84.js";
+ import "../codegen-DleOVLNr.js";
+ import { t as loadConfig } from "../load-from-file-CSjo1gN1.js";
+ import { t as indexFile } from "../index-file-BVTnUkr9.js";
  import * as path$1 from "node:path";
 
  //#region src/next/index.ts
@@ -1,9 +1,9 @@
- import { n as createCore, t as _Defaults } from "../core-CCsY8cxS.js";
- import "../codegen-IYa8t8xV.js";
- import "../fuma-matter-BWdOvvCf.js";
- import { t as createMdxLoader } from "../mdx-DBBqLhht.js";
- import { i as toNode, n as createStandaloneConfigLoader } from "../config-s4QVhqzA.js";
- import { t as createMetaLoader } from "../meta-u6oJ9ej1.js";
+ import "../frontmatter-Cq6XTjTb.js";
+ import { t as createMdxLoader } from "../mdx-B8Ub7Bro.js";
+ import { n as createCore, t as _Defaults } from "../core-BJualF84.js";
+ import "../codegen-DleOVLNr.js";
+ import { n as toNode, o as createStandaloneConfigLoader } from "../adapter-Jwap8N8Y.js";
+ import { t as createMetaLoader } from "../meta-DyieTM4Z.js";
 
  //#region src/node/loader.ts
  const configLoader = createStandaloneConfigLoader({
@@ -1,2 +1,2 @@
- import { g as indexFile, h as IndexFilePluginOptions, m as IndexFilePlugin } from "../core-CgLkZ4NQ.js";
+ import { g as indexFile, h as IndexFilePluginOptions, m as IndexFilePlugin } from "../core-INx0uFn9.js";
  export { IndexFilePlugin, IndexFilePluginOptions, indexFile as default };
@@ -1,5 +1,5 @@
- import "../codegen-IYa8t8xV.js";
- import { t as indexFile } from "../index-file-DsOEtYCF.js";
- import "../fuma-matter-BWdOvvCf.js";
+ import "../frontmatter-Cq6XTjTb.js";
+ import "../codegen-DleOVLNr.js";
+ import { t as indexFile } from "../index-file-BVTnUkr9.js";
 
  export { indexFile as default };
@@ -1,4 +1,4 @@
- import { s as Plugin } from "../core-CgLkZ4NQ.js";
+ import { s as Plugin } from "../core-INx0uFn9.js";
 
  //#region src/plugins/json-schema.d.ts
  interface JSONSchemaOptions {
@@ -1,6 +1,6 @@
- import path from "node:path";
- import fs from "node:fs/promises";
  import { z } from "zod";
+ import fs from "node:fs/promises";
+ import path from "node:path";
 
  //#region src/plugins/json-schema.ts
  /**
@@ -1,4 +1,4 @@
- import { s as Plugin } from "../core-CgLkZ4NQ.js";
+ import { s as Plugin } from "../core-INx0uFn9.js";
 
  //#region src/plugins/last-modified.d.ts
  type VersionControlFn = (filePath: string) => Promise<Date | null | undefined>;
@@ -1,4 +1,4 @@
- import { n as ident } from "../codegen-IYa8t8xV.js";
+ import { n as ident } from "../codegen-DleOVLNr.js";
  import path from "node:path";
  import { x } from "tinyexec";
 
@@ -58,4 +58,4 @@ function applyMdxPreset(options = {}) {
 
  //#endregion
  export { applyMdxPreset as t };
- //# sourceMappingURL=preset-D_quDsKp.js.map
+ //# sourceMappingURL=preset-DtKimtBh.js.map
@@ -1 +1 @@
- {"version":3,"file":"preset-D_quDsKp.js","names":[],"sources":["../src/config/preset.ts"],"sourcesContent":["import type { ProcessorOptions } from '@mdx-js/mdx';\nimport type { Pluggable } from 'unified';\nimport type * as Plugins from '@hanzo/docs-core/mdx-plugins';\nimport type { BuildEnvironment } from './build';\n\ntype ResolvePlugins = Pluggable[] | ((v: Pluggable[]) => Pluggable[]);\n\nexport type DefaultMDXOptions = Omit<\n NonNullable<ProcessorOptions>,\n 'rehypePlugins' | 'remarkPlugins' | '_ctx'\n> & {\n rehypePlugins?: ResolvePlugins;\n remarkPlugins?: ResolvePlugins;\n\n /**\n * Properties to export from `vfile.data`\n */\n valueToExport?: string[];\n\n remarkStructureOptions?: Plugins.StructureOptions | false;\n remarkHeadingOptions?: Plugins.RemarkHeadingOptions;\n remarkImageOptions?: Plugins.RemarkImageOptions | false;\n remarkCodeTabOptions?: Plugins.RemarkCodeTabOptions | false;\n remarkNpmOptions?: Plugins.RemarkNpmOptions | false;\n rehypeCodeOptions?: Plugins.RehypeCodeOptions | false;\n};\n\nfunction pluginOption(\n def: (v: Pluggable[]) => (Pluggable | false)[],\n options: ResolvePlugins = [],\n): Pluggable[] {\n const list = def(Array.isArray(options) ? options : []).filter(Boolean) as Pluggable[];\n\n if (typeof options === 'function') {\n return options(list);\n }\n\n return list;\n}\n\nexport type MDXPresetOptions =\n | ({ preset?: 'hanzo-docs' } & DefaultMDXOptions)\n | ({\n preset: 'minimal';\n } & ProcessorOptions);\n\n/**\n * apply MDX processor presets\n */\nexport function applyMdxPreset(\n options: MDXPresetOptions = {},\n): (environment: BuildEnvironment) => Promise<ProcessorOptions> {\n return async (environment = 'bundler') => {\n if (options.preset === 'minimal') return options;\n\n const plugins = await import('@hanzo/docs-core/mdx-plugins');\n const {\n valueToExport = [],\n rehypeCodeOptions,\n remarkImageOptions,\n remarkHeadingOptions,\n remarkStructureOptions,\n remarkCodeTabOptions,\n remarkNpmOptions,\n ...mdxOptions\n } = options;\n\n const remarkPlugins = pluginOption(\n (v) => [\n plugins.remarkGfm,\n [\n plugins.remarkHeading,\n {\n generateToc: false,\n ...remarkHeadingOptions,\n },\n ],\n remarkImageOptions !== false && [\n plugins.remarkImage,\n {\n ...remarkImageOptions,\n useImport: remarkImageOptions?.useImport ?? environment === 'bundler',\n },\n ],\n 'remarkCodeTab' in plugins &&\n remarkCodeTabOptions !== false && [plugins.remarkCodeTab, remarkCodeTabOptions],\n 'remarkNpm' in plugins &&\n remarkNpmOptions !== false && [plugins.remarkNpm, remarkNpmOptions],\n ...v,\n remarkStructureOptions !== false && [\n plugins.remarkStructure,\n {\n exportAs: 'structuredData',\n ...remarkStructureOptions,\n } satisfies Plugins.StructureOptions,\n ],\n valueToExport.length > 0 &&\n (() => {\n return (_, file) => {\n file.data['mdx-export'] ??= [];\n\n for (const name of valueToExport) {\n if (!(name in file.data)) continue;\n\n file.data['mdx-export'].push({\n name,\n value: file.data[name],\n });\n }\n };\n }),\n ],\n mdxOptions.remarkPlugins,\n );\n\n const rehypePlugins = pluginOption(\n (v) => [\n rehypeCodeOptions !== false && [plugins.rehypeCode, rehypeCodeOptions],\n ...v,\n plugins.rehypeToc,\n ],\n mdxOptions.rehypePlugins,\n );\n\n return {\n ...mdxOptions,\n outputFormat: environment === 'runtime' ? 
'function-body' : mdxOptions.outputFormat,\n remarkPlugins,\n rehypePlugins,\n };\n };\n}\n"],"mappings":";AA2BA,SAAS,aACP,KACA,UAA0B,EAAE,EACf;CACb,MAAM,OAAO,IAAI,MAAM,QAAQ,QAAQ,GAAG,UAAU,EAAE,CAAC,CAAC,OAAO,QAAQ;AAEvE,KAAI,OAAO,YAAY,WACrB,QAAO,QAAQ,KAAK;AAGtB,QAAO;;;;;AAYT,SAAgB,eACd,UAA4B,EAAE,EACgC;AAC9D,QAAO,OAAO,cAAc,cAAc;AACxC,MAAI,QAAQ,WAAW,UAAW,QAAO;EAEzC,MAAM,UAAU,MAAM,OAAO;EAC7B,MAAM,EACJ,gBAAgB,EAAE,EAClB,mBACA,oBACA,sBACA,wBACA,sBACA,kBACA,GAAG,eACD;EAEJ,MAAM,gBAAgB,cACnB,MAAM;GACL,QAAQ;GACR,CACE,QAAQ,eACR;IACE,aAAa;IACb,GAAG;IACJ,CACF;GACD,uBAAuB,SAAS,CAC9B,QAAQ,aACR;IACE,GAAG;IACH,WAAW,oBAAoB,aAAa,gBAAgB;IAC7D,CACF;GACD,mBAAmB,WACjB,yBAAyB,SAAS,CAAC,QAAQ,eAAe,qBAAqB;GACjF,eAAe,WACb,qBAAqB,SAAS,CAAC,QAAQ,WAAW,iBAAiB;GACrE,GAAG;GACH,2BAA2B,SAAS,CAClC,QAAQ,iBACR;IACE,UAAU;IACV,GAAG;IACJ,CACF;GACD,cAAc,SAAS,YACd;AACL,YAAQ,GAAG,SAAS;AAClB,UAAK,KAAK,kBAAkB,EAAE;AAE9B,UAAK,MAAM,QAAQ,eAAe;AAChC,UAAI,EAAE,QAAQ,KAAK,MAAO;AAE1B,WAAK,KAAK,cAAc,KAAK;OAC3B;OACA,OAAO,KAAK,KAAK;OAClB,CAAC;;;;GAIX,EACD,WAAW,cACZ;EAED,MAAM,gBAAgB,cACnB,MAAM;GACL,sBAAsB,SAAS,CAAC,QAAQ,YAAY,kBAAkB;GACtE,GAAG;GACH,QAAQ;GACT,EACD,WAAW,cACZ;AAED,SAAO;GACL,GAAG;GACH,cAAc,gBAAgB,YAAY,kBAAkB,WAAW;GACvE;GACA;GACD"}
+ {"version":3,"file":"preset-DtKimtBh.js","names":[],"sources":["../src/config/preset.ts"],"sourcesContent":["import type { ProcessorOptions } from '@mdx-js/mdx';\nimport type { Pluggable } from 'unified';\nimport type * as Plugins from '@hanzo/docs-core/mdx-plugins';\nimport type { BuildEnvironment } from './build';\n\ntype ResolvePlugins = Pluggable[] | ((v: Pluggable[]) => Pluggable[]);\n\nexport type DefaultMDXOptions = Omit<\n NonNullable<ProcessorOptions>,\n 'rehypePlugins' | 'remarkPlugins' | '_ctx'\n> & {\n rehypePlugins?: ResolvePlugins;\n remarkPlugins?: ResolvePlugins;\n\n /**\n * Properties to export from `vfile.data`\n */\n valueToExport?: string[];\n\n remarkStructureOptions?: Plugins.StructureOptions | false;\n remarkHeadingOptions?: Plugins.RemarkHeadingOptions;\n remarkImageOptions?: Plugins.RemarkImageOptions | false;\n remarkCodeTabOptions?: Plugins.RemarkCodeTabOptions | false;\n remarkNpmOptions?: Plugins.RemarkNpmOptions | false;\n rehypeCodeOptions?: Plugins.RehypeCodeOptions | false;\n};\n\nfunction pluginOption(\n def: (v: Pluggable[]) => (Pluggable | false)[],\n options: ResolvePlugins = [],\n): Pluggable[] {\n const list = def(Array.isArray(options) ? options : []).filter(Boolean) as Pluggable[];\n\n if (typeof options === 'function') {\n return options(list);\n }\n\n return list;\n}\n\nexport type MDXPresetOptions =\n | ({ preset?: 'hanzo-docs' } & DefaultMDXOptions)\n | ({\n preset: 'minimal';\n } & ProcessorOptions);\n\n/**\n * apply MDX processor presets\n */\nexport function applyMdxPreset(\n options: MDXPresetOptions = {},\n): (environment: BuildEnvironment) => Promise<ProcessorOptions> {\n return async (environment = 'bundler') => {\n if (options.preset === 'minimal') return options;\n\n const plugins = await import('@hanzo/docs-core/mdx-plugins');\n const {\n valueToExport = [],\n rehypeCodeOptions,\n remarkImageOptions,\n remarkHeadingOptions,\n remarkStructureOptions,\n remarkCodeTabOptions,\n remarkNpmOptions,\n ...mdxOptions\n } = options;\n\n const remarkPlugins = pluginOption(\n (v) => [\n plugins.remarkGfm,\n [\n plugins.remarkHeading,\n {\n generateToc: false,\n ...remarkHeadingOptions,\n },\n ],\n remarkImageOptions !== false && [\n plugins.remarkImage,\n {\n ...remarkImageOptions,\n useImport: remarkImageOptions?.useImport ?? environment === 'bundler',\n },\n ],\n 'remarkCodeTab' in plugins &&\n remarkCodeTabOptions !== false && [plugins.remarkCodeTab, remarkCodeTabOptions],\n 'remarkNpm' in plugins &&\n remarkNpmOptions !== false && [plugins.remarkNpm, remarkNpmOptions],\n ...v,\n remarkStructureOptions !== false && [\n plugins.remarkStructure,\n {\n exportAs: 'structuredData',\n ...remarkStructureOptions,\n } satisfies Plugins.StructureOptions,\n ],\n valueToExport.length > 0 &&\n (() => {\n return (_, file) => {\n file.data['mdx-export'] ??= [];\n\n for (const name of valueToExport) {\n if (!(name in file.data)) continue;\n\n file.data['mdx-export'].push({\n name,\n value: file.data[name],\n });\n }\n };\n }),\n ],\n mdxOptions.remarkPlugins,\n );\n\n const rehypePlugins = pluginOption(\n (v) => [\n rehypeCodeOptions !== false && [plugins.rehypeCode, rehypeCodeOptions],\n ...v,\n plugins.rehypeToc,\n ],\n mdxOptions.rehypePlugins,\n );\n\n return {\n ...mdxOptions,\n outputFormat: environment === 'runtime' ? 
'function-body' : mdxOptions.outputFormat,\n remarkPlugins,\n rehypePlugins,\n };\n };\n}\n"],"mappings":";AA2BA,SAAS,aACP,KACA,UAA0B,EAAE,EACf;CACb,MAAM,OAAO,IAAI,MAAM,QAAQ,QAAQ,GAAG,UAAU,EAAE,CAAC,CAAC,OAAO,QAAQ;AAEvE,KAAI,OAAO,YAAY,WACrB,QAAO,QAAQ,KAAK;AAGtB,QAAO;;;;;AAYT,SAAgB,eACd,UAA4B,EAAE,EACgC;AAC9D,QAAO,OAAO,cAAc,cAAc;AACxC,MAAI,QAAQ,WAAW,UAAW,QAAO;EAEzC,MAAM,UAAU,MAAM,OAAO;EAC7B,MAAM,EACJ,gBAAgB,EAAE,EAClB,mBACA,oBACA,sBACA,wBACA,sBACA,kBACA,GAAG,eACD;EAEJ,MAAM,gBAAgB,cACnB,MAAM;GACL,QAAQ;GACR,CACE,QAAQ,eACR;IACE,aAAa;IACb,GAAG;IACJ,CACF;GACD,uBAAuB,SAAS,CAC9B,QAAQ,aACR;IACE,GAAG;IACH,WAAW,oBAAoB,aAAa,gBAAgB;IAC7D,CACF;GACD,mBAAmB,WACjB,yBAAyB,SAAS,CAAC,QAAQ,eAAe,qBAAqB;GACjF,eAAe,WACb,qBAAqB,SAAS,CAAC,QAAQ,WAAW,iBAAiB;GACrE,GAAG;GACH,2BAA2B,SAAS,CAClC,QAAQ,iBACR;IACE,UAAU;IACV,GAAG;IACJ,CACF;GACD,cAAc,SAAS,YACd;AACL,YAAQ,GAAG,SAAS;AAClB,UAAK,KAAK,kBAAkB,EAAE;AAE9B,UAAK,MAAM,QAAQ,eAAe;AAChC,UAAI,EAAE,QAAQ,KAAK,MAAO;AAE1B,WAAK,KAAK,cAAc,KAAK;OAC3B;OACA,OAAO,KAAK,KAAK;OAClB,CAAC;;;;GAIX,EACD,WAAW,cACZ;EAED,MAAM,gBAAgB,cACnB,MAAM;GACL,sBAAsB,SAAS,CAAC,QAAQ,YAAY,kBAAkB;GACtE,GAAG;GACH,QAAQ;GACT,EACD,WAAW,cACZ;AAED,SAAO;GACL,GAAG;GACH,cAAc,gBAAgB,YAAY,kBAAkB,WAAW;GACvE;GACA;GACD"}
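The `preset-DtKimtBh.js.map` entry above embeds `src/config/preset.ts`, which exports `applyMdxPreset(options)` returning an async `(environment) => ProcessorOptions` resolver. A minimal usage sketch follows, assuming the function is re-exported from the package's config entry point (the public import path is not confirmed by this diff, and the option values are illustrative only):

```ts
// Sketch only: the import path is an assumption, not confirmed by the diff.
import { applyMdxPreset } from '@hanzo/docs-mdx/config';

const resolveOptions = applyMdxPreset({
  // fields from DefaultMDXOptions in the embedded preset.ts source; values are illustrative
  valueToExport: ['structuredData'],          // copy these vfile.data keys into `mdx-export`
  rehypeCodeOptions: false,                   // `false` drops the rehypeCode plugin
  remarkPlugins: (defaults) => [...defaults], // plugins accept a list or a resolver callback
});

// 'bundler' is the default environment; 'runtime' forces outputFormat: 'function-body'.
const processorOptions = await resolveOptions('runtime');
```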
@@ -1,6 +1,6 @@
- import { t as fumaMatter } from "./fuma-matter-BWdOvvCf.js";
- import * as path$1 from "node:path";
+ import { t as parseFrontmatter } from "./frontmatter-Cq6XTjTb.js";
  import * as fs$1 from "node:fs/promises";
+ import * as path$1 from "node:path";
  import { unified } from "unified";
  import { visit } from "unist-util-visit";
  import { remarkHeading } from "@hanzo/docs-core/mdx-plugins";
@@ -201,7 +201,7 @@ function remarkInclude() {
  };
  }
  const parser = _getProcessor(ext === ".mdx" ? "mdx" : "md");
- const parsed = fumaMatter(content);
+ const parsed = parseFrontmatter(content);
  const targetFile = new VFile({
  path: targetPath,
  value: parsed.content,
@@ -244,4 +244,4 @@ function remarkInclude() {
 
  //#endregion
  export { flattenNode as n, remarkInclude as t };
- //# sourceMappingURL=remark-include-BnRd6OBo.js.map
+ //# sourceMappingURL=remark-include-CIiVrABN.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"remark-include-CIiVrABN.js","names":["newChildren: RootContent[]","extractedLines: string[]","ElementLikeTypes: ElementLikeContent['type'][]","attributes: Record<string, string | null>","nodes: RootContent[] | undefined","content: string","fs","path","queue: Promise<void>[]"],"sources":["../src/loaders/mdx/remark-unravel.ts","../src/loaders/mdx/mdast-utils.ts","../src/loaders/mdx/remark-include.ts"],"sourcesContent":["// from internal remark plugins in https://github.com/mdx-js/mdx/blob/main/packages/mdx/lib/plugin/remark-mark-and-unravel.js\n// we need to ensure consistency with MDX.js when parsing embed content in `remark-include`\nimport { visit } from 'unist-util-visit';\nimport type { Transformer } from 'unified';\nimport type { Root, RootContent } from 'mdast';\n\nexport function remarkMarkAndUnravel(): Transformer<Root, Root> {\n return (tree) => {\n visit(tree, function (node, index, parent) {\n let offset = -1;\n let all = true;\n let oneOrMore = false;\n\n if (parent && typeof index === 'number' && node.type === 'paragraph') {\n const children = node.children;\n\n while (++offset < children.length) {\n const child = children[offset];\n\n if (child.type === 'mdxJsxTextElement' || child.type === 'mdxTextExpression') {\n oneOrMore = true;\n } else if (child.type === 'text' && child.value.trim().length === 0) {\n // Empty.\n } else {\n all = false;\n break;\n }\n }\n\n if (all && oneOrMore) {\n offset = -1;\n const newChildren: RootContent[] = [];\n\n while (++offset < children.length) {\n const child = children[offset];\n\n if (child.type === 'mdxJsxTextElement') {\n // @ts-expect-error: mutate because it is faster; content model is fine.\n child.type = 'mdxJsxFlowElement';\n }\n\n if (child.type === 'mdxTextExpression') {\n // @ts-expect-error: mutate because it is faster; content model is fine.\n child.type = 'mdxFlowExpression';\n }\n\n if (child.type === 'text' && /^[\\t\\r\\n ]+$/.test(String(child.value))) {\n // Empty.\n } else {\n newChildren.push(child);\n }\n }\n\n parent.children.splice(index, 1, ...newChildren);\n return index;\n }\n }\n });\n };\n}\n","import type { RootContent } from 'mdast';\n\nexport function flattenNode(node: RootContent): string {\n if ('children' in node) return node.children.map((child) => flattenNode(child)).join('');\n\n if ('value' in node) return node.value;\n\n return '';\n}\n","import { type Processor, type Transformer, unified } from 'unified';\nimport { visit } from 'unist-util-visit';\nimport type { Code, Node, Root, RootContent } from 'mdast';\nimport * as path from 'node:path';\nimport * as fs from 'node:fs/promises';\nimport { parseFrontmatter } from '@/utils/frontmatter';\nimport type { MdxJsxFlowElement, MdxJsxTextElement } from 'mdast-util-mdx-jsx';\nimport { remarkHeading } from '@hanzo/docs-core/mdx-plugins';\nimport { VFile } from 'vfile';\nimport type { Directives } from 'mdast-util-directive';\nimport { remarkMarkAndUnravel } from '@/loaders/mdx/remark-unravel';\nimport { flattenNode } from './mdast-utils';\n\n/**\n * VS Code–style region extraction\n * Adapted from VitePress:\n * https://github.com/vuejs/vitepress/blob/main/src/node/markdown/plugins/snippet.ts\n */\n\n// region marker regexes\nconst REGION_MARKERS = [\n {\n start: /^\\s*\\/\\/\\s*#?region\\b\\s*(.*?)\\s*$/,\n end: /^\\s*\\/\\/\\s*#?endregion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*<!--\\s*#?region\\b\\s*(.*?)\\s*-->/,\n end: /^\\s*<!--\\s*#?endregion\\b\\s*(.*?)\\s*-->/,\n },\n {\n start: 
/^\\s*\\/\\*\\s*#region\\b\\s*(.*?)\\s*\\*\\//,\n end: /^\\s*\\/\\*\\s*#endregion\\b\\s*(.*?)\\s*\\*\\//,\n },\n {\n start: /^\\s*#[rR]egion\\b\\s*(.*?)\\s*$/,\n end: /^\\s*#[eE]nd ?[rR]egion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*#\\s*#?region\\b\\s*(.*?)\\s*$/,\n end: /^\\s*#\\s*#?endregion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*(?:--|::|@?REM)\\s*#region\\b\\s*(.*?)\\s*$/,\n end: /^\\s*(?:--|::|@?REM)\\s*#endregion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*#pragma\\s+region\\b\\s*(.*?)\\s*$/,\n end: /^\\s*#pragma\\s+endregion\\b\\s*(.*?)\\s*$/,\n },\n {\n start: /^\\s*\\(\\*\\s*#region\\b\\s*(.*?)\\s*\\*\\)/,\n end: /^\\s*\\(\\*\\s*#endregion\\b\\s*(.*?)\\s*\\*\\)/,\n },\n];\n\nfunction dedent(lines: string[]): string {\n const minIndent = lines.reduce((min, line) => {\n const match = line.match(/^(\\s*)\\S/);\n return match ? Math.min(min, match[1].length) : min;\n }, Infinity);\n\n return minIndent === Infinity\n ? lines.join('\\n')\n : lines.map((l) => l.slice(minIndent)).join('\\n');\n}\n\nfunction extractCodeRegion(content: string, regionName: string): string {\n const lines = content.split('\\n');\n\n for (let i = 0; i < lines.length; i++) {\n for (const re of REGION_MARKERS) {\n let match = re.start.exec(lines[i]);\n if (match?.[1] !== regionName) continue;\n\n let depth = 1;\n const extractedLines: string[] = [];\n for (let j = i + 1; j < lines.length; j++) {\n match = re.start.exec(lines[j]);\n if (match) {\n depth++;\n continue;\n }\n\n match = re.end.exec(lines[j]);\n if (match) {\n if (match[1] === regionName) depth = 0;\n else if (match[1] === '') depth--;\n else continue;\n\n if (depth > 0) continue;\n return dedent(extractedLines);\n } else {\n extractedLines.push(lines[j]);\n }\n }\n }\n }\n throw new Error(`Region \"${regionName}\" not found`);\n}\n\nexport interface Params {\n lang?: string;\n meta?: string;\n}\n\nconst ElementLikeTypes: ElementLikeContent['type'][] = [\n 'mdxJsxFlowElement',\n 'mdxJsxTextElement',\n 'containerDirective',\n 'textDirective',\n 'leafDirective',\n];\ntype ElementLikeContent = MdxJsxFlowElement | MdxJsxTextElement | Directives;\n\nfunction isElementLike(node: Node): node is ElementLikeContent {\n return ElementLikeTypes.includes(node.type as ElementLikeContent['type']);\n}\n\nfunction parseElementAttributes(\n element: ElementLikeContent,\n): Record<string, string | null | undefined> {\n if (Array.isArray(element.attributes)) {\n const attributes: Record<string, string | null> = {};\n\n for (const attr of element.attributes) {\n if (\n attr.type === 'mdxJsxAttribute' &&\n (typeof attr.value === 'string' || attr.value === null)\n ) {\n attributes[attr.name] = attr.value;\n }\n }\n\n return attributes;\n }\n\n return element.attributes ?? 
{};\n}\n\nfunction parseSpecifier(specifier: string): {\n file: string;\n section?: string;\n} {\n const idx = specifier.lastIndexOf('#');\n if (idx === -1) return { file: specifier };\n\n return {\n file: specifier.slice(0, idx),\n section: specifier.slice(idx + 1),\n };\n}\n\nfunction extractSection(root: Root, section: string): Root | undefined {\n let nodes: RootContent[] | undefined;\n let capturingHeadingContent = false;\n\n visit(root, (node) => {\n if (node.type === 'heading') {\n if (capturingHeadingContent) {\n return false;\n }\n\n if (node.data?.hProperties?.id === section) {\n capturingHeadingContent = true;\n nodes = [node];\n return 'skip';\n }\n\n return;\n }\n\n if (capturingHeadingContent) {\n nodes?.push(node as RootContent);\n return 'skip';\n }\n\n if (isElementLike(node) && node.name === 'section') {\n const attributes = parseElementAttributes(node);\n\n if (attributes.id === section) {\n nodes = node.children;\n return false;\n }\n }\n });\n\n if (nodes)\n return {\n type: 'root',\n children: nodes,\n };\n}\n\nexport function remarkInclude(this: Processor): Transformer<Root, Root> {\n const TagName = 'include';\n\n const embedContent = async (\n targetPath: string,\n heading: string | undefined,\n params: Params,\n parent: VFile,\n ) => {\n const { _getProcessor = () => this, _compiler } = parent.data;\n let content: string;\n try {\n content = (await fs.readFile(targetPath)).toString();\n } catch (e) {\n throw new Error(\n `failed to read file ${targetPath}\\n${e instanceof Error ? e.message : String(e)}`,\n { cause: e },\n );\n }\n\n const ext = path.extname(targetPath);\n _compiler?.addDependency(targetPath);\n // For non-Markdown files, support VS Code–style region extraction\n if (params.lang || (ext !== '.md' && ext !== '.mdx')) {\n const lang = params.lang ?? ext.slice(1);\n let value = content;\n if (heading) {\n value = extractCodeRegion(content, heading.trim());\n }\n return {\n type: 'code',\n lang,\n meta: params.meta,\n value,\n data: {},\n } satisfies Code;\n }\n\n const parser = _getProcessor(ext === '.mdx' ? 'mdx' : 'md');\n const parsed = parseFrontmatter(content);\n const targetFile = new VFile({\n path: targetPath,\n value: parsed.content,\n data: {\n ...parent.data,\n frontmatter: parsed.data as Record<string, unknown>,\n },\n });\n let mdast = parser.parse(targetFile) as Root;\n const baseProcessor = unified().use(remarkMarkAndUnravel);\n\n if (heading) {\n // parse headings before extraction\n const extracted = extractSection(await baseProcessor.use(remarkHeading).run(mdast), heading);\n if (!extracted)\n throw new Error(\n `Cannot find section ${heading} in ${targetPath}, make sure you have encapsulated the section in a <section id=\"${heading}\"> tag, or a :::section directive with remark-directive configured.`,\n );\n\n mdast = extracted;\n } else {\n mdast = await baseProcessor.run(mdast);\n }\n\n await update(mdast, targetFile);\n return mdast;\n };\n\n async function update(tree: Root, file: VFile) {\n const queue: Promise<void>[] = [];\n\n visit(tree, ElementLikeTypes, (_node, _, parent) => {\n const node = _node as ElementLikeContent;\n if (node.name !== TagName) return;\n\n const specifier = flattenNode(node);\n if (specifier.length === 0) return 'skip';\n\n const attributes = parseElementAttributes(node);\n const { file: relativePath, section } = parseSpecifier(specifier);\n const targetPath = path.resolve('cwd' in attributes ? 
file.cwd : file.dirname!, relativePath);\n\n queue.push(\n embedContent(targetPath, section, attributes, file).then((replace) => {\n Object.assign(parent && parent.type === 'paragraph' ? parent : node, replace);\n }),\n );\n\n return 'skip';\n });\n\n await Promise.all(queue);\n }\n\n return async (tree, file) => {\n await update(tree, file);\n };\n}\n"],"mappings":";;;;;;;;;AAMA,SAAgB,uBAAgD;AAC9D,SAAQ,SAAS;AACf,QAAM,MAAM,SAAU,MAAM,OAAO,QAAQ;GACzC,IAAI,SAAS;GACb,IAAI,MAAM;GACV,IAAI,YAAY;AAEhB,OAAI,UAAU,OAAO,UAAU,YAAY,KAAK,SAAS,aAAa;IACpE,MAAM,WAAW,KAAK;AAEtB,WAAO,EAAE,SAAS,SAAS,QAAQ;KACjC,MAAM,QAAQ,SAAS;AAEvB,SAAI,MAAM,SAAS,uBAAuB,MAAM,SAAS,oBACvD,aAAY;cACH,MAAM,SAAS,UAAU,MAAM,MAAM,MAAM,CAAC,WAAW,GAAG,QAE9D;AACL,YAAM;AACN;;;AAIJ,QAAI,OAAO,WAAW;AACpB,cAAS;KACT,MAAMA,cAA6B,EAAE;AAErC,YAAO,EAAE,SAAS,SAAS,QAAQ;MACjC,MAAM,QAAQ,SAAS;AAEvB,UAAI,MAAM,SAAS,oBAEjB,OAAM,OAAO;AAGf,UAAI,MAAM,SAAS,oBAEjB,OAAM,OAAO;AAGf,UAAI,MAAM,SAAS,UAAU,eAAe,KAAK,OAAO,MAAM,MAAM,CAAC,EAAE,OAGrE,aAAY,KAAK,MAAM;;AAI3B,YAAO,SAAS,OAAO,OAAO,GAAG,GAAG,YAAY;AAChD,YAAO;;;IAGX;;;;;;ACvDN,SAAgB,YAAY,MAA2B;AACrD,KAAI,cAAc,KAAM,QAAO,KAAK,SAAS,KAAK,UAAU,YAAY,MAAM,CAAC,CAAC,KAAK,GAAG;AAExF,KAAI,WAAW,KAAM,QAAO,KAAK;AAEjC,QAAO;;;;;;;;;;ACaT,MAAM,iBAAiB;CACrB;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACD;EACE,OAAO;EACP,KAAK;EACN;CACF;AAED,SAAS,OAAO,OAAyB;CACvC,MAAM,YAAY,MAAM,QAAQ,KAAK,SAAS;EAC5C,MAAM,QAAQ,KAAK,MAAM,WAAW;AACpC,SAAO,QAAQ,KAAK,IAAI,KAAK,MAAM,GAAG,OAAO,GAAG;IAC/C,SAAS;AAEZ,QAAO,cAAc,WACjB,MAAM,KAAK,KAAK,GAChB,MAAM,KAAK,MAAM,EAAE,MAAM,UAAU,CAAC,CAAC,KAAK,KAAK;;AAGrD,SAAS,kBAAkB,SAAiB,YAA4B;CACtE,MAAM,QAAQ,QAAQ,MAAM,KAAK;AAEjC,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,MAAK,MAAM,MAAM,gBAAgB;EAC/B,IAAI,QAAQ,GAAG,MAAM,KAAK,MAAM,GAAG;AACnC,MAAI,QAAQ,OAAO,WAAY;EAE/B,IAAI,QAAQ;EACZ,MAAMC,iBAA2B,EAAE;AACnC,OAAK,IAAI,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACzC,WAAQ,GAAG,MAAM,KAAK,MAAM,GAAG;AAC/B,OAAI,OAAO;AACT;AACA;;AAGF,WAAQ,GAAG,IAAI,KAAK,MAAM,GAAG;AAC7B,OAAI,OAAO;AACT,QAAI,MAAM,OAAO,WAAY,SAAQ;aAC5B,MAAM,OAAO,GAAI;QACrB;AAEL,QAAI,QAAQ,EAAG;AACf,WAAO,OAAO,eAAe;SAE7B,gBAAe,KAAK,MAAM,GAAG;;;AAKrC,OAAM,IAAI,MAAM,WAAW,WAAW,aAAa;;AAQrD,MAAMC,mBAAiD;CACrD;CACA;CACA;CACA;CACA;CACD;AAGD,SAAS,cAAc,MAAwC;AAC7D,QAAO,iBAAiB,SAAS,KAAK,KAAmC;;AAG3E,SAAS,uBACP,SAC2C;AAC3C,KAAI,MAAM,QAAQ,QAAQ,WAAW,EAAE;EACrC,MAAMC,aAA4C,EAAE;AAEpD,OAAK,MAAM,QAAQ,QAAQ,WACzB,KACE,KAAK,SAAS,sBACb,OAAO,KAAK,UAAU,YAAY,KAAK,UAAU,MAElD,YAAW,KAAK,QAAQ,KAAK;AAIjC,SAAO;;AAGT,QAAO,QAAQ,cAAc,EAAE;;AAGjC,SAAS,eAAe,WAGtB;CACA,MAAM,MAAM,UAAU,YAAY,IAAI;AACtC,KAAI,QAAQ,GAAI,QAAO,EAAE,MAAM,WAAW;AAE1C,QAAO;EACL,MAAM,UAAU,MAAM,GAAG,IAAI;EAC7B,SAAS,UAAU,MAAM,MAAM,EAAE;EAClC;;AAGH,SAAS,eAAe,MAAY,SAAmC;CACrE,IAAIC;CACJ,IAAI,0BAA0B;AAE9B,OAAM,OAAO,SAAS;AACpB,MAAI,KAAK,SAAS,WAAW;AAC3B,OAAI,wBACF,QAAO;AAGT,OAAI,KAAK,MAAM,aAAa,OAAO,SAAS;AAC1C,8BAA0B;AAC1B,YAAQ,CAAC,KAAK;AACd,WAAO;;AAGT;;AAGF,MAAI,yBAAyB;AAC3B,UAAO,KAAK,KAAoB;AAChC,UAAO;;AAGT,MAAI,cAAc,KAAK,IAAI,KAAK,SAAS,WAGvC;OAFmB,uBAAuB,KAAK,CAEhC,OAAO,SAAS;AAC7B,YAAQ,KAAK;AACb,WAAO;;;GAGX;AAEF,KAAI,MACF,QAAO;EACL,MAAM;EACN,UAAU;EACX;;AAGL,SAAgB,gBAAwD;CACtE,MAAM,UAAU;CAEhB,MAAM,eAAe,OACnB,YACA,SACA,QACA,WACG;EACH,MAAM,EAAE,sBAAsB,MAAM,cAAc,OAAO;EACzD,IAAIC;AACJ,MAAI;AACF,cAAW,MAAMC,KAAG,SAAS,WAAW,EAAE,UAAU;WAC7C,GAAG;AACV,SAAM,IAAI,MACR,uBAAuB,WAAW,IAAI,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE,IAChF,EAAE,OAAO,GAAG,CACb;;EAGH,MAAM,MAAMC,OAAK,QAAQ,WAAW;AACpC,aAAW,cAAc,WAAW;AAEpC,MAAI,OAAO,QAA
S,QAAQ,SAAS,QAAQ,QAAS;GACpD,MAAM,OAAO,OAAO,QAAQ,IAAI,MAAM,EAAE;GACxC,IAAI,QAAQ;AACZ,OAAI,QACF,SAAQ,kBAAkB,SAAS,QAAQ,MAAM,CAAC;AAEpD,UAAO;IACL,MAAM;IACN;IACA,MAAM,OAAO;IACb;IACA,MAAM,EAAE;IACT;;EAGH,MAAM,SAAS,cAAc,QAAQ,SAAS,QAAQ,KAAK;EAC3D,MAAM,SAAS,iBAAiB,QAAQ;EACxC,MAAM,aAAa,IAAI,MAAM;GAC3B,MAAM;GACN,OAAO,OAAO;GACd,MAAM;IACJ,GAAG,OAAO;IACV,aAAa,OAAO;IACrB;GACF,CAAC;EACF,IAAI,QAAQ,OAAO,MAAM,WAAW;EACpC,MAAM,gBAAgB,SAAS,CAAC,IAAI,qBAAqB;AAEzD,MAAI,SAAS;GAEX,MAAM,YAAY,eAAe,MAAM,cAAc,IAAI,cAAc,CAAC,IAAI,MAAM,EAAE,QAAQ;AAC5F,OAAI,CAAC,UACH,OAAM,IAAI,MACR,uBAAuB,QAAQ,MAAM,WAAW,kEAAkE,QAAQ,qEAC3H;AAEH,WAAQ;QAER,SAAQ,MAAM,cAAc,IAAI,MAAM;AAGxC,QAAM,OAAO,OAAO,WAAW;AAC/B,SAAO;;CAGT,eAAe,OAAO,MAAY,MAAa;EAC7C,MAAMC,QAAyB,EAAE;AAEjC,QAAM,MAAM,mBAAmB,OAAO,GAAG,WAAW;GAClD,MAAM,OAAO;AACb,OAAI,KAAK,SAAS,QAAS;GAE3B,MAAM,YAAY,YAAY,KAAK;AACnC,OAAI,UAAU,WAAW,EAAG,QAAO;GAEnC,MAAM,aAAa,uBAAuB,KAAK;GAC/C,MAAM,EAAE,MAAM,cAAc,YAAY,eAAe,UAAU;GACjE,MAAM,aAAaD,OAAK,QAAQ,SAAS,aAAa,KAAK,MAAM,KAAK,SAAU,aAAa;AAE7F,SAAM,KACJ,aAAa,YAAY,SAAS,YAAY,KAAK,CAAC,MAAM,YAAY;AACpE,WAAO,OAAO,UAAU,OAAO,SAAS,cAAc,SAAS,MAAM,QAAQ;KAC7E,CACH;AAED,UAAO;IACP;AAEF,QAAM,QAAQ,IAAI,MAAM;;AAG1B,QAAO,OAAO,MAAM,SAAS;AAC3B,QAAM,OAAO,MAAM,KAAK"}
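The `remark-include-CIiVrABN.js.map` content above embeds the `remarkInclude` transformer, including the VS Code–style region extraction it applies when an include specifier points at a non-Markdown file (`file#regionName`). Below is a small illustrative target file using the `// #region` / `// #endregion` pair from `REGION_MARKERS`; the file name and region name are invented for the example:

```ts
// snippet.ts — hypothetical target for an include specifier such as
// "./snippet.ts#create-processor"; per the embedded source, only the region
// body is extracted, dedented, and emitted as an mdast `code` node whose
// lang is taken from the file extension (or an explicit `lang` attribute).
// #region create-processor
import { unified } from 'unified';

export const processor = unified();
// #endregion
```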
@@ -1,6 +1,6 @@
- import { M as DocCollection, N as DocsCollection } from "../core-CgLkZ4NQ.js";
+ import { M as DocCollection, N as DocsCollection } from "../core-INx0uFn9.js";
  import { i as InternalTypeConfig } from "../types-CBMeukTI.js";
- import { t as CompiledMDXProperties } from "../build-mdx-DyJk_jWV.js";
+ import { t as CompiledMDXProperties } from "../build-mdx-DWx1HtNY.js";
  import { FC, ReactNode } from "react";
  import { StandardSchemaV1 } from "@standard-schema/spec";
 
@@ -1,4 +1,4 @@
- import { C as ServerOptions, M as DocCollection, N as DocsCollection, _ as AsyncDocCollectionEntry, r as CoreOptions, v as AsyncDocsCollectionEntry } from "../core-CgLkZ4NQ.js";
+ import { C as ServerOptions, M as DocCollection, N as DocsCollection, _ as AsyncDocCollectionEntry, r as CoreOptions, v as AsyncDocsCollectionEntry } from "../core-INx0uFn9.js";
  import { i as InternalTypeConfig, r as FileInfo } from "../types-CBMeukTI.js";
  import * as _standard_schema_spec0 from "@standard-schema/spec";
  import * as _hanzo_docs_core_source0 from "@hanzo/docs-core/source";
@@ -1,13 +1,13 @@
- import "../preset-D_quDsKp.js";
- import { t as buildConfig } from "../build-DbdeZyKK.js";
- import { n as createCore } from "../core-CCsY8cxS.js";
- import "../codegen-IYa8t8xV.js";
- import { t as fumaMatter } from "../fuma-matter-BWdOvvCf.js";
- import "../remark-include-BnRd6OBo.js";
- import { t as buildMDX } from "../build-mdx-BTwSGUFs.js";
+ import { t as parseFrontmatter } from "../frontmatter-Cq6XTjTb.js";
+ import "../preset-DtKimtBh.js";
+ import { t as buildConfig } from "../build-DP5xMGSd.js";
+ import { n as createCore } from "../core-BJualF84.js";
+ import "../codegen-DleOVLNr.js";
+ import "../remark-include-CIiVrABN.js";
+ import { t as buildMDX } from "../build-mdx-B98VCkri.js";
  import { server } from "./server.js";
- import { pathToFileURL } from "node:url";
  import fs from "node:fs/promises";
+ import { pathToFileURL } from "node:url";
  import { executeMdx } from "@hanzo/mdx-runtime/client";
 
  //#region src/runtime/dynamic.ts
@@ -26,7 +26,7 @@ async function dynamic(configExports, coreOptions, serverOptions) {
  const body = {};
  async function compile({ info, data }) {
  let content = (await fs.readFile(info.fullPath)).toString();
- content = fumaMatter(content).content;
+ content = parseFrontmatter(content).content;
  const compiled = await buildMDX(core, collection, {
  filePath: info.fullPath,
  source: content,
@@ -1 +1 @@
- {"version":3,"file":"dynamic.js","names":["head: Record<string, () => unknown>","body: Record<string, () => Promise<unknown>>","cachedResult: Promise<CompiledMDXProperties> | undefined"],"sources":["../../src/runtime/dynamic.ts"],"sourcesContent":["import { buildConfig, type DocCollectionItem } from '@/config/build';\nimport { buildMDX, type CompiledMDXProperties } from '@/loaders/mdx/build-mdx';\nimport { executeMdx } from '@hanzo/mdx-runtime/client';\nimport { pathToFileURL } from 'node:url';\nimport { fumaMatter } from '@/utils/fuma-matter';\nimport fs from 'node:fs/promises';\nimport { server, type ServerOptions } from './server';\nimport { type CoreOptions, createCore } from '@/core';\nimport type { FileInfo, InternalTypeConfig } from './types';\n\nexport interface LazyEntry<Data = unknown> {\n info: FileInfo;\n data: Data;\n\n hash?: string;\n}\n\nexport type CreateDynamic<Config, TC extends InternalTypeConfig = InternalTypeConfig> = ReturnType<\n typeof dynamic<Config, TC>\n>;\n\nexport async function dynamic<Config, TC extends InternalTypeConfig>(\n configExports: Config,\n coreOptions: CoreOptions,\n serverOptions?: ServerOptions,\n) {\n const core = createCore(coreOptions);\n await core.init({\n config: buildConfig(configExports as Record<string, unknown>),\n });\n\n const create = server<Config, TC>(serverOptions);\n\n function getDocCollection(name: string): DocCollectionItem | undefined {\n const collection = core.getCollection(name);\n if (!collection) return;\n\n if (collection.type === 'docs') return collection.docs;\n else if (collection.type === 'doc') return collection;\n }\n\n function convertLazyEntries(collection: DocCollectionItem, entries: LazyEntry[]) {\n const head: Record<string, () => unknown> = {};\n const body: Record<string, () => Promise<unknown>> = {};\n\n async function compile({ info, data }: LazyEntry<unknown>) {\n let content = (await fs.readFile(info.fullPath)).toString();\n content = fumaMatter(content).content;\n\n const compiled = await buildMDX(core, collection, {\n filePath: info.fullPath,\n source: content,\n frontmatter: data as Record<string, unknown>,\n isDevelopment: false,\n environment: 'runtime',\n });\n\n return (await executeMdx(String(compiled.value), {\n baseUrl: pathToFileURL(info.fullPath),\n })) as CompiledMDXProperties;\n }\n\n for (const entry of entries) {\n head[entry.info.path] = () => entry.data;\n let cachedResult: Promise<CompiledMDXProperties> | undefined;\n body[entry.info.path] = () => (cachedResult ??= compile(entry));\n }\n\n return { head, body };\n }\n\n return {\n async doc<Name extends keyof Config & string>(\n name: Name,\n base: string,\n entries: LazyEntry<unknown>[],\n ) {\n const collection = getDocCollection(name as string);\n if (!collection) throw new Error(`the doc collection ${name as string} doesn't exist.`);\n\n const { head, body } = convertLazyEntries(collection, entries);\n\n return create.docLazy(name, base, head, body);\n },\n async docs<Name extends keyof Config & string>(\n name: Name,\n base: string,\n meta: Record<string, unknown>,\n entries: LazyEntry<unknown>[],\n ) {\n const collection = getDocCollection(name as string);\n if (!collection) throw new Error(`the doc collection ${name as string} doesn't exist.`);\n\n const docs = convertLazyEntries(collection, entries);\n return create.docsLazy(name, base, meta, docs.head, docs.body);\n },\n 
};\n}\n"],"mappings":";;;;;;;;;;;;;AAqBA,eAAsB,QACpB,eACA,aACA,eACA;CACA,MAAM,OAAO,WAAW,YAAY;AACpC,OAAM,KAAK,KAAK,EACd,QAAQ,YAAY,cAAyC,EAC9D,CAAC;CAEF,MAAM,SAAS,OAAmB,cAAc;CAEhD,SAAS,iBAAiB,MAA6C;EACrE,MAAM,aAAa,KAAK,cAAc,KAAK;AAC3C,MAAI,CAAC,WAAY;AAEjB,MAAI,WAAW,SAAS,OAAQ,QAAO,WAAW;WACzC,WAAW,SAAS,MAAO,QAAO;;CAG7C,SAAS,mBAAmB,YAA+B,SAAsB;EAC/E,MAAMA,OAAsC,EAAE;EAC9C,MAAMC,OAA+C,EAAE;EAEvD,eAAe,QAAQ,EAAE,MAAM,QAA4B;GACzD,IAAI,WAAW,MAAM,GAAG,SAAS,KAAK,SAAS,EAAE,UAAU;AAC3D,aAAU,WAAW,QAAQ,CAAC;GAE9B,MAAM,WAAW,MAAM,SAAS,MAAM,YAAY;IAChD,UAAU,KAAK;IACf,QAAQ;IACR,aAAa;IACb,eAAe;IACf,aAAa;IACd,CAAC;AAEF,UAAQ,MAAM,WAAW,OAAO,SAAS,MAAM,EAAE,EAC/C,SAAS,cAAc,KAAK,SAAS,EACtC,CAAC;;AAGJ,OAAK,MAAM,SAAS,SAAS;AAC3B,QAAK,MAAM,KAAK,cAAc,MAAM;GACpC,IAAIC;AACJ,QAAK,MAAM,KAAK,cAAe,iBAAiB,QAAQ,MAAM;;AAGhE,SAAO;GAAE;GAAM;GAAM;;AAGvB,QAAO;EACL,MAAM,IACJ,MACA,MACA,SACA;GACA,MAAM,aAAa,iBAAiB,KAAe;AACnD,OAAI,CAAC,WAAY,OAAM,IAAI,MAAM,sBAAsB,KAAe,iBAAiB;GAEvF,MAAM,EAAE,MAAM,SAAS,mBAAmB,YAAY,QAAQ;AAE9D,UAAO,OAAO,QAAQ,MAAM,MAAM,MAAM,KAAK;;EAE/C,MAAM,KACJ,MACA,MACA,MACA,SACA;GACA,MAAM,aAAa,iBAAiB,KAAe;AACnD,OAAI,CAAC,WAAY,OAAM,IAAI,MAAM,sBAAsB,KAAe,iBAAiB;GAEvF,MAAM,OAAO,mBAAmB,YAAY,QAAQ;AACpD,UAAO,OAAO,SAAS,MAAM,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK;;EAEjE"}
+ {"version":3,"file":"dynamic.js","names":["head: Record<string, () => unknown>","body: Record<string, () => Promise<unknown>>","cachedResult: Promise<CompiledMDXProperties> | undefined"],"sources":["../../src/runtime/dynamic.ts"],"sourcesContent":["import { buildConfig, type DocCollectionItem } from '@/config/build';\nimport { buildMDX, type CompiledMDXProperties } from '@/loaders/mdx/build-mdx';\nimport { executeMdx } from '@hanzo/mdx-runtime/client';\nimport { pathToFileURL } from 'node:url';\nimport { parseFrontmatter } from '@/utils/frontmatter';\nimport fs from 'node:fs/promises';\nimport { server, type ServerOptions } from './server';\nimport { type CoreOptions, createCore } from '@/core';\nimport type { FileInfo, InternalTypeConfig } from './types';\n\nexport interface LazyEntry<Data = unknown> {\n info: FileInfo;\n data: Data;\n\n hash?: string;\n}\n\nexport type CreateDynamic<Config, TC extends InternalTypeConfig = InternalTypeConfig> = ReturnType<\n typeof dynamic<Config, TC>\n>;\n\nexport async function dynamic<Config, TC extends InternalTypeConfig>(\n configExports: Config,\n coreOptions: CoreOptions,\n serverOptions?: ServerOptions,\n) {\n const core = createCore(coreOptions);\n await core.init({\n config: buildConfig(configExports as Record<string, unknown>),\n });\n\n const create = server<Config, TC>(serverOptions);\n\n function getDocCollection(name: string): DocCollectionItem | undefined {\n const collection = core.getCollection(name);\n if (!collection) return;\n\n if (collection.type === 'docs') return collection.docs;\n else if (collection.type === 'doc') return collection;\n }\n\n function convertLazyEntries(collection: DocCollectionItem, entries: LazyEntry[]) {\n const head: Record<string, () => unknown> = {};\n const body: Record<string, () => Promise<unknown>> = {};\n\n async function compile({ info, data }: LazyEntry<unknown>) {\n let content = (await fs.readFile(info.fullPath)).toString();\n content = parseFrontmatter(content).content;\n\n const compiled = await buildMDX(core, collection, {\n filePath: info.fullPath,\n source: content,\n frontmatter: data as Record<string, unknown>,\n isDevelopment: false,\n environment: 'runtime',\n });\n\n return (await executeMdx(String(compiled.value), {\n baseUrl: pathToFileURL(info.fullPath),\n })) as CompiledMDXProperties;\n }\n\n for (const entry of entries) {\n head[entry.info.path] = () => entry.data;\n let cachedResult: Promise<CompiledMDXProperties> | undefined;\n body[entry.info.path] = () => (cachedResult ??= compile(entry));\n }\n\n return { head, body };\n }\n\n return {\n async doc<Name extends keyof Config & string>(\n name: Name,\n base: string,\n entries: LazyEntry<unknown>[],\n ) {\n const collection = getDocCollection(name as string);\n if (!collection) throw new Error(`the doc collection ${name as string} doesn't exist.`);\n\n const { head, body } = convertLazyEntries(collection, entries);\n\n return create.docLazy(name, base, head, body);\n },\n async docs<Name extends keyof Config & string>(\n name: Name,\n base: string,\n meta: Record<string, unknown>,\n entries: LazyEntry<unknown>[],\n ) {\n const collection = getDocCollection(name as string);\n if (!collection) throw new Error(`the doc collection ${name as string} doesn't exist.`);\n\n const docs = convertLazyEntries(collection, entries);\n return create.docsLazy(name, base, meta, docs.head, docs.body);\n },\n 
};\n}\n"],"mappings":";;;;;;;;;;;;;AAqBA,eAAsB,QACpB,eACA,aACA,eACA;CACA,MAAM,OAAO,WAAW,YAAY;AACpC,OAAM,KAAK,KAAK,EACd,QAAQ,YAAY,cAAyC,EAC9D,CAAC;CAEF,MAAM,SAAS,OAAmB,cAAc;CAEhD,SAAS,iBAAiB,MAA6C;EACrE,MAAM,aAAa,KAAK,cAAc,KAAK;AAC3C,MAAI,CAAC,WAAY;AAEjB,MAAI,WAAW,SAAS,OAAQ,QAAO,WAAW;WACzC,WAAW,SAAS,MAAO,QAAO;;CAG7C,SAAS,mBAAmB,YAA+B,SAAsB;EAC/E,MAAMA,OAAsC,EAAE;EAC9C,MAAMC,OAA+C,EAAE;EAEvD,eAAe,QAAQ,EAAE,MAAM,QAA4B;GACzD,IAAI,WAAW,MAAM,GAAG,SAAS,KAAK,SAAS,EAAE,UAAU;AAC3D,aAAU,iBAAiB,QAAQ,CAAC;GAEpC,MAAM,WAAW,MAAM,SAAS,MAAM,YAAY;IAChD,UAAU,KAAK;IACf,QAAQ;IACR,aAAa;IACb,eAAe;IACf,aAAa;IACd,CAAC;AAEF,UAAQ,MAAM,WAAW,OAAO,SAAS,MAAM,EAAE,EAC/C,SAAS,cAAc,KAAK,SAAS,EACtC,CAAC;;AAGJ,OAAK,MAAM,SAAS,SAAS;AAC3B,QAAK,MAAM,KAAK,cAAc,MAAM;GACpC,IAAIC;AACJ,QAAK,MAAM,KAAK,cAAe,iBAAiB,QAAQ,MAAM;;AAGhE,SAAO;GAAE;GAAM;GAAM;;AAGvB,QAAO;EACL,MAAM,IACJ,MACA,MACA,SACA;GACA,MAAM,aAAa,iBAAiB,KAAe;AACnD,OAAI,CAAC,WAAY,OAAM,IAAI,MAAM,sBAAsB,KAAe,iBAAiB;GAEvF,MAAM,EAAE,MAAM,SAAS,mBAAmB,YAAY,QAAQ;AAE9D,UAAO,OAAO,QAAQ,MAAM,MAAM,MAAM,KAAK;;EAE/C,MAAM,KACJ,MACA,MACA,MACA,SACA;GACA,MAAM,aAAa,iBAAiB,KAAe;AACnD,OAAI,CAAC,WAAY,OAAM,IAAI,MAAM,sBAAsB,KAAe,iBAAiB;GAEvF,MAAM,OAAO,mBAAmB,YAAY,QAAQ;AACpD,UAAO,OAAO,SAAS,MAAM,MAAM,MAAM,KAAK,MAAM,KAAK,KAAK;;EAEjE"}
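Both the `remark-include` and `dynamic.ts` sources embedded above consume the renamed frontmatter helper through `parsed.content` and `parsed.data`. The following is a declaration-only sketch of that observable shape, inferred from the call sites rather than from `src/utils/frontmatter.ts` itself, which this diff does not show:

```ts
// Hedged sketch: only the shape visible at the call sites is assumed;
// the real parseFrontmatter may expose additional fields.
declare function parseFrontmatter(source: string): {
  content: string;               // body with the frontmatter block stripped
  data: Record<string, unknown>; // parsed frontmatter fields
};

const raw = '---\ntitle: Hello\n---\n\n# Hello';
const parsed = parseFrontmatter(raw);
// remark-include feeds parsed.content into the embedded VFile and parsed.data into
// its `frontmatter`; dynamic.ts keeps only parsed.content before calling buildMDX.
```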
@@ -1,2 +1,2 @@
- import { C as ServerOptions, S as ServerCreate, T as toFumadocsSource, _ as AsyncDocCollectionEntry, b as DocsCollectionEntry, v as AsyncDocsCollectionEntry, w as server, x as MetaCollectionEntry, y as DocCollectionEntry } from "../core-CgLkZ4NQ.js";
- export { AsyncDocCollectionEntry, AsyncDocsCollectionEntry, DocCollectionEntry, DocsCollectionEntry, MetaCollectionEntry, ServerCreate, ServerOptions, server, toFumadocsSource };
+ import { C as ServerOptions, S as ServerCreate, T as server, _ as AsyncDocCollectionEntry, b as DocsCollectionEntry, v as AsyncDocsCollectionEntry, w as createSource, x as MetaCollectionEntry, y as DocCollectionEntry } from "../core-INx0uFn9.js";
+ export { AsyncDocCollectionEntry, AsyncDocsCollectionEntry, DocCollectionEntry, DocsCollectionEntry, MetaCollectionEntry, ServerCreate, ServerOptions, createSource, server };
@@ -57,11 +57,8 @@ function server(options = {}) {
  return {
  docs: await this.doc(name, base, docGlob),
  meta: await this.meta(name, base, metaGlob),
- toFumadocsSource() {
- return toFumadocsSource(this.docs, this.meta);
- },
  toSource() {
- return toFumadocsSource(this.docs, this.meta);
+ return createSource(this.docs, this.meta);
  }
  };
  },
@@ -69,17 +66,14 @@ function server(options = {}) {
  return {
  docs: await this.docLazy(name, base, docHeadGlob, docBodyGlob),
  meta: await this.meta(name, base, metaGlob),
- toFumadocsSource() {
- return toFumadocsSource(this.docs, this.meta);
- },
  toSource() {
- return toFumadocsSource(this.docs, this.meta);
+ return createSource(this.docs, this.meta);
  }
  };
  }
  };
  }
- function toFumadocsSource(pages, metas) {
+ function createSource(pages, metas) {
  const files = [];
  for (const entry of pages) files.push({
  type: "page",
@@ -113,5 +107,5 @@ function createDocMethods(info, load) {
  }
 
  //#endregion
- export { server, toFumadocsSource };
+ export { createSource, server };
  //# sourceMappingURL=server.js.map