@pagesmith/core 0.3.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/README.md +9 -4
  2. package/REFERENCE.md +5 -2
  3. package/dist/ai/index.d.mts +5 -3
  4. package/dist/ai/index.d.mts.map +1 -1
  5. package/dist/ai/index.mjs +300 -206
  6. package/dist/ai/index.mjs.map +1 -1
  7. package/dist/assets/index.d.mts +10 -1
  8. package/dist/assets/index.d.mts.map +1 -1
  9. package/dist/assets/index.mjs +2 -2
  10. package/dist/{assets-DXiWF_KI.mjs → assets-CAPOqQ_P.mjs} +42 -5
  11. package/dist/assets-CAPOqQ_P.mjs.map +1 -0
  12. package/dist/{content-config-Bfe4W9us.d.mts → content-config-DJXUOcNG.d.mts} +49 -17
  13. package/dist/{content-config-Bfe4W9us.d.mts.map → content-config-DJXUOcNG.d.mts.map} +1 -1
  14. package/dist/{content-layer-DPK1EmfY.mjs → content-layer-B5enqWeJ.mjs} +123 -28
  15. package/dist/content-layer-B5enqWeJ.mjs.map +1 -0
  16. package/dist/content-layer-CpHYUYNN.d.mts +121 -0
  17. package/dist/content-layer-CpHYUYNN.d.mts.map +1 -0
  18. package/dist/create/index.d.mts.map +1 -1
  19. package/dist/create/index.mjs +26 -28
  20. package/dist/create/index.mjs.map +1 -1
  21. package/dist/css/index.d.mts +1 -1
  22. package/dist/{heading-BpDXnl-7.d.mts → heading-Dhvzlay-.d.mts} +1 -1
  23. package/dist/{heading-BpDXnl-7.d.mts.map → heading-Dhvzlay-.d.mts.map} +1 -1
  24. package/dist/{index-Bg9srb5U.d.mts → index-B7NRZAxd.d.mts} +1 -1
  25. package/dist/{index-Bg9srb5U.d.mts.map → index-B7NRZAxd.d.mts.map} +1 -1
  26. package/dist/{index-BBYkDxwI.d.mts → index-C0QFHYwb.d.mts} +1 -1
  27. package/dist/{index-BBYkDxwI.d.mts.map → index-C0QFHYwb.d.mts.map} +1 -1
  28. package/dist/{index-CbOKbkjJ.d.mts → index-CJkBs8YQ.d.mts} +2 -2
  29. package/dist/index-CJkBs8YQ.d.mts.map +1 -0
  30. package/dist/{index-YXQxMV6J.d.mts → index-DCznbvaV.d.mts} +2 -2
  31. package/dist/{index-YXQxMV6J.d.mts.map → index-DCznbvaV.d.mts.map} +1 -1
  32. package/dist/index.d.mts +15 -99
  33. package/dist/index.d.mts.map +1 -1
  34. package/dist/index.mjs +13 -9
  35. package/dist/index.mjs.map +1 -1
  36. package/dist/loaders/index.d.mts +2 -2
  37. package/dist/markdown/index.d.mts +2 -2
  38. package/dist/markdown/index.mjs +1 -1
  39. package/dist/{markdown-CyrHoDhP.mjs → markdown-BmDJgYeB.mjs} +23 -1
  40. package/dist/{markdown-CyrHoDhP.mjs.map → markdown-BmDJgYeB.mjs.map} +1 -1
  41. package/dist/mcp/index.d.mts +23 -0
  42. package/dist/mcp/index.d.mts.map +1 -0
  43. package/dist/mcp/index.mjs +2 -0
  44. package/dist/mcp/server.d.mts +13 -0
  45. package/dist/mcp/server.d.mts.map +1 -0
  46. package/dist/mcp/server.mjs +2 -0
  47. package/dist/runtime/index.mjs +1 -1
  48. package/dist/schemas/index.d.mts +3 -3
  49. package/dist/server-D3DHoh5f.mjs +202 -0
  50. package/dist/server-D3DHoh5f.mjs.map +1 -0
  51. package/dist/ssg-utils/index.d.mts +61 -0
  52. package/dist/ssg-utils/index.d.mts.map +1 -0
  53. package/dist/ssg-utils/index.mjs +118 -0
  54. package/dist/ssg-utils/index.mjs.map +1 -0
  55. package/dist/{types-Cn52sdoq.d.mts → types-B-V5qemH.d.mts} +1 -1
  56. package/dist/{types-Cn52sdoq.d.mts.map → types-B-V5qemH.d.mts.map} +1 -1
  57. package/dist/vite/index.d.mts +69 -34
  58. package/dist/vite/index.d.mts.map +1 -1
  59. package/dist/vite/index.mjs +294 -226
  60. package/dist/vite/index.mjs.map +1 -1
  61. package/docs/agents/AGENTS.md.template +9 -0
  62. package/docs/agents/changelog-notes.md +15 -0
  63. package/docs/agents/errors.md +96 -0
  64. package/docs/agents/migration.md +25 -0
  65. package/docs/agents/recipes.md +26 -0
  66. package/docs/agents/usage.md +58 -0
  67. package/docs/llms-full.txt +53 -0
  68. package/docs/llms.txt +29 -0
  69. package/package.json +56 -4
  70. package/dist/assets-DXiWF_KI.mjs.map +0 -1
  71. package/dist/content-layer-DPK1EmfY.mjs.map +0 -1
  72. package/dist/index-CbOKbkjJ.d.mts.map +0 -1
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@pagesmith/core",
-  "version": "0.3.0",
+  "version": "0.4.0",
   "description": "File-based CMS — schema-validated collections, lazy markdown rendering, and runtime CSS/JS exports",
   "keywords": [
     "cms",
@@ -15,6 +15,7 @@
     "url": "https://github.com/sujeet-pro/pagesmith/issues"
   },
   "license": "MIT",
+  "author": "Sujeet <hello@sujeet.pro> (https://sujeet.pro)",
   "repository": {
     "type": "git",
     "url": "https://github.com/sujeet-pro/pagesmith.git",
@@ -25,6 +26,7 @@
     "src/styles/",
     "assets/",
     "templates/",
+    "docs/",
     "REFERENCE.md"
   ],
   "type": "module",
@@ -89,10 +91,25 @@
       "types": "./dist/vite/index.d.mts",
       "import": "./dist/vite/index.mjs"
     },
+    "./ssg-utils": {
+      "types": "./dist/ssg-utils/index.d.mts",
+      "import": "./dist/ssg-utils/index.mjs"
+    },
     "./create": {
       "types": "./dist/create/index.d.mts",
       "import": "./dist/create/index.mjs"
     },
+    "./mcp": {
+      "types": "./dist/mcp/index.d.mts",
+      "import": "./dist/mcp/index.mjs"
+    },
+    "./llms": "./docs/llms.txt",
+    "./llms-full": "./docs/llms-full.txt",
+    "./agents/usage": "./docs/agents/usage.md",
+    "./agents/recipes": "./docs/agents/recipes.md",
+    "./agents/changelog-notes": "./docs/agents/changelog-notes.md",
+    "./agents/migration": "./docs/agents/migration.md",
+    "./agents/template": "./docs/agents/AGENTS.md.template",
     "./package.json": "./package.json"
   },
   "scripts": {
@@ -100,6 +117,7 @@
     "test": "vp test run"
   },
   "dependencies": {
+    "@modelcontextprotocol/sdk": "^1.29.0",
     "devalue": "^5.1.1",
     "fast-glob": "^3.3.0",
     "gray-matter": "^4.0.3",
@@ -119,14 +137,48 @@
     "remark-parse": "^11.0.0",
     "remark-rehype": "^11.1.1",
     "remark-smartypants": "^3.0.2",
-    "smol-toml": "^1.3.0",
+    "smol-toml": "^1.6.1",
     "unified": "^11.0.5",
     "yaml": "^2.7.0",
     "zod": "^4.3.6"
   },
   "devDependencies": {
-    "@types/node": "^25.5.0",
-    "typescript": "^5.9.0",
+    "@types/node": "^25.5.2",
+    "typescript": "^6.0.2",
     "vite-plus": "^0.1.13"
+  },
+  "bundleDependencies": [
+    "@modelcontextprotocol/sdk",
+    "devalue",
+    "fast-glob",
+    "gray-matter",
+    "json5",
+    "lightningcss",
+    "rehype-accessible-emojis",
+    "rehype-autolink-headings",
+    "rehype-expressive-code",
+    "rehype-external-links",
+    "rehype-mathjax",
+    "rehype-slug",
+    "rehype-stringify",
+    "remark-frontmatter",
+    "remark-gfm",
+    "remark-github-alerts",
+    "remark-math",
+    "remark-parse",
+    "remark-rehype",
+    "remark-smartypants",
+    "smol-toml",
+    "unified",
+    "yaml",
+    "zod"
+  ],
+  "engines": {
+    "node": ">=24"
+  },
+  "ai": {
+    "context": "./docs/llms.txt",
+    "fullContext": "./docs/llms-full.txt",
+    "agentsDir": "./docs/agents"
   }
 }
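
The exports map above adds two new runtime subpaths ("./ssg-utils" and "./mcp") plus file exports for the bundled docs. A minimal consumer-side sketch, assuming only the subpaths shown in this diff (the modules' actual named exports are not part of the diff, so namespace imports and import.meta.resolve are used; file and variable names here are hypothetical):

// consumer-sketch.mts: only the subpath names come from the diff above
import * as ssgUtils from '@pagesmith/core/ssg-utils' // resolves to dist/ssg-utils/index.mjs
import * as mcp from '@pagesmith/core/mcp'            // resolves to dist/mcp/index.mjs

// The doc exports map to plain files; import.meta.resolve returns their file URLs
const llmsUrl = import.meta.resolve('@pagesmith/core/llms')           // docs/llms.txt
const usageUrl = import.meta.resolve('@pagesmith/core/agents/usage')  // docs/agents/usage.md

Note that the added engines field restricts installs to Node >= 24, and bundleDependencies now ships all runtime dependencies inside the published tarball.
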
package/dist/assets-DXiWF_KI.mjs.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"assets-DXiWF_KI.mjs","names":[],"sources":["../src/assets/copier.ts","../src/assets/hasher.ts"],"sourcesContent":[…],"mappings":"…"}
package/dist/content-layer-DPK1EmfY.mjs.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"content-layer-DPK1EmfY.mjs","names":["coreConvert"],"sources":["../src/convert.ts","../src/utils/read-time.ts","../src/entry.ts","../src/plugins/index.ts","../src/utils/glob.ts","../src/utils/slug.ts","../src/validation/schema-validator.ts","../src/validation/code-block-validator.ts","../src/validation/heading-validator.ts","../src/validation/link-validator.ts","../src/validation/runner.ts","../src/store.ts","../src/content-layer.ts"],"sourcesContent":[…],"mappings":"…"}
package/dist/index-CbOKbkjJ.d.mts.map DELETED
@@ -1 +0,0 @@
- {"version":3,"file":"index-CbOKbkjJ.d.mts","names":[],"sources":["../src/markdown/pipeline.ts"],"mappings":";;;KAuBY,cAAA;EACV,IAAA;EACA,QAAA,EAAU,OAAA;EACV,WAAA,EAAa,MAAA;AAAA;AAAA,iBA8HO,eAAA,CACpB,GAAA,UACA,MAAA,GAAS,cAAA,EACT,YAAA;EAAiB,OAAA;EAAiB,WAAA,EAAa,MAAA;AAAA,IAC9C,OAAA,CAAQ,cAAA"}