@eclipsa/content 0.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
1
+ {"version":3,"file":"internal-h0upzIHm.mjs","names":["decodeHtmlEntities"],"sources":["../highlight.ts","../search.ts","../internal.ts"],"sourcesContent":["import { createHighlighter, type Highlighter } from 'shiki'\nimport type { ContentHighlightOptions } from './types.ts'\n\nconst DEFAULT_THEME = 'github-dark'\nconst CODE_BLOCK_RE = /<pre\\b[^>]*>\\s*<code\\b([^>]*)>([\\s\\S]*?)<\\/code>\\s*<\\/pre>/giu\nconst CLASS_ATTR_RE = /\\bclass=(['\"])(.*?)\\1/iu\nconst HTML_ENTITY_RE = /&(?:#(\\d+)|#x([\\da-fA-F]+)|amp|lt|gt|quot|#39);/g\nconst highlighterCache = new Map<string, Promise<Highlighter>>()\nconst loadedLanguagesByTheme = new Map<string, Set<string>>()\n\nconst decodeHtmlEntities = (value: string) =>\n value.replace(HTML_ENTITY_RE, (entity, decimal, hex) => {\n if (decimal) {\n return String.fromCodePoint(Number(decimal))\n }\n if (hex) {\n return String.fromCodePoint(Number.parseInt(hex, 16))\n }\n switch (entity) {\n case '&amp;':\n return '&'\n case '&lt;':\n return '<'\n case '&gt;':\n return '>'\n case '&quot;':\n return '\"'\n case '&#39;':\n return \"'\"\n default:\n return entity\n }\n })\n\nconst getLanguageFromCodeAttributes = (attributes: string) => {\n const classAttr = CLASS_ATTR_RE.exec(attributes)?.[2]\n if (!classAttr) {\n return null\n }\n for (const token of classAttr.split(/\\s+/)) {\n if (token.startsWith('language-')) {\n return token.slice('language-'.length)\n }\n }\n return null\n}\n\nconst resolveTheme = (options: boolean | ContentHighlightOptions | undefined) => {\n if (!options) {\n return null\n }\n return options === true ? DEFAULT_THEME : (options.theme ?? 
DEFAULT_THEME)\n}\n\nconst getHighlighter = (theme: string) => {\n const cached = highlighterCache.get(theme)\n if (cached) {\n return cached\n }\n const next = createHighlighter({\n langs: [],\n themes: [theme],\n })\n highlighterCache.set(theme, next)\n loadedLanguagesByTheme.set(theme, new Set())\n return next\n}\n\nconst ensureLanguageLoaded = async (theme: string, language: string) => {\n const loadedLanguages = loadedLanguagesByTheme.get(theme) ?? new Set<string>()\n loadedLanguagesByTheme.set(theme, loadedLanguages)\n if (loadedLanguages.has(language)) {\n return\n }\n const highlighter = await getHighlighter(theme)\n await highlighter.loadLanguage(language as any)\n loadedLanguages.add(language)\n}\n\nexport const highlightHtml = async (\n html: string,\n options: boolean | ContentHighlightOptions | undefined,\n) => {\n const theme = resolveTheme(options)\n if (!theme) {\n return html\n }\n\n const highlighter = await getHighlighter(theme)\n let highlightedHtml = ''\n let lastIndex = 0\n\n for (const match of html.matchAll(CODE_BLOCK_RE)) {\n const index = match.index ?? 0\n const fullMatch = match[0]\n const codeAttributes = match[1] ?? ''\n const encodedCode = match[2] ?? 
''\n const language = getLanguageFromCodeAttributes(codeAttributes)\n\n highlightedHtml += html.slice(lastIndex, index)\n lastIndex = index + fullMatch.length\n\n if (!language) {\n highlightedHtml += fullMatch\n continue\n }\n\n try {\n await ensureLanguageLoaded(theme, language)\n highlightedHtml += highlighter.codeToHtml(decodeHtmlEntities(encodedCode), {\n lang: language,\n theme,\n })\n } catch {\n highlightedHtml += fullMatch\n }\n }\n\n if (lastIndex === 0) {\n return html\n }\n\n highlightedHtml += html.slice(lastIndex)\n return highlightedHtml\n}\n","import type {\n ContentSearchDocument,\n ContentSearchField,\n ContentSearchIndex,\n ContentSearchOptions,\n ContentSearchQueryOptions,\n ContentSearchResult,\n ContentSearchPosting,\n ResolvedContentSearchOptions,\n} from './types.ts'\n\nconst DEFAULT_SEARCH_OPTIONS: ResolvedContentSearchOptions = {\n enabled: true,\n hotkey: '/',\n limit: 10,\n placeholder: 'Search docs...',\n prefix: true,\n}\n\nconst SEARCH_STOPWORDS = new Set([\n 'a',\n 'an',\n 'and',\n 'are',\n 'as',\n 'at',\n 'be',\n 'by',\n 'for',\n 'from',\n 'has',\n 'have',\n 'how',\n 'in',\n 'is',\n 'it',\n 'of',\n 'on',\n 'or',\n 'that',\n 'the',\n 'this',\n 'to',\n 'was',\n 'were',\n 'with',\n])\n\nconst SEARCH_K1 = 1.2\nconst SEARCH_B = 0.75\n\nconst isCjkChar = (char: string) =>\n /[\\u3400-\\u4dbf\\u4e00-\\u9fff\\u3040-\\u30ff\\uac00-\\ud7af]/u.test(char)\n\nconst tokenizeValue = (text: string, query: boolean) => {\n const tokens: string[] = []\n let current = ''\n\n for (const char of text) {\n if (isCjkChar(char)) {\n if (current !== '') {\n const token = current.toLowerCase()\n if (query || (token.length >= 2 && !SEARCH_STOPWORDS.has(token))) {\n tokens.push(token)\n }\n current = ''\n }\n tokens.push(char)\n continue\n }\n if (/[\\p{L}\\p{N}_]/u.test(char)) {\n current += char\n continue\n }\n if (current !== '') {\n const token = current.toLowerCase()\n if (query || (token.length >= 2 && !SEARCH_STOPWORDS.has(token))) {\n 
tokens.push(token)\n }\n current = ''\n }\n }\n\n if (current !== '') {\n const token = current.toLowerCase()\n if (query || (token.length >= 2 && !SEARCH_STOPWORDS.has(token))) {\n tokens.push(token)\n }\n }\n\n return tokens\n}\n\nconst tokenizeIndex = (text: string) => tokenizeValue(text, false)\n\nconst tokenizeQuery = (text: string) => tokenizeValue(text, true)\n\nconst getFieldBoost = (field: ContentSearchField) => {\n switch (field) {\n case 'title':\n return 10\n case 'heading':\n return 5\n case 'code':\n return 0.5\n case 'body':\n default:\n return 1\n }\n}\n\nconst addDocumentFieldTerms = (\n map: Map<string, { field: ContentSearchField; tf: number }>,\n field: ContentSearchField,\n text: string,\n) => {\n for (const token of tokenizeIndex(text)) {\n const existing = map.get(token)\n if (existing) {\n existing.tf += 1\n continue\n }\n map.set(token, { field, tf: 1 })\n }\n}\n\nconst getSnippet = (body: string, matches: string[], maxLength = 150) => {\n if (body === '') {\n return ''\n }\n const lowerBody = body.toLowerCase()\n let firstMatchIndex = -1\n for (const match of matches) {\n const index = lowerBody.indexOf(match.toLowerCase())\n if (index !== -1 && (firstMatchIndex === -1 || index < firstMatchIndex)) {\n firstMatchIndex = index\n }\n }\n const start = Math.max(0, firstMatchIndex - 50)\n const end = Math.min(body.length, start + maxLength)\n let snippet = body.slice(start, end).trim()\n if (start > 0) {\n snippet = `...${snippet}`\n }\n if (end < body.length) {\n snippet = `${snippet}...`\n }\n return snippet\n}\n\nexport const resolveContentSearchOptions = (\n options: boolean | ContentSearchOptions | undefined,\n): ResolvedContentSearchOptions => {\n if (options === false) {\n return {\n ...DEFAULT_SEARCH_OPTIONS,\n enabled: false,\n }\n }\n const normalized = typeof options === 'object' ? options : {}\n return {\n enabled: normalized.enabled ?? true,\n hotkey: normalized.hotkey ?? DEFAULT_SEARCH_OPTIONS.hotkey,\n limit: normalized.limit ?? 
DEFAULT_SEARCH_OPTIONS.limit,\n placeholder: normalized.placeholder ?? DEFAULT_SEARCH_OPTIONS.placeholder,\n prefix: normalized.prefix ?? DEFAULT_SEARCH_OPTIONS.prefix,\n }\n}\n\nexport const buildContentSearchIndex = (\n documents: ContentSearchDocument[],\n options: ResolvedContentSearchOptions,\n): ContentSearchIndex => {\n const index: Record<string, ContentSearchPosting[]> = {}\n const df: Record<string, number> = {}\n let totalDocumentLength = 0\n\n documents.forEach((document, docIdx) => {\n const docTerms = new Map<string, { field: ContentSearchField; tf: number }>()\n\n addDocumentFieldTerms(docTerms, 'title', document.title)\n for (const heading of document.headings) {\n addDocumentFieldTerms(docTerms, 'heading', heading)\n }\n addDocumentFieldTerms(docTerms, 'body', document.body)\n for (const code of document.code) {\n addDocumentFieldTerms(docTerms, 'code', code)\n }\n\n totalDocumentLength += tokenizeIndex(document.body).length\n\n for (const [term, posting] of docTerms) {\n df[term] = (df[term] ?? 0) + 1\n const postings = index[term] ?? []\n postings.push({\n docIdx,\n field: posting.field,\n tf: posting.tf,\n })\n index[term] = postings\n }\n })\n\n return {\n avgDl: documents.length === 0 ? 0 : totalDocumentLength / documents.length,\n df,\n docCount: documents.length,\n documents,\n index,\n options,\n }\n}\n\nexport const searchContentIndex = (\n searchIndex: ContentSearchIndex,\n query: string,\n options: ContentSearchQueryOptions = {},\n): ContentSearchResult[] => {\n if (query.trim() === '' || searchIndex.docCount === 0) {\n return []\n }\n\n const tokens = tokenizeQuery(query)\n if (tokens.length === 0) {\n return []\n }\n\n const limit = options.limit ?? searchIndex.options.limit\n const prefix = options.prefix ?? 
searchIndex.options.prefix\n const docScores = new Map<number, { matches: Set<string>; score: number }>()\n\n tokens.forEach((token, index) => {\n const isLastToken = index === tokens.length - 1\n const matchingTerms =\n prefix && isLastToken && token.length >= 2\n ? Object.keys(searchIndex.index).filter((term) => term.startsWith(token))\n : searchIndex.index[token]\n ? [token]\n : []\n\n for (const term of matchingTerms) {\n const postings = searchIndex.index[term] ?? []\n const df = searchIndex.df[term] ?? 1\n const idf = Math.log((searchIndex.docCount - df + 0.5) / (df + 0.5) + 1)\n\n for (const posting of postings) {\n const document = searchIndex.documents[posting.docIdx]\n if (!document) {\n continue\n }\n const docLength = Math.max(1, tokenizeIndex(document.body).length)\n const score =\n idf *\n ((posting.tf * (SEARCH_K1 + 1)) /\n (posting.tf +\n SEARCH_K1 *\n (1 - SEARCH_B + (SEARCH_B * docLength) / Math.max(1, searchIndex.avgDl)))) *\n getFieldBoost(posting.field)\n\n const current = docScores.get(posting.docIdx) ?? 
{\n matches: new Set<string>(),\n score: 0,\n }\n current.score += score\n current.matches.add(term)\n docScores.set(posting.docIdx, current)\n }\n }\n })\n\n return [...docScores.entries()]\n .map(([docIdx, value]) => {\n const document = searchIndex.documents[docIdx]!\n const matches = [...value.matches]\n return {\n collection: document.collection,\n id: document.id,\n matches,\n score: value.score,\n snippet: getSnippet(document.body, matches),\n title: document.title,\n url: document.url,\n } satisfies ContentSearchResult\n })\n .sort((left, right) => right.score - left.score)\n .slice(0, limit)\n}\n\nexport const generateContentSearchRuntimeModule = (\n assetPath: string,\n options: ResolvedContentSearchOptions,\n) => `let searchIndexPromise = null\nconst searchOptions = ${JSON.stringify(options)}\n\nconst loadSearchIndex = async () => {\n if (searchIndexPromise) {\n return searchIndexPromise\n }\n searchIndexPromise = fetch(${JSON.stringify(assetPath)})\n .then((response) => {\n if (!response.ok) {\n throw new Error('Failed to load search index.')\n }\n return response.json()\n })\n .catch(() => null)\n return searchIndexPromise\n}\n\nconst isCjkChar = (char) => /[\\\\u3400-\\\\u4dbf\\\\u4e00-\\\\u9fff\\\\u3040-\\\\u30ff\\\\uac00-\\\\ud7af]/u.test(char)\n\nconst tokenizeQuery = (text) => {\n const tokens = []\n let current = ''\n for (const char of text) {\n if (isCjkChar(char)) {\n if (current !== '') {\n tokens.push(current.toLowerCase())\n current = ''\n }\n tokens.push(char)\n continue\n }\n if (/[\\\\p{L}\\\\p{N}_]/u.test(char)) {\n current += char\n continue\n }\n if (current !== '') {\n tokens.push(current.toLowerCase())\n current = ''\n }\n }\n if (current !== '') {\n tokens.push(current.toLowerCase())\n }\n return tokens\n}\n\nconst getFieldBoost = (field) => {\n switch (field) {\n case 'title':\n return 10\n case 'heading':\n return 5\n case 'code':\n return 0.5\n case 'body':\n default:\n return 1\n }\n}\n\nconst getSnippet = (body, matches, 
maxLength = 150) => {\n if (body === '') {\n return ''\n }\n const lowerBody = body.toLowerCase()\n let firstMatchIndex = -1\n for (const match of matches) {\n const index = lowerBody.indexOf(match.toLowerCase())\n if (index !== -1 && (firstMatchIndex === -1 || index < firstMatchIndex)) {\n firstMatchIndex = index\n }\n }\n const start = Math.max(0, firstMatchIndex - 50)\n const end = Math.min(body.length, start + maxLength)\n let snippet = body.slice(start, end).trim()\n if (start > 0) {\n snippet = '...' + snippet\n }\n if (end < body.length) {\n snippet = snippet + '...'\n }\n return snippet\n}\n\nexport const search = async (\n query,\n options = {},\n) => {\n const searchIndex = await loadSearchIndex()\n if (!searchIndex || query.trim() === '') {\n return []\n }\n const tokens = tokenizeQuery(query)\n if (tokens.length === 0) {\n return []\n }\n const limit = options.limit ?? searchOptions.limit\n const prefix = options.prefix ?? searchOptions.prefix\n const docScores = new Map()\n\n tokens.forEach((token, tokenIndex) => {\n const isLastToken = tokenIndex === tokens.length - 1\n const matchingTerms =\n prefix && isLastToken && token.length >= 2\n ? Object.keys(searchIndex.index).filter((term) => term.startsWith(token))\n : searchIndex.index[token]\n ? [token]\n : []\n\n for (const term of matchingTerms) {\n const postings = searchIndex.index[term] ?? []\n const df = searchIndex.df[term] ?? 1\n const idf = Math.log((searchIndex.docCount - df + 0.5) / (df + 0.5) + 1)\n\n for (const posting of postings) {\n const document = searchIndex.documents[posting.docIdx]\n if (!document) {\n continue\n }\n const docLength = Math.max(1, document.body.split(/\\\\s+/u).filter(Boolean).length)\n const score =\n idf *\n ((posting.tf * (1.2 + 1)) /\n (posting.tf + 1.2 * (1 - 0.75 + (0.75 * docLength) / Math.max(1, searchIndex.avgDl)))) *\n getFieldBoost(posting.field)\n\n const current = docScores.get(posting.docIdx) ?? 
{\n matches: new Set(),\n score: 0,\n }\n current.score += score\n current.matches.add(term)\n docScores.set(posting.docIdx, current)\n }\n }\n })\n\n return [...docScores.entries()]\n .map(([docIdx, value]) => {\n const document = searchIndex.documents[docIdx]\n const matches = [...value.matches]\n return {\n collection: document.collection,\n id: document.id,\n matches,\n score: value.score,\n snippet: getSnippet(document.body, matches),\n title: document.title,\n url: document.url,\n }\n })\n .sort((left, right) => right.score - left.score)\n .slice(0, limit)\n}\n\nexport { searchOptions }\nexport default { search, searchOptions }\n`\n","import fg from 'fast-glob'\nimport * as fs from 'node:fs/promises'\nimport { createRequire } from 'node:module'\nimport path from 'node:path'\nimport YAML from 'yaml'\nimport type { StandardSchemaIssue, StandardSchemaV1 } from 'eclipsa'\nimport { highlightHtml } from './highlight.ts'\nimport { buildContentSearchIndex, resolveContentSearchOptions } from './search.ts'\nimport type { CollectionEntry, ContentFilter, ContentRuntimeModule } from './mod.ts'\nimport { CONTENT_COLLECTION_MARKER } from './types.ts'\nimport type {\n AnyCollection,\n BaseContentEntry,\n ContentMarkdownOptions,\n ContentComponentProps,\n ContentEntryReference,\n ContentHeading,\n ContentLoader,\n ContentLoaderContext,\n ContentLoaderObject,\n ContentSearchDocument,\n ContentSearchIndex,\n ResolvedContentSearchOptions,\n ContentSourceEntry,\n DefinedCollection,\n GlobLoader,\n RenderedContent,\n ResolvedContentEntries,\n} from './types.ts'\n\ninterface ParsedFrontmatter {\n body: string\n data: Record<string, unknown>\n}\n\ninterface ResolvedManifest {\n collections: Map<AnyCollection, BaseContentEntry[]>\n markdownByCollectionName: Map<string, ContentMarkdownOptions | undefined>\n searchByCollectionName: Map<string, ResolvedContentSearchOptions>\n entriesByCollection: Map<AnyCollection, Map<string, BaseContentEntry>>\n}\n\ninterface 
CreateContentRuntimeOptions {\n collectionsModule: Record<string, unknown>\n configPath: string\n root: string\n}\n\nconst MARKDOWN_EXTENSION_RE = /\\.md$/i\nconst require = createRequire(import.meta.url)\nlet markdownTransform: typeof import('@ox-content/napi').transform | null = null\n\nexport class ContentCollectionError extends Error {\n constructor(message: string) {\n super(message)\n this.name = 'ContentCollectionError'\n }\n}\n\nconst normalizeSlashes = (value: string) => value.replaceAll('\\\\', '/')\n\nconst formatIssuePath = (pathValue: StandardSchemaIssue['path']) => {\n if (!pathValue || pathValue.length === 0) {\n return ''\n }\n return pathValue\n .map((segment) =>\n typeof segment === 'object' && segment !== null && 'key' in segment\n ? String(segment.key)\n : String(segment),\n )\n .join('.')\n}\n\nconst createSchemaError = (\n collection: string,\n filePath: string,\n issues: readonly StandardSchemaIssue[],\n) => {\n const detail = issues\n .map((issue) => {\n const issuePath = formatIssuePath(issue.path)\n return issuePath === '' ? issue.message : `${issuePath}: ${issue.message}`\n })\n .join('; ')\n return new ContentCollectionError(\n `Invalid frontmatter in collection \"${collection}\" for ${filePath}: ${detail}`,\n )\n}\n\nconst parseFrontmatter = (source: string): ParsedFrontmatter => {\n if (!source.startsWith('---')) {\n return {\n body: source,\n data: {},\n }\n }\n const match = /^---\\r?\\n([\\s\\S]*?)\\r?\\n---\\r?\\n?/u.exec(source)\n if (!match) {\n return {\n body: source,\n data: {},\n }\n }\n const raw = YAML.parse(match[1] ?? 
'')\n if (raw == null) {\n return {\n body: source.slice(match[0].length),\n data: {},\n }\n }\n if (typeof raw !== 'object' || Array.isArray(raw)) {\n throw new ContentCollectionError('Markdown frontmatter must resolve to an object.')\n }\n return {\n body: source.slice(match[0].length),\n data: { ...(raw as Record<string, unknown>) },\n }\n}\n\nconst normalizeIdSegment = (segment: string) =>\n segment\n .trim()\n .replaceAll(/\\s+/g, '-')\n .replaceAll(/[^a-zA-Z0-9/_-]+/g, '-')\n .replaceAll(/-+/g, '-')\n .replaceAll(/^[-/]+|[-/]+$/g, '')\n\nconst normalizeEntryId = (value: string) =>\n normalizeSlashes(value).split('/').map(normalizeIdSegment).filter(Boolean).join('/')\n\nconst toEntryIdFromRelativePath = (relativePath: string) => {\n const withoutExt = normalizeSlashes(relativePath).replace(MARKDOWN_EXTENSION_RE, '')\n const segments = withoutExt.split('/').filter(Boolean)\n if (segments[segments.length - 1] === 'index' && segments.length > 1) {\n segments.pop()\n }\n return normalizeEntryId(segments.join('/')) || 'index'\n}\n\nconst validateData = async (\n collection: string,\n schema: StandardSchemaV1<any, any> | undefined,\n filePath: string,\n data: Record<string, unknown>,\n) => {\n if (!schema) {\n return data\n }\n const result = await schema['~standard'].validate(data)\n if ('issues' in result && result.issues !== undefined) {\n throw createSchemaError(collection, filePath, result.issues)\n }\n return result.value as Record<string, unknown>\n}\n\nconst resolveGlobLoaderEntries = async (\n collection: string,\n loader: GlobLoader,\n context: ContentLoaderContext,\n): Promise<ContentSourceEntry[]> => {\n const baseDir = path.resolve(path.dirname(context.configPath), loader.base)\n const matches = await fg(loader.pattern, {\n absolute: true,\n cwd: baseDir,\n onlyFiles: true,\n })\n return Promise.all(\n matches.map(async (filePath) => {\n const source = await fs.readFile(filePath, 'utf8')\n const relativePath = normalizeSlashes(path.relative(baseDir, 
filePath))\n const parsed = parseFrontmatter(source)\n const slug = typeof parsed.data.slug === 'string' ? parsed.data.slug : undefined\n delete parsed.data.slug\n return {\n body: parsed.body,\n data: parsed.data,\n filePath,\n id: slug ? normalizeEntryId(slug) : toEntryIdFromRelativePath(relativePath),\n } satisfies ContentSourceEntry\n }),\n )\n}\n\nconst resolveLoaderEntries = async (\n collection: string,\n loader: ContentLoader,\n context: ContentLoaderContext,\n) => {\n if ((loader as GlobLoader).kind === 'glob') {\n return resolveGlobLoaderEntries(collection, loader as GlobLoader, context)\n }\n return [...(await (loader as ContentLoaderObject).load(context))]\n}\n\nconst normalizeResolvedEntry = async (\n collection: string,\n schema: StandardSchemaV1<any, any> | undefined,\n entry: ContentSourceEntry,\n index: number,\n) => {\n const parsed =\n entry.data === undefined\n ? parseFrontmatter(entry.body)\n : {\n body: entry.body,\n data: { ...entry.data },\n }\n const filePath = entry.filePath ?? `${collection}:${entry.id ?? index}`\n const slug = typeof parsed.data.slug === 'string' ? parsed.data.slug : undefined\n delete parsed.data.slug\n const id =\n normalizeEntryId(entry.id ?? slug ?? 
`${collection}-${index}`) || `${collection}-${index}`\n return {\n body: parsed.body,\n collection,\n data: await validateData(collection, schema, filePath, parsed.data),\n filePath,\n id,\n } satisfies BaseContentEntry\n}\n\nconst isDefinedCollection = (value: unknown): value is DefinedCollection<any> =>\n typeof value === 'object' &&\n value !== null &&\n CONTENT_COLLECTION_MARKER in value &&\n (value as Record<string, unknown>)[CONTENT_COLLECTION_MARKER] === true\n\nexport const resolveCollections = async ({\n collectionsModule,\n configPath,\n root,\n}: CreateContentRuntimeOptions): Promise<ResolvedManifest> => {\n const byCollection = new Map<AnyCollection, BaseContentEntry[]>()\n const entriesByCollection = new Map<AnyCollection, Map<string, BaseContentEntry>>()\n const markdownByCollectionName = new Map<string, ContentMarkdownOptions | undefined>()\n const searchByCollectionName = new Map<string, ResolvedContentSearchOptions>()\n const definedCollections = Object.entries(collectionsModule).filter(\n (entry): entry is [string, DefinedCollection<any>] => isDefinedCollection(entry[1]),\n )\n for (const [collectionName, definition] of definedCollections) {\n const context: ContentLoaderContext = {\n collection: collectionName,\n configPath,\n root,\n }\n const rawEntries = await resolveLoaderEntries(collectionName, definition.loader, context)\n const resolvedEntries = await Promise.all(\n rawEntries.map((entry, index) =>\n normalizeResolvedEntry(collectionName, definition.schema, entry, index),\n ),\n )\n resolvedEntries.sort((left, right) => left.id.localeCompare(right.id))\n const entriesById = new Map<string, BaseContentEntry>()\n for (const entry of resolvedEntries) {\n if (entriesById.has(entry.id)) {\n throw new ContentCollectionError(\n `Duplicate content id \"${entry.id}\" in collection \"${collectionName}\".`,\n )\n }\n entriesById.set(entry.id, entry)\n }\n byCollection.set(definition, resolvedEntries)\n entriesByCollection.set(definition, 
entriesById)\n markdownByCollectionName.set(collectionName, definition.markdown)\n searchByCollectionName.set(collectionName, resolveContentSearchOptions(definition.search))\n }\n return {\n collections: byCollection,\n markdownByCollectionName,\n searchByCollectionName,\n entriesByCollection,\n }\n}\n\nconst createContentRenderer =\n (html: string) =>\n (props: Omit<ContentComponentProps, 'html'> = {}) => ({\n isStatic: false,\n props: {\n ...props,\n dangerouslySetInnerHTML: html,\n },\n type: props.as ?? 'article',\n })\n\nconst resolveOxContentNapiPath = () => {\n const resolvePaths = [\n process.cwd(),\n path.join(process.cwd(), 'node_modules', '@eclipsa', 'content'),\n ]\n try {\n return require.resolve('@ox-content/napi', { paths: resolvePaths })\n } catch {\n return '@ox-content/napi'\n }\n}\n\nconst loadMarkdownTransform = async () => {\n markdownTransform ??= (require(resolveOxContentNapiPath()) as typeof import('@ox-content/napi'))\n .transform\n return markdownTransform\n}\n\nconst decodeHtmlEntities = (value: string) =>\n value\n .replaceAll('&amp;', '&')\n .replaceAll('&lt;', '<')\n .replaceAll('&gt;', '>')\n .replaceAll('&quot;', '\"')\n .replaceAll('&#39;', \"'\")\n .replaceAll('&nbsp;', ' ')\n\nconst stripHtml = (html: string) =>\n decodeHtmlEntities(\n html\n .replaceAll(/<style[\\s\\S]*?<\\/style>/gu, ' ')\n .replaceAll(/<script[\\s\\S]*?<\\/script>/gu, ' ')\n .replaceAll(/<[^>]+>/gu, ' ')\n .replaceAll(/\\s+/gu, ' ')\n .trim(),\n )\n\nconst extractMarkdownCode = (source: string) => {\n const codeBlocks = new Set<string>()\n for (const match of source.matchAll(/```[\\t ]*[^\\n\\r]*\\r?\\n([\\s\\S]*?)```/gu)) {\n const code = match[1]?.trim()\n if (code) {\n codeBlocks.add(code)\n }\n }\n for (const match of source.matchAll(/`([^`\\n\\r]+)`/gu)) {\n const code = match[1]?.trim()\n if (code) {\n codeBlocks.add(code)\n }\n }\n return [...codeBlocks]\n}\n\nconst resolveSearchUrl = (base: string, entry: BaseContentEntry) => {\n const normalizedBase = 
base === '' ? '/' : base.endsWith('/') ? base : `${base}/`\n return `${normalizedBase}${entry.collection}/${entry.id}`.replaceAll(/\\/+/g, '/')\n}\n\nconst transformMarkdownEntry = async (entry: BaseContentEntry) => {\n const transform = await loadMarkdownTransform()\n const result = transform(entry.body, {\n autolinks: true,\n footnotes: true,\n gfm: true,\n sourcePath: entry.filePath,\n strikethrough: true,\n tables: true,\n taskLists: true,\n tocMaxDepth: 6,\n })\n if (result.errors.length > 0) {\n throw new ContentCollectionError(\n `Failed to render markdown for ${entry.filePath}: ${result.errors.join('; ')}`,\n )\n }\n return result\n}\n\nconst createSearchDocument = async (\n entry: BaseContentEntry,\n base: string,\n): Promise<ContentSearchDocument> => {\n const result = await transformMarkdownEntry(entry)\n const headings = result.toc.map((heading) => heading.text)\n const title =\n typeof entry.data.title === 'string'\n ? entry.data.title\n : (result.toc.find((heading) => heading.depth === 1)?.text ?? 
entry.id)\n\n return {\n body: stripHtml(result.html),\n code: extractMarkdownCode(entry.body),\n collection: entry.collection,\n headings,\n id: entry.id,\n title,\n url: resolveSearchUrl(base, entry),\n }\n}\n\nconst renderMarkdown = async (\n entry: BaseContentEntry,\n markdownOptions: ContentMarkdownOptions | undefined,\n): Promise<RenderedContent> => {\n const result = await transformMarkdownEntry(entry)\n const headings = result.toc.map(\n (heading) =>\n ({\n depth: heading.depth,\n slug: heading.slug,\n text: heading.text,\n }) satisfies ContentHeading,\n )\n const html = await highlightHtml(result.html, markdownOptions?.highlight)\n return {\n Content: createContentRenderer(html),\n headings,\n html,\n }\n}\n\nexport const createContentSearch = async ({\n collectionsModule,\n configPath,\n root,\n base,\n}: CreateContentRuntimeOptions & {\n base: string\n}): Promise<{\n index: ContentSearchIndex\n options: ResolvedContentSearchOptions\n}> => {\n const manifest = await resolveCollections({\n collectionsModule,\n configPath,\n root,\n })\n const documents: ContentSearchDocument[] = []\n let resolvedOptions = resolveContentSearchOptions(false)\n\n for (const entries of manifest.collections.values()) {\n const collectionName = entries[0]?.collection\n if (!collectionName) {\n continue\n }\n const searchOptions = manifest.searchByCollectionName.get(collectionName)\n if (!searchOptions?.enabled) {\n continue\n }\n if (!resolvedOptions.enabled) {\n resolvedOptions = searchOptions\n }\n for (const entry of entries) {\n documents.push(await createSearchDocument(entry, base))\n }\n }\n\n return {\n index: buildContentSearchIndex(documents, resolvedOptions),\n options: resolvedOptions,\n }\n}\n\nexport const createContentRuntime = ({\n collectionsModule,\n configPath,\n root,\n}: CreateContentRuntimeOptions): ContentRuntimeModule => {\n let manifestPromise: Promise<ResolvedManifest> | null = null\n const renderCache = new Map<string, RenderedContent>()\n const 
getManifest = () => {\n manifestPromise ??= resolveCollections({\n collectionsModule,\n configPath,\n root,\n })\n return manifestPromise\n }\n return {\n async getCollection<Collection extends AnyCollection>(\n collection: Collection,\n filter?: ContentFilter<Collection>,\n ) {\n const manifest = await getManifest()\n const entries = (manifest.collections.get(collection) ?? []) as CollectionEntry<Collection>[]\n if (!filter) {\n return [...entries]\n }\n const filtered: CollectionEntry<Collection>[] = []\n for (const entry of entries) {\n if (await filter(entry)) {\n filtered.push(entry)\n }\n }\n return filtered\n },\n async getEntries<Entries extends readonly ContentEntryReference<any>[]>(entries: Entries) {\n const manifest = await getManifest()\n return entries.map((entry) => {\n const collectionEntries = manifest.entriesByCollection.get(entry.collection)\n return collectionEntries?.get(entry.id)\n }) as ResolvedContentEntries<Entries>\n },\n async getEntry<Collection extends AnyCollection>(collection: Collection, id: string) {\n const manifest = await getManifest()\n return manifest.entriesByCollection.get(collection)?.get(id) as\n | CollectionEntry<Collection>\n | undefined\n },\n async render<Collection extends AnyCollection>(entry: CollectionEntry<Collection>) {\n const key = `${entry.collection}:${entry.id}`\n const cached = renderCache.get(key)\n if (cached) {\n return cached\n }\n const manifest = await getManifest()\n const rendered = await renderMarkdown(\n entry,\n manifest.markdownByCollectionName.get(entry.collection),\n )\n renderCache.set(key, rendered)\n return rendered\n },\n }\n}\n\nexport { parseFrontmatter, toEntryIdFromRelativePath 
}\n"],"mappings":";;;;;;;;AAGA,MAAM,gBAAgB;AACtB,MAAM,gBAAgB;AACtB,MAAM,gBAAgB;AACtB,MAAM,iBAAiB;AACvB,MAAM,mCAAmB,IAAI,KAAmC;AAChE,MAAM,yCAAyB,IAAI,KAA0B;AAE7D,MAAMA,wBAAsB,UAC1B,MAAM,QAAQ,iBAAiB,QAAQ,SAAS,QAAQ;AACtD,KAAI,QACF,QAAO,OAAO,cAAc,OAAO,QAAQ,CAAC;AAE9C,KAAI,IACF,QAAO,OAAO,cAAc,OAAO,SAAS,KAAK,GAAG,CAAC;AAEvD,SAAQ,QAAR;EACE,KAAK,QACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,SACH,QAAO;EACT,KAAK,QACH,QAAO;EACT,QACE,QAAO;;EAEX;AAEJ,MAAM,iCAAiC,eAAuB;CAC5D,MAAM,YAAY,cAAc,KAAK,WAAW,GAAG;AACnD,KAAI,CAAC,UACH,QAAO;AAET,MAAK,MAAM,SAAS,UAAU,MAAM,MAAM,CACxC,KAAI,MAAM,WAAW,YAAY,CAC/B,QAAO,MAAM,MAAM,EAAmB;AAG1C,QAAO;;AAGT,MAAM,gBAAgB,YAA2D;AAC/E,KAAI,CAAC,QACH,QAAO;AAET,QAAO,YAAY,OAAO,gBAAiB,QAAQ,SAAS;;AAG9D,MAAM,kBAAkB,UAAkB;CACxC,MAAM,SAAS,iBAAiB,IAAI,MAAM;AAC1C,KAAI,OACF,QAAO;CAET,MAAM,OAAO,kBAAkB;EAC7B,OAAO,EAAE;EACT,QAAQ,CAAC,MAAM;EAChB,CAAC;AACF,kBAAiB,IAAI,OAAO,KAAK;AACjC,wBAAuB,IAAI,uBAAO,IAAI,KAAK,CAAC;AAC5C,QAAO;;AAGT,MAAM,uBAAuB,OAAO,OAAe,aAAqB;CACtE,MAAM,kBAAkB,uBAAuB,IAAI,MAAM,oBAAI,IAAI,KAAa;AAC9E,wBAAuB,IAAI,OAAO,gBAAgB;AAClD,KAAI,gBAAgB,IAAI,SAAS,CAC/B;AAGF,QADoB,MAAM,eAAe,MAAM,EAC7B,aAAa,SAAgB;AAC/C,iBAAgB,IAAI,SAAS;;AAG/B,MAAa,gBAAgB,OAC3B,MACA,YACG;CACH,MAAM,QAAQ,aAAa,QAAQ;AACnC,KAAI,CAAC,MACH,QAAO;CAGT,MAAM,cAAc,MAAM,eAAe,MAAM;CAC/C,IAAI,kBAAkB;CACtB,IAAI,YAAY;AAEhB,MAAK,MAAM,SAAS,KAAK,SAAS,cAAc,EAAE;EAChD,MAAM,QAAQ,MAAM,SAAS;EAC7B,MAAM,YAAY,MAAM;EACxB,MAAM,iBAAiB,MAAM,MAAM;EACnC,MAAM,cAAc,MAAM,MAAM;EAChC,MAAM,WAAW,8BAA8B,eAAe;AAE9D,qBAAmB,KAAK,MAAM,WAAW,MAAM;AAC/C,cAAY,QAAQ,UAAU;AAE9B,MAAI,CAAC,UAAU;AACb,sBAAmB;AACnB;;AAGF,MAAI;AACF,SAAM,qBAAqB,OAAO,SAAS;AAC3C,sBAAmB,YAAY,WAAWA,qBAAmB,YAAY,EAAE;IACzE,MAAM;IACN;IACD,CAAC;UACI;AACN,sBAAmB;;;AAIvB,KAAI,cAAc,EAChB,QAAO;AAGT,oBAAmB,KAAK,MAAM,UAAU;AACxC,QAAO;;;;AChHT,MAAM,yBAAuD;CAC3D,SAAS;CACT,QAAQ;CACR,OAAO;CACP,aAAa;CACb,QAAQ;CACT;AAED,MAAM,mBAAmB,IAAI,IAAI;CAC/B;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;C
ACD,CAAC;AAKF,MAAM,aAAa,SACjB,0DAA0D,KAAK,KAAK;AAEtE,MAAM,iBAAiB,MAAc,UAAmB;CACtD,MAAM,SAAmB,EAAE;CAC3B,IAAI,UAAU;AAEd,MAAK,MAAM,QAAQ,MAAM;AACvB,MAAI,UAAU,KAAK,EAAE;AACnB,OAAI,YAAY,IAAI;IAClB,MAAM,QAAQ,QAAQ,aAAa;AACnC,QAAI,SAAU,MAAM,UAAU,KAAK,CAAC,iBAAiB,IAAI,MAAM,CAC7D,QAAO,KAAK,MAAM;AAEpB,cAAU;;AAEZ,UAAO,KAAK,KAAK;AACjB;;AAEF,MAAI,iBAAiB,KAAK,KAAK,EAAE;AAC/B,cAAW;AACX;;AAEF,MAAI,YAAY,IAAI;GAClB,MAAM,QAAQ,QAAQ,aAAa;AACnC,OAAI,SAAU,MAAM,UAAU,KAAK,CAAC,iBAAiB,IAAI,MAAM,CAC7D,QAAO,KAAK,MAAM;AAEpB,aAAU;;;AAId,KAAI,YAAY,IAAI;EAClB,MAAM,QAAQ,QAAQ,aAAa;AACnC,MAAI,SAAU,MAAM,UAAU,KAAK,CAAC,iBAAiB,IAAI,MAAM,CAC7D,QAAO,KAAK,MAAM;;AAItB,QAAO;;AAGT,MAAM,iBAAiB,SAAiB,cAAc,MAAM,MAAM;AAkBlE,MAAM,yBACJ,KACA,OACA,SACG;AACH,MAAK,MAAM,SAAS,cAAc,KAAK,EAAE;EACvC,MAAM,WAAW,IAAI,IAAI,MAAM;AAC/B,MAAI,UAAU;AACZ,YAAS,MAAM;AACf;;AAEF,MAAI,IAAI,OAAO;GAAE;GAAO,IAAI;GAAG,CAAC;;;AA4BpC,MAAa,+BACX,YACiC;AACjC,KAAI,YAAY,MACd,QAAO;EACL,GAAG;EACH,SAAS;EACV;CAEH,MAAM,aAAa,OAAO,YAAY,WAAW,UAAU,EAAE;AAC7D,QAAO;EACL,SAAS,WAAW,WAAW;EAC/B,QAAQ,WAAW,UAAU,uBAAuB;EACpD,OAAO,WAAW,SAAS,uBAAuB;EAClD,aAAa,WAAW,eAAe,uBAAuB;EAC9D,QAAQ,WAAW,UAAU,uBAAuB;EACrD;;AAGH,MAAa,2BACX,WACA,YACuB;CACvB,MAAM,QAAgD,EAAE;CACxD,MAAM,KAA6B,EAAE;CACrC,IAAI,sBAAsB;AAE1B,WAAU,SAAS,UAAU,WAAW;EACtC,MAAM,2BAAW,IAAI,KAAwD;AAE7E,wBAAsB,UAAU,SAAS,SAAS,MAAM;AACxD,OAAK,MAAM,WAAW,SAAS,SAC7B,uBAAsB,UAAU,WAAW,QAAQ;AAErD,wBAAsB,UAAU,QAAQ,SAAS,KAAK;AACtD,OAAK,MAAM,QAAQ,SAAS,KAC1B,uBAAsB,UAAU,QAAQ,KAAK;AAG/C,yBAAuB,cAAc,SAAS,KAAK,CAAC;AAEpD,OAAK,MAAM,CAAC,MAAM,YAAY,UAAU;AACtC,MAAG,SAAS,GAAG,SAAS,KAAK;GAC7B,MAAM,WAAW,MAAM,SAAS,EAAE;AAClC,YAAS,KAAK;IACZ;IACA,OAAO,QAAQ;IACf,IAAI,QAAQ;IACb,CAAC;AACF,SAAM,QAAQ;;GAEhB;AAEF,QAAO;EACL,OAAO,UAAU,WAAW,IAAI,IAAI,sBAAsB,UAAU;EACpE;EACA,UAAU,UAAU;EACpB;EACA;EACA;EACD;;AA8EH,MAAa,sCACX,WACA,YACG;wBACmB,KAAK,UAAU,QAAQ,CAAC;;;;;;+BAMjB,KAAK,UAAU,UAAU,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC1PzD,MA
AM,wBAAwB;AAC9B,MAAM,UAAU,cAAc,OAAO,KAAK,IAAI;AAC9C,IAAI,oBAAwE;AAE5E,IAAa,yBAAb,cAA4C,MAAM;CAChD,YAAY,SAAiB;AAC3B,QAAM,QAAQ;AACd,OAAK,OAAO;;;AAIhB,MAAM,oBAAoB,UAAkB,MAAM,WAAW,MAAM,IAAI;AAEvE,MAAM,mBAAmB,cAA2C;AAClE,KAAI,CAAC,aAAa,UAAU,WAAW,EACrC,QAAO;AAET,QAAO,UACJ,KAAK,YACJ,OAAO,YAAY,YAAY,YAAY,QAAQ,SAAS,UACxD,OAAO,QAAQ,IAAI,GACnB,OAAO,QAAQ,CACpB,CACA,KAAK,IAAI;;AAGd,MAAM,qBACJ,YACA,UACA,WACG;AAOH,QAAO,IAAI,uBACT,sCAAsC,WAAW,QAAQ,SAAS,IAPrD,OACZ,KAAK,UAAU;EACd,MAAM,YAAY,gBAAgB,MAAM,KAAK;AAC7C,SAAO,cAAc,KAAK,MAAM,UAAU,GAAG,UAAU,IAAI,MAAM;GACjE,CACD,KAAK,KAAK,GAGZ;;AAGH,MAAM,oBAAoB,WAAsC;AAC9D,KAAI,CAAC,OAAO,WAAW,MAAM,CAC3B,QAAO;EACL,MAAM;EACN,MAAM,EAAE;EACT;CAEH,MAAM,QAAQ,qCAAqC,KAAK,OAAO;AAC/D,KAAI,CAAC,MACH,QAAO;EACL,MAAM;EACN,MAAM,EAAE;EACT;CAEH,MAAM,MAAM,KAAK,MAAM,MAAM,MAAM,GAAG;AACtC,KAAI,OAAO,KACT,QAAO;EACL,MAAM,OAAO,MAAM,MAAM,GAAG,OAAO;EACnC,MAAM,EAAE;EACT;AAEH,KAAI,OAAO,QAAQ,YAAY,MAAM,QAAQ,IAAI,CAC/C,OAAM,IAAI,uBAAuB,kDAAkD;AAErF,QAAO;EACL,MAAM,OAAO,MAAM,MAAM,GAAG,OAAO;EACnC,MAAM,EAAE,GAAI,KAAiC;EAC9C;;AAGH,MAAM,sBAAsB,YAC1B,QACG,MAAM,CACN,WAAW,QAAQ,IAAI,CACvB,WAAW,qBAAqB,IAAI,CACpC,WAAW,OAAO,IAAI,CACtB,WAAW,kBAAkB,GAAG;AAErC,MAAM,oBAAoB,UACxB,iBAAiB,MAAM,CAAC,MAAM,IAAI,CAAC,IAAI,mBAAmB,CAAC,OAAO,QAAQ,CAAC,KAAK,IAAI;AAEtF,MAAM,6BAA6B,iBAAyB;CAE1D,MAAM,WADa,iBAAiB,aAAa,CAAC,QAAQ,uBAAuB,GAAG,CACxD,MAAM,IAAI,CAAC,OAAO,QAAQ;AACtD,KAAI,SAAS,SAAS,SAAS,OAAO,WAAW,SAAS,SAAS,EACjE,UAAS,KAAK;AAEhB,QAAO,iBAAiB,SAAS,KAAK,IAAI,CAAC,IAAI;;AAGjD,MAAM,eAAe,OACnB,YACA,QACA,UACA,SACG;AACH,KAAI,CAAC,OACH,QAAO;CAET,MAAM,SAAS,MAAM,OAAO,aAAa,SAAS,KAAK;AACvD,KAAI,YAAY,UAAU,OAAO,WAAW,KAAA,EAC1C,OAAM,kBAAkB,YAAY,UAAU,OAAO,OAAO;AAE9D,QAAO,OAAO;;AAGhB,MAAM,2BAA2B,OAC/B,YACA,QACA,YACkC;CAClC,MAAM,UAAU,KAAK,QAAQ,KAAK,QAAQ,QAAQ,WAAW,EAAE,OAAO,KAAK;CAC3E,MAAM,UAAU,MAAM,GAAG,OAAO,SAAS;EACvC,UAAU;EACV,KAAK;EACL,WAAW;EACZ,CAAC;AACF,QAAO,QAAQ,IACb,QAAQ,IAAI,OAAO,aAAa;EAC9B,MAAM,SAAS,MAAM,GAAG,SAAS,UAAU,OAAO;EAClD,MAAM,eAAe,iBAAiB,KAAK,SAAS,SAAS,SAAS,CAAC;EACvE,MAAM,SAAS,iBAAiB,OAAO;EACvC,MAAM,
OAAO,OAAO,OAAO,KAAK,SAAS,WAAW,OAAO,KAAK,OAAO,KAAA;AACvE,SAAO,OAAO,KAAK;AACnB,SAAO;GACL,MAAM,OAAO;GACb,MAAM,OAAO;GACb;GACA,IAAI,OAAO,iBAAiB,KAAK,GAAG,0BAA0B,aAAa;GAC5E;GACD,CACH;;AAGH,MAAM,uBAAuB,OAC3B,YACA,QACA,YACG;AACH,KAAK,OAAsB,SAAS,OAClC,QAAO,yBAAyB,YAAY,QAAsB,QAAQ;AAE5E,QAAO,CAAC,GAAI,MAAO,OAA+B,KAAK,QAAQ,CAAE;;AAGnE,MAAM,yBAAyB,OAC7B,YACA,QACA,OACA,UACG;CACH,MAAM,SACJ,MAAM,SAAS,KAAA,IACX,iBAAiB,MAAM,KAAK,GAC5B;EACE,MAAM,MAAM;EACZ,MAAM,EAAE,GAAG,MAAM,MAAM;EACxB;CACP,MAAM,WAAW,MAAM,YAAY,GAAG,WAAW,GAAG,MAAM,MAAM;CAChE,MAAM,OAAO,OAAO,OAAO,KAAK,SAAS,WAAW,OAAO,KAAK,OAAO,KAAA;AACvE,QAAO,OAAO,KAAK;CACnB,MAAM,KACJ,iBAAiB,MAAM,MAAM,QAAQ,GAAG,WAAW,GAAG,QAAQ,IAAI,GAAG,WAAW,GAAG;AACrF,QAAO;EACL,MAAM,OAAO;EACb;EACA,MAAM,MAAM,aAAa,YAAY,QAAQ,UAAU,OAAO,KAAK;EACnE;EACA;EACD;;AAGH,MAAM,uBAAuB,UAC3B,OAAO,UAAU,YACjB,UAAU,QAAA,oCACmB,SAC5B,MAAA,sCAAiE;AAEpE,MAAa,qBAAqB,OAAO,EACvC,mBACA,YACA,WAC4D;CAC5D,MAAM,+BAAe,IAAI,KAAwC;CACjE,MAAM,sCAAsB,IAAI,KAAmD;CACnF,MAAM,2CAA2B,IAAI,KAAiD;CACtF,MAAM,yCAAyB,IAAI,KAA2C;CAC9E,MAAM,qBAAqB,OAAO,QAAQ,kBAAkB,CAAC,QAC1D,UAAqD,oBAAoB,MAAM,GAAG,CACpF;AACD,MAAK,MAAM,CAAC,gBAAgB,eAAe,oBAAoB;EAC7D,MAAM,UAAgC;GACpC,YAAY;GACZ;GACA;GACD;EACD,MAAM,aAAa,MAAM,qBAAqB,gBAAgB,WAAW,QAAQ,QAAQ;EACzF,MAAM,kBAAkB,MAAM,QAAQ,IACpC,WAAW,KAAK,OAAO,UACrB,uBAAuB,gBAAgB,WAAW,QAAQ,OAAO,MAAM,CACxE,CACF;AACD,kBAAgB,MAAM,MAAM,UAAU,KAAK,GAAG,cAAc,MAAM,GAAG,CAAC;EACtE,MAAM,8BAAc,IAAI,KAA+B;AACvD,OAAK,MAAM,SAAS,iBAAiB;AACnC,OAAI,YAAY,IAAI,MAAM,GAAG,CAC3B,OAAM,IAAI,uBACR,yBAAyB,MAAM,GAAG,mBAAmB,eAAe,IACrE;AAEH,eAAY,IAAI,MAAM,IAAI,MAAM;;AAElC,eAAa,IAAI,YAAY,gBAAgB;AAC7C,sBAAoB,IAAI,YAAY,YAAY;AAChD,2BAAyB,IAAI,gBAAgB,WAAW,SAAS;AACjE,yBAAuB,IAAI,gBAAgB,4BAA4B,WAAW,OAAO,CAAC;;AAE5F,QAAO;EACL,aAAa;EACb;EACA;EACA;EACD;;AAGH,MAAM,yBACH,UACA,QAA6C,EAAE,MAAM;CACpD,UAAU;CACV,OAAO;EACL,GAAG;EACH,yBAAyB;EAC1B;CACD,MAAM,MAAM,MAAM;CACnB;AAEH,MAAM,iCAAiC;CACrC,MAAM,eAAe,CACnB,QAAQ,KAAK,EACb,KAAK,KAAK,QAAQ,KAAK,EAAE,gBAAgB,YAAY,UAAU,CAChE;AACD,KAAI;AACF,SAAO,QAAQ,QAAQ,oBAAoB,EAAE,OAAO,cAAc,CAAC;
SAC7D;AACN,SAAO;;;AAIX,MAAM,wBAAwB,YAAY;AACxC,uBAAuB,QAAQ,0BAA0B,CAAC,CACvD;AACH,QAAO;;AAGT,MAAM,sBAAsB,UAC1B,MACG,WAAW,SAAS,IAAI,CACxB,WAAW,QAAQ,IAAI,CACvB,WAAW,QAAQ,IAAI,CACvB,WAAW,UAAU,KAAI,CACzB,WAAW,SAAS,IAAI,CACxB,WAAW,UAAU,IAAI;AAE9B,MAAM,aAAa,SACjB,mBACE,KACG,WAAW,6BAA6B,IAAI,CAC5C,WAAW,+BAA+B,IAAI,CAC9C,WAAW,aAAa,IAAI,CAC5B,WAAW,SAAS,IAAI,CACxB,MAAM,CACV;AAEH,MAAM,uBAAuB,WAAmB;CAC9C,MAAM,6BAAa,IAAI,KAAa;AACpC,MAAK,MAAM,SAAS,OAAO,SAAS,wCAAwC,EAAE;EAC5E,MAAM,OAAO,MAAM,IAAI,MAAM;AAC7B,MAAI,KACF,YAAW,IAAI,KAAK;;AAGxB,MAAK,MAAM,SAAS,OAAO,SAAS,kBAAkB,EAAE;EACtD,MAAM,OAAO,MAAM,IAAI,MAAM;AAC7B,MAAI,KACF,YAAW,IAAI,KAAK;;AAGxB,QAAO,CAAC,GAAG,WAAW;;AAGxB,MAAM,oBAAoB,MAAc,UAA4B;AAElE,QAAO,GADgB,SAAS,KAAK,MAAM,KAAK,SAAS,IAAI,GAAG,OAAO,GAAG,KAAK,KACpD,MAAM,WAAW,GAAG,MAAM,KAAK,WAAW,QAAQ,IAAI;;AAGnF,MAAM,yBAAyB,OAAO,UAA4B;CAEhE,MAAM,UADY,MAAM,uBAAuB,EACtB,MAAM,MAAM;EACnC,WAAW;EACX,WAAW;EACX,KAAK;EACL,YAAY,MAAM;EAClB,eAAe;EACf,QAAQ;EACR,WAAW;EACX,aAAa;EACd,CAAC;AACF,KAAI,OAAO,OAAO,SAAS,EACzB,OAAM,IAAI,uBACR,iCAAiC,MAAM,SAAS,IAAI,OAAO,OAAO,KAAK,KAAK,GAC7E;AAEH,QAAO;;AAGT,MAAM,uBAAuB,OAC3B,OACA,SACmC;CACnC,MAAM,SAAS,MAAM,uBAAuB,MAAM;CAClD,MAAM,WAAW,OAAO,IAAI,KAAK,YAAY,QAAQ,KAAK;CAC1D,MAAM,QACJ,OAAO,MAAM,KAAK,UAAU,WACxB,MAAM,KAAK,QACV,OAAO,IAAI,MAAM,YAAY,QAAQ,UAAU,EAAE,EAAE,QAAQ,MAAM;AAExE,QAAO;EACL,MAAM,UAAU,OAAO,KAAK;EAC5B,MAAM,oBAAoB,MAAM,KAAK;EACrC,YAAY,MAAM;EAClB;EACA,IAAI,MAAM;EACV;EACA,KAAK,iBAAiB,MAAM,MAAM;EACnC;;AAGH,MAAM,iBAAiB,OACrB,OACA,oBAC6B;CAC7B,MAAM,SAAS,MAAM,uBAAuB,MAAM;CAClD,MAAM,WAAW,OAAO,IAAI,KACzB,aACE;EACC,OAAO,QAAQ;EACf,MAAM,QAAQ;EACd,MAAM,QAAQ;EACf,EACJ;CACD,MAAM,OAAO,MAAM,cAAc,OAAO,MAAM,iBAAiB,UAAU;AACzE,QAAO;EACL,SAAS,sBAAsB,KAAK;EACpC;EACA;EACD;;AAGH,MAAa,sBAAsB,OAAO,EACxC,mBACA,YACA,MACA,WAMI;CACJ,MAAM,WAAW,MAAM,mBAAmB;EACxC;EACA;EACA;EACD,CAAC;CACF,MAAM,YAAqC,EAAE;CAC7C,IAAI,kBAAkB,4BAA4B,MAAM;AAExD,MAAK,MAAM,WAAW,SAAS,YAAY,QAAQ,EAAE;EACnD,MAAM,iBAAiB,QAAQ,IAAI;AACnC,MAAI,CAAC,eACH;EAEF,MAAM,gBAAgB,SAAS,uBAAuB,IAAI,eAAe;AACzE,MAAI,CAAC,eAAe,QAClB;
AAEF,MAAI,CAAC,gBAAgB,QACnB,mBAAkB;AAEpB,OAAK,MAAM,SAAS,QAClB,WAAU,KAAK,MAAM,qBAAqB,OAAO,KAAK,CAAC;;AAI3D,QAAO;EACL,OAAO,wBAAwB,WAAW,gBAAgB;EAC1D,SAAS;EACV;;AAGH,MAAa,wBAAwB,EACnC,mBACA,YACA,WACuD;CACvD,IAAI,kBAAoD;CACxD,MAAM,8BAAc,IAAI,KAA8B;CACtD,MAAM,oBAAoB;AACxB,sBAAoB,mBAAmB;GACrC;GACA;GACA;GACD,CAAC;AACF,SAAO;;AAET,QAAO;EACL,MAAM,cACJ,YACA,QACA;GAEA,MAAM,WADW,MAAM,aAAa,EACV,YAAY,IAAI,WAAW,IAAI,EAAE;AAC3D,OAAI,CAAC,OACH,QAAO,CAAC,GAAG,QAAQ;GAErB,MAAM,WAA0C,EAAE;AAClD,QAAK,MAAM,SAAS,QAClB,KAAI,MAAM,OAAO,MAAM,CACrB,UAAS,KAAK,MAAM;AAGxB,UAAO;;EAET,MAAM,WAAkE,SAAkB;GACxF,MAAM,WAAW,MAAM,aAAa;AACpC,UAAO,QAAQ,KAAK,UAAU;AAE5B,WAD0B,SAAS,oBAAoB,IAAI,MAAM,WAAW,EAClD,IAAI,MAAM,GAAG;KACvC;;EAEJ,MAAM,SAA2C,YAAwB,IAAY;AAEnF,WADiB,MAAM,aAAa,EACpB,oBAAoB,IAAI,WAAW,EAAE,IAAI,GAAG;;EAI9D,MAAM,OAAyC,OAAoC;GACjF,MAAM,MAAM,GAAG,MAAM,WAAW,GAAG,MAAM;GACzC,MAAM,SAAS,YAAY,IAAI,IAAI;AACnC,OAAI,OACF,QAAO;GAGT,MAAM,WAAW,MAAM,eACrB,QAFe,MAAM,aAAa,EAGzB,yBAAyB,IAAI,MAAM,WAAW,CACxD;AACD,eAAY,IAAI,KAAK,SAAS;AAC9B,UAAO;;EAEV"}
@@ -0,0 +1,47 @@
1
+ import { L as ResolvedContentSearchOptions, S as ContentMarkdownOptions, T as ContentSearchIndex, d as BaseContentEntry, n as ContentRuntimeModule, u as AnyCollection } from "./mod-P8gKoDsz.mjs";
2
+
3
+ //#region internal.d.ts
4
+ interface ParsedFrontmatter {
5
+ body: string;
6
+ data: Record<string, unknown>;
7
+ }
8
+ interface ResolvedManifest {
9
+ collections: Map<AnyCollection, BaseContentEntry[]>;
10
+ markdownByCollectionName: Map<string, ContentMarkdownOptions | undefined>;
11
+ searchByCollectionName: Map<string, ResolvedContentSearchOptions>;
12
+ entriesByCollection: Map<AnyCollection, Map<string, BaseContentEntry>>;
13
+ }
14
+ interface CreateContentRuntimeOptions {
15
+ collectionsModule: Record<string, unknown>;
16
+ configPath: string;
17
+ root: string;
18
+ }
19
+ declare class ContentCollectionError extends Error {
20
+ constructor(message: string);
21
+ }
22
+ declare const parseFrontmatter: (source: string) => ParsedFrontmatter;
23
+ declare const toEntryIdFromRelativePath: (relativePath: string) => string;
24
+ declare const resolveCollections: ({
25
+ collectionsModule,
26
+ configPath,
27
+ root
28
+ }: CreateContentRuntimeOptions) => Promise<ResolvedManifest>;
29
+ declare const createContentSearch: ({
30
+ collectionsModule,
31
+ configPath,
32
+ root,
33
+ base
34
+ }: CreateContentRuntimeOptions & {
35
+ base: string;
36
+ }) => Promise<{
37
+ index: ContentSearchIndex;
38
+ options: ResolvedContentSearchOptions;
39
+ }>;
40
+ declare const createContentRuntime: ({
41
+ collectionsModule,
42
+ configPath,
43
+ root
44
+ }: CreateContentRuntimeOptions) => ContentRuntimeModule;
45
+ //#endregion
46
+ export { ContentCollectionError, createContentRuntime, createContentSearch, parseFrontmatter, resolveCollections, toEntryIdFromRelativePath };
47
+ //# sourceMappingURL=internal.d.mts.map
@@ -0,0 +1,2 @@
1
+ import { a as resolveCollections, i as parseFrontmatter, n as createContentRuntime, o as toEntryIdFromRelativePath, r as createContentSearch, t as ContentCollectionError } from "./internal-h0upzIHm.mjs";
2
+ export { ContentCollectionError, createContentRuntime, createContentSearch, parseFrontmatter, resolveCollections, toEntryIdFromRelativePath };
@@ -0,0 +1,151 @@
1
+ import { InferStandardSchemaOutput, StandardSchemaV1, StandardSchemaV1 as StandardSchemaV1$1 } from "eclipsa";
2
+
3
+ //#region types.d.ts
4
+ declare const CONTENT_COLLECTION_MARKER = "__eclipsa_content_collection__";
5
+ interface ContentSourceEntry {
6
+ body: string;
7
+ data?: Record<string, unknown>;
8
+ filePath?: string;
9
+ id?: string;
10
+ }
11
+ interface ContentLoaderContext {
12
+ collection: string;
13
+ configPath: string;
14
+ root: string;
15
+ }
16
+ interface ContentLoaderObject {
17
+ load(context: ContentLoaderContext): ContentSourceEntry[] | Promise<ContentSourceEntry[]> | readonly ContentSourceEntry[];
18
+ }
19
+ interface ContentHighlightOptions {
20
+ theme?: string;
21
+ }
22
+ interface ContentMarkdownOptions {
23
+ highlight?: boolean | ContentHighlightOptions;
24
+ }
25
+ interface ContentSearchOptions {
26
+ enabled?: boolean;
27
+ hotkey?: string;
28
+ limit?: number;
29
+ placeholder?: string;
30
+ prefix?: boolean;
31
+ }
32
+ interface ResolvedContentSearchOptions {
33
+ enabled: boolean;
34
+ hotkey: string;
35
+ limit: number;
36
+ placeholder: string;
37
+ prefix: boolean;
38
+ }
39
+ type ContentSearchField = 'body' | 'code' | 'heading' | 'title';
40
+ interface ContentSearchDocument {
41
+ body: string;
42
+ code: string[];
43
+ collection: string;
44
+ headings: string[];
45
+ id: string;
46
+ title: string;
47
+ url: string;
48
+ }
49
+ interface ContentSearchPosting {
50
+ docIdx: number;
51
+ field: ContentSearchField;
52
+ tf: number;
53
+ }
54
+ interface ContentSearchIndex {
55
+ avgDl: number;
56
+ df: Record<string, number>;
57
+ docCount: number;
58
+ documents: ContentSearchDocument[];
59
+ index: Record<string, ContentSearchPosting[]>;
60
+ options: ResolvedContentSearchOptions;
61
+ }
62
+ interface ContentSearchQueryOptions {
63
+ limit?: number;
64
+ prefix?: boolean;
65
+ }
66
+ interface ContentSearchResult {
67
+ collection: string;
68
+ id: string;
69
+ matches: string[];
70
+ score: number;
71
+ snippet: string;
72
+ title: string;
73
+ url: string;
74
+ }
75
+ interface GlobLoaderOptions {
76
+ base: string;
77
+ pattern: string;
78
+ }
79
+ interface GlobLoader {
80
+ readonly base: string;
81
+ readonly kind: 'glob';
82
+ readonly pattern: string;
83
+ }
84
+ type ContentLoader = GlobLoader | ContentLoaderObject;
85
+ interface ContentCollectionDefinition<Schema extends StandardSchemaV1<any, any> | undefined> {
86
+ loader: ContentLoader;
87
+ markdown?: ContentMarkdownOptions;
88
+ search?: boolean | ContentSearchOptions;
89
+ schema?: Schema;
90
+ }
91
+ interface DefinedCollection<Schema extends StandardSchemaV1<any, any> | undefined = StandardSchemaV1<any, any> | undefined> extends ContentCollectionDefinition<Schema> {
92
+ readonly [CONTENT_COLLECTION_MARKER]: true;
93
+ }
94
+ type AnyCollection = DefinedCollection<StandardSchemaV1<any, any> | undefined>;
95
+ type InferCollectionData<Collection extends AnyCollection> = Collection extends DefinedCollection<infer Schema> ? Schema extends StandardSchemaV1<any, any> ? InferStandardSchemaOutput<Schema> : Record<string, unknown> : Record<string, unknown>;
96
+ interface BaseContentEntry<Data = Record<string, unknown>, Collection extends string = string> {
97
+ body: string;
98
+ collection: Collection;
99
+ data: Data;
100
+ filePath: string;
101
+ id: string;
102
+ }
103
+ interface ContentHeading {
104
+ depth: number;
105
+ slug: string;
106
+ text: string;
107
+ }
108
+ interface ContentComponentProps extends Record<string, unknown> {
109
+ as?: string;
110
+ html: string;
111
+ }
112
+ interface RenderedContent {
113
+ Content: (props?: Omit<ContentComponentProps, 'html'>) => any;
114
+ headings: ContentHeading[];
115
+ html: string;
116
+ }
117
+ type CollectionEntry<Collection extends AnyCollection = AnyCollection> = BaseContentEntry<InferCollectionData<Collection>>;
118
+ type ContentFilter<Collection extends AnyCollection> = (entry: CollectionEntry<Collection>) => boolean | Promise<boolean>;
119
+ interface ContentEntryReference<Collection extends AnyCollection = AnyCollection> {
120
+ collection: Collection;
121
+ id: string;
122
+ }
123
+ type ResolvedContentEntries<Entries extends readonly ContentEntryReference<any>[]> = { [Index in keyof Entries]: Entries[Index] extends ContentEntryReference<infer Collection> ? CollectionEntry<Collection> | undefined : never };
124
+ //#endregion
125
+ //#region mod.d.ts
126
+ interface ContentRuntimeModule {
127
+ getCollection<Collection extends AnyCollection>(collection: Collection, filter?: ContentFilter<Collection>): Promise<CollectionEntry<Collection>[]>;
128
+ getEntries<Entries extends readonly ContentEntryReference<any>[]>(entries: Entries): Promise<ResolvedContentEntries<Entries>>;
129
+ getEntry<Collection extends AnyCollection>(collection: Collection, id: string): Promise<CollectionEntry<Collection> | undefined>;
130
+ render<Collection extends AnyCollection>(entry: CollectionEntry<Collection>): Promise<RenderedContent>;
131
+ }
132
+ declare const defineCollection: <Schema extends StandardSchemaV1<any, any> | undefined = StandardSchemaV1<any, any> | undefined>(definition: ContentCollectionDefinition<Schema>) => DefinedCollection<Schema>;
133
+ declare const glob: (options: GlobLoaderOptions) => GlobLoader;
134
+ declare const Content: ({
135
+ as,
136
+ html,
137
+ ...props
138
+ }: ContentComponentProps) => {
139
+ isStatic: boolean;
140
+ props: {
141
+ dangerouslySetInnerHTML: string;
142
+ };
143
+ type: string;
144
+ };
145
+ declare const getCollection: <Collection extends AnyCollection>(collection: Collection, filter?: ContentFilter<Collection>) => Promise<CollectionEntry<Collection>[]>;
146
+ declare const getEntry: <Collection extends AnyCollection>(collection: Collection, id: string) => Promise<CollectionEntry<Collection> | undefined>;
147
+ declare const getEntries: <Entries extends readonly ContentEntryReference<any>[]>(entries: Entries) => Promise<ResolvedContentEntries<Entries>>;
148
+ declare const render: <Collection extends AnyCollection>(entry: CollectionEntry<Collection>) => Promise<RenderedContent>;
149
+ //#endregion
150
+ export { ContentSourceEntry as A, ContentSearchDocument as C, ContentSearchPosting as D, ContentSearchOptions as E, RenderedContent as F, ResolvedContentEntries as I, ResolvedContentSearchOptions as L, GlobLoader as M, GlobLoaderOptions as N, ContentSearchQueryOptions as O, InferCollectionData as P, ContentMarkdownOptions as S, ContentSearchIndex as T, ContentHeading as _, getCollection as a, ContentLoaderContext as b, glob as c, BaseContentEntry as d, CollectionEntry as f, ContentFilter as g, ContentEntryReference as h, defineCollection as i, DefinedCollection as j, ContentSearchResult as k, render as l, ContentComponentProps as m, ContentRuntimeModule as n, getEntries as o, ContentCollectionDefinition as p, StandardSchemaV1$1 as r, getEntry as s, Content as t, AnyCollection as u, ContentHighlightOptions as v, ContentSearchField as w, ContentLoaderObject as x, ContentLoader as y };
151
+ //# sourceMappingURL=mod-P8gKoDsz.d.mts.map
package/dist/mod.d.mts ADDED
@@ -0,0 +1,2 @@
1
+ import { A as ContentSourceEntry, C as ContentSearchDocument, D as ContentSearchPosting, E as ContentSearchOptions, F as RenderedContent, I as ResolvedContentEntries, L as ResolvedContentSearchOptions, M as GlobLoader, N as GlobLoaderOptions, O as ContentSearchQueryOptions, P as InferCollectionData, S as ContentMarkdownOptions, T as ContentSearchIndex, _ as ContentHeading, a as getCollection, b as ContentLoaderContext, c as glob, d as BaseContentEntry, f as CollectionEntry, g as ContentFilter, h as ContentEntryReference, i as defineCollection, j as DefinedCollection, k as ContentSearchResult, l as render, m as ContentComponentProps, n as ContentRuntimeModule, o as getEntries, p as ContentCollectionDefinition, r as StandardSchemaV1, s as getEntry, t as Content, u as AnyCollection, v as ContentHighlightOptions, w as ContentSearchField, x as ContentLoaderObject, y as ContentLoader } from "./mod-P8gKoDsz.mjs";
2
+ export { AnyCollection, BaseContentEntry, CollectionEntry, Content, ContentCollectionDefinition, ContentComponentProps, ContentEntryReference, ContentFilter, ContentHeading, ContentHighlightOptions, ContentLoader, ContentLoaderContext, ContentLoaderObject, ContentMarkdownOptions, ContentRuntimeModule, ContentSearchDocument, ContentSearchField, ContentSearchIndex, ContentSearchOptions, ContentSearchPosting, ContentSearchQueryOptions, ContentSearchResult, ContentSourceEntry, DefinedCollection, GlobLoader, GlobLoaderOptions, InferCollectionData, RenderedContent, ResolvedContentEntries, ResolvedContentSearchOptions, StandardSchemaV1, defineCollection, getCollection, getEntries, getEntry, glob, render };
package/dist/mod.mjs ADDED
@@ -0,0 +1,34 @@
1
+ import { t as CONTENT_COLLECTION_MARKER } from "./types-rZ-wc23p.mjs";
2
+ //#region mod.ts
3
+ const ensureServerOnly = () => {
4
+ if (typeof window !== "undefined") throw new Error("@eclipsa/content query APIs are server-only.");
5
+ };
6
+ const loadRuntime = async () => {
7
+ ensureServerOnly();
8
+ return import("virtual:eclipsa-content:runtime");
9
+ };
10
+ const defineCollection = (definition) => ({
11
+ ...definition,
12
+ [CONTENT_COLLECTION_MARKER]: true
13
+ });
14
+ const glob = (options) => ({
15
+ base: options.base,
16
+ kind: "glob",
17
+ pattern: options.pattern
18
+ });
19
+ const Content = ({ as = "article", html, ...props }) => ({
20
+ isStatic: false,
21
+ props: {
22
+ ...props,
23
+ dangerouslySetInnerHTML: html
24
+ },
25
+ type: as
26
+ });
27
+ const getCollection = async (collection, filter) => (await loadRuntime()).getCollection(collection, filter);
28
+ const getEntry = async (collection, id) => (await loadRuntime()).getEntry(collection, id);
29
+ const getEntries = async (entries) => (await loadRuntime()).getEntries(entries);
30
+ const render = async (entry) => (await loadRuntime()).render(entry);
31
+ //#endregion
32
+ export { Content, defineCollection, getCollection, getEntries, getEntry, glob, render };
33
+
34
+ //# sourceMappingURL=mod.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"mod.mjs","names":[],"sources":["../mod.ts"],"sourcesContent":["import type { StandardSchemaV1 } from 'eclipsa'\nimport { CONTENT_COLLECTION_MARKER } from './types.ts'\nimport type {\n AnyCollection,\n CollectionEntry,\n ContentCollectionDefinition,\n ContentComponentProps,\n ContentFilter,\n ContentHighlightOptions,\n ContentMarkdownOptions,\n ContentEntryReference,\n DefinedCollection,\n GlobLoader,\n GlobLoaderOptions,\n ResolvedContentEntries,\n RenderedContent,\n} from './types.ts'\n\nconst ensureServerOnly = () => {\n if (typeof window !== 'undefined') {\n throw new Error('@eclipsa/content query APIs are server-only.')\n }\n}\n\nconst loadRuntime = async (): Promise<ContentRuntimeModule> => {\n ensureServerOnly()\n return import('virtual:eclipsa-content:runtime')\n}\n\nexport interface ContentRuntimeModule {\n getCollection<Collection extends AnyCollection>(\n collection: Collection,\n filter?: ContentFilter<Collection>,\n ): Promise<CollectionEntry<Collection>[]>\n getEntries<Entries extends readonly ContentEntryReference<any>[]>(\n entries: Entries,\n ): Promise<ResolvedContentEntries<Entries>>\n getEntry<Collection extends AnyCollection>(\n collection: Collection,\n id: string,\n ): Promise<CollectionEntry<Collection> | undefined>\n render<Collection extends AnyCollection>(\n entry: CollectionEntry<Collection>,\n ): Promise<RenderedContent>\n}\n\nexport type {\n AnyCollection,\n BaseContentEntry,\n CollectionEntry,\n ContentCollectionDefinition,\n ContentComponentProps,\n ContentFilter,\n ContentHighlightOptions,\n ContentMarkdownOptions,\n ContentEntryReference,\n ContentHeading,\n ContentLoader,\n ContentLoaderContext,\n ContentLoaderObject,\n ContentSearchDocument,\n ContentSearchField,\n ContentSearchIndex,\n ContentSearchOptions,\n ContentSearchPosting,\n ContentSearchQueryOptions,\n ContentSearchResult,\n ContentSourceEntry,\n DefinedCollection,\n GlobLoader,\n GlobLoaderOptions,\n InferCollectionData,\n 
ResolvedContentSearchOptions,\n RenderedContent,\n ResolvedContentEntries,\n} from './types.ts'\n\nexport type { StandardSchemaV1 } from 'eclipsa'\n\nexport const defineCollection = <\n Schema extends StandardSchemaV1<any, any> | undefined = StandardSchemaV1<any, any> | undefined,\n>(\n definition: ContentCollectionDefinition<Schema>,\n): DefinedCollection<Schema> =>\n ({\n ...definition,\n [CONTENT_COLLECTION_MARKER]: true,\n }) as DefinedCollection<Schema>\n\nexport const glob = (options: GlobLoaderOptions): GlobLoader => ({\n base: options.base,\n kind: 'glob',\n pattern: options.pattern,\n})\n\nexport const Content = ({ as = 'article', html, ...props }: ContentComponentProps) => ({\n isStatic: false,\n props: {\n ...props,\n dangerouslySetInnerHTML: html,\n },\n type: as,\n})\n\nexport const getCollection = async <Collection extends AnyCollection>(\n collection: Collection,\n filter?: ContentFilter<Collection>,\n): Promise<CollectionEntry<Collection>[]> =>\n (await loadRuntime()).getCollection(collection, filter) as Promise<CollectionEntry<Collection>[]>\n\nexport const getEntry = async <Collection extends AnyCollection>(\n collection: Collection,\n id: string,\n): Promise<CollectionEntry<Collection> | undefined> =>\n (await loadRuntime()).getEntry(collection, id) as Promise<CollectionEntry<Collection> | undefined>\n\nexport const getEntries = async <Entries extends readonly ContentEntryReference<any>[]>(\n entries: Entries,\n): Promise<ResolvedContentEntries<Entries>> =>\n (await loadRuntime()).getEntries(entries) as Promise<ResolvedContentEntries<Entries>>\n\nexport const render = async <Collection extends AnyCollection>(\n entry: CollectionEntry<Collection>,\n): Promise<RenderedContent> => (await 
loadRuntime()).render(entry)\n"],"mappings":";;AAkBA,MAAM,yBAAyB;AAC7B,KAAI,OAAO,WAAW,YACpB,OAAM,IAAI,MAAM,+CAA+C;;AAInE,MAAM,cAAc,YAA2C;AAC7D,mBAAkB;AAClB,QAAO,OAAO;;AAqDhB,MAAa,oBAGX,gBAEC;CACC,GAAG;EACF,4BAA4B;CAC9B;AAEH,MAAa,QAAQ,aAA4C;CAC/D,MAAM,QAAQ;CACd,MAAM;CACN,SAAS,QAAQ;CAClB;AAED,MAAa,WAAW,EAAE,KAAK,WAAW,MAAM,GAAG,aAAoC;CACrF,UAAU;CACV,OAAO;EACL,GAAG;EACH,yBAAyB;EAC1B;CACD,MAAM;CACP;AAED,MAAa,gBAAgB,OAC3B,YACA,YAEC,MAAM,aAAa,EAAE,cAAc,YAAY,OAAO;AAEzD,MAAa,WAAW,OACtB,YACA,QAEC,MAAM,aAAa,EAAE,SAAS,YAAY,GAAG;AAEhD,MAAa,aAAa,OACxB,aAEC,MAAM,aAAa,EAAE,WAAW,QAAQ;AAE3C,MAAa,SAAS,OACpB,WAC8B,MAAM,aAAa,EAAE,OAAO,MAAM"}
@@ -0,0 +1,40 @@
1
+ {
2
+ "name": "@eclipsa/content",
3
+ "homepage": "https://github.com/pnsk-lab/eclipsa",
4
+ "bugs": {
5
+ "url": "https://github.com/pnsk-lab/eclipsa/issues"
6
+ },
7
+ "license": "MIT",
8
+ "repository": {
9
+ "type": "git",
10
+ "url": "git+https://github.com/pnsk-lab/eclipsa.git",
11
+ "directory": "packages/content"
12
+ },
13
+ "type": "module",
14
+ "exports": {
15
+ ".": {
16
+ "types": "./mod.d.mts",
17
+ "import": "./mod.mjs"
18
+ },
19
+ "./vite": {
20
+ "types": "./vite.d.mts",
21
+ "import": "./vite.mjs"
22
+ },
23
+ "./internal": {
24
+ "types": "./internal.d.mts",
25
+ "import": "./internal.mjs"
26
+ }
27
+ },
28
+ "dependencies": {
29
+ "@ox-content/napi": "^0.17.0",
30
+ "eclipsa": "0.2.0-alpha.0",
31
+ "fast-glob": "^3.3.2",
32
+ "shiki": "^4.0.2",
33
+ "yaml": "^2.8.1"
34
+ },
35
+ "peerDependencies": {
36
+ "vite": "*"
37
+ },
38
+ "version": "0.0.0",
39
+ "private": false
40
+ }
@@ -0,0 +1,6 @@
1
+ //#region types.ts
2
+ const CONTENT_COLLECTION_MARKER = "__eclipsa_content_collection__";
3
+ //#endregion
4
+ export { CONTENT_COLLECTION_MARKER as t };
5
+
6
+ //# sourceMappingURL=types-rZ-wc23p.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types-rZ-wc23p.mjs","names":[],"sources":["../types.ts"],"sourcesContent":["import type { InferStandardSchemaOutput, StandardSchemaV1 } from 'eclipsa'\n\nexport const CONTENT_COLLECTION_MARKER = '__eclipsa_content_collection__'\n\nexport interface ContentSourceEntry {\n body: string\n data?: Record<string, unknown>\n filePath?: string\n id?: string\n}\n\nexport interface ContentLoaderContext {\n collection: string\n configPath: string\n root: string\n}\n\nexport interface ContentLoaderObject {\n load(\n context: ContentLoaderContext,\n ): ContentSourceEntry[] | Promise<ContentSourceEntry[]> | readonly ContentSourceEntry[]\n}\n\nexport interface ContentHighlightOptions {\n theme?: string\n}\n\nexport interface ContentMarkdownOptions {\n highlight?: boolean | ContentHighlightOptions\n}\n\nexport interface ContentSearchOptions {\n enabled?: boolean\n hotkey?: string\n limit?: number\n placeholder?: string\n prefix?: boolean\n}\n\nexport interface ResolvedContentSearchOptions {\n enabled: boolean\n hotkey: string\n limit: number\n placeholder: string\n prefix: boolean\n}\n\nexport type ContentSearchField = 'body' | 'code' | 'heading' | 'title'\n\nexport interface ContentSearchDocument {\n body: string\n code: string[]\n collection: string\n headings: string[]\n id: string\n title: string\n url: string\n}\n\nexport interface ContentSearchPosting {\n docIdx: number\n field: ContentSearchField\n tf: number\n}\n\nexport interface ContentSearchIndex {\n avgDl: number\n df: Record<string, number>\n docCount: number\n documents: ContentSearchDocument[]\n index: Record<string, ContentSearchPosting[]>\n options: ResolvedContentSearchOptions\n}\n\nexport interface ContentSearchQueryOptions {\n limit?: number\n prefix?: boolean\n}\n\nexport interface ContentSearchResult {\n collection: string\n id: string\n matches: string[]\n score: number\n snippet: string\n title: string\n url: string\n}\n\nexport interface GlobLoaderOptions {\n base: string\n pattern: 
string\n}\n\nexport interface GlobLoader {\n readonly base: string\n readonly kind: 'glob'\n readonly pattern: string\n}\n\nexport type ContentLoader = GlobLoader | ContentLoaderObject\n\nexport interface ContentCollectionDefinition<\n Schema extends StandardSchemaV1<any, any> | undefined,\n> {\n loader: ContentLoader\n markdown?: ContentMarkdownOptions\n search?: boolean | ContentSearchOptions\n schema?: Schema\n}\n\nexport interface DefinedCollection<\n Schema extends StandardSchemaV1<any, any> | undefined = StandardSchemaV1<any, any> | undefined,\n> extends ContentCollectionDefinition<Schema> {\n readonly [CONTENT_COLLECTION_MARKER]: true\n}\n\nexport type AnyCollection = DefinedCollection<StandardSchemaV1<any, any> | undefined>\n\nexport type InferCollectionData<Collection extends AnyCollection> =\n Collection extends DefinedCollection<infer Schema>\n ? Schema extends StandardSchemaV1<any, any>\n ? InferStandardSchemaOutput<Schema>\n : Record<string, unknown>\n : Record<string, unknown>\n\nexport interface BaseContentEntry<\n Data = Record<string, unknown>,\n Collection extends string = string,\n> {\n body: string\n collection: Collection\n data: Data\n filePath: string\n id: string\n}\n\nexport interface ContentHeading {\n depth: number\n slug: string\n text: string\n}\n\nexport interface ContentComponentProps extends Record<string, unknown> {\n as?: string\n html: string\n}\n\nexport interface RenderedContent {\n Content: (props?: Omit<ContentComponentProps, 'html'>) => any\n headings: ContentHeading[]\n html: string\n}\n\nexport type CollectionEntry<Collection extends AnyCollection = AnyCollection> = BaseContentEntry<\n InferCollectionData<Collection>\n>\n\nexport type ContentFilter<Collection extends AnyCollection> = (\n entry: CollectionEntry<Collection>,\n) => boolean | Promise<boolean>\n\nexport interface ContentEntryReference<Collection extends AnyCollection = AnyCollection> {\n collection: Collection\n id: string\n}\n\nexport type 
ResolvedContentEntries<Entries extends readonly ContentEntryReference<any>[]> = {\n [Index in keyof Entries]: Entries[Index] extends ContentEntryReference<infer Collection>\n ? CollectionEntry<Collection> | undefined\n : never\n}\n"],"mappings":";AAEA,MAAa,4BAA4B"}
@@ -0,0 +1,24 @@
1
+ declare module 'virtual:eclipsa-content:runtime' {
2
+ export function getCollection<Collection extends import('./types.ts').AnyCollection>(
3
+ collection: Collection,
4
+ filter?: import('./mod.ts').ContentFilter<Collection>,
5
+ ): Promise<import('./mod.ts').CollectionEntry<Collection>[]>
6
+ export function getEntries<
7
+ Entries extends readonly import('./types.ts').ContentEntryReference<any>[],
8
+ >(entries: Entries): Promise<import('./types.ts').ResolvedContentEntries<Entries>>
9
+ export function getEntry<Collection extends import('./types.ts').AnyCollection>(
10
+ collection: Collection,
11
+ id: string,
12
+ ): Promise<import('./mod.ts').CollectionEntry<Collection> | undefined>
13
+ export function render<Collection extends import('./types.ts').AnyCollection>(
14
+ entry: import('./mod.ts').CollectionEntry<Collection>,
15
+ ): Promise<import('./types.ts').RenderedContent>
16
+ }
17
+
18
+ declare module 'virtual:eclipsa-content:search' {
19
+ export const searchOptions: import('./types.ts').ResolvedContentSearchOptions
20
+ export function search(
21
+ query: string,
22
+ options?: import('./types.ts').ContentSearchQueryOptions,
23
+ ): Promise<import('./types.ts').ContentSearchResult[]>
24
+ }
@@ -0,0 +1,7 @@
1
+ import { PluginOption } from "vite";
2
+
3
+ //#region vite.d.ts
4
+ declare const eclipsaContent: () => PluginOption;
5
+ //#endregion
6
+ export { eclipsaContent };
7
+ //# sourceMappingURL=vite.d.mts.map
package/dist/vite.mjs ADDED
@@ -0,0 +1,195 @@
1
+ import { c as resolveContentSearchOptions, r as createContentSearch, s as generateContentSearchRuntimeModule } from "./internal-h0upzIHm.mjs";
2
+ import * as fs from "node:fs/promises";
3
+ import { pathToFileURL } from "node:url";
4
+ import path from "node:path";
5
+ //#region vite.ts
6
// Well-known key under which dev-app invalidation callbacks are registered on
// the dev server object. Symbol.for so independent module copies share it.
const DEV_APP_INVALIDATORS_KEY = Symbol.for("eclipsa.dev-app-invalidators");
// Custom HMR event sent over the dev-server websocket on content changes.
const CONTENT_HMR_EVENT = "eclipsa:content-update";
// Public virtual module ids and their resolved (\0-prefixed) counterparts.
const VIRTUAL_RUNTIME_ID = "virtual:eclipsa-content:runtime";
const RESOLVED_VIRTUAL_RUNTIME_ID = "\0eclipsa-content:runtime";
const VIRTUAL_SEARCH_ID = "virtual:eclipsa-content:search";
const RESOLVED_VIRTUAL_SEARCH_ID = "\0eclipsa-content:search";
// Project-relative location of the user's content config file.
const CONTENT_CONFIG_PATH = "app/content.config.ts";
// Marker property used to brand client-side collection stubs.
const CONTENT_COLLECTION_MARKER = "__eclipsa_content_collection__";
// File name of the emitted search-index JSON asset.
const CONTENT_SEARCH_ASSET = "__eclipsa_content_search__.json";
15
// Converts Windows-style backslashes to forward slashes for stable comparisons.
const normalizeSlashes = (value) => value.split("\\").join("/");
16
// Drops a trailing `?query` suffix from a module id, if present.
const stripQuery = (id) => {
  const queryStart = id.indexOf("?");
  return queryStart === -1 ? id : id.slice(0, queryStart);
};
17
// Resolves true when the path is accessible on disk, false otherwise
// (fs.access rejects for missing paths and permission errors alike).
const fileExists = (filePath) =>
  fs.access(filePath).then(
    () => true,
    () => false,
  );
25
// Absolute path to the project's content config file under `root`.
const getConfigPath = (root) => {
  return path.join(root, CONTENT_CONFIG_PATH);
};
26
// URL path of the search-index asset under the configured base
// ("" behaves like "/", and a missing trailing slash is added).
const getSearchAssetPath = (base) => {
  let prefix;
  if (base === "") {
    prefix = "/";
  } else if (base.endsWith("/")) {
    prefix = base;
  } else {
    prefix = `${base}/`;
  }
  return `${prefix}${CONTENT_SEARCH_ASSET}`;
};
29
// True when a module id (query suffix ignored) resolves to the content config
// path. NOTE(review): a relative id resolves against process.cwd(), not root —
// presumably ids are absolute here; verify against callers.
const isContentConfigId = (root, id) => {
  const candidate = normalizeSlashes(path.resolve(stripQuery(id)));
  const configPath = normalizeSlashes(getConfigPath(root));
  return candidate === configPath;
};
30
// Collects the names of all top-level `export const <name> =` declarations
// found in the given source text (one per line, leading whitespace allowed).
const getNamedCollectionExports = (source) => {
  const names = [];
  for (const match of source.matchAll(/^\s*export\s+const\s+([A-Za-z_$][\w$]*)\s*=/gm)) {
    names.push(match[1]);
  }
  return names;
};
31
// Invalidates the virtual runtime/search modules in every module graph the
// server knows about (top-level graph plus per-environment graphs).
const invalidateVirtualRuntime = (server) => {
  const environmentGraphs = Object.values(server.environments ?? {}).map(
    (environment) => environment.moduleGraph,
  );
  const virtualIds = [
    VIRTUAL_RUNTIME_ID,
    RESOLVED_VIRTUAL_RUNTIME_ID,
    VIRTUAL_SEARCH_ID,
    RESOLVED_VIRTUAL_SEARCH_ID,
  ];
  for (const graph of [server.moduleGraph, ...environmentGraphs]) {
    if (!graph) continue;
    for (const virtualId of virtualIds) {
      // Optional chaining: graph shape differs across Vite versions.
      const moduleNode = graph.getModuleById?.(virtualId);
      if (moduleNode) graph.invalidateModule?.(moduleNode);
    }
  }
};
46
// Runs every dev-app invalidation callback registered on the server under the
// shared symbol key; a no-op when none were registered.
const invalidateRegisteredDevApps = (server) => {
  const invalidators = server[DEV_APP_INVALIDATORS_KEY];
  if (!invalidators) {
    return;
  }
  for (const invalidate of invalidators) {
    invalidate();
  }
};
51
// Decides whether a changed file should invalidate the content runtime:
// only files inside the project root, and only the content config itself or
// markdown entries.
const shouldInvalidateForFile = (root, filePath) => {
  const normalizedFilePath = normalizeSlashes(path.resolve(filePath));
  const normalizedRoot = normalizeSlashes(path.resolve(root));
  // Require a path-segment boundary: a bare startsWith(root) check would also
  // match sibling directories such as "<root>-backup/post.md".
  const rootPrefix = normalizedRoot.endsWith("/") ? normalizedRoot : `${normalizedRoot}/`;
  if (normalizedFilePath !== normalizedRoot && !normalizedFilePath.startsWith(rootPrefix)) {
    return false;
  }
  if (normalizedFilePath === normalizeSlashes(path.join(root, CONTENT_CONFIG_PATH))) return true;
  return normalizedFilePath.endsWith(".md");
};
58
// Generates the source of the virtual runtime module for the case where the
// project has no content config file: every query API rejects with a
// descriptive error instead of the plugin failing at import time.
// The template body is emitted verbatim as module code — do not reformat it.
const createMissingRuntimeModule = (root) => {
  const message = `Missing ${CONTENT_CONFIG_PATH} in ${root}.`;
  return `
const error = new Error(${JSON.stringify(message)});
export const getCollection = async () => { throw error; };
export const getEntries = async () => { throw error; };
export const getEntry = async () => { throw error; };
export const render = async () => { throw error; };
`;
};
68
// Generates the client-side stand-in for the runtime virtual module: the
// query APIs exist so client code type-checks/bundles, but calling any of
// them rejects because content queries are server-only.
// The template body is emitted verbatim as module code — do not reformat it.
const createClientRuntimeModule = () => `
const error = new Error("@eclipsa/content query APIs are server-only.");
export const getCollection = async () => { throw error; };
export const getEntries = async () => { throw error; };
export const getEntry = async () => { throw error; };
export const render = async () => { throw error; };
`;
75
// Generates the search virtual module used when search is disabled or there
// is nothing to index: options resolved from `false`, and search() always
// resolves to an empty array.
// The template body is emitted verbatim as module code — do not reformat it.
const createDisabledSearchModule = () => `
export const searchOptions = ${JSON.stringify(resolveContentSearchOptions(false))};
export const search = async () => [];
export default { search, searchOptions };
`;
80
// Builds a client-safe replacement for the content config module: each named
// `export const` becomes a frozen marker stub so the real (server-only)
// config never reaches the client bundle. Returns "" when nothing is exported.
const createClientContentConfigModule = async (configPath) => {
  const source = await fs.readFile(configPath, "utf8");
  const exportNames = getNamedCollectionExports(source);
  if (exportNames.length === 0) {
    return "";
  }
  const stubLines = exportNames.map(
    (name) => `export const ${name} = Object.freeze({ ${JSON.stringify(CONTENT_COLLECTION_MARKER)}: true });`,
  );
  return stubLines.join("\n");
};
85
// Generates the real (server) runtime virtual module: it imports the user's
// content config and wires the query APIs through createContentRuntime.
// The template body is emitted verbatim as module code — do not reformat it.
const createRuntimeModule = (root, configPath) => `
import * as collectionsModule from ${JSON.stringify(normalizeSlashes(configPath))};
import { createContentRuntime } from '@eclipsa/content/internal';

const runtime = createContentRuntime({
collectionsModule,
configPath: ${JSON.stringify(normalizeSlashes(configPath))},
root: ${JSON.stringify(normalizeSlashes(root))},
});

export const getCollection = runtime.getCollection;
export const getEntries = runtime.getEntries;
export const getEntry = runtime.getEntry;
export const render = runtime.render;
`;
100
// Dynamically imports the content config, appending a timestamp query so a
// re-import after edits bypasses the ESM module cache.
const loadCollectionsModule = async (configPath) => {
  const moduleUrl = pathToFileURL(configPath).href;
  const cacheBuster = Date.now();
  return import(`${moduleUrl}?t=${cacheBuster}`);
};
103
// Reacts to a file change: when the file is content-relevant, invalidates the
// virtual modules and registered dev apps, notifies clients over the HMR
// websocket, and reports true; otherwise reports false.
const handleInvalidation = (server, root, filePath) => {
  if (shouldInvalidateForFile(root, filePath)) {
    invalidateVirtualRuntime(server);
    invalidateRegisteredDevApps(server);
    server.ws?.send?.(CONTENT_HMR_EVENT);
    return true;
  }
  return false;
};
110
// The core Vite plugin: resolves/loads the virtual runtime and search
// modules, serves the search index in dev, emits it as an asset in build,
// and invalidates state on content changes.
const contentPlugin = () => {
  let config;
  // Memoized search-index state; null until first use, reset on hotUpdate.
  let searchStatePromise = null;
  // Builds (once) the search index from the content config. Resolves to null
  // when there is no config, search is disabled, or the index is empty.
  const resolveSearchState = async () => {
    if (searchStatePromise) return searchStatePromise;
    searchStatePromise = (async () => {
      const configPath = getConfigPath(config.root);
      if (!await fileExists(configPath)) return null;
      const collectionsModule = await loadCollectionsModule(configPath);
      const result = await createContentSearch({
        base: config.base,
        collectionsModule,
        configPath,
        root: config.root
      });
      if (!result.options.enabled || result.index.documents.length === 0) return null;
      return {
        indexJson: JSON.stringify(result.index),
        options: result.options
      };
    })();
    return searchStatePromise;
  };
  return {
    // Run before core plugins so the virtual ids are claimed first.
    enforce: "pre",
    name: "vite-plugin-eclipsa-content",
    configResolved(resolvedConfig) {
      config = resolvedConfig;
    },
    // Dev only: serve the search index JSON at its asset URL.
    configureServer(server) {
      const searchPath = getSearchAssetPath(config.base);
      server.middlewares.use(async (req, res, next) => {
        // Compare pathname only (ignore any query string).
        if ((req.url?.split("?", 1)[0] ?? "") !== searchPath) {
          next();
          return;
        }
        const state = await resolveSearchState();
        if (!state) {
          res.statusCode = 404;
          res.end("Not found");
          return;
        }
        res.setHeader("Content-Type", "application/json; charset=utf-8");
        res.end(state.indexJson);
      });
    },
    hotUpdate(options) {
      // Any change may affect the index; rebuild lazily on next request.
      searchStatePromise = null;
      // Returning [] tells Vite the update was handled (no modules to HMR);
      // returning undefined falls through to default handling.
      if (handleInvalidation(options.server, config.root, options.file)) return [];
    },
    resolveId(id) {
      // Map public virtual ids to their \0-prefixed resolved forms.
      if (id === VIRTUAL_RUNTIME_ID) return RESOLVED_VIRTUAL_RUNTIME_ID;
      if (id === VIRTUAL_SEARCH_ID) return RESOLVED_VIRTUAL_SEARCH_ID;
      return null;
    },
    async load(id) {
      if (id === RESOLVED_VIRTUAL_SEARCH_ID) {
        const state = await resolveSearchState();
        if (!state) return createDisabledSearchModule();
        return generateContentSearchRuntimeModule(getSearchAssetPath(config.base), state.options);
      }
      if (id !== RESOLVED_VIRTUAL_RUNTIME_ID) {
        // In the client environment, swap the real content config for
        // marker stubs so server-only code never reaches the browser.
        if (this.environment?.name === "client" && isContentConfigId(config.root, id)) return createClientContentConfigModule(getConfigPath(config.root));
        return null;
      }
      // Runtime module: client gets rejecting stubs; server gets the real
      // runtime, or an error module when the config file is missing.
      if (this.environment?.name === "client") return createClientRuntimeModule();
      const configPath = getConfigPath(config.root);
      if (!await fileExists(configPath)) return createMissingRuntimeModule(config.root);
      return createRuntimeModule(config.root, configPath);
    },
    // Build only: emit the search index as a static asset.
    async generateBundle() {
      const state = await resolveSearchState();
      if (!state) return;
      this.emitFile({
        fileName: CONTENT_SEARCH_ASSET,
        source: state.indexJson,
        type: "asset"
      });
    }
  };
};
191
// Public entry point. Vite accepts nested plugin arrays (PluginOption), so
// the single plugin is returned wrapped in an array.
const eclipsaContent = () => {
  return [contentPlugin()];
};
192
+ //#endregion
193
+ export { eclipsaContent };
194
+
195
+ //# sourceMappingURL=vite.mjs.map