doccupine 0.0.83 → 0.0.84

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/dist/lib/layout.js +7 -2
  2. package/dist/lib/structures.js +6 -0
  3. package/dist/templates/app/robots.d.ts +1 -0
  4. package/dist/templates/app/robots.js +11 -0
  5. package/dist/templates/app/theme.d.ts +1 -1
  6. package/dist/templates/app/theme.js +1 -1
  7. package/dist/templates/components/DocsSideBar.d.ts +1 -1
  8. package/dist/templates/components/DocsSideBar.js +2 -2
  9. package/dist/templates/components/PostHogProvider.d.ts +1 -1
  10. package/dist/templates/components/PostHogProvider.js +9 -62
  11. package/dist/templates/components/PostHogProviderLazy.d.ts +1 -0
  12. package/dist/templates/components/PostHogProviderLazy.js +70 -0
  13. package/dist/templates/components/SearchDocs.d.ts +1 -1
  14. package/dist/templates/components/SearchDocs.js +40 -270
  15. package/dist/templates/components/SearchModalContent.d.ts +1 -0
  16. package/dist/templates/components/SearchModalContent.js +326 -0
  17. package/dist/templates/components/SideBar.d.ts +1 -1
  18. package/dist/templates/components/SideBar.js +5 -1
  19. package/dist/templates/components/layout/DocsComponents.d.ts +1 -1
  20. package/dist/templates/components/layout/DocsComponents.js +1 -1
  21. package/dist/templates/components/layout/DocsNavigation.d.ts +1 -1
  22. package/dist/templates/components/layout/DocsNavigation.js +1 -1
  23. package/dist/templates/components/layout/Footer.d.ts +1 -1
  24. package/dist/templates/components/layout/Footer.js +8 -3
  25. package/dist/templates/components/layout/SharedStyles.d.ts +1 -1
  26. package/dist/templates/components/layout/SharedStyles.js +2 -1
  27. package/dist/templates/components/layout/StaticLinks.d.ts +1 -1
  28. package/dist/templates/components/layout/StaticLinks.js +1 -1
  29. package/dist/templates/mdx/theme.mdx.d.ts +1 -1
  30. package/dist/templates/mdx/theme.mdx.js +1 -1
  31. package/dist/templates/package.js +10 -10
  32. package/dist/templates/services/mcp/tools.d.ts +1 -1
  33. package/dist/templates/services/mcp/tools.js +9 -10
  34. package/dist/templates/tsconfig.d.ts +1 -1
  35. package/dist/templates/tsconfig.js +0 -1
  36. package/package.json +3 -3
@@ -11,21 +11,21 @@ export const packageJsonTemplate = JSON.stringify({
11
11
  },
12
12
  dependencies: {
13
13
  "@langchain/anthropic": "^1.3.25",
14
- "@langchain/core": "^1.1.34",
14
+ "@langchain/core": "^1.1.36",
15
15
  "@langchain/google-genai": "^2.1.26",
16
- "@langchain/openai": "^1.3.0",
16
+ "@langchain/openai": "^1.3.1",
17
17
  "@mdx-js/react": "^3.1.1",
18
- "@modelcontextprotocol/sdk": "^1.27.1",
18
+ "@modelcontextprotocol/sdk": "^1.28.0",
19
19
  "@posthog/react": "^1.8.2",
20
- "cherry-styled-components": "^0.1.13",
21
- langchain: "^1.2.35",
22
- "lucide-react": "^0.577.0",
20
+ "cherry-styled-components": "^0.1.16",
21
+ langchain: "^1.2.37",
22
+ "lucide-react": "^1.7.0",
23
23
  minisearch: "^7.2.0",
24
24
  next: "16.2.1",
25
25
  "next-mdx-remote": "^6.0.0",
26
26
  polished: "^4.3.1",
27
- "posthog-js": "^1.363.1",
28
- "posthog-node": "^5.28.5",
27
+ "posthog-js": "^1.364.2",
28
+ "posthog-node": "^5.28.9",
29
29
  react: "19.2.4",
30
30
  "react-dom": "19.2.4",
31
31
  "rehype-highlight": "^7.0.2",
@@ -40,10 +40,10 @@ export const packageJsonTemplate = JSON.stringify({
40
40
  "@types/node": "^25",
41
41
  "@types/react": "^19",
42
42
  "@types/react-dom": "^19",
43
- "baseline-browser-mapping": "^2.10.9",
43
+ "baseline-browser-mapping": "^2.10.12",
44
44
  eslint: "^9",
45
45
  "eslint-config-next": "16.2.1",
46
46
  prettier: "^3.8.1",
47
- typescript: "^5",
47
+ typescript: "^6",
48
48
  },
49
49
  }, null, 2) + "\n";
@@ -1 +1 @@
1
- export declare const mcpToolsTemplate = "import path from \"node:path\";\nimport fs from \"node:fs/promises\";\nimport type {\n MCPToolDefinition,\n DocsResource,\n DocsChunk,\n GetDocParams,\n ListDocsParams,\n} from \"@/services/mcp/types\";\n\nconst PROJECT_ROOT = process.cwd();\nconst APP_DIR = path.join(PROJECT_ROOT, \"app\");\nconst VALID_EXT = new Set([\".ts\", \".tsx\", \".js\", \".jsx\"]);\n\n/**\n * Tool definitions for MCP - these describe the available tools\n */\nexport const DOCS_TOOLS: MCPToolDefinition[] = [\n {\n name: \"search_docs\",\n description:\n \"Search through the documentation content using semantic search. Returns relevant chunks of documentation based on the query.\",\n inputSchema: {\n type: \"object\",\n properties: {\n query: {\n type: \"string\",\n description: \"The search query to find relevant documentation\",\n },\n limit: {\n type: \"number\",\n description: \"Maximum number of results to return (default: 6)\",\n },\n },\n required: [\"query\"],\n },\n },\n {\n name: \"get_doc\",\n description:\n \"Get the full content of a specific documentation page by its path.\",\n inputSchema: {\n type: \"object\",\n properties: {\n path: {\n type: \"string\",\n description:\n \"The file path to the documentation page (e.g., 'app/getting-started/page.tsx')\",\n },\n },\n required: [\"path\"],\n },\n },\n {\n name: \"list_docs\",\n description:\n \"List all available documentation pages, optionally filtered by directory.\",\n inputSchema: {\n type: \"object\",\n properties: {\n directory: {\n type: \"string\",\n description:\n \"Optional directory to filter results (e.g., 'components')\",\n },\n },\n },\n },\n];\n\n/**\n * Recursively walk directory to find documentation files\n */\nasync function* walkDocs(dir: string): AsyncGenerator<string> {\n const entries = await fs.readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n if (entry.isDirectory()) {\n if 
([\"node_modules\", \".next\", \".git\", \"api\"].includes(entry.name)) {\n continue;\n }\n yield* walkDocs(fullPath);\n } else {\n const ext = path.extname(entry.name).toLowerCase();\n if (VALID_EXT.has(ext) && entry.name.startsWith(\"page.\")) {\n yield fullPath;\n }\n }\n }\n}\n\n/**\n * Extract content blocks from a file\n */\nfunction extractContentBlocks(fileText: string): string[] {\n const results: string[] = [];\n\n const tplRegex = /(?:export\\s+)?const\\s+content\\s*=\\s*`((?:\\\\`|[^`])*)`\\s*;/g;\n let m: RegExpExecArray | null;\n while ((m = tplRegex.exec(fileText)) !== null) {\n results.push(m[1]);\n }\n\n const sglRegex = /(?:export\\s+)?const\\s+content\\s*=\\s*'([^']*)'\\s*;/g;\n while ((m = sglRegex.exec(fileText)) !== null) {\n results.push(m[1]);\n }\n\n const dblRegex = /(?:export\\s+)?const\\s+content\\s*=\\s*\"([^\"]*)\"\\s*;/g;\n while ((m = dblRegex.exec(fileText)) !== null) {\n results.push(m[1]);\n }\n\n return results;\n}\n\n/**\n * Get the title from markdown content\n */\nfunction extractTitle(content: string): string {\n const match = content.match(/^#\\s+(.+)$/m);\n return match ? 
match[1].trim() : \"Untitled\";\n}\n\n/**\n * List all documentation resources\n */\nexport async function listDocs(\n params?: ListDocsParams,\n): Promise<DocsResource[]> {\n const resources: DocsResource[] = [];\n const filterDir = params?.directory;\n\n for await (const filePath of walkDocs(APP_DIR)) {\n const relativePath = path.relative(PROJECT_ROOT, filePath);\n\n if (filterDir && !relativePath.includes(filterDir)) {\n continue;\n }\n\n try {\n const fileContent = await fs.readFile(filePath, \"utf8\");\n const blocks = extractContentBlocks(fileContent);\n const content = blocks.join(\"\\n\\n\");\n const title = extractTitle(content);\n const docPath = path.dirname(relativePath).replace(/^app\\/?/, \"\") || \"/\";\n\n resources.push({\n uri: `docs://${docPath}`,\n name: title,\n path: relativePath,\n content,\n });\n } catch (error) {\n console.warn(`Failed to read doc file: ${filePath}`, error);\n }\n }\n\n return resources;\n}\n\n/**\n * Get a specific documentation page\n */\nexport async function getDoc(\n params: GetDocParams,\n): Promise<DocsResource | null> {\n let targetPath = params.path;\n\n // Normalize path\n if (!targetPath.startsWith(\"app/\")) {\n targetPath = `app/${targetPath}`;\n }\n if (!targetPath.includes(\"page.\")) {\n targetPath = path.join(targetPath, \"page.tsx\");\n }\n\n const fullPath = path.join(PROJECT_ROOT, targetPath);\n\n // Prevent path traversal\n const resolvedPath = path.resolve(fullPath);\n if (!resolvedPath.startsWith(path.resolve(APP_DIR))) {\n return null;\n }\n\n try {\n const fileContent = await fs.readFile(fullPath, \"utf8\");\n const blocks = extractContentBlocks(fileContent);\n const content = blocks.join(\"\\n\\n\");\n const title = extractTitle(content);\n const docPath = path.dirname(targetPath).replace(/^app\\/?/, \"\") || \"/\";\n\n return {\n uri: `docs://${docPath}`,\n name: title,\n path: targetPath,\n content,\n };\n } catch (error) {\n console.warn(`Failed to read doc: ${targetPath}`, error);\n return 
null;\n }\n}\n\n/**\n * Chunk text for embeddings.\n * - chunkSize=800 chars balances granularity with embedding context window limits\n * - overlap=100 chars ensures continuity so searches don't miss content at chunk boundaries\n */\nfunction chunkText(text: string, chunkSize = 800, overlap = 100): string[] {\n const chunks: string[] = [];\n let i = 0;\n while (i < text.length) {\n const end = Math.min(i + chunkSize, text.length);\n chunks.push(text.slice(i, end));\n if (end === text.length) break;\n i = end - overlap;\n if (i < 0) i = 0;\n }\n return chunks;\n}\n\n/**\n * Get all documentation chunks for indexing\n */\nexport async function getAllDocsChunks(): Promise<DocsChunk[]> {\n const allChunks: DocsChunk[] = [];\n const docs = await listDocs();\n\n for (const doc of docs) {\n const cleanContent = doc.content\n .replace(/\\r\\n/g, \"\\n\")\n .replace(/\\n{3,}/g, \"\\n\\n\")\n .slice(0, 200_000);\n\n const textChunks = chunkText(cleanContent);\n for (let i = 0; i < textChunks.length; i++) {\n allChunks.push({\n id: `${doc.path}:${i}`,\n text: textChunks[i],\n path: doc.path,\n uri: doc.uri,\n });\n }\n }\n\n return allChunks;\n}\n";
1
+ export declare const mcpToolsTemplate = "import path from \"node:path\";\nimport fs from \"node:fs/promises\";\nimport type {\n MCPToolDefinition,\n DocsResource,\n DocsChunk,\n GetDocParams,\n ListDocsParams,\n} from \"@/services/mcp/types\";\n\nconst APP_DIR = path.join(process.cwd(), \"app\");\nconst VALID_EXT = new Set([\".ts\", \".tsx\", \".js\", \".jsx\"]);\n\n/**\n * Tool definitions for MCP - these describe the available tools\n */\nexport const DOCS_TOOLS: MCPToolDefinition[] = [\n {\n name: \"search_docs\",\n description:\n \"Search through the documentation content using semantic search. Returns relevant chunks of documentation based on the query.\",\n inputSchema: {\n type: \"object\",\n properties: {\n query: {\n type: \"string\",\n description: \"The search query to find relevant documentation\",\n },\n limit: {\n type: \"number\",\n description: \"Maximum number of results to return (default: 6)\",\n },\n },\n required: [\"query\"],\n },\n },\n {\n name: \"get_doc\",\n description:\n \"Get the full content of a specific documentation page by its path.\",\n inputSchema: {\n type: \"object\",\n properties: {\n path: {\n type: \"string\",\n description:\n \"The file path to the documentation page (e.g., 'app/getting-started/page.tsx')\",\n },\n },\n required: [\"path\"],\n },\n },\n {\n name: \"list_docs\",\n description:\n \"List all available documentation pages, optionally filtered by directory.\",\n inputSchema: {\n type: \"object\",\n properties: {\n directory: {\n type: \"string\",\n description:\n \"Optional directory to filter results (e.g., 'components')\",\n },\n },\n },\n },\n];\n\n/**\n * Recursively walk directory to find documentation files\n */\nasync function* walkDocs(dir: string): AsyncGenerator<string> {\n const entries = await fs.readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n if (entry.isDirectory()) {\n if ([\"node_modules\", \".next\", \".git\", 
\"api\"].includes(entry.name)) {\n continue;\n }\n yield* walkDocs(fullPath);\n } else {\n const ext = path.extname(entry.name).toLowerCase();\n if (VALID_EXT.has(ext) && entry.name.startsWith(\"page.\")) {\n yield fullPath;\n }\n }\n }\n}\n\n/**\n * Extract content blocks from a file\n */\nfunction extractContentBlocks(fileText: string): string[] {\n const results: string[] = [];\n\n const tplRegex = /(?:export\\s+)?const\\s+content\\s*=\\s*`((?:\\\\`|[^`])*)`\\s*;/g;\n let m: RegExpExecArray | null;\n while ((m = tplRegex.exec(fileText)) !== null) {\n results.push(m[1]);\n }\n\n const sglRegex = /(?:export\\s+)?const\\s+content\\s*=\\s*'([^']*)'\\s*;/g;\n while ((m = sglRegex.exec(fileText)) !== null) {\n results.push(m[1]);\n }\n\n const dblRegex = /(?:export\\s+)?const\\s+content\\s*=\\s*\"([^\"]*)\"\\s*;/g;\n while ((m = dblRegex.exec(fileText)) !== null) {\n results.push(m[1]);\n }\n\n return results;\n}\n\n/**\n * Get the title from markdown content\n */\nfunction extractTitle(content: string): string {\n const match = content.match(/^#\\s+(.+)$/m);\n return match ? 
match[1].trim() : \"Untitled\";\n}\n\n/**\n * List all documentation resources\n */\nexport async function listDocs(\n params?: ListDocsParams,\n): Promise<DocsResource[]> {\n const resources: DocsResource[] = [];\n const filterDir = params?.directory;\n\n for await (const filePath of walkDocs(APP_DIR)) {\n const relativePath = path.join(\"app\", path.relative(APP_DIR, filePath));\n\n if (filterDir && !relativePath.includes(filterDir)) {\n continue;\n }\n\n try {\n const fileContent = await fs.readFile(filePath, \"utf8\");\n const blocks = extractContentBlocks(fileContent);\n const content = blocks.join(\"\\n\\n\");\n const title = extractTitle(content);\n const docPath = path.dirname(relativePath).replace(/^app\\/?/, \"\") || \"/\";\n\n resources.push({\n uri: `docs://${docPath}`,\n name: title,\n path: relativePath,\n content,\n });\n } catch (error) {\n console.warn(`Failed to read doc file: ${filePath}`, error);\n }\n }\n\n return resources;\n}\n\n/**\n * Get a specific documentation page\n */\nexport async function getDoc(\n params: GetDocParams,\n): Promise<DocsResource | null> {\n let targetPath = params.path;\n\n // Normalize path - strip leading \"app/\" if present to get the relative part\n const relativePart = targetPath.replace(/^app\\//, \"\");\n if (!relativePart.includes(\"page.\")) {\n targetPath = path.join(\"app\", relativePart, \"page.tsx\");\n } else if (!targetPath.startsWith(\"app/\")) {\n targetPath = path.join(\"app\", relativePart);\n }\n\n const fullPath = path.join(APP_DIR, targetPath.replace(/^app\\//, \"\"));\n\n // Prevent path traversal\n const resolvedPath = path.resolve(fullPath);\n if (!resolvedPath.startsWith(path.resolve(APP_DIR))) {\n return null;\n }\n\n try {\n const fileContent = await fs.readFile(fullPath, \"utf8\");\n const blocks = extractContentBlocks(fileContent);\n const content = blocks.join(\"\\n\\n\");\n const title = extractTitle(content);\n const docPath = path.dirname(targetPath).replace(/^app\\/?/, \"\") || 
\"/\";\n\n return {\n uri: `docs://${docPath}`,\n name: title,\n path: targetPath,\n content,\n };\n } catch (error) {\n console.warn(`Failed to read doc: ${targetPath}`, error);\n return null;\n }\n}\n\n/**\n * Chunk text for embeddings.\n * - chunkSize=800 chars balances granularity with embedding context window limits\n * - overlap=100 chars ensures continuity so searches don't miss content at chunk boundaries\n */\nfunction chunkText(text: string, chunkSize = 800, overlap = 100): string[] {\n const chunks: string[] = [];\n let i = 0;\n while (i < text.length) {\n const end = Math.min(i + chunkSize, text.length);\n chunks.push(text.slice(i, end));\n if (end === text.length) break;\n i = end - overlap;\n if (i < 0) i = 0;\n }\n return chunks;\n}\n\n/**\n * Get all documentation chunks for indexing\n */\nexport async function getAllDocsChunks(): Promise<DocsChunk[]> {\n const allChunks: DocsChunk[] = [];\n const docs = await listDocs();\n\n for (const doc of docs) {\n const cleanContent = doc.content\n .replace(/\\r\\n/g, \"\\n\")\n .replace(/\\n{3,}/g, \"\\n\\n\")\n .slice(0, 200_000);\n\n const textChunks = chunkText(cleanContent);\n for (let i = 0; i < textChunks.length; i++) {\n allChunks.push({\n id: `${doc.path}:${i}`,\n text: textChunks[i],\n path: doc.path,\n uri: doc.uri,\n });\n }\n }\n\n return allChunks;\n}\n";
@@ -8,8 +8,7 @@ import type {
8
8
  ListDocsParams,
9
9
  } from "@/services/mcp/types";
10
10
 
11
- const PROJECT_ROOT = process.cwd();
12
- const APP_DIR = path.join(PROJECT_ROOT, "app");
11
+ const APP_DIR = path.join(process.cwd(), "app");
13
12
  const VALID_EXT = new Set([".ts", ".tsx", ".js", ".jsx"]);
14
13
 
15
14
  /**
@@ -132,7 +131,7 @@ export async function listDocs(
132
131
  const filterDir = params?.directory;
133
132
 
134
133
  for await (const filePath of walkDocs(APP_DIR)) {
135
- const relativePath = path.relative(PROJECT_ROOT, filePath);
134
+ const relativePath = path.join("app", path.relative(APP_DIR, filePath));
136
135
 
137
136
  if (filterDir && !relativePath.includes(filterDir)) {
138
137
  continue;
@@ -167,15 +166,15 @@ export async function getDoc(
167
166
  ): Promise<DocsResource | null> {
168
167
  let targetPath = params.path;
169
168
 
170
- // Normalize path
171
- if (!targetPath.startsWith("app/")) {
172
- targetPath = \`app/\${targetPath}\`;
173
- }
174
- if (!targetPath.includes("page.")) {
175
- targetPath = path.join(targetPath, "page.tsx");
169
+ // Normalize path - strip leading "app/" if present to get the relative part
170
+ const relativePart = targetPath.replace(/^app\\//, "");
171
+ if (!relativePart.includes("page.")) {
172
+ targetPath = path.join("app", relativePart, "page.tsx");
173
+ } else if (!targetPath.startsWith("app/")) {
174
+ targetPath = path.join("app", relativePart);
176
175
  }
177
176
 
178
- const fullPath = path.join(PROJECT_ROOT, targetPath);
177
+ const fullPath = path.join(APP_DIR, targetPath.replace(/^app\\//, ""));
179
178
 
180
179
  // Prevent path traversal
181
180
  const resolvedPath = path.resolve(fullPath);
@@ -1 +1 @@
1
- export declare const tsconfigTemplate = "{\n \"compilerOptions\": {\n \"target\": \"es2020\",\n \"lib\": [\"dom\", \"dom.iterable\", \"esnext\"],\n \"allowJs\": true,\n \"skipLibCheck\": true,\n \"strict\": true,\n \"noEmit\": true,\n \"esModuleInterop\": true,\n \"module\": \"esnext\",\n \"moduleResolution\": \"bundler\",\n \"resolveJsonModule\": true,\n \"isolatedModules\": true,\n \"jsx\": \"react-jsx\",\n \"incremental\": true,\n \"plugins\": [{ \"name\": \"next\" }],\n \"baseUrl\": \".\",\n \"paths\": {\n \"@/*\": [\"./*\"]\n }\n },\n \"include\": [\n \"next-env.d.ts\",\n \"**/*.ts\",\n \"**/*.tsx\",\n \"**/*.d.ts\",\n \".next/types/**/*.ts\",\n \".next/dev/types/**/*.ts\"\n ],\n \"exclude\": [\"node_modules\"]\n}\n";
1
+ export declare const tsconfigTemplate = "{\n \"compilerOptions\": {\n \"target\": \"es2020\",\n \"lib\": [\"dom\", \"dom.iterable\", \"esnext\"],\n \"allowJs\": true,\n \"skipLibCheck\": true,\n \"strict\": true,\n \"noEmit\": true,\n \"esModuleInterop\": true,\n \"module\": \"esnext\",\n \"moduleResolution\": \"bundler\",\n \"resolveJsonModule\": true,\n \"isolatedModules\": true,\n \"jsx\": \"react-jsx\",\n \"incremental\": true,\n \"plugins\": [{ \"name\": \"next\" }],\n \"paths\": {\n \"@/*\": [\"./*\"]\n }\n },\n \"include\": [\n \"next-env.d.ts\",\n \"**/*.ts\",\n \"**/*.tsx\",\n \"**/*.d.ts\",\n \".next/types/**/*.ts\",\n \".next/dev/types/**/*.ts\"\n ],\n \"exclude\": [\"node_modules\"]\n}\n";
@@ -14,7 +14,6 @@ export const tsconfigTemplate = `{
14
14
  "jsx": "react-jsx",
15
15
  "incremental": true,
16
16
  "plugins": [{ "name": "next" }],
17
- "baseUrl": ".",
18
17
  "paths": {
19
18
  "@/*": ["./*"]
20
19
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "doccupine",
3
- "version": "0.0.83",
3
+ "version": "0.0.84",
4
4
  "description": "Free and open-source documentation platform. Write MDX, get a production-ready site with AI chat, built-in components, and an MCP server - in one command.",
5
5
  "main": "dist/index.js",
6
6
  "bin": {
@@ -50,8 +50,8 @@
50
50
  "@types/node": "^25.5.0",
51
51
  "@types/prompts": "^2.4.9",
52
52
  "prettier": "^3.8.1",
53
- "typescript": "^5.9.3",
54
- "vitest": "^4.1.0"
53
+ "typescript": "^6.0.2",
54
+ "vitest": "^4.1.2"
55
55
  },
56
56
  "files": [
57
57
  "dist/**/*"