@elizaos/plugin-knowledge 1.0.4 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/.vite/manifest.json +2 -2
- package/dist/assets/index-C77XebWS.css +1 -0
- package/dist/assets/{index-BwFQYb4z.js → index-DZQIX0Kb.js} +34 -34
- package/dist/{chunk-BB6B27BS.js → chunk-MFXNKYBS.js} +4 -2
- package/dist/{chunk-BB6B27BS.js.map → chunk-MFXNKYBS.js.map} +1 -1
- package/dist/{docs-loader-NAE6ASGY.js → docs-loader-AEQHIBO4.js} +2 -2
- package/dist/index.html +2 -2
- package/dist/index.js +105 -35
- package/dist/index.js.map +1 -1
- package/package.json +4 -2
- package/dist/assets/index-Bjc7mMWy.css +0 -1
- package/dist/{docs-loader-NAE6ASGY.js.map → docs-loader-AEQHIBO4.js.map} +0 -0
@@ -291,7 +291,9 @@ async function fetchUrlContent(url) {
   const arrayBuffer = await response.arrayBuffer();
   const buffer = Buffer.from(arrayBuffer);
   const base64Content = buffer.toString("base64");
-  logger.debug(`[URL FETCHER] Successfully fetched content from URL: ${url} (${buffer.length} bytes)`);
+  logger.debug(
+    `[URL FETCHER] Successfully fetched content from URL: ${url} (${buffer.length} bytes)`
+  );
   return {
     content: base64Content,
     contentType
@@ -514,4 +516,4 @@ export {
   getKnowledgePath,
   loadDocsFromPath
 };
-//# sourceMappingURL=chunk-BB6B27BS.js.map
+//# sourceMappingURL=chunk-MFXNKYBS.js.map
package/dist/{chunk-BB6B27BS.js.map → chunk-MFXNKYBS.js.map}
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/docs-loader.ts","../src/utils.ts"],"sourcesContent":["import { logger, UUID, createUniqueUuid } from \"@elizaos/core\";\nimport * as fs from \"fs\";\nimport * as path from \"path\";\nimport { KnowledgeService } from \"./service.ts\";\nimport { AddKnowledgeOptions } from \"./types.ts\";\nimport { isBinaryContentType } from './utils.ts';\n\n/**\n * Get the knowledge path from environment or default to ./docs\n */\nexport function getKnowledgePath(): string {\n const envPath = process.env.KNOWLEDGE_PATH;\n\n if (envPath) {\n // Resolve relative paths from current working directory\n const resolvedPath = path.resolve(envPath);\n\n if (!fs.existsSync(resolvedPath)) {\n logger.warn(`Knowledge path from environment variable does not exist: ${resolvedPath}`);\n logger.warn('Please create the directory or update KNOWLEDGE_PATH environment variable');\n }\n\n return resolvedPath;\n }\n\n // Default to docs folder in current working directory\n const defaultPath = path.join(process.cwd(), 'docs');\n\n if (!fs.existsSync(defaultPath)) {\n logger.info(`Default docs folder does not exist at: ${defaultPath}`);\n logger.info('To use the knowledge plugin, either:');\n logger.info('1. Create a \"docs\" folder in your project root');\n logger.info('2. Set KNOWLEDGE_PATH environment variable to your documents folder');\n }\n\n return defaultPath;\n}\n\n/**\n * Load documents from the knowledge path\n */\nexport async function loadDocsFromPath(\n service: KnowledgeService,\n agentId: UUID,\n worldId?: UUID\n): Promise<{ total: number; successful: number; failed: number }> {\n const docsPath = getKnowledgePath();\n\n if (!fs.existsSync(docsPath)) {\n logger.warn(`Knowledge path does not exist: ${docsPath}`);\n return { total: 0, successful: 0, failed: 0 };\n }\n\n logger.info(`Loading documents from: ${docsPath}`);\n\n // Get all files recursively\n const files = getAllFiles(docsPath);\n\n if (files.length === 0) {\n logger.info('No files found in knowledge path');\n return { total: 0, successful: 0, failed: 0 };\n }\n\n logger.info(`Found ${files.length} files to process`);\n\n let successful = 0;\n let failed = 0;\n\n for (const filePath of files) {\n try {\n const fileName = path.basename(filePath);\n const fileExt = path.extname(filePath).toLowerCase();\n\n // Skip hidden files and directories\n if (fileName.startsWith('.')) {\n continue;\n }\n\n // Determine content type\n const contentType = getContentType(fileExt);\n\n // Skip unsupported file types\n if (!contentType) {\n logger.debug(`Skipping unsupported file type: ${filePath}`);\n continue;\n }\n\n // Read file\n const fileBuffer = fs.readFileSync(filePath);\n\n // Check if file is binary using the same logic as the service\n const isBinary = isBinaryContentType(contentType, fileName);\n\n // For text files, read as UTF-8 string directly\n // For binary files, convert to base64\n const content = isBinary ? 
fileBuffer.toString('base64') : fileBuffer.toString('utf-8');\n\n // Create knowledge options\n const knowledgeOptions: AddKnowledgeOptions = {\n clientDocumentId: createUniqueUuid(agentId, `docs-${fileName}-${Date.now()}`) as UUID,\n contentType,\n originalFilename: fileName,\n worldId: worldId || agentId,\n content,\n roomId: agentId,\n entityId: agentId,\n };\n\n // Process the document\n logger.debug(`Processing document: ${fileName}`);\n const result = await service.addKnowledge(knowledgeOptions);\n\n logger.info(`Successfully processed ${fileName}: ${result.fragmentCount} fragments created`);\n successful++;\n } catch (error) {\n logger.error(`Failed to process file ${filePath}:`, error);\n failed++;\n }\n }\n\n logger.info(\n `Document loading complete: ${successful} successful, ${failed} failed out of ${files.length} total`\n );\n\n return {\n total: files.length,\n successful,\n failed,\n };\n}\n\n/**\n * Recursively get all files in a directory\n */\nfunction getAllFiles(dirPath: string, files: string[] = []): string[] {\n try {\n const entries = fs.readdirSync(dirPath, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dirPath, entry.name);\n\n if (entry.isDirectory()) {\n // Skip node_modules and other common directories\n if (!['node_modules', '.git', '.vscode', 'dist', 'build'].includes(entry.name)) {\n getAllFiles(fullPath, files);\n }\n } else if (entry.isFile()) {\n files.push(fullPath);\n }\n }\n } catch (error) {\n logger.error(`Error reading directory ${dirPath}:`, error);\n }\n\n return files;\n}\n\n/**\n * Get content type based on file extension\n */\nfunction getContentType(extension: string): string | null {\n const contentTypes: Record<string, string> = {\n // Text documents\n '.txt': 'text/plain',\n '.md': 'text/markdown',\n '.markdown': 'text/markdown',\n '.tson': 'text/plain',\n '.xml': 'application/xml',\n '.csv': 'text/csv',\n '.tsv': 'text/tab-separated-values',\n '.log': 'text/plain',\n\n // Web files\n '.html': 'text/html',\n '.htm': 'text/html',\n '.css': 'text/css',\n '.scss': 'text/x-scss',\n '.sass': 'text/x-sass',\n '.less': 'text/x-less',\n\n // JavaScript/TypeScript\n '.js': 'text/javascript',\n '.jsx': 'text/javascript',\n '.ts': 'text/typescript',\n '.tsx': 'text/typescript',\n '.mjs': 'text/javascript',\n '.cjs': 'text/javascript',\n '.vue': 'text/x-vue',\n '.svelte': 'text/x-svelte',\n '.astro': 'text/x-astro',\n\n // Python\n '.py': 'text/x-python',\n '.pyw': 'text/x-python',\n '.pyi': 'text/x-python',\n\n // Java/Kotlin/Scala\n '.java': 'text/x-java',\n '.kt': 'text/x-kotlin',\n '.kts': 'text/x-kotlin',\n '.scala': 'text/x-scala',\n\n // C/C++/C#\n '.c': 'text/x-c',\n '.cpp': 'text/x-c++',\n '.cc': 'text/x-c++',\n '.cxx': 'text/x-c++',\n '.h': 'text/x-c',\n '.hpp': 'text/x-c++',\n '.cs': 'text/x-csharp',\n\n // Other languages\n '.php': 'text/x-php',\n '.rb': 'text/x-ruby',\n '.go': 'text/x-go',\n '.rs': 'text/x-rust',\n '.swift': 'text/x-swift',\n '.r': 'text/x-r',\n '.R': 'text/x-r',\n '.m': 'text/x-objectivec',\n '.mm': 'text/x-objectivec',\n '.clj': 'text/x-clojure',\n '.cljs': 'text/x-clojure',\n '.ex': 'text/x-elixir',\n '.exs': 'text/x-elixir',\n '.lua': 'text/x-lua',\n '.pl': 'text/x-perl',\n '.pm': 'text/x-perl',\n '.dart': 'text/x-dart',\n '.hs': 'text/x-haskell',\n '.elm': 'text/x-elm',\n '.ml': 'text/x-ocaml',\n '.fs': 'text/x-fsharp',\n '.fsx': 'text/x-fsharp',\n '.vb': 'text/x-vb',\n '.pas': 'text/x-pascal',\n '.d': 'text/x-d',\n '.nim': 'text/x-nim',\n '.zig': 'text/x-zig',\n '.jl': 
'text/x-julia',\n '.tcl': 'text/x-tcl',\n '.awk': 'text/x-awk',\n '.sed': 'text/x-sed',\n\n // Shell scripts\n '.sh': 'text/x-sh',\n '.bash': 'text/x-sh',\n '.zsh': 'text/x-sh',\n '.fish': 'text/x-fish',\n '.ps1': 'text/x-powershell',\n '.bat': 'text/x-batch',\n '.cmd': 'text/x-batch',\n\n // Config files\n '.json': 'application/json',\n '.yaml': 'text/x-yaml',\n '.yml': 'text/x-yaml',\n '.toml': 'text/x-toml',\n '.ini': 'text/x-ini',\n '.cfg': 'text/x-ini',\n '.conf': 'text/x-ini',\n '.env': 'text/plain',\n '.gitignore': 'text/plain',\n '.dockerignore': 'text/plain',\n '.editorconfig': 'text/plain',\n '.properties': 'text/x-properties',\n\n // Database\n '.sql': 'text/x-sql',\n\n // Binary documents\n '.pdf': 'application/pdf',\n '.doc': 'application/msword',\n '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',\n };\n\n return contentTypes[extension] || null;\n}\n","import { Buffer } from 'node:buffer';\nimport * as mammoth from 'mammoth';\nimport { logger } from '@elizaos/core';\nimport { getDocument, PDFDocumentProxy } from 'pdfjs-dist/legacy/build/pdf.mjs';\nimport type { TextItem, TextMarkedContent } from 'pdfjs-dist/types/src/display/api';\n\nconst PLAIN_TEXT_CONTENT_TYPES = [\n 'application/typescript',\n 'text/typescript',\n 'text/x-python',\n 'application/x-python-code',\n 'application/yaml',\n 'text/yaml',\n 'application/x-yaml',\n 'application/json',\n 'text/markdown',\n 'text/csv',\n];\n\nconst MAX_FALLBACK_SIZE_BYTES = 5 * 1024 * 1024; // 5 MB\nconst BINARY_CHECK_BYTES = 1024; // Check first 1KB for binary indicators\n\n/**\n * Extracts text content from a file buffer based on its content type.\n * Supports DOCX, plain text, and provides a fallback for unknown types.\n * PDF should be handled by `convertPdfToTextFromBuffer`.\n */\nexport async function extractTextFromFileBuffer(\n fileBuffer: Buffer,\n contentType: string,\n originalFilename: string // For logging and context\n): Promise<string> {\n const lowerContentType = contentType.toLowerCase();\n logger.debug(\n `[TextUtil] Attempting to extract text from ${originalFilename} (type: ${contentType})`\n );\n\n if (\n lowerContentType === 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'\n ) {\n logger.debug(`[TextUtil] Extracting text from DOCX ${originalFilename} via mammoth.`);\n try {\n const result = await mammoth.extractRawText({ buffer: fileBuffer });\n logger.debug(\n `[TextUtil] DOCX text extraction complete for ${originalFilename}. Text length: ${result.value.length}`\n );\n return result.value;\n } catch (docxError: any) {\n const errorMsg = `[TextUtil] Failed to parse DOCX file ${originalFilename}: ${docxError.message}`;\n logger.error(errorMsg, docxError.stack);\n throw new Error(errorMsg);\n }\n } else if (\n lowerContentType === 'application/msword' ||\n originalFilename.toLowerCase().endsWith('.doc')\n ) {\n // For .doc files, we'll store the content as-is, and just add a message\n // The frontend will handle the display appropriately\n logger.debug(`[TextUtil] Handling Microsoft Word .doc file: ${originalFilename}`);\n\n // We'll add a descriptive message as a placeholder\n return `[Microsoft Word Document: ${originalFilename}]\\n\\nThis document was indexed for search but cannot be displayed directly in the browser. 
The original document content is preserved for retrieval purposes.`;\n } else if (\n lowerContentType.startsWith('text/') ||\n PLAIN_TEXT_CONTENT_TYPES.includes(lowerContentType)\n ) {\n logger.debug(\n `[TextUtil] Extracting text from plain text compatible file ${originalFilename} (type: ${contentType})`\n );\n return fileBuffer.toString('utf-8');\n } else {\n logger.warn(\n `[TextUtil] Unsupported content type: \"${contentType}\" for ${originalFilename}. Attempting fallback to plain text.`\n );\n\n if (fileBuffer.length > MAX_FALLBACK_SIZE_BYTES) {\n const sizeErrorMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) exceeds maximum size for fallback (${MAX_FALLBACK_SIZE_BYTES} bytes). Cannot process as plain text.`;\n logger.error(sizeErrorMsg);\n throw new Error(sizeErrorMsg);\n }\n\n // Simple binary detection: check for null bytes in the first N bytes\n const initialBytes = fileBuffer.subarray(0, Math.min(fileBuffer.length, BINARY_CHECK_BYTES));\n if (initialBytes.includes(0)) {\n // Check for NUL byte\n const binaryHeuristicMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) appears to be binary based on initial byte check. Cannot process as plain text.`;\n logger.error(binaryHeuristicMsg);\n throw new Error(binaryHeuristicMsg);\n }\n\n try {\n const textContent = fileBuffer.toString('utf-8');\n if (textContent.includes('\\ufffd')) {\n // Replacement character, indicating potential binary or wrong encoding\n const binaryErrorMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) seems to be binary or has encoding issues after fallback to plain text (detected \\ufffd).`;\n logger.error(binaryErrorMsg);\n throw new Error(binaryErrorMsg); // Throw error for likely binary content\n }\n logger.debug(\n `[TextUtil] Successfully processed unknown type ${contentType} as plain text after fallback for ${originalFilename}.`\n );\n return textContent;\n } catch (fallbackError: any) {\n // If the initial toString failed or if we threw due to \\ufffd\n const finalErrorMsg = `[TextUtil] Unsupported content type: ${contentType} for ${originalFilename}. Fallback to plain text also failed or indicated binary content.`;\n logger.error(finalErrorMsg, fallbackError.message ? 
fallbackError.stack : undefined);\n throw new Error(finalErrorMsg);\n }\n }\n}\n\n/**\n * Converts a PDF file buffer to text content.\n * Requires pdfjs-dist to be properly configured, especially its worker.\n */\n/**\n * Converts a PDF Buffer to text with enhanced formatting preservation.\n *\n * @param {Buffer} pdfBuffer - The PDF Buffer to convert to text\n * @param {string} [filename] - Optional filename for logging purposes\n * @returns {Promise<string>} Text content of the PDF\n */\nexport async function convertPdfToTextFromBuffer(\n pdfBuffer: Buffer,\n filename?: string\n): Promise<string> {\n const docName = filename || 'unnamed-document';\n logger.debug(`[PdfService] Starting conversion for ${docName}`);\n\n try {\n const uint8Array = new Uint8Array(pdfBuffer);\n const pdf: PDFDocumentProxy = await getDocument({ data: uint8Array }).promise;\n const numPages = pdf.numPages;\n const textPages: string[] = [];\n\n for (let pageNum = 1; pageNum <= numPages; pageNum++) {\n logger.debug(`[PdfService] Processing page ${pageNum}/${numPages}`);\n const page = await pdf.getPage(pageNum);\n const textContent = await page.getTextContent();\n\n // Group text items by their y-position to maintain line structure\n const lineMap = new Map<number, TextItem[]>();\n\n textContent.items.filter(isTextItem).forEach((item) => {\n // Round y-position to account for small variations in the same line\n const yPos = Math.round(item.transform[5]);\n if (!lineMap.has(yPos)) {\n lineMap.set(yPos, []);\n }\n lineMap.get(yPos)!.push(item);\n });\n\n // Sort lines by y-position (top to bottom) and items within lines by x-position (left to right)\n const sortedLines = Array.from(lineMap.entries())\n .sort((a, b) => b[0] - a[0]) // Reverse sort for top-to-bottom\n .map(([_, items]) =>\n items\n .sort((a, b) => a.transform[4] - b.transform[4])\n .map((item) => item.str)\n .join(' ')\n );\n\n textPages.push(sortedLines.join('\\n'));\n }\n\n const fullText = textPages.join('\\n\\n').replace(/\\s+/g, ' ').trim();\n logger.debug(`[PdfService] Conversion complete for ${docName}, length: ${fullText.length}`);\n return fullText;\n } catch (error: any) {\n logger.error(`[PdfService] Error converting PDF ${docName}:`, error.message);\n throw new Error(`Failed to convert PDF to text: ${error.message}`);\n }\n}\n\n/**\n * Determines if a file should be treated as binary based on its content type and filename\n * @param contentType MIME type of the file\n * @param filename Original filename\n * @returns True if the file should be treated as binary (base64 encoded)\n */\nexport function isBinaryContentType(contentType: string, filename: string): boolean {\n // Text-based content types that should NOT be treated as binary\n const textContentTypes = [\n 'text/',\n 'application/json',\n 'application/xml',\n 'application/javascript',\n 'application/typescript',\n 'application/x-yaml',\n 'application/x-sh',\n ];\n\n // Check if it's a text-based MIME type\n const isTextMimeType = textContentTypes.some((type) => contentType.includes(type));\n if (isTextMimeType) {\n return false;\n }\n\n // Binary content types\n const binaryContentTypes = [\n 'application/pdf',\n 'application/msword',\n 'application/vnd.openxmlformats-officedocument',\n 'application/vnd.ms-excel',\n 'application/vnd.ms-powerpoint',\n 'application/zip',\n 'application/x-zip-compressed',\n 'application/octet-stream',\n 'image/',\n 'audio/',\n 'video/',\n ];\n\n // Check MIME type\n const isBinaryMimeType = binaryContentTypes.some((type) => 
contentType.includes(type));\n\n if (isBinaryMimeType) {\n return true;\n }\n\n // Check file extension as fallback\n const fileExt = filename.split('.').pop()?.toLowerCase() || '';\n\n // Text file extensions that should NOT be treated as binary\n const textExtensions = [\n 'txt',\n 'md',\n 'markdown',\n 'json',\n 'xml',\n 'html',\n 'htm',\n 'css',\n 'js',\n 'ts',\n 'jsx',\n 'tsx',\n 'yaml',\n 'yml',\n 'toml',\n 'ini',\n 'cfg',\n 'conf',\n 'sh',\n 'bash',\n 'zsh',\n 'fish',\n 'py',\n 'rb',\n 'go',\n 'rs',\n 'java',\n 'c',\n 'cpp',\n 'h',\n 'hpp',\n 'cs',\n 'php',\n 'sql',\n 'r',\n 'swift',\n 'kt',\n 'scala',\n 'clj',\n 'ex',\n 'exs',\n 'vim',\n 'env',\n 'gitignore',\n 'dockerignore',\n 'editorconfig',\n 'log',\n 'csv',\n 'tsv',\n 'properties',\n 'gradle',\n 'sbt',\n 'makefile',\n 'dockerfile',\n 'vagrantfile',\n 'gemfile',\n 'rakefile',\n 'podfile',\n 'csproj',\n 'vbproj',\n 'fsproj',\n 'sln',\n 'pom',\n ];\n\n // If it's a known text extension, it's not binary\n if (textExtensions.includes(fileExt)) {\n return false;\n }\n\n // Binary file extensions\n const binaryExtensions = [\n 'pdf',\n 'docx',\n 'doc',\n 'xls',\n 'xlsx',\n 'ppt',\n 'pptx',\n 'zip',\n 'rar',\n '7z',\n 'tar',\n 'gz',\n 'bz2',\n 'xz',\n 'jpg',\n 'jpeg',\n 'png',\n 'gif',\n 'bmp',\n 'svg',\n 'ico',\n 'webp',\n 'mp3',\n 'mp4',\n 'avi',\n 'mov',\n 'wmv',\n 'flv',\n 'wav',\n 'flac',\n 'ogg',\n 'exe',\n 'dll',\n 'so',\n 'dylib',\n 'bin',\n 'dat',\n 'db',\n 'sqlite',\n ];\n\n return binaryExtensions.includes(fileExt);\n}\n\n/**\n * Check if the input is a TextItem.\n *\n * @param item - The input item to check.\n * @returns A boolean indicating if the input is a TextItem.\n */\nfunction isTextItem(item: TextItem | TextMarkedContent): item is TextItem {\n return 'str' in item;\n}\n\n/**\n * Normalizes an S3 URL by removing query parameters (signature, etc.)\n * This allows for consistent URL comparison regardless of presigned URL parameters\n * @param url The S3 URL to normalize\n * @returns The normalized URL containing only the origin and pathname\n */\nexport function normalizeS3Url(url: string): string {\n try {\n const urlObj = new URL(url);\n return `${urlObj.origin}${urlObj.pathname}`;\n } catch (error) {\n logger.warn(`[URL NORMALIZER] Failed to parse URL: ${url}. 
Returning original.`);\n return url;\n }\n}\n\n/**\n * Fetches content from a URL and converts it to base64 format\n * @param url The URL to fetch content from\n * @returns An object containing the base64 content and content type\n */\nexport async function fetchUrlContent(url: string): Promise<{ content: string; contentType: string }> {\n logger.debug(`[URL FETCHER] Fetching content from URL: ${url}`);\n \n try {\n // Fetch the URL with timeout\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), 30000); // 30 second timeout\n \n const response = await fetch(url, { \n signal: controller.signal,\n headers: {\n 'User-Agent': 'Eliza-Knowledge-Plugin/1.0'\n }\n });\n clearTimeout(timeoutId);\n \n if (!response.ok) {\n throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`);\n }\n \n // Get content type from response headers\n const contentType = response.headers.get('content-type') || 'application/octet-stream';\n logger.debug(`[URL FETCHER] Content type from server: ${contentType} for URL: ${url}`);\n \n // Get content as ArrayBuffer\n const arrayBuffer = await response.arrayBuffer();\n const buffer = Buffer.from(arrayBuffer);\n \n // Convert to base64\n const base64Content = buffer.toString('base64');\n \n logger.debug(`[URL FETCHER] Successfully fetched content from URL: ${url} (${buffer.length} bytes)`);\n return {\n content: base64Content,\n contentType\n };\n } catch (error: any) {\n logger.error(`[URL FETCHER] Error fetching content from URL ${url}: ${error.message}`);\n throw new Error(`Failed to fetch content from URL: ${error.message}`);\n }\n}\n"],"mappings":";AAAA,SAAS,UAAAA,SAAc,wBAAwB;AAC/C,YAAY,QAAQ;AACpB,YAAY,UAAU;;;ACFtB,SAAS,cAAc;AACvB,YAAY,aAAa;AACzB,SAAS,cAAc;AACvB,SAAS,mBAAqC;AAG9C,IAAM,2BAA2B;AAAA,EAC/B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,0BAA0B,IAAI,OAAO;AAC3C,IAAM,qBAAqB;AAO3B,eAAsB,0BACpB,YACA,aACA,kBACiB;AACjB,QAAM,mBAAmB,YAAY,YAAY;AACjD,SAAO;AAAA,IACL,8CAA8C,gBAAgB,WAAW,WAAW;AAAA,EACtF;AAEA,MACE,qBAAqB,2EACrB;AACA,WAAO,MAAM,wCAAwC,gBAAgB,eAAe;AACpF,QAAI;AACF,YAAM,SAAS,MAAc,uBAAe,EAAE,QAAQ,WAAW,CAAC;AAClE,aAAO;AAAA,QACL,gDAAgD,gBAAgB,kBAAkB,OAAO,MAAM,MAAM;AAAA,MACvG;AACA,aAAO,OAAO;AAAA,IAChB,SAAS,WAAgB;AACvB,YAAM,WAAW,wCAAwC,gBAAgB,KAAK,UAAU,OAAO;AAC/F,aAAO,MAAM,UAAU,UAAU,KAAK;AACtC,YAAM,IAAI,MAAM,QAAQ;AAAA,IAC1B;AAAA,EACF,WACE,qBAAqB,wBACrB,iBAAiB,YAAY,EAAE,SAAS,MAAM,GAC9C;AAGA,WAAO,MAAM,iDAAiD,gBAAgB,EAAE;AAGhF,WAAO,6BAA6B,gBAAgB;AAAA;AAAA;AAAA,EACtD,WACE,iBAAiB,WAAW,OAAO,KACnC,yBAAyB,SAAS,gBAAgB,GAClD;AACA,WAAO;AAAA,MACL,8DAA8D,gBAAgB,WAAW,WAAW;AAAA,IACtG;AACA,WAAO,WAAW,SAAS,OAAO;AAAA,EACpC,OAAO;AACL,WAAO;AAAA,MACL,yCAAyC,WAAW,SAAS,gBAAgB;AAAA,IAC/E;AAEA,QAAI,WAAW,SAAS,yBAAyB;AAC/C,YAAM,eAAe,mBAAmB,gBAAgB,WAAW,WAAW,wCAAwC,uBAAuB;AAC7I,aAAO,MAAM,YAAY;AACzB,YAAM,IAAI,MAAM,YAAY;AAAA,IAC9B;AAGA,UAAM,eAAe,WAAW,SAAS,GAAG,KAAK,IAAI,WAAW,QAAQ,kBAAkB,CAAC;AAC3F,QAAI,aAAa,SAAS,CAAC,GAAG;AAE5B,YAAM,qBAAqB,mBAAmB,gBAAgB,WAAW,WAAW;AACpF,aAAO,MAAM,kBAAkB;AAC/B,YAAM,IAAI,MAAM,kBAAkB;AAAA,IACpC;AAEA,QAAI;AACF,YAAM,cAAc,WAAW,SAAS,OAAO;AAC/C,UAAI,YAAY,SAAS,QAAQ,GAAG;AAElC,cAAM,iBAAiB,mBAAmB,gBAAgB,WAAW,WAAW;AAChF,eAAO,MAAM,cAAc;AAC3B,cAAM,IAAI,MAAM,cAAc;AAAA,MAChC;AACA,aAAO;AAAA,QACL,kDAAkD,WAAW,qCAAqC,gBAAgB;AAAA,MACpH;AACA,aAAO;AAAA,IACT,SAAS,eAAoB;AAE3B,YAAM,gBAAgB,wCAAwC,WAAW,QAAQ,gBAAgB;AACjG,aAAO,MAAM,eAAe,cAAc,UAAU,cAAc,QAAQ,MAAS;AACnF,YAAM,IAAI,MAAM,aAAa;AAAA,IAC/B;AAAA,EACF;AACF;AAaA,eAAsB,2BACpB,WACA,UACiB;AACjB,QAAM,UAAU,YAAY;AAC5B
,SAAO,MAAM,wCAAwC,OAAO,EAAE;AAE9D,MAAI;AACF,UAAM,aAAa,IAAI,WAAW,SAAS;AAC3C,UAAM,MAAwB,MAAM,YAAY,EAAE,MAAM,WAAW,CAAC,EAAE;AACtE,UAAM,WAAW,IAAI;AACrB,UAAM,YAAsB,CAAC;AAE7B,aAAS,UAAU,GAAG,WAAW,UAAU,WAAW;AACpD,aAAO,MAAM,gCAAgC,OAAO,IAAI,QAAQ,EAAE;AAClE,YAAM,OAAO,MAAM,IAAI,QAAQ,OAAO;AACtC,YAAM,cAAc,MAAM,KAAK,eAAe;AAG9C,YAAM,UAAU,oBAAI,IAAwB;AAE5C,kBAAY,MAAM,OAAO,UAAU,EAAE,QAAQ,CAAC,SAAS;AAErD,cAAM,OAAO,KAAK,MAAM,KAAK,UAAU,CAAC,CAAC;AACzC,YAAI,CAAC,QAAQ,IAAI,IAAI,GAAG;AACtB,kBAAQ,IAAI,MAAM,CAAC,CAAC;AAAA,QACtB;AACA,gBAAQ,IAAI,IAAI,EAAG,KAAK,IAAI;AAAA,MAC9B,CAAC;AAGD,YAAM,cAAc,MAAM,KAAK,QAAQ,QAAQ,CAAC,EAC7C,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,EAC1B;AAAA,QAAI,CAAC,CAAC,GAAG,KAAK,MACb,MACG,KAAK,CAAC,GAAG,MAAM,EAAE,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC,EAC9C,IAAI,CAAC,SAAS,KAAK,GAAG,EACtB,KAAK,GAAG;AAAA,MACb;AAEF,gBAAU,KAAK,YAAY,KAAK,IAAI,CAAC;AAAA,IACvC;AAEA,UAAM,WAAW,UAAU,KAAK,MAAM,EAAE,QAAQ,QAAQ,GAAG,EAAE,KAAK;AAClE,WAAO,MAAM,wCAAwC,OAAO,aAAa,SAAS,MAAM,EAAE;AAC1F,WAAO;AAAA,EACT,SAAS,OAAY;AACnB,WAAO,MAAM,qCAAqC,OAAO,KAAK,MAAM,OAAO;AAC3E,UAAM,IAAI,MAAM,kCAAkC,MAAM,OAAO,EAAE;AAAA,EACnE;AACF;AAQO,SAAS,oBAAoB,aAAqB,UAA2B;AAElF,QAAM,mBAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,iBAAiB,iBAAiB,KAAK,CAAC,SAAS,YAAY,SAAS,IAAI,CAAC;AACjF,MAAI,gBAAgB;AAClB,WAAO;AAAA,EACT;AAGA,QAAM,qBAAqB;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,mBAAmB,mBAAmB,KAAK,CAAC,SAAS,YAAY,SAAS,IAAI,CAAC;AAErF,MAAI,kBAAkB;AACpB,WAAO;AAAA,EACT;AAGA,QAAM,UAAU,SAAS,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY,KAAK;AAG5D,QAAM,iBAAiB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,MAAI,eAAe,SAAS,OAAO,GAAG;AACpC,WAAO;AAAA,EACT;AAGA,QAAM,mBAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,SAAO,iBAAiB,SAAS,OAAO;AAC1C;AAQA,SAAS,WAAW,MAAsD;AACxE,SAAO,SAAS;AAClB;AAQO,SAAS,eAAe,KAAqB;AAClD,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,WAAO,GAAG,OAAO,MAAM,GAAG,OAAO,QAAQ;AAAA,EAC3C,SAAS,OAAO;AACd,WAAO,KAAK,yCAAyC,GAAG,uBAAuB;AAC/E,WAAO;AAAA,EACT;AACF;AAOA,eAAsB,gBAAgB,KAAgE;AACpG,SAAO,MAAM,4CAA4C,GAAG,EAAE;AAE9D,MAAI;AAEF,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,YAAY,WAAW,MAAM,WAAW,MAAM,GAAG,GAAK;AAE5D,UAAM,WAAW,MAAM,MAAM,KAAK;AAAA,MAChC,QAAQ,WAAW;AAAA,MACnB,SAAS;AAAA,QACP,cAAc;AAAA,MAChB;AAAA,IACF,CAAC;AACD,iBAAa,SAAS;AAEtB,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,MAAM,wBAAwB,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,IAClF;AAGA,UAAM,cAAc,SAAS,QAAQ,IAAI,cAAc,KAAK;AAC5D,WAAO,MAAM,2CAA2C,WAAW,aAAa,GAAG,EAAE;AAGrF,UAAM,cAAc,MAAM,SAAS,YAAY;AAC/C,UAAM,SAAS,OAAO,KAAK,WAAW;AAGtC,UAAM,gBAAgB,OAAO,SAAS,QAAQ;AA
E9C,WAAO,MAAM,wDAAwD,GAAG,KAAK,OAAO,MAAM,SAAS;AACnG,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,IACF;AAAA,EACF,SAAS,OAAY;AACnB,WAAO,MAAM,iDAAiD,GAAG,KAAK,MAAM,OAAO,EAAE;AACrF,UAAM,IAAI,MAAM,qCAAqC,MAAM,OAAO,EAAE;AAAA,EACtE;AACF;;;ADlZO,SAAS,mBAA2B;AACzC,QAAM,UAAU,QAAQ,IAAI;AAE5B,MAAI,SAAS;AAEX,UAAM,eAAoB,aAAQ,OAAO;AAEzC,QAAI,CAAI,cAAW,YAAY,GAAG;AAChC,MAAAC,QAAO,KAAK,4DAA4D,YAAY,EAAE;AACtF,MAAAA,QAAO,KAAK,2EAA2E;AAAA,IACzF;AAEA,WAAO;AAAA,EACT;AAGA,QAAM,cAAmB,UAAK,QAAQ,IAAI,GAAG,MAAM;AAEnD,MAAI,CAAI,cAAW,WAAW,GAAG;AAC/B,IAAAA,QAAO,KAAK,0CAA0C,WAAW,EAAE;AACnE,IAAAA,QAAO,KAAK,sCAAsC;AAClD,IAAAA,QAAO,KAAK,gDAAgD;AAC5D,IAAAA,QAAO,KAAK,qEAAqE;AAAA,EACnF;AAEA,SAAO;AACT;AAKA,eAAsB,iBACpB,SACA,SACA,SACgE;AAChE,QAAM,WAAW,iBAAiB;AAElC,MAAI,CAAI,cAAW,QAAQ,GAAG;AAC5B,IAAAA,QAAO,KAAK,kCAAkC,QAAQ,EAAE;AACxD,WAAO,EAAE,OAAO,GAAG,YAAY,GAAG,QAAQ,EAAE;AAAA,EAC9C;AAEA,EAAAA,QAAO,KAAK,2BAA2B,QAAQ,EAAE;AAGjD,QAAM,QAAQ,YAAY,QAAQ;AAElC,MAAI,MAAM,WAAW,GAAG;AACtB,IAAAA,QAAO,KAAK,kCAAkC;AAC9C,WAAO,EAAE,OAAO,GAAG,YAAY,GAAG,QAAQ,EAAE;AAAA,EAC9C;AAEA,EAAAA,QAAO,KAAK,SAAS,MAAM,MAAM,mBAAmB;AAEpD,MAAI,aAAa;AACjB,MAAI,SAAS;AAEb,aAAW,YAAY,OAAO;AAC5B,QAAI;AACF,YAAM,WAAgB,cAAS,QAAQ;AACvC,YAAM,UAAe,aAAQ,QAAQ,EAAE,YAAY;AAGnD,UAAI,SAAS,WAAW,GAAG,GAAG;AAC5B;AAAA,MACF;AAGA,YAAM,cAAc,eAAe,OAAO;AAG1C,UAAI,CAAC,aAAa;AAChB,QAAAA,QAAO,MAAM,mCAAmC,QAAQ,EAAE;AAC1D;AAAA,MACF;AAGA,YAAM,aAAgB,gBAAa,QAAQ;AAG3C,YAAM,WAAW,oBAAoB,aAAa,QAAQ;AAI1D,YAAM,UAAU,WAAW,WAAW,SAAS,QAAQ,IAAI,WAAW,SAAS,OAAO;AAGtF,YAAM,mBAAwC;AAAA,QAC5C,kBAAkB,iBAAiB,SAAS,QAAQ,QAAQ,IAAI,KAAK,IAAI,CAAC,EAAE;AAAA,QAC5E;AAAA,QACA,kBAAkB;AAAA,QAClB,SAAS,WAAW;AAAA,QACpB;AAAA,QACA,QAAQ;AAAA,QACR,UAAU;AAAA,MACZ;AAGA,MAAAA,QAAO,MAAM,wBAAwB,QAAQ,EAAE;AAC/C,YAAM,SAAS,MAAM,QAAQ,aAAa,gBAAgB;AAE1D,MAAAA,QAAO,KAAK,0BAA0B,QAAQ,KAAK,OAAO,aAAa,oBAAoB;AAC3F;AAAA,IACF,SAAS,OAAO;AACd,MAAAA,QAAO,MAAM,0BAA0B,QAAQ,KAAK,KAAK;AACzD;AAAA,IACF;AAAA,EACF;AAEA,EAAAA,QAAO;AAAA,IACL,8BAA8B,UAAU,gBAAgB,MAAM,kBAAkB,MAAM,MAAM;AAAA,EAC9F;AAEA,SAAO;AAAA,IACL,OAAO,MAAM;AAAA,IACb;AAAA,IACA;AAAA,EACF;AACF;AAKA,SAAS,YAAY,SAAiB,QAAkB,CAAC,GAAa;AACpE,MAAI;AACF,UAAM,UAAa,eAAY,SAAS,EAAE,eAAe,KAAK,CAAC;AAE/D,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAgB,UAAK,SAAS,MAAM,IAAI;AAE9C,UAAI,MAAM,YAAY,GAAG;AAEvB,YAAI,CAAC,CAAC,gBAAgB,QAAQ,WAAW,QAAQ,OAAO,EAAE,SAAS,MAAM,IAAI,GAAG;AAC9E,sBAAY,UAAU,KAAK;AAAA,QAC7B;AAAA,MACF,WAAW,MAAM,OAAO,GAAG;AACzB,cAAM,KAAK,QAAQ;AAAA,MACrB;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,IAAAA,QAAO,MAAM,2BAA2B,OAAO,KAAK,KAAK;AAAA,EAC3D;AAEA,SAAO;AACT;AAKA,SAAS,eAAe,WAAkC;AACxD,QAAM,eAAuC;AAAA;AAAA,IAE3C,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,aAAa;AAAA,IACb,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,SAAS;AAAA;AAAA,IAGT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,WAAW;AAAA,IACX,UAAU;AAAA;AAAA,IAGV,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,UAAU;AAAA;AAAA,IAGV,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO;AAAA;AAAA,IAGP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO;AAAA,IACP,UAAU;AAAA,IACV,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,I
AGR,OAAO;AAAA,IACP,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,iBAAiB;AAAA,IACjB,iBAAiB;AAAA,IACjB,eAAe;AAAA;AAAA,IAGf,QAAQ;AAAA;AAAA,IAGR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,EACX;AAEA,SAAO,aAAa,SAAS,KAAK;AACpC;","names":["logger","logger"]}
+
{"version":3,"sources":["../src/docs-loader.ts","../src/utils.ts"],"sourcesContent":["import { logger, UUID, createUniqueUuid } from \"@elizaos/core\";\nimport * as fs from \"fs\";\nimport * as path from \"path\";\nimport { KnowledgeService } from \"./service.ts\";\nimport { AddKnowledgeOptions } from \"./types.ts\";\nimport { isBinaryContentType } from './utils.ts';\n\n/**\n * Get the knowledge path from environment or default to ./docs\n */\nexport function getKnowledgePath(): string {\n const envPath = process.env.KNOWLEDGE_PATH;\n\n if (envPath) {\n // Resolve relative paths from current working directory\n const resolvedPath = path.resolve(envPath);\n\n if (!fs.existsSync(resolvedPath)) {\n logger.warn(`Knowledge path from environment variable does not exist: ${resolvedPath}`);\n logger.warn('Please create the directory or update KNOWLEDGE_PATH environment variable');\n }\n\n return resolvedPath;\n }\n\n // Default to docs folder in current working directory\n const defaultPath = path.join(process.cwd(), 'docs');\n\n if (!fs.existsSync(defaultPath)) {\n logger.info(`Default docs folder does not exist at: ${defaultPath}`);\n logger.info('To use the knowledge plugin, either:');\n logger.info('1. Create a \"docs\" folder in your project root');\n logger.info('2. Set KNOWLEDGE_PATH environment variable to your documents folder');\n }\n\n return defaultPath;\n}\n\n/**\n * Load documents from the knowledge path\n */\nexport async function loadDocsFromPath(\n service: KnowledgeService,\n agentId: UUID,\n worldId?: UUID\n): Promise<{ total: number; successful: number; failed: number }> {\n const docsPath = getKnowledgePath();\n\n if (!fs.existsSync(docsPath)) {\n logger.warn(`Knowledge path does not exist: ${docsPath}`);\n return { total: 0, successful: 0, failed: 0 };\n }\n\n logger.info(`Loading documents from: ${docsPath}`);\n\n // Get all files recursively\n const files = getAllFiles(docsPath);\n\n if (files.length === 0) {\n logger.info('No files found in knowledge path');\n return { total: 0, successful: 0, failed: 0 };\n }\n\n logger.info(`Found ${files.length} files to process`);\n\n let successful = 0;\n let failed = 0;\n\n for (const filePath of files) {\n try {\n const fileName = path.basename(filePath);\n const fileExt = path.extname(filePath).toLowerCase();\n\n // Skip hidden files and directories\n if (fileName.startsWith('.')) {\n continue;\n }\n\n // Determine content type\n const contentType = getContentType(fileExt);\n\n // Skip unsupported file types\n if (!contentType) {\n logger.debug(`Skipping unsupported file type: ${filePath}`);\n continue;\n }\n\n // Read file\n const fileBuffer = fs.readFileSync(filePath);\n\n // Check if file is binary using the same logic as the service\n const isBinary = isBinaryContentType(contentType, fileName);\n\n // For text files, read as UTF-8 string directly\n // For binary files, convert to base64\n const content = isBinary ? 
fileBuffer.toString('base64') : fileBuffer.toString('utf-8');\n\n // Create knowledge options\n const knowledgeOptions: AddKnowledgeOptions = {\n clientDocumentId: createUniqueUuid(agentId, `docs-${fileName}-${Date.now()}`) as UUID,\n contentType,\n originalFilename: fileName,\n worldId: worldId || agentId,\n content,\n roomId: agentId,\n entityId: agentId,\n };\n\n // Process the document\n logger.debug(`Processing document: ${fileName}`);\n const result = await service.addKnowledge(knowledgeOptions);\n\n logger.info(`Successfully processed ${fileName}: ${result.fragmentCount} fragments created`);\n successful++;\n } catch (error) {\n logger.error(`Failed to process file ${filePath}:`, error);\n failed++;\n }\n }\n\n logger.info(\n `Document loading complete: ${successful} successful, ${failed} failed out of ${files.length} total`\n );\n\n return {\n total: files.length,\n successful,\n failed,\n };\n}\n\n/**\n * Recursively get all files in a directory\n */\nfunction getAllFiles(dirPath: string, files: string[] = []): string[] {\n try {\n const entries = fs.readdirSync(dirPath, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dirPath, entry.name);\n\n if (entry.isDirectory()) {\n // Skip node_modules and other common directories\n if (!['node_modules', '.git', '.vscode', 'dist', 'build'].includes(entry.name)) {\n getAllFiles(fullPath, files);\n }\n } else if (entry.isFile()) {\n files.push(fullPath);\n }\n }\n } catch (error) {\n logger.error(`Error reading directory ${dirPath}:`, error);\n }\n\n return files;\n}\n\n/**\n * Get content type based on file extension\n */\nfunction getContentType(extension: string): string | null {\n const contentTypes: Record<string, string> = {\n // Text documents\n '.txt': 'text/plain',\n '.md': 'text/markdown',\n '.markdown': 'text/markdown',\n '.tson': 'text/plain',\n '.xml': 'application/xml',\n '.csv': 'text/csv',\n '.tsv': 'text/tab-separated-values',\n '.log': 'text/plain',\n\n // Web files\n '.html': 'text/html',\n '.htm': 'text/html',\n '.css': 'text/css',\n '.scss': 'text/x-scss',\n '.sass': 'text/x-sass',\n '.less': 'text/x-less',\n\n // JavaScript/TypeScript\n '.js': 'text/javascript',\n '.jsx': 'text/javascript',\n '.ts': 'text/typescript',\n '.tsx': 'text/typescript',\n '.mjs': 'text/javascript',\n '.cjs': 'text/javascript',\n '.vue': 'text/x-vue',\n '.svelte': 'text/x-svelte',\n '.astro': 'text/x-astro',\n\n // Python\n '.py': 'text/x-python',\n '.pyw': 'text/x-python',\n '.pyi': 'text/x-python',\n\n // Java/Kotlin/Scala\n '.java': 'text/x-java',\n '.kt': 'text/x-kotlin',\n '.kts': 'text/x-kotlin',\n '.scala': 'text/x-scala',\n\n // C/C++/C#\n '.c': 'text/x-c',\n '.cpp': 'text/x-c++',\n '.cc': 'text/x-c++',\n '.cxx': 'text/x-c++',\n '.h': 'text/x-c',\n '.hpp': 'text/x-c++',\n '.cs': 'text/x-csharp',\n\n // Other languages\n '.php': 'text/x-php',\n '.rb': 'text/x-ruby',\n '.go': 'text/x-go',\n '.rs': 'text/x-rust',\n '.swift': 'text/x-swift',\n '.r': 'text/x-r',\n '.R': 'text/x-r',\n '.m': 'text/x-objectivec',\n '.mm': 'text/x-objectivec',\n '.clj': 'text/x-clojure',\n '.cljs': 'text/x-clojure',\n '.ex': 'text/x-elixir',\n '.exs': 'text/x-elixir',\n '.lua': 'text/x-lua',\n '.pl': 'text/x-perl',\n '.pm': 'text/x-perl',\n '.dart': 'text/x-dart',\n '.hs': 'text/x-haskell',\n '.elm': 'text/x-elm',\n '.ml': 'text/x-ocaml',\n '.fs': 'text/x-fsharp',\n '.fsx': 'text/x-fsharp',\n '.vb': 'text/x-vb',\n '.pas': 'text/x-pascal',\n '.d': 'text/x-d',\n '.nim': 'text/x-nim',\n '.zig': 'text/x-zig',\n '.jl': 
'text/x-julia',\n '.tcl': 'text/x-tcl',\n '.awk': 'text/x-awk',\n '.sed': 'text/x-sed',\n\n // Shell scripts\n '.sh': 'text/x-sh',\n '.bash': 'text/x-sh',\n '.zsh': 'text/x-sh',\n '.fish': 'text/x-fish',\n '.ps1': 'text/x-powershell',\n '.bat': 'text/x-batch',\n '.cmd': 'text/x-batch',\n\n // Config files\n '.json': 'application/json',\n '.yaml': 'text/x-yaml',\n '.yml': 'text/x-yaml',\n '.toml': 'text/x-toml',\n '.ini': 'text/x-ini',\n '.cfg': 'text/x-ini',\n '.conf': 'text/x-ini',\n '.env': 'text/plain',\n '.gitignore': 'text/plain',\n '.dockerignore': 'text/plain',\n '.editorconfig': 'text/plain',\n '.properties': 'text/x-properties',\n\n // Database\n '.sql': 'text/x-sql',\n\n // Binary documents\n '.pdf': 'application/pdf',\n '.doc': 'application/msword',\n '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',\n };\n\n return contentTypes[extension] || null;\n}\n","import { Buffer } from 'node:buffer';\nimport * as mammoth from 'mammoth';\nimport { logger } from '@elizaos/core';\nimport { getDocument, PDFDocumentProxy } from 'pdfjs-dist/legacy/build/pdf.mjs';\nimport type { TextItem, TextMarkedContent } from 'pdfjs-dist/types/src/display/api';\n\nconst PLAIN_TEXT_CONTENT_TYPES = [\n 'application/typescript',\n 'text/typescript',\n 'text/x-python',\n 'application/x-python-code',\n 'application/yaml',\n 'text/yaml',\n 'application/x-yaml',\n 'application/json',\n 'text/markdown',\n 'text/csv',\n];\n\nconst MAX_FALLBACK_SIZE_BYTES = 5 * 1024 * 1024; // 5 MB\nconst BINARY_CHECK_BYTES = 1024; // Check first 1KB for binary indicators\n\n/**\n * Extracts text content from a file buffer based on its content type.\n * Supports DOCX, plain text, and provides a fallback for unknown types.\n * PDF should be handled by `convertPdfToTextFromBuffer`.\n */\nexport async function extractTextFromFileBuffer(\n fileBuffer: Buffer,\n contentType: string,\n originalFilename: string // For logging and context\n): Promise<string> {\n const lowerContentType = contentType.toLowerCase();\n logger.debug(\n `[TextUtil] Attempting to extract text from ${originalFilename} (type: ${contentType})`\n );\n\n if (\n lowerContentType === 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'\n ) {\n logger.debug(`[TextUtil] Extracting text from DOCX ${originalFilename} via mammoth.`);\n try {\n const result = await mammoth.extractRawText({ buffer: fileBuffer });\n logger.debug(\n `[TextUtil] DOCX text extraction complete for ${originalFilename}. Text length: ${result.value.length}`\n );\n return result.value;\n } catch (docxError: any) {\n const errorMsg = `[TextUtil] Failed to parse DOCX file ${originalFilename}: ${docxError.message}`;\n logger.error(errorMsg, docxError.stack);\n throw new Error(errorMsg);\n }\n } else if (\n lowerContentType === 'application/msword' ||\n originalFilename.toLowerCase().endsWith('.doc')\n ) {\n // For .doc files, we'll store the content as-is, and just add a message\n // The frontend will handle the display appropriately\n logger.debug(`[TextUtil] Handling Microsoft Word .doc file: ${originalFilename}`);\n\n // We'll add a descriptive message as a placeholder\n return `[Microsoft Word Document: ${originalFilename}]\\n\\nThis document was indexed for search but cannot be displayed directly in the browser. 
The original document content is preserved for retrieval purposes.`;\n } else if (\n lowerContentType.startsWith('text/') ||\n PLAIN_TEXT_CONTENT_TYPES.includes(lowerContentType)\n ) {\n logger.debug(\n `[TextUtil] Extracting text from plain text compatible file ${originalFilename} (type: ${contentType})`\n );\n return fileBuffer.toString('utf-8');\n } else {\n logger.warn(\n `[TextUtil] Unsupported content type: \"${contentType}\" for ${originalFilename}. Attempting fallback to plain text.`\n );\n\n if (fileBuffer.length > MAX_FALLBACK_SIZE_BYTES) {\n const sizeErrorMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) exceeds maximum size for fallback (${MAX_FALLBACK_SIZE_BYTES} bytes). Cannot process as plain text.`;\n logger.error(sizeErrorMsg);\n throw new Error(sizeErrorMsg);\n }\n\n // Simple binary detection: check for null bytes in the first N bytes\n const initialBytes = fileBuffer.subarray(0, Math.min(fileBuffer.length, BINARY_CHECK_BYTES));\n if (initialBytes.includes(0)) {\n // Check for NUL byte\n const binaryHeuristicMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) appears to be binary based on initial byte check. Cannot process as plain text.`;\n logger.error(binaryHeuristicMsg);\n throw new Error(binaryHeuristicMsg);\n }\n\n try {\n const textContent = fileBuffer.toString('utf-8');\n if (textContent.includes('\\ufffd')) {\n // Replacement character, indicating potential binary or wrong encoding\n const binaryErrorMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) seems to be binary or has encoding issues after fallback to plain text (detected \\ufffd).`;\n logger.error(binaryErrorMsg);\n throw new Error(binaryErrorMsg); // Throw error for likely binary content\n }\n logger.debug(\n `[TextUtil] Successfully processed unknown type ${contentType} as plain text after fallback for ${originalFilename}.`\n );\n return textContent;\n } catch (fallbackError: any) {\n // If the initial toString failed or if we threw due to \\ufffd\n const finalErrorMsg = `[TextUtil] Unsupported content type: ${contentType} for ${originalFilename}. Fallback to plain text also failed or indicated binary content.`;\n logger.error(finalErrorMsg, fallbackError.message ? 
fallbackError.stack : undefined);\n throw new Error(finalErrorMsg);\n }\n }\n}\n\n/**\n * Converts a PDF file buffer to text content.\n * Requires pdfjs-dist to be properly configured, especially its worker.\n */\n/**\n * Converts a PDF Buffer to text with enhanced formatting preservation.\n *\n * @param {Buffer} pdfBuffer - The PDF Buffer to convert to text\n * @param {string} [filename] - Optional filename for logging purposes\n * @returns {Promise<string>} Text content of the PDF\n */\nexport async function convertPdfToTextFromBuffer(\n pdfBuffer: Buffer,\n filename?: string\n): Promise<string> {\n const docName = filename || 'unnamed-document';\n logger.debug(`[PdfService] Starting conversion for ${docName}`);\n\n try {\n const uint8Array = new Uint8Array(pdfBuffer);\n const pdf: PDFDocumentProxy = await getDocument({ data: uint8Array }).promise;\n const numPages = pdf.numPages;\n const textPages: string[] = [];\n\n for (let pageNum = 1; pageNum <= numPages; pageNum++) {\n logger.debug(`[PdfService] Processing page ${pageNum}/${numPages}`);\n const page = await pdf.getPage(pageNum);\n const textContent = await page.getTextContent();\n\n // Group text items by their y-position to maintain line structure\n const lineMap = new Map<number, TextItem[]>();\n\n textContent.items.filter(isTextItem).forEach((item) => {\n // Round y-position to account for small variations in the same line\n const yPos = Math.round(item.transform[5]);\n if (!lineMap.has(yPos)) {\n lineMap.set(yPos, []);\n }\n lineMap.get(yPos)!.push(item);\n });\n\n // Sort lines by y-position (top to bottom) and items within lines by x-position (left to right)\n const sortedLines = Array.from(lineMap.entries())\n .sort((a, b) => b[0] - a[0]) // Reverse sort for top-to-bottom\n .map(([_, items]) =>\n items\n .sort((a, b) => a.transform[4] - b.transform[4])\n .map((item) => item.str)\n .join(' ')\n );\n\n textPages.push(sortedLines.join('\\n'));\n }\n\n const fullText = textPages.join('\\n\\n').replace(/\\s+/g, ' ').trim();\n logger.debug(`[PdfService] Conversion complete for ${docName}, length: ${fullText.length}`);\n return fullText;\n } catch (error: any) {\n logger.error(`[PdfService] Error converting PDF ${docName}:`, error.message);\n throw new Error(`Failed to convert PDF to text: ${error.message}`);\n }\n}\n\n/**\n * Determines if a file should be treated as binary based on its content type and filename\n * @param contentType MIME type of the file\n * @param filename Original filename\n * @returns True if the file should be treated as binary (base64 encoded)\n */\nexport function isBinaryContentType(contentType: string, filename: string): boolean {\n // Text-based content types that should NOT be treated as binary\n const textContentTypes = [\n 'text/',\n 'application/json',\n 'application/xml',\n 'application/javascript',\n 'application/typescript',\n 'application/x-yaml',\n 'application/x-sh',\n ];\n\n // Check if it's a text-based MIME type\n const isTextMimeType = textContentTypes.some((type) => contentType.includes(type));\n if (isTextMimeType) {\n return false;\n }\n\n // Binary content types\n const binaryContentTypes = [\n 'application/pdf',\n 'application/msword',\n 'application/vnd.openxmlformats-officedocument',\n 'application/vnd.ms-excel',\n 'application/vnd.ms-powerpoint',\n 'application/zip',\n 'application/x-zip-compressed',\n 'application/octet-stream',\n 'image/',\n 'audio/',\n 'video/',\n ];\n\n // Check MIME type\n const isBinaryMimeType = binaryContentTypes.some((type) => 
contentType.includes(type));\n\n if (isBinaryMimeType) {\n return true;\n }\n\n // Check file extension as fallback\n const fileExt = filename.split('.').pop()?.toLowerCase() || '';\n\n // Text file extensions that should NOT be treated as binary\n const textExtensions = [\n 'txt',\n 'md',\n 'markdown',\n 'json',\n 'xml',\n 'html',\n 'htm',\n 'css',\n 'js',\n 'ts',\n 'jsx',\n 'tsx',\n 'yaml',\n 'yml',\n 'toml',\n 'ini',\n 'cfg',\n 'conf',\n 'sh',\n 'bash',\n 'zsh',\n 'fish',\n 'py',\n 'rb',\n 'go',\n 'rs',\n 'java',\n 'c',\n 'cpp',\n 'h',\n 'hpp',\n 'cs',\n 'php',\n 'sql',\n 'r',\n 'swift',\n 'kt',\n 'scala',\n 'clj',\n 'ex',\n 'exs',\n 'vim',\n 'env',\n 'gitignore',\n 'dockerignore',\n 'editorconfig',\n 'log',\n 'csv',\n 'tsv',\n 'properties',\n 'gradle',\n 'sbt',\n 'makefile',\n 'dockerfile',\n 'vagrantfile',\n 'gemfile',\n 'rakefile',\n 'podfile',\n 'csproj',\n 'vbproj',\n 'fsproj',\n 'sln',\n 'pom',\n ];\n\n // If it's a known text extension, it's not binary\n if (textExtensions.includes(fileExt)) {\n return false;\n }\n\n // Binary file extensions\n const binaryExtensions = [\n 'pdf',\n 'docx',\n 'doc',\n 'xls',\n 'xlsx',\n 'ppt',\n 'pptx',\n 'zip',\n 'rar',\n '7z',\n 'tar',\n 'gz',\n 'bz2',\n 'xz',\n 'jpg',\n 'jpeg',\n 'png',\n 'gif',\n 'bmp',\n 'svg',\n 'ico',\n 'webp',\n 'mp3',\n 'mp4',\n 'avi',\n 'mov',\n 'wmv',\n 'flv',\n 'wav',\n 'flac',\n 'ogg',\n 'exe',\n 'dll',\n 'so',\n 'dylib',\n 'bin',\n 'dat',\n 'db',\n 'sqlite',\n ];\n\n return binaryExtensions.includes(fileExt);\n}\n\n/**\n * Check if the input is a TextItem.\n *\n * @param item - The input item to check.\n * @returns A boolean indicating if the input is a TextItem.\n */\nfunction isTextItem(item: TextItem | TextMarkedContent): item is TextItem {\n return 'str' in item;\n}\n\n/**\n * Normalizes an S3 URL by removing query parameters (signature, etc.)\n * This allows for consistent URL comparison regardless of presigned URL parameters\n * @param url The S3 URL to normalize\n * @returns The normalized URL containing only the origin and pathname\n */\nexport function normalizeS3Url(url: string): string {\n try {\n const urlObj = new URL(url);\n return `${urlObj.origin}${urlObj.pathname}`;\n } catch (error) {\n logger.warn(`[URL NORMALIZER] Failed to parse URL: ${url}. 
Returning original.`);\n return url;\n }\n}\n\n/**\n * Fetches content from a URL and converts it to base64 format\n * @param url The URL to fetch content from\n * @returns An object containing the base64 content and content type\n */\nexport async function fetchUrlContent(\n url: string\n): Promise<{ content: string; contentType: string }> {\n logger.debug(`[URL FETCHER] Fetching content from URL: ${url}`);\n\n try {\n // Fetch the URL with timeout\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), 30000); // 30 second timeout\n\n const response = await fetch(url, {\n signal: controller.signal,\n headers: {\n 'User-Agent': 'Eliza-Knowledge-Plugin/1.0',\n },\n });\n clearTimeout(timeoutId);\n\n if (!response.ok) {\n throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`);\n }\n\n // Get content type from response headers\n const contentType = response.headers.get('content-type') || 'application/octet-stream';\n logger.debug(`[URL FETCHER] Content type from server: ${contentType} for URL: ${url}`);\n\n // Get content as ArrayBuffer\n const arrayBuffer = await response.arrayBuffer();\n const buffer = Buffer.from(arrayBuffer);\n\n // Convert to base64\n const base64Content = buffer.toString('base64');\n\n logger.debug(\n `[URL FETCHER] Successfully fetched content from URL: ${url} (${buffer.length} bytes)`\n );\n return {\n content: base64Content,\n contentType,\n };\n } catch (error: any) {\n logger.error(`[URL FETCHER] Error fetching content from URL ${url}: ${error.message}`);\n throw new Error(`Failed to fetch content from URL: ${error.message}`);\n }\n}\n"],"mappings":";AAAA,SAAS,UAAAA,SAAc,wBAAwB;AAC/C,YAAY,QAAQ;AACpB,YAAY,UAAU;;;ACFtB,SAAS,cAAc;AACvB,YAAY,aAAa;AACzB,SAAS,cAAc;AACvB,SAAS,mBAAqC;AAG9C,IAAM,2BAA2B;AAAA,EAC/B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,0BAA0B,IAAI,OAAO;AAC3C,IAAM,qBAAqB;AAO3B,eAAsB,0BACpB,YACA,aACA,kBACiB;AACjB,QAAM,mBAAmB,YAAY,YAAY;AACjD,SAAO;AAAA,IACL,8CAA8C,gBAAgB,WAAW,WAAW;AAAA,EACtF;AAEA,MACE,qBAAqB,2EACrB;AACA,WAAO,MAAM,wCAAwC,gBAAgB,eAAe;AACpF,QAAI;AACF,YAAM,SAAS,MAAc,uBAAe,EAAE,QAAQ,WAAW,CAAC;AAClE,aAAO;AAAA,QACL,gDAAgD,gBAAgB,kBAAkB,OAAO,MAAM,MAAM;AAAA,MACvG;AACA,aAAO,OAAO;AAAA,IAChB,SAAS,WAAgB;AACvB,YAAM,WAAW,wCAAwC,gBAAgB,KAAK,UAAU,OAAO;AAC/F,aAAO,MAAM,UAAU,UAAU,KAAK;AACtC,YAAM,IAAI,MAAM,QAAQ;AAAA,IAC1B;AAAA,EACF,WACE,qBAAqB,wBACrB,iBAAiB,YAAY,EAAE,SAAS,MAAM,GAC9C;AAGA,WAAO,MAAM,iDAAiD,gBAAgB,EAAE;AAGhF,WAAO,6BAA6B,gBAAgB;AAAA;AAAA;AAAA,EACtD,WACE,iBAAiB,WAAW,OAAO,KACnC,yBAAyB,SAAS,gBAAgB,GAClD;AACA,WAAO;AAAA,MACL,8DAA8D,gBAAgB,WAAW,WAAW;AAAA,IACtG;AACA,WAAO,WAAW,SAAS,OAAO;AAAA,EACpC,OAAO;AACL,WAAO;AAAA,MACL,yCAAyC,WAAW,SAAS,gBAAgB;AAAA,IAC/E;AAEA,QAAI,WAAW,SAAS,yBAAyB;AAC/C,YAAM,eAAe,mBAAmB,gBAAgB,WAAW,WAAW,wCAAwC,uBAAuB;AAC7I,aAAO,MAAM,YAAY;AACzB,YAAM,IAAI,MAAM,YAAY;AAAA,IAC9B;AAGA,UAAM,eAAe,WAAW,SAAS,GAAG,KAAK,IAAI,WAAW,QAAQ,kBAAkB,CAAC;AAC3F,QAAI,aAAa,SAAS,CAAC,GAAG;AAE5B,YAAM,qBAAqB,mBAAmB,gBAAgB,WAAW,WAAW;AACpF,aAAO,MAAM,kBAAkB;AAC/B,YAAM,IAAI,MAAM,kBAAkB;AAAA,IACpC;AAEA,QAAI;AACF,YAAM,cAAc,WAAW,SAAS,OAAO;AAC/C,UAAI,YAAY,SAAS,QAAQ,GAAG;AAElC,cAAM,iBAAiB,mBAAmB,gBAAgB,WAAW,WAAW;AAChF,eAAO,MAAM,cAAc;AAC3B,cAAM,IAAI,MAAM,cAAc;AAAA,MAChC;AACA,aAAO;AAAA,QACL,kDAAkD,WAAW,qCAAqC,gBAAgB;AAAA,MACpH;AACA,aAAO;AAAA,IACT,SAAS,eAAoB;AAE3B,YAAM,gBAAgB,wCAAwC,WAAW,QAAQ,gBAAgB;AACjG,aAAO,MAAM,eAAe,cAAc,UAAU,cAAc,QAAQ,MAAS;AACnF,YAAM,IAAI,MAAM,aAAa;AAAA,IAC/B;AAAA,EACF;AACF;AAaA,eAAsB,2BACpB,WACA,UACiB;AACjB,QAAM,UAAU,YAAY
;AAC5B,SAAO,MAAM,wCAAwC,OAAO,EAAE;AAE9D,MAAI;AACF,UAAM,aAAa,IAAI,WAAW,SAAS;AAC3C,UAAM,MAAwB,MAAM,YAAY,EAAE,MAAM,WAAW,CAAC,EAAE;AACtE,UAAM,WAAW,IAAI;AACrB,UAAM,YAAsB,CAAC;AAE7B,aAAS,UAAU,GAAG,WAAW,UAAU,WAAW;AACpD,aAAO,MAAM,gCAAgC,OAAO,IAAI,QAAQ,EAAE;AAClE,YAAM,OAAO,MAAM,IAAI,QAAQ,OAAO;AACtC,YAAM,cAAc,MAAM,KAAK,eAAe;AAG9C,YAAM,UAAU,oBAAI,IAAwB;AAE5C,kBAAY,MAAM,OAAO,UAAU,EAAE,QAAQ,CAAC,SAAS;AAErD,cAAM,OAAO,KAAK,MAAM,KAAK,UAAU,CAAC,CAAC;AACzC,YAAI,CAAC,QAAQ,IAAI,IAAI,GAAG;AACtB,kBAAQ,IAAI,MAAM,CAAC,CAAC;AAAA,QACtB;AACA,gBAAQ,IAAI,IAAI,EAAG,KAAK,IAAI;AAAA,MAC9B,CAAC;AAGD,YAAM,cAAc,MAAM,KAAK,QAAQ,QAAQ,CAAC,EAC7C,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,EAC1B;AAAA,QAAI,CAAC,CAAC,GAAG,KAAK,MACb,MACG,KAAK,CAAC,GAAG,MAAM,EAAE,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC,EAC9C,IAAI,CAAC,SAAS,KAAK,GAAG,EACtB,KAAK,GAAG;AAAA,MACb;AAEF,gBAAU,KAAK,YAAY,KAAK,IAAI,CAAC;AAAA,IACvC;AAEA,UAAM,WAAW,UAAU,KAAK,MAAM,EAAE,QAAQ,QAAQ,GAAG,EAAE,KAAK;AAClE,WAAO,MAAM,wCAAwC,OAAO,aAAa,SAAS,MAAM,EAAE;AAC1F,WAAO;AAAA,EACT,SAAS,OAAY;AACnB,WAAO,MAAM,qCAAqC,OAAO,KAAK,MAAM,OAAO;AAC3E,UAAM,IAAI,MAAM,kCAAkC,MAAM,OAAO,EAAE;AAAA,EACnE;AACF;AAQO,SAAS,oBAAoB,aAAqB,UAA2B;AAElF,QAAM,mBAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,iBAAiB,iBAAiB,KAAK,CAAC,SAAS,YAAY,SAAS,IAAI,CAAC;AACjF,MAAI,gBAAgB;AAClB,WAAO;AAAA,EACT;AAGA,QAAM,qBAAqB;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,mBAAmB,mBAAmB,KAAK,CAAC,SAAS,YAAY,SAAS,IAAI,CAAC;AAErF,MAAI,kBAAkB;AACpB,WAAO;AAAA,EACT;AAGA,QAAM,UAAU,SAAS,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY,KAAK;AAG5D,QAAM,iBAAiB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,MAAI,eAAe,SAAS,OAAO,GAAG;AACpC,WAAO;AAAA,EACT;AAGA,QAAM,mBAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,SAAO,iBAAiB,SAAS,OAAO;AAC1C;AAQA,SAAS,WAAW,MAAsD;AACxE,SAAO,SAAS;AAClB;AAQO,SAAS,eAAe,KAAqB;AAClD,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,WAAO,GAAG,OAAO,MAAM,GAAG,OAAO,QAAQ;AAAA,EAC3C,SAAS,OAAO;AACd,WAAO,KAAK,yCAAyC,GAAG,uBAAuB;AAC/E,WAAO;AAAA,EACT;AACF;AAOA,eAAsB,gBACpB,KACmD;AACnD,SAAO,MAAM,4CAA4C,GAAG,EAAE;AAE9D,MAAI;AAEF,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,YAAY,WAAW,MAAM,WAAW,MAAM,GAAG,GAAK;AAE5D,UAAM,WAAW,MAAM,MAAM,KAAK;AAAA,MAChC,QAAQ,WAAW;AAAA,MACnB,SAAS;AAAA,QACP,cAAc;AAAA,MAChB;AAAA,IACF,CAAC;AACD,iBAAa,SAAS;AAEtB,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,MAAM,wBAAwB,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,IAClF;AAGA,UAAM,cAAc,SAAS,QAAQ,IAAI,cAAc,KAAK;AAC5D,WAAO,MAAM,2CAA2C,WAAW,aAAa,GAAG,EAAE;AAGrF,UAAM,cAAc,MAAM,SAAS,YAAY;AAC/C,UAAM,SAAS,OAAO,KAAK,WAAW;AAGtC,UAAM,gBAAgB,OAAO,SAAS,Q
AAQ;AAE9C,WAAO;AAAA,MACL,wDAAwD,GAAG,KAAK,OAAO,MAAM;AAAA,IAC/E;AACA,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,IACF;AAAA,EACF,SAAS,OAAY;AACnB,WAAO,MAAM,iDAAiD,GAAG,KAAK,MAAM,OAAO,EAAE;AACrF,UAAM,IAAI,MAAM,qCAAqC,MAAM,OAAO,EAAE;AAAA,EACtE;AACF;;;ADtZO,SAAS,mBAA2B;AACzC,QAAM,UAAU,QAAQ,IAAI;AAE5B,MAAI,SAAS;AAEX,UAAM,eAAoB,aAAQ,OAAO;AAEzC,QAAI,CAAI,cAAW,YAAY,GAAG;AAChC,MAAAC,QAAO,KAAK,4DAA4D,YAAY,EAAE;AACtF,MAAAA,QAAO,KAAK,2EAA2E;AAAA,IACzF;AAEA,WAAO;AAAA,EACT;AAGA,QAAM,cAAmB,UAAK,QAAQ,IAAI,GAAG,MAAM;AAEnD,MAAI,CAAI,cAAW,WAAW,GAAG;AAC/B,IAAAA,QAAO,KAAK,0CAA0C,WAAW,EAAE;AACnE,IAAAA,QAAO,KAAK,sCAAsC;AAClD,IAAAA,QAAO,KAAK,gDAAgD;AAC5D,IAAAA,QAAO,KAAK,qEAAqE;AAAA,EACnF;AAEA,SAAO;AACT;AAKA,eAAsB,iBACpB,SACA,SACA,SACgE;AAChE,QAAM,WAAW,iBAAiB;AAElC,MAAI,CAAI,cAAW,QAAQ,GAAG;AAC5B,IAAAA,QAAO,KAAK,kCAAkC,QAAQ,EAAE;AACxD,WAAO,EAAE,OAAO,GAAG,YAAY,GAAG,QAAQ,EAAE;AAAA,EAC9C;AAEA,EAAAA,QAAO,KAAK,2BAA2B,QAAQ,EAAE;AAGjD,QAAM,QAAQ,YAAY,QAAQ;AAElC,MAAI,MAAM,WAAW,GAAG;AACtB,IAAAA,QAAO,KAAK,kCAAkC;AAC9C,WAAO,EAAE,OAAO,GAAG,YAAY,GAAG,QAAQ,EAAE;AAAA,EAC9C;AAEA,EAAAA,QAAO,KAAK,SAAS,MAAM,MAAM,mBAAmB;AAEpD,MAAI,aAAa;AACjB,MAAI,SAAS;AAEb,aAAW,YAAY,OAAO;AAC5B,QAAI;AACF,YAAM,WAAgB,cAAS,QAAQ;AACvC,YAAM,UAAe,aAAQ,QAAQ,EAAE,YAAY;AAGnD,UAAI,SAAS,WAAW,GAAG,GAAG;AAC5B;AAAA,MACF;AAGA,YAAM,cAAc,eAAe,OAAO;AAG1C,UAAI,CAAC,aAAa;AAChB,QAAAA,QAAO,MAAM,mCAAmC,QAAQ,EAAE;AAC1D;AAAA,MACF;AAGA,YAAM,aAAgB,gBAAa,QAAQ;AAG3C,YAAM,WAAW,oBAAoB,aAAa,QAAQ;AAI1D,YAAM,UAAU,WAAW,WAAW,SAAS,QAAQ,IAAI,WAAW,SAAS,OAAO;AAGtF,YAAM,mBAAwC;AAAA,QAC5C,kBAAkB,iBAAiB,SAAS,QAAQ,QAAQ,IAAI,KAAK,IAAI,CAAC,EAAE;AAAA,QAC5E;AAAA,QACA,kBAAkB;AAAA,QAClB,SAAS,WAAW;AAAA,QACpB;AAAA,QACA,QAAQ;AAAA,QACR,UAAU;AAAA,MACZ;AAGA,MAAAA,QAAO,MAAM,wBAAwB,QAAQ,EAAE;AAC/C,YAAM,SAAS,MAAM,QAAQ,aAAa,gBAAgB;AAE1D,MAAAA,QAAO,KAAK,0BAA0B,QAAQ,KAAK,OAAO,aAAa,oBAAoB;AAC3F;AAAA,IACF,SAAS,OAAO;AACd,MAAAA,QAAO,MAAM,0BAA0B,QAAQ,KAAK,KAAK;AACzD;AAAA,IACF;AAAA,EACF;AAEA,EAAAA,QAAO;AAAA,IACL,8BAA8B,UAAU,gBAAgB,MAAM,kBAAkB,MAAM,MAAM;AAAA,EAC9F;AAEA,SAAO;AAAA,IACL,OAAO,MAAM;AAAA,IACb;AAAA,IACA;AAAA,EACF;AACF;AAKA,SAAS,YAAY,SAAiB,QAAkB,CAAC,GAAa;AACpE,MAAI;AACF,UAAM,UAAa,eAAY,SAAS,EAAE,eAAe,KAAK,CAAC;AAE/D,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAgB,UAAK,SAAS,MAAM,IAAI;AAE9C,UAAI,MAAM,YAAY,GAAG;AAEvB,YAAI,CAAC,CAAC,gBAAgB,QAAQ,WAAW,QAAQ,OAAO,EAAE,SAAS,MAAM,IAAI,GAAG;AAC9E,sBAAY,UAAU,KAAK;AAAA,QAC7B;AAAA,MACF,WAAW,MAAM,OAAO,GAAG;AACzB,cAAM,KAAK,QAAQ;AAAA,MACrB;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,IAAAA,QAAO,MAAM,2BAA2B,OAAO,KAAK,KAAK;AAAA,EAC3D;AAEA,SAAO;AACT;AAKA,SAAS,eAAe,WAAkC;AACxD,QAAM,eAAuC;AAAA;AAAA,IAE3C,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,aAAa;AAAA,IACb,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,SAAS;AAAA;AAAA,IAGT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,WAAW;AAAA,IACX,UAAU;AAAA;AAAA,IAGV,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,UAAU;AAAA;AAAA,IAGV,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO;AAAA;AAAA,IAGP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO;AAAA,IACP,UAAU;AAAA,IACV,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,
QAAQ;AAAA;AAAA,IAGR,OAAO;AAAA,IACP,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,iBAAiB;AAAA,IACjB,iBAAiB;AAAA,IACjB,eAAe;AAAA;AAAA,IAGf,QAAQ;AAAA;AAAA,IAGR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,EACX;AAEA,SAAO,aAAa,SAAS,KAAK;AACpC;","names":["logger","logger"]}
package/dist/{docs-loader-NAE6ASGY.js → docs-loader-AEQHIBO4.js} CHANGED

@@ -1,9 +1,9 @@
 import {
   getKnowledgePath,
   loadDocsFromPath
-} from "./chunk-BB6B27BS.js";
+} from "./chunk-MFXNKYBS.js";
 export {
   getKnowledgePath,
   loadDocsFromPath
 };
-//# sourceMappingURL=docs-loader-NAE6ASGY.js.map
+//# sourceMappingURL=docs-loader-AEQHIBO4.js.map
package/dist/index.html CHANGED

@@ -5,8 +5,8 @@
     <link rel="icon" type="image/svg+xml" href="/vite.svg" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <title>Agent Plugin View</title>
-    <script type="module" crossorigin src="./assets/index-BwFQYb4z.js"></script>
-    <link rel="stylesheet" crossorigin href="./assets/index-Bjc7mMWy.css">
+    <script type="module" crossorigin src="./assets/index-DZQIX0Kb.js"></script>
+    <link rel="stylesheet" crossorigin href="./assets/index-C77XebWS.css">
   </head>
   <body>
     <div id="root"></div>
package/dist/index.js CHANGED

@@ -5,7 +5,7 @@ import {
   isBinaryContentType,
   loadDocsFromPath,
   normalizeS3Url
-} from "./chunk-BB6B27BS.js";
+} from "./chunk-MFXNKYBS.js";
 
 // src/index.ts
 import { logger as logger6 } from "@elizaos/core";
@@ -1152,6 +1152,7 @@ function createRateLimiter(requestsPerMinute) {
 var KnowledgeService = class _KnowledgeService extends Service {
   static serviceType = "knowledge";
   config;
+  knowledgeConfig;
   capabilityDescription = "Provides Retrieval Augmented Generation capabilities, including knowledge upload and querying.";
   knowledgeProcessingSemaphore;
   /**
@@ -1166,7 +1167,7 @@ var KnowledgeService = class _KnowledgeService extends Service {
       if (typeof value === "string") return value.toLowerCase() === "true";
       return false;
     };
-    this.config = {
+    this.knowledgeConfig = {
       CTX_KNOWLEDGE_ENABLED: parseBooleanEnv(config?.CTX_KNOWLEDGE_ENABLED),
       LOAD_DOCS_ON_STARTUP: parseBooleanEnv(config?.LOAD_DOCS_ON_STARTUP),
       MAX_INPUT_TOKENS: config?.MAX_INPUT_TOKENS,
@@ -1175,11 +1176,12 @@ var KnowledgeService = class _KnowledgeService extends Service {
       TEXT_PROVIDER: config?.TEXT_PROVIDER,
       TEXT_EMBEDDING_MODEL: config?.TEXT_EMBEDDING_MODEL
     };
+    this.config = { ...this.knowledgeConfig };
     logger3.info(
       `KnowledgeService initialized for agent ${this.runtime.agentId} with config:`,
-      this.config
+      this.knowledgeConfig
     );
-    if (this.config.LOAD_DOCS_ON_STARTUP) {
+    if (this.knowledgeConfig.LOAD_DOCS_ON_STARTUP) {
       this.loadInitialDocuments().catch((error) => {
         logger3.error("Error during initial document loading in KnowledgeService:", error);
       });
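
Note on the hunk above: the service now builds a typed knowledgeConfig object and mirrors it into the untyped base-class config field. The following is a minimal sketch of that pattern, not part of the package; DemoService and the env argument are hypothetical stand-ins for the ElizaOS runtime plumbing.

    // Sketch only: boolean-env parsing plus config mirroring, as in the hunk above.
    type KnowledgeConfig = {
      CTX_KNOWLEDGE_ENABLED: boolean;
      LOAD_DOCS_ON_STARTUP: boolean;
      MAX_INPUT_TOKENS?: string;
    };

    const parseBooleanEnv = (value: unknown): boolean => {
      if (typeof value === "boolean") return value;
      if (typeof value === "string") return value.toLowerCase() === "true";
      return false;
    };

    class DemoService {
      config: Record<string, unknown>;
      knowledgeConfig: KnowledgeConfig;

      constructor(env: Record<string, string | undefined>) {
        // Typed view of the settings...
        this.knowledgeConfig = {
          CTX_KNOWLEDGE_ENABLED: parseBooleanEnv(env.CTX_KNOWLEDGE_ENABLED),
          LOAD_DOCS_ON_STARTUP: parseBooleanEnv(env.LOAD_DOCS_ON_STARTUP),
          MAX_INPUT_TOKENS: env.MAX_INPUT_TOKENS,
        };
        // ...mirrored into the untyped base-class field.
        this.config = { ...this.knowledgeConfig };
      }
    }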
@@ -1246,6 +1248,9 @@ var KnowledgeService = class _KnowledgeService extends Service {
     if (!service) {
       logger3.warn(`KnowledgeService not found for agent ${runtime.agentId} during stop.`);
     }
+    if (service instanceof _KnowledgeService) {
+      await service.stop();
+    }
   }
   /**
    * Stop the service
@@ -1392,9 +1397,16 @@ var KnowledgeService = class _KnowledgeService extends Service {
       ...documentMemory,
       id: clientDocumentId,
       // Ensure the ID of the memory is the clientDocumentId
+      agentId,
       roomId: roomId || agentId,
       entityId: entityId || agentId
     };
+    logger3.debug(
+      `KnowledgeService: Creating memory with agentId=${agentId}, entityId=${entityId}, roomId=${roomId}, this.runtime.agentId=${this.runtime.agentId}`
+    );
+    logger3.debug(
+      `KnowledgeService: memoryWithScope agentId=${memoryWithScope.agentId}, entityId=${memoryWithScope.entityId}`
+    );
     await this.runtime.createMemory(memoryWithScope, "documents");
     logger3.debug(
       `KnowledgeService: Stored document ${originalFilename} (Memory ID: ${memoryWithScope.id})`
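
Note on the hunk above: document memories are now stamped with an explicit agentId, while roomId and entityId still fall back to the agent. A minimal sketch of that fallback logic follows; the MemoryScope type is a hypothetical stand-in for the runtime's memory shape.

    type MemoryScope = { agentId: string; roomId: string; entityId: string };

    function scopeMemory(agentId: string, roomId?: string, entityId?: string): MemoryScope {
      return {
        agentId,                       // now set explicitly, per the hunk above
        roomId: roomId || agentId,     // default to the agent's own room
        entityId: entityId || agentId, // default to the agent itself
      };
    }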
@@ -1665,16 +1677,9 @@ var KnowledgeService = class _KnowledgeService extends Service {
    * Corresponds to GET /plugins/knowledge/documents
    */
   async getMemories(params) {
-    if (params.tableName !== "documents") {
-      logger3.warn(
-        `KnowledgeService.getMemories called with tableName ${params.tableName}, but this service primarily manages 'documents'. Proceeding, but review usage.`
-      );
-    }
     return this.runtime.getMemories({
-      ...params,
+      ...params
       // includes tableName, roomId, count, end
-      agentId: this.runtime.agentId
-      // Ensure agentId is correctly scoped
     });
   }
   /**
@@ -2900,6 +2905,41 @@ var knowledgeActions = [processKnowledgeAction, searchKnowledgeAction];
 import { MemoryType as MemoryType4, createUniqueUuid as createUniqueUuid3, logger as logger5 } from "@elizaos/core";
 import fs3 from "fs";
 import path3 from "path";
+import multer from "multer";
+var createUploadMiddleware = (runtime) => {
+  const uploadDir = runtime.getSetting("KNOWLEDGE_UPLOAD_DIR") || "/tmp/uploads/";
+  const maxFileSize = parseInt(runtime.getSetting("KNOWLEDGE_MAX_FILE_SIZE") || "52428800");
+  const maxFiles = parseInt(runtime.getSetting("KNOWLEDGE_MAX_FILES") || "10");
+  const allowedMimeTypes = runtime.getSetting("KNOWLEDGE_ALLOWED_MIME_TYPES")?.split(",") || [
+    "text/plain",
+    "text/markdown",
+    "application/pdf",
+    "application/msword",
+    "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
+    "text/html",
+    "application/json",
+    "application/xml",
+    "text/csv"
+  ];
+  return multer({
+    dest: uploadDir,
+    limits: {
+      fileSize: maxFileSize,
+      files: maxFiles
+    },
+    fileFilter: (req, file, cb) => {
+      if (allowedMimeTypes.includes(file.mimetype)) {
+        cb(null, true);
+      } else {
+        cb(
+          new Error(
+            `File type ${file.mimetype} not allowed. Allowed types: ${allowedMimeTypes.join(", ")}`
+          )
+        );
+      }
+    }
+  });
+};
 function sendSuccess(res, data, status = 200) {
   res.writeHead(status, { "Content-Type": "application/json" });
   res.end(JSON.stringify({ success: true, data }));
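
Note on the hunk above: uploads are now validated by a multer instance built from runtime settings (upload directory, size and count limits, MIME allowlist). Below is a minimal standalone sketch of the same multer shape wired into an Express app; the app, port, and the simplified single-type filter are illustration-only assumptions, not part of the plugin.

    import express from "express";
    import multer from "multer";

    const upload = multer({
      dest: "/tmp/uploads/",                     // default used in the hunk above
      limits: { fileSize: 52428800, files: 10 }, // 50 MB per file, 10 files
      fileFilter: (_req, file, cb) =>
        file.mimetype === "text/plain" ? cb(null, true) : cb(new Error("rejected")),
    });

    const app = express();
    // The field name "files" matches the upload.array("files", ...) call this
    // diff adds further down for the POST /documents route.
    app.post("/documents", upload.array("files", 10), (req, res) => {
      const files = (req.files as Express.Multer.File[] | undefined) ?? [];
      res.json({ received: files.length });
    });
    app.listen(3000);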
@@ -2927,13 +2967,13 @@ async function uploadKnowledgeHandler(req, res, runtime) {
   if (!service) {
     return sendError(res, 500, "SERVICE_NOT_FOUND", "KnowledgeService not found");
   }
-  const 
-  const isJsonRequest = !
-  if (!
+  const hasUploadedFiles = req.files && req.files.length > 0;
+  const isJsonRequest = !hasUploadedFiles && req.body && (req.body.fileUrl || req.body.fileUrls);
+  if (!hasUploadedFiles && !isJsonRequest) {
     return sendError(res, 400, "INVALID_REQUEST", "Request must contain either files or URLs");
   }
   try {
-    if (
+    if (hasUploadedFiles) {
       const files = req.files;
       if (!files || files.length === 0) {
         return sendError(res, 400, "NO_FILES", "No files uploaded");
@@ -2941,7 +2981,8 @@ async function uploadKnowledgeHandler(req, res, runtime) {
       const processingPromises = files.map(async (file, index) => {
         let knowledgeId;
         const originalFilename = file.originalname;
-        const 
+        const agentId = req.body.agentId || req.query.agentId || runtime.agentId;
+        const worldId = req.body.worldId || agentId;
         const filePath = file.path;
         knowledgeId = req.body?.documentIds && req.body.documentIds[index] || req.body?.documentId || createUniqueUuid3(runtime, `knowledge-${originalFilename}-${Date.now()}`);
         try {
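
Note on the hunk above: the upload handler now resolves which agent a document belongs to with a body-over-query-over-runtime precedence, and the world defaults to the agent. A minimal sketch; the UploadRequest type is a hypothetical stand-in for the Express request.

    type UploadRequest = {
      body: { agentId?: string; worldId?: string };
      query: { agentId?: string };
    };

    function resolveAgentScope(req: UploadRequest, runtimeAgentId: string) {
      const agentId = req.body.agentId || req.query.agentId || runtimeAgentId;
      const worldId = req.body.worldId || agentId; // world defaults to the agent
      return { agentId, worldId };
    }

    // resolveAgentScope({ body: {}, query: { agentId: "a-2" } }, "a-1")
    // -> { agentId: "a-2", worldId: "a-2" }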
@@ -2977,9 +3018,10 @@ async function uploadKnowledgeHandler(req, res, runtime) {
           content: base64Content,
           // The base64 string of the file
           worldId,
-          roomId: 
-          // 
-          entityId: 
+          roomId: agentId,
+          // Use the correct agent ID
+          entityId: agentId
+          // Use the correct agent ID
         };
         await service.addKnowledge(addKnowledgeOpts);
         cleanupFile(filePath);
@@ -3011,6 +3053,7 @@ async function uploadKnowledgeHandler(req, res, runtime) {
       if (fileUrls.length === 0) {
         return sendError(res, 400, "MISSING_URL", "File URL is required");
       }
+      const agentId = req.body.agentId || req.query.agentId || runtime.agentId;
       const processingPromises = fileUrls.map(async (fileUrl) => {
         try {
           const normalizedUrl = normalizeS3Url(fileUrl);
@@ -3048,15 +3091,17 @@ async function uploadKnowledgeHandler(req, res, runtime) {
             originalFilename,
             content,
             // Use the base64 encoded content from the URL
-            worldId: 
-            roomId: 
-            entityId: 
+            worldId: agentId,
+            roomId: agentId,
+            entityId: agentId,
             // Store the normalized URL in metadata
             metadata: {
               url: normalizedUrl
             }
           };
-          logger5.debug(
+          logger5.debug(
+            `[KNOWLEDGE URL HANDLER] Processing knowledge from URL: ${fileUrl} (type: ${contentType})`
+          );
           const result = await service.addKnowledge(addKnowledgeOpts);
           return {
             id: result.clientDocumentId,
@@ -3081,7 +3126,7 @@ async function uploadKnowledgeHandler(req, res, runtime) {
     }
   } catch (error) {
     logger5.error("[KNOWLEDGE HANDLER] Error processing knowledge:", error);
-    if (
+    if (hasUploadedFiles) {
       cleanupFiles(req.files);
     }
     sendError(res, 500, "PROCESSING_ERROR", "Failed to process knowledge", error.message);
@@ -3101,6 +3146,7 @@ async function getKnowledgeDocumentsHandler(req, res, runtime) {
   const limit = req.query.limit ? Number.parseInt(req.query.limit, 10) : 20;
   const before = req.query.before ? Number.parseInt(req.query.before, 10) : Date.now();
   const includeEmbedding = req.query.includeEmbedding === "true";
+  const agentId = req.query.agentId;
   const fileUrls = req.query.fileUrls ? typeof req.query.fileUrls === "string" && req.query.fileUrls.includes(",") ? req.query.fileUrls.split(",") : [req.query.fileUrls] : null;
   const memories = await service.getMemories({
     tableName: "documents",
@@ -3110,13 +3156,17 @@ async function getKnowledgeDocumentsHandler(req, res, runtime) {
   let filteredMemories = memories;
   if (fileUrls && fileUrls.length > 0) {
     const normalizedRequestUrls = fileUrls.map((url) => normalizeS3Url(url));
-    const urlBasedIds = normalizedRequestUrls.map(
+    const urlBasedIds = normalizedRequestUrls.map(
+      (url) => createUniqueUuid3(runtime, url)
+    );
     filteredMemories = memories.filter(
       (memory) => urlBasedIds.includes(memory.id) || // If the ID corresponds directly
       // Or if the URL is stored in the metadata (check if it exists)
       memory.metadata && "url" in memory.metadata && typeof memory.metadata.url === "string" && normalizedRequestUrls.includes(normalizeS3Url(memory.metadata.url))
     );
-    logger5.debug(
+    logger5.debug(
+      `[KNOWLEDGE GET HANDLER] Filtered documents by URLs: ${fileUrls.length} URLs, found ${filteredMemories.length} matching documents`
+    );
   }
   const cleanMemories = includeEmbedding ? filteredMemories : filteredMemories.map((memory) => ({
     ...memory,
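
Note on the hunk above: URL filtering matches a document either by the deterministic UUID derived from a normalized request URL or by a normalized URL stored in its metadata. A minimal sketch with the normalizer and UUID derivation passed in as parameters; they are hypothetical stand-ins for normalizeS3Url and createUniqueUuid3(runtime, url).

    type Doc = { id: string; metadata?: { url?: string } };

    function filterByUrls(
      docs: Doc[],
      requestUrls: string[],
      normalizeUrl: (u: string) => string, // stand-in for normalizeS3Url
      uuidFromUrl: (u: string) => string   // stand-in for createUniqueUuid3
    ): Doc[] {
      const normalized = requestUrls.map(normalizeUrl);
      const urlBasedIds = normalized.map(uuidFromUrl);
      return docs.filter(
        (doc) =>
          urlBasedIds.includes(doc.id) ||
          (doc.metadata !== undefined &&
            typeof doc.metadata.url === "string" &&
            normalized.includes(normalizeUrl(doc.metadata.url)))
      );
    }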
@@ -3154,9 +3204,13 @@ async function deleteKnowledgeDocumentHandler(req, res, runtime) {
   }
   try {
     const typedKnowledgeId = knowledgeId;
-    logger5.debug(
+    logger5.debug(
+      `[KNOWLEDGE DELETE HANDLER] Attempting to delete document with ID: ${typedKnowledgeId}`
+    );
     await service.deleteMemory(typedKnowledgeId);
-    logger5.info(
+    logger5.info(
+      `[KNOWLEDGE DELETE HANDLER] Successfully deleted document with ID: ${typedKnowledgeId}`
+    );
     sendSuccess(res, null, 204);
   } catch (error) {
     logger5.error(`[KNOWLEDGE DELETE HANDLER] Error deleting document ${knowledgeId}:`, error);
@@ -3184,6 +3238,7 @@ async function getKnowledgeByIdHandler(req, res, runtime) {
   }
   try {
     logger5.debug(`[KNOWLEDGE GET BY ID HANDLER] Retrieving document with ID: ${knowledgeId}`);
+    const agentId = req.query.agentId;
     const memories = await service.getMemories({
       tableName: "documents",
       count: 1e3
@@ -3205,9 +3260,11 @@ async function getKnowledgeByIdHandler(req, res, runtime) {
 }
 async function knowledgePanelHandler(req, res, runtime) {
   const agentId = runtime.agentId;
+  logger5.debug(`[KNOWLEDGE PANEL] Serving panel for agent ${agentId}, request path: ${req.path}`);
   try {
     const currentDir = path3.dirname(new URL(import.meta.url).pathname);
     const frontendPath = path3.join(currentDir, "../dist/index.html");
+    logger5.debug(`[KNOWLEDGE PANEL] Looking for frontend at: ${frontendPath}`);
     if (fs3.existsSync(frontendPath)) {
       const html = await fs3.promises.readFile(frontendPath, "utf8");
       const injectedHtml = html.replace(
@@ -3216,7 +3273,7 @@ async function knowledgePanelHandler(req, res, runtime) {
         <script>
           window.ELIZA_CONFIG = {
             agentId: '${agentId}',
-            apiBase: '/api
+            apiBase: '/api'
           };
         </script>`
       );
@@ -3255,7 +3312,7 @@ async function knowledgePanelHandler(req, res, runtime) {
     <script>
       window.ELIZA_CONFIG = {
         agentId: '${agentId}',
-        apiBase: '/api
+        apiBase: '/api'
       };
     </script>
     <link rel="stylesheet" href="./assets/${cssFile}">
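
Note on the two hunks above: the injected window.ELIZA_CONFIG now sets apiBase to the plain string '/api'. A minimal sketch of that kind of injection into a built index.html follows; the replace target (</head>) is an assumption for illustration, since the diff does not show what the handler actually replaces.

    function injectElizaConfig(html: string, agentId: string): string {
      const configScript = [
        "<script>",
        "  window.ELIZA_CONFIG = {",
        `    agentId: '${agentId}',`,
        "    apiBase: '/api'",
        "  };",
        "</script>",
      ].join("\n");
      // For illustration, splice the snippet in just before </head>.
      return html.replace("</head>", configScript + "\n</head>");
    }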
@@ -3332,9 +3389,9 @@ async function getKnowledgeChunksHandler(req, res, runtime) {
   const limit = req.query.limit ? Number.parseInt(req.query.limit, 10) : 100;
   const before = req.query.before ? Number.parseInt(req.query.before, 10) : Date.now();
   const documentId = req.query.documentId;
+  const agentId = req.query.agentId;
   const chunks = await service.getMemories({
     tableName: "knowledge",
-    // or whatever table stores the chunks
     count: limit,
     end: before
   });
@@ -3347,6 +3404,20 @@ async function getKnowledgeChunksHandler(req, res, runtime) {
     sendError(res, 500, "RETRIEVAL_ERROR", "Failed to retrieve knowledge chunks", error.message);
   }
 }
+async function uploadKnowledgeWithMulter(req, res, runtime) {
+  const upload = createUploadMiddleware(runtime);
+  const uploadArray = upload.array(
+    "files",
+    parseInt(runtime.getSetting("KNOWLEDGE_MAX_FILES") || "10")
+  );
+  uploadArray(req, res, (err) => {
+    if (err) {
+      logger5.error("[KNOWLEDGE UPLOAD] Multer error:", err);
+      return sendError(res, 400, "UPLOAD_ERROR", err.message);
+    }
+    uploadKnowledgeHandler(req, res, runtime);
+  });
+}
 var knowledgeRoutes = [
   {
     type: "GET",
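
Note on the hunk above: POST /documents is now handled by uploadKnowledgeWithMulter, which runs the multer middleware before delegating to uploadKnowledgeHandler. A minimal client-side sketch of calling such a route; the base URL is an assumption about deployment, not taken from the diff.

    async function uploadDocuments(files: File[]): Promise<unknown> {
      const form = new FormData();
      for (const file of files) {
        // Field name must match the server's upload.array("files", ...)
        form.append("files", file);
      }
      const response = await fetch("http://localhost:3000/documents", {
        method: "POST",
        body: form,
      });
      return response.json();
    }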
@@ -3363,8 +3434,7 @@ var knowledgeRoutes = [
   {
     type: "POST",
     path: "/documents",
-    handler: 
-    isMultipart: true
+    handler: uploadKnowledgeWithMulter
   },
   {
     type: "GET",
@@ -3433,7 +3503,7 @@ var knowledgePlugin = {
       try {
         const service = runtime.getService(KnowledgeService.serviceType);
         if (service instanceof KnowledgeService) {
-          const { loadDocsFromPath: loadDocsFromPath2 } = await import("./docs-loader-NAE6ASGY.js");
+          const { loadDocsFromPath: loadDocsFromPath2 } = await import("./docs-loader-AEQHIBO4.js");
           const result = await loadDocsFromPath2(service, runtime.agentId);
           if (result.successful > 0) {
             logger6.info(`Loaded ${result.successful} documents from docs folder on startup`);