@elizaos/plugin-knowledge 1.0.5 → 1.0.6

This diff shows the contents of publicly released package versions as they appear in their public registries; it is provided for informational purposes only.
@@ -303,6 +303,10 @@ async function fetchUrlContent(url) {
  throw new Error(`Failed to fetch content from URL: ${error.message}`);
  }
  }
+ function looksLikeBase64(content) {
+ const base64Regex = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/;
+ return content && content.length > 0 && base64Regex.test(content.replace(/\s/g, "")) || false;
+ }
 
  // src/docs-loader.ts
  function getKnowledgePath() {
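The added helper is a shape check for padded base64 rather than a content sniffer, so it has a few notable edge cases. A quick TypeScript sketch of its behavior, copying the function body from the hunk above (the test inputs are illustrative):

// Same logic as the shipped helper: strip whitespace, then test for the
// padded-base64 shape (groups of four alphabet chars, optional = / == tail).
function looksLikeBase64(content?: string | null): boolean {
  const base64Regex = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/;
  return (content && content.length > 0 && base64Regex.test(content.replace(/\s/g, ''))) || false;
}

console.log(looksLikeBase64('aGVsbG8=')); // true: "hello" encoded
console.log(looksLikeBase64('not base64!')); // false: "!" is outside the alphabet
console.log(looksLikeBase64('abcd')); // true: any alphabet-only string of length 4n matches
console.log(looksLikeBase64('   ')); // true: the length check runs before whitespace is stripped, and the regex matches ""

Because short plain words like "abcd" also match, callers presumably pair this check with context (e.g. content known to come from fetchUrlContent) rather than treating a match as proof of base64 data.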
@@ -513,7 +517,8 @@ export {
  isBinaryContentType,
  normalizeS3Url,
  fetchUrlContent,
+ looksLikeBase64,
  getKnowledgePath,
  loadDocsFromPath
  };
- //# sourceMappingURL=chunk-MFXNKYBS.js.map
+ //# sourceMappingURL=chunk-536BD2UA.js.map
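The chunk's export list above now includes the helper, and its most likely companion is fetchUrlContent, which (per the sourcemap below) always base64-encodes the fetched body. A hedged usage sketch; the import specifier assumes these chunk exports surface at the package's public entry point, which this diff does not show:

// Hypothetical import path: the diff only shows the export from the
// internal chunk, not the package's public entry.
import { looksLikeBase64, fetchUrlContent } from '@elizaos/plugin-knowledge';
import { Buffer } from 'node:buffer';

const { content, contentType } = await fetchUrlContent('https://example.com/doc.pdf');
// fetchUrlContent returns base64 content, so this check should pass:
if (looksLikeBase64(content)) {
  const bytes = Buffer.from(content, 'base64'); // decode before further processing
  console.log(`${contentType}: ${bytes.length} bytes`);
}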
@@ -1 +1 @@
- {"version":3,"sources":["../src/docs-loader.ts","../src/utils.ts"],"sourcesContent":["import { logger, UUID, createUniqueUuid } from \"@elizaos/core\";\nimport * as fs from \"fs\";\nimport * as path from \"path\";\nimport { KnowledgeService } from \"./service.ts\";\nimport { AddKnowledgeOptions } from \"./types.ts\";\nimport { isBinaryContentType } from './utils.ts';\n\n/**\n * Get the knowledge path from environment or default to ./docs\n */\nexport function getKnowledgePath(): string {\n const envPath = process.env.KNOWLEDGE_PATH;\n\n if (envPath) {\n // Resolve relative paths from current working directory\n const resolvedPath = path.resolve(envPath);\n\n if (!fs.existsSync(resolvedPath)) {\n logger.warn(`Knowledge path from environment variable does not exist: ${resolvedPath}`);\n logger.warn('Please create the directory or update KNOWLEDGE_PATH environment variable');\n }\n\n return resolvedPath;\n }\n\n // Default to docs folder in current working directory\n const defaultPath = path.join(process.cwd(), 'docs');\n\n if (!fs.existsSync(defaultPath)) {\n logger.info(`Default docs folder does not exist at: ${defaultPath}`);\n logger.info('To use the knowledge plugin, either:');\n logger.info('1. Create a \"docs\" folder in your project root');\n logger.info('2. Set KNOWLEDGE_PATH environment variable to your documents folder');\n }\n\n return defaultPath;\n}\n\n/**\n * Load documents from the knowledge path\n */\nexport async function loadDocsFromPath(\n service: KnowledgeService,\n agentId: UUID,\n worldId?: UUID\n): Promise<{ total: number; successful: number; failed: number }> {\n const docsPath = getKnowledgePath();\n\n if (!fs.existsSync(docsPath)) {\n logger.warn(`Knowledge path does not exist: ${docsPath}`);\n return { total: 0, successful: 0, failed: 0 };\n }\n\n logger.info(`Loading documents from: ${docsPath}`);\n\n // Get all files recursively\n const files = getAllFiles(docsPath);\n\n if (files.length === 0) {\n logger.info('No files found in knowledge path');\n return { total: 0, successful: 0, failed: 0 };\n }\n\n logger.info(`Found ${files.length} files to process`);\n\n let successful = 0;\n let failed = 0;\n\n for (const filePath of files) {\n try {\n const fileName = path.basename(filePath);\n const fileExt = path.extname(filePath).toLowerCase();\n\n // Skip hidden files and directories\n if (fileName.startsWith('.')) {\n continue;\n }\n\n // Determine content type\n const contentType = getContentType(fileExt);\n\n // Skip unsupported file types\n if (!contentType) {\n logger.debug(`Skipping unsupported file type: ${filePath}`);\n continue;\n }\n\n // Read file\n const fileBuffer = fs.readFileSync(filePath);\n\n // Check if file is binary using the same logic as the service\n const isBinary = isBinaryContentType(contentType, fileName);\n\n // For text files, read as UTF-8 string directly\n // For binary files, convert to base64\n const content = isBinary ? 
fileBuffer.toString('base64') : fileBuffer.toString('utf-8');\n\n // Create knowledge options\n const knowledgeOptions: AddKnowledgeOptions = {\n clientDocumentId: createUniqueUuid(agentId, `docs-${fileName}-${Date.now()}`) as UUID,\n contentType,\n originalFilename: fileName,\n worldId: worldId || agentId,\n content,\n roomId: agentId,\n entityId: agentId,\n };\n\n // Process the document\n logger.debug(`Processing document: ${fileName}`);\n const result = await service.addKnowledge(knowledgeOptions);\n\n logger.info(`Successfully processed ${fileName}: ${result.fragmentCount} fragments created`);\n successful++;\n } catch (error) {\n logger.error(`Failed to process file ${filePath}:`, error);\n failed++;\n }\n }\n\n logger.info(\n `Document loading complete: ${successful} successful, ${failed} failed out of ${files.length} total`\n );\n\n return {\n total: files.length,\n successful,\n failed,\n };\n}\n\n/**\n * Recursively get all files in a directory\n */\nfunction getAllFiles(dirPath: string, files: string[] = []): string[] {\n try {\n const entries = fs.readdirSync(dirPath, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dirPath, entry.name);\n\n if (entry.isDirectory()) {\n // Skip node_modules and other common directories\n if (!['node_modules', '.git', '.vscode', 'dist', 'build'].includes(entry.name)) {\n getAllFiles(fullPath, files);\n }\n } else if (entry.isFile()) {\n files.push(fullPath);\n }\n }\n } catch (error) {\n logger.error(`Error reading directory ${dirPath}:`, error);\n }\n\n return files;\n}\n\n/**\n * Get content type based on file extension\n */\nfunction getContentType(extension: string): string | null {\n const contentTypes: Record<string, string> = {\n // Text documents\n '.txt': 'text/plain',\n '.md': 'text/markdown',\n '.markdown': 'text/markdown',\n '.tson': 'text/plain',\n '.xml': 'application/xml',\n '.csv': 'text/csv',\n '.tsv': 'text/tab-separated-values',\n '.log': 'text/plain',\n\n // Web files\n '.html': 'text/html',\n '.htm': 'text/html',\n '.css': 'text/css',\n '.scss': 'text/x-scss',\n '.sass': 'text/x-sass',\n '.less': 'text/x-less',\n\n // JavaScript/TypeScript\n '.js': 'text/javascript',\n '.jsx': 'text/javascript',\n '.ts': 'text/typescript',\n '.tsx': 'text/typescript',\n '.mjs': 'text/javascript',\n '.cjs': 'text/javascript',\n '.vue': 'text/x-vue',\n '.svelte': 'text/x-svelte',\n '.astro': 'text/x-astro',\n\n // Python\n '.py': 'text/x-python',\n '.pyw': 'text/x-python',\n '.pyi': 'text/x-python',\n\n // Java/Kotlin/Scala\n '.java': 'text/x-java',\n '.kt': 'text/x-kotlin',\n '.kts': 'text/x-kotlin',\n '.scala': 'text/x-scala',\n\n // C/C++/C#\n '.c': 'text/x-c',\n '.cpp': 'text/x-c++',\n '.cc': 'text/x-c++',\n '.cxx': 'text/x-c++',\n '.h': 'text/x-c',\n '.hpp': 'text/x-c++',\n '.cs': 'text/x-csharp',\n\n // Other languages\n '.php': 'text/x-php',\n '.rb': 'text/x-ruby',\n '.go': 'text/x-go',\n '.rs': 'text/x-rust',\n '.swift': 'text/x-swift',\n '.r': 'text/x-r',\n '.R': 'text/x-r',\n '.m': 'text/x-objectivec',\n '.mm': 'text/x-objectivec',\n '.clj': 'text/x-clojure',\n '.cljs': 'text/x-clojure',\n '.ex': 'text/x-elixir',\n '.exs': 'text/x-elixir',\n '.lua': 'text/x-lua',\n '.pl': 'text/x-perl',\n '.pm': 'text/x-perl',\n '.dart': 'text/x-dart',\n '.hs': 'text/x-haskell',\n '.elm': 'text/x-elm',\n '.ml': 'text/x-ocaml',\n '.fs': 'text/x-fsharp',\n '.fsx': 'text/x-fsharp',\n '.vb': 'text/x-vb',\n '.pas': 'text/x-pascal',\n '.d': 'text/x-d',\n '.nim': 'text/x-nim',\n '.zig': 'text/x-zig',\n '.jl': 
'text/x-julia',\n '.tcl': 'text/x-tcl',\n '.awk': 'text/x-awk',\n '.sed': 'text/x-sed',\n\n // Shell scripts\n '.sh': 'text/x-sh',\n '.bash': 'text/x-sh',\n '.zsh': 'text/x-sh',\n '.fish': 'text/x-fish',\n '.ps1': 'text/x-powershell',\n '.bat': 'text/x-batch',\n '.cmd': 'text/x-batch',\n\n // Config files\n '.json': 'application/json',\n '.yaml': 'text/x-yaml',\n '.yml': 'text/x-yaml',\n '.toml': 'text/x-toml',\n '.ini': 'text/x-ini',\n '.cfg': 'text/x-ini',\n '.conf': 'text/x-ini',\n '.env': 'text/plain',\n '.gitignore': 'text/plain',\n '.dockerignore': 'text/plain',\n '.editorconfig': 'text/plain',\n '.properties': 'text/x-properties',\n\n // Database\n '.sql': 'text/x-sql',\n\n // Binary documents\n '.pdf': 'application/pdf',\n '.doc': 'application/msword',\n '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',\n };\n\n return contentTypes[extension] || null;\n}\n","import { Buffer } from 'node:buffer';\nimport * as mammoth from 'mammoth';\nimport { logger } from '@elizaos/core';\nimport { getDocument, PDFDocumentProxy } from 'pdfjs-dist/legacy/build/pdf.mjs';\nimport type { TextItem, TextMarkedContent } from 'pdfjs-dist/types/src/display/api';\n\nconst PLAIN_TEXT_CONTENT_TYPES = [\n 'application/typescript',\n 'text/typescript',\n 'text/x-python',\n 'application/x-python-code',\n 'application/yaml',\n 'text/yaml',\n 'application/x-yaml',\n 'application/json',\n 'text/markdown',\n 'text/csv',\n];\n\nconst MAX_FALLBACK_SIZE_BYTES = 5 * 1024 * 1024; // 5 MB\nconst BINARY_CHECK_BYTES = 1024; // Check first 1KB for binary indicators\n\n/**\n * Extracts text content from a file buffer based on its content type.\n * Supports DOCX, plain text, and provides a fallback for unknown types.\n * PDF should be handled by `convertPdfToTextFromBuffer`.\n */\nexport async function extractTextFromFileBuffer(\n fileBuffer: Buffer,\n contentType: string,\n originalFilename: string // For logging and context\n): Promise<string> {\n const lowerContentType = contentType.toLowerCase();\n logger.debug(\n `[TextUtil] Attempting to extract text from ${originalFilename} (type: ${contentType})`\n );\n\n if (\n lowerContentType === 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'\n ) {\n logger.debug(`[TextUtil] Extracting text from DOCX ${originalFilename} via mammoth.`);\n try {\n const result = await mammoth.extractRawText({ buffer: fileBuffer });\n logger.debug(\n `[TextUtil] DOCX text extraction complete for ${originalFilename}. Text length: ${result.value.length}`\n );\n return result.value;\n } catch (docxError: any) {\n const errorMsg = `[TextUtil] Failed to parse DOCX file ${originalFilename}: ${docxError.message}`;\n logger.error(errorMsg, docxError.stack);\n throw new Error(errorMsg);\n }\n } else if (\n lowerContentType === 'application/msword' ||\n originalFilename.toLowerCase().endsWith('.doc')\n ) {\n // For .doc files, we'll store the content as-is, and just add a message\n // The frontend will handle the display appropriately\n logger.debug(`[TextUtil] Handling Microsoft Word .doc file: ${originalFilename}`);\n\n // We'll add a descriptive message as a placeholder\n return `[Microsoft Word Document: ${originalFilename}]\\n\\nThis document was indexed for search but cannot be displayed directly in the browser. 
The original document content is preserved for retrieval purposes.`;\n } else if (\n lowerContentType.startsWith('text/') ||\n PLAIN_TEXT_CONTENT_TYPES.includes(lowerContentType)\n ) {\n logger.debug(\n `[TextUtil] Extracting text from plain text compatible file ${originalFilename} (type: ${contentType})`\n );\n return fileBuffer.toString('utf-8');\n } else {\n logger.warn(\n `[TextUtil] Unsupported content type: \"${contentType}\" for ${originalFilename}. Attempting fallback to plain text.`\n );\n\n if (fileBuffer.length > MAX_FALLBACK_SIZE_BYTES) {\n const sizeErrorMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) exceeds maximum size for fallback (${MAX_FALLBACK_SIZE_BYTES} bytes). Cannot process as plain text.`;\n logger.error(sizeErrorMsg);\n throw new Error(sizeErrorMsg);\n }\n\n // Simple binary detection: check for null bytes in the first N bytes\n const initialBytes = fileBuffer.subarray(0, Math.min(fileBuffer.length, BINARY_CHECK_BYTES));\n if (initialBytes.includes(0)) {\n // Check for NUL byte\n const binaryHeuristicMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) appears to be binary based on initial byte check. Cannot process as plain text.`;\n logger.error(binaryHeuristicMsg);\n throw new Error(binaryHeuristicMsg);\n }\n\n try {\n const textContent = fileBuffer.toString('utf-8');\n if (textContent.includes('\\ufffd')) {\n // Replacement character, indicating potential binary or wrong encoding\n const binaryErrorMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) seems to be binary or has encoding issues after fallback to plain text (detected \\ufffd).`;\n logger.error(binaryErrorMsg);\n throw new Error(binaryErrorMsg); // Throw error for likely binary content\n }\n logger.debug(\n `[TextUtil] Successfully processed unknown type ${contentType} as plain text after fallback for ${originalFilename}.`\n );\n return textContent;\n } catch (fallbackError: any) {\n // If the initial toString failed or if we threw due to \\ufffd\n const finalErrorMsg = `[TextUtil] Unsupported content type: ${contentType} for ${originalFilename}. Fallback to plain text also failed or indicated binary content.`;\n logger.error(finalErrorMsg, fallbackError.message ? 
fallbackError.stack : undefined);\n throw new Error(finalErrorMsg);\n }\n }\n}\n\n/**\n * Converts a PDF file buffer to text content.\n * Requires pdfjs-dist to be properly configured, especially its worker.\n */\n/**\n * Converts a PDF Buffer to text with enhanced formatting preservation.\n *\n * @param {Buffer} pdfBuffer - The PDF Buffer to convert to text\n * @param {string} [filename] - Optional filename for logging purposes\n * @returns {Promise<string>} Text content of the PDF\n */\nexport async function convertPdfToTextFromBuffer(\n pdfBuffer: Buffer,\n filename?: string\n): Promise<string> {\n const docName = filename || 'unnamed-document';\n logger.debug(`[PdfService] Starting conversion for ${docName}`);\n\n try {\n const uint8Array = new Uint8Array(pdfBuffer);\n const pdf: PDFDocumentProxy = await getDocument({ data: uint8Array }).promise;\n const numPages = pdf.numPages;\n const textPages: string[] = [];\n\n for (let pageNum = 1; pageNum <= numPages; pageNum++) {\n logger.debug(`[PdfService] Processing page ${pageNum}/${numPages}`);\n const page = await pdf.getPage(pageNum);\n const textContent = await page.getTextContent();\n\n // Group text items by their y-position to maintain line structure\n const lineMap = new Map<number, TextItem[]>();\n\n textContent.items.filter(isTextItem).forEach((item) => {\n // Round y-position to account for small variations in the same line\n const yPos = Math.round(item.transform[5]);\n if (!lineMap.has(yPos)) {\n lineMap.set(yPos, []);\n }\n lineMap.get(yPos)!.push(item);\n });\n\n // Sort lines by y-position (top to bottom) and items within lines by x-position (left to right)\n const sortedLines = Array.from(lineMap.entries())\n .sort((a, b) => b[0] - a[0]) // Reverse sort for top-to-bottom\n .map(([_, items]) =>\n items\n .sort((a, b) => a.transform[4] - b.transform[4])\n .map((item) => item.str)\n .join(' ')\n );\n\n textPages.push(sortedLines.join('\\n'));\n }\n\n const fullText = textPages.join('\\n\\n').replace(/\\s+/g, ' ').trim();\n logger.debug(`[PdfService] Conversion complete for ${docName}, length: ${fullText.length}`);\n return fullText;\n } catch (error: any) {\n logger.error(`[PdfService] Error converting PDF ${docName}:`, error.message);\n throw new Error(`Failed to convert PDF to text: ${error.message}`);\n }\n}\n\n/**\n * Determines if a file should be treated as binary based on its content type and filename\n * @param contentType MIME type of the file\n * @param filename Original filename\n * @returns True if the file should be treated as binary (base64 encoded)\n */\nexport function isBinaryContentType(contentType: string, filename: string): boolean {\n // Text-based content types that should NOT be treated as binary\n const textContentTypes = [\n 'text/',\n 'application/json',\n 'application/xml',\n 'application/javascript',\n 'application/typescript',\n 'application/x-yaml',\n 'application/x-sh',\n ];\n\n // Check if it's a text-based MIME type\n const isTextMimeType = textContentTypes.some((type) => contentType.includes(type));\n if (isTextMimeType) {\n return false;\n }\n\n // Binary content types\n const binaryContentTypes = [\n 'application/pdf',\n 'application/msword',\n 'application/vnd.openxmlformats-officedocument',\n 'application/vnd.ms-excel',\n 'application/vnd.ms-powerpoint',\n 'application/zip',\n 'application/x-zip-compressed',\n 'application/octet-stream',\n 'image/',\n 'audio/',\n 'video/',\n ];\n\n // Check MIME type\n const isBinaryMimeType = binaryContentTypes.some((type) => 
contentType.includes(type));\n\n if (isBinaryMimeType) {\n return true;\n }\n\n // Check file extension as fallback\n const fileExt = filename.split('.').pop()?.toLowerCase() || '';\n\n // Text file extensions that should NOT be treated as binary\n const textExtensions = [\n 'txt',\n 'md',\n 'markdown',\n 'json',\n 'xml',\n 'html',\n 'htm',\n 'css',\n 'js',\n 'ts',\n 'jsx',\n 'tsx',\n 'yaml',\n 'yml',\n 'toml',\n 'ini',\n 'cfg',\n 'conf',\n 'sh',\n 'bash',\n 'zsh',\n 'fish',\n 'py',\n 'rb',\n 'go',\n 'rs',\n 'java',\n 'c',\n 'cpp',\n 'h',\n 'hpp',\n 'cs',\n 'php',\n 'sql',\n 'r',\n 'swift',\n 'kt',\n 'scala',\n 'clj',\n 'ex',\n 'exs',\n 'vim',\n 'env',\n 'gitignore',\n 'dockerignore',\n 'editorconfig',\n 'log',\n 'csv',\n 'tsv',\n 'properties',\n 'gradle',\n 'sbt',\n 'makefile',\n 'dockerfile',\n 'vagrantfile',\n 'gemfile',\n 'rakefile',\n 'podfile',\n 'csproj',\n 'vbproj',\n 'fsproj',\n 'sln',\n 'pom',\n ];\n\n // If it's a known text extension, it's not binary\n if (textExtensions.includes(fileExt)) {\n return false;\n }\n\n // Binary file extensions\n const binaryExtensions = [\n 'pdf',\n 'docx',\n 'doc',\n 'xls',\n 'xlsx',\n 'ppt',\n 'pptx',\n 'zip',\n 'rar',\n '7z',\n 'tar',\n 'gz',\n 'bz2',\n 'xz',\n 'jpg',\n 'jpeg',\n 'png',\n 'gif',\n 'bmp',\n 'svg',\n 'ico',\n 'webp',\n 'mp3',\n 'mp4',\n 'avi',\n 'mov',\n 'wmv',\n 'flv',\n 'wav',\n 'flac',\n 'ogg',\n 'exe',\n 'dll',\n 'so',\n 'dylib',\n 'bin',\n 'dat',\n 'db',\n 'sqlite',\n ];\n\n return binaryExtensions.includes(fileExt);\n}\n\n/**\n * Check if the input is a TextItem.\n *\n * @param item - The input item to check.\n * @returns A boolean indicating if the input is a TextItem.\n */\nfunction isTextItem(item: TextItem | TextMarkedContent): item is TextItem {\n return 'str' in item;\n}\n\n/**\n * Normalizes an S3 URL by removing query parameters (signature, etc.)\n * This allows for consistent URL comparison regardless of presigned URL parameters\n * @param url The S3 URL to normalize\n * @returns The normalized URL containing only the origin and pathname\n */\nexport function normalizeS3Url(url: string): string {\n try {\n const urlObj = new URL(url);\n return `${urlObj.origin}${urlObj.pathname}`;\n } catch (error) {\n logger.warn(`[URL NORMALIZER] Failed to parse URL: ${url}. 
Returning original.`);\n return url;\n }\n}\n\n/**\n * Fetches content from a URL and converts it to base64 format\n * @param url The URL to fetch content from\n * @returns An object containing the base64 content and content type\n */\nexport async function fetchUrlContent(\n url: string\n): Promise<{ content: string; contentType: string }> {\n logger.debug(`[URL FETCHER] Fetching content from URL: ${url}`);\n\n try {\n // Fetch the URL with timeout\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), 30000); // 30 second timeout\n\n const response = await fetch(url, {\n signal: controller.signal,\n headers: {\n 'User-Agent': 'Eliza-Knowledge-Plugin/1.0',\n },\n });\n clearTimeout(timeoutId);\n\n if (!response.ok) {\n throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`);\n }\n\n // Get content type from response headers\n const contentType = response.headers.get('content-type') || 'application/octet-stream';\n logger.debug(`[URL FETCHER] Content type from server: ${contentType} for URL: ${url}`);\n\n // Get content as ArrayBuffer\n const arrayBuffer = await response.arrayBuffer();\n const buffer = Buffer.from(arrayBuffer);\n\n // Convert to base64\n const base64Content = buffer.toString('base64');\n\n logger.debug(\n `[URL FETCHER] Successfully fetched content from URL: ${url} (${buffer.length} bytes)`\n );\n return {\n content: base64Content,\n contentType,\n };\n } catch (error: any) {\n logger.error(`[URL FETCHER] Error fetching content from URL ${url}: ${error.message}`);\n throw new Error(`Failed to fetch content from URL: ${error.message}`);\n }\n}\n"],"mappings":";AAAA,SAAS,UAAAA,SAAc,wBAAwB;AAC/C,YAAY,QAAQ;AACpB,YAAY,UAAU;;;ACFtB,SAAS,cAAc;AACvB,YAAY,aAAa;AACzB,SAAS,cAAc;AACvB,SAAS,mBAAqC;AAG9C,IAAM,2BAA2B;AAAA,EAC/B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,0BAA0B,IAAI,OAAO;AAC3C,IAAM,qBAAqB;AAO3B,eAAsB,0BACpB,YACA,aACA,kBACiB;AACjB,QAAM,mBAAmB,YAAY,YAAY;AACjD,SAAO;AAAA,IACL,8CAA8C,gBAAgB,WAAW,WAAW;AAAA,EACtF;AAEA,MACE,qBAAqB,2EACrB;AACA,WAAO,MAAM,wCAAwC,gBAAgB,eAAe;AACpF,QAAI;AACF,YAAM,SAAS,MAAc,uBAAe,EAAE,QAAQ,WAAW,CAAC;AAClE,aAAO;AAAA,QACL,gDAAgD,gBAAgB,kBAAkB,OAAO,MAAM,MAAM;AAAA,MACvG;AACA,aAAO,OAAO;AAAA,IAChB,SAAS,WAAgB;AACvB,YAAM,WAAW,wCAAwC,gBAAgB,KAAK,UAAU,OAAO;AAC/F,aAAO,MAAM,UAAU,UAAU,KAAK;AACtC,YAAM,IAAI,MAAM,QAAQ;AAAA,IAC1B;AAAA,EACF,WACE,qBAAqB,wBACrB,iBAAiB,YAAY,EAAE,SAAS,MAAM,GAC9C;AAGA,WAAO,MAAM,iDAAiD,gBAAgB,EAAE;AAGhF,WAAO,6BAA6B,gBAAgB;AAAA;AAAA;AAAA,EACtD,WACE,iBAAiB,WAAW,OAAO,KACnC,yBAAyB,SAAS,gBAAgB,GAClD;AACA,WAAO;AAAA,MACL,8DAA8D,gBAAgB,WAAW,WAAW;AAAA,IACtG;AACA,WAAO,WAAW,SAAS,OAAO;AAAA,EACpC,OAAO;AACL,WAAO;AAAA,MACL,yCAAyC,WAAW,SAAS,gBAAgB;AAAA,IAC/E;AAEA,QAAI,WAAW,SAAS,yBAAyB;AAC/C,YAAM,eAAe,mBAAmB,gBAAgB,WAAW,WAAW,wCAAwC,uBAAuB;AAC7I,aAAO,MAAM,YAAY;AACzB,YAAM,IAAI,MAAM,YAAY;AAAA,IAC9B;AAGA,UAAM,eAAe,WAAW,SAAS,GAAG,KAAK,IAAI,WAAW,QAAQ,kBAAkB,CAAC;AAC3F,QAAI,aAAa,SAAS,CAAC,GAAG;AAE5B,YAAM,qBAAqB,mBAAmB,gBAAgB,WAAW,WAAW;AACpF,aAAO,MAAM,kBAAkB;AAC/B,YAAM,IAAI,MAAM,kBAAkB;AAAA,IACpC;AAEA,QAAI;AACF,YAAM,cAAc,WAAW,SAAS,OAAO;AAC/C,UAAI,YAAY,SAAS,QAAQ,GAAG;AAElC,cAAM,iBAAiB,mBAAmB,gBAAgB,WAAW,WAAW;AAChF,eAAO,MAAM,cAAc;AAC3B,cAAM,IAAI,MAAM,cAAc;AAAA,MAChC;AACA,aAAO;AAAA,QACL,kDAAkD,WAAW,qCAAqC,gBAAgB;AAAA,MACpH;AACA,aAAO;AAAA,IACT,SAAS,eAAoB;AAE3B,YAAM,gBAAgB,wCAAwC,WAAW,QAAQ,gBAAgB;AACjG,aAAO,MAAM,eAAe,cAAc,UAAU,cAAc,QAAQ,MAAS;AACnF,YAAM,IAAI,MAAM,aAAa;AAAA,IAC/B;AAAA,EACF;AACF;AAaA,eAAsB,2BACpB,WACA,UACiB;AACjB,QAAM,UAAU,YAAY
;AAC5B,SAAO,MAAM,wCAAwC,OAAO,EAAE;AAE9D,MAAI;AACF,UAAM,aAAa,IAAI,WAAW,SAAS;AAC3C,UAAM,MAAwB,MAAM,YAAY,EAAE,MAAM,WAAW,CAAC,EAAE;AACtE,UAAM,WAAW,IAAI;AACrB,UAAM,YAAsB,CAAC;AAE7B,aAAS,UAAU,GAAG,WAAW,UAAU,WAAW;AACpD,aAAO,MAAM,gCAAgC,OAAO,IAAI,QAAQ,EAAE;AAClE,YAAM,OAAO,MAAM,IAAI,QAAQ,OAAO;AACtC,YAAM,cAAc,MAAM,KAAK,eAAe;AAG9C,YAAM,UAAU,oBAAI,IAAwB;AAE5C,kBAAY,MAAM,OAAO,UAAU,EAAE,QAAQ,CAAC,SAAS;AAErD,cAAM,OAAO,KAAK,MAAM,KAAK,UAAU,CAAC,CAAC;AACzC,YAAI,CAAC,QAAQ,IAAI,IAAI,GAAG;AACtB,kBAAQ,IAAI,MAAM,CAAC,CAAC;AAAA,QACtB;AACA,gBAAQ,IAAI,IAAI,EAAG,KAAK,IAAI;AAAA,MAC9B,CAAC;AAGD,YAAM,cAAc,MAAM,KAAK,QAAQ,QAAQ,CAAC,EAC7C,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,EAC1B;AAAA,QAAI,CAAC,CAAC,GAAG,KAAK,MACb,MACG,KAAK,CAAC,GAAG,MAAM,EAAE,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC,EAC9C,IAAI,CAAC,SAAS,KAAK,GAAG,EACtB,KAAK,GAAG;AAAA,MACb;AAEF,gBAAU,KAAK,YAAY,KAAK,IAAI,CAAC;AAAA,IACvC;AAEA,UAAM,WAAW,UAAU,KAAK,MAAM,EAAE,QAAQ,QAAQ,GAAG,EAAE,KAAK;AAClE,WAAO,MAAM,wCAAwC,OAAO,aAAa,SAAS,MAAM,EAAE;AAC1F,WAAO;AAAA,EACT,SAAS,OAAY;AACnB,WAAO,MAAM,qCAAqC,OAAO,KAAK,MAAM,OAAO;AAC3E,UAAM,IAAI,MAAM,kCAAkC,MAAM,OAAO,EAAE;AAAA,EACnE;AACF;AAQO,SAAS,oBAAoB,aAAqB,UAA2B;AAElF,QAAM,mBAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,iBAAiB,iBAAiB,KAAK,CAAC,SAAS,YAAY,SAAS,IAAI,CAAC;AACjF,MAAI,gBAAgB;AAClB,WAAO;AAAA,EACT;AAGA,QAAM,qBAAqB;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,mBAAmB,mBAAmB,KAAK,CAAC,SAAS,YAAY,SAAS,IAAI,CAAC;AAErF,MAAI,kBAAkB;AACpB,WAAO;AAAA,EACT;AAGA,QAAM,UAAU,SAAS,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY,KAAK;AAG5D,QAAM,iBAAiB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,MAAI,eAAe,SAAS,OAAO,GAAG;AACpC,WAAO;AAAA,EACT;AAGA,QAAM,mBAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,SAAO,iBAAiB,SAAS,OAAO;AAC1C;AAQA,SAAS,WAAW,MAAsD;AACxE,SAAO,SAAS;AAClB;AAQO,SAAS,eAAe,KAAqB;AAClD,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,WAAO,GAAG,OAAO,MAAM,GAAG,OAAO,QAAQ;AAAA,EAC3C,SAAS,OAAO;AACd,WAAO,KAAK,yCAAyC,GAAG,uBAAuB;AAC/E,WAAO;AAAA,EACT;AACF;AAOA,eAAsB,gBACpB,KACmD;AACnD,SAAO,MAAM,4CAA4C,GAAG,EAAE;AAE9D,MAAI;AAEF,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,YAAY,WAAW,MAAM,WAAW,MAAM,GAAG,GAAK;AAE5D,UAAM,WAAW,MAAM,MAAM,KAAK;AAAA,MAChC,QAAQ,WAAW;AAAA,MACnB,SAAS;AAAA,QACP,cAAc;AAAA,MAChB;AAAA,IACF,CAAC;AACD,iBAAa,SAAS;AAEtB,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,MAAM,wBAAwB,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,IAClF;AAGA,UAAM,cAAc,SAAS,QAAQ,IAAI,cAAc,KAAK;AAC5D,WAAO,MAAM,2CAA2C,WAAW,aAAa,GAAG,EAAE;AAGrF,UAAM,cAAc,MAAM,SAAS,YAAY;AAC/C,UAAM,SAAS,OAAO,KAAK,WAAW;AAGtC,UAAM,gBAAgB,OAAO,SAAS,Q
AAQ;AAE9C,WAAO;AAAA,MACL,wDAAwD,GAAG,KAAK,OAAO,MAAM;AAAA,IAC/E;AACA,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,IACF;AAAA,EACF,SAAS,OAAY;AACnB,WAAO,MAAM,iDAAiD,GAAG,KAAK,MAAM,OAAO,EAAE;AACrF,UAAM,IAAI,MAAM,qCAAqC,MAAM,OAAO,EAAE;AAAA,EACtE;AACF;;;ADtZO,SAAS,mBAA2B;AACzC,QAAM,UAAU,QAAQ,IAAI;AAE5B,MAAI,SAAS;AAEX,UAAM,eAAoB,aAAQ,OAAO;AAEzC,QAAI,CAAI,cAAW,YAAY,GAAG;AAChC,MAAAC,QAAO,KAAK,4DAA4D,YAAY,EAAE;AACtF,MAAAA,QAAO,KAAK,2EAA2E;AAAA,IACzF;AAEA,WAAO;AAAA,EACT;AAGA,QAAM,cAAmB,UAAK,QAAQ,IAAI,GAAG,MAAM;AAEnD,MAAI,CAAI,cAAW,WAAW,GAAG;AAC/B,IAAAA,QAAO,KAAK,0CAA0C,WAAW,EAAE;AACnE,IAAAA,QAAO,KAAK,sCAAsC;AAClD,IAAAA,QAAO,KAAK,gDAAgD;AAC5D,IAAAA,QAAO,KAAK,qEAAqE;AAAA,EACnF;AAEA,SAAO;AACT;AAKA,eAAsB,iBACpB,SACA,SACA,SACgE;AAChE,QAAM,WAAW,iBAAiB;AAElC,MAAI,CAAI,cAAW,QAAQ,GAAG;AAC5B,IAAAA,QAAO,KAAK,kCAAkC,QAAQ,EAAE;AACxD,WAAO,EAAE,OAAO,GAAG,YAAY,GAAG,QAAQ,EAAE;AAAA,EAC9C;AAEA,EAAAA,QAAO,KAAK,2BAA2B,QAAQ,EAAE;AAGjD,QAAM,QAAQ,YAAY,QAAQ;AAElC,MAAI,MAAM,WAAW,GAAG;AACtB,IAAAA,QAAO,KAAK,kCAAkC;AAC9C,WAAO,EAAE,OAAO,GAAG,YAAY,GAAG,QAAQ,EAAE;AAAA,EAC9C;AAEA,EAAAA,QAAO,KAAK,SAAS,MAAM,MAAM,mBAAmB;AAEpD,MAAI,aAAa;AACjB,MAAI,SAAS;AAEb,aAAW,YAAY,OAAO;AAC5B,QAAI;AACF,YAAM,WAAgB,cAAS,QAAQ;AACvC,YAAM,UAAe,aAAQ,QAAQ,EAAE,YAAY;AAGnD,UAAI,SAAS,WAAW,GAAG,GAAG;AAC5B;AAAA,MACF;AAGA,YAAM,cAAc,eAAe,OAAO;AAG1C,UAAI,CAAC,aAAa;AAChB,QAAAA,QAAO,MAAM,mCAAmC,QAAQ,EAAE;AAC1D;AAAA,MACF;AAGA,YAAM,aAAgB,gBAAa,QAAQ;AAG3C,YAAM,WAAW,oBAAoB,aAAa,QAAQ;AAI1D,YAAM,UAAU,WAAW,WAAW,SAAS,QAAQ,IAAI,WAAW,SAAS,OAAO;AAGtF,YAAM,mBAAwC;AAAA,QAC5C,kBAAkB,iBAAiB,SAAS,QAAQ,QAAQ,IAAI,KAAK,IAAI,CAAC,EAAE;AAAA,QAC5E;AAAA,QACA,kBAAkB;AAAA,QAClB,SAAS,WAAW;AAAA,QACpB;AAAA,QACA,QAAQ;AAAA,QACR,UAAU;AAAA,MACZ;AAGA,MAAAA,QAAO,MAAM,wBAAwB,QAAQ,EAAE;AAC/C,YAAM,SAAS,MAAM,QAAQ,aAAa,gBAAgB;AAE1D,MAAAA,QAAO,KAAK,0BAA0B,QAAQ,KAAK,OAAO,aAAa,oBAAoB;AAC3F;AAAA,IACF,SAAS,OAAO;AACd,MAAAA,QAAO,MAAM,0BAA0B,QAAQ,KAAK,KAAK;AACzD;AAAA,IACF;AAAA,EACF;AAEA,EAAAA,QAAO;AAAA,IACL,8BAA8B,UAAU,gBAAgB,MAAM,kBAAkB,MAAM,MAAM;AAAA,EAC9F;AAEA,SAAO;AAAA,IACL,OAAO,MAAM;AAAA,IACb;AAAA,IACA;AAAA,EACF;AACF;AAKA,SAAS,YAAY,SAAiB,QAAkB,CAAC,GAAa;AACpE,MAAI;AACF,UAAM,UAAa,eAAY,SAAS,EAAE,eAAe,KAAK,CAAC;AAE/D,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAgB,UAAK,SAAS,MAAM,IAAI;AAE9C,UAAI,MAAM,YAAY,GAAG;AAEvB,YAAI,CAAC,CAAC,gBAAgB,QAAQ,WAAW,QAAQ,OAAO,EAAE,SAAS,MAAM,IAAI,GAAG;AAC9E,sBAAY,UAAU,KAAK;AAAA,QAC7B;AAAA,MACF,WAAW,MAAM,OAAO,GAAG;AACzB,cAAM,KAAK,QAAQ;AAAA,MACrB;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,IAAAA,QAAO,MAAM,2BAA2B,OAAO,KAAK,KAAK;AAAA,EAC3D;AAEA,SAAO;AACT;AAKA,SAAS,eAAe,WAAkC;AACxD,QAAM,eAAuC;AAAA;AAAA,IAE3C,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,aAAa;AAAA,IACb,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,SAAS;AAAA;AAAA,IAGT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,WAAW;AAAA,IACX,UAAU;AAAA;AAAA,IAGV,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,UAAU;AAAA;AAAA,IAGV,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO;AAAA;AAAA,IAGP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO;AAAA,IACP,UAAU;AAAA,IACV,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,
QAAQ;AAAA;AAAA,IAGR,OAAO;AAAA,IACP,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,iBAAiB;AAAA,IACjB,iBAAiB;AAAA,IACjB,eAAe;AAAA;AAAA,IAGf,QAAQ;AAAA;AAAA,IAGR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,EACX;AAEA,SAAO,aAAa,SAAS,KAAK;AACpC;","names":["logger","logger"]}
+ {"version":3,"sources":["../src/docs-loader.ts","../src/utils.ts"],"sourcesContent":["import { logger, UUID, createUniqueUuid } from \"@elizaos/core\";\nimport * as fs from \"fs\";\nimport * as path from \"path\";\nimport { KnowledgeService } from \"./service.ts\";\nimport { AddKnowledgeOptions } from \"./types.ts\";\nimport { isBinaryContentType } from './utils.ts';\n\n/**\n * Get the knowledge path from environment or default to ./docs\n */\nexport function getKnowledgePath(): string {\n const envPath = process.env.KNOWLEDGE_PATH;\n\n if (envPath) {\n // Resolve relative paths from current working directory\n const resolvedPath = path.resolve(envPath);\n\n if (!fs.existsSync(resolvedPath)) {\n logger.warn(`Knowledge path from environment variable does not exist: ${resolvedPath}`);\n logger.warn('Please create the directory or update KNOWLEDGE_PATH environment variable');\n }\n\n return resolvedPath;\n }\n\n // Default to docs folder in current working directory\n const defaultPath = path.join(process.cwd(), 'docs');\n\n if (!fs.existsSync(defaultPath)) {\n logger.info(`Default docs folder does not exist at: ${defaultPath}`);\n logger.info('To use the knowledge plugin, either:');\n logger.info('1. Create a \"docs\" folder in your project root');\n logger.info('2. Set KNOWLEDGE_PATH environment variable to your documents folder');\n }\n\n return defaultPath;\n}\n\n/**\n * Load documents from the knowledge path\n */\nexport async function loadDocsFromPath(\n service: KnowledgeService,\n agentId: UUID,\n worldId?: UUID\n): Promise<{ total: number; successful: number; failed: number }> {\n const docsPath = getKnowledgePath();\n\n if (!fs.existsSync(docsPath)) {\n logger.warn(`Knowledge path does not exist: ${docsPath}`);\n return { total: 0, successful: 0, failed: 0 };\n }\n\n logger.info(`Loading documents from: ${docsPath}`);\n\n // Get all files recursively\n const files = getAllFiles(docsPath);\n\n if (files.length === 0) {\n logger.info('No files found in knowledge path');\n return { total: 0, successful: 0, failed: 0 };\n }\n\n logger.info(`Found ${files.length} files to process`);\n\n let successful = 0;\n let failed = 0;\n\n for (const filePath of files) {\n try {\n const fileName = path.basename(filePath);\n const fileExt = path.extname(filePath).toLowerCase();\n\n // Skip hidden files and directories\n if (fileName.startsWith('.')) {\n continue;\n }\n\n // Determine content type\n const contentType = getContentType(fileExt);\n\n // Skip unsupported file types\n if (!contentType) {\n logger.debug(`Skipping unsupported file type: ${filePath}`);\n continue;\n }\n\n // Read file\n const fileBuffer = fs.readFileSync(filePath);\n\n // Check if file is binary using the same logic as the service\n const isBinary = isBinaryContentType(contentType, fileName);\n\n // For text files, read as UTF-8 string directly\n // For binary files, convert to base64\n const content = isBinary ? 
fileBuffer.toString('base64') : fileBuffer.toString('utf-8');\n\n // Create knowledge options\n const knowledgeOptions: AddKnowledgeOptions = {\n clientDocumentId: createUniqueUuid(agentId, `docs-${fileName}-${Date.now()}`) as UUID,\n contentType,\n originalFilename: fileName,\n worldId: worldId || agentId,\n content,\n roomId: agentId,\n entityId: agentId,\n };\n\n // Process the document\n logger.debug(`Processing document: ${fileName}`);\n const result = await service.addKnowledge(knowledgeOptions);\n\n logger.info(`Successfully processed ${fileName}: ${result.fragmentCount} fragments created`);\n successful++;\n } catch (error) {\n logger.error(`Failed to process file ${filePath}:`, error);\n failed++;\n }\n }\n\n logger.info(\n `Document loading complete: ${successful} successful, ${failed} failed out of ${files.length} total`\n );\n\n return {\n total: files.length,\n successful,\n failed,\n };\n}\n\n/**\n * Recursively get all files in a directory\n */\nfunction getAllFiles(dirPath: string, files: string[] = []): string[] {\n try {\n const entries = fs.readdirSync(dirPath, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dirPath, entry.name);\n\n if (entry.isDirectory()) {\n // Skip node_modules and other common directories\n if (!['node_modules', '.git', '.vscode', 'dist', 'build'].includes(entry.name)) {\n getAllFiles(fullPath, files);\n }\n } else if (entry.isFile()) {\n files.push(fullPath);\n }\n }\n } catch (error) {\n logger.error(`Error reading directory ${dirPath}:`, error);\n }\n\n return files;\n}\n\n/**\n * Get content type based on file extension\n */\nfunction getContentType(extension: string): string | null {\n const contentTypes: Record<string, string> = {\n // Text documents\n '.txt': 'text/plain',\n '.md': 'text/markdown',\n '.markdown': 'text/markdown',\n '.tson': 'text/plain',\n '.xml': 'application/xml',\n '.csv': 'text/csv',\n '.tsv': 'text/tab-separated-values',\n '.log': 'text/plain',\n\n // Web files\n '.html': 'text/html',\n '.htm': 'text/html',\n '.css': 'text/css',\n '.scss': 'text/x-scss',\n '.sass': 'text/x-sass',\n '.less': 'text/x-less',\n\n // JavaScript/TypeScript\n '.js': 'text/javascript',\n '.jsx': 'text/javascript',\n '.ts': 'text/typescript',\n '.tsx': 'text/typescript',\n '.mjs': 'text/javascript',\n '.cjs': 'text/javascript',\n '.vue': 'text/x-vue',\n '.svelte': 'text/x-svelte',\n '.astro': 'text/x-astro',\n\n // Python\n '.py': 'text/x-python',\n '.pyw': 'text/x-python',\n '.pyi': 'text/x-python',\n\n // Java/Kotlin/Scala\n '.java': 'text/x-java',\n '.kt': 'text/x-kotlin',\n '.kts': 'text/x-kotlin',\n '.scala': 'text/x-scala',\n\n // C/C++/C#\n '.c': 'text/x-c',\n '.cpp': 'text/x-c++',\n '.cc': 'text/x-c++',\n '.cxx': 'text/x-c++',\n '.h': 'text/x-c',\n '.hpp': 'text/x-c++',\n '.cs': 'text/x-csharp',\n\n // Other languages\n '.php': 'text/x-php',\n '.rb': 'text/x-ruby',\n '.go': 'text/x-go',\n '.rs': 'text/x-rust',\n '.swift': 'text/x-swift',\n '.r': 'text/x-r',\n '.R': 'text/x-r',\n '.m': 'text/x-objectivec',\n '.mm': 'text/x-objectivec',\n '.clj': 'text/x-clojure',\n '.cljs': 'text/x-clojure',\n '.ex': 'text/x-elixir',\n '.exs': 'text/x-elixir',\n '.lua': 'text/x-lua',\n '.pl': 'text/x-perl',\n '.pm': 'text/x-perl',\n '.dart': 'text/x-dart',\n '.hs': 'text/x-haskell',\n '.elm': 'text/x-elm',\n '.ml': 'text/x-ocaml',\n '.fs': 'text/x-fsharp',\n '.fsx': 'text/x-fsharp',\n '.vb': 'text/x-vb',\n '.pas': 'text/x-pascal',\n '.d': 'text/x-d',\n '.nim': 'text/x-nim',\n '.zig': 'text/x-zig',\n '.jl': 
'text/x-julia',\n '.tcl': 'text/x-tcl',\n '.awk': 'text/x-awk',\n '.sed': 'text/x-sed',\n\n // Shell scripts\n '.sh': 'text/x-sh',\n '.bash': 'text/x-sh',\n '.zsh': 'text/x-sh',\n '.fish': 'text/x-fish',\n '.ps1': 'text/x-powershell',\n '.bat': 'text/x-batch',\n '.cmd': 'text/x-batch',\n\n // Config files\n '.json': 'application/json',\n '.yaml': 'text/x-yaml',\n '.yml': 'text/x-yaml',\n '.toml': 'text/x-toml',\n '.ini': 'text/x-ini',\n '.cfg': 'text/x-ini',\n '.conf': 'text/x-ini',\n '.env': 'text/plain',\n '.gitignore': 'text/plain',\n '.dockerignore': 'text/plain',\n '.editorconfig': 'text/plain',\n '.properties': 'text/x-properties',\n\n // Database\n '.sql': 'text/x-sql',\n\n // Binary documents\n '.pdf': 'application/pdf',\n '.doc': 'application/msword',\n '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',\n };\n\n return contentTypes[extension] || null;\n}\n","import { Buffer } from 'node:buffer';\nimport * as mammoth from 'mammoth';\nimport { logger } from '@elizaos/core';\nimport { getDocument, PDFDocumentProxy } from 'pdfjs-dist/legacy/build/pdf.mjs';\nimport type { TextItem, TextMarkedContent } from 'pdfjs-dist/types/src/display/api';\n\nconst PLAIN_TEXT_CONTENT_TYPES = [\n 'application/typescript',\n 'text/typescript',\n 'text/x-python',\n 'application/x-python-code',\n 'application/yaml',\n 'text/yaml',\n 'application/x-yaml',\n 'application/json',\n 'text/markdown',\n 'text/csv',\n];\n\nconst MAX_FALLBACK_SIZE_BYTES = 5 * 1024 * 1024; // 5 MB\nconst BINARY_CHECK_BYTES = 1024; // Check first 1KB for binary indicators\n\n/**\n * Extracts text content from a file buffer based on its content type.\n * Supports DOCX, plain text, and provides a fallback for unknown types.\n * PDF should be handled by `convertPdfToTextFromBuffer`.\n */\nexport async function extractTextFromFileBuffer(\n fileBuffer: Buffer,\n contentType: string,\n originalFilename: string // For logging and context\n): Promise<string> {\n const lowerContentType = contentType.toLowerCase();\n logger.debug(\n `[TextUtil] Attempting to extract text from ${originalFilename} (type: ${contentType})`\n );\n\n if (\n lowerContentType === 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'\n ) {\n logger.debug(`[TextUtil] Extracting text from DOCX ${originalFilename} via mammoth.`);\n try {\n const result = await mammoth.extractRawText({ buffer: fileBuffer });\n logger.debug(\n `[TextUtil] DOCX text extraction complete for ${originalFilename}. Text length: ${result.value.length}`\n );\n return result.value;\n } catch (docxError: any) {\n const errorMsg = `[TextUtil] Failed to parse DOCX file ${originalFilename}: ${docxError.message}`;\n logger.error(errorMsg, docxError.stack);\n throw new Error(errorMsg);\n }\n } else if (\n lowerContentType === 'application/msword' ||\n originalFilename.toLowerCase().endsWith('.doc')\n ) {\n // For .doc files, we'll store the content as-is, and just add a message\n // The frontend will handle the display appropriately\n logger.debug(`[TextUtil] Handling Microsoft Word .doc file: ${originalFilename}`);\n\n // We'll add a descriptive message as a placeholder\n return `[Microsoft Word Document: ${originalFilename}]\\n\\nThis document was indexed for search but cannot be displayed directly in the browser. 
The original document content is preserved for retrieval purposes.`;\n } else if (\n lowerContentType.startsWith('text/') ||\n PLAIN_TEXT_CONTENT_TYPES.includes(lowerContentType)\n ) {\n logger.debug(\n `[TextUtil] Extracting text from plain text compatible file ${originalFilename} (type: ${contentType})`\n );\n return fileBuffer.toString('utf-8');\n } else {\n logger.warn(\n `[TextUtil] Unsupported content type: \"${contentType}\" for ${originalFilename}. Attempting fallback to plain text.`\n );\n\n if (fileBuffer.length > MAX_FALLBACK_SIZE_BYTES) {\n const sizeErrorMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) exceeds maximum size for fallback (${MAX_FALLBACK_SIZE_BYTES} bytes). Cannot process as plain text.`;\n logger.error(sizeErrorMsg);\n throw new Error(sizeErrorMsg);\n }\n\n // Simple binary detection: check for null bytes in the first N bytes\n const initialBytes = fileBuffer.subarray(0, Math.min(fileBuffer.length, BINARY_CHECK_BYTES));\n if (initialBytes.includes(0)) {\n // Check for NUL byte\n const binaryHeuristicMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) appears to be binary based on initial byte check. Cannot process as plain text.`;\n logger.error(binaryHeuristicMsg);\n throw new Error(binaryHeuristicMsg);\n }\n\n try {\n const textContent = fileBuffer.toString('utf-8');\n if (textContent.includes('\\ufffd')) {\n // Replacement character, indicating potential binary or wrong encoding\n const binaryErrorMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) seems to be binary or has encoding issues after fallback to plain text (detected \\ufffd).`;\n logger.error(binaryErrorMsg);\n throw new Error(binaryErrorMsg); // Throw error for likely binary content\n }\n logger.debug(\n `[TextUtil] Successfully processed unknown type ${contentType} as plain text after fallback for ${originalFilename}.`\n );\n return textContent;\n } catch (fallbackError: any) {\n // If the initial toString failed or if we threw due to \\ufffd\n const finalErrorMsg = `[TextUtil] Unsupported content type: ${contentType} for ${originalFilename}. Fallback to plain text also failed or indicated binary content.`;\n logger.error(finalErrorMsg, fallbackError.message ? 
fallbackError.stack : undefined);\n throw new Error(finalErrorMsg);\n }\n }\n}\n\n/**\n * Converts a PDF file buffer to text content.\n * Requires pdfjs-dist to be properly configured, especially its worker.\n */\n/**\n * Converts a PDF Buffer to text with enhanced formatting preservation.\n *\n * @param {Buffer} pdfBuffer - The PDF Buffer to convert to text\n * @param {string} [filename] - Optional filename for logging purposes\n * @returns {Promise<string>} Text content of the PDF\n */\nexport async function convertPdfToTextFromBuffer(\n pdfBuffer: Buffer,\n filename?: string\n): Promise<string> {\n const docName = filename || 'unnamed-document';\n logger.debug(`[PdfService] Starting conversion for ${docName}`);\n\n try {\n const uint8Array = new Uint8Array(pdfBuffer);\n const pdf: PDFDocumentProxy = await getDocument({ data: uint8Array }).promise;\n const numPages = pdf.numPages;\n const textPages: string[] = [];\n\n for (let pageNum = 1; pageNum <= numPages; pageNum++) {\n logger.debug(`[PdfService] Processing page ${pageNum}/${numPages}`);\n const page = await pdf.getPage(pageNum);\n const textContent = await page.getTextContent();\n\n // Group text items by their y-position to maintain line structure\n const lineMap = new Map<number, TextItem[]>();\n\n textContent.items.filter(isTextItem).forEach((item) => {\n // Round y-position to account for small variations in the same line\n const yPos = Math.round(item.transform[5]);\n if (!lineMap.has(yPos)) {\n lineMap.set(yPos, []);\n }\n lineMap.get(yPos)!.push(item);\n });\n\n // Sort lines by y-position (top to bottom) and items within lines by x-position (left to right)\n const sortedLines = Array.from(lineMap.entries())\n .sort((a, b) => b[0] - a[0]) // Reverse sort for top-to-bottom\n .map(([_, items]) =>\n items\n .sort((a, b) => a.transform[4] - b.transform[4])\n .map((item) => item.str)\n .join(' ')\n );\n\n textPages.push(sortedLines.join('\\n'));\n }\n\n const fullText = textPages.join('\\n\\n').replace(/\\s+/g, ' ').trim();\n logger.debug(`[PdfService] Conversion complete for ${docName}, length: ${fullText.length}`);\n return fullText;\n } catch (error: any) {\n logger.error(`[PdfService] Error converting PDF ${docName}:`, error.message);\n throw new Error(`Failed to convert PDF to text: ${error.message}`);\n }\n}\n\n/**\n * Determines if a file should be treated as binary based on its content type and filename\n * @param contentType MIME type of the file\n * @param filename Original filename\n * @returns True if the file should be treated as binary (base64 encoded)\n */\nexport function isBinaryContentType(contentType: string, filename: string): boolean {\n // Text-based content types that should NOT be treated as binary\n const textContentTypes = [\n 'text/',\n 'application/json',\n 'application/xml',\n 'application/javascript',\n 'application/typescript',\n 'application/x-yaml',\n 'application/x-sh',\n ];\n\n // Check if it's a text-based MIME type\n const isTextMimeType = textContentTypes.some((type) => contentType.includes(type));\n if (isTextMimeType) {\n return false;\n }\n\n // Binary content types\n const binaryContentTypes = [\n 'application/pdf',\n 'application/msword',\n 'application/vnd.openxmlformats-officedocument',\n 'application/vnd.ms-excel',\n 'application/vnd.ms-powerpoint',\n 'application/zip',\n 'application/x-zip-compressed',\n 'application/octet-stream',\n 'image/',\n 'audio/',\n 'video/',\n ];\n\n // Check MIME type\n const isBinaryMimeType = binaryContentTypes.some((type) => 
contentType.includes(type));\n\n if (isBinaryMimeType) {\n return true;\n }\n\n // Check file extension as fallback\n const fileExt = filename.split('.').pop()?.toLowerCase() || '';\n\n // Text file extensions that should NOT be treated as binary\n const textExtensions = [\n 'txt',\n 'md',\n 'markdown',\n 'json',\n 'xml',\n 'html',\n 'htm',\n 'css',\n 'js',\n 'ts',\n 'jsx',\n 'tsx',\n 'yaml',\n 'yml',\n 'toml',\n 'ini',\n 'cfg',\n 'conf',\n 'sh',\n 'bash',\n 'zsh',\n 'fish',\n 'py',\n 'rb',\n 'go',\n 'rs',\n 'java',\n 'c',\n 'cpp',\n 'h',\n 'hpp',\n 'cs',\n 'php',\n 'sql',\n 'r',\n 'swift',\n 'kt',\n 'scala',\n 'clj',\n 'ex',\n 'exs',\n 'vim',\n 'env',\n 'gitignore',\n 'dockerignore',\n 'editorconfig',\n 'log',\n 'csv',\n 'tsv',\n 'properties',\n 'gradle',\n 'sbt',\n 'makefile',\n 'dockerfile',\n 'vagrantfile',\n 'gemfile',\n 'rakefile',\n 'podfile',\n 'csproj',\n 'vbproj',\n 'fsproj',\n 'sln',\n 'pom',\n ];\n\n // If it's a known text extension, it's not binary\n if (textExtensions.includes(fileExt)) {\n return false;\n }\n\n // Binary file extensions\n const binaryExtensions = [\n 'pdf',\n 'docx',\n 'doc',\n 'xls',\n 'xlsx',\n 'ppt',\n 'pptx',\n 'zip',\n 'rar',\n '7z',\n 'tar',\n 'gz',\n 'bz2',\n 'xz',\n 'jpg',\n 'jpeg',\n 'png',\n 'gif',\n 'bmp',\n 'svg',\n 'ico',\n 'webp',\n 'mp3',\n 'mp4',\n 'avi',\n 'mov',\n 'wmv',\n 'flv',\n 'wav',\n 'flac',\n 'ogg',\n 'exe',\n 'dll',\n 'so',\n 'dylib',\n 'bin',\n 'dat',\n 'db',\n 'sqlite',\n ];\n\n return binaryExtensions.includes(fileExt);\n}\n\n/**\n * Check if the input is a TextItem.\n *\n * @param item - The input item to check.\n * @returns A boolean indicating if the input is a TextItem.\n */\nfunction isTextItem(item: TextItem | TextMarkedContent): item is TextItem {\n return 'str' in item;\n}\n\n/**\n * Normalizes an S3 URL by removing query parameters (signature, etc.)\n * This allows for consistent URL comparison regardless of presigned URL parameters\n * @param url The S3 URL to normalize\n * @returns The normalized URL containing only the origin and pathname\n */\nexport function normalizeS3Url(url: string): string {\n try {\n const urlObj = new URL(url);\n return `${urlObj.origin}${urlObj.pathname}`;\n } catch (error) {\n logger.warn(`[URL NORMALIZER] Failed to parse URL: ${url}. 
Returning original.`);\n return url;\n }\n}\n\n/**\n * Fetches content from a URL and converts it to base64 format\n * @param url The URL to fetch content from\n * @returns An object containing the base64 content and content type\n */\nexport async function fetchUrlContent(\n url: string\n): Promise<{ content: string; contentType: string }> {\n logger.debug(`[URL FETCHER] Fetching content from URL: ${url}`);\n\n try {\n // Fetch the URL with timeout\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), 30000); // 30 second timeout\n\n const response = await fetch(url, {\n signal: controller.signal,\n headers: {\n 'User-Agent': 'Eliza-Knowledge-Plugin/1.0',\n },\n });\n clearTimeout(timeoutId);\n\n if (!response.ok) {\n throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`);\n }\n\n // Get content type from response headers\n const contentType = response.headers.get('content-type') || 'application/octet-stream';\n logger.debug(`[URL FETCHER] Content type from server: ${contentType} for URL: ${url}`);\n\n // Get content as ArrayBuffer\n const arrayBuffer = await response.arrayBuffer();\n const buffer = Buffer.from(arrayBuffer);\n\n // Convert to base64\n const base64Content = buffer.toString('base64');\n\n logger.debug(\n `[URL FETCHER] Successfully fetched content from URL: ${url} (${buffer.length} bytes)`\n );\n return {\n content: base64Content,\n contentType,\n };\n } catch (error: any) {\n logger.error(`[URL FETCHER] Error fetching content from URL ${url}: ${error.message}`);\n throw new Error(`Failed to fetch content from URL: ${error.message}`);\n }\n}\n\nexport function looksLikeBase64(content?: string | null): boolean {\n // const base64Regex = /^[A-Za-z0-9+/]+=*$/; // This is the old regex\n // https://stackoverflow.com/questions/475074/regex-to-parse-or-validate-base64-data\n const base64Regex = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/;\n return content && content.length > 0 && base64Regex.test(content.replace(/\\s/g, '')) || 
false;\n}"],"mappings":";AAAA,SAAS,UAAAA,SAAc,wBAAwB;AAC/C,YAAY,QAAQ;AACpB,YAAY,UAAU;;;ACFtB,SAAS,cAAc;AACvB,YAAY,aAAa;AACzB,SAAS,cAAc;AACvB,SAAS,mBAAqC;AAG9C,IAAM,2BAA2B;AAAA,EAC/B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,0BAA0B,IAAI,OAAO;AAC3C,IAAM,qBAAqB;AAO3B,eAAsB,0BACpB,YACA,aACA,kBACiB;AACjB,QAAM,mBAAmB,YAAY,YAAY;AACjD,SAAO;AAAA,IACL,8CAA8C,gBAAgB,WAAW,WAAW;AAAA,EACtF;AAEA,MACE,qBAAqB,2EACrB;AACA,WAAO,MAAM,wCAAwC,gBAAgB,eAAe;AACpF,QAAI;AACF,YAAM,SAAS,MAAc,uBAAe,EAAE,QAAQ,WAAW,CAAC;AAClE,aAAO;AAAA,QACL,gDAAgD,gBAAgB,kBAAkB,OAAO,MAAM,MAAM;AAAA,MACvG;AACA,aAAO,OAAO;AAAA,IAChB,SAAS,WAAgB;AACvB,YAAM,WAAW,wCAAwC,gBAAgB,KAAK,UAAU,OAAO;AAC/F,aAAO,MAAM,UAAU,UAAU,KAAK;AACtC,YAAM,IAAI,MAAM,QAAQ;AAAA,IAC1B;AAAA,EACF,WACE,qBAAqB,wBACrB,iBAAiB,YAAY,EAAE,SAAS,MAAM,GAC9C;AAGA,WAAO,MAAM,iDAAiD,gBAAgB,EAAE;AAGhF,WAAO,6BAA6B,gBAAgB;AAAA;AAAA;AAAA,EACtD,WACE,iBAAiB,WAAW,OAAO,KACnC,yBAAyB,SAAS,gBAAgB,GAClD;AACA,WAAO;AAAA,MACL,8DAA8D,gBAAgB,WAAW,WAAW;AAAA,IACtG;AACA,WAAO,WAAW,SAAS,OAAO;AAAA,EACpC,OAAO;AACL,WAAO;AAAA,MACL,yCAAyC,WAAW,SAAS,gBAAgB;AAAA,IAC/E;AAEA,QAAI,WAAW,SAAS,yBAAyB;AAC/C,YAAM,eAAe,mBAAmB,gBAAgB,WAAW,WAAW,wCAAwC,uBAAuB;AAC7I,aAAO,MAAM,YAAY;AACzB,YAAM,IAAI,MAAM,YAAY;AAAA,IAC9B;AAGA,UAAM,eAAe,WAAW,SAAS,GAAG,KAAK,IAAI,WAAW,QAAQ,kBAAkB,CAAC;AAC3F,QAAI,aAAa,SAAS,CAAC,GAAG;AAE5B,YAAM,qBAAqB,mBAAmB,gBAAgB,WAAW,WAAW;AACpF,aAAO,MAAM,kBAAkB;AAC/B,YAAM,IAAI,MAAM,kBAAkB;AAAA,IACpC;AAEA,QAAI;AACF,YAAM,cAAc,WAAW,SAAS,OAAO;AAC/C,UAAI,YAAY,SAAS,QAAQ,GAAG;AAElC,cAAM,iBAAiB,mBAAmB,gBAAgB,WAAW,WAAW;AAChF,eAAO,MAAM,cAAc;AAC3B,cAAM,IAAI,MAAM,cAAc;AAAA,MAChC;AACA,aAAO;AAAA,QACL,kDAAkD,WAAW,qCAAqC,gBAAgB;AAAA,MACpH;AACA,aAAO;AAAA,IACT,SAAS,eAAoB;AAE3B,YAAM,gBAAgB,wCAAwC,WAAW,QAAQ,gBAAgB;AACjG,aAAO,MAAM,eAAe,cAAc,UAAU,cAAc,QAAQ,MAAS;AACnF,YAAM,IAAI,MAAM,aAAa;AAAA,IAC/B;AAAA,EACF;AACF;AAaA,eAAsB,2BACpB,WACA,UACiB;AACjB,QAAM,UAAU,YAAY;AAC5B,SAAO,MAAM,wCAAwC,OAAO,EAAE;AAE9D,MAAI;AACF,UAAM,aAAa,IAAI,WAAW,SAAS;AAC3C,UAAM,MAAwB,MAAM,YAAY,EAAE,MAAM,WAAW,CAAC,EAAE;AACtE,UAAM,WAAW,IAAI;AACrB,UAAM,YAAsB,CAAC;AAE7B,aAAS,UAAU,GAAG,WAAW,UAAU,WAAW;AACpD,aAAO,MAAM,gCAAgC,OAAO,IAAI,QAAQ,EAAE;AAClE,YAAM,OAAO,MAAM,IAAI,QAAQ,OAAO;AACtC,YAAM,cAAc,MAAM,KAAK,eAAe;AAG9C,YAAM,UAAU,oBAAI,IAAwB;AAE5C,kBAAY,MAAM,OAAO,UAAU,EAAE,QAAQ,CAAC,SAAS;AAErD,cAAM,OAAO,KAAK,MAAM,KAAK,UAAU,CAAC,CAAC;AACzC,YAAI,CAAC,QAAQ,IAAI,IAAI,GAAG;AACtB,kBAAQ,IAAI,MAAM,CAAC,CAAC;AAAA,QACtB;AACA,gBAAQ,IAAI,IAAI,EAAG,KAAK,IAAI;AAAA,MAC9B,CAAC;AAGD,YAAM,cAAc,MAAM,KAAK,QAAQ,QAAQ,CAAC,EAC7C,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,EAC1B;AAAA,QAAI,CAAC,CAAC,GAAG,KAAK,MACb,MACG,KAAK,CAAC,GAAG,MAAM,EAAE,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC,EAC9C,IAAI,CAAC,SAAS,KAAK,GAAG,EACtB,KAAK,GAAG;AAAA,MACb;AAEF,gBAAU,KAAK,YAAY,KAAK,IAAI,CAAC;AAAA,IACvC;AAEA,UAAM,WAAW,UAAU,KAAK,MAAM,EAAE,QAAQ,QAAQ,GAAG,EAAE,KAAK;AAClE,WAAO,MAAM,wCAAwC,OAAO,aAAa,SAAS,MAAM,EAAE;AAC1F,WAAO;AAAA,EACT,SAAS,OAAY;AACnB,WAAO,MAAM,qCAAqC,OAAO,KAAK,MAAM,OAAO;AAC3E,UAAM,IAAI,MAAM,kCAAkC,MAAM,OAAO,EAAE;AAAA,EACnE;AACF;AAQO,SAAS,oBAAoB,aAAqB,UAA2B;AAElF,QAAM,mBAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,iBAAiB,iBAAiB,KAAK,CAAC,SAAS,YAAY,SAAS,IAAI,CAAC;AACjF,MAAI,gBAAgB;AAClB,WAAO;AAAA,EACT;AAGA,QAAM,qBAAqB;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,mBAAmB,mBAAmB,KAAK,CAAC,SAAS,YAAY,SAAS,IAAI,CAAC;AAErF,MAAI,kBAAkB;AACpB,WAAO;AAAA,EACT;AAGA,QAAM,UAAU,SAAS,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY,KAAK;AAG5D,QAAM,iBAAiB;
[… remainder of the source-map JSON — the inlined docs-loader.ts/utils.ts sources and the minified VLQ "mappings" field — omitted …] "names":["logger","logger"]}
@@ -1,9 +1,9 @@
  import {
  getKnowledgePath,
  loadDocsFromPath
- } from "./chunk-MFXNKYBS.js";
+ } from "./chunk-536BD2UA.js";
  export {
  getKnowledgePath,
  loadDocsFromPath
  };
- //# sourceMappingURL=docs-loader-AEQHIBO4.js.map
+ //# sourceMappingURL=docs-loader-IBTEOAYT.js.map
package/dist/index.d.ts CHANGED
@@ -98,6 +98,8 @@ interface TextGenerationOptions {
  * Options for adding knowledge to the system
  */
  interface AddKnowledgeOptions {
+ /** Agent ID from the frontend - if not provided, will use runtime.agentId */
+ agentId?: UUID;
  worldId: UUID;
  roomId: UUID;
  entityId: UUID;
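
The new optional agentId lets a caller attribute a document to a specific agent; when it is omitted, addKnowledge falls back to runtime.agentId (see the index.js hunk below). A minimal sketch of a call exercising the field, in TypeScript — the service lookup mirrors the plugin's own code, but the ids and file values here are illustrative placeholders, not values from this package:

    import { stringToUuid, type UUID } from "@elizaos/core";

    // Assumes `service` was obtained via runtime.getService(KnowledgeService.serviceType).
    const agentId: UUID = stringToUuid("my-agent"); // placeholder seed
    await service.addKnowledge({
      agentId,                                      // new in 1.0.6; optional
      worldId: agentId,                             // the bundled action handlers reuse the agent id here
      roomId: agentId,
      entityId: agentId,
      clientDocumentId: stringToUuid(`doc-${Date.now()}`),
      contentType: "text/plain",
      originalFilename: "notes.txt",
      content: "Plain-text knowledge to index.",
    });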
package/dist/index.html CHANGED
@@ -5,8 +5,8 @@
  <link rel="icon" type="image/svg+xml" href="/vite.svg" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>Agent Plugin View</title>
- <script type="module" crossorigin src="./assets/index-DZQIX0Kb.js"></script>
- <link rel="stylesheet" crossorigin href="./assets/index-C77XebWS.css">
+ <script type="module" crossorigin src="./assets/index-DlJcnv-R.js"></script>
+ <link rel="stylesheet" crossorigin href="./assets/index-DiVvUMt0.css">
  </head>
  <body>
  <div id="root"></div>
package/dist/index.js CHANGED
@@ -4,8 +4,9 @@ import {
  fetchUrlContent,
  isBinaryContentType,
  loadDocsFromPath,
+ looksLikeBase64,
  normalizeS3Url
- } from "./chunk-MFXNKYBS.js";
+ } from "./chunk-536BD2UA.js";
 
  // src/index.ts
  import { logger as logger6 } from "@elizaos/core";
@@ -1264,9 +1265,9 @@ var KnowledgeService = class _KnowledgeService extends Service {
  * @returns Promise with document processing result
  */
  async addKnowledge(options) {
- const agentId = this.runtime.agentId;
+ const agentId = options.agentId || this.runtime.agentId;
  logger3.info(
- `KnowledgeService (agent: ${agentId}) processing document for public addKnowledge: ${options.originalFilename}, type: ${options.contentType}`
+ `KnowledgeService processing document for agent: ${agentId}, file: ${options.originalFilename}, type: ${options.contentType}`
  );
  try {
  const existingDocument = await this.runtime.getMemoryById(options.clientDocumentId);
@@ -1302,6 +1303,7 @@ var KnowledgeService = class _KnowledgeService extends Service {
  * @returns Promise with document processing result
  */
  async processDocument({
+ agentId: passedAgentId,
  clientDocumentId,
  contentType,
  originalFilename,
@@ -1311,10 +1313,10 @@ var KnowledgeService = class _KnowledgeService extends Service {
  entityId,
  metadata
  }) {
- const agentId = this.runtime.agentId;
+ const agentId = passedAgentId || this.runtime.agentId;
  try {
  logger3.debug(
- `KnowledgeService: Processing document ${originalFilename} (type: ${contentType}) via processDocument`
+ `KnowledgeService: Processing document ${originalFilename} (type: ${contentType}) via processDocument for agent: ${agentId}`
  );
  let fileBuffer = null;
  let extractedText;
@@ -1343,9 +1345,7 @@ var KnowledgeService = class _KnowledgeService extends Service {
  extractedText = await extractTextFromDocument(fileBuffer, contentType, originalFilename);
  documentContentToStore = extractedText;
  } else {
- const base64Regex = /^[A-Za-z0-9+/]+=*$/;
- const looksLikeBase64 = content && content.length > 0 && base64Regex.test(content.replace(/\s/g, ""));
- if (looksLikeBase64) {
+ if (looksLikeBase64(content)) {
  try {
  const decodedBuffer = Buffer.from(content, "base64");
  const decodedText = decodedBuffer.toString("utf8");
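
The removed inline check accepted any run of base64-alphabet characters, so short plain words ("hello") could be mis-detected and mangled by a base64 decode. The exported looksLikeBase64 helper (see chunk-536BD2UA.js above) additionally requires whole 4-character groups with valid = padding. A quick comparison, with the old regex reproduced from the removed lines:

    import { looksLikeBase64 } from "./chunk-536BD2UA.js";

    // Old inline test: any base64-alphabet run passed, padded or not.
    const oldCheck = (s: string) => /^[A-Za-z0-9+/]+=*$/.test(s.replace(/\s/g, ""));

    oldCheck("hello");           // true  -- false positive on ordinary text
    looksLikeBase64("hello");    // false -- 5 chars cannot form valid base64 groups
    looksLikeBase64("SGVsbG8="); // true  -- "Hello" encoded: one quad plus a padded tail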
@@ -1678,8 +1678,9 @@ var KnowledgeService = class _KnowledgeService extends Service {
  */
  async getMemories(params) {
  return this.runtime.getMemories({
- ...params
+ ...params,
  // includes tableName, roomId, count, end
+ agentId: this.runtime.agentId
  });
  }
  /**
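
Note the ordering inside getMemories: agentId is written after ...params is spread, so the runtime's own id always wins and a caller can no longer read another agent's memories by smuggling an agentId into params. The override-after-spread pattern in isolation:

    // The key written after the spread takes precedence over anything in params.
    const params = { tableName: "documents", count: 10, agentId: "caller-supplied" };
    const query = { ...params, agentId: "runtime-agent" };
    // query.agentId === "runtime-agent"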
@@ -2633,7 +2634,7 @@ var KnowledgeTestSuite = class {
  var tests_default = new KnowledgeTestSuite();
 
  // src/actions.ts
- import { logger as logger4, createUniqueUuid as createUniqueUuid2 } from "@elizaos/core";
+ import { logger as logger4, stringToUuid } from "@elizaos/core";
  import * as fs2 from "fs";
  import * as path2 from "path";
  var processKnowledgeAction = {
@@ -2734,7 +2735,7 @@ var processKnowledgeAction = {
  else if ([".txt", ".md", ".tson", ".xml", ".csv"].includes(fileExt))
  contentType = "text/plain";
  const knowledgeOptions = {
- clientDocumentId: createUniqueUuid2(runtime.agentId + fileName + Date.now(), fileName),
+ clientDocumentId: stringToUuid(runtime.agentId + fileName + Date.now()),
  contentType,
  originalFilename: fileName,
  worldId: runtime.agentId,
@@ -2761,7 +2762,7 @@ var processKnowledgeAction = {
  return;
  }
  const knowledgeOptions = {
- clientDocumentId: createUniqueUuid2(runtime.agentId + "text" + Date.now(), "user-knowledge"),
+ clientDocumentId: stringToUuid(runtime.agentId + "text" + Date.now() + "user-knowledge"),
  contentType: "text/plain",
  originalFilename: "user-knowledge.txt",
  worldId: runtime.agentId,
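
Both action call sites now derive clientDocumentId by hashing a single seed string with stringToUuid rather than the two-argument createUniqueUuid form. stringToUuid is deterministic — the same seed always yields the same UUID — and the Date.now() component keeps each invocation's id fresh. For example:

    import { stringToUuid } from "@elizaos/core";

    const seed = "agent-1" + "notes.txt" + 1700000000000; // illustrative values
    stringToUuid(seed) === stringToUuid(seed);            // true: deterministic hash
    // With Date.now() in the seed, as above, ids stay unique per upload.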
@@ -2902,7 +2903,7 @@ ${formattedResults}`
  var knowledgeActions = [processKnowledgeAction, searchKnowledgeAction];
 
  // src/routes.ts
- import { MemoryType as MemoryType4, createUniqueUuid as createUniqueUuid3, logger as logger5 } from "@elizaos/core";
+ import { createUniqueUuid as createUniqueUuid2, logger as logger5, ModelType as ModelType4 } from "@elizaos/core";
  import fs3 from "fs";
  import path3 from "path";
  import multer from "multer";
@@ -2978,37 +2979,32 @@ async function uploadKnowledgeHandler(req, res, runtime) {
  if (!files || files.length === 0) {
  return sendError(res, 400, "NO_FILES", "No files uploaded");
  }
+ const agentId = req.body.agentId || req.query.agentId;
+ if (!agentId) {
+ logger5.error("[KNOWLEDGE UPLOAD HANDLER] No agent ID provided in request");
+ return sendError(
+ res,
+ 400,
+ "MISSING_AGENT_ID",
+ "Agent ID is required for uploading knowledge"
+ );
+ }
+ const worldId = req.body.worldId || agentId;
+ logger5.info(`[KNOWLEDGE UPLOAD HANDLER] Processing upload for agent: ${agentId}`);
  const processingPromises = files.map(async (file, index) => {
  let knowledgeId;
  const originalFilename = file.originalname;
- const agentId = req.body.agentId || req.query.agentId || runtime.agentId;
- const worldId = req.body.worldId || agentId;
  const filePath = file.path;
- knowledgeId = req.body?.documentIds && req.body.documentIds[index] || req.body?.documentId || createUniqueUuid3(runtime, `knowledge-${originalFilename}-${Date.now()}`);
+ knowledgeId = req.body?.documentIds && req.body.documentIds[index] || req.body?.documentId || createUniqueUuid2(runtime, `knowledge-${originalFilename}-${Date.now()}`);
+ logger5.debug(
+ `[KNOWLEDGE UPLOAD HANDLER] File: ${originalFilename}, Agent ID: ${agentId}, World ID: ${worldId}, Knowledge ID: ${knowledgeId}`
+ );
  try {
  const fileBuffer = await fs3.promises.readFile(filePath);
- const fileExt = file.originalname.split(".").pop()?.toLowerCase() || "";
- const filename = file.originalname;
- const title = filename.replace(`.${fileExt}`, "");
  const base64Content = fileBuffer.toString("base64");
- const knowledgeItem = {
- id: knowledgeId,
- content: {
- text: base64Content
- },
- metadata: {
- type: MemoryType4.DOCUMENT,
- timestamp: Date.now(),
- source: "upload",
- filename,
- fileExt,
- title,
- path: originalFilename,
- fileType: file.mimetype,
- fileSize: file.size
- }
- };
  const addKnowledgeOpts = {
+ agentId,
+ // Pass the agent ID from frontend
  clientDocumentId: knowledgeId,
  // This is knowledgeItem.id
  contentType: file.mimetype,
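
agentId is now resolved once per request, before the per-file loop, and is mandatory: a request without it in the body or query string gets 400 MISSING_AGENT_ID instead of 1.0.5's silent fallback to runtime.agentId. A client-side sketch — the endpoint URL and the multipart field name are placeholders, since neither appears in this hunk:

    // UPLOAD_URL and the "files" field name are assumptions for illustration.
    declare const UPLOAD_URL: string;

    const form = new FormData();
    form.append("files", new Blob(["some text"], { type: "text/plain" }), "notes.txt");
    form.append("agentId", "00000000-0000-0000-0000-000000000001"); // required since 1.0.6
    form.append("worldId", "00000000-0000-0000-0000-000000000001"); // optional; defaults to agentId
    const res = await fetch(UPLOAD_URL, { method: "POST", body: form });
    // res.status === 400 with code MISSING_AGENT_ID when agentId is omitted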
@@ -3053,11 +3049,21 @@ async function uploadKnowledgeHandler(req, res, runtime) {
  if (fileUrls.length === 0) {
  return sendError(res, 400, "MISSING_URL", "File URL is required");
  }
- const agentId = req.body.agentId || req.query.agentId || runtime.agentId;
+ const agentId = req.body.agentId || req.query.agentId;
+ if (!agentId) {
+ logger5.error("[KNOWLEDGE URL HANDLER] No agent ID provided in request");
+ return sendError(
+ res,
+ 400,
+ "MISSING_AGENT_ID",
+ "Agent ID is required for uploading knowledge from URLs"
+ );
+ }
+ logger5.info(`[KNOWLEDGE URL HANDLER] Processing URL upload for agent: ${agentId}`);
  const processingPromises = fileUrls.map(async (fileUrl) => {
  try {
  const normalizedUrl = normalizeS3Url(fileUrl);
- const knowledgeId = createUniqueUuid3(runtime, normalizedUrl);
+ const knowledgeId = createUniqueUuid2(runtime, normalizedUrl);
  const urlObject = new URL(fileUrl);
  const pathSegments = urlObject.pathname.split("/");
  const encodedFilename = pathSegments[pathSegments.length - 1] || "document.pdf";
@@ -3086,6 +3092,8 @@ async function uploadKnowledgeHandler(req, res, runtime) {
  }
  }
  const addKnowledgeOpts = {
+ agentId,
+ // Pass the agent ID from frontend
  clientDocumentId: knowledgeId,
  contentType,
  originalFilename,
@@ -3157,7 +3165,7 @@ async function getKnowledgeDocumentsHandler(req, res, runtime) {
  if (fileUrls && fileUrls.length > 0) {
  const normalizedRequestUrls = fileUrls.map((url) => normalizeS3Url(url));
  const urlBasedIds = normalizedRequestUrls.map(
- (url) => createUniqueUuid3(runtime, url)
+ (url) => createUniqueUuid2(runtime, url)
  );
  filteredMemories = memories.filter(
  (memory) => urlBasedIds.includes(memory.id) || // If the ID corresponds directly
@@ -3404,6 +3412,87 @@ async function getKnowledgeChunksHandler(req, res, runtime) {
  sendError(res, 500, "RETRIEVAL_ERROR", "Failed to retrieve knowledge chunks", error.message);
  }
  }
+ async function searchKnowledgeHandler(req, res, runtime) {
+ const service = runtime.getService(KnowledgeService.serviceType);
+ if (!service) {
+ return sendError(res, 500, "SERVICE_NOT_FOUND", "KnowledgeService not found");
+ }
+ try {
+ const searchText = req.query.q;
+ const parsedThreshold = req.query.threshold ? Number.parseFloat(req.query.threshold) : NaN;
+ let matchThreshold = Number.isNaN(parsedThreshold) ? 0.5 : parsedThreshold;
+ matchThreshold = Math.max(0, Math.min(1, matchThreshold));
+ const parsedLimit = req.query.limit ? Number.parseInt(req.query.limit, 10) : NaN;
+ let limit = Number.isNaN(parsedLimit) ? 20 : parsedLimit;
+ limit = Math.max(1, Math.min(100, limit));
+ const agentId = req.query.agentId || runtime.agentId;
+ if (!searchText || searchText.trim().length === 0) {
+ return sendError(res, 400, "INVALID_QUERY", "Search query cannot be empty");
+ }
+ if (req.query.threshold && (parsedThreshold < 0 || parsedThreshold > 1)) {
+ logger5.debug(
+ `[KNOWLEDGE SEARCH] Threshold value ${parsedThreshold} was clamped to ${matchThreshold}`
+ );
+ }
+ if (req.query.limit && (parsedLimit < 1 || parsedLimit > 100)) {
+ logger5.debug(`[KNOWLEDGE SEARCH] Limit value ${parsedLimit} was clamped to ${limit}`);
+ }
+ logger5.debug(
+ `[KNOWLEDGE SEARCH] Searching for: "${searchText}" with threshold: ${matchThreshold}, limit: ${limit}`
+ );
+ const embedding = await runtime.useModel(ModelType4.TEXT_EMBEDDING, {
+ text: searchText
+ });
+ const results = await runtime.searchMemories({
+ tableName: "knowledge",
+ embedding,
+ query: searchText,
+ count: limit,
+ match_threshold: matchThreshold,
+ roomId: agentId
+ });
+ const enhancedResults = await Promise.all(
+ results.map(async (fragment) => {
+ let documentTitle = "Unknown Document";
+ let documentFilename = "unknown";
+ if (fragment.metadata && typeof fragment.metadata === "object" && "documentId" in fragment.metadata) {
+ const documentId = fragment.metadata.documentId;
+ try {
+ const document = await runtime.getMemoryById(documentId);
+ if (document && document.metadata) {
+ documentTitle = document.metadata.title || document.metadata.filename || documentTitle;
+ documentFilename = document.metadata.filename || documentFilename;
+ }
+ } catch (e) {
+ logger5.debug(`Could not fetch document ${documentId} for fragment`);
+ }
+ }
+ return {
+ id: fragment.id,
+ content: fragment.content,
+ similarity: fragment.similarity || 0,
+ metadata: {
+ ...fragment.metadata || {},
+ documentTitle,
+ documentFilename
+ }
+ };
+ })
+ );
+ logger5.info(
+ `[KNOWLEDGE SEARCH] Found ${enhancedResults.length} results for query: "${searchText}"`
+ );
+ sendSuccess(res, {
+ query: searchText,
+ threshold: matchThreshold,
+ results: enhancedResults,
+ count: enhancedResults.length
+ });
+ } catch (error) {
+ logger5.error("[KNOWLEDGE SEARCH] Error searching knowledge:", error);
+ sendError(res, 500, "SEARCH_ERROR", "Failed to search knowledge", error.message);
+ }
+ }
  async function uploadKnowledgeWithMulter(req, res, runtime) {
  const upload = createUploadMiddleware(runtime);
  const uploadArray = upload.array(
@@ -3455,6 +3544,11 @@ var knowledgeRoutes = [
  type: "GET",
  path: "/knowledges",
  handler: getKnowledgeChunksHandler
+ },
+ {
+ type: "GET",
+ path: "/search",
+ handler: searchKnowledgeHandler
  }
  ];
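
The new GET /search route exposes the retrieval pipeline over HTTP: q is required, threshold is clamped to [0, 1] (default 0.5), limit is clamped to [1, 100] (default 20), and agentId falls back to runtime.agentId. A usage sketch — API_BASE is a placeholder, since the mount prefix depends on how the host server registers plugin routes:

    declare const API_BASE: string; // placeholder mount prefix

    const params = new URLSearchParams({
      q: "vector databases", // required; blank queries get 400 INVALID_QUERY
      threshold: "0.7",      // out-of-range values are clamped to [0, 1]
      limit: "5",            // clamped to [1, 100]
    });
    const res = await fetch(`${API_BASE}/search?${params}`);
    const body = await res.json();
    // The handler responds with { query, threshold, results, count }; each result
    // carries similarity plus documentTitle/documentFilename in its metadata.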
 
@@ -3503,7 +3597,7 @@ var knowledgePlugin = {
  try {
  const service = runtime.getService(KnowledgeService.serviceType);
  if (service instanceof KnowledgeService) {
- const { loadDocsFromPath: loadDocsFromPath2 } = await import("./docs-loader-AEQHIBO4.js");
+ const { loadDocsFromPath: loadDocsFromPath2 } = await import("./docs-loader-IBTEOAYT.js");
  const result = await loadDocsFromPath2(service, runtime.agentId);
  if (result.successful > 0) {
  logger6.info(`Loaded ${result.successful} documents from docs folder on startup`);