@elizaos/plugin-knowledge 1.0.9 → 1.0.11
This diff represents the content of publicly available package versions as released to their respective registries. It is provided for informational purposes only and reflects the changes between those published versions.
- package/README.md +29 -47
- package/dist/.vite/manifest.json +2 -2
- package/dist/assets/index-CzI8hR5q.css +1 -0
- package/dist/assets/index-DimDNB3w.js +160 -0
- package/dist/{chunk-QH7GBNKB.js → chunk-RFXW7QQK.js} +9 -3
- package/dist/chunk-RFXW7QQK.js.map +1 -0
- package/dist/{docs-loader-5INCF4VJ.js → docs-loader-5H4HRYEE.js} +2 -2
- package/dist/index.d.ts +0 -3
- package/dist/index.html +2 -2
- package/dist/index.js +576 -386
- package/dist/index.js.map +1 -1
- package/package.json +8 -6
- package/dist/assets/index-BIGrGyiB.css +0 -1
- package/dist/assets/index-BdW2hLiy.js +0 -165
- package/dist/chunk-QH7GBNKB.js.map +0 -1
- package/dist/{docs-loader-5INCF4VJ.js.map → docs-loader-5H4HRYEE.js.map} +0 -0
package/dist/chunk-QH7GBNKB.js.map
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/docs-loader.ts","../src/utils.ts","../node_modules/uuid/dist/esm/regex.js","../node_modules/uuid/dist/esm/validate.js","../node_modules/uuid/dist/esm/parse.js","../node_modules/uuid/dist/esm/stringify.js","../node_modules/uuid/dist/esm/rng.js","../node_modules/uuid/dist/esm/v35.js","../node_modules/uuid/dist/esm/native.js","../node_modules/uuid/dist/esm/v4.js","../node_modules/uuid/dist/esm/sha1.js","../node_modules/uuid/dist/esm/v5.js"],"sourcesContent":["import { logger, UUID } from '@elizaos/core';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport { KnowledgeService } from './service.ts';\nimport { AddKnowledgeOptions } from './types.ts';\nimport { isBinaryContentType } from './utils.ts';\n\n/**\n * Get the knowledge path from environment or default to ./docs\n */\nexport function getKnowledgePath(): string {\n const envPath = process.env.KNOWLEDGE_PATH;\n\n if (envPath) {\n // Resolve relative paths from current working directory\n const resolvedPath = path.resolve(envPath);\n\n if (!fs.existsSync(resolvedPath)) {\n logger.warn(`Knowledge path from environment variable does not exist: ${resolvedPath}`);\n logger.warn('Please create the directory or update KNOWLEDGE_PATH environment variable');\n }\n\n return resolvedPath;\n }\n\n // Default to docs folder in current working directory\n const defaultPath = path.join(process.cwd(), 'docs');\n\n if (!fs.existsSync(defaultPath)) {\n logger.info(`Default docs folder does not exist at: ${defaultPath}`);\n logger.info('To use the knowledge plugin, either:');\n logger.info('1. Create a \"docs\" folder in your project root');\n logger.info('2. Set KNOWLEDGE_PATH environment variable to your documents folder');\n }\n\n return defaultPath;\n}\n\n/**\n * Load documents from the knowledge path\n */\nexport async function loadDocsFromPath(\n service: KnowledgeService,\n agentId: UUID,\n worldId?: UUID\n): Promise<{ total: number; successful: number; failed: number }> {\n const docsPath = getKnowledgePath();\n\n if (!fs.existsSync(docsPath)) {\n logger.warn(`Knowledge path does not exist: ${docsPath}`);\n return { total: 0, successful: 0, failed: 0 };\n }\n\n logger.info(`Loading documents from: ${docsPath}`);\n\n // Get all files recursively\n const files = getAllFiles(docsPath);\n\n if (files.length === 0) {\n logger.info('No files found in knowledge path');\n return { total: 0, successful: 0, failed: 0 };\n }\n\n logger.info(`Found ${files.length} files to process`);\n\n let successful = 0;\n let failed = 0;\n\n for (const filePath of files) {\n try {\n const fileName = path.basename(filePath);\n const fileExt = path.extname(filePath).toLowerCase();\n\n // Skip hidden files and directories\n if (fileName.startsWith('.')) {\n continue;\n }\n\n // Determine content type\n const contentType = getContentType(fileExt);\n\n // Skip unsupported file types\n if (!contentType) {\n logger.debug(`Skipping unsupported file type: ${filePath}`);\n continue;\n }\n\n // Read file\n const fileBuffer = fs.readFileSync(filePath);\n\n // Check if file is binary using the same logic as the service\n const isBinary = isBinaryContentType(contentType, fileName);\n\n // For text files, read as UTF-8 string directly\n // For binary files, convert to base64\n const content = isBinary ? 
fileBuffer.toString('base64') : fileBuffer.toString('utf-8');\n\n // Create knowledge options\n const knowledgeOptions: AddKnowledgeOptions = {\n clientDocumentId: '' as UUID, // Will be generated by the service based on content\n contentType,\n originalFilename: fileName,\n worldId: worldId || agentId,\n content,\n roomId: agentId,\n entityId: agentId,\n };\n\n // Process the document\n logger.debug(`Processing document: ${fileName}`);\n const result = await service.addKnowledge(knowledgeOptions);\n\n logger.info(`Successfully processed ${fileName}: ${result.fragmentCount} fragments created`);\n successful++;\n } catch (error) {\n logger.error(`Failed to process file ${filePath}:`, error);\n failed++;\n }\n }\n\n logger.info(\n `Document loading complete: ${successful} successful, ${failed} failed out of ${files.length} total`\n );\n\n return {\n total: files.length,\n successful,\n failed,\n };\n}\n\n/**\n * Recursively get all files in a directory\n */\nfunction getAllFiles(dirPath: string, files: string[] = []): string[] {\n try {\n const entries = fs.readdirSync(dirPath, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dirPath, entry.name);\n\n if (entry.isDirectory()) {\n // Skip node_modules and other common directories\n if (!['node_modules', '.git', '.vscode', 'dist', 'build'].includes(entry.name)) {\n getAllFiles(fullPath, files);\n }\n } else if (entry.isFile()) {\n files.push(fullPath);\n }\n }\n } catch (error) {\n logger.error(`Error reading directory ${dirPath}:`, error);\n }\n\n return files;\n}\n\n/**\n * Get content type based on file extension\n */\nfunction getContentType(extension: string): string | null {\n const contentTypes: Record<string, string> = {\n // Text documents\n '.txt': 'text/plain',\n '.md': 'text/markdown',\n '.markdown': 'text/markdown',\n '.tson': 'text/plain',\n '.xml': 'application/xml',\n '.csv': 'text/csv',\n '.tsv': 'text/tab-separated-values',\n '.log': 'text/plain',\n\n // Web files\n '.html': 'text/html',\n '.htm': 'text/html',\n '.css': 'text/css',\n '.scss': 'text/x-scss',\n '.sass': 'text/x-sass',\n '.less': 'text/x-less',\n\n // JavaScript/TypeScript\n '.js': 'text/javascript',\n '.jsx': 'text/javascript',\n '.ts': 'text/typescript',\n '.tsx': 'text/typescript',\n '.mjs': 'text/javascript',\n '.cjs': 'text/javascript',\n '.vue': 'text/x-vue',\n '.svelte': 'text/x-svelte',\n '.astro': 'text/x-astro',\n\n // Python\n '.py': 'text/x-python',\n '.pyw': 'text/x-python',\n '.pyi': 'text/x-python',\n\n // Java/Kotlin/Scala\n '.java': 'text/x-java',\n '.kt': 'text/x-kotlin',\n '.kts': 'text/x-kotlin',\n '.scala': 'text/x-scala',\n\n // C/C++/C#\n '.c': 'text/x-c',\n '.cpp': 'text/x-c++',\n '.cc': 'text/x-c++',\n '.cxx': 'text/x-c++',\n '.h': 'text/x-c',\n '.hpp': 'text/x-c++',\n '.cs': 'text/x-csharp',\n\n // Other languages\n '.php': 'text/x-php',\n '.rb': 'text/x-ruby',\n '.go': 'text/x-go',\n '.rs': 'text/x-rust',\n '.swift': 'text/x-swift',\n '.r': 'text/x-r',\n '.R': 'text/x-r',\n '.m': 'text/x-objectivec',\n '.mm': 'text/x-objectivec',\n '.clj': 'text/x-clojure',\n '.cljs': 'text/x-clojure',\n '.ex': 'text/x-elixir',\n '.exs': 'text/x-elixir',\n '.lua': 'text/x-lua',\n '.pl': 'text/x-perl',\n '.pm': 'text/x-perl',\n '.dart': 'text/x-dart',\n '.hs': 'text/x-haskell',\n '.elm': 'text/x-elm',\n '.ml': 'text/x-ocaml',\n '.fs': 'text/x-fsharp',\n '.fsx': 'text/x-fsharp',\n '.vb': 'text/x-vb',\n '.pas': 'text/x-pascal',\n '.d': 'text/x-d',\n '.nim': 'text/x-nim',\n '.zig': 'text/x-zig',\n '.jl': 
'text/x-julia',\n '.tcl': 'text/x-tcl',\n '.awk': 'text/x-awk',\n '.sed': 'text/x-sed',\n\n // Shell scripts\n '.sh': 'text/x-sh',\n '.bash': 'text/x-sh',\n '.zsh': 'text/x-sh',\n '.fish': 'text/x-fish',\n '.ps1': 'text/x-powershell',\n '.bat': 'text/x-batch',\n '.cmd': 'text/x-batch',\n\n // Config files\n '.json': 'application/json',\n '.yaml': 'text/x-yaml',\n '.yml': 'text/x-yaml',\n '.toml': 'text/x-toml',\n '.ini': 'text/x-ini',\n '.cfg': 'text/x-ini',\n '.conf': 'text/x-ini',\n '.env': 'text/plain',\n '.gitignore': 'text/plain',\n '.dockerignore': 'text/plain',\n '.editorconfig': 'text/plain',\n '.properties': 'text/x-properties',\n\n // Database\n '.sql': 'text/x-sql',\n\n // Binary documents\n '.pdf': 'application/pdf',\n '.doc': 'application/msword',\n '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',\n };\n\n return contentTypes[extension] || null;\n}\n","import { Buffer } from 'node:buffer';\nimport * as mammoth from 'mammoth';\nimport { logger } from '@elizaos/core';\nimport { getDocument, PDFDocumentProxy } from 'pdfjs-dist/legacy/build/pdf.mjs';\nimport type { TextItem, TextMarkedContent } from 'pdfjs-dist/types/src/display/api';\nimport { createHash } from 'crypto';\nimport { v5 as uuidv5 } from 'uuid';\n\nconst PLAIN_TEXT_CONTENT_TYPES = [\n 'application/typescript',\n 'text/typescript',\n 'text/x-python',\n 'application/x-python-code',\n 'application/yaml',\n 'text/yaml',\n 'application/x-yaml',\n 'application/json',\n 'text/markdown',\n 'text/csv',\n];\n\nconst MAX_FALLBACK_SIZE_BYTES = 5 * 1024 * 1024; // 5 MB\nconst BINARY_CHECK_BYTES = 1024; // Check first 1KB for binary indicators\n\n/**\n * Extracts text content from a file buffer based on its content type.\n * Supports DOCX, plain text, and provides a fallback for unknown types.\n * PDF should be handled by `convertPdfToTextFromBuffer`.\n */\nexport async function extractTextFromFileBuffer(\n fileBuffer: Buffer,\n contentType: string,\n originalFilename: string // For logging and context\n): Promise<string> {\n const lowerContentType = contentType.toLowerCase();\n logger.debug(\n `[TextUtil] Attempting to extract text from ${originalFilename} (type: ${contentType})`\n );\n\n if (\n lowerContentType === 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'\n ) {\n logger.debug(`[TextUtil] Extracting text from DOCX ${originalFilename} via mammoth.`);\n try {\n const result = await mammoth.extractRawText({ buffer: fileBuffer });\n logger.debug(\n `[TextUtil] DOCX text extraction complete for ${originalFilename}. Text length: ${result.value.length}`\n );\n return result.value;\n } catch (docxError: any) {\n const errorMsg = `[TextUtil] Failed to parse DOCX file ${originalFilename}: ${docxError.message}`;\n logger.error(errorMsg, docxError.stack);\n throw new Error(errorMsg);\n }\n } else if (\n lowerContentType === 'application/msword' ||\n originalFilename.toLowerCase().endsWith('.doc')\n ) {\n // For .doc files, we'll store the content as-is, and just add a message\n // The frontend will handle the display appropriately\n logger.debug(`[TextUtil] Handling Microsoft Word .doc file: ${originalFilename}`);\n\n // We'll add a descriptive message as a placeholder\n return `[Microsoft Word Document: ${originalFilename}]\\n\\nThis document was indexed for search but cannot be displayed directly in the browser. 
The original document content is preserved for retrieval purposes.`;\n } else if (\n lowerContentType.startsWith('text/') ||\n PLAIN_TEXT_CONTENT_TYPES.includes(lowerContentType)\n ) {\n logger.debug(\n `[TextUtil] Extracting text from plain text compatible file ${originalFilename} (type: ${contentType})`\n );\n return fileBuffer.toString('utf-8');\n } else {\n logger.warn(\n `[TextUtil] Unsupported content type: \"${contentType}\" for ${originalFilename}. Attempting fallback to plain text.`\n );\n\n if (fileBuffer.length > MAX_FALLBACK_SIZE_BYTES) {\n const sizeErrorMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) exceeds maximum size for fallback (${MAX_FALLBACK_SIZE_BYTES} bytes). Cannot process as plain text.`;\n logger.error(sizeErrorMsg);\n throw new Error(sizeErrorMsg);\n }\n\n // Simple binary detection: check for null bytes in the first N bytes\n const initialBytes = fileBuffer.subarray(0, Math.min(fileBuffer.length, BINARY_CHECK_BYTES));\n if (initialBytes.includes(0)) {\n // Check for NUL byte\n const binaryHeuristicMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) appears to be binary based on initial byte check. Cannot process as plain text.`;\n logger.error(binaryHeuristicMsg);\n throw new Error(binaryHeuristicMsg);\n }\n\n try {\n const textContent = fileBuffer.toString('utf-8');\n if (textContent.includes('\\ufffd')) {\n // Replacement character, indicating potential binary or wrong encoding\n const binaryErrorMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) seems to be binary or has encoding issues after fallback to plain text (detected \\ufffd).`;\n logger.error(binaryErrorMsg);\n throw new Error(binaryErrorMsg); // Throw error for likely binary content\n }\n logger.debug(\n `[TextUtil] Successfully processed unknown type ${contentType} as plain text after fallback for ${originalFilename}.`\n );\n return textContent;\n } catch (fallbackError: any) {\n // If the initial toString failed or if we threw due to \\ufffd\n const finalErrorMsg = `[TextUtil] Unsupported content type: ${contentType} for ${originalFilename}. Fallback to plain text also failed or indicated binary content.`;\n logger.error(finalErrorMsg, fallbackError.message ? 
fallbackError.stack : undefined);\n throw new Error(finalErrorMsg);\n }\n }\n}\n\n/**\n * Converts a PDF file buffer to text content.\n * Requires pdfjs-dist to be properly configured, especially its worker.\n */\n/**\n * Converts a PDF Buffer to text with enhanced formatting preservation.\n *\n * @param {Buffer} pdfBuffer - The PDF Buffer to convert to text\n * @param {string} [filename] - Optional filename for logging purposes\n * @returns {Promise<string>} Text content of the PDF\n */\nexport async function convertPdfToTextFromBuffer(\n pdfBuffer: Buffer,\n filename?: string\n): Promise<string> {\n const docName = filename || 'unnamed-document';\n logger.debug(`[PdfService] Starting conversion for ${docName}`);\n\n try {\n const uint8Array = new Uint8Array(pdfBuffer);\n const pdf: PDFDocumentProxy = await getDocument({ data: uint8Array }).promise;\n const numPages = pdf.numPages;\n const textPages: string[] = [];\n\n for (let pageNum = 1; pageNum <= numPages; pageNum++) {\n logger.debug(`[PdfService] Processing page ${pageNum}/${numPages}`);\n const page = await pdf.getPage(pageNum);\n const textContent = await page.getTextContent();\n\n // Group text items by their y-position to maintain line structure\n const lineMap = new Map<number, TextItem[]>();\n\n textContent.items.filter(isTextItem).forEach((item) => {\n // Round y-position to account for small variations in the same line\n const yPos = Math.round(item.transform[5]);\n if (!lineMap.has(yPos)) {\n lineMap.set(yPos, []);\n }\n lineMap.get(yPos)!.push(item);\n });\n\n // Sort lines by y-position (top to bottom) and items within lines by x-position (left to right)\n const sortedLines = Array.from(lineMap.entries())\n .sort((a, b) => b[0] - a[0]) // Reverse sort for top-to-bottom\n .map(([_, items]) =>\n items\n .sort((a, b) => a.transform[4] - b.transform[4])\n .map((item) => item.str)\n .join(' ')\n );\n\n textPages.push(sortedLines.join('\\n'));\n }\n\n const fullText = textPages.join('\\n\\n').replace(/\\s+/g, ' ').trim();\n logger.debug(`[PdfService] Conversion complete for ${docName}, length: ${fullText.length}`);\n return fullText;\n } catch (error: any) {\n logger.error(`[PdfService] Error converting PDF ${docName}:`, error.message);\n throw new Error(`Failed to convert PDF to text: ${error.message}`);\n }\n}\n\n/**\n * Determines if a file should be treated as binary based on its content type and filename\n * @param contentType MIME type of the file\n * @param filename Original filename\n * @returns True if the file should be treated as binary (base64 encoded)\n */\nexport function isBinaryContentType(contentType: string, filename: string): boolean {\n // Text-based content types that should NOT be treated as binary\n const textContentTypes = [\n 'text/',\n 'application/json',\n 'application/xml',\n 'application/javascript',\n 'application/typescript',\n 'application/x-yaml',\n 'application/x-sh',\n ];\n\n // Check if it's a text-based MIME type\n const isTextMimeType = textContentTypes.some((type) => contentType.includes(type));\n if (isTextMimeType) {\n return false;\n }\n\n // Binary content types\n const binaryContentTypes = [\n 'application/pdf',\n 'application/msword',\n 'application/vnd.openxmlformats-officedocument',\n 'application/vnd.ms-excel',\n 'application/vnd.ms-powerpoint',\n 'application/zip',\n 'application/x-zip-compressed',\n 'application/octet-stream',\n 'image/',\n 'audio/',\n 'video/',\n ];\n\n // Check MIME type\n const isBinaryMimeType = binaryContentTypes.some((type) => 
contentType.includes(type));\n\n if (isBinaryMimeType) {\n return true;\n }\n\n // Check file extension as fallback\n const fileExt = filename.split('.').pop()?.toLowerCase() || '';\n\n // Text file extensions that should NOT be treated as binary\n const textExtensions = [\n 'txt',\n 'md',\n 'markdown',\n 'json',\n 'xml',\n 'html',\n 'htm',\n 'css',\n 'js',\n 'ts',\n 'jsx',\n 'tsx',\n 'yaml',\n 'yml',\n 'toml',\n 'ini',\n 'cfg',\n 'conf',\n 'sh',\n 'bash',\n 'zsh',\n 'fish',\n 'py',\n 'rb',\n 'go',\n 'rs',\n 'java',\n 'c',\n 'cpp',\n 'h',\n 'hpp',\n 'cs',\n 'php',\n 'sql',\n 'r',\n 'swift',\n 'kt',\n 'scala',\n 'clj',\n 'ex',\n 'exs',\n 'vim',\n 'env',\n 'gitignore',\n 'dockerignore',\n 'editorconfig',\n 'log',\n 'csv',\n 'tsv',\n 'properties',\n 'gradle',\n 'sbt',\n 'makefile',\n 'dockerfile',\n 'vagrantfile',\n 'gemfile',\n 'rakefile',\n 'podfile',\n 'csproj',\n 'vbproj',\n 'fsproj',\n 'sln',\n 'pom',\n ];\n\n // If it's a known text extension, it's not binary\n if (textExtensions.includes(fileExt)) {\n return false;\n }\n\n // Binary file extensions\n const binaryExtensions = [\n 'pdf',\n 'docx',\n 'doc',\n 'xls',\n 'xlsx',\n 'ppt',\n 'pptx',\n 'zip',\n 'rar',\n '7z',\n 'tar',\n 'gz',\n 'bz2',\n 'xz',\n 'jpg',\n 'jpeg',\n 'png',\n 'gif',\n 'bmp',\n 'svg',\n 'ico',\n 'webp',\n 'mp3',\n 'mp4',\n 'avi',\n 'mov',\n 'wmv',\n 'flv',\n 'wav',\n 'flac',\n 'ogg',\n 'exe',\n 'dll',\n 'so',\n 'dylib',\n 'bin',\n 'dat',\n 'db',\n 'sqlite',\n ];\n\n return binaryExtensions.includes(fileExt);\n}\n\n/**\n * Check if the input is a TextItem.\n *\n * @param item - The input item to check.\n * @returns A boolean indicating if the input is a TextItem.\n */\nfunction isTextItem(item: TextItem | TextMarkedContent): item is TextItem {\n return 'str' in item;\n}\n\n/**\n * Normalizes an S3 URL by removing query parameters (signature, etc.)\n * This allows for consistent URL comparison regardless of presigned URL parameters\n * @param url The S3 URL to normalize\n * @returns The normalized URL containing only the origin and pathname\n */\nexport function normalizeS3Url(url: string): string {\n try {\n const urlObj = new URL(url);\n return `${urlObj.origin}${urlObj.pathname}`;\n } catch (error) {\n logger.warn(`[URL NORMALIZER] Failed to parse URL: ${url}. 
Returning original.`);\n return url;\n }\n}\n\n/**\n * Fetches content from a URL and converts it to base64 format\n * @param url The URL to fetch content from\n * @returns An object containing the base64 content and content type\n */\nexport async function fetchUrlContent(\n url: string\n): Promise<{ content: string; contentType: string }> {\n logger.debug(`[URL FETCHER] Fetching content from URL: ${url}`);\n\n try {\n // Fetch the URL with timeout\n const controller = new AbortController();\n const timeoutId = setTimeout(() => controller.abort(), 30000); // 30 second timeout\n\n const response = await fetch(url, {\n signal: controller.signal,\n headers: {\n 'User-Agent': 'Eliza-Knowledge-Plugin/1.0',\n },\n });\n clearTimeout(timeoutId);\n\n if (!response.ok) {\n throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`);\n }\n\n // Get content type from response headers\n const contentType = response.headers.get('content-type') || 'application/octet-stream';\n logger.debug(`[URL FETCHER] Content type from server: ${contentType} for URL: ${url}`);\n\n // Get content as ArrayBuffer\n const arrayBuffer = await response.arrayBuffer();\n const buffer = Buffer.from(arrayBuffer);\n\n // Convert to base64\n const base64Content = buffer.toString('base64');\n\n logger.debug(\n `[URL FETCHER] Successfully fetched content from URL: ${url} (${buffer.length} bytes)`\n );\n return {\n content: base64Content,\n contentType,\n };\n } catch (error: any) {\n logger.error(`[URL FETCHER] Error fetching content from URL ${url}: ${error.message}`);\n throw new Error(`Failed to fetch content from URL: ${error.message}`);\n }\n}\n\nexport function looksLikeBase64(content?: string | null): boolean {\n // const base64Regex = /^[A-Za-z0-9+/]+=*$/; // This is the old regex\n // https://stackoverflow.com/questions/475074/regex-to-parse-or-validate-base64-data\n const base64Regex = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/;\n return (content && content.length > 0 && base64Regex.test(content.replace(/\\s/g, ''))) || false;\n}\n\n/**\n * Generates a consistent UUID for a document based on its content.\n * Takes the first N characters/lines of the document and creates a hash-based UUID.\n * This ensures the same document always gets the same ID, preventing duplicates.\n *\n * @param content The document content (text or base64)\n * @param agentId The agent ID to namespace the document\n * @param options Optional configuration for ID generation\n * @returns A deterministic UUID based on the content\n */\nexport function generateContentBasedId(\n content: string,\n agentId: string,\n options?: {\n maxChars?: number;\n includeFilename?: string;\n contentType?: string;\n }\n): string {\n const {\n maxChars = 2000, // Use first 2000 chars by default\n includeFilename,\n contentType,\n } = options || {};\n\n // For consistent hashing, we need to normalize the content\n let contentForHashing: string;\n\n // If it's base64, decode it first to get actual content\n if (looksLikeBase64(content)) {\n try {\n const decoded = Buffer.from(content, 'base64').toString('utf8');\n // Check if decoded content is readable text\n if (!decoded.includes('\\ufffd') || contentType?.includes('pdf')) {\n // For PDFs and other binary files, use a portion of the base64 itself\n contentForHashing = content.slice(0, maxChars);\n } else {\n // For text files that were base64 encoded, use the decoded text\n contentForHashing = decoded.slice(0, maxChars);\n }\n } catch {\n // If decoding fails, use the 
base64 string itself\n contentForHashing = content.slice(0, maxChars);\n }\n } else {\n // Plain text content\n contentForHashing = content.slice(0, maxChars);\n }\n\n // Normalize whitespace and line endings for consistency\n contentForHashing = contentForHashing\n .replace(/\\r\\n/g, '\\n') // Normalize line endings\n .replace(/\\r/g, '\\n')\n .trim();\n\n // Create a deterministic string that includes all relevant factors\n const componentsToHash = [\n agentId, // Namespace by agent\n contentForHashing, // The actual content\n includeFilename || '', // Optional filename for additional uniqueness\n ]\n .filter(Boolean)\n .join('::');\n\n // Create SHA-256 hash\n const hash = createHash('sha256').update(componentsToHash).digest('hex');\n\n // Use a namespace UUID for documents (you can define this as a constant)\n const DOCUMENT_NAMESPACE = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; // Standard namespace UUID\n\n // Generate UUID v5 from the hash (deterministic)\n const uuid = uuidv5(hash, DOCUMENT_NAMESPACE);\n\n logger.debug(\n `[generateContentBasedId] Generated UUID ${uuid} for document with content hash ${hash.slice(0, 8)}...`\n );\n\n return uuid;\n}\n\n/**\n * Extracts the first N lines from text content for ID generation\n * @param content The full text content\n * @param maxLines Maximum number of lines to extract\n * @returns The extracted lines as a single string\n */\nexport function extractFirstLines(content: string, maxLines: number = 10): string {\n const lines = content.split(/\\r?\\n/);\n return lines.slice(0, maxLines).join('\\n');\n}\n","export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-8][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000|ffffffff-ffff-ffff-ffff-ffffffffffff)$/i;\n","import REGEX from './regex.js';\nfunction validate(uuid) {\n return typeof uuid === 'string' && REGEX.test(uuid);\n}\nexport default validate;\n","import validate from './validate.js';\nfunction parse(uuid) {\n if (!validate(uuid)) {\n throw TypeError('Invalid UUID');\n }\n let v;\n return Uint8Array.of((v = parseInt(uuid.slice(0, 8), 16)) >>> 24, (v >>> 16) & 0xff, (v >>> 8) & 0xff, v & 0xff, (v = parseInt(uuid.slice(9, 13), 16)) >>> 8, v & 0xff, (v = parseInt(uuid.slice(14, 18), 16)) >>> 8, v & 0xff, (v = parseInt(uuid.slice(19, 23), 16)) >>> 8, v & 0xff, ((v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000) & 0xff, (v / 0x100000000) & 0xff, (v >>> 24) & 0xff, (v >>> 16) & 0xff, (v >>> 8) & 0xff, v & 0xff);\n}\nexport default parse;\n","import validate from './validate.js';\nconst byteToHex = [];\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).slice(1));\n}\nexport function unsafeStringify(arr, offset = 0) {\n return (byteToHex[arr[offset + 0]] +\n byteToHex[arr[offset + 1]] +\n byteToHex[arr[offset + 2]] +\n byteToHex[arr[offset + 3]] +\n '-' +\n byteToHex[arr[offset + 4]] +\n byteToHex[arr[offset + 5]] +\n '-' +\n byteToHex[arr[offset + 6]] +\n byteToHex[arr[offset + 7]] +\n '-' +\n byteToHex[arr[offset + 8]] +\n byteToHex[arr[offset + 9]] +\n '-' +\n byteToHex[arr[offset + 10]] +\n byteToHex[arr[offset + 11]] +\n byteToHex[arr[offset + 12]] +\n byteToHex[arr[offset + 13]] +\n byteToHex[arr[offset + 14]] +\n byteToHex[arr[offset + 15]]).toLowerCase();\n}\nfunction stringify(arr, offset = 0) {\n const uuid = unsafeStringify(arr, offset);\n if (!validate(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n return uuid;\n}\nexport default stringify;\n","import { randomFillSync } from 'crypto';\nconst rnds8Pool = new 
Uint8Array(256);\nlet poolPtr = rnds8Pool.length;\nexport default function rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n randomFillSync(rnds8Pool);\n poolPtr = 0;\n }\n return rnds8Pool.slice(poolPtr, (poolPtr += 16));\n}\n","import parse from './parse.js';\nimport { unsafeStringify } from './stringify.js';\nexport function stringToBytes(str) {\n str = unescape(encodeURIComponent(str));\n const bytes = new Uint8Array(str.length);\n for (let i = 0; i < str.length; ++i) {\n bytes[i] = str.charCodeAt(i);\n }\n return bytes;\n}\nexport const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexport const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexport default function v35(version, hash, value, namespace, buf, offset) {\n const valueBytes = typeof value === 'string' ? stringToBytes(value) : value;\n const namespaceBytes = typeof namespace === 'string' ? parse(namespace) : namespace;\n if (typeof namespace === 'string') {\n namespace = parse(namespace);\n }\n if (namespace?.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n }\n let bytes = new Uint8Array(16 + valueBytes.length);\n bytes.set(namespaceBytes);\n bytes.set(valueBytes, namespaceBytes.length);\n bytes = hash(bytes);\n bytes[6] = (bytes[6] & 0x0f) | version;\n bytes[8] = (bytes[8] & 0x3f) | 0x80;\n if (buf) {\n offset = offset || 0;\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n return buf;\n }\n return unsafeStringify(bytes);\n}\n","import { randomUUID } from 'crypto';\nexport default { randomUUID };\n","import native from './native.js';\nimport rng from './rng.js';\nimport { unsafeStringify } from './stringify.js';\nfunction v4(options, buf, offset) {\n if (native.randomUUID && !buf && !options) {\n return native.randomUUID();\n }\n options = options || {};\n const rnds = options.random || (options.rng || rng)();\n rnds[6] = (rnds[6] & 0x0f) | 0x40;\n rnds[8] = (rnds[8] & 0x3f) | 0x80;\n if (buf) {\n offset = offset || 0;\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n return buf;\n }\n return unsafeStringify(rnds);\n}\nexport default v4;\n","import { createHash } from 'crypto';\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n }\n else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n return createHash('sha1').update(bytes).digest();\n}\nexport default sha1;\n","import sha1 from './sha1.js';\nimport v35, { DNS, URL } from './v35.js';\nexport { DNS, URL } from './v35.js';\nfunction v5(value, namespace, buf, offset) {\n return v35(0x50, sha1, value, namespace, buf, offset);\n}\nv5.DNS = DNS;\nv5.URL = URL;\nexport default 
v5;\n"],"mappings":";AAAA,SAAS,UAAAA,eAAoB;AAC7B,YAAY,QAAQ;AACpB,YAAY,UAAU;;;ACFtB,SAAS,UAAAC,eAAc;AACvB,YAAY,aAAa;AACzB,SAAS,cAAc;AACvB,SAAS,mBAAqC;AAE9C,SAAS,cAAAC,mBAAkB;;;ACL3B,IAAO,gBAAQ;;;ACCf,SAAS,SAAS,MAAM;AACpB,SAAO,OAAO,SAAS,YAAY,cAAM,KAAK,IAAI;AACtD;AACA,IAAO,mBAAQ;;;ACHf,SAAS,MAAM,MAAM;AACjB,MAAI,CAAC,iBAAS,IAAI,GAAG;AACjB,UAAM,UAAU,cAAc;AAAA,EAClC;AACA,MAAI;AACJ,SAAO,WAAW,IAAI,IAAI,SAAS,KAAK,MAAM,GAAG,CAAC,GAAG,EAAE,OAAO,IAAK,MAAM,KAAM,KAAO,MAAM,IAAK,KAAM,IAAI,MAAO,IAAI,SAAS,KAAK,MAAM,GAAG,EAAE,GAAG,EAAE,OAAO,GAAG,IAAI,MAAO,IAAI,SAAS,KAAK,MAAM,IAAI,EAAE,GAAG,EAAE,OAAO,GAAG,IAAI,MAAO,IAAI,SAAS,KAAK,MAAM,IAAI,EAAE,GAAG,EAAE,OAAO,GAAG,IAAI,MAAQ,IAAI,SAAS,KAAK,MAAM,IAAI,EAAE,GAAG,EAAE,KAAK,gBAAiB,KAAO,IAAI,aAAe,KAAO,MAAM,KAAM,KAAO,MAAM,KAAM,KAAO,MAAM,IAAK,KAAM,IAAI,GAAI;AACvb;AACA,IAAO,gBAAQ;;;ACPf,IAAM,YAAY,CAAC;AACnB,SAAS,IAAI,GAAG,IAAI,KAAK,EAAE,GAAG;AAC1B,YAAU,MAAM,IAAI,KAAO,SAAS,EAAE,EAAE,MAAM,CAAC,CAAC;AACpD;AACO,SAAS,gBAAgB,KAAK,SAAS,GAAG;AAC7C,UAAQ,UAAU,IAAI,SAAS,CAAC,CAAC,IAC7B,UAAU,IAAI,SAAS,CAAC,CAAC,IACzB,UAAU,IAAI,SAAS,CAAC,CAAC,IACzB,UAAU,IAAI,SAAS,CAAC,CAAC,IACzB,MACA,UAAU,IAAI,SAAS,CAAC,CAAC,IACzB,UAAU,IAAI,SAAS,CAAC,CAAC,IACzB,MACA,UAAU,IAAI,SAAS,CAAC,CAAC,IACzB,UAAU,IAAI,SAAS,CAAC,CAAC,IACzB,MACA,UAAU,IAAI,SAAS,CAAC,CAAC,IACzB,UAAU,IAAI,SAAS,CAAC,CAAC,IACzB,MACA,UAAU,IAAI,SAAS,EAAE,CAAC,IAC1B,UAAU,IAAI,SAAS,EAAE,CAAC,IAC1B,UAAU,IAAI,SAAS,EAAE,CAAC,IAC1B,UAAU,IAAI,SAAS,EAAE,CAAC,IAC1B,UAAU,IAAI,SAAS,EAAE,CAAC,IAC1B,UAAU,IAAI,SAAS,EAAE,CAAC,GAAG,YAAY;AACjD;;;AC1BA,SAAS,sBAAsB;AAC/B,IAAM,YAAY,IAAI,WAAW,GAAG;AACpC,IAAI,UAAU,UAAU;AACT,SAAR,MAAuB;AAC1B,MAAI,UAAU,UAAU,SAAS,IAAI;AACjC,mBAAe,SAAS;AACxB,cAAU;AAAA,EACd;AACA,SAAO,UAAU,MAAM,SAAU,WAAW,EAAG;AACnD;;;ACPO,SAAS,cAAc,KAAK;AAC/B,QAAM,SAAS,mBAAmB,GAAG,CAAC;AACtC,QAAM,QAAQ,IAAI,WAAW,IAAI,MAAM;AACvC,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,EAAE,GAAG;AACjC,UAAM,CAAC,IAAI,IAAI,WAAW,CAAC;AAAA,EAC/B;AACA,SAAO;AACX;AACO,IAAM,MAAM;AACZ,IAAMC,OAAM;AACJ,SAAR,IAAqB,SAAS,MAAM,OAAO,WAAW,KAAK,QAAQ;AACtE,QAAM,aAAa,OAAO,UAAU,WAAW,cAAc,KAAK,IAAI;AACtE,QAAM,iBAAiB,OAAO,cAAc,WAAW,cAAM,SAAS,IAAI;AAC1E,MAAI,OAAO,cAAc,UAAU;AAC/B,gBAAY,cAAM,SAAS;AAAA,EAC/B;AACA,MAAI,WAAW,WAAW,IAAI;AAC1B,UAAM,UAAU,kEAAkE;AAAA,EACtF;AACA,MAAI,QAAQ,IAAI,WAAW,KAAK,WAAW,MAAM;AACjD,QAAM,IAAI,cAAc;AACxB,QAAM,IAAI,YAAY,eAAe,MAAM;AAC3C,UAAQ,KAAK,KAAK;AAClB,QAAM,CAAC,IAAK,MAAM,CAAC,IAAI,KAAQ;AAC/B,QAAM,CAAC,IAAK,MAAM,CAAC,IAAI,KAAQ;AAC/B,MAAI,KAAK;AACL,aAAS,UAAU;AACnB,aAAS,IAAI,GAAG,IAAI,IAAI,EAAE,GAAG;AACzB,UAAI,SAAS,CAAC,IAAI,MAAM,CAAC;AAAA,IAC7B;AACA,WAAO;AAAA,EACX;AACA,SAAO,gBAAgB,KAAK;AAChC;;;ACnCA,SAAS,kBAAkB;AAC3B,IAAO,iBAAQ,EAAE,WAAW;;;ACE5B,SAAS,GAAG,SAAS,KAAK,QAAQ;AAC9B,MAAI,eAAO,cAAc,CAAC,OAAO,CAAC,SAAS;AACvC,WAAO,eAAO,WAAW;AAAA,EAC7B;AACA,YAAU,WAAW,CAAC;AACtB,QAAM,OAAO,QAAQ,WAAW,QAAQ,OAAO,KAAK;AACpD,OAAK,CAAC,IAAK,KAAK,CAAC,IAAI,KAAQ;AAC7B,OAAK,CAAC,IAAK,KAAK,CAAC,IAAI,KAAQ;AAC7B,MAAI,KAAK;AACL,aAAS,UAAU;AACnB,aAAS,IAAI,GAAG,IAAI,IAAI,EAAE,GAAG;AACzB,UAAI,SAAS,CAAC,IAAI,KAAK,CAAC;AAAA,IAC5B;AACA,WAAO;AAAA,EACX;AACA,SAAO,gBAAgB,IAAI;AAC/B;AACA,IAAO,aAAQ;;;ACpBf,SAAS,kBAAkB;AAC3B,SAAS,KAAK,OAAO;AACjB,MAAI,MAAM,QAAQ,KAAK,GAAG;AACtB,YAAQ,OAAO,KAAK,KAAK;AAAA,EAC7B,WACS,OAAO,UAAU,UAAU;AAChC,YAAQ,OAAO,KAAK,OAAO,MAAM;AAAA,EACrC;AACA,SAAO,WAAW,MAAM,EAAE,OAAO,KAAK,EAAE,OAAO;AACnD;AACA,IAAO,eAAQ;;;ACPf,SAAS,GAAG,OAAO,WAAW,KAAK,QAAQ;AACvC,SAAO,IAAI,IAAM,cAAM,OAAO,WAAW,KAAK,MAAM;AACxD;AACA,GAAG,MAAM;AACT,GAAG,MAAMC;AACT,IAAO,aAAQ;;;AVAf,IAAM,2BAA2B;AAAA,EAC/B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,0BAA0B,IAAI,OAAO
;AAC3C,IAAM,qBAAqB;AAO3B,eAAsB,0BACpB,YACA,aACA,kBACiB;AACjB,QAAM,mBAAmB,YAAY,YAAY;AACjD,SAAO;AAAA,IACL,8CAA8C,gBAAgB,WAAW,WAAW;AAAA,EACtF;AAEA,MACE,qBAAqB,2EACrB;AACA,WAAO,MAAM,wCAAwC,gBAAgB,eAAe;AACpF,QAAI;AACF,YAAM,SAAS,MAAc,uBAAe,EAAE,QAAQ,WAAW,CAAC;AAClE,aAAO;AAAA,QACL,gDAAgD,gBAAgB,kBAAkB,OAAO,MAAM,MAAM;AAAA,MACvG;AACA,aAAO,OAAO;AAAA,IAChB,SAAS,WAAgB;AACvB,YAAM,WAAW,wCAAwC,gBAAgB,KAAK,UAAU,OAAO;AAC/F,aAAO,MAAM,UAAU,UAAU,KAAK;AACtC,YAAM,IAAI,MAAM,QAAQ;AAAA,IAC1B;AAAA,EACF,WACE,qBAAqB,wBACrB,iBAAiB,YAAY,EAAE,SAAS,MAAM,GAC9C;AAGA,WAAO,MAAM,iDAAiD,gBAAgB,EAAE;AAGhF,WAAO,6BAA6B,gBAAgB;AAAA;AAAA;AAAA,EACtD,WACE,iBAAiB,WAAW,OAAO,KACnC,yBAAyB,SAAS,gBAAgB,GAClD;AACA,WAAO;AAAA,MACL,8DAA8D,gBAAgB,WAAW,WAAW;AAAA,IACtG;AACA,WAAO,WAAW,SAAS,OAAO;AAAA,EACpC,OAAO;AACL,WAAO;AAAA,MACL,yCAAyC,WAAW,SAAS,gBAAgB;AAAA,IAC/E;AAEA,QAAI,WAAW,SAAS,yBAAyB;AAC/C,YAAM,eAAe,mBAAmB,gBAAgB,WAAW,WAAW,wCAAwC,uBAAuB;AAC7I,aAAO,MAAM,YAAY;AACzB,YAAM,IAAI,MAAM,YAAY;AAAA,IAC9B;AAGA,UAAM,eAAe,WAAW,SAAS,GAAG,KAAK,IAAI,WAAW,QAAQ,kBAAkB,CAAC;AAC3F,QAAI,aAAa,SAAS,CAAC,GAAG;AAE5B,YAAM,qBAAqB,mBAAmB,gBAAgB,WAAW,WAAW;AACpF,aAAO,MAAM,kBAAkB;AAC/B,YAAM,IAAI,MAAM,kBAAkB;AAAA,IACpC;AAEA,QAAI;AACF,YAAM,cAAc,WAAW,SAAS,OAAO;AAC/C,UAAI,YAAY,SAAS,QAAQ,GAAG;AAElC,cAAM,iBAAiB,mBAAmB,gBAAgB,WAAW,WAAW;AAChF,eAAO,MAAM,cAAc;AAC3B,cAAM,IAAI,MAAM,cAAc;AAAA,MAChC;AACA,aAAO;AAAA,QACL,kDAAkD,WAAW,qCAAqC,gBAAgB;AAAA,MACpH;AACA,aAAO;AAAA,IACT,SAAS,eAAoB;AAE3B,YAAM,gBAAgB,wCAAwC,WAAW,QAAQ,gBAAgB;AACjG,aAAO,MAAM,eAAe,cAAc,UAAU,cAAc,QAAQ,MAAS;AACnF,YAAM,IAAI,MAAM,aAAa;AAAA,IAC/B;AAAA,EACF;AACF;AAaA,eAAsB,2BACpB,WACA,UACiB;AACjB,QAAM,UAAU,YAAY;AAC5B,SAAO,MAAM,wCAAwC,OAAO,EAAE;AAE9D,MAAI;AACF,UAAM,aAAa,IAAI,WAAW,SAAS;AAC3C,UAAM,MAAwB,MAAM,YAAY,EAAE,MAAM,WAAW,CAAC,EAAE;AACtE,UAAM,WAAW,IAAI;AACrB,UAAM,YAAsB,CAAC;AAE7B,aAAS,UAAU,GAAG,WAAW,UAAU,WAAW;AACpD,aAAO,MAAM,gCAAgC,OAAO,IAAI,QAAQ,EAAE;AAClE,YAAM,OAAO,MAAM,IAAI,QAAQ,OAAO;AACtC,YAAM,cAAc,MAAM,KAAK,eAAe;AAG9C,YAAM,UAAU,oBAAI,IAAwB;AAE5C,kBAAY,MAAM,OAAO,UAAU,EAAE,QAAQ,CAAC,SAAS;AAErD,cAAM,OAAO,KAAK,MAAM,KAAK,UAAU,CAAC,CAAC;AACzC,YAAI,CAAC,QAAQ,IAAI,IAAI,GAAG;AACtB,kBAAQ,IAAI,MAAM,CAAC,CAAC;AAAA,QACtB;AACA,gBAAQ,IAAI,IAAI,EAAG,KAAK,IAAI;AAAA,MAC9B,CAAC;AAGD,YAAM,cAAc,MAAM,KAAK,QAAQ,QAAQ,CAAC,EAC7C,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,EAC1B;AAAA,QAAI,CAAC,CAAC,GAAG,KAAK,MACb,MACG,KAAK,CAAC,GAAG,MAAM,EAAE,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC,EAC9C,IAAI,CAAC,SAAS,KAAK,GAAG,EACtB,KAAK,GAAG;AAAA,MACb;AAEF,gBAAU,KAAK,YAAY,KAAK,IAAI,CAAC;AAAA,IACvC;AAEA,UAAM,WAAW,UAAU,KAAK,MAAM,EAAE,QAAQ,QAAQ,GAAG,EAAE,KAAK;AAClE,WAAO,MAAM,wCAAwC,OAAO,aAAa,SAAS,MAAM,EAAE;AAC1F,WAAO;AAAA,EACT,SAAS,OAAY;AACnB,WAAO,MAAM,qCAAqC,OAAO,KAAK,MAAM,OAAO;AAC3E,UAAM,IAAI,MAAM,kCAAkC,MAAM,OAAO,EAAE;AAAA,EACnE;AACF;AAQO,SAAS,oBAAoB,aAAqB,UAA2B;AAElF,QAAM,mBAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,iBAAiB,iBAAiB,KAAK,CAAC,SAAS,YAAY,SAAS,IAAI,CAAC;AACjF,MAAI,gBAAgB;AAClB,WAAO;AAAA,EACT;AAGA,QAAM,qBAAqB;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,mBAAmB,mBAAmB,KAAK,CAAC,SAAS,YAAY,SAAS,IAAI,CAAC;AAErF,MAAI,kBAAkB;AACpB,WAAO;AAAA,EACT;AAGA,QAAM,UAAU,SAAS,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY,KAAK;AAG5D,QAAM,iBAAiB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AA
AA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,MAAI,eAAe,SAAS,OAAO,GAAG;AACpC,WAAO;AAAA,EACT;AAGA,QAAM,mBAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,SAAO,iBAAiB,SAAS,OAAO;AAC1C;AAQA,SAAS,WAAW,MAAsD;AACxE,SAAO,SAAS;AAClB;AAQO,SAAS,eAAe,KAAqB;AAClD,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,WAAO,GAAG,OAAO,MAAM,GAAG,OAAO,QAAQ;AAAA,EAC3C,SAAS,OAAO;AACd,WAAO,KAAK,yCAAyC,GAAG,uBAAuB;AAC/E,WAAO;AAAA,EACT;AACF;AAOA,eAAsB,gBACpB,KACmD;AACnD,SAAO,MAAM,4CAA4C,GAAG,EAAE;AAE9D,MAAI;AAEF,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,YAAY,WAAW,MAAM,WAAW,MAAM,GAAG,GAAK;AAE5D,UAAM,WAAW,MAAM,MAAM,KAAK;AAAA,MAChC,QAAQ,WAAW;AAAA,MACnB,SAAS;AAAA,QACP,cAAc;AAAA,MAChB;AAAA,IACF,CAAC;AACD,iBAAa,SAAS;AAEtB,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,IAAI,MAAM,wBAAwB,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,IAClF;AAGA,UAAM,cAAc,SAAS,QAAQ,IAAI,cAAc,KAAK;AAC5D,WAAO,MAAM,2CAA2C,WAAW,aAAa,GAAG,EAAE;AAGrF,UAAM,cAAc,MAAM,SAAS,YAAY;AAC/C,UAAM,SAASC,QAAO,KAAK,WAAW;AAGtC,UAAM,gBAAgB,OAAO,SAAS,QAAQ;AAE9C,WAAO;AAAA,MACL,wDAAwD,GAAG,KAAK,OAAO,MAAM;AAAA,IAC/E;AACA,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,IACF;AAAA,EACF,SAAS,OAAY;AACnB,WAAO,MAAM,iDAAiD,GAAG,KAAK,MAAM,OAAO,EAAE;AACrF,UAAM,IAAI,MAAM,qCAAqC,MAAM,OAAO,EAAE;AAAA,EACtE;AACF;AAEO,SAAS,gBAAgB,SAAkC;AAGhE,QAAM,cAAc;AACpB,SAAQ,WAAW,QAAQ,SAAS,KAAK,YAAY,KAAK,QAAQ,QAAQ,OAAO,EAAE,CAAC,KAAM;AAC5F;AAYO,SAAS,uBACd,SACA,SACA,SAKQ;AACR,QAAM;AAAA,IACJ,WAAW;AAAA;AAAA,IACX;AAAA,IACA;AAAA,EACF,IAAI,WAAW,CAAC;AAGhB,MAAI;AAGJ,MAAI,gBAAgB,OAAO,GAAG;AAC5B,QAAI;AACF,YAAM,UAAUA,QAAO,KAAK,SAAS,QAAQ,EAAE,SAAS,MAAM;AAE9D,UAAI,CAAC,QAAQ,SAAS,QAAQ,KAAK,aAAa,SAAS,KAAK,GAAG;AAE/D,4BAAoB,QAAQ,MAAM,GAAG,QAAQ;AAAA,MAC/C,OAAO;AAEL,4BAAoB,QAAQ,MAAM,GAAG,QAAQ;AAAA,MAC/C;AAAA,IACF,QAAQ;AAEN,0BAAoB,QAAQ,MAAM,GAAG,QAAQ;AAAA,IAC/C;AAAA,EACF,OAAO;AAEL,wBAAoB,QAAQ,MAAM,GAAG,QAAQ;AAAA,EAC/C;AAGA,sBAAoB,kBACjB,QAAQ,SAAS,IAAI,EACrB,QAAQ,OAAO,IAAI,EACnB,KAAK;AAGR,QAAM,mBAAmB;AAAA,IACvB;AAAA;AAAA,IACA;AAAA;AAAA,IACA,mBAAmB;AAAA;AAAA,EACrB,EACG,OAAO,OAAO,EACd,KAAK,IAAI;AAGZ,QAAM,OAAOC,YAAW,QAAQ,EAAE,OAAO,gBAAgB,EAAE,OAAO,KAAK;AAGvE,QAAM,qBAAqB;AAG3B,QAAM,OAAO,WAAO,MAAM,kBAAkB;AAE5C,SAAO;AAAA,IACL,2CAA2C,IAAI,mCAAmC,KAAK,MAAM,GAAG,CAAC,CAAC;AAAA,EACpG;AAEA,SAAO;AACT;;;AD/eO,SAAS,mBAA2B;AACzC,QAAM,UAAU,QAAQ,IAAI;AAE5B,MAAI,SAAS;AAEX,UAAM,eAAoB,aAAQ,OAAO;AAEzC,QAAI,CAAI,cAAW,YAAY,GAAG;AAChC,MAAAC,QAAO,KAAK,4DAA4D,YAAY,EAAE;AACtF,MAAAA,QAAO,KAAK,2EAA2E;AAAA,IACzF;AAEA,WAAO;AAAA,EACT;AAGA,QAAM,cAAmB,UAAK,QAAQ,IAAI,GAAG,MAAM;AAEnD,MAAI,CAAI,cAAW,WAAW,GAAG;AAC/B,IAAAA,QAAO,KAAK,0CAA0C,WAAW,EAAE;AACnE,IAAAA,QAAO,KAAK,sCAAsC;AAClD,IAAAA,QAAO,KAAK,gDAAgD;AAC5D,IAAAA,QAAO,KAAK,qEAAqE;AAAA,EACnF;AAEA,SAAO;AACT;AAKA,eAAsB,iBACpB,SACA,SACA,SACgE;AAChE,QAAM,WAAW,iBAAiB;AAElC,MAAI,CAAI,cAAW,QAAQ,GAAG;AAC5B,IAAAA,QAAO,KAAK,kCAAkC,QAAQ,EAAE;AACxD,WAAO,EAAE,OAAO,GAAG,YAAY,GAAG,QAAQ,EAAE;AAAA,EAC9C;AAEA,EAAAA,QAAO,KAAK,2BAA2B,QAAQ,EAAE;AAGjD,QAAM,QAAQ,YAAY,QAAQ;AAElC,MAAI,MAAM,WAAW,GAAG;AACtB,IAAAA,QAAO,KAAK,kCAAkC;AAC9C,WAA
O,EAAE,OAAO,GAAG,YAAY,GAAG,QAAQ,EAAE;AAAA,EAC9C;AAEA,EAAAA,QAAO,KAAK,SAAS,MAAM,MAAM,mBAAmB;AAEpD,MAAI,aAAa;AACjB,MAAI,SAAS;AAEb,aAAW,YAAY,OAAO;AAC5B,QAAI;AACF,YAAM,WAAgB,cAAS,QAAQ;AACvC,YAAM,UAAe,aAAQ,QAAQ,EAAE,YAAY;AAGnD,UAAI,SAAS,WAAW,GAAG,GAAG;AAC5B;AAAA,MACF;AAGA,YAAM,cAAc,eAAe,OAAO;AAG1C,UAAI,CAAC,aAAa;AAChB,QAAAA,QAAO,MAAM,mCAAmC,QAAQ,EAAE;AAC1D;AAAA,MACF;AAGA,YAAM,aAAgB,gBAAa,QAAQ;AAG3C,YAAM,WAAW,oBAAoB,aAAa,QAAQ;AAI1D,YAAM,UAAU,WAAW,WAAW,SAAS,QAAQ,IAAI,WAAW,SAAS,OAAO;AAGtF,YAAM,mBAAwC;AAAA,QAC5C,kBAAkB;AAAA;AAAA,QAClB;AAAA,QACA,kBAAkB;AAAA,QAClB,SAAS,WAAW;AAAA,QACpB;AAAA,QACA,QAAQ;AAAA,QACR,UAAU;AAAA,MACZ;AAGA,MAAAA,QAAO,MAAM,wBAAwB,QAAQ,EAAE;AAC/C,YAAM,SAAS,MAAM,QAAQ,aAAa,gBAAgB;AAE1D,MAAAA,QAAO,KAAK,0BAA0B,QAAQ,KAAK,OAAO,aAAa,oBAAoB;AAC3F;AAAA,IACF,SAAS,OAAO;AACd,MAAAA,QAAO,MAAM,0BAA0B,QAAQ,KAAK,KAAK;AACzD;AAAA,IACF;AAAA,EACF;AAEA,EAAAA,QAAO;AAAA,IACL,8BAA8B,UAAU,gBAAgB,MAAM,kBAAkB,MAAM,MAAM;AAAA,EAC9F;AAEA,SAAO;AAAA,IACL,OAAO,MAAM;AAAA,IACb;AAAA,IACA;AAAA,EACF;AACF;AAKA,SAAS,YAAY,SAAiB,QAAkB,CAAC,GAAa;AACpE,MAAI;AACF,UAAM,UAAa,eAAY,SAAS,EAAE,eAAe,KAAK,CAAC;AAE/D,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAgB,UAAK,SAAS,MAAM,IAAI;AAE9C,UAAI,MAAM,YAAY,GAAG;AAEvB,YAAI,CAAC,CAAC,gBAAgB,QAAQ,WAAW,QAAQ,OAAO,EAAE,SAAS,MAAM,IAAI,GAAG;AAC9E,sBAAY,UAAU,KAAK;AAAA,QAC7B;AAAA,MACF,WAAW,MAAM,OAAO,GAAG;AACzB,cAAM,KAAK,QAAQ;AAAA,MACrB;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,IAAAA,QAAO,MAAM,2BAA2B,OAAO,KAAK,KAAK;AAAA,EAC3D;AAEA,SAAO;AACT;AAKA,SAAS,eAAe,WAAkC;AACxD,QAAM,eAAuC;AAAA;AAAA,IAE3C,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,aAAa;AAAA,IACb,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,SAAS;AAAA;AAAA,IAGT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,WAAW;AAAA,IACX,UAAU;AAAA;AAAA,IAGV,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,UAAU;AAAA;AAAA,IAGV,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,OAAO;AAAA;AAAA,IAGP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO;AAAA,IACP,UAAU;AAAA,IACV,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS;AAAA,IACT,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,OAAO;AAAA,IACP,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,QAAQ;AAAA;AAAA,IAGR,SAAS;AAAA,IACT,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,iBAAiB;AAAA,IACjB,iBAAiB;AAAA,IACjB,eAAe;AAAA;AAAA,IAGf,QAAQ;AAAA;AAAA,IAGR,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,EACX;AAEA,SAAO,aAAa,SAAS,KAAK;AACpC;","names":["logger","Buffer","createHash","URL","URL","Buffer","createHash","logger"]}
package/dist/{docs-loader-5INCF4VJ.js.map → docs-loader-5H4HRYEE.js.map}
File without changes
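
Note: the removed chunk-QH7GBNKB.js.map embeds the original src/utils.ts source, including the generateContentBasedId helper that derives deterministic document IDs. The sketch below is a condensed, illustrative version of that flow (SHA-256 content hash mapped to a UUIDv5); it omits the original's base64-decoding branch, and the name contentBasedId is hypothetical rather than the plugin's exported API.

import { createHash } from 'node:crypto';
import { v5 as uuidv5 } from 'uuid';

// Namespace UUID used by the embedded source for document IDs.
const DOCUMENT_NAMESPACE = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';

// Condensed sketch of the embedded generateContentBasedId logic:
// the same content + agentId (+ optional filename) always yields the
// same UUID, so re-ingesting a document cannot create duplicates.
function contentBasedId(content: string, agentId: string, filename?: string): string {
  const normalized = content
    .slice(0, 2000)          // only the first ~2000 chars participate in the ID
    .replace(/\r\n/g, '\n')  // normalize line endings
    .replace(/\r/g, '\n')
    .trim();

  const hash = createHash('sha256')
    .update([agentId, normalized, filename ?? ''].filter(Boolean).join('::'))
    .digest('hex');

  return uuidv5(hash, DOCUMENT_NAMESPACE); // deterministic v5 UUID from the hash
}

// Example: both calls return the identical UUID despite differing line endings.
const a = contentBasedId('# Docs\r\nHello', 'agent-1', 'readme.md');
const b = contentBasedId('# Docs\nHello', 'agent-1', 'readme.md');
console.log(a === b); // true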