@elizaos/plugin-knowledge 1.0.0-beta.72 → 1.0.0-beta.73

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,3 +1,10 @@
+ import {
+ convertPdfToTextFromBuffer,
+ extractTextFromFileBuffer,
+ isBinaryContentType,
+ loadDocsFromPath
+ } from "./chunk-LHJERZV7.js";
+
  // src/index.ts
  import { logger as logger6 } from "@elizaos/core";
 
@@ -25,15 +32,11 @@ var ModelConfigSchema = z.object({
  TEXT_MODEL: z.string().optional(),
  // Token limits
  MAX_INPUT_TOKENS: z.string().or(z.number()).transform((val) => typeof val === "string" ? parseInt(val, 10) : val),
- MAX_OUTPUT_TOKENS: z.string().or(z.number()).optional().transform(
- (val) => val ? typeof val === "string" ? parseInt(val, 10) : val : 4096
- ),
+ MAX_OUTPUT_TOKENS: z.string().or(z.number()).optional().transform((val) => val ? typeof val === "string" ? parseInt(val, 10) : val : 4096),
  // Embedding dimension
  // For OpenAI: Only applies to text-embedding-3-small and text-embedding-3-large models
  // Default: 1536 dimensions
- EMBEDDING_DIMENSION: z.string().or(z.number()).optional().transform(
- (val) => val ? typeof val === "string" ? parseInt(val, 10) : val : 1536
- ),
+ EMBEDDING_DIMENSION: z.string().or(z.number()).optional().transform((val) => val ? typeof val === "string" ? parseInt(val, 10) : val : 1536),
  // Contextual Knowledge settings
  CTX_KNOWLEDGE_ENABLED: z.boolean().default(false)
  });
@@ -44,41 +47,48 @@ var KnowledgeServiceType = {
  // src/config.ts
  import z2 from "zod";
  import { logger } from "@elizaos/core";
- function validateModelConfig() {
+ function validateModelConfig(runtime) {
  try {
- const ctxKnowledgeEnabled2 = process.env.CTX_KNOWLEDGE_ENABLED === "true";
+ const getSetting = (key, defaultValue) => {
+ if (runtime) {
+ return runtime.getSetting(key) || defaultValue;
+ }
+ return process.env[key] || defaultValue;
+ };
+ const ctxKnowledgeEnabled2 = getSetting("CTX_KNOWLEDGE_ENABLED") === "true";
  logger.debug(`Configuration: CTX_KNOWLEDGE_ENABLED=${ctxKnowledgeEnabled2}`);
- const assumePluginOpenAI = !process.env.EMBEDDING_PROVIDER;
+ const embeddingProvider = getSetting("EMBEDDING_PROVIDER");
+ const assumePluginOpenAI = !embeddingProvider;
  if (assumePluginOpenAI) {
- if (process.env.OPENAI_API_KEY && process.env.OPENAI_EMBEDDING_MODEL) {
- logger.info(
- "EMBEDDING_PROVIDER not specified, using configuration from plugin-openai"
- );
+ const openaiApiKey2 = getSetting("OPENAI_API_KEY");
+ const openaiEmbeddingModel = getSetting("OPENAI_EMBEDDING_MODEL");
+ if (openaiApiKey2 && openaiEmbeddingModel) {
+ logger.info("EMBEDDING_PROVIDER not specified, using configuration from plugin-openai");
  } else {
  logger.warn(
  "EMBEDDING_PROVIDER not specified, but plugin-openai configuration incomplete. Check OPENAI_API_KEY and OPENAI_EMBEDDING_MODEL."
  );
  }
  }
- const embeddingProvider = process.env.EMBEDDING_PROVIDER || "openai";
- const textEmbeddingModel = process.env.TEXT_EMBEDDING_MODEL || process.env.OPENAI_EMBEDDING_MODEL || "text-embedding-3-small";
- const embeddingDimension = process.env.EMBEDDING_DIMENSION || process.env.OPENAI_EMBEDDING_DIMENSIONS || 1536;
- const openaiApiKey = process.env.OPENAI_API_KEY;
+ const finalEmbeddingProvider = embeddingProvider || "openai";
+ const textEmbeddingModel = getSetting("TEXT_EMBEDDING_MODEL") || getSetting("OPENAI_EMBEDDING_MODEL") || "text-embedding-3-small";
+ const embeddingDimension = getSetting("EMBEDDING_DIMENSION") || getSetting("OPENAI_EMBEDDING_DIMENSIONS") || "1536";
+ const openaiApiKey = getSetting("OPENAI_API_KEY");
  const config = ModelConfigSchema.parse({
- EMBEDDING_PROVIDER: embeddingProvider,
- TEXT_PROVIDER: process.env.TEXT_PROVIDER,
+ EMBEDDING_PROVIDER: finalEmbeddingProvider,
+ TEXT_PROVIDER: getSetting("TEXT_PROVIDER"),
  OPENAI_API_KEY: openaiApiKey,
- ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY,
- OPENROUTER_API_KEY: process.env.OPENROUTER_API_KEY,
- GOOGLE_API_KEY: process.env.GOOGLE_API_KEY,
- OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,
- ANTHROPIC_BASE_URL: process.env.ANTHROPIC_BASE_URL,
- OPENROUTER_BASE_URL: process.env.OPENROUTER_BASE_URL,
- GOOGLE_BASE_URL: process.env.GOOGLE_BASE_URL,
+ ANTHROPIC_API_KEY: getSetting("ANTHROPIC_API_KEY"),
+ OPENROUTER_API_KEY: getSetting("OPENROUTER_API_KEY"),
+ GOOGLE_API_KEY: getSetting("GOOGLE_API_KEY"),
+ OPENAI_BASE_URL: getSetting("OPENAI_BASE_URL"),
+ ANTHROPIC_BASE_URL: getSetting("ANTHROPIC_BASE_URL"),
+ OPENROUTER_BASE_URL: getSetting("OPENROUTER_BASE_URL"),
+ GOOGLE_BASE_URL: getSetting("GOOGLE_BASE_URL"),
  TEXT_EMBEDDING_MODEL: textEmbeddingModel,
- TEXT_MODEL: process.env.TEXT_MODEL,
- MAX_INPUT_TOKENS: process.env.MAX_INPUT_TOKENS || 4e3,
- MAX_OUTPUT_TOKENS: process.env.MAX_OUTPUT_TOKENS || 4096,
+ TEXT_MODEL: getSetting("TEXT_MODEL"),
+ MAX_INPUT_TOKENS: getSetting("MAX_INPUT_TOKENS", "4000"),
+ MAX_OUTPUT_TOKENS: getSetting("MAX_OUTPUT_TOKENS", "4096"),
  EMBEDDING_DIMENSION: embeddingDimension,
  CTX_KNOWLEDGE_ENABLED: ctxKnowledgeEnabled2
  });
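
Note: the hunk above replaces direct process.env reads with a runtime-first lookup. A minimal sketch of the resolution order (illustrative restatement of the code above; `runtime` is the agent runtime now passed into validateModelConfig):

    // Prefer per-agent runtime settings, then the process environment, then the default.
    const getSetting = (key, defaultValue) => {
      if (runtime) {
        return runtime.getSetting(key) || defaultValue;
      }
      return process.env[key] || defaultValue;
    };

    // e.g. getSetting("MAX_INPUT_TOKENS", "4000") yields the runtime value,
    // else process.env.MAX_INPUT_TOKENS, else "4000".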
@@ -95,60 +105,38 @@ function validateModelConfig() {
  function validateConfigRequirements(config, assumePluginOpenAI) {
  if (!assumePluginOpenAI) {
  if (config.EMBEDDING_PROVIDER === "openai" && !config.OPENAI_API_KEY) {
- throw new Error(
- 'OPENAI_API_KEY is required when EMBEDDING_PROVIDER is set to "openai"'
- );
+ throw new Error('OPENAI_API_KEY is required when EMBEDDING_PROVIDER is set to "openai"');
  }
  if (config.EMBEDDING_PROVIDER === "google" && !config.GOOGLE_API_KEY) {
- throw new Error(
- 'GOOGLE_API_KEY is required when EMBEDDING_PROVIDER is set to "google"'
- );
+ throw new Error('GOOGLE_API_KEY is required when EMBEDDING_PROVIDER is set to "google"');
  }
  } else {
  if (!config.OPENAI_API_KEY) {
- throw new Error(
- "OPENAI_API_KEY is required when using plugin-openai configuration"
- );
+ throw new Error("OPENAI_API_KEY is required when using plugin-openai configuration");
  }
  if (!config.TEXT_EMBEDDING_MODEL) {
- throw new Error(
- "OPENAI_EMBEDDING_MODEL is required when using plugin-openai configuration"
- );
+ throw new Error("OPENAI_EMBEDDING_MODEL is required when using plugin-openai configuration");
  }
  }
  if (config.CTX_KNOWLEDGE_ENABLED) {
- logger.info(
- "Contextual Knowledge is enabled. Validating text generation settings..."
- );
+ logger.info("Contextual Knowledge is enabled. Validating text generation settings...");
  if (!config.TEXT_PROVIDER) {
- throw new Error(
- "TEXT_PROVIDER is required when CTX_KNOWLEDGE_ENABLED is true"
- );
+ throw new Error("TEXT_PROVIDER is required when CTX_KNOWLEDGE_ENABLED is true");
  }
  if (!config.TEXT_MODEL) {
- throw new Error(
- "TEXT_MODEL is required when CTX_KNOWLEDGE_ENABLED is true"
- );
+ throw new Error("TEXT_MODEL is required when CTX_KNOWLEDGE_ENABLED is true");
  }
  if (config.TEXT_PROVIDER === "openai" && !config.OPENAI_API_KEY) {
- throw new Error(
- 'OPENAI_API_KEY is required when TEXT_PROVIDER is set to "openai"'
- );
+ throw new Error('OPENAI_API_KEY is required when TEXT_PROVIDER is set to "openai"');
  }
  if (config.TEXT_PROVIDER === "anthropic" && !config.ANTHROPIC_API_KEY) {
- throw new Error(
- 'ANTHROPIC_API_KEY is required when TEXT_PROVIDER is set to "anthropic"'
- );
+ throw new Error('ANTHROPIC_API_KEY is required when TEXT_PROVIDER is set to "anthropic"');
  }
  if (config.TEXT_PROVIDER === "openrouter" && !config.OPENROUTER_API_KEY) {
- throw new Error(
- 'OPENROUTER_API_KEY is required when TEXT_PROVIDER is set to "openrouter"'
- );
+ throw new Error('OPENROUTER_API_KEY is required when TEXT_PROVIDER is set to "openrouter"');
  }
  if (config.TEXT_PROVIDER === "google" && !config.GOOGLE_API_KEY) {
- throw new Error(
- 'GOOGLE_API_KEY is required when TEXT_PROVIDER is set to "google"'
- );
+ throw new Error('GOOGLE_API_KEY is required when TEXT_PROVIDER is set to "google"');
  }
  if (config.TEXT_PROVIDER === "openrouter") {
  const modelName = config.TEXT_MODEL?.toLowerCase() || "";
@@ -164,17 +152,21 @@ function validateConfigRequirements(config, assumePluginOpenAI) {
  "Contextual Knowledge is disabled. Using embedding configuration from plugin-openai."
  );
  } else {
- logger.info(
- "Contextual Knowledge is disabled. Using basic embedding-only configuration."
- );
+ logger.info("Contextual Knowledge is disabled. Using basic embedding-only configuration.");
  }
  }
  }
- async function getProviderRateLimits() {
- const config = validateModelConfig();
- const maxConcurrentRequests = getEnvInt("MAX_CONCURRENT_REQUESTS", 30);
- const requestsPerMinute = getEnvInt("REQUESTS_PER_MINUTE", 60);
- const tokensPerMinute = getEnvInt("TOKENS_PER_MINUTE", 15e4);
+ async function getProviderRateLimits(runtime) {
+ const config = validateModelConfig(runtime);
+ const getSetting = (key, defaultValue) => {
+ if (runtime) {
+ return runtime.getSetting(key) || defaultValue;
+ }
+ return process.env[key] || defaultValue;
+ };
+ const maxConcurrentRequests = parseInt(getSetting("MAX_CONCURRENT_REQUESTS", "30"), 10);
+ const requestsPerMinute = parseInt(getSetting("REQUESTS_PER_MINUTE", "60"), 10);
+ const tokensPerMinute = parseInt(getSetting("TOKENS_PER_MINUTE", "150000"), 10);
  switch (config.EMBEDDING_PROVIDER) {
  case "openai":
  return {
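
Note: runtime.getSetting returns strings, so the numeric defaults that getEnvInt used to supply (removed in the next hunk) are now passed as string defaults and parsed explicitly. A hedged usage sketch:

    // Each limit resolves runtime setting -> environment variable -> default string.
    const requestsPerMinute = parseInt(getSetting("REQUESTS_PER_MINUTE", "60"), 10);
    const limits = await getProviderRateLimits(runtime); // the runtime is now threaded through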
@@ -199,14 +191,11 @@ async function getProviderRateLimits() {
  };
  }
  }
- function getEnvInt(envVar, defaultValue) {
- return process.env[envVar] ? parseInt(process.env[envVar], 10) : defaultValue;
- }
 
  // src/service.ts
  import {
  createUniqueUuid,
- logger as logger4,
+ logger as logger3,
  MemoryType as MemoryType2,
  ModelType as ModelType2,
  Semaphore,
@@ -218,7 +207,7 @@ import {
  MemoryType,
  ModelType,
- logger as logger3,
+ logger as logger2,
  splitChunks
  } from "@elizaos/core";
 
@@ -701,143 +690,12 @@ ${chunkContent}`;
  return generatedContext.trim();
  }
 
- // src/utils.ts
- import * as mammoth from "mammoth";
- import { logger as logger2 } from "@elizaos/core";
- import { getDocument } from "pdfjs-dist/legacy/build/pdf.mjs";
- var PLAIN_TEXT_CONTENT_TYPES = [
- "application/typescript",
- "text/typescript",
- "text/x-python",
- "application/x-python-code",
- "application/yaml",
- "text/yaml",
- "application/x-yaml",
- "application/json",
- "text/markdown",
- "text/csv"
- ];
- var MAX_FALLBACK_SIZE_BYTES = 5 * 1024 * 1024;
- var BINARY_CHECK_BYTES = 1024;
- async function extractTextFromFileBuffer(fileBuffer, contentType, originalFilename) {
- const lowerContentType = contentType.toLowerCase();
- logger2.debug(
- `[TextUtil] Attempting to extract text from ${originalFilename} (type: ${contentType})`
- );
- if (lowerContentType === "application/vnd.openxmlformats-officedocument.wordprocessingml.document") {
- logger2.debug(
- `[TextUtil] Extracting text from DOCX ${originalFilename} via mammoth.`
- );
- try {
- const result = await mammoth.extractRawText({ buffer: fileBuffer });
- logger2.debug(
- `[TextUtil] DOCX text extraction complete for ${originalFilename}. Text length: ${result.value.length}`
- );
- return result.value;
- } catch (docxError) {
- const errorMsg = `[TextUtil] Failed to parse DOCX file ${originalFilename}: ${docxError.message}`;
- logger2.error(errorMsg, docxError.stack);
- throw new Error(errorMsg);
- }
- } else if (lowerContentType === "application/msword" || originalFilename.toLowerCase().endsWith(".doc")) {
- logger2.debug(
- `[TextUtil] Handling Microsoft Word .doc file: ${originalFilename}`
- );
- return `[Microsoft Word Document: ${originalFilename}]
-
- This document was indexed for search but cannot be displayed directly in the browser. The original document content is preserved for retrieval purposes.`;
- } else if (lowerContentType.startsWith("text/") || PLAIN_TEXT_CONTENT_TYPES.includes(lowerContentType)) {
- logger2.debug(
- `[TextUtil] Extracting text from plain text compatible file ${originalFilename} (type: ${contentType})`
- );
- return fileBuffer.toString("utf-8");
- } else {
- logger2.warn(
- `[TextUtil] Unsupported content type: "${contentType}" for ${originalFilename}. Attempting fallback to plain text.`
- );
- if (fileBuffer.length > MAX_FALLBACK_SIZE_BYTES) {
- const sizeErrorMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) exceeds maximum size for fallback (${MAX_FALLBACK_SIZE_BYTES} bytes). Cannot process as plain text.`;
- logger2.error(sizeErrorMsg);
- throw new Error(sizeErrorMsg);
- }
- const initialBytes = fileBuffer.subarray(
- 0,
- Math.min(fileBuffer.length, BINARY_CHECK_BYTES)
- );
- if (initialBytes.includes(0)) {
- const binaryHeuristicMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) appears to be binary based on initial byte check. Cannot process as plain text.`;
- logger2.error(binaryHeuristicMsg);
- throw new Error(binaryHeuristicMsg);
- }
- try {
- const textContent = fileBuffer.toString("utf-8");
- if (textContent.includes("\uFFFD")) {
- const binaryErrorMsg = `[TextUtil] File ${originalFilename} (type: ${contentType}) seems to be binary or has encoding issues after fallback to plain text (detected \uFFFD).`;
- logger2.error(binaryErrorMsg);
- throw new Error(binaryErrorMsg);
- }
- logger2.debug(
- `[TextUtil] Successfully processed unknown type ${contentType} as plain text after fallback for ${originalFilename}.`
- );
- return textContent;
- } catch (fallbackError) {
- const finalErrorMsg = `[TextUtil] Unsupported content type: ${contentType} for ${originalFilename}. Fallback to plain text also failed or indicated binary content.`;
- logger2.error(
- finalErrorMsg,
- fallbackError.message ? fallbackError.stack : void 0
- );
- throw new Error(finalErrorMsg);
- }
- }
- }
- async function convertPdfToTextFromBuffer(pdfBuffer, filename) {
- const docName = filename || "unnamed-document";
- logger2.debug(`[PdfService] Starting conversion for ${docName}`);
- try {
- const uint8Array = new Uint8Array(pdfBuffer);
- const pdf = await getDocument({ data: uint8Array }).promise;
- const numPages = pdf.numPages;
- const textPages = [];
- for (let pageNum = 1; pageNum <= numPages; pageNum++) {
- logger2.debug(`[PdfService] Processing page ${pageNum}/${numPages}`);
- const page = await pdf.getPage(pageNum);
- const textContent = await page.getTextContent();
- const lineMap = /* @__PURE__ */ new Map();
- textContent.items.filter(isTextItem).forEach((item) => {
- const yPos = Math.round(item.transform[5]);
- if (!lineMap.has(yPos)) {
- lineMap.set(yPos, []);
- }
- lineMap.get(yPos).push(item);
- });
- const sortedLines = Array.from(lineMap.entries()).sort((a, b) => b[0] - a[0]).map(
- ([_, items]) => items.sort((a, b) => a.transform[4] - b.transform[4]).map((item) => item.str).join(" ")
- );
- textPages.push(sortedLines.join("\n"));
- }
- const fullText = textPages.join("\n\n").replace(/\s+/g, " ").trim();
- logger2.debug(
- `[PdfService] Conversion complete for ${docName}, length: ${fullText.length}`
- );
- return fullText;
- } catch (error) {
- logger2.error(
- `[PdfService] Error converting PDF ${docName}:`,
- error.message
- );
- throw new Error(`Failed to convert PDF to text: ${error.message}`);
- }
- }
- function isTextItem(item) {
- return "str" in item;
- }
-
  // src/document-processor.ts
  var ctxKnowledgeEnabled = process.env.CTX_KNOWLEDGE_ENABLED === "true" || process.env.CTX_KNOWLEDGE_ENABLED === "True";
  if (ctxKnowledgeEnabled) {
- logger3.info(`Document processor starting with Contextual Knowledge ENABLED`);
+ logger2.info(`Document processor starting with Contextual Knowledge ENABLED`);
  } else {
- logger3.info(`Document processor starting with Contextual Knowledge DISABLED`);
+ logger2.info(`Document processor starting with Contextual Knowledge DISABLED`);
  }
  async function processFragmentsSynchronously({
  runtime,
@@ -850,19 +708,19 @@ async function processFragmentsSynchronously({
  worldId
  }) {
  if (!fullDocumentText || fullDocumentText.trim() === "") {
- logger3.warn(
+ logger2.warn(
  `No text content available to chunk for document ${documentId}.`
  );
  return 0;
  }
  const chunks = await splitDocumentIntoChunks(fullDocumentText);
  if (chunks.length === 0) {
- logger3.warn(
+ logger2.warn(
  `No chunks generated from text for ${documentId}. No fragments to save.`
  );
  return 0;
  }
- logger3.info(
+ logger2.info(
  `Split content into ${chunks.length} chunks for document ${documentId}`
  );
  const providerLimits = await getProviderRateLimits();
@@ -885,11 +743,11 @@ async function processFragmentsSynchronously({
  rateLimiter
  });
  if (failedCount > 0) {
- logger3.warn(
+ logger2.warn(
  `Failed to process ${failedCount} chunks out of ${chunks.length} for document ${documentId}`
  );
  }
- logger3.info(
+ logger2.info(
  `Finished saving ${savedCount} fragments for document ${documentId}.`
  );
  return savedCount;
@@ -902,17 +760,17 @@ async function extractTextFromDocument(fileBuffer, contentType, originalFilename
  }
  try {
  if (contentType === "application/pdf") {
- logger3.debug(`Extracting text from PDF: ${originalFilename}`);
+ logger2.debug(`Extracting text from PDF: ${originalFilename}`);
  return await convertPdfToTextFromBuffer(fileBuffer, originalFilename);
  } else {
- logger3.debug(
+ logger2.debug(
  `Extracting text from non-PDF: ${originalFilename} (Type: ${contentType})`
  );
  if (contentType.includes("text/") || contentType.includes("application/json") || contentType.includes("application/xml")) {
  try {
  return fileBuffer.toString("utf8");
  } catch (textError) {
- logger3.warn(
+ logger2.warn(
  `Failed to decode ${originalFilename} as UTF-8, falling back to binary extraction`
  );
  }
@@ -924,7 +782,7 @@ async function extractTextFromDocument(fileBuffer, contentType, originalFilename
  );
  }
  } catch (error) {
- logger3.error(
+ logger2.error(
  `Error extracting text from ${originalFilename}: ${error.message}`
  );
  throw new Error(
@@ -974,7 +832,7 @@ async function splitDocumentIntoChunks(documentText) {
  const targetCharChunkOverlap = Math.round(
  tokenChunkOverlap * DEFAULT_CHARS_PER_TOKEN
  );
- logger3.debug(
+ logger2.debug(
  `Using core splitChunks with settings: tokenChunkSize=${tokenChunkSize}, tokenChunkOverlap=${tokenChunkOverlap}, charChunkSize=${targetCharChunkSize}, charChunkOverlap=${targetCharChunkOverlap}`
  );
  return await splitChunks(documentText, tokenChunkSize, tokenChunkOverlap);
@@ -1001,7 +859,7 @@ async function processAndSaveFragments({
  { length: batchChunks.length },
  (_, k) => i + k
  );
- logger3.debug(
+ logger2.debug(
  `Processing batch of ${batchChunks.length} chunks for document ${documentId}. Starting original index: ${batchOriginalIndices[0]}, batch ${Math.floor(i / concurrencyLimit) + 1}/${Math.ceil(chunks.length / concurrencyLimit)}`
  );
  const contextualizedChunks = await getContextualizedChunks(
@@ -1021,7 +879,7 @@ async function processAndSaveFragments({
  if (!result.success) {
  failedCount++;
  failedChunks.push(originalChunkIndex);
- logger3.warn(
+ logger2.warn(
  `Failed to process chunk ${originalChunkIndex} for document ${documentId}`
  );
  continue;
@@ -1029,7 +887,7 @@ async function processAndSaveFragments({
  const contextualizedChunkText = result.text;
  const embedding = result.embedding;
  if (!embedding || embedding.length === 0) {
- logger3.warn(
+ logger2.warn(
  `Zero vector detected for chunk ${originalChunkIndex} (document ${documentId}). Embedding: ${JSON.stringify(result.embedding)}`
  );
  failedCount++;
@@ -1054,12 +912,12 @@ async function processAndSaveFragments({
  }
  };
  await runtime.createMemory(fragmentMemory, "knowledge");
- logger3.debug(
+ logger2.debug(
  `Saved fragment ${originalChunkIndex + 1} for document ${documentId} (Fragment ID: ${fragmentMemory.id})`
  );
  savedCount++;
  } catch (saveError) {
- logger3.error(
+ logger2.error(
  `Error saving chunk ${originalChunkIndex} to database: ${saveError.message}`,
  saveError.stack
  );
@@ -1103,7 +961,7 @@ async function generateEmbeddingsForChunks(runtime, contextualizedChunks, rateLi
  text: contextualizedChunk.contextualizedText
  };
  } catch (error) {
- logger3.error(
+ logger2.error(
  `Error generating embedding for chunk ${contextualizedChunk.index}: ${error.message}`
  );
  return {
@@ -1118,7 +976,7 @@ async function generateEmbeddingsForChunks(runtime, contextualizedChunks, rateLi
  }
  async function getContextualizedChunks(runtime, fullDocumentText, chunks, contentType, batchOriginalIndices) {
  if (ctxKnowledgeEnabled && fullDocumentText) {
- logger3.debug(`Generating contexts for ${chunks.length} chunks`);
+ logger2.debug(`Generating contexts for ${chunks.length} chunks`);
  return await generateContextsInBatch(
  runtime,
  fullDocumentText,
@@ -1143,7 +1001,7 @@ async function generateContextsInBatch(runtime, fullDocumentText, chunks, conten
  const config = validateModelConfig();
  const isUsingOpenRouter = config.TEXT_PROVIDER === "openrouter";
  const isUsingCacheCapableModel = isUsingOpenRouter && (config.TEXT_MODEL?.toLowerCase().includes("claude") || config.TEXT_MODEL?.toLowerCase().includes("gemini"));
- logger3.info(
+ logger2.info(
  `Using provider: ${config.TEXT_PROVIDER}, model: ${config.TEXT_MODEL}, caching capability: ${isUsingCacheCapableModel}`
  );
  const promptConfigs = prepareContextPrompts(
@@ -1188,7 +1046,7 @@ async function generateContextsInBatch(runtime, fullDocumentText, chunks, conten
  item.chunkText,
  generatedContext
  );
- logger3.debug(
+ logger2.debug(
  `Context added for chunk ${item.originalIndex}. New length: ${contextualizedText.length}`
  );
  return {
@@ -1197,7 +1055,7 @@ async function generateContextsInBatch(runtime, fullDocumentText, chunks, conten
  index: item.originalIndex
  };
  } catch (error) {
- logger3.error(
+ logger2.error(
  `Error generating context for chunk ${item.originalIndex}: ${error.message}`,
  error.stack
  );
@@ -1218,7 +1076,7 @@ function prepareContextPrompts(chunks, fullDocumentText, contentType, batchIndic
  if (isUsingCacheCapableModel) {
  const cachingPromptInfo = contentType ? getCachingPromptForMimeType(contentType, chunkText) : getCachingContextualizationPrompt(chunkText);
  if (cachingPromptInfo.prompt.startsWith("Error:")) {
- logger3.warn(
+ logger2.warn(
  `Skipping contextualization for chunk ${originalIndex} due to: ${cachingPromptInfo.prompt}`
  );
  return {
@@ -1240,7 +1098,7 @@ function prepareContextPrompts(chunks, fullDocumentText, contentType, batchIndic
  } else {
  const prompt = contentType ? getPromptForMimeType(contentType, fullDocumentText, chunkText) : getContextualizationPrompt(fullDocumentText, chunkText);
  if (prompt.startsWith("Error:")) {
- logger3.warn(
+ logger2.warn(
  `Skipping contextualization for chunk ${originalIndex} due to: ${prompt}`
  );
  return {
@@ -1260,7 +1118,7 @@ function prepareContextPrompts(chunks, fullDocumentText, contentType, batchIndic
  };
  }
  } catch (error) {
- logger3.error(
+ logger2.error(
  `Error preparing prompt for chunk ${originalIndex}: ${error.message}`,
  error.stack
  );
@@ -1281,7 +1139,7 @@ async function generateEmbeddingWithValidation(runtime, text) {
  });
  const embedding = Array.isArray(embeddingResult) ? embeddingResult : embeddingResult?.embedding;
  if (!embedding || embedding.length === 0) {
- logger3.warn(
+ logger2.warn(
  `Zero vector detected. Embedding result: ${JSON.stringify(embeddingResult)}`
  );
  return {
@@ -1301,14 +1159,14 @@ async function withRateLimitRetry(operation, errorContext, retryDelay) {
  } catch (error) {
  if (error.status === 429) {
  const delay = retryDelay || error.headers?.["retry-after"] || 5;
- logger3.warn(
+ logger2.warn(
  `Rate limit hit for ${errorContext}. Retrying after ${delay}s`
  );
  await new Promise((resolve) => setTimeout(resolve, delay * 1e3));
  try {
  return await operation();
  } catch (retryError) {
- logger3.error(
+ logger2.error(
  `Failed after retry for ${errorContext}: ${retryError.message}`
  );
  throw retryError;
@@ -1329,7 +1187,7 @@ function createRateLimiter(requestsPerMinute) {
  const oldestRequest = requestTimes[0];
  const timeToWait = Math.max(0, oldestRequest + intervalMs - now);
  if (timeToWait > 0) {
- logger3.debug(
+ logger2.debug(
  `Rate limiting applied, waiting ${timeToWait}ms before next request`
  );
  await new Promise((resolve) => setTimeout(resolve, timeToWait));
@@ -1341,42 +1199,87 @@ function createRateLimiter(requestsPerMinute) {
 
  // src/service.ts
  var KnowledgeService = class _KnowledgeService extends Service {
+ static serviceType = "knowledge";
+ config;
+ capabilityDescription = "Provides Retrieval Augmented Generation capabilities, including knowledge upload and querying.";
+ knowledgeProcessingSemaphore;
  /**
  * Create a new Knowledge service
  * @param runtime Agent runtime
  */
- constructor(runtime) {
+ constructor(runtime, config) {
  super(runtime);
- this.runtime = runtime;
  this.knowledgeProcessingSemaphore = new Semaphore(10);
- logger4.info(`KnowledgeService initialized for agent: ${runtime.agentId}`);
+ const parseBooleanEnv = (value) => {
+ if (typeof value === "boolean") return value;
+ if (typeof value === "string") return value.toLowerCase() === "true";
+ return false;
+ };
+ this.config = {
+ CTX_KNOWLEDGE_ENABLED: parseBooleanEnv(config?.CTX_KNOWLEDGE_ENABLED),
+ LOAD_DOCS_ON_STARTUP: parseBooleanEnv(config?.LOAD_DOCS_ON_STARTUP),
+ MAX_INPUT_TOKENS: config?.MAX_INPUT_TOKENS,
+ MAX_OUTPUT_TOKENS: config?.MAX_OUTPUT_TOKENS,
+ EMBEDDING_PROVIDER: config?.EMBEDDING_PROVIDER,
+ TEXT_PROVIDER: config?.TEXT_PROVIDER,
+ TEXT_EMBEDDING_MODEL: config?.TEXT_EMBEDDING_MODEL
+ };
+ logger3.info(
+ `KnowledgeService initialized for agent ${this.runtime.agentId} with config:`,
+ this.config
+ );
+ if (this.config.LOAD_DOCS_ON_STARTUP) {
+ this.loadInitialDocuments().catch((error) => {
+ logger3.error("Error during initial document loading in KnowledgeService:", error);
+ });
+ }
+ }
+ async loadInitialDocuments() {
+ logger3.info(
+ `KnowledgeService: Checking for documents to load on startup for agent ${this.runtime.agentId}`
+ );
+ try {
+ await new Promise((resolve) => setTimeout(resolve, 1e3));
+ const result = await loadDocsFromPath(this, this.runtime.agentId);
+ if (result.successful > 0) {
+ logger3.info(
+ `KnowledgeService: Loaded ${result.successful} documents from docs folder on startup for agent ${this.runtime.agentId}`
+ );
+ } else {
+ logger3.info(
+ `KnowledgeService: No new documents found to load on startup for agent ${this.runtime.agentId}`
+ );
+ }
+ } catch (error) {
+ logger3.error(
+ `KnowledgeService: Error loading documents on startup for agent ${this.runtime.agentId}:`,
+ error
+ );
+ }
  }
- static serviceType = KnowledgeServiceType.KNOWLEDGE;
- capabilityDescription = "Provides Retrieval Augmented Generation capabilities, including knowledge upload and querying.";
- knowledgeProcessingSemaphore;
  /**
  * Start the Knowledge service
  * @param runtime Agent runtime
  * @returns Initialized Knowledge service
  */
  static async start(runtime) {
- logger4.info(`Starting Knowledge service for agent: ${runtime.agentId}`);
+ logger3.info(`Starting Knowledge service for agent: ${runtime.agentId}`);
  const service = new _KnowledgeService(runtime);
  if (service.runtime.character?.knowledge && service.runtime.character.knowledge.length > 0) {
- logger4.info(
+ logger3.info(
  `KnowledgeService: Processing ${service.runtime.character.knowledge.length} character knowledge items.`
  );
  const stringKnowledge = service.runtime.character.knowledge.filter(
  (item) => typeof item === "string"
  );
- service.processCharacterKnowledge(stringKnowledge).catch((err) => {
- logger4.error(
+ await service.processCharacterKnowledge(stringKnowledge).catch((err) => {
+ logger3.error(
  `KnowledgeService: Error processing character knowledge during startup: ${err.message}`,
  err
  );
  });
  } else {
- logger4.info(
+ logger3.info(
  `KnowledgeService: No character knowledge to process for agent ${runtime.agentId}.`
  );
  }
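
Note: the constructor now takes an optional config object and coerces boolean-like values with parseBooleanEnv. An illustrative construction (the literal values here are hypothetical; the service is normally created via KnowledgeService.start):

    // true, "true", and "TRUE" all enable a flag; anything else is false.
    const service = new KnowledgeService(runtime, {
      CTX_KNOWLEDGE_ENABLED: "true",
      LOAD_DOCS_ON_STARTUP: true // kicks off loadDocsFromPath shortly after construction
    });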
@@ -1387,21 +1290,17 @@ var KnowledgeService = class _KnowledgeService extends Service {
  * @param runtime Agent runtime
  */
  static async stop(runtime) {
- logger4.info(`Stopping Knowledge service for agent: ${runtime.agentId}`);
+ logger3.info(`Stopping Knowledge service for agent: ${runtime.agentId}`);
  const service = runtime.getService(_KnowledgeService.serviceType);
  if (!service) {
- logger4.warn(
- `KnowledgeService not found for agent ${runtime.agentId} during stop.`
- );
+ logger3.warn(`KnowledgeService not found for agent ${runtime.agentId} during stop.`);
  }
  }
  /**
  * Stop the service
  */
  async stop() {
- logger4.info(
- `Knowledge service stopping for agent: ${this.runtime.agentId}`
- );
+ logger3.info(`Knowledge service stopping for agent: ${this.runtime.agentId}`);
  }
  /**
  * Add knowledge to the system
@@ -1410,15 +1309,13 @@ var KnowledgeService = class _KnowledgeService extends Service {
  */
  async addKnowledge(options) {
  const agentId = this.runtime.agentId;
- logger4.info(
+ logger3.info(
  `KnowledgeService (agent: ${agentId}) processing document for public addKnowledge: ${options.originalFilename}, type: ${options.contentType}`
  );
  try {
- const existingDocument = await this.runtime.getMemoryById(
- options.clientDocumentId
- );
+ const existingDocument = await this.runtime.getMemoryById(options.clientDocumentId);
  if (existingDocument && existingDocument.metadata?.type === MemoryType2.DOCUMENT) {
- logger4.info(
+ logger3.info(
  `Document ${options.originalFilename} with ID ${options.clientDocumentId} already exists. Skipping processing.`
  );
  const fragments = await this.runtime.getMemories({
@@ -1437,7 +1334,7 @@ var KnowledgeService = class _KnowledgeService extends Service {
  };
  }
  } catch (error) {
- logger4.debug(
+ logger3.debug(
  `Document ${options.clientDocumentId} not found or error checking existence, proceeding with processing: ${error instanceof Error ? error.message : String(error)}`
  );
  }
@@ -1459,45 +1356,74 @@ var KnowledgeService = class _KnowledgeService extends Service {
  }) {
  const agentId = this.runtime.agentId;
  try {
- logger4.debug(
+ logger3.debug(
  `KnowledgeService: Processing document ${originalFilename} (type: ${contentType}) via processDocument`
  );
  let fileBuffer = null;
  let extractedText;
+ let documentContentToStore;
  const isPdfFile = contentType === "application/pdf" || originalFilename.toLowerCase().endsWith(".pdf");
- const isBinaryFile = this.isBinaryContentType(
- contentType,
- originalFilename
- );
- if (isBinaryFile) {
+ if (isPdfFile) {
  try {
  fileBuffer = Buffer.from(content, "base64");
  } catch (e) {
- logger4.error(
+ logger3.error(
  `KnowledgeService: Failed to convert base64 to buffer for ${originalFilename}: ${e.message}`
  );
- throw new Error(
- `Invalid base64 content for binary file ${originalFilename}`
+ throw new Error(`Invalid base64 content for PDF file ${originalFilename}`);
+ }
+ extractedText = await extractTextFromDocument(fileBuffer, contentType, originalFilename);
+ documentContentToStore = content;
+ } else if (isBinaryContentType(contentType, originalFilename)) {
+ try {
+ fileBuffer = Buffer.from(content, "base64");
+ } catch (e) {
+ logger3.error(
+ `KnowledgeService: Failed to convert base64 to buffer for ${originalFilename}: ${e.message}`
  );
+ throw new Error(`Invalid base64 content for binary file ${originalFilename}`);
  }
- extractedText = await extractTextFromDocument(
- fileBuffer,
- contentType,
- originalFilename
- );
+ extractedText = await extractTextFromDocument(fileBuffer, contentType, originalFilename);
+ documentContentToStore = extractedText;
  } else {
- extractedText = content;
+ const base64Regex = /^[A-Za-z0-9+/]+=*$/;
+ const looksLikeBase64 = content && content.length > 0 && base64Regex.test(content.replace(/\s/g, ""));
+ if (looksLikeBase64) {
+ try {
+ const decodedBuffer = Buffer.from(content, "base64");
+ const decodedText = decodedBuffer.toString("utf8");
+ const invalidCharCount = (decodedText.match(/\ufffd/g) || []).length;
+ const textLength = decodedText.length;
+ if (invalidCharCount > 0 && invalidCharCount / textLength > 0.1) {
+ throw new Error("Decoded content contains too many invalid characters");
+ }
+ logger3.debug(`Successfully decoded base64 content for text file: ${originalFilename}`);
+ extractedText = decodedText;
+ documentContentToStore = decodedText;
+ } catch (e) {
+ logger3.error(
+ `Failed to decode base64 for ${originalFilename}: ${e instanceof Error ? e.message : String(e)}`
+ );
+ throw new Error(
+ `File ${originalFilename} appears to be corrupted or incorrectly encoded`
+ );
+ }
+ } else {
+ logger3.debug(`Treating content as plain text for file: ${originalFilename}`);
+ extractedText = content;
+ documentContentToStore = content;
+ }
  }
  if (!extractedText || extractedText.trim() === "") {
  const noTextError = new Error(
  `KnowledgeService: No text content extracted from ${originalFilename} (type: ${contentType}).`
  );
- logger4.warn(noTextError.message);
+ logger3.warn(noTextError.message);
  throw noTextError;
  }
  const documentMemory = createDocumentMemory({
- text: isPdfFile ? content : extractedText,
- // Store base64 for PDF, text for others
+ text: documentContentToStore,
+ // Store base64 only for PDFs, plain text for everything else
  agentId,
  clientDocumentId,
  // This becomes the memory.id
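
Note: for non-PDF, non-binary uploads the service now sniffs whether `content` is base64 before storing it. The heuristic, restated from the hunk above (the regex and the 10% threshold are the values in the code):

    const base64Regex = /^[A-Za-z0-9+/]+=*$/;
    const looksLikeBase64 = content.length > 0 && base64Regex.test(content.replace(/\s/g, ""));
    // After decoding, a payload whose text is more than 10% U+FFFD
    // replacement characters is rejected as not being valid UTF-8 text.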
@@ -1516,7 +1442,7 @@ var KnowledgeService = class _KnowledgeService extends Service {
  entityId: entityId || agentId
  };
  await this.runtime.createMemory(memoryWithScope, "documents");
- logger4.debug(
+ logger3.debug(
  `KnowledgeService: Stored document ${originalFilename} (Memory ID: ${memoryWithScope.id})`
  );
  const fragmentCount = await processFragmentsSynchronously({
@@ -1530,7 +1456,7 @@ var KnowledgeService = class _KnowledgeService extends Service {
  entityId: entityId || agentId,
  worldId: worldId || agentId
  });
- logger4.info(
+ logger3.info(
  `KnowledgeService: Document ${originalFilename} processed with ${fragmentCount} fragments for agent ${agentId}`
  );
  return {
@@ -1539,65 +1465,16 @@ var KnowledgeService = class _KnowledgeService extends Service {
  fragmentCount
  };
  } catch (error) {
- logger4.error(
+ logger3.error(
  `KnowledgeService: Error processing document ${originalFilename}: ${error.message}`,
  error.stack
  );
  throw error;
  }
  }
- /**
- * Determines if a file should be treated as binary based on its content type and filename
- * @param contentType MIME type of the file
- * @param filename Original filename
- * @returns True if the file should be treated as binary (base64 encoded)
- */
- isBinaryContentType(contentType, filename) {
- const binaryContentTypes = [
- "application/pdf",
- "application/msword",
- "application/vnd.openxmlformats-officedocument",
- "application/vnd.ms-excel",
- "application/vnd.ms-powerpoint",
- "application/zip",
- "application/x-zip-compressed",
- "application/octet-stream",
- "image/",
- "audio/",
- "video/"
- ];
- const isBinaryMimeType = binaryContentTypes.some(
- (type) => contentType.includes(type)
- );
- if (isBinaryMimeType) {
- return true;
- }
- const fileExt = filename.split(".").pop()?.toLowerCase() || "";
- const binaryExtensions = [
- "pdf",
- "docx",
- "doc",
- "xls",
- "xlsx",
- "ppt",
- "pptx",
- "zip",
- "jpg",
- "jpeg",
- "png",
- "gif",
- "mp3",
- "mp4",
- "wav"
- ];
- return binaryExtensions.includes(fileExt);
- }
  // --- Knowledge methods moved from AgentRuntime ---
  async handleProcessingError(error, context) {
- logger4.error(
- `KnowledgeService: Error ${context}:`,
- error?.message || error || "Unknown error"
- );
+ logger3.error(`KnowledgeService: Error ${context}:`, error?.message || error || "Unknown error");
  throw error;
  }
  async checkExistingKnowledge(knowledgeId) {
@@ -1605,13 +1482,9 @@ var KnowledgeService = class _KnowledgeService extends Service {
  return !!existingDocument;
  }
  async getKnowledge(message, scope) {
- logger4.debug(
- "KnowledgeService: getKnowledge called for message id: " + message.id
- );
+ logger3.debug("KnowledgeService: getKnowledge called for message id: " + message.id);
  if (!message?.content?.text || message?.content?.text.trim().length === 0) {
- logger4.warn(
- "KnowledgeService: Invalid or empty message content for knowledge query."
- );
+ logger3.warn("KnowledgeService: Invalid or empty message content for knowledge query.");
  return [];
  }
  const embedding = await this.runtime.useModel(ModelType2.TEXT_EMBEDDING, {
@@ -1642,7 +1515,7 @@ var KnowledgeService = class _KnowledgeService extends Service {
  }
  async processCharacterKnowledge(items) {
  await new Promise((resolve) => setTimeout(resolve, 1e3));
- logger4.info(
+ logger3.info(
  `KnowledgeService: Processing ${items.length} character knowledge items for agent ${this.runtime.agentId}`
  );
  const processingPromises = items.map(async (item) => {
@@ -1650,12 +1523,12 @@ var KnowledgeService = class _KnowledgeService extends Service {
  try {
  const knowledgeId = createUniqueUuid(this.runtime.agentId + item, item);
  if (await this.checkExistingKnowledge(knowledgeId)) {
- logger4.debug(
+ logger3.debug(
  `KnowledgeService: Character knowledge item with ID ${knowledgeId} already exists. Skipping.`
  );
  return;
  }
- logger4.debug(
+ logger3.debug(
  `KnowledgeService: Processing character knowledge for ${this.runtime.character?.name} - ${item.slice(0, 100)}`
  );
  let metadata = {
@@ -1700,21 +1573,16 @@ var KnowledgeService = class _KnowledgeService extends Service {
  }
  );
  } catch (error) {
- await this.handleProcessingError(
- error,
- "processing character knowledge"
- );
+ await this.handleProcessingError(error, "processing character knowledge");
  } finally {
  this.knowledgeProcessingSemaphore.release();
  }
  });
  await Promise.all(processingPromises);
- logger4.info(
+ logger3.info(
  `KnowledgeService: Finished processing character knowledge for agent ${this.runtime.agentId}.`
  );
  }
- // Renamed from AgentRuntime's addKnowledge
- // This is the core logic for adding text-based knowledge items and creating fragments.
  async _internalAddKnowledge(item, options = {
  targetTokens: 1500,
  // TODO: Make these configurable, perhaps from plugin config
@@ -1731,9 +1599,7 @@ var KnowledgeService = class _KnowledgeService extends Service {
  worldId: scope?.worldId ?? this.runtime.agentId,
  entityId: scope?.entityId ?? this.runtime.agentId
  };
- logger4.debug(
- `KnowledgeService: _internalAddKnowledge called for item ID ${item.id}`
- );
+ logger3.debug(`KnowledgeService: _internalAddKnowledge called for item ID ${item.id}`);
  const documentMemory = {
  id: item.id,
  // This ID should be the unique ID for the document being added.
@@ -1755,7 +1621,7 @@ var KnowledgeService = class _KnowledgeService extends Service {
  };
  const existingDocument = await this.runtime.getMemoryById(item.id);
  if (existingDocument) {
- logger4.debug(
+ logger3.debug(
  `KnowledgeService: Document ${item.id} already exists in _internalAddKnowledge, updating...`
  );
  await this.runtime.updateMemory({
@@ -1779,16 +1645,28 @@ var KnowledgeService = class _KnowledgeService extends Service {
  await this.processDocumentFragment(fragment);
  fragmentsProcessed++;
  } catch (error) {
- logger4.error(
+ logger3.error(
  `KnowledgeService: Error processing fragment ${fragment.id} for document ${item.id}:`,
  error
  );
  }
  }
- logger4.debug(
+ logger3.debug(
  `KnowledgeService: Processed ${fragmentsProcessed}/${fragments.length} fragments for document ${item.id}.`
  );
  }
+ async processDocumentFragment(fragment) {
+ try {
+ await this.runtime.addEmbeddingToMemory(fragment);
+ await this.runtime.createMemory(fragment, "knowledge");
+ } catch (error) {
+ logger3.error(
+ `KnowledgeService: Error processing fragment ${fragment.id}:`,
+ error instanceof Error ? error.message : String(error)
+ );
+ throw error;
+ }
+ }
  async splitAndCreateFragments(document, targetTokens, overlap, scope) {
  if (!document.content.text) {
  return [];
@@ -1827,19 +1705,37 @@ var KnowledgeService = class _KnowledgeService extends Service {
  };
  });
  }
- async processDocumentFragment(fragment) {
- try {
- await this.runtime.addEmbeddingToMemory(fragment);
- await this.runtime.createMemory(fragment, "knowledge");
- } catch (error) {
- logger4.error(
- `KnowledgeService: Error processing fragment ${fragment.id}:`,
- error instanceof Error ? error.message : String(error)
+ // ADDED METHODS START
+ /**
+ * Retrieves memories, typically documents, for the agent.
+ * Corresponds to GET /plugins/knowledge/documents
+ */
+ async getMemories(params) {
+ if (params.tableName !== "documents") {
+ logger3.warn(
+ `KnowledgeService.getMemories called with tableName ${params.tableName}, but this service primarily manages 'documents'. Proceeding, but review usage.`
  );
- throw error;
  }
+ return this.runtime.getMemories({
+ ...params,
+ // includes tableName, roomId, count, end
+ agentId: this.runtime.agentId
+ // Ensure agentId is correctly scoped
+ });
+ }
+ /**
+ * Deletes a specific memory item (knowledge document) by its ID.
+ * Corresponds to DELETE /plugins/knowledge/documents/:knowledgeId
+ * Assumes the memoryId corresponds to an item in the 'documents' table or that
+ * runtime.deleteMemory can correctly identify it.
+ */
+ async deleteMemory(memoryId) {
+ await this.runtime.deleteMemory(memoryId);
+ logger3.info(
+ `KnowledgeService: Deleted memory ${memoryId} for agent ${this.runtime.agentId}. Assumed it was a document or related fragment.`
+ );
  }
- // --- End of moved knowledge methods ---
+ // ADDED METHODS END
  };
 
  // src/provider.ts
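
Note: getMemories and deleteMemory are the service-side backing for the new HTTP routes added further down. A hedged call sketch (the count and index here are illustrative):

    const docs = await service.getMemories({ tableName: "documents", count: 20 });
    await service.deleteMemory(docs[0].id); // removes the stored document memory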
@@ -2415,7 +2311,9 @@ var KnowledgeTestSuite = class {
  contentType: "text/plain",
  originalFilename: "knowledge-test.txt",
  worldId: runtime.agentId,
- content: "This is test knowledge that should be stored and retrievable."
+ content: "This is test knowledge that should be stored and retrievable.",
+ roomId: runtime.agentId,
+ entityId: runtime.agentId
  };
  const result = await service.addKnowledge(testDocument);
  if (result.clientDocumentId !== testDocument.clientDocumentId) {
@@ -2446,7 +2344,9 @@ var KnowledgeTestSuite = class {
  contentType: "text/plain",
  originalFilename: "duplicate-test.txt",
  worldId: runtime.agentId,
- content: "This document will be uploaded twice."
+ content: "This document will be uploaded twice.",
+ roomId: runtime.agentId,
+ entityId: runtime.agentId
  };
  const result1 = await service.addKnowledge(testDocument);
  const result2 = await service.addKnowledge(testDocument);
@@ -2470,7 +2370,9 @@ var KnowledgeTestSuite = class {
  contentType: "text/plain",
  originalFilename: "retrieval-test.txt",
  worldId: runtime.agentId,
- content: "The capital of France is Paris. Paris is known for the Eiffel Tower."
+ content: "The capital of France is Paris. Paris is known for the Eiffel Tower.",
+ roomId: runtime.agentId,
+ entityId: runtime.agentId
  };
  await service.addKnowledge(testDocument);
  const queryMessage = {
@@ -2506,7 +2408,9 @@ var KnowledgeTestSuite = class {
  contentType: "text/plain",
  originalFilename: "provider-test.txt",
  worldId: runtime.agentId,
- content: "Important fact 1. Important fact 2. Important fact 3."
+ content: "Important fact 1. Important fact 2. Important fact 3.",
+ roomId: runtime.agentId,
+ entityId: runtime.agentId
  };
  await service.addKnowledge(testDocument);
  const message = {
@@ -2604,8 +2508,10 @@ var KnowledgeTestSuite = class {
  contentType: "text/plain",
  originalFilename: "empty.txt",
  worldId: runtime.agentId,
- content: ""
+ content: "",
  // Empty content should cause an error
+ roomId: runtime.agentId,
+ entityId: runtime.agentId
  });
  throw new Error("Expected error for empty content");
  } catch (error) {
@@ -2618,8 +2524,10 @@ var KnowledgeTestSuite = class {
  contentType: "text/plain",
  originalFilename: "null-content.txt",
  worldId: runtime.agentId,
- content: null
+ content: null,
  // This should definitely cause an error
+ roomId: runtime.agentId,
+ entityId: runtime.agentId
  });
  } catch (error) {
  }
@@ -2652,7 +2560,9 @@ var KnowledgeTestSuite = class {
  Unlike classical bits, qubits can exist in superposition.
  This allows quantum computers to process many calculations simultaneously.
  Major companies like IBM, Google, and Microsoft are developing quantum computers.
- `
+ `,
+ roomId: runtime.agentId,
+ entityId: runtime.agentId
  };
  const addResult = await service.addKnowledge(document);
  if (addResult.fragmentCount === 0) {
@@ -2704,7 +2614,9 @@ var KnowledgeTestSuite = class {
  contentType: "text/plain",
  originalFilename: "large-document.txt",
  worldId: runtime.agentId,
- content: largeContent
+ content: largeContent,
+ roomId: runtime.agentId,
+ entityId: runtime.agentId
  };
  const result = await service.addKnowledge(document);
  if (result.fragmentCount < 2) {
@@ -2730,7 +2642,6 @@ var KnowledgeTestSuite = class {
  name: "Should detect binary content types correctly",
  fn: async (runtime) => {
  const service = await KnowledgeService.start(runtime);
- const isBinary = service.isBinaryContentType.bind(service);
  const binaryTypes = [
  { type: "application/pdf", filename: "test.pdf", expected: true },
  { type: "image/png", filename: "test.png", expected: true },
@@ -2748,7 +2659,7 @@ var KnowledgeTestSuite = class {
  }
  ];
  for (const test of binaryTypes) {
- const result = isBinary(test.type, test.filename);
+ const result = isBinaryContentType(test.type, test.filename);
  if (result !== test.expected) {
  throw new Error(
  `Binary detection failed for ${test.type}/${test.filename}. Expected ${test.expected}, got ${result}`
@@ -2763,22 +2674,13 @@ var KnowledgeTestSuite = class {
  var tests_default = new KnowledgeTestSuite();
 
  // src/actions.ts
- import { logger as logger5, createUniqueUuid as createUniqueUuid2 } from "@elizaos/core";
+ import { logger as logger4, createUniqueUuid as createUniqueUuid2 } from "@elizaos/core";
  import * as fs2 from "fs";
  import * as path2 from "path";
  var processKnowledgeAction = {
  name: "PROCESS_KNOWLEDGE",
  description: "Process and store knowledge from a file path or text content into the knowledge base",
- similes: [
- "add knowledge",
- "upload document",
- "store information",
- "add to knowledge base",
- "learn from document",
- "ingest file",
- "process document",
- "remember this"
- ],
+ similes: [],
  examples: [
  [
  {
@@ -2832,7 +2734,7 @@ var processKnowledgeAction = {
  const hasPath = pathPattern.test(text);
  const service = runtime.getService(KnowledgeService.serviceType);
  if (!service) {
- logger5.warn(
+ logger4.warn(
  "Knowledge service not available for PROCESS_KNOWLEDGE action"
  );
  return false;
@@ -2917,7 +2819,7 @@ var processKnowledgeAction = {
  await callback(response);
  }
  } catch (error) {
- logger5.error("Error in PROCESS_KNOWLEDGE action:", error);
+ logger4.error("Error in PROCESS_KNOWLEDGE action:", error);
  const errorResponse = {
  text: `I encountered an error while processing the knowledge: ${error instanceof Error ? error.message : "Unknown error"}`
  };
@@ -3028,7 +2930,7 @@ ${formattedResults}`
  await callback(response);
  }
  } catch (error) {
- logger5.error("Error in SEARCH_KNOWLEDGE action:", error);
+ logger4.error("Error in SEARCH_KNOWLEDGE action:", error);
  const errorResponse = {
  text: `I encountered an error while searching the knowledge base: ${error instanceof Error ? error.message : "Unknown error"}`
  };
@@ -3040,26 +2942,297 @@ ${formattedResults}`
  };
  var knowledgeActions = [processKnowledgeAction, searchKnowledgeAction];
 
+ // src/routes.ts
+ import { MemoryType as MemoryType4, createUniqueUuid as createUniqueUuid3, logger as logger5 } from "@elizaos/core";
+ import fs3 from "fs";
+ import path3 from "path";
+ function sendSuccess(res, data, status = 200) {
+   res.writeHead(status, { "Content-Type": "application/json" });
+   res.end(JSON.stringify({ success: true, data }));
+ }
+ function sendError(res, status, code, message, details) {
+   res.writeHead(status, { "Content-Type": "application/json" });
+   res.end(JSON.stringify({ success: false, error: { code, message, details } }));
+ }
+ var cleanupFile = (filePath) => {
+   if (filePath && fs3.existsSync(filePath)) {
+     try {
+       fs3.unlinkSync(filePath);
+     } catch (error) {
+       logger5.error(`Error cleaning up file ${filePath}:`, error);
+     }
+   }
+ };
+ var cleanupFiles = (files) => {
+   if (files) {
+     files.forEach((file) => cleanupFile(file.path));
+   }
+ };
+ async function uploadKnowledgeHandler(req, res, runtime) {
+   const service = runtime.getService(KnowledgeService.serviceType);
+   if (!service) {
+     return sendError(res, 500, "SERVICE_NOT_FOUND", "KnowledgeService not found");
+   }
+   const files = req.files;
+   if (!files || files.length === 0) {
+     return sendError(res, 400, "NO_FILES", "No files uploaded");
+   }
+   try {
+     const processingPromises = files.map(async (file, index) => {
+       let knowledgeId;
+       const originalFilename = file.originalname;
+       const worldId = req.body.worldId || runtime.agentId;
+       const filePath = file.path;
+       knowledgeId = req.body?.documentIds && req.body.documentIds[index] || req.body?.documentId || createUniqueUuid3(runtime, `knowledge-${originalFilename}-${Date.now()}`);
+       try {
+         const fileBuffer = await fs3.promises.readFile(filePath);
+         const fileExt = file.originalname.split(".").pop()?.toLowerCase() || "";
+         const filename = file.originalname;
+         const title = filename.replace(`.${fileExt}`, "");
+         const base64Content = fileBuffer.toString("base64");
+         const knowledgeItem = {
+           id: knowledgeId,
+           content: {
+             text: base64Content
+           },
+           metadata: {
+             type: MemoryType4.DOCUMENT,
+             timestamp: Date.now(),
+             source: "upload",
+             filename,
+             fileExt,
+             title,
+             path: originalFilename,
+             fileType: file.mimetype,
+             fileSize: file.size
+           }
+         };
+         const addKnowledgeOpts = {
+           clientDocumentId: knowledgeId,
+           // This is knowledgeItem.id
+           contentType: file.mimetype,
+           // Directly from multer file object
+           originalFilename,
+           // Directly from multer file object
+           content: base64Content,
+           // The base64 string of the file
+           worldId,
+           roomId: runtime.agentId,
+           // Or a more specific room ID if available
+           entityId: runtime.agentId
+         };
+         await service.addKnowledge(addKnowledgeOpts);
+         cleanupFile(filePath);
+         return {
+           id: knowledgeId,
+           filename: originalFilename,
+           type: file.mimetype,
+           size: file.size,
+           uploadedAt: Date.now(),
+           status: "success"
+         };
+       } catch (fileError) {
+         logger5.error(
+           `[KNOWLEDGE UPLOAD HANDLER] Error processing file ${file.originalname}: ${fileError}`
+         );
+         cleanupFile(filePath);
+         return {
+           id: knowledgeId,
+           filename: originalFilename,
+           status: "error_processing",
+           error: fileError.message
+         };
+       }
+     });
+     const results = await Promise.all(processingPromises);
+     sendSuccess(res, results);
+   } catch (error) {
+     logger5.error("[KNOWLEDGE UPLOAD HANDLER] Error uploading knowledge:", error);
+     cleanupFiles(files);
+     sendError(res, 500, "UPLOAD_ERROR", "Failed to upload knowledge", error.message);
+   }
+ }
+ async function getKnowledgeDocumentsHandler(req, res, runtime) {
+   const service = runtime.getService(KnowledgeService.serviceType);
+   if (!service) {
+     return sendError(
+       res,
+       500,
+       "SERVICE_NOT_FOUND",
+       "KnowledgeService not found for getKnowledgeDocumentsHandler"
+     );
+   }
+   try {
+     const limit = req.query.limit ? Number.parseInt(req.query.limit, 10) : 20;
+     const before = req.query.before ? Number.parseInt(req.query.before, 10) : Date.now();
+     const includeEmbedding = req.query.includeEmbedding === "true";
+     const memories = await service.getMemories({
+       tableName: "documents",
+       count: limit,
+       end: before
+     });
+     const cleanMemories = includeEmbedding ? memories : memories.map((memory) => ({
+       ...memory,
+       embedding: void 0
+     }));
+     sendSuccess(res, { memories: cleanMemories });
+   } catch (error) {
+     logger5.error("[KNOWLEDGE GET HANDLER] Error retrieving documents:", error);
+     sendError(res, 500, "RETRIEVAL_ERROR", "Failed to retrieve documents", error.message);
+   }
+ }
+ async function deleteKnowledgeDocumentHandler(req, res, runtime) {
+   const service = runtime.getService(KnowledgeService.serviceType);
+   if (!service) {
+     return sendError(
+       res,
+       500,
+       "SERVICE_NOT_FOUND",
+       "KnowledgeService not found for deleteKnowledgeDocumentHandler"
+     );
+   }
+   const knowledgeId = req.path.split("/documents/")[1];
+   if (!knowledgeId || knowledgeId.length < 36) {
+     return sendError(res, 400, "INVALID_ID", "Invalid Knowledge ID format");
+   }
+   try {
+     await service.deleteMemory(knowledgeId);
+     sendSuccess(res, null, 204);
+   } catch (error) {
+     logger5.error(`[KNOWLEDGE DELETE HANDLER] Error deleting document ${knowledgeId}:`, error);
+     sendError(res, 500, "DELETE_ERROR", "Failed to delete document", error.message);
+   }
+ }
+ async function knowledgePanelHandler(req, res, runtime) {
+   try {
+     const currentDir = path3.dirname(new URL(import.meta.url).pathname);
+     const frontendPath = path3.join(currentDir, "../dist/index.html");
+     if (fs3.existsSync(frontendPath)) {
+       const html = await fs3.promises.readFile(frontendPath, "utf8");
+       res.writeHead(200, { "Content-Type": "text/html" });
+       res.end(html);
+     } else {
+       const html = `
+ <!DOCTYPE html>
+ <html lang="en">
+ <head>
+ <meta charset="UTF-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
+ <title>Knowledge</title>
+ <link rel="stylesheet" href="./assets/index-BlRATUqY.css">
+ <style>
+ body { font-family: system-ui, -apple-system, sans-serif; margin: 0; padding: 20px; }
+ .container { max-width: 1200px; margin: 0 auto; }
+ .loading { text-align: center; padding: 40px; color: #666; }
+ </style>
+ </head>
+ <body>
+ <div class="container">
+ <div id="knowledge-root">
+ <div class="loading">Loading Knowledge Library...</div>
+ </div>
+ </div>
+ <script type="module" src="./assets/index-7riujMow.js"></script>
+ </body>
+ </html>`;
+       res.writeHead(200, { "Content-Type": "text/html" });
+       res.end(html);
+     }
+   } catch (error) {
+     logger5.error("[KNOWLEDGE PANEL] Error serving frontend:", error);
+     sendError(res, 500, "FRONTEND_ERROR", "Failed to load knowledge panel", error.message);
+   }
+ }
+ async function frontendAssetHandler(req, res, runtime) {
+   try {
+     logger5.debug(
+       `[KNOWLEDGE ASSET HANDLER] Called with req.path: ${req.path}, req.originalUrl: ${req.originalUrl}, req.params: ${JSON.stringify(req.params)}`
+     );
+     const currentDir = path3.dirname(new URL(import.meta.url).pathname);
+     const assetRequestPath = req.path;
+     const assetsMarker = "/assets/";
+     const assetsStartIndex = assetRequestPath.indexOf(assetsMarker);
+     let assetName = null;
+     if (assetsStartIndex !== -1) {
+       assetName = assetRequestPath.substring(assetsStartIndex + assetsMarker.length);
+     }
+     if (!assetName || assetName.includes("..")) {
+       return sendError(
+         res,
+         400,
+         "BAD_REQUEST",
+         `Invalid asset name: '${assetName}' from path ${assetRequestPath}`
+       );
+     }
+     const assetPath = path3.join(currentDir, "../dist/assets", assetName);
+     logger5.debug(`[KNOWLEDGE ASSET HANDLER] Attempting to serve asset: ${assetPath}`);
+     if (fs3.existsSync(assetPath)) {
+       const fileStream = fs3.createReadStream(assetPath);
+       let contentType = "application/octet-stream";
+       if (assetPath.endsWith(".js")) {
+         contentType = "application/javascript";
+       } else if (assetPath.endsWith(".css")) {
+         contentType = "text/css";
+       }
+       res.writeHead(200, { "Content-Type": contentType });
+       fileStream.pipe(res);
+     } else {
+       sendError(res, 404, "NOT_FOUND", `Asset not found: ${req.url}`);
+     }
+   } catch (error) {
+     logger5.error(`[KNOWLEDGE ASSET HANDLER] Error serving asset ${req.url}:`, error);
+     sendError(res, 500, "ASSET_ERROR", `Failed to load asset ${req.url}`, error.message);
+   }
+ }
+ var knowledgeRoutes = [
+   {
+     type: "GET",
+     name: "Knowledge",
+     path: "/display",
+     handler: knowledgePanelHandler,
+     public: true
+   },
+   {
+     type: "GET",
+     path: "/assets/*",
+     handler: frontendAssetHandler
+   },
+   {
+     type: "POST",
+     path: "/upload",
+     handler: uploadKnowledgeHandler,
+     isMultipart: true
+   },
+   {
+     type: "GET",
+     path: "/documents",
+     handler: getKnowledgeDocumentsHandler
+   },
+   {
+     type: "DELETE",
+     path: "/documents/*",
+     handler: deleteKnowledgeDocumentHandler
+   }
+ ];
+
  // src/index.ts
  var knowledgePlugin = {
    name: "knowledge",
    description: "Plugin for Retrieval Augmented Generation, including knowledge management and embedding.",
    config: {
-     // Token limits
-     MAX_INPUT_TOKENS: process.env.MAX_INPUT_TOKENS,
-     MAX_OUTPUT_TOKENS: process.env.MAX_OUTPUT_TOKENS,
+     // Token limits - these will be read from runtime settings during init
+     MAX_INPUT_TOKENS: "4000",
+     MAX_OUTPUT_TOKENS: "4096",
      // Contextual Knowledge settings
-     CTX_KNOWLEDGE_ENABLED: process.env.CTX_KNOWLEDGE_ENABLED || "false"
+     CTX_KNOWLEDGE_ENABLED: "false"
    },
    async init(config, runtime) {
      logger6.info("Initializing Knowledge Plugin...");
      try {
        logger6.info("Validating model configuration for Knowledge plugin...");
-       const validatedConfig = validateModelConfig();
+       const validatedConfig = validateModelConfig(runtime);
        if (validatedConfig.CTX_KNOWLEDGE_ENABLED) {
-         logger6.info(
-           "Running in Contextual Knowledge mode with text generation capabilities."
-         );
+         logger6.info("Running in Contextual Knowledge mode with text generation capabilities.");
          logger6.info(
            `Using ${validatedConfig.EMBEDDING_PROVIDER} for embeddings and ${validatedConfig.TEXT_PROVIDER} for text generation.`
          );
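
The hunk above is the bulk of this release: a new src/routes.ts exposing the knowledge base over HTTP through five route descriptors (panel display, frontend assets, multipart upload, document listing, document deletion), plus fixed config defaults in place of module-load-time process.env reads. As a rough sketch of how a client might drive the new upload, list, and delete endpoints (the base URL and the multipart field name below are assumptions; the mount prefix is chosen by the host elizaOS server, not by this package):

// Hypothetical client for the new knowledge routes (Node 18+ global fetch/FormData/Blob).
// BASE is an assumption: the real prefix depends on how the host mounts plugin routes.
const BASE = "http://localhost:3000/api/agents/my-agent/plugins/knowledge";

async function uploadDocument(filename, text) {
  const form = new FormData();
  // uploadKnowledgeHandler iterates req.files (multer-style) and reads an optional
  // worldId from req.body; the "files" field name here is an assumption.
  form.append("files", new Blob([text], { type: "text/plain" }), filename);
  const res = await fetch(`${BASE}/upload`, { method: "POST", body: form });
  return res.json(); // { success: true, data: [{ id, filename, status, ... }] }
}

async function listDocuments(limit = 20) {
  // getKnowledgeDocumentsHandler understands limit, before, and includeEmbedding query params.
  const res = await fetch(`${BASE}/documents?limit=${limit}&includeEmbedding=false`);
  return res.json(); // { success: true, data: { memories: [...] } }
}

async function deleteDocument(knowledgeId) {
  // deleteKnowledgeDocumentHandler takes the UUID after /documents/ and replies 204 on success.
  await fetch(`${BASE}/documents/${knowledgeId}`, { method: "DELETE" });
}
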
@@ -3080,21 +3253,17 @@ var knowledgePlugin = {
        }
        logger6.info("Model configuration validated successfully.");
        if (runtime) {
-         logger6.info(
-           `Knowledge Plugin initialized for agent: ${runtime.agentId}`
-         );
+         logger6.info(`Knowledge Plugin initialized for agent: ${runtime.agentId}`);
          const loadDocsOnStartup = config.LOAD_DOCS_ON_STARTUP !== "false" && process.env.LOAD_DOCS_ON_STARTUP !== "false";
          if (loadDocsOnStartup) {
            setTimeout(async () => {
              try {
                const service = runtime.getService(KnowledgeService.serviceType);
                if (service instanceof KnowledgeService) {
-                 const { loadDocsFromPath } = await import("./docs-loader-G3WS433E.js");
-                 const result = await loadDocsFromPath(service, runtime.agentId);
+                 const { loadDocsFromPath: loadDocsFromPath2 } = await import("./docs-loader-25N4HXDV.js");
+                 const result = await loadDocsFromPath2(service, runtime.agentId);
                  if (result.successful > 0) {
-                   logger6.info(
-                     `Loaded ${result.successful} documents from docs folder on startup`
-                   );
+                   logger6.info(`Loaded ${result.successful} documents from docs folder on startup`);
                  }
                }
              } catch (error) {
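
One behavioral detail in the hunk above: startup document loading stays enabled unless either the plugin config or the environment sets LOAD_DOCS_ON_STARTUP to the literal string "false". A minimal sketch of that gate (shouldLoadDocs is an illustrative name, not a plugin export):

// Mirrors the loadDocsOnStartup condition in init(): only the exact string "false" disables it.
function shouldLoadDocs(config, env) {
  return config.LOAD_DOCS_ON_STARTUP !== "false" && env.LOAD_DOCS_ON_STARTUP !== "false";
}

console.log(shouldLoadDocs({}, {})); // true - unset counts as enabled
console.log(shouldLoadDocs({ LOAD_DOCS_ON_STARTUP: "false" }, {})); // false
console.log(shouldLoadDocs({}, { LOAD_DOCS_ON_STARTUP: "0" })); // true - "0" is not the string "false"
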
@@ -3103,7 +3272,9 @@ var knowledgePlugin = {
            }, 5e3);
          }
        }
-       logger6.info("Knowledge Plugin initialized.");
+       logger6.info(
+         "Knowledge Plugin initialized. Frontend panel should be discoverable via its public route."
+       );
      } catch (error) {
        logger6.error("Failed to initialize Knowledge plugin:", error);
        throw error;
@@ -3111,6 +3282,7 @@ var knowledgePlugin = {
    },
    services: [KnowledgeService],
    providers: [knowledgeProvider],
+   routes: knowledgeRoutes,
    actions: knowledgeActions,
    tests: [tests_default]
  };
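
Finally, the plugin object now carries routes: knowledgeRoutes, letting the host server discover the HTTP endpoints defined in src/routes.ts. The dispatcher below is purely illustrative (it is not the elizaOS server's actual router), but it shows how a consumer of these descriptors ({ type, path, handler, public?, isMultipart? }) might match them, including the trailing "/*" wildcard used by the assets and delete routes:

// Hypothetical dispatcher over the plugin's route descriptors; the matching logic is ours.
async function dispatch(routes, req, res, runtime) {
  for (const route of routes) {
    const methodMatches = req.method === route.type;
    // Wildcard paths like "/documents/*" match any suffix after the prefix.
    const pathMatches = route.path.endsWith("/*")
      ? req.path.startsWith(route.path.slice(0, -1))
      : req.path === route.path;
    if (methodMatches && pathMatches) {
      return route.handler(req, res, runtime);
    }
  }
  res.writeHead(404, { "Content-Type": "application/json" });
  res.end(JSON.stringify({ success: false, error: { code: "NOT_FOUND", message: `No route for ${req.method} ${req.path}` } }));
}
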