@grec0/memory-bank-mcp 0.0.3 → 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,215 @@
+ import { getUserAgent } from "universal-user-agent";
+ import { createPlankaError } from "./errors.js";
+ import { VERSION } from "./version.js";
+ import https from "https";
+ import nodeFetch from "node-fetch";
+ // Global variables to store tokens
+ let agentToken = null;
+ // Create HTTPS agent for insecure connections if needed
+ let httpsAgent;
+ if (process.env.PLANKA_ALLOW_INSECURE === "true") {
+     console.error("[DEBUG] Allowing insecure HTTPS connections (certificate validation disabled)");
+     httpsAgent = new https.Agent({
+         rejectUnauthorized: false,
+     });
+ }
+ // Custom fetch function that uses the agent
+ const customFetch = httpsAgent
+     ? (url, options) => {
+         return nodeFetch(url, { ...options, agent: httpsAgent });
+     }
+     : fetch;
+ async function parseResponseBody(response) {
+     const contentType = response.headers.get("content-type");
+     if (contentType?.includes("application/json")) {
+         return response.json();
+     }
+     return response.text();
+ }
+ export function buildUrl(baseUrl, params) {
+     const url = new URL(baseUrl);
+     Object.entries(params).forEach(([key, value]) => {
+         if (value !== undefined) {
+             url.searchParams.append(key, value.toString());
+         }
+     });
+     return url.toString();
+ }
+ const USER_AGENT = `modelcontextprotocol/servers/planka/v${VERSION} ${getUserAgent()}`;
+ async function authenticateAgent() {
+     const email = process.env.PLANKA_AGENT_EMAIL;
+     const password = process.env.PLANKA_AGENT_PASSWORD;
+     console.error(`[DEBUG] Attempting authentication with email: ${email}`);
+     if (!email || !password) {
+         throw new Error("PLANKA_AGENT_EMAIL and PLANKA_AGENT_PASSWORD environment variables are required");
+     }
+     const baseUrl = process.env.PLANKA_BASE_URL || "http://localhost:3000";
+     // Build the URL correctly for the access-tokens endpoint
+     const url = baseUrl.endsWith('/')
+         ? `${baseUrl}api/access-tokens`
+         : `${baseUrl}/api/access-tokens`;
+     console.error(`[DEBUG] Authentication URL: ${url}`);
+     console.error(`[DEBUG] Base URL: ${baseUrl}`);
+     try {
+         const requestBody = JSON.stringify({
+             emailOrUsername: email,
+             password: password,
+         });
+         console.error(`[DEBUG] Request body: ${requestBody}`);
+         const response = await customFetch(url, {
+             method: "POST",
+             headers: {
+                 "Accept": "application/json",
+                 "Content-Type": "application/json",
+                 "User-Agent": USER_AGENT,
+             },
+             body: requestBody,
+             credentials: "include",
+         });
+         console.error(`[DEBUG] Response status: ${response.status}`);
+         console.error(`[DEBUG] Response headers:`, Object.fromEntries(response.headers.entries()));
+         const responseBody = await parseResponseBody(response);
+         console.error(`[DEBUG] Response body:`, responseBody);
+         if (!response.ok) {
+             throw createPlankaError(response.status, responseBody);
+         }
+         // The token is directly in the item field
+         const { item } = responseBody;
+         agentToken = item;
+         console.error(`[DEBUG] Authentication successful, token length: ${item?.length}`);
+         return item;
+     }
+     catch (error) {
+         console.error(`[DEBUG] Authentication error:`, error);
+         // Rethrow with more context
+         const errorMessage = error instanceof Error ? error.message : String(error);
+         throw new Error(`Failed to authenticate agent with Planka: ${errorMessage}`);
+     }
+ }
+ async function getAuthToken() {
+     if (agentToken) {
+         return agentToken;
+     }
+     return authenticateAgent();
+ }
+ export async function plankaRequest(path, options = {}) {
+     const baseUrl = process.env.PLANKA_BASE_URL || "http://localhost:3000";
+     // Ensure path starts with /api/ if not already present
+     const normalizedPath = path.startsWith("/api/") ? path : `/api/${path}`;
+     // Build the URL correctly
+     const url = baseUrl.endsWith('/')
+         ? `${baseUrl}${normalizedPath.substring(1)}` // Remove leading slash if baseUrl ends with /
+         : `${baseUrl}${normalizedPath}`;
+     const headers = {
+         "Accept": "application/json",
+         "Content-Type": "application/json",
+         "User-Agent": USER_AGENT,
+         ...options.headers,
+     };
+     // Remove Content-Type header for FormData
+     if (options.body instanceof FormData) {
+         delete headers["Content-Type"];
+     }
+     // Add authentication token if not skipped
+     if (!options.skipAuth) {
+         try {
+             const token = await getAuthToken();
+             headers["Authorization"] = `Bearer ${token}`;
+         }
+         catch (error) {
+             const errorMessage = error instanceof Error
+                 ? error.message
+                 : String(error);
+             throw new Error(`Failed to get authentication token: ${errorMessage}`);
+         }
+     }
+     try {
+         const response = await customFetch(url, {
+             method: options.method || "GET",
+             headers,
+             body: options.body instanceof FormData
+                 ? options.body
+                 : options.body
+                     ? JSON.stringify(options.body)
+                     : undefined,
+             credentials: "include", // Include cookies for Planka authentication
+         });
+         const responseBody = await parseResponseBody(response);
+         if (!response.ok) {
+             throw createPlankaError(response.status, responseBody);
+         }
+         return responseBody;
+     }
+     catch (error) {
+         const errorMessage = error instanceof Error ? error.message : String(error);
+         throw new Error(`Failed to make Planka request to ${url}: ${errorMessage}`);
+     }
+ }
+ export function validateProjectName(name) {
+     const sanitized = name.trim();
+     if (!sanitized) {
+         throw new Error("Project name cannot be empty");
+     }
+     return sanitized;
+ }
+ export function validateBoardName(name) {
+     const sanitized = name.trim();
+     if (!sanitized) {
+         throw new Error("Board name cannot be empty");
+     }
+     return sanitized;
+ }
+ export function validateListName(name) {
+     const sanitized = name.trim();
+     if (!sanitized) {
+         throw new Error("List name cannot be empty");
+     }
+     return sanitized;
+ }
+ export function validateCardName(name) {
+     const sanitized = name.trim();
+     if (!sanitized) {
+         throw new Error("Card name cannot be empty");
+     }
+     return sanitized;
+ }
+ /**
+  * Looks up a user ID by email
+  *
+  * @param {string} email - The email of the user to look up
+  * @returns {Promise<string | null>} The user ID if found, null otherwise
+  */
+ export async function getUserIdByEmail(email) {
+     try {
+         // Get all users
+         const response = await plankaRequest("/api/users");
+         const { items } = response;
+         // Find the user with the matching email
+         const user = items.find((user) => user.email === email);
+         return user ? user.id : null;
+     }
+     catch (error) {
+         console.error(`Failed to get user ID by email: ${error instanceof Error ? error.message : String(error)}`);
+         return null;
+     }
+ }
+ /**
+  * Looks up a user ID by username
+  *
+  * @param {string} username - The username of the user to look up
+  * @returns {Promise<string | null>} The user ID if found, null otherwise
+  */
+ export async function getUserIdByUsername(username) {
+     try {
+         // Get all users
+         const response = await plankaRequest("/api/users");
+         const { items } = response;
+         // Find the user with the matching username
+         const user = items.find((user) => user.username === username);
+         return user ? user.id : null;
+     }
+     catch (error) {
+         console.error(`Failed to get user ID by username: ${error instanceof Error ? error.message : String(error)}`);
+         return null;
+     }
+ }
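
For context, a minimal usage sketch of the new Planka client helpers (the import path and the email address are assumptions, not shown in the diff; requires PLANKA_BASE_URL, PLANKA_AGENT_EMAIL, and PLANKA_AGENT_PASSWORD in the environment):

    // Hypothetical import path; the diff does not name the new file.
    import { plankaRequest, getUserIdByEmail } from "./common/plankaClient.js";

    // Path is normalized to /api/projects; a Bearer token is fetched
    // and cached on first use via getAuthToken().
    const projects = await plankaRequest("projects");
    // Resolves to a user ID string, or null if no user has that email.
    const userId = await getUserIdByEmail("agent@example.com");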
@@ -1,6 +1,7 @@
  /**
   * @fileoverview Vector store for Memory Bank using LanceDB
   * Manages storage and retrieval of code embeddings
+  * Uses snake_case for field names for LanceDB SQL compatibility
   */
  import * as lancedb from "@lancedb/lancedb";
  import * as fs from "fs";
@@ -122,16 +123,17 @@ export class VectorStore {
      }
  }
  /**
-  * Deletes all chunks from a specific file in a specific project
+  * Deletes all chunks from a specific file
   */
- async deleteChunksByFile(filePath, projectId) {
+ async deleteChunksByFile(filePath) {
      await this.ensureInitialized();
      if (!this.table) {
          return;
      }
      try {
-         await this.table.delete(`"filePath" = '${filePath}' AND "projectId" = '${projectId}'`);
-         console.error(`Deleted all chunks from file: ${filePath} (project: ${projectId.substring(0, 8)}...)`);
+         // Use snake_case field name for LanceDB SQL compatibility
+         await this.table.delete(`file_path = '${filePath}'`);
+         console.error(`Deleted all chunks from file: ${filePath}`);
      }
      catch (error) {
          console.error(`Error deleting chunks by file: ${error}`);
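
Note that the delete is now keyed by file path alone, so it removes that file's chunks across all projects. A minimal sketch (store construction assumed from the surrounding code):

    // Compiles to the SQL predicate: file_path = 'src/utils.ts'
    await vectorStore.deleteChunksByFile("src/utils.ts");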
@@ -152,15 +154,18 @@ export class VectorStore {
      try {
          // Start with vector search
          let query = this.table.search(queryVector).limit(topK);
-         // Apply filters if specified
+         // Apply filters if specified (using snake_case field names)
          if (options.filterByFile) {
-             query = query.where(`filePath LIKE '%${options.filterByFile}%'`);
+             query = query.where(`file_path LIKE '%${options.filterByFile}%'`);
          }
          if (options.filterByLanguage) {
              query = query.where(`language = '${options.filterByLanguage}'`);
          }
          if (options.filterByType) {
-             query = query.where(`chunkType = '${options.filterByType}'`);
+             query = query.where(`chunk_type = '${options.filterByType}'`);
+         }
+         if (options.filterByProject) {
+             query = query.where(`project_id = '${options.filterByProject}'`);
          }
          // Execute search
          const results = await query.toArray();
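
A hedged sketch of how the search options map onto snake_case SQL predicates; the enclosing method's exact signature is an assumption based on this hunk:

    // filterByFile     -> file_path LIKE '%<value>%'
    // filterByLanguage -> language = '<value>'
    // filterByType     -> chunk_type = '<value>'
    // filterByProject  -> project_id = '<value>'   (new in 0.0.4)
    const results = await vectorStore.search(queryVector, {
        filterByProject: "my-project", // hypothetical project ID
        filterByType: "function",
    });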
@@ -174,17 +179,17 @@ export class VectorStore {
          chunk: {
              id: result.id,
              vector: result.vector,
-             filePath: result.filePath,
+             file_path: result.file_path,
              content: result.content,
-             startLine: result.startLine,
-             endLine: result.endLine,
-             chunkType: result.chunkType,
+             start_line: result.start_line,
+             end_line: result.end_line,
+             chunk_type: result.chunk_type,
              name: result.name,
              language: result.language,
-             fileHash: result.fileHash,
+             file_hash: result.file_hash,
              timestamp: result.timestamp,
              context: result.context,
-             projectId: result.projectId,
+             project_id: result.project_id,
          },
          score,
          distance,
@@ -208,22 +213,23 @@ export class VectorStore {
      }
      try {
          const results = await this.table
-             .where(`filePath = '${filePath}'`)
+             .query()
+             .where(`file_path = '${filePath}'`)
              .toArray();
          return results.map((r) => ({
              id: r.id,
              vector: r.vector,
-             filePath: r.filePath,
+             file_path: r.file_path,
              content: r.content,
-             startLine: r.startLine,
-             endLine: r.endLine,
-             chunkType: r.chunkType,
+             start_line: r.start_line,
+             end_line: r.end_line,
+             chunk_type: r.chunk_type,
              name: r.name,
              language: r.language,
-             fileHash: r.fileHash,
+             file_hash: r.file_hash,
              timestamp: r.timestamp,
              context: r.context,
-             projectId: r.projectId,
+             project_id: r.project_id,
          }));
      }
      catch (error) {
@@ -231,6 +237,56 @@ export class VectorStore {
          return [];
      }
  }
+ /**
+  * Gets all chunks, optionally filtered by project
+  */
+ async getAllChunks(projectId) {
+     await this.ensureInitialized();
+     if (!this.table) {
+         console.error("getAllChunks: No table exists");
+         return [];
+     }
+     try {
+         let query = this.table.query();
+         // Apply project filter using snake_case field name
+         if (projectId) {
+             query = query.where(`project_id = '${projectId}'`);
+             console.error(`getAllChunks: Filtering by project_id='${projectId}'`);
+         }
+         const results = await query.toArray();
+         console.error(`getAllChunks: Got ${results.length} results`);
+         // Debug: Check first result's content
+         if (results.length > 0) {
+             const first = results[0];
+             console.error(`getAllChunks: First result file_path=${first.file_path}, content length=${first.content?.length || 0}`);
+         }
+         return results.map((r) => ({
+             id: r.id,
+             vector: r.vector,
+             file_path: r.file_path,
+             content: r.content,
+             start_line: r.start_line,
+             end_line: r.end_line,
+             chunk_type: r.chunk_type,
+             name: r.name,
+             language: r.language,
+             file_hash: r.file_hash,
+             timestamp: r.timestamp,
+             context: r.context,
+             project_id: r.project_id,
+         }));
+     }
+     catch (error) {
+         console.error(`Error getting all chunks: ${error}`);
+         return [];
+     }
+ }
+ /**
+  * Gets chunks by project ID
+  */
+ async getChunksByProject(projectId) {
+     return this.getAllChunks(projectId);
+ }
  /**
   * Gets statistics about the vector store
   */
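
A short usage sketch of the new accessors (the project ID is hypothetical); getChunksByProject() simply delegates to getAllChunks(projectId), and with no argument getAllChunks() returns every chunk in the table:

    const chunks = await vectorStore.getChunksByProject("abc123");
    const files = new Set(chunks.map((c) => c.file_path));
    console.error(`Project abc123: ${chunks.length} chunks in ${files.size} files`);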
@@ -245,16 +301,15 @@ export class VectorStore {
      };
  }
  try {
-     // Use query().toArray() instead of direct toArray()
      const allChunks = await this.table.query().toArray();
      const uniqueFiles = new Set();
      const languageCounts = {};
      const typeCounts = {};
      let latestTimestamp = 0;
      for (const chunk of allChunks) {
-         uniqueFiles.add(chunk.filePath);
+         uniqueFiles.add(chunk.file_path);
          languageCounts[chunk.language] = (languageCounts[chunk.language] || 0) + 1;
-         typeCounts[chunk.chunkType] = (typeCounts[chunk.chunkType] || 0) + 1;
+         typeCounts[chunk.chunk_type] = (typeCounts[chunk.chunk_type] || 0) + 1;
          if (chunk.timestamp > latestTimestamp) {
              latestTimestamp = chunk.timestamp;
          }
@@ -309,12 +364,11 @@ export class VectorStore {
          return new Map();
      }
      try {
-         // Use query().toArray() instead of direct toArray()
          const allChunks = await this.table.query().toArray();
          const fileHashes = new Map();
          for (const chunk of allChunks) {
-             if (!fileHashes.has(chunk.filePath)) {
-                 fileHashes.set(chunk.filePath, chunk.fileHash);
+             if (!fileHashes.has(chunk.file_path)) {
+                 fileHashes.set(chunk.file_path, chunk.file_hash);
              }
          }
          return fileHashes;
@@ -324,47 +378,6 @@ export class VectorStore {
          return new Map();
      }
  }
- /**
-  * Gets aggregated statistics for all indexed files in a single query
-  * Returns a map of filePath -> { lastIndexed, chunkCount, fileHash }
-  */
- async getIndexedFileStats() {
-     await this.ensureInitialized();
-     if (!this.table) {
-         return new Map();
-     }
-     try {
-         // Fetch all chunks in one go - much faster than N queries
-         // querying only necessary columns to reduce memory usage
-         const allChunks = await this.table.query()
-             .select(['filePath', 'timestamp', 'fileHash'])
-             .toArray();
-         const stats = new Map();
-         for (const chunk of allChunks) {
-             const current = stats.get(chunk.filePath);
-             if (!current) {
-                 stats.set(chunk.filePath, {
-                     lastIndexed: chunk.timestamp,
-                     chunkCount: 1,
-                     fileHash: chunk.fileHash
-                 });
-             }
-             else {
-                 // Update stats
-                 current.chunkCount++;
-                 // Keep the latest timestamp
-                 if (chunk.timestamp > current.lastIndexed) {
-                     current.lastIndexed = chunk.timestamp;
-                 }
-             }
-         }
-         return stats;
-     }
-     catch (error) {
-         console.error(`Error getting indexed file stats: ${error}`);
-         return new Map();
-     }
- }
  }
  /**
   * Creates a vector store from environment variables
package/dist/index.js CHANGED
@@ -10,6 +10,7 @@ import { z } from "zod";
  import { createEmbeddingService } from "./common/embeddingService.js";
  import { createVectorStore } from "./common/vectorStore.js";
  import { createIndexManager } from "./common/indexManager.js";
+ import { createProjectKnowledgeService } from "./common/projectKnowledgeService.js";
  // Import tools
  import { indexCode } from "./tools/indexCode.js";
  import { searchMemory } from "./tools/searchMemory.js";
@@ -17,11 +18,14 @@ import { readFile } from "./tools/readFile.js";
  import { writeFile } from "./tools/writeFile.js";
  import { getStats } from "./tools/getStats.js";
  import { analyzeCoverage } from "./tools/analyzeCoverage.js";
+ import { generateProjectDocs, generateProjectDocsToolDefinition } from "./tools/generateProjectDocs.js";
+ import { getProjectDocs, getProjectDocsToolDefinition } from "./tools/getProjectDocs.js";
  import { VERSION } from "./common/version.js";
  // Global services
  let embeddingService;
  let vectorStore;
  let indexManager;
+ let projectKnowledgeService;
  let workspaceRoot;
  // Create the MCP Server
  const server = new McpServer({
@@ -67,8 +71,8 @@ server.tool("memorybank_search", "Busca código relevante mediante búsqueda sem
      minScore: z
          .number()
          .optional()
-         .default(0.7)
-         .describe("Puntuación mínima de similitud (0-1). Valores más altos = resultados más relevantes"),
+         .default(0.4)
+         .describe("Puntuación mínima de similitud (0-1). por defecto usa 0.4 y basado en el resultado ajusta el valor"),
      filterByFile: z
          .string()
          .optional()
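
A hedged sketch of memorybank_search arguments under the looser 0.4 default; the query field name is an assumption (only minScore and the filters appear in this hunk):

    // Start at the new default, then tighten if results are too noisy:
    const args = { query: "token refresh logic", minScore: 0.4 };
    // e.g. retry with { ...args, minScore: 0.6 } to narrow the matches.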
@@ -184,6 +188,47 @@ server.tool("memorybank_analyze_coverage", "Analiza la cobertura de indexación
      };
  }
  });
+ // Tool: Generate Project Docs
+ server.tool(generateProjectDocsToolDefinition.name, generateProjectDocsToolDefinition.description, {
+     projectId: z
+         .string()
+         .optional()
+         .describe("ID del proyecto (opcional, usa 'default' si no se especifica)"),
+     force: z
+         .boolean()
+         .optional()
+         .default(false)
+         .describe("Forzar regeneración de todos los documentos aunque no hayan cambiado"),
+ }, async (args) => {
+     const result = await generateProjectDocs({
+         projectId: args.projectId,
+         force: args.force,
+     }, projectKnowledgeService, vectorStore);
+     return {
+         content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
+     };
+ });
+ // Tool: Get Project Docs
+ server.tool(getProjectDocsToolDefinition.name, getProjectDocsToolDefinition.description, {
+     document: z
+         .string()
+         .optional()
+         .default("summary")
+         .describe("Documento específico a recuperar: projectBrief, productContext, systemPatterns, techContext, activeContext, progress, all, summary"),
+     format: z
+         .enum(["full", "summary"])
+         .optional()
+         .default("full")
+         .describe("Formato de salida: 'full' devuelve contenido completo, 'summary' devuelve resumen de todos los docs"),
+ }, async (args) => {
+     const result = await getProjectDocs({
+         document: args.document,
+         format: args.format,
+     }, projectKnowledgeService);
+     return {
+         content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
+     };
+ });
  /**
   * Validates and initializes environment
   */
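
For context, a hedged sketch of MCP tool-call arguments for the two new tools (names as logged in the startup banner below; argument shapes from the zod schemas above):

    // memorybank_generate_project_docs
    const generateArgs = { projectId: "default", force: false };
    // memorybank_get_project_docs
    const getArgs = { document: "summary", format: "full" };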
@@ -208,6 +253,12 @@ async function validateEnvironment() {
      const embeddingModel = process.env.MEMORYBANK_EMBEDDING_MODEL || "text-embedding-3-small";
      const embeddingDimensions = process.env.MEMORYBANK_EMBEDDING_DIMENSIONS || "1536";
      console.error(`✓ Embedding model: ${embeddingModel} (${embeddingDimensions} dimensions)`);
+     // Project Knowledge Layer configuration
+     const reasoningModel = process.env.MEMORYBANK_REASONING_MODEL || "gpt-5-mini";
+     const reasoningEffort = process.env.MEMORYBANK_REASONING_EFFORT || "medium";
+     const autoUpdateDocs = process.env.MEMORYBANK_AUTO_UPDATE_DOCS === "true";
+     console.error(`✓ Reasoning model: ${reasoningModel} (effort: ${reasoningEffort})`);
+     console.error(`✓ Auto-update docs: ${autoUpdateDocs}`);
      // Initialize services
      console.error("\nInitializing services...");
      try {
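
A hedged sketch of the new environment knobs read here (defaults as shown in the code above; the set of accepted effort values is an assumption):

    // Project Knowledge Layer configuration, e.g. in the MCP client's env block:
    process.env.MEMORYBANK_REASONING_MODEL = "gpt-5-mini"; // default per the code
    process.env.MEMORYBANK_REASONING_EFFORT = "medium";    // assumed range: low | medium | high
    process.env.MEMORYBANK_AUTO_UPDATE_DOCS = "true";      // anything but "true" disables auto-update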
@@ -216,8 +267,21 @@ async function validateEnvironment() {
          vectorStore = createVectorStore();
          await vectorStore.initialize();
          console.error("✓ Vector store initialized");
-         indexManager = createIndexManager(embeddingService, vectorStore, workspaceRoot);
+         indexManager = createIndexManager(embeddingService, vectorStore);
          console.error("✓ Index manager initialized");
+         // Initialize Project Knowledge Service
+         try {
+             projectKnowledgeService = createProjectKnowledgeService();
+             console.error("✓ Project Knowledge service initialized");
+             // Connect to Index Manager for auto-update hooks
+             indexManager.setProjectKnowledgeService(projectKnowledgeService);
+             indexManager.setAutoUpdateDocs(autoUpdateDocs);
+             console.error("✓ Project Knowledge service connected to Index Manager");
+         }
+         catch (error) {
+             console.error(`⚠ Warning: Project Knowledge service not available: ${error}`);
+             console.error("  Project documentation features will be disabled.");
+         }
      }
      catch (error) {
          console.error(`ERROR: Failed to initialize services: ${error}`);
@@ -241,12 +305,16 @@ async function startStdioServer() {
      await server.connect(transport);
      console.error("\n=== MCP Server Ready ===");
      console.error("Available tools:");
-     console.error("  - memorybank_index_code: Indexar código semánticamente");
-     console.error("  - memorybank_search: Buscar código por similitud semántica");
-     console.error("  - memorybank_read_file: Leer archivos del workspace");
-     console.error("  - memorybank_write_file: Escribir archivos y reindexar");
-     console.error("  - memorybank_get_stats: Obtener estadísticas del índice");
-     console.error("  - memorybank_analyze_coverage: Analizar cobertura de indexación");
+     console.error("  Core Memory Bank:");
+     console.error("    - memorybank_index_code: Indexar código semánticamente");
+     console.error("    - memorybank_search: Buscar código por similitud semántica");
+     console.error("    - memorybank_read_file: Leer archivos del workspace");
+     console.error("    - memorybank_write_file: Escribir archivos y reindexar");
+     console.error("    - memorybank_get_stats: Obtener estadísticas del índice");
+     console.error("    - memorybank_analyze_coverage: Analizar cobertura de indexación");
+     console.error("  Project Knowledge Layer:");
+     console.error("    - memorybank_generate_project_docs: Generar documentación con IA");
+     console.error("    - memorybank_get_project_docs: Leer documentación del proyecto");
      console.error("");
      console.error("Ready to accept requests...\n");
  }