@grec0/memory-bank-mcp 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +642 -420
- package/dist/common/chunker.js +166 -534
- package/dist/common/embeddingService.js +39 -51
- package/dist/common/fileScanner.js +123 -58
- package/dist/common/indexManager.js +185 -108
- package/dist/common/projectKnowledgeService.js +627 -0
- package/dist/common/setup.js +49 -0
- package/dist/common/utils.js +215 -0
- package/dist/common/vectorStore.js +80 -67
- package/dist/index.js +106 -14
- package/dist/operations/boardMemberships.js +186 -0
- package/dist/operations/boards.js +268 -0
- package/dist/operations/cards.js +426 -0
- package/dist/operations/comments.js +249 -0
- package/dist/operations/labels.js +258 -0
- package/dist/operations/lists.js +157 -0
- package/dist/operations/projects.js +102 -0
- package/dist/operations/tasks.js +238 -0
- package/dist/tools/analyzeCoverage.js +50 -67
- package/dist/tools/board-summary.js +151 -0
- package/dist/tools/card-details.js +106 -0
- package/dist/tools/create-card-with-tasks.js +81 -0
- package/dist/tools/generateProjectDocs.js +133 -0
- package/dist/tools/getProjectDocs.js +126 -0
- package/dist/tools/index.js +3 -0
- package/dist/tools/indexCode.js +4 -2
- package/dist/tools/searchMemory.js +4 -2
- package/dist/tools/workflow-actions.js +145 -0
- package/dist/tools/writeFile.js +2 -2
- package/package.json +2 -2
package/dist/common/utils.js
ADDED

@@ -0,0 +1,215 @@
+import { getUserAgent } from "universal-user-agent";
+import { createPlankaError } from "./errors.js";
+import { VERSION } from "./version.js";
+import https from "https";
+import nodeFetch from "node-fetch";
+// Global variables to store tokens
+let agentToken = null;
+// Create HTTPS agent for insecure connections if needed
+let httpsAgent;
+if (process.env.PLANKA_ALLOW_INSECURE === "true") {
+    console.error("[DEBUG] Allowing insecure HTTPS connections (certificate validation disabled)");
+    httpsAgent = new https.Agent({
+        rejectUnauthorized: false,
+    });
+}
+// Custom fetch function that uses the agent
+const customFetch = httpsAgent
+    ? (url, options) => {
+        return nodeFetch(url, { ...options, agent: httpsAgent });
+    }
+    : fetch;
+async function parseResponseBody(response) {
+    const contentType = response.headers.get("content-type");
+    if (contentType?.includes("application/json")) {
+        return response.json();
+    }
+    return response.text();
+}
+export function buildUrl(baseUrl, params) {
+    const url = new URL(baseUrl);
+    Object.entries(params).forEach(([key, value]) => {
+        if (value !== undefined) {
+            url.searchParams.append(key, value.toString());
+        }
+    });
+    return url.toString();
+}
+const USER_AGENT = `modelcontextprotocol/servers/planka/v${VERSION} ${getUserAgent()}`;
+async function authenticateAgent() {
+    const email = process.env.PLANKA_AGENT_EMAIL;
+    const password = process.env.PLANKA_AGENT_PASSWORD;
+    console.error(`[DEBUG] Attempting authentication with email: ${email}`);
+    if (!email || !password) {
+        throw new Error("PLANKA_AGENT_EMAIL and PLANKA_AGENT_PASSWORD environment variables are required");
+    }
+    const baseUrl = process.env.PLANKA_BASE_URL || "http://localhost:3000";
+    // Build the URL correctly for the access-tokens endpoint
+    const url = baseUrl.endsWith('/')
+        ? `${baseUrl}api/access-tokens`
+        : `${baseUrl}/api/access-tokens`;
+    console.error(`[DEBUG] Authentication URL: ${url}`);
+    console.error(`[DEBUG] Base URL: ${baseUrl}`);
+    try {
+        const requestBody = JSON.stringify({
+            emailOrUsername: email,
+            password: password,
+        });
+        console.error(`[DEBUG] Request body: ${requestBody}`);
+        const response = await customFetch(url, {
+            method: "POST",
+            headers: {
+                "Accept": "application/json",
+                "Content-Type": "application/json",
+                "User-Agent": USER_AGENT,
+            },
+            body: requestBody,
+            credentials: "include",
+        });
+        console.error(`[DEBUG] Response status: ${response.status}`);
+        console.error(`[DEBUG] Response headers:`, Object.fromEntries(response.headers.entries()));
+        const responseBody = await parseResponseBody(response);
+        console.error(`[DEBUG] Response body:`, responseBody);
+        if (!response.ok) {
+            throw createPlankaError(response.status, responseBody);
+        }
+        // The token is directly in the item field
+        const { item } = responseBody;
+        agentToken = item;
+        console.error(`[DEBUG] Authentication successful, token length: ${item?.length}`);
+        return item;
+    }
+    catch (error) {
+        console.error(`[DEBUG] Authentication error:`, error);
+        // Rethrow with more context
+        const errorMessage = error instanceof Error ? error.message : String(error);
+        throw new Error(`Failed to authenticate agent with Planka: ${errorMessage}`);
+    }
+}
+async function getAuthToken() {
+    if (agentToken) {
+        return agentToken;
+    }
+    return authenticateAgent();
+}
+export async function plankaRequest(path, options = {}) {
+    const baseUrl = process.env.PLANKA_BASE_URL || "http://localhost:3000";
+    // Ensure path starts with /api/ if not already present
+    const normalizedPath = path.startsWith("/api/") ? path : `/api/${path}`;
+    // Build the URL correctly
+    const url = baseUrl.endsWith('/')
+        ? `${baseUrl}${normalizedPath.substring(1)}` // Remove leading slash if baseUrl ends with /
+        : `${baseUrl}${normalizedPath}`;
+    const headers = {
+        "Accept": "application/json",
+        "Content-Type": "application/json",
+        "User-Agent": USER_AGENT,
+        ...options.headers,
+    };
+    // Remove Content-Type header for FormData
+    if (options.body instanceof FormData) {
+        delete headers["Content-Type"];
+    }
+    // Add authentication token if not skipped
+    if (!options.skipAuth) {
+        try {
+            const token = await getAuthToken();
+            headers["Authorization"] = `Bearer ${token}`;
+        }
+        catch (error) {
+            const errorMessage = error instanceof Error
+                ? error.message
+                : String(error);
+            throw new Error(`Failed to get authentication token: ${errorMessage}`);
+        }
+    }
+    try {
+        const response = await customFetch(url, {
+            method: options.method || "GET",
+            headers,
+            body: options.body instanceof FormData
+                ? options.body
+                : options.body
+                    ? JSON.stringify(options.body)
+                    : undefined,
+            credentials: "include", // Include cookies for Planka authentication
+        });
+        const responseBody = await parseResponseBody(response);
+        if (!response.ok) {
+            throw createPlankaError(response.status, responseBody);
+        }
+        return responseBody;
+    }
+    catch (error) {
+        const errorMessage = error instanceof Error ? error.message : String(error);
+        throw new Error(`Failed to make Planka request to ${url}: ${errorMessage}`);
+    }
+}
+export function validateProjectName(name) {
+    const sanitized = name.trim();
+    if (!sanitized) {
+        throw new Error("Project name cannot be empty");
+    }
+    return sanitized;
+}
+export function validateBoardName(name) {
+    const sanitized = name.trim();
+    if (!sanitized) {
+        throw new Error("Board name cannot be empty");
+    }
+    return sanitized;
+}
+export function validateListName(name) {
+    const sanitized = name.trim();
+    if (!sanitized) {
+        throw new Error("List name cannot be empty");
+    }
+    return sanitized;
+}
+export function validateCardName(name) {
+    const sanitized = name.trim();
+    if (!sanitized) {
+        throw new Error("Card name cannot be empty");
+    }
+    return sanitized;
+}
+/**
+ * Looks up a user ID by email
+ *
+ * @param {string} email - The email of the user to look up
+ * @returns {Promise<string | null>} The user ID if found, null otherwise
+ */
+export async function getUserIdByEmail(email) {
+    try {
+        // Get all users
+        const response = await plankaRequest("/api/users");
+        const { items } = response;
+        // Find the user with the matching email
+        const user = items.find((user) => user.email === email);
+        return user ? user.id : null;
+    }
+    catch (error) {
+        console.error(`Failed to get user ID by email: ${error instanceof Error ? error.message : String(error)}`);
+        return null;
+    }
+}
+/**
+ * Looks up a user ID by username
+ *
+ * @param {string} username - The username of the user to look up
+ * @returns {Promise<string | null>} The user ID if found, null otherwise
+ */
+export async function getUserIdByUsername(username) {
+    try {
+        // Get all users
+        const response = await plankaRequest("/api/users");
+        const { items } = response;
+        // Find the user with the matching username
+        const user = items.find((user) => user.username === username);
+        return user ? user.id : null;
+    }
+    catch (error) {
+        console.error(`Failed to get user ID by username: ${error instanceof Error ? error.message : String(error)}`);
+        return null;
+    }
+}
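The new utils.js gives the Planka operations modules a single request helper. A minimal usage sketch, assuming the module can be imported from the built package (the "projects" call below is illustrative; only plankaRequest and buildUrl are shown as exports in this diff):

    // Requires PLANKA_BASE_URL, PLANKA_AGENT_EMAIL and PLANKA_AGENT_PASSWORD to be set;
    // plankaRequest authenticates lazily on the first call and caches the token in memory.
    import { plankaRequest, buildUrl } from "@grec0/memory-bank-mcp/dist/common/utils.js"; // hypothetical import path

    const projects = await plankaRequest("projects"); // path is normalized to /api/projects
    console.log(`Found ${projects.items?.length ?? 0} projects`);

    // buildUrl drops undefined params, so optional filters can be passed through as-is.
    const url = buildUrl("http://localhost:3000/api/cards", { listId: "123", page: undefined });
    console.log(url); // http://localhost:3000/api/cards?listId=123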
package/dist/common/vectorStore.js
CHANGED

@@ -1,6 +1,7 @@
 /**
  * @fileoverview Vector store for Memory Bank using LanceDB
  * Manages storage and retrieval of code embeddings
+ * Uses snake_case for field names for LanceDB SQL compatibility
  */
 import * as lancedb from "@lancedb/lancedb";
 import * as fs from "fs";
@@ -122,16 +123,17 @@ export class VectorStore {
         }
     }
     /**
-     * Deletes all chunks from a specific file
+     * Deletes all chunks from a specific file
      */
-    async deleteChunksByFile(filePath
+    async deleteChunksByFile(filePath) {
         await this.ensureInitialized();
         if (!this.table) {
             return;
         }
         try {
-
-
+            // Use snake_case field name for LanceDB SQL compatibility
+            await this.table.delete(`file_path = '${filePath}'`);
+            console.error(`Deleted all chunks from file: ${filePath}`);
         }
         catch (error) {
             console.error(`Error deleting chunks by file: ${error}`);
@@ -152,15 +154,18 @@ export class VectorStore {
         try {
             // Start with vector search
             let query = this.table.search(queryVector).limit(topK);
-            // Apply filters if specified
+            // Apply filters if specified (using snake_case field names)
             if (options.filterByFile) {
-                query = query.where(`
+                query = query.where(`file_path LIKE '%${options.filterByFile}%'`);
             }
             if (options.filterByLanguage) {
                 query = query.where(`language = '${options.filterByLanguage}'`);
             }
             if (options.filterByType) {
-                query = query.where(`
+                query = query.where(`chunk_type = '${options.filterByType}'`);
+            }
+            if (options.filterByProject) {
+                query = query.where(`project_id = '${options.filterByProject}'`);
             }
             // Execute search
             const results = await query.toArray();
@@ -174,17 +179,17 @@ export class VectorStore {
                 chunk: {
                     id: result.id,
                     vector: result.vector,
-
+                    file_path: result.file_path,
                     content: result.content,
-
-
-
+                    start_line: result.start_line,
+                    end_line: result.end_line,
+                    chunk_type: result.chunk_type,
                     name: result.name,
                     language: result.language,
-
+                    file_hash: result.file_hash,
                     timestamp: result.timestamp,
                     context: result.context,
-
+                    project_id: result.project_id,
                 },
                 score,
                 distance,
@@ -208,22 +213,23 @@ export class VectorStore {
         }
         try {
             const results = await this.table
-                .
+                .query()
+                .where(`file_path = '${filePath}'`)
                 .toArray();
             return results.map((r) => ({
                 id: r.id,
                 vector: r.vector,
-
+                file_path: r.file_path,
                 content: r.content,
-
-
-
+                start_line: r.start_line,
+                end_line: r.end_line,
+                chunk_type: r.chunk_type,
                 name: r.name,
                 language: r.language,
-
+                file_hash: r.file_hash,
                 timestamp: r.timestamp,
                 context: r.context,
-
+                project_id: r.project_id,
             }));
         }
         catch (error) {
@@ -231,6 +237,56 @@ export class VectorStore {
             return [];
         }
     }
+    /**
+     * Gets all chunks, optionally filtered by project
+     */
+    async getAllChunks(projectId) {
+        await this.ensureInitialized();
+        if (!this.table) {
+            console.error("getAllChunks: No table exists");
+            return [];
+        }
+        try {
+            let query = this.table.query();
+            // Apply project filter using snake_case field name
+            if (projectId) {
+                query = query.where(`project_id = '${projectId}'`);
+                console.error(`getAllChunks: Filtering by project_id='${projectId}'`);
+            }
+            const results = await query.toArray();
+            console.error(`getAllChunks: Got ${results.length} results`);
+            // Debug: Check first result's content
+            if (results.length > 0) {
+                const first = results[0];
+                console.error(`getAllChunks: First result file_path=${first.file_path}, content length=${first.content?.length || 0}`);
+            }
+            return results.map((r) => ({
+                id: r.id,
+                vector: r.vector,
+                file_path: r.file_path,
+                content: r.content,
+                start_line: r.start_line,
+                end_line: r.end_line,
+                chunk_type: r.chunk_type,
+                name: r.name,
+                language: r.language,
+                file_hash: r.file_hash,
+                timestamp: r.timestamp,
+                context: r.context,
+                project_id: r.project_id,
+            }));
+        }
+        catch (error) {
+            console.error(`Error getting all chunks: ${error}`);
+            return [];
+        }
+    }
+    /**
+     * Gets chunks by project ID
+     */
+    async getChunksByProject(projectId) {
+        return this.getAllChunks(projectId);
+    }
     /**
      * Gets statistics about the vector store
      */
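A sketch of the new per-project read path, assuming a store created via the factory this file already exposes. Note that filter values are interpolated directly into the LanceDB where-clause (project_id = '...'), so project IDs should come from trusted configuration rather than raw user input:

    import { createVectorStore } from "@grec0/memory-bank-mcp/dist/common/vectorStore.js"; // hypothetical import path

    const store = createVectorStore(); // configured from environment variables
    await store.initialize();

    // getChunksByProject delegates to getAllChunks(projectId), as added above.
    const chunks = await store.getChunksByProject("my-project"); // hypothetical projectId
    for (const c of chunks) {
        console.log(`${c.file_path}:${c.start_line}-${c.end_line} [${c.chunk_type}] ${c.name}`);
    }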
@@ -245,16 +301,15 @@ export class VectorStore {
             };
         }
         try {
-            // Use query().toArray() instead of direct toArray()
             const allChunks = await this.table.query().toArray();
             const uniqueFiles = new Set();
             const languageCounts = {};
             const typeCounts = {};
             let latestTimestamp = 0;
             for (const chunk of allChunks) {
-                uniqueFiles.add(chunk.
+                uniqueFiles.add(chunk.file_path);
                 languageCounts[chunk.language] = (languageCounts[chunk.language] || 0) + 1;
-                typeCounts[chunk.
+                typeCounts[chunk.chunk_type] = (typeCounts[chunk.chunk_type] || 0) + 1;
                 if (chunk.timestamp > latestTimestamp) {
                     latestTimestamp = chunk.timestamp;
                 }
@@ -309,12 +364,11 @@ export class VectorStore {
             return new Map();
         }
         try {
-            // Use query().toArray() instead of direct toArray()
             const allChunks = await this.table.query().toArray();
             const fileHashes = new Map();
             for (const chunk of allChunks) {
-                if (!fileHashes.has(chunk.
-                    fileHashes.set(chunk.
+                if (!fileHashes.has(chunk.file_path)) {
+                    fileHashes.set(chunk.file_path, chunk.file_hash);
                 }
             }
             return fileHashes;
@@ -324,47 +378,6 @@ export class VectorStore {
             return new Map();
         }
     }
-    /**
-     * Gets aggregated statistics for all indexed files in a single query
-     * Returns a map of filePath -> { lastIndexed, chunkCount, fileHash }
-     */
-    async getIndexedFileStats() {
-        await this.ensureInitialized();
-        if (!this.table) {
-            return new Map();
-        }
-        try {
-            // Fetch all chunks in one go - much faster than N queries
-            // querying only necessary columns to reduce memory usage
-            const allChunks = await this.table.query()
-                .select(['filePath', 'timestamp', 'fileHash'])
-                .toArray();
-            const stats = new Map();
-            for (const chunk of allChunks) {
-                const current = stats.get(chunk.filePath);
-                if (!current) {
-                    stats.set(chunk.filePath, {
-                        lastIndexed: chunk.timestamp,
-                        chunkCount: 1,
-                        fileHash: chunk.fileHash
-                    });
-                }
-                else {
-                    // Update stats
-                    current.chunkCount++;
-                    // Keep the latest timestamp
-                    if (chunk.timestamp > current.lastIndexed) {
-                        current.lastIndexed = chunk.timestamp;
-                    }
-                }
-            }
-            return stats;
-        }
-        catch (error) {
-            console.error(`Error getting indexed file stats: ${error}`);
-            return new Map();
-        }
-    }
 }
 /**
  * Creates a vector store from environment variables
package/dist/index.js
CHANGED
@@ -10,6 +10,7 @@ import { z } from "zod";
 import { createEmbeddingService } from "./common/embeddingService.js";
 import { createVectorStore } from "./common/vectorStore.js";
 import { createIndexManager } from "./common/indexManager.js";
+import { createProjectKnowledgeService } from "./common/projectKnowledgeService.js";
 // Import tools
 import { indexCode } from "./tools/indexCode.js";
 import { searchMemory } from "./tools/searchMemory.js";
@@ -17,11 +18,14 @@ import { readFile } from "./tools/readFile.js";
 import { writeFile } from "./tools/writeFile.js";
 import { getStats } from "./tools/getStats.js";
 import { analyzeCoverage } from "./tools/analyzeCoverage.js";
+import { generateProjectDocs, generateProjectDocsToolDefinition } from "./tools/generateProjectDocs.js";
+import { getProjectDocs, getProjectDocsToolDefinition } from "./tools/getProjectDocs.js";
 import { VERSION } from "./common/version.js";
 // Global services
 let embeddingService;
 let vectorStore;
 let indexManager;
+let projectKnowledgeService;
 let workspaceRoot;
 // Create the MCP Server
 const server = new McpServer({
@@ -29,7 +33,10 @@ const server = new McpServer({
     version: VERSION,
 });
 // Tool: Index Code
-server.tool("memorybank_index_code", "Indexa semánticamente código de un directorio o archivo específico para permitir búsquedas semánticas", {
+server.tool("memorybank_index_code", "Indexa semánticamente código de un directorio o archivo específico para permitir búsquedas semánticas. El projectId es OBLIGATORIO y debe coincidir con el definido en AGENTS.md", {
+    projectId: z
+        .string()
+        .describe("Identificador único del proyecto (OBLIGATORIO). Debe coincidir con el definido en AGENTS.md del proyecto"),
     path: z
         .string()
         .optional()
@@ -46,6 +53,7 @@ server.tool("memorybank_index_code", "Indexa semánticamente código de un direc
         .describe("Forzar reindexación completa aunque no haya cambios"),
 }, async (args) => {
     const result = await indexCode({
+        projectId: args.projectId,
         path: args.path,
         recursive: args.recursive,
         forceReindex: args.forceReindex,
@@ -55,7 +63,10 @@ server.tool("memorybank_index_code", "Indexa semánticamente código de un direc
     };
 });
 // Tool: Search Memory Bank
-server.tool("memorybank_search", "Busca código relevante mediante búsqueda semántica vectorial. Usa esta herramienta SIEMPRE que necesites información sobre el código", {
+server.tool("memorybank_search", "Busca código relevante mediante búsqueda semántica vectorial. Usa esta herramienta SIEMPRE que necesites información sobre el código. El projectId es OBLIGATORIO", {
+    projectId: z
+        .string()
+        .describe("Identificador del proyecto donde buscar (OBLIGATORIO). Debe coincidir con el usado al indexar"),
     query: z
         .string()
         .describe("Consulta semántica: describe qué estás buscando en lenguaje natural (ej: 'función de autenticación', '¿cómo se validan los emails?')"),
@@ -67,8 +78,8 @@ server.tool("memorybank_search", "Busca código relevante mediante búsqueda sem
     minScore: z
         .number()
         .optional()
-        .default(0.
-        .describe("Puntuación mínima de similitud (0-1).
+        .default(0.4)
+        .describe("Puntuación mínima de similitud (0-1). por defecto usa 0.4 y basado en el resultado ajusta el valor"),
     filterByFile: z
         .string()
         .optional()
@@ -79,6 +90,7 @@ server.tool("memorybank_search", "Busca código relevante mediante búsqueda sem
         .describe("Filtrar resultados por lenguaje de programación (ej: 'typescript', 'python')"),
 }, async (args) => {
     const result = await searchMemory({
+        projectId: args.projectId,
         query: args.query,
         topK: args.topK,
         minScore: args.minScore,
@@ -113,7 +125,10 @@ server.tool("memorybank_read_file", "Lee el contenido de un archivo específico
     };
 });
 // Tool: Write File
-server.tool("memorybank_write_file", "Escribe o modifica un archivo y automáticamente lo reindexa en el Memory Bank para mantener la consistencia", {
+server.tool("memorybank_write_file", "Escribe o modifica un archivo y automáticamente lo reindexa en el Memory Bank para mantener la consistencia. El projectId es OBLIGATORIO para la reindexación correcta", {
+    projectId: z
+        .string()
+        .describe("Identificador del proyecto (OBLIGATORIO). Necesario para la auto-reindexación correcta"),
     path: z
         .string()
         .describe("Ruta relativa o absoluta del archivo a escribir"),
@@ -127,6 +142,7 @@ server.tool("memorybank_write_file", "Escribe o modifica un archivo y automátic
         .describe("Reindexar automáticamente el archivo después de escribirlo"),
 }, async (args) => {
     const result = await writeFile({
+        projectId: args.projectId,
         path: args.path,
         content: args.content,
         autoReindex: args.autoReindex,
@@ -143,9 +159,18 @@ server.tool("memorybank_get_stats", "Obtiene estadísticas del Memory Bank: arch
     };
 });
 // Tool: Analyze Coverage
-server.tool("memorybank_analyze_coverage", "Analiza la cobertura de indexación del proyecto. Muestra qué carpetas/archivos están indexados, cuáles no, y cuáles tienen cambios pendientes.
+server.tool("memorybank_analyze_coverage", "Analiza la cobertura de indexación del proyecto. Muestra qué carpetas/archivos están indexados, cuáles no, y cuáles tienen cambios pendientes. El projectId es OBLIGATORIO. NOTA: Puede tardar en workspaces grandes", {
+    projectId: z
+        .string()
+        .describe("Identificador del proyecto a analizar (OBLIGATORIO)"),
+    path: z
+        .string()
+        .optional()
+        .describe("Ruta específica a analizar (por defecto: raíz del workspace)"),
+}, async (args) => {
     try {
-        const
+        const targetPath = args.path || workspaceRoot;
+        const result = await analyzeCoverage(indexManager, vectorStore, targetPath, args.projectId);
         return {
             content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
         };
@@ -184,6 +209,50 @@ server.tool("memorybank_analyze_coverage", "Analiza la cobertura de indexación
         };
     }
 });
+// Tool: Generate Project Docs
+server.tool(generateProjectDocsToolDefinition.name, generateProjectDocsToolDefinition.description + ". El projectId es OBLIGATORIO", {
+    projectId: z
+        .string()
+        .describe("Identificador del proyecto (OBLIGATORIO). Debe coincidir con el usado al indexar"),
+    force: z
+        .boolean()
+        .optional()
+        .default(false)
+        .describe("Forzar regeneración de todos los documentos aunque no hayan cambiado"),
+}, async (args) => {
+    const result = await generateProjectDocs({
+        projectId: args.projectId,
+        force: args.force,
+    }, projectKnowledgeService, vectorStore);
+    return {
+        content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
+    };
+});
+// Tool: Get Project Docs
+server.tool(getProjectDocsToolDefinition.name, getProjectDocsToolDefinition.description + ". El projectId es OBLIGATORIO", {
+    projectId: z
+        .string()
+        .describe("Identificador del proyecto (OBLIGATORIO). Debe coincidir con el usado al generar los docs"),
+    document: z
+        .string()
+        .optional()
+        .default("summary")
+        .describe("Documento específico a recuperar: projectBrief, productContext, systemPatterns, techContext, activeContext, progress, all, summary"),
+    format: z
+        .enum(["full", "summary"])
+        .optional()
+        .default("full")
+        .describe("Formato de salida: 'full' devuelve contenido completo, 'summary' devuelve resumen de todos los docs"),
+}, async (args) => {
+    const result = await getProjectDocs({
+        projectId: args.projectId,
+        document: args.document,
+        format: args.format,
+    }, projectKnowledgeService);
+    return {
+        content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
+    };
+});
 /**
  * Validates and initializes environment
  */
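Every tool registered above now takes a required projectId, so a tools/call that omits it fails Zod validation. Illustrative arguments for the updated memorybank_search (the projectId value is hypothetical and must match the one used when indexing):

    // Shape of the params an MCP client would send in a tools/call request.
    const searchCall = {
        name: "memorybank_search",
        arguments: {
            projectId: "my-project",           // hypothetical; must match memorybank_index_code
            query: "función de autenticación", // natural-language semantic query
            topK: 5,
            minScore: 0.4,                     // the new default introduced in this diff
        },
    };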
@@ -208,6 +277,12 @@ async function validateEnvironment() {
     const embeddingModel = process.env.MEMORYBANK_EMBEDDING_MODEL || "text-embedding-3-small";
     const embeddingDimensions = process.env.MEMORYBANK_EMBEDDING_DIMENSIONS || "1536";
     console.error(`✓ Embedding model: ${embeddingModel} (${embeddingDimensions} dimensions)`);
+    // Project Knowledge Layer configuration
+    const reasoningModel = process.env.MEMORYBANK_REASONING_MODEL || "gpt-5-mini";
+    const reasoningEffort = process.env.MEMORYBANK_REASONING_EFFORT || "medium";
+    const autoUpdateDocs = process.env.MEMORYBANK_AUTO_UPDATE_DOCS === "true";
+    console.error(`✓ Reasoning model: ${reasoningModel} (effort: ${reasoningEffort})`);
+    console.error(`✓ Auto-update docs: ${autoUpdateDocs}`);
     // Initialize services
     console.error("\nInitializing services...");
     try {
@@ -216,8 +291,21 @@
         vectorStore = createVectorStore();
         await vectorStore.initialize();
         console.error("✓ Vector store initialized");
-        indexManager = createIndexManager(embeddingService, vectorStore
+        indexManager = createIndexManager(embeddingService, vectorStore);
         console.error("✓ Index manager initialized");
+        // Initialize Project Knowledge Service
+        try {
+            projectKnowledgeService = createProjectKnowledgeService();
+            console.error("✓ Project Knowledge service initialized");
+            // Connect to Index Manager for auto-update hooks
+            indexManager.setProjectKnowledgeService(projectKnowledgeService);
+            indexManager.setAutoUpdateDocs(autoUpdateDocs);
+            console.error("✓ Project Knowledge service connected to Index Manager");
+        }
+        catch (error) {
+            console.error(`⚠ Warning: Project Knowledge service not available: ${error}`);
+            console.error(" Project documentation features will be disabled.");
+        }
     }
     catch (error) {
         console.error(`ERROR: Failed to initialize services: ${error}`);
@@ -241,12 +329,16 @@ async function startStdioServer() {
     await server.connect(transport);
     console.error("\n=== MCP Server Ready ===");
     console.error("Available tools:");
-    console.error("
-    console.error("
-    console.error("
-    console.error("
-    console.error("
-    console.error("
+    console.error(" Core Memory Bank:");
+    console.error(" - memorybank_index_code: Indexar código semánticamente");
+    console.error(" - memorybank_search: Buscar código por similitud semántica");
+    console.error(" - memorybank_read_file: Leer archivos del workspace");
+    console.error(" - memorybank_write_file: Escribir archivos y reindexar");
+    console.error(" - memorybank_get_stats: Obtener estadísticas del índice");
+    console.error(" - memorybank_analyze_coverage: Analizar cobertura de indexación");
+    console.error(" Project Knowledge Layer:");
+    console.error(" - memorybank_generate_project_docs: Generar documentación con IA");
+    console.error(" - memorybank_get_project_docs: Leer documentación del proyecto");
     console.error("");
     console.error("Ready to accept requests...\n");
 }
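Collected in one place, the environment variables this diff reads, with the fallbacks taken from the code above (a hypothetical configuration sketch, not an official reference):

    // Memory Bank embedding + Project Knowledge Layer (dist/index.js)
    process.env.MEMORYBANK_EMBEDDING_MODEL ??= "text-embedding-3-small";
    process.env.MEMORYBANK_EMBEDDING_DIMENSIONS ??= "1536";
    process.env.MEMORYBANK_REASONING_MODEL ??= "gpt-5-mini";
    process.env.MEMORYBANK_REASONING_EFFORT ??= "medium";
    process.env.MEMORYBANK_AUTO_UPDATE_DOCS ??= "false"; // only the string "true" enables auto-update hooks

    // Planka integration (dist/common/utils.js)
    process.env.PLANKA_BASE_URL ??= "http://localhost:3000";
    // PLANKA_AGENT_EMAIL and PLANKA_AGENT_PASSWORD are required for authentication;
    // PLANKA_ALLOW_INSECURE="true" disables TLS certificate validation (use with care).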