@grec0/memory-bank-mcp 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.memoryignore.example +76 -0
- package/README.md +425 -0
- package/dist/common/chunker.js +407 -0
- package/dist/common/embeddingService.js +302 -0
- package/dist/common/errors.js +71 -0
- package/dist/common/fileScanner.js +261 -0
- package/dist/common/indexManager.js +332 -0
- package/dist/common/setup.js +49 -0
- package/dist/common/types.js +115 -0
- package/dist/common/utils.js +215 -0
- package/dist/common/vectorStore.js +332 -0
- package/dist/common/version.js +2 -0
- package/dist/index.js +274 -0
- package/dist/operations/boardMemberships.js +186 -0
- package/dist/operations/boards.js +268 -0
- package/dist/operations/cards.js +426 -0
- package/dist/operations/comments.js +249 -0
- package/dist/operations/labels.js +258 -0
- package/dist/operations/lists.js +157 -0
- package/dist/operations/projects.js +102 -0
- package/dist/operations/tasks.js +238 -0
- package/dist/tools/analyzeCoverage.js +316 -0
- package/dist/tools/board-summary.js +151 -0
- package/dist/tools/card-details.js +106 -0
- package/dist/tools/create-card-with-tasks.js +81 -0
- package/dist/tools/getStats.js +59 -0
- package/dist/tools/index.js +12 -0
- package/dist/tools/indexCode.js +53 -0
- package/dist/tools/readFile.js +69 -0
- package/dist/tools/searchMemory.js +60 -0
- package/dist/tools/workflow-actions.js +145 -0
- package/dist/tools/writeFile.js +66 -0
- package/package.json +58 -0
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Project operations for the MCP Kanban server
|
|
3
|
+
*
|
|
4
|
+
* This module provides functions for interacting with projects in the Planka Kanban system,
|
|
5
|
+
* including creating, retrieving, updating, and deleting projects.
|
|
6
|
+
*/
|
|
7
|
+
import { z } from "zod";
|
|
8
|
+
import { plankaRequest } from "../common/utils.js";
|
|
9
|
+
import { PlankaProjectSchema } from "../common/types.js";
|
|
10
|
+
// Schema definitions

/**
 * Schema for creating a new project.
 * @property {string} name - The name of the project
 */
export const CreateProjectSchema = z.object({
    name: z.string().describe("Project name"),
});

/**
 * Schema for retrieving projects with pagination.
 * @property {number} [page] - Page number for pagination (default: 1)
 * @property {number} [perPage] - Number of results per page (default: 30, max: 100)
 */
export const GetProjectsSchema = z.object({
    page: z.number().optional().describe("Page number for pagination (default: 1)"),
    perPage: z.number().optional().describe("Number of results per page (default: 30, max: 100)"),
});

/**
 * Schema for retrieving a specific project.
 * @property {string} id - The ID of the project to retrieve
 */
export const GetProjectSchema = z.object({
    id: z.string().describe("Project ID"),
});

/**
 * Schema for updating a project.
 * @property {string} id - The ID of the project to update
 * @property {string} [name] - The new name for the project
 */
export const UpdateProjectSchema = z.object({
    id: z.string().describe("Project ID"),
    name: z.string().optional().describe("Project name"),
});

/**
 * Schema for deleting a project.
 * @property {string} id - The ID of the project to delete
 */
export const DeleteProjectSchema = z.object({
    id: z.string().describe("Project ID"),
});

// Response schemas — envelopes returned by the Planka REST API.

// List endpoint envelope: `items` plus optional side-loaded related records.
const ProjectsResponseSchema = z.object({
    items: z.array(PlankaProjectSchema),
    included: z.record(z.any()).optional(),
});

// Single-item endpoint envelope.
const ProjectResponseSchema = z.object({
    item: PlankaProjectSchema,
    included: z.record(z.any()).optional(),
});
|
|
59
|
+
/**
 * Retrieves projects with pagination support.
 *
 * @param {number} [page=1] - The page number to retrieve (1-indexed)
 * @param {number} [perPage=30] - The number of projects per page (capped at 100)
 * @returns {Promise<{items: Array<object>, included?: object}>} Paginated projects
 * @throws {Error} If the request fails or the response does not match the schema
 */
export async function getProjects(page = 1, perPage = 30) {
    try {
        // Planka caps page size at 100; clamp into a new binding instead of
        // reassigning the parameter.
        const limit = Math.min(perPage, 100);
        const queryParams = new URLSearchParams();
        queryParams.append("page", page.toString());
        queryParams.append("per_page", limit.toString());
        const response = await plankaRequest(`/api/projects?${queryParams.toString()}`, {
            method: "GET",
        });
        // Validate the payload shape before handing it to callers.
        return ProjectsResponseSchema.parse(response);
    }
    catch (error) {
        throw new Error(`Failed to get projects: ${error instanceof Error ? error.message : String(error)}`);
    }
}
|
|
86
|
+
/**
 * Retrieves a specific project by ID.
 *
 * @param {string} id - The ID of the project to retrieve
 * @returns {Promise<object>} The requested project
 * @throws {Error} If the request fails or the response does not match the schema
 */
export async function getProject(id) {
    try {
        const raw = await plankaRequest(`/api/projects/${id}`);
        // Validate the envelope, then unwrap the single project record.
        const { item } = ProjectResponseSchema.parse(raw);
        return item;
    }
    catch (error) {
        const reason = error instanceof Error ? error.message : String(error);
        throw new Error(`Failed to get project: ${reason}`);
    }
}
|
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Task operations for the MCP Kanban server
|
|
3
|
+
*
|
|
4
|
+
* This module provides functions for interacting with tasks in the Planka Kanban board,
|
|
5
|
+
* including creating, retrieving, updating, and deleting tasks, as well as batch operations.
|
|
6
|
+
*/
|
|
7
|
+
import { z } from "zod";
|
|
8
|
+
import { plankaRequest } from "../common/utils.js";
|
|
9
|
+
import { PlankaTaskSchema } from "../common/types.js";
|
|
10
|
+
// Schema definitions

/**
 * Schema for creating a new task.
 * @property {string} cardId - The ID of the card to create the task in
 * @property {string} name - The name of the task
 * @property {number} [position] - The position of the task in the card (default: 65535)
 */
export const CreateTaskSchema = z.object({
    cardId: z.string().describe("Card ID"),
    name: z.string().describe("Task name"),
    position: z.number().optional().describe("Task position (default: 65535)"),
});

/**
 * Schema for batch creating multiple tasks.
 * @property {Array<CreateTaskSchema>} tasks - Array of tasks to create
 */
export const BatchCreateTasksSchema = z.object({
    tasks: z.array(CreateTaskSchema).describe("Array of tasks to create"),
});

/**
 * Schema for retrieving tasks from a card.
 * @property {string} cardId - The ID of the card to get tasks from
 */
export const GetTasksSchema = z.object({
    cardId: z.string().describe("Card ID"),
});

/**
 * Schema for retrieving a specific task.
 * @property {string} id - The ID of the task to retrieve
 * @property {string} [cardId] - The ID of the card containing the task
 */
export const GetTaskSchema = z.object({
    id: z.string().describe("Task ID"),
    cardId: z.string().optional().describe("Card ID containing the task"),
});

/**
 * Schema for updating a task.
 * @property {string} id - The ID of the task to update
 * @property {string} [name] - The new name for the task
 * @property {boolean} [isCompleted] - Whether the task is completed
 * @property {number} [position] - The new position for the task
 */
export const UpdateTaskSchema = z.object({
    id: z.string().describe("Task ID"),
    name: z.string().optional().describe("Task name"),
    isCompleted: z.boolean().optional().describe("Whether the task is completed"),
    position: z.number().optional().describe("Task position"),
});

/**
 * Schema for deleting a task.
 * @property {string} id - The ID of the task to delete
 */
export const DeleteTaskSchema = z.object({
    id: z.string().describe("Task ID"),
});

// Response schemas — envelopes returned by the Planka REST API.

// List endpoint envelope: `items` plus optional side-loaded related records.
// NOTE(review): TasksResponseSchema appears unused by the task operations
// visible in this file — confirm before removing.
const TasksResponseSchema = z.object({
    items: z.array(PlankaTaskSchema),
    included: z.record(z.any()).optional(),
});

// Single-item endpoint envelope (used by updateTask).
const TaskResponseSchema = z.object({
    item: PlankaTaskSchema,
    included: z.record(z.any()).optional(),
});

// In-memory cache mapping task ID -> owning card ID, populated by createTask.
// getTask needs a card ID to locate a task; this cache lets callers omit it,
// but only within this process's lifetime and only for tasks created here.
const taskCardIdMap = {};
|
|
76
|
+
// Function implementations
|
|
77
|
+
/**
 * Creates a new task for a card.
 *
 * @param {object} params - The task creation parameters
 * @param {string} params.cardId - The ID of the card to create the task in
 * @param {string} params.name - The name of the new task
 * @param {number} [params.position=65535] - The position of the task in the card
 * @returns {Promise<object>} The created task
 * @throws {Error} If the request fails
 */
export async function createTask(params) {
    try {
        const { cardId, name, position = 65535 } = params;
        const response = await plankaRequest(`/api/cards/${cardId}/tasks`, {
            method: "POST",
            body: { name, position },
        });
        const created = response.item;
        // Remember which card owns this task so getTask can find it later
        // without an explicit card ID.
        if (created?.id) {
            taskCardIdMap[created.id] = cardId;
        }
        return created;
    }
    catch (error) {
        console.error("Error creating task:", error);
        const reason = error instanceof Error ? error.message : String(error);
        throw new Error(`Failed to create task: ${reason}`);
    }
}
|
|
104
|
+
/**
 * Creates multiple tasks for cards in a single operation.
 *
 * Tasks are created sequentially; one failure does not abort the rest.
 *
 * @param {{tasks: Array<{cardId: string, name: string, position?: number}>}} options - The batch create options
 * @returns {Promise<{results: any[], successes: any[], failures: Array<{index: number, task: object, error: string}>}>}
 *     Per-task outcomes in input order, plus the successes and failures split out
 * @throws {Error} If the batch operation fails completely
 */
export async function batchCreateTasks(options) {
    try {
        const results = [];
        const successes = [];
        const failures = [];
        for (let i = 0; i < options.tasks.length; i++) {
            // BUG FIX: copy instead of mutating the caller's task object, and
            // use ?? so an explicit position of 0 is respected (the old
            // `!task.position` check clobbered it). Defaults are staggered
            // (65535, 131070, ...) to preserve the given order.
            const task = {
                ...options.tasks[i],
                position: options.tasks[i].position ?? 65535 * (i + 1),
            };
            try {
                const result = await createTask(task);
                results.push({
                    success: true,
                    result,
                });
                successes.push(result);
            }
            catch (error) {
                const errorMessage = error instanceof Error
                    ? error.message
                    : String(error);
                results.push({
                    success: false,
                    error: { message: errorMessage },
                });
                failures.push({
                    index: i,
                    task,
                    error: errorMessage,
                });
            }
        }
        return {
            results,
            successes,
            failures,
        };
    }
    catch (error) {
        throw new Error(`Failed to batch create tasks: ${error instanceof Error ? error.message : String(error)}`);
    }
}
|
|
156
|
+
/**
 * Retrieves all tasks for a specific card.
 *
 * @param {string} cardId - The ID of the card to get tasks from
 * @returns {Promise<Array<object>>} Tasks in the card; empty array when the
 *     card has none or the request fails (errors are logged, not thrown)
 */
export async function getTasks(cardId) {
    try {
        // The dedicated tasks endpoint returns HTML, so read the card detail
        // payload instead — it side-loads the card's tasks under `included`.
        const response = await plankaRequest(`/api/cards/${cardId}`);
        const tasks = response?.included?.tasks;
        return Array.isArray(tasks) ? tasks : [];
    }
    catch (error) {
        console.error(`Error getting tasks for card ${cardId}:`, error);
        // Degrade gracefully: callers treat "no tasks" and "error" alike.
        return [];
    }
}
|
|
180
|
+
/**
 * Retrieves a specific task by ID.
 *
 * @param {string} id - The ID of the task to retrieve
 * @param {string} [cardId] - Optional card ID to help find the task
 * @returns {Promise<object>} The requested task
 * @throws {Error} If the owning card cannot be determined or the task is absent
 */
export async function getTask(id, cardId) {
    try {
        // Tasks in Planka always belong to a card; resolve the owning card
        // either from the caller or from the cache populated by createTask.
        const owningCardId = cardId || taskCardIdMap[id];
        if (!owningCardId) {
            throw new Error("Card ID is required to get a task. Either provide it directly or create the task first.");
        }
        // The card detail payload side-loads the card's tasks.
        const response = await plankaRequest(`/api/cards/${owningCardId}`);
        const tasks = response?.included?.tasks;
        if (!Array.isArray(tasks)) {
            throw new Error(`Failed to get tasks for card ${owningCardId}`);
        }
        const match = tasks.find((candidate) => candidate.id === id);
        if (!match) {
            throw new Error(`Task with ID ${id} not found in card ${owningCardId}`);
        }
        return match;
    }
    catch (error) {
        console.error(`Error getting task with ID ${id}:`, error);
        throw new Error(`Failed to get task: ${error instanceof Error ? error.message : String(error)}`);
    }
}
|
|
212
|
+
/**
 * Updates a task's properties.
 *
 * @param {string} id - The ID of the task to update
 * @param {{name?: string, isCompleted?: boolean, position?: number}} options - The properties to update
 * @returns {Promise<object>} The updated task
 * @throws {Error} If the request fails or the response does not match the schema
 */
export async function updateTask(id, options) {
    try {
        const response = await plankaRequest(`/api/tasks/${id}`, {
            method: "PATCH",
            body: options,
        });
        const parsedResponse = TaskResponseSchema.parse(response);
        return parsedResponse.item;
    }
    catch (error) {
        // CONSISTENCY FIX: wrap failures with context like every other task
        // operation in this module, instead of leaking raw errors.
        throw new Error(`Failed to update task: ${error instanceof Error ? error.message : String(error)}`);
    }
}
|
|
227
|
+
/**
 * Deletes a task by ID.
 *
 * @param {string} id - The ID of the task to delete
 * @returns {Promise<{success: boolean}>} Success indicator
 */
export async function deleteTask(id) {
    const endpoint = `/api/tasks/${id}`;
    await plankaRequest(endpoint, { method: "DELETE" });
    // The API returns no useful body on delete; surface a simple flag.
    return { success: true };
}
|
|
@@ -0,0 +1,316 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @fileoverview Analyze coverage tool for Memory Bank
|
|
3
|
+
* Provides detailed analysis of indexation coverage across the project
|
|
4
|
+
*/
|
|
5
|
+
import * as path from "path";
|
|
6
|
+
import { scanFiles } from "../common/fileScanner.js";
|
|
7
|
+
/**
 * Builds a directory tree with indexation status.
 *
 * @param {Array<{path: string, size: number, mtime: *}>} files - Scanned workspace
 *     files; `path` is relative to the workspace root
 * @param {Map<string, {lastIndexed: *, chunks: number}>} indexedFiles - Index metadata per file path
 * @param {Set<string>} pendingFiles - Paths whose content changed since last indexing
 * @param {string} rootPath - Workspace root path (used only for the root node's name)
 * @returns {object} Root directory node with nested children and aggregate
 *     fileCount / indexedCount / pendingCount counters
 */
function buildDirectoryTree(files, indexedFiles, pendingFiles, rootPath) {
    const root = {
        name: path.basename(rootPath),
        path: "",
        type: "directory",
        status: "indexed",
        fileCount: 0,
        indexedCount: 0,
        pendingCount: 0,
        children: [],
    };
    // Map from relative directory path ("" = root) to its tree node.
    const dirMap = new Map();
    dirMap.set("", root);
    // Sort files by path for consistent tree building.
    const sortedFiles = [...files].sort((a, b) => a.path.localeCompare(b.path));
    for (const file of sortedFiles) {
        const parts = file.path.split(path.sep);
        let currentPath = "";
        // Create any missing directory nodes along the file's path.
        for (let i = 0; i < parts.length - 1; i++) {
            const parentPath = currentPath;
            currentPath = currentPath ? path.join(currentPath, parts[i]) : parts[i];
            if (!dirMap.has(currentPath)) {
                const dirNode = {
                    name: parts[i],
                    path: currentPath,
                    type: "directory",
                    status: "indexed",
                    fileCount: 0,
                    indexedCount: 0,
                    pendingCount: 0,
                    children: [],
                };
                dirMap.set(currentPath, dirNode);
                const parent = dirMap.get(parentPath);
                if (parent && parent.children) {
                    parent.children.push(dirNode);
                }
            }
        }
        // Add the file node itself.
        const fileName = parts[parts.length - 1];
        const fileDir = parts.length > 1 ? path.dirname(file.path) : "";
        const parentDir = dirMap.get(fileDir);
        if (parentDir && parentDir.children) {
            const indexed = indexedFiles.has(file.path);
            const pending = pendingFiles.has(file.path);
            const fileNode = {
                name: fileName,
                path: file.path,
                type: "file",
                // A changed file is "pending_reindex" even though it is indexed.
                status: pending ? "pending_reindex" : indexed ? "indexed" : "not_indexed",
                size: file.size,
                lastModified: file.mtime,
                lastIndexed: indexed ? new Date(indexedFiles.get(file.path).lastIndexed) : undefined,
                chunkCount: indexed ? indexedFiles.get(file.path).chunks : 0,
            };
            parentDir.children.push(fileNode);
            // Propagate counters up to the root.
            // BUG FIX: the previous walk never terminated at the root node —
            // path.dirname("") is "." (not ""), so the "have we reached the
            // top?" comparison always resolved back to the root entry and
            // looped forever. Stop explicitly once the root ("") is updated.
            let current = parentDir;
            while (current) {
                current.fileCount = (current.fileCount || 0) + 1;
                if (indexed) {
                    current.indexedCount = (current.indexedCount || 0) + 1;
                }
                if (pending) {
                    current.pendingCount = (current.pendingCount || 0) + 1;
                }
                if (current.path === "") {
                    break; // root reached
                }
                const parentPath = path.dirname(current.path);
                current = dirMap.get(parentPath === "." ? "" : parentPath);
            }
        }
    }
    // Sort children: directories first, then files, each alphabetically.
    const sortChildren = (node) => {
        if (node.children) {
            node.children.sort((a, b) => {
                if (a.type !== b.type) {
                    return a.type === "directory" ? -1 : 1;
                }
                return a.name.localeCompare(b.name);
            });
            node.children.forEach(sortChildren);
        }
    };
    sortChildren(root);
    return root;
}
|
|
98
|
+
/**
 * Calculates coverage statistics across all scanned files.
 *
 * @param {Array<{path: string, size: number, language: string}>} files - Scanned workspace files
 * @param {Map<string, {chunks: number}>} indexedFiles - Index metadata per file path
 * @param {Set<string>} pendingFiles - Paths with changes pending reindexation
 * @param {number} totalChunks - Total number of chunks in the vector store
 * @returns {object} Aggregate stats including per-language and per-directory breakdowns
 */
function calculateStats(files, indexedFiles, pendingFiles, totalChunks) {
    const stats = {
        totalFiles: files.length,
        indexedFiles: 0,
        notIndexedFiles: 0,
        pendingReindexFiles: 0,
        ignoredFiles: 0,
        totalSize: 0,
        indexedSize: 0,
        coveragePercentage: 0,
        totalChunks,
        languageBreakdown: {},
        directoryBreakdown: {},
    };
    for (const file of files) {
        stats.totalSize += file.size;
        const indexed = indexedFiles.has(file.path);
        const pending = pendingFiles.has(file.path);
        // A file with pending changes counts only as pending, not as indexed,
        // so it also counts against coveragePercentage below.
        if (pending) {
            stats.pendingReindexFiles++;
        }
        else if (indexed) {
            stats.indexedFiles++;
            stats.indexedSize += file.size;
        }
        else {
            stats.notIndexedFiles++;
        }
        // Language breakdown
        if (!stats.languageBreakdown[file.language]) {
            stats.languageBreakdown[file.language] = {
                total: 0,
                indexed: 0,
                chunks: 0,
            };
        }
        stats.languageBreakdown[file.language].total++;
        if (indexed) {
            stats.languageBreakdown[file.language].indexed++;
            stats.languageBreakdown[file.language].chunks += indexedFiles.get(file.path).chunks;
        }
        // Directory breakdown keyed by top-level directory.
        // BUG FIX: path.dirname() returns "." (truthy) for root-level files,
        // so the old `split(...)[0] || "(root)"` fallback never fired and
        // root files were bucketed under ".". Map "." explicitly to "(root)".
        const dir = path.dirname(file.path);
        const topLevelDir = dir === "." ? "(root)" : dir.split(path.sep)[0] || "(root)";
        if (!stats.directoryBreakdown[topLevelDir]) {
            stats.directoryBreakdown[topLevelDir] = {
                total: 0,
                indexed: 0,
                pending: 0,
            };
        }
        stats.directoryBreakdown[topLevelDir].total++;
        if (indexed) {
            stats.directoryBreakdown[topLevelDir].indexed++;
        }
        if (pending) {
            stats.directoryBreakdown[topLevelDir].pending++;
        }
    }
    stats.coveragePercentage = stats.totalFiles > 0
        ? (stats.indexedFiles / stats.totalFiles) * 100
        : 0;
    return stats;
}
|
|
163
|
+
/**
 * Generates human-readable recommendations based on coverage analysis.
 *
 * @param {object} stats - Aggregate stats produced by calculateStats
 * @param {object} tree - Directory tree (currently unused; kept for API stability)
 * @returns {string[]} Recommendation messages (Spanish, emoji-prefixed)
 */
function generateRecommendations(stats, tree) {
    const recommendations = [];
    const coverage = stats.coveragePercentage;
    // Overall coverage verdict; the 80-99.9% band deliberately gets no message.
    if (coverage < 50) {
        recommendations.push(`⚠️ Cobertura baja (${coverage.toFixed(1)}%). Considera indexar el proyecto completo con memorybank_index_code({})`);
    }
    else if (coverage < 80) {
        recommendations.push(`📊 Cobertura media (${coverage.toFixed(1)}%). Hay ${stats.notIndexedFiles} archivos sin indexar`);
    }
    else if (coverage === 100) {
        recommendations.push(`✅ Cobertura completa (100%). Todos los archivos están indexados`);
    }
    // Files whose content changed since they were last indexed.
    if (stats.pendingReindexFiles > 0) {
        recommendations.push(`🔄 Hay ${stats.pendingReindexFiles} archivo(s) con cambios pendientes de reindexación. Ejecuta memorybank_index_code({ forceReindex: true })`);
    }
    // Languages present in the workspace with nothing indexed at all.
    const unindexedLanguages = Object.entries(stats.languageBreakdown)
        .filter(([, data]) => data.indexed === 0 && data.total > 0)
        .map(([lang]) => lang);
    if (unindexedLanguages.length > 0) {
        recommendations.push(`💡 Lenguajes sin indexar: ${unindexedLanguages.join(", ")}. Considera indexar estos archivos`);
    }
    // Sizeable top-level directories (>5 files) with nothing indexed.
    const unindexedDirs = Object.entries(stats.directoryBreakdown)
        .filter(([, data]) => data.indexed === 0 && data.total > 5)
        .map(([dir]) => dir);
    if (unindexedDirs.length > 0) {
        recommendations.push(`📁 Directorios sin indexar: ${unindexedDirs.join(", ")}. Usa memorybank_index_code({ path: "directorio" })`);
    }
    // An unusually high chunks-per-file average hints at oversized files or a
    // chunk size that is too small.
    const avgChunksPerFile = stats.indexedFiles > 0 ? stats.totalChunks / stats.indexedFiles : 0;
    if (avgChunksPerFile > 20) {
        recommendations.push(`⚡ Promedio alto de chunks por archivo (${avgChunksPerFile.toFixed(1)}). Los archivos son muy grandes o el chunk_size es pequeño`);
    }
    return recommendations;
}
|
|
203
|
+
/**
 * Analyzes indexation coverage of the project.
 *
 * Compares the files present in the workspace against what the vector store
 * has indexed, then returns statistics, a status-annotated directory tree,
 * and actionable recommendations. Never throws: failures produce a result
 * object with success=false and zeroed stats.
 *
 * All progress/diagnostic output goes to stderr via console.error.
 *
 * @param {object} indexManager - Index metadata provider; must expose getStats()
 * @param {object} vectorStore - Vector store; must expose initialize(),
 *     getFileHashes(), and getChunksByFile()
 * @param {string} workspaceRoot - Path of the workspace to analyze
 * @returns {Promise<{success: boolean, stats: object, tree: object, recommendations: string[], message: string}>}
 */
export async function analyzeCoverage(indexManager, vectorStore, workspaceRoot) {
    try {
        console.error("\n=== Analizando cobertura de indexación ===");
        console.error(`Workspace root: ${workspaceRoot}`);
        // 1. Scan all code files in workspace with timeout protection
        console.error("Escaneando archivos del workspace...");
        // Add timeout and file limit protection. Note: the scan itself is a
        // single synchronous call, so maxScanTime is checked only after it
        // completes — it bounds the follow-up analysis, not the scan.
        const scanStartTime = Date.now();
        const maxScanTime = 10000; // 10 seconds max
        let allFiles = [];
        try {
            allFiles = scanFiles({
                rootPath: workspaceRoot,
                recursive: true
            });
            const scanDuration = Date.now() - scanStartTime;
            console.error(`Escaneo completado en ${scanDuration}ms`);
            console.error(`Encontrados ${allFiles.length} archivos de código`);
            // If scan took too long or found too many files, limit results
            // to keep the per-file work below bounded.
            if (scanDuration > maxScanTime || allFiles.length > 10000) {
                console.error(`⚠️ Workspace muy grande. Limitando análisis a primeros 1000 archivos`);
                allFiles = allFiles.slice(0, 1000);
            }
        }
        catch (error) {
            console.error(`Error escaneando archivos: ${error}`);
            throw error;
        }
        // 2. Get indexed files from vector store
        console.error("Obteniendo archivos indexados...");
        await vectorStore.initialize();
        const fileHashes = await vectorStore.getFileHashes();
        // 3. Get index metadata
        const indexStats = await indexManager.getStats();
        // 4. Build indexed files map with chunk counts
        const indexedFiles = new Map();
        // Get chunks grouped by file from vector store (the hash entry is not
        // needed here; the chunk list carries the per-file metadata we use).
        for (const [filePath, hash] of fileHashes) {
            const chunks = await vectorStore.getChunksByFile(filePath);
            if (chunks.length > 0) {
                indexedFiles.set(filePath, {
                    lastIndexed: chunks[0].timestamp,
                    chunks: chunks.length,
                });
            }
        }
        // 5. Identify pending files (files that changed): an indexed file
        // whose stored chunk hash no longer matches its on-disk hash.
        const pendingFiles = new Set();
        for (const file of allFiles) {
            const indexed = indexedFiles.get(file.path);
            if (indexed) {
                // Check if file hash matches
                const chunks = await vectorStore.getChunksByFile(file.path);
                if (chunks.length > 0 && chunks[0].fileHash !== file.hash) {
                    pendingFiles.add(file.path);
                }
            }
        }
        console.error(`Archivos indexados: ${indexedFiles.size}`);
        console.error(`Archivos con cambios: ${pendingFiles.size}`);
        // 6. Build directory tree
        console.error("Construyendo árbol de directorios...");
        const tree = buildDirectoryTree(allFiles, indexedFiles, pendingFiles, workspaceRoot);
        // 7. Calculate statistics
        console.error("Calculando estadísticas...");
        const stats = calculateStats(allFiles, indexedFiles, pendingFiles, indexStats.totalChunks);
        // 8. Generate recommendations
        const recommendations = generateRecommendations(stats, tree);
        // 9. Format message
        const message = `Análisis completado: ${stats.indexedFiles}/${stats.totalFiles} archivos indexados (${stats.coveragePercentage.toFixed(1)}% cobertura)`;
        console.error("\n=== Análisis completado ===");
        console.error(message);
        console.error(`Total chunks: ${stats.totalChunks}`);
        console.error(`Pendientes: ${stats.pendingReindexFiles}`);
        return {
            success: true,
            stats,
            tree,
            recommendations,
            message,
        };
    }
    catch (error) {
        console.error(`Error analyzing coverage: ${error}`);
        // Return an empty-but-well-formed result so MCP clients always get
        // the same response shape, even on failure.
        return {
            success: false,
            stats: {
                totalFiles: 0,
                indexedFiles: 0,
                notIndexedFiles: 0,
                pendingReindexFiles: 0,
                ignoredFiles: 0,
                totalSize: 0,
                indexedSize: 0,
                coveragePercentage: 0,
                totalChunks: 0,
                languageBreakdown: {},
                directoryBreakdown: {},
            },
            tree: {
                name: "root",
                path: "",
                type: "directory",
                status: "not_indexed",
                children: [],
            },
            recommendations: [],
            message: `Failed to analyze coverage: ${error}`,
        };
    }
}
|