@grec0/memory-bank-mcp 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.memoryignore.example +76 -0
- package/README.md +425 -0
- package/dist/common/chunker.js +407 -0
- package/dist/common/embeddingService.js +302 -0
- package/dist/common/errors.js +71 -0
- package/dist/common/fileScanner.js +261 -0
- package/dist/common/indexManager.js +332 -0
- package/dist/common/setup.js +49 -0
- package/dist/common/types.js +115 -0
- package/dist/common/utils.js +215 -0
- package/dist/common/vectorStore.js +332 -0
- package/dist/common/version.js +2 -0
- package/dist/index.js +274 -0
- package/dist/operations/boardMemberships.js +186 -0
- package/dist/operations/boards.js +268 -0
- package/dist/operations/cards.js +426 -0
- package/dist/operations/comments.js +249 -0
- package/dist/operations/labels.js +258 -0
- package/dist/operations/lists.js +157 -0
- package/dist/operations/projects.js +102 -0
- package/dist/operations/tasks.js +238 -0
- package/dist/tools/analyzeCoverage.js +316 -0
- package/dist/tools/board-summary.js +151 -0
- package/dist/tools/card-details.js +106 -0
- package/dist/tools/create-card-with-tasks.js +81 -0
- package/dist/tools/getStats.js +59 -0
- package/dist/tools/index.js +12 -0
- package/dist/tools/indexCode.js +53 -0
- package/dist/tools/readFile.js +69 -0
- package/dist/tools/searchMemory.js +60 -0
- package/dist/tools/workflow-actions.js +145 -0
- package/dist/tools/writeFile.js +66 -0
- package/package.json +58 -0
package/dist/common/utils.js
@@ -0,0 +1,215 @@
import { getUserAgent } from "universal-user-agent";
import { createPlankaError } from "./errors.js";
import { VERSION } from "./version.js";
import https from "https";
import nodeFetch from "node-fetch";
// Global variables to store tokens
let agentToken = null;
// Create HTTPS agent for insecure connections if needed
let httpsAgent;
if (process.env.PLANKA_ALLOW_INSECURE === "true") {
    console.error("[DEBUG] Allowing insecure HTTPS connections (certificate validation disabled)");
    httpsAgent = new https.Agent({
        rejectUnauthorized: false,
    });
}
// Custom fetch function that uses the agent
const customFetch = httpsAgent
    ? (url, options) => {
        return nodeFetch(url, { ...options, agent: httpsAgent });
    }
    : fetch;
async function parseResponseBody(response) {
    const contentType = response.headers.get("content-type");
    if (contentType?.includes("application/json")) {
        return response.json();
    }
    return response.text();
}
export function buildUrl(baseUrl, params) {
    const url = new URL(baseUrl);
    Object.entries(params).forEach(([key, value]) => {
        if (value !== undefined) {
            url.searchParams.append(key, value.toString());
        }
    });
    return url.toString();
}
const USER_AGENT = `modelcontextprotocol/servers/planka/v${VERSION} ${getUserAgent()}`;
async function authenticateAgent() {
    const email = process.env.PLANKA_AGENT_EMAIL;
    const password = process.env.PLANKA_AGENT_PASSWORD;
    console.error(`[DEBUG] Attempting authentication with email: ${email}`);
    if (!email || !password) {
        throw new Error("PLANKA_AGENT_EMAIL and PLANKA_AGENT_PASSWORD environment variables are required");
    }
    const baseUrl = process.env.PLANKA_BASE_URL || "http://localhost:3000";
    // Build the URL for the access-tokens endpoint
    const url = baseUrl.endsWith('/')
        ? `${baseUrl}api/access-tokens`
        : `${baseUrl}/api/access-tokens`;
    console.error(`[DEBUG] Authentication URL: ${url}`);
    console.error(`[DEBUG] Base URL: ${baseUrl}`);
    try {
        const requestBody = JSON.stringify({
            emailOrUsername: email,
            password: password,
        });
        console.error(`[DEBUG] Request body: ${requestBody}`);
        const response = await customFetch(url, {
            method: "POST",
            headers: {
                "Accept": "application/json",
                "Content-Type": "application/json",
                "User-Agent": USER_AGENT,
            },
            body: requestBody,
            credentials: "include",
        });
        console.error(`[DEBUG] Response status: ${response.status}`);
        console.error(`[DEBUG] Response headers:`, Object.fromEntries(response.headers.entries()));
        const responseBody = await parseResponseBody(response);
        console.error(`[DEBUG] Response body:`, responseBody);
        if (!response.ok) {
            throw createPlankaError(response.status, responseBody);
        }
        // The token is directly in the item field
        const { item } = responseBody;
        agentToken = item;
        console.error(`[DEBUG] Authentication successful, token length: ${item?.length}`);
        return item;
    }
    catch (error) {
        console.error(`[DEBUG] Authentication error:`, error);
        // Rethrow with more context
        const errorMessage = error instanceof Error ? error.message : String(error);
        throw new Error(`Failed to authenticate agent with Planka: ${errorMessage}`);
    }
}
async function getAuthToken() {
    if (agentToken) {
        return agentToken;
    }
    return authenticateAgent();
}
export async function plankaRequest(path, options = {}) {
    const baseUrl = process.env.PLANKA_BASE_URL || "http://localhost:3000";
    // Ensure path starts with /api/ if not already present
    const normalizedPath = path.startsWith("/api/") ? path : `/api/${path}`;
    // Build the full URL
    const url = baseUrl.endsWith('/')
        ? `${baseUrl}${normalizedPath.substring(1)}` // Remove leading slash if baseUrl ends with /
        : `${baseUrl}${normalizedPath}`;
    const headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
        "User-Agent": USER_AGENT,
        ...options.headers,
    };
    // Remove Content-Type header for FormData
    if (options.body instanceof FormData) {
        delete headers["Content-Type"];
    }
    // Add authentication token if not skipped
    if (!options.skipAuth) {
        try {
            const token = await getAuthToken();
            headers["Authorization"] = `Bearer ${token}`;
        }
        catch (error) {
            const errorMessage = error instanceof Error
                ? error.message
                : String(error);
            throw new Error(`Failed to get authentication token: ${errorMessage}`);
        }
    }
    try {
        const response = await customFetch(url, {
            method: options.method || "GET",
            headers,
            body: options.body instanceof FormData
                ? options.body
                : options.body
                    ? JSON.stringify(options.body)
                    : undefined,
            credentials: "include", // Include cookies for Planka authentication
        });
        const responseBody = await parseResponseBody(response);
        if (!response.ok) {
            throw createPlankaError(response.status, responseBody);
        }
        return responseBody;
    }
    catch (error) {
        const errorMessage = error instanceof Error ? error.message : String(error);
        throw new Error(`Failed to make Planka request to ${url}: ${errorMessage}`);
    }
}
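// ---------------------------------------------------------------------------
// Usage sketch (editor's addition, not part of the published file): with the
// environment configured, plankaRequest handles auth, URL joining, and JSON
// encoding, so a caller only names the endpoint. The response field names
// below are assumptions, not verified API shapes.
//
//   const projects = await plankaRequest("/api/projects");
//   console.log(projects.items.map((p) => p.name));
//
//   // Plain-object bodies are JSON.stringify'd by plankaRequest itself:
//   const board = await plankaRequest(`/api/projects/${projects.items[0].id}/boards`, {
//       method: "POST",
//       body: { name: "Sprint board", position: 65536 },
//   });
// ---------------------------------------------------------------------------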
export function validateProjectName(name) {
    const sanitized = name.trim();
    if (!sanitized) {
        throw new Error("Project name cannot be empty");
    }
    return sanitized;
}
export function validateBoardName(name) {
    const sanitized = name.trim();
    if (!sanitized) {
        throw new Error("Board name cannot be empty");
    }
    return sanitized;
}
export function validateListName(name) {
    const sanitized = name.trim();
    if (!sanitized) {
        throw new Error("List name cannot be empty");
    }
    return sanitized;
}
export function validateCardName(name) {
    const sanitized = name.trim();
    if (!sanitized) {
        throw new Error("Card name cannot be empty");
    }
    return sanitized;
}
/**
 * Looks up a user ID by email
 *
 * @param {string} email - The email of the user to look up
 * @returns {Promise<string | null>} The user ID if found, null otherwise
 */
export async function getUserIdByEmail(email) {
    try {
        // Get all users
        const response = await plankaRequest("/api/users");
        const { items } = response;
        // Find the user with the matching email
        const user = items.find((user) => user.email === email);
        return user ? user.id : null;
    }
    catch (error) {
        console.error(`Failed to get user ID by email: ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
}
/**
 * Looks up a user ID by username
 *
 * @param {string} username - The username of the user to look up
 * @returns {Promise<string | null>} The user ID if found, null otherwise
 */
export async function getUserIdByUsername(username) {
    try {
        // Get all users
        const response = await plankaRequest("/api/users");
        const { items } = response;
        // Find the user with the matching username
        const user = items.find((user) => user.username === username);
        return user ? user.id : null;
    }
    catch (error) {
        console.error(`Failed to get user ID by username: ${error instanceof Error ? error.message : String(error)}`);
        return null;
    }
}
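Taken together, the helpers above give the `operations/` modules a small, uniform client surface. A minimal sketch of the exported helpers, assuming they are imported from this module and a Planka instance is reachable (addresses and IDs below are illustrative):

// buildUrl drops undefined values, so optional filters can be passed as-is
const url = buildUrl("https://planka.example.com/api/cards", {
    listId: "42",
    search: undefined, // omitted from the query string
});
// -> https://planka.example.com/api/cards?listId=42

// The user lookups return null instead of throwing when nothing matches
const assigneeId = await getUserIdByEmail("agent@example.com")
    ?? await getUserIdByUsername("agent");
if (assigneeId === null) {
    console.error("No matching Planka user found");
}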
package/dist/common/vectorStore.js
@@ -0,0 +1,332 @@
/**
 * @fileoverview Vector store for Memory Bank using LanceDB
 * Manages storage and retrieval of code embeddings
 */
import * as lancedb from "@lancedb/lancedb";
import * as fs from "fs";
/**
 * Vector store using LanceDB
 */
export class VectorStore {
    db = null;
    table = null;
    dbPath;
    tableName;
    constructor(dbPath = ".memorybank", tableName = "code_chunks") {
        this.dbPath = dbPath;
        this.tableName = tableName;
    }
    /**
     * Initializes the vector database
     */
    async initialize() {
        try {
            // Create database directory if it doesn't exist
            if (!fs.existsSync(this.dbPath)) {
                fs.mkdirSync(this.dbPath, { recursive: true });
                console.error(`Created database directory: ${this.dbPath}`);
            }
            // Connect to LanceDB
            this.db = await lancedb.connect(this.dbPath);
            console.error(`Connected to LanceDB at ${this.dbPath}`);
            // Check if table exists
            const tableNames = await this.db.tableNames();
            if (tableNames.includes(this.tableName)) {
                // Open existing table
                this.table = await this.db.openTable(this.tableName);
                console.error(`Opened existing table: ${this.tableName}`);
            }
            else {
                // Create new table with empty data (will add records later)
                console.error(`Creating new table: ${this.tableName}`);
                // LanceDB requires at least one record to create table with schema
                // We'll create it when first inserting data
            }
        }
        catch (error) {
            console.error(`Error initializing vector store: ${error}`);
            throw error;
        }
    }
    /**
     * Ensures database and table are initialized
     */
    async ensureInitialized() {
        if (!this.db) {
            await this.initialize();
        }
    }
    /**
     * Inserts chunks into the vector store
     */
    async insertChunks(chunks) {
        await this.ensureInitialized();
        if (chunks.length === 0) {
            return;
        }
        try {
            if (!this.table) {
                // Create table with first batch of data
                this.table = await this.db.createTable(this.tableName, chunks);
                console.error(`Created table ${this.tableName} with ${chunks.length} chunks`);
            }
            else {
                // Add to existing table
                await this.table.add(chunks);
                console.error(`Added ${chunks.length} chunks to ${this.tableName}`);
            }
        }
        catch (error) {
            console.error(`Error inserting chunks: ${error}`);
            throw error;
        }
    }
    /**
     * Updates chunks in the vector store
     */
    async updateChunks(chunks) {
        await this.ensureInitialized();
        if (chunks.length === 0) {
            return;
        }
        try {
            // Delete old versions by ID
            const ids = chunks.map((c) => c.id);
            await this.deleteChunksByIds(ids);
            // Insert updated versions
            await this.insertChunks(chunks);
            console.error(`Updated ${chunks.length} chunks`);
        }
        catch (error) {
            console.error(`Error updating chunks: ${error}`);
            throw error;
        }
    }
    /**
     * Deletes chunks by their IDs
     */
    async deleteChunksByIds(ids) {
        await this.ensureInitialized();
        if (!this.table || ids.length === 0) {
            return;
        }
        try {
            // LanceDB uses SQL-like syntax for deletion
            const idList = ids.map((id) => `'${id}'`).join(",");
            await this.table.delete(`id IN (${idList})`);
            console.error(`Deleted ${ids.length} chunks`);
        }
        catch (error) {
            console.error(`Error deleting chunks: ${error}`);
            throw error;
        }
    }
    /**
     * Deletes all chunks from a specific file
     */
    async deleteChunksByFile(filePath) {
        await this.ensureInitialized();
        if (!this.table) {
            return;
        }
        try {
            await this.table.delete(`filePath = '${filePath}'`);
            console.error(`Deleted all chunks from file: ${filePath}`);
        }
        catch (error) {
            console.error(`Error deleting chunks by file: ${error}`);
            throw error;
        }
    }
    /**
     * Searches for similar chunks using vector similarity
     */
    async search(queryVector, options = {}) {
        await this.ensureInitialized();
        if (!this.table) {
            console.error("No table exists yet, returning empty results");
            return [];
        }
        const topK = options.topK || 10;
        const minScore = options.minScore || 0.0;
        try {
            // Start with vector search
            let query = this.table.search(queryVector).limit(topK);
            // Apply filters if specified
            if (options.filterByFile) {
                query = query.where(`filePath LIKE '%${options.filterByFile}%'`);
            }
            if (options.filterByLanguage) {
                query = query.where(`language = '${options.filterByLanguage}'`);
            }
            if (options.filterByType) {
                query = query.where(`chunkType = '${options.filterByType}'`);
            }
            // Execute search
            const results = await query.toArray();
            // Convert to SearchResult format
            const searchResults = results.map((result) => {
                // LanceDB returns distance, convert to similarity score (0-1)
                // Using cosine similarity: score = 1 - (distance / 2)
                const distance = result._distance || 0;
                const score = Math.max(0, 1 - distance / 2);
                return {
                    chunk: {
                        id: result.id,
                        vector: result.vector,
                        filePath: result.filePath,
                        content: result.content,
                        startLine: result.startLine,
                        endLine: result.endLine,
                        chunkType: result.chunkType,
                        name: result.name,
                        language: result.language,
                        fileHash: result.fileHash,
                        timestamp: result.timestamp,
                        context: result.context,
                    },
                    score,
                    distance,
                };
            });
            // Filter by minimum score
            return searchResults.filter((r) => r.score >= minScore);
        }
        catch (error) {
            console.error(`Error searching vector store: ${error}`);
            throw error;
        }
    }
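    // -----------------------------------------------------------------------
    // Usage sketch (editor's addition, not part of the published file):
    // search() takes a query embedding, not raw text; queryEmbedding below is
    // a placeholder for output from dist/common/embeddingService.js, and its
    // dimension is an assumption.
    //
    //   const store = new VectorStore(".memorybank");
    //   const hits = await store.search(queryEmbedding /* number[] */, {
    //       topK: 5,
    //       minScore: 0.3,
    //       filterByLanguage: "typescript",
    //   });
    //   for (const hit of hits) {
    //       console.log(hit.score.toFixed(3), hit.chunk.filePath, hit.chunk.name);
    //   }
    // -----------------------------------------------------------------------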
    /**
     * Gets all chunks from a specific file
     */
    async getChunksByFile(filePath) {
        await this.ensureInitialized();
        if (!this.table) {
            return [];
        }
        try {
            // Filter through query(); the Table API has no direct .where()
            const results = await this.table
                .query()
                .where(`filePath = '${filePath}'`)
                .toArray();
            return results.map((r) => ({
                id: r.id,
                vector: r.vector,
                filePath: r.filePath,
                content: r.content,
                startLine: r.startLine,
                endLine: r.endLine,
                chunkType: r.chunkType,
                name: r.name,
                language: r.language,
                fileHash: r.fileHash,
                timestamp: r.timestamp,
                context: r.context,
            }));
        }
        catch (error) {
            console.error(`Error getting chunks by file: ${error}`);
            return [];
        }
    }
    /**
     * Gets statistics about the vector store
     */
    async getStats() {
        await this.ensureInitialized();
        if (!this.table) {
            return {
                totalChunks: 0,
                fileCount: 0,
                languageCounts: {},
                typeCounts: {},
            };
        }
        try {
            // Use query().toArray() instead of direct toArray()
            const allChunks = await this.table.query().toArray();
            const uniqueFiles = new Set();
            const languageCounts = {};
            const typeCounts = {};
            let latestTimestamp = 0;
            for (const chunk of allChunks) {
                uniqueFiles.add(chunk.filePath);
                languageCounts[chunk.language] = (languageCounts[chunk.language] || 0) + 1;
                typeCounts[chunk.chunkType] = (typeCounts[chunk.chunkType] || 0) + 1;
                if (chunk.timestamp > latestTimestamp) {
                    latestTimestamp = chunk.timestamp;
                }
            }
            return {
                totalChunks: allChunks.length,
                fileCount: uniqueFiles.size,
                languageCounts,
                typeCounts,
                lastUpdated: latestTimestamp > 0 ? new Date(latestTimestamp) : undefined,
            };
        }
        catch (error) {
            console.error(`Error getting stats: ${error}`);
            throw error;
        }
    }
    /**
     * Clears all data from the vector store
     */
    async clear() {
        await this.ensureInitialized();
        if (!this.table) {
            return;
        }
        try {
            // Drop the table
            await this.db.dropTable(this.tableName);
            this.table = null;
            console.error(`Cleared vector store table: ${this.tableName}`);
        }
        catch (error) {
            console.error(`Error clearing vector store: ${error}`);
            throw error;
        }
    }
    /**
     * Closes the database connection
     */
    async close() {
        // LanceDB connections are lightweight and don't need explicit closing
        // But we'll clear references
        this.table = null;
        this.db = null;
    }
    /**
     * Gets file hashes for all indexed files
     */
    async getFileHashes() {
        await this.ensureInitialized();
        if (!this.table) {
            return new Map();
        }
        try {
            // Use query().toArray() instead of direct toArray()
            const allChunks = await this.table.query().toArray();
            const fileHashes = new Map();
            for (const chunk of allChunks) {
                if (!fileHashes.has(chunk.filePath)) {
                    fileHashes.set(chunk.filePath, chunk.fileHash);
                }
            }
            return fileHashes;
        }
        catch (error) {
            console.error(`Error getting file hashes: ${error}`);
            return new Map();
        }
    }
}
/**
 * Creates a vector store from environment variables
 */
export function createVectorStore() {
    const storagePath = process.env.MEMORYBANK_STORAGE_PATH || ".memorybank";
    return new VectorStore(storagePath);
}
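To close, a plausible end-to-end lifecycle for the store above, sketched under stated assumptions: the import specifier, storage path, placeholder vectors, and file hashes are all hypothetical, and the chunk object simply mirrors the fields that search() reads back:

import { createVectorStore } from "@grec0/memory-bank-mcp/dist/common/vectorStore.js"; // hypothetical specifier

process.env.MEMORYBANK_STORAGE_PATH = "/tmp/memorybank-demo";
const store = createVectorStore();

// Chunks must carry every field that search() returns; the vector is a
// placeholder embedding (the real ones come from embeddingService.js).
await store.insertChunks([{
    id: "src/app.ts:1-3",
    vector: new Array(384).fill(0.01), // hypothetical 384-dim embedding
    filePath: "src/app.ts",
    content: "export function main() {}",
    startLine: 1,
    endLine: 3,
    chunkType: "function",
    name: "main",
    language: "typescript",
    fileHash: "abc123",
    timestamp: Date.now(),
    context: "",
}]);

const stats = await store.getStats();
console.log(`${stats.totalChunks} chunks across ${stats.fileCount} files`);

// Incremental re-indexing: compare stored hashes against fresh ones and drop
// stale files before re-inserting their chunks.
const hashes = await store.getFileHashes();
if (hashes.get("src/app.ts") !== "def456" /* hypothetical new hash */) {
    await store.deleteChunksByFile("src/app.ts");
}
await store.close();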