@grec0/memory-bank-mcp 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,302 @@
1
+ /**
2
+ * @fileoverview Embedding service for Memory Bank using OpenAI
3
+ * Generates vector embeddings for code chunks
4
+ */
5
+ import OpenAI from "openai";
6
+ import * as crypto from "crypto";
7
+ import * as fs from "fs";
8
+ import * as path from "path";
9
/**
 * Embedding service with caching and batch processing.
 *
 * Wraps the OpenAI embeddings API and adds:
 *  - an in-memory cache (persisted to disk as JSON) keyed by chunk id and
 *    validated against a SHA-256 hash of the chunk content and the model name,
 *  - batched requests with exponential-backoff retries on rate limits (429)
 *    and transient server errors (5xx).
 *
 * Diagnostics are written with console.error so stdout stays free for
 * protocol traffic (this runs inside an MCP stdio server).
 */
export class EmbeddingService {
    client;
    options;
    cache;
    /**
     * @param {string} apiKey - OpenAI API key; required.
     * @param {object} [options] - Overrides: model, dimensions, batchSize,
     *   enableCache, cachePath.
     * @throws {Error} When apiKey is falsy.
     */
    constructor(apiKey, options) {
        if (!apiKey) {
            throw new Error("OpenAI API key is required");
        }
        this.client = new OpenAI({ apiKey });
        // Use ?? (not ||) so explicit-but-falsy overrides are not clobbered
        // by the defaults (e.g. enableCache handled below, numeric 0s rejected
        // upstream by the API anyway).
        this.options = {
            model: options?.model ?? "text-embedding-3-small",
            dimensions: options?.dimensions ?? 1536,
            batchSize: options?.batchSize ?? 100,
            enableCache: options?.enableCache ?? true,
            cachePath: options?.cachePath ?? ".memorybank/embedding-cache.json",
        };
        this.cache = new Map();
        if (this.options.enableCache) {
            this.loadCache();
        }
    }
    /**
     * Loads the embedding cache from disk into memory.
     * Failures are logged and swallowed: the cache is only an optimization.
     */
    loadCache() {
        try {
            if (fs.existsSync(this.options.cachePath)) {
                const data = fs.readFileSync(this.options.cachePath, "utf-8");
                const entries = JSON.parse(data);
                for (const entry of entries) {
                    this.cache.set(entry.chunkId, entry);
                }
                console.error(`Loaded ${entries.length} cached embeddings`);
            }
        }
        catch (error) {
            console.error(`Warning: Could not load embedding cache: ${error}`);
        }
    }
    /**
     * Persists the in-memory cache to disk, creating the parent directory
     * when needed. Failures are logged and swallowed.
     */
    saveCache() {
        try {
            const dir = path.dirname(this.options.cachePath);
            if (!fs.existsSync(dir)) {
                fs.mkdirSync(dir, { recursive: true });
            }
            const entries = Array.from(this.cache.values());
            fs.writeFileSync(this.options.cachePath, JSON.stringify(entries, null, 2));
            console.error(`Saved ${entries.length} embeddings to cache`);
        }
        catch (error) {
            console.error(`Warning: Could not save embedding cache: ${error}`);
        }
    }
    /**
     * Returns the SHA-256 hex digest of `content`; used to detect content
     * changes that invalidate a cached vector.
     */
    hashContent(content) {
        return crypto.createHash("sha256").update(content).digest("hex");
    }
    /**
     * Returns the cached vector for a chunk, or null when caching is disabled,
     * the entry is missing, the content changed, or the model changed.
     * Stale entries (content or model mismatch) are evicted.
     */
    getCachedEmbedding(chunkId, content) {
        if (!this.options.enableCache) {
            return null;
        }
        const cached = this.cache.get(chunkId);
        if (!cached) {
            return null;
        }
        // Verify content hasn't changed
        const contentHash = this.hashContent(content);
        if (cached.contentHash !== contentHash) {
            // Content changed, invalidate cache
            this.cache.delete(chunkId);
            return null;
        }
        // Verify model matches; evict too so the stale entry does not linger
        // on disk forever (previously it was kept despite never being usable).
        if (cached.model !== this.options.model) {
            this.cache.delete(chunkId);
            return null;
        }
        return cached.vector;
    }
    /**
     * Stores a vector in the in-memory cache (no-op when caching is disabled).
     * The entry only reaches disk when saveCache() runs.
     */
    cacheEmbedding(chunkId, content, vector) {
        if (!this.options.enableCache) {
            return;
        }
        const contentHash = this.hashContent(content);
        this.cache.set(chunkId, {
            chunkId,
            contentHash,
            vector,
            model: this.options.model,
            timestamp: Date.now(),
        });
    }
    /**
     * Sleeps for `ms` milliseconds (retry backoff helper).
     */
    sleep(ms) {
        return new Promise((resolve) => setTimeout(resolve, ms));
    }
    /**
     * Generates embeddings for a batch of texts, retrying with exponential
     * backoff on 429 (rate limit) and 5xx (server) errors. Other errors are
     * rethrown immediately without retrying.
     * @param {string[]} texts - Texts to embed (one API call per attempt).
     * @param {number} [maxRetries=3] - Maximum number of attempts.
     * @returns {Promise<number[][]>} One vector per input text, same order.
     */
    async generateBatchWithRetry(texts, maxRetries = 3) {
        let lastError = null;
        for (let attempt = 0; attempt < maxRetries; attempt++) {
            try {
                const response = await this.client.embeddings.create({
                    model: this.options.model,
                    input: texts,
                    dimensions: this.options.dimensions,
                });
                return response.data.map((item) => item.embedding);
            }
            catch (error) {
                lastError = error;
                // Check if it's a rate limit error
                if (error?.status === 429 || error?.code === "rate_limit_exceeded") {
                    const backoffMs = Math.pow(2, attempt) * 1000; // Exponential backoff
                    console.error(`Rate limit hit, retrying in ${backoffMs}ms (attempt ${attempt + 1}/${maxRetries})`);
                    await this.sleep(backoffMs);
                    continue;
                }
                // Check if it's a temporary (5xx) server error
                if (error?.status >= 500 && error?.status < 600) {
                    const backoffMs = Math.pow(2, attempt) * 1000;
                    console.error(`Server error ${error.status}, retrying in ${backoffMs}ms (attempt ${attempt + 1}/${maxRetries})`);
                    await this.sleep(backoffMs);
                    continue;
                }
                // For other errors, don't retry
                throw error;
            }
        }
        // All retries failed
        throw lastError || new Error("Failed to generate embeddings after retries");
    }
    /**
     * Generates (or retrieves from cache) the embedding for a single chunk.
     * @returns {Promise<object>} { chunkId, vector, model, tokens } — tokens
     *   is 0 for cache hits and a rough chars/4 estimate otherwise.
     */
    async generateEmbedding(chunkId, content) {
        // Check cache first
        const cached = this.getCachedEmbedding(chunkId, content);
        if (cached) {
            return {
                chunkId,
                vector: cached,
                model: this.options.model,
                tokens: 0, // Not tracked for cached
            };
        }
        // Generate new embedding
        const vectors = await this.generateBatchWithRetry([content]);
        const vector = vectors[0];
        // Cache the result (in memory; persisted by batch flows / saveCache)
        this.cacheEmbedding(chunkId, content, vector);
        // Estimate tokens (rough approximation: ~4 chars per token)
        const tokens = Math.ceil(content.length / 4);
        return {
            chunkId,
            vector,
            model: this.options.model,
            tokens,
        };
    }
    /**
     * Generates embeddings for many chunks, serving cache hits first and
     * batching the rest through the API. Results preserve input order.
     * Persists the cache to disk when anything new was generated.
     * @param {Array<{id: string, content: string}>} chunks
     * @returns {Promise<object[]>} One result record per input chunk.
     */
    async generateBatchEmbeddings(chunks) {
        const results = [];
        const toGenerate = [];
        // Check cache and collect chunks that need generation
        for (let i = 0; i < chunks.length; i++) {
            const chunk = chunks[i];
            const cached = this.getCachedEmbedding(chunk.id, chunk.content);
            if (cached) {
                results[i] = {
                    chunkId: chunk.id,
                    vector: cached,
                    model: this.options.model,
                    tokens: 0,
                };
            }
            else {
                // Remember the original index so results stay position-aligned
                toGenerate.push({ ...chunk, index: i });
            }
        }
        console.error(`Generating embeddings: ${toGenerate.length} new, ${chunks.length - toGenerate.length} cached`);
        // Process in batches
        for (let i = 0; i < toGenerate.length; i += this.options.batchSize) {
            const batch = toGenerate.slice(i, i + this.options.batchSize);
            const batchTexts = batch.map((item) => item.content);
            console.error(`Processing batch ${Math.floor(i / this.options.batchSize) + 1}/${Math.ceil(toGenerate.length / this.options.batchSize)}`);
            try {
                const vectors = await this.generateBatchWithRetry(batchTexts);
                // Store results and cache
                for (let j = 0; j < batch.length; j++) {
                    const item = batch[j];
                    const vector = vectors[j];
                    // Cache the result
                    this.cacheEmbedding(item.id, item.content, vector);
                    // Estimate tokens (rough approximation: ~4 chars per token)
                    const tokens = Math.ceil(item.content.length / 4);
                    results[item.index] = {
                        chunkId: item.id,
                        vector,
                        model: this.options.model,
                        tokens,
                    };
                }
            }
            catch (error) {
                console.error(`Error generating batch embeddings: ${error}`);
                throw error;
            }
            // Small delay between batches to avoid rate limits
            if (i + this.options.batchSize < toGenerate.length) {
                await this.sleep(100);
            }
        }
        // Save cache after batch processing
        if (this.options.enableCache && toGenerate.length > 0) {
            this.saveCache();
        }
        return results;
    }
    /**
     * Generates the embedding for a search query. Routed through
     * generateBatchWithRetry so queries get the same 429/5xx retry behavior
     * as indexing (previously queries had no retry at all).
     * @param {string} query
     * @returns {Promise<number[]>} The query vector.
     */
    async generateQueryEmbedding(query) {
        try {
            const [vector] = await this.generateBatchWithRetry([query]);
            return vector;
        }
        catch (error) {
            console.error(`Error generating query embedding: ${error}`);
            throw error;
        }
    }
    /**
     * Clears the in-memory cache and deletes the on-disk cache file.
     */
    clearCache() {
        this.cache.clear();
        if (fs.existsSync(this.options.cachePath)) {
            fs.unlinkSync(this.options.cachePath);
        }
        console.error("Embedding cache cleared");
    }
    /**
     * Returns cache statistics: total entry count and a per-model breakdown.
     * @returns {{size: number, models: Object<string, number>}}
     */
    getCacheStats() {
        const models = {};
        for (const entry of this.cache.values()) {
            models[entry.model] = (models[entry.model] || 0) + 1;
        }
        return {
            size: this.cache.size,
            models,
        };
    }
}
287
/**
 * Creates an EmbeddingService configured from environment variables.
 *
 * Reads: OPENAI_API_KEY (required), MEMORYBANK_EMBEDDING_MODEL,
 * MEMORYBANK_EMBEDDING_DIMENSIONS, MEMORYBANK_STORAGE_PATH.
 *
 * @returns {EmbeddingService} A configured service instance.
 * @throws {Error} When OPENAI_API_KEY is unset or the dimensions variable
 *   is not a positive integer.
 */
export function createEmbeddingService() {
    const apiKey = process.env.OPENAI_API_KEY;
    if (!apiKey) {
        throw new Error("OPENAI_API_KEY environment variable is required. Get your API key from https://platform.openai.com/api-keys");
    }
    // Always pass the radix; also fail fast on garbage values rather than
    // sending NaN/negative dimensions to the API.
    const dimensions = Number.parseInt(process.env.MEMORYBANK_EMBEDDING_DIMENSIONS || "1536", 10);
    if (!Number.isInteger(dimensions) || dimensions <= 0) {
        throw new Error(`Invalid MEMORYBANK_EMBEDDING_DIMENSIONS: ${process.env.MEMORYBANK_EMBEDDING_DIMENSIONS}`);
    }
    const options = {
        model: process.env.MEMORYBANK_EMBEDDING_MODEL || "text-embedding-3-small",
        dimensions,
        enableCache: true,
        cachePath: path.join(process.env.MEMORYBANK_STORAGE_PATH || ".memorybank", "embedding-cache.json"),
    };
    return new EmbeddingService(apiKey, options);
}
@@ -0,0 +1,71 @@
1
/**
 * Base error type for all Planka API failures.
 *
 * Carries the HTTP status code and the raw response payload so callers can
 * branch on status or inspect server-provided details.
 */
export class PlankaError extends Error {
    status;
    response;
    /**
     * @param {string} message - Human-readable error description.
     * @param {number} status - HTTP status code of the failed request.
     * @param {object} [response] - Raw response body returned by the API.
     */
    constructor(message, status, response) {
        super(message);
        this.name = "PlankaError";
        this.status = status;
        this.response = response;
    }
}
11
/**
 * Validation failure reported by the Planka API (typically HTTP 422).
 * The original status and response body are passed through so callers can
 * inspect field-level details on `response`.
 */
export class PlankaValidationError extends PlankaError {
    constructor(message, status, response) {
        super(message, status, response);
        this.name = "PlankaValidationError";
    }
}
17
/**
 * 404 error for a missing Planka resource.
 * The `resource` string is interpolated into both the error message
 * ("Resource not found: <resource>") and a synthesized response body.
 */
export class PlankaResourceNotFoundError extends PlankaError {
    constructor(resource) {
        super(`Resource not found: ${resource}`, 404, {
            message: `${resource} not found`,
        });
        this.name = "PlankaResourceNotFoundError";
    }
}
25
/**
 * 401 error: the request was not (or not correctly) authenticated.
 * A synthesized `{ message }` response body is attached for consistency
 * with errors built from real API responses.
 */
export class PlankaAuthenticationError extends PlankaError {
    constructor(message = "Authentication failed") {
        super(message, 401, { message });
        this.name = "PlankaAuthenticationError";
    }
}
31
/**
 * 403 error: the authenticated user lacks permission for the operation.
 */
export class PlankaPermissionError extends PlankaError {
    constructor(message = "Insufficient permissions") {
        super(message, 403, { message });
        this.name = "PlankaPermissionError";
    }
}
37
/**
 * 429 error: the API rate limit was exceeded.
 * Exposes `resetAt`, the time at which the limit is expected to reset.
 */
export class PlankaRateLimitError extends PlankaError {
    resetAt;
    /**
     * @param {string} [message] - Error description.
     * @param {Date} [resetAt] - When the limit resets; defaults to 60s from
     *   now. (Previously omitting resetAt threw a TypeError on
     *   `resetAt.toISOString()` even though `message` had a default.)
     */
    constructor(message = "Rate limit exceeded", resetAt = new Date(Date.now() + 60000)) {
        super(message, 429, { message, reset_at: resetAt.toISOString() });
        this.resetAt = resetAt;
        this.name = "PlankaRateLimitError";
    }
}
45
/**
 * 409 error: the request conflicts with the current resource state.
 * Unlike the 401/403 variants, `message` has no default here.
 */
export class PlankaConflictError extends PlankaError {
    constructor(message) {
        super(message, 409, { message });
        this.name = "PlankaConflictError";
    }
}
51
/**
 * Type guard: returns true when `error` is a PlankaError (or any subclass).
 */
export function isPlankaError(error) {
    return error instanceof PlankaError;
}
54
/**
 * Builds the most specific PlankaError subclass for an HTTP status code and
 * response payload. Unrecognized statuses fall back to a plain PlankaError.
 *
 * @param {number} status - HTTP status code from the failed request.
 * @param {object} [response] - Parsed response body (may carry `message`
 *   and, for 429, `reset_at`).
 * @returns {PlankaError} The matching error instance.
 */
export function createPlankaError(status, response) {
    const message = response?.message;
    if (status === 401) {
        return new PlankaAuthenticationError(message);
    }
    if (status === 403) {
        return new PlankaPermissionError(message);
    }
    if (status === 404) {
        return new PlankaResourceNotFoundError(message || "Resource");
    }
    if (status === 409) {
        return new PlankaConflictError(message || "Conflict occurred");
    }
    if (status === 422) {
        return new PlankaValidationError(message || "Validation failed", status, response);
    }
    if (status === 429) {
        return new PlankaRateLimitError(message, new Date(response?.reset_at || Date.now() + 60000));
    }
    return new PlankaError(message || "Planka API error", status, response);
}
@@ -0,0 +1,261 @@
1
+ /**
2
+ * @fileoverview File scanner for Memory Bank
3
+ * Scans workspace files respecting .gitignore and .memoryignore patterns
4
+ */
5
+ import * as fs from "fs";
6
+ import * as path from "path";
7
+ import * as crypto from "crypto";
8
+ import ignoreLib from "ignore";
9
// Handle ignore library export:
// the `ignore` package ships CommonJS; depending on the bundler/Node interop
// it surfaces either as the factory function itself or wrapped under
// `.default` — normalize to the callable factory either way.
const ignore = typeof ignoreLib === 'function' ? ignoreLib : ignoreLib.default;
11
// Language detection by file extension.
// All lookups (detectLanguage, isCodeFile) lowercase the extension first,
// so keys must be lowercase — the former ".R" entry was unreachable and has
// been removed (".r" already covers R sources).
const LANGUAGE_MAP = {
    ".ts": "typescript",
    ".tsx": "typescript",
    ".js": "javascript",
    ".jsx": "javascript",
    ".mjs": "javascript",
    ".cjs": "javascript",
    ".py": "python",
    ".java": "java",
    ".c": "c",
    ".cpp": "cpp",
    ".cc": "cpp",
    ".cxx": "cpp",
    ".h": "c",
    ".hpp": "cpp",
    ".cs": "csharp",
    ".go": "go",
    ".rs": "rust",
    ".rb": "ruby",
    ".php": "php",
    ".swift": "swift",
    ".kt": "kotlin",
    ".kts": "kotlin",
    ".scala": "scala",
    ".r": "r",
    ".sql": "sql",
    ".sh": "shell",
    ".bash": "shell",
    ".zsh": "shell",
    ".fish": "shell",
    ".md": "markdown",
    ".json": "json",
    ".yaml": "yaml",
    ".yml": "yaml",
    ".xml": "xml",
    ".html": "html",
    ".htm": "html",
    ".css": "css",
    ".scss": "scss",
    ".sass": "sass",
    ".vue": "vue",
    ".svelte": "svelte",
};
56
// Binary file extensions to skip.
// Consulted by isBinaryFile() with a lowercased extension, so entries must
// be lowercase. Rows grouped roughly by: executables/libraries, documents &
// images, audio/video, archives, databases, fonts, compiled artifacts.
const BINARY_EXTENSIONS = new Set([
    ".exe", ".dll", ".so", ".dylib", ".bin",
    ".pdf", ".jpg", ".jpeg", ".png", ".gif", ".svg", ".ico",
    ".mp3", ".mp4", ".avi", ".mov", ".wav",
    ".zip", ".tar", ".gz", ".rar", ".7z",
    ".db", ".sqlite", ".sqlite3",
    ".woff", ".woff2", ".ttf", ".eot",
    ".pyc", ".class", ".o", ".a",
]);
66
/**
 * Builds an ignore matcher for the workspace.
 *
 * Always excludes .git, .memorybank and common build/dependency directories,
 * then layers on patterns from .gitignore and .memoryignore when present.
 * Read failures are logged and skipped (an unreadable ignore file must not
 * abort a scan).
 *
 * @param {string} rootPath - Workspace root directory.
 * @returns {object} An `ignore` matcher (`ig.ignores(relPath)`).
 */
export function loadIgnorePatterns(rootPath) {
    const ig = ignore();
    // Always ignore .git directory and .memorybank storage
    ig.add([".git", ".memorybank", "node_modules", "dist", "build", "out"]);
    // The two ignore files are handled identically; loop instead of the
    // previous copy-pasted blocks (log strings preserved verbatim).
    for (const fileName of [".gitignore", ".memoryignore"]) {
        const filePath = path.join(rootPath, fileName);
        if (!fs.existsSync(filePath)) {
            continue;
        }
        try {
            ig.add(fs.readFileSync(filePath, "utf-8"));
            console.error(`Loaded ${fileName} patterns from ${filePath}`);
        }
        catch (error) {
            console.error(`Warning: Could not read ${fileName}: ${error}`);
        }
    }
    return ig;
}
99
/**
 * Computes the SHA-256 digest of a file's raw bytes.
 *
 * @param {string} filePath - File to hash (read synchronously, in full).
 * @returns {string} Lowercase hex digest.
 */
export function calculateFileHash(filePath) {
    const hasher = crypto.createHash("sha256");
    hasher.update(fs.readFileSync(filePath));
    return hasher.digest("hex");
}
106
/**
 * Maps a file's extension to a language identifier.
 *
 * @param {string} filePath - Path whose extension is examined.
 * @returns {string} Language name from LANGUAGE_MAP, or "unknown".
 */
export function detectLanguage(filePath) {
    const extension = path.extname(filePath).toLowerCase();
    return LANGUAGE_MAP[extension] ?? "unknown";
}
113
/**
 * Reports whether a file should be treated as binary (and skipped),
 * judged purely by its extension.
 *
 * @param {string} filePath
 * @returns {boolean}
 */
export function isBinaryFile(filePath) {
    return BINARY_EXTENSIONS.has(path.extname(filePath).toLowerCase());
}
120
/**
 * Decides whether a file should be indexed as code.
 *
 * True for files with a known language extension and for well-known
 * extensionless build files (Makefile, Dockerfile, ...); false for
 * binary files and everything else.
 *
 * @param {string} filePath
 * @returns {boolean}
 */
export function isCodeFile(filePath) {
    if (isBinaryFile(filePath)) {
        return false;
    }
    // Known language extension?
    const extension = path.extname(filePath).toLowerCase();
    if (LANGUAGE_MAP[extension]) {
        return true;
    }
    // Special-cased code files that carry no useful extension
    const wellKnownNames = [
        "Makefile", "Dockerfile", "Jenkinsfile", "Vagrantfile",
        "Rakefile", "Gemfile", "Podfile", "Fastfile",
    ];
    return wellKnownNames.includes(path.basename(filePath));
}
140
/**
 * Recursively walks `dirPath`, appending a metadata record to `results`
 * (mutated in place) for every indexable code file that passes the
 * hidden/ignore/size filters.
 *
 * @param {string} dirPath - Directory currently being scanned.
 * @param {string} rootPath - Workspace root; result paths are relative to it.
 * @param {object} ig - Matcher returned by loadIgnorePatterns().
 * @param {object} options - Fully-populated scan options
 *   (recursive, includeHidden, maxFileSize).
 * @param {Array} results - Accumulator for file metadata records.
 */
function scanDirectoryRecursive(dirPath, rootPath, ig, options, results) {
    let entries;
    try {
        entries = fs.readdirSync(dirPath, { withFileTypes: true });
    }
    catch (error) {
        console.error(`Warning: Could not read directory ${dirPath}: ${error}`);
        return;
    }
    for (const entry of entries) {
        const fullPath = path.join(dirPath, entry.name);
        const relativePath = path.relative(rootPath, fullPath);
        // Skip hidden files/directories unless explicitly included
        if (!options.includeHidden && entry.name.startsWith(".")) {
            continue;
        }
        // Check ignore patterns (use forward slashes for cross-platform
        // compatibility). For directories, additionally test the
        // slash-terminated form: gitignore-style directory patterns such as
        // "build/" only match "build/" in the ignore library, never the
        // bare "build" — without this, directory-only patterns were ignored.
        const relativePathForward = relativePath.split(path.sep).join("/");
        if (ig.ignores(relativePathForward) ||
            (entry.isDirectory() && ig.ignores(`${relativePathForward}/`))) {
            continue;
        }
        if (entry.isDirectory()) {
            if (options.recursive) {
                scanDirectoryRecursive(fullPath, rootPath, ig, options, results);
            }
        }
        else if (entry.isFile()) {
            try {
                // Only index recognized code files
                if (!isCodeFile(fullPath)) {
                    continue;
                }
                const stats = fs.statSync(fullPath);
                // Enforce the size limit before hashing (hashing reads the whole file)
                if (stats.size > options.maxFileSize) {
                    console.error(`Skipping large file (${stats.size} bytes): ${relativePath}`);
                    continue;
                }
                // Calculate hash and collect metadata
                const hash = calculateFileHash(fullPath);
                const language = detectLanguage(fullPath);
                const extension = path.extname(fullPath);
                results.push({
                    path: relativePath,
                    absolutePath: fullPath,
                    hash,
                    size: stats.size,
                    mtime: stats.mtime,
                    language,
                    extension,
                });
            }
            catch (error) {
                console.error(`Warning: Could not process file ${fullPath}: ${error}`);
            }
        }
    }
}
201
/**
 * Scans a workspace directory tree and returns metadata for every code file
 * that should be indexed.
 *
 * @param {object} options - rootPath (required); optional recursive
 *   (default true), includeHidden (default false), maxFileSize
 *   (default 10MB).
 * @returns {Array} File metadata records (path, absolutePath, hash, size,
 *   mtime, language, extension).
 * @throws {Error} When rootPath does not exist or is not a directory.
 */
export function scanFiles(options) {
    // ?? instead of ||/undefined-ternaries: null now falls back like
    // undefined, and an explicit maxFileSize is honored even when unusual.
    const fullOptions = {
        rootPath: options.rootPath,
        recursive: options.recursive ?? true,
        includeHidden: options.includeHidden ?? false,
        maxFileSize: options.maxFileSize ?? 10 * 1024 * 1024, // 10MB default
    };
    // Validate root path
    if (!fs.existsSync(fullOptions.rootPath)) {
        throw new Error(`Root path does not exist: ${fullOptions.rootPath}`);
    }
    const stats = fs.statSync(fullOptions.rootPath);
    if (!stats.isDirectory()) {
        throw new Error(`Root path is not a directory: ${fullOptions.rootPath}`);
    }
    console.error(`Scanning files in: ${fullOptions.rootPath}`);
    // Load ignore patterns
    const ig = loadIgnorePatterns(fullOptions.rootPath);
    // Scan files
    const results = [];
    scanDirectoryRecursive(fullOptions.rootPath, fullOptions.rootPath, ig, fullOptions, results);
    console.error(`Found ${results.length} code files to index`);
    return results;
}
228
/**
 * Scans a single file and returns its metadata record, or null when the
 * file is missing, not a regular file, not a code file, or unreadable.
 * Errors are logged (never thrown to the caller).
 *
 * @param {string} filePath - Absolute or cwd-relative path to the file.
 * @param {string} rootPath - Workspace root used to compute the relative path.
 * @returns {object|null} Metadata record or null.
 */
export function scanSingleFile(filePath, rootPath) {
    try {
        if (!fs.existsSync(filePath)) {
            throw new Error(`File does not exist: ${filePath}`);
        }
        const stats = fs.statSync(filePath);
        if (!stats.isFile()) {
            throw new Error(`Path is not a file: ${filePath}`);
        }
        if (!isCodeFile(filePath)) {
            return null;
        }
        return {
            path: path.relative(rootPath, filePath),
            absolutePath: filePath,
            hash: calculateFileHash(filePath),
            size: stats.size,
            mtime: stats.mtime,
            language: detectLanguage(filePath),
            extension: path.extname(filePath),
        };
    }
    catch (error) {
        console.error(`Error scanning file ${filePath}: ${error}`);
        return null;
    }
}