@aws/lsp-codewhisperer 0.0.81 → 0.0.82

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39) hide show
  1. package/CHANGELOG.md +17 -0
  2. package/out/language-server/agenticChat/agenticChatController.js +80 -2
  3. package/out/language-server/agenticChat/agenticChatController.js.map +1 -1
  4. package/out/language-server/agenticChat/context/additionalContextProvider.d.ts +1 -1
  5. package/out/language-server/agenticChat/context/additionalContextProvider.js +50 -3
  6. package/out/language-server/agenticChat/context/additionalContextProvider.js.map +1 -1
  7. package/out/language-server/agenticChat/context/memorybank/memoryBankController.d.ts +104 -0
  8. package/out/language-server/agenticChat/context/memorybank/memoryBankController.js +680 -0
  9. package/out/language-server/agenticChat/context/memorybank/memoryBankController.js.map +1 -0
  10. package/out/language-server/agenticChat/context/memorybank/memoryBankPrompts.d.ts +14 -0
  11. package/out/language-server/agenticChat/context/memorybank/memoryBankPrompts.js +156 -0
  12. package/out/language-server/agenticChat/context/memorybank/memoryBankPrompts.js.map +1 -0
  13. package/out/language-server/agenticChat/tools/qCodeAnalysis/codeReview.js +3 -3
  14. package/out/language-server/agenticChat/tools/qCodeAnalysis/codeReview.js.map +1 -1
  15. package/out/language-server/agenticChat/tools/qCodeAnalysis/codeReviewConstants.js +1 -1
  16. package/out/language-server/chat/telemetry/chatTelemetryController.d.ts +1 -1
  17. package/out/language-server/chat/telemetry/chatTelemetryController.js +2 -1
  18. package/out/language-server/chat/telemetry/chatTelemetryController.js.map +1 -1
  19. package/out/language-server/inline-completion/codeWhispererServer.js +6 -4
  20. package/out/language-server/inline-completion/codeWhispererServer.js.map +1 -1
  21. package/out/language-server/inline-completion/editCompletionHandler.d.ts +1 -1
  22. package/out/language-server/inline-completion/editCompletionHandler.js +6 -3
  23. package/out/language-server/inline-completion/editCompletionHandler.js.map +1 -1
  24. package/out/language-server/inline-completion/session/sessionManager.d.ts +7 -1
  25. package/out/language-server/inline-completion/session/sessionManager.js +20 -4
  26. package/out/language-server/inline-completion/session/sessionManager.js.map +1 -1
  27. package/out/shared/amazonQServiceManager/AmazonQTokenServiceManager.js +2 -0
  28. package/out/shared/amazonQServiceManager/AmazonQTokenServiceManager.js.map +1 -1
  29. package/out/shared/codeWhispererService.d.ts +2 -2
  30. package/out/shared/codeWhispererService.js +4 -4
  31. package/out/shared/codeWhispererService.js.map +1 -1
  32. package/out/shared/supplementalContextUtil/supplementalContextUtil.js +4 -4
  33. package/out/shared/supplementalContextUtil/supplementalContextUtil.js.map +1 -1
  34. package/out/shared/telemetry/telemetryService.d.ts +1 -0
  35. package/out/shared/telemetry/telemetryService.js +10 -8
  36. package/out/shared/telemetry/telemetryService.js.map +1 -1
  37. package/out/shared/testUtils.js +2 -0
  38. package/out/shared/testUtils.js.map +1 -1
  39. package/package.json +1 -1
@@ -0,0 +1,680 @@
1
"use strict";
/*!
 * Copyright Amazon.com, Inc. or its affiliates.
 * All Rights Reserved. SPDX-License-Identifier: Apache-2.0
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.MemoryBankController = void 0;
const memoryBankPrompts_1 = require("./memoryBankPrompts");
const mcpUtils_1 = require("../../tools/mcp/mcpUtils");
// Workspace-relative directory where the memory bank markdown files live.
const MEMORY_BANK_DIRECTORY = '.amazonq/rules/memory-bank';
// The set of files that together make up a complete memory bank.
const MEMORY_BANK_FILES = {
    PRODUCT: 'product.md',
    STRUCTURE: 'structure.md',
    TECH: 'tech.md',
    GUIDELINES: 'guidelines.md',
};
/**
 * Controller for Memory Bank functionality.
 *
 * Detects "create a memory bank" style chat requests, runs a deterministic
 * TF-IDF analysis over the workspace's source files to pick representative
 * files, and assembles the comprehensive prompt used to generate the memory
 * bank documents.
 */
class MemoryBankController {
    features;
    static instance;
    constructor(features) {
        this.features = features;
    }
    /** Lazily create and return the process-wide singleton instance. */
    static getInstance(features) {
        if (!MemoryBankController.instance) {
            MemoryBankController.instance = new MemoryBankController(features);
        }
        return MemoryBankController.instance;
    }
    /**
     * Check if a prompt is requesting memory bank creation.
     * Matching is a simple case-insensitive substring check; the trigger
     * list can be expanded based on feedback.
     */
    isMemoryBankCreationRequest(prompt) {
        const normalizedPrompt = prompt.toLowerCase().trim();
        const triggers = [
            'create a memory bank',
            'create memory bank',
            'generate a memory bank',
            'generate memory bank',
            'regenerate memory bank',
            'build memory bank',
            'make memory bank',
            'setup memory bank',
        ];
        return triggers.some(trigger => normalizedPrompt.includes(trigger));
    }
    /**
     * Prepare the comprehensive memory bank creation prompt.
     *
     * Does all programmatic work upfront (clean directory, TF-IDF analysis,
     * one LLM call to rank files) and returns a single prompt string.
     *
     * @param workspaceFolderUri workspace folder URI to analyze
     * @param llmCallFunction async (prompt: string) => string used for file ranking
     * @returns the final memory bank generation prompt
     * @throws rethrows any preparation failure after logging it
     */
    async prepareComprehensiveMemoryBankPrompt(workspaceFolderUri, llmCallFunction) {
        try {
            this.features.logging.info(`Memory Bank: Starting pre-processing for workspace: "${workspaceFolderUri}"`);
            // Step 1: Clean directory
            await this.cleanMemoryBankDirectory(workspaceFolderUri);
            // Step 2: Execute deterministic analysis (TF-IDF)
            this.features.logging.info(`Memory Bank: running analysis for workspace`);
            const analysisResults = await this.executeGuidelinesGenerationPipeline(workspaceFolderUri);
            // Step 3: Make LLM call for file ranking
            const rankingPrompt = memoryBankPrompts_1.MemoryBankPrompts.getFileRankingPrompt(analysisResults.formattedFilesString, 10);
            const rankedFilesResponse = await llmCallFunction(rankingPrompt);
            // Step 4: Parse ranked files; on any parse failure fall back to the
            // deterministic TF-IDF ranking so the pipeline never dies here.
            let rankedFilesList = [];
            try {
                // Clean the response - remove any markdown formatting or extra text
                let cleanResponse = rankedFilesResponse.trim();
                // Extract JSON array if it's wrapped in markdown or other text
                const jsonMatch = cleanResponse.match(/\[.*\]/s);
                if (jsonMatch) {
                    cleanResponse = jsonMatch[0];
                }
                else if (cleanResponse.includes('",') && cleanResponse.includes('"')) {
                    // LLM returned comma-separated quoted strings without brackets:
                    // add brackets to make it a valid JSON array.
                    cleanResponse = `[${cleanResponse}]`;
                }
                rankedFilesList = JSON.parse(cleanResponse);
                if (!Array.isArray(rankedFilesList)) {
                    throw new Error('Invalid ranking response format - not an array');
                }
                // Validate that all items are non-empty strings (file paths)
                rankedFilesList = rankedFilesList.filter(item => typeof item === 'string' && item.length > 0);
                if (rankedFilesList.length === 0) {
                    throw new Error('No valid file paths in ranking response');
                }
                this.features.logging.info(`Memory Bank: parsed ${rankedFilesList.length} ranked files from LLM response`);
            }
            catch (error) {
                this.features.logging.warn(`Memory Bank: failed to parse LLM ranking response, using TF-IDF fallback: ${error}`);
                rankedFilesList = analysisResults.rankedFilesList.slice(0, 10);
            }
            this.features.logging.info(`Memory Bank: using ${rankedFilesList.length} files for documentation generation`);
            // Step 5: Create the comprehensive prompt with ranked files and workspace path
            const normalizedWorkspacePath = (0, mcpUtils_1.normalizePathFromUri)(workspaceFolderUri, this.features.logging);
            this.features.logging.info(`Memory Bank: Generating final prompt with path: "${normalizedWorkspacePath}"`);
            const finalPrompt = memoryBankPrompts_1.MemoryBankPrompts.getCompleteMemoryBankPrompt(rankedFilesList, normalizedWorkspacePath);
            return finalPrompt;
        }
        catch (error) {
            this.features.logging.error(`Memory Bank preparation failed: ${error}`);
            throw error;
        }
    }
    /**
     * Remove any existing memory bank files and (re)create the directory.
     * Individual file removal failures are logged and ignored; only the
     * final mkdir failure is fatal.
     */
    async cleanMemoryBankDirectory(workspaceFolderUri) {
        try {
            const normalizedWorkspacePath = (0, mcpUtils_1.normalizePathFromUri)(workspaceFolderUri, this.features.logging);
            const memoryBankPath = `${normalizedWorkspacePath}/${MEMORY_BANK_DIRECTORY}`;
            // Remove all existing memory bank files to ensure clean recreation.
            // Use the canonical file list so this stays in sync with MEMORY_BANK_FILES.
            const filesToRemove = Object.values(MEMORY_BANK_FILES);
            let removedCount = 0;
            for (const fileName of filesToRemove) {
                const filePath = `${memoryBankPath}/${fileName}`;
                try {
                    const exists = await this.features.workspace.fs.exists(filePath);
                    if (exists) {
                        await this.features.workspace.fs.rm(filePath);
                        removedCount++;
                    }
                }
                catch (error) {
                    // Best-effort: a file that cannot be removed should not abort the cleanup
                    this.features.logging.error(`Could not remove ${fileName}: ${error}`);
                }
            }
            if (removedCount > 0) {
                this.features.logging.info(`Memory Bank: cleaned ${removedCount} existing files`);
            }
            // Create the directory structure using mkdir with recursive option
            await this.features.workspace.fs.mkdir(memoryBankPath, { recursive: true });
        }
        catch (error) {
            this.features.logging.error(`Memory Bank directory creation failed: ${error}`);
            throw error;
        }
    }
    /**
     * Discover source files across all workspace folders and attach a
     * line count to each. Returns [] on failure.
     */
    async discoverAllSourceFiles(workspaceFolderUri, extensions) {
        try {
            // Collect the set of folders to scan; fall back to the provided URI
            // when the workspace reports no folders.
            const allWorkspaceFolders = this.features.workspace.getAllWorkspaceFolders();
            const workspaceFolders = allWorkspaceFolders?.map(({ uri }) => {
                return (0, mcpUtils_1.normalizePathFromUri)(uri, this.features.logging);
            }) ?? [(0, mcpUtils_1.normalizePathFromUri)(workspaceFolderUri, this.features.logging)];
            // Collect files from all workspace folders
            let allSourceFiles = [];
            for (const folder of workspaceFolders) {
                const sourceFiles = await this.discoverSourceFiles(folder, extensions);
                this.features.logging.info(`Found ${sourceFiles.length} files in "${folder}"`);
                allSourceFiles.push(...sourceFiles);
            }
            this.features.logging.info(`Total files discovered: ${allSourceFiles.length}`);
            // OPTIMIZATION: Parallel line counting with batching to bound concurrent I/O
            const batchSize = 10; // Process 10 files at a time
            const files = [];
            for (let i = 0; i < allSourceFiles.length; i += batchSize) {
                const batch = allSourceFiles.slice(i, i + batchSize);
                const batchResults = await Promise.all(batch.map(async (filePath) => ({
                    path: filePath,
                    // NOTE: "size" is a line count, not bytes — see calculateFileLineCount
                    size: await this.calculateFileLineCount(filePath),
                })));
                files.push(...batchResults);
            }
            return files;
        }
        catch (error) {
            this.features.logging.error(`Error in getAllFiles: ${error}`);
            return [];
        }
    }
    /**
     * Count the lines in a file. Returns 0 for unreadable files.
     */
    async calculateFileLineCount(filePath) {
        try {
            const content = await this.features.workspace.fs.readFile(filePath);
            return content.split('\n').length;
        }
        catch (error) {
            this.features.logging.error(`Error reading file ${filePath}: ${error}`);
            return 0;
        }
    }
    /**
     * Compute each file's mean lexical dissimilarity (1 - mean cosine
     * similarity of TF-IDF vectors) to all files. Falls back to 0.85 for
     * every file when no content is readable or the computation fails.
     */
    async calculateLexicalDissimilarity(files) {
        try {
            // OPTIMIZATION: Parallel file reading with batching
            const batchSize = 20; // Process 20 files at a time to reduce I/O overhead
            const fileContents = [];
            let hasReadErrors = false;
            for (let i = 0; i < files.length; i += batchSize) {
                const batch = files.slice(i, i + batchSize);
                const batchContents = await Promise.all(batch.map(async (file) => {
                    try {
                        return await this.features.workspace.fs.readFile(file.path);
                    }
                    catch (error) {
                        this.features.logging.warn(`Could not read file for TF-IDF analysis: ${file.path}`);
                        hasReadErrors = true;
                        return ''; // Empty content for unreadable files
                    }
                }));
                fileContents.push(...batchContents);
            }
            // Check if all files are empty (no content to analyze)
            const hasContent = fileContents.some(content => content.trim().length > 0);
            if (!hasContent) {
                // If no files have content due to read errors, log as error
                if (hasReadErrors) {
                    this.features.logging.error('All files failed to read or are empty, using fallback dissimilarity values');
                }
                // If no files have content, return fallback values
                return files.map(f => ({ ...f, dissimilarity: 0.85 }));
            }
            // Step 2: Get the TF-IDF vectors for each file (equivalent to sklearn's TfidfVectorizer)
            const tfidfMatrix = this.createTfidfMatrix(fileContents);
            // Step 3: Get the cosine similarity of each file (equivalent to sklearn's cosine_similarity)
            const cosineSimilarities = this.calculateCosineSimilarityMatrix(tfidfMatrix);
            // Step 4: Get the lexical dissimilarity of each file (1 - similarity)
            const lexicalDissimilarities = [];
            for (let i = 0; i < cosineSimilarities.length; i++) {
                // Mean similarity of this file with all files (including itself)
                const meanSimilarity = cosineSimilarities[i].reduce((sum, sim) => sum + sim, 0) / cosineSimilarities[i].length;
                const dissimilarity = 1 - meanSimilarity;
                lexicalDissimilarities.push({
                    path: files[i].path,
                    size: files[i].size,
                    dissimilarity: Math.max(0.0, Math.min(1.0, dissimilarity)), // Ensure bounds [0,1]
                });
            }
            return lexicalDissimilarities;
        }
        catch (error) {
            this.features.logging.error(`Error in calculateLexicalDissimilarity: ${error}`);
            // Fallback to reasonable defaults if TF-IDF calculation fails
            return files.map(f => ({ ...f, dissimilarity: 0.85 }));
        }
    }
    /**
     * Create a TF-IDF matrix: one Map<term, tfidf_score> per document.
     */
    createTfidfMatrix(documents) {
        // Step 1: Tokenize all documents and build vocabulary
        const tokenizedDocs = documents.map(doc => this.tokenizeDocument(doc));
        const vocabulary = new Set();
        tokenizedDocs.forEach(tokens => tokens.forEach(token => vocabulary.add(token)));
        const vocabArray = Array.from(vocabulary);
        const numDocs = documents.length;
        // Step 2: Calculate document frequencies (DF).
        // Use per-document term Sets so each membership check is O(1) instead of
        // scanning the token list for every vocabulary term (was O(vocab*docs*tokens)).
        const tokenSets = tokenizedDocs.map(tokens => new Set(tokens));
        const documentFrequencies = new Map();
        vocabArray.forEach(term => {
            let df = 0;
            for (const tokenSet of tokenSets) {
                if (tokenSet.has(term)) {
                    df++;
                }
            }
            documentFrequencies.set(term, df);
        });
        // Step 3: Calculate TF-IDF for each document
        const tfidfMatrix = [];
        for (let docIndex = 0; docIndex < numDocs; docIndex++) {
            const tokens = tokenizedDocs[docIndex];
            const tfidfVector = new Map();
            // Calculate term frequencies for this document
            const termFrequencies = new Map();
            tokens.forEach(token => {
                termFrequencies.set(token, (termFrequencies.get(token) || 0) + 1);
            });
            // Calculate TF-IDF for each term in vocabulary
            vocabArray.forEach(term => {
                const tf = termFrequencies.get(term) || 0;
                const df = documentFrequencies.get(term) || 1;
                const idf = Math.log(numDocs / df);
                const tfidf = tf * idf;
                tfidfVector.set(term, tfidf);
            });
            tfidfMatrix.push(tfidfVector);
        }
        return tfidfMatrix;
    }
    /**
     * Calculate the full pairwise cosine similarity matrix.
     */
    calculateCosineSimilarityMatrix(tfidfMatrix) {
        const numDocs = tfidfMatrix.length;
        const similarities = [];
        for (let i = 0; i < numDocs; i++) {
            const row = [];
            for (let j = 0; j < numDocs; j++) {
                const similarity = this.calculateCosineSimilarity(tfidfMatrix[i], tfidfMatrix[j]);
                row.push(similarity);
            }
            similarities.push(row);
        }
        return similarities;
    }
    /**
     * Calculate cosine similarity between two TF-IDF vectors.
     * Returns 0 when either vector has zero norm.
     */
    calculateCosineSimilarity(vectorA, vectorB) {
        let dotProduct = 0;
        let normA = 0;
        let normB = 0;
        // Get all unique terms from both vectors
        const allTerms = new Set([...vectorA.keys(), ...vectorB.keys()]);
        allTerms.forEach(term => {
            const valueA = vectorA.get(term) || 0;
            const valueB = vectorB.get(term) || 0;
            dotProduct += valueA * valueB;
            normA += valueA * valueA;
            normB += valueB * valueB;
        });
        // Avoid division by zero
        if (normA === 0 || normB === 0) {
            return 0;
        }
        return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
    }
    /**
     * Tokenize a document into lowercase terms (punctuation replaced with
     * whitespace, tokens of length <= 2 dropped).
     */
    tokenizeDocument(document) {
        return document
            .toLowerCase()
            .replace(/[^\w\s]/g, ' ') // Replace punctuation with spaces
            .split(/\s+/) // Split on whitespace
            .filter(token => token.length > 2); // Filter out very short tokens
    }
    /**
     * Execute the complete guidelines generation pipeline.
     * https://code.amazon.com/packages/QIDEPersonalization/blobs/mainline/--/src/stylefile-gen.ipynb
     *
     * @returns discovered files, per-file dissimilarity, the formatted
     *          string for LLM ranking, and a deterministic fallback ranking
     * @throws when no source files are found or analysis fails
     */
    async executeGuidelinesGenerationPipeline(workspaceFolderUri) {
        try {
            // Step 1: Discover all source files
            // OPTIMIZATION: Prioritize common extensions first for faster discovery
            const extensions = [
                '.ts',
                '.js',
                '.tsx',
                '.jsx',
                '.py',
                '.java',
                '.cpp',
                '.c',
                '.h',
                '.cs',
                '.go',
                '.rs',
                '.php',
                '.rb',
                '.swift',
                '.kt',
                '.scala',
            ];
            const discoveredFiles = await this.discoverAllSourceFiles(workspaceFolderUri, extensions);
            if (discoveredFiles.length === 0) {
                throw new Error('No source files found in workspace');
            }
            // Filter out very large files to prevent conversation overflow.
            // NOTE: file.size is a line count (see calculateFileLineCount), so
            // this is a 20,000-line cap, not a byte/character limit.
            const MAX_FILE_SIZE_FOR_MEMORY_BANK = 20000;
            const reasonableSizedFiles = discoveredFiles.filter(file => file.size <= MAX_FILE_SIZE_FOR_MEMORY_BANK);
            this.features.logging.debug(`Memory Bank analysis: filtered ${discoveredFiles.length - reasonableSizedFiles.length} files over ${MAX_FILE_SIZE_FOR_MEMORY_BANK} lines`);
            // Limit files to prevent memory exhaustion on large projects
            const MAX_FILES_FOR_ANALYSIS = 200;
            let filesToAnalyze;
            if (reasonableSizedFiles.length > MAX_FILES_FOR_ANALYSIS) {
                const shuffled = [...reasonableSizedFiles].sort(() => Math.random() - 0.5);
                filesToAnalyze = shuffled.slice(0, MAX_FILES_FOR_ANALYSIS);
                this.features.logging.info(`Memory Bank analysis: randomly selected ${filesToAnalyze.length} files (from ${reasonableSizedFiles.length} reasonable-sized files for ranking)`);
            }
            else {
                filesToAnalyze = reasonableSizedFiles;
            }
            // Step 2: Calculate lexical dissimilarity using TF-IDF
            const filesWithDissimilarity = await this.calculateLexicalDissimilarity(filesToAnalyze);
            // Step 3: Sort by size (descending line count)
            filesWithDissimilarity.sort((a, b) => b.size - a.size);
            // Step 4: Format files string for LLM ranking
            const formattedFilesString = this.formatFilesForRanking(filesWithDissimilarity);
            // Step 5: Create fallback ranking (deterministic, for when LLM fails).
            // Sort a copy so the returned filesWithDissimilarity keeps its size
            // ordering (the original sorted it in place, clobbering that order).
            const rankedFilesList = [...filesWithDissimilarity]
                .sort((a, b) => b.dissimilarity - a.dissimilarity)
                .slice(0, 10)
                .map(f => f.path);
            return {
                discoveredFiles: filesToAnalyze,
                filesWithDissimilarity,
                formattedFilesString,
                rankedFilesList,
            };
        }
        catch (error) {
            this.features.logging.error(`Memory Bank analysis pipeline failed: ${error}`);
            throw error;
        }
    }
    /**
     * Format files as one line each for the LLM ranking prompt.
     */
    formatFilesForRanking(files) {
        // Files are already sorted by size in executeGuidelinesGenerationPipeline()
        return files
            .map(f => `${f.path} has ${f.size} lines and a mean lexical dissimilarity of ${f.dissimilarity.toFixed(6)} to the other files`)
            .join('\n');
    }
    /**
     * Recursively discover source files with the given extensions,
     * skipping directories flagged by shouldSkipDirectory.
     */
    async discoverSourceFiles(workspaceFolderUri, extensions) {
        const sourceFiles = [];
        const traverseDirectory = async (dirPath) => {
            try {
                const entries = await this.features.workspace.fs.readdir(dirPath);
                for (const entry of entries) {
                    const fullPath = `${dirPath}/${entry.name}`;
                    if (entry.isDirectory()) {
                        // Skip common directories that don't contain source code
                        if (this.shouldSkipDirectory(entry.name)) {
                            continue;
                        }
                        await traverseDirectory(fullPath);
                    }
                    else if (extensions.some(ext => entry.name.endsWith(ext))) {
                        // File with a matching source extension
                        sourceFiles.push(fullPath);
                    }
                }
            }
            catch (error) {
                this.features.logging.error(`Could not read directory ${dirPath}: ${error}`);
            }
        };
        await traverseDirectory(workspaceFolderUri);
        return sourceFiles;
    }
    /**
     * Check if a directory should be skipped during source file discovery.
     * Hidden directories (leading '.') are skipped except for a small
     * allow-list of CI/config directories.
     */
    shouldSkipDirectory(dirName) {
        // Comprehensive language-agnostic directory exclusions
        // (duplicates from the original list removed; membership is unchanged)
        const skipDirs = [
            // Version Control Systems
            '.git',
            '.svn',
            '.hg',
            '.bzr',
            '.fossil-settings',
            // Package Managers & Dependencies
            'node_modules',
            'bower_components',
            'jspm_packages',
            'vendor',
            'packages',
            'deps',
            '_deps',
            'third_party',
            'external',
            'Pods',
            'Carthage',
            'DerivedData', // iOS/macOS
            'venv',
            'env',
            '.venv',
            '.env',
            'virtualenv',
            '__pycache__',
            '.tox', // Python
            'gems',
            '.bundle', // Ruby
            'composer', // PHP
            'elm-stuff', // Elm
            'target',
            'project/target',
            'project/project', // Scala/SBT
            // Build Outputs & Artifacts
            'build',
            'builds',
            'dist',
            'out',
            'output',
            'bin',
            'obj',
            'lib',
            'release',
            'debug',
            'Release',
            'Debug',
            'x64',
            'x86',
            'AnyCPU',
            '.next',
            '.nuxt',
            '.output',
            '.vercel',
            '.netlify', // Web frameworks
            'public/build',
            'static/build',
            'assets/build',
            'cmake-build-debug',
            'cmake-build-release', // CMake
            '_build',
            'ebin', // Erlang/Elixir
            'zig-cache',
            'zig-out', // Zig
            // IDE & Editor Directories
            '.vscode',
            '.idea',
            '.vs',
            '.vscode-test',
            '.eclipse',
            '.metadata',
            '.settings',
            '.project',
            '.classpath',
            '.atom',
            '.sublime-project',
            '.sublime-workspace',
            '.mypy_cache',
            '.dmypy.json', // Python
            '.dart_tool',
            '.flutter-plugins',
            '.flutter-plugins-dependencies', // Dart/Flutter
            // Testing & Coverage
            'coverage',
            '.coverage',
            '.nyc_output',
            '.pytest_cache',
            '.cache',
            'htmlcov',
            'test-results',
            'test-reports',
            'allure-results',
            'junit',
            'xunit',
            'nunit',
            'TestResults',
            '.jest',
            'jest_html_reporters.html',
            // Logs & Temporary Files
            'logs',
            'log',
            'tmp',
            'temp',
            '.tmp',
            '.temp',
            'crash-reports',
            'error-reports',
            // Documentation Build Outputs
            '_site',
            '.jekyll-cache',
            '.jekyll-metadata', // Jekyll
            'docs/_build',
            'doc/_build',
            'documentation/_build', // Sphinx
            '.docusaurus',
            'website/build', // Docusaurus
            'book',
            '_book', // GitBook/mdBook
            // Language-Specific Caches & Artifacts
            '.gradle',
            'gradle', // Gradle
            '.m2',
            '.ivy2', // Maven/Ivy
            '.stack-work',
            '.cabal-sandbox',
            'cabal.sandbox.config', // Haskell
            '_opam',
            '.opam', // OCaml
            'Cargo.lock', // Rust (keep Cargo.toml but skip lock in some cases)
            '.cargo', // Rust cache
            '.mix', // Elixir
            'rebar3.crashdump',
            '_checkouts', // Erlang
            '.rebar',
            '.rebar3',
            'priv/static', // Phoenix framework
            // OS-Specific
            '.DS_Store',
            'Thumbs.db',
            'Desktop.ini',
            '$RECYCLE.BIN',
            '.Trash-*',
            '.fuse_hidden*',
            // Cloud & Deployment
            '.serverless',
            '.aws-sam',
            '.terraform',
            '.pulumi',
            'cdk.out',
            '.cdk.staging',
            'amplify',
            // Mobile Development
            'ios/build',
            'android/build',
            'android/.gradle',
            'ios/Pods',
            'android/app/build',
            // Game Development
            'Library',
            'Temp',
            'Obj',
            'Build',
            'Builds', // Unity
            'Intermediate',
            'Binaries',
            'DerivedDataCache', // Unreal
            // Database
            '*.db-journal',
            '*.sqlite-journal',
            // Backup & Archive
            'backup',
            'backups',
            '.backup',
            'archive',
            'archives',
        ];
        // Skip any directory starting with . (hidden directories) except some important ones
        if (dirName.startsWith('.')) {
            const allowedHiddenDirs = ['.github', '.gitlab', '.circleci', '.travis', '.azure', '.devcontainer'];
            return !allowedHiddenDirs.includes(dirName);
        }
        return skipDirs.includes(dirName);
    }
    /**
     * Check if a memory bank exists in the workspace: the directory must
     * exist and contain at least one of the known memory bank files.
     * Returns false on any error.
     */
    async memoryBankExists(workspaceFolderUri) {
        try {
            const normalizedWorkspacePath = (0, mcpUtils_1.normalizePathFromUri)(workspaceFolderUri, this.features.logging);
            const memoryBankPath = `${normalizedWorkspacePath}/${MEMORY_BANK_DIRECTORY}`;
            this.features.logging.info(`Memory Bank: Checking existence at path: "${memoryBankPath}"`);
            const exists = await this.features.workspace.fs.exists(memoryBankPath);
            if (!exists) {
                this.features.logging.info(`Memory Bank: Directory does not exist: "${memoryBankPath}"`);
                return false;
            }
            // Check if at least one memory bank file exists
            const files = Object.values(MEMORY_BANK_FILES);
            let foundFiles = 0;
            for (const file of files) {
                const filePath = `${memoryBankPath}/${file}`;
                const fileExists = await this.features.workspace.fs.exists(filePath);
                if (fileExists) {
                    foundFiles++;
                }
            }
            const hasFiles = foundFiles > 0;
            if (hasFiles) {
                this.features.logging.info(`Memory Bank: Found ${foundFiles} existing memory bank files`);
            }
            else {
                this.features.logging.info(`Memory Bank: No existing memory bank files found`);
            }
            return hasFiles;
        }
        catch (error) {
            this.features.logging.error(`Error checking memory bank existence: ${error}`);
            return false;
        }
    }
}
679
// CommonJS export of the controller class (compiled TypeScript output).
exports.MemoryBankController = MemoryBankController;
//# sourceMappingURL=memoryBankController.js.map