@juspay/yama 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,438 @@
1
+ "use strict";
2
+ /**
3
+ * Enhanced Bitbucket Provider - Optimized from both pr-police.js and pr-describe.js
4
+ * Provides unified, cached, and optimized Bitbucket operations
5
+ */
6
+ Object.defineProperty(exports, "__esModule", { value: true });
7
+ exports.BitbucketProvider = void 0;
8
+ exports.createBitbucketProvider = createBitbucketProvider;
9
+ const types_1 = require("../../types");
10
+ const Logger_1 = require("../../utils/Logger");
11
+ const Cache_1 = require("../../utils/Cache");
12
class BitbucketProvider {
    /**
     * @param credentials Bitbucket auth info. Fields read by this class:
     *                    `username`, `token`, and optional `baseUrl`.
     */
    constructor(credentials) {
        // Handlers are created lazily in initialize(); this flag guards re-entry.
        this.initialized = false;
        this.credentials = credentials;
        // NOTE(review): the fallback host looks like a placeholder — confirm
        // callers always supply credentials.baseUrl in real deployments.
        this.baseUrl = credentials.baseUrl || 'https://your-bitbucket-server.com';
    }
    /**
     * Initialize MCP handlers with lazy loading and connection reuse.
     * Idempotent: subsequent calls return immediately once initialized.
     * @throws {ProviderError} if any handler module fails to load/construct.
     */
    async initialize() {
        if (this.initialized) {
            return;
        }
        try {
            Logger_1.logger.debug('Initializing Bitbucket MCP handlers...');
            // eval() keeps the dynamic import() literal out of the CommonJS
            // transpile (tsc would otherwise rewrite import() to require()).
            // The evaluated string is a constant, so no untrusted input reaches
            // eval — but flagging it here for reviewer awareness.
            const dynamicImport = eval('(specifier) => import(specifier)');
            // Load all handler modules in parallel from the MCP server package.
            const [{ BitbucketApiClient }, { BranchHandlers }, { PullRequestHandlers }, { ReviewHandlers }, { FileHandlers }] = await Promise.all([
                dynamicImport('@nexus2520/bitbucket-mcp-server/build/utils/api-client.js'),
                dynamicImport('@nexus2520/bitbucket-mcp-server/build/handlers/branch-handlers.js'),
                dynamicImport('@nexus2520/bitbucket-mcp-server/build/handlers/pull-request-handlers.js'),
                dynamicImport('@nexus2520/bitbucket-mcp-server/build/handlers/review-handlers.js'),
                dynamicImport('@nexus2520/bitbucket-mcp-server/build/handlers/file-handlers.js')
            ]);
            // Third positional argument (password) is intentionally undefined;
            // authentication uses the access token instead.
            this.apiClient = new BitbucketApiClient(this.baseUrl, this.credentials.username, undefined, this.credentials.token);
            this.branchHandlers = new BranchHandlers(this.apiClient, this.baseUrl);
            this.pullRequestHandlers = new PullRequestHandlers(this.apiClient, this.baseUrl, this.credentials.username);
            this.reviewHandlers = new ReviewHandlers(this.apiClient, this.credentials.username);
            this.fileHandlers = new FileHandlers(this.apiClient, this.baseUrl);
            this.initialized = true;
            Logger_1.logger.debug('Bitbucket MCP handlers initialized successfully');
        }
        catch (error) {
            throw new types_1.ProviderError(`Failed to initialize Bitbucket provider: ${error.message}`);
        }
    }
    /**
     * Parse MCP response - exactly matching the working pr-police.js implementation.
     * Accepts either the MCP envelope ({ content: [{ text }] }) or direct data.
     * @returns parsed JSON object, the raw text (if not JSON), or `result` as-is.
     * @throws {Error} if the response carries an error field or an "Error:" text.
     */
    parseMCPResponse(result) {
        // Handle error responses
        if (result.error) {
            throw new Error(result.error);
        }
        // Check if result has MCP format (content array) or direct data - EXACTLY like pr-police.js
        if (result.content && result.content[0] && result.content[0].text) {
            const text = result.content[0].text;
            // Check if it's an error message
            if (typeof text === 'string' && text.startsWith('Error:')) {
                throw new Error(text);
            }
            try {
                return JSON.parse(text);
            }
            catch (error) {
                // If parsing fails, return the text as-is for simple responses
                return text;
            }
        }
        else {
            // Direct data format - return as-is
            return result;
        }
    }
    /**
     * Find PR for branch with intelligent caching (1h TTL).
     * Returns the FIRST open PR on the branch, normalized to a flat shape.
     * @throws {ProviderError} if `branch` is missing or no open PR exists.
     */
    async findPRForBranch(identifier) {
        await this.initialize();
        const { workspace, repository, branch } = identifier;
        if (!branch) {
            throw new types_1.ProviderError('Branch name is required');
        }
        const cacheKey = Cache_1.Cache.keys.branchInfo(workspace, repository, branch);
        return Cache_1.cache.getOrSet(cacheKey, async () => {
            Logger_1.logger.debug(`Finding PR for branch: ${workspace}/${repository}@${branch}`);
            const rawBranchData = await this.branchHandlers.handleGetBranch({
                workspace,
                repository,
                branch_name: branch,
                include_merged_prs: false,
            });
            const branchData = this.parseMCPResponse(rawBranchData);
            // Direct data extraction
            if (branchData.open_pull_requests && branchData.open_pull_requests.length > 0) {
                const firstPR = branchData.open_pull_requests[0];
                // Debug author data structure
                Logger_1.logger.debug(`Author data structure: ${JSON.stringify(firstPR.author, null, 2)}`);
                Logger_1.logger.debug(`Raw firstPR keys: ${Object.keys(firstPR).join(', ')}`);
                return {
                    id: firstPR.id,
                    title: firstPR.title,
                    description: firstPR.description || '',
                    // Author may arrive as an object or a plain string; try both shapes.
                    author: firstPR.author?.displayName || firstPR.author?.name || firstPR.author || 'Unknown',
                    state: 'OPEN',
                    sourceRef: branch,
                    targetRef: firstPR.destination?.branch?.name || 'main',
                    createdDate: firstPR.createdDate || new Date().toISOString(),
                    updatedDate: firstPR.updatedDate || new Date().toISOString(),
                    reviewers: firstPR.reviewers || [],
                    fileChanges: firstPR.file_changes || []
                };
            }
            throw new types_1.ProviderError(`No open PR found for branch: ${branch}`);
        }, 3600 // Cache for 1 hour
        );
    }
    /**
     * Get PR details with enhanced caching (30min TTL).
     * @throws {ProviderError} if `pullRequestId` is missing.
     */
    async getPRDetails(identifier) {
        await this.initialize();
        const { workspace, repository, pullRequestId } = identifier;
        if (!pullRequestId) {
            throw new types_1.ProviderError('Pull request ID is required');
        }
        const cacheKey = Cache_1.Cache.keys.prInfo(workspace, repository, pullRequestId);
        return Cache_1.cache.getOrSet(cacheKey, async () => {
            Logger_1.logger.debug(`Getting PR details: ${workspace}/${repository}#${pullRequestId}`);
            const rawPRDetails = await this.pullRequestHandlers.handleGetPullRequest({
                workspace,
                repository,
                pull_request_id: pullRequestId,
            });
            const prData = this.parseMCPResponse(rawPRDetails);
            // Debug author data structure
            Logger_1.logger.debug(`PR Details author data structure: ${JSON.stringify(prData.author, null, 2)}`);
            Logger_1.logger.debug(`PR Details raw keys: ${Object.keys(prData).join(', ')}`);
            return {
                id: prData.id,
                title: prData.title,
                description: prData.description || '',
                // Author may arrive as an object or a plain string; try both shapes.
                author: prData.author?.displayName || prData.author?.name || prData.author || 'Unknown',
                state: prData.state || 'OPEN',
                sourceRef: prData.source?.branch?.name || '',
                targetRef: prData.destination?.branch?.name || '',
                createdDate: prData.createdDate || new Date().toISOString(),
                updatedDate: prData.updatedDate || new Date().toISOString(),
                reviewers: prData.reviewers || [],
                comments: prData.active_comments || [],
                // file_changes entries may use either `path` or `file` as the key.
                fileChanges: prData.file_changes?.map((f) => f.path || f.file) || []
            };
        }, 1800 // Cache for 30 minutes
        );
    }
    /**
     * Get PR diff with smart caching (30min TTL) and filtering.
     * @param contextLines    unified-diff context lines (default 3).
     * @param excludePatterns glob patterns to drop from the diff.
     * @param includePatterns optional globs to restrict the diff.
     * @throws {ProviderError} if `pullRequestId` is missing.
     */
    async getPRDiff(identifier, contextLines = 3, excludePatterns = ['*.lock', '*.svg'], includePatterns) {
        await this.initialize();
        const { workspace, repository, pullRequestId } = identifier;
        if (!pullRequestId) {
            throw new types_1.ProviderError('Pull request ID is required');
        }
        // Create a cache key that includes include patterns if specified.
        // NOTE(review): only a SINGLE include pattern gets its own cache key;
        // 2+ patterns fall through to the generic prDiff key and could serve a
        // filtered diff to later unfiltered callers — confirm this is intended.
        const cacheKey = includePatterns && includePatterns.length === 1
            ? `file-diff:${workspace}:${repository}:${pullRequestId}:${includePatterns[0]}`
            : Cache_1.Cache.keys.prDiff(workspace, repository, pullRequestId);
        return Cache_1.cache.getOrSet(cacheKey, async () => {
            Logger_1.logger.debug(`Getting PR diff: ${workspace}/${repository}#${pullRequestId}`);
            if (includePatterns) {
                Logger_1.logger.debug(`Include patterns: ${includePatterns.join(', ')}`);
            }
            const args = {
                workspace,
                repository,
                pull_request_id: pullRequestId,
                context_lines: contextLines,
                exclude_patterns: excludePatterns,
            };
            // Add include_patterns if specified
            if (includePatterns) {
                args.include_patterns = includePatterns;
            }
            const rawDiff = await this.reviewHandlers.handleGetPullRequestDiff(args);
            const diffData = this.parseMCPResponse(rawDiff);
            return {
                diff: diffData.diff || '',
                fileChanges: diffData.file_changes || [],
                totalAdditions: diffData.total_additions || 0,
                totalDeletions: diffData.total_deletions || 0
            };
        }, 1800 // Cache for 30 minutes
        );
    }
    /**
     * Get file content with caching (2h TTL — files change less frequently).
     * @returns the file body as a string ('' when the response has no content).
     */
    async getFileContent(workspace, repository, filePath, branch) {
        await this.initialize();
        const cacheKey = Cache_1.Cache.keys.fileContent(workspace, repository, filePath, branch);
        return Cache_1.cache.getOrSet(cacheKey, async () => {
            Logger_1.logger.debug(`Getting file content: ${workspace}/${repository}/${filePath}@${branch}`);
            const result = await this.fileHandlers.handleGetFileContent({
                workspace,
                repository,
                file_path: filePath,
                branch,
            });
            // Handle file content response directly (don't use parseMCPResponse:
            // a file body that fails JSON.parse must not be returned verbatim
            // as the envelope text — we want the inner `content` field).
            if (result.content && result.content[0] && result.content[0].text) {
                const fileResponse = JSON.parse(result.content[0].text);
                return fileResponse.content || '';
            }
            // Handle direct response format
            return result.content || '';
        }, 7200 // Cache for 2 hours (files change less frequently)
        );
    }
    /**
     * List directory content with caching (1h TTL).
     * @returns array of directory entries ([] when the response is empty).
     */
    async listDirectoryContent(workspace, repository, path, branch) {
        await this.initialize();
        const cacheKey = Cache_1.Cache.keys.directoryContent(workspace, repository, path, branch);
        return Cache_1.cache.getOrSet(cacheKey, async () => {
            Logger_1.logger.debug(`Listing directory: ${workspace}/${repository}/${path}@${branch}`);
            const result = await this.fileHandlers.handleListDirectoryContent({
                workspace,
                repository,
                path,
                branch,
            });
            const dirData = this.parseMCPResponse(result);
            return dirData.contents || [];
        }, 3600 // Cache for 1 hour
        );
    }
    /**
     * Update PR description with reviewer preservation.
     * Invalidates the cached PR info for this PR on every attempt.
     * @returns { success, message }; success=false when the handler replied
     *          with an error string instead of throwing.
     * @throws {ProviderError} if the id is missing or the update call throws.
     */
    async updatePRDescription(identifier, description) {
        await this.initialize();
        const { workspace, repository, pullRequestId } = identifier;
        if (!pullRequestId) {
            throw new types_1.ProviderError('Pull request ID is required');
        }
        try {
            Logger_1.logger.debug(`Updating PR description: ${workspace}/${repository}#${pullRequestId}`);
            Logger_1.logger.debug(`Description length: ${description.length} characters`);
            const result = await this.pullRequestHandlers.handleUpdatePullRequest({
                workspace,
                repository,
                pull_request_id: pullRequestId,
                description: description
            });
            // Log the raw MCP response
            Logger_1.logger.debug(`Raw MCP update response: ${JSON.stringify(result, null, 2)}`);
            const updateData = this.parseMCPResponse(result);
            // Log the parsed response
            Logger_1.logger.debug(`Parsed update response: ${JSON.stringify(updateData, null, 2)}`);
            // Invalidate related cache entries
            Cache_1.cache.del(Cache_1.Cache.keys.prInfo(workspace, repository, pullRequestId));
            // Check if the response indicates actual success: parseMCPResponse only
            // throws for texts starting with "Error:", so catch embedded errors too.
            if (typeof updateData === 'string' && updateData.includes('Error')) {
                Logger_1.logger.error(`Update response contains error: ${updateData}`);
                return {
                    success: false,
                    message: updateData
                };
            }
            return {
                success: true,
                message: updateData.message || 'PR description updated successfully'
            };
        }
        catch (error) {
            Logger_1.logger.error(`Failed to update PR description: ${error.message}`);
            throw new types_1.ProviderError(`Update failed: ${error.message}`);
        }
    }
    /**
     * Add comment to PR with smart positioning.
     * Placement precedence: (filePath + codeSnippet) wins over
     * (filePath + lineNumber); with neither, a general PR comment is posted.
     * @returns { success: true, commentId } — id may be undefined if the
     *          handler response carries neither `id` nor `comment_id`.
     * @throws {ProviderError} if the id is missing or the handler call throws.
     */
    async addComment(identifier, commentText, options = {}) {
        await this.initialize();
        const { workspace, repository, pullRequestId } = identifier;
        if (!pullRequestId) {
            throw new types_1.ProviderError('Pull request ID is required');
        }
        try {
            Logger_1.logger.debug(`Adding comment to PR: ${workspace}/${repository}#${pullRequestId}`);
            const args = {
                workspace,
                repository,
                pull_request_id: pullRequestId,
                comment_text: commentText,
            };
            // Add inline comment parameters if provided
            if (options.filePath && options.codeSnippet) {
                args.file_path = options.filePath;
                args.code_snippet = options.codeSnippet;
                if (options.searchContext)
                    args.search_context = options.searchContext;
                if (options.matchStrategy)
                    args.match_strategy = options.matchStrategy;
                if (options.suggestion)
                    args.suggestion = options.suggestion;
                Logger_1.logger.debug(`🔍 Inline comment details:`);
                Logger_1.logger.debug(`   File: ${options.filePath}`);
                Logger_1.logger.debug(`   Code snippet: "${options.codeSnippet}"`);
                Logger_1.logger.debug(`   Match strategy: ${options.matchStrategy}`);
                if (options.searchContext) {
                    Logger_1.logger.debug(`   Search context before: ${JSON.stringify(options.searchContext.before)}`);
                    Logger_1.logger.debug(`   Search context after: ${JSON.stringify(options.searchContext.after)}`);
                }
            }
            else if (options.filePath && options.lineNumber) {
                // Fallback to line number if no code snippet
                args.file_path = options.filePath;
                args.line_number = options.lineNumber;
                args.line_type = options.lineType || 'CONTEXT';
                Logger_1.logger.debug(`🔍 Line-based comment details:`);
                Logger_1.logger.debug(`   File: ${options.filePath}`);
                Logger_1.logger.debug(`   Line: ${options.lineNumber}`);
                Logger_1.logger.debug(`   Type: ${options.lineType || 'CONTEXT'}`);
            }
            Logger_1.logger.debug(`🔍 MCP addComment args: ${JSON.stringify(args, null, 2)}`);
            const result = await this.pullRequestHandlers.handleAddComment(args);
            // Parse response exactly like pr-police.js.
            // NOTE(review): deliberately NOT parseMCPResponse — this path does not
            // treat "Error:"-prefixed texts as failures; confirm that matches the
            // upstream pr-police.js behavior being mirrored.
            let commentData;
            if (result.content && result.content[0] && result.content[0].text) {
                commentData = JSON.parse(result.content[0].text);
            }
            else {
                commentData = result;
            }
            return {
                success: true,
                commentId: commentData.id || commentData.comment_id
            };
        }
        catch (error) {
            Logger_1.logger.error(`Failed to add comment: ${error.message}`);
            throw new types_1.ProviderError(`Comment failed: ${error.message}`);
        }
    }
    /**
     * Batch operation support for multiple API calls.
     * Runs thunks in windows of `maxConcurrent`, pausing `delayBetween` ms
     * between windows. With continueOnError=true (default) each failure is
     * recorded as { success: false, error }; otherwise the first failure
     * rejects the whole batch.
     * @param operations array of zero-arg async functions.
     * @returns one { success, data? , error? } record per operation, in order.
     */
    async batchOperations(operations, options = {}) {
        const { maxConcurrent = 5, delayBetween = 1000, continueOnError = true } = options;
        const results = [];
        // Process operations in batches
        for (let i = 0; i < operations.length; i += maxConcurrent) {
            const batch = operations.slice(i, i + maxConcurrent);
            const batchPromises = batch.map(async (operation) => {
                try {
                    const data = await operation();
                    return { success: true, data };
                }
                catch (error) {
                    const errorMessage = error instanceof Error ? error.message : String(error);
                    if (!continueOnError) {
                        throw error;
                    }
                    return { success: false, error: errorMessage };
                }
            });
            const batchResults = await Promise.all(batchPromises);
            results.push(...batchResults);
            // Add delay between batches (except for the last batch)
            if (i + maxConcurrent < operations.length && delayBetween > 0) {
                await new Promise(resolve => setTimeout(resolve, delayBetween));
            }
        }
        return results;
    }
    /**
     * Health check for the provider. Never throws; failures are reported in
     * the returned `details`.
     * NOTE(review): the probe uses dummy coordinates ('test'/'test'/'test') —
     * if the handler throws on a nonexistent branch this reports unhealthy
     * even when connectivity is fine; confirm the intended semantics.
     */
    async healthCheck() {
        try {
            await this.initialize();
            // Try a simple API call to verify connectivity
            const testResult = await this.branchHandlers.handleGetBranch({
                workspace: 'test',
                repository: 'test',
                branch_name: 'test',
                include_merged_prs: false,
            });
            return {
                healthy: true,
                details: {
                    initialized: this.initialized,
                    baseUrl: this.baseUrl,
                    username: this.credentials.username,
                    apiConnected: !!testResult
                }
            };
        }
        catch (error) {
            return {
                healthy: false,
                details: {
                    initialized: this.initialized,
                    error: error.message
                }
            };
        }
    }
    /**
     * Get provider statistics and cache metrics.
     * Note: the cache is process-global, so stats are not provider-scoped.
     */
    getStats() {
        return {
            provider: 'bitbucket',
            initialized: this.initialized,
            baseUrl: this.baseUrl,
            cacheStats: Cache_1.cache.stats(),
            cacheHitRatio: Cache_1.cache.getHitRatio()
        };
    }
    /**
     * Clear provider-related cache entries.
     * Currently wipes the WHOLE shared cache, not only this provider's keys.
     */
    clearCache() {
        // Clear all cache entries (could be made more specific)
        Cache_1.cache.clear();
        Logger_1.logger.debug('BitbucketProvider cache cleared');
    }
}
433
+ exports.BitbucketProvider = BitbucketProvider;
434
+ // Export factory function
435
/**
 * Factory helper: builds a BitbucketProvider for the given credentials.
 * The underlying MCP handlers are not created here — they are loaded lazily
 * on the provider's first operation (see BitbucketProvider#initialize).
 */
function createBitbucketProvider(credentials) {
    const provider = new BitbucketProvider(credentials);
    return provider;
}
438
+ //# sourceMappingURL=BitbucketProvider.js.map
@@ -0,0 +1,105 @@
1
+ /**
2
+ * Enhanced Code Reviewer - Optimized to work with Unified Context
3
+ * Preserves all original functionality from pr-police.js but optimized
4
+ */
5
+ import { ReviewResult, ReviewOptions, AIProviderConfig, CodeReviewConfig } from '../types';
6
+ import { UnifiedContext } from '../core/ContextGatherer';
7
+ import { BitbucketProvider } from '../core/providers/BitbucketProvider';
8
export declare class CodeReviewer {
    // AI client instance (created lazily from aiConfig — TODO confirm).
    private neurolink;
    // Provider used for PR data access and comment posting.
    private bitbucketProvider;
    // AI provider settings (model, keys, etc.).
    private aiConfig;
    // Review behavior settings (rules, requirements, thresholds).
    private reviewConfig;
    constructor(bitbucketProvider: BitbucketProvider, aiConfig: AIProviderConfig, reviewConfig: CodeReviewConfig);
    /**
     * Review code using pre-gathered unified context (OPTIMIZED).
     * Entry point: analyzes the context with AI and posts comments via the
     * provider, returning the aggregated review result.
     */
    reviewCodeWithContext(context: UnifiedContext, options: ReviewOptions): Promise<ReviewResult>;
    /**
     * Validate violations to ensure code snippets exist in diff
     */
    private validateViolations;
    /**
     * Try to fix code snippet by finding it in the actual diff
     */
    private tryFixCodeSnippet;
    /**
     * Get system prompt for security-focused code review
     */
    private getSecurityReviewSystemPrompt;
    /**
     * Get analysis requirements from config or defaults
     */
    private getAnalysisRequirements;
    /**
     * Build focused analysis prompt separated from context
     */
    private buildCoreAnalysisPrompt;
    /**
     * Extract diff content based on strategy
     */
    private extractDiffContent;
    /**
     * Detect project type for better context
     */
    private detectProjectType;
    /**
     * Assess complexity level for better AI context
     */
    private assessComplexity;
    /**
     * Legacy method - kept for compatibility but simplified
     */
    private buildAnalysisPrompt;
    /**
     * Analyze code with AI using the enhanced prompt
     */
    private analyzeWithAI;
    /**
     * Post comments to PR using unified context - matching pr-police.js exactly
     */
    private postComments;
    /**
     * Format inline comment for specific violation
     */
    private formatInlineComment;
    /**
     * Generate comprehensive summary comment with failed comments info
     */
    private generateSummaryComment;
    /**
     * Helper methods for processing violations
     */
    private cleanFilePath;
    /**
     * Extract exact file path from diff
     */
    private extractFilePathFromDiff;
    /**
     * Extract line number from diff for a specific code snippet
     */
    private extractLineNumberFromDiff;
    /**
     * Escape markdown code blocks properly
     */
    private escapeMarkdownCodeBlock;
    // Snippet normalization helper used before matching against the diff.
    private cleanCodeSnippet;
    // Splits multi-line array literals for line-by-line matching.
    private splitArrayLines;
    // Buckets violations by category for the summary comment.
    private groupViolationsByCategory;
    // Computes aggregate counts used in the review result.
    private calculateStats;
    // Assembles the final ReviewResult from stats and violations.
    private generateReviewResult;
    /**
     * Utility methods
     */
    private parseAIResponse;
    /**
     * Extract line information for comment from context
     */
    private extractLineInfoForComment;
    /**
     * Generate all possible path variations for a file
     */
    private generatePathVariations;
}
104
/**
 * Factory: constructs a CodeReviewer wired to the given provider and configs.
 */
export declare function createCodeReviewer(bitbucketProvider: BitbucketProvider, aiConfig: AIProviderConfig, reviewConfig: CodeReviewConfig): CodeReviewer;
105
+ //# sourceMappingURL=CodeReviewer.d.ts.map