@juspay/yama 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,444 @@
1
+ "use strict";
2
+ /**
3
+ * Enhanced Bitbucket Provider - Optimized from both pr-police.js and pr-describe.js
4
+ * Provides unified, cached, and optimized Bitbucket operations
5
+ */
6
+ Object.defineProperty(exports, "__esModule", { value: true });
7
+ exports.BitbucketProvider = void 0;
8
+ exports.createBitbucketProvider = createBitbucketProvider;
9
+ const types_1 = require("../../types");
10
+ const Logger_1 = require("../../utils/Logger");
11
+ const Cache_1 = require("../../utils/Cache");
12
class BitbucketProvider {
    /**
     * @param {object} credentials - expected shape: { username, token, baseUrl? }.
     *   `baseUrl` falls back to a placeholder host when absent — real
     *   deployments must supply it.
     */
    constructor(credentials) {
        // Flipped to true once initialize() succeeds; makes initialize() idempotent.
        this.initialized = false;
        this.credentials = credentials;
        this.baseUrl = credentials.baseUrl || "https://your-bitbucket-server.com";
    }
    /**
     * Initialize MCP handlers with lazy loading and connection reuse.
     * Safe to call repeatedly; returns immediately after the first success.
     * @throws {ProviderError} when any handler module fails to load or construct.
     */
    async initialize() {
        if (this.initialized) {
            return;
        }
        try {
            Logger_1.logger.debug("Initializing Bitbucket MCP handlers...");
            // eval() hides import() from the CommonJS transpiler so a native
            // dynamic import survives at runtime. NOTE(review): eval on a constant
            // string — confirm this survives the project's bundler/CSP settings.
            const dynamicImport = eval("(specifier) => import(specifier)");
            // Load all handler modules from the MCP server package concurrently.
            const [{ BitbucketApiClient }, { BranchHandlers }, { PullRequestHandlers }, { ReviewHandlers }, { FileHandlers },] = await Promise.all([
                dynamicImport("@nexus2520/bitbucket-mcp-server/build/utils/api-client.js"),
                dynamicImport("@nexus2520/bitbucket-mcp-server/build/handlers/branch-handlers.js"),
                dynamicImport("@nexus2520/bitbucket-mcp-server/build/handlers/pull-request-handlers.js"),
                dynamicImport("@nexus2520/bitbucket-mcp-server/build/handlers/review-handlers.js"),
                dynamicImport("@nexus2520/bitbucket-mcp-server/build/handlers/file-handlers.js"),
            ]);
            // Third ctor argument (password) is deliberately undefined — token auth is used.
            this.apiClient = new BitbucketApiClient(this.baseUrl, this.credentials.username, undefined, this.credentials.token);
            this.branchHandlers = new BranchHandlers(this.apiClient, this.baseUrl);
            this.pullRequestHandlers = new PullRequestHandlers(this.apiClient, this.baseUrl, this.credentials.username);
            this.reviewHandlers = new ReviewHandlers(this.apiClient, this.credentials.username);
            this.fileHandlers = new FileHandlers(this.apiClient, this.baseUrl);
            this.initialized = true;
            Logger_1.logger.debug("Bitbucket MCP handlers initialized successfully");
        }
        catch (error) {
            throw new types_1.ProviderError(`Failed to initialize Bitbucket provider: ${error.message}`);
        }
    }
    /**
     * Parse MCP response - exactly matching the working pr-police.js implementation.
     * Unwraps the MCP envelope ({ content: [{ text }] }) when present, otherwise
     * returns the raw result. Text payloads are JSON-parsed when possible.
     * @throws {Error} when the response carries an error field or an "Error:" text.
     */
    parseMCPResponse(result) {
        // Handle error responses
        if (result.error) {
            throw new Error(result.error);
        }
        // Check if result has MCP format (content array) or direct data - EXACTLY like pr-police.js
        if (result.content && result.content[0] && result.content[0].text) {
            const text = result.content[0].text;
            // Check if it's an error message
            if (typeof text === "string" && text.startsWith("Error:")) {
                throw new Error(text);
            }
            try {
                return JSON.parse(text);
            }
            catch (error) {
                // If parsing fails, return the text as-is for simple responses
                // (deliberate best-effort: some MCP tools reply with plain strings).
                return text;
            }
        }
        else {
            // Direct data format - return as-is
            return result;
        }
    }
    /**
     * Find PR for branch with intelligent caching (1h TTL).
     * Returns a normalized summary of the FIRST open PR on the branch.
     * @throws {ProviderError} when `branch` is missing or no open PR exists.
     */
    async findPRForBranch(identifier) {
        await this.initialize();
        const { workspace, repository, branch } = identifier;
        if (!branch) {
            throw new types_1.ProviderError("Branch name is required");
        }
        const cacheKey = Cache_1.Cache.keys.branchInfo(workspace, repository, branch);
        return Cache_1.cache.getOrSet(cacheKey, async () => {
            Logger_1.logger.debug(`Finding PR for branch: ${workspace}/${repository}@${branch}`);
            const rawBranchData = await this.branchHandlers.handleGetBranch({
                workspace,
                repository,
                branch_name: branch,
                include_merged_prs: false,
            });
            const branchData = this.parseMCPResponse(rawBranchData);
            // Direct data extraction
            if (branchData.open_pull_requests &&
                branchData.open_pull_requests.length > 0) {
                const firstPR = branchData.open_pull_requests[0];
                // Debug author data structure
                Logger_1.logger.debug(`Author data structure: ${JSON.stringify(firstPR.author, null, 2)}`);
                Logger_1.logger.debug(`Raw firstPR keys: ${Object.keys(firstPR).join(", ")}`);
                return {
                    id: firstPR.id,
                    title: firstPR.title,
                    description: firstPR.description || "",
                    // Author may arrive as an object or a plain string; try the
                    // display name first, then the raw value.
                    author: firstPR.author?.displayName ||
                        firstPR.author?.name ||
                        firstPR.author ||
                        "Unknown",
                    state: "OPEN",
                    sourceRef: branch,
                    targetRef: firstPR.destination?.branch?.name || "main",
                    createdDate: firstPR.createdDate || new Date().toISOString(),
                    updatedDate: firstPR.updatedDate || new Date().toISOString(),
                    reviewers: firstPR.reviewers || [],
                    fileChanges: firstPR.file_changes || [],
                };
            }
            throw new types_1.ProviderError(`No open PR found for branch: ${branch}`);
        }, 3600);
    }
    /**
     * Get PR details with enhanced caching (30min TTL).
     * @throws {ProviderError} when `pullRequestId` is missing.
     */
    async getPRDetails(identifier) {
        await this.initialize();
        const { workspace, repository, pullRequestId } = identifier;
        if (!pullRequestId) {
            throw new types_1.ProviderError("Pull request ID is required");
        }
        const cacheKey = Cache_1.Cache.keys.prInfo(workspace, repository, pullRequestId);
        return Cache_1.cache.getOrSet(cacheKey, async () => {
            Logger_1.logger.debug(`Getting PR details: ${workspace}/${repository}#${pullRequestId}`);
            const rawPRDetails = await this.pullRequestHandlers.handleGetPullRequest({
                workspace,
                repository,
                pull_request_id: pullRequestId,
            });
            const prData = this.parseMCPResponse(rawPRDetails);
            // Debug author data structure
            Logger_1.logger.debug(`PR Details author data structure: ${JSON.stringify(prData.author, null, 2)}`);
            Logger_1.logger.debug(`PR Details raw keys: ${Object.keys(prData).join(", ")}`);
            return {
                id: prData.id,
                title: prData.title,
                description: prData.description || "",
                author: prData.author?.displayName ||
                    prData.author?.name ||
                    prData.author ||
                    "Unknown",
                state: prData.state || "OPEN",
                sourceRef: prData.source?.branch?.name || "",
                targetRef: prData.destination?.branch?.name || "",
                createdDate: prData.createdDate || new Date().toISOString(),
                updatedDate: prData.updatedDate || new Date().toISOString(),
                reviewers: prData.reviewers || [],
                comments: prData.active_comments || [],
                // file_changes entries may use either `path` or `file` as the key.
                fileChanges: prData.file_changes?.map((f) => f.path || f.file) ||
                    [],
            };
        }, 1800);
    }
    /**
     * Get PR diff with smart caching and filtering (30min TTL).
     * @param contextLines   unified-diff context lines (default 3)
     * @param excludePatterns glob patterns dropped from the diff (default lock/svg files)
     * @param includePatterns optional allow-list of glob patterns
     * @throws {ProviderError} when `pullRequestId` is missing.
     */
    async getPRDiff(identifier, contextLines = 3, excludePatterns = ["*.lock", "*.svg"], includePatterns) {
        await this.initialize();
        const { workspace, repository, pullRequestId } = identifier;
        if (!pullRequestId) {
            throw new types_1.ProviderError("Pull request ID is required");
        }
        // Create a cache key that includes include patterns if specified.
        // NOTE(review): only a SINGLE include pattern participates in the key;
        // multi-pattern calls share the generic prDiff key — confirm intended.
        const cacheKey = includePatterns && includePatterns.length === 1
            ? `file-diff:${workspace}:${repository}:${pullRequestId}:${includePatterns[0]}`
            : Cache_1.Cache.keys.prDiff(workspace, repository, pullRequestId);
        return Cache_1.cache.getOrSet(cacheKey, async () => {
            Logger_1.logger.debug(`Getting PR diff: ${workspace}/${repository}#${pullRequestId}`);
            if (includePatterns) {
                Logger_1.logger.debug(`Include patterns: ${includePatterns.join(", ")}`);
            }
            const args = {
                workspace,
                repository,
                pull_request_id: pullRequestId,
                context_lines: contextLines,
                exclude_patterns: excludePatterns,
            };
            // Add include_patterns if specified
            if (includePatterns) {
                args.include_patterns = includePatterns;
            }
            const rawDiff = await this.reviewHandlers.handleGetPullRequestDiff(args);
            const diffData = this.parseMCPResponse(rawDiff);
            return {
                diff: diffData.diff || "",
                fileChanges: diffData.file_changes || [],
                totalAdditions: diffData.total_additions || 0,
                totalDeletions: diffData.total_deletions || 0,
            };
        }, 1800);
    }
    /**
     * Get file content with caching (2h TTL).
     * Returns the file body as a string, or "" when not present.
     */
    async getFileContent(workspace, repository, filePath, branch) {
        await this.initialize();
        const cacheKey = Cache_1.Cache.keys.fileContent(workspace, repository, filePath, branch);
        return Cache_1.cache.getOrSet(cacheKey, async () => {
            Logger_1.logger.debug(`Getting file content: ${workspace}/${repository}/${filePath}@${branch}`);
            const result = await this.fileHandlers.handleGetFileContent({
                workspace,
                repository,
                file_path: filePath,
                branch,
            });
            // Handle file content response directly (don't route through
            // parseMCPResponse — the payload wraps the body in a `content` field).
            if (result.content && result.content[0] && result.content[0].text) {
                const fileResponse = JSON.parse(result.content[0].text);
                return fileResponse.content || "";
            }
            // Handle direct response format
            return result.content || "";
        }, 7200);
    }
    /**
     * List directory content with caching (1h TTL).
     * Returns the `contents` array from the handler, or [].
     */
    async listDirectoryContent(workspace, repository, path, branch) {
        await this.initialize();
        const cacheKey = Cache_1.Cache.keys.directoryContent(workspace, repository, path, branch);
        return Cache_1.cache.getOrSet(cacheKey, async () => {
            Logger_1.logger.debug(`Listing directory: ${workspace}/${repository}/${path}@${branch}`);
            const result = await this.fileHandlers.handleListDirectoryContent({
                workspace,
                repository,
                path,
                branch,
            });
            const dirData = this.parseMCPResponse(result);
            return dirData.contents || [];
        }, 3600);
    }
    /**
     * Update PR description with reviewer preservation.
     * Invalidates the cached PR info on every attempt (success or error-text
     * response) so the next getPRDetails() refetches.
     * @returns {{success: boolean, message: string}}
     * @throws {ProviderError} on missing id or transport failure.
     */
    async updatePRDescription(identifier, description) {
        await this.initialize();
        const { workspace, repository, pullRequestId } = identifier;
        if (!pullRequestId) {
            throw new types_1.ProviderError("Pull request ID is required");
        }
        try {
            Logger_1.logger.debug(`Updating PR description: ${workspace}/${repository}#${pullRequestId}`);
            Logger_1.logger.debug(`Description length: ${description.length} characters`);
            const result = await this.pullRequestHandlers.handleUpdatePullRequest({
                workspace,
                repository,
                pull_request_id: pullRequestId,
                description: description,
            });
            // Log the raw MCP response
            Logger_1.logger.debug(`Raw MCP update response: ${JSON.stringify(result, null, 2)}`);
            const updateData = this.parseMCPResponse(result);
            // Log the parsed response
            Logger_1.logger.debug(`Parsed update response: ${JSON.stringify(updateData, null, 2)}`);
            // Invalidate related cache entries
            Cache_1.cache.del(Cache_1.Cache.keys.prInfo(workspace, repository, pullRequestId));
            // Check if the response indicates actual success: some failures come
            // back as a plain string containing "Error" rather than a thrown error.
            if (typeof updateData === "string" && updateData.includes("Error")) {
                Logger_1.logger.error(`Update response contains error: ${updateData}`);
                return {
                    success: false,
                    message: updateData,
                };
            }
            return {
                success: true,
                message: updateData.message || "PR description updated successfully",
            };
        }
        catch (error) {
            Logger_1.logger.error(`Failed to update PR description: ${error.message}`);
            throw new types_1.ProviderError(`Update failed: ${error.message}`);
        }
    }
    /**
     * Add comment to PR with smart positioning.
     * Placement precedence: snippet-matched inline comment (filePath +
     * codeSnippet) > line-number inline comment (filePath + lineNumber) >
     * general PR comment.
     * @returns {{success: boolean, commentId: any}}
     * @throws {ProviderError} on missing id or handler failure.
     */
    async addComment(identifier, commentText, options = {}) {
        await this.initialize();
        const { workspace, repository, pullRequestId } = identifier;
        if (!pullRequestId) {
            throw new types_1.ProviderError("Pull request ID is required");
        }
        try {
            Logger_1.logger.debug(`Adding comment to PR: ${workspace}/${repository}#${pullRequestId}`);
            const args = {
                workspace,
                repository,
                pull_request_id: pullRequestId,
                comment_text: commentText,
            };
            // Add inline comment parameters if provided
            if (options.filePath && options.codeSnippet) {
                args.file_path = options.filePath;
                args.code_snippet = options.codeSnippet;
                if (options.searchContext) {
                    args.search_context = options.searchContext;
                }
                if (options.matchStrategy) {
                    args.match_strategy = options.matchStrategy;
                }
                if (options.suggestion) {
                    args.suggestion = options.suggestion;
                }
                Logger_1.logger.debug(`🔍 Inline comment details:`);
                Logger_1.logger.debug(`  File: ${options.filePath}`);
                Logger_1.logger.debug(`  Code snippet: "${options.codeSnippet}"`);
                Logger_1.logger.debug(`  Match strategy: ${options.matchStrategy}`);
                if (options.searchContext) {
                    Logger_1.logger.debug(`  Search context before: ${JSON.stringify(options.searchContext.before)}`);
                    Logger_1.logger.debug(`  Search context after: ${JSON.stringify(options.searchContext.after)}`);
                }
            }
            else if (options.filePath && options.lineNumber) {
                // Fallback to line number if no code snippet
                args.file_path = options.filePath;
                args.line_number = options.lineNumber;
                args.line_type = options.lineType || "CONTEXT";
                Logger_1.logger.debug(`🔍 Line-based comment details:`);
                Logger_1.logger.debug(`  File: ${options.filePath}`);
                Logger_1.logger.debug(`  Line: ${options.lineNumber}`);
                Logger_1.logger.debug(`  Type: ${options.lineType || "CONTEXT"}`);
            }
            Logger_1.logger.debug(`🔍 MCP addComment args: ${JSON.stringify(args, null, 2)}`);
            const result = await this.pullRequestHandlers.handleAddComment(args);
            // Parse response exactly like pr-police.js
            let commentData;
            if (result.content && result.content[0] && result.content[0].text) {
                commentData = JSON.parse(result.content[0].text);
            }
            else {
                commentData = result;
            }
            return {
                success: true,
                commentId: commentData.id || commentData.comment_id,
            };
        }
        catch (error) {
            Logger_1.logger.error(`Failed to add comment: ${error.message}`);
            throw new types_1.ProviderError(`Comment failed: ${error.message}`);
        }
    }
    /**
     * Batch operation support for multiple API calls.
     * Runs `operations` (zero-arg async fns) in windows of `maxConcurrent`,
     * pausing `delayBetween` ms between windows.
     * @returns array of { success, data } / { success, error } in input order.
     */
    async batchOperations(operations, options = {}) {
        const { maxConcurrent = 5, delayBetween = 1000, continueOnError = true, } = options;
        const results = [];
        // Process operations in batches
        for (let i = 0; i < operations.length; i += maxConcurrent) {
            const batch = operations.slice(i, i + maxConcurrent);
            const batchPromises = batch.map(async (operation) => {
                try {
                    const data = await operation();
                    return { success: true, data };
                }
                catch (error) {
                    const errorMessage = error instanceof Error ? error.message : String(error);
                    if (!continueOnError) {
                        // Propagate the first failure; Promise.all below will reject.
                        throw error;
                    }
                    return { success: false, error: errorMessage };
                }
            });
            const batchResults = await Promise.all(batchPromises);
            results.push(...batchResults);
            // Add delay between batches (except for the last batch)
            if (i + maxConcurrent < operations.length && delayBetween > 0) {
                await new Promise((resolve) => setTimeout(resolve, delayBetween));
            }
        }
        return results;
    }
    /**
     * Health check for the provider.
     * NOTE(review): probes a dummy "test/test@test" branch; servers that error
     * on unknown repos will land in the catch and report unhealthy even with
     * valid credentials — confirm this is the intended signal.
     * @returns {{healthy: boolean, details: object}} never throws.
     */
    async healthCheck() {
        try {
            await this.initialize();
            // Try a simple API call to verify connectivity
            const testResult = await this.branchHandlers.handleGetBranch({
                workspace: "test",
                repository: "test",
                branch_name: "test",
                include_merged_prs: false,
            });
            return {
                healthy: true,
                details: {
                    initialized: this.initialized,
                    baseUrl: this.baseUrl,
                    username: this.credentials.username,
                    apiConnected: !!testResult,
                },
            };
        }
        catch (error) {
            return {
                healthy: false,
                details: {
                    initialized: this.initialized,
                    error: error.message,
                },
            };
        }
    }
    /**
     * Get provider statistics and cache metrics (synchronous snapshot).
     */
    getStats() {
        return {
            provider: "bitbucket",
            initialized: this.initialized,
            baseUrl: this.baseUrl,
            cacheStats: Cache_1.cache.stats(),
            cacheHitRatio: Cache_1.cache.getHitRatio(),
        };
    }
    /**
     * Clear provider-related cache entries.
     * Currently wipes the ENTIRE shared cache, not just this provider's keys.
     */
    clearCache() {
        // Clear all cache entries (could be made more specific)
        Cache_1.cache.clear();
        Logger_1.logger.debug("BitbucketProvider cache cleared");
    }
}
439
exports.BitbucketProvider = BitbucketProvider;
// Factory function export: lets callers obtain a provider without
// referencing the class constructor directly.
function createBitbucketProvider(credentials) {
    const provider = new BitbucketProvider(credentials);
    return provider;
}
//# sourceMappingURL=BitbucketProvider.js.map
@@ -0,0 +1,105 @@
1
/**
 * Enhanced Code Reviewer - Optimized to work with Unified Context.
 * Preserves all original functionality from pr-police.js but optimized.
 * (Declaration file — implementation lives in the compiled CodeReviewer.js.)
 */
import { ReviewResult, ReviewOptions, AIProviderConfig, CodeReviewConfig } from "../types";
import { UnifiedContext } from "../core/ContextGatherer";
import { BitbucketProvider } from "../core/providers/BitbucketProvider";
/**
 * AI-assisted PR reviewer. Consumes a pre-gathered {@link UnifiedContext},
 * analyzes the diff with the configured AI provider, and posts inline /
 * summary comments back through the {@link BitbucketProvider}.
 */
export declare class CodeReviewer {
    private neurolink;
    private bitbucketProvider;
    private aiConfig;
    private reviewConfig;
    constructor(bitbucketProvider: BitbucketProvider, aiConfig: AIProviderConfig, reviewConfig: CodeReviewConfig);
    /**
     * Review code using pre-gathered unified context (OPTIMIZED).
     * Main entry point: analyze, validate violations, post comments.
     */
    reviewCodeWithContext(context: UnifiedContext, options: ReviewOptions): Promise<ReviewResult>;
    /**
     * Validate violations to ensure code snippets exist in diff
     */
    private validateViolations;
    /**
     * Try to fix code snippet by finding it in the actual diff
     */
    private tryFixCodeSnippet;
    /**
     * Get system prompt for security-focused code review
     */
    private getSecurityReviewSystemPrompt;
    /**
     * Get analysis requirements from config or defaults
     */
    private getAnalysisRequirements;
    /**
     * Build focused analysis prompt separated from context
     */
    private buildCoreAnalysisPrompt;
    /**
     * Extract diff content based on strategy
     */
    private extractDiffContent;
    /**
     * Detect project type for better context
     */
    private detectProjectType;
    /**
     * Assess complexity level for better AI context
     */
    private assessComplexity;
    /**
     * Legacy method - kept for compatibility but simplified
     */
    private buildAnalysisPrompt;
    /**
     * Analyze code with AI using the enhanced prompt
     */
    private analyzeWithAI;
    /**
     * Post comments to PR using unified context - matching pr-police.js exactly
     */
    private postComments;
    /**
     * Format inline comment for specific violation
     */
    private formatInlineComment;
    /**
     * Generate comprehensive summary comment with failed comments info
     */
    private generateSummaryComment;
    /**
     * Helper methods for processing violations
     */
    private cleanFilePath;
    /**
     * Extract exact file path from diff
     */
    private extractFilePathFromDiff;
    /**
     * Extract line number from diff for a specific code snippet
     */
    private extractLineNumberFromDiff;
    /**
     * Escape markdown code blocks properly
     */
    private escapeMarkdownCodeBlock;
    private cleanCodeSnippet;
    private splitArrayLines;
    private groupViolationsByCategory;
    private calculateStats;
    private generateReviewResult;
    /**
     * Utility methods
     */
    private parseAIResponse;
    /**
     * Extract line information for comment from context
     */
    private extractLineInfoForComment;
    /**
     * Generate all possible path variations for a file
     */
    private generatePathVariations;
}
/** Factory helper mirroring `createBitbucketProvider`: builds a reviewer from its dependencies. */
export declare function createCodeReviewer(bitbucketProvider: BitbucketProvider, aiConfig: AIProviderConfig, reviewConfig: CodeReviewConfig): CodeReviewer;
//# sourceMappingURL=CodeReviewer.d.ts.map