@juspay/yama 1.6.0 → 2.0.0
This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- package/.mcp-config.example.json +26 -0
- package/CHANGELOG.md +34 -0
- package/README.md +311 -685
- package/dist/cli/v2.cli.d.ts +13 -0
- package/dist/cli/v2.cli.js +290 -0
- package/dist/index.d.ts +12 -13
- package/dist/index.js +18 -19
- package/dist/v2/config/ConfigLoader.d.ts +50 -0
- package/dist/v2/config/ConfigLoader.js +205 -0
- package/dist/v2/config/DefaultConfig.d.ts +9 -0
- package/dist/v2/config/DefaultConfig.js +191 -0
- package/dist/v2/core/MCPServerManager.d.ts +22 -0
- package/dist/v2/core/MCPServerManager.js +92 -0
- package/dist/v2/core/SessionManager.d.ts +72 -0
- package/dist/v2/core/SessionManager.js +200 -0
- package/dist/v2/core/YamaV2Orchestrator.d.ts +112 -0
- package/dist/v2/core/YamaV2Orchestrator.js +549 -0
- package/dist/v2/prompts/EnhancementSystemPrompt.d.ts +8 -0
- package/dist/v2/prompts/EnhancementSystemPrompt.js +216 -0
- package/dist/v2/prompts/PromptBuilder.d.ts +38 -0
- package/dist/v2/prompts/PromptBuilder.js +228 -0
- package/dist/v2/prompts/ReviewSystemPrompt.d.ts +8 -0
- package/dist/v2/prompts/ReviewSystemPrompt.js +270 -0
- package/dist/v2/types/config.types.d.ts +120 -0
- package/dist/v2/types/config.types.js +5 -0
- package/dist/v2/types/mcp.types.d.ts +191 -0
- package/dist/v2/types/mcp.types.js +6 -0
- package/dist/v2/types/v2.types.d.ts +182 -0
- package/dist/v2/types/v2.types.js +42 -0
- package/dist/v2/utils/ObservabilityConfig.d.ts +22 -0
- package/dist/v2/utils/ObservabilityConfig.js +48 -0
- package/package.json +11 -9
- package/yama.config.example.yaml +214 -204
- package/dist/cli/index.d.ts +0 -12
- package/dist/cli/index.js +0 -538
- package/dist/core/ContextGatherer.d.ts +0 -110
- package/dist/core/ContextGatherer.js +0 -470
- package/dist/core/Guardian.d.ts +0 -81
- package/dist/core/Guardian.js +0 -480
- package/dist/core/providers/BitbucketProvider.d.ts +0 -105
- package/dist/core/providers/BitbucketProvider.js +0 -489
- package/dist/features/CodeReviewer.d.ts +0 -173
- package/dist/features/CodeReviewer.js +0 -1707
- package/dist/features/DescriptionEnhancer.d.ts +0 -70
- package/dist/features/DescriptionEnhancer.js +0 -511
- package/dist/features/MultiInstanceProcessor.d.ts +0 -74
- package/dist/features/MultiInstanceProcessor.js +0 -360
- package/dist/types/index.d.ts +0 -624
- package/dist/types/index.js +0 -104
- package/dist/utils/Cache.d.ts +0 -103
- package/dist/utils/Cache.js +0 -444
- package/dist/utils/ConfigManager.d.ts +0 -88
- package/dist/utils/ConfigManager.js +0 -602
- package/dist/utils/ContentSimilarityService.d.ts +0 -74
- package/dist/utils/ContentSimilarityService.js +0 -215
- package/dist/utils/ExactDuplicateRemover.d.ts +0 -77
- package/dist/utils/ExactDuplicateRemover.js +0 -361
- package/dist/utils/Logger.d.ts +0 -31
- package/dist/utils/Logger.js +0 -214
- package/dist/utils/MemoryBankManager.d.ts +0 -73
- package/dist/utils/MemoryBankManager.js +0 -310
- package/dist/utils/ParallelProcessing.d.ts +0 -140
- package/dist/utils/ParallelProcessing.js +0 -333
- package/dist/utils/ProviderLimits.d.ts +0 -58
- package/dist/utils/ProviderLimits.js +0 -143
- package/dist/utils/RetryManager.d.ts +0 -78
- package/dist/utils/RetryManager.js +0 -205
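
The two largest removals, `package/dist/core/providers/BitbucketProvider.js` and `package/dist/features/CodeReviewer.d.ts`, are reproduced in full below. One detail worth noting before the first diff: the removed provider loads its MCP handlers lazily through a `new Function` indirection around `import()`. A minimal sketch of that pattern, assuming nothing about yama's API beyond what the diff shows (the module path is copied from the removed code; the memoization helper is illustrative):

```typescript
// Sketch of the lazy dynamic-import pattern used by the removed provider.
// Wrapping import() in new Function() stops a CommonJS-targeting compiler
// from rewriting it into require(), so ESM-only modules still load at runtime.
const dynamicImport = new Function(
  "specifier",
  "return import(specifier)",
) as (specifier: string) => Promise<Record<string, unknown>>;

let handlersPromise: Promise<Record<string, unknown>> | undefined;

// Reuse one in-flight import so concurrent callers trigger a single load
// (hypothetical helper; the removed code guards with an `initialized` flag).
async function loadBranchHandlers(): Promise<Record<string, unknown>> {
  handlersPromise ??= dynamicImport(
    "@nexus2520/bitbucket-mcp-server/build/handlers/branch-handlers.js",
  );
  return handlersPromise;
}
```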
package/dist/core/providers/BitbucketProvider.js
@@ -1,489 +0,0 @@
-/**
- * Enhanced Bitbucket Provider - Optimized from both pr-police.js and pr-describe.js
- * Provides unified, cached, and optimized Bitbucket operations
- */
-import { ProviderError, } from "../../types/index.js";
-import { logger } from "../../utils/Logger.js";
-import { cache, Cache } from "../../utils/Cache.js";
-import { RetryManager } from "../../utils/RetryManager.js";
-export class BitbucketProvider {
-    apiClient;
-    branchHandlers;
-    pullRequestHandlers;
-    reviewHandlers;
-    fileHandlers;
-    initialized = false;
-    baseUrl;
-    credentials;
-    constructor(credentials) {
-        this.credentials = credentials;
-        this.baseUrl = credentials.baseUrl || "https://your-bitbucket-server.com";
-    }
-    /**
-     * Initialize MCP handlers with lazy loading and connection reuse
-     */
-    async initialize() {
-        if (this.initialized) {
-            return;
-        }
-        try {
-            logger.debug("Initializing Bitbucket MCP handlers...");
-            const dynamicImport = new Function("specifier", "return import(specifier)");
-            const [{ BitbucketApiClient }, { BranchHandlers }, { PullRequestHandlers }, { ReviewHandlers }, { FileHandlers },] = await Promise.all([
-                dynamicImport("@nexus2520/bitbucket-mcp-server/build/utils/api-client.js"),
-                dynamicImport("@nexus2520/bitbucket-mcp-server/build/handlers/branch-handlers.js"),
-                dynamicImport("@nexus2520/bitbucket-mcp-server/build/handlers/pull-request-handlers.js"),
-                dynamicImport("@nexus2520/bitbucket-mcp-server/build/handlers/review-handlers.js"),
-                dynamicImport("@nexus2520/bitbucket-mcp-server/build/handlers/file-handlers.js"),
-            ]);
-            this.apiClient = new BitbucketApiClient(this.baseUrl, this.credentials.username, undefined, this.credentials.token);
-            this.branchHandlers = new BranchHandlers(this.apiClient, this.baseUrl);
-            this.pullRequestHandlers = new PullRequestHandlers(this.apiClient, this.baseUrl, this.credentials.username);
-            this.reviewHandlers = new ReviewHandlers(this.apiClient, this.credentials.username);
-            this.fileHandlers = new FileHandlers(this.apiClient, this.baseUrl);
-            this.initialized = true;
-            logger.debug("Bitbucket MCP handlers initialized successfully");
-        }
-        catch (error) {
-            throw new ProviderError(`Failed to initialize Bitbucket provider: ${error.message}`);
-        }
-    }
-    /**
-     * Parse MCP response - exactly matching the working pr-police.js implementation
-     */
-    parseMCPResponse(result) {
-        // Handle error responses
-        if (result.error) {
-            throw new Error(result.error);
-        }
-        // Check if result has MCP format (content array) or direct data - EXACTLY like pr-police.js
-        if (result.content && result.content[0] && result.content[0].text) {
-            const text = result.content[0].text;
-            // Check if it's an error message
-            if (typeof text === "string" && text.startsWith("Error:")) {
-                throw new Error(text);
-            }
-            try {
-                return JSON.parse(text);
-            }
-            catch (error) {
-                // If parsing fails, return the text as-is for simple responses
-                return text;
-            }
-        }
-        else {
-            // Direct data format - return as-is
-            return result;
-        }
-    }
-    /**
-     * Find PR for branch with intelligent caching
-     */
-    async findPRForBranch(identifier) {
-        await this.initialize();
-        const { workspace, repository, branch } = identifier;
-        if (!branch) {
-            throw new ProviderError("Branch name is required");
-        }
-        const cacheKey = Cache.keys.branchInfo(workspace, repository, branch);
-        return cache.getOrSetResilient(cacheKey, async () => {
-            logger.debug(`Finding PR for branch: ${workspace}/${repository}@${branch}`);
-            const rawBranchData = await RetryManager.withRetry(() => this.branchHandlers.handleGetBranch({
-                workspace,
-                repository,
-                branch_name: branch,
-                include_merged_prs: false,
-            }), `Find PR for branch ${workspace}/${repository}@${branch}`);
-            const branchData = this.parseMCPResponse(rawBranchData);
-            // Direct data extraction
-            if (branchData.open_pull_requests &&
-                branchData.open_pull_requests.length > 0) {
-                const firstPR = branchData.open_pull_requests[0];
-                // Debug author data structure
-                logger.debug(`Author data structure: ${JSON.stringify(firstPR.author, null, 2)}`);
-                logger.debug(`Raw firstPR keys: ${Object.keys(firstPR).join(", ")}`);
-                return {
-                    id: firstPR.id,
-                    title: firstPR.title,
-                    description: firstPR.description || "",
-                    author: firstPR.author?.displayName ||
-                        firstPR.author?.name ||
-                        firstPR.author ||
-                        "Unknown",
-                    state: "OPEN",
-                    sourceRef: branch,
-                    targetRef: firstPR.destination?.branch?.name || "main",
-                    createdDate: firstPR.createdDate || new Date().toISOString(),
-                    updatedDate: firstPR.updatedDate || new Date().toISOString(),
-                    reviewers: firstPR.reviewers || [],
-                    fileChanges: firstPR.file_changes || [],
-                };
-            }
-            throw new ProviderError(`No open PR found for branch: ${branch}`);
-        }, 3600);
-    }
-    /**
-     * Get PR details with enhanced caching
-     */
-    async getPRDetails(identifier) {
-        await this.initialize();
-        const { workspace, repository, pullRequestId } = identifier;
-        if (!pullRequestId) {
-            throw new ProviderError("Pull request ID is required");
-        }
-        const cacheKey = Cache.keys.prInfo(workspace, repository, pullRequestId);
-        return cache.getOrSetResilient(cacheKey, async () => {
-            logger.debug(`Getting PR details: ${workspace}/${repository}#${pullRequestId}`);
-            const rawPRDetails = await RetryManager.withRetry(() => this.pullRequestHandlers.handleGetPullRequest({
-                workspace,
-                repository,
-                pull_request_id: pullRequestId,
-            }), `Get PR details ${workspace}/${repository}#${pullRequestId}`);
-            const prData = this.parseMCPResponse(rawPRDetails);
-            // Debug author data structure
-            logger.debug(`PR Details author data structure: ${JSON.stringify(prData.author, null, 2)}`);
-            logger.debug(`PR Details raw keys: ${Object.keys(prData).join(", ")}`);
-            return {
-                id: prData.id,
-                title: prData.title,
-                description: prData.description || "",
-                author: prData.author?.displayName ||
-                    prData.author?.name ||
-                    prData.author ||
-                    "Unknown",
-                state: prData.state || "OPEN",
-                sourceRef: prData.source?.branch?.name || "",
-                targetRef: prData.destination?.branch?.name || "",
-                createdDate: prData.createdDate || new Date().toISOString(),
-                updatedDate: prData.updatedDate || new Date().toISOString(),
-                reviewers: prData.reviewers || [],
-                comments: prData.active_comments || [],
-                fileChanges: prData.file_changes?.map((f) => f.path || f.file) ||
-                    [],
-            };
-        }, 1800);
-    }
-    /**
-     * Get PR diff with smart caching and filtering
-     */
-    async getPRDiff(identifier, contextLines = 3, excludePatterns = ["*.lock", "*.svg"], includePatterns) {
-        await this.initialize();
-        const { workspace, repository, pullRequestId } = identifier;
-        if (!pullRequestId) {
-            throw new ProviderError("Pull request ID is required");
-        }
-        // Create a cache key that includes include patterns if specified
-        const cacheKey = includePatterns && includePatterns.length === 1
-            ? `file-diff:${workspace}:${repository}:${pullRequestId}:${includePatterns[0]}`
-            : Cache.keys.prDiff(workspace, repository, pullRequestId);
-        return cache.getOrSetResilient(cacheKey, async () => {
-            logger.debug(`Getting PR diff: ${workspace}/${repository}#${pullRequestId}`);
-            if (includePatterns) {
-                logger.debug(`Include patterns: ${includePatterns.join(", ")}`);
-            }
-            const args = {
-                workspace,
-                repository,
-                pull_request_id: pullRequestId,
-                context_lines: contextLines,
-                exclude_patterns: excludePatterns,
-            };
-            // Add include_patterns if specified
-            if (includePatterns) {
-                args.include_patterns = includePatterns;
-            }
-            const rawDiff = await RetryManager.withRetry(() => this.reviewHandlers.handleGetPullRequestDiff(args), `Get PR diff ${workspace}/${repository}#${pullRequestId}`);
-            const diffData = this.parseMCPResponse(rawDiff);
-            return {
-                diff: diffData.diff || "",
-                fileChanges: diffData.file_changes || [],
-                totalAdditions: diffData.total_additions || 0,
-                totalDeletions: diffData.total_deletions || 0,
-            };
-        }, 1800);
-    }
-    /**
-     * Get file content with caching
-     */
-    async getFileContent(workspace, repository, filePath, branch) {
-        await this.initialize();
-        const cacheKey = Cache.keys.fileContent(workspace, repository, filePath, branch);
-        return cache.getOrSetResilient(cacheKey, async () => {
-            logger.debug(`Getting file content: ${workspace}/${repository}/${filePath}@${branch}`);
-            const result = await RetryManager.withRetry(() => this.fileHandlers.handleGetFileContent({
-                workspace,
-                repository,
-                file_path: filePath,
-                branch,
-            }), `Get file content ${workspace}/${repository}/${filePath}@${branch}`);
-            // Handle file content response with proper error handling for plain text files
-            if (result.content &&
-                result.content[0] &&
-                result.content[0].text) {
-                try {
-                    const fileResponse = JSON.parse(result.content[0].text);
-                    return fileResponse.content || "";
-                }
-                catch (parseError) {
-                    // If JSON parsing fails, the content might be plain text (like .clinerules)
-                    // Return the text content directly
-                    logger.debug(`JSON parsing failed for ${filePath}, treating as plain text: ${parseError.message}`);
-                    return result.content[0].text || "";
-                }
-            }
-            // Handle direct response format
-            return result.content || "";
-        }, 7200);
-    }
-    /**
-     * List directory content with caching
-     */
-    async listDirectoryContent(workspace, repository, path, branch) {
-        await this.initialize();
-        const cacheKey = Cache.keys.directoryContent(workspace, repository, path, branch);
-        return cache.getOrSetResilient(cacheKey, async () => {
-            logger.debug(`Listing directory: ${workspace}/${repository}/${path}@${branch}`);
-            const result = await RetryManager.withRetry(() => this.fileHandlers.handleListDirectoryContent({
-                workspace,
-                repository,
-                path,
-                branch,
-            }), `List directory ${workspace}/${repository}/${path}@${branch}`);
-            const dirData = this.parseMCPResponse(result);
-            return dirData.contents || [];
-        }, 3600);
-    }
-    /**
-     * Update PR description with reviewer preservation
-     */
-    async updatePRDescription(identifier, description) {
-        await this.initialize();
-        const { workspace, repository, pullRequestId } = identifier;
-        if (!pullRequestId) {
-            throw new ProviderError("Pull request ID is required");
-        }
-        try {
-            logger.debug(`Updating PR description: ${workspace}/${repository}#${pullRequestId}`);
-            logger.debug(`Description length: ${description.length} characters`);
-            const result = await RetryManager.withRetry(() => this.pullRequestHandlers.handleUpdatePullRequest({
-                workspace,
-                repository,
-                pull_request_id: pullRequestId,
-                description: description,
-            }), `Update PR description ${workspace}/${repository}#${pullRequestId}`);
-            // Log the raw MCP response
-            logger.debug(`Raw MCP update response: ${JSON.stringify(result, null, 2)}`);
-            const updateData = this.parseMCPResponse(result);
-            // Log the parsed response
-            logger.debug(`Parsed update response: ${JSON.stringify(updateData, null, 2)}`);
-            // Invalidate related cache entries
-            cache.del(Cache.keys.prInfo(workspace, repository, pullRequestId));
-            // Check if the response indicates actual success
-            if (typeof updateData === "string" && updateData.includes("Error")) {
-                logger.error(`Update response contains error: ${updateData}`);
-                return {
-                    success: false,
-                    message: updateData,
-                };
-            }
-            return {
-                success: true,
-                message: updateData.message || "PR description updated successfully",
-            };
-        }
-        catch (error) {
-            logger.error(`Failed to update PR description: ${error.message}`);
-            throw new ProviderError(`Update failed: ${error.message}`);
-        }
-    }
-    /**
-     * Add comment to PR with smart positioning and validation
-     */
-    async addComment(identifier, commentText, options = {}) {
-        await this.initialize();
-        const { workspace, repository, pullRequestId } = identifier;
-        if (!pullRequestId) {
-            throw new ProviderError("Pull request ID is required");
-        }
-        try {
-            logger.debug(`Adding comment to PR: ${workspace}/${repository}#${pullRequestId}`);
-            const args = {
-                workspace,
-                repository,
-                pull_request_id: pullRequestId,
-                comment_text: commentText,
-            };
-            // Add inline comment parameters if provided
-            if (options.filePath && options.codeSnippet) {
-                args.file_path = options.filePath;
-                args.code_snippet = options.codeSnippet;
-                if (options.searchContext) {
-                    args.search_context = options.searchContext;
-                }
-                if (options.matchStrategy) {
-                    args.match_strategy = options.matchStrategy;
-                }
-                if (options.suggestion) {
-                    args.suggestion = options.suggestion;
-                }
-                logger.debug(`🔍 Inline comment details:`);
-                logger.debug(`  File: ${options.filePath}`);
-                logger.debug(`  Code snippet: "${options.codeSnippet}"`);
-                logger.debug(`  Match strategy: ${options.matchStrategy}`);
-                if (options.searchContext) {
-                    logger.debug(`  Search context before: ${JSON.stringify(options.searchContext.before)}`);
-                    logger.debug(`  Search context after: ${JSON.stringify(options.searchContext.after)}`);
-                }
-            }
-            else if (options.filePath && options.lineNumber) {
-                // Fallback to line number if no code snippet
-                args.file_path = options.filePath;
-                args.line_number = options.lineNumber;
-                args.line_type = options.lineType || "CONTEXT";
-                logger.debug(`🔍 Line-based comment details:`);
-                logger.debug(`  File: ${options.filePath}`);
-                logger.debug(`  Line: ${options.lineNumber}`);
-                logger.debug(`  Type: ${options.lineType || "CONTEXT"}`);
-            }
-            logger.debug(`🔍 MCP addComment args: ${JSON.stringify(args, null, 2)}`);
-            const result = await RetryManager.withRetry(() => this.pullRequestHandlers.handleAddComment(args), `Add comment to PR ${workspace}/${repository}#${pullRequestId}`);
-            // Parse response exactly like pr-police.js
-            let commentData;
-            if (result.content &&
-                result.content[0] &&
-                result.content[0].text) {
-                commentData = JSON.parse(result.content[0].text);
-            }
-            else {
-                commentData = result;
-            }
-            return {
-                success: true,
-                commentId: commentData.id || commentData.comment_id,
-            };
-        }
-        catch (error) {
-            logger.error(`Failed to add comment: ${error.message}`);
-            // If inline comment fails, try posting as general comment
-            if (options.filePath && options.codeSnippet) {
-                logger.debug(`Attempting fallback to general comment...`);
-                try {
-                    const fallbackArgs = {
-                        workspace,
-                        repository,
-                        pull_request_id: pullRequestId,
-                        comment_text: `**File: ${options.filePath}**\n\n${commentText}`,
-                    };
-                    const fallbackResult = await RetryManager.withRetry(() => this.pullRequestHandlers.handleAddComment(fallbackArgs), `Add fallback comment to PR ${workspace}/${repository}#${pullRequestId}`);
-                    let fallbackData;
-                    if (fallbackResult.content &&
-                        fallbackResult.content[0] &&
-                        fallbackResult.content[0].text) {
-                        fallbackData = JSON.parse(fallbackResult.content[0].text);
-                    }
-                    else {
-                        fallbackData = fallbackResult;
-                    }
-                    logger.debug(`Fallback comment posted successfully`);
-                    return {
-                        success: true,
-                        commentId: fallbackData.id || fallbackData.comment_id,
-                    };
-                }
-                catch (fallbackError) {
-                    logger.error(`Fallback comment also failed: ${fallbackError.message}`);
-                }
-            }
-            throw new ProviderError(`Comment failed: ${error.message}`);
-        }
-    }
-    /**
-     * Batch operation support for multiple API calls
-     */
-    async batchOperations(operations, options = {}) {
-        const { maxConcurrent = 5, delayBetween = 1000, continueOnError = true, } = options;
-        const results = [];
-        // Process operations in batches
-        for (let i = 0; i < operations.length; i += maxConcurrent) {
-            const batch = operations.slice(i, i + maxConcurrent);
-            const batchPromises = batch.map(async (operation) => {
-                try {
-                    const data = await operation();
-                    return { success: true, data };
-                }
-                catch (error) {
-                    const errorMessage = error instanceof Error ? error.message : String(error);
-                    if (!continueOnError) {
-                        throw error;
-                    }
-                    return { success: false, error: errorMessage };
-                }
-            });
-            const batchResults = await Promise.all(batchPromises);
-            results.push(...batchResults);
-            // Add delay between batches (except for the last batch)
-            if (i + maxConcurrent < operations.length && delayBetween > 0) {
-                await new Promise((resolve) => setTimeout(resolve, delayBetween));
-            }
-        }
-        return results;
-    }
-    /**
-     * Health check for the provider
-     */
-    async healthCheck() {
-        try {
-            await this.initialize();
-            // Try a simple API call to verify connectivity
-            const testResult = await this.branchHandlers.handleGetBranch({
-                workspace: "test",
-                repository: "test",
-                branch_name: "test",
-                include_merged_prs: false,
-            });
-            return {
-                healthy: true,
-                details: {
-                    initialized: this.initialized,
-                    baseUrl: this.baseUrl,
-                    username: this.credentials.username,
-                    apiConnected: !!testResult,
-                },
-            };
-        }
-        catch (error) {
-            return {
-                healthy: false,
-                details: {
-                    initialized: this.initialized,
-                    error: error.message,
-                },
-            };
-        }
-    }
-    /**
-     * Get provider statistics and cache metrics
-     */
-    getStats() {
-        return {
-            provider: "bitbucket",
-            initialized: this.initialized,
-            baseUrl: this.baseUrl,
-            cacheStats: cache.stats(),
-            cacheHitRatio: cache.getHitRatio(),
-        };
-    }
-    /**
-     * Clear provider-related cache entries
-     */
-    clearCache() {
-        // Clear all cache entries (could be made more specific)
-        cache.clear();
-        logger.debug("BitbucketProvider cache cleared");
-    }
-}
-// Export factory function
-export function createBitbucketProvider(credentials) {
-    return new BitbucketProvider(credentials);
-}
-//# sourceMappingURL=BitbucketProvider.js.map
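The heart of the removed provider is `parseMCPResponse`: every MCP tool result arrives either as direct data or as a content array whose first text entry is usually JSON, sometimes plain text, and sometimes an `Error:` string. Here is a standalone, typed restatement of the logic deleted above; the `MCPToolResult` shape is an assumption inferred from the code, not a published type:

```typescript
// Typed sketch of the parseMCPResponse logic removed above.
interface MCPToolResult {
  error?: string;
  content?: Array<{ text?: string }>;
}

function parseMCPResponse(result: MCPToolResult): unknown {
  // Explicit error field wins outright.
  if (result.error) {
    throw new Error(result.error);
  }
  const text = result.content?.[0]?.text;
  if (text !== undefined) {
    // Some handlers report failures as an "Error:" string payload.
    if (text.startsWith("Error:")) {
      throw new Error(text);
    }
    try {
      return JSON.parse(text); // structured payloads are JSON-encoded text
    } catch {
      return text; // plain-text payloads (e.g. raw file content) pass through
    }
  }
  return result; // some handlers already return direct data
}
```

Note the deliberate leniency: a payload that fails to parse as JSON is returned as-is rather than rejected, which is what lets the same parser serve both structured responses and raw file contents.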
package/dist/features/CodeReviewer.d.ts
@@ -1,173 +0,0 @@
-/**
- * Enhanced Code Reviewer - Optimized to work with Unified Context
- * Preserves all original functionality from pr-police.js but optimized
- */
-import { ReviewResult, ReviewOptions, AIProviderConfig, CodeReviewConfig } from "../types/index.js";
-import { UnifiedContext } from "../core/ContextGatherer.js";
-import { BitbucketProvider } from "../core/providers/BitbucketProvider.js";
-export declare class CodeReviewer {
-    private neurolink;
-    private bitbucketProvider;
-    private aiConfig;
-    private reviewConfig;
-    constructor(bitbucketProvider: BitbucketProvider, aiConfig: AIProviderConfig, reviewConfig: CodeReviewConfig);
-    /**
-     * Review code using pre-gathered unified context (OPTIMIZED with Multi-Instance and Batch Processing)
-     */
-    reviewCodeWithContext(context: UnifiedContext, options: ReviewOptions, multiInstanceConfig?: any): Promise<ReviewResult>;
-    /**
-     * Review code using multiple instances for enhanced analysis
-     */
-    reviewWithMultipleInstances(context: UnifiedContext, options: ReviewOptions, multiInstanceConfig: any): Promise<any>;
-    /**
-     * Get system prompt for security-focused code review
-     */
-    private getSecurityReviewSystemPrompt;
-    /**
-     * Get analysis requirements from config or defaults
-     */
-    private getAnalysisRequirements;
-    /**
-     * Build focused analysis prompt separated from context
-     */
-    private buildCoreAnalysisPrompt;
-    /**
-     * Extract diff content based on strategy
-     */
-    private extractDiffContent;
-    /**
-     * Detect project type for better context
-     */
-    private detectProjectType;
-    /**
-     * Assess complexity level for better AI context
-     */
-    private assessComplexity;
-    /**
-     * Legacy method - kept for compatibility but simplified
-     */
-    private buildAnalysisPrompt;
-    /**
-     * Get safe token limit based on AI provider using shared utility
-     */
-    private getSafeTokenLimit;
-    /**
-     * Analyze code with AI using the enhanced prompt
-     */
-    private analyzeWithAI;
-    /**
-     * Post comments to PR using unified context - matching pr-police.js exactly
-     */
-    private postComments;
-    /**
-     * Format inline comment for specific violation
-     */
-    private formatInlineComment;
-    /**
-     * Generate comprehensive summary comment with failed comments info
-     */
-    private generateSummaryComment;
-    /**
-     * Helper methods for processing violations
-     */
-    private cleanFilePath;
-    /**
-     * Extract exact file path from diff
-     */
-    private extractFilePathFromDiff;
-    /**
-     * Extract line number from diff for a specific code snippet
-     */
-    private extractLineNumberFromDiff;
-    /**
-     * Escape markdown code blocks properly
-     */
-    private escapeMarkdownCodeBlock;
-    private cleanCodeSnippet;
-    private splitArrayLines;
-    private groupViolationsByCategory;
-    private calculateStats;
-    private generateReviewResult;
-    /**
-     * Get batch processing configuration with defaults
-     */
-    private getBatchProcessingConfig;
-    /**
-     * Determine if batch processing should be used
-     */
-    private shouldUseBatchProcessing;
-    /**
-     * Main batch processing method with parallel processing support
-     */
-    private reviewWithBatchProcessing;
-    /**
-     * Process batches in parallel with concurrency control
-     */
-    private processInParallel;
-    /**
-     * Process batches serially (original implementation)
-     */
-    private processSerially;
-    /**
-     * Pre-allocate tokens based on distribution strategy with proper integer arithmetic
-     */
-    private preAllocateTokens;
-    /**
-     * Try weighted allocation for batches
-     */
-    private tryWeightedAllocation;
-    /**
-     * Try equal allocation for batches
-     */
-    private tryEqualAllocation;
-    /**
-     * Process a single batch with concurrency control
-     */
-    private processBatchWithConcurrency;
-    /**
-     * Prioritize files based on security importance and file type
-     */
-    private prioritizeFiles;
-    /**
-     * Calculate file priority based on path and content
-     */
-    private calculateFilePriority;
-    /**
-     * Estimate token count for a file
-     */
-    private estimateFileTokens;
-    /**
-     * Create batches from prioritized files
-     */
-    private createBatches;
-    /**
-     * Process a single batch of files
-     */
-    private processBatch;
-    /**
-     * Create context for a specific batch
-     */
-    private createBatchContext;
-    /**
-     * Build analysis prompt for a specific batch
-     */
-    private buildBatchAnalysisPrompt;
-    /**
-     * Utility methods
-     */
-    private parseAIResponse;
-    /**
-     * Extract line information for comment from context
-     */
-    private extractLineInfoForComment;
-    /**
-     * Detect programming language from file extension
-     */
-    private detectLanguageFromFile;
-    /**
-     * Generate all possible path variations for a file
-     */
-    private generatePathVariations;
-}
-export declare function createCodeReviewer(bitbucketProvider: BitbucketProvider, aiConfig: AIProviderConfig, reviewConfig: CodeReviewConfig): CodeReviewer;
-//# sourceMappingURL=CodeReviewer.d.ts.map
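The declaration file above only names the v1 batch-processing helpers (`createBatches`, `processInParallel`, `processBatchWithConcurrency`); their bodies are gone from the published output. The removed `BitbucketProvider.batchOperations` in the first diff shows the underlying pattern, restated here as a self-contained, typed sketch (the generic signature is mine, not yama's):

```typescript
// Chunked concurrency sketch after the removed batchOperations: run at most
// maxConcurrent operations per chunk, optionally pause between chunks, and
// either collect failures as results or rethrow them.
type BatchResult<T> =
  | { success: true; data: T }
  | { success: false; error: string };

async function batchOperations<T>(
  operations: Array<() => Promise<T>>,
  { maxConcurrent = 5, delayBetween = 1000, continueOnError = true } = {},
): Promise<BatchResult<T>[]> {
  const results: BatchResult<T>[] = [];
  for (let i = 0; i < operations.length; i += maxConcurrent) {
    const batch = operations.slice(i, i + maxConcurrent);
    const settled = await Promise.all(
      batch.map(async (operation): Promise<BatchResult<T>> => {
        try {
          return { success: true, data: await operation() };
        } catch (error) {
          if (!continueOnError) throw error; // abort the whole run
          return {
            success: false,
            error: error instanceof Error ? error.message : String(error),
          };
        }
      }),
    );
    results.push(...settled);
    // Throttle between chunks, except after the final one.
    if (i + maxConcurrent < operations.length && delayBetween > 0) {
      await new Promise((resolve) => setTimeout(resolve, delayBetween));
    }
  }
  return results;
}
```

Chunking with `Promise.all` caps in-flight work at `maxConcurrent` per chunk, and the pause between chunks acts as a crude rate limiter toward the Bitbucket API; the v2 rewrite replaces all of this with the orchestrator and session manager under `dist/v2/`.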