@juspay/yama 1.4.1 → 1.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/core/providers/BitbucketProvider.js +36 -23
- package/dist/features/CodeReviewer.d.ts +12 -0
- package/dist/features/CodeReviewer.js +189 -6
- package/dist/features/MultiInstanceProcessor.js +3 -2
- package/dist/types/index.d.ts +38 -0
- package/dist/types/index.js +65 -0
- package/dist/utils/Cache.d.ts +8 -2
- package/dist/utils/Cache.js +190 -10
- package/dist/utils/ParallelProcessing.d.ts +28 -0
- package/dist/utils/ParallelProcessing.js +108 -3
- package/dist/utils/RetryManager.d.ts +78 -0
- package/dist/utils/RetryManager.js +205 -0
- package/package.json +1 -1
- package/yama.config.example.yaml +1 -0
package/CHANGELOG.md
CHANGED
@@ -1,3 +1,15 @@
+## [1.5.1](https://github.com/juspay/yama/compare/v1.5.0...v1.5.1) (2025-09-24)
+
+### Bug Fixes
+
+- **allocation:** Added fix for batch token allocation ([11f7192](https://github.com/juspay/yama/commit/11f719257a75ba946c45612e336db69a17cf278d))
+
+# [1.5.0](https://github.com/juspay/yama/compare/v1.4.1...v1.5.0) (2025-09-19)
+
+### Features
+
+- **summary:** Added config support for summary comment ([666ea5c](https://github.com/juspay/yama/commit/666ea5c78b93d2ef3df24a09f95581a4b8e75650))
+
 ## [1.4.1](https://github.com/juspay/yama/compare/v1.4.0...v1.4.1) (2025-09-18)
 
 ### Bug Fixes

package/dist/core/providers/BitbucketProvider.js
CHANGED

@@ -5,6 +5,7 @@
 import { ProviderError, } from "../../types/index.js";
 import { logger } from "../../utils/Logger.js";
 import { cache, Cache } from "../../utils/Cache.js";
+import { RetryManager } from "../../utils/RetryManager.js";
 export class BitbucketProvider {
     apiClient;
     branchHandlers;
@@ -85,14 +86,14 @@ export class BitbucketProvider {
             throw new ProviderError("Branch name is required");
         }
         const cacheKey = Cache.keys.branchInfo(workspace, repository, branch);
-        return cache.getOrSet(cacheKey, async () => {
+        return cache.getOrSetResilient(cacheKey, async () => {
             logger.debug(`Finding PR for branch: ${workspace}/${repository}@${branch}`);
-            const rawBranchData = await this.branchHandlers.handleGetBranch({
+            const rawBranchData = await RetryManager.withRetry(() => this.branchHandlers.handleGetBranch({
                 workspace,
                 repository,
                 branch_name: branch,
                 include_merged_prs: false,
-            });
+            }), `Find PR for branch ${workspace}/${repository}@${branch}`);
             const branchData = this.parseMCPResponse(rawBranchData);
             // Direct data extraction
             if (branchData.open_pull_requests &&
@@ -131,13 +132,13 @@ export class BitbucketProvider {
             throw new ProviderError("Pull request ID is required");
         }
         const cacheKey = Cache.keys.prInfo(workspace, repository, pullRequestId);
-        return cache.getOrSet(cacheKey, async () => {
+        return cache.getOrSetResilient(cacheKey, async () => {
             logger.debug(`Getting PR details: ${workspace}/${repository}#${pullRequestId}`);
-            const rawPRDetails = await this.pullRequestHandlers.handleGetPullRequest({
+            const rawPRDetails = await RetryManager.withRetry(() => this.pullRequestHandlers.handleGetPullRequest({
                 workspace,
                 repository,
                 pull_request_id: pullRequestId,
-            });
+            }), `Get PR details ${workspace}/${repository}#${pullRequestId}`);
             const prData = this.parseMCPResponse(rawPRDetails);
             // Debug author data structure
             logger.debug(`PR Details author data structure: ${JSON.stringify(prData.author, null, 2)}`);
@@ -175,7 +176,7 @@ export class BitbucketProvider {
         const cacheKey = includePatterns && includePatterns.length === 1
             ? `file-diff:${workspace}:${repository}:${pullRequestId}:${includePatterns[0]}`
             : Cache.keys.prDiff(workspace, repository, pullRequestId);
-        return cache.getOrSet(cacheKey, async () => {
+        return cache.getOrSetResilient(cacheKey, async () => {
             logger.debug(`Getting PR diff: ${workspace}/${repository}#${pullRequestId}`);
             if (includePatterns) {
                 logger.debug(`Include patterns: ${includePatterns.join(", ")}`);
@@ -191,7 +192,7 @@ export class BitbucketProvider {
             if (includePatterns) {
                 args.include_patterns = includePatterns;
             }
-            const rawDiff = await this.reviewHandlers.handleGetPullRequestDiff(args);
+            const rawDiff = await RetryManager.withRetry(() => this.reviewHandlers.handleGetPullRequestDiff(args), `Get PR diff ${workspace}/${repository}#${pullRequestId}`);
             const diffData = this.parseMCPResponse(rawDiff);
             return {
                 diff: diffData.diff || "",
@@ -207,18 +208,28 @@ export class BitbucketProvider {
     async getFileContent(workspace, repository, filePath, branch) {
         await this.initialize();
         const cacheKey = Cache.keys.fileContent(workspace, repository, filePath, branch);
-        return cache.getOrSet(cacheKey, async () => {
+        return cache.getOrSetResilient(cacheKey, async () => {
             logger.debug(`Getting file content: ${workspace}/${repository}/${filePath}@${branch}`);
-            const result = await this.fileHandlers.handleGetFileContent({
+            const result = await RetryManager.withRetry(() => this.fileHandlers.handleGetFileContent({
                 workspace,
                 repository,
                 file_path: filePath,
                 branch,
-            });
-            // Handle file content response
-            if (result.content &&
-
-
+            }), `Get file content ${workspace}/${repository}/${filePath}@${branch}`);
+            // Handle file content response with proper error handling for plain text files
+            if (result.content &&
+                result.content[0] &&
+                result.content[0].text) {
+                try {
+                    const fileResponse = JSON.parse(result.content[0].text);
+                    return fileResponse.content || "";
+                }
+                catch (parseError) {
+                    // If JSON parsing fails, the content might be plain text (like .clinerules)
+                    // Return the text content directly
+                    logger.debug(`JSON parsing failed for ${filePath}, treating as plain text: ${parseError.message}`);
+                    return result.content[0].text || "";
+                }
             }
             // Handle direct response format
             return result.content || "";
@@ -230,14 +241,14 @@ export class BitbucketProvider {
     async listDirectoryContent(workspace, repository, path, branch) {
         await this.initialize();
         const cacheKey = Cache.keys.directoryContent(workspace, repository, path, branch);
-        return cache.getOrSet(cacheKey, async () => {
+        return cache.getOrSetResilient(cacheKey, async () => {
             logger.debug(`Listing directory: ${workspace}/${repository}/${path}@${branch}`);
-            const result = await this.fileHandlers.handleListDirectoryContent({
+            const result = await RetryManager.withRetry(() => this.fileHandlers.handleListDirectoryContent({
                 workspace,
                 repository,
                 path,
                 branch,
-            });
+            }), `List directory ${workspace}/${repository}/${path}@${branch}`);
             const dirData = this.parseMCPResponse(result);
             return dirData.contents || [];
         }, 3600);
@@ -254,12 +265,12 @@ export class BitbucketProvider {
         try {
             logger.debug(`Updating PR description: ${workspace}/${repository}#${pullRequestId}`);
             logger.debug(`Description length: ${description.length} characters`);
-            const result = await this.pullRequestHandlers.handleUpdatePullRequest({
+            const result = await RetryManager.withRetry(() => this.pullRequestHandlers.handleUpdatePullRequest({
                 workspace,
                 repository,
                 pull_request_id: pullRequestId,
                 description: description,
-            });
+            }), `Update PR description ${workspace}/${repository}#${pullRequestId}`);
             // Log the raw MCP response
             logger.debug(`Raw MCP update response: ${JSON.stringify(result, null, 2)}`);
             const updateData = this.parseMCPResponse(result);
@@ -335,10 +346,12 @@ export class BitbucketProvider {
                 logger.debug(` Type: ${options.lineType || "CONTEXT"}`);
             }
             logger.debug(`🔍 MCP addComment args: ${JSON.stringify(args, null, 2)}`);
-            const result = await this.pullRequestHandlers.handleAddComment(args);
+            const result = await RetryManager.withRetry(() => this.pullRequestHandlers.handleAddComment(args), `Add comment to PR ${workspace}/${repository}#${pullRequestId}`);
             // Parse response exactly like pr-police.js
             let commentData;
-            if (result.content &&
+            if (result.content &&
+                result.content[0] &&
+                result.content[0].text) {
                 commentData = JSON.parse(result.content[0].text);
             }
             else {
@@ -361,7 +374,7 @@ export class BitbucketProvider {
                     pull_request_id: pullRequestId,
                     comment_text: `**File: ${options.filePath}**\n\n${commentText}`,
                 };
-                const fallbackResult = await this.pullRequestHandlers.handleAddComment(fallbackArgs);
+                const fallbackResult = await RetryManager.withRetry(() => this.pullRequestHandlers.handleAddComment(fallbackArgs), `Add fallback comment to PR ${workspace}/${repository}#${pullRequestId}`);
                 let fallbackData;
                 if (fallbackResult.content &&
                     fallbackResult.content[0] &&
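Every MCP handler call above is now wrapped in `RetryManager.withRetry(fn, description)`. The body of the new `dist/utils/RetryManager.js` (+205 lines) is not shown in this diff, so the following is only a minimal sketch of a wrapper consistent with the call sites above; the attempt count, backoff delays, and logging are assumptions, not the package's actual defaults.

```typescript
// Hypothetical sketch only: matches the withRetry(fn, description) call shape used
// above, but attempt counts and backoff values are assumed, not taken from yama.
export class RetryManager {
  static async withRetry<T>(
    fn: () => Promise<T>,
    description = "operation",
    maxAttempts = 3,   // assumption
    baseDelayMs = 500, // assumption: exponential backoff base
  ): Promise<T> {
    let lastError: unknown;
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        return await fn();
      } catch (error) {
        lastError = error;
        if (attempt === maxAttempts) break;
        const delay = baseDelayMs * 2 ** (attempt - 1);
        console.warn(`${description} failed (attempt ${attempt}/${maxAttempts}), retrying in ${delay}ms`);
        await new Promise((resolve) => setTimeout(resolve, delay));
      }
    }
    throw lastError;
  }
}
```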

package/dist/features/CodeReviewer.d.ts
CHANGED

@@ -108,6 +108,18 @@ export declare class CodeReviewer {
      * Process batches serially (original implementation)
      */
     private processSerially;
+    /**
+     * Pre-allocate tokens based on distribution strategy with proper integer arithmetic
+     */
+    private preAllocateTokens;
+    /**
+     * Try weighted allocation for batches
+     */
+    private tryWeightedAllocation;
+    /**
+     * Try equal allocation for batches
+     */
+    private tryEqualAllocation;
     /**
      * Process a single batch with concurrency control
      */

package/dist/features/CodeReviewer.js
CHANGED

@@ -465,8 +465,9 @@ Return ONLY valid JSON:
                 });
             }
         }
-        // Post summary comment (include failed comments info if any)
-
+        // Post summary comment (include failed comments info if any) - only if enabled in config
+        const shouldPostSummary = this.reviewConfig.postSummaryComment !== false; // Default to true if not specified
+        if (uniqueViolations.length > 0 && shouldPostSummary) {
            try {
                const summaryComment = this.generateSummaryComment(uniqueViolations, context, failedComments);
                await this.bitbucketProvider.addComment(context.identifier, summaryComment);
@@ -477,6 +478,9 @@ Return ONLY valid JSON:
                logger.debug(`❌ Failed to post summary comment: ${error.message}`);
            }
        }
+        else if (uniqueViolations.length > 0 && !shouldPostSummary) {
+            logger.debug("📝 Summary comment posting disabled in configuration");
+        }
        logger.success(`✅ Posted ${commentsPosted} comments successfully`);
        if (commentsFailed > 0) {
            logger.warn(`⚠️ Failed to post ${commentsFailed} inline comments`);
@@ -918,7 +922,25 @@ ${recommendation}
        // Initialize concurrency control
        const semaphore = new Semaphore(optimalConcurrency);
        const tokenBudget = new TokenBudgetManager(this.getSafeTokenLimit() * 0.8); // 80% for safety
-
+        // NEW: Pre-allocate tokens based on distribution strategy
+        const distributionStrategy = parallelConfig.tokenBudgetDistribution || "equal";
+        logger.info(`🎯 Using ${distributionStrategy} token distribution strategy for ${batches.length} batches`);
+        const tokenAllocations = this.preAllocateTokens(batches, tokenBudget, distributionStrategy);
+        if (!tokenAllocations) {
+            const totalRequired = batches.reduce((sum, b) => sum + b.estimatedTokens, 0);
+            const totalBudget = tokenBudget.getTotalBudget();
+            throw new Error(`Insufficient token budget: required ${totalRequired}, available ${totalBudget}. ` +
+                `Consider reducing batch count (current: ${batches.length}) or increasing token limit.`);
+        }
+        // Apply pre-allocated tokens to the budget manager
+        if (!tokenBudget.preAllocateAllBatches(tokenAllocations)) {
+            throw new Error("Failed to pre-allocate tokens for all batches");
+        }
+        logger.info(`🎯 Parallel processing: ${optimalConcurrency} concurrent batches, ${tokenBudget.getTotalBudget()} token budget (${distributionStrategy} distribution)`);
+        // Log allocation details
+        tokenAllocations.forEach((tokens, batchIndex) => {
+            logger.debug(`Batch ${batchIndex + 1}: ${tokens} tokens allocated`);
+        });
        const batchResults = new Array(batches.length);
        const allViolations = [];
        const processingPromises = [];
@@ -1002,6 +1024,149 @@ ${recommendation}
        logger.success(`🎯 Serial processing completed: ${allViolations.length} total violations from ${batches.length} batches in ${Math.round(totalTime / 1000)}s (avg ${avgBatchSize.toFixed(1)} files/batch)`);
        return { violations: allViolations, batchResults };
    }
+    /**
+     * Pre-allocate tokens based on distribution strategy with proper integer arithmetic
+     */
+    preAllocateTokens(batches, tokenBudget, strategy) {
+        // Ensure we're working with integer budget to avoid floating-point issues
+        const totalBudget = Math.floor(tokenBudget.getTotalBudget());
+        const allocations = new Map();
+        if (strategy === "equal") {
+            // Equal distribution: divide budget equally among all batches with proper remainder handling
+            const baseTokens = Math.floor(totalBudget / batches.length);
+            const remainder = totalBudget % batches.length;
+            if (baseTokens < 1000) {
+                // Minimum viable tokens per batch
+                logger.error(`Equal distribution would give ${baseTokens} tokens per batch, which is insufficient`);
+                return null;
+            }
+            let totalAllocated = 0;
+            for (let i = 0; i < batches.length; i++) {
+                // Distribute remainder to first few batches
+                const tokens = baseTokens + (i < remainder ? 1 : 0);
+                allocations.set(i, tokens);
+                totalAllocated += tokens;
+            }
+            // Double-check that we haven't exceeded budget due to any calculation errors
+            if (totalAllocated > totalBudget) {
+                logger.error(`Equal distribution calculation error: ${totalAllocated} > ${totalBudget}`);
+                // Adjust the last batch to fit within budget
+                const lastBatchIndex = batches.length - 1;
+                const lastBatchTokens = allocations.get(lastBatchIndex);
+                const adjustment = totalAllocated - totalBudget;
+                const newLastBatchTokens = lastBatchTokens - adjustment;
+                if (newLastBatchTokens < 1000) {
+                    logger.error(`Adjustment would result in last batch having ${newLastBatchTokens} tokens, which is below the minimum threshold (1000). Aborting allocation.`);
+                    return null;
+                }
+                allocations.set(lastBatchIndex, newLastBatchTokens);
+                totalAllocated = totalBudget;
+                logger.warn(`Adjusted last batch by -${adjustment} tokens to fit budget`);
+            }
+            logger.info(`Equal distribution: ${baseTokens} tokens per batch for ${batches.length} batches`);
+            logger.debug(`Pre-allocated ${totalAllocated} tokens across ${batches.length} batches (${totalBudget - totalAllocated} remaining)`);
+        }
+        else if (strategy === "weighted") {
+            // Weighted distribution: try weighted first, automatically fallback to equal if needed
+            logger.debug(`Attempting weighted distribution...`);
+            const weightedResult = this.tryWeightedAllocation(batches, totalBudget);
+            if (weightedResult) {
+                // Weighted allocation succeeded
+                weightedResult.forEach((tokens, batchIndex) => {
+                    allocations.set(batchIndex, tokens);
+                });
+                logger.info(`✅ Weighted distribution: optimal allocation successful`);
+                logger.debug(`Pre-allocated ${Array.from(weightedResult.values()).reduce((sum, tokens) => sum + tokens, 0)} tokens across ${batches.length} batches`);
+            }
+            else {
+                // Weighted allocation failed, automatically fallback to equal distribution
+                logger.warn(`⚠️ Weighted distribution: insufficient budget for optimal allocation, falling back to equal distribution`);
+                const equalResult = this.tryEqualAllocation(batches, totalBudget);
+                if (!equalResult) {
+                    logger.error(`Weighted distribution: both optimal and equal allocation failed`);
+                    return null;
+                }
+                equalResult.forEach((tokens, batchIndex) => {
+                    allocations.set(batchIndex, tokens);
+                });
+                logger.info(`✅ Weighted distribution: equal allocation fallback successful`);
+                logger.debug(`Pre-allocated ${Array.from(equalResult.values()).reduce((sum, tokens) => sum + tokens, 0)} tokens across ${batches.length} batches`);
+            }
+        }
+        // Final validation with strict integer checking
+        const totalAllocated = Array.from(allocations.values()).reduce((sum, tokens) => sum + tokens, 0);
+        if (totalAllocated > totalBudget) {
+            logger.error(`CRITICAL: Total allocation (${totalAllocated}) exceeds budget (${totalBudget}) - this should never happen`);
+            logger.error(`Budget type: ${typeof totalBudget}, Allocation type: ${typeof totalAllocated}`);
+            logger.error(`Individual allocations: ${Array.from(allocations.entries())
+                .map(([i, tokens]) => `batch${i}:${tokens}`)
+                .join(", ")}`);
+            throw new Error(`Total allocation (${totalAllocated}) exceeds budget (${totalBudget}) - this should never happen`);
+        }
+        return allocations;
+    }
+    /**
+     * Try weighted allocation for batches
+     */
+    tryWeightedAllocation(batches, totalBudget) {
+        const totalEstimated = batches.reduce((sum, batch) => sum + batch.estimatedTokens, 0);
+        if (totalEstimated > totalBudget) {
+            logger.debug(`Total estimated tokens (${totalEstimated}) exceed budget (${totalBudget})`);
+            return null;
+        }
+        let totalAllocated = 0;
+        const minTokensPerBatch = 1000;
+        const allocations = new Map();
+        for (let i = 0; i < batches.length; i++) {
+            const batch = batches[i];
+            const weight = batch.estimatedTokens / totalEstimated;
+            const allocation = Math.floor(weight * totalBudget);
+            const finalAllocation = Math.max(allocation, minTokensPerBatch);
+            allocations.set(i, finalAllocation);
+            totalAllocated += finalAllocation;
+        }
+        // Check if we exceeded budget due to minimum allocations
+        if (totalAllocated > totalBudget) {
+            logger.debug(`Weighted allocation with minimums (${totalAllocated}) exceeds budget (${totalBudget})`);
+            return null;
+        }
+        return allocations;
+    }
+    /**
+     * Try equal allocation for batches
+     */
+    tryEqualAllocation(batches, totalBudget) {
+        const baseTokens = Math.floor(totalBudget / batches.length);
+        const remainder = totalBudget % batches.length;
+        if (baseTokens < 1000) {
+            // Minimum viable tokens per batch
+            logger.debug(`Equal distribution would give ${baseTokens} tokens per batch, which is insufficient`);
+            return null;
+        }
+        const allocations = new Map();
+        let totalAllocated = 0;
+        for (let i = 0; i < batches.length; i++) {
+            // Distribute remainder to first few batches
+            const tokens = baseTokens + (i < remainder ? 1 : 0);
+            allocations.set(i, tokens);
+            totalAllocated += tokens;
+        }
+        // Double-check that we haven't exceeded budget due to any calculation errors
+        if (totalAllocated > totalBudget) {
+            logger.debug(`Equal distribution calculation error: ${totalAllocated} > ${totalBudget}`);
+            // Adjust the last batch to fit within budget
+            const lastBatchIndex = batches.length - 1;
+            const lastBatchTokens = allocations.get(lastBatchIndex);
+            const adjustment = totalAllocated - totalBudget;
+            const newLastBatchTokens = lastBatchTokens - adjustment;
+            if (newLastBatchTokens < 1000) {
+                logger.error(`Adjustment would result in last batch having ${newLastBatchTokens} tokens, which is below the minimum threshold (1000). Aborting allocation.`);
+                return null;
+            }
+            allocations.set(lastBatchIndex, newLastBatchTokens);
+        }
+        return allocations;
+    }
     /**
      * Process a single batch with concurrency control
      */
@@ -1009,9 +1174,22 @@ ${recommendation}
        // Acquire semaphore permit
        await semaphore.acquire();
        try {
-            //
-            if (
-
+            // NEW: In pre-allocation mode, tokens are already allocated, just verify and mark as processing
+            if (tokenBudget.isPreAllocationMode()) {
+                const batchState = tokenBudget.getBatchState(batchIndex);
+                if (batchState !== "pending") {
+                    throw new Error(`Batch ${batchIndex + 1} is not in pending state (current: ${batchState})`);
+                }
+                // Mark as processing (this is handled in allocateForBatch for pre-allocation mode)
+                if (!tokenBudget.allocateForBatch(batchIndex, batch.estimatedTokens)) {
+                    throw new Error(`Failed to mark batch ${batchIndex + 1} as processing`);
+                }
+            }
+            else {
+                // Legacy mode: allocate tokens dynamically
+                if (!tokenBudget.allocateForBatch(batchIndex, batch.estimatedTokens)) {
+                    throw new Error(`Insufficient token budget for batch ${batchIndex + 1}`);
+                }
            }
            logger.info(`🔄 Processing batch ${batchIndex + 1}/${totalBatches} (${batch.files.length} files, parallel)`);
            // Process the batch (existing logic)
@@ -1019,6 +1197,11 @@ ${recommendation}
            logger.info(`✅ Batch ${batchIndex + 1} completed: ${result.violations.length} violations in ${Math.round(result.processingTime / 1000)}s`);
            return result;
        }
+        catch (error) {
+            // Mark batch as failed in token budget
+            tokenBudget.markBatchFailed(batchIndex, error.message);
+            throw error;
+        }
        finally {
            // Always release resources
            tokenBudget.releaseBatch(batchIndex);
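To make the integer arithmetic in `preAllocateTokens` concrete, here is a small standalone recreation of the equal-distribution branch (illustrative only; the 10,000-token budget and batch count are made-up numbers, and the real method also handles the weighted strategy, logging, and the budget-manager interaction shown above).

```typescript
// Standalone recreation of the equal-split logic from preAllocateTokens (illustrative).
function equalSplit(totalBudget: number, batchCount: number): Map<number, number> | null {
  const budget = Math.floor(totalBudget);            // avoid floating-point budgets
  const baseTokens = Math.floor(budget / batchCount);
  const remainder = budget % batchCount;
  if (baseTokens < 1000) return null;                // minimum viable tokens per batch
  const allocations = new Map<number, number>();
  for (let i = 0; i < batchCount; i++) {
    // The first `remainder` batches absorb the leftover tokens, so the sum
    // equals the budget exactly instead of drifting by a few tokens.
    allocations.set(i, baseTokens + (i < remainder ? 1 : 0));
  }
  return allocations;
}

// Example: 10000 tokens across 3 batches -> 3334, 3333, 3333 (sums to exactly 10000).
console.log(equalSplit(10000, 3));
```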

package/dist/features/MultiInstanceProcessor.js
CHANGED

@@ -237,8 +237,9 @@ export class MultiInstanceProcessor {
             minLimit = Math.min(minLimit, instanceLimit);
         }
         // Total budget is the sum of all instance limits, but with safety margin
-
-
+        // Use Math.floor to ensure integer result and avoid floating-point precision issues
+        const totalBudget = Math.floor(instances.length * minLimit * 0.8); // 80% safety margin
+        logger.debug(`Calculated total token budget: ${totalBudget} (${instances.length} instances × ${minLimit} × 0.8, floored)`);
         return totalBudget;
     }
     /**
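As a quick sanity check on the flooring change (numbers below are invented for illustration): the 0.8 safety margin can produce a fractional budget, which the new `Math.floor` removes before the integer-only pre-allocation logic consumes it.

```typescript
// Illustrative numbers only: shows why the total budget is floored to an integer.
const instances = 3;    // assumed instance count
const minLimit = 33333; // assumed smallest per-instance token limit
const totalBudget = Math.floor(instances * minLimit * 0.8); // 79999, not 79999.2
console.log(totalBudget);
```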
package/dist/types/index.d.ts
CHANGED
@@ -283,6 +283,7 @@ export interface CodeReviewConfig {
     systemPrompt?: string;
     analysisTemplate?: string;
     focusAreas?: string[];
+    postSummaryComment?: boolean;
     batchProcessing?: BatchProcessingConfig;
     multiInstance?: MultiInstanceConfig;
     semanticDeduplication?: SemanticDeduplicationConfig;
@@ -568,6 +569,7 @@ export interface TokenBudgetManagerInterface {
     getAvailableBudget(): number;
     getTotalBudget(): number;
     getUsedTokens(): number;
+    preAllocateAllBatches(allocations: Map<number, number>): boolean;
 }
 export declare class GuardianError extends Error {
     code: string;
@@ -583,4 +585,40 @@ export declare class ProviderError extends GuardianError {
 export declare class ValidationError extends GuardianError {
     constructor(message: string, context?: any);
 }
+export declare enum CacheErrorCode {
+    CACHE_SYSTEM_FAILURE = "CACHE_SYSTEM_FAILURE",
+    CACHE_MEMORY_EXHAUSTED = "CACHE_MEMORY_EXHAUSTED",
+    CACHE_INITIALIZATION_FAILED = "CACHE_INITIALIZATION_FAILED",
+    CACHE_STORAGE_FULL = "CACHE_STORAGE_FULL",
+    CACHE_STORAGE_PERMISSION = "CACHE_STORAGE_PERMISSION",
+    CACHE_STORAGE_CORRUPTION = "CACHE_STORAGE_CORRUPTION",
+    CACHE_NETWORK_CONNECTION = "CACHE_NETWORK_CONNECTION",
+    CACHE_NETWORK_TIMEOUT = "CACHE_NETWORK_TIMEOUT",
+    CACHE_NETWORK_AUTH = "CACHE_NETWORK_AUTH",
+    CACHE_CONFIG_INVALID = "CACHE_CONFIG_INVALID",
+    CACHE_CONFIG_MISSING = "CACHE_CONFIG_MISSING",
+    CACHE_OPERATION_FAILED = "CACHE_OPERATION_FAILED",
+    CACHE_SERIALIZATION_ERROR = "CACHE_SERIALIZATION_ERROR",
+    CACHE_KEY_INVALID = "CACHE_KEY_INVALID"
+}
+export declare abstract class CacheError extends GuardianError {
+    operation?: string | undefined;
+    key?: string | undefined;
+    constructor(code: CacheErrorCode, message: string, operation?: string | undefined, key?: string | undefined, context?: any);
+}
+export declare class CacheSystemError extends CacheError {
+    constructor(message: string, operation?: string, key?: string, context?: any);
+}
+export declare class CacheStorageError extends CacheError {
+    constructor(code: CacheErrorCode | undefined, message: string, operation?: string, key?: string, context?: any);
+}
+export declare class CacheNetworkError extends CacheError {
+    constructor(code: CacheErrorCode | undefined, message: string, operation?: string, key?: string, context?: any);
+}
+export declare class CacheConfigurationError extends CacheError {
+    constructor(code: CacheErrorCode | undefined, message: string, operation?: string, key?: string, context?: any);
+}
+export declare class CacheOperationError extends CacheError {
+    constructor(code: CacheErrorCode | undefined, message: string, operation?: string, key?: string, context?: any);
+}
 //# sourceMappingURL=index.d.ts.map
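The new `postSummaryComment` flag feeds the `this.reviewConfig.postSummaryComment !== false` check in `CodeReviewer`, so it defaults to true when omitted. Below is a minimal sketch of a config object exercising it; the import path and the other field values are assumptions for illustration, not yama defaults.

```typescript
import type { CodeReviewConfig } from "@juspay/yama"; // assumes the package root re-exports its types

// Hypothetical partial config: only postSummaryComment comes from this diff.
const reviewConfig: Partial<CodeReviewConfig> = {
  focusAreas: ["security", "performance"], // placeholder focus areas
  postSummaryComment: false,               // suppress the aggregated summary comment
};
```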
package/dist/types/index.js
CHANGED
@@ -34,6 +34,71 @@ export class ValidationError extends GuardianError {
     }
 }
 // ============================================================================
+// Cache Error Types
+// ============================================================================
+export var CacheErrorCode;
+(function (CacheErrorCode) {
+    // System-level cache errors
+    CacheErrorCode["CACHE_SYSTEM_FAILURE"] = "CACHE_SYSTEM_FAILURE";
+    CacheErrorCode["CACHE_MEMORY_EXHAUSTED"] = "CACHE_MEMORY_EXHAUSTED";
+    CacheErrorCode["CACHE_INITIALIZATION_FAILED"] = "CACHE_INITIALIZATION_FAILED";
+    // Storage-related errors
+    CacheErrorCode["CACHE_STORAGE_FULL"] = "CACHE_STORAGE_FULL";
+    CacheErrorCode["CACHE_STORAGE_PERMISSION"] = "CACHE_STORAGE_PERMISSION";
+    CacheErrorCode["CACHE_STORAGE_CORRUPTION"] = "CACHE_STORAGE_CORRUPTION";
+    // Network-related errors (for future Redis support)
+    CacheErrorCode["CACHE_NETWORK_CONNECTION"] = "CACHE_NETWORK_CONNECTION";
+    CacheErrorCode["CACHE_NETWORK_TIMEOUT"] = "CACHE_NETWORK_TIMEOUT";
+    CacheErrorCode["CACHE_NETWORK_AUTH"] = "CACHE_NETWORK_AUTH";
+    // Configuration errors
+    CacheErrorCode["CACHE_CONFIG_INVALID"] = "CACHE_CONFIG_INVALID";
+    CacheErrorCode["CACHE_CONFIG_MISSING"] = "CACHE_CONFIG_MISSING";
+    // Operation errors
+    CacheErrorCode["CACHE_OPERATION_FAILED"] = "CACHE_OPERATION_FAILED";
+    CacheErrorCode["CACHE_SERIALIZATION_ERROR"] = "CACHE_SERIALIZATION_ERROR";
+    CacheErrorCode["CACHE_KEY_INVALID"] = "CACHE_KEY_INVALID";
+})(CacheErrorCode || (CacheErrorCode = {}));
+export class CacheError extends GuardianError {
+    operation;
+    key;
+    constructor(code, message, operation, key, context) {
+        super(code, message, context);
+        this.operation = operation;
+        this.key = key;
+        this.name = "CacheError";
+    }
+}
+export class CacheSystemError extends CacheError {
+    constructor(message, operation, key, context) {
+        super(CacheErrorCode.CACHE_SYSTEM_FAILURE, message, operation, key, context);
+        this.name = "CacheSystemError";
+    }
+}
+export class CacheStorageError extends CacheError {
+    constructor(code = CacheErrorCode.CACHE_STORAGE_FULL, message, operation, key, context) {
+        super(code, message, operation, key, context);
+        this.name = "CacheStorageError";
+    }
+}
+export class CacheNetworkError extends CacheError {
+    constructor(code = CacheErrorCode.CACHE_NETWORK_CONNECTION, message, operation, key, context) {
+        super(code, message, operation, key, context);
+        this.name = "CacheNetworkError";
+    }
+}
+export class CacheConfigurationError extends CacheError {
+    constructor(code = CacheErrorCode.CACHE_CONFIG_INVALID, message, operation, key, context) {
+        super(code, message, operation, key, context);
+        this.name = "CacheConfigurationError";
+    }
+}
+export class CacheOperationError extends CacheError {
+    constructor(code = CacheErrorCode.CACHE_OPERATION_FAILED, message, operation, key, context) {
+        super(code, message, operation, key, context);
+        this.name = "CacheOperationError";
+    }
+}
+// ============================================================================
 // Export all types - Main file, no re-exports needed
 // ============================================================================
 //# sourceMappingURL=index.js.map
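A sketch of how callers can distinguish the new error hierarchy (only the classes, codes, and constructor shapes come from this diff; the root re-export and the handling policy are assumptions).

```typescript
import { CacheError, CacheStorageError, CacheErrorCode } from "@juspay/yama"; // assumes root re-export

function reportCacheFailure(error: unknown): void {
  if (error instanceof CacheStorageError) {
    // Storage-specific handling; the code defaults to CACHE_STORAGE_FULL when not given
    console.warn(`storage cache error ${error.code} during ${error.operation ?? "unknown op"}`);
  } else if (error instanceof CacheError) {
    // Any other cache-originated failure: safe to fall back to the underlying fetch
    console.warn(`cache error ${error.code} for key ${error.key ?? "?"}`);
  } else {
    throw error; // non-cache errors should still propagate
  }
}

// Example: constructing one of the new error classes with an explicit code
reportCacheFailure(new CacheStorageError(CacheErrorCode.CACHE_STORAGE_CORRUPTION, "corrupted entry", "get", "pr-diff:ws:repo:42"));
```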
package/dist/utils/Cache.d.ts
CHANGED
@@ -8,7 +8,7 @@ export declare class Cache implements ICache {
     private statsData;
     constructor(options?: CacheOptions);
     /**
-     * Get value from cache
+     * Get value from cache with resilient error handling
      */
     get<T>(key: string): T | undefined;
     /**
@@ -39,15 +39,21 @@ export declare class Cache implements ICache {
         misses: number;
         keys: number;
         size: number;
+        cacheErrors: number;
+        nonCacheErrors: number;
     };
     /**
      * Get detailed cache statistics from node-cache
      */
     getDetailedStats(): any;
     /**
-     * Get or set pattern
+     * Get or set pattern with automatic fallback on cache failures
      */
     getOrSet<T>(key: string, fetchFn: () => Promise<T>, ttl?: number): Promise<T>;
+    /**
+     * Resilient get or set pattern that bypasses cache entirely on cache system failures
+     */
+    getOrSetResilient<T>(key: string, fetchFn: () => Promise<T>, ttl?: number): Promise<T>;
     /**
      * Cache with tags for group invalidation
      */
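Putting the pieces together: `getOrSetResilient` keeps the `getOrSet` signature, so callers such as `BitbucketProvider` only swap the method name, and per its description the fetch function still runs when the cache layer itself fails. A hedged usage sketch follows; the import path, key, TTL, and fetcher are placeholders, not yama code.

```typescript
import { cache, Cache } from "@juspay/yama"; // assumes the root re-exports the cache singleton

declare function fetchDiffFromBitbucket(workspace: string, repo: string, prId: number): Promise<string>; // hypothetical fetcher

async function loadPrDiff(workspace: string, repo: string, prId: number): Promise<string> {
  const cacheKey = Cache.keys.prDiff(workspace, repo, prId);
  // Same call shape as getOrSet; on cache-system failure the fetch function
  // is still executed, so callers never fail just because caching is broken.
  return cache.getOrSetResilient(cacheKey, async () => {
    return fetchDiffFromBitbucket(workspace, repo, prId);
  }, 900 /* placeholder TTL in seconds */);
}
```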