@juspay/yama 1.4.1 → 1.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,205 @@
+ /**
+  * Retry Manager for Yama
+  * Provides intelligent retry logic with exponential backoff for handling transient failures
+  */
+ import { logger } from "./Logger.js";
+ export class RetryManager {
+     static DEFAULT_OPTIONS = {
+         maxAttempts: 3,
+         baseDelayMs: 1000,
+         maxDelayMs: 10000,
+         backoffMultiplier: 2.0,
+         jitterMs: 100,
+         retryableErrors: [
+             "provider_error",
+             "network",
+             "timeout",
+             "connection",
+             "econnreset",
+             "etimedout",
+             "enotfound",
+             "econnrefused",
+             "socket hang up",
+             "request timeout",
+             "service unavailable",
+             "bad gateway",
+             "gateway timeout",
+             "temporary failure",
+             "rate limit",
+         ],
+     };
+     /**
+      * Execute an operation with retry logic
+      */
+     static async withRetry(operation, context, options = {}) {
+         const opts = { ...RetryManager.DEFAULT_OPTIONS, ...options };
+         const startTime = Date.now();
+         let lastError;
+         for (let attempt = 1; attempt <= opts.maxAttempts; attempt++) {
+             try {
+                 const result = await operation();
+                 if (attempt > 1) {
+                     const elapsed = Date.now() - startTime;
+                     logger.info(`${context} succeeded on attempt ${attempt} after ${elapsed}ms`);
+                 }
+                 return result;
+             }
+             catch (error) {
+                 lastError = error;
+                 const isLastAttempt = attempt === opts.maxAttempts;
+                 const isRetryable = RetryManager.isRetryableError(lastError, opts.retryableErrors);
+                 const elapsed = Date.now() - startTime;
+                 const retryContext = {
+                     operation: context,
+                     attempt,
+                     maxAttempts: opts.maxAttempts,
+                     lastError,
+                     totalElapsed: elapsed,
+                 };
+                 if (isLastAttempt || !isRetryable) {
+                     if (isLastAttempt) {
+                         logger.error(`${context} failed after ${opts.maxAttempts} attempts (${elapsed}ms total):`, lastError);
+                     }
+                     else {
+                         logger.error(`${context} failed with non-retryable error:`, lastError);
+                     }
+                     throw lastError;
+                 }
+                 const delay = RetryManager.calculateDelay(attempt, opts);
+                 logger.warn(`${context} failed (attempt ${attempt}/${opts.maxAttempts}), retrying in ${delay}ms:`, lastError.message);
+                 await RetryManager.sleep(delay);
+             }
+         }
+         // This should never be reached, but TypeScript requires it
+         throw (lastError ||
+             new Error(`${context} failed after ${opts.maxAttempts} attempts`));
+     }
+     /**
+      * Check if an error is retryable based on error patterns
+      */
+     static isRetryableError(error, retryablePatterns) {
+         if (!error) {
+             return false;
+         }
+         const errorMessage = error.message?.toLowerCase() || "";
+         const errorCode = error.code?.toLowerCase() || "";
+         const errorName = error.name?.toLowerCase() || "";
+         // Check if any retryable pattern matches the error
+         return retryablePatterns.some((pattern) => {
+             const lowerPattern = pattern.toLowerCase();
+             return (errorMessage.includes(lowerPattern) ||
+                 errorCode.includes(lowerPattern) ||
+                 errorName.includes(lowerPattern));
+         });
+     }
+     /**
+      * Calculate delay with exponential backoff and jitter
+      */
+     static calculateDelay(attempt, options) {
+         // Exponential backoff: baseDelay * (multiplier ^ (attempt - 1))
+         const exponentialDelay = options.baseDelayMs * Math.pow(options.backoffMultiplier, attempt - 1);
+         // Apply maximum delay cap
+         const cappedDelay = Math.min(exponentialDelay, options.maxDelayMs);
+         // Add jitter to prevent thundering herd
+         const jitter = Math.random() * options.jitterMs;
+         return Math.floor(cappedDelay + jitter);
+     }
+     /**
+      * Sleep for specified milliseconds
+      */
+     static sleep(ms) {
+         return new Promise((resolve) => setTimeout(resolve, ms));
+     }
+     /**
+      * Create a retry wrapper function for a specific operation
+      */
+     static createRetryWrapper(fn, context, options = {}) {
+         return async (...args) => {
+             return RetryManager.withRetry(() => fn(...args), context, options);
+         };
+     }
+     /**
+      * Batch retry operations with individual retry logic
+      */
+     static async batchWithRetry(operations, options = {}) {
+         const { continueOnError = true, ...retryOptions } = options;
+         const results = [];
+         for (const { fn, context } of operations) {
+             try {
+                 const data = await RetryManager.withRetry(fn, context, retryOptions);
+                 results.push({ success: true, data, context });
+             }
+             catch (error) {
+                 const err = error;
+                 results.push({ success: false, error: err, context });
+                 if (!continueOnError) {
+                     throw error;
+                 }
+             }
+         }
+         return results;
+     }
+     /**
+      * Get retry statistics for monitoring
+      */
+     static getRetryStats(results) {
+         const total = results.length;
+         const successful = results.filter((r) => r.success).length;
+         const failed = total - successful;
+         const successRate = total > 0 ? successful / total : 0;
+         const failuresByContext = {};
+         results
+             .filter((r) => !r.success)
+             .forEach((r) => {
+                 failuresByContext[r.context] = (failuresByContext[r.context] || 0) + 1;
+             });
+         return {
+             total,
+             successful,
+             failed,
+             successRate,
+             failuresByContext,
+         };
+     }
+     /**
+      * Create a circuit breaker pattern (simple implementation)
+      */
+     static createCircuitBreaker(fn, context, options = {}) {
+         const { failureThreshold = 5, recoveryTimeoutMs = 30000, retryOptions = {}, } = options;
+         let failureCount = 0;
+         let lastFailureTime = 0;
+         let state = "CLOSED";
+         return async (...args) => {
+             const now = Date.now();
+             // Check if we should attempt recovery
+             if (state === "OPEN" && now - lastFailureTime > recoveryTimeoutMs) {
+                 state = "HALF_OPEN";
+                 logger.debug(`Circuit breaker for ${context} entering HALF_OPEN state`);
+             }
+             // Reject immediately if circuit is open
+             if (state === "OPEN") {
+                 throw new Error(`Circuit breaker OPEN for ${context} (${failureCount} failures)`);
+             }
+             try {
+                 const result = await RetryManager.withRetry(() => fn(...args), context, retryOptions);
+                 // Success - reset circuit breaker
+                 if (state === "HALF_OPEN") {
+                     state = "CLOSED";
+                     failureCount = 0;
+                     logger.info(`Circuit breaker for ${context} recovered to CLOSED state`);
+                 }
+                 return result;
+             }
+             catch (error) {
+                 failureCount++;
+                 lastFailureTime = now;
+                 if (failureCount >= failureThreshold) {
+                     state = "OPEN";
+                     logger.error(`Circuit breaker OPEN for ${context} after ${failureCount} failures`);
+                 }
+                 throw error;
+             }
+         };
+     }
+ }
+ //# sourceMappingURL=RetryManager.js.map
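
For orientation, a minimal usage sketch of the new module, based only on the method signatures shown in the diff above. The import path and the fetchPullRequest helper are hypothetical; only the RetryManager class, its static methods, and the default option values are taken from the source.

// Hypothetical import path; the diff does not show how the package exposes RetryManager.
import { RetryManager } from "@juspay/yama";

// Placeholder for any async operation that can fail transiently (network errors, rate limits, ...).
async function fetchPullRequest(id) {
    /* call a remote API here */
}

// With the defaults above: up to 3 attempts, with delays of roughly 1000ms and 2000ms between them
// (baseDelayMs * backoffMultiplier^(attempt - 1), plus up to 100ms of jitter, capped at 10000ms).
const pr = await RetryManager.withRetry(
    () => fetchPullRequest(42),
    "fetchPullRequest",
);

// Wrap the same function behind the simple circuit breaker: once failureThreshold failures accumulate,
// calls reject immediately until recoveryTimeoutMs elapses, then a HALF_OPEN probe is allowed through.
const guardedFetch = RetryManager.createCircuitBreaker(
    fetchPullRequest,
    "fetchPullRequest",
    { failureThreshold: 5, recoveryTimeoutMs: 30000 },
);
const pr2 = await guardedFetch(42);
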
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@juspay/yama",
-   "version": "1.4.1",
+   "version": "1.5.1",
    "description": "Enterprise-grade Pull Request automation toolkit with AI-powered code review and description enhancement",
    "keywords": [
      "pr",
@@ -29,6 +29,7 @@ features:
  # Code Review Configuration
  codeReview:
    enabled: true
+   postSummaryComment: true # Post summary comment at the end of review (default: true)
    severityLevels: ["CRITICAL", "MAJOR", "MINOR", "SUGGESTION"]
    categories:
      [