@juspay/yama 1.5.0 → 1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,78 @@
+/**
+ * Retry Manager for Yama
+ * Provides intelligent retry logic with exponential backoff for handling transient failures
+ */
+export interface RetryOptions {
+    maxAttempts?: number;
+    baseDelayMs?: number;
+    maxDelayMs?: number;
+    backoffMultiplier?: number;
+    jitterMs?: number;
+    retryableErrors?: string[];
+}
+export interface RetryContext {
+    operation: string;
+    attempt: number;
+    maxAttempts: number;
+    lastError?: Error;
+    totalElapsed: number;
+}
+export declare class RetryManager {
+    private static readonly DEFAULT_OPTIONS;
+    /**
+     * Execute an operation with retry logic
+     */
+    static withRetry<T>(operation: () => Promise<T>, context: string, options?: RetryOptions): Promise<T>;
+    /**
+     * Check if an error is retryable based on error patterns
+     */
+    private static isRetryableError;
+    /**
+     * Calculate delay with exponential backoff and jitter
+     */
+    private static calculateDelay;
+    /**
+     * Sleep for specified milliseconds
+     */
+    private static sleep;
+    /**
+     * Create a retry wrapper function for a specific operation
+     */
+    static createRetryWrapper<T extends any[], R>(fn: (...args: T) => Promise<R>, context: string, options?: RetryOptions): (...args: T) => Promise<R>;
+    /**
+     * Batch retry operations with individual retry logic
+     */
+    static batchWithRetry<T>(operations: Array<{
+        fn: () => Promise<T>;
+        context: string;
+    }>, options?: RetryOptions & {
+        continueOnError?: boolean;
+    }): Promise<Array<{
+        success: boolean;
+        data?: T;
+        error?: Error;
+        context: string;
+    }>>;
+    /**
+     * Get retry statistics for monitoring
+     */
+    static getRetryStats(results: Array<{
+        success: boolean;
+        context: string;
+    }>): {
+        total: number;
+        successful: number;
+        failed: number;
+        successRate: number;
+        failuresByContext: Record<string, number>;
+    };
+    /**
+     * Create a circuit breaker pattern (simple implementation)
+     */
+    static createCircuitBreaker<T extends any[], R>(fn: (...args: T) => Promise<R>, context: string, options?: {
+        failureThreshold?: number;
+        recoveryTimeoutMs?: number;
+        retryOptions?: RetryOptions;
+    }): (...args: T) => Promise<R>;
+}
+//# sourceMappingURL=RetryManager.d.ts.map
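
A minimal usage sketch against the declarations above. It assumes RetryManager and RetryOptions are re-exported from the package root; the endpoint URL and the "fetchPullRequest" context label are illustrative, not part of the package.

// Illustrative caller only: the URL and context label below are placeholders.
// Assumes the class is re-exported from the package root; adjust the import path if not.
import { RetryManager, RetryOptions } from "@juspay/yama";

async function loadPullRequest(id: number): Promise<unknown> {
  const options: RetryOptions = { maxAttempts: 5, baseDelayMs: 500, maxDelayMs: 8000 };

  // One-off call: retried when the error matches a retryable pattern (timeouts, resets, rate limits, ...).
  return RetryManager.withRetry(
    () => fetch(`https://example.invalid/pr/${id}`).then((r) => r.json()),
    "fetchPullRequest",
    options,
  );
}

// Reusable wrapper: every invocation gets the same retry policy.
const loadWithRetry = RetryManager.createRetryWrapper(
  (id: number) => fetch(`https://example.invalid/pr/${id}`).then((r) => r.json()),
  "fetchPullRequest",
  { maxAttempts: 3 },
);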
@@ -0,0 +1,205 @@
+/**
+ * Retry Manager for Yama
+ * Provides intelligent retry logic with exponential backoff for handling transient failures
+ */
+import { logger } from "./Logger.js";
+export class RetryManager {
+    static DEFAULT_OPTIONS = {
+        maxAttempts: 3,
+        baseDelayMs: 1000,
+        maxDelayMs: 10000,
+        backoffMultiplier: 2.0,
+        jitterMs: 100,
+        retryableErrors: [
+            "provider_error",
+            "network",
+            "timeout",
+            "connection",
+            "econnreset",
+            "etimedout",
+            "enotfound",
+            "econnrefused",
+            "socket hang up",
+            "request timeout",
+            "service unavailable",
+            "bad gateway",
+            "gateway timeout",
+            "temporary failure",
+            "rate limit",
+        ],
+    };
+    /**
+     * Execute an operation with retry logic
+     */
+    static async withRetry(operation, context, options = {}) {
+        const opts = { ...RetryManager.DEFAULT_OPTIONS, ...options };
+        const startTime = Date.now();
+        let lastError;
+        for (let attempt = 1; attempt <= opts.maxAttempts; attempt++) {
+            try {
+                const result = await operation();
+                if (attempt > 1) {
+                    const elapsed = Date.now() - startTime;
+                    logger.info(`${context} succeeded on attempt ${attempt} after ${elapsed}ms`);
+                }
+                return result;
+            }
+            catch (error) {
+                lastError = error;
+                const isLastAttempt = attempt === opts.maxAttempts;
+                const isRetryable = RetryManager.isRetryableError(lastError, opts.retryableErrors);
+                const elapsed = Date.now() - startTime;
+                const retryContext = {
+                    operation: context,
+                    attempt,
+                    maxAttempts: opts.maxAttempts,
+                    lastError,
+                    totalElapsed: elapsed,
+                };
+                if (isLastAttempt || !isRetryable) {
+                    if (isLastAttempt) {
+                        logger.error(`${context} failed after ${opts.maxAttempts} attempts (${elapsed}ms total):`, lastError);
+                    }
+                    else {
+                        logger.error(`${context} failed with non-retryable error:`, lastError);
+                    }
+                    throw lastError;
+                }
+                const delay = RetryManager.calculateDelay(attempt, opts);
+                logger.warn(`${context} failed (attempt ${attempt}/${opts.maxAttempts}), retrying in ${delay}ms:`, lastError.message);
+                await RetryManager.sleep(delay);
+            }
+        }
+        // This should never be reached, but TypeScript requires it
+        throw (lastError ||
+            new Error(`${context} failed after ${opts.maxAttempts} attempts`));
+    }
+    /**
+     * Check if an error is retryable based on error patterns
+     */
+    static isRetryableError(error, retryablePatterns) {
+        if (!error) {
+            return false;
+        }
+        const errorMessage = error.message?.toLowerCase() || "";
+        const errorCode = error.code?.toLowerCase() || "";
+        const errorName = error.name?.toLowerCase() || "";
+        // Check if any retryable pattern matches the error
+        return retryablePatterns.some((pattern) => {
+            const lowerPattern = pattern.toLowerCase();
+            return (errorMessage.includes(lowerPattern) ||
+                errorCode.includes(lowerPattern) ||
+                errorName.includes(lowerPattern));
+        });
+    }
+    /**
+     * Calculate delay with exponential backoff and jitter
+     */
+    static calculateDelay(attempt, options) {
+        // Exponential backoff: baseDelay * (multiplier ^ (attempt - 1))
+        const exponentialDelay = options.baseDelayMs * Math.pow(options.backoffMultiplier, attempt - 1);
+        // Apply maximum delay cap
+        const cappedDelay = Math.min(exponentialDelay, options.maxDelayMs);
+        // Add jitter to prevent thundering herd
+        const jitter = Math.random() * options.jitterMs;
+        return Math.floor(cappedDelay + jitter);
+    }
+    /**
+     * Sleep for specified milliseconds
+     */
+    static sleep(ms) {
+        return new Promise((resolve) => setTimeout(resolve, ms));
+    }
+    /**
+     * Create a retry wrapper function for a specific operation
+     */
+    static createRetryWrapper(fn, context, options = {}) {
+        return async (...args) => {
+            return RetryManager.withRetry(() => fn(...args), context, options);
+        };
+    }
+    /**
+     * Batch retry operations with individual retry logic
+     */
+    static async batchWithRetry(operations, options = {}) {
+        const { continueOnError = true, ...retryOptions } = options;
+        const results = [];
+        for (const { fn, context } of operations) {
+            try {
+                const data = await RetryManager.withRetry(fn, context, retryOptions);
+                results.push({ success: true, data, context });
+            }
+            catch (error) {
+                const err = error;
+                results.push({ success: false, error: err, context });
+                if (!continueOnError) {
+                    throw error;
+                }
+            }
+        }
+        return results;
+    }
+    /**
+     * Get retry statistics for monitoring
+     */
+    static getRetryStats(results) {
+        const total = results.length;
+        const successful = results.filter((r) => r.success).length;
+        const failed = total - successful;
+        const successRate = total > 0 ? successful / total : 0;
+        const failuresByContext = {};
+        results
+            .filter((r) => !r.success)
+            .forEach((r) => {
+            failuresByContext[r.context] = (failuresByContext[r.context] || 0) + 1;
+        });
+        return {
+            total,
+            successful,
+            failed,
+            successRate,
+            failuresByContext,
+        };
+    }
+    /**
+     * Create a circuit breaker pattern (simple implementation)
+     */
+    static createCircuitBreaker(fn, context, options = {}) {
+        const { failureThreshold = 5, recoveryTimeoutMs = 30000, retryOptions = {}, } = options;
+        let failureCount = 0;
+        let lastFailureTime = 0;
+        let state = "CLOSED";
+        return async (...args) => {
+            const now = Date.now();
+            // Check if we should attempt recovery
+            if (state === "OPEN" && now - lastFailureTime > recoveryTimeoutMs) {
+                state = "HALF_OPEN";
+                logger.debug(`Circuit breaker for ${context} entering HALF_OPEN state`);
+            }
+            // Reject immediately if circuit is open
+            if (state === "OPEN") {
+                throw new Error(`Circuit breaker OPEN for ${context} (${failureCount} failures)`);
+            }
+            try {
+                const result = await RetryManager.withRetry(() => fn(...args), context, retryOptions);
+                // Success - reset circuit breaker
+                if (state === "HALF_OPEN") {
+                    state = "CLOSED";
+                    failureCount = 0;
+                    logger.info(`Circuit breaker for ${context} recovered to CLOSED state`);
+                }
+                return result;
+            }
+            catch (error) {
+                failureCount++;
+                lastFailureTime = now;
+                if (failureCount >= failureThreshold) {
+                    state = "OPEN";
+                    logger.error(`Circuit breaker OPEN for ${context} after ${failureCount} failures`);
+                }
+                throw error;
+            }
+        };
+    }
+}
+//# sourceMappingURL=RetryManager.js.map
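
A sketch of the batching and circuit-breaker helpers from the implementation above. The operations, thresholds, and the root import path are assumptions for illustration; only the RetryManager API itself comes from the package.

// Illustrative only: the operations below are stand-ins for real provider calls.
// Assumes RetryManager is re-exported from the package root; adjust the import path if not.
import { RetryManager } from "@juspay/yama";

async function demo(): Promise<void> {
  // Each operation retries independently; with continueOnError the batch keeps going after failures.
  const results = await RetryManager.batchWithRetry(
    [
      { fn: () => Promise.resolve("ok"), context: "healthy-call" },
      { fn: () => Promise.reject(new Error("request timeout")), context: "flaky-call" },
    ],
    { maxAttempts: 2, continueOnError: true },
  );

  // Aggregate outcomes for logging/monitoring.
  const stats = RetryManager.getRetryStats(results);
  console.log(`success rate: ${stats.successRate}`, stats.failuresByContext);

  // After 3 recorded failures the breaker opens and rejects immediately,
  // until the 10s recovery window passes and a HALF_OPEN probe is allowed.
  const guarded = RetryManager.createCircuitBreaker(
    () => Promise.reject(new Error("service unavailable")),
    "downstream-service",
    { failureThreshold: 3, recoveryTimeoutMs: 10000, retryOptions: { maxAttempts: 1 } },
  );
  await guarded().catch(() => undefined);
}

demo();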
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@juspay/yama",
-  "version": "1.5.0",
+  "version": "1.6.0",
   "description": "Enterprise-grade Pull Request automation toolkit with AI-powered code review and description enhancement",
   "keywords": [
     "pr",
@@ -109,6 +109,7 @@ features:
     enabled: true
     preserveContent: true # Always preserve existing content
     autoFormat: true
+
     requiredSections:
       - key: "changelog"
         name: "Changelog (Modules Modified)"
@@ -120,6 +121,16 @@ features:
         name: "CAC Config Or Service Config Changes"
         required: true
 
+    # Customize AI behavior:
+    systemPrompt: |
+      You are a Senior Staff Engineer writing comprehensive technical documentation.
+      Focus on architectural decisions, trade-offs, and long-term maintainability.
+
+    # Customize formatting:
+    enhancementInstructions: |
+      Keep descriptions under 300 words. Use bullet points, not paragraphs.
+      No emojis. Professional tone only. Include numbers and metrics.
+
     # NEW: Diff Strategy Configuration
     diffStrategy:
       enabled: true