@juspay/yama 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,455 @@
1
+ "use strict";
2
+ /**
3
+ * Unified Context Gatherer - The foundation for all Yama operations
4
+ * Gathers all necessary context once and reuses it across all operations
5
+ */
6
+ Object.defineProperty(exports, "__esModule", { value: true });
7
+ exports.ContextGatherer = void 0;
8
+ exports.createContextGatherer = createContextGatherer;
9
+ // NeuroLink will be dynamically imported
10
+ const types_1 = require("../types");
11
+ const Logger_1 = require("../utils/Logger");
12
+ const Cache_1 = require("../utils/Cache");
13
class ContextGatherer {
    /**
     * @param {object} bitbucketProvider - Provider used for every Bitbucket call
     *   (PR lookup, diffs, file/directory content). Opaque here; assumed to expose
     *   getPRDetails/findPRForBranch/getPRDiff/listDirectoryContent/getFileContent.
     * @param {object} aiConfig - AI settings (provider, model, maxTokens,
     *   enableAnalytics) forwarded to NeuroLink.
     */
    constructor(bitbucketProvider, aiConfig) {
        // Timestamp of the most recent gatherContext() call; 0 until first use.
        this.startTime = 0;
        this.bitbucketProvider = bitbucketProvider;
        this.aiConfig = aiConfig;
    }
    /**
     * Main context gathering method - used by all operations.
     *
     * Resolves the PR, loads project context (memory bank + .clinerules),
     * picks a diff strategy, optionally fetches diff data, and returns a
     * single context object that downstream operations can share.
     *
     * @param {object} identifier - { workspace, repository, pullRequestId? , branch? }.
     * @param {object} [options] - { forceRefresh?, includeDiff?, contextLines?,
     *   excludePatterns?, diffStrategyConfig? }.
     * @returns {Promise<object>} the gathered context (also cached by contextId).
     * @throws {types_1.ProviderError} when any gathering step fails.
     */
    async gatherContext(identifier, options = {}) {
        this.startTime = Date.now();
        const contextId = this.generateContextId(identifier);
        const cacheHits = [];
        Logger_1.logger.phase('🔍 Gathering unified context...');
        Logger_1.logger.info(`Target: ${identifier.workspace}/${identifier.repository}`);
        try {
            // Step 1: Find and get PR information
            const pr = await this.findAndGetPR(identifier, cacheHits, options.forceRefresh);
            const completeIdentifier = {
                ...identifier,
                pullRequestId: pr.id
            };
            // Step 2: Gather project context (memory bank + clinerules)
            const projectContext = await this.gatherProjectContext(completeIdentifier, cacheHits, options.forceRefresh);
            // Step 3: Determine diff strategy based on file count and config
            const diffStrategy = this.determineDiffStrategy(pr.fileChanges || [], options.diffStrategyConfig);
            Logger_1.logger.info(`Diff strategy: ${diffStrategy.strategy} (${diffStrategy.reason})`);
            // Step 4: Get diff data based on strategy (if requested).
            // includeDiff defaults to true; only an explicit `false` skips diffs.
            let prDiff;
            let fileDiffs;
            if (options.includeDiff !== false) {
                if (diffStrategy.strategy === 'whole') {
                    prDiff = await this.getPRDiff(completeIdentifier, options.contextLines || 3, options.excludePatterns || ['*.lock', '*.svg'], cacheHits, options.forceRefresh);
                }
                else {
                    fileDiffs = await this.getFileByFileDiffs(completeIdentifier, pr.fileChanges || [], options.contextLines || 3, options.excludePatterns || ['*.lock', '*.svg'], cacheHits, options.forceRefresh);
                }
            }
            const gatheringDuration = Date.now() - this.startTime;
            const context = {
                pr,
                identifier: completeIdentifier,
                projectContext,
                diffStrategy,
                prDiff,
                fileDiffs,
                contextId,
                gatheredAt: new Date().toISOString(),
                cacheHits,
                gatheringDuration
            };
            Logger_1.logger.success(`Context gathered in ${Math.round(gatheringDuration / 1000)}s ` +
                `(${cacheHits.length} cache hits, ${diffStrategy.fileCount} files, ${diffStrategy.estimatedSize})`);
            // Cache the complete context for reuse
            this.cacheContext(context);
            return context;
        }
        catch (error) {
            Logger_1.logger.error(`Context gathering failed: ${error.message}`);
            throw new types_1.ProviderError(`Failed to gather context: ${error.message}`);
        }
    }
    /**
     * Step 1: Find PR and get detailed information.
     *
     * Resolution order: a provided pullRequestId wins; otherwise the PR is
     * looked up by branch. Cache hits are recorded in `cacheHits` for stats.
     *
     * @throws {types_1.ProviderError} when neither pullRequestId nor branch is given.
     */
    async findAndGetPR(identifier, cacheHits, forceRefresh = false) {
        Logger_1.logger.debug('Step 1: Finding and getting PR information...');
        // If PR ID is provided, get details directly
        if (identifier.pullRequestId) {
            const cacheKey = Cache_1.Cache.keys.prInfo(identifier.workspace, identifier.repository, identifier.pullRequestId);
            if (!forceRefresh && Cache_1.cache.has(cacheKey)) {
                cacheHits.push('pr-details');
            }
            return Cache_1.cache.getOrSet(cacheKey, async () => {
                Logger_1.logger.debug(`Getting PR details: ${identifier.workspace}/${identifier.repository}#${identifier.pullRequestId}`);
                return await this.bitbucketProvider.getPRDetails(identifier);
            }, 1800 // 30 minutes
            );
        }
        // If branch is provided, find PR first
        if (identifier.branch) {
            const branchCacheKey = Cache_1.Cache.keys.branchInfo(identifier.workspace, identifier.repository, identifier.branch);
            if (!forceRefresh && Cache_1.cache.has(branchCacheKey)) {
                cacheHits.push('branch-pr-lookup');
            }
            const prInfo = await Cache_1.cache.getOrSet(branchCacheKey, async () => {
                Logger_1.logger.debug(`Finding PR for branch: ${identifier.workspace}/${identifier.repository}@${identifier.branch}`);
                return await this.bitbucketProvider.findPRForBranch(identifier);
            }, 3600 // 1 hour
            );
            // Now get full PR details
            const detailsCacheKey = Cache_1.Cache.keys.prInfo(identifier.workspace, identifier.repository, prInfo.id);
            if (!forceRefresh && Cache_1.cache.has(detailsCacheKey)) {
                cacheHits.push('pr-details-from-branch');
            }
            return Cache_1.cache.getOrSet(detailsCacheKey, async () => {
                return await this.bitbucketProvider.getPRDetails({
                    ...identifier,
                    pullRequestId: prInfo.id
                });
            }, 1800 // 30 minutes
            );
        }
        throw new types_1.ProviderError('Either pullRequestId or branch must be provided');
    }
    /**
     * Step 2: Gather project context (memory bank + clinerules).
     *
     * Best-effort by design: a missing memory-bank directory, unreadable files,
     * or a failed AI synthesis all degrade to a placeholder context rather than
     * failing the whole gathering run.
     */
    async gatherProjectContext(identifier, cacheHits, forceRefresh = false) {
        Logger_1.logger.debug('Step 2: Gathering project context...');
        const cacheKey = Cache_1.Cache.keys.projectContext(identifier.workspace, identifier.repository, identifier.branch || 'main');
        if (!forceRefresh && Cache_1.cache.has(cacheKey)) {
            cacheHits.push('project-context');
        }
        return Cache_1.cache.getOrSet(cacheKey, async () => {
            try {
                // Get memory-bank directory listing
                const memoryBankFiles = await this.bitbucketProvider.listDirectoryContent(identifier.workspace, identifier.repository, 'memory-bank', identifier.branch || 'main');
                if (!memoryBankFiles.length) {
                    Logger_1.logger.debug('No memory-bank directory found');
                    return {
                        memoryBank: {
                            summary: 'No project context available',
                            projectContext: 'None',
                            patterns: 'None',
                            standards: 'None'
                        },
                        clinerules: '',
                        filesProcessed: 0
                    };
                }
                // Get content of each memory bank file; unreadable files are
                // skipped (logged at debug level) rather than aborting.
                const fileContents = {};
                const files = memoryBankFiles.filter(f => f.type === 'file');
                for (const file of files) {
                    try {
                        fileContents[file.name] = await this.bitbucketProvider.getFileContent(identifier.workspace, identifier.repository, `memory-bank/${file.name}`, identifier.branch || 'main');
                        Logger_1.logger.debug(`✓ Got content for: ${file.name}`);
                    }
                    catch (error) {
                        Logger_1.logger.debug(`Could not read file ${file.name}: ${error.message}`);
                    }
                }
                // Get .clinerules file (optional; empty string when absent)
                let clinerules = '';
                try {
                    clinerules = await this.bitbucketProvider.getFileContent(identifier.workspace, identifier.repository, '.clinerules', identifier.branch || 'main');
                    Logger_1.logger.debug('✓ Got .clinerules content');
                }
                catch (error) {
                    Logger_1.logger.debug(`Could not read .clinerules: ${error.message}`);
                }
                // Parse and summarize with AI
                const contextData = await this.parseProjectContextWithAI(fileContents, clinerules);
                return {
                    memoryBank: {
                        summary: `Project Context: ${contextData.projectContext}
Patterns: ${contextData.patterns}
Standards: ${contextData.standards}`,
                        projectContext: contextData.projectContext,
                        patterns: contextData.patterns,
                        standards: contextData.standards
                    },
                    clinerules,
                    filesProcessed: Object.keys(fileContents).length
                };
            }
            catch (error) {
                Logger_1.logger.debug(`Failed to gather project context: ${error.message}`);
                return {
                    memoryBank: {
                        summary: 'Context gathering failed',
                        projectContext: 'Failed to load',
                        patterns: 'Failed to load',
                        standards: 'Failed to load'
                    },
                    clinerules: '',
                    filesProcessed: 0
                };
            }
        }, 7200 // 2 hours - project context changes less frequently
        );
    }
    /**
     * Parse project context with AI.
     *
     * Sends the memory-bank files and .clinerules to NeuroLink and expects a
     * JSON summary back. Falls back to placeholder strings on any failure so
     * callers never have to handle an AI error here.
     */
    async parseProjectContextWithAI(fileContents, clinerules) {
        const prompt = `Parse and summarize these memory bank files and .clinerules:

Memory Bank Files: ${JSON.stringify(fileContents, null, 2)}

.clinerules Content: ${clinerules}

Extract and summarize the content and return ONLY this JSON format:
{
  "success": true,
  "projectContext": "Summary of project purpose, architecture, key components...",
  "patterns": "Summary of coding patterns, best practices, conventions...",
  "standards": "Summary of quality standards, review criteria..."
}`;
        try {
            // Initialize NeuroLink lazily with an eval-based dynamic import.
            // NOTE: the eval exists to stop the compiler from rewriting import()
            // into require() for this ESM-only dependency — do not "simplify" it.
            if (!this.neurolink) {
                const dynamicImport = eval('(specifier) => import(specifier)');
                const { NeuroLink } = await dynamicImport('@juspay/neurolink');
                this.neurolink = new NeuroLink();
            }
            // Context for project analysis
            const aiContext = {
                operation: 'project-context-analysis',
                fileCount: Object.keys(fileContents).length,
                hasClinerules: !!clinerules,
                analysisType: 'memory-bank-synthesis'
            };
            const result = await this.neurolink.generate({
                input: { text: prompt },
                systemPrompt: 'You are an Expert Project Analyst. Synthesize project context from documentation and configuration files to help AI understand the codebase architecture, patterns, and business domain.',
                provider: this.aiConfig.provider,
                model: this.aiConfig.model,
                temperature: 0.3,
                maxTokens: Math.max(this.aiConfig.maxTokens || 0, 500000), // Quality first - always use higher limit
                timeout: '10m', // Allow longer processing for quality
                context: aiContext,
                // BUGFIX: was `|| true`, which is always true and silently ignored
                // an explicit `enableAnalytics: false`. `??` defaults only when
                // the option is null/undefined.
                enableAnalytics: this.aiConfig.enableAnalytics ?? true,
                enableEvaluation: false // Not needed for context synthesis
            });
            // Log context analysis
            if (result.analytics) {
                Logger_1.logger.debug(`Context Analysis - Files: ${Object.keys(fileContents).length}, Provider: ${result.provider}`);
            }
            // Modern NeuroLink returns { content: string }
            const response = this.parseAIResponse(result);
            if (response.success) {
                return {
                    projectContext: response.projectContext || 'None',
                    patterns: response.patterns || 'None',
                    standards: response.standards || 'None'
                };
            }
            throw new Error('AI parsing failed');
        }
        catch (error) {
            Logger_1.logger.warn(`AI context parsing failed, using fallback: ${error.message}`);
            return {
                projectContext: 'AI parsing unavailable',
                patterns: 'Standard patterns assumed',
                standards: 'Standard quality requirements'
            };
        }
    }
    /**
     * Step 3: Determine optimal diff strategy.
     *
     * Pure function of the changed-file list and optional config:
     * a configured forceStrategy (other than 'auto') wins; otherwise file
     * count vs. the wholeDiffMaxFiles threshold decides.
     */
    determineDiffStrategy(fileChanges, config) {
        const fileCount = fileChanges.length;
        // Get threshold values from config or use defaults
        const wholeDiffMaxFiles = config?.thresholds?.wholeDiffMaxFiles ?? 2;
        // Note: fileByFileMinFiles is currently same as wholeDiffMaxFiles + 1
        // but kept separate for future flexibility
        // Check if force strategy is configured
        if (config?.forceStrategy && config.forceStrategy !== 'auto') {
            return {
                strategy: config.forceStrategy,
                reason: `Forced by configuration`,
                fileCount,
                estimatedSize: this.estimateDiffSize(fileCount)
            };
        }
        // Determine strategy based on thresholds
        let strategy = 'whole';
        let reason = '';
        if (fileCount === 0) {
            strategy = 'whole';
            reason = 'No files to analyze';
        }
        else if (fileCount <= wholeDiffMaxFiles) {
            strategy = 'whole';
            reason = `${fileCount} file(s) ≤ ${wholeDiffMaxFiles} (threshold), using whole diff`;
        }
        else {
            strategy = 'file-by-file';
            reason = `${fileCount} file(s) > ${wholeDiffMaxFiles} (threshold), using file-by-file`;
        }
        return {
            strategy,
            reason,
            fileCount,
            estimatedSize: this.estimateDiffSize(fileCount)
        };
    }
    /**
     * Estimate diff size based on file count (coarse human-readable label).
     */
    estimateDiffSize(fileCount) {
        if (fileCount === 0)
            return '0 KB';
        if (fileCount <= 2)
            return 'Small (~5-20 KB)';
        if (fileCount <= 5)
            return 'Small (~10-50 KB)';
        if (fileCount <= 20)
            return 'Medium (~50-200 KB)';
        if (fileCount <= 50)
            return 'Large (~200-500 KB)';
        return 'Very Large (>500 KB)';
    }
    /**
     * Get whole PR diff (cached 30 minutes).
     */
    async getPRDiff(identifier, contextLines, excludePatterns, cacheHits, forceRefresh = false) {
        Logger_1.logger.debug('Getting whole PR diff...');
        const cacheKey = Cache_1.Cache.keys.prDiff(identifier.workspace, identifier.repository, identifier.pullRequestId);
        if (!forceRefresh && Cache_1.cache.has(cacheKey)) {
            cacheHits.push('pr-diff');
        }
        return Cache_1.cache.getOrSet(cacheKey, async () => {
            return await this.bitbucketProvider.getPRDiff(identifier, contextLines, excludePatterns);
        }, 1800 // 30 minutes
        );
    }
    /**
     * Get file-by-file diffs for large changesets.
     *
     * Files matching any excludePattern glob are skipped; remaining files are
     * fetched in batches of 5 with a 500 ms pause between batches to avoid
     * hammering the API.
     *
     * @returns {Promise<Map<string, string>>} file path -> diff text.
     */
    async getFileByFileDiffs(identifier, fileChanges, contextLines, excludePatterns, cacheHits, forceRefresh = false) {
        Logger_1.logger.debug(`Getting file-by-file diffs for ${fileChanges.length} files...`);
        const fileDiffs = new Map();
        // BUGFIX: convert the glob to a regex safely. The previous version only
        // replaced '*' with '.*' without escaping metacharacters or anchoring,
        // so '*.lock' compiled to /.*.lock/ and wrongly matched names like
        // 'block'. Escape regex specials first, then expand '*', then anchor.
        const globToRegExp = (pattern) => new RegExp('^' +
            pattern.replace(/[.+?^${}()|[\]\\]/g, '\\$&').replace(/\*/g, '.*') +
            '$');
        // Filter out excluded files
        const filteredFiles = fileChanges.filter(file => !excludePatterns.some(pattern => globToRegExp(pattern).test(file)));
        Logger_1.logger.debug(`Processing ${filteredFiles.length} files after exclusions`);
        // Process files in batches for better performance
        const batchSize = 5;
        for (let i = 0; i < filteredFiles.length; i += batchSize) {
            const batch = filteredFiles.slice(i, i + batchSize);
            const batchPromises = batch.map(async (file) => {
                const fileCacheKey = `file-diff:${identifier.workspace}:${identifier.repository}:${identifier.pullRequestId}:${file}`;
                if (!forceRefresh && Cache_1.cache.has(fileCacheKey)) {
                    cacheHits.push(`file-diff-${file}`);
                }
                return Cache_1.cache.getOrSet(fileCacheKey, async () => {
                    // Use include_patterns to get diff for just this file
                    const fileDiff = await this.bitbucketProvider.getPRDiff(identifier, contextLines, excludePatterns, [file] // Include patterns with single file
                    );
                    return fileDiff.diff;
                }, 1800 // 30 minutes
                );
            });
            const batchResults = await Promise.all(batchPromises);
            batch.forEach((file, index) => {
                fileDiffs.set(file, batchResults[index]);
            });
            // Small delay between batches to avoid overwhelming the API
            if (i + batchSize < filteredFiles.length) {
                await new Promise(resolve => setTimeout(resolve, 500));
            }
        }
        Logger_1.logger.debug(`✓ Got diffs for ${fileDiffs.size} files`);
        return fileDiffs;
    }
    /**
     * Cache the complete context for reuse (30 minutes).
     */
    cacheContext(context) {
        const contextCacheKey = `context:${context.contextId}`;
        Cache_1.cache.set(contextCacheKey, context, 1800); // 30 minutes
        // Tag it for easy invalidation. NOTE(review): this stores under the
        // same key as the set() above — presumably setWithTags only adds tag
        // bookkeeping; confirm against the Cache implementation.
        Cache_1.cache.setWithTags(contextCacheKey, context, [
            `workspace:${context.identifier.workspace}`,
            `repository:${context.identifier.repository}`,
            `pr:${context.identifier.pullRequestId}`
        ], 1800);
    }
    /**
     * Get cached context if available; null when absent or expired.
     */
    async getCachedContext(identifier) {
        const contextId = this.generateContextId(identifier);
        const contextCacheKey = `context:${contextId}`;
        const cached = Cache_1.cache.get(contextCacheKey);
        if (cached) {
            Logger_1.logger.debug(`✓ Using cached context: ${contextId}`);
            return cached;
        }
        return null;
    }
    /**
     * Invalidate context cache for a specific PR (by PR and workspace tags).
     */
    invalidateContext(identifier) {
        Cache_1.cache.invalidateTag(`pr:${identifier.pullRequestId}`);
        Cache_1.cache.invalidateTag(`workspace:${identifier.workspace}`);
        Logger_1.logger.debug(`Context cache invalidated for PR ${identifier.pullRequestId}`);
    }
    /**
     * Generate unique context ID: base64 of "workspace:repo:pr-or-branch",
     * stripped of '+/=' and truncated to 16 chars (deterministic per target).
     */
    generateContextId(identifier) {
        const parts = [
            identifier.workspace,
            identifier.repository,
            identifier.pullRequestId || identifier.branch || 'unknown'
        ];
        return Buffer.from(parts.join(':'))
            .toString('base64')
            .replace(/[+/=]/g, '')
            .substring(0, 16);
    }
    /**
     * Parse AI response utility: extracts the first {...} span from whichever
     * of content/text/response the provider populated and JSON-parses it.
     * Never throws; failures come back as { success: false, error }.
     */
    parseAIResponse(result) {
        try {
            const responseText = result.content || result.text || result.response || '';
            if (!responseText) {
                return { success: false, error: 'Empty response' };
            }
            // Find JSON in response
            const jsonMatch = responseText.match(/\{[\s\S]*\}/);
            if (jsonMatch) {
                return JSON.parse(jsonMatch[0]);
            }
            return { success: false, error: 'No JSON found' };
        }
        catch (error) {
            return { success: false, error: error.message };
        }
    }
    /**
     * Get gathering statistics. NOTE: lastGatheringDuration is the elapsed
     * time since the most recent gatherContext() started, not the duration of
     * a completed run (use context.gatheringDuration for that).
     */
    getStats() {
        return {
            lastGatheringDuration: this.startTime ? Date.now() - this.startTime : 0,
            cacheStats: Cache_1.cache.stats(),
            cacheHitRatio: Cache_1.cache.getHitRatio()
        };
    }
}
450
exports.ContextGatherer = ContextGatherer;
/**
 * Factory helper: builds a ContextGatherer wired to the given Bitbucket
 * provider and AI configuration. Equivalent to calling the constructor
 * directly; exported for callers that prefer a function-style entry point.
 */
function createContextGatherer(bitbucketProvider, aiConfig) {
    const gatherer = new ContextGatherer(bitbucketProvider, aiConfig);
    return gatherer;
}
455
+ //# sourceMappingURL=ContextGatherer.js.map
@@ -0,0 +1,80 @@
1
+ /**
2
+ * Yama - Unified orchestrator class
3
+ * The main class that coordinates all operations using shared context
4
+ */
5
+ import { GuardianConfig, OperationOptions, ProcessResult, StreamUpdate, StreamOptions, ReviewOptions, EnhancementOptions } from '../types';
6
export declare class Guardian {
    private config;
    private bitbucketProvider;
    private contextGatherer;
    private codeReviewer;
    private descriptionEnhancer;
    private neurolink;
    private initialized;
    constructor(config?: Partial<GuardianConfig>);
    /**
     * Initialize Guardian with configuration.
     * @param configPath - optional path to a config file; omitted means defaults.
     */
    initialize(configPath?: string): Promise<void>;
    /**
     * Main method: Process PR with multiple operations using unified context.
     */
    processPR(options: OperationOptions): Promise<ProcessResult>;
    /**
     * Streaming version of processPR yielding real-time StreamUpdate events.
     */
    processPRStream(options: OperationOptions, _streamOptions?: StreamOptions): AsyncIterableIterator<StreamUpdate>;
    /**
     * Gather unified context (cached and reusable across operations).
     */
    private gatherUnifiedContext;
    /**
     * Execute an individual operation using the shared context.
     */
    private executeOperation;
    /**
     * Execute code review using shared context.
     */
    private executeCodeReview;
    /**
     * Execute description enhancement using shared context.
     */
    private executeDescriptionEnhancement;
    /**
     * Code review operation (standalone; kept for backwards compatibility).
     */
    reviewCode(options: ReviewOptions): Promise<any>;
    /**
     * Description enhancement operation (standalone; backwards compatibility).
     */
    enhanceDescription(options: EnhancementOptions): Promise<any>;
    /**
     * Health check for all components.
     * @returns overall `healthy` flag plus per-component details.
     */
    healthCheck(): Promise<{
        healthy: boolean;
        components: any;
    }>;
    /**
     * Get comprehensive statistics.
     */
    getStats(): any;
    /**
     * Clear all caches.
     */
    clearCache(): void;
    /**
     * Ensure Guardian is initialized before an operation runs.
     */
    private ensureInitialized;
    /**
     * Shutdown Guardian gracefully.
     */
    shutdown(): Promise<void>;
}
/** Factory for a Guardian with an optional partial configuration. */
export declare function createGuardian(config?: Partial<GuardianConfig>): Guardian;
/** Shared default Guardian instance exported by the package. */
export declare const guardian: Guardian;
80
+ //# sourceMappingURL=Guardian.d.ts.map