@juspay/yama 1.1.0 → 1.1.1

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
@@ -2,8 +2,8 @@
  * Unified Context Gatherer - The foundation for all Yama operations
  * Gathers all necessary context once and reuses it across all operations
  */
- import { PRIdentifier, PRInfo, PRDiff, AIProviderConfig, DiffStrategyConfig } from '../types';
- import { BitbucketProvider } from './providers/BitbucketProvider';
+ import { PRIdentifier, PRInfo, PRDiff, AIProviderConfig, DiffStrategyConfig } from "../types/index.js";
+ import { BitbucketProvider } from "./providers/BitbucketProvider.js";
  export interface ProjectContext {
  memoryBank: {
  summary: string;
@@ -15,7 +15,7 @@ export interface ProjectContext {
  filesProcessed: number;
  }
  export interface DiffStrategy {
- strategy: 'whole' | 'file-by-file';
+ strategy: "whole" | "file-by-file";
  reason: string;
  fileCount: number;
  estimatedSize: string;
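Note: the two hunks above (apparently the generated ContextGatherer type declarations) carry the release's theme: 1.1.1 rebuilds the package as native ES modules and reformats the output (double quotes, trailing commas). Relative specifiers gain explicit `.js` extensions because Node's ESM resolver, unlike `require()`, does not infer extensions or `index` files. Below is an annotated restatement of the import changes; the paths are relative to the file inside the published package, so this is an excerpt rather than a standalone script:

```js
// 1.1.0 (CommonJS output): require() fills in extensions and index files.
// import { PRIdentifier } from '../types';
// import { BitbucketProvider } from './providers/BitbucketProvider';

// 1.1.1 (ESM output): the resolver takes specifiers literally.
import { PRIdentifier } from "../types/index.js";
import { BitbucketProvider } from "./providers/BitbucketProvider.js";
```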
@@ -1,18 +1,17 @@
- "use strict";
  /**
  * Unified Context Gatherer - The foundation for all Yama operations
  * Gathers all necessary context once and reuses it across all operations
  */
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.ContextGatherer = void 0;
- exports.createContextGatherer = createContextGatherer;
  // NeuroLink will be dynamically imported
- const types_1 = require("../types");
- const Logger_1 = require("../utils/Logger");
- const Cache_1 = require("../utils/Cache");
- class ContextGatherer {
+ import { ProviderError, } from "../types/index.js";
+ import { logger } from "../utils/Logger.js";
+ import { cache, Cache } from "../utils/Cache.js";
+ export class ContextGatherer {
+ neurolink;
+ bitbucketProvider;
+ aiConfig;
+ startTime = 0;
  constructor(bitbucketProvider, aiConfig) {
- this.startTime = 0;
  this.bitbucketProvider = bitbucketProvider;
  this.aiConfig = aiConfig;
  }
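This hunk is the substance of the release: the compiled ContextGatherer.js drops its CommonJS scaffolding (the `"use strict"` prologue, the `__esModule` marker, `exports` assignments, `require()` calls) for native `import`/`export`, and instance properties become class fields rather than constructor assignments. A simplified sketch of the two output shapes (not the package's full class):

```js
// 1.1.0 (CommonJS, simplified):
//   Object.defineProperty(exports, "__esModule", { value: true });
//   const Logger_1 = require("../utils/Logger");
//   class ContextGatherer { constructor() { this.startTime = 0; } }
//   exports.ContextGatherer = ContextGatherer;

// 1.1.1 (ESM, simplified): exported directly, with ES2022 class fields.
export class ContextGatherer {
  neurolink;     // declared up front instead of created on first assignment
  startTime = 0; // replaces `this.startTime = 0` in the constructor
}
```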
@@ -23,29 +22,29 @@ class ContextGatherer {
  this.startTime = Date.now();
  const contextId = this.generateContextId(identifier);
  const cacheHits = [];
- Logger_1.logger.phase('🔍 Gathering unified context...');
- Logger_1.logger.info(`Target: ${identifier.workspace}/${identifier.repository}`);
+ logger.phase("🔍 Gathering unified context...");
+ logger.info(`Target: ${identifier.workspace}/${identifier.repository}`);
  try {
  // Step 1: Find and get PR information
  const pr = await this.findAndGetPR(identifier, cacheHits, options.forceRefresh);
  const completeIdentifier = {
  ...identifier,
- pullRequestId: pr.id
+ pullRequestId: pr.id,
  };
  // Step 2: Gather project context (memory bank + clinerules)
  const projectContext = await this.gatherProjectContext(completeIdentifier, cacheHits, options.forceRefresh);
  // Step 3: Determine diff strategy based on file count and config
  const diffStrategy = this.determineDiffStrategy(pr.fileChanges || [], options.diffStrategyConfig);
- Logger_1.logger.info(`Diff strategy: ${diffStrategy.strategy} (${diffStrategy.reason})`);
+ logger.info(`Diff strategy: ${diffStrategy.strategy} (${diffStrategy.reason})`);
  // Step 4: Get diff data based on strategy (if requested)
  let prDiff;
  let fileDiffs;
  if (options.includeDiff !== false) {
- if (diffStrategy.strategy === 'whole') {
- prDiff = await this.getPRDiff(completeIdentifier, options.contextLines || 3, options.excludePatterns || ['*.lock', '*.svg'], cacheHits, options.forceRefresh);
+ if (diffStrategy.strategy === "whole") {
+ prDiff = await this.getPRDiff(completeIdentifier, options.contextLines || 3, options.excludePatterns || ["*.lock", "*.svg"], cacheHits, options.forceRefresh);
  }
  else {
- fileDiffs = await this.getFileByFileDiffs(completeIdentifier, pr.fileChanges || [], options.contextLines || 3, options.excludePatterns || ['*.lock', '*.svg'], cacheHits, options.forceRefresh);
+ fileDiffs = await this.getFileByFileDiffs(completeIdentifier, pr.fileChanges || [], options.contextLines || 3, options.excludePatterns || ["*.lock", "*.svg"], cacheHits, options.forceRefresh);
  }
  }
  const gatheringDuration = Date.now() - this.startTime;
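The hunk above is the four-step gathering pipeline (find the PR, gather project context, pick a diff strategy, fetch the diff); only quoting, trailing commas, and the logger references change. One detail worth seeing in isolation is the option defaulting, which uses `||` rather than `??`, so falsy values such as `0` also fall back to the defaults. A small self-contained sketch (the helper name is ours, not the package's):

```js
// Sketch of the defaulting pattern used in the pipeline above.
function resolveOptions(options = {}) {
  return {
    contextLines: options.contextLines || 3, // note: 0 falls back to 3 (|| not ??)
    excludePatterns: options.excludePatterns || ["*.lock", "*.svg"],
    includeDiff: options.includeDiff !== false, // only an explicit false disables the diff
  };
}

console.log(resolveOptions({}));                  // { contextLines: 3, ..., includeDiff: true }
console.log(resolveOptions({ contextLines: 0 })); // contextLines is 3, not 0
```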
@@ -59,108 +58,106 @@ class ContextGatherer {
  contextId,
  gatheredAt: new Date().toISOString(),
  cacheHits,
- gatheringDuration
+ gatheringDuration,
  };
- Logger_1.logger.success(`Context gathered in ${Math.round(gatheringDuration / 1000)}s ` +
+ logger.success(`Context gathered in ${Math.round(gatheringDuration / 1000)}s ` +
  `(${cacheHits.length} cache hits, ${diffStrategy.fileCount} files, ${diffStrategy.estimatedSize})`);
  // Cache the complete context for reuse
  this.cacheContext(context);
  return context;
  }
  catch (error) {
- Logger_1.logger.error(`Context gathering failed: ${error.message}`);
- throw new types_1.ProviderError(`Failed to gather context: ${error.message}`);
+ logger.error(`Context gathering failed: ${error.message}`);
+ throw new ProviderError(`Failed to gather context: ${error.message}`);
  }
  }
  /**
  * Step 1: Find PR and get detailed information
  */
  async findAndGetPR(identifier, cacheHits, forceRefresh = false) {
- Logger_1.logger.debug('Step 1: Finding and getting PR information...');
+ logger.debug("Step 1: Finding and getting PR information...");
  // If PR ID is provided, get details directly
  if (identifier.pullRequestId) {
- const cacheKey = Cache_1.Cache.keys.prInfo(identifier.workspace, identifier.repository, identifier.pullRequestId);
- if (!forceRefresh && Cache_1.cache.has(cacheKey)) {
- cacheHits.push('pr-details');
+ const cacheKey = Cache.keys.prInfo(identifier.workspace, identifier.repository, identifier.pullRequestId);
+ if (!forceRefresh && cache.has(cacheKey)) {
+ cacheHits.push("pr-details");
  }
- return Cache_1.cache.getOrSet(cacheKey, async () => {
- Logger_1.logger.debug(`Getting PR details: ${identifier.workspace}/${identifier.repository}#${identifier.pullRequestId}`);
+ return cache.getOrSet(cacheKey, async () => {
+ logger.debug(`Getting PR details: ${identifier.workspace}/${identifier.repository}#${identifier.pullRequestId}`);
  return await this.bitbucketProvider.getPRDetails(identifier);
- }, 1800 // 30 minutes
- );
+ }, 1800);
  }
  // If branch is provided, find PR first
  if (identifier.branch) {
- const branchCacheKey = Cache_1.Cache.keys.branchInfo(identifier.workspace, identifier.repository, identifier.branch);
- if (!forceRefresh && Cache_1.cache.has(branchCacheKey)) {
- cacheHits.push('branch-pr-lookup');
+ const branchCacheKey = Cache.keys.branchInfo(identifier.workspace, identifier.repository, identifier.branch);
+ if (!forceRefresh && cache.has(branchCacheKey)) {
+ cacheHits.push("branch-pr-lookup");
  }
- const prInfo = await Cache_1.cache.getOrSet(branchCacheKey, async () => {
- Logger_1.logger.debug(`Finding PR for branch: ${identifier.workspace}/${identifier.repository}@${identifier.branch}`);
+ const prInfo = await cache.getOrSet(branchCacheKey, async () => {
+ logger.debug(`Finding PR for branch: ${identifier.workspace}/${identifier.repository}@${identifier.branch}`);
  return await this.bitbucketProvider.findPRForBranch(identifier);
- }, 3600 // 1 hour
- );
+ }, 3600);
  // Now get full PR details
- const detailsCacheKey = Cache_1.Cache.keys.prInfo(identifier.workspace, identifier.repository, prInfo.id);
- if (!forceRefresh && Cache_1.cache.has(detailsCacheKey)) {
- cacheHits.push('pr-details-from-branch');
+ const detailsCacheKey = Cache.keys.prInfo(identifier.workspace, identifier.repository, prInfo.id);
+ if (!forceRefresh && cache.has(detailsCacheKey)) {
+ cacheHits.push("pr-details-from-branch");
  }
- return Cache_1.cache.getOrSet(detailsCacheKey, async () => {
+ return cache.getOrSet(detailsCacheKey, async () => {
  return await this.bitbucketProvider.getPRDetails({
  ...identifier,
- pullRequestId: prInfo.id
+ pullRequestId: prInfo.id,
  });
- }, 1800 // 30 minutes
- );
+ }, 1800);
  }
- throw new types_1.ProviderError('Either pullRequestId or branch must be provided');
+ throw new ProviderError("Either pullRequestId or branch must be provided");
  }
  /**
  * Step 2: Gather project context (memory bank + clinerules)
  */
  async gatherProjectContext(identifier, cacheHits, forceRefresh = false) {
- Logger_1.logger.debug('Step 2: Gathering project context...');
- const cacheKey = Cache_1.Cache.keys.projectContext(identifier.workspace, identifier.repository, identifier.branch || 'main');
- if (!forceRefresh && Cache_1.cache.has(cacheKey)) {
- cacheHits.push('project-context');
+ logger.debug("Step 2: Gathering project context...");
+ const cacheKey = Cache.keys.projectContext(identifier.workspace, identifier.repository, identifier.branch || "main");
+ if (!forceRefresh && cache.has(cacheKey)) {
+ cacheHits.push("project-context");
  }
- return Cache_1.cache.getOrSet(cacheKey, async () => {
+ return cache.getOrSet(cacheKey, async () => {
  try {
  // Get memory-bank directory listing
- const memoryBankFiles = await this.bitbucketProvider.listDirectoryContent(identifier.workspace, identifier.repository, 'memory-bank', identifier.branch || 'main');
+ const memoryBankFiles = await this.bitbucketProvider.listDirectoryContent(identifier.workspace, identifier.repository, "memory-bank", identifier.branch || "main");
  if (!memoryBankFiles.length) {
- Logger_1.logger.debug('No memory-bank directory found');
+ logger.debug("No memory-bank directory found");
  return {
  memoryBank: {
- summary: 'No project context available',
- projectContext: 'None',
- patterns: 'None',
- standards: 'None'
+ summary: "No project context available",
+ projectContext: "None",
+ patterns: "None",
+ standards: "None",
  },
- clinerules: '',
- filesProcessed: 0
+ clinerules: "",
+ filesProcessed: 0,
  };
  }
  // Get content of each memory bank file
  const fileContents = {};
- const files = memoryBankFiles.filter(f => f.type === 'file');
+ const files = memoryBankFiles.filter((f) => f.type === "file");
  for (const file of files) {
  try {
- fileContents[file.name] = await this.bitbucketProvider.getFileContent(identifier.workspace, identifier.repository, `memory-bank/${file.name}`, identifier.branch || 'main');
- Logger_1.logger.debug(`✓ Got content for: ${file.name}`);
+ fileContents[file.name] =
+ await this.bitbucketProvider.getFileContent(identifier.workspace, identifier.repository, `memory-bank/${file.name}`, identifier.branch || "main");
+ logger.debug(`✓ Got content for: ${file.name}`);
  }
  catch (error) {
- Logger_1.logger.debug(`Could not read file ${file.name}: ${error.message}`);
+ logger.debug(`Could not read file ${file.name}: ${error.message}`);
  }
  }
  // Get .clinerules file
- let clinerules = '';
+ let clinerules = "";
  try {
- clinerules = await this.bitbucketProvider.getFileContent(identifier.workspace, identifier.repository, '.clinerules', identifier.branch || 'main');
- Logger_1.logger.debug('✓ Got .clinerules content');
+ clinerules = await this.bitbucketProvider.getFileContent(identifier.workspace, identifier.repository, ".clinerules", identifier.branch || "main");
+ logger.debug("✓ Got .clinerules content");
  }
  catch (error) {
- Logger_1.logger.debug(`Could not read .clinerules: ${error.message}`);
+ logger.debug(`Could not read .clinerules: ${error.message}`);
  }
  // Parse and summarize with AI
  const contextData = await this.parseProjectContextWithAI(fileContents, clinerules);
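Throughout this hunk the cache and logger calls lose their `Cache_1.`/`Logger_1.` namespace prefixes (an artifact of the CJS build), and the inline TTL comments (`// 30 minutes`, `// 1 hour`) disappear with the reformat; the TTL values themselves are unchanged. The recurring `cache.getOrSet(key, producer, ttlSeconds)` call is a read-through cache. The package's real `Cache` class is not part of this diff, so the following is an illustrative reimplementation of the pattern, not its actual code:

```js
// Minimal read-through TTL cache sketching the getOrSet contract used above.
class TtlCache {
  #store = new Map();

  has(key) {
    const entry = this.#store.get(key);
    if (!entry) return false;
    if (Date.now() > entry.expiresAt) {
      this.#store.delete(key); // expired entries count as misses
      return false;
    }
    return true;
  }

  async getOrSet(key, produce, ttlSeconds) {
    if (this.has(key)) return this.#store.get(key).value;
    const value = await produce(); // producer runs only on a miss
    this.#store.set(key, { value, expiresAt: Date.now() + ttlSeconds * 1000 });
    return value;
  }
}

const cache = new TtlCache();
const pr = await cache.getOrSet("pr:42", async () => ({ id: 42 }), 1800); // cached for 30 minutes
```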
@@ -171,27 +168,26 @@ Patterns: ${contextData.patterns}
  Standards: ${contextData.standards}`,
  projectContext: contextData.projectContext,
  patterns: contextData.patterns,
- standards: contextData.standards
+ standards: contextData.standards,
  },
  clinerules,
- filesProcessed: Object.keys(fileContents).length
+ filesProcessed: Object.keys(fileContents).length,
  };
  }
  catch (error) {
- Logger_1.logger.debug(`Failed to gather project context: ${error.message}`);
+ logger.debug(`Failed to gather project context: ${error.message}`);
  return {
  memoryBank: {
- summary: 'Context gathering failed',
- projectContext: 'Failed to load',
- patterns: 'Failed to load',
- standards: 'Failed to load'
+ summary: "Context gathering failed",
+ projectContext: "Failed to load",
+ patterns: "Failed to load",
+ standards: "Failed to load",
  },
- clinerules: '',
- filesProcessed: 0
+ clinerules: "",
+ filesProcessed: 0,
  };
  }
- }, 7200 // 2 hours - project context changes less frequently
- );
+ }, 7200);
  }
  /**
  * Parse project context with AI
@@ -213,50 +209,49 @@ Extract and summarize the content and return ONLY this JSON format:
  try {
  // Initialize NeuroLink with eval-based dynamic import
  if (!this.neurolink) {
- const dynamicImport = eval('(specifier) => import(specifier)');
- const { NeuroLink } = await dynamicImport('@juspay/neurolink');
+ const { NeuroLink } = await import("@juspay/neurolink");
  this.neurolink = new NeuroLink();
  }
  // Context for project analysis
  const aiContext = {
- operation: 'project-context-analysis',
+ operation: "project-context-analysis",
  fileCount: Object.keys(fileContents).length,
  hasClinerules: !!clinerules,
- analysisType: 'memory-bank-synthesis'
+ analysisType: "memory-bank-synthesis",
  };
  const result = await this.neurolink.generate({
  input: { text: prompt },
- systemPrompt: 'You are an Expert Project Analyst. Synthesize project context from documentation and configuration files to help AI understand the codebase architecture, patterns, and business domain.',
+ systemPrompt: "You are an Expert Project Analyst. Synthesize project context from documentation and configuration files to help AI understand the codebase architecture, patterns, and business domain.",
  provider: this.aiConfig.provider,
  model: this.aiConfig.model,
  temperature: 0.3,
  maxTokens: Math.max(this.aiConfig.maxTokens || 0, 500000), // Quality first - always use higher limit
- timeout: '10m', // Allow longer processing for quality
+ timeout: "10m", // Allow longer processing for quality
  context: aiContext,
  enableAnalytics: this.aiConfig.enableAnalytics || true,
- enableEvaluation: false // Not needed for context synthesis
+ enableEvaluation: false, // Not needed for context synthesis
  });
  // Log context analysis
  if (result.analytics) {
- Logger_1.logger.debug(`Context Analysis - Files: ${Object.keys(fileContents).length}, Provider: ${result.provider}`);
+ logger.debug(`Context Analysis - Files: ${Object.keys(fileContents).length}, Provider: ${result.provider}`);
  }
  // Modern NeuroLink returns { content: string }
  const response = this.parseAIResponse(result);
  if (response.success) {
  return {
- projectContext: response.projectContext || 'None',
- patterns: response.patterns || 'None',
- standards: response.standards || 'None'
+ projectContext: response.projectContext || "None",
+ patterns: response.patterns || "None",
+ standards: response.standards || "None",
  };
  }
- throw new Error('AI parsing failed');
+ throw new Error("AI parsing failed");
  }
  catch (error) {
- Logger_1.logger.warn(`AI context parsing failed, using fallback: ${error.message}`);
+ logger.warn(`AI context parsing failed, using fallback: ${error.message}`);
  return {
- projectContext: 'AI parsing unavailable',
- patterns: 'Standard patterns assumed',
- standards: 'Standard quality requirements'
+ projectContext: "AI parsing unavailable",
+ patterns: "Standard patterns assumed",
+ standards: "Standard quality requirements",
  };
  }
  }
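The eval change above deserves a note. In the 1.1.0 CommonJS build, `import()` was wrapped in `eval` so the TypeScript compiler would not downlevel it to `require()`, which cannot load an ESM-only dependency like `@juspay/neurolink`. In a native ESM build the indirection is unnecessary, so 1.1.1 calls `import()` directly (the now-stale comment "eval-based dynamic import" survives as an unchanged context line). Side by side, assuming `@juspay/neurolink` is installed:

```js
// 1.1.0 workaround: hide import() from the CJS-targeting compiler.
// const dynamicImport = eval("(specifier) => import(specifier)");
// const { NeuroLink } = await dynamicImport("@juspay/neurolink");

// 1.1.1: a plain dynamic import, lazily loading the provider on first use.
const { NeuroLink } = await import("@juspay/neurolink");
const neurolink = new NeuroLink();
```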
@@ -267,94 +262,96 @@ Extract and summarize the content and return ONLY this JSON format:
  const fileCount = fileChanges.length;
  // Get threshold values from config or use defaults
  const wholeDiffMaxFiles = config?.thresholds?.wholeDiffMaxFiles ?? 2;
- // Note: fileByFileMinFiles is currently same as wholeDiffMaxFiles + 1
+ // Note: fileByFileMinFiles is currently same as wholeDiffMaxFiles + 1
  // but kept separate for future flexibility
  // Check if force strategy is configured
- if (config?.forceStrategy && config.forceStrategy !== 'auto') {
+ if (config?.forceStrategy && config.forceStrategy !== "auto") {
  return {
  strategy: config.forceStrategy,
  reason: `Forced by configuration`,
  fileCount,
- estimatedSize: this.estimateDiffSize(fileCount)
+ estimatedSize: this.estimateDiffSize(fileCount),
  };
  }
  // Determine strategy based on thresholds
- let strategy = 'whole';
- let reason = '';
+ let strategy = "whole";
+ let reason = "";
  if (fileCount === 0) {
- strategy = 'whole';
- reason = 'No files to analyze';
+ strategy = "whole";
+ reason = "No files to analyze";
  }
  else if (fileCount <= wholeDiffMaxFiles) {
- strategy = 'whole';
+ strategy = "whole";
  reason = `${fileCount} file(s) ≤ ${wholeDiffMaxFiles} (threshold), using whole diff`;
  }
  else {
- strategy = 'file-by-file';
+ strategy = "file-by-file";
  reason = `${fileCount} file(s) > ${wholeDiffMaxFiles} (threshold), using file-by-file`;
  }
  return {
  strategy,
  reason,
  fileCount,
- estimatedSize: this.estimateDiffSize(fileCount)
+ estimatedSize: this.estimateDiffSize(fileCount),
  };
  }
  /**
  * Estimate diff size based on file count
  */
  estimateDiffSize(fileCount) {
- if (fileCount === 0)
- return '0 KB';
- if (fileCount <= 2)
- return 'Small (~5-20 KB)';
- if (fileCount <= 5)
- return 'Small (~10-50 KB)';
- if (fileCount <= 20)
- return 'Medium (~50-200 KB)';
- if (fileCount <= 50)
- return 'Large (~200-500 KB)';
- return 'Very Large (>500 KB)';
+ if (fileCount === 0) {
+ return "0 KB";
+ }
+ if (fileCount <= 2) {
+ return "Small (~5-20 KB)";
+ }
+ if (fileCount <= 5) {
+ return "Small (~10-50 KB)";
+ }
+ if (fileCount <= 20) {
+ return "Medium (~50-200 KB)";
+ }
+ if (fileCount <= 50) {
+ return "Large (~200-500 KB)";
+ }
+ return "Very Large (>500 KB)";
  }
  /**
  * Get whole PR diff
  */
  async getPRDiff(identifier, contextLines, excludePatterns, cacheHits, forceRefresh = false) {
- Logger_1.logger.debug('Getting whole PR diff...');
- const cacheKey = Cache_1.Cache.keys.prDiff(identifier.workspace, identifier.repository, identifier.pullRequestId);
- if (!forceRefresh && Cache_1.cache.has(cacheKey)) {
- cacheHits.push('pr-diff');
+ logger.debug("Getting whole PR diff...");
+ const cacheKey = Cache.keys.prDiff(identifier.workspace, identifier.repository, identifier.pullRequestId);
+ if (!forceRefresh && cache.has(cacheKey)) {
+ cacheHits.push("pr-diff");
  }
- return Cache_1.cache.getOrSet(cacheKey, async () => {
+ return cache.getOrSet(cacheKey, async () => {
  return await this.bitbucketProvider.getPRDiff(identifier, contextLines, excludePatterns);
- }, 1800 // 30 minutes
- );
+ }, 1800);
  }
  /**
  * Get file-by-file diffs for large changesets
  */
  async getFileByFileDiffs(identifier, fileChanges, contextLines, excludePatterns, cacheHits, forceRefresh = false) {
- Logger_1.logger.debug(`Getting file-by-file diffs for ${fileChanges.length} files...`);
+ logger.debug(`Getting file-by-file diffs for ${fileChanges.length} files...`);
  const fileDiffs = new Map();
  // Filter out excluded files
- const filteredFiles = fileChanges.filter(file => !excludePatterns.some(pattern => new RegExp(pattern.replace(/\*/g, '.*')).test(file)));
- Logger_1.logger.debug(`Processing ${filteredFiles.length} files after exclusions`);
+ const filteredFiles = fileChanges.filter((file) => !excludePatterns.some((pattern) => new RegExp(pattern.replace(/\*/g, ".*")).test(file)));
+ logger.debug(`Processing ${filteredFiles.length} files after exclusions`);
  // Process files in batches for better performance
  const batchSize = 5;
  for (let i = 0; i < filteredFiles.length; i += batchSize) {
  const batch = filteredFiles.slice(i, i + batchSize);
  const batchPromises = batch.map(async (file) => {
  const fileCacheKey = `file-diff:${identifier.workspace}:${identifier.repository}:${identifier.pullRequestId}:${file}`;
- if (!forceRefresh && Cache_1.cache.has(fileCacheKey)) {
+ if (!forceRefresh && cache.has(fileCacheKey)) {
  cacheHits.push(`file-diff-${file}`);
  }
- return Cache_1.cache.getOrSet(fileCacheKey, async () => {
+ return cache.getOrSet(fileCacheKey, async () => {
  // Use include_patterns to get diff for just this file
- const fileDiff = await this.bitbucketProvider.getPRDiff(identifier, contextLines, excludePatterns, [file] // Include patterns with single file
- );
+ const fileDiff = await this.bitbucketProvider.getPRDiff(identifier, contextLines, excludePatterns, [file]);
  return fileDiff.diff;
- }, 1800 // 30 minutes
- );
+ }, 1800);
  });
  const batchResults = await Promise.all(batchPromises);
  batch.forEach((file, index) => {
@@ -362,10 +359,10 @@ Extract and summarize the content and return ONLY this JSON format:
  });
  // Small delay between batches to avoid overwhelming the API
  if (i + batchSize < filteredFiles.length) {
- await new Promise(resolve => setTimeout(resolve, 500));
+ await new Promise((resolve) => setTimeout(resolve, 500));
  }
  }
- Logger_1.logger.debug(`✓ Got diffs for ${fileDiffs.size} files`);
+ logger.debug(`✓ Got diffs for ${fileDiffs.size} files`);
  return fileDiffs;
  }
  /**
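The file-by-file path in the two hunks above fetches diffs in batches of five with a 500 ms pause between batches (skipped after the last one) to avoid hammering the Bitbucket API. A generic, self-contained sketch of that pattern, not the package's exact code:

```js
// Batches run sequentially; items within a batch run concurrently.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function mapInBatches(items, batchSize, delayMs, worker) {
  const results = new Map();
  for (let i = 0; i < items.length; i += batchSize) {
    const batch = items.slice(i, i + batchSize);
    const settled = await Promise.all(batch.map(worker));
    batch.forEach((item, index) => results.set(item, settled[index]));
    if (i + batchSize < items.length) await sleep(delayMs); // no pause after the final batch
  }
  return results;
}

const diffs = await mapInBatches(["a.ts", "b.ts", "c.ts"], 5, 500, async (file) => `diff for ${file}`);
```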
@@ -373,12 +370,12 @@ Extract and summarize the content and return ONLY this JSON format:
  */
  cacheContext(context) {
  const contextCacheKey = `context:${context.contextId}`;
- Cache_1.cache.set(contextCacheKey, context, 1800); // 30 minutes
+ cache.set(contextCacheKey, context, 1800); // 30 minutes
  // Tag it for easy invalidation
- Cache_1.cache.setWithTags(contextCacheKey, context, [
+ cache.setWithTags(contextCacheKey, context, [
  `workspace:${context.identifier.workspace}`,
  `repository:${context.identifier.repository}`,
- `pr:${context.identifier.pullRequestId}`
+ `pr:${context.identifier.pullRequestId}`,
  ], 1800);
  }
  /**
@@ -387,9 +384,9 @@ Extract and summarize the content and return ONLY this JSON format:
  async getCachedContext(identifier) {
  const contextId = this.generateContextId(identifier);
  const contextCacheKey = `context:${contextId}`;
- const cached = Cache_1.cache.get(contextCacheKey);
+ const cached = cache.get(contextCacheKey);
  if (cached) {
- Logger_1.logger.debug(`✓ Using cached context: ${contextId}`);
+ logger.debug(`✓ Using cached context: ${contextId}`);
  return cached;
  }
  return null;
@@ -398,9 +395,9 @@ Extract and summarize the content and return ONLY this JSON format:
  * Invalidate context cache for a specific PR
  */
  invalidateContext(identifier) {
- Cache_1.cache.invalidateTag(`pr:${identifier.pullRequestId}`);
- Cache_1.cache.invalidateTag(`workspace:${identifier.workspace}`);
- Logger_1.logger.debug(`Context cache invalidated for PR ${identifier.pullRequestId}`);
+ cache.invalidateTag(`pr:${identifier.pullRequestId}`);
+ cache.invalidateTag(`workspace:${identifier.workspace}`);
+ logger.debug(`Context cache invalidated for PR ${identifier.pullRequestId}`);
  }
  /**
  * Generate unique context ID
@@ -409,11 +406,11 @@ Extract and summarize the content and return ONLY this JSON format:
  const parts = [
  identifier.workspace,
  identifier.repository,
- identifier.pullRequestId || identifier.branch || 'unknown'
+ identifier.pullRequestId || identifier.branch || "unknown",
  ];
- return Buffer.from(parts.join(':'))
- .toString('base64')
- .replace(/[+/=]/g, '')
+ return Buffer.from(parts.join(":"))
+ .toString("base64")
+ .replace(/[+/=]/g, "")
  .substring(0, 16);
  }
  /**
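generateContextId (the hunk above) builds a short cache key by base64-encoding `workspace:repository:id`, stripping base64 punctuation, and truncating to 16 characters. A worked example with made-up names:

```js
const parts = ["team", "service", 42];
const id = Buffer.from(parts.join(":")) // "team:service:42"
  .toString("base64")                   // "dGVhbTpzZXJ2aWNlOjQy"
  .replace(/[+/=]/g, "")                // remove +, / and = (a no-op for this input)
  .substring(0, 16);
console.log(id); // "dGVhbTpzZXJ2aWNl"
```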
@@ -421,16 +418,16 @@ Extract and summarize the content and return ONLY this JSON format:
  */
  parseAIResponse(result) {
  try {
- const responseText = result.content || result.text || result.response || '';
+ const responseText = result.content || result.text || result.response || "";
  if (!responseText) {
- return { success: false, error: 'Empty response' };
+ return { success: false, error: "Empty response" };
  }
  // Find JSON in response
  const jsonMatch = responseText.match(/\{[\s\S]*\}/);
  if (jsonMatch) {
  return JSON.parse(jsonMatch[0]);
  }
- return { success: false, error: 'No JSON found' };
+ return { success: false, error: "No JSON found" };
  }
  catch (error) {
  return { success: false, error: error.message };
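parseAIResponse (above) tolerates several NeuroLink response shapes (`content`, `text`, `response`) and extracts the span from the first `{` to the last `}` before parsing, so JSON wrapped in surrounding prose still comes through. A self-contained demonstration:

```js
const reply = 'Sure, here is the analysis:\n{ "success": true, "patterns": "None" }\nLet me know if you need more.';
const jsonMatch = reply.match(/\{[\s\S]*\}/); // greedy: first "{" through last "}"
console.log(JSON.parse(jsonMatch[0]));        // { success: true, patterns: 'None' }
```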
@@ -442,14 +439,13 @@ Extract and summarize the content and return ONLY this JSON format:
  getStats() {
  return {
  lastGatheringDuration: this.startTime ? Date.now() - this.startTime : 0,
- cacheStats: Cache_1.cache.stats(),
- cacheHitRatio: Cache_1.cache.getHitRatio()
+ cacheStats: cache.stats(),
+ cacheHitRatio: cache.getHitRatio(),
  };
  }
  }
- exports.ContextGatherer = ContextGatherer;
  // Export factory function
- function createContextGatherer(bitbucketProvider, aiConfig) {
+ export function createContextGatherer(bitbucketProvider, aiConfig) {
  return new ContextGatherer(bitbucketProvider, aiConfig);
  }
  //# sourceMappingURL=ContextGatherer.js.map
@@ -2,7 +2,7 @@
  * Yama - Unified orchestrator class
  * The main class that coordinates all operations using shared context
  */
- import { GuardianConfig, OperationOptions, ProcessResult, StreamUpdate, StreamOptions, ReviewOptions, EnhancementOptions } from '../types';
+ import { GuardianConfig, OperationOptions, ProcessResult, StreamUpdate, StreamOptions, ReviewOptions, EnhancementOptions } from "../types/index.js";
  export declare class Guardian {
  private config;
  private bitbucketProvider;