@juspay/yama 1.0.0 → 1.1.1

This diff shows the content of the publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
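Read together, the hunks below show the compiled output switching from CommonJS to native ES modules between 1.0.0 and 1.1.1: the "use strict" / Object.defineProperty(exports, ...) / require() boilerplate is replaced by import/export statements with explicit .js extensions, constructor property assignments become class field declarations, and an eval-based dynamic import workaround is dropped in favor of a plain import(). The sketch below is illustrative only (the Example class and the Logger path are placeholders, not code from the package); it simply contrasts the two emit styles:

    // 1.0.0-style CommonJS emit (illustrative)
    // "use strict";
    // Object.defineProperty(exports, "__esModule", { value: true });
    // const Logger_1 = require("../utils/Logger");
    // class Example {
    //     constructor() { this.count = 0; }
    //     log() { Logger_1.logger.info(`count: ${this.count}`); }
    // }
    // exports.Example = Example;

    // 1.1.1-style ESM emit: explicit .js extension, class field, export keyword
    import { logger } from "../utils/Logger.js";
    export class Example {
        count = 0;
        log() { logger.info(`count: ${this.count}`); }
    }

A module-format switch like this can matter to consumers that load the compiled files directly with require(), which is worth keeping in mind when reading the rest of the diff.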
@@ -1,18 +1,17 @@
- "use strict";
  /**
  * Unified Context Gatherer - The foundation for all Yama operations
  * Gathers all necessary context once and reuses it across all operations
  */
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.ContextGatherer = void 0;
- exports.createContextGatherer = createContextGatherer;
  // NeuroLink will be dynamically imported
- const types_1 = require("../types");
- const Logger_1 = require("../utils/Logger");
- const Cache_1 = require("../utils/Cache");
- class ContextGatherer {
+ import { ProviderError, } from "../types/index.js";
+ import { logger } from "../utils/Logger.js";
+ import { cache, Cache } from "../utils/Cache.js";
+ export class ContextGatherer {
+ neurolink;
+ bitbucketProvider;
+ aiConfig;
+ startTime = 0;
  constructor(bitbucketProvider, aiConfig) {
- this.startTime = 0;
  this.bitbucketProvider = bitbucketProvider;
  this.aiConfig = aiConfig;
  }
@@ -23,8 +22,8 @@ class ContextGatherer {
  this.startTime = Date.now();
  const contextId = this.generateContextId(identifier);
  const cacheHits = [];
- Logger_1.logger.phase("🔍 Gathering unified context...");
- Logger_1.logger.info(`Target: ${identifier.workspace}/${identifier.repository}`);
+ logger.phase("🔍 Gathering unified context...");
+ logger.info(`Target: ${identifier.workspace}/${identifier.repository}`);
  try {
  // Step 1: Find and get PR information
  const pr = await this.findAndGetPR(identifier, cacheHits, options.forceRefresh);
@@ -36,7 +35,7 @@ class ContextGatherer {
  const projectContext = await this.gatherProjectContext(completeIdentifier, cacheHits, options.forceRefresh);
  // Step 3: Determine diff strategy based on file count and config
  const diffStrategy = this.determineDiffStrategy(pr.fileChanges || [], options.diffStrategyConfig);
- Logger_1.logger.info(`Diff strategy: ${diffStrategy.strategy} (${diffStrategy.reason})`);
+ logger.info(`Diff strategy: ${diffStrategy.strategy} (${diffStrategy.reason})`);
  // Step 4: Get diff data based on strategy (if requested)
  let prDiff;
  let fileDiffs;
@@ -61,72 +60,72 @@ class ContextGatherer {
  cacheHits,
  gatheringDuration,
  };
- Logger_1.logger.success(`Context gathered in ${Math.round(gatheringDuration / 1000)}s ` +
+ logger.success(`Context gathered in ${Math.round(gatheringDuration / 1000)}s ` +
  `(${cacheHits.length} cache hits, ${diffStrategy.fileCount} files, ${diffStrategy.estimatedSize})`);
  // Cache the complete context for reuse
  this.cacheContext(context);
  return context;
  }
  catch (error) {
- Logger_1.logger.error(`Context gathering failed: ${error.message}`);
- throw new types_1.ProviderError(`Failed to gather context: ${error.message}`);
+ logger.error(`Context gathering failed: ${error.message}`);
+ throw new ProviderError(`Failed to gather context: ${error.message}`);
  }
  }
  /**
  * Step 1: Find PR and get detailed information
  */
  async findAndGetPR(identifier, cacheHits, forceRefresh = false) {
- Logger_1.logger.debug("Step 1: Finding and getting PR information...");
+ logger.debug("Step 1: Finding and getting PR information...");
  // If PR ID is provided, get details directly
  if (identifier.pullRequestId) {
- const cacheKey = Cache_1.Cache.keys.prInfo(identifier.workspace, identifier.repository, identifier.pullRequestId);
- if (!forceRefresh && Cache_1.cache.has(cacheKey)) {
+ const cacheKey = Cache.keys.prInfo(identifier.workspace, identifier.repository, identifier.pullRequestId);
+ if (!forceRefresh && cache.has(cacheKey)) {
  cacheHits.push("pr-details");
  }
- return Cache_1.cache.getOrSet(cacheKey, async () => {
- Logger_1.logger.debug(`Getting PR details: ${identifier.workspace}/${identifier.repository}#${identifier.pullRequestId}`);
+ return cache.getOrSet(cacheKey, async () => {
+ logger.debug(`Getting PR details: ${identifier.workspace}/${identifier.repository}#${identifier.pullRequestId}`);
  return await this.bitbucketProvider.getPRDetails(identifier);
  }, 1800);
  }
  // If branch is provided, find PR first
  if (identifier.branch) {
- const branchCacheKey = Cache_1.Cache.keys.branchInfo(identifier.workspace, identifier.repository, identifier.branch);
- if (!forceRefresh && Cache_1.cache.has(branchCacheKey)) {
+ const branchCacheKey = Cache.keys.branchInfo(identifier.workspace, identifier.repository, identifier.branch);
+ if (!forceRefresh && cache.has(branchCacheKey)) {
  cacheHits.push("branch-pr-lookup");
  }
- const prInfo = await Cache_1.cache.getOrSet(branchCacheKey, async () => {
- Logger_1.logger.debug(`Finding PR for branch: ${identifier.workspace}/${identifier.repository}@${identifier.branch}`);
+ const prInfo = await cache.getOrSet(branchCacheKey, async () => {
+ logger.debug(`Finding PR for branch: ${identifier.workspace}/${identifier.repository}@${identifier.branch}`);
  return await this.bitbucketProvider.findPRForBranch(identifier);
  }, 3600);
  // Now get full PR details
- const detailsCacheKey = Cache_1.Cache.keys.prInfo(identifier.workspace, identifier.repository, prInfo.id);
- if (!forceRefresh && Cache_1.cache.has(detailsCacheKey)) {
+ const detailsCacheKey = Cache.keys.prInfo(identifier.workspace, identifier.repository, prInfo.id);
+ if (!forceRefresh && cache.has(detailsCacheKey)) {
  cacheHits.push("pr-details-from-branch");
  }
- return Cache_1.cache.getOrSet(detailsCacheKey, async () => {
+ return cache.getOrSet(detailsCacheKey, async () => {
  return await this.bitbucketProvider.getPRDetails({
  ...identifier,
  pullRequestId: prInfo.id,
  });
  }, 1800);
  }
- throw new types_1.ProviderError("Either pullRequestId or branch must be provided");
+ throw new ProviderError("Either pullRequestId or branch must be provided");
  }
  /**
  * Step 2: Gather project context (memory bank + clinerules)
  */
  async gatherProjectContext(identifier, cacheHits, forceRefresh = false) {
- Logger_1.logger.debug("Step 2: Gathering project context...");
- const cacheKey = Cache_1.Cache.keys.projectContext(identifier.workspace, identifier.repository, identifier.branch || "main");
- if (!forceRefresh && Cache_1.cache.has(cacheKey)) {
+ logger.debug("Step 2: Gathering project context...");
+ const cacheKey = Cache.keys.projectContext(identifier.workspace, identifier.repository, identifier.branch || "main");
+ if (!forceRefresh && cache.has(cacheKey)) {
  cacheHits.push("project-context");
  }
- return Cache_1.cache.getOrSet(cacheKey, async () => {
+ return cache.getOrSet(cacheKey, async () => {
  try {
  // Get memory-bank directory listing
  const memoryBankFiles = await this.bitbucketProvider.listDirectoryContent(identifier.workspace, identifier.repository, "memory-bank", identifier.branch || "main");
  if (!memoryBankFiles.length) {
- Logger_1.logger.debug("No memory-bank directory found");
+ logger.debug("No memory-bank directory found");
  return {
  memoryBank: {
  summary: "No project context available",
@@ -145,20 +144,20 @@ class ContextGatherer {
  try {
  fileContents[file.name] =
  await this.bitbucketProvider.getFileContent(identifier.workspace, identifier.repository, `memory-bank/${file.name}`, identifier.branch || "main");
- Logger_1.logger.debug(`✓ Got content for: ${file.name}`);
+ logger.debug(`✓ Got content for: ${file.name}`);
  }
  catch (error) {
- Logger_1.logger.debug(`Could not read file ${file.name}: ${error.message}`);
+ logger.debug(`Could not read file ${file.name}: ${error.message}`);
  }
  }
  // Get .clinerules file
  let clinerules = "";
  try {
  clinerules = await this.bitbucketProvider.getFileContent(identifier.workspace, identifier.repository, ".clinerules", identifier.branch || "main");
- Logger_1.logger.debug("✓ Got .clinerules content");
+ logger.debug("✓ Got .clinerules content");
  }
  catch (error) {
- Logger_1.logger.debug(`Could not read .clinerules: ${error.message}`);
+ logger.debug(`Could not read .clinerules: ${error.message}`);
  }
  // Parse and summarize with AI
  const contextData = await this.parseProjectContextWithAI(fileContents, clinerules);
@@ -176,7 +175,7 @@ Standards: ${contextData.standards}`,
  };
  }
  catch (error) {
- Logger_1.logger.debug(`Failed to gather project context: ${error.message}`);
+ logger.debug(`Failed to gather project context: ${error.message}`);
  return {
  memoryBank: {
  summary: "Context gathering failed",
@@ -210,8 +209,7 @@ Extract and summarize the content and return ONLY this JSON format:
  try {
  // Initialize NeuroLink with eval-based dynamic import
  if (!this.neurolink) {
- const dynamicImport = eval("(specifier) => import(specifier)");
- const { NeuroLink } = await dynamicImport("@juspay/neurolink");
+ const { NeuroLink } = await import("@juspay/neurolink");
  this.neurolink = new NeuroLink();
  }
  // Context for project analysis
@@ -235,7 +233,7 @@ Extract and summarize the content and return ONLY this JSON format:
  });
  // Log context analysis
  if (result.analytics) {
- Logger_1.logger.debug(`Context Analysis - Files: ${Object.keys(fileContents).length}, Provider: ${result.provider}`);
+ logger.debug(`Context Analysis - Files: ${Object.keys(fileContents).length}, Provider: ${result.provider}`);
  }
  // Modern NeuroLink returns { content: string }
  const response = this.parseAIResponse(result);
@@ -249,7 +247,7 @@ Extract and summarize the content and return ONLY this JSON format:
  throw new Error("AI parsing failed");
  }
  catch (error) {
- Logger_1.logger.warn(`AI context parsing failed, using fallback: ${error.message}`);
+ logger.warn(`AI context parsing failed, using fallback: ${error.message}`);
  return {
  projectContext: "AI parsing unavailable",
  patterns: "Standard patterns assumed",
@@ -322,12 +320,12 @@ Extract and summarize the content and return ONLY this JSON format:
  * Get whole PR diff
  */
  async getPRDiff(identifier, contextLines, excludePatterns, cacheHits, forceRefresh = false) {
- Logger_1.logger.debug("Getting whole PR diff...");
- const cacheKey = Cache_1.Cache.keys.prDiff(identifier.workspace, identifier.repository, identifier.pullRequestId);
- if (!forceRefresh && Cache_1.cache.has(cacheKey)) {
+ logger.debug("Getting whole PR diff...");
+ const cacheKey = Cache.keys.prDiff(identifier.workspace, identifier.repository, identifier.pullRequestId);
+ if (!forceRefresh && cache.has(cacheKey)) {
  cacheHits.push("pr-diff");
  }
- return Cache_1.cache.getOrSet(cacheKey, async () => {
+ return cache.getOrSet(cacheKey, async () => {
  return await this.bitbucketProvider.getPRDiff(identifier, contextLines, excludePatterns);
  }, 1800);
  }
@@ -335,21 +333,21 @@ Extract and summarize the content and return ONLY this JSON format:
  * Get file-by-file diffs for large changesets
  */
  async getFileByFileDiffs(identifier, fileChanges, contextLines, excludePatterns, cacheHits, forceRefresh = false) {
- Logger_1.logger.debug(`Getting file-by-file diffs for ${fileChanges.length} files...`);
+ logger.debug(`Getting file-by-file diffs for ${fileChanges.length} files...`);
  const fileDiffs = new Map();
  // Filter out excluded files
  const filteredFiles = fileChanges.filter((file) => !excludePatterns.some((pattern) => new RegExp(pattern.replace(/\*/g, ".*")).test(file)));
- Logger_1.logger.debug(`Processing ${filteredFiles.length} files after exclusions`);
+ logger.debug(`Processing ${filteredFiles.length} files after exclusions`);
  // Process files in batches for better performance
  const batchSize = 5;
  for (let i = 0; i < filteredFiles.length; i += batchSize) {
  const batch = filteredFiles.slice(i, i + batchSize);
  const batchPromises = batch.map(async (file) => {
  const fileCacheKey = `file-diff:${identifier.workspace}:${identifier.repository}:${identifier.pullRequestId}:${file}`;
- if (!forceRefresh && Cache_1.cache.has(fileCacheKey)) {
+ if (!forceRefresh && cache.has(fileCacheKey)) {
  cacheHits.push(`file-diff-${file}`);
  }
- return Cache_1.cache.getOrSet(fileCacheKey, async () => {
+ return cache.getOrSet(fileCacheKey, async () => {
  // Use include_patterns to get diff for just this file
  const fileDiff = await this.bitbucketProvider.getPRDiff(identifier, contextLines, excludePatterns, [file]);
  return fileDiff.diff;
@@ -364,7 +362,7 @@ Extract and summarize the content and return ONLY this JSON format:
  await new Promise((resolve) => setTimeout(resolve, 500));
  }
  }
- Logger_1.logger.debug(`✓ Got diffs for ${fileDiffs.size} files`);
+ logger.debug(`✓ Got diffs for ${fileDiffs.size} files`);
  return fileDiffs;
  }
  /**
@@ -372,9 +370,9 @@ Extract and summarize the content and return ONLY this JSON format:
  */
  cacheContext(context) {
  const contextCacheKey = `context:${context.contextId}`;
- Cache_1.cache.set(contextCacheKey, context, 1800); // 30 minutes
+ cache.set(contextCacheKey, context, 1800); // 30 minutes
  // Tag it for easy invalidation
- Cache_1.cache.setWithTags(contextCacheKey, context, [
+ cache.setWithTags(contextCacheKey, context, [
  `workspace:${context.identifier.workspace}`,
  `repository:${context.identifier.repository}`,
  `pr:${context.identifier.pullRequestId}`,
@@ -386,9 +384,9 @@ Extract and summarize the content and return ONLY this JSON format:
  async getCachedContext(identifier) {
  const contextId = this.generateContextId(identifier);
  const contextCacheKey = `context:${contextId}`;
- const cached = Cache_1.cache.get(contextCacheKey);
+ const cached = cache.get(contextCacheKey);
  if (cached) {
- Logger_1.logger.debug(`✓ Using cached context: ${contextId}`);
+ logger.debug(`✓ Using cached context: ${contextId}`);
  return cached;
  }
  return null;
@@ -397,9 +395,9 @@ Extract and summarize the content and return ONLY this JSON format:
  * Invalidate context cache for a specific PR
  */
  invalidateContext(identifier) {
- Cache_1.cache.invalidateTag(`pr:${identifier.pullRequestId}`);
- Cache_1.cache.invalidateTag(`workspace:${identifier.workspace}`);
- Logger_1.logger.debug(`Context cache invalidated for PR ${identifier.pullRequestId}`);
+ cache.invalidateTag(`pr:${identifier.pullRequestId}`);
+ cache.invalidateTag(`workspace:${identifier.workspace}`);
+ logger.debug(`Context cache invalidated for PR ${identifier.pullRequestId}`);
  }
  /**
  * Generate unique context ID
@@ -441,14 +439,13 @@ Extract and summarize the content and return ONLY this JSON format:
  getStats() {
  return {
  lastGatheringDuration: this.startTime ? Date.now() - this.startTime : 0,
- cacheStats: Cache_1.cache.stats(),
- cacheHitRatio: Cache_1.cache.getHitRatio(),
+ cacheStats: cache.stats(),
+ cacheHitRatio: cache.getHitRatio(),
  };
  }
  }
- exports.ContextGatherer = ContextGatherer;
  // Export factory function
- function createContextGatherer(bitbucketProvider, aiConfig) {
+ export function createContextGatherer(bitbucketProvider, aiConfig) {
  return new ContextGatherer(bitbucketProvider, aiConfig);
  }
  //# sourceMappingURL=ContextGatherer.js.map
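A note on the NeuroLink loading change above (the same change appears in Guardian.js further down): the 1.0.0 CommonJS build wrapped import() in eval, a common workaround that keeps the TypeScript CommonJS emit from rewriting import() into require(), as the old inline comment in Guardian.js ("to bypass TypeScript compilation") indicates. Now that the output is emitted as ESM, 1.1.1 calls import() directly. Side by side, mirroring the lines in the diff:

    // 1.0.0 (CommonJS emit): hide import() from the compiler behind eval
    // const dynamicImport = eval("(specifier) => import(specifier)");
    // const { NeuroLink } = await dynamicImport("@juspay/neurolink");

    // 1.1.1 (ESM emit): native dynamic import, no eval needed
    const { NeuroLink } = await import("@juspay/neurolink");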
@@ -2,7 +2,7 @@
  * Yama - Unified orchestrator class
  * The main class that coordinates all operations using shared context
  */
- import { GuardianConfig, OperationOptions, ProcessResult, StreamUpdate, StreamOptions, ReviewOptions, EnhancementOptions } from "../types";
+ import { GuardianConfig, OperationOptions, ProcessResult, StreamUpdate, StreamOptions, ReviewOptions, EnhancementOptions } from "../types/index.js";
  export declare class Guardian {
  private config;
  private bitbucketProvider;
@@ -1,22 +1,24 @@
- "use strict";
  /**
  * Yama - Unified orchestrator class
  * The main class that coordinates all operations using shared context
  */
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.guardian = exports.Guardian = void 0;
- exports.createGuardian = createGuardian;
- const types_1 = require("../types");
- const BitbucketProvider_1 = require("./providers/BitbucketProvider");
- const ContextGatherer_1 = require("./ContextGatherer");
- const CodeReviewer_1 = require("../features/CodeReviewer");
- const DescriptionEnhancer_1 = require("../features/DescriptionEnhancer");
- const Logger_1 = require("../utils/Logger");
- const ConfigManager_1 = require("../utils/ConfigManager");
- const Cache_1 = require("../utils/Cache");
- class Guardian {
+ import { GuardianError, } from "../types/index.js";
+ import { BitbucketProvider } from "./providers/BitbucketProvider.js";
+ import { ContextGatherer } from "./ContextGatherer.js";
+ import { CodeReviewer } from "../features/CodeReviewer.js";
+ import { DescriptionEnhancer } from "../features/DescriptionEnhancer.js";
+ import { logger } from "../utils/Logger.js";
+ import { configManager } from "../utils/ConfigManager.js";
+ import { cache } from "../utils/Cache.js";
+ export class Guardian {
+ config;
+ bitbucketProvider;
+ contextGatherer;
+ codeReviewer;
+ descriptionEnhancer;
+ neurolink;
+ initialized = false;
  constructor(config) {
- this.initialized = false;
  this.config = {};
  if (config) {
  this.config = { ...this.config, ...config };
@@ -30,27 +32,26 @@ class Guardian {
  return;
  }
  try {
- Logger_1.logger.badge();
- Logger_1.logger.phase("🚀 Initializing Yama...");
+ logger.badge();
+ logger.phase("🚀 Initializing Yama...");
  // Load configuration
- this.config = await ConfigManager_1.configManager.loadConfig(configPath);
+ this.config = await configManager.loadConfig(configPath);
  // Initialize providers
- this.bitbucketProvider = new BitbucketProvider_1.BitbucketProvider(this.config.providers.git.credentials);
+ this.bitbucketProvider = new BitbucketProvider(this.config.providers.git.credentials);
  await this.bitbucketProvider.initialize();
- // Initialize NeuroLink with eval-based dynamic import to bypass TypeScript compilation
- const dynamicImport = eval("(specifier) => import(specifier)");
- const { NeuroLink } = await dynamicImport("@juspay/neurolink");
+ // Initialize NeuroLink with native ESM dynamic import
+ const { NeuroLink } = await import("@juspay/neurolink");
  this.neurolink = new NeuroLink();
  // Initialize core components
- this.contextGatherer = new ContextGatherer_1.ContextGatherer(this.bitbucketProvider, this.config.providers.ai);
- this.codeReviewer = new CodeReviewer_1.CodeReviewer(this.bitbucketProvider, this.config.providers.ai, this.config.features.codeReview);
- this.descriptionEnhancer = new DescriptionEnhancer_1.DescriptionEnhancer(this.bitbucketProvider, this.config.providers.ai);
+ this.contextGatherer = new ContextGatherer(this.bitbucketProvider, this.config.providers.ai);
+ this.codeReviewer = new CodeReviewer(this.bitbucketProvider, this.config.providers.ai, this.config.features.codeReview);
+ this.descriptionEnhancer = new DescriptionEnhancer(this.bitbucketProvider, this.config.providers.ai);
  this.initialized = true;
- Logger_1.logger.success("✅ Yama initialized successfully");
+ logger.success("✅ Yama initialized successfully");
  }
  catch (error) {
- Logger_1.logger.error(`Failed to initialize Yama: ${error.message}`);
- throw new types_1.GuardianError("INITIALIZATION_ERROR", `Initialization failed: ${error.message}`);
+ logger.error(`Failed to initialize Yama: ${error.message}`);
+ throw new GuardianError("INITIALIZATION_ERROR", `Initialization failed: ${error.message}`);
  }
  }
  /**
@@ -61,15 +62,15 @@ class Guardian {
  const startTime = Date.now();
  const operations = [];
  try {
- Logger_1.logger.operation("PR Processing", "started");
- Logger_1.logger.info(`Target: ${options.workspace}/${options.repository}`);
- Logger_1.logger.info(`Operations: ${options.operations.join(", ")}`);
- Logger_1.logger.info(`Mode: ${options.dryRun ? "DRY RUN" : "LIVE"}`);
+ logger.operation("PR Processing", "started");
+ logger.info(`Target: ${options.workspace}/${options.repository}`);
+ logger.info(`Operations: ${options.operations.join(", ")}`);
+ logger.info(`Mode: ${options.dryRun ? "DRY RUN" : "LIVE"}`);
  // Step 1: Gather unified context ONCE for all operations
- Logger_1.logger.phase("📋 Gathering unified context...");
+ logger.phase("📋 Gathering unified context...");
  const context = await this.gatherUnifiedContext(options);
- Logger_1.logger.success(`Context ready: PR #${context.pr.id} - "${context.pr.title}"`);
- Logger_1.logger.info(`Files: ${context.diffStrategy.fileCount}, Strategy: ${context.diffStrategy.strategy}`);
+ logger.success(`Context ready: PR #${context.pr.id} - "${context.pr.title}"`);
+ logger.info(`Files: ${context.diffStrategy.fileCount}, Strategy: ${context.diffStrategy.strategy}`);
  // Step 2: Execute requested operations using shared context
  for (const operation of options.operations) {
  if (operation === "all") {
@@ -96,14 +97,14 @@ class Guardian {
  totalDuration: duration,
  },
  };
- Logger_1.logger.operation("PR Processing", "completed");
- Logger_1.logger.success(`✅ Processing completed in ${Math.round(duration / 1000)}s: ` +
+ logger.operation("PR Processing", "completed");
+ logger.success(`✅ Processing completed in ${Math.round(duration / 1000)}s: ` +
  `${successCount} success, ${errorCount} errors, ${skippedCount} skipped`);
  return result;
  }
  catch (error) {
- Logger_1.logger.operation("PR Processing", "failed");
- Logger_1.logger.error(`Processing failed: ${error.message}`);
+ logger.operation("PR Processing", "failed");
+ logger.error(`Processing failed: ${error.message}`);
  throw error;
  }
  }
@@ -212,7 +213,7 @@ class Guardian {
  // Check if we have cached context first
  const cachedContext = await this.contextGatherer.getCachedContext(identifier);
  if (cachedContext && options.config?.cache?.enabled !== false) {
- Logger_1.logger.debug("✓ Using cached context");
+ logger.debug("✓ Using cached context");
  return cachedContext;
  }
  // Determine what operations need diff data
@@ -258,7 +259,7 @@ class Guardian {
  };
  }
  catch (error) {
- Logger_1.logger.error(`Operation ${operation} failed: ${error.message}`);
+ logger.error(`Operation ${operation} failed: ${error.message}`);
  return {
  operation,
  status: "error",
@@ -273,16 +274,16 @@ class Guardian {
  */
  async executeCodeReview(context, options) {
  if (!this.config.features.codeReview.enabled) {
- Logger_1.logger.info("Code review is disabled in configuration");
+ logger.info("Code review is disabled in configuration");
  return { skipped: true, reason: "disabled in config" };
  }
- Logger_1.logger.phase("🔍 Executing code review...");
+ logger.phase("🔍 Executing code review...");
  const reviewOptions = {
  workspace: context.identifier.workspace,
  repository: context.identifier.repository,
  pullRequestId: context.identifier.pullRequestId,
  dryRun: options.dryRun,
- verbose: Logger_1.logger.getConfig().verbose,
+ verbose: logger.getConfig().verbose,
  excludePatterns: this.config.features.codeReview.excludePatterns,
  contextLines: this.config.features.codeReview.contextLines,
  };
@@ -294,16 +295,16 @@ class Guardian {
  */
  async executeDescriptionEnhancement(context, options) {
  if (!this.config.features.descriptionEnhancement.enabled) {
- Logger_1.logger.info("Description enhancement is disabled in configuration");
+ logger.info("Description enhancement is disabled in configuration");
  return { skipped: true, reason: "disabled in config" };
  }
- Logger_1.logger.phase("📝 Executing description enhancement...");
+ logger.phase("📝 Executing description enhancement...");
  const enhancementOptions = {
  workspace: context.identifier.workspace,
  repository: context.identifier.repository,
  pullRequestId: context.identifier.pullRequestId,
  dryRun: options.dryRun,
- verbose: Logger_1.logger.getConfig().verbose,
+ verbose: logger.getConfig().verbose,
  preserveContent: this.config.features.descriptionEnhancement.preserveContent,
  ensureRequiredSections: true,
  customSections: this.config.features.descriptionEnhancement.requiredSections,
@@ -325,7 +326,7 @@ class Guardian {
  branch: options.branch,
  pullRequestId: options.pullRequestId,
  };
- Logger_1.logger.operation("Code Review", "started");
+ logger.operation("Code Review", "started");
  try {
  // Gather context specifically for code review
  const context = await this.contextGatherer.gatherContext(identifier, {
@@ -334,11 +335,11 @@ class Guardian {
  includeDiff: true,
  });
  const result = await this.codeReviewer.reviewCodeWithContext(context, options);
- Logger_1.logger.operation("Code Review", "completed");
+ logger.operation("Code Review", "completed");
  return result;
  }
  catch (error) {
- Logger_1.logger.operation("Code Review", "failed");
+ logger.operation("Code Review", "failed");
  throw error;
  }
  }
@@ -353,18 +354,18 @@ class Guardian {
  branch: options.branch,
  pullRequestId: options.pullRequestId,
  };
- Logger_1.logger.operation("Description Enhancement", "started");
+ logger.operation("Description Enhancement", "started");
  try {
  // Gather context specifically for description enhancement
  const context = await this.contextGatherer.gatherContext(identifier, {
  includeDiff: true, // Description enhancement may need to see changes
  });
  const result = await this.descriptionEnhancer.enhanceWithContext(context, options);
- Logger_1.logger.operation("Description Enhancement", "completed");
+ logger.operation("Description Enhancement", "completed");
  return result;
  }
  catch (error) {
- Logger_1.logger.operation("Description Enhancement", "failed");
+ logger.operation("Description Enhancement", "failed");
  throw error;
  }
  }
@@ -379,7 +380,7 @@ class Guardian {
  // Check cache
  components.cache = {
  healthy: true,
- stats: Cache_1.cache.stats(),
+ stats: cache.stats(),
  };
  // Check NeuroLink (if initialized)
  components.neurolink = {
@@ -416,16 +417,16 @@ class Guardian {
  bitbucket: this.bitbucketProvider?.getStats(),
  context: this.contextGatherer?.getStats(),
  },
- cache: Cache_1.cache.stats(),
+ cache: cache.stats(),
  };
  }
  /**
  * Clear all caches
  */
  clearCache() {
- Cache_1.cache.clear();
+ cache.clear();
  this.bitbucketProvider?.clearCache();
- Logger_1.logger.info("All caches cleared");
+ logger.info("All caches cleared");
  }
  /**
  * Ensure Guardian is initialized
@@ -439,19 +440,18 @@ class Guardian {
  * Shutdown Guardian gracefully
  */
  async shutdown() {
- Logger_1.logger.info("Shutting down Yama...");
+ logger.info("Shutting down Yama...");
  // Clear caches
  this.clearCache();
  // Reset state
  this.initialized = false;
- Logger_1.logger.success("Yama shutdown complete");
+ logger.success("Yama shutdown complete");
  }
  }
- exports.Guardian = Guardian;
  // Export factory function
- function createGuardian(config) {
+ export function createGuardian(config) {
  return new Guardian(config);
  }
  // Export default instance
- exports.guardian = new Guardian();
+ export const guardian = new Guardian();
  //# sourceMappingURL=Guardian.js.map
@@ -2,7 +2,7 @@
  * Enhanced Bitbucket Provider - Optimized from both pr-police.js and pr-describe.js
  * Provides unified, cached, and optimized Bitbucket operations
  */
- import { PRIdentifier, PRInfo, PRDiff, GitCredentials } from "../../types";
+ import { PRIdentifier, PRInfo, PRDiff, GitCredentials } from "../../types/index.js";
  export interface BitbucketMCPResponse {
  content?: Array<{
  text?: string;