@juspay/yama 1.0.0 → 1.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,15 +1,15 @@
- "use strict";
  /**
  * Enhanced Code Reviewer - Optimized to work with Unified Context
  * Preserves all original functionality from pr-police.js but optimized
  */
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.CodeReviewer = void 0;
- exports.createCodeReviewer = createCodeReviewer;
  // NeuroLink will be dynamically imported
- const types_1 = require("../types");
- const Logger_1 = require("../utils/Logger");
- class CodeReviewer {
+ import { ProviderError, } from "../types/index.js";
+ import { logger } from "../utils/Logger.js";
+ export class CodeReviewer {
+ neurolink;
+ bitbucketProvider;
+ aiConfig;
+ reviewConfig;
  constructor(bitbucketProvider, aiConfig, reviewConfig) {
  this.bitbucketProvider = bitbucketProvider;
  this.aiConfig = aiConfig;
@@ -21,8 +21,8 @@ class CodeReviewer {
  async reviewCodeWithContext(context, options) {
  const startTime = Date.now();
  try {
- Logger_1.logger.phase("🧪 Conducting AI-powered code analysis...");
- Logger_1.logger.info(`Analyzing ${context.diffStrategy.fileCount} files using ${context.diffStrategy.strategy} strategy`);
+ logger.phase("🧪 Conducting AI-powered code analysis...");
+ logger.info(`Analyzing ${context.diffStrategy.fileCount} files using ${context.diffStrategy.strategy} strategy`);
  const analysisPrompt = this.buildAnalysisPrompt(context, options);
  const violations = await this.analyzeWithAI(analysisPrompt, context);
  const validatedViolations = this.validateViolations(violations, context);
@@ -31,12 +31,12 @@ class CodeReviewer {
  }
  const duration = Math.round((Date.now() - startTime) / 1000);
  const result = this.generateReviewResult(validatedViolations, duration, context);
- Logger_1.logger.success(`Code review completed in ${duration}s: ${validatedViolations.length} violations found`);
+ logger.success(`Code review completed in ${duration}s: ${validatedViolations.length} violations found`);
  return result;
  }
  catch (error) {
- Logger_1.logger.error(`Code review failed: ${error.message}`);
- throw new types_1.ProviderError(`Code review failed: ${error.message}`);
+ logger.error(`Code review failed: ${error.message}`);
+ throw new ProviderError(`Code review failed: ${error.message}`);
  }
  }
  /**
@@ -60,8 +60,8 @@ class CodeReviewer {
  validatedViolations.push(fixedViolation);
  }
  else {
- Logger_1.logger.debug(`⚠️ Skipping violation - snippet not found in diff: ${violation.file}`);
- Logger_1.logger.debug(` Original snippet: "${violation.code_snippet}"`);
+ logger.debug(`⚠️ Skipping violation - snippet not found in diff: ${violation.file}`);
+ logger.debug(` Original snippet: "${violation.code_snippet}"`);
  }
  }
  }
@@ -70,7 +70,7 @@ class CodeReviewer {
  validatedViolations.push(violation);
  }
  }
- Logger_1.logger.debug(`Validated ${validatedViolations.length} out of ${violations.length} violations`);
+ logger.debug(`Validated ${validatedViolations.length} out of ${violations.length} violations`);
  return validatedViolations;
  }
  /**
@@ -119,7 +119,7 @@ class CodeReviewer {
  for (const path of pathVariations) {
  fileDiff = context.fileDiffs.get(path);
  if (fileDiff) {
- Logger_1.logger.debug(`Found diff for ${violation.file} using variation: ${path}`);
+ logger.debug(`Found diff for ${violation.file} using variation: ${path}`);
  break;
  }
  }
@@ -128,14 +128,14 @@ class CodeReviewer {
  for (const [key, value] of context.fileDiffs.entries()) {
  if (key.endsWith(violation.file) || violation.file.endsWith(key)) {
  fileDiff = value;
- Logger_1.logger.debug(`Found diff for ${violation.file} using partial match: ${key}`);
+ logger.debug(`Found diff for ${violation.file} using partial match: ${key}`);
  break;
  }
  }
  }
  }
  if (!fileDiff) {
- Logger_1.logger.debug(`❌ Could not find diff for file: ${violation.file}`);
+ logger.debug(`❌ Could not find diff for file: ${violation.file}`);
  return null;
  }
  // First, try to find the exact line with line number extraction
@@ -152,7 +152,7 @@ class CodeReviewer {
  after: [diffLines[snippetIndex + 1]],
  };
  }
- Logger_1.logger.debug(`✅ Found exact match with line number for ${violation.file}`);
+ logger.debug(`✅ Found exact match with line number for ${violation.file}`);
  return fixedViolation;
  }
  // Fallback: Clean the snippet and try fuzzy matching
@@ -176,15 +176,15 @@ class CodeReviewer {
  after: [diffLines[i + 1]],
  };
  }
- Logger_1.logger.debug(`✅ Fixed code snippet for ${violation.file} using fuzzy match`);
+ logger.debug(`✅ Fixed code snippet for ${violation.file} using fuzzy match`);
  return fixedViolation;
  }
  }
- Logger_1.logger.debug(`❌ Could not find snippet in diff for ${violation.file}`);
- Logger_1.logger.debug(` Looking for: "${violation.code_snippet}"`);
+ logger.debug(`❌ Could not find snippet in diff for ${violation.file}`);
+ logger.debug(` Looking for: "${violation.code_snippet}"`);
  }
  catch (error) {
- Logger_1.logger.debug(`Error fixing code snippet: ${error.message}`);
+ logger.debug(`Error fixing code snippet: ${error.message}`);
  }
  return null;
  }
@@ -402,11 +402,10 @@ Return ONLY valid JSON:
  */
  async analyzeWithAI(prompt, context) {
  try {
- Logger_1.logger.debug("Starting AI analysis...");
+ logger.debug("Starting AI analysis...");
  // Initialize NeuroLink with eval-based dynamic import
  if (!this.neurolink) {
- const dynamicImport = eval("(specifier) => import(specifier)");
- const { NeuroLink } = await dynamicImport("@juspay/neurolink");
+ const { NeuroLink } = await import("@juspay/neurolink");
  this.neurolink = new NeuroLink();
  }
  // Extract context from unified context for better AI understanding
@@ -442,31 +441,31 @@ Return ONLY valid JSON:
  });
  // Log analytics if available
  if (result.analytics) {
- Logger_1.logger.debug(`AI Analytics - Provider: ${result.provider}, Response Time: ${result.responseTime}ms, Quality Score: ${result.evaluation?.overallScore}`);
+ logger.debug(`AI Analytics - Provider: ${result.provider}, Response Time: ${result.responseTime}ms, Quality Score: ${result.evaluation?.overallScore}`);
  }
- Logger_1.logger.debug("AI analysis completed, parsing response...");
+ logger.debug("AI analysis completed, parsing response...");
  // Modern NeuroLink returns { content: string }
  const analysisData = this.parseAIResponse(result);
  // Display AI response for debugging
- if (Logger_1.logger.getConfig().verbose) {
- Logger_1.logger.debug("AI Analysis Response:");
- Logger_1.logger.debug("═".repeat(80));
- Logger_1.logger.debug(JSON.stringify(analysisData, null, 2));
- Logger_1.logger.debug("═".repeat(80));
+ if (logger.getConfig().verbose) {
+ logger.debug("AI Analysis Response:");
+ logger.debug("═".repeat(80));
+ logger.debug(JSON.stringify(analysisData, null, 2));
+ logger.debug("═".repeat(80));
  }
  if (!analysisData.violations || !Array.isArray(analysisData.violations)) {
- Logger_1.logger.debug("No violations array found in AI response");
+ logger.debug("No violations array found in AI response");
  return [];
  }
- Logger_1.logger.debug(`AI analysis found ${analysisData.violations.length} violations`);
+ logger.debug(`AI analysis found ${analysisData.violations.length} violations`);
  return analysisData.violations;
  }
  catch (error) {
  if (error.message?.includes("timeout")) {
- Logger_1.logger.error("⏰ AI analysis timed out after 15 minutes");
+ logger.error("⏰ AI analysis timed out after 15 minutes");
  throw new Error("Analysis timeout - try reducing diff size or adjusting timeout");
  }
- Logger_1.logger.error(`AI analysis failed: ${error.message}`);
+ logger.error(`AI analysis failed: ${error.message}`);
  throw error;
  }
  }
@@ -474,7 +473,7 @@ Return ONLY valid JSON:
  * Post comments to PR using unified context - matching pr-police.js exactly
  */
  async postComments(context, violations, _options) {
- Logger_1.logger.phase("📝 Posting review comments...");
+ logger.phase("📝 Posting review comments...");
  let commentsPosted = 0;
  let commentsFailed = 0;
  const failedComments = [];
@@ -493,19 +492,19 @@ Return ONLY valid JSON:
  // Clean code snippet and fix search context - EXACTLY like pr-police.js
  const processedViolation = this.cleanCodeSnippet(violation);
  if (!processedViolation) {
- Logger_1.logger.debug(`⚠️ Skipping invalid violation for ${cleanFilePath}`);
+ logger.debug(`⚠️ Skipping invalid violation for ${cleanFilePath}`);
  continue;
  }
  const formattedComment = this.formatInlineComment(processedViolation);
  // Debug logging
- Logger_1.logger.debug(`🔍 Posting inline comment:`);
- Logger_1.logger.debug(` File: ${cleanFilePath}`);
- Logger_1.logger.debug(` Issue: ${processedViolation.issue}`);
- Logger_1.logger.debug(` Original snippet: ${violation.code_snippet}`);
- Logger_1.logger.debug(` Processed snippet: ${processedViolation.code_snippet}`);
+ logger.debug(`🔍 Posting inline comment:`);
+ logger.debug(` File: ${cleanFilePath}`);
+ logger.debug(` Issue: ${processedViolation.issue}`);
+ logger.debug(` Original snippet: ${violation.code_snippet}`);
+ logger.debug(` Processed snippet: ${processedViolation.code_snippet}`);
  if (processedViolation.search_context) {
- Logger_1.logger.debug(` Search context before: ${JSON.stringify(processedViolation.search_context.before)}`);
- Logger_1.logger.debug(` Search context after: ${JSON.stringify(processedViolation.search_context.after)}`);
+ logger.debug(` Search context before: ${JSON.stringify(processedViolation.search_context.before)}`);
+ logger.debug(` Search context after: ${JSON.stringify(processedViolation.search_context.after)}`);
  }
  // Use new code snippet approach - EXACTLY like pr-police.js
  await this.bitbucketProvider.addComment(context.identifier, formattedComment, {
@@ -518,14 +517,14 @@ Return ONLY valid JSON:
  suggestion: processedViolation.suggestion, // Pass the suggestion for inline code suggestions
  });
  commentsPosted++;
- Logger_1.logger.debug(`✅ Posted inline comment: ${cleanFilePath} (${processedViolation.issue})`);
+ logger.debug(`✅ Posted inline comment: ${cleanFilePath} (${processedViolation.issue})`);
  }
  catch (error) {
  commentsFailed++;
  const errorMsg = error.message;
- Logger_1.logger.debug(`❌ Failed to post inline comment: ${errorMsg}`);
- Logger_1.logger.debug(` File: ${violation.file}, Issue: ${violation.issue}`);
- Logger_1.logger.debug(` Code snippet: ${violation.code_snippet}`);
+ logger.debug(`❌ Failed to post inline comment: ${errorMsg}`);
+ logger.debug(` File: ${violation.file}, Issue: ${violation.issue}`);
+ logger.debug(` Code snippet: ${violation.code_snippet}`);
  failedComments.push({
  file: violation.file,
  issue: violation.issue,
@@ -539,15 +538,15 @@ Return ONLY valid JSON:
  const summaryComment = this.generateSummaryComment(violations, context, failedComments);
  await this.bitbucketProvider.addComment(context.identifier, summaryComment);
  commentsPosted++;
- Logger_1.logger.debug("✅ Posted summary comment");
+ logger.debug("✅ Posted summary comment");
  }
  catch (error) {
- Logger_1.logger.debug(`❌ Failed to post summary comment: ${error.message}`);
+ logger.debug(`❌ Failed to post summary comment: ${error.message}`);
  }
  }
- Logger_1.logger.success(`✅ Posted ${commentsPosted} comments successfully`);
+ logger.success(`✅ Posted ${commentsPosted} comments successfully`);
  if (commentsFailed > 0) {
- Logger_1.logger.warn(`⚠️ Failed to post ${commentsFailed} inline comments`);
+ logger.warn(`⚠️ Failed to post ${commentsFailed} inline comments`);
  }
  }
  /**
@@ -704,7 +703,7 @@ ${recommendation}
  const cleaned = filePath.replace(/^(src|dst):\/\//, "");
  // Log the cleaning for debugging
  if (cleaned !== filePath) {
- Logger_1.logger.debug(`Cleaned file path: ${filePath} -> ${cleaned}`);
+ logger.debug(`Cleaned file path: ${filePath} -> ${cleaned}`);
  }
  return cleaned;
  }
@@ -734,7 +733,7 @@ ${recommendation}
  let currentOldLine = 0;
  let inHunk = false;
  // Debug logging
- Logger_1.logger.debug(`Looking for snippet: "${codeSnippet}"`);
+ logger.debug(`Looking for snippet: "${codeSnippet}"`);
  for (let i = 0; i < lines.length; i++) {
  const line = lines[i];
  // Parse hunk headers (e.g., @@ -10,6 +10,8 @@)
@@ -744,7 +743,7 @@ ${recommendation}
  currentOldLine = parseInt(hunkMatch[1]);
  currentNewLine = parseInt(hunkMatch[2]);
  inHunk = true;
- Logger_1.logger.debug(`Found hunk header: old=${currentOldLine}, new=${currentNewLine}`);
+ logger.debug(`Found hunk header: old=${currentOldLine}, new=${currentNewLine}`);
  continue;
  }
  // Skip lines that aren't part of the diff content
@@ -770,7 +769,7 @@ ${recommendation}
  resultLine = currentNewLine;
  lineType = "CONTEXT";
  }
- Logger_1.logger.debug(`Found match at line ${resultLine} (${lineType})`);
+ logger.debug(`Found match at line ${resultLine} (${lineType})`);
  return { lineNumber: resultLine, lineType };
  }
  // Update line counters AFTER checking for match
@@ -788,7 +787,7 @@ ${recommendation}
  currentOldLine++;
  }
  }
- Logger_1.logger.debug(`Snippet not found in diff`);
+ logger.debug(`Snippet not found in diff`);
  return null;
  }
  /**
@@ -843,7 +842,7 @@ ${recommendation}
  return fixed;
  }
  catch (error) {
- Logger_1.logger.debug(`❌ Error cleaning code snippet: ${error.message}`);
+ logger.debug(`❌ Error cleaning code snippet: ${error.message}`);
  return null;
  }
  }
@@ -914,7 +913,7 @@ ${recommendation}
  return { violations: [] };
  }
  catch (error) {
- Logger_1.logger.debug(`Failed to parse AI response: ${error.message}`);
+ logger.debug(`Failed to parse AI response: ${error.message}`);
  return { violations: [] };
  }
  }
@@ -963,7 +962,7 @@ ${recommendation}
  for (const path of pathVariations) {
  fileDiff = context.fileDiffs.get(path);
  if (fileDiff) {
- Logger_1.logger.debug(`Found diff for ${violation.file} using variation: ${path}`);
+ logger.debug(`Found diff for ${violation.file} using variation: ${path}`);
  break;
  }
  }
@@ -972,7 +971,7 @@ ${recommendation}
  for (const [key, value] of context.fileDiffs.entries()) {
  if (key.endsWith(violation.file) || violation.file.endsWith(key)) {
  fileDiff = value;
- Logger_1.logger.debug(`Found diff for ${violation.file} using partial match: ${key}`);
+ logger.debug(`Found diff for ${violation.file} using partial match: ${key}`);
  break;
  }
  }
@@ -981,16 +980,16 @@ ${recommendation}
  if (fileDiff) {
  const lineInfo = this.extractLineNumberFromDiff(fileDiff, violation.code_snippet);
  if (lineInfo) {
- Logger_1.logger.debug(`Extracted line info for ${violation.file}: line ${lineInfo.lineNumber}, type ${lineInfo.lineType}`);
+ logger.debug(`Extracted line info for ${violation.file}: line ${lineInfo.lineNumber}, type ${lineInfo.lineType}`);
  }
  return lineInfo;
  }
  else {
- Logger_1.logger.debug(`No diff found for file: ${violation.file}`);
+ logger.debug(`No diff found for file: ${violation.file}`);
  }
  }
  catch (error) {
- Logger_1.logger.debug(`Error extracting line info: ${error.message}`);
+ logger.debug(`Error extracting line info: ${error.message}`);
  }
  return null;
  }
@@ -1034,8 +1033,7 @@ ${recommendation}
  return Array.from(variations);
  }
  }
- exports.CodeReviewer = CodeReviewer;
- function createCodeReviewer(bitbucketProvider, aiConfig, reviewConfig) {
+ export function createCodeReviewer(bitbucketProvider, aiConfig, reviewConfig) {
  return new CodeReviewer(bitbucketProvider, aiConfig, reviewConfig);
  }
  //# sourceMappingURL=CodeReviewer.js.map
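
Note on the change above: besides replacing the CommonJS wrappers (exports assignments and require calls) with native import/export using explicit .js extensions, the compiled output no longer hides the NeuroLink loader behind an eval-wrapped dynamic import. That wrapper is commonly used in CommonJS builds to keep the transpiler from rewriting import() into require(); once the output is native ESM it becomes unnecessary. A minimal sketch of the before/after pattern, mirroring the hunk at lines 402-410 (illustrative only, not the package's exact source):

    // CommonJS build (1.0.0): hide import() from the CJS transpiler
    const dynamicImport = eval("(specifier) => import(specifier)");
    const { NeuroLink: NeuroLinkCjs } = await dynamicImport("@juspay/neurolink");

    // ESM build (1.1.1): native dynamic import works directly
    const { NeuroLink } = await import("@juspay/neurolink");
    const neurolink = new NeuroLink();
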
package/dist/features/DescriptionEnhancer.d.ts CHANGED
@@ -2,9 +2,9 @@
  * Enhanced Description Enhancer - Optimized to work with Unified Context
  * Preserves all original functionality from pr-describe.js but optimized
  */
- import { EnhancementOptions, EnhancementResult, AIProviderConfig } from "../types";
- import { UnifiedContext } from "../core/ContextGatherer";
- import { BitbucketProvider } from "../core/providers/BitbucketProvider";
+ import { EnhancementOptions, EnhancementResult, AIProviderConfig } from "../types/index.js";
+ import { UnifiedContext } from "../core/ContextGatherer.js";
+ import { BitbucketProvider } from "../core/providers/BitbucketProvider.js";
  export declare class DescriptionEnhancer {
  private neurolink;
  private bitbucketProvider;
package/dist/features/DescriptionEnhancer.js CHANGED
@@ -1,28 +1,27 @@
- "use strict";
  /**
  * Enhanced Description Enhancer - Optimized to work with Unified Context
  * Preserves all original functionality from pr-describe.js but optimized
  */
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.DescriptionEnhancer = void 0;
- exports.createDescriptionEnhancer = createDescriptionEnhancer;
- const types_1 = require("../types");
- const Logger_1 = require("../utils/Logger");
- class DescriptionEnhancer {
+ import { ProviderError, } from "../types/index.js";
+ import { logger } from "../utils/Logger.js";
+ export class DescriptionEnhancer {
+ neurolink;
+ bitbucketProvider;
+ aiConfig;
+ defaultRequiredSections = [
+ { key: "changelog", name: "Changelog (Modules Modified)", required: true },
+ {
+ key: "testcases",
+ name: "Test Cases (What to be tested)",
+ required: true,
+ },
+ {
+ key: "config_changes",
+ name: "CAC Config Or Service Config Changes",
+ required: true,
+ },
+ ];
  constructor(bitbucketProvider, aiConfig) {
- this.defaultRequiredSections = [
- { key: "changelog", name: "Changelog (Modules Modified)", required: true },
- {
- key: "testcases",
- name: "Test Cases (What to be tested)",
- required: true,
- },
- {
- key: "config_changes",
- name: "CAC Config Or Service Config Changes",
- required: true,
- },
- ];
  this.bitbucketProvider = bitbucketProvider;
  this.aiConfig = aiConfig;
  }
@@ -32,11 +31,11 @@ class DescriptionEnhancer {
  async enhanceWithContext(context, options) {
  const startTime = Date.now();
  try {
- Logger_1.logger.phase("📝 Enhancing PR description...");
- Logger_1.logger.info(`Processing PR #${context.pr.id}: "${context.pr.title}"`);
+ logger.phase("📝 Enhancing PR description...");
+ logger.info(`Processing PR #${context.pr.id}: "${context.pr.title}"`);
  // Step 1: Analyze existing content and identify what needs enhancement
  const analysisResult = this.analyzeExistingContent(context.pr.description, options.customSections || this.defaultRequiredSections);
- Logger_1.logger.info(`Content analysis: ${analysisResult.preservedContent.media.length} media items, ` +
+ logger.info(`Content analysis: ${analysisResult.preservedContent.media.length} media items, ` +
  `${analysisResult.missingCount} missing sections`);
  // Step 2: Generate enhanced description using AI
  const enhancedDescription = await this.generateEnhancedDescription(context, analysisResult, options);
@@ -49,20 +48,20 @@ class DescriptionEnhancer {
  }
  const duration = Math.round((Date.now() - startTime) / 1000);
  const result = this.generateEnhancementResult(context.pr.description, enhancedDescription, analysisResult, duration);
- Logger_1.logger.success(`Description enhancement completed in ${duration}s: ` +
+ logger.success(`Description enhancement completed in ${duration}s: ` +
  `${result.sectionsAdded.length} sections added, ${result.sectionsEnhanced.length} enhanced`);
  return result;
  }
  catch (error) {
- Logger_1.logger.error(`Description enhancement failed: ${error.message}`);
- throw new types_1.ProviderError(`Description enhancement failed: ${error.message}`);
+ logger.error(`Description enhancement failed: ${error.message}`);
+ throw new ProviderError(`Description enhancement failed: ${error.message}`);
  }
  }
  /**
  * Analyze existing PR description content
  */
  analyzeExistingContent(description, requiredSections) {
- Logger_1.logger.debug("Analyzing existing PR description content...");
+ logger.debug("Analyzing existing PR description content...");
  // Extract preservable content (media, files, links)
  const preservedContent = this.extractPreservableContent(description);
  // Validate required sections
@@ -100,7 +99,7 @@ class DescriptionEnhancer {
  const linkRegex = /\[[^\]]*\]\([^)]+\)/g;
  const allLinks = description.match(linkRegex) || [];
  preservableContent.links = allLinks.filter((link) => !mediaRegex.test(link) && !fileRegex.test(link));
- Logger_1.logger.debug(`Preservable content: ${preservableContent.media.length} media, ` +
+ logger.debug(`Preservable content: ${preservableContent.media.length} media, ` +
  `${preservableContent.files.length} files, ${preservableContent.links.length} links`);
  return preservableContent;
  }
@@ -198,11 +197,10 @@ class DescriptionEnhancer {
  * Generate enhanced description using AI and unified context
  */
  async generateEnhancedDescription(context, analysisResult, options) {
- Logger_1.logger.debug("Generating AI-enhanced description...");
+ logger.debug("Generating AI-enhanced description...");
  // Initialize NeuroLink with eval-based dynamic import
  if (!this.neurolink) {
- const dynamicImport = eval("(specifier) => import(specifier)");
- const { NeuroLink } = await dynamicImport("@juspay/neurolink");
+ const { NeuroLink } = await import("@juspay/neurolink");
  this.neurolink = new NeuroLink();
  }
  const enhancementPrompt = this.buildEnhancementPrompt(context, analysisResult, options);
@@ -233,16 +231,16 @@ class DescriptionEnhancer {
  const finalValidation = this.validateRequiredSections(enhancedDescription, options.customSections || this.defaultRequiredSections);
  const stillMissing = finalValidation.filter((s) => !s.present);
  if (stillMissing.length > 0) {
- Logger_1.logger.warn(`Warning: ${stillMissing.length} required sections still missing after AI enhancement`);
+ logger.warn(`Warning: ${stillMissing.length} required sections still missing after AI enhancement`);
  }
  return enhancedDescription;
  }
  catch (error) {
  if (error.message?.includes("timeout")) {
- Logger_1.logger.error("⏰ Description enhancement timed out after 8 minutes");
+ logger.error("⏰ Description enhancement timed out after 8 minutes");
  throw new Error("Enhancement timeout - try with smaller diff or adjust timeout");
  }
- Logger_1.logger.error(`AI description generation failed: ${error.message}`);
+ logger.error(`AI description generation failed: ${error.message}`);
  throw error;
  }
  }
@@ -369,14 +367,14 @@ Generate the enhanced description now, ensuring ALL preservation requirements ar
  * Update PR description in Bitbucket
  */
  async updatePRDescription(context, enhancedDescription) {
- Logger_1.logger.debug(`Updating PR description for #${context.pr.id}...`);
+ logger.debug(`Updating PR description for #${context.pr.id}...`);
  try {
  await this.bitbucketProvider.updatePRDescription(context.identifier, enhancedDescription);
- Logger_1.logger.success("✅ PR description updated successfully");
+ logger.success("✅ PR description updated successfully");
  }
  catch (error) {
- Logger_1.logger.error(`Failed to update PR description: ${error.message}`);
- throw new types_1.ProviderError(`Description update failed: ${error.message}`);
+ logger.error(`Failed to update PR description: ${error.message}`);
+ throw new ProviderError(`Description update failed: ${error.message}`);
  }
  }
  /**
@@ -441,8 +439,7 @@ Generate the enhanced description now, ensuring ALL preservation requirements ar
  };
  }
  }
- exports.DescriptionEnhancer = DescriptionEnhancer;
- function createDescriptionEnhancer(bitbucketProvider, aiConfig) {
+ export function createDescriptionEnhancer(bitbucketProvider, aiConfig) {
  return new DescriptionEnhancer(bitbucketProvider, aiConfig);
  }
  //# sourceMappingURL=DescriptionEnhancer.js.map
package/dist/index.d.ts CHANGED
@@ -2,15 +2,15 @@
  * Yama - Main package exports
  * Provides both programmatic API and CLI access
  */
- export { Guardian, createGuardian, guardian } from "./core/Guardian";
- export { ContextGatherer, createContextGatherer } from "./core/ContextGatherer";
- export type { UnifiedContext, ProjectContext, DiffStrategy, } from "./core/ContextGatherer";
- export { BitbucketProvider, createBitbucketProvider, } from "./core/providers/BitbucketProvider";
- export { CodeReviewer, createCodeReviewer } from "./features/CodeReviewer";
- export { DescriptionEnhancer, createDescriptionEnhancer, } from "./features/DescriptionEnhancer";
- export { Logger, createLogger, logger } from "./utils/Logger";
- export { Cache, createCache, cache } from "./utils/Cache";
- export { ConfigManager, createConfigManager, configManager, } from "./utils/ConfigManager";
- export * from "./types";
- export { main as cli } from "./cli/index";
+ export { Guardian, createGuardian, guardian } from "./core/Guardian.js";
+ export { ContextGatherer, createContextGatherer } from "./core/ContextGatherer.js";
+ export type { UnifiedContext, ProjectContext, DiffStrategy, } from "./core/ContextGatherer.js";
+ export { BitbucketProvider, createBitbucketProvider, } from "./core/providers/BitbucketProvider.js";
+ export { CodeReviewer, createCodeReviewer } from "./features/CodeReviewer.js";
+ export { DescriptionEnhancer, createDescriptionEnhancer, } from "./features/DescriptionEnhancer.js";
+ export { Logger, createLogger, logger } from "./utils/Logger.js";
+ export { Cache, createCache, cache } from "./utils/Cache.js";
+ export { ConfigManager, createConfigManager, configManager, } from "./utils/ConfigManager.js";
+ export * from "./types/index.js";
+ export { main as cli } from "./cli/index.js";
  //# sourceMappingURL=index.d.ts.map
package/dist/index.js CHANGED
@@ -1,60 +1,22 @@
- "use strict";
  /**
  * Yama - Main package exports
  * Provides both programmatic API and CLI access
  */
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- var desc = Object.getOwnPropertyDescriptor(m, k);
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
- desc = { enumerable: true, get: function() { return m[k]; } };
- }
- Object.defineProperty(o, k2, desc);
- }) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
- }));
- var __exportStar = (this && this.__exportStar) || function(m, exports) {
- for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.cli = exports.configManager = exports.createConfigManager = exports.ConfigManager = exports.cache = exports.createCache = exports.Cache = exports.logger = exports.createLogger = exports.Logger = exports.createDescriptionEnhancer = exports.DescriptionEnhancer = exports.createCodeReviewer = exports.CodeReviewer = exports.createBitbucketProvider = exports.BitbucketProvider = exports.createContextGatherer = exports.ContextGatherer = exports.guardian = exports.createGuardian = exports.Guardian = void 0;
  // Core classes
- var Guardian_1 = require("./core/Guardian");
- Object.defineProperty(exports, "Guardian", { enumerable: true, get: function () { return Guardian_1.Guardian; } });
- Object.defineProperty(exports, "createGuardian", { enumerable: true, get: function () { return Guardian_1.createGuardian; } });
- Object.defineProperty(exports, "guardian", { enumerable: true, get: function () { return Guardian_1.guardian; } });
- var ContextGatherer_1 = require("./core/ContextGatherer");
- Object.defineProperty(exports, "ContextGatherer", { enumerable: true, get: function () { return ContextGatherer_1.ContextGatherer; } });
- Object.defineProperty(exports, "createContextGatherer", { enumerable: true, get: function () { return ContextGatherer_1.createContextGatherer; } });
+ export { Guardian, createGuardian, guardian } from "./core/Guardian.js";
+ export { ContextGatherer, createContextGatherer } from "./core/ContextGatherer.js";
  // Providers
- var BitbucketProvider_1 = require("./core/providers/BitbucketProvider");
- Object.defineProperty(exports, "BitbucketProvider", { enumerable: true, get: function () { return BitbucketProvider_1.BitbucketProvider; } });
- Object.defineProperty(exports, "createBitbucketProvider", { enumerable: true, get: function () { return BitbucketProvider_1.createBitbucketProvider; } });
+ export { BitbucketProvider, createBitbucketProvider, } from "./core/providers/BitbucketProvider.js";
  // Features
- var CodeReviewer_1 = require("./features/CodeReviewer");
- Object.defineProperty(exports, "CodeReviewer", { enumerable: true, get: function () { return CodeReviewer_1.CodeReviewer; } });
- Object.defineProperty(exports, "createCodeReviewer", { enumerable: true, get: function () { return CodeReviewer_1.createCodeReviewer; } });
- var DescriptionEnhancer_1 = require("./features/DescriptionEnhancer");
- Object.defineProperty(exports, "DescriptionEnhancer", { enumerable: true, get: function () { return DescriptionEnhancer_1.DescriptionEnhancer; } });
- Object.defineProperty(exports, "createDescriptionEnhancer", { enumerable: true, get: function () { return DescriptionEnhancer_1.createDescriptionEnhancer; } });
+ export { CodeReviewer, createCodeReviewer } from "./features/CodeReviewer.js";
+ export { DescriptionEnhancer, createDescriptionEnhancer, } from "./features/DescriptionEnhancer.js";
  // Utilities
- var Logger_1 = require("./utils/Logger");
- Object.defineProperty(exports, "Logger", { enumerable: true, get: function () { return Logger_1.Logger; } });
- Object.defineProperty(exports, "createLogger", { enumerable: true, get: function () { return Logger_1.createLogger; } });
- Object.defineProperty(exports, "logger", { enumerable: true, get: function () { return Logger_1.logger; } });
- var Cache_1 = require("./utils/Cache");
- Object.defineProperty(exports, "Cache", { enumerable: true, get: function () { return Cache_1.Cache; } });
- Object.defineProperty(exports, "createCache", { enumerable: true, get: function () { return Cache_1.createCache; } });
- Object.defineProperty(exports, "cache", { enumerable: true, get: function () { return Cache_1.cache; } });
- var ConfigManager_1 = require("./utils/ConfigManager");
- Object.defineProperty(exports, "ConfigManager", { enumerable: true, get: function () { return ConfigManager_1.ConfigManager; } });
- Object.defineProperty(exports, "createConfigManager", { enumerable: true, get: function () { return ConfigManager_1.createConfigManager; } });
- Object.defineProperty(exports, "configManager", { enumerable: true, get: function () { return ConfigManager_1.configManager; } });
+ export { Logger, createLogger, logger } from "./utils/Logger.js";
+ export { Cache, createCache, cache } from "./utils/Cache.js";
+ export { ConfigManager, createConfigManager, configManager, } from "./utils/ConfigManager.js";
  // Types
- __exportStar(require("./types"), exports);
+ export * from "./types/index.js";
  // CLI
- var index_1 = require("./cli/index");
- Object.defineProperty(exports, "cli", { enumerable: true, get: function () { return index_1.main; } });
+ export { main as cli } from "./cli/index.js";
  // Note: Use named import { Guardian } from '@juspay/yama' instead
  //# sourceMappingURL=index.js.map
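
Overall, the 1.1.1 dist output drops the CommonJS interop layer (exports assignments, require calls, and the __createBinding/__exportStar helpers) in favor of native ES module re-exports with explicit .js extensions. A minimal consumer-side sketch, assuming an ESM environment such as Node with "type": "module" or a bundler (illustrative only; it echoes the package's own note above about using named imports):

    // ESM consumer of @juspay/yama 1.1.1
    import { Guardian, createCodeReviewer, logger } from "@juspay/yama";

    // Named imports are the re-exported entry points from dist/index.js,
    // as recommended by the note in that file.
    logger.info("Yama loaded as a native ES module");
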