@eldrforge/commands-git 0.1.5 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -3,13 +3,13 @@ import 'dotenv/config';
3
3
  import shellescape from 'shell-escape';
4
4
  import { getDryRunLogger, DEFAULT_MAX_DIFF_BYTES, DEFAULT_EXCLUDED_PATTERNS, Diff, Files, Log, DEFAULT_OUTPUT_DIRECTORY, sanitizeDirection, toAIConfig, createStorageAdapter, createLoggerAdapter, getOutputPath, getTimestampedResponseFilename, getTimestampedRequestFilename, filterContent, getTimestampedCommitFilename, improveContentWithLLM, getLogger, getTimestampedReviewNotesFilename, getTimestampedReviewFilename } from '@eldrforge/core';
5
5
  import { ValidationError, ExternalDependencyError, CommandError, createStorage, checkForFileDependencies as checkForFileDependencies$1, logFileDependencyWarning, logFileDependencySuggestions, FileOperationError } from '@eldrforge/shared';
6
- import { run, validateString, safeJsonParse, validatePackageJson, unstageAll, stageFiles, verifyStagedFiles, runSecure, getCurrentBranch, getGitStatusSummary } from '@eldrforge/git-tools';
6
+ import { run, validateString, safeJsonParse, validatePackageJson, unstageAll, stageFiles, verifyStagedFiles, getCurrentBranch, runSecure, getGitStatusSummary } from '@eldrforge/git-tools';
7
7
  import { getRecentClosedIssuesForCommit, handleIssueCreation, getReleaseNotesContent, getIssuesContent } from '@eldrforge/github-tools';
8
8
  import { runAgenticCommit, requireTTY, generateReflectionReport, getUserChoice, STANDARD_CHOICES, getLLMFeedbackInEditor, editContentInEditor, createCompletionWithRetry, createCommitPrompt, createReviewPrompt, createCompletion } from '@eldrforge/ai-service';
9
9
  import path from 'path';
10
- import fs from 'fs/promises';
11
10
  import os from 'os';
12
11
  import { spawn } from 'child_process';
12
+ import fs from 'fs/promises';
13
13
 
14
14
  // Helper function to read context files
15
15
  async function readContextFiles(contextFiles, logger) {
@@ -552,10 +552,11 @@ const saveCommitMessage = async (outputDirectory, summary, storage, logger)=>{
552
552
  return lines.join('\n');
553
553
  }
554
554
  const executeInternal$3 = async (runConfig)=>{
555
- var _ref, _runConfig_excludedPatterns;
556
- var _runConfig_commit, _runConfig_commit1, _runConfig_commit2, _runConfig_commit3, _runConfig_commit4, _runConfig_commit5, _runConfig_commit6, _aiConfig_commands_commit, _aiConfig_commands, _runConfig_commit7, _aiConfig_commands_commit1, _aiConfig_commands1, _runConfig_commit8, _runConfig_commit9, _runConfig_commit10, _runConfig_commit11, _runConfig_commit12, _runConfig_commit13, _runConfig_commit14;
555
+ var _ref, _ref1, _runConfig_excludedPatterns;
556
+ var _runConfig_commit, _runConfig_commit1, _runConfig_commit2, _runConfig_commit3, _runConfig_commit4, _runConfig_commit5, _runConfig_commit6, _runConfig_commit7, _aiConfig_commands_commit, _aiConfig_commands, _aiConfig_commands_commit1, _aiConfig_commands1, _aiConfig_commands_commit2, _aiConfig_commands2, _runConfig_commit8, _aiConfig_commands_commit3, _aiConfig_commands3, _runConfig_commit9, _runConfig_commit10, _runConfig_commit11, _runConfig_commit12, _runConfig_commit13, _runConfig_commit14, _runConfig_commit15;
557
557
  const isDryRun = runConfig.dryRun || false;
558
558
  const logger = getDryRunLogger(isDryRun);
559
+ logger.info('COMMIT_START: Starting commit message generation | Mode: %s', isDryRun ? 'dry-run' : 'live');
559
560
  // Track if user explicitly chose to skip in interactive mode
560
561
  let userSkippedCommit = false;
561
562
  if ((_runConfig_commit = runConfig.commit) === null || _runConfig_commit === void 0 ? void 0 : _runConfig_commit.add) {
@@ -568,11 +569,14 @@ const executeInternal$3 = async (runConfig)=>{
568
569
  }
569
570
  }
570
571
  // Determine cached state with single, clear logic
572
+ logger.info('COMMIT_CHECK_STAGED: Checking for staged changes | Action: Analyzing git status');
571
573
  const cached = await determineCachedState(runConfig);
574
+ logger.info('COMMIT_STAGED_STATUS: Staged changes detected: %s | Cached: %s', cached ? 'yes' : 'no', cached);
572
575
  // Validate sendit state early - now returns boolean instead of throwing
573
576
  validateSenditState(runConfig, cached, isDryRun, logger);
577
+ logger.info('COMMIT_GENERATE_DIFF: Generating diff content | Max bytes: %d', (_ref = (_runConfig_commit1 = runConfig.commit) === null || _runConfig_commit1 === void 0 ? void 0 : _runConfig_commit1.maxDiffBytes) !== null && _ref !== void 0 ? _ref : DEFAULT_MAX_DIFF_BYTES);
574
578
  let diffContent = '';
575
- const maxDiffBytes = (_ref = (_runConfig_commit1 = runConfig.commit) === null || _runConfig_commit1 === void 0 ? void 0 : _runConfig_commit1.maxDiffBytes) !== null && _ref !== void 0 ? _ref : DEFAULT_MAX_DIFF_BYTES;
579
+ const maxDiffBytes = (_ref1 = (_runConfig_commit2 = runConfig.commit) === null || _runConfig_commit2 === void 0 ? void 0 : _runConfig_commit2.maxDiffBytes) !== null && _ref1 !== void 0 ? _ref1 : DEFAULT_MAX_DIFF_BYTES;
576
580
  const options = {
577
581
  cached,
578
582
  excludedPatterns: (_runConfig_excludedPatterns = runConfig.excludedPatterns) !== null && _runConfig_excludedPatterns !== void 0 ? _runConfig_excludedPatterns : DEFAULT_EXCLUDED_PATTERNS,
@@ -580,15 +584,16 @@ const executeInternal$3 = async (runConfig)=>{
580
584
  };
581
585
  const diff = await Diff.create(options);
582
586
  diffContent = await diff.get();
587
+ logger.info('COMMIT_DIFF_GENERATED: Diff content generated | Size: %d bytes | Has changes: %s', diffContent.length, diffContent.trim().length > 0 ? 'yes' : 'no');
583
588
  // Check if there are actually any changes in the diff
584
589
  let hasActualChanges = diffContent.trim().length > 0;
585
590
  // If no changes found with current patterns, check for critical excluded files
586
591
  if (!hasActualChanges) {
587
592
  const criticalChanges = await Diff.hasCriticalExcludedChanges();
588
593
  if (criticalChanges.hasChanges) {
589
- var _runConfig_commit15;
594
+ var _runConfig_commit16;
590
595
  logger.info('CRITICAL_FILES_DETECTED: No changes with exclusion patterns, but critical files modified | Files: %s | Action: May need to include critical files', criticalChanges.files.join(', '));
591
- if (((_runConfig_commit15 = runConfig.commit) === null || _runConfig_commit15 === void 0 ? void 0 : _runConfig_commit15.sendit) && !isDryRun) {
596
+ if (((_runConfig_commit16 = runConfig.commit) === null || _runConfig_commit16 === void 0 ? void 0 : _runConfig_commit16.sendit) && !isDryRun) {
592
597
  var _runConfig_excludedPatterns1;
593
598
  // In sendit mode, automatically include critical files
594
599
  logger.info('SENDIT_INCLUDING_CRITICAL: SendIt mode including critical files in diff | Purpose: Ensure all important changes are captured');
@@ -620,10 +625,10 @@ const executeInternal$3 = async (runConfig)=>{
620
625
  }
621
626
  }
622
627
  } else {
623
- var _runConfig_commit16;
628
+ var _runConfig_commit17;
624
629
  // No changes at all - try fallback to file content for new repositories
625
630
  logger.info('NO_CHANGES_DETECTED: No changes found in working directory | Status: clean | Action: Nothing to commit');
626
- if (((_runConfig_commit16 = runConfig.commit) === null || _runConfig_commit16 === void 0 ? void 0 : _runConfig_commit16.sendit) && !isDryRun) {
631
+ if (((_runConfig_commit17 = runConfig.commit) === null || _runConfig_commit17 === void 0 ? void 0 : _runConfig_commit17.sendit) && !isDryRun) {
627
632
  logger.warn('No changes detected to commit. Skipping commit operation.');
628
633
  return 'No changes to commit.';
629
634
  } else {
@@ -642,8 +647,8 @@ const executeInternal$3 = async (runConfig)=>{
642
647
  diffContent = fileContent;
643
648
  hasActualChanges = true; // We have content to work with
644
649
  } else {
645
- var _runConfig_commit17;
646
- if ((_runConfig_commit17 = runConfig.commit) === null || _runConfig_commit17 === void 0 ? void 0 : _runConfig_commit17.sendit) {
650
+ var _runConfig_commit18;
651
+ if ((_runConfig_commit18 = runConfig.commit) === null || _runConfig_commit18 === void 0 ? void 0 : _runConfig_commit18.sendit) {
647
652
  logger.info('COMMIT_SKIPPED: Skipping commit operation | Reason: No changes detected | Action: None');
648
653
  return 'No changes to commit.';
649
654
  } else {
@@ -654,7 +659,7 @@ const executeInternal$3 = async (runConfig)=>{
654
659
  }
655
660
  }
656
661
  const logOptions = {
657
- limit: (_runConfig_commit2 = runConfig.commit) === null || _runConfig_commit2 === void 0 ? void 0 : _runConfig_commit2.messageLimit
662
+ limit: (_runConfig_commit3 = runConfig.commit) === null || _runConfig_commit3 === void 0 ? void 0 : _runConfig_commit3.messageLimit
658
663
  };
659
664
  const log = await Log.create(logOptions);
660
665
  const logContext = await log.get();
@@ -689,7 +694,7 @@ const executeInternal$3 = async (runConfig)=>{
689
694
  overridePaths: runConfig.discoveredConfigDirs || [],
690
695
  overrides: runConfig.overrides || false
691
696
  };
692
- const userDirection = sanitizeDirection((_runConfig_commit3 = runConfig.commit) === null || _runConfig_commit3 === void 0 ? void 0 : _runConfig_commit3.direction);
697
+ const userDirection = sanitizeDirection((_runConfig_commit4 = runConfig.commit) === null || _runConfig_commit4 === void 0 ? void 0 : _runConfig_commit4.direction);
693
698
  if (userDirection) {
694
699
  logger.debug('Using user direction: %s', userDirection);
695
700
  }
@@ -698,10 +703,10 @@ const executeInternal$3 = async (runConfig)=>{
698
703
  const aiStorageAdapter = createStorageAdapter(outputDirectory);
699
704
  const aiLogger = createLoggerAdapter(isDryRun);
700
705
  // Read context from files if provided
701
- const contextFromFiles = await readContextFiles((_runConfig_commit4 = runConfig.commit) === null || _runConfig_commit4 === void 0 ? void 0 : _runConfig_commit4.contextFiles, logger);
706
+ const contextFromFiles = await readContextFiles((_runConfig_commit5 = runConfig.commit) === null || _runConfig_commit5 === void 0 ? void 0 : _runConfig_commit5.contextFiles, logger);
702
707
  // Combine file context with existing context
703
708
  const combinedContext = [
704
- (_runConfig_commit5 = runConfig.commit) === null || _runConfig_commit5 === void 0 ? void 0 : _runConfig_commit5.context,
709
+ (_runConfig_commit6 = runConfig.commit) === null || _runConfig_commit6 === void 0 ? void 0 : _runConfig_commit6.context,
705
710
  contextFromFiles
706
711
  ].filter(Boolean).join('\n\n---\n\n');
707
712
  // Define promptContext for use in interactive improvements
@@ -711,7 +716,7 @@ const executeInternal$3 = async (runConfig)=>{
711
716
  directories: runConfig.contextDirectories
712
717
  };
713
718
  // Announce self-reflection if enabled
714
- if ((_runConfig_commit6 = runConfig.commit) === null || _runConfig_commit6 === void 0 ? void 0 : _runConfig_commit6.selfReflection) {
719
+ if ((_runConfig_commit7 = runConfig.commit) === null || _runConfig_commit7 === void 0 ? void 0 : _runConfig_commit7.selfReflection) {
715
720
  logger.info('📊 Self-reflection enabled - detailed analysis will be generated');
716
721
  }
717
722
  // Get list of changed files
@@ -720,30 +725,31 @@ const executeInternal$3 = async (runConfig)=>{
720
725
  const changedFiles = changedFilesOutput.split('\n').filter((f)=>f.trim().length > 0);
721
726
  logger.debug('Changed files for analysis: %d files', changedFiles.length);
722
727
  // Run agentic commit generation
728
+ logger.info('COMMIT_AI_GENERATION: Starting AI-powered commit message generation | Model: %s | Reasoning: %s | Files: %d', ((_aiConfig_commands = aiConfig.commands) === null || _aiConfig_commands === void 0 ? void 0 : (_aiConfig_commands_commit = _aiConfig_commands.commit) === null || _aiConfig_commands_commit === void 0 ? void 0 : _aiConfig_commands_commit.model) || aiConfig.model || 'gpt-4o-mini', ((_aiConfig_commands1 = aiConfig.commands) === null || _aiConfig_commands1 === void 0 ? void 0 : (_aiConfig_commands_commit1 = _aiConfig_commands1.commit) === null || _aiConfig_commands_commit1 === void 0 ? void 0 : _aiConfig_commands_commit1.reasoning) || aiConfig.reasoning || 'low', changedFiles.length);
723
729
  const agenticResult = await runAgenticCommit({
724
730
  changedFiles,
725
731
  diffContent,
726
732
  userDirection,
727
733
  logContext,
728
- model: ((_aiConfig_commands = aiConfig.commands) === null || _aiConfig_commands === void 0 ? void 0 : (_aiConfig_commands_commit = _aiConfig_commands.commit) === null || _aiConfig_commands_commit === void 0 ? void 0 : _aiConfig_commands_commit.model) || aiConfig.model,
729
- maxIterations: ((_runConfig_commit7 = runConfig.commit) === null || _runConfig_commit7 === void 0 ? void 0 : _runConfig_commit7.maxAgenticIterations) || 10,
734
+ model: ((_aiConfig_commands2 = aiConfig.commands) === null || _aiConfig_commands2 === void 0 ? void 0 : (_aiConfig_commands_commit2 = _aiConfig_commands2.commit) === null || _aiConfig_commands_commit2 === void 0 ? void 0 : _aiConfig_commands_commit2.model) || aiConfig.model,
735
+ maxIterations: ((_runConfig_commit8 = runConfig.commit) === null || _runConfig_commit8 === void 0 ? void 0 : _runConfig_commit8.maxAgenticIterations) || 10,
730
736
  debug: runConfig.debug,
731
737
  debugRequestFile: getOutputPath(outputDirectory, getTimestampedRequestFilename('commit')),
732
738
  debugResponseFile: getOutputPath(outputDirectory, getTimestampedResponseFilename('commit')),
733
739
  storage: aiStorageAdapter,
734
740
  logger: aiLogger,
735
- openaiReasoning: ((_aiConfig_commands1 = aiConfig.commands) === null || _aiConfig_commands1 === void 0 ? void 0 : (_aiConfig_commands_commit1 = _aiConfig_commands1.commit) === null || _aiConfig_commands_commit1 === void 0 ? void 0 : _aiConfig_commands_commit1.reasoning) || aiConfig.reasoning
741
+ openaiReasoning: ((_aiConfig_commands3 = aiConfig.commands) === null || _aiConfig_commands3 === void 0 ? void 0 : (_aiConfig_commands_commit3 = _aiConfig_commands3.commit) === null || _aiConfig_commands_commit3 === void 0 ? void 0 : _aiConfig_commands_commit3.reasoning) || aiConfig.reasoning
736
742
  });
737
743
  const iterations = agenticResult.iterations || 0;
738
744
  const toolCalls = agenticResult.toolCallsExecuted || 0;
739
745
  logger.info(`🔍 Analysis complete: ${iterations} iterations, ${toolCalls} tool calls`);
740
746
  // Generate self-reflection output if enabled
741
- if ((_runConfig_commit8 = runConfig.commit) === null || _runConfig_commit8 === void 0 ? void 0 : _runConfig_commit8.selfReflection) {
747
+ if ((_runConfig_commit9 = runConfig.commit) === null || _runConfig_commit9 === void 0 ? void 0 : _runConfig_commit9.selfReflection) {
742
748
  await generateSelfReflection(agenticResult, outputDirectory, storage, logger);
743
749
  }
744
750
  // Check for suggested splits
745
- if (agenticResult.suggestedSplits.length > 1 && ((_runConfig_commit9 = runConfig.commit) === null || _runConfig_commit9 === void 0 ? void 0 : _runConfig_commit9.allowCommitSplitting)) {
746
- var _runConfig_commit18;
751
+ if (agenticResult.suggestedSplits.length > 1 && ((_runConfig_commit10 = runConfig.commit) === null || _runConfig_commit10 === void 0 ? void 0 : _runConfig_commit10.allowCommitSplitting)) {
752
+ var _runConfig_commit19;
747
753
  logger.info('\n📋 AI suggests splitting this into %d commits:', agenticResult.suggestedSplits.length);
748
754
  for(let i = 0; i < agenticResult.suggestedSplits.length; i++){
749
755
  const split = agenticResult.suggestedSplits[i];
@@ -753,9 +759,9 @@ const executeInternal$3 = async (runConfig)=>{
753
759
  logger.info(' Message: %s', split.message);
754
760
  }
755
761
  // NEW: Check if auto-split is enabled (defaults to true if not specified)
756
- const autoSplitEnabled = ((_runConfig_commit18 = runConfig.commit) === null || _runConfig_commit18 === void 0 ? void 0 : _runConfig_commit18.autoSplit) !== false; // Default to true
762
+ const autoSplitEnabled = ((_runConfig_commit19 = runConfig.commit) === null || _runConfig_commit19 === void 0 ? void 0 : _runConfig_commit19.autoSplit) !== false; // Default to true
757
763
  if (autoSplitEnabled) {
758
- var _runConfig_commit19, _runConfig_commit20;
764
+ var _runConfig_commit20, _runConfig_commit21;
759
765
  logger.info('\n🔄 Auto-split enabled - creating separate commits...\n');
760
766
  // Deduplicate files across splits to prevent staging errors
761
767
  // (AI sometimes suggests the same file in multiple splits)
@@ -766,12 +772,12 @@ const executeInternal$3 = async (runConfig)=>{
766
772
  const splitResult = await executeSplitCommits({
767
773
  splits: deduplicatedSplits,
768
774
  isDryRun,
769
- interactive: !!(((_runConfig_commit19 = runConfig.commit) === null || _runConfig_commit19 === void 0 ? void 0 : _runConfig_commit19.interactive) && !((_runConfig_commit20 = runConfig.commit) === null || _runConfig_commit20 === void 0 ? void 0 : _runConfig_commit20.sendit)),
775
+ interactive: !!(((_runConfig_commit20 = runConfig.commit) === null || _runConfig_commit20 === void 0 ? void 0 : _runConfig_commit20.interactive) && !((_runConfig_commit21 = runConfig.commit) === null || _runConfig_commit21 === void 0 ? void 0 : _runConfig_commit21.sendit)),
770
776
  logger});
771
777
  if (splitResult.success) {
772
- var _runConfig_commit21;
778
+ var _runConfig_commit22;
773
779
  // Push if requested (all commits)
774
- if (((_runConfig_commit21 = runConfig.commit) === null || _runConfig_commit21 === void 0 ? void 0 : _runConfig_commit21.push) && !isDryRun) {
780
+ if (((_runConfig_commit22 = runConfig.commit) === null || _runConfig_commit22 === void 0 ? void 0 : _runConfig_commit22.push) && !isDryRun) {
775
781
  await pushCommit(runConfig.commit.push, logger, isDryRun);
776
782
  }
777
783
  return formatSplitCommitSummary(splitResult);
@@ -796,13 +802,13 @@ const executeInternal$3 = async (runConfig)=>{
796
802
  await saveCommitMessage(outputDirectory, summary, storage, logger);
797
803
  // 🛡️ Universal Safety Check: Run before ANY commit operation
798
804
  // This protects both direct commits (--sendit) and automated commits (publish, etc.)
799
- const willCreateCommit = ((_runConfig_commit10 = runConfig.commit) === null || _runConfig_commit10 === void 0 ? void 0 : _runConfig_commit10.sendit) && hasActualChanges && cached;
800
- if (willCreateCommit && !((_runConfig_commit11 = runConfig.commit) === null || _runConfig_commit11 === void 0 ? void 0 : _runConfig_commit11.skipFileCheck) && !isDryRun) {
805
+ const willCreateCommit = ((_runConfig_commit11 = runConfig.commit) === null || _runConfig_commit11 === void 0 ? void 0 : _runConfig_commit11.sendit) && hasActualChanges && cached;
806
+ if (willCreateCommit && !((_runConfig_commit12 = runConfig.commit) === null || _runConfig_commit12 === void 0 ? void 0 : _runConfig_commit12.skipFileCheck) && !isDryRun) {
801
807
  logger.debug('Checking for file: dependencies before commit operation...');
802
808
  try {
803
809
  const fileDependencyIssues = await checkForFileDependencies$1(storage, process.cwd());
804
810
  if (fileDependencyIssues.length > 0) {
805
- var _runConfig_commit22;
811
+ var _runConfig_commit23;
806
812
  logger.error('🚫 COMMIT BLOCKED: Found file: dependencies that should not be committed!');
807
813
  logger.error('');
808
814
  logFileDependencyWarning(fileDependencyIssues, 'commit');
@@ -810,7 +816,7 @@ const executeInternal$3 = async (runConfig)=>{
810
816
  logger.error('Generated commit message was:');
811
817
  logger.error('%s', summary);
812
818
  logger.error('');
813
- if ((_runConfig_commit22 = runConfig.commit) === null || _runConfig_commit22 === void 0 ? void 0 : _runConfig_commit22.sendit) {
819
+ if ((_runConfig_commit23 = runConfig.commit) === null || _runConfig_commit23 === void 0 ? void 0 : _runConfig_commit23.sendit) {
814
820
  logger.error('To bypass this check, use: kodrdriv commit --skip-file-check --sendit');
815
821
  } else {
816
822
  logger.error('To bypass this check, add skipFileCheck: true to your commit configuration');
@@ -822,12 +828,12 @@ const executeInternal$3 = async (runConfig)=>{
822
828
  logger.warn('Warning: Could not check for file: dependencies: %s', error.message);
823
829
  logger.warn('Proceeding with commit...');
824
830
  }
825
- } else if (((_runConfig_commit12 = runConfig.commit) === null || _runConfig_commit12 === void 0 ? void 0 : _runConfig_commit12.skipFileCheck) && willCreateCommit) {
831
+ } else if (((_runConfig_commit13 = runConfig.commit) === null || _runConfig_commit13 === void 0 ? void 0 : _runConfig_commit13.skipFileCheck) && willCreateCommit) {
826
832
  logger.warn('⚠️ Skipping file: dependency check as requested');
827
833
  }
828
834
  // Handle interactive mode
829
- if (((_runConfig_commit13 = runConfig.commit) === null || _runConfig_commit13 === void 0 ? void 0 : _runConfig_commit13.interactive) && !isDryRun) {
830
- var _runConfig_commit23;
835
+ if (((_runConfig_commit14 = runConfig.commit) === null || _runConfig_commit14 === void 0 ? void 0 : _runConfig_commit14.interactive) && !isDryRun) {
836
+ var _runConfig_commit24;
831
837
  requireTTY('Interactive mode requires a terminal. Use --sendit or --dry-run instead.');
832
838
  const interactiveResult = await handleInteractiveCommitFeedback(summary, runConfig, promptConfig, promptContext, outputDirectory, storage, diffContent, hasActualChanges, cached);
833
839
  if (interactiveResult.action === 'skip') {
@@ -837,23 +843,23 @@ const executeInternal$3 = async (runConfig)=>{
837
843
  return interactiveResult.finalMessage;
838
844
  }
839
845
  // User chose to commit - check if sendit is enabled to determine what action to take
840
- const senditEnabled = (_runConfig_commit23 = runConfig.commit) === null || _runConfig_commit23 === void 0 ? void 0 : _runConfig_commit23.sendit;
846
+ const senditEnabled = (_runConfig_commit24 = runConfig.commit) === null || _runConfig_commit24 === void 0 ? void 0 : _runConfig_commit24.sendit;
841
847
  const willActuallyCommit = senditEnabled && hasActualChanges && cached;
842
848
  if (willActuallyCommit) {
843
- var _runConfig_commit24;
844
- const commitAction = ((_runConfig_commit24 = runConfig.commit) === null || _runConfig_commit24 === void 0 ? void 0 : _runConfig_commit24.amend) ? 'amending last commit' : 'committing';
849
+ var _runConfig_commit25;
850
+ const commitAction = ((_runConfig_commit25 = runConfig.commit) === null || _runConfig_commit25 === void 0 ? void 0 : _runConfig_commit25.amend) ? 'amending last commit' : 'committing';
845
851
  logger.info('SENDIT_EXECUTING: SendIt enabled, executing commit action | Action: %s | Message Length: %d | Final Message: \n\n%s\n\n', commitAction.charAt(0).toUpperCase() + commitAction.slice(1), interactiveResult.finalMessage.length, interactiveResult.finalMessage);
846
852
  try {
847
- var _runConfig_commit25, _runConfig_commit26;
853
+ var _runConfig_commit26, _runConfig_commit27;
848
854
  const validatedSummary = validateString(interactiveResult.finalMessage, 'commit summary');
849
855
  const escapedSummary = shellescape([
850
856
  validatedSummary
851
857
  ]);
852
- const commitCommand = ((_runConfig_commit25 = runConfig.commit) === null || _runConfig_commit25 === void 0 ? void 0 : _runConfig_commit25.amend) ? `git commit --amend -m ${escapedSummary}` : `git commit -m ${escapedSummary}`;
858
+ const commitCommand = ((_runConfig_commit26 = runConfig.commit) === null || _runConfig_commit26 === void 0 ? void 0 : _runConfig_commit26.amend) ? `git commit --amend -m ${escapedSummary}` : `git commit -m ${escapedSummary}`;
853
859
  await run(commitCommand);
854
860
  logger.info('COMMIT_SUCCESS: Commit operation completed successfully | Status: committed | Action: Changes saved to repository');
855
861
  // Push if requested
856
- await pushCommit((_runConfig_commit26 = runConfig.commit) === null || _runConfig_commit26 === void 0 ? void 0 : _runConfig_commit26.push, logger, isDryRun);
862
+ await pushCommit((_runConfig_commit27 = runConfig.commit) === null || _runConfig_commit27 === void 0 ? void 0 : _runConfig_commit27.push, logger, isDryRun);
857
863
  } catch (error) {
858
864
  logger.error('Failed to commit:', error);
859
865
  throw new ExternalDependencyError('Failed to create commit', 'git', error);
@@ -876,32 +882,32 @@ const executeInternal$3 = async (runConfig)=>{
876
882
  logger.debug('Skipping sendit logic because user chose to skip in interactive mode');
877
883
  return summary;
878
884
  }
879
- if ((_runConfig_commit14 = runConfig.commit) === null || _runConfig_commit14 === void 0 ? void 0 : _runConfig_commit14.sendit) {
885
+ if ((_runConfig_commit15 = runConfig.commit) === null || _runConfig_commit15 === void 0 ? void 0 : _runConfig_commit15.sendit) {
880
886
  if (isDryRun) {
881
- var _runConfig_commit27, _runConfig_commit28;
887
+ var _runConfig_commit28, _runConfig_commit29;
882
888
  logger.info('Would commit with message: \n\n%s\n\n', summary);
883
- const commitAction = ((_runConfig_commit27 = runConfig.commit) === null || _runConfig_commit27 === void 0 ? void 0 : _runConfig_commit27.amend) ? 'git commit --amend -m <generated-message>' : 'git commit -m <generated-message>';
889
+ const commitAction = ((_runConfig_commit28 = runConfig.commit) === null || _runConfig_commit28 === void 0 ? void 0 : _runConfig_commit28.amend) ? 'git commit --amend -m <generated-message>' : 'git commit -m <generated-message>';
884
890
  logger.info('Would execute: %s', commitAction);
885
891
  // Show push command in dry run if requested
886
- if ((_runConfig_commit28 = runConfig.commit) === null || _runConfig_commit28 === void 0 ? void 0 : _runConfig_commit28.push) {
892
+ if ((_runConfig_commit29 = runConfig.commit) === null || _runConfig_commit29 === void 0 ? void 0 : _runConfig_commit29.push) {
887
893
  const remote = typeof runConfig.commit.push === 'string' ? runConfig.commit.push : 'origin';
888
894
  logger.info('Would push to %s with: git push %s', remote, remote);
889
895
  }
890
896
  } else if (hasActualChanges && cached) {
891
- var _runConfig_commit29;
892
- const commitAction = ((_runConfig_commit29 = runConfig.commit) === null || _runConfig_commit29 === void 0 ? void 0 : _runConfig_commit29.amend) ? 'amending commit' : 'committing';
897
+ var _runConfig_commit30;
898
+ const commitAction = ((_runConfig_commit30 = runConfig.commit) === null || _runConfig_commit30 === void 0 ? void 0 : _runConfig_commit30.amend) ? 'amending commit' : 'committing';
893
899
  logger.info('SendIt mode enabled. %s with message: \n\n%s\n\n', commitAction.charAt(0).toUpperCase() + commitAction.slice(1), summary);
894
900
  try {
895
- var _runConfig_commit30, _runConfig_commit31;
901
+ var _runConfig_commit31, _runConfig_commit32;
896
902
  const validatedSummary = validateString(summary, 'commit summary');
897
903
  const escapedSummary = shellescape([
898
904
  validatedSummary
899
905
  ]);
900
- const commitCommand = ((_runConfig_commit30 = runConfig.commit) === null || _runConfig_commit30 === void 0 ? void 0 : _runConfig_commit30.amend) ? `git commit --amend -m ${escapedSummary}` : `git commit -m ${escapedSummary}`;
906
+ const commitCommand = ((_runConfig_commit31 = runConfig.commit) === null || _runConfig_commit31 === void 0 ? void 0 : _runConfig_commit31.amend) ? `git commit --amend -m ${escapedSummary}` : `git commit -m ${escapedSummary}`;
901
907
  await run(commitCommand);
902
908
  logger.info('Commit successful!');
903
909
  // Push if requested
904
- await pushCommit((_runConfig_commit31 = runConfig.commit) === null || _runConfig_commit31 === void 0 ? void 0 : _runConfig_commit31.push, logger, isDryRun);
910
+ await pushCommit((_runConfig_commit32 = runConfig.commit) === null || _runConfig_commit32 === void 0 ? void 0 : _runConfig_commit32.push, logger, isDryRun);
905
911
  } catch (error) {
906
912
  logger.error('Failed to commit:', error);
907
913
  throw new ExternalDependencyError('Failed to create commit', 'git', error);
@@ -939,419 +945,117 @@ const execute$4 = async (runConfig)=>{
939
945
  }
940
946
  };
941
947
 
942
- const logger = getLogger();
943
- // Cache file to store test run timestamps per package
944
- const TEST_CACHE_FILE = '.kodrdriv-test-cache.json';
945
- /**
946
- * Load test cache from disk
947
- */ async function loadTestCache(packageDir) {
948
- const cachePath = path.join(packageDir, TEST_CACHE_FILE);
949
- try {
950
- const content = await fs.readFile(cachePath, 'utf-8');
951
- return JSON.parse(content);
952
- } catch {
953
- return {};
954
- }
955
- }
956
- /**
957
- * Save test cache to disk
958
- */ async function saveTestCache(packageDir, cache) {
959
- const cachePath = path.join(packageDir, TEST_CACHE_FILE);
960
- try {
961
- await fs.writeFile(cachePath, JSON.stringify(cache, null, 2), 'utf-8');
962
- } catch (error) {
963
- logger.debug(`Failed to save test cache: ${error.message}`);
964
- }
965
- }
966
- /**
967
- * Get the current git commit hash
968
- */ async function getCurrentCommitHash(packageDir) {
969
- try {
970
- const { stdout } = await runSecure('git', [
971
- 'rev-parse',
972
- 'HEAD'
973
- ], {
974
- cwd: packageDir
948
+ function _define_property(obj, key, value) {
949
+ if (key in obj) {
950
+ Object.defineProperty(obj, key, {
951
+ value: value,
952
+ enumerable: true,
953
+ configurable: true,
954
+ writable: true
975
955
  });
976
- return stdout.trim();
977
- } catch {
978
- return null;
956
+ } else {
957
+ obj[key] = value;
979
958
  }
959
+ return obj;
980
960
  }
981
- /**
982
- * Check if source files have changed since the last test run
983
- */ async function hasSourceFilesChanged(packageDir, lastCommitHash) {
984
- if (!lastCommitHash) {
985
- return {
986
- changed: true,
987
- reason: 'No previous test run recorded'
988
- };
961
+ // Performance timing helper
962
+ class PerformanceTimer {
963
+ static start(logger, operation) {
964
+ logger.verbose(`⏱️ Starting: ${operation}`);
965
+ return new PerformanceTimer(logger);
989
966
  }
990
- try {
991
- // Get current commit hash
992
- const currentCommitHash = await getCurrentCommitHash(packageDir);
993
- if (!currentCommitHash) {
994
- return {
995
- changed: true,
996
- reason: 'Not in a git repository'
997
- };
998
- }
999
- // If commit hash changed, files definitely changed
1000
- if (currentCommitHash !== lastCommitHash) {
1001
- return {
1002
- changed: true,
1003
- reason: `Commit hash changed: ${lastCommitHash.substring(0, 7)} -> ${currentCommitHash.substring(0, 7)}`
1004
- };
1005
- }
1006
- // Check if there are any uncommitted changes to source files
1007
- const { stdout } = await runSecure('git', [
1008
- 'status',
1009
- '--porcelain'
1010
- ], {
1011
- cwd: packageDir
1012
- });
1013
- const changedFiles = stdout.split('\n').filter((line)=>line.trim()).map((line)=>line.substring(3).trim()).filter((file)=>{
1014
- // Only consider source files, not build artifacts or config files
1015
- const ext = path.extname(file);
1016
- return(// TypeScript/JavaScript source files
1017
- [
1018
- '.ts',
1019
- '.tsx',
1020
- '.js',
1021
- '.jsx'
1022
- ].includes(ext) || // Test files
1023
- file.includes('.test.') || file.includes('.spec.') || // Config files that affect build/test
1024
- [
1025
- 'tsconfig.json',
1026
- 'vite.config.ts',
1027
- 'vitest.config.ts',
1028
- 'package.json'
1029
- ].includes(path.basename(file)));
1030
- });
1031
- if (changedFiles.length > 0) {
1032
- return {
1033
- changed: true,
1034
- reason: `Uncommitted changes in: ${changedFiles.slice(0, 3).join(', ')}${changedFiles.length > 3 ? '...' : ''}`
1035
- };
1036
- }
1037
- return {
1038
- changed: false,
1039
- reason: 'No source file changes detected'
1040
- };
1041
- } catch (error) {
1042
- logger.debug(`Error checking for source file changes: ${error.message}`);
1043
- // Conservative: assume changed if we can't verify
1044
- return {
1045
- changed: true,
1046
- reason: `Could not verify changes: ${error.message}`
1047
- };
967
+ end(operation) {
968
+ const duration = Date.now() - this.startTime;
969
+ this.logger.verbose(`⏱️ Completed: ${operation} (${duration}ms)`);
970
+ return duration;
971
+ }
972
+ constructor(logger){
973
+ _define_property(this, "startTime", void 0);
974
+ _define_property(this, "logger", void 0);
975
+ this.logger = logger;
976
+ this.startTime = Date.now();
1048
977
  }
1049
978
  }
1050
- /**
1051
- * Check if dist directory needs to be cleaned (is outdated compared to source files)
1052
- */ async function isCleanNeeded(packageDir) {
1053
- const storage = createStorage();
1054
- const distPath = path.join(packageDir, 'dist');
1055
- try {
1056
- // Check if dist directory exists
1057
- const distExists = await storage.exists('dist');
1058
- if (!distExists) {
979
+ const EXCLUDED_DIRECTORIES = [
980
+ 'node_modules',
981
+ 'dist',
982
+ 'build',
983
+ 'coverage',
984
+ '.git',
985
+ '.next',
986
+ '.nuxt',
987
+ 'out',
988
+ 'public',
989
+ 'static',
990
+ 'assets'
991
+ ];
992
+ // Batch read multiple package.json files in parallel
993
+ const batchReadPackageJsonFiles = async (packageJsonPaths, storage, rootDir)=>{
994
+ const logger = getLogger();
995
+ const timer = PerformanceTimer.start(logger, `Batch reading ${packageJsonPaths.length} package.json files`);
996
+ const readPromises = packageJsonPaths.map(async (packageJsonPath)=>{
997
+ try {
998
+ const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
999
+ const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
1000
+ const packageJson = validatePackageJson(parsed, packageJsonPath, false);
1001
+ const relativePath = path.relative(rootDir, path.dirname(packageJsonPath));
1059
1002
  return {
1060
- needed: false,
1061
- reason: 'dist directory does not exist'
1003
+ path: packageJsonPath,
1004
+ packageJson,
1005
+ relativePath: relativePath || '.'
1062
1006
  };
1007
+ } catch (error) {
1008
+ logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);
1009
+ return null;
1010
+ }
1011
+ });
1012
+ const results = await Promise.all(readPromises);
1013
+ const validResults = results.filter((result)=>result !== null);
1014
+ timer.end(`Successfully read ${validResults.length}/${packageJsonPaths.length} package.json files`);
1015
+ return validResults;
1016
+ };
1017
+ // Optimized recursive package.json finder with parallel processing
1018
+ const findAllPackageJsonFiles = async (rootDir, storage)=>{
1019
+ const logger = getLogger();
1020
+ const timer = PerformanceTimer.start(logger, 'Optimized scanning for package.json files');
1021
+ const scanForPaths = async (currentDir, depth = 0)=>{
1022
+ // Prevent infinite recursion and overly deep scanning
1023
+ if (depth > 5) {
1024
+ return [];
1063
1025
  }
1064
- // Get dist directory modification time
1065
- const distStats = await fs.stat(distPath);
1066
- const distMtime = distStats.mtimeMs;
1067
- // Use git to find source files that are newer than dist
1068
1026
  try {
1069
- // Get all tracked source files
1070
- const { stdout: trackedFiles } = await runSecure('git', [
1071
- 'ls-files'
1072
- ], {
1073
- cwd: packageDir
1074
- });
1075
- const files = trackedFiles.split('\n').filter(Boolean);
1076
- // Check if any source files are newer than dist
1077
- for (const file of files){
1078
- const ext = path.extname(file);
1079
- if (![
1080
- '.ts',
1081
- '.tsx',
1082
- '.js',
1083
- '.jsx',
1084
- '.json'
1085
- ].includes(ext)) {
1086
- continue;
1087
- }
1088
- // Skip dist files
1089
- if (file.startsWith('dist/')) {
1027
+ if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {
1028
+ return [];
1029
+ }
1030
+ const items = await storage.listFiles(currentDir);
1031
+ const foundPaths = [];
1032
+ // Check for package.json in current directory
1033
+ if (items.includes('package.json')) {
1034
+ const packageJsonPath = path.join(currentDir, 'package.json');
1035
+ foundPaths.push(packageJsonPath);
1036
+ }
1037
+ // Process subdirectories in parallel
1038
+ const subdirPromises = [];
1039
+ for (const item of items){
1040
+ if (EXCLUDED_DIRECTORIES.includes(item)) {
1090
1041
  continue;
1091
1042
  }
1092
- try {
1093
- const filePath = path.join(packageDir, file);
1094
- const fileStats = await fs.stat(filePath);
1095
- if (fileStats.mtimeMs > distMtime) {
1096
- return {
1097
- needed: true,
1098
- reason: `${file} is newer than dist directory`
1099
- };
1043
+ const itemPath = path.join(currentDir, item);
1044
+ subdirPromises.push((async ()=>{
1045
+ try {
1046
+ if (await storage.isDirectory(itemPath)) {
1047
+ return await scanForPaths(itemPath, depth + 1);
1048
+ }
1049
+ } catch (error) {
1050
+ logger.debug(`Skipped directory ${itemPath}: ${error.message}`);
1100
1051
  }
1101
- } catch {
1102
- continue;
1103
- }
1104
- }
1105
- return {
1106
- needed: false,
1107
- reason: 'dist directory is up to date with source files'
1108
- };
1109
- } catch (error) {
1110
- // If git check fails, fall back to checking common source directories
1111
- logger.debug(`Git-based check failed, using fallback: ${error.message}`);
1112
- const sourceDirs = [
1113
- 'src',
1114
- 'tests'
1115
- ];
1116
- for (const dir of sourceDirs){
1117
- const dirPath = path.join(packageDir, dir);
1118
- try {
1119
- const dirStats = await fs.stat(dirPath);
1120
- if (dirStats.mtimeMs > distMtime) {
1121
- return {
1122
- needed: true,
1123
- reason: `${dir} directory is newer than dist`
1124
- };
1125
- }
1126
- } catch {
1127
- continue;
1128
- }
1129
- }
1130
- // Conservative: if we can't verify, assume clean is needed
1131
- return {
1132
- needed: true,
1133
- reason: 'Could not verify dist freshness, cleaning to be safe'
1134
- };
1135
- }
1136
- } catch (error) {
1137
- logger.debug(`Error checking if clean is needed: ${error.message}`);
1138
- // Conservative: assume clean is needed if we can't check
1139
- return {
1140
- needed: true,
1141
- reason: `Could not verify: ${error.message}`
1142
- };
1143
- }
1144
- }
1145
- /**
1146
- * Check if tests need to be run (source files changed since last test run)
1147
- */ async function isTestNeeded(packageDir) {
1148
- try {
1149
- // Load test cache
1150
- const cache = await loadTestCache(packageDir);
1151
- const cacheKey = packageDir;
1152
- // Check if we have a cached test run for this package
1153
- const cached = cache[cacheKey];
1154
- if (!cached) {
1155
- return {
1156
- needed: true,
1157
- reason: 'No previous test run recorded'
1158
- };
1159
- }
1160
- // Check if source files have changed since last test run
1161
- const changeCheck = await hasSourceFilesChanged(packageDir, cached.lastCommitHash);
1162
- if (changeCheck.changed) {
1163
- return {
1164
- needed: true,
1165
- reason: changeCheck.reason
1166
- };
1167
- }
1168
- return {
1169
- needed: false,
1170
- reason: 'No source file changes since last test run'
1171
- };
1172
- } catch (error) {
1173
- logger.debug(`Error checking if test is needed: ${error.message}`);
1174
- // Conservative: assume test is needed if we can't check
1175
- return {
1176
- needed: true,
1177
- reason: `Could not verify: ${error.message}`
1178
- };
1179
- }
1180
- }
1181
- /**
1182
- * Record that tests were run for this package
1183
- */ async function recordTestRun(packageDir) {
1184
- try {
1185
- const cache = await loadTestCache(packageDir);
1186
- const cacheKey = packageDir;
1187
- const commitHash = await getCurrentCommitHash(packageDir);
1188
- cache[cacheKey] = {
1189
- lastTestRun: Date.now(),
1190
- lastCommitHash: commitHash || 'unknown'
1191
- };
1192
- await saveTestCache(packageDir, cache);
1193
- } catch (error) {
1194
- logger.debug(`Failed to record test run: ${error.message}`);
1195
- }
1196
- }
1197
- /**
1198
- * Optimize a precommit command by skipping unnecessary steps
1199
- * Returns the optimized command and information about what was skipped
1200
- */ async function optimizePrecommitCommand(packageDir, originalCommand, options = {}) {
1201
- const { skipClean = true, skipTest = true } = options;
1202
- // Parse the original command to extract individual scripts
1203
- // Common patterns: "npm run precommit", "npm run clean && npm run build && npm run lint && npm run test"
1204
- const isPrecommitScript = originalCommand.includes('precommit') || originalCommand.includes('pre-commit');
1205
- let optimizedCommand = originalCommand;
1206
- const skipped = {
1207
- clean: false,
1208
- test: false
1209
- };
1210
- const reasons = {};
1211
- // If it's a precommit script, we need to check what it actually runs
1212
- // For now, we'll optimize the common pattern: clean && build && lint && test
1213
- if (isPrecommitScript || originalCommand.includes('clean')) {
1214
- if (skipClean) {
1215
- const cleanCheck = await isCleanNeeded(packageDir);
1216
- if (!cleanCheck.needed) {
1217
- // Remove clean from the command
1218
- optimizedCommand = optimizedCommand.replace(/npm\s+run\s+clean\s+&&\s*/g, '').replace(/npm\s+run\s+clean\s+/g, '').replace(/\s*&&\s*npm\s+run\s+clean/g, '').trim();
1219
- skipped.clean = true;
1220
- reasons.clean = cleanCheck.reason;
1221
- }
1222
- }
1223
- }
1224
- if (isPrecommitScript || originalCommand.includes('test')) {
1225
- if (skipTest) {
1226
- const testCheck = await isTestNeeded(packageDir);
1227
- if (!testCheck.needed) {
1228
- // Remove test from the command
1229
- optimizedCommand = optimizedCommand.replace(/\s*&&\s*npm\s+run\s+test\s*/g, '').replace(/\s*&&\s*npm\s+run\s+test$/g, '').replace(/npm\s+run\s+test\s+&&\s*/g, '').trim();
1230
- skipped.test = true;
1231
- reasons.test = testCheck.reason;
1232
- }
1233
- }
1234
- }
1235
- // Clean up any double && or trailing &&
1236
- optimizedCommand = optimizedCommand.replace(/\s*&&\s*&&/g, ' && ').replace(/&&\s*$/, '').trim();
1237
- return {
1238
- optimizedCommand,
1239
- skipped,
1240
- reasons
1241
- };
1242
- }
1243
-
1244
- function _define_property(obj, key, value) {
1245
- if (key in obj) {
1246
- Object.defineProperty(obj, key, {
1247
- value: value,
1248
- enumerable: true,
1249
- configurable: true,
1250
- writable: true
1251
- });
1252
- } else {
1253
- obj[key] = value;
1254
- }
1255
- return obj;
1256
- }
1257
- // Performance timing helper
1258
- class PerformanceTimer {
1259
- static start(logger, operation) {
1260
- logger.verbose(`⏱️ Starting: ${operation}`);
1261
- return new PerformanceTimer(logger);
1262
- }
1263
- end(operation) {
1264
- const duration = Date.now() - this.startTime;
1265
- this.logger.verbose(`⏱️ Completed: ${operation} (${duration}ms)`);
1266
- return duration;
1267
- }
1268
- constructor(logger){
1269
- _define_property(this, "startTime", void 0);
1270
- _define_property(this, "logger", void 0);
1271
- this.logger = logger;
1272
- this.startTime = Date.now();
1273
- }
1274
- }
1275
- const EXCLUDED_DIRECTORIES = [
1276
- 'node_modules',
1277
- 'dist',
1278
- 'build',
1279
- 'coverage',
1280
- '.git',
1281
- '.next',
1282
- '.nuxt',
1283
- 'out',
1284
- 'public',
1285
- 'static',
1286
- 'assets'
1287
- ];
1288
- // Batch read multiple package.json files in parallel
1289
- const batchReadPackageJsonFiles = async (packageJsonPaths, storage, rootDir)=>{
1290
- const logger = getLogger();
1291
- const timer = PerformanceTimer.start(logger, `Batch reading ${packageJsonPaths.length} package.json files`);
1292
- const readPromises = packageJsonPaths.map(async (packageJsonPath)=>{
1293
- try {
1294
- const packageJsonContent = await storage.readFile(packageJsonPath, 'utf-8');
1295
- const parsed = safeJsonParse(packageJsonContent, packageJsonPath);
1296
- const packageJson = validatePackageJson(parsed, packageJsonPath, false);
1297
- const relativePath = path.relative(rootDir, path.dirname(packageJsonPath));
1298
- return {
1299
- path: packageJsonPath,
1300
- packageJson,
1301
- relativePath: relativePath || '.'
1302
- };
1303
- } catch (error) {
1304
- logger.debug(`Skipped invalid package.json at ${packageJsonPath}: ${error.message}`);
1305
- return null;
1306
- }
1307
- });
1308
- const results = await Promise.all(readPromises);
1309
- const validResults = results.filter((result)=>result !== null);
1310
- timer.end(`Successfully read ${validResults.length}/${packageJsonPaths.length} package.json files`);
1311
- return validResults;
1312
- };
1313
- // Optimized recursive package.json finder with parallel processing
1314
- const findAllPackageJsonFiles = async (rootDir, storage)=>{
1315
- const logger = getLogger();
1316
- const timer = PerformanceTimer.start(logger, 'Optimized scanning for package.json files');
1317
- const scanForPaths = async (currentDir, depth = 0)=>{
1318
- // Prevent infinite recursion and overly deep scanning
1319
- if (depth > 5) {
1320
- return [];
1321
- }
1322
- try {
1323
- if (!await storage.exists(currentDir) || !await storage.isDirectory(currentDir)) {
1324
- return [];
1325
- }
1326
- const items = await storage.listFiles(currentDir);
1327
- const foundPaths = [];
1328
- // Check for package.json in current directory
1329
- if (items.includes('package.json')) {
1330
- const packageJsonPath = path.join(currentDir, 'package.json');
1331
- foundPaths.push(packageJsonPath);
1332
- }
1333
- // Process subdirectories in parallel
1334
- const subdirPromises = [];
1335
- for (const item of items){
1336
- if (EXCLUDED_DIRECTORIES.includes(item)) {
1337
- continue;
1338
- }
1339
- const itemPath = path.join(currentDir, item);
1340
- subdirPromises.push((async ()=>{
1341
- try {
1342
- if (await storage.isDirectory(itemPath)) {
1343
- return await scanForPaths(itemPath, depth + 1);
1344
- }
1345
- } catch (error) {
1346
- logger.debug(`Skipped directory ${itemPath}: ${error.message}`);
1347
- }
1348
- return [];
1349
- })());
1350
- }
1351
- if (subdirPromises.length > 0) {
1352
- const subdirResults = await Promise.all(subdirPromises);
1353
- for (const subdirPaths of subdirResults){
1354
- foundPaths.push(...subdirPaths);
1052
+ return [];
1053
+ })());
1054
+ }
1055
+ if (subdirPromises.length > 0) {
1056
+ const subdirResults = await Promise.all(subdirPromises);
1057
+ for (const subdirPaths of subdirResults){
1058
+ foundPaths.push(...subdirPaths);
1355
1059
  }
1356
1060
  }
1357
1061
  return foundPaths;
@@ -1540,87 +1244,90 @@ const checkForFileDependencies = (packageJsonFiles)=>{
1540
1244
  };
1541
1245
 
1542
1246
  /**
1543
- * Execute precommit checks: lint -> build -> test
1544
- * Skips clean step (clean should be run separately if needed)
1545
- * Uses optimization to skip steps when unchanged
1247
+ * Check if running in MCP server mode
1248
+ */ const isMcpMode = ()=>process.env.KODRDRIV_MCP_SERVER === 'true';
1249
+ /**
1250
+ * Get an MCP-aware logger that suppresses info/warn/debug output in MCP mode.
1251
+ * Errors are always logged since they indicate problems that need attention.
1252
+ */ const getMcpAwareLogger = ()=>{
1253
+ const coreLogger = getLogger();
1254
+ if (!isMcpMode()) {
1255
+ // In normal mode, just return the core logger
1256
+ return coreLogger;
1257
+ }
1258
+ // In MCP mode, wrap the logger to suppress non-error output
1259
+ return {
1260
+ info: (_message, ..._args)=>{},
1261
+ warn: (_message, ..._args)=>{},
1262
+ debug: (_message, ..._args)=>{},
1263
+ verbose: (_message, ..._args)=>{},
1264
+ silly: (_message, ..._args)=>{},
1265
+ // Always log errors - they indicate real problems
1266
+ error: (message, ...args)=>coreLogger.error(message, ...args)
1267
+ };
1268
+ };
1269
+
1270
+ /**
1271
+ * Execute precommit checks by running the package's precommit script.
1272
+ * Expects the package to have a "precommit" script in package.json.
1546
1273
  */ const execute$3 = async (runConfig)=>{
1547
- const logger = getLogger();
1274
+ var _runConfig_precommit, _packageJson_scripts;
1275
+ const logger = getMcpAwareLogger();
1548
1276
  const isDryRun = runConfig.dryRun || false;
1549
1277
  const packageDir = process.cwd();
1550
- // Default command: lint -> build -> test (no clean)
1551
- const defaultCommand = 'npm run lint && npm run build && npm run test';
1552
- // Check if package.json has a precommit script
1553
- let commandToRun = defaultCommand;
1278
+ const shouldFix = ((_runConfig_precommit = runConfig.precommit) === null || _runConfig_precommit === void 0 ? void 0 : _runConfig_precommit.fix) || false;
1279
+ // Verify precommit script exists
1280
+ const fs = await import('fs/promises');
1281
+ const packageJsonPath = path.join(packageDir, 'package.json');
1282
+ let packageName = packageDir;
1283
+ let packageJson;
1554
1284
  try {
1555
- var _packageJson_scripts;
1556
- const fs = await import('fs/promises');
1557
- const packageJsonPath = path.join(packageDir, 'package.json');
1285
+ var _packageJson_scripts1;
1558
1286
  const packageJsonContent = await fs.readFile(packageJsonPath, 'utf-8');
1559
- const packageJson = JSON.parse(packageJsonContent);
1560
- // If there's a precommit script, check what it does
1561
- if ((_packageJson_scripts = packageJson.scripts) === null || _packageJson_scripts === void 0 ? void 0 : _packageJson_scripts.precommit) {
1562
- const precommitScript = packageJson.scripts.precommit;
1563
- // If it includes clean, we'll optimize it out
1564
- // Otherwise, use the precommit script directly
1565
- if (!precommitScript.includes('clean')) {
1566
- commandToRun = `npm run precommit`;
1567
- } else {
1568
- // Use default command (lint -> build -> test) if precommit includes clean
1569
- commandToRun = defaultCommand;
1570
- }
1287
+ packageJson = JSON.parse(packageJsonContent);
1288
+ packageName = packageJson.name || packageDir;
1289
+ if (!((_packageJson_scripts1 = packageJson.scripts) === null || _packageJson_scripts1 === void 0 ? void 0 : _packageJson_scripts1.precommit)) {
1290
+ throw new Error(`Package "${packageName}" is missing a "precommit" script in package.json`);
1571
1291
  }
1572
1292
  } catch (error) {
1573
- logger.debug(`Could not read package.json, using default command: ${error.message}`);
1293
+ if (error.code === 'ENOENT') {
1294
+ throw new Error(`No package.json found at ${packageJsonPath}`);
1295
+ }
1296
+ throw error;
1574
1297
  }
1298
+ // If --fix is enabled, try to run lint --fix before precommit
1299
+ if (shouldFix && ((_packageJson_scripts = packageJson.scripts) === null || _packageJson_scripts === void 0 ? void 0 : _packageJson_scripts.lint)) {
1300
+ const lintFixCommand = 'npm run lint -- --fix';
1301
+ if (isDryRun) {
1302
+ logger.info(`DRY RUN: Would execute: ${lintFixCommand}`);
1303
+ } else {
1304
+ try {
1305
+ logger.info(`🔧 Running lint --fix before precommit checks: ${lintFixCommand}`);
1306
+ await run(lintFixCommand, {
1307
+ cwd: packageDir
1308
+ });
1309
+ logger.info(`✅ Lint fixes applied`);
1310
+ } catch (error) {
1311
+ // Log warning but continue with precommit - lint --fix may fail on some issues
1312
+ logger.warn(`⚠️ Lint --fix had issues (continuing with precommit): ${error.message}`);
1313
+ }
1314
+ }
1315
+ }
1316
+ const commandToRun = 'npm run precommit';
1575
1317
  if (isDryRun) {
1576
1318
  logger.info(`DRY RUN: Would execute: ${commandToRun}`);
1577
1319
  return `DRY RUN: Would run precommit checks: ${commandToRun}`;
1578
1320
  }
1579
- // Optimize the command (skip clean/test if unchanged)
1580
- let optimizedCommand = commandToRun;
1581
- let optimizationInfo = null;
1582
- try {
1583
- const optimization = await optimizePrecommitCommand(packageDir, commandToRun);
1584
- optimizedCommand = optimization.optimizedCommand;
1585
- optimizationInfo = {
1586
- skipped: optimization.skipped,
1587
- reasons: optimization.reasons
1588
- };
1589
- if (optimization.skipped.clean || optimization.skipped.test) {
1590
- const skippedParts = [];
1591
- if (optimization.skipped.clean) {
1592
- skippedParts.push(`clean (${optimization.reasons.clean})`);
1593
- }
1594
- if (optimization.skipped.test) {
1595
- skippedParts.push(`test (${optimization.reasons.test})`);
1596
- }
1597
- logger.info(`⚡ Optimized: Skipped ${skippedParts.join(', ')}`);
1598
- if (runConfig.verbose || runConfig.debug) {
1599
- logger.info(` Original: ${commandToRun}`);
1600
- logger.info(` Optimized: ${optimizedCommand}`);
1601
- }
1602
- }
1603
- } catch (error) {
1604
- logger.debug(`Precommit optimization failed: ${error.message}`);
1605
- }
1606
- // Execute the optimized command
1321
+ // Execute the precommit script
1607
1322
  const timer = PerformanceTimer.start(logger, 'Precommit checks');
1608
1323
  try {
1609
- logger.info(`🔧 Running precommit checks: ${optimizedCommand}`);
1610
- await run(optimizedCommand, {
1324
+ logger.info(`🔧 Running precommit checks: ${commandToRun}`);
1325
+ await run(commandToRun, {
1611
1326
  cwd: packageDir
1612
1327
  });
1613
1328
  const duration = timer.end('Precommit checks');
1614
1329
  const seconds = (duration / 1000).toFixed(1);
1615
1330
  logger.info(`✅ Precommit checks passed (${seconds}s)`);
1616
- // Record test run if tests were executed (not skipped)
1617
- if (optimizedCommand.includes('test') && (!optimizationInfo || !optimizationInfo.skipped.test)) {
1618
- try {
1619
- await recordTestRun(packageDir);
1620
- } catch (error) {
1621
- logger.debug(`Failed to record test run: ${error.message}`);
1622
- }
1623
- }
1624
1331
  return `Precommit checks completed successfully in ${seconds}s`;
1625
1332
  } catch (error) {
1626
1333
  timer.end('Precommit checks');
@@ -2963,5 +2670,307 @@ const AUTO_RESOLVABLE_PATTERNS = {
2963
2670
  }
2964
2671
  };
2965
2672
 
2673
+ const logger = getLogger();
2674
+ // Cache file to store test run timestamps per package
2675
+ const TEST_CACHE_FILE = '.kodrdriv-test-cache.json';
2676
+ /**
2677
+ * Load test cache from disk
2678
+ */ async function loadTestCache(packageDir) {
2679
+ const cachePath = path.join(packageDir, TEST_CACHE_FILE);
2680
+ try {
2681
+ const content = await fs.readFile(cachePath, 'utf-8');
2682
+ return JSON.parse(content);
2683
+ } catch {
2684
+ return {};
2685
+ }
2686
+ }
2687
+ /**
2688
+ * Save test cache to disk
2689
+ */ async function saveTestCache(packageDir, cache) {
2690
+ const cachePath = path.join(packageDir, TEST_CACHE_FILE);
2691
+ try {
2692
+ await fs.writeFile(cachePath, JSON.stringify(cache, null, 2), 'utf-8');
2693
+ } catch (error) {
2694
+ logger.debug(`Failed to save test cache: ${error.message}`);
2695
+ }
2696
+ }
2697
+ /**
2698
+ * Get the current git commit hash
2699
+ */ async function getCurrentCommitHash(packageDir) {
2700
+ try {
2701
+ const { stdout } = await runSecure('git', [
2702
+ 'rev-parse',
2703
+ 'HEAD'
2704
+ ], {
2705
+ cwd: packageDir
2706
+ });
2707
+ return stdout.trim();
2708
+ } catch {
2709
+ return null;
2710
+ }
2711
+ }
2712
+ /**
2713
+ * Check if source files have changed since the last test run
2714
+ */ async function hasSourceFilesChanged(packageDir, lastCommitHash) {
2715
+ if (!lastCommitHash) {
2716
+ return {
2717
+ changed: true,
2718
+ reason: 'No previous test run recorded'
2719
+ };
2720
+ }
2721
+ try {
2722
+ // Get current commit hash
2723
+ const currentCommitHash = await getCurrentCommitHash(packageDir);
2724
+ if (!currentCommitHash) {
2725
+ return {
2726
+ changed: true,
2727
+ reason: 'Not in a git repository'
2728
+ };
2729
+ }
2730
+ // If commit hash changed, files definitely changed
2731
+ if (currentCommitHash !== lastCommitHash) {
2732
+ return {
2733
+ changed: true,
2734
+ reason: `Commit hash changed: ${lastCommitHash.substring(0, 7)} -> ${currentCommitHash.substring(0, 7)}`
2735
+ };
2736
+ }
2737
+ // Check if there are any uncommitted changes to source files
2738
+ const { stdout } = await runSecure('git', [
2739
+ 'status',
2740
+ '--porcelain'
2741
+ ], {
2742
+ cwd: packageDir
2743
+ });
2744
+ const changedFiles = stdout.split('\n').filter((line)=>line.trim()).map((line)=>line.substring(3).trim()).filter((file)=>{
2745
+ // Only consider source files, not build artifacts or config files
2746
+ const ext = path.extname(file);
2747
+ return(// TypeScript/JavaScript source files
2748
+ [
2749
+ '.ts',
2750
+ '.tsx',
2751
+ '.js',
2752
+ '.jsx'
2753
+ ].includes(ext) || // Test files
2754
+ file.includes('.test.') || file.includes('.spec.') || // Config files that affect build/test
2755
+ [
2756
+ 'tsconfig.json',
2757
+ 'vite.config.ts',
2758
+ 'vitest.config.ts',
2759
+ 'package.json'
2760
+ ].includes(path.basename(file)));
2761
+ });
2762
+ if (changedFiles.length > 0) {
2763
+ return {
2764
+ changed: true,
2765
+ reason: `Uncommitted changes in: ${changedFiles.slice(0, 3).join(', ')}${changedFiles.length > 3 ? '...' : ''}`
2766
+ };
2767
+ }
2768
+ return {
2769
+ changed: false,
2770
+ reason: 'No source file changes detected'
2771
+ };
2772
+ } catch (error) {
2773
+ logger.debug(`Error checking for source file changes: ${error.message}`);
2774
+ // Conservative: assume changed if we can't verify
2775
+ return {
2776
+ changed: true,
2777
+ reason: `Could not verify changes: ${error.message}`
2778
+ };
2779
+ }
2780
+ }
2781
+ /**
2782
+ * Check if dist directory needs to be cleaned (is outdated compared to source files)
2783
+ */ async function isCleanNeeded(packageDir) {
2784
+ const storage = createStorage();
2785
+ const distPath = path.join(packageDir, 'dist');
2786
+ try {
2787
+ // Check if dist directory exists
2788
+ const distExists = await storage.exists('dist');
2789
+ if (!distExists) {
2790
+ return {
2791
+ needed: false,
2792
+ reason: 'dist directory does not exist'
2793
+ };
2794
+ }
2795
+ // Get dist directory modification time
2796
+ const distStats = await fs.stat(distPath);
2797
+ const distMtime = distStats.mtimeMs;
2798
+ // Use git to find source files that are newer than dist
2799
+ try {
2800
+ // Get all tracked source files
2801
+ const { stdout: trackedFiles } = await runSecure('git', [
2802
+ 'ls-files'
2803
+ ], {
2804
+ cwd: packageDir
2805
+ });
2806
+ const files = trackedFiles.split('\n').filter(Boolean);
2807
+ // Check if any source files are newer than dist
2808
+ for (const file of files){
2809
+ const ext = path.extname(file);
2810
+ if (![
2811
+ '.ts',
2812
+ '.tsx',
2813
+ '.js',
2814
+ '.jsx',
2815
+ '.json'
2816
+ ].includes(ext)) {
2817
+ continue;
2818
+ }
2819
+ // Skip dist files
2820
+ if (file.startsWith('dist/')) {
2821
+ continue;
2822
+ }
2823
+ try {
2824
+ const filePath = path.join(packageDir, file);
2825
+ const fileStats = await fs.stat(filePath);
2826
+ if (fileStats.mtimeMs > distMtime) {
2827
+ return {
2828
+ needed: true,
2829
+ reason: `${file} is newer than dist directory`
2830
+ };
2831
+ }
2832
+ } catch {
2833
+ continue;
2834
+ }
2835
+ }
2836
+ return {
2837
+ needed: false,
2838
+ reason: 'dist directory is up to date with source files'
2839
+ };
2840
+ } catch (error) {
2841
+ // If git check fails, fall back to checking common source directories
2842
+ logger.debug(`Git-based check failed, using fallback: ${error.message}`);
2843
+ const sourceDirs = [
2844
+ 'src',
2845
+ 'tests'
2846
+ ];
2847
+ for (const dir of sourceDirs){
2848
+ const dirPath = path.join(packageDir, dir);
2849
+ try {
2850
+ const dirStats = await fs.stat(dirPath);
2851
+ if (dirStats.mtimeMs > distMtime) {
2852
+ return {
2853
+ needed: true,
2854
+ reason: `${dir} directory is newer than dist`
2855
+ };
2856
+ }
2857
+ } catch {
2858
+ continue;
2859
+ }
2860
+ }
2861
+ // Conservative: if we can't verify, assume clean is needed
2862
+ return {
2863
+ needed: true,
2864
+ reason: 'Could not verify dist freshness, cleaning to be safe'
2865
+ };
2866
+ }
2867
+ } catch (error) {
2868
+ logger.debug(`Error checking if clean is needed: ${error.message}`);
2869
+ // Conservative: assume clean is needed if we can't check
2870
+ return {
2871
+ needed: true,
2872
+ reason: `Could not verify: ${error.message}`
2873
+ };
2874
+ }
2875
+ }
2876
/**
 * Check if tests need to be run (source files changed since last test run)
 */ async function isTestNeeded(packageDir) {
    try {
        // Look up the last recorded test run for this package.
        const cache = await loadTestCache(packageDir);
        const entry = cache[packageDir];
        // No cache entry means we have never seen a test run complete here.
        if (!entry) {
            return {
                needed: true,
                reason: 'No previous test run recorded'
            };
        }
        // Compare current sources against the commit hash captured at the
        // last recorded run; any change invalidates the cached result.
        const { changed, reason } = await hasSourceFilesChanged(packageDir, entry.lastCommitHash);
        if (changed) {
            return {
                needed: true,
                reason
            };
        }
        return {
            needed: false,
            reason: 'No source file changes since last test run'
        };
    } catch (error) {
        logger.debug(`Error checking if test is needed: ${error.message}`);
        // Conservative: assume test is needed if we can't check
        return {
            needed: true,
            reason: `Could not verify: ${error.message}`
        };
    }
}
2912
/**
 * Record that tests were run for this package
 */ async function recordTestRun(packageDir) {
    try {
        const cache = await loadTestCache(packageDir);
        const head = await getCurrentCommitHash(packageDir);
        // Stamp the cache entry with the run time and the commit it covers,
        // keyed by the package directory.
        cache[packageDir] = {
            lastTestRun: Date.now(),
            lastCommitHash: head || 'unknown'
        };
        await saveTestCache(packageDir, cache);
    } catch (error) {
        // Best effort: failing to record only costs a redundant test run later.
        logger.debug(`Failed to record test run: ${error.message}`);
    }
}
2928
/**
 * Optimize a precommit command by skipping unnecessary steps
 * Returns the optimized command and information about what was skipped
 *
 * @param {string} packageDir - package root used for freshness checks
 * @param {string} originalCommand - shell command the precommit step would run
 * @param {{skipClean?: boolean, skipTest?: boolean}} [options] - set either flag to false to opt out of that optimization
 * @returns {Promise<{optimizedCommand: string, skipped: {clean: boolean, test: boolean}, reasons: {clean?: string, test?: string}}>}
 */ async function optimizePrecommitCommand(packageDir, originalCommand, options = {}) {
    const { skipClean = true, skipTest = true } = options;
    // Parse the original command to extract individual scripts
    // Common patterns: "npm run precommit", "npm run clean && npm run build && npm run lint && npm run test"
    const isPrecommitScript = originalCommand.includes('precommit') || originalCommand.includes('pre-commit');
    let optimizedCommand = originalCommand;
    const skipped = {
        clean: false,
        test: false
    };
    const reasons = {};
    // Remove "npm run <script>" from a &&-chained shell command.
    // The (?![\w:-]) lookahead prevents "npm run test" from matching inside
    // a longer script name such as "npm run test:unit" (the previous regexes
    // used a bare `\s*` suffix, which matches the empty string and corrupted
    // commands like "npm run build && npm run test:unit" into "npm run build:unit").
    const stripScript = (command, script) => command
        .replace(new RegExp(`npm\\s+run\\s+${script}(?![\\w:-])\\s*&&\\s*`, 'g'), '')
        .replace(new RegExp(`\\s*&&\\s*npm\\s+run\\s+${script}(?![\\w:-])`, 'g'), '')
        .replace(new RegExp(`^\\s*npm\\s+run\\s+${script}(?![\\w:-])\\s*$`), '')
        .trim();
    // If it's a precommit script, we need to check what it actually runs
    // For now, we'll optimize the common pattern: clean && build && lint && test
    if (skipClean && (isPrecommitScript || originalCommand.includes('clean'))) {
        const cleanCheck = await isCleanNeeded(packageDir);
        if (!cleanCheck.needed) {
            optimizedCommand = stripScript(optimizedCommand, 'clean');
            skipped.clean = true;
            reasons.clean = cleanCheck.reason;
        }
    }
    if (skipTest && (isPrecommitScript || originalCommand.includes('test'))) {
        const testCheck = await isTestNeeded(packageDir);
        if (!testCheck.needed) {
            optimizedCommand = stripScript(optimizedCommand, 'test');
            skipped.test = true;
            reasons.test = testCheck.reason;
        }
    }
    // Clean up any doubled separators and dangling leading/trailing &&
    optimizedCommand = optimizedCommand.replace(/\s*&&(\s*&&)+\s*/g, ' && ').replace(/^\s*&&\s*/, '').replace(/\s*&&\s*$/, '').trim();
    return {
        optimizedCommand,
        skipped,
        reasons
    };
}
2974
+
2966
2975
// Public entry points of the bundled @eldrforge/commands-git module
// (generated output; `execute$N` are the bundler-renamed command handlers).
export { PerformanceTimer, batchReadPackageJsonFiles, checkForFileDependencies, execute$2 as clean, collectAllDependencies, execute$4 as commit, findAllPackageJsonFiles, findPackagesByScope, isCleanNeeded, isTestNeeded, optimizePrecommitCommand, execute$3 as precommit, execute as pull, recordTestRun, execute$1 as review, scanDirectoryForPackages };

//# sourceMappingURL=index.js.map