mobbdev 1.0.126 → 1.0.128

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.mjs +406 -287
  2. package/package.json +1 -1
package/dist/index.mjs CHANGED
@@ -41,7 +41,7 @@ var init_configs = __esm({
  MCP_API_KEY_HEADER_NAME = "x-mobb-key";
  MCP_LOGIN_MAX_WAIT = 2 * 60 * 1e3;
  MCP_LOGIN_CHECK_DELAY = 2 * 1e3;
- MCP_VUL_REPORT_DIGEST_TIMEOUT_MS = 5 * 60 * 1e3;
+ MCP_VUL_REPORT_DIGEST_TIMEOUT_MS = 30 * 60 * 1e3;
  MCP_MAX_FILE_SIZE = MAX_UPLOAD_FILE_SIZE_MB * 1024 * 1024;
  MCP_PERIODIC_CHECK_INTERVAL = 15 * 60 * 1e3;
  MCP_DEFAULT_MAX_FILES_TO_SCAN = 10;
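
The substantive change in this hunk is MCP_VUL_REPORT_DIGEST_TIMEOUT_MS, raised from 5 to 30 minutes. A quick sketch of the millisecond arithmetic behind these constants (the names below are illustrative copies, not the module's exports):

// Timeouts are in milliseconds: minutes * 60 (seconds per minute) * 1e3 (ms per second).
const OLD_DIGEST_TIMEOUT_MS = 5 * 60 * 1e3;  // 300000 ms  = 5 minutes
const NEW_DIGEST_TIMEOUT_MS = 30 * 60 * 1e3; // 1800000 ms = 30 minutes
console.log(NEW_DIGEST_TIMEOUT_MS / OLD_DIGEST_TIMEOUT_MS); // 6 — the digest wait is six times longer
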
@@ -564,46 +564,29 @@ var init_FileUtils = __esm({
  }
  // Process directory at repository root level with special handling for excluded root directories
  static async processRootDirectory(dir, excludedRootDirectories) {
- try {
- await fsPromises.access(dir, fs2.constants.R_OK);
- } catch {
+ const visitedDirs = /* @__PURE__ */ new Set();
+ return this.processDirectory(
+ dir,
+ dir,
+ excludedRootDirectories,
+ 0,
+ visitedDirs,
+ true
+ );
+ }
+ // Process directories with tracking to prevent circular symlink recursion
+ static async processDirectory(dir, rootDir, excludedRootDirectories = [], depth = 0, visitedDirs = /* @__PURE__ */ new Set(), isRootLevel = false) {
+ if (depth > 20) {
  return [];
  }
- const items = await fsPromises.readdir(dir);
- const results = [];
- const filePromises = [];
- for (const item of items) {
- const fullPath = path.join(dir, item);
- try {
- await fsPromises.access(fullPath, fs2.constants.R_OK);
- const stat = await fsPromises.stat(fullPath);
- if (stat.isDirectory()) {
- if (excludedRootDirectories.includes(item)) {
- continue;
- }
- filePromises.push(this.processSubdirectory(fullPath, dir, 1));
- } else {
- results.push({
- name: item,
- fullPath,
- relativePath: item,
- time: stat.mtime.getTime(),
- isFile: true
- });
- }
- } catch {
- continue;
+ let canonicalPath;
+ try {
+ canonicalPath = await fsPromises.realpath(dir);
+ if (visitedDirs.has(canonicalPath)) {
+ return [];
  }
- }
- const subdirResults = await Promise.all(filePromises);
- for (const subdirResult of subdirResults) {
- results.push(...subdirResult);
- }
- return results;
- }
- // Process subdirectories without applying root exclusions
- static async processSubdirectory(dir, rootDir, depth) {
- if (depth > 20) {
+ visitedDirs.add(canonicalPath);
+ } catch {
  return [];
  }
  try {
@@ -620,8 +603,11 @@ var init_FileUtils = __esm({
  await fsPromises.access(fullPath, fs2.constants.R_OK);
  const stat = await fsPromises.stat(fullPath);
  if (stat.isDirectory()) {
+ if (isRootLevel && excludedRootDirectories.includes(item)) {
+ continue;
+ }
  filePromises.push(
- this.processSubdirectory(fullPath, rootDir, depth + 1)
+ this.processDirectory(fullPath, rootDir, [], depth + 1, visitedDirs)
  );
  } else {
  results.push({
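
The rewritten FileUtils collapses processRootDirectory and processSubdirectory into a single processDirectory that tracks visited directories to avoid circular-symlink recursion. A minimal standalone sketch of that guard, assuming realpath-based deduplication plus a depth cap is the core of the technique (function and variable names below are illustrative, not the package's API):

import { promises as fsPromises } from "node:fs";
import path from "node:path";

// Canonicalize each directory with realpath, skip anything already visited
// (this is what breaks symlink cycles), and stop past a fixed depth.
async function walkDir(dir, depth = 0, visitedDirs = new Set()) {
  if (depth > 20) return [];
  let canonicalPath;
  try {
    canonicalPath = await fsPromises.realpath(dir); // resolves symlinks to one canonical form
    if (visitedDirs.has(canonicalPath)) return [];  // already traversed: a cycle or duplicate mount
    visitedDirs.add(canonicalPath);
  } catch {
    return []; // unreadable or dangling path: skip it
  }
  const results = [];
  for (const item of await fsPromises.readdir(dir)) {
    const fullPath = path.join(dir, item);
    try {
      const stat = await fsPromises.stat(fullPath);
      if (stat.isDirectory()) {
        results.push(...(await walkDir(fullPath, depth + 1, visitedDirs)));
      } else {
        results.push(fullPath);
      }
    } catch {
      // ignore entries that disappear or cannot be stat'ed
    }
  }
  return results;
}
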
@@ -700,25 +686,27 @@ var init_GitService = __esm({
  this.log = log2 || noopLog;
  this.git = simpleGit(repositoryPath, { binary: "git" });
  this.repositoryPath = repositoryPath;
- this.log("Git service initialized", "debug", { repositoryPath });
+ this.log("[GitService] Git service initialized", "debug", {
+ repositoryPath
+ });
  }
  /**
  * Validates that the path is a valid git repository
  */
  async validateRepository() {
- this.log("Validating git repository", "debug");
+ this.log("[GitService] Validating git repository", "debug");
  try {
  const isRepo = await this.git.checkIsRepo();
  if (!isRepo) {
- const error = "Path is not a valid git repository";
+ const error = "[GitService] Path is not a valid git repository";
  this.log(error, "error");
  return { isValid: false, error };
  }
- this.log("Git repository validation successful", "debug");
+ this.log("[GitService] Git repository validation successful", "debug");
  return { isValid: true };
  } catch (error) {
  const errorMessage = `Failed to verify git repository: ${error.message}`;
- this.log(errorMessage, "error", { error });
+ this.log(`[GitService] ${errorMessage}`, "error", { error });
  return { isValid: false, error: errorMessage };
  }
  }
@@ -726,7 +714,7 @@ var init_GitService = __esm({
  * Gets the current git status and returns changed files
  */
  async getChangedFiles() {
- this.log("Getting git status", "debug");
+ this.log("[GitService] Getting git status", "debug");
  try {
  const status = await this.git.status();
  const gitRoot = await this.git.revparse(["--show-toplevel"]);
@@ -750,7 +738,7 @@ var init_GitService = __esm({
  path2.join(gitRoot, gitRelativePath)
  );
  });
- this.log("Git status retrieved", "info", {
+ this.log("[GitService] Git status retrieved", "info", {
  fileCount: files.length,
  files: files.slice(0, 10),
  // Log first 10 files to avoid spam
@@ -763,7 +751,7 @@ var init_GitService = __esm({
  return { files, deletedFiles, status };
  } catch (error) {
  const errorMessage = `Failed to get git status: ${error.message}`;
- this.log(errorMessage, "error", { error });
+ this.log(`[GitService] ${errorMessage}`, "error", { error });
  throw new Error(errorMessage);
  }
  }
@@ -771,7 +759,7 @@ var init_GitService = __esm({
  * Gets git repository information including remote URL, current commit hash, and branch name
  */
  async getGitInfo() {
- this.log("Getting git repository information", "debug");
+ this.log("[GitService] Getting git repository information", "debug");
  try {
  const [repoUrl, hash, reference] = await Promise.all([
  this.git.getConfig("remote.origin.url"),
@@ -788,7 +776,7 @@ var init_GitService = __esm({
  "https://github.com/"
  );
  }
- this.log("Git repository information retrieved", "debug", {
+ this.log("[GitService] Git repository information retrieved", "debug", {
  repoUrl: normalizedRepoUrl,
  hash,
  reference
@@ -800,7 +788,7 @@ var init_GitService = __esm({
  };
  } catch (error) {
  const errorMessage = `Failed to get git repository information: ${error.message}`;
- this.log(errorMessage, "error", { error });
+ this.log(`[GitService] ${errorMessage}`, "error", { error });
  throw new Error(errorMessage);
  }
  }
@@ -808,7 +796,7 @@ var init_GitService = __esm({
  * Validates if a branch name is valid according to git's rules
  */
  async isValidBranchName(branchName) {
- this.log("Validating branch name", "debug", { branchName });
+ this.log("[GitService] Validating branch name", "debug", { branchName });
  try {
  const result = await this.git.raw([
  "check-ref-format",
@@ -816,13 +804,16 @@ var init_GitService = __esm({
  branchName
  ]);
  const isValid = Boolean(result);
- this.log("Branch name validation result", "debug", {
+ this.log("[GitService] Branch name validation result", "debug", {
  branchName,
  isValid
  });
  return isValid;
  } catch (error) {
- this.log("Branch name validation failed", "debug", { branchName, error });
+ this.log("[GitService] Branch name validation failed", "debug", {
+ branchName,
+ error
+ });
  return false;
  }
  }
@@ -830,14 +821,14 @@ var init_GitService = __esm({
  * Gets the current branch name
  */
  async getCurrentBranch() {
- this.log("Getting current branch name", "debug");
+ this.log("[GitService] Getting current branch name", "debug");
  try {
  const branch = await this.git.revparse(["--abbrev-ref", "HEAD"]);
- this.log("Current branch retrieved", "debug", { branch });
+ this.log("[GitService] Current branch retrieved", "debug", { branch });
  return branch;
  } catch (error) {
  const errorMessage = `Failed to get current branch: ${error.message}`;
- this.log(errorMessage, "error", { error });
+ this.log(`[GitService] ${errorMessage}`, "error", { error });
  throw new Error(errorMessage);
  }
  }
@@ -845,14 +836,14 @@ var init_GitService = __esm({
  * Gets the current commit hash
  */
  async getCurrentCommitHash() {
- this.log("Getting current commit hash", "debug");
+ this.log("[GitService] Getting current commit hash", "debug");
  try {
  const hash = await this.git.revparse(["HEAD"]);
- this.log("Current commit hash retrieved", "debug", { hash });
+ this.log("[GitService] Current commit hash retrieved", "debug", { hash });
  return hash;
  } catch (error) {
  const errorMessage = `Failed to get current commit hash: ${error.message}`;
- this.log(errorMessage, "error", { error });
+ this.log(`[GitService] ${errorMessage}`, "error", { error });
  throw new Error(errorMessage);
  }
  }
@@ -860,20 +851,24 @@ var init_GitService = __esm({
  * Gets both the current commit hash and current branch name
  */
  async getCurrentCommitAndBranch() {
- this.log("Getting current commit hash and branch", "debug");
+ this.log("[GitService] Getting current commit hash and branch", "debug");
  try {
  const [hash, branch] = await Promise.all([
  this.git.revparse(["HEAD"]),
  this.git.revparse(["--abbrev-ref", "HEAD"])
  ]);
- this.log("Current commit hash and branch retrieved", "debug", {
- hash,
- branch
- });
+ this.log(
+ "[GitService] Current commit hash and branch retrieved",
+ "debug",
+ {
+ hash,
+ branch
+ }
+ );
  return { hash, branch };
  } catch (error) {
  const errorMessage = `Failed to get current commit hash and branch: ${error.message}`;
- this.log(errorMessage, "error", { error });
+ this.log(`[GitService] ${errorMessage}`, "error", { error });
  return { hash: "", branch: "" };
  }
  }
@@ -881,7 +876,7 @@ var init_GitService = __esm({
  * Gets the remote repository URL
  */
  async getRemoteUrl() {
- this.log("Getting remote repository URL", "debug");
+ this.log("[GitService] Getting remote repository URL", "debug");
  try {
  const remoteUrl = await this.git.getConfig("remote.origin.url");
  const url = remoteUrl.value || "";
@@ -895,13 +890,13 @@ var init_GitService = __esm({
  "https://github.com/"
  );
  }
- this.log("Remote repository URL retrieved", "debug", {
+ this.log("[GitService] Remote repository URL retrieved", "debug", {
  url: normalizedUrl
  });
  return normalizedUrl;
  } catch (error) {
  const errorMessage = `Failed to get remote repository URL: ${error.message}`;
- this.log(errorMessage, "error", { error });
+ this.log(`[GitService] ${errorMessage}`, "error", { error });
  throw new Error(errorMessage);
  }
  }
@@ -912,7 +907,7 @@ var init_GitService = __esm({
  maxFiles = MCP_DEFAULT_MAX_FILES_TO_SCAN
  }) {
  this.log(
- `Getting the ${maxFiles} most recently changed files, starting with current changes`,
+ `[GitService] Getting the ${maxFiles} most recently changed files, starting with current changes`,
  "debug"
  );
  try {
@@ -933,10 +928,14 @@ var init_GitService = __esm({
  fileSet.add(file);
  }
  }
- this.log(`Added ${fileSet.size} files from current changes`, "debug", {
- filesFromCurrentChanges: fileSet.size,
- currentChangesTotal: currentChanges.files.length
- });
+ this.log(
+ `[GitService] Added ${fileSet.size} files from current changes`,
+ "debug",
+ {
+ filesFromCurrentChanges: fileSet.size,
+ currentChangesTotal: currentChanges.files.length
+ }
+ );
  const logResult = await this.git.log({
  maxCount: maxFiles * 5,
  // 5 times the max files to scan to ensure we find enough files
@@ -978,7 +977,7 @@ var init_GitService = __esm({
  path2.join(gitRoot, gitRelativePath)
  );
  }
- this.log(`Considering file: ${adjustedPath}`, "debug");
+ this.log(`[GitService] Considering file: ${adjustedPath}`, "debug");
  if (!fileSet.has(adjustedPath) && await FileUtils.shouldPackFile(
  path2.join(gitRoot, gitRelativePath)
  ) && !adjustedPath.startsWith("..")) {
@@ -986,13 +985,17 @@ var init_GitService = __esm({
  }
  }
  } catch (showError) {
- this.log(`Could not get files for commit ${commit.hash}`, "debug", {
- error: showError
- });
+ this.log(
+ `[GitService] Could not get files for commit ${commit.hash}`,
+ "debug",
+ {
+ error: showError
+ }
+ );
  }
  }
  const files = Array.from(fileSet);
- this.log("Recently changed files retrieved", "info", {
+ this.log("[GitService] Recently changed files retrieved", "info", {
  fileCount: files.length,
  commitsProcessed,
  totalCommitsAvailable: logResult.all.length,
@@ -1008,7 +1011,7 @@ var init_GitService = __esm({
  };
  } catch (error) {
  const errorMessage = `Failed to get recently changed files: ${error.message}`;
- this.log(errorMessage, "error", { error });
+ this.log(`[GitService] ${errorMessage}`, "error", { error });
  throw new Error(errorMessage);
  }
  }
@@ -1066,7 +1069,7 @@ var init_GitService = __esm({
  * Gets all remote repository URLs (equivalent to 'git remote -v')
  */
  async getRepoUrls() {
- this.log("Getting all remote repository URLs", "debug");
+ this.log("[GitService] Getting all remote repository URLs", "debug");
  try {
  const remotes = await this.git.remote(["-v"]);
  if (!remotes) {
@@ -1088,13 +1091,13 @@ var init_GitService = __esm({
  remote.push = normalizedUrl;
  }
  });
- this.log("Remote repository URLs retrieved", "debug", {
+ this.log("[GitService] Remote repository URLs retrieved", "debug", {
  remotes: remoteMap
  });
  return remoteMap;
  } catch (error) {
  const errorMessage = `Failed to get remote repository URLs: ${error.message}`;
- this.log(errorMessage, "error", { error });
+ this.log(`[GitService] ${errorMessage}`, "error", { error });
  throw new Error(errorMessage);
  }
  }
@@ -1103,7 +1106,7 @@ var init_GitService = __esm({
  * @returns The contents of the .gitignore file as a string, or null if the file doesn't exist
  */
  async getGitignoreContent() {
- this.log("Getting .gitignore contents", "debug");
+ this.log("[GitService] Getting .gitignore contents", "debug");
  try {
  let combinedContent = "";
  const localGitignorePath = path2.join(this.repositoryPath, ".gitignore");
@@ -1124,20 +1127,23 @@ ${rootContent}`;
  }
  } catch (rootErr) {
  this.log(
- "Unable to resolve git root while reading .gitignore",
+ "[GitService] Unable to resolve git root while reading .gitignore",
  "debug",
  { error: rootErr }
  );
  }
  if (combinedContent.trim() === "") {
- this.log(".gitignore file not found", "debug");
+ this.log("[GitService] .gitignore file not found", "debug");
  return null;
  }
- this.log(".gitignore contents retrieved successfully", "debug");
+ this.log(
+ "[GitService] .gitignore contents retrieved successfully",
+ "debug"
+ );
  return combinedContent.trimEnd();
  } catch (error) {
  const errorMessage = `Failed to get .gitignore contents: ${error.message}`;
- this.log(errorMessage, "error", { error });
+ this.log(`[GitService] ${errorMessage}`, "error", { error });
  return null;
  }
  }
@@ -2106,7 +2112,7 @@ var DigestVulnerabilityReportDocument = `
  }
  `;
  var SubmitVulnerabilityReportDocument = `
- mutation SubmitVulnerabilityReport($fixReportId: String!, $repoUrl: String!, $reference: String!, $projectId: String!, $scanSource: String!, $sha: String, $experimentalEnabled: Boolean, $vulnerabilityReportFileName: String, $pullRequest: Int, $isFullScan: Boolean) {
+ mutation SubmitVulnerabilityReport($fixReportId: String!, $repoUrl: String!, $reference: String!, $projectId: String!, $scanSource: String!, $sha: String, $experimentalEnabled: Boolean, $vulnerabilityReportFileName: String, $pullRequest: Int, $isFullScan: Boolean, $scanContext: String!, $fileCount: Int) {
  submitVulnerabilityReport(
  fixReportId: $fixReportId
  repoUrl: $repoUrl
@@ -2118,6 +2124,8 @@ var SubmitVulnerabilityReportDocument = `
  projectId: $projectId
  vulnerabilityReportFileName: $vulnerabilityReportFileName
  scanSource: $scanSource
+ scanContext: $scanContext
+ fileCount: $fileCount
  ) {
  __typename
  ... on VulnerabilityReport {
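
The SubmitVulnerabilityReport mutation gains a required $scanContext and an optional $fileCount. A hedged sketch of the call shape a client would use with the generated SDK, mirroring how executeSecurityScan builds its variables later in this diff (the concrete values are placeholders):

// Illustrative invocation only; the values are made up for the example.
const result = await clientSdk.SubmitVulnerabilityReport({
  fixReportId,
  repoUrl,
  reference: branchName,
  projectId,
  scanSource: "MCP",
  sha,
  isFullScan: false,
  scanContext: "USER_REQUEST", // new in 1.0.128: which flow triggered the scan
  fileCount: 42                // new in 1.0.128: number of files packed into the uploaded archive
});
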
@@ -2260,14 +2268,14 @@ var GetReportFixesDocument = `
  var GetLatestReportByRepoUrlDocument = `
  query GetLatestReportByRepoUrl($repoUrl: String!, $filters: fix_bool_exp = {}, $limit: Int!, $offset: Int!, $currentUserEmail: String!) {
  fixReport(
- where: {_and: [{repo: {originalUrl: {_eq: $repoUrl}}}, {state: {_eq: Finished}}, {vulnerabilityReport: {_or: [{vendor: {_is_null: true}}, {vendor: {_nin: [semgrep, opengrep]}}]}}]}
+ where: {_and: [{repo: {originalUrl: {_eq: $repoUrl}}}, {state: {_eq: Finished}}, {vulnerabilityReport: {scanSource: {_neq: MCP}}}]}
  order_by: {createdOn: desc}
  limit: 1
  ) {
  ...FixReportSummaryFields
  }
  expiredReport: fixReport(
- where: {_and: [{repo: {originalUrl: {_eq: $repoUrl}}}, {state: {_eq: Expired}}, {_or: [{vulnerabilityReport: {vendor: {_is_null: true}}}, {vulnerabilityReport: {vendor: {_nin: [semgrep, opengrep]}}}]}]}
+ where: {_and: [{repo: {originalUrl: {_eq: $repoUrl}}}, {state: {_eq: Expired}}, {vulnerabilityReport: {scanSource: {_neq: MCP}}}]}
  order_by: {createdOn: desc}
  limit: 1
  ) {
@@ -4564,9 +4572,7 @@ var FixRatingZ = z7.object({
  })
  });
  var IssueSharedStateZ = z7.object({
- id: z7.string().uuid(),
  isArchived: z7.boolean(),
- ticketIntegrationId: z7.string().uuid().nullable(),
  ticketIntegrations: z7.array(
  z7.object({
  url: z7.string()
@@ -4623,9 +4629,7 @@ var FixPartsForFixScreenZ = FixQueryZ.merge(
  z7.object({
  vulnerabilityReportIssues: z7.array(
  z7.object({
- vendorIssueId: z7.string(),
  issueType: z7.string(),
- issueLanguage: z7.string(),
  sharedState: IssueSharedStateZ
  })
  )
@@ -4640,7 +4644,6 @@ import { z as z8 } from "zod";
  var FixPageFixReportZ = z8.object({
  id: z8.string().uuid(),
  analysisUrl: z8.string(),
- expirationOn: z8.string(),
  createdOn: z8.string(),
  state: z8.nativeEnum(Fix_Report_State_Enum),
  repo: z8.object({
@@ -4651,48 +4654,12 @@ var FixPageFixReportZ = z8.object({
  isKnownBranch: z8.boolean().nullable()
  }),
  vulnerabilityReport: z8.object({
+ id: z8.string().uuid(),
  vendor: z8.nativeEnum(Vulnerability_Report_Vendor_Enum),
- vendorReportId: z8.string().uuid().nullable(),
  projectId: z8.string().uuid(),
  project: z8.object({
  organizationId: z8.string().uuid()
- }),
- file: z8.object({
- id: z8.string().uuid(),
- path: z8.string()
- }),
- pending: z8.object({
- aggregate: z8.object({
- count: z8.number()
- })
- }),
- supported: z8.object({
- aggregate: z8.object({
- count: z8.number()
- })
- }),
- all: z8.object({
- aggregate: z8.object({
- count: z8.number()
- })
- }),
- fixable: z8.object({
- aggregate: z8.object({
- count: z8.number()
- })
- }),
- errors: z8.object({
- aggregate: z8.object({
- count: z8.number()
- })
- }),
- vulnerabilityReportIssues: z8.object({
- extraData: z8.object({
- missing_files: z8.string().array().nullish(),
- large_files: z8.string().array().nullish(),
- error_files: z8.string().array().nullish()
- })
- }).array()
+ })
  })
  });
 
@@ -9176,6 +9143,13 @@ var mobbCliCommand = {
  convertToSarif: "convert-to-sarif",
  mcp: "mcp"
  };
+ var ScanContext = {
+ FULL_SCAN: "FULL_SCAN",
+ BACKGROUND_PERIODIC: "BACKGROUND_PERIODIC",
+ BACKGROUND_INITIAL: "BACKGROUND_INITIAL",
+ USER_REQUEST: "USER_REQUEST",
+ BUGSY: "BUGSY"
+ };
 
  // src/args/yargs.ts
  import chalk10 from "chalk";
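
The new ScanContext constant labels every scan trigger, and the rest of this diff threads that label through log prefixes and the SubmitVulnerabilityReport variables. A brief hedged sketch of the intended usage, inferred from the call sites below (repositoryPath is a placeholder):

// Bugsy/CLI flows tag reports with ScanContext.BUGSY; the MCP background scanner uses
// BACKGROUND_INITIAL, BACKGROUND_PERIODIC and FULL_SCAN; user-driven MCP tools use USER_REQUEST.
const scanContext = ScanContext.BACKGROUND_PERIODIC;
logDebug(`[${scanContext}] Triggering periodic security scan`, { path: repositoryPath });
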
@@ -10358,7 +10332,8 @@ var GQLClient = class {
  pullRequest,
  sha: sha || "",
  experimentalEnabled: !!experimentalEnabled,
- scanSource: params.scanSource
+ scanSource: params.scanSource,
+ scanContext: ScanContext.BUGSY
  });
  }
  async getFixReportState(fixReportId) {
@@ -11268,7 +11243,8 @@ async function _scan(params, { skipPrompts = false } = {}) {
  sha,
  experimentalEnabled: !!experimentalEnabled,
  pullRequest: params.pullRequest,
- scanSource: _getScanSource(command, ci)
+ scanSource: _getScanSource(command, ci),
+ scanContext: ScanContext.BUGSY
  }
  });
  if (sendReportRes.submitVulnerabilityReport.__typename !== "VulnerabilityReport") {
@@ -11429,7 +11405,8 @@ async function _scan(params, { skipPrompts = false } = {}) {
  sha: commitHash || gitInfo.hash || "0123456789abcdef",
  scanSource: _getScanSource(command, ci),
  pullRequest: params.pullRequest,
- experimentalEnabled: !!experimentalEnabled
+ experimentalEnabled: !!experimentalEnabled,
+ scanContext: ScanContext.BUGSY
  }
  });
  if (command === "review") {
@@ -11636,6 +11613,7 @@ async function analyze({
  mobbProjectName,
  organizationId,
  autoPr,
+ createOnePr,
  commitDirectly,
  pullRequest
  }, { skipPrompts = false } = {}) {
@@ -11654,7 +11632,8 @@ async function analyze({
  command: "analyze",
  autoPr,
  commitDirectly,
- pullRequest
+ pullRequest,
+ createOnePr
  },
  { skipPrompts }
  );
@@ -12109,7 +12088,9 @@ var McpGQLClient = class {
  __publicField(this, "apiUrl");
  this._auth = args;
  this.apiUrl = process.env["API_URL"] || MCP_DEFAULT_API_URL;
- logDebug(`creating graphql client with api url ${this.apiUrl}`, { args });
+ logDebug(`[GraphQL] Creating graphql client with api url ${this.apiUrl}`, {
+ args
+ });
  this.client = new GraphQLClient2(this.apiUrl, {
  headers: args.type === "apiKey" ? { [MCP_API_KEY_HEADER_NAME]: args.apiKey || "" } : {
  Authorization: `Bearer ${args.token}`
@@ -12139,15 +12120,15 @@ var McpGQLClient = class {
  }
  async isApiEndpointReachable() {
  try {
- logDebug("GraphQL: Calling Me query for API connection verification");
+ logDebug("[GraphQL] Calling Me query for API connection verification");
  const result = await this.getUserInfo();
- logDebug("GraphQL: Me query successful", { result });
+ logDebug("[GraphQL] Me query successful", { result });
  return true;
  } catch (e) {
  const error = e;
- logDebug(`API connection verification failed`, { error });
+ logDebug(`[GraphQL] API connection verification failed`, { error });
  if (error?.toString().includes("FetchError")) {
- logError("API connection verification failed", { error });
+ logError("[GraphQL] API connection verification failed", { error });
  return false;
  }
  }
@@ -12172,14 +12153,14 @@ var McpGQLClient = class {
  }
  async uploadS3BucketInfo() {
  try {
- logDebug("GraphQL: Calling uploadS3BucketInfo mutation");
+ logDebug("[GraphQL] Calling uploadS3BucketInfo mutation");
  const result = await this.clientSdk.uploadS3BucketInfo({
  fileName: "report.json"
  });
- logDebug("GraphQL: uploadS3BucketInfo successful", { result });
+ logDebug("[GraphQL] uploadS3BucketInfo successful", { result });
  return result;
  } catch (e) {
- logError("GraphQL: uploadS3BucketInfo failed", {
+ logError("[GraphQL] uploadS3BucketInfo failed", {
  error: e,
  ...this.getErrorContext()
  });
@@ -12188,17 +12169,17 @@ var McpGQLClient = class {
  }
  async getAnalysis(analysisId) {
  try {
- logDebug("GraphQL: Calling getAnalysis query", { analysisId });
+ logDebug("[GraphQL] Calling getAnalysis query", { analysisId });
  const res = await this.clientSdk.getAnalysis({
  analysisId
  });
- logDebug("GraphQL: getAnalysis successful", { result: res });
+ logDebug("[GraphQL] getAnalysis successful", { result: res });
  if (!res.analysis) {
  throw new Error(`Analysis not found: ${analysisId}`);
  }
  return res.analysis;
  } catch (e) {
- logError("GraphQL: getAnalysis failed", {
+ logError("[GraphQL] getAnalysis failed", {
  error: e,
  analysisId,
  ...this.getErrorContext()
@@ -12208,14 +12189,14 @@ var McpGQLClient = class {
  }
  async submitVulnerabilityReport(variables) {
  try {
- logDebug("GraphQL: Calling SubmitVulnerabilityReport mutation", {
+ logDebug("[GraphQL] Calling SubmitVulnerabilityReport mutation", {
  variables
  });
  const result = await this.clientSdk.SubmitVulnerabilityReport(variables);
- logDebug("GraphQL: SubmitVulnerabilityReport successful", { result });
+ logDebug("[GraphQL] SubmitVulnerabilityReport successful", { result });
  return result;
  } catch (e) {
- logError("GraphQL: SubmitVulnerabilityReport failed", {
+ logError("[GraphQL] SubmitVulnerabilityReport failed", {
  error: e,
  variables,
  ...this.getErrorContext()
@@ -12297,14 +12278,14 @@ var McpGQLClient = class {
  }
  const shortEmailHash = crypto3.createHash("sha256").update(userEmail).digest("hex").slice(0, 8).toUpperCase();
  const projectName = `MCP Scans ${shortEmailHash}`;
- logDebug("GraphQL: Calling getLastOrgAndNamedProject query", {
+ logDebug("[GraphQL] Calling getLastOrgAndNamedProject query", {
  projectName
  });
  const orgAndProjectRes = await this.clientSdk.getLastOrgAndNamedProject({
  email: userEmail,
  projectName
  });
- logDebug("GraphQL: getLastOrgAndNamedProject successful", {
+ logDebug("[GraphQL] getLastOrgAndNamedProject successful", {
  result: orgAndProjectRes
  });
  if (!orgAndProjectRes.user?.[0]?.userOrganizationsAndUserOrganizationRoles?.[0]?.organization?.id) {
@@ -12315,13 +12296,13 @@ var McpGQLClient = class {
  const organization = orgAndProjectRes.user?.[0]?.userOrganizationsAndUserOrganizationRoles?.[0]?.organization;
  const projectId = organization?.projects?.[0]?.id;
  if (projectId) {
- logDebug("GraphQL: Found existing project", {
+ logDebug("[GraphQL] Found existing project", {
  projectId,
  projectName
  });
  return projectId;
  }
- logDebug("GraphQL: Project not found, creating new project", {
+ logDebug("[GraphQL] Project not found, creating new project", {
  organizationId: organization.id,
  projectName
  });
@@ -12329,10 +12310,10 @@ var McpGQLClient = class {
  organizationId: organization.id,
  projectName
  });
- logDebug("GraphQL: CreateProject successful", { result: createdProject });
+ logDebug("[GraphQL] CreateProject successful", { result: createdProject });
  return createdProject.createProject.projectId;
  } catch (e) {
- logError("GraphQL: getProjectId failed", {
+ logError("[GraphQL] getProjectId failed", {
  error: e,
  ...this.getErrorContext()
  });
@@ -12348,14 +12329,14 @@ var McpGQLClient = class {
  return this.currentUser;
  }
  async validateUserToken() {
- logDebug("validating user token");
+ logDebug("[GraphQL] Validating user token");
  try {
  await this.clientSdk.CreateCommunityUser();
  const info = await this.getUserInfo();
- logDebug("user token validated successfully");
+ logDebug("[GraphQL] User token validated successfully");
  return info?.email || true;
  } catch (e) {
- logError("user token validation failed");
+ logError("[GraphQL] User token validation failed");
  return false;
  }
  }
@@ -12367,12 +12348,12 @@ var McpGQLClient = class {
  });
  const loginId = res.insert_cli_login_one?.id || "";
  if (!loginId) {
- logError("create cli login failed - no login ID returned");
+ logError("[GraphQL] Create cli login failed - no login ID returned");
  return "";
  }
  return loginId;
  } catch (e) {
- logError("create cli login failed", { error: e });
+ logError("[GraphQL] Create cli login failed", { error: e });
  return "";
  }
  }
@@ -12384,7 +12365,7 @@ var McpGQLClient = class {
  });
  return res?.cli_login_by_pk?.encryptedApiToken || null;
  } catch (e) {
- logError("get encrypted api token failed", { error: e });
+ logError("[GraphQL] Get encrypted api token failed", { error: e });
  return null;
  }
  }
@@ -12410,12 +12391,12 @@ var McpGQLClient = class {
  fixIds,
  source: "MCP" /* Mcp */
  });
- logDebug("GraphQL: updateFixesDownloadStatus successful", {
+ logDebug("[GraphQL] updateFixesDownloadStatus successful", {
  result: resUpdate,
  fixIds
  });
  } else {
- logDebug("GraphQL: No fixes found to update download status");
+ logDebug("[GraphQL] No fixes found to update download status");
  }
  }
  async getLatestReportByRepoUrl({
@@ -12424,7 +12405,7 @@ var McpGQLClient = class {
  offset = 0
  }) {
  try {
- logDebug("GraphQL: Calling GetLatestReportByRepoUrl query", {
+ logDebug("[GraphQL] Calling GetLatestReportByRepoUrl query", {
  repoUrl,
  limit,
  offset
@@ -12436,7 +12417,7 @@ var McpGQLClient = class {
  currentUserEmail = `%${userInfo.email}%`;
  }
  } catch (err) {
- logDebug("Failed to get user email, using default pattern", {
+ logDebug("[GraphQL] Failed to get user email, using default pattern", {
  error: err
  });
  }
@@ -12446,7 +12427,7 @@ var McpGQLClient = class {
  offset,
  currentUserEmail
  });
- logDebug("GraphQL: GetLatestReportByRepoUrl successful", {
+ logDebug("[GraphQL] GetLatestReportByRepoUrl successful", {
  result: res,
  reportCount: res.fixReport?.length || 0
  });
@@ -12461,7 +12442,7 @@ var McpGQLClient = class {
  expiredReport: res.expiredReport?.[0] || null
  };
  } catch (e) {
- logError("GraphQL: GetLatestReportByRepoUrl failed", {
+ logError("[GraphQL] GetLatestReportByRepoUrl failed", {
  error: e,
  repoUrl,
  ...this.getErrorContext()
@@ -12484,7 +12465,7 @@ var McpGQLClient = class {
  filters["severityText"] = { _in: severity };
  }
  try {
- logDebug("GraphQL: Calling GetReportFixes query", {
+ logDebug("[GraphQL] Calling GetReportFixes query", {
  reportId,
  limit,
  offset,
@@ -12499,7 +12480,7 @@ var McpGQLClient = class {
  currentUserEmail = `%${userInfo.email}%`;
  }
  } catch (err) {
- logDebug("Failed to get user email, using default pattern", {
+ logDebug("[GraphQL] Failed to get user email, using default pattern", {
  error: err
  });
  }
@@ -12510,7 +12491,7 @@ var McpGQLClient = class {
  filters,
  currentUserEmail
  });
- logDebug("GraphQL: GetReportFixes successful", {
+ logDebug("[GraphQL] GetReportFixes successful", {
  result: res,
  fixCount: res.fixReport?.[0]?.fixes?.length || 0,
  totalCount: res.fixReport?.[0]?.filteredFixesCount?.aggregate?.count || 0
@@ -12527,7 +12508,7 @@ var McpGQLClient = class {
  expiredReport: res.expiredReport?.[0] || null
  };
  } catch (e) {
- logError("GraphQL: GetReportFixes failed", {
+ logError("[GraphQL] GetReportFixes failed", {
  error: e,
  reportId,
  ...this.getErrorContext()
@@ -12539,18 +12520,20 @@
  async function createAuthenticatedMcpGQLClient({
  isBackgoundCall = false
  } = {}) {
- logDebug("getting config", { apiToken: configStore.get("apiToken") });
+ logDebug("[GraphQL] Getting config", {
+ apiToken: configStore.get("apiToken")
+ });
  const initialClient = new McpGQLClient({
  apiKey: process.env["MOBB_API_KEY"] || process.env["API_KEY"] || // fallback for backward compatibility
  configStore.get("apiToken") || "",
  type: "apiKey"
  });
  const isApiEndpointReachable = await initialClient.isApiEndpointReachable();
- logDebug("API connection status", { isApiEndpointReachable });
+ logDebug("[GraphQL] API connection status", { isApiEndpointReachable });
  if (!isApiEndpointReachable) {
  throw new ApiConnectionError("Error: failed to reach Mobb GraphQL endpoint");
  }
- logDebug("validating user token");
+ logDebug("[GraphQL] Validating user token");
  const userVerify = await initialClient.validateUserToken();
  if (userVerify) {
  return initialClient;
@@ -13331,82 +13314,128 @@ var getLocalFiles = async ({
  path: path13,
  maxFileSize = MCP_MAX_FILE_SIZE,
  maxFiles,
- isAllFilesScan
+ isAllFilesScan,
+ scanContext
  }) => {
- const resolvedRepoPath = await fs11.realpath(path13);
- const gitService = new GitService(resolvedRepoPath, log);
- const gitValidation = await gitService.validateRepository();
- let files = [];
- if (!gitValidation.isValid || isAllFilesScan) {
- logDebug(
- "Git repository validation failed, using all files in the repository",
- {
- path: path13
- }
- );
- files = await FileUtils.getLastChangedFiles({
- dir: path13,
- maxFileSize,
- maxFiles,
- isAllFilesScan
+ logDebug(`[${scanContext}] Starting getLocalFiles`, {
+ path: path13,
+ maxFileSize,
+ maxFiles,
+ isAllFilesScan
+ });
+ try {
+ const resolvedRepoPath = await fs11.realpath(path13);
+ logDebug(`[${scanContext}] Resolved repository path`, {
+ resolvedRepoPath,
+ originalPath: path13
  });
- logDebug("Found files in the repository", {
- files,
- fileCount: files.length
+ const gitService = new GitService(resolvedRepoPath, log);
+ const gitValidation = await gitService.validateRepository();
+ logDebug(`[${scanContext}] Git repository validation result`, {
+ isValid: gitValidation.isValid,
+ error: gitValidation.error,
+ isAllFilesScan
  });
- } else {
- logDebug("maxFiles", {
- maxFiles
- });
- const gitResult = await gitService.getChangedFiles();
- files = gitResult.files;
- if (files.length === 0 || maxFiles) {
- const recentResult = await gitService.getRecentlyChangedFiles({
- maxFiles
- });
- files = recentResult.files;
- logDebug(
- "No changes found, using recently changed files from git history",
- {
- files,
- fileCount: files.length,
- commitsChecked: recentResult.commitCount
- }
- );
+ let files = [];
+ if (!gitValidation.isValid || isAllFilesScan) {
+ try {
+ files = await FileUtils.getLastChangedFiles({
+ dir: path13,
+ maxFileSize,
+ maxFiles,
+ isAllFilesScan
+ });
+ logDebug(`[${scanContext}] Found files in the repository`, {
+ fileCount: files.length
+ });
+ } catch (error) {
+ logError(`${scanContext}Error getting last changed files`, {
+ error: error instanceof Error ? error.message : String(error),
+ stack: error instanceof Error ? error.stack : void 0
+ });
+ throw error;
+ }
  } else {
- logDebug("Found changed files in the git repository", {
- files,
- fileCount: files.length
- });
- }
- }
- files = files.filter(
- (file) => FileUtils.shouldPackFile(
- nodePath.resolve(resolvedRepoPath, file),
- maxFileSize
- )
- );
- const filesWithStats = await Promise.all(
- files.map(async (file) => {
- const absoluteFilePath = nodePath.resolve(resolvedRepoPath, file);
- const relativePath = nodePath.relative(resolvedRepoPath, absoluteFilePath);
- let fileStat;
  try {
- fileStat = await fs11.stat(absoluteFilePath);
- } catch (e) {
- logDebug("File not found", {
- file
+ const gitResult = await gitService.getChangedFiles();
+ files = gitResult.files;
+ if (files.length === 0 || maxFiles) {
+ logDebug(
+ `[${scanContext}] No changes found or maxFiles specified, getting recently changed files`,
+ { maxFiles }
+ );
+ const recentResult = await gitService.getRecentlyChangedFiles({
+ maxFiles
+ });
+ files = recentResult.files;
+ logDebug(
+ `[${scanContext}] Using recently changed files from git history`,
+ {
+ fileCount: files.length,
+ commitsChecked: recentResult.commitCount
+ }
+ );
+ } else {
+ logDebug(
+ `[${scanContext}] Found changed files in the git repository`,
+ {
+ fileCount: files.length
+ }
+ );
+ }
+ } catch (error) {
+ logError(`${scanContext}Error getting files from git`, {
+ error: error instanceof Error ? error.message : String(error),
+ stack: error instanceof Error ? error.stack : void 0
  });
+ throw error;
  }
- return {
- filename: nodePath.basename(absoluteFilePath),
- relativePath,
- fullPath: absoluteFilePath,
- lastEdited: fileStat?.mtime.getTime() ?? 0
- };
- })
- );
- return filesWithStats.filter((file) => file.lastEdited > 0);
+ }
+ files = files.filter((file) => {
+ const fullPath = nodePath.resolve(resolvedRepoPath, file);
+ const isPackable = FileUtils.shouldPackFile(fullPath, maxFileSize);
+ return isPackable;
+ });
+ const filesWithStats = await Promise.all(
+ files.map(async (file) => {
+ const absoluteFilePath = nodePath.resolve(resolvedRepoPath, file);
+ const relativePath = nodePath.relative(
+ resolvedRepoPath,
+ absoluteFilePath
+ );
+ try {
+ const fileStat = await fs11.stat(absoluteFilePath);
+ return {
+ filename: nodePath.basename(absoluteFilePath),
+ relativePath,
+ fullPath: absoluteFilePath,
+ lastEdited: fileStat.mtime.getTime()
+ };
+ } catch (e) {
+ logError(`[${scanContext}] Error getting file stats`, {
+ file,
+ absoluteFilePath,
+ error: e instanceof Error ? e.message : String(e)
+ });
+ return {
+ filename: nodePath.basename(absoluteFilePath),
+ relativePath,
+ fullPath: absoluteFilePath,
+ lastEdited: 0
+ };
+ }
+ })
+ );
+ const result = filesWithStats.filter((file) => file.lastEdited > 0);
+ return result;
+ } catch (error) {
+ logError(`${scanContext}Unexpected error in getLocalFiles`, {
+ error: error instanceof Error ? error.message : String(error),
+ stack: error instanceof Error ? error.stack : void 0,
+ path: path13
+ });
+ throw error;
+ }
  };
 
  // src/mcp/services/ScanFiles.ts
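
getLocalFiles now takes a scanContext and wraps each stage (path resolution, git validation, file discovery, per-file stat) in its own error handling. A hedged sketch of a call with the new option, based on the destructured parameters above (the path value is a placeholder):

const files = await getLocalFiles({
  path: "/path/to/repo",                 // repository to scan
  maxFileSize: MCP_MAX_FILE_SIZE,        // per-file size cap in bytes
  maxFiles: 10,                          // optional cap on files pulled from git history
  isAllFilesScan: false,                 // true forces a full-tree walk instead of using git status
  scanContext: ScanContext.USER_REQUEST  // new: tags every log line emitted by this scan flow
});
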
@@ -13427,7 +13456,7 @@ var FileOperations = class {
  * @returns ZIP archive as a Buffer with metadata
  */
  async createSourceCodeArchive(fileList, repositoryPath, maxFileSize) {
- logDebug("FilePacking: packing files");
+ logDebug("[FileOperations] Packing files");
  const zip = new AdmZip2();
  let packedFilesCount = 0;
  const resolvedRepoPath = path12.resolve(repositoryPath);
@@ -13436,13 +13465,13 @@ var FileOperations = class {
  const resolvedFilePath = path12.resolve(absoluteFilepath);
  if (!resolvedFilePath.startsWith(resolvedRepoPath)) {
  logDebug(
- `Skipping ${filepath} due to potential path traversal security risk`
+ `[FileOperations] Skipping ${filepath} due to potential path traversal security risk`
  );
  continue;
  }
  if (!FileUtils.shouldPackFile(absoluteFilepath, maxFileSize)) {
  logDebug(
- `Excluding ${filepath} - file is too large, binary, or matches exclusion rules`
+ `[FileOperations] Excluding ${filepath} - file is too large, binary, or matches exclusion rules`
  );
  continue;
  }
@@ -13459,7 +13488,7 @@ var FileOperations = class {
  totalSize: archiveBuffer.length
  };
  logInfo(
- `Files packed successfully ${packedFilesCount} files, ${result.totalSize} bytes`
+ `[FileOperations] Files packed successfully ${packedFilesCount} files, ${result.totalSize} bytes`
  );
  return result;
  }
@@ -13476,14 +13505,18 @@ var FileOperations = class {
  const absoluteFilepath = path12.join(repositoryPath, filepath);
  const resolvedFilePath = path12.resolve(absoluteFilepath);
  if (!resolvedFilePath.startsWith(resolvedRepoPath)) {
- logDebug(`Rejecting ${filepath} - path traversal attempt detected`);
+ logDebug(
+ `[FileOperations] Rejecting ${filepath} - path traversal attempt detected`
+ );
  continue;
  }
  try {
  await fs12.promises.access(absoluteFilepath, fs12.constants.R_OK);
  validatedPaths.push(filepath);
  } catch (error) {
- logDebug(`Skipping ${filepath} - file is not accessible: ${error}`);
+ logDebug(
+ `[FileOperations] Skipping ${filepath} - file is not accessible: ${error}`
+ );
  }
  }
  return validatedPaths;
@@ -13505,7 +13538,9 @@ var FileOperations = class {
  content
  });
  } catch (error) {
- logError(`Failed to read file ${absolutePath}: ${error}`);
+ logError(
+ `[FileOperations] Failed to read file ${absolutePath}: ${error}`
+ );
  }
  }
  return fileDataArray;
@@ -13520,7 +13555,9 @@ var FileOperations = class {
  try {
  return await fs12.promises.readFile(absoluteFilepath);
  } catch (fsError) {
- logError(`Failed to read ${relativeFilepath} from filesystem: ${fsError}`);
+ logError(
+ `[FileOperations] Failed to read ${relativeFilepath} from filesystem: ${fsError}`
+ );
  return null;
  }
  }
@@ -13562,7 +13599,8 @@ var scanFiles = async ({
  repoUrl: repoUrl || "",
  branchName: branch || "no-branch",
  sha: "0123456789abcdef",
- scanContext
+ scanContext,
+ fileCount: packingResult.packedFilesCount
  });
  return {
  fixReportId,
@@ -13635,7 +13673,8 @@ var executeSecurityScan = async ({
  repoUrl,
  branchName,
  sha,
- scanContext
+ scanContext,
+ fileCount
  }) => {
  if (!gqlClient) {
  throw new GqlClientError();
@@ -13648,7 +13687,9 @@ var executeSecurityScan = async ({
  reference: branchName,
  scanSource: "MCP" /* Mcp */,
  isFullScan: !!isAllDetectionRulesScan,
- sha
+ sha,
+ scanContext,
+ fileCount
  };
  logInfo(`[${scanContext}] Submitting vulnerability report`);
  logDebug(`[${scanContext}] Submit vulnerability report variables`, {
@@ -13677,11 +13718,64 @@
  scanContext
  });
  } catch (error) {
- logError(`[${scanContext}] Security analysis failed or timed out`, {
- error,
- analysisId
+ const errorObj = error;
+ const errorDetails = {
+ message: errorObj.message || "No error message",
+ name: errorObj.name || "Unknown error type",
+ stack: errorObj.stack,
+ analysisId,
+ timeoutMs: MCP_VUL_REPORT_DIGEST_TIMEOUT_MS,
+ isTimeoutError: errorObj.message?.includes("Timeout expired"),
+ // Safely extract additional properties from the error object
+ ...Object.getOwnPropertyNames(errorObj).filter(
+ (prop) => prop !== "message" && prop !== "name" && prop !== "stack"
+ ).reduce(
+ (acc, prop) => ({
+ ...acc,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ [prop]: errorObj[prop]
+ }),
+ {}
+ )
+ };
+ logError(
+ `[${scanContext}] Security analysis failed or timed out`,
+ errorDetails
+ );
+ logDebug(`[${scanContext}] Security scan failure context`, {
+ fixReportId,
+ projectId,
+ repoUrl,
+ branchName,
+ isAllDetectionRulesScan,
+ fileCount,
+ scanSource: "MCP" /* Mcp */,
+ subscriptionParams: { analysisId },
+ expectedCallbackState: "Finished" /* Finished */,
+ subscriptionTimeout: {
+ configuredTimeoutMs: MCP_VUL_REPORT_DIGEST_TIMEOUT_MS,
+ isTimeoutError: errorObj.message?.includes("Timeout expired")
+ }
  });
- throw new ScanError(`Security analysis failed: ${error.message}`);
+ try {
+ const analysis = await gqlClient.getAnalysis(analysisId);
+ if (analysis) {
+ logDebug(`[${scanContext}] Current analysis state at error time`, {
+ analysisId,
+ state: analysis.state,
+ failReason: analysis.failReason || "No failure reason provided",
+ // The createdAt field doesn't exist in the analysis type, include other useful properties
+ analysisObjectId: analysis.id
+ });
+ }
+ } catch (analysisError) {
+ logDebug(`[${scanContext}] Failed to get analysis state`, {
+ analysisError: analysisError.message
+ });
+ }
+ throw new ScanError(
+ `Security analysis failed: ${error.message || "Unknown error"}`
+ );
  }
  logDebug(`[${scanContext}] Security scan completed successfully`, {
  fixReportId,
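
The enlarged catch block flattens the thrown error into a plain loggable object, copying message/name/stack explicitly and then folding in any other own properties (such as response payloads attached to the error). A standalone sketch of that serialization pattern (the helper name is illustrative):

// Collect every own property of an Error into a plain object so nothing is lost in structured logs.
function serializeError(errorObj, extra = {}) {
  return {
    message: errorObj.message || "No error message",
    name: errorObj.name || "Unknown error type",
    stack: errorObj.stack,
    ...Object.getOwnPropertyNames(errorObj)
      .filter((prop) => !["message", "name", "stack"].includes(prop))
      .reduce((acc, prop) => ({ ...acc, [prop]: errorObj[prop] }), {}),
    ...extra
  };
}

// Usage sketch: logError("Security analysis failed", serializeError(err, { analysisId }));
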
@@ -13760,7 +13854,8 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  );
  const files = await getLocalFiles({
  path: path13,
- isAllFilesScan
+ isAllFilesScan,
+ scanContext
  });
  logDebug(`[${scanContext}] Active files`, { files });
  const filesToScan = files.filter((file) => {
@@ -13801,14 +13896,14 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  logInfo(
  `[${scanContext}] Security fixes retrieved, total: ${fixes?.fixes?.length || 0}, new: ${newFixes?.length || 0}`
  );
- this.updateFreshFixesCache(newFixes || [], filesToScan);
+ this.updateFreshFixesCache(newFixes || [], filesToScan, scanContext);
  this.updateFilesScanTimestamps(filesToScan);
  this.isInitialScanComplete = true;
  } catch (error) {
  const errorMessage = error.message;
  if (errorMessage.includes("Authentication failed") || errorMessage.includes("access-denied") || errorMessage.includes("Authentication hook unauthorized")) {
  logError(
- "Periodic scan skipped due to authentication failure. Please re-authenticate by running a manual scan.",
+ `[${scanContext}] Periodic scan skipped due to authentication failure. Please re-authenticate by running a manual scan.`,
  {
  error: errorMessage
  }
@@ -13816,20 +13911,28 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  return;
  }
  if (errorMessage.includes("ReportInitializationError")) {
- logError("Periodic scan failed during report initialization", {
- error: errorMessage
- });
+ logError(
+ `[${scanContext}] Periodic scan failed during report initialization`,
+ {
+ error: errorMessage
+ }
+ );
  return;
  }
- logError("Unexpected error during periodic security scan", { error });
+ logError(
+ `[${scanContext}] Unexpected error during periodic security scan`,
+ { error }
+ );
  throw error;
  }
  }
- updateFreshFixesCache(newFixes, filesToScan) {
- this.freshFixes = this.freshFixes.filter((fix) => !this.isFixFromOldScan(fix, filesToScan)).concat(newFixes).sort((a, b) => {
+ updateFreshFixesCache(newFixes, filesToScan, scanContext) {
+ this.freshFixes = this.freshFixes.filter((fix) => !this.isFixFromOldScan(fix, filesToScan, scanContext)).concat(newFixes).sort((a, b) => {
  return (b.severityValue ?? 0) - (a.severityValue ?? 0);
  });
- logInfo(`Fresh fixes cache updated, total: ${this.freshFixes.length}`);
+ logInfo(
+ `[${scanContext}] Fresh fixes cache updated, total: ${this.freshFixes.length}`
+ );
  }
  updateFilesScanTimestamps(filesToScan) {
  filesToScan.forEach((file) => {
@@ -13841,13 +13944,13 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  (reportedFix) => reportedFix.sharedState?.id === fix.sharedState?.id
  );
  }
- isFixFromOldScan(fix, filesToScan) {
+ isFixFromOldScan(fix, filesToScan, scanContext) {
  const patch = fix.patchAndQuestions?.__typename === "FixData" ? fix.patchAndQuestions.patch : void 0;
  const fixFile = extractPathFromPatch(patch);
  if (!fixFile) {
  return false;
  }
- logDebug("Checking if fix is from old scan", {
+ logDebug(`[${scanContext}] Checking if fix is from old scan`, {
  fixFile,
  filesToScan,
  isFromOldScan: filesToScan.some((file) => file.relativePath === fixFile)
@@ -13855,14 +13958,17 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  return filesToScan.some((file) => file.relativePath === fixFile);
  }
  async getFreshFixes({ path: path13 }) {
+ const scanContext = ScanContext.USER_REQUEST;
+ logDebug(`[${scanContext}] Getting fresh fixes`, { path: path13 });
  if (this.path !== path13) {
  this.path = path13;
  this.reset();
+ logInfo(`[${scanContext}] Reset service state for new path`, { path: path13 });
  }
  this.gqlClient = await createAuthenticatedMcpGQLClient();
  this.triggerScan({ path: path13, gqlClient: this.gqlClient });
  if (this.freshFixes.length > 0) {
- return this.generateFreshFixesResponse();
+ return this.generateFreshFixesResponse(scanContext);
  }
  if (!this.isInitialScanComplete) {
  return initialScanInProgressPrompt;
@@ -13881,21 +13987,27 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  }
  }
  startPeriodicScanning(path13) {
- logDebug("Starting periodic scan for new security vulnerabilities", {
- path: path13
- });
+ const scanContext = ScanContext.BACKGROUND_PERIODIC;
+ logDebug(
+ `[${scanContext}] Starting periodic scan for new security vulnerabilities`,
+ {
+ path: path13
+ }
+ );
  this.intervalId = setInterval(() => {
- logDebug("Triggering periodic security scan", { path: path13 });
+ logDebug(`[${scanContext}] Triggering periodic security scan`, { path: path13 });
  this.scanForSecurityVulnerabilities({
  path: path13,
- scanContext: "BACKGROUND_PERIODIC"
+ scanContext
  }).catch((error) => {
- logError("Error during periodic security scan", { error });
+ logError(`[${scanContext}] Error during periodic security scan`, {
+ error
+ });
  });
  }, MCP_PERIODIC_CHECK_INTERVAL);
  }
  async executeInitialFullScan(path13) {
- const scanContext = "FULL_SCAN";
+ const scanContext = ScanContext.FULL_SCAN;
  logDebug(`[${scanContext}] Triggering initial full security scan`, { path: path13 });
  logDebug(`[${scanContext}] Full scan paths scanned`, {
  fullScanPathsScanned: this.fullScanPathsScanned
@@ -13915,7 +14027,7 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  path: path13,
  isAllFilesScan: true,
  isAllDetectionRulesScan: true,
- scanContext: "FULL_SCAN"
+ scanContext: ScanContext.FULL_SCAN
  });
  if (!this.fullScanPathsScanned.includes(path13)) {
  this.fullScanPathsScanned.push(path13);
@@ -13923,25 +14035,31 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  }
  logInfo(`[${scanContext}] Full scan completed`, { path: path13 });
  } catch (error) {
- logError("Error during initial full security scan", { error });
+ logError(`[${scanContext}] Error during initial full security scan`, {
+ error
+ });
  }
  }
  executeInitialScan(path13) {
- const scanContext = "BACKGROUND_INITIAL";
+ const scanContext = ScanContext.BACKGROUND_INITIAL;
  logDebug(`[${scanContext}] Triggering initial security scan`, { path: path13 });
  this.scanForSecurityVulnerabilities({
  path: path13,
- scanContext: "BACKGROUND_INITIAL"
+ scanContext: ScanContext.BACKGROUND_INITIAL
  }).catch((error) => {
  logError(`[${scanContext}] Error during initial security scan`, { error });
  });
  }
- generateFreshFixesResponse() {
+ generateFreshFixesResponse(scanContext = ScanContext.USER_REQUEST) {
  const freshFixes = this.freshFixes.splice(0, MCP_DEFAULT_LIMIT);
  if (freshFixes.length > 0) {
+ logInfo(
+ `[${scanContext}] Reporting ${freshFixes.length} fresh fixes to user`
+ );
  this.reportedFixes.push(...freshFixes);
  return freshFixesPrompt({ fixes: freshFixes, limit: MCP_DEFAULT_LIMIT });
  }
+ logInfo(`[${scanContext}] No fresh fixes to report`);
  return noFreshFixesPrompt;
  }
  };
@@ -14245,7 +14363,7 @@ var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService
  fileList,
  repositoryPath,
  gqlClient: this.gqlClient,
- scanContext: "SCAN_AND_FIX_TOOL"
+ scanContext: ScanContext.USER_REQUEST
  });
  fixReportId = scanResult.fixReportId;
  } else {
@@ -14417,7 +14535,8 @@ Example payload:
  const files = await getLocalFiles({
  path: path13,
  maxFileSize: MCP_MAX_FILE_SIZE,
- maxFiles: args.maxFiles
+ maxFiles: args.maxFiles,
+ scanContext: ScanContext.USER_REQUEST
  });
  logDebug("Files", { files });
  if (files.length === 0) {