mobbdev 1.0.107 → 1.0.109

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.mjs +838 -397
  2. package/package.json +3 -4
package/dist/index.mjs CHANGED
@@ -540,6 +540,9 @@ var FixDetailsFragmentDoc = `
540
540
  vulnerability_report_issue_tag_value
541
541
  }
542
542
  }
543
+ sharedState {
544
+ id
545
+ }
543
546
  patchAndQuestions {
544
547
  __typename
545
548
  ... on FixData {
@@ -704,6 +707,7 @@ var GetAnalysisSubscriptionDocument = `
704
707
  analysis: fixReport_by_pk(id: $analysisId) {
705
708
  id
706
709
  state
710
+ failReason
707
711
  }
708
712
  }
709
713
  `;
@@ -712,6 +716,7 @@ var GetAnalysisDocument = `
712
716
  analysis: fixReport_by_pk(id: $analysisId) {
713
717
  id
714
718
  state
719
+ failReason
715
720
  repo {
716
721
  commitSha
717
722
  pullRequest
@@ -1091,6 +1096,13 @@ var GetReportFixesDocument = `
1091
1096
  }
1092
1097
  }
1093
1098
  ${FixReportSummaryFieldsFragmentDoc}`;
1099
+ var UpdateDownloadedFixDataDocument = `
1100
+ mutation updateDownloadedFixData($fixIds: [String!]!, $source: FixDownloadSource!) {
1101
+ updateDownloadedFixData(fixIds: $fixIds, source: $source) {
1102
+ status
1103
+ }
1104
+ }
1105
+ `;
1094
1106
  var defaultWrapper = (action, _operationName, _operationType, _variables) => action();
1095
1107
  function getSdk(client, withWrapper = defaultWrapper) {
1096
1108
  return {
@@ -1162,6 +1174,9 @@ function getSdk(client, withWrapper = defaultWrapper) {
1162
1174
  },
1163
1175
  GetReportFixes(variables, requestHeaders, signal) {
1164
1176
  return withWrapper((wrappedRequestHeaders) => client.request({ document: GetReportFixesDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "GetReportFixes", "query", variables);
1177
+ },
1178
+ updateDownloadedFixData(variables, requestHeaders, signal) {
1179
+ return withWrapper((wrappedRequestHeaders) => client.request({ document: UpdateDownloadedFixDataDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "updateDownloadedFixData", "mutation", variables);
1165
1180
  }
1166
1181
  };
1167
1182
  }
@@ -3671,6 +3686,7 @@ var ReportQueryResultZ = z11.object({
3671
3686
  createdOn: z11.string(),
3672
3687
  expirationOn: z11.string().nullable(),
3673
3688
  state: z11.nativeEnum(Fix_Report_State_Enum),
3689
+ failReason: z11.string().nullable(),
3674
3690
  fixes: z11.array(
3675
3691
  z11.object({
3676
3692
  id: z11.string().uuid(),
@@ -4444,9 +4460,15 @@ import { z as z15 } from "zod";
4444
4460
  var EnvVariablesZod = z15.object({
4445
4461
  GITLAB_API_TOKEN: z15.string().optional(),
4446
4462
  GITHUB_API_TOKEN: z15.string().optional(),
4447
- GIT_PROXY_HOST: z15.string().optional().default("http://tinyproxy:8888")
4463
+ GIT_PROXY_HOST: z15.string().optional().default("http://tinyproxy:8888"),
4464
+ MAX_UPLOAD_FILE_SIZE_MB: z15.coerce.number().gt(0).default(5)
4448
4465
  });
4449
- var { GITLAB_API_TOKEN, GITHUB_API_TOKEN, GIT_PROXY_HOST } = EnvVariablesZod.parse(process.env);
4466
+ var {
4467
+ GITLAB_API_TOKEN,
4468
+ GITHUB_API_TOKEN,
4469
+ GIT_PROXY_HOST,
4470
+ MAX_UPLOAD_FILE_SIZE_MB
4471
+ } = EnvVariablesZod.parse(process.env);
4450
4472
 
4451
4473
  // src/features/analysis/scm/ado/validation.ts
4452
4474
  import { z as z16 } from "zod";
@@ -4838,7 +4860,7 @@ async function getAdoSdk(params) {
4838
4860
  const url = new URL(repoUrl);
4839
4861
  const origin2 = url.origin.toLowerCase().endsWith(".visualstudio.com") ? DEFUALT_ADO_ORIGIN : url.origin.toLowerCase();
4840
4862
  const params2 = `path=/&versionDescriptor[versionOptions]=0&versionDescriptor[versionType]=commit&versionDescriptor[version]=${branch}&resolveLfs=true&$format=zip&api-version=5.0&download=true`;
4841
- const path14 = [
4863
+ const path13 = [
4842
4864
  prefixPath,
4843
4865
  owner,
4844
4866
  projectName,
@@ -4849,7 +4871,7 @@ async function getAdoSdk(params) {
4849
4871
  "items",
4850
4872
  "items"
4851
4873
  ].filter(Boolean).join("/");
4852
- return new URL(`${path14}?${params2}`, origin2).toString();
4874
+ return new URL(`${path13}?${params2}`, origin2).toString();
4853
4875
  },
4854
4876
  async getAdoBranchList({ repoUrl }) {
4855
4877
  try {
@@ -5076,6 +5098,18 @@ import { setTimeout as setTimeout2 } from "timers/promises";
5076
5098
  import * as path2 from "path";
5077
5099
  import { simpleGit } from "simple-git";
5078
5100
 
5101
+ // src/mcp/core/configs.ts
5102
+ var MCP_DEFAULT_API_URL = "https://api.mobb.ai/v1/graphql";
5103
+ var MCP_API_KEY_HEADER_NAME = "x-mobb-key";
5104
+ var MCP_LOGIN_MAX_WAIT = 10 * 60 * 1e3;
5105
+ var MCP_LOGIN_CHECK_DELAY = 1 * 1e3;
5106
+ var MCP_VUL_REPORT_DIGEST_TIMEOUT_MS = 5 * 60 * 1e3;
5107
+ var MCP_MAX_FILE_SIZE = MAX_UPLOAD_FILE_SIZE_MB * 1024 * 1024;
5108
+ var MCP_PERIODIC_CHECK_INTERVAL = 15 * 60 * 1e3;
5109
+ var MCP_DEFAULT_MAX_FILES_TO_SCAN = 10;
5110
+ var MCP_REPORT_ID_EXPIRATION_MS = 2 * 60 * 60 * 1e3;
5111
+ var MCP_TOOLS_BROWSER_COOLDOWN_MS = 24 * 60 * 60 * 1e3;
5112
+
5079
5113
  // src/features/analysis/scm/FileUtils.ts
5080
5114
  import fs2 from "fs";
5081
5115
  import { isBinary } from "istextorbinary";
@@ -5083,6 +5117,9 @@ import path from "path";
5083
5117
  var EXCLUDED_FILE_PATTERNS = [
5084
5118
  // ... (copy the full array from FilePacking.ts)
5085
5119
  ".json",
5120
+ ".snap",
5121
+ ".env.vault",
5122
+ ".env",
5086
5123
  ".yaml",
5087
5124
  ".yml",
5088
5125
  ".toml",
@@ -5234,16 +5271,24 @@ var FileUtils = class {
5234
5271
  }
5235
5272
  static shouldPackFile(filepath, maxFileSize = 1024 * 1024 * 5) {
5236
5273
  const absoluteFilepath = path.resolve(filepath);
5237
- if (this.isExcludedFileType(filepath)) return false;
5238
- if (!fs2.existsSync(absoluteFilepath)) return false;
5239
- if (fs2.lstatSync(absoluteFilepath).size > maxFileSize) return false;
5274
+ if (this.isExcludedFileType(filepath)) {
5275
+ return false;
5276
+ }
5277
+ if (!fs2.existsSync(absoluteFilepath)) {
5278
+ return false;
5279
+ }
5280
+ if (fs2.lstatSync(absoluteFilepath).size > maxFileSize) {
5281
+ return false;
5282
+ }
5240
5283
  let data;
5241
5284
  try {
5242
5285
  data = fs2.readFileSync(absoluteFilepath);
5243
5286
  } catch {
5244
5287
  return false;
5245
5288
  }
5246
- if (isBinary(null, data)) return false;
5289
+ if (isBinary(null, data)) {
5290
+ return false;
5291
+ }
5247
5292
  return true;
5248
5293
  }
5249
5294
  static getAllFiles(dir, rootDir) {
@@ -5253,7 +5298,7 @@ var FileUtils = class {
5253
5298
  if (relativeDepth > 20) {
5254
5299
  return [];
5255
5300
  }
5256
- if (results.length > 1e5) {
5301
+ if (results.length > 1e3) {
5257
5302
  return [];
5258
5303
  }
5259
5304
  try {
@@ -5284,10 +5329,14 @@ var FileUtils = class {
5284
5329
  }
5285
5330
  return results;
5286
5331
  }
5287
- static getLastChangedFiles(dir, maxFileSize = 1024 * 1024 * 5, count = 10) {
5332
+ static getLastChangedFiles({
5333
+ dir,
5334
+ maxFileSize,
5335
+ maxFiles = MCP_DEFAULT_MAX_FILES_TO_SCAN
5336
+ }) {
5288
5337
  if (!fs2.existsSync(dir) || !fs2.lstatSync(dir).isDirectory()) return [];
5289
5338
  const files = this.getAllFiles(dir);
5290
- return files.filter((file) => this.shouldPackFile(file.fullPath, maxFileSize)).sort((a, b) => b.time - a.time).slice(0, count).map((file) => file.relativePath);
5339
+ return files.filter((file) => this.shouldPackFile(file.fullPath, maxFileSize)).sort((a, b) => b.time - a.time).slice(0, maxFiles).map((file) => file.relativePath);
5291
5340
  }
5292
5341
  };
5293
5342
 
@@ -5336,7 +5385,10 @@ var GitService = class {
5336
5385
  gitRoot,
5337
5386
  this.repositoryPath
5338
5387
  );
5339
- const files = status.files.map((file) => {
5388
+ const deletedFiles = status.files.filter((file) => file.index === "D" || file.working_dir === "D").map((file) => file.path);
5389
+ const files = status.files.filter((file) => {
5390
+ return !(file.index === "D" || file.working_dir === "D");
5391
+ }).map((file) => {
5340
5392
  const gitRelativePath = file.path;
5341
5393
  if (relativePathFromGitRoot === "") {
5342
5394
  return gitRelativePath;
@@ -5353,11 +5405,13 @@ var GitService = class {
5353
5405
  fileCount: files.length,
5354
5406
  files: files.slice(0, 10),
5355
5407
  // Log first 10 files to avoid spam
5408
+ deletedFileCount: deletedFiles.length,
5409
+ deletedFiles: deletedFiles.slice(0, 10),
5356
5410
  gitRoot,
5357
5411
  workingDir: this.repositoryPath,
5358
5412
  relativePathFromGitRoot
5359
5413
  });
5360
- return { files, status };
5414
+ return { files, deletedFiles, status };
5361
5415
  } catch (error) {
5362
5416
  const errorMessage = `Failed to get git status: ${error.message}`;
5363
5417
  this.log(errorMessage, "error", { error });
@@ -5482,11 +5536,13 @@ var GitService = class {
5482
5536
  }
5483
5537
  }
5484
5538
  /**
5485
- * Gets the 10 most recently changed files based on commit history
5539
+ * Gets the maxFiles most recently changed files based on commit history
5486
5540
  */
5487
- async getRecentlyChangedFiles() {
5541
+ async getRecentlyChangedFiles({
5542
+ maxFiles = MCP_DEFAULT_MAX_FILES_TO_SCAN
5543
+ }) {
5488
5544
  this.log(
5489
- "Getting the 10 most recently changed files from commit history",
5545
+ `Getting the ${maxFiles} most recently changed files from commit history`,
5490
5546
  "debug"
5491
5547
  );
5492
5548
  try {
@@ -5499,8 +5555,8 @@ var GitService = class {
5499
5555
  const files = [];
5500
5556
  let commitsProcessed = 0;
5501
5557
  const logResult = await this.git.log({
5502
- maxCount: 100,
5503
- // Get last 100 commits - should be enough to find 10 unique files
5558
+ maxCount: maxFiles * 5,
5559
+ // 5 times the max files to scan to ensure we find enough files
5504
5560
  format: {
5505
5561
  hash: "%H",
5506
5562
  date: "%ai",
@@ -5510,7 +5566,7 @@ var GitService = class {
5510
5566
  }
5511
5567
  });
5512
5568
  for (const commit of logResult.all) {
5513
- if (files.length >= 10) {
5569
+ if (files.length >= maxFiles) {
5514
5570
  break;
5515
5571
  }
5516
5572
  commitsProcessed++;
@@ -5522,7 +5578,7 @@ var GitService = class {
5522
5578
  ]);
5523
5579
  const commitFiles = filesOutput.split("\n").filter((file) => file.trim() !== "");
5524
5580
  for (const file of commitFiles) {
5525
- if (files.length >= 10) {
5581
+ if (files.length >= maxFiles) {
5526
5582
  break;
5527
5583
  }
5528
5584
  const gitRelativePath = file.trim();
@@ -5540,7 +5596,7 @@ var GitService = class {
5540
5596
  );
5541
5597
  }
5542
5598
  this.log(`Considering file: ${adjustedPath}`, "debug");
5543
- if (!fileSet.has(adjustedPath) && FileUtils.shouldPackFile(path2.join(gitRoot, gitRelativePath))) {
5599
+ if (!fileSet.has(adjustedPath) && FileUtils.shouldPackFile(path2.join(gitRoot, gitRelativePath)) && !adjustedPath.startsWith("..")) {
5544
5600
  fileSet.add(adjustedPath);
5545
5601
  files.push(adjustedPath);
5546
5602
  }
@@ -5555,8 +5611,8 @@ var GitService = class {
5555
5611
  fileCount: files.length,
5556
5612
  commitsProcessed,
5557
5613
  totalCommitsAvailable: logResult.all.length,
5558
- files: files.slice(0, 10),
5559
- // Log the files (should be all of them since we limit to 10)
5614
+ files: files.slice(0, maxFiles),
5615
+ // Log the files (should be all of them since we limit to maxFiles)
5560
5616
  gitRoot,
5561
5617
  workingDir: this.repositoryPath,
5562
5618
  relativePathFromGitRoot
@@ -6887,14 +6943,14 @@ function getGithubSdk(params = {}) {
6887
6943
  };
6888
6944
  },
6889
6945
  async getGithubBlameRanges(params2) {
6890
- const { ref, gitHubUrl, path: path14 } = params2;
6946
+ const { ref, gitHubUrl, path: path13 } = params2;
6891
6947
  const { owner, repo } = parseGithubOwnerAndRepo(gitHubUrl);
6892
6948
  const res = await octokit.graphql(
6893
6949
  GET_BLAME_DOCUMENT,
6894
6950
  {
6895
6951
  owner,
6896
6952
  repo,
6897
- path: path14,
6953
+ path: path13,
6898
6954
  ref
6899
6955
  }
6900
6956
  );
@@ -7203,11 +7259,11 @@ var GithubSCMLib = class extends SCMLib {
7203
7259
  markdownComment: comment
7204
7260
  });
7205
7261
  }
7206
- async getRepoBlameRanges(ref, path14) {
7262
+ async getRepoBlameRanges(ref, path13) {
7207
7263
  this._validateUrl();
7208
7264
  return await this.githubSdk.getGithubBlameRanges({
7209
7265
  ref,
7210
- path: path14,
7266
+ path: path13,
7211
7267
  gitHubUrl: this.url
7212
7268
  });
7213
7269
  }
@@ -7613,13 +7669,13 @@ function parseGitlabOwnerAndRepo(gitlabUrl) {
7613
7669
  const { organization, repoName, projectPath } = parsingResult;
7614
7670
  return { owner: organization, repo: repoName, projectPath };
7615
7671
  }
7616
- async function getGitlabBlameRanges({ ref, gitlabUrl, path: path14 }, options) {
7672
+ async function getGitlabBlameRanges({ ref, gitlabUrl, path: path13 }, options) {
7617
7673
  const { projectPath } = parseGitlabOwnerAndRepo(gitlabUrl);
7618
7674
  const api2 = getGitBeaker({
7619
7675
  url: gitlabUrl,
7620
7676
  gitlabAuthToken: options?.gitlabAuthToken
7621
7677
  });
7622
- const resp = await api2.RepositoryFiles.allFileBlames(projectPath, path14, ref);
7678
+ const resp = await api2.RepositoryFiles.allFileBlames(projectPath, path13, ref);
7623
7679
  let lineNumber = 1;
7624
7680
  return resp.filter((range) => range.lines).map((range) => {
7625
7681
  const oldLineNumber = lineNumber;
@@ -7795,10 +7851,10 @@ var GitlabSCMLib = class extends SCMLib {
7795
7851
  markdownComment: comment
7796
7852
  });
7797
7853
  }
7798
- async getRepoBlameRanges(ref, path14) {
7854
+ async getRepoBlameRanges(ref, path13) {
7799
7855
  this._validateUrl();
7800
7856
  return await getGitlabBlameRanges(
7801
- { ref, path: path14, gitlabUrl: this.url },
7857
+ { ref, path: path13, gitlabUrl: this.url },
7802
7858
  {
7803
7859
  url: this.url,
7804
7860
  gitlabAuthToken: this.accessToken
@@ -8526,6 +8582,84 @@ import open2 from "open";
8526
8582
  import tmp2 from "tmp";
8527
8583
  import { z as z29 } from "zod";
8528
8584
 
8585
+ // src/mcp/core/Errors.ts
8586
+ var ApiConnectionError = class extends Error {
8587
+ constructor(message = "Failed to connect to the API") {
8588
+ super(message);
8589
+ this.name = "ApiConnectionError";
8590
+ }
8591
+ };
8592
+ var CliLoginError = class extends Error {
8593
+ constructor(message = "CLI login failed") {
8594
+ super(message);
8595
+ this.name = "CliLoginError";
8596
+ }
8597
+ };
8598
+ var AuthenticationError = class extends Error {
8599
+ constructor(message = "Authentication failed") {
8600
+ super(message);
8601
+ this.name = "AuthenticationError";
8602
+ }
8603
+ };
8604
+ var NoFilesError = class extends Error {
8605
+ constructor(message = "No files to fix") {
8606
+ super(message);
8607
+ this.name = "NoFilesError";
8608
+ }
8609
+ };
8610
+ var GqlClientError = class extends Error {
8611
+ constructor(message = "GraphQL client not initialized") {
8612
+ super(message);
8613
+ this.name = "GqlClientError";
8614
+ }
8615
+ };
8616
+ var FileProcessingError = class extends Error {
8617
+ constructor(message) {
8618
+ super(message);
8619
+ this.name = "FileProcessingError";
8620
+ }
8621
+ };
8622
+ var ReportInitializationError = class extends Error {
8623
+ constructor(message) {
8624
+ super(message);
8625
+ this.name = "ReportInitializationError";
8626
+ }
8627
+ };
8628
+ var FileUploadError = class extends Error {
8629
+ constructor(message) {
8630
+ super(message);
8631
+ this.name = "FileUploadError";
8632
+ }
8633
+ };
8634
+ var ScanError = class extends Error {
8635
+ constructor(message) {
8636
+ super(message);
8637
+ this.name = "ScanError";
8638
+ }
8639
+ };
8640
+ var FailedToGetApiTokenError = class extends Error {
8641
+ constructor(message) {
8642
+ super(message);
8643
+ this.name = "FailedToGetApiTokenError";
8644
+ }
8645
+ };
8646
+ var _ReportDigestError = class _ReportDigestError extends Error {
8647
+ constructor(message, failReason) {
8648
+ super(message);
8649
+ this.failReason = failReason;
8650
+ this.name = "ReportDigestError";
8651
+ this.failReason = failReason;
8652
+ }
8653
+ getDisplayMessage() {
8654
+ if (this.failReason?.trim()) {
8655
+ return `\u{1F575}\uFE0F\u200D\u2642\uFE0F Digesting report failed. ${this.failReason}`;
8656
+ }
8657
+ return _ReportDigestError.defaultMessage;
8658
+ }
8659
+ };
8660
+ __publicField(_ReportDigestError, "defaultMessage", "\u{1F575}\uFE0F\u200D\u2642\uFE0F Digesting report failed. Please verify that the file provided is of a valid supported report format.");
8661
+ var ReportDigestError = _ReportDigestError;
8662
+
8529
8663
  // src/features/analysis/add_fix_comments_for_pr/add_fix_comments_for_pr.ts
8530
8664
  import Debug8 from "debug";
8531
8665
 
@@ -8797,7 +8931,7 @@ async function postIssueComment(params) {
8797
8931
  fpDescription
8798
8932
  } = params;
8799
8933
  const {
8800
- path: path14,
8934
+ path: path13,
8801
8935
  startLine,
8802
8936
  vulnerabilityReportIssue: {
8803
8937
  vulnerabilityReportIssueTags,
@@ -8812,7 +8946,7 @@ async function postIssueComment(params) {
8812
8946
  Refresh the page in order to see the changes.`,
8813
8947
  pull_number: pullRequest,
8814
8948
  commit_id: commitSha,
8815
- path: path14,
8949
+ path: path13,
8816
8950
  line: startLine
8817
8951
  });
8818
8952
  const commentId = commentRes.data.id;
@@ -8846,7 +8980,7 @@ async function postFixComment(params) {
8846
8980
  scanner
8847
8981
  } = params;
8848
8982
  const {
8849
- path: path14,
8983
+ path: path13,
8850
8984
  startLine,
8851
8985
  vulnerabilityReportIssue: { fixId, vulnerabilityReportIssueTags, category },
8852
8986
  vulnerabilityReportIssueId
@@ -8864,7 +8998,7 @@ async function postFixComment(params) {
8864
8998
  Refresh the page in order to see the changes.`,
8865
8999
  pull_number: pullRequest,
8866
9000
  commit_id: commitSha,
8867
- path: path14,
9001
+ path: path13,
8868
9002
  line: startLine
8869
9003
  });
8870
9004
  const commentId = commentRes.data.id;
@@ -9631,7 +9765,10 @@ var GQLClient = class {
9631
9765
  params.subscribeToAnalysisParams,
9632
9766
  async (resolve, reject, data) => {
9633
9767
  if (!data.analysis?.state || data.analysis?.state === "Failed" /* Failed */) {
9634
- reject(new Error(`Analysis failed with id: ${data.analysis?.id}`));
9768
+ const errorMessage = data.analysis?.failReason || `Analysis failed with id: ${data.analysis?.id}`;
9769
+ reject(
9770
+ new ReportDigestError(errorMessage, data.analysis?.failReason ?? "")
9771
+ );
9635
9772
  return;
9636
9773
  }
9637
9774
  if (callbackStates.includes(data.analysis?.state)) {
@@ -10059,8 +10196,8 @@ async function forkSnyk(args, { display }) {
10059
10196
  }
10060
10197
  async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
10061
10198
  debug15("get snyk report start %s %s", reportPath, repoRoot);
10062
- const config5 = await forkSnyk(["config"], { display: false });
10063
- const { message: configMessage } = config5;
10199
+ const config4 = await forkSnyk(["config"], { display: false });
10200
+ const { message: configMessage } = config4;
10064
10201
  if (!configMessage.includes("api: ")) {
10065
10202
  const snykLoginSpinner = createSpinner3().start();
10066
10203
  if (!skipPrompts) {
@@ -10072,7 +10209,7 @@ async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
10072
10209
  snykLoginSpinner.update({
10073
10210
  text: "\u{1F513} Waiting for Snyk login to complete"
10074
10211
  });
10075
- debug15("no token in the config %s", config5);
10212
+ debug15("no token in the config %s", config4);
10076
10213
  await forkSnyk(["auth"], { display: true });
10077
10214
  snykLoginSpinner.success({ text: "\u{1F513} Login to Snyk Successful" });
10078
10215
  }
@@ -10742,23 +10879,19 @@ async function _digestReport({
10742
10879
  shouldScan
10743
10880
  }
10744
10881
  );
10745
- try {
10746
- await gqlClient.subscribeToAnalysis({
10747
- subscribeToAnalysisParams: {
10748
- analysisId: fixReportId
10749
- },
10750
- callback: () => digestSpinner.update({
10751
- text: progressMassages.processingVulnerabilityReportSuccess
10752
- }),
10753
- callbackStates: [
10754
- "Digested" /* Digested */,
10755
- "Finished" /* Finished */
10756
- ],
10757
- timeoutInMs: VUL_REPORT_DIGEST_TIMEOUT_MS
10758
- });
10759
- } catch (e) {
10760
- throw new Error(progressMassages.processingVulnerabilityReportFailed);
10761
- }
10882
+ await gqlClient.subscribeToAnalysis({
10883
+ subscribeToAnalysisParams: {
10884
+ analysisId: fixReportId
10885
+ },
10886
+ callback: () => digestSpinner.update({
10887
+ text: progressMassages.processingVulnerabilityReportSuccess
10888
+ }),
10889
+ callbackStates: [
10890
+ "Digested" /* Digested */,
10891
+ "Finished" /* Finished */
10892
+ ],
10893
+ timeoutInMs: VUL_REPORT_DIGEST_TIMEOUT_MS
10894
+ });
10762
10895
  const vulnFiles = await gqlClient.getVulnerabilityReportPaths(
10763
10896
  vulnerabilityReportId
10764
10897
  );
@@ -10767,8 +10900,9 @@ async function _digestReport({
10767
10900
  });
10768
10901
  return vulnFiles;
10769
10902
  } catch (e) {
10903
+ const errorMessage = e instanceof ReportDigestError ? e.getDisplayMessage() : ReportDigestError.defaultMessage;
10770
10904
  digestSpinner.error({
10771
- text: "\u{1F575}\uFE0F\u200D\u2642\uFE0F Digesting report failed. Please verify that the file provided is of a valid supported report format."
10905
+ text: errorMessage
10772
10906
  });
10773
10907
  throw e;
10774
10908
  }
@@ -11288,89 +11422,17 @@ import Configstore3 from "configstore";
11288
11422
  import { GraphQLClient as GraphQLClient2 } from "graphql-request";
11289
11423
  import open4 from "open";
11290
11424
  import { v4 as uuidv42 } from "uuid";
11291
-
11292
- // src/mcp/constants.ts
11293
- var DEFAULT_API_URL2 = "https://api.mobb.ai/v1/graphql";
11294
- var API_KEY_HEADER_NAME2 = "x-mobb-key";
11295
-
11296
- // src/mcp/core/Errors.ts
11297
- var ApiConnectionError = class extends Error {
11298
- constructor(message = "Failed to connect to the API") {
11299
- super(message);
11300
- this.name = "ApiConnectionError";
11301
- }
11302
- };
11303
- var CliLoginError = class extends Error {
11304
- constructor(message = "CLI login failed") {
11305
- super(message);
11306
- this.name = "CliLoginError";
11307
- }
11308
- };
11309
- var AuthenticationError = class extends Error {
11310
- constructor(message = "Authentication failed") {
11311
- super(message);
11312
- this.name = "AuthenticationError";
11313
- }
11314
- };
11315
- var NoFilesError = class extends Error {
11316
- constructor(message = "No files to fix") {
11317
- super(message);
11318
- this.name = "NoFilesError";
11319
- }
11320
- };
11321
- var GqlClientError = class extends Error {
11322
- constructor(message = "GraphQL client not initialized") {
11323
- super(message);
11324
- this.name = "GqlClientError";
11325
- }
11326
- };
11327
- var FileProcessingError = class extends Error {
11328
- constructor(message) {
11329
- super(message);
11330
- this.name = "FileProcessingError";
11331
- }
11332
- };
11333
- var ReportInitializationError = class extends Error {
11334
- constructor(message) {
11335
- super(message);
11336
- this.name = "ReportInitializationError";
11337
- }
11338
- };
11339
- var FileUploadError = class extends Error {
11340
- constructor(message) {
11341
- super(message);
11342
- this.name = "FileUploadError";
11343
- }
11344
- };
11345
- var ScanError = class extends Error {
11346
- constructor(message) {
11347
- super(message);
11348
- this.name = "ScanError";
11349
- }
11350
- };
11351
- var FailedToGetApiTokenError = class extends Error {
11352
- constructor(message) {
11353
- super(message);
11354
- this.name = "FailedToGetApiTokenError";
11355
- }
11356
- };
11357
-
11358
- // src/mcp/services/McpGQLClient.ts
11359
- var LOGIN_MAX_WAIT2 = 10 * 1e3;
11360
- var LOGIN_CHECK_DELAY2 = 1 * 1e3;
11361
- var config4 = new Configstore3(packageJson.name, { apiToken: "" });
11362
- var BROWSER_COOLDOWN_MS = 5e3;
11363
- var lastBrowserOpenTime = 0;
11425
+ var mobbConfigStore = new Configstore3(packageJson.name, { apiToken: "" });
11364
11426
  var McpGQLClient = class {
11365
11427
  constructor(args) {
11366
11428
  __publicField(this, "client");
11367
11429
  __publicField(this, "clientSdk");
11368
11430
  __publicField(this, "_auth");
11369
11431
  this._auth = args;
11370
- const API_URL2 = process.env["API_URL"] || DEFAULT_API_URL2;
11432
+ const API_URL2 = process.env["API_URL"] || MCP_DEFAULT_API_URL;
11371
11433
  logDebug("creating graphql client", { API_URL: API_URL2, args });
11372
11434
  this.client = new GraphQLClient2(API_URL2, {
11373
- headers: args.type === "apiKey" ? { [API_KEY_HEADER_NAME2]: args.apiKey || "" } : {
11435
+ headers: args.type === "apiKey" ? { [MCP_API_KEY_HEADER_NAME]: args.apiKey || "" } : {
11374
11436
  Authorization: `Bearer ${args.token}`
11375
11437
  },
11376
11438
  requestMiddleware: (request) => {
@@ -11388,10 +11450,10 @@ var McpGQLClient = class {
11388
11450
  }
11389
11451
  getErrorContext() {
11390
11452
  return {
11391
- endpoint: process.env["API_URL"] || DEFAULT_API_URL2,
11453
+ endpoint: process.env["API_URL"] || MCP_DEFAULT_API_URL,
11392
11454
  apiKey: this._auth.type === "apiKey" ? this._auth.apiKey : "",
11393
11455
  headers: {
11394
- [API_KEY_HEADER_NAME2]: this._auth.type === "apiKey" ? "[REDACTED]" : "undefined",
11456
+ [MCP_API_KEY_HEADER_NAME]: this._auth.type === "apiKey" ? "[REDACTED]" : "undefined",
11395
11457
  "x-hasura-request-id": "[DYNAMIC]"
11396
11458
  }
11397
11459
  };
@@ -11477,12 +11539,14 @@ var McpGQLClient = class {
11477
11539
  async (resolve, reject, data) => {
11478
11540
  logDebug("GraphQL: GetAnalysis subscription data received", { data });
11479
11541
  if (!data.analysis?.state || data.analysis?.state === "Failed" /* Failed */) {
11542
+ const errorMessage = data.analysis?.failReason || `Analysis failed with id: ${data.analysis?.id}`;
11480
11543
  logError("GraphQL: Analysis failed", {
11481
11544
  analysisId: data.analysis?.id,
11482
11545
  state: data.analysis?.state,
11546
+ failReason: data.analysis?.failReason,
11483
11547
  ...this.getErrorContext()
11484
11548
  });
11485
- reject(new Error(`Analysis failed with id: ${data.analysis?.id}`));
11549
+ reject(new Error(errorMessage));
11486
11550
  return;
11487
11551
  }
11488
11552
  if (callbackStates.includes(data.analysis?.state)) {
@@ -11518,7 +11582,16 @@ var McpGQLClient = class {
11518
11582
  }
11519
11583
  async getProjectId() {
11520
11584
  try {
11521
- const projectName = "MCP Scans";
11585
+ const me = await this.getUserInfo();
11586
+ if (!me) {
11587
+ throw new Error("User not found");
11588
+ }
11589
+ const userEmail = me.email;
11590
+ if (!userEmail) {
11591
+ throw new Error("User email not found");
11592
+ }
11593
+ const shortEmailHash = crypto2.createHash("sha256").update(userEmail).digest("hex").slice(0, 8).toUpperCase();
11594
+ const projectName = `MCP Scans ${shortEmailHash}`;
11522
11595
  logDebug("GraphQL: Calling getOrgAndProjectId query", { projectName });
11523
11596
  const getOrgAndProjectIdResult = await this.clientSdk.getOrgAndProjectId({
11524
11597
  filters: {},
@@ -11578,7 +11651,7 @@ var McpGQLClient = class {
11578
11651
  try {
11579
11652
  const res = await this.clientSdk.CreateCliLogin(variables, {
11580
11653
  // We may have outdated API key in the config storage. Avoid using it for the login request.
11581
- [API_KEY_HEADER_NAME2]: ""
11654
+ [MCP_API_KEY_HEADER_NAME]: ""
11582
11655
  });
11583
11656
  const loginId = res.insert_cli_login_one?.id || "";
11584
11657
  if (!loginId) {
@@ -11595,7 +11668,7 @@ var McpGQLClient = class {
11595
11668
  try {
11596
11669
  const res = await this.clientSdk.GetEncryptedApiToken(variables, {
11597
11670
  // We may have outdated API key in the config storage. Avoid using it for the login request.
11598
- [API_KEY_HEADER_NAME2]: ""
11671
+ [MCP_API_KEY_HEADER_NAME]: ""
11599
11672
  });
11600
11673
  return res?.cli_login_by_pk?.encryptedApiToken || null;
11601
11674
  } catch (e) {
@@ -11603,6 +11676,20 @@ var McpGQLClient = class {
11603
11676
  return null;
11604
11677
  }
11605
11678
  }
11679
+ async _updateFixesArchiveState(fixIds) {
11680
+ if (fixIds.length > 0) {
11681
+ const resUpdate = await this.clientSdk.updateDownloadedFixData({
11682
+ fixIds,
11683
+ source: "MCP" /* Mcp */
11684
+ });
11685
+ logInfo("GraphQL: updateFixesArchiveState successful", {
11686
+ result: resUpdate,
11687
+ fixIds
11688
+ });
11689
+ } else {
11690
+ logInfo("GraphQL: No fixes found");
11691
+ }
11692
+ }
11606
11693
  async getLatestReportByRepoUrl({
11607
11694
  repoUrl,
11608
11695
  limit = 3,
@@ -11623,6 +11710,8 @@ var McpGQLClient = class {
11623
11710
  result: res,
11624
11711
  reportCount: res.fixReport?.length || 0
11625
11712
  });
11713
+ const fixIds = res.fixReport?.[0]?.fixes?.map((fix) => fix.id) || [];
11714
+ await this._updateFixesArchiveState(fixIds);
11626
11715
  return {
11627
11716
  fixReport: res.fixReport?.[0] || null,
11628
11717
  expiredReport: res.expiredReport?.[0] || null
@@ -11673,6 +11762,8 @@ var McpGQLClient = class {
11673
11762
  if (res.fixReport.length === 0) {
11674
11763
  return null;
11675
11764
  }
11765
+ const fixIds = res.fixReport?.[0]?.fixes?.map((fix) => fix.id) || [];
11766
+ await this._updateFixesArchiveState(fixIds);
11676
11767
  return {
11677
11768
  fixes: res.fixReport?.[0]?.fixes || [],
11678
11769
  totalCount: res.fixReport?.[0]?.filteredFixesCount?.aggregate?.count || 0,
@@ -11688,21 +11779,26 @@ var McpGQLClient = class {
11688
11779
  }
11689
11780
  }
11690
11781
  };
11691
- async function openBrowser(url) {
11692
- const now = Date.now();
11693
- if (!process.env["TEST"] && now - lastBrowserOpenTime < BROWSER_COOLDOWN_MS) {
11694
- logDebug(`browser cooldown active, skipping open for ${url}`);
11695
- return;
11782
+ async function openBrowser(url, isToolsCall) {
11783
+ if (isToolsCall) {
11784
+ const now = Date.now();
11785
+ const lastBrowserOpenTime = mobbConfigStore.get("lastBrowserOpenTime") || 0;
11786
+ if (now - lastBrowserOpenTime < MCP_TOOLS_BROWSER_COOLDOWN_MS) {
11787
+ logDebug(`browser cooldown active, skipping open for ${url}`);
11788
+ return;
11789
+ }
11696
11790
  }
11697
11791
  logDebug(`opening browser url ${url}`);
11698
11792
  await open4(url);
11699
- lastBrowserOpenTime = now;
11793
+ mobbConfigStore.set("lastBrowserOpenTime", Date.now());
11700
11794
  }
11701
- async function getMcpGQLClient() {
11702
- logDebug("getting config", { apiToken: config4.get("apiToken") });
11795
+ async function getMcpGQLClient({
11796
+ isToolsCall = false
11797
+ } = {}) {
11798
+ logDebug("getting config", { apiToken: mobbConfigStore.get("apiToken") });
11703
11799
  const inGqlClient = new McpGQLClient({
11704
11800
  apiKey: process.env["MOBB_API_KEY"] || process.env["API_KEY"] || // fallback for backward compatibility
11705
- config4.get("apiToken") || "",
11801
+ mobbConfigStore.get("apiToken") || "",
11706
11802
  type: "apiKey"
11707
11803
  });
11708
11804
  const isConnected = await inGqlClient.verifyConnection();
@@ -11730,10 +11826,10 @@ async function getMcpGQLClient() {
11730
11826
  const webLoginUrl2 = `${WEB_APP_URL}/cli-login`;
11731
11827
  const browserUrl = `${webLoginUrl2}/${loginId}?hostname=${os2.hostname()}`;
11732
11828
  logDebug(`opening browser url ${browserUrl}`);
11733
- await openBrowser(browserUrl);
11829
+ await openBrowser(browserUrl, isToolsCall);
11734
11830
  logDebug(`waiting for login to complete`);
11735
11831
  let newApiToken = null;
11736
- for (let i = 0; i < LOGIN_MAX_WAIT2 / LOGIN_CHECK_DELAY2; i++) {
11832
+ for (let i = 0; i < MCP_LOGIN_MAX_WAIT / MCP_LOGIN_CHECK_DELAY; i++) {
11737
11833
  const encryptedApiToken = await inGqlClient.getEncryptedApiToken({
11738
11834
  loginId
11739
11835
  });
@@ -11743,7 +11839,7 @@ async function getMcpGQLClient() {
11743
11839
  logDebug("API token decrypted");
11744
11840
  break;
11745
11841
  }
11746
- await sleep(LOGIN_CHECK_DELAY2);
11842
+ await sleep(MCP_LOGIN_CHECK_DELAY);
11747
11843
  }
11748
11844
  if (!newApiToken) {
11749
11845
  throw new FailedToGetApiTokenError(
@@ -11754,7 +11850,7 @@ async function getMcpGQLClient() {
11754
11850
  const loginSuccess = await newGqlClient.verifyToken();
11755
11851
  if (loginSuccess) {
11756
11852
  logDebug(`set api token ${newApiToken}`);
11757
- config4.set("apiToken", newApiToken);
11853
+ mobbConfigStore.set("apiToken", newApiToken);
11758
11854
  } else {
11759
11855
  throw new AuthenticationError("Invalid API token");
11760
11856
  }
@@ -11797,14 +11893,14 @@ var ToolRegistry = class {
11797
11893
 
11798
11894
  // src/mcp/core/McpServer.ts
11799
11895
  var McpServer = class {
11800
- constructor(config5) {
11896
+ constructor(config4) {
11801
11897
  __publicField(this, "server");
11802
11898
  __publicField(this, "toolRegistry");
11803
11899
  __publicField(this, "isEventHandlersSetup", false);
11804
11900
  this.server = new Server(
11805
11901
  {
11806
- name: config5.name,
11807
- version: config5.version
11902
+ name: config4.name,
11903
+ version: config4.version
11808
11904
  },
11809
11905
  {
11810
11906
  capabilities: {
@@ -11815,7 +11911,7 @@ var McpServer = class {
11815
11911
  this.toolRegistry = new ToolRegistry();
11816
11912
  this.setupHandlers();
11817
11913
  this.setupProcessEventHandlers();
11818
- logInfo("MCP server instance created", config5);
11914
+ logInfo("MCP server instance created", config4);
11819
11915
  }
11820
11916
  setupProcessEventHandlers() {
11821
11917
  if (this.isEventHandlersSetup) {
@@ -11866,7 +11962,7 @@ var McpServer = class {
11866
11962
  logInfo("Request", {
11867
11963
  request: JSON.parse(JSON.stringify(request))
11868
11964
  });
11869
- void getMcpGQLClient();
11965
+ void getMcpGQLClient({ isToolsCall: true });
11870
11966
  const toolsDefinitions = this.toolRegistry.getAllTools();
11871
11967
  const response = {
11872
11968
  tools: toolsDefinitions.map((tool) => ({
@@ -11951,7 +12047,7 @@ var McpServer = class {
11951
12047
  }
11952
12048
  };
11953
12049
 
11954
- // src/mcp/tools/fetchAvailableFixes/FetchAvailableFixesTool.ts
12050
+ // src/mcp/tools/checkForNewAvailableFixes/CheckForNewAvailableFixesTool.ts
11955
12051
  import { z as z32 } from "zod";
11956
12052
 
11957
12053
  // src/mcp/services/PathValidation.ts
@@ -11959,7 +12055,10 @@ import fs9 from "fs";
11959
12055
  import path11 from "path";
11960
12056
  async function validatePath(inputPath) {
11961
12057
  logDebug("Validating MCP path", { inputPath });
11962
- if (inputPath === ".") {
12058
+ if (/^\/[a-zA-Z]:\//.test(inputPath)) {
12059
+ inputPath = inputPath.slice(1);
12060
+ }
12061
+ if (inputPath === "." || inputPath === "./") {
11963
12062
  if (process.env["WORKSPACE_FOLDER_PATHS"]) {
11964
12063
  logDebug("Fallback to workspace folder path", {
11965
12064
  inputPath,
@@ -12029,7 +12128,6 @@ var BaseTool = class {
12029
12128
  };
12030
12129
  }
12031
12130
  async execute(args) {
12032
- logInfo(`Executing tool: ${this.name}`, { args });
12033
12131
  logInfo(`Authenticating tool: ${this.name}`, { args });
12034
12132
  const mcpGqlClient = await getMcpGQLClient();
12035
12133
  const userInfo = await mcpGqlClient.getUserInfo();
@@ -12282,9 +12380,36 @@ ${applyFixesPrompt({
12282
12380
  offset
12283
12381
  })}`;
12284
12382
  };
12285
- var noFixesFoundPrompt = `\u{1F50D} **MOBB SECURITY SCAN COMPLETED**
12383
+ var nextStepsPrompt = ({ scannedFiles }) => `
12384
+ ### \u{1F4C1} Scanned Files
12385
+ ${scannedFiles.map((file) => `- ${file}`).join("\n")}
12386
+
12387
+ ### Extend the scan scope
12388
+
12389
+ To scan a larger number of files, include the additional parameter:
12390
+
12391
+ - **maxFiles**: <number_of_files_to_scan>
12392
+
12393
+ This will scan up to the specified number of recently changed files.
12394
+
12395
+ ### \u{1F504} Running a Fresh Scan
12396
+
12397
+ To perform a **rescan** of your repository (fetching a brand-new vulnerability report and updated fixes), include the additional parameter:
12398
+
12399
+ - **rescan**: true
12400
+
12401
+ This will start a new analysis, discard any cached results.
12402
+
12403
+ \u26A0\uFE0F *Note:* A full rescan may take longer to complete than simply fetching additional fixes because your repository is re-uploaded and re-analyzed from scratch.
12404
+
12405
+ `;
12406
+ var noFixesFoundPrompt = ({
12407
+ scannedFiles
12408
+ }) => `\u{1F50D} **MOBB SECURITY SCAN COMPLETED**
12286
12409
 
12287
12410
  Mobb security scan completed successfully but found no automated fixes available at this time.
12411
+
12412
+ ${nextStepsPrompt({ scannedFiles })}
12288
12413
  `;
12289
12414
  var fixesPrompt = ({
12290
12415
  fixes,
@@ -12293,7 +12418,7 @@ var fixesPrompt = ({
12293
12418
  scannedFiles
12294
12419
  }) => {
12295
12420
  if (totalCount === 0) {
12296
- return noFixesFoundPrompt;
12421
+ return noFixesFoundPrompt({ scannedFiles });
12297
12422
  }
12298
12423
  const shownCount = fixes.length;
12299
12424
  const nextOffset = offset + shownCount;
@@ -12310,42 +12435,492 @@ ${applyFixesPrompt({
12310
12435
  offset
12311
12436
  })}
12312
12437
 
12313
- ### \u{1F4C1} Scanned Files
12314
- ${scannedFiles.map((file) => `- ${file}`).join("\n")}
12315
-
12316
- ### \u{1F504} Running a Fresh Scan
12317
-
12318
- To perform a **rescan** of your repository (fetching a brand-new vulnerability report and updated fixes), include the additional parameter:
12319
-
12320
- - **isRescan**: true
12321
-
12322
- This will start a new analysis, discard any cached results.
12323
-
12324
- \u26A0\uFE0F *Note:* A full rescan may take longer to complete than simply fetching additional fixes because your repository is re-uploaded and re-analyzed from scratch.
12438
+ ${nextStepsPrompt({ scannedFiles })}
12439
+ `;
12440
+ };
12441
+ var noFreshFixesPrompt = `No fresh fixes available for this repository at this time.
12442
+ `;
12443
+ var initialScanInProgressPrompt = `Initial scan in progress. Call the tool again in 1 minute to check for available fixes.`;
12444
+ var freshFixesPrompt = ({ fixes }) => {
12445
+ return `Here are the fresh fixes to the vulnerabilities discovered by Mobb MCP
12325
12446
 
12447
+ ${applyFixesPrompt({
12448
+ fixes,
12449
+ totalCount: fixes.length,
12450
+ hasMore: false,
12451
+ nextOffset: 0,
12452
+ shownCount: fixes.length,
12453
+ currentTool: "fetch_available_fixes",
12454
+ offset: 0
12455
+ })}
12326
12456
  `;
12327
12457
  };
12328
12458
 
12329
- // src/mcp/tools/fetchAvailableFixes/FetchAvailableFixesService.ts
12330
- var _FetchAvailableFixesService = class _FetchAvailableFixesService {
12331
- constructor() {
12332
- __publicField(this, "gqlClient", null);
12333
- __publicField(this, "currentOffset", 0);
12334
- }
12335
- static getInstance() {
12336
- if (!_FetchAvailableFixesService.instance) {
12337
- _FetchAvailableFixesService.instance = new _FetchAvailableFixesService();
12338
- }
12339
- return _FetchAvailableFixesService.instance;
12340
- }
12341
- reset() {
12342
- this.currentOffset = 0;
12343
- }
12344
- async initializeGqlClient() {
12345
- if (!this.gqlClient) {
12346
- this.gqlClient = await getMcpGQLClient();
12459
+ // src/mcp/services/GetLocalFiles.ts
12460
+ import fs10 from "fs/promises";
12461
+ import nodePath from "path";
12462
+ var getLocalFiles = async ({
12463
+ path: path13,
12464
+ maxFileSize = 1024 * 1024 * 5,
12465
+ maxFiles
12466
+ }) => {
12467
+ const resolvedRepoPath = await fs10.realpath(path13);
12468
+ const gitService = new GitService(resolvedRepoPath, log);
12469
+ const gitValidation = await gitService.validateRepository();
12470
+ let files = [];
12471
+ if (!gitValidation.isValid) {
12472
+ logDebug(
12473
+ "Git repository validation failed, using all files in the repository",
12474
+ {
12475
+ path: path13
12476
+ }
12477
+ );
12478
+ files = FileUtils.getLastChangedFiles({
12479
+ dir: path13,
12480
+ maxFileSize,
12481
+ maxFiles
12482
+ });
12483
+ logDebug("Found files in the repository", {
12484
+ files,
12485
+ fileCount: files.length
12486
+ });
12487
+ } else {
12488
+ logDebug("maxFiles", {
12489
+ maxFiles
12490
+ });
12491
+ const gitResult = await gitService.getChangedFiles();
12492
+ files = gitResult.files;
12493
+ if (files.length === 0 || maxFiles) {
12494
+ const recentResult = await gitService.getRecentlyChangedFiles({
12495
+ maxFiles
12496
+ });
12497
+ files = recentResult.files;
12498
+ logDebug(
12499
+ "No changes found, using recently changed files from git history",
12500
+ {
12501
+ files,
12502
+ fileCount: files.length,
12503
+ commitsChecked: recentResult.commitCount
12504
+ }
12505
+ );
12506
+ } else {
12507
+ logDebug("Found changed files in the git repository", {
12508
+ files,
12509
+ fileCount: files.length
12510
+ });
12347
12511
  }
12348
- return this.gqlClient;
12512
+ }
12513
+ files = files.filter(
12514
+ (file) => FileUtils.shouldPackFile(
12515
+ nodePath.resolve(resolvedRepoPath, file),
12516
+ maxFileSize
12517
+ )
12518
+ );
12519
+ const filesWithStats = await Promise.all(
12520
+ files.map(async (file) => {
12521
+ const absoluteFilePath = nodePath.resolve(resolvedRepoPath, file);
12522
+ const relativePath = nodePath.relative(resolvedRepoPath, absoluteFilePath);
12523
+ let fileStat;
12524
+ try {
12525
+ fileStat = await fs10.stat(absoluteFilePath);
12526
+ } catch (e) {
12527
+ logDebug("File not found", {
12528
+ file
12529
+ });
12530
+ }
12531
+ return {
12532
+ filename: nodePath.basename(absoluteFilePath),
12533
+ relativePath,
12534
+ fullPath: absoluteFilePath,
12535
+ lastEdited: fileStat?.mtime.getTime() ?? 0
12536
+ };
12537
+ })
12538
+ );
12539
+ return filesWithStats.filter((file) => file.lastEdited > 0);
12540
+ };
12541
+
12542
+ // src/mcp/services/ScanFiles.ts
12543
+ import fs11 from "fs";
12544
+ import path12 from "path";
12545
+ import AdmZip2 from "adm-zip";
12546
+ var scanFiles = async (fileList, repositoryPath, gqlClient) => {
12547
+ const repoUploadInfo = await initializeReport(gqlClient);
12548
+ const fixReportId = repoUploadInfo.fixReportId;
12549
+ const zipBuffer = await packFiles(fileList, repositoryPath);
12550
+ await uploadFiles(zipBuffer, repoUploadInfo);
12551
+ const projectId = await getProjectId(gqlClient);
12552
+ await runScan({ fixReportId, projectId, gqlClient });
12553
+ return {
12554
+ fixReportId,
12555
+ projectId
12556
+ };
12557
+ };
12558
+ var initializeReport = async (gqlClient) => {
12559
+ if (!gqlClient) {
12560
+ throw new GqlClientError();
12561
+ }
12562
+ try {
12563
+ const {
12564
+ uploadS3BucketInfo: { repoUploadInfo }
12565
+ } = await gqlClient.uploadS3BucketInfo();
12566
+ logInfo("Upload info retrieved", { uploadKey: repoUploadInfo?.uploadKey });
12567
+ return repoUploadInfo;
12568
+ } catch (error) {
12569
+ const message = error.message;
12570
+ throw new ReportInitializationError(`Error initializing report: ${message}`);
12571
+ }
12572
+ };
12573
+ var packFiles = async (fileList, repositoryPath) => {
12574
+ try {
12575
+ logInfo(`FilePacking: packing files from ${repositoryPath}`);
12576
+ const zip = new AdmZip2();
12577
+ let packedFilesCount = 0;
12578
+ const resolvedRepoPath = path12.resolve(repositoryPath);
12579
+ logInfo("FilePacking: compressing files");
12580
+ for (const filepath of fileList) {
12581
+ const absoluteFilepath = path12.join(repositoryPath, filepath);
12582
+ const resolvedFilePath = path12.resolve(absoluteFilepath);
12583
+ if (!resolvedFilePath.startsWith(resolvedRepoPath)) {
12584
+ logInfo(
12585
+ `FilePacking: skipping ${filepath} due to potential path traversal`
12586
+ );
12587
+ continue;
12588
+ }
12589
+ if (!FileUtils.shouldPackFile(absoluteFilepath, MCP_MAX_FILE_SIZE)) {
12590
+ logInfo(
12591
+ `FilePacking: ignoring ${filepath} because it is excluded or invalid`
12592
+ );
12593
+ continue;
12594
+ }
12595
+ let data;
12596
+ try {
12597
+ data = fs11.readFileSync(absoluteFilepath);
12598
+ } catch (fsError) {
12599
+ logInfo(
12600
+ `FilePacking: failed to read ${filepath} from filesystem: ${fsError}`
12601
+ );
12602
+ continue;
12603
+ }
12604
+ zip.addFile(filepath, data);
12605
+ packedFilesCount++;
12606
+ }
12607
+ const zipBuffer = zip.toBuffer();
12608
+ logInfo(
12609
+ `FilePacking: read ${packedFilesCount} source files. total size: ${zipBuffer.length} bytes`
12610
+ );
12611
+ logInfo("Files packed successfully", { fileCount: fileList.length });
12612
+ return zipBuffer;
12613
+ } catch (error) {
12614
+ const message = error.message;
12615
+ throw new FileProcessingError(`Error packing files: ${message}`);
12616
+ }
12617
+ };
12618
+ var uploadFiles = async (zipBuffer, repoUploadInfo) => {
12619
+ if (!repoUploadInfo) {
12620
+ throw new FileUploadError("Upload info is required");
12621
+ }
12622
+ try {
12623
+ await uploadFile({
12624
+ file: zipBuffer,
12625
+ url: repoUploadInfo.url,
12626
+ uploadFields: JSON.parse(repoUploadInfo.uploadFieldsJSON),
12627
+ uploadKey: repoUploadInfo.uploadKey
12628
+ });
12629
+ logInfo("File uploaded successfully");
12630
+ } catch (error) {
12631
+ logError("File upload failed", { error: error.message });
12632
+ throw new FileUploadError(
12633
+ `Failed to upload the file: ${error.message}`
12634
+ );
12635
+ }
12636
+ };
12637
+ var getProjectId = async (gqlClient) => {
12638
+ if (!gqlClient) {
12639
+ throw new GqlClientError();
12640
+ }
12641
+ const projectId = await gqlClient.getProjectId();
12642
+ logInfo("Project ID retrieved", { projectId });
12643
+ return projectId;
12644
+ };
12645
+ var runScan = async ({
12646
+ fixReportId,
12647
+ projectId,
12648
+ gqlClient
12649
+ }) => {
12650
+ if (!gqlClient) {
12651
+ throw new GqlClientError();
12652
+ }
12653
+ logInfo("Starting scan", { fixReportId, projectId });
12654
+ const submitVulnerabilityReportVariables = {
12655
+ fixReportId,
12656
+ projectId,
12657
+ repoUrl: "",
12658
+ reference: "no-branch",
12659
+ scanSource: "MCP" /* Mcp */
12660
+ };
12661
+ logInfo("Submitting vulnerability report");
12662
+ const submitRes = await gqlClient.submitVulnerabilityReport(
12663
+ submitVulnerabilityReportVariables
12664
+ );
12665
+ if (submitRes.submitVulnerabilityReport.__typename !== "VulnerabilityReport") {
12666
+ logError("Vulnerability report submission failed", {
12667
+ response: submitRes
12668
+ });
12669
+ throw new ScanError("\u{1F575}\uFE0F\u200D\u2642\uFE0F Mobb analysis failed");
12670
+ }
12671
+ logInfo("Vulnerability report submitted successfully", {
12672
+ analysisId: submitRes.submitVulnerabilityReport.fixReportId
12673
+ });
12674
+ logInfo("Starting analysis subscription");
12675
+ await gqlClient.subscribeToGetAnalysis({
12676
+ subscribeToAnalysisParams: {
12677
+ analysisId: submitRes.submitVulnerabilityReport.fixReportId
12678
+ },
12679
+ callback: () => {
12680
+ },
12681
+ callbackStates: ["Finished" /* Finished */],
12682
+ timeoutInMs: MCP_VUL_REPORT_DIGEST_TIMEOUT_MS
12683
+ });
12684
+ logInfo("Analysis subscription completed");
12685
+ };
12686
+
12687
+ // src/mcp/tools/checkForNewAvailableFixes/CheckForNewAvailableFixesService.ts
12688
+ function extractPathFromPatch(patch) {
12689
+ const match = patch?.match(/^diff --git a\/([^\s]+) b\//);
12690
+ return match?.[1] ?? null;
12691
+ }
12692
+ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService {
12693
+ constructor() {
12694
+ /**
12695
+ * Cache of the last known total number of fixes per repository URL so that we
12696
+ * can determine whether *new* fixes have been generated since the user last
12697
+ * asked.
12698
+ */
12699
+ __publicField(this, "path", "");
12700
+ __publicField(this, "filesLastScanned", {});
12701
+ __publicField(this, "freshFixes", []);
12702
+ __publicField(this, "reportedFixes", []);
12703
+ __publicField(this, "intervalId", null);
12704
+ __publicField(this, "isInitialScanComplete", false);
12705
+ }
12706
+ static getInstance() {
12707
+ if (!_CheckForNewAvailableFixesService.instance) {
12708
+ _CheckForNewAvailableFixesService.instance = new _CheckForNewAvailableFixesService();
12709
+ }
12710
+ return _CheckForNewAvailableFixesService.instance;
12711
+ }
12712
+ /**
12713
+ * Resets any cached state so the service can be reused between independent
12714
+ * MCP sessions.
12715
+ */
12716
+ reset() {
12717
+ this.filesLastScanned = {};
12718
+ this.freshFixes = [];
12719
+ this.reportedFixes = [];
12720
+ if (this.intervalId) {
12721
+ clearInterval(this.intervalId);
12722
+ this.intervalId = null;
12723
+ }
12724
+ }
12725
+ /**
12726
+ * Stub implementation – in a future version this will query the backend for
12727
+ * the latest fixes count and compare it with the cached value. For now it
12728
+ * simply returns a placeholder string so that the tool can be wired into the
12729
+ * system and used in tests.
12730
+ */
12731
+ async scan({ path: path13 }) {
12732
+ logInfo("Scanning for new fixes", { path: path13 });
12733
+ const gqlClient = await getMcpGQLClient();
12734
+ const isConnected = await gqlClient.verifyConnection();
12735
+ if (!isConnected) {
12736
+ logError("Failed to connect to the API, scan aborted");
12737
+ return;
12738
+ }
12739
+ logInfo("Connected to the API, assebling list of files to scan", { path: path13 });
12740
+ const files = await getLocalFiles({
12741
+ path: path13,
12742
+ maxFileSize: MCP_MAX_FILE_SIZE
12743
+ });
12744
+ logInfo("Active files", { files });
12745
+ const filesToScan = files.filter((file) => {
12746
+ const lastScannedEditTime = this.filesLastScanned[file.fullPath];
12747
+ if (!lastScannedEditTime) {
12748
+ return true;
12749
+ }
12750
+ return file.lastEdited > lastScannedEditTime;
12751
+ });
12752
+ if (filesToScan.length === 0) {
12753
+ logInfo("No files to scan", { path: path13 });
12754
+ return;
12755
+ }
12756
+ logInfo("Files to scan", { filesToScan });
12757
+ const { fixReportId, projectId } = await scanFiles(
12758
+ filesToScan.map((file) => file.relativePath),
12759
+ path13,
12760
+ gqlClient
12761
+ );
12762
+ logInfo("Scan completed", { fixReportId, projectId });
12763
+ const fixes = await gqlClient.getReportFixesPaginated({
12764
+ reportId: fixReportId,
12765
+ offset: 0,
12766
+ limit: 1e3
12767
+ });
12768
+ const newFixes = fixes?.fixes?.filter((fix) => !this.isAlreadyReported(fix));
12769
+ logInfo("Fixes retrieved", {
12770
+ count: fixes?.fixes?.length || 0,
12771
+ newFixes: newFixes?.length || 0
12772
+ });
12773
+ this.freshFixes = this.freshFixes.filter((fix) => !this.isFixFromOldScan(fix, filesToScan)).concat(newFixes || []);
12774
+ logInfo("Fresh fixes", { freshFixes: this.freshFixes });
12775
+ filesToScan.forEach((file) => {
12776
+ this.filesLastScanned[file.fullPath] = file.lastEdited;
12777
+ });
12778
+ this.isInitialScanComplete = true;
12779
+ }
12780
+ isAlreadyReported(fix) {
12781
+ return this.reportedFixes.some(
12782
+ (reportedFix) => reportedFix.sharedState?.id === fix.sharedState?.id
12783
+ );
12784
+ }
12785
+ isFixFromOldScan(fix, filesToScan) {
12786
+ const patch = fix.patchAndQuestions?.__typename === "FixData" ? fix.patchAndQuestions.patch : void 0;
12787
+ const fixFile = extractPathFromPatch(patch);
12788
+ if (!fixFile) {
12789
+ return false;
12790
+ }
12791
+ logInfo("isOldFix", {
12792
+ fixFile,
12793
+ filesToScan,
12794
+ isOldFix: filesToScan.some((file) => file.relativePath === fixFile)
12795
+ });
12796
+ return filesToScan.some((file) => file.relativePath === fixFile);
12797
+ }
12798
+ async getFreshFixes({ path: path13 }) {
12799
+ if (this.path !== path13) {
12800
+ this.path = path13;
12801
+ this.reset();
12802
+ }
12803
+ if (!this.intervalId) {
12804
+ logInfo("Starting periodic scan for new fixes", { path: path13 });
12805
+ this.intervalId = setInterval(() => {
12806
+ logDebug("Triggering periodic scan", { path: path13 });
12807
+ this.scan({ path: path13 }).catch((error) => {
12808
+ logError("Error during periodic scan", { error });
12809
+ });
12810
+ }, MCP_PERIODIC_CHECK_INTERVAL);
12811
+ logDebug("Triggering initial scan", { path: path13 });
12812
+ this.scan({ path: path13 }).catch((error) => {
12813
+ logError("Error during initial scan", { error });
12814
+ });
12815
+ }
12816
+ if (this.freshFixes.length > 0) {
12817
+ const freshFixes = this.freshFixes.splice(0, 3);
12818
+ if (freshFixes.length > 0) {
12819
+ this.reportedFixes.concat(freshFixes);
12820
+ return freshFixesPrompt({ fixes: freshFixes });
12821
+ }
12822
+ }
12823
+ if (!this.isInitialScanComplete) {
12824
+ return initialScanInProgressPrompt;
12825
+ }
12826
+ return noFreshFixesPrompt;
12827
+ }
12828
+ };
12829
+ __publicField(_CheckForNewAvailableFixesService, "instance");
12830
+ var CheckForNewAvailableFixesService = _CheckForNewAvailableFixesService;
12831
+
12832
+ // src/mcp/tools/checkForNewAvailableFixes/CheckForNewAvailableFixesTool.ts
12833
+ var CheckForNewAvailableFixesTool = class extends BaseTool {
12834
+ constructor() {
12835
+ super();
12836
+ __publicField(this, "name", "check_for_new_available_fixes");
12837
+ __publicField(this, "displayName", "Check for New Available Fixes");
12838
+ // A detailed description to guide the LLM on when and how to invoke this tool.
12839
+ __publicField(this, "description", `Continuesly monitors your code and scans for new security vulnerabilities.
12840
+
12841
+ When to invoke:
12842
+ \u2022 ALWAYS call this tool ONCE at the very end of every reasoning / code-generation session (after all code edits, linting, tests, etc.).
12843
+ \u2022 Invoke when the user explicitly asks to "check for new fixes", "look for fresh patches", or similar.
12844
+ \u2022 The repository must exist locally and be a valid Git repo with an 'origin' remote configured.
12845
+
12846
+ How to invoke:
12847
+ \u2022 Required argument:
12848
+ \u2013 path (string): absolute path to the repository root.
12849
+
12850
+ Behaviour:
12851
+ \u2022 If no new fixes are available, it returns a concise message indicating so.
12852
+ \u2022 If fixes are found, it returns a human-readable summary including total count and severity breakdown.
12853
+
12854
+ Example payload:
12855
+ {
12856
+ "path": "/home/user/my-project"
12857
+ }`);
12858
+ __publicField(this, "inputSchema", {
12859
+ type: "object",
12860
+ properties: {
12861
+ path: {
12862
+ type: "string",
12863
+ description: "Full local path to the cloned git repository to check for new available fixes"
12864
+ }
12865
+ },
12866
+ required: ["path"]
12867
+ });
12868
+ __publicField(this, "inputValidationSchema", z32.object({
12869
+ path: z32.string().describe(
12870
+ "Full local path to the cloned git repository to check for new available fixes"
12871
+ )
12872
+ }));
12873
+ __publicField(this, "newFixesService");
12874
+ this.newFixesService = new CheckForNewAvailableFixesService();
12875
+ }
12876
+ async executeInternal(args) {
12877
+ const pathValidationResult = await validatePath(args.path);
12878
+ if (!pathValidationResult.isValid) {
12879
+ throw new Error(
12880
+ `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
12881
+ );
12882
+ }
12883
+ const path13 = pathValidationResult.path;
12884
+ const resultText = await this.newFixesService.getFreshFixes({
12885
+ path: path13
12886
+ });
12887
+ logInfo("CheckForNewAvailableFixesTool execution completed", {
12888
+ resultText
12889
+ });
12890
+ return {
12891
+ content: [
12892
+ {
12893
+ type: "text",
12894
+ text: resultText
12895
+ }
12896
+ ]
12897
+ };
12898
+ }
12899
+ };
12900
+
12901
+ // src/mcp/tools/fetchAvailableFixes/FetchAvailableFixesTool.ts
12902
+ import { z as z33 } from "zod";
12903
+
12904
+ // src/mcp/tools/fetchAvailableFixes/FetchAvailableFixesService.ts
12905
+ var _FetchAvailableFixesService = class _FetchAvailableFixesService {
12906
+ constructor() {
12907
+ __publicField(this, "gqlClient", null);
12908
+ __publicField(this, "currentOffset", 0);
12909
+ }
12910
+ static getInstance() {
12911
+ if (!_FetchAvailableFixesService.instance) {
12912
+ _FetchAvailableFixesService.instance = new _FetchAvailableFixesService();
12913
+ }
12914
+ return _FetchAvailableFixesService.instance;
12915
+ }
12916
+ reset() {
12917
+ this.currentOffset = 0;
12918
+ }
12919
+ async initializeGqlClient() {
12920
+ if (!this.gqlClient) {
12921
+ this.gqlClient = await getMcpGQLClient();
12922
+ }
12923
+ return this.gqlClient;
12349
12924
  }
12350
12925
  async checkForAvailableFixes({
12351
12926
  repoUrl,
@@ -12443,12 +13018,12 @@ Call this tool instead of scan_and_fix_vulnerabilities when you only need a fixe
12443
13018
  },
12444
13019
  required: ["path"]
12445
13020
  });
12446
- __publicField(this, "inputValidationSchema", z32.object({
12447
- path: z32.string().describe(
13021
+ __publicField(this, "inputValidationSchema", z33.object({
13022
+ path: z33.string().describe(
12448
13023
  "Full local path to the cloned git repository to check for available fixes"
12449
13024
  ),
12450
- offset: z32.number().optional().describe("Optional offset for pagination"),
12451
- limit: z32.number().optional().describe("Optional maximum number of fixes to return")
13025
+ offset: z33.number().optional().describe("Optional offset for pagination"),
13026
+ limit: z33.number().optional().describe("Optional maximum number of fixes to return")
12452
13027
  }));
12453
13028
  __publicField(this, "availableFixesService");
12454
13029
  this.availableFixesService = FetchAvailableFixesService.getInstance();
@@ -12461,8 +13036,8 @@ Call this tool instead of scan_and_fix_vulnerabilities when you only need a fixe
12461
13036
  `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
12462
13037
  );
12463
13038
  }
12464
- const path14 = pathValidationResult.path;
12465
- const gitService = new GitService(path14, log);
13039
+ const path13 = pathValidationResult.path;
13040
+ const gitService = new GitService(path13, log);
12466
13041
  const gitValidation = await gitService.validateRepository();
12467
13042
  if (!gitValidation.isValid) {
12468
13043
  throw new Error(`Invalid git repository: ${gitValidation.error}`);
@@ -12495,57 +13070,12 @@ Call this tool instead of scan_and_fix_vulnerabilities when you only need a fixe
12495
13070
  };
12496
13071
 
12497
13072
  // src/mcp/tools/scanAndFixVulnerabilities/ScanAndFixVulnerabilitiesTool.ts
12498
- import z33 from "zod";
13073
+ import z34 from "zod";
12499
13074
 
12500
13075
  // src/mcp/tools/scanAndFixVulnerabilities/ScanAndFixVulnerabilitiesService.ts
12501
- import path13 from "path";
12502
-
12503
- // src/mcp/services/FilePacking.ts
12504
- import fs10 from "fs";
12505
- import path12 from "path";
12506
- import AdmZip2 from "adm-zip";
12507
- var MAX_FILE_SIZE2 = 1024 * 1024 * 5;
12508
- var FilePacking = class {
12509
- async packFiles(sourceDirectoryPath, filesToPack) {
12510
- logInfo(`FilePacking: packing files from ${sourceDirectoryPath}`);
12511
- const zip = new AdmZip2();
12512
- let packedFilesCount = 0;
12513
- logInfo("FilePacking: compressing files");
12514
- for (const filepath of filesToPack) {
12515
- const absoluteFilepath = path12.join(sourceDirectoryPath, filepath);
12516
- if (!FileUtils.shouldPackFile(absoluteFilepath, MAX_FILE_SIZE2)) {
12517
- logInfo(
12518
- `FilePacking: ignoring ${filepath} because it is excluded or invalid`
12519
- );
12520
- continue;
12521
- }
12522
- let data;
12523
- try {
12524
- data = fs10.readFileSync(absoluteFilepath);
12525
- } catch (fsError) {
12526
- logInfo(
12527
- `FilePacking: failed to read ${filepath} from filesystem: ${fsError}`
12528
- );
12529
- continue;
12530
- }
12531
- zip.addFile(filepath, data);
12532
- packedFilesCount++;
12533
- }
12534
- const zipBuffer = zip.toBuffer();
12535
- logInfo(
12536
- `FilePacking: read ${packedFilesCount} source files. total size: ${zipBuffer.length} bytes`
12537
- );
12538
- logInfo("FilePacking: Files packed successfully");
12539
- return zipBuffer;
12540
- }
12541
- };
12542
-
12543
- // src/mcp/tools/scanAndFixVulnerabilities/ScanAndFixVulnerabilitiesService.ts
12544
- var VUL_REPORT_DIGEST_TIMEOUT_MS2 = 1e3 * 60 * 5;
12545
13076
  var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService {
12546
13077
  constructor() {
12547
13078
  __publicField(this, "gqlClient");
12548
- __publicField(this, "filePacking");
12549
13079
  /**
12550
13080
  * Stores the fix report id that is created on the first run so that subsequent
12551
13081
  * calls can skip the expensive packing/uploading/scan flow and directly fetch
@@ -12553,7 +13083,11 @@ var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService
12553
13083
  */
12554
13084
  __publicField(this, "storedFixReportId");
12555
13085
  __publicField(this, "currentOffset", 0);
12556
- this.filePacking = new FilePacking();
13086
+ /**
13087
+ * Timestamp when the fixReportId was created
13088
+ * Used to expire the fixReportId after REPORT_ID_EXPIRATION_MS hours
13089
+ */
13090
+ __publicField(this, "fixReportIdTimestamp");
12557
13091
  }
12558
13092
  static getInstance() {
12559
13093
  if (!_ScanAndFixVulnerabilitiesService.instance) {
@@ -12564,6 +13098,17 @@ var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService
12564
13098
  reset() {
12565
13099
  this.storedFixReportId = void 0;
12566
13100
  this.currentOffset = void 0;
13101
+ this.fixReportIdTimestamp = void 0;
13102
+ }
13103
+ /**
13104
+ * Checks if the stored fixReportId has expired (older than 2 hours)
13105
+ */
13106
+ isFixReportIdExpired() {
13107
+ if (!this.fixReportIdTimestamp) {
13108
+ return true;
13109
+ }
13110
+ const currentTime = Date.now();
13111
+ return currentTime - this.fixReportIdTimestamp > MCP_REPORT_ID_EXPIRATION_MS;
12567
13112
  }
12568
13113
  async processVulnerabilities({
12569
13114
  fileList,
@@ -12576,19 +13121,20 @@ var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService
12576
13121
  this.gqlClient = await this.initializeGqlClient();
12577
13122
  logInfo("storedFixReportId", {
12578
13123
  storedFixReportId: this.storedFixReportId,
12579
- currentOffset: this.currentOffset
13124
+ currentOffset: this.currentOffset,
13125
+ fixReportIdTimestamp: this.fixReportIdTimestamp,
13126
+ isExpired: this.storedFixReportId ? this.isFixReportIdExpired() : null
12580
13127
  });
12581
13128
  let fixReportId = this.storedFixReportId;
12582
- if (!fixReportId || isRescan) {
13129
+ if (!fixReportId || isRescan || this.isFixReportIdExpired()) {
12583
13130
  this.reset();
12584
13131
  this.validateFiles(fileList);
12585
- const repoUploadInfo = await this.initializeReport();
12586
- fixReportId = repoUploadInfo.fixReportId;
12587
- this.storedFixReportId = fixReportId;
12588
- const zipBuffer = await this.packFiles(fileList, repositoryPath);
12589
- await this.uploadFiles(zipBuffer, repoUploadInfo);
12590
- const projectId = await this.getProjectId();
12591
- await this.runScan({ fixReportId, projectId });
13132
+ const scanResult = await scanFiles(
13133
+ fileList,
13134
+ repositoryPath,
13135
+ this.gqlClient
13136
+ );
13137
+ fixReportId = scanResult.fixReportId;
12592
13138
  }
12593
13139
  const effectiveOffset = offset ?? (this.currentOffset || 0);
12594
13140
  logDebug("effectiveOffset", { effectiveOffset });
@@ -12597,11 +13143,17 @@ var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService
12597
13143
  effectiveOffset,
12598
13144
  limit
12599
13145
  );
13146
+ if (fixes.totalCount > 0) {
13147
+ this.storedFixReportId = fixReportId;
13148
+ this.fixReportIdTimestamp = Date.now();
13149
+ } else {
13150
+ this.reset();
13151
+ }
12600
13152
  const prompt = fixesPrompt({
12601
13153
  fixes: fixes.fixes,
12602
13154
  totalCount: fixes.totalCount,
12603
13155
  offset: effectiveOffset,
12604
- scannedFiles: fileList.map((file) => path13.basename(file))
13156
+ scannedFiles: [...fileList]
12605
13157
  });
12606
13158
  this.currentOffset = effectiveOffset + (fixes.fixes?.length || 0);
12607
13159
  return prompt;
@@ -12626,101 +13178,6 @@ var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService
12626
13178
  }
12627
13179
  return gqlClient;
12628
13180
  }
12629
- async initializeReport() {
12630
- if (!this.gqlClient) {
12631
- throw new GqlClientError();
12632
- }
12633
- try {
12634
- const {
12635
- uploadS3BucketInfo: { repoUploadInfo }
12636
- } = await this.gqlClient.uploadS3BucketInfo();
12637
- logInfo("Upload info retrieved", { uploadKey: repoUploadInfo?.uploadKey });
12638
- return repoUploadInfo;
12639
- } catch (error) {
12640
- const message = error.message;
12641
- throw new ReportInitializationError(
12642
- `Error initializing report: ${message}`
12643
- );
12644
- }
12645
- }
12646
- async packFiles(fileList, repositoryPath) {
12647
- try {
12648
- const zipBuffer = await this.filePacking.packFiles(
12649
- repositoryPath,
12650
- fileList
12651
- );
12652
- logInfo("Files packed successfully", { fileCount: fileList.length });
12653
- return zipBuffer;
12654
- } catch (error) {
12655
- const message = error.message;
12656
- throw new FileProcessingError(`Error packing files: ${message}`);
12657
- }
12658
- }
12659
- async uploadFiles(zipBuffer, repoUploadInfo) {
12660
- if (!repoUploadInfo) {
12661
- throw new FileUploadError("Upload info is required");
12662
- }
12663
- try {
12664
- await uploadFile({
12665
- file: zipBuffer,
12666
- url: repoUploadInfo.url,
12667
- uploadFields: JSON.parse(repoUploadInfo.uploadFieldsJSON),
12668
- uploadKey: repoUploadInfo.uploadKey
12669
- });
12670
- logInfo("File uploaded successfully");
12671
- } catch (error) {
12672
- logError("File upload failed", { error: error.message });
12673
- throw new FileUploadError(
12674
- `Failed to upload the file: ${error.message}`
12675
- );
12676
- }
12677
- }
12678
- async getProjectId() {
12679
- if (!this.gqlClient) {
12680
- throw new GqlClientError();
12681
- }
12682
- const projectId = await this.gqlClient.getProjectId();
12683
- logInfo("Project ID retrieved", { projectId });
12684
- return projectId;
12685
- }
12686
- async runScan(params) {
12687
- if (!this.gqlClient) {
12688
- throw new GqlClientError();
12689
- }
12690
- const { fixReportId, projectId } = params;
12691
- logInfo("Starting scan", { fixReportId, projectId });
12692
- const submitVulnerabilityReportVariables = {
12693
- fixReportId,
12694
- projectId,
12695
- repoUrl: "",
12696
- reference: "no-branch",
12697
- scanSource: "MCP" /* Mcp */
12698
- };
12699
- logInfo("Submitting vulnerability report");
12700
- const submitRes = await this.gqlClient.submitVulnerabilityReport(
12701
- submitVulnerabilityReportVariables
12702
- );
12703
- if (submitRes.submitVulnerabilityReport.__typename !== "VulnerabilityReport") {
12704
- logError("Vulnerability report submission failed", {
12705
- response: submitRes
12706
- });
12707
- throw new ScanError("\u{1F575}\uFE0F\u200D\u2642\uFE0F Mobb analysis failed");
12708
- }
12709
- logInfo("Vulnerability report submitted successfully", {
12710
- analysisId: submitRes.submitVulnerabilityReport.fixReportId
12711
- });
12712
- logInfo("Starting analysis subscription");
12713
- await this.gqlClient.subscribeToGetAnalysis({
12714
- subscribeToAnalysisParams: {
12715
- analysisId: submitRes.submitVulnerabilityReport.fixReportId
12716
- },
12717
- callback: () => {
12718
- },
12719
- callbackStates: ["Finished" /* Finished */],
12720
- timeoutInMs: VUL_REPORT_DIGEST_TIMEOUT_MS2
12721
- });
12722
- logInfo("Analysis subscription completed");
12723
- }
12724
13181
  async getReportFixes(fixReportId, offset, limit) {
12725
13182
  logDebug("getReportFixes", { fixReportId, offset, limit });
12726
13183
  if (!this.gqlClient) {
@@ -12761,10 +13218,13 @@ How to invoke:
12761
13218
  \u2022 Optional arguments:
12762
13219
  \u2013 offset (number): pagination offset used when the result set is large.
12763
13220
  \u2013 limit (number): maximum number of fixes to include in the response.
13221
+ \u2013 maxFiles (number): maximum number of files to scan (default: ${MCP_DEFAULT_MAX_FILES_TO_SCAN}). Provide this value to increase the scope of the scan.
12764
13222
  \u2013 rescan (boolean): true to force a complete rescan even if cached results exist.
12765
13223
 
12766
13224
  Behaviour:
13225
+ \u2022 If the directory is a valid Git repository, the tool scans the changed files in the repository. If there are no changes, it scans the files included in the las commit.
12767
13226
  \u2022 If the directory is not a valid Git repository, the tool falls back to scanning recently changed files in the folder.
13227
+ \u2022 If maxFiles is provided, the tool scans the maxFiles most recently changed files in the repository.
12768
13228
  \u2022 By default, only new, modified, or staged files are scanned; if none are found, it checks recently changed files.
12769
13229
  \u2022 The tool NEVER commits or pushes changes; it only returns proposed diffs/fixes as text.
12770
13230
 
@@ -12777,15 +13237,19 @@ Example payload:
12777
13237
  {
12778
13238
  "path": "/home/user/my-project",
12779
13239
  "limit": 20,
13240
+ "maxFiles": 50,
12780
13241
  "rescan": false
12781
13242
  }`);
12782
- __publicField(this, "inputValidationSchema", z33.object({
12783
- path: z33.string().describe(
13243
+ __publicField(this, "inputValidationSchema", z34.object({
13244
+ path: z34.string().describe(
12784
13245
  "Full local path to repository to scan and fix vulnerabilities"
12785
13246
  ),
12786
- offset: z33.number().optional().describe("Optional offset for pagination"),
12787
- limit: z33.number().optional().describe("Optional maximum number of results to return"),
12788
- rescan: z33.boolean().optional().describe("Optional whether to rescan the repository")
13247
+ offset: z34.number().optional().describe("Optional offset for pagination"),
13248
+ limit: z34.number().optional().describe("Optional maximum number of results to return"),
13249
+ maxFiles: z34.number().optional().describe(
13250
+ `Optional maximum number of files to scan (default: ${MCP_DEFAULT_MAX_FILES_TO_SCAN}). Increase for comprehensive scans of larger codebases or decrease for faster focused scans.`
13251
+ ),
13252
+ rescan: z34.boolean().optional().describe("Optional whether to rescan the repository")
12789
13253
  }));
12790
13254
  __publicField(this, "inputSchema", {
12791
13255
  type: "object",
@@ -12802,6 +13266,10 @@ Example payload:
12802
13266
  type: "number",
12803
13267
  description: "[Optional] maximum number of results to return"
12804
13268
  },
13269
+ maxFiles: {
13270
+ type: "number",
13271
+ description: `[Optional] maximum number of files to scan (default: ${MCP_DEFAULT_MAX_FILES_TO_SCAN}). Use higher values for more comprehensive scans or lower values for faster performance.`
13272
+ },
12805
13273
  rescan: {
12806
13274
  type: "boolean",
12807
13275
  description: "[Optional] whether to rescan the repository"
@@ -12824,43 +13292,14 @@ Example payload:
12824
13292
  `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
12825
13293
  );
12826
13294
  }
12827
- const path14 = pathValidationResult.path;
12828
- const gitService = new GitService(path14, log);
12829
- const gitValidation = await gitService.validateRepository();
12830
- let files = [];
12831
- if (!gitValidation.isValid) {
12832
- logDebug(
12833
- "Git repository validation failed, using all files in the repository",
12834
- {
12835
- path: path14
12836
- }
12837
- );
12838
- files = FileUtils.getLastChangedFiles(path14);
12839
- logDebug("Found files in the repository", {
12840
- files,
12841
- fileCount: files.length
12842
- });
12843
- } else {
12844
- const gitResult = await gitService.getChangedFiles();
12845
- files = gitResult.files;
12846
- if (files.length === 0) {
12847
- const recentResult = await gitService.getRecentlyChangedFiles();
12848
- files = recentResult.files;
12849
- logDebug(
12850
- "No changes found, using recently changed files from git history",
12851
- {
12852
- files,
12853
- fileCount: files.length,
12854
- commitsChecked: recentResult.commitCount
12855
- }
12856
- );
12857
- } else {
12858
- logDebug("Found changed files in the git repository", {
12859
- files,
12860
- fileCount: files.length
12861
- });
12862
- }
12863
- }
13295
+ const path13 = pathValidationResult.path;
13296
+ const files = await getLocalFiles({
13297
+ path: path13,
13298
+ maxFileSize: 1024 * 1024 * 5,
13299
+ // 5MB
13300
+ maxFiles: args.maxFiles
13301
+ });
13302
+ logInfo("Files", { files });
12864
13303
  if (files.length === 0) {
12865
13304
  return {
12866
13305
  content: [
@@ -12873,11 +13312,11 @@ Example payload:
12873
13312
  }
12874
13313
  try {
12875
13314
  const fixResult = await this.vulnerabilityFixService.processVulnerabilities({
12876
- fileList: files,
12877
- repositoryPath: path14,
13315
+ fileList: files.map((file) => file.relativePath),
13316
+ repositoryPath: args.path,
12878
13317
  offset: args.offset,
12879
13318
  limit: args.limit,
12880
- isRescan: args.rescan
13319
+ isRescan: args.rescan || !!args.maxFiles
12881
13320
  });
12882
13321
  const result = {
12883
13322
  content: [
@@ -12932,8 +13371,10 @@ function createMcpServer() {
12932
13371
  };
12933
13372
  const scanAndFixVulnerabilitiesTool = new ScanAndFixVulnerabilitiesTool();
12934
13373
  const fetchAvailableFixesTool = new FetchAvailableFixesTool();
13374
+ const checkForNewAvailableFixesTool = new CheckForNewAvailableFixesTool();
12935
13375
  registerIfEnabled(scanAndFixVulnerabilitiesTool);
12936
13376
  registerIfEnabled(fetchAvailableFixesTool);
13377
+ registerIfEnabled(checkForNewAvailableFixesTool);
12937
13378
  logInfo("MCP server created and configured");
12938
13379
  return server;
12939
13380
  }
@@ -12967,7 +13408,7 @@ var mcpHandler = async (_args) => {
12967
13408
  };
12968
13409
 
12969
13410
  // src/args/commands/review.ts
12970
- import fs11 from "fs";
13411
+ import fs12 from "fs";
12971
13412
  import chalk9 from "chalk";
12972
13413
  function reviewBuilder(yargs2) {
12973
13414
  return yargs2.option("f", {
@@ -13004,7 +13445,7 @@ function reviewBuilder(yargs2) {
13004
13445
  ).help();
13005
13446
  }
13006
13447
  function validateReviewOptions(argv) {
13007
- if (!fs11.existsSync(argv.f)) {
13448
+ if (!fs12.existsSync(argv.f)) {
13008
13449
  throw new CliError(`
13009
13450
  Can't access ${chalk9.bold(argv.f)}`);
13010
13451
  }