mobbdev 1.0.177 → 1.0.179

This diff shows the changes between publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.mjs +801 -172
  2. package/package.json +1 -1
package/dist/index.mjs CHANGED
@@ -32,7 +32,7 @@ var init_env = __esm({
32
32
  });
33
33
 
34
34
  // src/mcp/core/configs.ts
35
- var MCP_DEFAULT_API_URL, MCP_API_KEY_HEADER_NAME, MCP_LOGIN_MAX_WAIT, MCP_LOGIN_CHECK_DELAY, MCP_VUL_REPORT_DIGEST_TIMEOUT_MS, MCP_MAX_FILE_SIZE, MCP_PERIODIC_CHECK_INTERVAL, MCP_DEFAULT_MAX_FILES_TO_SCAN, MCP_REPORT_ID_EXPIRATION_MS, MCP_TOOLS_BROWSER_COOLDOWN_MS, MCP_DEFAULT_LIMIT, isAutoScan, MVS_AUTO_FIX_OVERRIDE, MCP_AUTO_FIX_DEBUG_MODE;
35
+ var MCP_DEFAULT_API_URL, MCP_API_KEY_HEADER_NAME, MCP_LOGIN_MAX_WAIT, MCP_LOGIN_CHECK_DELAY, MCP_VUL_REPORT_DIGEST_TIMEOUT_MS, MCP_MAX_FILE_SIZE, MCP_PERIODIC_CHECK_INTERVAL, MCP_DEFAULT_MAX_FILES_TO_SCAN, MCP_REPORT_ID_EXPIRATION_MS, MCP_TOOLS_BROWSER_COOLDOWN_MS, MCP_DEFAULT_LIMIT, isAutoScan, MVS_AUTO_FIX_OVERRIDE, MCP_AUTO_FIX_DEBUG_MODE, MCP_PERIODIC_TRACK_INTERVAL, MCP_DEFAULT_REST_API_URL;
36
36
  var init_configs = __esm({
37
37
  "src/mcp/core/configs.ts"() {
38
38
  "use strict";
@@ -51,6 +51,8 @@ var init_configs = __esm({
51
51
  isAutoScan = process.env["AUTO_SCAN"] !== "false";
52
52
  MVS_AUTO_FIX_OVERRIDE = process.env["MVS_AUTO_FIX"];
53
53
  MCP_AUTO_FIX_DEBUG_MODE = true;
54
+ MCP_PERIODIC_TRACK_INTERVAL = 60 * 60 * 1e3;
55
+ MCP_DEFAULT_REST_API_URL = "https://api.mobb.ai/api/rest/mcp/track";
54
56
  }
55
57
  });
56
58
 
@@ -2274,6 +2276,39 @@ var UploadS3BucketInfoDocument = `
2274
2276
  }
2275
2277
  }
2276
2278
  `;
2279
+ var UploadAiBlameInferencesInitDocument = `
2280
+ mutation UploadAIBlameInferencesInit($sessions: [AIBlameInferenceInitInput!]!) {
2281
+ uploadAIBlameInferencesInit(sessions: $sessions) {
2282
+ status
2283
+ error
2284
+ uploadSessions {
2285
+ aiBlameInferenceId
2286
+ prompt {
2287
+ url
2288
+ artifactId
2289
+ fileName
2290
+ uploadFieldsJSON
2291
+ uploadKey
2292
+ }
2293
+ inference {
2294
+ url
2295
+ artifactId
2296
+ fileName
2297
+ uploadFieldsJSON
2298
+ uploadKey
2299
+ }
2300
+ }
2301
+ }
2302
+ }
2303
+ `;
2304
+ var FinalizeAiBlameInferencesUploadDocument = `
2305
+ mutation FinalizeAIBlameInferencesUpload($sessions: [AIBlameInferenceFinalizeInput!]!) {
2306
+ finalizeAIBlameInferencesUpload(sessions: $sessions) {
2307
+ status
2308
+ error
2309
+ }
2310
+ }
2311
+ `;
2277
2312
  var DigestVulnerabilityReportDocument = `
2278
2313
  mutation DigestVulnerabilityReport($vulnerabilityReportFileName: String, $fixReportId: String!, $projectId: String!, $scanSource: String!, $repoUrl: String, $reference: String, $sha: String) {
2279
2314
  digestVulnerabilityReport(
@@ -2531,6 +2566,12 @@ function getSdk(client, withWrapper = defaultWrapper) {
2531
2566
  uploadS3BucketInfo(variables, requestHeaders, signal) {
2532
2567
  return withWrapper((wrappedRequestHeaders) => client.request({ document: UploadS3BucketInfoDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "uploadS3BucketInfo", "mutation", variables);
2533
2568
  },
2569
+ UploadAIBlameInferencesInit(variables, requestHeaders, signal) {
2570
+ return withWrapper((wrappedRequestHeaders) => client.request({ document: UploadAiBlameInferencesInitDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "UploadAIBlameInferencesInit", "mutation", variables);
2571
+ },
2572
+ FinalizeAIBlameInferencesUpload(variables, requestHeaders, signal) {
2573
+ return withWrapper((wrappedRequestHeaders) => client.request({ document: FinalizeAiBlameInferencesUploadDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "FinalizeAIBlameInferencesUpload", "mutation", variables);
2574
+ },
2534
2575
  DigestVulnerabilityReport(variables, requestHeaders, signal) {
2535
2576
  return withWrapper((wrappedRequestHeaders) => client.request({ document: DigestVulnerabilityReportDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "DigestVulnerabilityReport", "mutation", variables);
2536
2577
  },
@@ -6373,7 +6414,7 @@ async function getAdoSdk(params) {
6373
6414
  const url = new URL(repoUrl);
6374
6415
  const origin2 = url.origin.toLowerCase().endsWith(".visualstudio.com") ? DEFUALT_ADO_ORIGIN : url.origin.toLowerCase();
6375
6416
  const params2 = `path=/&versionDescriptor[versionOptions]=0&versionDescriptor[versionType]=commit&versionDescriptor[version]=${branch}&resolveLfs=true&$format=zip&api-version=5.0&download=true`;
6376
- const path15 = [
6417
+ const path17 = [
6377
6418
  prefixPath,
6378
6419
  owner,
6379
6420
  projectName,
@@ -6384,7 +6425,7 @@ async function getAdoSdk(params) {
6384
6425
  "items",
6385
6426
  "items"
6386
6427
  ].filter(Boolean).join("/");
6387
- return new URL(`${path15}?${params2}`, origin2).toString();
6428
+ return new URL(`${path17}?${params2}`, origin2).toString();
6388
6429
  },
6389
6430
  async getAdoBranchList({ repoUrl }) {
6390
6431
  try {
@@ -6875,6 +6916,9 @@ var AdoSCMLib = class extends SCMLib {
6875
6916
  markdownComment: comment
6876
6917
  });
6877
6918
  }
6919
+ async getCommitDiff(_commitSha) {
6920
+ throw new Error("getCommitDiff not implemented for ADO");
6921
+ }
6878
6922
  };
6879
6923
 
6880
6924
  // src/features/analysis/scm/bitbucket/bitbucket.ts
@@ -7438,6 +7482,9 @@ var BitbucketSCMLib = class extends SCMLib {
7438
7482
  markdownComment: comment
7439
7483
  });
7440
7484
  }
7485
+ async getCommitDiff(_commitSha) {
7486
+ throw new Error("getCommitDiff not implemented for Bitbucket");
7487
+ }
7441
7488
  };
7442
7489
 
7443
7490
  // src/features/analysis/scm/constants.ts
@@ -7806,6 +7853,33 @@ function getGithubSdk(params = {}) {
7806
7853
  commit_sha: commitSha
7807
7854
  });
7808
7855
  },
7856
+ async getCommitWithDiff({
7857
+ commitSha,
7858
+ owner,
7859
+ repo
7860
+ }) {
7861
+ const [commitData, diffData] = await Promise.all([
7862
+ // Get commit metadata
7863
+ octokit.rest.repos.getCommit({
7864
+ repo,
7865
+ owner,
7866
+ ref: commitSha
7867
+ }),
7868
+ // Get commit diff
7869
+ octokit.request("GET /repos/{owner}/{repo}/commits/{ref}", {
7870
+ owner,
7871
+ repo,
7872
+ ref: commitSha,
7873
+ headers: {
7874
+ Accept: "application/vnd.github.v3.diff"
7875
+ }
7876
+ })
7877
+ ]);
7878
+ return {
7879
+ commit: commitData.data,
7880
+ diff: diffData.data
7881
+ };
7882
+ },
7809
7883
  async getTagDate({
7810
7884
  tag,
7811
7885
  owner,
@@ -7839,14 +7913,14 @@ function getGithubSdk(params = {}) {
7839
7913
  };
7840
7914
  },
7841
7915
  async getGithubBlameRanges(params2) {
7842
- const { ref, gitHubUrl, path: path15 } = params2;
7916
+ const { ref, gitHubUrl, path: path17 } = params2;
7843
7917
  const { owner, repo } = parseGithubOwnerAndRepo(gitHubUrl);
7844
7918
  const res = await octokit.graphql(
7845
7919
  GET_BLAME_DOCUMENT,
7846
7920
  {
7847
7921
  owner,
7848
7922
  repo,
7849
- path: path15,
7923
+ path: path17,
7850
7924
  ref
7851
7925
  }
7852
7926
  );
@@ -8153,11 +8227,11 @@ var GithubSCMLib = class extends SCMLib {
8153
8227
  markdownComment: comment
8154
8228
  });
8155
8229
  }
8156
- async getRepoBlameRanges(ref, path15) {
8230
+ async getRepoBlameRanges(ref, path17) {
8157
8231
  this._validateUrl();
8158
8232
  return await this.githubSdk.getGithubBlameRanges({
8159
8233
  ref,
8160
- path: path15,
8234
+ path: path17,
8161
8235
  gitHubUrl: this.url
8162
8236
  });
8163
8237
  }
@@ -8241,6 +8315,24 @@ var GithubSCMLib = class extends SCMLib {
8241
8315
  comment_id: commentId
8242
8316
  });
8243
8317
  }
8318
+ async getCommitDiff(commitSha) {
8319
+ this._validateAccessTokenAndUrl();
8320
+ const { owner, repo } = parseGithubOwnerAndRepo(this.url);
8321
+ const { commit, diff } = await this.githubSdk.getCommitWithDiff({
8322
+ owner,
8323
+ repo,
8324
+ commitSha
8325
+ });
8326
+ const commitTimestamp = commit.commit.committer?.date ? new Date(commit.commit.committer.date) : new Date(commit.commit.author?.date || Date.now());
8327
+ return {
8328
+ diff,
8329
+ commitTimestamp,
8330
+ commitSha: commit.sha,
8331
+ authorName: commit.commit.author?.name,
8332
+ authorEmail: commit.commit.author?.email,
8333
+ message: commit.commit.message
8334
+ };
8335
+ }
8244
8336
  };
8245
8337
 
8246
8338
  // src/features/analysis/scm/gitlab/gitlab.ts
@@ -8571,13 +8663,13 @@ function parseGitlabOwnerAndRepo(gitlabUrl) {
8571
8663
  const { organization, repoName, projectPath } = parsingResult;
8572
8664
  return { owner: organization, repo: repoName, projectPath };
8573
8665
  }
8574
- async function getGitlabBlameRanges({ ref, gitlabUrl, path: path15 }, options) {
8666
+ async function getGitlabBlameRanges({ ref, gitlabUrl, path: path17 }, options) {
8575
8667
  const { projectPath } = parseGitlabOwnerAndRepo(gitlabUrl);
8576
8668
  const api2 = getGitBeaker({
8577
8669
  url: gitlabUrl,
8578
8670
  gitlabAuthToken: options?.gitlabAuthToken
8579
8671
  });
8580
- const resp = await api2.RepositoryFiles.allFileBlames(projectPath, path15, ref);
8672
+ const resp = await api2.RepositoryFiles.allFileBlames(projectPath, path17, ref);
8581
8673
  let lineNumber = 1;
8582
8674
  return resp.filter((range) => range.lines).map((range) => {
8583
8675
  const oldLineNumber = lineNumber;
@@ -8753,10 +8845,10 @@ var GitlabSCMLib = class extends SCMLib {
8753
8845
  markdownComment: comment
8754
8846
  });
8755
8847
  }
8756
- async getRepoBlameRanges(ref, path15) {
8848
+ async getRepoBlameRanges(ref, path17) {
8757
8849
  this._validateUrl();
8758
8850
  return await getGitlabBlameRanges(
8759
- { ref, path: path15, gitlabUrl: this.url },
8851
+ { ref, path: path17, gitlabUrl: this.url },
8760
8852
  {
8761
8853
  url: this.url,
8762
8854
  gitlabAuthToken: this.accessToken
@@ -8806,6 +8898,9 @@ var GitlabSCMLib = class extends SCMLib {
8806
8898
  this._validateAccessTokenAndUrl();
8807
8899
  return `${this.url}/-/commits/${branchName}`;
8808
8900
  }
8901
+ async getCommitDiff(_commitSha) {
8902
+ throw new Error("getCommitDiff not implemented for GitLab");
8903
+ }
8809
8904
  };
8810
8905
 
8811
8906
  // src/features/analysis/scm/scmFactory.ts
@@ -8902,6 +8997,17 @@ var StubSCMLib = class extends SCMLib {
8902
8997
  async addCommentToSubmitRequest(_submitRequestId, _comment) {
8903
8998
  console.warn("addCommentToSubmitRequest() no-op");
8904
8999
  }
9000
+ async getCommitDiff(_commitSha) {
9001
+ console.warn("getCommitDiff() returning stub diff");
9002
+ return {
9003
+ diff: "",
9004
+ commitTimestamp: /* @__PURE__ */ new Date(),
9005
+ commitSha: _commitSha,
9006
+ authorName: void 0,
9007
+ authorEmail: void 0,
9008
+ message: void 0
9009
+ };
9010
+ }
8905
9011
  };
8906
9012
 
8907
9013
  // src/features/analysis/scm/scmFactory.ts
@@ -9454,7 +9560,8 @@ var mobbCliCommand = {
9454
9560
  analyze: "analyze",
9455
9561
  review: "review",
9456
9562
  convertToSarif: "convert-to-sarif",
9457
- mcp: "mcp"
9563
+ mcp: "mcp",
9564
+ uploadAiBlame: "upload-ai-blame"
9458
9565
  };
9459
9566
  var ScanContext = {
9460
9567
  FULL_SCAN: "FULL_SCAN",
@@ -9465,7 +9572,7 @@ var ScanContext = {
9465
9572
  };
9466
9573
 
9467
9574
  // src/args/yargs.ts
9468
- import chalk10 from "chalk";
9575
+ import chalk11 from "chalk";
9469
9576
  import yargs from "yargs/yargs";
9470
9577
 
9471
9578
  // src/args/commands/analyze.ts
@@ -9834,7 +9941,7 @@ async function postIssueComment(params) {
9834
9941
  fpDescription
9835
9942
  } = params;
9836
9943
  const {
9837
- path: path15,
9944
+ path: path17,
9838
9945
  startLine,
9839
9946
  vulnerabilityReportIssue: {
9840
9947
  vulnerabilityReportIssueTags,
@@ -9849,7 +9956,7 @@ async function postIssueComment(params) {
9849
9956
  Refresh the page in order to see the changes.`,
9850
9957
  pull_number: pullRequest,
9851
9958
  commit_id: commitSha,
9852
- path: path15,
9959
+ path: path17,
9853
9960
  line: startLine
9854
9961
  });
9855
9962
  const commentId = commentRes.data.id;
@@ -9883,7 +9990,7 @@ async function postFixComment(params) {
9883
9990
  scanner
9884
9991
  } = params;
9885
9992
  const {
9886
- path: path15,
9993
+ path: path17,
9887
9994
  startLine,
9888
9995
  vulnerabilityReportIssue: { fixId, vulnerabilityReportIssueTags, category },
9889
9996
  vulnerabilityReportIssueId
@@ -9901,7 +10008,7 @@ async function postFixComment(params) {
9901
10008
  Refresh the page in order to see the changes.`,
9902
10009
  pull_number: pullRequest,
9903
10010
  commit_id: commitSha,
9904
- path: path15,
10011
+ path: path17,
9905
10012
  line: startLine
9906
10013
  });
9907
10014
  const commentId = commentRes.data.id;
@@ -10198,10 +10305,10 @@ async function getGitInfo(srcDirPath) {
10198
10305
  repoUrl: void 0
10199
10306
  };
10200
10307
  }
10201
- const gitInfo = await gitService.getGitInfo();
10308
+ const gitInfo2 = await gitService.getGitInfo();
10202
10309
  return {
10203
10310
  success: true,
10204
- ...gitInfo
10311
+ ...gitInfo2
10205
10312
  };
10206
10313
  } catch (e) {
10207
10314
  if (e instanceof Error) {
@@ -10995,8 +11102,8 @@ import path8 from "path";
10995
11102
  var debug15 = Debug15("mobbdev:checkmarx");
10996
11103
  var require2 = createRequire(import.meta.url);
10997
11104
  var getCheckmarxPath = () => {
10998
- const os3 = type();
10999
- const cxFileName = os3 === "Windows_NT" ? "cx.exe" : "cx";
11105
+ const os5 = type();
11106
+ const cxFileName = os5 === "Windows_NT" ? "cx.exe" : "cx";
11000
11107
  try {
11001
11108
  return require2.resolve(`.bin/${cxFileName}`);
11002
11109
  } catch (e) {
@@ -11671,7 +11778,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
11671
11778
  text: "\u{1F4C1} Uploading Report successful!"
11672
11779
  });
11673
11780
  }
11674
- let gitInfo = { success: false };
11781
+ let gitInfo2 = { success: false };
11675
11782
  if (reportPath) {
11676
11783
  const vulnFiles = await _digestReport({
11677
11784
  gqlClient,
@@ -11687,7 +11794,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
11687
11794
  repoUploadInfo,
11688
11795
  isIncludeAllFiles: false
11689
11796
  });
11690
- gitInfo = res.gitInfo;
11797
+ gitInfo2 = res.gitInfo;
11691
11798
  } else {
11692
11799
  const res = await _zipAndUploadRepo({
11693
11800
  srcPath,
@@ -11695,7 +11802,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
11695
11802
  repoUploadInfo,
11696
11803
  isIncludeAllFiles: true
11697
11804
  });
11698
- gitInfo = res.gitInfo;
11805
+ gitInfo2 = res.gitInfo;
11699
11806
  await _digestReport({
11700
11807
  gqlClient,
11701
11808
  fixReportId: reportUploadInfo.fixReportId,
@@ -11713,9 +11820,9 @@ async function _scan(params, { skipPrompts = false } = {}) {
11713
11820
  submitVulnerabilityReportVariables: {
11714
11821
  fixReportId: reportUploadInfo.fixReportId,
11715
11822
  projectId,
11716
- repoUrl: repo || gitInfo.repoUrl || getTopLevelDirName(srcPath),
11717
- reference: ref || gitInfo.reference || "no-branch",
11718
- sha: commitHash || gitInfo.hash || "0123456789abcdef",
11823
+ repoUrl: repo || gitInfo2.repoUrl || getTopLevelDirName(srcPath),
11824
+ reference: ref || gitInfo2.reference || "no-branch",
11825
+ sha: commitHash || gitInfo2.hash || "0123456789abcdef",
11719
11826
  scanSource: _getScanSource(command, ci),
11720
11827
  pullRequest: params.pullRequest,
11721
11828
  experimentalEnabled: !!experimentalEnabled,
@@ -11761,11 +11868,11 @@ async function _zipAndUploadRepo({
11761
11868
  const srcFileStatus = await fsPromises2.lstat(srcPath);
11762
11869
  const zippingSpinner = createSpinner4("\u{1F4E6} Zipping repo").start();
11763
11870
  let zipBuffer;
11764
- let gitInfo = { success: false };
11871
+ let gitInfo2 = { success: false };
11765
11872
  if (srcFileStatus.isFile() && path9.extname(srcPath).toLowerCase() === ".fpr") {
11766
11873
  zipBuffer = await repackFpr(srcPath);
11767
11874
  } else {
11768
- gitInfo = await getGitInfo(srcPath);
11875
+ gitInfo2 = await getGitInfo(srcPath);
11769
11876
  zipBuffer = await pack(srcPath, vulnFiles, isIncludeAllFiles);
11770
11877
  }
11771
11878
  zippingSpinner.success({ text: "\u{1F4E6} Zipping repo successful!" });
@@ -11782,7 +11889,7 @@ async function _zipAndUploadRepo({
11782
11889
  throw e;
11783
11890
  }
11784
11891
  uploadRepoSpinner.success({ text: "\u{1F4C1} Uploading Repo successful!" });
11785
- return { gitInfo };
11892
+ return { gitInfo: gitInfo2 };
11786
11893
  }
11787
11894
  async function _digestReport({
11788
11895
  gqlClient,
@@ -12283,6 +12390,29 @@ var logWarn = (message, data) => logger.log(message, "warn", data);
12283
12390
  var logDebug = (message, data) => logger.log(message, "debug", data);
12284
12391
  var log = logger.log.bind(logger);
12285
12392
 
12393
+ // src/mcp/services/ConfigStoreService.ts
12394
+ init_configs();
12395
+ import Configstore4 from "configstore";
12396
+ function createConfigStore(defaultValues = { apiToken: "" }) {
12397
+ const API_URL2 = process.env["API_URL"] || MCP_DEFAULT_API_URL;
12398
+ let domain = "";
12399
+ try {
12400
+ const url = new URL(API_URL2);
12401
+ domain = url.hostname;
12402
+ } catch (e) {
12403
+ domain = API_URL2.replace(/^https?:\/\//, "").replace(/\/.*$/, "").replace(/:\d+$/, "");
12404
+ }
12405
+ const sanitizedDomain = domain.replace(/\./g, "_");
12406
+ return new Configstore4(
12407
+ `${packageJson.name}-${sanitizedDomain}`,
12408
+ defaultValues
12409
+ );
12410
+ }
12411
+ function getConfigStore() {
12412
+ return createConfigStore();
12413
+ }
12414
+ var configStore = getConfigStore();
12415
+
12286
12416
  // src/mcp/services/McpGQLClient.ts
12287
12417
  import crypto3 from "crypto";
12288
12418
  import { GraphQLClient as GraphQLClient2 } from "graphql-request";
@@ -12415,29 +12545,6 @@ var GetLatestReportByRepoUrlResponseSchema = z31.object({
12415
12545
  expiredReport: z31.array(ExpiredReportSchema)
12416
12546
  });
12417
12547
 
12418
- // src/mcp/services/ConfigStoreService.ts
12419
- init_configs();
12420
- import Configstore4 from "configstore";
12421
- function createConfigStore(defaultValues = { apiToken: "" }) {
12422
- const API_URL2 = process.env["API_URL"] || MCP_DEFAULT_API_URL;
12423
- let domain = "";
12424
- try {
12425
- const url = new URL(API_URL2);
12426
- domain = url.hostname;
12427
- } catch (e) {
12428
- domain = API_URL2.replace(/^https?:\/\//, "").replace(/\/.*$/, "").replace(/:\d+$/, "");
12429
- }
12430
- const sanitizedDomain = domain.replace(/\./g, "_");
12431
- return new Configstore4(
12432
- `${packageJson.name}-${sanitizedDomain}`,
12433
- defaultValues
12434
- );
12435
- }
12436
- function getConfigStore() {
12437
- return createConfigStore();
12438
- }
12439
- var configStore = getConfigStore();
12440
-
12441
12548
  // src/mcp/services/McpAuthService.ts
12442
12549
  import crypto2 from "crypto";
12443
12550
  import os2 from "os";
@@ -12557,6 +12664,12 @@ var McpGQLClient = class {
12557
12664
  }
12558
12665
  };
12559
12666
  }
12667
+ async uploadAIBlameInferencesInitRaw(variables) {
12668
+ return await this.clientSdk.UploadAIBlameInferencesInit(variables);
12669
+ }
12670
+ async finalizeAIBlameInferencesUploadRaw(variables) {
12671
+ return await this.clientSdk.FinalizeAIBlameInferencesUpload(variables);
12672
+ }
12560
12673
  async isApiEndpointReachable() {
12561
12674
  try {
12562
12675
  logDebug("[GraphQL] Calling Me query for API connection verification");
@@ -13119,6 +13232,369 @@ async function createAuthenticatedMcpGQLClient({
13119
13232
  return new McpGQLClient({ apiKey: newApiToken, type: "apiKey" });
13120
13233
  }
13121
13234
 
13235
+ // src/mcp/services/McpUsageService/McpUsageService.ts
13236
+ init_configs();
13237
+ import fetch5 from "node-fetch";
13238
+ import os4 from "os";
13239
+ import { v4 as uuidv43, v5 as uuidv5 } from "uuid";
13240
+
13241
+ // src/mcp/services/McpUsageService/host.ts
13242
+ import { execSync } from "child_process";
13243
+ import fs10 from "fs";
13244
+ import os3 from "os";
13245
+ import path11 from "path";
13246
+ var IDEs = ["cursor", "windsurf", "webstorm", "vscode"];
13247
+ var runCommand = (cmd) => {
13248
+ try {
13249
+ return execSync(cmd, { encoding: "utf8" }).trim();
13250
+ } catch {
13251
+ return "";
13252
+ }
13253
+ };
13254
+ var gitInfo = {
13255
+ name: runCommand("git config user.name"),
13256
+ email: runCommand("git config user.email")
13257
+ };
13258
+ var getMCPConfigPath = (hostName) => {
13259
+ const home = os3.homedir();
13260
+ switch (hostName.toLowerCase()) {
13261
+ case "cursor":
13262
+ return path11.join(home, ".cursor", "mcp.json");
13263
+ case "windsurf":
13264
+ return path11.join(home, ".codeium", "windsurf", "mcp_config.json");
13265
+ case "webstorm":
13266
+ return "";
13267
+ case "visualstudiocode":
13268
+ case "vscode":
13269
+ return process.platform === "win32" ? path11.join(home, "AppData", "Roaming", "Code", "User", "mcp.json") : path11.join(
13270
+ home,
13271
+ "Library",
13272
+ "Application Support",
13273
+ "Code",
13274
+ "User",
13275
+ "mcp.json"
13276
+ );
13277
+ default:
13278
+ throw new Error(`Unknown hostName: ${hostName}`);
13279
+ }
13280
+ };
13281
+ var readMCPConfig = (hostName) => {
13282
+ const filePath = getMCPConfigPath(hostName);
13283
+ if (!fs10.existsSync(filePath)) return null;
13284
+ return JSON.parse(fs10.readFileSync(filePath, "utf8"));
13285
+ };
13286
+ var getRunningProcesses = () => {
13287
+ try {
13288
+ return os3.platform() === "win32" ? execSync("tasklist", { encoding: "utf8" }) : execSync("ps aux", { encoding: "utf8" });
13289
+ } catch {
13290
+ return "";
13291
+ }
13292
+ };
13293
+ var knownHosts = {
13294
+ webstorm: "WebStorm",
13295
+ cursor: "Cursor",
13296
+ windsurf: "Windsurf",
13297
+ code: "Vscode"
13298
+ };
13299
+ var versionCommands = {
13300
+ WebStorm: {
13301
+ darwin: [
13302
+ `grep -m1 '"version"' /Applications/WebStorm.app/Contents/Resources/product-info.json | cut -d'"' -f4`
13303
+ ],
13304
+ win32: []
13305
+ },
13306
+ Cursor: {
13307
+ darwin: [
13308
+ "grep -A1 CFBundleVersion /Applications/Cursor.app/Contents/Info.plist | grep '<string>' | sed -E 's/.*<string>(.*)<\\/string>.*/\\1/'",
13309
+ "cursor --version"
13310
+ ],
13311
+ win32: [
13312
+ '(Get-Item "$env:LOCALAPPDATA\\Programs\\cursor\\Cursor.exe").VersionInfo.ProductVersion'
13313
+ ]
13314
+ },
13315
+ Windsurf: {
13316
+ darwin: [
13317
+ "grep -A1 CFBundleVersion /Applications/Windsurf.app/Contents/Info.plist | grep '<string>' | sed -E 's/.*<string>(.*)<\\/string>.*/\\1/'",
13318
+ "windsurf --version"
13319
+ ],
13320
+ win32: [
13321
+ `(Get-Item "$env:LOCALAPPDATA\\Programs\\Windsurf\\Windsurf.exe").VersionInfo.ProductVersion`
13322
+ ]
13323
+ },
13324
+ Vscode: {
13325
+ darwin: [
13326
+ `grep -A1 CFBundleVersion "/Applications/Visual Studio Code.app/Contents/Info.plist" | grep '<string>' | sed -E 's/.*<string>(.*)<\\/string>.*/\\1/'`,
13327
+ process.env["TERM_PROGRAM_VERSION"] || "Unknown"
13328
+ ],
13329
+ win32: [
13330
+ `(Get-Item "$env:LOCALAPPDATA\\Programs\\Microsoft VS Code\\Code.exe").VersionInfo.ProductVersion`
13331
+ ]
13332
+ }
13333
+ };
13334
+ var getProcessInfo = (pid) => {
13335
+ const platform = os3.platform();
13336
+ try {
13337
+ if (platform === "linux" || platform === "darwin") {
13338
+ const output = execSync(`ps -o pid=,ppid=,comm= -p ${pid}`, {
13339
+ stdio: ["pipe", "pipe", "ignore"]
13340
+ }).toString().trim();
13341
+ if (!output) return null;
13342
+ const [pidStr, ppid, ...cmd] = output.trim().split(/\s+/);
13343
+ return { pid: pidStr ?? "", ppid: ppid ?? "", cmd: cmd.join(" ") };
13344
+ } else if (platform === "win32") {
13345
+ const output = execSync(
13346
+ `powershell -Command "Get-CimInstance Win32_Process -Filter 'ProcessId=${pid}' | Select-Object ProcessId,ParentProcessId,Name | Format-Table -HideTableHeaders"`,
13347
+ { stdio: ["pipe", "pipe", "ignore"] }
13348
+ ).toString().trim();
13349
+ if (!output) return null;
13350
+ const parts = output.split(/\s+/);
13351
+ const pidStr = parts[0];
13352
+ const ppid = parts[1];
13353
+ const cmd = parts.slice(2).join(" ");
13354
+ return { pid: pidStr ?? "", ppid: ppid ?? "", cmd };
13355
+ } else {
13356
+ logWarn(`[UsageService] Unsupported platform: ${platform}`);
13357
+ return null;
13358
+ }
13359
+ } catch {
13360
+ return null;
13361
+ }
13362
+ };
13363
+ var getHostInfo = () => {
13364
+ const runningProcesses = getRunningProcesses().toLowerCase();
13365
+ const results = [];
13366
+ const allConfigs = {};
13367
+ for (const ide of IDEs) {
13368
+ const cfg = readMCPConfig(ide);
13369
+ if (cfg) allConfigs[ide] = cfg;
13370
+ }
13371
+ const servers = [];
13372
+ for (const [ide, cfg] of Object.entries(allConfigs)) {
13373
+ for (const [name, server] of Object.entries(
13374
+ cfg.mcpServers || cfg.servers || {}
13375
+ )) {
13376
+ servers.push({
13377
+ ide,
13378
+ name,
13379
+ command: server.command || "",
13380
+ isRunning: false
13381
+ });
13382
+ }
13383
+ }
13384
+ const runningLines = runningProcesses.split("\n");
13385
+ for (const line of runningLines) {
13386
+ if (line.includes("mcp")) {
13387
+ const cmdLower = line.toLowerCase();
13388
+ const existingServer = servers.find(
13389
+ (s) => s.command && cmdLower.includes(s.command.toLowerCase())
13390
+ );
13391
+ if (existingServer) {
13392
+ existingServer.isRunning = true;
13393
+ } else {
13394
+ let ideName = "Unknown";
13395
+ const foundHostKey = Object.keys(knownHosts).find(
13396
+ (key) => cmdLower.includes(key)
13397
+ );
13398
+ if (foundHostKey) {
13399
+ ideName = knownHosts[foundHostKey] || "Unknown";
13400
+ } else {
13401
+ const pidMatch = line.trim().split(/\s+/)[1];
13402
+ const pid = parseInt(String(pidMatch), 10);
13403
+ if (!isNaN(pid)) {
13404
+ let currentPid = pid;
13405
+ while (currentPid && currentPid !== 0) {
13406
+ const proc = getProcessInfo(currentPid);
13407
+ if (!proc) break;
13408
+ const cmdProc = proc.cmd.toLowerCase();
13409
+ const found = Object.keys(knownHosts).find(
13410
+ (key) => cmdProc.includes(key)
13411
+ );
13412
+ if (found) {
13413
+ ideName = knownHosts[found] || "Unknown";
13414
+ break;
13415
+ }
13416
+ currentPid = parseInt(proc.ppid, 10);
13417
+ }
13418
+ }
13419
+ }
13420
+ servers.push({
13421
+ ide: ideName.toLowerCase(),
13422
+ name: "unknown",
13423
+ command: line.trim(),
13424
+ isRunning: true
13425
+ });
13426
+ }
13427
+ }
13428
+ }
13429
+ for (const { ide, name, command, isRunning } of servers) {
13430
+ const config4 = allConfigs[ide] || null;
13431
+ const ideName = ide.charAt(0).toUpperCase() + ide.slice(1) || "Unknown";
13432
+ let ideVersion = "Unknown";
13433
+ const platform = os3.platform();
13434
+ const cmds = versionCommands[ideName]?.[platform] ?? [];
13435
+ for (const cmd of cmds) {
13436
+ try {
13437
+ const versionOutput = cmd.includes("grep") || cmd.includes("--version") || cmd.includes("sed") ? execSync(cmd, { stdio: ["pipe", "pipe", "ignore"] }).toString().split("\n")[0] ?? "" : cmd;
13438
+ if (versionOutput && versionOutput !== "Unknown") {
13439
+ ideVersion = versionOutput;
13440
+ break;
13441
+ }
13442
+ } catch {
13443
+ continue;
13444
+ }
13445
+ }
13446
+ let mcpConfigObj = {};
13447
+ if (config4) {
13448
+ const allServers = config4.mcpServers || config4.servers || {};
13449
+ if (name in allServers && allServers[name]) {
13450
+ mcpConfigObj = allServers[name];
13451
+ }
13452
+ }
13453
+ results.push({
13454
+ mcpName: name || command,
13455
+ mcpConfiguration: JSON.stringify(mcpConfigObj),
13456
+ ideName: ideName || "Unknown",
13457
+ ideVersion: ideVersion || "Unknown",
13458
+ isRunning
13459
+ });
13460
+ }
13461
+ return { mcps: results, user: gitInfo };
13462
+ };
13463
+
13464
+ // src/mcp/services/McpUsageService/McpUsageService.ts
13465
+ var McpUsageService = class {
13466
+ constructor() {
13467
+ __publicField(this, "configKey", "mcpUsage");
13468
+ __publicField(this, "intervalId", null);
13469
+ __publicField(this, "REST_API_URL", MCP_DEFAULT_REST_API_URL);
13470
+ this.startPeriodicTracking();
13471
+ if (process.env["API_URL"]) {
13472
+ const url = new URL(process.env["API_URL"]);
13473
+ const domain = `${url.protocol}//${url.host}`;
13474
+ this.REST_API_URL = `${domain}/api/rest/mcp/track`;
13475
+ }
13476
+ }
13477
+ startPeriodicTracking() {
13478
+ logDebug(`[UsageService] Starting periodic tracking for mcps`, {});
13479
+ this.intervalId = setInterval(async () => {
13480
+ logDebug(`[UsageService] Triggering periodic usage service`, {
13481
+ MCP_PERIODIC_TRACK_INTERVAL
13482
+ });
13483
+ await mcpUsageService.trackServerStart();
13484
+ }, 1e4);
13485
+ }
13486
+ generateHostId() {
13487
+ const stored = configStore.get(this.configKey);
13488
+ if (stored?.mcpHostId) return stored.mcpHostId;
13489
+ const interfaces = os4.networkInterfaces();
13490
+ const macs = [];
13491
+ for (const iface of Object.values(interfaces)) {
13492
+ if (!iface) continue;
13493
+ for (const net of iface) {
13494
+ if (net.mac && net.mac !== "00:00:00:00:00:00") macs.push(net.mac);
13495
+ }
13496
+ }
13497
+ const macString = macs.length ? macs.sort().join(",") : `${os4.hostname()}-${uuidv43()}`;
13498
+ const hostId = uuidv5(macString, uuidv5.DNS);
13499
+ logDebug("[UsageService] Generated new host ID", { hostId });
13500
+ return hostId;
13501
+ }
13502
+ getOrganizationId() {
13503
+ const organizationId = configStore.get("GOV-ORG-ID") || "";
13504
+ if (organizationId) {
13505
+ logDebug("[UsageService] Using stored organization ID", {
13506
+ organizationId
13507
+ });
13508
+ return organizationId;
13509
+ }
13510
+ return "";
13511
+ }
13512
+ createUsageData(mcpHostId, organizationId, status) {
13513
+ const { user, mcps } = getHostInfo();
13514
+ return {
13515
+ mcpHostId,
13516
+ organizationId,
13517
+ mcpVersion: packageJson.version,
13518
+ mcpOsName: os4.platform(),
13519
+ mcps: JSON.stringify(mcps),
13520
+ status,
13521
+ userName: user.name,
13522
+ userEmail: user.email,
13523
+ date: String((/* @__PURE__ */ new Date()).toISOString().split("T")[0])
13524
+ // it's used to make sure we track the mcp usage daily
13525
+ };
13526
+ }
13527
+ async trackUsage(status) {
13528
+ try {
13529
+ const hostId = this.generateHostId();
13530
+ const organizationId = this.getOrganizationId();
13531
+ if (!organizationId) {
13532
+ logError(
13533
+ "[UsageService] Cannot track MCP usage - organization ID not available"
13534
+ );
13535
+ return;
13536
+ }
13537
+ const usageData = this.createUsageData(hostId, organizationId, status);
13538
+ const stored = configStore.get(this.configKey);
13539
+ const hasChanges = !stored || Object.keys(usageData).some(
13540
+ (key) => usageData[key] !== stored[key]
13541
+ );
13542
+ if (!hasChanges) {
13543
+ logDebug(
13544
+ `[UsageService] Skipping ${status} usage tracking - no changes`
13545
+ );
13546
+ return;
13547
+ }
13548
+ logDebug("[UsageService] Before", { usageData });
13549
+ try {
13550
+ const res = await fetch5(this.REST_API_URL, {
13551
+ method: "POST",
13552
+ headers: {
13553
+ Accept: "application/json"
13554
+ },
13555
+ body: JSON.stringify({
13556
+ organizationId,
13557
+ mcps: usageData.mcps,
13558
+ status,
13559
+ osName: usageData.mcpOsName,
13560
+ userFullName: usageData.userName,
13561
+ userEmail: usageData.userEmail
13562
+ })
13563
+ });
13564
+ const authResult = await res.json();
13565
+ logDebug("[UsageService] Success usage data", { authResult });
13566
+ } catch (err) {
13567
+ logDebug("[UsageService] Error usage data", { err });
13568
+ }
13569
+ logDebug("[UsageService] Saving usage data", { usageData });
13570
+ configStore.set(this.configKey, usageData);
13571
+ logInfo(
13572
+ `[UsageService] MCP server ${status === "ACTIVE" ? "start" : "stop"} tracked successfully`
13573
+ );
13574
+ } catch (error) {
13575
+ configStore.set(this.configKey, { status: "FAILED" });
13576
+ logError(
13577
+ `[UsageService] Failed to track MCP server ${status === "ACTIVE" ? "start" : "stop"}`,
13578
+ { error }
13579
+ );
13580
+ }
13581
+ }
13582
+ async trackServerStart() {
13583
+ await this.trackUsage("ACTIVE");
13584
+ }
13585
+ async trackServerStop() {
13586
+ await this.trackUsage("INACTIVE");
13587
+ }
13588
+ reset() {
13589
+ if (!this.intervalId) {
13590
+ return;
13591
+ }
13592
+ clearInterval(this.intervalId);
13593
+ this.intervalId = null;
13594
+ }
13595
+ };
13596
+ var mcpUsageService = new McpUsageService();
13597
+
13122
13598
  // src/mcp/tools/toolNames.ts
13123
13599
  var MCP_TOOL_CHECK_FOR_NEW_AVAILABLE_FIXES = "check_for_new_available_fixes";
13124
13600
  var MCP_TOOL_FETCH_AVAILABLE_FIXES = "fetch_available_fixes";
@@ -13185,7 +13661,23 @@ var McpServer = class {
13185
13661
  logInfo("MCP server instance created");
13186
13662
  logDebug("MCP server instance config", { config: config4 });
13187
13663
  }
13188
- handleProcessSignal({
13664
+ async trackServerUsage(action, signalOrError) {
13665
+ try {
13666
+ if (action === "start") {
13667
+ await mcpUsageService.trackServerStart();
13668
+ }
13669
+ if (action === "stop") {
13670
+ await mcpUsageService.trackServerStop();
13671
+ mcpUsageService.reset();
13672
+ }
13673
+ } catch (usageError) {
13674
+ logWarn(`Failed to track MCP server ${action}`, {
13675
+ error: usageError,
13676
+ signalOrError
13677
+ });
13678
+ }
13679
+ }
13680
+ async handleProcessSignal({
13189
13681
  signal,
13190
13682
  error
13191
13683
  }) {
@@ -13223,9 +13715,11 @@ var McpServer = class {
13223
13715
  logDebug(message, { signal });
13224
13716
  }
13225
13717
  if (signal === "SIGINT" || signal === "SIGTERM") {
13718
+ await this.trackServerUsage("stop", signal);
13226
13719
  process.exit(0);
13227
13720
  }
13228
13721
  if (signal === "uncaughtException") {
13722
+ await this.trackServerUsage("stop", signal);
13229
13723
  process.exit(1);
13230
13724
  }
13231
13725
  }
@@ -13243,8 +13737,8 @@ var McpServer = class {
13243
13737
  "warning"
13244
13738
  ];
13245
13739
  signals.forEach((signal) => {
13246
- const handler = (error) => {
13247
- this.handleProcessSignal({ signal, error });
13740
+ const handler = async (error) => {
13741
+ await this.handleProcessSignal({ signal, error });
13248
13742
  };
13249
13743
  this.eventHandlers.set(signal, handler);
13250
13744
  process.on(signal, handler);
@@ -13303,6 +13797,12 @@ var McpServer = class {
13303
13797
  }
13304
13798
  }
13305
13799
  async handleListToolsRequest(request) {
13800
+ const govOrgId = configStore.get("GOV-ORG-ID") || "";
13801
+ if (govOrgId) {
13802
+ return {
13803
+ tools: []
13804
+ };
13805
+ }
13306
13806
  logInfo("Received list_tools request");
13307
13807
  logDebug("list_tools request", {
13308
13808
  request: JSON.parse(JSON.stringify(request))
@@ -13390,6 +13890,7 @@ var McpServer = class {
13390
13890
  const transport = new StdioServerTransport();
13391
13891
  await this.server.connect(transport);
13392
13892
  logDebug("MCP server is running on stdin/stdout");
13893
+ await this.trackServerUsage("start");
13393
13894
  process.stdin.resume();
13394
13895
  await this.createShutdownPromise();
13395
13896
  await this.stop();
@@ -13400,6 +13901,7 @@ var McpServer = class {
13400
13901
  }
13401
13902
  async stop() {
13402
13903
  logDebug("MCP server shutting down");
13904
+ await this.trackServerUsage("stop");
13403
13905
  this.eventHandlers.forEach((handler, signal) => {
13404
13906
  process.removeListener(signal, handler);
13405
13907
  });
@@ -13413,8 +13915,8 @@ var McpServer = class {
13413
13915
  import { z as z34 } from "zod";
13414
13916
 
13415
13917
  // src/mcp/services/PathValidation.ts
13416
- import fs10 from "fs";
13417
- import path11 from "path";
13918
+ import fs11 from "fs";
13919
+ import path12 from "path";
13418
13920
  async function validatePath(inputPath) {
13419
13921
  logDebug("Validating MCP path", { inputPath });
13420
13922
  if (/^\/[a-zA-Z]:\//.test(inputPath)) {
@@ -13441,7 +13943,7 @@ async function validatePath(inputPath) {
13441
13943
  logError(error);
13442
13944
  return { isValid: false, error, path: inputPath };
13443
13945
  }
13444
- const normalizedPath = path11.normalize(inputPath);
13946
+ const normalizedPath = path12.normalize(inputPath);
13445
13947
  if (normalizedPath.includes("..")) {
13446
13948
  const error = `Normalized path contains path traversal patterns: ${inputPath}`;
13447
13949
  logError(error);
@@ -13468,7 +13970,7 @@ async function validatePath(inputPath) {
13468
13970
  logDebug("Path validation successful", { inputPath });
13469
13971
  logDebug("Checking path existence", { inputPath });
13470
13972
  try {
13471
- await fs10.promises.access(inputPath);
13973
+ await fs11.promises.access(inputPath);
13472
13974
  logDebug("Path exists and is accessible", { inputPath });
13473
13975
  return { isValid: true, path: inputPath };
13474
13976
  } catch (error) {
@@ -14030,26 +14532,26 @@ ${whatHappensNextSection}`;
14030
14532
  init_FileUtils();
14031
14533
  init_GitService();
14032
14534
  init_configs();
14033
- import fs11 from "fs/promises";
14535
+ import fs12 from "fs/promises";
14034
14536
  import nodePath from "path";
14035
14537
  var getLocalFiles = async ({
14036
- path: path15,
14538
+ path: path17,
14037
14539
  maxFileSize = MCP_MAX_FILE_SIZE,
14038
14540
  maxFiles,
14039
14541
  isAllFilesScan,
14040
14542
  scanContext
14041
14543
  }) => {
14042
14544
  logDebug(`[${scanContext}] Starting getLocalFiles`, {
14043
- path: path15,
14545
+ path: path17,
14044
14546
  maxFileSize,
14045
14547
  maxFiles,
14046
14548
  isAllFilesScan
14047
14549
  });
14048
14550
  try {
14049
- const resolvedRepoPath = await fs11.realpath(path15);
14551
+ const resolvedRepoPath = await fs12.realpath(path17);
14050
14552
  logDebug(`[${scanContext}] Resolved repository path`, {
14051
14553
  resolvedRepoPath,
14052
- originalPath: path15
14554
+ originalPath: path17
14053
14555
  });
14054
14556
  const gitService = new GitService(resolvedRepoPath, log);
14055
14557
  const gitValidation = await gitService.validateRepository();
@@ -14062,7 +14564,7 @@ var getLocalFiles = async ({
14062
14564
  if (!gitValidation.isValid || isAllFilesScan) {
14063
14565
  try {
14064
14566
  files = await FileUtils.getLastChangedFiles({
14065
- dir: path15,
14567
+ dir: path17,
14066
14568
  maxFileSize,
14067
14569
  maxFiles,
14068
14570
  isAllFilesScan
@@ -14126,7 +14628,7 @@ var getLocalFiles = async ({
14126
14628
  absoluteFilePath
14127
14629
  );
14128
14630
  try {
14129
- const fileStat = await fs11.stat(absoluteFilePath);
14631
+ const fileStat = await fs12.stat(absoluteFilePath);
14130
14632
  return {
14131
14633
  filename: nodePath.basename(absoluteFilePath),
14132
14634
  relativePath,
@@ -14154,15 +14656,15 @@ var getLocalFiles = async ({
14154
14656
  logError(`${scanContext}Unexpected error in getLocalFiles`, {
14155
14657
  error: error instanceof Error ? error.message : String(error),
14156
14658
  stack: error instanceof Error ? error.stack : void 0,
14157
- path: path15
14659
+ path: path17
14158
14660
  });
14159
14661
  throw error;
14160
14662
  }
14161
14663
  };
14162
14664
 
14163
14665
  // src/mcp/services/LocalMobbFolderService.ts
14164
- import fs12 from "fs";
14165
- import path12 from "path";
14666
+ import fs13 from "fs";
14667
+ import path13 from "path";
14166
14668
  import { z as z33 } from "zod";
14167
14669
  init_GitService();
14168
14670
  function extractPathFromPatch(patch) {
@@ -14249,19 +14751,19 @@ var LocalMobbFolderService = class {
14249
14751
  "[LocalMobbFolderService] Non-git repository detected, skipping .gitignore operations"
14250
14752
  );
14251
14753
  }
14252
- const mobbFolderPath = path12.join(
14754
+ const mobbFolderPath = path13.join(
14253
14755
  this.repoPath,
14254
14756
  this.defaultMobbFolderName
14255
14757
  );
14256
- if (!fs12.existsSync(mobbFolderPath)) {
14758
+ if (!fs13.existsSync(mobbFolderPath)) {
14257
14759
  logInfo("[LocalMobbFolderService] Creating .mobb folder", {
14258
14760
  mobbFolderPath
14259
14761
  });
14260
- fs12.mkdirSync(mobbFolderPath, { recursive: true });
14762
+ fs13.mkdirSync(mobbFolderPath, { recursive: true });
14261
14763
  } else {
14262
14764
  logDebug("[LocalMobbFolderService] .mobb folder already exists");
14263
14765
  }
14264
- const stats = fs12.statSync(mobbFolderPath);
14766
+ const stats = fs13.statSync(mobbFolderPath);
14265
14767
  if (!stats.isDirectory()) {
14266
14768
  throw new Error(`Path exists but is not a directory: ${mobbFolderPath}`);
14267
14769
  }
@@ -14302,13 +14804,13 @@ var LocalMobbFolderService = class {
14302
14804
  logDebug("[LocalMobbFolderService] Git repository validated successfully");
14303
14805
  } else {
14304
14806
  try {
14305
- const stats = fs12.statSync(this.repoPath);
14807
+ const stats = fs13.statSync(this.repoPath);
14306
14808
  if (!stats.isDirectory()) {
14307
14809
  throw new Error(
14308
14810
  `Path exists but is not a directory: ${this.repoPath}`
14309
14811
  );
14310
14812
  }
14311
- fs12.accessSync(this.repoPath, fs12.constants.R_OK | fs12.constants.W_OK);
14813
+ fs13.accessSync(this.repoPath, fs13.constants.R_OK | fs13.constants.W_OK);
14312
14814
  logDebug(
14313
14815
  "[LocalMobbFolderService] Non-git directory validated successfully"
14314
14816
  );
@@ -14421,8 +14923,8 @@ var LocalMobbFolderService = class {
14421
14923
  mobbFolderPath,
14422
14924
  baseFileName
14423
14925
  );
14424
- const filePath = path12.join(mobbFolderPath, uniqueFileName);
14425
- await fs12.promises.writeFile(filePath, patch, "utf8");
14926
+ const filePath = path13.join(mobbFolderPath, uniqueFileName);
14927
+ await fs13.promises.writeFile(filePath, patch, "utf8");
14426
14928
  logInfo("[LocalMobbFolderService] Patch saved successfully", {
14427
14929
  filePath,
14428
14930
  fileName: uniqueFileName,
@@ -14479,11 +14981,11 @@ var LocalMobbFolderService = class {
14479
14981
  * @returns Unique filename that doesn't conflict with existing files
14480
14982
  */
14481
14983
  getUniqueFileName(folderPath, baseFileName) {
14482
- const baseName = path12.parse(baseFileName).name;
14483
- const extension = path12.parse(baseFileName).ext;
14984
+ const baseName = path13.parse(baseFileName).name;
14985
+ const extension = path13.parse(baseFileName).ext;
14484
14986
  let uniqueFileName = baseFileName;
14485
14987
  let index = 1;
14486
- while (fs12.existsSync(path12.join(folderPath, uniqueFileName))) {
14988
+ while (fs13.existsSync(path13.join(folderPath, uniqueFileName))) {
14487
14989
  uniqueFileName = `${baseName}-${index}${extension}`;
14488
14990
  index++;
14489
14991
  if (index > 1e3) {
@@ -14514,18 +15016,18 @@ var LocalMobbFolderService = class {
14514
15016
  logDebug("[LocalMobbFolderService] Logging patch info", { fixId: fix.id });
14515
15017
  try {
14516
15018
  const mobbFolderPath = await this.getFolder();
14517
- const patchInfoPath = path12.join(mobbFolderPath, "patchInfo.md");
15019
+ const patchInfoPath = path13.join(mobbFolderPath, "patchInfo.md");
14518
15020
  const markdownContent = this.generateFixMarkdown(fix, savedPatchFileName);
14519
15021
  let existingContent = "";
14520
- if (fs12.existsSync(patchInfoPath)) {
14521
- existingContent = await fs12.promises.readFile(patchInfoPath, "utf8");
15022
+ if (fs13.existsSync(patchInfoPath)) {
15023
+ existingContent = await fs13.promises.readFile(patchInfoPath, "utf8");
14522
15024
  logDebug("[LocalMobbFolderService] Existing patchInfo.md found");
14523
15025
  } else {
14524
15026
  logDebug("[LocalMobbFolderService] Creating new patchInfo.md file");
14525
15027
  }
14526
15028
  const separator = existingContent ? "\n\n================================================================================\n\n" : "";
14527
15029
  const updatedContent = `${markdownContent}${separator}${existingContent}`;
14528
- await fs12.promises.writeFile(patchInfoPath, updatedContent, "utf8");
15030
+ await fs13.promises.writeFile(patchInfoPath, updatedContent, "utf8");
14529
15031
  logInfo("[LocalMobbFolderService] Patch info logged successfully", {
14530
15032
  patchInfoPath,
14531
15033
  fixId: fix.id,
@@ -14556,7 +15058,7 @@ var LocalMobbFolderService = class {
14556
15058
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
14557
15059
  const patch = this.extractPatchFromFix(fix);
14558
15060
  const relativePatchedFilePath = patch ? extractPathFromPatch(patch) : null;
14559
- const patchedFilePath = relativePatchedFilePath ? path12.resolve(this.repoPath, relativePatchedFilePath) : null;
15061
+ const patchedFilePath = relativePatchedFilePath ? path13.resolve(this.repoPath, relativePatchedFilePath) : null;
14560
15062
  const fixIdentifier = savedPatchFileName ? savedPatchFileName.replace(".patch", "") : fix.id;
14561
15063
  let markdown = `# Fix ${fixIdentifier}
14562
15064
 
@@ -14898,16 +15400,16 @@ import {
14898
15400
  unlinkSync,
14899
15401
  writeFileSync
14900
15402
  } from "fs";
14901
- import fs13 from "fs/promises";
15403
+ import fs14 from "fs/promises";
14902
15404
  import parseDiff2 from "parse-diff";
14903
- import path13 from "path";
15405
+ import path14 from "path";
14904
15406
  var PatchApplicationService = class {
14905
15407
  /**
14906
15408
  * Gets the appropriate comment syntax for a file based on its extension
14907
15409
  */
14908
15410
  static getCommentSyntax(filePath) {
14909
- const ext = path13.extname(filePath).toLowerCase();
14910
- const basename2 = path13.basename(filePath);
15411
+ const ext = path14.extname(filePath).toLowerCase();
15412
+ const basename2 = path14.basename(filePath);
14911
15413
  const commentMap = {
14912
15414
  // C-style languages (single line comments)
14913
15415
  ".js": "//",
@@ -15117,7 +15619,7 @@ var PatchApplicationService = class {
15117
15619
  }
15118
15620
  );
15119
15621
  }
15120
- const dirPath = path13.dirname(filePath);
15622
+ const dirPath = path14.dirname(filePath);
15121
15623
  mkdirSync(dirPath, { recursive: true });
15122
15624
  writeFileSync(filePath, finalContent, "utf8");
15123
15625
  return filePath;
@@ -15401,9 +15903,9 @@ var PatchApplicationService = class {
15401
15903
  continue;
15402
15904
  }
15403
15905
  try {
15404
- const absolutePath = path13.resolve(repositoryPath, targetFile);
15906
+ const absolutePath = path14.resolve(repositoryPath, targetFile);
15405
15907
  if (existsSync2(absolutePath)) {
15406
- const stats = await fs13.stat(absolutePath);
15908
+ const stats = await fs14.stat(absolutePath);
15407
15909
  const fileModTime = stats.mtime.getTime();
15408
15910
  if (fileModTime > scanStartTime) {
15409
15911
  logError(
@@ -15444,7 +15946,7 @@ var PatchApplicationService = class {
15444
15946
  const appliedFixes = [];
15445
15947
  const failedFixes = [];
15446
15948
  const skippedFixes = [];
15447
- const resolvedRepoPath = await fs13.realpath(repositoryPath);
15949
+ const resolvedRepoPath = await fs14.realpath(repositoryPath);
15448
15950
  logInfo(
15449
15951
  `[${scanContext}] Starting patch application for ${fixes.length} fixes`,
15450
15952
  {
@@ -15592,11 +16094,11 @@ var PatchApplicationService = class {
15592
16094
  }) {
15593
16095
  const sanitizedRepoPath = String(repositoryPath || "").replace("\0", "").replace(/^(\.\.(\/|\\))+/, "");
15594
16096
  const sanitizedTargetFile = String(targetFile || "").replace("\0", "").replace(/^(\.\.(\/|\\))+/, "");
15595
- const absoluteFilePath = path13.resolve(
16097
+ const absoluteFilePath = path14.resolve(
15596
16098
  sanitizedRepoPath,
15597
16099
  sanitizedTargetFile
15598
16100
  );
15599
- const relativePath = path13.relative(sanitizedRepoPath, absoluteFilePath);
16101
+ const relativePath = path14.relative(sanitizedRepoPath, absoluteFilePath);
15600
16102
  if (relativePath.startsWith("..")) {
15601
16103
  throw new Error(
15602
16104
  `Security violation: target file ${targetFile} resolves outside repository`
@@ -15630,7 +16132,7 @@ var PatchApplicationService = class {
15630
16132
  fix,
15631
16133
  scanContext
15632
16134
  });
15633
- appliedFiles.push(path13.relative(repositoryPath, actualPath));
16135
+ appliedFiles.push(path14.relative(repositoryPath, actualPath));
15634
16136
  logDebug(`[${scanContext}] Created new file: ${relativePath}`);
15635
16137
  }
15636
16138
  /**
@@ -15678,7 +16180,7 @@ var PatchApplicationService = class {
15678
16180
  fix,
15679
16181
  scanContext
15680
16182
  });
15681
- appliedFiles.push(path13.relative(repositoryPath, actualPath));
16183
+ appliedFiles.push(path14.relative(repositoryPath, actualPath));
15682
16184
  logDebug(`[${scanContext}] Modified file: ${relativePath}`);
15683
16185
  }
15684
16186
  }
@@ -15873,8 +16375,8 @@ init_configs();
15873
16375
 
15874
16376
  // src/mcp/services/FileOperations.ts
15875
16377
  init_FileUtils();
15876
- import fs14 from "fs";
15877
- import path14 from "path";
16378
+ import fs15 from "fs";
16379
+ import path15 from "path";
15878
16380
  import AdmZip2 from "adm-zip";
15879
16381
  var FileOperations = class {
15880
16382
  /**
@@ -15894,10 +16396,10 @@ var FileOperations = class {
15894
16396
  let packedFilesCount = 0;
15895
16397
  const packedFiles = [];
15896
16398
  const excludedFiles = [];
15897
- const resolvedRepoPath = path14.resolve(repositoryPath);
16399
+ const resolvedRepoPath = path15.resolve(repositoryPath);
15898
16400
  for (const filepath of fileList) {
15899
- const absoluteFilepath = path14.join(repositoryPath, filepath);
15900
- const resolvedFilePath = path14.resolve(absoluteFilepath);
16401
+ const absoluteFilepath = path15.join(repositoryPath, filepath);
16402
+ const resolvedFilePath = path15.resolve(absoluteFilepath);
15901
16403
  if (!resolvedFilePath.startsWith(resolvedRepoPath)) {
15902
16404
  const reason = "potential path traversal security risk";
15903
16405
  logDebug(`[FileOperations] Skipping ${filepath} due to ${reason}`);
@@ -15944,11 +16446,11 @@ var FileOperations = class {
15944
16446
  fileList,
15945
16447
  repositoryPath
15946
16448
  }) {
15947
- const resolvedRepoPath = path14.resolve(repositoryPath);
16449
+ const resolvedRepoPath = path15.resolve(repositoryPath);
15948
16450
  const validatedPaths = [];
15949
16451
  for (const filepath of fileList) {
15950
- const absoluteFilepath = path14.join(repositoryPath, filepath);
15951
- const resolvedFilePath = path14.resolve(absoluteFilepath);
16452
+ const absoluteFilepath = path15.join(repositoryPath, filepath);
16453
+ const resolvedFilePath = path15.resolve(absoluteFilepath);
15952
16454
  if (!resolvedFilePath.startsWith(resolvedRepoPath)) {
15953
16455
  logDebug(
15954
16456
  `[FileOperations] Rejecting ${filepath} - path traversal attempt detected`
@@ -15956,7 +16458,7 @@ var FileOperations = class {
15956
16458
  continue;
15957
16459
  }
15958
16460
  try {
15959
- await fs14.promises.access(absoluteFilepath, fs14.constants.R_OK);
16461
+ await fs15.promises.access(absoluteFilepath, fs15.constants.R_OK);
15960
16462
  validatedPaths.push(filepath);
15961
16463
  } catch (error) {
15962
16464
  logDebug(
@@ -15975,8 +16477,8 @@ var FileOperations = class {
15975
16477
  const fileDataArray = [];
15976
16478
  for (const absolutePath of filePaths) {
15977
16479
  try {
15978
- const content = await fs14.promises.readFile(absolutePath);
15979
- const relativePath = path14.basename(absolutePath);
16480
+ const content = await fs15.promises.readFile(absolutePath);
16481
+ const relativePath = path15.basename(absolutePath);
15980
16482
  fileDataArray.push({
15981
16483
  relativePath,
15982
16484
  absolutePath,
@@ -16001,7 +16503,7 @@ var FileOperations = class {
16001
16503
  relativeFilepath
16002
16504
  }) {
16003
16505
  try {
16004
- return await fs14.promises.readFile(absoluteFilepath);
16506
+ return await fs15.promises.readFile(absoluteFilepath);
16005
16507
  } catch (fsError) {
16006
16508
  logError(
16007
16509
  `[FileOperations] Failed to read ${relativeFilepath} from filesystem: ${fsError}`
@@ -16288,14 +16790,14 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
16288
16790
  * since the last scan.
16289
16791
  */
16290
16792
  async scanForSecurityVulnerabilities({
16291
- path: path15,
16793
+ path: path17,
16292
16794
  isAllDetectionRulesScan,
16293
16795
  isAllFilesScan,
16294
16796
  scanContext
16295
16797
  }) {
16296
16798
  this.hasAuthenticationFailed = false;
16297
16799
  logDebug(`[${scanContext}] Scanning for new security vulnerabilities`, {
16298
- path: path15
16800
+ path: path17
16299
16801
  });
16300
16802
  if (!this.gqlClient) {
16301
16803
  logInfo(`[${scanContext}] No GQL client found, skipping scan`);
@@ -16311,10 +16813,10 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
16311
16813
  }
16312
16814
  logDebug(
16313
16815
  `[${scanContext}] Connected to the API, assembling list of files to scan`,
16314
- { path: path15 }
16816
+ { path: path17 }
16315
16817
  );
16316
16818
  const files = await getLocalFiles({
16317
- path: path15,
16819
+ path: path17,
16318
16820
  isAllFilesScan,
16319
16821
  scanContext
16320
16822
  });
@@ -16339,13 +16841,13 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  });
  const { fixReportId, projectId } = await scanFiles({
  fileList: filesToScan.map((file) => file.relativePath),
- repositoryPath: path15,
+ repositoryPath: path17,
  gqlClient: this.gqlClient,
  isAllDetectionRulesScan,
  scanContext
  });
  logInfo(
- `[${scanContext}] Security scan completed for ${path15} reportId: ${fixReportId} projectId: ${projectId}`
+ `[${scanContext}] Security scan completed for ${path17} reportId: ${fixReportId} projectId: ${projectId}`
  );
  if (isAllFilesScan) {
  return;
@@ -16639,13 +17141,13 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  });
  return scannedFiles.some((file) => file.relativePath === fixFile);
  }
- async getFreshFixes({ path: path15 }) {
+ async getFreshFixes({ path: path17 }) {
  const scanContext = ScanContext.USER_REQUEST;
- logDebug(`[${scanContext}] Getting fresh fixes`, { path: path15 });
- if (this.path !== path15) {
- this.path = path15;
+ logDebug(`[${scanContext}] Getting fresh fixes`, { path: path17 });
+ if (this.path !== path17) {
+ this.path = path17;
  this.reset();
- logInfo(`[${scanContext}] Reset service state for new path`, { path: path15 });
+ logInfo(`[${scanContext}] Reset service state for new path`, { path: path17 });
  }
  try {
  this.gqlClient = await createAuthenticatedMcpGQLClient();
@@ -16663,7 +17165,7 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  }
  throw error;
  }
- this.triggerScan({ path: path15, gqlClient: this.gqlClient });
+ this.triggerScan({ path: path17, gqlClient: this.gqlClient });
  let isMvsAutoFixEnabled = null;
  try {
  isMvsAutoFixEnabled = await this.gqlClient.getMvsAutoFixSettings();
@@ -16697,33 +17199,33 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  return noFreshFixesPrompt;
  }
  triggerScan({
- path: path15,
+ path: path17,
  gqlClient
  }) {
- if (this.path !== path15) {
- this.path = path15;
+ if (this.path !== path17) {
+ this.path = path17;
  this.reset();
- logInfo(`Reset service state for new path in triggerScan`, { path: path15 });
+ logInfo(`Reset service state for new path in triggerScan`, { path: path17 });
  }
  this.gqlClient = gqlClient;
  if (!this.intervalId) {
- this.startPeriodicScanning(path15);
- this.executeInitialScan(path15);
- void this.executeInitialFullScan(path15);
+ this.startPeriodicScanning(path17);
+ this.executeInitialScan(path17);
+ void this.executeInitialFullScan(path17);
  }
  }
- startPeriodicScanning(path15) {
+ startPeriodicScanning(path17) {
  const scanContext = ScanContext.BACKGROUND_PERIODIC;
  logDebug(
  `[${scanContext}] Starting periodic scan for new security vulnerabilities`,
  {
- path: path15
+ path: path17
  }
  );
  this.intervalId = setInterval(() => {
- logDebug(`[${scanContext}] Triggering periodic security scan`, { path: path15 });
+ logDebug(`[${scanContext}] Triggering periodic security scan`, { path: path17 });
  this.scanForSecurityVulnerabilities({
- path: path15,
+ path: path17,
  scanContext
  }).catch((error) => {
  logError(`[${scanContext}] Error during periodic security scan`, {
@@ -16732,45 +17234,45 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
  });
  }, MCP_PERIODIC_CHECK_INTERVAL);
  }
- async executeInitialFullScan(path15) {
+ async executeInitialFullScan(path17) {
  const scanContext = ScanContext.FULL_SCAN;
- logDebug(`[${scanContext}] Triggering initial full security scan`, { path: path15 });
+ logDebug(`[${scanContext}] Triggering initial full security scan`, { path: path17 });
  logDebug(`[${scanContext}] Full scan paths scanned`, {
  fullScanPathsScanned: this.fullScanPathsScanned
  });
- if (this.fullScanPathsScanned.includes(path15)) {
+ if (this.fullScanPathsScanned.includes(path17)) {
  logDebug(`[${scanContext}] Full scan already executed for this path`, {
- path: path15
+ path: path17
  });
  return;
  }
  configStore.set("fullScanPathsScanned", [
  ...this.fullScanPathsScanned,
- path15
+ path17
  ]);
  try {
  await this.scanForSecurityVulnerabilities({
- path: path15,
+ path: path17,
  isAllFilesScan: true,
  isAllDetectionRulesScan: true,
  scanContext: ScanContext.FULL_SCAN
  });
- if (!this.fullScanPathsScanned.includes(path15)) {
- this.fullScanPathsScanned.push(path15);
+ if (!this.fullScanPathsScanned.includes(path17)) {
+ this.fullScanPathsScanned.push(path17);
  configStore.set("fullScanPathsScanned", this.fullScanPathsScanned);
  }
- logInfo(`[${scanContext}] Full scan completed`, { path: path15 });
+ logInfo(`[${scanContext}] Full scan completed`, { path: path17 });
  } catch (error) {
  logError(`[${scanContext}] Error during initial full security scan`, {
  error
  });
  }
  }
- executeInitialScan(path15) {
+ executeInitialScan(path17) {
  const scanContext = ScanContext.BACKGROUND_INITIAL;
- logDebug(`[${scanContext}] Triggering initial security scan`, { path: path15 });
+ logDebug(`[${scanContext}] Triggering initial security scan`, { path: path17 });
  this.scanForSecurityVulnerabilities({
- path: path15,
+ path: path17,
  scanContext: ScanContext.BACKGROUND_INITIAL
  }).catch((error) => {
  logError(`[${scanContext}] Error during initial security scan`, { error });
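The hunks above thread the renamed path17 argument through the fix-check service's three scan entry points. Because the logic is split across several hunks with +/- noise, here is a condensed, self-contained paraphrase of the scheduling pattern they show; method names mirror the diff, but the class, the interval constant, and the stubbed scan body are illustrative placeholders, not the bundle's real implementation.

// Minimal sketch of the scheduling pattern in the diff above (paraphrase, not verbatim).
const PERIODIC_CHECK_INTERVAL_MS = 60 * 1e3; // placeholder value, not the real MCP_PERIODIC_CHECK_INTERVAL
class ScanSchedulerSketch {
  constructor() {
    this.path = null;
    this.intervalId = null;
  }
  triggerScan({ path }) {
    if (this.path !== path) {
      this.path = path; // a new workspace path invalidates cached state
    }
    if (!this.intervalId) {
      // periodic re-scan, mirroring startPeriodicScanning + setInterval in the diff
      this.intervalId = setInterval(() => this.scan(path), PERIODIC_CHECK_INTERVAL_MS);
      this.scan(path); // immediate initial scan, mirroring executeInitialScan
    }
  }
  scan(path) {
    console.log(`scanning ${path}`); // stand-in for scanForSecurityVulnerabilities
  }
}
// usage (commented out so the sketch does not start a timer when pasted):
// new ScanSchedulerSketch().triggerScan({ path: process.cwd() });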
@@ -16866,9 +17368,9 @@ Example payload:
  `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
  );
  }
- const path15 = pathValidationResult.path;
+ const path17 = pathValidationResult.path;
  const resultText = await this.newFixesService.getFreshFixes({
- path: path15
+ path: path17
  });
  logInfo("CheckForNewAvailableFixesTool execution completed", {
  resultText
@@ -17016,8 +17518,8 @@ Call this tool instead of ${MCP_TOOL_SCAN_AND_FIX_VULNERABILITIES} when you only
  `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
  );
  }
- const path15 = pathValidationResult.path;
- const gitService = new GitService(path15, log);
+ const path17 = pathValidationResult.path;
+ const gitService = new GitService(path17, log);
  const gitValidation = await gitService.validateRepository();
  if (!gitValidation.isValid) {
  throw new Error(`Invalid git repository: ${gitValidation.error}`);
@@ -17283,9 +17785,9 @@ Example payload:
  `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
  );
  }
- const path15 = pathValidationResult.path;
+ const path17 = pathValidationResult.path;
  const files = await getLocalFiles({
- path: path15,
+ path: path17,
  maxFileSize: MCP_MAX_FILE_SIZE,
  maxFiles: args.maxFiles,
  scanContext: ScanContext.USER_REQUEST
@@ -17304,7 +17806,7 @@ Example payload:
  try {
  const fixResult = await this.vulnerabilityFixService.processVulnerabilities({
  fileList: files.map((file) => file.relativePath),
- repositoryPath: path15,
+ repositoryPath: path17,
  offset: args.offset,
  limit: args.limit,
  isRescan: args.rescan || !!args.maxFiles
@@ -17355,9 +17857,12 @@ function createMcpServer() {
  logInfo("MCP server created and configured");
  return server;
  }
- async function startMcpServer() {
+ async function startMcpServer({
+ govOrgId = ""
+ }) {
  try {
  logDebug("Initializing MCP server");
+ configStore.set("GOV-ORG-ID", govOrgId);
  const server = createMcpServer();
  await server.start();
  } catch (error) {
@@ -17368,7 +17873,7 @@ async function startMcpServer() {

  // src/args/commands/mcp.ts
  var mcpBuilder = (yargs2) => {
- return yargs2.example("$0 mcp", "Launch the MCP server").option("debug", {
+ return yargs2.example("$0 mcp", "Launch the MCP server").option("gov-org-id", organizationIdOptions).option("debug", {
  alias: "d",
  type: "boolean",
  description: "Run in debug mode with communication logging",
@@ -17377,7 +17882,8 @@ var mcpBuilder = (yargs2) => {
  };
  var mcpHandler = async (_args) => {
  try {
- await startMcpServer();
+ validateOrganizationId(_args.govOrgId);
+ await startMcpServer({ govOrgId: _args.govOrgId });
  } catch (error) {
  console.error("Failed to start MCP server:", error);
  process.exit(1);
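The two hunks above wire a governance organization id through the MCP command: mcpBuilder registers a gov-org-id option and mcpHandler validates it before passing it to startMcpServer, which stores it in the config store under "GOV-ORG-ID". A minimal usage sketch follows; the "mcp" command string is taken from the builder's own example, while the organization id is a placeholder and the programmatic call assumes startMcpServer is reachable from your code, which the published bundle does not necessarily export.

// CLI form (placeholder org id):
//   npx mobbdev@latest mcp --gov-org-id <your-org-id>
// Programmatic form, mirroring what mcpHandler does in this diff:
//   await startMcpServer({ govOrgId: "<your-org-id>" });
// startMcpServer then persists the value via configStore.set("GOV-ORG-ID", govOrgId).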
@@ -17385,7 +17891,7 @@ var mcpHandler = async (_args) => {
  };

  // src/args/commands/review.ts
- import fs15 from "fs";
+ import fs16 from "fs";
  import chalk9 from "chalk";
  function reviewBuilder(yargs2) {
  return yargs2.option("f", {
@@ -17422,7 +17928,7 @@ function reviewBuilder(yargs2) {
  ).help();
  }
  function validateReviewOptions(argv) {
- if (!fs15.existsSync(argv.f)) {
+ if (!fs16.existsSync(argv.f)) {
  throw new CliError(`
  Can't access ${chalk9.bold(argv.f)}`);
  }
@@ -17494,60 +18000,183 @@ async function addScmTokenHandler(args) {
  await addScmToken(args);
  }

+ // src/args/commands/upload_ai_blame.ts
+ import fs17 from "fs/promises";
+ import path16 from "path";
+ import chalk10 from "chalk";
+ function uploadAiBlameBuilder(args) {
+ return args.option("prompt", {
+ type: "string",
+ array: true,
+ demandOption: true,
+ describe: chalk10.bold("Path(s) to prompt artifact(s) (one per session)")
+ }).option("inference", {
+ type: "string",
+ array: true,
+ demandOption: true,
+ describe: chalk10.bold(
+ "Path(s) to inference artifact(s) (one per session)"
+ )
+ }).option("ai-response-at", {
+ type: "string",
+ array: true,
+ describe: chalk10.bold(
+ "ISO timestamp(s) for AI response (one per session, defaults to now)"
+ )
+ }).option("model", {
+ type: "string",
+ array: true,
+ describe: chalk10.bold("AI model name(s) (optional, one per session)")
+ }).option("tool-name", {
+ type: "string",
+ array: true,
+ describe: chalk10.bold("Tool/IDE name(s) (optional, one per session)")
+ }).strict();
+ }
+ async function uploadAiBlameHandler(args) {
+ const prompts = args.prompt || [];
+ const inferences = args.inference || [];
+ const models = args.model || [];
+ const tools = args.toolName || args["tool-name"] || [];
+ const responseTimes = args.aiResponseAt || args["ai-response-at"] || [];
+ if (prompts.length !== inferences.length) {
+ console.error(
+ chalk10.red("prompt and inference must have the same number of entries")
+ );
+ process.exit(1);
+ }
+ const nowIso = (/* @__PURE__ */ new Date()).toISOString();
+ const sessions = [];
+ for (let i = 0; i < prompts.length; i++) {
+ const promptPath = String(prompts[i]);
+ const inferencePath = String(inferences[i]);
+ try {
+ await Promise.all([fs17.access(promptPath), fs17.access(inferencePath)]);
+ } catch {
+ console.error(chalk10.red(`File not found for session ${i + 1}`));
+ process.exit(1);
+ }
+ sessions.push({
+ promptFileName: path16.basename(promptPath),
+ inferenceFileName: path16.basename(inferencePath),
+ aiResponseAt: responseTimes[i] || nowIso,
+ model: models[i],
+ toolName: tools[i]
+ });
+ }
+ const gqlClient = await createAuthenticatedMcpGQLClient();
+ const initRes = await gqlClient.uploadAIBlameInferencesInitRaw({ sessions });
+ const uploadSessions = initRes.uploadAIBlameInferencesInit?.uploadSessions ?? [];
+ if (uploadSessions.length !== sessions.length) {
+ console.error(
+ chalk10.red("Init failed to return expected number of sessions")
+ );
+ process.exit(1);
+ }
+ for (let i = 0; i < uploadSessions.length; i++) {
+ const us = uploadSessions[i];
+ const promptPath = String(prompts[i]);
+ const inferencePath = String(inferences[i]);
+ await uploadFile({
+ file: promptPath,
+ url: us.prompt.url,
+ uploadFields: JSON.parse(us.prompt.uploadFieldsJSON),
+ uploadKey: us.prompt.uploadKey
+ });
+ await uploadFile({
+ file: inferencePath,
+ url: us.inference.url,
+ uploadFields: JSON.parse(us.inference.uploadFieldsJSON),
+ uploadKey: us.inference.uploadKey
+ });
+ }
+ const finalizeSessions = uploadSessions.map((us, i) => {
+ const s = sessions[i];
+ return {
+ aiBlameInferenceId: us.aiBlameInferenceId,
+ promptKey: us.prompt.uploadKey,
+ inferenceKey: us.inference.uploadKey,
+ aiResponseAt: s.aiResponseAt,
+ model: s.model,
+ toolName: s.toolName
+ };
+ });
+ const finRes = await gqlClient.finalizeAIBlameInferencesUploadRaw({
+ sessions: finalizeSessions
+ });
+ const status = finRes?.finalizeAIBlameInferencesUpload?.status;
+ if (status !== "OK") {
+ console.error(
+ chalk10.red(
+ `Finalize failed: ${finRes?.finalizeAIBlameInferencesUpload?.error || "unknown error"}`
+ )
+ );
+ process.exit(1);
+ }
+ console.log(chalk10.green("AI Blame uploads finalized successfully"));
+ }
+
  // src/args/yargs.ts
  var parseArgs = async (args) => {
  const yargsInstance = yargs(args);
  return yargsInstance.updateStrings({
- "Commands:": chalk10.yellow.underline.bold("Commands:"),
- "Options:": chalk10.yellow.underline.bold("Options:"),
- "Examples:": chalk10.yellow.underline.bold("Examples:"),
- "Show help": chalk10.bold("Show help")
+ "Commands:": chalk11.yellow.underline.bold("Commands:"),
+ "Options:": chalk11.yellow.underline.bold("Options:"),
+ "Examples:": chalk11.yellow.underline.bold("Examples:"),
+ "Show help": chalk11.bold("Show help")
  }).usage(
- `${chalk10.bold(
+ `${chalk11.bold(
  "\n Bugsy - Trusted, Automatic Vulnerability Fixer \u{1F575}\uFE0F\u200D\u2642\uFE0F\n\n"
- )} ${chalk10.yellow.underline.bold("Usage:")}
- $0 ${chalk10.green(
+ )} ${chalk11.yellow.underline.bold("Usage:")}
+ $0 ${chalk11.green(
  "<command>"
- )} ${chalk10.dim("[options]")}
+ )} ${chalk11.dim("[options]")}
  `
  ).version(false).command(
  mobbCliCommand.scan,
- chalk10.bold(
+ chalk11.bold(
  "Scan your code for vulnerabilities, get automated fixes right away."
  ),
  scanBuilder,
  scanHandler
  ).command(
  mobbCliCommand.analyze,
- chalk10.bold(
+ chalk11.bold(
  "Provide a code repository, get automated fixes right away. You can also provide a vulnerability report to analyze or have Mobb scan the code for you."
  ),
  analyzeBuilder,
  analyzeHandler
  ).command(
  mobbCliCommand.review,
- chalk10.bold(
+ chalk11.bold(
  "Mobb will review your github pull requests and provide comments with fixes "
  ),
  reviewBuilder,
  reviewHandler
  ).command(
  mobbCliCommand.addScmToken,
- chalk10.bold(
+ chalk11.bold(
  "Add your SCM (Github, Gitlab, Azure DevOps) token to Mobb to enable automated fixes."
  ),
  addScmTokenBuilder,
  addScmTokenHandler
  ).command(
  mobbCliCommand.convertToSarif,
- chalk10.bold("Convert an existing SAST report to SARIF format."),
+ chalk11.bold("Convert an existing SAST report to SARIF format."),
  convertToSarifBuilder,
  convertToSarifHandler
  ).command(
  mobbCliCommand.mcp,
- chalk10.bold("Launch the MCP (Model Context Protocol) server."),
+ chalk11.bold("Launch the MCP (Model Context Protocol) server."),
  mcpBuilder,
  mcpHandler
+ ).command(
+ mobbCliCommand.uploadAiBlame,
+ chalk11.bold(
+ "Upload AI Blame inference artifacts (prompt + inference) and finalize them."
+ ),
+ uploadAiBlameBuilder,
+ uploadAiBlameHandler
  ).example(
  "npx mobbdev@latest scan -r https://github.com/WebGoat/WebGoat",
  "Scan an existing repository"
@@ -17556,7 +18185,7 @@ var parseArgs = async (args) => {
  handler() {
  yargsInstance.showHelp();
  }
- }).strictOptions().help("h").alias("h", "help").epilog(chalk10.bgBlue("Made with \u2764\uFE0F by Mobb")).showHelpOnFail(true).wrap(Math.min(120, yargsInstance.terminalWidth())).parse();
+ }).strictOptions().help("h").alias("h", "help").epilog(chalk11.bgBlue("Made with \u2764\uFE0F by Mobb")).showHelpOnFail(true).wrap(Math.min(120, yargsInstance.terminalWidth())).parse();
  };

  // src/index.ts
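The yargs section above also registers the new AI Blame upload command next to the existing ones. A minimal invocation sketch, assuming mobbCliCommand.uploadAiBlame resolves to the literal "upload-ai-blame" (the string itself is not shown in this diff) and using placeholder file paths and example option values; per the builder, --model, --tool-name, and --ai-response-at are optional and repeat once per session.

// Assumed command string, placeholder paths, and example option values - adjust to your setup.
//   npx mobbdev@latest upload-ai-blame \
//     --prompt ./session-1.prompt.txt --inference ./session-1.inference.txt \
//     --model gpt-4o --tool-name cursor
// Per uploadAiBlameHandler above, this runs three steps: init the upload sessions over GraphQL,
// upload each prompt/inference artifact to the returned upload URL, then finalize the sessions.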