mobbdev 1.0.107 → 1.0.108
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.mjs +674 -289
- package/package.json +3 -4
package/dist/index.mjs
CHANGED
@@ -540,6 +540,9 @@ var FixDetailsFragmentDoc = `
       vulnerability_report_issue_tag_value
     }
   }
+  sharedState {
+    id
+  }
   patchAndQuestions {
     __typename
     ... on FixData {
@@ -4444,9 +4447,15 @@ import { z as z15 } from "zod";
 var EnvVariablesZod = z15.object({
   GITLAB_API_TOKEN: z15.string().optional(),
   GITHUB_API_TOKEN: z15.string().optional(),
-  GIT_PROXY_HOST: z15.string().optional().default("http://tinyproxy:8888")
+  GIT_PROXY_HOST: z15.string().optional().default("http://tinyproxy:8888"),
+  MAX_UPLOAD_FILE_SIZE_MB: z15.coerce.number().gt(0).default(5)
 });
-var {
+var {
+  GITLAB_API_TOKEN,
+  GITHUB_API_TOKEN,
+  GIT_PROXY_HOST,
+  MAX_UPLOAD_FILE_SIZE_MB
+} = EnvVariablesZod.parse(process.env);

 // src/features/analysis/scm/ado/validation.ts
 import { z as z16 } from "zod";
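The new MAX_UPLOAD_FILE_SIZE_MB entry relies on zod's coercion: the env string is converted to a number, rejected unless positive, and defaulted to 5 when unset. A minimal standalone sketch of the same pattern (the schema name here is illustrative):

    import { z } from "zod";

    const Env = z.object({
      // coerce converts the env string to a number; default(5) applies when the variable is unset
      MAX_UPLOAD_FILE_SIZE_MB: z.coerce.number().gt(0).default(5),
    });

    const { MAX_UPLOAD_FILE_SIZE_MB } = Env.parse(process.env);
    // "12" parses to 12; an unset variable yields 5; "0" or "abc" throws a ZodError
    console.log(MAX_UPLOAD_FILE_SIZE_MB * 1024 * 1024); // byte limit later exposed as MCP_MAX_FILE_SIZE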
@@ -4838,7 +4847,7 @@ async function getAdoSdk(params) {
       const url = new URL(repoUrl);
       const origin2 = url.origin.toLowerCase().endsWith(".visualstudio.com") ? DEFUALT_ADO_ORIGIN : url.origin.toLowerCase();
       const params2 = `path=/&versionDescriptor[versionOptions]=0&versionDescriptor[versionType]=commit&versionDescriptor[version]=${branch}&resolveLfs=true&$format=zip&api-version=5.0&download=true`;
-      const
+      const path13 = [
         prefixPath,
         owner,
         projectName,
@@ -4849,7 +4858,7 @@ async function getAdoSdk(params) {
         "items",
         "items"
       ].filter(Boolean).join("/");
-      return new URL(`${
+      return new URL(`${path13}?${params2}`, origin2).toString();
     },
     async getAdoBranchList({ repoUrl }) {
       try {
@@ -5076,6 +5085,18 @@ import { setTimeout as setTimeout2 } from "timers/promises";
 import * as path2 from "path";
 import { simpleGit } from "simple-git";

+// src/mcp/core/configs.ts
+var MCP_DEFAULT_API_URL = "https://api.mobb.ai/v1/graphql";
+var MCP_API_KEY_HEADER_NAME = "x-mobb-key";
+var MCP_LOGIN_MAX_WAIT = 10 * 60 * 1e3;
+var MCP_LOGIN_CHECK_DELAY = 1 * 1e3;
+var MCP_VUL_REPORT_DIGEST_TIMEOUT_MS = 5 * 60 * 1e3;
+var MCP_MAX_FILE_SIZE = MAX_UPLOAD_FILE_SIZE_MB * 1024 * 1024;
+var MCP_PERIODIC_CHECK_INTERVAL = 15 * 60 * 1e3;
+var MCP_DEFAULT_MAX_FILES_TO_SCAN = 10;
+var MCP_REPORT_ID_EXPIRATION_MS = 2 * 60 * 60 * 1e3;
+var MCP_TOOLS_BROWSER_COOLDOWN_MS = 24 * 60 * 60 * 1e3;
+
 // src/features/analysis/scm/FileUtils.ts
 import fs2 from "fs";
 import { isBinary } from "istextorbinary";
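All the new time constants are expressed in milliseconds (1e3 = 1000). Spelled out, assuming the same names:

    // MCP_LOGIN_MAX_WAIT:                10 * 60 * 1e3      =   600000 ms = 10 minutes
    // MCP_VUL_REPORT_DIGEST_TIMEOUT_MS:  5 * 60 * 1e3       =   300000 ms =  5 minutes
    // MCP_PERIODIC_CHECK_INTERVAL:       15 * 60 * 1e3      =   900000 ms = 15 minutes
    // MCP_REPORT_ID_EXPIRATION_MS:       2 * 60 * 60 * 1e3  =  7200000 ms =  2 hours
    // MCP_TOOLS_BROWSER_COOLDOWN_MS:     24 * 60 * 60 * 1e3 = 86400000 ms = 24 hours
    const MCP_MAX_FILE_SIZE = 5 * 1024 * 1024; // with the default 5 MB limit: 5242880 bytes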
@@ -5083,6 +5104,9 @@ import path from "path";
 var EXCLUDED_FILE_PATTERNS = [
   // ... (copy the full array from FilePacking.ts)
   ".json",
+  ".snap",
+  ".env.vault",
+  ".env",
   ".yaml",
   ".yml",
   ".toml",
@@ -5234,16 +5258,24 @@ var FileUtils = class {
   }
   static shouldPackFile(filepath, maxFileSize = 1024 * 1024 * 5) {
     const absoluteFilepath = path.resolve(filepath);
-    if (this.isExcludedFileType(filepath))
-
-
+    if (this.isExcludedFileType(filepath)) {
+      return false;
+    }
+    if (!fs2.existsSync(absoluteFilepath)) {
+      return false;
+    }
+    if (fs2.lstatSync(absoluteFilepath).size > maxFileSize) {
+      return false;
+    }
     let data;
     try {
       data = fs2.readFileSync(absoluteFilepath);
     } catch {
       return false;
     }
-    if (isBinary(null, data))
+    if (isBinary(null, data)) {
+      return false;
+    }
     return true;
   }
   static getAllFiles(dir, rootDir) {
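shouldPackFile now rejects excluded, missing, oversized, unreadable, and binary files in that order, each with an early return. A self-contained sketch of the same guard-clause flow (the function name is illustrative):

    import fs from "fs";
    import path from "path";
    import { isBinary } from "istextorbinary";

    function shouldPack(filepath, maxFileSize = 5 * 1024 * 1024) {
      const abs = path.resolve(filepath);
      if (!fs.existsSync(abs)) return false; // e.g. a deleted file still listed elsewhere
      if (fs.lstatSync(abs).size > maxFileSize) return false; // skip oversized files
      let data;
      try {
        data = fs.readFileSync(abs);
      } catch {
        return false; // unreadable: permissions, special files
      }
      return !isBinary(null, data); // buffer-based binary sniffing, as in the bundle
    }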
@@ -5253,7 +5285,7 @@ var FileUtils = class {
     if (relativeDepth > 20) {
       return [];
     }
-    if (results.length >
+    if (results.length > 1e3) {
       return [];
     }
     try {
@@ -5284,10 +5316,14 @@ var FileUtils = class {
     }
     return results;
   }
-  static getLastChangedFiles(
+  static getLastChangedFiles({
+    dir,
+    maxFileSize,
+    maxFiles = MCP_DEFAULT_MAX_FILES_TO_SCAN
+  }) {
     if (!fs2.existsSync(dir) || !fs2.lstatSync(dir).isDirectory()) return [];
     const files = this.getAllFiles(dir);
-    return files.filter((file) => this.shouldPackFile(file.fullPath, maxFileSize)).sort((a, b) => b.time - a.time).slice(0,
+    return files.filter((file) => this.shouldPackFile(file.fullPath, maxFileSize)).sort((a, b) => b.time - a.time).slice(0, maxFiles).map((file) => file.relativePath);
   }
 };

@@ -5336,7 +5372,10 @@ var GitService = class {
       gitRoot,
       this.repositoryPath
     );
-    const
+    const deletedFiles = status.files.filter((file) => file.index === "D" || file.working_dir === "D").map((file) => file.path);
+    const files = status.files.filter((file) => {
+      return !(file.index === "D" || file.working_dir === "D");
+    }).map((file) => {
       const gitRelativePath = file.path;
       if (relativePathFromGitRoot === "") {
         return gitRelativePath;
@@ -5353,11 +5392,13 @@ var GitService = class {
       fileCount: files.length,
       files: files.slice(0, 10),
       // Log first 10 files to avoid spam
+      deletedFileCount: deletedFiles.length,
+      deletedFiles: deletedFiles.slice(0, 10),
       gitRoot,
       workingDir: this.repositoryPath,
       relativePathFromGitRoot
     });
-    return { files, status };
+    return { files, deletedFiles, status };
   } catch (error) {
     const errorMessage = `Failed to get git status: ${error.message}`;
     this.log(errorMessage, "error", { error });
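getChangedFiles now reports deletions separately instead of mixing them into the scan list. A minimal sketch of the same split over simple-git's status result, whose file entries carry path, index, and working_dir codes:

    import { simpleGit } from "simple-git";

    const git = simpleGit("/path/to/repo"); // illustrative path
    const status = await git.status();

    const isDeleted = (f) => f.index === "D" || f.working_dir === "D";
    const deletedFiles = status.files.filter(isDeleted).map((f) => f.path);
    const files = status.files.filter((f) => !isDeleted(f)).map((f) => f.path);
    // deleted paths are excluded up front, so later existence checks never see them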
@@ -5482,11 +5523,13 @@ var GitService = class {
     }
   }
   /**
-   * Gets the
+   * Gets the maxFiles most recently changed files based on commit history
    */
-  async getRecentlyChangedFiles(
+  async getRecentlyChangedFiles({
+    maxFiles = MCP_DEFAULT_MAX_FILES_TO_SCAN
+  }) {
     this.log(
-
+      `Getting the ${maxFiles} most recently changed files from commit history`,
       "debug"
     );
     try {
@@ -5499,8 +5542,8 @@ var GitService = class {
       const files = [];
       let commitsProcessed = 0;
       const logResult = await this.git.log({
-        maxCount:
-        //
+        maxCount: maxFiles * 5,
+        // 5 times the max files to scan to ensure we find enough files
         format: {
           hash: "%H",
           date: "%ai",
@@ -5510,7 +5553,7 @@ var GitService = class {
         }
       });
       for (const commit of logResult.all) {
-        if (files.length >=
+        if (files.length >= maxFiles) {
          break;
        }
        commitsProcessed++;
@@ -5522,7 +5565,7 @@ var GitService = class {
         ]);
         const commitFiles = filesOutput.split("\n").filter((file) => file.trim() !== "");
         for (const file of commitFiles) {
-          if (files.length >=
+          if (files.length >= maxFiles) {
            break;
          }
          const gitRelativePath = file.trim();
@@ -5540,7 +5583,7 @@ var GitService = class {
           );
         }
         this.log(`Considering file: ${adjustedPath}`, "debug");
-        if (!fileSet.has(adjustedPath) && FileUtils.shouldPackFile(path2.join(gitRoot, gitRelativePath))) {
+        if (!fileSet.has(adjustedPath) && FileUtils.shouldPackFile(path2.join(gitRoot, gitRelativePath)) && !adjustedPath.startsWith("..")) {
           fileSet.add(adjustedPath);
           files.push(adjustedPath);
         }
@@ -5555,8 +5598,8 @@ var GitService = class {
         fileCount: files.length,
         commitsProcessed,
         totalCommitsAvailable: logResult.all.length,
-        files: files.slice(0,
-        // Log the files (should be all of them since we limit to
+        files: files.slice(0, maxFiles),
+        // Log the files (should be all of them since we limit to maxFiles)
         gitRoot,
         workingDir: this.repositoryPath,
         relativePathFromGitRoot
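getRecentlyChangedFiles oversamples commits (maxCount: maxFiles * 5) and stops once it has collected maxFiles unique paths. A compact sketch of that loop, assuming simple-git and an illustrative repository path:

    import { simpleGit } from "simple-git";

    const git = simpleGit("/path/to/repo");
    const maxFiles = 10;
    const log = await git.log({ maxCount: maxFiles * 5 }); // oversample commits

    const seen = new Set();
    for (const commit of log.all) {
      if (seen.size >= maxFiles) break;
      // --name-only with an empty format prints only the touched paths
      const out = await git.raw(["show", "--name-only", "--format=", commit.hash]);
      for (const file of out.split("\n")) {
        if (seen.size >= maxFiles) break;
        if (file.trim() !== "") seen.add(file.trim());
      }
    }
    console.log([...seen]); // up to maxFiles most recently touched files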
@@ -6887,14 +6930,14 @@ function getGithubSdk(params = {}) {
       };
     },
     async getGithubBlameRanges(params2) {
-      const { ref, gitHubUrl, path:
+      const { ref, gitHubUrl, path: path13 } = params2;
       const { owner, repo } = parseGithubOwnerAndRepo(gitHubUrl);
       const res = await octokit.graphql(
         GET_BLAME_DOCUMENT,
         {
           owner,
           repo,
-          path:
+          path: path13,
           ref
         }
       );
@@ -7203,11 +7246,11 @@ var GithubSCMLib = class extends SCMLib {
       markdownComment: comment
     });
   }
-  async getRepoBlameRanges(ref,
+  async getRepoBlameRanges(ref, path13) {
     this._validateUrl();
     return await this.githubSdk.getGithubBlameRanges({
       ref,
-      path:
+      path: path13,
       gitHubUrl: this.url
     });
   }
@@ -7613,13 +7656,13 @@ function parseGitlabOwnerAndRepo(gitlabUrl) {
   const { organization, repoName, projectPath } = parsingResult;
   return { owner: organization, repo: repoName, projectPath };
 }
-async function getGitlabBlameRanges({ ref, gitlabUrl, path:
+async function getGitlabBlameRanges({ ref, gitlabUrl, path: path13 }, options) {
   const { projectPath } = parseGitlabOwnerAndRepo(gitlabUrl);
   const api2 = getGitBeaker({
     url: gitlabUrl,
     gitlabAuthToken: options?.gitlabAuthToken
   });
-  const resp = await api2.RepositoryFiles.allFileBlames(projectPath,
+  const resp = await api2.RepositoryFiles.allFileBlames(projectPath, path13, ref);
   let lineNumber = 1;
   return resp.filter((range) => range.lines).map((range) => {
     const oldLineNumber = lineNumber;
@@ -7795,10 +7838,10 @@ var GitlabSCMLib = class extends SCMLib {
       markdownComment: comment
     });
   }
-  async getRepoBlameRanges(ref,
+  async getRepoBlameRanges(ref, path13) {
     this._validateUrl();
     return await getGitlabBlameRanges(
-      { ref, path:
+      { ref, path: path13, gitlabUrl: this.url },
       {
         url: this.url,
         gitlabAuthToken: this.accessToken
@@ -8797,7 +8840,7 @@ async function postIssueComment(params) {
     fpDescription
   } = params;
   const {
-    path:
+    path: path13,
     startLine,
     vulnerabilityReportIssue: {
       vulnerabilityReportIssueTags,
@@ -8812,7 +8855,7 @@ async function postIssueComment(params) {
 Refresh the page in order to see the changes.`,
     pull_number: pullRequest,
     commit_id: commitSha,
-    path:
+    path: path13,
     line: startLine
   });
   const commentId = commentRes.data.id;
@@ -8846,7 +8889,7 @@ async function postFixComment(params) {
     scanner
   } = params;
   const {
-    path:
+    path: path13,
     startLine,
     vulnerabilityReportIssue: { fixId, vulnerabilityReportIssueTags, category },
     vulnerabilityReportIssueId
@@ -8864,7 +8907,7 @@ async function postFixComment(params) {
 Refresh the page in order to see the changes.`,
     pull_number: pullRequest,
     commit_id: commitSha,
-    path:
+    path: path13,
     line: startLine
   });
   const commentId = commentRes.data.id;
@@ -10059,8 +10102,8 @@ async function forkSnyk(args, { display }) {
 }
 async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
   debug15("get snyk report start %s %s", reportPath, repoRoot);
-  const
-  const { message: configMessage } =
+  const config4 = await forkSnyk(["config"], { display: false });
+  const { message: configMessage } = config4;
   if (!configMessage.includes("api: ")) {
     const snykLoginSpinner = createSpinner3().start();
     if (!skipPrompts) {
@@ -10072,7 +10115,7 @@ async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
       snykLoginSpinner.update({
         text: "\u{1F513} Waiting for Snyk login to complete"
       });
-      debug15("no token in the config %s",
+      debug15("no token in the config %s", config4);
       await forkSnyk(["auth"], { display: true });
       snykLoginSpinner.success({ text: "\u{1F513} Login to Snyk Successful" });
     }
@@ -11289,10 +11332,6 @@ import { GraphQLClient as GraphQLClient2 } from "graphql-request";
 import open4 from "open";
 import { v4 as uuidv42 } from "uuid";

-// src/mcp/constants.ts
-var DEFAULT_API_URL2 = "https://api.mobb.ai/v1/graphql";
-var API_KEY_HEADER_NAME2 = "x-mobb-key";
-
 // src/mcp/core/Errors.ts
 var ApiConnectionError = class extends Error {
   constructor(message = "Failed to connect to the API") {
@@ -11356,21 +11395,17 @@ var FailedToGetApiTokenError = class extends Error {
 };

 // src/mcp/services/McpGQLClient.ts
-var
-var LOGIN_CHECK_DELAY2 = 1 * 1e3;
-var config4 = new Configstore3(packageJson.name, { apiToken: "" });
-var BROWSER_COOLDOWN_MS = 5e3;
-var lastBrowserOpenTime = 0;
+var mobbConfigStore = new Configstore3(packageJson.name, { apiToken: "" });
 var McpGQLClient = class {
   constructor(args) {
     __publicField(this, "client");
     __publicField(this, "clientSdk");
     __publicField(this, "_auth");
     this._auth = args;
-    const API_URL2 = process.env["API_URL"] ||
+    const API_URL2 = process.env["API_URL"] || MCP_DEFAULT_API_URL;
     logDebug("creating graphql client", { API_URL: API_URL2, args });
     this.client = new GraphQLClient2(API_URL2, {
-      headers: args.type === "apiKey" ? { [
+      headers: args.type === "apiKey" ? { [MCP_API_KEY_HEADER_NAME]: args.apiKey || "" } : {
         Authorization: `Bearer ${args.token}`
       },
       requestMiddleware: (request) => {
@@ -11388,10 +11423,10 @@ var McpGQLClient = class {
   }
   getErrorContext() {
     return {
-      endpoint: process.env["API_URL"] ||
+      endpoint: process.env["API_URL"] || MCP_DEFAULT_API_URL,
       apiKey: this._auth.type === "apiKey" ? this._auth.apiKey : "",
       headers: {
-        [
+        [MCP_API_KEY_HEADER_NAME]: this._auth.type === "apiKey" ? "[REDACTED]" : "undefined",
         "x-hasura-request-id": "[DYNAMIC]"
       }
     };
@@ -11578,7 +11613,7 @@ var McpGQLClient = class {
     try {
       const res = await this.clientSdk.CreateCliLogin(variables, {
         // We may have outdated API key in the config storage. Avoid using it for the login request.
-        [
+        [MCP_API_KEY_HEADER_NAME]: ""
       });
       const loginId = res.insert_cli_login_one?.id || "";
       if (!loginId) {
@@ -11595,7 +11630,7 @@ var McpGQLClient = class {
     try {
       const res = await this.clientSdk.GetEncryptedApiToken(variables, {
         // We may have outdated API key in the config storage. Avoid using it for the login request.
-        [
+        [MCP_API_KEY_HEADER_NAME]: ""
       });
       return res?.cli_login_by_pk?.encryptedApiToken || null;
     } catch (e) {
@@ -11688,21 +11723,26 @@ var McpGQLClient = class {
     }
   }
 };
-async function openBrowser(url) {
-
-
-
-
+async function openBrowser(url, isToolsCall) {
+  if (isToolsCall) {
+    const now = Date.now();
+    const lastBrowserOpenTime = mobbConfigStore.get("lastBrowserOpenTime") || 0;
+    if (now - lastBrowserOpenTime < MCP_TOOLS_BROWSER_COOLDOWN_MS) {
+      logDebug(`browser cooldown active, skipping open for ${url}`);
+      return;
+    }
   }
   logDebug(`opening browser url ${url}`);
   await open4(url);
-  lastBrowserOpenTime
+  mobbConfigStore.set("lastBrowserOpenTime", Date.now());
 }
-async function getMcpGQLClient(
-
+async function getMcpGQLClient({
+  isToolsCall = false
+} = {}) {
+  logDebug("getting config", { apiToken: mobbConfigStore.get("apiToken") });
   const inGqlClient = new McpGQLClient({
     apiKey: process.env["MOBB_API_KEY"] || process.env["API_KEY"] || // fallback for backward compatibility
-
+    mobbConfigStore.get("apiToken") || "",
     type: "apiKey"
   });
   const isConnected = await inGqlClient.verifyConnection();
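Persisting lastBrowserOpenTime in Configstore lets the 24-hour cooldown survive process restarts, unlike the removed in-memory module variable (which used a 5-second cooldown). A minimal sketch of the pattern, with an illustrative store name:

    import Configstore from "configstore";
    import open from "open";

    const store = new Configstore("my-cli"); // illustrative package name
    const COOLDOWN_MS = 24 * 60 * 60 * 1e3; // 24 hours

    async function openOncePerDay(url) {
      const last = store.get("lastBrowserOpenTime") || 0;
      if (Date.now() - last < COOLDOWN_MS) return; // still cooling down: skip
      await open(url);
      store.set("lastBrowserOpenTime", Date.now()); // persists across restarts
    }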
@@ -11730,10 +11770,10 @@ async function getMcpGQLClient() {
   const webLoginUrl2 = `${WEB_APP_URL}/cli-login`;
   const browserUrl = `${webLoginUrl2}/${loginId}?hostname=${os2.hostname()}`;
   logDebug(`opening browser url ${browserUrl}`);
-  await openBrowser(browserUrl);
+  await openBrowser(browserUrl, isToolsCall);
   logDebug(`waiting for login to complete`);
   let newApiToken = null;
-  for (let i = 0; i <
+  for (let i = 0; i < MCP_LOGIN_MAX_WAIT / MCP_LOGIN_CHECK_DELAY; i++) {
     const encryptedApiToken = await inGqlClient.getEncryptedApiToken({
       loginId
     });
@@ -11743,7 +11783,7 @@ async function getMcpGQLClient() {
       logDebug("API token decrypted");
       break;
     }
-    await sleep(
+    await sleep(MCP_LOGIN_CHECK_DELAY);
   }
   if (!newApiToken) {
     throw new FailedToGetApiTokenError(
@@ -11754,7 +11794,7 @@ async function getMcpGQLClient() {
   const loginSuccess = await newGqlClient.verifyToken();
   if (loginSuccess) {
     logDebug(`set api token ${newApiToken}`);
-
+    mobbConfigStore.set("apiToken", newApiToken);
   } else {
     throw new AuthenticationError("Invalid API token");
   }
@@ -11797,14 +11837,14 @@ var ToolRegistry = class {

 // src/mcp/core/McpServer.ts
 var McpServer = class {
-  constructor(
+  constructor(config4) {
     __publicField(this, "server");
     __publicField(this, "toolRegistry");
     __publicField(this, "isEventHandlersSetup", false);
     this.server = new Server(
       {
-        name:
-        version:
+        name: config4.name,
+        version: config4.version
       },
       {
         capabilities: {
@@ -11815,7 +11855,7 @@ var McpServer = class {
     this.toolRegistry = new ToolRegistry();
     this.setupHandlers();
     this.setupProcessEventHandlers();
-    logInfo("MCP server instance created",
+    logInfo("MCP server instance created", config4);
   }
   setupProcessEventHandlers() {
     if (this.isEventHandlersSetup) {
@@ -11866,7 +11906,7 @@ var McpServer = class {
       logInfo("Request", {
         request: JSON.parse(JSON.stringify(request))
       });
-      void getMcpGQLClient();
+      void getMcpGQLClient({ isToolsCall: true });
       const toolsDefinitions = this.toolRegistry.getAllTools();
       const response = {
         tools: toolsDefinitions.map((tool) => ({
@@ -11951,7 +11991,7 @@ var McpServer = class {
   }
 };

-// src/mcp/tools/
+// src/mcp/tools/checkForNewAvailableFixes/CheckForNewAvailableFixesTool.ts
 import { z as z32 } from "zod";

 // src/mcp/services/PathValidation.ts
@@ -11959,7 +11999,10 @@ import fs9 from "fs";
 import path11 from "path";
 async function validatePath(inputPath) {
   logDebug("Validating MCP path", { inputPath });
-  if (inputPath
+  if (/^\/[a-zA-Z]:\//.test(inputPath)) {
+    inputPath = inputPath.slice(1);
+  }
+  if (inputPath === "." || inputPath === "./") {
     if (process.env["WORKSPACE_FOLDER_PATHS"]) {
       logDebug("Fallback to workspace folder path", {
         inputPath,
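The new guard strips the leading slash that some MCP clients prepend to Windows drive paths ("/C:/repo" rather than "C:/repo"), while leaving POSIX and relative paths alone. The regex in isolation:

    const normalize = (p) => (/^\/[a-zA-Z]:\//.test(p) ? p.slice(1) : p);

    console.log(normalize("/C:/Users/dev/repo")); // "C:/Users/dev/repo" (slash dropped)
    console.log(normalize("/home/dev/repo"));     // "/home/dev/repo"    (unchanged)
    console.log(normalize("./"));                 // "./"                (unchanged)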
@@ -12029,7 +12072,6 @@ var BaseTool = class {
     };
   }
   async execute(args) {
-    logInfo(`Executing tool: ${this.name}`, { args });
     logInfo(`Authenticating tool: ${this.name}`, { args });
     const mcpGqlClient = await getMcpGQLClient();
     const userInfo = await mcpGqlClient.getUserInfo();
@@ -12282,9 +12324,36 @@ ${applyFixesPrompt({
   offset
 })}`;
 };
-var
+var nextStepsPrompt = ({ scannedFiles }) => `
+### \u{1F4C1} Scanned Files
+${scannedFiles.map((file) => `- ${file}`).join("\n")}
+
+### Extend the scan scope
+
+To scan a larger number of files, include the additional parameter:
+
+- **maxFiles**: <number_of_files_to_scan>
+
+This will scan up to the specified number of recently changed files.
+
+### \u{1F504} Running a Fresh Scan
+
+To perform a **rescan** of your repository (fetching a brand-new vulnerability report and updated fixes), include the additional parameter:
+
+- **rescan**: true
+
+This will start a new analysis, discard any cached results.
+
+\u26A0\uFE0F *Note:* A full rescan may take longer to complete than simply fetching additional fixes because your repository is re-uploaded and re-analyzed from scratch.
+
+`;
+var noFixesFoundPrompt = ({
+  scannedFiles
+}) => `\u{1F50D} **MOBB SECURITY SCAN COMPLETED**

 Mobb security scan completed successfully but found no automated fixes available at this time.
+
+${nextStepsPrompt({ scannedFiles })}
 `;
 var fixesPrompt = ({
   fixes,
@@ -12293,7 +12362,7 @@ var fixesPrompt = ({
   scannedFiles
 }) => {
   if (totalCount === 0) {
-    return noFixesFoundPrompt;
+    return noFixesFoundPrompt({ scannedFiles });
   }
   const shownCount = fixes.length;
   const nextOffset = offset + shownCount;
@@ -12310,22 +12379,472 @@ ${applyFixesPrompt({
   offset
 })}

-
-
+${nextStepsPrompt({ scannedFiles })}
+`;
+};
+var noFreshFixesPrompt = `No fresh fixes available for this repository at this time.
+`;
+var initialScanInProgressPrompt = `Initial scan in progress. Call the tool again in 1 minute to check for available fixes.`;
+var freshFixesPrompt = ({ fixes }) => {
+  return `Here are the fresh fixes to the vulnerabilities discovered by Mobb MCP

-
+${applyFixesPrompt({
+  fixes,
+  totalCount: fixes.length,
+  hasMore: false,
+  nextOffset: 0,
+  shownCount: fixes.length,
+  currentTool: "fetch_available_fixes",
+  offset: 0
+})}
+`;
+};

-
+// src/mcp/services/GetLocalFiles.ts
+import fs10 from "fs/promises";
+import nodePath from "path";
+var getLocalFiles = async ({
+  path: path13,
+  maxFileSize = 1024 * 1024 * 5,
+  maxFiles
+}) => {
+  const resolvedRepoPath = await fs10.realpath(path13);
+  const gitService = new GitService(resolvedRepoPath, log);
+  const gitValidation = await gitService.validateRepository();
+  let files = [];
+  if (!gitValidation.isValid) {
+    logDebug(
+      "Git repository validation failed, using all files in the repository",
+      {
+        path: path13
+      }
+    );
+    files = FileUtils.getLastChangedFiles({
+      dir: path13,
+      maxFileSize,
+      maxFiles
+    });
+    logDebug("Found files in the repository", {
+      files,
+      fileCount: files.length
+    });
+  } else {
+    logDebug("maxFiles", {
+      maxFiles
+    });
+    const gitResult = await gitService.getChangedFiles();
+    files = gitResult.files;
+    if (files.length === 0 || maxFiles) {
+      const recentResult = await gitService.getRecentlyChangedFiles({
+        maxFiles
+      });
+      files = recentResult.files;
+      logDebug(
+        "No changes found, using recently changed files from git history",
+        {
+          files,
+          fileCount: files.length,
+          commitsChecked: recentResult.commitCount
+        }
+      );
+    } else {
+      logDebug("Found changed files in the git repository", {
+        files,
+        fileCount: files.length
+      });
+    }
+  }
+  files = files.filter(
+    (file) => FileUtils.shouldPackFile(
+      nodePath.resolve(resolvedRepoPath, file),
+      maxFileSize
+    )
+  );
+  const filesWithStats = await Promise.all(
+    files.map(async (file) => {
+      const absoluteFilePath = nodePath.resolve(resolvedRepoPath, file);
+      const relativePath = nodePath.relative(resolvedRepoPath, absoluteFilePath);
+      let fileStat;
+      try {
+        fileStat = await fs10.stat(absoluteFilePath);
+      } catch (e) {
+        logDebug("File not found", {
+          file
+        });
+      }
+      return {
+        filename: nodePath.basename(absoluteFilePath),
+        relativePath,
+        fullPath: absoluteFilePath,
+        lastEdited: fileStat?.mtime.getTime() ?? 0
+      };
+    })
+  );
+  return filesWithStats.filter((file) => file.lastEdited > 0);
+};

-
+// src/mcp/services/ScanFiles.ts
+import fs11 from "fs";
+import path12 from "path";
+import AdmZip2 from "adm-zip";
+var scanFiles = async (fileList, repositoryPath, gqlClient) => {
+  const repoUploadInfo = await initializeReport(gqlClient);
+  const fixReportId = repoUploadInfo.fixReportId;
+  const zipBuffer = await packFiles(fileList, repositoryPath);
+  await uploadFiles(zipBuffer, repoUploadInfo);
+  const projectId = await getProjectId(gqlClient);
+  await runScan({ fixReportId, projectId, gqlClient });
+  return {
+    fixReportId,
+    projectId
+  };
+};
+var initializeReport = async (gqlClient) => {
+  if (!gqlClient) {
+    throw new GqlClientError();
+  }
+  try {
+    const {
+      uploadS3BucketInfo: { repoUploadInfo }
+    } = await gqlClient.uploadS3BucketInfo();
+    logInfo("Upload info retrieved", { uploadKey: repoUploadInfo?.uploadKey });
+    return repoUploadInfo;
+  } catch (error) {
+    const message = error.message;
+    throw new ReportInitializationError(`Error initializing report: ${message}`);
+  }
+};
+var packFiles = async (fileList, repositoryPath) => {
+  try {
+    logInfo(`FilePacking: packing files from ${repositoryPath}`);
+    const zip = new AdmZip2();
+    let packedFilesCount = 0;
+    const resolvedRepoPath = path12.resolve(repositoryPath);
+    logInfo("FilePacking: compressing files");
+    for (const filepath of fileList) {
+      const absoluteFilepath = path12.join(repositoryPath, filepath);
+      const resolvedFilePath = path12.resolve(absoluteFilepath);
+      if (!resolvedFilePath.startsWith(resolvedRepoPath)) {
+        logInfo(
+          `FilePacking: skipping ${filepath} due to potential path traversal`
+        );
+        continue;
+      }
+      if (!FileUtils.shouldPackFile(absoluteFilepath, MCP_MAX_FILE_SIZE)) {
+        logInfo(
+          `FilePacking: ignoring ${filepath} because it is excluded or invalid`
+        );
+        continue;
+      }
+      let data;
+      try {
+        data = fs11.readFileSync(absoluteFilepath);
+      } catch (fsError) {
+        logInfo(
+          `FilePacking: failed to read ${filepath} from filesystem: ${fsError}`
+        );
+        continue;
+      }
+      zip.addFile(filepath, data);
+      packedFilesCount++;
+    }
+    const zipBuffer = zip.toBuffer();
+    logInfo(
+      `FilePacking: read ${packedFilesCount} source files. total size: ${zipBuffer.length} bytes`
+    );
+    logInfo("Files packed successfully", { fileCount: fileList.length });
+    return zipBuffer;
+  } catch (error) {
+    const message = error.message;
+    throw new FileProcessingError(`Error packing files: ${message}`);
+  }
+};
+var uploadFiles = async (zipBuffer, repoUploadInfo) => {
+  if (!repoUploadInfo) {
+    throw new FileUploadError("Upload info is required");
+  }
+  try {
+    await uploadFile({
+      file: zipBuffer,
+      url: repoUploadInfo.url,
+      uploadFields: JSON.parse(repoUploadInfo.uploadFieldsJSON),
+      uploadKey: repoUploadInfo.uploadKey
+    });
+    logInfo("File uploaded successfully");
+  } catch (error) {
+    logError("File upload failed", { error: error.message });
+    throw new FileUploadError(
+      `Failed to upload the file: ${error.message}`
+    );
+  }
+};
+var getProjectId = async (gqlClient) => {
+  if (!gqlClient) {
+    throw new GqlClientError();
+  }
+  const projectId = await gqlClient.getProjectId();
+  logInfo("Project ID retrieved", { projectId });
+  return projectId;
+};
+var runScan = async ({
+  fixReportId,
+  projectId,
+  gqlClient
+}) => {
+  if (!gqlClient) {
+    throw new GqlClientError();
+  }
+  logInfo("Starting scan", { fixReportId, projectId });
+  const submitVulnerabilityReportVariables = {
+    fixReportId,
+    projectId,
+    repoUrl: "",
+    reference: "no-branch",
+    scanSource: "MCP" /* Mcp */
+  };
+  logInfo("Submitting vulnerability report");
+  const submitRes = await gqlClient.submitVulnerabilityReport(
+    submitVulnerabilityReportVariables
+  );
+  if (submitRes.submitVulnerabilityReport.__typename !== "VulnerabilityReport") {
+    logError("Vulnerability report submission failed", {
+      response: submitRes
+    });
+    throw new ScanError("\u{1F575}\uFE0F\u200D\u2642\uFE0F Mobb analysis failed");
+  }
+  logInfo("Vulnerability report submitted successfully", {
+    analysisId: submitRes.submitVulnerabilityReport.fixReportId
+  });
+  logInfo("Starting analysis subscription");
+  await gqlClient.subscribeToGetAnalysis({
+    subscribeToAnalysisParams: {
+      analysisId: submitRes.submitVulnerabilityReport.fixReportId
+    },
+    callback: () => {
+    },
+    callbackStates: ["Finished" /* Finished */],
+    timeoutInMs: MCP_VUL_REPORT_DIGEST_TIMEOUT_MS
+  });
+  logInfo("Analysis subscription completed");
+};

-
+// src/mcp/tools/checkForNewAvailableFixes/CheckForNewAvailableFixesService.ts
+function extractPathFromPatch(patch) {
+  const match = patch?.match(/^diff --git a\/([^\s]+) b\//);
+  return match?.[1] ?? null;
+}
+var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService {
+  constructor() {
+    /**
+     * Cache of the last known total number of fixes per repository URL so that we
+     * can determine whether *new* fixes have been generated since the user last
+     * asked.
+     */
+    __publicField(this, "path", "");
+    __publicField(this, "filesLastScanned", {});
+    __publicField(this, "freshFixes", []);
+    __publicField(this, "reportedFixes", []);
+    __publicField(this, "intervalId", null);
+    __publicField(this, "isInitialScanComplete", false);
+  }
+  static getInstance() {
+    if (!_CheckForNewAvailableFixesService.instance) {
+      _CheckForNewAvailableFixesService.instance = new _CheckForNewAvailableFixesService();
+    }
+    return _CheckForNewAvailableFixesService.instance;
+  }
+  /**
+   * Resets any cached state so the service can be reused between independent
+   * MCP sessions.
+   */
+  reset() {
+    this.filesLastScanned = {};
+    this.freshFixes = [];
+    this.reportedFixes = [];
+    if (this.intervalId) {
+      clearInterval(this.intervalId);
+      this.intervalId = null;
+    }
+  }
+  /**
+   * Stub implementation – in a future version this will query the backend for
+   * the latest fixes count and compare it with the cached value. For now it
+   * simply returns a placeholder string so that the tool can be wired into the
+   * system and used in tests.
+   */
+  async scan({ path: path13 }) {
+    logInfo("Scanning for new fixes", { path: path13 });
+    const gqlClient = await getMcpGQLClient();
+    const isConnected = await gqlClient.verifyConnection();
+    if (!isConnected) {
+      logError("Failed to connect to the API, scan aborted");
+      return;
+    }
+    logInfo("Connected to the API, assebling list of files to scan", { path: path13 });
+    const files = await getLocalFiles({
+      path: path13,
+      maxFileSize: MCP_MAX_FILE_SIZE
+    });
+    logInfo("Active files", { files });
+    const filesToScan = files.filter((file) => {
+      const lastScannedEditTime = this.filesLastScanned[file.fullPath];
+      if (!lastScannedEditTime) {
+        return true;
+      }
+      return file.lastEdited > lastScannedEditTime;
+    });
+    if (filesToScan.length === 0) {
+      logInfo("No files to scan", { path: path13 });
+      return;
+    }
+    logInfo("Files to scan", { filesToScan });
+    const { fixReportId, projectId } = await scanFiles(
+      filesToScan.map((file) => file.relativePath),
+      path13,
+      gqlClient
+    );
+    logInfo("Scan completed", { fixReportId, projectId });
+    const fixes = await gqlClient.getReportFixesPaginated({
+      reportId: fixReportId,
+      offset: 0,
+      limit: 1e3
+    });
+    const newFixes = fixes?.fixes?.filter((fix) => !this.isAlreadyReported(fix));
+    logInfo("Fixes retrieved", {
+      count: fixes?.fixes?.length || 0,
+      newFixes: newFixes?.length || 0
+    });
+    this.freshFixes = this.freshFixes.filter((fix) => !this.isFixFromOldScan(fix, filesToScan)).concat(newFixes || []);
+    logInfo("Fresh fixes", { freshFixes: this.freshFixes });
+    filesToScan.forEach((file) => {
+      this.filesLastScanned[file.fullPath] = file.lastEdited;
+    });
+    this.isInitialScanComplete = true;
+  }
+  isAlreadyReported(fix) {
+    return this.reportedFixes.some(
+      (reportedFix) => reportedFix.sharedState?.id === fix.sharedState?.id
+    );
+  }
+  isFixFromOldScan(fix, filesToScan) {
+    const patch = fix.patchAndQuestions?.__typename === "FixData" ? fix.patchAndQuestions.patch : void 0;
+    const fixFile = extractPathFromPatch(patch);
+    if (!fixFile) {
+      return false;
+    }
+    logInfo("isOldFix", {
+      fixFile,
+      filesToScan,
+      isOldFix: filesToScan.some((file) => file.relativePath === fixFile)
+    });
+    return filesToScan.some((file) => file.relativePath === fixFile);
+  }
+  async getFreshFixes({ path: path13 }) {
+    if (this.path !== path13) {
+      this.path = path13;
+      this.reset();
+    }
+    if (!this.intervalId) {
+      logInfo("Starting periodic scan for new fixes", { path: path13 });
+      this.intervalId = setInterval(() => {
+        logDebug("Triggering periodic scan", { path: path13 });
+        this.scan({ path: path13 }).catch((error) => {
+          logError("Error during periodic scan", { error });
+        });
+      }, MCP_PERIODIC_CHECK_INTERVAL);
+      logDebug("Triggering initial scan", { path: path13 });
+      this.scan({ path: path13 }).catch((error) => {
+        logError("Error during initial scan", { error });
+      });
+    }
+    if (this.freshFixes.length > 0) {
+      const freshFixes = this.freshFixes.splice(0, 3);
+      if (freshFixes.length > 0) {
+        this.reportedFixes.concat(freshFixes);
+        return freshFixesPrompt({ fixes: freshFixes });
+      }
+    }
+    if (!this.isInitialScanComplete) {
+      return initialScanInProgressPrompt;
+    }
+    return noFreshFixesPrompt;
+  }
+};
+__publicField(_CheckForNewAvailableFixesService, "instance");
+var CheckForNewAvailableFixesService = _CheckForNewAvailableFixesService;

-
+// src/mcp/tools/checkForNewAvailableFixes/CheckForNewAvailableFixesTool.ts
+var CheckForNewAvailableFixesTool = class extends BaseTool {
+  constructor() {
+    super();
+    __publicField(this, "name", "check_for_new_available_fixes");
+    __publicField(this, "displayName", "Check for New Available Fixes");
+    // A detailed description to guide the LLM on when and how to invoke this tool.
+    __publicField(this, "description", `Continuesly monitors your code and scans for new security vulnerabilities.

-
+When to invoke:
+\u2022 ALWAYS call this tool ONCE at the very end of every reasoning / code-generation session (after all code edits, linting, tests, etc.).
+\u2022 Invoke when the user explicitly asks to "check for new fixes", "look for fresh patches", or similar.
+\u2022 The repository must exist locally and be a valid Git repo with an 'origin' remote configured.
+
+How to invoke:
+\u2022 Required argument:
+\u2013 path (string): absolute path to the repository root.
+
+Behaviour:
+\u2022 If no new fixes are available, it returns a concise message indicating so.
+\u2022 If fixes are found, it returns a human-readable summary including total count and severity breakdown.
+
+Example payload:
+{
+"path": "/home/user/my-project"
+}`);
+    __publicField(this, "inputSchema", {
+      type: "object",
+      properties: {
+        path: {
+          type: "string",
+          description: "Full local path to the cloned git repository to check for new available fixes"
+        }
+      },
+      required: ["path"]
+    });
+    __publicField(this, "inputValidationSchema", z32.object({
+      path: z32.string().describe(
+        "Full local path to the cloned git repository to check for new available fixes"
+      )
+    }));
+    __publicField(this, "newFixesService");
+    this.newFixesService = new CheckForNewAvailableFixesService();
+  }
+  async executeInternal(args) {
+    const pathValidationResult = await validatePath(args.path);
+    if (!pathValidationResult.isValid) {
+      throw new Error(
+        `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
+      );
+    }
+    const path13 = pathValidationResult.path;
+    const resultText = await this.newFixesService.getFreshFixes({
+      path: path13
+    });
+    logInfo("CheckForNewAvailableFixesTool execution completed", {
+      resultText
+    });
+    return {
+      content: [
+        {
+          type: "text",
+          text: resultText
+        }
+      ]
+    };
+  }
 };

+// src/mcp/tools/fetchAvailableFixes/FetchAvailableFixesTool.ts
+import { z as z33 } from "zod";
+
 // src/mcp/tools/fetchAvailableFixes/FetchAvailableFixesService.ts
 var _FetchAvailableFixesService = class _FetchAvailableFixesService {
   constructor() {
|
|
|
12443
12962
|
},
|
|
12444
12963
|
required: ["path"]
|
|
12445
12964
|
});
|
|
12446
|
-
__publicField(this, "inputValidationSchema",
|
|
12447
|
-
path:
|
|
12965
|
+
__publicField(this, "inputValidationSchema", z33.object({
|
|
12966
|
+
path: z33.string().describe(
|
|
12448
12967
|
"Full local path to the cloned git repository to check for available fixes"
|
|
12449
12968
|
),
|
|
12450
|
-
offset:
|
|
12451
|
-
limit:
|
|
12969
|
+
offset: z33.number().optional().describe("Optional offset for pagination"),
|
|
12970
|
+
limit: z33.number().optional().describe("Optional maximum number of fixes to return")
|
|
12452
12971
|
}));
|
|
12453
12972
|
__publicField(this, "availableFixesService");
|
|
12454
12973
|
this.availableFixesService = FetchAvailableFixesService.getInstance();
|
|
@@ -12461,8 +12980,8 @@ Call this tool instead of scan_and_fix_vulnerabilities when you only need a fixe
|
|
|
12461
12980
|
`Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
|
|
12462
12981
|
);
|
|
12463
12982
|
}
|
|
12464
|
-
const
|
|
12465
|
-
const gitService = new GitService(
|
|
12983
|
+
const path13 = pathValidationResult.path;
|
|
12984
|
+
const gitService = new GitService(path13, log);
|
|
12466
12985
|
const gitValidation = await gitService.validateRepository();
|
|
12467
12986
|
if (!gitValidation.isValid) {
|
|
12468
12987
|
throw new Error(`Invalid git repository: ${gitValidation.error}`);
|
|
@@ -12495,57 +13014,12 @@ Call this tool instead of scan_and_fix_vulnerabilities when you only need a fixe
 };

 // src/mcp/tools/scanAndFixVulnerabilities/ScanAndFixVulnerabilitiesTool.ts
-import
+import z34 from "zod";

 // src/mcp/tools/scanAndFixVulnerabilities/ScanAndFixVulnerabilitiesService.ts
-import path13 from "path";
-
-// src/mcp/services/FilePacking.ts
-import fs10 from "fs";
-import path12 from "path";
-import AdmZip2 from "adm-zip";
-var MAX_FILE_SIZE2 = 1024 * 1024 * 5;
-var FilePacking = class {
-  async packFiles(sourceDirectoryPath, filesToPack) {
-    logInfo(`FilePacking: packing files from ${sourceDirectoryPath}`);
-    const zip = new AdmZip2();
-    let packedFilesCount = 0;
-    logInfo("FilePacking: compressing files");
-    for (const filepath of filesToPack) {
-      const absoluteFilepath = path12.join(sourceDirectoryPath, filepath);
-      if (!FileUtils.shouldPackFile(absoluteFilepath, MAX_FILE_SIZE2)) {
-        logInfo(
-          `FilePacking: ignoring ${filepath} because it is excluded or invalid`
-        );
-        continue;
-      }
-      let data;
-      try {
-        data = fs10.readFileSync(absoluteFilepath);
-      } catch (fsError) {
-        logInfo(
-          `FilePacking: failed to read ${filepath} from filesystem: ${fsError}`
-        );
-        continue;
-      }
-      zip.addFile(filepath, data);
-      packedFilesCount++;
-    }
-    const zipBuffer = zip.toBuffer();
-    logInfo(
-      `FilePacking: read ${packedFilesCount} source files. total size: ${zipBuffer.length} bytes`
-    );
-    logInfo("FilePacking: Files packed successfully");
-    return zipBuffer;
-  }
-};
-
-// src/mcp/tools/scanAndFixVulnerabilities/ScanAndFixVulnerabilitiesService.ts
-var VUL_REPORT_DIGEST_TIMEOUT_MS2 = 1e3 * 60 * 5;
 var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService {
   constructor() {
     __publicField(this, "gqlClient");
-    __publicField(this, "filePacking");
     /**
      * Stores the fix report id that is created on the first run so that subsequent
      * calls can skip the expensive packing/uploading/scan flow and directly fetch
@@ -12553,7 +13027,11 @@ var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService
      */
     __publicField(this, "storedFixReportId");
     __publicField(this, "currentOffset", 0);
-
+    /**
+     * Timestamp when the fixReportId was created
+     * Used to expire the fixReportId after REPORT_ID_EXPIRATION_MS hours
+     */
+    __publicField(this, "fixReportIdTimestamp");
   }
   static getInstance() {
     if (!_ScanAndFixVulnerabilitiesService.instance) {
@@ -12564,6 +13042,17 @@ var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService
   reset() {
     this.storedFixReportId = void 0;
     this.currentOffset = void 0;
+    this.fixReportIdTimestamp = void 0;
+  }
+  /**
+   * Checks if the stored fixReportId has expired (older than 2 hours)
+   */
+  isFixReportIdExpired() {
+    if (!this.fixReportIdTimestamp) {
+      return true;
+    }
+    const currentTime = Date.now();
+    return currentTime - this.fixReportIdTimestamp > MCP_REPORT_ID_EXPIRATION_MS;
   }
   async processVulnerabilities({
     fileList,
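The cached report id is now treated as stale after MCP_REPORT_ID_EXPIRATION_MS (2 hours), forcing a fresh scan instead of reusing an old report. The same TTL check in isolation, with illustrative names:

    const EXPIRATION_MS = 2 * 60 * 60 * 1e3; // 2 hours

    class ReportIdCache {
      set(id) {
        this.id = id;
        this.createdAt = Date.now();
      }
      get() {
        // a missing timestamp counts as expired, so the first call always rescans
        if (!this.createdAt || Date.now() - this.createdAt > EXPIRATION_MS) return undefined;
        return this.id;
      }
    }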
@@ -12576,19 +13065,20 @@ var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService
     this.gqlClient = await this.initializeGqlClient();
     logInfo("storedFixReportId", {
       storedFixReportId: this.storedFixReportId,
-      currentOffset: this.currentOffset
+      currentOffset: this.currentOffset,
+      fixReportIdTimestamp: this.fixReportIdTimestamp,
+      isExpired: this.storedFixReportId ? this.isFixReportIdExpired() : null
     });
     let fixReportId = this.storedFixReportId;
-    if (!fixReportId || isRescan) {
+    if (!fixReportId || isRescan || this.isFixReportIdExpired()) {
       this.reset();
       this.validateFiles(fileList);
-      const
-
-
-
-
-
-      await this.runScan({ fixReportId, projectId });
+      const scanResult = await scanFiles(
+        fileList,
+        repositoryPath,
+        this.gqlClient
+      );
+      fixReportId = scanResult.fixReportId;
     }
     const effectiveOffset = offset ?? (this.currentOffset || 0);
     logDebug("effectiveOffset", { effectiveOffset });
|
|
|
12597
13087
|
effectiveOffset,
|
|
12598
13088
|
limit
|
|
12599
13089
|
);
|
|
13090
|
+
if (fixes.totalCount > 0) {
|
|
13091
|
+
this.storedFixReportId = fixReportId;
|
|
13092
|
+
this.fixReportIdTimestamp = Date.now();
|
|
13093
|
+
} else {
|
|
13094
|
+
this.reset();
|
|
13095
|
+
}
|
|
12600
13096
|
const prompt = fixesPrompt({
|
|
12601
13097
|
fixes: fixes.fixes,
|
|
12602
13098
|
totalCount: fixes.totalCount,
|
|
12603
13099
|
offset: effectiveOffset,
|
|
12604
|
-
scannedFiles: fileList
|
|
13100
|
+
scannedFiles: [...fileList]
|
|
12605
13101
|
});
|
|
12606
13102
|
this.currentOffset = effectiveOffset + (fixes.fixes?.length || 0);
|
|
12607
13103
|
return prompt;
|
|
@@ -12626,101 +13122,6 @@ var _ScanAndFixVulnerabilitiesService = class _ScanAndFixVulnerabilitiesService
|
|
|
12626
13122
|
}
|
|
12627
13123
|
return gqlClient;
|
|
12628
13124
|
}
|
|
12629
|
-
async initializeReport() {
|
|
12630
|
-
if (!this.gqlClient) {
|
|
12631
|
-
throw new GqlClientError();
|
|
12632
|
-
}
|
|
12633
|
-
try {
|
|
12634
|
-
const {
|
|
12635
|
-
uploadS3BucketInfo: { repoUploadInfo }
|
|
12636
|
-
} = await this.gqlClient.uploadS3BucketInfo();
|
|
12637
|
-
logInfo("Upload info retrieved", { uploadKey: repoUploadInfo?.uploadKey });
|
|
12638
|
-
return repoUploadInfo;
|
|
12639
|
-
} catch (error) {
|
|
12640
|
-
const message = error.message;
|
|
12641
|
-
throw new ReportInitializationError(
|
|
12642
|
-
`Error initializing report: ${message}`
|
|
12643
|
-
);
|
|
12644
|
-
}
|
|
12645
|
-
}
|
|
12646
|
-
async packFiles(fileList, repositoryPath) {
|
|
12647
|
-
try {
|
|
12648
|
-
const zipBuffer = await this.filePacking.packFiles(
|
|
12649
|
-
repositoryPath,
|
|
12650
|
-
fileList
|
|
12651
|
-
);
|
|
12652
|
-
logInfo("Files packed successfully", { fileCount: fileList.length });
|
|
12653
|
-
return zipBuffer;
|
|
12654
|
-
} catch (error) {
|
|
12655
|
-
const message = error.message;
|
|
12656
|
-
throw new FileProcessingError(`Error packing files: ${message}`);
|
|
12657
|
-
}
|
|
12658
|
-
}
|
|
12659
|
-
async uploadFiles(zipBuffer, repoUploadInfo) {
|
|
12660
|
-
if (!repoUploadInfo) {
|
|
12661
|
-
throw new FileUploadError("Upload info is required");
|
|
12662
|
-
}
|
|
12663
|
-
try {
|
|
12664
|
-
await uploadFile({
|
|
12665
|
-
file: zipBuffer,
|
|
12666
|
-
url: repoUploadInfo.url,
|
|
12667
|
-
uploadFields: JSON.parse(repoUploadInfo.uploadFieldsJSON),
|
|
12668
|
-
uploadKey: repoUploadInfo.uploadKey
|
|
12669
|
-
});
|
|
12670
|
-
logInfo("File uploaded successfully");
|
|
12671
|
-
} catch (error) {
|
|
12672
|
-
logError("File upload failed", { error: error.message });
|
|
12673
|
-
throw new FileUploadError(
|
|
12674
|
-
`Failed to upload the file: ${error.message}`
|
|
12675
|
-
);
|
|
12676
|
-
}
|
|
12677
|
-
}
|
|
12678
|
-
async getProjectId() {
|
|
12679
|
-
if (!this.gqlClient) {
|
|
12680
|
-
throw new GqlClientError();
|
|
12681
|
-
}
|
|
12682
|
-
const projectId = await this.gqlClient.getProjectId();
|
|
12683
|
-
logInfo("Project ID retrieved", { projectId });
|
|
12684
|
-
return projectId;
|
|
12685
|
-
}
|
|
12686
|
-
async runScan(params) {
|
|
12687
|
-
if (!this.gqlClient) {
|
|
12688
|
-
throw new GqlClientError();
|
|
12689
|
-
}
|
|
12690
|
-
const { fixReportId, projectId } = params;
|
|
12691
|
-
logInfo("Starting scan", { fixReportId, projectId });
|
|
12692
|
-
const submitVulnerabilityReportVariables = {
|
|
12693
|
-
fixReportId,
|
|
12694
|
-
projectId,
|
|
12695
|
-
repoUrl: "",
|
|
12696
|
-
reference: "no-branch",
|
|
12697
|
-
scanSource: "MCP" /* Mcp */
|
|
12698
|
-
};
|
|
12699
|
-
logInfo("Submitting vulnerability report");
|
|
12700
|
-
const submitRes = await this.gqlClient.submitVulnerabilityReport(
|
|
12701
|
-
submitVulnerabilityReportVariables
|
|
12702
|
-
);
|
|
12703
|
-
if (submitRes.submitVulnerabilityReport.__typename !== "VulnerabilityReport") {
|
|
12704
|
-
logError("Vulnerability report submission failed", {
|
|
12705
|
-
response: submitRes
|
|
12706
|
-
});
|
|
12707
|
-
throw new ScanError("\u{1F575}\uFE0F\u200D\u2642\uFE0F Mobb analysis failed");
|
|
12708
|
-
}
|
|
12709
|
-
logInfo("Vulnerability report submitted successfully", {
|
|
12710
|
-
analysisId: submitRes.submitVulnerabilityReport.fixReportId
|
|
12711
|
-
});
|
|
12712
|
-
logInfo("Starting analysis subscription");
|
|
12713
|
-
await this.gqlClient.subscribeToGetAnalysis({
|
|
12714
|
-
subscribeToAnalysisParams: {
|
|
12715
|
-
analysisId: submitRes.submitVulnerabilityReport.fixReportId
|
|
12716
|
-
},
|
|
12717
|
-
callback: () => {
|
|
12718
|
-
},
|
|
12719
|
-
callbackStates: ["Finished" /* Finished */],
|
|
12720
|
-
timeoutInMs: VUL_REPORT_DIGEST_TIMEOUT_MS2
|
|
12721
|
-
});
|
|
12722
|
-
logInfo("Analysis subscription completed");
|
|
12723
|
-
}
|
|
12724
13125
|
async getReportFixes(fixReportId, offset, limit) {
|
|
12725
13126
|
logDebug("getReportFixes", { fixReportId, offset, limit });
|
|
12726
13127
|
if (!this.gqlClient) {
|
|
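The five helper methods deleted above (initializeReport, packFiles, uploadFiles, getProjectId, runScan) are replaced at the call site by a single scanFiles(fileList, repositoryPath, gqlClient) helper whose body is not shown in this diff. Below is a hedged sketch of how such a consolidation could chain the same steps; the fixReportId plumbing is an assumption, and packFiles/uploadFile are injected stand-ins modeled on the deleted code.

    // Hedged sketch; not the package's actual scanFiles implementation.
    // `deps` injects stand-ins for the deleted packFiles/uploadFiles helpers.
    async function scanFiles(fileList, repositoryPath, gqlClient, deps) {
      const { packFiles, uploadFile } = deps;
      // 1. Reserve an upload slot (previously initializeReport).
      const {
        uploadS3BucketInfo: { repoUploadInfo }
      } = await gqlClient.uploadS3BucketInfo();
      // 2. Zip and upload the candidate files (previously packFiles + uploadFiles).
      const zipBuffer = await packFiles(repositoryPath, fileList);
      await uploadFile({
        file: zipBuffer,
        url: repoUploadInfo.url,
        uploadFields: JSON.parse(repoUploadInfo.uploadFieldsJSON),
        uploadKey: repoUploadInfo.uploadKey
      });
      // 3. Submit the report and await the analysis (previously getProjectId + runScan).
      const projectId = await gqlClient.getProjectId();
      const submitRes = await gqlClient.submitVulnerabilityReport({
        fixReportId: repoUploadInfo.fixReportId, // assumed source of the id
        projectId,
        repoUrl: "",
        reference: "no-branch",
        scanSource: "MCP"
      });
      return { fixReportId: submitRes.submitVulnerabilityReport.fixReportId };
    }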
@@ -12761,10 +13162,13 @@ How to invoke:
 \u2022 Optional arguments:
   \u2013 offset (number): pagination offset used when the result set is large.
   \u2013 limit (number): maximum number of fixes to include in the response.
+  \u2013 maxFiles (number): maximum number of files to scan (default: ${MCP_DEFAULT_MAX_FILES_TO_SCAN}). Provide this value to increase the scope of the scan.
   \u2013 rescan (boolean): true to force a complete rescan even if cached results exist.
 
 Behaviour:
+\u2022 If the directory is a valid Git repository, the tool scans the changed files in the repository. If there are no changes, it scans the files included in the last commit.
 \u2022 If the directory is not a valid Git repository, the tool falls back to scanning recently changed files in the folder.
+\u2022 If maxFiles is provided, the tool scans the maxFiles most recently changed files in the repository.
 \u2022 By default, only new, modified, or staged files are scanned; if none are found, it checks recently changed files.
 \u2022 The tool NEVER commits or pushes changes; it only returns proposed diffs/fixes as text.
 
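The text above is the prose surfaced to MCP clients; the arguments it documents ride in a standard MCP tools/call JSON-RPC request. A sketch of that request shape follows; the tool name string scan_and_fix_vulnerabilities is an assumed placeholder, since the registered name is not visible in this diff.

    // Shape of a tools/call request an MCP client would send; the tool
    // name below is a hypothetical placeholder.
    const request = {
      jsonrpc: "2.0",
      id: 1,
      method: "tools/call",
      params: {
        name: "scan_and_fix_vulnerabilities",
        arguments: { path: "/home/user/my-project", maxFiles: 50, rescan: false }
      }
    };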
@@ -12777,15 +13181,19 @@ Example payload:
 {
   "path": "/home/user/my-project",
   "limit": 20,
+  "maxFiles": 50,
   "rescan": false
 }`);
-    __publicField(this, "inputValidationSchema",
-      path:
+    __publicField(this, "inputValidationSchema", z34.object({
+      path: z34.string().describe(
         "Full local path to repository to scan and fix vulnerabilities"
       ),
-      offset:
-      limit:
-
+      offset: z34.number().optional().describe("Optional offset for pagination"),
+      limit: z34.number().optional().describe("Optional maximum number of results to return"),
+      maxFiles: z34.number().optional().describe(
+        `Optional maximum number of files to scan (default: ${MCP_DEFAULT_MAX_FILES_TO_SCAN}). Increase for comprehensive scans of larger codebases or decrease for faster focused scans.`
+      ),
+      rescan: z34.boolean().optional().describe("Optional whether to rescan the repository")
     }));
     __publicField(this, "inputSchema", {
       type: "object",
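The rewritten inputValidationSchema is a plain zod object schema (z34 is the bundler's local alias for the zod namespace). Calling .parse on an arguments object returns the typed value or throws a ZodError; a minimal usage sketch with the same shape:

    import { z } from "zod";

    // Same shape as the schema above, without the bundler's z34 alias
    // and without the .describe annotations.
    const inputValidationSchema = z.object({
      path: z.string(),
      offset: z.number().optional(),
      limit: z.number().optional(),
      maxFiles: z.number().optional(),
      rescan: z.boolean().optional()
    });

    inputValidationSchema.parse({ path: "/home/user/my-project", maxFiles: 50 }); // ok
    // inputValidationSchema.parse({ path: 42 }); // would throw a ZodError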
@@ -12802,6 +13210,10 @@ Example payload:
         type: "number",
         description: "[Optional] maximum number of results to return"
       },
+      maxFiles: {
+        type: "number",
+        description: `[Optional] maximum number of files to scan (default: ${MCP_DEFAULT_MAX_FILES_TO_SCAN}). Use higher values for more comprehensive scans or lower values for faster performance.`
+      },
       rescan: {
         type: "boolean",
         description: "[Optional] whether to rescan the repository"
@@ -12824,43 +13236,14 @@ Example payload:
         `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
       );
     }
-    const
-    const
-
-
-
-
-
-
-      path: path14
-    }
-    );
-    files = FileUtils.getLastChangedFiles(path14);
-    logDebug("Found files in the repository", {
-      files,
-      fileCount: files.length
-    });
-    } else {
-      const gitResult = await gitService.getChangedFiles();
-      files = gitResult.files;
-      if (files.length === 0) {
-        const recentResult = await gitService.getRecentlyChangedFiles();
-        files = recentResult.files;
-        logDebug(
-          "No changes found, using recently changed files from git history",
-          {
-            files,
-            fileCount: files.length,
-            commitsChecked: recentResult.commitCount
-          }
-        );
-      } else {
-        logDebug("Found changed files in the git repository", {
-          files,
-          fileCount: files.length
-        });
-      }
-    }
+    const path13 = pathValidationResult.path;
+    const files = await getLocalFiles({
+      path: path13,
+      maxFileSize: 1024 * 1024 * 5,
+      // 5MB
+      maxFiles: args.maxFiles
+    });
+    logInfo("Files", { files });
     if (files.length === 0) {
       return {
         content: [
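getLocalFiles replaces the git-walking logic deleted above; it receives the validated path, a hard-coded 5 MB per-file cap (matching the MAX_UPLOAD_FILE_SIZE_MB default), and the optional maxFiles cap. Its implementation is outside this hunk. A rough sketch of the contract it appears to satisfy: it returns objects carrying a relativePath (the call site in the next hunk reads file.relativePath), filters by size, and caps by recency; the most-recently-changed-first ordering is an assumption based on the tool description earlier in this diff.

    import fs from "fs";
    import path from "path";

    // Rough sketch of the getLocalFiles contract; not the package's real
    // implementation (which would also handle subdirectories, binary files,
    // and git metadata).
    async function getLocalFiles({ path: root, maxFileSize, maxFiles }) {
      const entries = await fs.promises.readdir(root, { withFileTypes: true });
      const files = [];
      for (const entry of entries) {
        if (!entry.isFile()) continue;
        const absolutePath = path.join(root, entry.name);
        const stat = await fs.promises.stat(absolutePath);
        if (stat.size > maxFileSize) continue; // skip files over the upload cap
        files.push({ absolutePath, relativePath: entry.name, mtimeMs: stat.mtimeMs });
      }
      files.sort((a, b) => b.mtimeMs - a.mtimeMs); // most recently changed first (assumed)
      return files.slice(0, maxFiles ?? files.length);
    }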
@@ -12873,11 +13256,11 @@ Example payload:
     }
     try {
      const fixResult = await this.vulnerabilityFixService.processVulnerabilities({
-        fileList: files,
-        repositoryPath:
+        fileList: files.map((file) => file.relativePath),
+        repositoryPath: args.path,
         offset: args.offset,
         limit: args.limit,
-        isRescan: args.rescan
+        isRescan: args.rescan || !!args.maxFiles
       });
       const result = {
         content: [
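Two details in this hunk are easy to miss: fileList now passes relative path strings extracted from the richer file objects, and supplying maxFiles forces a rescan, because args.rescan || !!args.maxFiles coerces any provided cap to true:

    // Truth table for the rescan flag used above.
    const isRescan = (rescan, maxFiles) => rescan || !!maxFiles;
    isRescan(false, void 0); // false: the cached report can be reused
    isRescan(false, 50);     // true: a file cap changes scan scope, so rescan
    isRescan(true, void 0);  // true: explicit rescan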
@@ -12932,8 +13315,10 @@ function createMcpServer() {
   };
   const scanAndFixVulnerabilitiesTool = new ScanAndFixVulnerabilitiesTool();
   const fetchAvailableFixesTool = new FetchAvailableFixesTool();
+  const checkForNewAvailableFixesTool = new CheckForNewAvailableFixesTool();
   registerIfEnabled(scanAndFixVulnerabilitiesTool);
   registerIfEnabled(fetchAvailableFixesTool);
+  registerIfEnabled(checkForNewAvailableFixesTool);
   logInfo("MCP server created and configured");
   return server;
 }
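The new CheckForNewAvailableFixesTool is wired through the same registerIfEnabled gate as the existing tools. The gate's body is outside this hunk; purely as illustration, one common shape for such a gate is sketched below, where the register callback and isToolEnabled predicate are both assumptions rather than the package's actual API.

    // Illustrative only; registerIfEnabled's real body is not in this hunk.
    // `register` stands in for whatever MCP server registration call is used.
    function makeRegisterIfEnabled(register, isToolEnabled) {
      return (tool) => {
        if (!isToolEnabled(tool)) return; // hypothetical enablement check
        register(tool);
      };
    }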
@@ -12967,7 +13352,7 @@ var mcpHandler = async (_args) => {
 };
 
 // src/args/commands/review.ts
-import
+import fs12 from "fs";
 import chalk9 from "chalk";
 function reviewBuilder(yargs2) {
   return yargs2.option("f", {
@@ -13004,7 +13389,7 @@ function reviewBuilder(yargs2) {
   ).help();
 }
 function validateReviewOptions(argv) {
-  if (!
+  if (!fs12.existsSync(argv.f)) {
     throw new CliError(`
 Can't access ${chalk9.bold(argv.f)}`);
   }