mobbdev 1.0.188 → 1.0.190
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/args/commands/upload_ai_blame.d.mts +15 -0
- package/dist/args/commands/upload_ai_blame.mjs +5625 -0
- package/dist/index.d.mts +2 -0
- package/dist/index.mjs +352 -111
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -32,7 +32,7 @@ var init_env = __esm({
 });

 // src/mcp/core/configs.ts
-var MCP_DEFAULT_API_URL, MCP_API_KEY_HEADER_NAME, MCP_LOGIN_MAX_WAIT, MCP_LOGIN_CHECK_DELAY, MCP_VUL_REPORT_DIGEST_TIMEOUT_MS, MCP_MAX_FILE_SIZE, MCP_PERIODIC_CHECK_INTERVAL, MCP_DEFAULT_MAX_FILES_TO_SCAN, MCP_REPORT_ID_EXPIRATION_MS, MCP_TOOLS_BROWSER_COOLDOWN_MS, MCP_DEFAULT_LIMIT, isAutoScan, MVS_AUTO_FIX_OVERRIDE, MCP_AUTO_FIX_DEBUG_MODE, MCP_PERIODIC_TRACK_INTERVAL, MCP_DEFAULT_REST_API_URL;
+var MCP_DEFAULT_API_URL, MCP_API_KEY_HEADER_NAME, MCP_LOGIN_MAX_WAIT, MCP_LOGIN_CHECK_DELAY, MCP_VUL_REPORT_DIGEST_TIMEOUT_MS, MCP_MAX_FILE_SIZE, MCP_PERIODIC_CHECK_INTERVAL, MCP_DEFAULT_MAX_FILES_TO_SCAN, MCP_REPORT_ID_EXPIRATION_MS, MCP_TOOLS_BROWSER_COOLDOWN_MS, MCP_DEFAULT_LIMIT, isAutoScan, MVS_AUTO_FIX_OVERRIDE, MCP_AUTO_FIX_DEBUG_MODE, MCP_PERIODIC_TRACK_INTERVAL, MCP_DEFAULT_REST_API_URL, MCP_SYSTEM_FIND_TIMEOUT_MS;
 var init_configs = __esm({
   "src/mcp/core/configs.ts"() {
     "use strict";

@@ -53,6 +53,7 @@ var init_configs = __esm({
     MCP_AUTO_FIX_DEBUG_MODE = true;
     MCP_PERIODIC_TRACK_INTERVAL = 60 * 60 * 1e3;
     MCP_DEFAULT_REST_API_URL = "https://api.mobb.ai/api/rest/mcp/track";
+    MCP_SYSTEM_FIND_TIMEOUT_MS = 15 * 60 * 1e3;
   }
 });

@@ -1322,10 +1323,10 @@ import Debug20 from "debug";
 import { hideBin } from "yargs/helpers";

 // src/args/commands/convert_to_sarif.ts
-import
+import fs7 from "fs";

 // src/commands/convert_to_sarif.ts
-import
+import fs6 from "fs";
 import path5 from "path";

 // src/commands/fpr_stream_parser.ts

@@ -9086,6 +9087,7 @@ __export(utils_exports, {
   CliError: () => CliError,
   Spinner: () => Spinner,
   getDirName: () => getDirName,
+  getModuleRootDir: () => getModuleRootDir,
   getTopLevelDirName: () => getTopLevelDirName,
   keypress: () => keypress,
   packageJson: () => packageJson,

@@ -9093,8 +9095,20 @@ __export(utils_exports, {
 });

 // src/utils/dirname.ts
+import fs4 from "fs";
 import path3 from "path";
 import { fileURLToPath } from "url";
+function getModuleRootDir() {
+  let manifestDir = getDirName();
+  for (let i = 0; i < 10; i++) {
+    const manifestPath = path3.join(manifestDir, "package.json");
+    if (fs4.existsSync(manifestPath)) {
+      return manifestDir;
+    }
+    manifestDir = path3.join(manifestDir, "..");
+  }
+  throw new Error("Cannot locate package.json file");
+}
 function getDirName() {
   return path3.dirname(fileURLToPath(import.meta.url));
 }

@@ -9162,15 +9176,13 @@ function Spinner({ ci = false } = {}) {
 }

 // src/utils/check_node_version.ts
-import
+import fs5 from "fs";
 import path4 from "path";
 import semver from "semver";
 function getPackageJson() {
-
-
-
-  }
-  return JSON.parse(fs4.readFileSync(manifestPath, "utf8"));
+  return JSON.parse(
+    fs5.readFileSync(path4.join(getModuleRootDir(), "package.json"), "utf8")
+  );
 }
 var packageJson = getPackageJson();
 if (!semver.satisfies(process.version, packageJson.engines.node)) {
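The hunks above are the heart of this release: a new `getModuleRootDir` helper walks upward from the compiled module's directory until it finds a `package.json`, and `getPackageJson()` now reads the manifest through it instead of the old `fs4`/`manifestPath` lookup. A minimal standalone sketch of the same upward-search pattern (the names here are illustrative, not exports of the package):

```js
import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

// Walk upward from this module's directory until a package.json is found.
// Capped at 10 levels, mirroring the bounded loop in the diff above.
function findModuleRoot(startDir = path.dirname(fileURLToPath(import.meta.url))) {
  let dir = startDir;
  for (let i = 0; i < 10; i++) {
    if (fs.existsSync(path.join(dir, "package.json"))) return dir;
    dir = path.join(dir, "..");
  }
  throw new Error("Cannot locate package.json file");
}

const pkg = JSON.parse(
  fs.readFileSync(path.join(findModuleRoot(), "package.json"), "utf8")
);
console.log(pkg.name, pkg.version);
```

Bounding the loop keeps a misplaced bundle from walking all the way to the filesystem root, and the same root is reused below for `.env` loading.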
@@ -9236,7 +9248,7 @@ async function convertFprToSarif(inputFilePath, outputFilePath, codePathPatterns
     await auditXmlSaxParser.parse();
   }
   await zipIn.close();
-  const writer =
+  const writer = fs6.createWriteStream(outputFilePath);
   writer.write(`{
   "$schema": "https://json.schemastore.org/sarif-2.1.0.json",
   "version": "2.1.0",

@@ -9342,14 +9354,12 @@ import chalk2 from "chalk";

 // src/constants.ts
 import path6 from "path";
-import { fileURLToPath as fileURLToPath2 } from "url";
 import chalk from "chalk";
 import Debug4 from "debug";
 import * as dotenv from "dotenv";
 import { z as z24 } from "zod";
 var debug5 = Debug4("mobbdev:constants");
-
-dotenv.config({ path: path6.join(__dirname, "../.env") });
+dotenv.config({ path: path6.join(getModuleRootDir(), ".env") });
 var scmFriendlyText = {
   ["Ado" /* Ado */]: "Azure DevOps",
   ["Bitbucket" /* Bitbucket */]: "Bitbucket",

@@ -9570,7 +9580,7 @@ function convertToSarifBuilder(args) {
   ).help().demandOption(["input-file-path", "input-file-format", "output-file-path"]);
 }
 async function validateConvertToSarifOptions(args) {
-  if (!
+  if (!fs7.existsSync(args.inputFilePath)) {
     throw new CliError(
       "\nError: --input-file-path flag should point to an existing file"
     );

@@ -9604,14 +9614,14 @@ import chalk11 from "chalk";
 import yargs from "yargs/yargs";

 // src/args/commands/analyze.ts
-import
+import fs10 from "fs";

 // src/commands/index.ts
 import crypto from "crypto";
 import os from "os";

 // src/features/analysis/index.ts
-import
+import fs9 from "fs";
 import fsPromises2 from "fs/promises";
 import path9 from "path";
 import { env as env2 } from "process";

@@ -10891,7 +10901,7 @@ var GQLClient = class {

 // src/features/analysis/pack.ts
 init_configs();
-import
+import fs8 from "fs";
 import path7 from "path";
 import AdmZip from "adm-zip";
 import Debug13 from "debug";

@@ -10968,11 +10978,11 @@ async function pack(srcDirPath, vulnFiles, isIncludeAllFiles = false) {
         continue;
       }
     }
-    if (
+    if (fs8.lstatSync(absFilepath).size > MCP_MAX_FILE_SIZE) {
       debug14("ignoring %s because the size is > 5MB", filepath);
       continue;
     }
-    const data = git ? await git.showBuffer([`HEAD:./${filepath}`]) :
+    const data = git ? await git.showBuffer([`HEAD:./${filepath}`]) : fs8.readFileSync(absFilepath);
     if (isBinary2(null, data)) {
       debug14("ignoring %s because is seems to be a binary file", filepath);
       continue;
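For orientation, the `pack` change above routes the file-size guard and the non-git read path through the new `fs8` import. A rough sketch of the same filtering rules, assuming a 5 MB cap (the debug message says "> 5MB") and a naive NUL-byte heuristic standing in for the package's `isBinary2` check:

```js
import fs from "fs";

const MAX_FILE_SIZE = 5 * 1024 * 1024; // stands in for MCP_MAX_FILE_SIZE

// Decide whether a file belongs in the upload zip: skip oversized and binary files.
function shouldPack(absFilepath) {
  if (fs.lstatSync(absFilepath).size > MAX_FILE_SIZE) return false;
  const data = fs.readFileSync(absFilepath);
  // Assumption: a NUL byte in the first 8 KB marks the file as binary;
  // the package uses its own isBinary2 helper instead.
  return !data.subarray(0, 8192).includes(0);
}
```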
@@ -11132,8 +11142,8 @@ import path8 from "path";
 var debug15 = Debug15("mobbdev:checkmarx");
 var require2 = createRequire(import.meta.url);
 var getCheckmarxPath = () => {
-  const
-  const cxFileName =
+  const os6 = type();
+  const cxFileName = os6 === "Windows_NT" ? "cx.exe" : "cx";
   try {
     return require2.resolve(`.bin/${cxFileName}`);
   } catch (e) {

@@ -11396,13 +11406,13 @@ async function downloadRepo({
     repoSpinner.error({ text: "\u{1F4BE} Repo download failed" });
     throw new Error(`Can't access ${chalk5.bold(repoUrl)}`);
   }
-  const fileWriterStream =
+  const fileWriterStream = fs9.createWriteStream(zipFilePath);
   if (!response.body) {
     throw new Error("Response body is empty");
   }
   await pipeline(response.body, fileWriterStream);
   await extract(zipFilePath, { dir: dirname });
-  const repoRoot =
+  const repoRoot = fs9.readdirSync(dirname, { withFileTypes: true }).filter((dirent) => dirent.isDirectory()).map((dirent) => dirent.name)[0];
   if (!repoRoot) {
     throw new Error("Repo root not found");
   }
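The `downloadRepo` hunk streams the zip response straight to disk and then treats the first extracted directory as the repo root. A self-contained sketch of that flow with hypothetical URL and paths; `node-fetch` matches the `fetch5` import seen earlier in this diff, and the `extract(zipFilePath, { dir })` call matches the `extract-zip` API the bundle appears to use:

```js
import fs from "fs";
import { pipeline } from "stream/promises";
import fetch from "node-fetch";
import extract from "extract-zip";

// Hypothetical inputs for illustration only.
const repoZipUrl = "https://example.com/archive/main.zip";
const zipFilePath = "/tmp/repo.zip";
const destDir = "/tmp/repo";

const response = await fetch(repoZipUrl);
if (!response.ok || !response.body) throw new Error(`Can't access ${repoZipUrl}`);
await pipeline(response.body, fs.createWriteStream(zipFilePath));
await extract(zipFilePath, { dir: destDir });

// GitHub-style archives wrap everything in one top-level folder;
// the first directory entry is taken as the repo root, as in the hunk above.
const repoRoot = fs
  .readdirSync(destDir, { withFileTypes: true })
  .filter((d) => d.isDirectory())
  .map((d) => d.name)[0];
if (!repoRoot) throw new Error("Repo root not found");
```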
@@ -12331,7 +12341,7 @@ function analyzeBuilder(yargs2) {
   ).help();
 }
 function validateAnalyzeOptions(argv) {
-  if (argv.f && !
+  if (argv.f && !fs10.existsSync(argv.f)) {
     throw new CliError(`
 Can't access ${chalk8.bold(argv.f)}`);
   }

@@ -13223,13 +13233,15 @@ var McpGQLClient = class {
   async getLatestReportByRepoUrl({
     repoUrl,
     limit = MCP_DEFAULT_LIMIT,
-    offset = 0
+    offset = 0,
+    fileFilter
   }) {
     try {
       logDebug("[GraphQL] Calling GetLatestReportByRepoUrl query", {
         repoUrl,
         limit,
-        offset
+        offset,
+        fileFilter
       });
       let currentUserEmail = "%@%";
       try {

@@ -13242,11 +13254,18 @@ var McpGQLClient = class {
         error: err
       });
     }
+    const filters = {};
+    if (fileFilter && fileFilter.length > 0) {
+      filters["vulnerabilityReportIssues"] = {
+        codeNodes: { path: { _in: fileFilter } }
+      };
+    }
     const resp = await this.clientSdk.GetLatestReportByRepoUrl({
       repoUrl,
       limit,
       offset,
-      currentUserEmail
+      currentUserEmail,
+      filters
     });
     logDebug("[GraphQL] GetLatestReportByRepoUrl successful", {
       result: resp,

@@ -13278,7 +13297,8 @@ var McpGQLClient = class {
     limit = MCP_DEFAULT_LIMIT,
     offset = 0,
     issueType,
-    severity
+    severity,
+    fileFilter
   }) {
     const filters = {};
     if (issueType && issueType.length > 0) {

@@ -13287,6 +13307,11 @@ var McpGQLClient = class {
     if (severity && severity.length > 0) {
       filters["severityText"] = { _in: severity };
     }
+    if (fileFilter && fileFilter.length > 0) {
+      filters["vulnerabilityReportIssues"] = {
+        codeNodes: { path: { _in: fileFilter } }
+      };
+    }
     try {
       logDebug("[GraphQL] Calling GetReportFixes query", {
         reportId,

@@ -13294,7 +13319,8 @@ var McpGQLClient = class {
         offset,
         filters,
         issueType,
-        severity
+        severity,
+        fileFilter
       });
       let currentUserEmail = "%@%";
       try {
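The `fileFilter` plumbing added across these hunks builds a Hasura-style boolean expression and forwards it to the GraphQL SDK. For a filter of two (hypothetical) files, the `filters` object assembled by the code above would look like this:

```js
const fileFilter = ["src/app.ts", "src/db/queries.ts"]; // hypothetical paths

const filters = {};
if (fileFilter && fileFilter.length > 0) {
  filters["vulnerabilityReportIssues"] = {
    codeNodes: { path: { _in: fileFilter } }
  };
}
// filters === {
//   vulnerabilityReportIssues: { codeNodes: { path: { _in: [ ... ] } } }
// }
```

Leaving `filters` empty when no filter applies means the server-side `_in` clause is simply absent, so the query degrades to the old unfiltered behavior.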
@@ -13376,7 +13402,7 @@ async function createAuthenticatedMcpGQLClient({

 // src/mcp/services/McpUsageService/host.ts
 import { execSync } from "child_process";
-import
+import fs11 from "fs";
 import os3 from "os";
 import path11 from "path";
 var IDEs = ["cursor", "windsurf", "webstorm", "vscode", "claude"];

@@ -13395,15 +13421,15 @@ var getClaudeWorkspacePaths = () => {
   const home = os3.homedir();
   const claudeIdePath = path11.join(home, ".claude", "ide");
   const workspacePaths = [];
-  if (!
+  if (!fs11.existsSync(claudeIdePath)) {
     return workspacePaths;
   }
   try {
-    const lockFiles =
+    const lockFiles = fs11.readdirSync(claudeIdePath).filter((file) => file.endsWith(".lock"));
     for (const lockFile of lockFiles) {
       const lockFilePath = path11.join(claudeIdePath, lockFile);
       try {
-        const lockContent = JSON.parse(
+        const lockContent = JSON.parse(fs11.readFileSync(lockFilePath, "utf8"));
         if (lockContent.workspaceFolders && Array.isArray(lockContent.workspaceFolders)) {
           workspacePaths.push(...lockContent.workspaceFolders);
         }

@@ -13469,38 +13495,42 @@ var getMCPConfigPaths = (hostName) => {
   }
 };
 var readConfigFile = (filePath) => {
-  if (!
+  if (!fs11.existsSync(filePath)) return null;
   try {
-    return JSON.parse(
+    return JSON.parse(fs11.readFileSync(filePath, "utf8"));
   } catch (error) {
     logWarn(`[UsageService] Failed to read MCP config: ${filePath}`);
     return null;
   }
 };
+var mergeConfigIntoResult = (config4, mergedConfig) => {
+  if (config4?.projects) {
+    const allMcpServers = {};
+    for (const projectPath in config4.projects) {
+      const project = config4.projects[projectPath];
+      if (project?.mcpServers) {
+        Object.assign(allMcpServers, project.mcpServers);
+      }
+    }
+    mergedConfig.mcpServers = { ...mergedConfig.mcpServers, ...allMcpServers };
+  }
+  if (config4?.mcpServers) {
+    mergedConfig.mcpServers = {
+      ...mergedConfig.mcpServers,
+      ...config4.mcpServers
+    };
+  }
+  if (config4?.servers) {
+    mergedConfig.servers = { ...mergedConfig.servers, ...config4.servers };
+  }
+};
 var readMCPConfig = (hostName) => {
   const configPaths = getMCPConfigPaths(hostName);
   const mergedConfig = {};
   for (const configPath of configPaths) {
     const config4 = readConfigFile(configPath);
-    if (
-
-    for (const projectPath in config4.projects) {
-      const project = config4.projects[projectPath];
-      if (project?.mcpServers) {
-        Object.assign(allMcpServers, project.mcpServers);
-      }
-    }
-    mergedConfig.mcpServers = { ...mergedConfig.mcpServers, ...allMcpServers };
-    continue;
-    }
-    if (config4?.mcpServers) {
-      mergedConfig.mcpServers = {
-        ...mergedConfig.mcpServers,
-        ...config4.mcpServers
-      };
-    }
-    if (config4?.servers) {
-      mergedConfig.servers = { ...mergedConfig.servers, ...config4.servers };
+    if (config4) {
+      mergeConfigIntoResult(config4, mergedConfig);
     }
   }
   return Object.keys(mergedConfig).length > 0 ? mergedConfig : null;
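The refactor above replaces an inline merge loop with a single `mergeConfigIntoResult` helper covering three config shapes: a per-project `projects` map, a flat `mcpServers` map, and a `servers` map. A quick illustration with made-up configs exercising each branch, assuming the helper is in scope:

```js
const merged = {};

// Shape 1: servers nested under per-project entries (projects map).
mergeConfigIntoResult(
  { projects: { "/home/me/app": { mcpServers: { mobb: { command: "npx" } } } } },
  merged
);

// Shape 2: a flat mcpServers map.
mergeConfigIntoResult({ mcpServers: { linear: { command: "linear-mcp" } } }, merged);

// Shape 3: a "servers" map.
mergeConfigIntoResult({ servers: { github: { url: "http://localhost:3001" } } }, merged);

// merged.mcpServers now holds { mobb, linear }; merged.servers holds { github }.
```

Pulling the merge out of `readMCPConfig` also lets the new system-wide config scan below reuse it for files found outside the known IDE locations.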
@@ -13610,14 +13640,31 @@ var getProcessInfo = (pid) => {
     return null;
   }
 };
-var getHostInfo = () => {
+var getHostInfo = (additionalMcpList) => {
   const runningProcesses = getRunningProcesses().toLowerCase();
   const results = [];
   const allConfigs = {};
+  const ideConfigPaths = /* @__PURE__ */ new Set();
+  for (const ide of IDEs) {
+    const configPaths = getMCPConfigPaths(ide);
+    configPaths.forEach((path17) => ideConfigPaths.add(path17));
+  }
+  const uniqueAdditionalPaths = additionalMcpList.filter(
+    (path17) => !ideConfigPaths.has(path17)
+  );
   for (const ide of IDEs) {
     const cfg = readMCPConfig(ide);
     if (cfg) allConfigs[ide] = cfg;
   }
+  for (const additionalPath of uniqueAdditionalPaths) {
+    const config4 = readConfigFile(additionalPath);
+    if (!config4) continue;
+    const mergedConfig = {};
+    mergeConfigIntoResult(config4, mergedConfig);
+    if (Object.keys(mergedConfig).length > 0) {
+      allConfigs["system"] = mergedConfig;
+    }
+  }
   const servers = [];
   for (const [ide, cfg] of Object.entries(allConfigs)) {
     for (const [name, server] of Object.entries(

@@ -13718,8 +13765,90 @@ var getHostInfo = () => {
 // src/mcp/services/McpUsageService/McpUsageService.ts
 init_configs();
 import fetch5 from "node-fetch";
-import
+import os5 from "os";
 import { v4 as uuidv43, v5 as uuidv5 } from "uuid";
+
+// src/mcp/services/McpUsageService/system.ts
+init_configs();
+import { spawn } from "child_process";
+import os4 from "os";
+var findSystemMCPConfigs = async () => {
+  try {
+    const platform = os4.platform();
+    let command;
+    let args;
+    if (platform === "win32") {
+      command = "powershell";
+      args = [
+        "-NoProfile",
+        "-Command",
+        "Get-ChildItem -Path $env:USERPROFILE -Recurse -Include *mcp*.json,*claude*.json -ErrorAction SilentlyContinue | ForEach-Object { $_.FullName }"
+      ];
+    } else {
+      const home = os4.homedir();
+      command = "find";
+      args = [
+        home,
+        "-type",
+        "f",
+        "(",
+        "-iname",
+        "*mcp*.json",
+        "-o",
+        "-iname",
+        "*claude*.json",
+        ")"
+      ];
+    }
+    return await new Promise((resolve) => {
+      const child = spawn(command, args, {
+        stdio: ["ignore", "pipe", "pipe"],
+        shell: platform === "win32"
+        // needed for PowerShell
+      });
+      let output = "";
+      let errorOutput = "";
+      const timer = setTimeout(() => {
+        child.kill("SIGTERM");
+        logWarn(
+          `MCP config search timed out after ${MCP_SYSTEM_FIND_TIMEOUT_MS / 1e3}s`
+        );
+        resolve([]);
+      }, MCP_SYSTEM_FIND_TIMEOUT_MS);
+      child.stdout.on("data", (data) => {
+        output += data.toString();
+      });
+      child.stderr.on("data", (data) => {
+        const msg = data.toString();
+        if (!msg.includes("Operation not permitted") && !msg.includes("Permission denied") && !msg.includes("Access is denied")) {
+          errorOutput += msg;
+        }
+      });
+      child.on("error", (err) => {
+        clearTimeout(timer);
+        logWarn("MCP config search failed to start", { err });
+        resolve([]);
+      });
+      child.on("close", (code) => {
+        clearTimeout(timer);
+        if (code === 0 || output.trim().length > 0) {
+          const files = output.split(/\r?\n/).map((f) => f.trim()).filter(Boolean);
+          resolve(files);
+        } else {
+          if (errorOutput.trim().length > 0) {
+            logWarn("MCP config search finished with warnings", { errorOutput });
+          }
+          resolve([]);
+        }
+      });
+    });
+  } catch (err) {
+    logWarn("MCP config search unexpected error", { err });
+    return [];
+  }
+};
+
+// src/mcp/services/McpUsageService/McpUsageService.ts
 var McpUsageService = class {
   constructor(govOrgId) {
     __publicField(this, "configKey", "mcpUsage");
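On macOS and Linux, the spawn above amounts to running `find "$HOME" -type f \( -iname "*mcp*.json" -o -iname "*claude*.json" \)`; no shell is involved, so the parenthesis tokens are passed as literal arguments, and the whole search is capped by the new 15-minute `MCP_SYSTEM_FIND_TIMEOUT_MS`. A trimmed sketch of the same timeout-guarded spawn pattern (the function name is illustrative):

```js
import { spawn } from "child_process";

// Run a command, resolve with its stdout lines, and give up after timeoutMs.
// Mirrors the guard in findSystemMCPConfigs above.
function runWithTimeout(command, args, timeoutMs) {
  return new Promise((resolve) => {
    const child = spawn(command, args, { stdio: ["ignore", "pipe", "pipe"] });
    let output = "";
    const timer = setTimeout(() => {
      child.kill("SIGTERM"); // stop the search rather than hang the MCP server
      resolve([]);
    }, timeoutMs);
    child.stdout.on("data", (d) => (output += d.toString()));
    child.on("error", () => {
      clearTimeout(timer);
      resolve([]);
    });
    child.on("close", () => {
      clearTimeout(timer);
      resolve(output.split(/\r?\n/).map((f) => f.trim()).filter(Boolean));
    });
  });
}
```

Resolving with an empty list on timeout or spawn failure keeps usage tracking best-effort: a slow home-directory walk degrades the telemetry rather than the tool.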
@@ -13734,6 +13863,10 @@ var McpUsageService = class {
       this.REST_API_URL = `${domain}/api/rest/mcp/track`;
     }
   }
+  async performSystemSearchAndTracking() {
+    const additionalMcpList = await findSystemMCPConfigs();
+    await this.trackServerStart(additionalMcpList);
+  }
   startPeriodicTracking() {
     if (!this.hasOrganizationId()) {
       logDebug(

@@ -13742,17 +13875,21 @@ var McpUsageService = class {
       return;
     }
     logDebug(`[UsageService] Starting periodic tracking for mcps`, {});
+    if (this.intervalId) {
+      return;
+    }
+    setTimeout(() => this.performSystemSearchAndTracking(), 0);
     this.intervalId = setInterval(async () => {
       logDebug(`[UsageService] Triggering periodic usage service`, {
         MCP_PERIODIC_TRACK_INTERVAL
       });
-      await this.
-    },
+      await this.performSystemSearchAndTracking();
+    }, MCP_PERIODIC_TRACK_INTERVAL);
   }
   generateHostId() {
     const stored = configStore.get(this.configKey);
     if (stored?.mcpHostId) return stored.mcpHostId;
-    const interfaces =
+    const interfaces = os5.networkInterfaces();
     const macs = [];
     for (const iface of Object.values(interfaces)) {
       if (!iface) continue;

@@ -13760,7 +13897,7 @@ var McpUsageService = class {
       if (net.mac && net.mac !== "00:00:00:00:00:00") macs.push(net.mac);
     }
   }
-  const macString = macs.length ? macs.sort().join(",") : `${
+  const macString = macs.length ? macs.sort().join(",") : `${os5.hostname()}-${uuidv43()}`;
   const hostId = uuidv5(macString, uuidv5.DNS);
   logDebug("[UsageService] Generated new host ID", { hostId });
   return hostId;
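Host identification here is deterministic where possible: the sorted MAC addresses are hashed through UUID v5 (namespaced with `uuidv5.DNS`), so the same machine reports the same ID across runs, and only the hostname-plus-random fallback is unstable, which is why the result is cached in the config store. A sketch using the same `uuid` package:

```js
import os from "os";
import { v4 as uuidv4, v5 as uuidv5 } from "uuid";

// Collect non-loopback MAC addresses and derive a stable v5 UUID from them.
function makeHostId() {
  const macs = [];
  for (const iface of Object.values(os.networkInterfaces())) {
    if (!iface) continue;
    for (const net of iface) {
      if (net.mac && net.mac !== "00:00:00:00:00:00") macs.push(net.mac);
    }
  }
  // Sorting makes the input order-independent; the fallback seed is random,
  // so callers should cache the result, as the package does.
  const seed = macs.length ? macs.sort().join(",") : `${os.hostname()}-${uuidv4()}`;
  return uuidv5(seed, uuidv5.DNS);
}
```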
@@ -13777,13 +13914,13 @@ var McpUsageService = class {
   hasOrganizationId() {
     return !!this.govOrgId;
   }
-  createUsageData(mcpHostId, organizationId, status) {
-    const { user, mcps } = getHostInfo();
+  createUsageData(mcpHostId, organizationId, status, additionalMcpList) {
+    const { user, mcps } = getHostInfo(additionalMcpList);
     return {
       mcpHostId,
       organizationId,
       mcpVersion: packageJson.version,
-      mcpOsName:
+      mcpOsName: os5.platform(),
       mcps: JSON.stringify(mcps),
       status,
       userName: user.name,

@@ -13792,7 +13929,10 @@ var McpUsageService = class {
       // it's used to make sure we track the mcp usage daily
     };
   }
-  async trackUsage(
+  async trackUsage({
+    status,
+    additionalMcpList
+  }) {
     try {
       const hostId = this.generateHostId();
       const organizationId = this.getOrganizationId();

@@ -13802,7 +13942,12 @@ var McpUsageService = class {
        );
        return;
      }
-      const usageData = this.createUsageData(
+      const usageData = this.createUsageData(
+        hostId,
+        organizationId,
+        status,
+        additionalMcpList
+      );
      const stored = configStore.get(this.configKey);
      const hasChanges = !stored || Object.keys(usageData).some(
        (key) => usageData[key] !== stored[key]

@@ -13847,11 +13992,11 @@ var McpUsageService = class {
       );
     }
   }
-  async trackServerStart() {
-    await this.trackUsage("ACTIVE");
+  async trackServerStart(additionalMcpList = []) {
+    await this.trackUsage({ status: "ACTIVE", additionalMcpList });
   }
   async trackServerStop() {
-    await this.trackUsage("INACTIVE");
+    await this.trackUsage({ status: "INACTIVE", additionalMcpList: [] });
   }
   reset() {
     if (!this.intervalId) {

@@ -14336,7 +14481,7 @@ var McpServer = class {
 import { z as z34 } from "zod";

 // src/mcp/services/PathValidation.ts
-import
+import fs12 from "fs";
 import path12 from "path";
 async function validatePath(inputPath) {
   logDebug("Validating MCP path", { inputPath });

@@ -14396,7 +14541,7 @@ async function validatePath(inputPath) {
   logDebug("Path validation successful", { inputPath });
   logDebug("Checking path existence", { inputPath });
   try {
-    await
+    await fs12.promises.access(inputPath);
     logDebug("Path exists and is accessible", { inputPath });
     WorkspaceService.setKnownWorkspacePath(inputPath);
     logDebug("Stored validated path in WorkspaceService", { inputPath });

@@ -14984,28 +15129,59 @@ ${autoFixSettingsSection}

 ${whatHappensNextSection}`;
 };
+var noChangedFilesFoundPrompt = `\u{1F50D} **MOBB SECURITY SCAN: NO CHANGED FILES DETECTED**
+
+## \u{1F4CB} Current Status
+
+No changed files were found in the working directory for security scanning.
+
+## \u{1F914} What This Means
+
+This situation occurs when:
+\u2022 **Clean Working Directory**: All files are committed and there are no uncommitted changes
+\u2022 **Fresh Repository**: The repository has been recently cloned or initialized
+\u2022 **All Changes Committed**: Recent modifications have already been committed to git
+
+If you wish to scan files that were recently changed in your git history call the tool with the following parameters:
+
+\`\`\`json
+{
+  "path": "/path/to/your/repository",
+  "maxFiles": 50,
+  "rescan": true,
+  "scanRecentlyChangedFiles": true
+}
+\`\`\`
+
+
+\u2022 **scanRecentlyChangedFiles**: Set to \`true\` to automatically scan recently modified files from git history
+\u2022 **maxFiles**: Specify the maximum number of files to scan (higher = more comprehensive) (default: ${MCP_DEFAULT_MAX_FILES_TO_SCAN})
+\u2022 **rescan**: Set to \`true\` to force a complete fresh analysis
+`;

 // src/mcp/services/GetLocalFiles.ts
 init_FileUtils();
 init_GitService();
 init_configs();
-import
+import fs13 from "fs/promises";
 import nodePath from "path";
 var getLocalFiles = async ({
   path: path17,
   maxFileSize = MCP_MAX_FILE_SIZE,
   maxFiles,
   isAllFilesScan,
-  scanContext
+  scanContext,
+  scanRecentlyChangedFiles
 }) => {
   logDebug(`[${scanContext}] Starting getLocalFiles`, {
     path: path17,
     maxFileSize,
     maxFiles,
-    isAllFilesScan
+    isAllFilesScan,
+    scanRecentlyChangedFiles
   });
   try {
-    const resolvedRepoPath = await
+    const resolvedRepoPath = await fs13.realpath(path17);
     logDebug(`[${scanContext}] Resolved repository path`, {
       resolvedRepoPath,
       originalPath: path17

@@ -15040,10 +15216,10 @@ var getLocalFiles = async ({
   try {
     const gitResult = await gitService.getChangedFiles();
     files = gitResult.files;
-    if (files.length === 0 || maxFiles) {
+    if ((files.length === 0 || maxFiles) && (scanRecentlyChangedFiles || maxFiles)) {
       logDebug(
         `[${scanContext}] No changes found or maxFiles specified, getting recently changed files`,
-        { maxFiles }
+        { maxFiles, scanRecentlyChangedFiles }
       );
       const recentResult = await gitService.getRecentlyChangedFiles({
         maxFiles

@@ -15085,7 +15261,7 @@ var getLocalFiles = async ({
       absoluteFilePath
     );
     try {
-      const fileStat = await
+      const fileStat = await fs13.stat(absoluteFilePath);
       return {
         filename: nodePath.basename(absoluteFilePath),
         relativePath,

@@ -15120,7 +15296,7 @@ var getLocalFiles = async ({
 };

 // src/mcp/services/LocalMobbFolderService.ts
-import
+import fs14 from "fs";
 import path13 from "path";
 import { z as z33 } from "zod";
 init_GitService();

@@ -15212,15 +15388,15 @@ var LocalMobbFolderService = class {
       this.repoPath,
       this.defaultMobbFolderName
     );
-    if (!
+    if (!fs14.existsSync(mobbFolderPath)) {
       logInfo("[LocalMobbFolderService] Creating .mobb folder", {
         mobbFolderPath
       });
-
+      fs14.mkdirSync(mobbFolderPath, { recursive: true });
     } else {
       logDebug("[LocalMobbFolderService] .mobb folder already exists");
     }
-    const stats =
+    const stats = fs14.statSync(mobbFolderPath);
     if (!stats.isDirectory()) {
       throw new Error(`Path exists but is not a directory: ${mobbFolderPath}`);
     }

@@ -15261,13 +15437,13 @@ var LocalMobbFolderService = class {
       logDebug("[LocalMobbFolderService] Git repository validated successfully");
     } else {
       try {
-        const stats =
+        const stats = fs14.statSync(this.repoPath);
         if (!stats.isDirectory()) {
           throw new Error(
             `Path exists but is not a directory: ${this.repoPath}`
           );
         }
-
+        fs14.accessSync(this.repoPath, fs14.constants.R_OK | fs14.constants.W_OK);
         logDebug(
           "[LocalMobbFolderService] Non-git directory validated successfully"
         );

@@ -15381,7 +15557,7 @@ var LocalMobbFolderService = class {
       baseFileName
     );
     const filePath = path13.join(mobbFolderPath, uniqueFileName);
-    await
+    await fs14.promises.writeFile(filePath, patch, "utf8");
     logInfo("[LocalMobbFolderService] Patch saved successfully", {
       filePath,
       fileName: uniqueFileName,

@@ -15442,7 +15618,7 @@ var LocalMobbFolderService = class {
     const extension = path13.parse(baseFileName).ext;
     let uniqueFileName = baseFileName;
     let index = 1;
-    while (
+    while (fs14.existsSync(path13.join(folderPath, uniqueFileName))) {
       uniqueFileName = `${baseName}-${index}${extension}`;
       index++;
       if (index > 1e3) {

@@ -15476,15 +15652,15 @@ var LocalMobbFolderService = class {
     const patchInfoPath = path13.join(mobbFolderPath, "patchInfo.md");
     const markdownContent = this.generateFixMarkdown(fix, savedPatchFileName);
     let existingContent = "";
-    if (
-      existingContent = await
+    if (fs14.existsSync(patchInfoPath)) {
+      existingContent = await fs14.promises.readFile(patchInfoPath, "utf8");
       logDebug("[LocalMobbFolderService] Existing patchInfo.md found");
     } else {
       logDebug("[LocalMobbFolderService] Creating new patchInfo.md file");
     }
     const separator = existingContent ? "\n\n================================================================================\n\n" : "";
     const updatedContent = `${markdownContent}${separator}${existingContent}`;
-    await
+    await fs14.promises.writeFile(patchInfoPath, updatedContent, "utf8");
     logInfo("[LocalMobbFolderService] Patch info logged successfully", {
       patchInfoPath,
       fixId: fix.id,

@@ -15857,7 +16033,7 @@ import {
   unlinkSync,
   writeFileSync
 } from "fs";
-import
+import fs15 from "fs/promises";
 import parseDiff2 from "parse-diff";
 import path14 from "path";
 var PatchApplicationService = class {

@@ -16355,7 +16531,7 @@ var PatchApplicationService = class {
     try {
       const absolutePath = path14.resolve(repositoryPath, targetFile);
       if (existsSync2(absolutePath)) {
-        const stats = await
+        const stats = await fs15.stat(absolutePath);
         const fileModTime = stats.mtime.getTime();
         if (fileModTime > scanStartTime) {
           logError(

@@ -16396,7 +16572,7 @@ var PatchApplicationService = class {
     const appliedFixes = [];
     const failedFixes = [];
     const skippedFixes = [];
-    const resolvedRepoPath = await
+    const resolvedRepoPath = await fs15.realpath(repositoryPath);
     logInfo(
       `[${scanContext}] Starting patch application for ${fixes.length} fixes`,
       {

@@ -16825,7 +17001,7 @@ init_configs();

 // src/mcp/services/FileOperations.ts
 init_FileUtils();
-import
+import fs16 from "fs";
 import path15 from "path";
 import AdmZip2 from "adm-zip";
 var FileOperations = class {

@@ -16908,7 +17084,7 @@ var FileOperations = class {
       continue;
     }
     try {
-      await
+      await fs16.promises.access(absoluteFilepath, fs16.constants.R_OK);
       validatedPaths.push(filepath);
     } catch (error) {
       logDebug(

@@ -16927,7 +17103,7 @@ var FileOperations = class {
     const fileDataArray = [];
     for (const absolutePath of filePaths) {
       try {
-        const content = await
+        const content = await fs16.promises.readFile(absolutePath);
        const relativePath = path15.basename(absolutePath);
        fileDataArray.push({
          relativePath,

@@ -16953,7 +17129,7 @@ var FileOperations = class {
     relativeFilepath
   }) {
     try {
-      return await
+      return await fs16.promises.readFile(absoluteFilepath);
     } catch (fsError) {
       logError(
         `[FileOperations] Failed to read ${relativeFilepath} from filesystem: ${fsError}`

@@ -17265,10 +17441,12 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
       `[${scanContext}] Connected to the API, assembling list of files to scan`,
       { path: path17 }
     );
+    const isBackgroundScan = scanContext === ScanContext.BACKGROUND_INITIAL || scanContext === ScanContext.BACKGROUND_PERIODIC;
     const files = await getLocalFiles({
       path: path17,
       isAllFilesScan,
-      scanContext
+      scanContext,
+      scanRecentlyChangedFiles: !isBackgroundScan
     });
     const scanStartTime = Date.now();
     logDebug(

@@ -17859,19 +18037,21 @@ var _FetchAvailableFixesService = class _FetchAvailableFixesService {
   async checkForAvailableFixes({
     repoUrl,
     limit = MCP_DEFAULT_LIMIT,
-    offset
+    offset,
+    fileFilter
   }) {
     try {
-      logDebug("Checking for available fixes", { repoUrl, limit });
+      logDebug("Checking for available fixes", { repoUrl, limit, fileFilter });
       const gqlClient = await this.initializeGqlClient();
       logDebug("GQL client initialized");
-      logDebug("querying for latest report", { repoUrl, limit });
+      logDebug("querying for latest report", { repoUrl, limit, fileFilter });
       const effectiveOffset = offset ?? (this.currentOffset || 0);
       logDebug("effectiveOffset", { effectiveOffset });
       const { fixReport, expiredReport } = await gqlClient.getLatestReportByRepoUrl({
         repoUrl,
         limit,
-        offset: effectiveOffset
+        offset: effectiveOffset,
+        fileFilter
       });
       logDebug("received latest report result", { fixReport, expiredReport });
       if (!fixReport) {

@@ -17926,11 +18106,20 @@ Required argument:
 Optional arguments:
 \u2022 offset \u2013 pagination offset (integer).
 \u2022 limit \u2013 maximum number of fixes to return (integer).
+\u2022 fileFilter \u2013 list of file paths relative to the path parameter to filter fixes by. Only fixes affecting these files will be returned. INCOMPATIBLE with fetchFixesFromAnyFile.
+\u2022 fetchFixesFromAnyFile \u2013 if true, fetches fixes for all files in the repository. If false or not set (default), filters fixes to only those affecting files with changes in git status. INCOMPATIBLE with fileFilter.

 The tool will:
 1. Validate that the provided path is secure and exists.
 2. Verify that the directory is a valid Git repository with an "origin" remote.
-3.
+3. Apply file filtering based on parameters (see below).
+4. Query the MOBB service by the origin remote URL and return a textual summary of available fixes (total and by severity) or a message if none are found.
+
+File Filtering Behavior:
+\u2022 If fetchFixesFromAnyFile is true: Returns fixes for all files (no filtering).
+\u2022 If fileFilter is provided: Returns only fixes affecting the specified files.
+\u2022 If neither is provided (default): Returns only fixes affecting files with changes in git status.
+\u2022 If BOTH are provided: Returns an error (parameters are mutually exclusive).

 Call this tool instead of ${MCP_TOOL_SCAN_AND_FIX_VULNERABILITIES} when you only need a fixes summary and do NOT want to perform scanning or code modifications.`);
 __publicField(this, "hasAuthentication", true);

@@ -17948,6 +18137,17 @@ Call this tool instead of ${MCP_TOOL_SCAN_AND_FIX_VULNERABILITIES} when you only
       limit: {
         type: "number",
         description: "[Optional] maximum number of results to return"
+      },
+      fileFilter: {
+        type: "array",
+        items: {
+          type: "string"
+        },
+        description: "[Optional] list of file paths relative to the path parameter to filter fixes by. Only fixes affecting these files will be returned. INCOMPATIBLE with fetchFixesFromAnyFile"
+      },
+      fetchFixesFromAnyFile: {
+        type: "boolean",
+        description: "[Optional] if true, fetches fixes for all files in the repository. If false or not set, filters fixes to only those affecting files with changes in git status. INCOMPATIBLE with fileFilter"
       }
     },
     required: ["path"]

@@ -17957,7 +18157,13 @@ Call this tool instead of ${MCP_TOOL_SCAN_AND_FIX_VULNERABILITIES} when you only
       "Full local path to the cloned git repository to check for available fixes"
     ),
     offset: z35.number().optional().describe("Optional offset for pagination"),
-    limit: z35.number().optional().describe("Optional maximum number of fixes to return")
+    limit: z35.number().optional().describe("Optional maximum number of fixes to return"),
+    fileFilter: z35.array(z35.string()).optional().describe(
+      "Optional list of file paths relative to the path parameter to filter fixes by. INCOMPATIBLE with fetchFixesFromAnyFile"
+    ),
+    fetchFixesFromAnyFile: z35.boolean().optional().describe(
+      "Optional boolean to fetch fixes for all files. INCOMPATIBLE with fileFilter"
+    )
   }));
   __publicField(this, "availableFixesService");
   this.availableFixesService = FetchAvailableFixesService.getInstance();

@@ -17984,10 +18190,37 @@ Call this tool instead of ${MCP_TOOL_SCAN_AND_FIX_VULNERABILITIES} when you only
     if (!originUrl) {
       throw new Error("No origin URL found for the repository");
     }
+    if (args.fileFilter && args.fetchFixesFromAnyFile) {
+      throw new Error(
+        'Parameters "fileFilter" and "fetchFixesFromAnyFile" are mutually exclusive. Please provide only one of these parameters:\n - Use "fileFilter" to specify a custom list of files to filter by\n - Use "fetchFixesFromAnyFile: true" to fetch fixes for all files without filtering\n - Use neither to automatically filter by files with changes in git status (default behavior)'
+      );
+    }
+    let actualFileFilter;
+    if (args.fetchFixesFromAnyFile === true) {
+      actualFileFilter = void 0;
+      logDebug("Fetching fixes for all files (no filtering)");
+    } else if (args.fileFilter && args.fileFilter.length > 0) {
+      actualFileFilter = args.fileFilter;
+      logDebug("Using provided file filter", { fileFilter: actualFileFilter });
+    } else {
+      logDebug("Getting files from git status for filtering");
+      const gitStatusResult = await gitService.getChangedFiles();
+      if (gitStatusResult.files.length === 0) {
+        logDebug("No changed files found in git status");
+        actualFileFilter = void 0;
+      } else {
+        actualFileFilter = gitStatusResult.files;
+        logDebug("Using files from git status as filter", {
+          fileCount: actualFileFilter.length,
+          files: actualFileFilter
+        });
+      }
+    }
     const fixResult = await this.availableFixesService.checkForAvailableFixes({
       repoUrl: originUrl,
       limit: args.limit,
-      offset: args.offset
+      offset: args.offset,
+      fileFilter: actualFileFilter
     });
     logDebug("FetchAvailableFixesTool execution completed successfully", {
       fixResult
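Putting the new tool parameters together: a caller supplies either `fileFilter` or `fetchFixesFromAnyFile`, never both, and omitting both falls back to the files reported by `git status`. Example argument objects (hypothetical paths):

```js
// Valid: explicit filter.
const byFilter = {
  path: "/path/to/your/repository",
  fileFilter: ["src/auth/login.ts", "src/db/queries.ts"]
};

// Valid: disable filtering entirely.
const allFiles = { path: "/path/to/your/repository", fetchFixesFromAnyFile: true };

// Invalid: both at once; the tool throws the mutual-exclusion error shown above.
const rejected = {
  path: "/path/to/your/repository",
  fileFilter: ["src/auth/login.ts"],
  fetchFixesFromAnyFile: true
};
```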
@@ -18023,7 +18256,7 @@ var _McpCheckerService = class _McpCheckerService {
       };
     }
     logInfo("Executing built-in mcp_checker tool");
-    const hostInfo = getHostInfo();
+    const hostInfo = getHostInfo([]);
     const mcpServersInfo = hostInfo.mcps.filter((mcp) => mcp.mcpName !== "unknown").map(
       (mcp) => `- ${mcp.mcpName} (${mcp.ideName} ${mcp.ideVersion}): ${mcp.isRunning ? "\u2705 Running" : "\u274C Not Running"}`
     ).join("\n");

@@ -18280,7 +18513,10 @@ Example payload:
     maxFiles: z37.number().optional().describe(
       `Optional maximum number of files to scan (default: ${MCP_DEFAULT_MAX_FILES_TO_SCAN}). Increase for comprehensive scans of larger codebases or decrease for faster focused scans.`
     ),
-    rescan: z37.boolean().optional().describe("Optional whether to rescan the repository")
+    rescan: z37.boolean().optional().describe("Optional whether to rescan the repository"),
+    scanRecentlyChangedFiles: z37.boolean().optional().describe(
+      "Optional whether to automatically scan recently changed files when no changed files are found in git status. If false, the tool will prompt the user instead."
+    )
   }));
   __publicField(this, "inputSchema", {
     type: "object",

@@ -18304,6 +18540,10 @@ Example payload:
       rescan: {
         type: "boolean",
         description: "[Optional] whether to rescan the repository"
+      },
+      scanRecentlyChangedFiles: {
+        type: "boolean",
+        description: "[Optional] whether to automatically scan recently changed files when no changed files are found in git status. If false, the tool will prompt the user instead."
       }
     },
     required: ["path"]

@@ -18330,7 +18570,8 @@ Example payload:
       path: path17,
       maxFileSize: MCP_MAX_FILE_SIZE,
       maxFiles: args.maxFiles,
-      scanContext: ScanContext.USER_REQUEST
+      scanContext: ScanContext.USER_REQUEST,
+      scanRecentlyChangedFiles: args.scanRecentlyChangedFiles
     });
     logDebug("Files", { files });
     if (files.length === 0) {

@@ -18338,7 +18579,7 @@ Example payload:
       content: [
         {
           type: "text",
-          text:
+          text: noChangedFilesFoundPrompt
         }
       ]
     };

@@ -18437,7 +18678,7 @@ var mcpHandler = async (_args) => {
 };

 // src/args/commands/review.ts
-import
+import fs17 from "fs";
 import chalk9 from "chalk";
 function reviewBuilder(yargs2) {
   return yargs2.option("f", {

@@ -18474,7 +18715,7 @@ function reviewBuilder(yargs2) {
   ).help();
 }
 function validateReviewOptions(argv) {
-  if (!
+  if (!fs17.existsSync(argv.f)) {
     throw new CliError(`
 Can't access ${chalk9.bold(argv.f)}`);
   }

@@ -18547,7 +18788,7 @@ async function addScmTokenHandler(args) {
 }

 // src/args/commands/upload_ai_blame.ts
-import
+import fs18 from "fs/promises";
 import path16 from "path";
 import chalk10 from "chalk";
 function uploadAiBlameBuilder(args) {

@@ -18597,7 +18838,7 @@ async function uploadAiBlameHandler(args) {
     const promptPath = String(prompts[i]);
     const inferencePath = String(inferences[i]);
     try {
-      await Promise.all([
+      await Promise.all([fs18.access(promptPath), fs18.access(inferencePath)]);
     } catch {
       console.error(chalk10.red(`File not found for session ${i + 1}`));
       process.exit(1);