mobbdev 1.0.88 → 1.0.91
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.mjs +1522 -1120
- package/package.json +16 -14
package/dist/index.mjs
CHANGED
@@ -7,18 +7,19 @@ var __export = (target, all) => {
 var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);

 // src/index.ts
-import
+import Debug20 from "debug";
 import { hideBin } from "yargs/helpers";

 // src/args/commands/convert_to_sarif.ts
-import
+import fs5 from "fs";

 // src/commands/convert_to_sarif.ts
-import
-import
+import fs4 from "fs";
+import path5 from "path";

 // src/commands/fpr_stream_parser.ts
-import fs from "
+import fs from "fs";
+import readline from "readline";
 import sax from "sax";
 var BaseStreamParser = class {
   constructor(parser) {
@@ -134,14 +135,17 @@ var UnifiedNodePoolParser = class extends BaseStreamParser {
   }
 };
 var VulnerabilityParser = class extends BaseStreamParser {
-  constructor() {
-    super(
-    __publicField(this, "vulnerabilities", []);
+  constructor(parser, tmpStorageFilePath) {
+    super(parser);
     __publicField(this, "isInVulnerability", false);
     __publicField(this, "codePoints", []);
     __publicField(this, "metadata", {});
     __publicField(this, "metaInfo", {});
     __publicField(this, "groupName", "");
+    __publicField(this, "tmpStorageFileWriter");
+    __publicField(this, "tmpStorageFilePath");
+    this.tmpStorageFilePath = tmpStorageFilePath;
+    this.tmpStorageFileWriter = fs.createWriteStream(tmpStorageFilePath);
   }
   onOpenTag(tag) {
     super.onOpenTag(tag);
@@ -195,25 +199,43 @@ var VulnerabilityParser = class extends BaseStreamParser {
   onCloseTag() {
     if (this.getPathString() === "FVDL > Vulnerabilities > Vulnerability") {
       this.isInVulnerability = false;
-      this.
-
-
-
-
-
-
-
-
+      this.tmpStorageFileWriter.write(
+        JSON.stringify({
+          nodes: this.codePoints,
+          instanceID: this.metadata["InstanceID"] ?? "",
+          instanceSeverity: this.metadata["InstanceSeverity"] ?? "",
+          confidence: this.metadata["Confidence"] ?? "",
+          classID: this.metadata["ClassID"] ?? "",
+          type: this.metadata["Type"] ?? "",
+          subtype: this.metadata["Subtype"] ?? "",
+          metaInfo: this.metaInfo
+        }) + "\n"
+      );
     }
     super.onCloseTag();
   }
-  getVulnerabilities() {
-
+  async *getVulnerabilities() {
+    await new Promise((r) => this.tmpStorageFileWriter.end(r));
+    const rl = readline.createInterface({
+      input: fs.createReadStream(this.tmpStorageFilePath),
+      crlfDelay: Infinity
+    });
+    for await (const line of rl) {
+      if (line) {
+        yield JSON.parse(line);
+      }
+    }
   }
 };
 function initSaxParser(filepath) {
-  const parser = sax.createStream(true
+  const parser = sax.createStream(true, {
+    // All these flags help to improve parsing speed a lot.
+    trim: false,
+    normalize: false,
+    lowercase: false,
+    xmlns: false,
+    position: false
+  });
   const awaiter = new Promise((resolve, reject) => {
     parser.on("end", () => resolve(true));
     parser.on("error", (e) => reject(e));
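Note: the hunk above replaces the old in-memory vulnerabilities array with a temporary NDJSON file: each closed Vulnerability element is written out as one JSON line, and getVulnerabilities() becomes an async generator that replays that file. A minimal sketch of the same spill-to-disk pattern in Node, with illustrative names (not the package's API):

    import fs from "fs";
    import readline from "readline";

    class NdjsonSpill {
      constructor(filePath) {
        this.filePath = filePath;
        this.writer = fs.createWriteStream(filePath);
      }
      // Append one record as a single JSON line.
      add(record) {
        this.writer.write(JSON.stringify(record) + "\n");
      }
      // Finish writing, then yield records one at a time without holding them all in memory.
      async *read() {
        await new Promise((resolve) => this.writer.end(resolve));
        const rl = readline.createInterface({
          input: fs.createReadStream(this.filePath),
          crlfDelay: Infinity
        });
        for await (const line of rl) {
          if (line) yield JSON.parse(line);
        }
      }
    }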
@@ -221,7 +243,10 @@ function initSaxParser(filepath) {
   return {
     parser,
     parse: async () => {
-      fs.createReadStream(filepath
+      fs.createReadStream(filepath, {
+        // Set chunk size to 100 MB. The default is 16 KB, which makes the process too slow.
+        highWaterMark: 100 * 1024 * 1024
+      }).pipe(parser);
       await awaiter;
     }
   };
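Note: the larger highWaterMark only controls how much of the FVDL file each read pulls into memory before it is piped into the sax stream. A self-contained sketch of the same idea (file name and chunk size are illustrative):

    import fs from "fs";
    import sax from "sax";

    const parser = sax.createStream(true, { trim: false, position: false });
    parser.on("opentag", (tag) => console.log(tag.name));
    // Read in 100 MB chunks instead of the 16 KB default.
    fs.createReadStream("audit.fvdl", { highWaterMark: 100 * 1024 * 1024 }).pipe(parser);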
@@ -409,6 +434,7 @@ var IssueType_Enum = /* @__PURE__ */ ((IssueType_Enum2) => {
   IssueType_Enum2["SystemExitShouldReraise"] = "SYSTEM_EXIT_SHOULD_RERAISE";
   IssueType_Enum2["SystemInformationLeak"] = "SYSTEM_INFORMATION_LEAK";
   IssueType_Enum2["SystemInformationLeakExternal"] = "SYSTEM_INFORMATION_LEAK_EXTERNAL";
+  IssueType_Enum2["TarSlip"] = "TAR_SLIP";
   IssueType_Enum2["TrustBoundaryViolation"] = "TRUST_BOUNDARY_VIOLATION";
   IssueType_Enum2["TypeConfusion"] = "TYPE_CONFUSION";
   IssueType_Enum2["UncheckedLoopCondition"] = "UNCHECKED_LOOP_CONDITION";
@@ -947,71 +973,71 @@ var GetMcpFixesDocument = `
 var defaultWrapper = (action, _operationName, _operationType, _variables) => action();
 function getSdk(client, withWrapper = defaultWrapper) {
   return {
-    Me(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(MeDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "Me", "query", variables);
+    Me(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: MeDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "Me", "query", variables);
     },
-    getOrgAndProjectId(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(GetOrgAndProjectIdDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "getOrgAndProjectId", "query", variables);
+    getOrgAndProjectId(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: GetOrgAndProjectIdDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "getOrgAndProjectId", "query", variables);
     },
-    GetEncryptedApiToken(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(GetEncryptedApiTokenDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "GetEncryptedApiToken", "query", variables);
+    GetEncryptedApiToken(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: GetEncryptedApiTokenDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "GetEncryptedApiToken", "query", variables);
     },
-    FixReportState(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(FixReportStateDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "FixReportState", "query", variables);
+    FixReportState(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: FixReportStateDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "FixReportState", "query", variables);
     },
-    GetVulnerabilityReportPaths(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(GetVulnerabilityReportPathsDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "GetVulnerabilityReportPaths", "query", variables);
+    GetVulnerabilityReportPaths(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: GetVulnerabilityReportPathsDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "GetVulnerabilityReportPaths", "query", variables);
     },
-    getAnalysisSubscription(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(GetAnalysisSubscriptionDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "getAnalysisSubscription", "subscription", variables);
+    getAnalysisSubscription(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: GetAnalysisSubscriptionDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "getAnalysisSubscription", "subscription", variables);
     },
-    getAnalysis(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(GetAnalysisDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "getAnalysis", "query", variables);
+    getAnalysis(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: GetAnalysisDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "getAnalysis", "query", variables);
     },
-    getFixes(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(GetFixesDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "getFixes", "query", variables);
+    getFixes(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: GetFixesDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "getFixes", "query", variables);
     },
-    getVulByNodesMetadata(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(GetVulByNodesMetadataDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "getVulByNodesMetadata", "query", variables);
+    getVulByNodesMetadata(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: GetVulByNodesMetadataDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "getVulByNodesMetadata", "query", variables);
     },
-    getFalsePositive(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(GetFalsePositiveDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "getFalsePositive", "query", variables);
+    getFalsePositive(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: GetFalsePositiveDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "getFalsePositive", "query", variables);
     },
-    updateScmToken(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(UpdateScmTokenDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "updateScmToken", "mutation", variables);
+    updateScmToken(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: UpdateScmTokenDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "updateScmToken", "mutation", variables);
     },
-    uploadS3BucketInfo(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(UploadS3BucketInfoDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "uploadS3BucketInfo", "mutation", variables);
+    uploadS3BucketInfo(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: UploadS3BucketInfoDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "uploadS3BucketInfo", "mutation", variables);
     },
-    DigestVulnerabilityReport(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(DigestVulnerabilityReportDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "DigestVulnerabilityReport", "mutation", variables);
+    DigestVulnerabilityReport(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: DigestVulnerabilityReportDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "DigestVulnerabilityReport", "mutation", variables);
     },
-    SubmitVulnerabilityReport(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(SubmitVulnerabilityReportDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "SubmitVulnerabilityReport", "mutation", variables);
+    SubmitVulnerabilityReport(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: SubmitVulnerabilityReportDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "SubmitVulnerabilityReport", "mutation", variables);
     },
-    CreateCommunityUser(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(CreateCommunityUserDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "CreateCommunityUser", "mutation", variables);
+    CreateCommunityUser(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: CreateCommunityUserDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "CreateCommunityUser", "mutation", variables);
     },
-    CreateCliLogin(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(CreateCliLoginDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "CreateCliLogin", "mutation", variables);
+    CreateCliLogin(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: CreateCliLoginDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "CreateCliLogin", "mutation", variables);
     },
-    performCliLogin(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(PerformCliLoginDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "performCliLogin", "mutation", variables);
+    performCliLogin(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: PerformCliLoginDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "performCliLogin", "mutation", variables);
     },
-    CreateProject(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(CreateProjectDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "CreateProject", "mutation", variables);
+    CreateProject(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: CreateProjectDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "CreateProject", "mutation", variables);
     },
-    validateRepoUrl(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(ValidateRepoUrlDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "validateRepoUrl", "query", variables);
+    validateRepoUrl(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: ValidateRepoUrlDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "validateRepoUrl", "query", variables);
     },
-    gitReference(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(GitReferenceDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "gitReference", "query", variables);
+    gitReference(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: GitReferenceDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "gitReference", "query", variables);
     },
-    autoPrAnalysis(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(AutoPrAnalysisDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "autoPrAnalysis", "mutation", variables);
+    autoPrAnalysis(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: AutoPrAnalysisDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "autoPrAnalysis", "mutation", variables);
     },
-    GetMCPFixes(variables, requestHeaders) {
-      return withWrapper((wrappedRequestHeaders) => client.request(GetMcpFixesDocument, variables, { ...requestHeaders, ...wrappedRequestHeaders }), "GetMCPFixes", "query", variables);
+    GetMCPFixes(variables, requestHeaders, signal) {
+      return withWrapper((wrappedRequestHeaders) => client.request({ document: GetMcpFixesDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "GetMCPFixes", "query", variables);
     }
   };
 }
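Note: every generated SDK method now accepts an optional third signal argument and forwards it through client.request({ document, variables, requestHeaders, signal }), so callers can cancel in-flight GraphQL requests. A hedged usage sketch (the method and variable names are only illustrative):

    const controller = new AbortController();
    // Abort the query if it has not finished within 30 seconds.
    const timer = setTimeout(() => controller.abort(), 30000);
    try {
      const result = await sdk.getAnalysis({ analysisId }, undefined, controller.signal);
      console.log(result);
    } finally {
      clearTimeout(timer);
    }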
@@ -1445,7 +1471,8 @@ var issueTypeMap = {
   ["WILDCARD_IMPORTS" /* WildcardImports */]: "Wildcard Imports should not be used",
   ["AVOID_IDENTITY_COMPARISON_CACHED_TYPES" /* AvoidIdentityComparisonCachedTypes */]: "Avoid Identity Comparison of Cached Types",
   ["AVOID_BUILTIN_SHADOWING" /* AvoidBuiltinShadowing */]: "Avoid Builtin Shadowing",
-  ["IMPROPER_STRING_FORMATTING" /* ImproperStringFormatting */]: "Improper String Formatting"
+  ["IMPROPER_STRING_FORMATTING" /* ImproperStringFormatting */]: "Improper String Formatting",
+  ["TAR_SLIP" /* TarSlip */]: "Tar Slip"
 };
 var issueTypeZ = z5.nativeEnum(IssueType_Enum);
 var getIssueTypeFriendlyString = (issueType) => {
@@ -1908,7 +1935,7 @@ var ConvertToSarifInputFileFormat = /* @__PURE__ */ ((ConvertToSarifInputFileFor
 var DEFUALT_ADO_ORIGIN = scmCloudUrl.Ado;

 // src/features/analysis/scm/ado/utils.ts
-import querystring from "
+import querystring from "querystring";
 import * as api from "azure-devops-node-api";
 import Debug from "debug";
 import { z as z17 } from "zod";
@@ -2178,7 +2205,8 @@ var fixDetailsData = {
   ["AVOID_IDENTITY_COMPARISON_CACHED_TYPES" /* AvoidIdentityComparisonCachedTypes */]: void 0,
   ["AVOID_BUILTIN_SHADOWING" /* AvoidBuiltinShadowing */]: void 0,
   ["IMPROPER_STRING_FORMATTING" /* ImproperStringFormatting */]: void 0,
-  ["WILDCARD_IMPORTS" /* WildcardImports */]: void 0
+  ["WILDCARD_IMPORTS" /* WildcardImports */]: void 0,
+  ["TAR_SLIP" /* TarSlip */]: void 0
 };

 // src/features/analysis/scm/shared/src/commitDescriptionMarkup.ts
@@ -4630,7 +4658,7 @@ async function getAdoSdk(params) {
     const url = new URL(repoUrl);
     const origin2 = url.origin.toLowerCase().endsWith(".visualstudio.com") ? DEFUALT_ADO_ORIGIN : url.origin.toLowerCase();
     const params2 = `path=/&versionDescriptor[versionOptions]=0&versionDescriptor[versionType]=commit&versionDescriptor[version]=${branch}&resolveLfs=true&$format=zip&api-version=5.0&download=true`;
-    const
+    const path13 = [
       prefixPath,
       owner,
       projectName,
@@ -4641,7 +4669,7 @@ async function getAdoSdk(params) {
       "items",
       "items"
     ].filter(Boolean).join("/");
-    return new URL(`${
+    return new URL(`${path13}?${params2}`, origin2).toString();
   },
   async getAdoBranchList({ repoUrl }) {
     try {
@@ -4862,108 +4890,601 @@ async function getAdoRepoList({
 }

 // src/features/analysis/scm/ado/AdoSCMLib.ts
-import { setTimeout as setTimeout2 } from "
+import { setTimeout as setTimeout2 } from "timers/promises";

-// src/features/analysis/scm/
+// src/features/analysis/scm/git/GitService.ts
+import * as path2 from "path";
 import { simpleGit } from "simple-git";
-
-
-
-
-
+
+// src/features/analysis/scm/FileUtils.ts
+import fs2 from "fs";
+import { isBinary } from "istextorbinary";
+import path from "path";
+var EXCLUDED_FILE_PATTERNS = [
+  // ... (copy the full array from FilePacking.ts)
+  ".json",
+  ".yaml",
+  ".yml",
+  ".toml",
+  ".ini",
+  ".conf",
+  ".config",
+  ".xml",
+  ".env",
+  ".md",
+  ".txt",
+  ".rst",
+  ".adoc",
+  ".lock",
+  ".png",
+  ".jpg",
+  ".jpeg",
+  ".gif",
+  ".svg",
+  ".ico",
+  ".webp",
+  ".bmp",
+  ".tiff",
+  ".ttf",
+  ".otf",
+  ".woff",
+  ".woff2",
+  ".eot",
+  ".zip",
+  ".tar",
+  ".gz",
+  ".rar",
+  ".7z",
+  ".log",
+  ".db",
+  ".sqlite",
+  ".sql",
+  ".pem",
+  ".crt",
+  ".key",
+  ".p12",
+  ".pfx",
+  ".editorconfig",
+  ".sublime-project",
+  ".sublime-workspace",
+  ".DS_Store",
+  "Thumbs.db",
+  ".lcov",
+  ".exe",
+  ".dll",
+  ".so",
+  ".dylib",
+  ".class",
+  ".pyc",
+  ".pyo",
+  ".o",
+  ".obj",
+  ".min.js",
+  ".min.css",
+  ".min.html",
+  ".test.js",
+  ".test.ts",
+  ".test.jsx",
+  ".test.tsx",
+  ".spec.js",
+  ".spec.ts",
+  ".spec.jsx",
+  ".spec.tsx",
+  ".d.ts",
+  ".bundle.js",
+  ".chunk.js",
+  "dockerfile",
+  "jenkinsfile",
+  "go.sum",
+  ".gitignore",
+  ".gitattributes",
+  ".gitmodules",
+  ".gitkeep",
+  ".keep",
+  ".hgignore",
+  ".nvmrc",
+  ".node-version",
+  ".npmrc",
+  ".yarnrc",
+  ".pnpmfile.cjs",
+  ".ruby-version",
+  ".python-version",
+  ".rvmrc",
+  ".rbenv-version",
+  ".gvmrc",
+  "makefile",
+  "rakefile",
+  "gulpfile.js",
+  "gruntfile.js",
+  "webpack.config.js",
+  "webpack.config.ts",
+  "rollup.config.js",
+  "vite.config.js",
+  "vite.config.ts",
+  "next.config.js",
+  "nuxt.config.js",
+  "tailwind.config.js",
+  "postcss.config.js",
+  ".babelrc",
+  ".babelrc.js",
+  ".swcrc",
+  ".browserslistrc",
+  "jest.config.js",
+  "jest.config.ts",
+  "vitest.config.js",
+  "karma.conf.js",
+  "protractor.conf.js",
+  "cypress.config.js",
+  "playwright.config.js",
+  ".nycrc",
+  ".c8rc",
+  ".eslintrc",
+  ".eslintrc.js",
+  ".prettierrc",
+  ".prettierrc.js",
+  ".stylelintrc",
+  ".stylelintrc.js",
+  "pipfile",
+  "gemfile",
+  "go.mod",
+  "project.clj",
+  "setup.py",
+  "setup.cfg",
+  "manifest.in",
+  ".pythonrc",
+  "readme",
+  "changelog",
+  "authors",
+  "contributors",
+  "license",
+  "notice",
+  "copyright",
+  ".htaccess"
+];
+var FileUtils = class {
+  static isExcludedFileType(filepath) {
+    const basename = path.basename(filepath).toLowerCase();
+    if (basename === ".env" || basename.startsWith(".env.")) {
+      return true;
+    }
+    if (EXCLUDED_FILE_PATTERNS.some((pattern) => basename.endsWith(pattern))) {
       return true;
     }
-    return false;
-  } catch (e) {
     return false;
   }
-
-
-
-
-
-
-
-
-
-
-    this.scmOrg = scmOrg;
-  }
-  async getUrlWithCredentials() {
-    if (!this.url) {
-      console.error("no url for getUrlWithCredentials()");
-      throw new Error("no url");
+  static shouldPackFile(filepath, maxFileSize = 1024 * 1024 * 5) {
+    const absoluteFilepath = path.resolve(filepath);
+    if (this.isExcludedFileType(filepath)) return false;
+    if (!fs2.existsSync(absoluteFilepath)) return false;
+    if (fs2.lstatSync(absoluteFilepath).size > maxFileSize) return false;
+    let data;
+    try {
+      data = fs2.readFileSync(absoluteFilepath);
+    } catch {
+      return false;
     }
-
-
-
-
+    if (isBinary(null, data)) return false;
+    return true;
+  }
+  static getAllFiles(dir, rootDir) {
+    const root = rootDir || dir;
+    const results = [];
+    const relativeDepth = path.relative(root, dir).split(path.sep).length;
+    if (relativeDepth > 20) {
+      return [];
     }
-    if (
-
-      return `${protocol}//${accessToken}@${host}${pathname}`;
+    if (results.length > 1e5) {
+      return [];
     }
-
-
-
-
-      username,
-      password: accessToken
-    });
-  }
-  getAccessToken() {
-    return this.accessToken || "";
-  }
-  getUrl() {
-    return this.url;
-  }
-  getName() {
-    if (!this.url) {
-      return "";
+    try {
+      fs2.accessSync(dir, fs2.constants.R_OK);
+    } catch {
+      return [];
     }
-
-
-
-
-
-
+    const items = fs2.readdirSync(dir);
+    for (const item of items) {
+      const fullPath = path.join(dir, item);
+      try {
+        fs2.accessSync(fullPath, fs2.constants.R_OK);
+      } catch {
+        continue;
+      }
+      const stat = fs2.statSync(fullPath);
+      if (stat.isDirectory()) {
+        results.push(...this.getAllFiles(fullPath, root));
+      } else {
+        results.push({
+          name: item,
+          fullPath,
+          relativePath: path.relative(root, fullPath),
+          time: stat.mtime.getTime(),
+          isFile: true
+        });
+      }
     }
+    return results;
   }
-  static
-
-
-
-    this._validateAccessToken();
-    this._validateUrl();
-  }
-  _validateUrl() {
-    if (!this.url) {
-      console.error("no url");
-      throw new InvalidRepoUrlError("no url");
-    }
+  static getLastChangedFiles(dir, maxFileSize = 1024 * 1024 * 5, count = 10) {
+    if (!fs2.existsSync(dir) || !fs2.lstatSync(dir).isDirectory()) return [];
+    const files = this.getAllFiles(dir);
+    return files.filter((file) => this.shouldPackFile(file.fullPath, maxFileSize)).sort((a, b) => b.time - a.time).slice(0, count).map((file) => file.relativePath);
   }
 };

-// src/features/analysis/scm/
-
-
-
-
-
-
-
-
-  }
-
-
-    super(url, accessToken, scmOrg);
-    __publicField(this, "_adoSdkPromise");
-    this._adoSdkPromise = initAdoSdk({ accessToken, url, scmOrg });
+// src/features/analysis/scm/git/GitService.ts
+var GitService = class {
+  constructor(repositoryPath, log2) {
+    __publicField(this, "git");
+    __publicField(this, "repositoryPath");
+    __publicField(this, "log");
+    const noopLog = (_message, _level, _data) => {
+    };
+    this.log = log2 || noopLog;
+    this.git = simpleGit(repositoryPath, { binary: "git" });
+    this.repositoryPath = repositoryPath;
+    this.log("Git service initialized", "debug", { repositoryPath });
   }
-
-
-
-
+  /**
+   * Validates that the path is a valid git repository
+   */
+  async validateRepository() {
+    this.log("Validating git repository", "debug");
+    try {
+      const isRepo = await this.git.checkIsRepo();
+      if (!isRepo) {
+        const error = "Path is not a valid git repository";
+        this.log(error, "error");
+        return { isValid: false, error };
+      }
+      this.log("Git repository validation successful", "debug");
+      return { isValid: true };
+    } catch (error) {
+      const errorMessage = `Failed to verify git repository: ${error.message}`;
+      this.log(errorMessage, "error", { error });
+      return { isValid: false, error: errorMessage };
+    }
+  }
+  /**
+   * Gets the current git status and returns changed files
+   */
+  async getChangedFiles() {
+    this.log("Getting git status", "debug");
+    try {
+      const status = await this.git.status();
+      const gitRoot = await this.git.revparse(["--show-toplevel"]);
+      const relativePathFromGitRoot = path2.relative(
+        gitRoot,
+        this.repositoryPath
+      );
+      const files = status.files.map((file) => {
+        const gitRelativePath = file.path;
+        if (relativePathFromGitRoot === "") {
+          return gitRelativePath;
+        }
+        if (gitRelativePath.startsWith(relativePathFromGitRoot + "/")) {
+          return gitRelativePath.substring(relativePathFromGitRoot.length + 1);
+        }
+        return path2.relative(
+          this.repositoryPath,
+          path2.join(gitRoot, gitRelativePath)
+        );
+      });
+      this.log("Git status retrieved", "info", {
+        fileCount: files.length,
+        files: files.slice(0, 10),
+        // Log first 10 files to avoid spam
+        gitRoot,
+        workingDir: this.repositoryPath,
+        relativePathFromGitRoot
+      });
+      return { files, status };
+    } catch (error) {
+      const errorMessage = `Failed to get git status: ${error.message}`;
+      this.log(errorMessage, "error", { error });
+      throw new Error(errorMessage);
+    }
+  }
+  /**
+   * Gets git repository information including remote URL, current commit hash, and branch name
+   */
+  async getGitInfo() {
+    this.log("Getting git repository information", "debug");
+    try {
+      const [repoUrl, hash, reference] = await Promise.all([
+        this.git.getConfig("remote.origin.url"),
+        this.git.revparse(["HEAD"]),
+        this.git.revparse(["--abbrev-ref", "HEAD"])
+      ]);
+      let normalizedRepoUrl = repoUrl.value || "";
+      if (normalizedRepoUrl.endsWith(".git")) {
+        normalizedRepoUrl = normalizedRepoUrl.slice(0, -".git".length);
+      }
+      if (normalizedRepoUrl.startsWith("git@github.com:")) {
+        normalizedRepoUrl = normalizedRepoUrl.replace(
+          "git@github.com:",
+          "https://github.com/"
+        );
+      }
+      this.log("Git repository information retrieved", "debug", {
+        repoUrl: normalizedRepoUrl,
+        hash,
+        reference
+      });
+      return {
+        repoUrl: normalizedRepoUrl,
+        hash,
+        reference
+      };
+    } catch (error) {
+      const errorMessage = `Failed to get git repository information: ${error.message}`;
+      this.log(errorMessage, "error", { error });
+      throw new Error(errorMessage);
+    }
+  }
+  /**
+   * Validates if a branch name is valid according to git's rules
+   */
+  async isValidBranchName(branchName) {
+    this.log("Validating branch name", "debug", { branchName });
+    try {
+      const result = await this.git.raw([
+        "check-ref-format",
+        "--branch",
+        branchName
+      ]);
+      const isValid = Boolean(result);
+      this.log("Branch name validation result", "debug", {
+        branchName,
+        isValid
+      });
+      return isValid;
+    } catch (error) {
+      this.log("Branch name validation failed", "debug", { branchName, error });
+      return false;
+    }
+  }
+  /**
+   * Gets the current branch name
+   */
+  async getCurrentBranch() {
+    this.log("Getting current branch name", "debug");
+    try {
+      const branch = await this.git.revparse(["--abbrev-ref", "HEAD"]);
+      this.log("Current branch retrieved", "debug", { branch });
+      return branch;
+    } catch (error) {
+      const errorMessage = `Failed to get current branch: ${error.message}`;
+      this.log(errorMessage, "error", { error });
+      throw new Error(errorMessage);
+    }
+  }
+  /**
+   * Gets the current commit hash
+   */
+  async getCurrentCommitHash() {
+    this.log("Getting current commit hash", "debug");
+    try {
+      const hash = await this.git.revparse(["HEAD"]);
+      this.log("Current commit hash retrieved", "debug", { hash });
+      return hash;
+    } catch (error) {
+      const errorMessage = `Failed to get current commit hash: ${error.message}`;
+      this.log(errorMessage, "error", { error });
+      throw new Error(errorMessage);
+    }
+  }
+  /**
+   * Gets the remote repository URL
+   */
+  async getRemoteUrl() {
+    this.log("Getting remote repository URL", "debug");
+    try {
+      const remoteUrl = await this.git.getConfig("remote.origin.url");
+      const url = remoteUrl.value || "";
+      let normalizedUrl = url;
+      if (normalizedUrl.endsWith(".git")) {
+        normalizedUrl = normalizedUrl.slice(0, -".git".length);
+      }
+      if (normalizedUrl.startsWith("git@github.com:")) {
+        normalizedUrl = normalizedUrl.replace(
+          "git@github.com:",
+          "https://github.com/"
+        );
+      }
+      this.log("Remote repository URL retrieved", "debug", {
+        url: normalizedUrl
+      });
+      return normalizedUrl;
+    } catch (error) {
+      const errorMessage = `Failed to get remote repository URL: ${error.message}`;
+      this.log(errorMessage, "error", { error });
+      throw new Error(errorMessage);
+    }
+  }
+  /**
+   * Gets the 10 most recently changed files based on commit history
+   */
+  async getRecentlyChangedFiles() {
+    this.log(
+      "Getting the 10 most recently changed files from commit history",
+      "debug"
+    );
+    try {
+      const gitRoot = await this.git.revparse(["--show-toplevel"]);
+      const relativePathFromGitRoot = path2.relative(
+        gitRoot,
+        this.repositoryPath
+      );
+      const fileSet = /* @__PURE__ */ new Set();
+      const files = [];
+      let commitsProcessed = 0;
+      const logResult = await this.git.log({
+        maxCount: 100,
+        // Get last 100 commits - should be enough to find 10 unique files
+        format: {
+          hash: "%H",
+          date: "%ai",
+          message: "%s",
+          //the field name author_name can't follow the naming convention as we are using the git log command
+          // eslint-disable-next-line @typescript-eslint/naming-convention
+          author_name: "%an"
+        }
+      });
+      for (const commit of logResult.all) {
+        if (files.length >= 10) {
+          break;
+        }
+        commitsProcessed++;
+        try {
+          const filesOutput = await this.git.show([
+            "--name-only",
+            "--pretty=format:",
+            commit.hash
+          ]);
+          const commitFiles = filesOutput.split("\n").filter((file) => file.trim() !== "");
+          for (const file of commitFiles) {
+            if (files.length >= 10) {
+              break;
+            }
+            const gitRelativePath = file.trim();
+            let adjustedPath;
+            if (relativePathFromGitRoot === "") {
+              adjustedPath = gitRelativePath;
+            } else if (gitRelativePath.startsWith(relativePathFromGitRoot + "/")) {
+              adjustedPath = gitRelativePath.substring(
+                relativePathFromGitRoot.length + 1
+              );
+            } else {
+              adjustedPath = path2.relative(
+                this.repositoryPath,
+                path2.join(gitRoot, gitRelativePath)
+              );
+            }
+            this.log(`Considering file: ${adjustedPath}`, "debug");
+            if (!fileSet.has(adjustedPath) && FileUtils.shouldPackFile(path2.join(gitRoot, gitRelativePath))) {
+              fileSet.add(adjustedPath);
+              files.push(adjustedPath);
+            }
+          }
+        } catch (showError) {
+          this.log(`Could not get files for commit ${commit.hash}`, "debug", {
+            error: showError
+          });
+        }
+      }
+      this.log("Recently changed files retrieved", "info", {
+        fileCount: files.length,
+        commitsProcessed,
+        totalCommitsAvailable: logResult.all.length,
+        files: files.slice(0, 10),
+        // Log the files (should be all of them since we limit to 10)
+        gitRoot,
+        workingDir: this.repositoryPath,
+        relativePathFromGitRoot
+      });
+      return {
+        files,
+        commitCount: commitsProcessed
+      };
+    } catch (error) {
+      const errorMessage = `Failed to get recently changed files: ${error.message}`;
+      this.log(errorMessage, "error", { error });
+      throw new Error(errorMessage);
+    }
+  }
+};
+
+// src/features/analysis/scm/scmSubmit/index.ts
+var isValidBranchName = async (branchName) => {
+  const gitService = new GitService(process.cwd());
+  return gitService.isValidBranchName(branchName);
+};
+
+// src/features/analysis/scm/scm.ts
+var SCMLib = class {
+  constructor(url, accessToken, scmOrg) {
+    __publicField(this, "url");
+    __publicField(this, "accessToken");
+    __publicField(this, "scmOrg");
+    this.accessToken = accessToken;
+    this.url = url;
+    this.scmOrg = scmOrg;
+  }
+  async getUrlWithCredentials() {
+    if (!this.url) {
+      console.error("no url for getUrlWithCredentials()");
+      throw new Error("no url");
+    }
+    const trimmedUrl = this.url.trim().replace(/\/$/, "");
+    const accessToken = this.getAccessToken();
+    if (!accessToken) {
+      return trimmedUrl;
+    }
+    if (this.scmLibType === "ADO" /* ADO */) {
+      const { host, protocol, pathname } = new URL(trimmedUrl);
+      return `${protocol}//${accessToken}@${host}${pathname}`;
+    }
+    const finalUrl = this.scmLibType === "GITLAB" /* GITLAB */ ? `${trimmedUrl}.git` : trimmedUrl;
+    const username = await this._getUsernameForAuthUrl();
+    return buildAuthorizedRepoUrl({
+      url: finalUrl,
+      username,
+      password: accessToken
+    });
+  }
+  getAccessToken() {
+    return this.accessToken || "";
+  }
+  getUrl() {
+    return this.url;
+  }
+  getName() {
+    if (!this.url) {
+      return "";
+    }
+    return this.url.split("/").at(-1) || "";
+  }
+  _validateAccessToken() {
+    if (!this.accessToken) {
+      console.error("no access token");
+      throw new Error("no access token");
+    }
+  }
+  static async getIsValidBranchName(branchName) {
+    return isValidBranchName(branchName);
+  }
+  _validateAccessTokenAndUrl() {
+    this._validateAccessToken();
+    this._validateUrl();
+  }
+  _validateUrl() {
+    if (!this.url) {
+      console.error("no url");
+      throw new InvalidRepoUrlError("no url");
+    }
+  }
+};
+
+// src/features/analysis/scm/ado/AdoSCMLib.ts
+async function initAdoSdk(params) {
+  const { url, accessToken, scmOrg } = params;
+  const adoClientParams = await getAdoClientParams({
+    tokenOrg: scmOrg,
+    accessToken,
+    url
+  });
+  return getAdoSdk(adoClientParams);
+}
+var AdoSCMLib = class extends SCMLib {
+  constructor(url, accessToken, scmOrg) {
+    super(url, accessToken, scmOrg);
+    __publicField(this, "_adoSdkPromise");
+    this._adoSdkPromise = initAdoSdk({ accessToken, url, scmOrg });
+  }
+  async getAdoSdk() {
+    if (!this._adoSdkPromise) {
+      console.error("ado sdk was not initialized");
+      throw new InvalidAccessTokenError("ado sdk was not initialized");
     }
     return this._adoSdkPromise;
   }
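Note: the large block added above introduces FileUtils (file filtering by name, size, and binary content) and GitService (a thin wrapper around simple-git) and rebuilds SCMLib/AdoSCMLib on top of them. A short usage sketch of the new classes as they appear in this bundle (the repository path is illustrative):

    const gitService = new GitService("/path/to/repo");
    const { isValid, error } = await gitService.validateRepository();
    if (!isValid) {
      console.error(error);
    } else {
      const { repoUrl, hash, reference } = await gitService.getGitInfo();
      const { files } = await gitService.getRecentlyChangedFiles();
      console.log(repoUrl, hash, reference, files);
    }
    // FileUtils can also be used on its own to pick the most recently modified packable files.
    const candidates = FileUtils.getLastChangedFiles("/path/to/repo");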
@@ -5143,7 +5664,7 @@ var AdoSCMLib = class extends SCMLib {
 };

 // src/features/analysis/scm/bitbucket/bitbucket.ts
-import querystring2 from "
+import querystring2 from "querystring";
 import * as bitbucketPkgNode from "bitbucket";
 import bitbucketPkg from "bitbucket";
 import Debug2 from "debug";
@@ -5463,7 +5984,7 @@ async function getRepositoriesByWorkspace(bitbucketClient, { workspaceSlug }) {
 }

 // src/features/analysis/scm/bitbucket/BitbucketSCMLib.ts
-import { setTimeout as setTimeout3 } from "
+import { setTimeout as setTimeout3 } from "timers/promises";
 import { z as z20 } from "zod";
 function getUserAndPassword(token) {
   const [username, password] = token.split(":");
@@ -6099,14 +6620,14 @@ function getGithubSdk(params = {}) {
       };
     },
     async getGithubBlameRanges(params2) {
-      const { ref, gitHubUrl, path:
+      const { ref, gitHubUrl, path: path13 } = params2;
       const { owner, repo } = parseGithubOwnerAndRepo(gitHubUrl);
       const res = await octokit.graphql(
         GET_BLAME_DOCUMENT,
         {
           owner,
           repo,
-          path:
+          path: path13,
           ref
         }
       );
@@ -6412,11 +6933,11 @@ var GithubSCMLib = class extends SCMLib {
       markdownComment: comment
     });
   }
-  async getRepoBlameRanges(ref,
+  async getRepoBlameRanges(ref, path13) {
     this._validateUrl();
     return await this.githubSdk.getGithubBlameRanges({
       ref,
-      path:
+      path: path13,
       gitHubUrl: this.url
     });
   }
@@ -6500,7 +7021,7 @@ var GithubSCMLib = class extends SCMLib {
 };

 // src/features/analysis/scm/gitlab/gitlab.ts
-import querystring3 from "
+import querystring3 from "querystring";
 import {
   createRequesterFn
 } from "@gitbeaker/requester-utils";
@@ -6818,13 +7339,13 @@ function parseGitlabOwnerAndRepo(gitlabUrl) {
   const { organization, repoName, projectPath } = parsingResult;
   return { owner: organization, repo: repoName, projectPath };
 }
-async function getGitlabBlameRanges({ ref, gitlabUrl, path:
+async function getGitlabBlameRanges({ ref, gitlabUrl, path: path13 }, options) {
   const { projectPath } = parseGitlabOwnerAndRepo(gitlabUrl);
   const api2 = getGitBeaker({
     url: gitlabUrl,
     gitlabAuthToken: options?.gitlabAuthToken
   });
-  const resp = await api2.RepositoryFiles.allFileBlames(projectPath,
+  const resp = await api2.RepositoryFiles.allFileBlames(projectPath, path13, ref);
   let lineNumber = 1;
   return resp.filter((range) => range.lines).map((range) => {
     const oldLineNumber = lineNumber;
@@ -7000,10 +7521,10 @@ var GitlabSCMLib = class extends SCMLib {
       markdownComment: comment
     });
   }
-  async getRepoBlameRanges(ref,
+  async getRepoBlameRanges(ref, path13) {
     this._validateUrl();
     return await getGitlabBlameRanges(
-      { ref, path:
+      { ref, path: path13, gitlabUrl: this.url },
       {
         url: this.url,
         gitlabAuthToken: this.accessToken
@@ -7206,19 +7727,19 @@ __export(utils_exports, {
 });

 // src/utils/dirname.ts
-import
-import { fileURLToPath } from "
+import path3 from "path";
+import { fileURLToPath } from "url";
 function getDirName() {
-  return
+  return path3.dirname(fileURLToPath(import.meta.url));
 }
 function getTopLevelDirName(fullPath) {
-  return
+  return path3.parse(fullPath).name;
 }

 // src/utils/keypress.ts
-import
+import readline2 from "readline";
 async function keypress() {
-  const rl =
+  const rl = readline2.createInterface({
     input: process.stdin,
     output: process.stdout
   });
@@ -7275,15 +7796,15 @@ function Spinner({ ci = false } = {}) {
 }

 // src/utils/check_node_version.ts
-import
-import
+import fs3 from "fs";
+import path4 from "path";
 import semver from "semver";
 function getPackageJson() {
-  let manifestPath =
-  if (!
-    manifestPath =
+  let manifestPath = path4.join(getDirName(), "../package.json");
+  if (!fs3.existsSync(manifestPath)) {
+    manifestPath = path4.join(getDirName(), "../../package.json");
   }
-  return JSON.parse(
+  return JSON.parse(fs3.readFileSync(manifestPath, "utf8"));
 }
 var packageJson = getPackageJson();
 if (!semver.satisfies(process.version, packageJson.engines.node)) {
@@ -7326,11 +7847,12 @@ async function convertFprToSarif(inputFilePath, outputFilePath, codePathPatterns
     unsafeCleanup: true
   });
   try {
-    const auditFvdlPath =
+    const auditFvdlPath = path5.join(tmpObj.name, "audit.fvdl");
     await zipIn.extract("audit.fvdl", auditFvdlPath);
     const auditFvdlSaxParser = initSaxParser(auditFvdlPath);
     const vulnerabilityParser = new VulnerabilityParser(
-      auditFvdlSaxParser.parser
+      auditFvdlSaxParser.parser,
+      path5.join(tmpObj.name, "vulns.json")
     );
     const unifiedNodePoolParser = new UnifiedNodePoolParser(
       auditFvdlSaxParser.parser
@@ -7341,16 +7863,15 @@ async function convertFprToSarif(inputFilePath, outputFilePath, codePathPatterns
     let auditMetadataParser = null;
     await auditFvdlSaxParser.parse();
     if ("audit.xml" in zipInEntries) {
-      const auditXmlPath =
+      const auditXmlPath = path5.join(tmpObj.name, "audit.xml");
       await zipIn.extract("audit.xml", auditXmlPath);
       const auditXmlSaxParser = initSaxParser(auditXmlPath);
       auditMetadataParser = new AuditMetadataParser(auditXmlSaxParser.parser);
       await auditXmlSaxParser.parse();
     }
     await zipIn.close();
-
-
-    `{
+    const writer = fs4.createWriteStream(outputFilePath);
+    writer.write(`{
   "$schema": "https://json.schemastore.org/sarif-2.1.0.json",
   "version": "2.1.0",
   "runs": [
@@ -7361,23 +7882,26 @@ async function convertFprToSarif(inputFilePath, outputFilePath, codePathPatterns
         }
       },
       "results": [
-`
-
-    const
-
+`);
+    let isFirstVuln = true;
+    for await (const vulnerability of vulnerabilityParser.getVulnerabilities()) {
+      const sarifResult = fortifyVulnerabilityToSarifResult(
         vulnerability,
         auditMetadataParser,
         reportMetadataParser,
         unifiedNodePoolParser
-      )
-
-
-
-
-
+      );
+      if (filterSarifResult(sarifResult, codePathPatterns)) {
+        if (isFirstVuln) {
+          isFirstVuln = false;
+        } else {
+          writer.write(",\n");
+        }
+        writer.write(JSON.stringify(sarifResult, null, 2));
       }
-    }
-
+    }
+    writer.write("\n]}]}");
+    await new Promise((r) => writer.end(r));
   } finally {
     tmpObj.removeCallback();
   }
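Note: instead of accumulating every SARIF result and serializing them in one go, the converter now writes the report incrementally: a fixed JSON prefix, then each filtered result separated by commas, then a closing suffix. A minimal sketch of that comma-tracking pattern with an async source (names are illustrative):

    import fs from "fs";

    async function writeJsonArray(outputPath, items) {
      const writer = fs.createWriteStream(outputPath);
      writer.write("[\n");
      let first = true;
      for await (const item of items) {
        // Emit a separator before every element except the first.
        if (!first) writer.write(",\n");
        first = false;
        writer.write(JSON.stringify(item, null, 2));
      }
      writer.write("\n]");
      await new Promise((resolve) => writer.end(resolve));
    }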
@@ -7451,15 +7975,15 @@ function fortifyNodesToSarifLocations(nodes, unifiedNodePoolParser) {
 import chalk2 from "chalk";

 // src/constants.ts
-import
-import { fileURLToPath as fileURLToPath2 } from "
+import path6 from "path";
+import { fileURLToPath as fileURLToPath2 } from "url";
 import chalk from "chalk";
 import Debug4 from "debug";
 import * as dotenv from "dotenv";
 import { z as z24 } from "zod";
 var debug4 = Debug4("mobbdev:constants");
-var __dirname =
-dotenv.config({ path:
+var __dirname = path6.dirname(fileURLToPath2(import.meta.url));
+dotenv.config({ path: path6.join(__dirname, "../.env") });
 var scmFriendlyText = {
   ["Ado" /* Ado */]: "Azure DevOps",
   ["Bitbucket" /* Bitbucket */]: "Bitbucket",
@@ -7680,7 +8204,7 @@ function convertToSarifBuilder(args) {
   ).help().demandOption(["input-file-path", "input-file-format", "output-file-path"]);
 }
 async function validateConvertToSarifOptions(args) {
-  if (!
+  if (!fs5.existsSync(args.inputFilePath)) {
     throw new CliError(
       "\nError: --input-file-path flag should point to an existing file"
     );
@@ -7706,18 +8230,18 @@ import chalk10 from "chalk";
 import yargs from "yargs/yargs";

 // src/args/commands/analyze.ts
-import
+import fs8 from "fs";

 // src/commands/index.ts
-import crypto from "
-import os from "
+import crypto from "crypto";
+import os from "os";

 // src/features/analysis/index.ts
-import
-import fsPromises from "
-import
-import { env as env2 } from "
-import { pipeline } from "
+import fs7 from "fs";
+import fsPromises from "fs/promises";
+import path9 from "path";
+import { env as env2 } from "process";
+import { pipeline } from "stream/promises";
 import chalk5 from "chalk";
 import Configstore from "configstore";
 import Debug18 from "debug";
@@ -7999,7 +8523,7 @@ async function postIssueComment(params) {
     fpDescription
   } = params;
   const {
-    path:
+    path: path13,
     startLine,
     vulnerabilityReportIssue: {
       vulnerabilityReportIssueTags,
@@ -8014,7 +8538,7 @@ async function postIssueComment(params) {
 Refresh the page in order to see the changes.`,
     pull_number: pullRequest,
     commit_id: commitSha,
-    path:
+    path: path13,
     line: startLine
   });
   const commentId = commentRes.data.id;
@@ -8048,7 +8572,7 @@ async function postFixComment(params) {
|
|
|
8048
8572
|
scanner
|
|
8049
8573
|
} = params;
|
|
8050
8574
|
const {
|
|
8051
|
-
path:
|
|
8575
|
+
path: path13,
|
|
8052
8576
|
startLine,
|
|
8053
8577
|
vulnerabilityReportIssue: { fixId, vulnerabilityReportIssueTags, category },
|
|
8054
8578
|
vulnerabilityReportIssueId
|
|
@@ -8066,7 +8590,7 @@ async function postFixComment(params) {
|
|
|
8066
8590
|
Refresh the page in order to see the changes.`,
|
|
8067
8591
|
pull_number: pullRequest,
|
|
8068
8592
|
commit_id: commitSha,
|
|
8069
|
-
path:
|
|
8593
|
+
path: path13,
|
|
8070
8594
|
line: startLine
|
|
8071
8595
|
});
|
|
8072
8596
|
const commentId = commentRes.data.id;
|
|
@@ -8345,54 +8869,37 @@ async function handleAutoPr(params) {
|
|
|
8345
8869
|
|
|
8346
8870
|
// src/features/analysis/git.ts
|
|
8347
8871
|
import Debug10 from "debug";
|
|
8348
|
-
import { simpleGit as simpleGit2 } from "simple-git";
|
|
8349
8872
|
var debug10 = Debug10("mobbdev:git");
|
|
8350
|
-
var GIT_NOT_INITIALIZED_ERROR_MESSAGE = "not a git repository";
|
|
8351
8873
|
async function getGitInfo(srcDirPath) {
|
|
8352
8874
|
debug10("getting git info for %s", srcDirPath);
|
|
8353
|
-
const
|
|
8354
|
-
baseDir: srcDirPath,
|
|
8355
|
-
maxConcurrentProcesses: 1,
|
|
8356
|
-
trimmed: true
|
|
8357
|
-
});
|
|
8358
|
-
let repoUrl = "";
|
|
8359
|
-
let hash = "";
|
|
8360
|
-
let reference = "";
|
|
8875
|
+
const gitService = new GitService(srcDirPath);
|
|
8361
8876
|
try {
|
|
8362
|
-
|
|
8363
|
-
|
|
8364
|
-
|
|
8877
|
+
const validationResult = await gitService.validateRepository();
|
|
8878
|
+
if (!validationResult.isValid) {
|
|
8879
|
+
debug10("folder is not a git repo");
|
|
8880
|
+
return {
|
|
8881
|
+
success: false,
|
|
8882
|
+
hash: void 0,
|
|
8883
|
+
reference: void 0,
|
|
8884
|
+
repoUrl: void 0
|
|
8885
|
+
};
|
|
8886
|
+
}
|
|
8887
|
+
const gitInfo = await gitService.getGitInfo();
|
|
8888
|
+
return {
|
|
8889
|
+
success: true,
|
|
8890
|
+
...gitInfo
|
|
8891
|
+
};
|
|
8365
8892
|
} catch (e) {
|
|
8366
8893
|
if (e instanceof Error) {
|
|
8367
8894
|
debug10("failed to run git %o", e);
|
|
8368
8895
|
if (e.message.includes(" spawn ")) {
|
|
8369
8896
|
debug10("git cli not installed");
|
|
8370
|
-
} else if (e.message.includes(GIT_NOT_INITIALIZED_ERROR_MESSAGE)) {
|
|
8371
|
-
debug10("folder is not a git repo");
|
|
8372
|
-
return {
|
|
8373
|
-
success: false,
|
|
8374
|
-
hash: void 0,
|
|
8375
|
-
reference: void 0,
|
|
8376
|
-
repoUrl: void 0
|
|
8377
|
-
};
|
|
8378
8897
|
} else {
|
|
8379
8898
|
throw e;
|
|
8380
8899
|
}
|
|
8381
8900
|
}
|
|
8382
8901
|
throw e;
|
|
8383
8902
|
}
|
|
8384
|
-
if (repoUrl.endsWith(".git")) {
|
|
8385
|
-
repoUrl = repoUrl.slice(0, -".git".length);
|
|
8386
|
-
}
|
|
8387
|
-
if (repoUrl.startsWith("git@github.com:")) {
|
|
8388
|
-
repoUrl = repoUrl.replace("git@github.com:", "https://github.com/");
|
|
8389
|
-
}
|
|
8390
|
-
return {
|
|
8391
|
-
success: true,
|
|
8392
|
-
repoUrl,
|
|
8393
|
-
hash,
|
|
8394
|
-
reference
|
|
8395
|
-
};
|
|
8396
8903
|
}
|
|
8397
8904
|
|
|
8398
8905
|
// src/features/analysis/graphql/gql.ts
|
|
@@ -8408,6 +8915,7 @@ import Debug11 from "debug";
|
|
|
8408
8915
|
import { createClient } from "graphql-ws";
|
|
8409
8916
|
import { HttpsProxyAgent } from "https-proxy-agent";
|
|
8410
8917
|
import WebSocket from "ws";
|
|
8918
|
+
var DEFAULT_API_URL = "https://api.mobb.ai/v1/graphql";
|
|
8411
8919
|
var debug11 = Debug11("mobbdev:subscribe");
|
|
8412
8920
|
var SUBSCRIPTION_TIMEOUT_MS = 30 * 60 * 1e3;
|
|
8413
8921
|
function createWSClient(options) {
|
|
@@ -8439,10 +8947,11 @@ function subscribe(query, variables, callback, wsClientOptions) {
|
|
|
8439
8947
|
return new Promise((resolve, reject) => {
|
|
8440
8948
|
let timer = null;
|
|
8441
8949
|
const { timeoutInMs = SUBSCRIPTION_TIMEOUT_MS } = wsClientOptions;
|
|
8950
|
+
const API_URL2 = process.env["API_URL"] || DEFAULT_API_URL;
|
|
8442
8951
|
const client = createWSClient({
|
|
8443
8952
|
...wsClientOptions,
|
|
8444
8953
|
websocket: WebSocket,
|
|
8445
|
-
url:
|
|
8954
|
+
url: API_URL2.replace("http", "ws")
|
|
8446
8955
|
});
|
|
8447
8956
|
const unsubscribe = client.subscribe(
|
|
8448
8957
|
{ query, variables },
|
|
@@ -8905,13 +9414,13 @@ var GQLClient = class {
|
|
|
8905
9414
|
};
|
|
8906
9415
|
|
|
8907
9416
|
// src/features/analysis/pack.ts
|
|
8908
|
-
import
|
|
8909
|
-
import
|
|
9417
|
+
import fs6 from "fs";
|
|
9418
|
+
import path7 from "path";
|
|
8910
9419
|
import AdmZip from "adm-zip";
|
|
8911
9420
|
import Debug13 from "debug";
|
|
8912
9421
|
import { globby } from "globby";
|
|
8913
|
-
import { isBinary } from "istextorbinary";
|
|
8914
|
-
import { simpleGit as
|
|
9422
|
+
import { isBinary as isBinary2 } from "istextorbinary";
|
|
9423
|
+
import { simpleGit as simpleGit2 } from "simple-git";
|
|
8915
9424
|
import { parseStringPromise } from "xml2js";
|
|
8916
9425
|
import { z as z28 } from "zod";
|
|
8917
9426
|
var debug13 = Debug13("mobbdev:pack");
|
|
@@ -8940,7 +9449,7 @@ async function pack(srcDirPath, vulnFiles, isIncludeAllFiles = false) {
|
|
|
8940
9449
|
debug13("pack folder %s", srcDirPath);
|
|
8941
9450
|
let git = void 0;
|
|
8942
9451
|
try {
|
|
8943
|
-
git =
|
|
9452
|
+
git = simpleGit2({
|
|
8944
9453
|
baseDir: srcDirPath,
|
|
8945
9454
|
maxConcurrentProcesses: 1,
|
|
8946
9455
|
trimmed: true
|
|
@@ -8972,23 +9481,23 @@ async function pack(srcDirPath, vulnFiles, isIncludeAllFiles = false) {
|
|
|
8972
9481
|
const zip = new AdmZip();
|
|
8973
9482
|
debug13("compressing files");
|
|
8974
9483
|
for (const filepath of filepaths) {
|
|
8975
|
-
const absFilepath =
|
|
9484
|
+
const absFilepath = path7.join(srcDirPath, filepath.toString());
|
|
8976
9485
|
if (!isIncludeAllFiles) {
|
|
8977
9486
|
vulnFiles = vulnFiles.concat(getManifestFilesSuffixes());
|
|
8978
9487
|
if (!endsWithAny(
|
|
8979
|
-
absFilepath.toString().replaceAll(
|
|
9488
|
+
absFilepath.toString().replaceAll(path7.win32.sep, path7.posix.sep),
|
|
8980
9489
|
vulnFiles
|
|
8981
9490
|
)) {
|
|
8982
9491
|
debug13("ignoring %s because it is not a vulnerability file", filepath);
|
|
8983
9492
|
continue;
|
|
8984
9493
|
}
|
|
8985
9494
|
}
|
|
8986
|
-
if (
|
|
9495
|
+
if (fs6.lstatSync(absFilepath).size > MAX_FILE_SIZE) {
|
|
8987
9496
|
debug13("ignoring %s because the size is > 5MB", filepath);
|
|
8988
9497
|
continue;
|
|
8989
9498
|
}
|
|
8990
|
-
const data = git ? await git.showBuffer([`HEAD:./${filepath}`]) :
|
|
8991
|
-
if (
|
|
9499
|
+
const data = git ? await git.showBuffer([`HEAD:./${filepath}`]) : fs6.readFileSync(absFilepath);
|
|
9500
|
+
if (isBinary2(null, data)) {
|
|
8992
9501
|
debug13("ignoring %s because is seems to be a binary file", filepath);
|
|
8993
9502
|
continue;
|
|
8994
9503
|
}
|
|
@@ -9082,14 +9591,14 @@ async function snykArticlePrompt() {
|
|
|
9082
9591
|
}
|
|
9083
9592
|
|
|
9084
9593
|
// src/features/analysis/scanners/checkmarx.ts
|
|
9085
|
-
import { createRequire } from "
|
|
9594
|
+
import { createRequire } from "module";
|
|
9086
9595
|
|
|
9087
9596
|
// src/post_install/constants.mjs
|
|
9088
9597
|
var cxOperatingSystemSupportMessage = `Your operating system does not support checkmarx.
|
|
9089
9598
|
You can see the list of supported operating systems here: https://github.com/Checkmarx/ast-cli#releases`;
|
|
9090
9599
|
|
|
9091
9600
|
// src/utils/child_process.ts
|
|
9092
|
-
import cp from "
|
|
9601
|
+
import cp from "child_process";
|
|
9093
9602
|
import Debug14 from "debug";
|
|
9094
9603
|
import * as process2 from "process";
|
|
9095
9604
|
function createFork({ args, processPath, name }, options) {
|
|
@@ -9108,16 +9617,16 @@ function createSpawn({ args, processPath, name, cwd }, options) {
|
|
|
9108
9617
|
return createChildProcess({ childProcess: child, name }, options);
|
|
9109
9618
|
}
|
|
9110
9619
|
function createChildProcess({ childProcess, name }, options) {
|
|
9111
|
-
const
|
|
9620
|
+
const debug20 = Debug14(`mobbdev:${name}`);
|
|
9112
9621
|
const { display } = options;
|
|
9113
9622
|
return new Promise((resolve, reject) => {
|
|
9114
9623
|
let out = "";
|
|
9115
9624
|
const onData = (chunk) => {
|
|
9116
|
-
|
|
9625
|
+
debug20(`chunk received from ${name} std ${chunk}`);
|
|
9117
9626
|
out += chunk;
|
|
9118
9627
|
};
|
|
9119
9628
|
if (!childProcess?.stdout || !childProcess?.stderr) {
|
|
9120
|
-
|
|
9629
|
+
debug20(`unable to fork ${name}`);
|
|
9121
9630
|
reject(new Error(`unable to fork ${name}`));
|
|
9122
9631
|
}
|
|
9123
9632
|
childProcess.stdout?.on("data", onData);
|
|
@@ -9127,11 +9636,11 @@ function createChildProcess({ childProcess, name }, options) {
|
|
|
9127
9636
|
childProcess.stderr?.pipe(process2.stderr);
|
|
9128
9637
|
}
|
|
9129
9638
|
childProcess.on("exit", (code) => {
|
|
9130
|
-
|
|
9639
|
+
debug20(`${name} exit code ${code}`);
|
|
9131
9640
|
resolve({ message: out, code });
|
|
9132
9641
|
});
|
|
9133
9642
|
childProcess.on("error", (err) => {
|
|
9134
|
-
|
|
9643
|
+
debug20(`${name} error %o`, err);
|
|
9135
9644
|
reject(err);
|
|
9136
9645
|
});
|
|
9137
9646
|
});
|
|
@@ -9143,12 +9652,12 @@ import Debug15 from "debug";
|
|
|
9143
9652
|
import { existsSync } from "fs";
|
|
9144
9653
|
import { createSpinner as createSpinner2 } from "nanospinner";
|
|
9145
9654
|
import { type } from "os";
|
|
9146
|
-
import
|
|
9655
|
+
import path8 from "path";
|
|
9147
9656
|
var debug14 = Debug15("mobbdev:checkmarx");
|
|
9148
9657
|
var require2 = createRequire(import.meta.url);
|
|
9149
9658
|
var getCheckmarxPath = () => {
|
|
9150
|
-
const
|
|
9151
|
-
const cxFileName =
|
|
9659
|
+
const os3 = type();
|
|
9660
|
+
const cxFileName = os3 === "Windows_NT" ? "cx.exe" : "cx";
|
|
9152
9661
|
try {
|
|
9153
9662
|
return require2.resolve(`.bin/${cxFileName}`);
|
|
9154
9663
|
} catch (e) {
|
|
@@ -9203,9 +9712,9 @@ async function getCheckmarxReport({ reportPath, repositoryRoot, branch, projectN
|
|
|
9203
9712
|
await startCheckmarxConfigationPrompt();
|
|
9204
9713
|
await validateCheckamxCredentials();
|
|
9205
9714
|
}
|
|
9206
|
-
const extension =
|
|
9207
|
-
const filePath =
|
|
9208
|
-
const fileName =
|
|
9715
|
+
const extension = path8.extname(reportPath);
|
|
9716
|
+
const filePath = path8.dirname(reportPath);
|
|
9717
|
+
const fileName = path8.basename(reportPath, extension);
|
|
9209
9718
|
const checkmarxCommandArgs = getCheckmarxCommandArgs({
|
|
9210
9719
|
repoPath: repositoryRoot,
|
|
9211
9720
|
branch,
|
|
@@ -9258,7 +9767,7 @@ async function validateCheckamxCredentials() {
|
|
|
9258
9767
|
}
|
|
9259
9768
|
|
|
9260
9769
|
// src/features/analysis/scanners/snyk.ts
|
|
9261
|
-
import { createRequire as createRequire2 } from "
|
|
9770
|
+
import { createRequire as createRequire2 } from "module";
|
|
9262
9771
|
import chalk4 from "chalk";
|
|
9263
9772
|
import Debug16 from "debug";
|
|
9264
9773
|
import { createSpinner as createSpinner3 } from "nanospinner";
|
|
@@ -9274,8 +9783,8 @@ async function forkSnyk(args, { display }) {
|
|
|
9274
9783
|
}
|
|
9275
9784
|
async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
|
|
9276
9785
|
debug15("get snyk report start %s %s", reportPath, repoRoot);
|
|
9277
|
-
const
|
|
9278
|
-
const { message: configMessage } =
|
|
9786
|
+
const config5 = await forkSnyk(["config"], { display: false });
|
|
9787
|
+
const { message: configMessage } = config5;
|
|
9279
9788
|
if (!configMessage.includes("api: ")) {
|
|
9280
9789
|
const snykLoginSpinner = createSpinner3().start();
|
|
9281
9790
|
if (!skipPrompts) {
|
|
@@ -9287,7 +9796,7 @@ async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
|
|
|
9287
9796
|
snykLoginSpinner.update({
|
|
9288
9797
|
text: "\u{1F513} Waiting for Snyk login to complete"
|
|
9289
9798
|
});
|
|
9290
|
-
debug15("no token in the config %s",
|
|
9799
|
+
debug15("no token in the config %s", config5);
|
|
9291
9800
|
await forkSnyk(["auth"], { display: true });
|
|
9292
9801
|
snykLoginSpinner.success({ text: "\u{1F513} Login to Snyk Successful" });
|
|
9293
9802
|
}
|
|
@@ -9324,8 +9833,14 @@ async function uploadFile({
|
|
|
9324
9833
|
file,
|
|
9325
9834
|
url,
|
|
9326
9835
|
uploadKey,
|
|
9327
|
-
uploadFields
|
|
9836
|
+
uploadFields,
|
|
9837
|
+
logger: logger2
|
|
9328
9838
|
}) {
|
|
9839
|
+
const logInfo2 = logger2 || ((_message, _data) => {
|
|
9840
|
+
});
|
|
9841
|
+
logInfo2(`FileUpload: upload file start ${url}`);
|
|
9842
|
+
logInfo2(`FileUpload: upload fields`, uploadFields);
|
|
9843
|
+
logInfo2(`FileUpload: upload key ${uploadKey}`);
|
|
9329
9844
|
debug16("upload file start %s", url);
|
|
9330
9845
|
debug16("upload fields %o", uploadFields);
|
|
9331
9846
|
debug16("upload key %s", uploadKey);
|
|
@@ -9338,9 +9853,11 @@ async function uploadFile({
|
|
|
9338
9853
|
}
|
|
9339
9854
|
if (typeof file === "string") {
|
|
9340
9855
|
debug16("upload file from path %s", file);
|
|
9856
|
+
logInfo2(`FileUpload: upload file from path ${file}`);
|
|
9341
9857
|
form.append("file", await fileFrom(file));
|
|
9342
9858
|
} else {
|
|
9343
9859
|
debug16("upload file from buffer");
|
|
9860
|
+
logInfo2(`FileUpload: upload file from buffer`);
|
|
9344
9861
|
form.append("file", new File([file], "file"));
|
|
9345
9862
|
}
|
|
9346
9863
|
const agent = getProxyAgent(url);
|
|
@@ -9351,9 +9868,11 @@ async function uploadFile({
|
|
|
9351
9868
|
});
|
|
9352
9869
|
if (!response.ok) {
|
|
9353
9870
|
debug16("error from S3 %s %s", response.body, response.status);
|
|
9871
|
+
logInfo2(`FileUpload: error from S3 ${response.body} ${response.status}`);
|
|
9354
9872
|
throw new Error(`Failed to upload the file: ${response.status}`);
|
|
9355
9873
|
}
|
|
9356
9874
|
debug16("upload file done");
|
|
9875
|
+
logInfo2(`FileUpload: upload file done`);
|
|
9357
9876
|
}
|
|
9358
9877
|
|
|
9359
9878
|
// src/features/analysis/index.ts
|
|
@@ -9388,7 +9907,7 @@ async function downloadRepo({
|
|
|
9388
9907
|
const { createSpinner: createSpinner5 } = Spinner2({ ci });
|
|
9389
9908
|
const repoSpinner = createSpinner5("\u{1F4BE} Downloading Repo").start();
|
|
9390
9909
|
debug17("download repo %s %s %s", repoUrl, dirname);
|
|
9391
|
-
const zipFilePath =
|
|
9910
|
+
const zipFilePath = path9.join(dirname, "repo.zip");
|
|
9392
9911
|
debug17("download URL: %s auth headers: %o", downloadUrl, authHeaders);
|
|
9393
9912
|
const response = await fetch4(downloadUrl, {
|
|
9394
9913
|
method: "GET",
|
|
@@ -9401,19 +9920,19 @@ async function downloadRepo({
|
|
|
9401
9920
|
repoSpinner.error({ text: "\u{1F4BE} Repo download failed" });
|
|
9402
9921
|
throw new Error(`Can't access ${chalk5.bold(repoUrl)}`);
|
|
9403
9922
|
}
|
|
9404
|
-
const fileWriterStream =
|
|
9923
|
+
const fileWriterStream = fs7.createWriteStream(zipFilePath);
|
|
9405
9924
|
if (!response.body) {
|
|
9406
9925
|
throw new Error("Response body is empty");
|
|
9407
9926
|
}
|
|
9408
9927
|
await pipeline(response.body, fileWriterStream);
|
|
9409
9928
|
await extract(zipFilePath, { dir: dirname });
|
|
9410
|
-
const repoRoot =
|
|
9929
|
+
const repoRoot = fs7.readdirSync(dirname, { withFileTypes: true }).filter((dirent) => dirent.isDirectory()).map((dirent) => dirent.name)[0];
|
|
9411
9930
|
if (!repoRoot) {
|
|
9412
9931
|
throw new Error("Repo root not found");
|
|
9413
9932
|
}
|
|
9414
9933
|
debug17("repo root %s", repoRoot);
|
|
9415
9934
|
repoSpinner.success({ text: "\u{1F4BE} Repo downloaded successfully" });
|
|
9416
|
-
return
|
|
9935
|
+
return path9.join(dirname, repoRoot);
|
|
9417
9936
|
}
|
|
9418
9937
|
var getReportUrl = ({
|
|
9419
9938
|
organizationId,
|
|
@@ -9523,7 +10042,7 @@ async function getReport(params, { skipPrompts }) {
|
|
|
9523
10042
|
authHeaders: scm.getAuthHeaders(),
|
|
9524
10043
|
downloadUrl
|
|
9525
10044
|
});
|
|
9526
|
-
const reportPath =
|
|
10045
|
+
const reportPath = path9.join(dirname, REPORT_DEFAULT_FILE_NAME);
|
|
9527
10046
|
switch (scanner) {
|
|
9528
10047
|
case "snyk":
|
|
9529
10048
|
await getSnykReport(reportPath, repositoryRoot, { skipPrompts });
|
|
@@ -9899,7 +10418,7 @@ async function _zipAndUploadRepo({
|
|
|
9899
10418
|
const zippingSpinner = createSpinner4("\u{1F4E6} Zipping repo").start();
|
|
9900
10419
|
let zipBuffer;
|
|
9901
10420
|
let gitInfo = { success: false };
|
|
9902
|
-
if (srcFileStatus.isFile() &&
|
|
10421
|
+
if (srcFileStatus.isFile() && path9.extname(srcPath).toLowerCase() === ".fpr") {
|
|
9903
10422
|
zipBuffer = await repackFpr(srcPath);
|
|
9904
10423
|
} else {
|
|
9905
10424
|
gitInfo = await getGitInfo(srcPath);
|
|
@@ -10246,7 +10765,7 @@ import chalk8 from "chalk";
|
|
|
10246
10765
|
|
|
10247
10766
|
// src/args/validation.ts
|
|
10248
10767
|
import chalk7 from "chalk";
|
|
10249
|
-
import
|
|
10768
|
+
import path10 from "path";
|
|
10250
10769
|
import { z as z30 } from "zod";
|
|
10251
10770
|
function throwRepoUrlErrorMessage({
|
|
10252
10771
|
error,
|
|
@@ -10290,7 +10809,7 @@ function validateRepoUrl(args) {
|
|
|
10290
10809
|
}
|
|
10291
10810
|
var supportExtensions = [".json", ".xml", ".fpr", ".sarif"];
|
|
10292
10811
|
function validateReportFileFormat(reportFile) {
|
|
10293
|
-
if (!supportExtensions.includes(
|
|
10812
|
+
if (!supportExtensions.includes(path10.extname(reportFile))) {
|
|
10294
10813
|
throw new CliError(
|
|
10295
10814
|
`
|
|
10296
10815
|
${chalk7.bold(
|
|
@@ -10332,7 +10851,7 @@ function analyzeBuilder(yargs2) {
|
|
|
10332
10851
|
).help();
|
|
10333
10852
|
}
|
|
10334
10853
|
function validateAnalyzeOptions(argv) {
|
|
10335
|
-
if (argv.f && !
|
|
10854
|
+
if (argv.f && !fs8.existsSync(argv.f)) {
|
|
10336
10855
|
throw new CliError(`
|
|
10337
10856
|
Can't access ${chalk8.bold(argv.f)}`);
|
|
10338
10857
|
}
|
|
@@ -10385,6 +10904,7 @@ import {
|
|
|
10385
10904
|
|
|
10386
10905
|
// src/mcp/Logger.ts
|
|
10387
10906
|
var logglerUrl = "http://localhost:4444/log";
|
|
10907
|
+
var isTestEnvironment = process.env["VITEST"] || process.env["TEST"];
|
|
10388
10908
|
var Logger = class {
|
|
10389
10909
|
log(message, level = "info", data) {
|
|
10390
10910
|
const logMessage = {
|
|
@@ -10393,13 +10913,15 @@ var Logger = class {
|
|
|
10393
10913
|
message,
|
|
10394
10914
|
data
|
|
10395
10915
|
};
|
|
10396
|
-
|
|
10397
|
-
|
|
10398
|
-
|
|
10399
|
-
|
|
10400
|
-
|
|
10401
|
-
|
|
10402
|
-
|
|
10916
|
+
if (!isTestEnvironment) {
|
|
10917
|
+
try {
|
|
10918
|
+
fetch(logglerUrl, {
|
|
10919
|
+
method: "POST",
|
|
10920
|
+
headers: { "Content-Type": "application/json" },
|
|
10921
|
+
body: JSON.stringify(logMessage)
|
|
10922
|
+
});
|
|
10923
|
+
} catch (error) {
|
|
10924
|
+
}
|
|
10403
10925
|
}
|
|
10404
10926
|
}
|
|
10405
10927
|
};
|
|
@@ -10408,722 +10930,99 @@ var logInfo = (message, data) => logger.log(message, "info", data);
|
|
|
10408
10930
|
var logError = (message, data) => logger.log(message, "error", data);
|
|
10409
10931
|
var logWarn = (message, data) => logger.log(message, "warn", data);
|
|
10410
10932
|
var logDebug = (message, data) => logger.log(message, "debug", data);
|
|
10411
|
-
var
|
|
10412
|
-
|
|
10413
|
-
// src/mcp/core/ToolRegistry.ts
|
|
10414
|
-
var ToolRegistry = class {
|
|
10415
|
-
constructor() {
|
|
10416
|
-
__publicField(this, "tools", /* @__PURE__ */ new Map());
|
|
10417
|
-
}
|
|
10418
|
-
registerTool(tool) {
|
|
10419
|
-
if (this.tools.has(tool.name)) {
|
|
10420
|
-
logWarn(`Tool ${tool.name} is already registered, overwriting`, {
|
|
10421
|
-
toolName: tool.name
|
|
10422
|
-
});
|
|
10423
|
-
}
|
|
10424
|
-
this.tools.set(tool.name, tool);
|
|
10425
|
-
logDebug(`Tool registered: ${tool.name}`, {
|
|
10426
|
-
toolName: tool.name,
|
|
10427
|
-
description: tool.definition.description
|
|
10428
|
-
});
|
|
10429
|
-
}
|
|
10430
|
-
getTool(name) {
|
|
10431
|
-
return this.tools.get(name);
|
|
10432
|
-
}
|
|
10433
|
-
getAllTools() {
|
|
10434
|
-
return Array.from(this.tools.values()).map((tool) => tool.definition);
|
|
10435
|
-
}
|
|
10436
|
-
getToolNames() {
|
|
10437
|
-
return Array.from(this.tools.keys());
|
|
10438
|
-
}
|
|
10439
|
-
hasTool(name) {
|
|
10440
|
-
return this.tools.has(name);
|
|
10441
|
-
}
|
|
10442
|
-
getToolCount() {
|
|
10443
|
-
return this.tools.size;
|
|
10444
|
-
}
|
|
10445
|
-
};
|
|
10446
|
-
|
|
10447
|
-
// src/mcp/core/McpServer.ts
|
|
10448
|
-
var McpServer = class {
|
|
10449
|
-
constructor(config4) {
|
|
10450
|
-
__publicField(this, "server");
|
|
10451
|
-
__publicField(this, "toolRegistry");
|
|
10452
|
-
__publicField(this, "isEventHandlersSetup", false);
|
|
10453
|
-
this.server = new Server(
|
|
10454
|
-
{
|
|
10455
|
-
name: config4.name,
|
|
10456
|
-
version: config4.version
|
|
10457
|
-
},
|
|
10458
|
-
{
|
|
10459
|
-
capabilities: {
|
|
10460
|
-
tools: {}
|
|
10461
|
-
}
|
|
10462
|
-
}
|
|
10463
|
-
);
|
|
10464
|
-
this.toolRegistry = new ToolRegistry();
|
|
10465
|
-
this.setupHandlers();
|
|
10466
|
-
this.setupProcessEventHandlers();
|
|
10467
|
-
logInfo("MCP server instance created", config4);
|
|
10468
|
-
}
|
|
10469
|
-
setupProcessEventHandlers() {
|
|
10470
|
-
if (this.isEventHandlersSetup) {
|
|
10471
|
-
logDebug("Process event handlers already setup, skipping");
|
|
10472
|
-
return;
|
|
10473
|
-
}
|
|
10474
|
-
const signals = {
|
|
10475
|
-
SIGINT: "MCP server interrupted",
|
|
10476
|
-
SIGTERM: "MCP server terminated",
|
|
10477
|
-
exit: "MCP server exiting",
|
|
10478
|
-
uncaughtException: "Uncaught exception in MCP server",
|
|
10479
|
-
unhandledRejection: "Unhandled promise rejection in MCP server",
|
|
10480
|
-
warning: "Warning in MCP server"
|
|
10481
|
-
};
|
|
10482
|
-
Object.entries(signals).forEach(([signal, message]) => {
|
|
10483
|
-
process.on(
|
|
10484
|
-
signal,
|
|
10485
|
-
(error) => {
|
|
10486
|
-
if (error && signal !== "exit") {
|
|
10487
|
-
logError(`${message}`, { error, signal });
|
|
10488
|
-
} else {
|
|
10489
|
-
logInfo(message, { signal });
|
|
10490
|
-
}
|
|
10491
|
-
if (signal === "SIGINT" || signal === "SIGTERM") {
|
|
10492
|
-
process.exit(0);
|
|
10493
|
-
}
|
|
10494
|
-
if (signal === "uncaughtException") {
|
|
10495
|
-
process.exit(1);
|
|
10496
|
-
}
|
|
10497
|
-
}
|
|
10498
|
-
);
|
|
10499
|
-
});
|
|
10500
|
-
this.isEventHandlersSetup = true;
|
|
10501
|
-
logDebug("Process event handlers registered");
|
|
10502
|
-
}
|
|
10503
|
-
createShutdownPromise() {
|
|
10504
|
-
return new Promise((resolve) => {
|
|
10505
|
-
const cleanup = () => {
|
|
10506
|
-
logInfo("Process shutdown initiated");
|
|
10507
|
-
resolve();
|
|
10508
|
-
};
|
|
10509
|
-
process.once("SIGINT", cleanup);
|
|
10510
|
-
process.once("SIGTERM", cleanup);
|
|
10511
|
-
});
|
|
10512
|
-
}
|
|
10513
|
-
setupHandlers() {
|
|
10514
|
-
this.server.setRequestHandler(
|
|
10515
|
-
ListToolsRequestSchema,
|
|
10516
|
-
async (request) => {
|
|
10517
|
-
logInfo("Received list_tools request", { params: request.params });
|
|
10518
|
-
const tools = this.toolRegistry.getAllTools();
|
|
10519
|
-
const response = { tools };
|
|
10520
|
-
logInfo("Returning list_tools response", {
|
|
10521
|
-
toolCount: tools.length,
|
|
10522
|
-
toolNames: tools.map((t) => t.name),
|
|
10523
|
-
response
|
|
10524
|
-
});
|
|
10525
|
-
return response;
|
|
10526
|
-
}
|
|
10527
|
-
);
|
|
10528
|
-
this.server.setRequestHandler(
|
|
10529
|
-
CallToolRequestSchema,
|
|
10530
|
-
async (request) => {
|
|
10531
|
-
const { name, arguments: args } = request.params;
|
|
10532
|
-
logInfo(`Received call tool request for ${name}`, { name, args });
|
|
10533
|
-
try {
|
|
10534
|
-
const tool = this.toolRegistry.getTool(name);
|
|
10535
|
-
if (!tool) {
|
|
10536
|
-
const errorMsg = `Unknown tool: ${name}`;
|
|
10537
|
-
logWarn(errorMsg, {
|
|
10538
|
-
name,
|
|
10539
|
-
availableTools: this.toolRegistry.getToolNames()
|
|
10540
|
-
});
|
|
10541
|
-
throw new Error(errorMsg);
|
|
10542
|
-
}
|
|
10543
|
-
logDebug(`Executing tool: ${name}`, { args });
|
|
10544
|
-
const response = await tool.execute(args);
|
|
10545
|
-
const serializedResponse = JSON.parse(JSON.stringify(response));
|
|
10546
|
-
logInfo(`Tool ${name} executed successfully`, {
|
|
10547
|
-
responseType: typeof response,
|
|
10548
|
-
hasContent: !!serializedResponse.content
|
|
10549
|
-
});
|
|
10550
|
-
return serializedResponse;
|
|
10551
|
-
} catch (error) {
|
|
10552
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
10553
|
-
logError(`Error executing tool ${name}: ${errorMessage}`, {
|
|
10554
|
-
error,
|
|
10555
|
-
toolName: name,
|
|
10556
|
-
args
|
|
10557
|
-
});
|
|
10558
|
-
throw error;
|
|
10559
|
-
}
|
|
10560
|
-
}
|
|
10561
|
-
);
|
|
10562
|
-
logDebug("MCP server handlers registered");
|
|
10563
|
-
}
|
|
10564
|
-
registerTool(tool) {
|
|
10565
|
-
this.toolRegistry.registerTool({
|
|
10566
|
-
name: tool.name,
|
|
10567
|
-
definition: tool.definition,
|
|
10568
|
-
execute: tool.execute
|
|
10569
|
-
});
|
|
10570
|
-
logDebug(`Tool registered: ${tool.name}`);
|
|
10571
|
-
}
|
|
10572
|
-
async start() {
|
|
10573
|
-
try {
|
|
10574
|
-
logDebug("Starting MCP server");
|
|
10575
|
-
const transport = new StdioServerTransport();
|
|
10576
|
-
await this.server.connect(transport);
|
|
10577
|
-
logInfo("MCP server is running on stdin/stdout");
|
|
10578
|
-
process.stdin.resume();
|
|
10579
|
-
await this.createShutdownPromise();
|
|
10580
|
-
await this.stop();
|
|
10581
|
-
} catch (error) {
|
|
10582
|
-
logError("Failed to start MCP server", { error });
|
|
10583
|
-
throw error;
|
|
10584
|
-
}
|
|
10585
|
-
}
|
|
10586
|
-
async stop() {
|
|
10587
|
-
logInfo("MCP server shutting down");
|
|
10588
|
-
}
|
|
10589
|
-
};
|
|
10590
|
-
|
|
10591
|
-
// src/mcp/services/GitService.ts
|
|
10592
|
-
import * as path9 from "path";
|
|
10593
|
-
import { simpleGit as simpleGit4 } from "simple-git";
|
|
10594
|
-
var GitService = class {
|
|
10595
|
-
constructor(repositoryPath) {
|
|
10596
|
-
__publicField(this, "git");
|
|
10597
|
-
__publicField(this, "repositoryPath");
|
|
10598
|
-
this.git = simpleGit4(repositoryPath, { binary: "git" });
|
|
10599
|
-
this.repositoryPath = repositoryPath;
|
|
10600
|
-
logDebug("Git service initialized", { repositoryPath });
|
|
10601
|
-
}
|
|
10602
|
-
/**
|
|
10603
|
-
* Validates that the path is a valid git repository
|
|
10604
|
-
*/
|
|
10605
|
-
async validateRepository() {
|
|
10606
|
-
logDebug("Validating git repository");
|
|
10607
|
-
try {
|
|
10608
|
-
const isRepo = await this.git.checkIsRepo();
|
|
10609
|
-
if (!isRepo) {
|
|
10610
|
-
const error = "Path is not a valid git repository";
|
|
10611
|
-
logError(error);
|
|
10612
|
-
return { isValid: false, error };
|
|
10613
|
-
}
|
|
10614
|
-
logDebug("Git repository validation successful");
|
|
10615
|
-
return { isValid: true };
|
|
10616
|
-
} catch (error) {
|
|
10617
|
-
const errorMessage = `Failed to verify git repository: ${error.message}`;
|
|
10618
|
-
logError(errorMessage, { error });
|
|
10619
|
-
return { isValid: false, error: errorMessage };
|
|
10620
|
-
}
|
|
10621
|
-
}
|
|
10622
|
-
/**
|
|
10623
|
-
* Gets the current git status and returns changed files
|
|
10624
|
-
*/
|
|
10625
|
-
async getChangedFiles() {
|
|
10626
|
-
logDebug("Getting git status");
|
|
10627
|
-
try {
|
|
10628
|
-
const status = await this.git.status();
|
|
10629
|
-
const gitRoot = await this.git.revparse(["--show-toplevel"]);
|
|
10630
|
-
const relativePathFromGitRoot = path9.relative(
|
|
10631
|
-
gitRoot,
|
|
10632
|
-
this.repositoryPath
|
|
10633
|
-
);
|
|
10634
|
-
const files = status.files.map((file) => {
|
|
10635
|
-
const gitRelativePath = file.path;
|
|
10636
|
-
if (relativePathFromGitRoot === "") {
|
|
10637
|
-
return gitRelativePath;
|
|
10638
|
-
}
|
|
10639
|
-
if (gitRelativePath.startsWith(relativePathFromGitRoot + "/")) {
|
|
10640
|
-
return gitRelativePath.substring(relativePathFromGitRoot.length + 1);
|
|
10641
|
-
}
|
|
10642
|
-
return path9.relative(
|
|
10643
|
-
this.repositoryPath,
|
|
10644
|
-
path9.join(gitRoot, gitRelativePath)
|
|
10645
|
-
);
|
|
10646
|
-
});
|
|
10647
|
-
logInfo("Git status retrieved", {
|
|
10648
|
-
fileCount: files.length,
|
|
10649
|
-
files: files.slice(0, 10),
|
|
10650
|
-
// Log first 10 files to avoid spam
|
|
10651
|
-
gitRoot,
|
|
10652
|
-
workingDir: this.repositoryPath,
|
|
10653
|
-
relativePathFromGitRoot
|
|
10654
|
-
});
|
|
10655
|
-
return { files, status };
|
|
10656
|
-
} catch (error) {
|
|
10657
|
-
const errorMessage = `Failed to get git status: ${error.message}`;
|
|
10658
|
-
logError(errorMessage, { error });
|
|
10659
|
-
throw new Error(errorMessage);
|
|
10660
|
-
}
|
|
10661
|
-
}
|
|
10662
|
-
};
|
|
10933
|
+
var log = logger.log;
|
|
10663
10934
|
|
|
10664
|
-
// src/mcp/services/
|
|
10665
|
-
import
|
|
10666
|
-
import
|
|
10667
|
-
|
|
10668
|
-
|
|
10669
|
-
|
|
10670
|
-
|
|
10671
|
-
|
|
10672
|
-
|
|
10673
|
-
|
|
10674
|
-
|
|
10675
|
-
|
|
10676
|
-
|
|
10677
|
-
|
|
10678
|
-
|
|
10679
|
-
|
|
10680
|
-
|
|
10681
|
-
|
|
10682
|
-
|
|
10683
|
-
|
|
10684
|
-
|
|
10685
|
-
|
|
10686
|
-
|
|
10687
|
-
|
|
10688
|
-
|
|
10689
|
-
|
|
10690
|
-
|
|
10691
|
-
|
|
10692
|
-
|
|
10693
|
-
|
|
10694
|
-
|
|
10695
|
-
|
|
10696
|
-
|
|
10697
|
-
|
|
10698
|
-
|
|
10699
|
-
|
|
10700
|
-
|
|
10701
|
-
|
|
10702
|
-
|
|
10703
|
-
|
|
10704
|
-
|
|
10705
|
-
|
|
10706
|
-
|
|
10707
|
-
|
|
10708
|
-
|
|
10709
|
-
|
|
10710
|
-
|
|
10711
|
-
|
|
10712
|
-
|
|
10713
|
-
|
|
10714
|
-
|
|
10715
|
-
|
|
10716
|
-
|
|
10717
|
-
|
|
10718
|
-
|
|
10719
|
-
|
|
10720
|
-
|
|
10721
|
-
|
|
10722
|
-
|
|
10723
|
-
|
|
10724
|
-
|
|
10725
|
-
|
|
10726
|
-
|
|
10727
|
-
|
|
10728
|
-
|
|
10729
|
-
|
|
10730
|
-
|
|
10731
|
-
|
|
10732
|
-
|
|
10733
|
-
|
|
10734
|
-
|
|
10735
|
-
".jpg",
|
|
10736
|
-
".jpeg",
|
|
10737
|
-
".gif",
|
|
10738
|
-
".svg",
|
|
10739
|
-
".ico",
|
|
10740
|
-
".webp",
|
|
10741
|
-
".bmp",
|
|
10742
|
-
".tiff",
|
|
10743
|
-
// Fonts
|
|
10744
|
-
".ttf",
|
|
10745
|
-
".otf",
|
|
10746
|
-
".woff",
|
|
10747
|
-
".woff2",
|
|
10748
|
-
".eot",
|
|
10749
|
-
// Archives
|
|
10750
|
-
".zip",
|
|
10751
|
-
".tar",
|
|
10752
|
-
".gz",
|
|
10753
|
-
".rar",
|
|
10754
|
-
".7z",
|
|
10755
|
-
// Logs and databases
|
|
10756
|
-
".log",
|
|
10757
|
-
".db",
|
|
10758
|
-
".sqlite",
|
|
10759
|
-
".sql",
|
|
10760
|
-
// Certificates and keys
|
|
10761
|
-
".pem",
|
|
10762
|
-
".crt",
|
|
10763
|
-
".key",
|
|
10764
|
-
".p12",
|
|
10765
|
-
".pfx",
|
|
10766
|
-
// IDE/Editor files
|
|
10767
|
-
".editorconfig",
|
|
10768
|
-
".sublime-project",
|
|
10769
|
-
".sublime-workspace",
|
|
10770
|
-
// System files
|
|
10771
|
-
".DS_Store",
|
|
10772
|
-
"Thumbs.db",
|
|
10773
|
-
// Coverage reports
|
|
10774
|
-
".lcov",
|
|
10775
|
-
// Compiled/binary files
|
|
10776
|
-
".exe",
|
|
10777
|
-
".dll",
|
|
10778
|
-
".so",
|
|
10779
|
-
".dylib",
|
|
10780
|
-
".class",
|
|
10781
|
-
".pyc",
|
|
10782
|
-
".pyo",
|
|
10783
|
-
".o",
|
|
10784
|
-
".obj",
|
|
10785
|
-
// Minified files
|
|
10786
|
-
".min.js",
|
|
10787
|
-
".min.css",
|
|
10788
|
-
".min.html",
|
|
10789
|
-
// Test files
|
|
10790
|
-
".test.js",
|
|
10791
|
-
".test.ts",
|
|
10792
|
-
".test.jsx",
|
|
10793
|
-
".test.tsx",
|
|
10794
|
-
".spec.js",
|
|
10795
|
-
".spec.ts",
|
|
10796
|
-
".spec.jsx",
|
|
10797
|
-
".spec.tsx",
|
|
10798
|
-
// TypeScript declaration files
|
|
10799
|
-
".d.ts",
|
|
10800
|
-
// Build/generated files
|
|
10801
|
-
".bundle.js",
|
|
10802
|
-
".chunk.js",
|
|
10803
|
-
// Build/CI files (exact filenames)
|
|
10804
|
-
"dockerfile",
|
|
10805
|
-
"jenkinsfile",
|
|
10806
|
-
// Lock files (ones without standard extensions)
|
|
10807
|
-
"go.sum",
|
|
10808
|
-
// Version control
|
|
10809
|
-
".gitignore",
|
|
10810
|
-
".gitattributes",
|
|
10811
|
-
".gitmodules",
|
|
10812
|
-
".gitkeep",
|
|
10813
|
-
".keep",
|
|
10814
|
-
".hgignore",
|
|
10815
|
-
// Node.js specific
|
|
10816
|
-
".nvmrc",
|
|
10817
|
-
".node-version",
|
|
10818
|
-
".npmrc",
|
|
10819
|
-
".yarnrc",
|
|
10820
|
-
".pnpmfile.cjs",
|
|
10821
|
-
// Language version files
|
|
10822
|
-
".ruby-version",
|
|
10823
|
-
".python-version",
|
|
10824
|
-
".rvmrc",
|
|
10825
|
-
".rbenv-version",
|
|
10826
|
-
".gvmrc",
|
|
10827
|
-
// Build tools and task runners
|
|
10828
|
-
"makefile",
|
|
10829
|
-
"rakefile",
|
|
10830
|
-
"gulpfile.js",
|
|
10831
|
-
"gruntfile.js",
|
|
10832
|
-
"webpack.config.js",
|
|
10833
|
-
"webpack.config.ts",
|
|
10834
|
-
"rollup.config.js",
|
|
10835
|
-
"vite.config.js",
|
|
10836
|
-
"vite.config.ts",
|
|
10837
|
-
"next.config.js",
|
|
10838
|
-
"nuxt.config.js",
|
|
10839
|
-
"tailwind.config.js",
|
|
10840
|
-
"postcss.config.js",
|
|
10841
|
-
// JavaScript/TypeScript config
|
|
10842
|
-
".babelrc",
|
|
10843
|
-
".babelrc.js",
|
|
10844
|
-
".swcrc",
|
|
10845
|
-
".browserslistrc",
|
|
10846
|
-
// Testing frameworks
|
|
10847
|
-
"jest.config.js",
|
|
10848
|
-
"jest.config.ts",
|
|
10849
|
-
"vitest.config.js",
|
|
10850
|
-
"karma.conf.js",
|
|
10851
|
-
"protractor.conf.js",
|
|
10852
|
-
"cypress.config.js",
|
|
10853
|
-
"playwright.config.js",
|
|
10854
|
-
".nycrc",
|
|
10855
|
-
".c8rc",
|
|
10856
|
-
// Linting/formatting configs
|
|
10857
|
-
".eslintrc",
|
|
10858
|
-
".eslintrc.js",
|
|
10859
|
-
".prettierrc",
|
|
10860
|
-
".prettierrc.js",
|
|
10861
|
-
".stylelintrc",
|
|
10862
|
-
".stylelintrc.js",
|
|
10863
|
-
// Package manager configs (ones without standard extensions)
|
|
10864
|
-
"pipfile",
|
|
10865
|
-
"gemfile",
|
|
10866
|
-
"go.mod",
|
|
10867
|
-
"project.clj",
|
|
10868
|
-
// Python specific
|
|
10869
|
-
"setup.py",
|
|
10870
|
-
"setup.cfg",
|
|
10871
|
-
"manifest.in",
|
|
10872
|
-
".pythonrc",
|
|
10873
|
-
// Documentation files (ones without standard extensions)
|
|
10874
|
-
"readme",
|
|
10875
|
-
"changelog",
|
|
10876
|
-
"authors",
|
|
10877
|
-
"contributors",
|
|
10878
|
-
// License and legal (ones without standard extensions)
|
|
10879
|
-
"license",
|
|
10880
|
-
"notice",
|
|
10881
|
-
"copyright",
|
|
10882
|
-
// Web specific
|
|
10883
|
-
".htaccess"
|
|
10884
|
-
];
|
|
10885
|
-
var FilePacking = class {
|
|
10886
|
-
isExcludedFileType(filepath) {
|
|
10887
|
-
const basename = path11.basename(filepath).toLowerCase();
|
|
10888
|
-
if (basename === ".env" || basename.startsWith(".env.")) {
|
|
10889
|
-
return true;
|
|
10890
|
-
}
|
|
10891
|
-
if (EXCLUDED_FILE_PATTERNS.some((pattern) => basename.endsWith(pattern))) {
|
|
10892
|
-
return true;
|
|
10893
|
-
}
|
|
10894
|
-
return false;
|
|
10895
|
-
}
|
|
10896
|
-
async packFiles(sourceDirectoryPath, filesToPack) {
|
|
10897
|
-
logInfo(`FilePacking: packing files from ${sourceDirectoryPath}`);
|
|
10898
|
-
const zip = new AdmZip2();
|
|
10899
|
-
let packedFilesCount = 0;
|
|
10900
|
-
logInfo("FilePacking: compressing files");
|
|
10901
|
-
for (const filepath of filesToPack) {
|
|
10902
|
-
const absoluteFilepath = path11.join(sourceDirectoryPath, filepath);
|
|
10903
|
-
if (this.isExcludedFileType(filepath)) {
|
|
10904
|
-
logInfo(
|
|
10905
|
-
`FilePacking: ignoring ${filepath} because it is an excluded file type`
|
|
10906
|
-
);
|
|
10907
|
-
continue;
|
|
10908
|
-
}
|
|
10909
|
-
if (!fs9.existsSync(absoluteFilepath)) {
|
|
10910
|
-
logInfo(`FilePacking: ignoring ${filepath} because it does not exist`);
|
|
10911
|
-
continue;
|
|
10912
|
-
}
|
|
10913
|
-
if (fs9.lstatSync(absoluteFilepath).size > MAX_FILE_SIZE2) {
|
|
10914
|
-
logInfo(
|
|
10915
|
-
`FilePacking: ignoring ${filepath} because the size is > ${MAX_FILE_SIZE2 / (1024 * 1024)}MB`
|
|
10916
|
-
);
|
|
10917
|
-
continue;
|
|
10918
|
-
}
|
|
10919
|
-
let data;
|
|
10920
|
-
try {
|
|
10921
|
-
data = fs9.readFileSync(absoluteFilepath);
|
|
10922
|
-
} catch (fsError) {
|
|
10923
|
-
logInfo(
|
|
10924
|
-
`FilePacking: failed to read ${filepath} from filesystem: ${fsError}`
|
|
10925
|
-
);
|
|
10926
|
-
continue;
|
|
10927
|
-
}
|
|
10928
|
-
if (isBinary2(null, data)) {
|
|
10929
|
-
logInfo(
|
|
10930
|
-
`FilePacking: ignoring ${filepath} because it seems to be a binary file`
|
|
10931
|
-
);
|
|
10932
|
-
continue;
|
|
10933
|
-
}
|
|
10934
|
-
zip.addFile(filepath, data);
|
|
10935
|
-
packedFilesCount++;
|
|
10936
|
-
}
|
|
10937
|
-
const zipBuffer = zip.toBuffer();
|
|
10938
|
-
logInfo(
|
|
10939
|
-
`FilePacking: read ${packedFilesCount} source files. total size: ${zipBuffer.length} bytes`
|
|
10940
|
-
);
|
|
10941
|
-
logInfo("FilePacking: Files packed successfully");
|
|
10942
|
-
return zipBuffer;
|
|
10943
|
-
}
|
|
10944
|
-
};
|
|
10945
|
-
|
|
10946
|
-
// src/mcp/services/FileUpload.ts
|
|
10947
|
-
import { HttpProxyAgent as HttpProxyAgent2 } from "http-proxy-agent";
|
|
10948
|
-
import { HttpsProxyAgent as HttpsProxyAgent3 } from "https-proxy-agent";
|
|
10949
|
-
var FileUpload = class {
|
|
10950
|
-
getProxyAgent(url) {
|
|
10951
|
-
const HTTPS_PROXY2 = process.env["HTTPS_PROXY"];
|
|
10952
|
-
const HTTP_PROXY2 = process.env["HTTP_PROXY"];
|
|
10953
|
-
try {
|
|
10954
|
-
const parsedUrl = new URL(url);
|
|
10955
|
-
const isHttp = parsedUrl.protocol === "http:";
|
|
10956
|
-
const isHttps = parsedUrl.protocol === "https:";
|
|
10957
|
-
const proxy = isHttps ? HTTPS_PROXY2 : isHttp ? HTTP_PROXY2 : null;
|
|
10958
|
-
if (proxy) {
|
|
10959
|
-
logInfo(`FileUpload: Using proxy ${proxy}`);
|
|
10960
|
-
return isHttps ? new HttpsProxyAgent3(proxy) : new HttpProxyAgent2(proxy);
|
|
10961
|
-
}
|
|
10962
|
-
} catch (err) {
|
|
10963
|
-
logInfo(
|
|
10964
|
-
`FileUpload: Skipping proxy for ${url}. Reason: ${err.message}`
|
|
10965
|
-
);
|
|
10966
|
-
}
|
|
10967
|
-
return void 0;
|
|
10968
|
-
}
|
|
10969
|
-
async uploadFile(options) {
|
|
10970
|
-
const { file, url, uploadKey, uploadFields } = options;
|
|
10971
|
-
logInfo(`FileUpload: upload file start ${url}`);
|
|
10972
|
-
logInfo(`FileUpload: upload fields`, uploadFields);
|
|
10973
|
-
logInfo(`FileUpload: upload key ${uploadKey}`);
|
|
10974
|
-
const {
|
|
10975
|
-
default: fetch5,
|
|
10976
|
-
File: File2,
|
|
10977
|
-
fileFrom: fileFrom2,
|
|
10978
|
-
FormData: FormData2
|
|
10979
|
-
} = await import("node-fetch");
|
|
10980
|
-
const form = new FormData2();
|
|
10981
|
-
Object.entries(uploadFields).forEach(([key, value]) => {
|
|
10982
|
-
form.append(key, value);
|
|
10983
|
-
});
|
|
10984
|
-
if (!form.has("key")) {
|
|
10985
|
-
form.append("key", uploadKey);
|
|
10986
|
-
}
|
|
10987
|
-
if (typeof file === "string") {
|
|
10988
|
-
logInfo(`FileUpload: upload file from path ${file}`);
|
|
10989
|
-
form.append("file", await fileFrom2(file));
|
|
10990
|
-
} else {
|
|
10991
|
-
logInfo(`FileUpload: upload file from buffer`);
|
|
10992
|
-
form.append("file", new File2([file], "file"));
|
|
10993
|
-
}
|
|
10994
|
-
const agent = this.getProxyAgent(url);
|
|
10995
|
-
const response = await fetch5(url, {
|
|
10996
|
-
method: "POST",
|
|
10997
|
-
body: form,
|
|
10998
|
-
agent
|
|
10999
|
-
});
|
|
11000
|
-
if (!response.ok) {
|
|
11001
|
-
logInfo(`FileUpload: error from S3 ${response.body} ${response.status}`);
|
|
11002
|
-
throw new Error(`Failed to upload the file: ${response.status}`);
|
|
11003
|
-
}
|
|
11004
|
-
logInfo(`FileUpload: upload file done`);
|
|
11005
|
-
}
|
|
11006
|
-
};
|
|
11007
|
-
|
|
11008
|
-
// src/mcp/services/McpGQLClient.ts
|
|
11009
|
-
import { GraphQLClient as GraphQLClient2 } from "graphql-request";
|
|
11010
|
-
import { v4 as uuidv42 } from "uuid";
|
|
11011
|
-
|
|
11012
|
-
// src/mcp/constants.ts
|
|
11013
|
-
var DEFAULT_API_URL = "https://api.mobb.ai/v1/graphql";
|
|
11014
|
-
var API_KEY_HEADER_NAME2 = "x-mobb-key";
|
|
11015
|
-
|
|
11016
|
-
// src/mcp/services/Subscribe.ts
|
|
11017
|
-
import Debug20 from "debug";
|
|
11018
|
-
import { createClient as createClient2 } from "graphql-ws";
|
|
11019
|
-
import { HttpsProxyAgent as HttpsProxyAgent4 } from "https-proxy-agent";
|
|
11020
|
-
import WebSocket2 from "ws";
|
|
11021
|
-
var debug19 = Debug20("mobbdev:subscribe");
|
|
11022
|
-
var SUBSCRIPTION_TIMEOUT_MS2 = 30 * 60 * 1e3;
|
|
11023
|
-
function createWSClient2(options) {
|
|
11024
|
-
const proxy = options.url.startsWith("wss://") && process.env["HTTPS_PROXY"] ? new HttpsProxyAgent4(process.env["HTTPS_PROXY"]) : options.url.startsWith("ws://") && process.env["HTTP_PROXY"] ? new HttpsProxyAgent4(process.env["HTTP_PROXY"]) : null;
|
|
11025
|
-
debug19(
|
|
11026
|
-
`Using proxy: ${proxy ? "yes" : "no"} with url: ${options.url} and with proxy: ${process.env["HTTP_PROXY"]} for the websocket connection`
|
|
11027
|
-
);
|
|
11028
|
-
const CustomWebSocket = class extends WebSocket2 {
|
|
11029
|
-
constructor(address, protocols) {
|
|
11030
|
-
super(address, protocols, proxy ? { agent: proxy } : void 0);
|
|
11031
|
-
}
|
|
11032
|
-
};
|
|
11033
|
-
return createClient2({
|
|
11034
|
-
//this is needed to prevent AWS from killing the connection
|
|
11035
|
-
//currently our load balancer has a 29s idle timeout
|
|
11036
|
-
keepAlive: 1e4,
|
|
11037
|
-
url: options.url,
|
|
11038
|
-
webSocketImpl: proxy ? CustomWebSocket : options.websocket || WebSocket2,
|
|
11039
|
-
connectionParams: () => {
|
|
11040
|
-
return {
|
|
11041
|
-
headers: options.type === "apiKey" ? {
|
|
11042
|
-
[API_KEY_HEADER_NAME2]: options.apiKey
|
|
11043
|
-
} : { authorization: `Bearer ${options.token}` }
|
|
11044
|
-
};
|
|
11045
|
-
}
|
|
11046
|
-
});
|
|
11047
|
-
}
|
|
11048
|
-
var Subscribe = class {
|
|
11049
|
-
static subscribe(query, variables, callback, wsClientOptions) {
|
|
11050
|
-
return new Promise((resolve, reject) => {
|
|
11051
|
-
let timer = null;
|
|
11052
|
-
const { timeoutInMs = SUBSCRIPTION_TIMEOUT_MS2 } = wsClientOptions;
|
|
11053
|
-
const API_URL2 = process.env["API_URL"] || DEFAULT_API_URL;
|
|
11054
|
-
const client = createWSClient2({
|
|
11055
|
-
...wsClientOptions,
|
|
11056
|
-
websocket: WebSocket2,
|
|
11057
|
-
url: API_URL2.replace("http", "ws")
|
|
11058
|
-
});
|
|
11059
|
-
const unsubscribe = client.subscribe(
|
|
11060
|
-
{ query, variables },
|
|
11061
|
-
{
|
|
11062
|
-
next: (data) => {
|
|
11063
|
-
function callbackResolve(data2) {
|
|
11064
|
-
unsubscribe();
|
|
11065
|
-
if (timer) {
|
|
11066
|
-
clearTimeout(timer);
|
|
11067
|
-
}
|
|
11068
|
-
resolve(data2);
|
|
11069
|
-
}
|
|
11070
|
-
function callbackReject(data2) {
|
|
11071
|
-
unsubscribe();
|
|
11072
|
-
if (timer) {
|
|
11073
|
-
clearTimeout(timer);
|
|
11074
|
-
}
|
|
11075
|
-
reject(data2);
|
|
11076
|
-
}
|
|
11077
|
-
if (!data.data) {
|
|
11078
|
-
reject(
|
|
11079
|
-
new Error(
|
|
11080
|
-
`Broken data object from graphQL subscribe: ${JSON.stringify(
|
|
11081
|
-
data
|
|
11082
|
-
)} for query: ${query}`
|
|
11083
|
-
)
|
|
11084
|
-
);
|
|
11085
|
-
} else {
|
|
11086
|
-
callback(callbackResolve, callbackReject, data.data);
|
|
11087
|
-
}
|
|
11088
|
-
},
|
|
11089
|
-
error: (error) => {
|
|
11090
|
-
if (timer) {
|
|
11091
|
-
clearTimeout(timer);
|
|
11092
|
-
}
|
|
11093
|
-
reject(error);
|
|
11094
|
-
},
|
|
11095
|
-
complete: () => {
|
|
11096
|
-
return;
|
|
11097
|
-
}
|
|
11098
|
-
}
|
|
11099
|
-
);
|
|
11100
|
-
if (typeof timeoutInMs === "number") {
|
|
11101
|
-
timer = setTimeout(() => {
|
|
11102
|
-
unsubscribe();
|
|
11103
|
-
reject(
|
|
11104
|
-
new Error(
|
|
11105
|
-
`Timeout expired for graphQL subscribe query: ${query} with timeout: ${timeoutInMs}`
|
|
11106
|
-
)
|
|
11107
|
-
);
|
|
11108
|
-
}, timeoutInMs);
|
|
11109
|
-
}
|
|
11110
|
-
});
|
|
10935
|
+
// src/mcp/services/McpGQLClient.ts
|
|
10936
|
+
import crypto2 from "crypto";
|
|
10937
|
+
import os2 from "os";
|
|
10938
|
+
import Configstore3 from "configstore";
|
|
10939
|
+
import { GraphQLClient as GraphQLClient2 } from "graphql-request";
|
|
10940
|
+
import open4 from "open";
|
|
10941
|
+
import { v4 as uuidv42 } from "uuid";
|
|
10942
|
+
|
|
10943
|
+
// src/mcp/constants.ts
|
|
10944
|
+
var DEFAULT_API_URL2 = "https://api.mobb.ai/v1/graphql";
|
|
10945
|
+
var API_KEY_HEADER_NAME2 = "x-mobb-key";
|
|
10946
|
+
|
|
10947
|
+
// src/mcp/tools/fixVulnerabilities/errors/VulnerabilityFixErrors.ts
|
|
10948
|
+
var ApiConnectionError = class extends Error {
|
|
10949
|
+
constructor(message = "Failed to connect to the API") {
|
|
10950
|
+
super(message);
|
|
10951
|
+
this.name = "ApiConnectionError";
|
|
10952
|
+
}
|
|
10953
|
+
};
|
|
10954
|
+
var CliLoginError = class extends Error {
|
|
10955
|
+
constructor(message = "CLI login failed") {
|
|
10956
|
+
super(message);
|
|
10957
|
+
this.name = "CliLoginError";
|
|
10958
|
+
}
|
|
10959
|
+
};
|
|
10960
|
+
var AuthenticationError = class extends Error {
|
|
10961
|
+
constructor(message = "Authentication failed") {
|
|
10962
|
+
super(message);
|
|
10963
|
+
this.name = "AuthenticationError";
|
|
10964
|
+
}
|
|
10965
|
+
};
|
|
10966
|
+
var NoFilesError = class extends Error {
|
|
10967
|
+
constructor(message = "No files to fix") {
|
|
10968
|
+
super(message);
|
|
10969
|
+
this.name = "NoFilesError";
|
|
10970
|
+
}
|
|
10971
|
+
};
|
|
10972
|
+
var GqlClientError = class extends Error {
|
|
10973
|
+
constructor(message = "GraphQL client not initialized") {
|
|
10974
|
+
super(message);
|
|
10975
|
+
this.name = "GqlClientError";
|
|
10976
|
+
}
|
|
10977
|
+
};
|
|
10978
|
+
var FileProcessingError = class extends Error {
|
|
10979
|
+
constructor(message) {
|
|
10980
|
+
super(message);
|
|
10981
|
+
this.name = "FileProcessingError";
|
|
10982
|
+
}
|
|
10983
|
+
};
|
|
10984
|
+
var ReportInitializationError = class extends Error {
|
|
10985
|
+
constructor(message) {
|
|
10986
|
+
super(message);
|
|
10987
|
+
this.name = "ReportInitializationError";
|
|
10988
|
+
}
|
|
10989
|
+
};
|
|
10990
|
+
var FileUploadError = class extends Error {
|
|
10991
|
+
constructor(message) {
|
|
10992
|
+
super(message);
|
|
10993
|
+
this.name = "FileUploadError";
|
|
10994
|
+
}
|
|
10995
|
+
};
|
|
10996
|
+
var ScanError = class extends Error {
|
|
10997
|
+
constructor(message) {
|
|
10998
|
+
super(message);
|
|
10999
|
+
this.name = "ScanError";
|
|
11000
|
+
}
|
|
11001
|
+
};
|
|
11002
|
+
var FailedToGetApiTokenError = class extends Error {
|
|
11003
|
+
constructor(message) {
|
|
11004
|
+
super(message);
|
|
11005
|
+
this.name = "FailedToGetApiTokenError";
|
|
11111
11006
|
}
|
|
11112
11007
|
};
|
|
11113
|
-
var subscribe2 = Subscribe.subscribe;
|
|
11114
11008
|
|
|
11115
11009
|
// src/mcp/services/McpGQLClient.ts
|
|
11010
|
+
var LOGIN_MAX_WAIT2 = 10 * 1e3;
|
|
11011
|
+
var LOGIN_CHECK_DELAY2 = 1 * 1e3;
|
|
11012
|
+
var config4 = new Configstore3(packageJson.name, { apiToken: "" });
|
|
11013
|
+
var BROWSER_COOLDOWN_MS = 5e3;
|
|
11014
|
+
var lastBrowserOpenTime = 0;
|
|
11116
11015
|
var McpGQLClient = class {
|
|
11117
11016
|
constructor(args) {
|
|
11118
11017
|
__publicField(this, "client");
|
|
11119
11018
|
__publicField(this, "clientSdk");
|
|
11120
|
-
__publicField(this, "
|
|
11121
|
-
|
|
11122
|
-
const API_URL2 = process.env["API_URL"] ||
|
|
11123
|
-
this.apiKey = args.apiKey;
|
|
11124
|
-
this.apiUrl = API_URL2;
|
|
11019
|
+
__publicField(this, "_auth");
|
|
11020
|
+
this._auth = args;
|
|
11021
|
+
const API_URL2 = process.env["API_URL"] || DEFAULT_API_URL2;
|
|
11125
11022
|
this.client = new GraphQLClient2(API_URL2, {
|
|
11126
|
-
headers: { [API_KEY_HEADER_NAME2]: args.apiKey || "" }
|
|
11023
|
+
headers: args.type === "apiKey" ? { [API_KEY_HEADER_NAME2]: args.apiKey || "" } : {
|
|
11024
|
+
Authorization: `Bearer ${args.token}`
|
|
11025
|
+
},
|
|
11127
11026
|
requestMiddleware: (request) => {
|
|
11128
11027
|
const requestId = uuidv42();
|
|
11129
11028
|
return {
|
|
@@ -11139,9 +11038,10 @@ var McpGQLClient = class {
|
|
|
11139
11038
|
}
|
|
11140
11039
|
getErrorContext() {
|
|
11141
11040
|
return {
|
|
11142
|
-
endpoint:
|
|
11041
|
+
endpoint: process.env["API_URL"] || DEFAULT_API_URL2,
|
|
11042
|
+
apiKey: this._auth.type === "apiKey" ? this._auth.apiKey : "",
|
|
11143
11043
|
headers: {
|
|
11144
|
-
[API_KEY_HEADER_NAME2]: this.apiKey ? "[REDACTED]" : "undefined",
|
|
11044
|
+
[API_KEY_HEADER_NAME2]: this._auth.type === "apiKey" ? "[REDACTED]" : "undefined",
|
|
11145
11045
|
"x-hasura-request-id": "[DYNAMIC]"
|
|
11146
11046
|
}
|
|
11147
11047
|
};
|
|
@@ -11153,15 +11053,12 @@ var McpGQLClient = class {
|
|
|
11153
11053
|
logInfo("GraphQL: Me query successful", { result });
|
|
11154
11054
|
return true;
|
|
11155
11055
|
} catch (e) {
|
|
11156
|
-
|
|
11157
|
-
|
|
11158
|
-
|
|
11159
|
-
});
|
|
11160
|
-
if (e?.toString().startsWith("FetchError")) {
|
|
11161
|
-
console.error("Connection verification failed:", e);
|
|
11056
|
+
if (e?.toString().includes("FetchError")) {
|
|
11057
|
+
logError("verify connection failed %o", e);
|
|
11058
|
+
return false;
|
|
11162
11059
|
}
|
|
11163
|
-
return false;
|
|
11164
11060
|
}
|
|
11061
|
+
return true;
|
|
11165
11062
|
}
|
|
11166
11063
|
async uploadS3BucketInfo() {
|
|
11167
11064
|
try {
|
|
@@ -11222,7 +11119,7 @@ var McpGQLClient = class {
|
|
|
11222
11119
|
params: params.subscribeToAnalysisParams
|
|
11223
11120
|
});
|
|
11224
11121
|
const { callbackStates } = params;
|
|
11225
|
-
const result = await
|
|
11122
|
+
const result = await subscribe(
|
|
11226
11123
|
GetAnalysisSubscriptionDocument,
|
|
11227
11124
|
params.subscribeToAnalysisParams,
|
|
11228
11125
|
async (resolve, reject, data) => {
|
|
@@ -11236,92 +11133,489 @@ var McpGQLClient = class {
|
|
|
11236
11133
|
reject(new Error(`Analysis failed with id: ${data.analysis?.id}`));
|
|
11237
11134
|
return;
|
|
11238
11135
|
}
|
|
11239
|
-
if (callbackStates.includes(data.analysis?.state)) {
|
|
11240
|
-
logInfo("GraphQL: Analysis state matches callback states", {
|
|
11241
|
-
analysisId: data.analysis.id,
|
|
11242
|
-
state: data.analysis.state,
|
|
11243
|
-
callbackStates
|
|
11244
|
-
});
|
|
11245
|
-
await params.callback(data.analysis.id);
|
|
11246
|
-
resolve(data);
|
|
11136
|
+
if (callbackStates.includes(data.analysis?.state)) {
|
|
11137
|
+
logInfo("GraphQL: Analysis state matches callback states", {
|
|
11138
|
+
analysisId: data.analysis.id,
|
|
11139
|
+
state: data.analysis.state,
|
|
11140
|
+
callbackStates
|
|
11141
|
+
});
|
|
11142
|
+
await params.callback(data.analysis.id);
|
|
11143
|
+
resolve(data);
|
|
11144
|
+
}
|
|
11145
|
+
},
|
|
11146
|
+
this._auth.type === "apiKey" ? {
|
|
11147
|
+
apiKey: this._auth.apiKey,
|
|
11148
|
+
type: "apiKey",
|
|
11149
|
+
timeoutInMs: params.timeoutInMs
|
|
11150
|
+
} : {
|
|
11151
|
+
token: this._auth.token,
|
|
11152
|
+
type: "token",
|
|
11153
|
+
timeoutInMs: params.timeoutInMs
|
|
11154
|
+
}
|
|
11155
|
+
);
|
|
11156
|
+
logInfo("GraphQL: GetAnalysis subscription completed", { result });
|
|
11157
|
+
return result;
|
|
11158
|
+
} catch (e) {
|
|
11159
|
+
logError("GraphQL: GetAnalysis subscription failed", {
|
|
11160
|
+
error: e,
|
|
11161
|
+
params: params.subscribeToAnalysisParams,
|
|
11162
|
+
...this.getErrorContext()
|
|
11163
|
+
});
|
|
11164
|
+
throw e;
|
|
11165
|
+
}
|
|
11166
|
+
}
|
|
11167
|
+
async getProjectId() {
|
|
11168
|
+
try {
|
|
11169
|
+
const projectName = "MCP Scans";
|
|
11170
|
+
logDebug("GraphQL: Calling getOrgAndProjectId query", { projectName });
|
|
11171
|
+
const getOrgAndProjectIdResult = await this.clientSdk.getOrgAndProjectId({
|
|
11172
|
+
filters: {},
|
|
11173
|
+
limit: 1
|
|
11174
|
+
});
|
|
11175
|
+
logInfo("GraphQL: getOrgAndProjectId successful", {
|
|
11176
|
+
result: getOrgAndProjectIdResult
|
|
11177
|
+
});
|
|
11178
|
+
const [organizationToOrganizationRole] = getOrgAndProjectIdResult.organization_to_organization_role;
|
|
11179
|
+
if (!organizationToOrganizationRole) {
|
|
11180
|
+
throw new Error("Organization not found");
|
|
11181
|
+
}
|
|
11182
|
+
const { organization: org } = organizationToOrganizationRole;
|
|
11183
|
+
const project = projectName ? org?.projects.find((project2) => project2.name === projectName) ?? null : org?.projects[0];
|
|
11184
|
+
if (project?.id) {
|
|
11185
|
+
logInfo("GraphQL: Found existing project", {
|
|
11186
|
+
projectId: project.id,
|
|
11187
|
+
projectName
|
|
11188
|
+
});
|
|
11189
|
+
return project.id;
|
|
11190
|
+
}
|
|
11191
|
+
logDebug("GraphQL: Project not found, creating new project", {
|
|
11192
|
+
organizationId: org.id,
|
|
11193
|
+
projectName
|
|
11194
|
+
});
|
|
11195
|
+
const createdProject = await this.clientSdk.CreateProject({
|
|
11196
|
+
organizationId: org.id,
|
|
11197
|
+
projectName
|
|
11198
|
+
});
|
|
11199
|
+
logInfo("GraphQL: CreateProject successful", { result: createdProject });
|
|
11200
|
+
return createdProject.createProject.projectId;
|
|
11201
|
+
} catch (e) {
|
|
11202
|
+
logError("GraphQL: getProjectId failed", {
|
|
11203
|
+
error: e,
|
|
11204
|
+
...this.getErrorContext()
|
|
11205
|
+
});
|
|
11206
|
+
throw e;
|
|
11207
+    }
+  }
+  async getReportFixes(fixReportId) {
+    try {
+      logDebug("GraphQL: Calling GetMCPFixes query", { fixReportId });
+      const res = await this.clientSdk.GetMCPFixes({ fixReportId });
+      logInfo("GraphQL: GetMCPFixes successful", {
+        result: res,
+        fixCount: res.fix?.length || 0
+      });
+      return res.fix;
+    } catch (e) {
+      logError("GraphQL: GetMCPFixes failed", {
+        error: e,
+        fixReportId,
+        ...this.getErrorContext()
+      });
+      throw e;
+    }
+  }
+  async getUserInfo() {
+    const { me } = await this.clientSdk.Me();
+    return me;
+  }
+  async verifyToken() {
+    logDebug("verifying token");
+    try {
+      await this.clientSdk.CreateCommunityUser();
+      const info = await this.getUserInfo();
+      logDebug("token verified");
+      return info?.email || true;
+    } catch (e) {
+      logError("verify token failed");
+      return false;
+    }
+  }
+  async createCliLogin(variables) {
+    try {
+      const res = await this.clientSdk.CreateCliLogin(variables, {
+        // We may have outdated API key in the config storage. Avoid using it for the login request.
+        [API_KEY_HEADER_NAME2]: ""
+      });
+      const loginId = res.insert_cli_login_one?.id || "";
+      if (!loginId) {
+        logError("create cli login failed - no login ID returned");
+        return "";
+      }
+      return loginId;
+    } catch (e) {
+      logError("create cli login failed", { error: e });
+      return "";
+    }
+  }
+  async getEncryptedApiToken(variables) {
+    try {
+      const res = await this.clientSdk.GetEncryptedApiToken(variables, {
+        // We may have outdated API key in the config storage. Avoid using it for the login request.
+        [API_KEY_HEADER_NAME2]: ""
+      });
+      return res?.cli_login_by_pk?.encryptedApiToken || null;
+    } catch (e) {
+      logError("get encrypted api token failed", { error: e });
+      return null;
+    }
+  }
+};
+async function openBrowser(url) {
+  const now = Date.now();
+  if (!process.env["TEST"] && now - lastBrowserOpenTime < BROWSER_COOLDOWN_MS) {
+    logDebug(`browser cooldown active, skipping open for ${url}`);
+    return;
+  }
+  logDebug(`opening browser url ${url}`);
+  await open4(url);
+  lastBrowserOpenTime = now;
+}
+async function getMcpGQLClient() {
+  logDebug("getting config", { apiToken: config4.get("apiToken") });
+  const inGqlClient = new McpGQLClient({
+    apiKey: config4.get("apiToken") || process.env["API_KEY"] || "",
+    type: "apiKey"
+  });
+  const isConnected = await inGqlClient.verifyConnection();
+  if (!isConnected) {
+    throw new ApiConnectionError("Error: failed to connect to the API");
+  }
+  const userVerify = await inGqlClient.verifyToken();
+  if (userVerify) {
+    return inGqlClient;
+  }
+  const { publicKey, privateKey } = crypto2.generateKeyPairSync("rsa", {
+    modulusLength: 2048
+  });
+  logDebug("creating cli login");
+  const loginId = await inGqlClient.createCliLogin({
+    publicKey: publicKey.export({ format: "pem", type: "pkcs1" }).toString()
+  });
+  if (!loginId) {
+    throw new CliLoginError("Error: createCliLogin failed");
+  }
+  logDebug(`cli login created ${loginId}`);
+  const webLoginUrl2 = `${WEB_APP_URL}/cli-login`;
+  const browserUrl = `${webLoginUrl2}/${loginId}?hostname=${os2.hostname()}`;
+  logDebug(`opening browser url ${browserUrl}`);
+  await openBrowser(browserUrl);
+  logDebug(`waiting for login to complete`);
+  let newApiToken = null;
+  for (let i = 0; i < LOGIN_MAX_WAIT2 / LOGIN_CHECK_DELAY2; i++) {
+    const encryptedApiToken = await inGqlClient.getEncryptedApiToken({
+      loginId
+    });
+    if (encryptedApiToken) {
+      logDebug("encrypted API token received");
+      newApiToken = crypto2.privateDecrypt(privateKey, Buffer.from(encryptedApiToken, "base64")).toString("utf-8");
+      logDebug("API token decrypted");
+      break;
+    }
+    await sleep(LOGIN_CHECK_DELAY2);
+  }
+  if (!newApiToken) {
+    throw new FailedToGetApiTokenError(
+      "Error: failed to get encrypted api token"
+    );
+  }
+  const newGqlClient = new McpGQLClient({ apiKey: newApiToken, type: "apiKey" });
+  const loginSuccess = await newGqlClient.verifyToken();
+  if (loginSuccess) {
+    logDebug("set api token %s", newApiToken);
+    config4.set("apiToken", newApiToken);
+  } else {
+    throw new AuthenticationError("Something went wrong, API token is invalid.");
+  }
+  return newGqlClient;
+}
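[Editor's note — illustration only, not part of the published bundle. The cli-login flow added above hands the web app a freshly generated RSA public key and later decrypts the returned token locally; the sketch below shows that round-trip with plain Node crypto. The issuedToken value and the server-side step are assumptions for demonstration.]

import crypto from "crypto";

// CLI side: generate a keypair; only the PEM public key leaves the machine
// (this mirrors the createCliLogin({ publicKey }) call above).
const { publicKey, privateKey } = crypto.generateKeyPairSync("rsa", { modulusLength: 2048 });
const publicPem = publicKey.export({ format: "pem", type: "pkcs1" }).toString();

// Web-app side (assumed): encrypt the freshly issued API token with that public key.
const issuedToken = "example-api-token";
const encrypted = crypto.publicEncrypt(publicPem, Buffer.from(issuedToken, "utf-8")).toString("base64");

// CLI side again: decrypt with the private key that never left the machine,
// the same shape as the crypto2.privateDecrypt(...) call in the bundle.
const apiToken = crypto.privateDecrypt(privateKey, Buffer.from(encrypted, "base64")).toString("utf-8");
console.log(apiToken === issuedToken); // true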
+
+// src/mcp/core/ToolRegistry.ts
+var ToolRegistry = class {
+  constructor() {
+    __publicField(this, "tools", /* @__PURE__ */ new Map());
+  }
+  registerTool(tool) {
+    if (this.tools.has(tool.name)) {
+      logWarn(`Tool ${tool.name} is already registered, overwriting`, {
+        toolName: tool.name
+      });
+    }
+    this.tools.set(tool.name, tool);
+    logDebug(`Tool registered: ${tool.name}`, {
+      toolName: tool.name,
+      description: tool.definition.description
+    });
+  }
+  getTool(name) {
+    return this.tools.get(name);
+  }
+  getAllTools() {
+    return Array.from(this.tools.values()).map((tool) => tool.definition);
+  }
+  getToolNames() {
+    return Array.from(this.tools.keys());
+  }
+  hasTool(name) {
+    return this.tools.has(name);
+  }
+  getToolCount() {
+    return this.tools.size;
+  }
+};
+
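[Editor's note — hypothetical usage, not in the package. ToolRegistry above is a thin wrapper around a Map keyed by tool name; registering and looking up a made-up "echo" tool would look roughly like this.]

const registry = new ToolRegistry();
registry.registerTool({
  name: "echo",
  definition: { description: "Echo the arguments back", inputSchema: { type: "object", properties: {} } },
  execute: async (args) => ({ content: [{ type: "text", text: JSON.stringify(args) }] })
});
registry.hasTool("echo");        // true
registry.getToolNames();         // ["echo"]
await registry.getTool("echo").execute({ hello: "world" });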
+// src/mcp/core/McpServer.ts
+var McpServer = class {
+  constructor(config5) {
+    __publicField(this, "server");
+    __publicField(this, "toolRegistry");
+    __publicField(this, "isEventHandlersSetup", false);
+    this.server = new Server(
+      {
+        name: config5.name,
+        version: config5.version
+      },
+      {
+        capabilities: {
+          tools: {}
+        }
+      }
+    );
+    this.toolRegistry = new ToolRegistry();
+    this.setupHandlers();
+    this.setupProcessEventHandlers();
+    logInfo("MCP server instance created", config5);
+  }
+  setupProcessEventHandlers() {
+    if (this.isEventHandlersSetup) {
+      logDebug("Process event handlers already setup, skipping");
+      return;
+    }
+    const signals = {
+      SIGINT: "MCP server interrupted",
+      SIGTERM: "MCP server terminated",
+      exit: "MCP server exiting",
+      uncaughtException: "Uncaught exception in MCP server",
+      unhandledRejection: "Unhandled promise rejection in MCP server",
+      warning: "Warning in MCP server"
+    };
+    Object.entries(signals).forEach(([signal, message]) => {
+      process.on(
+        signal,
+        (error) => {
+          if (error && signal !== "exit") {
+            logError(`${message}`, { error, signal });
+          } else {
+            logInfo(message, { signal });
+          }
+          if (signal === "SIGINT" || signal === "SIGTERM") {
+            process.exit(0);
+          }
+          if (signal === "uncaughtException") {
+            process.exit(1);
           }
-      },
-      {
-        apiKey: this.apiKey,
-        type: "apiKey",
-        timeoutInMs: params.timeoutInMs
         }
       );
-
-
-
-
-
-
-
-
-
+    });
+    this.isEventHandlersSetup = true;
+    logDebug("Process event handlers registered");
+  }
+  createShutdownPromise() {
+    return new Promise((resolve) => {
+      const cleanup = () => {
+        logInfo("Process shutdown initiated");
+        resolve();
+      };
+      process.once("SIGINT", cleanup);
+      process.once("SIGTERM", cleanup);
+    });
+  }
+  async handleListToolsRequest(request) {
+    logInfo("Received list_tools request", { params: request.params });
+    try {
+      await getMcpGQLClient();
+    } catch (error) {
+      logError("Failed to get MCPGQLClient", { error });
+      const authError = new Error(
+        "Please authorize this client by visiting: https://mobb.ai"
+      );
+      authError.name = "AuthorizationRequired";
+      throw authError;
     }
+    const tools = this.toolRegistry.getAllTools();
+    return {
+      tools: tools.map((tool) => ({
+        name: tool.name,
+        display_name: tool.name,
+        description: tool.description || "",
+        inputSchema: {
+          type: "object",
+          properties: tool.inputSchema.properties || {},
+          required: tool.inputSchema.required || []
+        }
+      }))
+    };
   }
-  async
+  async handleCallToolRequest(request) {
+    const { name, arguments: args } = request.params;
+    logInfo(`Received call tool request for ${name}`, { name, args });
     try {
-      const
-
-
-
-
-
-      logInfo("GraphQL: getOrgAndProjectId successful", {
-        result: getOrgAndProjectIdResult
-      });
-      const [organizationToOrganizationRole] = getOrgAndProjectIdResult.organization_to_organization_role;
-      if (!organizationToOrganizationRole) {
-        throw new Error("Organization not found");
-      }
-      const { organization: org } = organizationToOrganizationRole;
-      const project = projectName ? org?.projects.find((project2) => project2.name === projectName) ?? null : org?.projects[0];
-      if (project?.id) {
-        logInfo("GraphQL: Found existing project", {
-          projectId: project.id,
-          projectName
+      const tool = this.toolRegistry.getTool(name);
+      if (!tool) {
+        const errorMsg = `Unknown tool: ${name}`;
+        logWarn(errorMsg, {
+          name,
+          availableTools: this.toolRegistry.getToolNames()
         });
-
+        throw new Error(errorMsg);
       }
-      logDebug(
-
-
-      }
-
-
-        projectName
+      logDebug(`Executing tool: ${name}`, { args });
+      const response = await tool.execute(args);
+      const serializedResponse = JSON.parse(JSON.stringify(response));
+      logInfo(`Tool ${name} executed successfully`, {
+        responseType: typeof response,
+        hasContent: !!serializedResponse.content
       });
-
-
-
-      logError(
-        error
-
+      return serializedResponse;
+    } catch (error) {
+      const errorMessage = error instanceof Error ? error.message : String(error);
+      logError(`Error executing tool ${name}: ${errorMessage}`, {
+        error,
+        toolName: name,
+        args
      });
-      throw
+      throw error;
     }
   }
-
+  setupHandlers() {
+    this.server.setRequestHandler(
+      ListToolsRequestSchema,
+      (request) => this.handleListToolsRequest(request)
+    );
+    this.server.setRequestHandler(
+      CallToolRequestSchema,
+      (request) => this.handleCallToolRequest(request)
+    );
+    logDebug("MCP server handlers registered");
+  }
+  registerTool(tool) {
+    this.toolRegistry.registerTool({
+      name: tool.name,
+      definition: tool.definition,
+      execute: tool.execute
+    });
+    logDebug(`Tool registered: ${tool.name}`);
+  }
+  async start() {
     try {
-      logDebug("
-      const
-
-
-
-
-
-    } catch (
-      logError("
-
-
-
-
-
+      logDebug("Starting MCP server");
+      const transport = new StdioServerTransport();
+      await this.server.connect(transport);
+      logInfo("MCP server is running on stdin/stdout");
+      process.stdin.resume();
+      await this.createShutdownPromise();
+      await this.stop();
+    } catch (error) {
+      logError("Failed to start MCP server", { error });
+      throw error;
+    }
+  }
+  async stop() {
+    logInfo("MCP server shutting down");
+  }
+};
+
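[Editor's sketch — illustrative wiring, not code from the bundle. The McpServer above is constructed with a name/version, tools are registered through registerTool, and start() connects a stdio transport and then blocks on the shutdown promise until SIGINT/SIGTERM. The "ping" tool here is invented for the example.]

const server = new McpServer({ name: "mobb-mcp", version: "0.0.0" });
server.registerTool({
  name: "ping",
  definition: { description: "Health check", inputSchema: { type: "object", properties: {} } },
  execute: async () => ({ content: [{ type: "text", text: "pong" }] })
});
await server.start(); // serves list_tools / call_tool over stdin/stdout until terminated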
+// src/mcp/services/PathValidation.ts
+import fs9 from "fs";
+import path11 from "path";
+var PathValidation = class {
+  /**
+   * Validates a path for MCP usage - combines security and existence checks
+   */
+  async validatePath(inputPath) {
+    logDebug("Validating MCP path", { inputPath });
+    if (inputPath.includes("..")) {
+      const error = `Path contains path traversal patterns: ${inputPath}`;
+      logError(error);
+      return { isValid: false, error };
+    }
+    const normalizedPath = path11.normalize(inputPath);
+    if (normalizedPath.includes("..")) {
+      const error = `Normalized path contains path traversal patterns: ${inputPath}`;
+      logError(error);
+      return { isValid: false, error };
+    }
+    const decodedPath = decodeURIComponent(inputPath);
+    if (decodedPath.includes("..") || decodedPath !== inputPath) {
+      const error = `Path contains encoded traversal attempts: ${inputPath}`;
+      logError(error);
+      return { isValid: false, error };
+    }
+    if (inputPath.includes("\0") || inputPath.includes("\0")) {
+      const error = `Path contains dangerous characters: ${inputPath}`;
+      logError(error);
+      return { isValid: false, error };
+    }
+    logDebug("Path validation successful", { inputPath });
+    logDebug("Checking path existence", { inputPath });
+    try {
+      await fs9.promises.access(inputPath);
+      logDebug("Path exists and is accessible", { inputPath });
+      return { isValid: true };
+    } catch (error) {
+      const errorMessage = `Path does not exist or is not accessible: ${inputPath}`;
+      logError(errorMessage, { error });
+      return { isValid: false, error: errorMessage };
+    }
+  }
+};
+
+// src/mcp/services/FilePacking.ts
+import fs10 from "fs";
+import path12 from "path";
+import AdmZip2 from "adm-zip";
+var MAX_FILE_SIZE2 = 1024 * 1024 * 5;
+var FilePacking = class {
+  async packFiles(sourceDirectoryPath, filesToPack) {
+    logInfo(`FilePacking: packing files from ${sourceDirectoryPath}`);
+    const zip = new AdmZip2();
+    let packedFilesCount = 0;
+    logInfo("FilePacking: compressing files");
+    for (const filepath of filesToPack) {
+      const absoluteFilepath = path12.join(sourceDirectoryPath, filepath);
+      if (!FileUtils.shouldPackFile(absoluteFilepath, MAX_FILE_SIZE2)) {
+        logInfo(
+          `FilePacking: ignoring ${filepath} because it is excluded or invalid`
+        );
+        continue;
+      }
+      let data;
+      try {
+        data = fs10.readFileSync(absoluteFilepath);
+      } catch (fsError) {
+        logInfo(
+          `FilePacking: failed to read ${filepath} from filesystem: ${fsError}`
+        );
+        continue;
+      }
+      zip.addFile(filepath, data);
+      packedFilesCount++;
     }
+    const zipBuffer = zip.toBuffer();
+    logInfo(
+      `FilePacking: read ${packedFilesCount} source files. total size: ${zipBuffer.length} bytes`
+    );
+    logInfo("FilePacking: Files packed successfully");
+    return zipBuffer;
   }
 };
 
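[Editor's sketch — how the two services above are meant to compose; illustrative only, the real orchestration lives in VulnerabilityFixService further down. repositoryPath and filesToPack are hypothetical inputs.]

const { isValid, error } = await new PathValidation().validatePath(repositoryPath);
if (!isValid) {
  throw new Error(error);
}
// filesToPack holds paths relative to repositoryPath; shouldPackFile() drops
// anything excluded or larger than MAX_FILE_SIZE2 (5 MB).
const zipBuffer = await new FilePacking().packFiles(repositoryPath, filesToPack);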
@@ -11449,6 +11743,85 @@ ${fix2.patch || "No patch available"}
 - If any patch fails, continue with the others and report issues at the end
 `;
 };
+var failedToConnectToApiPrompt = `# CONNECTION ERROR: FAILED TO REACH MOBB API
+
+## ANALYSIS SUMMARY
+- **Status:** \u274C Failed
+- **Issue Type:** Connection Error
+- **Error Details:** Unable to establish connection to the Mobb API service
+
+## TROUBLESHOOTING STEPS FOR THE USER
+
+The Mobb security scanning service is currently not reachable. This may be due to:
+
+1. **Missing or invalid authentication credentials**
+- Ensure the \`API_KEY\` environment variable is properly set with your valid Mobb authentication token
+- Example: \`export API_KEY=your_mobb_api_key_here\`
+
+2. **Incorrect API endpoint configuration**
+- Check if the \`API_URL\` environment variable needs to be set to the correct Mobb service endpoint
+- Example: \`export API_URL=https://api.mobb.ai/graphql\`
+
+3. **Network connectivity issues**
+- Verify your internet connection is working properly
+- Check if any firewall or proxy settings might be blocking the connection
+
+4. **Service outage**
+- The Mobb service might be temporarily unavailable
+- Please try again later or check the Mobb status page
+
+## NEXT STEPS
+
+Please resolve the connection issue using the steps above and try running the security scan again.
+
+For additional assistance, please:
+- Visit the Mobb documentation at https://docs.mobb.ai
+- Contact Mobb support at support@mobb.ai
+
+`;
+var failedToAuthenticatePrompt = `# AUTHENTICATION ERROR: MOBB LOGIN REQUIRED
+
+## ANALYSIS SUMMARY
+- **Status:** \u274C Failed
+- **Issue Type:** Authentication Error
+- **Error Details:** Unable to authenticate with the Mobb service
+
+## AUTHENTICATION REQUIRED
+
+The Mobb security scanning service requires authentication before it can analyze your code for vulnerabilities. You need to:
+
+1. **Login and authorize access to Mobb**
+- A browser window should have opened to complete the authentication process
+- If no browser window opened, please run the command again
+
+2. **Create a Mobb account if you don't have one**
+- If you don't already have a Mobb account, you'll need to sign up
+- Visit https://app.mobb.ai/auth/signup to create your free account
+- Use your work email for easier team collaboration
+
+3. **Authorization flow**
+- After logging in, you'll be asked to authorize the CLI tool
+- This creates a secure token that allows the CLI to access Mobb services
+- You only need to do this once per device
+
+## TROUBLESHOOTING
+
+If you're experiencing issues with authentication:
+
+- Ensure you have an active internet connection
+- Check that you can access https://app.mobb.ai in your browser
+- Try running the command again with the \`--debug\` flag for more detailed output
+- Make sure your browser isn't blocking pop-ups from the authentication window
+
+## NEXT STEPS
+
+Please complete the authentication process and try running the security scan again.
+
+For additional assistance, please:
+- Visit the Mobb documentation at https://docs.mobb.ai/cli/authentication
+- Contact Mobb support at support@mobb.ai
+
+`;
 
 // src/mcp/tools/fixVulnerabilities/VulnerabilityFixService.ts
 var VUL_REPORT_DIGEST_TIMEOUT_MS2 = 1e3 * 60 * 5;
@@ -11456,15 +11829,12 @@ var VulnerabilityFixService = class {
   constructor() {
     __publicField(this, "gqlClient");
     __publicField(this, "filePacking");
-    __publicField(this, "fileUpload");
     this.filePacking = new FilePacking();
-    this.fileUpload = new FileUpload();
   }
   async processVulnerabilities(fileList, repositoryPath) {
     try {
       this.validateFiles(fileList);
-
-      this.gqlClient = await this.initializeGqlClient(apiKey);
+      this.gqlClient = await this.initializeGqlClient();
       const repoUploadInfo = await this.initializeReport();
       const zipBuffer = await this.packFiles(fileList, repositoryPath);
       await this.uploadFiles(zipBuffer, repoUploadInfo);
@@ -11476,6 +11846,12 @@ var VulnerabilityFixService = class {
       const fixes = await this.getReportFixes(repoUploadInfo.fixReportId);
       return fixesPrompt(fixes);
     } catch (error) {
+      if (error instanceof ApiConnectionError || error instanceof CliLoginError) {
+        return failedToConnectToApiPrompt;
+      }
+      if (error instanceof AuthenticationError || error instanceof FailedToGetApiTokenError) {
+        return failedToAuthenticatePrompt;
+      }
       const message = error.message;
       logError("Vulnerability processing failed", { error: message });
       throw error;
@@ -11483,30 +11859,22 @@ var VulnerabilityFixService = class {
   }
   validateFiles(fileList) {
     if (fileList.length === 0) {
-      throw new
-    }
-  }
-  validateApiKey() {
-    const apiKey = process.env["API_KEY"];
-    if (!apiKey) {
-      throw new Error("API_KEY environment variable is not set");
+      throw new NoFilesError();
     }
-    return apiKey;
   }
-  async initializeGqlClient(
-    const gqlClient =
-      apiKey,
-      type: "apiKey"
-    });
+  async initializeGqlClient() {
+    const gqlClient = await getMcpGQLClient();
     const isConnected = await gqlClient.verifyConnection();
     if (!isConnected) {
-      throw new
+      throw new ApiConnectionError(
+        "Failed to connect to the API. Please check your API_KEY"
+      );
     }
     return gqlClient;
   }
   async initializeReport() {
     if (!this.gqlClient) {
-      throw new
+      throw new GqlClientError();
     }
     try {
       const {
@@ -11516,7 +11884,9 @@ var VulnerabilityFixService = class {
       return repoUploadInfo;
     } catch (error) {
       const message = error.message;
-      throw new
+      throw new ReportInitializationError(
+        `Error initializing report: ${message}`
+      );
     }
   }
   async packFiles(fileList, repositoryPath) {
@@ -11529,15 +11899,15 @@ var VulnerabilityFixService = class {
      return zipBuffer;
     } catch (error) {
       const message = error.message;
-      throw new
+      throw new FileProcessingError(`Error packing files: ${message}`);
     }
   }
   async uploadFiles(zipBuffer, repoUploadInfo) {
     if (!repoUploadInfo) {
-      throw new
+      throw new FileUploadError("Upload info is required");
     }
     try {
-      await
+      await uploadFile({
         file: zipBuffer,
         url: repoUploadInfo.url,
         uploadFields: JSON.parse(repoUploadInfo.uploadFieldsJSON),
@@ -11546,12 +11916,14 @@ var VulnerabilityFixService = class {
       logInfo("File uploaded successfully");
     } catch (error) {
       logError("File upload failed", { error: error.message });
-      throw new
+      throw new FileUploadError(
+        `Failed to upload the file: ${error.message}`
+      );
     }
   }
   async getProjectId() {
     if (!this.gqlClient) {
-      throw new
+      throw new GqlClientError();
     }
     const projectId = await this.gqlClient.getProjectId();
     logInfo("Project ID retrieved", { projectId });
@@ -11559,7 +11931,7 @@ var VulnerabilityFixService = class {
   }
   async runScan(params) {
     if (!this.gqlClient) {
-      throw new
+      throw new GqlClientError();
     }
     const { fixReportId, projectId } = params;
     logInfo("Starting scan", { fixReportId, projectId });
@@ -11578,7 +11950,7 @@ var VulnerabilityFixService = class {
       logError("Vulnerability report submission failed", {
         response: submitRes
       });
-      throw new
+      throw new ScanError("\u{1F575}\uFE0F\u200D\u2642\uFE0F Mobb analysis failed");
     }
     logInfo("Vulnerability report submitted successfully", {
       analysisId: submitRes.submitVulnerabilityReport.fixReportId
@@ -11597,7 +11969,7 @@ var VulnerabilityFixService = class {
   }
   async getReportFixes(fixReportId) {
     if (!this.gqlClient) {
-      throw new
+      throw new GqlClientError();
     }
     const fixes = await this.gqlClient.getReportFixes(fixReportId);
     logInfo("Fixes retrieved", { fixCount: fixes.length });
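[Editor's illustration — the general shape of the uploadFile({ file, url, uploadFields }) call used above: a multipart POST against a presigned URL, presigned fields first and the file part last. This is an assumption about the pattern, not the bundle's exact implementation; it needs Node 18+ for global fetch/FormData/Blob.]

async function uploadToPresignedUrl({ file, url, uploadFields }) {
  const form = new FormData();
  for (const [key, value] of Object.entries(uploadFields)) {
    form.append(key, value);
  }
  form.append("file", new Blob([file])); // e.g. the zip buffer produced by FilePacking
  const res = await fetch(url, { method: "POST", body: form });
  if (!res.ok) {
    throw new Error(`upload failed with HTTP ${res.status}`);
  }
}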
@@ -11634,18 +12006,48 @@ var FixVulnerabilitiesTool = class {
         `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
       );
     }
-    const gitService = new GitService(args.path);
+    const gitService = new GitService(args.path, log);
     const gitValidation = await gitService.validateRepository();
+    let files = [];
     if (!gitValidation.isValid) {
-
+      logDebug(
+        "Git repository validation failed, using all files in the repository",
+        {
+          path: args.path
+        }
+      );
+      files = FileUtils.getLastChangedFiles(args.path);
+      logDebug("Found files in the repository", {
+        files,
+        fileCount: files.length
+      });
+    } else {
+      const gitResult = await gitService.getChangedFiles();
+      files = gitResult.files;
+      if (files.length === 0) {
+        const recentResult = await gitService.getRecentlyChangedFiles();
+        files = recentResult.files;
+        logDebug(
+          "No changes found, using recently changed files from git history",
+          {
+            files,
+            fileCount: files.length,
+            commitsChecked: recentResult.commitCount
+          }
+        );
+      } else {
+        logDebug("Found changed files in the git repository", {
+          files,
+          fileCount: files.length
+        });
+      }
     }
-
-    if (gitResult.files.length === 0) {
+    if (files.length === 0) {
       return {
         content: [
           {
             type: "text",
-            text: "No changed files found in the
+            text: "No changed files found in the repository. The vulnerability scanner analyzes modified, added, or staged files. Make some changes to your code and try again."
           }
         ]
       };
@@ -11653,7 +12055,7 @@ var FixVulnerabilitiesTool = class {
     try {
       const vulnerabilityFixService = new VulnerabilityFixService();
       const fixResult = await vulnerabilityFixService.processVulnerabilities(
-
+        files,
         args.path
       );
       const result = {
@@ -11666,7 +12068,7 @@ var FixVulnerabilitiesTool = class {
       };
       logInfo("Tool execution completed successfully", {
         resultLength: fixResult.length,
-        fileCount:
+        fileCount: files.length,
         result
       });
       return result;
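[Editor's sketch of the file-selection cascade applied above — pending changes first, then files touched by recent commits, otherwise fall back to scanning the directory. Written with raw git commands for clarity; the bundle uses its own GitService/FileUtils helpers, so treat this as an approximation.]

import { execSync } from "child_process";

function pickCandidateFiles(repoPath) {
  const run = (cmd) => execSync(cmd, { cwd: repoPath }).toString().split("\n").filter(Boolean);
  try {
    const changed = run("git status --porcelain").map((line) => line.slice(3));
    if (changed.length > 0) return changed;
    // No pending changes: use files touched by the last few commits instead.
    return [...new Set(run("git log -5 --name-only --pretty=format:"))];
  } catch {
    return []; // not a git repository; the caller scans the directory instead
  }
}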
@@ -11739,7 +12141,7 @@ var mcpHandler = async (_args) => {
 };
 
 // src/args/commands/review.ts
-import
+import fs11 from "fs";
 import chalk9 from "chalk";
 function reviewBuilder(yargs2) {
   return yargs2.option("f", {
@@ -11776,7 +12178,7 @@ function reviewBuilder(yargs2) {
   ).help();
 }
 function validateReviewOptions(argv) {
-  if (!
+  if (!fs11.existsSync(argv.f)) {
     throw new CliError(`
 Can't access ${chalk9.bold(argv.f)}`);
   }
@@ -11914,13 +12316,13 @@ var parseArgs = async (args) => {
 };
 
 // src/index.ts
-var
+var debug19 = Debug20("mobbdev:index");
 async function run() {
   return parseArgs(hideBin(process.argv));
 }
 (async () => {
   try {
-
+    debug19("Bugsy CLI v%s running...", packageJson.version);
     await run();
     process.exit(0);
   } catch (err) {