mobbdev 1.0.2 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.mjs +105 -63
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -148,6 +148,7 @@ var IssueType_Enum = /* @__PURE__ */ ((IssueType_Enum2) => {
   IssueType_Enum2["MissingEqualsOrHashcode"] = "MISSING_EQUALS_OR_HASHCODE";
   IssueType_Enum2["MissingHstsHeader"] = "MISSING_HSTS_HEADER";
   IssueType_Enum2["NonFinalPublicStaticField"] = "NON_FINAL_PUBLIC_STATIC_FIELD";
+  IssueType_Enum2["NonReadonlyField"] = "NON_READONLY_FIELD";
   IssueType_Enum2["NoEquivalenceMethod"] = "NO_EQUIVALENCE_METHOD";
   IssueType_Enum2["NoLimitsOrThrottling"] = "NO_LIMITS_OR_THROTTLING";
   IssueType_Enum2["NullDereference"] = "NULL_DEREFERENCE";
@@ -740,7 +741,8 @@ var issueTypeMap = {
   ["HARDCODED_DOMAIN_IN_HTML" /* HardcodedDomainInHtml */]: "Hardcoded Domain in HTML",
   ["HEAP_INSPECTION" /* HeapInspection */]: "Heap Inspection",
   ["CLIENT_DOM_STORED_CODE_INJECTION" /* ClientDomStoredCodeInjection */]: "Client Code Injection",
-  ["STRING_FORMAT_MISUSE" /* StringFormatMisuse */]: "String Format Misuse"
+  ["STRING_FORMAT_MISUSE" /* StringFormatMisuse */]: "String Format Misuse",
+  ["NON_READONLY_FIELD" /* NonReadonlyField */]: "Non Readonly Field"
 };
 var issueTypeZ = z.nativeEnum(IssueType_Enum);
 var getIssueTypeFriendlyString = (issueType) => {
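
Note: the new issue type has to appear both in the enum (validated by zod) and in issueTypeMap (for display). A minimal standalone sketch of that round trip, not part of the package, with plain objects standing in for the bundled enum and map shown above:

import { z } from "zod";

// Stand-ins for the bundled IssueType_Enum / issueTypeMap entries added in this diff.
const IssueType_Enum = { NonReadonlyField: "NON_READONLY_FIELD" };
const issueTypeMap = { NON_READONLY_FIELD: "Non Readonly Field" };

const issueTypeZ = z.nativeEnum(IssueType_Enum);
const parsed = issueTypeZ.parse("NON_READONLY_FIELD"); // raw value now passes validation
console.log(issueTypeMap[parsed]); // -> "Non Readonly Field"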
@@ -1435,6 +1437,7 @@ import chalk4 from "chalk";
 import Configstore from "configstore";
 import Debug16 from "debug";
 import extract from "extract-zip";
+import { createSpinner as createSpinner4 } from "nanospinner";
 import fetch4 from "node-fetch";
 import open2 from "open";
 import tmp2 from "tmp";
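
Note: this nanospinner import backs the digest spinner used by the new _digestReport helper later in the diff. A minimal sketch of the nanospinner calls the bundle relies on (the spinner text and the async step are illustrative):

import { createSpinner } from "nanospinner";

const doWork = async () => {}; // hypothetical async step

const spinner = createSpinner("Processing vulnerability report").start();
try {
  await doWork();
  spinner.success({ text: "Report processed" });       // stop with a success mark
} catch (e) {
  spinner.error({ text: "Digesting report failed" });  // stop with a failure mark
  throw e;
}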
@@ -1467,8 +1470,8 @@ import { z as z16 } from "zod";
 
 // src/features/analysis/scm/bitbucket/bitbucket.ts
 import querystring from "node:querystring";
-import bitbucketPkg from "bitbucket";
 import * as bitbucketPkgNode from "bitbucket";
+import bitbucketPkg from "bitbucket";
 import Debug2 from "debug";
 import { z as z12 } from "zod";
 
@@ -1706,7 +1709,8 @@ var fixDetailsData = {
     issueDescription: "Client DOM Stored Code Injection is a client-side security vulnerability where malicious JavaScript code gets stored in the DOM and later executed when retrieved by legitimate scripts.",
     fixInstructions: "Update the code to avoid the possibility for malicious JavaScript code to get stored in the DOM."
   },
-  ["STRING_FORMAT_MISUSE" /* StringFormatMisuse */]: void 0
+  ["STRING_FORMAT_MISUSE" /* StringFormatMisuse */]: void 0,
+  ["NON_READONLY_FIELD" /* NonReadonlyField */]: void 0
 };
 
 // src/features/analysis/scm/shared/src/commitDescriptionMarkup.ts
@@ -3651,7 +3655,10 @@ async function validateBitbucketParams(params) {
        throw new InvalidRepoUrlError(safeParseError.data.error.error.message);
      }
    }
-
+    console.log("validateBitbucketParams error", e);
+    throw new InvalidRepoUrlError(
+      `cannot access BB repo URL: ${params.url} with the provided access token`
+    );
   }
 }
 async function getUsersworkspacesSlugs(bitbucketClient) {
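
Note: the same catch-all fallback is added to the GitHub, GitLab, and ADO validators in the hunks below; unrecognized failures are now logged and rethrown as InvalidRepoUrlError instead of falling through silently. A hedged sketch of the pattern, not the package's code (the enclosing try/catch sits outside this hunk, so its exact shape is assumed, and InvalidRepoUrlError is a stand-in for the bundled class):

class InvalidRepoUrlError extends Error {} // stand-in for the bundled error class

async function validateRepoAccess(url, probe) {
  try {
    await probe(url); // hypothetical request against the SCM API
  } catch (e) {
    if (e?.status === 404) {
      throw new InvalidRepoUrlError(`invalid repo URL ${url}`);
    }
    // New in 1.0.5: unknown failures no longer fall through silently.
    console.log("validateRepoAccess error", e);
    throw new InvalidRepoUrlError(
      `cannot access repo URL: ${url} with the provided access token`
    );
  }
}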
@@ -3824,7 +3831,10 @@ async function githubValidateParams(url, accessToken) {
     if (code === 404) {
       throw new InvalidRepoUrlError(`invalid github repo Url ${url}`);
     }
-
+    console.log("githubValidateParams error", e);
+    throw new InvalidRepoUrlError(
+      `cannot access GH repo URL: ${url} with the provided access token`
+    );
   }
 }
 
@@ -4260,7 +4270,10 @@ async function gitlabValidateParams({
     if (code === 404 || description.includes("404") || description.includes("Not Found")) {
       throw new InvalidRepoUrlError(`invalid gitlab repo URL: ${url}`);
     }
-
+    console.log("gitlabValidateParams error", e);
+    throw new InvalidRepoUrlError(
+      `cannot access gitlab repo URL: ${url} with the provided access token`
+    );
   }
 }
 async function getGitlabUsername(url, accessToken) {
@@ -5997,7 +6010,10 @@ async function adoValidateParams({
     if (code === 404 || description.includes("404") || description.includes("Not Found")) {
       throw new InvalidRepoUrlError(`invalid ADO repo URL ${url}`);
     }
-
+    console.log("adoValidateParams error", e);
+    throw new InvalidRepoUrlError(
+      `cannot access ADO repo URL: ${url} with the provided access token`
+    );
   }
 }
 async function getOrgsForOauthToken({
@@ -6802,8 +6818,8 @@ async function addFixCommentsForPr({
 import Debug8 from "debug";
 var debug8 = Debug8("mobbdev:handleAutoPr");
 async function handleAutoPr(params) {
-  const { gqlClient, analysisId, createSpinner:
-  const createAutoPrSpinner =
+  const { gqlClient, analysisId, createSpinner: createSpinner5 } = params;
+  const createAutoPrSpinner = createSpinner5(
     "\u{1F504} Waiting for the analysis to finish before initiating automatic pull request creation"
   ).start();
   return await gqlClient.subscribeToAnalysis({
@@ -6899,6 +6915,9 @@ import WebSocket from "ws";
 var SUBSCRIPTION_TIMEOUT_MS = 30 * 60 * 1e3;
 function createWSClient(options) {
   return createClient({
+    //this is needed to prevent AWS from killing the connection
+    //currently our load balancer has a 29s idle timeout
+    keepAlive: 1e4,
     url: options.url,
     webSocketImpl: options.websocket || WebSocket,
     connectionParams: () => {
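
Note: a sketch of the keep-alive setting added above, assuming the createClient here is graphql-ws (where keepAlive is the ping interval in milliseconds); the endpoint URL is illustrative. Pinging every 10 seconds keeps the socket active so a 29-second load-balancer idle timeout never cuts a long-running subscription:

import { createClient } from "graphql-ws";
import WebSocket from "ws";

const client = createClient({
  keepAlive: 1e4,                        // ping every 10s, well under the 29s idle timeout
  url: "wss://api.example.com/graphql",  // hypothetical endpoint
  webSocketImpl: WebSocket,
});
// The client connects lazily; subscriptions made through it inherit the keep-alive pings.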
@@ -7712,8 +7731,8 @@ async function downloadRepo({
   dirname,
   ci
 }) {
-  const { createSpinner:
-  const repoSpinner =
+  const { createSpinner: createSpinner5 } = Spinner2({ ci });
+  const repoSpinner = createSpinner5("\u{1F4BE} Downloading Repo").start();
   debug15("download repo %s %s %s", repoUrl, dirname);
   const zipFilePath = path7.join(dirname, "repo.zip");
   debug15("download URL: %s auth headers: %o", downloadUrl, authHeaders);
@@ -7892,7 +7911,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
     autoPr
   } = params;
   debug15("start %s %s", dirname, repo);
-  const { createSpinner:
+  const { createSpinner: createSpinner5 } = Spinner2({ ci });
   skipPrompts = skipPrompts || ci;
   let gqlClient = new GQLClient({
     apiKey: apiKey || config2.get("apiToken"),
@@ -7929,7 +7948,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
   });
   if (!isRepoAvailable) {
     if (ci || !cloudScmLibType || !scmAuthUrl) {
-      const errorMessage = scmAuthUrl ? `Cannot access repo ${repo}
+      const errorMessage = scmAuthUrl ? `Cannot access repo ${repo}. Make sure that the repo is accessible and the SCM token configured on Mobb is correct.` : `Cannot access repo ${repo} with the provided token, please visit ${scmAuthUrl} to refresh your source control management system token`;
       throw new Error(errorMessage);
     }
     if (cloudScmLibType && scmAuthUrl) {
@@ -7982,7 +8001,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
   if (!reportPath) {
     throw new Error("reportPath is null");
   }
-  const uploadReportSpinner =
+  const uploadReportSpinner = createSpinner5("\u{1F4C1} Uploading Report").start();
   try {
     await uploadFile({
       file: reportPath,
@@ -7994,8 +8013,14 @@ async function _scan(params, { skipPrompts = false } = {}) {
     uploadReportSpinner.error({ text: "\u{1F4C1} Report upload failed" });
     throw e;
   }
+  await _digestReport({
+    gqlClient,
+    fixReportId: reportUploadInfo.fixReportId,
+    projectId,
+    command
+  });
   uploadReportSpinner.success({ text: "\u{1F4C1} Report uploaded successfully" });
-  const mobbSpinner =
+  const mobbSpinner = createSpinner5("\u{1F575}\uFE0F\u200D\u2642\uFE0F Initiating Mobb analysis").start();
   const sendReportRes = await sendReport({
     gqlClient,
     spinner: mobbSpinner,
@@ -8022,7 +8047,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
     await handleAutoPr({
       gqlClient,
       analysisId: reportUploadInfo.fixReportId,
-      createSpinner:
+      createSpinner: createSpinner5
     });
   }
   await askToOpenAnalysis();
@@ -8048,7 +8073,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
   const scmLibType = getCloudScmLibTypeFromUrl(repoUrl);
   const scmName = scmLibType === "GITHUB" /* GITHUB */ ? "Github" : scmLibType === "GITLAB" /* GITLAB */ ? "Gitlab" : scmLibType === "ADO" /* ADO */ ? "Azure DevOps" : "";
   const addScmIntegration = skipPrompts ? true : await scmIntegrationPrompt(scmName);
-  const scmSpinner =
+  const scmSpinner = createSpinner5(
     `\u{1F517} Waiting for ${scmName} integration...`
   ).start();
   if (!addScmIntegration) {
@@ -8092,7 +8117,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
   if (!srcPath || !reportPath) {
     throw new Error("src path and reportPath is required");
   }
-  const uploadReportSpinner2 =
+  const uploadReportSpinner2 = createSpinner5("\u{1F4C1} Uploading Report").start();
   try {
     await uploadFile({
       file: reportPath,
@@ -8107,48 +8132,17 @@ async function _scan(params, { skipPrompts = false } = {}) {
   uploadReportSpinner2.success({
     text: "\u{1F4C1} Uploading Report successful!"
   });
-  const
-
-
-
-
-  try {
-    const { vulnerabilityReportId } = await gqlClient.digestVulnerabilityReport({
-      fixReportId: reportUploadInfo.fixReportId,
-      projectId,
-      scanSource: _getScanSource(command)
-    });
-    try {
-      await gqlClient.subscribeToAnalysis({
-        subscribeToAnalysisParams: {
-          analysisId: reportUploadInfo.fixReportId
-        },
-        callback: () => digestSpinner.update({
-          text: progressMassages.processingVulnerabilityReportSuccess
-        }),
-        callbackStates: [
-          "Digested" /* Digested */,
-          "Finished" /* Finished */
-        ],
-        timeoutInMs: VUL_REPORT_DIGEST_TIMEOUT_MS
-      });
-    } catch (e) {
-      throw new Error(progressMassages.processingVulnerabilityReportFailed);
-    }
-    vulnFiles = await gqlClient.getVulnerabilityReportPaths(
-      vulnerabilityReportId
-    );
-  } catch (e) {
-    digestSpinner.error({ text: "\u{1F575}\uFE0F\u200D\u2642\uFE0F Digesting report failed" });
-    throw e;
-  }
-  digestSpinner.success({
-    text: progressMassages.processingVulnerabilityReportSuccess
+  const vulnFiles = await _digestReport({
+    gqlClient,
+    fixReportId: reportUploadInfo.fixReportId,
+    projectId,
+    command
   });
-  const
+  const gitInfo = await getGitInfo(srcPath);
+  const zippingSpinner = createSpinner5("\u{1F4E6} Zipping repo").start();
   const zipBuffer = await pack(srcPath, vulnFiles);
   zippingSpinner.success({ text: "\u{1F4E6} Zipping repo successful!" });
-  const uploadRepoSpinner =
+  const uploadRepoSpinner = createSpinner5("\u{1F4C1} Uploading Repo").start();
   try {
     await uploadFile({
       file: zipBuffer,
@@ -8161,7 +8155,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
     throw e;
   }
   uploadRepoSpinner.success({ text: "\u{1F4C1} Uploading Repo successful!" });
-  const mobbSpinner2 =
+  const mobbSpinner2 = createSpinner5("\u{1F575}\uFE0F\u200D\u2642\uFE0F Initiating Mobb analysis").start();
   try {
     await sendReport({
       gqlClient,
@@ -8218,13 +8212,61 @@ async function _scan(params, { skipPrompts = false } = {}) {
     await handleAutoPr({
       gqlClient,
       analysisId: reportUploadInfo.fixReportId,
-      createSpinner:
+      createSpinner: createSpinner5
     });
   }
   await askToOpenAnalysis();
   return reportUploadInfo.fixReportId;
  }
 }
+async function _digestReport({
+  gqlClient,
+  fixReportId,
+  projectId,
+  command
+}) {
+  const digestSpinner = createSpinner4(
+    progressMassages.processingVulnerabilityReport
+  ).start();
+  try {
+    const { vulnerabilityReportId } = await gqlClient.digestVulnerabilityReport(
+      {
+        fixReportId,
+        projectId,
+        scanSource: _getScanSource(command)
+      }
+    );
+    try {
+      await gqlClient.subscribeToAnalysis({
+        subscribeToAnalysisParams: {
+          analysisId: fixReportId
+        },
+        callback: () => digestSpinner.update({
+          text: progressMassages.processingVulnerabilityReportSuccess
+        }),
+        callbackStates: [
+          "Digested" /* Digested */,
+          "Finished" /* Finished */
+        ],
+        timeoutInMs: VUL_REPORT_DIGEST_TIMEOUT_MS
+      });
+    } catch (e) {
+      throw new Error(progressMassages.processingVulnerabilityReportFailed);
+    }
+    const vulnFiles = await gqlClient.getVulnerabilityReportPaths(
+      vulnerabilityReportId
+    );
+    digestSpinner.success({
+      text: progressMassages.processingVulnerabilityReportSuccess
+    });
+    return vulnFiles;
+  } catch (e) {
+    digestSpinner.error({
+      text: "\u{1F575}\uFE0F\u200D\u2642\uFE0F Digesting report failed. Please verify that the file provided is of a valid supported report format."
+    });
+    throw e;
+  }
+}
 
 // src/commands/index.ts
 import chalk5 from "chalk";
@@ -8364,19 +8406,19 @@ async function handleMobbLogin({
   apiKey,
   skipPrompts
 }) {
-  const { createSpinner:
+  const { createSpinner: createSpinner5 } = Spinner({ ci: skipPrompts });
   if (await inGqlClient.verifyToken()) {
-
+    createSpinner5().start().success({
       text: "\u{1F513} Logged in to Mobb successfully"
     });
     return inGqlClient;
   } else if (apiKey) {
-
+    createSpinner5().start().error({
       text: "\u{1F513} Logged in to Mobb failed - check your api-key"
     });
     throw new CliError();
   }
-  const loginSpinner =
+  const loginSpinner = createSpinner5().start();
   if (!skipPrompts) {
     loginSpinner.update({ text: MOBB_LOGIN_REQUIRED_MSG });
     await keypress();