mobbdev 1.0.82 → 1.0.84

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2):
  1. package/dist/index.mjs +154 -85
  2. package/package.json +1 -2
package/dist/index.mjs CHANGED
@@ -761,12 +761,15 @@ var UploadS3BucketInfoDocument = `
761
761
  }
762
762
  `;
763
763
  var DigestVulnerabilityReportDocument = `
764
- mutation DigestVulnerabilityReport($vulnerabilityReportFileName: String!, $fixReportId: String!, $projectId: String!, $scanSource: String!) {
764
+ mutation DigestVulnerabilityReport($vulnerabilityReportFileName: String, $fixReportId: String!, $projectId: String!, $scanSource: String!, $repoUrl: String, $reference: String, $sha: String) {
765
765
  digestVulnerabilityReport(
766
766
  fixReportId: $fixReportId
767
767
  vulnerabilityReportFileName: $vulnerabilityReportFileName
768
768
  projectId: $projectId
769
769
  scanSource: $scanSource
770
+ repoUrl: $repoUrl
771
+ reference: $reference
772
+ sha: $sha
770
773
  ) {
771
774
  __typename
772
775
  ... on VulnerabilityReport {
@@ -1188,6 +1191,10 @@ var ValidCategoriesZ = z4.union([
1188
1191
  z4.literal(CATEGORY.FalsePositive),
1189
1192
  z4.literal(CATEGORY.Fixable)
1190
1193
  ]);
1194
+ var VulnerabilityReportIssueSharedStateZ = z4.object({
1195
+ id: z4.string().uuid(),
1196
+ isArchived: z4.boolean()
1197
+ }).nullish();
1191
1198
  var BaseIssuePartsZ = z4.object({
1192
1199
  id: z4.string().uuid(),
1193
1200
  safeIssueType: z4.string(),
@@ -1239,7 +1246,8 @@ var BaseIssuePartsZ = z4.object({
1239
1246
  const codeDiff = await fetch(url).then((res) => res.text());
1240
1247
  return { codeDiff };
1241
1248
  })
1242
- }).nullish()
1249
+ }).nullish(),
1250
+ sharedState: VulnerabilityReportIssueSharedStateZ
1243
1251
  });
1244
1252
  var FalsePositivePartsZ = z4.object({
1245
1253
  extraContext: z4.array(z4.object({ key: z4.string(), value: z4.string() })),
@@ -1695,7 +1703,8 @@ var VulnerabilityReportIssueZ = z7.object({
1695
1703
  z7.object({
1696
1704
  vulnerability_report_issue_tag_value: z7.string()
1697
1705
  })
1698
- )
1706
+ ),
1707
+ sharedState: VulnerabilityReportIssueSharedStateZ
1699
1708
  });
1700
1709
  var GetReportIssuesQueryZ = z7.object({
1701
1710
  fixReport: z7.object({
@@ -5664,6 +5673,7 @@ var BitbucketSCMLib = class extends SCMLib {
5664
5673
  // src/features/analysis/scm/constants.ts
5665
5674
  var MOBB_ICON_IMG = "https://app.mobb.ai/gh-action/Logo_Rounded_Icon.svg";
5666
5675
  var MAX_BRANCHES_FETCH = 1e3;
5676
+ var REPORT_DEFAULT_FILE_NAME = "report.json";
5667
5677
 
5668
5678
  // src/features/analysis/scm/github/GithubSCMLib.ts
5669
5679
  import { z as z21 } from "zod";
@@ -7319,16 +7329,17 @@ async function convertFprToSarif(inputFilePath, outputFilePath, codePathPatterns
7319
7329
  "results": [
7320
7330
  `
7321
7331
  );
7322
- vulnerabilityParser.getVulnerabilities().map(
7332
+ const filteredVulns = vulnerabilityParser.getVulnerabilities().map(
7323
7333
  (vulnerability) => fortifyVulnerabilityToSarifResult(
7324
7334
  vulnerability,
7325
7335
  auditMetadataParser,
7326
7336
  reportMetadataParser,
7327
7337
  unifiedNodePoolParser
7328
7338
  )
7329
- ).filter((sarifResult) => filterSarifResult(sarifResult, codePathPatterns)).forEach((sarifResult, index) => {
7339
+ ).filter((sarifResult) => filterSarifResult(sarifResult, codePathPatterns));
7340
+ filteredVulns.forEach((sarifResult, index) => {
7330
7341
  fs3.appendFileSync(outputFilePath, JSON.stringify(sarifResult, null, 2));
7331
- if (index !== vulnerabilityParser.getVulnerabilities().length - 1) {
7342
+ if (index !== filteredVulns.length - 1) {
7332
7343
  fs3.appendFileSync(outputFilePath, ",\n");
7333
7344
  }
7334
7345
  });
@@ -8663,7 +8674,7 @@ var GQLClient = class {
8663
8674
  }
8664
8675
  async uploadS3BucketInfo() {
8665
8676
  const uploadS3BucketInfoResult = await this._clientSdk.uploadS3BucketInfo({
8666
- fileName: "report.json"
8677
+ fileName: REPORT_DEFAULT_FILE_NAME
8667
8678
  });
8668
8679
  return uploadS3BucketInfoResult;
8669
8680
  }
@@ -8720,13 +8731,20 @@ var GQLClient = class {
8720
8731
  async digestVulnerabilityReport({
8721
8732
  fixReportId,
8722
8733
  projectId,
8723
- scanSource
8734
+ scanSource,
8735
+ repoUrl,
8736
+ reference,
8737
+ sha,
8738
+ shouldScan
8724
8739
  }) {
8725
8740
  const res = await this._clientSdk.DigestVulnerabilityReport({
8726
8741
  fixReportId,
8727
- vulnerabilityReportFileName: "report.json",
8742
+ vulnerabilityReportFileName: shouldScan ? void 0 : REPORT_DEFAULT_FILE_NAME,
8728
8743
  projectId,
8729
- scanSource
8744
+ scanSource,
8745
+ repoUrl,
8746
+ reference,
8747
+ sha
8730
8748
  });
8731
8749
  if (res.digestVulnerabilityReport.__typename !== "VulnerabilityReport") {
8732
8750
  throw new Error("Digesting vulnerability report failed");
@@ -8882,7 +8900,7 @@ function endsWithAny(str, suffixes) {
8882
8900
  function _get_manifest_files_suffixes() {
8883
8901
  return ["package.json", "pom.xml"];
8884
8902
  }
8885
- async function pack(srcDirPath, vulnFiles) {
8903
+ async function pack(srcDirPath, vulnFiles, isIncludeAllFiles = false) {
8886
8904
  debug13("pack folder %s", srcDirPath);
8887
8905
  let git = void 0;
8888
8906
  try {
@@ -8919,13 +8937,15 @@ async function pack(srcDirPath, vulnFiles) {
8919
8937
  debug13("compressing files");
8920
8938
  for (const filepath of filepaths) {
8921
8939
  const absFilepath = path5.join(srcDirPath, filepath.toString());
8922
- vulnFiles = vulnFiles.concat(_get_manifest_files_suffixes());
8923
- if (!endsWithAny(
8924
- absFilepath.toString().replaceAll(path5.win32.sep, path5.posix.sep),
8925
- vulnFiles
8926
- )) {
8927
- debug13("ignoring %s because it is not a vulnerability file", filepath);
8928
- continue;
8940
+ if (!isIncludeAllFiles) {
8941
+ vulnFiles = vulnFiles.concat(_get_manifest_files_suffixes());
8942
+ if (!endsWithAny(
8943
+ absFilepath.toString().replaceAll(path5.win32.sep, path5.posix.sep),
8944
+ vulnFiles
8945
+ )) {
8946
+ debug13("ignoring %s because it is not a vulnerability file", filepath);
8947
+ continue;
8948
+ }
8929
8949
  }
8930
8950
  if (fs5.lstatSync(absFilepath).size > MAX_FILE_SIZE) {
8931
8951
  debug13("ignoring %s because the size is > 5MB", filepath);
@@ -9036,19 +9056,18 @@ var cxOperatingSystemSupportMessage = `Your operating system does not support ch
9036
9056
  import cp from "node:child_process";
9037
9057
  import Debug14 from "debug";
9038
9058
  import * as process2 from "process";
9039
- import supportsColor from "supports-color";
9040
- var { stdout: stdout2 } = supportsColor;
9041
9059
  function createFork({ args, processPath, name }, options) {
9042
9060
  const child = cp.fork(processPath, args, {
9043
9061
  stdio: ["inherit", "pipe", "pipe", "ipc"],
9044
- env: { ...process2.env, FORCE_COLOR: stdout2 ? "1" : "0" }
9062
+ env: { ...process2.env }
9045
9063
  });
9046
9064
  return createChildProcess({ childProcess: child, name }, options);
9047
9065
  }
9048
- function createSpwan({ args, processPath, name }, options) {
9066
+ function createSpawn({ args, processPath, name, cwd }, options) {
9049
9067
  const child = cp.spawn(processPath, args, {
9050
9068
  stdio: ["inherit", "pipe", "pipe", "ipc"],
9051
- env: { ...process2.env, FORCE_COLOR: stdout2 ? "1" : "0" }
9069
+ env: { ...process2.env },
9070
+ cwd
9052
9071
  });
9053
9072
  return createChildProcess({ childProcess: child, name }, options);
9054
9073
  }
@@ -9131,7 +9150,7 @@ function validateCheckmarxInstallation() {
9131
9150
  }
9132
9151
  async function forkCheckmarx(args, { display }) {
9133
9152
  debug14("fork checkmarx with args %o %s", args.join(" "), display);
9134
- return createSpwan(
9153
+ return createSpawn(
9135
9154
  { args, processPath: getCheckmarxPath(), name: "checkmarx" },
9136
9155
  { display }
9137
9156
  );
@@ -9468,7 +9487,7 @@ async function getReport(params, { skipPrompts }) {
9468
9487
  authHeaders: scm.getAuthHeaders(),
9469
9488
  downloadUrl
9470
9489
  });
9471
- const reportPath = path7.join(dirname, "report.json");
9490
+ const reportPath = path7.join(dirname, REPORT_DEFAULT_FILE_NAME);
9472
9491
  switch (scanner) {
9473
9492
  case "snyk":
9474
9493
  await getSnykReport(reportPath, repositoryRoot, { skipPrompts });
@@ -9600,17 +9619,17 @@ async function _scan(params, { skipPrompts = false } = {}) {
9600
9619
  { skipPrompts }
9601
9620
  );
9602
9621
  }
9603
- if (!reportPath) {
9604
- throw new Error("reportPath is null");
9605
- }
9622
+ const shouldScan = !reportPath;
9606
9623
  const uploadReportSpinner = createSpinner5("\u{1F4C1} Uploading Report").start();
9607
9624
  try {
9608
- await uploadFile({
9609
- file: reportPath,
9610
- url: reportUploadInfo.url,
9611
- uploadFields: JSON.parse(reportUploadInfo.uploadFieldsJSON),
9612
- uploadKey: reportUploadInfo.uploadKey
9613
- });
9625
+ if (reportPath) {
9626
+ await uploadFile({
9627
+ file: reportPath,
9628
+ url: reportUploadInfo.url,
9629
+ uploadFields: JSON.parse(reportUploadInfo.uploadFieldsJSON),
9630
+ uploadKey: reportUploadInfo.uploadKey
9631
+ });
9632
+ }
9614
9633
  } catch (e) {
9615
9634
  uploadReportSpinner.error({ text: "\u{1F4C1} Report upload failed" });
9616
9635
  throw e;
@@ -9620,7 +9639,11 @@ async function _scan(params, { skipPrompts = false } = {}) {
9620
9639
  fixReportId: reportUploadInfo.fixReportId,
9621
9640
  projectId,
9622
9641
  command,
9623
- ci
9642
+ ci,
9643
+ repoUrl: repo,
9644
+ sha,
9645
+ reference,
9646
+ shouldScan
9624
9647
  });
9625
9648
  uploadReportSpinner.success({ text: "\u{1F4C1} Report uploaded successfully" });
9626
9649
  const mobbSpinner = createSpinner5("\u{1F575}\uFE0F\u200D\u2642\uFE0F Initiating Mobb analysis").start();
@@ -9632,7 +9655,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
9632
9655
  repoUrl: z29.string().parse(repo),
9633
9656
  reference,
9634
9657
  projectId,
9635
- vulnerabilityReportFileName: "report.json",
9658
+ vulnerabilityReportFileName: shouldScan ? void 0 : REPORT_DEFAULT_FILE_NAME,
9636
9659
  sha,
9637
9660
  experimentalEnabled: !!experimentalEnabled,
9638
9661
  pullRequest: params.pullRequest,
@@ -9729,55 +9752,61 @@ async function _scan(params, { skipPrompts = false } = {}) {
9729
9752
  if (!repoUploadInfo || !reportUploadInfo) {
9730
9753
  throw new Error("uploadS3BucketInfo is null");
9731
9754
  }
9732
- if (!srcPath || !reportPath) {
9733
- throw new Error("src path and reportPath is required");
9755
+ if (!srcPath) {
9756
+ throw new Error("src path is required");
9734
9757
  }
9735
- const uploadReportSpinner2 = createSpinner5("\u{1F4C1} Uploading Report").start();
9736
- try {
9737
- await uploadFile({
9738
- file: reportPath,
9739
- url: reportUploadInfo.url,
9740
- uploadFields: JSON.parse(reportUploadInfo.uploadFieldsJSON),
9741
- uploadKey: reportUploadInfo.uploadKey
9758
+ const shouldScan2 = !reportPath;
9759
+ if (reportPath) {
9760
+ const uploadReportSpinner2 = createSpinner5("\u{1F4C1} Uploading Report").start();
9761
+ try {
9762
+ await uploadFile({
9763
+ file: reportPath,
9764
+ url: reportUploadInfo.url,
9765
+ uploadFields: JSON.parse(reportUploadInfo.uploadFieldsJSON),
9766
+ uploadKey: reportUploadInfo.uploadKey
9767
+ });
9768
+ } catch (e) {
9769
+ uploadReportSpinner2.error({ text: "\u{1F4C1} Report upload failed" });
9770
+ throw e;
9771
+ }
9772
+ uploadReportSpinner2.success({
9773
+ text: "\u{1F4C1} Uploading Report successful!"
9742
9774
  });
9743
- } catch (e) {
9744
- uploadReportSpinner2.error({ text: "\u{1F4C1} Report upload failed" });
9745
- throw e;
9746
9775
  }
9747
- uploadReportSpinner2.success({
9748
- text: "\u{1F4C1} Uploading Report successful!"
9749
- });
9750
- const vulnFiles = await _digestReport({
9751
- gqlClient,
9752
- fixReportId: reportUploadInfo.fixReportId,
9753
- projectId,
9754
- command,
9755
- ci
9756
- });
9757
- const srcFileStatus = await fsPromises.lstat(srcPath);
9758
- const zippingSpinner = createSpinner5("\u{1F4E6} Zipping repo").start();
9759
- let zipBuffer;
9760
9776
  let gitInfo = { success: false };
9761
- if (srcFileStatus.isFile() && path7.extname(srcPath).toLowerCase() === ".fpr") {
9762
- zipBuffer = await repackFpr(srcPath);
9777
+ if (reportPath) {
9778
+ const vulnFiles = await _digestReport({
9779
+ gqlClient,
9780
+ fixReportId: reportUploadInfo.fixReportId,
9781
+ projectId,
9782
+ command,
9783
+ ci,
9784
+ shouldScan: shouldScan2
9785
+ });
9786
+ const res = await _zipAndUploadRepo({
9787
+ srcPath,
9788
+ vulnFiles,
9789
+ repoUploadInfo,
9790
+ isIncludeAllFiles: false
9791
+ });
9792
+ gitInfo = res.gitInfo;
9763
9793
  } else {
9764
- gitInfo = await getGitInfo(srcPath);
9765
- zipBuffer = await pack(srcPath, vulnFiles);
9766
- }
9767
- zippingSpinner.success({ text: "\u{1F4E6} Zipping repo successful!" });
9768
- const uploadRepoSpinner = createSpinner5("\u{1F4C1} Uploading Repo").start();
9769
- try {
9770
- await uploadFile({
9771
- file: zipBuffer,
9772
- url: repoUploadInfo.url,
9773
- uploadFields: JSON.parse(repoUploadInfo.uploadFieldsJSON),
9774
- uploadKey: repoUploadInfo.uploadKey
9794
+ const res = await _zipAndUploadRepo({
9795
+ srcPath,
9796
+ vulnFiles: [],
9797
+ repoUploadInfo,
9798
+ isIncludeAllFiles: true
9799
+ });
9800
+ gitInfo = res.gitInfo;
9801
+ await _digestReport({
9802
+ gqlClient,
9803
+ fixReportId: reportUploadInfo.fixReportId,
9804
+ projectId,
9805
+ command,
9806
+ ci,
9807
+ shouldScan: shouldScan2
9775
9808
  });
9776
- } catch (e) {
9777
- uploadRepoSpinner.error({ text: "\u{1F4C1} Repo upload failed" });
9778
- throw e;
9779
9809
  }
9780
- uploadRepoSpinner.success({ text: "\u{1F4C1} Uploading Repo successful!" });
9781
9810
  const mobbSpinner2 = createSpinner5("\u{1F575}\uFE0F\u200D\u2642\uFE0F Initiating Mobb analysis").start();
9782
9811
  try {
9783
9812
  await sendReport({
@@ -9824,12 +9853,48 @@ async function _scan(params, { skipPrompts = false } = {}) {
9824
9853
  return reportUploadInfo.fixReportId;
9825
9854
  }
9826
9855
  }
9856
+ async function _zipAndUploadRepo({
9857
+ srcPath,
9858
+ vulnFiles,
9859
+ repoUploadInfo,
9860
+ isIncludeAllFiles
9861
+ }) {
9862
+ const srcFileStatus = await fsPromises.lstat(srcPath);
9863
+ const zippingSpinner = createSpinner4("\u{1F4E6} Zipping repo").start();
9864
+ let zipBuffer;
9865
+ let gitInfo = { success: false };
9866
+ if (srcFileStatus.isFile() && path7.extname(srcPath).toLowerCase() === ".fpr") {
9867
+ zipBuffer = await repackFpr(srcPath);
9868
+ } else {
9869
+ gitInfo = await getGitInfo(srcPath);
9870
+ zipBuffer = await pack(srcPath, vulnFiles, isIncludeAllFiles);
9871
+ }
9872
+ zippingSpinner.success({ text: "\u{1F4E6} Zipping repo successful!" });
9873
+ const uploadRepoSpinner = createSpinner4("\u{1F4C1} Uploading Repo").start();
9874
+ try {
9875
+ await uploadFile({
9876
+ file: zipBuffer,
9877
+ url: repoUploadInfo.url,
9878
+ uploadFields: JSON.parse(repoUploadInfo.uploadFieldsJSON),
9879
+ uploadKey: repoUploadInfo.uploadKey
9880
+ });
9881
+ } catch (e) {
9882
+ uploadRepoSpinner.error({ text: "\u{1F4C1} Repo upload failed" });
9883
+ throw e;
9884
+ }
9885
+ uploadRepoSpinner.success({ text: "\u{1F4C1} Uploading Repo successful!" });
9886
+ return { gitInfo };
9887
+ }
9827
9888
  async function _digestReport({
9828
9889
  gqlClient,
9829
9890
  fixReportId,
9830
9891
  projectId,
9831
9892
  command,
9832
- ci
9893
+ ci,
9894
+ repoUrl,
9895
+ sha,
9896
+ reference,
9897
+ shouldScan
9833
9898
  }) {
9834
9899
  const digestSpinner = createSpinner4(
9835
9900
  progressMassages.processingVulnerabilityReport
@@ -9839,7 +9904,11 @@ async function _digestReport({
9839
9904
  {
9840
9905
  fixReportId,
9841
9906
  projectId,
9842
- scanSource: _getScanSource(command, ci)
9907
+ scanSource: _getScanSource(command, ci),
9908
+ repoUrl,
9909
+ sha,
9910
+ reference,
9911
+ shouldScan
9843
9912
  }
9844
9913
  );
9845
9914
  try {
@@ -10202,7 +10271,6 @@ ${chalk7.bold(
10202
10271
  function analyzeBuilder(yargs2) {
10203
10272
  return yargs2.option("f", {
10204
10273
  alias: "scan-file",
10205
- demandOption: true,
10206
10274
  type: "string",
10207
10275
  describe: chalk8.bold(
10208
10276
  "Select the vulnerability report to analyze (Checkmarx, Snyk, Fortify, CodeQL, Sonarqube, Semgrep, Datadog)"
@@ -10228,8 +10296,7 @@ function analyzeBuilder(yargs2) {
10228
10296
  ).help();
10229
10297
  }
10230
10298
  function validateAnalyzeOptions(argv) {
10231
- console.log("argv", argv);
10232
- if (!fs7.existsSync(argv.f)) {
10299
+ if (argv.f && !fs7.existsSync(argv.f)) {
10233
10300
  throw new CliError(`
10234
10301
  Can't access ${chalk8.bold(argv.f)}`);
10235
10302
  }
@@ -10263,7 +10330,9 @@ Can't access ${chalk8.bold(argv.f)}`);
10263
10330
  "--pull-request flag requires --commit-directly to be provided as well"
10264
10331
  );
10265
10332
  }
10266
- validateReportFileFormat(argv.f);
10333
+ if (argv.f) {
10334
+ validateReportFileFormat(argv.f);
10335
+ }
10267
10336
  }
10268
10337
  async function analyzeHandler(args) {
10269
10338
  validateAnalyzeOptions(args);
@@ -10406,7 +10475,7 @@ var parseArgs = async (args) => {
10406
10475
  ).command(
10407
10476
  mobbCliCommand.analyze,
10408
10477
  chalk10.bold(
10409
- "Provide a vulnerability report and relevant code repository, get automated fixes right away."
10478
+ "Provide a code repository, get automated fixes right away. You can also provide a vulnerability report to analyze or have Mobb scan the code for you."
10410
10479
  ),
10411
10480
  analyzeBuilder,
10412
10481
  analyzeHandler
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "mobbdev",
3
- "version": "1.0.82",
3
+ "version": "1.0.84",
4
4
  "description": "Automated secure code remediation tool",
5
5
  "repository": "git+https://github.com/mobb-dev/bugsy.git",
6
6
  "main": "dist/index.js",
@@ -71,7 +71,6 @@
71
71
  "semver": "7.7.1",
72
72
  "simple-git": "3.27.0",
73
73
  "snyk": "1.1296.2",
74
- "supports-color": "10.0.0",
75
74
  "tar": "6.2.1",
76
75
  "tmp": "0.2.3",
77
76
  "undici": "6.21.1",