mobbdev 1.0.83 → 1.0.85

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.mjs +153 -85
  2. package/package.json +1 -2
package/dist/index.mjs CHANGED
@@ -761,12 +761,15 @@ var UploadS3BucketInfoDocument = `
761
761
  }
762
762
  `;
763
763
  var DigestVulnerabilityReportDocument = `
764
- mutation DigestVulnerabilityReport($vulnerabilityReportFileName: String!, $fixReportId: String!, $projectId: String!, $scanSource: String!) {
764
+ mutation DigestVulnerabilityReport($vulnerabilityReportFileName: String, $fixReportId: String!, $projectId: String!, $scanSource: String!, $repoUrl: String, $reference: String, $sha: String) {
765
765
  digestVulnerabilityReport(
766
766
  fixReportId: $fixReportId
767
767
  vulnerabilityReportFileName: $vulnerabilityReportFileName
768
768
  projectId: $projectId
769
769
  scanSource: $scanSource
770
+ repoUrl: $repoUrl
771
+ reference: $reference
772
+ sha: $sha
770
773
  ) {
771
774
  __typename
772
775
  ... on VulnerabilityReport {
@@ -1188,6 +1191,10 @@ var ValidCategoriesZ = z4.union([
1188
1191
  z4.literal(CATEGORY.FalsePositive),
1189
1192
  z4.literal(CATEGORY.Fixable)
1190
1193
  ]);
1194
+ var VulnerabilityReportIssueSharedStateZ = z4.object({
1195
+ id: z4.string().uuid(),
1196
+ isArchived: z4.boolean()
1197
+ }).nullish();
1191
1198
  var BaseIssuePartsZ = z4.object({
1192
1199
  id: z4.string().uuid(),
1193
1200
  safeIssueType: z4.string(),
@@ -1239,7 +1246,8 @@ var BaseIssuePartsZ = z4.object({
1239
1246
  const codeDiff = await fetch(url).then((res) => res.text());
1240
1247
  return { codeDiff };
1241
1248
  })
1242
- }).nullish()
1249
+ }).nullish(),
1250
+ sharedState: VulnerabilityReportIssueSharedStateZ
1243
1251
  });
1244
1252
  var FalsePositivePartsZ = z4.object({
1245
1253
  extraContext: z4.array(z4.object({ key: z4.string(), value: z4.string() })),
@@ -1695,7 +1703,8 @@ var VulnerabilityReportIssueZ = z7.object({
1695
1703
  z7.object({
1696
1704
  vulnerability_report_issue_tag_value: z7.string()
1697
1705
  })
1698
- )
1706
+ ),
1707
+ sharedState: VulnerabilityReportIssueSharedStateZ
1699
1708
  });
1700
1709
  var GetReportIssuesQueryZ = z7.object({
1701
1710
  fixReport: z7.object({
@@ -5664,6 +5673,7 @@ var BitbucketSCMLib = class extends SCMLib {
5664
5673
  // src/features/analysis/scm/constants.ts
5665
5674
  var MOBB_ICON_IMG = "https://app.mobb.ai/gh-action/Logo_Rounded_Icon.svg";
5666
5675
  var MAX_BRANCHES_FETCH = 1e3;
5676
+ var REPORT_DEFAULT_FILE_NAME = "report.json";
5667
5677
 
5668
5678
  // src/features/analysis/scm/github/GithubSCMLib.ts
5669
5679
  import { z as z21 } from "zod";
@@ -8621,9 +8631,6 @@ var GQLClient = class {
8621
8631
  }
8622
8632
  const { organization: org } = organizationToOrganizationRole;
8623
8633
  const project = projectName ? org?.projects.find((project2) => project2.name === projectName) ?? null : org?.projects[0];
8624
- if (!project?.id) {
8625
- throw new Error("Project not found");
8626
- }
8627
8634
  let projectId = project?.id;
8628
8635
  if (!projectId) {
8629
8636
  const createdProject = await this._clientSdk.CreateProject({
@@ -8632,6 +8639,9 @@ var GQLClient = class {
8632
8639
  });
8633
8640
  projectId = createdProject.createProject.projectId;
8634
8641
  }
8642
+ if (!project?.id) {
8643
+ throw new Error("Project not found");
8644
+ }
8635
8645
  return {
8636
8646
  organizationId: org.id,
8637
8647
  projectId
@@ -8664,7 +8674,7 @@ var GQLClient = class {
8664
8674
  }
8665
8675
  async uploadS3BucketInfo() {
8666
8676
  const uploadS3BucketInfoResult = await this._clientSdk.uploadS3BucketInfo({
8667
- fileName: "report.json"
8677
+ fileName: REPORT_DEFAULT_FILE_NAME
8668
8678
  });
8669
8679
  return uploadS3BucketInfoResult;
8670
8680
  }
@@ -8721,13 +8731,20 @@ var GQLClient = class {
8721
8731
  async digestVulnerabilityReport({
8722
8732
  fixReportId,
8723
8733
  projectId,
8724
- scanSource
8734
+ scanSource,
8735
+ repoUrl,
8736
+ reference,
8737
+ sha,
8738
+ shouldScan
8725
8739
  }) {
8726
8740
  const res = await this._clientSdk.DigestVulnerabilityReport({
8727
8741
  fixReportId,
8728
- vulnerabilityReportFileName: "report.json",
8742
+ vulnerabilityReportFileName: shouldScan ? void 0 : REPORT_DEFAULT_FILE_NAME,
8729
8743
  projectId,
8730
- scanSource
8744
+ scanSource,
8745
+ repoUrl,
8746
+ reference,
8747
+ sha
8731
8748
  });
8732
8749
  if (res.digestVulnerabilityReport.__typename !== "VulnerabilityReport") {
8733
8750
  throw new Error("Digesting vulnerability report failed");
@@ -8883,7 +8900,7 @@ function endsWithAny(str, suffixes) {
8883
8900
  function _get_manifest_files_suffixes() {
8884
8901
  return ["package.json", "pom.xml"];
8885
8902
  }
8886
- async function pack(srcDirPath, vulnFiles) {
8903
+ async function pack(srcDirPath, vulnFiles, isIncludeAllFiles = false) {
8887
8904
  debug13("pack folder %s", srcDirPath);
8888
8905
  let git = void 0;
8889
8906
  try {
@@ -8920,13 +8937,15 @@ async function pack(srcDirPath, vulnFiles) {
8920
8937
  debug13("compressing files");
8921
8938
  for (const filepath of filepaths) {
8922
8939
  const absFilepath = path5.join(srcDirPath, filepath.toString());
8923
- vulnFiles = vulnFiles.concat(_get_manifest_files_suffixes());
8924
- if (!endsWithAny(
8925
- absFilepath.toString().replaceAll(path5.win32.sep, path5.posix.sep),
8926
- vulnFiles
8927
- )) {
8928
- debug13("ignoring %s because it is not a vulnerability file", filepath);
8929
- continue;
8940
+ if (!isIncludeAllFiles) {
8941
+ vulnFiles = vulnFiles.concat(_get_manifest_files_suffixes());
8942
+ if (!endsWithAny(
8943
+ absFilepath.toString().replaceAll(path5.win32.sep, path5.posix.sep),
8944
+ vulnFiles
8945
+ )) {
8946
+ debug13("ignoring %s because it is not a vulnerability file", filepath);
8947
+ continue;
8948
+ }
8930
8949
  }
8931
8950
  if (fs5.lstatSync(absFilepath).size > MAX_FILE_SIZE) {
8932
8951
  debug13("ignoring %s because the size is > 5MB", filepath);
@@ -9037,19 +9056,18 @@ var cxOperatingSystemSupportMessage = `Your operating system does not support ch
9037
9056
  import cp from "node:child_process";
9038
9057
  import Debug14 from "debug";
9039
9058
  import * as process2 from "process";
9040
- import supportsColor from "supports-color";
9041
- var { stdout: stdout2 } = supportsColor;
9042
9059
  function createFork({ args, processPath, name }, options) {
9043
9060
  const child = cp.fork(processPath, args, {
9044
9061
  stdio: ["inherit", "pipe", "pipe", "ipc"],
9045
- env: { ...process2.env, FORCE_COLOR: stdout2 ? "1" : "0" }
9062
+ env: { ...process2.env }
9046
9063
  });
9047
9064
  return createChildProcess({ childProcess: child, name }, options);
9048
9065
  }
9049
- function createSpwan({ args, processPath, name }, options) {
9066
+ function createSpawn({ args, processPath, name, cwd }, options) {
9050
9067
  const child = cp.spawn(processPath, args, {
9051
9068
  stdio: ["inherit", "pipe", "pipe", "ipc"],
9052
- env: { ...process2.env, FORCE_COLOR: stdout2 ? "1" : "0" }
9069
+ env: { ...process2.env },
9070
+ cwd
9053
9071
  });
9054
9072
  return createChildProcess({ childProcess: child, name }, options);
9055
9073
  }
@@ -9132,7 +9150,7 @@ function validateCheckmarxInstallation() {
9132
9150
  }
9133
9151
  async function forkCheckmarx(args, { display }) {
9134
9152
  debug14("fork checkmarx with args %o %s", args.join(" "), display);
9135
- return createSpwan(
9153
+ return createSpawn(
9136
9154
  { args, processPath: getCheckmarxPath(), name: "checkmarx" },
9137
9155
  { display }
9138
9156
  );
@@ -9469,7 +9487,7 @@ async function getReport(params, { skipPrompts }) {
9469
9487
  authHeaders: scm.getAuthHeaders(),
9470
9488
  downloadUrl
9471
9489
  });
9472
- const reportPath = path7.join(dirname, "report.json");
9490
+ const reportPath = path7.join(dirname, REPORT_DEFAULT_FILE_NAME);
9473
9491
  switch (scanner) {
9474
9492
  case "snyk":
9475
9493
  await getSnykReport(reportPath, repositoryRoot, { skipPrompts });
@@ -9601,17 +9619,17 @@ async function _scan(params, { skipPrompts = false } = {}) {
9601
9619
  { skipPrompts }
9602
9620
  );
9603
9621
  }
9604
- if (!reportPath) {
9605
- throw new Error("reportPath is null");
9606
- }
9622
+ const shouldScan = !reportPath;
9607
9623
  const uploadReportSpinner = createSpinner5("\u{1F4C1} Uploading Report").start();
9608
9624
  try {
9609
- await uploadFile({
9610
- file: reportPath,
9611
- url: reportUploadInfo.url,
9612
- uploadFields: JSON.parse(reportUploadInfo.uploadFieldsJSON),
9613
- uploadKey: reportUploadInfo.uploadKey
9614
- });
9625
+ if (reportPath) {
9626
+ await uploadFile({
9627
+ file: reportPath,
9628
+ url: reportUploadInfo.url,
9629
+ uploadFields: JSON.parse(reportUploadInfo.uploadFieldsJSON),
9630
+ uploadKey: reportUploadInfo.uploadKey
9631
+ });
9632
+ }
9615
9633
  } catch (e) {
9616
9634
  uploadReportSpinner.error({ text: "\u{1F4C1} Report upload failed" });
9617
9635
  throw e;
@@ -9621,7 +9639,11 @@ async function _scan(params, { skipPrompts = false } = {}) {
9621
9639
  fixReportId: reportUploadInfo.fixReportId,
9622
9640
  projectId,
9623
9641
  command,
9624
- ci
9642
+ ci,
9643
+ repoUrl: repo,
9644
+ sha,
9645
+ reference,
9646
+ shouldScan
9625
9647
  });
9626
9648
  uploadReportSpinner.success({ text: "\u{1F4C1} Report uploaded successfully" });
9627
9649
  const mobbSpinner = createSpinner5("\u{1F575}\uFE0F\u200D\u2642\uFE0F Initiating Mobb analysis").start();
@@ -9633,7 +9655,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
9633
9655
  repoUrl: z29.string().parse(repo),
9634
9656
  reference,
9635
9657
  projectId,
9636
- vulnerabilityReportFileName: "report.json",
9658
+ vulnerabilityReportFileName: shouldScan ? void 0 : REPORT_DEFAULT_FILE_NAME,
9637
9659
  sha,
9638
9660
  experimentalEnabled: !!experimentalEnabled,
9639
9661
  pullRequest: params.pullRequest,
@@ -9730,55 +9752,61 @@ async function _scan(params, { skipPrompts = false } = {}) {
9730
9752
  if (!repoUploadInfo || !reportUploadInfo) {
9731
9753
  throw new Error("uploadS3BucketInfo is null");
9732
9754
  }
9733
- if (!srcPath || !reportPath) {
9734
- throw new Error("src path and reportPath is required");
9755
+ if (!srcPath) {
9756
+ throw new Error("src path is required");
9735
9757
  }
9736
- const uploadReportSpinner2 = createSpinner5("\u{1F4C1} Uploading Report").start();
9737
- try {
9738
- await uploadFile({
9739
- file: reportPath,
9740
- url: reportUploadInfo.url,
9741
- uploadFields: JSON.parse(reportUploadInfo.uploadFieldsJSON),
9742
- uploadKey: reportUploadInfo.uploadKey
9758
+ const shouldScan2 = !reportPath;
9759
+ if (reportPath) {
9760
+ const uploadReportSpinner2 = createSpinner5("\u{1F4C1} Uploading Report").start();
9761
+ try {
9762
+ await uploadFile({
9763
+ file: reportPath,
9764
+ url: reportUploadInfo.url,
9765
+ uploadFields: JSON.parse(reportUploadInfo.uploadFieldsJSON),
9766
+ uploadKey: reportUploadInfo.uploadKey
9767
+ });
9768
+ } catch (e) {
9769
+ uploadReportSpinner2.error({ text: "\u{1F4C1} Report upload failed" });
9770
+ throw e;
9771
+ }
9772
+ uploadReportSpinner2.success({
9773
+ text: "\u{1F4C1} Uploading Report successful!"
9743
9774
  });
9744
- } catch (e) {
9745
- uploadReportSpinner2.error({ text: "\u{1F4C1} Report upload failed" });
9746
- throw e;
9747
9775
  }
9748
- uploadReportSpinner2.success({
9749
- text: "\u{1F4C1} Uploading Report successful!"
9750
- });
9751
- const vulnFiles = await _digestReport({
9752
- gqlClient,
9753
- fixReportId: reportUploadInfo.fixReportId,
9754
- projectId,
9755
- command,
9756
- ci
9757
- });
9758
- const srcFileStatus = await fsPromises.lstat(srcPath);
9759
- const zippingSpinner = createSpinner5("\u{1F4E6} Zipping repo").start();
9760
- let zipBuffer;
9761
9776
  let gitInfo = { success: false };
9762
- if (srcFileStatus.isFile() && path7.extname(srcPath).toLowerCase() === ".fpr") {
9763
- zipBuffer = await repackFpr(srcPath);
9777
+ if (reportPath) {
9778
+ const vulnFiles = await _digestReport({
9779
+ gqlClient,
9780
+ fixReportId: reportUploadInfo.fixReportId,
9781
+ projectId,
9782
+ command,
9783
+ ci,
9784
+ shouldScan: shouldScan2
9785
+ });
9786
+ const res = await _zipAndUploadRepo({
9787
+ srcPath,
9788
+ vulnFiles,
9789
+ repoUploadInfo,
9790
+ isIncludeAllFiles: false
9791
+ });
9792
+ gitInfo = res.gitInfo;
9764
9793
  } else {
9765
- gitInfo = await getGitInfo(srcPath);
9766
- zipBuffer = await pack(srcPath, vulnFiles);
9767
- }
9768
- zippingSpinner.success({ text: "\u{1F4E6} Zipping repo successful!" });
9769
- const uploadRepoSpinner = createSpinner5("\u{1F4C1} Uploading Repo").start();
9770
- try {
9771
- await uploadFile({
9772
- file: zipBuffer,
9773
- url: repoUploadInfo.url,
9774
- uploadFields: JSON.parse(repoUploadInfo.uploadFieldsJSON),
9775
- uploadKey: repoUploadInfo.uploadKey
9794
+ const res = await _zipAndUploadRepo({
9795
+ srcPath,
9796
+ vulnFiles: [],
9797
+ repoUploadInfo,
9798
+ isIncludeAllFiles: true
9799
+ });
9800
+ gitInfo = res.gitInfo;
9801
+ await _digestReport({
9802
+ gqlClient,
9803
+ fixReportId: reportUploadInfo.fixReportId,
9804
+ projectId,
9805
+ command,
9806
+ ci,
9807
+ shouldScan: shouldScan2
9776
9808
  });
9777
- } catch (e) {
9778
- uploadRepoSpinner.error({ text: "\u{1F4C1} Repo upload failed" });
9779
- throw e;
9780
9809
  }
9781
- uploadRepoSpinner.success({ text: "\u{1F4C1} Uploading Repo successful!" });
9782
9810
  const mobbSpinner2 = createSpinner5("\u{1F575}\uFE0F\u200D\u2642\uFE0F Initiating Mobb analysis").start();
9783
9811
  try {
9784
9812
  await sendReport({
@@ -9825,12 +9853,48 @@ async function _scan(params, { skipPrompts = false } = {}) {
9825
9853
  return reportUploadInfo.fixReportId;
9826
9854
  }
9827
9855
  }
9856
+ async function _zipAndUploadRepo({
9857
+ srcPath,
9858
+ vulnFiles,
9859
+ repoUploadInfo,
9860
+ isIncludeAllFiles
9861
+ }) {
9862
+ const srcFileStatus = await fsPromises.lstat(srcPath);
9863
+ const zippingSpinner = createSpinner4("\u{1F4E6} Zipping repo").start();
9864
+ let zipBuffer;
9865
+ let gitInfo = { success: false };
9866
+ if (srcFileStatus.isFile() && path7.extname(srcPath).toLowerCase() === ".fpr") {
9867
+ zipBuffer = await repackFpr(srcPath);
9868
+ } else {
9869
+ gitInfo = await getGitInfo(srcPath);
9870
+ zipBuffer = await pack(srcPath, vulnFiles, isIncludeAllFiles);
9871
+ }
9872
+ zippingSpinner.success({ text: "\u{1F4E6} Zipping repo successful!" });
9873
+ const uploadRepoSpinner = createSpinner4("\u{1F4C1} Uploading Repo").start();
9874
+ try {
9875
+ await uploadFile({
9876
+ file: zipBuffer,
9877
+ url: repoUploadInfo.url,
9878
+ uploadFields: JSON.parse(repoUploadInfo.uploadFieldsJSON),
9879
+ uploadKey: repoUploadInfo.uploadKey
9880
+ });
9881
+ } catch (e) {
9882
+ uploadRepoSpinner.error({ text: "\u{1F4C1} Repo upload failed" });
9883
+ throw e;
9884
+ }
9885
+ uploadRepoSpinner.success({ text: "\u{1F4C1} Uploading Repo successful!" });
9886
+ return { gitInfo };
9887
+ }
9828
9888
  async function _digestReport({
9829
9889
  gqlClient,
9830
9890
  fixReportId,
9831
9891
  projectId,
9832
9892
  command,
9833
- ci
9893
+ ci,
9894
+ repoUrl,
9895
+ sha,
9896
+ reference,
9897
+ shouldScan
9834
9898
  }) {
9835
9899
  const digestSpinner = createSpinner4(
9836
9900
  progressMassages.processingVulnerabilityReport
@@ -9840,7 +9904,11 @@ async function _digestReport({
9840
9904
  {
9841
9905
  fixReportId,
9842
9906
  projectId,
9843
- scanSource: _getScanSource(command, ci)
9907
+ scanSource: _getScanSource(command, ci),
9908
+ repoUrl,
9909
+ sha,
9910
+ reference,
9911
+ shouldScan
9844
9912
  }
9845
9913
  );
9846
9914
  try {
@@ -10203,7 +10271,6 @@ ${chalk7.bold(
10203
10271
  function analyzeBuilder(yargs2) {
10204
10272
  return yargs2.option("f", {
10205
10273
  alias: "scan-file",
10206
- demandOption: true,
10207
10274
  type: "string",
10208
10275
  describe: chalk8.bold(
10209
10276
  "Select the vulnerability report to analyze (Checkmarx, Snyk, Fortify, CodeQL, Sonarqube, Semgrep, Datadog)"
@@ -10229,8 +10296,7 @@ function analyzeBuilder(yargs2) {
10229
10296
  ).help();
10230
10297
  }
10231
10298
  function validateAnalyzeOptions(argv) {
10232
- console.log("argv", argv);
10233
- if (!fs7.existsSync(argv.f)) {
10299
+ if (argv.f && !fs7.existsSync(argv.f)) {
10234
10300
  throw new CliError(`
10235
10301
  Can't access ${chalk8.bold(argv.f)}`);
10236
10302
  }
@@ -10264,7 +10330,9 @@ Can't access ${chalk8.bold(argv.f)}`);
10264
10330
  "--pull-request flag requires --commit-directly to be provided as well"
10265
10331
  );
10266
10332
  }
10267
- validateReportFileFormat(argv.f);
10333
+ if (argv.f) {
10334
+ validateReportFileFormat(argv.f);
10335
+ }
10268
10336
  }
10269
10337
  async function analyzeHandler(args) {
10270
10338
  validateAnalyzeOptions(args);
@@ -10407,7 +10475,7 @@ var parseArgs = async (args) => {
10407
10475
  ).command(
10408
10476
  mobbCliCommand.analyze,
10409
10477
  chalk10.bold(
10410
- "Provide a vulnerability report and relevant code repository, get automated fixes right away."
10478
+ "Provide a code repository, get automated fixes right away. You can also provide a vulnerability report to analyze or have Mobb scan the code for you."
10411
10479
  ),
10412
10480
  analyzeBuilder,
10413
10481
  analyzeHandler
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "mobbdev",
3
- "version": "1.0.83",
3
+ "version": "1.0.85",
4
4
  "description": "Automated secure code remediation tool",
5
5
  "repository": "git+https://github.com/mobb-dev/bugsy.git",
6
6
  "main": "dist/index.js",
@@ -71,7 +71,6 @@
71
71
  "semver": "7.7.1",
72
72
  "simple-git": "3.27.0",
73
73
  "snyk": "1.1296.2",
74
- "supports-color": "10.0.0",
75
74
  "tar": "6.2.1",
76
75
  "tmp": "0.2.3",
77
76
  "undici": "6.21.1",