mobbdev 0.0.37 → 0.0.45
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -0
- package/bin/cli.mjs +1 -1
- package/dist/index.mjs +551 -201
- package/package.json +5 -1
package/README.md
CHANGED
@@ -40,6 +40,24 @@ npx mobbdev scan -r https://github.com/mobb-dev/simple-vulnerable-java-project
 
 Bugsy will automatically generate a fix for each supported vulnerability identified in the SAST results, present it to developers for review and commit to their code.
 
+## Contribution
+
+Install the dependencies and run the tests:
+
+```shell
+pnpm install
+
+# or use npm run build:dev to watch for changes
+pnpm run build
+
+# or use npm test:watch to watch for changes
+pnpm run test
+```
+
+### Debugging
+
+If you're using VSCode, you can use the `launch.json` file to debug the code. Run the `CLI tests` configuration to continuously run and debug the tests.
+
 ## Getting support
 
 If you need support using Bugsy or just want to share your thoughts and learn more, you are more than welcome to join our [discord server](https://bit.ly/Mobb-discord)
package/bin/cli.mjs
CHANGED
@@ -1,2 +1,2 @@
 #!/usr/bin/env node
-import '../dist/index.
+import '../dist/index.mjs'
package/dist/index.mjs
CHANGED
@@ -13,7 +13,7 @@ var __publicField = (obj, key, value) => {
 import { hideBin } from "yargs/helpers";
 
 // src/args/yargs.ts
-import
+import chalk8 from "chalk";
 import yargs from "yargs/yargs";
 
 // src/args/commands/analyze.ts
@@ -35,7 +35,6 @@ var SCANNERS = {
 Snyk: "snyk"
 };
 var envVariablesSchema = z.object({
-WEB_LOGIN_URL: z.string(),
 WEB_APP_URL: z.string(),
 API_URL: z.string()
 }).required();
@@ -69,7 +68,6 @@ var mobbAscii = `
 ...............................
 .................
 `;
-var WEB_LOGIN_URL = envVariables.WEB_LOGIN_URL;
 var WEB_APP_URL = envVariables.WEB_APP_URL;
 var API_URL = envVariables.API_URL;
 
@@ -77,7 +75,7 @@ var API_URL = envVariables.API_URL;
 import crypto from "node:crypto";
 import fs3 from "node:fs";
 import os2 from "node:os";
-import
+import path6 from "node:path";
 import { pipeline } from "node:stream/promises";
 
 // src/utils/index.ts
@@ -141,9 +139,9 @@ var CliError = class extends Error {
 };
 
 // src/features/analysis/index.ts
-import
+import chalk3 from "chalk";
 import Configstore from "configstore";
-import
+import Debug9 from "debug";
 import extract from "extract-zip";
 import fetch3 from "node-fetch";
 import open2 from "open";
@@ -220,6 +218,50 @@ var UPLOAD_S3_BUCKET_INFO = gql`
 }
 }
 `;
+var DIGEST_VULNERABILITY_REPORT = gql`
+mutation DigestVulnerabilityReport(
+$vulnerabilityReportFileName: String!
+$fixReportId: String!
+$projectId: String!
+$repoUrl: String!
+$reference: String!
+$sha: String
+) {
+digestVulnerabilityReport(
+fixReportId: $fixReportId
+vulnerabilityReportFileName: $vulnerabilityReportFileName
+projectId: $projectId
+repoUrl: $repoUrl
+reference: $reference
+sha: $sha
+) {
+__typename
+... on VulnerabilityReport {
+vulnerabilityReportId
+fixReportId
+}
+... on RabbitSendError {
+status
+error
+}
+... on ReportValidationError {
+status
+error
+}
+... on ReferenceNotFoundError {
+status
+error
+}
+}
+}
+`;
+var INITIALIZE_VULNERABILITY_REPORT = gql`
+mutation InitializeVulnerabilityReport($fixReportId: String!) {
+initializeVulnerabilityReport(fixReportId: $fixReportId) {
+__typename
+}
+}
+`;
 var SUBMIT_VULNERABILITY_REPORT = gql`
 mutation SubmitVulnerabilityReport(
 $vulnerabilityReportFileName: String!
@@ -298,6 +340,22 @@ var GET_ENCRYPTED_API_TOKEN = gql2`
 }
 }
 `;
+var GET_FIX_REPORT_STATE = gql2`
+query FixReportState($id: uuid!) {
+fixReport_by_pk(id: $id) {
+state
+}
+}
+`;
+var GET_VULNERABILITY_REPORT_PATHS = gql2`
+query GetVulnerabilityReportPaths($vulnerabilityReportId: uuid!) {
+vulnerability_report_path(
+where: { vulnerabilityReportId: { _eq: $vulnerabilityReportId } }
+) {
+path
+}
+}
+`;
 
 // src/features/analysis/graphql/types.ts
 import { z as z2 } from "zod";
@@ -361,10 +419,28 @@ var GetEncryptedApiTokenZ = z2.object({
 encryptedApiToken: z2.string().nullable()
 })
 });
+var DigestVulnerabilityReportZ = z2.object({
+digestVulnerabilityReport: z2.object({
+vulnerabilityReportId: z2.string()
+})
+});
+var GetFixReportZ = z2.object({
+fixReport_by_pk: z2.object({
+state: z2.string()
+})
+});
+var GetVulnerabilityReportPathsZ = z2.object({
+vulnerability_report_path: z2.array(
+z2.object({
+path: z2.string()
+})
+)
+});
 
 // src/features/analysis/graphql/gql.ts
 var debug3 = Debug3("mobbdev:gql");
 var API_KEY_HEADER_NAME = "x-mobb-key";
+var REPORT_STATE_CHECK_DELAY = 5 * 1e3;
 var GQLClient = class {
 constructor(args) {
 __publicField(this, "_client");
@@ -438,6 +514,33 @@ var GQLClient = class {
 });
 return UploadS3BucketInfoZ.parse(uploadS3BucketInfoResult);
 }
+async digestVulnerabilityReport({
+fixReportId,
+projectId,
+repoUrl,
+reference,
+sha
+}) {
+const res = await this._client.request(
+DIGEST_VULNERABILITY_REPORT,
+{
+fixReportId,
+vulnerabilityReportFileName: "report.json",
+projectId,
+repoUrl,
+reference,
+sha
+}
+);
+return DigestVulnerabilityReportZ.parse(res).digestVulnerabilityReport;
+}
+async initializeVulnerabilityReport({
+fixReportId
+}) {
+await this._client.request(INITIALIZE_VULNERABILITY_REPORT, {
+fixReportId
+});
+}
 async submitVulnerabilityReport({
 fixReportId,
 repoUrl,
@@ -454,6 +557,38 @@ var GQLClient = class {
 sha: sha || ""
 });
 }
+async getFixReportState(fixReportId) {
+const res = await this._client.request(
+GET_FIX_REPORT_STATE,
+{ id: fixReportId }
+);
+return GetFixReportZ.parse(res).fixReport_by_pk.state;
+}
+async waitFixReportInit(fixReportId, includeDigested = false) {
+const FINAL_STATES = ["Finished", "Failed"];
+let lastState = "Created";
+let attempts = 100;
+if (includeDigested) {
+FINAL_STATES.push("Digested");
+}
+do {
+await sleep(REPORT_STATE_CHECK_DELAY);
+lastState = await this.getFixReportState(fixReportId);
+} while (!FINAL_STATES.includes(
+lastState
+// wait for final the state of the fix report
+) && attempts-- > 0);
+return lastState;
+}
+async getVulnerabilityReportPaths(vulnerabilityReportId) {
+const res = await this._client.request(
+GET_VULNERABILITY_REPORT_PATHS,
+{ vulnerabilityReportId }
+);
+return GetVulnerabilityReportPathsZ.parse(
+res
+).vulnerability_report_path.map((p) => p.path);
+}
 };
 
 // src/features/analysis/pack.ts
@@ -465,7 +600,7 @@ import { globby } from "globby";
 import { isBinary } from "istextorbinary";
 var debug4 = Debug4("mobbdev:pack");
 var MAX_FILE_SIZE = 1024 * 1024 * 5;
-async function pack(srcDirPath) {
+async function pack(srcDirPath, vulnFiles) {
 debug4("pack folder %s", srcDirPath);
 const filepaths = await globby("**", {
 gitignore: true,
@@ -478,6 +613,10 @@ async function pack(srcDirPath) {
 debug4("compressing files");
 for (const filepath of filepaths) {
 const absFilepath = path3.join(srcDirPath, filepath.toString());
+if (!vulnFiles.includes(filepath.toString())) {
+debug4("ignoring %s because it is not a vulnerability file", filepath);
+continue;
+}
 if (fs.lstatSync(absFilepath).size > MAX_FILE_SIZE) {
 debug4("ignoring %s because the size is > 5MB", filepath);
 continue;
@@ -511,6 +650,25 @@ async function choseScanner() {
 });
 return scanner;
 }
+async function tryCheckmarxConfiguarationAgain() {
+console.log(
+"\u{1F513} Oops, seems like checkmarx does not accept the current configuration"
+);
+const { confirmCheckmarxRetryConfigrations } = await inquirer.prompt({
+name: "confirmCheckmarxRetryConfigrations",
+type: "confirm",
+message: "Would like to try to configure them again? ",
+default: true
+});
+return confirmCheckmarxRetryConfigrations;
+}
+async function startCheckmarxConfigationPrompt() {
+const checkmarxConfigreSpinner = createSpinner(
+"\u{1F513} Checkmarx needs to be configured before we start, press any key to continue"
+).start();
+await keypress();
+checkmarxConfigreSpinner.success();
+}
 async function scmIntegrationPrompt(scmName) {
 const answers = await inquirer.prompt({
 name: "scmConfirm",
@@ -536,6 +694,245 @@ async function snykArticlePrompt() {
 return snykArticleConfirm;
 }
 
+// src/features/analysis/scanners/checkmarx.ts
+import { createRequire } from "node:module";
+
+// src/post_install/constants.mjs
+var cxOperatingSystemSupportMessage = `Your operating system does not support checkmarx.
+You can see the list of supported operating systems here: https://github.com/Checkmarx/ast-cli#releases`;
+
+// src/utils/child_process.ts
+import cp from "node:child_process";
+import Debug5 from "debug";
+import * as process2 from "process";
+import supportsColor from "supports-color";
+var { stdout: stdout2 } = supportsColor;
+function createFork({ args, processPath, name }, options) {
+const child = cp.fork(processPath, args, {
+stdio: ["inherit", "pipe", "pipe", "ipc"],
+env: { FORCE_COLOR: stdout2 ? "1" : "0" }
+});
+return createChildProcess({ childProcess: child, name }, options);
+}
+function createSpwan({ args, processPath, name }, options) {
+const child = cp.spawn(processPath, args, {
+stdio: ["inherit", "pipe", "pipe", "ipc"],
+env: { FORCE_COLOR: stdout2 ? "1" : "0" }
+});
+return createChildProcess({ childProcess: child, name }, options);
+}
+function createChildProcess({ childProcess, name }, options) {
+const debug9 = Debug5(`mobbdev:${name}`);
+const { display } = options;
+return new Promise((resolve, reject) => {
+let out = "";
+const onData = (chunk) => {
+debug9(`chunk received from ${name} std ${chunk}`);
+out += chunk;
+};
+if (!childProcess || !childProcess?.stdout || !childProcess?.stderr) {
+debug9(`unable to fork ${name}`);
+reject(new Error(`unable to fork ${name}`));
+}
+childProcess.stdout?.on("data", onData);
+childProcess.stderr?.on("data", onData);
+if (display) {
+childProcess.stdout?.pipe(process2.stdout);
+childProcess.stderr?.pipe(process2.stderr);
+}
+childProcess.on("exit", (code) => {
+debug9(`${name} exit code ${code}`);
+resolve({ message: out, code });
+});
+childProcess.on("error", (err) => {
+debug9(`${name} error %o`, err);
+reject(err);
+});
+});
+}
+
+// src/features/analysis/scanners/checkmarx.ts
+import chalk from "chalk";
+import Debug6 from "debug";
+import { existsSync } from "fs";
+import { createSpinner as createSpinner2 } from "nanospinner";
+import path4 from "path";
+var debug5 = Debug6("mobbdev:checkmarx");
+var require2 = createRequire(import.meta.url);
+var getCheckmarxPath = () => {
+try {
+return require2.resolve(".bin/cx");
+} catch (e) {
+throw new CliError(cxOperatingSystemSupportMessage);
+}
+};
+var getCheckmarxCommandArgs = ({
+repoPath,
+branch,
+fileName,
+filePath,
+projectName
+}) => [
+"--project-name",
+projectName,
+"-s",
+repoPath,
+"--branch",
+branch,
+"--scan-types",
+"sast",
+"--output-path",
+filePath,
+"--output-name",
+fileName,
+"--report-format",
+"json"
+];
+var VALIDATE_COMMAND = ["auth", "validate"];
+var CONFIGURE_COMMAND = ["configure"];
+var SCAN_COMMAND = ["scan", "create"];
+var CHECKMARX_SUCCESS_CODE = 0;
+function validateCheckmarxInstallation() {
+existsSync(getCheckmarxPath());
+}
+async function forkCheckmarx(args, { display }) {
+debug5("fork checkmarx with args %o %s", args.join(" "), display);
+return createSpwan(
+{ args, processPath: getCheckmarxPath(), name: "checkmarx" },
+{ display }
+);
+}
+async function getCheckmarxReport({ reportPath, repositoryRoot, branch }, { skipPrompts = false }) {
+debug5("get checkmarx report start %s %s", reportPath, repositoryRoot);
+const { code: loginCode } = await forkCheckmarx(VALIDATE_COMMAND, {
+display: false
+});
+if (loginCode !== CHECKMARX_SUCCESS_CODE) {
+if (skipPrompts) {
+await throwCheckmarxConfigError();
+}
+await startCheckmarxConfigationPrompt();
+await validateCheckamxCredentials();
+}
+const extension = path4.extname(reportPath);
+const filePath = path4.dirname(reportPath);
+const fileName = path4.basename(reportPath, extension);
+const checkmarxCommandArgs = getCheckmarxCommandArgs({
+repoPath: repositoryRoot,
+branch,
+filePath,
+fileName,
+projectName: "mobb_dev"
+});
+console.log("\u280B \u{1F50D} Initiating Checkmarx Scan ");
+const { code: scanCode } = await forkCheckmarx(
+[...SCAN_COMMAND, ...checkmarxCommandArgs],
+{
+display: true
+}
+);
+if (scanCode !== CHECKMARX_SUCCESS_CODE) {
+createSpinner2("\u{1F50D} Something went wrong with the checkmarx scan").start().error();
+throw new CliError();
+}
+await createSpinner2("\u{1F50D} Checkmarx Scan completed").start().success();
+return true;
+}
+async function throwCheckmarxConfigError() {
+await createSpinner2("\u{1F513} Checkmarx is not configued correctly").start().error();
+throw new CliError(
+`Checkmarx is not configued correctly
+you can configure it by using the ${chalk.bold(
+"cx configure"
+)} command`
+);
+}
+async function validateCheckamxCredentials() {
+console.log(`
+Here's a suggestion for checkmarx configuation:
+${chalk.bold("AST Base URI:")} https://ast.checkmarx.net
+${chalk.bold("AST Base Auth URI (IAM):")} https://iam.checkmarx.net
+`);
+await forkCheckmarx(CONFIGURE_COMMAND, { display: true });
+const { code: loginCode } = await forkCheckmarx(VALIDATE_COMMAND, {
+display: false
+});
+if (loginCode !== CHECKMARX_SUCCESS_CODE) {
+const tryAgain = await tryCheckmarxConfiguarationAgain();
+if (!tryAgain) {
+await throwCheckmarxConfigError();
+}
+if (await tryCheckmarxConfiguarationAgain()) {
+validateCheckamxCredentials();
+}
+}
+await createSpinner2("\u{1F513} Checkmarx configured successfully!").start().success();
+}
+
+// src/features/analysis/scanners/snyk.ts
+import { createRequire as createRequire2 } from "node:module";
+import chalk2 from "chalk";
+import Debug7 from "debug";
+import { createSpinner as createSpinner3 } from "nanospinner";
+import open from "open";
+var debug6 = Debug7("mobbdev:snyk");
+var require3 = createRequire2(import.meta.url);
+var SNYK_PATH = require3.resolve("snyk/bin/snyk");
+var SNYK_ARTICLE_URL = "https://docs.snyk.io/scan-application-code/snyk-code/getting-started-with-snyk-code/activating-snyk-code-using-the-web-ui/step-1-enabling-the-snyk-code-option";
+debug6("snyk executable path %s", SNYK_PATH);
+async function forkSnyk(args, { display }) {
+debug6("fork snyk with args %o %s", args, display);
+return createFork(
+{ args, processPath: SNYK_PATH, name: "checkmarx" },
+{ display }
+);
+}
+async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
+debug6("get snyk report start %s %s", reportPath, repoRoot);
+const config3 = await forkSnyk(["config"], { display: false });
+const { message: configMessage } = config3;
+if (!configMessage.includes("api: ")) {
+const snykLoginSpinner = createSpinner3().start();
+if (!skipPrompts) {
+snykLoginSpinner.update({
+text: "\u{1F513} Login to Snyk is required, press any key to continue"
+});
+await keypress();
+}
+snykLoginSpinner.update({
+text: "\u{1F513} Waiting for Snyk login to complete"
+});
+debug6("no token in the config %s", config3);
+await forkSnyk(["auth"], { display: true });
+snykLoginSpinner.success({ text: "\u{1F513} Login to Snyk Successful" });
+}
+const snykSpinner = createSpinner3("\u{1F50D} Scanning your repo with Snyk ").start();
+const { message: scanOutput } = await forkSnyk(
+["code", "test", `--sarif-file-output=${reportPath}`, repoRoot],
+{ display: true }
+);
+if (scanOutput.includes(
+"Snyk Code is not supported for org: enable in Settings > Snyk Code"
+)) {
+debug6("snyk code is not enabled %s", scanOutput);
+snykSpinner.error({ text: "\u{1F50D} Snyk configuration needed" });
+const answer = await snykArticlePrompt();
+debug6("answer %s", answer);
+if (answer) {
+debug6("opening the browser");
+await open(SNYK_ARTICLE_URL);
+}
+console.log(
+chalk2.bgBlue(
+"\nPlease enable Snyk Code in your Snyk account and try again."
+)
+);
+throw Error("snyk is not enbabled");
+}
+snykSpinner.success({ text: "\u{1F50D} Snyk code scan completed" });
+return true;
+}
+
 // src/features/analysis/scm/gitlab.ts
 import querystring from "node:querystring";
 import { Gitlab } from "@gitbeaker/rest";
@@ -848,12 +1245,12 @@ async function queryGithubGraphql(query, variables, options) {
 throw e;
 }
 }
-async function getGithubBlameRanges({ ref, gitHubUrl, path:
+async function getGithubBlameRanges({ ref, gitHubUrl, path: path8 }, options) {
 const { owner, repo } = parseOwnerAndRepo(gitHubUrl);
 const variables = {
 owner,
 repo,
-path:
+path: path8,
 ref
 };
 const res = await queryGithubGraphql(
@@ -876,7 +1273,7 @@ async function getGithubBlameRanges({ ref, gitHubUrl, path: path7 }, options) {
 // src/features/analysis/scm/scmSubmit.ts
 import fs2 from "node:fs/promises";
 import os from "os";
-import
+import path5 from "path";
 import { simpleGit as simpleGit2 } from "simple-git";
 import { z as z4 } from "zod";
 var isValidBranchName = async (branchName) => {
@@ -1167,13 +1564,13 @@ var GitlabSCMLib = class extends SCMLib {
 throw new Error(`unknown state ${state}`);
 }
 }
-async getRepoBlameRanges(ref,
+async getRepoBlameRanges(ref, path8) {
 if (!this.url) {
 console.error("no url");
 throw new Error("no url");
 }
 return await getGitlabBlameRanges(
-{ ref, path:
+{ ref, path: path8, gitlabUrl: this.url },
 {
 gitlabAuthToken: this.accessToken
 }
@@ -1293,13 +1690,13 @@ var GithubSCMLib = class extends SCMLib {
 }
 throw new Error(`unknown state ${state}`);
 }
-async getRepoBlameRanges(ref,
+async getRepoBlameRanges(ref, path8) {
 if (!this.url) {
 console.error("no url");
 throw new Error("no url");
 }
 return await getGithubBlameRanges(
-{ ref, path:
+{ ref, path: path8, gitHubUrl: this.url },
 {
 githubAuthToken: this.accessToken
 }
@@ -1492,7 +1889,6 @@ async function getGitlabRepoList(accessToken) {
 //directly with fetch()
 sort: "asc",
 orderBy: "updated_at",
-pagination: "keyset",
 perPage: 100
 });
 return Promise.all(
@@ -1604,10 +2000,10 @@ function parseOwnerAndRepo2(gitlabUrl) {
 const projectPath = `${groups[0]}${repo}`;
 return { owner, repo, projectPath };
 }
-async function getGitlabBlameRanges({ ref, gitlabUrl, path:
+async function getGitlabBlameRanges({ ref, gitlabUrl, path: path8 }, options) {
 const { projectPath } = parseOwnerAndRepo2(gitlabUrl);
 const api = getGitBeaker({ gitlabAuthToken: options?.gitlabAuthToken });
-const resp = await api.RepositoryFiles.allFileBlames(projectPath,
+const resp = await api.RepositoryFiles.allFileBlames(projectPath, path8, ref);
 let lineNumber = 1;
 return resp.filter((range) => range.lines).map((range) => {
 const oldLineNumber = lineNumber;
@@ -1630,121 +2026,29 @@ var GitlabAuthResultZ = z5.object({
 refresh_token: z5.string()
 });
 
-// src/features/analysis/snyk.ts
-import cp from "node:child_process";
-import { createRequire } from "node:module";
-import chalk from "chalk";
-import Debug5 from "debug";
-import { createSpinner as createSpinner2 } from "nanospinner";
-import open from "open";
-import * as process2 from "process";
-import supportsColor from "supports-color";
-var { stdout: stdout2 } = supportsColor;
-var debug5 = Debug5("mobbdev:snyk");
-var require2 = createRequire(import.meta.url);
-var SNYK_PATH = require2.resolve("snyk/bin/snyk");
-var SNYK_ARTICLE_URL = "https://docs.snyk.io/scan-application-code/snyk-code/getting-started-with-snyk-code/activating-snyk-code-using-the-web-ui/step-1-enabling-the-snyk-code-option";
-debug5("snyk executable path %s", SNYK_PATH);
-async function forkSnyk(args, { display }) {
-debug5("fork snyk with args %o %s", args, display);
-return new Promise((resolve, reject) => {
-const child = cp.fork(SNYK_PATH, args, {
-stdio: ["inherit", "pipe", "pipe", "ipc"],
-env: { FORCE_COLOR: stdout2 ? "1" : "0" }
-});
-let out = "";
-const onData = (chunk) => {
-debug5("chunk received from snyk std %s", chunk);
-out += chunk;
-};
-if (!child || !child?.stdout || !child?.stderr) {
-debug5("unable to fork snyk");
-reject(new Error("unable to fork snyk"));
-}
-child.stdout?.on("data", onData);
-child.stderr?.on("data", onData);
-if (display) {
-child.stdout?.pipe(process2.stdout);
-child.stderr?.pipe(process2.stderr);
-}
-child.on("exit", () => {
-debug5("snyk exit");
-resolve(out);
-});
-child.on("error", (err) => {
-debug5("snyk error %o", err);
-reject(err);
-});
-});
-}
-async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
-debug5("get snyk report start %s %s", reportPath, repoRoot);
-const config3 = await forkSnyk(["config"], { display: false });
-if (!config3.includes("api: ")) {
-const snykLoginSpinner = createSpinner2().start();
-if (!skipPrompts) {
-snykLoginSpinner.update({
-text: "\u{1F513} Login to Snyk is required, press any key to continue"
-});
-await keypress();
-}
-snykLoginSpinner.update({
-text: "\u{1F513} Waiting for Snyk login to complete"
-});
-debug5("no token in the config %s", config3);
-await forkSnyk(["auth"], { display: true });
-snykLoginSpinner.success({ text: "\u{1F513} Login to Snyk Successful" });
-}
-const snykSpinner = createSpinner2("\u{1F50D} Scanning your repo with Snyk ").start();
-const out = await forkSnyk(
-["code", "test", `--sarif-file-output=${reportPath}`, repoRoot],
-{ display: true }
-);
-if (out.includes(
-"Snyk Code is not supported for org: enable in Settings > Snyk Code"
-)) {
-debug5("snyk code is not enabled %s", out);
-snykSpinner.error({ text: "\u{1F50D} Snyk configuration needed" });
-const answer = await snykArticlePrompt();
-debug5("answer %s", answer);
-if (answer) {
-debug5("opening the browser");
-await open(SNYK_ARTICLE_URL);
-}
-console.log(
-chalk.bgBlue(
-"\nPlease enable Snyk Code in your Snyk account and try again."
-)
-);
-return false;
-}
-snykSpinner.success({ text: "\u{1F50D} Snyk code scan completed" });
-return true;
-}
-
 // src/features/analysis/upload-file.ts
-import
+import Debug8 from "debug";
 import fetch2, { File, fileFrom, FormData } from "node-fetch";
-var
+var debug7 = Debug8("mobbdev:upload-file");
 async function uploadFile({
 file,
 url,
 uploadKey,
 uploadFields
 }) {
-
-
-
+debug7("upload file start %s", url);
+debug7("upload fields %o", uploadFields);
+debug7("upload key %s", uploadKey);
 const form = new FormData();
 Object.entries(uploadFields).forEach(([key, value]) => {
 form.append(key, value);
 });
 form.append("key", uploadKey);
 if (typeof file === "string") {
-
+debug7("upload file from path %s", file);
 form.append("file", await fileFrom(file));
 } else {
-
+debug7("upload file from buffer");
 form.append("file", new File([file], "file"));
 }
 const response = await fetch2(url, {
@@ -1752,10 +2056,10 @@ async function uploadFile({
 body: form
 });
 if (!response.ok) {
-
+debug7("error from S3 %s %s", response.body, response.status);
 throw new Error(`Failed to upload the file: ${response.status}`);
 }
-
+debug7("upload file done");
 }
 
 // src/features/analysis/index.ts
@@ -1770,10 +2074,10 @@ async function downloadRepo({
 dirname,
 ci
 }) {
-const { createSpinner:
-const repoSpinner =
-
-const zipFilePath =
+const { createSpinner: createSpinner4 } = Spinner2({ ci });
+const repoSpinner = createSpinner4("\u{1F4BE} Downloading Repo").start();
+debug8("download repo %s %s %s", repoUrl, dirname);
+const zipFilePath = path6.join(dirname, "repo.zip");
 const response = await fetch3(downloadUrl, {
 method: "GET",
 headers: {
@@ -1781,9 +2085,9 @@ async function downloadRepo({
 }
 });
 if (!response.ok) {
-
+debug8("SCM zipball request failed %s %s", response.body, response.status);
 repoSpinner.error({ text: "\u{1F4BE} Repo download failed" });
-throw new Error(`Can't access ${
+throw new Error(`Can't access ${chalk3.bold(repoUrl)}`);
 }
 const fileWriterStream = fs3.createWriteStream(zipFilePath);
 if (!response.body) {
@@ -1795,13 +2099,13 @@ async function downloadRepo({
 if (!repoRoot) {
 throw new Error("Repo root not found");
 }
-
+debug8("repo root %s", repoRoot);
 repoSpinner.success({ text: "\u{1F4BE} Repo downloaded successfully" });
-return
+return path6.join(dirname, repoRoot);
 }
 var LOGIN_MAX_WAIT = 10 * 60 * 1e3;
 var LOGIN_CHECK_DELAY = 5 * 1e3;
-var MOBB_LOGIN_REQUIRED_MSG = `\u{1F513} Login to Mobb is Required, you will be redirected to our login page, once the authorization is complete return to this prompt, ${
+var MOBB_LOGIN_REQUIRED_MSG = `\u{1F513} Login to Mobb is Required, you will be redirected to our login page, once the authorization is complete return to this prompt, ${chalk3.bgBlue(
 "press any key to continue"
 )};`;
 var tmpObj = tmp.dirSync({
@@ -1812,9 +2116,9 @@ var getReportUrl = ({
 projectId,
 fixReportId
 }) => `${WEB_APP_URL}/organization/${organizationId}/project/${projectId}/report/${fixReportId}`;
-var
+var debug8 = Debug9("mobbdev:index");
 var packageJson = JSON.parse(
-fs3.readFileSync(
+fs3.readFileSync(path6.join(getDirName2(), "../package.json"), "utf8")
 );
 if (!semver.satisfies(process.version, packageJson.engines.node)) {
 throw new CliError2(
@@ -1822,7 +2126,7 @@ if (!semver.satisfies(process.version, packageJson.engines.node)) {
 );
 }
 var config2 = new Configstore(packageJson.name, { apiToken: "" });
-
+debug8("config %o", config2);
 async function runAnalysis(params, options) {
 try {
 await _scan(
@@ -1844,10 +2148,11 @@ async function _scan({
 ci,
 srcPath,
 commitHash,
-ref
+ref,
+scanner
 }, { skipPrompts = false } = {}) {
-
-const { createSpinner:
+debug8("start %s %s", dirname, repo);
+const { createSpinner: createSpinner4 } = Spinner2({ ci });
 skipPrompts = skipPrompts || ci;
 let gqlClient = new GQLClient({
 apiKey: apiKey || config2.get("apiToken")
@@ -1900,9 +2205,9 @@ async function _scan({
 });
 const reference = ref ?? await scm.getRepoDefaultBranch();
 const { sha } = await scm.getReferenceData(reference);
-
-
-
+debug8("org id %s", organizationId);
+debug8("project id %s", projectId);
+debug8("default branch %s", reference);
 const repositoryRoot = await downloadRepo({
 repoUrl: repo,
 dirname,
@@ -1910,10 +2215,13 @@ async function _scan({
 authHeaders: scm.getAuthHeaders(),
 downloadUrl: scm.getDownloadUrl(sha)
 });
+if (scanner) {
+reportPath = await getReport(scanner);
+}
 if (!reportPath) {
-reportPath
+throw new Error("reportPath is null");
 }
-const uploadReportSpinner =
+const uploadReportSpinner = createSpinner4("\u{1F4C1} Uploading Report").start();
 try {
 await uploadFile({
 file: reportPath,
@@ -1926,7 +2234,7 @@ async function _scan({
 throw e;
 }
 uploadReportSpinner.success({ text: "\u{1F4C1} Report uploaded successfully" });
-const mobbSpinner =
+const mobbSpinner = createSpinner4("\u{1F575}\uFE0F\u200D\u2642\uFE0F Initiating Mobb analysis").start();
 try {
 await gqlClient.submitVulnerabilityReport({
 fixReportId: reportUploadInfo.fixReportId,
@@ -1942,11 +2250,18 @@ async function _scan({
 text: "\u{1F575}\uFE0F\u200D\u2642\uFE0F Generating fixes..."
 });
 await askToOpenAnalysis();
-async function
-const reportPath2 =
-
-
-
+async function getReport(scanner2) {
+const reportPath2 = path6.join(dirname, "report.json");
+switch (scanner2) {
+case "snyk":
+await getSnykReport(reportPath2, repositoryRoot, { skipPrompts });
+break;
+case "checkmarx":
+await getCheckmarxReport(
+{ reportPath: reportPath2, repositoryRoot, branch: reference },
+{ skipPrompts }
+);
+break;
 }
 return reportPath2;
 }
@@ -1956,27 +2271,27 @@ async function _scan({
 projectId,
 fixReportId: reportUploadInfo.fixReportId
 });
-!ci && console.log("You can access the
-console.log(
+!ci && console.log("You can access the analysis at: \n");
+console.log(chalk3.bold(reportUrl));
 !skipPrompts && await mobbAnalysisPrompt();
 !ci && open2(reportUrl);
 !ci && console.log(
-
+chalk3.bgBlue("\n\n My work here is done for now, see you soon! \u{1F575}\uFE0F\u200D\u2642\uFE0F ")
 );
 }
 async function handleMobbLogin() {
 if (await gqlClient.verifyToken()) {
-
+createSpinner4().start().success({
 text: "\u{1F513} Logged in to Mobb successfully"
 });
 return;
 } else if (apiKey) {
-
+createSpinner4().start().error({
 text: "\u{1F513} Logged in to Mobb failed - check your api-key"
 });
 throw new CliError2();
 }
-const loginSpinner =
+const loginSpinner = createSpinner4().start();
 if (!skipPrompts) {
 loginSpinner.update({ text: MOBB_LOGIN_REQUIRED_MSG });
 await keypress2();
@@ -2002,9 +2317,9 @@ async function _scan({
 });
 loginSpinner.spin();
 if (encryptedApiToken) {
-
+debug8("encrypted API token received %s", encryptedApiToken);
 newApiToken = crypto.privateDecrypt(privateKey, Buffer.from(encryptedApiToken, "base64")).toString("utf-8");
-
+debug8("API token decrypted");
 break;
 }
 await sleep(LOGIN_CHECK_DELAY);
@@ -2017,7 +2332,7 @@ async function _scan({
 }
 gqlClient = new GQLClient({ apiKey: newApiToken });
 if (await gqlClient.verifyToken()) {
-
+debug8("set api token %s", newApiToken);
 config2.set("apiToken", newApiToken);
 loginSpinner.success({ text: "\u{1F513} Login to Mobb successful!" });
 } else {
@@ -2030,7 +2345,7 @@ async function _scan({
 async function handleScmIntegration(oldToken, scmLibType2, scmAuthUrl2) {
 const scmName = scmLibType2 === "GITHUB" /* GITHUB */ ? "Github" : scmLibType2 === "GITLAB" /* GITLAB */ ? "Gitlab" : "";
 const addScmIntegration = skipPrompts ? true : await scmIntegrationPrompt(scmName);
-const scmSpinner =
+const scmSpinner = createSpinner4(
 `\u{1F517} Waiting for ${scmName} integration...`
 ).start();
 if (!addScmIntegration) {
@@ -2063,11 +2378,7 @@ async function _scan({
 if (!srcPath || !reportPath) {
 throw new Error("src path and reportPath is required");
 }
-const
-const zippingSpinner = createSpinner3("\u{1F4E6} Zipping repo").start();
-const zipBuffer = await pack(srcPath);
-zippingSpinner.success({ text: "\u{1F4E6} Zipping repo successful!" });
-const uploadReportSpinner2 = createSpinner3("\u{1F4C1} Uploading Report").start();
+const uploadReportSpinner2 = createSpinner4("\u{1F4C1} Uploading Report").start();
 try {
 await uploadFile({
 file: reportPath,
@@ -2082,7 +2393,38 @@ async function _scan({
 uploadReportSpinner2.success({
 text: "\u{1F4C1} Uploading Report successful!"
 });
-const
+const digestSpinner = createSpinner4("\u{1F575}\uFE0F\u200D\u2642\uFE0F Digesting report").start();
+let vulnFiles = [];
+try {
+const gitInfo = await getGitInfo(srcPath);
+const { vulnerabilityReportId } = await gqlClient.digestVulnerabilityReport({
+fixReportId: reportUploadInfo.fixReportId,
+projectId,
+repoUrl: repo || gitInfo.repoUrl,
+reference: gitInfo.reference,
+sha: commitHash || gitInfo.hash
+});
+const finalState = await gqlClient.waitFixReportInit(
+reportUploadInfo.fixReportId,
+true
+);
+if (finalState !== "Digested") {
+throw new Error("Digesting report failed");
+}
+vulnFiles = await gqlClient.getVulnerabilityReportPaths(
+vulnerabilityReportId
+);
+} catch (e) {
+digestSpinner.error({ text: "\u{1F575}\uFE0F\u200D\u2642\uFE0F Digesting report failed" });
+throw e;
+}
+digestSpinner.success({
+text: "\u{1F575}\uFE0F\u200D\u2642\uFE0F Digesting report successful!"
+});
+const zippingSpinner = createSpinner4("\u{1F4E6} Zipping repo").start();
+const zipBuffer = await pack(srcPath, vulnFiles);
+zippingSpinner.success({ text: "\u{1F4E6} Zipping repo successful!" });
+const uploadRepoSpinner = createSpinner4("\u{1F4C1} Uploading Repo").start();
 try {
 await uploadFile({
 file: zipBuffer,
@@ -2095,14 +2437,10 @@ async function _scan({
 throw e;
 }
 uploadRepoSpinner.success({ text: "\u{1F4C1} Uploading Repo successful!" });
-const mobbSpinner2 =
+const mobbSpinner2 = createSpinner4("\u{1F575}\uFE0F\u200D\u2642\uFE0F Initiating Mobb analysis").start();
 try {
-await gqlClient.
-fixReportId: reportUploadInfo.fixReportId
-repoUrl: repo || gitInfo.repoUrl,
-reference: gitInfo.reference,
-sha: commitHash || gitInfo.hash,
-projectId
+await gqlClient.initializeVulnerabilityReport({
+fixReportId: reportUploadInfo.fixReportId
 });
 } catch (e) {
 mobbSpinner2.error({ text: "\u{1F575}\uFE0F\u200D\u2642\uFE0F Mobb analysis failed" });
@@ -2136,11 +2474,15 @@ async function scan(scanOptions, { skipPrompts = false } = {}) {
 const { scanner, ci } = scanOptions;
 !ci && await showWelcomeMessage(skipPrompts);
 const selectedScanner = scanner || await choseScanner();
-if (selectedScanner !== SCANNERS.Snyk) {
+if (selectedScanner !== SCANNERS.Checkmarx && selectedScanner !== SCANNERS.Snyk) {
 throw new CliError(
 "Vulnerability scanning via Bugsy is available only with Snyk at the moment. Additional scanners will follow soon."
 );
 }
+selectedScanner === SCANNERS.Checkmarx && validateCheckmarxInstallation();
+if (selectedScanner === SCANNERS.Checkmarx && !scanOptions.cxProjectName) {
+throw new CliError("Project name is needed if you're using checkmarx");
+}
 await runAnalysis(
 { ...scanOptions, scanner: selectedScanner },
 { skipPrompts }
@@ -2154,28 +2496,32 @@ async function showWelcomeMessage(skipPrompts = false) {
 }
 
 // src/args/commands/analyze.ts
-import
+import chalk6 from "chalk";
 
 // src/args/options.ts
-import
+import chalk4 from "chalk";
 var repoOption = {
 alias: "r",
 demandOption: true,
 type: "string",
-describe:
+describe: chalk4.bold("Github / GitLab repository URL")
+};
+var projectNameOption = {
+type: "string",
+describe: chalk4.bold("Checkmarx project name (when scanning with Checkmarx)")
 };
 var yesOption = {
 alias: "yes",
 type: "boolean",
-describe:
+describe: chalk4.bold("Skip prompts and use default values")
 };
 var refOption = {
-describe:
+describe: chalk4.bold("reference of the repository (branch, tag, commit)"),
 type: "string",
 demandOption: false
 };
 var ciOption = {
-describe:
+describe: chalk4.bold(
 "Run in CI mode, prompts and browser will not be opened"
 ),
 type: "boolean",
@@ -2183,17 +2529,17 @@ var ciOption = {
 };
 var apiKeyOption = {
 type: "string",
-describe:
+describe: chalk4.bold("Mobb authentication api-key")
 };
 var commitHashOption = {
 alias: "ch",
-describe:
+describe: chalk4.bold("Hash of the commit"),
 type: "string"
 };
 
 // src/args/validation.ts
-import
-import
+import chalk5 from "chalk";
+import path7 from "path";
 import { z as z6 } from "zod";
 function throwRepoUrlErrorMessage({
 error,
@@ -2202,11 +2548,11 @@ function throwRepoUrlErrorMessage({
 }) {
 const errorMessage = error.issues[error.issues.length - 1]?.message;
 const formattedErrorMessage = `
-Error: ${
+Error: ${chalk5.bold(
 repoUrl
 )} is ${errorMessage}
 Example:
-mobbdev ${command} -r ${
+mobbdev ${command} -r ${chalk5.bold(
 "https://github.com/WebGoat/WebGoat"
 )}`;
 throw new CliError(formattedErrorMessage);
@@ -2234,12 +2580,12 @@ function validateRepoUrl(args) {
 }
 var supportExtensions = [".json", ".xml", ".fpr", ".sarif"];
 function validateReportFileFormat(reportFile) {
-if (!supportExtensions.includes(
+if (!supportExtensions.includes(path7.extname(reportFile))) {
 throw new CliError(
 `
-${
+${chalk5.bold(
 reportFile
-)} is not a supported file extension. Supported extensions are: ${
+)} is not a supported file extension. Supported extensions are: ${chalk5.bold(
 supportExtensions.join(", ")
 )}
 `
@@ -2253,18 +2599,18 @@ function analyzeBuilder(yargs2) {
 alias: "scan-file",
 demandOption: true,
 type: "string",
-describe:
+describe: chalk6.bold(
 "Select the vulnerability report to analyze (Checkmarx, Snyk, Fortify, CodeQL)"
 )
 }).option("repo", repoOption).option("p", {
 alias: "src-path",
-describe:
+describe: chalk6.bold(
 "Path to the repository folder with the source code"
 ),
 type: "string"
 }).option("ref", refOption).option("ch", {
 alias: "commit-hash",
-describe:
+describe: chalk6.bold("Hash of the commit"),
 type: "string"
 }).option("y", yesOption).option("ci", ciOption).option("api-key", apiKeyOption).option("commit-hash", commitHashOption).example(
 "$0 analyze -r https://github.com/WebGoat/WebGoat -f <your_vulirabitliy_report_path>",
@@ -2274,7 +2620,7 @@ function analyzeBuilder(yargs2) {
 function validateAnalyzeOptions(argv) {
 if (!fs4.existsSync(argv.f)) {
 throw new CliError(`
-Can't access ${
+Can't access ${chalk6.bold(argv.f)}`);
 }
 if (!argv.srcPath && !argv.repo) {
 throw new CliError("You must supply either --src-path or --repo");
@@ -2293,19 +2639,23 @@ async function analyzeHandler(args) {
 }
 
 // src/args/commands/scan.ts
-import
+import chalk7 from "chalk";
 function scanBuilder(args) {
 return args.coerce("scanner", (arg) => arg.toLowerCase()).option("repo", repoOption).option("ref", refOption).option("s", {
 alias: "scanner",
 choices: Object.values(SCANNERS),
-describe:
-}).option("y", yesOption).option("ci", ciOption).option("api-key", apiKeyOption).example(
+describe: chalk7.bold("Select the scanner to use")
+}).option("y", yesOption).option("ci", ciOption).option("api-key", apiKeyOption).option("cx-project-name", projectNameOption).example(
 "$0 scan -r https://github.com/WebGoat/WebGoat",
 "Scan an existing repository"
 ).help();
 }
 function validateScanOptions(argv) {
 validateRepoUrl(argv);
+argv.scanner === SCANNERS.Checkmarx && validateCheckmarxInstallation();
+if (argv.scanner === SCANNERS.Checkmarx && !argv.cxProjectName) {
+throw new CliError("project name is needed if you're using checkmarx");
+}
 if (argv.ci && !argv.apiKey) {
 throw new CliError(
 "\nError: --ci flag requires --api-key to be provided as well"
@@ -2321,28 +2671,28 @@ async function scanHandler(args) {
 var parseArgs = async (args) => {
 const yargsInstance = yargs(args);
 return yargsInstance.updateStrings({
-"Commands:":
-"Options:":
-"Examples:":
-"Show help":
+"Commands:": chalk8.yellow.underline.bold("Commands:"),
+"Options:": chalk8.yellow.underline.bold("Options:"),
+"Examples:": chalk8.yellow.underline.bold("Examples:"),
+"Show help": chalk8.bold("Show help")
 }).usage(
-`${
+`${chalk8.bold(
 "\n Bugsy - Trusted, Automatic Vulnerability Fixer \u{1F575}\uFE0F\u200D\u2642\uFE0F\n\n"
-)} ${
-$0 ${
+)} ${chalk8.yellow.underline.bold("Usage:")}
+$0 ${chalk8.green(
 "<command>"
-)} ${
+)} ${chalk8.dim("[options]")}
 `
 ).version(false).command(
 "scan",
-
+chalk8.bold(
 "Scan your code for vulnerabilities, get automated fixes right away."
 ),
 scanBuilder,
 scanHandler
 ).command(
 "analyze",
-
+chalk8.bold(
 "Provide a vulnerability report and relevant code repository, get automated fixes right away."
 ),
 analyzeBuilder,
@@ -2355,7 +2705,7 @@ var parseArgs = async (args) => {
 handler() {
 yargsInstance.showHelp();
 }
-}).strictOptions().help("h").alias("h", "help").epilog(
+}).strictOptions().help("h").alias("h", "help").epilog(chalk8.bgBlue("Made with \u2764\uFE0F by Mobb")).showHelpOnFail(true).wrap(Math.min(120, yargsInstance.terminalWidth())).parse();
 };
 
 // src/index.ts
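Taken together, the dist changes above wire Checkmarx in as a second scanner for the `scan` command (a new `-s`/`--scanner` choice plus a `--cx-project-name` option that `validateScanOptions` requires when Checkmarx is selected). A minimal usage sketch based only on the options visible in this diff — the repository URL, project name, and report path are placeholders, and released CLI behavior may differ:

```shell
# Illustrative only: scan with the newly added Checkmarx scanner
# (scanner id "checkmarx" as used in the getReport switch).
npx mobbdev scan -r https://github.com/WebGoat/WebGoat -s checkmarx --cx-project-name my_project

# The analyze flow from the diff's built-in .example() call, with a placeholder report path.
npx mobbdev analyze -r https://github.com/WebGoat/WebGoat -f ./my-vulnerability-report.json
```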
package/package.json
CHANGED
@@ -1,11 +1,12 @@
 {
 "name": "mobbdev",
-"version": "0.0.
+"version": "0.0.45",
 "description": "Automated secure code remediation tool",
 "repository": "https://github.com/mobb-dev/bugsy",
 "main": "dist/index.js",
 "module": "dist/index.js",
 "scripts": {
+"postinstall": "node ./src/post_install/cx_install.mjs",
 "build": "tsc && tsup-node --env.NODE_ENV production",
 "build:dev": "tsup-node --env.NODE_ENV development",
 "test": "TOKEN=$(../../scripts/login_auth0.sh) vitest run",
@@ -26,6 +27,7 @@
 "@octokit/plugin-rest-endpoint-methods": "7.0.1",
 "@octokit/request-error": "3.0.3",
 "adm-zip": "0.5.10",
+"axios": "1.5.0",
 "chalk": "5.3.0",
 "chalk-animation": "2.0.3",
 "configstore": "6.0.0",
@@ -45,6 +47,7 @@
 "simple-git": "3.19.1",
 "snyk": "1.1118.0",
 "supports-color": "9.4.0",
+"tar": "6.2.0",
 "tmp": "0.2.1",
 "yargs": "17.7.2",
 "zod": "3.21.4"
@@ -58,6 +61,7 @@
 "@types/debug": "4.1.8",
 "@types/inquirer": "9.0.3",
 "@types/semver": "7.5.0",
+"@types/tar": "^6.1.6",
 "@types/tmp": "0.2.3",
 "@types/yargs": "17.0.24",
 "@typescript-eslint/eslint-plugin": "5.44.0",