mobbdev 0.0.32 → 0.0.36
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
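If needed, a comparable comparison can usually be reproduced locally with npm's own diff command (npm 7 or later), which prints a unified diff of the two published tarballs; the command below is standard npm CLI syntax rather than anything specific to this viewer:

npm diff --diff=mobbdev@0.0.32 --diff=mobbdev@0.0.36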
- package/.env +3 -2
- package/dist/index.js +154 -184
- package/package.json +5 -4
package/.env
CHANGED
package/dist/index.js
CHANGED
@@ -71,6 +71,7 @@ var API_URL = envVariables.API_URL;
 // src/features/analysis/index.ts
 import crypto from "node:crypto";
 import fs3 from "node:fs";
+import os from "node:os";
 import path5 from "node:path";
 
 // src/utils/index.ts
@@ -136,83 +137,17 @@ var CliError = class extends Error {
 // src/features/analysis/index.ts
 import chalk3 from "chalk";
 import Configstore from "configstore";
-import …
-import …
+import Debug8 from "debug";
+import open2 from "open";
 import semver from "semver";
 import tmp from "tmp";
 
-// src/features/analysis/callback-server.ts
-import * as http from "node:http";
-import * as querystring from "node:querystring";
-import { clearTimeout, setTimeout as setTimeout2 } from "node:timers";
-import Debug2 from "debug";
-import open from "open";
-import { z as z2 } from "zod";
-var debug2 = Debug2("mobbdev:web-login");
-async function callbackServer({
-  url,
-  redirectUrl
-}) {
-  debug2("web login start");
-  let responseResolver;
-  let responseRejecter;
-  const responseAwaiter = new Promise((resolve, reject) => {
-    responseResolver = resolve;
-    responseRejecter = reject;
-  });
-  const timerHandler = setTimeout2(() => {
-    debug2("timeout happened");
-    responseRejecter(new Error("No login happened in three minutes."));
-  }, 18e4);
-  const server = http.createServer((req, res) => {
-    debug2("incoming request");
-    let body = "";
-    req.on("data", (chunk) => {
-      debug2("http server get chunk %s", chunk);
-      body += chunk;
-    });
-    req.on("end", () => {
-      debug2("http server end %s", body);
-      res.writeHead(301, {
-        Location: redirectUrl
-      }).end();
-      const safeBody = z2.object({ token: z2.string() }).safeParse(querystring.parse(body));
-      if (!safeBody.success) {
-        return responseRejecter(new Error("Failed to parse the response"));
-      }
-      responseResolver({ token: safeBody.data.token });
-    });
-  });
-  debug2("http server starting");
-  const port = await new Promise((resolve, reject) => {
-    server.listen(0, "127.0.0.1", () => {
-      const address = server?.address();
-      if (typeof address === "string" || address == null) {
-        reject(new Error("Failed to get port"));
-      } else {
-        resolve(address.port);
-      }
-    });
-  });
-  debug2("http server started on port %d", port);
-  debug2("opening the browser on %s", `${url}?port=${port}`);
-  await open(`${url}?port=${port}`);
-  try {
-    debug2("waiting for http request");
-    return await responseAwaiter;
-  } finally {
-    debug2("http server close");
-    clearTimeout(timerHandler);
-    server.close();
-  }
-}
-
 // src/features/analysis/git.ts
-import …
+import Debug2 from "debug";
 import { simpleGit } from "simple-git";
-var …
+var debug2 = Debug2("mobbdev:git");
 async function getGitInfo(srcDirPath) {
-  …
+  debug2("getting git info for %s", srcDirPath);
   const git = simpleGit({
     baseDir: srcDirPath,
     maxConcurrentProcesses: 1,
@@ -227,11 +162,11 @@ async function getGitInfo(srcDirPath) {
     reference = await git.revparse(["--abbrev-ref", "HEAD"]) || "";
   } catch (e) {
     if (e instanceof Error) {
-      …
+      debug2("failed to run git %o", e);
       if (e.message.includes(" spawn ")) {
-        …
+        debug2("git cli not installed");
       } else if (e.message.includes(" not a git repository ")) {
-        …
+        debug2("folder is not a git repo");
       } else {
         throw e;
       }
@@ -258,12 +193,12 @@ import stream from "node:stream";
 import { promisify } from "node:util";
 import { RequestError } from "@octokit/request-error";
 import chalk from "chalk";
-import …
+import Debug3 from "debug";
 import extract from "extract-zip";
 import fetch from "node-fetch";
 import { Octokit } from "octokit";
 var pipeline = promisify(stream.pipeline);
-var …
+var debug3 = Debug3("mobbdev:github");
 async function _getRepo({ owner, repo }, { token } = {}) {
   const octokit = new Octokit({ auth: token });
   return octokit.rest.repos.get({
@@ -272,7 +207,7 @@ async function _getRepo({ owner, repo }, { token } = {}) {
   });
 }
 function extractSlug(repoUrl) {
-  …
+  debug3("get default branch %s", repoUrl);
   let slug = repoUrl.replace(/https?:\/\/github\.com\//i, "");
   if (slug.endsWith("/")) {
     slug = slug.substring(0, slug.length - 1);
@@ -280,7 +215,7 @@ function extractSlug(repoUrl) {
   if (slug.endsWith(".git")) {
     slug = slug.substring(0, slug.length - ".git".length);
   }
-  …
+  debug3("slug %s", slug);
   return slug;
 }
 function parseRepoUrl(repoUrl) {
@@ -310,7 +245,7 @@ async function getRepo(repoUrl, { token } = {}) {
     return res;
   } catch (e) {
     if (e instanceof RequestError) {
-      …
+      debug3("GH request failed %s %s", e.message, e.status);
       throw new CliError(
         `Can't get repository, make sure you have access to : ${repoUrl}.`
       );
@@ -321,7 +256,7 @@ async function getRepo(repoUrl, { token } = {}) {
 async function downloadRepo({ repoUrl, reference, dirname, ci }, { token } = {}) {
   const { createSpinner: createSpinner3 } = Spinner({ ci });
   const repoSpinner = createSpinner3("\u{1F4BE} Downloading Repo").start();
-  …
+  debug3("download repo %s %s %s", repoUrl, reference, dirname);
   const zipFilePath = path3.join(dirname, "repo.zip");
   const response = await fetch(`${repoUrl}/zipball/${reference}`, {
     method: "GET",
@@ -330,7 +265,7 @@ async function downloadRepo({ repoUrl, reference, dirname, ci }, { token } = {})
     }
   });
   if (!response.ok) {
-    …
+    debug3("GH zipball request failed %s %s", response.body, response.status);
     repoSpinner.error({ text: "\u{1F4BE} Repo download failed" });
     throw new Error(
       `Can't access the the branch ${chalk.bold(reference)} on ${chalk.bold(
@@ -348,13 +283,13 @@ async function downloadRepo({ repoUrl, reference, dirname, ci }, { token } = {})
   if (!repoRoot) {
     throw new Error("Repo root not found");
   }
-  …
+  debug3("repo root %s", repoRoot);
   repoSpinner.success({ text: "\u{1F4BE} Repo downloaded successfully" });
   return path3.join(dirname, repoRoot);
 }
 
 // src/features/analysis/graphql/gql.ts
-import …
+import Debug4 from "debug";
 import { GraphQLClient } from "graphql-request";
 
 // src/features/analysis/graphql/mutations.ts
@@ -458,49 +393,49 @@ var GET_ENCRYPTED_API_TOKEN = gql2`
 `;
 
 // src/features/analysis/graphql/types.ts
-import { z as …
-var UploadFieldsZ = …
-  bucket: …
-  "X-Amz-Algorithm": …
-  "X-Amz-Credential": …
-  "X-Amz-Date": …
-  Policy: …
-  "X-Amz-Signature": …
+import { z as z2 } from "zod";
+var UploadFieldsZ = z2.object({
+  bucket: z2.string(),
+  "X-Amz-Algorithm": z2.string(),
+  "X-Amz-Credential": z2.string(),
+  "X-Amz-Date": z2.string(),
+  Policy: z2.string(),
+  "X-Amz-Signature": z2.string()
 });
-var ReportUploadInfoZ = …
-  url: …
-  fixReportId: …
-  uploadFieldsJSON: …
+var ReportUploadInfoZ = z2.object({
+  url: z2.string(),
+  fixReportId: z2.string(),
+  uploadFieldsJSON: z2.string().transform((str, ctx) => {
     try {
       return JSON.parse(str);
     } catch (e) {
      ctx.addIssue({ code: "custom", message: "Invalid JSON" });
-      return …
+      return z2.NEVER;
    }
  }),
-  uploadKey: …
+  uploadKey: z2.string()
 }).transform(({ uploadFieldsJSON, ...input }) => ({
   ...input,
   uploadFields: uploadFieldsJSON
 }));
-var UploadS3BucketInfoZ = …
-  uploadS3BucketInfo: …
-    status: …
-    error: …
+var UploadS3BucketInfoZ = z2.object({
+  uploadS3BucketInfo: z2.object({
+    status: z2.string(),
+    error: z2.string().nullish(),
     reportUploadInfo: ReportUploadInfoZ,
     repoUploadInfo: ReportUploadInfoZ
   })
 });
-var GetOrgAndProjectIdQueryZ = …
-  users: …
-    …
-      userOrganizationsAndUserOrganizationRoles: …
-        …
-          organization: …
-            id: …
-            projects: …
-              …
-                id: …
+var GetOrgAndProjectIdQueryZ = z2.object({
+  users: z2.array(
+    z2.object({
+      userOrganizationsAndUserOrganizationRoles: z2.array(
+        z2.object({
+          organization: z2.object({
+            id: z2.string(),
+            projects: z2.array(
+              z2.object({
+                id: z2.string()
               })
             ).nonempty()
           })
@@ -509,25 +444,26 @@ var GetOrgAndProjectIdQueryZ = z3.object({
     })
   ).nonempty()
 });
-var CreateCliLoginZ = …
-  insert_cli_login_one: …
-    id: …
+var CreateCliLoginZ = z2.object({
+  insert_cli_login_one: z2.object({
+    id: z2.string()
   })
 });
-var GetEncryptedApiTokenZ = …
-  cli_login_by_pk: …
-    encryptedApiToken: …
+var GetEncryptedApiTokenZ = z2.object({
+  cli_login_by_pk: z2.object({
+    encryptedApiToken: z2.string().nullable()
   })
 });
 
 // src/features/analysis/graphql/gql.ts
-var …
+var debug4 = Debug4("mobbdev:gql");
+var API_KEY_HEADER_NAME = "x-mobb-key";
 var GQLClient = class {
   constructor(args) {
     const { apiKey } = args;
-    …
+    debug4(`init with apiKey ${apiKey}`);
     this._client = new GraphQLClient(API_URL, {
-      headers: { …
+      headers: { [API_KEY_HEADER_NAME]: apiKey || "" }
     });
   }
   async getUserInfo() {
@@ -538,7 +474,11 @@ var GQLClient = class {
     const res = CreateCliLoginZ.parse(
       await this._client.request(
         CREATE_CLI_LOGIN,
-        variables
+        variables,
+        {
+          // We may have outdated API key in the config storage. Avoid using it for the login request.
+          [API_KEY_HEADER_NAME]: ""
+        }
       )
     );
     return res.insert_cli_login_one.id;
@@ -546,9 +486,9 @@ var GQLClient = class {
   async verifyToken() {
     await this.createCommunityUser();
     try {
-      await this.…
+      await this.getUserInfo();
     } catch (e) {
-      …
+      debug4("verify token failed %o", e);
       return false;
     }
     return true;
@@ -569,7 +509,11 @@ var GQLClient = class {
   async getEncryptedApiToken(variables) {
     const res = await this._client.request(
       GET_ENCRYPTED_API_TOKEN,
-      variables
+      variables,
+      {
+        // We may have outdated API key in the config storage. Avoid using it for the login request.
+        [API_KEY_HEADER_NAME]: ""
+      }
     );
     return GetEncryptedApiTokenZ.parse(res).cli_login_by_pk.encryptedApiToken;
   }
@@ -577,7 +521,7 @@ var GQLClient = class {
     try {
       await this._client.request(CREATE_COMMUNITY_USER);
     } catch (e) {
-      …
+      debug4("create community user failed %o", e);
     }
   }
   async uploadS3BucketInfo() {
@@ -608,27 +552,36 @@ var GQLClient = class {
 import fs2 from "node:fs";
 import path4 from "node:path";
 import AdmZip from "adm-zip";
-import …
+import Debug5 from "debug";
 import { globby } from "globby";
-…
+import { isBinary } from "istextorbinary";
+var debug5 = Debug5("mobbdev:pack");
+var MAX_FILE_SIZE = 1024 * 1024 * 5;
 async function pack(srcDirPath) {
-  …
+  debug5("pack folder %s", srcDirPath);
   const filepaths = await globby("**", {
     gitignore: true,
     onlyFiles: true,
     cwd: srcDirPath,
     followSymbolicLinks: false
   });
-  …
+  debug5("files found %d", filepaths.length);
   const zip = new AdmZip();
-  …
+  debug5("compressing files");
   for (const filepath of filepaths) {
-    …
-    …
-    …
-    …
+    const absFilepath = path4.join(srcDirPath, filepath.toString());
+    if (fs2.lstatSync(absFilepath).size > MAX_FILE_SIZE) {
+      debug5("ignoring %s because the size is > 5MB", filepath);
+      continue;
+    }
+    const data = fs2.readFileSync(absFilepath);
+    if (isBinary(null, data)) {
+      debug5("ignoring %s because is seems to be a binary file", filepath);
+      continue;
+    }
+    zip.addFile(filepath.toString(), data);
   }
-  …
+  debug5("get zip file buffer");
   return zip.toBuffer();
 }
 
@@ -679,19 +632,19 @@ async function snykArticlePrompt() {
 import cp from "node:child_process";
 import { createRequire } from "node:module";
 import chalk2 from "chalk";
-import …
+import Debug6 from "debug";
 import { createSpinner as createSpinner2 } from "nanospinner";
-import …
+import open from "open";
 import * as process2 from "process";
 import supportsColor from "supports-color";
 var { stdout: stdout2 } = supportsColor;
-var …
+var debug6 = Debug6("mobbdev:snyk");
 var require2 = createRequire(import.meta.url);
 var SNYK_PATH = require2.resolve("snyk/bin/snyk");
 var SNYK_ARTICLE_URL = "https://docs.snyk.io/scan-application-code/snyk-code/getting-started-with-snyk-code/activating-snyk-code-using-the-web-ui/step-1-enabling-the-snyk-code-option";
-…
+debug6("snyk executable path %s", SNYK_PATH);
 async function forkSnyk(args, { display }) {
-  …
+  debug6("fork snyk with args %o %s", args, display);
   return new Promise((resolve, reject) => {
     const child = cp.fork(SNYK_PATH, args, {
       stdio: ["inherit", "pipe", "pipe", "ipc"],
@@ -699,11 +652,11 @@ async function forkSnyk(args, { display }) {
     });
     let out = "";
     const onData = (chunk) => {
-      …
+      debug6("chunk received from snyk std %s", chunk);
       out += chunk;
     };
     if (!child || !child?.stdout || !child?.stderr) {
-      …
+      debug6("unable to fork snyk");
       reject(new Error("unable to fork snyk"));
     }
     child.stdout?.on("data", onData);
@@ -713,17 +666,17 @@ async function forkSnyk(args, { display }) {
       child.stderr?.pipe(process2.stderr);
     }
     child.on("exit", () => {
-      …
+      debug6("snyk exit");
       resolve(out);
     });
     child.on("error", (err) => {
-      …
+      debug6("snyk error %o", err);
       reject(err);
     });
   });
 }
 async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
-  …
+  debug6("get snyk report start %s %s", reportPath, repoRoot);
   const config3 = await forkSnyk(["config"], { display: false });
   if (!config3.includes("api: ")) {
     const snykLoginSpinner = createSpinner2().start();
@@ -736,7 +689,7 @@ async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
     snykLoginSpinner.update({
       text: "\u{1F513} Waiting for Snyk login to complete"
     });
-    …
+    debug6("no token in the config %s", config3);
     await forkSnyk(["auth"], { display: true });
     snykLoginSpinner.success({ text: "\u{1F513} Login to Snyk Successful" });
   }
@@ -748,13 +701,13 @@ async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
   if (out.includes(
     "Snyk Code is not supported for org: enable in Settings > Snyk Code"
   )) {
-    …
+    debug6("snyk code is not enabled %s", out);
     snykSpinner.error({ text: "\u{1F50D} Snyk configuration needed" });
     const answer = await snykArticlePrompt();
-    …
+    debug6("answer %s", answer);
     if (answer) {
-      …
-      await …
+      debug6("opening the browser");
+      await open(SNYK_ARTICLE_URL);
     }
     console.log(
       chalk2.bgBlue(
@@ -768,28 +721,28 @@ async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
 }
 
 // src/features/analysis/upload-file.ts
-import …
+import Debug7 from "debug";
 import fetch2, { File, fileFrom, FormData } from "node-fetch";
-var …
+var debug7 = Debug7("mobbdev:upload-file");
 async function uploadFile({
   file,
   url,
   uploadKey,
   uploadFields
 }) {
-  …
-  …
-  …
+  debug7("upload file start %s", url);
+  debug7("upload fields %o", uploadFields);
+  debug7("upload key %s", uploadKey);
   const form = new FormData();
   Object.entries(uploadFields).forEach(([key, value]) => {
     form.append(key, value);
   });
   form.append("key", uploadKey);
   if (typeof file === "string") {
-    …
+    debug7("upload file from path %s", file);
     form.append("file", await fileFrom(file));
   } else {
-    …
+    debug7("upload file from buffer");
     form.append("file", new File([file], "file"));
   }
   const response = await fetch2(url, {
@@ -797,16 +750,15 @@ async function uploadFile({
     body: form
   });
   if (!response.ok) {
-    …
+    debug7("error from S3 %s %s", response.body, response.status);
     throw new Error(`Failed to upload the file: ${response.status}`);
   }
-  …
+  debug7("upload file done");
 }
 
 // src/features/analysis/index.ts
 var { CliError: CliError2, Spinner: Spinner2, keypress: keypress2, getDirName: getDirName2 } = utils_exports;
 var webLoginUrl = `${WEB_APP_URL}/cli-login`;
-var githubSubmitUrl = `${WEB_APP_URL}/gh-callback`;
 var githubAuthUrl = `${WEB_APP_URL}/github-auth`;
 var LOGIN_MAX_WAIT = 10 * 60 * 1e3;
 var LOGIN_CHECK_DELAY = 5 * 1e3;
@@ -821,7 +773,7 @@ var getReportUrl = ({
   projectId,
   fixReportId
 }) => `${WEB_APP_URL}/organization/${organizationId}/project/${projectId}/report/${fixReportId}`;
-var …
+var debug8 = Debug8("mobbdev:index");
 var packageJson = JSON.parse(
   fs3.readFileSync(path5.join(getDirName2(), "../package.json"), "utf8")
 );
@@ -831,7 +783,7 @@ if (!semver.satisfies(process.version, packageJson.engines.node)) {
   );
 }
 var config2 = new Configstore(packageJson.name, { apiToken: "" });
-…
+debug8("config %o", config2);
 async function runAnalysis(params, options) {
   try {
     await _scan(
@@ -855,7 +807,7 @@ async function _scan({
   commitHash,
   ref
 }, { skipPrompts = false } = {}) {
-  …
+  debug8("start %s %s", dirname, repo);
   const { createSpinner: createSpinner3 } = Spinner2({ ci });
   skipPrompts = skipPrompts || ci;
   let gqlClient = new GQLClient({
@@ -876,7 +828,7 @@ async function _scan({
   const userInfo = await gqlClient.getUserInfo();
   let { githubToken } = userInfo;
   const isRepoAvailable = await canReachRepo(repo, {
-    token: …
+    token: githubToken
   });
   if (!isRepoAvailable) {
     if (ci) {
@@ -884,13 +836,20 @@ async function _scan({
         `Cannot access repo ${repo} with the provided token, please visit ${githubAuthUrl} to refresh your Github token`
       );
     }
-    …
-    …
+    githubToken = await handleGithubIntegration(githubToken);
+    const isRepoAvailable2 = await canReachRepo(repo, {
+      token: githubToken
+    });
+    if (!isRepoAvailable2) {
+      throw new Error(
+        `Cannot access repo ${repo} with the provided credentials`
+      );
+    }
   }
   const reference = ref ?? (await getRepo(repo, { token: githubToken })).data.default_branch;
-  …
-  …
-  …
+  debug8("org id %s", organizationId);
+  debug8("project id %s", projectId);
+  debug8("default branch %s", reference);
   const repositoryRoot = await downloadRepo(
     {
       repoUrl: repo,
@@ -935,7 +894,7 @@ async function _scan({
   async function getReportFromSnyk() {
     const reportPath2 = path5.join(dirname, "report.json");
     if (!await getSnykReport(reportPath2, repositoryRoot, { skipPrompts })) {
-      …
+      debug8("snyk code is not enabled");
       throw new CliError2("Snyk code is not enabled");
     }
     return reportPath2;
@@ -949,7 +908,7 @@ async function _scan({
     !ci && console.log("You can access the report at: \n");
     console.log(chalk3.bold(reportUrl));
     !skipPrompts && await mobbAnalysisPrompt();
-    !ci && …
+    !ci && open2(reportUrl);
     !ci && console.log(
       chalk3.bgBlue("\n\n My work here is done for now, see you soon! \u{1F575}\uFE0F\u200D\u2642\uFE0F ")
     );
@@ -980,11 +939,11 @@ async function _scan({
     const loginId = await gqlClient.createCliLogin({
       publicKey: publicKey.export({ format: "pem", type: "pkcs1" }).toString()
     });
-    const browserUrl = `${webLoginUrl}/${loginId}`;
+    const browserUrl = `${webLoginUrl}/${loginId}?hostname=${os.hostname()}`;
     !ci && console.log(
       `If the page does not open automatically, kindly access it through ${browserUrl}.`
     );
-    await …
+    await open2(browserUrl);
     let newApiToken = null;
     for (let i = 0; i < LOGIN_MAX_WAIT / LOGIN_CHECK_DELAY; i++) {
       const encryptedApiToken = await gqlClient.getEncryptedApiToken({
@@ -992,9 +951,9 @@ async function _scan({
       });
       loginSpinner.spin();
       if (encryptedApiToken) {
-        …
+        debug8("encrypted API token received %s", encryptedApiToken);
         newApiToken = crypto.privateDecrypt(privateKey, Buffer.from(encryptedApiToken, "base64")).toString("utf-8");
-        …
+        debug8("API token decrypted");
         break;
       }
       await sleep(LOGIN_CHECK_DELAY);
@@ -1007,7 +966,7 @@ async function _scan({
     }
     gqlClient = new GQLClient({ apiKey: newApiToken });
     if (await gqlClient.verifyToken()) {
-      …
+      debug8("set api token %s", newApiToken);
      config2.set("apiToken", newApiToken);
      loginSpinner.success({ text: "\u{1F513} Login to Mobb successful!" });
    } else {
@@ -1017,7 +976,7 @@ async function _scan({
       throw new CliError2();
     }
   }
-  async function handleGithubIntegration() {
+  async function handleGithubIntegration(oldToken) {
     const addGithubIntegration = skipPrompts ? true : await githubIntegrationPrompt();
     const githubSpinner = createSpinner3(
       "\u{1F517} Waiting for github integration..."
@@ -1026,12 +985,23 @@ async function _scan({
       githubSpinner.error();
       throw Error("Could not reach github repo");
     }
-    …
-    …
-    …
+    console.log(
+      `If the page does not open automatically, kindly access it through ${githubAuthUrl}.`
+    );
+    await open2(githubAuthUrl);
+    for (let i = 0; i < LOGIN_MAX_WAIT / LOGIN_CHECK_DELAY; i++) {
+      const { githubToken: githubToken2 } = await gqlClient.getUserInfo();
+      if (githubToken2 && githubToken2 !== oldToken) {
+        githubSpinner.success({ text: "\u{1F517} Github integration successful!" });
+        return githubToken2;
+      }
+      githubSpinner.spin();
+      await sleep(LOGIN_CHECK_DELAY);
+    }
+    githubSpinner.error({
+      text: "Github login timeout error"
     });
-    …
-    return result;
+    throw new CliError2("Github login timeout");
   }
   async function uploadExistingRepo() {
     if (!srcPath || !reportPath) {
@@ -1168,7 +1138,7 @@ var commitHashOption = {
 // src/args/validation.ts
 import chalk5 from "chalk";
 import path6 from "path";
-import { z as …
+import { z as z3 } from "zod";
 function throwRepoUrlErrorMessage({
   error,
   repoUrl,
@@ -1186,7 +1156,7 @@ Example:
   throw new CliError(formattedErrorMessage);
 }
 var GITHUB_REPO_URL_PATTERN = new RegExp("https://github.com/[\\w-]+/[\\w-]+");
-var UrlZ = …
+var UrlZ = z3.string({
   invalid_type_error: "is not a valid github URL"
 }).regex(GITHUB_REPO_URL_PATTERN, {
   message: "is not a valid github URL"
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "mobbdev",
-  "version": "0.0.32",
+  "version": "0.0.36",
   "description": "Automated secure code remediation tool",
   "repository": "https://github.com/mobb-dev/bugsy",
   "main": "dist/index.js",
@@ -22,16 +22,17 @@
   "license": "MIT",
   "dependencies": {
     "adm-zip": "0.5.10",
-    "chalk-animation": "2.0.3",
     "chalk": "5.3.0",
+    "chalk-animation": "2.0.3",
     "configstore": "6.0.0",
     "debug": "4.3.4",
     "dotenv": "16.0.3",
     "extract-zip": "2.0.1",
     "globby": "13.2.2",
-    "graphql-request": "5.0.0",
     "graphql": "16.6.0",
+    "graphql-request": "5.0.0",
     "inquirer": "9.2.7",
+    "istextorbinary": "6.0.0",
     "nanospinner": "1.1.0",
     "node-fetch": "3.3.1",
     "octokit": "2.0.14",
@@ -57,10 +58,10 @@
     "@types/yargs": "17.0.24",
     "@typescript-eslint/eslint-plugin": "5.44.0",
     "@typescript-eslint/parser": "5.44.0",
+    "eslint": "8.36.0",
     "eslint-plugin-import": "2.27.5",
     "eslint-plugin-prettier": "4.2.1",
     "eslint-plugin-simple-import-sort": "10.0.0",
-    "eslint": "8.36.0",
     "prettier": "2.8.4",
     "tsup": "7.2.0",
     "typescript": "4.9.3",