mobbdev 1.1.13 → 1.1.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/args/commands/upload_ai_blame.mjs +224 -132
- package/dist/index.mjs +384 -155
- package/package.json +2 -3
package/dist/index.mjs
CHANGED
@@ -1318,7 +1318,7 @@ ${rootContent}`;
 });
 
 // src/index.ts
-import
+import Debug19 from "debug";
 import { hideBin } from "yargs/helpers";
 
 // src/args/yargs.ts
@@ -1592,6 +1592,7 @@ var IssueType_Enum = /* @__PURE__ */ ((IssueType_Enum2) => {
 var Pr_Status_Enum = /* @__PURE__ */ ((Pr_Status_Enum2) => {
 Pr_Status_Enum2["Active"] = "ACTIVE";
 Pr_Status_Enum2["Closed"] = "CLOSED";
+Pr_Status_Enum2["Draft"] = "DRAFT";
 Pr_Status_Enum2["Merged"] = "MERGED";
 return Pr_Status_Enum2;
 })(Pr_Status_Enum || {});
@@ -2074,6 +2075,57 @@ var UploadS3BucketInfoDocument = `
 }
 }
 `;
+var AnalyzeCommitForExtensionAiBlameDocument = `
+mutation AnalyzeCommitForExtensionAIBlame($repositoryURL: String!, $commitSha: String!, $organizationId: String!) {
+analyzeCommitForAIBlame(
+repositoryURL: $repositoryURL
+commitSha: $commitSha
+organizationId: $organizationId
+) {
+__typename
+... on ProcessAIBlameFinalResult {
+status
+inferencesProcessed
+attributionsCreated
+attributions {
+id
+aiBlameCommitId
+aiBlameInferenceId
+filePath
+lineNumber
+model
+toolName
+commitSha
+inferenceType
+}
+}
+... on ProcessAIBlameErrorResult {
+status
+error
+}
+... on ProcessAIBlameRequestedResult {
+status
+requestIds
+}
+}
+}
+`;
+var GetAiBlameInferenceDocument = `
+query GetAIBlameInference($aiBlameInferenceIds: [uuid!]) {
+ai_blame_inference(where: {id: {_in: $aiBlameInferenceIds}}) {
+id
+type
+aiResponseAt
+}
+}
+`;
+var GetAiBlameAttributionPromptDocument = `
+query GetAIBlameAttributionPrompt($aiBlameAttributionId: String!) {
+getAIBlameInferenceData(aiBlameAttributionId: $aiBlameAttributionId) {
+promptUrl
+}
+}
+`;
 var UploadAiBlameInferencesInitDocument = `
 mutation UploadAIBlameInferencesInit($sessions: [AIBlameInferenceInitInput!]!) {
 uploadAIBlameInferencesInit(sessions: $sessions) {
@@ -2367,6 +2419,15 @@ function getSdk(client, withWrapper = defaultWrapper) {
 uploadS3BucketInfo(variables, requestHeaders, signal) {
 return withWrapper((wrappedRequestHeaders) => client.request({ document: UploadS3BucketInfoDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "uploadS3BucketInfo", "mutation", variables);
 },
+AnalyzeCommitForExtensionAIBlame(variables, requestHeaders, signal) {
+return withWrapper((wrappedRequestHeaders) => client.request({ document: AnalyzeCommitForExtensionAiBlameDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "AnalyzeCommitForExtensionAIBlame", "mutation", variables);
+},
+GetAIBlameInference(variables, requestHeaders, signal) {
+return withWrapper((wrappedRequestHeaders) => client.request({ document: GetAiBlameInferenceDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "GetAIBlameInference", "query", variables);
+},
+GetAIBlameAttributionPrompt(variables, requestHeaders, signal) {
+return withWrapper((wrappedRequestHeaders) => client.request({ document: GetAiBlameAttributionPromptDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "GetAIBlameAttributionPrompt", "query", variables);
+},
 UploadAIBlameInferencesInit(variables, requestHeaders, signal) {
 return withWrapper((wrappedRequestHeaders) => client.request({ document: UploadAiBlameInferencesInitDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "UploadAIBlameInferencesInit", "mutation", variables);
 },
@@ -6885,6 +6946,11 @@ var AdoSCMLib = class extends SCMLib {
 async getSubmitRequests(_repoUrl) {
 throw new Error("getSubmitRequests not implemented for ADO");
 }
+// TODO: Add comprehensive tests for getPullRequestMetrics (ADO)
+// See clients/cli/src/features/analysis/scm/__tests__/github.test.ts:589-648 for reference
+async getPullRequestMetrics(_prNumber) {
+throw new Error("getPullRequestMetrics not implemented for ADO");
+}
 };
 
 // src/features/analysis/scm/bitbucket/bitbucket.ts
@@ -7456,6 +7522,11 @@ var BitbucketSCMLib = class extends SCMLib {
 async getSubmitRequests(_repoUrl) {
 throw new Error("getSubmitRequests not implemented for Bitbucket");
 }
+// TODO: Add comprehensive tests for getPullRequestMetrics (Bitbucket)
+// See clients/cli/src/features/analysis/scm/__tests__/github.test.ts:589-648 for reference
+async getPullRequestMetrics(_prNumber) {
+throw new Error("getPullRequestMetrics not implemented for Bitbucket");
+}
 };
 
 // src/features/analysis/scm/constants.ts
@@ -7566,6 +7637,40 @@ var GITHUB_GRAPHQL_FRAGMENTS = {
 committedDate
 `
 };
+var GET_PR_METRICS_QUERY = `
+query GetPRMetrics($owner: String!, $repo: String!, $prNumber: Int!) {
+repository(owner: $owner, name: $repo) {
+pullRequest(number: $prNumber) {
+id
+number
+state
+isDraft
+createdAt
+mergedAt
+additions
+deletions
+commits(first: 100) {
+totalCount
+nodes {
+commit {
+oid
+committedDate
+author {
+date
+}
+}
+}
+}
+comments(first: 100) {
+totalCount
+nodes {
+id
+}
+}
+}
+}
+}
+`;
 
 // src/features/analysis/scm/github/utils/encrypt_secret.ts
 import sodium from "libsodium-wrappers";
@@ -8342,6 +8447,17 @@ function getGithubSdk(params = {}) {
 return void 0;
 }
 });
+},
+async getPRMetricsGraphQL(params2) {
+const res = await octokit.graphql(
+GET_PR_METRICS_QUERY,
+{
+owner: params2.owner,
+repo: params2.repo,
+prNumber: params2.prNumber
+}
+);
+return res;
 }
 };
 }
@@ -8782,12 +8898,67 @@ var GithubSCMLib = class extends SCMLib {
 const { owner, repo } = parseGithubOwnerAndRepo(repoUrl);
 return this.githubSdk.getPrCommitsBatch({ owner, repo, prNumbers });
 }
+async getPullRequestMetrics(prNumber) {
+this._validateAccessTokenAndUrl();
+const { owner, repo } = parseGithubOwnerAndRepo(this.url);
+const res = await this.githubSdk.getPRMetricsGraphQL({
+owner,
+repo,
+prNumber
+});
+const pr = res.repository.pullRequest;
+if (!pr) {
+throw new Error(`Pull request #${prNumber} not found`);
+}
+let prStatus = "ACTIVE" /* Active */;
+if (pr.state === "CLOSED") {
+prStatus = pr.mergedAt ? "MERGED" /* Merged */ : "CLOSED" /* Closed */;
+} else if (pr.isDraft) {
+prStatus = "DRAFT" /* Draft */;
+}
+const firstCommit = pr.commits.nodes[0];
+const firstCommitDate = firstCommit ? new Date(
+firstCommit.commit.author?.date || firstCommit.commit.committedDate || pr.createdAt
+) : null;
+let commitShas = pr.commits.nodes.map((node) => node.commit.oid);
+if (pr.commits.totalCount > 100) {
+const commitsRes = await this.githubSdk.getPrCommits({
+owner,
+repo,
+pull_number: prNumber
+});
+commitShas = commitsRes.data.map((c) => c.sha);
+}
+let commentIds = pr.comments.nodes.map((node) => node.id);
+if (pr.comments.totalCount > 100) {
+const commentsRes = await this.githubSdk.getGeneralPrComments({
+owner,
+repo,
+issue_number: prNumber
+});
+commentIds = commentsRes.data.map((c) => String(c.id));
+}
+return {
+prId: String(prNumber),
+repositoryUrl: this.url,
+prCreatedAt: new Date(pr.createdAt),
+prMergedAt: pr.mergedAt ? new Date(pr.mergedAt) : null,
+firstCommitDate,
+linesAdded: pr.additions,
+commitsCount: pr.commits.totalCount,
+commitShas,
+prStatus,
+commentIds
+};
+}
 /**
 * Parse a Linear ticket from URL and name
 * Returns null if invalid or missing data
 */
 _parseLinearTicket(url, name) {
-if (!name || !url)
+if (!name || !url) {
+return null;
+}
 const urlParts = url.split("/");
 const titleSlug = urlParts[urlParts.length - 1] || "";
 const title = titleSlug.replace(/-/g, " ");
@@ -9475,6 +9646,11 @@ var GitlabSCMLib = class extends SCMLib {
 async getSubmitRequests(_repoUrl) {
 throw new Error("getSubmitRequests not implemented for GitLab");
 }
+// TODO: Add comprehensive tests for getPullRequestMetrics (GitLab)
+// See clients/cli/src/features/analysis/scm/__tests__/github.test.ts:589-648 for reference
+async getPullRequestMetrics(_prNumber) {
+throw new Error("getPullRequestMetrics not implemented for GitLab");
+}
 };
 
 // src/features/analysis/scm/scmFactory.ts
@@ -9604,6 +9780,10 @@ var StubSCMLib = class extends SCMLib {
 console.warn("getSubmitRequests() returning empty array");
 return [];
 }
+async getPullRequestMetrics(_prNumber) {
+console.warn("getPullRequestMetrics() returning empty object");
+throw new Error("getPullRequestMetrics() not implemented");
+}
 };
 
 // src/features/analysis/scm/scmFactory.ts
@@ -10445,7 +10625,7 @@ import path9 from "path";
 import { env as env2 } from "process";
 import { pipeline } from "stream/promises";
 import chalk6 from "chalk";
-import
+import Debug18 from "debug";
 import extract from "extract-zip";
 import { createSpinner as createSpinner4 } from "nanospinner";
 import fetch4 from "node-fetch";
@@ -10457,15 +10637,14 @@ import { z as z29 } from "zod";
 import crypto from "crypto";
 import os from "os";
 import chalk3 from "chalk";
-import
+import Debug6 from "debug";
 import open from "open";
 
 // src/features/analysis/graphql/gql.ts
 import fetchOrig from "cross-fetch";
-import
+import Debug5 from "debug";
 import { GraphQLClient } from "graphql-request";
-import {
-import { HttpsProxyAgent as HttpsProxyAgent2 } from "https-proxy-agent";
+import { HttpsProxyAgent } from "https-proxy-agent";
 import { v4 as uuidv4 } from "uuid";
 
 // src/mcp/core/Errors.ts
@@ -10540,39 +10719,56 @@ var _ReportDigestError = class _ReportDigestError extends Error {
 __publicField(_ReportDigestError, "defaultMessage", "\u{1F575}\uFE0F\u200D\u2642\uFE0F Digesting report failed. Please verify that the file provided is of a valid supported report format.");
 var ReportDigestError = _ReportDigestError;
 
-// src/
-import Debug5 from "debug";
+// src/utils/subscribe/subscribe.ts
 import { createClient } from "graphql-ws";
-import
-
+import WebsocketNode from "isomorphic-ws";
+
+// src/utils/subscribe/graphql.ts
+function getGraphQlHeaders(options) {
+const headers = {
+"Content-Type": "application/json"
+};
+if ("type" in options) {
+if (options.type === "apiKey") {
+headers["x-mobb-key"] = options.apiKey;
+} else if (options.type === "token") {
+headers["Authorization"] = `Bearer ${options.token}`;
+}
+return headers;
+}
+if ("isAdmin" in options && options.isAdmin) {
+headers["x-hasura-access-key"] = options.adminPassword;
+} else if ("isApiKey" in options && options.isApiKey) {
+headers["x-mobb-key"] = options.apiKey;
+} else if ("accessToken" in options) {
+headers["Authorization"] = `Bearer ${options.accessToken}`;
+}
+return headers;
+}
+
+// src/utils/subscribe/subscribe.ts
 var DEFAULT_API_URL2 = "https://api.mobb.ai/v1/graphql";
-var debug6 = Debug5("mobbdev:subscribe");
 var SUBSCRIPTION_TIMEOUT_MS = 30 * 60 * 1e3;
 function createWSClient(options) {
-const
-
-
-
-
-
-
-
-protocols,
-proxy ? { agent: proxy } : void 0
-);
+const url = options.url || (process.env["API_URL"] || DEFAULT_API_URL2).replace("http", "ws");
+const websocketImpl = options.websocket || (typeof WebSocket !== "undefined" ? WebSocket : WebsocketNode);
+const CustomWebSocket = options.proxyAgent ? (
+// biome-ignore lint/suspicious/noExplicitAny: Dynamic WebSocket extension requires any cast for cross-platform compatibility
+class extends websocketImpl {
+constructor(address, protocols) {
+super(address, protocols, { agent: options.proxyAgent });
+}
 }
-
+) : websocketImpl;
 return createClient({
 //this is needed to prevent AWS from killing the connection
 //currently our load balancer has a 29s idle timeout
 keepAlive: 1e4,
-url
-webSocketImpl:
+url,
+webSocketImpl: CustomWebSocket,
 connectionParams: () => {
 return {
-headers: options
-[API_KEY_HEADER_NAME]: options.apiKey
-} : { authorization: `Bearer ${options.token}` }
+headers: getGraphQlHeaders(options)
 };
 }
 });
@@ -10580,33 +10776,44 @@ function createWSClient(options) {
 function subscribe(query, variables, callback, wsClientOptions) {
 return new Promise((resolve, reject) => {
 let timer = null;
+let settled = false;
 const { timeoutInMs = SUBSCRIPTION_TIMEOUT_MS } = wsClientOptions;
-const
-
-
-
-
-
-
+const client = createWSClient(wsClientOptions);
+let unsubscribe = () => {
+return;
+};
+function cleanup() {
+try {
+unsubscribe();
+} catch {
+}
+if (timer) {
+clearTimeout(timer);
+timer = null;
+}
+}
+function finalizeResolve(data) {
+if (settled) {
+return;
+}
+settled = true;
+cleanup();
+resolve(data);
+}
+function finalizeReject(error) {
+if (settled) {
+return;
+}
+settled = true;
+cleanup();
+reject(error);
+}
+unsubscribe = client.subscribe(
 { query, variables },
 {
 next: (data) => {
-function callbackResolve(data2) {
-unsubscribe();
-if (timer) {
-clearTimeout(timer);
-}
-resolve(data2);
-}
-function callbackReject(data2) {
-unsubscribe();
-if (timer) {
-clearTimeout(timer);
-}
-reject(data2);
-}
 if (!data.data) {
-
+finalizeReject(
 new Error(
 `Broken data object from graphQL subscribe: ${JSON.stringify(
 data
@@ -10614,14 +10821,11 @@ function subscribe(query, variables, callback, wsClientOptions) {
 )
 );
 } else {
-callback(
+callback(finalizeResolve, finalizeReject, data.data);
 }
 },
 error: (error) => {
-
-clearTimeout(timer);
-}
-reject(error);
+finalizeReject(error);
 },
 complete: () => {
 return;
@@ -10630,8 +10834,7 @@ function subscribe(query, variables, callback, wsClientOptions) {
 );
 if (typeof timeoutInMs === "number") {
 timer = setTimeout(() => {
-
-reject(
+finalizeReject(
 new Error(
 `Timeout expired for graphQL subscribe query: ${query} with timeout: ${timeoutInMs}`
 )
@@ -10703,7 +10906,7 @@ var GetVulByNodesMetadataZ = z25.object({
 });
 
 // src/features/analysis/graphql/gql.ts
-var
+var debug6 = Debug5("mobbdev:gql");
 var API_KEY_HEADER_NAME = "x-mobb-key";
 var REPORT_STATE_CHECK_DELAY = 5 * 1e3;
 function getProxyAgent(url) {
@@ -10711,14 +10914,14 @@ function getProxyAgent(url) {
 const parsedUrl = new URL(url);
 const isHttp = parsedUrl.protocol === "http:";
 const isHttps = parsedUrl.protocol === "https:";
-const proxy = isHttps ? HTTPS_PROXY : isHttp ? HTTP_PROXY : null;
+const proxy = isHttps ? HTTPS_PROXY || HTTP_PROXY : isHttp ? HTTP_PROXY : null;
 if (proxy) {
-
-
-return
+debug6("Using proxy %s", proxy);
+debug6("Proxy agent %o", proxy);
+return new HttpsProxyAgent(proxy);
 }
 } catch (err) {
-
+debug6(`Skipping proxy for ${url}. Reason: ${err.message}`);
 }
 return void 0;
 }
@@ -10733,7 +10936,7 @@ var fetchWithProxy = (url, options = {}) => {
 });
 }
 } catch (err) {
-
+debug6(`Skipping proxy for ${url}. Reason: ${err.message}`);
 }
 return fetchOrig(url, options);
 };
@@ -10742,7 +10945,7 @@ var GQLClient = class {
 __publicField(this, "_client");
 __publicField(this, "_clientSdk");
 __publicField(this, "_auth");
-
+debug6(`init with ${args}`);
 this._auth = args;
 this._client = new GraphQLClient(API_URL, {
 headers: args.type === "apiKey" ? { [API_KEY_HEADER_NAME]: args.apiKey || "" } : {
@@ -10751,7 +10954,7 @@ var GQLClient = class {
 fetch: fetchWithProxy,
 requestMiddleware: (request) => {
 const requestId = uuidv4();
-
+debug6(
 `sending API request with id: ${requestId} and with request: ${request.body}`
 );
 return {
@@ -10788,7 +10991,7 @@ var GQLClient = class {
 await this.getUserInfo();
 } catch (e) {
 if (e?.toString().startsWith("FetchError")) {
-
+debug6("verify connection failed %o", e);
 return false;
 }
 }
@@ -10800,7 +11003,7 @@ var GQLClient = class {
 try {
 info = await this.getUserInfo();
 } catch (e) {
-
+debug6("verify token failed %o", e);
 return false;
 }
 return info?.email || true;
@@ -10861,7 +11064,7 @@ var GQLClient = class {
 try {
 await this._clientSdk.CreateCommunityUser();
 } catch (e) {
-
+debug6("create community user failed %o", e);
 }
 }
 async updateScmToken(args) {
@@ -11028,11 +11231,13 @@ var GQLClient = class {
 this._auth.type === "apiKey" ? {
 apiKey: this._auth.apiKey,
 type: "apiKey",
-timeoutInMs: params.timeoutInMs
+timeoutInMs: params.timeoutInMs,
+proxyAgent: getProxyAgent(API_URL)
 } : {
 token: this._auth.token,
 type: "token",
-timeoutInMs: params.timeoutInMs
+timeoutInMs: params.timeoutInMs,
+proxyAgent: getProxyAgent(API_URL)
 }
 );
 }
@@ -11085,6 +11290,12 @@ var GQLClient = class {
 async finalizeAIBlameInferencesUploadRaw(variables) {
 return await this._clientSdk.FinalizeAIBlameInferencesUpload(variables);
 }
+async analyzeCommitForExtensionAIBlame(variables) {
+return await this._clientSdk.AnalyzeCommitForExtensionAIBlame(variables);
+}
+async getAIBlameAttributionPrompt(variables) {
+return await this._clientSdk.GetAIBlameAttributionPrompt(variables);
+}
 };
 
 // src/utils/ConfigStoreService.ts
@@ -11107,7 +11318,7 @@ function getConfigStore() {
 var configStore = getConfigStore();
 
 // src/commands/handleMobbLogin.ts
-var
+var debug7 = Debug6("mobbdev:commands");
 var LOGIN_MAX_WAIT = 10 * 60 * 1e3;
 var LOGIN_CHECK_DELAY = 5 * 1e3;
 var webLoginUrl = `${WEB_APP_URL}/cli-login`;
@@ -11186,9 +11397,9 @@ async function handleMobbLogin({
 });
 loginSpinner.spin();
 if (encryptedApiToken) {
-
+debug7("encrypted API token received %s", encryptedApiToken);
 newApiToken = crypto.privateDecrypt(privateKey, Buffer.from(encryptedApiToken, "base64")).toString("utf-8");
-
+debug7("API token decrypted");
 break;
 }
 await sleep(LOGIN_CHECK_DELAY);
@@ -11202,7 +11413,7 @@ async function handleMobbLogin({
 const newGqlClient = new GQLClient({ apiKey: newApiToken, type: "apiKey" });
 const loginSuccess = await newGqlClient.validateUserToken();
 if (loginSuccess) {
-
+debug7(`set api token ${newApiToken}`);
 configStore.set("apiToken", newApiToken);
 loginSpinner.success({
 text: `\u{1F513} Login to Mobb successful! ${typeof loginSpinner === "string" ? `Logged in as ${loginSuccess}` : ""}`
@@ -11217,10 +11428,10 @@ async function handleMobbLogin({
 }
 
 // src/features/analysis/add_fix_comments_for_pr/add_fix_comments_for_pr.ts
-import
+import Debug10 from "debug";
 
 // src/features/analysis/add_fix_comments_for_pr/utils/utils.ts
-import
+import Debug9 from "debug";
 import parseDiff from "parse-diff";
 import { z as z27 } from "zod";
 
@@ -11232,8 +11443,8 @@ function keyBy(array, keyBy2) {
 }
 
 // src/features/analysis/utils/send_report.ts
-import
-var
+import Debug7 from "debug";
+var debug8 = Debug7("mobbdev:index");
 async function sendReport({
 spinner,
 submitVulnerabilityReportVariables,
@@ -11244,7 +11455,7 @@ async function sendReport({
 submitVulnerabilityReportVariables
 );
 if (submitRes.submitVulnerabilityReport.__typename !== "VulnerabilityReport") {
-
+debug8("error submit vul report %s", submitRes);
 throw new Error("\u{1F575}\uFE0F\u200D\u2642\uFE0F Mobb analysis failed");
 }
 spinner.update({ text: progressMassages.processingVulnerabilityReport });
@@ -11294,9 +11505,9 @@ var scannerToFriendlyString = {
 };
 
 // src/features/analysis/add_fix_comments_for_pr/utils/buildCommentBody.ts
-import
+import Debug8 from "debug";
 import { z as z26 } from "zod";
-var
+var debug9 = Debug8("mobbdev:handle-finished-analysis");
 var getCommitFixButton = (commitUrl) => `<a href="${commitUrl}"><img src=${COMMIT_FIX_SVG}></a>`;
 function buildFixCommentBody({
 fix,
@@ -11355,7 +11566,7 @@ function buildFixCommentBody({
 safeIssueType: z26.nativeEnum(IssueType_Enum)
 }).safeParse(fix);
 if (!validFixParseRes.success) {
-
+debug9(
 `fix ${fixId} has custom issue type or language, therefore the commit description will not be added`,
 validFixParseRes.error
 );
@@ -11419,7 +11630,7 @@ ${issuePageLink}`;
 }
 
 // src/features/analysis/add_fix_comments_for_pr/utils/utils.ts
-var
+var debug10 = Debug9("mobbdev:handle-finished-analysis");
 function calculateRanges(integers) {
 if (integers.length === 0) {
 return [];
@@ -11453,7 +11664,7 @@ function deleteAllPreviousComments({
 try {
 return scm.deleteComment({ comment_id: comment.id });
 } catch (e) {
-
+debug10("delete comment failed %s", e);
 return Promise.resolve();
 }
 });
@@ -11469,7 +11680,7 @@ function deleteAllPreviousGeneralPrComments(params) {
 try {
 return scm.deleteGeneralPrComment({ commentId: comment.id });
 } catch (e) {
-
+debug10("delete comment failed %s", e);
 return Promise.resolve();
 }
 });
@@ -11613,7 +11824,7 @@ async function postAnalysisInsightComment(params) {
 fixablePrVuls,
 nonFixablePrVuls
 } = prVulenrabilities;
-
+debug10({
 fixablePrVuls,
 nonFixablePrVuls,
 vulnerabilitiesOutsidePr,
@@ -11668,7 +11879,7 @@ ${contactUsMarkdown}`;
 }
 
 // src/features/analysis/add_fix_comments_for_pr/add_fix_comments_for_pr.ts
-var
+var debug11 = Debug10("mobbdev:handle-finished-analysis");
 async function addFixCommentsForPr({
 analysisId,
 scm: _scm,
@@ -11680,7 +11891,7 @@ async function addFixCommentsForPr({
 }
 const scm = _scm;
 const getAnalysisRes = await gqlClient.getAnalysis(analysisId);
-
+debug11("getAnalysis %o", getAnalysisRes);
 const {
 vulnerabilityReport: {
 projectId,
@@ -11789,8 +12000,8 @@ ${contextString}` : description;
 }
 
 // src/features/analysis/auto_pr_handler.ts
-import
-var
+import Debug11 from "debug";
+var debug12 = Debug11("mobbdev:handleAutoPr");
 async function handleAutoPr(params) {
 const {
 gqlClient,
@@ -11814,7 +12025,7 @@ async function handleAutoPr(params) {
 prId,
 prStrategy: createOnePr ? "CONDENSE" /* Condense */ : "SPREAD" /* Spread */
 });
-
+debug12("auto pr analysis res %o", autoPrAnalysisRes);
 if (autoPrAnalysisRes.autoPrAnalysis?.__typename === "AutoPrError") {
 createAutoPrSpinner.error({
 text: `\u{1F504} Automatic pull request failed - ${autoPrAnalysisRes.autoPrAnalysis.error}`
@@ -11840,15 +12051,15 @@ async function handleAutoPr(params) {
 
 // src/features/analysis/git.ts
 init_GitService();
-import
-var
+import Debug12 from "debug";
+var debug13 = Debug12("mobbdev:git");
 async function getGitInfo(srcDirPath) {
-
+debug13("getting git info for %s", srcDirPath);
 const gitService = new GitService(srcDirPath);
 try {
 const validationResult = await gitService.validateRepository();
 if (!validationResult.isValid) {
-
+debug13("folder is not a git repo");
 return {
 success: false,
 hash: void 0,
@@ -11863,9 +12074,9 @@ async function getGitInfo(srcDirPath) {
 };
 } catch (e) {
 if (e instanceof Error) {
-
+debug13("failed to run git %o", e);
 if (e.message.includes(" spawn ")) {
-
+debug13("git cli not installed");
 } else {
 throw e;
 }
@@ -11879,13 +12090,13 @@ init_configs();
 import fs8 from "fs";
 import path7 from "path";
 import AdmZip from "adm-zip";
-import
+import Debug13 from "debug";
 import { globby } from "globby";
 import { isBinary as isBinary2 } from "istextorbinary";
 import { simpleGit as simpleGit2 } from "simple-git";
 import { parseStringPromise } from "xml2js";
 import { z as z28 } from "zod";
-var
+var debug14 = Debug13("mobbdev:pack");
 var FPR_SOURCE_CODE_FILE_MAPPING_SCHEMA = z28.object({
 properties: z28.object({
 entry: z28.array(
@@ -11907,7 +12118,7 @@ function getManifestFilesSuffixes() {
 return ["package.json", "pom.xml"];
 }
 async function pack(srcDirPath, vulnFiles, isIncludeAllFiles = false) {
-
+debug14("pack folder %s", srcDirPath);
 let git = void 0;
 try {
 git = simpleGit2({
@@ -11917,13 +12128,13 @@ async function pack(srcDirPath, vulnFiles, isIncludeAllFiles = false) {
 });
 await git.status();
 } catch (e) {
-
+debug14("failed to run git %o", e);
 git = void 0;
 if (e instanceof Error) {
 if (e.message.includes(" spawn ")) {
-
+debug14("git cli not installed");
 } else if (e.message.includes("not a git repository")) {
-
+debug14("folder is not a git repo");
 } else {
 throw e;
 }
@@ -11938,9 +12149,9 @@ async function pack(srcDirPath, vulnFiles, isIncludeAllFiles = false) {
 followSymbolicLinks: false,
 dot: true
 });
-
+debug14("files found %d", filepaths.length);
 const zip = new AdmZip();
-
+debug14("compressing files");
 for (const filepath of filepaths) {
 const absFilepath = path7.join(srcDirPath, filepath.toString());
 if (!isIncludeAllFiles) {
@@ -11949,12 +12160,12 @@ async function pack(srcDirPath, vulnFiles, isIncludeAllFiles = false) {
 absFilepath.toString().replaceAll(path7.win32.sep, path7.posix.sep),
 vulnFiles
 )) {
-
+debug14("ignoring %s because it is not a vulnerability file", filepath);
 continue;
 }
 }
 if (fs8.lstatSync(absFilepath).size > MCP_MAX_FILE_SIZE) {
-
+debug14("ignoring %s because the size is > 5MB", filepath);
 continue;
 }
 let data;
@@ -11968,16 +12179,16 @@ async function pack(srcDirPath, vulnFiles, isIncludeAllFiles = false) {
 data = fs8.readFileSync(absFilepath);
 }
 if (isBinary2(null, data)) {
-
+debug14("ignoring %s because is seems to be a binary file", filepath);
 continue;
 }
 zip.addFile(filepath.toString(), data);
 }
-
+debug14("get zip file buffer");
 return zip.toBuffer();
 }
 async function repackFpr(fprPath) {
-
+debug14("repack fpr file %s", fprPath);
 const zipIn = new AdmZip(fprPath);
 const zipOut = new AdmZip();
 const mappingXML = zipIn.readAsText("src-archive/index.xml", "utf-8");
@@ -11992,7 +12203,7 @@ async function repackFpr(fprPath) {
 zipOut.addFile(realPath, buf);
 }
 }
-
+debug14("get repacked zip file buffer");
 return zipOut.toBuffer();
 }
 
@@ -12063,7 +12274,7 @@ async function snykArticlePrompt() {
 // src/features/analysis/scanners/checkmarx.ts
 import { createRequire } from "module";
 import chalk4 from "chalk";
-import
+import Debug15 from "debug";
 import { existsSync } from "fs";
 import { createSpinner as createSpinner2 } from "nanospinner";
 import { type } from "os";
@@ -12075,7 +12286,7 @@ var cxOperatingSystemSupportMessage = `Your operating system does not support ch
 
 // src/utils/child_process.ts
 import cp from "child_process";
-import
+import Debug14 from "debug";
 import * as process2 from "process";
 function createFork({ args, processPath, name }, options) {
 const child = cp.fork(processPath, args, {
@@ -12093,16 +12304,16 @@ function createSpawn({ args, processPath, name, cwd }, options) {
 return createChildProcess({ childProcess: child, name }, options);
 }
 function createChildProcess({ childProcess, name }, options) {
-const
+const debug20 = Debug14(`mobbdev:${name}`);
 const { display } = options;
 return new Promise((resolve, reject) => {
 let out = "";
 const onData = (chunk) => {
-
+debug20(`chunk received from ${name} std ${chunk}`);
 out += chunk;
 };
 if (!childProcess?.stdout || !childProcess?.stderr) {
-
+debug20(`unable to fork ${name}`);
 reject(new Error(`unable to fork ${name}`));
 }
 childProcess.stdout?.on("data", onData);
@@ -12112,18 +12323,18 @@ function createChildProcess({ childProcess, name }, options) {
 childProcess.stderr?.pipe(process2.stderr);
 }
 childProcess.on("exit", (code) => {
-
+debug20(`${name} exit code ${code}`);
 resolve({ message: out, code });
 });
 childProcess.on("error", (err) => {
-
+debug20(`${name} error %o`, err);
 reject(err);
 });
 });
 }
 
 // src/features/analysis/scanners/checkmarx.ts
-var
+var debug15 = Debug15("mobbdev:checkmarx");
 var moduleUrl;
 if (typeof __filename !== "undefined") {
 moduleUrl = __filename;
@@ -12182,14 +12393,14 @@ function validateCheckmarxInstallation() {
 existsSync(getCheckmarxPath());
 }
 async function forkCheckmarx(args, { display }) {
-
+debug15("fork checkmarx with args %o %s", args.join(" "), display);
 return createSpawn(
 { args, processPath: getCheckmarxPath(), name: "checkmarx" },
 { display }
 );
 }
 async function getCheckmarxReport({ reportPath, repositoryRoot, branch, projectName }, { skipPrompts = false }) {
-
+debug15("get checkmarx report start %s %s", reportPath, repositoryRoot);
 const { code: loginCode } = await forkCheckmarx(VALIDATE_COMMAND, {
 display: false
 });
@@ -12257,10 +12468,10 @@ async function validateCheckamxCredentials() {
 // src/features/analysis/scanners/snyk.ts
 import { createRequire as createRequire2 } from "module";
 import chalk5 from "chalk";
-import
+import Debug16 from "debug";
 import { createSpinner as createSpinner3 } from "nanospinner";
 import open2 from "open";
-var
+var debug16 = Debug16("mobbdev:snyk");
 var moduleUrl2;
 if (typeof __filename !== "undefined") {
 moduleUrl2 = __filename;
@@ -12282,13 +12493,13 @@ if (typeof __filename !== "undefined") {
 var costumeRequire2 = createRequire2(moduleUrl2);
 var SNYK_PATH = costumeRequire2.resolve("snyk/bin/snyk");
 var SNYK_ARTICLE_URL = "https://docs.snyk.io/scan-using-snyk/snyk-code/configure-snyk-code#enable-snyk-code";
-
+debug16("snyk executable path %s", SNYK_PATH);
 async function forkSnyk(args, { display }) {
-
+debug16("fork snyk with args %o %s", args, display);
 return createFork({ args, processPath: SNYK_PATH, name: "snyk" }, { display });
 }
 async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
-
+debug16("get snyk report start %s %s", reportPath, repoRoot);
 const config2 = await forkSnyk(["config"], { display: false });
 const { message: configMessage } = config2;
 if (!configMessage.includes("api: ")) {
@@ -12302,7 +12513,7 @@ async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
 snykLoginSpinner.update({
 text: "\u{1F513} Waiting for Snyk login to complete"
 });
-
+debug16("no token in the config %s", config2);
 await forkSnyk(["auth"], { display: true });
 snykLoginSpinner.success({ text: "\u{1F513} Login to Snyk Successful" });
 }
@@ -12312,12 +12523,12 @@ async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
 { display: true }
 );
 if (scanOutput.includes("Snyk Code is not supported for org")) {
-
+debug16("snyk code is not enabled %s", scanOutput);
 snykSpinner.error({ text: "\u{1F50D} Snyk configuration needed" });
 const answer = await snykArticlePrompt();
-
+debug16("answer %s", answer);
 if (answer) {
-
+debug16("opening the browser");
 await open2(SNYK_ARTICLE_URL);
 }
 console.log(
@@ -12332,9 +12543,9 @@ async function getSnykReport(reportPath, repoRoot, { skipPrompts = false }) {
 }
 
 // src/features/analysis/upload-file.ts
-import
+import Debug17 from "debug";
 import fetch3, { File, fileFrom, FormData } from "node-fetch";
-var
+var debug17 = Debug17("mobbdev:upload-file");
 async function uploadFile({
 file,
 url,
@@ -12347,9 +12558,9 @@ async function uploadFile({
 logInfo2(`FileUpload: upload file start ${url}`);
 logInfo2(`FileUpload: upload fields`, uploadFields);
 logInfo2(`FileUpload: upload key ${uploadKey}`);
-
-
-
+debug17("upload file start %s", url);
+debug17("upload fields %o", uploadFields);
+debug17("upload key %s", uploadKey);
 const form = new FormData();
 Object.entries(uploadFields).forEach(([key, value]) => {
 form.append(key, value);
@@ -12358,11 +12569,11 @@ async function uploadFile({
 form.append("key", uploadKey);
 }
 if (typeof file === "string") {
-
+debug17("upload file from path %s", file);
 logInfo2(`FileUpload: upload file from path ${file}`);
 form.append("file", await fileFrom(file));
 } else {
-
+debug17("upload file from buffer");
 logInfo2(`FileUpload: upload file from buffer`);
 form.append("file", new File([new Uint8Array(file)], "file"));
 }
@@ -12373,11 +12584,11 @@ async function uploadFile({
 agent
 });
 if (!response.ok) {
-
+debug17("error from S3 %s %s", response.body, response.status);
 logInfo2(`FileUpload: error from S3 ${response.body} ${response.status}`);
 throw new Error(`Failed to upload the file: ${response.status}`);
 }
-
+debug17("upload file done");
 logInfo2(`FileUpload: upload file done`);
 }
 
@@ -12412,9 +12623,9 @@ async function downloadRepo({
 }) {
 const { createSpinner: createSpinner5 } = Spinner2({ ci });
 const repoSpinner = createSpinner5("\u{1F4BE} Downloading Repo").start();
-
+debug18("download repo %s %s %s", repoUrl, dirname);
 const zipFilePath = path9.join(dirname, "repo.zip");
-
+debug18("download URL: %s auth headers: %o", downloadUrl, authHeaders);
 const response = await fetch4(downloadUrl, {
 method: "GET",
 headers: {
@@ -12422,7 +12633,7 @@ async function downloadRepo({
 }
 });
 if (!response.ok) {
-
+debug18("SCM zipball request failed %s %s", response.body, response.status);
 repoSpinner.error({ text: "\u{1F4BE} Repo download failed" });
 throw new Error(`Can't access ${chalk6.bold(repoUrl)}`);
 }
@@ -12436,7 +12647,7 @@ async function downloadRepo({
 if (!repoRoot) {
 throw new Error("Repo root not found");
 }
-
+debug18("repo root %s", repoRoot);
 repoSpinner.success({ text: "\u{1F4BE} Repo downloaded successfully" });
 return path9.join(dirname, repoRoot);
 }
@@ -12445,7 +12656,7 @@ var getReportUrl = ({
 projectId,
 fixReportId
 }) => `${WEB_APP_URL}/organization/${organizationId}/project/${projectId}/report/${fixReportId}`;
-var
+var debug18 = Debug18("mobbdev:index");
 async function runAnalysis(params, options) {
 const tmpObj = tmp2.dirSync({
 unsafeCleanup: true
@@ -12590,7 +12801,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
 commitDirectly,
 pullRequest
 } = params;
-
+debug18("start %s %s", dirname, repo);
 const { createSpinner: createSpinner5 } = Spinner2({ ci });
 skipPrompts = skipPrompts || ci;
 const gqlClient = await getAuthenticatedGQLClient({
@@ -12659,8 +12870,8 @@ async function _scan(params, { skipPrompts = false } = {}) {
 );
 }
 const { sha } = getReferenceDataRes.gitReference;
-
-
+debug18("project id %s", projectId);
+debug18("default branch %s", reference);
 if (command === "scan") {
 reportPath = await getReport(
 {
@@ -14363,6 +14574,7 @@ var log = logger.log.bind(logger);
 // src/mcp/services/McpGQLClient.ts
 import crypto3 from "crypto";
 import { GraphQLClient as GraphQLClient2 } from "graphql-request";
+import { HttpsProxyAgent as HttpsProxyAgent2 } from "https-proxy-agent";
 import { v4 as uuidv42 } from "uuid";
 init_configs();
 
@@ -14574,6 +14786,23 @@ var McpAuthService = class {
 };
 
 // src/mcp/services/McpGQLClient.ts
+function getProxyAgent2(url) {
+try {
+const parsedUrl = new URL(url);
+const isHttp = parsedUrl.protocol === "http:";
+const isHttps = parsedUrl.protocol === "https:";
+const proxy = isHttps ? HTTPS_PROXY || HTTP_PROXY : isHttp ? HTTP_PROXY : null;
+if (proxy) {
+logDebug("[GraphQL] Using proxy for websocket subscriptions", { proxy });
+return new HttpsProxyAgent2(proxy);
+}
+} catch (err) {
+logDebug(`[GraphQL] Skipping proxy for ${url}`, {
+error: err.message
+});
+}
+return void 0;
+}
 var McpGQLClient = class {
 constructor(args) {
 __publicField(this, "client");
@@ -14747,11 +14976,13 @@ var McpGQLClient = class {
 this._auth.type === "apiKey" ? {
 apiKey: this._auth.apiKey,
 type: "apiKey",
-timeoutInMs: params.timeoutInMs
+timeoutInMs: params.timeoutInMs,
+proxyAgent: getProxyAgent2(this.apiUrl)
 } : {
 token: this._auth.token,
 type: "token",
-timeoutInMs: params.timeoutInMs
+timeoutInMs: params.timeoutInMs,
+proxyAgent: getProxyAgent2(this.apiUrl)
 }
 );
 logDebug(`[${scanContext}] GraphQL: GetAnalysis subscription completed`, {
@@ -16147,7 +16378,6 @@ var McpServer = class {
 tools: [
 {
 name: mcpCheckerTool.name,
-display_name: mcpCheckerTool.display_name || mcpCheckerTool.name,
 description: mcpCheckerTool.description,
 inputSchema: {
 type: "object",
@@ -16177,7 +16407,6 @@ var McpServer = class {
 const response = {
 tools: toolsDefinitions.map((tool) => ({
 name: tool.name,
-display_name: tool.display_name || tool.name,
 description: tool.description || "",
 inputSchema: {
 type: "object",
@@ -22478,13 +22707,13 @@ var parseArgs = async (args) => {
 };
 
 // src/index.ts
-var
+var debug19 = Debug19("mobbdev:index");
 async function run() {
 return parseArgs(hideBin(process.argv));
 }
 (async () => {
 try {
-
+debug19("Bugsy CLI v%s running...", packageJson.version);
 await run();
 process.exit(0);
 } catch (err) {