mobbdev 1.0.215 → 1.0.216
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/args/commands/upload_ai_blame.d.mts +52 -28
- package/dist/args/commands/upload_ai_blame.mjs +24 -0
- package/dist/index.mjs +180 -136
- package/package.json +1 -1
package/dist/args/commands/upload_ai_blame.d.mts CHANGED

@@ -3293,6 +3293,25 @@ type GetLastOrgAndNamedProjectQuery = {
 }>;
 }>;
 };
+type GetLastOrgQueryVariables = Exact<{
+email: Scalars['String']['input'];
+}>;
+type GetLastOrgQuery = {
+__typename?: 'query_root';
+user: Array<{
+__typename?: 'user';
+id: any;
+name?: string | null;
+userOrganizationsAndUserOrganizationRoles: Array<{
+__typename?: 'organization_to_user';
+id: any;
+organization: {
+__typename?: 'organization';
+id: any;
+};
+}>;
+}>;
+};
 type GetEncryptedApiTokenQueryVariables = Exact<{
 loginId: Scalars['uuid']['input'];
 }>;
@@ -4196,6 +4215,7 @@ type SdkFunctionWrapper = <T>(action: (requestHeaders?: Record<string, string>)
 declare function getSdk(client: GraphQLClient, withWrapper?: SdkFunctionWrapper): {
 Me(variables?: MeQueryVariables, requestHeaders?: GraphQLClientRequestHeaders, signal?: RequestInit["signal"]): Promise<MeQuery>;
 getLastOrgAndNamedProject(variables: GetLastOrgAndNamedProjectQueryVariables, requestHeaders?: GraphQLClientRequestHeaders, signal?: RequestInit["signal"]): Promise<GetLastOrgAndNamedProjectQuery>;
+getLastOrg(variables: GetLastOrgQueryVariables, requestHeaders?: GraphQLClientRequestHeaders, signal?: RequestInit["signal"]): Promise<GetLastOrgQuery>;
 GetEncryptedApiToken(variables: GetEncryptedApiTokenQueryVariables, requestHeaders?: GraphQLClientRequestHeaders, signal?: RequestInit["signal"]): Promise<GetEncryptedApiTokenQuery>;
 FixReportState(variables: FixReportStateQueryVariables, requestHeaders?: GraphQLClientRequestHeaders, signal?: RequestInit["signal"]): Promise<FixReportStateQuery>;
 GetVulnerabilityReportPaths(variables: GetVulnerabilityReportPathsQueryVariables, requestHeaders?: GraphQLClientRequestHeaders, signal?: RequestInit["signal"]): Promise<GetVulnerabilityReportPathsQuery>;
@@ -4279,6 +4299,10 @@ declare class GQLClient {
 isTokenAvailable: boolean;
 } | null>;
 } | null | undefined>;
+getLastOrg(email: string): Promise<{
+organizationId: any;
+userName: string;
+}>;
 createCliLogin(variables: CreateCliLoginMutationVariables): Promise<string>;
 verifyApiConnection(): Promise<boolean>;
 validateUserToken(): Promise<string | boolean>;
@@ -4438,26 +4462,18 @@ declare const PromptItemZ: z.ZodObject<{
 name: string;
 parameters: string;
 result: string;
-accepted?: boolean | undefined;
 rawArguments?: string | undefined;
+accepted?: boolean | undefined;
 }, {
 name: string;
 parameters: string;
 result: string;
-accepted?: boolean | undefined;
 rawArguments?: string | undefined;
+accepted?: boolean | undefined;
 }>>;
 }, "strip", z.ZodTypeAny, {
 type: "USER_PROMPT" | "AI_RESPONSE" | "TOOL_EXECUTION" | "AI_THINKING";
-tool?: {
-name: string;
-parameters: string;
-result: string;
-accepted?: boolean | undefined;
-rawArguments?: string | undefined;
-} | undefined;
 date?: Date | undefined;
-text?: string | undefined;
 attachedFiles?: {
 relativePath: string;
 startLine?: number | undefined;
@@ -4466,17 +4482,17 @@ declare const PromptItemZ: z.ZodObject<{
 inputCount: number;
 outputCount: number;
 } | undefined;
-
-type: "USER_PROMPT" | "AI_RESPONSE" | "TOOL_EXECUTION" | "AI_THINKING";
+text?: string | undefined;
 tool?: {
 name: string;
 parameters: string;
 result: string;
-accepted?: boolean | undefined;
 rawArguments?: string | undefined;
+accepted?: boolean | undefined;
 } | undefined;
+}, {
+type: "USER_PROMPT" | "AI_RESPONSE" | "TOOL_EXECUTION" | "AI_THINKING";
 date?: Date | undefined;
-text?: string | undefined;
 attachedFiles?: {
 relativePath: string;
 startLine?: number | undefined;
@@ -4485,6 +4501,14 @@ declare const PromptItemZ: z.ZodObject<{
 inputCount: number;
 outputCount: number;
 } | undefined;
+text?: string | undefined;
+tool?: {
+name: string;
+parameters: string;
+result: string;
+rawArguments?: string | undefined;
+accepted?: boolean | undefined;
+} | undefined;
 }>;
 type PromptItem = z.infer<typeof PromptItemZ>;
 declare const PromptItemArrayZ: z.ZodArray<z.ZodObject<{
@@ -4521,26 +4545,18 @@ declare const PromptItemArrayZ: z.ZodArray<z.ZodObject<{
 name: string;
 parameters: string;
 result: string;
-accepted?: boolean | undefined;
 rawArguments?: string | undefined;
+accepted?: boolean | undefined;
 }, {
 name: string;
 parameters: string;
 result: string;
-accepted?: boolean | undefined;
 rawArguments?: string | undefined;
+accepted?: boolean | undefined;
 }>>;
 }, "strip", z.ZodTypeAny, {
 type: "USER_PROMPT" | "AI_RESPONSE" | "TOOL_EXECUTION" | "AI_THINKING";
-tool?: {
-name: string;
-parameters: string;
-result: string;
-accepted?: boolean | undefined;
-rawArguments?: string | undefined;
-} | undefined;
 date?: Date | undefined;
-text?: string | undefined;
 attachedFiles?: {
 relativePath: string;
 startLine?: number | undefined;
@@ -4549,17 +4565,17 @@ declare const PromptItemArrayZ: z.ZodArray<z.ZodObject<{
 inputCount: number;
 outputCount: number;
 } | undefined;
-
-type: "USER_PROMPT" | "AI_RESPONSE" | "TOOL_EXECUTION" | "AI_THINKING";
+text?: string | undefined;
 tool?: {
 name: string;
 parameters: string;
 result: string;
-accepted?: boolean | undefined;
 rawArguments?: string | undefined;
+accepted?: boolean | undefined;
 } | undefined;
+}, {
+type: "USER_PROMPT" | "AI_RESPONSE" | "TOOL_EXECUTION" | "AI_THINKING";
 date?: Date | undefined;
-text?: string | undefined;
 attachedFiles?: {
 relativePath: string;
 startLine?: number | undefined;
@@ -4568,6 +4584,14 @@ declare const PromptItemArrayZ: z.ZodArray<z.ZodObject<{
 inputCount: number;
 outputCount: number;
 } | undefined;
+text?: string | undefined;
+tool?: {
+name: string;
+parameters: string;
+result: string;
+rawArguments?: string | undefined;
+accepted?: boolean | undefined;
+} | undefined;
 }>, "many">;
 type PromptItemArray = z.infer<typeof PromptItemArrayZ>;
 type UploadAiBlameOptions = {
package/dist/args/commands/upload_ai_blame.mjs CHANGED

@@ -567,6 +567,20 @@ var GetLastOrgAndNamedProjectDocument = `
 }
 }
 `;
+var GetLastOrgDocument = `
+query getLastOrg($email: String!) {
+user(where: {email: {_eq: $email}}, limit: 1) {
+id
+name
+userOrganizationsAndUserOrganizationRoles(order_by: {createdOn: desc}) {
+id
+organization {
+id
+}
+}
+}
+}
+`;
 var GetEncryptedApiTokenDocument = `
 query GetEncryptedApiToken($loginId: uuid!) {
 cli_login_by_pk(id: $loginId) {
@@ -1062,6 +1076,9 @@ function getSdk(client, withWrapper = defaultWrapper) {
 getLastOrgAndNamedProject(variables, requestHeaders, signal) {
 return withWrapper((wrappedRequestHeaders) => client.request({ document: GetLastOrgAndNamedProjectDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "getLastOrgAndNamedProject", "query", variables);
 },
+getLastOrg(variables, requestHeaders, signal) {
+return withWrapper((wrappedRequestHeaders) => client.request({ document: GetLastOrgDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "getLastOrg", "query", variables);
+},
 GetEncryptedApiToken(variables, requestHeaders, signal) {
 return withWrapper((wrappedRequestHeaders) => client.request({ document: GetEncryptedApiTokenDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "GetEncryptedApiToken", "query", variables);
 },
@@ -4615,6 +4632,13 @@ var GQLClient = class {
 const { me } = await this._clientSdk.Me();
 return me;
 }
+async getLastOrg(email) {
+const getLastOrgRes = await this._clientSdk.getLastOrg({ email });
+return {
+organizationId: getLastOrgRes?.user?.[0]?.userOrganizationsAndUserOrganizationRoles?.[0]?.organization?.id,
+userName: getLastOrgRes?.user?.[0]?.name ?? ""
+};
+}
 async createCliLogin(variables) {
 const res = await this._clientSdk.CreateCliLogin(variables, {
 // We may have outdated API key in the config storage. Avoid using it for the login request.
package/dist/index.mjs CHANGED

@@ -1826,6 +1826,20 @@ var GetLastOrgAndNamedProjectDocument = `
 }
 }
 `;
+var GetLastOrgDocument = `
+query getLastOrg($email: String!) {
+user(where: {email: {_eq: $email}}, limit: 1) {
+id
+name
+userOrganizationsAndUserOrganizationRoles(order_by: {createdOn: desc}) {
+id
+organization {
+id
+}
+}
+}
+}
+`;
 var GetEncryptedApiTokenDocument = `
 query GetEncryptedApiToken($loginId: uuid!) {
 cli_login_by_pk(id: $loginId) {
@@ -2321,6 +2335,9 @@ function getSdk(client, withWrapper = defaultWrapper) {
 getLastOrgAndNamedProject(variables, requestHeaders, signal) {
 return withWrapper((wrappedRequestHeaders) => client.request({ document: GetLastOrgAndNamedProjectDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "getLastOrgAndNamedProject", "query", variables);
 },
+getLastOrg(variables, requestHeaders, signal) {
+return withWrapper((wrappedRequestHeaders) => client.request({ document: GetLastOrgDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "getLastOrg", "query", variables);
+},
 GetEncryptedApiToken(variables, requestHeaders, signal) {
 return withWrapper((wrappedRequestHeaders) => client.request({ document: GetEncryptedApiTokenDocument, variables, requestHeaders: { ...requestHeaders, ...wrappedRequestHeaders }, signal }), "GetEncryptedApiToken", "query", variables);
 },
@@ -6230,7 +6247,7 @@ async function getAdoSdk(params) {
 const url = new URL(repoUrl);
 const origin2 = url.origin.toLowerCase().endsWith(".visualstudio.com") ? DEFUALT_ADO_ORIGIN : url.origin.toLowerCase();
 const params2 = `path=/&versionDescriptor[versionOptions]=0&versionDescriptor[versionType]=commit&versionDescriptor[version]=${branch}&resolveLfs=true&$format=zip&api-version=5.0&download=true`;
-const
+const path21 = [
 prefixPath,
 owner,
 projectName,
@@ -6241,7 +6258,7 @@ async function getAdoSdk(params) {
 "items",
 "items"
 ].filter(Boolean).join("/");
-return new URL(`${
+return new URL(`${path21}?${params2}`, origin2).toString();
 },
 async getAdoBranchList({ repoUrl }) {
 try {
@@ -7788,14 +7805,14 @@ function getGithubSdk(params = {}) {
 };
 },
 async getGithubBlameRanges(params2) {
-const { ref, gitHubUrl, path:
+const { ref, gitHubUrl, path: path21 } = params2;
 const { owner, repo } = parseGithubOwnerAndRepo(gitHubUrl);
 const res = await octokit.graphql(
 GET_BLAME_DOCUMENT,
 {
 owner,
 repo,
-path:
+path: path21,
 ref
 }
 );
@@ -8162,11 +8179,11 @@ var GithubSCMLib = class extends SCMLib {
 markdownComment: comment
 });
 }
-async getRepoBlameRanges(ref,
+async getRepoBlameRanges(ref, path21) {
 this._validateUrl();
 return await this.githubSdk.getGithubBlameRanges({
 ref,
-path:
+path: path21,
 gitHubUrl: this.url
 });
 }
@@ -8846,13 +8863,13 @@ function parseGitlabOwnerAndRepo(gitlabUrl) {
 const { organization, repoName, projectPath } = parsingResult;
 return { owner: organization, repo: repoName, projectPath };
 }
-async function getGitlabBlameRanges({ ref, gitlabUrl, path:
+async function getGitlabBlameRanges({ ref, gitlabUrl, path: path21 }, options) {
 const { projectPath } = parseGitlabOwnerAndRepo(gitlabUrl);
 const api2 = getGitBeaker({
 url: gitlabUrl,
 gitlabAuthToken: options?.gitlabAuthToken
 });
-const resp = await api2.RepositoryFiles.allFileBlames(projectPath,
+const resp = await api2.RepositoryFiles.allFileBlames(projectPath, path21, ref);
 let lineNumber = 1;
 return resp.filter((range) => range.lines).map((range) => {
 const oldLineNumber = lineNumber;
@@ -9029,10 +9046,10 @@ var GitlabSCMLib = class extends SCMLib {
 markdownComment: comment
 });
 }
-async getRepoBlameRanges(ref,
+async getRepoBlameRanges(ref, path21) {
 this._validateUrl();
 return await getGitlabBlameRanges(
-{ ref, path:
+{ ref, path: path21, gitlabUrl: this.url },
 {
 url: this.url,
 gitlabAuthToken: this.accessToken
@@ -10385,6 +10402,13 @@ var GQLClient = class {
 const { me } = await this._clientSdk.Me();
 return me;
 }
+async getLastOrg(email) {
+const getLastOrgRes = await this._clientSdk.getLastOrg({ email });
+return {
+organizationId: getLastOrgRes?.user?.[0]?.userOrganizationsAndUserOrganizationRoles?.[0]?.organization?.id,
+userName: getLastOrgRes?.user?.[0]?.name ?? ""
+};
+}
 async createCliLogin(variables) {
 const res = await this._clientSdk.CreateCliLogin(variables, {
 // We may have outdated API key in the config storage. Avoid using it for the login request.
@@ -11064,7 +11088,7 @@ async function postIssueComment(params) {
 fpDescription
 } = params;
 const {
-path:
+path: path21,
 startLine,
 vulnerabilityReportIssue: {
 vulnerabilityReportIssueTags,
@@ -11079,7 +11103,7 @@ async function postIssueComment(params) {
 Refresh the page in order to see the changes.`,
 pull_number: pullRequest,
 commit_id: commitSha,
-path:
+path: path21,
 line: startLine
 });
 const commentId = commentRes.data.id;
@@ -11113,7 +11137,7 @@ async function postFixComment(params) {
 scanner
 } = params;
 const {
-path:
+path: path21,
 startLine,
 vulnerabilityReportIssue: { fixId, vulnerabilityReportIssueTags, category },
 vulnerabilityReportIssueId
@@ -11131,7 +11155,7 @@ async function postFixComment(params) {
 Refresh the page in order to see the changes.`,
 pull_number: pullRequest,
 commit_id: commitSha,
-path:
+path: path21,
 line: startLine
 });
 const commentId = commentRes.data.id;
@@ -11712,8 +11736,8 @@ if (typeof __filename !== "undefined") {
 }
 var costumeRequire = createRequire(moduleUrl);
 var getCheckmarxPath = () => {
-const
-const cxFileName =
+const os9 = type();
+const cxFileName = os9 === "Windows_NT" ? "cx.exe" : "cx";
 try {
 return costumeRequire.resolve(`.bin/${cxFileName}`);
 } catch (e) {
@@ -12889,8 +12913,8 @@ var WorkspaceService = class {
 * Sets a known workspace path that was discovered through successful validation
 * @param path The validated workspace path to store
 */
-static setKnownWorkspacePath(
-this.knownWorkspacePath =
+static setKnownWorkspacePath(path21) {
+this.knownWorkspacePath = path21;
 }
 /**
 * Gets the known workspace path that was previously validated
@@ -14103,16 +14127,16 @@ var versionCommands = {
 }
 };
 var getProcessInfo = (pid) => {
-const
+const platform2 = os3.platform();
 try {
-if (
+if (platform2 === "linux" || platform2 === "darwin") {
 const output = execSync(`ps -o pid=,ppid=,comm= -p ${pid}`, {
 stdio: ["pipe", "pipe", "ignore"]
 }).toString().trim();
 if (!output) return null;
 const [pidStr, ppid, ...cmd] = output.trim().split(/\s+/);
 return { pid: pidStr ?? "", ppid: ppid ?? "", cmd: cmd.join(" ") };
-} else if (
+} else if (platform2 === "win32") {
 const output = execSync(
 `powershell -Command "Get-CimInstance Win32_Process -Filter 'ProcessId=${pid}' | Select-Object ProcessId,ParentProcessId,Name | Format-Table -HideTableHeaders"`,
 { stdio: ["pipe", "pipe", "ignore"] }
@@ -14124,7 +14148,7 @@ var getProcessInfo = (pid) => {
 const cmd = parts.slice(2).join(" ");
 return { pid: pidStr ?? "", ppid: ppid ?? "", cmd };
 } else {
-logWarn(`[UsageService] Unsupported platform: ${
+logWarn(`[UsageService] Unsupported platform: ${platform2}`);
 return null;
 }
 } catch {
@@ -14138,10 +14162,10 @@ var getHostInfo = (additionalMcpList) => {
 const ideConfigPaths = /* @__PURE__ */ new Set();
 for (const ide of IDEs) {
 const configPaths = getMCPConfigPaths(ide);
-configPaths.forEach((
+configPaths.forEach((path21) => ideConfigPaths.add(path21));
 }
 const uniqueAdditionalPaths = additionalMcpList.filter(
-(
+(path21) => !ideConfigPaths.has(path21)
 );
 for (const ide of IDEs) {
 const cfg = readMCPConfig(ide);
@@ -14222,8 +14246,8 @@ var getHostInfo = (additionalMcpList) => {
 const config6 = allConfigs[ide] || null;
 const ideName = ide.charAt(0).toUpperCase() + ide.slice(1) || "Unknown";
 let ideVersion = "Unknown";
-const
-const cmds = versionCommands[ideName]?.[
+const platform2 = os3.platform();
+const cmds = versionCommands[ideName]?.[platform2] ?? [];
 for (const cmd of cmds) {
 try {
 const versionOutput = cmd.includes("grep") || cmd.includes("--version") || cmd.includes("sed") ? execSync(cmd, { stdio: ["pipe", "pipe", "ignore"] }).toString().split("\n")[0] ?? "" : cmd;
@@ -14298,8 +14322,8 @@ var searchDir = async (dir, depth = 0) => {
 var findSystemMCPConfigs = async () => {
 try {
 const home = os4.homedir();
-const
-const knownDirs =
+const platform2 = os4.platform();
+const knownDirs = platform2 === "win32" ? [
 path12.join(home, ".cursor"),
 path12.join(home, "Documents"),
 path12.join(home, "Downloads")
@@ -16722,12 +16746,32 @@ For a complete security audit workflow, use the \`full-security-audit\` prompt.
 }
 };

+// src/mcp/services/McpDetectionService/CursorMcpDetectionService.ts
+import * as fs15 from "fs";
+import * as os7 from "os";
+import * as path14 from "path";
+
+// src/mcp/services/McpDetectionService/BaseMcpDetectionService.ts
+init_configs();
+import * as fs14 from "fs";
+import fetch6 from "node-fetch";
+import * as path13 from "path";
+
+// src/mcp/services/McpDetectionService/McpDetectionServiceUtils.ts
+import * as fs13 from "fs";
+import * as os6 from "os";
+
+// src/mcp/services/McpDetectionService/VscodeMcpDetectionService.ts
+import * as fs16 from "fs";
+import * as os8 from "os";
+import * as path15 from "path";
+
 // src/mcp/tools/checkForNewAvailableFixes/CheckForNewAvailableFixesTool.ts
 import { z as z40 } from "zod";

 // src/mcp/services/PathValidation.ts
-import
-import
+import fs17 from "fs";
+import path16 from "path";
 async function validatePath(inputPath) {
 logDebug("Validating MCP path", { inputPath });
 if (/^\/[a-zA-Z]:\//.test(inputPath)) {
@@ -16759,7 +16803,7 @@ async function validatePath(inputPath) {
 logError(error);
 return { isValid: false, error, path: inputPath };
 }
-const normalizedPath =
+const normalizedPath = path16.normalize(inputPath);
 if (normalizedPath.includes("..")) {
 const error = `Normalized path contains path traversal patterns: ${inputPath}`;
 logError(error);
@@ -16786,7 +16830,7 @@ async function validatePath(inputPath) {
 logDebug("Path validation successful", { inputPath });
 logDebug("Checking path existence", { inputPath });
 try {
-await
+await fs17.promises.access(inputPath);
 logDebug("Path exists and is accessible", { inputPath });
 WorkspaceService.setKnownWorkspacePath(inputPath);
 logDebug("Stored validated path in WorkspaceService", { inputPath });
@@ -17410,10 +17454,10 @@ If you wish to scan files that were recently changed in your git history call th
 init_FileUtils();
 init_GitService();
 init_configs();
-import
+import fs18 from "fs/promises";
 import nodePath from "path";
 var getLocalFiles = async ({
-path:
+path: path21,
 maxFileSize = MCP_MAX_FILE_SIZE,
 maxFiles,
 isAllFilesScan,
@@ -17421,17 +17465,17 @@ var getLocalFiles = async ({
 scanRecentlyChangedFiles
 }) => {
 logDebug(`[${scanContext}] Starting getLocalFiles`, {
-path:
+path: path21,
 maxFileSize,
 maxFiles,
 isAllFilesScan,
 scanRecentlyChangedFiles
 });
 try {
-const resolvedRepoPath = await
+const resolvedRepoPath = await fs18.realpath(path21);
 logDebug(`[${scanContext}] Resolved repository path`, {
 resolvedRepoPath,
-originalPath:
+originalPath: path21
 });
 const gitService = new GitService(resolvedRepoPath, log);
 const gitValidation = await gitService.validateRepository();
@@ -17444,7 +17488,7 @@ var getLocalFiles = async ({
 if (!gitValidation.isValid || isAllFilesScan) {
 try {
 files = await FileUtils.getLastChangedFiles({
-dir:
+dir: path21,
 maxFileSize,
 maxFiles,
 isAllFilesScan
@@ -17508,7 +17552,7 @@ var getLocalFiles = async ({
 absoluteFilePath
 );
 try {
-const fileStat = await
+const fileStat = await fs18.stat(absoluteFilePath);
 return {
 filename: nodePath.basename(absoluteFilePath),
 relativePath,
@@ -17536,15 +17580,15 @@ var getLocalFiles = async ({
 logError(`${scanContext}Unexpected error in getLocalFiles`, {
 error: error instanceof Error ? error.message : String(error),
 stack: error instanceof Error ? error.stack : void 0,
-path:
+path: path21
 });
 throw error;
 }
 };

 // src/mcp/services/LocalMobbFolderService.ts
-import
-import
+import fs19 from "fs";
+import path17 from "path";
 import { z as z39 } from "zod";
 init_GitService();
 function extractPathFromPatch(patch) {
@@ -17631,19 +17675,19 @@ var LocalMobbFolderService = class {
 "[LocalMobbFolderService] Non-git repository detected, skipping .gitignore operations"
 );
 }
-const mobbFolderPath =
+const mobbFolderPath = path17.join(
 this.repoPath,
 this.defaultMobbFolderName
 );
-if (!
+if (!fs19.existsSync(mobbFolderPath)) {
 logInfo("[LocalMobbFolderService] Creating .mobb folder", {
 mobbFolderPath
 });
-
+fs19.mkdirSync(mobbFolderPath, { recursive: true });
 } else {
 logDebug("[LocalMobbFolderService] .mobb folder already exists");
 }
-const stats =
+const stats = fs19.statSync(mobbFolderPath);
 if (!stats.isDirectory()) {
 throw new Error(`Path exists but is not a directory: ${mobbFolderPath}`);
 }
@@ -17684,13 +17728,13 @@ var LocalMobbFolderService = class {
 logDebug("[LocalMobbFolderService] Git repository validated successfully");
 } else {
 try {
-const stats =
+const stats = fs19.statSync(this.repoPath);
 if (!stats.isDirectory()) {
 throw new Error(
 `Path exists but is not a directory: ${this.repoPath}`
 );
 }
-
+fs19.accessSync(this.repoPath, fs19.constants.R_OK | fs19.constants.W_OK);
 logDebug(
 "[LocalMobbFolderService] Non-git directory validated successfully"
 );
@@ -17803,8 +17847,8 @@ var LocalMobbFolderService = class {
 mobbFolderPath,
 baseFileName
 );
-const filePath =
-await
+const filePath = path17.join(mobbFolderPath, uniqueFileName);
+await fs19.promises.writeFile(filePath, patch, "utf8");
 logInfo("[LocalMobbFolderService] Patch saved successfully", {
 filePath,
 fileName: uniqueFileName,
@@ -17861,11 +17905,11 @@ var LocalMobbFolderService = class {
 * @returns Unique filename that doesn't conflict with existing files
 */
 getUniqueFileName(folderPath, baseFileName) {
-const baseName =
-const extension =
+const baseName = path17.parse(baseFileName).name;
+const extension = path17.parse(baseFileName).ext;
 let uniqueFileName = baseFileName;
 let index = 1;
-while (
+while (fs19.existsSync(path17.join(folderPath, uniqueFileName))) {
 uniqueFileName = `${baseName}-${index}${extension}`;
 index++;
 if (index > 1e3) {
@@ -17896,18 +17940,18 @@ var LocalMobbFolderService = class {
 logDebug("[LocalMobbFolderService] Logging patch info", { fixId: fix.id });
 try {
 const mobbFolderPath = await this.getFolder();
-const patchInfoPath =
+const patchInfoPath = path17.join(mobbFolderPath, "patchInfo.md");
 const markdownContent = this.generateFixMarkdown(fix, savedPatchFileName);
 let existingContent = "";
-if (
-existingContent = await
+if (fs19.existsSync(patchInfoPath)) {
+existingContent = await fs19.promises.readFile(patchInfoPath, "utf8");
 logDebug("[LocalMobbFolderService] Existing patchInfo.md found");
 } else {
 logDebug("[LocalMobbFolderService] Creating new patchInfo.md file");
 }
 const separator = existingContent ? "\n\n================================================================================\n\n" : "";
 const updatedContent = `${markdownContent}${separator}${existingContent}`;
-await
+await fs19.promises.writeFile(patchInfoPath, updatedContent, "utf8");
 logInfo("[LocalMobbFolderService] Patch info logged successfully", {
 patchInfoPath,
 fixId: fix.id,
@@ -17938,7 +17982,7 @@ var LocalMobbFolderService = class {
 const timestamp = (/* @__PURE__ */ new Date()).toISOString();
 const patch = this.extractPatchFromFix(fix);
 const relativePatchedFilePath = patch ? extractPathFromPatch(patch) : null;
-const patchedFilePath = relativePatchedFilePath ?
+const patchedFilePath = relativePatchedFilePath ? path17.resolve(this.repoPath, relativePatchedFilePath) : null;
 const fixIdentifier = savedPatchFileName ? savedPatchFileName.replace(".patch", "") : fix.id;
 let markdown = `# Fix ${fixIdentifier}

@@ -18274,22 +18318,22 @@ var LocalMobbFolderService = class {
 // src/mcp/services/PatchApplicationService.ts
 init_configs();
 import {
-existsSync as
+existsSync as existsSync6,
 mkdirSync,
-readFileSync,
+readFileSync as readFileSync3,
 unlinkSync,
 writeFileSync
 } from "fs";
-import
+import fs20 from "fs/promises";
 import parseDiff2 from "parse-diff";
-import
+import path18 from "path";
 var PatchApplicationService = class {
 /**
 * Gets the appropriate comment syntax for a file based on its extension
 */
 static getCommentSyntax(filePath) {
-const ext =
-const basename2 =
+const ext = path18.extname(filePath).toLowerCase();
+const basename2 = path18.basename(filePath);
 const commentMap = {
 // C-style languages (single line comments)
 ".js": "//",
@@ -18492,7 +18536,7 @@ var PatchApplicationService = class {
 }
 );
 }
-const dirPath =
+const dirPath = path18.dirname(filePath);
 mkdirSync(dirPath, { recursive: true });
 writeFileSync(filePath, finalContent, "utf8");
 return filePath;
@@ -18776,9 +18820,9 @@ var PatchApplicationService = class {
 continue;
 }
 try {
-const absolutePath =
-if (
-const stats = await
+const absolutePath = path18.resolve(repositoryPath, targetFile);
+if (existsSync6(absolutePath)) {
+const stats = await fs20.stat(absolutePath);
 const fileModTime = stats.mtime.getTime();
 if (fileModTime > scanStartTime) {
 logError(
@@ -18819,7 +18863,7 @@ var PatchApplicationService = class {
 const appliedFixes = [];
 const failedFixes = [];
 const skippedFixes = [];
-const resolvedRepoPath = await
+const resolvedRepoPath = await fs20.realpath(repositoryPath);
 logInfo(
 `[${scanContext}] Starting patch application for ${fixes.length} fixes`,
 {
@@ -18967,11 +19011,11 @@ var PatchApplicationService = class {
 }) {
 const sanitizedRepoPath = String(repositoryPath || "").replace("\0", "").replace(/^(\.\.(\/|\\))+/, "");
 const sanitizedTargetFile = String(targetFile || "").replace("\0", "").replace(/^(\.\.(\/|\\))+/, "");
-const absoluteFilePath =
+const absoluteFilePath = path18.resolve(
 sanitizedRepoPath,
 sanitizedTargetFile
 );
-const relativePath =
+const relativePath = path18.relative(sanitizedRepoPath, absoluteFilePath);
 if (relativePath.startsWith("..")) {
 throw new Error(
 `Security violation: target file ${targetFile} resolves outside repository`
@@ -18982,7 +19026,7 @@ var PatchApplicationService = class {
 targetFile: sanitizedTargetFile,
 absoluteFilePath,
 relativePath,
-exists:
+exists: existsSync6(absoluteFilePath)
 });
 return { absoluteFilePath, relativePath };
 }
@@ -19005,7 +19049,7 @@ var PatchApplicationService = class {
 fix,
 scanContext
 });
-appliedFiles.push(
+appliedFiles.push(path18.relative(repositoryPath, actualPath));
 logDebug(`[${scanContext}] Created new file: ${relativePath}`);
 }
 /**
@@ -19017,7 +19061,7 @@ var PatchApplicationService = class {
 appliedFiles,
 scanContext
 }) {
-if (
+if (existsSync6(absoluteFilePath)) {
 unlinkSync(absoluteFilePath);
 appliedFiles.push(relativePath);
 logDebug(`[${scanContext}] Deleted file: ${relativePath}`);
@@ -19036,12 +19080,12 @@ var PatchApplicationService = class {
 appliedFiles,
 scanContext
 }) {
-if (!
+if (!existsSync6(absoluteFilePath)) {
 throw new Error(
 `Target file does not exist: ${targetFile} (resolved to: ${absoluteFilePath})`
 );
 }
-const originalContent =
+const originalContent = readFileSync3(absoluteFilePath, "utf8");
 const modifiedContent = this.applyHunksToFile(
 originalContent,
 fileDiff.chunks
@@ -19053,7 +19097,7 @@ var PatchApplicationService = class {
 fix,
 scanContext
 });
-appliedFiles.push(
+appliedFiles.push(path18.relative(repositoryPath, actualPath));
 logDebug(`[${scanContext}] Modified file: ${relativePath}`);
 }
 }
@@ -19248,8 +19292,8 @@ init_configs();

 // src/mcp/services/FileOperations.ts
 init_FileUtils();
-import
-import
+import fs21 from "fs";
+import path19 from "path";
 import AdmZip2 from "adm-zip";
 var FileOperations = class {
 /**
@@ -19269,10 +19313,10 @@ var FileOperations = class {
 let packedFilesCount = 0;
 const packedFiles = [];
 const excludedFiles = [];
-const resolvedRepoPath =
+const resolvedRepoPath = path19.resolve(repositoryPath);
 for (const filepath of fileList) {
-const absoluteFilepath =
-const resolvedFilePath =
+const absoluteFilepath = path19.join(repositoryPath, filepath);
+const resolvedFilePath = path19.resolve(absoluteFilepath);
 if (!resolvedFilePath.startsWith(resolvedRepoPath)) {
 const reason = "potential path traversal security risk";
 logDebug(`[FileOperations] Skipping ${filepath} due to ${reason}`);
@@ -19319,11 +19363,11 @@ var FileOperations = class {
 fileList,
 repositoryPath
 }) {
-const resolvedRepoPath =
+const resolvedRepoPath = path19.resolve(repositoryPath);
 const validatedPaths = [];
 for (const filepath of fileList) {
-const absoluteFilepath =
-const resolvedFilePath =
+const absoluteFilepath = path19.join(repositoryPath, filepath);
+const resolvedFilePath = path19.resolve(absoluteFilepath);
 if (!resolvedFilePath.startsWith(resolvedRepoPath)) {
 logDebug(
 `[FileOperations] Rejecting ${filepath} - path traversal attempt detected`
@@ -19331,7 +19375,7 @@ var FileOperations = class {
 continue;
 }
 try {
-await
+await fs21.promises.access(absoluteFilepath, fs21.constants.R_OK);
 validatedPaths.push(filepath);
 } catch (error) {
 logDebug(
@@ -19350,8 +19394,8 @@ var FileOperations = class {
 const fileDataArray = [];
 for (const absolutePath of filePaths) {
 try {
-const content = await
-const relativePath =
+const content = await fs21.promises.readFile(absolutePath);
+const relativePath = path19.basename(absolutePath);
 fileDataArray.push({
 relativePath,
 absolutePath,
@@ -19376,7 +19420,7 @@ var FileOperations = class {
 relativeFilepath
 }) {
 try {
-return await
+return await fs21.promises.readFile(absoluteFilepath);
 } catch (fsError) {
 logError(
 `[FileOperations] Failed to read ${relativeFilepath} from filesystem: ${fsError}`
@@ -19663,14 +19707,14 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
 * since the last scan.
 */
 async scanForSecurityVulnerabilities({
-path:
+path: path21,
 isAllDetectionRulesScan,
 isAllFilesScan,
 scanContext
 }) {
 this.hasAuthenticationFailed = false;
 logDebug(`[${scanContext}] Scanning for new security vulnerabilities`, {
-path:
+path: path21
 });
 if (!this.gqlClient) {
 logInfo(`[${scanContext}] No GQL client found, skipping scan`);
@@ -19686,11 +19730,11 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
 }
 logDebug(
 `[${scanContext}] Connected to the API, assembling list of files to scan`,
-{ path:
+{ path: path21 }
 );
 const isBackgroundScan = scanContext === ScanContext.BACKGROUND_INITIAL || scanContext === ScanContext.BACKGROUND_PERIODIC;
 const files = await getLocalFiles({
-path:
+path: path21,
 isAllFilesScan,
 scanContext,
 scanRecentlyChangedFiles: !isBackgroundScan
@@ -19716,13 +19760,13 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
 });
 const { fixReportId, projectId } = await scanFiles({
 fileList: filesToScan.map((file) => file.relativePath),
-repositoryPath:
+repositoryPath: path21,
 gqlClient: this.gqlClient,
 isAllDetectionRulesScan,
 scanContext
 });
 logInfo(
-`[${scanContext}] Security scan completed for ${
+`[${scanContext}] Security scan completed for ${path21} reportId: ${fixReportId} projectId: ${projectId}`
 );
 if (isAllFilesScan) {
 return;
@@ -20016,13 +20060,13 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
 });
 return scannedFiles.some((file) => file.relativePath === fixFile);
 }
-async getFreshFixes({ path:
+async getFreshFixes({ path: path21 }) {
 const scanContext = ScanContext.USER_REQUEST;
-logDebug(`[${scanContext}] Getting fresh fixes`, { path:
-if (this.path !==
-this.path =
+logDebug(`[${scanContext}] Getting fresh fixes`, { path: path21 });
+if (this.path !== path21) {
+this.path = path21;
 this.reset();
-logInfo(`[${scanContext}] Reset service state for new path`, { path:
+logInfo(`[${scanContext}] Reset service state for new path`, { path: path21 });
 }
 try {
 this.gqlClient = await createAuthenticatedMcpGQLClient();
@@ -20040,7 +20084,7 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
 }
 throw error;
 }
-this.triggerScan({ path:
+this.triggerScan({ path: path21, gqlClient: this.gqlClient });
 let isMvsAutoFixEnabled = null;
 try {
 isMvsAutoFixEnabled = await this.gqlClient.getMvsAutoFixSettings();
@@ -20074,33 +20118,33 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
 return noFreshFixesPrompt;
 }
 triggerScan({
-path:
+path: path21,
 gqlClient
 }) {
-if (this.path !==
-this.path =
+if (this.path !== path21) {
+this.path = path21;
 this.reset();
-logInfo(`Reset service state for new path in triggerScan`, { path:
+logInfo(`Reset service state for new path in triggerScan`, { path: path21 });
 }
 this.gqlClient = gqlClient;
 if (!this.intervalId) {
-this.startPeriodicScanning(
-this.executeInitialScan(
-void this.executeInitialFullScan(
+this.startPeriodicScanning(path21);
+this.executeInitialScan(path21);
+void this.executeInitialFullScan(path21);
 }
 }
-startPeriodicScanning(
+startPeriodicScanning(path21) {
 const scanContext = ScanContext.BACKGROUND_PERIODIC;
 logDebug(
 `[${scanContext}] Starting periodic scan for new security vulnerabilities`,
 {
-path:
+path: path21
 }
 );
 this.intervalId = setInterval(() => {
-logDebug(`[${scanContext}] Triggering periodic security scan`, { path:
+logDebug(`[${scanContext}] Triggering periodic security scan`, { path: path21 });
 this.scanForSecurityVulnerabilities({
-path:
+path: path21,
 scanContext
 }).catch((error) => {
 logError(`[${scanContext}] Error during periodic security scan`, {
@@ -20109,45 +20153,45 @@ var _CheckForNewAvailableFixesService = class _CheckForNewAvailableFixesService
 });
 }, MCP_PERIODIC_CHECK_INTERVAL);
 }
-async executeInitialFullScan(
+async executeInitialFullScan(path21) {
 const scanContext = ScanContext.FULL_SCAN;
-logDebug(`[${scanContext}] Triggering initial full security scan`, { path:
+logDebug(`[${scanContext}] Triggering initial full security scan`, { path: path21 });
 logDebug(`[${scanContext}] Full scan paths scanned`, {
 fullScanPathsScanned: this.fullScanPathsScanned
 });
-if (this.fullScanPathsScanned.includes(
+if (this.fullScanPathsScanned.includes(path21)) {
 logDebug(`[${scanContext}] Full scan already executed for this path`, {
-path:
+path: path21
 });
 return;
 }
 configStore.set("fullScanPathsScanned", [
 ...this.fullScanPathsScanned,
-
+path21
 ]);
 try {
 await this.scanForSecurityVulnerabilities({
-path:
+path: path21,
 isAllFilesScan: true,
 isAllDetectionRulesScan: true,
 scanContext: ScanContext.FULL_SCAN
 });
-if (!this.fullScanPathsScanned.includes(
-this.fullScanPathsScanned.push(
+if (!this.fullScanPathsScanned.includes(path21)) {
+this.fullScanPathsScanned.push(path21);
 configStore.set("fullScanPathsScanned", this.fullScanPathsScanned);
 }
-logInfo(`[${scanContext}] Full scan completed`, { path:
+logInfo(`[${scanContext}] Full scan completed`, { path: path21 });
 } catch (error) {
 logError(`[${scanContext}] Error during initial full security scan`, {
 error
 });
 }
 }
-executeInitialScan(
+executeInitialScan(path21) {
 const scanContext = ScanContext.BACKGROUND_INITIAL;
-logDebug(`[${scanContext}] Triggering initial security scan`, { path:
+logDebug(`[${scanContext}] Triggering initial security scan`, { path: path21 });
 this.scanForSecurityVulnerabilities({
-path:
+path: path21,
 scanContext: ScanContext.BACKGROUND_INITIAL
 }).catch((error) => {
 logError(`[${scanContext}] Error during initial security scan`, { error });
@@ -20244,9 +20288,9 @@ Example payload:
 `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
 );
 }
-const
+const path21 = pathValidationResult.path;
 const resultText = await this.newFixesService.getFreshFixes({
-path:
+path: path21
 });
 logInfo("CheckForNewAvailableFixesTool execution completed", {
 resultText
@@ -20423,8 +20467,8 @@ Call this tool instead of ${MCP_TOOL_SCAN_AND_FIX_VULNERABILITIES} when you only
 `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
 );
 }
-const
-const gitService = new GitService(
+const path21 = pathValidationResult.path;
+const gitService = new GitService(path21, log);
 const gitValidation = await gitService.validateRepository();
 if (!gitValidation.isValid) {
 throw new Error(`Invalid git repository: ${gitValidation.error}`);
@@ -20812,9 +20856,9 @@ Example payload:
 `Invalid path: potential security risk detected in path: ${pathValidationResult.error}`
 );
 }
-const
+const path21 = pathValidationResult.path;
 const files = await getLocalFiles({
-path:
+path: path21,
 maxFileSize: MCP_MAX_FILE_SIZE,
 maxFiles: args.maxFiles,
 scanContext: ScanContext.USER_REQUEST,
@@ -20834,7 +20878,7 @@ Example payload:
 try {
 const fixResult = await this.vulnerabilityFixService.processVulnerabilities({
 fileList: files.map((file) => file.relativePath),
-repositoryPath:
+repositoryPath: path21,
 offset: args.offset,
 limit: args.limit,
 isRescan: args.rescan || !!args.maxFiles
@@ -20938,7 +20982,7 @@ var mcpHandler = async (_args) => {
 };

 // src/args/commands/review.ts
-import
+import fs22 from "fs";
 import chalk9 from "chalk";
 function reviewBuilder(yargs2) {
 return yargs2.option("f", {
@@ -20975,7 +21019,7 @@ function reviewBuilder(yargs2) {
 ).help();
 }
 function validateReviewOptions(argv) {
-if (!
+if (!fs22.existsSync(argv.f)) {
 throw new CliError(`
 Can't access ${chalk9.bold(argv.f)}`);
 }
@@ -21049,7 +21093,7 @@ async function addScmTokenHandler(args) {

 // src/args/commands/upload_ai_blame.ts
 import fsPromises3 from "fs/promises";
-import
+import path20 from "path";
 import chalk10 from "chalk";
 import Configstore6 from "configstore";
 import { withFile } from "tmp-promise";
@@ -21159,8 +21203,8 @@ async function uploadAiBlameHandler(args, exitOnError = true) {
 throw new Error(errorMsg);
 }
 sessions.push({
-promptFileName:
-inferenceFileName:
+promptFileName: path20.basename(promptPath),
+inferenceFileName: path20.basename(inferencePath),
 aiResponseAt: responseTimes[i] || nowIso,
 model: models[i],
 toolName: tools[i],