mobbdev 1.1.37 → 1.1.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/args/commands/upload_ai_blame.d.mts +28 -28
- package/dist/args/commands/upload_ai_blame.mjs +12 -13
- package/dist/index.mjs +802 -52
- package/package.json +3 -3
|
@@ -54,26 +54,18 @@ declare const PromptItemZ: z.ZodObject<{
|
|
|
54
54
|
name: string;
|
|
55
55
|
parameters: string;
|
|
56
56
|
result: string;
|
|
57
|
-
accepted?: boolean | undefined;
|
|
58
57
|
rawArguments?: string | undefined;
|
|
58
|
+
accepted?: boolean | undefined;
|
|
59
59
|
}, {
|
|
60
60
|
name: string;
|
|
61
61
|
parameters: string;
|
|
62
62
|
result: string;
|
|
63
|
-
accepted?: boolean | undefined;
|
|
64
63
|
rawArguments?: string | undefined;
|
|
64
|
+
accepted?: boolean | undefined;
|
|
65
65
|
}>>;
|
|
66
66
|
}, "strip", z.ZodTypeAny, {
|
|
67
67
|
type: "USER_PROMPT" | "AI_RESPONSE" | "TOOL_EXECUTION" | "AI_THINKING";
|
|
68
|
-
tool?: {
|
|
69
|
-
name: string;
|
|
70
|
-
parameters: string;
|
|
71
|
-
result: string;
|
|
72
|
-
accepted?: boolean | undefined;
|
|
73
|
-
rawArguments?: string | undefined;
|
|
74
|
-
} | undefined;
|
|
75
68
|
date?: Date | undefined;
|
|
76
|
-
text?: string | undefined;
|
|
77
69
|
attachedFiles?: {
|
|
78
70
|
relativePath: string;
|
|
79
71
|
startLine?: number | undefined;
|
|
@@ -82,17 +74,17 @@ declare const PromptItemZ: z.ZodObject<{
|
|
|
82
74
|
inputCount: number;
|
|
83
75
|
outputCount: number;
|
|
84
76
|
} | undefined;
|
|
85
|
-
|
|
86
|
-
type: "USER_PROMPT" | "AI_RESPONSE" | "TOOL_EXECUTION" | "AI_THINKING";
|
|
77
|
+
text?: string | undefined;
|
|
87
78
|
tool?: {
|
|
88
79
|
name: string;
|
|
89
80
|
parameters: string;
|
|
90
81
|
result: string;
|
|
91
|
-
accepted?: boolean | undefined;
|
|
92
82
|
rawArguments?: string | undefined;
|
|
83
|
+
accepted?: boolean | undefined;
|
|
93
84
|
} | undefined;
|
|
85
|
+
}, {
|
|
86
|
+
type: "USER_PROMPT" | "AI_RESPONSE" | "TOOL_EXECUTION" | "AI_THINKING";
|
|
94
87
|
date?: Date | undefined;
|
|
95
|
-
text?: string | undefined;
|
|
96
88
|
attachedFiles?: {
|
|
97
89
|
relativePath: string;
|
|
98
90
|
startLine?: number | undefined;
|
|
@@ -101,6 +93,14 @@ declare const PromptItemZ: z.ZodObject<{
|
|
|
101
93
|
inputCount: number;
|
|
102
94
|
outputCount: number;
|
|
103
95
|
} | undefined;
|
|
96
|
+
text?: string | undefined;
|
|
97
|
+
tool?: {
|
|
98
|
+
name: string;
|
|
99
|
+
parameters: string;
|
|
100
|
+
result: string;
|
|
101
|
+
rawArguments?: string | undefined;
|
|
102
|
+
accepted?: boolean | undefined;
|
|
103
|
+
} | undefined;
|
|
104
104
|
}>;
|
|
105
105
|
type PromptItem = z.infer<typeof PromptItemZ>;
|
|
106
106
|
declare const PromptItemArrayZ: z.ZodArray<z.ZodObject<{
|
|
@@ -137,26 +137,18 @@ declare const PromptItemArrayZ: z.ZodArray<z.ZodObject<{
|
|
|
137
137
|
name: string;
|
|
138
138
|
parameters: string;
|
|
139
139
|
result: string;
|
|
140
|
-
accepted?: boolean | undefined;
|
|
141
140
|
rawArguments?: string | undefined;
|
|
141
|
+
accepted?: boolean | undefined;
|
|
142
142
|
}, {
|
|
143
143
|
name: string;
|
|
144
144
|
parameters: string;
|
|
145
145
|
result: string;
|
|
146
|
-
accepted?: boolean | undefined;
|
|
147
146
|
rawArguments?: string | undefined;
|
|
147
|
+
accepted?: boolean | undefined;
|
|
148
148
|
}>>;
|
|
149
149
|
}, "strip", z.ZodTypeAny, {
|
|
150
150
|
type: "USER_PROMPT" | "AI_RESPONSE" | "TOOL_EXECUTION" | "AI_THINKING";
|
|
151
|
-
tool?: {
|
|
152
|
-
name: string;
|
|
153
|
-
parameters: string;
|
|
154
|
-
result: string;
|
|
155
|
-
accepted?: boolean | undefined;
|
|
156
|
-
rawArguments?: string | undefined;
|
|
157
|
-
} | undefined;
|
|
158
151
|
date?: Date | undefined;
|
|
159
|
-
text?: string | undefined;
|
|
160
152
|
attachedFiles?: {
|
|
161
153
|
relativePath: string;
|
|
162
154
|
startLine?: number | undefined;
|
|
@@ -165,17 +157,17 @@ declare const PromptItemArrayZ: z.ZodArray<z.ZodObject<{
|
|
|
165
157
|
inputCount: number;
|
|
166
158
|
outputCount: number;
|
|
167
159
|
} | undefined;
|
|
168
|
-
|
|
169
|
-
type: "USER_PROMPT" | "AI_RESPONSE" | "TOOL_EXECUTION" | "AI_THINKING";
|
|
160
|
+
text?: string | undefined;
|
|
170
161
|
tool?: {
|
|
171
162
|
name: string;
|
|
172
163
|
parameters: string;
|
|
173
164
|
result: string;
|
|
174
|
-
accepted?: boolean | undefined;
|
|
175
165
|
rawArguments?: string | undefined;
|
|
166
|
+
accepted?: boolean | undefined;
|
|
176
167
|
} | undefined;
|
|
168
|
+
}, {
|
|
169
|
+
type: "USER_PROMPT" | "AI_RESPONSE" | "TOOL_EXECUTION" | "AI_THINKING";
|
|
177
170
|
date?: Date | undefined;
|
|
178
|
-
text?: string | undefined;
|
|
179
171
|
attachedFiles?: {
|
|
180
172
|
relativePath: string;
|
|
181
173
|
startLine?: number | undefined;
|
|
@@ -184,6 +176,14 @@ declare const PromptItemArrayZ: z.ZodArray<z.ZodObject<{
|
|
|
184
176
|
inputCount: number;
|
|
185
177
|
outputCount: number;
|
|
186
178
|
} | undefined;
|
|
179
|
+
text?: string | undefined;
|
|
180
|
+
tool?: {
|
|
181
|
+
name: string;
|
|
182
|
+
parameters: string;
|
|
183
|
+
result: string;
|
|
184
|
+
rawArguments?: string | undefined;
|
|
185
|
+
accepted?: boolean | undefined;
|
|
186
|
+
} | undefined;
|
|
187
187
|
}>, "many">;
|
|
188
188
|
type PromptItemArray = z.infer<typeof PromptItemArrayZ>;
|
|
189
189
|
type UploadAiBlameOptions = {
|
|
@@ -12,7 +12,7 @@ var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "sy
|
|
|
12
12
|
|
|
13
13
|
// src/features/analysis/scm/env.ts
|
|
14
14
|
import { z as z16 } from "zod";
|
|
15
|
-
var EnvVariablesZod, GITLAB_API_TOKEN, GITHUB_API_TOKEN, GIT_PROXY_HOST, MAX_UPLOAD_FILE_SIZE_MB;
|
|
15
|
+
var EnvVariablesZod, GITLAB_API_TOKEN, GITHUB_API_TOKEN, GIT_PROXY_HOST, MAX_UPLOAD_FILE_SIZE_MB, GITHUB_API_CONCURRENCY;
|
|
16
16
|
var init_env = __esm({
|
|
17
17
|
"src/features/analysis/scm/env.ts"() {
|
|
18
18
|
"use strict";
|
|
@@ -20,13 +20,15 @@ var init_env = __esm({
|
|
|
20
20
|
GITLAB_API_TOKEN: z16.string().optional(),
|
|
21
21
|
GITHUB_API_TOKEN: z16.string().optional(),
|
|
22
22
|
GIT_PROXY_HOST: z16.string().optional().default("http://tinyproxy:8888"),
|
|
23
|
-
MAX_UPLOAD_FILE_SIZE_MB: z16.coerce.number().gt(0).default(5)
|
|
23
|
+
MAX_UPLOAD_FILE_SIZE_MB: z16.coerce.number().gt(0).default(5),
|
|
24
|
+
GITHUB_API_CONCURRENCY: z16.coerce.number().gt(0).optional().default(10)
|
|
24
25
|
});
|
|
25
26
|
({
|
|
26
27
|
GITLAB_API_TOKEN,
|
|
27
28
|
GITHUB_API_TOKEN,
|
|
28
29
|
GIT_PROXY_HOST,
|
|
29
|
-
MAX_UPLOAD_FILE_SIZE_MB
|
|
30
|
+
MAX_UPLOAD_FILE_SIZE_MB,
|
|
31
|
+
GITHUB_API_CONCURRENCY
|
|
30
32
|
} = EnvVariablesZod.parse(process.env));
|
|
31
33
|
}
|
|
32
34
|
});
|
|
@@ -5915,11 +5917,17 @@ var REPORT_DEFAULT_FILE_NAME = "report.json";
|
|
|
5915
5917
|
init_env();
|
|
5916
5918
|
|
|
5917
5919
|
// src/features/analysis/scm/github/GithubSCMLib.ts
|
|
5918
|
-
|
|
5920
|
+
init_env();
|
|
5921
|
+
import pLimit2 from "p-limit";
|
|
5919
5922
|
import { z as z22 } from "zod";
|
|
5920
5923
|
|
|
5921
5924
|
// src/features/analysis/scm/github/github.ts
|
|
5922
5925
|
import { RequestError } from "@octokit/request-error";
|
|
5926
|
+
import pLimit from "p-limit";
|
|
5927
|
+
|
|
5928
|
+
// src/utils/contextLogger.ts
|
|
5929
|
+
import debugModule from "debug";
|
|
5930
|
+
var debug4 = debugModule("mobb:shared");
|
|
5923
5931
|
|
|
5924
5932
|
// src/features/analysis/scm/github/utils/encrypt_secret.ts
|
|
5925
5933
|
import sodium from "libsodium-wrappers";
|
|
@@ -5928,9 +5936,6 @@ import sodium from "libsodium-wrappers";
|
|
|
5928
5936
|
import { Octokit } from "octokit";
|
|
5929
5937
|
import { fetch as fetch2, ProxyAgent } from "undici";
|
|
5930
5938
|
|
|
5931
|
-
// src/features/analysis/scm/github/GithubSCMLib.ts
|
|
5932
|
-
var GITHUB_COMMIT_FETCH_CONCURRENCY = parseInt(process.env["GITHUB_COMMIT_CONCURRENCY"] || "10", 10) || 10;
|
|
5933
|
-
|
|
5934
5939
|
// src/features/analysis/scm/gitlab/gitlab.ts
|
|
5935
5940
|
import querystring3 from "querystring";
|
|
5936
5941
|
import {
|
|
@@ -5946,12 +5951,6 @@ import {
|
|
|
5946
5951
|
fetch as undiciFetch,
|
|
5947
5952
|
ProxyAgent as ProxyAgent2
|
|
5948
5953
|
} from "undici";
|
|
5949
|
-
|
|
5950
|
-
// src/utils/contextLogger.ts
|
|
5951
|
-
import debugModule from "debug";
|
|
5952
|
-
var debug4 = debugModule("mobb:shared");
|
|
5953
|
-
|
|
5954
|
-
// src/features/analysis/scm/gitlab/gitlab.ts
|
|
5955
5954
|
init_env();
|
|
5956
5955
|
|
|
5957
5956
|
// src/features/analysis/scm/gitlab/types.ts
|
package/dist/index.mjs
CHANGED
|
@@ -12,7 +12,7 @@ var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "sy
|
|
|
12
12
|
|
|
13
13
|
// src/features/analysis/scm/env.ts
|
|
14
14
|
import { z as z15 } from "zod";
|
|
15
|
-
var EnvVariablesZod, GITLAB_API_TOKEN, GITHUB_API_TOKEN, GIT_PROXY_HOST, MAX_UPLOAD_FILE_SIZE_MB;
|
|
15
|
+
var EnvVariablesZod, GITLAB_API_TOKEN, GITHUB_API_TOKEN, GIT_PROXY_HOST, MAX_UPLOAD_FILE_SIZE_MB, GITHUB_API_CONCURRENCY;
|
|
16
16
|
var init_env = __esm({
|
|
17
17
|
"src/features/analysis/scm/env.ts"() {
|
|
18
18
|
"use strict";
|
|
@@ -20,13 +20,15 @@ var init_env = __esm({
|
|
|
20
20
|
GITLAB_API_TOKEN: z15.string().optional(),
|
|
21
21
|
GITHUB_API_TOKEN: z15.string().optional(),
|
|
22
22
|
GIT_PROXY_HOST: z15.string().optional().default("http://tinyproxy:8888"),
|
|
23
|
-
MAX_UPLOAD_FILE_SIZE_MB: z15.coerce.number().gt(0).default(5)
|
|
23
|
+
MAX_UPLOAD_FILE_SIZE_MB: z15.coerce.number().gt(0).default(5),
|
|
24
|
+
GITHUB_API_CONCURRENCY: z15.coerce.number().gt(0).optional().default(10)
|
|
24
25
|
});
|
|
25
26
|
({
|
|
26
27
|
GITLAB_API_TOKEN,
|
|
27
28
|
GITHUB_API_TOKEN,
|
|
28
29
|
GIT_PROXY_HOST,
|
|
29
|
-
MAX_UPLOAD_FILE_SIZE_MB
|
|
30
|
+
MAX_UPLOAD_FILE_SIZE_MB,
|
|
31
|
+
GITHUB_API_CONCURRENCY
|
|
30
32
|
} = EnvVariablesZod.parse(process.env));
|
|
31
33
|
}
|
|
32
34
|
});
|
|
@@ -1383,19 +1385,16 @@ import StreamZip from "node-stream-zip";
|
|
|
1383
1385
|
import tmp from "tmp";
|
|
1384
1386
|
|
|
1385
1387
|
// src/features/analysis/scm/errors.ts
|
|
1386
|
-
var InvalidRepoUrlError = class extends Error {
|
|
1387
|
-
constructor(m) {
|
|
1388
|
-
super(m);
|
|
1389
|
-
}
|
|
1390
|
-
};
|
|
1391
1388
|
var InvalidAccessTokenError = class extends Error {
|
|
1392
|
-
constructor(m) {
|
|
1389
|
+
constructor(m, scmType) {
|
|
1393
1390
|
super(m);
|
|
1391
|
+
this.scmType = scmType;
|
|
1394
1392
|
}
|
|
1395
1393
|
};
|
|
1396
1394
|
var InvalidUrlPatternError = class extends Error {
|
|
1397
|
-
constructor(m) {
|
|
1395
|
+
constructor(m, scmType) {
|
|
1398
1396
|
super(m);
|
|
1397
|
+
this.scmType = scmType;
|
|
1399
1398
|
}
|
|
1400
1399
|
};
|
|
1401
1400
|
var RefNotFoundError = class extends Error {
|
|
@@ -1403,12 +1402,38 @@ var RefNotFoundError = class extends Error {
|
|
|
1403
1402
|
super(m);
|
|
1404
1403
|
}
|
|
1405
1404
|
};
|
|
1405
|
+
var ScmBadCredentialsError = class extends Error {
|
|
1406
|
+
constructor(m, scmType) {
|
|
1407
|
+
super(m);
|
|
1408
|
+
this.scmType = scmType;
|
|
1409
|
+
}
|
|
1410
|
+
};
|
|
1411
|
+
var InvalidRepoUrlError = class extends Error {
|
|
1412
|
+
constructor(m, scmType) {
|
|
1413
|
+
super(m);
|
|
1414
|
+
this.scmType = scmType;
|
|
1415
|
+
}
|
|
1416
|
+
};
|
|
1406
1417
|
var RepoNoTokenAccessError = class extends Error {
|
|
1407
1418
|
constructor(m, scmType) {
|
|
1408
1419
|
super(m);
|
|
1409
1420
|
this.scmType = scmType;
|
|
1410
1421
|
}
|
|
1411
1422
|
};
|
|
1423
|
+
var RateLimitError = class extends Error {
|
|
1424
|
+
constructor(m, scmType, retryAfter) {
|
|
1425
|
+
super(m);
|
|
1426
|
+
this.scmType = scmType;
|
|
1427
|
+
this.retryAfter = retryAfter;
|
|
1428
|
+
}
|
|
1429
|
+
};
|
|
1430
|
+
var NetworkError = class extends Error {
|
|
1431
|
+
constructor(m, scmType, errorCode) {
|
|
1432
|
+
super(m);
|
|
1433
|
+
this.scmType = scmType;
|
|
1434
|
+
this.errorCode = errorCode;
|
|
1435
|
+
}
|
|
1436
|
+
};
|
|
1412
1437
|
|
|
1413
1438
|
// src/features/analysis/scm/utils/index.ts
|
|
1414
1439
|
import { z as z14 } from "zod";
|
|
@@ -6820,6 +6845,35 @@ var SCMLib = class {
|
|
|
6820
6845
|
password: accessToken
|
|
6821
6846
|
});
|
|
6822
6847
|
}
|
|
6848
|
+
/**
|
|
6849
|
+
* Search for PRs with optional filters and sorting.
|
|
6850
|
+
* IMPORTANT: Sort order must remain consistent across paginated requests
|
|
6851
|
+
* for cursor-based pagination to work correctly.
|
|
6852
|
+
*
|
|
6853
|
+
* Default implementation uses getSubmitRequests and applies filters/sorting in-memory.
|
|
6854
|
+
* Override in subclasses for provider-specific optimizations (e.g., GitHub Search API).
|
|
6855
|
+
*
|
|
6856
|
+
* @param params - Search parameters including filters, sort, and pagination
|
|
6857
|
+
* @returns Paginated search results with cursor
|
|
6858
|
+
*/
|
|
6859
|
+
async searchSubmitRequests(_params) {
|
|
6860
|
+
throw new Error(
|
|
6861
|
+
"searchSubmitRequests is not implemented for this SCM provider"
|
|
6862
|
+
);
|
|
6863
|
+
}
|
|
6864
|
+
/**
|
|
6865
|
+
* Search repositories with pagination support.
|
|
6866
|
+
* IMPORTANT: Sort order must remain consistent across paginated requests
|
|
6867
|
+
* for cursor-based pagination to work correctly.
|
|
6868
|
+
*
|
|
6869
|
+
* Must be overridden in subclasses with provider-specific implementations.
|
|
6870
|
+
*
|
|
6871
|
+
* @param params - Search parameters including sort and pagination
|
|
6872
|
+
* @returns Paginated search results with cursor
|
|
6873
|
+
*/
|
|
6874
|
+
async searchRepos(_params) {
|
|
6875
|
+
throw new Error("searchRepos is not implemented for this SCM provider");
|
|
6876
|
+
}
|
|
6823
6877
|
/**
|
|
6824
6878
|
* Fetches commits for multiple PRs in a single batch request.
|
|
6825
6879
|
* This is an optimization that not all SCM providers may support efficiently.
|
|
@@ -6832,6 +6886,31 @@ var SCMLib = class {
|
|
|
6832
6886
|
async getPrCommitsBatch(_repoUrl, _prNumbers) {
|
|
6833
6887
|
throw new Error("getPrCommitsBatch not implemented for this SCM provider");
|
|
6834
6888
|
}
|
|
6889
|
+
/**
|
|
6890
|
+
* Fetches additions and deletions counts for multiple PRs in batch.
|
|
6891
|
+
* More efficient than fetching individual PR details.
|
|
6892
|
+
*
|
|
6893
|
+
* @param repoUrl - Repository URL
|
|
6894
|
+
* @param prNumbers - Array of PR numbers to fetch metrics for
|
|
6895
|
+
* @returns Map of PR number to additions/deletions count
|
|
6896
|
+
*/
|
|
6897
|
+
async getPrAdditionsDeletionsBatch(_repoUrl, _prNumbers) {
|
|
6898
|
+
throw new Error(
|
|
6899
|
+
"getPrAdditionsDeletionsBatch not implemented for this SCM provider"
|
|
6900
|
+
);
|
|
6901
|
+
}
|
|
6902
|
+
/**
|
|
6903
|
+
* Batch fetch PR data (additions/deletions + comments) for multiple PRs.
|
|
6904
|
+
* Only implemented for GitHub (via GraphQL). Other providers should override if supported.
|
|
6905
|
+
* This is more efficient than calling getPrAdditionsDeletionsBatch separately.
|
|
6906
|
+
*
|
|
6907
|
+
* @param _repoUrl - Repository URL
|
|
6908
|
+
* @param _prNumbers - Array of PR numbers to fetch data for
|
|
6909
|
+
* @returns Map of PR number to { changedLines, comments }
|
|
6910
|
+
*/
|
|
6911
|
+
async getPrDataBatch(_repoUrl, _prNumbers) {
|
|
6912
|
+
throw new Error("getPrDataBatch not implemented for this SCM provider");
|
|
6913
|
+
}
|
|
6835
6914
|
getAccessToken() {
|
|
6836
6915
|
return this.accessToken || "";
|
|
6837
6916
|
}
|
|
@@ -7070,6 +7149,12 @@ var AdoSCMLib = class extends SCMLib {
|
|
|
7070
7149
|
async getSubmitRequests(_repoUrl) {
|
|
7071
7150
|
throw new Error("getSubmitRequests not implemented for ADO");
|
|
7072
7151
|
}
|
|
7152
|
+
async searchSubmitRequests(_params) {
|
|
7153
|
+
throw new Error("searchSubmitRequests not implemented for ADO");
|
|
7154
|
+
}
|
|
7155
|
+
async searchRepos(_params) {
|
|
7156
|
+
throw new Error("searchRepos not implemented for ADO");
|
|
7157
|
+
}
|
|
7073
7158
|
// TODO: Add comprehensive tests for getPullRequestMetrics (ADO)
|
|
7074
7159
|
// See clients/cli/src/features/analysis/scm/__tests__/github.test.ts:589-648 for reference
|
|
7075
7160
|
async getPullRequestMetrics(_prNumber) {
|
|
@@ -7491,7 +7576,7 @@ var BitbucketSCMLib = class extends SCMLib {
|
|
|
7491
7576
|
return String(z20.number().parse(pullRequestRes.id));
|
|
7492
7577
|
} catch (e) {
|
|
7493
7578
|
console.warn(
|
|
7494
|
-
`error creating pull request for BB. Try number ${i + 1}`,
|
|
7579
|
+
`error creating pull request for BB. Try number ${String(i + 1).replace(/\n|\r/g, "")}`,
|
|
7495
7580
|
e
|
|
7496
7581
|
);
|
|
7497
7582
|
await setTimeout3(1e3);
|
|
@@ -7646,6 +7731,12 @@ var BitbucketSCMLib = class extends SCMLib {
|
|
|
7646
7731
|
async getSubmitRequests(_repoUrl) {
|
|
7647
7732
|
throw new Error("getSubmitRequests not implemented for Bitbucket");
|
|
7648
7733
|
}
|
|
7734
|
+
async searchSubmitRequests(_params) {
|
|
7735
|
+
throw new Error("searchSubmitRequests not implemented for Bitbucket");
|
|
7736
|
+
}
|
|
7737
|
+
async searchRepos(_params) {
|
|
7738
|
+
throw new Error("searchRepos not implemented for Bitbucket");
|
|
7739
|
+
}
|
|
7649
7740
|
// TODO: Add comprehensive tests for getPullRequestMetrics (Bitbucket)
|
|
7650
7741
|
// See clients/cli/src/features/analysis/scm/__tests__/github.test.ts:589-648 for reference
|
|
7651
7742
|
async getPullRequestMetrics(_prNumber) {
|
|
@@ -7662,11 +7753,78 @@ var REPORT_DEFAULT_FILE_NAME = "report.json";
|
|
|
7662
7753
|
init_env();
|
|
7663
7754
|
|
|
7664
7755
|
// src/features/analysis/scm/github/GithubSCMLib.ts
|
|
7665
|
-
|
|
7756
|
+
init_env();
|
|
7757
|
+
import pLimit2 from "p-limit";
|
|
7666
7758
|
import { z as z21 } from "zod";
|
|
7667
7759
|
|
|
7760
|
+
// src/features/analysis/scm/utils/cursorValidation.ts
|
|
7761
|
+
var MAX_CURSOR_VALUE = 1e5;
|
|
7762
|
+
function parseCursorSafe(cursor, defaultValue = 0, maxValue = MAX_CURSOR_VALUE) {
|
|
7763
|
+
if (cursor === null || cursor === void 0 || cursor === "") {
|
|
7764
|
+
return defaultValue;
|
|
7765
|
+
}
|
|
7766
|
+
const parsed = parseInt(cursor, 10);
|
|
7767
|
+
if (isNaN(parsed) || parsed < 0 || parsed > maxValue) {
|
|
7768
|
+
return defaultValue;
|
|
7769
|
+
}
|
|
7770
|
+
return parsed;
|
|
7771
|
+
}
|
|
7772
|
+
|
|
7668
7773
|
// src/features/analysis/scm/github/github.ts
|
|
7669
7774
|
import { RequestError } from "@octokit/request-error";
|
|
7775
|
+
import pLimit from "p-limit";
|
|
7776
|
+
|
|
7777
|
+
// src/utils/contextLogger.ts
|
|
7778
|
+
import debugModule from "debug";
|
|
7779
|
+
var debug3 = debugModule("mobb:shared");
|
|
7780
|
+
var _contextLogger = null;
|
|
7781
|
+
var createContextLogger = async () => {
|
|
7782
|
+
if (_contextLogger) return _contextLogger;
|
|
7783
|
+
try {
|
|
7784
|
+
let logger2;
|
|
7785
|
+
try {
|
|
7786
|
+
let module;
|
|
7787
|
+
try {
|
|
7788
|
+
const buildPath = "../../../../../tscommon/backend/build/src/utils/logger";
|
|
7789
|
+
module = await import(buildPath);
|
|
7790
|
+
} catch (e) {
|
|
7791
|
+
const sourcePath = "../../../../../tscommon/backend/src/utils/logger";
|
|
7792
|
+
module = await import(sourcePath);
|
|
7793
|
+
}
|
|
7794
|
+
logger2 = module.logger;
|
|
7795
|
+
} catch {
|
|
7796
|
+
}
|
|
7797
|
+
if (logger2) {
|
|
7798
|
+
_contextLogger = {
|
|
7799
|
+
info: (message, data) => data ? logger2.info(data, message) : logger2.info(message),
|
|
7800
|
+
debug: (message, data) => data ? logger2.debug(data, message) : logger2.debug(message),
|
|
7801
|
+
error: (message, data) => data ? logger2.error(data, message) : logger2.error(message)
|
|
7802
|
+
};
|
|
7803
|
+
return _contextLogger;
|
|
7804
|
+
}
|
|
7805
|
+
} catch {
|
|
7806
|
+
}
|
|
7807
|
+
_contextLogger = {
|
|
7808
|
+
info: (message, data) => debug3(message, data),
|
|
7809
|
+
debug: (message, data) => debug3(message, data),
|
|
7810
|
+
error: (message, data) => debug3(message, data)
|
|
7811
|
+
};
|
|
7812
|
+
return _contextLogger;
|
|
7813
|
+
};
|
|
7814
|
+
var contextLogger = {
|
|
7815
|
+
info: async (message, data) => {
|
|
7816
|
+
const logger2 = await createContextLogger();
|
|
7817
|
+
return logger2.info(message, data);
|
|
7818
|
+
},
|
|
7819
|
+
debug: async (message, data) => {
|
|
7820
|
+
const logger2 = await createContextLogger();
|
|
7821
|
+
return logger2.debug(message, data);
|
|
7822
|
+
},
|
|
7823
|
+
error: async (message, data) => {
|
|
7824
|
+
const logger2 = await createContextLogger();
|
|
7825
|
+
return logger2.error(message, data);
|
|
7826
|
+
}
|
|
7827
|
+
};
|
|
7670
7828
|
|
|
7671
7829
|
// src/features/analysis/scm/github/consts.ts
|
|
7672
7830
|
var POST_COMMENT_PATH = "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments";
|
|
@@ -7900,6 +8058,55 @@ function getOctoKit(options) {
|
|
|
7900
8058
|
function isGithubActionActionToken(token) {
|
|
7901
8059
|
return token.startsWith("ghs_");
|
|
7902
8060
|
}
|
|
8061
|
+
function handleGitHubError(error, scmType = "GitHub" /* GitHub */) {
|
|
8062
|
+
const errorObj = error;
|
|
8063
|
+
const status = errorObj.status || errorObj.statusCode || errorObj.response?.status || errorObj.response?.statusCode;
|
|
8064
|
+
const headers = errorObj.headers || errorObj.response?.headers;
|
|
8065
|
+
const retryAfter = headers?.["retry-after"] ? Number.parseInt(headers["retry-after"], 10) : headers?.["x-ratelimit-reset"] ? Math.max(
|
|
8066
|
+
0,
|
|
8067
|
+
Math.floor(
|
|
8068
|
+
(Number.parseInt(headers["x-ratelimit-reset"], 10) * 1e3 - Date.now()) / 1e3
|
|
8069
|
+
)
|
|
8070
|
+
) : void 0;
|
|
8071
|
+
const errorMessage = errorObj.message || (error instanceof Error ? error.message : String(error));
|
|
8072
|
+
if (status === 403 && retryAfter !== void 0 || errorMessage.toLowerCase().includes("rate limit") || errorMessage.toLowerCase().includes("api rate limit exceeded")) {
|
|
8073
|
+
throw new RateLimitError(
|
|
8074
|
+
"GitHub API rate limit exceeded",
|
|
8075
|
+
scmType,
|
|
8076
|
+
retryAfter
|
|
8077
|
+
);
|
|
8078
|
+
}
|
|
8079
|
+
if (status === 401) {
|
|
8080
|
+
throw new InvalidAccessTokenError(
|
|
8081
|
+
"GitHub authentication failed - token may be expired or invalid",
|
|
8082
|
+
scmType
|
|
8083
|
+
);
|
|
8084
|
+
}
|
|
8085
|
+
if (status === 403) {
|
|
8086
|
+
throw new ScmBadCredentialsError(
|
|
8087
|
+
"GitHub access forbidden - insufficient permissions or invalid credentials",
|
|
8088
|
+
scmType
|
|
8089
|
+
);
|
|
8090
|
+
}
|
|
8091
|
+
if (status === 404) {
|
|
8092
|
+
throw new InvalidRepoUrlError(
|
|
8093
|
+
"GitHub repository or resource not found",
|
|
8094
|
+
scmType
|
|
8095
|
+
);
|
|
8096
|
+
}
|
|
8097
|
+
const errorCode = errorObj.code || errorObj.response?.code;
|
|
8098
|
+
if (errorCode === "ECONNREFUSED" || errorCode === "ETIMEDOUT" || errorCode === "ENOTFOUND" || errorCode === "EAI_AGAIN") {
|
|
8099
|
+
throw new NetworkError(
|
|
8100
|
+
`GitHub network error: ${errorMessage}`,
|
|
8101
|
+
scmType,
|
|
8102
|
+
errorCode
|
|
8103
|
+
);
|
|
8104
|
+
}
|
|
8105
|
+
if (error instanceof RateLimitError || error instanceof InvalidAccessTokenError || error instanceof ScmBadCredentialsError || error instanceof InvalidRepoUrlError || error instanceof NetworkError || error instanceof InvalidUrlPatternError) {
|
|
8106
|
+
throw error;
|
|
8107
|
+
}
|
|
8108
|
+
throw new Error(`GitHub API error: ${errorMessage}`);
|
|
8109
|
+
}
|
|
7903
8110
|
async function githubValidateParams(url, accessToken) {
|
|
7904
8111
|
try {
|
|
7905
8112
|
const oktoKit = getOctoKit({ auth: accessToken, url });
|
|
@@ -7916,23 +8123,118 @@ async function githubValidateParams(url, accessToken) {
|
|
|
7916
8123
|
}
|
|
7917
8124
|
} catch (e) {
|
|
7918
8125
|
console.log("could not init github scm", e);
|
|
7919
|
-
|
|
7920
|
-
const code = error.status || error.statusCode || error.response?.status || error.response?.statusCode || error.response?.code;
|
|
7921
|
-
if (code === 401 || code === 403) {
|
|
7922
|
-
throw new InvalidAccessTokenError(`invalid github access token`);
|
|
7923
|
-
}
|
|
7924
|
-
if (code === 404) {
|
|
7925
|
-
throw new InvalidRepoUrlError(`invalid github repo Url ${url}`);
|
|
7926
|
-
}
|
|
7927
|
-
console.log("githubValidateParams error", e);
|
|
7928
|
-
throw new InvalidRepoUrlError(
|
|
7929
|
-
`cannot access GH repo URL: ${url} with the provided access token`
|
|
7930
|
-
);
|
|
8126
|
+
handleGitHubError(e, "GitHub" /* GitHub */);
|
|
7931
8127
|
}
|
|
7932
8128
|
}
|
|
7933
8129
|
|
|
7934
8130
|
// src/features/analysis/scm/github/github.ts
|
|
7935
8131
|
var MAX_GH_PR_BODY_LENGTH = 65536;
|
|
8132
|
+
var BLAME_LARGE_FILE_THRESHOLD_BYTES = 1e6;
|
|
8133
|
+
var BLAME_THRESHOLD_REDUCTION_BYTES = 1e5;
|
|
8134
|
+
var BLAME_MIN_THRESHOLD_BYTES = 1e5;
|
|
8135
|
+
var GRAPHQL_INPUT_PATTERNS = {
|
|
8136
|
+
// File paths: most printable ASCII chars, unicode letters/numbers
|
|
8137
|
+
// Allows: letters, numbers, spaces, common punctuation, path separators
|
|
8138
|
+
// Disallows: control characters, null bytes
|
|
8139
|
+
path: /^[\p{L}\p{N}\p{Zs}\-._/@+#~%()[\]{}=!,;'&]+$/u,
|
|
8140
|
+
// Git refs: branch/tag names follow git-check-ref-format rules
|
|
8141
|
+
// Allows: letters, numbers, slashes, dots, hyphens, underscores
|
|
8142
|
+
// Can also be "ref:path" format for expressions
|
|
8143
|
+
ref: /^[\p{L}\p{N}\-._/:@]+$/u,
|
|
8144
|
+
// Git SHAs: strictly hexadecimal (short or full)
|
|
8145
|
+
sha: /^[0-9a-fA-F]+$/
|
|
8146
|
+
};
|
|
8147
|
+
function validateGraphQLInput(value, type2) {
|
|
8148
|
+
const pattern = GRAPHQL_INPUT_PATTERNS[type2];
|
|
8149
|
+
if (!pattern.test(value)) {
|
|
8150
|
+
void contextLogger.info(
|
|
8151
|
+
"[GraphQL] Input contains unexpected characters, proceeding with escaping",
|
|
8152
|
+
{
|
|
8153
|
+
type: type2,
|
|
8154
|
+
valueLength: value.length,
|
|
8155
|
+
// Log first 100 chars to help debug without exposing full value
|
|
8156
|
+
valueSample: value.slice(0, 100)
|
|
8157
|
+
}
|
|
8158
|
+
);
|
|
8159
|
+
return false;
|
|
8160
|
+
}
|
|
8161
|
+
return true;
|
|
8162
|
+
}
|
|
8163
|
+
function escapeGraphQLString(value) {
|
|
8164
|
+
return value.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f").replace(/[\b]/g, "\\b");
|
|
8165
|
+
}
|
|
8166
|
+
function safeGraphQLString(value, type2) {
|
|
8167
|
+
validateGraphQLInput(value, type2);
|
|
8168
|
+
return escapeGraphQLString(value);
|
|
8169
|
+
}
|
|
8170
|
+
function extractBlameRanges(data) {
|
|
8171
|
+
const fileData = data;
|
|
8172
|
+
if (fileData.blame?.ranges) {
|
|
8173
|
+
return fileData.blame.ranges.map((range) => ({
|
|
8174
|
+
startingLine: range.startingLine,
|
|
8175
|
+
endingLine: range.endingLine,
|
|
8176
|
+
commitSha: range.commit.oid
|
|
8177
|
+
}));
|
|
8178
|
+
}
|
|
8179
|
+
return void 0;
|
|
8180
|
+
}
|
|
8181
|
+
function buildBlameFragment(ref) {
|
|
8182
|
+
const escapedRef = safeGraphQLString(ref, "ref");
|
|
8183
|
+
return (path22, index) => {
|
|
8184
|
+
const escapedPath = safeGraphQLString(path22, "path");
|
|
8185
|
+
return `
|
|
8186
|
+
file${index}: object(expression: "${escapedRef}") {
|
|
8187
|
+
... on Commit {
|
|
8188
|
+
${GITHUB_GRAPHQL_FRAGMENTS.BLAME_RANGES.replace("$path", escapedPath)}
|
|
8189
|
+
}
|
|
8190
|
+
}`;
|
|
8191
|
+
};
|
|
8192
|
+
}
|
|
8193
|
+
function createBatchesByTotalSize(files, threshold) {
|
|
8194
|
+
const batches = [];
|
|
8195
|
+
let currentBatch = [];
|
|
8196
|
+
let currentBatchSize = 0;
|
|
8197
|
+
for (const file of files) {
|
|
8198
|
+
if (currentBatchSize + file.size > threshold && currentBatch.length > 0) {
|
|
8199
|
+
batches.push(currentBatch);
|
|
8200
|
+
currentBatch = [];
|
|
8201
|
+
currentBatchSize = 0;
|
|
8202
|
+
}
|
|
8203
|
+
currentBatch.push(file);
|
|
8204
|
+
currentBatchSize += file.size;
|
|
8205
|
+
}
|
|
8206
|
+
if (currentBatch.length > 0) {
|
|
8207
|
+
batches.push(currentBatch);
|
|
8208
|
+
}
|
|
8209
|
+
return batches;
|
|
8210
|
+
}
|
|
8211
|
+
async function fetchBlameForBatch(octokit, owner, repo, ref, files) {
|
|
8212
|
+
if (files.length === 0) {
|
|
8213
|
+
return /* @__PURE__ */ new Map();
|
|
8214
|
+
}
|
|
8215
|
+
return executeBatchGraphQL(octokit, owner, repo, {
|
|
8216
|
+
items: files.map((f) => f.path),
|
|
8217
|
+
aliasPrefix: "file",
|
|
8218
|
+
buildFragment: buildBlameFragment(ref),
|
|
8219
|
+
extractResult: extractBlameRanges
|
|
8220
|
+
});
|
|
8221
|
+
}
|
|
8222
|
+
async function processBlameAttempt(params) {
|
|
8223
|
+
const { octokit, owner, repo, ref, batches, concurrency } = params;
|
|
8224
|
+
const result = /* @__PURE__ */ new Map();
|
|
8225
|
+
const limit = pLimit(concurrency);
|
|
8226
|
+
const batchResults = await Promise.all(
|
|
8227
|
+
batches.map(
|
|
8228
|
+
(batch) => limit(() => fetchBlameForBatch(octokit, owner, repo, ref, batch))
|
|
8229
|
+
)
|
|
8230
|
+
);
|
|
8231
|
+
for (const batchResult of batchResults) {
|
|
8232
|
+
for (const [path22, blameData] of batchResult) {
|
|
8233
|
+
result.set(path22, blameData);
|
|
8234
|
+
}
|
|
8235
|
+
}
|
|
8236
|
+
return result;
|
|
8237
|
+
}
|
|
7936
8238
|
async function executeBatchGraphQL(octokit, owner, repo, config2) {
|
|
7937
8239
|
const { items, aliasPrefix, buildFragment, extractResult } = config2;
|
|
7938
8240
|
if (items.length === 0) {
|
|
@@ -8520,20 +8822,223 @@ function getGithubSdk(params = {}) {
|
|
|
8520
8822
|
}
|
|
8521
8823
|
});
|
|
8522
8824
|
},
|
|
8825
|
+
/**
|
|
8826
|
+
* Batch fetch PR data (additions/deletions + comments) for multiple PRs via GraphQL.
|
|
8827
|
+
* Combines PR_CHANGES and PR_COMMENTS fragments into a single API call for efficiency.
|
|
8828
|
+
* This is more efficient than calling getPrAdditionsDeletionsBatch and getPrCommentsBatch separately.
|
|
8829
|
+
*/
|
|
8830
|
+
async getPrDataBatch(params2) {
|
|
8831
|
+
return executeBatchGraphQL(octokit, params2.owner, params2.repo, {
|
|
8832
|
+
items: params2.prNumbers,
|
|
8833
|
+
aliasPrefix: "pr",
|
|
8834
|
+
buildFragment: (prNumber, index) => `
|
|
8835
|
+
pr${index}: pullRequest(number: ${prNumber}) {
|
|
8836
|
+
${GITHUB_GRAPHQL_FRAGMENTS.PR_CHANGES}
|
|
8837
|
+
${GITHUB_GRAPHQL_FRAGMENTS.PR_COMMENTS}
|
|
8838
|
+
}`,
|
|
8839
|
+
extractResult: (data) => {
|
|
8840
|
+
const prData = data;
|
|
8841
|
+
if (prData.additions !== void 0 && prData.deletions !== void 0) {
|
|
8842
|
+
const comments = prData.comments?.nodes ? prData.comments.nodes.map((node) => ({
|
|
8843
|
+
author: node.author ? { login: node.author.login, type: node.author.__typename } : null,
|
|
8844
|
+
body: node.body
|
|
8845
|
+
})) : [];
|
|
8846
|
+
return {
|
|
8847
|
+
changedLines: {
|
|
8848
|
+
additions: prData.additions,
|
|
8849
|
+
deletions: prData.deletions
|
|
8850
|
+
},
|
|
8851
|
+
comments
|
|
8852
|
+
};
|
|
8853
|
+
}
|
|
8854
|
+
return void 0;
|
|
8855
|
+
}
|
|
8856
|
+
});
|
|
8857
|
+
},
|
|
8858
|
+
/**
 * Batch fetch blob sizes for multiple files via GraphQL.
 * Used to determine which files are too large to batch in blame queries.
 *
 * @param params2.owner - Repository owner
 * @param params2.repo - Repository name
 * @param params2.blobShas - Blob SHAs to look up
 * @returns Per-SHA byteSize results from executeBatchGraphQL; SHAs that do
 *          not resolve to a Blob (or are missing) produce no entry.
 */
async getBlobSizesBatch(params2) {
  return executeBatchGraphQL(octokit, params2.owner, params2.repo, {
    items: params2.blobShas,
    aliasPrefix: "blob",
    buildFragment: (sha, index) => {
      // Escape the SHA before interpolating it into the query string.
      const escapedSha = safeGraphQLString(sha, "sha");
      return `
    blob${index}: object(oid: "${escapedSha}") {
      ... on Blob {
        byteSize
      }
    }`;
    },
    extractResult: (data) => {
      const blobData = data;
      if (blobData.byteSize !== void 0) {
        return blobData.byteSize;
      }
      // Non-blob objects (trees, commits) match no inline fragment: no size.
      return void 0;
    }
  });
},
|
|
8523
8884
|
/**
|
|
8524
8885
|
* Batch fetch blame data for multiple files via GraphQL.
|
|
8525
8886
|
* Uses GITHUB_GRAPHQL_FRAGMENTS.BLAME_RANGES for the field selection.
|
|
8887
|
+
*
|
|
8888
|
+
* Optimized to handle large files with retry logic:
|
|
8889
|
+
* - Files above threshold are processed individually with rate limiting
|
|
8890
|
+
* - On failure, retries with reduced threshold (-100KB) and concurrency (-1)
|
|
8891
|
+
* - Continues until success or threshold < 100KB
|
|
8892
|
+
*
|
|
8893
|
+
* @param params.files - Array of files with path and blobSha for size lookup
|
|
8894
|
+
* @param params.concurrency - Max concurrent requests for large files (default: 2)
|
|
8526
8895
|
*/
|
|
8527
8896
|
async getBlameBatch(params2) {
|
|
8897
|
+
const {
|
|
8898
|
+
owner,
|
|
8899
|
+
repo,
|
|
8900
|
+
ref,
|
|
8901
|
+
files,
|
|
8902
|
+
concurrency: initialConcurrency = 2
|
|
8903
|
+
} = params2;
|
|
8904
|
+
if (files.length === 0) {
|
|
8905
|
+
return /* @__PURE__ */ new Map();
|
|
8906
|
+
}
|
|
8907
|
+
const filesWithSizes = await this.fetchFilesWithSizes(owner, repo, files);
|
|
8908
|
+
return this.executeBlameWithRetries({
|
|
8909
|
+
owner,
|
|
8910
|
+
repo,
|
|
8911
|
+
ref,
|
|
8912
|
+
filesWithSizes,
|
|
8913
|
+
initialConcurrency
|
|
8914
|
+
});
|
|
8915
|
+
},
|
|
8916
|
+
/**
|
|
8917
|
+
* Fetches blob sizes and creates a list of files with their sizes.
|
|
8918
|
+
*/
|
|
8919
|
+
async fetchFilesWithSizes(owner, repo, files) {
|
|
8920
|
+
const blobShas = files.map((f) => f.blobSha);
|
|
8921
|
+
const blobSizes = await this.getBlobSizesBatch({ owner, repo, blobShas });
|
|
8922
|
+
return files.map((file) => ({
|
|
8923
|
+
...file,
|
|
8924
|
+
size: blobSizes.get(file.blobSha) ?? 0
|
|
8925
|
+
}));
|
|
8926
|
+
},
|
|
8927
|
+
/**
 * Executes blame fetching with retry logic on failure.
 *
 * Starts at BLAME_LARGE_FILE_THRESHOLD_BYTES; after each failed attempt the
 * batching threshold shrinks by BLAME_THRESHOLD_REDUCTION_BYTES and the
 * concurrency drops by 1 (floored at 1). The loop stops once the threshold
 * falls below BLAME_MIN_THRESHOLD_BYTES.
 *
 * @param params2.filesWithSizes - Files (path, blobSha, size) to blame
 * @param params2.initialConcurrency - Concurrency used for the first attempt
 * @returns The result of the first successful processBlameAttempt call
 *          (its .size is logged as the processed-file count)
 * @throws The last captured error once all retries are exhausted
 */
async executeBlameWithRetries(params2) {
  const { owner, repo, ref, filesWithSizes, initialConcurrency } = params2;
  let threshold = BLAME_LARGE_FILE_THRESHOLD_BYTES;
  let concurrency = initialConcurrency;
  let attempt = 1;
  let lastError = null;
  while (threshold >= BLAME_MIN_THRESHOLD_BYTES) {
    // Re-batch on every attempt: a smaller threshold yields more, smaller batches.
    const batches = createBatchesByTotalSize(filesWithSizes, threshold);
    this.logBlameAttemptStart(
      attempt,
      threshold,
      concurrency,
      filesWithSizes.length,
      batches.length,
      owner,
      repo,
      ref
    );
    try {
      const result = await processBlameAttempt({
        octokit,
        owner,
        repo,
        ref,
        batches,
        concurrency
      });
      this.logBlameAttemptSuccess(attempt, result.size, owner, repo);
      return result;
    } catch (error) {
      // Normalize non-Error throwables so .message is always available.
      lastError = error instanceof Error ? error : new Error(String(error));
      this.logBlameAttemptFailure(
        attempt,
        threshold,
        concurrency,
        lastError.message,
        owner,
        repo
      );
      // Back off: smaller batches and fewer parallel requests next round.
      threshold -= BLAME_THRESHOLD_REDUCTION_BYTES;
      concurrency = Math.max(1, concurrency - 1);
      attempt++;
    }
  }
  void contextLogger.error("[getBlameBatch] Exhausted all retries", {
    attempts: attempt - 1,
    repo: `${owner}/${repo}`,
    ref,
    error: lastError?.message || "unknown"
  });
  // lastError is null only if the loop never ran (threshold already too low).
  throw lastError || new Error("getBlameBatch failed after all retries");
},
|
|
8983
|
+
/**
|
|
8984
|
+
* Logs the start of a blame batch attempt.
|
|
8985
|
+
*/
|
|
8986
|
+
logBlameAttemptStart(attempt, threshold, concurrency, totalFiles, batchCount, owner, repo, ref) {
|
|
8987
|
+
void contextLogger.debug("[getBlameBatch] Processing attempt", {
|
|
8988
|
+
attempt,
|
|
8989
|
+
threshold,
|
|
8990
|
+
concurrency,
|
|
8991
|
+
totalFiles,
|
|
8992
|
+
batchCount,
|
|
8993
|
+
repo: `${owner}/${repo}`,
|
|
8994
|
+
ref
|
|
8995
|
+
});
|
|
8996
|
+
},
|
|
8997
|
+
/**
|
|
8998
|
+
* Logs a successful blame batch attempt.
|
|
8999
|
+
*/
|
|
9000
|
+
logBlameAttemptSuccess(attempt, filesProcessed, owner, repo) {
|
|
9001
|
+
void contextLogger.debug("[getBlameBatch] Successfully processed batch", {
|
|
9002
|
+
attempt,
|
|
9003
|
+
filesProcessed,
|
|
9004
|
+
repo: `${owner}/${repo}`
|
|
9005
|
+
});
|
|
9006
|
+
},
|
|
9007
|
+
/**
|
|
9008
|
+
* Logs a failed blame batch attempt.
|
|
9009
|
+
*/
|
|
9010
|
+
logBlameAttemptFailure(attempt, threshold, concurrency, errorMessage, owner, repo) {
|
|
9011
|
+
void contextLogger.debug(
|
|
9012
|
+
"[getBlameBatch] Attempt failed, retrying with reduced threshold",
|
|
9013
|
+
{
|
|
9014
|
+
attempt,
|
|
9015
|
+
threshold,
|
|
9016
|
+
concurrency,
|
|
9017
|
+
error: errorMessage,
|
|
9018
|
+
repo: `${owner}/${repo}`
|
|
9019
|
+
}
|
|
9020
|
+
);
|
|
9021
|
+
},
|
|
9022
|
+
/**
|
|
9023
|
+
* Batch fetch blame data for multiple files via GraphQL (legacy interface).
|
|
9024
|
+
* This is a convenience wrapper that accepts file paths without blob SHAs.
|
|
9025
|
+
* Note: This does NOT perform size-based optimization. Use getBlameBatch with
|
|
9026
|
+
* files array including blobSha for optimized large file handling.
|
|
9027
|
+
*/
|
|
9028
|
+
async getBlameBatchByPaths(params2) {
|
|
9029
|
+
const escapedRef = safeGraphQLString(params2.ref, "ref");
|
|
8528
9030
|
return executeBatchGraphQL(octokit, params2.owner, params2.repo, {
|
|
8529
9031
|
items: params2.filePaths,
|
|
8530
9032
|
aliasPrefix: "file",
|
|
8531
|
-
buildFragment: (path22, index) =>
|
|
8532
|
-
|
|
9033
|
+
buildFragment: (path22, index) => {
|
|
9034
|
+
const escapedPath = safeGraphQLString(path22, "path");
|
|
9035
|
+
return `
|
|
9036
|
+
file${index}: object(expression: "${escapedRef}") {
|
|
8533
9037
|
... on Commit {
|
|
8534
|
-
${GITHUB_GRAPHQL_FRAGMENTS.BLAME_RANGES.replace("$path",
|
|
9038
|
+
${GITHUB_GRAPHQL_FRAGMENTS.BLAME_RANGES.replace("$path", escapedPath)}
|
|
8535
9039
|
}
|
|
8536
|
-
}
|
|
9040
|
+
}`;
|
|
9041
|
+
},
|
|
8537
9042
|
extractResult: (data) => {
|
|
8538
9043
|
const fileData = data;
|
|
8539
9044
|
if (fileData.blame?.ranges) {
|
|
@@ -8555,12 +9060,15 @@ function getGithubSdk(params = {}) {
|
|
|
8555
9060
|
return executeBatchGraphQL(octokit, params2.owner, params2.repo, {
|
|
8556
9061
|
items: params2.commitShas,
|
|
8557
9062
|
aliasPrefix: "commit",
|
|
8558
|
-
buildFragment: (sha, index) =>
|
|
8559
|
-
|
|
9063
|
+
buildFragment: (sha, index) => {
|
|
9064
|
+
const escapedSha = safeGraphQLString(sha, "sha");
|
|
9065
|
+
return `
|
|
9066
|
+
commit${index}: object(oid: "${escapedSha}") {
|
|
8560
9067
|
... on Commit {
|
|
8561
9068
|
${GITHUB_GRAPHQL_FRAGMENTS.COMMIT_TIMESTAMP}
|
|
8562
9069
|
}
|
|
8563
|
-
}
|
|
9070
|
+
}`;
|
|
9071
|
+
},
|
|
8564
9072
|
extractResult: (data) => {
|
|
8565
9073
|
const commitData = data;
|
|
8566
9074
|
if (commitData.oid && commitData.committedDate) {
|
|
@@ -8583,12 +9091,76 @@ function getGithubSdk(params = {}) {
|
|
|
8583
9091
|
}
|
|
8584
9092
|
);
|
|
8585
9093
|
return res;
|
|
9094
|
+
},
|
|
9095
|
+
/**
 * Search PRs using GitHub's Search API with sorting.
 * Docs: https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-issues-and-pull-requests
 *
 * @param params2.owner / params2.repo - Repository to search in
 * @param params2.updatedAfter - Optional lower bound on PR update date
 * @param params2.state - "all" (default) adds no state qualifier
 * @param params2.sort - Sort spec (default: updated desc)
 * @param params2.perPage / params2.page - Pagination controls
 */
async searchPullRequests(params2) {
  const {
    owner,
    repo,
    updatedAfter,
    state = "all",
    sort = { field: "updated", order: "desc" },
    perPage = 10,
    page = 1
  } = params2;
  let query = `repo:${owner}/${repo} is:pr`;
  if (updatedAfter) {
    // Search qualifier uses day granularity: the timestamp is truncated
    // to YYYY-MM-DD, so same-day updates before `updatedAfter` still match.
    const dateStr = updatedAfter.toISOString().split("T")[0];
    query += ` updated:>=${dateStr}`;
  }
  if (state !== "all") {
    query += ` is:${state}`;
  }
  // Search API only sorts by updated/created/comments; anything else falls
  // back to "comments".
  const githubSortField = sort.field === "updated" || sort.field === "created" ? sort.field : "comments";
  const response = await octokit.rest.search.issuesAndPullRequests({
    q: query,
    sort: githubSortField,
    order: sort.order,
    per_page: perPage,
    page
  });
  return {
    items: response.data.items,
    totalCount: response.data.total_count,
    // NOTE(review): GitHub Search caps results at ~1000, so hasMore can
    // overstate availability for very active repos — confirm before relying
    // on deep pagination.
    hasMore: page * perPage < response.data.total_count
  };
},
|
|
9131
|
+
/**
 * Search repositories using GitHub's Search API.
 * Docs: https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-repositories
 *
 * @param params2.org - Organization to scope the search to (required)
 * @param params2.sort - Sort spec; only date-based server-side sorting is supported
 * @param params2.perPage / params2.page - Pagination controls
 * @throws Error when no organization is provided
 */
async searchRepositories(params2) {
  const {
    org,
    sort = { field: "updated", order: "desc" },
    perPage = 10,
    page = 1
  } = params2;
  if (!org) {
    throw new Error("Organization is required for repository search");
  }
  const query = `org:${org}`;
  // "name" has no server-side sort in the Search API; callers wanting name
  // order are routed to the in-memory path (see GithubSCMLib.searchRepos).
  // undefined lets GitHub apply its default "best match" ordering.
  const githubSortField = sort.field === "name" ? void 0 : "updated";
  const response = await octokit.rest.search.repos({
    q: query,
    sort: githubSortField,
    order: sort.order,
    per_page: perPage,
    page
  });
  return {
    items: response.data.items,
    totalCount: response.data.total_count,
    // NOTE(review): Search results are capped at ~1000 by GitHub; hasMore may
    // overstate availability for very large orgs.
    hasMore: page * perPage < response.data.total_count
  };
},
|
|
8586
9159
|
}
|
|
8587
9160
|
};
|
|
8588
9161
|
}
|
|
8589
9162
|
|
|
8590
9163
|
// src/features/analysis/scm/github/GithubSCMLib.ts
|
|
8591
|
-
var GITHUB_COMMIT_FETCH_CONCURRENCY = parseInt(process.env["GITHUB_COMMIT_CONCURRENCY"] || "10", 10) || 10;
|
|
8592
9164
|
function determinePrStatus(state, isDraft) {
|
|
8593
9165
|
switch (state) {
|
|
8594
9166
|
case "CLOSED":
|
|
@@ -8599,7 +9171,7 @@ function determinePrStatus(state, isDraft) {
|
|
|
8599
9171
|
return isDraft ? "DRAFT" /* Draft */ : "ACTIVE" /* Active */;
|
|
8600
9172
|
}
|
|
8601
9173
|
}
|
|
8602
|
-
var GithubSCMLib = class extends SCMLib {
|
|
9174
|
+
var GithubSCMLib = class _GithubSCMLib extends SCMLib {
|
|
8603
9175
|
// we don't always need a url, what's important is that we have an access token
|
|
8604
9176
|
constructor(url, accessToken, scmOrg) {
|
|
8605
9177
|
super(url, accessToken, scmOrg);
|
|
@@ -8955,7 +9527,7 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
8955
9527
|
}),
|
|
8956
9528
|
this.getPrDiff({ pull_number: prNumber })
|
|
8957
9529
|
]);
|
|
8958
|
-
const limit =
|
|
9530
|
+
const limit = pLimit2(GITHUB_API_CONCURRENCY);
|
|
8959
9531
|
const commits = await Promise.all(
|
|
8960
9532
|
commitsRes.data.map(
|
|
8961
9533
|
(commit) => limit(
|
|
@@ -8966,7 +9538,11 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
8966
9538
|
)
|
|
8967
9539
|
)
|
|
8968
9540
|
);
|
|
8969
|
-
const diffLines = filesRes ? await this._attributeLinesViaBlame(
|
|
9541
|
+
const diffLines = filesRes ? await this._attributeLinesViaBlame({
|
|
9542
|
+
headSha: pr.head.sha,
|
|
9543
|
+
changedFiles: filesRes.data,
|
|
9544
|
+
prCommits: commits
|
|
9545
|
+
}) : [];
|
|
8970
9546
|
return {
|
|
8971
9547
|
diff: prDiff,
|
|
8972
9548
|
createdAt: new Date(pr.created_at),
|
|
@@ -9005,7 +9581,7 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
9005
9581
|
removed: changedLinesData.deletions
|
|
9006
9582
|
} : { added: 0, removed: 0 };
|
|
9007
9583
|
const comments = commentsMap.get(pr.number) || [];
|
|
9008
|
-
const tickets =
|
|
9584
|
+
const tickets = _GithubSCMLib.extractLinearTicketsFromComments(comments);
|
|
9009
9585
|
return {
|
|
9010
9586
|
submitRequestId: String(pr.number),
|
|
9011
9587
|
submitRequestNumber: pr.number,
|
|
@@ -9024,6 +9600,59 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
9024
9600
|
});
|
|
9025
9601
|
return submitRequests;
|
|
9026
9602
|
}
|
|
9603
|
+
/**
 * Override searchSubmitRequests to use GitHub's Search API for efficient
 * server-side pagination, instead of fetching all PRs and filtering in-memory.
 *
 * Cursor semantics: the cursor encodes a 1-based page number as a string.
 * Fields the Search API does not expose (branches, author email, changed
 * lines, tickets) are returned as empty placeholders.
 */
async searchSubmitRequests(params) {
  this._validateAccessToken();
  const { owner, repo } = parseGithubOwnerAndRepo(params.repoUrl);
  // Invalid/absent cursors fall back to the first page.
  const page = parseCursorSafe(params.cursor, 1);
  const perPage = params.limit || 10;
  const sort = params.sort || { field: "updated", order: "desc" };
  const searchResult = await this.githubSdk.searchPullRequests({
    owner,
    repo,
    updatedAfter: params.filters?.updatedAfter,
    state: params.filters?.state,
    sort,
    perPage,
    page
  });
  const results = searchResult.items.map((issue) => {
    // Derive PR status: merged PRs also report state === "closed", so check
    // merged_at first; open PRs may still be drafts.
    let status = "open";
    if (issue.state === "closed") {
      status = issue.pull_request?.merged_at ? "merged" : "closed";
    } else if (issue.draft) {
      status = "draft";
    }
    return {
      submitRequestId: String(issue.number),
      submitRequestNumber: issue.number,
      title: issue.title,
      status,
      sourceBranch: "",
      // Not available in search API
      targetBranch: "",
      // Not available in search API
      authorName: issue.user?.login,
      authorEmail: void 0,
      // Not available in search API
      createdAt: new Date(issue.created_at),
      updatedAt: new Date(issue.updated_at),
      description: issue.body || void 0,
      tickets: [],
      // Would need separate parsing
      changedLines: { added: 0, removed: 0 }
      // Not available in search API
    };
  });
  return {
    results,
    // Next cursor is simply the next page number.
    nextCursor: searchResult.hasMore ? String(page + 1) : void 0,
    hasMore: searchResult.hasMore
  };
}
|
|
9027
9656
|
/**
|
|
9028
9657
|
* Fetches commits for multiple PRs in a single GraphQL request.
|
|
9029
9658
|
* Much more efficient than calling getSubmitRequestDiff for each PR.
|
|
@@ -9037,6 +9666,109 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
9037
9666
|
const { owner, repo } = parseGithubOwnerAndRepo(repoUrl);
|
|
9038
9667
|
return this.githubSdk.getPrCommitsBatch({ owner, repo, prNumbers });
|
|
9039
9668
|
}
|
|
9669
|
+
/**
|
|
9670
|
+
* Fetches additions and deletions counts for multiple PRs in a single GraphQL request.
|
|
9671
|
+
* Used to enrich search results with changed lines data.
|
|
9672
|
+
*
|
|
9673
|
+
* @param repoUrl - Repository URL
|
|
9674
|
+
* @param prNumbers - Array of PR numbers to fetch metrics for
|
|
9675
|
+
* @returns Map of PR number to additions/deletions count
|
|
9676
|
+
*/
|
|
9677
|
+
async getPrAdditionsDeletionsBatch(repoUrl, prNumbers) {
|
|
9678
|
+
this._validateAccessToken();
|
|
9679
|
+
const { owner, repo } = parseGithubOwnerAndRepo(repoUrl);
|
|
9680
|
+
return this.githubSdk.getPrAdditionsDeletionsBatch({
|
|
9681
|
+
owner,
|
|
9682
|
+
repo,
|
|
9683
|
+
prNumbers
|
|
9684
|
+
});
|
|
9685
|
+
}
|
|
9686
|
+
/**
|
|
9687
|
+
* Batch fetch PR data (additions/deletions + comments) for multiple PRs.
|
|
9688
|
+
* Combines both metrics into a single GraphQL call for efficiency.
|
|
9689
|
+
*
|
|
9690
|
+
* @param repoUrl - Repository URL
|
|
9691
|
+
* @param prNumbers - Array of PR numbers to fetch data for
|
|
9692
|
+
* @returns Map of PR number to { changedLines, comments }
|
|
9693
|
+
*/
|
|
9694
|
+
async getPrDataBatch(repoUrl, prNumbers) {
|
|
9695
|
+
this._validateAccessToken();
|
|
9696
|
+
const { owner, repo } = parseGithubOwnerAndRepo(repoUrl);
|
|
9697
|
+
return this.githubSdk.getPrDataBatch({
|
|
9698
|
+
owner,
|
|
9699
|
+
repo,
|
|
9700
|
+
prNumbers
|
|
9701
|
+
});
|
|
9702
|
+
}
|
|
9703
|
+
/**
|
|
9704
|
+
* Override searchRepos to use GitHub's Search API for efficient pagination.
|
|
9705
|
+
* This is much faster than fetching all repos and filtering in-memory.
|
|
9706
|
+
*
|
|
9707
|
+
* Note: GitHub Search API doesn't support sorting by name, so when name sorting
|
|
9708
|
+
* is requested, we fall back to fetching all repos and sorting in-memory.
|
|
9709
|
+
*/
|
|
9710
|
+
async searchRepos(params) {
|
|
9711
|
+
this._validateAccessToken();
|
|
9712
|
+
const sort = params.sort || { field: "updated", order: "desc" };
|
|
9713
|
+
if (!params.scmOrg || sort.field === "name") {
|
|
9714
|
+
return this.searchReposInMemory(params);
|
|
9715
|
+
}
|
|
9716
|
+
return this.searchReposWithApi(params);
|
|
9717
|
+
}
|
|
9718
|
+
/**
|
|
9719
|
+
* Search repos by fetching all and sorting/paginating in-memory.
|
|
9720
|
+
* Used when name sorting is requested or no organization is provided.
|
|
9721
|
+
*/
|
|
9722
|
+
async searchReposInMemory(params) {
|
|
9723
|
+
const repos = await this.getRepoList(params.scmOrg);
|
|
9724
|
+
const sort = params.sort || { field: "updated", order: "desc" };
|
|
9725
|
+
const sortOrder = sort.order === "asc" ? 1 : -1;
|
|
9726
|
+
const sortedRepos = [...repos].sort((a, b) => {
|
|
9727
|
+
if (sort.field === "name") {
|
|
9728
|
+
return a.repoName.localeCompare(b.repoName) * sortOrder;
|
|
9729
|
+
}
|
|
9730
|
+
const aDate = a.repoUpdatedAt ? Date.parse(a.repoUpdatedAt) : 0;
|
|
9731
|
+
const bDate = b.repoUpdatedAt ? Date.parse(b.repoUpdatedAt) : 0;
|
|
9732
|
+
return (aDate - bDate) * sortOrder;
|
|
9733
|
+
});
|
|
9734
|
+
const limit = params.limit || 10;
|
|
9735
|
+
const offset = parseCursorSafe(params.cursor, 0);
|
|
9736
|
+
const paged = sortedRepos.slice(offset, offset + limit);
|
|
9737
|
+
const nextOffset = offset + limit;
|
|
9738
|
+
return {
|
|
9739
|
+
results: paged,
|
|
9740
|
+
nextCursor: nextOffset < sortedRepos.length ? String(nextOffset) : void 0,
|
|
9741
|
+
hasMore: nextOffset < sortedRepos.length
|
|
9742
|
+
};
|
|
9743
|
+
}
|
|
9744
|
+
/**
|
|
9745
|
+
* Search repos using GitHub Search API for efficient server-side pagination.
|
|
9746
|
+
* Only supports date-based sorting (updated/created).
|
|
9747
|
+
*/
|
|
9748
|
+
async searchReposWithApi(params) {
|
|
9749
|
+
const page = parseCursorSafe(params.cursor, 1);
|
|
9750
|
+
const perPage = params.limit || 10;
|
|
9751
|
+
const sort = params.sort || { field: "updated", order: "desc" };
|
|
9752
|
+
const searchResult = await this.githubSdk.searchRepositories({
|
|
9753
|
+
org: params.scmOrg,
|
|
9754
|
+
sort,
|
|
9755
|
+
perPage,
|
|
9756
|
+
page
|
|
9757
|
+
});
|
|
9758
|
+
const results = searchResult.items.map((repo) => ({
|
|
9759
|
+
repoName: repo.name,
|
|
9760
|
+
repoUrl: repo.html_url || repo.url,
|
|
9761
|
+
repoOwner: repo.owner?.login || "",
|
|
9762
|
+
repoLanguages: repo.language ? [repo.language] : [],
|
|
9763
|
+
repoIsPublic: !repo.private,
|
|
9764
|
+
repoUpdatedAt: repo.updated_at || null
|
|
9765
|
+
}));
|
|
9766
|
+
return {
|
|
9767
|
+
results,
|
|
9768
|
+
nextCursor: searchResult.hasMore ? String(page + 1) : void 0,
|
|
9769
|
+
hasMore: searchResult.hasMore
|
|
9770
|
+
};
|
|
9771
|
+
}
|
|
9040
9772
|
async getPullRequestMetrics(prNumber) {
|
|
9041
9773
|
this._validateAccessTokenAndUrl();
|
|
9042
9774
|
const { owner, repo } = parseGithubOwnerAndRepo(this.url);
|
|
@@ -9089,7 +9821,7 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
9089
9821
|
* Parse a Linear ticket from URL and name
|
|
9090
9822
|
* Returns null if invalid or missing data
|
|
9091
9823
|
*/
|
|
9092
|
-
_parseLinearTicket(url, name) {
|
|
9824
|
+
static _parseLinearTicket(url, name) {
|
|
9093
9825
|
if (!name || !url) {
|
|
9094
9826
|
return null;
|
|
9095
9827
|
}
|
|
@@ -9100,8 +9832,9 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
9100
9832
|
}
|
|
9101
9833
|
/**
|
|
9102
9834
|
* Extract Linear ticket links from pre-fetched comments (pure function, no API calls)
|
|
9835
|
+
* Public static method so it can be reused by backend services.
|
|
9103
9836
|
*/
|
|
9104
|
-
|
|
9837
|
+
static extractLinearTicketsFromComments(comments) {
|
|
9105
9838
|
const tickets = [];
|
|
9106
9839
|
const seen = /* @__PURE__ */ new Set();
|
|
9107
9840
|
for (const comment of comments) {
|
|
@@ -9110,7 +9843,7 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
9110
9843
|
const htmlPattern = /<a href="(https:\/\/linear\.app\/[^"]+)">([A-Z]+-\d+)<\/a>/g;
|
|
9111
9844
|
let match;
|
|
9112
9845
|
while ((match = htmlPattern.exec(body)) !== null) {
|
|
9113
|
-
const ticket =
|
|
9846
|
+
const ticket = _GithubSCMLib._parseLinearTicket(match[1], match[2]);
|
|
9114
9847
|
if (ticket && !seen.has(`${ticket.name}|${ticket.url}`)) {
|
|
9115
9848
|
seen.add(`${ticket.name}|${ticket.url}`);
|
|
9116
9849
|
tickets.push(ticket);
|
|
@@ -9118,7 +9851,7 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
9118
9851
|
}
|
|
9119
9852
|
const markdownPattern = /\[([A-Z]+-\d+)\]\((https:\/\/linear\.app\/[^)]+)\)/g;
|
|
9120
9853
|
while ((match = markdownPattern.exec(body)) !== null) {
|
|
9121
|
-
const ticket =
|
|
9854
|
+
const ticket = _GithubSCMLib._parseLinearTicket(match[2], match[1]);
|
|
9122
9855
|
if (ticket && !seen.has(`${ticket.name}|${ticket.url}`)) {
|
|
9123
9856
|
seen.add(`${ticket.name}|${ticket.url}`);
|
|
9124
9857
|
tickets.push(ticket);
|
|
@@ -9180,12 +9913,24 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
9180
9913
|
/**
|
|
9181
9914
|
* Optimized helper to attribute PR lines to commits using blame API
|
|
9182
9915
|
* Batch blame queries for minimal API call time (1 call instead of M calls)
|
|
9916
|
+
*
|
|
9917
|
+
* Uses size-based batching to handle large files:
|
|
9918
|
+
* - Files > 1MB are processed individually with rate limiting
|
|
9919
|
+
* - Smaller files are batched together in a single request
|
|
9920
|
+
* This prevents GitHub API timeouts (~10s) on large generated files.
|
|
9183
9921
|
*/
|
|
9184
|
-
async _attributeLinesViaBlame(
|
|
9922
|
+
async _attributeLinesViaBlame(params) {
|
|
9923
|
+
const { headSha, changedFiles, prCommits } = params;
|
|
9185
9924
|
const prCommitShas = new Set(prCommits.map((c) => c.commitSha));
|
|
9186
|
-
const filesWithAdditions = changedFiles.filter(
|
|
9187
|
-
(file
|
|
9188
|
-
|
|
9925
|
+
const filesWithAdditions = changedFiles.filter((file) => {
|
|
9926
|
+
if (!file.patch || file.patch.trim().length === 0) {
|
|
9927
|
+
return false;
|
|
9928
|
+
}
|
|
9929
|
+
if (!file.sha) {
|
|
9930
|
+
return false;
|
|
9931
|
+
}
|
|
9932
|
+
return true;
|
|
9933
|
+
});
|
|
9189
9934
|
if (filesWithAdditions.length === 0) {
|
|
9190
9935
|
return [];
|
|
9191
9936
|
}
|
|
@@ -9193,8 +9938,13 @@ var GithubSCMLib = class extends SCMLib {
|
|
|
9193
9938
|
const blameMap = await this.githubSdk.getBlameBatch({
|
|
9194
9939
|
owner,
|
|
9195
9940
|
repo,
|
|
9196
|
-
ref:
|
|
9197
|
-
|
|
9941
|
+
ref: headSha,
|
|
9942
|
+
// Use commit SHA directly from PR.head.sha
|
|
9943
|
+
files: filesWithAdditions.map((f) => ({
|
|
9944
|
+
path: f.filename,
|
|
9945
|
+
blobSha: f.sha
|
|
9946
|
+
})),
|
|
9947
|
+
concurrency: GITHUB_API_CONCURRENCY
|
|
9198
9948
|
});
|
|
9199
9949
|
const allAttributions = [];
|
|
9200
9950
|
for (const file of filesWithAdditions) {
|
|
@@ -9225,12 +9975,6 @@ import {
|
|
|
9225
9975
|
fetch as undiciFetch,
|
|
9226
9976
|
ProxyAgent as ProxyAgent2
|
|
9227
9977
|
} from "undici";
|
|
9228
|
-
|
|
9229
|
-
// src/utils/contextLogger.ts
|
|
9230
|
-
import debugModule from "debug";
|
|
9231
|
-
var debug3 = debugModule("mobb:shared");
|
|
9232
|
-
|
|
9233
|
-
// src/features/analysis/scm/gitlab/gitlab.ts
|
|
9234
9978
|
init_env();
|
|
9235
9979
|
|
|
9236
9980
|
// src/features/analysis/scm/gitlab/types.ts
|
|
@@ -9780,6 +10524,12 @@ var GitlabSCMLib = class extends SCMLib {
|
|
|
9780
10524
|
async getSubmitRequests(_repoUrl) {
|
|
9781
10525
|
throw new Error("getSubmitRequests not implemented for GitLab");
|
|
9782
10526
|
}
|
|
10527
|
+
// Server-side submit-request search is GitHub-only; GitLab callers must not
// invoke this.
async searchSubmitRequests(_params) {
  throw new Error("searchSubmitRequests not implemented for GitLab");
}
// Same as above: server-side repository search is GitHub-only for now.
async searchRepos(_params) {
  throw new Error("searchRepos not implemented for GitLab");
}
|
|
9783
10533
|
// TODO: Add comprehensive tests for getPullRequestMetrics (GitLab)
|
|
9784
10534
|
// See clients/cli/src/features/analysis/scm/__tests__/github.test.ts:589-648 for reference
|
|
9785
10535
|
async getPullRequestMetrics(_prNumber) {
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "mobbdev",
|
|
3
|
-
"version": "1.1.
|
|
3
|
+
"version": "1.1.40",
|
|
4
4
|
"description": "Automated secure code remediation tool",
|
|
5
5
|
"repository": "git+https://github.com/mobb-dev/bugsy.git",
|
|
6
6
|
"main": "dist/index.mjs",
|
|
@@ -53,7 +53,7 @@
|
|
|
53
53
|
"dependencies": {
|
|
54
54
|
"@gitbeaker/requester-utils": "43.8.0",
|
|
55
55
|
"@gitbeaker/rest": "43.8.0",
|
|
56
|
-
"@modelcontextprotocol/sdk": "1.25.
|
|
56
|
+
"@modelcontextprotocol/sdk": "1.25.2",
|
|
57
57
|
"@octokit/core": "5.2.0",
|
|
58
58
|
"@octokit/request-error": "5.1.1",
|
|
59
59
|
"@openredaction/openredaction": "1.0.4",
|
|
@@ -95,7 +95,7 @@
|
|
|
95
95
|
"tar": "6.2.1",
|
|
96
96
|
"tmp": "0.2.5",
|
|
97
97
|
"tmp-promise": "3.0.3",
|
|
98
|
-
"undici": "6.
|
|
98
|
+
"undici": "6.23.0",
|
|
99
99
|
"uuid": "11.1.0",
|
|
100
100
|
"ws": "8.18.3",
|
|
101
101
|
"xml2js": "0.6.2",
|