mobbdev 1.1.39 → 1.1.41

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -12,7 +12,7 @@ var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "sy
12
12
 
13
13
  // src/features/analysis/scm/env.ts
14
14
  import { z as z15 } from "zod";
15
- var EnvVariablesZod, GITLAB_API_TOKEN, GITHUB_API_TOKEN, GIT_PROXY_HOST, MAX_UPLOAD_FILE_SIZE_MB;
15
+ var EnvVariablesZod, GITLAB_API_TOKEN, GITHUB_API_TOKEN, GIT_PROXY_HOST, MAX_UPLOAD_FILE_SIZE_MB, GITHUB_API_CONCURRENCY;
16
16
  var init_env = __esm({
17
17
  "src/features/analysis/scm/env.ts"() {
18
18
  "use strict";
@@ -20,13 +20,15 @@ var init_env = __esm({
20
20
  GITLAB_API_TOKEN: z15.string().optional(),
21
21
  GITHUB_API_TOKEN: z15.string().optional(),
22
22
  GIT_PROXY_HOST: z15.string().optional().default("http://tinyproxy:8888"),
23
- MAX_UPLOAD_FILE_SIZE_MB: z15.coerce.number().gt(0).default(5)
23
+ MAX_UPLOAD_FILE_SIZE_MB: z15.coerce.number().gt(0).default(5),
24
+ GITHUB_API_CONCURRENCY: z15.coerce.number().gt(0).optional().default(10)
24
25
  });
25
26
  ({
26
27
  GITLAB_API_TOKEN,
27
28
  GITHUB_API_TOKEN,
28
29
  GIT_PROXY_HOST,
29
- MAX_UPLOAD_FILE_SIZE_MB
30
+ MAX_UPLOAD_FILE_SIZE_MB,
31
+ GITHUB_API_CONCURRENCY
30
32
  } = EnvVariablesZod.parse(process.env));
31
33
  }
32
34
  });
@@ -1383,19 +1385,16 @@ import StreamZip from "node-stream-zip";
1383
1385
  import tmp from "tmp";
1384
1386
 
1385
1387
  // src/features/analysis/scm/errors.ts
1386
- var InvalidRepoUrlError = class extends Error {
1387
- constructor(m) {
1388
- super(m);
1389
- }
1390
- };
1391
1388
  var InvalidAccessTokenError = class extends Error {
1392
- constructor(m) {
1389
+ constructor(m, scmType) {
1393
1390
  super(m);
1391
+ this.scmType = scmType;
1394
1392
  }
1395
1393
  };
1396
1394
  var InvalidUrlPatternError = class extends Error {
1397
- constructor(m) {
1395
+ constructor(m, scmType) {
1398
1396
  super(m);
1397
+ this.scmType = scmType;
1399
1398
  }
1400
1399
  };
1401
1400
  var RefNotFoundError = class extends Error {
@@ -1403,12 +1402,38 @@ var RefNotFoundError = class extends Error {
1403
1402
  super(m);
1404
1403
  }
1405
1404
  };
1405
+ var ScmBadCredentialsError = class extends Error {
1406
+ constructor(m, scmType) {
1407
+ super(m);
1408
+ this.scmType = scmType;
1409
+ }
1410
+ };
1411
+ var InvalidRepoUrlError = class extends Error {
1412
+ constructor(m, scmType) {
1413
+ super(m);
1414
+ this.scmType = scmType;
1415
+ }
1416
+ };
1406
1417
  var RepoNoTokenAccessError = class extends Error {
1407
1418
  constructor(m, scmType) {
1408
1419
  super(m);
1409
1420
  this.scmType = scmType;
1410
1421
  }
1411
1422
  };
1423
+ var RateLimitError = class extends Error {
1424
+ constructor(m, scmType, retryAfter) {
1425
+ super(m);
1426
+ this.scmType = scmType;
1427
+ this.retryAfter = retryAfter;
1428
+ }
1429
+ };
1430
+ var NetworkError = class extends Error {
1431
+ constructor(m, scmType, errorCode) {
1432
+ super(m);
1433
+ this.scmType = scmType;
1434
+ this.errorCode = errorCode;
1435
+ }
1436
+ };
1412
1437
 
1413
1438
  // src/features/analysis/scm/utils/index.ts
1414
1439
  import { z as z14 } from "zod";
@@ -6820,6 +6845,35 @@ var SCMLib = class {
6820
6845
  password: accessToken
6821
6846
  });
6822
6847
  }
6848
+ /**
6849
+ * Search for PRs with optional filters and sorting.
6850
+ * IMPORTANT: Sort order must remain consistent across paginated requests
6851
+ * for cursor-based pagination to work correctly.
6852
+ *
6853
+ * Default implementation uses getSubmitRequests and applies filters/sorting in-memory.
6854
+ * Override in subclasses for provider-specific optimizations (e.g., GitHub Search API).
6855
+ *
6856
+ * @param params - Search parameters including filters, sort, and pagination
6857
+ * @returns Paginated search results with cursor
6858
+ */
6859
+ async searchSubmitRequests(_params) {
6860
+ throw new Error(
6861
+ "searchSubmitRequests is not implemented for this SCM provider"
6862
+ );
6863
+ }
6864
+ /**
6865
+ * Search repositories with pagination support.
6866
+ * IMPORTANT: Sort order must remain consistent across paginated requests
6867
+ * for cursor-based pagination to work correctly.
6868
+ *
6869
+ * Must be overridden in subclasses with provider-specific implementations.
6870
+ *
6871
+ * @param params - Search parameters including sort and pagination
6872
+ * @returns Paginated search results with cursor
6873
+ */
6874
+ async searchRepos(_params) {
6875
+ throw new Error("searchRepos is not implemented for this SCM provider");
6876
+ }
6823
6877
  /**
6824
6878
  * Fetches commits for multiple PRs in a single batch request.
6825
6879
  * This is an optimization that not all SCM providers may support efficiently.
@@ -6832,6 +6886,31 @@ var SCMLib = class {
6832
6886
  async getPrCommitsBatch(_repoUrl, _prNumbers) {
6833
6887
  throw new Error("getPrCommitsBatch not implemented for this SCM provider");
6834
6888
  }
6889
+ /**
6890
+ * Fetches additions and deletions counts for multiple PRs in batch.
6891
+ * More efficient than fetching individual PR details.
6892
+ *
6893
+ * @param repoUrl - Repository URL
6894
+ * @param prNumbers - Array of PR numbers to fetch metrics for
6895
+ * @returns Map of PR number to additions/deletions count
6896
+ */
6897
+ async getPrAdditionsDeletionsBatch(_repoUrl, _prNumbers) {
6898
+ throw new Error(
6899
+ "getPrAdditionsDeletionsBatch not implemented for this SCM provider"
6900
+ );
6901
+ }
6902
+ /**
6903
+ * Batch fetch PR data (additions/deletions + comments) for multiple PRs.
6904
+ * Only implemented for GitHub (via GraphQL). Other providers should override if supported.
6905
+ * This is more efficient than calling getPrAdditionsDeletionsBatch separately.
6906
+ *
6907
+ * @param _repoUrl - Repository URL
6908
+ * @param _prNumbers - Array of PR numbers to fetch data for
6909
+ * @returns Map of PR number to { changedLines, comments }
6910
+ */
6911
+ async getPrDataBatch(_repoUrl, _prNumbers) {
6912
+ throw new Error("getPrDataBatch not implemented for this SCM provider");
6913
+ }
6835
6914
  getAccessToken() {
6836
6915
  return this.accessToken || "";
6837
6916
  }
@@ -7070,6 +7149,12 @@ var AdoSCMLib = class extends SCMLib {
7070
7149
  async getSubmitRequests(_repoUrl) {
7071
7150
  throw new Error("getSubmitRequests not implemented for ADO");
7072
7151
  }
7152
+ async searchSubmitRequests(_params) {
7153
+ throw new Error("searchSubmitRequests not implemented for ADO");
7154
+ }
7155
+ async searchRepos(_params) {
7156
+ throw new Error("searchRepos not implemented for ADO");
7157
+ }
7073
7158
  // TODO: Add comprehensive tests for getPullRequestMetrics (ADO)
7074
7159
  // See clients/cli/src/features/analysis/scm/__tests__/github.test.ts:589-648 for reference
7075
7160
  async getPullRequestMetrics(_prNumber) {
@@ -7491,7 +7576,7 @@ var BitbucketSCMLib = class extends SCMLib {
7491
7576
  return String(z20.number().parse(pullRequestRes.id));
7492
7577
  } catch (e) {
7493
7578
  console.warn(
7494
- `error creating pull request for BB. Try number ${i + 1}`,
7579
+ `error creating pull request for BB. Try number ${String(i + 1).replace(/\n|\r/g, "")}`,
7495
7580
  e
7496
7581
  );
7497
7582
  await setTimeout3(1e3);
@@ -7646,6 +7731,12 @@ var BitbucketSCMLib = class extends SCMLib {
7646
7731
  async getSubmitRequests(_repoUrl) {
7647
7732
  throw new Error("getSubmitRequests not implemented for Bitbucket");
7648
7733
  }
7734
+ async searchSubmitRequests(_params) {
7735
+ throw new Error("searchSubmitRequests not implemented for Bitbucket");
7736
+ }
7737
+ async searchRepos(_params) {
7738
+ throw new Error("searchRepos not implemented for Bitbucket");
7739
+ }
7649
7740
  // TODO: Add comprehensive tests for getPullRequestMetrics (Bitbucket)
7650
7741
  // See clients/cli/src/features/analysis/scm/__tests__/github.test.ts:589-648 for reference
7651
7742
  async getPullRequestMetrics(_prNumber) {
@@ -7662,11 +7753,78 @@ var REPORT_DEFAULT_FILE_NAME = "report.json";
7662
7753
  init_env();
7663
7754
 
7664
7755
  // src/features/analysis/scm/github/GithubSCMLib.ts
7665
- import pLimit from "p-limit";
7756
+ init_env();
7757
+ import pLimit2 from "p-limit";
7666
7758
  import { z as z21 } from "zod";
7667
7759
 
7760
+ // src/features/analysis/scm/utils/cursorValidation.ts
7761
+ var MAX_CURSOR_VALUE = 1e5;
7762
+ function parseCursorSafe(cursor, defaultValue = 0, maxValue = MAX_CURSOR_VALUE) {
7763
+ if (cursor === null || cursor === void 0 || cursor === "") {
7764
+ return defaultValue;
7765
+ }
7766
+ const parsed = parseInt(cursor, 10);
7767
+ if (isNaN(parsed) || parsed < 0 || parsed > maxValue) {
7768
+ return defaultValue;
7769
+ }
7770
+ return parsed;
7771
+ }
7772
+
7668
7773
  // src/features/analysis/scm/github/github.ts
7669
7774
  import { RequestError } from "@octokit/request-error";
7775
+ import pLimit from "p-limit";
7776
+
7777
+ // src/utils/contextLogger.ts
7778
+ import debugModule from "debug";
7779
+ var debug3 = debugModule("mobb:shared");
7780
+ var _contextLogger = null;
7781
+ var createContextLogger = async () => {
7782
+ if (_contextLogger) return _contextLogger;
7783
+ try {
7784
+ let logger2;
7785
+ try {
7786
+ let module;
7787
+ try {
7788
+ const buildPath = "../../../../../tscommon/backend/build/src/utils/logger";
7789
+ module = await import(buildPath);
7790
+ } catch (e) {
7791
+ const sourcePath = "../../../../../tscommon/backend/src/utils/logger";
7792
+ module = await import(sourcePath);
7793
+ }
7794
+ logger2 = module.logger;
7795
+ } catch {
7796
+ }
7797
+ if (logger2) {
7798
+ _contextLogger = {
7799
+ info: (message, data) => data ? logger2.info(data, message) : logger2.info(message),
7800
+ debug: (message, data) => data ? logger2.debug(data, message) : logger2.debug(message),
7801
+ error: (message, data) => data ? logger2.error(data, message) : logger2.error(message)
7802
+ };
7803
+ return _contextLogger;
7804
+ }
7805
+ } catch {
7806
+ }
7807
+ _contextLogger = {
7808
+ info: (message, data) => debug3(message, data),
7809
+ debug: (message, data) => debug3(message, data),
7810
+ error: (message, data) => debug3(message, data)
7811
+ };
7812
+ return _contextLogger;
7813
+ };
7814
+ var contextLogger = {
7815
+ info: async (message, data) => {
7816
+ const logger2 = await createContextLogger();
7817
+ return logger2.info(message, data);
7818
+ },
7819
+ debug: async (message, data) => {
7820
+ const logger2 = await createContextLogger();
7821
+ return logger2.debug(message, data);
7822
+ },
7823
+ error: async (message, data) => {
7824
+ const logger2 = await createContextLogger();
7825
+ return logger2.error(message, data);
7826
+ }
7827
+ };
7670
7828
 
7671
7829
  // src/features/analysis/scm/github/consts.ts
7672
7830
  var POST_COMMENT_PATH = "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments";
@@ -7900,6 +8058,55 @@ function getOctoKit(options) {
7900
8058
  function isGithubActionActionToken(token) {
7901
8059
  return token.startsWith("ghs_");
7902
8060
  }
8061
+ function handleGitHubError(error, scmType = "GitHub" /* GitHub */) {
8062
+ const errorObj = error;
8063
+ const status = errorObj.status || errorObj.statusCode || errorObj.response?.status || errorObj.response?.statusCode;
8064
+ const headers = errorObj.headers || errorObj.response?.headers;
8065
+ const retryAfter = headers?.["retry-after"] ? Number.parseInt(headers["retry-after"], 10) : headers?.["x-ratelimit-reset"] ? Math.max(
8066
+ 0,
8067
+ Math.floor(
8068
+ (Number.parseInt(headers["x-ratelimit-reset"], 10) * 1e3 - Date.now()) / 1e3
8069
+ )
8070
+ ) : void 0;
8071
+ const errorMessage = errorObj.message || (error instanceof Error ? error.message : String(error));
8072
+ if (status === 403 && retryAfter !== void 0 || errorMessage.toLowerCase().includes("rate limit") || errorMessage.toLowerCase().includes("api rate limit exceeded")) {
8073
+ throw new RateLimitError(
8074
+ "GitHub API rate limit exceeded",
8075
+ scmType,
8076
+ retryAfter
8077
+ );
8078
+ }
8079
+ if (status === 401) {
8080
+ throw new InvalidAccessTokenError(
8081
+ "GitHub authentication failed - token may be expired or invalid",
8082
+ scmType
8083
+ );
8084
+ }
8085
+ if (status === 403) {
8086
+ throw new ScmBadCredentialsError(
8087
+ "GitHub access forbidden - insufficient permissions or invalid credentials",
8088
+ scmType
8089
+ );
8090
+ }
8091
+ if (status === 404) {
8092
+ throw new InvalidRepoUrlError(
8093
+ "GitHub repository or resource not found",
8094
+ scmType
8095
+ );
8096
+ }
8097
+ const errorCode = errorObj.code || errorObj.response?.code;
8098
+ if (errorCode === "ECONNREFUSED" || errorCode === "ETIMEDOUT" || errorCode === "ENOTFOUND" || errorCode === "EAI_AGAIN") {
8099
+ throw new NetworkError(
8100
+ `GitHub network error: ${errorMessage}`,
8101
+ scmType,
8102
+ errorCode
8103
+ );
8104
+ }
8105
+ if (error instanceof RateLimitError || error instanceof InvalidAccessTokenError || error instanceof ScmBadCredentialsError || error instanceof InvalidRepoUrlError || error instanceof NetworkError || error instanceof InvalidUrlPatternError) {
8106
+ throw error;
8107
+ }
8108
+ throw new Error(`GitHub API error: ${errorMessage}`);
8109
+ }
7903
8110
  async function githubValidateParams(url, accessToken) {
7904
8111
  try {
7905
8112
  const oktoKit = getOctoKit({ auth: accessToken, url });
@@ -7916,23 +8123,118 @@ async function githubValidateParams(url, accessToken) {
7916
8123
  }
7917
8124
  } catch (e) {
7918
8125
  console.log("could not init github scm", e);
7919
- const error = e;
7920
- const code = error.status || error.statusCode || error.response?.status || error.response?.statusCode || error.response?.code;
7921
- if (code === 401 || code === 403) {
7922
- throw new InvalidAccessTokenError(`invalid github access token`);
7923
- }
7924
- if (code === 404) {
7925
- throw new InvalidRepoUrlError(`invalid github repo Url ${url}`);
7926
- }
7927
- console.log("githubValidateParams error", e);
7928
- throw new InvalidRepoUrlError(
7929
- `cannot access GH repo URL: ${url} with the provided access token`
7930
- );
8126
+ handleGitHubError(e, "GitHub" /* GitHub */);
7931
8127
  }
7932
8128
  }
7933
8129
 
7934
8130
  // src/features/analysis/scm/github/github.ts
7935
8131
  var MAX_GH_PR_BODY_LENGTH = 65536;
8132
+ var BLAME_LARGE_FILE_THRESHOLD_BYTES = 1e6;
8133
+ var BLAME_THRESHOLD_REDUCTION_BYTES = 1e5;
8134
+ var BLAME_MIN_THRESHOLD_BYTES = 1e5;
8135
+ var GRAPHQL_INPUT_PATTERNS = {
8136
+ // File paths: most printable ASCII chars, unicode letters/numbers
8137
+ // Allows: letters, numbers, spaces, common punctuation, path separators
8138
+ // Disallows: control characters, null bytes
8139
+ path: /^[\p{L}\p{N}\p{Zs}\-._/@+#~%()[\]{}=!,;'&]+$/u,
8140
+ // Git refs: branch/tag names follow git-check-ref-format rules
8141
+ // Allows: letters, numbers, slashes, dots, hyphens, underscores
8142
+ // Can also be "ref:path" format for expressions
8143
+ ref: /^[\p{L}\p{N}\-._/:@]+$/u,
8144
+ // Git SHAs: strictly hexadecimal (short or full)
8145
+ sha: /^[0-9a-fA-F]+$/
8146
+ };
8147
+ function validateGraphQLInput(value, type2) {
8148
+ const pattern = GRAPHQL_INPUT_PATTERNS[type2];
8149
+ if (!pattern.test(value)) {
8150
+ void contextLogger.info(
8151
+ "[GraphQL] Input contains unexpected characters, proceeding with escaping",
8152
+ {
8153
+ type: type2,
8154
+ valueLength: value.length,
8155
+ // Log first 100 chars to help debug without exposing full value
8156
+ valueSample: value.slice(0, 100)
8157
+ }
8158
+ );
8159
+ return false;
8160
+ }
8161
+ return true;
8162
+ }
8163
+ function escapeGraphQLString(value) {
8164
+ return value.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f").replace(/[\b]/g, "\\b");
8165
+ }
8166
+ function safeGraphQLString(value, type2) {
8167
+ validateGraphQLInput(value, type2);
8168
+ return escapeGraphQLString(value);
8169
+ }
8170
+ function extractBlameRanges(data) {
8171
+ const fileData = data;
8172
+ if (fileData.blame?.ranges) {
8173
+ return fileData.blame.ranges.map((range) => ({
8174
+ startingLine: range.startingLine,
8175
+ endingLine: range.endingLine,
8176
+ commitSha: range.commit.oid
8177
+ }));
8178
+ }
8179
+ return void 0;
8180
+ }
8181
+ function buildBlameFragment(ref) {
8182
+ const escapedRef = safeGraphQLString(ref, "ref");
8183
+ return (path22, index) => {
8184
+ const escapedPath = safeGraphQLString(path22, "path");
8185
+ return `
8186
+ file${index}: object(expression: "${escapedRef}") {
8187
+ ... on Commit {
8188
+ ${GITHUB_GRAPHQL_FRAGMENTS.BLAME_RANGES.replace("$path", escapedPath)}
8189
+ }
8190
+ }`;
8191
+ };
8192
+ }
8193
+ function createBatchesByTotalSize(files, threshold) {
8194
+ const batches = [];
8195
+ let currentBatch = [];
8196
+ let currentBatchSize = 0;
8197
+ for (const file of files) {
8198
+ if (currentBatchSize + file.size > threshold && currentBatch.length > 0) {
8199
+ batches.push(currentBatch);
8200
+ currentBatch = [];
8201
+ currentBatchSize = 0;
8202
+ }
8203
+ currentBatch.push(file);
8204
+ currentBatchSize += file.size;
8205
+ }
8206
+ if (currentBatch.length > 0) {
8207
+ batches.push(currentBatch);
8208
+ }
8209
+ return batches;
8210
+ }
8211
+ async function fetchBlameForBatch(octokit, owner, repo, ref, files) {
8212
+ if (files.length === 0) {
8213
+ return /* @__PURE__ */ new Map();
8214
+ }
8215
+ return executeBatchGraphQL(octokit, owner, repo, {
8216
+ items: files.map((f) => f.path),
8217
+ aliasPrefix: "file",
8218
+ buildFragment: buildBlameFragment(ref),
8219
+ extractResult: extractBlameRanges
8220
+ });
8221
+ }
8222
+ async function processBlameAttempt(params) {
8223
+ const { octokit, owner, repo, ref, batches, concurrency } = params;
8224
+ const result = /* @__PURE__ */ new Map();
8225
+ const limit = pLimit(concurrency);
8226
+ const batchResults = await Promise.all(
8227
+ batches.map(
8228
+ (batch) => limit(() => fetchBlameForBatch(octokit, owner, repo, ref, batch))
8229
+ )
8230
+ );
8231
+ for (const batchResult of batchResults) {
8232
+ for (const [path22, blameData] of batchResult) {
8233
+ result.set(path22, blameData);
8234
+ }
8235
+ }
8236
+ return result;
8237
+ }
7936
8238
  async function executeBatchGraphQL(octokit, owner, repo, config2) {
7937
8239
  const { items, aliasPrefix, buildFragment, extractResult } = config2;
7938
8240
  if (items.length === 0) {
@@ -8520,20 +8822,223 @@ function getGithubSdk(params = {}) {
8520
8822
  }
8521
8823
  });
8522
8824
  },
8825
+ /**
8826
+ * Batch fetch PR data (additions/deletions + comments) for multiple PRs via GraphQL.
8827
+ * Combines PR_CHANGES and PR_COMMENTS fragments into a single API call for efficiency.
8828
+ * This is more efficient than calling getPrAdditionsDeletionsBatch and getPrCommentsBatch separately.
8829
+ */
8830
+ async getPrDataBatch(params2) {
8831
+ return executeBatchGraphQL(octokit, params2.owner, params2.repo, {
8832
+ items: params2.prNumbers,
8833
+ aliasPrefix: "pr",
8834
+ buildFragment: (prNumber, index) => `
8835
+ pr${index}: pullRequest(number: ${prNumber}) {
8836
+ ${GITHUB_GRAPHQL_FRAGMENTS.PR_CHANGES}
8837
+ ${GITHUB_GRAPHQL_FRAGMENTS.PR_COMMENTS}
8838
+ }`,
8839
+ extractResult: (data) => {
8840
+ const prData = data;
8841
+ if (prData.additions !== void 0 && prData.deletions !== void 0) {
8842
+ const comments = prData.comments?.nodes ? prData.comments.nodes.map((node) => ({
8843
+ author: node.author ? { login: node.author.login, type: node.author.__typename } : null,
8844
+ body: node.body
8845
+ })) : [];
8846
+ return {
8847
+ changedLines: {
8848
+ additions: prData.additions,
8849
+ deletions: prData.deletions
8850
+ },
8851
+ comments
8852
+ };
8853
+ }
8854
+ return void 0;
8855
+ }
8856
+ });
8857
+ },
8858
+ /**
8859
+ * Batch fetch blob sizes for multiple files via GraphQL.
8860
+ * Used to determine which files are too large to batch in blame queries.
8861
+ */
8862
+ async getBlobSizesBatch(params2) {
8863
+ return executeBatchGraphQL(octokit, params2.owner, params2.repo, {
8864
+ items: params2.blobShas,
8865
+ aliasPrefix: "blob",
8866
+ buildFragment: (sha, index) => {
8867
+ const escapedSha = safeGraphQLString(sha, "sha");
8868
+ return `
8869
+ blob${index}: object(oid: "${escapedSha}") {
8870
+ ... on Blob {
8871
+ byteSize
8872
+ }
8873
+ }`;
8874
+ },
8875
+ extractResult: (data) => {
8876
+ const blobData = data;
8877
+ if (blobData.byteSize !== void 0) {
8878
+ return blobData.byteSize;
8879
+ }
8880
+ return void 0;
8881
+ }
8882
+ });
8883
+ },
8523
8884
  /**
8524
8885
  * Batch fetch blame data for multiple files via GraphQL.
8525
8886
  * Uses GITHUB_GRAPHQL_FRAGMENTS.BLAME_RANGES for the field selection.
8887
+ *
8888
+ * Optimized to handle large files with retry logic:
8889
+ * - Files above threshold are processed individually with rate limiting
8890
+ * - On failure, retries with reduced threshold (-100KB) and concurrency (-1)
8891
+ * - Continues until success or threshold < 100KB
8892
+ *
8893
+ * @param params.files - Array of files with path and blobSha for size lookup
8894
+ * @param params.concurrency - Max concurrent requests for large files (default: 2)
8526
8895
  */
8527
8896
  async getBlameBatch(params2) {
8897
+ const {
8898
+ owner,
8899
+ repo,
8900
+ ref,
8901
+ files,
8902
+ concurrency: initialConcurrency = 2
8903
+ } = params2;
8904
+ if (files.length === 0) {
8905
+ return /* @__PURE__ */ new Map();
8906
+ }
8907
+ const filesWithSizes = await this.fetchFilesWithSizes(owner, repo, files);
8908
+ return this.executeBlameWithRetries({
8909
+ owner,
8910
+ repo,
8911
+ ref,
8912
+ filesWithSizes,
8913
+ initialConcurrency
8914
+ });
8915
+ },
8916
+ /**
8917
+ * Fetches blob sizes and creates a list of files with their sizes.
8918
+ */
8919
+ async fetchFilesWithSizes(owner, repo, files) {
8920
+ const blobShas = files.map((f) => f.blobSha);
8921
+ const blobSizes = await this.getBlobSizesBatch({ owner, repo, blobShas });
8922
+ return files.map((file) => ({
8923
+ ...file,
8924
+ size: blobSizes.get(file.blobSha) ?? 0
8925
+ }));
8926
+ },
8927
+ /**
8928
+ * Executes blame fetching with retry logic on failure.
8929
+ * Reduces threshold and concurrency on each retry attempt.
8930
+ */
8931
+ async executeBlameWithRetries(params2) {
8932
+ const { owner, repo, ref, filesWithSizes, initialConcurrency } = params2;
8933
+ let threshold = BLAME_LARGE_FILE_THRESHOLD_BYTES;
8934
+ let concurrency = initialConcurrency;
8935
+ let attempt = 1;
8936
+ let lastError = null;
8937
+ while (threshold >= BLAME_MIN_THRESHOLD_BYTES) {
8938
+ const batches = createBatchesByTotalSize(filesWithSizes, threshold);
8939
+ this.logBlameAttemptStart(
8940
+ attempt,
8941
+ threshold,
8942
+ concurrency,
8943
+ filesWithSizes.length,
8944
+ batches.length,
8945
+ owner,
8946
+ repo,
8947
+ ref
8948
+ );
8949
+ try {
8950
+ const result = await processBlameAttempt({
8951
+ octokit,
8952
+ owner,
8953
+ repo,
8954
+ ref,
8955
+ batches,
8956
+ concurrency
8957
+ });
8958
+ this.logBlameAttemptSuccess(attempt, result.size, owner, repo);
8959
+ return result;
8960
+ } catch (error) {
8961
+ lastError = error instanceof Error ? error : new Error(String(error));
8962
+ this.logBlameAttemptFailure(
8963
+ attempt,
8964
+ threshold,
8965
+ concurrency,
8966
+ lastError.message,
8967
+ owner,
8968
+ repo
8969
+ );
8970
+ threshold -= BLAME_THRESHOLD_REDUCTION_BYTES;
8971
+ concurrency = Math.max(1, concurrency - 1);
8972
+ attempt++;
8973
+ }
8974
+ }
8975
+ void contextLogger.error("[getBlameBatch] Exhausted all retries", {
8976
+ attempts: attempt - 1,
8977
+ repo: `${owner}/${repo}`,
8978
+ ref,
8979
+ error: lastError?.message || "unknown"
8980
+ });
8981
+ throw lastError || new Error("getBlameBatch failed after all retries");
8982
+ },
8983
+ /**
8984
+ * Logs the start of a blame batch attempt.
8985
+ */
8986
+ logBlameAttemptStart(attempt, threshold, concurrency, totalFiles, batchCount, owner, repo, ref) {
8987
+ void contextLogger.debug("[getBlameBatch] Processing attempt", {
8988
+ attempt,
8989
+ threshold,
8990
+ concurrency,
8991
+ totalFiles,
8992
+ batchCount,
8993
+ repo: `${owner}/${repo}`,
8994
+ ref
8995
+ });
8996
+ },
8997
+ /**
8998
+ * Logs a successful blame batch attempt.
8999
+ */
9000
+ logBlameAttemptSuccess(attempt, filesProcessed, owner, repo) {
9001
+ void contextLogger.debug("[getBlameBatch] Successfully processed batch", {
9002
+ attempt,
9003
+ filesProcessed,
9004
+ repo: `${owner}/${repo}`
9005
+ });
9006
+ },
9007
+ /**
9008
+ * Logs a failed blame batch attempt.
9009
+ */
9010
+ logBlameAttemptFailure(attempt, threshold, concurrency, errorMessage, owner, repo) {
9011
+ void contextLogger.debug(
9012
+ "[getBlameBatch] Attempt failed, retrying with reduced threshold",
9013
+ {
9014
+ attempt,
9015
+ threshold,
9016
+ concurrency,
9017
+ error: errorMessage,
9018
+ repo: `${owner}/${repo}`
9019
+ }
9020
+ );
9021
+ },
9022
+ /**
9023
+ * Batch fetch blame data for multiple files via GraphQL (legacy interface).
9024
+ * This is a convenience wrapper that accepts file paths without blob SHAs.
9025
+ * Note: This does NOT perform size-based optimization. Use getBlameBatch with
9026
+ * files array including blobSha for optimized large file handling.
9027
+ */
9028
+ async getBlameBatchByPaths(params2) {
9029
+ const escapedRef = safeGraphQLString(params2.ref, "ref");
8528
9030
  return executeBatchGraphQL(octokit, params2.owner, params2.repo, {
8529
9031
  items: params2.filePaths,
8530
9032
  aliasPrefix: "file",
8531
- buildFragment: (path22, index) => `
8532
- file${index}: object(expression: "${params2.ref}") {
9033
+ buildFragment: (path22, index) => {
9034
+ const escapedPath = safeGraphQLString(path22, "path");
9035
+ return `
9036
+ file${index}: object(expression: "${escapedRef}") {
8533
9037
  ... on Commit {
8534
- ${GITHUB_GRAPHQL_FRAGMENTS.BLAME_RANGES.replace("$path", path22)}
9038
+ ${GITHUB_GRAPHQL_FRAGMENTS.BLAME_RANGES.replace("$path", escapedPath)}
8535
9039
  }
8536
- }`,
9040
+ }`;
9041
+ },
8537
9042
  extractResult: (data) => {
8538
9043
  const fileData = data;
8539
9044
  if (fileData.blame?.ranges) {
@@ -8555,12 +9060,15 @@ function getGithubSdk(params = {}) {
8555
9060
  return executeBatchGraphQL(octokit, params2.owner, params2.repo, {
8556
9061
  items: params2.commitShas,
8557
9062
  aliasPrefix: "commit",
8558
- buildFragment: (sha, index) => `
8559
- commit${index}: object(oid: "${sha}") {
9063
+ buildFragment: (sha, index) => {
9064
+ const escapedSha = safeGraphQLString(sha, "sha");
9065
+ return `
9066
+ commit${index}: object(oid: "${escapedSha}") {
8560
9067
  ... on Commit {
8561
9068
  ${GITHUB_GRAPHQL_FRAGMENTS.COMMIT_TIMESTAMP}
8562
9069
  }
8563
- }`,
9070
+ }`;
9071
+ },
8564
9072
  extractResult: (data) => {
8565
9073
  const commitData = data;
8566
9074
  if (commitData.oid && commitData.committedDate) {
@@ -8583,12 +9091,76 @@ function getGithubSdk(params = {}) {
8583
9091
  }
8584
9092
  );
8585
9093
  return res;
9094
+ },
9095
+ /**
9096
+ * Search PRs using GitHub's Search API with sorting
9097
+ * https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-issues-and-pull-requests
9098
+ */
9099
+ async searchPullRequests(params2) {
9100
+ const {
9101
+ owner,
9102
+ repo,
9103
+ updatedAfter,
9104
+ state = "all",
9105
+ sort = { field: "updated", order: "desc" },
9106
+ perPage = 10,
9107
+ page = 1
9108
+ } = params2;
9109
+ let query = `repo:${owner}/${repo} is:pr`;
9110
+ if (updatedAfter) {
9111
+ const dateStr = updatedAfter.toISOString().split("T")[0];
9112
+ query += ` updated:>=${dateStr}`;
9113
+ }
9114
+ if (state !== "all") {
9115
+ query += ` is:${state}`;
9116
+ }
9117
+ const githubSortField = sort.field === "updated" || sort.field === "created" ? sort.field : "comments";
9118
+ const response = await octokit.rest.search.issuesAndPullRequests({
9119
+ q: query,
9120
+ sort: githubSortField,
9121
+ order: sort.order,
9122
+ per_page: perPage,
9123
+ page
9124
+ });
9125
+ return {
9126
+ items: response.data.items,
9127
+ totalCount: response.data.total_count,
9128
+ hasMore: page * perPage < response.data.total_count
9129
+ };
9130
+ },
9131
+ /**
9132
+ * Search repositories using GitHub's Search API.
9133
+ * Docs: https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-repositories
9134
+ */
9135
+ async searchRepositories(params2) {
9136
+ const {
9137
+ org,
9138
+ sort = { field: "updated", order: "desc" },
9139
+ perPage = 10,
9140
+ page = 1
9141
+ } = params2;
9142
+ if (!org) {
9143
+ throw new Error("Organization is required for repository search");
9144
+ }
9145
+ const query = `org:${org}`;
9146
+ const githubSortField = sort.field === "name" ? void 0 : "updated";
9147
+ const response = await octokit.rest.search.repos({
9148
+ q: query,
9149
+ sort: githubSortField,
9150
+ order: sort.order,
9151
+ per_page: perPage,
9152
+ page
9153
+ });
9154
+ return {
9155
+ items: response.data.items,
9156
+ totalCount: response.data.total_count,
9157
+ hasMore: page * perPage < response.data.total_count
9158
+ };
8586
9159
  }
8587
9160
  };
8588
9161
  }
8589
9162
 
8590
9163
  // src/features/analysis/scm/github/GithubSCMLib.ts
8591
- var GITHUB_COMMIT_FETCH_CONCURRENCY = parseInt(process.env["GITHUB_COMMIT_CONCURRENCY"] || "10", 10) || 10;
8592
9164
  function determinePrStatus(state, isDraft) {
8593
9165
  switch (state) {
8594
9166
  case "CLOSED":
@@ -8599,7 +9171,7 @@ function determinePrStatus(state, isDraft) {
8599
9171
  return isDraft ? "DRAFT" /* Draft */ : "ACTIVE" /* Active */;
8600
9172
  }
8601
9173
  }
8602
- var GithubSCMLib = class extends SCMLib {
9174
+ var GithubSCMLib = class _GithubSCMLib extends SCMLib {
8603
9175
  // we don't always need a url, what's important is that we have an access token
8604
9176
  constructor(url, accessToken, scmOrg) {
8605
9177
  super(url, accessToken, scmOrg);
@@ -8955,7 +9527,7 @@ var GithubSCMLib = class extends SCMLib {
8955
9527
  }),
8956
9528
  this.getPrDiff({ pull_number: prNumber })
8957
9529
  ]);
8958
- const limit = pLimit(GITHUB_COMMIT_FETCH_CONCURRENCY);
9530
+ const limit = pLimit2(GITHUB_API_CONCURRENCY);
8959
9531
  const commits = await Promise.all(
8960
9532
  commitsRes.data.map(
8961
9533
  (commit) => limit(
@@ -9009,7 +9581,7 @@ var GithubSCMLib = class extends SCMLib {
9009
9581
  removed: changedLinesData.deletions
9010
9582
  } : { added: 0, removed: 0 };
9011
9583
  const comments = commentsMap.get(pr.number) || [];
9012
- const tickets = this._extractLinearTicketsFromComments(comments);
9584
+ const tickets = _GithubSCMLib.extractLinearTicketsFromComments(comments);
9013
9585
  return {
9014
9586
  submitRequestId: String(pr.number),
9015
9587
  submitRequestNumber: pr.number,
@@ -9028,6 +9600,59 @@ var GithubSCMLib = class extends SCMLib {
9028
9600
  });
9029
9601
  return submitRequests;
9030
9602
  }
9603
+ /**
9604
+ * Override searchSubmitRequests to use GitHub's Search API for efficient pagination.
9605
+ * This is much faster than fetching all PRs and filtering in-memory.
9606
+ */
9607
+ async searchSubmitRequests(params) {
9608
+ this._validateAccessToken();
9609
+ const { owner, repo } = parseGithubOwnerAndRepo(params.repoUrl);
9610
+ const page = parseCursorSafe(params.cursor, 1);
9611
+ const perPage = params.limit || 10;
9612
+ const sort = params.sort || { field: "updated", order: "desc" };
9613
+ const searchResult = await this.githubSdk.searchPullRequests({
9614
+ owner,
9615
+ repo,
9616
+ updatedAfter: params.filters?.updatedAfter,
9617
+ state: params.filters?.state,
9618
+ sort,
9619
+ perPage,
9620
+ page
9621
+ });
9622
+ const results = searchResult.items.map((issue) => {
9623
+ let status = "open";
9624
+ if (issue.state === "closed") {
9625
+ status = issue.pull_request?.merged_at ? "merged" : "closed";
9626
+ } else if (issue.draft) {
9627
+ status = "draft";
9628
+ }
9629
+ return {
9630
+ submitRequestId: String(issue.number),
9631
+ submitRequestNumber: issue.number,
9632
+ title: issue.title,
9633
+ status,
9634
+ sourceBranch: "",
9635
+ // Not available in search API
9636
+ targetBranch: "",
9637
+ // Not available in search API
9638
+ authorName: issue.user?.login,
9639
+ authorEmail: void 0,
9640
+ // Not available in search API
9641
+ createdAt: new Date(issue.created_at),
9642
+ updatedAt: new Date(issue.updated_at),
9643
+ description: issue.body || void 0,
9644
+ tickets: [],
9645
+ // Would need separate parsing
9646
+ changedLines: { added: 0, removed: 0 }
9647
+ // Not available in search API
9648
+ };
9649
+ });
9650
+ return {
9651
+ results,
9652
+ nextCursor: searchResult.hasMore ? String(page + 1) : void 0,
9653
+ hasMore: searchResult.hasMore
9654
+ };
9655
+ }
9031
9656
  /**
9032
9657
  * Fetches commits for multiple PRs in a single GraphQL request.
9033
9658
  * Much more efficient than calling getSubmitRequestDiff for each PR.
@@ -9041,6 +9666,109 @@ var GithubSCMLib = class extends SCMLib {
9041
9666
  const { owner, repo } = parseGithubOwnerAndRepo(repoUrl);
9042
9667
  return this.githubSdk.getPrCommitsBatch({ owner, repo, prNumbers });
9043
9668
  }
9669
+ /**
9670
+ * Fetches additions and deletions counts for multiple PRs in a single GraphQL request.
9671
+ * Used to enrich search results with changed lines data.
9672
+ *
9673
+ * @param repoUrl - Repository URL
9674
+ * @param prNumbers - Array of PR numbers to fetch metrics for
9675
+ * @returns Map of PR number to additions/deletions count
9676
+ */
9677
+ async getPrAdditionsDeletionsBatch(repoUrl, prNumbers) {
9678
+ this._validateAccessToken();
9679
+ const { owner, repo } = parseGithubOwnerAndRepo(repoUrl);
9680
+ return this.githubSdk.getPrAdditionsDeletionsBatch({
9681
+ owner,
9682
+ repo,
9683
+ prNumbers
9684
+ });
9685
+ }
9686
+ /**
9687
+ * Batch fetch PR data (additions/deletions + comments) for multiple PRs.
9688
+ * Combines both metrics into a single GraphQL call for efficiency.
9689
+ *
9690
+ * @param repoUrl - Repository URL
9691
+ * @param prNumbers - Array of PR numbers to fetch data for
9692
+ * @returns Map of PR number to { changedLines, comments }
9693
+ */
9694
+ async getPrDataBatch(repoUrl, prNumbers) {
9695
+ this._validateAccessToken();
9696
+ const { owner, repo } = parseGithubOwnerAndRepo(repoUrl);
9697
+ return this.githubSdk.getPrDataBatch({
9698
+ owner,
9699
+ repo,
9700
+ prNumbers
9701
+ });
9702
+ }
9703
+ /**
9704
+ * Override searchRepos to use GitHub's Search API for efficient pagination.
9705
+ * This is much faster than fetching all repos and filtering in-memory.
9706
+ *
9707
+ * Note: GitHub Search API doesn't support sorting by name, so when name sorting
9708
+ * is requested, we fall back to fetching all repos and sorting in-memory.
9709
+ */
9710
+ async searchRepos(params) {
9711
+ this._validateAccessToken();
9712
+ const sort = params.sort || { field: "updated", order: "desc" };
9713
+ if (!params.scmOrg || sort.field === "name") {
9714
+ return this.searchReposInMemory(params);
9715
+ }
9716
+ return this.searchReposWithApi(params);
9717
+ }
9718
+ /**
9719
+ * Search repos by fetching all and sorting/paginating in-memory.
9720
+ * Used when name sorting is requested or no organization is provided.
9721
+ */
9722
+ async searchReposInMemory(params) {
9723
+ const repos = await this.getRepoList(params.scmOrg);
9724
+ const sort = params.sort || { field: "updated", order: "desc" };
9725
+ const sortOrder = sort.order === "asc" ? 1 : -1;
9726
+ const sortedRepos = [...repos].sort((a, b) => {
9727
+ if (sort.field === "name") {
9728
+ return a.repoName.localeCompare(b.repoName) * sortOrder;
9729
+ }
9730
+ const aDate = a.repoUpdatedAt ? Date.parse(a.repoUpdatedAt) : 0;
9731
+ const bDate = b.repoUpdatedAt ? Date.parse(b.repoUpdatedAt) : 0;
9732
+ return (aDate - bDate) * sortOrder;
9733
+ });
9734
+ const limit = params.limit || 10;
9735
+ const offset = parseCursorSafe(params.cursor, 0);
9736
+ const paged = sortedRepos.slice(offset, offset + limit);
9737
+ const nextOffset = offset + limit;
9738
+ return {
9739
+ results: paged,
9740
+ nextCursor: nextOffset < sortedRepos.length ? String(nextOffset) : void 0,
9741
+ hasMore: nextOffset < sortedRepos.length
9742
+ };
9743
+ }
9744
+ /**
9745
+ * Search repos using GitHub Search API for efficient server-side pagination.
9746
+ * Only supports date-based sorting (updated/created).
9747
+ */
9748
+ async searchReposWithApi(params) {
9749
+ const page = parseCursorSafe(params.cursor, 1);
9750
+ const perPage = params.limit || 10;
9751
+ const sort = params.sort || { field: "updated", order: "desc" };
9752
+ const searchResult = await this.githubSdk.searchRepositories({
9753
+ org: params.scmOrg,
9754
+ sort,
9755
+ perPage,
9756
+ page
9757
+ });
9758
+ const results = searchResult.items.map((repo) => ({
9759
+ repoName: repo.name,
9760
+ repoUrl: repo.html_url || repo.url,
9761
+ repoOwner: repo.owner?.login || "",
9762
+ repoLanguages: repo.language ? [repo.language] : [],
9763
+ repoIsPublic: !repo.private,
9764
+ repoUpdatedAt: repo.updated_at || null
9765
+ }));
9766
+ return {
9767
+ results,
9768
+ nextCursor: searchResult.hasMore ? String(page + 1) : void 0,
9769
+ hasMore: searchResult.hasMore
9770
+ };
9771
+ }
9044
9772
  async getPullRequestMetrics(prNumber) {
9045
9773
  this._validateAccessTokenAndUrl();
9046
9774
  const { owner, repo } = parseGithubOwnerAndRepo(this.url);
@@ -9093,7 +9821,7 @@ var GithubSCMLib = class extends SCMLib {
9093
9821
  * Parse a Linear ticket from URL and name
9094
9822
  * Returns null if invalid or missing data
9095
9823
  */
9096
- _parseLinearTicket(url, name) {
9824
+ static _parseLinearTicket(url, name) {
9097
9825
  if (!name || !url) {
9098
9826
  return null;
9099
9827
  }
@@ -9104,8 +9832,9 @@ var GithubSCMLib = class extends SCMLib {
9104
9832
  }
9105
9833
  /**
9106
9834
  * Extract Linear ticket links from pre-fetched comments (pure function, no API calls)
9835
+ * Public static method so it can be reused by backend services.
9107
9836
  */
9108
- _extractLinearTicketsFromComments(comments) {
9837
+ static extractLinearTicketsFromComments(comments) {
9109
9838
  const tickets = [];
9110
9839
  const seen = /* @__PURE__ */ new Set();
9111
9840
  for (const comment of comments) {
@@ -9114,7 +9843,7 @@ var GithubSCMLib = class extends SCMLib {
9114
9843
  const htmlPattern = /<a href="(https:\/\/linear\.app\/[^"]+)">([A-Z]+-\d+)<\/a>/g;
9115
9844
  let match;
9116
9845
  while ((match = htmlPattern.exec(body)) !== null) {
9117
- const ticket = this._parseLinearTicket(match[1], match[2]);
9846
+ const ticket = _GithubSCMLib._parseLinearTicket(match[1], match[2]);
9118
9847
  if (ticket && !seen.has(`${ticket.name}|${ticket.url}`)) {
9119
9848
  seen.add(`${ticket.name}|${ticket.url}`);
9120
9849
  tickets.push(ticket);
@@ -9122,7 +9851,7 @@ var GithubSCMLib = class extends SCMLib {
9122
9851
  }
9123
9852
  const markdownPattern = /\[([A-Z]+-\d+)\]\((https:\/\/linear\.app\/[^)]+)\)/g;
9124
9853
  while ((match = markdownPattern.exec(body)) !== null) {
9125
- const ticket = this._parseLinearTicket(match[2], match[1]);
9854
+ const ticket = _GithubSCMLib._parseLinearTicket(match[2], match[1]);
9126
9855
  if (ticket && !seen.has(`${ticket.name}|${ticket.url}`)) {
9127
9856
  seen.add(`${ticket.name}|${ticket.url}`);
9128
9857
  tickets.push(ticket);
@@ -9184,6 +9913,11 @@ var GithubSCMLib = class extends SCMLib {
9184
9913
  /**
9185
9914
  * Optimized helper to attribute PR lines to commits using blame API
9186
9915
  * Batch blame queries for minimal API call time (1 call instead of M calls)
9916
+ *
9917
+ * Uses size-based batching to handle large files:
9918
+ * - Files > 1MB are processed individually with rate limiting
9919
+ * - Smaller files are batched together in a single request
9920
+ * This prevents GitHub API timeouts (~10s) on large generated files.
9187
9921
  */
9188
9922
  async _attributeLinesViaBlame(params) {
9189
9923
  const { headSha, changedFiles, prCommits } = params;
@@ -9192,19 +9926,25 @@ var GithubSCMLib = class extends SCMLib {
9192
9926
  if (!file.patch || file.patch.trim().length === 0) {
9193
9927
  return false;
9194
9928
  }
9929
+ if (!file.sha) {
9930
+ return false;
9931
+ }
9195
9932
  return true;
9196
9933
  });
9197
9934
  if (filesWithAdditions.length === 0) {
9198
9935
  return [];
9199
9936
  }
9200
9937
  const { owner, repo } = parseGithubOwnerAndRepo(this.url);
9201
- const refToUse = headSha;
9202
9938
  const blameMap = await this.githubSdk.getBlameBatch({
9203
9939
  owner,
9204
9940
  repo,
9205
- ref: refToUse,
9941
+ ref: headSha,
9206
9942
  // Use commit SHA directly from PR.head.sha
9207
- filePaths: filesWithAdditions.map((f) => f.filename)
9943
+ files: filesWithAdditions.map((f) => ({
9944
+ path: f.filename,
9945
+ blobSha: f.sha
9946
+ })),
9947
+ concurrency: GITHUB_API_CONCURRENCY
9208
9948
  });
9209
9949
  const allAttributions = [];
9210
9950
  for (const file of filesWithAdditions) {
@@ -9235,12 +9975,6 @@ import {
9235
9975
  fetch as undiciFetch,
9236
9976
  ProxyAgent as ProxyAgent2
9237
9977
  } from "undici";
9238
-
9239
- // src/utils/contextLogger.ts
9240
- import debugModule from "debug";
9241
- var debug3 = debugModule("mobb:shared");
9242
-
9243
- // src/features/analysis/scm/gitlab/gitlab.ts
9244
9978
  init_env();
9245
9979
 
9246
9980
  // src/features/analysis/scm/gitlab/types.ts
@@ -9790,6 +10524,12 @@ var GitlabSCMLib = class extends SCMLib {
9790
10524
  async getSubmitRequests(_repoUrl) {
9791
10525
  throw new Error("getSubmitRequests not implemented for GitLab");
9792
10526
  }
10527
+ // NOTE(review): server-side submit-request search is GitHub-only for now;
+ // the GitLab implementation has not been wired up, so this always throws.
+ async searchSubmitRequests(_params) {
10528
+ throw new Error("searchSubmitRequests not implemented for GitLab");
10529
+ }
10530
+ // NOTE(review): repo search has no GitLab implementation yet (GitHub's
+ // version uses the GitHub Search API); callers must not reach this path.
+ async searchRepos(_params) {
10531
+ throw new Error("searchRepos not implemented for GitLab");
10532
+ }
9793
10533
  // TODO: Add comprehensive tests for getPullRequestMetrics (GitLab)
9794
10534
  // See clients/cli/src/features/analysis/scm/__tests__/github.test.ts:589-648 for reference
9795
10535
  async getPullRequestMetrics(_prNumber) {
@@ -10847,10 +11587,12 @@ import tmp2 from "tmp";
10847
11587
  import { z as z29 } from "zod";
10848
11588
 
10849
11589
  // src/commands/handleMobbLogin.ts
10850
- import crypto from "crypto";
10851
- import os from "os";
10852
11590
  import chalk3 from "chalk";
10853
11591
  import Debug6 from "debug";
11592
+
11593
+ // src/commands/AuthManager.ts
11594
+ import crypto from "crypto";
11595
+ import os from "os";
10854
11596
  import open from "open";
10855
11597
 
10856
11598
  // src/features/analysis/graphql/gql.ts
@@ -11606,10 +12348,174 @@ function getConfigStore() {
11606
12348
  }
11607
12349
  var configStore = getConfigStore();
11608
12350
 
11609
- // src/commands/handleMobbLogin.ts
11610
- var debug7 = Debug6("mobbdev:commands");
12351
+ // src/commands/AuthManager.ts
11611
12352
  var LOGIN_MAX_WAIT = 10 * 60 * 1e3;
11612
12353
  var LOGIN_CHECK_DELAY = 5 * 1e3;
12354
/**
 * AuthManager owns the CLI login lifecycle against the Mobb web app:
 * it creates an RSA key pair, registers a CLI login session over GraphQL,
 * builds the browser login URL, polls for the encrypted API token, and
 * decrypts/persists that token once the user completes the web flow.
 */
var AuthManager = class {
  // RSA key pair: the public half is sent to the server so the API token
  // comes back encrypted; the private half decrypts it locally.
  publicKey;
  privateKey;
  // Server-side id of the pending CLI login session.
  loginId;
  gqlClient;
  // Most recently generated browser login URL (null after cleanup()).
  currentBrowserUrl;
  // Tri-state cache: null = not checked yet, true/false = last check result.
  authenticated = null;
  resolvedWebAppUrl;
  resolvedApiUrl;
  constructor(webAppUrl, apiUrl) {
    this.resolvedWebAppUrl = webAppUrl || WEB_APP_URL;
    this.resolvedApiUrl = apiUrl || API_URL;
  }
  /**
   * Opens the previously generated login URL in the default browser.
   * @returns false when no URL has been generated yet.
   */
  openUrlInBrowser() {
    if (!this.currentBrowserUrl) {
      return false;
    }
    // NOTE(review): fire-and-forget — the promise returned by open() is
    // not awaited here.
    open(this.currentBrowserUrl);
    return true;
  }
  /**
   * Polls the server for the encrypted API token until it appears or
   * LOGIN_MAX_WAIT elapses, then validates and persists the token.
   * @returns true when a valid token was obtained and stored.
   */
  async waitForAuthentication() {
    const maxAttempts = LOGIN_MAX_WAIT / LOGIN_CHECK_DELAY;
    let token = null;
    for (let attempt = 0; attempt < maxAttempts; attempt++) {
      token = await this.getApiToken();
      if (token) {
        break;
      }
      await sleep(LOGIN_CHECK_DELAY);
    }
    if (!token) {
      return false;
    }
    // Re-create the client with the fresh token before validating it.
    this.gqlClient = new GQLClient({
      apiKey: token,
      type: "apiKey",
      apiUrl: this.resolvedApiUrl
    });
    const tokenIsValid = await this.gqlClient.validateUserToken();
    if (!tokenIsValid) {
      return false;
    }
    configStore.set("apiToken", token);
    this.authenticated = true;
    return true;
  }
  /**
   * Checks if the user is already authenticated; the result of the first
   * server round-trip is cached on the instance.
   */
  async isAuthenticated() {
    if (this.authenticated === null) {
      const { isAuthenticated } = await this.checkAuthentication();
      this.authenticated = isAuthenticated;
    }
    return this.authenticated;
  }
  /**
   * Verifies connectivity and token validity against the server.
   * Never throws: failures are reported via the returned message.
   */
  async checkAuthentication(apiKey) {
    try {
      if (!this.gqlClient) {
        this.gqlClient = this.getGQLClient(apiKey);
      }
      const connected = await this.gqlClient.verifyApiConnection();
      if (!connected) {
        return {
          isAuthenticated: false,
          message: "Failed to connect to Mobb server"
        };
      }
      const tokenOk = await this.gqlClient.validateUserToken();
      if (!tokenOk) {
        return {
          isAuthenticated: false,
          message: "User token validation failed"
        };
      }
    } catch (error) {
      return {
        isAuthenticated: false,
        message: error instanceof Error ? error.message : "Unknown authentication error"
      };
    }
    return { isAuthenticated: true, message: "Successfully authenticated" };
  }
  /**
   * Generates a login URL for manual authentication.
   * @returns the browser URL, or null when session creation fails.
   */
  async generateLoginUrl(loginContext) {
    try {
      if (!this.gqlClient) {
        this.gqlClient = this.getGQLClient();
      }
      const keyPair = crypto.generateKeyPairSync("rsa", {
        modulusLength: 2048
      });
      this.publicKey = keyPair.publicKey;
      this.privateKey = keyPair.privateKey;
      // Register the session server-side; the server will use this public
      // key to encrypt the API token for us.
      const exportedPublicKey = this.publicKey.export({ format: "pem", type: "pkcs1" }).toString();
      this.loginId = await this.gqlClient.createCliLogin({
        publicKey: exportedPublicKey
      });
      const webLoginUrl = `${this.resolvedWebAppUrl}/cli-login`;
      this.currentBrowserUrl = loginContext ? buildLoginUrl(webLoginUrl, this.loginId, os.hostname(), loginContext) : `${webLoginUrl}/${this.loginId}?hostname=${os.hostname()}`;
      return this.currentBrowserUrl;
    } catch (error) {
      console.error("Failed to generate login URL:", error);
      return null;
    }
  }
  /**
   * Retrieves and decrypts the API token after authentication.
   * @returns the decrypted token, or null if the session is not ready or
   * the server has not published the token yet.
   */
  async getApiToken() {
    if (!this.gqlClient || !this.loginId || !this.privateKey) {
      return null;
    }
    const encrypted = await this.gqlClient.getEncryptedApiToken({
      loginId: this.loginId
    });
    if (!encrypted) {
      return null;
    }
    return crypto.privateDecrypt(this.privateKey, Buffer.from(encrypted, "base64")).toString("utf-8");
  }
  /**
   * Lazily builds (and caches) the GQL client, preferring the given key
   * over the token stored in the config store.
   */
  getGQLClient(inputApiKey) {
    if (this.gqlClient === void 0) {
      this.gqlClient = new GQLClient({
        apiKey: inputApiKey || configStore.get("apiToken") || "",
        type: "apiKey",
        apiUrl: this.resolvedApiUrl
      });
    }
    return this.gqlClient;
  }
  /**
   * Assigns a GQL client instance to the AuthManager, and resets auth state.
   * @param gqlClient The GQL client instance to set
   */
  setGQLClient(gqlClient) {
    this.gqlClient = gqlClient;
    this.cleanup();
  }
  /**
   * Cleans up any active login session (keys, session id, cached state).
   * The GQL client itself is not reset here.
   */
  cleanup() {
    this.publicKey = void 0;
    this.privateKey = void 0;
    this.loginId = void 0;
    this.authenticated = null;
    this.currentBrowserUrl = null;
  }
};
12514
+
12515
+ // src/commands/handleMobbLogin.ts
12516
+ // Namespaced debug logger for the login command flow.
+ var debug7 = Debug6("mobbdev:commands");
12516
+ // Poll budget: wait up to 10 minutes, checking every 5 seconds (same
+ // values as the AuthManager module's LOGIN_MAX_WAIT / LOGIN_CHECK_DELAY).
+ var LOGIN_MAX_WAIT2 = 10 * 60 * 1e3;
12517
+ var LOGIN_CHECK_DELAY2 = 5 * 1e3;
11613
12519
  var MOBB_LOGIN_REQUIRED_MSG = `\u{1F513} Login to Mobb is Required, you will be redirected to our login page, once the authorization is complete return to this prompt, ${chalk3.bgBlue(
11614
12520
  "press any key to continue"
11615
12521
  )};`;
@@ -11624,11 +12530,8 @@ async function getAuthenticatedGQLClient({
11624
12530
  apiUrl || "undefined",
11625
12531
  webAppUrl || "undefined"
11626
12532
  );
11627
- let gqlClient = new GQLClient({
11628
- apiKey: inputApiKey || configStore.get("apiToken") || "",
11629
- type: "apiKey",
11630
- apiUrl
11631
- });
12533
+ const authManager = new AuthManager(webAppUrl, apiUrl);
12534
+ let gqlClient = authManager.getGQLClient(inputApiKey);
11632
12535
  gqlClient = await handleMobbLogin({
11633
12536
  inGqlClient: gqlClient,
11634
12537
  skipPrompts: isSkipPrompts,
@@ -11645,35 +12548,28 @@ async function handleMobbLogin({
11645
12548
  webAppUrl,
11646
12549
  loginContext
11647
12550
  }) {
11648
- const resolvedWebAppUrl = webAppUrl || WEB_APP_URL;
11649
- const resolvedApiUrl = apiUrl || API_URL;
11650
12551
  debug7(
11651
12552
  "handleMobbLogin: resolved URLs - apiUrl=%s (from param: %s), webAppUrl=%s (from param: %s)",
11652
- resolvedApiUrl,
11653
12553
  apiUrl || "fallback",
11654
- resolvedWebAppUrl,
12554
+ apiUrl || "fallback",
12555
+ webAppUrl || "fallback",
11655
12556
  webAppUrl || "fallback"
11656
12557
  );
11657
12558
  const { createSpinner: createSpinner5 } = Spinner({ ci: skipPrompts });
11658
- const isConnected = await inGqlClient.verifyApiConnection();
11659
- if (!isConnected) {
11660
- createSpinner5().start().error({
11661
- text: "\u{1F513} Connection to Mobb: failed to connect to the Mobb server"
11662
- });
11663
- throw new CliError(
11664
- "Connection to Mobb: failed to connect to the Mobb server"
11665
- );
12559
+ const authManager = new AuthManager(webAppUrl, apiUrl);
12560
+ authManager.setGQLClient(inGqlClient);
12561
+ try {
12562
+ const isAuthenticated = await authManager.isAuthenticated();
12563
+ if (isAuthenticated) {
12564
+ createSpinner5().start().success({
12565
+ text: `\u{1F513} Login to Mobb succeeded. Already authenticated`
12566
+ });
12567
+ return authManager.getGQLClient();
12568
+ }
12569
+ } catch (error) {
12570
+ debug7("Authentication check failed:", error);
11666
12571
  }
11667
- createSpinner5().start().success({
11668
- text: `\u{1F513} Connection to Mobb: succeeded`
11669
- });
11670
- const userVerify = await inGqlClient.validateUserToken();
11671
- if (userVerify) {
11672
- createSpinner5().start().success({
11673
- text: `\u{1F513} Login to Mobb succeeded. ${typeof userVerify === "string" ? `Logged in as ${userVerify}` : ""}`
11674
- });
11675
- return inGqlClient;
11676
- } else if (apiKey) {
12572
+ if (apiKey) {
11677
12573
  createSpinner5().start().error({
11678
12574
  text: "\u{1F513} Login to Mobb failed: The provided API key does not match any configured API key on the system"
11679
12575
  });
@@ -11689,57 +12585,32 @@ async function handleMobbLogin({
11689
12585
  loginSpinner.update({
11690
12586
  text: "\u{1F513} Waiting for Mobb login..."
11691
12587
  });
11692
- const { publicKey, privateKey } = crypto.generateKeyPairSync("rsa", {
11693
- modulusLength: 2048
11694
- });
11695
- const loginId = await inGqlClient.createCliLogin({
11696
- publicKey: publicKey.export({ format: "pem", type: "pkcs1" }).toString()
11697
- });
11698
- const webLoginUrl = `${resolvedWebAppUrl}/cli-login`;
11699
- const browserUrl = loginContext ? buildLoginUrl(webLoginUrl, loginId, os.hostname(), loginContext) : `${webLoginUrl}/${loginId}?hostname=${os.hostname()}`;
11700
- !skipPrompts && console.log(
11701
- `If the page does not open automatically, kindly access it through ${browserUrl}.`
11702
- );
11703
- await open(browserUrl);
11704
- let newApiToken = null;
11705
- for (let i = 0; i < LOGIN_MAX_WAIT / LOGIN_CHECK_DELAY; i++) {
11706
- const encryptedApiToken = await inGqlClient.getEncryptedApiToken({
11707
- loginId
11708
- });
11709
- loginSpinner.spin();
11710
- if (encryptedApiToken) {
11711
- debug7("encrypted API token received %s", encryptedApiToken);
11712
- newApiToken = crypto.privateDecrypt(privateKey, Buffer.from(encryptedApiToken, "base64")).toString("utf-8");
11713
- debug7("API token decrypted");
11714
- break;
12588
+ try {
12589
+ const loginUrl = await authManager.generateLoginUrl(loginContext);
12590
+ if (!loginUrl) {
12591
+ loginSpinner.error({
12592
+ text: "Failed to generate login URL"
12593
+ });
12594
+ throw new CliError("Failed to generate login URL");
12595
+ }
12596
+ !skipPrompts && console.log(
12597
+ `If the page does not open automatically, kindly access it through ${loginUrl}.`
12598
+ );
12599
+ authManager.openUrlInBrowser();
12600
+ const authSuccess = await authManager.waitForAuthentication();
12601
+ if (!authSuccess) {
12602
+ loginSpinner.error({
12603
+ text: "Login timeout error"
12604
+ });
12605
+ throw new CliError("Login timeout error");
11715
12606
  }
11716
- await sleep(LOGIN_CHECK_DELAY);
11717
- }
11718
- if (!newApiToken) {
11719
- loginSpinner.error({
11720
- text: "Login timeout error"
11721
- });
11722
- throw new CliError();
11723
- }
11724
- const newGqlClient = new GQLClient({
11725
- apiKey: newApiToken,
11726
- type: "apiKey",
11727
- apiUrl: resolvedApiUrl
11728
- });
11729
- const loginSuccess = await newGqlClient.validateUserToken();
11730
- if (loginSuccess) {
11731
- debug7(`set api token ${newApiToken}`);
11732
- configStore.set("apiToken", newApiToken);
11733
12607
  loginSpinner.success({
11734
- text: `\u{1F513} Login to Mobb successful! ${typeof loginSpinner === "string" ? `Logged in as ${loginSuccess}` : ""}`
12608
+ text: `\u{1F513} Login to Mobb successful!`
11735
12609
  });
11736
- } else {
11737
- loginSpinner.error({
11738
- text: "Something went wrong, API token is invalid."
11739
- });
11740
- throw new CliError();
12610
+ return authManager.getGQLClient();
12611
+ } finally {
12612
+ authManager.cleanup();
11741
12613
  }
11742
- return newGqlClient;
11743
12614
  }
11744
12615
 
11745
12616
  // src/features/analysis/add_fix_comments_for_pr/add_fix_comments_for_pr.ts
@@ -13306,7 +14177,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
13306
14177
  `If the page does not open automatically, kindly access it through ${scmAuthUrl2}.`
13307
14178
  );
13308
14179
  await open3(scmAuthUrl2);
13309
- for (let i = 0; i < LOGIN_MAX_WAIT / LOGIN_CHECK_DELAY; i++) {
14180
+ for (let i = 0; i < LOGIN_MAX_WAIT2 / LOGIN_CHECK_DELAY2; i++) {
13310
14181
  const userInfo2 = await gqlClient.getUserInfo();
13311
14182
  if (!userInfo2) {
13312
14183
  throw new CliError2("User info not found");
@@ -13325,7 +14196,7 @@ async function _scan(params, { skipPrompts = false } = {}) {
13325
14196
  return tokenInfo2.accessToken;
13326
14197
  }
13327
14198
  scmSpinner.spin();
13328
- await sleep(LOGIN_CHECK_DELAY);
14199
+ await sleep(LOGIN_CHECK_DELAY2);
13329
14200
  }
13330
14201
  scmSpinner.error({
13331
14202
  text: `${scmName} login timeout error`