@junctionpanel/server 0.1.69 → 0.1.70

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,6 +4,7 @@ import { resolve, dirname, basename } from "path";
4
4
  import { realpathSync } from "fs";
5
5
  import { open as openFile, stat as statFile } from "fs/promises";
6
6
  import { parseAndHighlightDiff } from "../server/utils/diff-highlighter.js";
7
+ import { expandTilde } from "./path.js";
7
8
  import { isJunctionOwnedWorktreeCwd } from "./worktree.js";
8
9
  import { requireJunctionWorktreeBaseRefName } from "./worktree-metadata.js";
9
10
  const execAsync = promisify(exec);
@@ -71,6 +72,7 @@ async function spawnLimitedText(params) {
71
72
  }
72
73
  resolvePromise({
73
74
  text: Buffer.concat(stdoutChunks).toString("utf8"),
75
+ stderrText: stderrPreview.trim(),
74
76
  truncated,
75
77
  exitCode: code,
76
78
  signal,
@@ -411,6 +413,19 @@ async function getCurrentBranch(cwd) {
411
413
  return null;
412
414
  }
413
415
  }
416
+ async function getCurrentHeadSha(cwd) {
417
+ try {
418
+ const { stdout } = await execAsync("git rev-parse --verify HEAD", {
419
+ cwd,
420
+ env: READ_ONLY_GIT_ENV,
421
+ });
422
+ const headSha = stdout.trim();
423
+ return headSha.length > 0 ? headSha : null;
424
+ }
425
+ catch {
426
+ return null;
427
+ }
428
+ }
414
429
  async function getWorktreeRoot(cwd) {
415
430
  try {
416
431
  const { stdout } = await execAsync("git rev-parse --path-format=absolute --show-toplevel", { cwd, env: READ_ONLY_GIT_ENV });
@@ -1474,6 +1489,23 @@ export async function pushCurrentBranch(cwd, options) {
1474
1489
  const GH_JSON_MAX_BYTES = 512 * 1024;
1475
1490
  const GH_FAILED_LOG_MAX_BYTES = 200 * 1024;
1476
1491
  const GH_FAILED_LOG_TOTAL_MAX_BYTES = 300 * 1024;
1492
+ const PULL_REQUEST_STATUS_ACTIVE_CACHE_MS = 15000;
1493
+ const PULL_REQUEST_STATUS_PASSIVE_CACHE_MS = 3 * 60000;
1494
+ const PULL_REQUEST_STATUS_READY_CACHE_MS = 45000;
1495
+ const PULL_REQUEST_STATUS_MERGED_CACHE_MS = 5 * 60000;
1496
+ const PULL_REQUEST_STATUS_EMPTY_CACHE_MS = 90000;
1497
+ const PULL_REQUEST_SEARCH_CACHE_MS = 15000;
1498
+ const PULL_REQUEST_LOOKUP_CACHE_MAX_ENTRIES = 256;
1499
+ const PULL_REQUEST_SEARCH_CACHE_MAX_ENTRIES = 128;
1500
+ const pullRequestLookupCache = new Map();
1501
+ const pullRequestLookupCacheGenerationByCwd = new Map();
1502
+ const pullRequestSearchCache = new Map();
1503
+ export class PullRequestRateLimitError extends Error {
1504
+ constructor(message = "GitHub rate limit reached while loading pull request data") {
1505
+ super(message);
1506
+ this.name = "PullRequestRateLimitError";
1507
+ }
1508
+ }
1477
1509
  async function ensureGhAvailable(cwd) {
1478
1510
  try {
1479
1511
  await execAsync("gh --version", { cwd });
@@ -1501,6 +1533,61 @@ function isGhAuthError(error) {
1501
1533
  text.includes("bad credentials") ||
1502
1534
  text.includes("http 401"));
1503
1535
  }
1536
+ function isGhRateLimitError(error) {
1537
+ const text = getCommandErrorText(error);
1538
+ return (text.includes("rate limit") ||
1539
+ text.includes("secondary rate limit") ||
1540
+ text.includes("retry-after") ||
1541
+ text.includes("x-ratelimit") ||
1542
+ text.includes("abuse detection"));
1543
+ }
1544
+ function indicatesNoRequiredChecks(text) {
1545
+ const normalized = text.toLowerCase();
1546
+ return (normalized.includes("no checks reported") ||
1547
+ normalized.includes("no required checks reported"));
1548
+ }
1549
+ function normalizePullRequestDetailLevel(detailLevel) {
1550
+ return detailLevel ?? "detail";
1551
+ }
1552
+ function hasSufficientPullRequestDetail(cached, requested) {
1553
+ if (cached === "detail") {
1554
+ return true;
1555
+ }
1556
+ return requested === "summary";
1557
+ }
1558
+ function normalizePullRequestCacheScope(input) {
1559
+ return {
1560
+ cwd: expandTilde(input.cwd.trim()),
1561
+ remoteName: input.remoteName?.trim() || null,
1562
+ };
1563
+ }
1564
+ function getPullRequestLookupCacheGeneration(cwd) {
1565
+ const normalizedScope = normalizePullRequestCacheScope({ cwd });
1566
+ return pullRequestLookupCacheGenerationByCwd.get(normalizedScope.cwd) ?? 0;
1567
+ }
1568
+ function bumpPullRequestLookupCacheGeneration(cwd) {
1569
+ const normalizedScope = normalizePullRequestCacheScope({ cwd });
1570
+ const nextGeneration = getPullRequestLookupCacheGeneration(normalizedScope.cwd) + 1;
1571
+ pullRequestLookupCacheGenerationByCwd.set(normalizedScope.cwd, nextGeneration);
1572
+ return nextGeneration;
1573
+ }
1574
+ function resolvePullRequestLookupMaxAgeMs(input) {
1575
+ if (!input.status) {
1576
+ return PULL_REQUEST_STATUS_EMPTY_CACHE_MS;
1577
+ }
1578
+ if (input.status.isMerged) {
1579
+ return PULL_REQUEST_STATUS_MERGED_CACHE_MS;
1580
+ }
1581
+ if (input.detailLevel === "summary") {
1582
+ return PULL_REQUEST_STATUS_PASSIVE_CACHE_MS;
1583
+ }
1584
+ if (input.status.requiredChecksPassed === true &&
1585
+ input.status.canMerge === true &&
1586
+ input.status.hasConflicts === false) {
1587
+ return PULL_REQUEST_STATUS_READY_CACHE_MS;
1588
+ }
1589
+ return PULL_REQUEST_STATUS_ACTIVE_CACHE_MS;
1590
+ }
1504
1591
  function parseGitHubRepoFromRemoteUrl(url) {
1505
1592
  let cleaned = url.trim();
1506
1593
  if (!cleaned) {
@@ -1553,6 +1640,23 @@ async function hasAuthenticatedGitHubCliSession(cwd) {
1553
1640
  throw error;
1554
1641
  }
1555
1642
  }
1643
+ async function ghApiJson(input) {
1644
+ const result = await spawnLimitedText({
1645
+ cmd: "gh",
1646
+ args: ["api", ...input.args],
1647
+ cwd: input.cwd,
1648
+ maxBytes: GH_JSON_MAX_BYTES,
1649
+ acceptExitCodes: input.acceptExitCodes,
1650
+ });
1651
+ if (result.truncated) {
1652
+ throw new Error(`GitHub API response exceeded the ${GH_JSON_MAX_BYTES} byte limit.`);
1653
+ }
1654
+ const text = result.text.trim();
1655
+ if (!text) {
1656
+ throw new Error(`GitHub API returned an empty response for: gh api ${input.args.join(" ")}`);
1657
+ }
1658
+ return JSON.parse(text);
1659
+ }
1556
1660
  async function listPullRequestsForHead(options) {
1557
1661
  const { stdout } = await execFileAsync("gh", [
1558
1662
  "api",
@@ -1567,6 +1671,125 @@ async function listPullRequestsForHead(options) {
1567
1671
  const parsed = JSON.parse(stdout.trim());
1568
1672
  return Array.isArray(parsed) ? parsed : [];
1569
1673
  }
1674
+ function buildPullRequestLookupCacheKey(input) {
1675
+ const normalizedScope = normalizePullRequestCacheScope({
1676
+ cwd: input.cwd,
1677
+ remoteName: input.remoteName,
1678
+ });
1679
+ return JSON.stringify([
1680
+ normalizedScope.cwd,
1681
+ normalizedScope.remoteName ?? "",
1682
+ input.repoIdentity?.trim() ?? "",
1683
+ input.branch.trim(),
1684
+ ]);
1685
+ }
1686
+ function getCompatiblePullRequestLookup(input) {
1687
+ prunePullRequestLookupCache();
1688
+ const cached = pullRequestLookupCache.get(input.cacheKey);
1689
+ if (!cached) {
1690
+ return null;
1691
+ }
1692
+ if (cached.localHeadSha !== input.localHeadSha) {
1693
+ pullRequestLookupCache.delete(input.cacheKey);
1694
+ return null;
1695
+ }
1696
+ if (!hasSufficientPullRequestDetail(cached.lookup.detailLevel, input.detailLevel)) {
1697
+ return null;
1698
+ }
1699
+ return cached.lookup;
1700
+ }
1701
+ function getCachedPullRequestLookup(input) {
1702
+ const cached = getCompatiblePullRequestLookup(input);
1703
+ if (!cached) {
1704
+ return null;
1705
+ }
1706
+ if (Date.now() - cached.fetchedAt > input.maxAgeMs) {
1707
+ pullRequestLookupCache.delete(input.cacheKey);
1708
+ return null;
1709
+ }
1710
+ return cached;
1711
+ }
1712
+ function setCachedPullRequestLookup(cacheKey, localHeadSha, lookup, options) {
1713
+ if (getPullRequestLookupCacheGeneration(options.cwd) !== options.generation) {
1714
+ return lookup;
1715
+ }
1716
+ prunePullRequestLookupCache();
1717
+ pullRequestLookupCache.set(cacheKey, {
1718
+ lookup,
1719
+ localHeadSha,
1720
+ });
1721
+ prunePullRequestLookupCache();
1722
+ return lookup;
1723
+ }
1724
+ function getPullRequestLookupEntryMaxAgeMs(entry) {
1725
+ return resolvePullRequestLookupMaxAgeMs({
1726
+ detailLevel: entry.lookup.detailLevel,
1727
+ status: entry.lookup.status,
1728
+ });
1729
+ }
1730
+ function prunePullRequestLookupCache(now = Date.now()) {
1731
+ for (const [key, entry] of pullRequestLookupCache.entries()) {
1732
+ if (now - entry.lookup.fetchedAt > getPullRequestLookupEntryMaxAgeMs(entry)) {
1733
+ pullRequestLookupCache.delete(key);
1734
+ }
1735
+ }
1736
+ while (pullRequestLookupCache.size > PULL_REQUEST_LOOKUP_CACHE_MAX_ENTRIES) {
1737
+ const oldestKey = pullRequestLookupCache.keys().next().value;
1738
+ if (typeof oldestKey !== "string") {
1739
+ break;
1740
+ }
1741
+ pullRequestLookupCache.delete(oldestKey);
1742
+ }
1743
+ }
1744
+ function prunePullRequestSearchCache(now = Date.now()) {
1745
+ for (const [key, entry] of pullRequestSearchCache.entries()) {
1746
+ if (now - entry.fetchedAt > PULL_REQUEST_SEARCH_CACHE_MS) {
1747
+ pullRequestSearchCache.delete(key);
1748
+ }
1749
+ }
1750
+ while (pullRequestSearchCache.size > PULL_REQUEST_SEARCH_CACHE_MAX_ENTRIES) {
1751
+ const oldestKey = pullRequestSearchCache.keys().next().value;
1752
+ if (typeof oldestKey !== "string") {
1753
+ break;
1754
+ }
1755
+ pullRequestSearchCache.delete(oldestKey);
1756
+ }
1757
+ }
1758
+ /**
1759
+ * Clear cached pull request lookup and search entries for a workspace.
1760
+ *
1761
+ * When `options.remoteName` is omitted, entries across every remote for the
1762
+ * normalized workspace path are cleared.
1763
+ */
1764
+ export function invalidatePullRequestCache(cwd, options) {
1765
+ prunePullRequestLookupCache();
1766
+ prunePullRequestSearchCache();
1767
+ const normalizedScope = normalizePullRequestCacheScope({
1768
+ cwd,
1769
+ remoteName: options?.remoteName,
1770
+ });
1771
+ bumpPullRequestLookupCacheGeneration(normalizedScope.cwd);
1772
+ for (const key of pullRequestLookupCache.keys()) {
1773
+ const [entryCwd, entryRemoteName] = JSON.parse(key);
1774
+ if (entryCwd !== normalizedScope.cwd) {
1775
+ continue;
1776
+ }
1777
+ if (normalizedScope.remoteName !== null && entryRemoteName !== normalizedScope.remoteName) {
1778
+ continue;
1779
+ }
1780
+ pullRequestLookupCache.delete(key);
1781
+ }
1782
+ for (const key of pullRequestSearchCache.keys()) {
1783
+ const [entryCwd, entryRemoteName] = JSON.parse(key);
1784
+ if (entryCwd !== normalizedScope.cwd) {
1785
+ continue;
1786
+ }
1787
+ if (normalizedScope.remoteName !== null && entryRemoteName !== normalizedScope.remoteName) {
1788
+ continue;
1789
+ }
1790
+ pullRequestSearchCache.delete(key);
1791
+ }
1792
+ }
1570
1793
  function matchesPullRequestHeadRef(current, expectedHead) {
1571
1794
  if (!current || typeof current !== "object") {
1572
1795
  return false;
@@ -1667,6 +1890,7 @@ function parsePullRequestChecks(input) {
1667
1890
  : null,
1668
1891
  state,
1669
1892
  bucket,
1893
+ required: true,
1670
1894
  };
1671
1895
  })
1672
1896
  .filter((entry) => entry !== null);
@@ -1690,64 +1914,114 @@ async function getRequiredPullRequestChecks(cwd, prNumber) {
1690
1914
  });
1691
1915
  }
1692
1916
  catch (error) {
1693
- if (getCommandErrorText(error).includes("no checks reported")) {
1917
+ if (indicatesNoRequiredChecks(getCommandErrorText(error))) {
1694
1918
  return [];
1695
1919
  }
1696
1920
  throw error;
1697
1921
  }
1698
1922
  const text = result.text.trim();
1699
1923
  if (!text) {
1700
- return [];
1924
+ if (result.exitCode === 0 || indicatesNoRequiredChecks(result.stderrText)) {
1925
+ return [];
1926
+ }
1927
+ throw new Error(result.stderrText || `gh pr checks returned no JSON output for PR #${prNumber}.`);
1701
1928
  }
1702
1929
  return parsePullRequestChecks(JSON.parse(text));
1703
1930
  }
1704
- function parseStatusCheckRollup(input) {
1705
- if (!Array.isArray(input)) {
1706
- return [];
1707
- }
1708
- return input
1709
- .map((entry) => {
1931
+ function parseRestCheckRuns(input) {
1932
+ const runs = Array.isArray(input)
1933
+ ? input.flatMap((page) => page && typeof page === "object" && Array.isArray(page.check_runs)
1934
+ ? page.check_runs
1935
+ : [])
1936
+ : input && typeof input === "object" && Array.isArray(input.check_runs)
1937
+ ? input.check_runs
1938
+ : [];
1939
+ const parsedRuns = [];
1940
+ for (const entry of runs) {
1710
1941
  if (!entry || typeof entry !== "object") {
1711
- return null;
1942
+ continue;
1712
1943
  }
1713
- const statusValue = typeof entry.status === "string" && entry.status.trim().length > 0
1714
- ? entry.status.trim()
1715
- : typeof entry.state === "string" && entry.state.trim().length > 0
1716
- ? entry.state.trim()
1717
- : "";
1718
- const conclusionValue = typeof entry.conclusion === "string" && entry.conclusion.trim().length > 0
1719
- ? entry.conclusion.trim()
1944
+ const run = entry;
1945
+ const statusValue = typeof run.status === "string" && run.status.trim().length > 0
1946
+ ? run.status.trim()
1947
+ : "";
1948
+ const conclusionValue = typeof run.conclusion === "string" && run.conclusion.trim().length > 0
1949
+ ? run.conclusion.trim()
1720
1950
  : "";
1721
- const bucket = normalizeCheckBucket(undefined, conclusionValue || statusValue) ??
1722
- (statusValue.toLowerCase() === "completed" && conclusionValue
1723
- ? normalizeCheckBucket(undefined, conclusionValue)
1724
- : null);
1725
- const name = typeof entry.name === "string" && entry.name.trim().length > 0
1726
- ? entry.name.trim()
1727
- : typeof entry.context === "string" && entry.context.trim().length > 0
1728
- ? entry.context.trim()
1729
- : "";
1951
+ const state = conclusionValue || statusValue;
1952
+ const bucket = normalizeCheckBucket(undefined, state);
1953
+ const name = typeof run.name === "string" ? run.name.trim() : "";
1730
1954
  if (!bucket || !name) {
1731
- return null;
1955
+ continue;
1732
1956
  }
1733
- return {
1957
+ parsedRuns.push({
1734
1958
  name,
1735
- workflow: typeof entry.workflowName === "string" && entry.workflowName.trim().length > 0
1736
- ? entry.workflowName.trim()
1737
- : null,
1738
- link: typeof entry.detailsUrl === "string" && entry.detailsUrl.trim().length > 0
1739
- ? entry.detailsUrl.trim()
1740
- : typeof entry.targetUrl === "string" && entry.targetUrl.trim().length > 0
1741
- ? entry.targetUrl.trim()
1959
+ workflow: null,
1960
+ link: typeof run.details_url === "string" && run.details_url.trim().length > 0
1961
+ ? run.details_url.trim()
1962
+ : typeof run.html_url === "string" && run.html_url.trim().length > 0
1963
+ ? run.html_url.trim()
1742
1964
  : null,
1743
- description: typeof entry.description === "string" && entry.description.trim().length > 0
1744
- ? entry.description.trim()
1965
+ description: null,
1966
+ state,
1967
+ bucket,
1968
+ required: false,
1969
+ });
1970
+ }
1971
+ return parsedRuns;
1972
+ }
1973
+ function parseRestCombinedStatuses(input) {
1974
+ const pages = Array.isArray(input) ? input : [input];
1975
+ const statuses = pages.flatMap((page) => {
1976
+ if (!page || typeof page !== "object") {
1977
+ return [];
1978
+ }
1979
+ return Array.isArray(page.statuses)
1980
+ ? page.statuses
1981
+ : [];
1982
+ });
1983
+ const parsedStatuses = [];
1984
+ for (const entry of statuses) {
1985
+ if (!entry || typeof entry !== "object") {
1986
+ continue;
1987
+ }
1988
+ const statusEntry = entry;
1989
+ const state = typeof statusEntry.state === "string" ? statusEntry.state.trim() : "";
1990
+ const bucket = normalizeCheckBucket(undefined, state);
1991
+ const name = typeof statusEntry.context === "string" ? statusEntry.context.trim() : "";
1992
+ if (!bucket || !name) {
1993
+ continue;
1994
+ }
1995
+ parsedStatuses.push({
1996
+ name,
1997
+ workflow: null,
1998
+ link: typeof statusEntry.target_url === "string" &&
1999
+ statusEntry.target_url.trim().length > 0
2000
+ ? statusEntry.target_url.trim()
2001
+ : null,
2002
+ description: typeof statusEntry.description === "string" &&
2003
+ statusEntry.description.trim().length > 0
2004
+ ? statusEntry.description.trim()
1745
2005
  : null,
1746
- state: conclusionValue || statusValue,
2006
+ state,
1747
2007
  bucket,
1748
- };
1749
- })
1750
- .filter((entry) => entry !== null);
2008
+ required: false,
2009
+ });
2010
+ }
2011
+ return parsedStatuses;
2012
+ }
2013
+ async function getPullRequestChecksViaRest(cwd, repo, ref) {
2014
+ const [checkRuns, combinedStatuses] = await Promise.all([
2015
+ ghApiJson({
2016
+ cwd,
2017
+ args: ["--paginate", "--slurp", `repos/${repo}/commits/${ref}/check-runs?per_page=100`],
2018
+ }),
2019
+ ghApiJson({
2020
+ cwd,
2021
+ args: ["--paginate", "--slurp", `repos/${repo}/commits/${ref}/status?per_page=100`],
2022
+ }),
2023
+ ]);
2024
+ return mergeCheckStatuses(parseRestCombinedStatuses(combinedStatuses), parseRestCheckRuns(checkRuns));
1751
2025
  }
1752
2026
  function mergeCheckStatuses(requiredChecks, rollupChecks) {
1753
2027
  const merged = new Map();
@@ -1770,39 +2044,39 @@ function deriveChecksState(checks) {
1770
2044
  }
1771
2045
  return "passing";
1772
2046
  }
1773
- async function getPullRequestMergeability(cwd, prNumber) {
1774
- const result = await spawnLimitedText({
1775
- cmd: "gh",
1776
- args: [
1777
- "pr",
1778
- "view",
1779
- String(prNumber),
1780
- "--json",
1781
- "mergeable,mergeStateStatus,statusCheckRollup",
1782
- ],
2047
+ async function getPullRequestMergeabilityViaRest(cwd, repo, prNumber) {
2048
+ const parsed = await ghApiJson({
1783
2049
  cwd,
1784
- maxBytes: GH_JSON_MAX_BYTES,
2050
+ args: [`repos/${repo}/pulls/${prNumber}`],
1785
2051
  });
1786
- const text = result.text.trim();
1787
- if (!text) {
1788
- return { hasConflicts: false, rollupChecks: [] };
1789
- }
1790
- const parsed = JSON.parse(text);
1791
- const mergeable = typeof parsed.mergeable === "string" ? parsed.mergeable.trim().toUpperCase() : "";
1792
- const mergeStateStatus = typeof parsed.mergeStateStatus === "string"
1793
- ? parsed.mergeStateStatus.trim().toUpperCase()
2052
+ const mergeable = typeof parsed.mergeable === "boolean" ? parsed.mergeable : null;
2053
+ const mergeableState = typeof parsed.mergeable_state === "string"
2054
+ ? parsed.mergeable_state.trim().toUpperCase()
1794
2055
  : "";
1795
2056
  return {
1796
- hasConflicts: mergeable === "CONFLICTING" || mergeStateStatus === "DIRTY",
1797
- rollupChecks: parseStatusCheckRollup(parsed.statusCheckRollup),
2057
+ hasConflicts: mergeable === false || mergeableState === "DIRTY",
2058
+ mergeableKnown: mergeable !== null || (mergeableState.length > 0 && mergeableState !== "UNKNOWN"),
1798
2059
  };
1799
2060
  }
1800
- function toPullRequestStatus(baseStatus, checks, mergeability) {
1801
- const checksState = baseStatus.isMerged ? "passing" : deriveChecksState(checks);
2061
+ function toPullRequestStatus(baseStatus, checks, mergeability, options) {
2062
+ const requiredChecks = checks.filter((check) => check.required);
2063
+ const requiredChecksKnown = options?.requiredChecksKnown ?? true;
2064
+ const checksState = baseStatus.isMerged
2065
+ ? "passing"
2066
+ : !requiredChecksKnown
2067
+ ? deriveChecksState(checks)
2068
+ : requiredChecks.length === 0
2069
+ ? "passing"
2070
+ : deriveChecksState(requiredChecks);
1802
2071
  const requiredChecksPassed = baseStatus.isMerged
1803
2072
  ? true
1804
- : checks.every((check) => check.bucket === "pass" || check.bucket === "skipping");
2073
+ : !requiredChecksKnown
2074
+ ? false
2075
+ : requiredChecks.length === 0
2076
+ ? true
2077
+ : requiredChecks.every((check) => check.bucket === "pass" || check.bucket === "skipping");
1805
2078
  const hasConflicts = baseStatus.isMerged ? false : (mergeability?.hasConflicts ?? false);
2079
+ const mergeableKnown = baseStatus.isMerged ? true : (mergeability?.mergeableKnown ?? true);
1806
2080
  return {
1807
2081
  number: baseStatus.number,
1808
2082
  url: baseStatus.url,
@@ -1811,9 +2085,14 @@ function toPullRequestStatus(baseStatus, checks, mergeability) {
1811
2085
  baseRefName: baseStatus.baseRefName,
1812
2086
  headRefName: baseStatus.headRefName,
1813
2087
  isMerged: baseStatus.isMerged,
2088
+ detailLevel: "detail",
1814
2089
  checksState,
1815
2090
  requiredChecksPassed,
1816
- canMerge: !baseStatus.isMerged && !baseStatus.draft && requiredChecksPassed && !hasConflicts,
2091
+ canMerge: !baseStatus.isMerged &&
2092
+ !baseStatus.draft &&
2093
+ requiredChecksPassed &&
2094
+ !hasConflicts &&
2095
+ mergeableKnown,
1817
2096
  hasConflicts,
1818
2097
  };
1819
2098
  }
@@ -1940,17 +2219,68 @@ function appendBoundedText(parts, next, currentBytes) {
1940
2219
  parts.push(`${truncated}\n\n[combined log output truncated]`);
1941
2220
  return GH_FAILED_LOG_TOTAL_MAX_BYTES;
1942
2221
  }
2222
+ function toSummaryOnlyPullRequestStatus(baseStatus) {
2223
+ return {
2224
+ number: baseStatus.number,
2225
+ url: baseStatus.url,
2226
+ title: baseStatus.title,
2227
+ state: baseStatus.state,
2228
+ baseRefName: baseStatus.baseRefName,
2229
+ headRefName: baseStatus.headRefName,
2230
+ isMerged: baseStatus.isMerged,
2231
+ detailLevel: "summary",
2232
+ checksState: null,
2233
+ requiredChecksPassed: null,
2234
+ canMerge: null,
2235
+ hasConflicts: null,
2236
+ };
2237
+ }
1943
2238
  async function loadCurrentPullRequest(cwd, options) {
1944
2239
  await requireGitRepo(cwd);
2240
+ const detailLevel = normalizePullRequestDetailLevel(options?.detailLevel);
1945
2241
  const head = await getCurrentBranch(cwd);
2242
+ const localHeadSha = await getCurrentHeadSha(cwd);
1946
2243
  if (!head) {
1947
2244
  return {
1948
2245
  status: null,
1949
2246
  githubFeaturesEnabled: false,
1950
2247
  checks: [],
1951
2248
  headSha: null,
2249
+ repo: null,
2250
+ detailLevel,
2251
+ fetchedAt: Date.now(),
1952
2252
  };
1953
2253
  }
2254
+ const remoteName = await resolveEffectiveRemoteName(cwd, options?.remoteName);
2255
+ const remoteUrl = remoteName ? await getRemoteUrl(cwd, remoteName) : null;
2256
+ const cacheKey = buildPullRequestLookupCacheKey({
2257
+ cwd,
2258
+ remoteName,
2259
+ repoIdentity: remoteUrl,
2260
+ branch: head,
2261
+ });
2262
+ const cacheGeneration = getPullRequestLookupCacheGeneration(cwd);
2263
+ const compatibleCachedLookup = getCompatiblePullRequestLookup({
2264
+ cacheKey,
2265
+ detailLevel,
2266
+ localHeadSha,
2267
+ });
2268
+ const maxAgeMs = options?.maxAgeMs ??
2269
+ resolvePullRequestLookupMaxAgeMs({
2270
+ detailLevel,
2271
+ status: compatibleCachedLookup?.status ?? null,
2272
+ });
2273
+ const freshCachedLookup = options?.forceRefresh
2274
+ ? null
2275
+ : getCachedPullRequestLookup({
2276
+ cacheKey,
2277
+ detailLevel,
2278
+ maxAgeMs,
2279
+ localHeadSha,
2280
+ });
2281
+ if (freshCachedLookup) {
2282
+ return freshCachedLookup;
2283
+ }
1954
2284
  try {
1955
2285
  await ensureGhAvailable(cwd);
1956
2286
  }
@@ -1960,10 +2290,11 @@ async function loadCurrentPullRequest(cwd, options) {
1960
2290
  githubFeaturesEnabled: false,
1961
2291
  checks: [],
1962
2292
  headSha: null,
2293
+ repo: null,
2294
+ detailLevel,
2295
+ fetchedAt: Date.now(),
1963
2296
  };
1964
2297
  }
1965
- const remoteName = await resolveEffectiveRemoteName(cwd, options?.remoteName);
1966
- const remoteUrl = remoteName ? await getRemoteUrl(cwd, remoteName) : null;
1967
2298
  if (!remoteUrl) {
1968
2299
  const hasAuthenticatedGhSession = await hasAuthenticatedGitHubCliSession(cwd);
1969
2300
  return {
@@ -1971,6 +2302,9 @@ async function loadCurrentPullRequest(cwd, options) {
1971
2302
  githubFeaturesEnabled: hasAuthenticatedGhSession,
1972
2303
  checks: [],
1973
2304
  headSha: null,
2305
+ repo: null,
2306
+ detailLevel,
2307
+ fetchedAt: Date.now(),
1974
2308
  };
1975
2309
  }
1976
2310
  const repo = parseGitHubRepoFromRemoteUrl(remoteUrl);
@@ -1980,6 +2314,9 @@ async function loadCurrentPullRequest(cwd, options) {
1980
2314
  githubFeaturesEnabled: false,
1981
2315
  checks: [],
1982
2316
  headSha: null,
2317
+ repo: null,
2318
+ detailLevel,
2319
+ fetchedAt: Date.now(),
1983
2320
  };
1984
2321
  }
1985
2322
  const owner = repo.split("/")[0];
@@ -2000,15 +2337,36 @@ async function loadCurrentPullRequest(cwd, options) {
2000
2337
  githubFeaturesEnabled: false,
2001
2338
  checks: [],
2002
2339
  headSha: null,
2340
+ repo,
2341
+ detailLevel,
2342
+ fetchedAt: Date.now(),
2003
2343
  };
2004
2344
  }
2345
+ if (isGhRateLimitError(error)) {
2346
+ if (compatibleCachedLookup) {
2347
+ return compatibleCachedLookup;
2348
+ }
2349
+ throw new PullRequestRateLimitError();
2350
+ }
2005
2351
  throw error;
2006
2352
  }
2007
2353
  const openPull = openPulls.find((entry) => matchesPullRequestHeadRef(entry, head)) ?? null;
2008
2354
  const openBaseStatus = buildPullRequestBaseStatus(openPull, head);
2009
2355
  if (openBaseStatus) {
2010
- let checks;
2011
- let mergeability = { hasConflicts: false, rollupChecks: [] };
2356
+ if (detailLevel === "summary") {
2357
+ return setCachedPullRequestLookup(cacheKey, localHeadSha, {
2358
+ status: toSummaryOnlyPullRequestStatus(openBaseStatus),
2359
+ githubFeaturesEnabled: true,
2360
+ checks: [],
2361
+ headSha: openBaseStatus.headSha,
2362
+ repo,
2363
+ detailLevel,
2364
+ fetchedAt: Date.now(),
2365
+ }, { cwd, generation: cacheGeneration });
2366
+ }
2367
+ let checks = [];
2368
+ let requiredChecksKnown = true;
2369
+ let mergeability = { hasConflicts: false, mergeableKnown: false };
2012
2370
  try {
2013
2371
  checks = await getRequiredPullRequestChecks(cwd, openBaseStatus.number);
2014
2372
  }
@@ -2019,12 +2377,22 @@ async function loadCurrentPullRequest(cwd, options) {
2019
2377
  githubFeaturesEnabled: false,
2020
2378
  checks: [],
2021
2379
  headSha: null,
2380
+ repo,
2381
+ detailLevel,
2382
+ fetchedAt: Date.now(),
2022
2383
  };
2023
2384
  }
2024
- throw error;
2385
+ if (isGhRateLimitError(error)) {
2386
+ if (compatibleCachedLookup) {
2387
+ return compatibleCachedLookup;
2388
+ }
2389
+ throw new PullRequestRateLimitError();
2390
+ }
2391
+ requiredChecksKnown = false;
2392
+ checks = await getPullRequestChecksViaRest(cwd, repo, openBaseStatus.headSha ?? localHeadSha ?? openBaseStatus.headRefName);
2025
2393
  }
2026
2394
  try {
2027
- mergeability = await getPullRequestMergeability(cwd, openBaseStatus.number);
2395
+ mergeability = await getPullRequestMergeabilityViaRest(cwd, repo, openBaseStatus.number);
2028
2396
  }
2029
2397
  catch (error) {
2030
2398
  if (isGhAuthError(error)) {
@@ -2033,15 +2401,29 @@ async function loadCurrentPullRequest(cwd, options) {
2033
2401
  githubFeaturesEnabled: false,
2034
2402
  checks: [],
2035
2403
  headSha: null,
2404
+ repo,
2405
+ detailLevel,
2406
+ fetchedAt: Date.now(),
2036
2407
  };
2037
2408
  }
2409
+ if (isGhRateLimitError(error)) {
2410
+ if (compatibleCachedLookup) {
2411
+ return compatibleCachedLookup;
2412
+ }
2413
+ throw new PullRequestRateLimitError();
2414
+ }
2038
2415
  }
2039
- return {
2040
- status: toPullRequestStatus(openBaseStatus, mergeCheckStatuses(checks, mergeability.rollupChecks), mergeability),
2416
+ return setCachedPullRequestLookup(cacheKey, localHeadSha, {
2417
+ status: toPullRequestStatus(openBaseStatus, checks, mergeability, {
2418
+ requiredChecksKnown,
2419
+ }),
2041
2420
  githubFeaturesEnabled: true,
2042
- checks: mergeCheckStatuses(checks, mergeability.rollupChecks),
2421
+ checks,
2043
2422
  headSha: openBaseStatus.headSha,
2044
- };
2423
+ repo,
2424
+ detailLevel,
2425
+ fetchedAt: Date.now(),
2426
+ }, { cwd, generation: cacheGeneration });
2045
2427
  }
2046
2428
  let closedPulls;
2047
2429
  try {
@@ -2060,8 +2442,17 @@ async function loadCurrentPullRequest(cwd, options) {
2060
2442
  githubFeaturesEnabled: false,
2061
2443
  checks: [],
2062
2444
  headSha: null,
2445
+ repo,
2446
+ detailLevel,
2447
+ fetchedAt: Date.now(),
2063
2448
  };
2064
2449
  }
2450
+ if (isGhRateLimitError(error)) {
2451
+ if (compatibleCachedLookup) {
2452
+ return compatibleCachedLookup;
2453
+ }
2454
+ throw new PullRequestRateLimitError();
2455
+ }
2065
2456
  throw error;
2066
2457
  }
2067
2458
  const mergedClosedPull = closedPulls.find((entry) => matchesPullRequestHeadRef(entry, head) &&
@@ -2071,19 +2462,33 @@ async function loadCurrentPullRequest(cwd, options) {
2071
2462
  entry.merged_at.trim().length > 0) ?? null;
2072
2463
  const mergedBaseStatus = buildPullRequestBaseStatus(mergedClosedPull, head);
2073
2464
  if (!mergedBaseStatus) {
2074
- return {
2465
+ return setCachedPullRequestLookup(cacheKey, localHeadSha, {
2075
2466
  status: null,
2076
2467
  githubFeaturesEnabled: true,
2077
2468
  checks: [],
2078
2469
  headSha: null,
2079
- };
2080
- }
2081
- return {
2082
- status: toPullRequestStatus(mergedBaseStatus, []),
2470
+ repo,
2471
+ detailLevel,
2472
+ fetchedAt: Date.now(),
2473
+ }, { cwd, generation: cacheGeneration });
2474
+ }
2475
+ return setCachedPullRequestLookup(cacheKey, localHeadSha, {
2476
+ status: detailLevel === "summary"
2477
+ ? toSummaryOnlyPullRequestStatus(mergedBaseStatus)
2478
+ : toPullRequestStatus(mergedBaseStatus, []),
2083
2479
  githubFeaturesEnabled: true,
2084
2480
  checks: [],
2085
2481
  headSha: mergedBaseStatus.headSha,
2086
- };
2482
+ repo,
2483
+ detailLevel,
2484
+ fetchedAt: Date.now(),
2485
+ }, { cwd, generation: cacheGeneration });
2486
+ }
2487
+ /**
2488
+ * Read the current branch pull request snapshot using cache-aware summary/detail hydration.
2489
+ */
2490
+ export async function getPullRequestSnapshot(cwd, options) {
2491
+ return loadCurrentPullRequest(cwd, options);
2087
2492
  }
2088
2493
  export async function createPullRequest(cwd, options) {
2089
2494
  await requireGitRepo(cwd);
@@ -2121,15 +2526,27 @@ export async function createPullRequest(cwd, options) {
2121
2526
  if (!parsed?.url || !parsed?.number) {
2122
2527
  throw new Error("GitHub CLI did not return PR url/number");
2123
2528
  }
2529
+ invalidatePullRequestCache(cwd, { remoteName });
2124
2530
  return { url: parsed.url, number: parsed.number };
2125
2531
  }
2532
+ /**
2533
+ * Read a detailed pull request status for the current branch.
2534
+ */
2126
2535
  export async function getPullRequestStatus(cwd, options) {
2127
- const lookup = await loadCurrentPullRequest(cwd, options);
2536
+ const lookup = await loadCurrentPullRequest(cwd, {
2537
+ remoteName: options?.remoteName,
2538
+ detailLevel: "detail",
2539
+ maxAgeMs: options?.maxAgeMs ?? PULL_REQUEST_STATUS_ACTIVE_CACHE_MS,
2540
+ forceRefresh: options?.forceRefresh,
2541
+ });
2128
2542
  return {
2129
2543
  status: lookup.status,
2130
2544
  githubFeaturesEnabled: lookup.githubFeaturesEnabled,
2131
2545
  };
2132
2546
  }
2547
+ /**
2548
+ * Search pull requests for the current repository using a short-lived cache.
2549
+ */
2133
2550
  export async function searchPullRequests(cwd, options = {}) {
2134
2551
  await requireGitRepo(cwd);
2135
2552
  try {
@@ -2149,6 +2566,22 @@ export async function searchPullRequests(cwd, options = {}) {
2149
2566
  githubFeaturesEnabled: false,
2150
2567
  };
2151
2568
  }
2569
+ const normalizedScope = normalizePullRequestCacheScope({
2570
+ cwd,
2571
+ remoteName,
2572
+ });
2573
+ const searchCacheKey = JSON.stringify([
2574
+ normalizedScope.cwd,
2575
+ normalizedScope.remoteName ?? "",
2576
+ repo,
2577
+ options.query?.trim() ?? "",
2578
+ options.limit ?? 25,
2579
+ ]);
2580
+ prunePullRequestSearchCache();
2581
+ const cachedSearch = pullRequestSearchCache.get(searchCacheKey);
2582
+ if (cachedSearch && Date.now() - cachedSearch.fetchedAt <= PULL_REQUEST_SEARCH_CACHE_MS) {
2583
+ return cachedSearch.result;
2584
+ }
2152
2585
  const args = [
2153
2586
  "pr",
2154
2587
  "list",
@@ -2203,10 +2636,17 @@ export async function searchPullRequests(cwd, options = {}) {
2203
2636
  }];
2204
2637
  })
2205
2638
  : [];
2206
- return {
2639
+ const result = {
2207
2640
  pullRequests,
2208
2641
  githubFeaturesEnabled: true,
2209
2642
  };
2643
+ prunePullRequestSearchCache();
2644
+ pullRequestSearchCache.set(searchCacheKey, {
2645
+ result,
2646
+ fetchedAt: Date.now(),
2647
+ });
2648
+ prunePullRequestSearchCache();
2649
+ return result;
2210
2650
  }
2211
2651
  catch {
2212
2652
  return {
@@ -2215,8 +2655,16 @@ export async function searchPullRequests(cwd, options = {}) {
2215
2655
  };
2216
2656
  }
2217
2657
  }
2658
+ /**
2659
+ * Return failed-check logs for the current branch pull request when available.
2660
+ */
2218
2661
  export async function getPullRequestFailureLogs(cwd, options) {
2219
- const lookup = await loadCurrentPullRequest(cwd, options);
2662
+ const lookup = await loadCurrentPullRequest(cwd, {
2663
+ remoteName: options?.remoteName,
2664
+ detailLevel: "detail",
2665
+ maxAgeMs: options?.maxAgeMs ?? PULL_REQUEST_STATUS_ACTIVE_CACHE_MS,
2666
+ forceRefresh: options?.forceRefresh,
2667
+ });
2220
2668
  if (!lookup.githubFeaturesEnabled) {
2221
2669
  return {
2222
2670
  logs: null,
@@ -2224,6 +2672,8 @@ export async function getPullRequestFailureLogs(cwd, options) {
2224
2672
  };
2225
2673
  }
2226
2674
  const failedChecks = lookup.checks.filter((check) => check.bucket === "fail" || check.bucket === "cancel");
2675
+ const failedRequiredChecks = failedChecks.filter((check) => check.required);
2676
+ const summarizedChecks = failedRequiredChecks.length > 0 ? failedRequiredChecks : failedChecks;
2227
2677
  if (!lookup.status || lookup.status.isMerged || failedChecks.length === 0) {
2228
2678
  return {
2229
2679
  logs: null,
@@ -2242,8 +2692,8 @@ export async function getPullRequestFailureLogs(cwd, options) {
2242
2692
  `PR #${lookup.status.number}: ${lookup.status.title}`,
2243
2693
  `Branch: ${lookup.status.headRefName} -> ${lookup.status.baseRefName}`,
2244
2694
  "",
2245
- "Failed required checks:",
2246
- ...failedChecks.map((check) => {
2695
+ failedRequiredChecks.length > 0 ? "Failed required checks:" : "Failing checks:",
2696
+ ...summarizedChecks.map((check) => {
2247
2697
  const workflowSuffix = check.workflow ? ` [${check.workflow}]` : "";
2248
2698
  const descriptionSuffix = check.description ? ` - ${check.description}` : "";
2249
2699
  return `- ${check.name}${workflowSuffix}${descriptionSuffix}`;
@@ -2288,20 +2738,50 @@ export async function getPullRequestFailureLogs(cwd, options) {
2288
2738
  githubFeaturesEnabled: true,
2289
2739
  };
2290
2740
  }
2741
/**
 * Heuristically detect errors that indicate the repository requires a merge
 * queue (or auto-merge), so the caller can fall back from the REST merge
 * endpoint to `gh pr merge`.
 *
 * @param {unknown} error - Error raised by a failed `gh` invocation.
 * @returns {boolean} True when the error text mentions queue/auto-merge rules.
 */
function isMergeQueueFallbackError(error) {
    const text = getCommandErrorText(error);
    const queueMarkers = [
        "merge queue",
        "enqueue",
        "auto-merge",
        "changes must be made through the merge queue",
    ];
    return queueMarkers.some((marker) => text.includes(marker));
}
2748
/**
 * Merge the current branch pull request using a REST-first strategy with queue-aware fallback.
 *
 * Attempts the GitHub REST merge endpoint first; when the repository rejects
 * direct merges because of a merge queue / auto-merge policy, falls back to
 * `gh pr merge`, which knows how to enqueue.
 *
 * @param {string} cwd - Working directory inside the git repository.
 * @param {{method?: string, remoteName?: string, maxAgeMs?: number}} [options]
 * @throws {Error} If a non-squash method is requested, gh is unavailable,
 *   or no open pull request exists for the current branch.
 */
export async function mergePullRequest(cwd, options = {}) {
    const method = options.method ?? "squash";
    if (method !== "squash") {
        throw new Error("Only squash merge is supported");
    }
    const remoteName = await resolveEffectiveRemoteName(cwd, options.remoteName);
    // Summary detail suffices here; force a refresh so we never merge stale state.
    const pr = await loadCurrentPullRequest(cwd, {
        remoteName,
        detailLevel: "summary",
        maxAgeMs: options.maxAgeMs ?? PULL_REQUEST_STATUS_ACTIVE_CACHE_MS,
        forceRefresh: true,
    });
    if (!pr.githubFeaturesEnabled) {
        throw new Error("GitHub CLI (gh) is not available or not authenticated");
    }
    const mergeable = pr.status && !pr.status.isMerged && pr.repo;
    if (!mergeable) {
        throw new Error("No open pull request found for current branch");
    }
    const prNumber = pr.status.number;
    try {
        // REST endpoint merges immediately, without gh's extra branch checks.
        const restArgs = [
            "api",
            "-X",
            "PUT",
            `repos/${pr.repo}/pulls/${prNumber}/merge`,
            "-f",
            `merge_method=${method}`,
        ];
        await execFileAsync("gh", restArgs, { cwd });
    }
    catch (error) {
        if (!isMergeQueueFallbackError(error)) {
            throw error;
        }
        // Merge-queue repos reject direct REST merges; let gh enqueue instead.
        await execFileAsync("gh", ["pr", "merge", String(prNumber), "--squash"], { cwd });
    }
    // Drop cached PR state so subsequent status reads see the merge.
    invalidatePullRequestCache(cwd, { remoteName });
}
2307
2787
  //# sourceMappingURL=checkout-git.js.map