@lumy-pack/line-lore 0.0.4 → 0.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -40,6 +40,9 @@ npx @lumy-pack/line-lore graph pr 42 --depth 2
40
40
  # Check system health
41
41
  npx @lumy-pack/line-lore health
42
42
 
43
+ # Return cached results only (no API calls)
44
+ npx @lumy-pack/line-lore trace src/auth.ts -L 42 --cache-only
45
+
43
46
  # Clear caches
44
47
  npx @lumy-pack/line-lore cache clear
45
48
 
@@ -95,6 +98,48 @@ console.log(`Git version: ${report.gitVersion}`);
95
98
 
96
99
  No ML or heuristics — results are always reproducible.
97
100
 
101
+ ### PR Lookup Algorithm
102
+
103
+ Once a commit is identified, line-lore resolves it to a PR using a **cost-ascending sequential fallback chain**. Each strategy is tried in order; the first success returns immediately.
104
+
105
+ ```
106
+ lookupPR(commitSha)
107
+
108
+
109
+ Strategy 1 — Cache ─────────────────── cost: O(1), instant
110
+ │ ShardedCache<PRInfo> lookup by SHA
111
+ │ hit? → return cached PRInfo
112
+ │ miss + --cache-only? → return null (skip all fallbacks)
113
+
114
+ Strategy 2 — Ancestry-path + Message ─ cost: 1 git-log
115
+ │ 1st: git log --merges --ancestry-path --first-parent sha..HEAD
116
+ │ 2nd: (fallback) full ancestry-path without --first-parent
117
+ │ Parse merge subject with 4 regex patterns:
118
+ │ • /Merge pull request #(\d+)/ — GitHub merge commit
119
+ │ • /\(#(\d+)\)\s*$/ — Squash merge convention
120
+ │ • /!(\d+)\s*$/ — GitLab merge commit
+ │ • /Merged PR (\d+):/ — Azure DevOps merge commit
121
+ │ If PR# found + adapter available → enrich via API
122
+ │ found? → return PRInfo
123
+
124
+ Strategy 3 — Platform API ──────────── cost: 1 HTTP request
125
+ │ gh api repos/{owner}/{repo}/commits/{sha}/pulls
126
+ │ Filter: merged PRs only (mergedAt != null)
127
+ │ found? → return PRInfo
128
+
129
+ Strategy 4 — Patch-ID matching ─────── cost: streaming 500+ commits (skipped on partial clones)
130
+ │ Compute target: git diff sha^..sha | git patch-id --stable
131
+ │ Batch scan: git log -500 -p HEAD | git patch-id --stable
132
+ │ (--deep mode: 2000 commits)
133
+ │ Find candidate with same patch-id but different SHA
134
+ │ match? → lookupPR(matchedSha) recursively (capped at depth 2)
135
+
136
+ All failed → null
137
+ ```
138
+
139
+ **Why this order?** The chain is sorted by cost. Most repositories use merge or squash workflows, so Strategy 2 resolves >90% of lookups with zero API calls. Strategy 3 (single HTTP) is cheaper than Strategy 4 (streaming hundreds of commit diffs), so API is tried before patch-id scanning.
140
+
141
+ **Patch-ID explained**: `git patch-id --stable` generates a content-based hash from a commit's diff, ignoring all metadata (author, date, message). When a commit is rebased, its SHA changes but the patch-id stays the same — enabling deterministic matching of rebased commits.
142
+
98
143
  ## Understanding the Output
99
144
 
100
145
  ### TraceNode — the core unit of output
@@ -302,6 +347,7 @@ Trace a code line to its originating PR.
302
347
  | `deep` | `boolean` | no | `false` | Expand patch-id scan range (500→2000), continue search after merge commit match |
303
348
  | `noAst` | `boolean` | no | `false` | Disable AST analysis |
304
349
  | `noCache` | `boolean` | no | `false` | Disable cache reads and writes |
350
+ | `cacheOnly` | `boolean` | no | `false` | Return cached results only — skip API calls, ancestry traversal, and patch-id scan. If both `cacheOnly` and `noCache` are set, `cacheOnly` takes precedence (cache reads remain enabled) and a warning is emitted. |
305
351
 
306
352
  **Returns (`TraceFullResult`):**
307
353
 
@@ -499,6 +545,30 @@ async function analyzeChangedLines(file: string, lines: number[]) {
499
545
  }
500
546
  ```
501
547
 
548
+ ### Cache-Only Lookups
549
+
550
+ Use `cacheOnly` to instantly return previously cached PR data without any API calls or git operations beyond blame. Ideal for IDE integrations and repeated lookups where speed matters more than freshness.
551
+
552
+ ```typescript
553
+ import { trace } from '@lumy-pack/line-lore';
554
+
555
+ async function getCachedPR(filePath: string, lineNumber: number) {
556
+ const result = await trace({
557
+ file: filePath,
558
+ line: lineNumber,
559
+ cacheOnly: true, // return cached data only, never fetch
560
+ });
561
+
562
+ const pr = result.nodes.find(n => n.type === 'pull_request');
563
+ if (pr) {
564
+ return { number: pr.prNumber, url: pr.prUrl };
565
+ }
566
+
567
+ // Cache miss — no PR data available without fetching
568
+ return null;
569
+ }
570
+ ```
571
+
502
572
  ### Batch Processing with Cache Control
503
573
 
504
574
  ```typescript
@@ -566,6 +636,7 @@ import type {
566
636
  | `--end-line <num>` | Ending line for range |
567
637
  | `--deep` | Deep trace (squash merges) |
568
638
  | `--output <format>` | Output as json, llm, or human |
639
+ | `--cache-only` | Return cached results only (no API calls) |
569
640
  | `--quiet` | Suppress formatting |
570
641
  | `npx @lumy-pack/line-lore health` | Check system health |
571
642
  | `npx @lumy-pack/line-lore graph pr <num>` | Show issues linked to a PR |
package/dist/cli.mjs CHANGED
@@ -305,6 +305,51 @@ async function gitExec(args, options) {
305
305
  LineLoreErrorCode.GIT_COMMAND_FAILED
306
306
  );
307
307
  }
308
+ async function gitPipe(producerArgs, consumerArgs, options) {
309
+ const { cwd, timeout } = options ?? {};
310
+ const pipeArgs = [...producerArgs, "|", ...consumerArgs];
311
+ try {
312
+ const result = await execa("git", producerArgs, {
313
+ cwd,
314
+ timeout,
315
+ reject: false
316
+ }).pipe("git", consumerArgs, { cwd, timeout, reject: false });
317
+ const exitCode = result.exitCode ?? 0;
318
+ if (exitCode !== 0) {
319
+ throw new LineLoreError(
320
+ LineLoreErrorCode.GIT_COMMAND_FAILED,
321
+ `git pipe failed with exit code ${exitCode}: ${result.stderr}`,
322
+ {
323
+ command: "git",
324
+ args: pipeArgs,
325
+ exitCode,
326
+ stderr: result.stderr,
327
+ cwd
328
+ }
329
+ );
330
+ }
331
+ return {
332
+ stdout: result.stdout,
333
+ stderr: result.stderr,
334
+ exitCode
335
+ };
336
+ } catch (error) {
337
+ if (error instanceof LineLoreError) throw error;
338
+ const isTimeout = error instanceof Error && "isTerminated" in error && error.timedOut === true;
339
+ if (isTimeout) {
340
+ throw new LineLoreError(
341
+ LineLoreErrorCode.GIT_TIMEOUT,
342
+ `git pipe timed out after ${timeout}ms`,
343
+ { command: "git", args: pipeArgs, timeout, cwd }
344
+ );
345
+ }
346
+ throw new LineLoreError(
347
+ LineLoreErrorCode.GIT_COMMAND_FAILED,
348
+ `git pipe failed: ${error instanceof Error ? error.message : String(error)}`,
349
+ { command: "git", args: pipeArgs, cwd }
350
+ );
351
+ }
352
+ }
308
353
  async function shellExec(command, args, options) {
309
354
  return execCommand(
310
355
  command,
@@ -324,14 +369,22 @@ var init_executor = __esm({
324
369
  import { filter as filter4, isTruthy as isTruthy4 } from "@winglet/common-utils";
325
370
  async function findMergeCommit(commitSha, options) {
326
371
  const ref = options?.ref ?? "HEAD";
372
+ const budget = options?.timeout ?? DEFAULT_ANCESTRY_TIMEOUT;
373
+ const startTime = Date.now();
327
374
  const firstParentResult = await findMergeCommitWithArgs(
328
375
  commitSha,
329
376
  ref,
330
377
  ["--first-parent"],
331
- options
378
+ { ...options, timeout: budget }
332
379
  );
333
380
  if (firstParentResult) return firstParentResult;
334
- return findMergeCommitWithArgs(commitSha, ref, [], options);
381
+ const elapsed = Date.now() - startTime;
382
+ const remaining = budget - elapsed;
383
+ if (remaining <= 0) return null;
384
+ return findMergeCommitWithArgs(commitSha, ref, [], {
385
+ ...options,
386
+ timeout: remaining
387
+ });
335
388
  }
336
389
  async function findMergeCommitWithArgs(commitSha, ref, extraArgs, options) {
337
390
  try {
@@ -379,12 +432,16 @@ function extractPRFromMergeMessage(subject) {
379
432
  if (squashMatch) return parseInt(squashMatch[1], 10);
380
433
  const glMatch = /!(\d+)\s*$/.exec(subject);
381
434
  if (glMatch) return parseInt(glMatch[1], 10);
435
+ const adoMatch = /Merged PR (\d+):/.exec(subject);
436
+ if (adoMatch) return parseInt(adoMatch[1], 10);
382
437
  return null;
383
438
  }
439
+ var DEFAULT_ANCESTRY_TIMEOUT;
384
440
  var init_ancestry = __esm({
385
441
  "src/core/ancestry/ancestry.ts"() {
386
442
  "use strict";
387
443
  init_executor();
444
+ DEFAULT_ANCESTRY_TIMEOUT = 3e4;
388
445
  }
389
446
  });
390
447
 
@@ -420,14 +477,10 @@ async function computePatchId(commitSha, options) {
420
477
  const cached = await cache.get(commitSha);
421
478
  if (cached) return cached;
422
479
  try {
423
- const cwd = options?.cwd ?? ".";
424
- const result = await shellExec(
425
- "bash",
426
- [
427
- "-c",
428
- `git -C "${cwd}" diff "${commitSha}^..${commitSha}" | git patch-id --stable`
429
- ],
430
- { timeout: options?.timeout }
480
+ const result = await gitPipe(
481
+ ["diff", `${commitSha}^..${commitSha}`],
482
+ ["patch-id", "--stable"],
483
+ { cwd: options?.cwd, timeout: options?.timeout }
431
484
  );
432
485
  const patchId = result.stdout.trim().split(/\s+/)[0];
433
486
  if (!patchId) return null;
@@ -443,15 +496,18 @@ async function findPatchIdMatch(commitSha, options) {
443
496
  const targetPatchId = await computePatchId(commitSha, options);
444
497
  if (!targetPatchId) return null;
445
498
  try {
446
- const logResult = await gitExec(
447
- ["log", "--format=%H", `-${scanDepth}`, ref],
448
- { cwd: options?.cwd, timeout: options?.timeout }
499
+ const result = await gitPipe(
500
+ ["log", `-${scanDepth}`, "-p", ref],
501
+ ["patch-id", "--stable"],
502
+ { cwd: options?.cwd, timeout: options?.timeout ?? 6e4 }
449
503
  );
450
- const candidates = filter5(logResult.stdout.trim().split("\n"), isTruthy5);
451
- for (const candidateSha of candidates) {
452
- if (candidateSha === commitSha) continue;
453
- const candidatePatchId = await computePatchId(candidateSha, options);
454
- if (candidatePatchId && candidatePatchId === targetPatchId) {
504
+ const lines = filter5(result.stdout.trim().split("\n"), isTruthy5);
505
+ const cache = getCache(options?.repoId, options?.noCache);
506
+ for (const line of lines) {
507
+ const [patchId, candidateSha] = line.split(/\s+/);
508
+ if (!patchId || !candidateSha) continue;
509
+ await cache.set(candidateSha, patchId);
510
+ if (candidateSha !== commitSha && patchId === targetPatchId) {
455
511
  return { matchedSha: candidateSha, patchId: targetPatchId };
456
512
  }
457
513
  }
@@ -505,10 +561,40 @@ function getCache2(repoId, noCache) {
505
561
  }
506
562
  return cache;
507
563
  }
508
- async function lookupPR(commitSha, adapter, options) {
509
- const cache = getCache2(options?.repoId, options?.noCache);
564
+ function toCachedPR(pr) {
565
+ return {
566
+ number: pr.number,
567
+ title: pr.title,
568
+ author: pr.author,
569
+ url: pr.url,
570
+ mergeCommit: pr.mergeCommit,
571
+ baseBranch: pr.baseBranch,
572
+ mergedAt: pr.mergedAt ? new Date(pr.mergedAt).getTime() : void 0
573
+ };
574
+ }
575
+ function fromCachedPR(cached) {
576
+ let mergedAt;
577
+ if (cached.mergedAt != null) {
578
+ mergedAt = typeof cached.mergedAt === "number" ? new Date(cached.mergedAt).toISOString() : String(cached.mergedAt);
579
+ }
580
+ return {
581
+ number: cached.number,
582
+ title: cached.title,
583
+ author: cached.author,
584
+ url: cached.url,
585
+ mergeCommit: cached.mergeCommit,
586
+ baseBranch: cached.baseBranch,
587
+ mergedAt
588
+ };
589
+ }
590
+ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
591
+ const cache = getCache2(
592
+ options?.repoId,
593
+ options?.cacheOnly ? false : options?.noCache
594
+ );
510
595
  const cached = await cache.get(commitSha);
511
- if (cached) return cached;
596
+ if (cached) return fromCachedPR(cached);
597
+ if (options?.cacheOnly) return null;
512
598
  let mergeBasedPR = null;
513
599
  const mergeResult = await findMergeCommit(commitSha, options);
514
600
  if (mergeResult) {
@@ -531,39 +617,46 @@ async function lookupPR(commitSha, adapter, options) {
531
617
  };
532
618
  }
533
619
  if (!options?.deep || mergeBasedPR.mergedAt) {
534
- await cache.set(commitSha, mergeBasedPR);
620
+ await cache.set(commitSha, toCachedPR(mergeBasedPR));
535
621
  return mergeBasedPR;
536
622
  }
537
623
  }
538
624
  }
539
- const patchIdMatch = await findPatchIdMatch(commitSha, {
540
- ...options,
541
- scanDepth: options?.deep ? DEEP_SCAN_DEPTH : void 0
542
- });
543
- if (patchIdMatch) {
544
- const result = await lookupPR(patchIdMatch.matchedSha, adapter, options);
545
- if (result) {
546
- await cache.set(commitSha, result);
547
- return result;
548
- }
549
- }
550
625
  if (mergeBasedPR) {
551
- await cache.set(commitSha, mergeBasedPR);
626
+ await cache.set(commitSha, toCachedPR(mergeBasedPR));
552
627
  return mergeBasedPR;
553
628
  }
554
629
  if (adapter) {
555
630
  const prInfo = await adapter.getPRForCommit(commitSha);
556
631
  if (prInfo?.mergedAt) {
557
- await cache.set(commitSha, prInfo);
632
+ await cache.set(commitSha, toCachedPR(prInfo));
558
633
  return prInfo;
559
634
  }
560
635
  }
636
+ if (!options?.skipPatchIdScan && _recursionDepth < MAX_RECURSION_DEPTH) {
637
+ const patchIdMatch = await findPatchIdMatch(commitSha, {
638
+ ...options,
639
+ scanDepth: options?.deep ? DEEP_SCAN_DEPTH : void 0
640
+ });
641
+ if (patchIdMatch) {
642
+ const result = await lookupPR(
643
+ patchIdMatch.matchedSha,
644
+ adapter,
645
+ options,
646
+ _recursionDepth + 1
647
+ );
648
+ if (result) {
649
+ await cache.set(commitSha, toCachedPR(result));
650
+ return result;
651
+ }
652
+ }
653
+ }
561
654
  return null;
562
655
  }
563
656
  function resetPRCache() {
564
657
  cacheRegistry2.clear();
565
658
  }
566
- var cacheRegistry2, DEEP_SCAN_DEPTH;
659
+ var cacheRegistry2, DEEP_SCAN_DEPTH, MAX_RECURSION_DEPTH;
567
660
  var init_pr_lookup = __esm({
568
661
  "src/core/pr-lookup/pr-lookup.ts"() {
569
662
  "use strict";
@@ -572,6 +665,7 @@ var init_pr_lookup = __esm({
572
665
  init_patch_id2();
573
666
  cacheRegistry2 = /* @__PURE__ */ new Map();
574
667
  DEEP_SCAN_DEPTH = 2e3;
668
+ MAX_RECURSION_DEPTH = 2;
575
669
  }
576
670
  });
577
671
 
@@ -593,7 +687,7 @@ var VERSION;
593
687
  var init_version = __esm({
594
688
  "src/version.ts"() {
595
689
  "use strict";
596
- VERSION = "0.0.4";
690
+ VERSION = "0.0.6";
597
691
  }
598
692
  });
599
693
 
@@ -918,6 +1012,27 @@ function isVersionAtLeast(version2, minVersion) {
918
1012
  }
919
1013
  return true;
920
1014
  }
1015
+ async function checkCloneStatus(options) {
1016
+ let partialClone = false;
1017
+ let shallow = false;
1018
+ try {
1019
+ const shallowResult = await gitExec(
1020
+ ["rev-parse", "--is-shallow-repository"],
1021
+ { cwd: options?.cwd }
1022
+ );
1023
+ shallow = shallowResult.stdout.trim() === "true";
1024
+ } catch {
1025
+ }
1026
+ try {
1027
+ const partialResult = await gitExec(
1028
+ ["config", "--get", "extensions.partialclone"],
1029
+ { cwd: options?.cwd }
1030
+ );
1031
+ partialClone = partialResult.stdout.trim().length > 0;
1032
+ } catch {
1033
+ }
1034
+ return { partialClone, shallow };
1035
+ }
921
1036
  async function checkGitHealth(options) {
922
1037
  const hints = [];
923
1038
  let gitVersion = "0.0.0";
@@ -944,7 +1059,18 @@ async function checkGitHealth(options) {
944
1059
  `Upgrade git to ${BLOOM_FILTER_MIN_VERSION.join(".")}+ for bloom filter support (current: ${gitVersion}).`
945
1060
  );
946
1061
  }
947
- return { commitGraph, bloomFilter, gitVersion, hints };
1062
+ const cloneStatus = await checkCloneStatus({ cwd: options?.cwd });
1063
+ if (cloneStatus.partialClone) {
1064
+ hints.push(
1065
+ "Partial clone detected. Patch-ID scan (Strategy 4) will be skipped to avoid blob downloads."
1066
+ );
1067
+ }
1068
+ if (cloneStatus.shallow) {
1069
+ hints.push(
1070
+ "Shallow repository detected. Ancestry-path results may be incomplete."
1071
+ );
1072
+ }
1073
+ return { commitGraph, bloomFilter, gitVersion, hints, ...cloneStatus };
948
1074
  }
949
1075
 
950
1076
  // src/git/remote.ts
@@ -2117,7 +2243,7 @@ async function runBlameAndAuth(adapter, options, execOptions) {
2117
2243
  }
2118
2244
  return { analyzed: blameResult.value, operatingLevel, warnings };
2119
2245
  }
2120
- async function processEntry(entry, featureFlags, adapter, options, execOptions, repoId) {
2246
+ async function processEntry(entry, featureFlags, adapter, options, execOptions, repoId, skipPatchIdScan) {
2121
2247
  const nodes = [];
2122
2248
  const commitNode = {
2123
2249
  type: entry.isCosmetic ? "cosmetic_commit" : "original_commit",
@@ -2148,8 +2274,10 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
2148
2274
  const prInfo = await lookupPR(targetSha, adapter, {
2149
2275
  ...execOptions,
2150
2276
  noCache: options.noCache,
2277
+ cacheOnly: options.cacheOnly,
2151
2278
  deep: featureFlags.deepTrace,
2152
- repoId
2279
+ repoId,
2280
+ skipPatchIdScan
2153
2281
  });
2154
2282
  if (prInfo) {
2155
2283
  nodes.push({
@@ -2166,11 +2294,19 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
2166
2294
  }
2167
2295
  return nodes;
2168
2296
  }
2169
- async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOptions, repoId) {
2297
+ async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOptions, repoId, skipPatchIdScan) {
2170
2298
  const results = await Promise.allSettled(
2171
2299
  map8(
2172
2300
  analyzed,
2173
- (entry) => processEntry(entry, featureFlags, adapter, options, execOptions, repoId)
2301
+ (entry) => processEntry(
2302
+ entry,
2303
+ featureFlags,
2304
+ adapter,
2305
+ options,
2306
+ execOptions,
2307
+ repoId,
2308
+ skipPatchIdScan
2309
+ )
2174
2310
  )
2175
2311
  );
2176
2312
  return results.flatMap((r) => r.status === "fulfilled" ? r.value : []);
@@ -2201,14 +2337,36 @@ async function trace(options) {
2201
2337
  );
2202
2338
  const operatingLevel = blameAuth.operatingLevel || platform.operatingLevel;
2203
2339
  const warnings = [...platform.warnings, ...blameAuth.warnings];
2340
+ if (options.cacheOnly && options.noCache) {
2341
+ warnings.push(
2342
+ "Both cacheOnly and noCache are set. cacheOnly takes precedence \u2014 cache reads are enabled."
2343
+ );
2344
+ }
2204
2345
  const featureFlags = computeFeatureFlags(operatingLevel, options);
2346
+ let cloneStatus = { partialClone: false, shallow: false };
2347
+ try {
2348
+ const result = await checkCloneStatus({ cwd: options.cwd });
2349
+ if (result) cloneStatus = result;
2350
+ } catch {
2351
+ }
2352
+ if (cloneStatus.partialClone) {
2353
+ warnings.push(
2354
+ "Partial clone detected. Patch-ID scan (Strategy 4) will be skipped to avoid blob downloads."
2355
+ );
2356
+ }
2357
+ if (cloneStatus.shallow) {
2358
+ warnings.push(
2359
+ "Shallow repository detected. Ancestry-path results may be incomplete."
2360
+ );
2361
+ }
2205
2362
  const nodes = await buildTraceNodes(
2206
2363
  blameAuth.analyzed,
2207
2364
  featureFlags,
2208
2365
  platform.adapter,
2209
2366
  options,
2210
2367
  execOptions,
2211
- repoId
2368
+ repoId,
2369
+ cloneStatus.partialClone || void 0
2212
2370
  );
2213
2371
  return { nodes, operatingLevel, featureFlags, warnings };
2214
2372
  }
@@ -2415,7 +2573,7 @@ function formatNodeHuman(node) {
2415
2573
  init_normalizer();
2416
2574
  init_errors();
2417
2575
  function registerTraceCommand(program2) {
2418
- program2.command("trace <file>").description("Trace a file line to its originating PR").requiredOption("-L, --line <range>", 'Line number or range (e.g., "42" or "10,50")').option("--deep", "Enable deep trace for squash PRs").option("--no-ast", "Disable AST diff analysis").option("--no-cache", "Disable cache").option("--json", "Output in JSON format").option("-q, --quiet", "Output PR number only").option("--output <format>", "Output format: human, json, llm", "human").option("--no-color", "Disable colored output").action(async (file, opts) => {
2576
+ program2.command("trace <file>").description("Trace a file line to its originating PR").requiredOption("-L, --line <range>", 'Line number or range (e.g., "42" or "10,50")').option("--deep", "Enable deep trace for squash PRs").option("--no-ast", "Disable AST diff analysis").option("--no-cache", "Disable cache").option("--cache-only", "Return cached results only (no API calls)").option("--json", "Output in JSON format").option("-q, --quiet", "Output PR number only").option("--output <format>", "Output format: human, json, llm", "human").option("--no-color", "Disable colored output").action(async (file, opts) => {
2419
2577
  const lineStr = opts.line;
2420
2578
  const parts = lineStr.split(",");
2421
2579
  const line = parseInt(parts[0], 10);
@@ -2426,7 +2584,8 @@ function registerTraceCommand(program2) {
2426
2584
  endLine,
2427
2585
  deep: opts.deep,
2428
2586
  noAst: opts.ast === false,
2429
- noCache: opts.cache === false
2587
+ noCache: opts.cache === false,
2588
+ cacheOnly: opts.cacheOnly
2430
2589
  };
2431
2590
  const cliOptions = {
2432
2591
  json: opts.json,
@@ -4,6 +4,7 @@ export interface AncestryResult {
4
4
  parentShas: string[];
5
5
  subject: string;
6
6
  }
7
+ export declare const DEFAULT_ANCESTRY_TIMEOUT = 30000;
7
8
  export declare function findMergeCommit(commitSha: string, options?: GitExecOptions & {
8
9
  ref?: string;
9
10
  }): Promise<AncestryResult | null>;
@@ -2,8 +2,14 @@ import type { RepoIdentity } from '../../cache/index.js';
2
2
  import type { GitExecOptions, PRInfo, PlatformAdapter } from '../../types/index.js';
3
3
  export interface PRLookupOptions extends GitExecOptions {
4
4
  noCache?: boolean;
5
+ /** Return cached results only — skip all fallback strategies */
6
+ cacheOnly?: boolean;
5
7
  deep?: boolean;
6
8
  repoId?: RepoIdentity;
9
+ /** Skip Strategy 4 (patch-id scan) — set automatically for partial clone environments */
10
+ skipPatchIdScan?: boolean;
7
11
  }
8
- export declare function lookupPR(commitSha: string, adapter: PlatformAdapter | null, options?: PRLookupOptions): Promise<PRInfo | null>;
12
+ export declare function lookupPR(commitSha: string, adapter: PlatformAdapter | null, options?: PRLookupOptions,
13
+ /** @internal recursion depth tracker — do not set from external callers */
14
+ _recursionDepth?: number): Promise<PRInfo | null>;
9
15
  export declare function resetPRCache(): void;
@@ -1,3 +1,4 @@
1
1
  import type { GitExecOptions, GitExecResult } from '../types/index.js';
2
2
  export declare function gitExec(args: string[], options?: GitExecOptions): Promise<GitExecResult>;
3
+ export declare function gitPipe(producerArgs: string[], consumerArgs: string[], options?: GitExecOptions): Promise<GitExecResult>;
3
4
  export declare function shellExec(command: string, args: string[], options?: GitExecOptions): Promise<GitExecResult>;
@@ -1,4 +1,7 @@
1
- import type { HealthReport } from '../types/index.js';
1
+ import type { CloneStatus, HealthReport } from '../types/index.js';
2
+ export declare function checkCloneStatus(options?: {
3
+ cwd?: string;
4
+ }): Promise<CloneStatus>;
2
5
  export declare function checkGitHealth(options?: {
3
6
  cwd?: string;
4
7
  }): Promise<HealthReport>;
@@ -1,3 +1,3 @@
1
- export { gitExec, shellExec } from './executor.js';
1
+ export { gitExec, gitPipe, shellExec } from './executor.js';
2
2
  export { detectPlatform, getRemoteInfo, parseRemoteUrl } from './remote.js';
3
- export { checkGitHealth } from './health.js';
3
+ export { checkCloneStatus, checkGitHealth } from './health.js';