@lumy-pack/line-lore 0.0.5 → 0.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -40,6 +40,9 @@ npx @lumy-pack/line-lore graph pr 42 --depth 2
40
40
  # Check system health
41
41
  npx @lumy-pack/line-lore health
42
42
 
43
+ # Return cached results only (no API calls)
44
+ npx @lumy-pack/line-lore trace src/auth.ts -L 42 --cache-only
45
+
43
46
  # Clear caches
44
47
  npx @lumy-pack/line-lore cache clear
45
48
 
@@ -95,6 +98,48 @@ console.log(`Git version: ${report.gitVersion}`);
95
98
 
96
99
  No ML or heuristics — results are always reproducible.
97
100
 
101
+ ### PR Lookup Algorithm
102
+
103
+ Once a commit is identified, line-lore resolves it to a PR using a **cost-ascending sequential fallback chain**. Each strategy is tried in order; the first success returns immediately.
104
+
105
+ ```
106
+ lookupPR(commitSha)
107
+
108
+
109
+ Strategy 1 — Cache ─────────────────── cost: O(1), instant
110
+ │ ShardedCache<PRInfo> lookup by SHA
111
+ │ hit? → return cached PRInfo
112
+ │ miss + --cache-only? → return null (skip all fallbacks)
113
+
114
+ Strategy 2 — Ancestry-path + Message ─ cost: 1 git-log
115
+ │ 1st: git log --merges --ancestry-path --first-parent sha..HEAD
116
+ │ 2nd: (fallback) full ancestry-path without --first-parent
117
+ │ Parse merge subject with 4 regex patterns:
118
+ │ • /Merge pull request #(\d+)/ — GitHub merge commit
119
+ │ • /\(#(\d+)\)\s*$/ — Squash merge convention
120
+ │ • /!(\d+)\s*$/ — GitLab merge commit; /Merged PR (\d+):/ — Azure DevOps
121
+ │ If PR# found + adapter available → enrich via API
122
+ │ found? → return PRInfo
123
+
124
+ Strategy 3 — Platform API ──────────── cost: 1 HTTP request
125
+ │ gh api repos/{owner}/{repo}/commits/{sha}/pulls
126
+ │ Filter: merged PRs only (mergedAt != null)
127
+ │ found? → return PRInfo
128
+
129
+ Strategy 4 — Patch-ID matching ─────── cost: streaming 500+ commits
130
+ │ Compute target: git diff sha^..sha | git patch-id --stable
131
+ │ Batch scan: git log -500 -p HEAD | git patch-id --stable
132
+ │ (--deep mode: 2000 commits)
133
+ │ Find candidate with same patch-id but different SHA
134
+ │ match? → lookupPR(matchedSha) recursively
135
+
136
+ All failed → null
137
+ ```
138
+
139
+ **Why this order?** The chain is sorted by cost. Most repositories use merge or squash workflows, so Strategy 2 resolves >90% of lookups with zero API calls. Strategy 3 (single HTTP) is cheaper than Strategy 4 (streaming hundreds of commit diffs), so API is tried before patch-id scanning.
140
+
141
+ **Patch-ID explained**: `git patch-id --stable` generates a content-based hash from a commit's diff, ignoring all metadata (author, date, message). When a commit is rebased, its SHA changes but the patch-id stays the same — enabling deterministic matching of rebased commits.
142
+
98
143
  ## Understanding the Output
99
144
 
100
145
  ### TraceNode — the core unit of output
@@ -302,6 +347,7 @@ Trace a code line to its originating PR.
302
347
  | `deep` | `boolean` | no | `false` | Expand patch-id scan range (500→2000), continue search after merge commit match |
303
348
  | `noAst` | `boolean` | no | `false` | Disable AST analysis |
304
349
  | `noCache` | `boolean` | no | `false` | Disable cache reads and writes |
350
+ | `cacheOnly` | `boolean` | no | `false` | Return cached results only — skip API calls, ancestry traversal, and patch-id scan. If both `cacheOnly` and `noCache` are set, `cacheOnly` takes precedence (cache reads remain enabled) and a warning is emitted. |
305
351
 
306
352
  **Returns (`TraceFullResult`):**
307
353
 
@@ -499,6 +545,30 @@ async function analyzeChangedLines(file: string, lines: number[]) {
499
545
  }
500
546
  ```
501
547
 
548
+ ### Cache-Only Lookups
549
+
550
+ Use `cacheOnly` to instantly return previously cached PR data without any API calls or git operations beyond blame. Ideal for IDE integrations and repeated lookups where speed matters more than freshness.
551
+
552
+ ```typescript
553
+ import { trace } from '@lumy-pack/line-lore';
554
+
555
+ async function getCachedPR(filePath: string, lineNumber: number) {
556
+ const result = await trace({
557
+ file: filePath,
558
+ line: lineNumber,
559
+ cacheOnly: true, // return cached data only, never fetch
560
+ });
561
+
562
+ const pr = result.nodes.find(n => n.type === 'pull_request');
563
+ if (pr) {
564
+ return { number: pr.prNumber, url: pr.prUrl };
565
+ }
566
+
567
+ // Cache miss — no PR data available without fetching
568
+ return null;
569
+ }
570
+ ```
571
+
502
572
  ### Batch Processing with Cache Control
503
573
 
504
574
  ```typescript
@@ -566,6 +636,7 @@ import type {
566
636
  | `--end-line <num>` | Ending line for range |
567
637
  | `--deep` | Deep trace (squash merges) |
568
638
  | `--output <format>` | Output as json, llm, or human |
639
+ | `--cache-only` | Return cached results only (no API calls) |
569
640
  | `--quiet` | Suppress formatting |
570
641
  | `npx @lumy-pack/line-lore health` | Check system health |
571
642
  | `npx @lumy-pack/line-lore graph pr <num>` | Show issues linked to a PR |
package/dist/cli.mjs CHANGED
@@ -369,14 +369,22 @@ var init_executor = __esm({
369
369
  import { filter as filter4, isTruthy as isTruthy4 } from "@winglet/common-utils";
370
370
  async function findMergeCommit(commitSha, options) {
371
371
  const ref = options?.ref ?? "HEAD";
372
+ const budget = options?.timeout ?? DEFAULT_ANCESTRY_TIMEOUT;
373
+ const startTime = Date.now();
372
374
  const firstParentResult = await findMergeCommitWithArgs(
373
375
  commitSha,
374
376
  ref,
375
377
  ["--first-parent"],
376
- options
378
+ { ...options, timeout: budget }
377
379
  );
378
380
  if (firstParentResult) return firstParentResult;
379
- return findMergeCommitWithArgs(commitSha, ref, [], options);
381
+ const elapsed = Date.now() - startTime;
382
+ const remaining = budget - elapsed;
383
+ if (remaining <= 0) return null;
384
+ return findMergeCommitWithArgs(commitSha, ref, [], {
385
+ ...options,
386
+ timeout: remaining
387
+ });
380
388
  }
381
389
  async function findMergeCommitWithArgs(commitSha, ref, extraArgs, options) {
382
390
  try {
@@ -424,12 +432,16 @@ function extractPRFromMergeMessage(subject) {
424
432
  if (squashMatch) return parseInt(squashMatch[1], 10);
425
433
  const glMatch = /!(\d+)\s*$/.exec(subject);
426
434
  if (glMatch) return parseInt(glMatch[1], 10);
435
+ const adoMatch = /Merged PR (\d+):/.exec(subject);
436
+ if (adoMatch) return parseInt(adoMatch[1], 10);
427
437
  return null;
428
438
  }
439
+ var DEFAULT_ANCESTRY_TIMEOUT;
429
440
  var init_ancestry = __esm({
430
441
  "src/core/ancestry/ancestry.ts"() {
431
442
  "use strict";
432
443
  init_executor();
444
+ DEFAULT_ANCESTRY_TIMEOUT = 3e4;
433
445
  }
434
446
  });
435
447
 
@@ -549,10 +561,40 @@ function getCache2(repoId, noCache) {
549
561
  }
550
562
  return cache;
551
563
  }
552
- async function lookupPR(commitSha, adapter, options) {
553
- const cache = getCache2(options?.repoId, options?.noCache);
564
+ function toCachedPR(pr) {
565
+ return {
566
+ number: pr.number,
567
+ title: pr.title,
568
+ author: pr.author,
569
+ url: pr.url,
570
+ mergeCommit: pr.mergeCommit,
571
+ baseBranch: pr.baseBranch,
572
+ mergedAt: pr.mergedAt ? new Date(pr.mergedAt).getTime() : void 0
573
+ };
574
+ }
575
+ function fromCachedPR(cached) {
576
+ let mergedAt;
577
+ if (cached.mergedAt != null) {
578
+ mergedAt = typeof cached.mergedAt === "number" ? new Date(cached.mergedAt).toISOString() : String(cached.mergedAt);
579
+ }
580
+ return {
581
+ number: cached.number,
582
+ title: cached.title,
583
+ author: cached.author,
584
+ url: cached.url,
585
+ mergeCommit: cached.mergeCommit,
586
+ baseBranch: cached.baseBranch,
587
+ mergedAt
588
+ };
589
+ }
590
+ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
591
+ const cache = getCache2(
592
+ options?.repoId,
593
+ options?.cacheOnly ? false : options?.noCache
594
+ );
554
595
  const cached = await cache.get(commitSha);
555
- if (cached) return cached;
596
+ if (cached) return fromCachedPR(cached);
597
+ if (options?.cacheOnly) return null;
556
598
  let mergeBasedPR = null;
557
599
  const mergeResult = await findMergeCommit(commitSha, options);
558
600
  if (mergeResult) {
@@ -575,31 +617,38 @@ async function lookupPR(commitSha, adapter, options) {
575
617
  };
576
618
  }
577
619
  if (!options?.deep || mergeBasedPR.mergedAt) {
578
- await cache.set(commitSha, mergeBasedPR);
620
+ await cache.set(commitSha, toCachedPR(mergeBasedPR));
579
621
  return mergeBasedPR;
580
622
  }
581
623
  }
582
624
  }
583
625
  if (mergeBasedPR) {
584
- await cache.set(commitSha, mergeBasedPR);
626
+ await cache.set(commitSha, toCachedPR(mergeBasedPR));
585
627
  return mergeBasedPR;
586
628
  }
587
629
  if (adapter) {
588
630
  const prInfo = await adapter.getPRForCommit(commitSha);
589
631
  if (prInfo?.mergedAt) {
590
- await cache.set(commitSha, prInfo);
632
+ await cache.set(commitSha, toCachedPR(prInfo));
591
633
  return prInfo;
592
634
  }
593
635
  }
594
- const patchIdMatch = await findPatchIdMatch(commitSha, {
595
- ...options,
596
- scanDepth: options?.deep ? DEEP_SCAN_DEPTH : void 0
597
- });
598
- if (patchIdMatch) {
599
- const result = await lookupPR(patchIdMatch.matchedSha, adapter, options);
600
- if (result) {
601
- await cache.set(commitSha, result);
602
- return result;
636
+ if (!options?.skipPatchIdScan && _recursionDepth < MAX_RECURSION_DEPTH) {
637
+ const patchIdMatch = await findPatchIdMatch(commitSha, {
638
+ ...options,
639
+ scanDepth: options?.deep ? DEEP_SCAN_DEPTH : void 0
640
+ });
641
+ if (patchIdMatch) {
642
+ const result = await lookupPR(
643
+ patchIdMatch.matchedSha,
644
+ adapter,
645
+ options,
646
+ _recursionDepth + 1
647
+ );
648
+ if (result) {
649
+ await cache.set(commitSha, toCachedPR(result));
650
+ return result;
651
+ }
603
652
  }
604
653
  }
605
654
  return null;
@@ -607,7 +656,7 @@ async function lookupPR(commitSha, adapter, options) {
607
656
  function resetPRCache() {
608
657
  cacheRegistry2.clear();
609
658
  }
610
- var cacheRegistry2, DEEP_SCAN_DEPTH;
659
+ var cacheRegistry2, DEEP_SCAN_DEPTH, MAX_RECURSION_DEPTH;
611
660
  var init_pr_lookup = __esm({
612
661
  "src/core/pr-lookup/pr-lookup.ts"() {
613
662
  "use strict";
@@ -616,6 +665,7 @@ var init_pr_lookup = __esm({
616
665
  init_patch_id2();
617
666
  cacheRegistry2 = /* @__PURE__ */ new Map();
618
667
  DEEP_SCAN_DEPTH = 2e3;
668
+ MAX_RECURSION_DEPTH = 2;
619
669
  }
620
670
  });
621
671
 
@@ -637,7 +687,7 @@ var VERSION;
637
687
  var init_version = __esm({
638
688
  "src/version.ts"() {
639
689
  "use strict";
640
- VERSION = "0.0.5";
690
+ VERSION = "0.0.6";
641
691
  }
642
692
  });
643
693
 
@@ -962,6 +1012,27 @@ function isVersionAtLeast(version2, minVersion) {
962
1012
  }
963
1013
  return true;
964
1014
  }
1015
+ async function checkCloneStatus(options) {
1016
+ let partialClone = false;
1017
+ let shallow = false;
1018
+ try {
1019
+ const shallowResult = await gitExec(
1020
+ ["rev-parse", "--is-shallow-repository"],
1021
+ { cwd: options?.cwd }
1022
+ );
1023
+ shallow = shallowResult.stdout.trim() === "true";
1024
+ } catch {
1025
+ }
1026
+ try {
1027
+ const partialResult = await gitExec(
1028
+ ["config", "--get", "extensions.partialclone"],
1029
+ { cwd: options?.cwd }
1030
+ );
1031
+ partialClone = partialResult.stdout.trim().length > 0;
1032
+ } catch {
1033
+ }
1034
+ return { partialClone, shallow };
1035
+ }
965
1036
  async function checkGitHealth(options) {
966
1037
  const hints = [];
967
1038
  let gitVersion = "0.0.0";
@@ -988,7 +1059,18 @@ async function checkGitHealth(options) {
988
1059
  `Upgrade git to ${BLOOM_FILTER_MIN_VERSION.join(".")}+ for bloom filter support (current: ${gitVersion}).`
989
1060
  );
990
1061
  }
991
- return { commitGraph, bloomFilter, gitVersion, hints };
1062
+ const cloneStatus = await checkCloneStatus({ cwd: options?.cwd });
1063
+ if (cloneStatus.partialClone) {
1064
+ hints.push(
1065
+ "Partial clone detected. Patch-ID scan (Strategy 4) will be skipped to avoid blob downloads."
1066
+ );
1067
+ }
1068
+ if (cloneStatus.shallow) {
1069
+ hints.push(
1070
+ "Shallow repository detected. Ancestry-path results may be incomplete."
1071
+ );
1072
+ }
1073
+ return { commitGraph, bloomFilter, gitVersion, hints, ...cloneStatus };
992
1074
  }
993
1075
 
994
1076
  // src/git/remote.ts
@@ -2161,7 +2243,7 @@ async function runBlameAndAuth(adapter, options, execOptions) {
2161
2243
  }
2162
2244
  return { analyzed: blameResult.value, operatingLevel, warnings };
2163
2245
  }
2164
- async function processEntry(entry, featureFlags, adapter, options, execOptions, repoId) {
2246
+ async function processEntry(entry, featureFlags, adapter, options, execOptions, repoId, skipPatchIdScan) {
2165
2247
  const nodes = [];
2166
2248
  const commitNode = {
2167
2249
  type: entry.isCosmetic ? "cosmetic_commit" : "original_commit",
@@ -2192,8 +2274,10 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
2192
2274
  const prInfo = await lookupPR(targetSha, adapter, {
2193
2275
  ...execOptions,
2194
2276
  noCache: options.noCache,
2277
+ cacheOnly: options.cacheOnly,
2195
2278
  deep: featureFlags.deepTrace,
2196
- repoId
2279
+ repoId,
2280
+ skipPatchIdScan
2197
2281
  });
2198
2282
  if (prInfo) {
2199
2283
  nodes.push({
@@ -2210,11 +2294,19 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
2210
2294
  }
2211
2295
  return nodes;
2212
2296
  }
2213
- async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOptions, repoId) {
2297
+ async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOptions, repoId, skipPatchIdScan) {
2214
2298
  const results = await Promise.allSettled(
2215
2299
  map8(
2216
2300
  analyzed,
2217
- (entry) => processEntry(entry, featureFlags, adapter, options, execOptions, repoId)
2301
+ (entry) => processEntry(
2302
+ entry,
2303
+ featureFlags,
2304
+ adapter,
2305
+ options,
2306
+ execOptions,
2307
+ repoId,
2308
+ skipPatchIdScan
2309
+ )
2218
2310
  )
2219
2311
  );
2220
2312
  return results.flatMap((r) => r.status === "fulfilled" ? r.value : []);
@@ -2245,14 +2337,36 @@ async function trace(options) {
2245
2337
  );
2246
2338
  const operatingLevel = blameAuth.operatingLevel || platform.operatingLevel;
2247
2339
  const warnings = [...platform.warnings, ...blameAuth.warnings];
2340
+ if (options.cacheOnly && options.noCache) {
2341
+ warnings.push(
2342
+ "Both cacheOnly and noCache are set. cacheOnly takes precedence \u2014 cache reads are enabled."
2343
+ );
2344
+ }
2248
2345
  const featureFlags = computeFeatureFlags(operatingLevel, options);
2346
+ let cloneStatus = { partialClone: false, shallow: false };
2347
+ try {
2348
+ const result = await checkCloneStatus({ cwd: options.cwd });
2349
+ if (result) cloneStatus = result;
2350
+ } catch {
2351
+ }
2352
+ if (cloneStatus.partialClone) {
2353
+ warnings.push(
2354
+ "Partial clone detected. Patch-ID scan (Strategy 4) will be skipped to avoid blob downloads."
2355
+ );
2356
+ }
2357
+ if (cloneStatus.shallow) {
2358
+ warnings.push(
2359
+ "Shallow repository detected. Ancestry-path results may be incomplete."
2360
+ );
2361
+ }
2249
2362
  const nodes = await buildTraceNodes(
2250
2363
  blameAuth.analyzed,
2251
2364
  featureFlags,
2252
2365
  platform.adapter,
2253
2366
  options,
2254
2367
  execOptions,
2255
- repoId
2368
+ repoId,
2369
+ cloneStatus.partialClone || void 0
2256
2370
  );
2257
2371
  return { nodes, operatingLevel, featureFlags, warnings };
2258
2372
  }
@@ -2459,7 +2573,7 @@ function formatNodeHuman(node) {
2459
2573
  init_normalizer();
2460
2574
  init_errors();
2461
2575
  function registerTraceCommand(program2) {
2462
- program2.command("trace <file>").description("Trace a file line to its originating PR").requiredOption("-L, --line <range>", 'Line number or range (e.g., "42" or "10,50")').option("--deep", "Enable deep trace for squash PRs").option("--no-ast", "Disable AST diff analysis").option("--no-cache", "Disable cache").option("--json", "Output in JSON format").option("-q, --quiet", "Output PR number only").option("--output <format>", "Output format: human, json, llm", "human").option("--no-color", "Disable colored output").action(async (file, opts) => {
2576
+ program2.command("trace <file>").description("Trace a file line to its originating PR").requiredOption("-L, --line <range>", 'Line number or range (e.g., "42" or "10,50")').option("--deep", "Enable deep trace for squash PRs").option("--no-ast", "Disable AST diff analysis").option("--no-cache", "Disable cache").option("--cache-only", "Return cached results only (no API calls)").option("--json", "Output in JSON format").option("-q, --quiet", "Output PR number only").option("--output <format>", "Output format: human, json, llm", "human").option("--no-color", "Disable colored output").action(async (file, opts) => {
2463
2577
  const lineStr = opts.line;
2464
2578
  const parts = lineStr.split(",");
2465
2579
  const line = parseInt(parts[0], 10);
@@ -2470,7 +2584,8 @@ function registerTraceCommand(program2) {
2470
2584
  endLine,
2471
2585
  deep: opts.deep,
2472
2586
  noAst: opts.ast === false,
2473
- noCache: opts.cache === false
2587
+ noCache: opts.cache === false,
2588
+ cacheOnly: opts.cacheOnly
2474
2589
  };
2475
2590
  const cliOptions = {
2476
2591
  json: opts.json,
@@ -4,6 +4,7 @@ export interface AncestryResult {
4
4
  parentShas: string[];
5
5
  subject: string;
6
6
  }
7
+ export declare const DEFAULT_ANCESTRY_TIMEOUT = 30000;
7
8
  export declare function findMergeCommit(commitSha: string, options?: GitExecOptions & {
8
9
  ref?: string;
9
10
  }): Promise<AncestryResult | null>;
@@ -2,8 +2,14 @@ import type { RepoIdentity } from '../../cache/index.js';
2
2
  import type { GitExecOptions, PRInfo, PlatformAdapter } from '../../types/index.js';
3
3
  export interface PRLookupOptions extends GitExecOptions {
4
4
  noCache?: boolean;
5
+ /** Return cached results only — skip all fallback strategies */
6
+ cacheOnly?: boolean;
5
7
  deep?: boolean;
6
8
  repoId?: RepoIdentity;
9
+ /** Skip Strategy 4 (patch-id scan) — set automatically for partial clone environments */
10
+ skipPatchIdScan?: boolean;
7
11
  }
8
- export declare function lookupPR(commitSha: string, adapter: PlatformAdapter | null, options?: PRLookupOptions): Promise<PRInfo | null>;
12
+ export declare function lookupPR(commitSha: string, adapter: PlatformAdapter | null, options?: PRLookupOptions,
13
+ /** @internal recursion depth tracker — do not set from external callers */
14
+ _recursionDepth?: number): Promise<PRInfo | null>;
9
15
  export declare function resetPRCache(): void;
@@ -1,4 +1,7 @@
1
- import type { HealthReport } from '../types/index.js';
1
+ import type { CloneStatus, HealthReport } from '../types/index.js';
2
+ export declare function checkCloneStatus(options?: {
3
+ cwd?: string;
4
+ }): Promise<CloneStatus>;
2
5
  export declare function checkGitHealth(options?: {
3
6
  cwd?: string;
4
7
  }): Promise<HealthReport>;
@@ -1,3 +1,3 @@
1
1
  export { gitExec, gitPipe, shellExec } from './executor.js';
2
2
  export { detectPlatform, getRemoteInfo, parseRemoteUrl } from './remote.js';
3
- export { checkGitHealth } from './health.js';
3
+ export { checkCloneStatus, checkGitHealth } from './health.js';
package/dist/index.cjs CHANGED
@@ -383,14 +383,22 @@ var init_executor = __esm({
383
383
  // src/core/ancestry/ancestry.ts
384
384
  async function findMergeCommit(commitSha, options) {
385
385
  const ref = options?.ref ?? "HEAD";
386
+ const budget = options?.timeout ?? DEFAULT_ANCESTRY_TIMEOUT;
387
+ const startTime = Date.now();
386
388
  const firstParentResult = await findMergeCommitWithArgs(
387
389
  commitSha,
388
390
  ref,
389
391
  ["--first-parent"],
390
- options
392
+ { ...options, timeout: budget }
391
393
  );
392
394
  if (firstParentResult) return firstParentResult;
393
- return findMergeCommitWithArgs(commitSha, ref, [], options);
395
+ const elapsed = Date.now() - startTime;
396
+ const remaining = budget - elapsed;
397
+ if (remaining <= 0) return null;
398
+ return findMergeCommitWithArgs(commitSha, ref, [], {
399
+ ...options,
400
+ timeout: remaining
401
+ });
394
402
  }
395
403
  async function findMergeCommitWithArgs(commitSha, ref, extraArgs, options) {
396
404
  try {
@@ -438,15 +446,18 @@ function extractPRFromMergeMessage(subject) {
438
446
  if (squashMatch) return parseInt(squashMatch[1], 10);
439
447
  const glMatch = /!(\d+)\s*$/.exec(subject);
440
448
  if (glMatch) return parseInt(glMatch[1], 10);
449
+ const adoMatch = /Merged PR (\d+):/.exec(subject);
450
+ if (adoMatch) return parseInt(adoMatch[1], 10);
441
451
  return null;
442
452
  }
443
- var import_common_utils9;
453
+ var import_common_utils9, DEFAULT_ANCESTRY_TIMEOUT;
444
454
  var init_ancestry = __esm({
445
455
  "src/core/ancestry/ancestry.ts"() {
446
456
  "use strict";
447
457
  init_cjs_shims();
448
458
  import_common_utils9 = require("@winglet/common-utils");
449
459
  init_executor();
460
+ DEFAULT_ANCESTRY_TIMEOUT = 3e4;
450
461
  }
451
462
  });
452
463
 
@@ -569,10 +580,40 @@ function getCache2(repoId, noCache) {
569
580
  }
570
581
  return cache;
571
582
  }
572
- async function lookupPR(commitSha, adapter, options) {
573
- const cache = getCache2(options?.repoId, options?.noCache);
583
+ function toCachedPR(pr) {
584
+ return {
585
+ number: pr.number,
586
+ title: pr.title,
587
+ author: pr.author,
588
+ url: pr.url,
589
+ mergeCommit: pr.mergeCommit,
590
+ baseBranch: pr.baseBranch,
591
+ mergedAt: pr.mergedAt ? new Date(pr.mergedAt).getTime() : void 0
592
+ };
593
+ }
594
+ function fromCachedPR(cached) {
595
+ let mergedAt;
596
+ if (cached.mergedAt != null) {
597
+ mergedAt = typeof cached.mergedAt === "number" ? new Date(cached.mergedAt).toISOString() : String(cached.mergedAt);
598
+ }
599
+ return {
600
+ number: cached.number,
601
+ title: cached.title,
602
+ author: cached.author,
603
+ url: cached.url,
604
+ mergeCommit: cached.mergeCommit,
605
+ baseBranch: cached.baseBranch,
606
+ mergedAt
607
+ };
608
+ }
609
+ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
610
+ const cache = getCache2(
611
+ options?.repoId,
612
+ options?.cacheOnly ? false : options?.noCache
613
+ );
574
614
  const cached = await cache.get(commitSha);
575
- if (cached) return cached;
615
+ if (cached) return fromCachedPR(cached);
616
+ if (options?.cacheOnly) return null;
576
617
  let mergeBasedPR = null;
577
618
  const mergeResult = await findMergeCommit(commitSha, options);
578
619
  if (mergeResult) {
@@ -595,31 +636,38 @@ async function lookupPR(commitSha, adapter, options) {
595
636
  };
596
637
  }
597
638
  if (!options?.deep || mergeBasedPR.mergedAt) {
598
- await cache.set(commitSha, mergeBasedPR);
639
+ await cache.set(commitSha, toCachedPR(mergeBasedPR));
599
640
  return mergeBasedPR;
600
641
  }
601
642
  }
602
643
  }
603
644
  if (mergeBasedPR) {
604
- await cache.set(commitSha, mergeBasedPR);
645
+ await cache.set(commitSha, toCachedPR(mergeBasedPR));
605
646
  return mergeBasedPR;
606
647
  }
607
648
  if (adapter) {
608
649
  const prInfo = await adapter.getPRForCommit(commitSha);
609
650
  if (prInfo?.mergedAt) {
610
- await cache.set(commitSha, prInfo);
651
+ await cache.set(commitSha, toCachedPR(prInfo));
611
652
  return prInfo;
612
653
  }
613
654
  }
614
- const patchIdMatch = await findPatchIdMatch(commitSha, {
615
- ...options,
616
- scanDepth: options?.deep ? DEEP_SCAN_DEPTH : void 0
617
- });
618
- if (patchIdMatch) {
619
- const result = await lookupPR(patchIdMatch.matchedSha, adapter, options);
620
- if (result) {
621
- await cache.set(commitSha, result);
622
- return result;
655
+ if (!options?.skipPatchIdScan && _recursionDepth < MAX_RECURSION_DEPTH) {
656
+ const patchIdMatch = await findPatchIdMatch(commitSha, {
657
+ ...options,
658
+ scanDepth: options?.deep ? DEEP_SCAN_DEPTH : void 0
659
+ });
660
+ if (patchIdMatch) {
661
+ const result = await lookupPR(
662
+ patchIdMatch.matchedSha,
663
+ adapter,
664
+ options,
665
+ _recursionDepth + 1
666
+ );
667
+ if (result) {
668
+ await cache.set(commitSha, toCachedPR(result));
669
+ return result;
670
+ }
623
671
  }
624
672
  }
625
673
  return null;
@@ -627,7 +675,7 @@ async function lookupPR(commitSha, adapter, options) {
627
675
  function resetPRCache() {
628
676
  cacheRegistry2.clear();
629
677
  }
630
- var cacheRegistry2, DEEP_SCAN_DEPTH;
678
+ var cacheRegistry2, DEEP_SCAN_DEPTH, MAX_RECURSION_DEPTH;
631
679
  var init_pr_lookup = __esm({
632
680
  "src/core/pr-lookup/pr-lookup.ts"() {
633
681
  "use strict";
@@ -637,6 +685,7 @@ var init_pr_lookup = __esm({
637
685
  init_patch_id2();
638
686
  cacheRegistry2 = /* @__PURE__ */ new Map();
639
687
  DEEP_SCAN_DEPTH = 2e3;
688
+ MAX_RECURSION_DEPTH = 2;
640
689
  }
641
690
  });
642
691
 
@@ -905,6 +954,27 @@ function isVersionAtLeast(version, minVersion) {
905
954
  }
906
955
  return true;
907
956
  }
957
+ async function checkCloneStatus(options) {
958
+ let partialClone = false;
959
+ let shallow = false;
960
+ try {
961
+ const shallowResult = await gitExec(
962
+ ["rev-parse", "--is-shallow-repository"],
963
+ { cwd: options?.cwd }
964
+ );
965
+ shallow = shallowResult.stdout.trim() === "true";
966
+ } catch {
967
+ }
968
+ try {
969
+ const partialResult = await gitExec(
970
+ ["config", "--get", "extensions.partialclone"],
971
+ { cwd: options?.cwd }
972
+ );
973
+ partialClone = partialResult.stdout.trim().length > 0;
974
+ } catch {
975
+ }
976
+ return { partialClone, shallow };
977
+ }
908
978
  async function checkGitHealth(options) {
909
979
  const hints = [];
910
980
  let gitVersion = "0.0.0";
@@ -931,7 +1001,18 @@ async function checkGitHealth(options) {
931
1001
  `Upgrade git to ${BLOOM_FILTER_MIN_VERSION.join(".")}+ for bloom filter support (current: ${gitVersion}).`
932
1002
  );
933
1003
  }
934
- return { commitGraph, bloomFilter, gitVersion, hints };
1004
+ const cloneStatus = await checkCloneStatus({ cwd: options?.cwd });
1005
+ if (cloneStatus.partialClone) {
1006
+ hints.push(
1007
+ "Partial clone detected. Patch-ID scan (Strategy 4) will be skipped to avoid blob downloads."
1008
+ );
1009
+ }
1010
+ if (cloneStatus.shallow) {
1011
+ hints.push(
1012
+ "Shallow repository detected. Ancestry-path results may be incomplete."
1013
+ );
1014
+ }
1015
+ return { commitGraph, bloomFilter, gitVersion, hints, ...cloneStatus };
935
1016
  }
936
1017
 
937
1018
  // src/platform/index.ts
@@ -2155,7 +2236,7 @@ async function runBlameAndAuth(adapter, options, execOptions) {
2155
2236
  }
2156
2237
  return { analyzed: blameResult.value, operatingLevel, warnings };
2157
2238
  }
2158
- async function processEntry(entry, featureFlags, adapter, options, execOptions, repoId) {
2239
+ async function processEntry(entry, featureFlags, adapter, options, execOptions, repoId, skipPatchIdScan) {
2159
2240
  const nodes = [];
2160
2241
  const commitNode = {
2161
2242
  type: entry.isCosmetic ? "cosmetic_commit" : "original_commit",
@@ -2186,8 +2267,10 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
2186
2267
  const prInfo = await lookupPR(targetSha, adapter, {
2187
2268
  ...execOptions,
2188
2269
  noCache: options.noCache,
2270
+ cacheOnly: options.cacheOnly,
2189
2271
  deep: featureFlags.deepTrace,
2190
- repoId
2272
+ repoId,
2273
+ skipPatchIdScan
2191
2274
  });
2192
2275
  if (prInfo) {
2193
2276
  nodes.push({
@@ -2204,11 +2287,19 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
2204
2287
  }
2205
2288
  return nodes;
2206
2289
  }
2207
- async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOptions, repoId) {
2290
+ async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOptions, repoId, skipPatchIdScan) {
2208
2291
  const results = await Promise.allSettled(
2209
2292
  (0, import_common_utils11.map)(
2210
2293
  analyzed,
2211
- (entry) => processEntry(entry, featureFlags, adapter, options, execOptions, repoId)
2294
+ (entry) => processEntry(
2295
+ entry,
2296
+ featureFlags,
2297
+ adapter,
2298
+ options,
2299
+ execOptions,
2300
+ repoId,
2301
+ skipPatchIdScan
2302
+ )
2212
2303
  )
2213
2304
  );
2214
2305
  return results.flatMap((r) => r.status === "fulfilled" ? r.value : []);
@@ -2239,14 +2330,36 @@ async function trace(options) {
2239
2330
  );
2240
2331
  const operatingLevel = blameAuth.operatingLevel || platform.operatingLevel;
2241
2332
  const warnings = [...platform.warnings, ...blameAuth.warnings];
2333
+ if (options.cacheOnly && options.noCache) {
2334
+ warnings.push(
2335
+ "Both cacheOnly and noCache are set. cacheOnly takes precedence \u2014 cache reads are enabled."
2336
+ );
2337
+ }
2242
2338
  const featureFlags = computeFeatureFlags(operatingLevel, options);
2339
+ let cloneStatus = { partialClone: false, shallow: false };
2340
+ try {
2341
+ const result = await checkCloneStatus({ cwd: options.cwd });
2342
+ if (result) cloneStatus = result;
2343
+ } catch {
2344
+ }
2345
+ if (cloneStatus.partialClone) {
2346
+ warnings.push(
2347
+ "Partial clone detected. Patch-ID scan (Strategy 4) will be skipped to avoid blob downloads."
2348
+ );
2349
+ }
2350
+ if (cloneStatus.shallow) {
2351
+ warnings.push(
2352
+ "Shallow repository detected. Ancestry-path results may be incomplete."
2353
+ );
2354
+ }
2243
2355
  const nodes = await buildTraceNodes(
2244
2356
  blameAuth.analyzed,
2245
2357
  featureFlags,
2246
2358
  platform.adapter,
2247
2359
  options,
2248
2360
  execOptions,
2249
- repoId
2361
+ repoId,
2362
+ cloneStatus.partialClone || void 0
2250
2363
  );
2251
2364
  return { nodes, operatingLevel, featureFlags, warnings };
2252
2365
  }
package/dist/index.mjs CHANGED
@@ -370,14 +370,22 @@ var init_executor = __esm({
370
370
  import { filter as filter4, isTruthy as isTruthy4 } from "@winglet/common-utils";
371
371
  async function findMergeCommit(commitSha, options) {
372
372
  const ref = options?.ref ?? "HEAD";
373
+ const budget = options?.timeout ?? DEFAULT_ANCESTRY_TIMEOUT;
374
+ const startTime = Date.now();
373
375
  const firstParentResult = await findMergeCommitWithArgs(
374
376
  commitSha,
375
377
  ref,
376
378
  ["--first-parent"],
377
- options
379
+ { ...options, timeout: budget }
378
380
  );
379
381
  if (firstParentResult) return firstParentResult;
380
- return findMergeCommitWithArgs(commitSha, ref, [], options);
382
+ const elapsed = Date.now() - startTime;
383
+ const remaining = budget - elapsed;
384
+ if (remaining <= 0) return null;
385
+ return findMergeCommitWithArgs(commitSha, ref, [], {
386
+ ...options,
387
+ timeout: remaining
388
+ });
381
389
  }
382
390
  async function findMergeCommitWithArgs(commitSha, ref, extraArgs, options) {
383
391
  try {
@@ -425,13 +433,17 @@ function extractPRFromMergeMessage(subject) {
425
433
  if (squashMatch) return parseInt(squashMatch[1], 10);
426
434
  const glMatch = /!(\d+)\s*$/.exec(subject);
427
435
  if (glMatch) return parseInt(glMatch[1], 10);
436
+ const adoMatch = /Merged PR (\d+):/.exec(subject);
437
+ if (adoMatch) return parseInt(adoMatch[1], 10);
428
438
  return null;
429
439
  }
440
+ var DEFAULT_ANCESTRY_TIMEOUT;
430
441
  var init_ancestry = __esm({
431
442
  "src/core/ancestry/ancestry.ts"() {
432
443
  "use strict";
433
444
  init_esm_shims();
434
445
  init_executor();
446
+ DEFAULT_ANCESTRY_TIMEOUT = 3e4;
435
447
  }
436
448
  });
437
449
 
@@ -554,10 +566,40 @@ function getCache2(repoId, noCache) {
554
566
  }
555
567
  return cache;
556
568
  }
557
- async function lookupPR(commitSha, adapter, options) {
558
- const cache = getCache2(options?.repoId, options?.noCache);
569
+ function toCachedPR(pr) {
570
+ return {
571
+ number: pr.number,
572
+ title: pr.title,
573
+ author: pr.author,
574
+ url: pr.url,
575
+ mergeCommit: pr.mergeCommit,
576
+ baseBranch: pr.baseBranch,
577
+ mergedAt: pr.mergedAt ? new Date(pr.mergedAt).getTime() : void 0
578
+ };
579
+ }
580
+ function fromCachedPR(cached) {
581
+ let mergedAt;
582
+ if (cached.mergedAt != null) {
583
+ mergedAt = typeof cached.mergedAt === "number" ? new Date(cached.mergedAt).toISOString() : String(cached.mergedAt);
584
+ }
585
+ return {
586
+ number: cached.number,
587
+ title: cached.title,
588
+ author: cached.author,
589
+ url: cached.url,
590
+ mergeCommit: cached.mergeCommit,
591
+ baseBranch: cached.baseBranch,
592
+ mergedAt
593
+ };
594
+ }
595
+ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
596
+ const cache = getCache2(
597
+ options?.repoId,
598
+ options?.cacheOnly ? false : options?.noCache
599
+ );
559
600
  const cached = await cache.get(commitSha);
560
- if (cached) return cached;
601
+ if (cached) return fromCachedPR(cached);
602
+ if (options?.cacheOnly) return null;
561
603
  let mergeBasedPR = null;
562
604
  const mergeResult = await findMergeCommit(commitSha, options);
563
605
  if (mergeResult) {
@@ -580,31 +622,38 @@ async function lookupPR(commitSha, adapter, options) {
580
622
  };
581
623
  }
582
624
  if (!options?.deep || mergeBasedPR.mergedAt) {
583
- await cache.set(commitSha, mergeBasedPR);
625
+ await cache.set(commitSha, toCachedPR(mergeBasedPR));
584
626
  return mergeBasedPR;
585
627
  }
586
628
  }
587
629
  }
588
630
  if (mergeBasedPR) {
589
- await cache.set(commitSha, mergeBasedPR);
631
+ await cache.set(commitSha, toCachedPR(mergeBasedPR));
590
632
  return mergeBasedPR;
591
633
  }
592
634
  if (adapter) {
593
635
  const prInfo = await adapter.getPRForCommit(commitSha);
594
636
  if (prInfo?.mergedAt) {
595
- await cache.set(commitSha, prInfo);
637
+ await cache.set(commitSha, toCachedPR(prInfo));
596
638
  return prInfo;
597
639
  }
598
640
  }
599
- const patchIdMatch = await findPatchIdMatch(commitSha, {
600
- ...options,
601
- scanDepth: options?.deep ? DEEP_SCAN_DEPTH : void 0
602
- });
603
- if (patchIdMatch) {
604
- const result = await lookupPR(patchIdMatch.matchedSha, adapter, options);
605
- if (result) {
606
- await cache.set(commitSha, result);
607
- return result;
641
+ if (!options?.skipPatchIdScan && _recursionDepth < MAX_RECURSION_DEPTH) {
642
+ const patchIdMatch = await findPatchIdMatch(commitSha, {
643
+ ...options,
644
+ scanDepth: options?.deep ? DEEP_SCAN_DEPTH : void 0
645
+ });
646
+ if (patchIdMatch) {
647
+ const result = await lookupPR(
648
+ patchIdMatch.matchedSha,
649
+ adapter,
650
+ options,
651
+ _recursionDepth + 1
652
+ );
653
+ if (result) {
654
+ await cache.set(commitSha, toCachedPR(result));
655
+ return result;
656
+ }
608
657
  }
609
658
  }
610
659
  return null;
@@ -612,7 +661,7 @@ async function lookupPR(commitSha, adapter, options) {
612
661
  function resetPRCache() {
613
662
  cacheRegistry2.clear();
614
663
  }
615
- var cacheRegistry2, DEEP_SCAN_DEPTH;
664
+ var cacheRegistry2, DEEP_SCAN_DEPTH, MAX_RECURSION_DEPTH;
616
665
  var init_pr_lookup = __esm({
617
666
  "src/core/pr-lookup/pr-lookup.ts"() {
618
667
  "use strict";
@@ -622,6 +671,7 @@ var init_pr_lookup = __esm({
622
671
  init_patch_id2();
623
672
  cacheRegistry2 = /* @__PURE__ */ new Map();
624
673
  DEEP_SCAN_DEPTH = 2e3;
674
+ MAX_RECURSION_DEPTH = 2;
625
675
  }
626
676
  });
627
677
 
@@ -879,6 +929,27 @@ function isVersionAtLeast(version, minVersion) {
879
929
  }
880
930
  return true;
881
931
  }
932
+ async function checkCloneStatus(options) {
933
+ let partialClone = false;
934
+ let shallow = false;
935
+ try {
936
+ const shallowResult = await gitExec(
937
+ ["rev-parse", "--is-shallow-repository"],
938
+ { cwd: options?.cwd }
939
+ );
940
+ shallow = shallowResult.stdout.trim() === "true";
941
+ } catch {
942
+ }
943
+ try {
944
+ const partialResult = await gitExec(
945
+ ["config", "--get", "extensions.partialclone"],
946
+ { cwd: options?.cwd }
947
+ );
948
+ partialClone = partialResult.stdout.trim().length > 0;
949
+ } catch {
950
+ }
951
+ return { partialClone, shallow };
952
+ }
882
953
  async function checkGitHealth(options) {
883
954
  const hints = [];
884
955
  let gitVersion = "0.0.0";
@@ -905,7 +976,18 @@ async function checkGitHealth(options) {
905
976
  `Upgrade git to ${BLOOM_FILTER_MIN_VERSION.join(".")}+ for bloom filter support (current: ${gitVersion}).`
906
977
  );
907
978
  }
908
- return { commitGraph, bloomFilter, gitVersion, hints };
979
+ const cloneStatus = await checkCloneStatus({ cwd: options?.cwd });
980
+ if (cloneStatus.partialClone) {
981
+ hints.push(
982
+ "Partial clone detected. Patch-ID scan (Strategy 4) will be skipped to avoid blob downloads."
983
+ );
984
+ }
985
+ if (cloneStatus.shallow) {
986
+ hints.push(
987
+ "Shallow repository detected. Ancestry-path results may be incomplete."
988
+ );
989
+ }
990
+ return { commitGraph, bloomFilter, gitVersion, hints, ...cloneStatus };
909
991
  }
910
992
 
911
993
  // src/platform/index.ts
@@ -2129,7 +2211,7 @@ async function runBlameAndAuth(adapter, options, execOptions) {
2129
2211
  }
2130
2212
  return { analyzed: blameResult.value, operatingLevel, warnings };
2131
2213
  }
2132
- async function processEntry(entry, featureFlags, adapter, options, execOptions, repoId) {
2214
+ async function processEntry(entry, featureFlags, adapter, options, execOptions, repoId, skipPatchIdScan) {
2133
2215
  const nodes = [];
2134
2216
  const commitNode = {
2135
2217
  type: entry.isCosmetic ? "cosmetic_commit" : "original_commit",
@@ -2160,8 +2242,10 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
2160
2242
  const prInfo = await lookupPR(targetSha, adapter, {
2161
2243
  ...execOptions,
2162
2244
  noCache: options.noCache,
2245
+ cacheOnly: options.cacheOnly,
2163
2246
  deep: featureFlags.deepTrace,
2164
- repoId
2247
+ repoId,
2248
+ skipPatchIdScan
2165
2249
  });
2166
2250
  if (prInfo) {
2167
2251
  nodes.push({
@@ -2178,11 +2262,19 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
2178
2262
  }
2179
2263
  return nodes;
2180
2264
  }
2181
- async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOptions, repoId) {
2265
+ async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOptions, repoId, skipPatchIdScan) {
2182
2266
  const results = await Promise.allSettled(
2183
2267
  map8(
2184
2268
  analyzed,
2185
- (entry) => processEntry(entry, featureFlags, adapter, options, execOptions, repoId)
2269
+ (entry) => processEntry(
2270
+ entry,
2271
+ featureFlags,
2272
+ adapter,
2273
+ options,
2274
+ execOptions,
2275
+ repoId,
2276
+ skipPatchIdScan
2277
+ )
2186
2278
  )
2187
2279
  );
2188
2280
  return results.flatMap((r) => r.status === "fulfilled" ? r.value : []);
@@ -2213,14 +2305,36 @@ async function trace(options) {
2213
2305
  );
2214
2306
  const operatingLevel = blameAuth.operatingLevel || platform.operatingLevel;
2215
2307
  const warnings = [...platform.warnings, ...blameAuth.warnings];
2308
+ if (options.cacheOnly && options.noCache) {
2309
+ warnings.push(
2310
+ "Both cacheOnly and noCache are set. cacheOnly takes precedence \u2014 cache reads are enabled."
2311
+ );
2312
+ }
2216
2313
  const featureFlags = computeFeatureFlags(operatingLevel, options);
2314
+ let cloneStatus = { partialClone: false, shallow: false };
2315
+ try {
2316
+ const result = await checkCloneStatus({ cwd: options.cwd });
2317
+ if (result) cloneStatus = result;
2318
+ } catch {
2319
+ }
2320
+ if (cloneStatus.partialClone) {
2321
+ warnings.push(
2322
+ "Partial clone detected. Patch-ID scan (Strategy 4) will be skipped to avoid blob downloads."
2323
+ );
2324
+ }
2325
+ if (cloneStatus.shallow) {
2326
+ warnings.push(
2327
+ "Shallow repository detected. Ancestry-path results may be incomplete."
2328
+ );
2329
+ }
2217
2330
  const nodes = await buildTraceNodes(
2218
2331
  blameAuth.analyzed,
2219
2332
  featureFlags,
2220
2333
  platform.adapter,
2221
2334
  options,
2222
2335
  execOptions,
2223
- repoId
2336
+ repoId,
2337
+ cloneStatus.partialClone || void 0
2224
2338
  );
2225
2339
  return { nodes, operatingLevel, featureFlags, warnings };
2226
2340
  }
@@ -3,3 +3,14 @@ export interface CacheEntry<T> {
3
3
  value: T;
4
4
  createdAt: number;
5
5
  }
6
+ /** Disk-serialized PRInfo — date fields stored as numeric timestamps (ms) */
7
+ export interface CachedPRInfo {
8
+ number: number;
9
+ title: string;
10
+ author: string;
11
+ url: string;
12
+ mergeCommit: string;
13
+ baseBranch: string;
14
+ /** Unix timestamp in milliseconds, NOT ISO 8601 string */
15
+ mergedAt?: number;
16
+ }
@@ -15,9 +15,15 @@ export interface RemoteInfo {
15
15
  host: string;
16
16
  platform: PlatformType | 'unknown';
17
17
  }
18
+ export interface CloneStatus {
19
+ partialClone: boolean;
20
+ shallow: boolean;
21
+ }
18
22
  export interface HealthReport {
19
23
  commitGraph: boolean;
20
24
  bloomFilter: boolean;
21
25
  gitVersion: string;
22
26
  hints: string[];
27
+ partialClone: boolean;
28
+ shallow: boolean;
23
29
  }
@@ -1,7 +1,7 @@
1
1
  export type { SymbolKind, SymbolInfo, ContentHash, ChangeType, ComparisonResult, AstTraceResult, } from './ast.js';
2
2
  export type { BlameResult, CommitInfo } from './blame.js';
3
- export type { CacheEntry } from './cache.js';
4
- export type { GitExecResult, GitExecOptions, RemoteInfo, HealthReport, } from './git.js';
3
+ export type { CacheEntry, CachedPRInfo } from './cache.js';
4
+ export type { GitExecResult, GitExecOptions, RemoteInfo, HealthReport, CloneStatus, } from './git.js';
5
5
  export type { GraphOptions, GraphResult } from './graph.js';
6
6
  export type { NormalizedResponse } from './output.js';
7
7
  export type { TraceNodeType, TrackingMethod, Confidence, TraceNode, OperatingLevel, FeatureFlags, } from './pipeline.js';
@@ -29,4 +29,6 @@ export interface TraceOptions {
29
29
  noAst?: boolean;
30
30
  /** Disable cache for this invocation */
31
31
  noCache?: boolean;
32
+ /** Return cached results only — skip API calls, ancestry traversal, and patch-id scan */
33
+ cacheOnly?: boolean;
32
34
  }
package/dist/version.d.ts CHANGED
@@ -2,4 +2,4 @@
2
2
  * Current package version from package.json
3
3
  * Automatically synchronized during build process
4
4
  */
5
- export declare const VERSION = "0.0.5";
5
+ export declare const VERSION = "0.0.6";
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@lumy-pack/line-lore",
3
- "version": "0.0.5",
3
+ "version": "0.0.6",
4
4
  "description": "CLI tool for tracing code lines to their originating Pull Requests via git blame",
5
5
  "keywords": [
6
6
  "cli",