@lumy-pack/line-lore 0.0.7 → 0.0.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +44 -20
- package/dist/cli.mjs +120 -64
- package/dist/core/ancestry/ancestry.d.ts +18 -0
- package/dist/core/ancestry/index.d.ts +1 -1
- package/dist/core/blame/blame.d.ts +2 -2
- package/dist/core/index.d.ts +1 -1
- package/dist/index.cjs +106 -61
- package/dist/index.d.ts +1 -1
- package/dist/index.mjs +106 -61
- package/dist/types/blame.d.ts +2 -0
- package/dist/types/git.d.ts +5 -0
- package/dist/types/index.d.ts +2 -2
- package/dist/types/trace.d.ts +3 -0
- package/dist/version.d.ts +1 -1
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -25,9 +25,12 @@ yarn add @lumy-pack/line-lore
|
|
|
25
25
|
### CLI Usage
|
|
26
26
|
|
|
27
27
|
```bash
|
|
28
|
-
# Trace a single line to its PR
|
|
28
|
+
# Trace a single line to its originating PR
|
|
29
29
|
npx @lumy-pack/line-lore trace src/auth.ts -L 42
|
|
30
30
|
|
|
31
|
+
# Trace the last meaningful change to a line
|
|
32
|
+
npx @lumy-pack/line-lore trace src/auth.ts -L 42 --mode change
|
|
33
|
+
|
|
31
34
|
# Trace a line range
|
|
32
35
|
npx @lumy-pack/line-lore trace src/config.ts -L 10,50
|
|
33
36
|
|
|
@@ -61,14 +64,21 @@ npx @lumy-pack/line-lore trace src/auth.ts -L 42 --quiet
|
|
|
61
64
|
```typescript
|
|
62
65
|
import { trace, graph, health, clearCache } from '@lumy-pack/line-lore';
|
|
63
66
|
|
|
64
|
-
// Trace a line to its PR
|
|
65
|
-
const
|
|
67
|
+
// Trace a line to its originating PR (default mode)
|
|
68
|
+
const originResult = await trace({
|
|
69
|
+
file: 'src/auth.ts',
|
|
70
|
+
line: 42,
|
|
71
|
+
});
|
|
72
|
+
|
|
73
|
+
// Trace the last meaningful change to a line
|
|
74
|
+
const changeResult = await trace({
|
|
66
75
|
file: 'src/auth.ts',
|
|
67
76
|
line: 42,
|
|
77
|
+
mode: 'change',
|
|
68
78
|
});
|
|
69
79
|
|
|
70
80
|
// Find the PR node
|
|
71
|
-
const prNode =
|
|
81
|
+
const prNode = originResult.nodes.find(n => n.type === 'pull_request');
|
|
72
82
|
if (prNode) {
|
|
73
83
|
console.log(`PR #${prNode.prNumber}: ${prNode.prTitle}`);
|
|
74
84
|
}
|
|
@@ -89,9 +99,11 @@ console.log(`Git version: ${report.gitVersion}`);
|
|
|
89
99
|
|
|
90
100
|
## How It Works
|
|
91
101
|
|
|
92
|
-
@lumy-pack/line-lore executes a
|
|
102
|
+
@lumy-pack/line-lore executes a deterministic pipeline with two trace modes:
|
|
93
103
|
|
|
94
|
-
1. **Line → Commit (Blame)**:
|
|
104
|
+
1. **Line → Commit (Blame)**:
|
|
105
|
+
- `origin`: `git blame -w -C -C -M` — follows copy/move history across renames; whitespace-only changes ignored
|
|
106
|
+
- `change`: `git blame -w` — finds the last meaningful local change; ignores whitespace but does **not** track renames/copies, so a rename commit itself is attributed as the change
|
|
95
107
|
2. **Cosmetic Detection**: AST structural comparison to skip formatting-only changes
|
|
96
108
|
3. **Commit → Merge Commit**: Ancestry-path traversal + patch-id matching to resolve merge commits
|
|
97
109
|
4. **Merge Commit → PR**: Commit message parsing + platform API lookup (filters unmerged PRs)
|
|
@@ -111,19 +123,21 @@ Strategy 1 — Cache ─────────────────── c
|
|
|
111
123
|
│ hit? → return cached PRInfo
|
|
112
124
|
│ miss + --cache-only? → return null (skip all fallbacks)
|
|
113
125
|
▼
|
|
114
|
-
Strategy 2 —
|
|
115
|
-
│
|
|
116
|
-
│
|
|
126
|
+
Strategy 2 — Platform API ──────────── cost: 1 HTTP request
|
|
127
|
+
│ gh api repos/{owner}/{repo}/commits/{sha}/pulls
|
|
128
|
+
│ Filter: merged PRs only (mergedAt != null)
|
|
129
|
+
│ found? → return PRInfo
|
|
130
|
+
▼
|
|
131
|
+
Strategy 3 — Ancestry-path + Message ─ cost: 1-2 git-log traversals
|
|
132
|
+
│ Search verified merge candidates:
|
|
133
|
+
│ • first-parent ancestry path first
|
|
134
|
+
│ • full ancestry path second
|
|
117
135
|
│ Parse merge subject with 3 regex patterns:
|
|
118
136
|
│ • /Merge pull request #(\d+)/ — GitHub merge commit
|
|
119
137
|
│ • /\(#(\d+)\)\s*$/ — Squash merge convention
|
|
120
|
-
│ •
|
|
121
|
-
│ If
|
|
122
|
-
│
|
|
123
|
-
▼
|
|
124
|
-
Strategy 3 — Platform API ──────────── cost: 1 HTTP request
|
|
125
|
-
│ gh api repos/{owner}/{repo}/commits/{sha}/pulls
|
|
126
|
-
│ Filter: merged PRs only (mergedAt != null)
|
|
138
|
+
│ • /See merge request ...!(\d+)$/ — GitLab merge commit
|
|
139
|
+
│ If message has no PR number and API is available:
|
|
140
|
+
│ query the merge commit SHA directly
|
|
127
141
|
│ found? → return PRInfo
|
|
128
142
|
▼
|
|
129
143
|
Strategy 4 — Patch-ID matching ─────── cost: streaming 500+ commits
|
|
@@ -136,7 +150,7 @@ Strategy 4 — Patch-ID matching ─────── cost: streaming 500+ comm
|
|
|
136
150
|
All failed → null
|
|
137
151
|
```
|
|
138
152
|
|
|
139
|
-
**Why this order?**
|
|
153
|
+
**Why this order?** Direct API lookup is the strongest Level 2 signal, so it runs before local ancestry heuristics. Verified ancestry is still cheaper than patch-id scanning and resolves most merge-based workflows without diff streaming.
|
|
140
154
|
|
|
141
155
|
**Patch-ID explained**: `git patch-id --stable` generates a content-based hash from a commit's diff, ignoring all metadata (author, date, message). When a commit is rebased, its SHA changes but the patch-id stays the same — enabling deterministic matching of rebased commits.
|
|
142
156
|
|
|
@@ -170,7 +184,7 @@ interface TraceNode {
|
|
|
170
184
|
|
|
171
185
|
| Type | Symbol | Meaning | When it appears |
|
|
172
186
|
|------|--------|---------|-----------------|
|
|
173
|
-
| `original_commit` | `●` | The commit
|
|
187
|
+
| `original_commit` | `●` | The commit selected by the active trace mode | Always (at least one) |
|
|
174
188
|
| `cosmetic_commit` | `○` | A formatting-only change (whitespace, imports) | When AST detects no logic change |
|
|
175
189
|
| `merge_commit` | `◆` | The merge commit on the base branch | Merge-based workflows |
|
|
176
190
|
| `rebased_commit` | `◇` | A rebased version of the original commit | Rebase workflows with patch-id match |
|
|
@@ -206,6 +220,13 @@ interface TraceNode {
|
|
|
206
220
|
└─ https://github.com/org/repo/pull/42
|
|
207
221
|
```
|
|
208
222
|
|
|
223
|
+
**Last meaningful change mode (`--mode change`):**
|
|
224
|
+
```
|
|
225
|
+
● Commit e4f5a6b [exact] via blame
|
|
226
|
+
▸ PR #55 refactor: update validation logic
|
|
227
|
+
└─ https://github.com/org/repo/pull/55
|
|
228
|
+
```
|
|
229
|
+
|
|
209
230
|
**Squash merge (Level 2):**
|
|
210
231
|
```
|
|
211
232
|
● Commit e4f5a6b [exact] via blame-CMw
|
|
@@ -334,7 +355,7 @@ import { trace, graph, health, clearCache, LineLoreError } from '@lumy-pack/line
|
|
|
334
355
|
|
|
335
356
|
### `trace(options): Promise<TraceFullResult>`
|
|
336
357
|
|
|
337
|
-
Trace a code line to its originating PR.
|
|
358
|
+
Trace a code line to its originating or last-change PR, depending on the selected mode.
|
|
338
359
|
|
|
339
360
|
**Options (`TraceOptions`):**
|
|
340
361
|
|
|
@@ -344,6 +365,7 @@ Trace a code line to its originating PR.
|
|
|
344
365
|
| `line` | `number` | yes | — | Starting line number (1-indexed) |
|
|
345
366
|
| `endLine` | `number` | no | — | Ending line for range queries |
|
|
346
367
|
| `remote` | `string` | no | `'origin'` | Git remote name |
|
|
368
|
+
| `mode` | `'origin' \| 'change'` | no | `'origin'` | `origin` uses `git blame -w -C -C -M` (follows copy/move history across renames), `change` uses `git blame -w` (finds the last meaningful local change, ignoring whitespace but not copy/move) |
|
|
347
369
|
| `deep` | `boolean` | no | `false` | Expand patch-id scan range (500→2000), continue search after merge commit match |
|
|
348
370
|
| `noAst` | `boolean` | no | `false` | Disable AST analysis |
|
|
349
371
|
| `noCache` | `boolean` | no | `false` | Disable cache reads and writes |
|
|
@@ -389,6 +411,7 @@ const result = await trace({
|
|
|
389
411
|
file: 'src/config.ts',
|
|
390
412
|
line: 10,
|
|
391
413
|
endLine: 50,
|
|
414
|
+
mode: 'origin',
|
|
392
415
|
deep: true, // search harder for squash merges
|
|
393
416
|
noCache: true, // skip cache for fresh results
|
|
394
417
|
});
|
|
@@ -631,8 +654,9 @@ import type {
|
|
|
631
654
|
|
|
632
655
|
| Command | Purpose |
|
|
633
656
|
|---------|---------|
|
|
634
|
-
| `npx @lumy-pack/line-lore trace <file>` | Trace a line to its PR |
|
|
657
|
+
| `npx @lumy-pack/line-lore trace <file>` | Trace a line to its origin or last-change PR |
|
|
635
658
|
| `-L, --line <num>` | Starting line (required) |
|
|
659
|
+
| `--mode <origin\|change>` | Choose between content origin and last meaningful change |
|
|
636
660
|
| `--end-line <num>` | Ending line for range |
|
|
637
661
|
| `--deep` | Deep trace (squash merges) |
|
|
638
662
|
| `--output <format>` | Output as json, llm, or human |
|
package/dist/cli.mjs
CHANGED
|
@@ -367,25 +367,6 @@ var init_executor = __esm({
|
|
|
367
367
|
|
|
368
368
|
// src/core/ancestry/ancestry.ts
|
|
369
369
|
import { filter as filter4, isTruthy as isTruthy4 } from "@winglet/common-utils";
|
|
370
|
-
async function findMergeCommit(commitSha, options) {
|
|
371
|
-
const ref = options?.ref ?? "HEAD";
|
|
372
|
-
const budget = options?.timeout ?? DEFAULT_ANCESTRY_TIMEOUT;
|
|
373
|
-
const startTime = Date.now();
|
|
374
|
-
const firstParentResult = await findMergeCommitWithArgs(
|
|
375
|
-
commitSha,
|
|
376
|
-
ref,
|
|
377
|
-
["--first-parent"],
|
|
378
|
-
{ ...options, timeout: budget }
|
|
379
|
-
);
|
|
380
|
-
if (firstParentResult) return firstParentResult;
|
|
381
|
-
const elapsed = Date.now() - startTime;
|
|
382
|
-
const remaining = budget - elapsed;
|
|
383
|
-
if (remaining <= 0) return null;
|
|
384
|
-
return findMergeCommitWithArgs(commitSha, ref, [], {
|
|
385
|
-
...options,
|
|
386
|
-
timeout: remaining
|
|
387
|
-
});
|
|
388
|
-
}
|
|
389
370
|
async function verifyMergeIntroducesCommit(targetSha, mergeResult, options) {
|
|
390
371
|
if (mergeResult.parentShas.length < 2) return true;
|
|
391
372
|
const firstParent = mergeResult.parentShas[0];
|
|
@@ -415,7 +396,7 @@ async function isAncestor(commitA, commitB, options) {
|
|
|
415
396
|
return null;
|
|
416
397
|
}
|
|
417
398
|
}
|
|
418
|
-
async function
|
|
399
|
+
async function findMergeCommitsWithArgs(commitSha, ref, extraArgs, options) {
|
|
419
400
|
try {
|
|
420
401
|
const result = await gitExec(
|
|
421
402
|
[
|
|
@@ -431,28 +412,29 @@ async function findMergeCommitWithArgs(commitSha, ref, extraArgs, options) {
|
|
|
431
412
|
{ cwd: options?.cwd, timeout: options?.timeout }
|
|
432
413
|
);
|
|
433
414
|
const lines = filter4(result.stdout.trim().split("\n"), isTruthy4);
|
|
434
|
-
if (lines.length === 0) return
|
|
415
|
+
if (lines.length === 0) return [];
|
|
416
|
+
const verifiedCandidates = [];
|
|
435
417
|
const candidateCount = Math.min(lines.length, MAX_CANDIDATES);
|
|
436
|
-
let
|
|
418
|
+
let attemptedCount = 0;
|
|
437
419
|
for (let i = 0; i < candidateCount; i++) {
|
|
438
420
|
const candidate = parseMergeLogLine(lines[i]);
|
|
439
421
|
if (!candidate) continue;
|
|
440
|
-
|
|
422
|
+
attemptedCount++;
|
|
441
423
|
const verified = await verifyMergeIntroducesCommit(
|
|
442
424
|
commitSha,
|
|
443
425
|
candidate,
|
|
444
426
|
options
|
|
445
427
|
);
|
|
446
|
-
if (verified)
|
|
428
|
+
if (verified) verifiedCandidates.push(candidate);
|
|
447
429
|
}
|
|
448
|
-
if (
|
|
430
|
+
if (attemptedCount > 0 && verifiedCandidates.length === 0 && options?.warnings) {
|
|
449
431
|
options.warnings.push(
|
|
450
|
-
`ancestry: all ${
|
|
432
|
+
`ancestry: all ${attemptedCount} merge candidate(s) failed verification for ${commitSha.slice(0, 8)}`
|
|
451
433
|
);
|
|
452
434
|
}
|
|
453
|
-
return
|
|
435
|
+
return verifiedCandidates;
|
|
454
436
|
} catch {
|
|
455
|
-
return
|
|
437
|
+
return [];
|
|
456
438
|
}
|
|
457
439
|
}
|
|
458
440
|
function parseMergeLogLine(line) {
|
|
@@ -472,6 +454,38 @@ function parseMergeLogLine(line) {
|
|
|
472
454
|
const subject = parts.slice(subjectStart).join(" ");
|
|
473
455
|
return { mergeCommitSha, parentShas, subject };
|
|
474
456
|
}
|
|
457
|
+
async function findMergeCommits(commitSha, options) {
|
|
458
|
+
const ref = options?.ref ?? "HEAD";
|
|
459
|
+
const budget = options?.timeout ?? DEFAULT_ANCESTRY_TIMEOUT;
|
|
460
|
+
const startTime = Date.now();
|
|
461
|
+
const results = [];
|
|
462
|
+
const seen = /* @__PURE__ */ new Set();
|
|
463
|
+
const pushUnique = (candidates) => {
|
|
464
|
+
for (const candidate of candidates) {
|
|
465
|
+
if (seen.has(candidate.mergeCommitSha)) continue;
|
|
466
|
+
seen.add(candidate.mergeCommitSha);
|
|
467
|
+
results.push(candidate);
|
|
468
|
+
if (results.length >= MAX_CANDIDATES) break;
|
|
469
|
+
}
|
|
470
|
+
};
|
|
471
|
+
const firstParent = await findMergeCommitsWithArgs(
|
|
472
|
+
commitSha,
|
|
473
|
+
ref,
|
|
474
|
+
["--first-parent"],
|
|
475
|
+
{ ...options, timeout: budget }
|
|
476
|
+
);
|
|
477
|
+
pushUnique(firstParent);
|
|
478
|
+
const elapsed = Date.now() - startTime;
|
|
479
|
+
const remaining = budget - elapsed;
|
|
480
|
+
if (remaining > 0 && results.length < MAX_CANDIDATES) {
|
|
481
|
+
const full = await findMergeCommitsWithArgs(commitSha, ref, [], {
|
|
482
|
+
...options,
|
|
483
|
+
timeout: remaining
|
|
484
|
+
});
|
|
485
|
+
pushUnique(full);
|
|
486
|
+
}
|
|
487
|
+
return results;
|
|
488
|
+
}
|
|
475
489
|
async function getCommitSubject(sha, options) {
|
|
476
490
|
try {
|
|
477
491
|
const result = await gitExec(["log", "-1", "--format=%s", sha], {
|
|
@@ -670,16 +684,17 @@ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
|
|
|
670
684
|
}
|
|
671
685
|
}
|
|
672
686
|
let mergeBasedPR = null;
|
|
673
|
-
const
|
|
674
|
-
|
|
687
|
+
const mergeCandidates = await findMergeCommits(commitSha, options);
|
|
688
|
+
const hasAncestryMerges = mergeCandidates.length > 0;
|
|
689
|
+
for (const candidate of mergeCandidates) {
|
|
675
690
|
const prNumber = extractPRFromMergeMessage(
|
|
676
|
-
|
|
691
|
+
candidate.subject,
|
|
677
692
|
options?.platform
|
|
678
693
|
);
|
|
679
694
|
if (prNumber) {
|
|
680
695
|
if (adapter) {
|
|
681
696
|
const prInfo = await adapter.getPRForCommit(
|
|
682
|
-
|
|
697
|
+
candidate.mergeCommitSha,
|
|
683
698
|
prSelectOptions
|
|
684
699
|
);
|
|
685
700
|
if (prInfo?.mergedAt) {
|
|
@@ -689,23 +704,32 @@ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
|
|
|
689
704
|
if (!mergeBasedPR) {
|
|
690
705
|
mergeBasedPR = {
|
|
691
706
|
number: prNumber,
|
|
692
|
-
title:
|
|
707
|
+
title: candidate.subject,
|
|
693
708
|
author: "",
|
|
694
709
|
url: "",
|
|
695
|
-
mergeCommit:
|
|
710
|
+
mergeCommit: candidate.mergeCommitSha,
|
|
696
711
|
baseBranch: "",
|
|
697
712
|
resolvedVia: "ancestry"
|
|
698
713
|
};
|
|
699
714
|
}
|
|
700
|
-
|
|
701
|
-
|
|
702
|
-
|
|
715
|
+
break;
|
|
716
|
+
}
|
|
717
|
+
if (adapter) {
|
|
718
|
+
const mergeCommitPR = await adapter.getPRForCommit(
|
|
719
|
+
candidate.mergeCommitSha,
|
|
720
|
+
prSelectOptions
|
|
721
|
+
);
|
|
722
|
+
if (mergeCommitPR?.mergedAt) {
|
|
723
|
+
mergeBasedPR = { ...mergeCommitPR, resolvedVia: "ancestry" };
|
|
724
|
+
break;
|
|
703
725
|
}
|
|
704
726
|
}
|
|
705
727
|
}
|
|
706
728
|
if (mergeBasedPR) {
|
|
707
|
-
|
|
708
|
-
|
|
729
|
+
if (!options?.deep || mergeBasedPR.mergedAt) {
|
|
730
|
+
await cache.set(commitSha, toCachedPR(mergeBasedPR));
|
|
731
|
+
return mergeBasedPR;
|
|
732
|
+
}
|
|
709
733
|
}
|
|
710
734
|
const commitSubject = await getCommitSubject(commitSha, options);
|
|
711
735
|
if (commitSubject) {
|
|
@@ -727,7 +751,7 @@ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
|
|
|
727
751
|
return subjectPR;
|
|
728
752
|
}
|
|
729
753
|
}
|
|
730
|
-
if (!options?.skipPatchIdScan && _recursionDepth < MAX_RECURSION_DEPTH) {
|
|
754
|
+
if (!options?.skipPatchIdScan && _recursionDepth < MAX_RECURSION_DEPTH && (!hasAncestryMerges || options?.deep)) {
|
|
731
755
|
const patchIdMatch = await findPatchIdMatch(commitSha, {
|
|
732
756
|
...options,
|
|
733
757
|
scanDepth: options?.deep ? DEEP_SCAN_DEPTH : void 0
|
|
@@ -745,6 +769,10 @@ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
|
|
|
745
769
|
}
|
|
746
770
|
}
|
|
747
771
|
}
|
|
772
|
+
if (mergeBasedPR) {
|
|
773
|
+
await cache.set(commitSha, toCachedPR(mergeBasedPR));
|
|
774
|
+
return mergeBasedPR;
|
|
775
|
+
}
|
|
748
776
|
return null;
|
|
749
777
|
}
|
|
750
778
|
function resetPRCache() {
|
|
@@ -781,7 +809,7 @@ var VERSION;
|
|
|
781
809
|
var init_version = __esm({
|
|
782
810
|
"src/version.ts"() {
|
|
783
811
|
"use strict";
|
|
784
|
-
VERSION = "0.0.
|
|
812
|
+
VERSION = "0.0.8";
|
|
785
813
|
}
|
|
786
814
|
});
|
|
787
815
|
|
|
@@ -2094,6 +2122,7 @@ function parsePorcelainOutput(output) {
|
|
|
2094
2122
|
}
|
|
2095
2123
|
let commitHash = headerMatch[1];
|
|
2096
2124
|
const originalLine = parseInt(headerMatch[2], 10);
|
|
2125
|
+
const finalLine = parseInt(headerMatch[3], 10) || 0;
|
|
2097
2126
|
const isBoundary = commitHash.startsWith("^");
|
|
2098
2127
|
if (isBoundary) {
|
|
2099
2128
|
commitHash = commitHash.slice(1).padStart(40, "0");
|
|
@@ -2137,6 +2166,7 @@ function parsePorcelainOutput(output) {
|
|
|
2137
2166
|
authorEmail: cleanEmail,
|
|
2138
2167
|
date,
|
|
2139
2168
|
lineContent,
|
|
2169
|
+
finalLine,
|
|
2140
2170
|
originalFile,
|
|
2141
2171
|
originalLine: originalFile ? originalLine : void 0
|
|
2142
2172
|
});
|
|
@@ -2147,10 +2177,8 @@ function parsePorcelainOutput(output) {
|
|
|
2147
2177
|
// src/core/blame/blame.ts
|
|
2148
2178
|
async function executeBlame(file, lineRange, options) {
|
|
2149
2179
|
const lineSpec = `${lineRange.start},${lineRange.end}`;
|
|
2150
|
-
const
|
|
2151
|
-
|
|
2152
|
-
options
|
|
2153
|
-
);
|
|
2180
|
+
const args = options?.mode === "change" ? ["blame", "-w", "--porcelain", "-L", lineSpec, file] : ["blame", "-w", "-C", "-C", "-M", "--porcelain", "-L", lineSpec, file];
|
|
2181
|
+
const result = await gitExec(args, options);
|
|
2154
2182
|
return parsePorcelainOutput(result.stdout);
|
|
2155
2183
|
}
|
|
2156
2184
|
async function analyzeBlameResults(results, filePath, options) {
|
|
@@ -2357,9 +2385,10 @@ async function runBlameAndAuth(adapter, options, execOptions) {
|
|
|
2357
2385
|
const lineRange = parseLineRange(
|
|
2358
2386
|
options.endLine ? `${options.line},${options.endLine}` : `${options.line}`
|
|
2359
2387
|
);
|
|
2360
|
-
const blameChain = executeBlame(options.file, lineRange,
|
|
2361
|
-
|
|
2362
|
-
|
|
2388
|
+
const blameChain = executeBlame(options.file, lineRange, {
|
|
2389
|
+
...execOptions,
|
|
2390
|
+
mode: options.mode
|
|
2391
|
+
}).then((results) => analyzeBlameResults(results, options.file, execOptions));
|
|
2363
2392
|
const [authResult, blameResult] = await Promise.allSettled([
|
|
2364
2393
|
adapter ? adapter.checkAuth() : Promise.resolve({ authenticated: false }),
|
|
2365
2394
|
blameChain
|
|
@@ -2380,12 +2409,24 @@ async function runBlameAndAuth(adapter, options, execOptions) {
|
|
|
2380
2409
|
}
|
|
2381
2410
|
return { analyzed: blameResult.value, operatingLevel, warnings };
|
|
2382
2411
|
}
|
|
2383
|
-
|
|
2412
|
+
function resolveTraceMode(mode) {
|
|
2413
|
+
return mode ?? "origin";
|
|
2414
|
+
}
|
|
2415
|
+
function deduplicatedLookupPR(sha, adapter, options, inflight) {
|
|
2416
|
+
const existing = inflight.get(sha);
|
|
2417
|
+
if (existing) return existing;
|
|
2418
|
+
const promise = lookupPR(sha, adapter, options);
|
|
2419
|
+
inflight.set(sha, promise);
|
|
2420
|
+
promise.finally(() => inflight.delete(sha));
|
|
2421
|
+
return promise;
|
|
2422
|
+
}
|
|
2423
|
+
async function processEntry(entry, featureFlags, adapter, options, execOptions, repoId, inflightPR, skipPatchIdScan, preferredBase) {
|
|
2384
2424
|
const nodes = [];
|
|
2425
|
+
const traceMode = resolveTraceMode(options.mode);
|
|
2385
2426
|
const commitNode = {
|
|
2386
2427
|
type: entry.isCosmetic ? "cosmetic_commit" : "original_commit",
|
|
2387
2428
|
sha: entry.blame.commitHash,
|
|
2388
|
-
trackingMethod: "blame-CMw",
|
|
2429
|
+
trackingMethod: traceMode === "change" ? "blame" : "blame-CMw",
|
|
2389
2430
|
confidence: "exact",
|
|
2390
2431
|
note: entry.cosmeticReason ? `Cosmetic change: ${entry.cosmeticReason}` : void 0
|
|
2391
2432
|
};
|
|
@@ -2407,17 +2448,18 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
|
|
|
2407
2448
|
}
|
|
2408
2449
|
}
|
|
2409
2450
|
const targetSha = nodes[nodes.length - 1].sha;
|
|
2451
|
+
const prLookupOptions = {
|
|
2452
|
+
...execOptions,
|
|
2453
|
+
noCache: options.noCache,
|
|
2454
|
+
cacheOnly: options.cacheOnly,
|
|
2455
|
+
deep: featureFlags.deepTrace,
|
|
2456
|
+
repoId,
|
|
2457
|
+
skipPatchIdScan,
|
|
2458
|
+
preferredBase,
|
|
2459
|
+
platform: adapter?.platform
|
|
2460
|
+
};
|
|
2410
2461
|
if (targetSha) {
|
|
2411
|
-
const prInfo = await
|
|
2412
|
-
...execOptions,
|
|
2413
|
-
noCache: options.noCache,
|
|
2414
|
-
cacheOnly: options.cacheOnly,
|
|
2415
|
-
deep: featureFlags.deepTrace,
|
|
2416
|
-
repoId,
|
|
2417
|
-
skipPatchIdScan,
|
|
2418
|
-
preferredBase,
|
|
2419
|
-
platform: adapter?.platform
|
|
2420
|
-
});
|
|
2462
|
+
const prInfo = await deduplicatedLookupPR(targetSha, adapter, prLookupOptions, inflightPR);
|
|
2421
2463
|
if (prInfo) {
|
|
2422
2464
|
nodes.push({
|
|
2423
2465
|
type: "pull_request",
|
|
@@ -2434,6 +2476,7 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
|
|
|
2434
2476
|
return nodes;
|
|
2435
2477
|
}
|
|
2436
2478
|
async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOptions, repoId, skipPatchIdScan, preferredBase) {
|
|
2479
|
+
const inflightPR = /* @__PURE__ */ new Map();
|
|
2437
2480
|
const results = await Promise.allSettled(
|
|
2438
2481
|
map8(
|
|
2439
2482
|
analyzed,
|
|
@@ -2444,6 +2487,7 @@ async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOpt
|
|
|
2444
2487
|
options,
|
|
2445
2488
|
execOptions,
|
|
2446
2489
|
repoId,
|
|
2490
|
+
inflightPR,
|
|
2447
2491
|
skipPatchIdScan,
|
|
2448
2492
|
preferredBase
|
|
2449
2493
|
)
|
|
@@ -2453,6 +2497,7 @@ async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOpt
|
|
|
2453
2497
|
}
|
|
2454
2498
|
var legacyCacheCleaned = false;
|
|
2455
2499
|
async function trace(options) {
|
|
2500
|
+
const mode = resolveTraceMode(options.mode);
|
|
2456
2501
|
const { file, cwd } = await resolveFileContext(options.file, options.cwd);
|
|
2457
2502
|
const warnings = [];
|
|
2458
2503
|
const execOptions = { cwd, warnings };
|
|
@@ -2474,7 +2519,7 @@ async function trace(options) {
|
|
|
2474
2519
|
}
|
|
2475
2520
|
const blameAuth = await runBlameAndAuth(
|
|
2476
2521
|
platform.adapter,
|
|
2477
|
-
{ ...options, file, cwd },
|
|
2522
|
+
{ ...options, mode, file, cwd },
|
|
2478
2523
|
execOptions
|
|
2479
2524
|
);
|
|
2480
2525
|
const operatingLevel = blameAuth.operatingLevel || platform.operatingLevel;
|
|
@@ -2517,7 +2562,7 @@ async function trace(options) {
|
|
|
2517
2562
|
blameAuth.analyzed,
|
|
2518
2563
|
featureFlags,
|
|
2519
2564
|
platform.adapter,
|
|
2520
|
-
{ ...options, file, cwd },
|
|
2565
|
+
{ ...options, mode, file, cwd },
|
|
2521
2566
|
execOptions,
|
|
2522
2567
|
repoId,
|
|
2523
2568
|
cloneStatus.partialClone || void 0,
|
|
@@ -2728,7 +2773,11 @@ function formatNodeHuman(node) {
|
|
|
2728
2773
|
init_normalizer();
|
|
2729
2774
|
init_errors();
|
|
2730
2775
|
function registerTraceCommand(program2) {
|
|
2731
|
-
program2.command("trace <file>").description("Trace a file line to its originating PR").requiredOption("-L, --line <range>", 'Line number or range (e.g., "42" or "10,50")').option("--deep", "Enable deep trace for squash PRs").option("--no-ast", "Disable AST diff analysis").option("--no-cache", "Disable cache").option("--cache-only", "Return cached results only (no API calls)").option("--json", "Output in JSON format").option("-q, --quiet", "Output PR number only").option("--output <format>", "Output format: human, json, llm", "human").option("--no-color", "Disable colored output").action(async (file, opts) => {
|
|
2776
|
+
program2.command("trace <file>").description("Trace a file line to its originating or last-change PR").requiredOption("-L, --line <range>", 'Line number or range (e.g., "42" or "10,50")').option("--mode <mode>", "Trace mode: origin (default, follows copy/move history) or change (last meaningful local change)", "origin").option("--deep", "Enable deep trace for squash PRs").option("--no-ast", "Disable AST diff analysis").option("--no-cache", "Disable cache").option("--cache-only", "Return cached results only (no API calls)").option("--json", "Output in JSON format").option("-q, --quiet", "Output PR number only").option("--output <format>", "Output format: human, json, llm", "human").option("--no-color", "Disable colored output").action(async (file, opts) => {
|
|
2777
|
+
const mode = opts.mode;
|
|
2778
|
+
if (mode !== "origin" && mode !== "change") {
|
|
2779
|
+
throw new Error(`Invalid trace mode: ${String(mode)}`);
|
|
2780
|
+
}
|
|
2732
2781
|
const lineStr = opts.line;
|
|
2733
2782
|
const parts = lineStr.split(",");
|
|
2734
2783
|
const line = parseInt(parts[0], 10);
|
|
@@ -2737,6 +2786,7 @@ function registerTraceCommand(program2) {
|
|
|
2737
2786
|
file,
|
|
2738
2787
|
line,
|
|
2739
2788
|
endLine,
|
|
2789
|
+
mode,
|
|
2740
2790
|
deep: opts.deep,
|
|
2741
2791
|
noAst: opts.ast === false,
|
|
2742
2792
|
noCache: opts.cache === false,
|
|
@@ -2777,7 +2827,7 @@ init_errors();
|
|
|
2777
2827
|
// src/utils/command-registry.ts
|
|
2778
2828
|
var TRACE_COMMAND = {
|
|
2779
2829
|
name: "trace",
|
|
2780
|
-
description: "Trace a file line to its originating PR",
|
|
2830
|
+
description: "Trace a file line to its originating or last-change PR",
|
|
2781
2831
|
usage: "line-lore trace <file> [options]",
|
|
2782
2832
|
arguments: [
|
|
2783
2833
|
{
|
|
@@ -2792,6 +2842,12 @@ var TRACE_COMMAND = {
|
|
|
2792
2842
|
description: 'Line number or range (e.g., "42" or "10,50")',
|
|
2793
2843
|
type: "string"
|
|
2794
2844
|
},
|
|
2845
|
+
{
|
|
2846
|
+
flag: "--mode <mode>",
|
|
2847
|
+
description: "Trace mode: origin or change",
|
|
2848
|
+
type: "string",
|
|
2849
|
+
default: "origin"
|
|
2850
|
+
},
|
|
2795
2851
|
{
|
|
2796
2852
|
flag: "--deep",
|
|
2797
2853
|
description: "Enable deep trace for squash PRs",
|
|
@@ -5,6 +5,11 @@ export interface AncestryResult {
|
|
|
5
5
|
subject: string;
|
|
6
6
|
}
|
|
7
7
|
export declare const DEFAULT_ANCESTRY_TIMEOUT = 30000;
|
|
8
|
+
/**
|
|
9
|
+
* @deprecated Use {@link findMergeCommits} (plural) instead.
|
|
10
|
+
* Returns only the first verified merge commit. The plural version returns
|
|
11
|
+
* multiple candidates from both first-parent and full ancestry paths.
|
|
12
|
+
*/
|
|
8
13
|
export declare function findMergeCommit(commitSha: string, options?: GitExecOptions & {
|
|
9
14
|
ref?: string;
|
|
10
15
|
}): Promise<AncestryResult | null>;
|
|
@@ -19,6 +24,19 @@ export declare function findMergeCommit(commitSha: string, options?: GitExecOpti
|
|
|
19
24
|
* Returns false on git command failure (fail-skip policy).
|
|
20
25
|
*/
|
|
21
26
|
export declare function verifyMergeIntroducesCommit(targetSha: string, mergeResult: AncestryResult, options?: GitExecOptions): Promise<boolean>;
|
|
27
|
+
/**
|
|
28
|
+
* Multi-candidate merge commit search.
|
|
29
|
+
* Returns up to MAX_CANDIDATES verified merge commits from both first-parent
|
|
30
|
+
* and full ancestry paths, deduplicated by mergeCommitSha and ordered with
|
|
31
|
+
* first-parent results first.
|
|
32
|
+
*
|
|
33
|
+
* Unlike `findMergeCommit` (singular) which returns only the first verified candidate,
|
|
34
|
+
* this function enables callers to iterate through multiple candidates when the
|
|
35
|
+
* first one doesn't yield a PR (e.g., bulk merge with non-standard message).
|
|
36
|
+
*/
|
|
37
|
+
export declare function findMergeCommits(commitSha: string, options?: GitExecOptions & {
|
|
38
|
+
ref?: string;
|
|
39
|
+
}): Promise<AncestryResult[]>;
|
|
22
40
|
/** Retrieve the subject line of a single commit. Returns null on git failure. */
|
|
23
41
|
export declare function getCommitSubject(sha: string, options?: GitExecOptions): Promise<string | null>;
|
|
24
42
|
export declare function extractPRFromMergeMessage(subject: string, platform?: string): number | null;
|
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
export { extractPRFromMergeMessage, findMergeCommit, getCommitSubject, verifyMergeIntroducesCommit, } from './ancestry.js';
|
|
1
|
+
export { extractPRFromMergeMessage, findMergeCommit, findMergeCommits, getCommitSubject, verifyMergeIntroducesCommit, } from './ancestry.js';
|
|
2
2
|
export type { AncestryResult } from './ancestry.js';
|
|
@@ -1,3 +1,3 @@
|
|
|
1
|
-
import type { BlameResult, BlameStageResult, GitExecOptions, LineRange } from '../../types/index.js';
|
|
2
|
-
export declare function executeBlame(file: string, lineRange: LineRange, options?:
|
|
1
|
+
import type { BlameExecOptions, BlameResult, BlameStageResult, GitExecOptions, LineRange } from '../../types/index.js';
|
|
2
|
+
export declare function executeBlame(file: string, lineRange: LineRange, options?: BlameExecOptions): Promise<BlameResult[]>;
|
|
3
3
|
export declare function analyzeBlameResults(results: BlameResult[], filePath: string, options?: GitExecOptions): Promise<BlameStageResult[]>;
|
package/dist/core/index.d.ts
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
export { analyzeBlameResults, executeBlame, isCosmeticDiff, parsePorcelainOutput, } from './blame/index.js';
|
|
2
2
|
export { compareSymbolMaps, computeContentHash, computeExactHash, computeStructuralHash, extractSymbols, findContainingSymbol, findMatchAcrossFiles, traceByAst, } from './ast-diff/index.js';
|
|
3
|
-
export { extractPRFromMergeMessage, findMergeCommit, } from './ancestry/index.js';
|
|
3
|
+
export { extractPRFromMergeMessage, findMergeCommit, findMergeCommits, } from './ancestry/index.js';
|
|
4
4
|
export type { AncestryResult } from './ancestry/index.js';
|
|
5
5
|
export { computePatchId, findPatchIdMatch, resetPatchIdCache, } from './patch-id/index.js';
|
|
6
6
|
export type { PatchIdResult } from './patch-id/index.js';
|
package/dist/index.cjs
CHANGED
|
@@ -381,25 +381,6 @@ var init_executor = __esm({
|
|
|
381
381
|
});
|
|
382
382
|
|
|
383
383
|
// src/core/ancestry/ancestry.ts
|
|
384
|
-
async function findMergeCommit(commitSha, options) {
|
|
385
|
-
const ref = options?.ref ?? "HEAD";
|
|
386
|
-
const budget = options?.timeout ?? DEFAULT_ANCESTRY_TIMEOUT;
|
|
387
|
-
const startTime = Date.now();
|
|
388
|
-
const firstParentResult = await findMergeCommitWithArgs(
|
|
389
|
-
commitSha,
|
|
390
|
-
ref,
|
|
391
|
-
["--first-parent"],
|
|
392
|
-
{ ...options, timeout: budget }
|
|
393
|
-
);
|
|
394
|
-
if (firstParentResult) return firstParentResult;
|
|
395
|
-
const elapsed = Date.now() - startTime;
|
|
396
|
-
const remaining = budget - elapsed;
|
|
397
|
-
if (remaining <= 0) return null;
|
|
398
|
-
return findMergeCommitWithArgs(commitSha, ref, [], {
|
|
399
|
-
...options,
|
|
400
|
-
timeout: remaining
|
|
401
|
-
});
|
|
402
|
-
}
|
|
403
384
|
async function verifyMergeIntroducesCommit(targetSha, mergeResult, options) {
|
|
404
385
|
if (mergeResult.parentShas.length < 2) return true;
|
|
405
386
|
const firstParent = mergeResult.parentShas[0];
|
|
@@ -429,7 +410,7 @@ async function isAncestor(commitA, commitB, options) {
|
|
|
429
410
|
return null;
|
|
430
411
|
}
|
|
431
412
|
}
|
|
432
|
-
async function
|
|
413
|
+
async function findMergeCommitsWithArgs(commitSha, ref, extraArgs, options) {
|
|
433
414
|
try {
|
|
434
415
|
const result = await gitExec(
|
|
435
416
|
[
|
|
@@ -445,28 +426,29 @@ async function findMergeCommitWithArgs(commitSha, ref, extraArgs, options) {
|
|
|
445
426
|
{ cwd: options?.cwd, timeout: options?.timeout }
|
|
446
427
|
);
|
|
447
428
|
const lines = (0, import_common_utils9.filter)(result.stdout.trim().split("\n"), import_common_utils9.isTruthy);
|
|
448
|
-
if (lines.length === 0) return
|
|
429
|
+
if (lines.length === 0) return [];
|
|
430
|
+
const verifiedCandidates = [];
|
|
449
431
|
const candidateCount = Math.min(lines.length, MAX_CANDIDATES);
|
|
450
|
-
let
|
|
432
|
+
let attemptedCount = 0;
|
|
451
433
|
for (let i = 0; i < candidateCount; i++) {
|
|
452
434
|
const candidate = parseMergeLogLine(lines[i]);
|
|
453
435
|
if (!candidate) continue;
|
|
454
|
-
|
|
436
|
+
attemptedCount++;
|
|
455
437
|
const verified = await verifyMergeIntroducesCommit(
|
|
456
438
|
commitSha,
|
|
457
439
|
candidate,
|
|
458
440
|
options
|
|
459
441
|
);
|
|
460
|
-
if (verified)
|
|
442
|
+
if (verified) verifiedCandidates.push(candidate);
|
|
461
443
|
}
|
|
462
|
-
if (
|
|
444
|
+
if (attemptedCount > 0 && verifiedCandidates.length === 0 && options?.warnings) {
|
|
463
445
|
options.warnings.push(
|
|
464
|
-
`ancestry: all ${
|
|
446
|
+
`ancestry: all ${attemptedCount} merge candidate(s) failed verification for ${commitSha.slice(0, 8)}`
|
|
465
447
|
);
|
|
466
448
|
}
|
|
467
|
-
return
|
|
449
|
+
return verifiedCandidates;
|
|
468
450
|
} catch {
|
|
469
|
-
return
|
|
451
|
+
return [];
|
|
470
452
|
}
|
|
471
453
|
}
|
|
472
454
|
function parseMergeLogLine(line) {
|
|
@@ -486,6 +468,38 @@ function parseMergeLogLine(line) {
|
|
|
486
468
|
const subject = parts.slice(subjectStart).join(" ");
|
|
487
469
|
return { mergeCommitSha, parentShas, subject };
|
|
488
470
|
}
|
|
471
|
+
async function findMergeCommits(commitSha, options) {
|
|
472
|
+
const ref = options?.ref ?? "HEAD";
|
|
473
|
+
const budget = options?.timeout ?? DEFAULT_ANCESTRY_TIMEOUT;
|
|
474
|
+
const startTime = Date.now();
|
|
475
|
+
const results = [];
|
|
476
|
+
const seen = /* @__PURE__ */ new Set();
|
|
477
|
+
const pushUnique = (candidates) => {
|
|
478
|
+
for (const candidate of candidates) {
|
|
479
|
+
if (seen.has(candidate.mergeCommitSha)) continue;
|
|
480
|
+
seen.add(candidate.mergeCommitSha);
|
|
481
|
+
results.push(candidate);
|
|
482
|
+
if (results.length >= MAX_CANDIDATES) break;
|
|
483
|
+
}
|
|
484
|
+
};
|
|
485
|
+
const firstParent = await findMergeCommitsWithArgs(
|
|
486
|
+
commitSha,
|
|
487
|
+
ref,
|
|
488
|
+
["--first-parent"],
|
|
489
|
+
{ ...options, timeout: budget }
|
|
490
|
+
);
|
|
491
|
+
pushUnique(firstParent);
|
|
492
|
+
const elapsed = Date.now() - startTime;
|
|
493
|
+
const remaining = budget - elapsed;
|
|
494
|
+
if (remaining > 0 && results.length < MAX_CANDIDATES) {
|
|
495
|
+
const full = await findMergeCommitsWithArgs(commitSha, ref, [], {
|
|
496
|
+
...options,
|
|
497
|
+
timeout: remaining
|
|
498
|
+
});
|
|
499
|
+
pushUnique(full);
|
|
500
|
+
}
|
|
501
|
+
return results;
|
|
502
|
+
}
|
|
489
503
|
async function getCommitSubject(sha, options) {
|
|
490
504
|
try {
|
|
491
505
|
const result = await gitExec(["log", "-1", "--format=%s", sha], {
|
|
@@ -689,16 +703,17 @@ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
|
|
|
689
703
|
}
|
|
690
704
|
}
|
|
691
705
|
let mergeBasedPR = null;
|
|
692
|
-
const
|
|
693
|
-
|
|
706
|
+
const mergeCandidates = await findMergeCommits(commitSha, options);
|
|
707
|
+
const hasAncestryMerges = mergeCandidates.length > 0;
|
|
708
|
+
for (const candidate of mergeCandidates) {
|
|
694
709
|
const prNumber = extractPRFromMergeMessage(
|
|
695
|
-
|
|
710
|
+
candidate.subject,
|
|
696
711
|
options?.platform
|
|
697
712
|
);
|
|
698
713
|
if (prNumber) {
|
|
699
714
|
if (adapter) {
|
|
700
715
|
const prInfo = await adapter.getPRForCommit(
|
|
701
|
-
|
|
716
|
+
candidate.mergeCommitSha,
|
|
702
717
|
prSelectOptions
|
|
703
718
|
);
|
|
704
719
|
if (prInfo?.mergedAt) {
|
|
@@ -708,23 +723,32 @@ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
|
|
|
708
723
|
if (!mergeBasedPR) {
|
|
709
724
|
mergeBasedPR = {
|
|
710
725
|
number: prNumber,
|
|
711
|
-
title:
|
|
726
|
+
title: candidate.subject,
|
|
712
727
|
author: "",
|
|
713
728
|
url: "",
|
|
714
|
-
mergeCommit:
|
|
729
|
+
mergeCommit: candidate.mergeCommitSha,
|
|
715
730
|
baseBranch: "",
|
|
716
731
|
resolvedVia: "ancestry"
|
|
717
732
|
};
|
|
718
733
|
}
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
734
|
+
break;
|
|
735
|
+
}
|
|
736
|
+
if (adapter) {
|
|
737
|
+
const mergeCommitPR = await adapter.getPRForCommit(
|
|
738
|
+
candidate.mergeCommitSha,
|
|
739
|
+
prSelectOptions
|
|
740
|
+
);
|
|
741
|
+
if (mergeCommitPR?.mergedAt) {
|
|
742
|
+
mergeBasedPR = { ...mergeCommitPR, resolvedVia: "ancestry" };
|
|
743
|
+
break;
|
|
722
744
|
}
|
|
723
745
|
}
|
|
724
746
|
}
|
|
725
747
|
if (mergeBasedPR) {
|
|
726
|
-
|
|
727
|
-
|
|
748
|
+
if (!options?.deep || mergeBasedPR.mergedAt) {
|
|
749
|
+
await cache.set(commitSha, toCachedPR(mergeBasedPR));
|
|
750
|
+
return mergeBasedPR;
|
|
751
|
+
}
|
|
728
752
|
}
|
|
729
753
|
const commitSubject = await getCommitSubject(commitSha, options);
|
|
730
754
|
if (commitSubject) {
|
|
@@ -746,7 +770,7 @@ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
|
|
|
746
770
|
return subjectPR;
|
|
747
771
|
}
|
|
748
772
|
}
|
|
749
|
-
if (!options?.skipPatchIdScan && _recursionDepth < MAX_RECURSION_DEPTH) {
|
|
773
|
+
if (!options?.skipPatchIdScan && _recursionDepth < MAX_RECURSION_DEPTH && (!hasAncestryMerges || options?.deep)) {
|
|
750
774
|
const patchIdMatch = await findPatchIdMatch(commitSha, {
|
|
751
775
|
...options,
|
|
752
776
|
scanDepth: options?.deep ? DEEP_SCAN_DEPTH : void 0
|
|
@@ -764,6 +788,10 @@ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
|
|
|
764
788
|
}
|
|
765
789
|
}
|
|
766
790
|
}
|
|
791
|
+
if (mergeBasedPR) {
|
|
792
|
+
await cache.set(commitSha, toCachedPR(mergeBasedPR));
|
|
793
|
+
return mergeBasedPR;
|
|
794
|
+
}
|
|
767
795
|
return null;
|
|
768
796
|
}
|
|
769
797
|
function resetPRCache() {
|
|
@@ -2083,6 +2111,7 @@ function parsePorcelainOutput(output) {
|
|
|
2083
2111
|
}
|
|
2084
2112
|
let commitHash = headerMatch[1];
|
|
2085
2113
|
const originalLine = parseInt(headerMatch[2], 10);
|
|
2114
|
+
const finalLine = parseInt(headerMatch[3], 10) || 0;
|
|
2086
2115
|
const isBoundary = commitHash.startsWith("^");
|
|
2087
2116
|
if (isBoundary) {
|
|
2088
2117
|
commitHash = commitHash.slice(1).padStart(40, "0");
|
|
@@ -2126,6 +2155,7 @@ function parsePorcelainOutput(output) {
|
|
|
2126
2155
|
authorEmail: cleanEmail,
|
|
2127
2156
|
date,
|
|
2128
2157
|
lineContent,
|
|
2158
|
+
finalLine,
|
|
2129
2159
|
originalFile,
|
|
2130
2160
|
originalLine: originalFile ? originalLine : void 0
|
|
2131
2161
|
});
|
|
@@ -2136,10 +2166,8 @@ function parsePorcelainOutput(output) {
|
|
|
2136
2166
|
// src/core/blame/blame.ts
|
|
2137
2167
|
async function executeBlame(file, lineRange, options) {
|
|
2138
2168
|
const lineSpec = `${lineRange.start},${lineRange.end}`;
|
|
2139
|
-
const
|
|
2140
|
-
|
|
2141
|
-
options
|
|
2142
|
-
);
|
|
2169
|
+
const args = options?.mode === "change" ? ["blame", "-w", "--porcelain", "-L", lineSpec, file] : ["blame", "-w", "-C", "-C", "-M", "--porcelain", "-L", lineSpec, file];
|
|
2170
|
+
const result = await gitExec(args, options);
|
|
2143
2171
|
return parsePorcelainOutput(result.stdout);
|
|
2144
2172
|
}
|
|
2145
2173
|
async function analyzeBlameResults(results, filePath, options) {
|
|
@@ -2350,9 +2378,10 @@ async function runBlameAndAuth(adapter, options, execOptions) {
|
|
|
2350
2378
|
const lineRange = parseLineRange(
|
|
2351
2379
|
options.endLine ? `${options.line},${options.endLine}` : `${options.line}`
|
|
2352
2380
|
);
|
|
2353
|
-
const blameChain = executeBlame(options.file, lineRange,
|
|
2354
|
-
|
|
2355
|
-
|
|
2381
|
+
const blameChain = executeBlame(options.file, lineRange, {
|
|
2382
|
+
...execOptions,
|
|
2383
|
+
mode: options.mode
|
|
2384
|
+
}).then((results) => analyzeBlameResults(results, options.file, execOptions));
|
|
2356
2385
|
const [authResult, blameResult] = await Promise.allSettled([
|
|
2357
2386
|
adapter ? adapter.checkAuth() : Promise.resolve({ authenticated: false }),
|
|
2358
2387
|
blameChain
|
|
@@ -2373,12 +2402,24 @@ async function runBlameAndAuth(adapter, options, execOptions) {
|
|
|
2373
2402
|
}
|
|
2374
2403
|
return { analyzed: blameResult.value, operatingLevel, warnings };
|
|
2375
2404
|
}
|
|
2376
|
-
|
|
2405
|
+
function resolveTraceMode(mode) {
|
|
2406
|
+
return mode ?? "origin";
|
|
2407
|
+
}
|
|
2408
|
+
function deduplicatedLookupPR(sha, adapter, options, inflight) {
|
|
2409
|
+
const existing = inflight.get(sha);
|
|
2410
|
+
if (existing) return existing;
|
|
2411
|
+
const promise = lookupPR(sha, adapter, options);
|
|
2412
|
+
inflight.set(sha, promise);
|
|
2413
|
+
promise.finally(() => inflight.delete(sha));
|
|
2414
|
+
return promise;
|
|
2415
|
+
}
|
|
2416
|
+
async function processEntry(entry, featureFlags, adapter, options, execOptions, repoId, inflightPR, skipPatchIdScan, preferredBase) {
|
|
2377
2417
|
const nodes = [];
|
|
2418
|
+
const traceMode = resolveTraceMode(options.mode);
|
|
2378
2419
|
const commitNode = {
|
|
2379
2420
|
type: entry.isCosmetic ? "cosmetic_commit" : "original_commit",
|
|
2380
2421
|
sha: entry.blame.commitHash,
|
|
2381
|
-
trackingMethod: "blame-CMw",
|
|
2422
|
+
trackingMethod: traceMode === "change" ? "blame" : "blame-CMw",
|
|
2382
2423
|
confidence: "exact",
|
|
2383
2424
|
note: entry.cosmeticReason ? `Cosmetic change: ${entry.cosmeticReason}` : void 0
|
|
2384
2425
|
};
|
|
@@ -2400,17 +2441,18 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
|
|
|
2400
2441
|
}
|
|
2401
2442
|
}
|
|
2402
2443
|
const targetSha = nodes[nodes.length - 1].sha;
|
|
2444
|
+
const prLookupOptions = {
|
|
2445
|
+
...execOptions,
|
|
2446
|
+
noCache: options.noCache,
|
|
2447
|
+
cacheOnly: options.cacheOnly,
|
|
2448
|
+
deep: featureFlags.deepTrace,
|
|
2449
|
+
repoId,
|
|
2450
|
+
skipPatchIdScan,
|
|
2451
|
+
preferredBase,
|
|
2452
|
+
platform: adapter?.platform
|
|
2453
|
+
};
|
|
2403
2454
|
if (targetSha) {
|
|
2404
|
-
const prInfo = await
|
|
2405
|
-
...execOptions,
|
|
2406
|
-
noCache: options.noCache,
|
|
2407
|
-
cacheOnly: options.cacheOnly,
|
|
2408
|
-
deep: featureFlags.deepTrace,
|
|
2409
|
-
repoId,
|
|
2410
|
-
skipPatchIdScan,
|
|
2411
|
-
preferredBase,
|
|
2412
|
-
platform: adapter?.platform
|
|
2413
|
-
});
|
|
2455
|
+
const prInfo = await deduplicatedLookupPR(targetSha, adapter, prLookupOptions, inflightPR);
|
|
2414
2456
|
if (prInfo) {
|
|
2415
2457
|
nodes.push({
|
|
2416
2458
|
type: "pull_request",
|
|
@@ -2427,6 +2469,7 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
|
|
|
2427
2469
|
return nodes;
|
|
2428
2470
|
}
|
|
2429
2471
|
async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOptions, repoId, skipPatchIdScan, preferredBase) {
|
|
2472
|
+
const inflightPR = /* @__PURE__ */ new Map();
|
|
2430
2473
|
const results = await Promise.allSettled(
|
|
2431
2474
|
(0, import_common_utils11.map)(
|
|
2432
2475
|
analyzed,
|
|
@@ -2437,6 +2480,7 @@ async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOpt
|
|
|
2437
2480
|
options,
|
|
2438
2481
|
execOptions,
|
|
2439
2482
|
repoId,
|
|
2483
|
+
inflightPR,
|
|
2440
2484
|
skipPatchIdScan,
|
|
2441
2485
|
preferredBase
|
|
2442
2486
|
)
|
|
@@ -2446,6 +2490,7 @@ async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOpt
|
|
|
2446
2490
|
}
|
|
2447
2491
|
var legacyCacheCleaned = false;
|
|
2448
2492
|
async function trace(options) {
|
|
2493
|
+
const mode = resolveTraceMode(options.mode);
|
|
2449
2494
|
const { file, cwd } = await resolveFileContext(options.file, options.cwd);
|
|
2450
2495
|
const warnings = [];
|
|
2451
2496
|
const execOptions = { cwd, warnings };
|
|
@@ -2467,7 +2512,7 @@ async function trace(options) {
|
|
|
2467
2512
|
}
|
|
2468
2513
|
const blameAuth = await runBlameAndAuth(
|
|
2469
2514
|
platform.adapter,
|
|
2470
|
-
{ ...options, file, cwd },
|
|
2515
|
+
{ ...options, mode, file, cwd },
|
|
2471
2516
|
execOptions
|
|
2472
2517
|
);
|
|
2473
2518
|
const operatingLevel = blameAuth.operatingLevel || platform.operatingLevel;
|
|
@@ -2510,7 +2555,7 @@ async function trace(options) {
|
|
|
2510
2555
|
blameAuth.analyzed,
|
|
2511
2556
|
featureFlags,
|
|
2512
2557
|
platform.adapter,
|
|
2513
|
-
{ ...options, file, cwd },
|
|
2558
|
+
{ ...options, mode, file, cwd },
|
|
2514
2559
|
execOptions,
|
|
2515
2560
|
repoId,
|
|
2516
2561
|
cloneStatus.partialClone || void 0,
|
package/dist/index.d.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
export type { AstDiffStageResult, AstTraceResult, AuthStatus, BlameResult, BlameStageResult, CacheEntry, ChangeType, CommitInfo, ComparisonResult, Confidence, ContentHash, CosmeticReason, FeatureFlags, GraphOptions, GraphResult, HealthReport, IssueInfo, LineRange, NormalizedResponse, OperatingLevel, PlatformAdapter, PlatformType, PRInfo, RateLimitInfo, RemoteInfo, SymbolInfo, SymbolKind, TraceNode, TraceNodeType, TraceOptions, TraceResult, TrackingMethod, } from './types/index.js';
|
|
1
|
+
export type { AstDiffStageResult, AstTraceResult, AuthStatus, BlameResult, BlameStageResult, CacheEntry, ChangeType, CommitInfo, ComparisonResult, Confidence, ContentHash, CosmeticReason, FeatureFlags, GraphOptions, GraphResult, HealthReport, IssueInfo, LineRange, NormalizedResponse, OperatingLevel, PlatformAdapter, PlatformType, PRInfo, RateLimitInfo, RemoteInfo, SymbolInfo, SymbolKind, TraceNode, TraceNodeType, TraceMode, TraceOptions, TraceResult, TrackingMethod, } from './types/index.js';
|
|
2
2
|
export { LineLoreError, LineLoreErrorCode } from './errors.js';
|
|
3
3
|
export { clearCache, graph, health, trace } from './core/core.js';
|
|
4
4
|
export type { TraceFullResult } from './core/core.js';
|
package/dist/index.mjs
CHANGED
|
@@ -368,25 +368,6 @@ var init_executor = __esm({
|
|
|
368
368
|
|
|
369
369
|
// src/core/ancestry/ancestry.ts
|
|
370
370
|
import { filter as filter4, isTruthy as isTruthy4 } from "@winglet/common-utils";
|
|
371
|
-
async function findMergeCommit(commitSha, options) {
|
|
372
|
-
const ref = options?.ref ?? "HEAD";
|
|
373
|
-
const budget = options?.timeout ?? DEFAULT_ANCESTRY_TIMEOUT;
|
|
374
|
-
const startTime = Date.now();
|
|
375
|
-
const firstParentResult = await findMergeCommitWithArgs(
|
|
376
|
-
commitSha,
|
|
377
|
-
ref,
|
|
378
|
-
["--first-parent"],
|
|
379
|
-
{ ...options, timeout: budget }
|
|
380
|
-
);
|
|
381
|
-
if (firstParentResult) return firstParentResult;
|
|
382
|
-
const elapsed = Date.now() - startTime;
|
|
383
|
-
const remaining = budget - elapsed;
|
|
384
|
-
if (remaining <= 0) return null;
|
|
385
|
-
return findMergeCommitWithArgs(commitSha, ref, [], {
|
|
386
|
-
...options,
|
|
387
|
-
timeout: remaining
|
|
388
|
-
});
|
|
389
|
-
}
|
|
390
371
|
async function verifyMergeIntroducesCommit(targetSha, mergeResult, options) {
|
|
391
372
|
if (mergeResult.parentShas.length < 2) return true;
|
|
392
373
|
const firstParent = mergeResult.parentShas[0];
|
|
@@ -416,7 +397,7 @@ async function isAncestor(commitA, commitB, options) {
|
|
|
416
397
|
return null;
|
|
417
398
|
}
|
|
418
399
|
}
|
|
419
|
-
async function
|
|
400
|
+
async function findMergeCommitsWithArgs(commitSha, ref, extraArgs, options) {
|
|
420
401
|
try {
|
|
421
402
|
const result = await gitExec(
|
|
422
403
|
[
|
|
@@ -432,28 +413,29 @@ async function findMergeCommitWithArgs(commitSha, ref, extraArgs, options) {
|
|
|
432
413
|
{ cwd: options?.cwd, timeout: options?.timeout }
|
|
433
414
|
);
|
|
434
415
|
const lines = filter4(result.stdout.trim().split("\n"), isTruthy4);
|
|
435
|
-
if (lines.length === 0) return
|
|
416
|
+
if (lines.length === 0) return [];
|
|
417
|
+
const verifiedCandidates = [];
|
|
436
418
|
const candidateCount = Math.min(lines.length, MAX_CANDIDATES);
|
|
437
|
-
let
|
|
419
|
+
let attemptedCount = 0;
|
|
438
420
|
for (let i = 0; i < candidateCount; i++) {
|
|
439
421
|
const candidate = parseMergeLogLine(lines[i]);
|
|
440
422
|
if (!candidate) continue;
|
|
441
|
-
|
|
423
|
+
attemptedCount++;
|
|
442
424
|
const verified = await verifyMergeIntroducesCommit(
|
|
443
425
|
commitSha,
|
|
444
426
|
candidate,
|
|
445
427
|
options
|
|
446
428
|
);
|
|
447
|
-
if (verified)
|
|
429
|
+
if (verified) verifiedCandidates.push(candidate);
|
|
448
430
|
}
|
|
449
|
-
if (
|
|
431
|
+
if (attemptedCount > 0 && verifiedCandidates.length === 0 && options?.warnings) {
|
|
450
432
|
options.warnings.push(
|
|
451
|
-
`ancestry: all ${
|
|
433
|
+
`ancestry: all ${attemptedCount} merge candidate(s) failed verification for ${commitSha.slice(0, 8)}`
|
|
452
434
|
);
|
|
453
435
|
}
|
|
454
|
-
return
|
|
436
|
+
return verifiedCandidates;
|
|
455
437
|
} catch {
|
|
456
|
-
return
|
|
438
|
+
return [];
|
|
457
439
|
}
|
|
458
440
|
}
|
|
459
441
|
function parseMergeLogLine(line) {
|
|
@@ -473,6 +455,38 @@ function parseMergeLogLine(line) {
|
|
|
473
455
|
const subject = parts.slice(subjectStart).join(" ");
|
|
474
456
|
return { mergeCommitSha, parentShas, subject };
|
|
475
457
|
}
|
|
458
|
+
async function findMergeCommits(commitSha, options) {
|
|
459
|
+
const ref = options?.ref ?? "HEAD";
|
|
460
|
+
const budget = options?.timeout ?? DEFAULT_ANCESTRY_TIMEOUT;
|
|
461
|
+
const startTime = Date.now();
|
|
462
|
+
const results = [];
|
|
463
|
+
const seen = /* @__PURE__ */ new Set();
|
|
464
|
+
const pushUnique = (candidates) => {
|
|
465
|
+
for (const candidate of candidates) {
|
|
466
|
+
if (seen.has(candidate.mergeCommitSha)) continue;
|
|
467
|
+
seen.add(candidate.mergeCommitSha);
|
|
468
|
+
results.push(candidate);
|
|
469
|
+
if (results.length >= MAX_CANDIDATES) break;
|
|
470
|
+
}
|
|
471
|
+
};
|
|
472
|
+
const firstParent = await findMergeCommitsWithArgs(
|
|
473
|
+
commitSha,
|
|
474
|
+
ref,
|
|
475
|
+
["--first-parent"],
|
|
476
|
+
{ ...options, timeout: budget }
|
|
477
|
+
);
|
|
478
|
+
pushUnique(firstParent);
|
|
479
|
+
const elapsed = Date.now() - startTime;
|
|
480
|
+
const remaining = budget - elapsed;
|
|
481
|
+
if (remaining > 0 && results.length < MAX_CANDIDATES) {
|
|
482
|
+
const full = await findMergeCommitsWithArgs(commitSha, ref, [], {
|
|
483
|
+
...options,
|
|
484
|
+
timeout: remaining
|
|
485
|
+
});
|
|
486
|
+
pushUnique(full);
|
|
487
|
+
}
|
|
488
|
+
return results;
|
|
489
|
+
}
|
|
476
490
|
async function getCommitSubject(sha, options) {
|
|
477
491
|
try {
|
|
478
492
|
const result = await gitExec(["log", "-1", "--format=%s", sha], {
|
|
@@ -675,16 +689,17 @@ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
|
|
|
675
689
|
}
|
|
676
690
|
}
|
|
677
691
|
let mergeBasedPR = null;
|
|
678
|
-
const
|
|
679
|
-
|
|
692
|
+
const mergeCandidates = await findMergeCommits(commitSha, options);
|
|
693
|
+
const hasAncestryMerges = mergeCandidates.length > 0;
|
|
694
|
+
for (const candidate of mergeCandidates) {
|
|
680
695
|
const prNumber = extractPRFromMergeMessage(
|
|
681
|
-
|
|
696
|
+
candidate.subject,
|
|
682
697
|
options?.platform
|
|
683
698
|
);
|
|
684
699
|
if (prNumber) {
|
|
685
700
|
if (adapter) {
|
|
686
701
|
const prInfo = await adapter.getPRForCommit(
|
|
687
|
-
|
|
702
|
+
candidate.mergeCommitSha,
|
|
688
703
|
prSelectOptions
|
|
689
704
|
);
|
|
690
705
|
if (prInfo?.mergedAt) {
|
|
@@ -694,23 +709,32 @@ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
|
|
|
694
709
|
if (!mergeBasedPR) {
|
|
695
710
|
mergeBasedPR = {
|
|
696
711
|
number: prNumber,
|
|
697
|
-
title:
|
|
712
|
+
title: candidate.subject,
|
|
698
713
|
author: "",
|
|
699
714
|
url: "",
|
|
700
|
-
mergeCommit:
|
|
715
|
+
mergeCommit: candidate.mergeCommitSha,
|
|
701
716
|
baseBranch: "",
|
|
702
717
|
resolvedVia: "ancestry"
|
|
703
718
|
};
|
|
704
719
|
}
|
|
705
|
-
|
|
706
|
-
|
|
707
|
-
|
|
720
|
+
break;
|
|
721
|
+
}
|
|
722
|
+
if (adapter) {
|
|
723
|
+
const mergeCommitPR = await adapter.getPRForCommit(
|
|
724
|
+
candidate.mergeCommitSha,
|
|
725
|
+
prSelectOptions
|
|
726
|
+
);
|
|
727
|
+
if (mergeCommitPR?.mergedAt) {
|
|
728
|
+
mergeBasedPR = { ...mergeCommitPR, resolvedVia: "ancestry" };
|
|
729
|
+
break;
|
|
708
730
|
}
|
|
709
731
|
}
|
|
710
732
|
}
|
|
711
733
|
if (mergeBasedPR) {
|
|
712
|
-
|
|
713
|
-
|
|
734
|
+
if (!options?.deep || mergeBasedPR.mergedAt) {
|
|
735
|
+
await cache.set(commitSha, toCachedPR(mergeBasedPR));
|
|
736
|
+
return mergeBasedPR;
|
|
737
|
+
}
|
|
714
738
|
}
|
|
715
739
|
const commitSubject = await getCommitSubject(commitSha, options);
|
|
716
740
|
if (commitSubject) {
|
|
@@ -732,7 +756,7 @@ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
|
|
|
732
756
|
return subjectPR;
|
|
733
757
|
}
|
|
734
758
|
}
|
|
735
|
-
if (!options?.skipPatchIdScan && _recursionDepth < MAX_RECURSION_DEPTH) {
|
|
759
|
+
if (!options?.skipPatchIdScan && _recursionDepth < MAX_RECURSION_DEPTH && (!hasAncestryMerges || options?.deep)) {
|
|
736
760
|
const patchIdMatch = await findPatchIdMatch(commitSha, {
|
|
737
761
|
...options,
|
|
738
762
|
scanDepth: options?.deep ? DEEP_SCAN_DEPTH : void 0
|
|
@@ -750,6 +774,10 @@ async function lookupPR(commitSha, adapter, options, _recursionDepth = 0) {
|
|
|
750
774
|
}
|
|
751
775
|
}
|
|
752
776
|
}
|
|
777
|
+
if (mergeBasedPR) {
|
|
778
|
+
await cache.set(commitSha, toCachedPR(mergeBasedPR));
|
|
779
|
+
return mergeBasedPR;
|
|
780
|
+
}
|
|
753
781
|
return null;
|
|
754
782
|
}
|
|
755
783
|
function resetPRCache() {
|
|
@@ -2058,6 +2086,7 @@ function parsePorcelainOutput(output) {
|
|
|
2058
2086
|
}
|
|
2059
2087
|
let commitHash = headerMatch[1];
|
|
2060
2088
|
const originalLine = parseInt(headerMatch[2], 10);
|
|
2089
|
+
const finalLine = parseInt(headerMatch[3], 10) || 0;
|
|
2061
2090
|
const isBoundary = commitHash.startsWith("^");
|
|
2062
2091
|
if (isBoundary) {
|
|
2063
2092
|
commitHash = commitHash.slice(1).padStart(40, "0");
|
|
@@ -2101,6 +2130,7 @@ function parsePorcelainOutput(output) {
|
|
|
2101
2130
|
authorEmail: cleanEmail,
|
|
2102
2131
|
date,
|
|
2103
2132
|
lineContent,
|
|
2133
|
+
finalLine,
|
|
2104
2134
|
originalFile,
|
|
2105
2135
|
originalLine: originalFile ? originalLine : void 0
|
|
2106
2136
|
});
|
|
@@ -2111,10 +2141,8 @@ function parsePorcelainOutput(output) {
|
|
|
2111
2141
|
// src/core/blame/blame.ts
|
|
2112
2142
|
async function executeBlame(file, lineRange, options) {
|
|
2113
2143
|
const lineSpec = `${lineRange.start},${lineRange.end}`;
|
|
2114
|
-
const
|
|
2115
|
-
|
|
2116
|
-
options
|
|
2117
|
-
);
|
|
2144
|
+
const args = options?.mode === "change" ? ["blame", "-w", "--porcelain", "-L", lineSpec, file] : ["blame", "-w", "-C", "-C", "-M", "--porcelain", "-L", lineSpec, file];
|
|
2145
|
+
const result = await gitExec(args, options);
|
|
2118
2146
|
return parsePorcelainOutput(result.stdout);
|
|
2119
2147
|
}
|
|
2120
2148
|
async function analyzeBlameResults(results, filePath, options) {
|
|
@@ -2325,9 +2353,10 @@ async function runBlameAndAuth(adapter, options, execOptions) {
|
|
|
2325
2353
|
const lineRange = parseLineRange(
|
|
2326
2354
|
options.endLine ? `${options.line},${options.endLine}` : `${options.line}`
|
|
2327
2355
|
);
|
|
2328
|
-
const blameChain = executeBlame(options.file, lineRange,
|
|
2329
|
-
|
|
2330
|
-
|
|
2356
|
+
const blameChain = executeBlame(options.file, lineRange, {
|
|
2357
|
+
...execOptions,
|
|
2358
|
+
mode: options.mode
|
|
2359
|
+
}).then((results) => analyzeBlameResults(results, options.file, execOptions));
|
|
2331
2360
|
const [authResult, blameResult] = await Promise.allSettled([
|
|
2332
2361
|
adapter ? adapter.checkAuth() : Promise.resolve({ authenticated: false }),
|
|
2333
2362
|
blameChain
|
|
@@ -2348,12 +2377,24 @@ async function runBlameAndAuth(adapter, options, execOptions) {
|
|
|
2348
2377
|
}
|
|
2349
2378
|
return { analyzed: blameResult.value, operatingLevel, warnings };
|
|
2350
2379
|
}
|
|
2351
|
-
|
|
2380
|
+
function resolveTraceMode(mode) {
|
|
2381
|
+
return mode ?? "origin";
|
|
2382
|
+
}
|
|
2383
|
+
function deduplicatedLookupPR(sha, adapter, options, inflight) {
|
|
2384
|
+
const existing = inflight.get(sha);
|
|
2385
|
+
if (existing) return existing;
|
|
2386
|
+
const promise = lookupPR(sha, adapter, options);
|
|
2387
|
+
inflight.set(sha, promise);
|
|
2388
|
+
promise.finally(() => inflight.delete(sha));
|
|
2389
|
+
return promise;
|
|
2390
|
+
}
|
|
2391
|
+
async function processEntry(entry, featureFlags, adapter, options, execOptions, repoId, inflightPR, skipPatchIdScan, preferredBase) {
|
|
2352
2392
|
const nodes = [];
|
|
2393
|
+
const traceMode = resolveTraceMode(options.mode);
|
|
2353
2394
|
const commitNode = {
|
|
2354
2395
|
type: entry.isCosmetic ? "cosmetic_commit" : "original_commit",
|
|
2355
2396
|
sha: entry.blame.commitHash,
|
|
2356
|
-
trackingMethod: "blame-CMw",
|
|
2397
|
+
trackingMethod: traceMode === "change" ? "blame" : "blame-CMw",
|
|
2357
2398
|
confidence: "exact",
|
|
2358
2399
|
note: entry.cosmeticReason ? `Cosmetic change: ${entry.cosmeticReason}` : void 0
|
|
2359
2400
|
};
|
|
@@ -2375,17 +2416,18 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
|
|
|
2375
2416
|
}
|
|
2376
2417
|
}
|
|
2377
2418
|
const targetSha = nodes[nodes.length - 1].sha;
|
|
2419
|
+
const prLookupOptions = {
|
|
2420
|
+
...execOptions,
|
|
2421
|
+
noCache: options.noCache,
|
|
2422
|
+
cacheOnly: options.cacheOnly,
|
|
2423
|
+
deep: featureFlags.deepTrace,
|
|
2424
|
+
repoId,
|
|
2425
|
+
skipPatchIdScan,
|
|
2426
|
+
preferredBase,
|
|
2427
|
+
platform: adapter?.platform
|
|
2428
|
+
};
|
|
2378
2429
|
if (targetSha) {
|
|
2379
|
-
const prInfo = await
|
|
2380
|
-
...execOptions,
|
|
2381
|
-
noCache: options.noCache,
|
|
2382
|
-
cacheOnly: options.cacheOnly,
|
|
2383
|
-
deep: featureFlags.deepTrace,
|
|
2384
|
-
repoId,
|
|
2385
|
-
skipPatchIdScan,
|
|
2386
|
-
preferredBase,
|
|
2387
|
-
platform: adapter?.platform
|
|
2388
|
-
});
|
|
2430
|
+
const prInfo = await deduplicatedLookupPR(targetSha, adapter, prLookupOptions, inflightPR);
|
|
2389
2431
|
if (prInfo) {
|
|
2390
2432
|
nodes.push({
|
|
2391
2433
|
type: "pull_request",
|
|
@@ -2402,6 +2444,7 @@ async function processEntry(entry, featureFlags, adapter, options, execOptions,
|
|
|
2402
2444
|
return nodes;
|
|
2403
2445
|
}
|
|
2404
2446
|
async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOptions, repoId, skipPatchIdScan, preferredBase) {
|
|
2447
|
+
const inflightPR = /* @__PURE__ */ new Map();
|
|
2405
2448
|
const results = await Promise.allSettled(
|
|
2406
2449
|
map8(
|
|
2407
2450
|
analyzed,
|
|
@@ -2412,6 +2455,7 @@ async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOpt
|
|
|
2412
2455
|
options,
|
|
2413
2456
|
execOptions,
|
|
2414
2457
|
repoId,
|
|
2458
|
+
inflightPR,
|
|
2415
2459
|
skipPatchIdScan,
|
|
2416
2460
|
preferredBase
|
|
2417
2461
|
)
|
|
@@ -2421,6 +2465,7 @@ async function buildTraceNodes(analyzed, featureFlags, adapter, options, execOpt
|
|
|
2421
2465
|
}
|
|
2422
2466
|
var legacyCacheCleaned = false;
|
|
2423
2467
|
async function trace(options) {
|
|
2468
|
+
const mode = resolveTraceMode(options.mode);
|
|
2424
2469
|
const { file, cwd } = await resolveFileContext(options.file, options.cwd);
|
|
2425
2470
|
const warnings = [];
|
|
2426
2471
|
const execOptions = { cwd, warnings };
|
|
@@ -2442,7 +2487,7 @@ async function trace(options) {
|
|
|
2442
2487
|
}
|
|
2443
2488
|
const blameAuth = await runBlameAndAuth(
|
|
2444
2489
|
platform.adapter,
|
|
2445
|
-
{ ...options, file, cwd },
|
|
2490
|
+
{ ...options, mode, file, cwd },
|
|
2446
2491
|
execOptions
|
|
2447
2492
|
);
|
|
2448
2493
|
const operatingLevel = blameAuth.operatingLevel || platform.operatingLevel;
|
|
@@ -2485,7 +2530,7 @@ async function trace(options) {
|
|
|
2485
2530
|
blameAuth.analyzed,
|
|
2486
2531
|
featureFlags,
|
|
2487
2532
|
platform.adapter,
|
|
2488
|
-
{ ...options, file, cwd },
|
|
2533
|
+
{ ...options, mode, file, cwd },
|
|
2489
2534
|
execOptions,
|
|
2490
2535
|
repoId,
|
|
2491
2536
|
cloneStatus.partialClone || void 0,
|
package/dist/types/blame.d.ts
CHANGED
|
@@ -12,6 +12,8 @@ export interface BlameResult {
|
|
|
12
12
|
date: string;
|
|
13
13
|
/** The actual content of the blamed line */
|
|
14
14
|
lineContent: string;
|
|
15
|
+
/** Final line number in the current file */
|
|
16
|
+
finalLine: number;
|
|
15
17
|
/** Original filename if the line was moved/renamed */
|
|
16
18
|
originalFile?: string;
|
|
17
19
|
/** Original line number before any moves/renames */
|
package/dist/types/git.d.ts
CHANGED
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import type { PlatformType } from './platform.js';
|
|
2
|
+
import type { TraceMode } from './trace.js';
|
|
2
3
|
export interface GitExecResult {
|
|
3
4
|
stdout: string;
|
|
4
5
|
stderr: string;
|
|
@@ -11,6 +12,10 @@ export interface GitExecOptions {
|
|
|
11
12
|
/** Mutable array for collecting diagnostic warnings throughout the pipeline */
|
|
12
13
|
warnings?: string[];
|
|
13
14
|
}
|
|
15
|
+
export interface BlameExecOptions extends GitExecOptions {
|
|
16
|
+
/** Blame semantics used by trace mode selection */
|
|
17
|
+
mode?: TraceMode;
|
|
18
|
+
}
|
|
14
19
|
export interface RemoteInfo {
|
|
15
20
|
owner: string;
|
|
16
21
|
repo: string;
|
package/dist/types/index.d.ts
CHANGED
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
export type { SymbolKind, SymbolInfo, ContentHash, ChangeType, ComparisonResult, AstTraceResult, } from './ast.js';
|
|
2
2
|
export type { BlameResult, CommitInfo } from './blame.js';
|
|
3
3
|
export type { CacheEntry, CachedPRInfo } from './cache.js';
|
|
4
|
-
export type { GitExecResult, GitExecOptions, RemoteInfo, HealthReport, CloneStatus, } from './git.js';
|
|
4
|
+
export type { GitExecResult, GitExecOptions, BlameExecOptions, RemoteInfo, HealthReport, CloneStatus, } from './git.js';
|
|
5
5
|
export type { GraphOptions, GraphResult } from './graph.js';
|
|
6
6
|
export type { NormalizedResponse } from './output.js';
|
|
7
7
|
export type { TraceNodeType, TrackingMethod, Confidence, TraceNode, OperatingLevel, FeatureFlags, } from './pipeline.js';
|
|
8
8
|
export type { PlatformType, AuthStatus, PRInfo, IssueInfo, RateLimitInfo, PlatformAdapter, } from './platform.js';
|
|
9
9
|
export type { CosmeticReason, BlameStageResult, AstDiffStageResult, } from './stage.js';
|
|
10
|
-
export type { TraceResult, TraceOptions } from './trace.js';
|
|
10
|
+
export type { TraceMode, TraceResult, TraceOptions } from './trace.js';
|
|
11
11
|
export type { LineRange } from './util.js';
|
package/dist/types/trace.d.ts
CHANGED
|
@@ -9,6 +9,7 @@ export interface TraceResult {
|
|
|
9
9
|
/** PR information if found, null if commit is not from a PR */
|
|
10
10
|
pr: PRInfo | null;
|
|
11
11
|
}
|
|
12
|
+
export type TraceMode = 'origin' | 'change';
|
|
12
13
|
/**
|
|
13
14
|
* Options for the trace operation (library API).
|
|
14
15
|
*/
|
|
@@ -31,4 +32,6 @@ export interface TraceOptions {
|
|
|
31
32
|
noCache?: boolean;
|
|
32
33
|
/** Return cached results only — skip API calls, ancestry traversal, and patch-id scan */
|
|
33
34
|
cacheOnly?: boolean;
|
|
35
|
+
/** Trace mode. `origin` follows copy/move history, `change` finds the last meaningful local change. */
|
|
36
|
+
mode?: TraceMode;
|
|
34
37
|
}
|
package/dist/version.d.ts
CHANGED