newpr 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +189 -0
- package/package.json +78 -0
- package/src/analyzer/errors.ts +22 -0
- package/src/analyzer/pipeline.ts +299 -0
- package/src/analyzer/progress.ts +69 -0
- package/src/cli/args.ts +192 -0
- package/src/cli/auth.ts +82 -0
- package/src/cli/history-cmd.ts +64 -0
- package/src/cli/index.ts +115 -0
- package/src/cli/pretty.ts +79 -0
- package/src/config/index.ts +103 -0
- package/src/config/store.ts +50 -0
- package/src/diff/chunker.ts +30 -0
- package/src/diff/parser.ts +116 -0
- package/src/diff/stats.ts +37 -0
- package/src/github/auth.ts +16 -0
- package/src/github/fetch-diff.ts +24 -0
- package/src/github/fetch-pr.ts +90 -0
- package/src/github/parse-pr.ts +39 -0
- package/src/history/store.ts +96 -0
- package/src/history/types.ts +15 -0
- package/src/llm/claude-code-client.ts +134 -0
- package/src/llm/client.ts +240 -0
- package/src/llm/prompts.ts +176 -0
- package/src/llm/response-parser.ts +71 -0
- package/src/tui/App.tsx +97 -0
- package/src/tui/Footer.tsx +34 -0
- package/src/tui/Header.tsx +27 -0
- package/src/tui/HelpOverlay.tsx +46 -0
- package/src/tui/InputBar.tsx +65 -0
- package/src/tui/Loading.tsx +192 -0
- package/src/tui/Shell.tsx +384 -0
- package/src/tui/TabBar.tsx +31 -0
- package/src/tui/commands.ts +75 -0
- package/src/tui/narrative-parser.ts +143 -0
- package/src/tui/panels/FilesPanel.tsx +134 -0
- package/src/tui/panels/GroupsPanel.tsx +140 -0
- package/src/tui/panels/NarrativePanel.tsx +102 -0
- package/src/tui/panels/StoryPanel.tsx +296 -0
- package/src/tui/panels/SummaryPanel.tsx +59 -0
- package/src/tui/panels/WalkthroughPanel.tsx +149 -0
- package/src/tui/render.tsx +62 -0
- package/src/tui/theme.ts +44 -0
- package/src/types/config.ts +19 -0
- package/src/types/diff.ts +36 -0
- package/src/types/github.ts +28 -0
- package/src/types/output.ts +59 -0
- package/src/web/client/App.tsx +121 -0
- package/src/web/client/components/AppShell.tsx +203 -0
- package/src/web/client/components/DetailPane.tsx +141 -0
- package/src/web/client/components/ErrorScreen.tsx +119 -0
- package/src/web/client/components/InputScreen.tsx +41 -0
- package/src/web/client/components/LoadingTimeline.tsx +179 -0
- package/src/web/client/components/Markdown.tsx +109 -0
- package/src/web/client/components/ResizeHandle.tsx +45 -0
- package/src/web/client/components/ResultsScreen.tsx +185 -0
- package/src/web/client/components/SettingsPanel.tsx +299 -0
- package/src/web/client/hooks/useAnalysis.ts +153 -0
- package/src/web/client/hooks/useGithubUser.ts +24 -0
- package/src/web/client/hooks/useSessions.ts +17 -0
- package/src/web/client/hooks/useTheme.ts +34 -0
- package/src/web/client/main.tsx +12 -0
- package/src/web/client/panels/FilesPanel.tsx +85 -0
- package/src/web/client/panels/GroupsPanel.tsx +62 -0
- package/src/web/client/panels/NarrativePanel.tsx +9 -0
- package/src/web/client/panels/StoryPanel.tsx +54 -0
- package/src/web/client/panels/SummaryPanel.tsx +20 -0
- package/src/web/components/ui/button.tsx +46 -0
- package/src/web/components/ui/card.tsx +37 -0
- package/src/web/components/ui/scroll-area.tsx +39 -0
- package/src/web/components/ui/tabs.tsx +52 -0
- package/src/web/index.html +14 -0
- package/src/web/lib/utils.ts +6 -0
- package/src/web/server/routes.ts +202 -0
- package/src/web/server/session-manager.ts +147 -0
- package/src/web/server.ts +96 -0
- package/src/web/styles/globals.css +91 -0
- package/src/workspace/agent.ts +317 -0
- package/src/workspace/explore.ts +82 -0
- package/src/workspace/repo-cache.ts +69 -0
- package/src/workspace/types.ts +30 -0
- package/src/workspace/worktree.ts +129 -0
package/README.md
ADDED
@@ -0,0 +1,189 @@
# newpr

AI-powered PR review tool for understanding large pull requests with 1000+ lines of changes.

newpr fetches a GitHub PR, optionally clones the repo for deep codebase exploration using an agentic coding tool, then uses an LLM to produce a structured analysis: file summaries, logical groupings, an overall summary, and a narrative walkthrough with clickable cross-references.

## Features

- **Narrative walkthrough** — reads like an article, with `[[group:...]]` and `[[file:...]]` cross-references
- **Logical grouping** — clusters changed files by purpose (feature, refactor, bugfix, etc.)
- **Codebase exploration** — uses Claude Code / OpenCode / Codex to analyze the actual repository, not just the diff
- **Interactive TUI** — Ink-based terminal UI with tabbed panels, slash commands, ASCII logo
- **Web UI** — browser-based interface with sidebar, resizable panels, markdown rendering, dark/light mode
- **Streaming progress** — real-time SSE streaming of analysis steps
- **Session history** — saves past analyses for instant recall
- **Multi-language** — output in any language (auto-detected or configured)

## Quick Start

```bash
bun install
```

### Option A: OpenRouter API key

```bash
export OPENROUTER_API_KEY=sk-or-...
newpr https://github.com/owner/repo/pull/123
```

### Option B: Claude Code (no API key needed)

If you have [Claude Code](https://docs.anthropic.com/en/docs/claude-code) installed, newpr uses it as a fallback when no OpenRouter API key is set — for both LLM analysis and codebase exploration.

```bash
newpr https://github.com/owner/repo/pull/123
```

### Web UI

```bash
newpr --web --port 3000
```

Opens a browser-based UI at `http://localhost:3000` with:
- Left sidebar with session history
- Resizable detail panel on the right
- Clickable group/file anchors in the narrative
- Settings modal for model, agent, language configuration
- GitHub profile integration

## Usage

```
newpr                         # launch interactive shell
newpr <pr-url>                # shell with PR pre-loaded
newpr --web [--port 3000]     # launch web UI
newpr review <pr-url> --json  # non-interactive JSON output
newpr history                 # list past review sessions
newpr auth [--key <api-key>]  # configure API key
```

### Review mode options

```
--repo <owner/repo>   Repository (when using PR number only)
--model <model>       Override LLM model (default: anthropic/claude-sonnet-4.5)
--agent <tool>        Preferred agent: claude | opencode | codex (default: auto)
--no-clone            Skip git clone, diff-only analysis (faster, less context)
--json                Output raw JSON
--stream-json         Stream progress as NDJSON, then emit result
--verbose             Show progress on stderr
```
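With `--stream-json`, each progress event is written to stdout as one NDJSON line before the final result. A minimal sketch of one parsed event, assuming the field names emitted by `createStreamJsonProgress` in `src/analyzer/progress.ts` (the values here are illustrative, not real output):

```ts
// One parsed `--stream-json` progress line (illustrative values).
const event = {
  type: "progress",
  timestamp: "2025-01-01T12:00:00.000Z", // ISO timestamp of the event
  elapsed_ms: 4200,                      // milliseconds since the stream started
  stage: "analyzing",                    // one of the ProgressStage values
  stage_index: 5,                        // position of the stage in STAGE_ORDER
  total_stages: 10,
  message: "parser.ts, chunker.ts",      // human-readable progress message
  current: 20,                           // files processed so far (null when not applicable)
  total: 57,
  partial_content: null,                 // streamed LLM text during streaming stages
};
```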
### PR input formats

```bash
newpr https://github.com/owner/repo/pull/123
newpr owner/repo#123
newpr review 123 --repo owner/repo
```

## Architecture

```
src/
├── cli/          # CLI entry, arg parsing, auth, history commands
├── config/       # Config loading (~/.newpr/config.json)
├── github/       # GitHub API (fetch PR data, diff, parse URL)
├── diff/         # Unified diff parser + chunker
├── llm/          # LLM clients (OpenRouter + Claude Code fallback), prompts, response parser
├── analyzer/     # Pipeline orchestrator + progress events
├── workspace/    # Agent system (claude/opencode/codex), git operations, codebase exploration
├── types/        # Shared TypeScript types
├── history/      # Session persistence (~/.newpr/history/)
├── tui/          # Ink TUI (shell, panels, theme, slash commands)
└── web/          # Web UI
    ├── server.ts     # Bun.serve() with Tailwind CSS build
    ├── server/       # REST API + SSE endpoints, session manager
    ├── client/       # React frontend
    │   ├── components/  # AppShell, ResultsScreen, Markdown, DetailPane, etc.
    │   ├── panels/      # Story, Summary, Groups, Files, Narrative
    │   └── hooks/       # useAnalysis, useSessions, useTheme, useGithubUser
    └── styles/       # Tailwind v4 + Pretendard font
```

## Analysis Pipeline

1. **Fetch** — PR metadata, commits, and diff from GitHub API
2. **Parse** — unified diff into per-file chunks
3. **Clone** — bare repo clone with worktree checkout (cached)
4. **Explore** — 3-phase codebase exploration via agentic tool (structure → related code → issues)
5. **Analyze** — LLM summarizes each file chunk in parallel batches
6. **Group** — LLM clusters files into logical groups with types
7. **Summarize** — LLM generates purpose, scope, impact, risk level
8. **Narrate** — LLM writes a walkthrough article with cross-references
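The pipeline can also be driven programmatically. A minimal sketch, assuming a GitHub token and a config shaped the way `src/analyzer/pipeline.ts` reads it (the full `NewprConfig` type lives in `src/types/config.ts` and may require fields not shown here; paths are relative to the package root):

```ts
import { analyzePr } from "./src/analyzer/pipeline.ts";
import { createStderrProgress } from "./src/analyzer/progress.ts";

// Illustrative invocation; field names match what pipeline.ts reads from its options.
const output = await analyzePr({
  pr: { owner: "owner", repo: "repo", number: 123 },
  token: process.env.GITHUB_TOKEN ?? "",
  config: {
    openrouter_api_key: process.env.OPENROUTER_API_KEY, // omit to fall back to Claude Code
    model: "anthropic/claude-sonnet-4.5",
    language: "auto",
    max_files: 100,
    timeout: 120,
    concurrency: 5,
  },
  onProgress: createStderrProgress(), // or createStreamJsonProgress() for NDJSON
  noClone: false,                     // true skips cloning and codebase exploration
});

console.log(output.summary.purpose, output.groups.length, output.files.length);
```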
## LLM Backend

newpr supports two LLM backends:

| Backend | Setup | Use case |
|---------|-------|----------|
| **OpenRouter** | Set `OPENROUTER_API_KEY` | Full model selection (Claude, GPT-4, Gemini, etc.) |
| **Claude Code** | Install `claude` CLI | Zero-config fallback, uses your existing Claude subscription |

When no OpenRouter API key is configured, newpr automatically falls back to Claude Code for all LLM calls.

## Codebase Exploration Agents

For deep analysis beyond the diff, newpr uses an agentic coding tool to explore the actual repository:

| Agent | Command | Detection |
|-------|---------|-----------|
| Claude Code | `claude` | `which claude` |
| OpenCode | `opencode` | `which opencode` |
| Codex | `codex` | `which codex` |

The agent runs 3 exploration phases:
1. **Structure** — project type, key directories, architecture pattern
2. **Related code** — imports, usages, test coverage for changed files
3. **Issues** — breaking changes, missing error handling, inconsistencies

## Environment Variables

| Variable | Required | Description |
|----------|----------|-------------|
| `OPENROUTER_API_KEY` | No* | OpenRouter API key (*falls back to Claude Code) |
| `GITHUB_TOKEN` | No | GitHub token (falls back to `gh` CLI) |
| `NEWPR_MODEL` | No | Default model (default: `anthropic/claude-sonnet-4.5`) |
| `NEWPR_MAX_FILES` | No | Max files to analyze (default: 100) |
| `NEWPR_TIMEOUT` | No | Timeout per LLM call in seconds (default: 120) |
| `NEWPR_CONCURRENCY` | No | Parallel LLM calls (default: 5) |

## Config File

Persistent settings are stored in `~/.newpr/config.json`:

```json
{
  "openrouter_api_key": "sk-or-...",
  "model": "anthropic/claude-sonnet-4.5",
  "language": "auto",
  "agent": "claude",
  "max_files": 100,
  "timeout": 120,
  "concurrency": 5
}
```

## Development

```bash
bun install
bun test              # run tests (91 tests)
bun run typecheck     # tsc --noEmit
bun run lint          # biome check
bun run start         # launch CLI
```

## Requirements

- [Bun](https://bun.sh) ≥ 1.3
- GitHub CLI (`gh`) for authentication, or `GITHUB_TOKEN`
- One of: `OPENROUTER_API_KEY` or Claude Code (`claude` CLI)

## License

Private
package/package.json
ADDED
@@ -0,0 +1,78 @@
{
  "name": "newpr",
  "version": "0.1.0",
  "description": "AI-powered large PR review tool - understand PRs with 1000+ lines of changes",
  "module": "src/cli/index.ts",
  "type": "module",
  "bin": {
    "newpr": "./src/cli/index.ts"
  },
  "files": [
    "src/",
    "README.md"
  ],
  "scripts": {
    "start": "bun run src/cli/index.ts",
    "test": "bun test",
    "lint": "bunx @biomejs/biome check .",
    "lint:fix": "bunx @biomejs/biome check --write .",
    "format": "bunx @biomejs/biome format --write .",
    "typecheck": "tsc --noEmit",
    "prepublishOnly": "bun run typecheck && bun test"
  },
  "keywords": [
    "pr",
    "pull-request",
    "code-review",
    "ai",
    "llm",
    "github",
    "cli",
    "tui",
    "claude",
    "openrouter",
    "bun"
  ],
  "author": "Jiwon Park <jiwon@sionic.ai>",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jiwonMe/newpr.git"
  },
  "homepage": "https://github.com/jiwonMe/newpr",
  "bugs": {
    "url": "https://github.com/jiwonMe/newpr/issues"
  },
  "engines": {
    "bun": ">=1.3.0"
  },
  "devDependencies": {
    "@biomejs/biome": "latest",
    "@types/bun": "latest",
    "@types/react": "19.1.0",
    "@types/react-dom": "^19.2.3"
  },
  "peerDependencies": {
    "typescript": "^5"
  },
  "dependencies": {
    "@radix-ui/react-dropdown-menu": "^2.1.16",
    "@radix-ui/react-scroll-area": "^1.2.10",
    "@radix-ui/react-slot": "^1.2.4",
    "@radix-ui/react-tabs": "^1.1.13",
    "@tailwindcss/cli": "^4.1.18",
    "class-variance-authority": "^0.7.1",
    "clsx": "^2.1.1",
    "ink": "6.6.0",
    "ink-spinner": "5.0.0",
    "ink-text-input": "6.0.0",
    "lucide-react": "^0.567.0",
    "react": "19.1.0",
    "react-dom": "19.1.0",
    "react-markdown": "^10.1.0",
    "remark-gfm": "^4.0.1",
    "tailwind-merge": "^3.4.1",
    "tailwindcss": "^4.1.18",
    "tailwindcss-animate": "^1.0.7"
  }
}
package/src/analyzer/errors.ts
ADDED
@@ -0,0 +1,22 @@
export class AnalysisError extends Error {
  constructor(
    message: string,
    public readonly stage: string,
    public override readonly cause?: Error,
  ) {
    super(message);
    this.name = "AnalysisError";
  }
}

export class PartialAnalysisError extends AnalysisError {
  constructor(
    message: string,
    stage: string,
    public readonly failedFiles: string[],
    cause?: Error,
  ) {
    super(message, stage, cause);
    this.name = "PartialAnalysisError";
  }
}
package/src/analyzer/pipeline.ts
ADDED
@@ -0,0 +1,299 @@
import type { NewprConfig } from "../types/config.ts";
import type { DiffChunk } from "../types/diff.ts";
import type { PrIdentifier } from "../types/github.ts";
import type { FileChange, FileGroup, NewprOutput, PrSummary } from "../types/output.ts";
import type { ExplorationResult } from "../workspace/types.ts";
import type { AgentToolName } from "../workspace/types.ts";
import { parseDiff } from "../diff/parser.ts";
import { chunkDiff } from "../diff/chunker.ts";
import { fetchPrData } from "../github/fetch-pr.ts";
import { fetchPrDiff } from "../github/fetch-diff.ts";
import { createLlmClient, type LlmClient, type LlmResponse } from "../llm/client.ts";
import {
  buildFileSummaryPrompt,
  buildGroupingPrompt,
  buildOverallSummaryPrompt,
  buildNarrativePrompt,
  buildEnrichedSummaryPrompt,
  buildEnrichedNarrativePrompt,
  type FileSummaryInput,
  type PromptContext,
} from "../llm/prompts.ts";
import {
  parseFileSummaries,
  parseGroups,
  parseSummary,
  parseNarrative,
} from "../llm/response-parser.ts";
import { ensureRepo } from "../workspace/repo-cache.ts";
import { createWorktrees, cleanupWorktrees } from "../workspace/worktree.ts";
import { requireAgent } from "../workspace/agent.ts";
import { exploreCodebase } from "../workspace/explore.ts";
import type { ProgressCallback, ProgressStage } from "./progress.ts";
import { createSilentProgress } from "./progress.ts";

async function streamLlmCall(
  client: LlmClient,
  system: string,
  user: string,
  stage: ProgressStage,
  message: string,
  progress: ProgressCallback,
): Promise<LlmResponse> {
  return client.completeStream(system, user, (_chunk, accumulated) => {
    progress({ stage, message, partial_content: accumulated });
  });
}

interface PipelineOptions {
  pr: PrIdentifier;
  token: string;
  config: NewprConfig;
  onProgress?: ProgressCallback;
  noClone?: boolean;
  preferredAgent?: AgentToolName;
}

async function analyzeFileChunkBatch(
  client: LlmClient,
  chunks: DiffChunk[],
  ctx?: import("../llm/prompts.ts").PromptContext,
): Promise<Array<{ path: string; summary: string }>> {
  const { system, user } = buildFileSummaryPrompt(chunks, ctx);
  const response = await client.complete(system, user);
  return parseFileSummaries(response.content);
}

function batchChunks(chunks: DiffChunk[], batchSize: number): DiffChunk[][] {
  const batches: DiffChunk[][] = [];
  for (let i = 0; i < chunks.length; i += batchSize) {
    batches.push(chunks.slice(i, i + batchSize));
  }
  return batches;
}

async function runExploration(
  pr: PrIdentifier,
  token: string,
  baseBranch: string,
  changedFiles: string[],
  prTitle: string,
  rawDiff: string,
  preferredAgent?: AgentToolName,
  onProgress?: ProgressCallback,
): Promise<ExplorationResult> {
  const agent = await requireAgent(preferredAgent);

  onProgress?.({ stage: "cloning", message: `${pr.owner}/${pr.repo}` });
  const bareRepoPath = await ensureRepo(pr.owner, pr.repo, token, (msg) => {
    onProgress?.({ stage: "cloning", message: msg });
  });
  onProgress?.({ stage: "cloning", message: `${pr.owner}/${pr.repo} ready` });

  onProgress?.({ stage: "checkout", message: `${baseBranch} ← PR #${pr.number}` });
  const worktrees = await createWorktrees(
    bareRepoPath, baseBranch, pr.number, pr.owner, pr.repo,
    (msg) => onProgress?.({ stage: "checkout", message: msg }),
  );
  onProgress?.({ stage: "checkout", message: `${baseBranch} ← PR #${pr.number} worktrees ready` });

  onProgress?.({ stage: "exploring", message: `${agent.name}: analyzing ${changedFiles.length} files...` });
  const exploration = await exploreCodebase(
    agent, worktrees.headPath, changedFiles, prTitle, rawDiff,
    (msg, current, total) => onProgress?.({ stage: "exploring", message: msg, current, total }),
  );
  onProgress?.({ stage: "exploring", message: `${agent.name}: exploration complete` });

  await cleanupWorktrees(bareRepoPath, pr.number, pr.owner, pr.repo).catch(() => {});

  return exploration;
}

const MAX_EXPLORE_RETRIES = 2;

async function tryExploreCodebase(
  pr: PrIdentifier,
  token: string,
  baseBranch: string,
  changedFiles: string[],
  prTitle: string,
  rawDiff: string,
  preferredAgent?: AgentToolName,
  onProgress?: ProgressCallback,
): Promise<ExplorationResult | null> {
  for (let attempt = 1; attempt <= MAX_EXPLORE_RETRIES; attempt++) {
    try {
      return await runExploration(
        pr, token, baseBranch, changedFiles, prTitle, rawDiff,
        preferredAgent, onProgress,
      );
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      if (attempt < MAX_EXPLORE_RETRIES) {
        onProgress?.({
          stage: "exploring",
          message: `Attempt ${attempt} failed: ${msg}. Retrying...`,
        });
        await cleanupWorktrees(
          await ensureRepo(pr.owner, pr.repo, token).catch(() => ""),
          pr.number, pr.owner, pr.repo,
        ).catch(() => {});
      } else {
        onProgress?.({
          stage: "exploring",
          message: `Exploration failed after ${MAX_EXPLORE_RETRIES} attempts: ${msg}`,
        });
      }
    }
  }
  return null;
}

export async function analyzePr(options: PipelineOptions): Promise<NewprOutput> {
  const { pr, token, config, noClone, preferredAgent } = options;
  const progress = options.onProgress ?? createSilentProgress();
  const client = createLlmClient({
    api_key: config.openrouter_api_key,
    model: config.model,
    timeout: config.timeout,
  });

  progress({ stage: "fetching", message: "Fetching PR data and diff..." });
  const [prData, rawDiff] = await Promise.all([
    fetchPrData(pr, token),
    fetchPrDiff(pr, token),
  ]);
  progress({ stage: "fetching", message: `#${prData.number} "${prData.title}" by ${prData.author} · +${prData.additions} −${prData.deletions}` });

  progress({ stage: "parsing", message: "Parsing diff..." });
  const parsed = parseDiff(rawDiff);
  const allChunks = chunkDiff(parsed);
  const chunks = allChunks.slice(0, config.max_files);
  const wasTruncated = allChunks.length > config.max_files;
  const changedFiles = chunks.map((c) => c.file_path);
  const totalAdd = chunks.reduce((s, c) => s + c.additions, 0);
  const totalDel = chunks.reduce((s, c) => s + c.deletions, 0);
  progress({ stage: "parsing", message: `${chunks.length} files · +${totalAdd} −${totalDel}${wasTruncated ? ` (${allChunks.length - config.max_files} skipped)` : ""}` });

  let exploration: ExplorationResult | null = null;
  if (!noClone) {
    exploration = await tryExploreCodebase(
      pr, token, prData.base_branch, changedFiles, prData.title, rawDiff,
      preferredAgent, progress,
    );
  }

  const promptCtx: PromptContext = { commits: prData.commits, language: config.language };
  const enrichedTag = exploration ? " + codebase context" : "";

  progress({
    stage: "analyzing",
    message: `Analyzing ${chunks.length} files${enrichedTag}...`,
  });

  const fileBatchSize = 10;
  const batches = batchChunks(chunks, fileBatchSize);
  const allFileSummaries: Array<{ path: string; summary: string }> = [];

  for (let i = 0; i < batches.length; i++) {
    const batchFiles = batches.slice(i, i + config.concurrency)
      .flat()
      .map((c) => c.file_path.split("/").pop() ?? c.file_path);
    progress({
      stage: "analyzing",
      message: batchFiles.join(", "),
      current: Math.min((i + 1) * fileBatchSize, chunks.length),
      total: chunks.length,
    });

    const concurrentBatches = batches.slice(i, i + config.concurrency);
    const results = await Promise.all(
      concurrentBatches.map((batch) => analyzeFileChunkBatch(client, batch, promptCtx)),
    );
    allFileSummaries.push(...results.flat());
    i += config.concurrency - 1;
  }
  progress({ stage: "analyzing", message: `${allFileSummaries.length} files summarized${enrichedTag}` });

  progress({ stage: "grouping", message: `Grouping ${chunks.length} files by purpose...` });
  const fileSummaryInputs: FileSummaryInput[] = chunks.map((chunk) => {
    const summary = allFileSummaries.find((s) => s.path === chunk.file_path);
    return {
      path: chunk.file_path,
      summary: summary?.summary ?? "No summary available",
      status: chunk.status,
    };
  });

  const { system: groupSystem, user: groupUser } = buildGroupingPrompt(fileSummaryInputs, promptCtx);
  const groupResponse = await streamLlmCall(
    client, groupSystem, groupUser, "grouping", "Grouping files...", progress,
  );
  const groups: FileGroup[] = parseGroups(groupResponse.content);
  progress({ stage: "grouping", message: `${groups.length} groups: ${groups.map((g) => g.name).join(", ")}` });

  progress({ stage: "summarizing", message: `Generating summary${enrichedTag}...` });
  const summaryPrompt = exploration
    ? buildEnrichedSummaryPrompt(prData.title, groups, allFileSummaries, exploration, promptCtx)
    : buildOverallSummaryPrompt(prData.title, groups, allFileSummaries, promptCtx);
  const summaryResponse = await streamLlmCall(
    client, summaryPrompt.system, summaryPrompt.user, "summarizing", "Generating summary...", progress,
  );
  const summary: PrSummary = parseSummary(summaryResponse.content);
  progress({ stage: "summarizing", message: `${summary.risk_level} risk · ${summary.purpose.slice(0, 60)}` });

  progress({ stage: "narrating", message: `Writing narrative${enrichedTag}...` });
  const narrativePrompt = exploration
    ? buildEnrichedNarrativePrompt(prData.title, summary, groups, exploration, promptCtx)
    : buildNarrativePrompt(prData.title, summary, groups, promptCtx);
  const narrativeResponse = await streamLlmCall(
    client, narrativePrompt.system, narrativePrompt.user, "narrating", "Writing narrative...", progress,
  );
  const narrative = parseNarrative(narrativeResponse.content);
  progress({ stage: "narrating", message: `${narrative.split("\n").length} lines` });

  progress({ stage: "done", message: "Analysis complete." });

  const fileGroupsMap = new Map<string, string[]>();
  for (const group of groups) {
    for (const filePath of group.files) {
      const existing = fileGroupsMap.get(filePath) ?? [];
      existing.push(group.name);
      fileGroupsMap.set(filePath, existing);
    }
  }

  const files: FileChange[] = chunks.map((chunk) => {
    const summaryEntry = allFileSummaries.find((s) => s.path === chunk.file_path);
    return {
      path: chunk.file_path,
      status: chunk.status,
      additions: chunk.additions,
      deletions: chunk.deletions,
      summary: summaryEntry?.summary ?? "No summary available",
      groups: fileGroupsMap.get(chunk.file_path) ?? ["Ungrouped"],
    };
  });

  return {
    meta: {
      pr_number: prData.number,
      pr_title: prData.title,
      pr_url: prData.url,
      base_branch: prData.base_branch,
      head_branch: prData.head_branch,
      author: prData.author,
      author_avatar: prData.author_avatar,
      author_url: prData.author_url,
      total_files_changed: prData.changed_files,
      total_additions: prData.additions,
      total_deletions: prData.deletions,
      analyzed_at: new Date().toISOString(),
      model_used: config.model,
    },
    summary,
    groups,
    files,
    narrative,
  };
}
package/src/analyzer/progress.ts
ADDED
@@ -0,0 +1,69 @@
export type ProgressStage =
  | "fetching"
  | "cloning"
  | "checkout"
  | "exploring"
  | "parsing"
  | "analyzing"
  | "grouping"
  | "summarizing"
  | "narrating"
  | "done";

export interface ProgressEvent {
  stage: ProgressStage;
  message: string;
  current?: number;
  total?: number;
  partial_content?: string;
  timestamp?: number;
}

export type ProgressCallback = (event: ProgressEvent) => void;

const STAGE_ORDER: ProgressStage[] = [
  "fetching", "parsing", "cloning", "checkout", "exploring",
  "analyzing", "grouping", "summarizing", "narrating", "done",
];

export function stageIndex(stage: ProgressStage): number {
  return STAGE_ORDER.indexOf(stage);
}

export function allStages(): ProgressStage[] {
  return [...STAGE_ORDER];
}

export function createStderrProgress(): ProgressCallback {
  return (event: ProgressEvent) => {
    const prefix = "[newpr]";
    const progress =
      event.current !== undefined && event.total !== undefined
        ? ` (${event.current}/${event.total})`
        : "";
    process.stderr.write(`${prefix} ${event.stage}: ${event.message}${progress}\n`);
  };
}

export function createSilentProgress(): ProgressCallback {
  return () => {};
}

export function createStreamJsonProgress(): ProgressCallback {
  const startTime = Date.now();
  return (event: ProgressEvent) => {
    const line = JSON.stringify({
      type: "progress",
      timestamp: new Date().toISOString(),
      elapsed_ms: Date.now() - startTime,
      stage: event.stage,
      stage_index: stageIndex(event.stage),
      total_stages: STAGE_ORDER.length,
      message: event.message,
      current: event.current ?? null,
      total: event.total ?? null,
      partial_content: event.partial_content ?? null,
    });
    process.stdout.write(`${line}\n`);
  };
}