gh-load-pull-request 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +24 -0
- package/README.md +233 -0
- package/package.json +80 -0
- package/src/gh-download-pull-request.mjs +924 -0
- package/src/version.mjs +100 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
This is free and unencumbered software released into the public domain.
|
|
2
|
+
|
|
3
|
+
Anyone is free to copy, modify, publish, use, compile, sell, or
|
|
4
|
+
distribute this software, either in source code form or as a compiled
|
|
5
|
+
binary, for any purpose, commercial or non-commercial, and by any
|
|
6
|
+
means.
|
|
7
|
+
|
|
8
|
+
In jurisdictions that recognize copyright laws, the author or authors
|
|
9
|
+
of this software dedicate any and all copyright interest in the
|
|
10
|
+
software to the public domain. We make this dedication for the benefit
|
|
11
|
+
of the public at large and to the detriment of our heirs and
|
|
12
|
+
successors. We intend this dedication to be an overt act of
|
|
13
|
+
relinquishment in perpetuity of all present and future rights to this
|
|
14
|
+
software under copyright law.
|
|
15
|
+
|
|
16
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
17
|
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
|
18
|
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
|
19
|
+
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
|
20
|
+
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
|
21
|
+
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
|
22
|
+
OTHER DEALINGS IN THE SOFTWARE.
|
|
23
|
+
|
|
24
|
+
For more information, please refer to <https://unlicense.org>
|
package/README.md
ADDED
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
# gh-download-pull-request
|
|
2
|
+
|
|
3
|
+
[](https://www.npmjs.com/package/gh-download-pull-request)
|
|
4
|
+
|
|
5
|
+
Download GitHub pull request and convert it to markdown - perfect for AI review and analysis.
|
|
6
|
+
|
|
7
|
+
## Features
|
|
8
|
+
|
|
9
|
+
- Download any GitHub pull request as markdown
|
|
10
|
+
- Includes PR metadata, commits, files, reviews, and comments
|
|
11
|
+
- Support for both public and private repositories
|
|
12
|
+
- Multiple input formats for convenience
|
|
13
|
+
- GitHub CLI integration for seamless authentication
|
|
14
|
+
- Output to file or stdout
|
|
15
|
+
|
|
16
|
+
## Quick Start
|
|
17
|
+
|
|
18
|
+
```bash
|
|
19
|
+
# Download a PR and display as markdown
|
|
20
|
+
gh-download-pull-request https://github.com/owner/repo/pull/123
|
|
21
|
+
|
|
22
|
+
# Using shorthand format
|
|
23
|
+
gh-download-pull-request owner/repo#123
|
|
24
|
+
|
|
25
|
+
# Save to file
|
|
26
|
+
gh-download-pull-request owner/repo#123 -o pr.md
|
|
27
|
+
|
|
28
|
+
# Download private PR (uses gh CLI auth automatically)
|
|
29
|
+
gh-download-pull-request owner/private-repo#456
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
## Installation
|
|
33
|
+
|
|
34
|
+
### Global Installation (Recommended)
|
|
35
|
+
|
|
36
|
+
Install globally for system-wide access:
|
|
37
|
+
|
|
38
|
+
```bash
|
|
39
|
+
# Using bun
|
|
40
|
+
bun install -g gh-download-pull-request
|
|
41
|
+
|
|
42
|
+
# Using npm
|
|
43
|
+
npm install -g gh-download-pull-request
|
|
44
|
+
|
|
45
|
+
# After installation, use anywhere:
|
|
46
|
+
gh-download-pull-request --help
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
### Uninstall
|
|
50
|
+
|
|
51
|
+
Remove the global installation:
|
|
52
|
+
|
|
53
|
+
```bash
|
|
54
|
+
# Using bun
|
|
55
|
+
bun uninstall -g gh-download-pull-request
|
|
56
|
+
|
|
57
|
+
# Using npm
|
|
58
|
+
npm uninstall -g gh-download-pull-request
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
### Local Installation
|
|
62
|
+
|
|
63
|
+
```bash
|
|
64
|
+
# Clone the repository
|
|
65
|
+
git clone https://github.com/link-foundation/gh-download-pull-request.git
|
|
66
|
+
cd gh-download-pull-request
|
|
67
|
+
|
|
68
|
+
# Install dependencies
|
|
69
|
+
npm install
|
|
70
|
+
|
|
71
|
+
# Make the script executable
|
|
72
|
+
chmod +x src/gh-download-pull-request.mjs
|
|
73
|
+
|
|
74
|
+
# Run it
|
|
75
|
+
./src/gh-download-pull-request.mjs --help
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
## Usage
|
|
79
|
+
|
|
80
|
+
```
|
|
81
|
+
Usage: gh-download-pull-request <pr-url> [options]
|
|
82
|
+
|
|
83
|
+
Options:
|
|
84
|
+
-t, --token GitHub personal access token (optional for public PRs)
|
|
85
|
+
-o, --output Output file path (default: stdout)
|
|
86
|
+
-h, --help Show help
|
|
87
|
+
--version Show version number
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
## Input Formats
|
|
91
|
+
|
|
92
|
+
The tool supports multiple formats for specifying a pull request:
|
|
93
|
+
|
|
94
|
+
1. **Full URL**: `https://github.com/owner/repo/pull/123`
|
|
95
|
+
2. **Shorthand with #**: `owner/repo#123`
|
|
96
|
+
3. **Shorthand with /**: `owner/repo/123`
|
|
97
|
+
|
|
98
|
+
## Authentication
|
|
99
|
+
|
|
100
|
+
The tool supports multiple authentication methods for accessing private repositories:
|
|
101
|
+
|
|
102
|
+
### 1. GitHub CLI (Recommended)
|
|
103
|
+
|
|
104
|
+
If you have [GitHub CLI](https://cli.github.com/) installed and authenticated, the tool will automatically use your credentials:
|
|
105
|
+
|
|
106
|
+
```bash
|
|
107
|
+
# Authenticate with GitHub CLI (one-time setup)
|
|
108
|
+
gh auth login
|
|
109
|
+
|
|
110
|
+
# Tool automatically detects and uses gh CLI authentication
|
|
111
|
+
gh-download-pull-request owner/private-repo#123
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
### 2. Environment Variable
|
|
115
|
+
|
|
116
|
+
Set the `GITHUB_TOKEN` environment variable:
|
|
117
|
+
|
|
118
|
+
```bash
|
|
119
|
+
export GITHUB_TOKEN=ghp_your_token_here
|
|
120
|
+
gh-download-pull-request owner/repo#123
|
|
121
|
+
```
|
|
122
|
+
|
|
123
|
+
### 3. Command Line Token
|
|
124
|
+
|
|
125
|
+
Pass the token directly with `--token`:
|
|
126
|
+
|
|
127
|
+
```bash
|
|
128
|
+
gh-download-pull-request owner/repo#123 --token ghp_your_token_here
|
|
129
|
+
```
|
|
130
|
+
|
|
131
|
+
### Authentication Priority
|
|
132
|
+
|
|
133
|
+
The tool uses this fallback chain:
|
|
134
|
+
|
|
135
|
+
1. `--token` command line argument (highest priority)
|
|
136
|
+
2. `GITHUB_TOKEN` environment variable
|
|
137
|
+
3. GitHub CLI authentication (if `gh` is installed and authenticated)
|
|
138
|
+
4. No authentication (public PRs only)
|
|
139
|
+
|
|
140
|
+
## Output Format
|
|
141
|
+
|
|
142
|
+
The markdown output includes:
|
|
143
|
+
|
|
144
|
+
- **Header**: PR title
|
|
145
|
+
- **Metadata**: PR number, author, status, dates, branch info, stats
|
|
146
|
+
- **Description**: Full PR description/body
|
|
147
|
+
- **Commits**: List of all commits with links and authors
|
|
148
|
+
- **Files Changed**: All modified files with change stats
|
|
149
|
+
- **Reviews**: All PR reviews with approval status
|
|
150
|
+
- **Review Comments**: Inline code review comments with diff context
|
|
151
|
+
- **Comments**: General discussion comments
|
|
152
|
+
|
|
153
|
+
## Examples
|
|
154
|
+
|
|
155
|
+
```bash
|
|
156
|
+
# Basic usage - download and display PR
|
|
157
|
+
gh-download-pull-request https://github.com/facebook/react/pull/28000
|
|
158
|
+
|
|
159
|
+
# Using shorthand format
|
|
160
|
+
gh-download-pull-request facebook/react#28000
|
|
161
|
+
|
|
162
|
+
# Save to file
|
|
163
|
+
gh-download-pull-request facebook/react#28000 -o react-pr-28000.md
|
|
164
|
+
|
|
165
|
+
# Download private PR using gh CLI auth
|
|
166
|
+
gh-download-pull-request myorg/private-repo#42
|
|
167
|
+
|
|
168
|
+
# Download with explicit token
|
|
169
|
+
gh-download-pull-request myorg/repo#123 --token ghp_your_token_here
|
|
170
|
+
|
|
171
|
+
# Pipe to other tools (e.g., AI for review)
|
|
172
|
+
gh-download-pull-request owner/repo#123 | claude-analyze
|
|
173
|
+
```
|
|
174
|
+
|
|
175
|
+
## Requirements
|
|
176
|
+
|
|
177
|
+
- [Bun](https://bun.sh/) (>=1.2.0) or [Node.js](https://nodejs.org/) (>=22.17.0) runtime
|
|
178
|
+
- For private repositories (optional):
|
|
179
|
+
- [GitHub CLI](https://cli.github.com/) (recommended) OR
|
|
180
|
+
- GitHub personal access token (via `--token` or `GITHUB_TOKEN` env var)
|
|
181
|
+
|
|
182
|
+
## Use Cases
|
|
183
|
+
|
|
184
|
+
- **AI Code Review**: Download PRs as markdown for analysis by AI assistants
|
|
185
|
+
- **Documentation**: Archive important PRs for future reference
|
|
186
|
+
- **Offline Review**: Review PRs without internet connection
|
|
187
|
+
- **Custom Analysis**: Process PR data with custom scripts
|
|
188
|
+
- **Team Workflows**: Integrate PR data into custom review processes
|
|
189
|
+
|
|
190
|
+
## Testing
|
|
191
|
+
|
|
192
|
+
```bash
|
|
193
|
+
# Run all tests
|
|
194
|
+
npm test
|
|
195
|
+
|
|
196
|
+
# Or run test files directly
|
|
197
|
+
node tests/all.test.mjs
|
|
198
|
+
node tests/cli.test.mjs
|
|
199
|
+
```
|
|
200
|
+
|
|
201
|
+
## Development
|
|
202
|
+
|
|
203
|
+
```bash
|
|
204
|
+
# Clone the repository
|
|
205
|
+
git clone https://github.com/link-foundation/gh-download-pull-request.git
|
|
206
|
+
cd gh-download-pull-request
|
|
207
|
+
|
|
208
|
+
# Install dependencies
|
|
209
|
+
npm install
|
|
210
|
+
|
|
211
|
+
# Make executable
|
|
212
|
+
chmod +x src/gh-download-pull-request.mjs
|
|
213
|
+
|
|
214
|
+
# Test locally
|
|
215
|
+
./src/gh-download-pull-request.mjs owner/repo#123
|
|
216
|
+
|
|
217
|
+
# Run tests
|
|
218
|
+
npm test
|
|
219
|
+
|
|
220
|
+
# Run linting
|
|
221
|
+
npm run lint
|
|
222
|
+
|
|
223
|
+
# Bump version
|
|
224
|
+
./src/version.mjs patch # or minor, major
|
|
225
|
+
```
|
|
226
|
+
|
|
227
|
+
## Related Projects
|
|
228
|
+
|
|
229
|
+
- [gh-pull-all](https://github.com/link-foundation/gh-pull-all) - Efficiently sync all repositories from a GitHub organization or user
|
|
230
|
+
|
|
231
|
+
## License
|
|
232
|
+
|
|
233
|
+
This project is released into the public domain under The Unlicense - see [LICENSE](LICENSE) file for details.
|
package/package.json
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "gh-load-pull-request",
|
|
3
|
+
"version": "0.4.0",
|
|
4
|
+
"description": "Download GitHub pull request and convert it to markdown",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "src/gh-download-pull-request.mjs",
|
|
7
|
+
"bin": {
|
|
8
|
+
"gh-download-pull-request": "./src/gh-download-pull-request.mjs"
|
|
9
|
+
},
|
|
10
|
+
"scripts": {
|
|
11
|
+
"test": "node --test tests/",
|
|
12
|
+
"lint": "eslint .",
|
|
13
|
+
"lint:fix": "eslint . --fix",
|
|
14
|
+
"format": "prettier --write .",
|
|
15
|
+
"format:check": "prettier --check .",
|
|
16
|
+
"check:file-size": "node scripts/check-file-size.mjs",
|
|
17
|
+
"check": "npm run lint && npm run format:check && npm run check:file-size",
|
|
18
|
+
"prepare": "husky",
|
|
19
|
+
"changeset": "changeset",
|
|
20
|
+
"changeset:version": "node scripts/changeset-version.mjs",
|
|
21
|
+
"changeset:publish": "changeset publish",
|
|
22
|
+
"changeset:status": "changeset status --since=origin/main",
|
|
23
|
+
"version:patch": "./src/version.mjs patch",
|
|
24
|
+
"version:minor": "./src/version.mjs minor",
|
|
25
|
+
"version:major": "./src/version.mjs major"
|
|
26
|
+
},
|
|
27
|
+
"keywords": [
|
|
28
|
+
"github",
|
|
29
|
+
"pull-request",
|
|
30
|
+
"markdown",
|
|
31
|
+
"cli",
|
|
32
|
+
"tool",
|
|
33
|
+
"pr",
|
|
34
|
+
"download"
|
|
35
|
+
],
|
|
36
|
+
"author": "Link.Foundation",
|
|
37
|
+
"license": "Unlicense",
|
|
38
|
+
"repository": {
|
|
39
|
+
"type": "git",
|
|
40
|
+
"url": "git+https://github.com/link-foundation/gh-download-pull-request.git"
|
|
41
|
+
},
|
|
42
|
+
"bugs": {
|
|
43
|
+
"url": "https://github.com/link-foundation/gh-download-pull-request/issues"
|
|
44
|
+
},
|
|
45
|
+
"homepage": "https://github.com/link-foundation/gh-download-pull-request#readme",
|
|
46
|
+
"engines": {
|
|
47
|
+
"node": ">=20.0.0"
|
|
48
|
+
},
|
|
49
|
+
"files": [
|
|
50
|
+
"src/",
|
|
51
|
+
"README.md",
|
|
52
|
+
"LICENSE"
|
|
53
|
+
],
|
|
54
|
+
"dependencies": {
|
|
55
|
+
"@octokit/rest": "^22.0.0",
|
|
56
|
+
"fs-extra": "^11.3.0",
|
|
57
|
+
"yargs": "^17.7.2"
|
|
58
|
+
},
|
|
59
|
+
"devDependencies": {
|
|
60
|
+
"@changesets/cli": "^2.29.7",
|
|
61
|
+
"eslint": "^9.38.0",
|
|
62
|
+
"eslint-config-prettier": "^10.1.8",
|
|
63
|
+
"eslint-plugin-prettier": "^5.5.4",
|
|
64
|
+
"husky": "^9.1.7",
|
|
65
|
+
"lint-staged": "^16.2.6",
|
|
66
|
+
"prettier": "^3.6.2",
|
|
67
|
+
"test-anywhere": "^0.8.48"
|
|
68
|
+
},
|
|
69
|
+
"lint-staged": {
|
|
70
|
+
"*.{js,mjs,cjs}": [
|
|
71
|
+
"eslint --fix --max-warnings 0",
|
|
72
|
+
"prettier --write",
|
|
73
|
+
"prettier --check"
|
|
74
|
+
],
|
|
75
|
+
"*.md": [
|
|
76
|
+
"prettier --write",
|
|
77
|
+
"prettier --check"
|
|
78
|
+
]
|
|
79
|
+
}
|
|
80
|
+
}
|
|
@@ -0,0 +1,924 @@
|
|
|
1
|
+
#!/usr/bin/env sh
|
|
2
|
+
':'; // # ; exec "$(command -v bun || command -v node)" "$0" "$@"
|
|
3
|
+
|
|
4
|
+
// Import built-in Node.js modules
|
|
5
|
+
import path from 'node:path';
|
|
6
|
+
import { fileURLToPath } from 'node:url';
|
|
7
|
+
import https from 'node:https';
|
|
8
|
+
import http from 'node:http';
|
|
9
|
+
|
|
10
|
+
// Get __dirname equivalent for ES modules
|
|
11
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
12
|
+
const __dirname = path.dirname(__filename);
|
|
13
|
+
|
|
14
|
+
// Import npm dependencies
|
|
15
|
+
import { Octokit } from '@octokit/rest';
|
|
16
|
+
import fs from 'fs-extra';
|
|
17
|
+
import yargs from 'yargs';
|
|
18
|
+
import { hideBin } from 'yargs/helpers';
|
|
19
|
+
|
|
20
|
+
// Get version from package.json or fallback
|
|
21
|
+
let version = '0.1.0'; // Fallback version
|
|
22
|
+
|
|
23
|
+
try {
|
|
24
|
+
const packagePath = path.join(__dirname, 'package.json');
|
|
25
|
+
// Use node:fs for Deno compatibility (fs-extra has issues with Deno)
|
|
26
|
+
const { readFileSync, existsSync } = await import('node:fs');
|
|
27
|
+
if (existsSync(packagePath)) {
|
|
28
|
+
const packageJson = JSON.parse(readFileSync(packagePath, 'utf8'));
|
|
29
|
+
version = packageJson.version;
|
|
30
|
+
}
|
|
31
|
+
} catch (_error) {
|
|
32
|
+
// Use fallback version if package.json can't be read
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
// Colors for console output
|
|
36
|
+
const colors = {
|
|
37
|
+
green: '\x1b[32m',
|
|
38
|
+
yellow: '\x1b[33m',
|
|
39
|
+
blue: '\x1b[34m',
|
|
40
|
+
red: '\x1b[31m',
|
|
41
|
+
cyan: '\x1b[36m',
|
|
42
|
+
magenta: '\x1b[35m',
|
|
43
|
+
dim: '\x1b[2m',
|
|
44
|
+
bold: '\x1b[1m',
|
|
45
|
+
reset: '\x1b[0m',
|
|
46
|
+
};
|
|
47
|
+
|
|
48
|
+
// Verbose logging flag (set by CLI option)
|
|
49
|
+
let verboseMode = false;
|
|
50
|
+
|
|
51
|
+
const log = (color, message) =>
|
|
52
|
+
console.error(`${colors[color]}${message}${colors.reset}`);
|
|
53
|
+
|
|
54
|
+
const verboseLog = (color, message) => {
|
|
55
|
+
if (verboseMode) {
|
|
56
|
+
log(color, message);
|
|
57
|
+
}
|
|
58
|
+
};
|
|
59
|
+
|
|
60
|
+
// Check whether the GitHub CLI (`gh`) binary is available on this machine.
// Resolves true when `gh --version` runs successfully; resolves false in
// every failure mode (binary missing, non-zero exit, import failure).
async function isGhInstalled() {
  try {
    const childProcess = await import('node:child_process');
    childProcess.execSync('gh --version', { stdio: 'pipe' });
    return true;
  } catch (_error) {
    // Any failure (command not found, non-zero exit) means gh is unusable.
    return false;
  }
}
|
|
70
|
+
|
|
71
|
+
// Obtain a GitHub token from the authenticated `gh` CLI, if possible.
// Resolves to the trimmed token string, or null when gh is not installed,
// not logged in, or `gh auth token` fails for any other reason.
async function getGhToken() {
  try {
    const ghAvailable = await isGhInstalled();
    if (!ghAvailable) {
      return null;
    }

    const childProcess = await import('node:child_process');
    const output = childProcess.execSync('gh auth token', {
      encoding: 'utf8',
      stdio: 'pipe',
    });
    return output.trim();
  } catch (_error) {
    // Treat every failure as "no token available"; callers fall back to
    // other auth sources (env var, --token, or anonymous access).
    return null;
  }
}
|
|
88
|
+
|
|
89
|
+
// Parse a pull-request reference into { owner, repo, prNumber }.
// Accepted formats (tried in this order):
//   https://github.com/owner/repo/pull/123
//   owner/repo#123
//   owner/repo/123
// Returns null when the input matches none of them.
function parsePrUrl(url) {
  // Each pattern captures: [1] owner, [2] repo, [3] PR number.
  const patterns = [
    /github\.com\/([^/]+)\/([^/]+)\/pull\/(\d+)/, // full URL
    /^([^/]+)\/([^#/]+)#(\d+)$/, // shorthand: owner/repo#123
    /^([^/]+)\/([^/]+)\/(\d+)$/, // alternative: owner/repo/123
  ];

  for (const pattern of patterns) {
    const match = url.match(pattern);
    if (match) {
      return {
        owner: match[1],
        repo: match[2],
        prNumber: parseInt(match[3], 10),
      };
    }
  }

  return null;
}
|
|
128
|
+
|
|
129
|
+
// Magic-byte signatures used to recognize common image formats.
const imageMagicBytes = {
  png: [0x89, 0x50, 0x4e, 0x47],
  jpg: [0xff, 0xd8, 0xff],
  gif: [0x47, 0x49, 0x46, 0x38],
  webp: [0x52, 0x49, 0x46, 0x46], // RIFF header for WebP
  bmp: [0x42, 0x4d],
  ico: [0x00, 0x00, 0x01, 0x00],
  svg: [0x3c, 0x3f, 0x78, 0x6d, 0x6c], // <?xml for SVG (though SVG can also start with <svg)
};

// Inspect a downloaded buffer and decide whether it looks like an image.
// Returns { valid: true, format } on success, or { valid: false, reason }
// when the buffer is too small or is an HTML payload (typically an error
// page served in place of the image).
function validateImageBuffer(buffer, url) {
  if (!buffer || buffer.length < 4) {
    return { valid: false, reason: 'Buffer too small' };
  }

  const head = [...buffer.slice(0, 8)];
  const startsWith = (signature) =>
    signature.every((byte, index) => head[index] === byte);

  // Reject HTML error pages: <!DOCTYPE / <! / <html / <HTML prefixes.
  const htmlPrefixes = [
    [0x3c, 0x21], // <!
    [0x3c, 0x68, 0x74, 0x6d, 0x6c], // <html
    [0x3c, 0x48, 0x54, 0x4d, 0x4c], // <HTML
  ];
  if (htmlPrefixes.some(startsWith)) {
    return {
      valid: false,
      reason: 'Downloaded file is HTML (likely error page)',
    };
  }

  // Match against the known image signatures.
  for (const [format, signature] of Object.entries(imageMagicBytes)) {
    if (startsWith(signature)) {
      return { valid: true, format };
    }
  }

  // SVG files may begin with "<svg" directly rather than an XML prologue.
  if (startsWith([0x3c, 0x73, 0x76, 0x67])) {
    return { valid: true, format: 'svg' };
  }

  // Unrecognized but non-HTML content: accept it, since some images carry
  // unusual headers; log the leading bytes to aid debugging.
  verboseLog(
    'yellow',
    `⚠️ Unknown image format for ${url}, bytes: [${head
      .slice(0, 8)
      .map((b) => `0x${b.toString(16)}`)
      .join(', ')}]`
  );
  return { valid: true, format: 'unknown' };
}
|
|
188
|
+
|
|
189
|
+
// Map a detected image format (or, failing that, the extension embedded in
// the image URL's path) to a file extension. Normalizes '.jpeg' to '.jpg'
// and falls back to '.png' when nothing can be determined.
function getExtensionFromFormat(format, url) {
  const formatExtensions = {
    png: '.png',
    jpg: '.jpg',
    gif: '.gif',
    webp: '.webp',
    bmp: '.bmp',
    ico: '.ico',
    svg: '.svg',
  };

  const known = format ? formatExtensions[format] : undefined;
  if (known) {
    return known;
  }

  // Fall back to the extension found in the URL path, if recognized.
  const recognizedExtensions = [
    '.png',
    '.jpg',
    '.jpeg',
    '.gif',
    '.webp',
    '.bmp',
    '.ico',
    '.svg',
  ];
  try {
    const pathname = new globalThis.URL(url).pathname;
    const urlExt = path.extname(pathname).toLowerCase();
    if (urlExt && recognizedExtensions.includes(urlExt)) {
      return urlExt === '.jpeg' ? '.jpg' : urlExt;
    }
  } catch (_e) {
    // Not a parseable URL — ignore and use the default below.
  }

  return '.png'; // Default fallback
}
|
|
230
|
+
|
|
231
|
+
// Download a file over HTTP(S), following up to `maxRedirects` redirects.
// Resolves with the full response body as a Buffer; rejects on network
// errors, non-200 status codes, a redirect budget of zero, or a 30s stall.
// The token is only attached as an Authorization header when the target
// hostname contains "github" (re-checked per hop), so redirects to
// third-party CDNs do not receive the credential.
function downloadFile(url, token, maxRedirects = 5) {
  return new Promise((resolve, reject) => {
    if (maxRedirects <= 0) {
      reject(new Error('Too many redirects'));
      return;
    }

    const parsedUrl = new globalThis.URL(url);
    // Pick the client module that matches the URL scheme.
    const protocol = parsedUrl.protocol === 'https:' ? https : http;

    const headers = {
      'User-Agent': 'gh-download-pull-request',
    };

    // Add auth for GitHub URLs
    if (token && parsedUrl.hostname.includes('github')) {
      headers['Authorization'] = `token ${token}`;
    }

    const req = protocol.get(url, { headers }, (res) => {
      // Handle redirects by recursing with a decremented budget; resolve()
      // adopts the inner promise, so its outcome is forwarded to the caller.
      if (
        res.statusCode >= 300 &&
        res.statusCode < 400 &&
        res.headers.location
      ) {
        verboseLog(
          'dim',
          ` ↳ Redirecting to: ${res.headers.location.substring(0, 80)}...`
        );
        resolve(downloadFile(res.headers.location, token, maxRedirects - 1));
        return;
      }

      if (res.statusCode !== 200) {
        reject(new Error(`HTTP ${res.statusCode}`));
        return;
      }

      // Accumulate body chunks and hand back a single concatenated Buffer.
      const chunks = [];
      res.on('data', (chunk) => chunks.push(chunk));
      res.on('end', () => resolve(Buffer.concat(chunks)));
      res.on('error', reject);
    });

    req.on('error', reject);
    // Abort requests that stall for more than 30 seconds.
    req.setTimeout(30000, () => {
      req.destroy();
      reject(new Error('Request timeout'));
    });
  });
}
|
|
284
|
+
|
|
285
|
+
// Collect image references from a markdown string: both markdown-style
// images (![alt](url) / ![alt](url "title")) and HTML <img src=...> tags.
// Returns an array of { url, alt } objects (alt is '' for HTML images);
// markdown matches come first, in document order within each kind.
function extractMarkdownImageUrls(content) {
  if (!content) {
    return [];
  }

  const collected = [];

  // Run a global regex over the content, pushing one entry per match.
  const collect = (regex, urlGroup, altGroup) => {
    let match;
    while ((match = regex.exec(content)) !== null) {
      collected.push({
        url: match[urlGroup],
        alt: altGroup == null ? '' : match[altGroup],
      });
    }
  };

  // Markdown images: ![alt](url) or ![alt](url "title")
  collect(/!\[([^\]]*)\]\(([^)\s]+)(?:\s+"[^"]*")?\)/g, 2, 1);
  // HTML images: <img src="url" /> or <img src='url'>
  collect(/<img[^>]+src=["']([^"']+)["'][^>]*>/gi, 1, null);

  return collected;
}
|
|
308
|
+
|
|
309
|
+
// Download every image referenced in `content` into `imagesDir` and rewrite
// the markdown so image links point at the local copies.
//
// Params:
//   content   - markdown text to scan (may be null/empty → returned as-is)
//   imagesDir - directory for downloaded files (created if missing)
//   token     - optional GitHub token forwarded to downloadFile()
//   _prNumber - unused, kept for interface compatibility
// Returns { content, downloadedImages } where downloadedImages lists
// { originalUrl, localPath, relativePath, format } for each saved file.
// Invalid payloads and failed downloads are skipped with a warning.
async function downloadImages(content, imagesDir, token, _prNumber) {
  if (!content) {
    return { content, downloadedImages: [] };
  }

  const images = extractMarkdownImageUrls(content);
  if (images.length === 0) {
    return { content, downloadedImages: [] };
  }

  const downloadedImages = [];
  let updatedContent = content;
  let imageCounter = 1;

  // Ensure images directory exists
  await fs.ensureDir(imagesDir);

  for (const { url } of images) {
    try {
      verboseLog('dim', ` 📥 Downloading: ${url.substring(0, 60)}...`);

      const buffer = await downloadFile(url, token);
      const validation = validateImageBuffer(buffer, url);

      if (!validation.valid) {
        log('yellow', ` ⚠️ Skipping invalid image: ${validation.reason}`);
        continue;
      }

      const ext = getExtensionFromFormat(validation.format, url);
      const filename = `image-${imageCounter}${ext}`;
      const localPath = path.join(imagesDir, filename);
      // FIX: interpolate the generated filename into the relative link.
      // Previously the literal text "$(unknown)" was emitted (shell-style
      // syntax, not JS interpolation), so every rewritten image link was
      // broken and identical, and `filename` went unused in the path.
      const relativePath = `./${path.basename(imagesDir)}/${filename}`;

      await fs.writeFile(localPath, buffer);
      downloadedImages.push({
        originalUrl: url,
        localPath,
        relativePath,
        format: validation.format,
      });

      // Replace every occurrence of the remote URL with the local path.
      updatedContent = updatedContent.split(url).join(relativePath);
      imageCounter++;

      // FIX: report the actual saved filename instead of "$(unknown)".
      verboseLog('green', ` ✅ Saved: ${filename} (${validation.format})`);
    } catch (error) {
      log('yellow', ` ⚠️ Failed to download image: ${error.message}`);
      verboseLog('dim', ` URL: ${url}`);
    }
  }

  return { content: updatedContent, downloadedImages };
}
|
|
365
|
+
|
|
366
|
+
// Fetch all data needed to render a pull request: the PR itself, changed
// files, issue comments, review comments, reviews (skipped when
// options.includeReviews === false), and commits.
//
// Params:
//   owner, repo, prNumber - target pull request coordinates
//   token                 - GitHub token (may be undefined for public PRs)
//   options.includeReviews - fetch reviews unless explicitly false
// Returns { pr, files, comments, reviewComments, reviews, commits }.
// Exits the process with code 1 on any API failure (404 / 401 / other),
// after logging a user-facing message — no partial output is possible
// without PR data, so this is fatal for the CLI.
async function fetchPullRequest(owner, repo, prNumber, token, options = {}) {
  try {
    log('blue', `🔍 Fetching pull request ${owner}/${repo}#${prNumber}...`);

    const octokit = new Octokit({
      auth: token,
      baseUrl: 'https://api.github.com',
    });

    // The API calls below are independent of one another, so issue them in
    // parallel with Promise.all instead of awaiting each one sequentially.
    const [
      { data: pr },
      { data: files },
      { data: comments },
      { data: reviewComments },
      { data: commits },
      reviewsResponse,
    ] = await Promise.all([
      octokit.rest.pulls.get({ owner, repo, pull_number: prNumber }),
      octokit.rest.pulls.listFiles({ owner, repo, pull_number: prNumber }),
      octokit.rest.issues.listComments({
        owner,
        repo,
        issue_number: prNumber,
      }),
      octokit.rest.pulls.listReviewComments({
        owner,
        repo,
        pull_number: prNumber,
      }),
      octokit.rest.pulls.listCommits({ owner, repo, pull_number: prNumber }),
      // Reviews are optional; resolve to null when disabled.
      options.includeReviews !== false
        ? octokit.rest.pulls.listReviews({
            owner,
            repo,
            pull_number: prNumber,
          })
        : Promise.resolve(null),
    ]);

    const reviews = reviewsResponse ? reviewsResponse.data : [];

    log('green', `✅ Successfully fetched PR data`);

    return {
      pr,
      files,
      comments,
      reviewComments,
      reviews,
      commits,
    };
  } catch (error) {
    if (error.status === 404) {
      log('red', `❌ Pull request not found: ${owner}/${repo}#${prNumber}`);
    } else if (error.status === 401) {
      log(
        'red',
        `❌ Authentication failed. Please provide a valid GitHub token`
      );
    } else {
      log('red', `❌ Failed to fetch pull request: ${error.message}`);
    }
    process.exit(1);
  }
}
|
|
447
|
+
|
|
448
|
+
// Conditionally run image downloading over a markdown fragment.
// When `downloadImagesFlag` is false the content passes through untouched;
// otherwise the work is delegated to downloadImages(). Always returns a
// Promise resolving to { content, downloadedImages }.
function processContent(
  content,
  imagesDir,
  token,
  prNumber,
  downloadImagesFlag
) {
  return downloadImagesFlag
    ? downloadImages(content, imagesDir, token, prNumber)
    : Promise.resolve({ content, downloadedImages: [] });
}
|
|
461
|
+
|
|
462
|
+
// Convert PR data to markdown
|
|
463
|
+
async function convertToMarkdown(data, options = {}) {
|
|
464
|
+
const { pr, files, comments, reviewComments, reviews, commits } = data;
|
|
465
|
+
const {
|
|
466
|
+
downloadImagesFlag = true,
|
|
467
|
+
imagesDir = '',
|
|
468
|
+
token = '',
|
|
469
|
+
prNumber = 0,
|
|
470
|
+
} = options;
|
|
471
|
+
|
|
472
|
+
let markdown = '';
|
|
473
|
+
let allDownloadedImages = [];
|
|
474
|
+
|
|
475
|
+
// Process PR body for images
|
|
476
|
+
let prBody = pr.body || '';
|
|
477
|
+
if (downloadImagesFlag && prBody) {
|
|
478
|
+
log('blue', '🖼️ Processing images in PR description...');
|
|
479
|
+
const result = await processContent(
|
|
480
|
+
prBody,
|
|
481
|
+
imagesDir,
|
|
482
|
+
token,
|
|
483
|
+
prNumber,
|
|
484
|
+
downloadImagesFlag
|
|
485
|
+
);
|
|
486
|
+
prBody = result.content;
|
|
487
|
+
allDownloadedImages = [...allDownloadedImages, ...result.downloadedImages];
|
|
488
|
+
}
|
|
489
|
+
|
|
490
|
+
// Header
|
|
491
|
+
markdown += `# ${pr.title}\n\n`;
|
|
492
|
+
|
|
493
|
+
// Metadata
|
|
494
|
+
markdown += `**Author:** @${pr.user.login}\n`;
|
|
495
|
+
markdown += `**Created:** ${pr.created_at}\n`;
|
|
496
|
+
markdown += `**State:** ${pr.state}\n`;
|
|
497
|
+
markdown += `**Branch:** ${pr.head.ref} → ${pr.base.ref}\n`;
|
|
498
|
+
|
|
499
|
+
// Labels
|
|
500
|
+
if (pr.labels && pr.labels.length > 0) {
|
|
501
|
+
markdown += `**Labels:** ${pr.labels.map((l) => l.name).join(', ')}\n`;
|
|
502
|
+
}
|
|
503
|
+
|
|
504
|
+
markdown += '\n## Description\n\n';
|
|
505
|
+
markdown += prBody ? `${prBody}\n\n` : '_No description provided._\n\n';
|
|
506
|
+
|
|
507
|
+
markdown += '---\n\n';
|
|
508
|
+
|
|
509
|
+
// Comments
|
|
510
|
+
if (comments.length > 0) {
|
|
511
|
+
markdown += `## Comments\n\n`;
|
|
512
|
+
for (const comment of comments) {
|
|
513
|
+
let commentBody = comment.body || '';
|
|
514
|
+
if (downloadImagesFlag && commentBody) {
|
|
515
|
+
verboseLog(
|
|
516
|
+
'blue',
|
|
517
|
+
`Processing images in comment by @${comment.user.login}...`
|
|
518
|
+
);
|
|
519
|
+
const result = await processContent(
|
|
520
|
+
commentBody,
|
|
521
|
+
imagesDir,
|
|
522
|
+
token,
|
|
523
|
+
prNumber,
|
|
524
|
+
downloadImagesFlag
|
|
525
|
+
);
|
|
526
|
+
commentBody = result.content;
|
|
527
|
+
allDownloadedImages = [
|
|
528
|
+
...allDownloadedImages,
|
|
529
|
+
...result.downloadedImages,
|
|
530
|
+
];
|
|
531
|
+
}
|
|
532
|
+
|
|
533
|
+
markdown += `### Comment by @${comment.user.login} (${comment.created_at})\n\n`;
|
|
534
|
+
markdown += `${commentBody}\n\n`;
|
|
535
|
+
markdown += '---\n\n';
|
|
536
|
+
}
|
|
537
|
+
}
|
|
538
|
+
|
|
539
|
+
// Reviews
|
|
540
|
+
if (reviews.length > 0) {
|
|
541
|
+
markdown += `## Reviews\n\n`;
|
|
542
|
+
for (const review of reviews) {
|
|
543
|
+
let reviewBody = review.body || '';
|
|
544
|
+
if (downloadImagesFlag && reviewBody) {
|
|
545
|
+
verboseLog(
|
|
546
|
+
'blue',
|
|
547
|
+
`Processing images in review by @${review.user.login}...`
|
|
548
|
+
);
|
|
549
|
+
const result = await processContent(
|
|
550
|
+
reviewBody,
|
|
551
|
+
imagesDir,
|
|
552
|
+
token,
|
|
553
|
+
prNumber,
|
|
554
|
+
downloadImagesFlag
|
|
555
|
+
);
|
|
556
|
+
reviewBody = result.content;
|
|
557
|
+
allDownloadedImages = [
|
|
558
|
+
...allDownloadedImages,
|
|
559
|
+
...result.downloadedImages,
|
|
560
|
+
];
|
|
561
|
+
}
|
|
562
|
+
|
|
563
|
+
markdown += `### Review by @${review.user.login} (${review.submitted_at})\n`;
|
|
564
|
+
markdown += `**State:** ${review.state}\n\n`;
|
|
565
|
+
|
|
566
|
+
if (reviewBody) {
|
|
567
|
+
markdown += `${reviewBody}\n\n`;
|
|
568
|
+
}
|
|
569
|
+
|
|
570
|
+
// Add review comments for this review
|
|
571
|
+
const reviewReviewComments = reviewComments.filter(
|
|
572
|
+
(rc) => rc.pull_request_review_id === review.id
|
|
573
|
+
);
|
|
574
|
+
if (reviewReviewComments.length > 0) {
|
|
575
|
+
markdown += `#### Review Comments\n\n`;
|
|
576
|
+
for (const rc of reviewReviewComments) {
|
|
577
|
+
let rcBody = rc.body || '';
|
|
578
|
+
if (downloadImagesFlag && rcBody) {
|
|
579
|
+
const result = await processContent(
|
|
580
|
+
rcBody,
|
|
581
|
+
imagesDir,
|
|
582
|
+
token,
|
|
583
|
+
prNumber,
|
|
584
|
+
downloadImagesFlag
|
|
585
|
+
);
|
|
586
|
+
rcBody = result.content;
|
|
587
|
+
allDownloadedImages = [
|
|
588
|
+
...allDownloadedImages,
|
|
589
|
+
...result.downloadedImages,
|
|
590
|
+
];
|
|
591
|
+
}
|
|
592
|
+
|
|
593
|
+
const lineInfo = rc.line ? `:${rc.line}` : '';
|
|
594
|
+
markdown += `**File:** ${rc.path}${lineInfo}\n`;
|
|
595
|
+
markdown += `${rcBody}\n\n`;
|
|
596
|
+
}
|
|
597
|
+
}
|
|
598
|
+
|
|
599
|
+
markdown += '---\n\n';
|
|
600
|
+
}
|
|
601
|
+
}
|
|
602
|
+
|
|
603
|
+
// Standalone review comments (not associated with a review)
|
|
604
|
+
const standaloneReviewComments = reviewComments.filter(
|
|
605
|
+
(rc) => !rc.pull_request_review_id
|
|
606
|
+
);
|
|
607
|
+
if (standaloneReviewComments.length > 0) {
|
|
608
|
+
markdown += `## Review Comments\n\n`;
|
|
609
|
+
for (const comment of standaloneReviewComments) {
|
|
610
|
+
let commentBody = comment.body || '';
|
|
611
|
+
if (downloadImagesFlag && commentBody) {
|
|
612
|
+
const result = await processContent(
|
|
613
|
+
commentBody,
|
|
614
|
+
imagesDir,
|
|
615
|
+
token,
|
|
616
|
+
prNumber,
|
|
617
|
+
downloadImagesFlag
|
|
618
|
+
);
|
|
619
|
+
commentBody = result.content;
|
|
620
|
+
allDownloadedImages = [
|
|
621
|
+
...allDownloadedImages,
|
|
622
|
+
...result.downloadedImages,
|
|
623
|
+
];
|
|
624
|
+
}
|
|
625
|
+
|
|
626
|
+
markdown += `**@${comment.user.login}** commented on \`${comment.path}\``;
|
|
627
|
+
if (comment.line) {
|
|
628
|
+
markdown += ` (line ${comment.line})`;
|
|
629
|
+
}
|
|
630
|
+
markdown += `:\n`;
|
|
631
|
+
markdown += `*${comment.created_at}*\n\n`;
|
|
632
|
+
markdown += `${commentBody}\n\n`;
|
|
633
|
+
if (comment.diff_hunk) {
|
|
634
|
+
markdown += '```diff\n';
|
|
635
|
+
markdown += `${comment.diff_hunk}\n`;
|
|
636
|
+
markdown += '```\n\n';
|
|
637
|
+
}
|
|
638
|
+
}
|
|
639
|
+
}
|
|
640
|
+
|
|
641
|
+
// Commits
|
|
642
|
+
if (commits.length > 0) {
|
|
643
|
+
markdown += `## Commits (${commits.length})\n\n`;
|
|
644
|
+
for (const commit of commits) {
|
|
645
|
+
const message = commit.commit.message.split('\n')[0]; // First line only
|
|
646
|
+
const sha = commit.sha.substring(0, 7);
|
|
647
|
+
markdown += `- [\`${sha}\`](${commit.html_url}) ${message} - @${commit.author?.login || 'unknown'}\n`;
|
|
648
|
+
}
|
|
649
|
+
markdown += '\n';
|
|
650
|
+
}
|
|
651
|
+
|
|
652
|
+
// Files changed
|
|
653
|
+
if (files.length > 0) {
|
|
654
|
+
markdown += `## Files Changed (${files.length})\n\n`;
|
|
655
|
+
for (const file of files) {
|
|
656
|
+
const statusIcon =
|
|
657
|
+
file.status === 'added'
|
|
658
|
+
? '🆕'
|
|
659
|
+
: file.status === 'removed'
|
|
660
|
+
? '🗑️'
|
|
661
|
+
: file.status === 'modified'
|
|
662
|
+
? '✏️'
|
|
663
|
+
: file.status === 'renamed'
|
|
664
|
+
? '📝'
|
|
665
|
+
: '📄';
|
|
666
|
+
markdown += `${statusIcon} **${file.filename}** (+${file.additions} -${file.deletions})\n`;
|
|
667
|
+
if (file.status === 'renamed') {
|
|
668
|
+
markdown += ` - Renamed from: \`${file.previous_filename}\`\n`;
|
|
669
|
+
}
|
|
670
|
+
}
|
|
671
|
+
markdown += '\n';
|
|
672
|
+
}
|
|
673
|
+
|
|
674
|
+
return { markdown, downloadedImages: allDownloadedImages };
|
|
675
|
+
}
|
|
676
|
+
|
|
677
|
+
// Convert PR data to JSON format
/**
 * Serialize fetched pull-request data into a pretty-printed (2-space) JSON string.
 * @param {object} data - aggregate with pr/files/comments/reviewComments/reviews/commits.
 * @param {Array} [downloadedImages] - image records collected during download.
 * @returns {string} JSON document describing the pull request.
 */
function convertToJson(data, downloadedImages = []) {
  const { pr, files, comments, reviewComments, reviews, commits } = data;

  // Per-entity shapers keep the payload literal below easy to scan.
  const shapeCommit = (c) => ({
    sha: c.sha,
    message: c.commit.message,
    author: c.author?.login || 'unknown',
    url: c.html_url,
  });
  const shapeFile = (f) => ({
    filename: f.filename,
    status: f.status,
    additions: f.additions,
    deletions: f.deletions,
    previousFilename: f.previous_filename,
  });
  const shapeReview = (r) => ({
    id: r.id,
    author: r.user.login,
    state: r.state,
    body: r.body,
    submittedAt: r.submitted_at,
  });
  const shapeReviewComment = (c) => ({
    id: c.id,
    author: c.user.login,
    body: c.body,
    path: c.path,
    line: c.line,
    createdAt: c.created_at,
    diffHunk: c.diff_hunk,
    reviewId: c.pull_request_review_id,
  });
  const shapeComment = (c) => ({
    id: c.id,
    author: c.user.login,
    body: c.body,
    createdAt: c.created_at,
  });
  const shapeImage = (img) => ({
    originalUrl: img.originalUrl,
    localPath: img.relativePath,
    format: img.format,
  });

  // Key order here determines key order in the emitted JSON.
  const payload = {
    pullRequest: {
      number: pr.number,
      title: pr.title,
      state: pr.state,
      url: pr.html_url,
      author: pr.user.login,
      createdAt: pr.created_at,
      updatedAt: pr.updated_at,
      mergedAt: pr.merged_at,
      closedAt: pr.closed_at,
      base: pr.base.ref,
      head: pr.head.ref,
      additions: pr.additions,
      deletions: pr.deletions,
      changedFiles: pr.changed_files,
      labels: pr.labels?.map((l) => l.name) || [],
      body: pr.body,
    },
    commits: commits.map(shapeCommit),
    files: files.map(shapeFile),
    reviews: reviews.map(shapeReview),
    reviewComments: reviewComments.map(shapeReviewComment),
    comments: comments.map(shapeComment),
    downloadedImages: downloadedImages.map(shapeImage),
  };

  return JSON.stringify(payload, null, 2);
}
|
|
747
|
+
|
|
748
|
+
// Configure CLI arguments
// Script name is taken from argv[1] so help/usage output matches the
// actual invoked binary name (works when installed under an aliased bin).
const scriptName = path.basename(process.argv[1]);
const argv = yargs(hideBin(process.argv))
  .scriptName(scriptName)
  .version(version)
  .usage('Usage: $0 <pr-url> [options]')
  // Default command: the PR reference is the single positional argument.
  .command(
    '$0 <pr>',
    'Download a GitHub pull request and convert it to markdown',
    (yargs) => {
      yargs.positional('pr', {
        describe:
          'Pull request URL or shorthand (e.g., https://github.com/owner/repo/pull/123 or owner/repo#123)',
        type: 'string',
      });
    }
  )
  // Auth token: defaults to the GITHUB_TOKEN env var; main() additionally
  // falls back to the gh CLI token when neither is supplied.
  .option('token', {
    alias: 't',
    type: 'string',
    describe: 'GitHub personal access token (optional for public PRs)',
    default: process.env.GITHUB_TOKEN,
  })
  // When --output is omitted, main() prints to stdout instead of a file.
  .option('output', {
    alias: 'o',
    type: 'string',
    describe: 'Output directory (default: current directory)',
  })
  // Boolean options default to true; yargs auto-generates the negated
  // forms (--no-download-images, --no-include-reviews).
  .option('download-images', {
    type: 'boolean',
    describe: 'Download embedded images',
    default: true,
  })
  .option('include-reviews', {
    type: 'boolean',
    describe: 'Include PR reviews',
    default: true,
  })
  .option('format', {
    type: 'string',
    describe: 'Output format: markdown, json',
    default: 'markdown',
    choices: ['markdown', 'json'],
  })
  .option('verbose', {
    alias: 'v',
    type: 'boolean',
    describe: 'Enable verbose logging',
    default: false,
  })
  .help('h')
  .alias('h', 'help')
  .example('$0 https://github.com/owner/repo/pull/123', 'Download PR #123')
  .example('$0 owner/repo#123', 'Download PR using shorthand format')
  .example('$0 owner/repo#123 -o ./output', 'Save to output directory')
  .example('$0 owner/repo#123 --format json', 'Output as JSON')
  .example('$0 owner/repo#123 --no-download-images', 'Skip image download')
  .example(
    '$0 https://github.com/owner/repo/pull/123 --token ghp_xxx',
    'Download private PR'
  ).argv;
|
|
809
|
+
|
|
810
|
+
// CLI entry point: resolve authentication, fetch the PR, convert it to the
// requested format, and either save it under --output or print to stdout.
// Exits with code 1 on an unparseable PR reference.
async function main() {
  const {
    pr: prInput,
    token: tokenArg,
    output,
    'download-images': downloadImagesFlag,
    'include-reviews': includeReviews,
    format,
    verbose,
  } = argv;

  // Set verbose mode (module-level flag read by verboseLog and the
  // top-level error handler).
  verboseMode = verbose;

  let token = tokenArg;

  // If no token provided, try to get it from gh CLI.
  // `!token` already covers undefined/null/empty string, so the previous
  // extra `token === undefined` check was redundant and has been dropped.
  if (!token) {
    const ghToken = await getGhToken();
    if (ghToken) {
      token = ghToken;
      log('cyan', '🔑 Using GitHub token from gh CLI');
    }
  }

  // Parse PR input
  const prInfo = parsePrUrl(prInput);
  if (!prInfo) {
    log('red', `❌ Invalid PR URL or format: ${prInput}`);
    log('yellow', '💡 Supported formats:');
    log('yellow', '   - https://github.com/owner/repo/pull/123');
    log('yellow', '   - owner/repo#123');
    log('yellow', '   - owner/repo/123');
    process.exit(1);
  }

  const { owner, repo, prNumber } = prInfo;

  // Fetch PR data
  const data = await fetchPullRequest(owner, repo, prNumber, token, {
    includeReviews,
  });

  // Determine output paths
  const outputDir = output || process.cwd();
  const imagesDir = path.join(outputDir, `pr-${prNumber}-images`);
  const mdOutputPath = path.join(outputDir, `pr-${prNumber}.md`);
  const jsonOutputPath = path.join(outputDir, `pr-${prNumber}.json`);

  // Convert to appropriate format
  log('blue', `📝 Converting to ${format}...`);

  let outputContent;
  // NOTE: local `downloadedImages` (records) is distinct from the
  // `downloadImages` helper function called below.
  let downloadedImages = [];

  if (format === 'json') {
    // For JSON, we might still want to download images
    if (downloadImagesFlag) {
      log('blue', '🖼️ Processing images...');
      // Process all content for images: PR body plus every comment,
      // review, and review-comment body, concatenated for one scan.
      const allContent = [
        data.pr.body || '',
        ...data.comments.map((c) => c.body || ''),
        ...data.reviews.map((r) => r.body || ''),
        ...data.reviewComments.map((rc) => rc.body || ''),
      ].join('\n\n');

      const result = await downloadImages(
        allContent,
        imagesDir,
        token,
        prNumber
      );
      downloadedImages = result.downloadedImages;
    }
    outputContent = convertToJson(data, downloadedImages);
  } else {
    // Markdown format with image processing
    const result = await convertToMarkdown(data, {
      downloadImagesFlag,
      imagesDir,
      token,
      prNumber,
    });
    outputContent = result.markdown;
    downloadedImages = result.downloadedImages;
  }

  // Output: write to a file when --output was given, otherwise stdout.
  if (output) {
    await fs.ensureDir(outputDir);
    const outputPath = format === 'json' ? jsonOutputPath : mdOutputPath;
    await fs.writeFile(outputPath, outputContent, 'utf8');
    log('green', `✅ Saved to ${outputPath}`);

    if (downloadedImages.length > 0) {
      log(
        'green',
        `📁 Downloaded ${downloadedImages.length} image(s) to ${imagesDir}`
      );
    }
  } else {
    console.log(outputContent);
  }

  log('blue', '🎉 Done!');
}
|
|
917
|
+
|
|
918
|
+
// Top-level failure handler: report the error (with stack when verbose)
// and exit non-zero so callers/scripts can detect failure.
const reportFatalError = (error) => {
  log('red', `💥 Script failed: ${error.message}`);
  if (verboseMode) {
    console.error(error.stack);
  }
  process.exit(1);
};

main().catch(reportFatalError);
|
package/src/version.mjs
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
#!/usr/bin/env sh
':'; // # ; exec "$(command -v bun || command -v node)" "$0" "$@"
// ^ sh/JS polyglot header: when run as a shell script, re-executes itself
// with bun (preferred) or node; to JS the second line is a harmless string.

import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { execSync } from 'node:child_process';

// ES modules have no __dirname; reconstruct it from import.meta.url.
const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Download use-m dynamically
// SECURITY NOTE(review): this fetches and eval()s remote code from unpkg at
// runtime with no integrity check — a compromised CDN or a MITM could run
// arbitrary code here. Consider vendoring use.js or a pinned dependency.
const { use } = eval(
  await (await fetch('https://unpkg.com/use-m/use.js')).text()
);

// Import semver for version management (pinned via use-m)
const semver = await use('semver@7.7.2');
|
|
18
|
+
|
|
19
|
+
/**
 * Rewrite the package.json next to this script with the given version,
 * preserving 2-space formatting and the trailing newline.
 * @param {string} newVersion - semver string to store in "version".
 */
function updatePackageJson(newVersion) {
  const manifestPath = path.join(__dirname, 'package.json');
  const manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf8'));
  manifest.version = newVersion;
  const serialized = JSON.stringify(manifest, null, 2);
  fs.writeFileSync(manifestPath, `${serialized}\n`);
}
|
|
25
|
+
|
|
26
|
+
/**
 * Patch the hard-coded `let version = '…'` literal inside the main script.
 * Only the first occurrence is rewritten; the rest of the file is untouched.
 * @param {string} newVersion - semver string to embed in the script.
 */
function updateMainScript(newVersion) {
  const targetPath = path.join(__dirname, 'gh-download-pull-request.mjs');
  const source = fs.readFileSync(targetPath, 'utf8');
  const patched = source.replace(
    /let version = '[^']+'/,
    `let version = '${newVersion}'`
  );
  fs.writeFileSync(targetPath, patched);
}
|
|
35
|
+
|
|
36
|
+
/**
 * Run a shell command from this script's directory, echoing progress.
 * On failure, prints the error and terminates the process with exit code 1
 * (so callers never see a thrown exception from this helper).
 * @param {string} command - shell command to execute.
 * @param {string} description - human-readable progress label.
 */
function runGitCommand(command, description) {
  console.log(`🔄 ${description}...`);
  try {
    execSync(command, { stdio: 'inherit', cwd: __dirname });
  } catch (error) {
    console.error(
      `❌ Failed to ${description.toLowerCase()}: ${error.message}`
    );
    process.exit(1);
  }
  console.log(`✅ ${description} completed`);
}
|
|
48
|
+
|
|
49
|
+
/**
 * CLI entry point for the version-bump script.
 *
 * Usage: ./version.mjs <patch|minor|major>
 * Reads the current version from package.json, computes the next one with
 * semver, rewrites package.json and the main script, then commits and pushes.
 * Exits with code 1 on a bad argument, an invalid current version, or any
 * file/git failure.
 */
function main() {
  const versionType = process.argv[2];

  if (!versionType || !['patch', 'minor', 'major'].includes(versionType)) {
    console.error('Usage: ./version.mjs <patch|minor|major>');
    console.error('Examples:');
    console.error('  ./version.mjs patch   # 0.1.0 → 0.1.1');
    console.error('  ./version.mjs minor   # 0.1.0 → 0.2.0');
    console.error('  ./version.mjs major   # 0.1.0 → 1.0.0');
    process.exit(1);
  }

  // Read current version from package.json
  const packagePath = path.join(__dirname, 'package.json');
  const packageJson = JSON.parse(fs.readFileSync(packagePath, 'utf8'));
  const currentVersion = packageJson.version;

  // Calculate new version using semver (inc() returns null when the
  // current version string is not valid semver)
  const newVersion = semver.inc(currentVersion, versionType);

  if (!newVersion) {
    console.error(
      `❌ Failed to calculate new ${versionType} version from ${currentVersion}`
    );
    process.exit(1);
  }

  console.log(`📦 Bumping version from ${currentVersion} to ${newVersion}`);

  try {
    // Update both files
    updatePackageJson(newVersion);
    updateMainScript(newVersion);

    console.log('✅ Version updated successfully!');
    console.log(`   📄 package.json: ${newVersion}`);
    console.log(`   📄 gh-download-pull-request.mjs: ${newVersion}`);
    console.log('');

    // Automatically commit and push changes.
    // Stage only the files this script modified — the previous `git add .`
    // would also sweep unrelated working-tree changes into the version commit.
    runGitCommand(
      'git add package.json gh-download-pull-request.mjs',
      'Adding changes to git'
    );
    runGitCommand(`git commit -m "${newVersion}"`, 'Committing changes');
    runGitCommand('git push', 'Pushing to remote repository');

    console.log('🎉 Version bump completed and pushed!');
  } catch (error) {
    console.error('❌ Error updating files:', error.message);
    process.exit(1);
  }
}
|
|
99
|
+
|
|
100
|
+
// Run immediately — this script is intended to be executed directly.
main();
|