git-ripper 1.3.6 → 1.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +28 -1
- package/package.json +3 -2
- package/src/archiver.js +215 -0
- package/src/downloader.js +321 -61
- package/src/index.js +110 -14
- package/src/parser.js +28 -6
package/README.md
CHANGED
|
@@ -31,6 +31,7 @@ Have you ever needed just a single component from a massive repository? Or wante
|
|
|
31
31
|
- **Directory Structure**: Preserves complete folder structure
|
|
32
32
|
- **Custom Output**: Specify your preferred output directory
|
|
33
33
|
- **Branch Support**: Works with any branch, not just the default one
|
|
34
|
+
- **Archive Export**: Create ZIP or TAR archives of downloaded content
|
|
34
35
|
- **Simple Interface**: Clean, intuitive command-line experience
|
|
35
36
|
- **Lightweight**: Minimal dependencies and fast execution
|
|
36
37
|
- **No Authentication**: Works with public repositories without requiring credentials
|
|
@@ -67,11 +68,26 @@ git-ripper https://github.com/username/repository/tree/branch/folder
|
|
|
67
68
|
git-ripper https://github.com/username/repository/tree/branch/folder -o ./my-output-folder
|
|
68
69
|
```
|
|
69
70
|
|
|
71
|
+
### Creating ZIP Archive
|
|
72
|
+
|
|
73
|
+
```bash
|
|
74
|
+
git-ripper https://github.com/username/repository/tree/branch/folder --zip
|
|
75
|
+
```
|
|
76
|
+
|
|
77
|
+
### Creating TAR Archive with Custom Name
|
|
78
|
+
|
|
79
|
+
```bash
|
|
80
|
+
git-ripper https://github.com/username/repository/tree/branch/folder --tar="my-archive.tar"
|
|
81
|
+
```
|
|
82
|
+
|
|
70
83
|
### Command Line Options
|
|
71
84
|
|
|
72
85
|
| Option | Description | Default |
|
|
73
86
|
|--------|-------------|---------|
|
|
74
87
|
| `-o, --output <directory>` | Specify output directory | Current directory |
|
|
88
|
+
| `--zip [filename]` | Create ZIP archive of downloaded content | - |
|
|
89
|
+
| `--tar [filename]` | Create TAR archive of downloaded content | - |
|
|
90
|
+
| `--compression-level <level>` | Set compression level (1-9) | 6 |
|
|
75
91
|
| `-V, --version` | Show version number | - |
|
|
76
92
|
| `-h, --help` | Show help | - |
|
|
77
93
|
|
|
@@ -105,6 +121,16 @@ git-ripper https://github.com/nodejs/node/tree/main/doc -o ./node-docs
|
|
|
105
121
|
git-ripper https://github.com/tailwindlabs/tailwindcss/tree/master/src/components -o ./tailwind-components
|
|
106
122
|
```
|
|
107
123
|
|
|
124
|
+
### Download and Create Archive
|
|
125
|
+
|
|
126
|
+
```bash
|
|
127
|
+
# Download React DOM package and create a ZIP archive
|
|
128
|
+
git-ripper https://github.com/facebook/react/tree/main/packages/react-dom --zip
|
|
129
|
+
|
|
130
|
+
# Extract VS Code build configuration with maximum compression
|
|
131
|
+
git-ripper https://github.com/microsoft/vscode/tree/main/build --tar --compression-level=9
|
|
132
|
+
```
|
|
133
|
+
|
|
108
134
|
## How It Works
|
|
109
135
|
|
|
110
136
|
Git-ripper operates in four stages:
|
|
@@ -112,7 +138,7 @@ Git-ripper operates in four stages:
|
|
|
112
138
|
1. **URL Parsing**: Extracts repository owner, name, branch, and target folder path
|
|
113
139
|
2. **API Request**: Uses GitHub's API to fetch the folder structure
|
|
114
140
|
3. **Content Download**: Retrieves each file individually while maintaining directory structure
|
|
115
|
-
4. **Local Storage**: Saves files to your specified output directory
|
|
141
|
+
4. **Local Storage or Archiving**: Saves files to your specified output directory or creates an archive
|
|
116
142
|
|
|
117
143
|
## Configuration
|
|
118
144
|
|
|
@@ -160,6 +186,7 @@ See the [open issues](https://github.com/sairajB/git-ripper/issues) for a list o
|
|
|
160
186
|
|
|
161
187
|
## Roadmap
|
|
162
188
|
|
|
189
|
+
- [x] Add archive export options (ZIP/TAR)
|
|
163
190
|
- [ ] Add GitHub token authentication
|
|
164
191
|
- [ ] Support for GitLab and Bitbucket repositories
|
|
165
192
|
- [ ] Download from specific commits or tags
|
package/package.json
CHANGED
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "git-ripper",
|
|
3
|
-
"version": "1.
|
|
3
|
+
"version": "1.4.1",
|
|
4
4
|
"description": "CLI tool that lets you download specific folders from GitHub repositories without cloning the entire repo.",
|
|
5
5
|
"main": "src/index.js",
|
|
6
6
|
"type": "module",
|
|
7
7
|
"bin": {
|
|
8
|
-
"git-ripper": "
|
|
8
|
+
"git-ripper": "bin/git-ripper.js"
|
|
9
9
|
},
|
|
10
10
|
"scripts": {
|
|
11
11
|
"test": "echo \"Error: no test specified\" && exit 1",
|
|
@@ -30,6 +30,7 @@
|
|
|
30
30
|
"author": "sairajb",
|
|
31
31
|
"license": "MIT",
|
|
32
32
|
"dependencies": {
|
|
33
|
+
"archiver": "^6.0.1",
|
|
33
34
|
"axios": "^1.6.7",
|
|
34
35
|
"chalk": "^5.3.0",
|
|
35
36
|
"cli-progress": "^3.12.0",
|
package/src/archiver.js
ADDED
|
@@ -0,0 +1,215 @@
|
|
|
1
|
+
import fs from "fs";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import archiver from "archiver";
|
|
4
|
+
import chalk from "chalk";
|
|
5
|
+
|
|
6
|
+
/**
|
|
7
|
+
* Validates the output path for an archive file
|
|
8
|
+
* @param {string} outputPath - Path where the archive should be saved
|
|
9
|
+
* @returns {boolean} - True if the path is valid, throws an error otherwise
|
|
10
|
+
* @throws {Error} - If the output path is invalid
|
|
11
|
+
*/
|
|
12
|
+
const validateArchivePath = (outputPath) => {
|
|
13
|
+
// Check if path is provided
|
|
14
|
+
if (!outputPath) {
|
|
15
|
+
throw new Error("Output path is required");
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
// Check if the output directory exists or can be created
|
|
19
|
+
const outputDir = path.dirname(outputPath);
|
|
20
|
+
try {
|
|
21
|
+
if (!fs.existsSync(outputDir)) {
|
|
22
|
+
fs.mkdirSync(outputDir, { recursive: true });
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
// Check if the directory is writable
|
|
26
|
+
fs.accessSync(outputDir, fs.constants.W_OK);
|
|
27
|
+
|
|
28
|
+
// Check if file already exists and is writable
|
|
29
|
+
if (fs.existsSync(outputPath)) {
|
|
30
|
+
fs.accessSync(outputPath, fs.constants.W_OK);
|
|
31
|
+
// File exists and is writable, so we'll overwrite it
|
|
32
|
+
console.warn(
|
|
33
|
+
chalk.yellow(
|
|
34
|
+
`Warning: File ${outputPath} already exists and will be overwritten`
|
|
35
|
+
)
|
|
36
|
+
);
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
return true;
|
|
40
|
+
} catch (error) {
|
|
41
|
+
if (error.code === "EACCES") {
|
|
42
|
+
throw new Error(`Permission denied: Cannot write to ${outputPath}`);
|
|
43
|
+
}
|
|
44
|
+
throw new Error(`Invalid output path: ${error.message}`);
|
|
45
|
+
}
|
|
46
|
+
};
|
|
47
|
+
|
|
48
|
+
/**
|
|
49
|
+
* Creates an archive (zip or tar) from a directory
|
|
50
|
+
*
|
|
51
|
+
* @param {string} sourceDir - Source directory to archive
|
|
52
|
+
* @param {string} outputPath - Path where the archive should be saved
|
|
53
|
+
* @param {object} options - Archive options
|
|
54
|
+
* @param {string} options.format - Archive format ('zip' or 'tar')
|
|
55
|
+
* @param {number} options.compressionLevel - Compression level (0-9, default: 6)
|
|
56
|
+
* @returns {Promise<string>} - Path to the created archive
|
|
57
|
+
*/
|
|
58
|
+
export const createArchive = (sourceDir, outputPath, options = {}) => {
|
|
59
|
+
return new Promise((resolve, reject) => {
|
|
60
|
+
try {
|
|
61
|
+
const { format = "zip", compressionLevel = 6 } = options;
|
|
62
|
+
|
|
63
|
+
// Validate source directory
|
|
64
|
+
if (!fs.existsSync(sourceDir)) {
|
|
65
|
+
return reject(
|
|
66
|
+
new Error(`Source directory does not exist: ${sourceDir}`)
|
|
67
|
+
);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
const stats = fs.statSync(sourceDir);
|
|
71
|
+
if (!stats.isDirectory()) {
|
|
72
|
+
return reject(
|
|
73
|
+
new Error(`Source path is not a directory: ${sourceDir}`)
|
|
74
|
+
);
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
// Validate output path
|
|
78
|
+
validateArchivePath(outputPath);
|
|
79
|
+
|
|
80
|
+
// Create output stream
|
|
81
|
+
const output = fs.createWriteStream(outputPath);
|
|
82
|
+
let archive;
|
|
83
|
+
|
|
84
|
+
// Create the appropriate archive type
|
|
85
|
+
if (format === "zip") {
|
|
86
|
+
archive = archiver("zip", {
|
|
87
|
+
zlib: { level: compressionLevel },
|
|
88
|
+
});
|
|
89
|
+
} else if (format === "tar") {
|
|
90
|
+
// Use gzip compression for tar if compressionLevel > 0
|
|
91
|
+
if (compressionLevel > 0) {
|
|
92
|
+
archive = archiver("tar", {
|
|
93
|
+
gzip: true,
|
|
94
|
+
gzipOptions: { level: compressionLevel },
|
|
95
|
+
});
|
|
96
|
+
} else {
|
|
97
|
+
// Create a tar archive without gzip compression
|
|
98
|
+
archive = archiver("tar");
|
|
99
|
+
}
|
|
100
|
+
} else {
|
|
101
|
+
return reject(new Error(`Unsupported archive format: ${format}`));
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
// Listen for archive events
|
|
105
|
+
output.on("close", () => {
|
|
106
|
+
const size = archive.pointer();
|
|
107
|
+
console.log(
|
|
108
|
+
chalk.green(
|
|
109
|
+
`✓ Archive created: ${outputPath} (${(size / 1024 / 1024).toFixed(
|
|
110
|
+
2
|
|
111
|
+
)} MB)`
|
|
112
|
+
)
|
|
113
|
+
);
|
|
114
|
+
resolve(outputPath);
|
|
115
|
+
});
|
|
116
|
+
|
|
117
|
+
archive.on("error", (err) => {
|
|
118
|
+
reject(err);
|
|
119
|
+
});
|
|
120
|
+
|
|
121
|
+
archive.on("warning", (err) => {
|
|
122
|
+
if (err.code === "ENOENT") {
|
|
123
|
+
console.warn(chalk.yellow(`Warning: ${err.message}`));
|
|
124
|
+
} else {
|
|
125
|
+
reject(err);
|
|
126
|
+
}
|
|
127
|
+
});
|
|
128
|
+
|
|
129
|
+
// Pipe archive data to the output file
|
|
130
|
+
archive.pipe(output);
|
|
131
|
+
|
|
132
|
+
// Add the directory contents to the archive
|
|
133
|
+
archive.directory(sourceDir, false);
|
|
134
|
+
|
|
135
|
+
// Finalize the archive
|
|
136
|
+
archive.finalize();
|
|
137
|
+
} catch (error) {
|
|
138
|
+
reject(error);
|
|
139
|
+
}
|
|
140
|
+
});
|
|
141
|
+
};
|
|
142
|
+
|
|
143
|
+
/**
|
|
144
|
+
* Downloads folder contents and creates an archive
|
|
145
|
+
*
|
|
146
|
+
* @param {object} repoInfo - Repository information object
|
|
147
|
+
* @param {string} outputDir - Directory where files should be downloaded
|
|
148
|
+
* @param {string} archiveFormat - Archive format ('zip' or 'tar')
|
|
149
|
+
* @param {string} archiveName - Custom name for the archive file
|
|
150
|
+
* @param {number} compressionLevel - Compression level (0-9)
|
|
151
|
+
* @returns {Promise<string>} - Path to the created archive
|
|
152
|
+
*/
|
|
153
|
+
export const downloadAndArchive = async (
|
|
154
|
+
repoInfo,
|
|
155
|
+
outputDir,
|
|
156
|
+
archiveFormat = "zip",
|
|
157
|
+
archiveName = null,
|
|
158
|
+
compressionLevel = 6
|
|
159
|
+
) => {
|
|
160
|
+
const { downloadFolder } = await import("./downloader.js");
|
|
161
|
+
|
|
162
|
+
console.log(
|
|
163
|
+
chalk.cyan(
|
|
164
|
+
`Downloading folder and preparing to create ${archiveFormat.toUpperCase()} archive...`
|
|
165
|
+
)
|
|
166
|
+
);
|
|
167
|
+
|
|
168
|
+
// Create a temporary directory for the download
|
|
169
|
+
const tempDir = path.join(outputDir, `.temp-${Date.now()}`);
|
|
170
|
+
fs.mkdirSync(tempDir, { recursive: true });
|
|
171
|
+
|
|
172
|
+
try {
|
|
173
|
+
// Download the folder contents
|
|
174
|
+
await downloadFolder(repoInfo, tempDir);
|
|
175
|
+
|
|
176
|
+
// Determine archive filename
|
|
177
|
+
let archiveFileName = archiveName;
|
|
178
|
+
if (!archiveFileName) {
|
|
179
|
+
const { owner, repo, folderPath } = repoInfo;
|
|
180
|
+
const folderName = folderPath ? folderPath.split("/").pop() : repo;
|
|
181
|
+
archiveFileName = `${folderName || repo}-${owner}`;
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
// Add extension if not present
|
|
185
|
+
if (!archiveFileName.endsWith(`.${archiveFormat}`)) {
|
|
186
|
+
archiveFileName += `.${archiveFormat}`;
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
const archivePath = path.join(outputDir, archiveFileName);
|
|
190
|
+
|
|
191
|
+
// Create the archive
|
|
192
|
+
console.log(
|
|
193
|
+
chalk.cyan(`Creating ${archiveFormat.toUpperCase()} archive...`)
|
|
194
|
+
);
|
|
195
|
+
await createArchive(tempDir, archivePath, {
|
|
196
|
+
format: archiveFormat,
|
|
197
|
+
compressionLevel,
|
|
198
|
+
});
|
|
199
|
+
|
|
200
|
+
return archivePath;
|
|
201
|
+
} catch (error) {
|
|
202
|
+
throw new Error(`Failed to create archive: ${error.message}`);
|
|
203
|
+
} finally {
|
|
204
|
+
// Clean up temporary directory
|
|
205
|
+
try {
|
|
206
|
+
fs.rmSync(tempDir, { recursive: true, force: true });
|
|
207
|
+
} catch (err) {
|
|
208
|
+
console.warn(
|
|
209
|
+
chalk.yellow(
|
|
210
|
+
`Warning: Failed to clean up temporary directory: ${err.message}`
|
|
211
|
+
)
|
|
212
|
+
);
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
};
|
package/src/downloader.js
CHANGED
|
@@ -9,18 +9,19 @@ import chalk from "chalk";
|
|
|
9
9
|
import prettyBytes from "pretty-bytes";
|
|
10
10
|
|
|
11
11
|
// Set concurrency limit (adjustable based on network performance)
|
|
12
|
-
|
|
12
|
+
// Reduced from 500 to 5 to prevent GitHub API rate limiting
|
|
13
|
+
const limit = pLimit(5);
|
|
13
14
|
|
|
14
15
|
// Ensure __dirname and __filename are available in ESM
|
|
15
16
|
const __filename = fileURLToPath(import.meta.url);
|
|
16
17
|
const __dirname = dirname(__filename);
|
|
17
18
|
|
|
18
19
|
// Define spinner animation frames
|
|
19
|
-
const spinnerFrames = [
|
|
20
|
+
const spinnerFrames = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"];
|
|
20
21
|
// Alternative progress bar characters for more visual appeal
|
|
21
22
|
const progressChars = {
|
|
22
|
-
complete:
|
|
23
|
-
incomplete:
|
|
23
|
+
complete: "▰", // Alternative: '■', '●', '◆', '▣'
|
|
24
|
+
incomplete: "▱", // Alternative: '□', '○', '◇', '▢'
|
|
24
25
|
};
|
|
25
26
|
|
|
26
27
|
// Track frame index for spinner animation
|
|
@@ -45,17 +46,115 @@ const getSpinnerFrame = () => {
|
|
|
45
46
|
* @returns {Promise<Array>} - Promise resolving to an array of file objects
|
|
46
47
|
*/
|
|
47
48
|
const fetchFolderContents = async (owner, repo, branch, folderPath) => {
|
|
48
|
-
|
|
49
|
+
let effectiveBranch = branch;
|
|
50
|
+
if (!effectiveBranch) {
|
|
51
|
+
// If no branch is specified, fetch the default branch for the repository
|
|
52
|
+
try {
|
|
53
|
+
const repoInfoUrl = `https://api.github.com/repos/${owner}/${repo}`;
|
|
54
|
+
const repoInfoResponse = await axios.get(repoInfoUrl);
|
|
55
|
+
effectiveBranch = repoInfoResponse.data.default_branch;
|
|
56
|
+
if (!effectiveBranch) {
|
|
57
|
+
console.error(
|
|
58
|
+
chalk.red(
|
|
59
|
+
`Could not determine default branch for ${owner}/${repo}. Please specify a branch in the URL.`
|
|
60
|
+
)
|
|
61
|
+
);
|
|
62
|
+
return [];
|
|
63
|
+
}
|
|
64
|
+
console.log(
|
|
65
|
+
chalk.blue(
|
|
66
|
+
`No branch specified, using default branch: ${effectiveBranch}`
|
|
67
|
+
)
|
|
68
|
+
);
|
|
69
|
+
} catch (error) {
|
|
70
|
+
console.error(
|
|
71
|
+
chalk.red(
|
|
72
|
+
`Failed to fetch default branch for ${owner}/${repo}: ${error.message}`
|
|
73
|
+
)
|
|
74
|
+
);
|
|
75
|
+
return [];
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
const apiUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${effectiveBranch}?recursive=1`;
|
|
49
80
|
|
|
50
81
|
try {
|
|
51
82
|
const response = await axios.get(apiUrl);
|
|
52
|
-
|
|
83
|
+
|
|
84
|
+
// Check if GitHub API returned truncated results
|
|
85
|
+
if (response.data.truncated) {
|
|
86
|
+
console.warn(
|
|
87
|
+
chalk.yellow(
|
|
88
|
+
`Warning: The repository is too large and some files may be missing. ` +
|
|
89
|
+
`Consider using git clone for complete repositories.`
|
|
90
|
+
)
|
|
91
|
+
);
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
// Original filter:
|
|
95
|
+
// return response.data.tree.filter((item) =>
|
|
96
|
+
// item.path.startsWith(folderPath)
|
|
97
|
+
// );
|
|
98
|
+
|
|
99
|
+
// New filter logic:
|
|
100
|
+
if (folderPath === "") {
|
|
101
|
+
// For the root directory, all items from the recursive tree are relevant.
|
|
102
|
+
// item.path.startsWith("") would also achieve this.
|
|
103
|
+
return response.data.tree;
|
|
104
|
+
} else {
|
|
105
|
+
// For a specific folder, items must be *inside* that folder.
|
|
106
|
+
// Ensure folderPath is treated as a directory prefix by adding a trailing slash if not present.
|
|
107
|
+
const prefix = folderPath.endsWith("/") ? folderPath : folderPath + "/";
|
|
108
|
+
return response.data.tree.filter((item) => item.path.startsWith(prefix));
|
|
109
|
+
}
|
|
53
110
|
} catch (error) {
|
|
54
|
-
if (error.response
|
|
55
|
-
|
|
56
|
-
|
|
111
|
+
if (error.response) {
|
|
112
|
+
// Handle specific HTTP error codes
|
|
113
|
+
switch (error.response.status) {
|
|
114
|
+
case 403:
|
|
115
|
+
if (error.response.headers["x-ratelimit-remaining"] === "0") {
|
|
116
|
+
console.error(
|
|
117
|
+
chalk.red(
|
|
118
|
+
`GitHub API rate limit exceeded. Please wait until ${new Date(
|
|
119
|
+
parseInt(error.response.headers["x-ratelimit-reset"]) * 1000
|
|
120
|
+
).toLocaleTimeString()} or add a GitHub token (feature coming soon).`
|
|
121
|
+
)
|
|
122
|
+
);
|
|
123
|
+
} else {
|
|
124
|
+
console.error(
|
|
125
|
+
chalk.red(
|
|
126
|
+
`Access forbidden: ${
|
|
127
|
+
error.response.data.message || "Unknown reason"
|
|
128
|
+
}`
|
|
129
|
+
)
|
|
130
|
+
);
|
|
131
|
+
}
|
|
132
|
+
break;
|
|
133
|
+
case 404:
|
|
134
|
+
console.error(
|
|
135
|
+
chalk.red(
|
|
136
|
+
`Repository, branch, or folder not found: ${owner}/${repo}/${branch}/${folderPath}`
|
|
137
|
+
)
|
|
138
|
+
);
|
|
139
|
+
break;
|
|
140
|
+
default:
|
|
141
|
+
console.error(
|
|
142
|
+
chalk.red(
|
|
143
|
+
`API error (${error.response.status}): ${
|
|
144
|
+
error.response.data.message || error.message
|
|
145
|
+
}`
|
|
146
|
+
)
|
|
147
|
+
);
|
|
148
|
+
}
|
|
149
|
+
} else if (error.request) {
|
|
150
|
+
console.error(
|
|
151
|
+
chalk.red(
|
|
152
|
+
`Network error: No response received from GitHub. Please check your internet connection.`
|
|
153
|
+
)
|
|
154
|
+
);
|
|
155
|
+
} else {
|
|
156
|
+
console.error(chalk.red(`Error preparing request: ${error.message}`));
|
|
57
157
|
}
|
|
58
|
-
console.error(`Failed to fetch folder contents: ${error.message}`);
|
|
59
158
|
return [];
|
|
60
159
|
}
|
|
61
160
|
};
|
|
@@ -70,23 +169,103 @@ const fetchFolderContents = async (owner, repo, branch, folderPath) => {
|
|
|
70
169
|
* @returns {Promise<Object>} - Object containing download status
|
|
71
170
|
*/
|
|
72
171
|
const downloadFile = async (owner, repo, branch, filePath, outputPath) => {
|
|
73
|
-
|
|
172
|
+
let effectiveBranch = branch;
|
|
173
|
+
if (!effectiveBranch) {
|
|
174
|
+
// If no branch is specified, fetch the default branch for the repository
|
|
175
|
+
// This check might be redundant if fetchFolderContents already resolved it,
|
|
176
|
+
// but it's a good fallback for direct downloadFile calls if any.
|
|
177
|
+
try {
|
|
178
|
+
const repoInfoUrl = `https://api.github.com/repos/${owner}/${repo}`;
|
|
179
|
+
const repoInfoResponse = await axios.get(repoInfoUrl);
|
|
180
|
+
effectiveBranch = repoInfoResponse.data.default_branch;
|
|
181
|
+
if (!effectiveBranch) {
|
|
182
|
+
// console.error(chalk.red(`Could not determine default branch for ${owner}/${repo} for file ${filePath}.`));
|
|
183
|
+
// Do not log error here as it might be a root file download where branch is not in URL
|
|
184
|
+
}
|
|
185
|
+
} catch (error) {
|
|
186
|
+
// console.error(chalk.red(`Failed to fetch default branch for ${owner}/${repo} for file ${filePath}: ${error.message}`));
|
|
187
|
+
// Do not log error here
|
|
188
|
+
}
|
|
189
|
+
// If still no branch, the raw URL might work for default branch, or fail.
|
|
190
|
+
// The original code didn't explicitly handle this for downloadFile, relying on raw.githubusercontent default behavior.
|
|
191
|
+
// For robustness, we should ensure effectiveBranch is set. If not, the URL will be malformed or use GitHub's default.
|
|
192
|
+
if (!effectiveBranch) {
|
|
193
|
+
// Fallback to a common default, or let the API call fail if truly ambiguous
|
|
194
|
+
// For raw content, GitHub often defaults to the main branch if not specified,
|
|
195
|
+
// but it's better to be explicit if we can.
|
|
196
|
+
// However, altering the URL structure for raw.githubusercontent.com without a branch
|
|
197
|
+
// might be tricky if the original URL didn't have it.
|
|
198
|
+
// The existing raw URL construction assumes branch is present or GitHub handles its absence.
|
|
199
|
+
// Let's stick to the original logic for raw URL construction if branch is not found,
|
|
200
|
+
// as `https://raw.githubusercontent.com/${owner}/${repo}/${filePath}` might work for root files on default branch.
|
|
201
|
+
// The critical part is `fetchFolderContents` determining the branch for listing.
|
|
202
|
+
}
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
const baseUrl = `https://raw.githubusercontent.com/${owner}/${repo}`;
|
|
206
|
+
const fileUrlPath = effectiveBranch
|
|
207
|
+
? `/${effectiveBranch}/${filePath}`
|
|
208
|
+
: `/${filePath}`; // filePath might be at root
|
|
209
|
+
const url = `${baseUrl}${fileUrlPath}`;
|
|
74
210
|
|
|
75
211
|
try {
|
|
76
212
|
const response = await axios.get(url, { responseType: "arraybuffer" });
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
213
|
+
|
|
214
|
+
// Ensure the directory exists
|
|
215
|
+
try {
|
|
216
|
+
fs.mkdirSync(path.dirname(outputPath), { recursive: true });
|
|
217
|
+
} catch (dirError) {
|
|
218
|
+
return {
|
|
219
|
+
filePath,
|
|
220
|
+
success: false,
|
|
221
|
+
error: `Failed to create directory: ${dirError.message}`,
|
|
222
|
+
size: 0,
|
|
223
|
+
};
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
// Write the file
|
|
227
|
+
try {
|
|
228
|
+
fs.writeFileSync(outputPath, Buffer.from(response.data));
|
|
229
|
+
} catch (fileError) {
|
|
230
|
+
return {
|
|
231
|
+
filePath,
|
|
232
|
+
success: false,
|
|
233
|
+
error: `Failed to write file: ${fileError.message}`,
|
|
234
|
+
size: 0,
|
|
235
|
+
};
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
return {
|
|
239
|
+
filePath,
|
|
81
240
|
success: true,
|
|
82
|
-
size: response.data.length
|
|
241
|
+
size: response.data.length,
|
|
83
242
|
};
|
|
84
243
|
} catch (error) {
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
244
|
+
// More detailed error handling for network requests
|
|
245
|
+
let errorMessage = error.message;
|
|
246
|
+
|
|
247
|
+
if (error.response) {
|
|
248
|
+
// The request was made and the server responded with an error status
|
|
249
|
+
switch (error.response.status) {
|
|
250
|
+
case 403:
|
|
251
|
+
errorMessage = "Access forbidden (possibly rate limited)";
|
|
252
|
+
break;
|
|
253
|
+
case 404:
|
|
254
|
+
errorMessage = "File not found";
|
|
255
|
+
break;
|
|
256
|
+
default:
|
|
257
|
+
errorMessage = `HTTP error ${error.response.status}`;
|
|
258
|
+
}
|
|
259
|
+
} else if (error.request) {
|
|
260
|
+
// The request was made but no response was received
|
|
261
|
+
errorMessage = "No response from server";
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
return {
|
|
265
|
+
filePath,
|
|
266
|
+
success: false,
|
|
267
|
+
error: errorMessage,
|
|
268
|
+
size: 0,
|
|
90
269
|
};
|
|
91
270
|
}
|
|
92
271
|
};
|
|
@@ -106,31 +285,40 @@ const createProgressRenderer = (owner, repo, folderPath) => {
|
|
|
106
285
|
try {
|
|
107
286
|
const { value, total, startTime } = params;
|
|
108
287
|
const { downloadedSize = 0 } = payload || { downloadedSize: 0 };
|
|
109
|
-
|
|
288
|
+
|
|
110
289
|
// Calculate progress percentage
|
|
111
290
|
const progress = Math.min(1, Math.max(0, value / Math.max(1, total)));
|
|
112
291
|
const percentage = Math.floor(progress * 100);
|
|
113
|
-
|
|
292
|
+
|
|
114
293
|
// Calculate elapsed time
|
|
115
294
|
const elapsedSecs = Math.max(0.1, (Date.now() - startTime) / 1000);
|
|
116
|
-
|
|
295
|
+
|
|
117
296
|
// Create the progress bar
|
|
118
|
-
const barLength = Math.max(
|
|
297
|
+
const barLength = Math.max(
|
|
298
|
+
20,
|
|
299
|
+
Math.min(40, Math.floor(terminalWidth / 2))
|
|
300
|
+
);
|
|
119
301
|
const completedLength = Math.round(barLength * progress);
|
|
120
302
|
const remainingLength = barLength - completedLength;
|
|
121
|
-
|
|
303
|
+
|
|
122
304
|
// Build the bar with custom progress characters
|
|
123
|
-
const completedBar = chalk.greenBright(
|
|
124
|
-
|
|
125
|
-
|
|
305
|
+
const completedBar = chalk.greenBright(
|
|
306
|
+
progressChars.complete.repeat(completedLength)
|
|
307
|
+
);
|
|
308
|
+
const remainingBar = chalk.gray(
|
|
309
|
+
progressChars.incomplete.repeat(remainingLength)
|
|
310
|
+
);
|
|
311
|
+
|
|
126
312
|
// Add spinner for animation
|
|
127
313
|
const spinner = chalk.cyanBright(getSpinnerFrame());
|
|
128
|
-
|
|
314
|
+
|
|
129
315
|
// Format the output
|
|
130
316
|
const progressInfo = `${chalk.cyan(`${value}/${total}`)} files`;
|
|
131
317
|
const sizeInfo = prettyBytes(downloadedSize || 0);
|
|
132
|
-
|
|
133
|
-
return `${spinner} ${completedBar}${remainingBar} ${chalk.yellow(
|
|
318
|
+
|
|
319
|
+
return `${spinner} ${completedBar}${remainingBar} ${chalk.yellow(
|
|
320
|
+
percentage + "%"
|
|
321
|
+
)} | ${progressInfo} | ${chalk.magenta(sizeInfo)}`;
|
|
134
322
|
} catch (error) {
|
|
135
323
|
// Fallback to a very simple progress indicator
|
|
136
324
|
return `${Math.floor((params.value / params.total) * 100)}% complete`;
|
|
@@ -148,88 +336,160 @@ const createProgressRenderer = (owner, repo, folderPath) => {
|
|
|
148
336
|
* @param {string} outputDir - Directory where files should be saved
|
|
149
337
|
* @returns {Promise<void>} - Promise that resolves when all files are downloaded
|
|
150
338
|
*/
|
|
151
|
-
const downloadFolder = async (
|
|
152
|
-
|
|
339
|
+
const downloadFolder = async (
|
|
340
|
+
{ owner, repo, branch, folderPath },
|
|
341
|
+
outputDir
|
|
342
|
+
) => {
|
|
343
|
+
console.log(
|
|
344
|
+
chalk.cyan(`Analyzing repository structure for ${owner}/${repo}...`)
|
|
345
|
+
);
|
|
153
346
|
|
|
154
347
|
try {
|
|
155
348
|
const contents = await fetchFolderContents(owner, repo, branch, folderPath);
|
|
156
|
-
|
|
349
|
+
|
|
157
350
|
if (!contents || contents.length === 0) {
|
|
158
|
-
console.log(
|
|
351
|
+
console.log(
|
|
352
|
+
chalk.yellow(`No files found in ${folderPath || "repository root"}`)
|
|
353
|
+
);
|
|
159
354
|
console.log(chalk.green(`Folder cloned successfully!`));
|
|
160
355
|
return;
|
|
161
356
|
}
|
|
162
357
|
|
|
163
|
-
|
|
358
|
+
// Filter for blob type (files)
|
|
359
|
+
const files = contents.filter((item) => item.type === "blob");
|
|
164
360
|
const totalFiles = files.length;
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
361
|
+
|
|
362
|
+
if (totalFiles === 0) {
|
|
363
|
+
console.log(
|
|
364
|
+
chalk.yellow(
|
|
365
|
+
`No files found in ${
|
|
366
|
+
folderPath || "repository root"
|
|
367
|
+
} (only directories)`
|
|
368
|
+
)
|
|
369
|
+
);
|
|
370
|
+
console.log(chalk.green(`Folder cloned successfully!`));
|
|
371
|
+
return;
|
|
372
|
+
}
|
|
373
|
+
|
|
374
|
+
console.log(
|
|
375
|
+
chalk.cyan(
|
|
376
|
+
`Downloading ${totalFiles} files from ${chalk.white(
|
|
377
|
+
owner + "/" + repo
|
|
378
|
+
)}...`
|
|
379
|
+
)
|
|
380
|
+
);
|
|
381
|
+
|
|
168
382
|
// Simplified progress bar setup
|
|
169
383
|
const progressBar = new cliProgress.SingleBar({
|
|
170
384
|
format: createProgressRenderer(owner, repo, folderPath),
|
|
171
385
|
hideCursor: true,
|
|
172
386
|
clearOnComplete: false,
|
|
173
387
|
stopOnComplete: true,
|
|
174
|
-
forceRedraw: true
|
|
388
|
+
forceRedraw: true,
|
|
175
389
|
});
|
|
176
|
-
|
|
390
|
+
|
|
177
391
|
// Track download metrics
|
|
178
392
|
let downloadedSize = 0;
|
|
179
393
|
const startTime = Date.now();
|
|
180
|
-
|
|
394
|
+
let failedFiles = [];
|
|
395
|
+
|
|
181
396
|
// Start progress bar
|
|
182
397
|
progressBar.start(totalFiles, 0, {
|
|
183
398
|
downloadedSize: 0,
|
|
184
|
-
startTime
|
|
399
|
+
startTime,
|
|
185
400
|
});
|
|
186
|
-
|
|
401
|
+
|
|
187
402
|
// Create download promises with concurrency control
|
|
188
403
|
const fileDownloadPromises = files.map((item) => {
|
|
189
404
|
// Keep the original structure by preserving the folder name
|
|
190
405
|
let relativePath = item.path;
|
|
191
|
-
if (folderPath && folderPath.trim() !==
|
|
192
|
-
relativePath = item.path
|
|
406
|
+
if (folderPath && folderPath.trim() !== "") {
|
|
407
|
+
relativePath = item.path
|
|
408
|
+
.substring(folderPath.length)
|
|
409
|
+
.replace(/^\//, "");
|
|
193
410
|
}
|
|
194
411
|
const outputFilePath = path.join(outputDir, relativePath);
|
|
195
|
-
|
|
412
|
+
|
|
196
413
|
return limit(async () => {
|
|
197
414
|
try {
|
|
198
|
-
const result = await downloadFile(
|
|
199
|
-
|
|
415
|
+
const result = await downloadFile(
|
|
416
|
+
owner,
|
|
417
|
+
repo,
|
|
418
|
+
branch,
|
|
419
|
+
item.path,
|
|
420
|
+
outputFilePath
|
|
421
|
+
);
|
|
422
|
+
|
|
200
423
|
// Update progress metrics
|
|
201
424
|
if (result.success) {
|
|
202
|
-
downloadedSize +=
|
|
425
|
+
downloadedSize += result.size || 0;
|
|
426
|
+
} else {
|
|
427
|
+
// Track failed files for reporting
|
|
428
|
+
failedFiles.push({
|
|
429
|
+
path: item.path,
|
|
430
|
+
error: result.error,
|
|
431
|
+
});
|
|
203
432
|
}
|
|
204
|
-
|
|
433
|
+
|
|
205
434
|
// Update progress bar with current metrics
|
|
206
435
|
progressBar.increment(1, {
|
|
207
|
-
downloadedSize
|
|
436
|
+
downloadedSize,
|
|
208
437
|
});
|
|
209
|
-
|
|
438
|
+
|
|
210
439
|
return result;
|
|
211
440
|
} catch (error) {
|
|
212
|
-
|
|
441
|
+
failedFiles.push({
|
|
442
|
+
path: item.path,
|
|
443
|
+
error: error.message,
|
|
444
|
+
});
|
|
445
|
+
|
|
446
|
+
progressBar.increment(1, { downloadedSize });
|
|
447
|
+
return {
|
|
448
|
+
filePath: item.path,
|
|
449
|
+
success: false,
|
|
450
|
+
error: error.message,
|
|
451
|
+
size: 0,
|
|
452
|
+
};
|
|
213
453
|
}
|
|
214
454
|
});
|
|
215
455
|
});
|
|
216
456
|
|
|
217
|
-
// Execute downloads in parallel
|
|
457
|
+
// Execute downloads in parallel with controlled concurrency
|
|
218
458
|
const results = await Promise.all(fileDownloadPromises);
|
|
219
459
|
progressBar.stop();
|
|
220
|
-
|
|
460
|
+
|
|
221
461
|
console.log(); // Add an empty line after progress bar
|
|
222
462
|
|
|
223
463
|
// Count successful and failed downloads
|
|
224
464
|
const succeeded = results.filter((r) => r.success).length;
|
|
225
|
-
const failed =
|
|
465
|
+
const failed = failedFiles.length;
|
|
226
466
|
|
|
227
467
|
if (failed > 0) {
|
|
228
|
-
console.log(
|
|
468
|
+
console.log(
|
|
469
|
+
chalk.yellow(
|
|
470
|
+
`Downloaded ${succeeded} files successfully, ${failed} files failed`
|
|
471
|
+
)
|
|
472
|
+
);
|
|
473
|
+
|
|
474
|
+
// Show detailed errors if there aren't too many
|
|
475
|
+
if (failed <= 5) {
|
|
476
|
+
console.log(chalk.yellow("Failed files:"));
|
|
477
|
+
failedFiles.forEach((file) => {
|
|
478
|
+
console.log(chalk.yellow(` - ${file.path}: ${file.error}`));
|
|
479
|
+
});
|
|
480
|
+
} else {
|
|
481
|
+
console.log(
|
|
482
|
+
chalk.yellow(
|
|
483
|
+
`${failed} files failed to download. Check your connection or repository access.`
|
|
484
|
+
)
|
|
485
|
+
);
|
|
486
|
+
}
|
|
229
487
|
} else {
|
|
230
|
-
console.log(
|
|
488
|
+
console.log(
|
|
489
|
+
chalk.green(` All ${succeeded} files downloaded successfully!`)
|
|
490
|
+
);
|
|
231
491
|
}
|
|
232
|
-
|
|
492
|
+
|
|
233
493
|
console.log(chalk.green(`Folder cloned successfully!`));
|
|
234
494
|
} catch (error) {
|
|
235
495
|
console.error(chalk.red(`Error downloading folder: ${error.message}`));
|
package/src/index.js
CHANGED
|
@@ -1,25 +1,122 @@
|
|
|
1
|
-
import { program } from
|
|
2
|
-
import { parseGitHubUrl } from
|
|
3
|
-
import { downloadFolder } from
|
|
1
|
+
import { program } from "commander";
|
|
2
|
+
import { parseGitHubUrl } from "./parser.js";
|
|
3
|
+
import { downloadFolder } from "./downloader.js";
|
|
4
|
+
import { downloadAndArchive } from "./archiver.js";
|
|
5
|
+
import { fileURLToPath } from "url";
|
|
6
|
+
import { dirname, join, resolve } from "path";
|
|
7
|
+
import fs from "fs";
|
|
8
|
+
|
|
9
|
+
// Get package.json for version
|
|
10
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
11
|
+
const __dirname = dirname(__filename);
|
|
12
|
+
const packagePath = join(__dirname, "..", "package.json");
|
|
13
|
+
const packageJson = JSON.parse(fs.readFileSync(packagePath, "utf8"));
|
|
14
|
+
|
|
15
|
+
/**
 * Validates and ensures the output directory exists.
 *
 * Resolves the given path to an absolute path, creates the directory
 * (recursively) when missing, and verifies it is a writable directory.
 *
 * @param {string} outputDir - The directory path to validate
 * @returns {string} - The resolved absolute directory path
 * @throws {Error} - If the path is missing, exists but is not a directory,
 *   is not writable, or cannot be created
 */
const validateOutputDirectory = (outputDir) => {
  if (!outputDir) {
    throw new Error("Output directory is required");
  }

  // Resolve to absolute path
  const resolvedDir = resolve(outputDir);

  // Type-check an existing path OUTSIDE the try/catch below. Previously this
  // throw was caught by the function's own catch and re-wrapped as
  // "Invalid output directory: Output path exists but is not a directory: ...",
  // producing a doubly-wrapped, misleading message.
  if (fs.existsSync(resolvedDir)) {
    const stats = fs.statSync(resolvedDir);
    if (!stats.isDirectory()) {
      throw new Error(
        `Output path exists but is not a directory: ${outputDir}`
      );
    }
  }

  try {
    // Create the directory if needed; recursive mkdir is a no-op when the
    // directory already exists, so no separate existence branch is required.
    fs.mkdirSync(resolvedDir, { recursive: true });

    // Check if the directory is writable
    fs.accessSync(resolvedDir, fs.constants.W_OK);

    return resolvedDir;
  } catch (error) {
    if (error.code === "EACCES") {
      throw new Error(`Permission denied: Cannot write to ${outputDir}`);
    }
    throw new Error(`Invalid output directory: ${error.message}`);
  }
};
|
|
4
54
|
|
|
5
55
|
const initializeCLI = () => {
|
|
6
56
|
program
|
|
7
|
-
.version(
|
|
8
|
-
.description(
|
|
9
|
-
.argument(
|
|
10
|
-
.option(
|
|
57
|
+
.version(packageJson.version)
|
|
58
|
+
.description("Clone specific folders from GitHub repositories")
|
|
59
|
+
.argument("<url>", "GitHub URL of the folder to clone")
|
|
60
|
+
.option("-o, --output <directory>", "Output directory", process.cwd())
|
|
61
|
+
.option("--zip [filename]", "Create ZIP archive of downloaded files")
|
|
62
|
+
.option("--tar [filename]", "Create TAR archive of downloaded files")
|
|
63
|
+
.option("--compression-level <level>", "Compression level (1-9)", "6")
|
|
11
64
|
.action(async (url, options) => {
|
|
12
65
|
try {
|
|
13
66
|
console.log(`Parsing URL: ${url}`);
|
|
14
67
|
const parsedUrl = parseGitHubUrl(url);
|
|
15
|
-
console.log(`Parsed URL:`, parsedUrl);
|
|
16
68
|
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
69
|
+
// Validate options
|
|
70
|
+
if (options.compressionLevel) {
|
|
71
|
+
const level = parseInt(options.compressionLevel, 10);
|
|
72
|
+
if (isNaN(level) || level < 0 || level > 9) {
|
|
73
|
+
// Allow 0 for no compression
|
|
74
|
+
throw new Error(
|
|
75
|
+
"Compression level must be a number between 0 and 9"
|
|
76
|
+
);
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
if (options.zip && options.tar) {
|
|
81
|
+
throw new Error(
|
|
82
|
+
"Cannot specify both --zip and --tar options at the same time"
|
|
83
|
+
);
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
// Validate output directory
|
|
87
|
+
try {
|
|
88
|
+
options.output = validateOutputDirectory(options.output);
|
|
89
|
+
} catch (dirError) {
|
|
90
|
+
throw new Error(`Output directory error: ${dirError.message}`);
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
// Handle archive options
|
|
94
|
+
const archiveFormat = options.zip ? "zip" : options.tar ? "tar" : null;
|
|
95
|
+
const archiveName =
|
|
96
|
+
typeof options.zip === "string"
|
|
97
|
+
? options.zip
|
|
98
|
+
: typeof options.tar === "string"
|
|
99
|
+
? options.tar
|
|
100
|
+
: null;
|
|
101
|
+
const compressionLevel = parseInt(options.compressionLevel, 10) || 6;
|
|
102
|
+
|
|
103
|
+
if (archiveFormat) {
|
|
104
|
+
console.log(`Creating ${archiveFormat.toUpperCase()} archive...`);
|
|
105
|
+
await downloadAndArchive(
|
|
106
|
+
parsedUrl,
|
|
107
|
+
options.output,
|
|
108
|
+
archiveFormat,
|
|
109
|
+
archiveName,
|
|
110
|
+
compressionLevel
|
|
111
|
+
);
|
|
112
|
+
} else {
|
|
113
|
+
console.log(`Downloading folder to: ${options.output}`);
|
|
114
|
+
await downloadFolder(parsedUrl, options.output);
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
console.log("Operation completed successfully!");
|
|
21
118
|
} catch (error) {
|
|
22
|
-
console.error(
|
|
119
|
+
console.error("Error:", error.message);
|
|
23
120
|
process.exit(1);
|
|
24
121
|
}
|
|
25
122
|
});
|
|
@@ -32,5 +129,4 @@ if (import.meta.url === `file://${process.argv[1]}`) {
|
|
|
32
129
|
initializeCLI();
|
|
33
130
|
}
|
|
34
131
|
|
|
35
|
-
// ✅ Fix the incorrect export
|
|
36
132
|
export { initializeCLI, downloadFolder };
|
package/src/parser.js
CHANGED
|
@@ -1,8 +1,30 @@
|
|
|
1
1
|
/**
 * Parses a GitHub URL into its repository components.
 *
 * Accepts repository-root URLs as well as tree/blob URLs, e.g.
 *   https://github.com/owner/repo
 *   https://github.com/owner/repo/tree/main/src/components
 *
 * @param {string} url - The GitHub URL to parse
 * @returns {{owner: string, repo: string, branch: (string|undefined), folderPath: string}}
 *   Parsed components; `branch` is undefined for repo-root URLs and
 *   `folderPath` is "" when no folder/file path is present
 * @throws {Error} - If the URL is missing, not a string, or not a valid
 *   GitHub repository URL
 */
export function parseGitHubUrl(url) {
  // Validate the URL format
  if (!url || typeof url !== "string") {
    throw new Error("Invalid URL: URL must be a non-empty string");
  }

  // Normalize: trailing slashes are common when copy-pasting URLs from the
  // browser; without this, the anchored pattern below would reject them.
  const normalizedUrl = url.replace(/\/+$/, "");

  // Validate if it's a GitHub URL
  const githubUrlPattern =
    /^https?:\/\/(?:www\.)?github\.com\/([^\/]+)\/([^\/]+)(?:\/(?:tree|blob)\/([^\/]+)(?:\/(.+))?)?$/;
  const match = normalizedUrl.match(githubUrlPattern);

  if (!match) {
    throw new Error(
      "Invalid GitHub URL format. Expected: https://github.com/owner/repo[/tree|/blob]/branch/folder_or_file"
    );
  }

  // Extract components from the matched pattern
  const owner = match[1];
  // Strip a ".git" suffix so clone-style URLs resolve to the same repo name
  const repo = match[2].replace(/\.git$/, "");
  const branch = match[3]; // Branch might not be in the URL for root downloads
  const folderPath = match[4] || ""; // Empty string if no folder path

  // Additional validation
  if (!owner || !repo) {
    throw new Error("Invalid GitHub URL: Missing repository owner or name");
  }

  return { owner, repo, branch, folderPath };
}
|