gitpadi 2.0.0 → 2.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -0
- package/dist/applicant-scorer.js +256 -0
- package/dist/cli.js +789 -0
- package/dist/commands/contribute.js +291 -0
- package/dist/commands/contributors.js +101 -0
- package/dist/commands/issues.js +319 -0
- package/dist/commands/prs.js +229 -0
- package/dist/commands/releases.js +53 -0
- package/dist/commands/repos.js +128 -0
- package/dist/core/github.js +106 -0
- package/dist/core/scorer.js +95 -0
- package/dist/create-issues.js +179 -0
- package/dist/pr-review.js +117 -0
- package/package.json +6 -3
- package/src/applicant-scorer.ts +1 -1
- package/src/cli.ts +345 -170
- package/src/commands/contribute.ts +331 -0
- package/src/commands/contributors.ts +1 -1
- package/src/commands/issues.ts +76 -9
- package/src/commands/prs.ts +1 -1
- package/src/commands/releases.ts +1 -1
- package/src/commands/repos.ts +41 -26
- package/src/core/github.ts +99 -15
- package/src/create-issues.ts +1 -1
- package/src/pr-review.ts +1 -1
|
@@ -0,0 +1,331 @@
|
|
|
1
|
+
// commands/contribute.ts — Contributor workflow for GitPadi
|
|
2
|
+
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import ora from 'ora';
|
|
5
|
+
import { execSync } from 'child_process';
|
|
6
|
+
import * as fs from 'fs';
|
|
7
|
+
import boxen from 'boxen';
|
|
8
|
+
import {
|
|
9
|
+
getOctokit,
|
|
10
|
+
getOwner,
|
|
11
|
+
getRepo,
|
|
12
|
+
forkRepo,
|
|
13
|
+
getAuthenticatedUser,
|
|
14
|
+
getLatestCheckRuns,
|
|
15
|
+
setRepo
|
|
16
|
+
} from '../core/github.js';
|
|
17
|
+
|
|
18
|
+
const dim = chalk.dim;
|
|
19
|
+
const yellow = chalk.yellow;
|
|
20
|
+
const green = chalk.green;
|
|
21
|
+
const cyan = chalk.cyan;
|
|
22
|
+
const white = chalk.white;
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Parses a GitHub Issue/Repo URL or "owner/repo" string
|
|
26
|
+
*/
|
|
27
|
+
function parseTarget(input: string): { owner: string, repo: string, issue?: number } {
|
|
28
|
+
const urlPattern = /github\.com\/([^/]+)\/([^/]+)(\/issues\/(\d+))?/;
|
|
29
|
+
const match = input.match(urlPattern);
|
|
30
|
+
|
|
31
|
+
if (match) {
|
|
32
|
+
return {
|
|
33
|
+
owner: match[1],
|
|
34
|
+
repo: match[2],
|
|
35
|
+
issue: match[4] ? parseInt(match[4]) : undefined
|
|
36
|
+
};
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
const parts = input.split('/');
|
|
40
|
+
if (parts.length === 2) {
|
|
41
|
+
return { owner: parts[0], repo: parts[1] };
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
throw new Error('Invalid target. Use a GitHub URL or "owner/repo" format.');
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
/**
 * Phase 2: Fork & Clone Workflow (The "Start" Command).
 *
 * End-to-end setup for contributing to a repo:
 *   1. fork the target on GitHub (idempotent),
 *   2. ask where to clone and handle pre-existing directories,
 *   3. clone the user's fork,
 *   4. wire an `upstream` remote and create/switch to a working branch,
 *   5. record the upstream repo in local GitPadi state via setRepo().
 *
 * @param target A GitHub URL (optionally an issue URL) or "owner/repo" string.
 *               An issue number, when present, drives the branch name
 *               (`fix/issue-<n>`).
 */
export async function forkAndClone(target: string) {
  const { owner, repo, issue } = parseTarget(target);

  // 1. Fork first (idempotent — GitHub returns existing fork if it exists)
  const spinner = ora(`Forking ${cyan(`${owner}/${repo}`)}...`).start();
  let myUser: string;

  try {
    myUser = await getAuthenticatedUser();
    const forkFullName = await forkRepo(owner, repo);
    spinner.succeed(`Forked to ${green(forkFullName)}`);
  } catch (e: any) {
    // Fork (or auth) failed — abort before touching the filesystem.
    spinner.fail(e.message);
    return;
  }

  // 2. Ask where to clone
  // Lazy-import the prompt/path helpers so they only load for this command.
  const inquirer = (await import('inquirer')).default;
  const path = (await import('path')).default;
  const os = (await import('os')).default;

  const { parentDir } = await inquirer.prompt([{
    type: 'input',
    name: 'parentDir',
    message: cyan('📂 Where to clone? (parent directory):'),
    default: '.',
  }]);

  // Resolve ~ and build full path: parentDir/repoName
  const resolvedParent = parentDir.startsWith('~')
    ? parentDir.replace('~', os.homedir())
    : parentDir;
  let cloneDir: string = path.resolve(resolvedParent, repo);

  // 3. Handle existing directory
  if (fs.existsSync(cloneDir)) {
    // Check if it's already a valid git clone of this repo.
    // NOTE(review): substring match on the origin URL — a repo whose name
    // merely contains `repo` would also pass; confirm this is acceptable.
    let isValidClone = false;
    try {
      const remoteUrl = execSync('git remote get-url origin', { cwd: cloneDir, encoding: 'utf-8', stdio: 'pipe' }).trim();
      if (remoteUrl.includes(repo)) isValidClone = true;
    } catch { /* not a git repo */ }

    if (isValidClone) {
      // Reuse the existing workspace: sync it instead of re-cloning.
      console.log(yellow(`\n 📂 "${cloneDir}" already contains a clone of ${repo}.`));
      console.log(dim(' Syncing with upstream...\n'));

      process.chdir(cloneDir);
      setRepo(owner, repo);

      // Ensure upstream remote exists
      try { execSync(`git remote add upstream https://github.com/${owner}/${repo}.git`, { stdio: 'pipe' }); } catch { /* exists */ }

      await syncBranch();

      // Create or switch to issue branch
      const branchName = issue ? `fix/issue-${issue}` : null;
      if (branchName) {
        try {
          // `checkout -b` fails if the branch already exists…
          execSync(`git checkout -b ${branchName}`, { stdio: 'pipe' });
          console.log(green(` ✔ Created branch ${branchName}`));
        } catch {
          // …in which case just switch to it.
          execSync(`git checkout ${branchName}`, { stdio: 'pipe' });
          console.log(green(` ✔ Switched to branch ${branchName}`));
        }
      }

      console.log(green('\n✨ Workspace ready!'));
      return;
    }

    // Directory exists but isn't a valid clone — ask for new path
    const { newDir } = await inquirer.prompt([{
      type: 'input',
      name: 'newDir',
      message: yellow(`"${cloneDir}" already exists. Enter a different folder name:`),
      default: `${repo}-contrib`,
    }]);
    cloneDir = path.resolve(resolvedParent, newDir);
  }

  // 4. Clone
  // NOTE(review): assumes the fork is named `<myUser>/<repo>` — forkRepo()
  // returned the actual fork full name above, which may differ if GitHub
  // renamed the fork on collision; verify against forkRepo's contract.
  const cloneSpinner = ora(`Cloning your fork...`).start();
  const cloneUrl = `https://github.com/${myUser}/${repo}.git`;

  try {
    execSync(`git clone ${cloneUrl} ${cloneDir}`, { stdio: 'pipe' });
    cloneSpinner.succeed(`Cloned into ${green(cloneDir)}`);
  } catch (e: any) {
    cloneSpinner.fail(`Clone failed: ${e.message}`);
    return;
  }

  // 5. Setup Remotes & Branch
  process.chdir(cloneDir);
  execSync(`git remote add upstream https://github.com/${owner}/${repo}.git`, { stdio: 'pipe' });

  // Without an issue number, fall back to a unique timestamped branch name.
  const branchName = issue ? `fix/issue-${issue}` : `contrib-${Math.floor(Date.now() / 1000)}`;

  try {
    execSync(`git checkout -b ${branchName}`, { stdio: 'pipe' });
  } catch {
    // Branch already exists (e.g. re-running the command) — switch to it.
    execSync(`git checkout ${branchName}`, { stdio: 'pipe' });
  }

  // Update local GitPadi state
  setRepo(owner, repo);

  console.log(`\n${green('✨ Workspace Ready!')}`);
  console.log(`${dim('Directory:')} ${cloneDir}`);
  console.log(`${dim('Branch:')} ${branchName}`);
  console.log(`${dim('Upstream:')} ${owner}/${repo}`);

  console.log(boxen(
    green('Next step:\n') +
    white(`cd ${cloneDir}\n\n`) +
    dim('Start coding! When you\'re done, run ') + yellow('gitpadi submit'),
    { padding: 1, borderColor: 'green', borderStyle: 'round' }
  ));
}
|
|
170
|
+
|
|
171
|
+
/**
 * Phase 2: Sync Fork with Upstream (full flow).
 *
 * 1. Sync the fork on GitHub via the merge-upstream API.
 * 2. Pull the synced changes locally.
 * 3. Merge upstream into the current branch when it's a feature branch.
 *
 * Best-effort by design: returns silently when there is no `upstream`
 * remote, and swallows git errors in the outer catch (e.g. when not run
 * inside a repository at all).
 */
export async function syncBranch() {
  try {
    // Check if we are in a git repo and if upstream exists
    const remotes = execSync('git remote', { encoding: 'utf-8' });
    if (!remotes.includes('upstream')) return;

    const spinner = ora('Syncing fork...').start();

    // 1. Detect upstream default branch (main or master)
    execSync('git fetch upstream', { stdio: 'pipe' });
    let upstreamBranch = 'main';
    try {
      // If upstream/main doesn't resolve, assume the legacy default.
      execSync('git rev-parse upstream/main', { stdio: 'pipe' });
    } catch {
      upstreamBranch = 'master';
    }

    // 2. Sync fork on GitHub via API
    spinner.text = 'Syncing fork on GitHub...';
    try {
      const octokit = getOctokit();
      const myUser = await getAuthenticatedUser();
      const repo = getRepo();
      // NOTE(review): assumes the fork on GitHub is named `<myUser>/<repo>`
      // (same name as upstream) — confirm against forkAndClone's behavior.
      await octokit.request('POST /repos/{owner}/{repo}/merge-upstream', {
        owner: myUser,
        repo: repo,
        branch: upstreamBranch,
      });
      spinner.succeed(green('Fork synced on GitHub ✓'));
    } catch (e: any) {
      // May fail if already in sync or permissions — continue anyway
      spinner.info(dim('GitHub sync skipped (may already be in sync)'));
    }

    // 3. Pull synced changes locally
    const pullSpinner = ora('Pulling latest changes...').start();
    const currentBranch = execSync('git rev-parse --abbrev-ref HEAD', { encoding: 'utf-8' }).trim();

    // Fetch origin (our fork) with the new synced data
    execSync('git fetch origin', { stdio: 'pipe' });
    execSync('git fetch upstream', { stdio: 'pipe' });

    // If we're on main/master, just pull
    if (currentBranch === upstreamBranch) {
      try {
        execSync(`git pull origin ${upstreamBranch} --no-edit`, { stdio: 'pipe' });
        pullSpinner.succeed(green(`Pulled latest ${upstreamBranch} ✓`));
      } catch {
        // Pull left the working tree in a conflicted state — hand off to the user.
        pullSpinner.warn(yellow('Pull had conflicts — resolve manually'));
      }
    } else {
      // We're on a feature branch — merge upstream into it
      pullSpinner.text = `Merging upstream/${upstreamBranch} into ${currentBranch}...`;
      try {
        execSync(`git merge upstream/${upstreamBranch} --no-edit`, { stdio: 'pipe' });
        pullSpinner.succeed(green(`Merged upstream/${upstreamBranch} into ${cyan(currentBranch)} ✓`));
      } catch {
        // Merge conflict: leave the repo mid-merge and tell the user how to finish.
        pullSpinner.warn(yellow('Merge conflict detected!'));
        console.log(dim(' Resolve conflicts, then run:'));
        console.log(dim(' git add . && git commit'));
      }
    }

    console.log(green('\n✨ Fork is synced and up to date!'));
  } catch (e: any) {
    // Silently fail if git commands error (likely not in a repo)
  }
}
|
|
245
|
+
|
|
246
|
+
/**
|
|
247
|
+
* Phase 2: Submit PR
|
|
248
|
+
*/
|
|
249
|
+
export async function submitPR(opts: { title: string, body?: string, issue?: number, message?: string }) {
|
|
250
|
+
const spinner = ora('Preparing submission...').start();
|
|
251
|
+
try {
|
|
252
|
+
const owner = getOwner();
|
|
253
|
+
const repo = getRepo();
|
|
254
|
+
const branch = execSync('git rev-parse --abbrev-ref HEAD', { encoding: 'utf-8' }).trim();
|
|
255
|
+
|
|
256
|
+
// 1. Stage and Commit
|
|
257
|
+
spinner.text = 'Staging and committing changes...';
|
|
258
|
+
const commitMsg = opts.message || opts.title || 'Automated contribution via GitPadi';
|
|
259
|
+
|
|
260
|
+
try {
|
|
261
|
+
execSync('git add .', { stdio: 'pipe' });
|
|
262
|
+
// Check if there are changes to commit
|
|
263
|
+
const status = execSync('git status --porcelain', { encoding: 'utf-8' });
|
|
264
|
+
if (status.trim()) {
|
|
265
|
+
execSync(`git commit -m "${commitMsg.replace(/"/g, '\\"')}"`, { stdio: 'pipe' });
|
|
266
|
+
}
|
|
267
|
+
} catch (e: any) {
|
|
268
|
+
// If commit fails (e.g. no changes), we might still want to push if there are unpushed commits
|
|
269
|
+
dim(' (Note: No new changes to commit or commit failed)');
|
|
270
|
+
}
|
|
271
|
+
|
|
272
|
+
// Auto-infer issue from branch name (e.g. fix/issue-303)
|
|
273
|
+
let linkedIssue = opts.issue;
|
|
274
|
+
if (!linkedIssue) {
|
|
275
|
+
const match = branch.match(/issue-(\d+)/);
|
|
276
|
+
if (match) linkedIssue = parseInt(match[1]);
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
spinner.text = 'Pushing to your fork...';
|
|
280
|
+
execSync(`git push origin ${branch}`, { stdio: 'pipe' });
|
|
281
|
+
|
|
282
|
+
spinner.text = 'Creating Pull Request...';
|
|
283
|
+
const body = opts.body || (linkedIssue ? `Fixes #${linkedIssue}` : 'Automated PR via GitPadi');
|
|
284
|
+
|
|
285
|
+
const { data: pr } = await getOctokit().pulls.create({
|
|
286
|
+
owner,
|
|
287
|
+
repo,
|
|
288
|
+
title: opts.title,
|
|
289
|
+
body,
|
|
290
|
+
head: `${await getAuthenticatedUser()}:${branch}`,
|
|
291
|
+
base: 'main',
|
|
292
|
+
});
|
|
293
|
+
|
|
294
|
+
spinner.succeed(`PR Created: ${green(pr.html_url)}`);
|
|
295
|
+
} catch (e: any) {
|
|
296
|
+
spinner.fail(e.message);
|
|
297
|
+
}
|
|
298
|
+
}
|
|
299
|
+
|
|
300
|
+
/**
|
|
301
|
+
* Phase 2: View Logs
|
|
302
|
+
*/
|
|
303
|
+
export async function viewLogs() {
|
|
304
|
+
const spinner = ora('Fetching GitHub Action logs...').start();
|
|
305
|
+
try {
|
|
306
|
+
const owner = getOwner();
|
|
307
|
+
const repo = getRepo();
|
|
308
|
+
const sha = execSync('git rev-parse HEAD', { encoding: 'utf-8' }).trim();
|
|
309
|
+
|
|
310
|
+
const { checkRuns, combinedState } = await getLatestCheckRuns(owner, repo, sha);
|
|
311
|
+
spinner.stop();
|
|
312
|
+
|
|
313
|
+
if (checkRuns.length === 0) {
|
|
314
|
+
console.log(dim('\n ℹ️ No active check runs found for this commit.\n'));
|
|
315
|
+
return;
|
|
316
|
+
}
|
|
317
|
+
|
|
318
|
+
console.log(`\n${chalk.bold(`📋 GitHub Actions status:`)} ${combinedState === 'success' ? green('✅ Success') : combinedState === 'failure' ? chalk.red('❌ Failure') : yellow('⏳ Pending')}\n`);
|
|
319
|
+
|
|
320
|
+
checkRuns.forEach(run => {
|
|
321
|
+
const icon = run.status === 'completed' ? (run.conclusion === 'success' ? green('✅') : chalk.red('❌')) : yellow('⏳');
|
|
322
|
+
console.log(` ${icon} ${chalk.bold(run.name)}: ${dim(run.conclusion || run.status)}`);
|
|
323
|
+
if (run.conclusion === 'failure') {
|
|
324
|
+
console.log(chalk.red(` → Build failed. View details at: ${run.html_url}`));
|
|
325
|
+
}
|
|
326
|
+
});
|
|
327
|
+
console.log('');
|
|
328
|
+
} catch (e: any) {
|
|
329
|
+
spinner.fail(e.message);
|
|
330
|
+
}
|
|
331
|
+
}
|
|
@@ -94,7 +94,7 @@ export async function listContributors(opts: { limit?: number }) {
|
|
|
94
94
|
|
|
95
95
|
try {
|
|
96
96
|
const { data } = await getOctokit().repos.listContributors({
|
|
97
|
-
owner: getOwner(), repo: getRepo(), per_page: opts.limit ||
|
|
97
|
+
owner: getOwner(), repo: getRepo(), per_page: opts.limit || 50,
|
|
98
98
|
});
|
|
99
99
|
spinner.stop();
|
|
100
100
|
|
package/src/commands/issues.ts
CHANGED
|
@@ -18,7 +18,7 @@ export async function listIssues(opts: { state?: string; labels?: string; limit?
|
|
|
18
18
|
owner: getOwner(), repo: getRepo(),
|
|
19
19
|
state: (opts.state as 'open' | 'closed' | 'all') || 'open',
|
|
20
20
|
labels: opts.labels || undefined,
|
|
21
|
-
per_page: opts.limit ||
|
|
21
|
+
per_page: opts.limit || 50,
|
|
22
22
|
});
|
|
23
23
|
|
|
24
24
|
// Filter out PRs (GitHub API returns PRs in issues endpoint)
|
|
@@ -70,6 +70,46 @@ export async function createIssue(opts: { title: string; body?: string; labels?:
|
|
|
70
70
|
}
|
|
71
71
|
}
|
|
72
72
|
|
|
73
|
+
/**
|
|
74
|
+
* Parse a markdown file into issues.
|
|
75
|
+
* Format:
|
|
76
|
+
* ## Issue Title
|
|
77
|
+
* **Labels:** bug, frontend
|
|
78
|
+
* Body text here...
|
|
79
|
+
*/
|
|
80
|
+
function parseMarkdownIssues(content: string): { issues: any[]; labels: Record<string, string> } {
|
|
81
|
+
const issues: any[] = [];
|
|
82
|
+
const sections = content.split(/^## /m).filter(s => s.trim());
|
|
83
|
+
|
|
84
|
+
let num = 1;
|
|
85
|
+
for (const section of sections) {
|
|
86
|
+
const lines = section.split('\n');
|
|
87
|
+
const title = lines[0].trim();
|
|
88
|
+
if (!title) continue;
|
|
89
|
+
|
|
90
|
+
let labels: string[] = [];
|
|
91
|
+
const bodyLines: string[] = [];
|
|
92
|
+
|
|
93
|
+
for (let i = 1; i < lines.length; i++) {
|
|
94
|
+
const labelsMatch = lines[i].match(/^\*\*Labels?:\*\*\s*(.+)/i);
|
|
95
|
+
if (labelsMatch) {
|
|
96
|
+
labels = labelsMatch[1].split(',').map(l => l.trim()).filter(Boolean);
|
|
97
|
+
} else {
|
|
98
|
+
bodyLines.push(lines[i]);
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
issues.push({
|
|
103
|
+
number: num++,
|
|
104
|
+
title,
|
|
105
|
+
body: bodyLines.join('\n').trim(),
|
|
106
|
+
labels,
|
|
107
|
+
});
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
return { issues, labels: {} };
|
|
111
|
+
}
|
|
112
|
+
|
|
73
113
|
export async function createIssuesFromFile(filePath: string, opts: { dryRun?: boolean; start?: number; end?: number }) {
|
|
74
114
|
requireRepo();
|
|
75
115
|
const resolved = path.resolve(filePath);
|
|
@@ -78,22 +118,49 @@ export async function createIssuesFromFile(filePath: string, opts: { dryRun?: bo
|
|
|
78
118
|
return;
|
|
79
119
|
}
|
|
80
120
|
|
|
81
|
-
const
|
|
82
|
-
const
|
|
121
|
+
const raw = fs.readFileSync(resolved, 'utf-8');
|
|
122
|
+
const ext = path.extname(resolved).toLowerCase();
|
|
123
|
+
|
|
124
|
+
let config: { issues: any[]; labels?: Record<string, string> };
|
|
125
|
+
let detectedFormat = 'JSON';
|
|
126
|
+
|
|
127
|
+
if (ext === '.md' || ext === '.markdown') {
|
|
128
|
+
config = parseMarkdownIssues(raw);
|
|
129
|
+
detectedFormat = 'Markdown';
|
|
130
|
+
} else {
|
|
131
|
+
// Try JSON first, fallback to Markdown if it fails
|
|
132
|
+
try {
|
|
133
|
+
const parsed = JSON.parse(raw);
|
|
134
|
+
config = { issues: parsed.issues || [], labels: parsed.labels };
|
|
135
|
+
} catch {
|
|
136
|
+
// Not valid JSON — try markdown parser
|
|
137
|
+
if (raw.trimStart().startsWith('#')) {
|
|
138
|
+
console.log(chalk.yellow(`\n ⚠ File has .json extension but contains Markdown — parsing as Markdown.\n`));
|
|
139
|
+
config = parseMarkdownIssues(raw);
|
|
140
|
+
detectedFormat = 'Markdown (auto-detected)';
|
|
141
|
+
} else {
|
|
142
|
+
console.error(chalk.red(`\n ❌ Fatal: File is not valid JSON or Markdown.\n`));
|
|
143
|
+
return;
|
|
144
|
+
}
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
const issues = config.issues;
|
|
83
149
|
const start = opts.start || 1;
|
|
84
150
|
const end = opts.end || 999;
|
|
85
151
|
const filtered = issues.filter((i: any) => i.number >= start && i.number <= end);
|
|
86
152
|
|
|
87
153
|
console.log(`\n${chalk.bold('📋 GitPadi Issue Creator')}`);
|
|
88
|
-
console.log(chalk.dim(` Repo:
|
|
89
|
-
console.log(chalk.dim(` File:
|
|
90
|
-
console.log(chalk.dim(` Range:
|
|
91
|
-
console.log(chalk.dim(` Mode:
|
|
154
|
+
console.log(chalk.dim(` Repo: ${getFullRepo()}`));
|
|
155
|
+
console.log(chalk.dim(` File: ${filePath} (${detectedFormat})`));
|
|
156
|
+
console.log(chalk.dim(` Range: #${start}-#${end} (${filtered.length} issues)`));
|
|
157
|
+
console.log(chalk.dim(` Mode: ${opts.dryRun ? 'DRY RUN' : 'LIVE'}\n`));
|
|
92
158
|
|
|
93
159
|
if (opts.dryRun) {
|
|
94
160
|
filtered.forEach((i: any) => {
|
|
95
161
|
console.log(` ${chalk.dim(`#${String(i.number).padStart(2, '0')}`)} ${i.title}`);
|
|
96
|
-
console.log(chalk.dim(` [${i.labels.join(', ')}]`));
|
|
162
|
+
console.log(chalk.dim(` [${(i.labels || []).join(', ')}]`));
|
|
163
|
+
if (i.body) console.log(chalk.dim(` ${i.body.substring(0, 80)}...`));
|
|
97
164
|
});
|
|
98
165
|
console.log(chalk.green(`\n✅ Dry run: ${filtered.length} issues would be created.\n`));
|
|
99
166
|
return;
|
|
@@ -103,7 +170,7 @@ export async function createIssuesFromFile(filePath: string, opts: { dryRun?: bo
|
|
|
103
170
|
let created = 0, failed = 0;
|
|
104
171
|
|
|
105
172
|
// Create labels if defined
|
|
106
|
-
if (config.labels) {
|
|
173
|
+
if (config.labels && Object.keys(config.labels).length > 0) {
|
|
107
174
|
const spinner = ora('Setting up labels...').start();
|
|
108
175
|
try {
|
|
109
176
|
for (const [name, color] of Object.entries(config.labels)) {
|
package/src/commands/prs.ts
CHANGED
|
@@ -13,7 +13,7 @@ export async function listPRs(opts: { state?: string; limit?: number }) {
|
|
|
13
13
|
const { data: prs } = await getOctokit().pulls.list({
|
|
14
14
|
owner: getOwner(), repo: getRepo(),
|
|
15
15
|
state: (opts.state as 'open' | 'closed' | 'all') || 'open',
|
|
16
|
-
per_page: opts.limit ||
|
|
16
|
+
per_page: opts.limit || 50,
|
|
17
17
|
});
|
|
18
18
|
|
|
19
19
|
spinner.stop();
|
package/src/commands/releases.ts
CHANGED
|
@@ -31,7 +31,7 @@ export async function listReleases(opts: { limit?: number }) {
|
|
|
31
31
|
|
|
32
32
|
try {
|
|
33
33
|
const { data } = await getOctokit().repos.listReleases({
|
|
34
|
-
owner: getOwner(), repo: getRepo(), per_page: opts.limit ||
|
|
34
|
+
owner: getOwner(), repo: getRepo(), per_page: opts.limit || 50,
|
|
35
35
|
});
|
|
36
36
|
spinner.stop();
|
|
37
37
|
|
package/src/commands/repos.ts
CHANGED
|
@@ -52,7 +52,11 @@ export async function cloneRepo(name: string, opts: { org?: string; dir?: string
|
|
|
52
52
|
try {
|
|
53
53
|
execSync(`git clone ${url} ${dir}`, { stdio: 'pipe' });
|
|
54
54
|
spinner.succeed(`Cloned to ${chalk.green(`./${dir}`)}`);
|
|
55
|
-
|
|
55
|
+
return true;
|
|
56
|
+
} catch (e: any) {
|
|
57
|
+
spinner.fail(`Clone failed: ${e.message}`);
|
|
58
|
+
return false;
|
|
59
|
+
}
|
|
56
60
|
}
|
|
57
61
|
|
|
58
62
|
export async function repoInfo(name: string, opts: { org?: string }) {
|
|
@@ -80,7 +84,11 @@ export async function repoInfo(name: string, opts: { org?: string }) {
|
|
|
80
84
|
|
|
81
85
|
console.log(table.toString());
|
|
82
86
|
console.log('');
|
|
83
|
-
|
|
87
|
+
return repo;
|
|
88
|
+
} catch (e: any) {
|
|
89
|
+
spinner.fail(e.message);
|
|
90
|
+
return null;
|
|
91
|
+
}
|
|
84
92
|
}
|
|
85
93
|
|
|
86
94
|
export async function setTopics(name: string, topics: string[], opts: { org?: string }) {
|
|
@@ -93,37 +101,44 @@ export async function setTopics(name: string, topics: string[], opts: { org?: st
|
|
|
93
101
|
} catch (e: any) { spinner.fail(e.message); }
|
|
94
102
|
}
|
|
95
103
|
|
|
96
|
-
export async function listRepos(opts: { org?: string; limit?: number }) {
|
|
97
|
-
const spinner = ora('Fetching repos...').start();
|
|
104
|
+
export async function listRepos(opts: { org?: string; limit?: number; silent?: boolean }) {
|
|
105
|
+
const spinner = !opts.silent ? ora('Fetching repos...').start() : null;
|
|
98
106
|
const octokit = getOctokit();
|
|
99
107
|
|
|
100
108
|
try {
|
|
101
109
|
let repos: any[];
|
|
102
110
|
if (opts.org) {
|
|
103
|
-
({ data: repos } = await octokit.repos.listForOrg({ org: opts.org, per_page: opts.limit ||
|
|
111
|
+
({ data: repos } = await octokit.repos.listForOrg({ org: opts.org, per_page: opts.limit || 100, sort: 'updated' }));
|
|
104
112
|
} else {
|
|
105
|
-
({ data: repos } = await octokit.repos.listForAuthenticatedUser({ per_page: opts.limit ||
|
|
113
|
+
({ data: repos } = await octokit.repos.listForAuthenticatedUser({ per_page: opts.limit || 100, sort: 'updated' }));
|
|
106
114
|
}
|
|
107
115
|
|
|
108
|
-
spinner.stop();
|
|
116
|
+
if (spinner) spinner.stop();
|
|
117
|
+
|
|
118
|
+
if (!opts.silent) {
|
|
119
|
+
const table = new Table({
|
|
120
|
+
head: ['Name', 'Stars', 'Language', 'Visibility', 'Updated'].map((h) => chalk.cyan(h)),
|
|
121
|
+
style: { head: [], border: [] },
|
|
122
|
+
});
|
|
123
|
+
|
|
124
|
+
repos.forEach((r: any) => {
|
|
125
|
+
table.push([
|
|
126
|
+
r.full_name,
|
|
127
|
+
`⭐ ${r.stargazers_count}`,
|
|
128
|
+
r.language || '-',
|
|
129
|
+
r.private ? chalk.yellow('private') : chalk.green('public'),
|
|
130
|
+
new Date(r.updated_at).toLocaleDateString(),
|
|
131
|
+
]);
|
|
132
|
+
});
|
|
133
|
+
|
|
134
|
+
console.log(`\n${chalk.bold('📦 Repositories')} (${repos.length})\n`);
|
|
135
|
+
console.log(table.toString());
|
|
136
|
+
console.log('');
|
|
137
|
+
}
|
|
109
138
|
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
repos.forEach((r: any) => {
|
|
116
|
-
table.push([
|
|
117
|
-
r.full_name,
|
|
118
|
-
`⭐ ${r.stargazers_count}`,
|
|
119
|
-
r.language || '-',
|
|
120
|
-
r.private ? chalk.yellow('private') : chalk.green('public'),
|
|
121
|
-
new Date(r.updated_at).toLocaleDateString(),
|
|
122
|
-
]);
|
|
123
|
-
});
|
|
124
|
-
|
|
125
|
-
console.log(`\n${chalk.bold('📦 Repositories')} (${repos.length})\n`);
|
|
126
|
-
console.log(table.toString());
|
|
127
|
-
console.log('');
|
|
128
|
-
} catch (e: any) { spinner.fail(e.message); }
|
|
139
|
+
return repos;
|
|
140
|
+
} catch (e: any) {
|
|
141
|
+
if (spinner) spinner.fail(e.message);
|
|
142
|
+
return [];
|
|
143
|
+
}
|
|
129
144
|
}
|