@oss-autopilot/core 0.41.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +85 -0
- package/dist/cli.bundle.cjs +17657 -0
- package/dist/cli.d.ts +12 -0
- package/dist/cli.js +325 -0
- package/dist/commands/check-integration.d.ts +10 -0
- package/dist/commands/check-integration.js +192 -0
- package/dist/commands/comments.d.ts +24 -0
- package/dist/commands/comments.js +311 -0
- package/dist/commands/config.d.ts +11 -0
- package/dist/commands/config.js +82 -0
- package/dist/commands/daily.d.ts +29 -0
- package/dist/commands/daily.js +433 -0
- package/dist/commands/dashboard-data.d.ts +45 -0
- package/dist/commands/dashboard-data.js +132 -0
- package/dist/commands/dashboard-templates.d.ts +23 -0
- package/dist/commands/dashboard-templates.js +1627 -0
- package/dist/commands/dashboard.d.ts +18 -0
- package/dist/commands/dashboard.js +134 -0
- package/dist/commands/dismiss.d.ts +13 -0
- package/dist/commands/dismiss.js +49 -0
- package/dist/commands/init.d.ts +10 -0
- package/dist/commands/init.js +27 -0
- package/dist/commands/local-repos.d.ts +14 -0
- package/dist/commands/local-repos.js +155 -0
- package/dist/commands/parse-list.d.ts +13 -0
- package/dist/commands/parse-list.js +139 -0
- package/dist/commands/read.d.ts +12 -0
- package/dist/commands/read.js +33 -0
- package/dist/commands/search.d.ts +10 -0
- package/dist/commands/search.js +74 -0
- package/dist/commands/setup.d.ts +15 -0
- package/dist/commands/setup.js +276 -0
- package/dist/commands/shelve.d.ts +13 -0
- package/dist/commands/shelve.js +49 -0
- package/dist/commands/snooze.d.ts +18 -0
- package/dist/commands/snooze.js +83 -0
- package/dist/commands/startup.d.ts +33 -0
- package/dist/commands/startup.js +197 -0
- package/dist/commands/status.d.ts +10 -0
- package/dist/commands/status.js +43 -0
- package/dist/commands/track.d.ts +16 -0
- package/dist/commands/track.js +59 -0
- package/dist/commands/validation.d.ts +43 -0
- package/dist/commands/validation.js +112 -0
- package/dist/commands/vet.d.ts +10 -0
- package/dist/commands/vet.js +36 -0
- package/dist/core/checklist-analysis.d.ts +17 -0
- package/dist/core/checklist-analysis.js +39 -0
- package/dist/core/ci-analysis.d.ts +78 -0
- package/dist/core/ci-analysis.js +163 -0
- package/dist/core/comment-utils.d.ts +15 -0
- package/dist/core/comment-utils.js +52 -0
- package/dist/core/concurrency.d.ts +5 -0
- package/dist/core/concurrency.js +15 -0
- package/dist/core/daily-logic.d.ts +77 -0
- package/dist/core/daily-logic.js +512 -0
- package/dist/core/display-utils.d.ts +10 -0
- package/dist/core/display-utils.js +100 -0
- package/dist/core/errors.d.ts +24 -0
- package/dist/core/errors.js +34 -0
- package/dist/core/github-stats.d.ts +73 -0
- package/dist/core/github-stats.js +272 -0
- package/dist/core/github.d.ts +19 -0
- package/dist/core/github.js +60 -0
- package/dist/core/http-cache.d.ts +97 -0
- package/dist/core/http-cache.js +269 -0
- package/dist/core/index.d.ts +15 -0
- package/dist/core/index.js +15 -0
- package/dist/core/issue-conversation.d.ts +29 -0
- package/dist/core/issue-conversation.js +231 -0
- package/dist/core/issue-discovery.d.ts +85 -0
- package/dist/core/issue-discovery.js +589 -0
- package/dist/core/issue-filtering.d.ts +51 -0
- package/dist/core/issue-filtering.js +103 -0
- package/dist/core/issue-scoring.d.ts +40 -0
- package/dist/core/issue-scoring.js +92 -0
- package/dist/core/issue-vetting.d.ts +49 -0
- package/dist/core/issue-vetting.js +536 -0
- package/dist/core/logger.d.ts +21 -0
- package/dist/core/logger.js +49 -0
- package/dist/core/maintainer-analysis.d.ts +10 -0
- package/dist/core/maintainer-analysis.js +59 -0
- package/dist/core/pagination.d.ts +11 -0
- package/dist/core/pagination.js +20 -0
- package/dist/core/pr-monitor.d.ts +109 -0
- package/dist/core/pr-monitor.js +594 -0
- package/dist/core/review-analysis.d.ts +72 -0
- package/dist/core/review-analysis.js +163 -0
- package/dist/core/state.d.ts +371 -0
- package/dist/core/state.js +1089 -0
- package/dist/core/types.d.ts +507 -0
- package/dist/core/types.js +34 -0
- package/dist/core/utils.d.ts +249 -0
- package/dist/core/utils.js +422 -0
- package/dist/formatters/json.d.ts +269 -0
- package/dist/formatters/json.js +88 -0
- package/package.json +67 -0
|
@@ -0,0 +1,536 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Issue Vetting — checks individual issues for claimability, existing PRs,
|
|
3
|
+
* project health, contribution guidelines, and requirement clarity.
|
|
4
|
+
*
|
|
5
|
+
* Extracted from issue-discovery.ts (#356) to isolate vetting logic.
|
|
6
|
+
*/
|
|
7
|
+
import { paginateAll } from './pagination.js';
|
|
8
|
+
import { parseGitHubUrl, daysBetween } from './utils.js';
|
|
9
|
+
import { ValidationError } from './errors.js';
|
|
10
|
+
import { warn } from './logger.js';
|
|
11
|
+
import { getHttpCache, cachedRequest } from './http-cache.js';
|
|
12
|
+
import { calculateRepoQualityBonus, calculateViabilityScore } from './issue-scoring.js';
|
|
13
|
+
const MODULE = 'issue-vetting';
// Concurrency limit for parallel API calls
const MAX_CONCURRENT_REQUESTS = 5;
// Cache for contribution guidelines (expires after 1 hour, max 100 entries)
const guidelinesCache = new Map();
const CACHE_TTL_MS = 60 * 60 * 1000; // 1 hour
const CACHE_MAX_SIZE = 100;
/**
 * Evict stale and excess entries from the guidelines cache.
 * Expired entries (older than CACHE_TTL_MS) are dropped first; if the cache
 * is still above CACHE_MAX_SIZE afterwards, the oldest entries are evicted
 * until the size cap is met.
 */
function pruneCache() {
    const now = Date.now();
    // Pass 1: drop anything past its TTL.
    for (const [key, value] of guidelinesCache.entries()) {
        const age = now - value.fetchedAt;
        if (age > CACHE_TTL_MS) {
            guidelinesCache.delete(key);
        }
    }
    // Pass 2: enforce the size cap, evicting oldest-first by fetch time.
    const overflow = guidelinesCache.size - CACHE_MAX_SIZE;
    if (overflow > 0) {
        const oldestFirst = [...guidelinesCache.entries()].sort((a, b) => a[1].fetchedAt - b[1].fetchedAt);
        for (const [key] of oldestFirst.slice(0, overflow)) {
            guidelinesCache.delete(key);
        }
    }
}
|
|
37
|
+
/**
 * Runs the full vetting pipeline for individual GitHub issues: existing-PR
 * detection, claim detection, project health, contribution guidelines, and
 * requirement clarity, then derives a recommendation and viability score.
 */
export class IssueVetter {
    // Authenticated Octokit REST client (injected; never constructed here).
    octokit;
    // App state accessor: config, repo scores, starred repos.
    stateManager;
    /**
     * @param octokit - authenticated Octokit client used for all API calls
     * @param stateManager - provides getState(), getRepoScore(), getStarredRepos()
     */
    constructor(octokit, stateManager) {
        this.octokit = octokit;
        this.stateManager = stateManager;
    }
    /**
     * Vet a specific issue — runs all checks and computes recommendation + viability score.
     *
     * @param issueUrl - full GitHub issue URL (must parse as type 'issues')
     * @returns candidate object: tracked issue, vetting result, project health,
     *   recommendation ('approve' | 'skip' | 'needs_review'), reasons, viability
     *   score, and search priority.
     * @throws ValidationError when the URL is not a parseable issue URL.
     */
    async vetIssue(issueUrl) {
        // Parse URL
        const parsed = parseGitHubUrl(issueUrl);
        if (!parsed || parsed.type !== 'issues') {
            throw new ValidationError(`Invalid issue URL: ${issueUrl}`);
        }
        const { owner, repo, number } = parsed;
        const repoFullName = `${owner}/${repo}`;
        // Fetch issue data
        const { data: ghIssue } = await this.octokit.issues.get({
            owner,
            repo,
            issue_number: number,
        });
        // Run all vetting checks in parallel (independent API calls).
        const [existingPRCheck, claimCheck, projectHealth, contributionGuidelines, userMergedPRCount] = await Promise.all([
            this.checkNoExistingPR(owner, repo, number),
            this.checkNotClaimed(owner, repo, number, ghIssue.comments),
            this.checkProjectHealth(owner, repo),
            this.fetchContributionGuidelines(owner, repo),
            this.checkUserMergedPRsInRepo(owner, repo),
        ]);
        const noExistingPR = existingPRCheck.passed;
        const notClaimed = claimCheck.passed;
        // Analyze issue quality
        const clearRequirements = this.analyzeRequirements(ghIssue.body || '');
        // When the health check itself failed (API error), use a neutral default:
        // don't penalize the repo as inactive, but don't credit it as active either.
        const projectActive = projectHealth.checkFailed ? true : projectHealth.isActive;
        const vettingResult = {
            passedAllChecks: noExistingPR && notClaimed && projectActive && clearRequirements,
            checks: {
                noExistingPR,
                notClaimed,
                projectActive,
                clearRequirements,
                contributionGuidelinesFound: !!contributionGuidelines,
            },
            contributionGuidelines,
            notes: [],
        };
        // Build human-readable notes for every failed or inconclusive check.
        if (!noExistingPR)
            vettingResult.notes.push('Existing PR found for this issue');
        if (!notClaimed)
            vettingResult.notes.push('Issue appears to be claimed by someone');
        if (existingPRCheck.inconclusive) {
            vettingResult.notes.push(`Could not verify absence of existing PRs: ${existingPRCheck.reason || 'API error'}`);
        }
        if (claimCheck.inconclusive) {
            vettingResult.notes.push(`Could not verify claim status: ${claimCheck.reason || 'API error'}`);
        }
        if (projectHealth.checkFailed) {
            vettingResult.notes.push(`Could not verify project activity: ${projectHealth.failureReason || 'API error'}`);
        }
        else if (!projectHealth.isActive) {
            vettingResult.notes.push('Project may be inactive');
        }
        if (!clearRequirements)
            vettingResult.notes.push('Issue requirements are unclear');
        if (!contributionGuidelines)
            vettingResult.notes.push('No CONTRIBUTING.md found');
        // Create tracked issue
        const trackedIssue = {
            id: ghIssue.id,
            url: issueUrl,
            repo: repoFullName,
            number,
            title: ghIssue.title,
            status: 'candidate',
            // Labels may be plain strings or label objects depending on the API shape.
            labels: ghIssue.labels.map((l) => (typeof l === 'string' ? l : l.name || '')),
            createdAt: ghIssue.created_at,
            updatedAt: ghIssue.updated_at,
            vetted: true,
            vettingResult,
        };
        // Determine recommendation
        const reasonsToSkip = [];
        const reasonsToApprove = [];
        if (!noExistingPR)
            reasonsToSkip.push('Has existing PR');
        if (!notClaimed)
            reasonsToSkip.push('Already claimed');
        if (!projectHealth.isActive && !projectHealth.checkFailed)
            reasonsToSkip.push('Inactive project');
        if (!clearRequirements)
            reasonsToSkip.push('Unclear requirements');
        if (noExistingPR)
            reasonsToApprove.push('No existing PR');
        if (notClaimed)
            reasonsToApprove.push('Not claimed');
        if (projectHealth.isActive && !projectHealth.checkFailed)
            reasonsToApprove.push('Active project');
        if (clearRequirements)
            reasonsToApprove.push('Clear requirements');
        if (contributionGuidelines)
            reasonsToApprove.push('Has contribution guidelines');
        // Determine effective merged PR count: prefer local state (authoritative if present),
        // fall back to live GitHub API count to detect contributions made before using oss-autopilot (#373)
        const config = this.stateManager.getState().config;
        const repoScoreRecord = this.stateManager.getRepoScore(repoFullName);
        const effectiveMergedCount = repoScoreRecord && repoScoreRecord.mergedPRCount > 0 ? repoScoreRecord.mergedPRCount : userMergedPRCount;
        if (effectiveMergedCount > 0) {
            reasonsToApprove.push(`Trusted project (${effectiveMergedCount} PR${effectiveMergedCount > 1 ? 's' : ''} merged)`);
        }
        else if (config.trustedProjects.includes(repoFullName)) {
            reasonsToApprove.push('Trusted project (previous PR merged)');
        }
        // Check for closed/rejected PR history in this repo
        // Use effectiveMergedCount to avoid contradictory signals when API data
        // shows merges that local state doesn't know about (#373)
        if (repoScoreRecord) {
            if (repoScoreRecord.closedWithoutMergeCount > 0 && effectiveMergedCount === 0) {
                reasonsToSkip.push('User has rejected PR(s) in this repo with no successful merges');
            }
            else if (repoScoreRecord.closedWithoutMergeCount > 0 && effectiveMergedCount > 0) {
                vettingResult.notes.push(`Mixed history: ${effectiveMergedCount} merged, ${repoScoreRecord.closedWithoutMergeCount} closed without merge`);
            }
        }
        // Check for org-level affinity (user has merged PRs in another repo under same org)
        const orgName = repoFullName.split('/')[0];
        let orgHasMergedPRs = false;
        if (orgName && repoFullName.includes('/')) {
            orgHasMergedPRs = Object.values(this.stateManager.getState().repoScores).some((rs) => rs.repo && rs.mergedPRCount > 0 && rs.repo.startsWith(orgName + '/') && rs.repo !== repoFullName);
        }
        if (orgHasMergedPRs) {
            reasonsToApprove.push(`Org affinity (merged PRs in other ${orgName} repos)`);
        }
        // Recommendation: approve only on a clean sweep; skip when 3+ negatives;
        // everything in between needs a human look.
        let recommendation;
        if (vettingResult.passedAllChecks) {
            recommendation = 'approve';
        }
        else if (reasonsToSkip.length > 2) {
            recommendation = 'skip';
        }
        else {
            recommendation = 'needs_review';
        }
        // Downgrade to needs_review if any check was inconclusive —
        // "approve" should only be given when all checks actually passed, not when they were skipped.
        const hasInconclusiveChecks = projectHealth.checkFailed || existingPRCheck.inconclusive || claimCheck.inconclusive;
        if (recommendation === 'approve' && hasInconclusiveChecks) {
            recommendation = 'needs_review';
            vettingResult.notes.push('Recommendation downgraded: one or more checks were inconclusive');
        }
        // Calculate repo quality bonus from star/fork counts (#98)
        const repoQualityBonus = calculateRepoQualityBonus(projectHealth.stargazersCount ?? 0, projectHealth.forksCount ?? 0);
        if (projectHealth.checkFailed && repoQualityBonus === 0) {
            vettingResult.notes.push('Repo quality bonus unavailable: could not fetch star/fork counts due to API error');
        }
        const repoScore = this.getRepoScore(repoFullName);
        const viabilityScore = calculateViabilityScore({
            repoScore,
            hasExistingPR: !noExistingPR,
            isClaimed: !notClaimed,
            clearRequirements,
            hasContributionGuidelines: !!contributionGuidelines,
            issueUpdatedAt: ghIssue.updated_at,
            closedWithoutMergeCount: repoScoreRecord?.closedWithoutMergeCount ?? 0,
            mergedPRCount: effectiveMergedCount,
            orgHasMergedPRs,
            repoQualityBonus,
        });
        // Search priority: prior merged PRs beat a starred repo, which beats normal.
        const starredRepos = this.stateManager.getStarredRepos();
        let searchPriority = 'normal';
        if (effectiveMergedCount > 0) {
            searchPriority = 'merged_pr';
        }
        else if (starredRepos.includes(repoFullName)) {
            searchPriority = 'starred';
        }
        return {
            issue: trackedIssue,
            vettingResult,
            projectHealth,
            recommendation,
            reasonsToSkip,
            reasonsToApprove,
            viabilityScore,
            searchPriority,
        };
    }
    /**
     * Vet multiple issues in parallel with concurrency limit.
     *
     * Stops launching new work once maxResults candidates are collected;
     * failures are counted (not thrown) so one bad issue cannot sink the batch.
     *
     * @param urls - issue URLs to vet
     * @param maxResults - cap on returned candidates
     * @param priority - optional searchPriority override applied to each candidate
     * @returns { candidates, allFailed, rateLimitHit }
     */
    async vetIssuesParallel(urls, maxResults, priority) {
        const candidates = [];
        const pending = [];
        let failedVettingCount = 0;
        let rateLimitFailures = 0;
        let attemptedCount = 0;
        for (const url of urls) {
            if (candidates.length >= maxResults)
                break;
            attemptedCount++;
            const task = this.vetIssue(url)
                .then((candidate) => {
                // Re-check the cap: another in-flight task may have filled it.
                if (candidates.length < maxResults) {
                    // Override the priority if provided
                    if (priority) {
                        candidate.searchPriority = priority;
                    }
                    candidates.push(candidate);
                }
            })
                .catch((error) => {
                failedVettingCount++;
                if (IssueVetter.isRateLimitError(error)) {
                    rateLimitFailures++;
                }
                warn(MODULE, `Error vetting issue ${url}:`, error instanceof Error ? error.message : error);
            });
            pending.push(task);
            // Limit concurrency
            if (pending.length >= MAX_CONCURRENT_REQUESTS) {
                // Wait for at least one to complete, then remove it
                // (tasks never reject here — .catch above absorbs errors).
                const completed = await Promise.race(pending.map((p, i) => p.then(() => i)));
                pending.splice(completed, 1);
            }
        }
        // Wait for remaining
        await Promise.allSettled(pending);
        const allFailed = failedVettingCount === attemptedCount && attemptedCount > 0;
        if (allFailed) {
            warn(MODULE, `All ${attemptedCount} issue(s) failed vetting. ` +
                `This may indicate a systemic issue (rate limit, auth, network).`);
        }
        return { candidates: candidates.slice(0, maxResults), allFailed, rateLimitHit: rateLimitFailures > 0 };
    }
    /** Check if an error is a GitHub rate limit error (429 or rate-limit 403). */
    static isRateLimitError(error) {
        const status = error?.status;
        if (status === 429)
            return true;
        if (status === 403) {
            // 403 is also used for permission errors; only treat it as a rate
            // limit when the message says so.
            const msg = error instanceof Error ? error.message.toLowerCase() : String(error).toLowerCase();
            return msg.includes('rate limit');
        }
        return false;
    }
    /**
     * Check whether no PR already addresses this issue.
     * Combines a search for PRs mentioning the issue number with the issue's
     * timeline cross-references. On API error, assumes no existing PR but
     * flags the result as inconclusive.
     *
     * @returns { passed, inconclusive?, reason? }
     */
    async checkNoExistingPR(owner, repo, issueNumber) {
        try {
            // Search for PRs that mention this issue
            const { data } = await this.octokit.search.issuesAndPullRequests({
                q: `repo:${owner}/${repo} is:pr ${issueNumber}`,
                per_page: 5,
            });
            // Also check timeline for linked PRs
            const timeline = await paginateAll((page) => this.octokit.issues.listEventsForTimeline({
                owner,
                repo,
                issue_number: issueNumber,
                per_page: 100,
                page,
            }));
            const linkedPRs = timeline.filter((event) => {
                const e = event;
                return e.event === 'cross-referenced' && e.source?.issue?.pull_request;
            });
            return { passed: data.total_count === 0 && linkedPRs.length === 0 };
        }
        catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            warn(MODULE, `Failed to check for existing PRs on ${owner}/${repo}#${issueNumber}: ${errorMessage}. Assuming no existing PR.`);
            return { passed: true, inconclusive: true, reason: errorMessage };
        }
    }
    /**
     * Check how many merged PRs the authenticated user has in a repo.
     * Uses GitHub Search API. Returns 0 on error (non-fatal).
     */
    async checkUserMergedPRsInRepo(owner, repo) {
        try {
            // Use @me to search as the authenticated user
            const { data } = await this.octokit.search.issuesAndPullRequests({
                q: `repo:${owner}/${repo} is:pr is:merged author:@me`,
                per_page: 1, // We only need total_count
            });
            return data.total_count;
        }
        catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            warn(MODULE, `Could not check merged PRs in ${owner}/${repo}: ${errorMessage}. Defaulting to 0.`);
            return 0;
        }
    }
    /**
     * Check that nobody has claimed the issue in its comments, using simple
     * phrase matching against the most recent 100 comments.
     * On API error, assumes not claimed but flags the result as inconclusive.
     *
     * @param commentCount - issue's comment count (skips the API call when 0)
     * @returns { passed, inconclusive?, reason? }
     */
    async checkNotClaimed(owner, repo, issueNumber, commentCount) {
        if (commentCount === 0)
            return { passed: true };
        try {
            // Paginate through all comments
            const comments = await this.octokit.paginate(this.octokit.issues.listComments, {
                owner,
                repo,
                issue_number: issueNumber,
                per_page: 100,
            }, (response) => response.data);
            // Limit to last 100 comments to avoid excessive processing
            const recentComments = comments.slice(-100);
            // Look for claiming phrases (case-insensitive substring match)
            const claimPhrases = [
                "i'm working on this",
                'i am working on this',
                "i'll take this",
                'i will take this',
                'working on it',
                "i'd like to work on",
                'i would like to work on',
                'can i work on',
                'may i work on',
                'assigned to me',
                "i'm on it",
                "i'll submit a pr",
                'i will submit a pr',
                'working on a fix',
                'working on a pr',
            ];
            for (const comment of recentComments) {
                const body = (comment.body || '').toLowerCase();
                if (claimPhrases.some((phrase) => body.includes(phrase))) {
                    return { passed: false };
                }
            }
            return { passed: true };
        }
        catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            warn(MODULE, `Failed to check claim status on ${owner}/${repo}#${issueNumber}: ${errorMessage}. Assuming not claimed.`);
            return { passed: true, inconclusive: true, reason: errorMessage };
        }
    }
    /**
     * Assess repository health: last commit recency, open issue count, CI
     * presence, and star/fork counts. "Active" means a commit in the last
     * 30 days. On failure returns a sentinel record with checkFailed: true.
     */
    async checkProjectHealth(owner, repo) {
        try {
            // Get repo info (with ETag caching — repo metadata changes infrequently)
            const cache = getHttpCache();
            const url = `/repos/${owner}/${repo}`;
            const repoData = await cachedRequest(cache, url, (headers) => this.octokit.repos.get({ owner, repo, headers }));
            // Get recent commits
            const { data: commits } = await this.octokit.repos.listCommits({
                owner,
                repo,
                per_page: 1,
            });
            const lastCommit = commits[0];
            // Fall back to pushed_at when the latest commit has no author date.
            const lastCommitAt = lastCommit?.commit?.author?.date || repoData.pushed_at;
            const daysSinceLastCommit = daysBetween(new Date(lastCommitAt));
            // Check CI status (simplified - just check if workflows exist)
            let ciStatus = 'unknown';
            try {
                const { data: workflows } = await this.octokit.actions.listRepoWorkflows({
                    owner,
                    repo,
                    per_page: 1,
                });
                if (workflows.total_count > 0) {
                    ciStatus = 'passing'; // Assume passing if workflows exist
                }
            }
            catch (error) {
                const errorMessage = error instanceof Error ? error.message : String(error);
                warn(MODULE, `Failed to check CI status for ${owner}/${repo}: ${errorMessage}. Defaulting to unknown.`);
            }
            return {
                repo: `${owner}/${repo}`,
                lastCommitAt,
                daysSinceLastCommit,
                openIssuesCount: repoData.open_issues_count,
                avgIssueResponseDays: 0, // Would need more API calls to calculate
                ciStatus,
                isActive: daysSinceLastCommit < 30,
                stargazersCount: repoData.stargazers_count,
                forksCount: repoData.forks_count,
            };
        }
        catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            warn(MODULE, `Error checking project health for ${owner}/${repo}: ${errorMessage}`);
            // Sentinel "unknown health" record; callers must consult checkFailed.
            return {
                repo: `${owner}/${repo}`,
                lastCommitAt: '',
                daysSinceLastCommit: 999,
                openIssuesCount: 0,
                avgIssueResponseDays: 0,
                ciStatus: 'unknown',
                isActive: false,
                checkFailed: true,
                failureReason: errorMessage,
            };
        }
    }
    /**
     * Fetch and parse the repo's contribution guidelines from common
     * CONTRIBUTING.md locations. Results (including "not found") are cached
     * per repo for CACHE_TTL_MS.
     *
     * @returns parsed guidelines, or undefined when no file was found
     */
    async fetchContributionGuidelines(owner, repo) {
        const cacheKey = `${owner}/${repo}`;
        // Check cache first
        const cached = guidelinesCache.get(cacheKey);
        if (cached && Date.now() - cached.fetchedAt < CACHE_TTL_MS) {
            return cached.guidelines;
        }
        const filesToCheck = ['CONTRIBUTING.md', '.github/CONTRIBUTING.md', 'docs/CONTRIBUTING.md', 'contributing.md'];
        for (const file of filesToCheck) {
            try {
                const { data } = await this.octokit.repos.getContent({
                    owner,
                    repo,
                    path: file,
                });
                if ('content' in data) {
                    const content = Buffer.from(data.content, 'base64').toString('utf-8');
                    const guidelines = this.parseContributionGuidelines(content);
                    // Cache the result and prune if needed
                    guidelinesCache.set(cacheKey, { guidelines, fetchedAt: Date.now() });
                    pruneCache();
                    return guidelines;
                }
            }
            catch (error) {
                // File not found is expected; only log unexpected errors
                if (error instanceof Error && !error.message.includes('404') && !error.message.includes('Not Found')) {
                    warn(MODULE, `Unexpected error fetching ${file} from ${owner}/${repo}: ${error.message}`);
                }
            }
        }
        // Cache the negative result too and prune if needed
        guidelinesCache.set(cacheKey, { guidelines: undefined, fetchedAt: Date.now() });
        pruneCache();
        return undefined;
    }
    /**
     * Heuristically extract structured guidelines (branch naming, commit
     * format, test framework, linter/formatter, CLA requirement) from a
     * CONTRIBUTING.md body via keyword and regex matching.
     */
    parseContributionGuidelines(content) {
        const guidelines = {
            rawContent: content,
        };
        const lowerContent = content.toLowerCase();
        // Detect branch naming conventions
        if (lowerContent.includes('branch')) {
            const branchMatch = content.match(/branch[^\n]*(?:named?|format|convention)[^\n]*[`"]([^`"]+)[`"]/i);
            if (branchMatch) {
                guidelines.branchNamingConvention = branchMatch[1];
            }
        }
        // Detect commit message format
        if (lowerContent.includes('conventional commit')) {
            guidelines.commitMessageFormat = 'conventional commits';
        }
        else if (lowerContent.includes('commit message')) {
            const commitMatch = content.match(/commit message[^\n]*[`"]([^`"]+)[`"]/i);
            if (commitMatch) {
                guidelines.commitMessageFormat = commitMatch[1];
            }
        }
        // Detect test framework
        if (lowerContent.includes('jest'))
            guidelines.testFramework = 'Jest';
        else if (lowerContent.includes('rspec'))
            guidelines.testFramework = 'RSpec';
        else if (lowerContent.includes('pytest'))
            guidelines.testFramework = 'pytest';
        else if (lowerContent.includes('mocha'))
            guidelines.testFramework = 'Mocha';
        // Detect linter
        if (lowerContent.includes('eslint'))
            guidelines.linter = 'ESLint';
        else if (lowerContent.includes('rubocop'))
            guidelines.linter = 'RuboCop';
        else if (lowerContent.includes('prettier'))
            guidelines.formatter = 'Prettier';
        // Detect CLA requirement
        if (lowerContent.includes('cla') || lowerContent.includes('contributor license agreement')) {
            guidelines.claRequired = true;
        }
        return guidelines;
    }
    /**
     * Judge whether an issue body states clear requirements.
     * Requires at least two clarity indicators: numbered/bulleted steps, a
     * code block, expectation language, or a body longer than 200 chars.
     * Bodies under 50 chars always fail.
     */
    analyzeRequirements(body) {
        if (!body || body.length < 50)
            return false;
        // Check for clear structure
        const hasSteps = /\d+\.|[-*]\s/.test(body);
        const hasCodeBlock = /```/.test(body);
        const hasExpectedBehavior = /expect|should|must|want/i.test(body);
        // Must have at least two indicators of clarity
        const indicators = [hasSteps, hasCodeBlock, hasExpectedBehavior, body.length > 200];
        return indicators.filter(Boolean).length >= 2;
    }
    /**
     * Get the repo score from state, or return null if not evaluated
     */
    getRepoScore(repoFullName) {
        const state = this.stateManager.getState();
        const repoScore = state.repoScores?.[repoFullName];
        return repoScore?.score ?? null;
    }
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/**
 * Lightweight debug logger for oss-autopilot.
 * Activated by the global --debug CLI flag.
 *
 * All debug/warn output goes to stderr so it never contaminates
 * the --json stdout contract.
 */
/** Enable debug output for the remainder of the process. */
export declare function enableDebug(): void;
/** Whether debug output is currently enabled. */
export declare function isDebugEnabled(): boolean;
/**
 * Log a debug message. Only outputs when --debug is enabled.
 */
export declare function debug(module: string, message: string, ...args: unknown[]): void;
/**
 * Log a warning. Always outputs.
 */
export declare function warn(module: string, message: string, ...args: unknown[]): void;
/**
 * Time an async operation and log duration in debug mode.
 */
export declare function timed<T>(module: string, label: string, fn: () => Promise<T>): Promise<T>;
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
/**
 * Lightweight debug logger for oss-autopilot.
 * Activated by the global --debug CLI flag.
 *
 * All debug/warn output goes to stderr so it never contaminates
 * the --json stdout contract.
 */
let debugEnabled = false;
/** Enable debug output for the remainder of the process. */
export function enableDebug() {
    debugEnabled = true;
}
/** @returns {boolean} true once enableDebug() has been called. */
export function isDebugEnabled() {
    return debugEnabled;
}
/** Shared stderr line prefix: ISO timestamp, level, module. */
const logPrefix = (level, module) => `[${new Date().toISOString()}] [${level}] [${module}]`;
/**
 * Log a debug message. Only outputs when --debug is enabled.
 */
export function debug(module, message, ...args) {
    if (!debugEnabled) {
        return;
    }
    console.error(`${logPrefix('DEBUG', module)} ${message}`, ...args);
}
/**
 * Log a warning. Always outputs.
 */
export function warn(module, message, ...args) {
    console.error(`${logPrefix('WARN', module)} ${message}`, ...args);
}
/**
 * Time an async operation and log duration in debug mode.
 * The wrapped function's result (or thrown error) passes through unchanged;
 * with debugging off, fn is invoked without any timing overhead.
 */
export async function timed(module, label, fn) {
    if (!debugEnabled) {
        return fn();
    }
    const start = performance.now();
    try {
        const result = await fn();
        debug(module, `${label} completed in ${(performance.now() - start).toFixed(0)}ms`);
        return result;
    }
    catch (err) {
        debug(module, `${label} failed after ${(performance.now() - start).toFixed(0)}ms`);
        throw err;
    }
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
/**
 * Maintainer Analysis - Action hint extraction from maintainer comments.
 * Extracted from PRMonitor to isolate maintainer-comment-related logic (#263).
 */
import { MaintainerActionHint, ReviewDecision } from './types.js';
/**
 * Extract action hints from maintainer comments using keyword matching.
 * Returns an array of hints about what the maintainer is asking for.
 *
 * @param commentBody - maintainer comment text; undefined yields only
 *   hints derived from reviewDecision
 * @param reviewDecision - overall PR review decision; 'changes_requested'
 *   always contributes a hint
 */
export declare function extractMaintainerActionHints(commentBody: string | undefined, reviewDecision: ReviewDecision): MaintainerActionHint[];
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
/**
 * Maintainer Analysis - Action hint extraction from maintainer comments.
 * Extracted from PRMonitor to isolate maintainer-comment-related logic (#263).
 */
// Ordered hint → keyword-group table. Matching is a plain case-insensitive
// substring test; order here fixes the order hints appear in the result.
const HINT_KEYWORD_GROUPS = [
    // Demo/screenshot requests
    ['demo_requested', [
            'screenshot',
            'demo',
            'recording',
            'screen recording',
            'before/after',
            'before and after',
            'gif',
            'video',
            'screencast',
            'show me',
            'can you show',
        ]],
    // Test requests
    ['tests_requested', [
            'add test',
            'test coverage',
            'unit test',
            'missing test',
            'add a test',
            'write test',
            'needs test',
            'need test',
        ]],
    // Documentation requests
    ['docs_requested', ['documentation', 'readme', 'jsdoc', 'docstring', 'add docs', 'update docs', 'document this']],
    // Rebase requests
    ['rebase_requested', ['rebase', 'merge conflict', 'out of date', 'behind main', 'behind master']],
];
/**
 * Extract action hints from maintainer comments using keyword matching.
 * Returns an array of hints about what the maintainer is asking for.
 */
export function extractMaintainerActionHints(commentBody, reviewDecision) {
    const hints = [];
    // A changes-requested review is itself an action hint, body or not.
    if (reviewDecision === 'changes_requested') {
        hints.push('changes_requested');
    }
    if (!commentBody) {
        return hints;
    }
    const lower = commentBody.toLowerCase();
    for (const [hint, keywords] of HINT_KEYWORD_GROUPS) {
        if (keywords.some((kw) => lower.includes(kw))) {
            hints.push(hint);
        }
    }
    return hints;
}
|