@oss-autopilot/core 0.41.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +85 -0
  3. package/dist/cli.bundle.cjs +17657 -0
  4. package/dist/cli.d.ts +12 -0
  5. package/dist/cli.js +325 -0
  6. package/dist/commands/check-integration.d.ts +10 -0
  7. package/dist/commands/check-integration.js +192 -0
  8. package/dist/commands/comments.d.ts +24 -0
  9. package/dist/commands/comments.js +311 -0
  10. package/dist/commands/config.d.ts +11 -0
  11. package/dist/commands/config.js +82 -0
  12. package/dist/commands/daily.d.ts +29 -0
  13. package/dist/commands/daily.js +433 -0
  14. package/dist/commands/dashboard-data.d.ts +45 -0
  15. package/dist/commands/dashboard-data.js +132 -0
  16. package/dist/commands/dashboard-templates.d.ts +23 -0
  17. package/dist/commands/dashboard-templates.js +1627 -0
  18. package/dist/commands/dashboard.d.ts +18 -0
  19. package/dist/commands/dashboard.js +134 -0
  20. package/dist/commands/dismiss.d.ts +13 -0
  21. package/dist/commands/dismiss.js +49 -0
  22. package/dist/commands/init.d.ts +10 -0
  23. package/dist/commands/init.js +27 -0
  24. package/dist/commands/local-repos.d.ts +14 -0
  25. package/dist/commands/local-repos.js +155 -0
  26. package/dist/commands/parse-list.d.ts +13 -0
  27. package/dist/commands/parse-list.js +139 -0
  28. package/dist/commands/read.d.ts +12 -0
  29. package/dist/commands/read.js +33 -0
  30. package/dist/commands/search.d.ts +10 -0
  31. package/dist/commands/search.js +74 -0
  32. package/dist/commands/setup.d.ts +15 -0
  33. package/dist/commands/setup.js +276 -0
  34. package/dist/commands/shelve.d.ts +13 -0
  35. package/dist/commands/shelve.js +49 -0
  36. package/dist/commands/snooze.d.ts +18 -0
  37. package/dist/commands/snooze.js +83 -0
  38. package/dist/commands/startup.d.ts +33 -0
  39. package/dist/commands/startup.js +197 -0
  40. package/dist/commands/status.d.ts +10 -0
  41. package/dist/commands/status.js +43 -0
  42. package/dist/commands/track.d.ts +16 -0
  43. package/dist/commands/track.js +59 -0
  44. package/dist/commands/validation.d.ts +43 -0
  45. package/dist/commands/validation.js +112 -0
  46. package/dist/commands/vet.d.ts +10 -0
  47. package/dist/commands/vet.js +36 -0
  48. package/dist/core/checklist-analysis.d.ts +17 -0
  49. package/dist/core/checklist-analysis.js +39 -0
  50. package/dist/core/ci-analysis.d.ts +78 -0
  51. package/dist/core/ci-analysis.js +163 -0
  52. package/dist/core/comment-utils.d.ts +15 -0
  53. package/dist/core/comment-utils.js +52 -0
  54. package/dist/core/concurrency.d.ts +5 -0
  55. package/dist/core/concurrency.js +15 -0
  56. package/dist/core/daily-logic.d.ts +77 -0
  57. package/dist/core/daily-logic.js +512 -0
  58. package/dist/core/display-utils.d.ts +10 -0
  59. package/dist/core/display-utils.js +100 -0
  60. package/dist/core/errors.d.ts +24 -0
  61. package/dist/core/errors.js +34 -0
  62. package/dist/core/github-stats.d.ts +73 -0
  63. package/dist/core/github-stats.js +272 -0
  64. package/dist/core/github.d.ts +19 -0
  65. package/dist/core/github.js +60 -0
  66. package/dist/core/http-cache.d.ts +97 -0
  67. package/dist/core/http-cache.js +269 -0
  68. package/dist/core/index.d.ts +15 -0
  69. package/dist/core/index.js +15 -0
  70. package/dist/core/issue-conversation.d.ts +29 -0
  71. package/dist/core/issue-conversation.js +231 -0
  72. package/dist/core/issue-discovery.d.ts +85 -0
  73. package/dist/core/issue-discovery.js +589 -0
  74. package/dist/core/issue-filtering.d.ts +51 -0
  75. package/dist/core/issue-filtering.js +103 -0
  76. package/dist/core/issue-scoring.d.ts +40 -0
  77. package/dist/core/issue-scoring.js +92 -0
  78. package/dist/core/issue-vetting.d.ts +49 -0
  79. package/dist/core/issue-vetting.js +536 -0
  80. package/dist/core/logger.d.ts +21 -0
  81. package/dist/core/logger.js +49 -0
  82. package/dist/core/maintainer-analysis.d.ts +10 -0
  83. package/dist/core/maintainer-analysis.js +59 -0
  84. package/dist/core/pagination.d.ts +11 -0
  85. package/dist/core/pagination.js +20 -0
  86. package/dist/core/pr-monitor.d.ts +109 -0
  87. package/dist/core/pr-monitor.js +594 -0
  88. package/dist/core/review-analysis.d.ts +72 -0
  89. package/dist/core/review-analysis.js +163 -0
  90. package/dist/core/state.d.ts +371 -0
  91. package/dist/core/state.js +1089 -0
  92. package/dist/core/types.d.ts +507 -0
  93. package/dist/core/types.js +34 -0
  94. package/dist/core/utils.d.ts +249 -0
  95. package/dist/core/utils.js +422 -0
  96. package/dist/formatters/json.d.ts +269 -0
  97. package/dist/formatters/json.js +88 -0
  98. package/package.json +67 -0
@@ -0,0 +1,269 @@
1
+ /**
2
+ * HTTP caching with ETags for GitHub API responses.
3
+ *
4
+ * Stores ETags and response bodies for cacheable GET endpoints in
5
+ * `~/.oss-autopilot/cache/`. On subsequent requests, sends `If-None-Match`
6
+ * headers — 304 responses don't count against GitHub rate limits.
7
+ *
8
+ * Also provides in-flight request deduplication so that concurrent calls
9
+ * for the same endpoint (e.g., star counts for two PRs in the same repo)
10
+ * share a single HTTP round-trip.
11
+ */
12
+ import * as fs from 'fs';
13
+ import * as path from 'path';
14
+ import * as crypto from 'crypto';
15
+ import { getCacheDir } from './utils.js';
16
+ import { debug } from './logger.js';
17
// Module tag used to attribute debug log lines to this file.
const MODULE = 'http-cache';
/**
 * Maximum age (in ms) before a cache entry is considered stale and eligible for
 * eviction during cleanup. Defaults to 24 hours. Entries older than this are
 * still *usable* for conditional requests (the ETag may still be valid), but
 * `evictStale()` will remove them.
 */
const DEFAULT_MAX_AGE_MS = 24 * 60 * 60 * 1000;
25
/**
 * File-based HTTP cache backed by `~/.oss-autopilot/cache/`.
 *
 * Every entry is persisted as its own JSON file named after the SHA-256
 * hash of the request URL — this sidesteps filesystem-unsafe characters
 * in URLs and keeps lookups O(1).
 */
export class HttpCache {
    cacheDir;
    /** In-flight request deduplication map: URL -> Promise<response>. */
    inflightRequests = new Map();
    constructor(cacheDir) {
        // Fall back to the shared cache directory when none is supplied.
        this.cacheDir = cacheDir ?? getCacheDir();
    }
    /** Derive a filesystem-safe cache key from a URL (SHA-256 hex digest). */
    keyFor(url) {
        const hasher = crypto.createHash('sha256');
        hasher.update(url);
        return hasher.digest('hex');
    }
    /** Full path to the cache file for a given URL. */
    pathFor(url) {
        return path.join(this.cacheDir, `${this.keyFor(url)}.json`);
    }
    /**
     * Look up a cached response.
     * Returns `null` when no entry exists or the entry is unreadable/corrupt.
     */
    get(url) {
        try {
            const entry = JSON.parse(fs.readFileSync(this.pathFor(url), 'utf-8'));
            // Sanity-check: the stored URL must be the one we hashed.
            if (entry.url === url) {
                return entry;
            }
            debug(MODULE, `Cache collision detected for ${url}, ignoring`);
            return null;
        }
        catch {
            // Missing or corrupt file — treat as a cache miss.
            return null;
        }
    }
    /**
     * Store a response body together with its ETag.
     */
    set(url, etag, body) {
        const entry = {
            etag,
            url,
            body,
            cachedAt: new Date().toISOString(),
        };
        try {
            // mode 0o600: cache may hold API response data; keep it owner-readable only.
            fs.writeFileSync(this.pathFor(url), JSON.stringify(entry), { encoding: 'utf-8', mode: 0o600 });
            debug(MODULE, `Cached response for ${url}`);
        }
        catch (err) {
            // Non-fatal: cache write failure should not break the request
            debug(MODULE, `Failed to write cache for ${url}`, err);
        }
    }
    /**
     * Check whether a URL has an in-flight request.
     */
    hasInflight(url) {
        return this.inflightRequests.has(url);
    }
    /**
     * Get the in-flight promise for a URL (for deduplication).
     */
    getInflight(url) {
        return this.inflightRequests.get(url);
    }
    /**
     * Register an in-flight request for deduplication.
     * Returns a cleanup function to call when the request completes.
     */
    setInflight(url, promise) {
        this.inflightRequests.set(url, promise);
        return () => {
            this.inflightRequests.delete(url);
        };
    }
    /**
     * Remove entries older than `maxAgeMs` from the cache directory.
     * Intended to be called periodically (e.g., once per daily run).
     * Returns the number of entries removed.
     */
    evictStale(maxAgeMs = DEFAULT_MAX_AGE_MS) {
        let removed = 0;
        try {
            const now = Date.now();
            for (const name of fs.readdirSync(this.cacheDir)) {
                if (!name.endsWith('.json')) {
                    continue;
                }
                const fullPath = path.join(this.cacheDir, name);
                try {
                    const entry = JSON.parse(fs.readFileSync(fullPath, 'utf-8'));
                    if (now - new Date(entry.cachedAt).getTime() > maxAgeMs) {
                        fs.unlinkSync(fullPath);
                        removed++;
                    }
                }
                catch {
                    // Corrupt entry — remove it (best effort).
                    try {
                        fs.unlinkSync(fullPath);
                        removed++;
                    }
                    catch {
                        // Ignore
                    }
                }
            }
        }
        catch {
            // Cache dir might not exist yet — that's fine
        }
        if (removed > 0) {
            debug(MODULE, `Evicted ${removed} stale cache entries`);
        }
        return removed;
    }
    /**
     * Remove all entries from the cache.
     */
    clear() {
        try {
            const names = fs.readdirSync(this.cacheDir).filter((n) => n.endsWith('.json'));
            for (const name of names) {
                fs.unlinkSync(path.join(this.cacheDir, name));
            }
            debug(MODULE, 'Cache cleared');
        }
        catch {
            // Cache dir might not exist yet — that's fine
        }
    }
    /**
     * Return the number of entries currently in the cache.
     */
    size() {
        try {
            return fs.readdirSync(this.cacheDir).filter((f) => f.endsWith('.json')).length;
        }
        catch {
            return 0;
        }
    }
}
178
// ---------------------------------------------------------------------------
// Singleton
// ---------------------------------------------------------------------------
/** Shared HttpCache instance; stays `null` until first requested. */
let sharedHttpCache = null;
/**
 * Get (or create) the shared HttpCache singleton.
 * The singleton is lazily initialized on first access.
 */
export function getHttpCache() {
    if (sharedHttpCache === null) {
        sharedHttpCache = new HttpCache();
    }
    return sharedHttpCache;
}
/** Reset the singleton (for tests). */
export function resetHttpCache() {
    sharedHttpCache = null;
}
196
// ---------------------------------------------------------------------------
// Octokit integration helpers
// ---------------------------------------------------------------------------
/**
 * Wraps an Octokit `repos.get`-style call with ETag caching and request
 * deduplication.
 *
 * Usage:
 * ```ts
 * const data = await cachedRequest(cache, octokit, '/repos/owner/repo', () =>
 *   octokit.repos.get({ owner, repo: name }),
 * );
 * ```
 *
 * 1. If an identical request is already in-flight, returns the existing promise
 *    (request deduplication).
 * 2. If a cached ETag exists, sends `If-None-Match`. On 304, returns the
 *    cached body without consuming a rate-limit point.
 * 3. On a fresh 200, caches the ETag + body for next time.
 */
export async function cachedRequest(cache, url, fetcher) {
    // --- Deduplication: piggyback on an identical request already in flight ---
    const inflight = cache.getInflight(url);
    if (inflight) {
        debug(MODULE, `Dedup hit for ${url}`);
        return (await inflight);
    }
    const performFetch = async () => {
        const headers = {};
        const cachedEntry = cache.get(url);
        if (cachedEntry) {
            // Conditional request: 304 responses don't consume rate limit.
            headers['if-none-match'] = cachedEntry.etag;
        }
        try {
            const response = await fetcher(headers);
            // Store ETag if present (headers may be absent in test mocks)
            const responseEtag = response.headers?.['etag'];
            if (responseEtag) {
                cache.set(url, responseEtag, response.data);
            }
            return response.data;
        }
        catch (err) {
            // 304 Not Modified — re-read the cache rather than trusting a stale closure
            if (isNotModifiedError(err)) {
                const current = cache.get(url);
                if (current) {
                    debug(MODULE, `304 cache hit for ${url}`);
                    return current.body;
                }
            }
            throw err;
        }
    };
    const pending = performFetch();
    const cleanup = cache.setInflight(url, pending);
    try {
        return await pending;
    }
    finally {
        // Always unregister so later calls don't reuse a settled promise.
        cleanup();
    }
}
260
/**
 * Detect whether an error is a 304 Not Modified response.
 * Octokit throws a RequestError with status 304 for conditional requests.
 */
function isNotModifiedError(err) {
    // Only object-like errors can carry an HTTP status.
    const hasStatus = typeof err === 'object' && err !== null && 'status' in err;
    return hasStatus ? err.status === 304 : false;
}
@@ -0,0 +1,15 @@
1
/**
 * Core module exports
 * Re-exports all core functionality for convenient imports
 */
// State persistence (singleton-backed).
export { StateManager, getStateManager, resetStateManager } from './state.js';
// Pull-request monitoring and CI-check classification.
export { PRMonitor, type PRCheckFailure, type FetchPRsResult, computeDisplayLabel, classifyCICheck, classifyFailingChecks, } from './pr-monitor.js';
// Issue discovery/search helpers and doc-only filtering.
export { IssueDiscovery, type IssueCandidate, type SearchPriority, isDocOnlyIssue, applyPerRepoCap, DOC_ONLY_LABELS, } from './issue-discovery.js';
// Conversation tracking for issues the user has commented on.
export { IssueConversationMonitor } from './issue-conversation.js';
// Bot and acknowledgment-comment detection.
export { isBotAuthor, isAcknowledgmentComment } from './comment-utils.js';
// Octokit client construction and rate-limit inspection.
export { getOctokit, checkRateLimit, type RateLimitInfo } from './github.js';
// Path, date, token, and URL utilities.
export { parseGitHubUrl, daysBetween, splitRepo, getDataDir, getStatePath, getBackupDir, getCacheDir, getDashboardPath, formatRelativeTime, byDateDescending, getGitHubToken, getGitHubTokenAsync, requireGitHubToken, resetGitHubTokenCache, } from './utils.js';
// Logging controls.
export { enableDebug, isDebugEnabled, debug, warn, timed } from './logger.js';
// ETag-based HTTP caching and request deduplication.
export { HttpCache, getHttpCache, resetHttpCache, cachedRequest, type CacheEntry } from './http-cache.js';
// Daily digest computation and formatting.
export { CRITICAL_STATUSES, computeRepoSignals, groupPRsByRepo, assessCapacity, collectActionableIssues, computeActionMenu, toShelvedPRRef, formatActionHint, formatBriefSummary, formatSummary, printDigest, } from './daily-logic.js';
// Shared type definitions.
export * from './types.js';
@@ -0,0 +1,15 @@
1
/**
 * Core module exports
 * Re-exports all core functionality for convenient imports
 */
// State persistence (singleton-backed).
export { StateManager, getStateManager, resetStateManager } from './state.js';
// Pull-request monitoring and CI-check classification.
export { PRMonitor, computeDisplayLabel, classifyCICheck, classifyFailingChecks, } from './pr-monitor.js';
// Issue discovery/search helpers and doc-only filtering.
export { IssueDiscovery, isDocOnlyIssue, applyPerRepoCap, DOC_ONLY_LABELS, } from './issue-discovery.js';
// Conversation tracking for issues the user has commented on.
export { IssueConversationMonitor } from './issue-conversation.js';
// Bot and acknowledgment-comment detection.
export { isBotAuthor, isAcknowledgmentComment } from './comment-utils.js';
// Octokit client construction and rate-limit inspection.
export { getOctokit, checkRateLimit } from './github.js';
// Path, date, token, and URL utilities.
export { parseGitHubUrl, daysBetween, splitRepo, getDataDir, getStatePath, getBackupDir, getCacheDir, getDashboardPath, formatRelativeTime, byDateDescending, getGitHubToken, getGitHubTokenAsync, requireGitHubToken, resetGitHubTokenCache, } from './utils.js';
// Logging controls.
export { enableDebug, isDebugEnabled, debug, warn, timed } from './logger.js';
// ETag-based HTTP caching and request deduplication.
export { HttpCache, getHttpCache, resetHttpCache, cachedRequest } from './http-cache.js';
// Daily digest computation and formatting.
export { CRITICAL_STATUSES, computeRepoSignals, groupPRsByRepo, assessCapacity, collectActionableIssues, computeActionMenu, toShelvedPRRef, formatActionHint, formatBriefSummary, formatSummary, printDigest, } from './daily-logic.js';
// Runtime values from the types module (the .js build keeps only value exports).
export * from './types.js';
@@ -0,0 +1,29 @@
1
/**
 * Issue Conversation Monitor — tracks issues the user has commented on
 * and detects maintainer responses that need attention.
 *
 * Follows the same pattern as PRMonitor: stateless fetch from GitHub,
 * bounded concurrency via worker pool, bot/acknowledgment filtering.
 */
import type { CommentedIssue } from './types.js';
export declare class IssueConversationMonitor {
    // Octokit REST client built from the token passed to the constructor.
    private octokit;
    // Shared state manager (config, tracked issues).
    private stateManager;
    constructor(githubToken: string);
    /**
     * Fetch issues the user has commented on and determine conversation state.
     * Filters out: user-authored issues, user-owned repos, excluded repos/orgs,
     * AI policy blocklisted repos, already-tracked issues, and pull requests.
     *
     * @param maxDays - Look-back window in days for issue updates (implementation default applies when omitted).
     * @returns Analyzed issues plus per-issue failures encountered during analysis.
     */
    fetchCommentedIssues(maxDays?: number): Promise<{
        issues: CommentedIssue[];
        failures: Array<{
            issueUrl: string;
            error: string;
        }>;
    }>;
    /**
     * Analyze a single issue's comment thread to determine conversation status.
     */
    private analyzeIssueConversation;
}
@@ -0,0 +1,231 @@
1
+ /**
2
+ * Issue Conversation Monitor — tracks issues the user has commented on
3
+ * and detects maintainer responses that need attention.
4
+ *
5
+ * Follows the same pattern as PRMonitor: stateless fetch from GitHub,
6
+ * bounded concurrency via worker pool, bot/acknowledgment filtering.
7
+ */
8
+ import { getOctokit } from './github.js';
9
+ import { isBotAuthor, isAcknowledgmentComment } from './comment-utils.js';
10
+ import { paginateAll } from './pagination.js';
11
+ import { getStateManager } from './state.js';
12
+ import { daysBetween, splitRepo, extractOwnerRepo } from './utils.js';
13
+ import { runWorkerPool } from './concurrency.js';
14
+ import { ConfigurationError } from './errors.js';
15
+ import { debug, warn } from './logger.js';
16
// Module tag used to attribute debug/warn log lines to this file.
const MODULE = 'issue-conversation';
// Worker-pool width for per-issue comment fetches.
const MAX_CONCURRENT_REQUESTS = 5;
/** Associations that indicate someone with repo-level permissions. */
const MAINTAINER_ASSOCIATIONS = new Set(['OWNER', 'MEMBER', 'COLLABORATOR']);
20
export class IssueConversationMonitor {
    // Octokit REST client built from the constructor token.
    octokit;
    // Shared state manager (config, tracked issues).
    stateManager;
    /**
     * @param githubToken - GitHub API token used to build the Octokit client.
     */
    constructor(githubToken) {
        this.octokit = getOctokit(githubToken);
        this.stateManager = getStateManager();
    }
    /**
     * Fetch issues the user has commented on and determine conversation state.
     * Filters out: user-authored issues, user-owned repos, excluded repos/orgs,
     * AI policy blocklisted repos, already-tracked issues, and pull requests.
     *
     * @param maxDays - Look-back window (days) used in the `updated:>=` search qualifier.
     * @returns `{ issues, failures }` — analyzed issues sorted by urgency, plus
     *   per-issue failures from the analysis phase.
     * @throws ConfigurationError when no GitHub username is configured.
     */
    async fetchCommentedIssues(maxDays = 30) {
        const config = this.stateManager.getState().config;
        if (!config.githubUsername) {
            throw new ConfigurationError('No GitHub username configured. Run setup first.');
        }
        const username = config.githubUsername;
        // Date-only ISO string (YYYY-MM-DD) for the search qualifier.
        const cutoffDate = new Date();
        cutoffDate.setDate(cutoffDate.getDate() - maxDays);
        const since = cutoffDate.toISOString().split('T')[0];
        debug(MODULE, `Fetching commented issues for @${username} (last ${maxDays} days)...`);
        // Search for open issues the user has commented on, updated within window
        // Single page (100) is sufficient for most users; log if truncated.
        const { data } = await this.octokit.search.issuesAndPullRequests({
            q: `commenter:${username} type:issue state:open updated:>=${since}`,
            sort: 'updated',
            order: 'desc',
            per_page: 100,
        });
        if (data.total_count > 100) {
            warn(MODULE, `Search returned ${data.total_count} results but only first 100 were fetched. Some commented issues may be missing.`);
        }
        // Build sets for filtering
        const trackedIssues = this.stateManager.getState().activeIssues || [];
        // Only issues actively in the pipeline count as "tracked" for dedup purposes.
        const trackedIssueKeys = new Set(trackedIssues
            .filter((i) => i.status === 'claimed' || i.status === 'in_progress' || i.status === 'pr_submitted')
            .map((i) => `${i.repo}#${i.number}`));
        // Lowercased for case-insensitive matching below.
        const blocklist = new Set((config.aiPolicyBlocklist || []).map((r) => r.toLowerCase()));
        // Filter out PRs, user-authored issues, excluded repos, blocklisted repos, and already-tracked issues.
        // Also parse repo info for each candidate to avoid re-parsing in analyzeIssueConversation.
        const candidates = [];
        for (const item of data.items) {
            // Defensive: skip pull requests in case type:issue qualifier is unreliable
            if (item.pull_request)
                continue;
            const parsed = extractOwnerRepo(item.html_url);
            if (!parsed) {
                warn(MODULE, `Skipping issue with unparseable URL: ${item.html_url}`);
                continue;
            }
            const { owner, repo } = parsed;
            const repoFullName = `${owner}/${repo}`;
            // Skip issues in user-owned repos (we only care about contributing to others' projects)
            if (owner.toLowerCase() === username.toLowerCase())
                continue;
            // Skip user-authored issues
            if (item.user?.login?.toLowerCase() === username.toLowerCase())
                continue;
            // Skip excluded repos and orgs
            if (config.excludeRepos.includes(repoFullName))
                continue;
            if (config.excludeOrgs?.some((org) => owner.toLowerCase() === org.toLowerCase()))
                continue;
            // Skip blocklisted repos
            if (blocklist.has(repoFullName.toLowerCase()))
                continue;
            // Skip issues already in the tracked pipeline
            if (trackedIssueKeys.has(`${repoFullName}#${item.number}`))
                continue;
            candidates.push({ item, repoFullName });
        }
        debug(MODULE, `Found ${candidates.length} commented issues to check`);
        // Fetch comments for each issue using worker pool.
        const results = [];
        const failures = [];
        await runWorkerPool(candidates, async ({ item, repoFullName }) => {
            try {
                const issue = await this.analyzeIssueConversation(item, repoFullName, username);
                if (issue) {
                    results.push(issue);
                }
                else {
                    // analyzeIssueConversation returns null when the user's comment
                    // can't be found in the thread — record as a soft failure.
                    failures.push({
                        issueUrl: item.html_url,
                        error: 'No user comment found despite commenter: search match (possible pagination or eventual consistency)',
                    });
                }
            }
            catch (error) {
                const msg = error instanceof Error ? error.message : String(error);
                failures.push({ issueUrl: item.html_url, error: msg });
                warn(MODULE, `Error analyzing issue ${item.html_url}: ${msg}`);
            }
        }, MAX_CONCURRENT_REQUESTS);
        if (failures.length > 0) {
            warn(MODULE, `${failures.length}/${candidates.length} issue analysis call(s) failed`);
        }
        if (failures.length === candidates.length && candidates.length > 0) {
            warn(MODULE, `All ${candidates.length} issue analysis call(s) failed. Possible systemic issue (rate limit, auth, network).`);
        }
        // Sort: new_response first, then waiting, then acknowledged
        const statusOrder = {
            new_response: 0,
            waiting: 1,
            acknowledged: 2,
        };
        results.sort((a, b) => statusOrder[a.status] - statusOrder[b.status]);
        debug(MODULE, `Analyzed ${results.length} issue conversations (${results.filter((i) => i.status === 'new_response').length} with new responses)`);
        return { issues: results, failures };
    }
    /**
     * Analyze a single issue's comment thread to determine conversation status.
     *
     * @param item - Search result item for the issue.
     * @param repoFullName - Pre-parsed `owner/repo` string for the issue.
     * @param username - The user whose participation is being analyzed.
     * @returns A CommentedIssue-shaped object with status `new_response`,
     *   `acknowledged`, or `waiting`; or `null` when no comment by the user
     *   is found in the thread.
     */
    async analyzeIssueConversation(item, repoFullName, username) {
        const { owner, repo } = splitRepo(repoFullName);
        // Fetch every page of comments — threads can exceed 100 entries.
        const allComments = await paginateAll((page) => this.octokit.issues.listComments({
            owner,
            repo,
            issue_number: item.number,
            per_page: 100,
            page,
        }));
        // Normalize comments into a flat timeline for analysis.
        const timeline = [];
        for (const comment of allComments) {
            if (!comment.user?.login)
                continue; // Skip comments from deleted accounts
            const author = comment.user.login;
            timeline.push({
                author,
                body: comment.body || '',
                createdAt: comment.created_at,
                isUser: author.toLowerCase() === username.toLowerCase(),
                authorAssociation: String(comment.author_association ?? ''),
            });
        }
        // Chronological order, oldest first.
        timeline.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
        // Find the user's last comment
        let userLastComment;
        for (const entry of timeline) {
            if (entry.isUser)
                userLastComment = entry;
        }
        // If user never commented (shouldn't happen with commenter: search, but be safe)
        if (!userLastComment) {
            warn(MODULE, `No user comment found for ${item.html_url} despite commenter: search match`);
            return null;
        }
        const userLastCommentTime = new Date(userLastComment.createdAt);
        // Only surface comments directed at the user (#343):
        // 1. From a maintainer (OWNER/MEMBER/COLLABORATOR) — inherently authoritative
        // 2. @mentions the user — explicitly addressed
        // This filters out community "+1" and "me too" noise.
        const userMention = `@${username.toLowerCase()}`;
        function isDirectedAtUser(entry) {
            return MAINTAINER_ASSOCIATIONS.has(entry.authorAssociation) || entry.body.toLowerCase().includes(userMention);
        }
        // Find the last substantive, directed response after the user's last comment
        let lastResponse;
        for (const entry of timeline) {
            if (entry.isUser)
                continue;
            if (isBotAuthor(entry.author))
                continue;
            const entryTime = new Date(entry.createdAt);
            if (entryTime > userLastCommentTime) {
                if (isAcknowledgmentComment(entry.body))
                    continue;
                if (!isDirectedAtUser(entry))
                    continue;
                // Keep only a 200-char preview of the response body.
                lastResponse = {
                    author: entry.author,
                    body: entry.body.slice(0, 200) + (entry.body.length > 200 ? '...' : ''),
                    createdAt: entry.createdAt,
                    authorAssociation: entry.authorAssociation,
                };
            }
        }
        const labels = (item.labels || []).map((l) => l.name || '').filter(Boolean);
        // Fields shared by every returned status variant.
        const base = {
            repo: repoFullName,
            number: item.number,
            title: item.title,
            url: item.html_url,
            userLastCommentedAt: userLastComment.createdAt,
            labels,
            daysSinceUserComment: daysBetween(userLastCommentTime, new Date()),
        };
        if (lastResponse) {
            return {
                ...base,
                status: 'new_response',
                lastResponseAuthor: lastResponse.author,
                lastResponseBody: lastResponse.body,
                lastResponseAt: lastResponse.createdAt,
                isFromMaintainer: MAINTAINER_ASSOCIATIONS.has(lastResponse.authorAssociation),
            };
        }
        // No directed response found. Determine whether the user or a relevant
        // commenter (maintainer / @mention) spoke last. Irrelevant community
        // comments are excluded from this check too (#343).
        const lastRelevantComment = [...timeline].reverse().find((e) => {
            if (isBotAuthor(e.author))
                return false;
            if (e.isUser)
                return true;
            return isDirectedAtUser(e);
        });
        // User spoke last -> 'acknowledged'; someone else (or nobody relevant) -> 'waiting'.
        const status = lastRelevantComment?.isUser ? 'acknowledged' : 'waiting';
        return { ...base, status };
    }
}
@@ -0,0 +1,85 @@
1
/**
 * Issue Discovery — orchestrates multi-phase issue search across GitHub.
 *
 * Delegates filtering, scoring, and vetting to focused modules (#356):
 * - issue-filtering.ts — spam detection, doc-only filtering, per-repo caps
 * - issue-scoring.ts — viability scores, repo quality bonuses
 * - issue-vetting.ts — individual issue checks (PRs, claims, health, guidelines)
 */
import { type IssueCandidate } from './types.js';
import { type ViabilityScoreParams } from './issue-scoring.js';
// Re-exports keep the pre-split public surface intact (see #356 note above).
export { isDocOnlyIssue, applyPerRepoCap, isLabelFarming, hasTemplatedTitle, detectLabelFarmingRepos, DOC_ONLY_LABELS, BEGINNER_LABELS, type GitHubSearchItem, } from './issue-filtering.js';
export { calculateRepoQualityBonus, calculateViabilityScore, type ViabilityScoreParams } from './issue-scoring.js';
export { type CheckResult } from './issue-vetting.js';
export type { SearchPriority, IssueCandidate } from './types.js';
export declare class IssueDiscovery {
    // Octokit REST client.
    private octokit;
    // Shared state manager (config, starred-repo cache).
    private stateManager;
    // Token retained for constructing delegated helpers.
    private githubToken;
    // Issue vetter used by vetIssue()/analyzeRequirements().
    private vetter;
    /** Set after searchIssues() runs if rate limits affected the search (low pre-flight quota or mid-search rate limit hits). */
    rateLimitWarning: string | null;
    constructor(githubToken: string);
    /**
     * Fetch the authenticated user's starred repositories from GitHub.
     * Updates the state manager with the list and timestamp.
     */
    fetchStarredRepos(): Promise<string[]>;
    /**
     * Get starred repos, fetching from GitHub if cache is stale
     */
    getStarredReposWithRefresh(): Promise<string[]>;
    /**
     * Search for issues matching our criteria.
     * Searches in priority order: merged-PR repos first (no label filter), then starred repos,
     * then general search, then actively maintained repos (#349).
     * Filters out issues from low-scoring and excluded repos.
     */
    searchIssues(options?: {
        languages?: string[];
        labels?: string[];
        maxResults?: number;
    }): Promise<IssueCandidate[]>;
    /**
     * Search for issues within specific repos using batched queries.
     *
     * To avoid GitHub's secondary rate limit (30 requests/minute), we batch
     * multiple repos into a single search query using OR syntax:
     *   repo:owner1/repo1 OR repo:owner2/repo2 OR repo:owner3/repo3
     *
     * This reduces API calls from N (one per repo) to ceil(N/BATCH_SIZE).
     */
    private searchInRepos;
    /**
     * Split repos into batches of the specified size.
     */
    private batchRepos;
    /**
     * Check if an error is a GitHub rate limit error (429 or rate-limit 403).
     * Static proxy kept for backward compatibility with tests.
     */
    static isRateLimitError(error: unknown): boolean;
    /**
     * Vet a specific issue (delegates to IssueVetter).
     */
    vetIssue(issueUrl: string): Promise<IssueCandidate>;
    /**
     * Analyze issue requirements for clarity (delegates to IssueVetter).
     * Kept on class for backward compatibility.
     */
    analyzeRequirements(body: string): boolean;
    /**
     * Calculate viability score for an issue (delegates to issue-scoring module).
     * Kept on class for backward compatibility with tests that call instance.calculateViabilityScore().
     */
    calculateViabilityScore(params: ViabilityScoreParams): number;
    /**
     * Save search results to ~/.oss-autopilot/found-issues.md
     * Results are sorted by viability score (highest first)
     */
    saveSearchResults(candidates: IssueCandidate[]): string;
    /**
     * Format issue candidate for display
     */
    formatCandidate(candidate: IssueCandidate): string;
}