@link-assistant/hive-mind 0.39.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +20 -0
- package/LICENSE +24 -0
- package/README.md +769 -0
- package/package.json +58 -0
- package/src/agent.lib.mjs +705 -0
- package/src/agent.prompts.lib.mjs +196 -0
- package/src/buildUserMention.lib.mjs +71 -0
- package/src/claude-limits.lib.mjs +389 -0
- package/src/claude.lib.mjs +1445 -0
- package/src/claude.prompts.lib.mjs +203 -0
- package/src/codex.lib.mjs +552 -0
- package/src/codex.prompts.lib.mjs +194 -0
- package/src/config.lib.mjs +207 -0
- package/src/contributing-guidelines.lib.mjs +268 -0
- package/src/exit-handler.lib.mjs +205 -0
- package/src/git.lib.mjs +145 -0
- package/src/github-issue-creator.lib.mjs +246 -0
- package/src/github-linking.lib.mjs +152 -0
- package/src/github.batch.lib.mjs +272 -0
- package/src/github.graphql.lib.mjs +258 -0
- package/src/github.lib.mjs +1479 -0
- package/src/hive.config.lib.mjs +254 -0
- package/src/hive.mjs +1500 -0
- package/src/instrument.mjs +191 -0
- package/src/interactive-mode.lib.mjs +1000 -0
- package/src/lenv-reader.lib.mjs +206 -0
- package/src/lib.mjs +490 -0
- package/src/lino.lib.mjs +176 -0
- package/src/local-ci-checks.lib.mjs +324 -0
- package/src/memory-check.mjs +419 -0
- package/src/model-mapping.lib.mjs +145 -0
- package/src/model-validation.lib.mjs +278 -0
- package/src/opencode.lib.mjs +479 -0
- package/src/opencode.prompts.lib.mjs +194 -0
- package/src/protect-branch.mjs +159 -0
- package/src/review.mjs +433 -0
- package/src/reviewers-hive.mjs +643 -0
- package/src/sentry.lib.mjs +284 -0
- package/src/solve.auto-continue.lib.mjs +568 -0
- package/src/solve.auto-pr.lib.mjs +1374 -0
- package/src/solve.branch-errors.lib.mjs +341 -0
- package/src/solve.branch.lib.mjs +230 -0
- package/src/solve.config.lib.mjs +342 -0
- package/src/solve.error-handlers.lib.mjs +256 -0
- package/src/solve.execution.lib.mjs +291 -0
- package/src/solve.feedback.lib.mjs +436 -0
- package/src/solve.mjs +1128 -0
- package/src/solve.preparation.lib.mjs +210 -0
- package/src/solve.repo-setup.lib.mjs +114 -0
- package/src/solve.repository.lib.mjs +961 -0
- package/src/solve.results.lib.mjs +558 -0
- package/src/solve.session.lib.mjs +135 -0
- package/src/solve.validation.lib.mjs +325 -0
- package/src/solve.watch.lib.mjs +572 -0
- package/src/start-screen.mjs +324 -0
- package/src/task.mjs +308 -0
- package/src/telegram-bot.mjs +1481 -0
- package/src/telegram-markdown.lib.mjs +64 -0
- package/src/usage-limit.lib.mjs +218 -0
- package/src/version.lib.mjs +41 -0
- package/src/youtrack/solve.youtrack.lib.mjs +116 -0
- package/src/youtrack/youtrack-sync.mjs +219 -0
- package/src/youtrack/youtrack.lib.mjs +425 -0
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* GitHub Issue Linking Detection Library
|
|
5
|
+
*
|
|
6
|
+
* This module provides utilities to detect GitHub's reserved keywords for linking
|
|
7
|
+
* pull requests to issues according to GitHub's official documentation:
|
|
8
|
+
* https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue
|
|
9
|
+
*
|
|
10
|
+
* Valid linking keywords (case-insensitive):
|
|
11
|
+
* - close, closes, closed
|
|
12
|
+
* - fix, fixes, fixed
|
|
13
|
+
* - resolve, resolves, resolved
|
|
14
|
+
*
|
|
15
|
+
* Valid formats:
|
|
16
|
+
* - KEYWORD #ISSUE-NUMBER
|
|
17
|
+
* - KEYWORD OWNER/REPO#ISSUE-NUMBER
|
|
18
|
+
* - KEYWORD https://github.com/OWNER/REPO/issues/ISSUE-NUMBER
|
|
19
|
+
*/
|
|
20
|
+
|
|
21
|
+
/**
 * Get all valid GitHub linking keywords
 * @returns {string[]} Array of valid linking keywords (lowercase)
 */
export function getGitHubLinkingKeywords() {
  return [
    'close', 'closes', 'closed',
    'fix', 'fixes', 'fixed',
    'resolve', 'resolves', 'resolved'
  ];
}

/**
 * Escape regex metacharacters so a string can be embedded in a RegExp source.
 * Needed because repository names may contain '.' (and other characters)
 * that would otherwise act as regex wildcards.
 * @param {string} text - Raw text (e.g. a repo owner or name)
 * @returns {string} Text safe for RegExp construction
 */
function escapeForRegExp(text) {
  return String(text).replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
}

/**
 * Check if PR body contains a valid GitHub linking keyword for the given issue
 *
 * @param {string} prBody - The pull request body text
 * @param {string|number} issueNumber - The issue number to check for
 * @param {string} [owner] - Repository owner (for cross-repo references)
 * @param {string} [repo] - Repository name (for cross-repo references)
 * @returns {boolean} True if a valid linking keyword is found
 */
export function hasGitHubLinkingKeyword(prBody, issueNumber, owner = null, repo = null) {
  if (!prBody || !issueNumber) {
    return false;
  }

  const keywords = getGitHubLinkingKeywords();
  const issueNumStr = issueNumber.toString();
  // Fix: escape owner/repo before embedding in regex source. Repo names may
  // contain '.', which previously matched ANY character, so e.g. repo
  // "my.repo" would falsely match the text "myXrepo".
  const ownerPattern = owner ? escapeForRegExp(owner) : null;
  const repoPattern = repo ? escapeForRegExp(repo) : null;

  for (const keyword of keywords) {
    // Pattern 1: KEYWORD #123
    // Word boundary before keyword; trailing \b prevents #123 matching #1234.
    const pattern1 = new RegExp(`\\b${keyword}\\s+#${issueNumStr}\\b`, 'i');
    if (pattern1.test(prBody)) {
      return true;
    }

    // Pattern 2: KEYWORD owner/repo#123 (for cross-repo or fork references)
    if (ownerPattern && repoPattern) {
      const pattern2 = new RegExp(
        `\\b${keyword}\\s+${ownerPattern}/${repoPattern}#${issueNumStr}\\b`,
        'i'
      );
      if (pattern2.test(prBody)) {
        return true;
      }
    }

    // Pattern 3: KEYWORD https://github.com/owner/repo/issues/123
    if (ownerPattern && repoPattern) {
      const pattern3 = new RegExp(
        `\\b${keyword}\\s+https://github\\.com/${ownerPattern}/${repoPattern}/issues/${issueNumStr}\\b`,
        'i'
      );
      if (pattern3.test(prBody)) {
        return true;
      }
    }

    // Pattern 4: Also check for any URL format (generic, any owner/repo)
    const pattern4 = new RegExp(
      `\\b${keyword}\\s+https://github\\.com/[^/]+/[^/]+/issues/${issueNumStr}\\b`,
      'i'
    );
    if (pattern4.test(prBody)) {
      return true;
    }
  }

  return false;
}
|
|
104
|
+
|
|
105
|
+
/**
 * Extract issue number from PR body using GitHub linking keywords.
 * Used to discover which issue a PR is linked to.
 *
 * Tries, for each keyword in order, the three reference formats GitHub
 * recognises, and returns the first captured issue number.
 *
 * @param {string} prBody - The pull request body text
 * @returns {string|null} The issue number if found, null otherwise
 */
export function extractLinkedIssueNumber(prBody) {
  if (!prBody) {
    return null;
  }

  // Regex-source builders for the three reference formats, in priority order:
  //   1. KEYWORD #123
  //   2. KEYWORD owner/repo#123
  //   3. KEYWORD https://github.com/owner/repo/issues/123
  const referenceForms = [
    (kw) => `\\b${kw}\\s+#(\\d+)\\b`,
    (kw) => `\\b${kw}\\s+[^/\\s]+/[^/\\s]+#(\\d+)\\b`,
    (kw) => `\\b${kw}\\s+https://github\\.com/[^/]+/[^/]+/issues/(\\d+)\\b`,
  ];

  // Keyword-major iteration preserves the original precedence: all formats
  // are tried for 'close' before moving on to 'closes', and so on.
  for (const keyword of getGitHubLinkingKeywords()) {
    for (const buildPattern of referenceForms) {
      const found = prBody.match(new RegExp(buildPattern(keyword), 'i'));
      if (found) {
        return found[1];
      }
    }
  }

  return null;
}
|
|
@@ -0,0 +1,272 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
// GitHub batch operations using GraphQL
|
|
3
|
+
|
|
4
|
+
// Check if use is already defined (when imported from solve.mjs)
// If not, fetch it (when running standalone)
// NOTE(review): this fetches and eval()s remote code from unpkg.com at
// runtime (the use-m bootstrap pattern). It is a supply-chain and
// availability risk for standalone runs — confirm this is intentional.
if (typeof globalThis.use === 'undefined') {
  globalThis.use = (await eval(await (await fetch('https://unpkg.com/use-m/use.js')).text())).use;
}
|
|
9
|
+
|
|
10
|
+
// Import dependencies
|
|
11
|
+
import { log, cleanErrorMessage } from './lib.mjs';
|
|
12
|
+
import { githubLimits, timeouts } from './config.lib.mjs';
|
|
13
|
+
|
|
14
|
+
/**
 * Batch fetch pull request information for multiple issues using GraphQL.
 *
 * Issues are queried in batches of 50 through `gh api graphql`, inspecting
 * each issue's CROSS_REFERENCED_EVENT timeline items for open, non-draft
 * pull requests. If a GraphQL batch fails, that batch falls back to
 * per-issue REST timeline calls (which only yield a count, not PR details).
 *
 * @param {string} owner - Repository owner
 * @param {string} repo - Repository name
 * @param {Array<number>} issueNumbers - Array of issue numbers to check
 * @returns {Promise<Object>} Object mapping issue numbers to their linked PRs:
 *   { title?, state?, openPRCount, linkedPRs, error? } per issue;
 *   {} on empty input or total failure (errors are logged, never thrown)
 */
export async function batchCheckPullRequestsForIssues(owner, repo, issueNumbers) {
  try {
    if (!issueNumbers || issueNumbers.length === 0) {
      return {};
    }

    await log(` 🔍 Batch checking PRs for ${issueNumbers.length} issues using GraphQL...`, { verbose: true });

    // GraphQL has complexity limits, so batch in groups of 50
    const BATCH_SIZE = 50;
    const results = {};

    for (let i = 0; i < issueNumbers.length; i += BATCH_SIZE) {
      const batch = issueNumbers.slice(i, i + BATCH_SIZE);

      // Build GraphQL query for this batch: one aliased `issue<N>` field per
      // issue number, each pulling up to 100 cross-reference timeline events.
      const query = `
        query GetPullRequestsForIssues {
          repository(owner: "${owner}", name: "${repo}") {
            ${batch.map(num => `
            issue${num}: issue(number: ${num}) {
              number
              title
              state
              timelineItems(first: 100, itemTypes: [CROSS_REFERENCED_EVENT]) {
                nodes {
                  ... on CrossReferencedEvent {
                    source {
                      ... on PullRequest {
                        number
                        title
                        state
                        isDraft
                        url
                      }
                    }
                  }
                }
              }
            }`).join('\n')}
          }
        }
      `;

      try {
        // Add small delay between batches to respect rate limits
        // NOTE(review): the message says "2 seconds" but the actual wait is
        // timeouts.githubRepoDelay — confirm the two agree.
        if (i > 0) {
          await log(' ⏰ Waiting 2 seconds before next batch...', { verbose: true });
          await new Promise(resolve => setTimeout(resolve, timeouts.githubRepoDelay));
        }

        // Execute GraphQL query via the gh CLI.
        // NOTE(review): `query` is interpolated into a single-quoted shell
        // argument with no escaping; this is safe only while owner/repo and
        // issue numbers cannot contain single quotes — verify upstream.
        const { exec } = await import('child_process');
        const { promisify } = await import('util');
        const execAsync = promisify(exec);
        const { stdout } = await execAsync(`gh api graphql -f query='${query}'`, {
          encoding: 'utf8',
          maxBuffer: githubLimits.bufferMaxSize,
          env: process.env
        });

        const data = JSON.parse(stdout);

        // Process results for this batch
        for (const issueNum of batch) {
          const issueData = data.data?.repository?.[`issue${issueNum}`];
          if (issueData) {
            const linkedPRs = [];

            // Extract linked PRs from timeline items. Only open, non-draft
            // PRs are kept; non-PR cross-references carry none of the inline
            // fragment's fields, so the state check filters them out too.
            for (const item of issueData.timelineItems?.nodes || []) {
              if (item?.source && item.source.state === 'OPEN' && !item.source.isDraft) {
                linkedPRs.push({
                  number: item.source.number,
                  title: item.source.title,
                  state: item.source.state,
                  url: item.source.url
                });
              }
            }

            results[issueNum] = {
              title: issueData.title,
              state: issueData.state,
              openPRCount: linkedPRs.length,
              linkedPRs: linkedPRs
            };
          } else {
            // Issue not found or error
            results[issueNum] = {
              openPRCount: 0,
              linkedPRs: [],
              error: 'Issue not found'
            };
          }
        }

        await log(` ✅ Batch ${Math.floor(i / BATCH_SIZE) + 1}/${Math.ceil(issueNumbers.length / BATCH_SIZE)} processed (${batch.length} issues)`, { verbose: true });

      } catch (batchError) {
        await log(` ⚠️ GraphQL batch query failed: ${cleanErrorMessage(batchError)}`, { level: 'warning' });

        // Fall back to individual REST API calls for this batch
        await log(' 🔄 Falling back to REST API for batch...', { verbose: true });

        for (const issueNum of batch) {
          try {
            // NOTE(review): exec/promisify are re-imported per issue here (and
            // once per batch above); hoisting them to the top of the function
            // would avoid the repeated dynamic import.
            const { exec } = await import('child_process');
            const { promisify } = await import('util');
            const execAsync = promisify(exec);
            // jq filter counts open-PR cross-references only.
            const cmd = `gh api repos/${owner}/${repo}/issues/${issueNum}/timeline --jq '[.[] | select(.event == "cross-referenced" and .source.issue.pull_request != null and .source.issue.state == "open")] | length'`;

            const { stdout } = await execAsync(cmd, { encoding: 'utf8', env: process.env });
            const openPrCount = parseInt(stdout.trim()) || 0;

            results[issueNum] = {
              openPRCount: openPrCount,
              linkedPRs: [] // REST API doesn't give us PR details easily
            };
          } catch (restError) {
            // Record the failure per issue rather than aborting the batch.
            results[issueNum] = {
              openPRCount: 0,
              linkedPRs: [],
              error: cleanErrorMessage(restError)
            };
          }
        }
      }
    }

    // Log summary
    const totalIssues = issueNumbers.length;
    const issuesWithPRs = Object.values(results).filter(r => r.openPRCount > 0).length;
    await log(` 📊 Batch PR check complete: ${issuesWithPRs}/${totalIssues} issues have open PRs`, { verbose: true });

    return results;

  } catch (error) {
    await log(` ❌ Batch PR check failed: ${cleanErrorMessage(error)}`, { level: 'error' });
    return {};
  }
}
|
|
163
|
+
|
|
164
|
+
/**
 * Batch check if repositories are archived using GraphQL.
 * This is more efficient than checking each repository individually.
 *
 * Repositories are queried in batches of 50 via `gh api graphql`; on batch
 * failure, each repository in the batch falls back to a REST call, and a
 * repository that cannot be checked at all is recorded as NOT archived
 * (safer to include than exclude).
 *
 * @param {Array<{owner: string, name: string}>} repositories - Array of repository objects with owner and name
 * @returns {Promise<Object>} Object mapping "owner/repo" to isArchived boolean;
 *   {} on empty input or total failure (errors are logged, never thrown)
 */
export async function batchCheckArchivedRepositories(repositories) {
  try {
    if (!repositories || repositories.length === 0) {
      return {};
    }

    await log(` 🔍 Batch checking archived status for ${repositories.length} repositories...`, { verbose: true });

    // GraphQL has complexity limits, so batch in groups of 50
    const BATCH_SIZE = 50;
    const results = {};

    for (let i = 0; i < repositories.length; i += BATCH_SIZE) {
      const batch = repositories.slice(i, i + BATCH_SIZE);

      // Build GraphQL query for this batch: one aliased `repo<index>` field
      // per repository, fetching only the archived flag.
      const queryFields = batch.map((repo, index) => `
        repo${index}: repository(owner: "${repo.owner}", name: "${repo.name}") {
          nameWithOwner
          isArchived
        }`).join('\n');

      const query = `
        query CheckArchivedStatus {
          ${queryFields}
        }
      `;

      try {
        // Add small delay between batches to respect rate limits
        // NOTE(review): the message says "2 seconds" but the actual wait is
        // timeouts.githubRepoDelay — confirm the two agree.
        if (i > 0) {
          await log(' ⏰ Waiting 2 seconds before next batch...', { verbose: true });
          await new Promise(resolve => setTimeout(resolve, timeouts.githubRepoDelay));
        }

        // Execute GraphQL query via the gh CLI (query is interpolated into a
        // single-quoted shell argument; owner/repo names cannot contain quotes).
        const { exec } = await import('child_process');
        const { promisify } = await import('util');
        const execAsync = promisify(exec);
        const { stdout } = await execAsync(`gh api graphql -f query='${query}'`, {
          encoding: 'utf8',
          maxBuffer: githubLimits.bufferMaxSize,
          env: process.env
        });

        const data = JSON.parse(stdout);

        // Process results for this batch. Repositories the query could not
        // resolve are simply absent from `results` on this path (unlike the
        // REST fallback below, which records `false` for unknowns).
        batch.forEach((repo, index) => {
          const repoData = data.data?.[`repo${index}`];
          if (repoData) {
            const repoKey = `${repo.owner}/${repo.name}`;
            results[repoKey] = repoData.isArchived;
          }
        });

        await log(` ✅ Batch ${Math.floor(i / BATCH_SIZE) + 1}/${Math.ceil(repositories.length / BATCH_SIZE)} processed (${batch.length} repositories)`, { verbose: true });

      } catch (batchError) {
        await log(` ⚠️ GraphQL batch query failed: ${cleanErrorMessage(batchError)}`, { level: 'warning' });

        // Fall back to individual REST API calls for this batch
        await log(' 🔄 Falling back to REST API for batch...', { verbose: true });

        for (const repo of batch) {
          try {
            const { exec } = await import('child_process');
            const { promisify } = await import('util');
            const execAsync = promisify(exec);
            const cmd = `gh api repos/${repo.owner}/${repo.name} --jq .archived`;

            const { stdout } = await execAsync(cmd, { encoding: 'utf8', env: process.env });
            const isArchived = stdout.trim() === 'true';

            const repoKey = `${repo.owner}/${repo.name}`;
            results[repoKey] = isArchived;
          } catch {
            // If we can't check, assume it's not archived (safer to include than exclude)
            const repoKey = `${repo.owner}/${repo.name}`;
            results[repoKey] = false;
            await log(` ⚠️ Could not check ${repoKey}, assuming not archived`, { verbose: true });
          }
        }
      }
    }

    // Log summary
    const archivedCount = Object.values(results).filter(isArchived => isArchived).length;
    await log(` 📊 Batch archived check complete: ${archivedCount}/${repositories.length} repositories are archived`, { verbose: true });

    return results;

  } catch (error) {
    await log(` ❌ Batch archived check failed: ${cleanErrorMessage(error)}`, { level: 'error' });
    return {};
  }
}
|
|
267
|
+
|
|
268
|
+
// Export all functions as default object too
// (allows `import githubBatch from ...` in addition to the named imports above)
export default {
  batchCheckPullRequestsForIssues,
  batchCheckArchivedRepositories
};
|
|
@@ -0,0 +1,258 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* GraphQL API utilities for GitHub issue fetching
|
|
3
|
+
* This module provides functions to fetch issues using GitHub's GraphQL API
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
/**
 * Fetch issues from a single repository with pagination support for >100 issues.
 *
 * Loops cursor-based pages of OPEN issues via `gh api graphql` until
 * pageInfo.hasNextPage is false. On any error, returns whatever pages were
 * collected so far instead of throwing.
 *
 * @param {string} owner - Repository owner
 * @param {string} repoName - Repository name
 * @param {Function} log - Logging function
 * @param {Function} cleanErrorMessage - Error message cleaner
 * @param {number} issueLimit - Maximum number of issues to fetch per query (default 100)
 * @returns {Promise<Array>} Array of issues ({ number, title, url, createdAt })
 */
async function fetchRepositoryIssuesWithPagination(owner, repoName, log, cleanErrorMessage, issueLimit = 100) {
  const { exec } = await import('child_process');
  const { promisify } = await import('util');
  const execAsync = promisify(exec);
  const allIssues = [];
  let hasNextPage = true;
  let cursor = null;
  let pageNum = 0;

  try {
    while (hasNextPage) {
      pageNum++;

      // Build query with cursor for pagination
      const graphqlQuery = `
        query($owner: String!, $repo: String!, $issueLimit: Int!, $cursor: String) {
          repository(owner: $owner, name: $repo) {
            issues(states: OPEN, first: $issueLimit, after: $cursor) {
              totalCount
              pageInfo {
                hasNextPage
                endCursor
              }
              nodes {
                number
                title
                url
                createdAt
              }
            }
          }
        }
      `;

      // Execute GraphQL query.
      // The replace() wraps any single quote as '\'' for the single-quoted
      // shell argument below (the query itself contains none; defensive).
      const escapedQuery = graphqlQuery.replace(/'/g, '\'\\\'\'');
      let graphqlCmd = `gh api graphql -f query='${escapedQuery}' -f owner='${owner}' -f repo='${repoName}' -F issueLimit=${issueLimit}`;

      if (cursor) {
        // NOTE(review): cursor is interpolated into a single-quoted shell arg
        // without escaping — assumes GitHub cursors never contain quotes.
        graphqlCmd += ` -f cursor='${cursor}'`;
      }

      await log(` 📄 Fetching issues page ${pageNum} from ${owner}/${repoName}...`, { verbose: true });

      // Add delay for rate limiting
      await new Promise(resolve => setTimeout(resolve, 1000));

      const { stdout } = await execAsync(graphqlCmd, { encoding: 'utf8', env: process.env });
      const data = JSON.parse(stdout);
      const issuesData = data.data.repository.issues;

      // Add issues to collection
      allIssues.push(...issuesData.nodes);

      // Check if there are more pages
      hasNextPage = issuesData.pageInfo.hasNextPage;
      cursor = issuesData.pageInfo.endCursor;

      await log(` ✅ Fetched ${issuesData.nodes.length} issues (total so far: ${allIssues.length}/${issuesData.totalCount})`, { verbose: true });
    }

    return allIssues;

  } catch (error) {
    await log(` ❌ Failed to fetch issues from ${owner}/${repoName}: ${cleanErrorMessage(error)}`, { verbose: true });
    // Return what we have so far
    return allIssues;
  }
}
|
|
84
|
+
|
|
85
|
+
/**
 * Try to fetch issues using GraphQL API with full pagination support.
 * This approach uses cursor-based pagination to handle unlimited repositories and issues.
 *
 * Pipeline: (1) page through all repositories of the org/user, (2) drop
 * repositories not owned by the target and archived ones, (3) for each
 * remaining repository with open issues, fetch all its issues via
 * fetchRepositoryIssuesWithPagination. On any error, signals the caller to
 * fall back to the `gh api --paginate` approach via success: false.
 *
 * @param {string} owner - Organization or user name
 * @param {string} scope - 'organization' or 'user'
 * @param {Function} log - Logging function
 * @param {Function} cleanErrorMessage - Error message cleaner
 * @param {number} repoLimit - Maximum number of repos to fetch per query (default 100)
 * @param {number} issueLimit - Maximum number of issues to fetch per repo query (default 100)
 * @returns {Promise<{success: boolean, issues: Array, repoCount: number}>}
 */
export async function tryFetchIssuesWithGraphQL(owner, scope, log, cleanErrorMessage, repoLimit = 100, issueLimit = 100) {
  const { exec } = await import('child_process');
  const { promisify } = await import('util');
  const execAsync = promisify(exec);

  try {
    await log(' 🧪 Attempting GraphQL approach with pagination support...', { verbose: true });

    const isOrg = scope === 'organization';
    const allRepos = [];
    let hasNextRepoPage = true;
    let repoCursor = null;
    let repoPageNum = 0;

    // Fetch all repositories with pagination
    while (hasNextRepoPage) {
      repoPageNum++;

      // Build GraphQL query to fetch repos. The two variants differ only in
      // the root field (organization vs user). The `issues(first: 1)`
      // sub-selection is a trick to read totalCount cheaply without
      // fetching the issues themselves.
      const graphqlQuery = isOrg ? `
        query($owner: String!, $repoLimit: Int!, $cursor: String) {
          organization(login: $owner) {
            repositories(first: $repoLimit, orderBy: {field: UPDATED_AT, direction: DESC}, after: $cursor) {
              totalCount
              pageInfo {
                hasNextPage
                endCursor
              }
              nodes {
                name
                owner {
                  login
                }
                isArchived
                issues(states: OPEN, first: 1) {
                  totalCount
                }
              }
            }
          }
        }
      ` : `
        query($owner: String!, $repoLimit: Int!, $cursor: String) {
          user(login: $owner) {
            repositories(first: $repoLimit, orderBy: {field: UPDATED_AT, direction: DESC}, after: $cursor) {
              totalCount
              pageInfo {
                hasNextPage
                endCursor
              }
              nodes {
                name
                owner {
                  login
                }
                isArchived
                issues(states: OPEN, first: 1) {
                  totalCount
                }
              }
            }
          }
        }
      `;

      // Execute GraphQL query (single quotes in the query are wrapped as '\''
      // for the single-quoted shell argument; defensive — there are none).
      const escapedQuery = graphqlQuery.replace(/'/g, '\'\\\'\'');
      let graphqlCmd = `gh api graphql -f query='${escapedQuery}' -f owner='${owner}' -F repoLimit=${repoLimit}`;

      if (repoCursor) {
        // NOTE(review): cursor is interpolated unescaped into a single-quoted
        // shell arg — assumes GitHub cursors never contain quotes.
        graphqlCmd += ` -f cursor='${repoCursor}'`;
      }

      await log(` 📄 Fetching repositories page ${repoPageNum} for ${owner}...`, { verbose: true });

      // Add delay for rate limiting
      await new Promise(resolve => setTimeout(resolve, 2000));

      const { stdout } = await execAsync(graphqlCmd, { encoding: 'utf8', env: process.env });
      const data = JSON.parse(stdout);
      const repos = isOrg ? data.data.organization.repositories : data.data.user.repositories;

      // Add repos to collection
      allRepos.push(...repos.nodes);

      // Check if there are more pages
      hasNextRepoPage = repos.pageInfo.hasNextPage;
      repoCursor = repos.pageInfo.endCursor;

      const totalRepos = repos.totalCount;
      await log(` ✅ Fetched ${repos.nodes.length} repositories (total so far: ${allRepos.length}/${totalRepos})`, { verbose: true });
    }

    await log(` 📊 Fetched all ${allRepos.length} repositories`, { verbose: true });

    // Filter out archived repositories AND repositories not owned by the target user/org
    const ownedRepos = allRepos.filter(repo => repo.owner.login === owner);
    const unownedCount = allRepos.length - ownedRepos.length;

    if (unownedCount > 0) {
      await log(` ⏭️ Skipping ${unownedCount} repository(ies) not owned by ${owner}`);
    }

    const nonArchivedRepos = ownedRepos.filter(repo => !repo.isArchived);
    const archivedCount = ownedRepos.length - nonArchivedRepos.length;

    if (archivedCount > 0) {
      await log(` ⏭️ Skipping ${archivedCount} archived repository(ies)`);
    }

    await log(` ✅ Processing ${nonArchivedRepos.length} non-archived repositories owned by ${owner}`);

    // Now fetch issues from each repository
    // For repositories with >100 issues, use pagination
    const allIssues = [];
    let reposWithIssues = 0;

    for (const repo of nonArchivedRepos) {
      const issueCount = repo.issues.totalCount;

      // Skip repos with no issues
      if (issueCount === 0) {
        continue;
      }

      await log(` 🔍 Fetching ${issueCount} issue(s) from ${repo.owner.login}/${repo.name}...`, { verbose: true });

      // Fetch all issues from this repository with pagination
      const repoIssues = await fetchRepositoryIssuesWithPagination(
        repo.owner.login,
        repo.name,
        log,
        cleanErrorMessage,
        issueLimit
      );

      // Add repository information to each issue
      for (const issue of repoIssues) {
        allIssues.push({
          ...issue,
          repository: {
            name: repo.name,
            owner: repo.owner
          }
        });
      }

      if (repoIssues.length > 0) {
        reposWithIssues++;
        await log(` ✅ Collected ${repoIssues.length} issue(s) from ${repo.owner.login}/${repo.name}`, { verbose: true });
      }
    }

    await log(` ✅ GraphQL pagination complete: ${nonArchivedRepos.length} non-archived repos, ${allIssues.length} issues from ${reposWithIssues} repos with issues`, { verbose: true });

    return { success: true, issues: allIssues, repoCount: nonArchivedRepos.length };

  } catch (error) {
    await log(` ❌ GraphQL approach failed: ${cleanErrorMessage(error)}`, { verbose: true });
    await log(' 💡 Falling back to gh api --paginate approach...', { verbose: true });
    return { success: false, issues: [], repoCount: 0 };
  }
}
|