@link-assistant/hive-mind 0.39.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +20 -0
- package/LICENSE +24 -0
- package/README.md +769 -0
- package/package.json +58 -0
- package/src/agent.lib.mjs +705 -0
- package/src/agent.prompts.lib.mjs +196 -0
- package/src/buildUserMention.lib.mjs +71 -0
- package/src/claude-limits.lib.mjs +389 -0
- package/src/claude.lib.mjs +1445 -0
- package/src/claude.prompts.lib.mjs +203 -0
- package/src/codex.lib.mjs +552 -0
- package/src/codex.prompts.lib.mjs +194 -0
- package/src/config.lib.mjs +207 -0
- package/src/contributing-guidelines.lib.mjs +268 -0
- package/src/exit-handler.lib.mjs +205 -0
- package/src/git.lib.mjs +145 -0
- package/src/github-issue-creator.lib.mjs +246 -0
- package/src/github-linking.lib.mjs +152 -0
- package/src/github.batch.lib.mjs +272 -0
- package/src/github.graphql.lib.mjs +258 -0
- package/src/github.lib.mjs +1479 -0
- package/src/hive.config.lib.mjs +254 -0
- package/src/hive.mjs +1500 -0
- package/src/instrument.mjs +191 -0
- package/src/interactive-mode.lib.mjs +1000 -0
- package/src/lenv-reader.lib.mjs +206 -0
- package/src/lib.mjs +490 -0
- package/src/lino.lib.mjs +176 -0
- package/src/local-ci-checks.lib.mjs +324 -0
- package/src/memory-check.mjs +419 -0
- package/src/model-mapping.lib.mjs +145 -0
- package/src/model-validation.lib.mjs +278 -0
- package/src/opencode.lib.mjs +479 -0
- package/src/opencode.prompts.lib.mjs +194 -0
- package/src/protect-branch.mjs +159 -0
- package/src/review.mjs +433 -0
- package/src/reviewers-hive.mjs +643 -0
- package/src/sentry.lib.mjs +284 -0
- package/src/solve.auto-continue.lib.mjs +568 -0
- package/src/solve.auto-pr.lib.mjs +1374 -0
- package/src/solve.branch-errors.lib.mjs +341 -0
- package/src/solve.branch.lib.mjs +230 -0
- package/src/solve.config.lib.mjs +342 -0
- package/src/solve.error-handlers.lib.mjs +256 -0
- package/src/solve.execution.lib.mjs +291 -0
- package/src/solve.feedback.lib.mjs +436 -0
- package/src/solve.mjs +1128 -0
- package/src/solve.preparation.lib.mjs +210 -0
- package/src/solve.repo-setup.lib.mjs +114 -0
- package/src/solve.repository.lib.mjs +961 -0
- package/src/solve.results.lib.mjs +558 -0
- package/src/solve.session.lib.mjs +135 -0
- package/src/solve.validation.lib.mjs +325 -0
- package/src/solve.watch.lib.mjs +572 -0
- package/src/start-screen.mjs +324 -0
- package/src/task.mjs +308 -0
- package/src/telegram-bot.mjs +1481 -0
- package/src/telegram-markdown.lib.mjs +64 -0
- package/src/usage-limit.lib.mjs +218 -0
- package/src/version.lib.mjs +41 -0
- package/src/youtrack/solve.youtrack.lib.mjs +116 -0
- package/src/youtrack/youtrack-sync.mjs +219 -0
- package/src/youtrack/youtrack.lib.mjs +425 -0
package/src/hive.mjs
ADDED
|
@@ -0,0 +1,1500 @@
|
|
|
1
|
+
#!/usr/bin/env node
// Import Sentry instrumentation first (must be before other imports)
import './instrument.mjs';

// Fast path: answer --version without loading any heavy dependencies.
const earlyArgs = process.argv.slice(2);
if (earlyArgs.includes('--version')) {
  const { getVersion } = await import('./version.lib.mjs');
  try {
    console.log(await getVersion());
  } catch {
    console.error('Error: Unable to determine version');
    process.exit(1);
  }
  process.exit(0);
}
|
|
16
|
+
// Fast path: answer --help/-h with a minimal module set (use-m + yargs + shared config).
if (earlyArgs.includes('--help') || earlyArgs.includes('-h')) {
  try {
    // Bootstrap use-m so versioned packages can be loaded without a local install.
    const { use } = eval(await (await fetch('https://unpkg.com/use-m/use.js')).text());
    globalThis.use = use;
    const yargsModule = await use('yargs@17.7.2');
    const yargs = yargsModule.default || yargsModule;
    const { hideBin } = await use('yargs@17.7.2/helpers');
    const rawArgs = hideBin(process.argv);

    // Reuse createYargsConfig from shared module to avoid duplication
    const { createYargsConfig } = await import('./hive.config.lib.mjs');

    // Show help and exit
    createYargsConfig(yargs(rawArgs)).version(false).showHelp();
    process.exit(0);
  } catch (error) {
    console.error('ā Error: Failed to load help information');
    console.error(`  ${error.message}`);
    console.error('   This might be due to network issues or missing dependencies.');
    console.error('   Please check your internet connection and try again.');
    process.exit(1);
  }
}
|
|
41
|
+
export { createYargsConfig } from './hive.config.lib.mjs';
|
|
42
|
+
// Only execute main logic if this module is being run directly (not imported)
|
|
43
|
+
// This prevents heavy module loading when hive.mjs is imported by other modules
|
|
44
|
+
// Check if we're being executed (not imported) by looking at various indicators:
|
|
45
|
+
// 1. process.argv[1] is the executed file path
|
|
46
|
+
// 2. import.meta.url is this file's URL
|
|
47
|
+
// 3. For global installs, argv[1] might be a symlink, so we check if it contains 'hive'
|
|
48
|
+
import { fileURLToPath } from 'url';
|
|
49
|
+
const isDirectExecution = process.argv[1] === fileURLToPath(import.meta.url) ||
|
|
50
|
+
(process.argv[1] && (process.argv[1].includes('/hive') || process.argv[1].endsWith('hive')));
|
|
51
|
+
|
|
52
|
+
if (isDirectExecution) {
|
|
53
|
+
console.log('š Hive Mind - AI-powered issue solver');
|
|
54
|
+
console.log(' Initializing...');
|
|
55
|
+
try {
|
|
56
|
+
console.log(' Loading dependencies (this may take a moment)...');
|
|
57
|
+
// Helper function to add timeout to async operations.
// Races `promise` against a rejection timer; the timer is cleared once the
// race settles so it cannot keep the event loop alive or fire after the fact.
// @param {Promise} promise - Operation to guard.
// @param {number} timeoutMs - Milliseconds to wait before rejecting.
// @param {string} operation - Human-readable name used in the timeout error.
// @returns {Promise} Settles like `promise`, or rejects with a timeout Error.
const withTimeout = (promise, timeoutMs, operation) => {
  let timer;
  const timeout = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error(`Operation '${operation}' timed out after ${timeoutMs}ms. This might be due to slow network or npm configuration issues.`)), timeoutMs);
  });
  // Clear the timer regardless of which promise wins. The original never
  // cleared it, so every call held the process open for the full timeoutMs
  // and the rejection fired (unhandled) even after a successful result.
  return Promise.race([promise, timeout]).finally(() => clearTimeout(timer));
};
|
|
66
|
+
|
|
67
|
+
// Use use-m to dynamically import modules for cross-runtime compatibility
|
|
68
|
+
if (typeof use === 'undefined') {
|
|
69
|
+
try {
|
|
70
|
+
// Wrap fetch in timeout to prevent hanging
|
|
71
|
+
const useMCode = await withTimeout(
|
|
72
|
+
fetch('https://unpkg.com/use-m/use.js').then(r => r.text()),
|
|
73
|
+
10000,
|
|
74
|
+
'fetching use-m library'
|
|
75
|
+
);
|
|
76
|
+
globalThis.use = (await eval(useMCode)).use;
|
|
77
|
+
} catch (error) {
|
|
78
|
+
console.error('ā Fatal error: Failed to load dependencies');
|
|
79
|
+
console.error(` ${error.message}`);
|
|
80
|
+
console.error(' This might be due to network issues or missing dependencies.');
|
|
81
|
+
console.error(' Please check your internet connection and try again.');
|
|
82
|
+
process.exit(1);
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
// Use command-stream for consistent $ behavior across runtimes
|
|
86
|
+
const { $ } = await withTimeout(
|
|
87
|
+
use('command-stream'),
|
|
88
|
+
30000, // 30 second timeout
|
|
89
|
+
'loading command-stream'
|
|
90
|
+
);
|
|
91
|
+
const yargsModule = await withTimeout(
|
|
92
|
+
use('yargs@17.7.2'),
|
|
93
|
+
30000,
|
|
94
|
+
'loading yargs'
|
|
95
|
+
);
|
|
96
|
+
const yargs = yargsModule.default || yargsModule;
|
|
97
|
+
const { hideBin } = await withTimeout(
|
|
98
|
+
use('yargs@17.7.2/helpers'),
|
|
99
|
+
30000,
|
|
100
|
+
'loading yargs helpers'
|
|
101
|
+
);
|
|
102
|
+
const path = (await withTimeout(use('path'), 30000, 'loading path')).default;
|
|
103
|
+
const fs = (await withTimeout(use('fs'), 30000, 'loading fs')).promises;
|
|
104
|
+
// Import shared library functions
|
|
105
|
+
const lib = await import('./lib.mjs');
|
|
106
|
+
const { log, setLogFile, getAbsoluteLogPath, formatTimestamp, cleanErrorMessage, cleanupTempDirectories } = lib;
|
|
107
|
+
const yargsConfigLib = await import('./hive.config.lib.mjs');
|
|
108
|
+
const { createYargsConfig } = yargsConfigLib;
|
|
109
|
+
const claudeLib = await import('./claude.lib.mjs');
|
|
110
|
+
const { validateClaudeConnection } = claudeLib;
|
|
111
|
+
// Import model validation library
|
|
112
|
+
const modelValidation = await import('./model-validation.lib.mjs');
|
|
113
|
+
const { validateAndExitOnInvalidModel } = modelValidation;
|
|
114
|
+
const githubLib = await import('./github.lib.mjs');
|
|
115
|
+
const { checkGitHubPermissions, fetchAllIssuesWithPagination, fetchProjectIssues, isRateLimitError, batchCheckPullRequestsForIssues, parseGitHubUrl, batchCheckArchivedRepositories } = githubLib;
|
|
116
|
+
// Import YouTrack-related functions
|
|
117
|
+
const youTrackLib = await import('./youtrack/youtrack.lib.mjs');
|
|
118
|
+
const {
|
|
119
|
+
validateYouTrackConfig,
|
|
120
|
+
testYouTrackConnection,
|
|
121
|
+
createYouTrackConfigFromEnv
|
|
122
|
+
} = youTrackLib;
|
|
123
|
+
const youTrackSync = await import('./youtrack/youtrack-sync.mjs');
|
|
124
|
+
const { syncYouTrackToGitHub, formatIssuesForHive } = youTrackSync;
|
|
125
|
+
const memCheck = await import('./memory-check.mjs');
|
|
126
|
+
const { checkSystem } = memCheck;
|
|
127
|
+
const exitHandler = await import('./exit-handler.lib.mjs');
|
|
128
|
+
const { initializeExitHandler, installGlobalExitHandlers, safeExit } = exitHandler;
|
|
129
|
+
const sentryLib = await import('./sentry.lib.mjs');
|
|
130
|
+
const { initializeSentry, withSentry, addBreadcrumb, reportError } = sentryLib;
|
|
131
|
+
const graphqlLib = await import('./github.graphql.lib.mjs');
|
|
132
|
+
const { tryFetchIssuesWithGraphQL } = graphqlLib;
|
|
133
|
+
const commandName = process.argv[1] ? process.argv[1].split('/').pop() : '';
|
|
134
|
+
const isLocalScript = commandName.endsWith('.mjs');
|
|
135
|
+
const solveCommand = isLocalScript ? './solve.mjs' : 'solve';
|
|
136
|
+
|
|
137
|
+
/**
 * Fallback function to fetch issues from organization/user repositories
 * when the search API hits rate limits.
 *
 * Strategy 1 (all-issues mode only): a single GraphQL query across repositories.
 * Strategy 2: `gh api --paginate` repository listing plus per-repo `gh issue list`.
 *
 * @param {string} owner - Organization or user name
 * @param {string} scope - 'organization' or 'user'
 * @param {string} monitorTag - Label to filter by (ignored when fetchAllIssues is true)
 * @param {boolean} fetchAllIssues - Whether to fetch all open issues or only labeled ones
 * @returns {Promise<Array>} Issues, each annotated with a `repository` object; empty array on failure
 */
async function fetchIssuesFromRepositories(owner, scope, monitorTag, fetchAllIssues = false) {
  const { exec } = await import('child_process');
  const { promisify } = await import('util');
  const execAsync = promisify(exec);
  try {
    await log(`   š Using repository-by-repository fallback for ${scope}: ${owner}`);

    // Strategy 1: Try GraphQL approach first (faster but has limitations).
    // Only try GraphQL for "all issues" mode, not for labeled issues.
    if (fetchAllIssues) {
      const graphqlResult = await tryFetchIssuesWithGraphQL(owner, scope, log, cleanErrorMessage);
      if (graphqlResult.success) {
        await log(`   ā
GraphQL approach successful: ${graphqlResult.issues.length} issues from ${graphqlResult.repoCount} repositories`);
        return graphqlResult.issues;
      }
    }

    // Strategy 2: Fallback to gh api --paginate approach (comprehensive but slower)
    await log('   š Using gh api --paginate approach for comprehensive coverage...', { verbose: true });

    // Get the list of ALL repositories via gh api with --paginate (unlimited
    // pagination). Include isArchived so archived repos can be filtered below.
    const reposEndpoint = scope === 'organization' ? `orgs/${owner}/repos` : `users/${owner}/repos`;
    const repoListCmd = `gh api ${reposEndpoint} --paginate --jq '.[] | {name: .name, owner: .owner.login, isArchived: .archived}'`;

    await log('   š Fetching repository list (using --paginate for unlimited pagination)...', { verbose: true });
    await log(`   š Command: ${repoListCmd}`, { verbose: true });

    // Add delay for rate limiting
    await new Promise(resolve => setTimeout(resolve, 2000));

    const { stdout: repoOutput } = await execAsync(repoListCmd, { encoding: 'utf8', env: process.env });
    // Parse the output line by line, as gh api with --jq outputs one JSON object per line
    const repoLines = repoOutput.trim().split('\n').filter(line => line.trim());
    const allRepositories = repoLines.map(line => JSON.parse(line));

    await log(`   š Found ${allRepositories.length} repositories`);

    // Filter repositories to only include those owned by the target user/org.
    // NOTE: the --jq projection above yields owner as a plain string
    // (.owner.login); the ?.login fallback tolerates raw owner objects too.
    const ownedRepositories = allRepositories.filter(repo => {
      const repoOwner = repo.owner?.login || repo.owner;
      return repoOwner === owner;
    });
    const unownedCount = allRepositories.length - ownedRepositories.length;

    if (unownedCount > 0) {
      await log(`   āļø Skipping ${unownedCount} repository(ies) not owned by ${owner}`);
    }

    // Filter out archived repositories from owned repositories
    const repositories = ownedRepositories.filter(repo => !repo.isArchived);
    const archivedCount = ownedRepositories.length - repositories.length;

    if (archivedCount > 0) {
      await log(`   āļø Skipping ${archivedCount} archived repository(ies)`);
    }

    await log(`   ā
Processing ${repositories.length} non-archived repositories owned by ${owner}`);

    const collectedIssues = []; // was `let` — never reassigned, only push()ed into
    let processedRepos = 0;     // counts repositories whose issue fetch succeeded

    // Process repositories sequentially with a delay to avoid overwhelming the API
    for (const repo of repositories) {
      try {
        const repoName = repo.name;
        const ownerName = repo.owner?.login || owner;

        await log(`   š Fetching issues from ${ownerName}/${repoName}...`, { verbose: true });

        // Build the appropriate issue list command (one shared base, optional label filter)
        const baseCmd = `gh issue list --repo ${ownerName}/${repoName} --state open`;
        const issueCmd = fetchAllIssues
          ? `${baseCmd} --json url,title,number,createdAt`
          : `${baseCmd} --label "${monitorTag}" --json url,title,number,createdAt`;

        // Add delay between repository requests
        await new Promise(resolve => setTimeout(resolve, 1000));

        const repoIssues = await fetchAllIssuesWithPagination(issueCmd);

        // Add repository information to each issue
        const issuesWithRepo = repoIssues.map(issue => ({
          ...issue,
          repository: {
            name: repoName,
            owner: { login: ownerName }
          }
        }));

        collectedIssues.push(...issuesWithRepo);
        processedRepos++;

        if (issuesWithRepo.length > 0) {
          await log(`     ā
Found ${issuesWithRepo.length} issues in ${ownerName}/${repoName}`, { verbose: true });
        }

      } catch (repoError) {
        // Best-effort: report the failure and continue with the other repositories.
        reportError(repoError, {
          context: 'fetchIssuesFromRepositories',
          repo: repo.name,
          operation: 'fetch_repo_issues'
        });
        await log(`   ā ļø Failed to fetch issues from ${repo.name}: ${cleanErrorMessage(repoError)}`, { verbose: true });
      }
    }

    await log(`   ā
Repository fallback complete: ${collectedIssues.length} issues from ${processedRepos}/${repositories.length} repositories`);
    return collectedIssues;

  } catch (error) {
    reportError(error, {
      context: 'fetchIssuesFromRepositories',
      owner,
      scope,
      operation: 'repository_fallback'
    });
    await log(`   ā Repository fallback failed: ${cleanErrorMessage(error)}`, { level: 'error' });
    return [];
  }
}
|
|
274
|
+
|
|
275
|
+
// Configure command line arguments - GitHub URL as positional argument
|
|
276
|
+
const rawArgs = hideBin(process.argv);
|
|
277
|
+
// Use .parse() instead of .argv to ensure .strict() mode works correctly
|
|
278
|
+
// When you use .argv, strict mode doesn't trigger properly
|
|
279
|
+
// See: https://github.com/yargs/yargs/issues - .strict() only works with .parse()
|
|
280
|
+
let argv;
|
|
281
|
+
|
|
282
|
+
// Temporarily suppress stderr to prevent yargs from printing error messages
|
|
283
|
+
// We'll handle error reporting ourselves
|
|
284
|
+
const originalStderrWrite = process.stderr.write;
|
|
285
|
+
let stderrBuffer = '';
|
|
286
|
+
process.stderr.write = function(chunk, encoding, callback) {
|
|
287
|
+
// Capture stderr output instead of writing it
|
|
288
|
+
stderrBuffer += chunk.toString();
|
|
289
|
+
if (typeof encoding === 'function') {
|
|
290
|
+
encoding();
|
|
291
|
+
} else if (callback) {
|
|
292
|
+
callback();
|
|
293
|
+
}
|
|
294
|
+
return true;
|
|
295
|
+
};
|
|
296
|
+
|
|
297
|
+
try {
|
|
298
|
+
argv = await createYargsConfig(yargs()).parse(rawArgs);
|
|
299
|
+
// Restore stderr if parsing succeeded
|
|
300
|
+
process.stderr.write = originalStderrWrite;
|
|
301
|
+
} catch (error) {
|
|
302
|
+
// Restore stderr before handling the error
|
|
303
|
+
process.stderr.write = originalStderrWrite;
|
|
304
|
+
|
|
305
|
+
// If .strict() mode catches an unknown argument, yargs will throw an error
|
|
306
|
+
// We should fail fast for truly invalid arguments
|
|
307
|
+
if (error.message && error.message.includes('Unknown argument')) {
|
|
308
|
+
console.error('Error:', error.message);
|
|
309
|
+
process.exit(1);
|
|
310
|
+
}
|
|
311
|
+
|
|
312
|
+
// Yargs sometimes throws "Not enough arguments" errors even when arguments are present
|
|
313
|
+
// This is a quirk with optional positional arguments [github-url]
|
|
314
|
+
// The error.argv object still contains the parsed arguments, so we can safely continue
|
|
315
|
+
if (error.argv) {
|
|
316
|
+
argv = error.argv;
|
|
317
|
+
} else {
|
|
318
|
+
// If there's no argv object, it's a real error - show the captured stderr
|
|
319
|
+
if (stderrBuffer) {
|
|
320
|
+
process.stderr.write(stderrBuffer);
|
|
321
|
+
}
|
|
322
|
+
throw error;
|
|
323
|
+
}
|
|
324
|
+
|
|
325
|
+
// Normalize deprecated flags to new names
|
|
326
|
+
if (argv && (argv.skipToolCheck || argv.skipClaudeCheck)) argv.skipToolConnectionCheck = true;
|
|
327
|
+
if (argv && argv.toolCheck === false) argv.toolConnectionCheck = false;
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
let githubUrl = argv['github-url'];
|
|
331
|
+
|
|
332
|
+
// Set global verbose mode
|
|
333
|
+
global.verboseMode = argv.verbose;
|
|
334
|
+
|
|
335
|
+
// Use the universal GitHub URL parser
|
|
336
|
+
if (githubUrl) {
|
|
337
|
+
const parsedUrl = parseGitHubUrl(githubUrl);
|
|
338
|
+
|
|
339
|
+
if (!parsedUrl.valid) {
|
|
340
|
+
console.error('Error: Invalid GitHub URL format');
|
|
341
|
+
if (parsedUrl.error) {
|
|
342
|
+
console.error(` ${parsedUrl.error}`);
|
|
343
|
+
}
|
|
344
|
+
console.error('Expected: https://github.com/owner or https://github.com/owner/repo');
|
|
345
|
+
console.error('You can use any of these formats:');
|
|
346
|
+
console.error(' - https://github.com/owner');
|
|
347
|
+
console.error(' - https://github.com/owner/repo');
|
|
348
|
+
console.error(' - http://github.com/owner (will be converted to https)');
|
|
349
|
+
console.error(' - github.com/owner (will add https://)');
|
|
350
|
+
console.error(' - owner (will be converted to https://github.com/owner)');
|
|
351
|
+
console.error(' - owner/repo (will be converted to https://github.com/owner/repo)');
|
|
352
|
+
await safeExit(1, 'Error occurred');
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
// Check if it's a valid type for hive (user or repo)
|
|
356
|
+
if (parsedUrl.type !== 'user' && parsedUrl.type !== 'repo') {
|
|
357
|
+
console.error('Error: Invalid GitHub URL for monitoring');
|
|
358
|
+
console.error(` URL type '${parsedUrl.type}' is not supported`);
|
|
359
|
+
console.error('Expected: https://github.com/owner or https://github.com/owner/repo');
|
|
360
|
+
await safeExit(1, 'Error occurred');
|
|
361
|
+
}
|
|
362
|
+
|
|
363
|
+
// Use the normalized URL
|
|
364
|
+
githubUrl = parsedUrl.normalized;
|
|
365
|
+
}
|
|
366
|
+
|
|
367
|
+
// Validate GitHub URL format ONCE AND FOR ALL at the beginning
|
|
368
|
+
// Parse URL format: https://github.com/owner or https://github.com/owner/repo
|
|
369
|
+
let urlMatch = null;
|
|
370
|
+
|
|
371
|
+
// Only validate if we have a URL
|
|
372
|
+
const needsUrlValidation = githubUrl;
|
|
373
|
+
|
|
374
|
+
if (needsUrlValidation) {
|
|
375
|
+
// Do the regex matching ONCE - this result will be used everywhere
|
|
376
|
+
urlMatch = githubUrl.match(/^https:\/\/github\.com\/([^/]+)(\/([^/]+))?$/);
|
|
377
|
+
if (!urlMatch) {
|
|
378
|
+
console.error('Error: Invalid GitHub URL format');
|
|
379
|
+
console.error('Expected: https://github.com/owner or https://github.com/owner/repo');
|
|
380
|
+
console.error('You can use any of these formats:');
|
|
381
|
+
console.error(' - https://github.com/owner');
|
|
382
|
+
console.error(' - https://github.com/owner/repo');
|
|
383
|
+
console.error(' - http://github.com/owner (will be converted to https)');
|
|
384
|
+
console.error(' - github.com/owner (will add https://)');
|
|
385
|
+
console.error(' - owner (will be converted to https://github.com/owner)');
|
|
386
|
+
console.error(' - owner/repo (will be converted to https://github.com/owner/repo)');
|
|
387
|
+
await safeExit(1, 'Error occurred');
|
|
388
|
+
}
|
|
389
|
+
}
|
|
390
|
+
|
|
391
|
+
// Create log file with timestamp
|
|
392
|
+
// Use log-dir option if provided, otherwise use current working directory
|
|
393
|
+
let targetDir = argv.logDir || process.cwd();
|
|
394
|
+
|
|
395
|
+
// Verify the directory exists, create if necessary
|
|
396
|
+
try {
|
|
397
|
+
await fs.access(targetDir);
|
|
398
|
+
} catch (error) {
|
|
399
|
+
reportError(error, {
|
|
400
|
+
context: 'log_directory_access',
|
|
401
|
+
targetDir,
|
|
402
|
+
operation: 'check_directory_exists'
|
|
403
|
+
});
|
|
404
|
+
// If directory doesn't exist, try to create it
|
|
405
|
+
try {
|
|
406
|
+
await fs.mkdir(targetDir, { recursive: true });
|
|
407
|
+
} catch (mkdirError) {
|
|
408
|
+
reportError(mkdirError, {
|
|
409
|
+
context: 'log_directory_creation',
|
|
410
|
+
targetDir,
|
|
411
|
+
operation: 'create_directory'
|
|
412
|
+
});
|
|
413
|
+
console.error(`ā ļø Unable to create log directory: ${targetDir}`);
|
|
414
|
+
console.error(' Falling back to current working directory');
|
|
415
|
+
// Fall back to current working directory
|
|
416
|
+
targetDir = process.cwd();
|
|
417
|
+
}
|
|
418
|
+
}
|
|
419
|
+
|
|
420
|
+
const timestamp = formatTimestamp();
|
|
421
|
+
const logFile = path.join(targetDir, `hive-${timestamp}.log`);
|
|
422
|
+
|
|
423
|
+
// Set the log file for the lib.mjs logging system
|
|
424
|
+
setLogFile(logFile);
|
|
425
|
+
|
|
426
|
+
// Create the log file immediately
|
|
427
|
+
await fs.writeFile(logFile, `# Hive.mjs Log - ${new Date().toISOString()}\n\n`);
|
|
428
|
+
// Always use absolute path for log file display
|
|
429
|
+
const absoluteLogPath = path.resolve(logFile);
|
|
430
|
+
await log(`š Log file: ${absoluteLogPath}`);
|
|
431
|
+
await log(' (All output will be logged here)');
|
|
432
|
+
|
|
433
|
+
// Initialize Sentry integration (unless disabled)
|
|
434
|
+
if (argv.sentry) {
|
|
435
|
+
await initializeSentry({
|
|
436
|
+
noSentry: !argv.sentry,
|
|
437
|
+
debug: argv.verbose,
|
|
438
|
+
version: process.env.npm_package_version || '0.12.0'
|
|
439
|
+
});
|
|
440
|
+
|
|
441
|
+
// Add breadcrumb for monitoring configuration
|
|
442
|
+
addBreadcrumb({
|
|
443
|
+
category: 'hive',
|
|
444
|
+
message: 'Started monitoring',
|
|
445
|
+
level: 'info',
|
|
446
|
+
data: {
|
|
447
|
+
mode: argv.projectMode ? 'project' : (argv.allIssues ? 'all' : 'label'),
|
|
448
|
+
concurrency: argv.concurrency,
|
|
449
|
+
model: argv.model
|
|
450
|
+
}
|
|
451
|
+
});
|
|
452
|
+
}
|
|
453
|
+
|
|
454
|
+
// Initialize the exit handler with getAbsoluteLogPath function and Sentry cleanup
|
|
455
|
+
initializeExitHandler(getAbsoluteLogPath, log);
|
|
456
|
+
installGlobalExitHandlers();
|
|
457
|
+
|
|
458
|
+
// Unhandled error handlers are now managed by exit-handler.lib.mjs
|
|
459
|
+
|
|
460
|
+
// Validate GitHub URL requirement
|
|
461
|
+
if (!githubUrl) {
|
|
462
|
+
await log('ā GitHub URL is required', { level: 'error' });
|
|
463
|
+
await log(' Usage: hive <github-url> [options]', { level: 'error' });
|
|
464
|
+
await log(` š Full log file: ${absoluteLogPath}`, { level: 'error' });
|
|
465
|
+
await safeExit(1, 'Error occurred');
|
|
466
|
+
}
|
|
467
|
+
|
|
468
|
+
// Validate project mode arguments
|
|
469
|
+
if (argv.projectMode) {
|
|
470
|
+
if (!argv.projectNumber) {
|
|
471
|
+
await log('ā Project mode requires --project-number', { level: 'error' });
|
|
472
|
+
await log(' Usage: hive <github-url> --project-mode --project-number NUMBER --project-owner OWNER', { level: 'error' });
|
|
473
|
+
await safeExit(1, 'Error occurred');
|
|
474
|
+
}
|
|
475
|
+
|
|
476
|
+
if (!argv.projectOwner) {
|
|
477
|
+
await log('ā Project mode requires --project-owner', { level: 'error' });
|
|
478
|
+
await log(' Usage: hive <github-url> --project-mode --project-number NUMBER --project-owner OWNER', { level: 'error' });
|
|
479
|
+
await safeExit(1, 'Error occurred');
|
|
480
|
+
}
|
|
481
|
+
|
|
482
|
+
if (typeof argv.projectNumber !== 'number' || argv.projectNumber <= 0) {
|
|
483
|
+
await log('ā Project number must be a positive integer', { level: 'error' });
|
|
484
|
+
await safeExit(1, 'Error occurred');
|
|
485
|
+
}
|
|
486
|
+
}
|
|
487
|
+
|
|
488
|
+
// Validate model name EARLY - this always runs regardless of --skip-tool-connection-check
|
|
489
|
+
// Model validation is a simple string check and should always be performed
|
|
490
|
+
const tool = argv.tool || 'claude';
|
|
491
|
+
await validateAndExitOnInvalidModel(argv.model, tool, safeExit);
|
|
492
|
+
|
|
493
|
+
// Validate conflicting options
|
|
494
|
+
if (argv.skipIssuesWithPrs && argv.autoContinue) {
|
|
495
|
+
await log('ā Conflicting options: --skip-issues-with-prs and --auto-continue cannot be used together', { level: 'error' });
|
|
496
|
+
await log(' --skip-issues-with-prs: Skips issues that have any open PRs', { level: 'error' });
|
|
497
|
+
await log(' --auto-continue: Continues with existing PRs instead of creating new ones', { level: 'error' });
|
|
498
|
+
await log(` š Full log file: ${absoluteLogPath}`, { level: 'error' });
|
|
499
|
+
await safeExit(1, 'Error occurred');
|
|
500
|
+
}
|
|
501
|
+
|
|
502
|
+
// Helper function to check GitHub permissions - moved to github.lib.mjs
|
|
503
|
+
|
|
504
|
+
// Check GitHub permissions early in the process (skip in dry-run mode or when explicitly requested)
|
|
505
|
+
if (argv.dryRun || argv.skipToolConnectionCheck || argv.toolConnectionCheck === false) {
|
|
506
|
+
await log('ā© Skipping GitHub permissions check (dry-run mode or skip-tool-connection-check enabled)', { verbose: true });
|
|
507
|
+
} else {
|
|
508
|
+
const hasValidAuth = await checkGitHubPermissions();
|
|
509
|
+
if (!hasValidAuth) {
|
|
510
|
+
await log('\nā Cannot proceed without valid GitHub authentication', { level: 'error' });
|
|
511
|
+
await safeExit(1, 'Error occurred');
|
|
512
|
+
}
|
|
513
|
+
}
|
|
514
|
+
|
|
515
|
+
// YouTrack configuration and validation
|
|
516
|
+
let youTrackConfig = null;
|
|
517
|
+
if (argv.youtrackMode) {
|
|
518
|
+
// Create YouTrack config from environment variables and CLI overrides
|
|
519
|
+
youTrackConfig = createYouTrackConfigFromEnv();
|
|
520
|
+
|
|
521
|
+
if (!youTrackConfig) {
|
|
522
|
+
await log('ā YouTrack mode requires environment variables to be set', { level: 'error' });
|
|
523
|
+
await log(' Required: YOUTRACK_URL, YOUTRACK_API_KEY, YOUTRACK_PROJECT_CODE, YOUTRACK_STAGE', { level: 'error' });
|
|
524
|
+
await log(' Example: YOUTRACK_URL=https://mycompany.youtrack.cloud', { level: 'error' });
|
|
525
|
+
process.exit(1);
|
|
526
|
+
}
|
|
527
|
+
|
|
528
|
+
// Apply CLI overrides
|
|
529
|
+
if (argv.youtrackStage) {
|
|
530
|
+
youTrackConfig.stage = argv.youtrackStage;
|
|
531
|
+
}
|
|
532
|
+
if (argv.youtrackProject) {
|
|
533
|
+
youTrackConfig.projectCode = argv.youtrackProject;
|
|
534
|
+
}
|
|
535
|
+
|
|
536
|
+
// Validate configuration
|
|
537
|
+
try {
|
|
538
|
+
validateYouTrackConfig(youTrackConfig);
|
|
539
|
+
} catch (error) {
|
|
540
|
+
await log(`ā YouTrack configuration error: ${error.message}`, { level: 'error' });
|
|
541
|
+
process.exit(1);
|
|
542
|
+
}
|
|
543
|
+
|
|
544
|
+
// Test YouTrack connection
|
|
545
|
+
const youTrackConnected = await testYouTrackConnection(youTrackConfig);
|
|
546
|
+
if (!youTrackConnected) {
|
|
547
|
+
await log('\nā Cannot proceed without valid YouTrack connection', { level: 'error' });
|
|
548
|
+
process.exit(1);
|
|
549
|
+
}
|
|
550
|
+
}
|
|
551
|
+
|
|
552
|
+
// Parse GitHub URL to determine organization, repository, or user
// scope is one of 'repository' | 'organization' | 'user'; owner/repo stay
// null until extracted from the already-validated urlMatch.
let scope = 'repository';
let owner = null;
let repo = null;

// NO DUPLICATE VALIDATION! URL was already validated at the beginning.
// If we have a URL but no validation results, that's a logic error.
if (githubUrl && urlMatch === null) {
  // This should never happen - it means our early validation was skipped incorrectly
  await log('Internal error: URL validation was not performed correctly', { level: 'error' });
  await log('This is a bug in the script logic', { level: 'error' });
  await safeExit(1, 'Error occurred');
}

if (urlMatch) {
  // urlMatch capture groups: [1] = owner, [3] = optional repository name
  owner = urlMatch[1];
  repo = urlMatch[3] || null;
}

// Determine scope
if (!repo) {
  // Check if it's an organization or user (skip in dry-run mode to avoid hanging)
  if (argv.dryRun || argv.skipToolConnectionCheck || argv.toolConnectionCheck === false) {
    // In dry-run mode, default to user to avoid GitHub API calls
    scope = 'user';
    await log(' ā¹ļø Assuming user scope (dry-run mode, skipping API detection)', { verbose: true });
  } else {
    try {
      // Ask the GitHub API whether the owner is an Organization or a User account
      const typeResult = await $`gh api users/${owner} --jq .type`;
      const accountType = typeResult.stdout.toString().trim();
      scope = accountType === 'Organization' ? 'organization' : 'user';
    } catch (e) {
      reportError(e, {
        context: 'detect_scope',
        owner,
        operation: 'detect_account_type'
      });
      // Default to user if API call fails
      scope = 'user';
    }
  }
} else {
  scope = 'repository';
}

// Echo the effective monitoring configuration so a run can be reproduced from its log.
await log('šÆ Monitoring Configuration:');
if (argv.youtrackMode) {
  await log(` š Source: YouTrack - ${youTrackConfig.url}`);
  await log(` š Project: ${youTrackConfig.projectCode}`);
  await log(` š Stage: "${youTrackConfig.stage}"`);
  await log(` š GitHub Target: ${scope.charAt(0).toUpperCase() + scope.slice(1)} - ${owner}${repo ? `/${repo}` : ''}`);
} else {
  await log(` š Target: ${scope.charAt(0).toUpperCase() + scope.slice(1)} - ${owner}${repo ? `/${repo}` : ''}`);
  if (argv.projectMode) {
    await log(` š Mode: PROJECT #${argv.projectNumber} (owner: ${argv.projectOwner})`);
    await log(` š Status: "${argv.projectStatus}"`);
  } else if (argv.allIssues) {
    await log(' š·ļø Mode: ALL ISSUES (no label filter)');
  } else {
    await log(` š·ļø Tag: "${argv.monitorTag}"`);
  }
}
if (argv.skipIssuesWithPrs) {
  await log(' š« Skipping: Issues with open PRs');
}
await log(` š Concurrency: ${argv.concurrency} parallel workers`);
await log(` š Pull Requests per Issue: ${argv.pullRequestsPerIssue}`);
await log(` š¤ Model: ${argv.model}`);
if (argv.fork) {
  await log(' š“ Fork: ENABLED (will fork repos if no write access)');
}
if (argv.autoFork) {
  await log(' š“ Auto-Fork: ENABLED (will auto-fork public repos without write access)');
}
if (argv.autoContinue) {
  await log(' š Auto-Continue: ENABLED (will work on issues with existing PRs)');
}
if (argv.watch) {
  await log(' šļø Watch Mode: ENABLED (will monitor continuously for feedback)');
}
if (argv.targetBranch) {
  await log(` šÆ Target Branch: ${argv.targetBranch}`);
}
if (!argv.once) {
  await log(` ā±ļø Polling Interval: ${argv.interval} seconds`);
}
await log(` ${argv.once ? 'š Mode: Single run' : 'ā¾ļø Mode: Continuous monitoring'}`);
if (argv.maxIssues > 0) {
  await log(` š¢ Max Issues: ${argv.maxIssues}`);
}
if (argv.dryRun) await log(' š§Ŗ DRY RUN MODE - No actual processing');
if (argv.autoCleanup) await log(' š§¹ Auto-cleanup: ENABLED (will clean /tmp/* /var/tmp/* on success)');
if (argv.interactiveMode) await log(' š Interactive Mode: ENABLED');
await log('');
|
|
646
|
+
|
|
647
|
+
// Producer/Consumer Queue implementation
//
// Tracks every issue URL in exactly one of four buckets: waiting (queue),
// in-flight (processing), done (completed), or errored (failed). Workers
// poll via dequeue() and report back via markCompleted()/markFailed().
class IssueQueue {
  constructor() {
    this.queue = [];                // FIFO of URLs waiting to be picked up
    this.processing = new Set();    // URLs currently owned by a worker
    this.completed = new Set();     // URLs finished successfully
    this.failed = new Set();        // URLs that errored out
    this.workers = [];              // worker promises, pushed by monitor()
    this.isRunning = true;          // cleared by stop() to wind workers down
  }

  // Add issue to queue if not already processed or in queue.
  // Returns true when the URL was actually enqueued.
  enqueue(issueUrl) {
    const alreadyKnown =
      this.completed.has(issueUrl) ||
      this.processing.has(issueUrl) ||
      this.queue.includes(issueUrl);
    if (alreadyKnown) return false;
    this.queue.push(issueUrl);
    return true;
  }

  // Pop the oldest waiting URL and move it to the processing set;
  // null when nothing is waiting.
  dequeue() {
    const next = this.queue.shift();
    if (next === undefined) return null;
    this.processing.add(next);
    return next;
  }

  // Move a URL from processing to completed.
  markCompleted(issueUrl) {
    this.processing.delete(issueUrl);
    this.completed.add(issueUrl);
  }

  // Move a URL from processing to failed.
  markFailed(issueUrl) {
    this.processing.delete(issueUrl);
    this.failed.add(issueUrl);
  }

  // Snapshot of bucket sizes plus the list of in-flight URLs.
  getStats() {
    const { queue, processing, completed, failed } = this;
    return {
      queued: queue.length,
      processing: processing.size,
      completed: completed.size,
      failed: failed.size,
      processingIssues: [...processing]
    };
  }

  // Signal all workers to exit their polling loops.
  stop() {
    this.isRunning = false;
  }
}
|
|
707
|
+
|
|
708
|
+
// Create global queue instance
// Shared by the monitor loop (producer) and all worker() tasks (consumers).
const issueQueue = new IssueQueue();

// Global shutdown state to prevent duplicate shutdown messages
// Set once by whichever signal/exit path fires first.
let isShuttingDown = false;
|
|
713
|
+
|
|
714
|
+
// Worker function to process issues from queue
//
// Polls issueQueue until isRunning is cleared. For each dequeued issue URL it
// spawns `solveCommand` (once per requested PR), streams the child's
// stdout/stderr into the shared log, and records success/failure on the queue.
//
// @param {number} workerId - 1-based identifier used only for log prefixes.
async function worker(workerId) {
  await log(`š§ Worker ${workerId} started`, { verbose: true });

  while (issueQueue.isRunning) {
    const issueUrl = issueQueue.dequeue();

    if (!issueUrl) {
      // No work available, wait a bit
      await new Promise(resolve => setTimeout(resolve, 5000));
      continue;
    }

    await log(`\nš· Worker ${workerId} processing: ${issueUrl}`);

    // Track if this issue failed
    let issueFailed = false;

    // Process the issue multiple times if needed
    for (let prNum = 1; prNum <= argv.pullRequestsPerIssue; prNum++) {
      if (argv.pullRequestsPerIssue > 1) {
        await log(` š Creating PR ${prNum}/${argv.pullRequestsPerIssue} for issue`);
      }

      try {
        // Execute solve command using spawn to enable real-time streaming while avoiding command-stream quoting issues
        if (argv.dryRun) {
          await log(` š§Ŗ [DRY RUN] Executing ${solveCommand} in dry-run mode for ${issueUrl}...`);
        } else {
          await log(` š Executing ${solveCommand} for ${issueUrl}...`);
        }

        const startTime = Date.now();
        // Flag strings below are used only to build the human-readable
        // `command` log line; the real child arguments are built in `args`.
        const forkFlag = argv.fork ? ' --fork' : '';
        const autoForkFlag = argv.autoFork ? ' --auto-fork' : '';
        const verboseFlag = argv.verbose ? ' --verbose' : '';
        const attachLogsFlag = argv.attachLogs ? ' --attach-logs' : '';
        const targetBranchFlag = argv.targetBranch ? ` --target-branch ${argv.targetBranch}` : '';
        const logDirFlag = argv.logDir ? ` --log-dir "${argv.logDir}"` : '';
        const dryRunFlag = argv.dryRun ? ' --dry-run' : '';
        const skipToolConnectionCheckFlag = (argv.skipToolConnectionCheck || argv.toolConnectionCheck === false) ? ' --skip-tool-connection-check' : '';
        const toolFlag = argv.tool ? ` --tool ${argv.tool}` : '';
        const autoContinueFlag = argv.autoContinue ? ' --auto-continue' : '';
        const thinkFlag = argv.think ? ` --think ${argv.think}` : '';
        const noSentryFlag = !argv.sentry ? ' --no-sentry' : '';
        const watchFlag = argv.watch ? ' --watch' : '';
        const prefixForkNameWithOwnerNameFlag = argv.prefixForkNameWithOwnerName ? ' --prefix-fork-name-with-owner-name' : '';
        const interactiveModeFlag = argv.interactiveMode ? ' --interactive-mode' : '';
        const promptExploreSubAgentFlag = argv.promptExploreSubAgent ? ' --prompt-explore-sub-agent' : '';

        // Use spawn to get real-time streaming output while avoiding command-stream's automatic quote addition
        const { spawn } = await import('child_process');

        // Build arguments array to avoid shell parsing issues
        const args = [issueUrl, '--model', argv.model];
        if (argv.tool) {
          args.push('--tool', argv.tool);
        }
        if (argv.fork) {
          args.push('--fork');
        }
        if (argv.autoFork) {
          args.push('--auto-fork');
        }
        if (argv.verbose) {
          args.push('--verbose');
        }
        if (argv.attachLogs) {
          args.push('--attach-logs');
        }
        if (argv.targetBranch) {
          args.push('--target-branch', argv.targetBranch);
        }
        if (argv.logDir) {
          args.push('--log-dir', argv.logDir);
        }
        if (argv.dryRun) {
          args.push('--dry-run');
        }
        if (argv.skipToolConnectionCheck || argv.toolConnectionCheck === false) {
          args.push('--skip-tool-connection-check');
        }
        if (argv.autoContinue) {
          args.push('--auto-continue');
        }
        if (argv.think) {
          args.push('--think', argv.think);
        }
        if (!argv.sentry) {
          args.push('--no-sentry');
        }
        if (argv.watch) args.push('--watch');
        if (argv.prefixForkNameWithOwnerName) args.push('--prefix-fork-name-with-owner-name');
        if (argv.interactiveMode) args.push('--interactive-mode');
        if (argv.promptExploreSubAgent) args.push('--prompt-explore-sub-agent');

        // Log the actual command being executed so users can investigate/reproduce
        const command = `${solveCommand} "${issueUrl}" --model ${argv.model}${toolFlag}${forkFlag}${autoForkFlag}${verboseFlag}${attachLogsFlag}${targetBranchFlag}${logDirFlag}${dryRunFlag}${skipToolConnectionCheckFlag}${autoContinueFlag}${thinkFlag}${noSentryFlag}${watchFlag}${prefixForkNameWithOwnerNameFlag}${interactiveModeFlag}${promptExploreSubAgentFlag}`;
        await log(` š Command: ${command}`);

        let exitCode = 0;

        // Create promise to handle async spawn process
        await new Promise((resolve) => {
          const child = spawn(solveCommand, args, {
            stdio: ['pipe', 'pipe', 'pipe'],
            env: process.env
          });

          // Handle stdout data - stream output in real-time
          child.stdout.on('data', (data) => {
            const lines = data.toString().split('\n');
            for (const line of lines) {
              if (line.trim()) {
                // Fire-and-forget log; failures are routed to reportError so
                // a broken log sink cannot crash the stream handler.
                log(` [${solveCommand} worker-${workerId}] ${line}`).catch((logError) => {
                  reportError(logError, {
                    context: 'worker_stdout_log',
                    workerId,
                    operation: 'log_output'
                  });
                });
              }
            }
          });

          // Handle stderr data - stream errors in real-time
          child.stderr.on('data', (data) => {
            const lines = data.toString().split('\n');
            for (const line of lines) {
              if (line.trim()) {
                log(` [${solveCommand} worker-${workerId} ERROR] ${line}`, { level: 'error' }).catch((logError) => {
                  reportError(logError, {
                    context: 'worker_stderr_log',
                    workerId,
                    operation: 'log_error'
                  });
                });
              }
            }
          });

          // Handle process completion
          // NOTE(review): `code` is null when the child is killed by a signal,
          // and `null || 0` maps that to 0 (success) — presumably unintended;
          // confirm whether signal-terminated runs should count as failures.
          child.on('close', (code) => {
            exitCode = code || 0;
            resolve();
          });

          // Handle process errors
          child.on('error', (error) => {
            exitCode = 1;
            log(` [${solveCommand} worker-${workerId} ERROR] Process error: ${error.message}`, { level: 'error' }).catch((logError) => {
              reportError(logError, {
                context: 'worker_process_error_log',
                workerId,
                operation: 'log_process_error'
              });
            });
            resolve();
          });
        });

        const duration = Math.round((Date.now() - startTime) / 1000);

        if (exitCode === 0) {
          await log(` ā Worker ${workerId} completed ${issueUrl} (${duration}s)`);
        } else {
          throw new Error(`${solveCommand} exited with code ${exitCode}`);
        }

        // Small delay between multiple PRs for same issue
        if (prNum < argv.pullRequestsPerIssue) {
          await new Promise(resolve => setTimeout(resolve, 10000));
        }
      } catch (error) {
        reportError(error, {
          context: 'worker_process_issue',
          workerId,
          issueUrl,
          operation: 'spawn_solve_worker'
        });
        await log(` ā Worker ${workerId} failed on ${issueUrl}: ${cleanErrorMessage(error)}`, { level: 'error' });
        issueQueue.markFailed(issueUrl);
        issueFailed = true;
        break; // Stop trying more PRs for this issue
      }
    }

    // Only mark as completed if it didn't fail
    if (!issueFailed) {
      issueQueue.markCompleted(issueUrl);
    }

    // Show queue stats
    const stats = issueQueue.getStats();
    await log(` š Queue: ${stats.queued} waiting, ${stats.processing} processing, ${stats.completed} completed, ${stats.failed} failed`);
    await log(` š Hive log file: ${absoluteLogPath}`);

    // Show which issues are currently being processed
    if (stats.processingIssues && stats.processingIssues.length > 0) {
      await log(' š§ Currently processing solve commands:');
      for (const issueUrl of stats.processingIssues) {
        await log(` - ${issueUrl}`);
      }
    }
  }

  await log(`š§ Worker ${workerId} stopped`, { verbose: true });
}
|
|
922
|
+
|
|
923
|
+
// Function to check if an issue has open pull requests
|
|
924
|
+
// Note: hasOpenPullRequests function has been replaced by batchCheckPullRequestsForIssues
|
|
925
|
+
// in github.lib.mjs for better performance and reduced API calls
|
|
926
|
+
|
|
927
|
+
// Function to fetch issues from GitHub
//
// Fetches candidate issues according to the active mode (YouTrack sync,
// GitHub Project, all-issues, or label filter), then filters out issues in
// archived repositories and (optionally) issues that already have open PRs,
// applies the --max-issues cap, and returns the remaining issue URLs.
//
// @returns {Promise<string[]>} issue URLs to enqueue; [] on any error or in dry-run.
async function fetchIssues() {
  if (argv.youtrackMode) {
    await log(`\nš Fetching issues from YouTrack project ${youTrackConfig.projectCode} (stage: "${youTrackConfig.stage}")...`);
  } else if (argv.projectMode) {
    await log(`\nš Fetching issues from GitHub Project #${argv.projectNumber} (status: "${argv.projectStatus}")...`);
  } else if (argv.allIssues) {
    await log('\nš Fetching ALL open issues...');
  } else {
    await log(`\nš Fetching issues with label "${argv.monitorTag}"...`);
  }

  // In dry-run mode, skip actual API calls and return empty list immediately
  if (argv.dryRun) {
    await log(' š§Ŗ Dry-run mode: Skipping actual issue fetching');
    return [];
  }

  try {
    let issues = [];

    if (argv.youtrackMode) {
      // Sync YouTrack issues to GitHub
      if (!owner || !repo) {
        throw new Error('YouTrack mode requires a specific repository URL (not organization/user)');
      }

      const githubIssues = await syncYouTrackToGitHub(youTrackConfig, owner, repo, $, log);

      // Convert to format expected by hive
      issues = formatIssuesForHive(githubIssues).map(issue => ({
        url: issue.html_url,
        title: issue.title,
        number: issue.number
      }));

    } else if (argv.projectMode) {
      // Use GitHub Projects v2 mode
      if (!argv.projectNumber || !argv.projectOwner) {
        throw new Error('Project mode requires --project-number and --project-owner');
      }

      issues = await fetchProjectIssues(argv.projectNumber, argv.projectOwner, argv.projectStatus);

    } else if (argv.allIssues) {
      // Fetch all open issues without label filter using pagination
      let searchCmd;
      if (scope === 'repository') {
        searchCmd = `gh issue list --repo ${owner}/${repo} --state open --json url,title,number,createdAt`;
      } else if (scope === 'organization') {
        searchCmd = `gh search issues org:${owner} is:open --json url,title,number,createdAt,repository`;
      } else {
        // User scope
        searchCmd = `gh search issues user:${owner} is:open --json url,title,number,createdAt,repository`;
      }

      await log(' š Fetching all issues with pagination and rate limiting...');
      await log(` š Command: ${searchCmd}`, { verbose: true });

      try {
        issues = await fetchAllIssuesWithPagination(searchCmd);
      } catch (searchError) {
        reportError(searchError, {
          context: 'github_all_issues_search',
          scope,
          owner,
          operation: 'search_all_issues'
        });
        await log(` ā ļø Search failed: ${cleanErrorMessage(searchError)}`, { verbose: true });

        // Check if the error is due to rate limiting or search API limit and we're not in repository scope
        const errorMsg = searchError.message || searchError.toString();
        const isSearchLimitError = errorMsg.includes('Hit search API limit') || errorMsg.includes('repository-by-repository fallback');
        if ((isRateLimitError(searchError) || isSearchLimitError) && scope !== 'repository') {
          await log(' š Search limit detected - attempting repository fallback...');
          try {
            // null label + allIssues=true: fetch everything, repo by repo
            issues = await fetchIssuesFromRepositories(owner, scope, null, true);
          } catch (fallbackError) {
            reportError(fallbackError, {
              context: 'github_all_issues_fallback',
              scope,
              owner,
              operation: 'fallback_all_fetch'
            });
            await log(` ā Repository fallback failed: ${cleanErrorMessage(fallbackError)}`, { verbose: true });
            issues = [];
          }
        } else {
          issues = [];
        }
      }

    } else {
      // Use label filter
      // execSync is used within fetchAllIssuesWithPagination

      // For repositories, use gh issue list which works better with new repos
      if (scope === 'repository') {
        const listCmd = `gh issue list --repo ${owner}/${repo} --state open --label "${argv.monitorTag}" --json url,title,number,createdAt`;
        await log(' š Fetching labeled issues with pagination and rate limiting...');
        await log(` š Command: ${listCmd}`, { verbose: true });

        try {
          issues = await fetchAllIssuesWithPagination(listCmd);
        } catch (listError) {
          reportError(listError, {
            context: 'github_list_issues',
            scope,
            owner,
            monitorTag: argv.monitorTag,
            operation: 'list_repository_issues'
          });
          await log(` ā ļø List failed: ${cleanErrorMessage(listError)}`, { verbose: true });
          issues = [];
        }
      } else {
        // For organizations and users, use search (may not work with new repos)
        let baseQuery;
        if (scope === 'organization') {
          baseQuery = `org:${owner} is:issue is:open`;
        } else {
          baseQuery = `user:${owner} is:issue is:open`;
        }

        // Handle label with potential spaces
        let searchQuery;
        let searchCmd;

        if (argv.monitorTag.includes(' ')) {
          searchQuery = `${baseQuery} label:"${argv.monitorTag}"`;
          searchCmd = `gh search issues '${searchQuery}' --json url,title,number,createdAt,repository`;
        } else {
          searchQuery = `${baseQuery} label:${argv.monitorTag}`;
          searchCmd = `gh search issues '${searchQuery}' --json url,title,number,createdAt,repository`;
        }

        await log(' š Fetching labeled issues with pagination and rate limiting...');
        await log(` š Search query: ${searchQuery}`, { verbose: true });
        await log(` š Command: ${searchCmd}`, { verbose: true });

        try {
          issues = await fetchAllIssuesWithPagination(searchCmd);
        } catch (searchError) {
          reportError(searchError, {
            context: 'github_labeled_issues_search',
            scope,
            owner,
            monitorTag: argv.monitorTag,
            operation: 'search_labeled_issues'
          });
          await log(` ā ļø Search failed: ${cleanErrorMessage(searchError)}`, { verbose: true });

          // Check if the error is due to rate limiting or search API limit
          const errorMsg = searchError.message || searchError.toString();
          const isSearchLimitError = errorMsg.includes('Hit search API limit') || errorMsg.includes('repository-by-repository fallback');
          if (isRateLimitError(searchError) || isSearchLimitError) {
            await log(' š Search limit detected - attempting repository fallback...');
            try {
              issues = await fetchIssuesFromRepositories(owner, scope, argv.monitorTag, false);
            } catch (fallbackError) {
              reportError(fallbackError, {
                context: 'github_labeled_issues_fallback',
                scope,
                owner,
                monitorTag: argv.monitorTag,
                operation: 'fallback_labeled_fetch'
              });
              await log(` ā Repository fallback failed: ${cleanErrorMessage(fallbackError)}`, { verbose: true });
              issues = [];
            }
          } else {
            issues = [];
          }
        }
      }
    }

    if (issues.length === 0) {
      if (argv.youtrackMode) {
        await log(` ā¹ļø No issues found in YouTrack with stage "${youTrackConfig.stage}"`);
      } else if (argv.projectMode) {
        await log(` ā¹ļø No issues found in project with status "${argv.projectStatus}"`);
      } else if (argv.allIssues) {
        await log(' ā¹ļø No open issues found');
      } else {
        await log(` ā¹ļø No issues found with label "${argv.monitorTag}"`);
      }
      return [];
    }

    if (argv.youtrackMode) {
      await log(` š Found ${issues.length} YouTrack issue(s) with stage "${youTrackConfig.stage}"`);
    } else if (argv.projectMode) {
      await log(` š Found ${issues.length} issue(s) with status "${argv.projectStatus}"`);
    } else if (argv.allIssues) {
      await log(` š Found ${issues.length} open issue(s)`);
    } else {
      await log(` š Found ${issues.length} issue(s) with label "${argv.monitorTag}"`);
    }

    // Sort issues by publication date (createdAt) based on issue-order option
    // (only possible when the fetch path returned createdAt on the first item)
    if (issues.length > 0 && issues[0].createdAt) {
      await log(` š Sorting issues by publication date (${argv.issueOrder === 'asc' ? 'oldest first' : 'newest first'})...`);
      issues.sort((a, b) => {
        const dateA = new Date(a.createdAt);
        const dateB = new Date(b.createdAt);
        return argv.issueOrder === 'asc' ? dateA - dateB : dateB - dateA;
      });
      await log(' ā Issues sorted by publication date');
    }

    // Filter out issues from archived repositories
    // This is critical because we cannot do write operations on archived repositories
    let issuesToProcess = issues;

    // Helper function to extract repository info from issue (API response or URL)
    const getRepoInfo = (issue) => {
      let repoName = issue.repository?.name;
      let repoOwner = issue.repository?.owner?.login || issue.repository?.nameWithOwner?.split('/')[0];

      // If repository info is not available, extract it from the issue URL
      if (!repoName || !repoOwner) {
        const urlMatch = issue.url?.match(/github\.com\/([^/]+)\/([^/]+)\/issues\/\d+/);
        if (urlMatch) {
          repoOwner = urlMatch[1];
          repoName = urlMatch[2];
        }
      }

      return { repoOwner, repoName };
    };

    // Only filter for organization/user scopes
    // For repository scope, we're already working on a specific repo
    if (scope !== 'repository' && issues.length > 0) {
      await log(' š Checking for archived repositories...');

      // Extract unique repositories from issues
      const uniqueRepos = new Map();
      for (const issue of issues) {
        const { repoOwner, repoName } = getRepoInfo(issue);
        if (repoOwner && repoName) {
          const repoKey = `${repoOwner}/${repoName}`;
          if (!uniqueRepos.has(repoKey)) {
            uniqueRepos.set(repoKey, { owner: repoOwner, name: repoName });
          }
        }
      }

      // Batch check archived status for all repositories
      const archivedStatusMap = await batchCheckArchivedRepositories(Array.from(uniqueRepos.values()));

      // Filter out issues from archived repositories
      const filteredIssues = [];
      let archivedIssuesCount = 0;

      for (const issue of issues) {
        const { repoOwner, repoName } = getRepoInfo(issue);

        if (repoOwner && repoName) {
          const repoKey = `${repoOwner}/${repoName}`;

          if (archivedStatusMap[repoKey] === true) {
            await log(` āļø Skipping (archived repository): ${issue.title || 'Untitled'} (${issue.url})`, { verbose: true });
            archivedIssuesCount++;
          } else {
            filteredIssues.push(issue);
          }
        } else {
          // If we can't determine repository, include the issue to be safe
          await log(` ā ļø Could not determine repository for issue: ${issue.url}`, { verbose: true });
          filteredIssues.push(issue);
        }
      }

      if (archivedIssuesCount > 0) {
        await log(` āļø Skipped ${archivedIssuesCount} issue(s) from archived repositories`);
      }

      issuesToProcess = filteredIssues;
    }

    // Filter out issues with open PRs if option is enabled
    if (argv.skipIssuesWithPrs) {
      await log(' š Checking for existing pull requests using batch GraphQL query...');

      // Extract issue numbers and repository info from URLs
      const issuesByRepo = {};
      for (const issue of issuesToProcess) {
        const urlMatch = issue.url.match(/github\.com\/([^/]+)\/([^/]+)\/issues\/(\d+)/);
        if (urlMatch) {
          const [, issueOwner, issueRepo, issueNumber] = urlMatch;
          const repoKey = `${issueOwner}/${issueRepo}`;

          if (!issuesByRepo[repoKey]) {
            issuesByRepo[repoKey] = {
              owner: issueOwner,
              repo: issueRepo,
              issues: []
            };
          }

          issuesByRepo[repoKey].issues.push({
            number: parseInt(issueNumber),
            issue: issue
          });
        }
      }

      // Batch check PRs for each repository
      const filteredIssues = [];
      let totalSkipped = 0;

      for (const repoData of Object.values(issuesByRepo)) {
        const issueNumbers = repoData.issues.map(i => i.number);
        const prResults = await batchCheckPullRequestsForIssues(repoData.owner, repoData.repo, issueNumbers);

        // Process results
        for (const issueData of repoData.issues) {
          const prInfo = prResults[issueData.number];
          if (prInfo && prInfo.openPRCount > 0) {
            await log(` āļø Skipping (has ${prInfo.openPRCount} PR${prInfo.openPRCount > 1 ? 's' : ''}): ${issueData.issue.title || 'Untitled'} (${issueData.issue.url})`, { verbose: true });
            totalSkipped++;
          } else {
            filteredIssues.push(issueData.issue);
          }
        }
      }

      if (totalSkipped > 0) {
        await log(` āļø Skipped ${totalSkipped} issue(s) with existing pull requests`);
      }
      issuesToProcess = filteredIssues;
    }

    // Apply max issues limit if set (after filtering to exclude skipped issues from count)
    if (argv.maxIssues > 0 && issuesToProcess.length > argv.maxIssues) {
      issuesToProcess = issuesToProcess.slice(0, argv.maxIssues);
      await log(` š¢ Limiting to first ${argv.maxIssues} issues (after filtering)`);
    }

    // In dry-run mode, show the issues that would be processed
    if (argv.dryRun && issuesToProcess.length > 0) {
      await log('\n š Issues that would be processed:');
      for (const issue of issuesToProcess) {
        await log(` - ${issue.title || 'Untitled'} (${issue.url})`);
      }
    }

    return issuesToProcess.map(issue => issue.url);

  } catch (error) {
    reportError(error, {
      context: 'fetchIssues',
      projectMode: argv.projectMode,
      allIssues: argv.allIssues,
      monitorTag: argv.monitorTag,
      operation: 'fetch_issues'
    });
    await log(` ā Error fetching issues: ${cleanErrorMessage(error)}`, { level: 'error' });
    return [];
  }
}
|
|
1290
|
+
|
|
1291
|
+
// Main monitoring loop
/**
 * Top-level monitoring loop for the Hive Mind system.
 *
 * Starts `argv.concurrency` worker tasks, then repeatedly:
 *   1. fetches candidate issue URLs via fetchIssues(),
 *   2. enqueues any URL not already queued/processed,
 *   3. logs queue statistics and the issues currently being solved.
 *
 * In `--once` mode it waits for the queue to drain, reports final stats,
 * runs temp-directory cleanup when anything completed, stops the workers
 * and exits the loop. Otherwise it sleeps `argv.interval` seconds between
 * iterations and loops until the process is shut down.
 *
 * @returns {Promise<void>} resolves after workers are stopped and final
 *   cleanup/logging has run (continuous mode only reaches this via stop()).
 */
async function monitor() {
  await log('\nš Starting Hive Mind monitoring system...');

  // Start workers; each worker(i) promise is tracked so we can await
  // their completion on shutdown.
  await log(`\nš· Starting ${argv.concurrency} workers...`);
  for (let i = 1; i <= argv.concurrency; i++) {
    issueQueue.workers.push(worker(i));
  }

  // Main monitoring loop
  let iteration = 0;
  while (true) {
    iteration++;
    await log(`\nš Monitoring iteration ${iteration} at ${new Date().toISOString()}`);

    // Fetch issues
    const issueUrls = await fetchIssues();

    // Add new issues to queue; enqueue() returns false for URLs that are
    // already queued, processing, or completed, so duplicates are skipped.
    let newIssues = 0;
    for (const url of issueUrls) {
      if (issueQueue.enqueue(url)) {
        newIssues++;
        await log(` ā Added to queue: ${url}`);
      }
    }

    if (newIssues > 0) {
      await log(` š„ Added ${newIssues} new issue(s) to queue`);
    } else {
      await log(' ā¹ļø No new issues to add (all already processed or in queue)');
    }

    // Show current stats
    const stats = issueQueue.getStats();
    await log('\nš Current Status:');
    await log(` š Queued: ${stats.queued}`);
    await log(` āļø Processing: ${stats.processing}`);
    await log(` ā Completed: ${stats.completed}`);
    await log(` ā Failed: ${stats.failed}`);
    await log(` š Hive log file: ${absoluteLogPath}`);

    // Show which issues are currently being processed
    if (stats.processingIssues && stats.processingIssues.length > 0) {
      await log(' š§ Currently processing solve commands:');
      for (const issueUrl of stats.processingIssues) {
        await log(` - ${issueUrl}`);
      }
    }

    // If running once, wait for queue to empty then exit
    if (argv.once) {
      await log('\nš Single run mode - waiting for queue to empty...');

      // Poll every 5s; only log when the counts actually changed to keep
      // the log readable. Object.assign keeps `stats` current for the
      // loop condition and for the final report below.
      while (stats.queued > 0 || stats.processing > 0) {
        await new Promise(resolve => setTimeout(resolve, 5000));
        const currentStats = issueQueue.getStats();
        if (currentStats.queued !== stats.queued || currentStats.processing !== stats.processing) {
          await log(` ā³ Waiting... Queue: ${currentStats.queued}, Processing: ${currentStats.processing}`);
        }
        Object.assign(stats, currentStats);
      }

      await log('\nā All issues processed!');
      await log(` Completed: ${stats.completed}`);
      await log(` Failed: ${stats.failed}`);
      await log(` š Full log file: ${absoluteLogPath}`);

      // Perform cleanup if enabled and there were successful completions
      if (stats.completed > 0) {
        await cleanupTempDirectories(argv);
      }

      // Stop workers before breaking to avoid hanging
      issueQueue.stop();
      break;
    }

    // Wait for next iteration
    await log(`\nā° Next check in ${argv.interval} seconds...`);
    await new Promise(resolve => setTimeout(resolve, argv.interval * 1000));
  }

  // Stop workers and wait for all of them to finish their current tasks.
  issueQueue.stop();
  await Promise.all(issueQueue.workers);

  // Perform cleanup if enabled and there were successful completions.
  // FIX: pass `argv` here for consistency with every other
  // cleanupTempDirectories() call site, which all forward the CLI options;
  // calling it with no argument silently dropped those options.
  const finalStats = issueQueue.getStats();
  if (finalStats.completed > 0) {
    await cleanupTempDirectories(argv);
  }

  await log('\nš Hive Mind monitoring stopped');
  await log(` š Full log file: ${absoluteLogPath}`);
}
|
|
1388
|
+
|
|
1389
|
+
// Graceful shutdown handler
/**
 * Handles SIGINT/SIGTERM: stops the queue, gives in-flight workers a
 * bounded grace period, runs temp-directory cleanup when anything
 * completed, then always exits via safeExit(0).
 *
 * @param {string} signal - human-readable signal name used in log output
 *   (e.g. 'interrupt', 'termination').
 * @returns {Promise<void>} never resolves normally in practice — ends in
 *   safeExit(0) regardless of cleanup errors.
 */
async function gracefulShutdown(signal) {
  // Re-entrancy guard: both SIGINT and SIGTERM route here, and a user may
  // press Ctrl+C repeatedly; only the first call performs shutdown.
  if (isShuttingDown) {
    return; // Prevent duplicate shutdown messages
  }
  isShuttingDown = true;

  try {
    await log(`\n\nš Received ${signal} signal, shutting down gracefully...`);

    // Stop the queue and wait for workers to finish
    issueQueue.stop();

    // Give workers a moment to finish their current tasks
    const stats = issueQueue.getStats();
    if (stats.processing > 0) {
      await log(` ā³ Waiting for ${stats.processing} worker(s) to finish current tasks...`);

      // Wait up to 10 seconds for workers to finish, polling every 500ms.
      const maxWaitTime = 10000;
      const startTime = Date.now();
      while (issueQueue.getStats().processing > 0 && (Date.now() - startTime) < maxWaitTime) {
        await new Promise(resolve => setTimeout(resolve, 500));
      }
    }

    // Await the worker promises themselves so their loops have fully
    // returned before we run cleanup below.
    await Promise.all(issueQueue.workers);

    // Perform cleanup if enabled and there were successful completions
    const finalStats = issueQueue.getStats();
    if (finalStats.completed > 0) {
      await cleanupTempDirectories(argv);
    }

    await log(' ā Shutdown complete');
    await log(` š Full log file: ${absoluteLogPath}`);

  } catch (error) {
    // Shutdown errors are reported (Sentry + log) but deliberately not
    // rethrown: we still want to reach safeExit below.
    reportError(error, {
      context: 'monitor_issues_shutdown',
      operation: 'cleanup_and_exit'
    });
    await log(` ā ļø Error during shutdown: ${cleanErrorMessage(error)}`, { level: 'error' });
    await log(` š Full log file: ${absoluteLogPath}`);
  }

  // Exit with status 0 even after a failed cleanup — a signal-initiated
  // shutdown is treated as a normal completion.
  await safeExit(0, 'Process completed');
}
|
|
1437
|
+
|
|
1438
|
+
// Function to validate Claude CLI connection
// validateClaudeConnection is now imported from lib.mjs

// Handle graceful shutdown
// Both POSIX termination signals funnel into the same handler; the string
// argument is only used for log output.
process.on('SIGINT', () => gracefulShutdown('interrupt'));
process.on('SIGTERM', () => gracefulShutdown('termination'));

// Check system resources (disk space and RAM) before starting monitoring (skip in dry-run mode)
// Skipped when any of: --dry-run, --skip-tool-connection-check, or
// --tool-connection-check=false is set.
if (argv.dryRun || argv.skipToolConnectionCheck || argv.toolConnectionCheck === false) {
  await log('ā© Skipping system resource check (dry-run mode or skip-tool-connection-check enabled)', { verbose: true });
  await log('ā© Skipping Claude CLI connection check (dry-run mode or skip-tool-connection-check enabled)', { verbose: true });
} else {
  // checkSystem enforces minimum free disk (default 500 MB, overridable via
  // --min-disk-space) and 256 MB free RAM; exitOnFailure makes it terminate
  // the process itself on a hard failure.
  const systemCheck = await checkSystem(
    {
      minDiskSpaceMB: argv.minDiskSpace || 500,
      minMemoryMB: 256,
      exitOnFailure: true
    },
    { log }
  );

  // Defensive: also bail out here if checkSystem returned instead of exiting.
  if (!systemCheck.success) {
    await safeExit(1, 'Error occurred');
  }

  // Validate Claude CLI connection before starting monitoring with the same model that will be used
  const isClaudeConnected = await validateClaudeConnection(argv.model);
  if (!isClaudeConnected) {
    await log('ā Cannot start monitoring without Claude CLI connection', { level: 'error' });
    await safeExit(1, 'Error occurred');
  }
}

// Wrap monitor function with Sentry error tracking
// Only wrapped when --sentry is enabled; otherwise monitor runs unwrapped.
const monitorWithSentry = !argv.sentry ? monitor : withSentry(monitor, 'hive.monitor', 'command');

// Start monitoring
try {
  await monitorWithSentry();
} catch (error) {
  // Fatal, unhandled failure from the monitoring loop: report to Sentry,
  // log, and exit non-zero.
  reportError(error, {
    context: 'hive_main',
    operation: 'monitor_with_sentry'
  });
  await log(`\nā Fatal error: ${cleanErrorMessage(error)}`, { level: 'error' });
  await log(` š Full log file: ${absoluteLogPath}`, { level: 'error' });
  await safeExit(1, 'Error occurred');
}
|
|
1486
|
+
|
|
1487
|
+
} catch (fatalError) {
|
|
1488
|
+
// Handle any errors that occurred during initialization or execution
|
|
1489
|
+
// This prevents silent failures when the script hangs or crashes
|
|
1490
|
+
console.error('\nā Fatal error occurred during hive initialization or execution');
|
|
1491
|
+
console.error(` ${fatalError.message || fatalError}`);
|
|
1492
|
+
if (fatalError.stack) {
|
|
1493
|
+
console.error('\nStack trace:');
|
|
1494
|
+
console.error(fatalError.stack);
|
|
1495
|
+
}
|
|
1496
|
+
console.error('\nPlease report this issue at: https://github.com/link-assistant/hive-mind/issues');
|
|
1497
|
+
process.exit(1);
|
|
1498
|
+
}
|
|
1499
|
+
|
|
1500
|
+
} // End of main execution block
|