@link-assistant/hive-mind 0.46.0 → 0.47.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +26 -13
- package/README.md +42 -8
- package/package.json +16 -3
- package/src/agent.lib.mjs +49 -70
- package/src/agent.prompts.lib.mjs +6 -20
- package/src/buildUserMention.lib.mjs +4 -17
- package/src/claude-limits.lib.mjs +15 -15
- package/src/claude.lib.mjs +617 -626
- package/src/claude.prompts.lib.mjs +7 -22
- package/src/codex.lib.mjs +39 -71
- package/src/codex.prompts.lib.mjs +6 -20
- package/src/config.lib.mjs +3 -16
- package/src/contributing-guidelines.lib.mjs +5 -18
- package/src/exit-handler.lib.mjs +4 -4
- package/src/git.lib.mjs +7 -7
- package/src/github-issue-creator.lib.mjs +17 -17
- package/src/github-linking.lib.mjs +8 -33
- package/src/github.batch.lib.mjs +20 -16
- package/src/github.graphql.lib.mjs +18 -18
- package/src/github.lib.mjs +89 -91
- package/src/hive.config.lib.mjs +50 -50
- package/src/hive.mjs +1293 -1296
- package/src/instrument.mjs +7 -11
- package/src/interactive-mode.lib.mjs +112 -138
- package/src/lenv-reader.lib.mjs +1 -6
- package/src/lib.mjs +36 -45
- package/src/lino.lib.mjs +2 -2
- package/src/local-ci-checks.lib.mjs +15 -14
- package/src/memory-check.mjs +52 -60
- package/src/model-mapping.lib.mjs +25 -32
- package/src/model-validation.lib.mjs +31 -31
- package/src/opencode.lib.mjs +37 -62
- package/src/opencode.prompts.lib.mjs +7 -21
- package/src/protect-branch.mjs +14 -15
- package/src/review.mjs +28 -27
- package/src/reviewers-hive.mjs +64 -69
- package/src/sentry.lib.mjs +13 -10
- package/src/solve.auto-continue.lib.mjs +48 -38
- package/src/solve.auto-pr.lib.mjs +111 -69
- package/src/solve.branch-errors.lib.mjs +17 -46
- package/src/solve.branch.lib.mjs +16 -23
- package/src/solve.config.lib.mjs +263 -261
- package/src/solve.error-handlers.lib.mjs +21 -79
- package/src/solve.execution.lib.mjs +10 -18
- package/src/solve.feedback.lib.mjs +25 -46
- package/src/solve.mjs +59 -60
- package/src/solve.preparation.lib.mjs +10 -36
- package/src/solve.repo-setup.lib.mjs +4 -19
- package/src/solve.repository.lib.mjs +37 -37
- package/src/solve.results.lib.mjs +32 -46
- package/src/solve.session.lib.mjs +7 -22
- package/src/solve.validation.lib.mjs +19 -17
- package/src/solve.watch.lib.mjs +20 -33
- package/src/start-screen.mjs +24 -24
- package/src/task.mjs +38 -44
- package/src/telegram-bot.mjs +125 -121
- package/src/telegram-top-command.lib.mjs +32 -48
- package/src/usage-limit.lib.mjs +9 -13
- package/src/version-info.lib.mjs +1 -1
- package/src/version.lib.mjs +1 -1
- package/src/youtrack/solve.youtrack.lib.mjs +3 -8
- package/src/youtrack/youtrack-sync.mjs +8 -14
- package/src/youtrack/youtrack.lib.mjs +26 -28
package/src/hive.mjs
CHANGED
@@ -44,1458 +44,1455 @@ export { createYargsConfig } from './hive.config.lib.mjs';
  // 2. import.meta.url is this file's URL
  // 3. For global installs, argv[1] might be a symlink, so we check if it contains 'hive'
  import { fileURLToPath } from 'url';
- const isDirectExecution = process.argv[1] === fileURLToPath(import.meta.url) ||
-   (process.argv[1] && (process.argv[1].includes('/hive') || process.argv[1].endsWith('hive')));
+ const isDirectExecution = process.argv[1] === fileURLToPath(import.meta.url) || (process.argv[1] && (process.argv[1].includes('/hive') || process.argv[1].endsWith('hive')));
  if (isDirectExecution) {
-   console.log('🐝 Hive Mind - AI-powered issue solver');
-   console.log('   Initializing...');
-   try {
-     console.log('   Loading dependencies (this may take a moment)...');
-     // Helper function to add timeout to async operations
-     const withTimeout = (promise, timeoutMs, operation) => {
-       return Promise.race([
-         promise,
-         new Promise((_, reject) =>
-           setTimeout(() => reject(new Error(`Operation '${operation}' timed out after ${timeoutMs}ms. This might be due to slow network or npm configuration issues.`)), timeoutMs)
-         )
-       ]);
-     };
-     // Use use-m to dynamically import modules for cross-runtime compatibility
-     if (typeof use === 'undefined') {
+   console.log('🐝 Hive Mind - AI-powered issue solver');
+   console.log('   Initializing...');
    try {
- [… 20 removed lines truncated in the diff view …]
-       );
-     const yargsModule = await withTimeout(
-       use('yargs@17.7.2'),
-       30000,
-       'loading yargs'
-     );
-     const yargs = yargsModule.default || yargsModule;
-     const { hideBin } = await withTimeout(
-       use('yargs@17.7.2/helpers'),
-       30000,
-       'loading yargs helpers'
-     );
-     const path = (await withTimeout(use('path'), 30000, 'loading path')).default;
-     const fs = (await withTimeout(use('fs'), 30000, 'loading fs')).promises;
-     // Import shared library functions
-     const lib = await import('./lib.mjs');
-     const { log, setLogFile, getAbsoluteLogPath, formatTimestamp, cleanErrorMessage, cleanupTempDirectories } = lib;
-     const yargsConfigLib = await import('./hive.config.lib.mjs');
-     const { createYargsConfig } = yargsConfigLib;
-     const claudeLib = await import('./claude.lib.mjs');
-     const { validateClaudeConnection } = claudeLib;
-     // Import model validation library
-     const modelValidation = await import('./model-validation.lib.mjs');
-     const { validateAndExitOnInvalidModel } = modelValidation;
-     const githubLib = await import('./github.lib.mjs');
-     const { checkGitHubPermissions, fetchAllIssuesWithPagination, fetchProjectIssues, isRateLimitError, batchCheckPullRequestsForIssues, parseGitHubUrl, batchCheckArchivedRepositories } = githubLib;
-     // Import YouTrack-related functions
-     const youTrackLib = await import('./youtrack/youtrack.lib.mjs');
-     const {
-       validateYouTrackConfig,
-       testYouTrackConnection,
-       createYouTrackConfigFromEnv
-     } = youTrackLib;
-     const youTrackSync = await import('./youtrack/youtrack-sync.mjs');
-     const { syncYouTrackToGitHub, formatIssuesForHive } = youTrackSync;
-     const memCheck = await import('./memory-check.mjs');
-     const { checkSystem } = memCheck;
-     const exitHandler = await import('./exit-handler.lib.mjs');
-     const { initializeExitHandler, installGlobalExitHandlers, safeExit } = exitHandler;
-     const sentryLib = await import('./sentry.lib.mjs');
-     const { initializeSentry, withSentry, addBreadcrumb, reportError } = sentryLib;
-     const graphqlLib = await import('./github.graphql.lib.mjs');
-     const { tryFetchIssuesWithGraphQL } = graphqlLib;
-     const commandName = process.argv[1] ? process.argv[1].split('/').pop() : '';
-     const isLocalScript = commandName.endsWith('.mjs');
-     const solveCommand = isLocalScript ? './solve.mjs' : 'solve';
-     /**
-      * Fallback function to fetch issues from organization/user repositories
-      * when search API hits rate limits
-      * @param {string} owner - Organization or user name
-      * @param {string} scope - 'organization' or 'user'
-      * @param {string} monitorTag - Label to filter by (optional)
-      * @param {boolean} allIssues - Whether to fetch all issues or only labeled ones
-      * @returns {Promise<Array>} Array of issues
-      */
-     async function fetchIssuesFromRepositories(owner, scope, monitorTag, fetchAllIssues = false) {
-       const { exec } = await import('child_process');
-       const { promisify } = await import('util');
-       const execAsync = promisify(exec);
-       try {
-         await log(`  🔄 Using repository-by-repository fallback for ${scope}: ${owner}`);
-         // Strategy 1: Try GraphQL approach first (faster but has limitations)
-         // Only try GraphQL for "all issues" mode, not for labeled issues
-         if (fetchAllIssues) {
-           const graphqlResult = await tryFetchIssuesWithGraphQL(owner, scope, log, cleanErrorMessage);
-           if (graphqlResult.success) {
-             await log(`  ✅ GraphQL approach successful: ${graphqlResult.issues.length} issues from ${graphqlResult.repoCount} repositories`);
-             return graphqlResult.issues;
+     console.log('   Loading dependencies (this may take a moment)...');
+     // Helper function to add timeout to async operations
+     const withTimeout = (promise, timeoutMs, operation) => {
+       return Promise.race([promise, new Promise((_, reject) => setTimeout(() => reject(new Error(`Operation '${operation}' timed out after ${timeoutMs}ms. This might be due to slow network or npm configuration issues.`)), timeoutMs))]);
+     };
+     // Use use-m to dynamically import modules for cross-runtime compatibility
+     if (typeof use === 'undefined') {
+       try {
+         // Wrap fetch in timeout to prevent hanging
+         const useMCode = await withTimeout(
+           fetch('https://unpkg.com/use-m/use.js').then(r => r.text()),
+           10000,
+           'fetching use-m library'
+         );
+         globalThis.use = (await eval(useMCode)).use;
+       } catch (error) {
+         console.error('❌ Fatal error: Failed to load dependencies');
+         console.error(`   ${error.message}`);
+         console.error('   This might be due to network issues or missing dependencies.');
+         console.error('   Please check your internet connection and try again.');
+         process.exit(1);
      }
    }
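
Note: the `withTimeout` helper added above is a plain `Promise.race` between the wrapped promise and a rejection timer. A minimal standalone sketch of the same pattern (the demo call at the bottom is hypothetical, not from the package):

    // Sketch of the Promise.race timeout pattern used in the new code.
    const withTimeout = (promise, timeoutMs, operation) => {
      return Promise.race([
        promise,
        new Promise((_, reject) =>
          setTimeout(() => reject(new Error(`Operation '${operation}' timed out after ${timeoutMs}ms.`)), timeoutMs)
        ),
      ]);
    };

    // Hypothetical usage: give a slow fetch 10 seconds to resolve.
    const text = await withTimeout(
      fetch('https://example.com').then(r => r.text()),
      10000,
      'fetching example page'
    );

One caveat of this pattern: the losing timer is never cleared, so it keeps the event loop alive until it fires; that is acceptable for a short-lived CLI like this one.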
- [… 44 removed lines truncated in the diff view …]
+     // Use command-stream for consistent $ behavior across runtimes
+     const { $ } = await withTimeout(
+       use('command-stream'),
+       30000, // 30 second timeout
+       'loading command-stream'
+     );
+     const yargsModule = await withTimeout(use('yargs@17.7.2'), 30000, 'loading yargs');
+     const yargs = yargsModule.default || yargsModule;
+     const { hideBin } = await withTimeout(use('yargs@17.7.2/helpers'), 30000, 'loading yargs helpers');
+     const path = (await withTimeout(use('path'), 30000, 'loading path')).default;
+     const fs = (await withTimeout(use('fs'), 30000, 'loading fs')).promises;
+     // Import shared library functions
+     const lib = await import('./lib.mjs');
+     const { log, setLogFile, getAbsoluteLogPath, formatTimestamp, cleanErrorMessage, cleanupTempDirectories } = lib;
+     const yargsConfigLib = await import('./hive.config.lib.mjs');
+     const { createYargsConfig } = yargsConfigLib;
+     const claudeLib = await import('./claude.lib.mjs');
+     const { validateClaudeConnection } = claudeLib;
+     // Import model validation library
+     const modelValidation = await import('./model-validation.lib.mjs');
+     const { validateAndExitOnInvalidModel } = modelValidation;
+     const githubLib = await import('./github.lib.mjs');
+     const { checkGitHubPermissions, fetchAllIssuesWithPagination, fetchProjectIssues, isRateLimitError, batchCheckPullRequestsForIssues, parseGitHubUrl, batchCheckArchivedRepositories } = githubLib;
+     // Import YouTrack-related functions
+     const youTrackLib = await import('./youtrack/youtrack.lib.mjs');
+     const { validateYouTrackConfig, testYouTrackConnection, createYouTrackConfigFromEnv } = youTrackLib;
+     const youTrackSync = await import('./youtrack/youtrack-sync.mjs');
+     const { syncYouTrackToGitHub, formatIssuesForHive } = youTrackSync;
+     const memCheck = await import('./memory-check.mjs');
+     const { checkSystem } = memCheck;
+     const exitHandler = await import('./exit-handler.lib.mjs');
+     const { initializeExitHandler, installGlobalExitHandlers, safeExit } = exitHandler;
+     const sentryLib = await import('./sentry.lib.mjs');
+     const { initializeSentry, withSentry, addBreadcrumb, reportError } = sentryLib;
+     const graphqlLib = await import('./github.graphql.lib.mjs');
+     const { tryFetchIssuesWithGraphQL } = graphqlLib;
+     const commandName = process.argv[1] ? process.argv[1].split('/').pop() : '';
+     const isLocalScript = commandName.endsWith('.mjs');
+     const solveCommand = isLocalScript ? './solve.mjs' : 'solve';
+     /**
+      * Fallback function to fetch issues from organization/user repositories
+      * when search API hits rate limits
+      * @param {string} owner - Organization or user name
+      * @param {string} scope - 'organization' or 'user'
+      * @param {string} monitorTag - Label to filter by (optional)
+      * @param {boolean} allIssues - Whether to fetch all issues or only labeled ones
+      * @returns {Promise<Array>} Array of issues
+      */
+     async function fetchIssuesFromRepositories(owner, scope, monitorTag, fetchAllIssues = false) {
+       const { exec } = await import('child_process');
+       const { promisify } = await import('util');
+       const execAsync = promisify(exec);
        try {
- [… 4 removed lines truncated in the diff view …]
-         // Build the appropriate issue list command
-         let issueCmd;
+         await log(`  🔄 Using repository-by-repository fallback for ${scope}: ${owner}`);
+         // Strategy 1: Try GraphQL approach first (faster but has limitations)
+         // Only try GraphQL for "all issues" mode, not for labeled issues
          if (fetchAllIssues) {
-
+           const graphqlResult = await tryFetchIssuesWithGraphQL(owner, scope, log, cleanErrorMessage);
+           if (graphqlResult.success) {
+             await log(`  ✅ GraphQL approach successful: ${graphqlResult.issues.length} issues from ${graphqlResult.repoCount} repositories`);
+             return graphqlResult.issues;
+           }
+         }
+         // Strategy 2: Fallback to gh api --paginate approach (comprehensive but slower)
+         await log('   🔍 Using gh api --paginate approach for comprehensive coverage...', { verbose: true });
+
+         // First, get list of ALL repositories using gh api with --paginate for unlimited pagination
+         // This approach uses the GitHub API directly to fetch all repositories without any limits
+         // Include isArchived field to filter out archived repositories
+         let repoListCmd;
+         if (scope === 'organization') {
+           repoListCmd = `gh api orgs/${owner}/repos --paginate --jq '.[] | {name: .name, owner: .owner.login, isArchived: .archived}'`;
          } else {
-
+           repoListCmd = `gh api users/${owner}/repos --paginate --jq '.[] | {name: .name, owner: .owner.login, isArchived: .archived}'`;
          }
- [… 2 removed lines truncated in the diff view …]
+         await log('   📋 Fetching repository list (using --paginate for unlimited pagination)...', { verbose: true });
+         await log(`   📝 Command: ${repoListCmd}`, { verbose: true });
+
+         // Add delay for rate limiting
+         await new Promise(resolve => setTimeout(resolve, 2000));
+         const { stdout: repoOutput } = await execAsync(repoListCmd, { encoding: 'utf8', env: process.env });
+         // Parse the output line by line, as gh api with --jq outputs one JSON object per line
+         const repoLines = repoOutput
+           .trim()
+           .split('\n')
+           .filter(line => line.trim());
+         const allRepositories = repoLines.map(line => JSON.parse(line));
+         await log(`   📊 Found ${allRepositories.length} repositories`);
+
+         // Filter repositories to only include those owned by the target user/org
+         const ownedRepositories = allRepositories.filter(repo => {
+           const repoOwner = repo.owner?.login || repo.owner;
+           return repoOwner === owner;
+         });
+         const unownedCount = allRepositories.length - ownedRepositories.length;
+         if (unownedCount > 0) {
+           await log(`   ⏭️ Skipping ${unownedCount} repository(ies) not owned by ${owner}`);
+         }
+         // Filter out archived repositories from owned repositories
+         const repositories = ownedRepositories.filter(repo => !repo.isArchived);
+         const archivedCount = ownedRepositories.length - repositories.length;
+         if (archivedCount > 0) {
+           await log(`   ⏭️ Skipping ${archivedCount} archived repository(ies)`);
+         }
+         await log(`   ✅ Processing ${repositories.length} non-archived repositories owned by ${owner}`);
+
+         let collectedIssues = [];
+         let processedRepos = 0;
+         // Process repositories in batches to avoid overwhelming the API
+         for (const repo of repositories) {
+           try {
+             const repoName = repo.name;
+             const ownerName = repo.owner?.login || owner;
+             await log(`     🔍 Fetching issues from ${ownerName}/${repoName}...`, { verbose: true });
+
+             // Build the appropriate issue list command
+             let issueCmd;
+             if (fetchAllIssues) {
+               issueCmd = `gh issue list --repo ${ownerName}/${repoName} --state open --json url,title,number,createdAt`;
+             } else {
+               issueCmd = `gh issue list --repo ${ownerName}/${repoName} --state open --label "${monitorTag}" --json url,title,number,createdAt`;
+             }
+             // Add delay between repository requests
+             await new Promise(resolve => setTimeout(resolve, 1000));

-
+             const repoIssues = await fetchAllIssuesWithPagination(issueCmd);
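
Note: `gh api --paginate` combined with a `--jq` projection prints one JSON object per line, which is why the code above splits stdout on newlines before calling `JSON.parse`. A minimal sketch of that parsing step (the owner name is illustrative, not from the package):

    import { promisify } from 'util';
    import { exec } from 'child_process';

    const execAsync = promisify(exec);

    // Each stdout line is a separate JSON object, e.g.
    // {"name":"repo-a","owner":"some-owner","isArchived":false}
    const { stdout } = await execAsync(
      `gh api users/some-owner/repos --paginate --jq '.[] | {name: .name, owner: .owner.login, isArchived: .archived}'`,
      { encoding: 'utf8' }
    );
    const repos = stdout
      .trim()
      .split('\n')
      .filter(line => line.trim())
      .map(line => JSON.parse(line));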

- [… 8 removed lines truncated in the diff view …]
+             // Add repository information to each issue
+             const issuesWithRepo = repoIssues.map(issue => ({
+               ...issue,
+               repository: {
+                 name: repoName,
+                 owner: { login: ownerName },
+               },
+             }));

-
-
+             collectedIssues.push(...issuesWithRepo);
+             processedRepos++;

-
-
+             if (issuesWithRepo.length > 0) {
+               await log(`     ✅ Found ${issuesWithRepo.length} issues in ${ownerName}/${repoName}`, { verbose: true });
+             }
+           } catch (repoError) {
+             reportError(repoError, {
+               context: 'fetchIssuesFromRepositories',
+               repo: repo.name,
+               operation: 'fetch_repo_issues',
+             });
+             await log(`     ⚠️ Failed to fetch issues from ${repo.name}: ${cleanErrorMessage(repoError)}`, {
+               verbose: true,
+             });
+             // Continue with other repositories
+           }
          }

-
-
+         await log(`  ✅ Repository fallback complete: ${collectedIssues.length} issues from ${processedRepos}/${repositories.length} repositories`);
+         return collectedIssues;
+       } catch (error) {
+         reportError(error, {
            context: 'fetchIssuesFromRepositories',
-
-
+           owner,
+           scope,
+           operation: 'repository_fallback',
          });
- [… 2 removed lines truncated in the diff view …]
+         await log(`  ❌ Repository fallback failed: ${cleanErrorMessage(error)}`, { level: 'error' });
+         return [];
        }
      }

- [… 22 removed lines truncated in the diff view …]
-     // Temporarily suppress stderr to prevent yargs from printing error messages
-     // We'll handle error reporting ourselves
-     const originalStderrWrite = process.stderr.write;
-     let stderrBuffer = '';
-     process.stderr.write = function(chunk, encoding, callback) {
-       // Capture stderr output instead of writing it
-       stderrBuffer += chunk.toString();
-       if (typeof encoding === 'function') {
-         encoding();
-       } else if (callback) {
-         callback();
-       }
-       return true;
-     };
+     // Configure command line arguments - GitHub URL as positional argument
+     const rawArgs = hideBin(process.argv);
+     // Use .parse() instead of .argv to ensure .strict() mode works correctly
+     // When you use .argv, strict mode doesn't trigger properly
+     // See: https://github.com/yargs/yargs/issues - .strict() only works with .parse()
+     let argv;
+
+     // Temporarily suppress stderr to prevent yargs from printing error messages
+     // We'll handle error reporting ourselves
+     const originalStderrWrite = process.stderr.write;
+     let stderrBuffer = '';
+     process.stderr.write = function (chunk, encoding, callback) {
+       // Capture stderr output instead of writing it
+       stderrBuffer += chunk.toString();
+       if (typeof encoding === 'function') {
+         encoding();
+       } else if (callback) {
+         callback();
+       }
+       return true;
+     };

- [… 7 removed lines truncated in the diff view …]
+     try {
+       argv = await createYargsConfig(yargs()).parse(rawArgs);
+       // Restore stderr if parsing succeeded
+       process.stderr.write = originalStderrWrite;
+     } catch (error) {
+       // Restore stderr before handling the error
+       process.stderr.write = originalStderrWrite;
+
+       // If .strict() mode catches an unknown argument, yargs will throw an error
+       // We should fail fast for truly invalid arguments
+       if (error.message && error.message.includes('Unknown argument')) {
+         console.error('Error:', error.message);
+         process.exit(1);
+       }

- [… 6 removed lines truncated in the diff view …]
+       // Yargs sometimes throws "Not enough arguments" errors even when arguments are present
+       // This is a quirk with optional positional arguments [github-url]
+       // The error.argv object still contains the parsed arguments, so we can safely continue
+       if (error.argv) {
+         argv = error.argv;
+       } else {
+         // If there's no argv object, it's a real error - show the captured stderr
+         if (stderrBuffer) {
+           process.stderr.write(stderrBuffer);
+         }
+         throw error;
+       }

- [… 3 removed lines truncated in the diff view …]
-       if (error.argv) {
-         argv = error.argv;
-       } else {
-         // If there's no argv object, it's a real error - show the captured stderr
-         if (stderrBuffer) {
-           process.stderr.write(stderrBuffer);
+     // Normalize deprecated flags to new names
+     if (argv && (argv.skipToolCheck || argv.skipClaudeCheck)) argv.skipToolConnectionCheck = true;
+     if (argv && argv.toolCheck === false) argv.toolConnectionCheck = false;
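
Note: the stderr interception above works because `process.stderr.write` is called as `(chunk, encoding, callback)` and the replacement must still invoke the callback so callers do not stall. A minimal capture-and-restore sketch, independent of yargs:

    // Swap in a capturing writer, run some work, then restore the original.
    const originalWrite = process.stderr.write;
    let captured = '';
    process.stderr.write = function (chunk, encoding, callback) {
      captured += chunk.toString();
      if (typeof encoding === 'function') encoding(); // the encoding slot may hold the callback
      else if (callback) callback();
      return true;
    };
    try {
      process.stderr.write('hidden warning\n'); // goes to the buffer, not the terminal
    } finally {
      process.stderr.write = originalWrite; // always restore, even on errors
    }
    console.log(JSON.stringify(captured)); // "hidden warning\n"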
      }
-       throw error;
-     }

- [… 4 removed lines truncated in the diff view …]
+     let githubUrl = argv['github-url'];
+
+     // Set global verbose mode
+     global.verboseMode = argv.verbose;
+
+     // Use the universal GitHub URL parser
+     if (githubUrl) {
+       const parsedUrl = parseGitHubUrl(githubUrl);
+
+       if (!parsedUrl.valid) {
+         console.error('Error: Invalid GitHub URL format');
+         if (parsedUrl.error) console.error(`  ${parsedUrl.error}`);
+         if (parsedUrl.suggestion) console.error(`\n💡 Did you mean: ${parsedUrl.suggestion}`);
+         console.error('\nExpected: https://github.com/owner or https://github.com/owner/repo');
+         console.error('You can use any of these formats:');
+         console.error('  - https://github.com/owner');
+         console.error('  - https://github.com/owner/repo');
+         console.error('  - http://github.com/owner (will be converted to https)');
+         console.error('  - github.com/owner (will add https://)');
+         console.error('  - owner (will be converted to https://github.com/owner)');
+         console.error('  - owner/repo (will be converted to https://github.com/owner/repo)');
+         await safeExit(1, 'Error occurred');
+       }

- [… 7 removed lines truncated in the diff view …]
-       const parsedUrl = parseGitHubUrl(githubUrl);
-
-       if (!parsedUrl.valid) {
-         console.error('Error: Invalid GitHub URL format');
-         if (parsedUrl.error) console.error(`  ${parsedUrl.error}`);
-         if (parsedUrl.suggestion) console.error(`\n💡 Did you mean: ${parsedUrl.suggestion}`);
-         console.error('\nExpected: https://github.com/owner or https://github.com/owner/repo');
-         console.error('You can use any of these formats:');
-         console.error('  - https://github.com/owner');
-         console.error('  - https://github.com/owner/repo');
-         console.error('  - http://github.com/owner (will be converted to https)');
-         console.error('  - github.com/owner (will add https://)');
-         console.error('  - owner (will be converted to https://github.com/owner)');
-         console.error('  - owner/repo (will be converted to https://github.com/owner/repo)');
-         await safeExit(1, 'Error occurred');
-       }
+       // Check if it's a valid type for hive (user or repo)
+       if (parsedUrl.type !== 'user' && parsedUrl.type !== 'repo') {
+         console.error('Error: Invalid GitHub URL for monitoring');
+         console.error(`  URL type '${parsedUrl.type}' is not supported`);
+         console.error('Expected: https://github.com/owner or https://github.com/owner/repo');
+         await safeExit(1, 'Error occurred');
+       }

- [… 3 removed lines truncated in the diff view …]
-         console.error(`  URL type '${parsedUrl.type}' is not supported`);
-         console.error('Expected: https://github.com/owner or https://github.com/owner/repo');
-         await safeExit(1, 'Error occurred');
-       }
+       // Use the normalized URL
+       githubUrl = parsedUrl.normalized;
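
Note: the regex used for the once-only validation below accepts exactly `https://github.com/owner` or `https://github.com/owner/repo`, with the owner in capture group 1 and the repo in group 3. The actual `parseGitHubUrl` lives in github.lib.mjs and is not shown in this diff; as an illustrative sketch only, a simplified normalizer matching the accepted formats listed in the error messages above might look like:

    // Illustrative only: the real parseGitHubUrl in github.lib.mjs may differ.
    function normalizeGitHubUrl(input) {
      let url = input.trim();
      url = url.replace(/^http:\/\//, 'https://');            // http -> https
      if (!url.includes('github.com')) url = `github.com/${url}`; // bare 'owner' or 'owner/repo'
      if (!url.startsWith('https://')) url = `https://${url}`;    // bare 'github.com/...'
      const match = url.match(/^https:\/\/github\.com\/([^/]+)(\/([^/]+))?$/);
      return match
        ? { valid: true, normalized: url, owner: match[1], repo: match[3] || null }
        : { valid: false };
    }

    console.log(normalizeGitHubUrl('owner/repo').normalized); // https://github.com/owner/repo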
+     }

- [… 3 removed lines truncated in the diff view …]
+     // Validate GitHub URL format ONCE AND FOR ALL at the beginning
+     // Parse URL format: https://github.com/owner or https://github.com/owner/repo
+     let urlMatch = null;
+
+     // Only validate if we have a URL
+     const needsUrlValidation = githubUrl;
+
+     if (needsUrlValidation) {
+       // Do the regex matching ONCE - this result will be used everywhere
+       urlMatch = githubUrl.match(/^https:\/\/github\.com\/([^/]+)(\/([^/]+))?$/);
+       if (!urlMatch) {
+         console.error('Error: Invalid GitHub URL format');
+         console.error('Expected: https://github.com/owner or https://github.com/owner/repo');
+         console.error('You can use any of these formats:');
+         console.error('  - https://github.com/owner');
+         console.error('  - https://github.com/owner/repo');
+         console.error('  - http://github.com/owner (will be converted to https)');
+         console.error('  - github.com/owner (will add https://)');
+         console.error('  - owner (will be converted to https://github.com/owner)');
+         console.error('  - owner/repo (will be converted to https://github.com/owner/repo)');
+         await safeExit(1, 'Error occurred');
+       }
+     }

- [… 4 removed lines truncated in the diff view …]
-     // Only validate if we have a URL
-     const needsUrlValidation = githubUrl;
-
-     if (needsUrlValidation) {
-       // Do the regex matching ONCE - this result will be used everywhere
-       urlMatch = githubUrl.match(/^https:\/\/github\.com\/([^/]+)(\/([^/]+))?$/);
-       if (!urlMatch) {
-         console.error('Error: Invalid GitHub URL format');
-         console.error('Expected: https://github.com/owner or https://github.com/owner/repo');
-         console.error('You can use any of these formats:');
-         console.error('  - https://github.com/owner');
-         console.error('  - https://github.com/owner/repo');
-         console.error('  - http://github.com/owner (will be converted to https)');
-         console.error('  - github.com/owner (will add https://)');
-         console.error('  - owner (will be converted to https://github.com/owner)');
-         console.error('  - owner/repo (will be converted to https://github.com/owner/repo)');
-         await safeExit(1, 'Error occurred');
-       }
-     }
+     // Create log file with timestamp
+     // Use log-dir option if provided, otherwise use current working directory
+     let targetDir = argv.logDir || process.cwd();

- [… 28 removed lines truncated in the diff view …]
+     // Verify the directory exists, create if necessary
+     try {
+       await fs.access(targetDir);
+     } catch (error) {
+       reportError(error, {
+         context: 'log_directory_access',
+         targetDir,
+         operation: 'check_directory_exists',
+       });
+       // If directory doesn't exist, try to create it
+       try {
+         await fs.mkdir(targetDir, { recursive: true });
+       } catch (mkdirError) {
+         reportError(mkdirError, {
+           context: 'log_directory_creation',
+           targetDir,
+           operation: 'create_directory',
+         });
+         console.error(`⚠️ Unable to create log directory: ${targetDir}`);
+         console.error('   Falling back to current working directory');
+         // Fall back to current working directory
+         targetDir = process.cwd();
+       }
+     }
+
+     const timestamp = formatTimestamp();
+     const logFile = path.join(targetDir, `hive-${timestamp}.log`);
+
+     // Set the log file for the lib.mjs logging system
+     setLogFile(logFile);
+
+     // Create the log file immediately
+     await fs.writeFile(logFile, `# Hive.mjs Log - ${new Date().toISOString()}\n\n`);
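
Note: `fs.mkdir` with `recursive: true` resolves successfully when the directory already exists, so the access-then-mkdir sequence above could in principle collapse into a single call. A hedged sketch of that alternative (the log path here is illustrative):

    import { promises as fs } from 'fs';
    import path from 'path';

    // mkdir with recursive:true is idempotent: it succeeds if the directory
    // already exists, so one call covers both "exists" and "create" cases.
    async function ensureDir(dir, fallback = process.cwd()) {
      try {
        await fs.mkdir(dir, { recursive: true });
        return dir;
      } catch {
        return fallback; // e.g. permission denied: fall back to cwd
      }
    }

    const targetDir = await ensureDir('/tmp/hive-logs'); // illustrative path
    const logFile = path.join(targetDir, `hive-${Date.now()}.log`);
    await fs.writeFile(logFile, `# Hive.mjs Log - ${new Date().toISOString()}\n\n`);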
+     // Always use absolute path for log file display
+     const absoluteLogPath = path.resolve(logFile);
+     await log(`📁 Log file: ${absoluteLogPath}`);
+     await log('   (All output will be logged here)');
+
+     // Initialize Sentry integration (unless disabled)
+     if (argv.sentry) {
+       await initializeSentry({
+         noSentry: !argv.sentry,
+         debug: argv.verbose,
+         version: process.env.npm_package_version || '0.12.0',
+       });

- [… 11 removed lines truncated in the diff view …]
-     await log('   (All output will be logged here)');
-
-     // Initialize Sentry integration (unless disabled)
-     if (argv.sentry) {
-       await initializeSentry({
-         noSentry: !argv.sentry,
-         debug: argv.verbose,
-         version: process.env.npm_package_version || '0.12.0'
-       });
-
-       // Add breadcrumb for monitoring configuration
-       addBreadcrumb({
-         category: 'hive',
-         message: 'Started monitoring',
-         level: 'info',
-         data: {
-           mode: argv.projectMode ? 'project' : (argv.allIssues ? 'all' : 'label'),
-           concurrency: argv.concurrency,
-           model: argv.model
+       // Add breadcrumb for monitoring configuration
+       addBreadcrumb({
+         category: 'hive',
+         message: 'Started monitoring',
+         level: 'info',
+         data: {
+           mode: argv.projectMode ? 'project' : argv.allIssues ? 'all' : 'label',
+           concurrency: argv.concurrency,
+           model: argv.model,
+         },
+       });
      }
-       });
-     }

-     // Initialize the exit handler with getAbsoluteLogPath function and Sentry cleanup
-     initializeExitHandler(getAbsoluteLogPath, log);
-     installGlobalExitHandlers();
+     // Initialize the exit handler with getAbsoluteLogPath function and Sentry cleanup
+     initializeExitHandler(getAbsoluteLogPath, log);
+     installGlobalExitHandlers();
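
Note: exit-handler.lib.mjs itself is not part of this hunk, so the following is only a rough sketch of the pattern the calls above suggest: centralizing signal and unhandled-rejection hooks so every exit path reports the log location. The function and parameter names here are hypothetical:

    // Hypothetical sketch; the real exit-handler.lib.mjs API is not shown in this diff.
    function installExitHandlers(getLogPath) {
      const report = (code, reason) => {
        console.error(`Exiting (${reason}). Full log: ${getLogPath()}`);
        process.exit(code);
      };
      process.on('SIGINT', () => report(130, 'SIGINT'));
      process.on('SIGTERM', () => report(143, 'SIGTERM'));
      process.on('unhandledRejection', err => {
        console.error(err);
        report(1, 'unhandled rejection');
      });
    }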

-     // Unhandled error handlers are now managed by exit-handler.lib.mjs
+     // Unhandled error handlers are now managed by exit-handler.lib.mjs

-     // Validate GitHub URL requirement
-     if (!githubUrl) {
- [… 4 removed lines truncated in the diff view …]
-     }
+     // Validate GitHub URL requirement
+     if (!githubUrl) {
+       await log('❌ GitHub URL is required', { level: 'error' });
+       await log('   Usage: hive <github-url> [options]', { level: 'error' });
+       await log(`   📁 Full log file: ${absoluteLogPath}`, { level: 'error' });
+       await safeExit(1, 'Error occurred');
+     }

-     // Validate project mode arguments
-     if (argv.projectMode) {
- [… 5 removed lines truncated in the diff view …]
+     // Validate project mode arguments
+     if (argv.projectMode) {
+       if (!argv.projectNumber) {
+         await log('❌ Project mode requires --project-number', { level: 'error' });
+         await log('   Usage: hive <github-url> --project-mode --project-number NUMBER --project-owner OWNER', {
+           level: 'error',
+         });
+         await safeExit(1, 'Error occurred');
+       }

- [… 5 removed lines truncated in the diff view …]
+       if (!argv.projectOwner) {
+         await log('❌ Project mode requires --project-owner', { level: 'error' });
+         await log('   Usage: hive <github-url> --project-mode --project-number NUMBER --project-owner OWNER', {
+           level: 'error',
+         });
+         await safeExit(1, 'Error occurred');
+       }

- [… 4 removed lines truncated in the diff view …]
-     }
+       if (typeof argv.projectNumber !== 'number' || argv.projectNumber <= 0) {
+         await log('❌ Project number must be a positive integer', { level: 'error' });
+         await safeExit(1, 'Error occurred');
+       }
+     }

-     // Validate model name EARLY - this always runs regardless of --skip-tool-connection-check
-     // Model validation is a simple string check and should always be performed
-     const tool = argv.tool || 'claude';
-     await validateAndExitOnInvalidModel(argv.model, tool, safeExit);
-
-     // Handle -s (--skip-issues-with-prs) and --auto-continue interaction
-     // Detect if user explicitly passed --auto-continue or --no-auto-continue
-     const hasExplicitAutoContinue = rawArgs.includes('--auto-continue');
-     const hasExplicitNoAutoContinue = rawArgs.includes('--no-auto-continue');
-
-     if (argv.skipIssuesWithPrs) {
-       // If user explicitly passed --auto-continue with -s, that's a conflict
-       if (hasExplicitAutoContinue) {
-         await log('❌ Conflicting options: --skip-issues-with-prs and --auto-continue cannot be used together', { level: 'error' });
-         await log('   --skip-issues-with-prs: Skips issues that have any open PRs', { level: 'error' });
-         await log('   --auto-continue: Continues with existing PRs instead of creating new ones', { level: 'error' });
-         await log(`   📁 Full log file: ${absoluteLogPath}`, { level: 'error' });
-         await safeExit(1, 'Error occurred');
-       }
+     // Validate model name EARLY - this always runs regardless of --skip-tool-connection-check
+     // Model validation is a simple string check and should always be performed
+     const tool = argv.tool || 'claude';
+     await validateAndExitOnInvalidModel(argv.model, tool, safeExit);

- [… 4 removed lines truncated in the diff view …]
-     }
-     }
+     // Handle -s (--skip-issues-with-prs) and --auto-continue interaction
+     // Detect if user explicitly passed --auto-continue or --no-auto-continue
+     const hasExplicitAutoContinue = rawArgs.includes('--auto-continue');
+     const hasExplicitNoAutoContinue = rawArgs.includes('--no-auto-continue');

-
+     if (argv.skipIssuesWithPrs) {
+       // If user explicitly passed --auto-continue with -s, that's a conflict
+       if (hasExplicitAutoContinue) {
+         await log('❌ Conflicting options: --skip-issues-with-prs and --auto-continue cannot be used together', {
+           level: 'error',
+         });
+         await log('   --skip-issues-with-prs: Skips issues that have any open PRs', { level: 'error' });
+         await log('   --auto-continue: Continues with existing PRs instead of creating new ones', { level: 'error' });
+         await log(`   📁 Full log file: ${absoluteLogPath}`, { level: 'error' });
+         await safeExit(1, 'Error occurred');
+       }

- [… 6 removed lines truncated in the diff view …]
-       await log('\n❌ Cannot proceed without valid GitHub authentication', { level: 'error' });
-       await safeExit(1, 'Error occurred');
-     }
-     }
+       // If user didn't explicitly set auto-continue, disable it when -s is used
+       // This is because -s means "skip issues with PRs" which conflicts with auto-continue
+       if (!hasExplicitNoAutoContinue) {
+         argv.autoContinue = false;
+       }
+     }

- [… 1 removed line truncated in the diff view …]
-     let youTrackConfig = null;
-     if (argv.youtrackMode) {
-       // Create YouTrack config from environment variables and CLI overrides
-       youTrackConfig = createYouTrackConfigFromEnv();
+     // Helper function to check GitHub permissions - moved to github.lib.mjs

- [… 6 removed lines truncated in the diff view …]
+     // Check GitHub permissions early in the process (skip in dry-run mode or when explicitly requested)
+     if (argv.dryRun || argv.skipToolConnectionCheck || argv.toolConnectionCheck === false) {
+       await log('⏩ Skipping GitHub permissions check (dry-run mode or skip-tool-connection-check enabled)', {
+         verbose: true,
+       });
+     } else {
+       const hasValidAuth = await checkGitHubPermissions();
+       if (!hasValidAuth) {
+         await log('\n❌ Cannot proceed without valid GitHub authentication', { level: 'error' });
+         await safeExit(1, 'Error occurred');
+       }
+     }

- [… 5 removed lines truncated in the diff view …]
-       youTrackConfig.projectCode = argv.youtrackProject;
-     }
+     // YouTrack configuration and validation
+     let youTrackConfig = null;
+     if (argv.youtrackMode) {
+       // Create YouTrack config from environment variables and CLI overrides
+       youTrackConfig = createYouTrackConfigFromEnv();

- [… 7 removed lines truncated in the diff view …]
+       if (!youTrackConfig) {
+         await log('❌ YouTrack mode requires environment variables to be set', { level: 'error' });
+         await log('   Required: YOUTRACK_URL, YOUTRACK_API_KEY, YOUTRACK_PROJECT_CODE, YOUTRACK_STAGE', {
+           level: 'error',
+         });
+         await log('   Example: YOUTRACK_URL=https://mycompany.youtrack.cloud', { level: 'error' });
+         process.exit(1);
+       }

- [… 6 removed lines truncated in the diff view …]
-     }
+       // Apply CLI overrides
+       if (argv.youtrackStage) {
+         youTrackConfig.stage = argv.youtrackStage;
+       }
+       if (argv.youtrackProject) {
+         youTrackConfig.projectCode = argv.youtrackProject;
+       }

- [… 7 removed lines truncated in the diff view …]
-     if (githubUrl && urlMatch === null) {
-       // This should never happen - it means our early validation was skipped incorrectly
-       await log('Internal error: URL validation was not performed correctly', { level: 'error' });
-       await log('This is a bug in the script logic', { level: 'error' });
-       await safeExit(1, 'Error occurred');
-     }
+       // Validate configuration
+       try {
+         validateYouTrackConfig(youTrackConfig);
+       } catch (error) {
+         await log(`❌ YouTrack configuration error: ${error.message}`, { level: 'error' });
+         process.exit(1);
+       }

- [… 3 removed lines truncated in the diff view …]
-     }
+       // Test YouTrack connection
+       const youTrackConnected = await testYouTrackConnection(youTrackConfig);
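
Note: `testYouTrackConnection` is defined in youtrack.lib.mjs, outside this diff. As a hedged illustration of what such a probe plausibly looks like (YouTrack's REST API exposes `/api/users/me` with a bearer token, but the library's actual request may differ):

    // Illustrative connection probe; not the actual youtrack.lib.mjs code.
    async function testYouTrackConnection({ url, apiKey }) {
      try {
        const res = await fetch(`${url}/api/users/me?fields=login`, {
          headers: { Authorization: `Bearer ${apiKey}`, Accept: 'application/json' },
        });
        return res.ok; // any 2xx means the token and base URL work
      } catch {
        return false; // network failure counts as "not connected"
      }
    }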
+       if (!youTrackConnected) {
+         await log('\n❌ Cannot proceed without valid YouTrack connection', { level: 'error' });
+         process.exit(1);
+       }
+     }

- [… 12 removed lines truncated in the diff view …]
-         } catch (e) {
-           reportError(e, {
-             context: 'detect_scope',
-             owner,
-             operation: 'detect_account_type'
-           });
-           // Default to user if API call fails
-           scope = 'user';
+     // Parse GitHub URL to determine organization, repository, or user
+     let scope = 'repository';
+     let owner = null;
+     let repo = null;
+
+     // NO DUPLICATE VALIDATION! URL was already validated at the beginning.
+     // If we have a URL but no validation results, that's a logic error.
+     if (githubUrl && urlMatch === null) {
+       // This should never happen - it means our early validation was skipped incorrectly
+       await log('Internal error: URL validation was not performed correctly', { level: 'error' });
+       await log('This is a bug in the script logic', { level: 'error' });
+       await safeExit(1, 'Error occurred');
      }
-         }
-       } else {
-         scope = 'repository';
-       }

- [… 4 removed lines truncated in the diff view …]
-       await log(`   📌 Stage: "${youTrackConfig.stage}"`);
-       await log(`   📍 GitHub Target: ${scope.charAt(0).toUpperCase() + scope.slice(1)} - ${owner}${repo ? `/${repo}` : ''}`);
-     } else {
-       await log(`   📍 Target: ${scope.charAt(0).toUpperCase() + scope.slice(1)} - ${owner}${repo ? `/${repo}` : ''}`);
-       if (argv.projectMode) {
-         await log(`   📊 Mode: PROJECT #${argv.projectNumber} (owner: ${argv.projectOwner})`);
-         await log(`   📋 Status: "${argv.projectStatus}"`);
-       } else if (argv.allIssues) {
-         await log('   🏷️ Mode: ALL ISSUES (no label filter)');
-       } else {
-         await log(`   🏷️ Tag: "${argv.monitorTag}"`);
-       }
-     }
-     if (argv.skipIssuesWithPrs) {
-       await log('   🚫 Skipping: Issues with open PRs');
-     }
-     await log(`   🔄 Concurrency: ${argv.concurrency} parallel workers`);
-     await log(`   🔁 Pull Requests per Issue: ${argv.pullRequestsPerIssue}`);
-     await log(`   🤖 Model: ${argv.model}`);
-     if (argv.fork) {
-       await log('   🍴 Fork: ENABLED (will fork repos if no write access)');
-     }
-     if (argv.autoFork) {
-       await log('   🍴 Auto-Fork: ENABLED (will auto-fork public repos without write access)');
-     }
-     if (argv.autoContinue) {
-       await log('   🔄 Auto-Continue: ENABLED (will work on issues with existing PRs)');
-     }
-     if (argv.watch) {
-       await log('   👁️ Watch Mode: ENABLED (will monitor continuously for feedback)');
-     }
-     if (argv.targetBranch) {
-       await log(`   🎯 Target Branch: ${argv.targetBranch}`);
-     }
-     if (!argv.once) {
-       await log(`   ⏱️ Polling Interval: ${argv.interval} seconds`);
-     }
-     await log(`   ${argv.once ? '🔂 Mode: Single run' : '♾️ Mode: Continuous monitoring'}`);
-     if (argv.maxIssues > 0) {
-       await log(`   🔢 Max Issues: ${argv.maxIssues}`);
-     }
-     if (argv.dryRun) await log('   🧪 DRY RUN MODE - No actual processing');
-     if (argv.autoCleanup) await log('   🧹 Auto-cleanup: ENABLED (will clean /tmp/* /var/tmp/* on success)');
-     if (argv.interactiveMode) await log('   💬 Interactive Mode: ENABLED');
-     await log('');
-
-     // Producer/Consumer Queue implementation
-     class IssueQueue {
-       constructor() {
-         this.queue = [];
-         this.processing = new Set();
-         this.completed = new Set();
-         this.failed = new Set();
-         this.workers = [];
-         this.isRunning = true;
-       }
+     if (urlMatch) {
+       owner = urlMatch[1];
+       repo = urlMatch[3] || null;
+     }

- [… 6 removed lines truncated in the diff view …]
+     // Determine scope
+     if (!repo) {
+       // Check if it's an organization or user (skip in dry-run mode to avoid hanging)
+       if (argv.dryRun || argv.skipToolConnectionCheck || argv.toolConnectionCheck === false) {
+         // In dry-run mode, default to user to avoid GitHub API calls
+         scope = 'user';
+         await log('   ℹ️ Assuming user scope (dry-run mode, skipping API detection)', { verbose: true });
+       } else {
+         try {
+           const typeResult = await $`gh api users/${owner} --jq .type`;
+           const accountType = typeResult.stdout.toString().trim();
+           scope = accountType === 'Organization' ? 'organization' : 'user';
+         } catch (e) {
+           reportError(e, {
+             context: 'detect_scope',
+             owner,
+             operation: 'detect_account_type',
+           });
+           // Default to user if API call fails
+           scope = 'user';
+         }
+       }
+     } else {
+       scope = 'repository';
      }

-       this.queue.push(issueUrl);
-       return true;
-     }

- [… 4 removed lines truncated in the diff view …]
+     await log('🎯 Monitoring Configuration:');
+     if (argv.youtrackMode) {
+       await log(`   🔗 Source: YouTrack - ${youTrackConfig.url}`);
+       await log(`   📋 Project: ${youTrackConfig.projectCode}`);
+       await log(`   📌 Stage: "${youTrackConfig.stage}"`);
+       await log(`   📍 GitHub Target: ${scope.charAt(0).toUpperCase() + scope.slice(1)} - ${owner}${repo ? `/${repo}` : ''}`);
+     } else {
+       await log(`   📍 Target: ${scope.charAt(0).toUpperCase() + scope.slice(1)} - ${owner}${repo ? `/${repo}` : ''}`);
+       if (argv.projectMode) {
+         await log(`   📊 Mode: PROJECT #${argv.projectNumber} (owner: ${argv.projectOwner})`);
+         await log(`   📋 Status: "${argv.projectStatus}"`);
+       } else if (argv.allIssues) {
+         await log('   🏷️ Mode: ALL ISSUES (no label filter)');
+       } else {
+         await log(`   🏷️ Tag: "${argv.monitorTag}"`);
+       }
      }

- [… 4 removed lines truncated in the diff view …]
+     if (argv.skipIssuesWithPrs) {
+       await log('   🚫 Skipping: Issues with open PRs');
+     }
+     await log(`   🔄 Concurrency: ${argv.concurrency} parallel workers`);
+     await log(`   🔁 Pull Requests per Issue: ${argv.pullRequestsPerIssue}`);
+     await log(`   🤖 Model: ${argv.model}`);
+     if (argv.fork) {
+       await log('   🍴 Fork: ENABLED (will fork repos if no write access)');
+     }
+     if (argv.autoFork) {
+       await log('   🍴 Auto-Fork: ENABLED (will auto-fork public repos without write access)');
+     }
+     if (argv.autoContinue) {
+       await log('   🔄 Auto-Continue: ENABLED (will work on issues with existing PRs)');
+     }
+     if (argv.watch) {
+       await log('   👁️ Watch Mode: ENABLED (will monitor continuously for feedback)');
+     }
+     if (argv.targetBranch) {
+       await log(`   🎯 Target Branch: ${argv.targetBranch}`);
+     }
+     if (!argv.once) {
+       await log(`   ⏱️ Polling Interval: ${argv.interval} seconds`);
+     }
+     await log(`   ${argv.once ? '🔂 Mode: Single run' : '♾️ Mode: Continuous monitoring'}`);
+     if (argv.maxIssues > 0) {
+       await log(`   🔢 Max Issues: ${argv.maxIssues}`);
+     }
+     if (argv.dryRun) await log('   🧪 DRY RUN MODE - No actual processing');
+     if (argv.autoCleanup) await log('   🧹 Auto-cleanup: ENABLED (will clean /tmp/* /var/tmp/* on success)');
+     if (argv.interactiveMode) await log('   💬 Interactive Mode: ENABLED');
+     await log('');
+
+     // Producer/Consumer Queue implementation
+     class IssueQueue {
+       constructor() {
+         this.queue = [];
+         this.processing = new Set();
+         this.completed = new Set();
+         this.failed = new Set();
+         this.workers = [];
+         this.isRunning = true;
+       }

- [… 5 removed lines truncated in the diff view …]
+       // Add issue to queue if not already processed or in queue
+       enqueue(issueUrl) {
+         if (this.completed.has(issueUrl) || this.processing.has(issueUrl) || this.queue.includes(issueUrl)) {
+           return false;
+         }
+         this.queue.push(issueUrl);
+         return true;
+       }

- [… 5 removed lines truncated in the diff view …]
+       // Get next issue from queue
+       dequeue() {
+         if (this.queue.length === 0) {
+           return null;
+         }
+         const issue = this.queue.shift();
+         this.processing.add(issue);
+         return issue;
+       }

- [… 5 removed lines truncated in the diff view …]
-         completed: this.completed.size,
-         failed: this.failed.size,
-         processingIssues: Array.from(this.processing)
-       };
-     }
+       // Mark issue as completed
+       markCompleted(issueUrl) {
+         this.processing.delete(issueUrl);
+         this.completed.add(issueUrl);
+       }

- [… 4 removed lines truncated in the diff view …]
-     }
+       // Mark issue as failed
+       markFailed(issueUrl) {
+         this.processing.delete(issueUrl);
+         this.failed.add(issueUrl);
+       }

- [… 10 removed lines truncated in the diff view …]
-     while (issueQueue.isRunning) {
-       const issueUrl = issueQueue.dequeue();
-
-       if (!issueUrl) {
-         // No work available, wait a bit
-         await new Promise(resolve => setTimeout(resolve, 5000));
-         continue;
-       }
+       // Get queue statistics
+       getStats() {
+         return {
+           queued: this.queue.length,
+           processing: this.processing.size,
+           completed: this.completed.size,
+           failed: this.failed.size,
+           processingIssues: Array.from(this.processing),
+         };
+       }

- [… 3 removed lines truncated in the diff view …]
-       let issueFailed = false;
-
-       // Process the issue multiple times if needed
-       for (let prNum = 1; prNum <= argv.pullRequestsPerIssue; prNum++) {
-         if (argv.pullRequestsPerIssue > 1) {
-           await log(`   🔁 Creating PR ${prNum}/${argv.pullRequestsPerIssue} for issue`);
+       // Stop all workers
+       stop() {
+         this.isRunning = false;
        }
- [… 1 removed line truncated in the diff view …]
-         try {
-           // Execute solve command using spawn to enable real-time streaming while avoiding command-stream quoting issues
-           if (argv.dryRun) {
-             await log(`   🧪 [DRY RUN] Executing ${solveCommand} in dry-run mode for ${issueUrl}...`);
-           } else {
-             await log(`   🚀 Executing ${solveCommand} for ${issueUrl}...`);
-           }
+     }

- [… 17 removed lines truncated in the diff view …]
-           const promptExploreSubAgentFlag = argv.promptExploreSubAgent ? ' --prompt-explore-sub-agent' : '';
-           const promptIssueReportingFlag = argv.promptIssueReporting ? ' --prompt-issue-reporting' : '';
-           const promptCaseStudiesFlag = argv.promptCaseStudies ? ' --prompt-case-studies' : '';
-           // Use spawn to get real-time streaming output while avoiding command-stream's automatic quote addition
-           const { spawn } = await import('child_process');
-
-           // Build arguments array to avoid shell parsing issues
-           const args = [issueUrl, '--model', argv.model];
-           if (argv.tool) {
-             args.push('--tool', argv.tool);
-           }
-           if (argv.fork) {
-             args.push('--fork');
-           }
-           if (argv.autoFork) {
-             args.push('--auto-fork');
-           }
-           if (argv.verbose) {
-             args.push('--verbose');
-           }
-           if (argv.attachLogs) {
-             args.push('--attach-logs');
-           }
-           if (argv.targetBranch) {
-             args.push('--target-branch', argv.targetBranch);
-           }
-           if (argv.logDir) {
-             args.push('--log-dir', argv.logDir);
-           }
-           if (argv.dryRun) {
-             args.push('--dry-run');
-           }
-           if (argv.skipToolConnectionCheck || argv.toolConnectionCheck === false) {
-             args.push('--skip-tool-connection-check');
-           }
-           if (argv.autoContinue) {
-             args.push('--auto-continue');
-           } else {
-             args.push('--no-auto-continue');
-           }
-           if (argv.think) {
-             args.push('--think', argv.think);
-           }
-           if (argv.promptPlanSubAgent) args.push('--prompt-plan-sub-agent');
-           if (!argv.sentry) {
-             args.push('--no-sentry');
+     // Create global queue instance
+     const issueQueue = new IssueQueue();
|
|
699
|
+
|
|
700
|
+
// Global shutdown state to prevent duplicate shutdown messages
|
|
701
|
+
let isShuttingDown = false;
|
|
702
|
+
|
|
703
|
+
// Worker function to process issues from queue
|
|
704
|
+
async function worker(workerId) {
|
|
705
|
+
await log(`š§ Worker ${workerId} started`, { verbose: true });
|
|
706
|
+
|
|
707
|
+
while (issueQueue.isRunning) {
|
|
708
|
+
const issueUrl = issueQueue.dequeue();
|
|
709
|
+
|
|
710
|
+
if (!issueUrl) {
|
|
711
|
+
// No work available, wait a bit
|
|
712
|
+
await new Promise(resolve => setTimeout(resolve, 5000));
|
|
713
|
+
continue;
|
|
805
714
|
}
|
|
806
|
-
if (argv.watch) args.push('--watch');
|
|
807
|
-
if (argv.prefixForkNameWithOwnerName) args.push('--prefix-fork-name-with-owner-name');
|
|
808
|
-
if (argv.interactiveMode) args.push('--interactive-mode');
|
|
809
|
-
if (argv.promptExploreSubAgent) args.push('--prompt-explore-sub-agent');
|
|
810
|
-
if (argv.promptIssueReporting) args.push('--prompt-issue-reporting');
|
|
811
|
-
if (argv.promptCaseStudies) args.push('--prompt-case-studies');
|
|
812
|
-
|
|
813
|
-
// Log the actual command being executed so users can investigate/reproduce
|
|
814
|
-
const command = `${solveCommand} "${issueUrl}" --model ${argv.model}${toolFlag}${forkFlag}${autoForkFlag}${verboseFlag}${attachLogsFlag}${targetBranchFlag}${logDirFlag}${dryRunFlag}${skipToolConnectionCheckFlag}${autoContinueFlag}${thinkFlag}${promptPlanSubAgentFlag}${noSentryFlag}${watchFlag}${prefixForkNameWithOwnerNameFlag}${interactiveModeFlag}${promptExploreSubAgentFlag}${promptIssueReportingFlag}${promptCaseStudiesFlag}`;
|
|
815
|
-
await log(` š Command: ${command}`);
|
|
816
|
-
|
|
817
|
-
let exitCode = 0;
|
|
818
|
-
// Create promise to handle async spawn process
|
|
819
|
-
await new Promise((resolve) => {
|
|
820
|
-
const child = spawn(solveCommand, args, {
|
|
821
|
-
stdio: ['pipe', 'pipe', 'pipe'],
|
|
822
|
-
env: process.env
|
|
823
|
-
});
|
|
824
715
|
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
|
|
829
|
-
|
|
830
|
-
|
|
831
|
-
|
|
832
|
-
|
|
833
|
-
|
|
834
|
-
|
|
716
|
+
await log(`\nš· Worker ${workerId} processing: ${issueUrl}`);
|
|
717
|
+
|
|
718
|
+
// Track if this issue failed
|
|
719
|
+
let issueFailed = false;
|
|
720
|
+
|
|
721
|
+
// Process the issue multiple times if needed
|
|
722
|
+
for (let prNum = 1; prNum <= argv.pullRequestsPerIssue; prNum++) {
|
|
723
|
+
if (argv.pullRequestsPerIssue > 1) {
|
|
724
|
+
await log(` š Creating PR ${prNum}/${argv.pullRequestsPerIssue} for issue`);
|
|
725
|
+
}
|
|
726
|
+
|
|
727
|
+
try {
|
|
728
|
+
// Execute solve command using spawn to enable real-time streaming while avoiding command-stream quoting issues
|
|
729
|
+
if (argv.dryRun) {
|
|
730
|
+
await log(` š§Ŗ [DRY RUN] Executing ${solveCommand} in dry-run mode for ${issueUrl}...`);
|
|
731
|
+
} else {
|
|
732
|
+
await log(` š Executing ${solveCommand} for ${issueUrl}...`);
|
|
733
|
+
}
|
|
734
|
+
|
|
735
|
+
const startTime = Date.now();
|
|
736
|
+
const forkFlag = argv.fork ? ' --fork' : '';
|
|
737
|
+
const autoForkFlag = argv.autoFork ? ' --auto-fork' : '';
|
|
738
|
+
const verboseFlag = argv.verbose ? ' --verbose' : '';
|
|
739
|
+
const attachLogsFlag = argv.attachLogs ? ' --attach-logs' : '';
|
|
740
|
+
const targetBranchFlag = argv.targetBranch ? ` --target-branch ${argv.targetBranch}` : '';
|
|
741
|
+
const logDirFlag = argv.logDir ? ` --log-dir "${argv.logDir}"` : '';
|
|
742
|
+
const dryRunFlag = argv.dryRun ? ' --dry-run' : '';
|
|
743
|
+
const skipToolConnectionCheckFlag = argv.skipToolConnectionCheck || argv.toolConnectionCheck === false ? ' --skip-tool-connection-check' : '';
|
|
744
|
+
const toolFlag = argv.tool ? ` --tool ${argv.tool}` : '';
|
|
745
|
+
const autoContinueFlag = argv.autoContinue ? ' --auto-continue' : ' --no-auto-continue';
|
|
746
|
+
const thinkFlag = argv.think ? ` --think ${argv.think}` : '';
|
|
747
|
+
const promptPlanSubAgentFlag = argv.promptPlanSubAgent ? ' --prompt-plan-sub-agent' : '';
|
|
748
|
+
const noSentryFlag = !argv.sentry ? ' --no-sentry' : '';
|
|
749
|
+
const watchFlag = argv.watch ? ' --watch' : '';
|
|
750
|
+
const prefixForkNameWithOwnerNameFlag = argv.prefixForkNameWithOwnerName ? ' --prefix-fork-name-with-owner-name' : '';
|
|
751
|
+
const interactiveModeFlag = argv.interactiveMode ? ' --interactive-mode' : '';
|
|
752
|
+
const promptExploreSubAgentFlag = argv.promptExploreSubAgent ? ' --prompt-explore-sub-agent' : '';
|
|
753
|
+
const promptIssueReportingFlag = argv.promptIssueReporting ? ' --prompt-issue-reporting' : '';
|
|
754
|
+
const promptCaseStudiesFlag = argv.promptCaseStudies ? ' --prompt-case-studies' : '';
|
|
755
|
+
// Use spawn to get real-time streaming output while avoiding command-stream's automatic quote addition
|
|
756
|
+
const { spawn } = await import('child_process');
|
|
757
|
+
|
|
758
|
+
// Build arguments array to avoid shell parsing issues
|
|
759
|
+
const args = [issueUrl, '--model', argv.model];
|
|
760
|
+
if (argv.tool) {
|
|
761
|
+
args.push('--tool', argv.tool);
|
|
762
|
+
}
|
|
763
|
+
if (argv.fork) {
|
|
764
|
+
args.push('--fork');
|
|
765
|
+
}
|
|
766
|
+
if (argv.autoFork) {
|
|
767
|
+
args.push('--auto-fork');
|
|
768
|
+
}
|
|
769
|
+
if (argv.verbose) {
|
|
770
|
+
args.push('--verbose');
|
|
771
|
+
}
|
|
772
|
+
if (argv.attachLogs) {
|
|
773
|
+
args.push('--attach-logs');
|
|
774
|
+
}
|
|
775
|
+
if (argv.targetBranch) {
|
|
776
|
+
args.push('--target-branch', argv.targetBranch);
|
|
777
|
+
}
|
|
778
|
+
if (argv.logDir) {
|
|
779
|
+
args.push('--log-dir', argv.logDir);
|
|
780
|
+
}
|
|
781
|
+
if (argv.dryRun) {
|
|
782
|
+
args.push('--dry-run');
|
|
783
|
+
}
|
|
784
|
+
if (argv.skipToolConnectionCheck || argv.toolConnectionCheck === false) {
|
|
785
|
+
args.push('--skip-tool-connection-check');
|
|
786
|
+
}
|
|
787
|
+
if (argv.autoContinue) {
|
|
788
|
+
args.push('--auto-continue');
|
|
789
|
+
} else {
|
|
790
|
+
args.push('--no-auto-continue');
|
|
791
|
+
}
|
|
792
|
+
if (argv.think) {
|
|
793
|
+
args.push('--think', argv.think);
|
|
794
|
+
}
|
|
795
|
+
if (argv.promptPlanSubAgent) args.push('--prompt-plan-sub-agent');
|
|
796
|
+
if (!argv.sentry) {
|
|
797
|
+
args.push('--no-sentry');
|
|
798
|
+
}
|
|
799
|
+
if (argv.watch) args.push('--watch');
|
|
800
|
+
if (argv.prefixForkNameWithOwnerName) args.push('--prefix-fork-name-with-owner-name');
|
|
801
|
+
if (argv.interactiveMode) args.push('--interactive-mode');
|
|
802
|
+
if (argv.promptExploreSubAgent) args.push('--prompt-explore-sub-agent');
|
|
803
|
+
if (argv.promptIssueReporting) args.push('--prompt-issue-reporting');
|
|
804
|
+
if (argv.promptCaseStudies) args.push('--prompt-case-studies');
|
|
805
|
+
|
|
806
|
+
// Log the actual command being executed so users can investigate/reproduce
|
|
807
|
+
const command = `${solveCommand} "${issueUrl}" --model ${argv.model}${toolFlag}${forkFlag}${autoForkFlag}${verboseFlag}${attachLogsFlag}${targetBranchFlag}${logDirFlag}${dryRunFlag}${skipToolConnectionCheckFlag}${autoContinueFlag}${thinkFlag}${promptPlanSubAgentFlag}${noSentryFlag}${watchFlag}${prefixForkNameWithOwnerNameFlag}${interactiveModeFlag}${promptExploreSubAgentFlag}${promptIssueReportingFlag}${promptCaseStudiesFlag}`;
|
|
808
|
+
await log(` š Command: ${command}`);
|
|
809
|
+
|
|
810
|
+
let exitCode = 0;
|
|
811
|
+
// Create promise to handle async spawn process
|
|
812
|
+
await new Promise(resolve => {
|
|
813
|
+
const child = spawn(solveCommand, args, {
|
|
814
|
+
stdio: ['pipe', 'pipe', 'pipe'],
|
|
815
|
+
env: process.env,
|
|
816
|
+
});
|
|
817
|
+
|
|
818
|
+
// Handle stdout data - stream output in real-time
|
|
819
|
+
child.stdout.on('data', data => {
|
|
820
|
+
const lines = data.toString().split('\n');
|
|
821
|
+
for (const line of lines) {
|
|
822
|
+
if (line.trim()) {
|
|
823
|
+
log(` [${solveCommand} worker-${workerId}] ${line}`).catch(logError => {
|
|
824
|
+
reportError(logError, {
|
|
825
|
+
context: 'worker_stdout_log',
|
|
826
|
+
workerId,
|
|
827
|
+
operation: 'log_output',
|
|
828
|
+
});
|
|
835
829
|
});
|
|
836
|
-
}
|
|
830
|
+
}
|
|
837
831
|
}
|
|
838
|
-
}
|
|
839
|
-
});
|
|
832
|
+
});
|
|
840
833
|
|
|
841
|
-
|
|
842
|
-
|
|
843
|
-
|
|
844
|
-
|
|
845
|
-
|
|
846
|
-
|
|
847
|
-
|
|
848
|
-
|
|
849
|
-
|
|
850
|
-
|
|
834
|
+
// Handle stderr data - stream errors in real-time
|
|
835
|
+
child.stderr.on('data', data => {
|
|
836
|
+
const lines = data.toString().split('\n');
|
|
837
|
+
for (const line of lines) {
|
|
838
|
+
if (line.trim()) {
|
|
839
|
+
log(` [${solveCommand} worker-${workerId} ERROR] ${line}`, { level: 'error' }).catch(logError => {
|
|
840
|
+
reportError(logError, {
|
|
841
|
+
context: 'worker_stderr_log',
|
|
842
|
+
workerId,
|
|
843
|
+
operation: 'log_error',
|
|
844
|
+
});
|
|
851
845
|
});
|
|
852
|
-
}
|
|
846
|
+
}
|
|
853
847
|
}
|
|
854
|
-
}
|
|
855
|
-
|
|
856
|
-
|
|
857
|
-
|
|
858
|
-
|
|
859
|
-
|
|
860
|
-
|
|
861
|
-
|
|
862
|
-
|
|
863
|
-
|
|
864
|
-
|
|
865
|
-
|
|
866
|
-
|
|
867
|
-
|
|
868
|
-
|
|
869
|
-
|
|
870
|
-
|
|
848
|
+
});
|
|
849
|
+
|
|
850
|
+
// Handle process completion
|
|
851
|
+
child.on('close', code => {
|
|
852
|
+
exitCode = code || 0;
|
|
853
|
+
resolve();
|
|
854
|
+
});
|
|
855
|
+
|
|
856
|
+
// Handle process errors
|
|
857
|
+
child.on('error', error => {
|
|
858
|
+
exitCode = 1;
|
|
859
|
+
log(` [${solveCommand} worker-${workerId} ERROR] Process error: ${error.message}`, {
|
|
860
|
+
level: 'error',
|
|
861
|
+
}).catch(logError => {
|
|
862
|
+
reportError(logError, {
|
|
863
|
+
context: 'worker_process_error_log',
|
|
864
|
+
workerId,
|
|
865
|
+
operation: 'log_process_error',
|
|
866
|
+
});
|
|
871
867
|
});
|
|
868
|
+
resolve();
|
|
872
869
|
});
|
|
873
|
-
resolve();
|
|
874
870
|
});
|
|
875
|
-
});
|
|
876
|
-
|
|
877
|
-
const duration = Math.round((Date.now() - startTime) / 1000);
|
|
878
871
|
|
|
879
|
-
|
|
880
|
-
|
|
881
|
-
|
|
882
|
-
|
|
872
|
+
const duration = Math.round((Date.now() - startTime) / 1000);
|
|
873
|
+
|
|
874
|
+
if (exitCode === 0) {
|
|
875
|
+
await log(` ā
Worker ${workerId} completed ${issueUrl} (${duration}s)`);
|
|
876
|
+
} else {
|
|
877
|
+
throw new Error(`${solveCommand} exited with code ${exitCode}`);
|
|
878
|
+
}
|
|
879
|
+
|
|
880
|
+
// Small delay between multiple PRs for same issue
|
|
881
|
+
if (prNum < argv.pullRequestsPerIssue) {
|
|
882
|
+
await new Promise(resolve => setTimeout(resolve, 10000));
|
|
883
|
+
}
|
|
884
|
+
} catch (error) {
|
|
885
|
+
reportError(error, {
|
|
886
|
+
context: 'worker_process_issue',
|
|
887
|
+
workerId,
|
|
888
|
+
issueUrl,
|
|
889
|
+
operation: 'spawn_solve_worker',
|
|
890
|
+
});
|
|
891
|
+
await log(` ā Worker ${workerId} failed on ${issueUrl}: ${cleanErrorMessage(error)}`, {
|
|
892
|
+
level: 'error',
|
|
893
|
+
});
|
|
894
|
+
issueQueue.markFailed(issueUrl);
|
|
895
|
+
issueFailed = true;
|
|
896
|
+
break; // Stop trying more PRs for this issue
|
|
883
897
|
}
|
|
884
|
-
|
|
885
|
-
// Small delay between multiple PRs for same issue
|
|
886
|
-
if (prNum < argv.pullRequestsPerIssue) {
|
|
887
|
-
await new Promise(resolve => setTimeout(resolve, 10000));
|
|
888
898
|
}
|
|
889
|
-
} catch (error) {
|
|
890
|
-
reportError(error, {
|
|
891
|
-
context: 'worker_process_issue',
|
|
892
|
-
workerId,
|
|
893
|
-
issueUrl,
|
|
894
|
-
operation: 'spawn_solve_worker'
|
|
895
|
-
});
|
|
896
|
-
await log(` ā Worker ${workerId} failed on ${issueUrl}: ${cleanErrorMessage(error)}`, { level: 'error' });
|
|
897
|
-
issueQueue.markFailed(issueUrl);
|
|
898
|
-
issueFailed = true;
|
|
899
|
-
break; // Stop trying more PRs for this issue
|
|
900
|
-
}
|
|
901
|
-
}
|
|
902
|
-
|
|
903
|
-
// Only mark as completed if it didn't fail
|
|
904
|
-
if (!issueFailed) {
|
|
905
|
-
issueQueue.markCompleted(issueUrl);
|
|
906
|
-
}
|
|
907
899
|
|
|
908
|
-
|
|
909
|
-
|
|
910
|
-
|
|
911
|
-
|
|
900
|
+
// Only mark as completed if it didn't fail
|
|
901
|
+
if (!issueFailed) {
|
|
902
|
+
issueQueue.markCompleted(issueUrl);
|
|
903
|
+
}
|
|
912
904
|
|
|
913
|
-
|
|
914
|
-
|
|
915
|
-
|
|
916
|
-
|
|
917
|
-
await log(` - ${issueUrl}`);
|
|
918
|
-
}
|
|
919
|
-
}
|
|
920
|
-
}
|
|
921
|
-
|
|
922
|
-
await log(`š§ Worker ${workerId} stopped`, { verbose: true });
|
|
923
|
-
}
|
|
905
|
+
// Show queue stats
|
|
906
|
+
const stats = issueQueue.getStats();
|
|
907
|
+
await log(` š Queue: ${stats.queued} waiting, ${stats.processing} processing, ${stats.completed} completed, ${stats.failed} failed`);
|
|
908
|
+
await log(` š Hive log file: ${absoluteLogPath}`);
|
|
924
909
|
|
|
925
|
-
//
|
|
926
|
-
|
|
927
|
-
|
|
928
|
-
|
|
929
|
-
|
|
930
|
-
|
|
931
|
-
|
|
932
|
-
|
|
933
|
-
} else if (argv.projectMode) {
|
|
934
|
-
await log(`\nš Fetching issues from GitHub Project #${argv.projectNumber} (status: "${argv.projectStatus}")...`);
|
|
935
|
-
} else if (argv.allIssues) {
|
|
936
|
-
await log('\nš Fetching ALL open issues...');
|
|
937
|
-
} else {
|
|
938
|
-
await log(`\nš Fetching issues with label "${argv.monitorTag}"...`);
|
|
939
|
-
}
|
|
910
|
+
// Show which issues are currently being processed
|
|
911
|
+
if (stats.processingIssues && stats.processingIssues.length > 0) {
|
|
912
|
+
await log(' š§ Currently processing solve commands:');
|
|
913
|
+
for (const issueUrl of stats.processingIssues) {
|
|
914
|
+
await log(` - ${issueUrl}`);
|
|
915
|
+
}
|
|
916
|
+
}
|
|
917
|
+
}
|
|
940
918
|
|
|
941
|
-
|
|
942
|
-
|
|
943
|
-
await log(' š§Ŗ Dry-run mode: Skipping actual issue fetching');
|
|
944
|
-
return [];
|
|
945
|
-
}
|
|
919
|
+
await log(`š§ Worker ${workerId} stopped`, { verbose: true });
|
|
920
|
+
}
|
|
946
921
|
|
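Why the argument array matters: passing spawn(cmd, args) hands each argument to the child verbatim, so issue URLs or labels containing spaces never pass through shell parsing. A standalone sketch of the same pattern (the `solve` command name and arguments here are illustrative stand-ins for solveCommand and args above, and the remainder buffer is an assumption about how one would harden the line splitting, since data chunks are not guaranteed to end on line boundaries):

// Hypothetical sketch of the spawn-with-args streaming pattern used by worker().
import { spawn } from 'child_process';

const args = ['https://github.com/example/repo/issues/1', '--model', 'sonnet'];
const child = spawn('solve', args, { stdio: ['pipe', 'pipe', 'pipe'], env: process.env });

let remainder = '';
child.stdout.on('data', chunk => {
  const pieces = (remainder + chunk.toString()).split('\n');
  remainder = pieces.pop(); // keep a partial trailing line for the next chunk
  for (const line of pieces) {
    if (line.trim()) console.log(`[solve] ${line}`);
  }
});
child.on('close', code => console.log(`exited with ${code ?? 0}`));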
+// Function to check if an issue has open pull requests
+// Note: hasOpenPullRequests function has been replaced by batchCheckPullRequestsForIssues
+// in github.lib.mjs for better performance and reduced API calls

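The batched check replaces one API call per issue with a single GraphQL request. The actual query lives in github.lib.mjs/github.graphql.lib.mjs and is not shown in this diff; the following is only a sketch of the aliasing idea such a batch query typically relies on (field selection and helper name are illustrative, not the package's real implementation):

// Hypothetical sketch: one GraphQL query with one alias per issue number.
const buildBatchQuery = (owner, repo, issueNumbers) => {
  const fields = issueNumbers
    .map(n => `i${n}: issue(number: ${n}) { number timelineItems(itemTypes: CROSS_REFERENCED_EVENT, first: 10) { totalCount } }`)
    .join('\n');
  return `query { repository(owner: "${owner}", name: "${repo}") { ${fields} } }`;
};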
-  if (
+// Function to fetch issues from GitHub
+async function fetchIssues() {
+  if (argv.youtrackMode) {
+    await log(`\nš Fetching issues from YouTrack project ${youTrackConfig.projectCode} (stage: "${youTrackConfig.stage}")...`);
+  } else if (argv.projectMode) {
+    await log(`\nš Fetching issues from GitHub Project #${argv.projectNumber} (status: "${argv.projectStatus}")...`);
+  } else if (argv.allIssues) {
+    await log('\nš Fetching ALL open issues...');
+  } else {
+    await log(`\nš Fetching issues with label "${argv.monitorTag}"...`);
  }

+  // In dry-run mode, skip actual API calls and return empty list immediately
+  if (argv.dryRun) {
+    await log('  š§Ŗ Dry-run mode: Skipping actual issue fetching');
+    return [];
+  }

-        url: issue.html_url,
-        title: issue.title,
-        number: issue.number
-      }));
+  try {
+    let issues = [];

+    if (argv.youtrackMode) {
+      // Sync YouTrack issues to GitHub
+      if (!owner || !repo) {
+        throw new Error('YouTrack mode requires a specific repository URL (not organization/user)');
+      }
+
+      const githubIssues = await syncYouTrackToGitHub(youTrackConfig, owner, repo, $, log);
+
+      // Convert to format expected by hive
+      issues = formatIssuesForHive(githubIssues).map(issue => ({
+        url: issue.html_url,
+        title: issue.title,
+        number: issue.number,
+      }));
+    } else if (argv.projectMode) {
+      // Use GitHub Projects v2 mode
+      if (!argv.projectNumber || !argv.projectOwner) {
+        throw new Error('Project mode requires --project-number and --project-owner');
+      }

-      await log('  š Fetching all issues with pagination and rate limiting...');
-      await log(`  š Command: ${searchCmd}`, { verbose: true });
+      issues = await fetchProjectIssues(argv.projectNumber, argv.projectOwner, argv.projectStatus);
+    } else if (argv.allIssues) {
+      // Fetch all open issues without label filter using pagination
+      let searchCmd;
+      if (scope === 'repository') {
+        searchCmd = `gh issue list --repo ${owner}/${repo} --state open --json url,title,number,createdAt`;
+      } else if (scope === 'organization') {
+        searchCmd = `gh search issues org:${owner} is:open --json url,title,number,createdAt,repository`;
+      } else {
+        // User scope
+        searchCmd = `gh search issues user:${owner} is:open --json url,title,number,createdAt,repository`;
+      }

-      } catch (searchError) {
-        reportError(searchError, {
-          context: 'github_all_issues_search',
-          scope,
-          owner,
-          operation: 'search_all_issues'
-        });
-        await log(`  ā ļø Search failed: ${cleanErrorMessage(searchError)}`, { verbose: true });
+      await log('  š Fetching all issues with pagination and rate limiting...');
+      await log(`  š Command: ${searchCmd}`, { verbose: true });

-        // Check if the error is due to rate limiting or search API limit and we're not in repository scope
-        const errorMsg = searchError.message || searchError.toString();
-        const isSearchLimitError = errorMsg.includes('Hit search API limit') || errorMsg.includes('repository-by-repository fallback');
-        if ((isRateLimitError(searchError) || isSearchLimitError) && scope !== 'repository') {
-          await log('  š Search limit detected - attempting repository fallback...');
      try {
-        issues = await
-      } catch (
-        reportError(
-          context: '
+        issues = await fetchAllIssuesWithPagination(searchCmd);
+      } catch (searchError) {
+        reportError(searchError, {
+          context: 'github_all_issues_search',
          scope,
          owner,
-          operation: '
+          operation: 'search_all_issues',
        });
-        await log(`
+        await log(`  ā ļø Search failed: ${cleanErrorMessage(searchError)}`, { verbose: true });
+
+        // Check if the error is due to rate limiting or search API limit and we're not in repository scope
+        const errorMsg = searchError.message || searchError.toString();
+        const isSearchLimitError = errorMsg.includes('Hit search API limit') || errorMsg.includes('repository-by-repository fallback');
+        if ((isRateLimitError(searchError) || isSearchLimitError) && scope !== 'repository') {
+          await log('  š Search limit detected - attempting repository fallback...');
+          try {
+            issues = await fetchIssuesFromRepositories(owner, scope, null, true);
+          } catch (fallbackError) {
+            reportError(fallbackError, {
+              context: 'github_all_issues_fallback',
+              scope,
+              owner,
+              operation: 'fallback_all_fetch',
+            });
+            await log(`  ā Repository fallback failed: ${cleanErrorMessage(fallbackError)}`, { verbose: true });
+            issues = [];
+          }
+        } else {
+          issues = [];
+        }
      }
    } else {
-      // For repositories, use gh issue list which works better with new repos
-      if (scope === 'repository') {
-        const listCmd = `gh issue list --repo ${owner}/${repo} --state open --label "${argv.monitorTag}" --json url,title,number,createdAt`;
-        await log('  š Fetching labeled issues with pagination and rate limiting...');
-        await log(`  š Command: ${listCmd}`, { verbose: true });
-
-        try {
-          issues = await fetchAllIssuesWithPagination(listCmd);
-        } catch (listError) {
-          reportError(listError, {
-            context: 'github_list_issues',
-            scope,
-            owner,
-            monitorTag: argv.monitorTag,
-            operation: 'list_repository_issues'
-          });
-          await log(`  ā ļø List failed: ${cleanErrorMessage(listError)}`, { verbose: true });
-          issues = [];
-        }
-      } else {
-        // For organizations and users, use search (may not work with new repos)
-        let baseQuery;
-        if (scope === 'organization') {
-          baseQuery = `org:${owner} is:issue is:open`;
-        } else {
-          baseQuery = `user:${owner} is:issue is:open`;
-        }
-
-        // Handle label with potential spaces
-        let searchQuery;
-        let searchCmd;
-
-        if (argv.monitorTag.includes(' ')) {
-          searchQuery = `${baseQuery} label:"${argv.monitorTag}"`;
-          searchCmd = `gh search issues '${searchQuery}' --json url,title,number,createdAt,repository`;
-        } else {
-          searchQuery = `${baseQuery} label:${argv.monitorTag}`;
-          searchCmd = `gh search issues '${searchQuery}' --json url,title,number,createdAt,repository`;
-        }
-
-        await log('  š Fetching labeled issues with pagination and rate limiting...');
-        await log(`  š Search query: ${searchQuery}`, { verbose: true });
-        await log(`  š Command: ${searchCmd}`, { verbose: true });
+      // Use label filter
+      // execSync is used within fetchAllIssuesWithPagination
+
+      // For repositories, use gh issue list which works better with new repos
+      if (scope === 'repository') {
+        const listCmd = `gh issue list --repo ${owner}/${repo} --state open --label "${argv.monitorTag}" --json url,title,number,createdAt`;
+        await log('  š Fetching labeled issues with pagination and rate limiting...');
+        await log(`  š Command: ${listCmd}`, { verbose: true });

-        // Check if the error is due to rate limiting or search API limit
-        const errorMsg = searchError.message || searchError.toString();
-        const isSearchLimitError = errorMsg.includes('Hit search API limit') || errorMsg.includes('repository-by-repository fallback');
-        if (isRateLimitError(searchError) || isSearchLimitError) {
-          await log('  š Search limit detected - attempting repository fallback...');
        try {
-          issues = await
-        } catch (
-          reportError(
-            context: '
+          issues = await fetchAllIssuesWithPagination(listCmd);
+        } catch (listError) {
+          reportError(listError, {
+            context: 'github_list_issues',
            scope,
            owner,
            monitorTag: argv.monitorTag,
-            operation: '
+            operation: 'list_repository_issues',
          });
-          await log(`
+          await log(`  ā ļø List failed: ${cleanErrorMessage(listError)}`, { verbose: true });
          issues = [];
        }
      } else {
+        // For organizations and users, use search (may not work with new repos)
+        let baseQuery;
+        if (scope === 'organization') {
+          baseQuery = `org:${owner} is:issue is:open`;
+        } else {
+          baseQuery = `user:${owner} is:issue is:open`;
+        }
+
+        // Handle label with potential spaces
+        let searchQuery;
+        let searchCmd;
+
+        if (argv.monitorTag.includes(' ')) {
+          searchQuery = `${baseQuery} label:"${argv.monitorTag}"`;
+          searchCmd = `gh search issues '${searchQuery}' --json url,title,number,createdAt,repository`;
+        } else {
+          searchQuery = `${baseQuery} label:${argv.monitorTag}`;
+          searchCmd = `gh search issues '${searchQuery}' --json url,title,number,createdAt,repository`;
+        }
+
+        await log('  š Fetching labeled issues with pagination and rate limiting...');
+        await log(`  š Search query: ${searchQuery}`, { verbose: true });
+        await log(`  š Command: ${searchCmd}`, { verbose: true });
+
+        try {
+          issues = await fetchAllIssuesWithPagination(searchCmd);
+        } catch (searchError) {
+          reportError(searchError, {
+            context: 'github_labeled_issues_search',
+            scope,
+            owner,
+            monitorTag: argv.monitorTag,
+            operation: 'search_labeled_issues',
+          });
+          await log(`  ā ļø Search failed: ${cleanErrorMessage(searchError)}`, { verbose: true });
+
+          // Check if the error is due to rate limiting or search API limit
+          const errorMsg = searchError.message || searchError.toString();
+          const isSearchLimitError = errorMsg.includes('Hit search API limit') || errorMsg.includes('repository-by-repository fallback');
+          if (isRateLimitError(searchError) || isSearchLimitError) {
+            await log('  š Search limit detected - attempting repository fallback...');
+            try {
+              issues = await fetchIssuesFromRepositories(owner, scope, argv.monitorTag, false);
+            } catch (fallbackError) {
+              reportError(fallbackError, {
+                context: 'github_labeled_issues_fallback',
+                scope,
+                owner,
+                monitorTag: argv.monitorTag,
+                operation: 'fallback_labeled_fetch',
+              });
+              await log(`  ā Repository fallback failed: ${cleanErrorMessage(fallbackError)}`, { verbose: true });
+              issues = [];
+            }
+          } else {
+            issues = [];
+          }
+        }
      }
    }
-    }
-    }
-
-    if (issues.length === 0) {
-      if (argv.youtrackMode) {
-        await log(`  ā¹ļø No issues found in YouTrack with stage "${youTrackConfig.stage}"`);
-      } else if (argv.projectMode) {
-        await log(`  ā¹ļø No issues found in project with status "${argv.projectStatus}"`);
-      } else if (argv.allIssues) {
-        await log('  ā¹ļø No open issues found');
-      } else {
-        await log(`  ā¹ļø No issues found with label "${argv.monitorTag}"`);
-      }
-      return [];
-    }

+    if (issues.length === 0) {
+      if (argv.youtrackMode) {
+        await log(`  ā¹ļø No issues found in YouTrack with stage "${youTrackConfig.stage}"`);
+      } else if (argv.projectMode) {
+        await log(`  ā¹ļø No issues found in project with status "${argv.projectStatus}"`);
+      } else if (argv.allIssues) {
+        await log('  ā¹ļø No open issues found');
+      } else {
+        await log(`  ā¹ļø No issues found with label "${argv.monitorTag}"`);
+      }
+      return [];
+    }

-    }
+    if (argv.youtrackMode) {
+      await log(`  š Found ${issues.length} YouTrack issue(s) with stage "${youTrackConfig.stage}"`);
+    } else if (argv.projectMode) {
+      await log(`  š Found ${issues.length} issue(s) with status "${argv.projectStatus}"`);
+    } else if (argv.allIssues) {
+      await log(`  š Found ${issues.length} open issue(s)`);
+    } else {
+      await log(`  š Found ${issues.length} issue(s) with label "${argv.monitorTag}"`);
+    }

-      // If repository info is not available, extract it from the issue URL
-      if (!repoName || !repoOwner) {
-        const urlMatch = issue.url?.match(/github\.com\/([^/]+)\/([^/]+)\/issues\/\d+/);
-        if (urlMatch) {
-          repoOwner = urlMatch[1];
-          repoName = urlMatch[2];
+    // Sort issues by publication date (createdAt) based on issue-order option
+    if (issues.length > 0 && issues[0].createdAt) {
+      await log(`  š Sorting issues by publication date (${argv.issueOrder === 'asc' ? 'oldest first' : 'newest first'})...`);
+      issues.sort((a, b) => {
+        const dateA = new Date(a.createdAt);
+        const dateB = new Date(b.createdAt);
+        return argv.issueOrder === 'asc' ? dateA - dateB : dateB - dateA;
+      });
+      await log('  ā Issues sorted by publication date');
    }
-      }

+    // Filter out issues from archived repositories
+    // This is critical because we cannot do write operations on archived repositories
+    let issuesToProcess = issues;
+
+    // Helper function to extract repository info from issue (API response or URL)
+    const getRepoInfo = issue => {
+      let repoName = issue.repository?.name;
+      let repoOwner = issue.repository?.owner?.login || issue.repository?.nameWithOwner?.split('/')[0];
+
+      // If repository info is not available, extract it from the issue URL
+      if (!repoName || !repoOwner) {
+        const urlMatch = issue.url?.match(/github\.com\/([^/]+)\/([^/]+)\/issues\/\d+/);
+        if (urlMatch) {
+          repoOwner = urlMatch[1];
+          repoName = urlMatch[2];
+        }
+      }

-      const
+      return { repoOwner, repoName };
+    };
+
+    // Only filter for organization/user scopes
+    // For repository scope, we're already working on a specific repo
+    if (scope !== 'repository' && issues.length > 0) {
+      await log('  š Checking for archived repositories...');
+
+      // Extract unique repositories from issues
+      const uniqueRepos = new Map();
+      for (const issue of issues) {
+        const { repoOwner, repoName } = getRepoInfo(issue);
+        if (repoOwner && repoName) {
+          const repoKey = `${repoOwner}/${repoName}`;
+          if (!uniqueRepos.has(repoKey)) {
+            uniqueRepos.set(repoKey, { owner: repoOwner, name: repoName });
+          }
+        }
      }
-      }
-    }

+      // Batch check archived status for all repositories
+      const archivedStatusMap = await batchCheckArchivedRepositories(Array.from(uniqueRepos.values()));
+
+      // Filter out issues from archived repositories
+      const filteredIssues = [];
+      let archivedIssuesCount = 0;
+
+      for (const issue of issues) {
+        const { repoOwner, repoName } = getRepoInfo(issue);
+
+        if (repoOwner && repoName) {
+          const repoKey = `${repoOwner}/${repoName}`;
+
+          if (archivedStatusMap[repoKey] === true) {
+            await log(`  āļø Skipping (archived repository): ${issue.title || 'Untitled'} (${issue.url})`, {
+              verbose: true,
+            });
+            archivedIssuesCount++;
+          } else {
+            filteredIssues.push(issue);
+          }
+        } else {
+          // If we can't determine repository, include the issue to be safe
+          await log(`  ā ļø Could not determine repository for issue: ${issue.url}`, { verbose: true });
+          filteredIssues.push(issue);
+        }
      }

+      if (archivedIssuesCount > 0) {
+        await log(`  āļø Skipped ${archivedIssuesCount} issue(s) from archived repositories`);
+      }
+
+      issuesToProcess = filteredIssues;
    }
-    }

+    // Filter out issues with open PRs if option is enabled
+    if (argv.skipIssuesWithPrs) {
+      await log('  š Checking for existing pull requests using batch GraphQL query...');
+
+      // Extract issue numbers and repository info from URLs
+      const issuesByRepo = {};
+      for (const issue of issuesToProcess) {
+        const urlMatch = issue.url.match(/github\.com\/([^/]+)\/([^/]+)\/issues\/(\d+)/);
+        if (urlMatch) {
+          const [, issueOwner, issueRepo, issueNumber] = urlMatch;
+          const repoKey = `${issueOwner}/${issueRepo}`;
+
+          if (!issuesByRepo[repoKey]) {
+            issuesByRepo[repoKey] = {
+              owner: issueOwner,
+              repo: issueRepo,
+              issues: [],
+            };
+          }

+          issuesByRepo[repoKey].issues.push({
+            number: parseInt(issueNumber),
+            issue: issue,
+          });
+        }
+      }

-      }
+      // Batch check PRs for each repository
+      const filteredIssues = [];
+      let totalSkipped = 0;
+
+      for (const repoData of Object.values(issuesByRepo)) {
+        const issueNumbers = repoData.issues.map(i => i.number);
+        const prResults = await batchCheckPullRequestsForIssues(repoData.owner, repoData.repo, issueNumbers);
+
+        // Process results
+        for (const issueData of repoData.issues) {
+          const prInfo = prResults[issueData.number];
+          if (prInfo && prInfo.openPRCount > 0) {
+            await log(`  āļø Skipping (has ${prInfo.openPRCount} PR${prInfo.openPRCount > 1 ? 's' : ''}): ${issueData.issue.title || 'Untitled'} (${issueData.issue.url})`, { verbose: true });
+            totalSkipped++;
+          } else {
+            filteredIssues.push(issueData.issue);
+          }
+        }
      }

+      if (totalSkipped > 0) {
+        await log(`  āļø Skipped ${totalSkipped} issue(s) with existing pull requests`);
+      }
+      issuesToProcess = filteredIssues;
    }
-    }

-      const issueNumbers = repoData.issues.map(i => i.number);
-      const prResults = await batchCheckPullRequestsForIssues(repoData.owner, repoData.repo, issueNumbers);
+    // Apply max issues limit if set (after filtering to exclude skipped issues from count)
+    if (argv.maxIssues > 0 && issuesToProcess.length > argv.maxIssues) {
+      issuesToProcess = issuesToProcess.slice(0, argv.maxIssues);
+      await log(`  š¢ Limiting to first ${argv.maxIssues} issues (after filtering)`);
+    }

-      //
-      await log(`
-      totalSkipped++;
-      } else {
-        filteredIssues.push(issueData.issue);
+    // In dry-run mode, show the issues that would be processed
+    if (argv.dryRun && issuesToProcess.length > 0) {
+      await log('\n  š Issues that would be processed:');
+      for (const issue of issuesToProcess) {
+        await log(`    - ${issue.title || 'Untitled'} (${issue.url})`);
      }
    }
-    }

-    // In dry-run mode, show the issues that would be processed
-    if (argv.dryRun && issuesToProcess.length > 0) {
-      await log('\n  š Issues that would be processed:');
-      for (const issue of issuesToProcess) {
-        await log(`    - ${issue.title || 'Untitled'} (${issue.url})`);
+    return issuesToProcess.map(issue => issue.url);
+  } catch (error) {
+    reportError(error, {
+      context: 'fetchIssues',
+      projectMode: argv.projectMode,
+      allIssues: argv.allIssues,
+      monitorTag: argv.monitorTag,
+      operation: 'fetch_issues',
+    });
+    await log(`  ā Error fetching issues: ${cleanErrorMessage(error)}`, { level: 'error' });
+    return [];
  }
}
-
-    return issuesToProcess.map(issue => issue.url);
-
-  } catch (error) {
-    reportError(error, {
-      context: 'fetchIssues',
-      projectMode: argv.projectMode,
-      allIssues: argv.allIssues,
-      monitorTag: argv.monitorTag,
-      operation: 'fetch_issues'
-    });
-    await log(`  ā Error fetching issues: ${cleanErrorMessage(error)}`, { level: 'error' });
-    return [];
-  }
-}

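fetchIssues deliberately picks between two gh subcommands: `gh issue list` reads a repository directly (reliable even for brand-new repos), while `gh search issues` goes through GitHub's search index (eventually consistent and more aggressively rate-limited), which is why the search paths carry the repository-by-repository fallback. The selection rules reduce to a small helper; a distillation sketch (the helper name is illustrative, the command strings mirror the ones above):

// Hypothetical distillation of the command-selection logic in fetchIssues().
const pickIssueCommand = (scope, owner, repo, label) => {
  const json = '--json url,title,number,createdAt';
  if (scope === 'repository') {
    const labelFilter = label ? ` --label "${label}"` : '';
    return `gh issue list --repo ${owner}/${repo} --state open${labelFilter} ${json}`;
  }
  const qualifier = scope === 'organization' ? `org:${owner}` : `user:${owner}`;
  const labelQuery = label ? (label.includes(' ') ? ` label:"${label}"` : ` label:${label}`) : '';
  return `gh search issues '${qualifier} is:issue is:open${labelQuery}' ${json},repository`;
};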
-// Main monitoring loop
-async function monitor() {
-}
-
-// Main monitoring loop
-let iteration = 0;
-while (true) {
-  iteration++;
-  await log(`\nš Monitoring iteration ${iteration} at ${new Date().toISOString()}`);
-
-  // Fetch issues
-  const issueUrls = await fetchIssues();
-
-  // Add new issues to queue
-  let newIssues = 0;
-  for (const url of issueUrls) {
-    if (issueQueue.enqueue(url)) {
-      newIssues++;
-      await log(`  ā Added to queue: ${url}`);
-    }
-  }
-
-  if (newIssues > 0) {
-    await log(`  š„ Added ${newIssues} new issue(s) to queue`);
-  } else {
-    await log('  ā¹ļø No new issues to add (all already processed or in queue)');
-  }
-
-  // Show current stats
-  const stats = issueQueue.getStats();
-  await log('\nš Current Status:');
-  await log(`  š Queued: ${stats.queued}`);
-  await log(`  āļø Processing: ${stats.processing}`);
-  await log(`  ā Completed: ${stats.completed}`);
-  await log(`  ā Failed: ${stats.failed}`);
-  await log(`  š Hive log file: ${absoluteLogPath}`);
-
-  // Show which issues are currently being processed
-  if (stats.processingIssues && stats.processingIssues.length > 0) {
-    await log('  š§ Currently processing solve commands:');
-    for (const issueUrl of stats.processingIssues) {
-      await log(`      - ${issueUrl}`);
+// Main monitoring loop
+async function monitor() {
+  await log('\nš Starting Hive Mind monitoring system...');
+
+  // Start workers
+  await log(`\nš· Starting ${argv.concurrency} workers...`);
+  for (let i = 1; i <= argv.concurrency; i++) {
+    issueQueue.workers.push(worker(i));
  }
-  const
+
+  // Main monitoring loop
+  let iteration = 0;
+  while (true) {
+    iteration++;
+    await log(`\nš Monitoring iteration ${iteration} at ${new Date().toISOString()}`);
+
+    // Fetch issues
+    const issueUrls = await fetchIssues();
+
+    // Add new issues to queue
+    let newIssues = 0;
+    for (const url of issueUrls) {
+      if (issueQueue.enqueue(url)) {
+        newIssues++;
+        await log(`  ā Added to queue: ${url}`);
+      }
+    }
+
+    if (newIssues > 0) {
+      await log(`  š„ Added ${newIssues} new issue(s) to queue`);
+    } else {
+      await log('  ā¹ļø No new issues to add (all already processed or in queue)');
+    }
+
+    // Show current stats
+    const stats = issueQueue.getStats();
+    await log('\nš Current Status:');
+    await log(`  š Queued: ${stats.queued}`);
+    await log(`  āļø Processing: ${stats.processing}`);
+    await log(`  ā Completed: ${stats.completed}`);
+    await log(`  ā Failed: ${stats.failed}`);
+    await log(`  š Hive log file: ${absoluteLogPath}`);
+
+    // Show which issues are currently being processed
+    if (stats.processingIssues && stats.processingIssues.length > 0) {
+      await log('  š§ Currently processing solve commands:');
+      for (const issueUrl of stats.processingIssues) {
+        await log(`      - ${issueUrl}`);
+      }
+    }
+
+    // If running once, wait for queue to empty then exit
+    if (argv.once) {
+      await log('\nš Single run mode - waiting for queue to empty...');
+
+      while (stats.queued > 0 || stats.processing > 0) {
+        await new Promise(resolve => setTimeout(resolve, 5000));
+        const currentStats = issueQueue.getStats();
+        if (currentStats.queued !== stats.queued || currentStats.processing !== stats.processing) {
+          await log(`  ā³ Waiting... Queue: ${currentStats.queued}, Processing: ${currentStats.processing}`);
+        }
+        Object.assign(stats, currentStats);
+      }
+
+      await log('\nā All issues processed!');
+      await log(`  Completed: ${stats.completed}`);
+      await log(`  Failed: ${stats.failed}`);
+      await log(`  š Full log file: ${absoluteLogPath}`);
+
+      // Perform cleanup if enabled and there were successful completions
+      if (stats.completed > 0) {
+        await cleanupTempDirectories(argv);
+      }
+
+      // Stop workers before breaking to avoid hanging
+      issueQueue.stop();
+      break;
    }
-
+
+    // Wait for next iteration
+    await log(`\nā° Next check in ${argv.interval} seconds...`);
+    await new Promise(resolve => setTimeout(resolve, argv.interval * 1000));
  }

-  await
-  await log(`  š Full log file: ${absoluteLogPath}`);
+  // Stop workers
+  issueQueue.stop();
+  await Promise.all(issueQueue.workers);

  // Perform cleanup if enabled and there were successful completions
+  const finalStats = issueQueue.getStats();
+  if (finalStats.completed > 0) {
+    await cleanupTempDirectories();
  }

-  break;
+  await log('\nš Hive Mind monitoring stopped');
+  await log(`  š Full log file: ${absoluteLogPath}`);
}
-
-// Wait for next iteration
-await log(`\nā° Next check in ${argv.interval} seconds...`);
-await new Promise(resolve => setTimeout(resolve, argv.interval * 1000));
-}
-
-// Stop workers
-issueQueue.stop();
-await Promise.all(issueQueue.workers);
-
-// Perform cleanup if enabled and there were successful completions
-const finalStats = issueQueue.getStats();
-if (finalStats.completed > 0) {
-  await cleanupTempDirectories();
-}
-
-await log('\nš Hive Mind monitoring stopped');
-await log(`  š Full log file: ${absoluteLogPath}`);
-}

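Note the shape here: monitor() launches each worker as an un-awaited promise (fan-out) and only awaits them collectively via Promise.all on shutdown (fan-in), which is the standard pattern for a fixed-size worker pool over a shared queue. A stripped-down sketch of that shape (all names are illustrative):

// Hypothetical sketch of the fan-out/fan-in worker-pool lifecycle used by monitor().
const workers = [];
let running = true;
const queue = [];

async function poolWorker(id) {
  while (running) {
    const job = queue.shift();
    if (!job) {
      await new Promise(r => setTimeout(r, 1000)); // idle poll, mirrors the 5s wait above
      continue;
    }
    await job();
  }
}

for (let i = 1; i <= 3; i++) workers.push(poolWorker(i)); // fan-out: start, don't await
// ... enqueue jobs over time: queue.push(async () => { /* work */ }); ...
running = false; // signal stop
await Promise.all(workers); // fan-in: wait for every loop to exit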
-// Graceful shutdown handler
-async function gracefulShutdown(signal) {
-    context: 'monitor_issues_shutdown',
-    operation: 'cleanup_and_exit'
-    });
-    await log(`  ā ļø Error during shutdown: ${cleanErrorMessage(error)}`, { level: 'error' });
-    await log(`  š Full log file: ${absoluteLogPath}`);
-  }
+// Graceful shutdown handler
+async function gracefulShutdown(signal) {
+  if (isShuttingDown) {
+    return; // Prevent duplicate shutdown messages
+  }
+  isShuttingDown = true;
+
+  try {
+    await log(`\n\nš Received ${signal} signal, shutting down gracefully...`);
+
+    // Stop the queue and wait for workers to finish
+    issueQueue.stop();
+
+    // Give workers a moment to finish their current tasks
+    const stats = issueQueue.getStats();
+    if (stats.processing > 0) {
+      await log(`  ā³ Waiting for ${stats.processing} worker(s) to finish current tasks...`);
+
+      // Wait up to 10 seconds for workers to finish
+      const maxWaitTime = 10000;
+      const startTime = Date.now();
+      while (issueQueue.getStats().processing > 0 && Date.now() - startTime < maxWaitTime) {
+        await new Promise(resolve => setTimeout(resolve, 500));
+      }
+    }
+
+    await Promise.all(issueQueue.workers);
+
+    // Perform cleanup if enabled and there were successful completions
+    const finalStats = issueQueue.getStats();
+    if (finalStats.completed > 0) {
+      await cleanupTempDirectories(argv);
+    }
+
+    await log('  ā Shutdown complete');
+    await log(`  š Full log file: ${absoluteLogPath}`);
+  } catch (error) {
+    reportError(error, {
+      context: 'monitor_issues_shutdown',
+      operation: 'cleanup_and_exit',
+    });
+    await log(`  ā ļø Error during shutdown: ${cleanErrorMessage(error)}`, { level: 'error' });
+    await log(`  š Full log file: ${absoluteLogPath}`);
+  }
+
+  await safeExit(0, 'Process completed');
+}

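Both SIGINT and SIGTERM are routed into this one handler, and the isShuttingDown flag makes it idempotent: a second Ctrl+C while the bounded wait is still running returns immediately instead of racing the first shutdown. A minimal standalone reproduction of the guard plus the bounded wait (all names are illustrative):

// Hypothetical sketch of the idempotent, bounded-wait shutdown pattern.
let shuttingDown = false;
const workStillRunning = () => false; // stand-in for issueQueue.getStats().processing > 0

async function shutdown(signal) {
  if (shuttingDown) return; // second signal: no-op
  shuttingDown = true;
  console.log(`received ${signal}, draining...`);
  const deadline = Date.now() + 10000; // wait at most 10s, as above
  while (workStillRunning() && Date.now() < deadline) {
    await new Promise(r => setTimeout(r, 500));
  }
  process.exit(0);
}

process.on('SIGINT', () => shutdown('SIGINT'));
process.on('SIGTERM', () => shutdown('SIGTERM'));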
+// Function to validate Claude CLI connection
+// validateClaudeConnection is now imported from lib.mjs

-//
-// Handle graceful shutdown
-process.on('SIGINT', () => gracefulShutdown('interrupt'));
-process.on('SIGTERM', () => gracefulShutdown('termination'));
-
-// Check system resources (disk space and RAM) before starting monitoring (skip in dry-run mode)
-if (argv.dryRun || argv.skipToolConnectionCheck || argv.toolConnectionCheck === false) {
-  await log('ā© Skipping system resource check (dry-run mode or skip-tool-connection-check enabled)', { verbose: true });
-  await log('ā© Skipping Claude CLI connection check (dry-run mode or skip-tool-connection-check enabled)', { verbose: true });
-} else {
-  const systemCheck = await checkSystem(
-    {
-      minDiskSpaceMB: argv.minDiskSpace || 500,
-      minMemoryMB: 256,
-      exitOnFailure: true
-    },
-    { log }
-  );
-
-  if (!systemCheck.success) {
-    await safeExit(1, 'Error occurred');
-  }
+// Handle graceful shutdown
+process.on('SIGINT', () => gracefulShutdown('interrupt'));
+process.on('SIGTERM', () => gracefulShutdown('termination'));
+
+// Check system resources (disk space and RAM) before starting monitoring (skip in dry-run mode)
+if (argv.dryRun || argv.skipToolConnectionCheck || argv.toolConnectionCheck === false) {
+  await log('ā© Skipping system resource check (dry-run mode or skip-tool-connection-check enabled)', {
+    verbose: true,
+  });
+  await log('ā© Skipping Claude CLI connection check (dry-run mode or skip-tool-connection-check enabled)', {
+    verbose: true,
+  });
+} else {
+  const systemCheck = await checkSystem(
+    {
+      minDiskSpaceMB: argv.minDiskSpace || 500,
+      minMemoryMB: 256,
+      exitOnFailure: true,
+    },
+    { log }
+  );
+
+  if (!systemCheck.success) {
+    await safeExit(1, 'Error occurred');
+  }

-  //
-  const
-  }
-  reportError(error, {
-    context: 'hive_main',
-    operation: 'monitor_with_sentry'
-  });
-  await log(`\nā Fatal error: ${cleanErrorMessage(error)}`, { level: 'error' });
-  await log(`  š Full log file: ${absoluteLogPath}`, { level: 'error' });
-  await safeExit(1, 'Error occurred');
-}
+  // Validate Claude CLI connection before starting monitoring with the same model that will be used
+  const isClaudeConnected = await validateClaudeConnection(argv.model);
+  if (!isClaudeConnected) {
+    await log('ā Cannot start monitoring without Claude CLI connection', { level: 'error' });
+    await safeExit(1, 'Error occurred');
+  }
+}

+// Wrap monitor function with Sentry error tracking
+const monitorWithSentry = !argv.sentry ? monitor : withSentry(monitor, 'hive.monitor', 'command');
+
+// Start monitoring
+try {
+  await monitorWithSentry();
+} catch (error) {
+  reportError(error, {
+    context: 'hive_main',
+    operation: 'monitor_with_sentry',
+  });
+  await log(`\nā Fatal error: ${cleanErrorMessage(error)}`, { level: 'error' });
+  await log(`  š Full log file: ${absoluteLogPath}`, { level: 'error' });
+  await safeExit(1, 'Error occurred');
+}
+} catch (fatalError) {
+  // Handle any errors that occurred during initialization or execution
+  // This prevents silent failures when the script hangs or crashes
+  console.error('\nā Fatal error occurred during hive initialization or execution');
+  console.error(`  ${fatalError.message || fatalError}`);
+  if (fatalError.stack) {
+    console.error('\nStack trace:');
+    console.error(fatalError.stack);
+  }
+  console.error('\nPlease report this issue at: https://github.com/link-assistant/hive-mind/issues');
+  process.exit(1);
}
-
-process.exit(1);
-}
-} // End of main execution block
+} // End of main execution block