@link-assistant/hive-mind 0.39.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +20 -0
- package/LICENSE +24 -0
- package/README.md +769 -0
- package/package.json +58 -0
- package/src/agent.lib.mjs +705 -0
- package/src/agent.prompts.lib.mjs +196 -0
- package/src/buildUserMention.lib.mjs +71 -0
- package/src/claude-limits.lib.mjs +389 -0
- package/src/claude.lib.mjs +1445 -0
- package/src/claude.prompts.lib.mjs +203 -0
- package/src/codex.lib.mjs +552 -0
- package/src/codex.prompts.lib.mjs +194 -0
- package/src/config.lib.mjs +207 -0
- package/src/contributing-guidelines.lib.mjs +268 -0
- package/src/exit-handler.lib.mjs +205 -0
- package/src/git.lib.mjs +145 -0
- package/src/github-issue-creator.lib.mjs +246 -0
- package/src/github-linking.lib.mjs +152 -0
- package/src/github.batch.lib.mjs +272 -0
- package/src/github.graphql.lib.mjs +258 -0
- package/src/github.lib.mjs +1479 -0
- package/src/hive.config.lib.mjs +254 -0
- package/src/hive.mjs +1500 -0
- package/src/instrument.mjs +191 -0
- package/src/interactive-mode.lib.mjs +1000 -0
- package/src/lenv-reader.lib.mjs +206 -0
- package/src/lib.mjs +490 -0
- package/src/lino.lib.mjs +176 -0
- package/src/local-ci-checks.lib.mjs +324 -0
- package/src/memory-check.mjs +419 -0
- package/src/model-mapping.lib.mjs +145 -0
- package/src/model-validation.lib.mjs +278 -0
- package/src/opencode.lib.mjs +479 -0
- package/src/opencode.prompts.lib.mjs +194 -0
- package/src/protect-branch.mjs +159 -0
- package/src/review.mjs +433 -0
- package/src/reviewers-hive.mjs +643 -0
- package/src/sentry.lib.mjs +284 -0
- package/src/solve.auto-continue.lib.mjs +568 -0
- package/src/solve.auto-pr.lib.mjs +1374 -0
- package/src/solve.branch-errors.lib.mjs +341 -0
- package/src/solve.branch.lib.mjs +230 -0
- package/src/solve.config.lib.mjs +342 -0
- package/src/solve.error-handlers.lib.mjs +256 -0
- package/src/solve.execution.lib.mjs +291 -0
- package/src/solve.feedback.lib.mjs +436 -0
- package/src/solve.mjs +1128 -0
- package/src/solve.preparation.lib.mjs +210 -0
- package/src/solve.repo-setup.lib.mjs +114 -0
- package/src/solve.repository.lib.mjs +961 -0
- package/src/solve.results.lib.mjs +558 -0
- package/src/solve.session.lib.mjs +135 -0
- package/src/solve.validation.lib.mjs +325 -0
- package/src/solve.watch.lib.mjs +572 -0
- package/src/start-screen.mjs +324 -0
- package/src/task.mjs +308 -0
- package/src/telegram-bot.mjs +1481 -0
- package/src/telegram-markdown.lib.mjs +64 -0
- package/src/usage-limit.lib.mjs +218 -0
- package/src/version.lib.mjs +41 -0
- package/src/youtrack/solve.youtrack.lib.mjs +116 -0
- package/src/youtrack/youtrack-sync.mjs +219 -0
- package/src/youtrack/youtrack.lib.mjs +425 -0
package/src/solve.mjs
ADDED
|
@@ -0,0 +1,1128 @@
|
|
|
1
|
+
#!/usr/bin/env node
// Import Sentry instrumentation first (must be before other imports)
import './instrument.mjs';

// Early exit paths - handled before loading the full module graph so that
// --version, --help and the no-argument case respond quickly.
const earlyArgs = process.argv.slice(2);

if (earlyArgs.includes('--version')) {
  // Only the version helper is needed on this path.
  const { getVersion } = await import('./version.lib.mjs');
  try {
    console.log(await getVersion());
  } catch {
    console.error('Error: Unable to determine version');
    process.exit(1);
  }
  process.exit(0);
}

if (earlyArgs.includes('--help') || earlyArgs.includes('-h')) {
  // Load the minimal set of modules required to render help.
  // NOTE(review): fetching and eval-ing remote code is a supply-chain risk;
  // this mirrors the project's existing use-m bootstrap convention.
  const { use } = eval(await (await fetch('https://unpkg.com/use-m/use.js')).text());
  globalThis.use = use;
  const config = await import('./solve.config.lib.mjs');
  const { initializeConfig, createYargsConfig } = config;
  const { yargs, hideBin } = await initializeConfig(use);
  // Strip the help flags themselves so yargs does not print help twice.
  const argsWithoutHelp = hideBin(process.argv).filter((a) => a !== '--help' && a !== '-h');
  createYargsConfig(yargs(argsWithoutHelp)).showHelp();
  process.exit(0);
}

if (earlyArgs.length === 0) {
  console.error('Usage: solve.mjs <issue-url> [options]');
  console.error('\nError: Missing required github issue or pull request URL');
  console.error('\nRun "solve.mjs --help" for more information');
  process.exit(1);
}
|
|
36
|
+
// Now load all modules for normal operation
|
|
37
|
+
const { use } = eval(await (await fetch('https://unpkg.com/use-m/use.js')).text());
|
|
38
|
+
globalThis.use = use;
|
|
39
|
+
const { $ } = await use('command-stream');
|
|
40
|
+
const config = await import('./solve.config.lib.mjs');
|
|
41
|
+
const { initializeConfig, parseArguments } = config;
|
|
42
|
+
// Import Sentry integration
|
|
43
|
+
const sentryLib = await import('./sentry.lib.mjs');
|
|
44
|
+
const { initializeSentry, addBreadcrumb, reportError } = sentryLib;
|
|
45
|
+
const { yargs, hideBin } = await initializeConfig(use);
|
|
46
|
+
const path = (await use('path')).default;
|
|
47
|
+
const fs = (await use('fs')).promises;
|
|
48
|
+
const crypto = (await use('crypto')).default;
|
|
49
|
+
const memoryCheck = await import('./memory-check.mjs');
|
|
50
|
+
const lib = await import('./lib.mjs');
|
|
51
|
+
const { log, setLogFile, getLogFile, getAbsoluteLogPath, cleanErrorMessage, formatAligned, getVersionInfo } = lib;
|
|
52
|
+
const githubLib = await import('./github.lib.mjs');
|
|
53
|
+
const { sanitizeLogContent, attachLogToGitHub } = githubLib;
|
|
54
|
+
const validation = await import('./solve.validation.lib.mjs');
|
|
55
|
+
const { validateGitHubUrl, showAttachLogsWarning, initializeLogFile, validateUrlRequirement, validateContinueOnlyOnFeedback, performSystemChecks, parseUrlComponents } = validation;
|
|
56
|
+
const autoContinue = await import('./solve.auto-continue.lib.mjs');
|
|
57
|
+
const { processAutoContinueForIssue } = autoContinue;
|
|
58
|
+
const repository = await import('./solve.repository.lib.mjs');
|
|
59
|
+
const { setupTempDirectory, cleanupTempDirectory } = repository;
|
|
60
|
+
const results = await import('./solve.results.lib.mjs');
|
|
61
|
+
const { cleanupClaudeFile, showSessionSummary, verifyResults } = results;
|
|
62
|
+
const claudeLib = await import('./claude.lib.mjs');
|
|
63
|
+
const { executeClaude } = claudeLib;
|
|
64
|
+
|
|
65
|
+
const githubLinking = await import('./github-linking.lib.mjs');
|
|
66
|
+
const { extractLinkedIssueNumber } = githubLinking;
|
|
67
|
+
|
|
68
|
+
const errorHandlers = await import('./solve.error-handlers.lib.mjs');
|
|
69
|
+
const { createUncaughtExceptionHandler, createUnhandledRejectionHandler, handleMainExecutionError } = errorHandlers;
|
|
70
|
+
|
|
71
|
+
const watchLib = await import('./solve.watch.lib.mjs');
|
|
72
|
+
const { startWatchMode } = watchLib;
|
|
73
|
+
const exitHandler = await import('./exit-handler.lib.mjs');
|
|
74
|
+
const { initializeExitHandler, installGlobalExitHandlers, safeExit } = exitHandler;
|
|
75
|
+
const getResourceSnapshot = memoryCheck.getResourceSnapshot;
|
|
76
|
+
|
|
77
|
+
// Import new modular components
|
|
78
|
+
const autoPrLib = await import('./solve.auto-pr.lib.mjs');
|
|
79
|
+
const { handleAutoPrCreation } = autoPrLib;
|
|
80
|
+
const repoSetupLib = await import('./solve.repo-setup.lib.mjs');
|
|
81
|
+
const { setupRepositoryAndClone, verifyDefaultBranchAndStatus } = repoSetupLib;
|
|
82
|
+
const branchLib = await import('./solve.branch.lib.mjs');
|
|
83
|
+
const { createOrCheckoutBranch } = branchLib;
|
|
84
|
+
const sessionLib = await import('./solve.session.lib.mjs');
|
|
85
|
+
const { startWorkSession, endWorkSession } = sessionLib;
|
|
86
|
+
const preparationLib = await import('./solve.preparation.lib.mjs');
|
|
87
|
+
const { prepareFeedbackAndTimestamps, checkUncommittedChanges, checkForkActions } = preparationLib;
|
|
88
|
+
|
|
89
|
+
// Import model validation library
|
|
90
|
+
const modelValidation = await import('./model-validation.lib.mjs');
|
|
91
|
+
const { validateAndExitOnInvalidModel } = modelValidation;
|
|
92
|
+
|
|
93
|
+
// Initialize log file EARLY to capture all output including version and command.
// Use default directory (cwd) initially; argv.logDir is only known after parsing.
const logFile = await initializeLogFile(null);

// Log version and raw command IMMEDIATELY after log file initialization.
// This ensures they appear in both console and log file, even if argument parsing fails.
const versionInfo = await getVersionInfo();
await log('');
await log(`🚀 solve v${versionInfo}`);
const rawCommand = process.argv.join(' ');
await log('🔧 Raw command executed:');
await log(`   ${rawCommand}`);
await log('');

const argv = await parseArguments(yargs, hideBin);
global.verboseMode = argv.verbose;

// If user specified a custom log directory, we would need to move the log file.
// However, this adds complexity, so we accept that early logs go to cwd.
// The trade-off is: early logs in cwd vs missing version/command in error cases.

// Conditionally import tool-specific functions after argv is parsed.
// Table-driven: each non-default tool maps to its library module; anything
// else (including an unset tool) falls back to the claude implementation,
// exactly as the previous if/else-if chain did.
let checkForUncommittedChanges;
const toolLibPaths = {
  opencode: './opencode.lib.mjs',
  codex: './codex.lib.mjs',
  agent: './agent.lib.mjs'
};
if (Object.hasOwn(toolLibPaths, argv.tool)) {
  const toolLib = await import(toolLibPaths[argv.tool]);
  checkForUncommittedChanges = toolLib.checkForUncommittedChanges;
} else {
  checkForUncommittedChanges = claudeLib.checkForUncommittedChanges;
}

const shouldAttachLogs = argv.attachLogs || argv['attach-logs'];
await showAttachLogsWarning(shouldAttachLogs);
const absoluteLogPath = path.resolve(logFile);

// Initialize Sentry integration (unless disabled)
if (argv.sentry) {
  await initializeSentry({
    noSentry: !argv.sentry,
    debug: argv.verbose,
    // Fall back to the version resolved above instead of a stale hard-coded
    // string ('0.12.0' previously), so Sentry events carry the real release.
    version: process.env.npm_package_version || versionInfo
  });
  // Add breadcrumb for solve operation
  addBreadcrumb({
    category: 'solve',
    message: 'Started solving issue',
    level: 'info',
    data: {
      model: argv.model,
      issueUrl: argv['issue-url'] || argv._?.[0] || 'not-set-yet'
    }
  });
}
|
|
149
|
+
// Cleanup context shared with the exit handler; populated with the temp
// directory and parsed argv once they exist. `const` (was `let`) because only
// its properties are mutated, never the binding itself.
const cleanupContext = { tempDir: null, argv: null, limitReached: false };
const cleanupWrapper = async () => {
  if (cleanupContext.tempDir && cleanupContext.argv) {
    await cleanupTempDirectory(cleanupContext.tempDir, cleanupContext.argv, cleanupContext.limitReached);
  }
};
// Initialize the exit handler with getAbsoluteLogPath function and cleanup wrapper
initializeExitHandler(getAbsoluteLogPath, log, cleanupWrapper);
installGlobalExitHandlers();

// Note: Version and raw command are logged BEFORE parseArguments() (see above)
// This ensures they appear even if strict validation fails
// Strict options validation is now handled by yargs .strict() mode in solve.config.lib.mjs
// This prevents unrecognized options from being silently ignored (issue #453, #482)

// Now handle argument validation that was moved from early checks
let issueUrl = argv['issue-url'] || argv._[0];
if (!issueUrl) {
  await log('Usage: solve.mjs <issue-url> [options]', { level: 'error' });
  await log('Error: Missing required github issue or pull request URL', { level: 'error' });
  await log('Run "solve.mjs --help" for more information', { level: 'error' });
  await safeExit(1, 'Missing required GitHub URL');
}

// Validate GitHub URL using validation module (more thorough check)
const urlValidation = validateGitHubUrl(issueUrl);
if (!urlValidation.isValid) {
  // presumably validateGitHubUrl logs the failure details itself — safeExit
  // records the reason; TODO confirm the user sees an explanation here
  await safeExit(1, 'Invalid GitHub URL');
}
const { isIssueUrl, isPrUrl, normalizedUrl } = urlValidation;
issueUrl = normalizedUrl || issueUrl;

// Setup unhandled error handlers to ensure log path is always shown
const errorHandlerOptions = {
  log,
  cleanErrorMessage,
  absoluteLogPath,
  shouldAttachLogs,
  argv,
  global,
  owner: null, // Will be set later when parsed
  repo: null, // Will be set later when parsed
  getLogFile,
  attachLogToGitHub,
  sanitizeLogContent,
  $
};
process.on('uncaughtException', createUncaughtExceptionHandler(errorHandlerOptions));
process.on('unhandledRejection', createUnhandledRejectionHandler(errorHandlerOptions));

// Validate GitHub URL requirement and options using validation module
if (!(await validateUrlRequirement(issueUrl))) {
  await safeExit(1, 'URL requirement validation failed');
}
if (!(await validateContinueOnlyOnFeedback(argv, isPrUrl, isIssueUrl))) {
  await safeExit(1, 'Feedback validation failed');
}
|
|
204
|
+
|
|
205
|
+
// Validate model name EARLY - this always runs regardless of --skip-tool-connection-check
// Model validation is a simple string check and should always be performed
const tool = argv.tool || 'claude';
await validateAndExitOnInvalidModel(argv.model, tool, safeExit);

// Perform all system checks using validation module
// Skip tool CONNECTION validation in dry-run mode or when --skip-tool-connection-check or --no-tool-connection-check is enabled
// Note: This does NOT skip model validation which is performed above
const skipToolConnectionCheck = argv.dryRun || argv.skipToolConnectionCheck || argv.toolConnectionCheck === false;
if (!(await performSystemChecks(argv.minDiskSpace || 500, skipToolConnectionCheck, argv.model, argv))) {
  await safeExit(1, 'System checks failed');
}

// URL validation debug logging (only in --verbose mode)
// NOTE(review): in-string indentation reconstructed from a whitespace-stripped
// diff listing — verify against upstream.
if (argv.verbose) {
  await log('📋 URL validation:', { verbose: true });
  await log(`   Input URL: ${issueUrl}`, { verbose: true });
  await log(`   Is Issue URL: ${!!isIssueUrl}`, { verbose: true });
  await log(`   Is PR URL: ${!!isPrUrl}`, { verbose: true });
}

// Allow overriding the claude binary location via environment
const claudePath = process.env.CLAUDE_PATH || 'claude';

// Parse URL components using validation module
const { owner, repo, urlNumber } = parseUrlComponents(issueUrl);
// Store owner and repo globally for error handlers
global.owner = owner;
global.repo = repo;
|
|
230
|
+
|
|
231
|
+
// Handle --auto-fork option: automatically fork public repositories without write access.
// Outcome matrix (as implemented below):
//   write access             -> work directly on the repository
//   no access + public repo  -> enable fork mode automatically
//   no access + private repo -> hard fail (cannot fork a repo we cannot see)
//   permissions unknown      -> fork if public, fail if private
// NOTE(review): in-string indentation reconstructed from a whitespace-stripped
// diff listing — verify against upstream.
if (argv.autoFork && !argv.fork) {
  const { detectRepositoryVisibility } = githubLib;

  // Check if we have write access first
  await log('🔍 Checking repository access for auto-fork...');
  const permResult = await $`gh api repos/${owner}/${repo} --jq .permissions`;

  if (permResult.code === 0) {
    const permissions = JSON.parse(permResult.stdout.toString().trim());
    // push, admin and maintain all imply the ability to push branches
    const hasWriteAccess = permissions.push === true || permissions.admin === true || permissions.maintain === true;

    if (!hasWriteAccess) {
      // No write access - check if repository is public before enabling fork mode
      const { isPublic } = await detectRepositoryVisibility(owner, repo);

      if (!isPublic) {
        // Private repository without write access - cannot fork
        await log('');
        await log('❌ --auto-fork failed: Repository is private and you don\'t have write access', { level: 'error' });
        await log('');
        await log('   🔍 What happened:', { level: 'error' });
        await log(`      Repository ${owner}/${repo} is private`, { level: 'error' });
        await log('      You don\'t have write access to this repository', { level: 'error' });
        await log('      --auto-fork cannot create a fork of a private repository you cannot access', { level: 'error' });
        await log('');
        await log('   💡 Solution:', { level: 'error' });
        await log('      • Request collaborator access from the repository owner', { level: 'error' });
        await log(`        https://github.com/${owner}/${repo}/settings/access`, { level: 'error' });
        await log('');
        await safeExit(1, 'Auto-fork failed - private repository without access');
      }

      // Public repository without write access - automatically enable fork mode
      await log('✅ Auto-fork: No write access detected, enabling fork mode');
      argv.fork = true;
    } else {
      // Has write access - work directly on the repo (works for both public and private repos)
      // Visibility is fetched here only for the log message below.
      const { isPublic } = await detectRepositoryVisibility(owner, repo);
      await log(`✅ Auto-fork: Write access detected to ${isPublic ? 'public' : 'private'} repository, working directly on repository`);
    }
  } else {
    // Could not check permissions - assume no access and try to fork if public
    const { isPublic } = await detectRepositoryVisibility(owner, repo);

    if (!isPublic) {
      // Cannot determine permissions for private repo - fail safely
      await log('');
      await log('❌ --auto-fork failed: Could not verify permissions for private repository', { level: 'error' });
      await log('');
      await log('   🔍 What happened:', { level: 'error' });
      await log(`      Repository ${owner}/${repo} is private`, { level: 'error' });
      await log('      Could not check your permissions to this repository', { level: 'error' });
      await log('');
      await log('   💡 Solutions:', { level: 'error' });
      await log('      • Check your GitHub CLI authentication: gh auth status', { level: 'error' });
      await log('      • Request collaborator access if you don\'t have it yet', { level: 'error' });
      await log(`        https://github.com/${owner}/${repo}/settings/access`, { level: 'error' });
      await log('');
      await safeExit(1, 'Auto-fork failed - cannot verify private repository permissions');
    }

    // Public repository but couldn't check permissions - assume no access and fork
    await log('⚠️ Auto-fork: Could not check permissions, enabling fork mode for public repository');
    argv.fork = true;
  }
}
|
|
298
|
+
|
|
299
|
+
// Early check: Verify repository write permissions BEFORE doing any work
// This prevents wasting AI tokens when user doesn't have access and --fork is not used
const { checkRepositoryWritePermission } = githubLib;
const hasWriteAccess = await checkRepositoryWritePermission(owner, repo, {
  useFork: argv.fork,
  issueUrl: issueUrl
});

if (!hasWriteAccess) {
  await log('');
  await log('❌ Cannot proceed without repository write access or --fork option', { level: 'error' });
  await safeExit(1, 'Permission check failed');
}

// Detect repository visibility and set auto-cleanup default if not explicitly set
// (undefined means the user passed neither --auto-cleanup nor --no-auto-cleanup)
if (argv.autoCleanup === undefined) {
  const { detectRepositoryVisibility } = githubLib;
  const { isPublic } = await detectRepositoryVisibility(owner, repo);
  // For public repos: keep temp directories (default false)
  // For private repos: clean up temp directories (default true)
  argv.autoCleanup = !isPublic;
  if (argv.verbose) {
    await log(`   Auto-cleanup default: ${argv.autoCleanup} (repository is ${isPublic ? 'public' : 'private'})`, { verbose: true });
  }
}
|
|
324
|
+
// Determine mode and get issue details.
// "Continue mode" means we resume work on an existing PR/branch instead of
// starting fresh from an issue. These mutable bindings are filled in by the
// branches below and consumed by the rest of the script.
let issueNumber;
let prNumber;
let prBranch;
let mergeStateStatus;
let prState;
let forkOwner = null;
let isContinueMode = false;

// Auto-continue logic: check for existing PRs if --auto-continue is enabled
const autoContinueResult = await processAutoContinueForIssue(argv, isIssueUrl, urlNumber, owner, repo);
if (autoContinueResult.isContinueMode) {
  isContinueMode = true;
  prNumber = autoContinueResult.prNumber;
  prBranch = autoContinueResult.prBranch;
  issueNumber = autoContinueResult.issueNumber;
  // Only check PR details if we have a PR number
  if (prNumber) {
    // Store PR info globally for error handlers
    global.createdPR = { number: prNumber };
    // Check if PR is from a fork and get fork owner, merge status, and PR state
    if (argv.verbose) {
      await log('   Checking if PR is from a fork...', { verbose: true });
    }
    try {
      const prCheckResult = await $`gh pr view ${prNumber} --repo ${owner}/${repo} --json headRepositoryOwner,headRepository,mergeStateStatus,state`;
      if (prCheckResult.code === 0) {
        const prCheckData = JSON.parse(prCheckResult.stdout.toString());
        // Extract merge status and PR state
        mergeStateStatus = prCheckData.mergeStateStatus;
        prState = prCheckData.state;
        if (argv.verbose) {
          await log(`   PR state: ${prState || 'UNKNOWN'}`, { verbose: true });
          await log(`   Merge status: ${mergeStateStatus || 'UNKNOWN'}`, { verbose: true });
        }
        // A head-repository owner different from the base owner identifies a fork PR
        if (prCheckData.headRepositoryOwner && prCheckData.headRepositoryOwner.login !== owner) {
          forkOwner = prCheckData.headRepositoryOwner.login;
          // Get actual fork repository name (may be prefixed)
          const forkRepoName = (prCheckData.headRepository && prCheckData.headRepository.name) ? prCheckData.headRepository.name : repo;
          await log(`🍴 Detected fork PR from ${forkOwner}/${forkRepoName}`);
          if (argv.verbose) {
            await log(`   Fork owner: ${forkOwner}`, { verbose: true });
            await log('   Will clone fork repository for continue mode', { verbose: true });
          }

          // Check if maintainer can push to the fork when --allow-to-push-to-contributors-pull-requests-as-maintainer is enabled
          if (argv.allowToPushToContributorsPullRequestsAsMaintainer && argv.autoFork) {
            const { checkMaintainerCanModifyPR, requestMaintainerAccess } = githubLib;
            const { canModify } = await checkMaintainerCanModifyPR(owner, repo, prNumber);

            if (canModify) {
              await log('✅ Maintainer can push to fork: Enabled by contributor');
              await log('   Will push changes directly to contributor\'s fork instead of creating own fork');
              // Don't disable fork mode, but we'll use the contributor's fork
            } else {
              await log('⚠️ Maintainer cannot push to fork: "Allow edits by maintainers" is not enabled', { level: 'warning' });
              await log('   Posting comment to request access...', { level: 'warning' });
              await requestMaintainerAccess(owner, repo, prNumber);
              await log('   Comment posted. Proceeding with own fork instead.', { level: 'warning' });
            }
          }
        }
      }
    } catch (forkCheckError) {
      // Best-effort check: a failure here only loses fork metadata, so we
      // deliberately log (verbose only) and continue rather than abort.
      if (argv.verbose) {
        await log(`   Warning: Could not check fork status: ${forkCheckError.message}`, { verbose: true });
      }
    }
  } else {
    // We have a branch but no PR - we'll use the existing branch and create a PR later
    await log(`🔄 Using existing branch: ${prBranch} (no PR yet - will create one)`);
    if (argv.verbose) {
      await log('   Branch will be checked out and PR will be created during auto-PR creation phase', { verbose: true });
    }
  }
} else if (isIssueUrl) {
  issueNumber = autoContinueResult.issueNumber || urlNumber;
}
|
|
401
|
+
// A PR URL given directly always activates continue mode; otherwise this is
// the traditional issue mode.
if (isPrUrl) {
  isContinueMode = true;
  prNumber = urlNumber;
  // Store PR info globally for error handlers
  global.createdPR = { number: prNumber, url: issueUrl };
  await log(`🔄 Continue mode: Working with PR #${prNumber}`);
  if (argv.verbose) {
    await log('   Continue mode activated: PR URL provided directly', { verbose: true });
    await log(`   PR Number set to: ${prNumber}`, { verbose: true });
    await log('   Will fetch PR details and linked issue', { verbose: true });
  }
  // Get PR details to find the linked issue and branch
  try {
    const prResult = await githubLib.ghPrView({
      prNumber,
      owner,
      repo,
      jsonFields: 'headRefName,body,number,mergeStateStatus,state,headRepositoryOwner,headRepository'
    });
    if (prResult.code !== 0 || !prResult.data) {
      await log('Error: Failed to get PR details', { level: 'error' });
      if (prResult.output.includes('Could not resolve to a PullRequest')) {
        // Dedicated handler produces a friendlier "PR not found" message
        await githubLib.handlePRNotFoundError({ prNumber, owner, repo, argv, shouldAttachLogs });
      } else {
        await log(`Error: ${prResult.stderr || 'Unknown error'}`, { level: 'error' });
      }
      await safeExit(1, 'Failed to get PR details');
    }
    const prData = prResult.data;
    prBranch = prData.headRefName;
    mergeStateStatus = prData.mergeStateStatus;
    prState = prData.state;
    // Check if this is a fork PR (head repo owner differs from base owner)
    if (prData.headRepositoryOwner && prData.headRepositoryOwner.login !== owner) {
      forkOwner = prData.headRepositoryOwner.login;
      // Get actual fork repository name (may be prefixed)
      const forkRepoName = (prData.headRepository && prData.headRepository.name) ? prData.headRepository.name : repo;
      await log(`🍴 Detected fork PR from ${forkOwner}/${forkRepoName}`);
      if (argv.verbose) {
        await log(`   Fork owner: ${forkOwner}`, { verbose: true });
        await log('   Will clone fork repository for continue mode', { verbose: true });
      }

      // Check if maintainer can push to the fork when --allow-to-push-to-contributors-pull-requests-as-maintainer is enabled
      if (argv.allowToPushToContributorsPullRequestsAsMaintainer && argv.autoFork) {
        const { checkMaintainerCanModifyPR, requestMaintainerAccess } = githubLib;
        const { canModify } = await checkMaintainerCanModifyPR(owner, repo, prNumber);

        if (canModify) {
          await log('✅ Maintainer can push to fork: Enabled by contributor');
          await log('   Will push changes directly to contributor\'s fork instead of creating own fork');
          // Don't disable fork mode, but we'll use the contributor's fork
        } else {
          await log('⚠️ Maintainer cannot push to fork: "Allow edits by maintainers" is not enabled', { level: 'warning' });
          await log('   Posting comment to request access...', { level: 'warning' });
          await requestMaintainerAccess(owner, repo, prNumber);
          await log('   Comment posted. Proceeding with own fork instead.', { level: 'warning' });
        }
      }
    }
    await log(`📝 PR branch: ${prBranch}`);
    // Extract issue number from PR body using GitHub linking detection library
    // This ensures we only detect actual GitHub-recognized linking keywords
    const prBody = prData.body || '';
    const extractedIssueNumber = extractLinkedIssueNumber(prBody);
    if (extractedIssueNumber) {
      issueNumber = extractedIssueNumber;
      await log(`🔗 Found linked issue #${issueNumber}`);
    } else {
      // If no linked issue found, we can still continue but warn
      await log('⚠️ Warning: No linked issue found in PR body', { level: 'warning' });
      await log('   The PR should contain "Fixes #123" or similar to link an issue', { level: 'warning' });
      // Set issueNumber to PR number as fallback
      issueNumber = prNumber;
    }
  } catch (error) {
    // Report to Sentry, surface the cleaned message, and abort - continue mode
    // cannot proceed without PR details.
    reportError(error, {
      context: 'pr_processing',
      prNumber,
      operation: 'process_pull_request'
    });
    await log(`Error: Failed to process PR: ${cleanErrorMessage(error)}`, { level: 'error' });
    await safeExit(1, 'Failed to process PR');
  }
} else {
  // Traditional issue mode
  issueNumber = urlNumber;
  await log(`📝 Issue mode: Working with issue #${issueNumber}`);
}
|
|
490
|
+
// Create or find temporary directory for cloning the repository
const { tempDir } = await setupTempDirectory(argv);
// Populate cleanup context for signal handlers (registered earlier via the
// exit handler) so interrupted runs can still remove the temp directory.
cleanupContext.tempDir = tempDir;
cleanupContext.argv = argv;
// Initialize limitReached variable outside try block for finally clause
let limitReached = false;
|
|
497
|
+
try {
|
|
498
|
+
// Set up repository and clone using the new module
|
|
499
|
+
const { forkedRepo } = await setupRepositoryAndClone({
|
|
500
|
+
argv,
|
|
501
|
+
owner,
|
|
502
|
+
repo,
|
|
503
|
+
forkOwner,
|
|
504
|
+
tempDir,
|
|
505
|
+
isContinueMode,
|
|
506
|
+
issueUrl,
|
|
507
|
+
log,
|
|
508
|
+
formatAligned,
|
|
509
|
+
$
|
|
510
|
+
});
|
|
511
|
+
|
|
512
|
+
// Verify default branch and status using the new module
|
|
513
|
+
const defaultBranch = await verifyDefaultBranchAndStatus({
|
|
514
|
+
tempDir,
|
|
515
|
+
log,
|
|
516
|
+
formatAligned,
|
|
517
|
+
$
|
|
518
|
+
});
|
|
519
|
+
// Create or checkout branch using the new module
|
|
520
|
+
const branchName = await createOrCheckoutBranch({
|
|
521
|
+
isContinueMode,
|
|
522
|
+
prBranch,
|
|
523
|
+
issueNumber,
|
|
524
|
+
tempDir,
|
|
525
|
+
defaultBranch,
|
|
526
|
+
argv,
|
|
527
|
+
log,
|
|
528
|
+
formatAligned,
|
|
529
|
+
$,
|
|
530
|
+
crypto
|
|
531
|
+
});
|
|
532
|
+
|
|
533
|
+
// Auto-merge default branch to pull request branch if enabled
|
|
534
|
+
let autoMergeFeedbackLines = [];
|
|
535
|
+
if (isContinueMode && argv['auto-merge-default-branch-to-pull-request-branch']) {
|
|
536
|
+
await log(`\n${formatAligned('🔀', 'Auto-merging:', `Merging ${defaultBranch} into ${branchName}`)}`);
|
|
537
|
+
try {
|
|
538
|
+
const mergeResult = await $({ cwd: tempDir })`git merge ${defaultBranch} --no-edit`;
|
|
539
|
+
if (mergeResult.code === 0) {
|
|
540
|
+
await log(`${formatAligned('✅', 'Merge successful:', 'Pushing merged branch...')}`);
|
|
541
|
+
const pushResult = await $({ cwd: tempDir })`git push origin ${branchName}`;
|
|
542
|
+
if (pushResult.code === 0) {
|
|
543
|
+
await log(`${formatAligned('✅', 'Push successful:', 'Branch updated with latest changes')}`);
|
|
544
|
+
} else {
|
|
545
|
+
await log(`${formatAligned('⚠️', 'Push failed:', 'Merge completed but push failed')}`, { level: 'warning' });
|
|
546
|
+
await log(` Error: ${pushResult.stderr?.toString() || 'Unknown error'}`, { level: 'warning' });
|
|
547
|
+
}
|
|
548
|
+
} else {
|
|
549
|
+
// Merge failed - likely due to conflicts
|
|
550
|
+
await log(`${formatAligned('⚠️', 'Merge failed:', 'Conflicts detected')}`, { level: 'warning' });
|
|
551
|
+
autoMergeFeedbackLines.push('');
|
|
552
|
+
autoMergeFeedbackLines.push('⚠️ AUTOMATIC MERGE FAILED:');
|
|
553
|
+
autoMergeFeedbackLines.push(`git merge ${defaultBranch} was executed but resulted in conflicts that should be resolved first.`);
|
|
554
|
+
autoMergeFeedbackLines.push('Please resolve the merge conflicts and commit the changes.');
|
|
555
|
+
autoMergeFeedbackLines.push('');
|
|
556
|
+
}
|
|
557
|
+
} catch (mergeError) {
|
|
558
|
+
await log(`${formatAligned('❌', 'Merge error:', mergeError.message)}`, { level: 'error' });
|
|
559
|
+
autoMergeFeedbackLines.push('');
|
|
560
|
+
autoMergeFeedbackLines.push('⚠️ AUTOMATIC MERGE ERROR:');
|
|
561
|
+
autoMergeFeedbackLines.push(`git merge ${defaultBranch} failed with error: ${mergeError.message}`);
|
|
562
|
+
autoMergeFeedbackLines.push('Please check the repository state and resolve any issues.');
|
|
563
|
+
autoMergeFeedbackLines.push('');
|
|
564
|
+
}
|
|
565
|
+
}
|
|
566
|
+
|
|
567
|
+
// Initialize PR variables early
|
|
568
|
+
let prUrl = null;
|
|
569
|
+
|
|
570
|
+
// In continue mode, we already have the PR details
|
|
571
|
+
if (isContinueMode) {
|
|
572
|
+
prUrl = issueUrl; // The input URL is the PR URL
|
|
573
|
+
// prNumber is already set from earlier when we parsed the PR
|
|
574
|
+
}
|
|
575
|
+
|
|
576
|
+
// Don't build the prompt yet - we'll build it after we have all the information
|
|
577
|
+
// This includes PR URL (if created) and comment info (if in continue mode)
|
|
578
|
+
|
|
579
|
+
// Handle auto PR creation using the new module
|
|
580
|
+
const autoPrResult = await handleAutoPrCreation({
|
|
581
|
+
argv,
|
|
582
|
+
tempDir,
|
|
583
|
+
branchName,
|
|
584
|
+
issueNumber,
|
|
585
|
+
owner,
|
|
586
|
+
repo,
|
|
587
|
+
defaultBranch,
|
|
588
|
+
forkedRepo,
|
|
589
|
+
isContinueMode,
|
|
590
|
+
prNumber,
|
|
591
|
+
log,
|
|
592
|
+
formatAligned,
|
|
593
|
+
$,
|
|
594
|
+
reportError,
|
|
595
|
+
path,
|
|
596
|
+
fs
|
|
597
|
+
});
|
|
598
|
+
|
|
599
|
+
let claudeCommitHash = null;
|
|
600
|
+
if (autoPrResult) {
|
|
601
|
+
prUrl = autoPrResult.prUrl;
|
|
602
|
+
if (autoPrResult.prNumber) {
|
|
603
|
+
prNumber = autoPrResult.prNumber;
|
|
604
|
+
}
|
|
605
|
+
if (autoPrResult.claudeCommitHash) {
|
|
606
|
+
claudeCommitHash = autoPrResult.claudeCommitHash;
|
|
607
|
+
}
|
|
608
|
+
}
|
|
609
|
+
|
|
610
|
+
// CRITICAL: Validate that we have a PR number when required
|
|
611
|
+
// This prevents continuing without a PR when one was supposed to be created
|
|
612
|
+
if ((isContinueMode || argv.autoPullRequestCreation) && !prNumber) {
|
|
613
|
+
await log('');
|
|
614
|
+
await log(formatAligned('❌', 'FATAL ERROR:', 'No pull request available'), { level: 'error' });
|
|
615
|
+
await log('');
|
|
616
|
+
await log(' 🔍 What happened:');
|
|
617
|
+
if (isContinueMode) {
|
|
618
|
+
await log(' Continue mode is active but no PR number is available.');
|
|
619
|
+
await log(' This usually means PR creation failed or was skipped incorrectly.');
|
|
620
|
+
} else {
|
|
621
|
+
await log(' Auto-PR creation is enabled but no PR was created.');
|
|
622
|
+
await log(' PR creation may have failed without throwing an error.');
|
|
623
|
+
}
|
|
624
|
+
await log('');
|
|
625
|
+
await log(' 💡 Why this is critical:');
|
|
626
|
+
await log(' The solve command requires a PR for:');
|
|
627
|
+
await log(' • Tracking work progress');
|
|
628
|
+
await log(' • Receiving and processing feedback');
|
|
629
|
+
await log(' • Managing code changes');
|
|
630
|
+
await log(' • Auto-merging when complete');
|
|
631
|
+
await log('');
|
|
632
|
+
await log(' 🔧 How to fix:');
|
|
633
|
+
await log('');
|
|
634
|
+
await log(' Option 1: Create PR manually and use --continue');
|
|
635
|
+
await log(` cd ${tempDir}`);
|
|
636
|
+
await log(` gh pr create --draft --title "Fix issue #${issueNumber}" --body "Fixes #${issueNumber}"`);
|
|
637
|
+
await log(' # Then use the PR URL with solve.mjs');
|
|
638
|
+
await log('');
|
|
639
|
+
await log(' Option 2: Start fresh without continue mode');
|
|
640
|
+
await log(` ./solve.mjs "${issueUrl}" --auto-pull-request-creation`);
|
|
641
|
+
await log('');
|
|
642
|
+
await log(' Option 3: Disable auto-PR creation (Claude will create it)');
|
|
643
|
+
await log(` ./solve.mjs "${issueUrl}" --no-auto-pull-request-creation`);
|
|
644
|
+
await log('');
|
|
645
|
+
await safeExit(1, 'No PR available');
|
|
646
|
+
}
|
|
647
|
+
|
|
648
|
+
if (isContinueMode) {
|
|
649
|
+
await log(`\n${formatAligned('🔄', 'Continue mode:', 'ACTIVE')}`);
|
|
650
|
+
await log(formatAligned('', 'Using existing PR:', `#${prNumber}`, 2));
|
|
651
|
+
await log(formatAligned('', 'PR URL:', prUrl, 2));
|
|
652
|
+
} else if (!argv.autoPullRequestCreation) {
|
|
653
|
+
await log(`\n${formatAligned('⏭️', 'Auto PR creation:', 'DISABLED')}`);
|
|
654
|
+
await log(formatAligned('', 'Workflow:', 'AI will create the PR', 2));
|
|
655
|
+
}
|
|
656
|
+
|
|
657
|
+
// Don't build the prompt yet - we'll build it after we have all the information
|
|
658
|
+
// This includes PR URL (if created) and comment info (if in continue mode)
|
|
659
|
+
|
|
660
|
+
// Start work session using the new module
|
|
661
|
+
await startWorkSession({
|
|
662
|
+
isContinueMode,
|
|
663
|
+
prNumber,
|
|
664
|
+
argv,
|
|
665
|
+
log,
|
|
666
|
+
formatAligned,
|
|
667
|
+
$
|
|
668
|
+
});
|
|
669
|
+
|
|
670
|
+
// Prepare feedback and timestamps using the new module
|
|
671
|
+
const { feedbackLines: preparedFeedbackLines, referenceTime } = await prepareFeedbackAndTimestamps({
|
|
672
|
+
prNumber,
|
|
673
|
+
branchName,
|
|
674
|
+
owner,
|
|
675
|
+
repo,
|
|
676
|
+
issueNumber,
|
|
677
|
+
isContinueMode,
|
|
678
|
+
mergeStateStatus,
|
|
679
|
+
prState,
|
|
680
|
+
argv,
|
|
681
|
+
log,
|
|
682
|
+
formatAligned,
|
|
683
|
+
cleanErrorMessage,
|
|
684
|
+
$
|
|
685
|
+
});
|
|
686
|
+
|
|
687
|
+
// Initialize feedback lines
|
|
688
|
+
let feedbackLines = null;
|
|
689
|
+
|
|
690
|
+
// Add auto-merge feedback lines if any
|
|
691
|
+
if (autoMergeFeedbackLines && autoMergeFeedbackLines.length > 0) {
|
|
692
|
+
if (!feedbackLines) {
|
|
693
|
+
feedbackLines = [];
|
|
694
|
+
}
|
|
695
|
+
feedbackLines.push(...autoMergeFeedbackLines);
|
|
696
|
+
}
|
|
697
|
+
|
|
698
|
+
// Merge feedback lines
|
|
699
|
+
if (preparedFeedbackLines && preparedFeedbackLines.length > 0) {
|
|
700
|
+
if (!feedbackLines) {
|
|
701
|
+
feedbackLines = [];
|
|
702
|
+
}
|
|
703
|
+
feedbackLines.push(...preparedFeedbackLines);
|
|
704
|
+
}
|
|
705
|
+
|
|
706
|
+
// Check for uncommitted changes and merge with feedback
|
|
707
|
+
const uncommittedFeedbackLines = await checkUncommittedChanges({
|
|
708
|
+
tempDir,
|
|
709
|
+
argv,
|
|
710
|
+
log,
|
|
711
|
+
$
|
|
712
|
+
});
|
|
713
|
+
if (uncommittedFeedbackLines && uncommittedFeedbackLines.length > 0) {
|
|
714
|
+
if (!feedbackLines) {
|
|
715
|
+
feedbackLines = [];
|
|
716
|
+
}
|
|
717
|
+
feedbackLines.push(...uncommittedFeedbackLines);
|
|
718
|
+
}
|
|
719
|
+
|
|
720
|
+
// Check for fork actions
|
|
721
|
+
const forkActionsUrl = await checkForkActions({
|
|
722
|
+
argv,
|
|
723
|
+
forkedRepo,
|
|
724
|
+
branchName,
|
|
725
|
+
log,
|
|
726
|
+
formatAligned,
|
|
727
|
+
$
|
|
728
|
+
});
|
|
729
|
+
|
|
730
|
+
// Execute tool command with all prompts and settings
|
|
731
|
+
let toolResult;
|
|
732
|
+
if (argv.tool === 'opencode') {
|
|
733
|
+
const opencodeLib = await import('./opencode.lib.mjs');
|
|
734
|
+
const { executeOpenCode } = opencodeLib;
|
|
735
|
+
const opencodePath = process.env.OPENCODE_PATH || 'opencode';
|
|
736
|
+
|
|
737
|
+
toolResult = await executeOpenCode({
|
|
738
|
+
issueUrl,
|
|
739
|
+
issueNumber,
|
|
740
|
+
prNumber,
|
|
741
|
+
prUrl,
|
|
742
|
+
branchName,
|
|
743
|
+
tempDir,
|
|
744
|
+
isContinueMode,
|
|
745
|
+
mergeStateStatus,
|
|
746
|
+
forkedRepo,
|
|
747
|
+
feedbackLines,
|
|
748
|
+
forkActionsUrl,
|
|
749
|
+
owner,
|
|
750
|
+
repo,
|
|
751
|
+
argv,
|
|
752
|
+
log,
|
|
753
|
+
setLogFile,
|
|
754
|
+
getLogFile,
|
|
755
|
+
formatAligned,
|
|
756
|
+
getResourceSnapshot,
|
|
757
|
+
opencodePath,
|
|
758
|
+
$
|
|
759
|
+
});
|
|
760
|
+
} else if (argv.tool === 'codex') {
|
|
761
|
+
const codexLib = await import('./codex.lib.mjs');
|
|
762
|
+
const { executeCodex } = codexLib;
|
|
763
|
+
const codexPath = process.env.CODEX_PATH || 'codex';
|
|
764
|
+
|
|
765
|
+
toolResult = await executeCodex({
|
|
766
|
+
issueUrl,
|
|
767
|
+
issueNumber,
|
|
768
|
+
prNumber,
|
|
769
|
+
prUrl,
|
|
770
|
+
branchName,
|
|
771
|
+
tempDir,
|
|
772
|
+
isContinueMode,
|
|
773
|
+
mergeStateStatus,
|
|
774
|
+
forkedRepo,
|
|
775
|
+
feedbackLines,
|
|
776
|
+
forkActionsUrl,
|
|
777
|
+
owner,
|
|
778
|
+
repo,
|
|
779
|
+
argv,
|
|
780
|
+
log,
|
|
781
|
+
setLogFile,
|
|
782
|
+
getLogFile,
|
|
783
|
+
formatAligned,
|
|
784
|
+
getResourceSnapshot,
|
|
785
|
+
codexPath,
|
|
786
|
+
$
|
|
787
|
+
});
|
|
788
|
+
} else if (argv.tool === 'agent') {
|
|
789
|
+
const agentLib = await import('./agent.lib.mjs');
|
|
790
|
+
const { executeAgent } = agentLib;
|
|
791
|
+
const agentPath = process.env.AGENT_PATH || 'agent';
|
|
792
|
+
|
|
793
|
+
toolResult = await executeAgent({
|
|
794
|
+
issueUrl,
|
|
795
|
+
issueNumber,
|
|
796
|
+
prNumber,
|
|
797
|
+
prUrl,
|
|
798
|
+
branchName,
|
|
799
|
+
tempDir,
|
|
800
|
+
isContinueMode,
|
|
801
|
+
mergeStateStatus,
|
|
802
|
+
forkedRepo,
|
|
803
|
+
feedbackLines,
|
|
804
|
+
forkActionsUrl,
|
|
805
|
+
owner,
|
|
806
|
+
repo,
|
|
807
|
+
argv,
|
|
808
|
+
log,
|
|
809
|
+
setLogFile,
|
|
810
|
+
getLogFile,
|
|
811
|
+
formatAligned,
|
|
812
|
+
getResourceSnapshot,
|
|
813
|
+
agentPath,
|
|
814
|
+
$
|
|
815
|
+
});
|
|
816
|
+
} else {
|
|
817
|
+
// Default to Claude
|
|
818
|
+
const claudeResult = await executeClaude({
|
|
819
|
+
issueUrl,
|
|
820
|
+
issueNumber,
|
|
821
|
+
prNumber,
|
|
822
|
+
prUrl,
|
|
823
|
+
branchName,
|
|
824
|
+
tempDir,
|
|
825
|
+
isContinueMode,
|
|
826
|
+
mergeStateStatus,
|
|
827
|
+
forkedRepo,
|
|
828
|
+
feedbackLines,
|
|
829
|
+
forkActionsUrl,
|
|
830
|
+
owner,
|
|
831
|
+
repo,
|
|
832
|
+
argv,
|
|
833
|
+
log,
|
|
834
|
+
setLogFile,
|
|
835
|
+
getLogFile,
|
|
836
|
+
formatAligned,
|
|
837
|
+
getResourceSnapshot,
|
|
838
|
+
claudePath,
|
|
839
|
+
$
|
|
840
|
+
});
|
|
841
|
+
toolResult = claudeResult;
|
|
842
|
+
}
|
|
843
|
+
|
|
844
|
+
const { success } = toolResult;
|
|
845
|
+
let sessionId = toolResult.sessionId;
|
|
846
|
+
let anthropicTotalCostUSD = toolResult.anthropicTotalCostUSD;
|
|
847
|
+
let publicPricingEstimate = toolResult.publicPricingEstimate; // Used by agent tool
|
|
848
|
+
let pricingInfo = toolResult.pricingInfo; // Used by agent tool for detailed pricing
|
|
849
|
+
limitReached = toolResult.limitReached;
|
|
850
|
+
cleanupContext.limitReached = limitReached;
|
|
851
|
+
|
|
852
|
+
// Capture limit reset time globally for downstream handlers (auto-continue, cleanup decisions)
|
|
853
|
+
if (toolResult && toolResult.limitResetTime) {
|
|
854
|
+
global.limitResetTime = toolResult.limitResetTime;
|
|
855
|
+
}
|
|
856
|
+
|
|
857
|
+
// Handle limit reached scenario
|
|
858
|
+
if (limitReached) {
|
|
859
|
+
const shouldAutoContinueOnReset = argv.autoContinueOnLimitReset;
|
|
860
|
+
|
|
861
|
+
// If limit was reached but auto-continue-on-limit-reset is NOT enabled, fail immediately
|
|
862
|
+
if (!shouldAutoContinueOnReset) {
|
|
863
|
+
await log('\n❌ USAGE LIMIT REACHED!');
|
|
864
|
+
await log(' The AI tool has reached its usage limit.');
|
|
865
|
+
|
|
866
|
+
// Post failure comment to PR if we have one
|
|
867
|
+
if (prNumber) {
|
|
868
|
+
try {
|
|
869
|
+
const resetTime = global.limitResetTime;
|
|
870
|
+
const failureComment = resetTime
|
|
871
|
+
? `❌ **Usage Limit Reached**\n\nThe AI tool has reached its usage limit. The limit will reset at: **${resetTime}**\n\nThis session has failed because \`--auto-continue-on-limit-reset\` was not enabled.\n\nTo automatically wait for the limit to reset and continue, use:\n\`\`\`bash\n./solve.mjs "${issueUrl}" --resume ${sessionId} --auto-continue-on-limit-reset\n\`\`\``
|
|
872
|
+
: `❌ **Usage Limit Reached**\n\nThe AI tool has reached its usage limit. Please wait for the limit to reset.\n\nThis session has failed because \`--auto-continue-on-limit-reset\` was not enabled.\n\nTo resume after the limit resets, use:\n\`\`\`bash\n./solve.mjs "${issueUrl}" --resume ${sessionId}\n\`\`\``;
|
|
873
|
+
|
|
874
|
+
const commentResult = await $`gh pr comment ${prNumber} --repo ${owner}/${repo} --body ${failureComment}`;
|
|
875
|
+
if (commentResult.code === 0) {
|
|
876
|
+
await log(' Posted failure comment to PR');
|
|
877
|
+
}
|
|
878
|
+
} catch (error) {
|
|
879
|
+
await log(` Warning: Could not post failure comment: ${cleanErrorMessage(error)}`, { verbose: true });
|
|
880
|
+
}
|
|
881
|
+
}
|
|
882
|
+
|
|
883
|
+
await safeExit(1, 'Usage limit reached - use --auto-continue-on-limit-reset to wait for reset');
|
|
884
|
+
} else {
|
|
885
|
+
// auto-continue-on-limit-reset is enabled - post waiting comment
|
|
886
|
+
if (prNumber && global.limitResetTime) {
|
|
887
|
+
try {
|
|
888
|
+
// Calculate wait time in d:h:m:s format
|
|
889
|
+
const validation = await import('./solve.validation.lib.mjs');
|
|
890
|
+
const { calculateWaitTime } = validation;
|
|
891
|
+
const waitMs = calculateWaitTime(global.limitResetTime);
|
|
892
|
+
|
|
893
|
+
const formatWaitTime = (ms) => {
|
|
894
|
+
const seconds = Math.floor(ms / 1000);
|
|
895
|
+
const minutes = Math.floor(seconds / 60);
|
|
896
|
+
const hours = Math.floor(minutes / 60);
|
|
897
|
+
const days = Math.floor(hours / 24);
|
|
898
|
+
const s = seconds % 60;
|
|
899
|
+
const m = minutes % 60;
|
|
900
|
+
const h = hours % 24;
|
|
901
|
+
return `${days}:${String(h).padStart(2, '0')}:${String(m).padStart(2, '0')}:${String(s).padStart(2, '0')}`;
|
|
902
|
+
};
|
|
903
|
+
|
|
904
|
+
const waitingComment = `⏳ **Usage Limit Reached - Waiting to Continue**\n\nThe AI tool has reached its usage limit. Auto-continue is enabled with \`--auto-continue-on-limit-reset\`.\n\n**Reset time:** ${global.limitResetTime}\n**Wait time:** ${formatWaitTime(waitMs)} (days:hours:minutes:seconds)\n\nThe session will automatically resume when the limit resets.\n\nSession ID: \`${sessionId}\``;
|
|
905
|
+
|
|
906
|
+
const commentResult = await $`gh pr comment ${prNumber} --repo ${owner}/${repo} --body ${waitingComment}`;
|
|
907
|
+
if (commentResult.code === 0) {
|
|
908
|
+
await log(' Posted waiting comment to PR');
|
|
909
|
+
}
|
|
910
|
+
} catch (error) {
|
|
911
|
+
await log(` Warning: Could not post waiting comment: ${cleanErrorMessage(error)}`, { verbose: true });
|
|
912
|
+
}
|
|
913
|
+
}
|
|
914
|
+
}
|
|
915
|
+
}
|
|
916
|
+
|
|
917
|
+
if (!success) {
|
|
918
|
+
// If --attach-logs is enabled and we have a PR, attach failure logs before exiting
|
|
919
|
+
if (shouldAttachLogs && sessionId && global.createdPR && global.createdPR.number) {
|
|
920
|
+
await log('\n📄 Attaching failure logs to Pull Request...');
|
|
921
|
+
try {
|
|
922
|
+
// Build resume command if we have session info
|
|
923
|
+
const resumeCommand = sessionId ? `${process.argv[0]} ${process.argv[1]} ${issueUrl} --resume ${sessionId}` : null;
|
|
924
|
+
const logUploadSuccess = await attachLogToGitHub({
|
|
925
|
+
logFile: getLogFile(),
|
|
926
|
+
targetType: 'pr',
|
|
927
|
+
targetNumber: global.createdPR.number,
|
|
928
|
+
owner,
|
|
929
|
+
repo,
|
|
930
|
+
$,
|
|
931
|
+
log,
|
|
932
|
+
sanitizeLogContent,
|
|
933
|
+
// For usage limit, use a dedicated comment format to make it clear and actionable
|
|
934
|
+
isUsageLimit: !!limitReached,
|
|
935
|
+
limitResetTime: limitReached ? toolResult.limitResetTime : null,
|
|
936
|
+
toolName: (argv.tool || 'AI tool').toString().toLowerCase() === 'claude' ? 'Claude' :
|
|
937
|
+
(argv.tool || 'AI tool').toString().toLowerCase() === 'codex' ? 'Codex' :
|
|
938
|
+
(argv.tool || 'AI tool').toString().toLowerCase() === 'opencode' ? 'OpenCode' :
|
|
939
|
+
(argv.tool || 'AI tool').toString().toLowerCase() === 'agent' ? 'Agent' : 'AI tool',
|
|
940
|
+
resumeCommand,
|
|
941
|
+
// Include sessionId so the PR comment can present it
|
|
942
|
+
sessionId,
|
|
943
|
+
// If not a usage limit case, fall back to generic failure format
|
|
944
|
+
errorMessage: limitReached ? undefined : `${argv.tool.toUpperCase()} execution failed`
|
|
945
|
+
});
|
|
946
|
+
|
|
947
|
+
if (logUploadSuccess) {
|
|
948
|
+
await log(' ✅ Failure logs uploaded successfully');
|
|
949
|
+
} else {
|
|
950
|
+
await log(' ⚠️ Failed to upload logs', { verbose: true });
|
|
951
|
+
}
|
|
952
|
+
} catch (uploadError) {
|
|
953
|
+
await log(` ⚠️ Error uploading logs: ${uploadError.message}`, { verbose: true });
|
|
954
|
+
}
|
|
955
|
+
}
|
|
956
|
+
|
|
957
|
+
await safeExit(1, `${argv.tool.toUpperCase()} execution failed`);
|
|
958
|
+
}
|
|
959
|
+
|
|
960
|
+
// Check for uncommitted changes
|
|
961
|
+
// When limit is reached, force auto-commit of any uncommitted changes to preserve work
|
|
962
|
+
const shouldAutoCommit = argv['auto-commit-uncommitted-changes'] || limitReached;
|
|
963
|
+
const autoRestartEnabled = argv['autoRestartOnUncommittedChanges'] !== false;
|
|
964
|
+
const shouldRestart = await checkForUncommittedChanges(tempDir, owner, repo, branchName, $, log, shouldAutoCommit, autoRestartEnabled);
|
|
965
|
+
|
|
966
|
+
// Remove CLAUDE.md now that Claude command has finished
|
|
967
|
+
await cleanupClaudeFile(tempDir, branchName, claudeCommitHash);
|
|
968
|
+
|
|
969
|
+
// Show summary of session and log file
|
|
970
|
+
await showSessionSummary(sessionId, limitReached, argv, issueUrl, tempDir, shouldAttachLogs);
|
|
971
|
+
|
|
972
|
+
// Search for newly created pull requests and comments
|
|
973
|
+
// Pass shouldRestart to prevent early exit when auto-restart is needed
|
|
974
|
+
// Include agent tool pricing data when available (publicPricingEstimate, pricingInfo)
|
|
975
|
+
await verifyResults(owner, repo, branchName, issueNumber, prNumber, prUrl, referenceTime, argv, shouldAttachLogs, shouldRestart, sessionId, tempDir, anthropicTotalCostUSD, publicPricingEstimate, pricingInfo);
|
|
976
|
+
|
|
977
|
+
// Start watch mode if enabled OR if we need to handle uncommitted changes
|
|
978
|
+
if (argv.verbose) {
|
|
979
|
+
await log('');
|
|
980
|
+
await log('🔍 Auto-restart debug:', { verbose: true });
|
|
981
|
+
await log(` argv.watch (user flag): ${argv.watch}`, { verbose: true });
|
|
982
|
+
await log(` shouldRestart (auto-detected): ${shouldRestart}`, { verbose: true });
|
|
983
|
+
await log(` temporaryWatch (will be enabled): ${shouldRestart && !argv.watch}`, { verbose: true });
|
|
984
|
+
await log(` prNumber: ${prNumber || 'null'}`, { verbose: true });
|
|
985
|
+
await log(` prBranch: ${prBranch || 'null'}`, { verbose: true });
|
|
986
|
+
await log(` branchName: ${branchName}`, { verbose: true });
|
|
987
|
+
await log(` isContinueMode: ${isContinueMode}`, { verbose: true });
|
|
988
|
+
}
|
|
989
|
+
|
|
990
|
+
// If uncommitted changes detected and auto-commit is disabled, enter temporary watch mode
|
|
991
|
+
const temporaryWatchMode = shouldRestart && !argv.watch;
|
|
992
|
+
if (temporaryWatchMode) {
|
|
993
|
+
await log('');
|
|
994
|
+
await log('🔄 AUTO-RESTART: Uncommitted changes detected');
|
|
995
|
+
await log(' Starting temporary monitoring cycle (NOT --watch mode)');
|
|
996
|
+
await log(' The tool will run once more to commit the changes');
|
|
997
|
+
await log(' Will exit automatically after changes are committed');
|
|
998
|
+
await log('');
|
|
999
|
+
}
|
|
1000
|
+
|
|
1001
|
+
const watchResult = await startWatchMode({
|
|
1002
|
+
issueUrl,
|
|
1003
|
+
owner,
|
|
1004
|
+
repo,
|
|
1005
|
+
issueNumber,
|
|
1006
|
+
prNumber,
|
|
1007
|
+
prBranch,
|
|
1008
|
+
branchName,
|
|
1009
|
+
tempDir,
|
|
1010
|
+
argv: {
|
|
1011
|
+
...argv,
|
|
1012
|
+
watch: argv.watch || shouldRestart, // Enable watch if uncommitted changes
|
|
1013
|
+
temporaryWatch: temporaryWatchMode // Flag to indicate temporary watch mode
|
|
1014
|
+
}
|
|
1015
|
+
});
|
|
1016
|
+
|
|
1017
|
+
// Update session data with latest from watch mode for accurate pricing
|
|
1018
|
+
if (watchResult && watchResult.latestSessionId) {
|
|
1019
|
+
sessionId = watchResult.latestSessionId;
|
|
1020
|
+
anthropicTotalCostUSD = watchResult.latestAnthropicCost;
|
|
1021
|
+
if (argv.verbose) {
|
|
1022
|
+
await log('');
|
|
1023
|
+
await log('📊 Updated session data from watch mode:', { verbose: true });
|
|
1024
|
+
await log(` Session ID: ${sessionId}`, { verbose: true });
|
|
1025
|
+
if (anthropicTotalCostUSD !== null && anthropicTotalCostUSD !== undefined) {
|
|
1026
|
+
await log(` Anthropic cost: $${anthropicTotalCostUSD.toFixed(6)}`, { verbose: true });
|
|
1027
|
+
}
|
|
1028
|
+
}
|
|
1029
|
+
}
|
|
1030
|
+
|
|
1031
|
+
// Track whether logs were successfully attached (used by endWorkSession)
|
|
1032
|
+
let logsAttached = false;
|
|
1033
|
+
|
|
1034
|
+
// After watch mode completes (either user watch or temporary)
|
|
1035
|
+
// Push any committed changes if this was a temporary watch mode
|
|
1036
|
+
if (temporaryWatchMode) {
|
|
1037
|
+
await log('');
|
|
1038
|
+
await log('📤 Pushing committed changes to GitHub...');
|
|
1039
|
+
await log('');
|
|
1040
|
+
|
|
1041
|
+
try {
|
|
1042
|
+
const pushResult = await $({ cwd: tempDir })`git push origin ${branchName}`;
|
|
1043
|
+
if (pushResult.code === 0) {
|
|
1044
|
+
await log('✅ Changes pushed successfully to remote branch');
|
|
1045
|
+
await log(` Branch: ${branchName}`);
|
|
1046
|
+
await log('');
|
|
1047
|
+
} else {
|
|
1048
|
+
const errorMsg = pushResult.stderr?.toString() || 'Unknown error';
|
|
1049
|
+
await log('⚠️ Push failed:', { level: 'error' });
|
|
1050
|
+
await log(` ${errorMsg.trim()}`, { level: 'error' });
|
|
1051
|
+
await log(' Please push manually:', { level: 'error' });
|
|
1052
|
+
await log(` cd ${tempDir} && git push origin ${branchName}`, { level: 'error' });
|
|
1053
|
+
}
|
|
1054
|
+
} catch (error) {
|
|
1055
|
+
await log('⚠️ Push failed:', { level: 'error' });
|
|
1056
|
+
await log(` ${cleanErrorMessage(error)}`, { level: 'error' });
|
|
1057
|
+
await log(' Please push manually:', { level: 'error' });
|
|
1058
|
+
await log(` cd ${tempDir} && git push origin ${branchName}`, { level: 'error' });
|
|
1059
|
+
}
|
|
1060
|
+
|
|
1061
|
+
// Attach updated logs to PR after auto-restart completes
|
|
1062
|
+
if (shouldAttachLogs && prNumber) {
|
|
1063
|
+
await log('📎 Uploading working session logs to Pull Request...');
|
|
1064
|
+
try {
|
|
1065
|
+
const logUploadSuccess = await attachLogToGitHub({
|
|
1066
|
+
logFile: getLogFile(),
|
|
1067
|
+
targetType: 'pr',
|
|
1068
|
+
targetNumber: prNumber,
|
|
1069
|
+
owner,
|
|
1070
|
+
repo,
|
|
1071
|
+
$,
|
|
1072
|
+
log,
|
|
1073
|
+
sanitizeLogContent,
|
|
1074
|
+
verbose: argv.verbose,
|
|
1075
|
+
sessionId,
|
|
1076
|
+
tempDir,
|
|
1077
|
+
anthropicTotalCostUSD
|
|
1078
|
+
});
|
|
1079
|
+
|
|
1080
|
+
if (logUploadSuccess) {
|
|
1081
|
+
await log('✅ Working session logs uploaded successfully');
|
|
1082
|
+
logsAttached = true;
|
|
1083
|
+
} else {
|
|
1084
|
+
await log('⚠️ Failed to upload working session logs', { level: 'warning' });
|
|
1085
|
+
}
|
|
1086
|
+
} catch (uploadError) {
|
|
1087
|
+
await log(`⚠️ Error uploading logs: ${uploadError.message}`, { level: 'warning' });
|
|
1088
|
+
}
|
|
1089
|
+
}
|
|
1090
|
+
}
|
|
1091
|
+
|
|
1092
|
+
// End work session using the new module
|
|
1093
|
+
await endWorkSession({
|
|
1094
|
+
isContinueMode,
|
|
1095
|
+
prNumber,
|
|
1096
|
+
argv,
|
|
1097
|
+
log,
|
|
1098
|
+
formatAligned,
|
|
1099
|
+
$,
|
|
1100
|
+
logsAttached
|
|
1101
|
+
});
|
|
1102
|
+
} catch (error) {
|
|
1103
|
+
// Don't report authentication errors to Sentry as they are user configuration issues
|
|
1104
|
+
if (!error.isAuthError) {
|
|
1105
|
+
reportError(error, {
|
|
1106
|
+
context: 'solve_main',
|
|
1107
|
+
operation: 'main_execution'
|
|
1108
|
+
});
|
|
1109
|
+
}
|
|
1110
|
+
await handleMainExecutionError({
|
|
1111
|
+
error,
|
|
1112
|
+
log,
|
|
1113
|
+
cleanErrorMessage,
|
|
1114
|
+
absoluteLogPath,
|
|
1115
|
+
shouldAttachLogs,
|
|
1116
|
+
argv,
|
|
1117
|
+
global,
|
|
1118
|
+
owner,
|
|
1119
|
+
repo,
|
|
1120
|
+
getLogFile,
|
|
1121
|
+
attachLogToGitHub,
|
|
1122
|
+
sanitizeLogContent,
|
|
1123
|
+
$
|
|
1124
|
+
});
|
|
1125
|
+
} finally {
|
|
1126
|
+
// Clean up temporary directory using repository module
|
|
1127
|
+
await cleanupTempDirectory(tempDir, argv, limitReached);
|
|
1128
|
+
}
|