@link-assistant/hive-mind 1.30.4 ā 1.31.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +57 -0
- package/package.json +1 -1
- package/src/agent.lib.mjs +3 -0
- package/src/agent.prompts.lib.mjs +6 -1
- package/src/claude.lib.mjs +4 -1
- package/src/claude.prompts.lib.mjs +6 -1
- package/src/codex.lib.mjs +2 -0
- package/src/codex.prompts.lib.mjs +6 -1
- package/src/exit-handler.lib.mjs +16 -1
- package/src/github-merge-ready-sync.lib.mjs +251 -0
- package/src/github-merge.lib.mjs +15 -185
- package/src/opencode.lib.mjs +2 -0
- package/src/opencode.prompts.lib.mjs +6 -1
- package/src/option-suggestions.lib.mjs +3 -0
- package/src/solve.auto-ensure.lib.mjs +120 -0
- package/src/solve.config.lib.mjs +26 -0
- package/src/solve.interrupt.lib.mjs +70 -0
- package/src/solve.mjs +26 -27
- package/src/telegram-merge-command.lib.mjs +23 -1
- package/src/telegram-merge-queue.lib.mjs +16 -0
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,62 @@
|
|
|
1
1
|
# @link-assistant/hive-mind
|
|
2
2
|
|
|
3
|
+
## 1.31.0
|
|
4
|
+
|
|
5
|
+
### Minor Changes
|
|
6
|
+
|
|
7
|
+
- feat: add --finalize option (Issue #1383)
|
|
8
|
+
|
|
9
|
+
Adds new experimental CLI options to the `solve` command:
|
|
10
|
+
- `--finalize [N]`: After the main solve completes, automatically restarts the AI tool N times (default: 1 when used as a flag) with a requirements-check prompt to verify all requirements are met. Uses the same model as `--model` by default.
|
|
11
|
+
- `--finalize-model`: Override the model used during `--finalize` iterations (defaults to `--model`).
|
|
12
|
+
- `--prompt-ensure-all-requirements-are-met`: Adds a system prompt hint in the "Self review" section instructing the AI to ensure all changes are correct, consistent, validated, tested, logged and fully meet all discussed requirements. Enabled automatically during `--finalize` iterations only (not the first regular run).
|
|
13
|
+
|
|
14
|
+
This forces the AI tool to double-check itself after the main solve, verifying changes meet all requirements from the issue description and PR comments, and that CI/CD checks pass.
|
|
15
|
+
|
|
16
|
+
feat: auto-commit uncommitted changes and upload log on CTRL+C interrupt (Issue #1351)
|
|
17
|
+
|
|
18
|
+
Previously, when a user pressed CTRL+C to interrupt a running solve session, uncommitted changes were silently lost (or left uncommitted) and log files were not uploaded to the PR/issue even when `--attach-logs` was enabled. Additionally, the terminal showed "Claude command completed" instead of "Claude command interrupted".
|
|
19
|
+
|
|
20
|
+
Now on CTRL+C:
|
|
21
|
+
1. **Auto-commit**: Any uncommitted changes in the working directory are automatically committed and pushed to the branch before cleanup occurs.
|
|
22
|
+
2. **Log upload**: If `--attach-logs` is enabled, the log file is automatically uploaded to the GitHub PR/issue as a comment.
|
|
23
|
+
3. **Accurate message**: The terminal now correctly shows "Claude command interrupted" instead of "Claude command completed" when the process exits with code 130 (SIGINT).
|
|
24
|
+
|
|
25
|
+
Changes made:
|
|
26
|
+
- `src/exit-handler.lib.mjs`: Added optional `interrupt` parameter to `initializeExitHandler()`; SIGINT handler now calls it before cleanup, guarded against double invocation
|
|
27
|
+
- `src/solve.mjs`: Extended `cleanupContext` with branch/PR/owner/repo fields; new `interruptWrapper` auto-commits and uploads logs on CTRL+C
|
|
28
|
+
- `src/claude.lib.mjs`, `src/opencode.lib.mjs`, `src/codex.lib.mjs`, `src/agent.lib.mjs`: Detect exit code 130 and print "interrupted" instead of "completed"
|
|
29
|
+
|
|
30
|
+
Full case study analysis including timeline reconstruction, root cause analysis, and implementation details in `docs/case-studies/issue-1351/`.
|
|
31
|
+
|
|
32
|
+
fix: prevent false positive ready tag sync by using issue timeline API (Issue #1413)
|
|
33
|
+
|
|
34
|
+
Previously, `syncReadyTags()` used a GitHub full-text body search to find PRs linked to an issue:
|
|
35
|
+
|
|
36
|
+
```js
|
|
37
|
+
gh pr list --search "in:body closes #1411 OR fixes #1411 OR resolves #1411"
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
This caused a false positive: PR #843 matched because `1411` appeared as a source code line reference inside its body, not as a genuine issue-closing keyword.
|
|
41
|
+
|
|
42
|
+
Now uses the GitHub issue timeline API (`GET /repos/{owner}/{repo}/issues/{issue_number}/timeline`) to find PRs with genuine `cross-referenced` events, which is the same data GitHub uses to auto-close issues when PRs are merged.
|
|
43
|
+
|
|
44
|
+
fix: hide cancel button and show cancelling state on /merge cancel (Issue #1407)
|
|
45
|
+
|
|
46
|
+
When user clicked the "š Cancel" button during `/merge` queue processing, the cancel button remained visible in the Telegram message until the current PR finished processing (potentially hours if waiting for CI). The toast message "The current PR will finish processing" was also confusing.
|
|
47
|
+
|
|
48
|
+
The fix immediately hides the cancel button by editing the message without `reply_markup`, shows a "š Cancelling..." indicator in the progress message when cancellation is requested, and adds `isCancelled` support to `waitForCI()` for early exit when the operation is cancelled.
|
|
49
|
+
|
|
50
|
+
## 1.30.5
|
|
51
|
+
|
|
52
|
+
### Patch Changes
|
|
53
|
+
|
|
54
|
+
- a9a58ab: Switch Docker builds to registry cache for faster arm64 builds
|
|
55
|
+
- Changed from GitHub Actions cache to Docker Hub registry cache backend
|
|
56
|
+
- Use architecture-specific cache tags (buildcache-amd64, buildcache-arm64) to prevent cross-platform cache overwriting
|
|
57
|
+
- Increased Docker job timeout from 45 to 60 minutes for safety margin
|
|
58
|
+
- Added comprehensive case study documentation for issue #1415
|
|
59
|
+
|
|
3
60
|
## 1.30.4
|
|
4
61
|
|
|
5
62
|
### Patch Changes
|
package/package.json
CHANGED
package/src/agent.lib.mjs
CHANGED
|
@@ -911,6 +911,9 @@ export const executeAgentCommand = async params => {
|
|
|
911
911
|
// Explicit JSON error message from agent (Issue #1201: includes streaming-detected errors)
|
|
912
912
|
errorInfo.message = `Agent reported error: ${outputError.match}`;
|
|
913
913
|
await log(`\n\nā ${errorInfo.message}`, { level: 'error' });
|
|
914
|
+
} else if (exitCode === 130) {
|
|
915
|
+
errorInfo.message = 'Agent command interrupted (CTRL+C)';
|
|
916
|
+
await log('\n\nā ļø Agent command interrupted (CTRL+C)');
|
|
914
917
|
} else {
|
|
915
918
|
errorInfo.message = `Agent command failed with exit code ${exitCode}`;
|
|
916
919
|
await log(`\n\nā ${errorInfo.message}`, { level: 'error' });
|
|
@@ -212,7 +212,12 @@ Self review.
|
|
|
212
212
|
- When you check your solution draft, run all tests locally.
|
|
213
213
|
- When you check your solution draft, verify git status shows a clean working tree with no uncommitted changes.
|
|
214
214
|
- When you compare with repo style, use gh pr diff [number].
|
|
215
|
-
- When you finalize, confirm code, tests, and description are consistent
|
|
215
|
+
- When you finalize, confirm code, tests, and description are consistent.${
|
|
216
|
+
argv && argv.promptEnsureAllRequirementsAreMet
|
|
217
|
+
? `
|
|
218
|
+
- When no explicit feedback or requirements is provided, ensure all changes are correct, consistent, validated, tested, logged and fully meet all discussed requirements (check issue description and all comments in issue and in pull request). Ensure all CI/CD checks pass.`
|
|
219
|
+
: ''
|
|
220
|
+
}
|
|
216
221
|
|
|
217
222
|
GitHub CLI command patterns.
|
|
218
223
|
- IMPORTANT: Always use --paginate flag when fetching lists from GitHub API to ensure all results are returned (GitHub returns max 30 per page by default).
|
package/src/claude.lib.mjs
CHANGED
|
@@ -1295,7 +1295,10 @@ export const executeClaudeCommand = async params => {
|
|
|
1295
1295
|
}
|
|
1296
1296
|
// Issue #1088: If error_during_execution occurred but command didn't fail,
|
|
1297
1297
|
// log it as "Finished with errors" instead of pure success
|
|
1298
|
-
|
|
1298
|
+
// Issue #1351: Distinguish interrupted sessions (exit code 130) from normal completion
|
|
1299
|
+
if (exitCode === 130) {
|
|
1300
|
+
await log('\n\nā ļø Claude command interrupted (CTRL+C)');
|
|
1301
|
+
} else if (errorDuringExecution) {
|
|
1299
1302
|
await log('\n\nā ļø Claude command finished with errors');
|
|
1300
1303
|
} else {
|
|
1301
1304
|
await log('\n\nā
Claude command completed');
|
|
@@ -254,7 +254,12 @@ Self review.
|
|
|
254
254
|
- When you check your solution draft, run all tests locally.
|
|
255
255
|
- When you check your solution draft, verify git status shows a clean working tree with no uncommitted changes.
|
|
256
256
|
- When you compare with repo style, use gh pr diff [number].
|
|
257
|
-
- When you finalize, confirm code, tests, and description are consistent
|
|
257
|
+
- When you finalize, confirm code, tests, and description are consistent.${
|
|
258
|
+
argv && argv.promptEnsureAllRequirementsAreMet
|
|
259
|
+
? `
|
|
260
|
+
- When no explicit feedback or requirements is provided, ensure all changes are correct, consistent, validated, tested, logged and fully meet all discussed requirements (check issue description and all comments in issue and in pull request). Ensure all CI/CD checks pass.`
|
|
261
|
+
: ''
|
|
262
|
+
}
|
|
258
263
|
|
|
259
264
|
GitHub CLI command patterns.
|
|
260
265
|
- IMPORTANT: Always use --paginate flag when fetching lists from GitHub API to ensure all results are returned (GitHub returns max 30 per page by default).
|
package/src/codex.lib.mjs
CHANGED
|
@@ -398,6 +398,8 @@ export const executeCodexCommand = async params => {
|
|
|
398
398
|
for (const line of messageLines) {
|
|
399
399
|
await log(line, { level: 'warning' });
|
|
400
400
|
}
|
|
401
|
+
} else if (exitCode === 130) {
|
|
402
|
+
await log('\n\nā ļø Codex command interrupted (CTRL+C)');
|
|
401
403
|
} else {
|
|
402
404
|
await log(`\n\nā Codex command failed with exit code ${exitCode}`, { level: 'error' });
|
|
403
405
|
}
|
|
@@ -220,7 +220,12 @@ Self review.
|
|
|
220
220
|
- When you check your solution draft, run all tests locally.
|
|
221
221
|
- When you check your solution draft, verify git status shows a clean working tree with no uncommitted changes.
|
|
222
222
|
- When you compare with repo style, use gh pr diff [number].
|
|
223
|
-
- When you finalize, confirm code, tests, and description are consistent
|
|
223
|
+
- When you finalize, confirm code, tests, and description are consistent.${
|
|
224
|
+
argv && argv.promptEnsureAllRequirementsAreMet
|
|
225
|
+
? `
|
|
226
|
+
- When no explicit feedback or requirements is provided, ensure all changes are correct, consistent, validated, tested, logged and fully meet all discussed requirements (check issue description and all comments in issue and in pull request). Ensure all CI/CD checks pass.`
|
|
227
|
+
: ''
|
|
228
|
+
}
|
|
224
229
|
|
|
225
230
|
GitHub CLI command patterns.
|
|
226
231
|
- IMPORTANT: Always use --paginate flag when fetching lists from GitHub API to ensure all results are returned (GitHub returns max 30 per page by default).
|
package/src/exit-handler.lib.mjs
CHANGED
|
@@ -25,17 +25,22 @@ let exitMessageShown = false;
|
|
|
25
25
|
let getLogPathFunction = null;
|
|
26
26
|
let logFunction = null;
|
|
27
27
|
let cleanupFunction = null;
|
|
28
|
+
let interruptFunction = null;
|
|
29
|
+
let interruptHandlerRan = false;
|
|
28
30
|
|
|
29
31
|
/**
|
|
30
32
|
* Initialize the exit handler with required dependencies
|
|
31
33
|
* @param {Function} getLogPath - Function that returns the current log path
|
|
32
34
|
* @param {Function} log - Logging function
|
|
33
35
|
* @param {Function} cleanup - Optional cleanup function to call on exit
|
|
36
|
+
* @param {Function} interrupt - Optional interrupt function to call on SIGINT/SIGTERM before cleanup
|
|
37
|
+
* (e.g., auto-commit uncommitted changes, upload logs)
|
|
34
38
|
*/
|
|
35
|
-
export const initializeExitHandler = (getLogPath, log, cleanup = null) => {
|
|
39
|
+
export const initializeExitHandler = (getLogPath, log, cleanup = null, interrupt = null) => {
|
|
36
40
|
getLogPathFunction = getLogPath;
|
|
37
41
|
logFunction = log;
|
|
38
42
|
cleanupFunction = cleanup;
|
|
43
|
+
interruptFunction = interrupt;
|
|
39
44
|
};
|
|
40
45
|
|
|
41
46
|
/**
|
|
@@ -114,6 +119,15 @@ export const installGlobalExitHandlers = () => {
|
|
|
114
119
|
|
|
115
120
|
// Handle SIGINT (CTRL+C)
|
|
116
121
|
process.on('SIGINT', async () => {
|
|
122
|
+
// Run interrupt handler first (auto-commit, log upload, etc.) ā guard against double invocation
|
|
123
|
+
if (interruptFunction && !interruptHandlerRan) {
|
|
124
|
+
interruptHandlerRan = true;
|
|
125
|
+
try {
|
|
126
|
+
await interruptFunction();
|
|
127
|
+
} catch {
|
|
128
|
+
// Ignore interrupt handler errors
|
|
129
|
+
}
|
|
130
|
+
}
|
|
117
131
|
if (cleanupFunction) {
|
|
118
132
|
try {
|
|
119
133
|
await cleanupFunction();
|
|
@@ -208,4 +222,5 @@ export const installGlobalExitHandlers = () => {
|
|
|
208
222
|
*/
|
|
209
223
|
export const resetExitHandler = () => {
|
|
210
224
|
exitMessageShown = false;
|
|
225
|
+
interruptHandlerRan = false;
|
|
211
226
|
};
|
|
@@ -0,0 +1,251 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* GitHub Merge Ready Tag Sync Library
|
|
4
|
+
*
|
|
5
|
+
* Provides utilities for syncing 'ready' tags between linked PRs and issues,
|
|
6
|
+
* and for finding genuinely linked PRs via the GitHub issue timeline API.
|
|
7
|
+
* Split from github-merge.lib.mjs to maintain file size limits.
|
|
8
|
+
*
|
|
9
|
+
* @see https://github.com/link-assistant/hive-mind/issues/1413
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
import { promisify } from 'util';
|
|
13
|
+
import { exec as execCallback } from 'child_process';
|
|
14
|
+
|
|
15
|
+
const exec = promisify(execCallback);
|
|
16
|
+
|
|
17
|
+
import { extractLinkedIssueNumber } from './github-linking.lib.mjs';
|
|
18
|
+
|
|
19
|
+
// READY_LABEL is also exported from github-merge.lib.mjs (which re-exports it from here)
|
|
20
|
+
export const READY_LABEL = {
|
|
21
|
+
name: 'ready',
|
|
22
|
+
description: 'Is ready to be merged',
|
|
23
|
+
color: '0E8A16', // Green color
|
|
24
|
+
};
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* Add a label to a GitHub issue or pull request
|
|
28
|
+
* @param {'issue'|'pr'} type - Whether to add to issue or PR
|
|
29
|
+
* @param {string} owner - Repository owner
|
|
30
|
+
* @param {string} repo - Repository name
|
|
31
|
+
* @param {number} number - Issue or PR number
|
|
32
|
+
* @param {string} labelName - Label name to add
|
|
33
|
+
* @param {boolean} verbose - Whether to log verbose output
|
|
34
|
+
* @returns {Promise<{success: boolean, error: string|null}>}
|
|
35
|
+
*/
|
|
36
|
+
async function addLabel(type, owner, repo, number, labelName, verbose = false) {
|
|
37
|
+
const cmd = type === 'issue' ? 'issue' : 'pr';
|
|
38
|
+
try {
|
|
39
|
+
await exec(`gh ${cmd} edit ${number} --repo ${owner}/${repo} --add-label "${labelName}"`);
|
|
40
|
+
if (verbose) console.log(`[VERBOSE] /merge: Added '${labelName}' label to ${type} #${number}`);
|
|
41
|
+
return { success: true, error: null };
|
|
42
|
+
} catch (error) {
|
|
43
|
+
if (verbose) console.log(`[VERBOSE] /merge: Failed to add label to ${type} #${number}: ${error.message}`);
|
|
44
|
+
return { success: false, error: error.message };
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
/**
|
|
49
|
+
* Get open PRs that are genuinely linked to an issue via GitHub's issue timeline.
|
|
50
|
+
*
|
|
51
|
+
* Issue #1413: This replaces the previous full-text body search approach which
|
|
52
|
+
* caused false positives. For example, a search for `fixes #1411` would incorrectly
|
|
53
|
+
* match PR #843 because its body contained the string `1411ā` as a source code line
|
|
54
|
+
* number in a code snippet ā not as an issue closing reference.
|
|
55
|
+
*
|
|
56
|
+
* The GitHub issue timeline API returns `cross-referenced` events for PRs that
|
|
57
|
+
* explicitly close the issue using GitHub's reserved keywords (fixes/closes/resolves).
|
|
58
|
+
* This is the same data GitHub uses to auto-close issues when PRs are merged, so
|
|
59
|
+
* it reliably identifies genuine closing references.
|
|
60
|
+
*
|
|
61
|
+
* @param {string} owner - Repository owner
|
|
62
|
+
* @param {string} repo - Repository name
|
|
63
|
+
* @param {number} issueNumber - Issue number to find linked PRs for
|
|
64
|
+
* @param {boolean} verbose - Whether to log verbose output
|
|
65
|
+
* @returns {Promise<Array<{number: number, title: string}>>} Array of open PRs that close this issue
|
|
66
|
+
*/
|
|
67
|
+
export async function getLinkedPRsFromTimeline(owner, repo, issueNumber, verbose = false) {
|
|
68
|
+
try {
|
|
69
|
+
const { stdout: timelineJson } = await exec(`gh api repos/${owner}/${repo}/issues/${issueNumber}/timeline --paginate`);
|
|
70
|
+
const timeline = JSON.parse(timelineJson.trim() || '[]');
|
|
71
|
+
|
|
72
|
+
// Extract cross-referenced events where the source is an open PR
|
|
73
|
+
// (source.issue.pull_request != null means the source is a PR, not a plain issue)
|
|
74
|
+
const linkedPRNumbers = new Set();
|
|
75
|
+
const linkedPRs = [];
|
|
76
|
+
|
|
77
|
+
for (const event of timeline) {
|
|
78
|
+
if (event.event === 'cross-referenced' && event.source?.issue?.pull_request != null && event.source?.issue?.state === 'open') {
|
|
79
|
+
const prNumber = event.source.issue.number;
|
|
80
|
+
if (!linkedPRNumbers.has(prNumber)) {
|
|
81
|
+
linkedPRNumbers.add(prNumber);
|
|
82
|
+
linkedPRs.push({
|
|
83
|
+
number: prNumber,
|
|
84
|
+
title: event.source.issue.title || '',
|
|
85
|
+
});
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
if (verbose) {
|
|
91
|
+
console.log(`[VERBOSE] /merge: Issue #${issueNumber} has ${linkedPRs.length} genuinely linked open PR(s) via timeline`);
|
|
92
|
+
for (const pr of linkedPRs) {
|
|
93
|
+
console.log(`[VERBOSE] /merge: PR #${pr.number}: ${pr.title}`);
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
return linkedPRs;
|
|
98
|
+
} catch (error) {
|
|
99
|
+
if (verbose) {
|
|
100
|
+
console.log(`[VERBOSE] /merge: Error fetching timeline for issue #${issueNumber}: ${error.message}`);
|
|
101
|
+
}
|
|
102
|
+
return [];
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
/**
|
|
107
|
+
* Sync 'ready' tags between linked pull requests and issues
|
|
108
|
+
*
|
|
109
|
+
* Issue #1367: Before building the merge queue, ensure that:
|
|
110
|
+
* 1. If a PR has 'ready' label and is clearly linked to an issue (via standard GitHub
|
|
111
|
+
* keywords in the PR body/title), the issue also gets 'ready' label.
|
|
112
|
+
* 2. If an issue has 'ready' label and has a clearly linked open PR, the PR also gets
|
|
113
|
+
* 'ready' label.
|
|
114
|
+
*
|
|
115
|
+
* This ensures the final list of ready PRs reflects all ready work, regardless of
|
|
116
|
+
* where the 'ready' label was originally applied.
|
|
117
|
+
*
|
|
118
|
+
* @param {string} owner - Repository owner
|
|
119
|
+
* @param {string} repo - Repository name
|
|
120
|
+
* @param {boolean} verbose - Whether to log verbose output
|
|
121
|
+
* @returns {Promise<{synced: number, errors: number, details: Array<Object>}>}
|
|
122
|
+
*/
|
|
123
|
+
export async function syncReadyTags(owner, repo, verbose = false) {
|
|
124
|
+
const synced = [];
|
|
125
|
+
const errors = [];
|
|
126
|
+
|
|
127
|
+
if (verbose) {
|
|
128
|
+
console.log(`[VERBOSE] /merge: Syncing 'ready' tags for ${owner}/${repo}...`);
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
try {
|
|
132
|
+
// Fetch open PRs with 'ready' label (including body for link detection)
|
|
133
|
+
const { stdout: prsJson } = await exec(`gh pr list --repo ${owner}/${repo} --label "${READY_LABEL.name}" --state open --json number,title,body,labels --limit 100`);
|
|
134
|
+
const readyPRs = JSON.parse(prsJson.trim() || '[]');
|
|
135
|
+
|
|
136
|
+
if (verbose) {
|
|
137
|
+
console.log(`[VERBOSE] /merge: Found ${readyPRs.length} open PRs with 'ready' label for tag sync`);
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
// Fetch open issues with 'ready' label
|
|
141
|
+
const { stdout: issuesJson } = await exec(`gh issue list --repo ${owner}/${repo} --label "${READY_LABEL.name}" --state open --json number,title --limit 100`);
|
|
142
|
+
const readyIssues = JSON.parse(issuesJson.trim() || '[]');
|
|
143
|
+
|
|
144
|
+
if (verbose) {
|
|
145
|
+
console.log(`[VERBOSE] /merge: Found ${readyIssues.length} open issues with 'ready' label for tag sync`);
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
// Build a set of issue numbers that already have 'ready'
|
|
149
|
+
const readyIssueNumbers = new Set(readyIssues.map(i => String(i.number)));
|
|
150
|
+
|
|
151
|
+
// Step 1: For each PR with 'ready', find linked issue and sync label to it
|
|
152
|
+
for (const pr of readyPRs) {
|
|
153
|
+
try {
|
|
154
|
+
const prBody = pr.body || '';
|
|
155
|
+
const linkedIssueNumber = extractLinkedIssueNumber(prBody);
|
|
156
|
+
|
|
157
|
+
if (!linkedIssueNumber) {
|
|
158
|
+
if (verbose) {
|
|
159
|
+
console.log(`[VERBOSE] /merge: PR #${pr.number} has no linked issue (no closing keyword in body)`);
|
|
160
|
+
}
|
|
161
|
+
continue;
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
if (readyIssueNumbers.has(String(linkedIssueNumber))) {
|
|
165
|
+
if (verbose) {
|
|
166
|
+
console.log(`[VERBOSE] /merge: Issue #${linkedIssueNumber} already has 'ready' label (linked from PR #${pr.number})`);
|
|
167
|
+
}
|
|
168
|
+
continue;
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
// Issue doesn't have 'ready' label yet - add it
|
|
172
|
+
if (verbose) {
|
|
173
|
+
console.log(`[VERBOSE] /merge: PR #${pr.number} has 'ready', adding to linked issue #${linkedIssueNumber}`);
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
const result = await addLabel('issue', owner, repo, linkedIssueNumber, READY_LABEL.name, verbose);
|
|
177
|
+
if (result.success) {
|
|
178
|
+
synced.push({ type: 'pr-to-issue', prNumber: pr.number, issueNumber: Number(linkedIssueNumber) });
|
|
179
|
+
// Mark this issue as now having 'ready' so we don't process it again
|
|
180
|
+
readyIssueNumbers.add(String(linkedIssueNumber));
|
|
181
|
+
} else {
|
|
182
|
+
errors.push({ type: 'pr-to-issue', prNumber: pr.number, issueNumber: Number(linkedIssueNumber), error: result.error });
|
|
183
|
+
}
|
|
184
|
+
} catch (err) {
|
|
185
|
+
if (verbose) {
|
|
186
|
+
console.log(`[VERBOSE] /merge: Error syncing label from PR #${pr.number}: ${err.message}`);
|
|
187
|
+
}
|
|
188
|
+
errors.push({ type: 'pr-to-issue', prNumber: pr.number, error: err.message });
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
|
|
192
|
+
// Build a set of PR numbers that already have 'ready'
|
|
193
|
+
const readyPRNumbers = new Set(readyPRs.map(p => String(p.number)));
|
|
194
|
+
|
|
195
|
+
// Step 2: For each issue with 'ready', find linked PRs and sync label to them
|
|
196
|
+
for (const issue of readyIssues) {
|
|
197
|
+
try {
|
|
198
|
+
// Issue #1413: Use the GitHub issue timeline API to find PRs that genuinely
|
|
199
|
+
// close this issue via closing keywords. This avoids false positives from
|
|
200
|
+
// full-text search, which can match PRs that contain the issue number as a
|
|
201
|
+
// source code line number (e.g. "1411ā await log(...)") rather than as a
|
|
202
|
+
// real closing reference.
|
|
203
|
+
const linkedPRs = await getLinkedPRsFromTimeline(owner, repo, issue.number, verbose);
|
|
204
|
+
|
|
205
|
+
for (const linkedPR of linkedPRs) {
|
|
206
|
+
if (readyPRNumbers.has(String(linkedPR.number))) {
|
|
207
|
+
if (verbose) {
|
|
208
|
+
console.log(`[VERBOSE] /merge: PR #${linkedPR.number} already has 'ready' label (linked from issue #${issue.number})`);
|
|
209
|
+
}
|
|
210
|
+
continue;
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
// PR doesn't have 'ready' label yet - add it
|
|
214
|
+
if (verbose) {
|
|
215
|
+
console.log(`[VERBOSE] /merge: Issue #${issue.number} has 'ready', adding to linked PR #${linkedPR.number}`);
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
const result = await addLabel('pr', owner, repo, linkedPR.number, READY_LABEL.name, verbose);
|
|
219
|
+
if (result.success) {
|
|
220
|
+
synced.push({ type: 'issue-to-pr', issueNumber: issue.number, prNumber: linkedPR.number });
|
|
221
|
+
// Mark this PR as now having 'ready'
|
|
222
|
+
readyPRNumbers.add(String(linkedPR.number));
|
|
223
|
+
} else {
|
|
224
|
+
errors.push({ type: 'issue-to-pr', issueNumber: issue.number, prNumber: linkedPR.number, error: result.error });
|
|
225
|
+
}
|
|
226
|
+
}
|
|
227
|
+
} catch (err) {
|
|
228
|
+
if (verbose) {
|
|
229
|
+
console.log(`[VERBOSE] /merge: Error syncing label from issue #${issue.number}: ${err.message}`);
|
|
230
|
+
}
|
|
231
|
+
errors.push({ type: 'issue-to-pr', issueNumber: issue.number, error: err.message });
|
|
232
|
+
}
|
|
233
|
+
}
|
|
234
|
+
} catch (error) {
|
|
235
|
+
if (verbose) {
|
|
236
|
+
console.log(`[VERBOSE] /merge: Error during tag sync: ${error.message}`);
|
|
237
|
+
}
|
|
238
|
+
errors.push({ type: 'fetch', error: error.message });
|
|
239
|
+
}
|
|
240
|
+
|
|
241
|
+
if (verbose) {
|
|
242
|
+
console.log(`[VERBOSE] /merge: Tag sync complete. Synced: ${synced.length}, Errors: ${errors.length}`);
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
return {
|
|
246
|
+
synced: synced.length,
|
|
247
|
+
errors: errors.length,
|
|
248
|
+
details: synced,
|
|
249
|
+
errorDetails: errors,
|
|
250
|
+
};
|
|
251
|
+
}
|
package/src/github-merge.lib.mjs
CHANGED
|
@@ -19,15 +19,10 @@ const exec = promisify(execCallback);
|
|
|
19
19
|
// Import GitHub URL parser
|
|
20
20
|
import { parseGitHubUrl } from './github.lib.mjs';
|
|
21
21
|
|
|
22
|
-
// Import
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
export const READY_LABEL = {
|
|
27
|
-
name: 'ready',
|
|
28
|
-
description: 'Is ready to be merged',
|
|
29
|
-
color: '0E8A16', // Green color
|
|
30
|
-
};
|
|
22
|
+
// Issue #1413: Import ready tag sync, timeline, and label constant from separate module
|
|
23
|
+
// to keep this file under the 1500 line limit
|
|
24
|
+
import { syncReadyTags, getLinkedPRsFromTimeline, READY_LABEL } from './github-merge-ready-sync.lib.mjs';
|
|
25
|
+
export { syncReadyTags, getLinkedPRsFromTimeline, READY_LABEL };
|
|
31
26
|
|
|
32
27
|
/**
|
|
33
28
|
* Check if 'ready' label exists in repository
|
|
@@ -254,172 +249,6 @@ export async function fetchReadyIssuesWithPRs(owner, repo, verbose = false) {
|
|
|
254
249
|
}
|
|
255
250
|
}
|
|
256
251
|
|
|
257
|
-
/**
|
|
258
|
-
* Add a label to a GitHub issue or pull request
|
|
259
|
-
* @param {'issue'|'pr'} type - Whether to add to issue or PR
|
|
260
|
-
* @param {string} owner - Repository owner
|
|
261
|
-
* @param {string} repo - Repository name
|
|
262
|
-
* @param {number} number - Issue or PR number
|
|
263
|
-
* @param {string} labelName - Label name to add
|
|
264
|
-
* @param {boolean} verbose - Whether to log verbose output
|
|
265
|
-
* @returns {Promise<{success: boolean, error: string|null}>}
|
|
266
|
-
*/
|
|
267
|
-
async function addLabel(type, owner, repo, number, labelName, verbose = false) {
|
|
268
|
-
const cmd = type === 'issue' ? 'issue' : 'pr';
|
|
269
|
-
try {
|
|
270
|
-
await exec(`gh ${cmd} edit ${number} --repo ${owner}/${repo} --add-label "${labelName}"`);
|
|
271
|
-
if (verbose) console.log(`[VERBOSE] /merge: Added '${labelName}' label to ${type} #${number}`);
|
|
272
|
-
return { success: true, error: null };
|
|
273
|
-
} catch (error) {
|
|
274
|
-
if (verbose) console.log(`[VERBOSE] /merge: Failed to add label to ${type} #${number}: ${error.message}`);
|
|
275
|
-
return { success: false, error: error.message };
|
|
276
|
-
}
|
|
277
|
-
}
|
|
278
|
-
|
|
279
|
-
/**
|
|
280
|
-
* Sync 'ready' tags between linked pull requests and issues
|
|
281
|
-
*
|
|
282
|
-
* Issue #1367: Before building the merge queue, ensure that:
|
|
283
|
-
* 1. If a PR has 'ready' label and is clearly linked to an issue (via standard GitHub
|
|
284
|
-
* keywords in the PR body/title), the issue also gets 'ready' label.
|
|
285
|
-
* 2. If an issue has 'ready' label and has a clearly linked open PR, the PR also gets
|
|
286
|
-
* 'ready' label.
|
|
287
|
-
*
|
|
288
|
-
* This ensures the final list of ready PRs reflects all ready work, regardless of
|
|
289
|
-
* where the 'ready' label was originally applied.
|
|
290
|
-
*
|
|
291
|
-
* @param {string} owner - Repository owner
|
|
292
|
-
* @param {string} repo - Repository name
|
|
293
|
-
* @param {boolean} verbose - Whether to log verbose output
|
|
294
|
-
* @returns {Promise<{synced: number, errors: number, details: Array<Object>}>}
|
|
295
|
-
*/
|
|
296
|
-
export async function syncReadyTags(owner, repo, verbose = false) {
|
|
297
|
-
const synced = [];
|
|
298
|
-
const errors = [];
|
|
299
|
-
|
|
300
|
-
if (verbose) {
|
|
301
|
-
console.log(`[VERBOSE] /merge: Syncing 'ready' tags for ${owner}/${repo}...`);
|
|
302
|
-
}
|
|
303
|
-
|
|
304
|
-
try {
|
|
305
|
-
// Fetch open PRs with 'ready' label (including body for link detection)
|
|
306
|
-
const { stdout: prsJson } = await exec(`gh pr list --repo ${owner}/${repo} --label "${READY_LABEL.name}" --state open --json number,title,body,labels --limit 100`);
|
|
307
|
-
const readyPRs = JSON.parse(prsJson.trim() || '[]');
|
|
308
|
-
|
|
309
|
-
if (verbose) {
|
|
310
|
-
console.log(`[VERBOSE] /merge: Found ${readyPRs.length} open PRs with 'ready' label for tag sync`);
|
|
311
|
-
}
|
|
312
|
-
|
|
313
|
-
// Fetch open issues with 'ready' label
|
|
314
|
-
const { stdout: issuesJson } = await exec(`gh issue list --repo ${owner}/${repo} --label "${READY_LABEL.name}" --state open --json number,title --limit 100`);
|
|
315
|
-
const readyIssues = JSON.parse(issuesJson.trim() || '[]');
|
|
316
|
-
|
|
317
|
-
if (verbose) {
|
|
318
|
-
console.log(`[VERBOSE] /merge: Found ${readyIssues.length} open issues with 'ready' label for tag sync`);
|
|
319
|
-
}
|
|
320
|
-
|
|
321
|
-
// Build a set of issue numbers that already have 'ready'
|
|
322
|
-
const readyIssueNumbers = new Set(readyIssues.map(i => String(i.number)));
|
|
323
|
-
|
|
324
|
-
// Step 1: For each PR with 'ready', find linked issue and sync label to it
|
|
325
|
-
for (const pr of readyPRs) {
|
|
326
|
-
try {
|
|
327
|
-
const prBody = pr.body || '';
|
|
328
|
-
const linkedIssueNumber = extractLinkedIssueNumber(prBody);
|
|
329
|
-
|
|
330
|
-
if (!linkedIssueNumber) {
|
|
331
|
-
if (verbose) {
|
|
332
|
-
console.log(`[VERBOSE] /merge: PR #${pr.number} has no linked issue (no closing keyword in body)`);
|
|
333
|
-
}
|
|
334
|
-
continue;
|
|
335
|
-
}
|
|
336
|
-
|
|
337
|
-
if (readyIssueNumbers.has(String(linkedIssueNumber))) {
|
|
338
|
-
if (verbose) {
|
|
339
|
-
console.log(`[VERBOSE] /merge: Issue #${linkedIssueNumber} already has 'ready' label (linked from PR #${pr.number})`);
|
|
340
|
-
}
|
|
341
|
-
continue;
|
|
342
|
-
}
|
|
343
|
-
|
|
344
|
-
// Issue doesn't have 'ready' label yet - add it
|
|
345
|
-
if (verbose) {
|
|
346
|
-
console.log(`[VERBOSE] /merge: PR #${pr.number} has 'ready', adding to linked issue #${linkedIssueNumber}`);
|
|
347
|
-
}
|
|
348
|
-
|
|
349
|
-
const result = await addLabel('issue', owner, repo, linkedIssueNumber, READY_LABEL.name, verbose);
|
|
350
|
-
if (result.success) {
|
|
351
|
-
synced.push({ type: 'pr-to-issue', prNumber: pr.number, issueNumber: Number(linkedIssueNumber) });
|
|
352
|
-
// Mark this issue as now having 'ready' so we don't process it again
|
|
353
|
-
readyIssueNumbers.add(String(linkedIssueNumber));
|
|
354
|
-
} else {
|
|
355
|
-
errors.push({ type: 'pr-to-issue', prNumber: pr.number, issueNumber: Number(linkedIssueNumber), error: result.error });
|
|
356
|
-
}
|
|
357
|
-
} catch (err) {
|
|
358
|
-
if (verbose) {
|
|
359
|
-
console.log(`[VERBOSE] /merge: Error syncing label from PR #${pr.number}: ${err.message}`);
|
|
360
|
-
}
|
|
361
|
-
errors.push({ type: 'pr-to-issue', prNumber: pr.number, error: err.message });
|
|
362
|
-
}
|
|
363
|
-
}
|
|
364
|
-
|
|
365
|
-
// Build a set of PR numbers that already have 'ready'
|
|
366
|
-
const readyPRNumbers = new Set(readyPRs.map(p => String(p.number)));
|
|
367
|
-
|
|
368
|
-
// Step 2: For each issue with 'ready', find linked PRs and sync label to them
|
|
369
|
-
for (const issue of readyIssues) {
|
|
370
|
-
try {
|
|
371
|
-
// Search for open PRs linked to this issue via closing keywords
|
|
372
|
-
const { stdout: linkedPRsJson } = await exec(`gh pr list --repo ${owner}/${repo} --search "in:body closes #${issue.number} OR fixes #${issue.number} OR resolves #${issue.number}" --state open --json number,title,labels --limit 10`);
|
|
373
|
-
const linkedPRs = JSON.parse(linkedPRsJson.trim() || '[]');
|
|
374
|
-
|
|
375
|
-
for (const linkedPR of linkedPRs) {
|
|
376
|
-
if (readyPRNumbers.has(String(linkedPR.number))) {
|
|
377
|
-
if (verbose) {
|
|
378
|
-
console.log(`[VERBOSE] /merge: PR #${linkedPR.number} already has 'ready' label (linked from issue #${issue.number})`);
|
|
379
|
-
}
|
|
380
|
-
continue;
|
|
381
|
-
}
|
|
382
|
-
|
|
383
|
-
// PR doesn't have 'ready' label yet - add it
|
|
384
|
-
if (verbose) {
|
|
385
|
-
console.log(`[VERBOSE] /merge: Issue #${issue.number} has 'ready', adding to linked PR #${linkedPR.number}`);
|
|
386
|
-
}
|
|
387
|
-
|
|
388
|
-
const result = await addLabel('pr', owner, repo, linkedPR.number, READY_LABEL.name, verbose);
|
|
389
|
-
if (result.success) {
|
|
390
|
-
synced.push({ type: 'issue-to-pr', issueNumber: issue.number, prNumber: linkedPR.number });
|
|
391
|
-
// Mark this PR as now having 'ready'
|
|
392
|
-
readyPRNumbers.add(String(linkedPR.number));
|
|
393
|
-
} else {
|
|
394
|
-
errors.push({ type: 'issue-to-pr', issueNumber: issue.number, prNumber: linkedPR.number, error: result.error });
|
|
395
|
-
}
|
|
396
|
-
}
|
|
397
|
-
} catch (err) {
|
|
398
|
-
if (verbose) {
|
|
399
|
-
console.log(`[VERBOSE] /merge: Error syncing label from issue #${issue.number}: ${err.message}`);
|
|
400
|
-
}
|
|
401
|
-
errors.push({ type: 'issue-to-pr', issueNumber: issue.number, error: err.message });
|
|
402
|
-
}
|
|
403
|
-
}
|
|
404
|
-
} catch (error) {
|
|
405
|
-
if (verbose) {
|
|
406
|
-
console.log(`[VERBOSE] /merge: Error during tag sync: ${error.message}`);
|
|
407
|
-
}
|
|
408
|
-
errors.push({ type: 'fetch', error: error.message });
|
|
409
|
-
}
|
|
410
|
-
|
|
411
|
-
if (verbose) {
|
|
412
|
-
console.log(`[VERBOSE] /merge: Tag sync complete. Synced: ${synced.length}, Errors: ${errors.length}`);
|
|
413
|
-
}
|
|
414
|
-
|
|
415
|
-
return {
|
|
416
|
-
synced: synced.length,
|
|
417
|
-
errors: errors.length,
|
|
418
|
-
details: synced,
|
|
419
|
-
errorDetails: errors,
|
|
420
|
-
};
|
|
421
|
-
}
|
|
422
|
-
|
|
423
252
|
/**
|
|
424
253
|
* Get combined list of ready PRs (from both direct PR labels and issue labels)
|
|
425
254
|
* @param {string} owner - Repository owner
|
|
@@ -751,11 +580,15 @@ export async function waitForCI(owner, repo, prNumber, options = {}, verbose = f
|
|
|
751
580
|
onStatusUpdate = null,
|
|
752
581
|
// Issue #1269: Add timeout for callback to prevent infinite blocking
|
|
753
582
|
callbackTimeout = 60 * 1000, // 1 minute max for callback
|
|
583
|
+
isCancelled = null, // Issue #1407: Support early exit when cancellation is requested
|
|
754
584
|
} = options;
|
|
755
585
|
|
|
756
586
|
const startTime = Date.now();
|
|
757
587
|
|
|
758
588
|
while (Date.now() - startTime < timeout) {
|
|
589
|
+
// Issue #1407: Check for cancellation before each poll to allow early exit
|
|
590
|
+
if (isCancelled?.()) return { success: false, status: 'cancelled', error: 'Operation was cancelled' };
|
|
591
|
+
|
|
759
592
|
let ciStatus;
|
|
760
593
|
try {
|
|
761
594
|
ciStatus = await checkPRCIStatus(owner, repo, prNumber, verbose);
|
|
@@ -1455,8 +1288,7 @@ export async function getActiveRepoWorkflows(owner, repo, verbose = false) {
|
|
|
1455
1288
|
}
|
|
1456
1289
|
}
|
|
1457
1290
|
|
|
1458
|
-
// Issue #1341:
|
|
1459
|
-
// to keep this file under the 1500 line limit
|
|
1291
|
+
// Issue #1341: Re-export post-merge CI functions from separate module
|
|
1460
1292
|
import { waitForCommitCI, checkBranchCIHealth, getMergeCommitSha } from './github-merge-ci.lib.mjs';
|
|
1461
1293
|
export { waitForCommitCI, checkBranchCIHealth, getMergeCommitSha };
|
|
1462
1294
|
|
|
@@ -1469,32 +1301,30 @@ export default {
|
|
|
1469
1301
|
fetchReadyPullRequests,
|
|
1470
1302
|
fetchReadyIssuesWithPRs,
|
|
1471
1303
|
getAllReadyPRs,
|
|
1472
|
-
// Issue #1367: Sync 'ready' tags between linked PRs and issues
|
|
1473
|
-
syncReadyTags,
|
|
1304
|
+
syncReadyTags, // Issue #1367: Sync 'ready' tags between linked PRs and issues
|
|
1474
1305
|
checkPRCIStatus,
|
|
1475
1306
|
checkPRMergeable,
|
|
1476
1307
|
checkMergePermissions,
|
|
1477
1308
|
mergePullRequest,
|
|
1478
1309
|
waitForCI,
|
|
1479
1310
|
parseRepositoryUrl,
|
|
1480
|
-
// Issue #1307: New exports for target branch CI waiting
|
|
1481
|
-
getActiveBranchRuns,
|
|
1311
|
+
getActiveBranchRuns, // Issue #1307: New exports for target branch CI waiting
|
|
1482
1312
|
waitForBranchCI,
|
|
1483
1313
|
getDefaultBranch,
|
|
1484
|
-
// Issue #1314: Billing limit detection
|
|
1314
|
+
// Issue #1314: Billing limit detection and enhanced CI status and re-run capabilities
|
|
1485
1315
|
getCheckRunAnnotations,
|
|
1486
1316
|
getRepoVisibility,
|
|
1487
1317
|
checkForBillingLimitError,
|
|
1488
1318
|
BILLING_LIMIT_ERROR_PATTERN,
|
|
1489
|
-
// Issue #1314: Enhanced CI status and re-run capabilities
|
|
1490
1319
|
getDetailedCIStatus,
|
|
1491
1320
|
rerunWorkflowRun,
|
|
1492
1321
|
rerunFailedJobs,
|
|
1493
1322
|
getWorkflowRunsForSha,
|
|
1494
|
-
// Issue #1341: Post-merge CI waiting
|
|
1323
|
+
// Issue #1341: Post-merge CI waiting; Issue #1363: Detect active workflows
|
|
1495
1324
|
waitForCommitCI,
|
|
1496
1325
|
checkBranchCIHealth,
|
|
1497
1326
|
getMergeCommitSha,
|
|
1498
|
-
// Issue #1363: Detect active workflows to distinguish "no CI" from race condition
|
|
1499
1327
|
getActiveRepoWorkflows,
|
|
1328
|
+
// Issue #1413: Use issue timeline to find genuinely linked PRs (avoids false positives from text search)
|
|
1329
|
+
getLinkedPRsFromTimeline,
|
|
1500
1330
|
};
|
package/src/opencode.lib.mjs
CHANGED
|
@@ -465,6 +465,8 @@ export const executeOpenCodeCommand = async params => {
|
|
|
465
465
|
for (const line of messageLines) {
|
|
466
466
|
await log(line, { level: 'warning' });
|
|
467
467
|
}
|
|
468
|
+
} else if (exitCode === 130) {
|
|
469
|
+
await log('\n\nā ļø OpenCode command interrupted (CTRL+C)');
|
|
468
470
|
} else {
|
|
469
471
|
await log(`\n\nā OpenCode command failed with exit code ${exitCode}`, { level: 'error' });
|
|
470
472
|
}
|
|
@@ -213,7 +213,12 @@ Workflow and collaboration.
|
|
|
213
213
|
Self review.
|
|
214
214
|
- When you check your solution draft, run all tests locally.
|
|
215
215
|
- When you compare with repo style, use gh pr diff [number].
|
|
216
|
-
- When you finalize, confirm code, tests, and description are consistent
|
|
216
|
+
- When you finalize, confirm code, tests, and description are consistent.${
|
|
217
|
+
argv && argv.promptEnsureAllRequirementsAreMet
|
|
218
|
+
? `
|
|
219
|
+
- When no explicit feedback or requirements is provided, ensure all changes are correct, consistent, validated, tested, logged and fully meet all discussed requirements (check issue description and all comments in issue and in pull request). Ensure all CI/CD checks pass.`
|
|
220
|
+
: ''
|
|
221
|
+
}
|
|
217
222
|
|
|
218
223
|
GitHub CLI command patterns.
|
|
219
224
|
- IMPORTANT: Always use --paginate flag when fetching lists from GitHub API to ensure all results are returned (GitHub returns max 30 per page by default).
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Finalize module for solve.mjs
|
|
5
|
+
* After the main solve completes, restarts the AI tool N times with a
|
|
6
|
+
* requirements-check prompt to verify all requirements are met.
|
|
7
|
+
*
|
|
8
|
+
* Extracted from solve.mjs to keep files under 1500 lines.
|
|
9
|
+
*
|
|
10
|
+
* @see https://github.com/link-assistant/hive-mind/issues/1383
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
// Check if use is already defined globally (when imported from solve.mjs)
|
|
14
|
+
// If not, fetch it (when running standalone)
|
|
15
|
+
if (typeof globalThis.use === 'undefined') {
|
|
16
|
+
globalThis.use = (await eval(await (await fetch('https://unpkg.com/use-m/use.js')).text())).use;
|
|
17
|
+
}
|
|
18
|
+
const use = globalThis.use;
|
|
19
|
+
|
|
20
|
+
// Use command-stream for consistent $ behavior across runtimes
|
|
21
|
+
const { $ } = await use('command-stream');
|
|
22
|
+
|
|
23
|
+
// Import shared library functions
|
|
24
|
+
const lib = await import('./lib.mjs');
|
|
25
|
+
const { log } = lib;
|
|
26
|
+
|
|
27
|
+
// Import shared restart utilities
|
|
28
|
+
const restartShared = await import('./solve.restart-shared.lib.mjs');
|
|
29
|
+
const { executeToolIteration } = restartShared;
|
|
30
|
+
|
|
31
|
+
/**
 * Runs finalize requirements-check iterations after the main solve.
 *
 * Restarts the AI tool up to `argv.finalize` times with a requirements-check
 * prompt so it can verify that all discussed requirements are met and that
 * CI/CD checks pass (Issue #1383).
 *
 * @param {object} params
 * @param {string} params.issueUrl - URL of the issue being solved
 * @param {string} params.owner - Repository owner
 * @param {string} params.repo - Repository name
 * @param {string|number} params.issueNumber - Issue number
 * @param {string|number} params.prNumber - Pull request number (finalize is a no-op without it)
 * @param {string} params.branchName - Working branch name
 * @param {string} params.tempDir - Temporary working directory
 * @param {object} params.argv - CLI arguments (reads `finalize`, `finalizeModel`, `model`)
 * @param {function} params.cleanupClaudeFile - cleanup function for CLAUDE.md/.gitkeep
 * @returns {Promise<{sessionId, anthropicTotalCostUSD, publicPricingEstimate, pricingInfo}|null>}
 *   Session/pricing data carried forward from the finalize iterations, or null
 *   when finalize is disabled or no PR exists.
 */
export const runAutoEnsureRequirements = async ({ issueUrl, owner, repo, issueNumber, prNumber, branchName, tempDir, argv, cleanupClaudeFile }) => {
  const finalizeCount = argv.finalize;
  // Finalize is opt-in and only meaningful once a PR exists.
  if (!finalizeCount || finalizeCount <= 0 || !prNumber) {
    return null;
  }

  await log('');
  await log(`š FINALIZE: Starting ${finalizeCount} requirements-check restart(s)`);
  await log('   Will restart the AI tool to verify all requirements are met');
  await log('');

  // Get PR merge state status for the iterations.
  // FIX: the REST pulls endpoint has no `mergeStateStatus` field — that name is
  // GraphQL-only; REST exposes `mergeable_state` instead — so the previous
  // `gh api repos/.../pulls/N --jq '.mergeStateStatus'` always produced "null".
  // Use `gh pr view --json mergeStateStatus`, which returns the GraphQL field
  // (values like BLOCKED/CLEAN) that downstream prompt building expects.
  let currentMergeStateStatus = null;
  try {
    const prStateResult = await $`gh pr view ${prNumber} --repo ${owner}/${repo} --json mergeStateStatus --jq .mergeStateStatus`;
    if (prStateResult.code === 0) {
      currentMergeStateStatus = prStateResult.stdout.toString().trim();
    }
  } catch {
    // Best-effort: merge state is advisory context for the prompt only.
  }

  let sessionId;
  let anthropicTotalCostUSD;
  let publicPricingEstimate;
  let pricingInfo;

  // Use --finalize-model if provided, otherwise fall back to --model
  const finalizeModel = argv.finalizeModel || argv.model;

  for (let ensureIteration = 1; ensureIteration <= finalizeCount; ensureIteration++) {
    await log(`š FINALIZE iteration ${ensureIteration}/${finalizeCount}: Restarting to verify requirements...`);

    const ensureFeedbackLines = ['', '='.repeat(60), 'š FINALIZE REQUIREMENTS CHECK:', '='.repeat(60), '', 'We need to ensure all changes are correct, consistent, validated, tested, logged and fully meet all discussed requirements (check issue description and all comments in issue and in pull request). Ensure all CI/CD checks pass.', ''];

    const ensureResult = await executeToolIteration({
      issueUrl,
      owner,
      repo,
      issueNumber,
      prNumber,
      branchName,
      tempDir,
      mergeStateStatus: currentMergeStateStatus,
      feedbackLines: ensureFeedbackLines,
      argv: {
        ...argv,
        // Override model with finalize-model for this iteration
        model: finalizeModel,
        // Enable prompt-ensure only during finalize cycle (not the first regular run)
        promptEnsureAllRequirementsAreMet: true,
        // Prevent recursive finalize
        finalize: 0,
      },
    });

    // Carry forward session/pricing data from the finalize restart so the
    // caller reports the latest session and accumulated cost info.
    if (ensureResult) {
      if (ensureResult.sessionId) sessionId = ensureResult.sessionId;
      if (ensureResult.anthropicTotalCostUSD) anthropicTotalCostUSD = ensureResult.anthropicTotalCostUSD;
      if (ensureResult.publicPricingEstimate) publicPricingEstimate = ensureResult.publicPricingEstimate;
      if (ensureResult.pricingInfo) pricingInfo = ensureResult.pricingInfo;
    }

    await log(`ā… FINALIZE iteration ${ensureIteration}/${finalizeCount} complete`);
    await log('');
  }

  // Clean up CLAUDE.md/.gitkeep after ensure restarts
  await cleanupClaudeFile(tempDir, branchName, null, argv);

  return { sessionId, anthropicTotalCostUSD, publicPricingEstimate, pricingInfo };
};
|
|
119
|
+
|
|
120
|
+
export default { runAutoEnsureRequirements };
|
package/src/solve.config.lib.mjs
CHANGED
|
@@ -374,6 +374,21 @@ export const SOLVE_OPTION_DEFINITIONS = {
|
|
|
374
374
|
description: 'Automatically accept the pending GitHub repository or organization invitation for the specific repository/organization being solved, before checking write access. Unlike /accept_invites which accepts all pending invitations, this only accepts the invite for the target repo/org.',
|
|
375
375
|
default: false,
|
|
376
376
|
},
|
|
377
|
+
'prompt-ensure-all-requirements-are-met': {
|
|
378
|
+
type: 'boolean',
|
|
379
|
+
description: '[EXPERIMENTAL] Add a prompt hint to the system prompt to ensure all changes are correct, consistent, validated, tested, logged and fully meet all discussed requirements. Enabled automatically by --finalize during finalize cycle iterations only.',
|
|
380
|
+
default: false,
|
|
381
|
+
},
|
|
382
|
+
finalize: {
|
|
383
|
+
type: 'number',
|
|
384
|
+
description: '[EXPERIMENTAL] After the main solve completes, automatically restart the AI tool N times (default: 1) with a requirements-check prompt to verify all requirements are met. Use --finalize-model to override the model for finalize iterations.',
|
|
385
|
+
default: 0,
|
|
386
|
+
},
|
|
387
|
+
'finalize-model': {
|
|
388
|
+
type: 'string',
|
|
389
|
+
description: '[EXPERIMENTAL] Model to use for --finalize iterations. Defaults to the same model as --model.',
|
|
390
|
+
default: undefined,
|
|
391
|
+
},
|
|
377
392
|
};
|
|
378
393
|
|
|
379
394
|
// Function to create yargs configuration - avoids duplication
|
|
@@ -535,6 +550,17 @@ export const parseArguments = async (yargs, hideBin) => {
|
|
|
535
550
|
}
|
|
536
551
|
}
|
|
537
552
|
|
|
553
|
+
// --finalize normalization
|
|
554
|
+
// Issue #1383: When finalize is enabled (as boolean or number), normalize to iteration count
|
|
555
|
+
// NOTE: promptEnsureAllRequirementsAreMet is NOT set here ā it is only enabled during
|
|
556
|
+
// the finalize cycle iterations themselves (not the first regular worker model run)
|
|
557
|
+
if (argv && argv.finalize) {
|
|
558
|
+
// Normalize: if passed as boolean true (flag without value), treat as 1 iteration
|
|
559
|
+
if (argv.finalize === true) {
|
|
560
|
+
argv.finalize = 1;
|
|
561
|
+
}
|
|
562
|
+
}
|
|
563
|
+
|
|
538
564
|
if (argv.tool === 'opencode' && !modelExplicitlyProvided) {
|
|
539
565
|
// User did not explicitly provide --model, so use the correct default for opencode
|
|
540
566
|
argv.model = 'grok-code-fast-1';
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Interrupt wrapper factory for CTRL+C handling in solve sessions.
|
|
3
|
+
*
|
|
4
|
+
* On SIGINT, auto-commits uncommitted changes and uploads session logs if --attach-logs is enabled.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
/**
 * Builds the SIGINT (CTRL+C) handler for a solve session.
 *
 * The returned async function preserves in-progress work: it auto-commits any
 * uncommitted changes on the session branch, and — when --attach-logs is
 * enabled and a PR is known — uploads the session log to that PR. Both steps
 * are best-effort: failures are logged as warnings, never rethrown.
 *
 * @param {object} deps - Dependencies
 * @param {object} deps.cleanupContext - Mutable context object with tempDir, argv, branchName, prNumber, owner, repo
 * @param {Function} deps.checkForUncommittedChanges - Tool-specific function to check and commit changes
 * @param {boolean} deps.shouldAttachLogs - Whether --attach-logs is enabled
 * @param {Function} deps.attachLogToGitHub - Function to upload log to GitHub PR
 * @param {Function} deps.getLogFile - Function that returns the current log file path
 * @param {Function} deps.sanitizeLogContent - Function to sanitize log content before upload
 * @param {object} deps.$ - Shell command runner
 * @param {Function} deps.log - Logging function
 * @returns {Function} Async interrupt wrapper
 */
export const createInterruptWrapper = ({ cleanupContext, checkForUncommittedChanges, shouldAttachLogs, attachLogToGitHub, getLogFile, sanitizeLogContent, $, log }) => {
  // Best-effort commit of pending work on the session branch.
  const commitPendingWork = async (ctx) => {
    try {
      await checkForUncommittedChanges(
        ctx.tempDir,
        ctx.owner,
        ctx.repo,
        ctx.branchName,
        $,
        log,
        true, // always autoCommit on CTRL+C to preserve work
        false // no autoRestart
      );
    } catch (commitError) {
      await log(`ā ļø Could not auto-commit changes on interrupt: ${commitError.message}`, {
        level: 'warning',
      });
    }
  };

  // Best-effort upload of the interrupted session's log to the PR.
  const uploadSessionLog = async (ctx) => {
    await log('š Uploading interrupted session logs to Pull Request...');
    try {
      await attachLogToGitHub({
        logFile: getLogFile(),
        targetType: 'pr',
        targetNumber: ctx.prNumber,
        owner: ctx.owner,
        repo: ctx.repo,
        $,
        log,
        sanitizeLogContent,
        verbose: ctx.argv.verbose || false,
        errorMessage: 'Session interrupted by user (CTRL+C)',
      });
    } catch (uploadError) {
      await log(`ā ļø Could not upload logs on interrupt: ${uploadError.message}`, {
        level: 'warning',
      });
    }
  };

  return async () => {
    const ctx = cleanupContext;
    // Nothing to preserve before the session has a workspace and parsed args.
    if (!ctx.tempDir || !ctx.argv) return;

    await log('\nā ļø Session interrupted by user (CTRL+C)');

    if (ctx.branchName) {
      await commitPendingWork(ctx);
    }

    if (shouldAttachLogs && ctx.prNumber && ctx.owner && ctx.repo) {
      await uploadSessionLog(ctx);
    }
  };
};
|
package/src/solve.mjs
CHANGED
|
@@ -73,10 +73,12 @@ const { createUncaughtExceptionHandler, createUnhandledRejectionHandler, handleM
|
|
|
73
73
|
|
|
74
74
|
const watchLib = await import('./solve.watch.lib.mjs');
|
|
75
75
|
const { startWatchMode } = watchLib;
|
|
76
|
-
const
|
|
77
|
-
const {
|
|
76
|
+
const { startAutoRestartUntilMergeable } = await import('./solve.auto-merge.lib.mjs');
|
|
77
|
+
const { runAutoEnsureRequirements } = await import('./solve.auto-ensure.lib.mjs');
|
|
78
78
|
const exitHandler = await import('./exit-handler.lib.mjs');
|
|
79
79
|
const { initializeExitHandler, installGlobalExitHandlers, safeExit } = exitHandler;
|
|
80
|
+
const interruptLib = await import('./solve.interrupt.lib.mjs');
|
|
81
|
+
const { createInterruptWrapper } = interruptLib;
|
|
80
82
|
const getResourceSnapshot = memoryCheck.getResourceSnapshot;
|
|
81
83
|
|
|
82
84
|
// Import new modular components
|
|
@@ -162,22 +164,17 @@ if (argv.sentry) {
|
|
|
162
164
|
},
|
|
163
165
|
});
|
|
164
166
|
}
|
|
165
|
-
// Create
|
|
166
|
-
let cleanupContext = { tempDir: null, argv: null, limitReached: false };
|
|
167
|
+
// Create cleanup/interrupt wrappers populated with context as solve progresses
|
|
168
|
+
let cleanupContext = { tempDir: null, argv: null, limitReached: false, branchName: null, prNumber: null, owner: null, repo: null };
|
|
167
169
|
const cleanupWrapper = async () => {
|
|
168
170
|
if (cleanupContext.tempDir && cleanupContext.argv) {
|
|
169
171
|
await cleanupTempDirectory(cleanupContext.tempDir, cleanupContext.argv, cleanupContext.limitReached);
|
|
170
172
|
}
|
|
171
173
|
};
|
|
172
|
-
|
|
173
|
-
initializeExitHandler(getAbsoluteLogPath, log, cleanupWrapper);
|
|
174
|
+
const interruptWrapper = createInterruptWrapper({ cleanupContext, checkForUncommittedChanges, shouldAttachLogs, attachLogToGitHub, getLogFile, sanitizeLogContent, $, log });
|
|
175
|
+
initializeExitHandler(getAbsoluteLogPath, log, cleanupWrapper, interruptWrapper);
|
|
174
176
|
installGlobalExitHandlers();
|
|
175
177
|
|
|
176
|
-
// Note: Version and raw command are logged BEFORE parseArguments() (see above)
|
|
177
|
-
// This ensures they appear even if strict validation fails
|
|
178
|
-
// Strict options validation is now handled by yargs .strict() mode in solve.config.lib.mjs
|
|
179
|
-
// This prevents unrecognized options from being silently ignored (issue #453, #482)
|
|
180
|
-
|
|
181
178
|
// Now handle argument validation that was moved from early checks
|
|
182
179
|
let issueUrl = argv['issue-url'] || argv._[0];
|
|
183
180
|
if (!issueUrl) {
|
|
@@ -193,9 +190,11 @@ if (!urlValidation.isValid) {
|
|
|
193
190
|
}
|
|
194
191
|
const { isIssueUrl, isPrUrl, normalizedUrl, owner, repo, number: urlNumber } = urlValidation;
|
|
195
192
|
issueUrl = normalizedUrl || issueUrl;
|
|
196
|
-
// Store owner and repo globally for error handlers
|
|
193
|
+
// Store owner and repo globally for error handlers and interrupt context
|
|
197
194
|
global.owner = owner;
|
|
198
195
|
global.repo = repo;
|
|
196
|
+
cleanupContext.owner = owner;
|
|
197
|
+
cleanupContext.repo = repo;
|
|
199
198
|
// Setup unhandled error handlers to ensure log path is always shown
|
|
200
199
|
const errorHandlerOptions = {
|
|
201
200
|
log,
|
|
@@ -492,8 +491,6 @@ if (isPrUrl) {
|
|
|
492
491
|
}
|
|
493
492
|
}
|
|
494
493
|
await log(`š PR branch: ${prBranch}`);
|
|
495
|
-
// Extract issue number from PR body using GitHub linking detection library
|
|
496
|
-
// This ensures we only detect actual GitHub-recognized linking keywords
|
|
497
494
|
const prBody = prData.body || '';
|
|
498
495
|
const extractedIssueNumber = extractLinkedIssueNumber(prBody);
|
|
499
496
|
if (extractedIssueNumber) {
|
|
@@ -524,9 +521,12 @@ if (isPrUrl) {
|
|
|
524
521
|
// Pass workspace info for --enable-workspaces mode (works with all tools)
|
|
525
522
|
const workspaceInfo = argv.enableWorkspaces ? { owner, repo, issueNumber } : null;
|
|
526
523
|
const { tempDir, workspaceTmpDir, needsClone } = await setupTempDirectory(argv, workspaceInfo);
|
|
527
|
-
// Populate cleanup context for signal handlers
|
|
524
|
+
// Populate cleanup context for signal handlers (owner/repo updated again here for redundancy)
|
|
528
525
|
cleanupContext.tempDir = tempDir;
|
|
529
526
|
cleanupContext.argv = argv;
|
|
527
|
+
cleanupContext.owner = owner;
|
|
528
|
+
cleanupContext.repo = repo;
|
|
529
|
+
if (prNumber) cleanupContext.prNumber = prNumber;
|
|
530
530
|
// Initialize limitReached variable outside try block for finally clause
|
|
531
531
|
let limitReached = false;
|
|
532
532
|
try {
|
|
@@ -570,6 +570,7 @@ try {
|
|
|
570
570
|
repo,
|
|
571
571
|
prNumber,
|
|
572
572
|
});
|
|
573
|
+
cleanupContext.branchName = branchName;
|
|
573
574
|
|
|
574
575
|
// Auto-merge default branch to pull request branch if enabled
|
|
575
576
|
let autoMergeFeedbackLines = [];
|
|
@@ -614,9 +615,6 @@ try {
|
|
|
614
615
|
// prNumber is already set from earlier when we parsed the PR
|
|
615
616
|
}
|
|
616
617
|
|
|
617
|
-
// Don't build the prompt yet - we'll build it after we have all the information
|
|
618
|
-
// This includes PR URL (if created) and comment info (if in continue mode)
|
|
619
|
-
|
|
620
618
|
// Handle auto PR creation using the new module
|
|
621
619
|
const autoPrResult = await handleAutoPrCreation({
|
|
622
620
|
argv,
|
|
@@ -647,6 +645,7 @@ try {
|
|
|
647
645
|
claudeCommitHash = autoPrResult.claudeCommitHash;
|
|
648
646
|
}
|
|
649
647
|
}
|
|
648
|
+
if (prNumber) cleanupContext.prNumber = prNumber;
|
|
650
649
|
|
|
651
650
|
// CRITICAL: Validate that we have a PR number when required
|
|
652
651
|
// This prevents continuing without a PR when one was supposed to be created
|
|
@@ -695,9 +694,6 @@ try {
|
|
|
695
694
|
await log(formatAligned('', 'Workflow:', 'AI will create the PR', 2));
|
|
696
695
|
}
|
|
697
696
|
|
|
698
|
-
// Don't build the prompt yet - we'll build it after we have all the information
|
|
699
|
-
// This includes PR URL (if created) and comment info (if in continue mode)
|
|
700
|
-
|
|
701
697
|
// Start work session using the new module
|
|
702
698
|
// Determine session type based on command line flags
|
|
703
699
|
// See: https://github.com/link-assistant/hive-mind/issues/1152
|
|
@@ -1186,7 +1182,6 @@ try {
|
|
|
1186
1182
|
await log('ā¹ļø Playwright MCP auto-cleanup disabled via --no-playwright-mcp-auto-cleanup', { verbose: true });
|
|
1187
1183
|
}
|
|
1188
1184
|
|
|
1189
|
-
// Check for uncommitted changes
|
|
1190
1185
|
// When limit is reached, force auto-commit of any uncommitted changes to preserve work
|
|
1191
1186
|
const shouldAutoCommit = argv['auto-commit-uncommitted-changes'] || limitReached;
|
|
1192
1187
|
const autoRestartEnabled = argv['autoRestartOnUncommittedChanges'] !== false;
|
|
@@ -1234,11 +1229,6 @@ try {
|
|
|
1234
1229
|
}
|
|
1235
1230
|
|
|
1236
1231
|
// Search for newly created pull requests and comments
|
|
1237
|
-
// Pass shouldRestart to prevent early exit when auto-restart is needed
|
|
1238
|
-
// Include agent tool pricing data when available (publicPricingEstimate, pricingInfo)
|
|
1239
|
-
// Issue #1088: Pass errorDuringExecution for "Finished with errors" state
|
|
1240
|
-
// Issue #1152: Pass sessionType for differentiated log comments
|
|
1241
|
-
// Issue #1154: Track if logs were already uploaded to prevent duplicates
|
|
1242
1232
|
const verifyResult = await verifyResults(owner, repo, branchName, issueNumber, prNumber, prUrl, referenceTime, argv, shouldAttachLogs, shouldRestart, sessionId, tempDir, anthropicTotalCostUSD, publicPricingEstimate, pricingInfo, errorDuringExecution, sessionType);
|
|
1243
1233
|
const logsAlreadyUploaded = verifyResult?.logUploadSuccess || false;
|
|
1244
1234
|
|
|
@@ -1293,6 +1283,15 @@ try {
|
|
|
1293
1283
|
}
|
|
1294
1284
|
}
|
|
1295
1285
|
|
|
1286
|
+
// Issue #1383: --finalize
|
|
1287
|
+
const autoEnsureResult = await runAutoEnsureRequirements({ issueUrl, owner, repo, issueNumber, prNumber, branchName, tempDir, argv, cleanupClaudeFile });
|
|
1288
|
+
if (autoEnsureResult) {
|
|
1289
|
+
if (autoEnsureResult.sessionId) sessionId = autoEnsureResult.sessionId;
|
|
1290
|
+
if (autoEnsureResult.anthropicTotalCostUSD) anthropicTotalCostUSD = autoEnsureResult.anthropicTotalCostUSD;
|
|
1291
|
+
if (autoEnsureResult.publicPricingEstimate) publicPricingEstimate = autoEnsureResult.publicPricingEstimate;
|
|
1292
|
+
if (autoEnsureResult.pricingInfo) pricingInfo = autoEnsureResult.pricingInfo;
|
|
1293
|
+
}
|
|
1294
|
+
|
|
1296
1295
|
// Start watch mode if enabled OR if we need to handle uncommitted changes
|
|
1297
1296
|
if (argv.verbose) {
|
|
1298
1297
|
await log('');
|
|
@@ -366,7 +366,29 @@ export function registerMergeCommand(bot, options) {
|
|
|
366
366
|
|
|
367
367
|
// Cancel the operation
|
|
368
368
|
operation.processor.cancel();
|
|
369
|
-
|
|
369
|
+
// Issue #1407: Acknowledge the cancel with a short toast message
|
|
370
|
+
await ctx.answerCbQuery('Cancellation requested.');
|
|
371
|
+
|
|
372
|
+
// Issue #1407: Immediately hide the cancel button and update the message to show
|
|
373
|
+
// that the queue is being cancelled. Without this, the button stays visible until
|
|
374
|
+
// the current PR finishes processing (which can take hours if waiting for CI).
|
|
375
|
+
try {
|
|
376
|
+
const cancellingMessage = operation.processor.formatProgressMessage();
|
|
377
|
+
await ctx.editMessageText(cancellingMessage, {
|
|
378
|
+
parse_mode: 'MarkdownV2',
|
|
379
|
+
// No reply_markup = cancel button is removed immediately
|
|
380
|
+
});
|
|
381
|
+
} catch (err) {
|
|
382
|
+
// If the full message edit fails, fall back to just removing the button
|
|
383
|
+
if (!err.message?.includes('message is not modified')) {
|
|
384
|
+
VERBOSE && console.log(`[VERBOSE] /merge: Error updating message on cancel: ${err.message}`);
|
|
385
|
+
}
|
|
386
|
+
try {
|
|
387
|
+
await ctx.editMessageReplyMarkup({ inline_keyboard: [] });
|
|
388
|
+
} catch {
|
|
389
|
+
// Ignore errors - the button will be removed when the operation completes
|
|
390
|
+
}
|
|
391
|
+
}
|
|
370
392
|
|
|
371
393
|
VERBOSE && console.log(`[VERBOSE] /merge: Cancelled operation for ${repoKey}`);
|
|
372
394
|
});
|
|
@@ -408,11 +408,22 @@ export class MergeQueueProcessor {
|
|
|
408
408
|
await this.onProgress(this.getProgressUpdate());
|
|
409
409
|
}
|
|
410
410
|
},
|
|
411
|
+
// Issue #1407: Pass cancellation check so CI wait can abort early
|
|
412
|
+
isCancelled: () => this.isCancelled,
|
|
411
413
|
},
|
|
412
414
|
this.verbose
|
|
413
415
|
);
|
|
414
416
|
|
|
415
417
|
if (!waitResult.success) {
|
|
418
|
+
// Issue #1407: If cancelled during CI wait, mark as skipped (not failed)
|
|
419
|
+
// so the queue can cleanly stop without misleading failure statistics
|
|
420
|
+
if (waitResult.status === 'cancelled') {
|
|
421
|
+
item.status = MergeItemStatus.SKIPPED;
|
|
422
|
+
item.error = 'Cancelled';
|
|
423
|
+
this.stats.skipped++;
|
|
424
|
+
this.log(`Skipped PR #${item.pr.number}: cancelled during CI wait`);
|
|
425
|
+
return;
|
|
426
|
+
}
|
|
416
427
|
item.status = MergeItemStatus.FAILED;
|
|
417
428
|
item.error = waitResult.error;
|
|
418
429
|
this.stats.failed++;
|
|
@@ -686,6 +697,11 @@ export class MergeQueueProcessor {
|
|
|
686
697
|
message += `${update.progress.processed}/${update.progress.total} PRs processed\n`;
|
|
687
698
|
message += '```\n\n';
|
|
688
699
|
|
|
700
|
+
// Issue #1407: Show cancelling indicator when cancellation requested but queue still running
|
|
701
|
+
if (this.isCancelled) {
|
|
702
|
+
message += `š *Cancelling\\.\\.\\.*\n\n`;
|
|
703
|
+
}
|
|
704
|
+
|
|
689
705
|
// Status summary with emojis
|
|
690
706
|
message += `ā
Merged: ${update.stats.merged} `;
|
|
691
707
|
message += `ā Failed: ${update.stats.failed} `;
|