@grunnverk/github-tools 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/github.js ADDED
@@ -0,0 +1,1406 @@
1
+ import { Octokit } from '@octokit/rest';
2
+ import { getLogger } from './logger.js';
3
+ import { run } from '@grunnverk/git-tools';
4
+
5
// Injectable confirmation prompt. Callers may install a real interactive
// prompt via setPromptFunction(); the default is a non-interactive fallback
// that logs the question and answers YES.
const defaultPrompt = async (message) => {
    // Default to true for non-interactive environments
    // eslint-disable-next-line no-console
    console.warn(`Prompt: ${message} (defaulting to YES in non-interactive mode)`);
    return true;
};

let currentPrompt = defaultPrompt;

// Swap in a different prompt implementation (e.g. an interactive one).
const setPromptFunction = (fn) => {
    currentPrompt = fn;
};

// Ask a yes/no question through whichever prompt implementation is active.
const promptConfirmation = async (message) => currentPrompt(message);
19
/**
 * Build an authenticated Octokit client from the GITHUB_TOKEN environment
 * variable.
 *
 * @returns {Octokit} An Octokit instance authenticated with GITHUB_TOKEN.
 * @throws {Error} When GITHUB_TOKEN is not set.
 */
const getOctokit = () => {
    const logger = getLogger();
    const authToken = process.env.GITHUB_TOKEN;
    if (!authToken) {
        logger.error('GITHUB_TOKEN environment variable is not set.');
        throw new Error('GITHUB_TOKEN is not set.');
    }
    return new Octokit({ auth: authToken });
};
30
/**
 * Resolve the name of the branch currently checked out.
 *
 * @param {string} [cwd] - Working directory for the git command.
 * @returns {Promise<string>} The current branch name, trimmed.
 */
const getCurrentBranchName = async (cwd) => {
    const result = await run('git rev-parse --abbrev-ref HEAD', { cwd });
    return result.stdout.trim();
};
36
/**
 * Derive the `{ owner, repo }` pair from the `origin` remote URL.
 *
 * @param {string} [cwd] - Working directory for the git command.
 * @returns {Promise<{owner: string, repo: string}>}
 * @throws {Error} When there is no origin remote, the directory is not a git
 *   repository, or the URL cannot be parsed.
 */
const getRepoDetails = async (cwd) => {
    try {
        const { stdout } = await run('git remote get-url origin', {
            cwd,
            suppressErrorLogging: true
        });
        const url = stdout.trim();
        // Extract owner/repo from the tail of the URL. Works with any
        // hostname or SSH alias:
        //   - git@github.com:owner/repo.git
        //   - git@github.com-fjell:owner/repo.git
        //   - https://github.com/owner/repo.git
        //   - ssh://git@host/owner/repo.git
        // Two alternatives: SSH ":owner/repo" (groups 1,2) and
        // HTTPS/SSH-URL "//host/owner/repo" (groups 3,4).
        const match = url.match(/(?::([^/:]+)\/([^/:]+)|\/\/[^/]+\/([^/:]+)\/([^/:]+))(?:\.git)?$/);
        if (!match) {
            throw new Error(`Could not parse repository owner and name from origin URL: "${url}". Expected format: git@host:owner/repo.git or https://host/owner/repo.git`);
        }
        const owner = match[1] || match[3];
        let repo = match[2] || match[4];
        // The greedy repo group swallows a trailing ".git", so strip it here.
        if (repo.endsWith('.git')) {
            repo = repo.slice(0, -4);
        }
        return { owner, repo };
    } catch (error) {
        const logger = getLogger();
        // These two failures are routine (e.g. running outside a repo), so
        // log them as expected; anything else is still debug-only here and
        // rethrown for the caller to handle.
        const isExpected = error.message.includes('not a git repository') ||
            error.message.includes('remote origin does not exist');
        if (isExpected) {
            logger.debug(`Failed to get repository details (expected): ${error.message} (${cwd || process.cwd()})`);
        } else {
            logger.debug(`Failed to get repository details: ${error.message}`);
        }
        throw error;
    }
};
79
// GitHub API limit for pull request titles
const GITHUB_PR_TITLE_LIMIT = 256;

/**
 * Shorten a pull request title to GitHub's 256-character limit, preferring
 * to cut at a word boundary, and appending "..." when truncation occurs.
 *
 * @param {string} title - Candidate PR title.
 * @returns {string} The title unchanged if within the limit, otherwise a
 *   truncated title ending in "...".
 */
const truncatePullRequestTitle = (title) => {
    if (title.length <= GITHUB_PR_TITLE_LIMIT) {
        return title;
    }
    // Reserve three characters for the "..." suffix.
    const budget = GITHUB_PR_TITLE_LIMIT - 3;
    let head = title.substring(0, budget);
    // Prefer a word boundary, but only when that doesn't discard more than
    // ~20% of the available budget.
    const lastSpace = head.lastIndexOf(' ');
    if (lastSpace > budget * 0.8) {
        head = head.substring(0, lastSpace);
    }
    return `${head}...`;
};
95
/**
 * Create a pull request from `head` into `base`, reusing an existing open PR
 * for the same head branch when possible.
 *
 * @param {string} title - PR title; truncated to GitHub's 256-char limit.
 * @param {string} body - PR body.
 * @param {string} head - Head branch name.
 * @param {string} [base='main'] - Base branch name.
 * @param {object} [options] - `cwd` for repository resolution;
 *   `reuseExisting: false` skips the pre-flight existing-PR check.
 * @returns {Promise<object>} The created (or reused) pull request data.
 * @throws {PullRequestCreationError} On an unrecoverable 422 from the API.
 * @throws {Error} Any other API error, re-thrown unchanged.
 */
const createPullRequest = async (title, body, head, base = 'main', options = {}) => {
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(options.cwd);
    const logger = getLogger();
    // Pre-flight check: reuse an already-open PR for this head if allowed.
    if (options.reuseExisting !== false) {
        logger.debug(`Checking for existing PR with head: ${head}`);
        const existingPR = await findOpenPullRequestByHeadRef(head, options.cwd);
        if (existingPR) {
            if (existingPR.base.ref === base) {
                logger.info(`♻️ Reusing existing PR #${existingPR.number}: ${existingPR.html_url}`);
                return existingPR;
            } else {
                logger.warn(`⚠️ Existing PR #${existingPR.number} found but targets different base (${existingPR.base.ref} vs ${base})`);
                logger.warn(` PR URL: ${existingPR.html_url}`);
                logger.warn(` You may need to close the existing PR or use a different branch name`);
            }
        }
    }
    // Truncate title if it exceeds GitHub's limit
    const truncatedTitle = truncatePullRequestTitle(title.trim());
    if (truncatedTitle !== title.trim()) {
        logger.debug(`Pull request title truncated from ${title.trim().length} to ${truncatedTitle.length} characters to meet GitHub's 256-character limit`);
    }
    try {
        const response = await octokit.pulls.create({
            owner,
            repo,
            title: truncatedTitle,
            body,
            head,
            base
        });
        return response.data;
    } catch (error) {
        // Enhanced error handling for 422 errors (typically "a pull request
        // already exists" or validation failures).
        if (error.status === 422) {
            const { PullRequestCreationError } = await import('./errors.js');
            // Try to find an existing PR to provide more helpful info.
            let existingPR = null;
            try {
                // BUGFIX: pass options.cwd so this fallback lookup resolves
                // the same repository as the create call above (it previously
                // omitted the cwd and fell back to process.cwd()).
                existingPR = await findOpenPullRequestByHeadRef(head, options.cwd);
            } catch {
                // Ignore errors finding existing PR
            }
            // If an existing PR matches our target, reuse it instead of failing.
            if (existingPR && existingPR.base.ref === base) {
                logger.info(`♻️ Found and reusing existing PR #${existingPR.number} (created after initial check)`);
                logger.info(` URL: ${existingPR.html_url}`);
                logger.info(` This can happen when PRs are created in parallel or from a previous failed run`);
                return existingPR;
            }
            const prError = new PullRequestCreationError(`Failed to create pull request: ${error.message}`, 422, head, base, error.response?.data, existingPR?.number, existingPR?.html_url);
            // Log the detailed recovery instructions line-by-line.
            const instructions = prError.getRecoveryInstructions();
            for (const line of instructions.split('\n')) {
                logger.error(line);
            }
            logger.error('');
            throw prError;
        }
        // Re-throw other errors
        throw error;
    }
};
161
/**
 * Find the first open pull request whose head is `owner:head`.
 *
 * @param {string} head - Head branch name.
 * @param {string} [cwd] - Working directory used to resolve the repository.
 * @returns {Promise<object|null>} The first matching open PR, or null.
 * @throws {Error} Repository-resolution or API errors, re-thrown unchanged.
 */
const findOpenPullRequestByHeadRef = async (head, cwd) => {
    const octokit = getOctokit();
    const logger = getLogger();
    try {
        const { owner, repo } = await getRepoDetails(cwd);
        logger.debug(`Searching for open pull requests with head: ${owner}:${head} in ${owner}/${repo}`);
        const response = await octokit.pulls.list({
            owner,
            repo,
            state: 'open',
            head: `${owner}:${head}`
        });
        logger.debug(`Found ${response.data.length} open pull requests`);
        return response.data[0] ?? null;
    } catch (error) {
        // "not a git repository" was already logged at debug level by
        // getRepoDetails, so keep it quiet; anything else gets a real error.
        if (error.message.includes('not a git repository')) {
            logger.debug(`Skipping PR search: not a git repository (${cwd || process.cwd()})`);
        } else {
            logger.error(`Failed to find open pull requests: ${error.message}`);
        }
        if (error.status === 404) {
            logger.error(`Repository not found or access denied. Please check your GITHUB_TOKEN permissions.`);
        }
        throw error;
    }
};
189
// Sleep for `ms` milliseconds.
const delay = (ms) => {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
};
190
/**
 * Check whether the repository has any GitHub Actions workflows configured.
 * On any API failure (e.g. missing Actions permission) this deliberately
 * answers `true` so callers err on the side of assuming workflows exist.
 *
 * @param {string} [cwd] - Working directory used to resolve the repository.
 * @returns {Promise<boolean>}
 */
const hasWorkflowsConfigured = async (cwd) => {
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    try {
        const { data } = await octokit.actions.listRepoWorkflows({
            owner,
            repo
        });
        return data.workflows.length > 0;
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    } catch (error) {
        // If we can't check workflows (e.g., no Actions permission), assume they might exist
        return true;
    }
};
206
/**
 * Check if workflows are configured and would be triggered for PRs to the
 * target branch, and return detailed information about the configuration.
 *
 * @param {string} [targetBranch='main'] - Base branch PRs would target.
 * @param {string} [cwd] - Working directory used to resolve the repository.
 * @returns {Promise<object>} `{ hasWorkflows, workflowCount,
 *   hasPullRequestTriggers, triggeredWorkflowNames, warning? }`. On API
 *   failure, returns a permissive result with `workflowCount: -1` (sentinel
 *   for "unknown") so callers don't get a false "no workflows" signal.
 */ const checkWorkflowConfiguration = async (targetBranch = 'main', cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        logger.debug(`Checking workflow configuration for PRs to ${targetBranch}...`);
        const response = await octokit.actions.listRepoWorkflows({
            owner,
            repo
        });
        const workflows = response.data.workflows;
        if (workflows.length === 0) {
            return {
                hasWorkflows: false,
                workflowCount: 0,
                hasPullRequestTriggers: false,
                triggeredWorkflowNames: [],
                warning: 'No GitHub Actions workflows are configured in this repository'
            };
        }
        // Fetch each workflow file and analyze whether a PR to targetBranch
        // would trigger it. Per-workflow failures are non-fatal.
        const triggeredWorkflows = [];
        for (const workflow of workflows){
            try {
                const workflowPath = workflow.path;
                logger.debug(`Checking workflow: ${workflow.name} (${workflowPath})`);
                const contentResponse = await octokit.repos.getContent({
                    owner,
                    repo,
                    path: workflowPath
                });
                // getContent can return a directory listing; only analyze
                // actual file responses (which carry base64 content).
                if ('content' in contentResponse.data && contentResponse.data.type === 'file') {
                    const content = Buffer.from(contentResponse.data.content, 'base64').toString('utf-8');
                    if (isTriggeredByPullRequest(content, targetBranch, workflow.name)) {
                        logger.debug(`✓ Workflow "${workflow.name}" will be triggered by PRs to ${targetBranch}`);
                        triggeredWorkflows.push(workflow.name);
                    } else {
                        logger.debug(`✗ Workflow "${workflow.name}" will not be triggered by PRs to ${targetBranch}`);
                    }
                }
            } catch (error) {
                // Skip workflows we cannot fetch or parse; they simply don't
                // count toward triggeredWorkflows.
                logger.debug(`Failed to analyze workflow ${workflow.name}: ${error.message}`);
            }
        }
        const hasPullRequestTriggers = triggeredWorkflows.length > 0;
        const warning = !hasPullRequestTriggers ? `${workflows.length} workflow(s) are configured, but none appear to trigger on pull requests to ${targetBranch}` : undefined;
        return {
            hasWorkflows: true,
            workflowCount: workflows.length,
            hasPullRequestTriggers,
            triggeredWorkflowNames: triggeredWorkflows,
            warning
        };
    } catch (error) {
        logger.debug(`Failed to check workflow configuration: ${error.message}`);
        // If we can't check, assume workflows might exist to avoid false
        // negatives; workflowCount -1 signals "unknown".
        return {
            hasWorkflows: true,
            workflowCount: -1,
            hasPullRequestTriggers: true,
            triggeredWorkflowNames: []
        };
    }
};
273
/**
 * Heuristically determine whether a workflow file would be triggered by a
 * pull request targeting `targetBranch`, by regex-scanning the raw YAML.
 * This is a best-effort text analysis, not a YAML parse; on any parsing
 * error it answers `true` to avoid false negatives.
 *
 * @param {string} workflowContent - Raw workflow YAML text.
 * @param {string} targetBranch - Base branch the PR would target.
 * @param {string} workflowName - Used only for debug logging.
 * @returns {boolean}
 */ const isTriggeredByPullRequest = (workflowContent, targetBranch, workflowName)=>{
    const logger = getLogger();
    try {
        // Pattern 1: block-style trigger —
        //   on:
        //     pull_request:
        //       branches: [main, develop, ...]
        // Matches an "on:" line followed (possibly after other trigger lines)
        // by an indented "pull_request:" key.
        const prEventPattern = /(?:^|\r?\n)[^\S\r\n]*on\s*:\s*\r?\n(?:[^\S\r\n]*[^\r\n]+(?:\r?\n))*?[^\S\r\n]*pull_request\s*:/mi;
        // Pattern 2: inline-style trigger — "on: [push, pull_request]" or
        // "on: pull_request" on a single line.
        const onPullRequestPattern = /(?:^|\n)\s*on\s*:\s*(?:\[.*pull_request.*\]|pull_request)\s*(?:\n|$)/m;
        const hasPullRequestTrigger = prEventPattern.test(workflowContent) || onPullRequestPattern.test(workflowContent);
        if (!hasPullRequestTrigger) {
            return false;
        }
        // A pull_request trigger exists; now look for a "branches:" filter
        // under it (either block list on following lines or inline "[...]").
        const branchPattern = /pull_request\s*:\s*\r?\n(?:[^\S\r\n]*[^\r\n]+(?:\r?\n))*?[^\S\r\n]*branches\s*:\s*(?:\r?\n|\[)([^\]\r\n]+)/mi;
        const branchMatch = workflowContent.match(branchPattern);
        if (branchMatch) {
            const branchesSection = branchMatch[1];
            logger.debug(`Workflow "${workflowName}" has branch filter: ${branchesSection}`);
            // Substring check: the target branch appears in the filter text.
            if (branchesSection.includes(targetBranch)) {
                logger.debug(`Workflow "${workflowName}" branch filter matches ${targetBranch} (exact match)`);
                return true;
            }
            // Catch-all wildcards: "**" anywhere, or a standalone "*" entry
            // (optionally quoted) — but NOT prefix globs like "feature/*".
            if (branchesSection.includes('**') || branchesSection.match(/[[,\s]'?\*'?[,\s\]]/)) {
                logger.debug(`Workflow "${workflowName}" branch filter matches ${targetBranch} (wildcard match)`);
                return true;
            }
            logger.debug(`Workflow "${workflowName}" branch filter does not match ${targetBranch}`);
            return false;
        }
        // No branch filter means the workflow triggers on all PRs.
        logger.debug(`Workflow "${workflowName}" has no branch filter, triggers on all PRs`);
        return true;
    } catch (error) {
        logger.debug(`Failed to parse workflow content for ${workflowName}: ${error.message}`);
        // If we can't parse, assume it might trigger to avoid false negatives.
        return true;
    }
};
320
/**
 * Check if any workflow runs have been triggered for a specific PR.
 * This is more specific than hasWorkflowsConfigured, as it checks for actual
 * runs matching the PR's head SHA (or very recent runs on its head branch).
 * On any API failure it answers `true` (assume runs might exist).
 *
 * @param {number} prNumber - Pull request number.
 * @param {string} [cwd] - Working directory used to resolve the repository.
 * @returns {Promise<boolean>}
 */ const hasWorkflowRunsForPR = async (prNumber, cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        // Get the PR to find the head SHA and head branch name.
        const pr = await octokit.pulls.get({
            owner,
            repo,
            pull_number: prNumber
        });
        const headSha = pr.data.head.sha;
        const headRef = pr.data.head.ref;
        // Runs keyed by the PR's exact head commit.
        const workflowRuns = await octokit.actions.listWorkflowRunsForRepo({
            owner,
            repo,
            head_sha: headSha,
            per_page: 50
        });
        // Also check for runs on the branch (covers push-triggered runs).
        const branchRuns = await octokit.actions.listWorkflowRunsForRepo({
            owner,
            repo,
            branch: headRef,
            per_page: 50
        });
        const allRuns = [
            ...workflowRuns.data.workflow_runs,
            ...branchRuns.data.workflow_runs
        ];
        // Keep runs matching the head SHA, or branch runs created in the last
        // 5 minutes. NOTE: `&&` binds tighter than `||`, so this reads:
        //   (sha matches) OR (branch matches AND run is recent).
        const relevantRuns = allRuns.filter((run)=>run.head_sha === headSha || run.head_branch === headRef && new Date(run.created_at).getTime() > Date.now() - 300000 // Last 5 minutes
        );
        if (relevantRuns.length > 0) {
            logger.debug(`Found ${relevantRuns.length} workflow runs for PR #${prNumber} (SHA: ${headSha})`);
            return true;
        }
        logger.debug(`No workflow runs found for PR #${prNumber} (SHA: ${headSha}, branch: ${headRef})`);
        return false;
    } catch (error) {
        logger.debug(`Error checking workflow runs for PR #${prNumber}: ${error.message}`);
        // If we can't check workflow runs, assume they might exist.
        return true;
    }
};
369
/**
 * Poll GitHub until all check runs for a PR complete, fail, or a timeout is
 * reached. Handles repositories with no workflows, workflows that never
 * trigger for the PR, and push-only workflows by (optionally) asking the
 * user whether to proceed without checks.
 *
 * @param {number} prNumber - Pull request number to watch.
 * @param {object} [options] - `cwd` for repo resolution; `timeout` in ms
 *   (default 1 hour); `skipUserConfirmation` for non-interactive mode.
 * @returns {Promise<void>} Resolves when it is safe to proceed.
 * @throws {Error} On timeout or user refusal.
 * @throws {PullRequestCheckError} When one or more checks fail.
 */
const waitForPullRequestChecks = async (prNumber, options = {})=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(options.cwd);
    const logger = getLogger();
    const timeout = options.timeout || 3600000; // 1 hour default timeout
    const skipUserConfirmation = options.skipUserConfirmation || false;
    const startTime = Date.now();
    let consecutiveNoChecksCount = 0;
    const maxConsecutiveNoChecks = 3; // 3 consecutive checks (30 seconds) with no checks before deeper investigation
    let checkedWorkflowRuns = false; // Track if we've already checked for workflow runs to avoid repeated checks
    // Poll loop: every iteration re-reads the PR head SHA and its check runs,
    // then either returns, throws, or sleeps 10s and retries.
    while(true){
        const elapsedTime = Date.now() - startTime;
        // Check for timeout
        if (elapsedTime > timeout) {
            logger.warn(`Timeout reached (${timeout / 1000}s) while waiting for PR #${prNumber} checks.`);
            if (!skipUserConfirmation) {
                const proceedWithoutChecks = await promptConfirmation(`⚠️ Timeout reached while waiting for PR #${prNumber} checks.\n` + `This might indicate that no checks are configured for this repository.\n` + `Do you want to proceed with merging the PR without waiting for checks?`);
                if (proceedWithoutChecks) {
                    logger.info('User chose to proceed without waiting for checks.');
                    return;
                } else {
                    throw new Error(`Timeout waiting for PR #${prNumber} checks. User chose not to proceed.`);
                }
            } else {
                throw new Error(`Timeout waiting for PR #${prNumber} checks (${timeout / 1000}s)`);
            }
        }
        // Re-fetch the PR each iteration so new pushes (new head SHA) are
        // picked up.
        const pr = await octokit.pulls.get({
            owner,
            repo,
            pull_number: prNumber
        });
        const checkRunsResponse = await octokit.checks.listForRef({
            owner,
            repo,
            ref: pr.data.head.sha
        });
        const checkRuns = checkRunsResponse.data.check_runs;
        if (checkRuns.length === 0) {
            consecutiveNoChecksCount++;
            logger.info(`PR #${prNumber}: No checks found (${consecutiveNoChecksCount}/${maxConsecutiveNoChecks}). Waiting...`);
            // After several consecutive "no checks" responses, investigate
            // whether checks will ever appear at all.
            if (consecutiveNoChecksCount >= maxConsecutiveNoChecks) {
                logger.info(`No checks detected for ${maxConsecutiveNoChecks} consecutive attempts. Checking repository configuration...`);
                const hasWorkflows = await hasWorkflowsConfigured(options.cwd);
                if (!hasWorkflows) {
                    // Case 1: the repo has no workflows at all — checks will
                    // never appear.
                    logger.warn(`No GitHub Actions workflows found in repository ${owner}/${repo}.`);
                    if (!skipUserConfirmation) {
                        const proceedWithoutChecks = await promptConfirmation(`⚠️ No GitHub Actions workflows or checks are configured for this repository.\n` + `PR #${prNumber} will never have status checks to wait for.\n` + `Do you want to proceed with merging the PR without checks?`);
                        if (proceedWithoutChecks) {
                            logger.info('User chose to proceed without checks (no workflows configured).');
                            return;
                        } else {
                            throw new Error(`No checks configured for PR #${prNumber}. User chose not to proceed.`);
                        }
                    } else {
                        // In non-interactive mode, proceed if no workflows are configured
                        logger.info('No workflows configured, proceeding without checks.');
                        return;
                    }
                } else {
                    // Workflows exist, but check if any are actually running for this PR
                    if (!checkedWorkflowRuns) {
                        logger.info('GitHub Actions workflows are configured. Checking if any workflows are triggered for this PR...');
                        // First check if workflow runs exist at all for this PR's branch/SHA
                        const hasRunsForPR = await hasWorkflowRunsForPR(prNumber, options.cwd);
                        checkedWorkflowRuns = true; // Mark that we've checked
                        if (!hasRunsForPR) {
                            // Case 2: workflows exist but none were triggered
                            // for this PR (trigger patterns don't match).
                            logger.warn(`No workflow runs detected for PR #${prNumber}. This may indicate that the configured workflows don't match this branch pattern.`);
                            if (!skipUserConfirmation) {
                                const proceedWithoutChecks = await promptConfirmation(`⚠️ GitHub Actions workflows are configured in this repository, but none appear to be triggered by PR #${prNumber}.\n` + `This usually means the workflow trigger patterns (branches, paths) don't match this PR.\n` + `PR #${prNumber} will likely never have status checks to wait for.\n` + `Do you want to proceed with merging the PR without waiting for checks?`);
                                if (proceedWithoutChecks) {
                                    logger.info('User chose to proceed without checks (no matching workflow triggers).');
                                    return;
                                } else {
                                    throw new Error(`No matching workflow triggers for PR #${prNumber}. User chose not to proceed.`);
                                }
                            } else {
                                // In non-interactive mode, proceed if no workflow runs are detected
                                logger.info('No workflow runs detected for this PR, proceeding without checks.');
                                return;
                            }
                        } else {
                            // Case 3: workflow runs exist on the branch but are
                            // not associated with the PR — typically workflows
                            // trigger on 'push' but not 'pull_request'.
                            logger.info(`Found workflow runs on the branch, but none appear as PR checks.`);
                            logger.info(`This usually means workflows trigger on 'push' but not 'pull_request'.`);
                            if (!skipUserConfirmation) {
                                const proceedWithoutChecks = await promptConfirmation(`⚠️ Workflow runs exist for the branch, but no check runs are associated with PR #${prNumber}.\n` + `This typically means workflows are configured for 'push' events but not 'pull_request' events.\n` + `Do you want to proceed with merging the PR without waiting for checks?`);
                                if (proceedWithoutChecks) {
                                    logger.info('User chose to proceed without PR checks (workflows not configured for pull_request events).');
                                    return;
                                } else {
                                    throw new Error(`No PR check runs for #${prNumber} (workflows trigger on push only). User chose not to proceed.`);
                                }
                            } else {
                                // In non-interactive mode, proceed if workflow runs exist but aren't PR checks
                                logger.info('Workflow runs exist but are not PR checks, proceeding without checks.');
                                return;
                            }
                        }
                    } else {
                        // Case 4: we've already investigated once and still see
                        // no checks — give up rather than loop forever.
                        logger.warn(`Still no checks after ${consecutiveNoChecksCount} attempts. Workflow runs exist on branch but not as PR checks.`);
                        if (!skipUserConfirmation) {
                            const proceedWithoutChecks = await promptConfirmation(`⚠️ After waiting ${Math.round(elapsedTime / 1000)}s, no checks have appeared for PR #${prNumber}.\n` + `The configured workflows don't appear to trigger for this branch.\n` + `Do you want to proceed with merging the PR without checks?`);
                            if (proceedWithoutChecks) {
                                logger.info('User chose to proceed without checks (timeout waiting for workflow triggers).');
                                return;
                            } else {
                                throw new Error(`No workflow triggers matched PR #${prNumber} after waiting. User chose not to proceed.`);
                            }
                        } else {
                            // In non-interactive mode, proceed after reasonable waiting
                            logger.info('No workflow runs detected after waiting, proceeding without checks.');
                            return;
                        }
                    }
                }
            }
            await delay(10000);
            continue;
        }
        // Reset the no-checks counter since we found some checks
        consecutiveNoChecksCount = 0;
        // Any check that concluded as failure/timed_out/cancelled fails the
        // whole wait.
        const failingChecks = checkRuns.filter((cr)=>cr.conclusion && [
            'failure',
            'timed_out',
            'cancelled'
        ].includes(cr.conclusion));
        if (failingChecks.length > 0) {
            // NOTE: this re-resolves (and shadows) owner/repo from the top of
            // the function; kept as-is for behavior parity.
            const { owner, repo } = await getRepoDetails(options.cwd);
            const prUrl = `https://github.com/${owner}/${repo}/pull/${prNumber}`;
            // Collect detailed information about each failed check.
            const detailedFailedChecks = await Promise.all(failingChecks.map(async (check)=>{
                try {
                    var _checkDetails_data_output, _checkDetails_data_output1, _checkDetails_data_output2, _checkDetails_data_output3;
                    // Get additional details from the check run
                    const checkDetails = await octokit.checks.get({
                        owner,
                        repo,
                        check_run_id: check.id
                    });
                    return {
                        name: check.name,
                        conclusion: check.conclusion || 'unknown',
                        detailsUrl: check.details_url || undefined,
                        summary: ((_checkDetails_data_output = checkDetails.data.output) === null || _checkDetails_data_output === void 0 ? void 0 : _checkDetails_data_output.summary) || undefined,
                        output: {
                            title: ((_checkDetails_data_output1 = checkDetails.data.output) === null || _checkDetails_data_output1 === void 0 ? void 0 : _checkDetails_data_output1.title) || undefined,
                            summary: ((_checkDetails_data_output2 = checkDetails.data.output) === null || _checkDetails_data_output2 === void 0 ? void 0 : _checkDetails_data_output2.summary) || undefined,
                            text: ((_checkDetails_data_output3 = checkDetails.data.output) === null || _checkDetails_data_output3 === void 0 ? void 0 : _checkDetails_data_output3.text) || undefined
                        }
                    };
                } catch {
                    // Fallback to basic information if we can't get details
                    return {
                        name: check.name,
                        conclusion: check.conclusion || 'unknown',
                        detailsUrl: check.details_url || undefined
                    };
                }
            }));
            logger.error(`❌ PR #${prNumber} has ${failingChecks.length} failing check${failingChecks.length > 1 ? 's' : ''}:`);
            logger.error('');
            for (const check of detailedFailedChecks){
                var _check_output, _check_output1;
                const statusIcon = check.conclusion === 'failure' ? '❌' : check.conclusion === 'timed_out' ? '⏰' : '🚫';
                logger.error(`${statusIcon} ${check.name}: ${check.conclusion}`);
                // Show more detailed error information if available
                if (((_check_output = check.output) === null || _check_output === void 0 ? void 0 : _check_output.title) && check.output.title !== check.name) {
                    logger.error(` Issue: ${check.output.title}`);
                }
                if ((_check_output1 = check.output) === null || _check_output1 === void 0 ? void 0 : _check_output1.summary) {
                    // Truncate very long summaries
                    const summary = check.output.summary.length > 200 ? check.output.summary.substring(0, 200) + '...' : check.output.summary;
                    logger.error(` Summary: ${summary}`);
                }
                // Include direct link to check details
                if (check.detailsUrl) {
                    logger.error(` Details: ${check.detailsUrl}`);
                }
                logger.error('');
            }
            // Import the new error class
            const { PullRequestCheckError } = await import('./errors.js');
            // Create and throw the enhanced error with detailed recovery instructions
            const prError = new PullRequestCheckError(`PR #${prNumber} checks failed. ${failingChecks.length} check${failingChecks.length > 1 ? 's' : ''} failed.`, prNumber, detailedFailedChecks, prUrl);
            // Display recovery instructions (split by line to avoid character-by-character logging)
            const instructions = prError.getRecoveryInstructions();
            for (const line of instructions.split('\n')){
                logger.error(line);
            }
            logger.error('');
            throw prError;
        }
        // All runs completed and none failed (failures returned above).
        const allChecksCompleted = checkRuns.every((cr)=>cr.status === 'completed');
        if (allChecksCompleted) {
            logger.info(`All checks for PR #${prNumber} have completed successfully.`);
            return;
        }
        const completedCount = checkRuns.filter((cr)=>cr.status === 'completed').length;
        logger.info(`PR #${prNumber} checks: ${completedCount}/${checkRuns.length} completed. Waiting...`);
        await delay(10000); // wait 10 seconds
    }
};
577
/**
 * Merge a pull request and (by default) delete its head branch afterwards.
 *
 * @param {number} prNumber - Pull request number.
 * @param {string} [mergeMethod='squash'] - 'merge', 'squash', or 'rebase'.
 * @param {boolean} [deleteBranch=true] - Delete the head branch after merge.
 * @param {string} [cwd] - Working directory used to resolve the repository.
 */
const mergePullRequest = async (prNumber, mergeMethod = 'squash', deleteBranch = true, cwd) => {
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    logger.info(`Merging PR #${prNumber} using ${mergeMethod} method...`);
    // Look up the head branch before merging so it can be deleted afterwards.
    const pr = await octokit.pulls.get({
        owner,
        repo,
        pull_number: prNumber
    });
    const headBranch = pr.data.head.ref;
    await octokit.pulls.merge({
        owner,
        repo,
        pull_number: prNumber,
        merge_method: mergeMethod
    });
    logger.info(`PR #${prNumber} merged using ${mergeMethod} method.`);
    if (!deleteBranch) {
        logger.info(`Preserving branch ${headBranch} (deletion skipped).`);
        return;
    }
    logger.info(`Deleting branch ${headBranch}...`);
    await octokit.git.deleteRef({
        owner,
        repo,
        ref: `heads/${headBranch}`
    });
    logger.info(`Branch ${headBranch} deleted.`);
};
607
/**
 * Create a GitHub release for an existing tag.
 *
 * The title and notes are unescaped first: if they passed through JSON
 * serialization (e.g. generated by agentic AI tooling or stored as a JSON
 * string), newlines/tabs may arrive as literal "\n"/"\r"/"\t" sequences,
 * which GitHub would render verbatim instead of as line breaks.
 *
 * @param {string} tagName - Existing tag to release.
 * @param {string} title - Release title (may contain escaped whitespace).
 * @param {string} notes - Release body (may contain escaped whitespace).
 * @param {string} [cwd] - Working directory used to resolve the repository.
 */
const createRelease = async (tagName, title, notes, cwd) => {
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    logger.info(`Creating release for tag ${tagName}...`);
    // Turn literal "\n"/"\r"/"\t" sequences back into real whitespace.
    const unescape = (text) => text.replace(/\\n/g, '\n').replace(/\\r/g, '\r').replace(/\\t/g, '\t');
    await octokit.repos.createRelease({
        owner,
        repo,
        tag_name: tagName,
        name: unescape(title),
        body: unescape(notes)
    });
    logger.info(`Release ${tagName} created.`);
};
630
/**
 * Fetch the GitHub release associated with a tag.
 *
 * @param {string} tagName - Tag to look up.
 * @param {string} [cwd] - Working directory used to resolve the repository.
 * @returns {Promise<object>} The release data.
 * @throws {Error} API errors (including 404 when no release exists),
 *   re-thrown unchanged after a debug log.
 */
const getReleaseByTagName = async (tagName, cwd) => {
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        const { data } = await octokit.repos.getReleaseByTag({
            owner,
            repo,
            tag: tagName
        });
        logger.debug(`Found release for tag ${tagName}: created at ${data.created_at}`);
        return data;
    } catch (error) {
        logger.debug(`Failed to get release for tag ${tagName}: ${error.message}`);
        throw error;
    }
};
647
/**
 * Fetch up to `limit` open issues (excluding PRs) and format them as a
 * newline-separated text summary. Returns '' when there are no issues or
 * the request fails (best-effort; failures are logged as warnings).
 *
 * @param {number} [limit=20] - Maximum number of issues to include.
 * @param {string} [cwd] - Working directory used to resolve the repository.
 * @returns {Promise<string>}
 */
const getOpenIssues = async (limit = 20, cwd) => {
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        logger.debug(`Fetching up to ${limit} open GitHub issues...`);
        const response = await octokit.issues.listForRepo({
            owner,
            repo,
            state: 'open',
            per_page: Math.min(limit, 100),
            sort: 'updated',
            direction: 'desc'
        });
        // The issues endpoint also returns PRs; drop anything with a
        // pull_request payload.
        const issues = response.data.filter((issue) => !issue.pull_request);
        if (issues.length === 0) {
            logger.debug('No open issues found');
            return '';
        }
        const formatted = issues.slice(0, limit).map((issue) => {
            const labels = issue.labels.map((label) => typeof label === 'string' ? label : label.name).join(', ');
            const lines = [
                `Issue #${issue.number}: ${issue.title}`,
                `Labels: ${labels || 'none'}`,
                `Created: ${issue.created_at}`,
                `Updated: ${issue.updated_at}`,
                `Body: ${issue.body?.substring(0, 500) || 'No description'}${issue.body && issue.body.length > 500 ? '...' : ''}`,
                '---'
            ];
            return lines.join('\n');
        });
        logger.debug(`Fetched ${issues.length} open issues`);
        return formatted.join('\n\n');
    } catch (error) {
        logger.warn(`Failed to fetch GitHub issues: ${error.message}`);
        return '';
    }
};
685
/**
 * Open a new GitHub issue on the repository's origin.
 *
 * @param {string} title - Issue title.
 * @param {string} body - Issue body (markdown).
 * @param {string[]} [labels] - Labels to apply; defaults to none.
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @returns {Promise<{number: number, html_url: string}>} Created issue's
 *   number and web URL.
 */ const createIssue = async (title, body, labels, cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const { data } = await octokit.issues.create({
        owner,
        repo,
        title,
        body,
        labels: labels || []
    });
    return {
        number: data.number,
        html_url: data.html_url
    };
};
700
/**
 * Find GitHub Actions workflow runs that were (heuristically) triggered by a
 * given release.
 *
 * Strategy: fetch the release to learn its creation time, then keep any
 * recent run created after that time (minus a 1-minute grace window). When
 * the release cannot be fetched, fall back to "runs from the last 30
 * minutes". Runs are NOT matched on event type or commit SHA — this is a
 * time-window heuristic, so unrelated runs in the window can be included.
 *
 * @param {string} tagName - Release tag to correlate runs against.
 * @param {string[]} [workflowNames] - If non-empty, only these workflows
 *   (matched by display name) are inspected; otherwise all workflows.
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @returns {Promise<object[]>} Matching workflow runs, newest first; an empty
 *   array on failure (errors are logged, not thrown).
 */ const getWorkflowRunsTriggeredByRelease = async (tagName, workflowNames, cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        logger.debug(`Fetching workflow runs triggered by release ${tagName}...`);
        // Get release information to filter by creation time and commit SHA
        let releaseInfo;
        let releaseCreatedAt;
        let releaseCommitSha;
        try {
            releaseInfo = await getReleaseByTagName(tagName, cwd);
            releaseCreatedAt = releaseInfo === null || releaseInfo === void 0 ? void 0 : releaseInfo.created_at;
            releaseCommitSha = releaseInfo === null || releaseInfo === void 0 ? void 0 : releaseInfo.target_commitish;
        } catch (error) {
            // Release lookup is best-effort; without it we fall back to a
            // recency-only filter below.
            logger.debug(`Could not get release info for ${tagName}: ${error.message}. Using more permissive filtering.`);
        }
        if (releaseCreatedAt) {
            logger.debug(`Release ${tagName} was created at ${releaseCreatedAt}, filtering workflows created after this time`);
        } else {
            logger.debug(`No release creation time available for ${tagName}, using more permissive time filtering`);
        }
        if (releaseCommitSha) {
            // NOTE(review): the commit SHA is only logged here — it is never
            // used in the filtering below. Confirm whether SHA matching was
            // intended.
            logger.debug(`Release ${tagName} targets commit ${releaseCommitSha}`);
        }
        // Get all workflows
        const workflowsResponse = await octokit.actions.listRepoWorkflows({
            owner,
            repo
        });
        const relevantWorkflows = workflowsResponse.data.workflows.filter((workflow)=>{
            // If specific workflow names are provided, only include those
            if (workflowNames && workflowNames.length > 0) {
                return workflowNames.includes(workflow.name);
            }
            // Otherwise, find workflows that trigger on releases
            return true; // We'll filter by event later when we get the runs
        });
        logger.debug(`Found ${relevantWorkflows.length} workflows to check`);
        const allRuns = [];
        // Get recent workflow runs for each workflow
        // (sequential awaits keep API usage gentle — one workflow at a time)
        for (const workflow of relevantWorkflows){
            try {
                const runsResponse = await octokit.actions.listWorkflowRuns({
                    owner,
                    repo,
                    workflow_id: workflow.id,
                    per_page: 30
                });
                logger.debug(`Checking ${runsResponse.data.workflow_runs.length} recent runs for workflow "${workflow.name}"`);
                // Filter runs that were triggered by our specific release
                // NOTE: this callback's `run` parameter shadows the `run`
                // helper imported from @grunnverk/git-tools at the top of
                // the file (harmless here, but easy to trip over).
                const releaseRuns = runsResponse.data.workflow_runs.filter((run)=>{
                    logger.debug(`Evaluating run ${run.id} for workflow "${workflow.name}": event=${run.event}, created_at=${run.created_at}`);
                    // Must have required data
                    if (!run.created_at) {
                        logger.debug(`Excluding workflow run ${run.id}: missing created_at`);
                        return false;
                    }
                    // Simple logic: if we have release info, just check that the run was created after the release
                    if (releaseCreatedAt) {
                        const runCreatedAt = new Date(run.created_at).getTime();
                        const releaseCreatedAtTime = new Date(releaseCreatedAt).getTime();
                        // Include any run that started after the release (with 1 minute buffer for timing)
                        if (runCreatedAt < releaseCreatedAtTime - 60000) {
                            logger.debug(`Excluding workflow run ${run.id}: created before release (run: ${run.created_at}, release: ${releaseCreatedAt})`);
                            return false;
                        }
                    } else {
                        // No release info - just look for recent runs (within last 30 minutes)
                        const runAge = Date.now() - new Date(run.created_at).getTime();
                        if (runAge > 1800000) {
                            logger.debug(`Excluding old workflow run ${run.id}: created ${run.created_at}`);
                            return false;
                        }
                    }
                    logger.debug(`Including workflow run ${run.id}: ${workflow.name} (${run.status}/${run.conclusion || 'pending'}) created ${run.created_at}`);
                    return true;
                });
                allRuns.push(...releaseRuns);
                if (releaseRuns.length > 0) {
                    logger.debug(`Found ${releaseRuns.length} relevant workflow runs for ${workflow.name}`);
                } else {
                    logger.debug(`No relevant workflow runs found for ${workflow.name}`);
                }
            } catch (error) {
                // A single failing workflow must not abort the scan.
                logger.warn(`Failed to get runs for workflow ${workflow.name}: ${error.message}`);
            }
        }
        // Sort by creation time (newest first)
        allRuns.sort((a, b)=>{
            return new Date(b.created_at).getTime() - new Date(a.created_at).getTime();
        });
        logger.debug(`Found ${allRuns.length} workflow runs triggered by release ${tagName}`);
        return allRuns;
    } catch (error) {
        logger.error(`Failed to get workflow runs for release ${tagName}: ${error.message}`);
        return [];
    }
};
799
/**
 * Poll GitHub Actions until every workflow run triggered by a release has
 * completed, a run fails, the timeout elapses, or the user aborts.
 *
 * Behavior:
 * - Sleeps 20s up front to give GitHub time to dispatch runs, then polls
 *   every 10s (while no runs are visible) / 15s (while runs are pending).
 * - After 20 consecutive empty polls, asks the user whether to proceed
 *   (auto-proceeds when `skipUserConfirmation` is set).
 * - Throws when any run concludes failure/timed_out/cancelled, or on
 *   timeout when the user declines to proceed.
 * - Returns (undefined) on success or when the user/non-interactive mode
 *   chooses to proceed without workflows.
 *
 * NOTE: `delay` is defined elsewhere in this file (outside this block).
 *
 * @param {string} tagName - Release tag whose workflows to wait for.
 * @param {object} [options]
 * @param {number} [options.timeout=1800000] - Overall wait budget in ms.
 * @param {boolean} [options.skipUserConfirmation=false] - Never prompt; fail
 *   hard on timeout and proceed silently when no workflows exist.
 * @param {string[]} [options.workflowNames] - Restrict monitoring to these
 *   workflow display names.
 * @param {string} [options.cwd] - Repo directory passed to the API helpers.
 */ const waitForReleaseWorkflows = async (tagName, options = {})=>{
    const logger = getLogger();
    const timeout = options.timeout || 1800000; // 30 minutes default
    const skipUserConfirmation = options.skipUserConfirmation || false;
    logger.info(`Waiting for workflows triggered by release ${tagName}...`);
    // Wait for workflows to start (GitHub can take time to process the release and trigger workflows)
    logger.debug('Waiting 20 seconds for workflows to start...');
    await delay(20000);
    const startTime = Date.now();
    let workflowRuns = [];
    let consecutiveNoWorkflowsCount = 0;
    const maxConsecutiveNoWorkflows = 20;
    while(true){
        const elapsedTime = Date.now() - startTime;
        // Check for timeout
        if (elapsedTime > timeout) {
            logger.warn(`Timeout reached (${timeout / 1000}s) while waiting for release workflows.`);
            if (!skipUserConfirmation) {
                const proceedWithoutWorkflows = await promptConfirmation(`⚠️ Timeout reached while waiting for release workflows for ${tagName}.\n` + `This might indicate that no workflows are configured to trigger on releases.\n` + `Do you want to proceed anyway?`);
                if (proceedWithoutWorkflows) {
                    logger.info('User chose to proceed without waiting for release workflows.');
                    return;
                } else {
                    throw new Error(`Timeout waiting for release workflows for ${tagName}. User chose not to proceed.`);
                }
            } else {
                throw new Error(`Timeout waiting for release workflows for ${tagName} (${timeout / 1000}s)`);
            }
        }
        // Get current workflow runs
        workflowRuns = await getWorkflowRunsTriggeredByRelease(tagName, options.workflowNames, options.cwd);
        if (workflowRuns.length === 0) {
            consecutiveNoWorkflowsCount++;
            logger.info(`No release workflows found (${consecutiveNoWorkflowsCount}/${maxConsecutiveNoWorkflows}). Waiting...`);
            // Add debug info about what we're looking for (first miss only)
            if (consecutiveNoWorkflowsCount === 1) {
                logger.debug(`Looking for workflows triggered by release ${tagName}`);
                if (options.workflowNames && options.workflowNames.length > 0) {
                    logger.debug(`Specific workflows to monitor: ${options.workflowNames.join(', ')}`);
                } else {
                    logger.debug('Monitoring all workflows that might be triggered by releases');
                }
            }
            // After several attempts with no workflows, ask user if they want to continue
            if (consecutiveNoWorkflowsCount >= maxConsecutiveNoWorkflows) {
                logger.warn(`No workflows triggered by release ${tagName} after ${maxConsecutiveNoWorkflows} attempts.`);
                if (!skipUserConfirmation) {
                    const proceedWithoutWorkflows = await promptConfirmation(`⚠️ No GitHub Actions workflows appear to be triggered by the release ${tagName}.\n` + `This might be expected if no workflows are configured for release events.\n` + `Do you want to proceed without waiting for workflows?`);
                    if (proceedWithoutWorkflows) {
                        logger.info('User chose to proceed without release workflows.');
                        return;
                    } else {
                        throw new Error(`No release workflows found for ${tagName}. User chose not to proceed.`);
                    }
                } else {
                    // In non-interactive mode, proceed if no workflows are found
                    logger.info('No release workflows found, proceeding.');
                    return;
                }
            }
            await delay(10000);
            continue;
        }
        // Reset counter since we found workflows
        consecutiveNoWorkflowsCount = 0;
        // Check status of all workflow runs; any terminal non-success
        // conclusion fails the whole wait.
        const failingRuns = workflowRuns.filter((run)=>run.conclusion && [
                'failure',
                'timed_out',
                'cancelled'
            ].includes(run.conclusion));
        if (failingRuns.length > 0) {
            logger.error(`Release workflows for ${tagName} have failures:`);
            for (const run of failingRuns){
                logger.error(`- ${run.name}: ${run.conclusion} (${run.html_url})`);
            }
            throw new Error(`Release workflows for ${tagName} failed.`);
        }
        const allWorkflowsCompleted = workflowRuns.every((run)=>run.status === 'completed');
        if (allWorkflowsCompleted) {
            const successfulRuns = workflowRuns.filter((run)=>run.conclusion === 'success');
            logger.info(`All ${workflowRuns.length} release workflows for ${tagName} completed successfully.`);
            for (const run of successfulRuns){
                logger.info(`✓ ${run.name}: ${run.conclusion}`);
            }
            return;
        }
        // Still pending: summarize progress, then sleep and re-poll.
        const completedCount = workflowRuns.filter((run)=>run.status === 'completed').length;
        const runningCount = workflowRuns.filter((run)=>run.status === 'in_progress').length;
        const queuedCount = workflowRuns.filter((run)=>run.status === 'queued').length;
        // Log detailed information about each workflow run being tracked
        if (workflowRuns.length > 0) {
            logger.debug(`Tracking ${workflowRuns.length} workflow runs for release ${tagName}:`);
            workflowRuns.forEach((run)=>{
                const statusIcon = run.status === 'completed' ? run.conclusion === 'success' ? '✅' : run.conclusion === 'failure' ? '❌' : '⚠️' : run.status === 'in_progress' ? '🔄' : '⏳';
                logger.debug(`  ${statusIcon} ${run.name} (${run.status}${run.conclusion ? `/${run.conclusion}` : ''}) - created ${run.created_at}`);
            });
        }
        logger.info(`Release workflows for ${tagName}: ${completedCount} completed, ${runningCount} running, ${queuedCount} queued (${workflowRuns.length} total)`);
        await delay(15000); // wait 15 seconds
    }
};
901
/**
 * Inspect every workflow file in the repository and return the display names
 * of those whose trigger configuration matches a release event (per the
 * regex heuristics in isTriggeredByRelease).
 *
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @returns {Promise<string[]>} Workflow names; [] on failure (errors are
 *   logged, not thrown). Per-workflow read failures are warned and skipped.
 */ const getWorkflowsTriggeredByRelease = async (cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        logger.debug('Analyzing workflows to find those triggered by release events...');
        // List every workflow registered for the repository.
        const { data } = await octokit.actions.listRepoWorkflows({
            owner,
            repo
        });
        const releaseWorkflows = [];
        // Fetch and inspect each workflow file one at a time.
        for (const workflow of data.workflows){
            try {
                logger.debug(`Analyzing workflow: ${workflow.name} (${workflow.path})`);
                const contentResponse = await octokit.repos.getContent({
                    owner,
                    repo,
                    path: workflow.path
                });
                const file = contentResponse.data;
                // getContent may return a directory listing; only files carry content.
                if ('content' in file && file.type === 'file') {
                    // The API returns file bodies base64-encoded.
                    const content = Buffer.from(file.content, 'base64').toString('utf-8');
                    if (isTriggeredByRelease(content, workflow.name)) {
                        logger.debug(`✓ Workflow "${workflow.name}" will be triggered by release events`);
                        releaseWorkflows.push(workflow.name);
                    } else {
                        logger.debug(`✗ Workflow "${workflow.name}" will not be triggered by release events`);
                    }
                } else {
                    logger.warn(`Could not read content for workflow ${workflow.name}`);
                }
            } catch (error) {
                logger.warn(`Failed to analyze workflow ${workflow.name}: ${error.message}`);
            }
        }
        logger.info(`Found ${releaseWorkflows.length} workflows that will be triggered by release events: ${releaseWorkflows.join(', ')}`);
        return releaseWorkflows;
    } catch (error) {
        logger.error(`Failed to analyze workflows: ${error.message}`);
        return [];
    }
};
949
/**
 * Heuristically decide whether a workflow YAML document would run on a
 * release, using regular expressions rather than a YAML parser.
 *
 * NOTE(review): these are text heuristics, not real YAML parsing — they can
 * misfire on comments containing `on:` / `release:` or on unusual
 * formatting. Confirm against real workflow files before tightening.
 *
 * @param {string} workflowContent - Raw workflow YAML text.
 * @param {string} workflowName - Display name, used only for logging.
 * @returns {boolean} True when any release-ish trigger pattern matches;
 *   false on no match or if pattern evaluation throws.
 */ const isTriggeredByRelease = (workflowContent, workflowName)=>{
    const logger = getLogger();
    try {
        // Simple regex-based parsing since we don't want to add a YAML dependency
        // Look for common release trigger patterns
        // Pattern 1: block-mapping form — `on:` followed by an indented
        // `release:` key (with or without `types: [...]`):
        //   on:
        //     release:
        //       types: [published, created, ...]
        const releaseEventPattern = /(?:^|\n)\s*on\s*:\s*(?:\n|\r\n)(?:\s+[^\S\r\n]+)*(?:\s+release\s*:)/m;
        // Pattern 2: inline form — `on: [push, release]` or `on: release`
        const onReleasePattern = /(?:^|\n)\s*on\s*:\s*(?:\[.*release.*\]|release)\s*(?:\n|$)/m;
        // Pattern 3: push trigger whose tag filter looks release-like
        // (contains `v*`, `release`, or `tag`):
        //   on:
        //     push:
        //       tags:
        //         - 'v*'
        //         - 'release/*'
        const tagPushPattern = /(?:^|\r?\n)[^\S\r\n]*on\s*:\s*\r?\n(?:[^\S\r\n]*[^\r\n]+(?:\r?\n))*?[^\S\r\n]*push\s*:\s*\r?\n(?:[^\S\r\n]*tags\s*:\s*(?:\r?\n|\[)[^\]\r\n]*(?:v\*|release|tag)[^\]\r\n]*)/mi;
        const isTriggered = releaseEventPattern.test(workflowContent) || onReleasePattern.test(workflowContent) || tagPushPattern.test(workflowContent);
        if (isTriggered) {
            logger.debug(`Workflow "${workflowName}" trigger patterns detected in content`);
        }
        return isTriggered;
    } catch (error) {
        // Defensive: regex evaluation failures degrade to "not triggered".
        logger.warn(`Failed to parse workflow content for ${workflowName}: ${error.message}`);
        return false;
    }
};
978
+ // Milestone Management Functions
979
/**
 * Locate a milestone (open or closed) by exact title match.
 *
 * NOTE: only the first 100 milestones are scanned (single API page).
 *
 * @param {string} title - Exact milestone title to look for.
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @returns {Promise<object|null>} The milestone, or null when absent.
 * @throws Rethrows API errors after logging.
 */ const findMilestoneByTitle = async (title, cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        logger.debug(`Searching for milestone: ${title}`);
        const { data } = await octokit.issues.listMilestones({
            owner,
            repo,
            state: 'all',
            per_page: 100
        });
        const match = data.find((m)=>m.title === title);
        if (match) {
            logger.debug(`Found milestone: ${match.title} (${match.state})`);
        } else {
            logger.debug(`Milestone not found: ${title}`);
        }
        return match || null;
    } catch (error) {
        logger.error(`Failed to search for milestone ${title}: ${error.message}`);
        throw error;
    }
};
1003
/**
 * Create a new milestone on the repository.
 *
 * @param {string} title - Milestone title.
 * @param {string} description - Milestone description.
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @returns {Promise<object>} The created milestone payload.
 * @throws Rethrows API errors after logging.
 */ const createMilestone = async (title, description, cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        logger.info(`Creating milestone: ${title}`);
        const { data } = await octokit.issues.createMilestone({
            owner,
            repo,
            title,
            description
        });
        logger.info(`✅ Milestone created: ${title} (#${data.number})`);
        return data;
    } catch (error) {
        logger.error(`Failed to create milestone ${title}: ${error.message}`);
        throw error;
    }
};
1022
/**
 * Mark a milestone as closed.
 *
 * @param {number} milestoneNumber - Milestone number (not title).
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @throws Rethrows API errors after logging.
 */ const closeMilestone = async (milestoneNumber, cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        logger.info(`Closing milestone #${milestoneNumber}...`);
        await octokit.issues.updateMilestone({
            owner,
            repo,
            milestone_number: milestoneNumber,
            state: 'closed'
        });
        logger.info(`✅ Milestone #${milestoneNumber} closed`);
    } catch (error) {
        logger.error(`Failed to close milestone #${milestoneNumber}: ${error.message}`);
        throw error;
    }
};
1040
/**
 * List open issues (pull requests excluded) assigned to a milestone.
 *
 * NOTE: fetches a single page of up to 100 issues.
 *
 * @param {number} milestoneNumber - Milestone number to filter by.
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @returns {Promise<object[]>} Open issues in the milestone.
 * @throws Rethrows API errors after logging.
 */ const getOpenIssuesForMilestone = async (milestoneNumber, cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        logger.debug(`Getting open issues for milestone #${milestoneNumber}`);
        const { data } = await octokit.issues.listForRepo({
            owner,
            repo,
            state: 'open',
            milestone: milestoneNumber.toString(),
            per_page: 100
        });
        // The issues endpoint also returns pull requests; drop them.
        const issues = data.filter((issue)=>!issue.pull_request);
        logger.debug(`Found ${issues.length} open issues for milestone #${milestoneNumber}`);
        return issues;
    } catch (error) {
        logger.error(`Failed to get issues for milestone #${milestoneNumber}: ${error.message}`);
        throw error;
    }
};
1061
/**
 * Assign an existing issue to a milestone.
 *
 * @param {number} issueNumber - Issue to reassign.
 * @param {number} milestoneNumber - Destination milestone number.
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @throws Rethrows API errors after logging.
 */ const moveIssueToMilestone = async (issueNumber, milestoneNumber, cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        logger.debug(`Moving issue #${issueNumber} to milestone #${milestoneNumber}`);
        await octokit.issues.update({
            owner,
            repo,
            issue_number: issueNumber,
            milestone: milestoneNumber
        });
        logger.debug(`✅ Issue #${issueNumber} moved to milestone #${milestoneNumber}`);
    } catch (error) {
        logger.error(`Failed to move issue #${issueNumber} to milestone #${milestoneNumber}: ${error.message}`);
        throw error;
    }
};
1079
/**
 * Move every open issue from one milestone to another.
 *
 * @param {number} fromMilestoneNumber - Source milestone number.
 * @param {number} toMilestoneNumber - Destination milestone number.
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @returns {Promise<number>} Count of issues moved (0 when none were open).
 * @throws Rethrows API errors after logging.
 */ const moveOpenIssuesToNewMilestone = async (fromMilestoneNumber, toMilestoneNumber, cwd)=>{
    const logger = getLogger();
    try {
        const issuesToMove = await getOpenIssuesForMilestone(fromMilestoneNumber, cwd);
        const count = issuesToMove.length;
        if (count === 0) {
            logger.debug(`No open issues to move from milestone #${fromMilestoneNumber}`);
            return 0;
        }
        logger.info(`Moving ${count} open issues from milestone #${fromMilestoneNumber} to #${toMilestoneNumber}`);
        for (const { number } of issuesToMove){
            // Sequential on purpose: gentle API usage, clear error attribution.
            await moveIssueToMilestone(number, toMilestoneNumber, cwd);
        }
        logger.info(`✅ Moved ${count} issues to new milestone`);
        return count;
    } catch (error) {
        logger.error(`Failed to move issues between milestones: ${error.message}`);
        throw error;
    }
};
1098
/**
 * Ensure a `release/<version>` milestone exists, creating it if necessary.
 * When a previous version is given and its milestone is already closed, the
 * previous milestone's open issues are carried forward to the new one.
 *
 * Best-effort: all failures are logged as warnings and swallowed so callers
 * never fail on milestone bookkeeping.
 *
 * @param {string} version - Version the milestone should track.
 * @param {string} [fromVersion] - Previous version whose open issues may be
 *   rolled forward.
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 */ const ensureMilestoneForVersion = async (version, fromVersion, cwd)=>{
    const logger = getLogger();
    try {
        const milestoneTitle = `release/${version}`;
        logger.debug(`Ensuring milestone exists: ${milestoneTitle}`);
        const existing = await findMilestoneByTitle(milestoneTitle, cwd);
        if (existing) {
            logger.info(`✅ Milestone already exists: ${milestoneTitle}`);
            return;
        }
        const created = await createMilestone(milestoneTitle, `Release ${version}`, cwd);
        if (!fromVersion) {
            return;
        }
        const previousMilestoneTitle = `release/${fromVersion}`;
        const previousMilestone = await findMilestoneByTitle(previousMilestoneTitle, cwd);
        // Only roll issues forward once the prior milestone has been closed.
        if (previousMilestone && previousMilestone.state === 'closed') {
            const movedCount = await moveOpenIssuesToNewMilestone(previousMilestone.number, created.number, cwd);
            if (movedCount > 0) {
                logger.info(`📋 Moved ${movedCount} open issues from ${previousMilestoneTitle} to ${milestoneTitle}`);
            }
        }
    } catch (error) {
        // Don't fail the whole operation if milestone management fails
        logger.warn(`⚠️ Milestone management failed (continuing): ${error.message}`);
    }
};
1127
/**
 * Close the `release/<version>` milestone if it exists and is still open.
 *
 * Best-effort: failures are logged as warnings and swallowed; a missing or
 * already-closed milestone is a quiet no-op.
 *
 * @param {string} version - Version whose milestone should be closed.
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 */ const closeMilestoneForVersion = async (version, cwd)=>{
    const logger = getLogger();
    const milestoneTitle = `release/${version}`;
    try {
        logger.debug(`Closing milestone: ${milestoneTitle}`);
        const milestone = await findMilestoneByTitle(milestoneTitle, cwd);
        if (!milestone) {
            logger.debug(`Milestone not found: ${milestoneTitle}`);
        } else if (milestone.state === 'closed') {
            logger.debug(`Milestone already closed: ${milestoneTitle}`);
        } else {
            await closeMilestone(milestone.number, cwd);
            logger.info(`🏁 Closed milestone: ${milestoneTitle}`);
        }
    } catch (error) {
        // Don't fail the whole operation if milestone management fails
        logger.warn(`⚠️ Failed to close milestone (continuing): ${error.message}`);
    }
};
1148
/**
 * List issues in a milestone that were closed as completed (pull requests
 * and issues closed as "not planned" are excluded).
 *
 * @param {number} milestoneNumber - Milestone number to filter by.
 * @param {number} [limit=50] - Maximum issues to request (capped at 100).
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @returns {Promise<object[]>} Completed issues, most recently updated first.
 * @throws Rethrows API errors after logging.
 */ const getClosedIssuesForMilestone = async (milestoneNumber, limit = 50, cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        logger.debug(`Getting closed issues for milestone #${milestoneNumber}`);
        const { data } = await octokit.issues.listForRepo({
            owner,
            repo,
            state: 'closed',
            milestone: milestoneNumber.toString(),
            per_page: Math.min(limit, 100),
            sort: 'updated',
            direction: 'desc'
        });
        // Keep genuine issues (not PRs) whose close reason was "completed".
        const issues = data.filter((issue)=>!issue.pull_request && issue.state_reason === 'completed');
        logger.debug(`Found ${issues.length} closed issues for milestone #${milestoneNumber}`);
        return issues;
    } catch (error) {
        logger.error(`Failed to get closed issues for milestone #${milestoneNumber}: ${error.message}`);
        throw error;
    }
};
1172
/**
 * Fetch an issue's title, body, and as many comments as fit within a token
 * budget (tokens estimated at ~4 characters each).
 *
 * Comments are skipped entirely when title+body already consume ~90% of the
 * budget; otherwise comments are appended in API order until the next one
 * would overflow `maxTokens`. Comment-fetch failures degrade to "no
 * comments" rather than throwing.
 *
 * @param {number} issueNumber - Issue to fetch.
 * @param {number} [maxTokens=20000] - Approximate token budget.
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @returns {Promise<{title: string, body: string, comments: object[], totalTokens: number}>}
 * @throws Rethrows the issue-fetch error after logging.
 */ const getIssueDetails = async (issueNumber, maxTokens = 20000, cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    // Rough heuristic: one token per ~4 characters.
    const estimateTokens = (text)=>Math.ceil(text.length / 4);
    try {
        logger.debug(`Getting details for issue #${issueNumber}`);
        const { data: issue } = await octokit.issues.get({
            owner,
            repo,
            issue_number: issueNumber
        });
        const details = {
            title: issue.title,
            body: issue.body || '',
            comments: [],
            totalTokens: 0
        };
        let usedTokens = estimateTokens(details.title + details.body);
        details.totalTokens = usedTokens;
        // Skip comments entirely when title+body already near the budget.
        if (usedTokens >= maxTokens * 0.9) {
            logger.debug(`Issue #${issueNumber} title/body already uses ${usedTokens} tokens, skipping comments`);
            return details;
        }
        try {
            const { data: comments } = await octokit.issues.listComments({
                owner,
                repo,
                issue_number: issueNumber,
                per_page: 100
            });
            for (const comment of comments){
                const commentTokens = estimateTokens(comment.body || '');
                if (usedTokens + commentTokens > maxTokens) {
                    logger.debug(`Stopping at comment to stay under ${maxTokens} token limit for issue #${issueNumber}`);
                    break;
                }
                details.comments.push({
                    author: comment.user?.login,
                    body: comment.body,
                    created_at: comment.created_at
                });
                usedTokens += commentTokens;
            }
        } catch (error) {
            logger.debug(`Failed to get comments for issue #${issueNumber}: ${error.message}`);
        }
        details.totalTokens = usedTokens;
        logger.debug(`Issue #${issueNumber} details: ${usedTokens} tokens`);
        return details;
    } catch (error) {
        logger.error(`Failed to get details for issue #${issueNumber}: ${error.message}`);
        throw error;
    }
};
1233
/**
 * Build a markdown "Issues Resolved" section for release notes from the
 * completed issues of the `release/<version>` milestones of the given
 * versions, staying within an overall token budget (~4 chars per token).
 *
 * Issues across all matched milestones are merged, sorted newest-updated
 * first, and appended one section at a time until the next section would
 * exceed `maxTotalTokens`. Best-effort: any failure returns '' after a
 * warning rather than throwing.
 *
 * @param {string[]} versions - Versions whose milestones to harvest.
 * @param {number} [maxTotalTokens=50000] - Approximate overall token budget.
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @returns {Promise<string>} Markdown content, or '' when no issues / on error.
 */ const getMilestoneIssuesForRelease = async (versions, maxTotalTokens = 50000, cwd)=>{
    const logger = getLogger();
    try {
        const allIssues = [];
        // NOTE(review): processedVersions is populated but never read in this
        // function — confirm whether it was meant to feed the output.
        const processedVersions = [];
        for (const version of versions){
            const milestoneTitle = `release/${version}`;
            logger.debug(`Looking for milestone: ${milestoneTitle}`);
            const milestone = await findMilestoneByTitle(milestoneTitle, cwd);
            if (!milestone) {
                logger.debug(`Milestone not found: ${milestoneTitle}`);
                continue;
            }
            const issues = await getClosedIssuesForMilestone(milestone.number, 50, cwd);
            if (issues.length > 0) {
                // Tag each issue with the version it shipped in.
                allIssues.push(...issues.map((issue)=>({
                        ...issue,
                        version
                    })));
                processedVersions.push(version);
                logger.info(`📋 Found ${issues.length} closed issues in milestone ${milestoneTitle}`);
            }
        }
        if (allIssues.length === 0) {
            logger.debug('No closed issues found in any milestones');
            return '';
        }
        // Sort issues by updated date (most recent first)
        allIssues.sort((a, b)=>new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime());
        logger.info(`📋 Processing ${allIssues.length} issues for release notes (max ${maxTotalTokens} tokens)`);
        let releaseNotesContent = '';
        let totalTokens = 0;
        // Rough heuristic: one token per ~4 characters.
        const estimateTokens = (text)=>Math.ceil(text.length / 4);
        // Add header
        const header = `## Issues Resolved\n\nThe following issues were resolved in this release:\n\n`;
        releaseNotesContent += header;
        totalTokens += estimateTokens(header);
        for (const issue of allIssues){
            // Get detailed issue content with individual token limit
            const issueDetails = await getIssueDetails(issue.number, 20000, cwd);
            // Create issue section
            let issueSection = `### #${issue.number}: ${issueDetails.title}\n\n`;
            if (issueDetails.body) {
                issueSection += `**Description:**\n${issueDetails.body}\n\n`;
            }
            if (issueDetails.comments.length > 0) {
                issueSection += `**Key Discussion Points:**\n`;
                for (const comment of issueDetails.comments){
                    issueSection += `- **${comment.author}**: ${comment.body}\n`;
                }
                issueSection += '\n';
            }
            // Add labels if present
            if (issue.labels && issue.labels.length > 0) {
                const labelNames = issue.labels.map((label)=>typeof label === 'string' ? label : label.name).join(', ');
                issueSection += `**Labels:** ${labelNames}\n\n`;
            }
            issueSection += '---\n\n';
            const sectionTokens = estimateTokens(issueSection);
            // Check if adding this issue would exceed the total limit
            if (totalTokens + sectionTokens > maxTotalTokens) {
                logger.info(`Stopping at issue #${issue.number} to stay under ${maxTotalTokens} token limit`);
                break;
            }
            releaseNotesContent += issueSection;
            totalTokens += sectionTokens;
            logger.debug(`Added issue #${issue.number} (${sectionTokens} tokens, total: ${totalTokens})`);
        }
        logger.info(`📋 Generated release notes from milestone issues (${totalTokens} tokens)`);
        return releaseNotesContent;
    } catch (error) {
        // Don't fail the whole operation if milestone content fails
        logger.warn(`⚠️ Failed to get milestone issues for release notes (continuing): ${error.message}`);
        return '';
    }
};
1309
/**
 * Get recently closed GitHub issues for commit message context.
 * Prioritizes issues from milestones that match the current version.
 *
 * Fetches up to `limit` issues closed as "completed" (PRs excluded), splits
 * them into issues belonging to the `release/<baseVersion>` milestone versus
 * the rest, and renders milestone issues first. Bodies are truncated to 300
 * characters.
 *
 * @param {string} [currentVersion] - Version used to locate the relevant
 *   milestone; a `-dev.N` suffix is stripped for matching.
 * @param {number} [limit=10] - Maximum total issues to include.
 * @param {string} [cwd] - Directory whose `origin` remote identifies the repo.
 * @returns {Promise<string>} Formatted text, or '' when no issues / on error
 *   (failures are logged, not thrown).
 */ const getRecentClosedIssuesForCommit = async (currentVersion, limit = 10, cwd)=>{
    const octokit = getOctokit();
    const { owner, repo } = await getRepoDetails(cwd);
    const logger = getLogger();
    try {
        logger.debug(`Fetching up to ${limit} recently closed GitHub issues for commit context...`);
        // Get recently closed issues
        const response = await octokit.issues.listForRepo({
            owner,
            repo,
            state: 'closed',
            per_page: Math.min(limit, 100),
            sort: 'updated',
            direction: 'desc'
        });
        const issues = response.data.filter((issue)=>!issue.pull_request && // Filter out PRs
            issue.state_reason === 'completed' // Only issues closed as completed
        );
        if (issues.length === 0) {
            logger.debug('No recently closed issues found');
            return '';
        }
        // Determine relevant milestone if we have a current version
        let relevantMilestone = null;
        if (currentVersion) {
            // Extract base version for milestone matching (e.g., "0.1.1" from "0.1.1-dev.0")
            const baseVersion = currentVersion.includes('-dev.') ? currentVersion.split('-')[0] : currentVersion;
            const milestoneTitle = `release/${baseVersion}`;
            relevantMilestone = await findMilestoneByTitle(milestoneTitle, cwd);
            if (relevantMilestone) {
                logger.debug(`Found relevant milestone: ${milestoneTitle}`);
            } else {
                logger.debug(`No milestone found for version: ${baseVersion}`);
            }
        }
        // Categorize issues by relevance
        const milestoneIssues = [];
        const otherIssues = [];
        for (const issue of issues.slice(0, limit)){
            var _issue_milestone;
            if (relevantMilestone && ((_issue_milestone = issue.milestone) === null || _issue_milestone === void 0 ? void 0 : _issue_milestone.number) === relevantMilestone.number) {
                milestoneIssues.push(issue);
            } else {
                otherIssues.push(issue);
            }
        }
        // Build the content, prioritizing milestone issues
        const issueStrings = [];
        // Add milestone issues first (these are most relevant).
        // milestoneIssues is only non-empty when relevantMilestone was found,
        // so dereferencing relevantMilestone.title here is safe.
        if (milestoneIssues.length > 0) {
            issueStrings.push(`## Recent Issues from Current Milestone (${relevantMilestone.title}):`);
            milestoneIssues.forEach((issue)=>{
                var _issue_body;
                const labels = issue.labels.map((label)=>typeof label === 'string' ? label : label.name).join(', ');
                issueStrings.push([
                    `Issue #${issue.number}: ${issue.title}`,
                    `Labels: ${labels || 'none'}`,
                    `Closed: ${issue.closed_at}`,
                    `Body: ${((_issue_body = issue.body) === null || _issue_body === void 0 ? void 0 : _issue_body.substring(0, 300)) || 'No description'}${issue.body && issue.body.length > 300 ? '...' : ''}`,
                    '---'
                ].join('\n'));
            });
        }
        // Add other recent issues if we have space
        const remainingLimit = limit - milestoneIssues.length;
        if (otherIssues.length > 0 && remainingLimit > 0) {
            if (milestoneIssues.length > 0) {
                issueStrings.push('\n## Other Recent Closed Issues:');
            }
            otherIssues.slice(0, remainingLimit).forEach((issue)=>{
                var _issue_body;
                const labels = issue.labels.map((label)=>typeof label === 'string' ? label : label.name).join(', ');
                const milestoneInfo = issue.milestone ? `Milestone: ${issue.milestone.title}` : 'Milestone: none';
                issueStrings.push([
                    `Issue #${issue.number}: ${issue.title}`,
                    `Labels: ${labels || 'none'}`,
                    milestoneInfo,
                    `Closed: ${issue.closed_at}`,
                    `Body: ${((_issue_body = issue.body) === null || _issue_body === void 0 ? void 0 : _issue_body.substring(0, 300)) || 'No description'}${issue.body && issue.body.length > 300 ? '...' : ''}`,
                    '---'
                ].join('\n'));
            });
        }
        const totalRelevantIssues = milestoneIssues.length;
        const totalOtherIssues = Math.min(otherIssues.length, remainingLimit);
        logger.debug(`Fetched ${totalRelevantIssues + totalOtherIssues} closed issues (${totalRelevantIssues} from relevant milestone, ${totalOtherIssues} others)`);
        return issueStrings.join('\n\n');
    } catch (error) {
        logger.warn(`Failed to fetch recent closed GitHub issues: ${error.message}`);
        return '';
    }
};
1404
+
1405
+ export { checkWorkflowConfiguration, closeMilestone, closeMilestoneForVersion, createIssue, createMilestone, createPullRequest, createRelease, ensureMilestoneForVersion, findMilestoneByTitle, findOpenPullRequestByHeadRef, getClosedIssuesForMilestone, getCurrentBranchName, getIssueDetails, getMilestoneIssuesForRelease, getOctokit, getOpenIssues, getOpenIssuesForMilestone, getRecentClosedIssuesForCommit, getReleaseByTagName, getRepoDetails, getWorkflowRunsTriggeredByRelease, getWorkflowsTriggeredByRelease, mergePullRequest, moveIssueToMilestone, moveOpenIssuesToNewMilestone, setPromptFunction, waitForPullRequestChecks, waitForReleaseWorkflows };
1406
+ //# sourceMappingURL=github.js.map