@eldrforge/kodrdriv 1.2.19 → 1.2.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/GITHUB-TOOLS-INTEGRATION.md +323 -0
  2. package/INTEGRATION-SUMMARY.md +232 -0
  3. package/TEST-STATUS.md +168 -0
  4. package/dist/application.js +7 -0
  5. package/dist/application.js.map +1 -1
  6. package/dist/arguments.js +1 -1
  7. package/dist/arguments.js.map +1 -1
  8. package/dist/commands/commit.js +3 -3
  9. package/dist/commands/commit.js.map +1 -1
  10. package/dist/commands/development.js +1 -2
  11. package/dist/commands/development.js.map +1 -1
  12. package/dist/commands/link.js +1 -2
  13. package/dist/commands/link.js.map +1 -1
  14. package/dist/commands/publish.js +15 -17
  15. package/dist/commands/publish.js.map +1 -1
  16. package/dist/commands/release.js +4 -4
  17. package/dist/commands/release.js.map +1 -1
  18. package/dist/commands/review.js +3 -4
  19. package/dist/commands/review.js.map +1 -1
  20. package/dist/commands/tree.js +38 -10
  21. package/dist/commands/tree.js.map +1 -1
  22. package/dist/commands/unlink.js +1 -2
  23. package/dist/commands/unlink.js.map +1 -1
  24. package/dist/commands/updates.js +1 -1
  25. package/dist/commands/updates.js.map +1 -1
  26. package/dist/commands/versions.js +1 -1
  27. package/dist/commands/versions.js.map +1 -1
  28. package/dist/constants.js +1 -1
  29. package/dist/content/diff.js +1 -1
  30. package/dist/content/diff.js.map +1 -1
  31. package/dist/content/log.js +1 -1
  32. package/dist/content/log.js.map +1 -1
  33. package/dist/error/CommandErrors.js +1 -65
  34. package/dist/error/CommandErrors.js.map +1 -1
  35. package/dist/util/general.js +2 -3
  36. package/dist/util/general.js.map +1 -1
  37. package/dist/util/openai.js +1 -1
  38. package/dist/util/openai.js.map +1 -1
  39. package/dist/util/performance.js +1 -1
  40. package/dist/util/performance.js.map +1 -1
  41. package/dist/util/safety.js +1 -1
  42. package/dist/util/safety.js.map +1 -1
  43. package/dist/util/validation.js +4 -39
  44. package/dist/util/validation.js.map +1 -1
  45. package/package.json +4 -2
  46. package/test_output.txt +3 -3
  47. package/dist/content/issues.js +0 -331
  48. package/dist/content/issues.js.map +0 -1
  49. package/dist/content/releaseNotes.js +0 -90
  50. package/dist/content/releaseNotes.js.map +0 -1
  51. package/dist/util/child.js +0 -174
  52. package/dist/util/child.js.map +0 -1
  53. package/dist/util/git.js +0 -836
  54. package/dist/util/git.js.map +0 -1
  55. package/dist/util/github.js +0 -1071
  56. package/dist/util/github.js.map +0 -1
@@ -1,1071 +0,0 @@
1
- import { Octokit } from '@octokit/rest';
2
- import { getLogger } from '../logging.js';
3
- import { run } from './child.js';
4
- import { promptConfirmation } from './stdin.js';
5
-
6
- const getOctokit = ()=>{
7
- const logger = getLogger();
8
- const token = process.env.GITHUB_TOKEN;
9
- if (!token) {
10
- logger.error('GITHUB_TOKEN environment variable is not set.');
11
- throw new Error('GITHUB_TOKEN is not set.');
12
- }
13
- return new Octokit({
14
- auth: token
15
- });
16
- };
17
- const getCurrentBranchName = async ()=>{
18
- const { stdout } = await run('git rev-parse --abbrev-ref HEAD');
19
- return stdout.trim();
20
- };
21
- const getRepoDetails = async ()=>{
22
- const { stdout } = await run('git remote get-url origin');
23
- const url = stdout.trim();
24
- // git@github.com:owner/repo.git or https://github.com/owner/repo.git
25
- const match = url.match(/github\.com[/:]([\w-]+)\/([\w.-]+)\.git/);
26
- if (!match) {
27
- throw new Error(`Could not parse repository owner and name from origin URL: "${url}". Expected format: git@github.com:owner/repo.git or https://github.com/owner/repo.git`);
28
- }
29
- return {
30
- owner: match[1],
31
- repo: match[2]
32
- };
33
- };
34
- // GitHub API limit for pull request titles
35
- const GITHUB_PR_TITLE_LIMIT = 256;
36
- const truncatePullRequestTitle = (title)=>{
37
- if (title.length <= GITHUB_PR_TITLE_LIMIT) {
38
- return title;
39
- }
40
- // Reserve space for "..." suffix
41
- const maxLength = GITHUB_PR_TITLE_LIMIT - 3;
42
- let truncated = title.substring(0, maxLength);
43
- // Try to break at word boundary to avoid cutting words in half
44
- const lastSpaceIndex = truncated.lastIndexOf(' ');
45
- if (lastSpaceIndex > maxLength * 0.8) {
46
- truncated = truncated.substring(0, lastSpaceIndex);
47
- }
48
- return truncated + '...';
49
- };
50
- const createPullRequest = async (title, body, head, base = 'main')=>{
51
- const octokit = getOctokit();
52
- const { owner, repo } = await getRepoDetails();
53
- const logger = getLogger();
54
- // Truncate title if it exceeds GitHub's limit
55
- const truncatedTitle = truncatePullRequestTitle(title.trim());
56
- if (truncatedTitle !== title.trim()) {
57
- logger.debug(`Pull request title truncated from ${title.trim().length} to ${truncatedTitle.length} characters to meet GitHub's 256-character limit`);
58
- }
59
- const response = await octokit.pulls.create({
60
- owner,
61
- repo,
62
- title: truncatedTitle,
63
- body,
64
- head,
65
- base
66
- });
67
- return response.data;
68
- };
69
- const findOpenPullRequestByHeadRef = async (head)=>{
70
- const octokit = getOctokit();
71
- const { owner, repo } = await getRepoDetails();
72
- const logger = getLogger();
73
- try {
74
- logger.debug(`Searching for open pull requests with head: ${owner}:${head} in ${owner}/${repo}`);
75
- const response = await octokit.pulls.list({
76
- owner,
77
- repo,
78
- state: 'open',
79
- head: `${owner}:${head}`
80
- });
81
- logger.debug(`Found ${response.data.length} open pull requests`);
82
- var _response_data_;
83
- return (_response_data_ = response.data[0]) !== null && _response_data_ !== void 0 ? _response_data_ : null;
84
- } catch (error) {
85
- logger.error(`Failed to find open pull requests: ${error.message}`);
86
- if (error.status === 404) {
87
- logger.error(`Repository ${owner}/${repo} not found or access denied. Please check your GITHUB_TOKEN permissions.`);
88
- }
89
- throw error;
90
- }
91
- };
92
- const delay = (ms)=>new Promise((resolve)=>setTimeout(resolve, ms));
93
- // Check if repository has GitHub Actions workflows configured
94
- const hasWorkflowsConfigured = async ()=>{
95
- const octokit = getOctokit();
96
- const { owner, repo } = await getRepoDetails();
97
- try {
98
- const response = await octokit.actions.listRepoWorkflows({
99
- owner,
100
- repo
101
- });
102
- return response.data.workflows.length > 0;
103
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
104
- } catch (error) {
105
- // If we can't check workflows (e.g., no Actions permission), assume they might exist
106
- return true;
107
- }
108
- };
109
- /**
110
- * Check if any workflow runs have been triggered for a specific PR
111
- * This is more specific than hasWorkflowsConfigured as it checks for actual runs
112
- */ const hasWorkflowRunsForPR = async (prNumber)=>{
113
- const octokit = getOctokit();
114
- const { owner, repo } = await getRepoDetails();
115
- const logger = getLogger();
116
- try {
117
- // Get the PR to find the head SHA
118
- const pr = await octokit.pulls.get({
119
- owner,
120
- repo,
121
- pull_number: prNumber
122
- });
123
- const headSha = pr.data.head.sha;
124
- const headRef = pr.data.head.ref;
125
- // Check for workflow runs triggered by this PR
126
- const workflowRuns = await octokit.actions.listWorkflowRunsForRepo({
127
- owner,
128
- repo,
129
- head_sha: headSha,
130
- per_page: 50
131
- });
132
- // Also check for runs on the branch
133
- const branchRuns = await octokit.actions.listWorkflowRunsForRepo({
134
- owner,
135
- repo,
136
- branch: headRef,
137
- per_page: 50
138
- });
139
- const allRuns = [
140
- ...workflowRuns.data.workflow_runs,
141
- ...branchRuns.data.workflow_runs
142
- ];
143
- // Filter to runs that match our PR's head SHA or are very recent on the branch
144
- const relevantRuns = allRuns.filter((run)=>run.head_sha === headSha || run.head_branch === headRef && new Date(run.created_at).getTime() > Date.now() - 300000 // Last 5 minutes
145
- );
146
- if (relevantRuns.length > 0) {
147
- logger.debug(`Found ${relevantRuns.length} workflow runs for PR #${prNumber} (SHA: ${headSha})`);
148
- return true;
149
- }
150
- logger.debug(`No workflow runs found for PR #${prNumber} (SHA: ${headSha}, branch: ${headRef})`);
151
- return false;
152
- } catch (error) {
153
- logger.debug(`Error checking workflow runs for PR #${prNumber}: ${error.message}`);
154
- // If we can't check workflow runs, assume they might exist
155
- return true;
156
- }
157
- };
158
- const waitForPullRequestChecks = async (prNumber, options = {})=>{
159
- const octokit = getOctokit();
160
- const { owner, repo } = await getRepoDetails();
161
- const logger = getLogger();
162
- const timeout = options.timeout || 3600000; // 1 hour default timeout
163
- const skipUserConfirmation = options.skipUserConfirmation || false;
164
- const startTime = Date.now();
165
- let consecutiveNoChecksCount = 0;
166
- const maxConsecutiveNoChecks = 6; // 6 consecutive checks (1 minute) with no checks before asking user
167
- let checkedWorkflowRuns = false; // Track if we've already checked for workflow runs to avoid repeated checks
168
- while(true){
169
- const elapsedTime = Date.now() - startTime;
170
- // Check for timeout
171
- if (elapsedTime > timeout) {
172
- logger.warn(`Timeout reached (${timeout / 1000}s) while waiting for PR #${prNumber} checks.`);
173
- if (!skipUserConfirmation) {
174
- const proceedWithoutChecks = await promptConfirmation(`⚠️ Timeout reached while waiting for PR #${prNumber} checks.\n` + `This might indicate that no checks are configured for this repository.\n` + `Do you want to proceed with merging the PR without waiting for checks?`);
175
- if (proceedWithoutChecks) {
176
- logger.info('User chose to proceed without waiting for checks.');
177
- return;
178
- } else {
179
- throw new Error(`Timeout waiting for PR #${prNumber} checks. User chose not to proceed.`);
180
- }
181
- } else {
182
- throw new Error(`Timeout waiting for PR #${prNumber} checks (${timeout / 1000}s)`);
183
- }
184
- }
185
- const pr = await octokit.pulls.get({
186
- owner,
187
- repo,
188
- pull_number: prNumber
189
- });
190
- const checkRunsResponse = await octokit.checks.listForRef({
191
- owner,
192
- repo,
193
- ref: pr.data.head.sha
194
- });
195
- const checkRuns = checkRunsResponse.data.check_runs;
196
- if (checkRuns.length === 0) {
197
- consecutiveNoChecksCount++;
198
- logger.info(`PR #${prNumber}: No checks found (${consecutiveNoChecksCount}/${maxConsecutiveNoChecks}). Waiting...`);
199
- // After several consecutive "no checks" responses, check if workflows are configured
200
- if (consecutiveNoChecksCount >= maxConsecutiveNoChecks) {
201
- logger.info(`No checks detected for ${maxConsecutiveNoChecks} consecutive attempts. Checking repository configuration...`);
202
- const hasWorkflows = await hasWorkflowsConfigured();
203
- if (!hasWorkflows) {
204
- logger.warn(`No GitHub Actions workflows found in repository ${owner}/${repo}.`);
205
- if (!skipUserConfirmation) {
206
- const proceedWithoutChecks = await promptConfirmation(`⚠️ No GitHub Actions workflows or checks are configured for this repository.\n` + `PR #${prNumber} will never have status checks to wait for.\n` + `Do you want to proceed with merging the PR without checks?`);
207
- if (proceedWithoutChecks) {
208
- logger.info('User chose to proceed without checks (no workflows configured).');
209
- return;
210
- } else {
211
- throw new Error(`No checks configured for PR #${prNumber}. User chose not to proceed.`);
212
- }
213
- } else {
214
- // In non-interactive mode, proceed if no workflows are configured
215
- logger.info('No workflows configured, proceeding without checks.');
216
- return;
217
- }
218
- } else {
219
- // Workflows exist, but check if any are actually running for this PR
220
- if (!checkedWorkflowRuns) {
221
- logger.info('GitHub Actions workflows are configured. Checking if any workflows are triggered for this PR...');
222
- const hasRunsForPR = await hasWorkflowRunsForPR(prNumber);
223
- checkedWorkflowRuns = true; // Mark that we've checked
224
- if (!hasRunsForPR) {
225
- logger.warn(`No workflow runs detected for PR #${prNumber}. This may indicate that the configured workflows don't match this branch pattern.`);
226
- if (!skipUserConfirmation) {
227
- const proceedWithoutChecks = await promptConfirmation(`⚠️ GitHub Actions workflows are configured in this repository, but none appear to be triggered by PR #${prNumber}.\n` + `This usually means the workflow trigger patterns (branches, paths) don't match this PR.\n` + `PR #${prNumber} will likely never have status checks to wait for.\n` + `Do you want to proceed with merging the PR without waiting for checks?`);
228
- if (proceedWithoutChecks) {
229
- logger.info('User chose to proceed without checks (no matching workflow triggers).');
230
- return;
231
- } else {
232
- throw new Error(`No matching workflow triggers for PR #${prNumber}. User chose not to proceed.`);
233
- }
234
- } else {
235
- // In non-interactive mode, proceed if no workflow runs are detected
236
- logger.info('No workflow runs detected for this PR, proceeding without checks.');
237
- return;
238
- }
239
- } else {
240
- logger.info('Workflow runs detected for this PR. Continuing to wait for checks...');
241
- consecutiveNoChecksCount = 0; // Reset counter since workflow runs exist
242
- }
243
- } else {
244
- // We've already checked workflow runs and found none that match this PR
245
- // At this point, we should give up to avoid infinite loops
246
- logger.warn(`Still no checks after ${consecutiveNoChecksCount} attempts. No workflow runs match this PR.`);
247
- if (!skipUserConfirmation) {
248
- const proceedWithoutChecks = await promptConfirmation(`⚠️ After waiting ${Math.round(elapsedTime / 1000)}s, no checks have appeared for PR #${prNumber}.\n` + `The configured workflows don't appear to trigger for this branch.\n` + `Do you want to proceed with merging the PR without checks?`);
249
- if (proceedWithoutChecks) {
250
- logger.info('User chose to proceed without checks (timeout waiting for workflow triggers).');
251
- return;
252
- } else {
253
- throw new Error(`No workflow triggers matched PR #${prNumber} after waiting. User chose not to proceed.`);
254
- }
255
- } else {
256
- // In non-interactive mode, proceed after reasonable waiting
257
- logger.info('No workflow runs detected after waiting, proceeding without checks.');
258
- return;
259
- }
260
- }
261
- }
262
- }
263
- await delay(10000);
264
- continue;
265
- }
266
- // Reset the no-checks counter since we found some checks
267
- consecutiveNoChecksCount = 0;
268
- const failingChecks = checkRuns.filter((cr)=>cr.conclusion && [
269
- 'failure',
270
- 'timed_out',
271
- 'cancelled'
272
- ].includes(cr.conclusion));
273
- if (failingChecks.length > 0) {
274
- const { owner, repo } = await getRepoDetails();
275
- const prUrl = `https://github.com/${owner}/${repo}/pull/${prNumber}`;
276
- // Get current branch name for better guidance
277
- let currentBranch;
278
- try {
279
- currentBranch = await getCurrentBranchName();
280
- } catch {
281
- // Fallback to generic branch reference if we can't get the current branch
282
- currentBranch = undefined;
283
- }
284
- // Collect detailed information about each failed check
285
- const detailedFailedChecks = await Promise.all(failingChecks.map(async (check)=>{
286
- try {
287
- var _checkDetails_data_output, _checkDetails_data_output1, _checkDetails_data_output2, _checkDetails_data_output3;
288
- // Get additional details from the check run
289
- const checkDetails = await octokit.checks.get({
290
- owner,
291
- repo,
292
- check_run_id: check.id
293
- });
294
- return {
295
- name: check.name,
296
- conclusion: check.conclusion || 'unknown',
297
- detailsUrl: check.details_url || undefined,
298
- summary: ((_checkDetails_data_output = checkDetails.data.output) === null || _checkDetails_data_output === void 0 ? void 0 : _checkDetails_data_output.summary) || undefined,
299
- output: {
300
- title: ((_checkDetails_data_output1 = checkDetails.data.output) === null || _checkDetails_data_output1 === void 0 ? void 0 : _checkDetails_data_output1.title) || undefined,
301
- summary: ((_checkDetails_data_output2 = checkDetails.data.output) === null || _checkDetails_data_output2 === void 0 ? void 0 : _checkDetails_data_output2.summary) || undefined,
302
- text: ((_checkDetails_data_output3 = checkDetails.data.output) === null || _checkDetails_data_output3 === void 0 ? void 0 : _checkDetails_data_output3.text) || undefined
303
- }
304
- };
305
- } catch {
306
- // Fallback to basic information if we can't get details
307
- return {
308
- name: check.name,
309
- conclusion: check.conclusion || 'unknown',
310
- detailsUrl: check.details_url || undefined
311
- };
312
- }
313
- }));
314
- logger.error(`❌ PR #${prNumber} has ${failingChecks.length} failing check${failingChecks.length > 1 ? 's' : ''}:`);
315
- logger.error('');
316
- for (const check of detailedFailedChecks){
317
- var _check_output, _check_output1;
318
- const statusIcon = check.conclusion === 'failure' ? '❌' : check.conclusion === 'timed_out' ? '⏰' : '🚫';
319
- logger.error(`${statusIcon} ${check.name}: ${check.conclusion}`);
320
- // Show more detailed error information if available
321
- if (((_check_output = check.output) === null || _check_output === void 0 ? void 0 : _check_output.title) && check.output.title !== check.name) {
322
- logger.error(` Issue: ${check.output.title}`);
323
- }
324
- if ((_check_output1 = check.output) === null || _check_output1 === void 0 ? void 0 : _check_output1.summary) {
325
- // Truncate very long summaries
326
- const summary = check.output.summary.length > 200 ? check.output.summary.substring(0, 200) + '...' : check.output.summary;
327
- logger.error(` Summary: ${summary}`);
328
- }
329
- // Include direct link to check details
330
- if (check.detailsUrl) {
331
- logger.error(` Details: ${check.detailsUrl}`);
332
- }
333
- logger.error('');
334
- }
335
- // Import the new error class
336
- const { PullRequestCheckError } = await import('../error/CommandErrors.js');
337
- // Create and throw the enhanced error with detailed recovery instructions
338
- const prError = new PullRequestCheckError(`PR #${prNumber} checks failed. ${failingChecks.length} check${failingChecks.length > 1 ? 's' : ''} failed.`, prNumber, detailedFailedChecks, prUrl, currentBranch);
339
- // Display recovery instructions
340
- const instructions = prError.getRecoveryInstructions();
341
- for (const instruction of instructions){
342
- logger.error(instruction);
343
- }
344
- logger.error('');
345
- throw prError;
346
- }
347
- const allChecksCompleted = checkRuns.every((cr)=>cr.status === 'completed');
348
- if (allChecksCompleted) {
349
- logger.info(`All checks for PR #${prNumber} have completed successfully.`);
350
- return;
351
- }
352
- const completedCount = checkRuns.filter((cr)=>cr.status === 'completed').length;
353
- logger.info(`PR #${prNumber} checks: ${completedCount}/${checkRuns.length} completed. Waiting...`);
354
- await delay(10000); // wait 10 seconds
355
- }
356
- };
357
- const mergePullRequest = async (prNumber, mergeMethod = 'squash', deleteBranch = true)=>{
358
- const octokit = getOctokit();
359
- const { owner, repo } = await getRepoDetails();
360
- const logger = getLogger();
361
- logger.info(`Merging PR #${prNumber} using ${mergeMethod} method...`);
362
- const pr = await octokit.pulls.get({
363
- owner,
364
- repo,
365
- pull_number: prNumber
366
- });
367
- const headBranch = pr.data.head.ref;
368
- await octokit.pulls.merge({
369
- owner,
370
- repo,
371
- pull_number: prNumber,
372
- merge_method: mergeMethod
373
- });
374
- logger.info(`PR #${prNumber} merged using ${mergeMethod} method.`);
375
- if (deleteBranch) {
376
- logger.info(`Deleting branch ${headBranch}...`);
377
- await octokit.git.deleteRef({
378
- owner,
379
- repo,
380
- ref: `heads/${headBranch}`
381
- });
382
- logger.info(`Branch ${headBranch} deleted.`);
383
- } else {
384
- logger.info(`Preserving branch ${headBranch} (deletion skipped).`);
385
- }
386
- };
387
- const createRelease = async (tagName, title, notes)=>{
388
- const octokit = getOctokit();
389
- const { owner, repo } = await getRepoDetails();
390
- const logger = getLogger();
391
- logger.info(`Creating release for tag ${tagName}...`);
392
- await octokit.repos.createRelease({
393
- owner,
394
- repo,
395
- tag_name: tagName,
396
- name: title,
397
- body: notes
398
- });
399
- logger.info(`Release ${tagName} created.`);
400
- };
401
- const getReleaseByTagName = async (tagName)=>{
402
- const octokit = getOctokit();
403
- const { owner, repo } = await getRepoDetails();
404
- const logger = getLogger();
405
- try {
406
- const response = await octokit.repos.getReleaseByTag({
407
- owner,
408
- repo,
409
- tag: tagName
410
- });
411
- logger.debug(`Found release for tag ${tagName}: created at ${response.data.created_at}`);
412
- return response.data;
413
- } catch (error) {
414
- logger.debug(`Failed to get release for tag ${tagName}: ${error.message}`);
415
- throw error;
416
- }
417
- };
418
- const getOpenIssues = async (limit = 20)=>{
419
- const octokit = getOctokit();
420
- const { owner, repo } = await getRepoDetails();
421
- const logger = getLogger();
422
- try {
423
- logger.debug(`Fetching up to ${limit} open GitHub issues...`);
424
- const response = await octokit.issues.listForRepo({
425
- owner,
426
- repo,
427
- state: 'open',
428
- per_page: Math.min(limit, 100),
429
- sort: 'updated',
430
- direction: 'desc'
431
- });
432
- const issues = response.data.filter((issue)=>!issue.pull_request); // Filter out PRs
433
- if (issues.length === 0) {
434
- logger.debug('No open issues found');
435
- return '';
436
- }
437
- const issueStrings = issues.slice(0, limit).map((issue)=>{
438
- var _issue_body;
439
- const labels = issue.labels.map((label)=>typeof label === 'string' ? label : label.name).join(', ');
440
- return [
441
- `Issue #${issue.number}: ${issue.title}`,
442
- `Labels: ${labels || 'none'}`,
443
- `Created: ${issue.created_at}`,
444
- `Updated: ${issue.updated_at}`,
445
- `Body: ${((_issue_body = issue.body) === null || _issue_body === void 0 ? void 0 : _issue_body.substring(0, 500)) || 'No description'}${issue.body && issue.body.length > 500 ? '...' : ''}`,
446
- '---'
447
- ].join('\n');
448
- });
449
- logger.debug(`Fetched ${issues.length} open issues`);
450
- return issueStrings.join('\n\n');
451
- } catch (error) {
452
- logger.warn('Failed to fetch GitHub issues: %s', error.message);
453
- return '';
454
- }
455
- };
456
- const createIssue = async (title, body, labels)=>{
457
- const octokit = getOctokit();
458
- const { owner, repo } = await getRepoDetails();
459
- const response = await octokit.issues.create({
460
- owner,
461
- repo,
462
- title,
463
- body,
464
- labels: labels || []
465
- });
466
- return {
467
- number: response.data.number,
468
- html_url: response.data.html_url
469
- };
470
- };
471
- const getWorkflowRunsTriggeredByRelease = async (tagName, workflowNames)=>{
472
- const octokit = getOctokit();
473
- const { owner, repo } = await getRepoDetails();
474
- const logger = getLogger();
475
- try {
476
- logger.debug(`Fetching workflow runs triggered by release ${tagName}...`);
477
- // Get release information to filter by creation time and commit SHA
478
- let releaseInfo;
479
- let releaseCreatedAt;
480
- let releaseCommitSha;
481
- try {
482
- releaseInfo = await getReleaseByTagName(tagName);
483
- releaseCreatedAt = releaseInfo === null || releaseInfo === void 0 ? void 0 : releaseInfo.created_at;
484
- releaseCommitSha = releaseInfo === null || releaseInfo === void 0 ? void 0 : releaseInfo.target_commitish;
485
- } catch (error) {
486
- logger.debug(`Could not get release info for ${tagName}: ${error.message}. Using more permissive filtering.`);
487
- }
488
- if (releaseCreatedAt) {
489
- logger.debug(`Release ${tagName} was created at ${releaseCreatedAt}, filtering workflows created after this time`);
490
- } else {
491
- logger.debug(`No release creation time available for ${tagName}, using more permissive time filtering`);
492
- }
493
- if (releaseCommitSha) {
494
- logger.debug(`Release ${tagName} targets commit ${releaseCommitSha}`);
495
- }
496
- // Get all workflows
497
- const workflowsResponse = await octokit.actions.listRepoWorkflows({
498
- owner,
499
- repo
500
- });
501
- const relevantWorkflows = workflowsResponse.data.workflows.filter((workflow)=>{
502
- // If specific workflow names are provided, only include those
503
- if (workflowNames && workflowNames.length > 0) {
504
- return workflowNames.includes(workflow.name);
505
- }
506
- // Otherwise, find workflows that trigger on releases
507
- return true; // We'll filter by event later when we get the runs
508
- });
509
- logger.debug(`Found ${relevantWorkflows.length} workflows to check`);
510
- const allRuns = [];
511
- // Get recent workflow runs for each workflow
512
- for (const workflow of relevantWorkflows){
513
- try {
514
- const runsResponse = await octokit.actions.listWorkflowRuns({
515
- owner,
516
- repo,
517
- workflow_id: workflow.id,
518
- per_page: 30
519
- });
520
- logger.debug(`Checking ${runsResponse.data.workflow_runs.length} recent runs for workflow "${workflow.name}"`);
521
- // Filter runs that were triggered by our specific release
522
- const releaseRuns = runsResponse.data.workflow_runs.filter((run)=>{
523
- logger.debug(`Evaluating run ${run.id} for workflow "${workflow.name}": event=${run.event}, created_at=${run.created_at}`);
524
- // Must have required data
525
- if (!run.created_at) {
526
- logger.debug(`Excluding workflow run ${run.id}: missing created_at`);
527
- return false;
528
- }
529
- // Simple logic: if we have release info, just check that the run was created after the release
530
- if (releaseCreatedAt) {
531
- const runCreatedAt = new Date(run.created_at).getTime();
532
- const releaseCreatedAtTime = new Date(releaseCreatedAt).getTime();
533
- // Include any run that started after the release (with 1 minute buffer for timing)
534
- if (runCreatedAt < releaseCreatedAtTime - 60000) {
535
- logger.debug(`Excluding workflow run ${run.id}: created before release (run: ${run.created_at}, release: ${releaseCreatedAt})`);
536
- return false;
537
- }
538
- } else {
539
- // No release info - just look for recent runs (within last 30 minutes)
540
- const runAge = Date.now() - new Date(run.created_at).getTime();
541
- if (runAge > 1800000) {
542
- logger.debug(`Excluding old workflow run ${run.id}: created ${run.created_at}`);
543
- return false;
544
- }
545
- }
546
- logger.debug(`Including workflow run ${run.id}: ${workflow.name} (${run.status}/${run.conclusion || 'pending'}) created ${run.created_at}`);
547
- return true;
548
- });
549
- allRuns.push(...releaseRuns);
550
- if (releaseRuns.length > 0) {
551
- logger.debug(`Found ${releaseRuns.length} relevant workflow runs for ${workflow.name}`);
552
- } else {
553
- logger.debug(`No relevant workflow runs found for ${workflow.name}`);
554
- }
555
- } catch (error) {
556
- logger.warn(`Failed to get runs for workflow ${workflow.name}: ${error.message}`);
557
- }
558
- }
559
- // Sort by creation time (newest first)
560
- allRuns.sort((a, b)=>{
561
- return new Date(b.created_at).getTime() - new Date(a.created_at).getTime();
562
- });
563
- logger.debug(`Found ${allRuns.length} workflow runs triggered by release ${tagName}`);
564
- return allRuns;
565
- } catch (error) {
566
- logger.error(`Failed to get workflow runs for release ${tagName}: ${error.message}`);
567
- return [];
568
- }
569
- };
570
- const waitForReleaseWorkflows = async (tagName, options = {})=>{
571
- const logger = getLogger();
572
- const timeout = options.timeout || 1800000; // 30 minutes default
573
- const skipUserConfirmation = options.skipUserConfirmation || false;
574
- logger.info(`Waiting for workflows triggered by release ${tagName}...`);
575
- // Wait for workflows to start (GitHub can take time to process the release and trigger workflows)
576
- logger.debug('Waiting 20 seconds for workflows to start...');
577
- await delay(20000);
578
- const startTime = Date.now();
579
- let workflowRuns = [];
580
- let consecutiveNoWorkflowsCount = 0;
581
- const maxConsecutiveNoWorkflows = 20;
582
- while(true){
583
- const elapsedTime = Date.now() - startTime;
584
- // Check for timeout
585
- if (elapsedTime > timeout) {
586
- logger.warn(`Timeout reached (${timeout / 1000}s) while waiting for release workflows.`);
587
- if (!skipUserConfirmation) {
588
- const proceedWithoutWorkflows = await promptConfirmation(`⚠️ Timeout reached while waiting for release workflows for ${tagName}.\n` + `This might indicate that no workflows are configured to trigger on releases.\n` + `Do you want to proceed anyway?`);
589
- if (proceedWithoutWorkflows) {
590
- logger.info('User chose to proceed without waiting for release workflows.');
591
- return;
592
- } else {
593
- throw new Error(`Timeout waiting for release workflows for ${tagName}. User chose not to proceed.`);
594
- }
595
- } else {
596
- throw new Error(`Timeout waiting for release workflows for ${tagName} (${timeout / 1000}s)`);
597
- }
598
- }
599
- // Get current workflow runs
600
- workflowRuns = await getWorkflowRunsTriggeredByRelease(tagName, options.workflowNames);
601
- if (workflowRuns.length === 0) {
602
- consecutiveNoWorkflowsCount++;
603
- logger.info(`No release workflows found (${consecutiveNoWorkflowsCount}/${maxConsecutiveNoWorkflows}). Waiting...`);
604
- // Add debug info about what we're looking for
605
- if (consecutiveNoWorkflowsCount === 1) {
606
- logger.debug(`Looking for workflows triggered by release ${tagName}`);
607
- if (options.workflowNames && options.workflowNames.length > 0) {
608
- logger.debug(`Specific workflows to monitor: ${options.workflowNames.join(', ')}`);
609
- } else {
610
- logger.debug('Monitoring all workflows that might be triggered by releases');
611
- }
612
- }
613
- // After several attempts with no workflows, ask user if they want to continue
614
- if (consecutiveNoWorkflowsCount >= maxConsecutiveNoWorkflows) {
615
- logger.warn(`No workflows triggered by release ${tagName} after ${maxConsecutiveNoWorkflows} attempts.`);
616
- if (!skipUserConfirmation) {
617
- const proceedWithoutWorkflows = await promptConfirmation(`⚠️ No GitHub Actions workflows appear to be triggered by the release ${tagName}.\n` + `This might be expected if no workflows are configured for release events.\n` + `Do you want to proceed without waiting for workflows?`);
618
- if (proceedWithoutWorkflows) {
619
- logger.info('User chose to proceed without release workflows.');
620
- return;
621
- } else {
622
- throw new Error(`No release workflows found for ${tagName}. User chose not to proceed.`);
623
- }
624
- } else {
625
- // In non-interactive mode, proceed if no workflows are found
626
- logger.info('No release workflows found, proceeding.');
627
- return;
628
- }
629
- }
630
- await delay(10000);
631
- continue;
632
- }
633
- // Reset counter since we found workflows
634
- consecutiveNoWorkflowsCount = 0;
635
- // Check status of all workflow runs
636
- const failingRuns = workflowRuns.filter((run)=>run.conclusion && [
637
- 'failure',
638
- 'timed_out',
639
- 'cancelled'
640
- ].includes(run.conclusion));
641
- if (failingRuns.length > 0) {
642
- logger.error(`Release workflows for ${tagName} have failures:`);
643
- for (const run of failingRuns){
644
- logger.error(`- ${run.name}: ${run.conclusion} (${run.html_url})`);
645
- }
646
- throw new Error(`Release workflows for ${tagName} failed.`);
647
- }
648
- const allWorkflowsCompleted = workflowRuns.every((run)=>run.status === 'completed');
649
- if (allWorkflowsCompleted) {
650
- const successfulRuns = workflowRuns.filter((run)=>run.conclusion === 'success');
651
- logger.info(`All ${workflowRuns.length} release workflows for ${tagName} completed successfully.`);
652
- for (const run of successfulRuns){
653
- logger.info(`✓ ${run.name}: ${run.conclusion}`);
654
- }
655
- return;
656
- }
657
- const completedCount = workflowRuns.filter((run)=>run.status === 'completed').length;
658
- const runningCount = workflowRuns.filter((run)=>run.status === 'in_progress').length;
659
- const queuedCount = workflowRuns.filter((run)=>run.status === 'queued').length;
660
- // Log detailed information about each workflow run being tracked
661
- if (workflowRuns.length > 0) {
662
- logger.debug(`Tracking ${workflowRuns.length} workflow runs for release ${tagName}:`);
663
- workflowRuns.forEach((run)=>{
664
- const statusIcon = run.status === 'completed' ? run.conclusion === 'success' ? '✅' : run.conclusion === 'failure' ? '❌' : '⚠️' : run.status === 'in_progress' ? '🔄' : '⏳';
665
- logger.debug(` ${statusIcon} ${run.name} (${run.status}${run.conclusion ? `/${run.conclusion}` : ''}) - created ${run.created_at}`);
666
- });
667
- }
668
- logger.info(`Release workflows for ${tagName}: ${completedCount} completed, ${runningCount} running, ${queuedCount} queued (${workflowRuns.length} total)`);
669
- await delay(15000); // wait 15 seconds
670
- }
671
- };
672
- const getWorkflowsTriggeredByRelease = async ()=>{
673
- const octokit = getOctokit();
674
- const { owner, repo } = await getRepoDetails();
675
- const logger = getLogger();
676
- try {
677
- logger.debug('Analyzing workflows to find those triggered by release events...');
678
- // Get all workflows
679
- const workflowsResponse = await octokit.actions.listRepoWorkflows({
680
- owner,
681
- repo
682
- });
683
- const releaseWorkflows = [];
684
- // Check each workflow's configuration
685
- for (const workflow of workflowsResponse.data.workflows){
686
- try {
687
- // Get the workflow file content
688
- const workflowPath = workflow.path;
689
- logger.debug(`Analyzing workflow: ${workflow.name} (${workflowPath})`);
690
- const contentResponse = await octokit.repos.getContent({
691
- owner,
692
- repo,
693
- path: workflowPath
694
- });
695
- // Handle the response - it could be a file or directory
696
- if ('content' in contentResponse.data && contentResponse.data.type === 'file') {
697
- // Decode the base64 content
698
- const content = Buffer.from(contentResponse.data.content, 'base64').toString('utf-8');
699
- // Parse the YAML to check trigger conditions
700
- if (isTriggeredByRelease(content, workflow.name)) {
701
- logger.debug(`✓ Workflow "${workflow.name}" will be triggered by release events`);
702
- releaseWorkflows.push(workflow.name);
703
- } else {
704
- logger.debug(`✗ Workflow "${workflow.name}" will not be triggered by release events`);
705
- }
706
- } else {
707
- logger.warn(`Could not read content for workflow ${workflow.name}`);
708
- }
709
- } catch (error) {
710
- logger.warn(`Failed to analyze workflow ${workflow.name}: ${error.message}`);
711
- }
712
- }
713
- logger.info(`Found ${releaseWorkflows.length} workflows that will be triggered by release events: ${releaseWorkflows.join(', ')}`);
714
- return releaseWorkflows;
715
- } catch (error) {
716
- logger.error(`Failed to analyze workflows: ${error.message}`);
717
- return [];
718
- }
719
- };
720
- const isTriggeredByRelease = (workflowContent, workflowName)=>{
721
- const logger = getLogger();
722
- try {
723
- // Simple regex-based parsing since we don't want to add a YAML dependency
724
- // Look for common release trigger patterns
725
- // Pattern 1: on.release (with or without types)
726
- // on:
727
- // release:
728
- // types: [published, created, ...]
729
- const releaseEventPattern = /(?:^|\n)\s*on\s*:\s*(?:\n|\r\n)(?:\s+[^\S\r\n]+)*(?:\s+release\s*:)/m;
730
- // Pattern 2: on: [push, release] or on: release
731
- const onReleasePattern = /(?:^|\n)\s*on\s*:\s*(?:\[.*release.*\]|release)\s*(?:\n|$)/m;
732
- // Pattern 3: push with tag patterns that look like releases
733
- // on:
734
- // push:
735
- // tags:
736
- // - 'v*'
737
- // - 'release/*'
738
- const tagPushPattern = /(?:^|\r?\n)[^\S\r\n]*on\s*:\s*\r?\n(?:[^\S\r\n]*[^\r\n]+(?:\r?\n))*?[^\S\r\n]*push\s*:\s*\r?\n(?:[^\S\r\n]*tags\s*:\s*(?:\r?\n|\[)[^\]\r\n]*(?:v\*|release|tag)[^\]\r\n]*)/mi;
739
- const isTriggered = releaseEventPattern.test(workflowContent) || onReleasePattern.test(workflowContent) || tagPushPattern.test(workflowContent);
740
- if (isTriggered) {
741
- logger.debug(`Workflow "${workflowName}" trigger patterns detected in content`);
742
- }
743
- return isTriggered;
744
- } catch (error) {
745
- logger.warn(`Failed to parse workflow content for ${workflowName}: ${error.message}`);
746
- return false;
747
- }
748
- };
749
- // Milestone Management Functions
750
- const findMilestoneByTitle = async (title)=>{
751
- const octokit = getOctokit();
752
- const { owner, repo } = await getRepoDetails();
753
- const logger = getLogger();
754
- try {
755
- logger.debug(`Searching for milestone: ${title}`);
756
- const response = await octokit.issues.listMilestones({
757
- owner,
758
- repo,
759
- state: 'all',
760
- per_page: 100
761
- });
762
- const milestone = response.data.find((m)=>m.title === title);
763
- if (milestone) {
764
- logger.debug(`Found milestone: ${milestone.title} (${milestone.state})`);
765
- } else {
766
- logger.debug(`Milestone not found: ${title}`);
767
- }
768
- return milestone || null;
769
- } catch (error) {
770
- logger.error(`Failed to search for milestone ${title}: ${error.message}`);
771
- throw error;
772
- }
773
- };
774
- const closeMilestone = async (milestoneNumber)=>{
775
- const octokit = getOctokit();
776
- const { owner, repo } = await getRepoDetails();
777
- const logger = getLogger();
778
- try {
779
- logger.info(`Closing milestone #${milestoneNumber}...`);
780
- await octokit.issues.updateMilestone({
781
- owner,
782
- repo,
783
- milestone_number: milestoneNumber,
784
- state: 'closed'
785
- });
786
- logger.info(`✅ Milestone #${milestoneNumber} closed`);
787
- } catch (error) {
788
- logger.error(`Failed to close milestone #${milestoneNumber}: ${error.message}`);
789
- throw error;
790
- }
791
- };
792
- const closeMilestoneForVersion = async (version)=>{
793
- const logger = getLogger();
794
- try {
795
- const milestoneTitle = `release/${version}`;
796
- logger.debug(`Closing milestone: ${milestoneTitle}`);
797
- const milestone = await findMilestoneByTitle(milestoneTitle);
798
- if (!milestone) {
799
- logger.debug(`Milestone not found: ${milestoneTitle}`);
800
- return;
801
- }
802
- if (milestone.state === 'closed') {
803
- logger.debug(`Milestone already closed: ${milestoneTitle}`);
804
- return;
805
- }
806
- await closeMilestone(milestone.number);
807
- logger.info(`🏁 Closed milestone: ${milestoneTitle}`);
808
- } catch (error) {
809
- // Don't fail the whole operation if milestone management fails
810
- logger.warn(`⚠️ Failed to close milestone (continuing): ${error.message}`);
811
- }
812
- };
813
- const getClosedIssuesForMilestone = async (milestoneNumber, limit = 50)=>{
814
- const octokit = getOctokit();
815
- const { owner, repo } = await getRepoDetails();
816
- const logger = getLogger();
817
- try {
818
- logger.debug(`Getting closed issues for milestone #${milestoneNumber}`);
819
- const response = await octokit.issues.listForRepo({
820
- owner,
821
- repo,
822
- state: 'closed',
823
- milestone: milestoneNumber.toString(),
824
- per_page: Math.min(limit, 100),
825
- sort: 'updated',
826
- direction: 'desc'
827
- });
828
- // Filter out PRs and only include issues closed as completed
829
- const issues = response.data.filter((issue)=>!issue.pull_request && issue.state_reason === 'completed');
830
- logger.debug(`Found ${issues.length} closed issues for milestone #${milestoneNumber}`);
831
- return issues;
832
- } catch (error) {
833
- logger.error(`Failed to get closed issues for milestone #${milestoneNumber}: ${error.message}`);
834
- throw error;
835
- }
836
- };
837
- const getIssueDetails = async (issueNumber, maxTokens = 20000)=>{
838
- const octokit = getOctokit();
839
- const { owner, repo } = await getRepoDetails();
840
- const logger = getLogger();
841
- try {
842
- logger.debug(`Getting details for issue #${issueNumber}`);
843
- // Get the issue
844
- const issueResponse = await octokit.issues.get({
845
- owner,
846
- repo,
847
- issue_number: issueNumber
848
- });
849
- const issue = issueResponse.data;
850
- const content = {
851
- title: issue.title,
852
- body: issue.body || '',
853
- comments: [],
854
- totalTokens: 0
855
- };
856
- // Estimate tokens (rough approximation: 1 token ≈ 4 characters)
857
- const estimateTokens = (text)=>Math.ceil(text.length / 4);
858
- let currentTokens = estimateTokens(content.title + content.body);
859
- content.totalTokens = currentTokens;
860
- // If we're already at or near the limit with just title and body, return now
861
- if (currentTokens >= maxTokens * 0.9) {
862
- logger.debug(`Issue #${issueNumber} title/body already uses ${currentTokens} tokens, skipping comments`);
863
- return content;
864
- }
865
- // Get comments
866
- try {
867
- const commentsResponse = await octokit.issues.listComments({
868
- owner,
869
- repo,
870
- issue_number: issueNumber,
871
- per_page: 100
872
- });
873
- for (const comment of commentsResponse.data){
874
- var _comment_user;
875
- const commentTokens = estimateTokens(comment.body || '');
876
- if (currentTokens + commentTokens > maxTokens) {
877
- logger.debug(`Stopping at comment to stay under ${maxTokens} token limit for issue #${issueNumber}`);
878
- break;
879
- }
880
- content.comments.push({
881
- author: (_comment_user = comment.user) === null || _comment_user === void 0 ? void 0 : _comment_user.login,
882
- body: comment.body,
883
- created_at: comment.created_at
884
- });
885
- currentTokens += commentTokens;
886
- }
887
- } catch (error) {
888
- logger.debug(`Failed to get comments for issue #${issueNumber}: ${error.message}`);
889
- }
890
- content.totalTokens = currentTokens;
891
- logger.debug(`Issue #${issueNumber} details: ${currentTokens} tokens`);
892
- return content;
893
- } catch (error) {
894
- logger.error(`Failed to get details for issue #${issueNumber}: ${error.message}`);
895
- throw error;
896
- }
897
- };
898
- const getMilestoneIssuesForRelease = async (versions, maxTotalTokens = 50000)=>{
899
- const logger = getLogger();
900
- try {
901
- const allIssues = [];
902
- const processedVersions = [];
903
- for (const version of versions){
904
- const milestoneTitle = `release/${version}`;
905
- logger.debug(`Looking for milestone: ${milestoneTitle}`);
906
- const milestone = await findMilestoneByTitle(milestoneTitle);
907
- if (!milestone) {
908
- logger.debug(`Milestone not found: ${milestoneTitle}`);
909
- continue;
910
- }
911
- const issues = await getClosedIssuesForMilestone(milestone.number);
912
- if (issues.length > 0) {
913
- allIssues.push(...issues.map((issue)=>({
914
- ...issue,
915
- version
916
- })));
917
- processedVersions.push(version);
918
- logger.info(`📋 Found ${issues.length} closed issues in milestone ${milestoneTitle}`);
919
- }
920
- }
921
- if (allIssues.length === 0) {
922
- logger.debug('No closed issues found in any milestones');
923
- return '';
924
- }
925
- // Sort issues by updated date (most recent first)
926
- allIssues.sort((a, b)=>new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime());
927
- logger.info(`📋 Processing ${allIssues.length} issues for release notes (max ${maxTotalTokens} tokens)`);
928
- let releaseNotesContent = '';
929
- let totalTokens = 0;
930
- const estimateTokens = (text)=>Math.ceil(text.length / 4);
931
- // Add header
932
- const header = `## Issues Resolved\n\nThe following issues were resolved in this release:\n\n`;
933
- releaseNotesContent += header;
934
- totalTokens += estimateTokens(header);
935
- for (const issue of allIssues){
936
- // Get detailed issue content with individual token limit
937
- const issueDetails = await getIssueDetails(issue.number, 20000);
938
- // Create issue section
939
- let issueSection = `### #${issue.number}: ${issueDetails.title}\n\n`;
940
- if (issueDetails.body) {
941
- issueSection += `**Description:**\n${issueDetails.body}\n\n`;
942
- }
943
- if (issueDetails.comments.length > 0) {
944
- issueSection += `**Key Discussion Points:**\n`;
945
- for (const comment of issueDetails.comments){
946
- issueSection += `- **${comment.author}**: ${comment.body}\n`;
947
- }
948
- issueSection += '\n';
949
- }
950
- // Add labels if present
951
- if (issue.labels && issue.labels.length > 0) {
952
- const labelNames = issue.labels.map((label)=>typeof label === 'string' ? label : label.name).join(', ');
953
- issueSection += `**Labels:** ${labelNames}\n\n`;
954
- }
955
- issueSection += '---\n\n';
956
- const sectionTokens = estimateTokens(issueSection);
957
- // Check if adding this issue would exceed the total limit
958
- if (totalTokens + sectionTokens > maxTotalTokens) {
959
- logger.info(`Stopping at issue #${issue.number} to stay under ${maxTotalTokens} token limit`);
960
- break;
961
- }
962
- releaseNotesContent += issueSection;
963
- totalTokens += sectionTokens;
964
- logger.debug(`Added issue #${issue.number} (${sectionTokens} tokens, total: ${totalTokens})`);
965
- }
966
- logger.info(`📋 Generated release notes from milestone issues (${totalTokens} tokens)`);
967
- return releaseNotesContent;
968
- } catch (error) {
969
- // Don't fail the whole operation if milestone content fails
970
- logger.warn(`⚠️ Failed to get milestone issues for release notes (continuing): ${error.message}`);
971
- return '';
972
- }
973
- };
974
- /**
975
- * Get recently closed GitHub issues for commit message context.
976
- * Prioritizes issues from milestones that match the current version.
977
- */ const getRecentClosedIssuesForCommit = async (currentVersion, limit = 10)=>{
978
- const octokit = getOctokit();
979
- const { owner, repo } = await getRepoDetails();
980
- const logger = getLogger();
981
- try {
982
- logger.debug(`Fetching up to ${limit} recently closed GitHub issues for commit context...`);
983
- // Get recently closed issues
984
- const response = await octokit.issues.listForRepo({
985
- owner,
986
- repo,
987
- state: 'closed',
988
- per_page: Math.min(limit, 100),
989
- sort: 'updated',
990
- direction: 'desc'
991
- });
992
- const issues = response.data.filter((issue)=>!issue.pull_request && // Filter out PRs
993
- issue.state_reason === 'completed' // Only issues closed as completed
994
- );
995
- if (issues.length === 0) {
996
- logger.debug('No recently closed issues found');
997
- return '';
998
- }
999
- // Determine relevant milestone if we have a current version
1000
- let relevantMilestone = null;
1001
- if (currentVersion) {
1002
- // Extract base version for milestone matching (e.g., "0.1.1" from "0.1.1-dev.0")
1003
- const baseVersion = currentVersion.includes('-dev.') ? currentVersion.split('-')[0] : currentVersion;
1004
- const milestoneTitle = `release/${baseVersion}`;
1005
- relevantMilestone = await findMilestoneByTitle(milestoneTitle);
1006
- if (relevantMilestone) {
1007
- logger.debug(`Found relevant milestone: ${milestoneTitle}`);
1008
- } else {
1009
- logger.debug(`No milestone found for version: ${baseVersion}`);
1010
- }
1011
- }
1012
- // Categorize issues by relevance
1013
- const milestoneIssues = [];
1014
- const otherIssues = [];
1015
- for (const issue of issues.slice(0, limit)){
1016
- var _issue_milestone;
1017
- if (relevantMilestone && ((_issue_milestone = issue.milestone) === null || _issue_milestone === void 0 ? void 0 : _issue_milestone.number) === relevantMilestone.number) {
1018
- milestoneIssues.push(issue);
1019
- } else {
1020
- otherIssues.push(issue);
1021
- }
1022
- }
1023
- // Build the content, prioritizing milestone issues
1024
- const issueStrings = [];
1025
- // Add milestone issues first (these are most relevant)
1026
- if (milestoneIssues.length > 0) {
1027
- issueStrings.push(`## Recent Issues from Current Milestone (${relevantMilestone.title}):`);
1028
- milestoneIssues.forEach((issue)=>{
1029
- var _issue_body;
1030
- const labels = issue.labels.map((label)=>typeof label === 'string' ? label : label.name).join(', ');
1031
- issueStrings.push([
1032
- `Issue #${issue.number}: ${issue.title}`,
1033
- `Labels: ${labels || 'none'}`,
1034
- `Closed: ${issue.closed_at}`,
1035
- `Body: ${((_issue_body = issue.body) === null || _issue_body === void 0 ? void 0 : _issue_body.substring(0, 300)) || 'No description'}${issue.body && issue.body.length > 300 ? '...' : ''}`,
1036
- '---'
1037
- ].join('\n'));
1038
- });
1039
- }
1040
- // Add other recent issues if we have space
1041
- const remainingLimit = limit - milestoneIssues.length;
1042
- if (otherIssues.length > 0 && remainingLimit > 0) {
1043
- if (milestoneIssues.length > 0) {
1044
- issueStrings.push('\n## Other Recent Closed Issues:');
1045
- }
1046
- otherIssues.slice(0, remainingLimit).forEach((issue)=>{
1047
- var _issue_body;
1048
- const labels = issue.labels.map((label)=>typeof label === 'string' ? label : label.name).join(', ');
1049
- const milestoneInfo = issue.milestone ? `Milestone: ${issue.milestone.title}` : 'Milestone: none';
1050
- issueStrings.push([
1051
- `Issue #${issue.number}: ${issue.title}`,
1052
- `Labels: ${labels || 'none'}`,
1053
- milestoneInfo,
1054
- `Closed: ${issue.closed_at}`,
1055
- `Body: ${((_issue_body = issue.body) === null || _issue_body === void 0 ? void 0 : _issue_body.substring(0, 300)) || 'No description'}${issue.body && issue.body.length > 300 ? '...' : ''}`,
1056
- '---'
1057
- ].join('\n'));
1058
- });
1059
- }
1060
- const totalRelevantIssues = milestoneIssues.length;
1061
- const totalOtherIssues = Math.min(otherIssues.length, remainingLimit);
1062
- logger.debug(`Fetched ${totalRelevantIssues + totalOtherIssues} closed issues (${totalRelevantIssues} from relevant milestone, ${totalOtherIssues} others)`);
1063
- return issueStrings.join('\n\n');
1064
- } catch (error) {
1065
- logger.warn('Failed to fetch recent closed GitHub issues: %s', error.message);
1066
- return '';
1067
- }
1068
- };
1069
-
1070
- export { closeMilestone, closeMilestoneForVersion, createIssue, createPullRequest, createRelease, findMilestoneByTitle, findOpenPullRequestByHeadRef, getClosedIssuesForMilestone, getCurrentBranchName, getIssueDetails, getMilestoneIssuesForRelease, getOctokit, getOpenIssues, getRecentClosedIssuesForCommit, getReleaseByTagName, getRepoDetails, getWorkflowRunsTriggeredByRelease, getWorkflowsTriggeredByRelease, mergePullRequest, waitForPullRequestChecks, waitForReleaseWorkflows };
1071
- //# sourceMappingURL=github.js.map