@eldrforge/kodrdriv 0.1.0 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/application.js +25 -3
- package/dist/application.js.map +1 -1
- package/dist/arguments.js +103 -18
- package/dist/arguments.js.map +1 -1
- package/dist/commands/audio-commit.js +28 -7
- package/dist/commands/audio-commit.js.map +1 -1
- package/dist/commands/audio-review.js +28 -7
- package/dist/commands/audio-review.js.map +1 -1
- package/dist/commands/commit.js +75 -18
- package/dist/commands/commit.js.map +1 -1
- package/dist/commands/development.js +264 -0
- package/dist/commands/development.js.map +1 -0
- package/dist/commands/link.js +356 -181
- package/dist/commands/link.js.map +1 -1
- package/dist/commands/publish.js +166 -32
- package/dist/commands/publish.js.map +1 -1
- package/dist/commands/release.js +78 -13
- package/dist/commands/release.js.map +1 -1
- package/dist/commands/review.js +10 -6
- package/dist/commands/review.js.map +1 -1
- package/dist/commands/tree.js +450 -24
- package/dist/commands/tree.js.map +1 -1
- package/dist/commands/unlink.js +267 -372
- package/dist/commands/unlink.js.map +1 -1
- package/dist/commands/versions.js +224 -0
- package/dist/commands/versions.js.map +1 -0
- package/dist/constants.js +29 -10
- package/dist/constants.js.map +1 -1
- package/dist/content/diff.js.map +1 -1
- package/dist/content/files.js +192 -0
- package/dist/content/files.js.map +1 -0
- package/dist/content/log.js +16 -0
- package/dist/content/log.js.map +1 -1
- package/dist/main.js +0 -0
- package/dist/prompt/commit.js +9 -2
- package/dist/prompt/commit.js.map +1 -1
- package/dist/prompt/instructions/commit.md +20 -2
- package/dist/prompt/instructions/release.md +27 -10
- package/dist/prompt/instructions/review.md +75 -8
- package/dist/prompt/release.js +13 -5
- package/dist/prompt/release.js.map +1 -1
- package/dist/types.js +21 -5
- package/dist/types.js.map +1 -1
- package/dist/util/child.js +112 -26
- package/dist/util/child.js.map +1 -1
- package/dist/util/countdown.js +215 -0
- package/dist/util/countdown.js.map +1 -0
- package/dist/util/general.js +31 -7
- package/dist/util/general.js.map +1 -1
- package/dist/util/git.js +587 -0
- package/dist/util/git.js.map +1 -0
- package/dist/util/github.js +519 -3
- package/dist/util/github.js.map +1 -1
- package/dist/util/interactive.js +245 -79
- package/dist/util/interactive.js.map +1 -1
- package/dist/util/openai.js +70 -22
- package/dist/util/openai.js.map +1 -1
- package/dist/util/performance.js +1 -69
- package/dist/util/performance.js.map +1 -1
- package/dist/util/storage.js +28 -1
- package/dist/util/storage.js.map +1 -1
- package/dist/util/validation.js +1 -25
- package/dist/util/validation.js.map +1 -1
- package/package.json +10 -8
- package/test-multiline/cli/package.json +8 -0
- package/test-multiline/core/package.json +5 -0
- package/test-multiline/mobile/package.json +8 -0
- package/test-multiline/web/package.json +8 -0
- package/dist/util/npmOptimizations.js +0 -174
- package/dist/util/npmOptimizations.js.map +0 -1
package/dist/util/github.js
CHANGED
@@ -84,6 +84,55 @@ const hasWorkflowsConfigured = async ()=>{
 return true;
 }
 };
+/**
+ * Check if any workflow runs have been triggered for a specific PR
+ * This is more specific than hasWorkflowsConfigured as it checks for actual runs
+ */ const hasWorkflowRunsForPR = async (prNumber)=>{
+const octokit = getOctokit();
+const { owner, repo } = await getRepoDetails();
+const logger = getLogger();
+try {
+// Get the PR to find the head SHA
+const pr = await octokit.pulls.get({
+owner,
+repo,
+pull_number: prNumber
+});
+const headSha = pr.data.head.sha;
+const headRef = pr.data.head.ref;
+// Check for workflow runs triggered by this PR
+const workflowRuns = await octokit.actions.listWorkflowRunsForRepo({
+owner,
+repo,
+head_sha: headSha,
+per_page: 50
+});
+// Also check for runs on the branch
+const branchRuns = await octokit.actions.listWorkflowRunsForRepo({
+owner,
+repo,
+branch: headRef,
+per_page: 50
+});
+const allRuns = [
+...workflowRuns.data.workflow_runs,
+...branchRuns.data.workflow_runs
+];
+// Filter to runs that match our PR's head SHA or are very recent on the branch
+const relevantRuns = allRuns.filter((run)=>run.head_sha === headSha || run.head_branch === headRef && new Date(run.created_at).getTime() > Date.now() - 300000 // Last 5 minutes
+);
+if (relevantRuns.length > 0) {
+logger.debug(`Found ${relevantRuns.length} workflow runs for PR #${prNumber} (SHA: ${headSha})`);
+return true;
+}
+logger.debug(`No workflow runs found for PR #${prNumber} (SHA: ${headSha}, branch: ${headRef})`);
+return false;
+} catch (error) {
+logger.debug(`Error checking workflow runs for PR #${prNumber}: ${error.message}`);
+// If we can't check workflow runs, assume they might exist
+return true;
+}
+};
 const waitForPullRequestChecks = async (prNumber, options = {})=>{
 const octokit = getOctokit();
 const { owner, repo } = await getRepoDetails();
@@ -93,6 +142,7 @@ const waitForPullRequestChecks = async (prNumber, options = {})=>{
 const startTime = Date.now();
 let consecutiveNoChecksCount = 0;
 const maxConsecutiveNoChecks = 6; // 6 consecutive checks (1 minute) with no checks before asking user
+let checkedWorkflowRuns = false; // Track if we've already checked for workflow runs to avoid repeated checks
 while(true){
 const elapsedTime = Date.now() - startTime;
 // Check for timeout
@@ -144,8 +194,48 @@ const waitForPullRequestChecks = async (prNumber, options = {})=>{
 return;
 }
 } else {
-
-
+// Workflows exist, but check if any are actually running for this PR
+if (!checkedWorkflowRuns) {
+logger.info('GitHub Actions workflows are configured. Checking if any workflows are triggered for this PR...');
+const hasRunsForPR = await hasWorkflowRunsForPR(prNumber);
+checkedWorkflowRuns = true; // Mark that we've checked
+if (!hasRunsForPR) {
+logger.warn(`No workflow runs detected for PR #${prNumber}. This may indicate that the configured workflows don't match this branch pattern.`);
+if (!skipUserConfirmation) {
+const proceedWithoutChecks = await promptConfirmation(`⚠️ GitHub Actions workflows are configured in this repository, but none appear to be triggered by PR #${prNumber}.\n` + `This usually means the workflow trigger patterns (branches, paths) don't match this PR.\n` + `PR #${prNumber} will likely never have status checks to wait for.\n` + `Do you want to proceed with merging the PR without waiting for checks?`);
+if (proceedWithoutChecks) {
+logger.info('User chose to proceed without checks (no matching workflow triggers).');
+return;
+} else {
+throw new Error(`No matching workflow triggers for PR #${prNumber}. User chose not to proceed.`);
+}
+} else {
+// In non-interactive mode, proceed if no workflow runs are detected
+logger.info('No workflow runs detected for this PR, proceeding without checks.');
+return;
+}
+} else {
+logger.info('Workflow runs detected for this PR. Continuing to wait for checks...');
+consecutiveNoChecksCount = 0; // Reset counter since workflow runs exist
+}
+} else {
+// We've already checked workflow runs and found none that match this PR
+// At this point, we should give up to avoid infinite loops
+logger.warn(`Still no checks after ${consecutiveNoChecksCount} attempts. No workflow runs match this PR.`);
+if (!skipUserConfirmation) {
+const proceedWithoutChecks = await promptConfirmation(`⚠️ After waiting ${Math.round(elapsedTime / 1000)}s, no checks have appeared for PR #${prNumber}.\n` + `The configured workflows don't appear to trigger for this branch.\n` + `Do you want to proceed with merging the PR without checks?`);
+if (proceedWithoutChecks) {
+logger.info('User chose to proceed without checks (timeout waiting for workflow triggers).');
+return;
+} else {
+throw new Error(`No workflow triggers matched PR #${prNumber} after waiting. User chose not to proceed.`);
+}
+} else {
+// In non-interactive mode, proceed after reasonable waiting
+logger.info('No workflow runs detected after waiting, proceeding without checks.');
+return;
+}
+}
+}
 }
 }
 await delay(10000);
@@ -634,6 +724,432 @@ const isTriggeredByRelease = (workflowContent, workflowName)=>{
 return false;
 }
 };
+// Milestone Management Functions
+const findMilestoneByTitle = async (title)=>{
+const octokit = getOctokit();
+const { owner, repo } = await getRepoDetails();
+const logger = getLogger();
+try {
+logger.debug(`Searching for milestone: ${title}`);
+const response = await octokit.issues.listMilestones({
+owner,
+repo,
+state: 'all',
+per_page: 100
+});
+const milestone = response.data.find((m)=>m.title === title);
+if (milestone) {
+logger.debug(`Found milestone: ${milestone.title} (${milestone.state})`);
+} else {
+logger.debug(`Milestone not found: ${title}`);
+}
+return milestone || null;
+} catch (error) {
+logger.error(`Failed to search for milestone ${title}: ${error.message}`);
+throw error;
+}
+};
+const createMilestone = async (title, description)=>{
+const octokit = getOctokit();
+const { owner, repo } = await getRepoDetails();
+const logger = getLogger();
+try {
+logger.info(`Creating milestone: ${title}`);
+const response = await octokit.issues.createMilestone({
+owner,
+repo,
+title,
+description
+});
+logger.info(`✅ Milestone created: ${title} (#${response.data.number})`);
+return response.data;
+} catch (error) {
+logger.error(`Failed to create milestone ${title}: ${error.message}`);
+throw error;
+}
+};
+const closeMilestone = async (milestoneNumber)=>{
+const octokit = getOctokit();
+const { owner, repo } = await getRepoDetails();
+const logger = getLogger();
+try {
+logger.info(`Closing milestone #${milestoneNumber}...`);
+await octokit.issues.updateMilestone({
+owner,
+repo,
+milestone_number: milestoneNumber,
+state: 'closed'
+});
+logger.info(`✅ Milestone #${milestoneNumber} closed`);
+} catch (error) {
+logger.error(`Failed to close milestone #${milestoneNumber}: ${error.message}`);
+throw error;
+}
+};
+const getOpenIssuesForMilestone = async (milestoneNumber)=>{
+const octokit = getOctokit();
+const { owner, repo } = await getRepoDetails();
+const logger = getLogger();
+try {
+logger.debug(`Getting open issues for milestone #${milestoneNumber}`);
+const response = await octokit.issues.listForRepo({
+owner,
+repo,
+state: 'open',
+milestone: milestoneNumber.toString(),
+per_page: 100
+});
+const issues = response.data.filter((issue)=>!issue.pull_request); // Filter out PRs
+logger.debug(`Found ${issues.length} open issues for milestone #${milestoneNumber}`);
+return issues;
+} catch (error) {
+logger.error(`Failed to get issues for milestone #${milestoneNumber}: ${error.message}`);
+throw error;
+}
+};
+const moveIssueToMilestone = async (issueNumber, milestoneNumber)=>{
+const octokit = getOctokit();
+const { owner, repo } = await getRepoDetails();
+const logger = getLogger();
+try {
+logger.debug(`Moving issue #${issueNumber} to milestone #${milestoneNumber}`);
+await octokit.issues.update({
+owner,
+repo,
+issue_number: issueNumber,
+milestone: milestoneNumber
+});
+logger.debug(`✅ Issue #${issueNumber} moved to milestone #${milestoneNumber}`);
+} catch (error) {
+logger.error(`Failed to move issue #${issueNumber} to milestone #${milestoneNumber}: ${error.message}`);
+throw error;
+}
+};
+const moveOpenIssuesToNewMilestone = async (fromMilestoneNumber, toMilestoneNumber)=>{
+const logger = getLogger();
+try {
+const openIssues = await getOpenIssuesForMilestone(fromMilestoneNumber);
+if (openIssues.length === 0) {
+logger.debug(`No open issues to move from milestone #${fromMilestoneNumber}`);
+return 0;
+}
+logger.info(`Moving ${openIssues.length} open issues from milestone #${fromMilestoneNumber} to #${toMilestoneNumber}`);
+for (const issue of openIssues){
+await moveIssueToMilestone(issue.number, toMilestoneNumber);
+}
+logger.info(`✅ Moved ${openIssues.length} issues to new milestone`);
+return openIssues.length;
+} catch (error) {
+logger.error(`Failed to move issues between milestones: ${error.message}`);
+throw error;
+}
+};
+const ensureMilestoneForVersion = async (version, fromVersion)=>{
+const logger = getLogger();
+try {
+const milestoneTitle = `release/${version}`;
+logger.debug(`Ensuring milestone exists: ${milestoneTitle}`);
+// Check if milestone already exists
+let milestone = await findMilestoneByTitle(milestoneTitle);
+if (milestone) {
+logger.info(`✅ Milestone already exists: ${milestoneTitle}`);
+return;
+}
+// Create new milestone
+milestone = await createMilestone(milestoneTitle, `Release ${version}`);
+// If we have a previous version, move open issues from its milestone
+if (fromVersion) {
+const previousMilestoneTitle = `release/${fromVersion}`;
+const previousMilestone = await findMilestoneByTitle(previousMilestoneTitle);
+if (previousMilestone && previousMilestone.state === 'closed') {
+const movedCount = await moveOpenIssuesToNewMilestone(previousMilestone.number, milestone.number);
+if (movedCount > 0) {
+logger.info(`📋 Moved ${movedCount} open issues from ${previousMilestoneTitle} to ${milestoneTitle}`);
+}
+}
+}
+} catch (error) {
+// Don't fail the whole operation if milestone management fails
+logger.warn(`⚠️ Milestone management failed (continuing): ${error.message}`);
+}
+};
+const closeMilestoneForVersion = async (version)=>{
+const logger = getLogger();
+try {
+const milestoneTitle = `release/${version}`;
+logger.debug(`Closing milestone: ${milestoneTitle}`);
+const milestone = await findMilestoneByTitle(milestoneTitle);
+if (!milestone) {
+logger.debug(`Milestone not found: ${milestoneTitle}`);
+return;
+}
+if (milestone.state === 'closed') {
+logger.debug(`Milestone already closed: ${milestoneTitle}`);
+return;
+}
+await closeMilestone(milestone.number);
+logger.info(`🏁 Closed milestone: ${milestoneTitle}`);
+} catch (error) {
+// Don't fail the whole operation if milestone management fails
+logger.warn(`⚠️ Failed to close milestone (continuing): ${error.message}`);
+}
+};
+const getClosedIssuesForMilestone = async (milestoneNumber, limit = 50)=>{
+const octokit = getOctokit();
+const { owner, repo } = await getRepoDetails();
+const logger = getLogger();
+try {
+logger.debug(`Getting closed issues for milestone #${milestoneNumber}`);
+const response = await octokit.issues.listForRepo({
+owner,
+repo,
+state: 'closed',
+milestone: milestoneNumber.toString(),
+per_page: Math.min(limit, 100),
+sort: 'updated',
+direction: 'desc'
+});
+// Filter out PRs and only include issues closed as completed
+const issues = response.data.filter((issue)=>!issue.pull_request && issue.state_reason === 'completed');
+logger.debug(`Found ${issues.length} closed issues for milestone #${milestoneNumber}`);
+return issues;
+} catch (error) {
+logger.error(`Failed to get closed issues for milestone #${milestoneNumber}: ${error.message}`);
+throw error;
+}
+};
+const getIssueDetails = async (issueNumber, maxTokens = 20000)=>{
+const octokit = getOctokit();
+const { owner, repo } = await getRepoDetails();
+const logger = getLogger();
+try {
+logger.debug(`Getting details for issue #${issueNumber}`);
+// Get the issue
+const issueResponse = await octokit.issues.get({
+owner,
+repo,
+issue_number: issueNumber
+});
+const issue = issueResponse.data;
+const content = {
+title: issue.title,
+body: issue.body || '',
+comments: [],
+totalTokens: 0
+};
+// Estimate tokens (rough approximation: 1 token ≈ 4 characters)
+const estimateTokens = (text)=>Math.ceil(text.length / 4);
+let currentTokens = estimateTokens(content.title + content.body);
+content.totalTokens = currentTokens;
+// If we're already at or near the limit with just title and body, return now
+if (currentTokens >= maxTokens * 0.9) {
+logger.debug(`Issue #${issueNumber} title/body already uses ${currentTokens} tokens, skipping comments`);
+return content;
+}
+// Get comments
+try {
+const commentsResponse = await octokit.issues.listComments({
+owner,
+repo,
+issue_number: issueNumber,
+per_page: 100
+});
+for (const comment of commentsResponse.data){
+var _comment_user;
+const commentTokens = estimateTokens(comment.body || '');
+if (currentTokens + commentTokens > maxTokens) {
+logger.debug(`Stopping at comment to stay under ${maxTokens} token limit for issue #${issueNumber}`);
+break;
+}
+content.comments.push({
+author: (_comment_user = comment.user) === null || _comment_user === void 0 ? void 0 : _comment_user.login,
+body: comment.body,
+created_at: comment.created_at
+});
+currentTokens += commentTokens;
+}
+} catch (error) {
+logger.debug(`Failed to get comments for issue #${issueNumber}: ${error.message}`);
+}
+content.totalTokens = currentTokens;
+logger.debug(`Issue #${issueNumber} details: ${currentTokens} tokens`);
+return content;
+} catch (error) {
+logger.error(`Failed to get details for issue #${issueNumber}: ${error.message}`);
+throw error;
+}
+};
+const getMilestoneIssuesForRelease = async (versions, maxTotalTokens = 50000)=>{
+const logger = getLogger();
+try {
+const allIssues = [];
+const processedVersions = [];
+for (const version of versions){
+const milestoneTitle = `release/${version}`;
+logger.debug(`Looking for milestone: ${milestoneTitle}`);
+const milestone = await findMilestoneByTitle(milestoneTitle);
+if (!milestone) {
+logger.debug(`Milestone not found: ${milestoneTitle}`);
+continue;
+}
+const issues = await getClosedIssuesForMilestone(milestone.number);
+if (issues.length > 0) {
+allIssues.push(...issues.map((issue)=>({
+...issue,
+version
+})));
+processedVersions.push(version);
+logger.info(`📋 Found ${issues.length} closed issues in milestone ${milestoneTitle}`);
+}
+}
+if (allIssues.length === 0) {
+logger.debug('No closed issues found in any milestones');
+return '';
+}
+// Sort issues by updated date (most recent first)
+allIssues.sort((a, b)=>new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime());
+logger.info(`📋 Processing ${allIssues.length} issues for release notes (max ${maxTotalTokens} tokens)`);
+let releaseNotesContent = '';
+let totalTokens = 0;
+const estimateTokens = (text)=>Math.ceil(text.length / 4);
+// Add header
+const header = `## Issues Resolved\n\nThe following issues were resolved in this release:\n\n`;
+releaseNotesContent += header;
+totalTokens += estimateTokens(header);
+for (const issue of allIssues){
+// Get detailed issue content with individual token limit
+const issueDetails = await getIssueDetails(issue.number, 20000);
+// Create issue section
+let issueSection = `### #${issue.number}: ${issueDetails.title}\n\n`;
+if (issueDetails.body) {
+issueSection += `**Description:**\n${issueDetails.body}\n\n`;
+}
+if (issueDetails.comments.length > 0) {
+issueSection += `**Key Discussion Points:**\n`;
+for (const comment of issueDetails.comments){
+issueSection += `- **${comment.author}**: ${comment.body}\n`;
+}
+issueSection += '\n';
+}
+// Add labels if present
+if (issue.labels && issue.labels.length > 0) {
+const labelNames = issue.labels.map((label)=>typeof label === 'string' ? label : label.name).join(', ');
+issueSection += `**Labels:** ${labelNames}\n\n`;
+}
+issueSection += '---\n\n';
+const sectionTokens = estimateTokens(issueSection);
+// Check if adding this issue would exceed the total limit
+if (totalTokens + sectionTokens > maxTotalTokens) {
+logger.info(`Stopping at issue #${issue.number} to stay under ${maxTotalTokens} token limit`);
+break;
+}
+releaseNotesContent += issueSection;
+totalTokens += sectionTokens;
+logger.debug(`Added issue #${issue.number} (${sectionTokens} tokens, total: ${totalTokens})`);
+}
+logger.info(`📋 Generated release notes from milestone issues (${totalTokens} tokens)`);
+return releaseNotesContent;
+} catch (error) {
+// Don't fail the whole operation if milestone content fails
+logger.warn(`⚠️ Failed to get milestone issues for release notes (continuing): ${error.message}`);
+return '';
+}
+};
+/**
+ * Get recently closed GitHub issues for commit message context.
+ * Prioritizes issues from milestones that match the current version.
+ */ const getRecentClosedIssuesForCommit = async (currentVersion, limit = 10)=>{
+const octokit = getOctokit();
+const { owner, repo } = await getRepoDetails();
+const logger = getLogger();
+try {
+logger.debug(`Fetching up to ${limit} recently closed GitHub issues for commit context...`);
+// Get recently closed issues
+const response = await octokit.issues.listForRepo({
+owner,
+repo,
+state: 'closed',
+per_page: Math.min(limit, 100),
+sort: 'updated',
+direction: 'desc'
+});
+const issues = response.data.filter((issue)=>!issue.pull_request && // Filter out PRs
+issue.state_reason === 'completed' // Only issues closed as completed
+);
+if (issues.length === 0) {
+logger.debug('No recently closed issues found');
+return '';
+}
+// Determine relevant milestone if we have a current version
+let relevantMilestone = null;
+if (currentVersion) {
+// Extract base version for milestone matching (e.g., "0.1.1" from "0.1.1-dev.0")
+const baseVersion = currentVersion.includes('-dev.') ? currentVersion.split('-')[0] : currentVersion;
+const milestoneTitle = `release/${baseVersion}`;
+relevantMilestone = await findMilestoneByTitle(milestoneTitle);
+if (relevantMilestone) {
+logger.debug(`Found relevant milestone: ${milestoneTitle}`);
+} else {
+logger.debug(`No milestone found for version: ${baseVersion}`);
+}
+}
+// Categorize issues by relevance
+const milestoneIssues = [];
+const otherIssues = [];
+for (const issue of issues.slice(0, limit)){
+var _issue_milestone;
+if (relevantMilestone && ((_issue_milestone = issue.milestone) === null || _issue_milestone === void 0 ? void 0 : _issue_milestone.number) === relevantMilestone.number) {
+milestoneIssues.push(issue);
+} else {
+otherIssues.push(issue);
+}
+}
+// Build the content, prioritizing milestone issues
+const issueStrings = [];
+// Add milestone issues first (these are most relevant)
+if (milestoneIssues.length > 0) {
+issueStrings.push(`## Recent Issues from Current Milestone (${relevantMilestone.title}):`);
+milestoneIssues.forEach((issue)=>{
+var _issue_body;
+const labels = issue.labels.map((label)=>typeof label === 'string' ? label : label.name).join(', ');
+issueStrings.push([
+`Issue #${issue.number}: ${issue.title}`,
+`Labels: ${labels || 'none'}`,
+`Closed: ${issue.closed_at}`,
+`Body: ${((_issue_body = issue.body) === null || _issue_body === void 0 ? void 0 : _issue_body.substring(0, 300)) || 'No description'}${issue.body && issue.body.length > 300 ? '...' : ''}`,
+'---'
+].join('\n'));
+});
+}
+// Add other recent issues if we have space
+const remainingLimit = limit - milestoneIssues.length;
+if (otherIssues.length > 0 && remainingLimit > 0) {
+if (milestoneIssues.length > 0) {
+issueStrings.push('\n## Other Recent Closed Issues:');
+}
+otherIssues.slice(0, remainingLimit).forEach((issue)=>{
+var _issue_body;
+const labels = issue.labels.map((label)=>typeof label === 'string' ? label : label.name).join(', ');
+const milestoneInfo = issue.milestone ? `Milestone: ${issue.milestone.title}` : 'Milestone: none';
+issueStrings.push([
+`Issue #${issue.number}: ${issue.title}`,
+`Labels: ${labels || 'none'}`,
+milestoneInfo,
+`Closed: ${issue.closed_at}`,
+`Body: ${((_issue_body = issue.body) === null || _issue_body === void 0 ? void 0 : _issue_body.substring(0, 300)) || 'No description'}${issue.body && issue.body.length > 300 ? '...' : ''}`,
+'---'
+].join('\n'));
+});
+}
+const totalRelevantIssues = milestoneIssues.length;
+const totalOtherIssues = Math.min(otherIssues.length, remainingLimit);
+logger.debug(`Fetched ${totalRelevantIssues + totalOtherIssues} closed issues (${totalRelevantIssues} from relevant milestone, ${totalOtherIssues} others)`);
+return issueStrings.join('\n\n');
+} catch (error) {
+logger.warn('Failed to fetch recent closed GitHub issues: %s', error.message);
+return '';
+}
+};
 
-export { createIssue, createPullRequest, createRelease, findOpenPullRequestByHeadRef, getCurrentBranchName, getOctokit, getOpenIssues, getReleaseByTagName, getRepoDetails, getWorkflowRunsTriggeredByRelease, getWorkflowsTriggeredByRelease, mergePullRequest, waitForPullRequestChecks, waitForReleaseWorkflows };
+export { closeMilestone, closeMilestoneForVersion, createIssue, createMilestone, createPullRequest, createRelease, ensureMilestoneForVersion, findMilestoneByTitle, findOpenPullRequestByHeadRef, getClosedIssuesForMilestone, getCurrentBranchName, getIssueDetails, getMilestoneIssuesForRelease, getOctokit, getOpenIssues, getOpenIssuesForMilestone, getRecentClosedIssuesForCommit, getReleaseByTagName, getRepoDetails, getWorkflowRunsTriggeredByRelease, getWorkflowsTriggeredByRelease, mergePullRequest, moveIssueToMilestone, moveOpenIssuesToNewMilestone, waitForPullRequestChecks, waitForReleaseWorkflows };
 //# sourceMappingURL=github.js.map
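
For orientation, here is a hypothetical usage sketch (not part of the published package) showing how the milestone helpers newly exported from dist/util/github.js might be wired into a release flow. The import path, version strings, and surrounding steps are assumptions; the function names and parameters are taken from the diff above, and the sketch assumes the GitHub token and repository context expected by getOctokit and getRepoDetails are already configured.

```js
// Hypothetical usage sketch for the new milestone helpers in @eldrforge/kodrdriv.
// The deep import path and the version strings are assumptions; the exported
// function names and signatures come from the github.js diff above.
import {
    ensureMilestoneForVersion,
    getMilestoneIssuesForRelease,
    closeMilestoneForVersion
} from '@eldrforge/kodrdriv/dist/util/github.js';

const releaseFlowSketch = async ()=>{
    const previousVersion = '1.2.0'; // assumed prior release
    const nextVersion = '1.2.1'; // assumed upcoming release

    // Ensure a `release/<version>` milestone exists, carrying open issues forward
    // from the previous (closed) milestone. Failures are logged and swallowed.
    await ensureMilestoneForVersion(nextVersion, previousVersion);

    // Collect closed-issue content for release notes, capped at ~50k estimated tokens.
    const issuesSection = await getMilestoneIssuesForRelease([nextVersion], 50000);
    if (issuesSection) {
        console.log(issuesSection); // starts with "## Issues Resolved"
    }

    // After publishing, close the `release/<version>` milestone.
    await closeMilestoneForVersion(nextVersion);
};

releaseFlowSketch().catch(console.error);
```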