@oss-autopilot/core 1.17.4 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/cli-registry.js +417 -326
- package/dist/cli.bundle.cjs +99 -96
- package/dist/commands/daily-render.d.ts +39 -0
- package/dist/commands/daily-render.js +189 -0
- package/dist/commands/dashboard-data.js +9 -3
- package/dist/commands/index.d.ts +4 -8
- package/dist/commands/index.js +3 -5
- package/dist/commands/list-move-tier.d.ts +46 -0
- package/dist/commands/list-move-tier.js +192 -0
- package/dist/commands/pr-template.js +2 -1
- package/dist/commands/state-cmd.d.ts +10 -1
- package/dist/commands/state-cmd.js +22 -3
- package/dist/commands/track.d.ts +7 -28
- package/dist/commands/track.js +8 -30
- package/dist/core/auth.d.ts +50 -0
- package/dist/core/auth.js +160 -0
- package/dist/core/concurrency.d.ts +7 -0
- package/dist/core/concurrency.js +9 -0
- package/dist/core/daily-logic.d.ts +10 -42
- package/dist/core/daily-logic.js +14 -201
- package/dist/core/dates.d.ts +37 -0
- package/dist/core/dates.js +60 -0
- package/dist/core/errors.d.ts +14 -0
- package/dist/core/errors.js +22 -0
- package/dist/core/gist-state-store.d.ts +48 -2
- package/dist/core/gist-state-store.js +120 -24
- package/dist/core/github-stats.js +1 -1
- package/dist/core/http-cache.js +1 -1
- package/dist/core/index.d.ts +5 -1
- package/dist/core/index.js +5 -1
- package/dist/core/issue-conversation.js +3 -2
- package/dist/core/paths.d.ts +68 -0
- package/dist/core/paths.js +106 -0
- package/dist/core/pr-monitor.js +3 -1
- package/dist/core/repo-score-manager.js +1 -1
- package/dist/core/state-persistence.js +1 -1
- package/dist/core/state.d.ts +16 -2
- package/dist/core/state.js +42 -7
- package/dist/core/types.d.ts +57 -0
- package/dist/core/urls.d.ts +63 -0
- package/dist/core/urls.js +101 -0
- package/dist/formatters/json.d.ts +464 -74
- package/dist/formatters/json.js +380 -0
- package/package.json +3 -3
- package/dist/commands/read.d.ts +0 -18
- package/dist/commands/read.js +0 -20
- package/dist/core/utils.d.ts +0 -303
- package/dist/core/utils.js +0 -529
|
/**
 * Rendering functions for the daily digest.
 *
 * Extracted from src/core/daily-logic.ts under #1117 so that
 * core/daily-logic.ts can be a pure-aggregations module with no console
 * or markdown output mixed in. The aggregations live in core because
 * other surfaces (dashboard, MCP) consume them too.
 *
 * - formatActionHint — human-readable maintainer action hint label
 * - formatBriefSummary — single-line status string
 * - formatSummary — multi-line markdown summary
 * - printDigest — console.log-based plain-text rendering
 *
 * Tests for these renderers live alongside the aggregations test
 * (daily-logic.test.ts) — pure rendering, easy to assert on string output.
 */
import type { CapacityAssessment, CommentedIssue, CommentedIssueWithResponse, DailyDigest, MaintainerActionHint } from '../core/types.js';
/**
 * Format a maintainer action hint as a human-readable label.
 */
export declare function formatActionHint(hint: MaintainerActionHint): string;
/**
 * Format a brief one-liner summary for the action-first flow.
 *
 * @param digest - Aggregated daily digest (only `summary.totalActivePRs` is read).
 * @param issueCount - Number of PRs needing attention; 0 renders "all on track".
 * @param issueResponseCount - Optional count of new issue replies; omitted from the line when 0.
 * @returns One-line status string (e.g., "3 Active PRs | 1 needs attention | 2 issue replies")
 */
export declare function formatBriefSummary(digest: DailyDigest, issueCount: number, issueResponseCount?: number): string;
/**
 * Format the full dashboard summary as markdown.
 * Used in JSON output for Claude to display verbatim — includes all PR sections,
 * issue replies, and capacity status.
 */
export declare function formatSummary(digest: DailyDigest, capacity: CapacityAssessment, issueResponses?: CommentedIssueWithResponse[]): string;
/**
 * Print digest to console as plain text. Unified renderer: uses the same
 * section ordering as {@link formatSummary} but outputs plain text with
 * console.log instead of markdown links.
 */
export declare function printDigest(digest: DailyDigest, capacity: CapacityAssessment, commentedIssues?: CommentedIssue[]): void;
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Rendering functions for the daily digest.
|
|
3
|
+
*
|
|
4
|
+
* Extracted from src/core/daily-logic.ts under #1117 so that
|
|
5
|
+
* core/daily-logic.ts can be a pure-aggregations module with no console
|
|
6
|
+
* or markdown output mixed in. The aggregations live in core because
|
|
7
|
+
* other surfaces (dashboard, MCP) consume them too.
|
|
8
|
+
*
|
|
9
|
+
* - formatActionHint — human-readable maintainer action hint label
|
|
10
|
+
* - formatBriefSummary — single-line status string
|
|
11
|
+
* - formatSummary — multi-line markdown summary
|
|
12
|
+
* - printDigest — console.log-based plain-text rendering
|
|
13
|
+
*
|
|
14
|
+
* Tests for these renderers live alongside the aggregations test
|
|
15
|
+
* (daily-logic.test.ts) — pure rendering, easy to assert on string output.
|
|
16
|
+
*/
|
|
17
|
+
import { formatRelativeTime } from '../core/dates.js';
|
|
18
|
+
/**
 * Format a maintainer action hint as a human-readable label.
 *
 * The declared `MaintainerActionHint` union is exhaustive at the TypeScript
 * level, but this compiled JS can receive values from older/newer persisted
 * state files. The original switch had no default, so an unrecognized hint
 * returned `undefined` and rendered as the string "undefined" when callers
 * interpolated the result. Falls back to the raw hint with underscores
 * replaced by spaces so unknown values still render as readable text.
 */
export function formatActionHint(hint) {
    switch (hint) {
        case 'demo_requested':
            return 'demo/screenshot requested';
        case 'tests_requested':
            return 'tests requested';
        case 'changes_requested':
            return 'code changes requested';
        case 'docs_requested':
            return 'documentation requested';
        case 'rebase_requested':
            return 'rebase requested';
        default:
            // Defensive fallback for hint values not yet known to this build.
            return String(hint).replace(/_/g, ' ');
    }
}
|
|
35
|
+
/**
 * Format a brief one-liner summary for the action-first flow.
 *
 * @returns One-line status string (e.g., "3 Active PRs | 1 needs attention | 2 issue replies")
 */
export function formatBriefSummary(digest, issueCount, issueResponseCount = 0) {
    // Attention segment: "N need(s) attention" when anything is flagged,
    // otherwise the all-clear phrase.
    let attention = 'all on track';
    if (issueCount > 0) {
        attention = issueCount === 1 ? '1 needs attention' : `${issueCount} need attention`;
    }
    // Issue-reply segment is appended only when there is at least one reply.
    let replies = '';
    if (issueResponseCount > 0) {
        replies = issueResponseCount === 1 ? ' | 1 issue reply' : ` | ${issueResponseCount} issue replies`;
    }
    return `\u{1F4CA} ${digest.summary.totalActivePRs} Active PRs | ${attention}${replies}`;
}
|
|
45
|
+
/**
 * Format the full dashboard summary as markdown.
 * Used in JSON output for Claude to display verbatim — includes all PR sections,
 * issue replies, and capacity status.
 */
export function formatSummary(digest, capacity, issueResponses = []) {
    const out = [];
    // Markdown link-style list item shared by every PR section.
    const prLink = (pr) => `- [${pr.repo}#${pr.number}](${pr.url}): ${pr.title}`;
    // Append a "### Heading" section followed by a trailing blank line.
    // `render` maps one item to its markdown line(s); `preamble` lines (if
    // any) appear directly under the heading. Empty sections are omitted.
    const section = (heading, items, render, preamble = []) => {
        if (items.length === 0) {
            return;
        }
        out.push(heading, ...preamble);
        for (const item of items) {
            out.push(...render(item));
        }
        out.push('');
    };
    out.push('## OSS Dashboard');
    out.push('');
    out.push(`\u{1F4CA} **${digest.summary.totalActivePRs} Active PRs** | ${digest.summary.totalMergedAllTime} Merged | ${digest.summary.mergeRate}% Merge Rate`);
    out.push('✓ Dashboard generated — say "open dashboard" to view in browser');
    out.push('');
    section('### ❌ Needs Addressing', digest.needsAddressingPRs, (pr) => [
        prLink(pr),
        `  └─ ${pr.displayLabel} ${pr.displayDescription}`,
    ]);
    section('### ⏳ Waiting on Maintainer', digest.waitingOnMaintainerPRs, (pr) => [
        prLink(pr),
        `  └─ ${pr.displayDescription}`,
    ]);
    section('### \u{1F389} Recently Merged', digest.recentlyMergedPRs, (pr) => {
        const mergedDate = pr.mergedAt ? new Date(pr.mergedAt).toLocaleDateString() : '';
        return [`${prLink(pr)}${mergedDate ? ` (merged ${mergedDate})` : ''}`];
    });
    section('### \u{1F6AB} Recently Closed', digest.recentlyClosedPRs, (pr) => {
        const closedDate = pr.closedAt ? new Date(pr.closedAt).toLocaleDateString() : '';
        return [`${prLink(pr)}${closedDate ? ` (closed ${closedDate})` : ''}`];
    });
    section('### \u{1F514} Auto-Unshelved', digest.autoUnshelvedPRs, (pr) => [
        `${prLink(pr)} (${pr.status.replace(/_/g, ' ')})`,
    ], ['> These PRs were shelved but a maintainer engaged — moved back to active.']);
    section('### \u{1F4E6} Shelved', digest.shelvedPRs, (pr) => [prLink(pr)]);
    section('### \u{1F4AC} Issue Replies', issueResponses, (issue) => {
        // Truncate long reply bodies to 80 characters with an ellipsis.
        const quoted = issue.lastResponseBody.slice(0, 80) + (issue.lastResponseBody.length > 80 ? '...' : '');
        const timeAgo = formatRelativeTime(issue.lastResponseAt);
        return [
            `- [${issue.repo}#${issue.number}](${issue.url}): ${issue.title}`,
            `  └─ @${issue.lastResponseAuthor}: "${quoted}"${timeAgo ? ` (${timeAgo})` : ''}`,
        ];
    });
    // Capacity footer — always present, even when every section above is empty.
    const capacityIcon = capacity.hasCapacity ? '✅' : '⚠️';
    const capacityLabel = capacity.hasCapacity ? 'Ready for new work' : 'Focus on existing PRs';
    const shelvedNote = capacity.shelvedPRCount > 0 ? ` + ${capacity.shelvedPRCount} shelved` : '';
    out.push(`**Capacity:** ${capacityIcon} ${capacityLabel} (${capacity.activePRCount}/${capacity.maxActivePRs} PRs${shelvedNote})`);
    return out.join('\n');
}
|
|
119
|
+
/**
|
|
120
|
+
* Print digest to console as plain text. Unified renderer: uses the same
|
|
121
|
+
* section ordering as {@link formatSummary} but outputs plain text with
|
|
122
|
+
* console.log instead of markdown links.
|
|
123
|
+
*/
|
|
124
|
+
export function printDigest(digest, capacity, commentedIssues = []) {
|
|
125
|
+
console.log('\n\u{1F4CA} OSS Daily Check\n');
|
|
126
|
+
console.log(`Active PRs: ${digest.summary.totalActivePRs}`);
|
|
127
|
+
console.log(`Needing Attention: ${digest.summary.totalNeedingAttention}`);
|
|
128
|
+
console.log(`Merged (all time): ${digest.summary.totalMergedAllTime}`);
|
|
129
|
+
console.log(`Merge Rate: ${digest.summary.mergeRate}%`);
|
|
130
|
+
console.log(`\nCapacity: ${capacity.hasCapacity ? '✅ Ready for new work' : '⚠️ Focus on existing work'}`);
|
|
131
|
+
console.log(` ${capacity.reason}\n`);
|
|
132
|
+
if (digest.needsAddressingPRs.length > 0) {
|
|
133
|
+
console.log('❌ Needs Addressing:');
|
|
134
|
+
for (const pr of digest.needsAddressingPRs) {
|
|
135
|
+
console.log(` - ${pr.repo}#${pr.number}: ${pr.title}`);
|
|
136
|
+
console.log(` ${pr.displayLabel} ${pr.displayDescription}`);
|
|
137
|
+
}
|
|
138
|
+
console.log('');
|
|
139
|
+
}
|
|
140
|
+
if (digest.waitingOnMaintainerPRs.length > 0) {
|
|
141
|
+
console.log('⏳ Waiting on Maintainer:');
|
|
142
|
+
for (const pr of digest.waitingOnMaintainerPRs) {
|
|
143
|
+
console.log(` - ${pr.repo}#${pr.number}: ${pr.title}`);
|
|
144
|
+
console.log(` ${pr.displayDescription}`);
|
|
145
|
+
}
|
|
146
|
+
console.log('');
|
|
147
|
+
}
|
|
148
|
+
if (digest.recentlyMergedPRs.length > 0) {
|
|
149
|
+
console.log('\u{1F389} Recently Merged:');
|
|
150
|
+
for (const pr of digest.recentlyMergedPRs) {
|
|
151
|
+
const mergedDate = pr.mergedAt ? new Date(pr.mergedAt).toLocaleDateString() : '';
|
|
152
|
+
console.log(` - ${pr.repo}#${pr.number}: ${pr.title}${mergedDate ? ` (merged ${mergedDate})` : ''}`);
|
|
153
|
+
}
|
|
154
|
+
console.log('');
|
|
155
|
+
}
|
|
156
|
+
if (digest.recentlyClosedPRs.length > 0) {
|
|
157
|
+
console.log('\u{1F6AB} Recently Closed:');
|
|
158
|
+
for (const pr of digest.recentlyClosedPRs) {
|
|
159
|
+
const closedDate = pr.closedAt ? new Date(pr.closedAt).toLocaleDateString() : '';
|
|
160
|
+
console.log(` - ${pr.repo}#${pr.number}: ${pr.title}${closedDate ? ` (closed ${closedDate})` : ''}`);
|
|
161
|
+
}
|
|
162
|
+
console.log('');
|
|
163
|
+
}
|
|
164
|
+
if (digest.autoUnshelvedPRs.length > 0) {
|
|
165
|
+
console.log('\u{1F514} Auto-Unshelved:');
|
|
166
|
+
for (const pr of digest.autoUnshelvedPRs) {
|
|
167
|
+
console.log(` - ${pr.repo}#${pr.number}: ${pr.title} (${pr.status.replace(/_/g, ' ')})`);
|
|
168
|
+
}
|
|
169
|
+
console.log('');
|
|
170
|
+
}
|
|
171
|
+
if (digest.shelvedPRs.length > 0) {
|
|
172
|
+
console.log('\u{1F4E6} Shelved:');
|
|
173
|
+
for (const pr of digest.shelvedPRs) {
|
|
174
|
+
console.log(` - ${pr.repo}#${pr.number}: ${pr.title}`);
|
|
175
|
+
}
|
|
176
|
+
console.log('');
|
|
177
|
+
}
|
|
178
|
+
const issueResponses = commentedIssues.filter((i) => i.status === 'new_response');
|
|
179
|
+
if (issueResponses.length > 0) {
|
|
180
|
+
console.log('\u{1F4AC} Issue Replies:');
|
|
181
|
+
for (const issue of issueResponses) {
|
|
182
|
+
console.log(` - ${issue.repo}#${issue.number}: ${issue.title}`);
|
|
183
|
+
console.log(` @${issue.lastResponseAuthor}: ${issue.lastResponseBody.slice(0, 80)}${issue.lastResponseBody.length > 80 ? '...' : ''}`);
|
|
184
|
+
}
|
|
185
|
+
console.log('');
|
|
186
|
+
}
|
|
187
|
+
console.log('Run with --json for structured output');
|
|
188
|
+
console.log('Run "dashboard serve" for browser view');
|
|
189
|
+
}
|
|
@@ -7,7 +7,7 @@ import { getStateManager, PRMonitor, IssueConversationMonitor, getOctokit } from
|
|
|
7
7
|
import { errorMessage, isRateLimitOrAuthError } from '../core/errors.js';
|
|
8
8
|
import { warn } from '../core/logger.js';
|
|
9
9
|
import { emptyPRCountsResult, fetchMergedPRsSince, fetchClosedPRsSince } from '../core/github-stats.js';
|
|
10
|
-
import { parseGitHubUrl } from '../core/
|
|
10
|
+
import { parseGitHubUrl } from '../core/urls.js';
|
|
11
11
|
import { isBelowMinStars, } from '../core/types.js';
|
|
12
12
|
import { toShelvedPRRef, buildStarFilter } from './daily.js';
|
|
13
13
|
const MODULE = 'dashboard-data';
|
|
@@ -182,14 +182,20 @@ export async function fetchDashboardData(token) {
|
|
|
182
182
|
stateManager.batch(() => {
|
|
183
183
|
// Store new merged PRs incrementally (dedupes by URL)
|
|
184
184
|
try {
|
|
185
|
-
stateManager.addMergedPRs(newMergedPRs);
|
|
185
|
+
const { dropped } = stateManager.addMergedPRs(newMergedPRs);
|
|
186
|
+
if (dropped > 0) {
|
|
187
|
+
partialFailures.push(`Dropped ${dropped} merged PR(s) with invalid URLs before persistence`);
|
|
188
|
+
}
|
|
186
189
|
}
|
|
187
190
|
catch (error) {
|
|
188
191
|
warn(MODULE, `Failed to store merged PRs: ${errorMessage(error)}`);
|
|
189
192
|
}
|
|
190
193
|
// Store new closed PRs incrementally (dedupes by URL)
|
|
191
194
|
try {
|
|
192
|
-
stateManager.addClosedPRs(newClosedPRs);
|
|
195
|
+
const { dropped } = stateManager.addClosedPRs(newClosedPRs);
|
|
196
|
+
if (dropped > 0) {
|
|
197
|
+
partialFailures.push(`Dropped ${dropped} closed PR(s) with invalid URLs before persistence`);
|
|
198
|
+
}
|
|
193
199
|
}
|
|
194
200
|
catch (error) {
|
|
195
201
|
warn(MODULE, `Failed to store closed PRs: ${errorMessage(error)}`);
|
package/dist/commands/index.d.ts
CHANGED
|
@@ -27,12 +27,8 @@ export { runSearch, MAX_SEARCH_RESULTS } from './search.js';
|
|
|
27
27
|
export { runVet } from './vet.js';
|
|
28
28
|
/** Re-vet all available issues in a curated issue list for freshness. */
|
|
29
29
|
export { runVetList } from './vet-list.js';
|
|
30
|
-
/**
|
|
30
|
+
/** Fetch PR metadata from GitHub (informational; nothing is persisted). */
|
|
31
31
|
export { runTrack } from './track.js';
|
|
32
|
-
/** Remove a PR from tracking state. */
|
|
33
|
-
export { runUntrack } from './track.js';
|
|
34
|
-
/** Mark PR comments as read. */
|
|
35
|
-
export { runRead } from './read.js';
|
|
36
32
|
/** Temporarily hide a PR from the daily digest. */
|
|
37
33
|
export { runShelve } from './shelve.js';
|
|
38
34
|
/** Restore a shelved PR to the daily digest. */
|
|
@@ -65,6 +61,8 @@ export { runStateSync } from './state-cmd.js';
|
|
|
65
61
|
export { runStateUnlink } from './state-cmd.js';
|
|
66
62
|
/** Parse a curated markdown issue list file into structured issue items. */
|
|
67
63
|
export { runParseList, pruneIssueList } from './parse-list.js';
|
|
64
|
+
/** Move an issue between Pursue / Maybe / Skip sections of a curated list (#1107). */
|
|
65
|
+
export { runListMoveTier, moveIssueToTier, type Tier, type ListMoveTierOptions, type ListMoveTierOutput, } from './list-move-tier.js';
|
|
68
66
|
/** Check if new files are properly referenced/integrated. */
|
|
69
67
|
export { runCheckIntegration } from './check-integration.js';
|
|
70
68
|
/** System-health diagnostic — verifies tokens, bundle, state, scout, rate limit. */
|
|
@@ -78,13 +76,11 @@ export type { ErrorCode } from '../formatters/json.js';
|
|
|
78
76
|
export type { DailyOutput, SearchOutput, StartupOutput, StatusOutput, TrackOutput } from '../formatters/json.js';
|
|
79
77
|
export type { VetOutput, CommentsOutput, PostOutput, ClaimOutput, VetListOutput, VetListItemStatus, } from '../formatters/json.js';
|
|
80
78
|
export type { ConfigOutput, DetectFormattersOutput, ParseIssueListOutput, ParsedIssueItem, CheckIntegrationOutput, LocalReposOutput, } from '../formatters/json.js';
|
|
81
|
-
export type { ReadOutput } from './read.js';
|
|
82
79
|
export type { ShelveOutput, UnshelveOutput } from './shelve.js';
|
|
83
80
|
export type { MoveOutput, MoveTarget } from './move.js';
|
|
84
81
|
export type { DismissOutput, UndismissOutput } from './dismiss.js';
|
|
85
|
-
export type { UntrackOutput } from './track.js';
|
|
86
82
|
export type { InitOutput } from './init.js';
|
|
87
83
|
export type { ConfigSetOutput, ConfigCommandOutput } from './config.js';
|
|
88
84
|
export type { SetupSetOutput, SetupCompleteOutput, SetupRequiredOutput, SetupOutput, CheckSetupOutput, } from './setup.js';
|
|
89
85
|
export type { DailyCheckResult } from './daily.js';
|
|
90
|
-
export type { StateShowOutput, StateSyncOutput, StateUnlinkOutput } from './state-cmd.js';
|
|
86
|
+
export type { StateShowOutput, StateSyncOutput, StateUnlinkOutput, InvalidUrlEntry } from './state-cmd.js';
|
package/dist/commands/index.js
CHANGED
|
@@ -29,12 +29,8 @@ export { runVet } from './vet.js';
|
|
|
29
29
|
/** Re-vet all available issues in a curated issue list for freshness. */
|
|
30
30
|
export { runVetList } from './vet-list.js';
|
|
31
31
|
// ── PR Management ───────────────────────────────────────────────────────────
|
|
32
|
-
/**
|
|
32
|
+
/** Fetch PR metadata from GitHub (informational; nothing is persisted). */
|
|
33
33
|
export { runTrack } from './track.js';
|
|
34
|
-
/** Remove a PR from tracking state. */
|
|
35
|
-
export { runUntrack } from './track.js';
|
|
36
|
-
/** Mark PR comments as read. */
|
|
37
|
-
export { runRead } from './read.js';
|
|
38
34
|
/** Temporarily hide a PR from the daily digest. */
|
|
39
35
|
export { runShelve } from './shelve.js';
|
|
40
36
|
/** Restore a shelved PR to the daily digest. */
|
|
@@ -71,6 +67,8 @@ export { runStateUnlink } from './state-cmd.js';
|
|
|
71
67
|
// ── Utilities ───────────────────────────────────────────────────────────────
|
|
72
68
|
/** Parse a curated markdown issue list file into structured issue items. */
|
|
73
69
|
export { runParseList, pruneIssueList } from './parse-list.js';
|
|
70
|
+
/** Move an issue between Pursue / Maybe / Skip sections of a curated list (#1107). */
|
|
71
|
+
export { runListMoveTier, moveIssueToTier, } from './list-move-tier.js';
|
|
74
72
|
/** Check if new files are properly referenced/integrated. */
|
|
75
73
|
export { runCheckIntegration } from './check-integration.js';
|
|
76
74
|
/** System-health diagnostic — verifies tokens, bundle, state, scout, rate limit. */
|
|
/**
 * list-move-tier command (#1107).
 *
 * Move an issue line — and any indented sub-bullets that belong to it —
 * between the `## Pursue`, `## Maybe`, and `## Skip` sections of a curated
 * issue-list markdown file. Replaces the model-driven prose rewrite that
 * lived in /oss-search with a deterministic file manipulation.
 *
 * Idempotent: re-running with the same target tier is a no-op.
 *
 * No GitHub calls — pure read/transform/write of a local file.
 */
export type Tier = 'pursue' | 'maybe' | 'skip';
export interface ListMoveTierOptions {
    /** Issue URL to locate in the list (matched as an exact substring). */
    issueUrl: string;
    /** Destination tier section. */
    tier: Tier;
    /** Path to the curated issue-list markdown file. */
    listPath: string;
}
export interface ListMoveTierOutput {
    /** Whether anything moved (false when the URL isn't in the list, or it was already in the target tier). */
    moved: boolean;
    /** Fully-resolved file path that was inspected. */
    filePath: string;
    /** The issue URL that was searched for. */
    url: string;
    /** The target tier (always normalized to one of pursue/maybe/skip). */
    toTier: Tier;
    /** The tier the issue was moved out of, if it had one. Absent when not found or already in target. */
    fromTier?: string;
    /** Number of matching entries moved. Should normally be 1; >1 means the list contained duplicate entries (all moved). */
    count: number;
    /** Human-readable explanation when `moved` is false. */
    reason?: string;
}
/**
 * Pure transform — accepts the file content and returns the rewritten content
 * plus a summary of what changed. Exported for unit testing.
 */
export declare function moveIssueToTier(content: string, issueUrl: string, targetTier: Tier): {
    content: string;
    moved: boolean;
    fromTier?: string;
    count: number;
    reason?: string;
};
export declare function runListMoveTier(options: ListMoveTierOptions): Promise<ListMoveTierOutput>;
|
@@ -0,0 +1,192 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* list-move-tier command (#1107).
|
|
3
|
+
*
|
|
4
|
+
* Move an issue line — and any indented sub-bullets that belong to it —
|
|
5
|
+
* between the `## Pursue`, `## Maybe`, and `## Skip` sections of a curated
|
|
6
|
+
* issue-list markdown file. Replaces the model-driven prose rewrite that
|
|
7
|
+
* lived in /oss-search with a deterministic file manipulation.
|
|
8
|
+
*
|
|
9
|
+
* Idempotent: re-running with the same target tier is a no-op.
|
|
10
|
+
*
|
|
11
|
+
* No GitHub calls — pure read/transform/write of a local file.
|
|
12
|
+
*/
|
|
13
|
+
import * as fs from 'fs';
|
|
14
|
+
import * as path from 'path';
|
|
15
|
+
import { errorMessage } from '../core/errors.js';
|
|
16
|
+
// Markdown `##` section headers for each tier, keyed by the normalized
// tier name used throughout this module.
const TIER_HEADERS = {
    pursue: '## Pursue',
    maybe: '## Maybe',
    skip: '## Skip',
};
|
|
21
|
+
/**
 * Return the heading text when `line` is a level-2 markdown heading
 * (`## Something`), otherwise undefined.
 *
 * Uses a plain prefix check instead of a regex on the heading body so a
 * long whitespace prefix can't trigger backtracking in static analyzers
 * (CodeQL js/polynomial-redos).
 */
function parseLevel2Heading(line) {
    return line.startsWith('## ') ? line.slice(3).trim() : undefined;
}
|
|
30
|
+
/**
 * Identify which "## Pursue|Maybe|Skip" section a line index sits under, if
 * any — by walking upward until the nearest `##` heading is found.
 */
function tierForLine(lines, lineIndex) {
    let i = lineIndex;
    while (i > 0) {
        i -= 1;
        const candidate = lines[i];
        const heading = parseLevel2Heading(candidate);
        if (heading !== undefined) {
            return heading;
        }
        // A level-1 heading (`# Foo`) starts a new top-level scope — never
        // attribute the line to a tier section from a previous scope.
        if (candidate.startsWith('# ')) {
            return undefined;
        }
    }
    return undefined;
}
|
|
42
|
+
/**
 * Locate every issue block (the top-level list line containing `issueUrl`
 * plus any indented sub-bullets directly beneath it).
 *
 * Matching uses an exact substring test on the URL — no regex escaping
 * pitfalls, and the URL itself contains no markdown delimiters that would
 * split a line.
 */
function findIssueBlocks(lines, issueUrl) {
    // Top-level list item: `- `, `* `, `+ `, or `1. ` with no leading whitespace.
    const topLevelItem = /^[-*+]\s|^\d+\.\s/;
    // Sub-bullets are lines indented by two or more spaces.
    const indentedChild = /^\s{2,}/;
    const blocks = [];
    let i = 0;
    while (i < lines.length) {
        const line = lines[i];
        if (!topLevelItem.test(line) || !line.includes(issueUrl)) {
            i += 1;
            continue;
        }
        // Consume the run of indented sub-bullets belonging to this item.
        let end = i + 1;
        while (end < lines.length && indentedChild.test(lines[end])) {
            end += 1;
        }
        blocks.push({ start: i, end, tier: tierForLine(lines, i) });
        i = end; // resume scanning past the captured sub-bullets
    }
    return blocks;
}
|
|
63
|
+
/**
 * Find the line index of the target tier's `## ...` header (case-insensitive,
 * tolerant of surrounding whitespace in the heading text), or undefined if
 * the section is absent.
 */
function findTierHeaderIndex(lines, tier) {
    const wanted = TIER_HEADERS[tier].toLowerCase();
    const index = lines.findIndex((line) => {
        const heading = parseLevel2Heading(line);
        return heading !== undefined && `## ${heading}`.toLowerCase() === wanted;
    });
    return index === -1 ? undefined : index;
}
|
|
74
|
+
/**
 * Return the splice() index marking the end of the section that starts at
 * `headerIndex`: the index of the next `#`/`##` heading after it, or
 * `lines.length` when no further heading exists. (Level-3+ headings do not
 * terminate the section.)
 */
function findTierInsertionIndex(lines, headerIndex) {
    const sectionBoundary = /^#{1,2}\s+/;
    let i = headerIndex + 1;
    while (i < lines.length && !sectionBoundary.test(lines[i])) {
        i += 1;
    }
    return i;
}
|
|
85
|
+
/**
 * Pure transform — accepts the file content and returns the rewritten content
 * plus a summary of what changed. Exported for unit testing.
 *
 * @param content    Full markdown text of the curated issue list.
 * @param issueUrl   Issue URL to look for (exact substring match on each
 *                   top-level list item).
 * @param targetTier Destination tier: 'pursue' | 'maybe' | 'skip'.
 * @returns `{ content, moved, fromTier?, count, reason? }` — `content` is
 *          unchanged when `moved` is false.
 */
export function moveIssueToTier(content, issueUrl, targetTier) {
    // Preserve the trailing newline if present so we don't accidentally strip it.
    const hadTrailingNewline = content.endsWith('\n');
    const lines = (hadTrailingNewline ? content.slice(0, -1) : content).split('\n');
    const blocks = findIssueBlocks(lines, issueUrl);
    if (blocks.length === 0) {
        return { content, moved: false, count: 0, reason: 'issue URL not found in the list' };
    }
    const targetHeader = TIER_HEADERS[targetTier];
    // If every match is already in the target tier, no-op (idempotent).
    // Comparison is case-insensitive to match findTierHeaderIndex.
    if (blocks.every((b) => b.tier !== undefined && `## ${b.tier}`.toLowerCase() === targetHeader.toLowerCase())) {
        return {
            content,
            moved: false,
            fromTier: blocks[0].tier,
            count: blocks.length,
            reason: 'already in target tier',
        };
    }
    // Extract the blocks (highest start index first so earlier indices stay
    // valid as we splice). Capture the original tier for reporting.
    const sortedBlocks = [...blocks].sort((a, b) => b.start - a.start);
    const extracted = [];
    for (const block of sortedBlocks) {
        extracted.push({ tier: block.tier, lines: lines.slice(block.start, block.end) });
        lines.splice(block.start, block.end - block.start);
    }
    // We popped in reverse order; reverse to restore source order.
    extracted.reverse();
    // Ensure the target section exists. If not, append a new header at the end
    // of the document.
    let headerIndex = findTierHeaderIndex(lines, targetTier);
    if (headerIndex === undefined) {
        // Append a blank line (if none precedes) and the new header.
        if (lines.length > 0 && lines[lines.length - 1].trim() !== '') {
            lines.push('');
        }
        lines.push(targetHeader);
        headerIndex = lines.length - 1;
        // Add a blank line after the header so insertion below stays clean.
        lines.push('');
    }
    // Insert the extracted blocks at the top of the target section (just after
    // the header, after any leading blank line).
    let insertAt = headerIndex + 1;
    // Skip a single immediate blank line so entries land before the blank that
    // already separates the section header from its first item, not before the
    // header.
    if (insertAt < lines.length && lines[insertAt].trim() === '') {
        insertAt += 1;
    }
    // Don't cross into a different section.
    const sectionBoundary = findTierInsertionIndex(lines, headerIndex);
    if (insertAt > sectionBoundary)
        insertAt = sectionBoundary;
    // Flatten all extracted blocks (in source order) into one insertion batch.
    const flattened = [];
    for (const e of extracted) {
        flattened.push(...e.lines);
    }
    lines.splice(insertAt, 0, ...flattened);
    let next = lines.join('\n');
    if (hadTrailingNewline)
        next += '\n';
    // Report fromTier as the original tier of the first match. (Multi-match
    // moves across multiple source tiers are uncommon enough that one label is
    // fine; the count surfaces the multiplicity if needed.)
    return {
        content: next,
        moved: true,
        fromTier: extracted[0].tier,
        count: extracted.length,
    };
}
|
|
162
|
+
/**
 * CLI entry point: resolve the list file, apply {@link moveIssueToTier}, and
 * persist the rewritten content only when something actually moved.
 *
 * @throws Error when the file is missing, unreadable, or unwritable (the
 *         underlying error is attached as `cause`).
 */
export async function runListMoveTier(options) {
    const filePath = path.resolve(options.listPath);
    if (!fs.existsSync(filePath)) {
        throw new Error(`File not found: ${filePath}`);
    }
    let content;
    try {
        content = fs.readFileSync(filePath, 'utf-8');
    }
    catch (error) {
        throw new Error(`Failed to read file: ${errorMessage(error)}`, { cause: error });
    }
    const result = moveIssueToTier(content, options.issueUrl, options.tier);
    // Only touch the file when the transform actually changed something —
    // keeps no-op runs side-effect free.
    if (result.moved) {
        try {
            fs.writeFileSync(filePath, result.content, 'utf-8');
        }
        catch (error) {
            throw new Error(`Failed to write file: ${errorMessage(error)}`, { cause: error });
        }
    }
    const { moved, fromTier, count, reason } = result;
    return {
        moved,
        filePath,
        url: options.issueUrl,
        toTier: options.tier,
        fromTier,
        count,
        reason,
    };
}
|
|
@@ -4,7 +4,8 @@
|
|
|
4
4
|
* Usage: oss-autopilot pr-template owner/repo --json
|
|
5
5
|
*/
|
|
6
6
|
import { getOctokit } from '../core/github.js';
|
|
7
|
-
import { requireGitHubToken
|
|
7
|
+
import { requireGitHubToken } from '../core/auth.js';
|
|
8
|
+
import { splitRepo } from '../core/urls.js';
|
|
8
9
|
import { fetchPRTemplate } from '../core/pr-template.js';
|
|
9
10
|
export async function runPRTemplate(opts) {
|
|
10
11
|
const { owner, repo } = splitRepo(opts.repo);
|
|
@@ -2,11 +2,18 @@
|
|
|
2
2
|
* State persistence management commands.
|
|
3
3
|
* Provides --show, --sync, and --unlink subcommands for the Gist persistence layer.
|
|
4
4
|
*/
|
|
5
|
+
export interface InvalidUrlEntry {
|
|
6
|
+
kind: 'merged' | 'closed';
|
|
7
|
+
url: string;
|
|
8
|
+
title: string;
|
|
9
|
+
}
|
|
5
10
|
export interface StateShowOutput {
|
|
6
11
|
persistence: 'local' | 'gist';
|
|
7
12
|
gistId: string | null;
|
|
8
13
|
gistDegraded: boolean;
|
|
9
14
|
lastRunAt: string | undefined;
|
|
15
|
+
/** Present only when validate=true. Existing entries with unparseable URLs. */
|
|
16
|
+
invalidEntries?: InvalidUrlEntry[];
|
|
10
17
|
}
|
|
11
18
|
export interface StateSyncOutput {
|
|
12
19
|
pushed: boolean;
|
|
@@ -17,6 +24,8 @@ export interface StateUnlinkOutput {
|
|
|
17
24
|
localStatePath: string;
|
|
18
25
|
previousGistId: string | null;
|
|
19
26
|
}
|
|
20
|
-
export declare function runStateShow(
|
|
27
|
+
export declare function runStateShow(options?: {
|
|
28
|
+
validate?: boolean;
|
|
29
|
+
}): Promise<StateShowOutput>;
|
|
21
30
|
export declare function runStateSync(): Promise<StateSyncOutput>;
|
|
22
31
|
export declare function runStateUnlink(): Promise<StateUnlinkOutput>;
|
|
@@ -5,18 +5,37 @@
|
|
|
5
5
|
import * as fs from 'fs';
|
|
6
6
|
import { getStateManager, resetStateManager } from '../core/state.js';
|
|
7
7
|
import { atomicWriteFileSync } from '../core/state-persistence.js';
|
|
8
|
-
import { getStatePath, getGistIdPath } from '../core/
|
|
8
|
+
import { getStatePath, getGistIdPath } from '../core/paths.js';
|
|
9
|
+
import { parseGitHubUrl } from '../core/urls.js';
|
|
9
10
|
import { warn } from '../core/logger.js';
|
|
10
11
|
const MODULE = 'state-cmd';
|
|
11
|
-
export async function runStateShow() {
|
|
12
|
+
/**
 * Show the current state-persistence configuration (local vs. Gist, degraded
 * flag, last run timestamp). With `validate: true`, also scans stored PR
 * entries for unparseable URLs.
 */
export async function runStateShow(options = {}) {
    const manager = getStateManager();
    const state = manager.getState();
    const output = {
        persistence: state.config.persistence ?? 'local',
        gistId: state.gistId ?? null,
        gistDegraded: manager.isGistDegraded(),
        lastRunAt: state.lastRunAt,
    };
    // URL validation is opt-in: it walks every stored merged/closed PR.
    if (options.validate) {
        output.invalidEntries = collectInvalidUrlEntries(manager);
    }
    return output;
}
|
|
26
|
+
/**
 * Scan stored merged and closed PRs for entries whose URL no longer parses
 * as a GitHub URL. Merged entries are listed first, then closed, matching
 * the order the state manager returns them.
 */
function collectInvalidUrlEntries(sm) {
    const scan = (kind, prs) => prs
        .filter((pr) => parseGitHubUrl(pr.url) === null)
        .map((pr) => ({ kind, url: pr.url, title: pr.title }));
    return [...scan('merged', sm.getMergedPRs()), ...scan('closed', sm.getClosedPRs())];
}
|
|
21
40
|
export async function runStateSync() {
|
|
22
41
|
const sm = getStateManager();
|