@oss-autopilot/core 1.16.2 → 1.17.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58) hide show
  1. package/dist/cli-registry.js +53 -11
  2. package/dist/cli.bundle.cjs +82 -69
  3. package/dist/cli.js +22 -10
  4. package/dist/commands/comments.js +38 -20
  5. package/dist/commands/config.d.ts +9 -2
  6. package/dist/commands/config.js +12 -3
  7. package/dist/commands/daily.d.ts +3 -1
  8. package/dist/commands/daily.js +126 -37
  9. package/dist/commands/dashboard-data.d.ts +26 -2
  10. package/dist/commands/dashboard-data.js +45 -19
  11. package/dist/commands/dashboard-server.d.ts +1 -1
  12. package/dist/commands/dashboard-server.js +104 -19
  13. package/dist/commands/dismiss.js +4 -1
  14. package/dist/commands/doctor.d.ts +49 -0
  15. package/dist/commands/doctor.js +358 -0
  16. package/dist/commands/index.d.ts +2 -0
  17. package/dist/commands/index.js +2 -0
  18. package/dist/commands/move.d.ts +1 -2
  19. package/dist/commands/move.js +8 -4
  20. package/dist/commands/read.js +2 -1
  21. package/dist/commands/search.d.ts +0 -18
  22. package/dist/commands/search.js +38 -1
  23. package/dist/commands/setup.js +42 -2
  24. package/dist/commands/shelve.js +4 -1
  25. package/dist/commands/skip-add.js +1 -1
  26. package/dist/commands/startup.js +14 -4
  27. package/dist/commands/track.js +2 -1
  28. package/dist/commands/vet-list.d.ts +23 -2
  29. package/dist/commands/vet-list.js +57 -10
  30. package/dist/core/anti-llm-policy.d.ts +5 -0
  31. package/dist/core/anti-llm-policy.js +5 -0
  32. package/dist/core/ci-analysis.js +6 -1
  33. package/dist/core/config-registry.d.ts +44 -0
  34. package/dist/core/config-registry.js +286 -0
  35. package/dist/core/dashboard-data-schema.d.ts +78 -0
  36. package/dist/core/dashboard-data-schema.js +80 -0
  37. package/dist/core/errors.d.ts +14 -0
  38. package/dist/core/errors.js +22 -0
  39. package/dist/core/http-cache.d.ts +8 -1
  40. package/dist/core/http-cache.js +59 -1
  41. package/dist/core/index.d.ts +3 -1
  42. package/dist/core/index.js +3 -1
  43. package/dist/core/maintainer-analysis.js +9 -3
  44. package/dist/core/pr-monitor.d.ts +7 -0
  45. package/dist/core/pr-monitor.js +45 -4
  46. package/dist/core/repo-score-manager.d.ts +17 -3
  47. package/dist/core/repo-score-manager.js +48 -19
  48. package/dist/core/state-persistence.d.ts +14 -1
  49. package/dist/core/state-persistence.js +24 -2
  50. package/dist/core/state-schema.d.ts +2 -0
  51. package/dist/core/state-schema.js +5 -0
  52. package/dist/core/state.d.ts +26 -2
  53. package/dist/core/state.js +50 -5
  54. package/dist/core/status-determination.d.ts +16 -0
  55. package/dist/core/status-determination.js +44 -11
  56. package/dist/formatters/json.d.ts +40 -2
  57. package/dist/formatters/json.js +1 -0
  58. package/package.json +1 -1
package/dist/cli.js CHANGED
@@ -37,16 +37,28 @@ program.hook('preAction', async (thisCommand, actionCommand) => {
37
37
  if (!localOnlySet.has(commandName)) {
38
38
  const token = await getGitHubTokenAsync();
39
39
  if (!token) {
40
- console.error('Error: GitHub authentication required.');
41
- console.error('');
42
- console.error('Option 1 (Recommended): Install and authenticate GitHub CLI');
43
- console.error(' Install: https://cli.github.com/');
44
- console.error(' Then run: gh auth login');
45
- console.error('');
46
- console.error('Option 2: Set GITHUB_TOKEN environment variable');
47
- console.error(' export GITHUB_TOKEN="your-github-token-here"');
48
- console.error('');
49
- console.error('Then run your command again.');
40
+ // Honor --json at the CLI boundary so machine consumers (plugins, MCP
41
+ // stdio harnesses, scripts) get a parseable envelope instead of a
42
+ // stderr blob followed by a non-zero exit. Commander has already parsed
43
+ // the action's own options, so we check both the action command and the
44
+ // raw argv as a fallback (#1056 M20).
45
+ const wantsJson = Boolean(actionCommand.opts().json) || process.argv.includes('--json');
46
+ if (wantsJson) {
47
+ const { outputJsonError } = await import('./formatters/json.js');
48
+ outputJsonError('GitHub authentication required. Install gh CLI and run `gh auth login`, or set GITHUB_TOKEN.', 'AUTH_REQUIRED');
49
+ }
50
+ else {
51
+ console.error('Error: GitHub authentication required.');
52
+ console.error('');
53
+ console.error('Option 1 (Recommended): Install and authenticate GitHub CLI');
54
+ console.error(' Install: https://cli.github.com/');
55
+ console.error(' Then run: gh auth login');
56
+ console.error('');
57
+ console.error('Option 2: Set GITHUB_TOKEN environment variable');
58
+ console.error(' export GITHUB_TOKEN="your-github-token-here"');
59
+ console.error('');
60
+ console.error('Then run your command again.');
61
+ }
50
62
  process.exit(1);
51
63
  }
52
64
  // Activate Gist persistence if configured, before any command runs.
@@ -2,7 +2,10 @@
2
2
  * Comments, Post, and Claim commands
3
3
  * Handles GitHub comment interactions
4
4
  */
5
- import { getStateManager, getOctokit, parseGitHubUrl, requireGitHubToken } from '../core/index.js';
5
+ import { getStateManager, getOctokit, parseGitHubUrl, requireGitHubToken, maybeCheckpoint } from '../core/index.js';
6
+ import { ValidationError } from '../core/errors.js';
7
+ import { warn } from '../core/logger.js';
8
+ const MODULE = 'comments';
6
9
  import { paginateAll } from '../core/pagination.js';
7
10
  import { validateUrl, validateMessage, validateGitHubUrl, PR_URL_PATTERN, ISSUE_OR_PR_URL_PATTERN, ISSUE_URL_PATTERN, } from './validation.js';
8
11
  /**
@@ -24,7 +27,7 @@ export async function runComments(options) {
24
27
  // Parse PR URL
25
28
  const parsed = parseGitHubUrl(options.prUrl);
26
29
  if (!parsed || parsed.type !== 'pull') {
27
- throw new Error('Invalid PR URL format');
30
+ throw new ValidationError('Invalid PR URL format');
28
31
  }
29
32
  const { owner, repo, number: pull_number } = parsed;
30
33
  // Get PR details
@@ -119,14 +122,14 @@ export async function runPost(options) {
119
122
  validateUrl(options.url);
120
123
  validateGitHubUrl(options.url, ISSUE_OR_PR_URL_PATTERN, 'issue or PR');
121
124
  if (!options.message.trim()) {
122
- throw new Error('No message provided');
125
+ throw new ValidationError('No message provided');
123
126
  }
124
127
  validateMessage(options.message);
125
128
  const token = requireGitHubToken();
126
129
  // Parse URL
127
130
  const parsed = parseGitHubUrl(options.url);
128
131
  if (!parsed) {
129
- throw new Error('Invalid GitHub URL format');
132
+ throw new ValidationError('Invalid GitHub URL format');
130
133
  }
131
134
  const { owner, repo, number } = parsed;
132
135
  const octokit = getOctokit(token);
@@ -160,7 +163,7 @@ export async function runClaim(options) {
160
163
  // Parse URL
161
164
  const parsed = parseGitHubUrl(options.issueUrl);
162
165
  if (!parsed || parsed.type !== 'issues') {
163
- throw new Error('Invalid issue URL format (must be an issue, not a PR)');
166
+ throw new ValidationError('Invalid issue URL format (must be an issue, not a PR)');
164
167
  }
165
168
  const { owner, repo, number } = parsed;
166
169
  const octokit = getOctokit(token);
@@ -170,6 +173,26 @@ export async function runClaim(options) {
170
173
  issue_number: number,
171
174
  body: message,
172
175
  });
176
+ // Fetch the real issue title + labels so the tracked entry has useful metadata
177
+ // rather than a permanent "(claimed)" placeholder that never gets backfilled
178
+ // (#1056 M24). Best-effort: if the fetch fails, fall back to the placeholder
179
+ // so state still records the claim.
180
+ let issueTitle = '(claimed)';
181
+ let issueLabels = [];
182
+ let issueCreatedAt = new Date().toISOString();
183
+ try {
184
+ const { data: issue } = await octokit.issues.get({ owner, repo, issue_number: number });
185
+ if (issue.title)
186
+ issueTitle = issue.title;
187
+ issueLabels = (issue.labels ?? [])
188
+ .map((l) => (typeof l === 'string' ? l : (l.name ?? '')))
189
+ .filter((name) => Boolean(name));
190
+ if (issue.created_at)
191
+ issueCreatedAt = issue.created_at;
192
+ }
193
+ catch (error) {
194
+ warn(MODULE, `Claimed ${options.issueUrl} but failed to enrich issue metadata (title/labels): ${error instanceof Error ? error.message : error}`);
195
+ }
173
196
  // Add to tracked issues — non-fatal if state save fails (comment already posted)
174
197
  try {
175
198
  const stateManager = getStateManager();
@@ -178,27 +201,22 @@ export async function runClaim(options) {
178
201
  url: options.issueUrl,
179
202
  repo: `${owner}/${repo}`,
180
203
  number,
181
- title: '(claimed)',
204
+ title: issueTitle,
182
205
  status: 'claimed',
183
- labels: [],
184
- createdAt: new Date().toISOString(),
206
+ labels: issueLabels,
207
+ createdAt: issueCreatedAt,
185
208
  updatedAt: new Date().toISOString(),
186
209
  vetted: false,
187
210
  });
188
- // Push state to Gist if in Gist mode.
189
- // If getStateManagerAsync was not called before this command ran,
190
- // isGistMode() will be false and checkpoint is correctly skipped.
191
- try {
192
- if (stateManager.isGistMode()) {
193
- await stateManager.checkpoint();
194
- }
195
- }
196
- catch {
197
- /* best-effort */
198
- }
211
+ // Push state to Gist if in Gist mode. Best-effort — logs on failure
212
+ // rather than silently swallowing, so operators see the degraded-sync
213
+ // signal (#1036 audit H1).
214
+ await maybeCheckpoint(stateManager, MODULE);
199
215
  }
200
216
  catch (error) {
201
- console.error(`Warning: Comment posted on ${options.issueUrl} but failed to save to local state: ${error instanceof Error ? error.message : error}`);
217
+ // Structured warning instead of bare console.error so the breadcrumb shows
218
+ // up in the plugin's log pipeline (#1056 M24).
219
+ warn(MODULE, `Comment posted on ${options.issueUrl} but failed to save to local state: ${error instanceof Error ? error.message : error}`);
202
220
  }
203
221
  return {
204
222
  commentUrl: comment.html_url,
@@ -2,26 +2,33 @@
2
2
  * Config command
3
3
  * Shows or updates configuration
4
4
  */
5
+ import { type ConfigKeyDef } from '../core/index.js';
5
6
  import type { ConfigOutput } from '../formatters/json.js';
6
7
  interface ConfigOptions {
7
8
  key?: string;
8
9
  value?: string;
10
+ listKeys?: boolean;
9
11
  }
10
12
  export interface ConfigSetOutput {
11
13
  success: true;
12
14
  key: string;
13
15
  value: string;
14
16
  }
15
- export type ConfigCommandOutput = ConfigOutput | ConfigSetOutput;
17
+ export interface ConfigListKeysOutput {
18
+ keys: readonly ConfigKeyDef[];
19
+ }
20
+ export type ConfigCommandOutput = ConfigOutput | ConfigSetOutput | ConfigListKeysOutput;
16
21
  /**
17
22
  * Read or write user configuration settings.
18
23
  * When called without a key, returns the full config.
19
24
  * When called with a key and value, updates the setting.
25
+ * When called with --list-keys, returns the full registry of known keys.
20
26
  *
21
27
  * @param options - Config options
22
28
  * @param options.key - Setting key (e.g., 'username', 'add-language', 'exclude-repo')
23
29
  * @param options.value - Setting value (required when key is provided)
24
- * @returns Current config (when reading) or success confirmation (when writing)
30
+ * @param options.listKeys - When true, return the registry of known keys
31
+ * @returns Current config, success confirmation, or key registry
25
32
  * @throws {Error} If the key is unknown or the value is invalid
26
33
  */
27
34
  export declare function runConfig(options: ConfigOptions): Promise<ConfigCommandOutput>;
@@ -2,7 +2,8 @@
2
2
  * Config command
3
3
  * Shows or updates configuration
4
4
  */
5
- import { getStateManager } from '../core/index.js';
5
+ import { CONFIG_KEY_REGISTRY, formatUnknownKeyError, getStateManager } from '../core/index.js';
6
+ import { ValidationError } from '../core/errors.js';
6
7
  import { ISSUE_SCOPES, DIFF_TOOLS } from '../core/types.js';
7
8
  import { validateGitHubUsername } from './validation.js';
8
9
  function validateScope(value) {
@@ -15,14 +16,22 @@ function validateScope(value) {
15
16
  * Read or write user configuration settings.
16
17
  * When called without a key, returns the full config.
17
18
  * When called with a key and value, updates the setting.
19
+ * When called with --list-keys, returns the full registry of known keys.
18
20
  *
19
21
  * @param options - Config options
20
22
  * @param options.key - Setting key (e.g., 'username', 'add-language', 'exclude-repo')
21
23
  * @param options.value - Setting value (required when key is provided)
22
- * @returns Current config (when reading) or success confirmation (when writing)
24
+ * @param options.listKeys - When true, return the registry of known keys
25
+ * @returns Current config, success confirmation, or key registry
23
26
  * @throws {Error} If the key is unknown or the value is invalid
24
27
  */
25
28
  export async function runConfig(options) {
29
+ if (options.listKeys) {
30
+ if (options.key || options.value) {
31
+ throw new ValidationError('`--list-keys` cannot be combined with a key/value. Run `config --list-keys` on its own.');
32
+ }
33
+ return { keys: CONFIG_KEY_REGISTRY };
34
+ }
26
35
  const stateManager = getStateManager();
27
36
  const currentConfig = stateManager.getState().config;
28
37
  if (!options.key) {
@@ -118,7 +127,7 @@ export async function runConfig(options) {
118
127
  });
119
128
  break;
120
129
  default:
121
- throw new Error(`Unknown config key: ${options.key}`);
130
+ throw new ValidationError(formatUnknownKeyError(options.key, 'config'));
122
131
  }
123
132
  return { success: true, key: options.key, value };
124
133
  }
@@ -7,7 +7,7 @@
7
7
  * orchestration layer that wires up the phases and handles I/O.
8
8
  */
9
9
  import { type DailyDigest, type CommentedIssue, type PRCheckFailure, type RepoGroup, type AgentState, type StarFilter } from '../core/index.js';
10
- import { type DailyOutput, type CapacityAssessment, type ActionableIssue, type ActionMenu } from '../formatters/json.js';
10
+ import { type DailyOutput, type DailyWarning, type CapacityAssessment, type ActionableIssue, type ActionMenu } from '../formatters/json.js';
11
11
  export { applyStatusOverrides, computeRepoSignals, groupPRsByRepo, assessCapacity, collectActionableIssues, computeActionMenu, toShelvedPRRef, formatBriefSummary, formatSummary, printDigest, CRITICAL_STATUSES, } from '../core/index.js';
12
12
  /**
13
13
  * Build a star filter from state for use in fetchUserPRCounts.
@@ -32,6 +32,8 @@ export interface DailyCheckResult {
32
32
  commentedIssues: CommentedIssue[];
33
33
  repoGroups: RepoGroup[];
34
34
  failures: PRCheckFailure[];
35
+ /** Non-fatal warnings from ancillary pipeline phases — see #1042. */
36
+ warnings: DailyWarning[];
35
37
  }
36
38
  /**
37
39
  * Convert a full DailyCheckResult to the compact DailyOutput for JSON serialization (#287).
@@ -7,13 +7,35 @@
7
7
  * orchestration layer that wires up the phases and handles I/O.
8
8
  */
9
9
  import { getStateManager, PRMonitor, IssueConversationMonitor, requireGitHubToken, CRITICAL_STATUSES, applyStatusOverrides, computeRepoSignals, groupPRsByRepo, assessCapacity, collectActionableIssues, computeActionMenu, toShelvedPRRef, formatBriefSummary, formatSummary, } from '../core/index.js';
10
- import { errorMessage, isRateLimitOrAuthError, nonFatalCatch } from '../core/errors.js';
10
+ import { errorMessage, isRateLimitOrAuthError } from '../core/errors.js';
11
11
  import { warn } from '../core/logger.js';
12
12
  import { emptyPRCountsResult } from '../core/github-stats.js';
13
13
  import { createAutopilotScout } from './scout-bridge.js';
14
14
  import { updateMonthlyAnalytics } from './dashboard-data.js';
15
15
  import { deduplicateDigest, compactActionableIssues, compactRepoGroups, } from '../formatters/json.js';
16
16
  const MODULE = 'daily';
17
+ /**
18
+ * Record a non-fatal failure: push a structured entry into the run's warnings
19
+ * collector AND emit the existing log line. Consumers (dashboard, MCP, tests)
20
+ * inspect `DailyOutput.warnings` so a partial run is visible beyond log noise.
21
+ * See #1042.
22
+ */
23
+ function recordWarning(warnings, phase, operation, err, humanMessage) {
24
+ const message = humanMessage ?? errorMessage(err);
25
+ warnings.push({ phase, operation, message });
26
+ warn(MODULE, `${operation}: ${message}`);
27
+ }
28
+ /**
29
+ * Variant of `nonFatalCatch` that also records a structured warning. Returns
30
+ * the fallback value on error (same semantics as `nonFatalCatch`) AND pushes
31
+ * an entry into the collector so the failure shows up in `DailyOutput.warnings`.
32
+ */
33
+ function nonFatalCatchWithWarning(opts) {
34
+ return (err) => {
35
+ recordWarning(opts.warnings, opts.phase, opts.operation, err);
36
+ return opts.fallback;
37
+ };
38
+ }
17
39
  // Re-export domain functions so existing consumers (tests, dashboard, startup)
18
40
  // can continue importing from './daily.js' without changes.
19
41
  export { applyStatusOverrides, computeRepoSignals, groupPRsByRepo, assessCapacity, collectActionableIssues, computeActionMenu, toShelvedPRRef, formatBriefSummary, formatSummary, printDigest, CRITICAL_STATUSES, } from '../core/index.js';
@@ -45,13 +67,33 @@ export function buildStarFilter(state) {
45
67
  * Retrieves open PRs, merged/closed counts, recently closed/merged PRs, and
46
68
  * issue conversation data — all in parallel where possible.
47
69
  */
48
- async function fetchPRData(prMonitor, token) {
70
+ async function fetchPRData(prMonitor, token, warnings) {
49
71
  // Fetch all open PRs fresh from GitHub
50
- const { prs, failures } = await prMonitor.fetchUserOpenPRs();
72
+ const fetchResult = await prMonitor.fetchUserOpenPRs();
73
+ const { prs, failures } = fetchResult;
51
74
  // Log any failures (but continue with successful checks)
52
75
  if (failures.length > 0) {
76
+ // Per-PR detail lives in `result.failures`; record a rollup warning so
77
+ // consumers that only read `warnings[]` still see the degradation signal.
78
+ warnings.push({
79
+ phase: 'fetch',
80
+ operation: 'fetch open PRs',
81
+ message: `${failures.length} PR fetch(es) failed`,
82
+ });
53
83
  warn(MODULE, `${failures.length} PR fetch(es) failed`);
54
84
  }
85
+ // Surface search-API truncation warnings (#1057 M25) so daily consumers
86
+ // see the partial-view signal in their `warnings` array rather than only
87
+ // in server logs.
88
+ if (fetchResult.warnings) {
89
+ for (const message of fetchResult.warnings) {
90
+ warnings.push({
91
+ phase: 'fetch',
92
+ operation: 'fetch open PRs (truncated)',
93
+ message,
94
+ });
95
+ }
96
+ }
55
97
  // Build star filter from cached repoScores so low-star repos are excluded
56
98
  // from merged/closed histograms (#576). Repos with no cached star data pass through.
57
99
  const state = getStateManager().getState();
@@ -60,31 +102,40 @@ async function fetchPRData(prMonitor, token) {
60
102
  // All stats fetches are non-critical (cosmetic/scoring), so isolate their failure
61
103
  const issueMonitor = new IssueConversationMonitor(token);
62
104
  const [mergedResult, closedResult, recentlyClosedPRs, recentlyMergedPRs, issueConversationResult] = await Promise.all([
63
- prMonitor.fetchUserMergedPRCounts(starFilter).catch(nonFatalCatch({
64
- module: MODULE,
65
- label: 'fetch merged PR counts',
105
+ prMonitor.fetchUserMergedPRCounts(starFilter).catch(nonFatalCatchWithWarning({
106
+ warnings,
107
+ phase: 'fetch',
108
+ operation: 'fetch merged PR counts',
109
+ fallback: emptyPRCountsResult(),
110
+ })),
111
+ prMonitor.fetchUserClosedPRCounts(starFilter).catch(nonFatalCatchWithWarning({
112
+ warnings,
113
+ phase: 'fetch',
114
+ operation: 'fetch closed PR counts',
66
115
  fallback: emptyPRCountsResult(),
67
116
  })),
68
- prMonitor
69
- .fetchUserClosedPRCounts(starFilter)
70
- .catch(nonFatalCatch({ module: MODULE, label: 'fetch closed PR counts', fallback: emptyPRCountsResult() })),
71
- prMonitor
72
- .fetchRecentlyClosedPRs()
73
- .catch(nonFatalCatch({ module: MODULE, label: 'fetch recently closed PRs', fallback: [] })),
74
- prMonitor
75
- .fetchRecentlyMergedPRs()
76
- .catch(nonFatalCatch({ module: MODULE, label: 'fetch recently merged PRs', fallback: [] })),
117
+ prMonitor.fetchRecentlyClosedPRs().catch(nonFatalCatchWithWarning({
118
+ warnings,
119
+ phase: 'fetch',
120
+ operation: 'fetch recently closed PRs',
121
+ fallback: [],
122
+ })),
123
+ prMonitor.fetchRecentlyMergedPRs().catch(nonFatalCatchWithWarning({
124
+ warnings,
125
+ phase: 'fetch',
126
+ operation: 'fetch recently merged PRs',
127
+ fallback: [],
128
+ })),
77
129
  // Issue conversation fetch has custom messaging based on the error content, so it keeps its bespoke catch.
78
130
  issueMonitor.fetchCommentedIssues().catch((error) => {
79
131
  if (isRateLimitOrAuthError(error))
80
132
  throw error;
81
133
  const msg = errorMessage(error);
82
- if (msg.includes('No GitHub username configured')) {
83
- warn(MODULE, `Issue conversation tracking requires setup: ${msg}`);
84
- }
85
- else {
86
- warn(MODULE, `Issue conversation fetch failed: ${msg}`);
87
- }
134
+ const needsSetup = msg.includes('No GitHub username configured');
135
+ const humanMessage = needsSetup
136
+ ? `Issue conversation tracking requires setup: ${msg}`
137
+ : `Issue conversation fetch failed: ${msg}`;
138
+ recordWarning(warnings, 'fetch', 'fetch commented issues', error, humanMessage);
88
139
  return {
89
140
  issues: [],
90
141
  failures: [{ issueUrl: 'N/A', error: `Issue conversation fetch failed: ${msg}` }],
@@ -93,6 +144,11 @@ async function fetchPRData(prMonitor, token) {
93
144
  ]);
94
145
  const commentedIssues = issueConversationResult.issues;
95
146
  if (issueConversationResult.failures.length > 0) {
147
+ warnings.push({
148
+ phase: 'fetch',
149
+ operation: 'fetch commented issues',
150
+ message: `${issueConversationResult.failures.length} issue conversation check(s) failed`,
151
+ });
96
152
  warn(MODULE, `${issueConversationResult.failures.length} issue conversation check(s) failed`);
97
153
  }
98
154
  const { repos: mergedCounts, monthlyCounts, monthlyOpenedCounts: openedFromMerged } = mergedResult;
@@ -116,7 +172,7 @@ async function fetchPRData(prMonitor, token) {
116
172
  * Applies stale repo reset, updates merged/closed counts, computes and stores
117
173
  * repo signals from open PR data, refreshes star counts, and syncs trusted projects.
118
174
  */
119
- async function updateRepoScores(prMonitor, prs, mergedCounts, closedCounts) {
175
+ async function updateRepoScores(prMonitor, prs, mergedCounts, closedCounts, warnings) {
120
176
  const stateManager = getStateManager();
121
177
  // Batch all synchronous score mutations for a single disk write.
122
178
  // Per-repo try-catch: a single corrupted repo should not prevent updates to others.
@@ -149,6 +205,14 @@ async function updateRepoScores(prMonitor, prs, mergedCounts, closedCounts) {
149
205
  }
150
206
  }
151
207
  if (mergedCountFailures === mergedCounts.size && mergedCounts.size > 0) {
208
+ // Total failure: batch outer-catch sees nothing because the batch itself
209
+ // succeeded, but every individual mutation inside threw. State may be
210
+ // silently stale — surface it as a warning distinct from the outer catch.
211
+ warnings.push({
212
+ phase: 'repo-scores',
213
+ operation: 'update merged counts',
214
+ message: `All ${mergedCounts.size} merged count update(s) failed. This may indicate corrupted state.`,
215
+ });
152
216
  warn(MODULE, `[ALL_MERGED_COUNT_UPDATES_FAILED] All ${mergedCounts.size} merged count update(s) failed.`);
153
217
  }
154
218
  // Populate closedWithoutMergeCount in repo scores.
@@ -167,6 +231,11 @@ async function updateRepoScores(prMonitor, prs, mergedCounts, closedCounts) {
167
231
  }
168
232
  }
169
233
  if (closedCountFailures === closedCounts.size && closedCounts.size > 0) {
234
+ warnings.push({
235
+ phase: 'repo-scores',
236
+ operation: 'update closed counts',
237
+ message: `All ${closedCounts.size} closed count update(s) failed. This may indicate corrupted state.`,
238
+ });
170
239
  warn(MODULE, `[ALL_CLOSED_COUNT_UPDATES_FAILED] All ${closedCounts.size} closed count update(s) failed.`);
171
240
  }
172
241
  // Update repo signals from observed open PR data
@@ -182,12 +251,17 @@ async function updateRepoScores(prMonitor, prs, mergedCounts, closedCounts) {
182
251
  }
183
252
  }
184
253
  if (signalUpdateFailures === repoSignals.size && repoSignals.size > 0) {
254
+ warnings.push({
255
+ phase: 'repo-scores',
256
+ operation: 'update repo signals',
257
+ message: `All ${repoSignals.size} signal update(s) failed. This may indicate corrupted state.`,
258
+ });
185
259
  warn(MODULE, `[ALL_SIGNAL_UPDATES_FAILED] All ${repoSignals.size} signal update(s) failed. This may indicate corrupted state.`);
186
260
  }
187
261
  });
188
262
  }
189
263
  catch (error) {
190
- warn(MODULE, `Failed to persist repo score updates: ${errorMessage(error)}`);
264
+ recordWarning(warnings, 'repo-scores', 'persist repo score updates', error);
191
265
  }
192
266
  // Fetch metadata (stars + language) for all scored repos — async, so outside the batch above
193
267
  const allRepos = Object.keys(stateManager.getState().repoScores);
@@ -198,7 +272,7 @@ async function updateRepoScores(prMonitor, prs, mergedCounts, closedCounts) {
198
272
  catch (error) {
199
273
  if (isRateLimitOrAuthError(error))
200
274
  throw error;
201
- warn(MODULE, `Failed to fetch repo metadata: ${errorMessage(error)}`);
275
+ recordWarning(warnings, 'repo-scores', 'fetch repo metadata', error);
202
276
  warn(MODULE, 'Repos without cached metadata will be excluded from dashboard stats and metadata badges until fetched on the next successful run.');
203
277
  repoMetadata = new Map();
204
278
  }
@@ -216,6 +290,11 @@ async function updateRepoScores(prMonitor, prs, mergedCounts, closedCounts) {
216
290
  }
217
291
  }
218
292
  if (metadataUpdateFailures === repoMetadata.size && repoMetadata.size > 0) {
293
+ warnings.push({
294
+ phase: 'repo-scores',
295
+ operation: 'update repo metadata',
296
+ message: `All ${repoMetadata.size} metadata update(s) failed. This may indicate corrupted state.`,
297
+ });
219
298
  warn(MODULE, `[ALL_METADATA_UPDATES_FAILED] All ${repoMetadata.size} metadata update(s) failed.`);
220
299
  }
221
300
  // Auto-sync trustedProjects from repos with merged PRs
@@ -230,12 +309,17 @@ async function updateRepoScores(prMonitor, prs, mergedCounts, closedCounts) {
230
309
  }
231
310
  }
232
311
  if (trustSyncFailures === mergedCounts.size && mergedCounts.size > 0) {
312
+ warnings.push({
313
+ phase: 'repo-scores',
314
+ operation: 'sync trusted projects',
315
+ message: `All ${mergedCounts.size} trusted project sync(s) failed. This may indicate corrupted state.`,
316
+ });
233
317
  warn(MODULE, `[ALL_TRUST_SYNCS_FAILED] All ${mergedCounts.size} trusted project sync(s) failed. This may indicate corrupted state.`);
234
318
  }
235
319
  });
236
320
  }
237
321
  catch (error) {
238
- warn(MODULE, `Failed to persist metadata/trust updates: ${errorMessage(error)}`);
322
+ recordWarning(warnings, 'repo-scores', 'persist metadata/trust updates', error);
239
323
  }
240
324
  }
241
325
  /**
@@ -243,7 +327,7 @@ async function updateRepoScores(prMonitor, prs, mergedCounts, closedCounts) {
243
327
  * Auto-unshelves PRs where maintainers have engaged, generates the digest,
244
328
  * and persists state.
245
329
  */
246
- function partitionPRs(prMonitor, prs, recentlyClosedPRs, recentlyMergedPRs) {
330
+ function partitionPRs(prMonitor, prs, recentlyClosedPRs, recentlyMergedPRs, warnings) {
247
331
  const stateManager = getStateManager();
248
332
  // Apply dashboard/CLI status overrides before partitioning.
249
333
  // This ensures PRs reclassified in the dashboard (e.g., "Need Attention" → "Waiting")
@@ -290,7 +374,7 @@ function partitionPRs(prMonitor, prs, recentlyClosedPRs, recentlyMergedPRs) {
290
374
  });
291
375
  }
292
376
  catch (error) {
293
- warn(MODULE, `Failed to persist partition state: ${errorMessage(error)}`);
377
+ recordWarning(warnings, 'partition', 'persist partition state', error);
294
378
  }
295
379
  // Digest was created inside batch — reconstruct from state
296
380
  const digest = stateManager.getState().lastDigest;
@@ -301,7 +385,7 @@ function partitionPRs(prMonitor, prs, recentlyClosedPRs, recentlyMergedPRs) {
301
385
  * Assesses capacity, filters dismissed issues, computes actionable items,
302
386
  * and assembles the action menu.
303
387
  */
304
- function generateDigestOutput(digest, activePRs, shelvedPRs, commentedIssues, failures, previousLastDigestAt) {
388
+ function generateDigestOutput(digest, activePRs, shelvedPRs, commentedIssues, failures, warnings, previousLastDigestAt) {
305
389
  const stateManager = getStateManager();
306
390
  // Assess capacity from active PRs only (shelved PRs excluded)
307
391
  const capacity = assessCapacity(activePRs, stateManager.getState().config.maxActivePRs, shelvedPRs.length);
@@ -330,7 +414,7 @@ function generateDigestOutput(digest, activePRs, shelvedPRs, commentedIssues, fa
330
414
  stateManager.undismissIssue(issue.url);
331
415
  }
332
416
  catch (error) {
333
- warn(MODULE, `Failed to persist auto-undismiss for ${issue.url}: ${errorMessage(error)}`);
417
+ recordWarning(warnings, 'dismiss-filter', `persist auto-undismiss for ${issue.url}`, error);
334
418
  }
335
419
  return true;
336
420
  }
@@ -341,7 +425,7 @@ function generateDigestOutput(digest, activePRs, shelvedPRs, commentedIssues, fa
341
425
  });
342
426
  }
343
427
  catch (error) {
344
- warn(MODULE, `Failed to persist auto-undismiss state: ${errorMessage(error)}`);
428
+ recordWarning(warnings, 'dismiss-filter', 'persist auto-undismiss state', error);
345
429
  }
346
430
  const issueResponses = filteredCommentedIssues.filter((i) => i.status === 'new_response');
347
431
  const summary = formatSummary(digest, capacity, issueResponses);
@@ -361,6 +445,7 @@ function generateDigestOutput(digest, activePRs, shelvedPRs, commentedIssues, fa
361
445
  commentedIssues: filteredCommentedIssues,
362
446
  repoGroups,
363
447
  failures,
448
+ warnings,
364
449
  };
365
450
  }
366
451
  // ---------------------------------------------------------------------------
@@ -385,6 +470,7 @@ export function toDailyOutput(result) {
385
470
  commentedIssues: result.commentedIssues,
386
471
  repoGroups: compactRepoGroups(result.repoGroups),
387
472
  failures: result.failures,
473
+ warnings: result.warnings,
388
474
  };
389
475
  }
390
476
  /**
@@ -415,10 +501,13 @@ export async function executeDailyCheck(token) {
415
501
  */
416
502
  async function executeDailyCheckInternal(token) {
417
503
  const prMonitor = new PRMonitor(token);
504
+ // One collector shared by every phase — threaded through explicitly so the
505
+ // callgraph documents which phases can produce non-fatal warnings. See #1042.
506
+ const warnings = [];
418
507
  // Phase 1: Fetch all PR data from GitHub
419
- const { prs, failures, mergedCounts, closedCounts, monthlyCounts, monthlyClosedCounts, openedFromMerged, openedFromClosed, recentlyClosedPRs, recentlyMergedPRs, commentedIssues, } = await fetchPRData(prMonitor, token);
508
+ const { prs, failures, mergedCounts, closedCounts, monthlyCounts, monthlyClosedCounts, openedFromMerged, openedFromClosed, recentlyClosedPRs, recentlyMergedPRs, commentedIssues, } = await fetchPRData(prMonitor, token, warnings);
420
509
  // Phase 2: Update repo scores (signals, star counts, trust sync)
421
- await updateRepoScores(prMonitor, prs, mergedCounts, closedCounts);
510
+ await updateRepoScores(prMonitor, prs, mergedCounts, closedCounts, warnings);
422
511
  // Phase 3: Persist monthly analytics and store merged/closed PR history.
423
512
  // try-catch: analytics are supplementary — save failure should not crash the daily check.
424
513
  try {
@@ -437,7 +526,7 @@ async function executeDailyCheckInternal(token) {
437
526
  });
438
527
  }
439
528
  catch (error) {
440
- warn(MODULE, `Failed to persist monthly analytics: ${errorMessage(error)}`);
529
+ recordWarning(warnings, 'analytics', 'persist monthly analytics', error);
441
530
  }
442
531
  // Phase 3.5: Feed merged/closed PRs to oss-scout for cross-tool state sync.
443
532
  if (recentlyMergedPRs.length > 0 || recentlyClosedPRs.length > 0) {
@@ -452,16 +541,16 @@ async function executeDailyCheckInternal(token) {
452
541
  await scout.checkpoint();
453
542
  }
454
543
  catch (error) {
455
- warn(MODULE, `Failed to sync PR data to oss-scout: ${errorMessage(error)}`);
544
+ recordWarning(warnings, 'scout-sync', 'sync PR data to oss-scout', error);
456
545
  }
457
546
  }
458
547
  // Capture lastDigestAt BEFORE Phase 4 overwrites it with the current run's timestamp.
459
548
  // Used by collectActionableIssues to determine which PRs are "new" (created since last digest).
460
549
  const previousLastDigestAt = getStateManager().getState().lastDigestAt;
461
550
  // Phase 4: Partition PRs, generate and save digest
462
- const { activePRs, shelvedPRs, digest } = partitionPRs(prMonitor, prs, recentlyClosedPRs, recentlyMergedPRs);
551
+ const { activePRs, shelvedPRs, digest } = partitionPRs(prMonitor, prs, recentlyClosedPRs, recentlyMergedPRs, warnings);
463
552
  // Phase 5: Build structured output (capacity, dismiss filter, action menu)
464
- const result = generateDigestOutput(digest, activePRs, shelvedPRs, commentedIssues, failures, previousLastDigestAt);
553
+ const result = generateDigestOutput(digest, activePRs, shelvedPRs, commentedIssues, failures, warnings, previousLastDigestAt);
465
554
  // Checkpoint: push state to Gist if in Gist mode.
466
555
  // If getStateManagerAsync was not called before this command ran,
467
556
  // isGistMode() will be false and checkpoint is correctly skipped.
@@ -472,7 +561,7 @@ async function executeDailyCheckInternal(token) {
472
561
  }
473
562
  }
474
563
  catch (err) {
475
- warn(MODULE, `Gist checkpoint failed: ${errorMessage(err)}`);
564
+ recordWarning(warnings, 'gist-checkpoint', 'Gist checkpoint', err);
476
565
  }
477
566
  return result;
478
567
  }
@@ -48,6 +48,14 @@ export interface DashboardJsonData {
48
48
  vettedIssues?: ParseIssueListOutput | null;
49
49
  offline?: boolean;
50
50
  lastUpdated?: string;
51
+ /**
52
+ * Labels of sub-fetches that degraded to empty fallbacks during this data
53
+ * build. Non-empty means one or more background calls failed and the
54
+ * corresponding sections of the response are approximations (stale or
55
+ * zeroed) rather than authoritative. The SPA surfaces this as a banner
56
+ * so users know the dashboard is showing partial data. See #1035.
57
+ */
58
+ partialFailures?: string[];
51
59
  }
52
60
  /** Action types the dashboard can request via POST /api/action. */
53
61
  export type DashboardActionType = 'move' | 'dismiss_issue_response';
@@ -62,8 +70,15 @@ export declare function buildDashboardStats(digest: DailyDigest, state: Readonly
62
70
  /**
63
71
  * Merge fresh API counts into existing stored counts.
64
72
  * Months present in the fresh data are updated; months only in the existing data are preserved.
65
- * This prevents historical data loss when the API returns incomplete results
66
- * (e.g. due to pagination limits or transient failures).
73
+ *
74
+ * Anti-regression guard (#1035): when the fresh count for a given month is
75
+ * smaller than the already-stored count for that month, we keep the larger
76
+ * value. This matters when the fresh fetch was capped (pagination limits,
77
+ * 1000-result Search API ceiling, or partial failures) and would otherwise
78
+ * silently overwrite authoritative historical data with a partial window.
79
+ * The trade-off: a month that genuinely shrinks (e.g., user deleted a merged
80
+ * PR reference remotely) cannot be decremented via this path — but that is
81
+ * a rare case, and the alternative is silent decay of historical analytics.
67
82
  */
68
83
  export declare function mergeMonthlyCounts(existing: Record<string, number>, fresh: Record<string, number>): Record<string, number>;
69
84
  /**
@@ -80,6 +95,15 @@ export interface DashboardFetchResult {
80
95
  commentedIssues: CommentedIssue[];
81
96
  allMergedPRs: MergedPR[];
82
97
  allClosedPRs: ClosedPR[];
98
+ /**
99
+ * Labels of non-critical sub-fetches that degraded to empty fallbacks
100
+ * during this run. Empty array means every fetch succeeded. Non-empty
101
+ * means one or more slices of the returned data are approximations —
102
+ * callers surface this to the user so "0 recently merged" does not look
103
+ * authoritative when it is actually "fetch failed, fell back to empty".
104
+ * See #1035.
105
+ */
106
+ partialFailures: string[];
83
107
  }
84
108
  /**
85
109
  * Fetch fresh dashboard data from GitHub.