@oss-autopilot/core 0.41.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +85 -0
- package/dist/cli.bundle.cjs +17657 -0
- package/dist/cli.d.ts +12 -0
- package/dist/cli.js +325 -0
- package/dist/commands/check-integration.d.ts +10 -0
- package/dist/commands/check-integration.js +192 -0
- package/dist/commands/comments.d.ts +24 -0
- package/dist/commands/comments.js +311 -0
- package/dist/commands/config.d.ts +11 -0
- package/dist/commands/config.js +82 -0
- package/dist/commands/daily.d.ts +29 -0
- package/dist/commands/daily.js +433 -0
- package/dist/commands/dashboard-data.d.ts +45 -0
- package/dist/commands/dashboard-data.js +132 -0
- package/dist/commands/dashboard-templates.d.ts +23 -0
- package/dist/commands/dashboard-templates.js +1627 -0
- package/dist/commands/dashboard.d.ts +18 -0
- package/dist/commands/dashboard.js +134 -0
- package/dist/commands/dismiss.d.ts +13 -0
- package/dist/commands/dismiss.js +49 -0
- package/dist/commands/init.d.ts +10 -0
- package/dist/commands/init.js +27 -0
- package/dist/commands/local-repos.d.ts +14 -0
- package/dist/commands/local-repos.js +155 -0
- package/dist/commands/parse-list.d.ts +13 -0
- package/dist/commands/parse-list.js +139 -0
- package/dist/commands/read.d.ts +12 -0
- package/dist/commands/read.js +33 -0
- package/dist/commands/search.d.ts +10 -0
- package/dist/commands/search.js +74 -0
- package/dist/commands/setup.d.ts +15 -0
- package/dist/commands/setup.js +276 -0
- package/dist/commands/shelve.d.ts +13 -0
- package/dist/commands/shelve.js +49 -0
- package/dist/commands/snooze.d.ts +18 -0
- package/dist/commands/snooze.js +83 -0
- package/dist/commands/startup.d.ts +33 -0
- package/dist/commands/startup.js +197 -0
- package/dist/commands/status.d.ts +10 -0
- package/dist/commands/status.js +43 -0
- package/dist/commands/track.d.ts +16 -0
- package/dist/commands/track.js +59 -0
- package/dist/commands/validation.d.ts +43 -0
- package/dist/commands/validation.js +112 -0
- package/dist/commands/vet.d.ts +10 -0
- package/dist/commands/vet.js +36 -0
- package/dist/core/checklist-analysis.d.ts +17 -0
- package/dist/core/checklist-analysis.js +39 -0
- package/dist/core/ci-analysis.d.ts +78 -0
- package/dist/core/ci-analysis.js +163 -0
- package/dist/core/comment-utils.d.ts +15 -0
- package/dist/core/comment-utils.js +52 -0
- package/dist/core/concurrency.d.ts +5 -0
- package/dist/core/concurrency.js +15 -0
- package/dist/core/daily-logic.d.ts +77 -0
- package/dist/core/daily-logic.js +512 -0
- package/dist/core/display-utils.d.ts +10 -0
- package/dist/core/display-utils.js +100 -0
- package/dist/core/errors.d.ts +24 -0
- package/dist/core/errors.js +34 -0
- package/dist/core/github-stats.d.ts +73 -0
- package/dist/core/github-stats.js +272 -0
- package/dist/core/github.d.ts +19 -0
- package/dist/core/github.js +60 -0
- package/dist/core/http-cache.d.ts +97 -0
- package/dist/core/http-cache.js +269 -0
- package/dist/core/index.d.ts +15 -0
- package/dist/core/index.js +15 -0
- package/dist/core/issue-conversation.d.ts +29 -0
- package/dist/core/issue-conversation.js +231 -0
- package/dist/core/issue-discovery.d.ts +85 -0
- package/dist/core/issue-discovery.js +589 -0
- package/dist/core/issue-filtering.d.ts +51 -0
- package/dist/core/issue-filtering.js +103 -0
- package/dist/core/issue-scoring.d.ts +40 -0
- package/dist/core/issue-scoring.js +92 -0
- package/dist/core/issue-vetting.d.ts +49 -0
- package/dist/core/issue-vetting.js +536 -0
- package/dist/core/logger.d.ts +21 -0
- package/dist/core/logger.js +49 -0
- package/dist/core/maintainer-analysis.d.ts +10 -0
- package/dist/core/maintainer-analysis.js +59 -0
- package/dist/core/pagination.d.ts +11 -0
- package/dist/core/pagination.js +20 -0
- package/dist/core/pr-monitor.d.ts +109 -0
- package/dist/core/pr-monitor.js +594 -0
- package/dist/core/review-analysis.d.ts +72 -0
- package/dist/core/review-analysis.js +163 -0
- package/dist/core/state.d.ts +371 -0
- package/dist/core/state.js +1089 -0
- package/dist/core/types.d.ts +507 -0
- package/dist/core/types.js +34 -0
- package/dist/core/utils.d.ts +249 -0
- package/dist/core/utils.js +422 -0
- package/dist/formatters/json.d.ts +269 -0
- package/dist/formatters/json.js +88 -0
- package/package.json +67 -0
|
@@ -0,0 +1,1089 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* State management for the OSS Contribution Agent
|
|
3
|
+
* Persists state to a JSON file in ~/.oss-autopilot/
|
|
4
|
+
*/
|
|
5
|
+
import * as fs from 'fs';
|
|
6
|
+
import * as path from 'path';
|
|
7
|
+
import { INITIAL_STATE, } from './types.js';
|
|
8
|
+
import { getStatePath, getBackupDir, getDataDir } from './utils.js';
|
|
9
|
+
import { ValidationError } from './errors.js';
|
|
10
|
+
import { debug, warn } from './logger.js';
|
|
11
|
+
const MODULE = 'state';
|
|
12
|
+
// Current state version
|
|
13
|
+
const CURRENT_STATE_VERSION = 2;
|
|
14
|
+
// Maximum number of events to retain in the event log
|
|
15
|
+
const MAX_EVENTS = 1000;
|
|
16
|
+
// Lock file timeout: if a lock is older than this, it is considered stale
|
|
17
|
+
const LOCK_TIMEOUT_MS = 30_000; // 30 seconds
|
|
18
|
+
// Legacy path for migration
|
|
19
|
+
const LEGACY_STATE_FILE = path.join(process.cwd(), 'data', 'state.json');
|
|
20
|
+
const LEGACY_BACKUP_DIR = path.join(process.cwd(), 'data', 'backups');
|
|
21
|
+
/**
|
|
22
|
+
* Check whether an existing lock file is stale (expired or corrupt).
|
|
23
|
+
* Returns true if the lock should be considered stale and can be removed.
|
|
24
|
+
*/
|
|
25
|
+
function isLockStale(lockPath) {
|
|
26
|
+
try {
|
|
27
|
+
const existing = JSON.parse(fs.readFileSync(lockPath, 'utf-8'));
|
|
28
|
+
return Date.now() - existing.timestamp > LOCK_TIMEOUT_MS;
|
|
29
|
+
}
|
|
30
|
+
catch (err) {
|
|
31
|
+
// Lock file is unreadable or contains invalid JSON — treat as stale
|
|
32
|
+
debug(MODULE, 'Lock file unreadable or invalid JSON, treating as stale', err);
|
|
33
|
+
return true;
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
/**
|
|
37
|
+
* Acquire an advisory file lock using exclusive-create (`wx` flag).
|
|
38
|
+
* If the lock file already exists but is stale (older than LOCK_TIMEOUT_MS or corrupt),
|
|
39
|
+
* it is removed and re-acquired.
|
|
40
|
+
* @throws Error if the lock is held by another active process.
|
|
41
|
+
*/
|
|
42
|
+
export function acquireLock(lockPath) {
|
|
43
|
+
const lockData = JSON.stringify({ pid: process.pid, timestamp: Date.now() });
|
|
44
|
+
try {
|
|
45
|
+
fs.writeFileSync(lockPath, lockData, { flag: 'wx' }); // Fails if file exists
|
|
46
|
+
return;
|
|
47
|
+
}
|
|
48
|
+
catch (err) {
|
|
49
|
+
// Lock file exists (EEXIST from 'wx' flag) — check if it is stale
|
|
50
|
+
debug(MODULE, 'Lock file already exists, checking staleness', err);
|
|
51
|
+
}
|
|
52
|
+
if (!isLockStale(lockPath)) {
|
|
53
|
+
throw new Error('State file is locked by another process');
|
|
54
|
+
}
|
|
55
|
+
// Stale lock detected — remove it and try to re-acquire
|
|
56
|
+
try {
|
|
57
|
+
fs.unlinkSync(lockPath);
|
|
58
|
+
}
|
|
59
|
+
catch (err) {
|
|
60
|
+
// Another process may have removed the stale lock first — proceed to re-acquire regardless
|
|
61
|
+
debug(MODULE, 'Stale lock already removed by another process', err);
|
|
62
|
+
}
|
|
63
|
+
try {
|
|
64
|
+
fs.writeFileSync(lockPath, lockData, { flag: 'wx' });
|
|
65
|
+
}
|
|
66
|
+
catch (err) {
|
|
67
|
+
// Another process grabbed the lock between unlink and write
|
|
68
|
+
debug(MODULE, 'Lock re-acquire failed (race condition)', err);
|
|
69
|
+
throw new Error('State file is locked by another process', { cause: err });
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
/**
|
|
73
|
+
* Release an advisory file lock, but only if this process owns it.
|
|
74
|
+
* Silently ignores missing lock files or locks owned by other processes.
|
|
75
|
+
*/
|
|
76
|
+
export function releaseLock(lockPath) {
|
|
77
|
+
try {
|
|
78
|
+
const data = JSON.parse(fs.readFileSync(lockPath, 'utf-8'));
|
|
79
|
+
if (data.pid === process.pid) {
|
|
80
|
+
fs.unlinkSync(lockPath);
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
catch (err) {
|
|
84
|
+
// Lock already removed or unreadable — nothing to do
|
|
85
|
+
debug(MODULE, 'Lock file already removed or unreadable during release', err);
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
/**
|
|
89
|
+
* Write data to `filePath` atomically by first writing to a temporary file
|
|
90
|
+
* in the same directory and then renaming. Rename is atomic on POSIX filesystems,
|
|
91
|
+
* preventing partial/corrupt state files if the process crashes mid-write.
|
|
92
|
+
*/
|
|
93
|
+
export function atomicWriteFileSync(filePath, data, mode) {
|
|
94
|
+
const tmpPath = filePath + '.tmp';
|
|
95
|
+
fs.writeFileSync(tmpPath, data, { mode: mode ?? 0o600 });
|
|
96
|
+
fs.renameSync(tmpPath, filePath);
|
|
97
|
+
// Ensure permissions are correct (rename preserves the tmp file's mode,
|
|
98
|
+
// but on some systems the mode from writeFileSync is masked by umask)
|
|
99
|
+
if (mode !== undefined) {
|
|
100
|
+
fs.chmodSync(filePath, mode);
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
/**
|
|
104
|
+
* Migrate state from v1 (local PR tracking) to v2 (fresh GitHub fetching).
|
|
105
|
+
* Preserves repoScores and config; drops the legacy PR arrays.
|
|
106
|
+
*/
|
|
107
|
+
function migrateV1ToV2(rawState) {
|
|
108
|
+
debug(MODULE, 'Migrating state from v1 to v2 (fresh GitHub fetching)...');
|
|
109
|
+
// Extract merged/closed PR arrays from v1 state to seed repo scores
|
|
110
|
+
const mergedPRs = rawState.mergedPRs || [];
|
|
111
|
+
const closedPRs = rawState.closedPRs || [];
|
|
112
|
+
// Update repo scores from historical PR data if not already present
|
|
113
|
+
const repoScores = { ...(rawState.repoScores || {}) };
|
|
114
|
+
for (const pr of mergedPRs) {
|
|
115
|
+
if (!repoScores[pr.repo]) {
|
|
116
|
+
repoScores[pr.repo] = {
|
|
117
|
+
repo: pr.repo,
|
|
118
|
+
score: 5,
|
|
119
|
+
mergedPRCount: 0,
|
|
120
|
+
closedWithoutMergeCount: 0,
|
|
121
|
+
avgResponseDays: null,
|
|
122
|
+
lastEvaluatedAt: new Date().toISOString(),
|
|
123
|
+
signals: {
|
|
124
|
+
hasActiveMaintainers: true,
|
|
125
|
+
isResponsive: false,
|
|
126
|
+
hasHostileComments: false,
|
|
127
|
+
},
|
|
128
|
+
};
|
|
129
|
+
}
|
|
130
|
+
// Note: Don't increment here as the score may already reflect these PRs
|
|
131
|
+
}
|
|
132
|
+
for (const pr of closedPRs) {
|
|
133
|
+
if (!repoScores[pr.repo]) {
|
|
134
|
+
repoScores[pr.repo] = {
|
|
135
|
+
repo: pr.repo,
|
|
136
|
+
score: 5,
|
|
137
|
+
mergedPRCount: 0,
|
|
138
|
+
closedWithoutMergeCount: 0,
|
|
139
|
+
avgResponseDays: null,
|
|
140
|
+
lastEvaluatedAt: new Date().toISOString(),
|
|
141
|
+
signals: {
|
|
142
|
+
hasActiveMaintainers: true,
|
|
143
|
+
isResponsive: false,
|
|
144
|
+
hasHostileComments: false,
|
|
145
|
+
},
|
|
146
|
+
};
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
const migratedState = {
|
|
150
|
+
version: 2,
|
|
151
|
+
activeIssues: rawState.activeIssues || [],
|
|
152
|
+
repoScores,
|
|
153
|
+
config: rawState.config,
|
|
154
|
+
events: rawState.events || [],
|
|
155
|
+
lastRunAt: new Date().toISOString(),
|
|
156
|
+
};
|
|
157
|
+
debug(MODULE, `Migration complete. Preserved ${Object.keys(repoScores).length} repo scores.`);
|
|
158
|
+
return migratedState;
|
|
159
|
+
}
|
|
160
|
+
/**
|
|
161
|
+
* Singleton manager for persistent agent state stored in ~/.oss-autopilot/state.json.
|
|
162
|
+
*
|
|
163
|
+
* Handles loading, saving, backup/restore, and v1-to-v2 migration of state. Supports
|
|
164
|
+
* an in-memory mode (no disk I/O) for use in tests. In v2 architecture, PR arrays are
|
|
165
|
+
* legacy -- open PRs are fetched fresh from GitHub on each run rather than stored locally.
|
|
166
|
+
*/
|
|
167
|
+
export class StateManager {
|
|
168
|
+
state;
|
|
169
|
+
inMemoryOnly;
|
|
170
|
+
/**
|
|
171
|
+
* Create a new StateManager instance.
|
|
172
|
+
* @param inMemoryOnly - When true, state is held only in memory and never read from or
|
|
173
|
+
* written to disk. Useful for unit tests that need isolated state without side effects.
|
|
174
|
+
* Defaults to false (normal persistent mode).
|
|
175
|
+
*/
|
|
176
|
+
constructor(inMemoryOnly = false) {
|
|
177
|
+
this.inMemoryOnly = inMemoryOnly;
|
|
178
|
+
this.state = inMemoryOnly ? this.createFreshState() : this.load();
|
|
179
|
+
}
|
|
180
|
+
/**
|
|
181
|
+
* Create a fresh state (v2: fresh GitHub fetching)
|
|
182
|
+
*/
|
|
183
|
+
createFreshState() {
|
|
184
|
+
return {
|
|
185
|
+
version: CURRENT_STATE_VERSION,
|
|
186
|
+
activeIssues: [],
|
|
187
|
+
repoScores: {},
|
|
188
|
+
config: {
|
|
189
|
+
...INITIAL_STATE.config,
|
|
190
|
+
setupComplete: false,
|
|
191
|
+
languages: [...INITIAL_STATE.config.languages],
|
|
192
|
+
labels: [...INITIAL_STATE.config.labels],
|
|
193
|
+
excludeRepos: [],
|
|
194
|
+
trustedProjects: [],
|
|
195
|
+
shelvedPRUrls: [],
|
|
196
|
+
dismissedIssues: {},
|
|
197
|
+
snoozedPRs: {},
|
|
198
|
+
},
|
|
199
|
+
events: [],
|
|
200
|
+
lastRunAt: new Date().toISOString(),
|
|
201
|
+
};
|
|
202
|
+
}
|
|
203
|
+
/**
|
|
204
|
+
* Check if initial setup has been completed.
|
|
205
|
+
* @returns true if the user has run `/setup-oss` and completed configuration.
|
|
206
|
+
*/
|
|
207
|
+
isSetupComplete() {
|
|
208
|
+
return this.state.config.setupComplete === true;
|
|
209
|
+
}
|
|
210
|
+
/**
|
|
211
|
+
* Mark setup as complete and record the completion timestamp.
|
|
212
|
+
*/
|
|
213
|
+
markSetupComplete() {
|
|
214
|
+
this.state.config.setupComplete = true;
|
|
215
|
+
this.state.config.setupCompletedAt = new Date().toISOString();
|
|
216
|
+
}
|
|
217
|
+
/**
|
|
218
|
+
* Migrate state from legacy ./data/ location to ~/.oss-autopilot/
|
|
219
|
+
* Returns true if migration was performed
|
|
220
|
+
*/
|
|
221
|
+
migrateFromLegacyLocation() {
|
|
222
|
+
const newStatePath = getStatePath();
|
|
223
|
+
// If new state already exists, no migration needed
|
|
224
|
+
if (fs.existsSync(newStatePath)) {
|
|
225
|
+
return false;
|
|
226
|
+
}
|
|
227
|
+
// Check for legacy state file
|
|
228
|
+
if (!fs.existsSync(LEGACY_STATE_FILE)) {
|
|
229
|
+
return false;
|
|
230
|
+
}
|
|
231
|
+
debug(MODULE, 'Migrating state from ./data/ to ~/.oss-autopilot/...');
|
|
232
|
+
try {
|
|
233
|
+
// Ensure the new data directory exists
|
|
234
|
+
getDataDir();
|
|
235
|
+
// Copy state file
|
|
236
|
+
fs.copyFileSync(LEGACY_STATE_FILE, newStatePath);
|
|
237
|
+
debug(MODULE, `Migrated state file to ${newStatePath}`);
|
|
238
|
+
// Copy backups if they exist
|
|
239
|
+
if (fs.existsSync(LEGACY_BACKUP_DIR)) {
|
|
240
|
+
const newBackupDir = getBackupDir();
|
|
241
|
+
const backupFiles = fs
|
|
242
|
+
.readdirSync(LEGACY_BACKUP_DIR)
|
|
243
|
+
.filter((f) => f.startsWith('state-') && f.endsWith('.json'));
|
|
244
|
+
for (const backupFile of backupFiles) {
|
|
245
|
+
const srcPath = path.join(LEGACY_BACKUP_DIR, backupFile);
|
|
246
|
+
const destPath = path.join(newBackupDir, backupFile);
|
|
247
|
+
fs.copyFileSync(srcPath, destPath);
|
|
248
|
+
}
|
|
249
|
+
debug(MODULE, `Migrated ${backupFiles.length} backup files`);
|
|
250
|
+
}
|
|
251
|
+
// Remove legacy files
|
|
252
|
+
fs.unlinkSync(LEGACY_STATE_FILE);
|
|
253
|
+
debug(MODULE, 'Removed legacy state file');
|
|
254
|
+
// Remove legacy backup files
|
|
255
|
+
if (fs.existsSync(LEGACY_BACKUP_DIR)) {
|
|
256
|
+
const backupFiles = fs.readdirSync(LEGACY_BACKUP_DIR);
|
|
257
|
+
for (const file of backupFiles) {
|
|
258
|
+
fs.unlinkSync(path.join(LEGACY_BACKUP_DIR, file));
|
|
259
|
+
}
|
|
260
|
+
fs.rmdirSync(LEGACY_BACKUP_DIR);
|
|
261
|
+
}
|
|
262
|
+
// Try to remove legacy data directory if empty
|
|
263
|
+
const legacyDataDir = path.dirname(LEGACY_STATE_FILE);
|
|
264
|
+
if (fs.existsSync(legacyDataDir)) {
|
|
265
|
+
const remaining = fs.readdirSync(legacyDataDir);
|
|
266
|
+
if (remaining.length === 0) {
|
|
267
|
+
fs.rmdirSync(legacyDataDir);
|
|
268
|
+
debug(MODULE, 'Removed empty legacy data directory');
|
|
269
|
+
}
|
|
270
|
+
}
|
|
271
|
+
debug(MODULE, 'Migration complete!');
|
|
272
|
+
return true;
|
|
273
|
+
}
|
|
274
|
+
catch (error) {
|
|
275
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
276
|
+
warn(MODULE, `Failed to migrate state: ${errorMessage}`);
|
|
277
|
+
// Clean up partial migration to avoid inconsistent state
|
|
278
|
+
const newStatePath = getStatePath();
|
|
279
|
+
if (fs.existsSync(newStatePath) && fs.existsSync(LEGACY_STATE_FILE)) {
|
|
280
|
+
// If both files exist, the migration was partial - remove the new file
|
|
281
|
+
try {
|
|
282
|
+
fs.unlinkSync(newStatePath);
|
|
283
|
+
debug(MODULE, 'Cleaned up partial migration - removed incomplete new state file');
|
|
284
|
+
}
|
|
285
|
+
catch (cleanupErr) {
|
|
286
|
+
warn(MODULE, 'Could not clean up partial migration file');
|
|
287
|
+
debug(MODULE, 'Partial migration cleanup failed', cleanupErr);
|
|
288
|
+
}
|
|
289
|
+
}
|
|
290
|
+
warn(MODULE, 'To resolve this issue:');
|
|
291
|
+
warn(MODULE, ' 1. Ensure you have write permissions to ~/.oss-autopilot/');
|
|
292
|
+
warn(MODULE, ' 2. Check available disk space');
|
|
293
|
+
warn(MODULE, ' 3. Manually copy ./data/state.json to ~/.oss-autopilot/state.json');
|
|
294
|
+
warn(MODULE, ' 4. Or delete ./data/state.json to start fresh');
|
|
295
|
+
return false;
|
|
296
|
+
}
|
|
297
|
+
}
|
|
298
|
+
/**
|
|
299
|
+
* Load state from file, or create initial state if none exists.
|
|
300
|
+
* If the main state file is corrupted, attempts to restore from the most recent backup.
|
|
301
|
+
* Performs migration from legacy ./data/ location if needed.
|
|
302
|
+
*/
|
|
303
|
+
load() {
|
|
304
|
+
// Try to migrate from legacy location first
|
|
305
|
+
this.migrateFromLegacyLocation();
|
|
306
|
+
const statePath = getStatePath();
|
|
307
|
+
try {
|
|
308
|
+
if (fs.existsSync(statePath)) {
|
|
309
|
+
const data = fs.readFileSync(statePath, 'utf-8');
|
|
310
|
+
let state = JSON.parse(data);
|
|
311
|
+
// Validate required fields exist
|
|
312
|
+
if (!this.isValidState(state)) {
|
|
313
|
+
warn(MODULE, 'Invalid state file structure, attempting to restore from backup...');
|
|
314
|
+
const restoredState = this.tryRestoreFromBackup();
|
|
315
|
+
if (restoredState) {
|
|
316
|
+
return restoredState;
|
|
317
|
+
}
|
|
318
|
+
warn(MODULE, 'No valid backup found, starting fresh');
|
|
319
|
+
return this.createFreshState();
|
|
320
|
+
}
|
|
321
|
+
// Migrate from v1 to v2 if needed
|
|
322
|
+
if (state.version === 1) {
|
|
323
|
+
state = migrateV1ToV2(state);
|
|
324
|
+
// Save the migrated state immediately (atomic write)
|
|
325
|
+
atomicWriteFileSync(statePath, JSON.stringify(state, null, 2), 0o600);
|
|
326
|
+
debug(MODULE, 'Migrated state saved');
|
|
327
|
+
}
|
|
328
|
+
// Log appropriate message based on version
|
|
329
|
+
const repoCount = Object.keys(state.repoScores).length;
|
|
330
|
+
debug(MODULE, `Loaded state v${state.version}: ${repoCount} repo scores tracked`);
|
|
331
|
+
return state;
|
|
332
|
+
}
|
|
333
|
+
}
|
|
334
|
+
catch (error) {
|
|
335
|
+
warn(MODULE, 'Error loading state:', error);
|
|
336
|
+
warn(MODULE, 'Attempting to restore from backup...');
|
|
337
|
+
const restoredState = this.tryRestoreFromBackup();
|
|
338
|
+
if (restoredState) {
|
|
339
|
+
return restoredState;
|
|
340
|
+
}
|
|
341
|
+
warn(MODULE, 'No valid backup found, starting fresh');
|
|
342
|
+
}
|
|
343
|
+
debug(MODULE, 'No existing state found, initializing...');
|
|
344
|
+
return this.createFreshState();
|
|
345
|
+
}
|
|
346
|
+
/**
|
|
347
|
+
* Attempt to restore state from the most recent valid backup.
|
|
348
|
+
* Returns the restored state if successful, or null if no valid backup is found.
|
|
349
|
+
*/
|
|
350
|
+
tryRestoreFromBackup() {
|
|
351
|
+
const backupDir = getBackupDir();
|
|
352
|
+
if (!fs.existsSync(backupDir)) {
|
|
353
|
+
return null;
|
|
354
|
+
}
|
|
355
|
+
// Get backup files sorted by name (most recent first, since names include timestamps)
|
|
356
|
+
const backupFiles = fs
|
|
357
|
+
.readdirSync(backupDir)
|
|
358
|
+
.filter((f) => f.startsWith('state-') && f.endsWith('.json'))
|
|
359
|
+
.sort()
|
|
360
|
+
.reverse();
|
|
361
|
+
for (const backupFile of backupFiles) {
|
|
362
|
+
const backupPath = path.join(backupDir, backupFile);
|
|
363
|
+
try {
|
|
364
|
+
const data = fs.readFileSync(backupPath, 'utf-8');
|
|
365
|
+
let state = JSON.parse(data);
|
|
366
|
+
if (this.isValidState(state)) {
|
|
367
|
+
debug(MODULE, `Successfully restored state from backup: ${backupFile}`);
|
|
368
|
+
// Migrate from v1 to v2 if needed
|
|
369
|
+
if (state.version === 1) {
|
|
370
|
+
state = migrateV1ToV2(state);
|
|
371
|
+
}
|
|
372
|
+
const repoCount = Object.keys(state.repoScores).length;
|
|
373
|
+
debug(MODULE, `Restored state v${state.version}: ${repoCount} repo scores`);
|
|
374
|
+
// Overwrite the corrupted main state file with the restored backup (atomic write)
|
|
375
|
+
const statePath = getStatePath();
|
|
376
|
+
atomicWriteFileSync(statePath, JSON.stringify(state, null, 2), 0o600);
|
|
377
|
+
debug(MODULE, 'Restored backup written to main state file');
|
|
378
|
+
return state;
|
|
379
|
+
}
|
|
380
|
+
}
|
|
381
|
+
catch (backupErr) {
|
|
382
|
+
// This backup is also corrupted, try the next one
|
|
383
|
+
warn(MODULE, `Backup ${backupFile} is corrupted, trying next...`);
|
|
384
|
+
debug(MODULE, `Backup ${backupFile} parse failed`, backupErr);
|
|
385
|
+
}
|
|
386
|
+
}
|
|
387
|
+
return null;
|
|
388
|
+
}
|
|
389
|
+
/**
|
|
390
|
+
* Validate that a loaded state has the required structure
|
|
391
|
+
* Handles both v1 (with PR arrays) and v2 (without)
|
|
392
|
+
*/
|
|
393
|
+
isValidState(state) {
|
|
394
|
+
if (!state || typeof state !== 'object')
|
|
395
|
+
return false;
|
|
396
|
+
const s = state;
|
|
397
|
+
// Migrate older states that don't have repoScores
|
|
398
|
+
if (s.repoScores === undefined) {
|
|
399
|
+
s.repoScores = {};
|
|
400
|
+
}
|
|
401
|
+
// Migrate older states that don't have events
|
|
402
|
+
if (s.events === undefined) {
|
|
403
|
+
s.events = [];
|
|
404
|
+
}
|
|
405
|
+
// Base requirements for all versions
|
|
406
|
+
const hasBaseFields = typeof s.version === 'number' &&
|
|
407
|
+
typeof s.repoScores === 'object' &&
|
|
408
|
+
s.repoScores !== null &&
|
|
409
|
+
Array.isArray(s.events) &&
|
|
410
|
+
typeof s.config === 'object' &&
|
|
411
|
+
s.config !== null;
|
|
412
|
+
if (!hasBaseFields)
|
|
413
|
+
return false;
|
|
414
|
+
// v1 requires base PR arrays to be present (they will be dropped during migration)
|
|
415
|
+
if (s.version === 1) {
|
|
416
|
+
return (Array.isArray(s.activePRs) &&
|
|
417
|
+
Array.isArray(s.dormantPRs) &&
|
|
418
|
+
Array.isArray(s.mergedPRs) &&
|
|
419
|
+
Array.isArray(s.closedPRs));
|
|
420
|
+
}
|
|
421
|
+
// v2+ doesn't require PR arrays
|
|
422
|
+
return true;
|
|
423
|
+
}
|
|
424
|
+
/**
|
|
425
|
+
* Persist the current state to disk, creating a timestamped backup of the previous
|
|
426
|
+
* state file first. Updates `lastRunAt` to the current time. In in-memory mode,
|
|
427
|
+
* only updates `lastRunAt` without any file I/O. Retains at most 10 backup files.
|
|
428
|
+
*/
|
|
429
|
+
save() {
|
|
430
|
+
// Update lastRunAt
|
|
431
|
+
this.state.lastRunAt = new Date().toISOString();
|
|
432
|
+
// Skip file operations in in-memory mode
|
|
433
|
+
if (this.inMemoryOnly) {
|
|
434
|
+
return;
|
|
435
|
+
}
|
|
436
|
+
const statePath = getStatePath();
|
|
437
|
+
const lockPath = statePath + '.lock';
|
|
438
|
+
const backupDir = getBackupDir();
|
|
439
|
+
// Acquire advisory lock to prevent concurrent writes
|
|
440
|
+
acquireLock(lockPath);
|
|
441
|
+
try {
|
|
442
|
+
// Create backup of existing state
|
|
443
|
+
if (fs.existsSync(statePath)) {
|
|
444
|
+
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
|
445
|
+
const randomSuffix = Math.random().toString(36).slice(2, 8).padEnd(6, '0');
|
|
446
|
+
const backupFile = path.join(backupDir, `state-${timestamp}-${randomSuffix}.json`);
|
|
447
|
+
fs.copyFileSync(statePath, backupFile);
|
|
448
|
+
fs.chmodSync(backupFile, 0o600);
|
|
449
|
+
// Keep only last 10 backups
|
|
450
|
+
this.cleanupBackups();
|
|
451
|
+
}
|
|
452
|
+
// Atomic write: write to temp file then rename to prevent corruption on crash
|
|
453
|
+
atomicWriteFileSync(statePath, JSON.stringify(this.state, null, 2), 0o600);
|
|
454
|
+
debug(MODULE, 'State saved successfully');
|
|
455
|
+
}
|
|
456
|
+
finally {
|
|
457
|
+
releaseLock(lockPath);
|
|
458
|
+
}
|
|
459
|
+
}
|
|
460
|
+
cleanupBackups() {
|
|
461
|
+
const backupDir = getBackupDir();
|
|
462
|
+
try {
|
|
463
|
+
const files = fs
|
|
464
|
+
.readdirSync(backupDir)
|
|
465
|
+
.filter((f) => f.startsWith('state-'))
|
|
466
|
+
.sort()
|
|
467
|
+
.reverse();
|
|
468
|
+
// Keep only the 10 most recent backups
|
|
469
|
+
for (const file of files.slice(10)) {
|
|
470
|
+
try {
|
|
471
|
+
fs.unlinkSync(path.join(backupDir, file));
|
|
472
|
+
}
|
|
473
|
+
catch (error) {
|
|
474
|
+
warn(MODULE, `Could not delete old backup ${file}:`, error instanceof Error ? error.message : error);
|
|
475
|
+
}
|
|
476
|
+
}
|
|
477
|
+
}
|
|
478
|
+
catch (error) {
|
|
479
|
+
warn(MODULE, 'Could not clean up backups:', error instanceof Error ? error.message : error);
|
|
480
|
+
}
|
|
481
|
+
}
|
|
482
|
+
/**
|
|
483
|
+
* Get the current state as a read-only snapshot.
|
|
484
|
+
* @returns The full agent state. Callers should not mutate the returned object;
|
|
485
|
+
* use the StateManager methods to make changes.
|
|
486
|
+
*/
|
|
487
|
+
getState() {
|
|
488
|
+
return this.state;
|
|
489
|
+
}
|
|
490
|
+
/**
|
|
491
|
+
* Store the latest daily digest for dashboard rendering.
|
|
492
|
+
* @param digest - The freshly generated digest from the current daily run.
|
|
493
|
+
*/
|
|
494
|
+
setLastDigest(digest) {
|
|
495
|
+
this.state.lastDigest = digest;
|
|
496
|
+
this.state.lastDigestAt = digest.generatedAt;
|
|
497
|
+
}
|
|
498
|
+
/**
|
|
499
|
+
* Store monthly merged PR counts for the contribution timeline chart.
|
|
500
|
+
* @param counts - Map of "YYYY-MM" strings to merged PR counts for that month.
|
|
501
|
+
*/
|
|
502
|
+
setMonthlyMergedCounts(counts) {
|
|
503
|
+
this.state.monthlyMergedCounts = counts;
|
|
504
|
+
}
|
|
505
|
+
/**
|
|
506
|
+
* Store monthly closed (without merge) PR counts for the contribution timeline and success rate charts.
|
|
507
|
+
* @param counts - Map of "YYYY-MM" strings to closed PR counts for that month.
|
|
508
|
+
*/
|
|
509
|
+
setMonthlyClosedCounts(counts) {
|
|
510
|
+
this.state.monthlyClosedCounts = counts;
|
|
511
|
+
}
|
|
512
|
+
/**
|
|
513
|
+
* Store monthly opened PR counts for the contribution timeline chart.
|
|
514
|
+
* @param counts - Map of "YYYY-MM" strings to opened PR counts for that month.
|
|
515
|
+
*/
|
|
516
|
+
setMonthlyOpenedCounts(counts) {
|
|
517
|
+
this.state.monthlyOpenedCounts = counts;
|
|
518
|
+
}
|
|
519
|
+
setDailyActivityCounts(counts) {
|
|
520
|
+
this.state.dailyActivityCounts = counts;
|
|
521
|
+
}
|
|
522
|
+
/**
|
|
523
|
+
* Store cached local repo scan results (#84).
|
|
524
|
+
* @param cache - The scan results, paths scanned, and timestamp.
|
|
525
|
+
*/
|
|
526
|
+
setLocalRepoCache(cache) {
|
|
527
|
+
this.state.localRepoCache = cache;
|
|
528
|
+
}
|
|
529
|
+
/**
|
|
530
|
+
* Shallow-merge partial configuration updates into the current config.
|
|
531
|
+
* @param config - Partial config object whose properties override existing values.
|
|
532
|
+
*/
|
|
533
|
+
updateConfig(config) {
|
|
534
|
+
this.state.config = { ...this.state.config, ...config };
|
|
535
|
+
}
|
|
536
|
+
// === Event Logging ===
|
|
537
|
+
/**
|
|
538
|
+
* Append an event to the event log. Events are capped at {@link MAX_EVENTS} (1000);
|
|
539
|
+
* when the cap is exceeded, the oldest events are trimmed to stay within the limit.
|
|
540
|
+
* @param type - The event type (e.g. 'pr_tracked').
|
|
541
|
+
* @param data - Arbitrary key-value payload for the event.
|
|
542
|
+
*/
|
|
543
|
+
appendEvent(type, data) {
|
|
544
|
+
const event = {
|
|
545
|
+
id: `evt_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`,
|
|
546
|
+
type,
|
|
547
|
+
at: new Date().toISOString(),
|
|
548
|
+
data,
|
|
549
|
+
};
|
|
550
|
+
this.state.events.push(event);
|
|
551
|
+
// Cap the events array to prevent unbounded growth
|
|
552
|
+
if (this.state.events.length > MAX_EVENTS) {
|
|
553
|
+
this.state.events = this.state.events.slice(-MAX_EVENTS);
|
|
554
|
+
}
|
|
555
|
+
}
|
|
556
|
+
/**
|
|
557
|
+
* Filter the event log to events of a specific type.
|
|
558
|
+
* @param type - The event type to filter by.
|
|
559
|
+
* @returns All events matching the given type, in chronological order.
|
|
560
|
+
*/
|
|
561
|
+
getEventsByType(type) {
|
|
562
|
+
return this.state.events.filter((e) => e.type === type);
|
|
563
|
+
}
|
|
564
|
+
/**
|
|
565
|
+
* Filter the event log to events within an inclusive time range.
|
|
566
|
+
* @param since - Start of the range (inclusive).
|
|
567
|
+
* @param until - End of the range (inclusive). Defaults to now.
|
|
568
|
+
* @returns Events whose timestamps fall within [since, until].
|
|
569
|
+
*/
|
|
570
|
+
getEventsInRange(since, until = new Date()) {
|
|
571
|
+
return this.state.events.filter((e) => {
|
|
572
|
+
const eventTime = new Date(e.at);
|
|
573
|
+
return eventTime >= since && eventTime <= until;
|
|
574
|
+
});
|
|
575
|
+
}
|
|
576
|
+
// === Issue Management ===
|
|
577
|
+
/**
|
|
578
|
+
* Add an issue to the active tracking list. If an issue with the same URL is
|
|
579
|
+
* already tracked, the call is a no-op.
|
|
580
|
+
* @param issue - The issue to begin tracking.
|
|
581
|
+
*/
|
|
582
|
+
addIssue(issue) {
|
|
583
|
+
const existing = this.state.activeIssues.find((i) => i.url === issue.url);
|
|
584
|
+
if (existing) {
|
|
585
|
+
debug(MODULE, `Issue ${issue.url} already tracked`);
|
|
586
|
+
return;
|
|
587
|
+
}
|
|
588
|
+
this.state.activeIssues.push(issue);
|
|
589
|
+
debug(MODULE, `Added issue: ${issue.repo}#${issue.number}`);
|
|
590
|
+
}
|
|
591
|
+
// === Trusted Projects ===
|
|
592
|
+
/**
|
|
593
|
+
* Add a repository to the trusted projects list. Trusted projects are prioritized
|
|
594
|
+
* in issue search results. No-op if the repo is already trusted.
|
|
595
|
+
* @param repo - Repository in "owner/repo" format.
|
|
596
|
+
*/
|
|
597
|
+
addTrustedProject(repo) {
|
|
598
|
+
if (!this.state.config.trustedProjects.includes(repo)) {
|
|
599
|
+
this.state.config.trustedProjects.push(repo);
|
|
600
|
+
debug(MODULE, `Added trusted project: ${repo}`);
|
|
601
|
+
}
|
|
602
|
+
}
|
|
603
|
+
/**
|
|
604
|
+
* Test whether a repo matches any of the given exclusion lists.
|
|
605
|
+
* Both repo and org comparisons are case-insensitive (GitHub names are case-insensitive).
|
|
606
|
+
* @param repo - Repository in "owner/repo" format.
|
|
607
|
+
* @param repos - Full "owner/repo" strings (case-insensitive match).
|
|
608
|
+
* @param orgs - Org names (case-insensitive match against the owner segment of the repo).
|
|
609
|
+
*/
|
|
610
|
+
static matchesExclusion(repo, repos, orgs) {
|
|
611
|
+
const repoLower = repo.toLowerCase();
|
|
612
|
+
if (repos.some((r) => r.toLowerCase() === repoLower))
|
|
613
|
+
return true;
|
|
614
|
+
if (orgs?.some((o) => o.toLowerCase() === repoLower.split('/')[0]))
|
|
615
|
+
return true;
|
|
616
|
+
return false;
|
|
617
|
+
}
|
|
618
|
+
/**
|
|
619
|
+
* Check whether a repository matches any exclusion rule from the current config.
|
|
620
|
+
* A repo is excluded if it matches an entry in `excludeRepos` (case-insensitive)
|
|
621
|
+
* or if its owner segment matches an entry in `excludeOrgs` (case-insensitive).
|
|
622
|
+
* @param repo - Repository in "owner/repo" format.
|
|
623
|
+
*/
|
|
624
|
+
isExcluded(repo) {
|
|
625
|
+
const { excludeRepos, excludeOrgs } = this.state.config;
|
|
626
|
+
return StateManager.matchesExclusion(repo, excludeRepos, excludeOrgs);
|
|
627
|
+
}
|
|
628
|
+
/**
|
|
629
|
+
* Remove repositories matching the given exclusion lists from `trustedProjects`
|
|
630
|
+
* and `repoScores`. Called when a repo or org is newly excluded to keep stored
|
|
631
|
+
* data consistent with current filters.
|
|
632
|
+
* @param repos - Full "owner/repo" strings to exclude (case-insensitive match).
|
|
633
|
+
* @param orgs - Org names to exclude (case-insensitive match against owner segment).
|
|
634
|
+
*/
|
|
635
|
+
cleanupExcludedData(repos, orgs) {
|
|
636
|
+
const matches = (repo) => StateManager.matchesExclusion(repo, repos, orgs);
|
|
637
|
+
const beforeTrusted = this.state.config.trustedProjects.length;
|
|
638
|
+
this.state.config.trustedProjects = this.state.config.trustedProjects.filter((p) => !matches(p));
|
|
639
|
+
const removedTrusted = beforeTrusted - this.state.config.trustedProjects.length;
|
|
640
|
+
let removedScoreCount = 0;
|
|
641
|
+
for (const key of Object.keys(this.state.repoScores)) {
|
|
642
|
+
if (matches(key)) {
|
|
643
|
+
delete this.state.repoScores[key];
|
|
644
|
+
removedScoreCount++;
|
|
645
|
+
}
|
|
646
|
+
}
|
|
647
|
+
if (removedTrusted > 0 || removedScoreCount > 0) {
|
|
648
|
+
debug(MODULE, `Removed ${removedTrusted} trusted project(s) and ${removedScoreCount} repo score(s) for excluded repos/orgs`);
|
|
649
|
+
}
|
|
650
|
+
}
|
|
651
|
+
// === Starred Repos Management ===
|
|
652
|
+
/**
|
|
653
|
+
* Get the cached list of the user's GitHub starred repositories.
|
|
654
|
+
* @returns Array of "owner/repo" strings, or an empty array if never fetched.
|
|
655
|
+
*/
|
|
656
|
+
getStarredRepos() {
|
|
657
|
+
return this.state.config.starredRepos || [];
|
|
658
|
+
}
|
|
659
|
+
/**
|
|
660
|
+
* Replace the cached starred repositories list and update the fetch timestamp.
|
|
661
|
+
* @param repos - Array of "owner/repo" strings from the user's GitHub stars.
|
|
662
|
+
*/
|
|
663
|
+
setStarredRepos(repos) {
|
|
664
|
+
this.state.config.starredRepos = repos;
|
|
665
|
+
this.state.config.starredReposLastFetched = new Date().toISOString();
|
|
666
|
+
debug(MODULE, `Updated starred repos: ${repos.length} repositories`);
|
|
667
|
+
}
|
|
668
|
+
/**
|
|
669
|
+
* Check if the starred repos cache is stale (older than 24 hours) or has never been fetched.
|
|
670
|
+
* @returns true if the cache should be refreshed.
|
|
671
|
+
*/
|
|
672
|
+
isStarredReposStale() {
|
|
673
|
+
const lastFetched = this.state.config.starredReposLastFetched;
|
|
674
|
+
if (!lastFetched) {
|
|
675
|
+
return true;
|
|
676
|
+
}
|
|
677
|
+
const staleThresholdMs = 24 * 60 * 60 * 1000; // 24 hours
|
|
678
|
+
const lastFetchedDate = new Date(lastFetched);
|
|
679
|
+
const now = new Date();
|
|
680
|
+
return now.getTime() - lastFetchedDate.getTime() > staleThresholdMs;
|
|
681
|
+
}
|
|
682
|
+
// === Shelve/Unshelve ===
|
|
683
|
+
/**
|
|
684
|
+
* Shelve a PR by URL. Shelved PRs are excluded from capacity and actionable issues.
|
|
685
|
+
* They are auto-unshelved when a maintainer engages (needs_response, needs_changes, etc.).
|
|
686
|
+
* @param url - The full GitHub PR URL.
|
|
687
|
+
* @returns true if newly added, false if already shelved.
|
|
688
|
+
*/
|
|
689
|
+
shelvePR(url) {
|
|
690
|
+
if (!this.state.config.shelvedPRUrls) {
|
|
691
|
+
this.state.config.shelvedPRUrls = [];
|
|
692
|
+
}
|
|
693
|
+
if (this.state.config.shelvedPRUrls.includes(url)) {
|
|
694
|
+
return false;
|
|
695
|
+
}
|
|
696
|
+
this.state.config.shelvedPRUrls.push(url);
|
|
697
|
+
return true;
|
|
698
|
+
}
|
|
699
|
+
/**
|
|
700
|
+
* Unshelve a PR by URL.
|
|
701
|
+
* @param url - The full GitHub PR URL.
|
|
702
|
+
* @returns true if found and removed, false if not shelved.
|
|
703
|
+
*/
|
|
704
|
+
unshelvePR(url) {
|
|
705
|
+
if (!this.state.config.shelvedPRUrls) {
|
|
706
|
+
return false;
|
|
707
|
+
}
|
|
708
|
+
const index = this.state.config.shelvedPRUrls.indexOf(url);
|
|
709
|
+
if (index === -1) {
|
|
710
|
+
return false;
|
|
711
|
+
}
|
|
712
|
+
this.state.config.shelvedPRUrls.splice(index, 1);
|
|
713
|
+
return true;
|
|
714
|
+
}
|
|
715
|
+
/**
|
|
716
|
+
* Check if a PR is shelved.
|
|
717
|
+
* @param url - The full GitHub PR URL.
|
|
718
|
+
* @returns true if the URL is in the shelved list.
|
|
719
|
+
*/
|
|
720
|
+
isPRShelved(url) {
|
|
721
|
+
return this.state.config.shelvedPRUrls?.includes(url) ?? false;
|
|
722
|
+
}
|
|
723
|
+
// === Dismiss / Undismiss Issues ===
|
|
724
|
+
/**
|
|
725
|
+
* Dismiss an issue by URL. Dismissed issues are excluded from `new_response` notifications
|
|
726
|
+
* until new activity occurs after the dismiss timestamp.
|
|
727
|
+
* @param url - The full GitHub issue URL.
|
|
728
|
+
* @param timestamp - ISO timestamp of when the issue was dismissed.
|
|
729
|
+
* @returns true if newly dismissed, false if already dismissed.
|
|
730
|
+
*/
|
|
731
|
+
dismissIssue(url, timestamp) {
|
|
732
|
+
if (!this.state.config.dismissedIssues) {
|
|
733
|
+
this.state.config.dismissedIssues = {};
|
|
734
|
+
}
|
|
735
|
+
if (url in this.state.config.dismissedIssues) {
|
|
736
|
+
return false;
|
|
737
|
+
}
|
|
738
|
+
this.state.config.dismissedIssues[url] = timestamp;
|
|
739
|
+
return true;
|
|
740
|
+
}
|
|
741
|
+
/**
|
|
742
|
+
* Undismiss an issue by URL.
|
|
743
|
+
* @param url - The full GitHub issue URL.
|
|
744
|
+
* @returns true if found and removed, false if not dismissed.
|
|
745
|
+
*/
|
|
746
|
+
undismissIssue(url) {
|
|
747
|
+
if (!this.state.config.dismissedIssues || !(url in this.state.config.dismissedIssues)) {
|
|
748
|
+
return false;
|
|
749
|
+
}
|
|
750
|
+
delete this.state.config.dismissedIssues[url];
|
|
751
|
+
return true;
|
|
752
|
+
}
|
|
753
|
+
/**
|
|
754
|
+
* Get the timestamp when an issue was dismissed.
|
|
755
|
+
* @param url - The full GitHub issue URL.
|
|
756
|
+
* @returns The ISO dismiss timestamp, or undefined if not dismissed.
|
|
757
|
+
*/
|
|
758
|
+
getIssueDismissedAt(url) {
|
|
759
|
+
return this.state.config.dismissedIssues?.[url];
|
|
760
|
+
}
|
|
761
|
+
// === Snooze / Unsnooze CI Failures ===
|
|
762
|
+
/**
|
|
763
|
+
* Snooze a PR's CI failure for a given number of days.
|
|
764
|
+
* Snoozed PRs are excluded from actionable CI failure lists until the snooze expires.
|
|
765
|
+
* @param url - The full GitHub PR URL.
|
|
766
|
+
* @param reason - Why the CI failure is being snoozed (e.g., "upstream infrastructure issue").
|
|
767
|
+
* @param durationDays - Number of days to snooze. Default 7.
|
|
768
|
+
* @returns true if newly snoozed, false if already snoozed.
|
|
769
|
+
*/
|
|
770
|
+
snoozePR(url, reason, durationDays) {
|
|
771
|
+
if (!Number.isFinite(durationDays) || durationDays <= 0) {
|
|
772
|
+
throw new ValidationError(`Invalid snooze duration: ${durationDays}. Must be a positive finite number.`);
|
|
773
|
+
}
|
|
774
|
+
if (!this.state.config.snoozedPRs) {
|
|
775
|
+
this.state.config.snoozedPRs = {};
|
|
776
|
+
}
|
|
777
|
+
if (url in this.state.config.snoozedPRs) {
|
|
778
|
+
return false;
|
|
779
|
+
}
|
|
780
|
+
const now = new Date();
|
|
781
|
+
const expiresAt = new Date(now.getTime() + durationDays * 24 * 60 * 60 * 1000);
|
|
782
|
+
this.state.config.snoozedPRs[url] = {
|
|
783
|
+
reason,
|
|
784
|
+
snoozedAt: now.toISOString(),
|
|
785
|
+
expiresAt: expiresAt.toISOString(),
|
|
786
|
+
};
|
|
787
|
+
return true;
|
|
788
|
+
}
|
|
789
|
+
/**
|
|
790
|
+
* Unsnooze a PR by URL.
|
|
791
|
+
* @param url - The full GitHub PR URL.
|
|
792
|
+
* @returns true if found and removed, false if not snoozed.
|
|
793
|
+
*/
|
|
794
|
+
unsnoozePR(url) {
|
|
795
|
+
if (!this.state.config.snoozedPRs || !(url in this.state.config.snoozedPRs)) {
|
|
796
|
+
return false;
|
|
797
|
+
}
|
|
798
|
+
delete this.state.config.snoozedPRs[url];
|
|
799
|
+
return true;
|
|
800
|
+
}
|
|
801
|
+
/**
|
|
802
|
+
* Check if a PR is currently snoozed (not expired).
|
|
803
|
+
* @param url - The full GitHub PR URL.
|
|
804
|
+
* @returns true if the PR is snoozed and the snooze has not expired.
|
|
805
|
+
*/
|
|
806
|
+
isSnoozed(url) {
|
|
807
|
+
const info = this.getSnoozeInfo(url);
|
|
808
|
+
if (!info)
|
|
809
|
+
return false;
|
|
810
|
+
const expiresAtMs = new Date(info.expiresAt).getTime();
|
|
811
|
+
if (isNaN(expiresAtMs)) {
|
|
812
|
+
warn(MODULE, `Invalid expiresAt for snoozed PR ${url}: "${info.expiresAt}". Treating as not snoozed.`);
|
|
813
|
+
return false;
|
|
814
|
+
}
|
|
815
|
+
return expiresAtMs > Date.now();
|
|
816
|
+
}
|
|
817
|
+
/**
|
|
818
|
+
* Get snooze metadata for a PR.
|
|
819
|
+
* @param url - The full GitHub PR URL.
|
|
820
|
+
* @returns The snooze metadata, or undefined if not snoozed.
|
|
821
|
+
*/
|
|
822
|
+
getSnoozeInfo(url) {
|
|
823
|
+
return this.state.config.snoozedPRs?.[url];
|
|
824
|
+
}
|
|
825
|
+
/**
|
|
826
|
+
* Expire all snoozes that are past their `expiresAt` timestamp.
|
|
827
|
+
* @returns Array of PR URLs whose snoozes were expired.
|
|
828
|
+
*/
|
|
829
|
+
expireSnoozes() {
|
|
830
|
+
if (!this.state.config.snoozedPRs)
|
|
831
|
+
return [];
|
|
832
|
+
const expired = [];
|
|
833
|
+
const now = Date.now();
|
|
834
|
+
for (const [url, info] of Object.entries(this.state.config.snoozedPRs)) {
|
|
835
|
+
const expiresAtMs = new Date(info.expiresAt).getTime();
|
|
836
|
+
if (isNaN(expiresAtMs) || expiresAtMs <= now) {
|
|
837
|
+
expired.push(url);
|
|
838
|
+
}
|
|
839
|
+
}
|
|
840
|
+
for (const url of expired) {
|
|
841
|
+
delete this.state.config.snoozedPRs[url];
|
|
842
|
+
}
|
|
843
|
+
return expired;
|
|
844
|
+
}
|
|
845
|
+
// === Repository Scoring ===
|
|
846
|
+
/**
|
|
847
|
+
* Get the score record for a repository.
|
|
848
|
+
* @param repo - Repository in "owner/repo" format.
|
|
849
|
+
* @returns The RepoScore if the repo has been scored, or undefined if never evaluated.
|
|
850
|
+
*/
|
|
851
|
+
getRepoScore(repo) {
|
|
852
|
+
return this.state.repoScores[repo];
|
|
853
|
+
}
|
|
854
|
+
/**
|
|
855
|
+
* Create a default repo score for a new repository
|
|
856
|
+
*/
|
|
857
|
+
createDefaultRepoScore(repo) {
|
|
858
|
+
return {
|
|
859
|
+
repo,
|
|
860
|
+
score: 5, // Base score
|
|
861
|
+
mergedPRCount: 0,
|
|
862
|
+
closedWithoutMergeCount: 0,
|
|
863
|
+
avgResponseDays: null,
|
|
864
|
+
lastEvaluatedAt: new Date().toISOString(),
|
|
865
|
+
signals: {
|
|
866
|
+
hasActiveMaintainers: true, // Assume positive by default
|
|
867
|
+
isResponsive: false,
|
|
868
|
+
hasHostileComments: false,
|
|
869
|
+
},
|
|
870
|
+
};
|
|
871
|
+
}
|
|
872
|
+
/**
|
|
873
|
+
* Calculate the score based on the repo's metrics.
|
|
874
|
+
* Base 5, logarithmic merge bonus (max +5), -1 per closed without merge (max -3),
|
|
875
|
+
* +1 if recently merged (within 90 days), +1 if responsive, -2 if hostile. Clamp 1-10.
|
|
876
|
+
*/
|
|
877
|
+
calculateScore(repoScore) {
|
|
878
|
+
let score = 5; // Base score
|
|
879
|
+
// Logarithmic merge bonus (max +5): 1→+2, 2→+3, 3→+4, 5+→+5
|
|
880
|
+
if (repoScore.mergedPRCount > 0) {
|
|
881
|
+
const mergedBonus = Math.min(Math.round(Math.log2(repoScore.mergedPRCount + 1) * 2), 5);
|
|
882
|
+
score += mergedBonus;
|
|
883
|
+
}
|
|
884
|
+
// -1 per closed without merge (max -3)
|
|
885
|
+
const closedPenalty = Math.min(repoScore.closedWithoutMergeCount, 3);
|
|
886
|
+
score -= closedPenalty;
|
|
887
|
+
// +1 if lastMergedAt is set and within 90 days (recency)
|
|
888
|
+
if (repoScore.lastMergedAt) {
|
|
889
|
+
const lastMergedDate = new Date(repoScore.lastMergedAt);
|
|
890
|
+
if (isNaN(lastMergedDate.getTime())) {
|
|
891
|
+
warn(MODULE, `Invalid lastMergedAt date for ${repoScore.repo}: "${repoScore.lastMergedAt}". Skipping recency bonus.`);
|
|
892
|
+
}
|
|
893
|
+
else {
|
|
894
|
+
const msPerDay = 1000 * 60 * 60 * 24;
|
|
895
|
+
const daysSince = Math.floor((Date.now() - lastMergedDate.getTime()) / msPerDay);
|
|
896
|
+
if (daysSince <= 90) {
|
|
897
|
+
score += 1;
|
|
898
|
+
}
|
|
899
|
+
}
|
|
900
|
+
}
|
|
901
|
+
// +1 if responsive
|
|
902
|
+
if (repoScore.signals.isResponsive) {
|
|
903
|
+
score += 1;
|
|
904
|
+
}
|
|
905
|
+
// -2 if hostile
|
|
906
|
+
if (repoScore.signals.hasHostileComments) {
|
|
907
|
+
score -= 2;
|
|
908
|
+
}
|
|
909
|
+
// Clamp to 1-10
|
|
910
|
+
return Math.max(1, Math.min(10, score));
|
|
911
|
+
}
|
|
912
|
+
/**
|
|
913
|
+
* Update a repository's score with partial updates. If the repo has no existing score,
|
|
914
|
+
* a default score record is created first (base score 5). After applying updates, the
|
|
915
|
+
* numeric score is recalculated using the formula: base 5, logarithmic merge bonus (max +5),
|
|
916
|
+
* -1 per closed-without-merge (max -3), +1 if recently merged, +1 if responsive, -2 if hostile, clamped to [1, 10].
|
|
917
|
+
* @param repo - Repository in "owner/repo" format.
|
|
918
|
+
* @param updates - Updatable RepoScore fields to merge. The `score`, `repo`, and
|
|
919
|
+
* `lastEvaluatedAt` fields are not accepted — score is always derived via
|
|
920
|
+
* calculateScore(), and repo/lastEvaluatedAt are managed internally.
|
|
921
|
+
*/
|
|
922
|
+
updateRepoScore(repo, updates) {
|
|
923
|
+
if (!this.state.repoScores[repo]) {
|
|
924
|
+
this.state.repoScores[repo] = this.createDefaultRepoScore(repo);
|
|
925
|
+
}
|
|
926
|
+
const repoScore = this.state.repoScores[repo];
|
|
927
|
+
// Apply updates
|
|
928
|
+
if (updates.mergedPRCount !== undefined) {
|
|
929
|
+
repoScore.mergedPRCount = updates.mergedPRCount;
|
|
930
|
+
}
|
|
931
|
+
if (updates.closedWithoutMergeCount !== undefined) {
|
|
932
|
+
repoScore.closedWithoutMergeCount = updates.closedWithoutMergeCount;
|
|
933
|
+
}
|
|
934
|
+
if (updates.avgResponseDays !== undefined) {
|
|
935
|
+
repoScore.avgResponseDays = updates.avgResponseDays;
|
|
936
|
+
}
|
|
937
|
+
if (updates.lastMergedAt !== undefined) {
|
|
938
|
+
repoScore.lastMergedAt = updates.lastMergedAt;
|
|
939
|
+
}
|
|
940
|
+
if (updates.stargazersCount !== undefined) {
|
|
941
|
+
repoScore.stargazersCount = updates.stargazersCount;
|
|
942
|
+
}
|
|
943
|
+
if (updates.signals) {
|
|
944
|
+
repoScore.signals = { ...repoScore.signals, ...updates.signals };
|
|
945
|
+
}
|
|
946
|
+
// Recalculate score
|
|
947
|
+
repoScore.score = this.calculateScore(repoScore);
|
|
948
|
+
repoScore.lastEvaluatedAt = new Date().toISOString();
|
|
949
|
+
debug(MODULE, `Updated repo score for ${repo}: ${repoScore.score}/10`);
|
|
950
|
+
}
|
|
951
|
+
/**
|
|
952
|
+
* Increment the merged PR count for a repository and recalculate its score.
|
|
953
|
+
* Routes through {@link updateRepoScore} for a single mutation path.
|
|
954
|
+
* @param repo - Repository in "owner/repo" format.
|
|
955
|
+
*/
|
|
956
|
+
incrementMergedCount(repo) {
|
|
957
|
+
const current = this.state.repoScores[repo];
|
|
958
|
+
const newCount = (current?.mergedPRCount ?? 0) + 1;
|
|
959
|
+
this.updateRepoScore(repo, {
|
|
960
|
+
mergedPRCount: newCount,
|
|
961
|
+
lastMergedAt: new Date().toISOString(),
|
|
962
|
+
});
|
|
963
|
+
debug(MODULE, `Incremented merged count for ${repo}: ${newCount}`);
|
|
964
|
+
}
|
|
965
|
+
/**
|
|
966
|
+
* Increment the closed-without-merge count for a repository and recalculate its score.
|
|
967
|
+
* Routes through {@link updateRepoScore} for a single mutation path.
|
|
968
|
+
* @param repo - Repository in "owner/repo" format.
|
|
969
|
+
*/
|
|
970
|
+
incrementClosedCount(repo) {
|
|
971
|
+
const current = this.state.repoScores[repo];
|
|
972
|
+
const newCount = (current?.closedWithoutMergeCount ?? 0) + 1;
|
|
973
|
+
this.updateRepoScore(repo, {
|
|
974
|
+
closedWithoutMergeCount: newCount,
|
|
975
|
+
});
|
|
976
|
+
debug(MODULE, `Incremented closed count for ${repo}: ${newCount}`);
|
|
977
|
+
}
|
|
978
|
+
/**
|
|
979
|
+
* Mark a repository as having hostile maintainer comments and recalculate its score.
|
|
980
|
+
* This applies a -2 penalty to the score. Creates a default score record if needed.
|
|
981
|
+
* @param repo - Repository in "owner/repo" format.
|
|
982
|
+
*/
|
|
983
|
+
markRepoHostile(repo) {
|
|
984
|
+
this.updateRepoScore(repo, { signals: { hasHostileComments: true } });
|
|
985
|
+
debug(MODULE, `Marked ${repo} as hostile, score: ${this.state.repoScores[repo].score}/10`);
|
|
986
|
+
}
|
|
987
|
+
/**
|
|
988
|
+
* Get repositories where the user has at least one merged PR, sorted by merged count descending.
|
|
989
|
+
* These repos represent proven relationships with high merge probability.
|
|
990
|
+
* @returns Array of "owner/repo" strings for repos with mergedPRCount > 0.
|
|
991
|
+
*/
|
|
992
|
+
getReposWithMergedPRs() {
|
|
993
|
+
return Object.values(this.state.repoScores)
|
|
994
|
+
.filter((rs) => rs.mergedPRCount > 0)
|
|
995
|
+
.sort((a, b) => b.mergedPRCount - a.mergedPRCount)
|
|
996
|
+
.map((rs) => rs.repo);
|
|
997
|
+
}
|
|
998
|
+
/**
|
|
999
|
+
* Get repositories where the user has interacted (has a score record) but has NOT
|
|
1000
|
+
* yet had a PR merged, excluding repos where the only interaction was rejection.
|
|
1001
|
+
* These represent repos with open or in-progress PRs — relationships that benefit
|
|
1002
|
+
* from continued search attention.
|
|
1003
|
+
* @returns Array of "owner/repo" strings, sorted by score descending.
|
|
1004
|
+
*/
|
|
1005
|
+
getReposWithOpenPRs() {
|
|
1006
|
+
return Object.values(this.state.repoScores)
|
|
1007
|
+
.filter((rs) => rs.mergedPRCount === 0 && rs.closedWithoutMergeCount === 0)
|
|
1008
|
+
.sort((a, b) => b.score - a.score)
|
|
1009
|
+
.map((rs) => rs.repo);
|
|
1010
|
+
}
|
|
1011
|
+
/**
|
|
1012
|
+
* Get repositories with a score at or above the given threshold, sorted highest first.
|
|
1013
|
+
* @param minScore - Minimum score (inclusive). Defaults to `config.minRepoScoreThreshold`.
|
|
1014
|
+
* @returns Array of "owner/repo" strings for repos meeting the threshold.
|
|
1015
|
+
*/
|
|
1016
|
+
getHighScoringRepos(minScore) {
|
|
1017
|
+
const threshold = minScore ?? this.state.config.minRepoScoreThreshold;
|
|
1018
|
+
return Object.values(this.state.repoScores)
|
|
1019
|
+
.filter((rs) => rs.score >= threshold)
|
|
1020
|
+
.sort((a, b) => b.score - a.score)
|
|
1021
|
+
.map((rs) => rs.repo);
|
|
1022
|
+
}
|
|
1023
|
+
/**
|
|
1024
|
+
* Get repositories with a score at or below the given threshold, sorted lowest first.
|
|
1025
|
+
* @param maxScore - Maximum score (inclusive). Defaults to `config.minRepoScoreThreshold`.
|
|
1026
|
+
* @returns Array of "owner/repo" strings for repos at or below the threshold.
|
|
1027
|
+
*/
|
|
1028
|
+
getLowScoringRepos(maxScore) {
|
|
1029
|
+
const threshold = maxScore ?? this.state.config.minRepoScoreThreshold;
|
|
1030
|
+
return Object.values(this.state.repoScores)
|
|
1031
|
+
.filter((rs) => rs.score <= threshold)
|
|
1032
|
+
.sort((a, b) => a.score - b.score)
|
|
1033
|
+
.map((rs) => rs.repo);
|
|
1034
|
+
}
|
|
1035
|
+
// === Statistics ===
|
|
1036
|
+
/**
|
|
1037
|
+
* Compute aggregate statistics from the current state. `mergedPRs` and `closedPRs` counts
|
|
1038
|
+
* are summed from repo score records, excluding repos that match `excludeRepos` or `excludeOrgs`
|
|
1039
|
+
* in the config (#211). `totalTracked` reflects the number of non-excluded repositories with
|
|
1040
|
+
* score records.
|
|
1041
|
+
* @returns A Stats snapshot computed from the current state.
|
|
1042
|
+
*/
|
|
1043
|
+
getStats() {
|
|
1044
|
+
// v2: Calculate from repoScores, filtering out excluded repos/orgs (#211)
|
|
1045
|
+
let totalMerged = 0;
|
|
1046
|
+
let totalClosed = 0;
|
|
1047
|
+
let totalTracked = 0;
|
|
1048
|
+
for (const [repoKey, score] of Object.entries(this.state.repoScores)) {
|
|
1049
|
+
if (this.isExcluded(repoKey))
|
|
1050
|
+
continue;
|
|
1051
|
+
totalTracked++;
|
|
1052
|
+
totalMerged += score.mergedPRCount;
|
|
1053
|
+
totalClosed += score.closedWithoutMergeCount;
|
|
1054
|
+
}
|
|
1055
|
+
const completed = totalMerged + totalClosed;
|
|
1056
|
+
const mergeRate = completed > 0 ? (totalMerged / completed) * 100 : 0;
|
|
1057
|
+
return {
|
|
1058
|
+
mergedPRs: totalMerged,
|
|
1059
|
+
closedPRs: totalClosed,
|
|
1060
|
+
activeIssues: 0,
|
|
1061
|
+
trustedProjects: this.state.config.trustedProjects.filter((p) => !this.isExcluded(p)).length,
|
|
1062
|
+
mergeRate: mergeRate.toFixed(1) + '%',
|
|
1063
|
+
totalTracked,
|
|
1064
|
+
needsResponse: 0,
|
|
1065
|
+
};
|
|
1066
|
+
}
|
|
1067
|
+
}
|
|
1068
|
+
// Singleton instance
|
|
1069
|
+
let stateManager = null;
|
|
1070
|
+
/**
 * Get the singleton StateManager instance, creating it on first call.
 * Subsequent calls return the same instance. Use {@link resetStateManager} to
 * clear the singleton (primarily for testing).
 * @returns The shared StateManager instance.
 */
export function getStateManager() {
    if (stateManager === null) {
        stateManager = new StateManager();
    }
    return stateManager;
}
|
|
1082
|
+
/**
 * Reset the singleton StateManager instance to null. The next call to
 * {@link getStateManager} will create a fresh instance. Intended for test
 * isolation — should not be called in production code.
 */
export function resetStateManager() {
    stateManager = null;
}
|