switchman-dev 0.1.6 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci.yml +26 -0
- package/CHANGELOG.md +36 -0
- package/CLAUDE.md +113 -0
- package/README.md +296 -15
- package/examples/README.md +37 -2
- package/package.json +6 -1
- package/src/cli/index.js +3939 -130
- package/src/core/ci.js +205 -1
- package/src/core/db.js +963 -45
- package/src/core/enforcement.js +140 -15
- package/src/core/git.js +286 -1
- package/src/core/ignore.js +1 -0
- package/src/core/licence.js +365 -0
- package/src/core/mcp.js +41 -2
- package/src/core/merge-gate.js +22 -5
- package/src/core/outcome.js +43 -44
- package/src/core/pipeline.js +2459 -88
- package/src/core/planner.js +35 -11
- package/src/core/policy.js +106 -1
- package/src/core/queue.js +654 -29
- package/src/core/semantic.js +71 -5
- package/src/core/sync.js +216 -0
- package/src/mcp/server.js +18 -6
- package/tests.zip +0 -0
package/src/core/queue.js
CHANGED
|
@@ -1,9 +1,36 @@
|
|
|
1
|
-
import { getMergeQueueItem, listMergeQueue, listTasks, listWorktrees, markMergeQueueState, startMergeQueueItem } from './db.js';
|
|
2
|
-
import { gitBranchExists, gitMergeBranchInto, gitRebaseOnto } from './git.js';
|
|
1
|
+
import { finishOperationJournalEntry, getMergeQueueItem, getTaskSpec, listDependencyInvalidations, listMergeQueue, listTasks, listWorktrees, markMergeQueueState, startMergeQueueItem, startOperationJournalEntry } from './db.js';
|
|
2
|
+
import { gitAssessBranchFreshness, gitBranchExists, gitMergeBranchInto, gitRebaseOnto } from './git.js';
|
|
3
3
|
import { runAiMergeGate } from './merge-gate.js';
|
|
4
|
+
import { evaluatePipelinePolicyGate, getPipelineStaleWaveContext, preparePipelineLandingTarget } from './pipeline.js';
|
|
4
5
|
import { scanAllWorktrees } from './detector.js';
|
|
5
6
|
|
|
6
|
-
|
|
7
|
+
const QUEUE_RETRY_BACKOFF_BASE_MS = 30_000;
|
|
8
|
+
const QUEUE_RETRY_BACKOFF_MAX_MS = 5 * 60_000;
|
|
9
|
+
|
|
10
|
+
function formatQueueTimestamp(value) {
|
|
11
|
+
const timestamp = Date.parse(String(value || ''));
|
|
12
|
+
if (!Number.isFinite(timestamp)) return null;
|
|
13
|
+
return new Date(timestamp).toISOString();
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
function isQueueBackoffActive(item) {
|
|
17
|
+
const raw = item?.backoff_until;
|
|
18
|
+
if (!raw) return false;
|
|
19
|
+
const timestamp = Date.parse(String(raw));
|
|
20
|
+
return Number.isFinite(timestamp) && timestamp > Date.now();
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
function computeQueueRetryBackoff(item) {
|
|
24
|
+
const retriesUsed = Number(item?.retry_count || 0);
|
|
25
|
+
const delayMs = Math.min(QUEUE_RETRY_BACKOFF_MAX_MS, QUEUE_RETRY_BACKOFF_BASE_MS * (2 ** retriesUsed));
|
|
26
|
+
const backoffUntil = new Date(Date.now() + delayMs).toISOString();
|
|
27
|
+
return {
|
|
28
|
+
delay_ms: delayMs,
|
|
29
|
+
backoff_until: backoffUntil,
|
|
30
|
+
};
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
export function describeQueueError(err) {
|
|
7
34
|
const message = String(err?.stderr || err?.message || err || '').trim();
|
|
8
35
|
if (/conflict/i.test(message)) {
|
|
9
36
|
return {
|
|
@@ -23,6 +50,15 @@ function describeQueueError(err) {
|
|
|
23
50
|
};
|
|
24
51
|
}
|
|
25
52
|
|
|
53
|
+
if (/untracked working tree files would be overwritten by merge/i.test(message)) {
|
|
54
|
+
return {
|
|
55
|
+
code: 'untracked_worktree_files',
|
|
56
|
+
summary: message || 'Untracked local files would be overwritten by merge.',
|
|
57
|
+
nextAction: 'Remove or ignore the untracked files in the target worktree, then run `switchman queue retry <itemId>`. Project-local MCP files should be excluded via `.git/info/exclude` after `switchman setup`.',
|
|
58
|
+
retryable: true,
|
|
59
|
+
};
|
|
60
|
+
}
|
|
61
|
+
|
|
26
62
|
return {
|
|
27
63
|
code: 'merge_failed',
|
|
28
64
|
summary: message || 'Merge queue item failed.',
|
|
@@ -35,14 +71,16 @@ function scheduleRetryOrBlock(db, item, failure) {
|
|
|
35
71
|
const retriesUsed = Number(item.retry_count || 0);
|
|
36
72
|
const maxRetries = Number(item.max_retries || 0);
|
|
37
73
|
if (failure.retryable && retriesUsed < maxRetries) {
|
|
74
|
+
const backoff = computeQueueRetryBackoff(item);
|
|
38
75
|
return {
|
|
39
76
|
status: 'retrying',
|
|
40
77
|
item: markMergeQueueState(db, item.id, {
|
|
41
78
|
status: 'retrying',
|
|
42
79
|
lastErrorCode: failure.code,
|
|
43
80
|
lastErrorSummary: failure.summary,
|
|
44
|
-
nextAction: `Retry ${retriesUsed + 1} of ${maxRetries}
|
|
81
|
+
nextAction: `Retry ${retriesUsed + 1} of ${maxRetries} is waiting until ${backoff.backoff_until}. Run \`switchman queue retry ${item.id}\` to retry sooner after fixing any underlying branch drift.`,
|
|
45
82
|
incrementRetry: true,
|
|
83
|
+
backoffUntil: backoff.backoff_until,
|
|
46
84
|
}),
|
|
47
85
|
};
|
|
48
86
|
}
|
|
@@ -58,7 +96,7 @@ function scheduleRetryOrBlock(db, item, failure) {
|
|
|
58
96
|
};
|
|
59
97
|
}
|
|
60
98
|
|
|
61
|
-
async function evaluateQueueRepoGate(db, repoRoot) {
|
|
99
|
+
export async function evaluateQueueRepoGate(db, repoRoot) {
|
|
62
100
|
const report = await scanAllWorktrees(db, repoRoot);
|
|
63
101
|
const aiGate = await runAiMergeGate(db, repoRoot);
|
|
64
102
|
const ok = report.conflicts.length === 0
|
|
@@ -87,9 +125,13 @@ export function resolveQueueSource(db, repoRoot, item) {
|
|
|
87
125
|
}
|
|
88
126
|
|
|
89
127
|
if (item.source_type === 'branch') {
|
|
128
|
+
const worktree = listWorktrees(db).find((entry) =>
|
|
129
|
+
(item.source_worktree && entry.name === item.source_worktree)
|
|
130
|
+
|| entry.branch === item.source_ref);
|
|
90
131
|
return {
|
|
91
132
|
branch: item.source_ref,
|
|
92
|
-
worktree: item.source_worktree || null,
|
|
133
|
+
worktree: worktree?.name || item.source_worktree || null,
|
|
134
|
+
worktree_path: worktree?.path || null,
|
|
93
135
|
pipeline_id: item.source_pipeline_id || null,
|
|
94
136
|
};
|
|
95
137
|
}
|
|
@@ -108,20 +150,21 @@ export function resolveQueueSource(db, repoRoot, item) {
|
|
|
108
150
|
}
|
|
109
151
|
|
|
110
152
|
if (item.source_type === 'pipeline') {
|
|
111
|
-
const
|
|
112
|
-
const
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
153
|
+
const pipelineId = item.source_pipeline_id || item.source_ref;
|
|
154
|
+
const landingTarget = preparePipelineLandingTarget(db, repoRoot, pipelineId, {
|
|
155
|
+
baseBranch: item.target_branch || 'main',
|
|
156
|
+
requireCompleted: true,
|
|
157
|
+
allowCurrentBranchFallback: false,
|
|
158
|
+
});
|
|
159
|
+
const worktree = landingTarget.worktree
|
|
160
|
+
? listWorktrees(db).find((entry) => entry.name === landingTarget.worktree) || null
|
|
161
|
+
: null;
|
|
162
|
+
|
|
120
163
|
return {
|
|
121
|
-
branch:
|
|
122
|
-
worktree: worktree
|
|
123
|
-
worktree_path: worktree
|
|
124
|
-
pipeline_id:
|
|
164
|
+
branch: landingTarget.branch,
|
|
165
|
+
worktree: worktree?.name || null,
|
|
166
|
+
worktree_path: worktree?.path || null,
|
|
167
|
+
pipeline_id: pipelineId,
|
|
125
168
|
};
|
|
126
169
|
}
|
|
127
170
|
|
|
@@ -135,27 +178,515 @@ export function inferQueueNextAction(item) {
|
|
|
135
178
|
return null;
|
|
136
179
|
}
|
|
137
180
|
|
|
138
|
-
|
|
181
|
+
function summarizeQueueGoalContext(db, item) {
|
|
182
|
+
const pipelineId = item.source_pipeline_id || (item.source_type === 'pipeline' ? item.source_ref : null);
|
|
183
|
+
if (!db || !pipelineId) {
|
|
184
|
+
return {
|
|
185
|
+
pipeline_id: pipelineId,
|
|
186
|
+
goal_priority: null,
|
|
187
|
+
goal_title: null,
|
|
188
|
+
integration_risk: 'normal',
|
|
189
|
+
task_count: 0,
|
|
190
|
+
};
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
const pipelineTasks = listTasks(db)
|
|
194
|
+
.map((task) => ({ ...task, task_spec: getTaskSpec(db, task.id) }))
|
|
195
|
+
.filter((task) => task.task_spec?.pipeline_id === pipelineId);
|
|
196
|
+
const goalPriority = pipelineTasks.reduce((highest, task) => Math.max(highest, Number(task.priority || 0)), 0) || null;
|
|
197
|
+
const goalTitle = pipelineTasks[0]?.title || pipelineId;
|
|
198
|
+
const riskLevels = new Set(pipelineTasks.map((task) => task.task_spec?.risk_level).filter(Boolean));
|
|
199
|
+
const integrationRisk = riskLevels.has('high')
|
|
200
|
+
? 'high'
|
|
201
|
+
: riskLevels.has('medium')
|
|
202
|
+
? 'medium'
|
|
203
|
+
: 'normal';
|
|
204
|
+
|
|
205
|
+
return {
|
|
206
|
+
pipeline_id: pipelineId,
|
|
207
|
+
goal_priority: goalPriority,
|
|
208
|
+
goal_title: goalTitle,
|
|
209
|
+
integration_risk: integrationRisk,
|
|
210
|
+
task_count: pipelineTasks.length,
|
|
211
|
+
};
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
function assessQueueCandidate(db, repoRoot, item) {
|
|
215
|
+
if (!db || !repoRoot || !['queued', 'retrying', 'held', 'wave_blocked', 'escalated'].includes(item.status)) {
|
|
216
|
+
return {
|
|
217
|
+
freshness: 'unknown',
|
|
218
|
+
revalidation_state: 'unknown',
|
|
219
|
+
stale_invalidation_count: 0,
|
|
220
|
+
stale_severity: 'clear',
|
|
221
|
+
branch_availability: 'unknown',
|
|
222
|
+
goal_priority: null,
|
|
223
|
+
integration_risk: 'normal',
|
|
224
|
+
priority_score: 99,
|
|
225
|
+
reason: item.status === 'retrying'
|
|
226
|
+
? 'retrying item waiting for another landing attempt'
|
|
227
|
+
: item.status === 'held'
|
|
228
|
+
? 'held item waiting for a safe landing window'
|
|
229
|
+
: item.status === 'wave_blocked'
|
|
230
|
+
? 'wave-blocked item waiting for coordinated revalidation across the same stale wave'
|
|
231
|
+
: item.status === 'escalated'
|
|
232
|
+
? 'escalated item waiting for operator review'
|
|
233
|
+
: 'queued item waiting to land',
|
|
234
|
+
};
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
try {
|
|
238
|
+
const resolved = resolveQueueSource(db, repoRoot, item);
|
|
239
|
+
const sourceBranchExists = gitBranchExists(repoRoot, resolved.branch);
|
|
240
|
+
const targetBranchExists = gitBranchExists(repoRoot, item.target_branch || 'main');
|
|
241
|
+
if (!sourceBranchExists || !targetBranchExists) {
|
|
242
|
+
return {
|
|
243
|
+
freshness: 'unknown',
|
|
244
|
+
revalidation_state: 'unknown',
|
|
245
|
+
stale_invalidation_count: 0,
|
|
246
|
+
stale_severity: 'clear',
|
|
247
|
+
branch_availability: !sourceBranchExists ? 'source_missing' : 'target_missing',
|
|
248
|
+
goal_priority: null,
|
|
249
|
+
integration_risk: 'normal',
|
|
250
|
+
priority_score: 50,
|
|
251
|
+
reason: !sourceBranchExists
|
|
252
|
+
? 'source branch is missing, so landing should surface an explicit queue block'
|
|
253
|
+
: 'target branch is missing, so landing should surface an explicit queue block',
|
|
254
|
+
};
|
|
255
|
+
}
|
|
256
|
+
const freshness = gitAssessBranchFreshness(repoRoot, item.target_branch || 'main', resolved.branch);
|
|
257
|
+
const goalContext = summarizeQueueGoalContext(db, item);
|
|
258
|
+
const pipelineId = goalContext.pipeline_id;
|
|
259
|
+
const staleInvalidations = pipelineId
|
|
260
|
+
? listDependencyInvalidations(db, { pipelineId }).filter((entry) => entry.affected_pipeline_id === pipelineId)
|
|
261
|
+
: [];
|
|
262
|
+
const staleWaveContext = pipelineId
|
|
263
|
+
? getPipelineStaleWaveContext(db, pipelineId)
|
|
264
|
+
: { shared_wave_count: 0, largest_wave_size: 0, primary_wave: null };
|
|
265
|
+
const statusWeight = item.status === 'queued' ? 0 : 1;
|
|
266
|
+
const freshnessWeight = freshness.state === 'fresh' ? 0 : freshness.state === 'behind' ? 2 : 4;
|
|
267
|
+
const urgencyWeight = goalContext.goal_priority >= 8 ? -2 : goalContext.goal_priority >= 6 ? -1 : 0;
|
|
268
|
+
const staleSeverity = staleInvalidations.some((entry) => entry.severity === 'blocked')
|
|
269
|
+
? 'block'
|
|
270
|
+
: staleInvalidations.length > 0
|
|
271
|
+
? 'warn'
|
|
272
|
+
: 'clear';
|
|
273
|
+
const revalidationWeight = staleSeverity === 'block' ? 6 : staleSeverity === 'warn' ? 3 : 0;
|
|
274
|
+
const waveWeight = staleWaveContext.largest_wave_size >= 3 ? 3 : staleWaveContext.largest_wave_size >= 2 ? 2 : 0;
|
|
275
|
+
const integrationWeight = goalContext.integration_risk === 'high' ? 1 : 0;
|
|
276
|
+
const backoffWaiting = item.status === 'retrying' && isQueueBackoffActive(item);
|
|
277
|
+
const backoffWeight = backoffWaiting ? 3 : 0;
|
|
278
|
+
const freshnessReason = freshness.state === 'fresh'
|
|
279
|
+
? 'fresh branch is most likely to land cleanly next'
|
|
280
|
+
: freshness.state === 'behind'
|
|
281
|
+
? `branch is behind ${item.target_branch || 'main'}, so fresher queue items land first`
|
|
282
|
+
: 'freshness is unknown, so this item stays behind clearly fresher work';
|
|
283
|
+
const urgencyReason = goalContext.goal_priority >= 8
|
|
284
|
+
? `goal priority ${goalContext.goal_priority} raises this landing candidate above lower-priority work`
|
|
285
|
+
: goalContext.goal_priority >= 6
|
|
286
|
+
? `goal priority ${goalContext.goal_priority} gives this candidate a small landing preference`
|
|
287
|
+
: null;
|
|
288
|
+
const revalidationReason = staleSeverity === 'block'
|
|
289
|
+
? `pipeline ${pipelineId} has stale work to revalidate before it should land`
|
|
290
|
+
: staleSeverity === 'warn'
|
|
291
|
+
? `pipeline ${pipelineId} has stale work to revalidate, so clearer landing candidates land first`
|
|
292
|
+
: null;
|
|
293
|
+
const waveReason = staleWaveContext.primary_wave && staleWaveContext.largest_wave_size > 1
|
|
294
|
+
? `the same stale wave also affects ${staleWaveContext.primary_wave.related_affected_pipelines.filter((entry) => entry !== pipelineId).join(', ')}`
|
|
295
|
+
: null;
|
|
296
|
+
const riskReason = goalContext.integration_risk === 'high'
|
|
297
|
+
? `pipeline ${pipelineId} carries high integration risk and may need escalation if it is not clearly ready`
|
|
298
|
+
: goalContext.integration_risk === 'medium'
|
|
299
|
+
? `pipeline ${pipelineId} carries moderate integration risk`
|
|
300
|
+
: null;
|
|
301
|
+
const backoffReason = backoffWaiting
|
|
302
|
+
? `automatic retry backoff is active until ${formatQueueTimestamp(item.backoff_until)}`
|
|
303
|
+
: null;
|
|
304
|
+
return {
|
|
305
|
+
freshness: freshness.state,
|
|
306
|
+
revalidation_state: staleSeverity === 'clear' ? 'clear' : 'stale',
|
|
307
|
+
stale_invalidation_count: staleInvalidations.length,
|
|
308
|
+
stale_severity: staleSeverity,
|
|
309
|
+
stale_wave_count: staleWaveContext.shared_wave_count,
|
|
310
|
+
stale_wave_size: staleWaveContext.largest_wave_size,
|
|
311
|
+
stale_wave_summary: staleWaveContext.primary_wave?.summary || null,
|
|
312
|
+
branch_availability: 'ready',
|
|
313
|
+
goal_priority: goalContext.goal_priority,
|
|
314
|
+
goal_title: goalContext.goal_title,
|
|
315
|
+
integration_risk: goalContext.integration_risk,
|
|
316
|
+
priority_score: freshnessWeight + statusWeight + revalidationWeight + waveWeight + integrationWeight + urgencyWeight + backoffWeight,
|
|
317
|
+
reason: [freshnessReason, urgencyReason, revalidationReason, waveReason, riskReason, backoffReason].filter(Boolean).join('; '),
|
|
318
|
+
freshness_details: freshness,
|
|
319
|
+
backoff_until: item.backoff_until || null,
|
|
320
|
+
backoff_active: backoffWaiting,
|
|
321
|
+
next_action: staleInvalidations.length > 0 && pipelineId
|
|
322
|
+
? `switchman task retry-stale --pipeline ${pipelineId}`
|
|
323
|
+
: null,
|
|
324
|
+
};
|
|
325
|
+
} catch {
|
|
326
|
+
return {
|
|
327
|
+
freshness: 'unknown',
|
|
328
|
+
revalidation_state: 'unknown',
|
|
329
|
+
stale_invalidation_count: 0,
|
|
330
|
+
stale_severity: 'clear',
|
|
331
|
+
branch_availability: 'unknown',
|
|
332
|
+
goal_priority: null,
|
|
333
|
+
integration_risk: 'normal',
|
|
334
|
+
priority_score: 60,
|
|
335
|
+
reason: 'queue source could not be resolved cleanly yet',
|
|
336
|
+
};
|
|
337
|
+
}
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
function rankQueueItems(items, { db = null, repoRoot = null } = {}) {
|
|
341
|
+
return items
|
|
342
|
+
.filter((item) => ['queued', 'retrying', 'held', 'wave_blocked', 'escalated'].includes(item.status))
|
|
343
|
+
.map((item) => ({
|
|
344
|
+
...item,
|
|
345
|
+
queue_assessment: assessQueueCandidate(db, repoRoot, item),
|
|
346
|
+
}))
|
|
347
|
+
.sort((left, right) => {
|
|
348
|
+
const scoreDelta = (left.queue_assessment?.priority_score ?? 99) - (right.queue_assessment?.priority_score ?? 99);
|
|
349
|
+
if (scoreDelta !== 0) return scoreDelta;
|
|
350
|
+
return String(left.created_at || '').localeCompare(String(right.created_at || ''));
|
|
351
|
+
});
|
|
352
|
+
}
|
|
353
|
+
|
|
354
|
+
function annotateQueueCandidates(items, { db = null, repoRoot = null } = {}) {
|
|
355
|
+
return rankQueueItems(items, { db, repoRoot }).map((item) => ({
|
|
356
|
+
...item,
|
|
357
|
+
recommendation: recommendQueueAction(item),
|
|
358
|
+
}));
|
|
359
|
+
}
|
|
360
|
+
|
|
361
|
+
function recommendQueueAction(item) {
|
|
362
|
+
const assessment = item.queue_assessment || {};
|
|
363
|
+
if (item.status === 'retrying') {
|
|
364
|
+
if (assessment.backoff_active) {
|
|
365
|
+
return {
|
|
366
|
+
action: 'retry',
|
|
367
|
+
summary: `wait for retry backoff until ${assessment.backoff_until}, or run \`switchman queue retry ${item.id}\` to retry sooner`,
|
|
368
|
+
command: `switchman queue retry ${item.id}`,
|
|
369
|
+
};
|
|
370
|
+
}
|
|
371
|
+
return {
|
|
372
|
+
action: 'retry',
|
|
373
|
+
summary: item.next_action || 'retry the item after the underlying landing issue is resolved',
|
|
374
|
+
command: 'switchman queue run',
|
|
375
|
+
};
|
|
376
|
+
}
|
|
377
|
+
|
|
378
|
+
if (item.status === 'held' && assessment.stale_invalidation_count > 0) {
|
|
379
|
+
return {
|
|
380
|
+
action: 'hold',
|
|
381
|
+
summary: item.next_action || (assessment.stale_wave_size > 1
|
|
382
|
+
? `hold for coordinated revalidation: ${assessment.stale_wave_summary || 'the same stale wave'} affects ${assessment.stale_wave_size} goals`
|
|
383
|
+
: assessment.next_action) || 'hold until the stale pipeline work is revalidated',
|
|
384
|
+
command: assessment.next_action || 'switchman queue retry <itemId>',
|
|
385
|
+
};
|
|
386
|
+
}
|
|
387
|
+
|
|
388
|
+
if (item.status === 'wave_blocked' && assessment.stale_invalidation_count > 0) {
|
|
389
|
+
return {
|
|
390
|
+
action: 'hold',
|
|
391
|
+
summary: item.next_action || `hold for coordinated revalidation: ${assessment.stale_wave_summary || 'shared stale wave'} affects ${assessment.stale_wave_size} goals`,
|
|
392
|
+
command: assessment.next_action || 'switchman queue status',
|
|
393
|
+
};
|
|
394
|
+
}
|
|
395
|
+
|
|
396
|
+
if (item.status === 'escalated' && assessment.integration_risk === 'high' && (assessment.stale_invalidation_count > 0 || assessment.freshness !== 'fresh')) {
|
|
397
|
+
return {
|
|
398
|
+
action: 'escalate',
|
|
399
|
+
summary: item.last_error_summary || 'escalate before landing: high-risk work is not clearly ready yet',
|
|
400
|
+
command: item.next_action || `switchman explain queue ${item.id}`,
|
|
401
|
+
};
|
|
402
|
+
}
|
|
403
|
+
|
|
404
|
+
if (assessment.branch_availability === 'source_missing' || assessment.branch_availability === 'target_missing') {
|
|
405
|
+
return {
|
|
406
|
+
action: 'retry',
|
|
407
|
+
summary: assessment.branch_availability === 'source_missing'
|
|
408
|
+
? 'attempt landing so Switchman can block the missing source branch explicitly'
|
|
409
|
+
: 'attempt landing so Switchman can block the missing target branch explicitly',
|
|
410
|
+
command: 'switchman queue run',
|
|
411
|
+
};
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
if (assessment.integration_risk === 'high' && (assessment.stale_invalidation_count > 0 || assessment.freshness !== 'fresh')) {
|
|
415
|
+
return {
|
|
416
|
+
action: 'escalate',
|
|
417
|
+
summary: assessment.next_action
|
|
418
|
+
? `escalate before landing: high-risk work is not clearly ready and still needs ${assessment.next_action}`
|
|
419
|
+
: 'escalate before landing: high-risk work is not clearly ready yet',
|
|
420
|
+
command: `switchman explain queue ${item.id}`,
|
|
421
|
+
};
|
|
422
|
+
}
|
|
423
|
+
|
|
424
|
+
if (assessment.stale_invalidation_count > 0) {
|
|
425
|
+
return {
|
|
426
|
+
action: 'hold',
|
|
427
|
+
summary: assessment.stale_wave_size > 1
|
|
428
|
+
? `hold for coordinated revalidation first: ${assessment.stale_wave_summary || 'shared stale wave'} affects ${assessment.stale_wave_size} goals`
|
|
429
|
+
: assessment.next_action
|
|
430
|
+
? `hold for revalidation first: ${assessment.next_action}`
|
|
431
|
+
: 'hold until the stale pipeline work is revalidated',
|
|
432
|
+
command: assessment.next_action || 'switchman queue status',
|
|
433
|
+
};
|
|
434
|
+
}
|
|
435
|
+
|
|
436
|
+
if (assessment.freshness === 'behind') {
|
|
437
|
+
return {
|
|
438
|
+
action: 'hold',
|
|
439
|
+
summary: `hold until fresher ${item.target_branch || 'main'} candidates land first`,
|
|
440
|
+
command: 'switchman queue run',
|
|
441
|
+
};
|
|
442
|
+
}
|
|
443
|
+
|
|
444
|
+
if (assessment.freshness === 'unknown') {
|
|
445
|
+
return {
|
|
446
|
+
action: 'hold',
|
|
447
|
+
summary: 'hold until branch freshness can be resolved cleanly',
|
|
448
|
+
command: 'switchman queue status',
|
|
449
|
+
};
|
|
450
|
+
}
|
|
451
|
+
|
|
452
|
+
return {
|
|
453
|
+
action: 'land_now',
|
|
454
|
+
summary: assessment.integration_risk === 'high'
|
|
455
|
+
? 'land now with elevated integration attention: this is the clearest current high-risk merge candidate'
|
|
456
|
+
: 'land now: this is the clearest current merge candidate',
|
|
457
|
+
command: 'switchman queue run',
|
|
458
|
+
};
|
|
459
|
+
}
|
|
460
|
+
|
|
461
|
+
function classifyQueuePlanLane(item) {
|
|
462
|
+
const action = item.recommendation?.action || 'hold';
|
|
463
|
+
const assessment = item.queue_assessment || {};
|
|
464
|
+
|
|
465
|
+
if (action === 'escalate') {
|
|
466
|
+
return {
|
|
467
|
+
lane: 'escalate',
|
|
468
|
+
summary: item.recommendation?.summary || 'needs operator review before it should land',
|
|
469
|
+
command: item.recommendation?.command || `switchman explain queue ${item.id}`,
|
|
470
|
+
};
|
|
471
|
+
}
|
|
472
|
+
|
|
473
|
+
if (action === 'retry') {
|
|
474
|
+
if (assessment.backoff_active) {
|
|
475
|
+
return {
|
|
476
|
+
lane: 'prepare_next',
|
|
477
|
+
summary: item.recommendation?.summary || 'wait for retry backoff, then retry this landing candidate',
|
|
478
|
+
command: item.recommendation?.command || `switchman queue retry ${item.id}`,
|
|
479
|
+
};
|
|
480
|
+
}
|
|
481
|
+
return {
|
|
482
|
+
lane: 'prepare_next',
|
|
483
|
+
summary: item.recommendation?.summary || 'retry this landing candidate once the immediate issue is cleared',
|
|
484
|
+
command: item.recommendation?.command || 'switchman queue run',
|
|
485
|
+
};
|
|
486
|
+
}
|
|
487
|
+
|
|
488
|
+
if (action === 'land_now') {
|
|
489
|
+
return {
|
|
490
|
+
lane: 'land_now',
|
|
491
|
+
summary: item.recommendation?.summary || 'this is ready to land now',
|
|
492
|
+
command: item.recommendation?.command || 'switchman queue run',
|
|
493
|
+
};
|
|
494
|
+
}
|
|
495
|
+
|
|
496
|
+
if (assessment.stale_invalidation_count > 0) {
|
|
497
|
+
return {
|
|
498
|
+
lane: 'unblock_first',
|
|
499
|
+
summary: item.recommendation?.summary || 'revalidate this goal before it can land',
|
|
500
|
+
command: item.recommendation?.command || assessment.next_action || 'switchman queue status',
|
|
501
|
+
};
|
|
502
|
+
}
|
|
503
|
+
|
|
504
|
+
if (assessment.freshness === 'behind' || assessment.freshness === 'unknown') {
|
|
505
|
+
return {
|
|
506
|
+
lane: 'defer',
|
|
507
|
+
summary: item.recommendation?.summary || 'wait until fresher candidates land first',
|
|
508
|
+
command: item.recommendation?.command || 'switchman queue run',
|
|
509
|
+
};
|
|
510
|
+
}
|
|
511
|
+
|
|
512
|
+
return {
|
|
513
|
+
lane: 'prepare_next',
|
|
514
|
+
summary: item.recommendation?.summary || 'keep this candidate close behind the current landing focus',
|
|
515
|
+
command: item.recommendation?.command || 'switchman queue status',
|
|
516
|
+
};
|
|
517
|
+
}
|
|
518
|
+
|
|
519
|
+
function buildQueueGoalPlan(candidates = []) {
|
|
520
|
+
const lanes = {
|
|
521
|
+
land_now: [],
|
|
522
|
+
prepare_next: [],
|
|
523
|
+
unblock_first: [],
|
|
524
|
+
escalate: [],
|
|
525
|
+
defer: [],
|
|
526
|
+
};
|
|
527
|
+
|
|
528
|
+
for (const item of candidates) {
|
|
529
|
+
const plan = classifyQueuePlanLane(item);
|
|
530
|
+
lanes[plan.lane].push({
|
|
531
|
+
item_id: item.id,
|
|
532
|
+
source_ref: item.source_ref,
|
|
533
|
+
source_type: item.source_type,
|
|
534
|
+
pipeline_id: item.source_pipeline_id || null,
|
|
535
|
+
goal_title: item.queue_assessment?.goal_title || null,
|
|
536
|
+
goal_priority: item.queue_assessment?.goal_priority || null,
|
|
537
|
+
action: item.recommendation?.action || 'hold',
|
|
538
|
+
freshness: item.queue_assessment?.freshness || 'unknown',
|
|
539
|
+
stale_invalidation_count: item.queue_assessment?.stale_invalidation_count || 0,
|
|
540
|
+
integration_risk: item.queue_assessment?.integration_risk || 'normal',
|
|
541
|
+
summary: plan.summary,
|
|
542
|
+
command: plan.command,
|
|
543
|
+
});
|
|
544
|
+
}
|
|
545
|
+
|
|
546
|
+
return lanes;
|
|
547
|
+
}
|
|
548
|
+
|
|
549
|
+
function buildQueueRecommendedSequence(candidates = [], limit = 5) {
|
|
550
|
+
const ordered = [];
|
|
551
|
+
const pushLane = (laneName, items, stage) => {
|
|
552
|
+
for (const item of items) {
|
|
553
|
+
if (ordered.length >= limit) return;
|
|
554
|
+
ordered.push({
|
|
555
|
+
stage,
|
|
556
|
+
lane: laneName,
|
|
557
|
+
item_id: item.item_id,
|
|
558
|
+
source_ref: item.source_ref,
|
|
559
|
+
source_type: item.source_type,
|
|
560
|
+
pipeline_id: item.pipeline_id,
|
|
561
|
+
goal_title: item.goal_title,
|
|
562
|
+
goal_priority: item.goal_priority,
|
|
563
|
+
action: item.action,
|
|
564
|
+
summary: item.summary,
|
|
565
|
+
command: item.command,
|
|
566
|
+
});
|
|
567
|
+
}
|
|
568
|
+
};
|
|
569
|
+
|
|
570
|
+
const plan = buildQueueGoalPlan(candidates);
|
|
571
|
+
pushLane('land_now', plan.land_now, '1');
|
|
572
|
+
pushLane('prepare_next', plan.prepare_next, '2');
|
|
573
|
+
pushLane('unblock_first', plan.unblock_first, '3');
|
|
574
|
+
pushLane('escalate', plan.escalate, '4');
|
|
575
|
+
pushLane('defer', plan.defer, '5');
|
|
576
|
+
return ordered;
|
|
577
|
+
}
|
|
578
|
+
|
|
579
|
+
function chooseNextQueueItem(items, { db = null, repoRoot = null } = {}) {
|
|
580
|
+
const candidates = annotateQueueCandidates(items, { db, repoRoot });
|
|
581
|
+
return candidates[0] || null;
|
|
582
|
+
}
|
|
583
|
+
|
|
584
|
+
function isQueueItemRunnable(item) {
|
|
585
|
+
if (!item?.recommendation?.action) return false;
|
|
586
|
+
if (item.recommendation.action === 'retry' && item.queue_assessment?.backoff_active) {
|
|
587
|
+
return false;
|
|
588
|
+
}
|
|
589
|
+
return ['land_now', 'retry'].includes(item.recommendation.action);
|
|
590
|
+
}
|
|
591
|
+
|
|
592
|
+
function chooseRunnableQueueItem(items, { db = null, repoRoot = null, followPlan = false } = {}) {
|
|
593
|
+
const candidates = annotateQueueCandidates(items, { db, repoRoot });
|
|
594
|
+
if (followPlan) {
|
|
595
|
+
return candidates.find((item) => classifyQueuePlanLane(item).lane === 'land_now' && isQueueItemRunnable(item)) || null;
|
|
596
|
+
}
|
|
597
|
+
return candidates.find((item) => isQueueItemRunnable(item))
|
|
598
|
+
|| candidates.find((item) =>
|
|
599
|
+
item.recommendation?.action === 'hold'
|
|
600
|
+
&& item.queue_assessment?.stale_invalidation_count === 0
|
|
601
|
+
&& item.queue_assessment?.integration_risk !== 'high')
|
|
602
|
+
|| null;
|
|
603
|
+
}
|
|
604
|
+
|
|
605
|
+
function syncDeferredQueueState(db, item) {
|
|
606
|
+
if (!item?.recommendation?.action || !['hold', 'escalate'].includes(item.recommendation.action)) {
|
|
607
|
+
return item;
|
|
608
|
+
}
|
|
609
|
+
|
|
610
|
+
const desiredStatus = item.recommendation.action === 'hold'
|
|
611
|
+
? (item.queue_assessment?.stale_wave_size > 1 ? 'wave_blocked' : 'held')
|
|
612
|
+
: 'escalated';
|
|
613
|
+
const desiredNextAction = item.recommendation.action === 'escalate'
|
|
614
|
+
? `Run \`switchman explain queue ${item.id}\` to review the landing risk, then \`switchman queue retry ${item.id}\` when it is ready again.`
|
|
615
|
+
: item.queue_assessment?.next_action || item.recommendation.command || null;
|
|
616
|
+
const desiredSummary = item.recommendation.summary || item.queue_assessment?.reason || null;
|
|
617
|
+
|
|
618
|
+
if (
|
|
619
|
+
item.status === desiredStatus
|
|
620
|
+
&& (item.next_action || null) === desiredNextAction
|
|
621
|
+
&& (item.last_error_summary || null) === desiredSummary
|
|
622
|
+
) {
|
|
623
|
+
return item;
|
|
624
|
+
}
|
|
625
|
+
|
|
626
|
+
return markMergeQueueState(db, item.id, {
|
|
627
|
+
status: desiredStatus,
|
|
628
|
+
lastErrorCode: desiredStatus === 'wave_blocked' ? 'queue_wave_blocked' : desiredStatus === 'held' ? 'queue_hold' : 'queue_escalated',
|
|
629
|
+
lastErrorSummary: desiredSummary,
|
|
630
|
+
nextAction: desiredNextAction,
|
|
631
|
+
});
|
|
632
|
+
}
|
|
633
|
+
|
|
634
|
+
export function buildQueueStatusSummary(items, { db = null, repoRoot = null } = {}) {
|
|
635
|
+
const rankedCandidates = annotateQueueCandidates(items, { db, repoRoot });
|
|
636
|
+
const plan = buildQueueGoalPlan(rankedCandidates.slice(0, 8));
|
|
637
|
+
const next = rankedCandidates[0]
|
|
638
|
+
|| items.find((item) => ['validating', 'rebasing', 'merging'].includes(item.status))
|
|
639
|
+
|| null;
|
|
139
640
|
const counts = {
|
|
140
641
|
queued: items.filter((item) => item.status === 'queued').length,
|
|
141
642
|
validating: items.filter((item) => item.status === 'validating').length,
|
|
142
643
|
rebasing: items.filter((item) => item.status === 'rebasing').length,
|
|
143
644
|
merging: items.filter((item) => item.status === 'merging').length,
|
|
144
645
|
retrying: items.filter((item) => item.status === 'retrying').length,
|
|
646
|
+
held: items.filter((item) => item.status === 'held').length,
|
|
647
|
+
wave_blocked: items.filter((item) => item.status === 'wave_blocked').length,
|
|
648
|
+
escalated: items.filter((item) => item.status === 'escalated').length,
|
|
145
649
|
blocked: items.filter((item) => item.status === 'blocked').length,
|
|
146
650
|
merged: items.filter((item) => item.status === 'merged').length,
|
|
147
651
|
};
|
|
148
652
|
|
|
149
653
|
return {
|
|
150
654
|
counts,
|
|
151
|
-
next
|
|
655
|
+
next,
|
|
152
656
|
blocked: items.filter((item) => item.status === 'blocked'),
|
|
657
|
+
held_back: rankedCandidates.slice(1, 4),
|
|
658
|
+
decision_summary: next?.queue_assessment?.reason || null,
|
|
659
|
+
focus_decision: next?.recommendation || null,
|
|
660
|
+
plan,
|
|
661
|
+
recommended_sequence: buildQueueRecommendedSequence(rankedCandidates.slice(0, 8)),
|
|
662
|
+
recommendations: rankedCandidates.slice(0, 5).map((item) => ({
|
|
663
|
+
item_id: item.id,
|
|
664
|
+
source_ref: item.source_ref,
|
|
665
|
+
source_type: item.source_type,
|
|
666
|
+
action: item.recommendation?.action || 'hold',
|
|
667
|
+
summary: item.recommendation?.summary || null,
|
|
668
|
+
command: item.recommendation?.command || null,
|
|
669
|
+
freshness: item.queue_assessment?.freshness || 'unknown',
|
|
670
|
+
stale_invalidation_count: item.queue_assessment?.stale_invalidation_count || 0,
|
|
671
|
+
stale_wave_count: item.queue_assessment?.stale_wave_count || 0,
|
|
672
|
+
stale_wave_size: item.queue_assessment?.stale_wave_size || 0,
|
|
673
|
+
stale_wave_summary: item.queue_assessment?.stale_wave_summary || null,
|
|
674
|
+
goal_priority: item.queue_assessment?.goal_priority || null,
|
|
675
|
+
integration_risk: item.queue_assessment?.integration_risk || 'normal',
|
|
676
|
+
})),
|
|
153
677
|
};
|
|
154
678
|
}
|
|
155
679
|
|
|
156
|
-
export async function runNextQueueItem(db, repoRoot, { targetBranch = 'main' } = {}) {
|
|
157
|
-
const
|
|
680
|
+
export async function runNextQueueItem(db, repoRoot, { targetBranch = 'main', followPlan = false } = {}) {
|
|
681
|
+
const currentItems = listMergeQueue(db);
|
|
682
|
+
const nextItem = chooseRunnableQueueItem(currentItems, { db, repoRoot, followPlan });
|
|
158
683
|
if (!nextItem) {
|
|
684
|
+
const deferred = chooseNextQueueItem(currentItems, { db, repoRoot });
|
|
685
|
+
if (deferred) {
|
|
686
|
+
syncDeferredQueueState(db, deferred);
|
|
687
|
+
const refreshedDeferred = chooseNextQueueItem(listMergeQueue(db), { db, repoRoot });
|
|
688
|
+
return { status: 'deferred', item: null, deferred: refreshedDeferred };
|
|
689
|
+
}
|
|
159
690
|
return { status: 'idle', item: null };
|
|
160
691
|
}
|
|
161
692
|
|
|
@@ -168,6 +699,21 @@ export async function runNextQueueItem(db, repoRoot, { targetBranch = 'main' } =
|
|
|
168
699
|
const resolved = resolveQueueSource(db, repoRoot, started);
|
|
169
700
|
const queueTarget = started.target_branch || targetBranch;
|
|
170
701
|
|
|
702
|
+
if (resolved.pipeline_id) {
|
|
703
|
+
const policyGate = await evaluatePipelinePolicyGate(db, repoRoot, resolved.pipeline_id);
|
|
704
|
+
if (!policyGate.ok) {
|
|
705
|
+
return {
|
|
706
|
+
status: 'blocked',
|
|
707
|
+
item: markMergeQueueState(db, started.id, {
|
|
708
|
+
status: 'blocked',
|
|
709
|
+
lastErrorCode: policyGate.reason_code,
|
|
710
|
+
lastErrorSummary: policyGate.summary,
|
|
711
|
+
nextAction: policyGate.next_action,
|
|
712
|
+
}),
|
|
713
|
+
};
|
|
714
|
+
}
|
|
715
|
+
}
|
|
716
|
+
|
|
171
717
|
if (!gitBranchExists(repoRoot, resolved.branch)) {
|
|
172
718
|
return scheduleRetryOrBlock(db, started, {
|
|
173
719
|
code: 'source_missing',
|
|
@@ -177,8 +723,34 @@ export async function runNextQueueItem(db, repoRoot, { targetBranch = 'main' } =
|
|
|
177
723
|
});
|
|
178
724
|
}
|
|
179
725
|
|
|
726
|
+
const rebaseOperation = startOperationJournalEntry(db, {
|
|
727
|
+
scopeType: 'queue_item',
|
|
728
|
+
scopeId: started.id,
|
|
729
|
+
operationType: 'queue_rebase',
|
|
730
|
+
details: JSON.stringify({
|
|
731
|
+
queue_item_id: started.id,
|
|
732
|
+
branch: resolved.branch,
|
|
733
|
+
target_branch: queueTarget,
|
|
734
|
+
}),
|
|
735
|
+
});
|
|
180
736
|
markMergeQueueState(db, started.id, { status: 'rebasing' });
|
|
181
|
-
|
|
737
|
+
try {
|
|
738
|
+
gitRebaseOnto(resolved.worktree_path || repoRoot, queueTarget, resolved.branch);
|
|
739
|
+
finishOperationJournalEntry(db, rebaseOperation.id, {
|
|
740
|
+
status: 'completed',
|
|
741
|
+
});
|
|
742
|
+
} catch (err) {
|
|
743
|
+
finishOperationJournalEntry(db, rebaseOperation.id, {
|
|
744
|
+
status: 'failed',
|
|
745
|
+
details: JSON.stringify({
|
|
746
|
+
queue_item_id: started.id,
|
|
747
|
+
branch: resolved.branch,
|
|
748
|
+
target_branch: queueTarget,
|
|
749
|
+
error: String(err?.message || err),
|
|
750
|
+
}),
|
|
751
|
+
});
|
|
752
|
+
throw err;
|
|
753
|
+
}
|
|
182
754
|
|
|
183
755
|
const gate = await evaluateQueueRepoGate(db, repoRoot);
|
|
184
756
|
if (!gate.ok) {
|
|
@@ -193,8 +765,41 @@ export async function runNextQueueItem(db, repoRoot, { targetBranch = 'main' } =
|
|
|
193
765
|
};
|
|
194
766
|
}
|
|
195
767
|
|
|
768
|
+
const mergeOperation = startOperationJournalEntry(db, {
|
|
769
|
+
scopeType: 'queue_item',
|
|
770
|
+
scopeId: started.id,
|
|
771
|
+
operationType: 'queue_merge',
|
|
772
|
+
details: JSON.stringify({
|
|
773
|
+
queue_item_id: started.id,
|
|
774
|
+
branch: resolved.branch,
|
|
775
|
+
target_branch: queueTarget,
|
|
776
|
+
}),
|
|
777
|
+
});
|
|
196
778
|
markMergeQueueState(db, started.id, { status: 'merging' });
|
|
197
|
-
|
|
779
|
+
let mergedCommit;
|
|
780
|
+
try {
|
|
781
|
+
mergedCommit = gitMergeBranchInto(repoRoot, queueTarget, resolved.branch);
|
|
782
|
+
finishOperationJournalEntry(db, mergeOperation.id, {
|
|
783
|
+
status: 'completed',
|
|
784
|
+
details: JSON.stringify({
|
|
785
|
+
queue_item_id: started.id,
|
|
786
|
+
branch: resolved.branch,
|
|
787
|
+
target_branch: queueTarget,
|
|
788
|
+
merged_commit: mergedCommit,
|
|
789
|
+
}),
|
|
790
|
+
});
|
|
791
|
+
} catch (err) {
|
|
792
|
+
finishOperationJournalEntry(db, mergeOperation.id, {
|
|
793
|
+
status: 'failed',
|
|
794
|
+
details: JSON.stringify({
|
|
795
|
+
queue_item_id: started.id,
|
|
796
|
+
branch: resolved.branch,
|
|
797
|
+
target_branch: queueTarget,
|
|
798
|
+
error: String(err?.message || err),
|
|
799
|
+
}),
|
|
800
|
+
});
|
|
801
|
+
throw err;
|
|
802
|
+
}
|
|
198
803
|
|
|
199
804
|
return {
|
|
200
805
|
status: 'merged',
|
|
@@ -209,17 +814,37 @@ export async function runNextQueueItem(db, repoRoot, { targetBranch = 'main' } =
|
|
|
209
814
|
}
|
|
210
815
|
}
|
|
211
816
|
|
|
212
|
-
/**
 * Drain the merge queue, landing entries one at a time until a stop
 * condition is hit.
 *
 * Processing stops when any of the following occurs:
 *  - `maxItems` queue entries have been attempted;
 *  - `mergeBudget` (when non-null) successful merges have landed;
 *  - the queue yields no runnable item (idle or deferred);
 *  - an attempted item finishes in any state other than 'merged'.
 *
 * @param {object} db - open database handle used by the queue helpers.
 * @param {string} repoRoot - absolute path to the repository root.
 * @param {object} [options]
 * @param {number} [options.maxItems=1] - upper bound on items attempted this run.
 * @param {string} [options.targetBranch='main'] - default branch to merge into.
 * @param {boolean} [options.followPlan=false] - forwarded to runNextQueueItem;
 *   presumably restricts selection to the active plan — confirm against callers.
 * @param {?number} [options.mergeBudget=null] - cap on successful merges; null
 *   means unlimited (bounded only by maxItems).
 * @returns {Promise<{processed: Array, deferred: ?object, execution_policy: object, summary: object}>}
 *   per-item results, the last deferred candidate (if any), the policy that
 *   governed this run, and a fresh queue summary snapshot.
 */
export async function runMergeQueue(db, repoRoot, {
  maxItems = 1,
  targetBranch = 'main',
  followPlan = false,
  mergeBudget = null,
} = {}) {
  const results = [];
  let pendingDeferral = null;
  let landedMerges = 0;
  let attempts = 0;

  while (attempts < maxItems) {
    // Respect the merge budget before pulling another item off the queue.
    if (mergeBudget !== null && landedMerges >= mergeBudget) {
      break;
    }

    const outcome = await runNextQueueItem(db, repoRoot, { targetBranch, followPlan });

    // No runnable item: remember any deferred candidate and stop draining.
    if (!outcome.item) {
      pendingDeferral = outcome.deferred || pendingDeferral;
      break;
    }

    results.push(outcome);

    // Anything other than a clean merge halts the run so the failure can be
    // surfaced instead of silently continuing down the queue.
    if (outcome.status !== 'merged') {
      break;
    }
    landedMerges += 1;
    attempts += 1;
  }

  return {
    processed: results,
    deferred: pendingDeferral,
    execution_policy: {
      follow_plan: followPlan,
      merge_budget: mergeBudget,
      merged_count: landedMerges,
    },
    // Snapshot the queue after processing so callers see post-run state.
    summary: buildQueueStatusSummary(listMergeQueue(db), { db, repoRoot }),
  };
}