wiggum-cli 0.17.3 → 0.18.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1149 @@
1
+ import { z } from 'zod';
2
+ import { listRepoIssues, fetchGitHubIssue } from '../utils/github.js';
3
+ import { assessFeatureStateImpl } from './tools/feature-state.js';
4
+ import { getTracedAI } from '../utils/tracing.js';
5
+ import { loadContext } from '../context/index.js';
6
+ import { isReasoningModel } from '../ai/providers.js';
7
// Matches explicit dependency references such as "depends on #123" in issue
// bodies; capture group 1 is the issue number. Global+case-insensitive for matchAll.
const DEPENDENCY_PATTERN = /\b(?:depends on|blocked by|requires|after)\s+#(\d+)/gi;
// Filler words ignored when tokenizing titles/bodies for dependency matching
// and feature-name derivation.
const STOP_WORDS = new Set([
'the', 'and', 'for', 'with', 'from', 'into', 'that', 'this', 'have', 'has', 'will', 'would',
'should', 'about', 'issue', 'task', 'feature', 'support', 'implement', 'add', 'build', 'create',
'update', 'fix', 'make', 'allow', 'user', 'users', 'cli', 'agent', 'part', 'related', 'summary',
]);
// Tokens too generic to justify a single-token title match on their own
// (see extractDependencyHints's single-token fallback).
const GENERIC_DEPENDENCY_TOKENS = new Set([
'protocol',
'runtime',
'modal',
'status',
'contract',
'interface',
'implementation',
'bridge',
'workflow',
'execution',
]);
// Detects a dependency cue phrase without a trailing issue number.
// NOTE(review): not referenced in this chunk — presumably used elsewhere in the file; confirm.
const DEPENDENCY_CUE_PATTERN = /\b(depends on|blocked by|requires|after)\b/i;
// Caps for model-based inference and parallel work.
// NOTE(review): MAX_MODEL_INFERENCE_CANDIDATES, ENRICHMENT_CONCURRENCY and
// INFERENCE_CONCURRENCY are not referenced in this chunk (inferDependencyEdges
// hard-codes a shortlist of 6) — verify intended usage.
const MAX_MODEL_INFERENCE_CANDIDATES = 12;
const ENRICHMENT_CONCURRENCY = 6;
const INFERENCE_CONCURRENCY = 3;
// Upper bound passed to listRepoIssues when discovering the backlog.
const BACKLOG_DISCOVERY_MAX_LIMIT = 5000;
// Structured-output schema for model-inferred dependency edges; `edges`
// defaults to [] so an empty model answer is valid.
const inferredDependencySchema = z.object({
edges: z.array(z.object({
targetIssue: z.number().int().positive(),
confidence: z.enum(['high', 'medium', 'low']),
evidence: z.string().min(1),
})).default([]),
});
37
/**
 * Creates an empty per-run scheduler cache.
 * `listed`/`listedUnfiltered` memoize issue listings, the two Maps memoize
 * per-issue detail and feature-state lookups, and `persistedContext` holds the
 * loaded project context (`undefined` = not yet loaded).
 */
export function createSchedulerRunCache() {
  const emptyCache = {};
  emptyCache.listed = undefined;
  emptyCache.listedUnfiltered = undefined;
  emptyCache.issueDetails = new Map();
  emptyCache.featureStates = new Map();
  emptyCache.persistedContext = undefined;
  return emptyCache;
}
46
/**
 * Invalidates cached scheduler data after issues change.
 * List results are dropped wholesale (they may no longer reflect reality);
 * per-issue detail and feature-state entries are evicted only for the
 * provided issue numbers.
 */
export function invalidateSchedulerRunCache(cache, issueNumbers = []) {
  cache.listed = undefined;
  cache.listedUnfiltered = undefined;
  issueNumbers.forEach((issueNumber) => {
    cache.issueDetails.delete(issueNumber);
    cache.featureStates.delete(issueNumber);
  });
}
54
// Forwards a backlog orchestration event to the configured listener, if any.
// Uses optional-call so a config without onOrchestratorEvent is a no-op.
function emitBacklogEvent(config, event) {
  config.onOrchestratorEvent?.(event);
}
// Current wall-clock time in milliseconds; isolated so timing call sites read clearly.
function nowMs() {
  return Date.now();
}
60
/**
 * Maps `items` through an async `mapper` with at most `concurrency` calls in
 * flight, preserving input order in the returned array.
 *
 * @param {Array} items - Inputs to map.
 * @param {number} concurrency - Desired parallelism; clamped to [1, items.length].
 * @param {(item: any, index: number) => Promise<any>} mapper - Async transform.
 * @returns {Promise<Array>} Results aligned with `items`.
 * Rejects if any mapper call rejects (Promise.all is fail-fast).
 */
async function mapWithConcurrency(items, concurrency, mapper) {
  if (items.length === 0)
    return [];
  const results = new Array(items.length);
  let nextIndex = 0;
  const worker = async () => {
    while (true) {
      // Claim the next index synchronously (no await between read and
      // increment), so two workers can never process the same item.
      const currentIndex = nextIndex;
      nextIndex += 1;
      if (currentIndex >= items.length)
        return;
      results[currentIndex] = await mapper(items[currentIndex], currentIndex);
    }
  };
  // Clamp to at least one worker: a non-positive concurrency previously
  // spawned zero workers and silently resolved with an unfilled results array.
  const workerCount = Math.min(Math.max(1, concurrency), items.length);
  await Promise.all(Array.from({ length: workerCount }, () => worker()));
  return results;
}
78
// Fetches one GitHub issue, serving repeat lookups from the run cache.
// Failed fetches are cached too (stored value may be null/undefined, hence the
// has() check), so a missing issue is not refetched within a run.
async function getIssueDetail(config, issueNumber, cache) {
  const cachedHit = cache?.issueDetails.has(issueNumber) ?? false;
  if (cachedHit) {
    return cache.issueDetails.get(issueNumber) ?? null;
  }
  const detail = await fetchGitHubIssue(config.owner, config.repo, issueNumber);
  cache?.issueDetails.set(issueNumber, detail);
  return detail;
}
86
// Assesses (or returns cached) feature state for an issue.
// Uses Map.has() rather than truthiness so a falsy cached result still counts
// as a hit — consistent with getIssueDetail's caching discipline; the previous
// truthiness check would re-run the assessment for any falsy cached value.
async function getFeatureState(config, issueNumber, featureName, cache) {
  if (cache?.featureStates.has(issueNumber)) {
    return cache.featureStates.get(issueNumber);
  }
  const featureState = await assessFeatureStateImpl(config.projectRoot, featureName, issueNumber);
  cache?.featureStates.set(issueNumber, featureState);
  return featureState;
}
94
// Loads persisted project context at most once per run.
// In the cache, `undefined` means "never loaded" while null is a valid cached
// miss (load failed or context absent).
async function getPersistedContext(config, cache) {
  if (cache && cache.persistedContext !== undefined) {
    return cache.persistedContext;
  }
  let persistedContext = null;
  try {
    persistedContext = await loadContext(config.projectRoot);
  } catch {
    persistedContext = null; // best-effort: a missing context is tolerated
  }
  if (cache) {
    cache.persistedContext = persistedContext;
  }
  return persistedContext;
}
104
// Lists open repo issues (optionally filtered by `search`), memoizing the
// result on the cache under `cacheKey` so repeat discovery within a run is free.
async function discoverListedIssues(config, search, cache, cacheKey = 'listed') {
  const memoized = cache?.[cacheKey];
  if (memoized)
    return memoized;
  const latest = await listRepoIssues(config.owner, config.repo, search, BACKLOG_DISCOVERY_MAX_LIMIT);
  if (cache)
    cache[cacheKey] = latest;
  return latest;
}
114
// Filters `dependencies` down to those still open: the freshly listed issues
// are consulted first, then individual (cached) GitHub lookups. A dependency
// whose fetch fails is conservatively kept as open and an error is recorded.
async function resolveOpenDependencies(config, dependencies, listedIssues, cache) {
  const openByListing = new Set(listedIssues.map(issue => issue.number));
  const openDependencies = [];
  const errors = [];
  for (const dependencyNumber of dependencies) {
    if (openByListing.has(dependencyNumber)) {
      openDependencies.push(dependencyNumber);
      continue;
    }
    // Sequential on purpose: dependency lists are short and lookups are cached.
    const dependencyDetail = await getIssueDetail(config, dependencyNumber, cache);
    if (dependencyDetail?.state === 'open') {
      openDependencies.push(dependencyNumber);
    } else if (!dependencyDetail) {
      // Unknown state: keep it as a blocker and surface the fetch failure.
      openDependencies.push(dependencyNumber);
      errors.push(`Failed to fetch dependency issue #${dependencyNumber} from GitHub while resolving explicit prerequisites. Check gh connectivity.`);
    }
  }
  return { openDependencies, errors };
}
135
/**
 * Extracts prerequisite issue numbers from an issue body.
 * Two sources are combined: explicit "#N" references after dependency cues,
 * and fuzzy matches of cue phrases against backlog issue titles.
 *
 * @param {string} body - Issue body text.
 * @param {Array<{number: number, title: string}>} backlogIssues - Candidate targets.
 * @param {number} [currentIssueNumber] - Excluded from matching (no self-edges).
 * @returns {number[]} Sorted, de-duplicated dependency issue numbers.
 */
export function extractDependencyHints(body, backlogIssues = [], currentIssueNumber) {
  // Source 1: explicit numeric references like "depends on #42".
  const matches = [...body.matchAll(DEPENDENCY_PATTERN)];
  const numbers = matches.map(m => parseInt(m[1], 10));
  const inferredFromTitles = [];
  // Source 2: cue phrases followed by free text (up to newline/./;) that may
  // name other issues by title instead of number.
  const cueMatches = [...body.matchAll(/\b(?:depends on|blocked by|requires|after)\b([^\n.;]*)/gi)];
  for (const cueMatch of cueMatches) {
    const phrase = cueMatch[1]?.trim() ?? '';
    // A single cue can name several prerequisites ("A + B", "A and B", "A, B").
    const segments = phrase
      .split(/\s+\+\s+|\s+and\s+|,/i)
      .map(segment => segment.trim())
      .filter(Boolean);
    for (const segment of segments) {
      const segmentTokens = normalizeTokens(segment);
      if (segmentTokens.length === 0)
        continue;
      // A lone token may match a title only when it is long enough to be
      // distinctive and not in the generic-token denylist.
      const allowSingleTokenFallback = segmentTokens.length === 1
        && segmentTokens[0].length >= 6
        && !GENERIC_DEPENDENCY_TOKENS.has(segmentTokens[0]);
      // Score each backlog title by token overlap with this segment.
      const scored = backlogIssues
        .filter(issue => issue.number !== currentIssueNumber)
        .map(issue => {
          const titleTokens = normalizeTokens(issue.title);
          const overlap = titleTokens.filter(token => segmentTokens.includes(token)).length;
          const uniqueTokenMatch = allowSingleTokenFallback && titleTokens.includes(segmentTokens[0]);
          return { issue, overlap, uniqueTokenMatch };
        })
        .filter(item => item.overlap > 0 || item.uniqueTokenMatch);
      if (scored.length === 0)
        continue;
      const bestOverlap = Math.max(...scored.map(item => item.overlap));
      // Strong matches (>= 2 shared tokens): accept every issue tied for best.
      for (const item of scored) {
        if (bestOverlap >= 2) {
          if (item.overlap === bestOverlap) {
            inferredFromTitles.push(item.issue.number);
          }
        }
      }
      // Weak matches: accept the single-token fallback only when exactly one
      // title contains the token, to avoid ambiguous guesses.
      if (bestOverlap < 2 && allowSingleTokenFallback) {
        const exactTokenMatches = scored.filter(item => item.uniqueTokenMatch);
        if (exactTokenMatches.length === 1) {
          inferredFromTitles.push(exactTokenMatches[0].issue.number);
        }
      }
    }
  }
  // Merge both sources, dedupe, and return in ascending numeric order.
  return [...new Set([...numbers, ...inferredFromTitles])].sort((a, b) => a - b);
}
182
/**
 * Derives a kebab-case feature name from an issue title: strip issue refs and
 * punctuation, drop stop words, and keep at most the first four useful words.
 * Falls back to "feature" when nothing usable remains.
 */
export function deriveFeatureNameFromTitle(title) {
  const cleaned = title
    .toLowerCase()
    .replace(/#[0-9]+/g, ' ')
    .replace(/[^a-z0-9\s-]/g, ' ');
  const keptWords = [];
  for (const word of cleaned.split(/\s+/)) {
    if (word === '' || STOP_WORDS.has(word))
      continue;
    keptWords.push(word);
    if (keptWords.length === 4)
      break;
  }
  return keptWords.length > 0 ? keptWords.join('-') : 'feature';
}
193
// Maps PM labels to a priority tier; highest tier wins, 'unlabeled' otherwise.
function derivePriorityTier(labels) {
  for (const tier of ['P0', 'P1', 'P2']) {
    if (labels.includes(tier))
      return tier;
  }
  return 'unlabeled';
}
// Score contribution of the PM priority tier.
function priorityWeight(tier) {
  const weights = { P0: 300, P1: 200, P2: 100 };
  return weights[tier] ?? 0;
}
// Score contribution for retrying failed/partial work plus resuming
// already-started work; the two bonuses stack.
function attemptWeight(attemptState, recommendation) {
  let weight = 0;
  if (attemptState === 'failure' || attemptState === 'partial')
    weight += 200;
  if (recommendation === 'resume_implementation' || recommendation === 'resume_pr_phase')
    weight += 160;
  else if (recommendation === 'generate_plan')
    weight += 80;
  return weight;
}
// Score contribution of the issue's current actionability; all blocked
// variants contribute 0.
function actionabilityWeight(actionability) {
  const weights = { housekeeping: 1000, waiting_pr: 900, ready: 800 };
  return weights[actionability] ?? 0;
}
// Rewards candidates that already have groundwork: 75 points apiece for an
// existing branch, an existing plan, and commits ahead of the base.
function existingWorkWeight(candidate) {
  const state = candidate.featureState;
  if (!state)
    return 0;
  const signals = [
    state.hasExistingBranch,
    state.hasPlan,
    (state.commitsAhead ?? 0) > 0,
  ];
  return signals.filter(Boolean).length * 75;
}
// Older (lower-numbered) issues score higher; floors at 0 past #1000.
function issueNumberWeight(issueNumber) {
  return Math.max(0, 1000 - issueNumber);
}
246
// Classifies an issue's most recent attempt from stored memories.
// Tag precedence when several are present: failure > partial > success > skipped.
function getAttemptState(memories, issueNumber) {
  const entry = memories.find((item) => item.relatedIssue === issueNumber);
  if (!entry)
    return 'never_tried';
  const tags = new Set(entry.tags ?? []);
  for (const state of ['failure', 'partial', 'success', 'skipped']) {
    if (tags.has(state))
      return state;
  }
  return 'never_tried';
}
261
// Summarizes persisted AI analysis into a short plain-text context block for
// prompts; returns a fixed placeholder string when nothing useful is persisted.
function buildCodebaseContext(persisted) {
  const analysis = persisted?.aiAnalysis;
  if (!analysis)
    return 'No persisted codebase context available.';
  const projectContext = analysis.projectContext;
  const lines = [];
  if (projectContext?.entryPoints?.length) {
    lines.push(`Entry points: ${projectContext.entryPoints.join(', ')}`);
  }
  const keyDirectories = projectContext?.keyDirectories;
  if (keyDirectories && Object.keys(keyDirectories).length > 0) {
    const rendered = Object.entries(keyDirectories)
      .map(([directory, description]) => `${directory} (${description})`)
      .join(', ');
    lines.push(`Key directories: ${rendered}`);
  }
  // Cap guideline/practice lists at 8 entries to keep the prompt short.
  if (analysis.implementationGuidelines?.length) {
    lines.push(`Implementation guidelines: ${analysis.implementationGuidelines.slice(0, 8).join('; ')}`);
  }
  if (analysis.technologyPractices?.practices?.length) {
    lines.push(`Technology practices: ${analysis.technologyPractices.practices.slice(0, 8).join('; ')}`);
  }
  return lines.length > 0 ? lines.join('\n') : 'No persisted codebase context available.';
}
280
// Tokenizes text for overlap comparison: split camelCase boundaries, lowercase,
// strip issue refs and punctuation, then drop short (<= 2 chars) and stop-word tokens.
function normalizeTokens(text) {
  const flattened = text
    .replace(/([a-z])([A-Z])/g, '$1 $2')
    .toLowerCase()
    .replace(/#[0-9]+/g, ' ')
    .replace(/[^a-z0-9\s]/g, ' ');
  return flattened
    .split(/\s+/)
    .filter(word => word.length > 2 && !STOP_WORDS.has(word));
}
290
// Counts distinct normalized tokens the two texts share; 0 when either side
// has no usable tokens.
function tokenOverlap(a, b) {
  const left = new Set(normalizeTokens(a));
  const right = new Set(normalizeTokens(b));
  if (left.size === 0 || right.size === 0)
    return 0;
  return [...left].filter(token => right.has(token)).length;
}
302
// Picks up to three longer (>= 5 chars) tokens common to both texts, in the
// issue text's first-occurrence order — human-readable evidence of overlap.
function sharedSignals(issueText, peerText) {
  const peerTokens = new Set(normalizeTokens(peerText));
  const shared = [];
  for (const token of new Set(normalizeTokens(issueText))) {
    if (shared.length === 3)
      break;
    if (token.length >= 5 && peerTokens.has(token))
      shared.push(token);
  }
  return shared;
}
311
// True when the peer issue defines runtime-level groundwork (contracts,
// interfaces, protocols) that other work typically builds on.
function peerHasRuntimeFoundation(peerText) {
  const runtimePattern = /\b(runtime|contract|interface|protocol)\b/i;
  return runtimePattern.test(peerText);
}
// True when the peer issue provides API/infrastructure groundwork.
function peerHasApiFoundation(peerText) {
  const apiPattern = /\b(schema|api|backend|storage|infrastructure|foundation|setup|config)\b/i;
  return apiPattern.test(peerText);
}
// True when the issue reads like consumer-side work (UI, integration, commands)
// that usually sits on top of groundwork.
function issueLooksLikeConsumerWork(issueText) {
  const consumerPattern = /\b(ui|screen|render|surface|workflow|integration|consume|page|command)\b/i;
  return consumerPattern.test(issueText);
}
// True when the issue's own wording implies something must land first.
function issueHasPrerequisiteLanguage(issueText) {
  const prerequisitePattern = /\b(after|once|using|reuse|extend|build on|depends on|blocked by|requires)\b/i;
  return prerequisitePattern.test(issueText);
}
// True when the peer issue is ancillary tooling (logging, telemetry, etc.)
// that should never be treated as a hard prerequisite.
function peerIsAncillary(peerText) {
  const ancillaryPattern = /\b(debug|logging|logger|observability|instrumentation|tracking|breadcrumb|sentry|telemetry|diagnos|monitoring)\b/i;
  return ancillaryPattern.test(peerText);
}
// True when the peer issue explicitly declares that it must land first.
function peerDeclaresOrdering(peerText) {
  const orderingPattern = /\b(protocol-first|foundation-first|should land before|must land before|before ui|before integration|prerequisite|groundwork)\b/i;
  return orderingPattern.test(peerText);
}
329
/**
 * Decides whether an inferred edge onto `peer` may be a *hard* block (one that
 * actually gates scheduling) rather than a soft hint. Requirements:
 * the peer is older (lower-numbered), not ancillary tooling, looks
 * foundational, shares at least 2 tokens with the issue, and the issue itself
 * shows a need for foundations (consumer/eval/rollout/hybrid work or explicit
 * prerequisite wording).
 */
function qualifiesForHardInferredBlock(issue, peer, overlap) {
  // Only older issues can hard-block newer ones; never the reverse.
  if (peer.issueNumber >= issue.issueNumber)
    return false;
  const issueText = `${issue.title}\n${issue.body}`;
  const peerText = `${peer.title}\n${peer.body}`;
  // Logging/telemetry-style peers never gate other work.
  if (peerIsAncillary(peerText))
    return false;
  const runtimeFoundation = peerHasRuntimeFoundation(peerText);
  const apiFoundation = peerHasApiFoundation(peerText);
  const foundational = runtimeFoundation || apiFoundation || peerLooksFoundational(peer) || peerDeclaresOrdering(peerText);
  // Require both a foundational peer and meaningful textual overlap.
  if (!foundational || overlap < 2)
    return false;
  // Work categories that typically depend on groundwork landing first.
  const evalWork = /\b(eval|evaluation|benchmark|baseline|metric|report|harness)\b/i.test(issueText);
  const rolloutWork = /\b(rollout|rollback|flag|fallback|kill switch|control)\b/i.test(issueText);
  const hybridWork = /\b(hybrid|handoff|routing|phase)\b/i.test(issueText);
  const issueNeedsFoundation = issueLooksLikeConsumerWork(issueText) || issueHasPrerequisiteLanguage(issueText) || evalWork || rolloutWork || hybridWork;
  return issueNeedsFoundation;
}
347
// Applies the hard-block policy to an inferred edge: a "high"-confidence edge
// that fails the structural bar is softened to medium/non-blocking; otherwise
// `blocking` is recomputed so only qualifying high-confidence edges gate work.
function normalizeInferredEdge(issue, peer, edge) {
  const issueText = `${issue.title}\n${issue.body}`;
  const peerText = `${peer.title}\n${peer.body}`;
  const overlap = tokenOverlap(issueText, peerText);
  const canHardBlock = qualifiesForHardInferredBlock(issue, peer, overlap);
  const claimsHigh = edge.confidence === 'high';
  if (claimsHigh && !canHardBlock) {
    // Too speculative for a hard block: downgrade and never gate on it.
    return { ...edge, confidence: 'medium', blocking: false };
  }
  return { ...edge, blocking: claimsHigh && canHardBlock };
}
364
/**
 * Produces a human-readable one-line justification for a heuristic (fallback)
 * dependency edge onto `peer`. Checks are ordered most-specific first, so the
 * message reflects the strongest matching signal; `shared` tokens are used as
 * a last-resort specific message before the generic fallback.
 */
function describeFallbackDependency(issue, peer, shared) {
  const issueText = `${issue.title}\n${issue.body}`;
  const peerText = `${peer.title}\n${peer.body}`;
  const runtimeFoundation = peerHasRuntimeFoundation(peerText);
  // Same work-category regexes as qualifiesForHardInferredBlock.
  const evalWork = /\b(eval|evaluation|benchmark|baseline|metric|report|harness)\b/i.test(issueText);
  const rolloutWork = /\b(rollout|rollback|flag|fallback|kill switch|control)\b/i.test(issueText);
  const hybridWork = /\b(hybrid|handoff|routing|phase)\b/i.test(issueText);
  const consumerWork = issueLooksLikeConsumerWork(issueText);
  const apiFoundation = peerHasApiFoundation(peerText);
  // Runtime-foundation peers get the most specific phrasing, split by the
  // kind of work the current issue represents.
  if (runtimeFoundation && evalWork) {
    return `#${peer.issueNumber} defines the native runtime contract that this evaluation work should measure against first.`;
  }
  if (runtimeFoundation && rolloutWork) {
    return `#${peer.issueNumber} establishes the native runtime foundation that this rollout work depends on.`;
  }
  if (runtimeFoundation && hybridWork) {
    return `#${peer.issueNumber} defines the runtime contract that this hybrid execution work builds on.`;
  }
  if (runtimeFoundation) {
    return `#${peer.issueNumber} defines the runtime contract that this issue likely needs first.`;
  }
  if (apiFoundation && consumerWork) {
    return `#${peer.issueNumber} provides the foundational API or infrastructure that this issue consumes.`;
  }
  if (shared.length > 0) {
    return `#${peer.issueNumber} appears to be the more foundational issue in the same subsystem (${shared.join(', ')}).`;
  }
  return `#${peer.issueNumber} appears to be the more foundational issue that this work builds on.`;
}
393
/**
 * Heuristic (model-free) dependency inference: for each peer that shares
 * vocabulary with `issue` and looks foundational, emit an inferred edge.
 * Only older (lower-numbered) peers can block. Each edge is passed through
 * normalizeInferredEdge so the hard-block policy is applied uniformly.
 */
function buildFallbackInferredEdges(issue, peers) {
  const edges = [];
  const issueText = `${issue.title}\n${issue.body}`;
  for (const peer of peers) {
    if (peer.issueNumber === issue.issueNumber)
      continue;
    const peerText = `${peer.title}\n${peer.body}`;
    const overlap = tokenOverlap(issueText, peerText);
    // No shared vocabulary at all: skip, regardless of how foundational the peer is.
    if (overlap === 0)
      continue;
    // Broader foundational pattern than peerLooksFoundational (adds data/core terms).
    const peerFoundation = /\b(core|base|foundation|scaffold|setup|config|schema|api|storage|data|backend|infrastructure)\b/i.test(peerText);
    const issueConsumer = issueLooksLikeConsumerWork(issueText);
    const issuePrereqLanguage = issueHasPrerequisiteLanguage(issueText);
    const runtimeFoundation = peerHasRuntimeFoundation(peerText);
    // Candidate blocking edge: older foundational peer (or explicit prerequisite
    // wording in the issue) with at least one shared token.
    const blocking = peer.issueNumber < issue.issueNumber && (peerFoundation || runtimeFoundation || issuePrereqLanguage) && overlap >= 1;
    if (!blocking)
      continue;
    // High confidence needs both sides of the pattern (foundational peer AND
    // consumer/prereq issue) plus stronger overlap; otherwise medium.
    const confidence = ((peerFoundation || runtimeFoundation) && (issueConsumer || issuePrereqLanguage) && overlap >= 2) ? 'high' : 'medium';
    const shared = sharedSignals(issueText, peerText);
    edges.push(normalizeInferredEdge(issue, peer, {
      sourceIssue: issue.issueNumber,
      targetIssue: peer.issueNumber,
      kind: 'inferred',
      confidence,
      blocking: confidence === 'high',
      evidence: {
        summary: describeFallbackDependency(issue, peer, shared),
        backlogSignals: [`Issue #${peer.issueNumber} appears more foundational and lower-numbered.`],
      },
    }));
  }
  return edges;
}
426
// Merges enriched candidates with bare context issues into one backlog list.
// Enriched candidates win on collision; context-only issues get neutral
// placeholder state (idle phase, never tried, fresh-start feature state) so
// downstream scoring and inference can treat every entry uniformly.
function buildInferenceBacklog(candidates, contextIssues) {
  const byNumber = new Map(candidates.map(candidate => [candidate.issueNumber, candidate]));
  for (const issue of contextIssues) {
    if (byNumber.has(issue.number))
      continue;
    const labels = issue.labels ?? [];
    byNumber.set(issue.number, {
      issueNumber: issue.number,
      title: issue.title,
      body: '',
      labels,
      createdAt: issue.createdAt,
      phase: 'idle',
      priorityTier: derivePriorityTier(labels),
      dependsOn: [],
      explicitDependencyEdges: [],
      inferredDependencyEdges: [],
      attemptState: 'never_tried',
      featureState: {
        recommendation: 'start_fresh',
        hasExistingBranch: false,
        commitsAhead: 0,
        hasPlan: false,
        hasOpenPr: false,
      },
      loopFeatureName: deriveFeatureNameFromTitle(issue.title),
      recommendation: 'start_fresh',
    });
  }
  return [...byNumber.values()];
}
459
/**
 * Asks the configured model which peers are prerequisites of `issue`, using a
 * conservative structured-output prompt. Returns normalized inferred edges;
 * returns [] when no model is configured, there are no peers, or the call fails
 * (callers fall back to the heuristic edges).
 */
async function inferDependenciesWithModel(config, issue, peers, codebaseContext) {
  if (!config.modelId || peers.length === 0)
    return [];
  const { generateObject } = getTracedAI();
  // Compact per-peer summary; bodies are truncated to keep the prompt bounded.
  const peerSummary = peers.map((peer) => (`#${peer.issueNumber}: ${peer.title}\nLabels: ${peer.labels.join(', ') || 'none'}\nBody: ${peer.body.slice(0, 400)}`)).join('\n\n');
  try {
    const { object } = await generateObject({
      model: config.model,
      schema: inferredDependencySchema,
      system: `You infer issue dependencies conservatively for a backlog orchestrator.

Rules:
- Only return a dependency if the current issue truly needs another issue to be completed first.
- Favor infra-before-feature, schema/API-before-consumer, and groundwork-before-integration patterns.
- Use backlog and codebase context as evidence.
- Do not return duplicates or speculative edges.`,
      prompt: `Codebase context:\n${codebaseContext}

Current issue:
#${issue.issueNumber}: ${issue.title}
Labels: ${issue.labels.join(', ') || 'none'}
Body:
${issue.body.slice(0, 1200)}

Candidate prerequisite issues:
${peerSummary}

Return only the issues that must or likely should come first.`,
      // Reasoning models reject explicit temperature; others get a low one
      // for determinism.
      ...(isReasoningModel(config.modelId) ? {} : { temperature: 0.1 }),
      experimental_telemetry: {
        isEnabled: true,
        metadata: {
          agent: 'agent-dependency-inference',
          issueNumber: String(issue.issueNumber),
        },
      },
    });
    // Keep only edges that target a known peer, then normalize each through
    // the hard-block policy.
    return object.edges
      .filter(edge => peers.some(peer => peer.issueNumber === edge.targetIssue))
      .map((edge) => {
        const peer = peers.find(candidate => candidate.issueNumber === edge.targetIssue);
        if (!peer)
          return null;
        return normalizeInferredEdge(issue, peer, {
          sourceIssue: issue.issueNumber,
          targetIssue: edge.targetIssue,
          kind: 'inferred',
          confidence: edge.confidence,
          blocking: edge.confidence === 'high',
          evidence: {
            summary: edge.evidence,
            codebaseSignals: codebaseContext !== 'No persisted codebase context available.'
              ? ['Persisted project context was used during dependency inference.']
              : undefined,
          },
        });
      })
      .filter((edge) => edge != null);
  }
  catch {
    // Best-effort: model/validation failures degrade to heuristic-only edges.
    // NOTE(review): consider emitting a backlog event here so failures are visible.
    return [];
  }
}
522
// Builds inferred dependency edges for `issue`: shortlist the most related
// peers (by token overlap, then issue number), ask the model when configured,
// and always blend in heuristic fallback edges, deduping to the strongest
// edge per (source, target, kind).
async function inferDependencyEdges(config, issue, backlog, codebaseContext) {
  const issueText = `${issue.title}\n${issue.body}`;
  const ranked = [];
  for (const peer of backlog) {
    if (peer.issueNumber === issue.issueNumber)
      continue;
    const overlap = tokenOverlap(issueText, `${peer.title}\n${peer.body}`);
    if (overlap > 0 || peerLooksFoundational(peer))
      ranked.push({ peer, overlap });
  }
  ranked.sort((a, b) => (b.overlap - a.overlap) || (a.peer.issueNumber - b.peer.issueNumber));
  // NOTE(review): shortlist is hard-coded to 6 while MAX_MODEL_INFERENCE_CANDIDATES
  // is 12 and unused here — confirm which limit is intended.
  const peers = ranked.slice(0, 6).map(item => item.peer);
  const modelEdges = await inferDependenciesWithModel(config, issue, peers, codebaseContext);
  // When the model returns nothing this reduces to just the fallback edges.
  return dedupeEdges([...buildFallbackInferredEdges(issue, peers), ...modelEdges]);
}
539
// True when a peer issue reads like groundwork (core/base/schema/API/etc.)
// that other issues are likely to build on.
function peerLooksFoundational(peer) {
  const foundationPattern = /\b(core|base|foundation|setup|scaffold|schema|api|config|storage|backend|infrastructure)\b/i;
  return foundationPattern.test(`${peer.title}\n${peer.body}`);
}
542
/**
 * Collapses duplicate edges keyed by (sourceIssue, targetIssue, kind), keeping
 * the strongest one. Strength = confidence weight (low 1 / medium 2 / high 3)
 * plus a +10 bonus for blocking edges, so any blocking edge beats any
 * non-blocking one; on exact ties the later edge wins.
 */
export function dedupeEdges(edges) {
  const confidenceWeight = { low: 1, medium: 2, high: 3 };
  const strength = (edge) => confidenceWeight[edge.confidence] + (edge.blocking ? 10 : 0);
  const deduped = new Map();
  for (const edge of edges) {
    const key = `${edge.sourceIssue}:${edge.targetIssue}:${edge.kind}`;
    const existing = deduped.get(key);
    if (!existing || strength(edge) >= strength(existing)) {
      deduped.set(key, edge);
    }
  }
  return [...deduped.values()];
}
560
// Depth-first cycle detection over the dependency adjacency map.
// `visiting` is the current recursion stack (revisiting a member means a
// cycle); `visited` caches nodes already proven cycle-free. Both sets are
// mutated by the traversal so repeated calls can share them.
function detectCycleForIssue(issueNumber, adjacency, visiting, visited) {
  if (visiting.has(issueNumber))
    return true;
  if (visited.has(issueNumber))
    return false;
  visiting.add(issueNumber);
  for (const dependency of adjacency.get(issueNumber) ?? []) {
    if (detectCycleForIssue(dependency, adjacency, visiting, visited))
      return true;
  }
  visiting.delete(issueNumber);
  visited.add(issueNumber);
  return false;
}
575
// Translates a scored candidate into human-readable selection reasons, in a
// fixed order: scope expansion, housekeeping, retry, priority, existing work,
// explicit dependencies, inferred dependencies, then active blockers.
function buildSelectionReasons(candidate) {
  const reasons = [];
  if (candidate.scopeOrigin === 'dependency' && candidate.requestedBy?.length) {
    const requesters = candidate.requestedBy.map(issueNumber => `#${issueNumber}`).join(', ');
    reasons.push({
      kind: 'scope_expansion',
      message: `Pulled into scope as a prerequisite for ${requesters}.`,
    });
  }
  if (candidate.actionability === 'housekeeping') {
    reasons.push({ kind: 'housekeeping', message: 'Issue appears already shipped and only needs housekeeping.' });
  }
  if (candidate.attemptState === 'failure' || candidate.attemptState === 'partial') {
    reasons.push({ kind: 'retry', message: `Previous outcome was ${candidate.attemptState}; resume is preferred over starting new work.` });
  }
  if (candidate.priorityTier && candidate.priorityTier !== 'unlabeled') {
    reasons.push({ kind: 'priority', message: `PM priority ${candidate.priorityTier}.` });
  }
  const hasGroundwork = (candidate.featureState?.hasExistingBranch ?? false)
    || (candidate.featureState?.hasPlan ?? false);
  if (hasGroundwork) {
    reasons.push({ kind: 'existing_work', message: 'Existing branch or plan work makes this item cheaper to continue.' });
  }
  for (const edge of candidate.explicitDependencyEdges) {
    reasons.push({
      kind: 'explicit_dependency',
      message: `Explicitly depends on #${edge.targetIssue}.`,
      issueNumber: edge.targetIssue,
      confidence: edge.confidence,
    });
  }
  for (const edge of candidate.inferredDependencyEdges) {
    reasons.push({
      kind: 'inferred_dependency',
      message: edge.evidence.summary,
      issueNumber: edge.targetIssue,
      confidence: edge.confidence,
    });
  }
  for (const blocked of candidate.blockedBy ?? []) {
    reasons.push({
      kind: 'blocked',
      message: blocked.reason,
      issueNumber: blocked.issueNumber,
      confidence: blocked.confidence,
    });
  }
  return reasons;
}
621
// Combines the weighted signals into a single comparable score. Each component
// is kept on the result for transparency in events/logs; `total` is their sum.
function buildScore(candidate) {
  const components = {
    actionability: actionabilityWeight(candidate.actionability ?? 'blocked_dependency'),
    retryResume: attemptWeight(candidate.attemptState ?? 'never_tried', candidate.recommendation),
    priority: priorityWeight(candidate.priorityTier ?? 'unlabeled'),
    // Each medium-confidence inferred dependency nudges the issue later (-50).
    dependencyHint: -50 * candidate.inferredDependencyEdges.filter(edge => edge.confidence === 'medium').length,
    existingWork: existingWorkWeight(candidate),
    issueNumber: issueNumberWeight(candidate.issueNumber),
  };
  const total = Object.values(components).reduce((sum, value) => sum + value, 0);
  return { ...components, total };
}
638
// Determines how actionable a candidate is right now. PR-stage recommendations
// short-circuit to waiting/housekeeping; otherwise every enforced dependency
// becomes a blocker, with dependencies outside the requested scope called out
// separately. Side effect: records the blockers on candidate.blockedBy.
function evaluateActionability(candidate, enforcedDependencyMap, issueScope) {
  switch (candidate.recommendation) {
    case 'pr_exists_open':
    case 'linked_pr_open':
      return 'waiting_pr';
    case 'pr_merged':
    case 'linked_pr_merged':
      return 'housekeeping';
    default:
      break;
  }
  const blockers = [];
  for (const edge of enforcedDependencyMap.get(candidate.issueNumber) ?? []) {
    const outOfScope = issueScope && !issueScope.has(edge.targetIssue);
    blockers.push({
      issueNumber: edge.targetIssue,
      reason: outOfScope
        ? `Depends on out-of-scope issue #${edge.targetIssue}.`
        : `${edge.kind === 'explicit' ? 'Explicit' : 'Inferred'} dependency on #${edge.targetIssue}.`,
      confidence: edge.confidence,
    });
  }
  candidate.blockedBy = blockers;
  // Out-of-scope blockers take precedence over ordinary dependency blocks.
  if (blockers.some(blocker => blocker.reason.includes('out-of-scope'))) {
    return 'blocked_out_of_scope';
  }
  return blockers.length > 0 ? 'blocked_dependency' : 'ready';
}
672
// Projects a full scheduling candidate down to the serializable issue-state
// shape exposed to callers/events (drops internals such as the raw body text).
function toIssueState(candidate) {
  const {
    issueNumber, title, labels, phase, scopeOrigin, requestedBy, actionability,
    priorityTier, dependsOn, inferredDependsOn, blockedBy, recommendation,
    selectionReasons, score, attemptState, featureState, loopFeatureName,
  } = candidate;
  return {
    issueNumber, title, labels, phase, scopeOrigin, requestedBy, actionability,
    priorityTier, dependsOn, inferredDependsOn, blockedBy, recommendation,
    selectionReasons, score, attemptState, featureState, loopFeatureName,
  };
}
693
/**
 * When the run is limited to specific issues (config.issues), expands that set
 * to include their open transitive dependencies via a BFS over dependency
 * hints. Returns the effective scope (undefined when unscoped), a record of
 * which pulled-in issues were requested by which seeds, and any fetch errors.
 */
async function expandIssueScope(config, listedIssues, cache) {
  // No explicit issue list = unscoped run; nothing to expand.
  if (!config.issues?.length) {
    return { effectiveScope: undefined, expansions: [], errors: [] };
  }
  const effectiveScope = new Set();
  const expansions = [];
  const errors = [];
  // Lightweight title index used for title-based dependency matching; grows
  // as off-listing issues are fetched.
  const backlogSummaries = listedIssues.map(issue => ({ number: issue.number, title: issue.title }));
  const queue = [];
  // Seed the BFS with the requested issues; fetch any that were not in the
  // listing and drop the ones that are closed or unreachable.
  for (const issueNumber of config.issues) {
    if (backlogSummaries.some(issue => issue.number === issueNumber)) {
      effectiveScope.add(issueNumber);
      queue.push({ issueNumber, requestedBy: issueNumber });
      continue;
    }
    const detail = await getIssueDetail(config, issueNumber, cache);
    if (!detail) {
      errors.push(`Failed to fetch issue #${issueNumber} from GitHub while expanding dependencies. Check gh connectivity.`);
      continue;
    }
    if (detail.state !== 'open')
      continue;
    backlogSummaries.push({ number: detail.number, title: detail.title });
    effectiveScope.add(issueNumber);
    queue.push({ issueNumber, requestedBy: issueNumber });
  }
  // BFS: each queue entry remembers which originally-requested issue caused it
  // to be pulled in (requestedBy).
  while (queue.length > 0) {
    const current = queue.shift();
    if (!current)
      continue;
    const detail = await getIssueDetail(config, current.issueNumber, cache);
    if (!detail) {
      errors.push(`Failed to fetch issue #${current.issueNumber} from GitHub while expanding dependencies. Check gh connectivity.`);
      continue;
    }
    // For seed issues (self-requested), skip dependency expansion entirely
    // when the issue is already at a PR stage — its prerequisites are moot.
    if (current.issueNumber === current.requestedBy) {
      const featureState = await getFeatureState(config, current.issueNumber, deriveFeatureNameFromTitle(detail.title), cache);
      if (featureState.recommendation === 'pr_exists_open'
        || featureState.recommendation === 'linked_pr_open'
        || featureState.recommendation === 'pr_merged'
        || featureState.recommendation === 'linked_pr_merged') {
        continue;
      }
    }
    const dependencyNumbers = extractDependencyHints(detail.body ?? '', backlogSummaries, current.issueNumber);
    for (const dependencyNumber of dependencyNumbers) {
      // Already in scope: nothing to record (visited-set of the BFS).
      if (effectiveScope.has(dependencyNumber))
        continue;
      // Unknown issue: fetch it, skip closed/unreachable ones, and make its
      // title available for later title-based matching.
      if (!backlogSummaries.some(issue => issue.number === dependencyNumber)) {
        const dependencyDetail = await getIssueDetail(config, dependencyNumber, cache);
        if (!dependencyDetail) {
          errors.push(`Failed to fetch dependency issue #${dependencyNumber} from GitHub. Check gh connectivity.`);
          continue;
        }
        if (dependencyDetail.state !== 'open')
          continue;
        backlogSummaries.push({ number: dependencyNumber, title: dependencyDetail.title });
      }
      effectiveScope.add(dependencyNumber);
      // Record (or extend) the expansion entry; requestedBy stays sorted for
      // stable, readable output.
      const existingExpansion = expansions.find(expansion => expansion.issueNumber === dependencyNumber);
      if (existingExpansion) {
        if (!existingExpansion.requestedBy.includes(current.requestedBy)) {
          existingExpansion.requestedBy.push(current.requestedBy);
          existingExpansion.requestedBy.sort((a, b) => a - b);
        }
      }
      else {
        expansions.push({
          issueNumber: dependencyNumber,
          requestedBy: [current.requestedBy],
        });
      }
      // Continue expanding transitively, attributing to the same seed.
      queue.push({
        issueNumber: dependencyNumber,
        requestedBy: current.requestedBy,
      });
    }
  }
  return { effectiveScope, expansions, errors };
}
773
/**
 * Resolve every issue number in `issueScope` to an issue summary record.
 *
 * When no scope is given, the listing is returned untouched. Otherwise each
 * scoped number is served from the existing listing when possible and fetched
 * individually from GitHub when not. Fetch failures are reported as error
 * strings; closed issues are dropped without an error.
 *
 * @returns {Promise<{issues: Array<object>, errors: string[]}>}
 */
async function hydrateScopedIssues(config, listedIssues, issueScope, cache) {
    // No scope: the listed issues already are the full working set.
    if (!issueScope) {
        return { issues: listedIssues, errors: [] };
    }
    const summaryByNumber = new Map();
    for (const issue of listedIssues) {
        summaryByNumber.set(issue.number, issue);
    }
    const issues = [];
    const errors = [];
    for (const issueNumber of issueScope) {
        const known = summaryByNumber.get(issueNumber);
        if (known !== undefined) {
            // Already present in the listing; reuse the summary as-is.
            issues.push(known);
            continue;
        }
        // Scoped issue was not part of the listing; fetch it individually.
        const detail = await getIssueDetail(config, issueNumber, cache);
        if (!detail) {
            errors.push(`Failed to fetch requested issue #${issueNumber} from GitHub. Check gh connectivity.`);
            continue;
        }
        // Silently skip anything that is no longer open.
        if (detail.state !== 'open') {
            continue;
        }
        issues.push({
            number: detail.number,
            title: detail.title,
            state: detail.state,
            labels: detail.labels,
            createdAt: detail.createdAt,
        });
    }
    return { issues, errors };
}
802
/**
 * Build the ranked scheduler backlog from open GitHub issues.
 *
 * Pipeline (each phase emits `backlog_progress` / `backlog_timing` events):
 *   1. listing             — list open issues, optionally filtered by labels
 *   2. scope_expansion     — expand explicitly requested issues with their dependencies
 *   3. hydration           — resolve every in-scope issue to a summary record
 *   4. enrichment          — fetch details, feature state, and explicit dependency hints
 *   5. dependency_inference— model-based dependency edges for top-ranked candidates
 *   6. ranking             — score and sort; split actionable vs blocked
 *
 * @param {object} config - scheduler config; reads `labels` and `issues`
 * @param {object} store  - memory store; read for `work_log` attempt history
 * @param {object} cache  - per-run cache (see createSchedulerRunCache)
 * @returns {Promise<{queue, actionable, blocked, expansions, errors}>}
 */
export async function buildRankedBacklog(config, store, cache) {
    // Label filter is expressed as a GitHub search string: "label:a label:b".
    const search = config.labels?.length
        ? config.labels.map(label => `label:${label}`).join(' ')
        : undefined;
    const listStart = nowMs();
    emitBacklogEvent(config, {
        type: 'backlog_progress',
        phase: 'listing',
        message: 'Listing open GitHub issues.',
    });
    const listed = await discoverListedIssues(config, search, cache);
    // When both explicit issue numbers AND labels are configured, expansion uses
    // the unfiltered listing — presumably so dependencies outside the label
    // filter can still be discovered (NOTE(review): confirm intent).
    const expansionSeed = config.issues?.length && config.labels?.length
        ? await discoverListedIssues(config, undefined, cache, 'listedUnfiltered')
        : listed;
    emitBacklogEvent(config, {
        type: 'backlog_timing',
        phase: 'listing',
        durationMs: nowMs() - listStart,
        count: (listed.issues ?? []).length,
    });
    const scopeStart = nowMs();
    emitBacklogEvent(config, {
        type: 'backlog_progress',
        phase: 'scope_expansion',
        message: config.issues?.length
            ? `Resolving scoped dependencies for ${config.issues.length} requested issue(s).`
            : 'No scoped dependency expansion required.',
    });
    // issueScope is undefined when no explicit issues were requested.
    const { effectiveScope: issueScope, expansions, errors: scopeErrors } = await expandIssueScope(config, expansionSeed.issues ?? [], cache);
    emitBacklogEvent(config, {
        type: 'backlog_timing',
        phase: 'scope_expansion',
        durationMs: nowMs() - scopeStart,
        count: expansions.length,
    });
    const hydrationStart = nowMs();
    emitBacklogEvent(config, {
        type: 'backlog_progress',
        phase: 'hydration',
        message: issueScope
            ? `Hydrating ${issueScope.size} scoped issue(s).`
            : `Hydrating up to ${(listed.issues ?? []).length} listed issue(s).`,
        total: issueScope?.size ?? (listed.issues ?? []).length,
    });
    const { issues: scopedIssues, errors: hydrateErrors } = await hydrateScopedIssues(config, listed.issues ?? [], issueScope, cache);
    emitBacklogEvent(config, {
        type: 'backlog_timing',
        phase: 'hydration',
        durationMs: nowMs() - hydrationStart,
        count: scopedIssues.length,
    });
    // Unscoped + labeled runs also need the unfiltered listing as inference
    // context (may be served from the 'listedUnfiltered' cache slot).
    const inferenceSeed = !issueScope && config.labels?.length
        ? await discoverListedIssues(config, undefined, cache, 'listedUnfiltered')
        : listed;
    // Collect listing errors, de-duplicating identical messages across seeds.
    const errors = [
        ...(listed.error ? [listed.error] : []),
        ...(expansionSeed.error && expansionSeed.error !== listed.error ? [expansionSeed.error] : []),
        ...(inferenceSeed.error && inferenceSeed.error !== listed.error && inferenceSeed.error !== expansionSeed.error ? [inferenceSeed.error] : []),
        ...scopeErrors,
        ...hydrateErrors,
    ];
    // contextIssues: union of the visible listing and the hydrated scoped
    // issues, de-duplicated by number (scoped entries win on conflict).
    const contextIssues = issueScope
        ? (() => {
            const visibleIssues = config.issues?.length && config.labels?.length
                ? (expansionSeed.issues ?? [])
                : (listed.issues ?? []);
            const byNumber = new Map();
            for (const issue of visibleIssues) {
                byNumber.set(issue.number, issue);
            }
            for (const issue of scopedIssues) {
                byNumber.set(issue.number, issue);
            }
            return [...byNumber.values()];
        })()
        : (() => {
            const visibleIssues = config.labels?.length
                ? (inferenceSeed.issues ?? [])
                : (listed.issues ?? []);
            const byNumber = new Map();
            for (const issue of visibleIssues) {
                byNumber.set(issue.number, issue);
            }
            for (const issue of scopedIssues) {
                byNumber.set(issue.number, issue);
            }
            return [...byNumber.values()];
        })();
    // baseIssues are the candidates actually enriched: the scoped set when a
    // scope exists, otherwise the context filtered back down to the labels.
    const baseIssues = issueScope
        ? scopedIssues
        : contextIssues.filter(issue => {
            if (config.labels?.length) {
                return config.labels.some(label => issue.labels.includes(label));
            }
            return true;
        });
    const memories = await store.read({ type: 'work_log', limit: 200 });
    const persistedContext = await getPersistedContext(config, cache);
    const codebaseContext = buildCodebaseContext(persistedContext);
    const enrichmentStart = nowMs();
    emitBacklogEvent(config, {
        type: 'backlog_progress',
        phase: 'enrichment',
        message: `Enriching ${baseIssues.length} issue(s) with details and feature state.`,
        total: baseIssues.length,
    });
    const enrichmentErrors = [];
    let enrichedCount = 0;
    // Enrich one issue into a full backlog candidate, or null on fetch failure.
    // Runs concurrently (ENRICHMENT_CONCURRENCY); mutates the shared
    // enrichmentErrors / enrichedCount accumulators from each worker.
    const enrichIssueCandidate = async (issue) => {
        const detail = await getIssueDetail(config, issue.number, cache);
        if (!detail) {
            enrichmentErrors.push(`Failed to fetch issue #${issue.number} from GitHub while enriching backlog. Check gh connectivity.`);
            return null;
        }
        const loopFeatureName = deriveFeatureNameFromTitle(detail.title || issue.title);
        const featureState = await getFeatureState(config, issue.number, loopFeatureName, cache);
        // Explicit dependency hints parsed out of the issue body.
        const hintedDependencies = extractDependencyHints(detail.body ?? '', contextIssues.map(contextIssue => ({ number: contextIssue.number, title: contextIssue.title })), issue.number);
        const dependencyResolution = await resolveOpenDependencies(config, hintedDependencies, contextIssues, cache);
        enrichmentErrors.push(...dependencyResolution.errors);
        const dependsOn = dependencyResolution.openDependencies;
        const attemptState = getAttemptState(memories, issue.number);
        const requestedBy = expansions.find(expansion => expansion.issueNumber === issue.number)?.requestedBy;
        // 'requested' when the user asked for this issue directly; 'dependency'
        // when it was pulled in via scope expansion; undefined otherwise.
        const scopeOrigin = config.issues?.includes(issue.number)
            ? 'requested'
            : requestedBy?.length
                ? 'dependency'
                : issueScope?.has(issue.number)
                    ? 'dependency'
                    : undefined;
        const candidate = {
            issueNumber: issue.number,
            title: detail.title || issue.title,
            labels: detail.labels ?? issue.labels ?? [],
            body: detail.body ?? '',
            createdAt: issue.createdAt,
            phase: 'idle',
            scopeOrigin,
            requestedBy,
            priorityTier: derivePriorityTier(detail.labels ?? issue.labels ?? []),
            dependsOn,
            // Explicit (body-referenced) edges are always high-confidence blockers.
            explicitDependencyEdges: dependsOn.map((targetIssue) => ({
                sourceIssue: issue.number,
                targetIssue,
                kind: 'explicit',
                confidence: 'high',
                blocking: true,
                evidence: { summary: `Issue body explicitly references #${targetIssue} as a prerequisite.` },
            })),
            inferredDependencyEdges: [],
            attemptState,
            featureState: {
                recommendation: featureState.recommendation,
                hasExistingBranch: featureState.branch.exists,
                commitsAhead: featureState.branch.commitsAhead,
                hasPlan: featureState.plan.exists,
                hasOpenPr: featureState.pr.state === 'OPEN' || featureState.linkedPr.state === 'OPEN',
            },
            loopFeatureName,
            recommendation: featureState.recommendation,
        };
        enrichedCount += 1;
        // Throttled progress: first, last, and every 5th enrichment.
        if (enrichedCount === 1 || enrichedCount === baseIssues.length || enrichedCount % 5 === 0) {
            emitBacklogEvent(config, {
                type: 'backlog_progress',
                phase: 'enrichment',
                message: `Enriched ${enrichedCount}/${baseIssues.length} issue(s).`,
                completed: enrichedCount,
                total: baseIssues.length,
            });
        }
        return candidate;
    };
    const candidateResults = await mapWithConcurrency(baseIssues, ENRICHMENT_CONCURRENCY, enrichIssueCandidate);
    const candidates = candidateResults.filter((candidate) => candidate != null);
    // Unscoped + labeled: transitively pull in open dependencies that the label
    // filter excluded, enriching each one (sequential BFS over `pending`).
    if (!issueScope && config.labels?.length) {
        const knownIssues = new Map(contextIssues.map(issue => [issue.number, issue]));
        const candidatesByNumber = new Map(candidates.map(candidate => [candidate.issueNumber, candidate]));
        const pending = [...new Set(candidates.flatMap(candidate => candidate.dependsOn ?? []))]
            .filter((issueNumber) => issueNumber != null && !candidatesByNumber.has(issueNumber));
        while (pending.length > 0) {
            const dependencyNumber = pending.shift();
            if (dependencyNumber == null || candidatesByNumber.has(dependencyNumber))
                continue;
            let seed = knownIssues.get(dependencyNumber);
            if (!seed) {
                const detail = await getIssueDetail(config, dependencyNumber, cache);
                // Unknown or closed dependencies are dropped silently here.
                if (!detail || detail.state !== 'open')
                    continue;
                seed = {
                    number: detail.number,
                    title: detail.title,
                    state: detail.state,
                    labels: detail.labels,
                    createdAt: detail.createdAt,
                };
                knownIssues.set(dependencyNumber, seed);
            }
            const candidate = await enrichIssueCandidate(seed);
            if (!candidate)
                continue;
            candidate.scopeOrigin = 'dependency';
            candidates.push(candidate);
            candidatesByNumber.set(candidate.issueNumber, candidate);
            for (const transitiveDependency of candidate.dependsOn ?? []) {
                if (!candidatesByNumber.has(transitiveDependency) && !pending.includes(transitiveDependency)) {
                    pending.push(transitiveDependency);
                }
            }
        }
    }
    errors.push(...enrichmentErrors);
    emitBacklogEvent(config, {
        type: 'backlog_timing',
        phase: 'enrichment',
        durationMs: nowMs() - enrichmentStart,
        count: candidates.length,
    });
    const inferenceBacklog = buildInferenceBacklog(candidates, contextIssues);
    // Preliminary pass: explicit edges only, used to produce a first score so
    // the most promising candidates can be chosen for model inference below.
    const preliminaryEnforcedDependencyMap = new Map();
    for (const candidate of candidates) {
        preliminaryEnforcedDependencyMap.set(candidate.issueNumber, [...candidate.explicitDependencyEdges]);
    }
    for (const candidate of candidates) {
        // NOTE(review): this adjacency map is rebuilt identically on every
        // iteration (nothing in this loop mutates the preliminary map, unless
        // evaluateActionability does) — could likely be hoisted; O(n^2) as written.
        const adjacency = new Map();
        for (const [issueNumber, edges] of preliminaryEnforcedDependencyMap.entries()) {
            adjacency.set(issueNumber, edges.map(edge => edge.targetIssue));
        }
        const hasCycle = detectCycleForIssue(candidate.issueNumber, adjacency, new Set(), new Set());
        if (hasCycle) {
            candidate.actionability = 'blocked_cycle';
            candidate.blockedBy = [{ issueNumber: candidate.issueNumber, reason: 'Dependency cycle detected.' }];
        }
        else {
            candidate.actionability = evaluateActionability(candidate, preliminaryEnforcedDependencyMap, issueScope);
        }
        candidate.selectionReasons = buildSelectionReasons(candidate);
        candidate.score = buildScore(candidate);
    }
    // Pick candidates for model inference: everything when scoped, otherwise
    // the top MAX_MODEL_INFERENCE_CANDIDATES by preliminary score (ties broken
    // by ascending issue number for determinism).
    const modelInferenceCandidates = new Set([...candidates]
        .sort((left, right) => {
            const leftScore = left.score?.total ?? 0;
            const rightScore = right.score?.total ?? 0;
            if (rightScore !== leftScore)
                return rightScore - leftScore;
            return left.issueNumber - right.issueNumber;
        })
        .slice(0, issueScope ? candidates.length : Math.min(candidates.length, MAX_MODEL_INFERENCE_CANDIDATES))
        .map(candidate => candidate.issueNumber));
    const inferenceStart = nowMs();
    emitBacklogEvent(config, {
        type: 'backlog_progress',
        phase: 'dependency_inference',
        message: issueScope
            ? `Inferring dependencies for ${candidates.length} scoped issue(s).`
            : `Inferring dependencies for top ${modelInferenceCandidates.size} of ${candidates.length} issue(s) before first rank.`,
        total: modelInferenceCandidates.size,
    });
    let inferredCount = 0;
    await mapWithConcurrency(candidates, INFERENCE_CONCURRENCY, async (candidate) => {
        // Candidates outside the inference set get explicitly empty edges.
        if (!modelInferenceCandidates.has(candidate.issueNumber)) {
            candidate.inferredDependencyEdges = [];
            candidate.inferredDependsOn = [];
            return;
        }
        candidate.inferredDependencyEdges = await inferDependencyEdges(config, candidate, inferenceBacklog, codebaseContext);
        candidate.inferredDependsOn = candidate.inferredDependencyEdges.map((edge) => ({
            issueNumber: edge.targetIssue,
            confidence: edge.confidence,
        }));
        inferredCount += 1;
        if (inferredCount === 1 || inferredCount === modelInferenceCandidates.size || inferredCount % 5 === 0) {
            emitBacklogEvent(config, {
                type: 'backlog_progress',
                phase: 'dependency_inference',
                message: `Analyzed dependencies for ${inferredCount}/${modelInferenceCandidates.size} issue(s).`,
                completed: inferredCount,
                total: modelInferenceCandidates.size,
            });
        }
    });
    emitBacklogEvent(config, {
        type: 'backlog_timing',
        phase: 'dependency_inference',
        durationMs: nowMs() - inferenceStart,
        count: modelInferenceCandidates.size,
    });
    // Final pass: enforce explicit edges plus only high-confidence inferred
    // edges, then re-run cycle detection and re-score every candidate.
    const enforcedDependencyMap = new Map();
    for (const candidate of candidates) {
        const enforced = [
            ...candidate.explicitDependencyEdges,
            ...candidate.inferredDependencyEdges.filter(edge => edge.confidence === 'high'),
        ];
        enforcedDependencyMap.set(candidate.issueNumber, enforced);
    }
    const adjacency = new Map();
    for (const [issueNumber, edges] of enforcedDependencyMap.entries()) {
        adjacency.set(issueNumber, edges.map(edge => edge.targetIssue));
    }
    // NOTE(review): unlike the preliminary pass, `visited` is shared across
    // candidates here — presumably a memoization inside detectCycleForIssue;
    // confirm the asymmetry with the first pass is intentional.
    const visited = new Set();
    for (const candidate of candidates) {
        const hasCycle = detectCycleForIssue(candidate.issueNumber, adjacency, new Set(), visited);
        if (hasCycle) {
            candidate.actionability = 'blocked_cycle';
            candidate.blockedBy = [{ issueNumber: candidate.issueNumber, reason: 'Dependency cycle detected.' }];
        }
        else {
            candidate.actionability = evaluateActionability(candidate, enforcedDependencyMap, issueScope);
        }
        candidate.selectionReasons = buildSelectionReasons(candidate);
        candidate.score = buildScore(candidate);
    }
    const rankingStart = nowMs();
    emitBacklogEvent(config, {
        type: 'backlog_progress',
        phase: 'ranking',
        message: `Ranking ${candidates.length} issue(s).`,
        total: candidates.length,
    });
    // Final queue: scoped candidates only (when scoped), sorted by descending
    // score then ascending issue number.
    const queue = [...candidates]
        .filter(candidate => !issueScope || issueScope.has(candidate.issueNumber))
        .sort((left, right) => {
            const leftScore = left.score?.total ?? 0;
            const rightScore = right.score?.total ?? 0;
            if (rightScore !== leftScore)
                return rightScore - leftScore;
            return left.issueNumber - right.issueNumber;
        });
    emitBacklogEvent(config, {
        type: 'backlog_timing',
        phase: 'ranking',
        durationMs: nowMs() - rankingStart,
        count: queue.length,
    });
    return {
        queue,
        // 'ready' | 'housekeeping' | 'waiting_pr' count as actionable; every
        // other actionability value lands in `blocked`.
        actionable: queue.filter(candidate => candidate.actionability === 'ready'
            || candidate.actionability === 'housekeeping'
            || candidate.actionability === 'waiting_pr'),
        blocked: queue.filter(candidate => candidate.actionability !== 'ready'
            && candidate.actionability !== 'housekeeping'
            && candidate.actionability !== 'waiting_pr'),
        expansions,
        errors,
    };
}
1147
/**
 * Convert a ranked backlog queue into issue-state records, preserving order.
 *
 * @param {Array<object>} queue - ranked backlog candidates
 * @returns {Array<object>} one issue state per candidate (via toIssueState)
 */
export function toIssueStates(queue) {
    const states = [];
    for (const candidate of queue) {
        states.push(toIssueState(candidate));
    }
    return states;
}