@renseiai/agentfactory 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +125 -0
- package/dist/src/config/index.d.ts +3 -0
- package/dist/src/config/index.d.ts.map +1 -0
- package/dist/src/config/index.js +1 -0
- package/dist/src/config/repository-config.d.ts +44 -0
- package/dist/src/config/repository-config.d.ts.map +1 -0
- package/dist/src/config/repository-config.js +88 -0
- package/dist/src/config/repository-config.test.d.ts +2 -0
- package/dist/src/config/repository-config.test.d.ts.map +1 -0
- package/dist/src/config/repository-config.test.js +249 -0
- package/dist/src/deployment/deployment-checker.d.ts +110 -0
- package/dist/src/deployment/deployment-checker.d.ts.map +1 -0
- package/dist/src/deployment/deployment-checker.js +242 -0
- package/dist/src/deployment/index.d.ts +3 -0
- package/dist/src/deployment/index.d.ts.map +1 -0
- package/dist/src/deployment/index.js +2 -0
- package/dist/src/frontend/index.d.ts +2 -0
- package/dist/src/frontend/index.d.ts.map +1 -0
- package/dist/src/frontend/index.js +1 -0
- package/dist/src/frontend/types.d.ts +106 -0
- package/dist/src/frontend/types.d.ts.map +1 -0
- package/dist/src/frontend/types.js +11 -0
- package/dist/src/governor/decision-engine.d.ts +52 -0
- package/dist/src/governor/decision-engine.d.ts.map +1 -0
- package/dist/src/governor/decision-engine.js +220 -0
- package/dist/src/governor/decision-engine.test.d.ts +2 -0
- package/dist/src/governor/decision-engine.test.d.ts.map +1 -0
- package/dist/src/governor/decision-engine.test.js +629 -0
- package/dist/src/governor/event-bus.d.ts +43 -0
- package/dist/src/governor/event-bus.d.ts.map +1 -0
- package/dist/src/governor/event-bus.js +8 -0
- package/dist/src/governor/event-deduplicator.d.ts +43 -0
- package/dist/src/governor/event-deduplicator.d.ts.map +1 -0
- package/dist/src/governor/event-deduplicator.js +53 -0
- package/dist/src/governor/event-driven-governor.d.ts +131 -0
- package/dist/src/governor/event-driven-governor.d.ts.map +1 -0
- package/dist/src/governor/event-driven-governor.js +379 -0
- package/dist/src/governor/event-driven-governor.test.d.ts +2 -0
- package/dist/src/governor/event-driven-governor.test.d.ts.map +1 -0
- package/dist/src/governor/event-driven-governor.test.js +673 -0
- package/dist/src/governor/event-types.d.ts +78 -0
- package/dist/src/governor/event-types.d.ts.map +1 -0
- package/dist/src/governor/event-types.js +32 -0
- package/dist/src/governor/governor-types.d.ts +82 -0
- package/dist/src/governor/governor-types.d.ts.map +1 -0
- package/dist/src/governor/governor-types.js +21 -0
- package/dist/src/governor/governor.d.ts +100 -0
- package/dist/src/governor/governor.d.ts.map +1 -0
- package/dist/src/governor/governor.js +262 -0
- package/dist/src/governor/governor.test.d.ts +2 -0
- package/dist/src/governor/governor.test.d.ts.map +1 -0
- package/dist/src/governor/governor.test.js +514 -0
- package/dist/src/governor/human-touchpoints.d.ts +131 -0
- package/dist/src/governor/human-touchpoints.d.ts.map +1 -0
- package/dist/src/governor/human-touchpoints.js +251 -0
- package/dist/src/governor/human-touchpoints.test.d.ts +2 -0
- package/dist/src/governor/human-touchpoints.test.d.ts.map +1 -0
- package/dist/src/governor/human-touchpoints.test.js +366 -0
- package/dist/src/governor/in-memory-event-bus.d.ts +29 -0
- package/dist/src/governor/in-memory-event-bus.d.ts.map +1 -0
- package/dist/src/governor/in-memory-event-bus.js +79 -0
- package/dist/src/governor/index.d.ts +14 -0
- package/dist/src/governor/index.d.ts.map +1 -0
- package/dist/src/governor/index.js +13 -0
- package/dist/src/governor/override-parser.d.ts +60 -0
- package/dist/src/governor/override-parser.d.ts.map +1 -0
- package/dist/src/governor/override-parser.js +98 -0
- package/dist/src/governor/override-parser.test.d.ts +2 -0
- package/dist/src/governor/override-parser.test.d.ts.map +1 -0
- package/dist/src/governor/override-parser.test.js +312 -0
- package/dist/src/governor/platform-adapter.d.ts +69 -0
- package/dist/src/governor/platform-adapter.d.ts.map +1 -0
- package/dist/src/governor/platform-adapter.js +11 -0
- package/dist/src/governor/processing-state.d.ts +66 -0
- package/dist/src/governor/processing-state.d.ts.map +1 -0
- package/dist/src/governor/processing-state.js +43 -0
- package/dist/src/governor/processing-state.test.d.ts +2 -0
- package/dist/src/governor/processing-state.test.d.ts.map +1 -0
- package/dist/src/governor/processing-state.test.js +96 -0
- package/dist/src/governor/top-of-funnel.d.ts +118 -0
- package/dist/src/governor/top-of-funnel.d.ts.map +1 -0
- package/dist/src/governor/top-of-funnel.js +168 -0
- package/dist/src/governor/top-of-funnel.test.d.ts +2 -0
- package/dist/src/governor/top-of-funnel.test.d.ts.map +1 -0
- package/dist/src/governor/top-of-funnel.test.js +331 -0
- package/dist/src/index.d.ts +11 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/index.js +10 -0
- package/dist/src/linear-cli.d.ts +38 -0
- package/dist/src/linear-cli.d.ts.map +1 -0
- package/dist/src/linear-cli.js +674 -0
- package/dist/src/logger.d.ts +117 -0
- package/dist/src/logger.d.ts.map +1 -0
- package/dist/src/logger.js +430 -0
- package/dist/src/manifest/generate.d.ts +20 -0
- package/dist/src/manifest/generate.d.ts.map +1 -0
- package/dist/src/manifest/generate.js +65 -0
- package/dist/src/manifest/index.d.ts +4 -0
- package/dist/src/manifest/index.d.ts.map +1 -0
- package/dist/src/manifest/index.js +2 -0
- package/dist/src/manifest/route-manifest.d.ts +34 -0
- package/dist/src/manifest/route-manifest.d.ts.map +1 -0
- package/dist/src/manifest/route-manifest.js +148 -0
- package/dist/src/orchestrator/activity-emitter.d.ts +119 -0
- package/dist/src/orchestrator/activity-emitter.d.ts.map +1 -0
- package/dist/src/orchestrator/activity-emitter.js +306 -0
- package/dist/src/orchestrator/api-activity-emitter.d.ts +167 -0
- package/dist/src/orchestrator/api-activity-emitter.d.ts.map +1 -0
- package/dist/src/orchestrator/api-activity-emitter.js +417 -0
- package/dist/src/orchestrator/heartbeat-writer.d.ts +57 -0
- package/dist/src/orchestrator/heartbeat-writer.d.ts.map +1 -0
- package/dist/src/orchestrator/heartbeat-writer.js +137 -0
- package/dist/src/orchestrator/index.d.ts +20 -0
- package/dist/src/orchestrator/index.d.ts.map +1 -0
- package/dist/src/orchestrator/index.js +22 -0
- package/dist/src/orchestrator/log-analyzer.d.ts +160 -0
- package/dist/src/orchestrator/log-analyzer.d.ts.map +1 -0
- package/dist/src/orchestrator/log-analyzer.js +572 -0
- package/dist/src/orchestrator/log-config.d.ts +39 -0
- package/dist/src/orchestrator/log-config.d.ts.map +1 -0
- package/dist/src/orchestrator/log-config.js +45 -0
- package/dist/src/orchestrator/orchestrator.d.ts +316 -0
- package/dist/src/orchestrator/orchestrator.d.ts.map +1 -0
- package/dist/src/orchestrator/orchestrator.js +3290 -0
- package/dist/src/orchestrator/parse-work-result.d.ts +16 -0
- package/dist/src/orchestrator/parse-work-result.d.ts.map +1 -0
- package/dist/src/orchestrator/parse-work-result.js +135 -0
- package/dist/src/orchestrator/parse-work-result.test.d.ts +2 -0
- package/dist/src/orchestrator/parse-work-result.test.d.ts.map +1 -0
- package/dist/src/orchestrator/parse-work-result.test.js +234 -0
- package/dist/src/orchestrator/progress-logger.d.ts +72 -0
- package/dist/src/orchestrator/progress-logger.d.ts.map +1 -0
- package/dist/src/orchestrator/progress-logger.js +135 -0
- package/dist/src/orchestrator/session-logger.d.ts +159 -0
- package/dist/src/orchestrator/session-logger.d.ts.map +1 -0
- package/dist/src/orchestrator/session-logger.js +275 -0
- package/dist/src/orchestrator/state-recovery.d.ts +96 -0
- package/dist/src/orchestrator/state-recovery.d.ts.map +1 -0
- package/dist/src/orchestrator/state-recovery.js +302 -0
- package/dist/src/orchestrator/state-types.d.ts +165 -0
- package/dist/src/orchestrator/state-types.d.ts.map +1 -0
- package/dist/src/orchestrator/state-types.js +7 -0
- package/dist/src/orchestrator/stream-parser.d.ts +151 -0
- package/dist/src/orchestrator/stream-parser.d.ts.map +1 -0
- package/dist/src/orchestrator/stream-parser.js +137 -0
- package/dist/src/orchestrator/types.d.ts +232 -0
- package/dist/src/orchestrator/types.d.ts.map +1 -0
- package/dist/src/orchestrator/types.js +4 -0
- package/dist/src/orchestrator/validate-git-remote.test.d.ts +2 -0
- package/dist/src/orchestrator/validate-git-remote.test.d.ts.map +1 -0
- package/dist/src/orchestrator/validate-git-remote.test.js +61 -0
- package/dist/src/providers/a2a-auth.d.ts +81 -0
- package/dist/src/providers/a2a-auth.d.ts.map +1 -0
- package/dist/src/providers/a2a-auth.js +188 -0
- package/dist/src/providers/a2a-auth.test.d.ts +2 -0
- package/dist/src/providers/a2a-auth.test.d.ts.map +1 -0
- package/dist/src/providers/a2a-auth.test.js +232 -0
- package/dist/src/providers/a2a-provider.d.ts +254 -0
- package/dist/src/providers/a2a-provider.d.ts.map +1 -0
- package/dist/src/providers/a2a-provider.integration.test.d.ts +9 -0
- package/dist/src/providers/a2a-provider.integration.test.d.ts.map +1 -0
- package/dist/src/providers/a2a-provider.integration.test.js +665 -0
- package/dist/src/providers/a2a-provider.js +811 -0
- package/dist/src/providers/a2a-provider.test.d.ts +2 -0
- package/dist/src/providers/a2a-provider.test.d.ts.map +1 -0
- package/dist/src/providers/a2a-provider.test.js +681 -0
- package/dist/src/providers/amp-provider.d.ts +20 -0
- package/dist/src/providers/amp-provider.d.ts.map +1 -0
- package/dist/src/providers/amp-provider.js +24 -0
- package/dist/src/providers/claude-provider.d.ts +18 -0
- package/dist/src/providers/claude-provider.d.ts.map +1 -0
- package/dist/src/providers/claude-provider.js +437 -0
- package/dist/src/providers/codex-provider.d.ts +133 -0
- package/dist/src/providers/codex-provider.d.ts.map +1 -0
- package/dist/src/providers/codex-provider.js +381 -0
- package/dist/src/providers/codex-provider.test.d.ts +2 -0
- package/dist/src/providers/codex-provider.test.d.ts.map +1 -0
- package/dist/src/providers/codex-provider.test.js +387 -0
- package/dist/src/providers/index.d.ts +44 -0
- package/dist/src/providers/index.d.ts.map +1 -0
- package/dist/src/providers/index.js +85 -0
- package/dist/src/providers/spring-ai-provider.d.ts +90 -0
- package/dist/src/providers/spring-ai-provider.d.ts.map +1 -0
- package/dist/src/providers/spring-ai-provider.integration.test.d.ts +13 -0
- package/dist/src/providers/spring-ai-provider.integration.test.d.ts.map +1 -0
- package/dist/src/providers/spring-ai-provider.integration.test.js +351 -0
- package/dist/src/providers/spring-ai-provider.js +317 -0
- package/dist/src/providers/spring-ai-provider.test.d.ts +2 -0
- package/dist/src/providers/spring-ai-provider.test.d.ts.map +1 -0
- package/dist/src/providers/spring-ai-provider.test.js +200 -0
- package/dist/src/providers/types.d.ts +165 -0
- package/dist/src/providers/types.d.ts.map +1 -0
- package/dist/src/providers/types.js +13 -0
- package/dist/src/templates/adapters.d.ts +51 -0
- package/dist/src/templates/adapters.d.ts.map +1 -0
- package/dist/src/templates/adapters.js +104 -0
- package/dist/src/templates/adapters.test.d.ts +2 -0
- package/dist/src/templates/adapters.test.d.ts.map +1 -0
- package/dist/src/templates/adapters.test.js +165 -0
- package/dist/src/templates/agent-definition.d.ts +85 -0
- package/dist/src/templates/agent-definition.d.ts.map +1 -0
- package/dist/src/templates/agent-definition.js +97 -0
- package/dist/src/templates/agent-definition.test.d.ts +2 -0
- package/dist/src/templates/agent-definition.test.d.ts.map +1 -0
- package/dist/src/templates/agent-definition.test.js +209 -0
- package/dist/src/templates/index.d.ts +14 -0
- package/dist/src/templates/index.d.ts.map +1 -0
- package/dist/src/templates/index.js +11 -0
- package/dist/src/templates/loader.d.ts +41 -0
- package/dist/src/templates/loader.d.ts.map +1 -0
- package/dist/src/templates/loader.js +114 -0
- package/dist/src/templates/registry.d.ts +80 -0
- package/dist/src/templates/registry.d.ts.map +1 -0
- package/dist/src/templates/registry.js +177 -0
- package/dist/src/templates/registry.test.d.ts +2 -0
- package/dist/src/templates/registry.test.d.ts.map +1 -0
- package/dist/src/templates/registry.test.js +198 -0
- package/dist/src/templates/renderer.d.ts +29 -0
- package/dist/src/templates/renderer.d.ts.map +1 -0
- package/dist/src/templates/renderer.js +35 -0
- package/dist/src/templates/strategy-templates.test.d.ts +2 -0
- package/dist/src/templates/strategy-templates.test.d.ts.map +1 -0
- package/dist/src/templates/strategy-templates.test.js +619 -0
- package/dist/src/templates/types.d.ts +233 -0
- package/dist/src/templates/types.d.ts.map +1 -0
- package/dist/src/templates/types.js +127 -0
- package/dist/src/templates/types.test.d.ts +2 -0
- package/dist/src/templates/types.test.d.ts.map +1 -0
- package/dist/src/templates/types.test.js +232 -0
- package/dist/src/tools/index.d.ts +6 -0
- package/dist/src/tools/index.d.ts.map +1 -0
- package/dist/src/tools/index.js +3 -0
- package/dist/src/tools/linear-runner.d.ts +34 -0
- package/dist/src/tools/linear-runner.d.ts.map +1 -0
- package/dist/src/tools/linear-runner.js +700 -0
- package/dist/src/tools/plugins/linear.d.ts +9 -0
- package/dist/src/tools/plugins/linear.d.ts.map +1 -0
- package/dist/src/tools/plugins/linear.js +138 -0
- package/dist/src/tools/registry.d.ts +9 -0
- package/dist/src/tools/registry.d.ts.map +1 -0
- package/dist/src/tools/registry.js +18 -0
- package/dist/src/tools/types.d.ts +18 -0
- package/dist/src/tools/types.d.ts.map +1 -0
- package/dist/src/tools/types.js +1 -0
- package/package.json +78 -0
|
@@ -0,0 +1,3290 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Agent Orchestrator
|
|
3
|
+
* Spawns concurrent Claude agents to work on Linear backlog issues
|
|
4
|
+
* Uses the Claude Agent SDK for programmatic control
|
|
5
|
+
*/
|
|
6
|
+
import { randomUUID } from 'crypto';
|
|
7
|
+
import { execSync } from 'child_process';
|
|
8
|
+
import { existsSync, lstatSync, mkdirSync, readFileSync, readdirSync, rmSync, statSync, symlinkSync, unlinkSync, writeFileSync } from 'fs';
|
|
9
|
+
import { resolve, dirname } from 'path';
|
|
10
|
+
import { parse as parseDotenv } from 'dotenv';
|
|
11
|
+
import { createProvider, resolveProviderName, } from '../providers/index.js';
|
|
12
|
+
import { initializeAgentDir, writeState, updateState, writeTodos, createInitialState, checkRecovery, buildRecoveryPrompt, getHeartbeatTimeoutFromEnv, getMaxRecoveryAttemptsFromEnv, } from './state-recovery.js';
|
|
13
|
+
import { createHeartbeatWriter, getHeartbeatIntervalFromEnv } from './heartbeat-writer.js';
|
|
14
|
+
import { createProgressLogger } from './progress-logger.js';
|
|
15
|
+
import { createSessionLogger } from './session-logger.js';
|
|
16
|
+
import { isSessionLoggingEnabled, getLogAnalysisConfig } from './log-config.js';
|
|
17
|
+
import { createLinearAgentClient, createAgentSession, buildCompletionComments, STATUS_WORK_TYPE_MAP, WORK_TYPE_START_STATUS, WORK_TYPE_COMPLETE_STATUS, WORK_TYPE_FAIL_STATUS, TERMINAL_STATUSES, WORK_TYPES_REQUIRING_WORKTREE, } from '@renseiai/agentfactory-linear';
|
|
18
|
+
import { parseWorkResult } from './parse-work-result.js';
|
|
19
|
+
import { createActivityEmitter } from './activity-emitter.js';
|
|
20
|
+
import { createApiActivityEmitter } from './api-activity-emitter.js';
|
|
21
|
+
import { createLogger } from '../logger.js';
|
|
22
|
+
import { TemplateRegistry, createToolPermissionAdapter } from '../templates/index.js';
|
|
23
|
+
import { loadRepositoryConfig } from '../config/index.js';
|
|
24
|
+
import { ToolRegistry, linearPlugin } from '../tools/index.js';
|
|
25
|
+
// Default inactivity timeout: 5 minutes. An agent with no observed activity
// for this long is stopped (see inactivityTimeoutMs in DEFAULT_CONFIG).
const DEFAULT_INACTIVITY_TIMEOUT_MS = 300000;
// Default max session timeout: unlimited (undefined means no hard cap)
const DEFAULT_MAX_SESSION_TIMEOUT_MS = undefined;
// Env vars that Claude Code interprets for authentication/routing. If these
// leak into agent processes from app .env.local files, Claude Code switches
// from Max subscription billing to API-key billing. Apps that need an
// Anthropic API key should use a namespaced name instead (e.g.
// RENSEI_SOCIAL_ANTHROPIC_API_KEY) which won't be recognised by Claude Code.
const AGENT_ENV_BLOCKLIST = [
    'ANTHROPIC_API_KEY',
    'ANTHROPIC_AUTH_TOKEN',
    'ANTHROPIC_BASE_URL',
    'OPENCLAW_GATEWAY_TOKEN',
];
|
|
40
|
+
/**
 * Validate that the git remote origin URL contains the expected repository pattern.
 * Supports both HTTPS (github.com/org/repo) and SSH (git@github.com:org/repo) formats.
 *
 * The expected pattern must match on path-segment boundaries: a bare substring
 * check would wrongly accept prefix collisions such as a remote of
 * 'github.com/org/repo-fork' when 'github.com/org/repo' is expected.
 *
 * @param expectedRepo - The expected repository pattern (e.g. 'github.com/renseiai/agentfactory')
 * @param cwd - Working directory to run git commands in
 * @throws Error if the remote URL cannot be read or does not match the expected repository
 */
export function validateGitRemote(expectedRepo, cwd) {
    let remoteUrl;
    try {
        remoteUrl = execSync('git remote get-url origin', {
            encoding: 'utf-8',
            cwd,
            stdio: ['pipe', 'pipe', 'pipe'],
        }).trim();
    }
    catch {
        throw new Error(`Repository validation failed: could not get git remote URL. Expected '${expectedRepo}'.`);
    }
    // Normalize: convert SSH format (git@github.com:org/repo.git) to comparable form
    const normalizedRemote = remoteUrl
        .replace(/^git@([^:]+):/, '$1/') // git@github.com:org/repo -> github.com/org/repo
        .replace(/^https?:\/\//, '') // https://github.com/org/repo -> github.com/org/repo
        .replace(/\.git$/, ''); // remove trailing .git
    const normalizedExpected = expectedRepo
        .replace(/^https?:\/\//, '')
        .replace(/\.git$/, '');
    // Boundary check: the expected pattern must end at the end of the remote
    // (or at a '/'), and must not be preceded by a host/name character.
    const idx = normalizedRemote.indexOf(normalizedExpected);
    const after = idx === -1 ? '' : normalizedRemote.charAt(idx + normalizedExpected.length);
    const before = idx > 0 ? normalizedRemote.charAt(idx - 1) : '';
    const matches = idx !== -1 &&
        (after === '' || after === '/') &&
        !/[\w.-]/.test(before);
    if (!matches) {
        throw new Error(`Repository mismatch: expected '${expectedRepo}' but git remote is '${remoteUrl}'. Refusing to proceed.`);
    }
}
|
|
72
|
+
// Baseline orchestrator configuration; callers may override individual fields.
const DEFAULT_CONFIG = {
    // Maximum number of agents allowed to run concurrently
    maxConcurrent: 3,
    // Directory where git worktrees are created
    worktreePath: '.worktrees',
    // Automatically transition issue statuses as work progresses
    autoTransition: true,
    // Preserve worktree when PR creation fails to prevent data loss
    preserveWorkOnPrFailure: true,
    // Sandbox disabled by default due to known bugs:
    // - https://github.com/anthropics/claude-code/issues/14162
    // - https://github.com/anthropics/claude-code/issues/12150
    sandboxEnabled: false,
    // Throttling/truncation settings for streamed agent output
    streamConfig: {
        // Minimum interval (ms) between streamed updates
        minInterval: 500,
        // Truncate streamed output beyond this many characters
        maxOutputLength: 2000,
        includeTimestamps: false,
    },
    // Inactivity timeout: agent is stopped if no activity for this duration
    inactivityTimeoutMs: DEFAULT_INACTIVITY_TIMEOUT_MS,
    // Max session timeout: hard cap on runtime (unlimited by default)
    maxSessionTimeoutMs: DEFAULT_MAX_SESSION_TIMEOUT_MS,
};
|
|
92
|
+
/**
 * Load environment variables from the nearest .claude/settings.local.json.
 *
 * Walks up the directory tree from `workDir` until a settings file is found
 * or the filesystem root is reached. Only string-valued entries of the file's
 * `env` object are returned; non-string values are silently dropped.
 *
 * @param workDir - Directory to start searching from
 * @param log - Optional logger with debug/warn methods
 * @returns Map of env var name -> value ({} when nothing usable is found)
 */
function loadSettingsEnv(workDir, log) {
    // Walk up from workDir to find .claude/settings.local.json
    let currentDir = workDir;
    let prevDir = '';
    // Keep walking up until we reach the filesystem root
    while (currentDir !== prevDir) {
        const settingsPath = resolve(currentDir, '.claude', 'settings.local.json');
        if (existsSync(settingsPath)) {
            try {
                const content = readFileSync(settingsPath, 'utf-8');
                const settings = JSON.parse(content);
                if (settings.env && typeof settings.env === 'object') {
                    // Filter to only string values
                    const env = {};
                    for (const [key, value] of Object.entries(settings.env)) {
                        if (typeof value === 'string') {
                            env[key] = value;
                        }
                    }
                    log?.debug('Loaded settings.local.json', { envVars: Object.keys(env).length });
                    return env;
                }
            }
            catch (error) {
                log?.warn('Failed to load settings.local.json', {
                    error: error instanceof Error ? error.message : String(error),
                });
                // Unreadable/unparseable file: stop here. Falling through would
                // emit a misleading "no env property" diagnostic for a file we
                // never managed to parse.
                return {};
            }
            // File exists but has no env property — not an error
            log?.debug('settings.local.json found but contains no env property', { path: settingsPath });
            return {};
        }
        prevDir = currentDir;
        currentDir = dirname(currentDir);
    }
    log?.debug('settings.local.json not found', { startDir: workDir });
    return {};
}
|
|
134
|
+
/**
 * Locate the main repository root containing a directory.
 *
 * Walks upward from `startDir` looking for a `.git` entry. A `.git`
 * *directory* (readFileSync fails on it) marks the main checkout; a `.git`
 * *file* beginning with "gitdir:" is a worktree pointer and is skipped.
 *
 * @param startDir - Directory to begin the upward search from
 * @returns Path of the repo root, or null if none was found
 */
function findRepoRoot(startDir) {
    for (let dir = startDir, parent = ''; dir !== parent; parent = dir, dir = dirname(dir)) {
        const gitEntry = resolve(dir, '.git');
        if (!existsSync(gitEntry)) {
            continue;
        }
        let marker;
        try {
            marker = readFileSync(gitEntry, 'utf-8');
        }
        catch {
            // Reading failed (it's a directory): this is the main repo.
            return dir;
        }
        // A worktree's .git file contains "gitdir: <path>"; anything else
        // counts as the repo root.
        if (!marker.startsWith('gitdir:')) {
            return dir;
        }
    }
    return null;
}
|
|
162
|
+
/**
 * Collect environment variables from per-app dotenv files based on work type.
 *
 * - Development work: loads .env.local from all apps
 * - QA/Acceptance work (and their coordination variants): loads .env.test.local
 *
 * Files are parsed without touching process.env, and later apps override
 * earlier ones on key collisions. This ensures agents running in worktrees
 * have access to database config and other gitignored environment variables.
 *
 * @param workDir - Directory the agent runs in (used to locate the repo root)
 * @param workType - Work type deciding which env file variant to read
 * @param log - Optional logger
 * @returns Merged env var map ({} when nothing could be loaded)
 */
function loadAppEnvFiles(workDir, workType, log) {
    // Worktrees live under .worktrees/ inside the repo, so walk up first.
    const repoRoot = findRepoRoot(workDir);
    if (!repoRoot) {
        log?.warn('Could not find repo root for env file loading', { startDir: workDir });
        return {};
    }
    const appsDir = resolve(repoRoot, 'apps');
    if (!existsSync(appsDir)) {
        log?.warn('Apps directory not found', { appsDir });
        return {};
    }
    // Determine which env file to load based on work type
    const testWorkTypes = ['qa', 'acceptance', 'qa-coordination', 'acceptance-coordination'];
    const envFileName = testWorkTypes.includes(workType) ? '.env.test.local' : '.env.local';
    const env = {};
    let loadedCount = 0;
    try {
        for (const entry of readdirSync(appsDir, { withFileTypes: true })) {
            if (!entry.isDirectory()) {
                continue;
            }
            const envPath = resolve(appsDir, entry.name, envFileName);
            if (!existsSync(envPath)) {
                continue;
            }
            // Parse the file without injecting into process.env (avoids dotenv log spam)
            const parsed = parseDotenv(readFileSync(envPath, 'utf-8'));
            if (parsed && Object.keys(parsed).length > 0) {
                Object.assign(env, parsed);
                loadedCount += 1;
                log?.debug(`Loaded ${envFileName} from ${entry.name}`, {
                    vars: Object.keys(parsed).length,
                });
            }
        }
        if (loadedCount > 0) {
            log?.info(`Loaded ${envFileName} from ${loadedCount} app(s)`, {
                workType,
                totalVars: Object.keys(env).length,
            });
        }
        else {
            log?.warn(`No ${envFileName} files found in apps/`, { workType });
        }
    }
    catch (error) {
        log?.warn('Failed to load app env files', {
            error: error instanceof Error ? error.message : String(error),
        });
    }
    return env;
}
|
|
223
|
+
/**
 * Patterns that indicate tool-related errors (not API or resource limit errors).
 * Matched against raw error message text by isToolRelatedError(); most are
 * case-insensitive, while errno-style codes (EACCES, ENOENT, ...) match
 * exactly as emitted by Node.
 */
const TOOL_ERROR_PATTERNS = [
    // Sandbox violations
    /sandbox/i,
    /not allowed/i,
    /operation not permitted/i,
    // Permission errors
    /permission denied/i,
    /EACCES/,
    /access denied/i,
    // File system errors
    /ENOENT/,
    /no such file or directory/i,
    /file not found/i,
    // Network errors
    /ECONNREFUSED/,
    /ETIMEDOUT/,
    /ENOTFOUND/,
    /connection refused/i,
    /network error/i,
    // Command/tool failures
    /command failed/i,
    /exited with code/i,
    /tool.*error/i,
    /tool.*failed/i,
    // General error indicators from tools
    /is_error.*true/i,
];
|
|
253
|
+
/**
 * Decide whether an error message stems from tool execution
 * (vs API errors, resource limits, etc.).
 *
 * @param error - Raw error message text
 * @returns true when any tool-error pattern matches
 */
function isToolRelatedError(error) {
    for (const pattern of TOOL_ERROR_PATTERNS) {
        if (pattern.test(error)) {
            return true;
        }
    }
    return false;
}
|
|
260
|
+
/**
 * Best-effort extraction of a tool name from an error message.
 *
 * @param error - Raw error message text
 * @returns The matched tool name, or 'unknown' when none is recognised
 */
function extractToolNameFromError(error) {
    // Common phrasings that mention the failing tool by name; checked in order,
    // first hit wins.
    const namePatterns = [
        /Tool\s+["']?(\w+)["']?/i,
        /(\w+)\s+tool.*(?:error|failed)/i,
        /Failed to (?:run|execute|call)\s+["']?(\w+)["']?/i,
    ];
    for (const namePattern of namePatterns) {
        const hit = namePattern.exec(error);
        if (hit?.[1]) {
            return hit[1];
        }
    }
    return 'unknown';
}
|
|
278
|
+
/**
 * Check if a worktree has uncommitted changes or unpushed commits.
 *
 * Detection order:
 *   1. `git status --porcelain` — any output means uncommitted changes.
 *   2. With an upstream branch: count commits ahead via `git rev-list`.
 *   3. Without an upstream: if local commits exist and the current branch has
 *      no remote counterpart (`git ls-remote`), the branch was never pushed.
 *
 * Errors from the outer git commands are treated conservatively as incomplete
 * work; errors probing an empty/odd repo (inner catch) are treated as safe.
 *
 * @param worktreePath - Path to the git worktree
 * @returns Check result with reason ('uncommitted_changes' | 'unpushed_commits')
 *          and details when incomplete work is found
 */
function checkForIncompleteWork(worktreePath) {
    try {
        // Check for uncommitted changes (staged or unstaged)
        const statusOutput = execSync('git status --porcelain', {
            cwd: worktreePath,
            encoding: 'utf-8',
            timeout: 10000,
        }).trim();
        if (statusOutput.length > 0) {
            // One porcelain line per changed file
            const changedFiles = statusOutput.split('\n').length;
            return {
                hasIncompleteWork: true,
                reason: 'uncommitted_changes',
                details: `${changedFiles} file(s) with uncommitted changes`,
            };
        }
        // Check for unpushed commits
        // First, check if we have an upstream branch
        try {
            const trackingBranch = execSync('git rev-parse --abbrev-ref @{u}', {
                cwd: worktreePath,
                encoding: 'utf-8',
                timeout: 10000,
            }).trim();
            // Count commits ahead of upstream
            const unpushedOutput = execSync(`git rev-list --count ${trackingBranch}..HEAD`, {
                cwd: worktreePath,
                encoding: 'utf-8',
                timeout: 10000,
            }).trim();
            const unpushedCount = parseInt(unpushedOutput, 10);
            if (unpushedCount > 0) {
                return {
                    hasIncompleteWork: true,
                    reason: 'unpushed_commits',
                    details: `${unpushedCount} commit(s) not pushed to ${trackingBranch}`,
                };
            }
        }
        catch {
            // No upstream branch set - check if we have any local commits
            // This happens when branch was created but never pushed
            try {
                const logOutput = execSync('git log --oneline -1', {
                    cwd: worktreePath,
                    encoding: 'utf-8',
                    timeout: 10000,
                }).trim();
                if (logOutput.length > 0) {
                    // Check if remote branch exists
                    const currentBranch = execSync('git branch --show-current', {
                        cwd: worktreePath,
                        encoding: 'utf-8',
                        timeout: 10000,
                    }).trim();
                    try {
                        // ls-remote exits non-zero when origin/branch can't be resolved
                        execSync(`git ls-remote --heads origin ${currentBranch}`, {
                            cwd: worktreePath,
                            encoding: 'utf-8',
                            timeout: 10000,
                        });
                        // Remote branch exists, no issue
                    }
                    catch {
                        // Remote branch doesn't exist - branch never pushed
                        return {
                            hasIncompleteWork: true,
                            reason: 'unpushed_commits',
                            details: `Branch '${currentBranch}' has not been pushed to remote`,
                        };
                    }
                }
            }
            catch {
                // Empty repo or other issue - assume safe to clean
            }
        }
        return { hasIncompleteWork: false };
    }
    catch (error) {
        // If git commands fail, err on the side of caution and report incomplete
        return {
            hasIncompleteWork: true,
            reason: 'uncommitted_changes',
            details: `Failed to check git status: ${error instanceof Error ? error.message : String(error)}`,
        };
    }
}
|
|
372
|
+
/**
|
|
373
|
+
* Generate a prompt for the agent based on work type
|
|
374
|
+
*
|
|
375
|
+
* @param identifier - Issue identifier (e.g., SUP-123)
|
|
376
|
+
* @param workType - Type of work being performed
|
|
377
|
+
* @param options - Optional configuration
|
|
378
|
+
* @param options.parentContext - Pre-built enriched prompt for parent issues with sub-issues.
|
|
379
|
+
* When provided for 'qa' or 'acceptance' work types, this overrides the default prompt
|
|
380
|
+
* to include sub-issue context and holistic validation instructions.
|
|
381
|
+
* @returns The appropriate prompt for the work type
|
|
382
|
+
*/
|
|
383
|
+
function generatePromptForWorkType(identifier, workType, options) {
    // Enriched parent context (sub-issue rollup) replaces the default prompt
    // entirely — but only for the validation-style work types that use it.
    if (options?.parentContext && (workType === 'qa' || workType === 'acceptance')) {
        return options.parentContext;
    }
    // Shared prompt fragments. These were previously copy-pasted into each
    // case; hoisting them keeps the many work-type prompts from drifting apart.
    const LINEAR_CLI_INSTRUCTION = `

LINEAR CLI (CRITICAL):
Use the Linear CLI (\`pnpm af-linear\`) for ALL Linear operations. Do NOT use Linear MCP tools.
See the project documentation (CLAUDE.md / AGENTS.md) for the full command reference.

HUMAN-NEEDED BLOCKERS:
If you encounter work that requires human action and cannot be resolved autonomously
(e.g., missing API keys/credentials, infrastructure not provisioned, third-party onboarding,
manual setup steps, policy decisions, access permissions), create a blocker issue:
pnpm af-linear create-blocker <SOURCE-ISSUE-ID> --title "What human needs to do" --description "Detailed steps"
This creates a tracked issue in Icebox with 'Needs Human' label, linked as blocking the source issue.
Do NOT silently skip human-needed work or bury it in comments.
Only create blockers for things that genuinely require a human — not for things you can retry or work around.`;
    const DEPENDENCY_INSTALLATION = `DEPENDENCY INSTALLATION:
Dependencies are symlinked from the main repo by the orchestrator. Do NOT run pnpm install.
If you encounter a specific "Cannot find module" error, run it SYNCHRONOUSLY
(never with run_in_background). Never use sleep or polling loops to wait for commands.`;
    const LARGE_FILE_GUIDANCE = `IMPORTANT: If you encounter "exceeds maximum allowed tokens" error when reading files:
- Use Grep to search for specific code patterns instead of reading entire files
- Use Read with offset/limit parameters to paginate through large files
- Avoid reading auto-generated files like payload-types.ts (use Grep instead)
See the "Working with Large Files" section in the project documentation (CLAUDE.md / AGENTS.md) for details.`;
    let basePrompt;
    switch (workType) {
        case 'research':
            basePrompt = `Research and flesh out story ${identifier}.
Analyze requirements, identify technical approach, estimate complexity,
and update the story description with detailed acceptance criteria.
Do NOT implement code. Focus on story refinement only.${LINEAR_CLI_INSTRUCTION}`;
            break;
        case 'backlog-creation':
            basePrompt = `Create backlog issues from the researched story ${identifier}.
Read the issue description, identify distinct work items, classify each as bug/feature/chore,
and create appropriately scoped Linear issues in Icebox status (so a human can review before moving to Backlog).
Choose the correct issue structure based on the work:
- Sub-issues (--parentId): When work is a single concern with sequential/parallel phases sharing context and dependencies. Keep source in Icebox as parent. Add blocking relations (--type blocks) between sub-issues to define execution order for the coordinator.
- Independent issues (--type related): When items are unrelated work in different codebase areas with no shared context. Source stays in Icebox.
- Single issue rewrite: When scope is atomic (single concern, \u22643 files, no phases). Rewrite source in-place, keep in Icebox.
IMPORTANT: When creating multiple issues (sub-issues or independent), always add "related" links between them AND blocking relations where one step depends on another. This informs sub-agents and the coordinator of execution order.
Do NOT wait for user approval - create issues automatically.${LINEAR_CLI_INSTRUCTION}`;
            break;
        case 'development':
            basePrompt = `Start work on ${identifier}.
Implement the feature/fix as specified in the issue description.

${DEPENDENCY_INSTALLATION}

${LARGE_FILE_GUIDANCE}${LINEAR_CLI_INSTRUCTION}`;
            break;
        case 'inflight':
            basePrompt = `Continue work on ${identifier}.
Resume where you left off. Check the issue for any new comments or feedback.

${DEPENDENCY_INSTALLATION}

${LARGE_FILE_GUIDANCE}${LINEAR_CLI_INSTRUCTION}`;
            break;
        case 'qa':
            basePrompt = `QA ${identifier}.
Validate the implementation against acceptance criteria.
Run tests, check for regressions, verify the PR meets requirements.

STRUCTURED RESULT MARKER (REQUIRED):
You MUST include a structured result marker in your final output message.
The orchestrator parses your output to determine whether to promote or reject the issue.
Without this marker, the issue status will NOT be updated automatically.
- On QA pass: Include <!-- WORK_RESULT:passed --> in your final message
- On QA fail: Include <!-- WORK_RESULT:failed --> in your final message

${DEPENDENCY_INSTALLATION}

${LARGE_FILE_GUIDANCE}${LINEAR_CLI_INSTRUCTION}`;
            break;
        case 'acceptance':
            basePrompt = `Process acceptance for ${identifier}.
Validate development and QA work is complete.
Verify PR is ready to merge (CI passing, no conflicts).
Merge the PR using: gh pr merge <PR_NUMBER> --squash
After merge succeeds, delete the remote branch: git push origin --delete <BRANCH_NAME>

STRUCTURED RESULT MARKER (REQUIRED):
You MUST include a structured result marker in your final output message.
The orchestrator parses your output to determine whether to promote or reject the issue.
Without this marker, the issue status will NOT be updated automatically.
- On acceptance pass: Include <!-- WORK_RESULT:passed --> in your final message
- On acceptance fail: Include <!-- WORK_RESULT:failed --> in your final message${LINEAR_CLI_INSTRUCTION}`;
            break;
        case 'refinement':
            basePrompt = `Refine ${identifier} based on rejection feedback.
Read the rejection comments, identify required changes,
update the issue description with refined requirements,
then return to Backlog for re-implementation.${LINEAR_CLI_INSTRUCTION}`;
            break;
        case 'refinement-coordination':
            basePrompt = `Coordinate refinement across sub-issues for parent issue ${identifier}.

WORKFLOW:
1. Read the QA/acceptance failure comments on ${identifier} to identify which sub-issues failed and why
2. Fetch sub-issues: pnpm af-linear list-sub-issues ${identifier}
3. For each FAILING sub-issue:
a. Update its description with the specific failure feedback from the QA/acceptance report
b. Move it back to Backlog: pnpm af-linear update-sub-issue <id> --state Backlog --comment "Refinement: <failure summary>"
4. Leave PASSING sub-issues in their current state (Finished) — do not re-run them
5. Once all failing sub-issues are updated, the parent issue will be moved to Backlog by the orchestrator,
which will trigger a coordination agent that picks up only the Backlog sub-issues for re-implementation.

IMPORTANT CONSTRAINTS:
- This is a REFINEMENT task — do NOT implement fixes yourself, only triage and route feedback to sub-issues.
- NEVER run pnpm af-linear update-issue --state on the parent issue. The orchestrator manages parent status transitions.
- Only use pnpm af-linear for: list-sub-issues, list-sub-issue-statuses, get-issue, list-comments, create-comment, update-sub-issue${LINEAR_CLI_INSTRUCTION}`;
            break;
        case 'coordination':
            basePrompt = `Coordinate sub-issue execution for parent issue ${identifier}.
Fetch sub-issues with dependency graph, create Claude Code Tasks mapping to each sub-issue,
spawn sub-agents for unblocked sub-issues in parallel, monitor completion,
and create a single PR with all changes when done.

SUB-ISSUE STATUS MANAGEMENT:
You MUST update sub-issue statuses in Linear as work progresses:
- When starting work on a sub-issue: pnpm af-linear update-sub-issue <id> --state Started
- When a sub-agent completes a sub-issue: pnpm af-linear update-sub-issue <id> --state Finished --comment "Completed by coordinator agent"
- If a sub-agent fails on a sub-issue: pnpm af-linear create-comment <sub-issue-id> --body "Sub-agent failed: <reason>"

COMPLETION VERIFICATION:
Before marking the parent issue as complete, verify ALL sub-issues are in Finished status:
pnpm af-linear list-sub-issue-statuses ${identifier}
If any sub-issue is not Finished, report the failure and do not mark the parent as complete.

SUB-AGENT SAFETY RULES (CRITICAL):
This is a SHARED WORKTREE. Multiple sub-agents run concurrently in this directory.
Every sub-agent prompt you construct MUST include these rules:

1. NEVER run: git worktree remove, git worktree prune
2. NEVER run: git checkout, git switch (to a different branch)
3. NEVER run: git reset --hard, git clean -fd, git restore .
4. NEVER delete or modify the .git file in the worktree root
5. Only the orchestrator manages worktree lifecycle
6. Work only on files relevant to your sub-issue to minimize conflicts
7. Commit changes with descriptive messages before reporting completion

Prefix every sub-agent prompt with: "SHARED WORKTREE \u2014 DO NOT MODIFY GIT STATE"

${DEPENDENCY_INSTALLATION}

${LARGE_FILE_GUIDANCE}${LINEAR_CLI_INSTRUCTION}`;
            break;
        case 'qa-coordination':
            basePrompt = `Coordinate QA across sub-issues for parent issue ${identifier}.

WORKFLOW:
1. Fetch sub-issues: pnpm af-linear list-sub-issues ${identifier}
2. Create Claude Code Tasks for each sub-issue's QA verification
3. Spawn qa-reviewer sub-agents in parallel \u2014 no dependency graph needed, all sub-issues are already Finished
4. Each sub-agent: reads sub-issue requirements, runs scoped tests, validates implementation, emits pass/fail
5. Collect results \u2014 ALL sub-issues must pass QA for the parent to pass

RESULT HANDLING:
- If ALL pass: Mark parent as complete (transitions to Delivered). Update each sub-issue to Delivered.
- If ANY fail: Post rollup comment listing per-sub-issue results. Emit <!-- WORK_RESULT:failed -->. The orchestrator will move the issue to Rejected for coordinated refinement.

IMPORTANT CONSTRAINTS:
- This is READ-ONLY validation \u2014 do NOT create PRs or make git commits
- The PR already exists from the development coordination phase
- Run pnpm test, pnpm typecheck, and pnpm build as part of validation
- Verify each sub-issue's acceptance criteria against the actual code changes

SUB-AGENT SAFETY RULES (CRITICAL):
This is a SHARED WORKTREE. Multiple sub-agents run concurrently in this directory.
Every sub-agent prompt you construct MUST include these rules:
1. NEVER run: git worktree remove, git worktree prune
2. NEVER run: git checkout, git switch (to a different branch)
3. NEVER run: git reset --hard, git clean -fd, git restore .
4. NEVER delete or modify the .git file in the worktree root
5. Work only on files relevant to your sub-issue to minimize conflicts
Prefix every sub-agent prompt with: "SHARED WORKTREE \u2014 DO NOT MODIFY GIT STATE"

STRUCTURED RESULT MARKER (REQUIRED):
You MUST include a structured result marker in your final output message.
The orchestrator parses your output to determine whether to promote or reject the issue.
Without this marker, the issue status will NOT be updated automatically.
- On QA pass: Include <!-- WORK_RESULT:passed --> in your final message
- On QA fail: Include <!-- WORK_RESULT:failed --> in your final message

${DEPENDENCY_INSTALLATION}

${LARGE_FILE_GUIDANCE}${LINEAR_CLI_INSTRUCTION}`;
            break;
        case 'acceptance-coordination':
            basePrompt = `Coordinate acceptance across sub-issues for parent issue ${identifier}.

WORKFLOW:
1. Verify all sub-issues are in Delivered status: pnpm af-linear list-sub-issue-statuses ${identifier}
2. If any sub-issue is NOT Delivered, report which sub-issues need attention and fail
3. Validate the PR:
- CI checks are passing
- No merge conflicts
- Preview deployment succeeded (if applicable)
4. Merge the PR: gh pr merge <PR_NUMBER> --squash
5. After merge succeeds, delete the remote branch: git push origin --delete <BRANCH_NAME>
6. Bulk-update all sub-issues to Accepted: for each sub-issue, run pnpm af-linear update-sub-issue <id> --state Accepted
7. Mark parent as complete (transitions to Accepted)

IMPORTANT CONSTRAINTS:
- ALL sub-issues must be in Delivered status before proceeding
- The PR must pass CI and have no conflicts
- If merge fails, report the error and do not mark as Accepted

STRUCTURED RESULT MARKER (REQUIRED):
You MUST include a structured result marker in your final output message.
The orchestrator parses your output to determine whether to promote or reject the issue.
Without this marker, the issue status will NOT be updated automatically.
- On acceptance pass: Include <!-- WORK_RESULT:passed --> in your final message
- On acceptance fail: Include <!-- WORK_RESULT:failed --> in your final message

${LARGE_FILE_GUIDANCE}${LINEAR_CLI_INSTRUCTION}`;
            break;
        default:
            // Fail fast: previously an unknown work type fell through and the
            // function returned `undefined` (or "undefined" + failureContext),
            // which would silently launch an agent with a broken prompt.
            throw new Error(`Unknown work type: ${workType}`);
    }
    // Inject workflow failure context for retries
    if (options?.failureContext) {
        basePrompt += options.failureContext;
    }
    // Mention context is appended last so it is visible at the end of the prompt
    if (options?.mentionContext) {
        return `${basePrompt}\n\nAdditional context from the user's mention:\n${options.mentionContext}`;
    }
    return basePrompt;
}
|
|
645
|
+
/**
 * Suffix appended to a worktree identifier for each work type, so that
 * different work types on the same issue never share a worktree directory.
 * Key order is preserved from the original definition.
 */
const WORK_TYPE_SUFFIX = {
    'research': 'RES',
    'backlog-creation': 'BC',
    'development': 'DEV',
    'inflight': 'INF',
    'coordination': 'COORD',
    'qa': 'QA',
    'acceptance': 'AC',
    'refinement': 'REF',
    'refinement-coordination': 'REF-COORD',
    'qa-coordination': 'QA-COORD',
    'acceptance-coordination': 'AC-COORD',
};
/**
 * Build the worktree identifier for an issue/work-type pair.
 *
 * @param issueIdentifier - Issue identifier, e.g. "SUP-294"
 * @param workType - Type of work being performed
 * @returns Suffixed identifier, e.g. "SUP-294-QA"
 */
export function getWorktreeIdentifier(issueIdentifier, workType) {
    return `${issueIdentifier}-${WORK_TYPE_SUFFIX[workType]}`;
}
|
|
673
|
+
/**
 * Orchestrates autonomous agent work against Linear issues: resolves
 * configuration, fetches backlog issues, and tracks per-agent state
 * (sessions, loggers, heartbeats, abort controllers).
 */
export class AgentOrchestrator {
    // Resolved orchestrator configuration (DEFAULT_CONFIG + caller overrides + env)
    config;
    // Linear agent client used for all issue/project API calls
    client;
    // Lifecycle event callbacks supplied by the caller (e.g. onActivityEmitted)
    events;
    // Active agent records, keyed by issue ID (see updateLastActivity)
    activeAgents = new Map();
    // Per-agent handles — presumably keyed by issue ID like the other maps; confirm against launch code
    agentHandles = new Map();
    // Agent provider (defaults to Claude; see constructor)
    provider;
    // Provider session state per agent
    agentSessions = new Map();
    // Activity emitters per agent
    activityEmitters = new Map();
    // Track session ID to issue ID mapping for stop signal handling
    sessionToIssue = new Map();
    // Track AbortControllers for stopping agents
    abortControllers = new Map();
    // Loggers per agent for structured output
    agentLoggers = new Map();
    // Heartbeat writers per agent for crash detection
    heartbeatWriters = new Map();
    // Progress loggers per agent for debugging
    progressLoggers = new Map();
    // Session loggers per agent for verbose analysis logging
    sessionLoggers = new Map();
    // Template registry for configurable workflow prompts (null if loading failed)
    templateRegistry;
    // Allowlisted project names from .agentfactory/config.yaml
    allowedProjects;
    // Project-to-path mapping from .agentfactory/config.yaml (monorepo support)
    projectPaths;
    // Shared paths from .agentfactory/config.yaml (monorepo support)
    sharedPaths;
    // Linear CLI command from .agentfactory/config.yaml (non-Node project support)
    linearCli;
    // Package manager from .agentfactory/config.yaml (non-Node project support)
    packageManager;
    // Configurable build/test/validate commands from .agentfactory/config.yaml
    buildCommand;
    testCommand;
    validateCommand;
    // Tool plugin registry for in-process agent tools
    toolRegistry;
|
|
712
|
+
    /**
     * Build an orchestrator.
     *
     * Timeout precedence: explicit config > AGENT_INACTIVITY_TIMEOUT_MS /
     * AGENT_MAX_SESSION_TIMEOUT_MS env vars > DEFAULT_CONFIG.
     *
     * Two initialization phases are best-effort (failures are non-fatal):
     * the template registry (falls back to hardcoded prompts) and the
     * .agentfactory/config.yaml auto-load (logs a warning).
     *
     * @param config - Partial orchestrator config, merged over DEFAULT_CONFIG
     * @param events - Optional lifecycle event callbacks
     * @throws {Error} when no Linear API key is available via config.linearApiKey
     *   or the LINEAR_API_KEY env var
     */
    constructor(config = {}, events = {}) {
        const apiKey = config.linearApiKey ?? process.env.LINEAR_API_KEY;
        if (!apiKey) {
            throw new Error('LINEAR_API_KEY is required');
        }
        // Parse timeout config from environment variables (can be overridden by config)
        const envInactivityTimeout = process.env.AGENT_INACTIVITY_TIMEOUT_MS
            ? parseInt(process.env.AGENT_INACTIVITY_TIMEOUT_MS, 10)
            : undefined;
        const envMaxSessionTimeout = process.env.AGENT_MAX_SESSION_TIMEOUT_MS
            ? parseInt(process.env.AGENT_MAX_SESSION_TIMEOUT_MS, 10)
            : undefined;
        this.config = {
            ...DEFAULT_CONFIG,
            ...config,
            linearApiKey: apiKey,
            // streamConfig is merged per-field rather than replaced wholesale
            streamConfig: {
                ...DEFAULT_CONFIG.streamConfig,
                ...config.streamConfig,
            },
            apiActivityConfig: config.apiActivityConfig,
            workTypeTimeouts: config.workTypeTimeouts,
            // Config takes precedence over env vars, which take precedence over defaults
            inactivityTimeoutMs: config.inactivityTimeoutMs ?? envInactivityTimeout ?? DEFAULT_CONFIG.inactivityTimeoutMs,
            maxSessionTimeoutMs: config.maxSessionTimeoutMs ?? envMaxSessionTimeout ?? DEFAULT_CONFIG.maxSessionTimeoutMs,
        };
        // Validate git remote matches configured repository (if set)
        if (this.config.repository) {
            validateGitRemote(this.config.repository);
        }
        this.client = createLinearAgentClient({ apiKey });
        this.events = events;
        // Initialize agent provider — defaults to Claude, configurable via env
        const providerName = resolveProviderName({ project: config.project });
        this.provider = config.provider ?? createProvider(providerName);
        // Initialize template registry for configurable workflow prompts.
        // Explicit templateDir (if any) is searched before the auto-detected
        // project directory.
        try {
            const templateDirs = [];
            if (config.templateDir) {
                templateDirs.push(config.templateDir);
            }
            // Auto-detect .agentfactory/templates/ in working directory
            const projectTemplateDir = resolve(process.cwd(), '.agentfactory', 'templates');
            if (existsSync(projectTemplateDir) && !templateDirs.includes(projectTemplateDir)) {
                templateDirs.push(projectTemplateDir);
            }
            this.templateRegistry = TemplateRegistry.create({
                templateDirs,
                useBuiltinDefaults: true,
                frontend: 'linear',
            });
            this.templateRegistry.setToolPermissionAdapter(createToolPermissionAdapter(this.provider.name));
        }
        catch {
            // If template loading fails, fall back to hardcoded prompts
            this.templateRegistry = null;
        }
        // Auto-load .agentfactory/config.yaml from repository root
        try {
            const repoRoot = findRepoRoot(process.cwd());
            if (repoRoot) {
                const repoConfig = loadRepositoryConfig(repoRoot);
                if (repoConfig) {
                    // Use repository from config as fallback if not set in OrchestratorConfig
                    if (!this.config.repository && repoConfig.repository) {
                        this.config.repository = repoConfig.repository;
                        validateGitRemote(this.config.repository);
                    }
                    // Store allowedProjects for backlog filtering.
                    // projectPaths takes precedence: its keys become the allowlist.
                    if (repoConfig.projectPaths) {
                        this.projectPaths = repoConfig.projectPaths;
                        this.sharedPaths = repoConfig.sharedPaths;
                        this.allowedProjects = Object.keys(repoConfig.projectPaths);
                    }
                    else if (repoConfig.allowedProjects) {
                        this.allowedProjects = repoConfig.allowedProjects;
                    }
                    // Store non-Node project config
                    if (repoConfig.linearCli) {
                        this.linearCli = repoConfig.linearCli;
                    }
                    if (repoConfig.packageManager) {
                        this.packageManager = repoConfig.packageManager;
                    }
                    // Store configurable build/test/validate commands
                    if (repoConfig.buildCommand) {
                        this.buildCommand = repoConfig.buildCommand;
                    }
                    if (repoConfig.testCommand) {
                        this.testCommand = repoConfig.testCommand;
                    }
                    if (repoConfig.validateCommand) {
                        this.validateCommand = repoConfig.validateCommand;
                    }
                }
            }
        }
        catch (err) {
            // Non-fatal: the orchestrator still works without repository config
            console.warn('[orchestrator] Failed to load .agentfactory/config.yaml:', err instanceof Error ? err.message : err);
        }
        // Initialize tool plugin registry with Linear plugin
        this.toolRegistry = new ToolRegistry();
        this.toolRegistry.register(linearPlugin);
    }
|
|
816
|
+
/**
|
|
817
|
+
* Update the last activity timestamp for an agent (for inactivity timeout tracking)
|
|
818
|
+
* @param issueId - The issue ID of the agent
|
|
819
|
+
* @param activityType - Optional description of the activity type
|
|
820
|
+
*/
|
|
821
|
+
updateLastActivity(issueId, activityType = 'activity') {
|
|
822
|
+
const agent = this.activeAgents.get(issueId);
|
|
823
|
+
if (agent) {
|
|
824
|
+
agent.lastActivityAt = new Date();
|
|
825
|
+
this.events.onActivityEmitted?.(agent, activityType);
|
|
826
|
+
}
|
|
827
|
+
}
|
|
828
|
+
/**
|
|
829
|
+
* Get timeout configuration for a specific work type
|
|
830
|
+
* @param workType - The work type to get timeout config for
|
|
831
|
+
* @returns Timeout configuration with inactivity and max session values
|
|
832
|
+
*/
|
|
833
|
+
getTimeoutConfig(workType) {
|
|
834
|
+
const baseConfig = {
|
|
835
|
+
inactivityTimeoutMs: this.config.inactivityTimeoutMs,
|
|
836
|
+
maxSessionTimeoutMs: this.config.maxSessionTimeoutMs,
|
|
837
|
+
};
|
|
838
|
+
// Apply work-type-specific overrides if configured
|
|
839
|
+
if (workType && this.config.workTypeTimeouts?.[workType]) {
|
|
840
|
+
const override = this.config.workTypeTimeouts[workType];
|
|
841
|
+
return {
|
|
842
|
+
inactivityTimeoutMs: override?.inactivityTimeoutMs ?? baseConfig.inactivityTimeoutMs,
|
|
843
|
+
maxSessionTimeoutMs: override?.maxSessionTimeoutMs ?? baseConfig.maxSessionTimeoutMs,
|
|
844
|
+
};
|
|
845
|
+
}
|
|
846
|
+
return baseConfig;
|
|
847
|
+
}
|
|
848
|
+
    /**
     * Get backlog issues for the configured project.
     *
     * Fetches issues in state "Backlog" (optionally scoped to config.project),
     * cross-checks the project's repository metadata against the configured
     * repository (SUP-725), filters by allowedProjects from
     * .agentfactory/config.yaml, and returns results sorted by priority
     * (issues without a priority sort last).
     *
     * @param limit - Max issues to return; defaults to config.maxConcurrent
     * @returns Array of lightweight issue records
     */
    async getBacklogIssues(limit) {
        const maxIssues = limit ?? this.config.maxConcurrent;
        // Build filter based on project
        const filter = {
            state: { name: { eqIgnoreCase: 'Backlog' } },
        };
        if (this.config.project) {
            const projects = await this.client.linearClient.projects({
                filter: { name: { eqIgnoreCase: this.config.project } },
            });
            if (projects.nodes.length > 0) {
                filter.project = { id: { eq: projects.nodes[0].id } };
                // Cross-reference project repo metadata with config (SUP-725)
                // eslint-disable-next-line @typescript-eslint/no-explicit-any -- runtime check for method added in SUP-725
                const clientAny = this.client;
                if (this.config.repository && typeof clientAny.getProjectRepositoryUrl === 'function') {
                    try {
                        const projectRepoUrl = await clientAny.getProjectRepositoryUrl(projects.nodes[0].id);
                        if (projectRepoUrl) {
                            // Compare URLs ignoring scheme and trailing .git;
                            // substring match in either direction counts as a match
                            const normalizedProjectRepo = projectRepoUrl
                                .replace(/^https?:\/\//, '')
                                .replace(/\.git$/, '');
                            const normalizedConfigRepo = this.config.repository
                                .replace(/^https?:\/\//, '')
                                .replace(/\.git$/, '');
                            if (!normalizedProjectRepo.includes(normalizedConfigRepo) && !normalizedConfigRepo.includes(normalizedProjectRepo)) {
                                console.warn(`Warning: Project '${this.config.project}' repository metadata '${projectRepoUrl}' ` +
                                    `does not match configured repository '${this.config.repository}'. Skipping issues.`);
                                return [];
                            }
                        }
                    }
                    catch (error) {
                        // Non-fatal: log warning but continue if metadata check fails
                        console.warn('Warning: Could not check project repository metadata:', error instanceof Error ? error.message : String(error));
                    }
                }
            }
        }
        const issues = await this.client.linearClient.issues({
            filter,
            first: maxIssues * 2, // Fetch extra to account for filtering
        });
        const results = [];
        for (const issue of issues.nodes) {
            if (results.length >= maxIssues)
                break;
            // Filter by allowedProjects from .agentfactory/config.yaml
            let resolvedProjectName;
            if (this.allowedProjects && this.allowedProjects.length > 0) {
                const project = await issue.project;
                const projectName = project?.name;
                if (!projectName || !this.allowedProjects.includes(projectName)) {
                    console.warn(`[orchestrator] Skipping issue ${issue.identifier} — project "${projectName ?? '(none)'}" is not in allowedProjects: [${this.allowedProjects.join(', ')}]`);
                    continue;
                }
                resolvedProjectName = projectName;
            }
            // Resolve project name for path scoping even when not filtering by allowedProjects
            if (!resolvedProjectName && this.projectPaths) {
                const project = await issue.project;
                resolvedProjectName = project?.name;
            }
            const labels = await issue.labels();
            const team = await issue.team;
            results.push({
                id: issue.id,
                identifier: issue.identifier,
                title: issue.title,
                description: issue.description ?? undefined,
                url: issue.url,
                priority: issue.priority,
                labels: labels.nodes.map((l) => l.name),
                teamName: team?.key,
                projectName: resolvedProjectName,
            });
        }
        // Sort by priority (lower number = higher priority, 0 means no priority -> goes last)
        return results.sort((a, b) => {
            const aPriority = a.priority || 5;
            const bPriority = b.priority || 5;
            return aPriority - bPriority;
        });
    }
|
|
935
|
+
/**
|
|
936
|
+
* Validate that a path is a valid git worktree
|
|
937
|
+
*/
|
|
938
|
+
validateWorktree(worktreePath) {
|
|
939
|
+
if (!existsSync(worktreePath)) {
|
|
940
|
+
return { valid: false, reason: 'Directory does not exist' };
|
|
941
|
+
}
|
|
942
|
+
const gitPath = resolve(worktreePath, '.git');
|
|
943
|
+
if (!existsSync(gitPath)) {
|
|
944
|
+
return { valid: false, reason: 'Missing .git file' };
|
|
945
|
+
}
|
|
946
|
+
// Verify .git is a worktree reference file (not a directory)
|
|
947
|
+
try {
|
|
948
|
+
const stat = statSync(gitPath);
|
|
949
|
+
if (stat.isDirectory()) {
|
|
950
|
+
return { valid: false, reason: '.git is a directory, not a worktree reference' };
|
|
951
|
+
}
|
|
952
|
+
const content = readFileSync(gitPath, 'utf-8');
|
|
953
|
+
if (!content.includes('gitdir:')) {
|
|
954
|
+
return { valid: false, reason: '.git file missing gitdir reference' };
|
|
955
|
+
}
|
|
956
|
+
}
|
|
957
|
+
catch {
|
|
958
|
+
return { valid: false, reason: 'Cannot read .git file' };
|
|
959
|
+
}
|
|
960
|
+
return { valid: true };
|
|
961
|
+
}
|
|
962
|
+
/**
|
|
963
|
+
* Extract the full error message from an execSync error.
|
|
964
|
+
*
|
|
965
|
+
* Node's execSync throws an Error where .message only contains
|
|
966
|
+
* "Command failed: <command>", but the actual git error output
|
|
967
|
+
* is in .stderr. This helper combines both for reliable pattern matching.
|
|
968
|
+
*/
|
|
969
|
+
getExecSyncErrorMessage(error) {
    // execSync failures put the useful git output in .stderr/.stdout, while
    // .message only says "Command failed: <command>". Join whichever string
    // fields are present so callers can pattern-match reliably.
    if (error === null || typeof error !== 'object') {
        return String(error);
    }
    const pieces = ['message', 'stderr', 'stdout']
        .filter((key) => key in error && typeof error[key] === 'string')
        .map((key) => error[key]);
    return pieces.join('\n');
}
|
|
985
|
+
/**
|
|
986
|
+
* Check if a git error indicates a branch/worktree conflict.
|
|
987
|
+
*
|
|
988
|
+
* Git uses different error messages depending on the situation:
|
|
989
|
+
* - "is already checked out at '/path'" - branch checked out in another worktree
|
|
990
|
+
* - "is already used by worktree at '/path'" - branch associated with another worktree
|
|
991
|
+
*
|
|
992
|
+
* Both mean the same thing: the branch is occupied by another worktree.
|
|
993
|
+
*/
|
|
994
|
+
isBranchConflictError(errorMsg) {
|
|
995
|
+
return errorMsg.includes('is already checked out at') ||
|
|
996
|
+
errorMsg.includes('is already used by worktree at');
|
|
997
|
+
}
|
|
998
|
+
/**
|
|
999
|
+
* Extract the conflicting worktree path from a git branch conflict error.
|
|
1000
|
+
*
|
|
1001
|
+
* Parses paths like:
|
|
1002
|
+
* - "fatal: 'SUP-402' is already checked out at '/path/to/.worktrees/SUP-402-DEV'"
|
|
1003
|
+
* - "fatal: 'SUP-402' is already used by worktree at '/path/to/.worktrees/SUP-402-DEV'"
|
|
1004
|
+
*/
|
|
1005
|
+
parseConflictingWorktreePath(errorMsg) {
|
|
1006
|
+
// Match either "checked out at" or "used by worktree at" followed by a quoted path
|
|
1007
|
+
const match = errorMsg.match(/(?:already checked out at|already used by worktree at)\s+'([^']+)'/);
|
|
1008
|
+
return match?.[1] ?? null;
|
|
1009
|
+
}
|
|
1010
|
+
/**
|
|
1011
|
+
* Check if a path is the main git working tree (not a worktree).
|
|
1012
|
+
*
|
|
1013
|
+
* The main working tree has a `.git` directory, while worktrees have a
|
|
1014
|
+
* `.git` file containing a `gitdir:` pointer. This is the primary safeguard
|
|
1015
|
+
* against accidentally destroying the main repository.
|
|
1016
|
+
*/
|
|
1017
|
+
isMainWorktree(targetPath) {
|
|
1018
|
+
try {
|
|
1019
|
+
const gitPath = resolve(targetPath, '.git');
|
|
1020
|
+
if (!existsSync(gitPath))
|
|
1021
|
+
return false;
|
|
1022
|
+
const stat = statSync(gitPath);
|
|
1023
|
+
// Main working tree has .git as a directory; worktrees have .git as a file
|
|
1024
|
+
if (stat.isDirectory())
|
|
1025
|
+
return true;
|
|
1026
|
+
// Double-check via `git worktree list --porcelain`
|
|
1027
|
+
const output = execSync('git worktree list --porcelain', {
|
|
1028
|
+
stdio: 'pipe',
|
|
1029
|
+
encoding: 'utf-8',
|
|
1030
|
+
});
|
|
1031
|
+
const mainTreeMatch = output.match(/^worktree (.+)$/m);
|
|
1032
|
+
if (mainTreeMatch) {
|
|
1033
|
+
const mainTreePath = mainTreeMatch[1];
|
|
1034
|
+
return resolve(targetPath) === resolve(mainTreePath);
|
|
1035
|
+
}
|
|
1036
|
+
}
|
|
1037
|
+
catch {
|
|
1038
|
+
// If we can't determine, err on the side of caution - treat as main
|
|
1039
|
+
return true;
|
|
1040
|
+
}
|
|
1041
|
+
return false;
|
|
1042
|
+
}
|
|
1043
|
+
/**
|
|
1044
|
+
* Check if a path is inside the configured .worktrees/ directory.
|
|
1045
|
+
*
|
|
1046
|
+
* Only paths within the worktrees directory should ever be candidates for
|
|
1047
|
+
* automated cleanup. This prevents the main repo or other directories from
|
|
1048
|
+
* being targeted.
|
|
1049
|
+
*/
|
|
1050
|
+
isInsideWorktreesDir(targetPath) {
|
|
1051
|
+
const worktreesDir = resolve(this.config.worktreePath);
|
|
1052
|
+
const normalizedTarget = resolve(targetPath);
|
|
1053
|
+
// Must be inside .worktrees/ (not equal to it)
|
|
1054
|
+
return normalizedTarget.startsWith(worktreesDir + '/');
|
|
1055
|
+
}
|
|
1056
|
+
/**
|
|
1057
|
+
* Attempt to clean up a stale worktree that is blocking branch creation.
|
|
1058
|
+
*
|
|
1059
|
+
* During dev\u2192qa\u2192acceptance handoffs, the prior work type's worktree may still
|
|
1060
|
+
* exist after its agent has finished (the orchestrator cleans up externally,
|
|
1061
|
+
* but there's a race window). This method checks if the blocking worktree's
|
|
1062
|
+
* agent is still alive via heartbeat. If not, it removes the stale worktree
|
|
1063
|
+
* so the new work type can proceed.
|
|
1064
|
+
*
|
|
1065
|
+
* SAFETY: This method will NEVER clean up the main working tree. It only
|
|
1066
|
+
* operates on paths inside the .worktrees/ directory. This prevents
|
|
1067
|
+
* catastrophic data loss when a branch is checked out in the main tree
|
|
1068
|
+
* (e.g., by a user in their IDE).
|
|
1069
|
+
*
|
|
1070
|
+
* @returns true if the conflicting worktree was cleaned up
|
|
1071
|
+
*/
|
|
1072
|
+
tryCleanupConflictingWorktree(conflictPath, branchName) {
    // SAFETY GUARD 1: Never touch the main working tree
    if (this.isMainWorktree(conflictPath)) {
        console.warn(`SAFETY: Refusing to clean up ${conflictPath} \u2014 it is the main working tree. ` +
            `Branch '${branchName}' appears to be checked out in the main repo (e.g., via IDE). ` +
            `The agent will retry or skip this issue.`);
        return false;
    }
    // SAFETY GUARD 2: Only clean up paths inside .worktrees/
    if (!this.isInsideWorktreesDir(conflictPath)) {
        console.warn(`SAFETY: Refusing to clean up ${conflictPath} \u2014 it is not inside the worktrees directory. ` +
            `Only paths inside '${resolve(this.config.worktreePath)}' can be auto-cleaned.`);
        return false;
    }
    if (!existsSync(conflictPath)) {
        // Directory doesn't exist - just prune git's worktree list
        try {
            execSync('git worktree prune', { stdio: 'pipe', encoding: 'utf-8' });
            console.log(`Pruned stale worktree reference for branch ${branchName}`);
            return true;
        }
        catch {
            return false;
        }
    }
    // Check if the agent in the conflicting worktree is still alive
    // (liveness only — maxRecoveryAttempts: 0 disables any recovery action)
    const recoveryInfo = checkRecovery(conflictPath, {
        heartbeatTimeoutMs: getHeartbeatTimeoutFromEnv(),
        maxRecoveryAttempts: 0, // We don't want to recover, just check liveness
    });
    if (recoveryInfo.agentAlive) {
        console.log(`Branch ${branchName} is held by a running agent at ${conflictPath} - cannot clean up`);
        return false;
    }
    // Agent is not alive - check for incomplete work before cleaning up
    const incompleteCheck = checkForIncompleteWork(conflictPath);
    if (incompleteCheck.hasIncompleteWork) {
        // Save a patch before removing so work can be recovered
        try {
            const patchDir = resolve(this.config.worktreePath, '.patches');
            if (!existsSync(patchDir)) {
                mkdirSync(patchDir, { recursive: true });
            }
            // ISO timestamp with ':' and '.' replaced so it is filename-safe
            const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
            const patchName = `${branchName}-${timestamp}.patch`;
            const patchPath = resolve(patchDir, patchName);
            // Capture both staged and unstaged changes
            const diff = execSync('git diff HEAD', {
                cwd: conflictPath,
                encoding: 'utf-8',
                timeout: 10000,
            });
            if (diff.trim().length > 0) {
                writeFileSync(patchPath, diff);
                console.log(`Saved incomplete work patch: ${patchPath}`);
            }
            // Also capture untracked files list
            const untracked = execSync('git ls-files --others --exclude-standard', {
                cwd: conflictPath,
                encoding: 'utf-8',
                timeout: 10000,
            }).trim();
            if (untracked.length > 0) {
                // Create a full diff including untracked files
                // NOTE: intentionally overwrites the tracked-only patch written
                // above with a superset that also covers untracked files.
                const fullDiff = execSync('git diff HEAD -- . && git diff --no-index /dev/null $(git ls-files --others --exclude-standard) 2>/dev/null || true', {
                    cwd: conflictPath,
                    encoding: 'utf-8',
                    timeout: 10000,
                    shell: '/bin/bash',
                });
                if (fullDiff.trim().length > 0) {
                    writeFileSync(patchPath, fullDiff);
                    console.log(`Saved incomplete work patch (including untracked files): ${patchPath}`);
                }
            }
        }
        catch (patchError) {
            // Best-effort: losing the patch is acceptable; blocking the
            // handoff indefinitely is not, so continue with cleanup anyway.
            console.warn('Failed to save work patch before cleanup:', patchError instanceof Error ? patchError.message : String(patchError));
        }
    }
    console.log(`Cleaning up stale worktree at ${conflictPath} (agent no longer running) ` +
        `to unblock branch ${branchName}`);
    try {
        execSync(`git worktree remove "${conflictPath}" --force`, {
            stdio: 'pipe',
            encoding: 'utf-8',
        });
        console.log(`Removed stale worktree: ${conflictPath}`);
        return true;
    }
    catch (removeError) {
        const removeMsg = removeError instanceof Error ? removeError.message : String(removeError);
        console.warn(`Failed to remove stale worktree ${conflictPath}:`, removeMsg);
        // SAFETY GUARD 3: If git itself says "main working tree", absolutely stop
        if (removeMsg.includes('is a main working tree')) {
            console.error(`SAFETY: git confirmed ${conflictPath} is the main working tree. Aborting cleanup.`);
            return false;
        }
        // Fallback: rm -rf + prune (safe because guards 1 & 2 already verified
        // this path is inside .worktrees/ and is not the main tree)
        try {
            execSync(`rm -rf "${conflictPath}"`, { stdio: 'pipe', encoding: 'utf-8' });
            execSync('git worktree prune', { stdio: 'pipe', encoding: 'utf-8' });
            console.log(`Force-removed stale worktree: ${conflictPath}`);
            return true;
        }
        catch {
            return false;
        }
    }
}
|
|
1183
|
+
/**
|
|
1184
|
+
* Handle a branch conflict error by attempting to clean up the stale worktree
|
|
1185
|
+
* and retrying, or throwing a retriable error for the worker's retry loop.
|
|
1186
|
+
*/
|
|
1187
|
+
handleBranchConflict(errorMsg, branchName) {
|
|
1188
|
+
const conflictPath = this.parseConflictingWorktreePath(errorMsg);
|
|
1189
|
+
if (conflictPath) {
|
|
1190
|
+
const cleaned = this.tryCleanupConflictingWorktree(conflictPath, branchName);
|
|
1191
|
+
if (cleaned) {
|
|
1192
|
+
// Return without throwing - the caller should retry the git command
|
|
1193
|
+
return;
|
|
1194
|
+
}
|
|
1195
|
+
}
|
|
1196
|
+
// Could not clean up - throw retriable error for worker's retry loop
|
|
1197
|
+
throw new Error(`Branch '${branchName}' is already checked out in another worktree. ` +
|
|
1198
|
+
`This may indicate another agent is still working on this issue.`);
|
|
1199
|
+
}
|
|
1200
|
+
/**
|
|
1201
|
+
* Create a git worktree for an issue with work type suffix
|
|
1202
|
+
*
|
|
1203
|
+
* @param issueIdentifier - Issue identifier (e.g., "SUP-294")
|
|
1204
|
+
* @param workType - Type of work being performed
|
|
1205
|
+
* @returns Object containing worktreePath and worktreeIdentifier
|
|
1206
|
+
*/
|
|
1207
|
+
createWorktree(issueIdentifier, workType) {
    const worktreeIdentifier = getWorktreeIdentifier(issueIdentifier, workType);
    const worktreePath = resolve(this.config.worktreePath, worktreeIdentifier);
    // Use issue identifier for branch name (shared across work types)
    const branchName = issueIdentifier;
    // Ensure parent directory exists
    const parentDir = resolve(this.config.worktreePath);
    if (!existsSync(parentDir)) {
        mkdirSync(parentDir, { recursive: true });
    }
    // Prune any stale worktrees first (handles deleted directories)
    try {
        execSync('git worktree prune', { stdio: 'pipe', encoding: 'utf-8' });
    }
    catch {
        // Ignore prune errors
    }
    // Check if worktree already exists AND is valid
    // A valid worktree has a .git file (not directory) pointing to parent repo with gitdir reference
    if (existsSync(worktreePath)) {
        const validation = this.validateWorktree(worktreePath);
        if (validation.valid) {
            // Idempotent: reuse the existing, healthy worktree.
            console.log(`Worktree already exists: ${worktreePath}`);
            return { worktreePath, worktreeIdentifier };
        }
        // Invalid/incomplete worktree - must clean up
        console.log(`Removing invalid worktree: ${worktreePath} (${validation.reason})`);
        try {
            // maxRetries/retryDelay let rmSync retry briefly if the directory
            // is transiently busy (e.g. a process still holds files open)
            rmSync(worktreePath, { recursive: true, force: true, maxRetries: 3, retryDelay: 1000 });
        }
        catch (cleanupError) {
            throw new Error(`Failed to clean up invalid worktree at ${worktreePath}: ` +
                `${cleanupError instanceof Error ? cleanupError.message : String(cleanupError)}`);
        }
        // Verify cleanup worked
        if (existsSync(worktreePath)) {
            throw new Error(`Failed to remove invalid worktree directory at ${worktreePath}`);
        }
    }
    console.log(`Creating worktree: ${worktreePath} (branch: ${branchName})`);
    // Determine the base branch for new worktrees
    // Always base new feature branches on 'main' to avoid HEAD resolution issues
    // when running from worktrees with deleted branches (e.g., after PR merge in acceptance)
    const baseBranch = 'main';
    // Try to create worktree with new branch
    // Uses a two-attempt strategy: if a branch conflict is detected and the
    // conflicting worktree's agent is no longer alive, clean it up and retry once.
    const MAX_CONFLICT_RETRIES = 1;
    let conflictRetries = 0;
    // NOTE: attemptCreateWorktree recurses after a successful cleanup, but
    // conflictRetries is shared across both conflict sites, so the recursion
    // depth is bounded by MAX_CONFLICT_RETRIES.
    const attemptCreateWorktree = () => {
        try {
            execSync(`git worktree add "${worktreePath}" -b ${branchName} ${baseBranch}`, {
                stdio: 'pipe',
                encoding: 'utf-8',
            });
        }
        catch (error) {
            // Branch might already exist or be checked out elsewhere
            // Note: execSync errors have the git message in .stderr, not just .message
            const errorMsg = this.getExecSyncErrorMessage(error);
            // If branch is in use by another worktree, try to clean up the stale worktree
            if (this.isBranchConflictError(errorMsg)) {
                if (conflictRetries < MAX_CONFLICT_RETRIES) {
                    conflictRetries++;
                    // handleBranchConflict returns if cleanup succeeded, throws if not
                    this.handleBranchConflict(errorMsg, branchName);
                    // Cleanup succeeded - retry
                    console.log(`Retrying worktree creation after cleaning up stale worktree`);
                    attemptCreateWorktree();
                    return;
                }
                throw new Error(`Branch '${branchName}' is already checked out in another worktree. ` +
                    `This may indicate another agent is still working on this issue.`);
            }
            if (errorMsg.includes('already exists')) {
                // Branch exists, try without -b flag
                try {
                    execSync(`git worktree add "${worktreePath}" ${branchName}`, {
                        stdio: 'pipe',
                        encoding: 'utf-8',
                    });
                }
                catch (innerError) {
                    const innerMsg = this.getExecSyncErrorMessage(innerError);
                    // If branch is in use by another worktree, try to clean up
                    if (this.isBranchConflictError(innerMsg)) {
                        if (conflictRetries < MAX_CONFLICT_RETRIES) {
                            conflictRetries++;
                            this.handleBranchConflict(innerMsg, branchName);
                            console.log(`Retrying worktree creation after cleaning up stale worktree`);
                            attemptCreateWorktree();
                            return;
                        }
                        throw new Error(`Branch '${branchName}' is already checked out in another worktree. ` +
                            `This may indicate another agent is still working on this issue.`);
                    }
                    // For any other error, propagate it
                    throw innerError;
                }
            }
            else {
                throw error;
            }
        }
    };
    attemptCreateWorktree();
    // Validate worktree was created correctly
    const validation = this.validateWorktree(worktreePath);
    if (!validation.valid) {
        // Clean up partial state
        try {
            if (existsSync(worktreePath)) {
                execSync(`rm -rf "${worktreePath}"`, { stdio: 'pipe', encoding: 'utf-8' });
            }
            execSync('git worktree prune', { stdio: 'pipe', encoding: 'utf-8' });
        }
        catch {
            // Ignore cleanup errors
        }
        throw new Error(`Failed to create valid worktree at ${worktreePath}: ${validation.reason}. ` +
            `This may indicate a race condition with another agent.`);
    }
    console.log(`Worktree created successfully: ${worktreePath}`);
    // Initialize .agent/ directory for state persistence
    try {
        initializeAgentDir(worktreePath);
    }
    catch (initError) {
        // Log but don't fail - state persistence is optional
        console.warn(`Failed to initialize .agent/ directory: ${initError instanceof Error ? initError.message : String(initError)}`);
    }
    // Write helper scripts into .agent/ for agent use
    this.writeWorktreeHelpers(worktreePath);
    return { worktreePath, worktreeIdentifier };
}
|
|
1342
|
+
/**
|
|
1343
|
+
* Clean up a git worktree
|
|
1344
|
+
*
|
|
1345
|
+
* @param worktreeIdentifier - Worktree identifier with work type suffix (e.g., "SUP-294-QA")
|
|
1346
|
+
*/
|
|
1347
|
+
removeWorktree(worktreeIdentifier) {
|
|
1348
|
+
const worktreePath = resolve(this.config.worktreePath, worktreeIdentifier);
|
|
1349
|
+
if (existsSync(worktreePath)) {
|
|
1350
|
+
try {
|
|
1351
|
+
execSync(`git worktree remove "${worktreePath}" --force`, {
|
|
1352
|
+
stdio: 'pipe',
|
|
1353
|
+
encoding: 'utf-8',
|
|
1354
|
+
});
|
|
1355
|
+
}
|
|
1356
|
+
catch (error) {
|
|
1357
|
+
console.warn(`Failed to remove worktree via git, trying fallback:`, error);
|
|
1358
|
+
try {
|
|
1359
|
+
execSync(`rm -rf "${worktreePath}"`, { stdio: 'pipe', encoding: 'utf-8' });
|
|
1360
|
+
execSync('git worktree prune', { stdio: 'pipe', encoding: 'utf-8' });
|
|
1361
|
+
}
|
|
1362
|
+
catch (fallbackError) {
|
|
1363
|
+
console.warn(`Fallback worktree removal also failed:`, fallbackError);
|
|
1364
|
+
}
|
|
1365
|
+
}
|
|
1366
|
+
}
|
|
1367
|
+
else {
|
|
1368
|
+
// Directory gone but git may still track it
|
|
1369
|
+
try {
|
|
1370
|
+
execSync('git worktree prune', { stdio: 'pipe', encoding: 'utf-8' });
|
|
1371
|
+
}
|
|
1372
|
+
catch {
|
|
1373
|
+
// Ignore
|
|
1374
|
+
}
|
|
1375
|
+
}
|
|
1376
|
+
}
|
|
1377
|
+
/**
|
|
1378
|
+
* Write helper scripts into the worktree's .agent/ directory.
|
|
1379
|
+
*
|
|
1380
|
+
* Currently writes:
|
|
1381
|
+
* - .agent/add-dep.sh: Safely adds a new dependency by removing symlinked
|
|
1382
|
+
* node_modules first, then running `pnpm add` with the guard bypass.
|
|
1383
|
+
*/
|
|
1384
|
+
writeWorktreeHelpers(worktreePath) {
|
|
1385
|
+
// Skip helper scripts for non-Node projects (no pnpm/npm available)
|
|
1386
|
+
if (this.packageManager === 'none') {
|
|
1387
|
+
return;
|
|
1388
|
+
}
|
|
1389
|
+
const agentDir = resolve(worktreePath, '.agent');
|
|
1390
|
+
const scriptPath = resolve(agentDir, 'add-dep.sh');
|
|
1391
|
+
const script = `#!/bin/bash
|
|
1392
|
+
# Safe dependency addition for agents in worktrees.
|
|
1393
|
+
# Removes symlinked node_modules, then runs pnpm add with guard bypass.
|
|
1394
|
+
# Usage: bash .agent/add-dep.sh <package> [--filter <workspace>]
|
|
1395
|
+
set -e
|
|
1396
|
+
if [ $# -eq 0 ]; then
|
|
1397
|
+
echo "Usage: bash .agent/add-dep.sh <package> [--filter <workspace>]"
|
|
1398
|
+
exit 1
|
|
1399
|
+
fi
|
|
1400
|
+
echo "Cleaning symlinked node_modules..."
|
|
1401
|
+
rm -rf node_modules
|
|
1402
|
+
for subdir in apps packages; do
|
|
1403
|
+
[ -d "$subdir" ] && find "$subdir" -maxdepth 2 -name node_modules -type d -exec rm -rf {} + 2>/dev/null || true
|
|
1404
|
+
done
|
|
1405
|
+
echo "Installing: pnpm add $@"
|
|
1406
|
+
ORCHESTRATOR_INSTALL=1 exec pnpm add "$@"
|
|
1407
|
+
`;
|
|
1408
|
+
try {
|
|
1409
|
+
if (!existsSync(agentDir)) {
|
|
1410
|
+
mkdirSync(agentDir, { recursive: true });
|
|
1411
|
+
}
|
|
1412
|
+
writeFileSync(scriptPath, script, { mode: 0o755 });
|
|
1413
|
+
}
|
|
1414
|
+
catch (error) {
|
|
1415
|
+
// Log but don't fail — the helper is optional
|
|
1416
|
+
console.warn(`Failed to write worktree helper scripts: ${error instanceof Error ? error.message : String(error)}`);
|
|
1417
|
+
}
|
|
1418
|
+
}
|
|
1419
|
+
/**
|
|
1420
|
+
* Link dependencies from the main repo into a worktree via symlinks.
|
|
1421
|
+
*
|
|
1422
|
+
* Creates a REAL node_modules directory in the worktree and symlinks each
|
|
1423
|
+
* entry (packages, .pnpm, .bin) individually. This prevents pnpm from
|
|
1424
|
+
* resolving through a directory-level symlink and corrupting the main
|
|
1425
|
+
* repo's node_modules when an agent accidentally runs `pnpm install`.
|
|
1426
|
+
*
|
|
1427
|
+
* For non-Node repos (no node_modules in main repo), this is a no-op.
|
|
1428
|
+
*
|
|
1429
|
+
* Falls back to `pnpm install --frozen-lockfile` if symlinking fails.
|
|
1430
|
+
*/
|
|
1431
|
+
linkDependencies(worktreePath, identifier) {
|
|
1432
|
+
const repoRoot = findRepoRoot(worktreePath);
|
|
1433
|
+
if (!repoRoot) {
|
|
1434
|
+
console.warn(`[${identifier}] Could not find repo root, skipping dependency linking`);
|
|
1435
|
+
return;
|
|
1436
|
+
}
|
|
1437
|
+
const mainNodeModules = resolve(repoRoot, 'node_modules');
|
|
1438
|
+
if (!existsSync(mainNodeModules)) {
|
|
1439
|
+
// Not a Node.js project, or deps not installed in main repo — nothing to do
|
|
1440
|
+
console.log(`[${identifier}] No node_modules in main repo, skipping dependency linking`);
|
|
1441
|
+
return;
|
|
1442
|
+
}
|
|
1443
|
+
console.log(`[${identifier}] Linking dependencies from main repo...`);
|
|
1444
|
+
try {
|
|
1445
|
+
// Link root node_modules — create a real directory with symlinked contents
|
|
1446
|
+
// so pnpm can't follow a top-level symlink to corrupt the main repo
|
|
1447
|
+
const destRoot = resolve(worktreePath, 'node_modules');
|
|
1448
|
+
this.linkNodeModulesContents(mainNodeModules, destRoot, identifier);
|
|
1449
|
+
// Link per-workspace node_modules (apps/*, packages/*)
|
|
1450
|
+
let skipped = 0;
|
|
1451
|
+
for (const subdir of ['apps', 'packages']) {
|
|
1452
|
+
const mainSubdir = resolve(repoRoot, subdir);
|
|
1453
|
+
if (!existsSync(mainSubdir))
|
|
1454
|
+
continue;
|
|
1455
|
+
for (const entry of readdirSync(mainSubdir)) {
|
|
1456
|
+
const src = resolve(mainSubdir, entry, 'node_modules');
|
|
1457
|
+
const destParent = resolve(worktreePath, subdir, entry);
|
|
1458
|
+
const dest = resolve(destParent, 'node_modules');
|
|
1459
|
+
if (!existsSync(src))
|
|
1460
|
+
continue;
|
|
1461
|
+
// Skip entries where the app/package doesn't exist on this branch
|
|
1462
|
+
if (!existsSync(destParent)) {
|
|
1463
|
+
skipped++;
|
|
1464
|
+
continue;
|
|
1465
|
+
}
|
|
1466
|
+
this.linkNodeModulesContents(src, dest, identifier);
|
|
1467
|
+
}
|
|
1468
|
+
}
|
|
1469
|
+
if (skipped > 0) {
|
|
1470
|
+
console.log(`[${identifier}] Dependencies linked successfully (${skipped} workspace(s) skipped — not on this branch)`);
|
|
1471
|
+
}
|
|
1472
|
+
else {
|
|
1473
|
+
console.log(`[${identifier}] Dependencies linked successfully`);
|
|
1474
|
+
}
|
|
1475
|
+
}
|
|
1476
|
+
catch (error) {
|
|
1477
|
+
console.warn(`[${identifier}] Symlink failed, falling back to install:`, error instanceof Error ? error.message : String(error));
|
|
1478
|
+
this.installDependencies(worktreePath, identifier);
|
|
1479
|
+
}
|
|
1480
|
+
}
|
|
1481
|
+
/**
|
|
1482
|
+
* Create a real node_modules directory and symlink each entry from the source.
|
|
1483
|
+
*
|
|
1484
|
+
* Instead of symlinking the entire node_modules directory (which lets pnpm
|
|
1485
|
+
* resolve through the symlink and corrupt the original), we create a real
|
|
1486
|
+
* directory and symlink each entry individually. If pnpm "recreates" this
|
|
1487
|
+
* directory, it only destroys the worktree's symlinks — not the originals.
|
|
1488
|
+
*/
|
|
1489
|
+
linkNodeModulesContents(srcNodeModules, destNodeModules, identifier) {
|
|
1490
|
+
if (existsSync(destNodeModules))
|
|
1491
|
+
return;
|
|
1492
|
+
mkdirSync(destNodeModules, { recursive: true });
|
|
1493
|
+
for (const entry of readdirSync(srcNodeModules)) {
|
|
1494
|
+
const srcEntry = resolve(srcNodeModules, entry);
|
|
1495
|
+
const destEntry = resolve(destNodeModules, entry);
|
|
1496
|
+
// For scoped packages (@org/), create the scope dir and symlink contents
|
|
1497
|
+
if (entry.startsWith('@')) {
|
|
1498
|
+
const stat = lstatSync(srcEntry);
|
|
1499
|
+
if (stat.isDirectory()) {
|
|
1500
|
+
mkdirSync(destEntry, { recursive: true });
|
|
1501
|
+
for (const scopedEntry of readdirSync(srcEntry)) {
|
|
1502
|
+
const srcScoped = resolve(srcEntry, scopedEntry);
|
|
1503
|
+
const destScoped = resolve(destEntry, scopedEntry);
|
|
1504
|
+
if (!existsSync(destScoped)) {
|
|
1505
|
+
symlinkSync(srcScoped, destScoped);
|
|
1506
|
+
}
|
|
1507
|
+
}
|
|
1508
|
+
continue;
|
|
1509
|
+
}
|
|
1510
|
+
}
|
|
1511
|
+
if (!existsSync(destEntry)) {
|
|
1512
|
+
symlinkSync(srcEntry, destEntry);
|
|
1513
|
+
}
|
|
1514
|
+
}
|
|
1515
|
+
}
|
|
1516
|
+
/**
|
|
1517
|
+
* Fallback: install dependencies via pnpm install.
|
|
1518
|
+
* Only called when symlinking fails.
|
|
1519
|
+
*/
|
|
1520
|
+
installDependencies(worktreePath, identifier) {
|
|
1521
|
+
console.log(`[${identifier}] Installing dependencies via pnpm...`);
|
|
1522
|
+
// Remove any node_modules from a partial linkDependencies attempt.
|
|
1523
|
+
// Handles both old format (directory-level symlink) and new format
|
|
1524
|
+
// (real directory with symlinked contents).
|
|
1525
|
+
const destRoot = resolve(worktreePath, 'node_modules');
|
|
1526
|
+
try {
|
|
1527
|
+
if (existsSync(destRoot)) {
|
|
1528
|
+
rmSync(destRoot, { recursive: true, force: true });
|
|
1529
|
+
console.log(`[${identifier}] Removed partial node_modules before install`);
|
|
1530
|
+
}
|
|
1531
|
+
}
|
|
1532
|
+
catch {
|
|
1533
|
+
// Ignore cleanup errors — pnpm install may still work
|
|
1534
|
+
}
|
|
1535
|
+
// Also remove any per-workspace node_modules that were partially created
|
|
1536
|
+
for (const subdir of ['apps', 'packages']) {
|
|
1537
|
+
const subPath = resolve(worktreePath, subdir);
|
|
1538
|
+
if (!existsSync(subPath))
|
|
1539
|
+
continue;
|
|
1540
|
+
try {
|
|
1541
|
+
for (const entry of readdirSync(subPath)) {
|
|
1542
|
+
const nm = resolve(subPath, entry, 'node_modules');
|
|
1543
|
+
if (existsSync(nm)) {
|
|
1544
|
+
rmSync(nm, { recursive: true, force: true });
|
|
1545
|
+
}
|
|
1546
|
+
}
|
|
1547
|
+
}
|
|
1548
|
+
catch {
|
|
1549
|
+
// Ignore cleanup errors
|
|
1550
|
+
}
|
|
1551
|
+
}
|
|
1552
|
+
// Set ORCHESTRATOR_INSTALL=1 to bypass the preinstall guard script
|
|
1553
|
+
// that blocks pnpm install in worktrees (to prevent symlink corruption).
|
|
1554
|
+
const installEnv = { ...process.env, ORCHESTRATOR_INSTALL: '1' };
|
|
1555
|
+
try {
|
|
1556
|
+
execSync('pnpm install --frozen-lockfile 2>&1', {
|
|
1557
|
+
cwd: worktreePath,
|
|
1558
|
+
stdio: 'pipe',
|
|
1559
|
+
encoding: 'utf-8',
|
|
1560
|
+
timeout: 120_000,
|
|
1561
|
+
env: installEnv,
|
|
1562
|
+
});
|
|
1563
|
+
console.log(`[${identifier}] Dependencies installed successfully`);
|
|
1564
|
+
}
|
|
1565
|
+
catch {
|
|
1566
|
+
try {
|
|
1567
|
+
execSync('pnpm install 2>&1', {
|
|
1568
|
+
cwd: worktreePath,
|
|
1569
|
+
stdio: 'pipe',
|
|
1570
|
+
encoding: 'utf-8',
|
|
1571
|
+
timeout: 120_000,
|
|
1572
|
+
env: installEnv,
|
|
1573
|
+
});
|
|
1574
|
+
console.log(`[${identifier}] Dependencies installed (without frozen lockfile)`);
|
|
1575
|
+
}
|
|
1576
|
+
catch (retryError) {
|
|
1577
|
+
console.warn(`[${identifier}] Install failed (agent may retry):`, retryError instanceof Error ? retryError.message : String(retryError));
|
|
1578
|
+
}
|
|
1579
|
+
}
|
|
1580
|
+
}
|
|
1581
|
+
/**
|
|
1582
|
+
* @deprecated Use linkDependencies() instead. This now delegates to linkDependencies.
|
|
1583
|
+
*/
|
|
1584
|
+
preInstallDependencies(worktreePath, identifier) {
    // Deprecated shim retained for callers that predate linkDependencies();
    // delegates unchanged so behavior is identical.
    this.linkDependencies(worktreePath, identifier);
}
|
|
1587
|
+
/**
 * Spawn a Claude agent for a specific issue using the Agent SDK.
 *
 * Builds the agent prompt (custom > template registry > hardcoded fallback),
 * registers in-memory bookkeeping (loggers, session map, abort controller),
 * optionally initializes on-disk state/heartbeat/progress/session logging for
 * worktree-based agents, assembles the child environment, and finally spawns
 * the agent through the configured provider. Event processing is kicked off
 * in the background; this method returns synchronously.
 *
 * @param {object} options
 * @param {string} options.issueId - Linear issue ID (used as the key in all per-agent maps).
 * @param {string} options.identifier - Human-readable issue identifier (e.g. "SUP-123").
 * @param {string} [options.worktreeIdentifier] - Worktree key; also used as the Claude Code task-list ID when present.
 * @param {string} [options.sessionId] - Linear agent session ID; enables activity streaming and stop-signal mapping.
 * @param {string} [options.worktreePath] - Worktree directory; when absent, state persistence is skipped and cwd falls back to process.cwd().
 * @param {boolean} [options.streamActivities] - Explicit streaming override; defaults to "stream iff sessionId is set".
 * @param {string} [options.workType='development'] - Kind of work; drives prompt selection, env flags, and turn limits.
 * @param {string} [options.prompt] - Custom prompt; when provided it wins over templates.
 * @param {string} [options.teamName] - Exported to the agent as LINEAR_TEAM_NAME when set.
 * @param {string} [options.projectName] - Key into this.projectPaths for template context.
 * @returns {object} The in-memory agent record (also stored in this.activeAgents).
 */
spawnAgent(options) {
    const { issueId, identifier, worktreeIdentifier, sessionId, worktreePath, streamActivities, workType = 'development', prompt: customPrompt, teamName, projectName, } = options;
    // Generate prompt based on work type, or use custom prompt if provided.
    // Priority: explicit custom prompt > template registry > hardcoded prompts.
    let prompt;
    if (customPrompt) {
        prompt = customPrompt;
    }
    else if (this.templateRegistry?.hasTemplate(workType)) {
        // Context handed to the template renderer; values fall back to
        // pnpm-based defaults when the orchestrator wasn't configured with them.
        const context = {
            identifier,
            repository: this.config.repository,
            projectPath: this.projectPaths?.[projectName ?? ''],
            sharedPaths: this.sharedPaths,
            useToolPlugins: this.provider.name === 'claude',
            linearCli: this.linearCli ?? 'pnpm af-linear',
            packageManager: this.packageManager ?? 'pnpm',
            buildCommand: this.buildCommand,
            testCommand: this.testCommand,
            validateCommand: this.validateCommand,
        };
        const rendered = this.templateRegistry.renderPrompt(workType, context);
        // renderPrompt may return null/undefined — fall back to the hardcoded prompt.
        prompt = rendered ?? generatePromptForWorkType(identifier, workType);
    }
    else {
        prompt = generatePromptForWorkType(identifier, workType);
    }
    // Create logger for this agent
    const log = createLogger({ issueIdentifier: identifier });
    this.agentLoggers.set(issueId, log);
    const now = new Date();
    // In-memory agent record; this exact object is returned to the caller and
    // mutated later (pid, status, resultMessage, ...) as events arrive.
    const agent = {
        issueId,
        identifier,
        worktreeIdentifier,
        sessionId,
        worktreePath,
        pid: undefined,
        status: 'starting',
        startedAt: now,
        lastActivityAt: now, // Initialize for inactivity tracking
        workType,
    };
    this.activeAgents.set(issueId, agent);
    // Track session to issue mapping for stop signal handling
    if (sessionId) {
        this.sessionToIssue.set(sessionId, issueId);
    }
    // Initialize state persistence and monitoring (only for worktree-based agents)
    if (worktreePath) {
        try {
            // Write initial state
            const initialState = createInitialState({
                issueId,
                issueIdentifier: identifier,
                linearSessionId: sessionId ?? null,
                workType,
                prompt,
                workerId: this.config.apiActivityConfig?.workerId ?? null,
                pid: null, // Will be updated when process spawns
            });
            writeState(worktreePath, initialState);
            // Start heartbeat writer for crash detection
            const heartbeatWriter = createHeartbeatWriter({
                agentDir: resolve(worktreePath, '.agent'),
                pid: process.pid, // Will be updated to child PID after spawn
                intervalMs: getHeartbeatIntervalFromEnv(),
                startTime: now.getTime(),
            });
            heartbeatWriter.start();
            this.heartbeatWriters.set(issueId, heartbeatWriter);
            // Start progress logger for debugging
            const progressLogger = createProgressLogger({
                agentDir: resolve(worktreePath, '.agent'),
            });
            progressLogger.logStart({ issueId, workType, prompt: prompt.substring(0, 200) });
            this.progressLoggers.set(issueId, progressLogger);
            // Start session logger for verbose analysis if enabled
            if (isSessionLoggingEnabled()) {
                const logConfig = getLogAnalysisConfig();
                const sessionLogger = createSessionLogger({
                    sessionId: sessionId ?? issueId,
                    issueId,
                    issueIdentifier: identifier,
                    workType,
                    prompt,
                    logsDir: logConfig.logsDir,
                    workerId: this.config.apiActivityConfig?.workerId,
                });
                this.sessionLoggers.set(issueId, sessionLogger);
                log.debug('Session logging initialized', { logsDir: logConfig.logsDir });
            }
            log.debug('State persistence initialized', { agentDir: resolve(worktreePath, '.agent') });
        }
        catch (stateError) {
            // Log but don't fail - state persistence is optional
            log.warn('Failed to initialize state persistence', {
                error: stateError instanceof Error ? stateError.message : String(stateError),
            });
        }
    }
    this.events.onAgentStart?.(agent);
    // Set up activity streaming if sessionId is provided.
    // Explicit streamActivities flag wins; otherwise stream whenever a session exists.
    const shouldStream = streamActivities ?? !!sessionId;
    let emitter = null;
    if (shouldStream && sessionId) {
        // Check if we should use API-based activity emitter (for remote workers)
        // This proxies activities through the agent app which has OAuth tokens
        if (this.config.apiActivityConfig) {
            const { baseUrl, apiKey, workerId } = this.config.apiActivityConfig;
            log.debug('Using API activity emitter', { baseUrl });
            emitter = createApiActivityEmitter({
                sessionId,
                workerId,
                apiBaseUrl: baseUrl,
                apiKey,
                minInterval: this.config.streamConfig.minInterval,
                maxOutputLength: this.config.streamConfig.maxOutputLength,
                includeTimestamps: this.config.streamConfig.includeTimestamps,
                onActivityEmitted: (type, content) => {
                    log.activity(type, content);
                },
                onActivityError: (type, error) => {
                    log.error(`Activity error (${type})`, { error: error.message });
                },
            });
        }
        else {
            // Direct Linear API - only works with OAuth tokens (not API keys)
            // This will fail for createAgentActivity calls but works for comments
            const session = createAgentSession({
                client: this.client.linearClient,
                issueId,
                sessionId,
                autoTransition: false, // Orchestrator handles transitions
            });
            this.agentSessions.set(issueId, session);
            // Create ActivityEmitter with rate limiting
            emitter = createActivityEmitter({
                session,
                minInterval: this.config.streamConfig.minInterval,
                maxOutputLength: this.config.streamConfig.maxOutputLength,
                includeTimestamps: this.config.streamConfig.includeTimestamps,
                onActivityEmitted: (type, content) => {
                    log.activity(type, content);
                },
            });
        }
        this.activityEmitters.set(issueId, emitter);
    }
    // Create AbortController for cancellation
    const abortController = new AbortController();
    this.abortControllers.set(issueId, abortController);
    // Load environment from settings.local.json
    const envBaseDir = worktreePath ?? process.cwd();
    const settingsEnv = loadSettingsEnv(envBaseDir, log);
    // Load app-specific env files based on work type
    // Development work loads .env.local, QA/acceptance loads .env.test.local
    const appEnv = loadAppEnvFiles(envBaseDir, workType, log);
    // Build environment variables - inherit ALL from process.env (required for node to be found)
    // Then overlay app env vars, settings.local.json env vars, then our specific vars.
    // AGENT_ENV_BLOCKLIST keys are stripped from every layer so the agent can't
    // inherit them from any source.
    const processEnvFiltered = {};
    for (const [key, value] of Object.entries(process.env)) {
        if (typeof value === 'string' && !AGENT_ENV_BLOCKLIST.includes(key)) {
            processEnvFiltered[key] = value;
        }
    }
    const filteredAppEnv = Object.fromEntries(Object.entries(appEnv).filter(([key]) => !AGENT_ENV_BLOCKLIST.includes(key)));
    const filteredSettingsEnv = Object.fromEntries(Object.entries(settingsEnv).filter(([key]) => !AGENT_ENV_BLOCKLIST.includes(key)));
    const env = {
        ...processEnvFiltered, // Include all parent env vars (PATH, NODE_PATH, etc.)
        ...filteredAppEnv, // Include app env vars (blocklisted keys stripped)
        ...filteredSettingsEnv, // Include settings.local.json env vars (blocklisted keys stripped)
        LINEAR_ISSUE_ID: issueId,
        // Disable user .npmrc to prevent picking up expired auth tokens from ~/.npmrc
        // Point to a non-existent file so npm/pnpm won't try to use stale credentials
        NPM_CONFIG_USERCONFIG: '/dev/null',
        npm_config_userconfig: '/dev/null',
    };
    if (sessionId) {
        env.LINEAR_SESSION_ID = sessionId;
    }
    // Set work type so agent knows what kind of work it's doing
    env.LINEAR_WORK_TYPE = workType;
    // Flag shared worktree for coordination mode so sub-agents know not to modify git state
    if (workType === 'coordination' || workType === 'qa-coordination' || workType === 'acceptance-coordination' || workType === 'refinement-coordination') {
        env.SHARED_WORKTREE = 'true';
    }
    // Set Claude Code Task List ID for intra-issue task coordination
    // This enables Tasks to persist across crashes and be shared between subagents
    // Format: {issueIdentifier}-{WORKTYPE} (e.g., "SUP-123-DEV")
    env.CLAUDE_CODE_TASK_LIST_ID = worktreeIdentifier ?? `${identifier}-${WORK_TYPE_SUFFIX[workType]}`;
    // Set team name so agents can use `pnpm af-linear create-issue` without --team
    if (teamName) {
        env.LINEAR_TEAM_NAME = teamName;
    }
    log.info('Starting agent via provider', { provider: this.provider.name, cwd: worktreePath ?? 'repo-root', workType, promptPreview: prompt.substring(0, 50) });
    // Create in-process tool servers from registered plugins
    // (only the Claude provider consumes MCP servers; others get undefined)
    const mcpServers = this.provider.name === 'claude'
        ? this.toolRegistry.createServers({ env, cwd: worktreePath ?? process.cwd() })
        : undefined;
    // Coordinators need significantly more turns than standard agents
    // since they spawn sub-agents and poll their status repeatedly.
    // Inflight also gets the bump — it may be resuming coordination work.
    const needsMoreTurns = workType === 'coordination' || workType === 'qa-coordination' || workType === 'acceptance-coordination' || workType === 'refinement-coordination' || workType === 'inflight';
    const maxTurns = needsMoreTurns ? 200 : undefined;
    // Spawn agent via provider interface
    const spawnConfig = {
        prompt,
        cwd: worktreePath ?? process.cwd(),
        env,
        abortController,
        autonomous: true,
        sandboxEnabled: this.config.sandboxEnabled,
        mcpServers,
        maxTurns,
        onProcessSpawned: (pid) => {
            // Record the child PID on the agent record once the provider reports it.
            agent.pid = pid;
            log.info('Agent process spawned', { pid });
        },
    };
    const handle = this.provider.spawn(spawnConfig);
    this.agentHandles.set(issueId, handle);
    agent.status = 'running';
    // Process the event stream in the background.
    // Deliberately not awaited: processEventStream wraps its body in try/catch
    // and performs its own cleanup, so this promise is fire-and-forget.
    this.processEventStream(issueId, identifier, sessionId, handle, emitter, agent);
    return agent;
}
|
|
1818
|
+
/**
 * Process the provider event stream and emit activities.
 *
 * Consumes every event from the spawned agent's stream (delegating per-event
 * handling to handleAgentEvent), then runs the completion pipeline: final
 * status resolution, Linear session close-out, optional auto-transition based
 * on parsed WORK_RESULT markers, completion comment, worktree cleanup (with
 * preservation of uncommitted work), session-logger finalization, in-memory
 * cleanup, and event callbacks. The catch block mirrors that pipeline for
 * aborts and hard failures.
 *
 * @param {string} issueId - Linear issue ID keying all per-agent maps.
 * @param {string} identifier - Human-readable issue identifier (currently unused here directly).
 * @param {string|undefined} sessionId - Linear agent session ID, if streaming.
 * @param {object} handle - Provider spawn handle exposing the async-iterable `stream`.
 * @param {object|null} emitter - Activity emitter for the Linear session, or null.
 * @param {object} agent - Mutable agent record created by spawnAgent.
 * @returns {Promise<void>} Never rejects: all errors are handled internally.
 */
async processEventStream(issueId, identifier, sessionId, handle, emitter, agent) {
    const log = this.agentLoggers.get(issueId);
    // Accumulate all assistant text for WORK_RESULT marker fallback scanning.
    // The provider's result message only contains the final turn's text, but
    // the agent may have emitted the marker in an earlier turn.
    const assistantTextChunks = [];
    try {
        for await (const event of handle.stream) {
            if (event.type === 'assistant_text') {
                assistantTextChunks.push(event.text);
            }
            // Also capture tool call inputs that may contain WORK_RESULT markers.
            // Agents sometimes embed the marker inside a create-comment body rather
            // than in their direct text output.
            if (event.type === 'tool_use' && event.input) {
                const inputStr = typeof event.input === 'string' ? event.input : JSON.stringify(event.input);
                if (inputStr.includes('WORK_RESULT')) {
                    assistantTextChunks.push(inputStr);
                }
            }
            await this.handleAgentEvent(issueId, sessionId, event, emitter, agent, handle);
        }
        // Query completed successfully — preserve 'failed' status set by error results
        // (e.g., error_max_turns, error_during_execution) so auto-transition doesn't
        // fire with an empty resultMessage
        if (agent.status !== 'stopped' && agent.status !== 'failed') {
            agent.status = 'completed';
        }
        agent.completedAt = new Date();
        // Update state file to completed (only for worktree-based agents)
        if (agent.worktreePath) {
            try {
                updateState(agent.worktreePath, {
                    status: agent.status === 'stopped' ? 'stopped' : agent.status === 'failed' ? 'failed' : 'completed',
                    pullRequestUrl: agent.pullRequestUrl ?? undefined,
                });
            }
            catch {
                // Ignore state update errors
            }
        }
        // Emit a final response activity to close the Linear agent session.
        // Linear auto-transitions sessions to "complete" when a response activity is emitted.
        if (emitter && (agent.status === 'completed' || agent.status === 'failed')) {
            try {
                if (agent.status === 'completed') {
                    // Truncate to 500 chars — the full result is posted separately
                    // via postCompletionComment below.
                    const summary = agent.resultMessage
                        ? agent.resultMessage.substring(0, 500)
                        : 'Work completed successfully.';
                    await emitter.emitResponse(summary);
                }
                else {
                    await emitter.emitResponse(agent.resultMessage || 'Agent encountered an error during execution.');
                }
            }
            catch (emitError) {
                log?.warn('Failed to emit completion response activity', {
                    error: emitError instanceof Error ? emitError.message : String(emitError),
                });
            }
        }
        // Flush remaining activities
        if (emitter) {
            await emitter.flush();
        }
        // Update Linear status based on work type if auto-transition is enabled.
        // Note: the whole auto-transition/unassign branch is skipped for agents
        // that ended in 'failed' status.
        if (agent.status === 'completed' && this.config.autoTransition) {
            const workType = agent.workType ?? 'development';
            const isResultSensitive = workType === 'qa' || workType === 'acceptance' || workType === 'coordination' || workType === 'qa-coordination' || workType === 'acceptance-coordination';
            let targetStatus = null;
            if (isResultSensitive) {
                // For QA/acceptance: parse result to decide promote vs reject.
                // Try the final result message first, then fall back to scanning
                // all accumulated assistant text (the marker may be in an earlier turn).
                let workResult = parseWorkResult(agent.resultMessage, workType);
                if (workResult === 'unknown' && assistantTextChunks.length > 0) {
                    const fullText = assistantTextChunks.join('\n');
                    workResult = parseWorkResult(fullText, workType);
                    if (workResult !== 'unknown') {
                        log?.info('Work result found in accumulated text (not in final message)', { workResult });
                    }
                }
                agent.workResult = workResult;
                if (workResult === 'passed') {
                    targetStatus = WORK_TYPE_COMPLETE_STATUS[workType];
                    log?.info('Work result: passed, promoting', { workType, targetStatus });
                }
                else if (workResult === 'failed') {
                    targetStatus = WORK_TYPE_FAIL_STATUS[workType];
                    log?.info('Work result: failed, transitioning to fail status', { workType, targetStatus });
                }
                else {
                    // unknown — safe default: don't transition
                    log?.warn('Work result: unknown, skipping auto-transition', {
                        workType,
                        hasResultMessage: !!agent.resultMessage,
                    });
                    // Post a diagnostic comment so the issue doesn't silently stall
                    try {
                        await this.client.createComment(issueId, `⚠️ Agent completed but no structured result marker was detected in the output.\n\n` +
                            `**Issue status was NOT updated automatically.**\n\n` +
                            `The orchestrator expected one of:\n` +
                            `- \`<!-- WORK_RESULT:passed -->\` to promote the issue\n` +
                            `- \`<!-- WORK_RESULT:failed -->\` to record a failure\n\n` +
                            `This usually means the agent exited early (timeout, error, or missing logic). ` +
                            `Check the agent logs for details, then manually update the issue status or re-trigger the agent.`);
                        log?.info('Posted diagnostic comment for unknown work result');
                    }
                    catch (error) {
                        log?.warn('Failed to post diagnostic comment for unknown work result', {
                            error: error instanceof Error ? error.message : String(error),
                        });
                    }
                }
            }
            else {
                // Non-QA/acceptance: unchanged behavior — always promote on completion
                targetStatus = WORK_TYPE_COMPLETE_STATUS[workType];
            }
            if (targetStatus) {
                try {
                    await this.client.updateIssueStatus(issueId, targetStatus);
                    log?.info('Issue status updated', { from: workType, to: targetStatus });
                }
                catch (error) {
                    log?.error('Failed to update status', {
                        targetStatus,
                        error: error instanceof Error ? error.message : String(error),
                    });
                }
            }
            else if (!isResultSensitive) {
                log?.info('No auto-transition configured for work type', { workType });
            }
            // Unassign agent from issue for clean handoff visibility
            // This enables automated QA pickup via webhook
            // Skip unassignment for research work (user should decide when to move to backlog)
            if (workType !== 'research') {
                try {
                    await this.client.unassignIssue(issueId);
                    log?.info('Agent unassigned from issue');
                }
                catch (error) {
                    log?.warn('Failed to unassign agent from issue', {
                        error: error instanceof Error ? error.message : String(error),
                    });
                }
            }
        }
        // Post completion comment with full result (not truncated)
        // This uses multi-comment splitting for long messages
        if (agent.status === 'completed' && agent.resultMessage) {
            await this.postCompletionComment(issueId, sessionId, agent.resultMessage, log);
        }
        // Clean up worktree for completed agents
        // NOTE: This must happen AFTER the agent exits to avoid breaking its shell session
        // Agents should NEVER clean up their own worktree - this is the orchestrator's job
        if (agent.status === 'completed' && agent.worktreePath) {
            const shouldPreserve = this.config.preserveWorkOnPrFailure ?? DEFAULT_CONFIG.preserveWorkOnPrFailure;
            let shouldCleanup = true;
            // Validate that PR was created or work was fully pushed before cleanup
            if (shouldPreserve) {
                if (!agent.pullRequestUrl) {
                    // No PR detected - check for uncommitted/unpushed work
                    const incompleteCheck = checkForIncompleteWork(agent.worktreePath);
                    if (incompleteCheck.hasIncompleteWork) {
                        // Mark as incomplete and preserve worktree
                        agent.status = 'incomplete';
                        agent.incompleteReason = incompleteCheck.reason;
                        shouldCleanup = false;
                        log?.warn('Work incomplete - preserving worktree', {
                            reason: incompleteCheck.reason,
                            details: incompleteCheck.details,
                            worktreePath: agent.worktreePath,
                        });
                        // Delete the heartbeat file so the preserved worktree isn't falsely
                        // detected as having a live agent (which would block branch reuse)
                        try {
                            const heartbeatPath = resolve(agent.worktreePath, '.agent', 'heartbeat.json');
                            if (existsSync(heartbeatPath)) {
                                unlinkSync(heartbeatPath);
                            }
                        }
                        catch {
                            // Best-effort - heartbeat will go stale naturally after timeout
                        }
                    }
                    else {
                        // No PR but also no local changes - agent may not have made any changes
                        log?.warn('No PR created but worktree is clean - proceeding with cleanup', {
                            worktreePath: agent.worktreePath,
                        });
                    }
                }
            }
            if (shouldCleanup && agent.worktreeIdentifier) {
                try {
                    this.removeWorktree(agent.worktreeIdentifier);
                    log?.info('Worktree cleaned up', { worktreePath: agent.worktreePath });
                }
                catch (error) {
                    log?.warn('Failed to clean up worktree', {
                        worktreePath: agent.worktreePath,
                        error: error instanceof Error ? error.message : String(error),
                    });
                }
            }
        }
        // Finalize session logger before cleanup.
        // NOTE(review): this mapping folds both 'failed' and 'incomplete' into
        // 'completed' for the session logger (only 'stopped' is preserved) —
        // confirm this is intentional and not an oversight.
        const finalStatus = agent.status === 'completed' ? 'completed' : (agent.status === 'stopped' ? 'stopped' : 'completed');
        this.finalizeSessionLogger(issueId, finalStatus, {
            pullRequestUrl: agent.pullRequestUrl,
        });
        // Clean up in-memory resources
        this.cleanupAgent(issueId, sessionId);
        // NOTE(review): an agent that reached here with status 'failed' (set by an
        // error result event) fires none of these callbacks — onAgentError only
        // fires from the catch block below. Confirm that's the intended contract.
        if (agent.status === 'completed') {
            this.events.onAgentComplete?.(agent);
        }
        else if (agent.status === 'incomplete') {
            this.events.onAgentIncomplete?.(agent);
        }
        else if (agent.status === 'stopped') {
            this.events.onAgentStopped?.(agent);
        }
    }
    catch (error) {
        // Handle abort/cancellation
        if (error instanceof Error && error.name === 'AbortError') {
            agent.status = 'stopped';
            agent.completedAt = new Date();
            this.finalizeSessionLogger(issueId, 'stopped');
            this.cleanupAgent(issueId, sessionId);
            this.events.onAgentStopped?.(agent);
            return;
        }
        // Handle other errors
        log?.error('Agent error', { error: error instanceof Error ? error.message : String(error) });
        agent.status = 'failed';
        agent.completedAt = new Date();
        // Preserve the original error (wrapped if it wasn't an Error instance).
        agent.error = error instanceof Error ? error : new Error(String(error));
        // Flush remaining activities
        if (emitter) {
            await emitter.flush();
        }
        // Clean up worktree for failed agents (but preserve if there's work)
        if (agent.worktreePath) {
            const shouldPreserve = this.config.preserveWorkOnPrFailure ?? DEFAULT_CONFIG.preserveWorkOnPrFailure;
            let shouldCleanup = true;
            // Check for any uncommitted/unpushed work before cleaning up
            if (shouldPreserve) {
                const incompleteCheck = checkForIncompleteWork(agent.worktreePath);
                if (incompleteCheck.hasIncompleteWork) {
                    // Preserve worktree - there's work that could be recovered
                    shouldCleanup = false;
                    agent.incompleteReason = incompleteCheck.reason;
                    log?.warn('Agent failed but has uncommitted work - preserving worktree', {
                        reason: incompleteCheck.reason,
                        details: incompleteCheck.details,
                        worktreePath: agent.worktreePath,
                    });
                    // Delete the heartbeat file so the preserved worktree isn't falsely
                    // detected as having a live agent (which would block branch reuse)
                    try {
                        const heartbeatPath = resolve(agent.worktreePath, '.agent', 'heartbeat.json');
                        if (existsSync(heartbeatPath)) {
                            unlinkSync(heartbeatPath);
                        }
                    }
                    catch {
                        // Best-effort - heartbeat will go stale naturally after timeout
                    }
                }
            }
            if (shouldCleanup && agent.worktreeIdentifier) {
                try {
                    this.removeWorktree(agent.worktreeIdentifier);
                    log?.info('Worktree cleaned up after failure', { worktreePath: agent.worktreePath });
                }
                catch (cleanupError) {
                    log?.warn('Failed to clean up worktree after failure', {
                        worktreePath: agent.worktreePath,
                        error: cleanupError instanceof Error ? cleanupError.message : String(cleanupError),
                    });
                }
            }
        }
        // Finalize session logger with error
        this.finalizeSessionLogger(issueId, 'failed', {
            errorMessage: agent.error?.message,
        });
        this.cleanupAgent(issueId, sessionId);
        this.events.onAgentError?.(agent, agent.error);
    }
}
|
|
2115
|
+
/**
|
|
2116
|
+
* Handle a single normalized agent event from any provider
|
|
2117
|
+
*/
|
|
2118
|
+
async handleAgentEvent(issueId, sessionId, event, emitter, agent, handle) {
|
|
2119
|
+
const log = this.agentLoggers.get(issueId);
|
|
2120
|
+
// Get heartbeat writer and progress logger for state updates
|
|
2121
|
+
const heartbeatWriter = this.heartbeatWriters.get(issueId);
|
|
2122
|
+
const progressLogger = this.progressLoggers.get(issueId);
|
|
2123
|
+
const sessionLogger = this.sessionLoggers.get(issueId);
|
|
2124
|
+
switch (event.type) {
|
|
2125
|
+
case 'init':
|
|
2126
|
+
log?.success('Agent initialized', { session: event.sessionId.substring(0, 12) });
|
|
2127
|
+
agent.providerSessionId = event.sessionId;
|
|
2128
|
+
this.updateLastActivity(issueId, 'init');
|
|
2129
|
+
// Update state with provider session ID (only for worktree-based agents)
|
|
2130
|
+
if (agent.worktreePath) {
|
|
2131
|
+
try {
|
|
2132
|
+
updateState(agent.worktreePath, {
|
|
2133
|
+
providerSessionId: event.sessionId,
|
|
2134
|
+
status: 'running',
|
|
2135
|
+
pid: agent.pid ?? null,
|
|
2136
|
+
});
|
|
2137
|
+
}
|
|
2138
|
+
catch {
|
|
2139
|
+
// Ignore state update errors
|
|
2140
|
+
}
|
|
2141
|
+
}
|
|
2142
|
+
// Notify via callback for external persistence
|
|
2143
|
+
if (sessionId) {
|
|
2144
|
+
await this.events.onProviderSessionId?.(sessionId, event.sessionId);
|
|
2145
|
+
}
|
|
2146
|
+
break;
|
|
2147
|
+
case 'system':
|
|
2148
|
+
// System-level events (status changes, compaction, auth, etc.)
|
|
2149
|
+
if (event.subtype === 'status') {
|
|
2150
|
+
log?.debug('Status change', { status: event.message });
|
|
2151
|
+
}
|
|
2152
|
+
else if (event.subtype === 'compact_boundary') {
|
|
2153
|
+
log?.debug('Context compacted');
|
|
2154
|
+
}
|
|
2155
|
+
else if (event.subtype === 'hook_response') {
|
|
2156
|
+
// Provider-specific hook handling — access raw event for details
|
|
2157
|
+
const raw = event.raw;
|
|
2158
|
+
if (raw.exit_code !== undefined && raw.exit_code !== 0) {
|
|
2159
|
+
log?.warn('Hook failed', { hook: raw.hook_name, exitCode: raw.exit_code });
|
|
2160
|
+
}
|
|
2161
|
+
}
|
|
2162
|
+
else if (event.subtype === 'auth_status') {
|
|
2163
|
+
if (event.message?.includes('error') || event.message?.includes('Error')) {
|
|
2164
|
+
log?.error('Auth error', { error: event.message });
|
|
2165
|
+
}
|
|
2166
|
+
}
|
|
2167
|
+
else {
|
|
2168
|
+
log?.debug('System event', { subtype: event.subtype, message: event.message });
|
|
2169
|
+
}
|
|
2170
|
+
break;
|
|
2171
|
+
case 'tool_result':
|
|
2172
|
+
// Tool results — track activity and detect PR URLs
|
|
2173
|
+
this.updateLastActivity(issueId, 'tool_result');
|
|
2174
|
+
sessionLogger?.logToolResult(event.toolUseId ?? 'unknown', event.content, event.isError);
|
|
2175
|
+
// Detect GitHub PR URLs in tool output (from gh pr create)
|
|
2176
|
+
if (sessionId) {
|
|
2177
|
+
const prUrl = this.extractPullRequestUrl(event.content);
|
|
2178
|
+
if (prUrl) {
|
|
2179
|
+
log?.info('Pull request detected', { prUrl });
|
|
2180
|
+
agent.pullRequestUrl = prUrl;
|
|
2181
|
+
await this.updateSessionPullRequest(sessionId, prUrl, agent);
|
|
2182
|
+
}
|
|
2183
|
+
}
|
|
2184
|
+
break;
|
|
2185
|
+
case 'assistant_text':
|
|
2186
|
+
// Assistant text output
|
|
2187
|
+
this.updateLastActivity(issueId, 'assistant');
|
|
2188
|
+
heartbeatWriter?.recordThinking();
|
|
2189
|
+
sessionLogger?.logAssistant(event.text);
|
|
2190
|
+
if (emitter) {
|
|
2191
|
+
await emitter.emitThought(event.text.substring(0, 200));
|
|
2192
|
+
}
|
|
2193
|
+
break;
|
|
2194
|
+
case 'tool_use':
|
|
2195
|
+
// Tool invocation
|
|
2196
|
+
this.updateLastActivity(issueId, 'assistant');
|
|
2197
|
+
log?.toolCall(event.toolName, event.input);
|
|
2198
|
+
heartbeatWriter?.recordToolCall(event.toolName);
|
|
2199
|
+
progressLogger?.logTool(event.toolName, event.input);
|
|
2200
|
+
sessionLogger?.logToolUse(event.toolName, event.input);
|
|
2201
|
+
// Intercept TodoWrite tool calls to persist todos
|
|
2202
|
+
if (event.toolName === 'TodoWrite') {
|
|
2203
|
+
try {
|
|
2204
|
+
const input = event.input;
|
|
2205
|
+
if (input.todos && Array.isArray(input.todos) && agent.worktreePath) {
|
|
2206
|
+
const todosState = {
|
|
2207
|
+
updatedAt: Date.now(),
|
|
2208
|
+
items: input.todos,
|
|
2209
|
+
};
|
|
2210
|
+
writeTodos(agent.worktreePath, todosState);
|
|
2211
|
+
log?.debug('Todos persisted', { count: input.todos.length });
|
|
2212
|
+
}
|
|
2213
|
+
}
|
|
2214
|
+
catch {
|
|
2215
|
+
// Ignore todos persistence errors
|
|
2216
|
+
}
|
|
2217
|
+
}
|
|
2218
|
+
if (emitter) {
|
|
2219
|
+
await emitter.emitToolUse(event.toolName, event.input);
|
|
2220
|
+
}
|
|
2221
|
+
break;
|
|
2222
|
+
case 'tool_progress':
|
|
2223
|
+
// Tool execution progress — track activity for long-running tools
|
|
2224
|
+
this.updateLastActivity(issueId, `tool_progress:${event.toolName}`);
|
|
2225
|
+
log?.debug('Tool progress', { tool: event.toolName, elapsed: `${event.elapsedSeconds}s` });
|
|
2226
|
+
break;
|
|
2227
|
+
case 'result':
|
|
2228
|
+
if (event.success) {
|
|
2229
|
+
log?.success('Agent completed', {
|
|
2230
|
+
cost: event.cost?.totalCostUsd ? `$${event.cost.totalCostUsd.toFixed(4)}` : 'N/A',
|
|
2231
|
+
turns: event.cost?.numTurns,
|
|
2232
|
+
});
|
|
2233
|
+
// Track cost data on the agent
|
|
2234
|
+
if (event.cost) {
|
|
2235
|
+
agent.totalCostUsd = event.cost.totalCostUsd;
|
|
2236
|
+
agent.inputTokens = event.cost.inputTokens;
|
|
2237
|
+
agent.outputTokens = event.cost.outputTokens;
|
|
2238
|
+
}
|
|
2239
|
+
// Store full result for completion comment posting later
|
|
2240
|
+
if (event.message) {
|
|
2241
|
+
agent.resultMessage = event.message;
|
|
2242
|
+
}
|
|
2243
|
+
// Update state to completing/completed (only for worktree-based agents)
|
|
2244
|
+
if (agent.worktreePath) {
|
|
2245
|
+
try {
|
|
2246
|
+
updateState(agent.worktreePath, {
|
|
2247
|
+
status: 'completing',
|
|
2248
|
+
currentPhase: 'Finalizing work',
|
|
2249
|
+
});
|
|
2250
|
+
}
|
|
2251
|
+
catch {
|
|
2252
|
+
// Ignore state update errors
|
|
2253
|
+
}
|
|
2254
|
+
}
|
|
2255
|
+
progressLogger?.logComplete({ message: event.message?.substring(0, 200) });
|
|
2256
|
+
// Check cost limit
|
|
2257
|
+
const maxCostUsd = parseFloat(process.env.AGENT_MAX_COST_USD ?? '0');
|
|
2258
|
+
if (maxCostUsd > 0 && event.cost?.totalCostUsd && event.cost.totalCostUsd > maxCostUsd) {
|
|
2259
|
+
log?.warn('Agent exceeded cost limit', {
|
|
2260
|
+
totalCost: event.cost.totalCostUsd,
|
|
2261
|
+
limit: maxCostUsd,
|
|
2262
|
+
});
|
|
2263
|
+
}
|
|
2264
|
+
// Emit truncated preview to activity feed (ephemeral)
|
|
2265
|
+
if (emitter && event.message) {
|
|
2266
|
+
await emitter.emitThought(`Completed: ${event.message.substring(0, 200)}...`, true);
|
|
2267
|
+
}
|
|
2268
|
+
}
|
|
2269
|
+
else {
|
|
2270
|
+
// Error result — mark agent as failed so auto-transition doesn't fire
|
|
2271
|
+
// with an empty resultMessage (which would always produce 'unknown')
|
|
2272
|
+
agent.status = 'failed';
|
|
2273
|
+
log?.error('Agent error result', { subtype: event.errorSubtype });
|
|
2274
|
+
// Update state to failed
|
|
2275
|
+
const errorMessage = event.errors && event.errors.length > 0
|
|
2276
|
+
? event.errors[0]
|
|
2277
|
+
: `Agent error: ${event.errorSubtype}`;
|
|
2278
|
+
if (agent.worktreePath) {
|
|
2279
|
+
try {
|
|
2280
|
+
updateState(agent.worktreePath, {
|
|
2281
|
+
status: 'failed',
|
|
2282
|
+
errorMessage,
|
|
2283
|
+
});
|
|
2284
|
+
}
|
|
2285
|
+
catch {
|
|
2286
|
+
// Ignore state update errors
|
|
2287
|
+
}
|
|
2288
|
+
}
|
|
2289
|
+
progressLogger?.logError('Agent error result', new Error(errorMessage));
|
|
2290
|
+
sessionLogger?.logError('Agent error result', new Error(errorMessage), { subtype: event.errorSubtype });
|
|
2291
|
+
// Report tool errors as Linear issues for tracking
|
|
2292
|
+
// Only report for 'error_during_execution' subtype (tool/execution errors)
|
|
2293
|
+
if (event.errorSubtype === 'error_during_execution' &&
|
|
2294
|
+
event.errors &&
|
|
2295
|
+
emitter) {
|
|
2296
|
+
for (const err of event.errors) {
|
|
2297
|
+
log?.error('Error detail', { error: err });
|
|
2298
|
+
if (isToolRelatedError(err)) {
|
|
2299
|
+
const toolName = extractToolNameFromError(err);
|
|
2300
|
+
try {
|
|
2301
|
+
const issue = await emitter.reportToolError(toolName, err, {
|
|
2302
|
+
issueIdentifier: agent.identifier,
|
|
2303
|
+
additionalContext: {
|
|
2304
|
+
agentStatus: agent.status,
|
|
2305
|
+
workType: agent.workType,
|
|
2306
|
+
subtype: event.errorSubtype,
|
|
2307
|
+
},
|
|
2308
|
+
});
|
|
2309
|
+
if (issue) {
|
|
2310
|
+
log?.info('Tool error reported to Linear', {
|
|
2311
|
+
issue: issue.identifier,
|
|
2312
|
+
toolName,
|
|
2313
|
+
});
|
|
2314
|
+
}
|
|
2315
|
+
}
|
|
2316
|
+
catch (reportError) {
|
|
2317
|
+
log?.warn('Failed to report tool error', {
|
|
2318
|
+
error: reportError instanceof Error
|
|
2319
|
+
? reportError.message
|
|
2320
|
+
: String(reportError),
|
|
2321
|
+
});
|
|
2322
|
+
}
|
|
2323
|
+
}
|
|
2324
|
+
}
|
|
2325
|
+
}
|
|
2326
|
+
else if (event.errors) {
|
|
2327
|
+
for (const err of event.errors) {
|
|
2328
|
+
log?.error('Error detail', { error: err });
|
|
2329
|
+
}
|
|
2330
|
+
}
|
|
2331
|
+
}
|
|
2332
|
+
break;
|
|
2333
|
+
case 'error':
|
|
2334
|
+
log?.error('Agent error', { message: event.message, code: event.code });
|
|
2335
|
+
break;
|
|
2336
|
+
default:
|
|
2337
|
+
log?.debug('Unhandled event type', { type: event.type });
|
|
2338
|
+
}
|
|
2339
|
+
}
|
|
2340
|
+
/**
|
|
2341
|
+
* Extract GitHub PR URL from text (typically from gh pr create output)
|
|
2342
|
+
*/
|
|
2343
|
+
extractPullRequestUrl(text) {
|
|
2344
|
+
// GitHub PR URL pattern: https://github.com/owner/repo/pull/123
|
|
2345
|
+
const prUrlPattern = /https:\/\/github\.com\/[^/]+\/[^/]+\/pull\/\d+/g;
|
|
2346
|
+
const matches = text.match(prUrlPattern);
|
|
2347
|
+
return matches ? matches[0] : null;
|
|
2348
|
+
}
|
|
2349
|
+
/**
 * Update the Linear session with the PR URL.
 *
 * Two delivery paths, chosen by configuration:
 * - `apiActivityConfig` present: POST the URL to the worker API's
 *   `external-urls` endpoint for the session (bearer-token auth).
 * - Otherwise: call `setPullRequestUrl` on the cached AgentSession for the
 *   issue, if one exists; no-op when there is no session.
 *
 * Failures on either path are logged as warnings and never thrown —
 * PR-URL attribution is best-effort and must not interrupt the caller.
 *
 * @param sessionId - Linear session ID the URL is attached to
 * @param prUrl - Pull request URL (e.g. https://github.com/owner/repo/pull/123)
 * @param agent - Agent record; its issueId keys the per-issue logger/session maps
 */
async updateSessionPullRequest(sessionId, prUrl, agent) {
    const log = this.agentLoggers.get(agent.issueId);
    // If using API activity config, call the API endpoint
    if (this.config.apiActivityConfig) {
        const { baseUrl, apiKey } = this.config.apiActivityConfig;
        try {
            const response = await fetch(`${baseUrl}/api/sessions/${sessionId}/external-urls`, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                    Authorization: `Bearer ${apiKey}`,
                },
                body: JSON.stringify({
                    externalUrls: [{ label: 'Pull Request', url: prUrl }],
                }),
            });
            if (!response.ok) {
                const error = await response.text();
                log?.warn('Failed to update session PR URL via API', { status: response.status, error });
            }
            else {
                log?.info('Session PR URL updated via API');
            }
        }
        catch (error) {
            // Network/transport failure — warn and continue (best-effort).
            log?.warn('Failed to update session PR URL via API', {
                error: error instanceof Error ? error.message : String(error),
            });
        }
    }
    else {
        // Direct Linear API - use AgentSession if available
        const session = this.agentSessions.get(agent.issueId);
        if (session) {
            try {
                await session.setPullRequestUrl(prUrl);
                log?.info('Session PR URL updated via Linear API');
            }
            catch (error) {
                log?.warn('Failed to update session PR URL via Linear API', {
                    error: error instanceof Error ? error.message : String(error),
                });
            }
        }
    }
}
|
|
2398
|
+
/**
 * Post completion comment with full result message.
 * Uses multi-comment splitting for long messages (up to 10 comments, 10k chars each).
 *
 * Delivery depends on configuration:
 * - `apiActivityConfig` present: POST the raw `resultMessage` to the worker
 *   API's `completion` endpoint. NOTE(review): on this path the pre-split
 *   `comments` array is not used — presumably the server does its own
 *   splitting; confirm against the API.
 * - Otherwise: post each pre-split chunk as a Linear comment, sequentially,
 *   with a 100ms delay between parts to preserve ordering.
 *
 * All failures are logged (warn/error) and swallowed — completion posting
 * is best-effort and must not fail the agent lifecycle.
 *
 * @param issueId - Linear issue ID to comment on (direct-Linear path)
 * @param sessionId - Linear session ID; used in the API URL and passed to
 *   the comment builder (nullable)
 * @param resultMessage - Full agent result text to post
 * @param log - Optional per-agent logger
 */
async postCompletionComment(issueId, sessionId, resultMessage, log) {
    // Build completion comments with multi-part splitting
    const comments = buildCompletionComments(resultMessage, [], // No plan items to include (already shown via activities)
    sessionId ?? null);
    log?.info('Posting completion comment', {
        parts: comments.length,
        totalLength: resultMessage.length,
    });
    // If using API activity config, call the API endpoint
    if (this.config.apiActivityConfig) {
        const { baseUrl, apiKey } = this.config.apiActivityConfig;
        try {
            const response = await fetch(`${baseUrl}/api/sessions/${sessionId}/completion`, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                    Authorization: `Bearer ${apiKey}`,
                },
                body: JSON.stringify({
                    summary: resultMessage,
                }),
            });
            if (!response.ok) {
                const error = await response.text();
                log?.warn('Failed to post completion comment via API', { status: response.status, error });
            }
            else {
                log?.info('Completion comment posted via API');
            }
        }
        catch (error) {
            log?.warn('Failed to post completion comment via API', {
                error: error instanceof Error ? error.message : String(error),
            });
        }
    }
    else {
        // Direct Linear API - post comments sequentially
        for (const chunk of comments) {
            try {
                await this.client.createComment(issueId, chunk.body);
                log?.info(`Posted completion comment part ${chunk.partNumber}/${chunk.totalParts}`);
                // Small delay between comments to ensure ordering
                if (chunk.partNumber < chunk.totalParts) {
                    await new Promise(resolve => setTimeout(resolve, 100));
                }
            }
            catch (error) {
                // A failed part does not abort the remaining parts.
                log?.error(`Failed to post completion comment part ${chunk.partNumber}`, {
                    error: error instanceof Error ? error.message : String(error),
                });
            }
        }
    }
}
|
|
2457
|
+
/**
|
|
2458
|
+
* Clean up agent resources
|
|
2459
|
+
*/
|
|
2460
|
+
cleanupAgent(issueId, sessionId) {
|
|
2461
|
+
this.agentHandles.delete(issueId);
|
|
2462
|
+
this.agentSessions.delete(issueId);
|
|
2463
|
+
this.activityEmitters.delete(issueId);
|
|
2464
|
+
this.abortControllers.delete(issueId);
|
|
2465
|
+
this.agentLoggers.delete(issueId);
|
|
2466
|
+
// Stop heartbeat writer
|
|
2467
|
+
const heartbeatWriter = this.heartbeatWriters.get(issueId);
|
|
2468
|
+
if (heartbeatWriter) {
|
|
2469
|
+
heartbeatWriter.stop();
|
|
2470
|
+
this.heartbeatWriters.delete(issueId);
|
|
2471
|
+
}
|
|
2472
|
+
// Stop progress logger
|
|
2473
|
+
const progressLogger = this.progressLoggers.get(issueId);
|
|
2474
|
+
if (progressLogger) {
|
|
2475
|
+
progressLogger.stop();
|
|
2476
|
+
this.progressLoggers.delete(issueId);
|
|
2477
|
+
}
|
|
2478
|
+
// Session logger is cleaned up separately (in finalizeSessionLogger)
|
|
2479
|
+
// to ensure the final status is captured before cleanup
|
|
2480
|
+
this.sessionLoggers.delete(issueId);
|
|
2481
|
+
if (sessionId) {
|
|
2482
|
+
this.sessionToIssue.delete(sessionId);
|
|
2483
|
+
}
|
|
2484
|
+
}
|
|
2485
|
+
/**
|
|
2486
|
+
* Finalize the session logger with final status
|
|
2487
|
+
*/
|
|
2488
|
+
finalizeSessionLogger(issueId, status, options) {
|
|
2489
|
+
const sessionLogger = this.sessionLoggers.get(issueId);
|
|
2490
|
+
if (sessionLogger) {
|
|
2491
|
+
sessionLogger.finalize(status, options);
|
|
2492
|
+
}
|
|
2493
|
+
}
|
|
2494
|
+
/**
 * Run the orchestrator - spawn agents for backlog issues.
 *
 * For each backlog issue: creates an isolated worktree (development work type),
 * links dependencies into it, optionally transitions the issue status, and
 * spawns an agent with a freshly generated session ID. Per-issue failures are
 * collected into `result.errors` rather than aborting the whole run.
 *
 * @returns An object with `success` (true only when no issue errored),
 *   `agents` (spawned agent records) and `errors` ({issueId, error} pairs).
 */
async run() {
    const issues = await this.getBacklogIssues();
    const result = {
        success: true,
        agents: [],
        errors: [],
    };
    if (issues.length === 0) {
        console.log('No backlog issues found');
        return result;
    }
    console.log(`Found ${issues.length} backlog issue(s)`);
    for (const issue of issues) {
        this.events.onIssueSelected?.(issue);
        console.log(`Processing: ${issue.identifier} - ${issue.title}`);
        try {
            // Backlog issues are always development work
            const workType = 'development';
            // Create worktree with work type suffix
            const { worktreePath, worktreeIdentifier } = this.createWorktree(issue.identifier, workType);
            // Link dependencies from main repo into worktree
            this.linkDependencies(worktreePath, issue.identifier);
            const startStatus = WORK_TYPE_START_STATUS[workType];
            // Update issue status based on work type if auto-transition is enabled
            if (this.config.autoTransition && startStatus) {
                await this.client.updateIssueStatus(issue.id, startStatus);
                console.log(`Updated ${issue.identifier} status to ${startStatus}`);
            }
            // Spawn agent with generated session ID for autonomous mode.
            // NOTE(review): spawnAgent is not awaited here — presumably it returns
            // the agent record synchronously while work continues; confirm.
            const agent = this.spawnAgent({
                issueId: issue.id,
                identifier: issue.identifier,
                worktreeIdentifier,
                sessionId: randomUUID(),
                worktreePath,
                workType,
                teamName: issue.teamName,
                projectName: issue.projectName,
            });
            result.agents.push(agent);
        }
        catch (error) {
            // Record the failure and keep processing the remaining issues.
            const err = error instanceof Error ? error : new Error(String(error));
            result.errors.push({ issueId: issue.id, error: err });
            console.error(`Failed to process ${issue.identifier}:`, err.message);
        }
    }
    result.success = result.errors.length === 0;
    return result;
}
|
|
2547
|
+
/**
 * Spawn agent for a single issue (webhook-triggered or CLI)
 * Generates a session ID if not provided to enable autonomous mode
 *
 * This method includes crash recovery support:
 * - If a worktree exists with valid state and stale heartbeat, triggers recovery
 * - If a worktree exists with fresh heartbeat (agent alive), throws error to prevent duplicates
 *
 * Throws when the issue is already in a terminal status, when an agent is
 * still alive for the issue, or when git-remote validation fails.
 *
 * @param issueIdOrIdentifier - Issue ID or identifier (e.g., SUP-123)
 * @param sessionId - Optional Linear session ID
 * @param workType - Optional work type (auto-detected from issue status if not provided)
 * @param prompt - Optional custom prompt override
 * @returns The spawned agent record (via spawnAgent or spawnAgentWithResume)
 */
async spawnAgentForIssue(issueIdOrIdentifier, sessionId, workType, prompt) {
    console.log(`Fetching issue:`, issueIdOrIdentifier);
    const issue = await this.client.getIssue(issueIdOrIdentifier);
    const identifier = issue.identifier;
    const issueId = issue.id; // Use the actual UUID
    const team = await issue.team;
    const teamName = team?.key;
    // Resolve project name for path scoping in monorepos
    let projectName;
    if (this.projectPaths) {
        const project = await issue.project;
        projectName = project?.name;
    }
    console.log(`Processing single issue: ${identifier} (${issueId}) - ${issue.title}`);
    // Guard: skip work if the issue has moved to a terminal status since being queued
    const currentState = await issue.state;
    const currentStatus = currentState?.name;
    if (currentStatus && TERMINAL_STATUSES.includes(currentStatus)) {
        throw new Error(`Issue ${identifier} is in terminal status '${currentStatus}' — skipping ${workType ?? 'auto'} work. ` +
            `The issue was likely accepted/canceled after being queued.`);
    }
    // Defense in depth: re-validate git remote before spawning (guards against long-running instances)
    if (this.config.repository) {
        validateGitRemote(this.config.repository);
    }
    // Auto-detect work type from issue status if not provided
    // This must happen BEFORE creating worktree since path includes work type suffix
    let effectiveWorkType = workType;
    if (!effectiveWorkType) {
        const state = await issue.state;
        const statusName = state?.name ?? 'Backlog';
        effectiveWorkType = STATUS_WORK_TYPE_MAP[statusName] ?? 'development';
        console.log(`Auto-detected work type: ${effectiveWorkType} (from status: ${statusName})`);
        // Parent issues use coordination variants
        const isParent = await this.client.isParentIssue(issueId);
        if (isParent) {
            if (effectiveWorkType === 'development')
                effectiveWorkType = 'coordination';
            else if (effectiveWorkType === 'qa')
                effectiveWorkType = 'qa-coordination';
            else if (effectiveWorkType === 'acceptance')
                effectiveWorkType = 'acceptance-coordination';
            else if (effectiveWorkType === 'refinement')
                effectiveWorkType = 'refinement-coordination';
            console.log(`Upgraded to coordination work type: ${effectiveWorkType} (parent issue)`);
        }
    }
    // Create isolated worktree for the agent (only for work types that need one)
    let worktreePath;
    let worktreeIdentifier;
    if (WORK_TYPES_REQUIRING_WORKTREE.has(effectiveWorkType)) {
        const wt = this.createWorktree(identifier, effectiveWorkType);
        worktreePath = wt.worktreePath;
        worktreeIdentifier = wt.worktreeIdentifier;
        // Link dependencies from main repo into worktree
        this.linkDependencies(worktreePath, identifier);
        // Check for existing state and potential recovery
        const recoveryCheck = checkRecovery(worktreePath, {
            heartbeatTimeoutMs: getHeartbeatTimeoutFromEnv(),
            maxRecoveryAttempts: getMaxRecoveryAttemptsFromEnv(),
        });
        if (recoveryCheck.agentAlive) {
            // Agent is still running - prevent duplicate
            throw new Error(`Agent already running for ${identifier}: ${recoveryCheck.message}. ` +
                `Stop the existing agent before spawning a new one.`);
        }
        if (recoveryCheck.canRecover && recoveryCheck.state) {
            // Crashed agent detected - trigger recovery
            console.log(`Recovery detected for ${identifier}: ${recoveryCheck.message}`);
            // Increment recovery attempts in state
            const updatedState = updateState(worktreePath, {
                recoveryAttempts: (recoveryCheck.state.recoveryAttempts ?? 0) + 1,
            });
            // Build recovery prompt (explicit prompt argument wins over the generated one)
            const recoveryPrompt = prompt ?? buildRecoveryPrompt(recoveryCheck.state, recoveryCheck.todos);
            // Use existing provider session ID for resume if available
            const providerSessionId = recoveryCheck.state.providerSessionId ?? undefined;
            // Inherit work type from previous state if not provided
            const recoveryWorkType = workType ?? recoveryCheck.state.workType ?? effectiveWorkType;
            const effectiveSessionId = sessionId ?? recoveryCheck.state.linearSessionId ?? randomUUID();
            console.log(`Resuming work on ${identifier} (recovery attempt ${updatedState?.recoveryAttempts ?? 1})`);
            // Update status based on work type if auto-transition is enabled
            const startStatus = WORK_TYPE_START_STATUS[recoveryWorkType];
            if (this.config.autoTransition && startStatus) {
                await this.client.updateIssueStatus(issueId, startStatus);
                console.log(`Updated ${identifier} status to ${startStatus}`);
            }
            // Spawn with resume capability
            return this.spawnAgentWithResume({
                issueId,
                identifier,
                worktreeIdentifier,
                sessionId: effectiveSessionId,
                worktreePath,
                prompt: recoveryPrompt,
                providerSessionId,
                workType: recoveryWorkType,
                teamName,
                projectName,
            });
        }
    }
    // No recovery needed - proceed with fresh spawn
    // Update status based on work type if auto-transition is enabled
    const startStatus = WORK_TYPE_START_STATUS[effectiveWorkType];
    if (this.config.autoTransition && startStatus) {
        await this.client.updateIssueStatus(issueId, startStatus);
        console.log(`Updated ${identifier} status to ${startStatus}`);
    }
    // Generate session ID if not provided to enable autonomous mode
    // This ensures LINEAR_SESSION_ID is always set, triggering headless operation
    const effectiveSessionId = sessionId ?? randomUUID();
    // Spawn agent with work type and optional custom prompt
    return this.spawnAgent({
        issueId,
        identifier,
        worktreeIdentifier,
        sessionId: effectiveSessionId,
        worktreePath,
        workType: effectiveWorkType,
        prompt,
        teamName,
        projectName,
    });
}
|
|
2685
|
+
/**
|
|
2686
|
+
* Get all active agents
|
|
2687
|
+
*/
|
|
2688
|
+
getActiveAgents() {
|
|
2689
|
+
return Array.from(this.activeAgents.values()).filter((a) => a.status === 'running' || a.status === 'starting');
|
|
2690
|
+
}
|
|
2691
|
+
/**
 * Stop a running agent by issue ID.
 *
 * Sequence: emit a final activity (best-effort), mark the agent record as
 * stopped with the reason and completion time, abort the underlying query,
 * then optionally remove the worktree.
 *
 * @param issueId - The Linear issue ID
 * @param cleanupWorktree - Whether to remove the git worktree
 * @param stopReason - Why the agent is being stopped: 'user_request' or 'timeout'
 * @returns `{ stopped: true, agent }` on success, otherwise
 *   `{ stopped: false, reason, agent? }` with reason one of
 *   'not_found' | 'already_stopped' | 'signal_failed'
 */
async stopAgent(issueId, cleanupWorktree = false, stopReason = 'user_request') {
    const agent = this.activeAgents.get(issueId);
    if (!agent) {
        return { stopped: false, reason: 'not_found' };
    }
    if (agent.status !== 'running' && agent.status !== 'starting') {
        return { stopped: false, reason: 'already_stopped', agent };
    }
    // No abort controller means there is nothing we can signal.
    const abortController = this.abortControllers.get(issueId);
    if (!abortController) {
        return { stopped: false, reason: 'not_found', agent };
    }
    const log = this.agentLoggers.get(issueId);
    try {
        // Emit final activity before stopping (failure here is non-fatal)
        const emitter = this.activityEmitters.get(issueId);
        if (emitter) {
            try {
                const message = stopReason === 'user_request'
                    ? 'Agent stopped by user request.'
                    : 'Agent stopped due to timeout.';
                await emitter.emitResponse(message);
                await emitter.flush();
            }
            catch (emitError) {
                log?.warn('Failed to emit stop activity', {
                    error: emitError instanceof Error ? emitError.message : String(emitError),
                });
            }
        }
        // Mark as stopped with reason before aborting
        agent.status = 'stopped';
        agent.stopReason = stopReason;
        agent.completedAt = new Date();
        // Abort the query
        abortController.abort();
        // Clean up worktree if requested (only if agent has a worktree)
        if (cleanupWorktree && agent.worktreeIdentifier) {
            this.removeWorktree(agent.worktreeIdentifier);
        }
        const logMessage = stopReason === 'user_request'
            ? 'Agent stopped by user request'
            : 'Agent stopped due to timeout';
        log?.status('stopped', logMessage);
        return { stopped: true, agent };
    }
    catch (error) {
        // NOTE(review): on this path the agent may already have been marked
        // 'stopped' before the failure — callers see stopped:false anyway.
        log?.warn('Failed to stop agent', {
            error: error instanceof Error ? error.message : String(error),
        });
        return { stopped: false, reason: 'signal_failed', agent };
    }
}
|
|
2750
|
+
/**
|
|
2751
|
+
* Stop a running agent by session ID
|
|
2752
|
+
*/
|
|
2753
|
+
async stopAgentBySession(sessionId, cleanupWorktree = false) {
|
|
2754
|
+
const issueId = this.sessionToIssue.get(sessionId);
|
|
2755
|
+
if (!issueId) {
|
|
2756
|
+
return { stopped: false, reason: 'not_found' };
|
|
2757
|
+
}
|
|
2758
|
+
return this.stopAgent(issueId, cleanupWorktree);
|
|
2759
|
+
}
|
|
2760
|
+
/**
|
|
2761
|
+
* Get agent by session ID
|
|
2762
|
+
*/
|
|
2763
|
+
getAgentBySession(sessionId) {
|
|
2764
|
+
const issueId = this.sessionToIssue.get(sessionId);
|
|
2765
|
+
if (!issueId)
|
|
2766
|
+
return undefined;
|
|
2767
|
+
return this.activeAgents.get(issueId);
|
|
2768
|
+
}
|
|
2769
|
+
/**
|
|
2770
|
+
* Update the worker ID for all active activity emitters.
|
|
2771
|
+
* Called after worker re-registration to ensure activities are attributed
|
|
2772
|
+
* to the new worker ID and pass ownership checks.
|
|
2773
|
+
*
|
|
2774
|
+
* @param newWorkerId - The new worker ID after re-registration
|
|
2775
|
+
*/
|
|
2776
|
+
updateWorkerId(newWorkerId) {
|
|
2777
|
+
// Update the config for any future emitters
|
|
2778
|
+
if (this.config.apiActivityConfig) {
|
|
2779
|
+
this.config.apiActivityConfig.workerId = newWorkerId;
|
|
2780
|
+
}
|
|
2781
|
+
// Update all existing activity emitters
|
|
2782
|
+
for (const [issueId, emitter] of this.activityEmitters.entries()) {
|
|
2783
|
+
// Only ApiActivityEmitter has updateWorkerId method
|
|
2784
|
+
if ('updateWorkerId' in emitter && typeof emitter.updateWorkerId === 'function') {
|
|
2785
|
+
emitter.updateWorkerId(newWorkerId);
|
|
2786
|
+
console.log(`[Orchestrator] Updated worker ID for emitter ${issueId}`);
|
|
2787
|
+
}
|
|
2788
|
+
}
|
|
2789
|
+
}
|
|
2790
|
+
/**
 * Forward a follow-up prompt to an existing or new agent
 *
 * If the agent is running, attempts to inject the message into the running session
 * without stopping it. If injection fails or agent isn't running, it will be
 * stopped gracefully and resumed with the new prompt.
 *
 * @param issueId - Linear issue ID the prompt targets
 * @param sessionId - Linear session ID to associate with the (re)spawned agent
 * @param prompt - The follow-up prompt text
 * @param providerSessionId - Optional provider session ID to resume; falls back
 *   to the existing agent's when available
 * @param workType - Optional work type. If not provided, inherits from existing agent or defaults to 'development'.
 * @returns `{ forwarded, resumed, injected?, agent?, reason?, error? }` —
 *   reason is one of 'terminal_status' | 'not_found' | 'no_worktree' | 'spawn_failed'
 */
async forwardPrompt(issueId, sessionId, prompt, providerSessionId, workType) {
    const existingAgent = this.activeAgents.get(issueId);
    // If agent is running, try to inject the message without stopping
    if (existingAgent && (existingAgent.status === 'running' || existingAgent.status === 'starting')) {
        const injectResult = await this.injectMessage(issueId, sessionId, prompt);
        if (injectResult.injected) {
            console.log(`Message injected into running agent for ${existingAgent.identifier}`);
            return {
                forwarded: true,
                resumed: false,
                injected: true,
                agent: existingAgent,
            };
        }
        // Injection failed - fall back to stop and respawn
        console.log(`Message injection failed for ${existingAgent.identifier}: ${injectResult.reason} - stopping and respawning`);
        await this.stopAgent(issueId, false); // Don't cleanup worktree
    }
    // Get worktree path from existing agent or create new one
    let worktreePath;
    let worktreeIdentifier;
    let identifier;
    let teamName;
    if (existingAgent) {
        worktreePath = existingAgent.worktreePath;
        worktreeIdentifier = existingAgent.worktreeIdentifier;
        identifier = existingAgent.identifier;
        // Use existing provider session ID if not provided
        // (parameter reassignment is intentional: downstream code reads these)
        providerSessionId = providerSessionId ?? existingAgent.providerSessionId;
        // Inherit work type from existing agent if not provided
        workType = workType ?? existingAgent.workType;
    }
    else {
        // Need to fetch issue to get identifier
        try {
            const issue = await this.client.getIssue(issueId);
            identifier = issue.identifier;
            const issueTeam = await issue.team;
            teamName = issueTeam?.key;
            // Guard: skip work if the issue has moved to a terminal status since being queued
            const currentState = await issue.state;
            const currentStatus = currentState?.name;
            if (currentStatus && TERMINAL_STATUSES.includes(currentStatus)) {
                console.log(`Issue ${identifier} is in terminal status '${currentStatus}' — skipping work`);
                return {
                    forwarded: false,
                    resumed: false,
                    reason: 'terminal_status',
                };
            }
            // Auto-detect work type from issue status if not provided
            // This prevents defaulting to 'development' which would cause
            // incorrect status transitions (e.g., Delivered → Started for acceptance work)
            if (!workType) {
                const statusName = currentStatus ?? 'Backlog';
                workType = STATUS_WORK_TYPE_MAP[statusName] ?? 'development';
                // Parent issues use coordination variants
                const isParent = await this.client.isParentIssue(issue.id);
                if (isParent) {
                    if (workType === 'development')
                        workType = 'coordination';
                    else if (workType === 'qa')
                        workType = 'qa-coordination';
                    else if (workType === 'acceptance')
                        workType = 'acceptance-coordination';
                    else if (workType === 'refinement')
                        workType = 'refinement-coordination';
                }
            }
            // Create isolated worktree for the agent
            if (WORK_TYPES_REQUIRING_WORKTREE.has(workType)) {
                const result = this.createWorktree(identifier, workType);
                worktreePath = result.worktreePath;
                worktreeIdentifier = result.worktreeIdentifier;
                // Link dependencies from main repo into worktree
                this.linkDependencies(worktreePath, identifier);
            }
        }
        catch (error) {
            // Issue lookup or worktree creation failed — report as not found.
            return {
                forwarded: false,
                resumed: false,
                reason: 'not_found',
                error: error instanceof Error ? error : new Error(String(error)),
            };
        }
    }
    // Check if worktree exists (only relevant for code work types);
    // recreate it when the recorded path has gone missing on disk.
    const effectiveWorkType = workType ?? 'development';
    if (WORK_TYPES_REQUIRING_WORKTREE.has(effectiveWorkType) && worktreePath && !existsSync(worktreePath)) {
        try {
            const result = this.createWorktree(identifier, effectiveWorkType);
            worktreePath = result.worktreePath;
            worktreeIdentifier = result.worktreeIdentifier;
            // Link dependencies from main repo into worktree
            this.linkDependencies(worktreePath, identifier);
        }
        catch (error) {
            return {
                forwarded: false,
                resumed: false,
                reason: 'no_worktree',
                error: error instanceof Error ? error : new Error(String(error)),
            };
        }
    }
    // Spawn agent with resume if we have a provider session ID
    try {
        const agent = await this.spawnAgentWithResume({
            issueId,
            identifier,
            worktreeIdentifier,
            sessionId,
            worktreePath,
            prompt,
            providerSessionId,
            workType,
            teamName,
        });
        return {
            forwarded: true,
            resumed: !!providerSessionId,
            agent,
        };
    }
    catch (error) {
        return {
            forwarded: false,
            resumed: false,
            reason: 'spawn_failed',
            error: error instanceof Error ? error : new Error(String(error)),
        };
    }
}
|
|
2933
|
+
/**
|
|
2934
|
+
* Inject a user message into a running agent session without stopping it.
|
|
2935
|
+
*
|
|
2936
|
+
* Uses the SDK's streamInput() method to send follow-up messages to a running session.
|
|
2937
|
+
* This is the preferred method for user follow-ups as it doesn't interrupt agent work.
|
|
2938
|
+
*
|
|
2939
|
+
* @param issueId - The issue ID the agent is working on
|
|
2940
|
+
* @param sessionId - The Linear session ID
|
|
2941
|
+
* @param message - The user message to inject
|
|
2942
|
+
* @returns Result indicating if injection was successful
|
|
2943
|
+
*/
|
|
2944
|
+
async injectMessage(issueId, sessionId, message) {
|
|
2945
|
+
const log = this.agentLoggers.get(issueId);
|
|
2946
|
+
const agent = this.activeAgents.get(issueId);
|
|
2947
|
+
const handle = this.agentHandles.get(issueId);
|
|
2948
|
+
// Check if agent is running
|
|
2949
|
+
if (!agent || (agent.status !== 'running' && agent.status !== 'starting')) {
|
|
2950
|
+
return {
|
|
2951
|
+
injected: false,
|
|
2952
|
+
reason: 'not_running',
|
|
2953
|
+
};
|
|
2954
|
+
}
|
|
2955
|
+
// Check if we have the handle
|
|
2956
|
+
if (!handle) {
|
|
2957
|
+
log?.warn('No AgentHandle found for running agent', { issueId, sessionId });
|
|
2958
|
+
return {
|
|
2959
|
+
injected: false,
|
|
2960
|
+
reason: 'no_query',
|
|
2961
|
+
};
|
|
2962
|
+
}
|
|
2963
|
+
try {
|
|
2964
|
+
// Inject the message into the running session via provider handle
|
|
2965
|
+
log?.info('Injecting user message into running session', {
|
|
2966
|
+
issueId,
|
|
2967
|
+
sessionId,
|
|
2968
|
+
messageLength: message.length,
|
|
2969
|
+
});
|
|
2970
|
+
await handle.injectMessage(message);
|
|
2971
|
+
// Update activity timestamp since we just interacted with the agent
|
|
2972
|
+
agent.lastActivityAt = new Date();
|
|
2973
|
+
log?.success('Message injected successfully');
|
|
2974
|
+
return {
|
|
2975
|
+
injected: true,
|
|
2976
|
+
};
|
|
2977
|
+
}
|
|
2978
|
+
catch (error) {
|
|
2979
|
+
log?.error('Failed to inject message', {
|
|
2980
|
+
error: error instanceof Error ? error.message : String(error),
|
|
2981
|
+
});
|
|
2982
|
+
return {
|
|
2983
|
+
injected: false,
|
|
2984
|
+
reason: 'injection_failed',
|
|
2985
|
+
error: error instanceof Error ? error : new Error(String(error)),
|
|
2986
|
+
};
|
|
2987
|
+
}
|
|
2988
|
+
}
|
|
2989
|
+
/**
 * Spawn an agent with resume capability for continuing a previous session.
 * If autoTransition is enabled, also transitions the issue status to the appropriate working state.
 *
 * Setup order matters: status transition → agent record registration → optional
 * worktree state persistence (state file, heartbeat, progress/session loggers) →
 * activity emitter → environment assembly → provider spawn/resume.
 *
 * @param options - Spawn options: { issueId, identifier, worktreeIdentifier, sessionId,
 *   worktreePath, prompt, providerSessionId, workType, teamName }. When
 *   providerSessionId is present the provider resumes that session instead of
 *   spawning fresh; when worktreePath is absent, state persistence is skipped.
 * @returns The tracked agent record (status set to 'running' once spawned).
 */
async spawnAgentWithResume(options) {
    const { issueId, identifier, worktreeIdentifier, sessionId, worktreePath, prompt, providerSessionId, workType, teamName } = options;
    // Create logger for this agent
    const log = createLogger({ issueIdentifier: identifier });
    this.agentLoggers.set(issueId, log);
    // Use the work type to determine if we need to transition on start
    // Only certain work types trigger a start transition (WORK_TYPE_START_STATUS
    // maps work type → target status; a missing entry means "no transition").
    const effectiveWorkType = workType ?? 'development';
    const startStatus = WORK_TYPE_START_STATUS[effectiveWorkType];
    if (this.config.autoTransition && startStatus) {
        try {
            await this.client.updateIssueStatus(issueId, startStatus);
            log.info('Transitioned issue status on resume', { workType: effectiveWorkType, to: startStatus });
        }
        catch (error) {
            // Log but don't fail - status might already be in a working state
            log.warn('Failed to transition issue status', {
                error: error instanceof Error ? error.message : String(error),
            });
        }
    }
    const now = new Date();
    // In-memory record tracked in this.activeAgents for the agent's lifetime.
    const agent = {
        issueId,
        identifier,
        worktreeIdentifier,
        sessionId,
        providerSessionId,
        worktreePath,
        pid: undefined,
        status: 'starting',
        startedAt: now,
        lastActivityAt: now, // Initialize for inactivity tracking
        workType,
    };
    this.activeAgents.set(issueId, agent);
    // Track session to issue mapping for stop signal handling
    this.sessionToIssue.set(sessionId, issueId);
    // Initialize state persistence and monitoring (only for worktree-based agents)
    if (worktreePath) {
        try {
            // Write/update state with resume info
            const initialState = createInitialState({
                issueId,
                issueIdentifier: identifier,
                linearSessionId: sessionId,
                workType: effectiveWorkType,
                prompt,
                workerId: this.config.apiActivityConfig?.workerId ?? null,
                pid: null, // Will be updated when process spawns
            });
            // Preserve provider session ID if resuming
            if (providerSessionId) {
                initialState.providerSessionId = providerSessionId;
            }
            writeState(worktreePath, initialState);
            // Start heartbeat writer for crash detection
            const heartbeatWriter = createHeartbeatWriter({
                agentDir: resolve(worktreePath, '.agent'),
                pid: process.pid, // Will be updated to child PID after spawn
                intervalMs: getHeartbeatIntervalFromEnv(),
                startTime: now.getTime(),
            });
            heartbeatWriter.start();
            this.heartbeatWriters.set(issueId, heartbeatWriter);
            // Start progress logger for debugging
            const progressLogger = createProgressLogger({
                agentDir: resolve(worktreePath, '.agent'),
            });
            // Only the first 200 chars of the prompt are persisted to the progress log.
            progressLogger.logStart({ issueId, workType: effectiveWorkType, prompt: prompt.substring(0, 200) });
            this.progressLoggers.set(issueId, progressLogger);
            // Start session logger for verbose analysis if enabled
            if (isSessionLoggingEnabled()) {
                const logConfig = getLogAnalysisConfig();
                const sessionLogger = createSessionLogger({
                    sessionId,
                    issueId,
                    issueIdentifier: identifier,
                    workType: effectiveWorkType,
                    prompt,
                    logsDir: logConfig.logsDir,
                    workerId: this.config.apiActivityConfig?.workerId,
                });
                this.sessionLoggers.set(issueId, sessionLogger);
                log.debug('Session logging initialized', { logsDir: logConfig.logsDir });
            }
            log.debug('State persistence initialized', { agentDir: resolve(worktreePath, '.agent') });
        }
        catch (stateError) {
            // Log but don't fail - state persistence is optional
            log.warn('Failed to initialize state persistence', {
                error: stateError instanceof Error ? stateError.message : String(stateError),
            });
        }
    }
    this.events.onAgentStart?.(agent);
    // Set up activity streaming
    let emitter;
    // Check if we should use API-based activity emitter (for remote workers)
    if (this.config.apiActivityConfig) {
        const { baseUrl, apiKey, workerId } = this.config.apiActivityConfig;
        log.debug('Using API activity emitter', { baseUrl });
        emitter = createApiActivityEmitter({
            sessionId,
            workerId,
            apiBaseUrl: baseUrl,
            apiKey,
            minInterval: this.config.streamConfig.minInterval,
            maxOutputLength: this.config.streamConfig.maxOutputLength,
            includeTimestamps: this.config.streamConfig.includeTimestamps,
            onActivityEmitted: (type, content) => {
                log.activity(type, content);
            },
            onActivityError: (type, error) => {
                log.error(`Activity error (${type})`, { error: error.message });
            },
        });
    }
    else {
        // Direct Linear API
        // autoTransition is false here: the status transition (if any) was
        // already handled at the top of this method.
        const session = createAgentSession({
            client: this.client.linearClient,
            issueId,
            sessionId,
            autoTransition: false,
        });
        this.agentSessions.set(issueId, session);
        emitter = createActivityEmitter({
            session,
            minInterval: this.config.streamConfig.minInterval,
            maxOutputLength: this.config.streamConfig.maxOutputLength,
            includeTimestamps: this.config.streamConfig.includeTimestamps,
            onActivityEmitted: (type, content) => {
                log.activity(type, content);
            },
        });
    }
    this.activityEmitters.set(issueId, emitter);
    // Create AbortController for cancellation
    const abortController = new AbortController();
    this.abortControllers.set(issueId, abortController);
    // Load environment from settings.local.json
    const envBaseDir = worktreePath ?? process.cwd();
    const settingsEnv = loadSettingsEnv(envBaseDir, log);
    // Load app-specific env files based on work type
    // Development work loads .env.local, QA/acceptance loads .env.test.local
    const effectiveWorkTypeForEnv = workType ?? 'development';
    const appEnv = loadAppEnvFiles(envBaseDir, effectiveWorkTypeForEnv, log);
    // Build environment variables - inherit ALL from process.env (required for node to be found)
    // Then overlay app env vars, settings.local.json env vars, then our specific vars
    // Apply the same blocklist as spawnAgent() to prevent API key leakage
    const processEnvFiltered = {};
    for (const [key, value] of Object.entries(process.env)) {
        if (typeof value === 'string' && !AGENT_ENV_BLOCKLIST.includes(key)) {
            processEnvFiltered[key] = value;
        }
    }
    const filteredAppEnv = Object.fromEntries(Object.entries(appEnv).filter(([key]) => !AGENT_ENV_BLOCKLIST.includes(key)));
    const filteredSettingsEnv = Object.fromEntries(Object.entries(settingsEnv).filter(([key]) => !AGENT_ENV_BLOCKLIST.includes(key)));
    // Later spreads win: settings.local.json overrides app env, which overrides
    // the inherited process env; the LINEAR_* vars always win.
    const env = {
        ...processEnvFiltered, // Include all parent env vars (PATH, NODE_PATH, etc.)
        ...filteredAppEnv, // Include app env vars (blocklisted keys stripped)
        ...filteredSettingsEnv, // Include settings.local.json env vars (blocklisted keys stripped)
        LINEAR_ISSUE_ID: issueId,
        LINEAR_SESSION_ID: sessionId,
        // Set work type so agent knows if it's doing QA or development work
        ...(workType && { LINEAR_WORK_TYPE: workType }),
        // Set team name so agents can use `pnpm af-linear create-issue` without --team
        ...(teamName && { LINEAR_TEAM_NAME: teamName }),
    };
    log.info('Starting agent via provider', {
        provider: this.provider.name,
        cwd: worktreePath ?? 'repo-root',
        resuming: !!providerSessionId,
        workType: workType ?? 'development',
    });
    // Create in-process tool servers from registered plugins
    // (only the 'claude' provider supports in-process MCP servers here).
    const mcpServers = this.provider.name === 'claude'
        ? this.toolRegistry.createServers({ env, cwd: worktreePath ?? process.cwd() })
        : undefined;
    // Coordinators need significantly more turns than standard agents
    const resolvedWorkType = workType ?? 'development';
    const needsMoreTurns = resolvedWorkType === 'coordination' || resolvedWorkType === 'qa-coordination' || resolvedWorkType === 'acceptance-coordination' || resolvedWorkType === 'refinement-coordination' || resolvedWorkType === 'inflight';
    const maxTurns = needsMoreTurns ? 200 : undefined;
    // Spawn agent via provider interface (with resume if session ID available)
    const spawnConfig = {
        prompt,
        cwd: worktreePath ?? process.cwd(),
        env,
        abortController,
        autonomous: true,
        sandboxEnabled: this.config.sandboxEnabled,
        mcpServers,
        maxTurns,
        onProcessSpawned: (pid) => {
            agent.pid = pid;
            log.info('Agent process spawned', { pid });
        },
    };
    const handle = providerSessionId
        ? this.provider.resume(providerSessionId, spawnConfig)
        : this.provider.spawn(spawnConfig);
    this.agentHandles.set(issueId, handle);
    agent.status = 'running';
    // Process the event stream in the background (intentionally not awaited).
    this.processEventStream(issueId, identifier, sessionId, handle, emitter, agent);
    return agent;
}
|
|
3201
|
+
/**
|
|
3202
|
+
* Stop all running agents
|
|
3203
|
+
*/
|
|
3204
|
+
stopAll() {
|
|
3205
|
+
for (const [issueId] of this.abortControllers) {
|
|
3206
|
+
try {
|
|
3207
|
+
const agent = this.activeAgents.get(issueId);
|
|
3208
|
+
if (agent) {
|
|
3209
|
+
agent.status = 'stopped';
|
|
3210
|
+
agent.completedAt = new Date();
|
|
3211
|
+
}
|
|
3212
|
+
const abortController = this.abortControllers.get(issueId);
|
|
3213
|
+
abortController?.abort();
|
|
3214
|
+
}
|
|
3215
|
+
catch (error) {
|
|
3216
|
+
console.warn(`Failed to stop agent for ${issueId}:`, error);
|
|
3217
|
+
}
|
|
3218
|
+
}
|
|
3219
|
+
this.abortControllers.clear();
|
|
3220
|
+
this.sessionToIssue.clear();
|
|
3221
|
+
}
|
|
3222
|
+
/**
|
|
3223
|
+
* Wait for all agents to complete with inactivity-based timeout
|
|
3224
|
+
*
|
|
3225
|
+
* Unlike a simple session timeout, this method monitors each agent's activity
|
|
3226
|
+
* and only stops agents that have been inactive for longer than the inactivity
|
|
3227
|
+
* timeout. Active agents are allowed to run indefinitely (unless maxSessionTimeoutMs
|
|
3228
|
+
* is set as a hard cap).
|
|
3229
|
+
*
|
|
3230
|
+
* @param inactivityTimeoutMsOverride - Override inactivity timeout from config (for backwards compatibility)
|
|
3231
|
+
*/
|
|
3232
|
+
async waitForAll(inactivityTimeoutMsOverride) {
|
|
3233
|
+
const activeAgents = this.getActiveAgents();
|
|
3234
|
+
if (activeAgents.length === 0) {
|
|
3235
|
+
return Array.from(this.activeAgents.values());
|
|
3236
|
+
}
|
|
3237
|
+
return new Promise((resolve) => {
|
|
3238
|
+
const checkInterval = setInterval(async () => {
|
|
3239
|
+
const stillActive = this.getActiveAgents();
|
|
3240
|
+
if (stillActive.length === 0) {
|
|
3241
|
+
clearInterval(checkInterval);
|
|
3242
|
+
resolve(Array.from(this.activeAgents.values()));
|
|
3243
|
+
return;
|
|
3244
|
+
}
|
|
3245
|
+
const now = Date.now();
|
|
3246
|
+
// Check each agent for inactivity timeout and max session timeout
|
|
3247
|
+
for (const agent of stillActive) {
|
|
3248
|
+
// Get timeout config for this agent's work type
|
|
3249
|
+
const timeoutConfig = this.getTimeoutConfig(agent.workType);
|
|
3250
|
+
// Use override if provided (for backwards compatibility), otherwise use config
|
|
3251
|
+
const inactivityTimeout = inactivityTimeoutMsOverride ?? timeoutConfig.inactivityTimeoutMs;
|
|
3252
|
+
const maxSessionTimeout = timeoutConfig.maxSessionTimeoutMs;
|
|
3253
|
+
const log = this.agentLoggers.get(agent.issueId);
|
|
3254
|
+
const timeSinceLastActivity = now - agent.lastActivityAt.getTime();
|
|
3255
|
+
const totalRuntime = now - agent.startedAt.getTime();
|
|
3256
|
+
// Check max session timeout (hard cap regardless of activity)
|
|
3257
|
+
if (maxSessionTimeout && totalRuntime > maxSessionTimeout) {
|
|
3258
|
+
log?.warn('Agent reached max session timeout', {
|
|
3259
|
+
totalRuntime: `${Math.floor(totalRuntime / 1000)}s`,
|
|
3260
|
+
maxSessionTimeout: `${Math.floor(maxSessionTimeout / 1000)}s`,
|
|
3261
|
+
});
|
|
3262
|
+
await this.stopAgent(agent.issueId, false, 'timeout');
|
|
3263
|
+
continue;
|
|
3264
|
+
}
|
|
3265
|
+
// Check inactivity timeout (agent is "hung" only if no activity)
|
|
3266
|
+
if (timeSinceLastActivity > inactivityTimeout) {
|
|
3267
|
+
log?.warn('Agent timed out due to inactivity', {
|
|
3268
|
+
timeSinceLastActivity: `${Math.floor(timeSinceLastActivity / 1000)}s`,
|
|
3269
|
+
inactivityTimeout: `${Math.floor(inactivityTimeout / 1000)}s`,
|
|
3270
|
+
lastActivityAt: agent.lastActivityAt.toISOString(),
|
|
3271
|
+
});
|
|
3272
|
+
await this.stopAgent(agent.issueId, false, 'timeout');
|
|
3273
|
+
}
|
|
3274
|
+
}
|
|
3275
|
+
// Check again if all agents are done after potential stops
|
|
3276
|
+
const remaining = this.getActiveAgents();
|
|
3277
|
+
if (remaining.length === 0) {
|
|
3278
|
+
clearInterval(checkInterval);
|
|
3279
|
+
resolve(Array.from(this.activeAgents.values()));
|
|
3280
|
+
}
|
|
3281
|
+
}, 1000);
|
|
3282
|
+
});
|
|
3283
|
+
}
|
|
3284
|
+
}
|
|
3285
|
+
/**
 * Create an orchestrator instance.
 *
 * @param config - Orchestrator configuration
 * @param events - Lifecycle event callbacks
 * @returns A new AgentOrchestrator
 */
export function createOrchestrator(config, events) {
    const orchestrator = new AgentOrchestrator(config, events);
    return orchestrator;
}
|