oh-my-codex 0.12.2 → 0.12.4
- package/Cargo.lock +5 -5
- package/Cargo.toml +1 -1
- package/README.md +2 -0
- package/dist/cli/__tests__/index.test.js +73 -12
- package/dist/cli/__tests__/index.test.js.map +1 -1
- package/dist/cli/__tests__/launch-fallback.test.js +8 -27
- package/dist/cli/__tests__/launch-fallback.test.js.map +1 -1
- package/dist/cli/__tests__/mcp-parity.test.d.ts +2 -0
- package/dist/cli/__tests__/mcp-parity.test.d.ts.map +1 -0
- package/dist/cli/__tests__/mcp-parity.test.js +111 -0
- package/dist/cli/__tests__/mcp-parity.test.js.map +1 -0
- package/dist/cli/__tests__/nested-help-routing.test.js +13 -0
- package/dist/cli/__tests__/nested-help-routing.test.js.map +1 -1
- package/dist/cli/__tests__/package-bin-contract.test.js +6 -1
- package/dist/cli/__tests__/package-bin-contract.test.js.map +1 -1
- package/dist/cli/__tests__/setup-hooks-shared-ownership.test.d.ts +2 -0
- package/dist/cli/__tests__/setup-hooks-shared-ownership.test.d.ts.map +1 -0
- package/dist/cli/__tests__/setup-hooks-shared-ownership.test.js +189 -0
- package/dist/cli/__tests__/setup-hooks-shared-ownership.test.js.map +1 -0
- package/dist/cli/__tests__/setup-scope.test.js +48 -0
- package/dist/cli/__tests__/setup-scope.test.js.map +1 -1
- package/dist/cli/__tests__/state.test.d.ts +2 -0
- package/dist/cli/__tests__/state.test.d.ts.map +1 -0
- package/dist/cli/__tests__/state.test.js +46 -0
- package/dist/cli/__tests__/state.test.js.map +1 -0
- package/dist/cli/__tests__/team.test.js +238 -2
- package/dist/cli/__tests__/team.test.js.map +1 -1
- package/dist/cli/__tests__/uninstall.test.js +37 -2
- package/dist/cli/__tests__/uninstall.test.js.map +1 -1
- package/dist/cli/index.d.ts +6 -13
- package/dist/cli/index.d.ts.map +1 -1
- package/dist/cli/index.js +47 -60
- package/dist/cli/index.js.map +1 -1
- package/dist/cli/mcp-parity.d.ts +22 -0
- package/dist/cli/mcp-parity.d.ts.map +1 -0
- package/dist/cli/mcp-parity.js +227 -0
- package/dist/cli/mcp-parity.js.map +1 -0
- package/dist/cli/setup.d.ts.map +1 -1
- package/dist/cli/setup.js +5 -2
- package/dist/cli/setup.js.map +1 -1
- package/dist/cli/state.d.ts +8 -0
- package/dist/cli/state.d.ts.map +1 -0
- package/dist/cli/state.js +71 -0
- package/dist/cli/state.js.map +1 -0
- package/dist/cli/team.d.ts.map +1 -1
- package/dist/cli/team.js +6 -5
- package/dist/cli/team.js.map +1 -1
- package/dist/cli/uninstall.d.ts.map +1 -1
- package/dist/cli/uninstall.js +18 -4
- package/dist/cli/uninstall.js.map +1 -1
- package/dist/config/__tests__/codex-hooks.test.d.ts +2 -0
- package/dist/config/__tests__/codex-hooks.test.d.ts.map +1 -0
- package/dist/config/__tests__/codex-hooks.test.js +53 -0
- package/dist/config/__tests__/codex-hooks.test.js.map +1 -0
- package/dist/config/codex-hooks.d.ts +16 -7
- package/dist/config/codex-hooks.d.ts.map +1 -1
- package/dist/config/codex-hooks.js +134 -2
- package/dist/config/codex-hooks.js.map +1 -1
- package/dist/hooks/__tests__/keyword-detector.test.js +62 -0
- package/dist/hooks/__tests__/keyword-detector.test.js.map +1 -1
- package/dist/hooks/keyword-detector.d.ts.map +1 -1
- package/dist/hooks/keyword-detector.js +20 -8
- package/dist/hooks/keyword-detector.js.map +1 -1
- package/dist/hud/__tests__/reconcile.test.d.ts +2 -0
- package/dist/hud/__tests__/reconcile.test.d.ts.map +1 -0
- package/dist/hud/__tests__/reconcile.test.js +83 -0
- package/dist/hud/__tests__/reconcile.test.js.map +1 -0
- package/dist/hud/__tests__/render.test.js +43 -0
- package/dist/hud/__tests__/render.test.js.map +1 -1
- package/dist/hud/constants.d.ts +2 -1
- package/dist/hud/constants.d.ts.map +1 -1
- package/dist/hud/constants.js +2 -1
- package/dist/hud/constants.js.map +1 -1
- package/dist/hud/index.d.ts +4 -1
- package/dist/hud/index.d.ts.map +1 -1
- package/dist/hud/index.js +11 -5
- package/dist/hud/index.js.map +1 -1
- package/dist/hud/reconcile.d.ts +23 -0
- package/dist/hud/reconcile.d.ts.map +1 -0
- package/dist/hud/reconcile.js +71 -0
- package/dist/hud/reconcile.js.map +1 -0
- package/dist/hud/render.d.ts +6 -1
- package/dist/hud/render.d.ts.map +1 -1
- package/dist/hud/render.js +77 -3
- package/dist/hud/render.js.map +1 -1
- package/dist/hud/tmux.d.ts +26 -0
- package/dist/hud/tmux.d.ts.map +1 -0
- package/dist/hud/tmux.js +126 -0
- package/dist/hud/tmux.js.map +1 -0
- package/dist/mcp/bootstrap.d.ts.map +1 -1
- package/dist/mcp/bootstrap.js +16 -6
- package/dist/mcp/bootstrap.js.map +1 -1
- package/dist/mcp/code-intel-server.d.ts +298 -0
- package/dist/mcp/code-intel-server.d.ts.map +1 -1
- package/dist/mcp/code-intel-server.js +9 -5
- package/dist/mcp/code-intel-server.js.map +1 -1
- package/dist/mcp/memory-server.d.ts +195 -1
- package/dist/mcp/memory-server.d.ts.map +1 -1
- package/dist/mcp/memory-server.js +9 -5
- package/dist/mcp/memory-server.js.map +1 -1
- package/dist/mcp/trace-server.d.ts +51 -0
- package/dist/mcp/trace-server.d.ts.map +1 -1
- package/dist/mcp/trace-server.js +9 -5
- package/dist/mcp/trace-server.js.map +1 -1
- package/dist/scripts/__tests__/codex-native-hook.test.js +481 -8
- package/dist/scripts/__tests__/codex-native-hook.test.js.map +1 -1
- package/dist/scripts/codex-native-hook.d.ts.map +1 -1
- package/dist/scripts/codex-native-hook.js +171 -52
- package/dist/scripts/codex-native-hook.js.map +1 -1
- package/dist/scripts/codex-native-pre-post.d.ts +5 -0
- package/dist/scripts/codex-native-pre-post.d.ts.map +1 -1
- package/dist/scripts/codex-native-pre-post.js +86 -0
- package/dist/scripts/codex-native-pre-post.js.map +1 -1
- package/dist/scripts/notify-hook/operational-events.d.ts.map +1 -1
- package/dist/scripts/notify-hook/operational-events.js +7 -2
- package/dist/scripts/notify-hook/operational-events.js.map +1 -1
- package/dist/state/__tests__/operations-ralph-phase.test.d.ts +2 -0
- package/dist/state/__tests__/operations-ralph-phase.test.d.ts.map +1 -0
- package/dist/state/__tests__/operations-ralph-phase.test.js +82 -0
- package/dist/state/__tests__/operations-ralph-phase.test.js.map +1 -0
- package/dist/state/__tests__/operations.test.d.ts +2 -0
- package/dist/state/__tests__/operations.test.d.ts.map +1 -0
- package/dist/state/__tests__/operations.test.js +200 -0
- package/dist/state/__tests__/operations.test.js.map +1 -0
- package/dist/state/__tests__/path-traversal.test.d.ts +2 -0
- package/dist/state/__tests__/path-traversal.test.d.ts.map +1 -0
- package/dist/state/__tests__/path-traversal.test.js +49 -0
- package/dist/state/__tests__/path-traversal.test.js.map +1 -0
- package/dist/state/operations.d.ts +11 -0
- package/dist/state/operations.d.ts.map +1 -0
- package/dist/state/operations.js +233 -0
- package/dist/state/operations.js.map +1 -0
- package/dist/team/__tests__/api-interop.test.js +24 -2
- package/dist/team/__tests__/api-interop.test.js.map +1 -1
- package/dist/team/__tests__/delivery-e2e-smoke.test.js +9 -1
- package/dist/team/__tests__/delivery-e2e-smoke.test.js.map +1 -1
- package/dist/team/__tests__/runtime-cli.test.js +45 -0
- package/dist/team/__tests__/runtime-cli.test.js.map +1 -1
- package/dist/team/__tests__/runtime.test.js +227 -66
- package/dist/team/__tests__/runtime.test.js.map +1 -1
- package/dist/team/__tests__/tmux-session.test.js +33 -0
- package/dist/team/__tests__/tmux-session.test.js.map +1 -1
- package/dist/team/api-interop.d.ts.map +1 -1
- package/dist/team/api-interop.js +2 -1
- package/dist/team/api-interop.js.map +1 -1
- package/dist/team/runtime-cli.d.ts.map +1 -1
- package/dist/team/runtime-cli.js +21 -2
- package/dist/team/runtime-cli.js.map +1 -1
- package/dist/team/runtime.d.ts +8 -0
- package/dist/team/runtime.d.ts.map +1 -1
- package/dist/team/runtime.js +203 -85
- package/dist/team/runtime.js.map +1 -1
- package/dist/team/state/dispatch.d.ts.map +1 -1
- package/dist/team/state/dispatch.js +9 -0
- package/dist/team/state/dispatch.js.map +1 -1
- package/dist/team/tmux-session.js +3 -3
- package/dist/team/tmux-session.js.map +1 -1
- package/dist/team/worktree.d.ts +2 -0
- package/dist/team/worktree.d.ts.map +1 -1
- package/dist/team/worktree.js +7 -1
- package/dist/team/worktree.js.map +1 -1
- package/dist/utils/__tests__/paths.test.js +76 -1
- package/dist/utils/__tests__/paths.test.js.map +1 -1
- package/dist/utils/paths.d.ts +6 -0
- package/dist/utils/paths.d.ts.map +1 -1
- package/dist/utils/paths.js +14 -0
- package/dist/utils/paths.js.map +1 -1
- package/dist/verification/__tests__/ci-rust-gates.test.js +59 -11
- package/dist/verification/__tests__/ci-rust-gates.test.js.map +1 -1
- package/dist/verification/__tests__/ralph-persistence-gate.test.js +1 -4
- package/dist/verification/__tests__/ralph-persistence-gate.test.js.map +1 -1
- package/package.json +6 -1
- package/src/scripts/__tests__/codex-native-hook.test.ts +636 -8
- package/src/scripts/codex-native-hook.ts +249 -60
- package/src/scripts/codex-native-pre-post.ts +104 -0
- package/src/scripts/notify-hook/operational-events.ts +6 -2
package/dist/team/runtime.js
CHANGED
@@ -3,7 +3,7 @@ import { existsSync, appendFileSync, mkdirSync } from 'fs';
 import { mkdir, readdir, readFile, writeFile } from 'fs/promises';
 import { performance } from 'perf_hooks';
 import { spawn, spawnSync } from 'child_process';
-import { sanitizeTeamName, isTmuxAvailable, hasCurrentTmuxClientContext, createTeamSession, buildWorkerProcessLaunchSpec, resolveTeamWorkerCli, resolveTeamWorkerCliPlan, resolveTeamWorkerLaunchMode, waitForWorkerReady, dismissTrustPromptIfPresent,
+import { sanitizeTeamName, isTmuxAvailable, hasCurrentTmuxClientContext, createTeamSession, buildWorkerProcessLaunchSpec, resolveTeamWorkerCli, resolveTeamWorkerCliPlan, resolveTeamWorkerLaunchMode, waitForWorkerReady, dismissTrustPromptIfPresent, sleepFractionalSeconds, sendToWorker, sendToWorkerStdin, isWorkerAlive, getWorkerPanePid, killWorkerByPaneIdAsync, restoreStandaloneHudPane, teardownWorkerPanes, unregisterResizeHook, destroyTeamSession, listPaneIds, listTeamSessions, } from './tmux-session.js';
 import { teamInit as initTeamState, DEFAULT_MAX_WORKERS, teamReadConfig as readTeamConfig, teamWriteWorkerIdentity as writeWorkerIdentity, teamReadWorkerHeartbeat as readWorkerHeartbeat, teamReadWorkerStatus as readWorkerStatus, teamWriteWorkerInbox as writeWorkerInbox, teamCreateTask as createStateTask, teamReadTask as readTask, teamListTasks as listTasks, teamReadManifest as readTeamManifestV2, teamNormalizeGovernance as normalizeTeamGovernance, teamNormalizePolicy as normalizeTeamPolicy, teamClaimTask as claimTask, teamReleaseTaskClaim as releaseTaskClaim, teamReclaimExpiredTaskClaim as reclaimExpiredTaskClaim, teamAppendEvent as appendTeamEvent, teamReadTaskApproval as readTaskApproval, teamListMailbox as listMailboxMessages, teamMarkMessageDelivered as markMessageDelivered, teamMarkMessageNotified as markMessageNotified, teamEnqueueDispatchRequest as enqueueDispatchRequest, teamMarkDispatchRequestNotified as markDispatchRequestNotified, teamTransitionDispatchRequest as transitionDispatchRequest, teamReadDispatchRequest as readDispatchRequest, teamCleanup as cleanupTeamState, teamSaveConfig as saveTeamConfig, teamWriteShutdownRequest as writeShutdownRequest, teamReadShutdownAck as readShutdownAck, teamReadMonitorSnapshot as readMonitorSnapshot, teamWriteMonitorSnapshot as writeMonitorSnapshot, teamReadPhase as readTeamPhaseState, teamWritePhase as writeTeamPhaseState, } from './team-ops.js';
 import { queueInboxInstruction, queueDirectMailboxMessage, queueBroadcastMailboxMessage, waitForDispatchReceipt, } from './mcp-comm.js';
 import { appendTeamDeliveryLogForCwd } from './delivery-log.js';
@@ -11,6 +11,7 @@ import { generateWorkerOverlay, writeTeamWorkerInstructionsFile, removeTeamWorke
 import { loadRolePrompt } from './role-router.js';
 import { composeRoleInstructionsForRole } from '../agents/native-config.js';
 import { codexPromptsDir } from '../utils/paths.js';
+import { isTerminalPhase } from './orchestrator.js';
 import { resolveTeamWorkerLaunchArgs, TEAM_LOW_COMPLEXITY_DEFAULT_MODEL, parseTeamWorkerLaunchArgs, splitWorkerLaunchArgs, resolveAgentDefaultModel, resolveAgentReasoningEffort, } from './model-contract.js';
 import { resolveCanonicalTeamStateRoot } from './state-root.js';
 import { inferPhaseTargetFromTaskCounts, reconcilePhaseStateForMonitor } from './phase-controller.js';
@@ -19,7 +20,7 @@ import { hasStructuredVerificationEvidence } from '../verification/verifier.js';
 import { buildRebalanceDecisions } from './rebalance-policy.js';
 import { readModeState, updateModeState } from '../modes/base.js';
 import { appendTeamCommitHygieneEntries, buildTeamCommitHygieneContext, writeTeamCommitHygieneContext, } from './commit-hygiene.js';
-import { assertCleanLeaderWorkspaceForWorkerWorktrees, ensureWorktree, isGitRepository, planWorktreeTarget, rollbackProvisionedWorktrees, } from './worktree.js';
+import { assertCleanLeaderWorkspaceForWorkerWorktrees, ensureWorktree, isGitRepository, isWorktreeDirty, planWorktreeTarget, removeWorktreeForce, rollbackProvisionedWorktrees, } from './worktree.js';
 async function syncRootTeamModeStateOnTerminalPhase(teamName, phase, cwd) {
     if (phase !== 'complete' && phase !== 'failed' && phase !== 'cancelled')
         return;
@@ -50,6 +51,26 @@ async function syncRootTeamModeStateOnTerminalPhase(teamName, phase, cwd) {
         // Best-effort compatibility sync only.
     }
 }
+async function assertTeamStartupIsNonDestructive(teamName, cwd, leaderSessionId) {
+    const activeTeams = await findActiveTeams(cwd, leaderSessionId);
+    if (activeTeams.length > 0) {
+        throw new Error(`leader_session_conflict: active team exists (${activeTeams.join(', ')})`);
+    }
+    const [existingConfig, existingManifest, existingPhase] = await Promise.all([
+        readTeamConfig(teamName, cwd),
+        readTeamManifestV2(teamName, cwd),
+        readTeamPhaseState(teamName, cwd),
+    ]);
+    if (!existingConfig && !existingManifest)
+        return;
+    const currentPhase = existingPhase?.current_phase;
+    if (currentPhase && isTerminalPhase(currentPhase))
+        return;
+    const tmuxSession = existingConfig?.tmux_session ?? existingManifest?.tmux_session ?? `omx-team-${teamName}`;
+    const renderedPhase = currentPhase ?? 'team-exec';
+    throw new Error(`team_name_conflict: active team state already exists for "${teamName}" (phase: ${renderedPhase}, tmux: ${tmuxSession}). `
+        + `Use "omx team status ${teamName}", "omx team resume ${teamName}", or "omx team shutdown ${teamName}" instead of launching a duplicate team.`);
+}
 export function applyCreatedInteractiveSessionToConfig(config, createdSession, workerPaneIds) {
     config.tmux_session = createdSession.name;
     config.leader_pane_id = createdSession.leaderPaneId;
@@ -88,9 +109,15 @@ function collectShutdownPaneIds(params) {
     return [...paneIds];
 }
 export function shouldPrekillInteractiveShutdownProcessTrees(sessionName) {
-    //
-    //
-
+    // Shared-window tmux sessions can expose overlapping ancestry around the
+    // invoking leader client. Rely on pane-targeted teardown there so shutdown
+    // does not signal the leader while tearing down worker panes.
+    if (sessionName.includes(':'))
+        return false;
+    // Detached session teardown still benefits from process-tree prekill,
+    // including native Windows prompt-worker ancestry where pane-targeted
+    // teardown alone is insufficient.
+    return true;
 }
 async function logRuntimeDispatchOutcome(params) {
     const { cwd, teamName, workerName, requestId, messageId, intent, outcome, source = 'team.runtime' } = params;
@@ -768,6 +795,44 @@ async function prepareWorkerWorktreeShutdownReports(config, leaderCwd) {
     }
     return reports;
 }
+function listDirtyShutdownWorkers(config) {
+    const dirtyWorkers = [];
+    for (const worker of config.workers) {
+        if (!worker.worktree_repo_root || !worker.worktree_path || !existsSync(worker.worktree_path))
+            continue;
+        const worktreePath = resolve(worker.worktree_path);
+        const repoRoot = resolve(worker.worktree_repo_root);
+        const status = runGitCommand(repoRoot, ['status', '--porcelain'], worktreePath);
+        if (!status.ok || status.stdout.trim().length > 0) {
+            dirtyWorkers.push(worker.name);
+        }
+    }
+    return dirtyWorkers;
+}
+async function classifyShutdown(params) {
+    const { teamName, cwd, config, governance, confirmIssues } = params;
+    const allTasks = await listTasks(teamName, cwd);
+    const gate = {
+        total: allTasks.length,
+        pending: allTasks.filter((t) => t.status === 'pending').length,
+        blocked: allTasks.filter((t) => t.status === 'blocked').length,
+        in_progress: allTasks.filter((t) => t.status === 'in_progress').length,
+        completed: allTasks.filter((t) => t.status === 'completed').length,
+        failed: allTasks.filter((t) => t.status === 'failed').length,
+        allowed: false,
+    };
+    const dirtyWorkers = listDirtyShutdownWorkers(config);
+    const hasBlockingBacklog = gate.pending > 0 || gate.blocked > 0 || gate.in_progress > 0;
+    const requiresIssueConfirmation = gate.failed > 0 && dirtyWorkers.length === 0 && !confirmIssues;
+    gate.allowed = governance.cleanup_requires_all_workers_inactive !== true
+        || (!hasBlockingBacklog && !requiresIssueConfirmation);
+    return {
+        gate,
+        dirtyWorkers,
+        requiresIssueConfirmation,
+        useCleanFastPath: dirtyWorkers.length === 0 && !hasBlockingBacklog && (gate.failed === 0 || confirmIssues),
+    };
+}
 function resolveEffectiveTeamWorktreeMode(leaderCwd, requestedMode) {
     if (!isGitRepository(leaderCwd)) {
         return { enabled: false };
@@ -1275,6 +1340,9 @@ export async function startTeam(teamName, task, agentType, workerCount, tasks, c
     const leaderCwd = resolve(cwd);
     await assertNestedTeamAllowed(leaderCwd);
     const effectiveWorktreeMode = resolveEffectiveTeamWorktreeMode(leaderCwd, options.worktreeMode);
+    const sanitized = sanitizeTeamName(teamName);
+    const leaderSessionId = await resolveLeaderSessionId(leaderCwd);
+    await assertTeamStartupIsNonDestructive(sanitized, leaderCwd, leaderSessionId);
     const workerLaunchMode = resolveTeamWorkerLaunchMode(process.env);
     const displayMode = workerLaunchMode === 'interactive' ? 'split_pane' : 'auto';
     if (workerLaunchMode === 'interactive') {
@@ -1285,7 +1353,6 @@ export async function startTeam(teamName, task, agentType, workerCount, tasks, c
             throw new Error('Team mode requires running inside tmux current leader pane');
         }
     }
-    const sanitized = sanitizeTeamName(teamName);
     const teamStateRoot = resolveCanonicalTeamStateRoot(leaderCwd);
     const activeWorktreeMode = effectiveWorktreeMode.enabled
         ? (effectiveWorktreeMode.detached ? 'detached' : 'named')
@@ -1296,6 +1363,7 @@ export async function startTeam(teamName, task, agentType, workerCount, tasks, c
     for (let i = 1; i <= workerCount; i++) {
         workerWorkspaceByName.set(`worker-${i}`, { cwd: leaderCwd });
     }
+    await detectAndCleanStaleTeam(sanitized, leaderCwd, workerCount, options.confirmStaleCleanup);
     if (activeWorktreeMode) {
         assertCleanLeaderWorkspaceForWorkerWorktrees(leaderCwd);
         for (let i = 1; i <= workerCount; i++) {
@@ -1321,12 +1389,6 @@ export async function startTeam(teamName, task, agentType, workerCount, tasks, c
             }
         }
     }
-    const leaderSessionId = await resolveLeaderSessionId(leaderCwd);
-    // Topology guard: one active team per leader session/process context.
-    const activeTeams = await findActiveTeams(leaderCwd, leaderSessionId);
-    if (activeTeams.length > 0) {
-        throw new Error(`leader_session_conflict: active team exists (${activeTeams.join(', ')})`);
-    }
     // 2. Team name is already sanitized above.
     let sessionName = `omx-team-${sanitized}`;
     const overlay = generateWorkerOverlay(sanitized);
@@ -2043,6 +2105,8 @@ export async function reassignTask(teamName, taskId, _fromWorker, toWorker, cwd)
  */
 export async function shutdownTeam(teamName, cwd, options = {}) {
     const force = options.force === true;
+    const confirmIssues = options.confirmIssues === true;
+    let skipWorkerAcks = false;
     const sanitized = sanitizeTeamName(teamName);
     const config = await readTeamConfig(sanitized, cwd);
     if (!config) {
@@ -2060,26 +2124,26 @@ export async function shutdownTeam(teamName, cwd, options = {}) {
     const manifest = await readTeamManifestV2(sanitized, cwd);
     const governance = resolveGovernancePolicy(manifest?.governance, manifest?.policy);
     if (!force) {
-        const
-
-
-
-
-
-
-
-            allowed: false,
-        };
-        gate.allowed = governance.cleanup_requires_all_workers_inactive !== true
-            || (gate.pending === 0 && gate.blocked === 0 && gate.in_progress === 0 && gate.failed === 0);
+        const classification = await classifyShutdown({
+            teamName: sanitized,
+            cwd,
+            config,
+            governance,
+            confirmIssues,
+        });
+        const { gate, dirtyWorkers, requiresIssueConfirmation, useCleanFastPath } = classification;
         await appendTeamEvent(sanitized, {
             type: 'shutdown_gate',
             worker: 'leader-fixed',
-            reason: `allowed=${gate.allowed} total=${gate.total} pending=${gate.pending} blocked=${gate.blocked} in_progress=${gate.in_progress} completed=${gate.completed} failed=${gate.failed} cleanup_requires_all_workers_inactive=${governance.cleanup_requires_all_workers_inactive}`,
+            reason: `allowed=${gate.allowed} total=${gate.total} pending=${gate.pending} blocked=${gate.blocked} in_progress=${gate.in_progress} completed=${gate.completed} failed=${gate.failed} cleanup_requires_all_workers_inactive=${governance.cleanup_requires_all_workers_inactive} dirty_workers=${dirtyWorkers.join('|') || 'none'} confirm_issues=${confirmIssues} clean_fast_path=${useCleanFastPath}`,
         }, cwd).catch(() => { });
         if (!gate.allowed) {
+            if (requiresIssueConfirmation) {
+                throw new Error(`shutdown_confirm_issues_required:failed=${gate.failed}:rerun=omx team shutdown ${sanitized} --confirm-issues`);
+            }
             throw new Error(`shutdown_gate_blocked:pending=${gate.pending},blocked=${gate.blocked},in_progress=${gate.in_progress},failed=${gate.failed}`);
         }
+        skipWorkerAcks = useCleanFastPath;
     }
     if (force) {
         await appendTeamEvent(sanitized, {
@@ -2088,73 +2152,81 @@ export async function shutdownTeam(teamName, cwd, options = {}) {
             reason: 'force_bypass',
         }, cwd).catch(() => { });
     }
+    if (force && config.worker_launch_mode === 'prompt') {
+        // Prompt-mode workers are raw CLI children, not team-runtime workers that
+        // participate in the shutdown-ack handshake. Waiting the full ack window
+        // before force-killing them only adds deterministic suite slowness.
+        skipWorkerAcks = true;
+    }
     const sessionName = config.tmux_session;
     const dispatchPolicy = resolveDispatchPolicy(manifest?.policy, config.worker_launch_mode);
     const shutdownRequestTimes = new Map();
-
-
-        try {
-            const requestedAt = new Date().toISOString();
-            await writeShutdownRequest(sanitized, w.name, 'leader-fixed', cwd);
-            shutdownRequestTimes.set(w.name, requestedAt);
-            const triggerDirective = buildTriggerDirective(w.name, sanitized, resolveInstructionStateRoot(w.worktree_path));
-            await dispatchCriticalInboxInstruction({
-                teamName: sanitized,
-                config,
-                workerName: w.name,
-                workerIndex: w.index,
-                paneId: w.pane_id,
-                inbox: generateShutdownInbox(sanitized, w.name),
-                triggerMessage: triggerDirective.text,
-                intent: triggerDirective.intent,
-                cwd,
-                dispatchPolicy,
-                inboxCorrelationKey: `shutdown:${w.name}`,
-            });
-        }
-        catch (err) {
-            process.stderr.write(`[team/runtime] operation failed: ${err}\n`);
-        }
-    }
-    // 2. Wait up to 15s for workers to exit and collect acks
-    const deadline = Date.now() + 15_000;
-    const rejected = [];
-    const ackedWorkers = new Set();
-    while (Date.now() < deadline) {
+    if (!skipWorkerAcks) {
+        // 1. Send shutdown inbox to each worker
         for (const w of config.workers) {
-
-
-
-
-
-
-
-
+            try {
+                const requestedAt = new Date().toISOString();
+                await writeShutdownRequest(sanitized, w.name, 'leader-fixed', cwd);
+                shutdownRequestTimes.set(w.name, requestedAt);
+                const triggerDirective = buildTriggerDirective(w.name, sanitized, resolveInstructionStateRoot(w.worktree_path));
+                await dispatchCriticalInboxInstruction({
+                    teamName: sanitized,
+                    config,
+                    workerName: w.name,
+                    workerIndex: w.index,
+                    paneId: w.pane_id,
+                    inbox: generateShutdownInbox(sanitized, w.name),
+                    triggerMessage: triggerDirective.text,
+                    intent: triggerDirective.intent,
+                    cwd,
+                    dispatchPolicy,
+                    inboxCorrelationKey: `shutdown:${w.name}`,
+                });
             }
-
-
-            rejected.push({ worker: w.name, reason: ack.reason || 'no_reason' });
-        }
+            catch (err) {
+                process.stderr.write(`[team/runtime] operation failed: ${err}\n`);
             }
         }
-
-
-
+        // 2. Wait up to 15s for workers to exit and collect acks
+        const deadline = Date.now() + 15_000;
+        const rejected = [];
+        const ackedWorkers = new Set();
+        while (Date.now() < deadline) {
+            for (const w of config.workers) {
+                const ack = await readShutdownAck(sanitized, w.name, cwd, shutdownRequestTimes.get(w.name));
+                if (ack && !ackedWorkers.has(w.name)) {
+                    ackedWorkers.add(w.name);
+                    await appendTeamEvent(sanitized, {
+                        type: 'shutdown_ack',
+                        worker: w.name,
+                        reason: ack.status === 'reject' ? `reject:${ack.reason || 'no_reason'}` : 'accept',
+                    }, cwd);
+                }
+                if (ack?.status === 'reject') {
+                    if (!rejected.some((r) => r.worker === w.name)) {
+                        rejected.push({ worker: w.name, reason: ack.reason || 'no_reason' });
+                    }
+                }
+            }
+            if (rejected.length > 0 && !force) {
+                const detail = rejected.map(r => `${r.worker}:${r.reason}`).join(',');
+                throw new Error(`shutdown_rejected:${detail}`);
+            }
+            const anyAlive = config.workers.some((w) => (config.worker_launch_mode === 'prompt'
+                ? isPromptWorkerAlive(config, w)
+                : isWorkerAlive(sessionName, w.index, w.pane_id)));
+            if (!anyAlive)
+                break;
+            // Sleep 2s
+            await new Promise(resolve => setTimeout(resolve, 2000));
         }
-        const
+        const anyAliveAfterWait = config.workers.some((w) => (config.worker_launch_mode === 'prompt'
             ? isPromptWorkerAlive(config, w)
             : isWorkerAlive(sessionName, w.index, w.pane_id)));
-        if (!
-
-
-
-        }
-        const anyAliveAfterWait = config.workers.some((w) => (config.worker_launch_mode === 'prompt'
-            ? isPromptWorkerAlive(config, w)
-            : isWorkerAlive(sessionName, w.index, w.pane_id)));
-        if (anyAliveAfterWait && !force) {
-            // Workers may have accepted shutdown but not exited (Codex TUI requires explicit exit).
-            // In this case, proceed to force kill panes (next step) rather than failing and leaving state around.
+        if (anyAliveAfterWait && !force) {
+            // Workers may have accepted shutdown but not exited (Codex TUI requires explicit exit).
+            // In this case, proceed to force kill panes (next step) rather than failing and leaving state around.
+        }
     }
     // 3. Force kill remaining workers
     const leaderPaneId = config.leader_pane_id;
@@ -2396,6 +2468,52 @@ async function findActiveTeams(cwd, leaderSessionId) {
     }
     return active;
 }
+async function detectAndCleanStaleTeam(teamName, leaderCwd, workerCount, confirmFn) {
+    const stateDir = join(leaderCwd, '.omx', 'state', 'team', teamName);
+    if (!existsSync(stateDir))
+        return;
+    const sessions = new Set(listTeamSessions());
+    if (sessions.has(`omx-team-${teamName}`))
+        return;
+    const repoRootResult = spawnSync('git', ['rev-parse', '--show-toplevel'], {
+        cwd: leaderCwd, encoding: 'utf-8', windowsHide: true,
+    });
+    if (repoRootResult.status !== 0)
+        return;
+    const repoRoot = repoRootResult.stdout.trim();
+    const worktreePaths = [];
+    for (let i = 1; i <= workerCount; i++) {
+        const wtPath = join(repoRoot, '.omx', 'team', teamName, 'worktrees', `worker-${i}`);
+        if (existsSync(wtPath))
+            worktreePaths.push(wtPath);
+    }
+    if (worktreePaths.length === 0) {
+        await cleanupTeamState(teamName, leaderCwd);
+        return;
+    }
+    const hasDirtyWorktrees = worktreePaths.some((p) => {
+        try {
+            return isWorktreeDirty(p);
+        }
+        catch {
+            return false;
+        }
+    });
+    const summary = { teamName, worktreePaths, statePath: stateDir, hasDirtyWorktrees };
+    if (!confirmFn) {
+        throw new Error(`stale_team_artifacts:${teamName}:${worktreePaths.length}_worktrees:` +
+            'pass_confirmStaleCleanup_or_manually_remove');
+    }
+    const confirmed = await confirmFn(summary);
+    if (!confirmed) {
+        throw new Error(`stale_team_cleanup_declined:${teamName}:` +
+            'manually_remove_worktrees_and_state_before_retrying');
+    }
+    for (const wtPath of worktreePaths) {
+        await removeWorktreeForce(repoRoot, wtPath);
+    }
+    await cleanupTeamState(teamName, leaderCwd);
+}
 async function resolveLeaderSessionId(cwd) {
     const fromEnv = process.env.OMX_SESSION_ID || process.env.CODEX_SESSION_ID || process.env.SESSION_ID;
     if (fromEnv && fromEnv.trim() !== '')
@@ -2615,7 +2733,7 @@ async function dispatchCriticalInboxInstruction(params) {
     if (receipt?.status === 'failed') {
         const fallback = await notifyWorkerOutcome(config, workerIndex, triggerMessage, paneId);
         if (fallback.ok) {
-            await transitionDispatchRequest(teamName, queued.request_id, '
+            await transitionDispatchRequest(teamName, queued.request_id, 'pending', 'failed', { last_reason: `fallback_confirmed_after_failed_receipt:${fallback.reason}` }, cwd).catch(() => { });
             return {
                 ok: true,
                 transport: fallback.transport,
@@ -2641,7 +2759,7 @@ async function dispatchCriticalInboxInstruction(params) {
         if (fallback.ok) {
            const marked = await markDispatchRequestNotified(teamName, queued.request_id, { last_reason: `fallback_confirmed:${fallback.reason}` }, cwd);
            if (!marked) {
-                await transitionDispatchRequest(teamName, queued.request_id, '
+                await transitionDispatchRequest(teamName, queued.request_id, 'pending', 'failed', { last_reason: `fallback_confirmed_after_failed_receipt:${fallback.reason}` }, cwd).catch(() => { });
            }
            return {
                ok: true,
@@ -2683,7 +2801,7 @@ async function finalizeHookPreferredMailboxDispatch(params) {
     if (receipt?.status === 'failed') {
         if (fallback.ok) {
             await markMessageNotified(teamName, workerName, messageId, cwd).catch(() => false);
-            await transitionDispatchRequest(teamName, requestId, 'failed', 'failed', { message_id: messageId, last_reason: `fallback_confirmed_after_failed_receipt:${fallback.reason}` }, cwd).catch(() =>
+            await transitionDispatchRequest(teamName, requestId, 'failed', 'failed', { message_id: messageId, last_reason: `fallback_confirmed_after_failed_receipt:${fallback.reason}` }, cwd).catch(() => null);
             const outcome = {
                 ok: true,
                 transport: fallback.transport,