agentxchain 2.128.0 → 2.130.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/README.md +2 -0
  2. package/bin/agentxchain.js +38 -4
  3. package/package.json +1 -1
  4. package/scripts/verify-post-publish.sh +55 -5
  5. package/src/commands/accept-turn.js +14 -0
  6. package/src/commands/checkpoint-turn.js +35 -0
  7. package/src/commands/connector.js +17 -2
  8. package/src/commands/doctor.js +151 -1
  9. package/src/commands/events.js +7 -1
  10. package/src/commands/init.js +42 -11
  11. package/src/commands/inject.js +1 -1
  12. package/src/commands/mission.js +803 -7
  13. package/src/commands/reissue-turn.js +122 -0
  14. package/src/commands/reject-turn.js +60 -6
  15. package/src/commands/restart.js +81 -10
  16. package/src/commands/resume.js +20 -9
  17. package/src/commands/run.js +13 -0
  18. package/src/commands/status.js +58 -4
  19. package/src/commands/step.js +49 -10
  20. package/src/commands/validate.js +78 -20
  21. package/src/lib/cli-version.js +106 -0
  22. package/src/lib/connector-probe.js +146 -5
  23. package/src/lib/continuous-run.js +22 -87
  24. package/src/lib/coordinator-dispatch.js +25 -0
  25. package/src/lib/dispatch-bundle.js +39 -0
  26. package/src/lib/governed-state.js +624 -11
  27. package/src/lib/governed-templates.js +1 -0
  28. package/src/lib/intake.js +233 -77
  29. package/src/lib/mission-plans.js +510 -6
  30. package/src/lib/missions.js +65 -6
  31. package/src/lib/normalized-config.js +50 -15
  32. package/src/lib/repo-observer.js +8 -2
  33. package/src/lib/run-events.js +5 -0
  34. package/src/lib/run-loop.js +25 -0
  35. package/src/lib/runner-interface.js +2 -0
  36. package/src/lib/session-checkpoint.js +18 -2
  37. package/src/lib/turn-checkpoint.js +221 -0
  38. package/src/templates/governed/full-local-cli.json +71 -0
@@ -1,8 +1,9 @@
1
1
  import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from 'fs';
2
2
  import { join } from 'path';
3
3
  import { loadAllChainReports, loadChainReport, loadLatestChainReport } from './chain-reports.js';
4
- import { buildPlanProgressSummary, loadLatestPlan } from './mission-plans.js';
4
+ import { buildPlanProgressSummary, loadLatestPlan, synchronizeCoordinatorPlanState } from './mission-plans.js';
5
5
  import { getActiveRepoDecisions } from './repo-decisions.js';
6
+ import { getCoordinatorStatus } from './coordinator-state.js';
6
7
 
7
8
  const MISSION_ATTENTION_TERMINALS = new Set(['operator_abort', 'parent_validation_failed']);
8
9
  const MISSION_ATTENTION_RUN_STATUSES = new Set(['blocked', 'failed']);
@@ -140,12 +141,31 @@ export function buildMissionSnapshot(root, missionArtifact) {
140
141
  const totalRuns = chains.reduce((sum, chain) => sum + (chain.runs?.length || 0), 0);
141
142
  const totalTurns = chains.reduce((sum, chain) => sum + (chain.total_turns || 0), 0);
142
143
  const latestChain = chains[0] || null;
143
- const latestPlan = loadLatestPlan(root, missionArtifact.mission_id);
144
+ let latestPlan = loadLatestPlan(root, missionArtifact.mission_id);
144
145
  const activeRepoDecisions = getActiveRepoDecisions(root);
145
146
 
147
+ // Load coordinator status if mission is bound to a multi-repo coordinator
148
+ let coordinatorStatus = null;
149
+ if (missionArtifact.coordinator && missionArtifact.coordinator.super_run_id) {
150
+ const workspacePath = missionArtifact.coordinator.workspace_path || root;
151
+ try {
152
+ const cs = getCoordinatorStatus(workspacePath);
153
+ coordinatorStatus = cs || { unreachable: true, super_run_id: missionArtifact.coordinator.super_run_id };
154
+ } catch {
155
+ coordinatorStatus = { unreachable: true, super_run_id: missionArtifact.coordinator.super_run_id };
156
+ }
157
+ }
158
+
159
+ if (latestPlan && missionArtifact.coordinator && latestPlan.coordinator_scope) {
160
+ const syncedPlan = synchronizeCoordinatorPlanState(root, missionArtifact, latestPlan);
161
+ if (syncedPlan.ok) {
162
+ latestPlan = syncedPlan.plan;
163
+ }
164
+ }
165
+
146
166
  return {
147
167
  ...missionArtifact,
148
- derived_status: deriveMissionStatus(missionArtifact, chains, missingChainIds),
168
+ derived_status: deriveMissionStatus(missionArtifact, chains, missingChainIds, coordinatorStatus),
149
169
  chain_count: chainIds.length,
150
170
  attached_chain_count: chains.length,
151
171
  missing_chain_ids: missingChainIds,
@@ -155,6 +175,7 @@ export function buildMissionSnapshot(root, missionArtifact) {
155
175
  latest_terminal_reason: latestChain?.terminal_reason || null,
156
176
  latest_plan: buildPlanProgressSummary(latestPlan),
157
177
  active_repo_decisions_count: activeRepoDecisions.length,
178
+ coordinator_status: coordinatorStatus,
158
179
  chains,
159
180
  };
160
181
  }
@@ -163,19 +184,24 @@ export function loadAllMissionSnapshots(root) {
163
184
  return loadAllMissionArtifacts(root).map((mission) => buildMissionSnapshot(root, mission));
164
185
  }
165
186
 
166
- function deriveMissionStatus(missionArtifact, chains, missingChainIds) {
187
+ function deriveMissionStatus(missionArtifact, chains, missingChainIds, coordinatorStatus) {
167
188
  if (missionArtifact.status && missionArtifact.status !== 'active') {
168
189
  return missionArtifact.status;
169
190
  }
191
+ // Coordinator-bound missions: check coordinator health
192
+ if (coordinatorStatus && !coordinatorStatus.unreachable && coordinatorStatus.status === 'blocked') {
193
+ return 'needs_attention';
194
+ }
170
195
  if (missingChainIds.length > 0) return 'degraded';
171
- if (chains.length === 0) return 'planned';
196
+ if (chains.length === 0 && !coordinatorStatus) return 'planned';
172
197
  if (chains.some((chain) => (
173
198
  MISSION_ATTENTION_TERMINALS.has(chain.terminal_reason)
174
199
  || (chain.runs || []).some((run) => MISSION_ATTENTION_RUN_STATUSES.has(run.status))
175
200
  ))) {
176
201
  return 'needs_attention';
177
202
  }
178
- return 'progressing';
203
+ if (coordinatorStatus && !coordinatorStatus.unreachable) return 'progressing';
204
+ return chains.length > 0 ? 'progressing' : 'planned';
179
205
  }
180
206
 
181
207
  export function loadLatestMissionSnapshot(root) {
@@ -196,3 +222,36 @@ export function loadMissionAttachmentTarget(root, missionId) {
196
222
  if (missionId) return loadMissionArtifact(root, missionId);
197
223
  return loadLatestMissionArtifact(root);
198
224
  }
225
+
226
+ /**
227
+ * Bind a coordinator super_run_id to a mission artifact.
228
+ *
229
+ * @param {string} root - project root
230
+ * @param {string} missionId - mission to bind
231
+ * @param {{ super_run_id: string, config_path: string, workspace_path?: string }} coordinatorRef
232
+ * @returns {{ ok: boolean, mission?: object, error?: string }}
233
+ */
234
+ export function bindCoordinatorToMission(root, missionId, coordinatorRef) {
235
+ const mission = loadMissionArtifact(root, missionId);
236
+ if (!mission) {
237
+ return { ok: false, error: `Mission not found: ${missionId}` };
238
+ }
239
+
240
+ if (!coordinatorRef || typeof coordinatorRef.super_run_id !== 'string') {
241
+ return { ok: false, error: 'coordinator super_run_id is required' };
242
+ }
243
+
244
+ const updated = {
245
+ ...mission,
246
+ coordinator: {
247
+ super_run_id: coordinatorRef.super_run_id,
248
+ config_path: coordinatorRef.config_path || null,
249
+ workspace_path: coordinatorRef.workspace_path || '.',
250
+ },
251
+ updated_at: new Date().toISOString(),
252
+ };
253
+
254
+ mkdirSync(getMissionsDir(root), { recursive: true });
255
+ writeFileSync(join(getMissionsDir(root), `${updated.mission_id}.json`), JSON.stringify(updated, null, 2));
256
+ return { ok: true, mission: updated };
257
+ }
@@ -330,6 +330,53 @@ export function detectConfigVersion(raw) {
330
330
  return null;
331
331
  }
332
332
 
333
+ function formatInvalidReviewOnlyLocalCliBindingError(roleId, runtimeId) {
334
+ return `Role "${roleId}" uses invalid review_only + local_cli binding on runtime "${runtimeId}" — change write_authority to "authoritative" for local CLI automation, or move the role to "manual", "api_proxy", "mcp", or "remote_agent"`;
335
+ }
336
+
337
+ function formatInvalidAuthoritativeBindingError(roleId, runtimeId, runtimeType, writeAuthority) {
338
+ return `Role "${roleId}" has write_authority "${writeAuthority}" but uses ${runtimeType} runtime "${runtimeId}" — ${runtimeType} only supports review_only and proposed roles`;
339
+ }
340
+
341
+ export function findAuthorityRuntimeBindingIssues(data) {
342
+ const issues = [];
343
+
344
+ if (!data?.roles || !data?.runtimes) {
345
+ return issues;
346
+ }
347
+
348
+ for (const [roleId, role] of Object.entries(data.roles)) {
349
+ if (!role?.runtime || !data.runtimes[role.runtime]) {
350
+ continue;
351
+ }
352
+
353
+ const runtime = data.runtimes[role.runtime];
354
+ const contract = getRoleRuntimeCapabilityContract(roleId, role, runtime);
355
+
356
+ if (contract.effective_write_path === 'invalid_review_only_binding') {
357
+ issues.push({
358
+ role_id: roleId,
359
+ runtime_id: role.runtime,
360
+ runtime_type: runtime.type,
361
+ write_authority: role.write_authority,
362
+ effective_write_path: contract.effective_write_path,
363
+ message: formatInvalidReviewOnlyLocalCliBindingError(roleId, role.runtime),
364
+ });
365
+ } else if (contract.effective_write_path === 'invalid_authoritative_binding') {
366
+ issues.push({
367
+ role_id: roleId,
368
+ runtime_id: role.runtime,
369
+ runtime_type: runtime.type,
370
+ write_authority: role.write_authority,
371
+ effective_write_path: contract.effective_write_path,
372
+ message: formatInvalidAuthoritativeBindingError(roleId, role.runtime, runtime.type, role.write_authority),
373
+ });
374
+ }
375
+ }
376
+
377
+ return issues;
378
+ }
379
+
333
380
  /**
334
381
  * Validate a governed config.
335
382
  * Returns { ok, errors }.
@@ -458,21 +505,9 @@ export function validateV4Config(data, projectRoot) {
458
505
  }
459
506
  }
460
507
 
461
- // Cross-reference: review_only roles should not use authoritative runtimes
462
- if (data.roles && data.runtimes) {
463
- for (const [id, role] of Object.entries(data.roles)) {
464
- if (role.runtime && data.runtimes[role.runtime]) {
465
- const rt = data.runtimes[role.runtime];
466
- const contract = getRoleRuntimeCapabilityContract(id, role, rt);
467
- if (contract.effective_write_path === 'invalid_review_only_binding') {
468
- errors.push(`Role "${id}" is review_only but uses local_cli runtime "${role.runtime}" — review_only roles should not have authoritative write access`);
469
- } else if (contract.effective_write_path === 'invalid_authoritative_binding') {
470
- errors.push(
471
- `Role "${id}" has write_authority "${role.write_authority}" but uses ${rt.type} runtime "${role.runtime}" — ${rt.type} only supports review_only and proposed roles`
472
- );
473
- }
474
- }
475
- }
508
+ // Cross-reference: role authority must match the runtime's write contract
509
+ for (const issue of findAuthorityRuntimeBindingIssues(data)) {
510
+ errors.push(issue.message);
476
511
  }
477
512
 
478
513
  // Routing (optional but validated if present)
@@ -540,7 +540,7 @@ export function compareDeclaredVsObserved(declared, observed, writeAuthority, op
540
540
  // The attribution system will handle later-accepted siblings correctly.
541
541
  warnings.push(`Undeclared file changes detected (likely from concurrent sibling turns): ${undeclared.join(', ')}`);
542
542
  } else {
543
- errors.push(`Undeclared file changes detected (observed but not in files_changed): ${undeclared.join(', ')}`);
543
+ errors.push(`Undeclared file changes detected (observed but not in files_changed): ${undeclared.join(', ')}. If these files were changed by the operator (not the turn), add them to the dispatch baseline by committing or stashing them before dispatch.`);
544
544
  }
545
545
  }
546
546
  if (phantom.length > 0) {
@@ -622,6 +622,7 @@ export function checkCleanBaseline(root, writeAuthority) {
622
622
 
623
623
  return {
624
624
  clean: false,
625
+ dirty_files: actorDirtyFiles,
625
626
  reason: `Working tree has uncommitted changes in actor-owned files: ${actorDirtyFiles.slice(0, 5).join(', ')}${actorDirtyFiles.length > 5 ? '...' : ''}. Authoritative/proposed turns require a clean baseline in v1. Commit or stash those changes before assigning the next code-writing turn.`,
626
627
  };
627
628
  }
@@ -701,7 +702,12 @@ function getWorkingTreeChanges(root) {
701
702
  }
702
703
  }
703
704
 
704
- function captureDirtyWorkspaceSnapshot(root) {
705
+ /**
706
+ * Capture hashes of all non-operational dirty files in the workspace.
707
+ * Used at baseline time AND at dispatch time to snapshot pre-existing dirt
708
+ * so acceptance can filter unchanged files (BUG-1 fix).
709
+ */
710
+ export function captureDirtyWorkspaceSnapshot(root) {
705
711
  const snapshot = {};
706
712
  for (const filePath of getWorkingTreeChanges(root).filter((filePath) => !isOperationalPath(filePath))) {
707
713
  snapshot[filePath] = getWorkspaceFileMarker(root, filePath);
@@ -18,6 +18,9 @@ export const VALID_RUN_EVENTS = [
18
18
  'turn_accepted',
19
19
  'turn_rejected',
20
20
  'turn_conflicted',
21
+ 'acceptance_failed',
22
+ 'turn_reissued',
23
+ 'turn_checkpointed',
21
24
  'run_blocked',
22
25
  'run_completed',
23
26
  'escalation_raised',
@@ -41,6 +44,7 @@ export const VALID_RUN_EVENTS = [
41
44
  * @param {string} [details.phase] - Current phase
42
45
  * @param {string} [details.status] - Current run status
43
46
  * @param {object} [details.turn] - Turn context (turn_id, role_id, etc.)
47
+ * @param {string} [details.intent_id] - Intake intent id when the event services queued intake work
44
48
  * @param {object} [details.payload] - Additional event-specific data
45
49
  * @returns {{ ok: boolean, event_id: string }}
46
50
  */
@@ -54,6 +58,7 @@ export function emitRunEvent(root, eventType, details = {}) {
54
58
  phase: details.phase || null,
55
59
  status: details.status || null,
56
60
  turn: details.turn || null,
61
+ intent_id: details.intent_id || null,
57
62
  payload: details.payload || {},
58
63
  };
59
64
 
@@ -23,6 +23,7 @@ import {
23
23
  rejectTurn,
24
24
  markRunBlocked,
25
25
  writeDispatchBundle,
26
+ refreshTurnBaselineSnapshot,
26
27
  getTurnStagingResultPath,
27
28
  approvePhaseGate,
28
29
  approveCompletionGate,
@@ -308,6 +309,8 @@ async function executeParallelTurns(root, config, state, maxConcurrent, callback
308
309
  // ── Build dispatch contexts ──────────────────────────────────────────
309
310
  const contexts = [];
310
311
  for (const { turn, state: turnState } of turnsToDispatch) {
312
+ // BUG-1 fix: refresh baseline to capture files dirtied between assignment and dispatch
313
+ refreshTurnBaselineSnapshot(root, turn.turn_id);
311
314
  const bundleResult = writeDispatchBundle(root, turnState, config, { turnId: turn.turn_id });
312
315
  if (!bundleResult.ok) {
313
316
  errors.push(`writeDispatchBundle(${turn.assigned_role}): ${bundleResult.error}`);
@@ -393,6 +396,16 @@ async function executeParallelTurns(root, config, state, maxConcurrent, callback
393
396
 
394
397
  acceptedCount++;
395
398
  history.push({ role: roleId, turn_id: turn.turn_id, accepted: true });
399
+ if (callbacks.afterAccept) {
400
+ const afterAcceptResult = await callbacks.afterAccept({ turn, acceptResult });
401
+ if (afterAcceptResult?.ok === false) {
402
+ errors.push(`afterAccept(${roleId}): ${afterAcceptResult.error}`);
403
+ if (afterAcceptResult.state) {
404
+ emit({ type: 'blocked', state: afterAcceptResult.state, reason: 'after_accept_failed' });
405
+ }
406
+ return { terminal: true, ok: false, stop_reason: 'blocked', history, acceptedCount };
407
+ }
408
+ }
396
409
  emit({ type: 'turn_accepted', turn, role: roleId, state: acceptResult.state });
397
410
  } else {
398
411
  const validationResult = {
@@ -442,6 +455,8 @@ async function dispatchAndProcess(root, config, turn, assignState, callbacks, em
442
455
  const roleId = turn.assigned_role;
443
456
  const history = [];
444
457
 
458
+ // BUG-1 fix: refresh baseline to capture files dirtied between assignment and dispatch
459
+ refreshTurnBaselineSnapshot(root, turn.turn_id);
445
460
  const bundleResult = writeDispatchBundle(root, assignState, config);
446
461
  if (!bundleResult.ok) {
447
462
  errors.push(`writeDispatchBundle(${roleId}): ${bundleResult.error}`);
@@ -507,6 +522,16 @@ async function dispatchAndProcess(root, config, turn, assignState, callbacks, em
507
522
  }
508
523
 
509
524
  history.push({ role: roleId, turn_id: turn.turn_id, accepted: true });
525
+ if (callbacks.afterAccept) {
526
+ const afterAcceptResult = await callbacks.afterAccept({ turn, acceptResult });
527
+ if (afterAcceptResult?.ok === false) {
528
+ errors.push(`afterAccept(${roleId}): ${afterAcceptResult.error}`);
529
+ if (afterAcceptResult.state) {
530
+ emit({ type: 'blocked', state: afterAcceptResult.state, reason: 'after_accept_failed' });
531
+ }
532
+ return { terminal: true, ok: false, stop_reason: 'blocked', history };
533
+ }
534
+ }
510
535
  emit({ type: 'turn_accepted', turn, role: roleId, state: acceptResult.state });
511
536
  return { terminal: false, accepted: true, history };
512
537
  }
@@ -39,6 +39,8 @@ export {
39
39
  getActiveTurn,
40
40
  acquireAcceptanceLock as acquireLock,
41
41
  releaseAcceptanceLock as releaseLock,
42
+ refreshTurnBaselineSnapshot,
43
+ reissueTurn,
42
44
  } from './governed-state.js';
43
45
 
44
46
  // ── Dispatch ────────────────────────────────────────────────────────────────
@@ -103,8 +103,24 @@ export function writeSessionCheckpoint(root, state, reason, extra = {}) {
103
103
  const pendingGate = state.pending_phase_transition?.gate || state.pending_transition?.gate || null;
104
104
  const pendingRunCompletion = state.pending_run_completion?.gate || null;
105
105
 
106
- // Capture git baseline for repo-drift detection
107
- const baselineRef = extra.baseline_ref || captureBaselineRef(root);
106
+ // Capture git baseline for repo-drift detection.
107
+ // When a turn_baseline from captureBaseline() is provided, derive
108
+ // baseline_ref from it so session.json and state.json always agree
109
+ // on workspace-dirty status (BUG-2 fix).
110
+ let baselineRef;
111
+ if (extra.turn_baseline) {
112
+ baselineRef = {
113
+ git_head: extra.turn_baseline.head_ref || null,
114
+ git_branch: null,
115
+ workspace_dirty: !extra.turn_baseline.clean,
116
+ };
117
+ // Fill in git_branch if available
118
+ try {
119
+ baselineRef.git_branch = shellExec('git rev-parse --abbrev-ref HEAD', { cwd: root, encoding: 'utf8', stdio: ['pipe', 'pipe', 'pipe'] }).trim();
120
+ } catch { /* non-fatal */ }
121
+ } else {
122
+ baselineRef = extra.baseline_ref || captureBaselineRef(root);
123
+ }
108
124
 
109
125
  const checkpoint = {
110
126
  session_id: sessionId,
@@ -0,0 +1,221 @@
1
+ import { execFileSync } from 'node:child_process';
2
+ import { existsSync, readFileSync, writeFileSync } from 'node:fs';
3
+ import { join } from 'node:path';
4
+ import { resolveAcceptedTurnHistoryReference } from './accepted-turn-history.js';
5
+ import { emitRunEvent } from './run-events.js';
6
+ import { safeWriteJson } from './safe-write.js';
7
+
8
+ const STATE_PATH = '.agentxchain/state.json';
9
+ const HISTORY_PATH = '.agentxchain/history.jsonl';
10
+
11
+ function readState(root) {
12
+ const filePath = join(root, STATE_PATH);
13
+ if (!existsSync(filePath)) return null;
14
+ try {
15
+ return JSON.parse(readFileSync(filePath, 'utf8'));
16
+ } catch {
17
+ return null;
18
+ }
19
+ }
20
+
21
+ function writeState(root, state) {
22
+ safeWriteJson(join(root, STATE_PATH), state);
23
+ }
24
+
25
+ function readHistoryEntries(root) {
26
+ const filePath = join(root, HISTORY_PATH);
27
+ if (!existsSync(filePath)) return [];
28
+ const raw = readFileSync(filePath, 'utf8').trim();
29
+ if (!raw) return [];
30
+ return raw.split('\n').filter(Boolean).map((line) => JSON.parse(line));
31
+ }
32
+
33
+ function writeHistoryEntries(root, entries) {
34
+ const filePath = join(root, HISTORY_PATH);
35
+ const content = entries.map((entry) => JSON.stringify(entry)).join('\n');
36
+ writeFileSync(filePath, content ? `${content}\n` : '');
37
+ }
38
+
39
+ function git(root, args) {
40
+ return execFileSync('git', args, {
41
+ cwd: root,
42
+ encoding: 'utf8',
43
+ stdio: ['ignore', 'pipe', 'pipe'],
44
+ }).trim();
45
+ }
46
+
47
+ function isGitRepo(root) {
48
+ try {
49
+ return git(root, ['rev-parse', '--is-inside-work-tree']) === 'true';
50
+ } catch {
51
+ return false;
52
+ }
53
+ }
54
+
55
+ function normalizeFilesChanged(filesChanged) {
56
+ return [...new Set(
57
+ (Array.isArray(filesChanged) ? filesChanged : [])
58
+ .filter((value) => typeof value === 'string')
59
+ .map((value) => value.trim())
60
+ .filter(Boolean),
61
+ )];
62
+ }
63
+
64
+ function extractGitError(err) {
65
+ const stderr = typeof err?.stderr === 'string' ? err.stderr.trim() : '';
66
+ const stdout = typeof err?.stdout === 'string' ? err.stdout.trim() : '';
67
+ return stderr || stdout || err?.message || 'git command failed';
68
+ }
69
+
70
+ function buildCheckpointCommit(entry) {
71
+ const subject = `checkpoint: ${entry.turn_id} (role=${entry.role}, phase=${entry.phase})`;
72
+ const bodyLines = [
73
+ `Summary: ${entry.summary || '(none)'}`,
74
+ `Turn-ID: ${entry.turn_id}`,
75
+ `Role: ${entry.role || '(unknown)'}`,
76
+ `Phase: ${entry.phase || '(unknown)'}`,
77
+ `Runtime: ${entry.runtime_id || '(unknown)'}`,
78
+ ];
79
+ if (entry.intent_id) bodyLines.push(`Intent-ID: ${entry.intent_id}`);
80
+ if (entry.accepted_at) bodyLines.push(`Accepted-At: ${entry.accepted_at}`);
81
+ return { subject, body: bodyLines.join('\n') };
82
+ }
83
+
84
+ export function detectPendingCheckpoint(root, dirtyFiles = []) {
85
+ const actorDirtyFiles = normalizeFilesChanged(dirtyFiles);
86
+ if (actorDirtyFiles.length === 0) return { required: false };
87
+
88
+ const resolved = resolveAcceptedTurnHistoryReference(root, null);
89
+ if (!resolved.ok || !resolved.entry) return { required: false };
90
+
91
+ const entry = resolved.entry;
92
+ if (entry.checkpoint_sha) return { required: false };
93
+
94
+ const turnFiles = normalizeFilesChanged(entry.files_changed);
95
+ if (turnFiles.length === 0) return { required: false };
96
+
97
+ const dirtyOutsideTurn = actorDirtyFiles.filter((file) => !turnFiles.includes(file));
98
+ if (dirtyOutsideTurn.length > 0) return { required: false };
99
+
100
+ return {
101
+ required: true,
102
+ turn_id: entry.turn_id,
103
+ message: `Accepted turn ${entry.turn_id} is not checkpointed yet. Run agentxchain checkpoint-turn --turn ${entry.turn_id} before assigning the next code-writing turn.`,
104
+ };
105
+ }
106
+
107
+ export function checkpointAcceptedTurn(root, opts = {}) {
108
+ if (!isGitRepo(root)) {
109
+ return { ok: false, error: 'checkpoint-turn requires a git repository.' };
110
+ }
111
+
112
+ const resolved = resolveAcceptedTurnHistoryReference(root, opts.turnId || opts.turn || null);
113
+ if (!resolved.ok || !resolved.entry) {
114
+ return { ok: false, error: resolved.error || 'Accepted turn not found.' };
115
+ }
116
+
117
+ const entry = resolved.entry;
118
+ if (entry.checkpoint_sha) {
119
+ return {
120
+ ok: true,
121
+ already_checkpointed: true,
122
+ turn: entry,
123
+ checkpoint_sha: entry.checkpoint_sha,
124
+ };
125
+ }
126
+
127
+ const filesChanged = normalizeFilesChanged(entry.files_changed);
128
+ if (filesChanged.length === 0) {
129
+ return {
130
+ ok: true,
131
+ skipped: true,
132
+ turn: entry,
133
+ reason: 'Accepted turn has no writable files_changed paths to checkpoint.',
134
+ };
135
+ }
136
+
137
+ try {
138
+ git(root, ['add', '-A', '--', ...filesChanged]);
139
+ } catch (err) {
140
+ return {
141
+ ok: false,
142
+ turn: entry,
143
+ error: `Failed to stage accepted files for checkpoint: ${extractGitError(err)}`,
144
+ };
145
+ }
146
+
147
+ let staged = [];
148
+ try {
149
+ staged = git(root, ['diff', '--cached', '--name-only', '--', ...filesChanged])
150
+ .split('\n')
151
+ .map((value) => value.trim())
152
+ .filter(Boolean);
153
+ } catch (err) {
154
+ return {
155
+ ok: false,
156
+ turn: entry,
157
+ error: `Failed to inspect staged checkpoint diff: ${extractGitError(err)}`,
158
+ };
159
+ }
160
+
161
+ if (staged.length === 0) {
162
+ return {
163
+ ok: true,
164
+ skipped: true,
165
+ turn: entry,
166
+ reason: `Accepted turn ${entry.turn_id} has no staged repo changes to checkpoint.`,
167
+ };
168
+ }
169
+
170
+ const commit = buildCheckpointCommit(entry);
171
+ try {
172
+ git(root, ['commit', '-m', commit.subject, '-m', commit.body]);
173
+ } catch (err) {
174
+ return {
175
+ ok: false,
176
+ turn: entry,
177
+ error: `Checkpoint commit failed: ${extractGitError(err)}`,
178
+ };
179
+ }
180
+
181
+ const checkpointSha = git(root, ['rev-parse', 'HEAD']);
182
+ const checkpointedAt = new Date().toISOString();
183
+
184
+ const historyEntries = readHistoryEntries(root).map((historyEntry) => (
185
+ historyEntry.turn_id === entry.turn_id
186
+ ? { ...historyEntry, checkpoint_sha: checkpointSha, checkpointed_at: checkpointedAt }
187
+ : historyEntry
188
+ ));
189
+ writeHistoryEntries(root, historyEntries);
190
+
191
+ const state = readState(root);
192
+ if (state) {
193
+ writeState(root, {
194
+ ...state,
195
+ last_completed_turn: {
196
+ turn_id: entry.turn_id,
197
+ role: entry.role || null,
198
+ phase: entry.phase || null,
199
+ checkpoint_sha: checkpointSha,
200
+ checkpointed_at: checkpointedAt,
201
+ intent_id: entry.intent_id || null,
202
+ },
203
+ });
204
+
205
+ emitRunEvent(root, 'turn_checkpointed', {
206
+ run_id: state.run_id || null,
207
+ phase: state.phase || null,
208
+ status: state.status || null,
209
+ turn: { turn_id: entry.turn_id, role_id: entry.role || null },
210
+ intent_id: entry.intent_id || null,
211
+ payload: { checkpoint_sha: checkpointSha, checkpointed_at: checkpointedAt },
212
+ });
213
+ }
214
+
215
+ return {
216
+ ok: true,
217
+ turn: entry,
218
+ checkpoint_sha: checkpointSha,
219
+ checkpointed_at: checkpointedAt,
220
+ };
221
+ }
@@ -0,0 +1,71 @@
1
+ {
2
+ "id": "full-local-cli",
3
+ "display_name": "Full Local CLI",
4
+ "description": "Human-gated automation pattern with PM, Dev, QA, and Director all running as authoritative local_cli roles.",
5
+ "version": "1",
6
+ "protocol_compatibility": ["1.0", "1.1"],
7
+ "planning_artifacts": [],
8
+ "system_spec_overlay": {
9
+ "purpose_guidance": "Describe the repo as a human-gated all-local automation system. Clarify that PM, Dev, QA, and Engineering Director execute through local CLI runtimes while humans retain approval authority at phase and completion gates.",
10
+ "interface_guidance": "Document the governed operator boundary: which local CLI runtimes are expected, how prompts are delivered, and which approvals remain human-gated even though all core roles are automated.",
11
+ "behavior_guidance": "Explain the dispatch rhythm for authoritative local CLI turns, the clean-working-tree requirement between accepted automated turns, and how injected intents or reissued turns change the next dispatch charter.",
12
+ "error_cases_guidance": "Call out weak authority flags, stale runtime bindings, dirty-baseline failures, and post-dispatch drift as first-class operational risks for this template.",
13
+ "acceptance_tests_guidance": "- [ ] PM, Dev, QA, and Engineering Director all bind to authoritative local_cli runtimes\n- [ ] Human approval remains required for phase transitions and run completion\n- [ ] Operator docs include the clean-tree checkpoint rule between accepted automated turns\n- [ ] Connector validation confirms the downstream CLI authority mode is strong enough for unattended governed execution"
14
+ },
15
+ "prompt_overrides": {},
16
+ "acceptance_hints": [],
17
+ "scaffold_blueprint": {
18
+ "roles": {
19
+ "pm": {
20
+ "title": "Product Manager",
21
+ "mandate": "Protect user value, scope clarity, and acceptance criteria.",
22
+ "write_authority": "authoritative",
23
+ "runtime": "local-pm"
24
+ },
25
+ "dev": {
26
+ "title": "Developer",
27
+ "mandate": "Implement approved work safely and verify behavior.",
28
+ "write_authority": "authoritative",
29
+ "runtime": "local-dev"
30
+ },
31
+ "qa": {
32
+ "title": "QA",
33
+ "mandate": "Challenge correctness, acceptance coverage, and ship readiness.",
34
+ "write_authority": "authoritative",
35
+ "runtime": "local-qa"
36
+ },
37
+ "eng_director": {
38
+ "title": "Engineering Director",
39
+ "mandate": "Resolve tactical deadlocks and enforce technical coherence.",
40
+ "write_authority": "authoritative",
41
+ "runtime": "local-director"
42
+ }
43
+ },
44
+ "runtimes": {
45
+ "local-pm": {
46
+ "type": "local_cli",
47
+ "command": ["claude", "--print", "--dangerously-skip-permissions"],
48
+ "cwd": ".",
49
+ "prompt_transport": "stdin"
50
+ },
51
+ "local-dev": {
52
+ "type": "local_cli",
53
+ "command": ["claude", "--print", "--dangerously-skip-permissions"],
54
+ "cwd": ".",
55
+ "prompt_transport": "stdin"
56
+ },
57
+ "local-qa": {
58
+ "type": "local_cli",
59
+ "command": ["claude", "--print", "--dangerously-skip-permissions"],
60
+ "cwd": ".",
61
+ "prompt_transport": "stdin"
62
+ },
63
+ "local-director": {
64
+ "type": "local_cli",
65
+ "command": ["claude", "--print", "--dangerously-skip-permissions"],
66
+ "cwd": ".",
67
+ "prompt_transport": "stdin"
68
+ }
69
+ }
70
+ }
71
+ }