agentxchain 2.94.0 → 2.96.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -61,6 +61,7 @@ import { superviseCommand } from '../src/commands/supervise.js';
61
61
  import { validateCommand } from '../src/commands/validate.js';
62
62
  import { verifyExportCommand, verifyProtocolCommand, verifyTurnCommand } from '../src/commands/verify.js';
63
63
  import { replayTurnCommand } from '../src/commands/replay.js';
64
+ import { replayExportCommand } from '../src/commands/replay-export.js';
64
65
  import { kickoffCommand } from '../src/commands/kickoff.js';
65
66
  import { rebindCommand } from '../src/commands/rebind.js';
66
67
  import { branchCommand } from '../src/commands/branch.js';
@@ -410,6 +411,14 @@ replayCmd
410
411
  .option('--timeout <ms>', 'Per-command replay timeout in milliseconds', '30000')
411
412
  .action(replayTurnCommand);
412
413
 
414
+ replayCmd
415
+ .command('export <export-file>')
416
+ .description('Browse a completed export in the dashboard for offline post-mortem analysis')
417
+ .option('-j, --json', 'Output session info as JSON')
418
+ .option('--port <port>', 'Dashboard port', '3847')
419
+ .option('--no-open', 'Do not auto-open browser')
420
+ .action(replayExportCommand);
421
+
413
422
  program
414
423
  .command('migrate')
415
424
  .description('Migrate a legacy v3 project to governed format')
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "agentxchain",
3
- "version": "2.94.0",
3
+ "version": "2.96.1",
4
4
  "description": "CLI for AgentXchain — governed multi-agent software delivery",
5
5
  "type": "module",
6
6
  "bin": {
@@ -0,0 +1,166 @@
1
+ /**
2
+ * CLI command: agentxchain replay export <export-file>
3
+ *
4
+ * Starts the dashboard bridge-server serving a completed export's state
5
+ * for offline post-mortem analysis. The dashboard is fully read-only:
6
+ * no file watcher, no gate approval, no WebSocket push.
7
+ */
8
+
9
import { existsSync, mkdirSync, readFileSync, rmSync, writeFileSync } from 'fs';
import { dirname, isAbsolute, join, relative, resolve } from 'path';
import { fileURLToPath } from 'url';
import { tmpdir } from 'os';
import { randomBytes } from 'crypto';
import chalk from 'chalk';

import { createBridgeServer } from '../lib/dashboard/bridge-server.js';
17
+
18
+ const __dirname = dirname(fileURLToPath(import.meta.url));
19
+
20
/**
 * CLI command: `agentxchain replay export <export-file>`.
 *
 * Restores the files embedded in a completed export into a throwaway temp
 * workspace, then serves that workspace through the dashboard bridge-server
 * in read-only replay mode (no live updates, no gate approval) for offline
 * post-mortem analysis.
 *
 * Exits with code 2 on usage/input errors and 1 on environment errors.
 *
 * @param {string} exportFile - Path to the export JSON file.
 * @param {object} [opts] - CLI options: `json`, `port`, `open`.
 */
export async function replayExportCommand(exportFile, opts = {}) {
  if (!exportFile) {
    console.error(chalk.red('Usage: agentxchain replay export <export-file>'));
    process.exit(2);
  }

  const exportPath = resolve(exportFile);
  if (!existsSync(exportPath)) {
    console.error(chalk.red(`Export file not found: ${exportPath}`));
    process.exit(2);
  }

  let exportData;
  try {
    exportData = JSON.parse(readFileSync(exportPath, 'utf8'));
  } catch (err) {
    console.error(chalk.red(`Failed to parse export file: ${err.message}`));
    process.exit(2);
  }

  if (!exportData.files || typeof exportData.files !== 'object') {
    console.error(chalk.red('Export file missing "files" object. Not a valid agentxchain export.'));
    process.exit(2);
  }

  // Create temp workspace with exported files
  const tempId = randomBytes(8).toString('hex');
  const tempRoot = join(tmpdir(), `agentxchain-replay-${tempId}`);
  const tempAgentxchainDir = join(tempRoot, '.agentxchain');

  try {
    mkdirSync(tempRoot, { recursive: true });
    mkdirSync(tempAgentxchainDir, { recursive: true });

    // Write all embedded files from the export. The export file is untrusted
    // input, so reject any entry whose resolved path would escape the temp
    // workspace (zip-slip style path traversal via "../" or absolute paths).
    let fileCount = 0;
    for (const [relPath, content] of Object.entries(exportData.files)) {
      const absPath = resolve(tempRoot, relPath);
      const relFromRoot = relative(tempRoot, absPath);
      if (!relFromRoot || relFromRoot.startsWith('..') || isAbsolute(relFromRoot)) {
        console.error(chalk.red(`Export contains unsafe file path: ${relPath}`));
        cleanup(tempRoot);
        process.exit(2);
      }
      mkdirSync(dirname(absPath), { recursive: true });
      writeFileSync(absPath, typeof content === 'string' ? content : JSON.stringify(content, null, 2));
      fileCount++;
    }

    // Ensure agentxchain.json exists (needed by some dashboard endpoints)
    const configPath = join(tempRoot, 'agentxchain.json');
    if (!existsSync(configPath)) {
      // Synthesize a minimal config from export summary
      const minimalConfig = {
        protocol_version: exportData.summary?.protocol_version || 6,
        protocol_mode: 'governed',
        version: 4,
        project: { name: exportData.summary?.project_name || 'replay-export' },
        roles: exportData.summary?.roles || {},
        runtimes: {},
        workflow: exportData.summary?.workflow || {},
      };
      writeFileSync(configPath, JSON.stringify(minimalConfig, null, 2));
    }

    const dashboardDir = join(__dirname, '..', '..', 'dashboard');
    if (!existsSync(dashboardDir)) {
      console.error(chalk.red('Dashboard assets not found.'));
      cleanup(tempRoot);
      process.exit(1);
    }

    const port = parseInt(opts.port, 10) || 3847;
    const bridge = createBridgeServer({
      agentxchainDir: tempAgentxchainDir,
      dashboardDir,
      port,
      replayMode: true,
    });

    const { port: actualPort } = await bridge.start();
    const url = `http://localhost:${actualPort}`;

    const runId = exportData.summary?.run_id || null;
    const schemaVersion = exportData.schema_version || null;

    if (opts.json) {
      console.log(JSON.stringify({
        port: actualPort,
        url,
        export_file: exportPath,
        run_id: runId,
        export_schema_version: schemaVersion,
        files_restored: fileCount,
        temp_dir: tempRoot,
      }, null, 2));
    } else {
      console.log('');
      console.log(chalk.bold(` Replay Export Dashboard`));
      console.log(chalk.dim(' ' + '─'.repeat(40)));
      console.log(` ${chalk.dim('Export:')} ${exportPath}`);
      console.log(` ${chalk.dim('Run ID:')} ${runId || '—'}`);
      console.log(` ${chalk.dim('Schema:')} ${schemaVersion || '—'}`);
      console.log(` ${chalk.dim('Files:')} ${fileCount} restored`);
      console.log(` ${chalk.dim('URL:')} ${chalk.cyan(url)}`);
      console.log('');
      console.log(chalk.dim(' Read-only mode — no live updates, no gate approval.'));
      console.log(chalk.dim(' Press Ctrl+C to stop.'));
      console.log('');
    }

    // Auto-open the browser unless --no-open or --json was given.
    if (opts.open !== false && !opts.json) {
      try {
        const { exec } = await import('child_process');
        const openCmd = process.platform === 'darwin' ? 'open'
          : process.platform === 'win32' ? 'start'
          : 'xdg-open';
        exec(`${openCmd} ${url}`);
      } catch {
        // Browser open is best-effort
      }
    }

    // Graceful shutdown on Ctrl+C / SIGTERM: stop the server, then remove
    // the temp workspace. The guard prevents double-shutdown on both signals.
    let shuttingDown = false;
    const shutdown = async () => {
      if (shuttingDown) return;
      shuttingDown = true;
      if (!opts.json) {
        console.log('\nShutting down replay dashboard...');
      }
      await bridge.stop();
      cleanup(tempRoot);
      process.exit(0);
    };
    process.on('SIGINT', shutdown);
    process.on('SIGTERM', shutdown);
  } catch (err) {
    cleanup(tempRoot);
    if (err.code === 'EADDRINUSE') {
      console.error(chalk.red(`Port ${opts.port || 3847} is already in use. Try --port <number>.`));
      process.exit(1);
    }
    throw err;
  }
}
159
+
160
/**
 * Best-effort recursive removal of the temporary replay workspace.
 * Never throws — leaving a stray temp dir behind is harmless.
 *
 * @param {string} tempRoot - Directory to delete.
 */
function cleanup(tempRoot) {
  try {
    rmSync(tempRoot, { force: true, recursive: true });
  } catch {
    // Ignore failures; cleanup is best-effort.
  }
}
@@ -16,9 +16,11 @@ import { readFileSync, existsSync } from 'fs';
16
16
  import { join, extname, resolve, sep } from 'path';
17
17
  import { readResource } from './state-reader.js';
18
18
  import { FileWatcher } from './file-watcher.js';
19
+ import { readRunEvents, RUN_EVENTS_PATH } from '../run-events.js';
19
20
  import { approvePendingDashboardGate } from './actions.js';
20
21
  import { readCoordinatorBlockerSnapshot } from './coordinator-blockers.js';
21
22
  import { readCoordinatorTimeoutStatus } from './coordinator-timeout-status.js';
23
+ import { readAggregatedCoordinatorEvents, watchChildRepoEvents } from './coordinator-event-aggregation.js';
22
24
  import { readWorkflowKitArtifacts } from './workflow-kit-artifacts.js';
23
25
  import { readConnectorHealthSnapshot } from './connectors.js';
24
26
  import { readTimeoutStatus } from './timeout-status.js';
@@ -213,11 +215,22 @@ function resolveDashboardAssetPath(dashboardDir, pathname) {
213
215
 
214
216
  // ── Bridge Server ───────────────────────────────────────────────────────────
215
217
 
216
- export function createBridgeServer({ agentxchainDir, dashboardDir, port = 3847 }) {
218
+ export function createBridgeServer({ agentxchainDir, dashboardDir, port = 3847, replayMode = false }) {
217
219
  const workspacePath = resolve(agentxchainDir, '..');
218
220
  const wsClients = new Set();
221
+ /** @type {Map<import('net').Socket, Set<string>|null>} null = all events */
222
+ const wsEventSubscriptions = new Map();
219
223
  const watcher = new FileWatcher(agentxchainDir);
220
224
  const mutationToken = randomBytes(24).toString('hex');
225
+ let lastEventsFileSize = 0;
226
+
227
+ // Initialize events file size tracking
228
+ try {
229
+ const eventsPath = join(agentxchainDir, 'events.jsonl');
230
+ if (existsSync(eventsPath)) {
231
+ lastEventsFileSize = readFileSync(eventsPath).length;
232
+ }
233
+ } catch {}
221
234
 
222
235
  // Broadcast invalidation events to all connected WebSocket clients
223
236
  watcher.on('invalidate', ({ resource }) => {
@@ -225,8 +238,53 @@ export function createBridgeServer({ agentxchainDir, dashboardDir, port = 3847 }
225
238
  for (const socket of wsClients) {
226
239
  sendWsFrame(socket, msg);
227
240
  }
241
+
242
+ // For events.jsonl changes, also push actual event data
243
+ if (resource === '/api/events') {
244
+ try {
245
+ const eventsPath = join(agentxchainDir, 'events.jsonl');
246
+ if (!existsSync(eventsPath)) return;
247
+ const content = readFileSync(eventsPath, 'utf8');
248
+ if (content.length <= lastEventsFileSize) {
249
+ // File was truncated — reset and push all
250
+ if (content.length < lastEventsFileSize) lastEventsFileSize = 0;
251
+ else return;
252
+ }
253
+ const newContent = content.slice(lastEventsFileSize);
254
+ lastEventsFileSize = content.length;
255
+ const lines = newContent.split('\n').filter(Boolean);
256
+ for (const line of lines) {
257
+ try {
258
+ const evt = JSON.parse(line);
259
+ for (const socket of wsClients) {
260
+ const filter = wsEventSubscriptions.get(socket);
261
+ if (filter && !filter.has(evt.event_type)) continue;
262
+ sendWsFrame(socket, JSON.stringify({ type: 'event', event: evt }));
263
+ }
264
+ } catch {}
265
+ }
266
+ } catch {}
267
+ }
228
268
  });
229
269
 
270
+ // Set up child-repo event watchers for coordinator event aggregation
271
+ let childRepoWatcher = null;
272
+ try {
273
+ const watchResult = watchChildRepoEvents(workspacePath, (_repoId, newEvents) => {
274
+ for (const evt of newEvents) {
275
+ const msg = JSON.stringify({ type: 'coordinator_event', repo_id: evt.repo_id, event: evt });
276
+ for (const socket of wsClients) {
277
+ const filter = wsEventSubscriptions.get(socket);
278
+ if (filter && !filter.has('coordinator_event')) continue;
279
+ sendWsFrame(socket, msg);
280
+ }
281
+ }
282
+ });
283
+ if (watchResult.ok) {
284
+ childRepoWatcher = watchResult;
285
+ }
286
+ } catch {}
287
+
230
288
  const server = createServer(async (req, res) => {
231
289
  const method = req.method || 'GET';
232
290
  const isApproveGateRequest = method === 'POST' && req.url && new URL(req.url, `http://${req.headers.host}`).pathname === '/api/actions/approve-gate';
@@ -247,15 +305,21 @@ export function createBridgeServer({ agentxchainDir, dashboardDir, port = 3847 }
247
305
  if (pathname === '/api/session') {
248
306
  writeJson(res, 200, {
249
307
  session_version: '1',
250
- mutation_token: mutationToken,
308
+ mutation_token: replayMode ? null : mutationToken,
309
+ replay_mode: replayMode,
251
310
  capabilities: {
252
- approve_gate: true,
311
+ approve_gate: !replayMode,
253
312
  },
254
313
  });
255
314
  return;
256
315
  }
257
316
 
258
317
  if (pathname === '/api/actions/approve-gate') {
318
+ if (replayMode) {
319
+ writeJson(res, 403, { ok: false, code: 'replay_mode', error: 'Replay mode: gate approval is not available on exported snapshots.' });
320
+ return;
321
+ }
322
+
259
323
  if (method !== 'POST') {
260
324
  writeJson(res, 405, { ok: false, code: 'method_not_allowed', error: 'Use POST for dashboard actions.' }, { Allow: 'POST' });
261
325
  return;
@@ -290,6 +354,24 @@ export function createBridgeServer({ agentxchainDir, dashboardDir, port = 3847 }
290
354
  return;
291
355
  }
292
356
 
357
+ if (pathname === '/api/coordinator/events') {
358
+ const type = url.searchParams.get('type') || undefined;
359
+ const since = url.searchParams.get('since') || undefined;
360
+ const repoId = url.searchParams.get('repo_id') || undefined;
361
+ const limitParam = url.searchParams.get('limit');
362
+ const limit = limitParam != null ? parseInt(limitParam, 10) : 100;
363
+ const result = readAggregatedCoordinatorEvents(workspacePath, {
364
+ type, since, repo_id: repoId, limit: limit === 0 ? undefined : limit,
365
+ });
366
+ if (!result.ok) {
367
+ const isMissingConfig = typeof result.error === 'string' && result.error.includes('config_missing:');
368
+ writeJson(res, isMissingConfig ? 404 : 500, { error: result.error });
369
+ return;
370
+ }
371
+ writeJson(res, 200, result.events);
372
+ return;
373
+ }
374
+
293
375
  if (pathname === '/api/workflow-kit-artifacts') {
294
376
  const result = readWorkflowKitArtifacts(workspacePath);
295
377
  writeJson(res, result.status, result.body);
@@ -308,6 +390,20 @@ export function createBridgeServer({ agentxchainDir, dashboardDir, port = 3847 }
308
390
  return;
309
391
  }
310
392
 
393
+ if (pathname === '/api/events') {
394
+ const type = url.searchParams.get('type') || undefined;
395
+ const since = url.searchParams.get('since') || undefined;
396
+ const runId = url.searchParams.get('run_id') || undefined;
397
+ const limitParam = url.searchParams.get('limit');
398
+ const limit = limitParam != null ? parseInt(limitParam, 10) : 50;
399
+ let events = readRunEvents(workspacePath, { type, since, limit: limit === 0 ? undefined : limit });
400
+ if (runId) {
401
+ events = events.filter(e => e.run_id === runId);
402
+ }
403
+ writeJson(res, 200, events);
404
+ return;
405
+ }
406
+
311
407
  if (pathname === '/api/run-history') {
312
408
  const url = new URL(req.url, `http://${req.headers.host}`);
313
409
  const limit = url.searchParams.get('limit') ? parseInt(url.searchParams.get('limit'), 10) : undefined;
@@ -365,11 +461,12 @@ export function createBridgeServer({ agentxchainDir, dashboardDir, port = 3847 }
365
461
  if (!ws) return;
366
462
 
367
463
  wsClients.add(ws);
464
+ wsEventSubscriptions.set(ws, null); // null = all events
368
465
 
369
- ws.on('close', () => wsClients.delete(ws));
370
- ws.on('error', () => wsClients.delete(ws));
466
+ ws.on('close', () => { wsClients.delete(ws); wsEventSubscriptions.delete(ws); });
467
+ ws.on('error', () => { wsClients.delete(ws); wsEventSubscriptions.delete(ws); });
371
468
 
372
- // Handle incoming frames (for ping/pong and close detection)
469
+ // Handle incoming frames (for ping/pong, close detection, and subscribe)
373
470
  ws.on('data', (data) => {
374
471
  const frame = parseClientFrame(data);
375
472
  if (!frame) return;
@@ -377,15 +474,30 @@ export function createBridgeServer({ agentxchainDir, dashboardDir, port = 3847 }
377
474
  if (frame.opcode === 0x08) {
378
475
  // Close frame
379
476
  wsClients.delete(ws);
477
+ wsEventSubscriptions.delete(ws);
380
478
  sendWsControlFrame(ws, 0x08, frame.payload);
381
479
  try { ws.end(); } catch {}
382
480
  } else if (frame.opcode === 0x09) {
383
481
  // Ping → Pong
384
482
  sendWsControlFrame(ws, 0x0a, frame.payload);
385
483
  } else if (frame.opcode === 0x01) {
484
+ // Text frame — check for subscribe message
485
+ try {
486
+ const msg = JSON.parse(frame.payload.toString('utf8'));
487
+ if (msg.type === 'subscribe' && Array.isArray(msg.event_types)) {
488
+ wsEventSubscriptions.set(ws, new Set(msg.event_types));
489
+ sendWsFrame(ws, JSON.stringify({ type: 'subscribed', event_types: msg.event_types }));
490
+ return;
491
+ }
492
+ if (msg.type === 'subscribe' && !msg.event_types) {
493
+ wsEventSubscriptions.set(ws, null); // reset to all
494
+ sendWsFrame(ws, JSON.stringify({ type: 'subscribed', event_types: null }));
495
+ return;
496
+ }
497
+ } catch {}
386
498
  sendWsError(
387
499
  ws,
388
- 'Dashboard WebSocket is read-only. Use the authenticated HTTP approve-gate action instead.'
500
+ 'Dashboard WebSocket is read-only except for event subscription. Use the authenticated HTTP approve-gate action for mutations.'
389
501
  );
390
502
  }
391
503
  });
@@ -404,6 +516,7 @@ export function createBridgeServer({ agentxchainDir, dashboardDir, port = 3847 }
404
516
  function stop() {
405
517
  return new Promise((resolve) => {
406
518
  watcher.stop();
519
+ if (childRepoWatcher?.stop) childRepoWatcher.stop();
407
520
  for (const socket of wsClients) {
408
521
  try { socket.destroy(); } catch {}
409
522
  }
@@ -0,0 +1,169 @@
1
+ /**
2
+ * Coordinator event aggregation — merges lifecycle events from all child
3
+ * repos in a multi-repo coordinator run into a single time-ordered stream.
4
+ *
5
+ * See: .planning/COORDINATOR_EVENT_AGGREGATION_SPEC.md
6
+ */
7
+
8
+ import { existsSync, readFileSync, watchFile, unwatchFile, statSync } from 'fs';
9
+ import { join, resolve } from 'path';
10
+ import { loadCoordinatorConfig } from '../coordinator-config.js';
11
+ import { RUN_EVENTS_PATH } from '../run-events.js';
12
+
13
/**
 * Read and merge lifecycle events from every child repo declared in the
 * coordinator config into a single time-ordered stream.
 *
 * @param {string} workspacePath - Coordinator workspace root
 * @param {object} [opts] - Filter options
 * @param {string} [opts.type] - Comma-separated event types
 * @param {string} [opts.since] - ISO-8601 timestamp (exclusive lower bound)
 * @param {number} [opts.limit] - Max events kept from the end (default 100)
 * @param {string} [opts.repo_id] - Filter to one repo
 * @returns {{ ok: boolean, events?: object[], error?: string }}
 */
export function readAggregatedCoordinatorEvents(workspacePath, opts = {}) {
  const loaded = loadCoordinatorConfig(workspacePath);
  if (!loaded.ok) {
    return { ok: false, error: loaded.errors.join('; ') };
  }

  let merged = [];

  for (const [repoId, repoEntry] of Object.entries(loaded.config.repos)) {
    if (opts.repo_id && opts.repo_id !== repoId) continue;

    const eventsFile = join(resolve(workspacePath, repoEntry.path), RUN_EVENTS_PATH);
    if (!existsSync(eventsFile)) continue;

    let text;
    try {
      text = readFileSync(eventsFile, 'utf8');
    } catch {
      // Unreadable child repo — skip it rather than fail the whole merge.
      continue;
    }

    for (const line of text.split('\n')) {
      if (!line) continue;
      try {
        const parsed = JSON.parse(line);
        parsed.repo_id = repoId; // tag each event with its originating repo
        merged.push(parsed);
      } catch {
        // Skip malformed JSONL lines
      }
    }
  }

  // Optional event-type filter (comma-separated list)
  if (opts.type) {
    const wanted = new Set(opts.type.split(',').map((t) => t.trim()));
    merged = merged.filter((e) => wanted.has(e.event_type));
  }

  // Optional "events after this timestamp" filter
  if (opts.since) {
    const cutoff = new Date(opts.since).getTime();
    if (!Number.isNaN(cutoff)) {
      merged = merged.filter((e) => new Date(e.timestamp).getTime() > cutoff);
    }
  }

  // Chronological order; ties broken deterministically by event_id
  merged.sort((a, b) => {
    const delta = new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime();
    return delta !== 0 ? delta : (a.event_id || '').localeCompare(b.event_id || '');
  });

  // Keep only the most recent `limit` events
  const max = opts.limit ?? 100;
  if (max > 0 && merged.length > max) {
    merged = merged.slice(-max);
  }

  return { ok: true, events: merged };
}
89
+
90
/**
 * Set up file watchers on each child repo's events.jsonl so new events can
 * be pushed over WebSocket in near-real time. Tracks the last-seen file size
 * per repo and emits only the appended tail on each change.
 *
 * @param {string} workspacePath - Coordinator workspace root
 * @param {function} onNewEvents - Callback: (repoId, events[]) => void
 * @returns {{ ok: boolean, stop?: function, error?: string }}
 */
export function watchChildRepoEvents(workspacePath, onNewEvents) {
  const loaded = loadCoordinatorConfig(workspacePath);
  if (!loaded.ok) {
    return { ok: false, error: loaded.errors.join('; ') };
  }

  /** eventsPath → { repoId, lastSize } */
  const tracked = new Map();
  const activePaths = [];

  for (const [repoId, repoEntry] of Object.entries(loaded.config.repos)) {
    const eventsPath = join(resolve(workspacePath, repoEntry.path), RUN_EVENTS_PATH);

    let startSize = 0;
    try {
      if (existsSync(eventsPath)) {
        startSize = statSync(eventsPath).size;
      }
    } catch {
      // Treat unreadable files as empty; growth will be picked up later.
    }

    tracked.set(eventsPath, { repoId, lastSize: startSize });

    // fs.watchFile polls via stat, which is reliable across platforms
    try {
      watchFile(eventsPath, { interval: 500 }, (curr) => {
        const state = tracked.get(eventsPath);
        if (!state) return;

        if (curr.size < state.lastSize) {
          // File was truncated — re-read from the beginning
          state.lastSize = 0;
        } else if (curr.size === state.lastSize) {
          // No growth — nothing to emit
          return;
        }

        try {
          const content = readFileSync(eventsPath, 'utf8');
          const appended = content.slice(state.lastSize);
          state.lastSize = content.length;

          const parsedEvents = [];
          for (const line of appended.split('\n')) {
            if (!line) continue;
            try {
              const evt = JSON.parse(line);
              evt.repo_id = state.repoId;
              parsedEvents.push(evt);
            } catch {
              // Skip malformed lines
            }
          }

          if (parsedEvents.length > 0) {
            onNewEvents(state.repoId, parsedEvents);
          }
        } catch {
          // Read failure — retry on the next poll tick
        }
      });
      activePaths.push(eventsPath);
    } catch {
      // Could not install a watcher for this repo; others still work.
    }
  }

  /** Tear down all installed watchers and forget tracked state. */
  function stop() {
    for (const path of activePaths) {
      try { unwatchFile(path); } catch {}
    }
    tracked.clear();
  }

  return { ok: true, stop };
}
@@ -17,6 +17,7 @@ const HISTORY_FILE = 'history.jsonl';
17
17
  const LEDGER_FILE = 'decision-ledger.jsonl';
18
18
  const HOOK_AUDIT_FILE = 'hook-audit.jsonl';
19
19
  const HOOK_ANNOTATIONS_FILE = 'hook-annotations.jsonl';
20
+ const EVENTS_FILE = 'events.jsonl';
20
21
  const MULTIREPO_DIR = 'multirepo';
21
22
  const BARRIERS_FILE = 'barriers.json';
22
23
  const BARRIER_LEDGER_FILE = 'barrier-ledger.jsonl';
@@ -38,6 +39,7 @@ export const RESOURCE_MAP = {
38
39
  '/api/coordinator/barrier-ledger': join(MULTIREPO_DIR, BARRIER_LEDGER_FILE),
39
40
  '/api/coordinator/hooks/audit': join(MULTIREPO_DIR, HOOK_AUDIT_FILE),
40
41
  '/api/coordinator/hooks/annotations': join(MULTIREPO_DIR, HOOK_ANNOTATIONS_FILE),
42
+ '/api/events': EVENTS_FILE,
41
43
  };
42
44
 
43
45
  /**
@@ -110,6 +110,338 @@ function verifyFilesMap(files, errors) {
110
110
  }
111
111
  }
112
112
 
113
+ function compareEventOrder(a, b) {
114
+ const left = Date.parse(a?.timestamp || '');
115
+ const right = Date.parse(b?.timestamp || '');
116
+ const leftTime = Number.isNaN(left) ? Number.POSITIVE_INFINITY : left;
117
+ const rightTime = Number.isNaN(right) ? Number.POSITIVE_INFINITY : right;
118
+ if (leftTime !== rightTime) {
119
+ return leftTime - rightTime;
120
+ }
121
+ return String(a?.event_id || '').localeCompare(String(b?.event_id || ''));
122
+ }
123
+
124
/**
 * Reconstruct the aggregated-events summary from the nested per-repo exports.
 * Only repos whose nested export succeeded (ok flag plus a plain-object
 * export) contribute; each contributed event is tagged with its repo_id.
 * The result mirrors what a correct summary.aggregated_events must contain.
 */
function buildExpectedAggregatedEventsSummary(repos) {
  const events = [];
  const contributingRepos = new Set();

  for (const [repoId, entry] of Object.entries(repos || {})) {
    const exp = entry?.ok ? entry.export : null;
    if (!exp || typeof exp !== 'object' || Array.isArray(exp)) {
      continue;
    }

    const repoEvents = exp.files?.['.agentxchain/events.jsonl']?.data;
    if (!Array.isArray(repoEvents)) {
      continue;
    }

    for (const rawEvent of repoEvents) {
      if (!rawEvent || typeof rawEvent !== 'object' || Array.isArray(rawEvent)) {
        continue;
      }
      events.push({ ...rawEvent, repo_id: repoId });
      contributingRepos.add(repoId);
    }
  }

  events.sort(compareEventOrder);

  // Tally events per type ('unknown' when neither field is set)
  const eventTypeCounts = {};
  for (const evt of events) {
    const key = evt.event_type || evt.type || 'unknown';
    eventTypeCounts[key] = (eventTypeCounts[key] || 0) + 1;
  }

  return {
    total_events: events.length,
    repos_with_events: [...contributingRepos].sort(),
    event_type_counts: eventTypeCounts,
    events,
  };
}
165
+
166
/**
 * Validate summary.aggregated_events against the event streams embedded in
 * the nested per-repo exports. An absent summary is valid (the section is
 * optional); a present summary must be an object, must not claim events from
 * repos whose nested export failed, and must exactly match the reconstruction.
 */
function verifyAggregatedEventsSummary(artifact, errors) {
  const summary = artifact.summary?.aggregated_events;
  if (summary === undefined || summary === null) {
    // Optional section — nothing to verify.
    return;
  }

  if (!summary || typeof summary !== 'object' || Array.isArray(summary)) {
    addError(errors, 'summary.aggregated_events', 'must be an object when present');
    return;
  }

  // A failed repo carries no trustworthy nested export, so it cannot be a contributor.
  for (const [repoId, repoEntry] of Object.entries(artifact.repos || {})) {
    const isFailedRepo =
      repoEntry && typeof repoEntry === 'object' && !Array.isArray(repoEntry) && repoEntry.ok === false;
    if (isFailedRepo && summary.repos_with_events?.includes(repoId)) {
      addError(
        errors,
        'summary.aggregated_events.repos_with_events',
        `cannot include repo "${repoId}" when repos.${repoId}.ok is false because no nested export proof is available`,
      );
    }
  }

  const expected = buildExpectedAggregatedEventsSummary(artifact.repos);

  if (summary.total_events !== expected.total_events) {
    addError(errors, 'summary.aggregated_events.total_events', 'must match reconstructed aggregated event count');
  }
  if (!isDeepStrictEqual(summary.repos_with_events, expected.repos_with_events)) {
    addError(errors, 'summary.aggregated_events.repos_with_events', 'must match reconstructed contributing repo ids');
  }
  if (!isDeepStrictEqual(summary.event_type_counts, expected.event_type_counts)) {
    addError(errors, 'summary.aggregated_events.event_type_counts', 'must match reconstructed event type counts');
  }
  if (!isDeepStrictEqual(summary.events, expected.events)) {
    addError(errors, 'summary.aggregated_events.events', 'must match reconstructed sorted aggregated event list');
  }
}
206
+
207
/**
 * Reconstruct the delegation summary from the exported history.jsonl.
 *
 * Correlates three kinds of history entries: parent turns that issued
 * delegations (delegations_issued), child turns executing one
 * (delegation_context), and review turns reporting per-delegation results
 * (delegation_review). Returns null when no history is embedded.
 */
function buildExpectedDelegationSummary(files) {
  const history = files?.['.agentxchain/history.jsonl']?.data;
  if (!Array.isArray(history)) {
    return null;
  }

  // Index each entry kind by its correlation key.
  const issuedByParentTurn = new Map();
  const childByDelegationId = new Map();
  const reviewByParentTurn = new Map();

  for (const entry of history) {
    if (Array.isArray(entry.delegations_issued)) {
      issuedByParentTurn.set(entry.turn_id, {
        role: entry.role,
        delegations_issued: entry.delegations_issued,
      });
    }
    if (entry.delegation_context) {
      childByDelegationId.set(entry.delegation_context.delegation_id, {
        turn_id: entry.turn_id,
        status: entry.status || 'completed',
      });
    }
    if (entry.delegation_review) {
      reviewByParentTurn.set(entry.delegation_review.parent_turn_id, {
        turn_id: entry.turn_id,
        results: entry.delegation_review.results || [],
      });
    }
  }

  let totalIssued = 0;
  const chains = [];

  for (const [parentTurnId, parent] of issuedByParentTurn) {
    totalIssued += parent.delegations_issued.length;

    const review = reviewByParentTurn.get(parentTurnId);

    // Review results keyed by delegation id for O(1) joins below.
    const resultByDelegationId = new Map();
    for (const result of review?.results ?? []) {
      if (result.delegation_id) {
        resultByDelegationId.set(result.delegation_id, result);
      }
    }

    const delegations = parent.delegations_issued.map((issued) => {
      const child = childByDelegationId.get(issued.id);
      const result = resultByDelegationId.get(issued.id);
      return {
        delegation_id: issued.id,
        to_role: issued.to_role,
        charter: issued.charter,
        required_decision_ids: Array.isArray(issued.required_decision_ids) ? issued.required_decision_ids : [],
        satisfied_decision_ids: Array.isArray(result?.satisfied_decision_ids) ? result.satisfied_decision_ids : [],
        missing_decision_ids: Array.isArray(result?.missing_decision_ids) ? result.missing_decision_ids : [],
        // Review status wins; otherwise fall back to the child turn's status.
        status: result?.status || child?.status || 'pending',
        child_turn_id: child?.turn_id || null,
      };
    });

    // Without a review turn the chain is still pending; otherwise the
    // per-delegation statuses determine the overall outcome.
    let outcome = 'pending';
    if (review) {
      const statuses = delegations.map((d) => d.status);
      if (statuses.every((s) => s === 'completed')) {
        outcome = 'completed';
      } else if (statuses.every((s) => s === 'failed')) {
        outcome = 'failed';
      } else {
        outcome = 'mixed';
      }
    }

    chains.push({
      parent_turn_id: parentTurnId,
      parent_role: parent.role,
      delegations,
      review_turn_id: review?.turn_id || null,
      outcome,
    });
  }

  return {
    total_delegations_issued: totalIssued,
    delegation_chains: chains,
  };
}
295
+
296
/**
 * Validate summary.delegation_summary against the delegation entries found
 * in the exported history.jsonl. The summary must be present exactly when
 * the history records delegations, and must match the reconstruction.
 */
function verifyDelegationSummary(artifact, errors) {
  const summary = artifact.summary?.delegation_summary;
  const expected = buildExpectedDelegationSummary(artifact.files);

  if (summary == null) {
    // Absent summary is fine unless history actually recorded delegations.
    if (expected != null && expected.total_delegations_issued > 0) {
      addError(errors, 'summary.delegation_summary', 'is null but history.jsonl contains delegation entries');
    }
    return;
  }

  if (expected == null) {
    addError(errors, 'summary.delegation_summary', 'claims delegations but no history.jsonl in export');
    return;
  }

  if (summary.total_delegations_issued !== expected.total_delegations_issued) {
    addError(errors, 'summary.delegation_summary.total_delegations_issued', 'must match reconstructed delegation count');
  }

  if (!isDeepStrictEqual(summary.delegation_chains, expected.delegation_chains)) {
    addError(errors, 'summary.delegation_summary.delegation_chains', 'must match reconstructed delegation chains from history.jsonl');
  }
}
325
+
326
/**
 * Reconstruct the repo-decisions summary from the exported
 * repo-decisions.jsonl. Returns null when the file is absent or empty so
 * callers can require the summary to be omitted in that case.
 */
function buildExpectedRepoDecisionsSummary(files) {
  const decisions = files?.['.agentxchain/repo-decisions.jsonl']?.data;
  if (!Array.isArray(decisions) || decisions.length === 0) {
    return null;
  }

  const withStatus = (status) => decisions.filter((d) => d.status === status);
  const active = withStatus('active');
  const overridden = withStatus('overridden');

  return {
    total: decisions.length,
    active_count: active.length,
    overridden_count: overridden.length,
    active: active.map(({ id, category, statement, role, run_id }) => ({
      id,
      category,
      statement,
      role,
      run_id,
    })),
    overridden: overridden.map(({ id, overridden_by, statement }) => ({
      id,
      overridden_by,
      statement,
    })),
  };
}
353
+
354
/**
 * Check that summary.repo_decisions agrees with the repo-decisions.jsonl
 * data in the export. Mismatches are recorded via addError.
 */
function verifyRepoDecisionsSummary(artifact, errors) {
  const summary = artifact.summary?.repo_decisions;
  const hasFile = '.agentxchain/repo-decisions.jsonl' in (artifact.files || {});
  const expected = buildExpectedRepoDecisionsSummary(artifact.files);

  // Summary absent (null or undefined) and nothing reconstructable — valid.
  if (summary == null && expected === null) {
    return;
  }

  // Summary claims decisions but the export carries no decisions file at all.
  if (summary != null && !hasFile && expected === null) {
    addError(errors, 'summary.repo_decisions', 'claims repo decisions but no .agentxchain/repo-decisions.jsonl in export');
    return;
  }

  // BUG FIX: this branch previously tested `summary === null` only, so an
  // *undefined* summary with reconstructable entries fell through every
  // guard and crashed on `summary.total` below. Treat undefined like null.
  if (summary == null) {
    addError(errors, 'summary.repo_decisions', 'is null but repo-decisions.jsonl contains entries');
    return;
  }

  // File exists but is empty/unparsed while the summary claims decisions.
  if (expected === null) {
    addError(errors, 'summary.repo_decisions', 'claims repo decisions but repo-decisions.jsonl is empty');
    return;
  }

  if (summary.total !== expected.total) {
    addError(errors, 'summary.repo_decisions.total', 'must match reconstructed repo decision count');
  }
  if (summary.active_count !== expected.active_count) {
    addError(errors, 'summary.repo_decisions.active_count', 'must match reconstructed active count');
  }
  if (summary.overridden_count !== expected.overridden_count) {
    addError(errors, 'summary.repo_decisions.overridden_count', 'must match reconstructed overridden count');
  }
  if (!isDeepStrictEqual(summary.active, expected.active)) {
    addError(errors, 'summary.repo_decisions.active', 'must match reconstructed active decisions from repo-decisions.jsonl');
  }
  if (!isDeepStrictEqual(summary.overridden, expected.overridden)) {
    addError(errors, 'summary.repo_decisions.overridden', 'must match reconstructed overridden decisions from repo-decisions.jsonl');
  }
}
397
+
398
// Allowed values for summary.dashboard_session.status.
const VALID_DASHBOARD_STATUSES = new Set(['running', 'pid_only', 'stale', 'not_running']);

/**
 * Validate the optional summary.dashboard_session object: plain-object
 * shape, status enum, per-field types, and status-dependent invariants.
 * Mismatches are recorded via addError.
 */
function verifyDashboardSessionSummary(artifact, errors) {
  const session = artifact.summary?.dashboard_session;
  // Field absent entirely — nothing to validate.
  if (session === undefined) {
    return;
  }

  const isPlainObject = session !== null && typeof session === 'object' && !Array.isArray(session);
  if (!isPlainObject) {
    addError(errors, 'summary.dashboard_session', 'must be an object when present');
    return;
  }

  if (!VALID_DASHBOARD_STATUSES.has(session.status)) {
    addError(errors, 'summary.dashboard_session.status', `must be one of: ${[...VALID_DASHBOARD_STATUSES].join(', ')}`);
    return;
  }

  const pidIsPositiveInt = Number.isInteger(session.pid) && session.pid > 0;
  if (session.pid !== null && !pidIsPositiveInt) {
    addError(errors, 'summary.dashboard_session.pid', 'must be a positive integer or null');
  }

  if (session.url !== null && typeof session.url !== 'string') {
    addError(errors, 'summary.dashboard_session.url', 'must be a string or null');
  }

  const startedAtValid = typeof session.started_at === 'string' && !Number.isNaN(Date.parse(session.started_at));
  if (session.started_at !== null && !startedAtValid) {
    addError(errors, 'summary.dashboard_session.started_at', 'must be a valid ISO timestamp or null');
  }

  if (session.status === 'not_running') {
    // A stopped session must carry no live details (checked pid, url,
    // started_at in that order to match reported error ordering).
    for (const field of ['pid', 'url', 'started_at']) {
      if (session[field] !== null) {
        addError(errors, `summary.dashboard_session.${field}`, `must be null when status is not_running`);
      }
    }
  }

  // A running session requires a live pid (may double-report alongside the
  // generic pid type check above, matching original behavior).
  if (session.status === 'running' && !pidIsPositiveInt) {
    addError(errors, 'summary.dashboard_session.pid', 'must be a positive integer when status is running');
  }
}
444
+
113
445
/**
 * Count the entries of an exported JSONL file.
 * Returns 0 when the file is absent or its data was not parsed into an array.
 */
function countJsonl(files, relPath) {
  const entries = files?.[relPath]?.data;
  return Array.isArray(entries) ? entries.length : 0;
}
@@ -240,6 +572,10 @@ function verifyRunExport(artifact, errors) {
240
572
  if (artifact.summary.coordinator_present !== expectedCoordinatorPresent) {
241
573
  addError(errors, 'summary.coordinator_present', 'must match multirepo file presence');
242
574
  }
575
+
576
+ verifyDelegationSummary(artifact, errors);
577
+ verifyRepoDecisionsSummary(artifact, errors);
578
+ verifyDashboardSessionSummary(artifact, errors);
243
579
  }
244
580
 
245
581
  function verifyCoordinatorExport(artifact, errors) {
@@ -343,6 +679,8 @@ function verifyCoordinatorExport(artifact, errors) {
343
679
  addError(errors, `${repoPath}.export`, nestedError);
344
680
  }
345
681
  }
682
+
683
+ verifyAggregatedEventsSummary(artifact, errors);
346
684
  }
347
685
 
348
686
  export function verifyExportArtifact(artifact) {
package/src/lib/export.js CHANGED
@@ -9,6 +9,7 @@ import { loadCoordinatorState } from './coordinator-state.js';
9
9
  import { normalizeRunProvenance } from './run-provenance.js';
10
10
  import { getDashboardPid, getDashboardSession } from '../commands/dashboard.js';
11
11
  import { readRepoDecisions } from './repo-decisions.js';
12
+ import { RUN_EVENTS_PATH } from './run-events.js';
12
13
 
13
14
  const EXPORT_SCHEMA_VERSION = '0.3';
14
15
 
@@ -477,6 +478,62 @@ export function buildRunExport(startDir = process.cwd()) {
477
478
  };
478
479
  }
479
480
 
481
+ /**
482
+ * Build aggregated child-repo lifecycle events summary for coordinator exports.
483
+ * Reads events.jsonl from each child repo, tags with repo_id, merges, sorts.
484
+ */
485
+ function buildAggregatedEventsSummary(workspaceRoot, repoEntries) {
486
+ let allEvents = [];
487
+ const reposWithEvents = new Set();
488
+
489
+ for (const [repoId, repoDef] of repoEntries) {
490
+ const repoPath = resolve(workspaceRoot, repoDef?.path || '');
491
+ const eventsPath = join(repoPath, RUN_EVENTS_PATH);
492
+
493
+ if (!existsSync(eventsPath)) continue;
494
+
495
+ let raw;
496
+ try {
497
+ raw = readFileSync(eventsPath, 'utf8');
498
+ } catch {
499
+ continue;
500
+ }
501
+
502
+ const lines = raw.split('\n').filter(Boolean);
503
+ for (const line of lines) {
504
+ try {
505
+ const evt = JSON.parse(line);
506
+ evt.repo_id = repoId;
507
+ allEvents.push(evt);
508
+ reposWithEvents.add(repoId);
509
+ } catch {
510
+ // Skip malformed lines
511
+ }
512
+ }
513
+ }
514
+
515
+ // Sort by timestamp ascending, ties broken by event_id
516
+ allEvents.sort((a, b) => {
517
+ const tDiff = new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime();
518
+ if (tDiff !== 0) return tDiff;
519
+ return (a.event_id || '').localeCompare(b.event_id || '');
520
+ });
521
+
522
+ // Count event types
523
+ const eventTypeCounts = {};
524
+ for (const evt of allEvents) {
525
+ const t = evt.event_type || evt.type || 'unknown';
526
+ eventTypeCounts[t] = (eventTypeCounts[t] || 0) + 1;
527
+ }
528
+
529
+ return {
530
+ total_events: allEvents.length,
531
+ repos_with_events: [...reposWithEvents].sort(),
532
+ event_type_counts: eventTypeCounts,
533
+ events: allEvents,
534
+ };
535
+ }
536
+
480
537
  export function buildCoordinatorExport(startDir = process.cwd()) {
481
538
  const workspaceRoot = resolve(startDir);
482
539
  const configPath = join(workspaceRoot, COORDINATOR_CONFIG_FILE);
@@ -587,6 +644,7 @@ export function buildCoordinatorExport(startDir = process.cwd()) {
587
644
  barrier_count: barrierCount,
588
645
  history_entries: countJsonl(files, '.agentxchain/multirepo/history.jsonl'),
589
646
  decision_entries: countJsonl(files, '.agentxchain/multirepo/decision-ledger.jsonl'),
647
+ aggregated_events: buildAggregatedEventsSummary(workspaceRoot, repoEntries),
590
648
  },
591
649
  files,
592
650
  config: rawConfig,
@@ -1,4 +1,4 @@
1
- export const CURRENT_PROTOCOL_VERSION = 'v6';
1
+ export const CURRENT_PROTOCOL_VERSION = 'v7';
2
2
  export const CURRENT_CONFIG_GENERATION = 4;
3
3
 
4
4
  export function getGovernedConfigSchemaVersion(rawConfig) {
@@ -14,6 +14,7 @@ import { finalizeDispatchManifest, verifyDispatchManifest } from './dispatch-man
14
14
  import { getDispatchTurnDir } from './turn-paths.js';
15
15
  import { runHooks } from './hook-runner.js';
16
16
  import { validateCoordinatorConfig, normalizeCoordinatorConfig } from './coordinator-config.js';
17
+ import { VALID_RUN_EVENTS, emitRunEvent } from './run-events.js';
17
18
  import { projectRepoAcceptance, evaluateBarriers } from './coordinator-acceptance.js';
18
19
  import { readBarriers, saveCoordinatorState, readCoordinatorHistory } from './coordinator-state.js';
19
20
 
@@ -96,6 +97,11 @@ function validateFixtureConfig(config) {
96
97
  if (route.exit_gate && !config.gates?.[route.exit_gate]) {
97
98
  errors.push(`Routing references unknown gate: "${route.exit_gate}"`);
98
99
  }
100
+ if ('max_concurrent_turns' in route) {
101
+ if (!Number.isInteger(route.max_concurrent_turns) || route.max_concurrent_turns < 1 || route.max_concurrent_turns > 4) {
102
+ errors.push(`Routing "${phase}": max_concurrent_turns must be an integer between 1 and 4`);
103
+ }
104
+ }
99
105
  }
100
106
 
101
107
  return errors;
@@ -853,6 +859,75 @@ function executeFixtureOperation(workspace, fixture) {
853
859
  };
854
860
  }
855
861
 
862
+ // ── Tier 1: Event Lifecycle ──────────────────────────────────────────
863
+
864
+ case 'validate_event': {
865
+ const event = fixture.input.args.event;
866
+ const errors = [];
867
+ if (!event || typeof event !== 'object') {
868
+ return { result: 'error', error_type: 'invalid_event', errors: ['Event must be an object'] };
869
+ }
870
+ if (typeof event.event_id !== 'string' || !event.event_id.trim()) {
871
+ errors.push('event_id must be a non-empty string');
872
+ }
873
+ if (!VALID_RUN_EVENTS.includes(event.event_type)) {
874
+ errors.push(`event_type must be one of: ${VALID_RUN_EVENTS.join(', ')}`);
875
+ }
876
+ if (typeof event.timestamp !== 'string' || Number.isNaN(Date.parse(event.timestamp))) {
877
+ errors.push('timestamp must be a valid ISO-8601 string');
878
+ }
879
+ // Turn-scoped events must have turn.turn_id
880
+ const turnScopedEvents = ['turn_dispatched', 'turn_accepted', 'turn_rejected'];
881
+ if (turnScopedEvents.includes(event.event_type)) {
882
+ if (!event.turn?.turn_id || typeof event.turn.turn_id !== 'string' || !event.turn.turn_id.trim()) {
883
+ errors.push(`${event.event_type} requires a non-empty turn.turn_id`);
884
+ }
885
+ }
886
+ if (errors.length > 0) {
887
+ return { result: 'error', error_type: 'invalid_event', errors };
888
+ }
889
+ return { result: 'success', errors: [] };
890
+ }
891
+
892
+ case 'validate_event_ordering': {
893
+ const events = fixture.input.args.events;
894
+ const errors = [];
895
+ if (!Array.isArray(events) || events.length === 0) {
896
+ return { result: 'error', error_type: 'invalid_events', errors: ['Events must be a non-empty array'] };
897
+ }
898
+ // run_started must be first
899
+ if (events[0].event_type !== 'run_started') {
900
+ errors.push('First event must be run_started');
901
+ }
902
+ // run_completed must be last (if present)
903
+ const lastEvent = events[events.length - 1];
904
+ if (events.some((e) => e.event_type === 'run_completed') && lastEvent.event_type !== 'run_completed') {
905
+ errors.push('run_completed must be the last event');
906
+ }
907
+ // turn_dispatched must precede turn_accepted for same turn
908
+ const dispatchedTurns = new Map();
909
+ for (const event of events) {
910
+ if (event.event_type === 'turn_dispatched' && event.turn?.turn_id) {
911
+ dispatchedTurns.set(event.turn.turn_id, true);
912
+ }
913
+ if (event.event_type === 'turn_accepted' && event.turn?.turn_id) {
914
+ if (!dispatchedTurns.has(event.turn.turn_id)) {
915
+ errors.push(`turn_accepted for ${event.turn.turn_id} without preceding turn_dispatched`);
916
+ }
917
+ }
918
+ }
919
+ // Timestamps must be monotonically non-decreasing
920
+ for (let i = 1; i < events.length; i++) {
921
+ if (new Date(events[i].timestamp) < new Date(events[i - 1].timestamp)) {
922
+ errors.push(`Event ${i} timestamp is before event ${i - 1}`);
923
+ }
924
+ }
925
+ if (errors.length > 0) {
926
+ return { result: 'error', error_type: 'ordering_violation', errors };
927
+ }
928
+ return { result: 'success', errors: [] };
929
+ }
930
+
856
931
  default:
857
932
  return { result: 'error', error_type: 'unsupported_operation', operation };
858
933
  }
package/src/lib/report.js CHANGED
@@ -628,6 +628,19 @@ function extractCoordinatorTimeline(artifact) {
628
628
  });
629
629
  }
630
630
 
631
+ function extractAggregatedEventTimeline(artifact) {
632
+ const aggEvents = artifact.summary?.aggregated_events;
633
+ if (!aggEvents || !Array.isArray(aggEvents.events) || aggEvents.events.length === 0) return [];
634
+ return aggEvents.events.map((evt) => ({
635
+ repo_id: evt.repo_id || null,
636
+ type: evt.event_type || evt.type || 'unknown',
637
+ timestamp: evt.timestamp || null,
638
+ run_id: evt.run_id || null,
639
+ event_id: evt.event_id || null,
640
+ summary: `[${evt.repo_id || '?'}] ${evt.event_type || evt.type || 'unknown'} at ${evt.timestamp || '?'}`,
641
+ }));
642
+ }
643
+
631
644
  function computeCoordinatorTiming(artifact, coordinatorTimeline) {
632
645
  const coordinatorState = extractFileData(artifact, '.agentxchain/multirepo/state.json');
633
646
  const createdAtFromHistory = coordinatorTimeline
@@ -1121,6 +1134,7 @@ function buildCoordinatorSubject(artifact) {
1121
1134
  repo_error_count: repoErrorCount,
1122
1135
  },
1123
1136
  coordinator_timeline: coordinatorTimeline,
1137
+ aggregated_event_timeline: extractAggregatedEventTimeline(artifact),
1124
1138
  barrier_summary: barrierSummary,
1125
1139
  barrier_ledger_timeline: barrierLedgerTimeline,
1126
1140
  decision_digest: decisionDigest,
@@ -1504,6 +1518,17 @@ export function formatGovernanceReportText(report) {
1504
1518
  }
1505
1519
  }
1506
1520
 
1521
+ const aggregated_event_timeline = report.subject.aggregated_event_timeline;
1522
+ if (aggregated_event_timeline && aggregated_event_timeline.length > 0) {
1523
+ lines.push('', 'Aggregated Child Repo Events:');
1524
+ for (const evt of aggregated_event_timeline) {
1525
+ const ts = evt.timestamp ? ` [${evt.timestamp}]` : '';
1526
+ lines.push(` [${evt.repo_id || '?'}] ${evt.type}${ts}`);
1527
+ }
1528
+ } else {
1529
+ lines.push('', 'Aggregated Child Repo Events:', ' No child repo events.');
1530
+ }
1531
+
1507
1532
  if (barrier_summary && barrier_summary.length > 0) {
1508
1533
  lines.push('', 'Barrier Summary:');
1509
1534
  for (const b of barrier_summary) {
@@ -2006,6 +2031,18 @@ export function formatGovernanceReportMarkdown(report) {
2006
2031
  }
2007
2032
  }
2008
2033
 
2034
+ const aggregated_event_timeline = report.subject.aggregated_event_timeline;
2035
+ if (aggregated_event_timeline && aggregated_event_timeline.length > 0) {
2036
+ mdLines.push('', '## Aggregated Child Repo Events', '', '| Timestamp | Repo | Event Type | Summary |', '|-----------|------|------------|---------|');
2037
+ for (const evt of aggregated_event_timeline) {
2038
+ const ts = evt.timestamp ? `\`${evt.timestamp}\`` : 'n/a';
2039
+ const escapedSummary = evt.summary.replace(/\|/g, '\\|');
2040
+ mdLines.push(`| ${ts} | \`${evt.repo_id || '?'}\` | \`${evt.type}\` | ${escapedSummary} |`);
2041
+ }
2042
+ } else {
2043
+ mdLines.push('', '## Aggregated Child Repo Events', '', 'No child repo events.');
2044
+ }
2045
+
2009
2046
  if (barrier_summary && barrier_summary.length > 0) {
2010
2047
  mdLines.push('', '## Barrier Summary', '', '| Barrier | Workstream | Type | Status | Satisfied |', '|---------|------------|------|--------|-----------|');
2011
2048
  for (const b of barrier_summary) {
@@ -2615,6 +2652,22 @@ function renderCoordinatorHtml(report) {
2615
2652
  sections.push(`<div class="section">${htmlSection('Coordinator Timeline', htmlTable(['#', 'Type', 'Time', 'Summary'], tlRows))}</div>`);
2616
2653
  }
2617
2654
 
2655
+ // Aggregated Child Repo Events
2656
+ {
2657
+ const aggTimeline = report.subject.aggregated_event_timeline;
2658
+ if (aggTimeline?.length > 0) {
2659
+ const aggRows = aggTimeline.map((evt) => [
2660
+ `<code>${esc(evt.timestamp || 'n/a')}</code>`,
2661
+ `<span class="badge" style="background:#4a90d9">${esc(evt.repo_id || '?')}</span>`,
2662
+ `<code>${esc(evt.type)}</code>`,
2663
+ esc(evt.summary),
2664
+ ]);
2665
+ sections.push(`<div class="section">${htmlSection('Aggregated Child Repo Events', htmlTable(['Timestamp', 'Repo', 'Event Type', 'Summary'], aggRows))}</div>`);
2666
+ } else {
2667
+ sections.push(`<div class="section">${htmlSection('Aggregated Child Repo Events', '<p>No child repo events.</p>')}</div>`);
2668
+ }
2669
+ }
2670
+
2618
2671
  // Barrier Summary
2619
2672
  if (barrier_summary?.length > 0) {
2620
2673
  const bRows = barrier_summary.map((b) => [