agentxchain 0.8.8 → 2.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +126 -142
- package/bin/agentxchain.js +186 -5
- package/dashboard/app.js +305 -0
- package/dashboard/components/blocked.js +145 -0
- package/dashboard/components/cross-repo.js +126 -0
- package/dashboard/components/gate.js +311 -0
- package/dashboard/components/hooks.js +177 -0
- package/dashboard/components/initiative.js +147 -0
- package/dashboard/components/ledger.js +165 -0
- package/dashboard/components/timeline.js +222 -0
- package/dashboard/index.html +352 -0
- package/package.json +14 -6
- package/scripts/live-api-proxy-preflight-smoke.sh +531 -0
- package/scripts/publish-from-tag.sh +88 -0
- package/scripts/release-postflight.sh +231 -0
- package/scripts/release-preflight.sh +167 -0
- package/src/commands/accept-turn.js +160 -0
- package/src/commands/approve-completion.js +80 -0
- package/src/commands/approve-transition.js +85 -0
- package/src/commands/dashboard.js +70 -0
- package/src/commands/init.js +516 -0
- package/src/commands/migrate.js +348 -0
- package/src/commands/multi.js +549 -0
- package/src/commands/plugin.js +157 -0
- package/src/commands/reject-turn.js +204 -0
- package/src/commands/resume.js +389 -0
- package/src/commands/status.js +196 -3
- package/src/commands/step.js +947 -0
- package/src/commands/template-list.js +33 -0
- package/src/commands/template-set.js +279 -0
- package/src/commands/validate.js +20 -11
- package/src/commands/verify.js +71 -0
- package/src/lib/adapters/api-proxy-adapter.js +1076 -0
- package/src/lib/adapters/local-cli-adapter.js +337 -0
- package/src/lib/adapters/manual-adapter.js +169 -0
- package/src/lib/blocked-state.js +94 -0
- package/src/lib/config.js +97 -1
- package/src/lib/context-compressor.js +121 -0
- package/src/lib/context-section-parser.js +220 -0
- package/src/lib/coordinator-acceptance.js +428 -0
- package/src/lib/coordinator-config.js +461 -0
- package/src/lib/coordinator-dispatch.js +276 -0
- package/src/lib/coordinator-gates.js +487 -0
- package/src/lib/coordinator-hooks.js +239 -0
- package/src/lib/coordinator-recovery.js +523 -0
- package/src/lib/coordinator-state.js +365 -0
- package/src/lib/cross-repo-context.js +247 -0
- package/src/lib/dashboard/bridge-server.js +284 -0
- package/src/lib/dashboard/file-watcher.js +93 -0
- package/src/lib/dashboard/state-reader.js +96 -0
- package/src/lib/dispatch-bundle.js +568 -0
- package/src/lib/dispatch-manifest.js +252 -0
- package/src/lib/gate-evaluator.js +285 -0
- package/src/lib/governed-state.js +2139 -0
- package/src/lib/governed-templates.js +145 -0
- package/src/lib/hook-runner.js +788 -0
- package/src/lib/normalized-config.js +539 -0
- package/src/lib/plugin-config-schema.js +192 -0
- package/src/lib/plugins.js +692 -0
- package/src/lib/protocol-conformance.js +291 -0
- package/src/lib/reference-conformance-adapter.js +717 -0
- package/src/lib/repo-observer.js +597 -0
- package/src/lib/repo.js +0 -31
- package/src/lib/schema.js +121 -0
- package/src/lib/schemas/turn-result.schema.json +205 -0
- package/src/lib/token-budget.js +206 -0
- package/src/lib/token-counter.js +27 -0
- package/src/lib/turn-paths.js +67 -0
- package/src/lib/turn-result-validator.js +496 -0
- package/src/lib/validation.js +137 -0
- package/src/templates/governed/api-service.json +31 -0
- package/src/templates/governed/cli-tool.json +30 -0
- package/src/templates/governed/generic.json +10 -0
- package/src/templates/governed/web-app.json +30 -0
|
@@ -0,0 +1,365 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Coordinator state machine — multi-repo run lifecycle.
|
|
3
|
+
*
|
|
4
|
+
* Manages: super_run initialization, repo-local run linkage,
|
|
5
|
+
* coordinator state persistence, barrier bootstrapping, and status queries.
|
|
6
|
+
*
|
|
7
|
+
* Design rules:
|
|
8
|
+
* - Coordinator state lives under <workspace>/.agentxchain/multirepo/
|
|
9
|
+
* - Repo-local .agentxchain/ directories are NEVER mutated by coordinator code
|
|
10
|
+
* - Init is atomic: if any required repo fails, no coordinator state is written
|
|
11
|
+
* - Uses loadCoordinatorConfig() as the sole config entry point (DEC-MR-IMPL-006)
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import { readFileSync, writeFileSync, existsSync, mkdirSync, rmSync } from 'node:fs';
|
|
15
|
+
import { join, dirname } from 'node:path';
|
|
16
|
+
import { randomBytes } from 'node:crypto';
|
|
17
|
+
import { safeWriteJson } from './safe-write.js';
|
|
18
|
+
import { loadCoordinatorConfig } from './coordinator-config.js';
|
|
19
|
+
import { initializeGovernedRun } from './governed-state.js';
|
|
20
|
+
|
|
21
|
+
// ── Paths ───────────────────────────────────────────────────────────────────
|
|
22
|
+
|
|
23
|
+
// All coordinator state lives under this directory, relative to the workspace root.
const MULTIREPO_DIR = '.agentxchain/multirepo';
// Current coordinator run state (single JSON object).
const STATE_FILE = 'state.json';
// Append-only coordinator event log (one JSON object per line).
const HISTORY_FILE = 'history.jsonl';
// Append-only decision ledger (JSONL).
const LEDGER_FILE = 'decision-ledger.jsonl';
// Snapshot of workstream completion barriers (JSON object keyed by barrier id).
const BARRIERS_FILE = 'barriers.json';
// Append-only barrier transition log (JSONL).
const BARRIER_LEDGER_FILE = 'barrier-ledger.jsonl';
|
|
29
|
+
|
|
30
|
+
// Resolve the coordinator state directory for a workspace.
function multiDir(workspacePath) {
  return join(workspacePath, MULTIREPO_DIR);
}

// Resolve a file that lives directly inside the coordinator state directory.
function coordinatorFile(workspacePath, fileName) {
  return join(multiDir(workspacePath), fileName);
}

function statePath(workspacePath) {
  return coordinatorFile(workspacePath, STATE_FILE);
}

function historyPath(workspacePath) {
  return coordinatorFile(workspacePath, HISTORY_FILE);
}

function ledgerPath(workspacePath) {
  return coordinatorFile(workspacePath, LEDGER_FILE);
}

function barriersPath(workspacePath) {
  return coordinatorFile(workspacePath, BARRIERS_FILE);
}

function barrierLedgerPath(workspacePath) {
  return coordinatorFile(workspacePath, BARRIER_LEDGER_FILE);
}
|
|
53
|
+
|
|
54
|
+
// ── Helpers ─────────────────────────────────────────────────────────────────
|
|
55
|
+
|
|
56
|
+
// Produce a unique super-run identifier: millisecond timestamp plus 4 random hex bytes.
function generateSuperRunId() {
  const suffix = randomBytes(4).toString('hex');
  return `srun_${Date.now()}_${suffix}`;
}
|
|
61
|
+
|
|
62
|
+
/**
 * Append one entry as a single JSON line to a JSONL file,
 * creating the parent directory on demand.
 */
function appendJsonl(filePath, entry) {
  const parent = dirname(filePath);
  mkdirSync(parent, { recursive: true });
  const line = `${JSON.stringify(entry)}\n`;
  writeFileSync(filePath, line, { flag: 'a' });
}
|
|
66
|
+
|
|
67
|
+
/**
 * Best-effort read of a repo's governed state file.
 * Returns the parsed object, or null when the file is absent or unparsable.
 */
function readRepoLocalState(repoPath) {
  const candidate = join(repoPath, '.agentxchain/state.json');
  if (!existsSync(candidate)) {
    return null;
  }
  try {
    const raw = readFileSync(candidate, 'utf8');
    return JSON.parse(raw);
  } catch {
    // Corrupt state is treated the same as missing state.
    return null;
  }
}
|
|
76
|
+
|
|
77
|
+
/**
 * Best-effort read of a repo's agentxchain.json config.
 * Returns the parsed object, or null when the file is absent or unparsable.
 */
function readRepoLocalConfig(repoPath) {
  const candidate = join(repoPath, 'agentxchain.json');
  if (!existsSync(candidate)) {
    return null;
  }
  try {
    const raw = readFileSync(candidate, 'utf8');
    return JSON.parse(raw);
  } catch {
    // Corrupt config is treated the same as missing config.
    return null;
  }
}
|
|
86
|
+
|
|
87
|
+
// ── Barrier bootstrapping ───────────────────────────────────────────────────
|
|
88
|
+
|
|
89
|
+
/**
 * Build the initial barrier map for a coordinator run: one pending
 * completion barrier per workstream, keyed "<workstreamId>_completion".
 */
function bootstrapBarriers(config) {
  const entries = Object.entries(config.workstreams).map(([workstreamId, workstream]) => [
    `${workstreamId}_completion`,
    {
      workstream_id: workstreamId,
      type: workstream.completion_barrier,
      status: 'pending',
      required_repos: [...workstream.repos],
      satisfied_repos: [],
      created_at: new Date().toISOString(),
    },
  ]);
  return Object.fromEntries(entries);
}
|
|
106
|
+
|
|
107
|
+
// ── Core API ────────────────────────────────────────────────────────────────
|
|
108
|
+
|
|
109
|
+
/**
 * Initialize a multi-repo coordinator run.
 *
 * 1. Validates config via loadCoordinatorConfig() (unless a pre-loaded config is given).
 * 2. For each repo in config.repo_order: links an existing active run, or initializes
 *    a new governed run when the repo is idle/paused/blocked-without-run.
 * 3. Writes all coordinator state files (state, barriers, empty ledgers, history).
 * 4. If any *required* repo fails, no coordinator state is written.
 *
 * NOTE(review): "atomic" applies to coordinator-side files only. Repo-local
 * governed runs created by initializeGovernedRun() earlier in the loop are NOT
 * rolled back when a later required repo fails — confirm whether that is the
 * intended contract.
 *
 * @param {string} workspacePath - path to the coordinator workspace
 * @param {object} [preloadedConfig] - optional pre-loaded config (skips loadCoordinatorConfig)
 * @returns {{ ok: boolean, super_run_id?: string, repo_runs?: object, errors?: string[] }}
 *   On success, `errors` may still be present: failures of non-required repos
 *   are reported but do not abort the run.
 */
export function initializeCoordinatorRun(workspacePath, preloadedConfig) {
  // Step 1: Load and validate config
  let config;
  if (preloadedConfig) {
    config = preloadedConfig;
  } else {
    const configResult = loadCoordinatorConfig(workspacePath);
    if (!configResult.ok) {
      return { ok: false, errors: configResult.errors };
    }
    config = configResult.config;
  }

  // Step 2: Link or initialize repo-local runs
  const repoRuns = {};
  const errors = [];

  for (const repoId of config.repo_order) {
    const repo = config.repos[repoId];
    const repoPath = repo.resolved_path;

    const repoState = readRepoLocalState(repoPath);

    if (repoState && repoState.run_id && repoState.status === 'active') {
      // Link existing active run — coordinator does not touch the repo's state.
      repoRuns[repoId] = {
        run_id: repoState.run_id,
        status: 'linked',
        phase: repoState.phase ?? 'implementation',
        initialized_by_coordinator: false,
      };
    } else if (repoState && (repoState.status === 'idle' || repoState.status === 'paused' ||
        (repoState.status === 'blocked' && !repoState.run_id))) {
      // Repo is startable (idle, paused, or blocked with no run): initialize a new run.
      const repoConfig = readRepoLocalConfig(repoPath);
      if (!repoConfig) {
        errors.push(`repo_init_failed: repo "${repoId}" config could not be read`);
        continue;
      }

      const initResult = initializeGovernedRun(repoPath, repoConfig);
      if (!initResult.ok) {
        errors.push(`repo_init_failed: repo "${repoId}" initialization failed: ${initResult.error}`);
        continue;
      }

      repoRuns[repoId] = {
        run_id: initResult.state.run_id,
        status: 'initialized',
        phase: initResult.state.phase ?? 'implementation',
        initialized_by_coordinator: true,
      };
    } else if (repoState && repoState.status === 'completed') {
      // Completed runs must be reset explicitly; refusing here avoids clobbering history.
      errors.push(`repo_completed: repo "${repoId}" has a completed run. Reset or start a new project before coordinator init.`);
      continue;
    } else if (repoState && repoState.status === 'blocked' && repoState.run_id) {
      // Blocked WITH a run requires human resolution first (blocked without a run is
      // handled by the startable branch above).
      errors.push(`repo_blocked: repo "${repoId}" is blocked with an active run. Resolve the blocked state before coordinator init.`);
      continue;
    } else {
      // No state file (or unparsable) — the repo was never governed-initialized.
      errors.push(`repo_no_state: repo "${repoId}" has no governed state`);
      continue;
    }
  }

  // Check if all required repos succeeded; any required failure aborts before
  // any coordinator file is written (optional-repo failures are tolerated).
  const requiredRepos = config.repo_order.filter(id => config.repos[id].required);
  const failedRequired = requiredRepos.filter(id => !repoRuns[id]);
  if (failedRequired.length > 0) {
    return {
      ok: false,
      errors: [
        ...errors,
        `atomic_failure: required repos failed: ${failedRequired.join(', ')}. No coordinator state written.`,
      ],
    };
  }

  // Step 3: Generate coordinator state
  const superRunId = generateSuperRunId();
  const now = new Date().toISOString();
  const barriers = bootstrapBarriers(config);

  const state = {
    schema_version: '0.1',
    super_run_id: superRunId,
    project_id: config.project.id,
    status: 'active',
    // Initial phase comes from the first workstream when one exists.
    phase: config.workstream_order.length > 0
      ? config.workstreams[config.workstream_order[0]].phase
      : 'implementation',
    repo_runs: repoRuns,
    pending_gate: null,
    phase_gate_status: {},
    created_at: now,
    updated_at: now,
  };

  // Step 4: Atomic write — create directory and all files; any throw triggers
  // full cleanup of the coordinator directory below.
  const dir = multiDir(workspacePath);
  try {
    mkdirSync(dir, { recursive: true });

    // Write state
    safeWriteJson(statePath(workspacePath), state);

    // Write barriers
    safeWriteJson(barriersPath(workspacePath), barriers);

    // Create empty JSONL files (only if absent, so re-init never truncates logs)
    const emptyFiles = [
      historyPath(workspacePath),
      ledgerPath(workspacePath),
      barrierLedgerPath(workspacePath),
    ];
    for (const filePath of emptyFiles) {
      if (!existsSync(filePath)) {
        writeFileSync(filePath, '', { flag: 'w' });
      }
    }

    // Append run_initialized to history (records only id + provenance per repo)
    appendJsonl(historyPath(workspacePath), {
      type: 'run_initialized',
      super_run_id: superRunId,
      project_id: config.project.id,
      repo_runs: Object.fromEntries(
        Object.entries(repoRuns).map(([id, run]) => [id, {
          run_id: run.run_id,
          initialized_by_coordinator: run.initialized_by_coordinator,
        }]),
      ),
      timestamp: now,
    });
  } catch (err) {
    // Atomic failure: clean up partial coordinator state (repo-local governed
    // runs created above are NOT rolled back — see NOTE(review) in the doc).
    try {
      rmSync(dir, { recursive: true, force: true });
    } catch { /* best effort cleanup */ }
    return {
      ok: false,
      errors: [...errors, `write_failed: ${err.message}`],
    };
  }

  return {
    ok: true,
    super_run_id: superRunId,
    repo_runs: repoRuns,
    errors: errors.length > 0 ? errors : undefined,
  };
}
|
|
272
|
+
|
|
273
|
+
/**
 * Load coordinator state from disk.
 *
 * @param {string} workspacePath
 * @returns {object|null} parsed state, or null when missing or unparsable
 */
export function loadCoordinatorState(workspacePath) {
  const file = statePath(workspacePath);
  if (!existsSync(file)) {
    return null;
  }
  try {
    const raw = readFileSync(file, 'utf8');
    return JSON.parse(raw);
  } catch {
    // Corrupt state is treated the same as missing state.
    return null;
  }
}
|
|
288
|
+
|
|
289
|
+
/**
 * Persist coordinator state, stamping updated_at with the current time.
 * The input object is not mutated.
 *
 * @param {string} workspacePath
 * @param {object} state
 */
export function saveCoordinatorState(workspacePath, state) {
  const stamped = { ...state, updated_at: new Date().toISOString() };
  safeWriteJson(statePath(workspacePath), stamped);
}
|
|
299
|
+
|
|
300
|
+
/**
 * Build a full coordinator status snapshot: run identity, phase, per-repo
 * runs, unresolved barriers, and any pending gate.
 *
 * @param {string} workspacePath
 * @returns {{ super_run_id, status, phase, repo_runs, pending_barriers, pending_gate }|null}
 *   null when no coordinator state exists.
 */
export function getCoordinatorStatus(workspacePath) {
  const state = loadCoordinatorState(workspacePath);
  if (!state) {
    return null;
  }

  // Missing/corrupt barriers file yields {} (readBarriers is best-effort).
  const barriers = readBarriers(workspacePath);

  const pendingBarriers = [];
  for (const [id, barrier] of Object.entries(barriers)) {
    if (barrier.status === 'pending' || barrier.status === 'partially_satisfied') {
      pendingBarriers.push({ id, ...barrier });
    }
  }

  return {
    super_run_id: state.super_run_id,
    status: state.status,
    phase: state.phase,
    repo_runs: state.repo_runs,
    pending_barriers: pendingBarriers,
    pending_gate: state.pending_gate ?? null,
  };
}
|
|
332
|
+
|
|
333
|
+
/**
 * Read coordinator history entries from the JSONL history file.
 *
 * Robust to partial corruption: unparsable lines are skipped individually
 * instead of discarding the entire history (previously one bad appended line
 * caused the whole function to return []).
 *
 * @param {string} workspacePath
 * @returns {object[]} parsed entries in file order (possibly empty)
 */
export function readCoordinatorHistory(workspacePath) {
  const file = historyPath(workspacePath);
  if (!existsSync(file)) return [];

  let content;
  try {
    content = readFileSync(file, 'utf8');
  } catch {
    // Unreadable file is treated as empty history.
    return [];
  }

  const entries = [];
  for (const line of content.split('\n')) {
    const trimmed = line.trim();
    if (!trimmed) continue;
    try {
      entries.push(JSON.parse(trimmed));
    } catch {
      // Skip the corrupt line; the remaining history is still usable.
    }
  }
  return entries;
}
|
|
350
|
+
|
|
351
|
+
/**
 * Read the coordinator barriers snapshot.
 *
 * @param {string} workspacePath
 * @returns {object} barrier map keyed by barrier id; {} when missing or unparsable
 */
export function readBarriers(workspacePath) {
  const file = barriersPath(workspacePath);
  if (!existsSync(file)) {
    return {};
  }
  try {
    const raw = readFileSync(file, 'utf8');
    return JSON.parse(raw);
  } catch {
    // Empty or corrupt file — treat as no barriers.
    return {};
  }
}
|
|
@@ -0,0 +1,247 @@
|
|
|
1
|
+
import { mkdirSync, writeFileSync, appendFileSync } from 'node:fs';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import { randomBytes } from 'node:crypto';
|
|
4
|
+
import { readCoordinatorHistory, readBarriers } from './coordinator-state.js';
|
|
5
|
+
|
|
6
|
+
// Root (relative to the workspace) under which per-ref context bundles are written.
const CONTEXT_ROOT = '.agentxchain/multirepo/context';
|
|
7
|
+
|
|
8
|
+
// Unique context reference: ctx_<workstream>_<repo>_<4 random hex bytes>.
function buildContextRef(workstreamId, targetRepoId) {
  const suffix = randomBytes(4).toString('hex');
  return ['ctx', workstreamId, targetRepoId, suffix].join('_');
}
|
|
11
|
+
|
|
12
|
+
// Directory where a given context bundle's files are written.
function getContextDir(workspacePath, contextRef) {
  return join(workspacePath, CONTEXT_ROOT, contextRef);
}
|
|
15
|
+
|
|
16
|
+
// True when a history entry is an acceptance projection record.
// (Falsy inputs pass through unchanged, matching the original short-circuit.)
function isProjectionEntry(entry) {
  const isObject = entry && typeof entry === 'object';
  return isObject && entry.type === 'acceptance_projection';
}
|
|
19
|
+
|
|
20
|
+
/**
 * Extract acceptance projections from history that belong to a relevant
 * workstream and come from a repo OTHER than the target.
 * Normalizes missing fields to null / '' / [] so downstream rendering is safe.
 */
function collectUpstreamAcceptances(history, targetRepoId, relevantWorkstreamIds) {
  const relevant = new Set(relevantWorkstreamIds);
  const acceptances = [];

  for (const entry of history) {
    if (!isProjectionEntry(entry)) continue;
    if (!relevant.has(entry.workstream_id)) continue;
    if (entry.repo_id === targetRepoId) continue;

    acceptances.push({
      projection_ref: entry.projection_ref || null,
      workstream_id: entry.workstream_id,
      repo_id: entry.repo_id,
      repo_turn_id: entry.repo_turn_id || null,
      summary: entry.summary || '',
      decisions: Array.isArray(entry.decisions) ? entry.decisions : [],
      files_changed: Array.isArray(entry.files_changed) ? entry.files_changed : [],
      verification: entry.verification || null,
    });
  }

  return acceptances;
}
|
|
37
|
+
|
|
38
|
+
function collectActiveBarriers(barriers, workstreamIds, targetRepoId) {
|
|
39
|
+
const relevantIds = new Set(workstreamIds);
|
|
40
|
+
return Object.entries(barriers)
|
|
41
|
+
.filter(([, barrier]) => barrier && typeof barrier === 'object')
|
|
42
|
+
.filter(([, barrier]) => barrier.type !== 'all_repos_accepted')
|
|
43
|
+
.filter(([, barrier]) => relevantIds.has(barrier.workstream_id))
|
|
44
|
+
.filter(([, barrier]) => barrier.status && barrier.status !== 'satisfied')
|
|
45
|
+
.filter(([, barrier]) => {
|
|
46
|
+
if (Array.isArray(barrier.downstream_repos) && barrier.downstream_repos.includes(targetRepoId)) {
|
|
47
|
+
return true;
|
|
48
|
+
}
|
|
49
|
+
if (Array.isArray(barrier.required_repos) && barrier.required_repos.includes(targetRepoId)) {
|
|
50
|
+
return true;
|
|
51
|
+
}
|
|
52
|
+
if (Array.isArray(barrier.blocked_assignments)) {
|
|
53
|
+
return barrier.blocked_assignments.some((assignment) => assignment === targetRepoId || assignment.startsWith(`${targetRepoId}:`));
|
|
54
|
+
}
|
|
55
|
+
return barrier.workstream_id === workstreamIds[0];
|
|
56
|
+
})
|
|
57
|
+
.map(([barrierId, barrier]) => ({
|
|
58
|
+
barrier_id: barrier.barrier_id || barrierId,
|
|
59
|
+
workstream_id: barrier.workstream_id,
|
|
60
|
+
type: barrier.type || 'unknown',
|
|
61
|
+
status: barrier.status,
|
|
62
|
+
notes: barrier.notes || null,
|
|
63
|
+
}));
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
/**
 * Assemble the de-duplicated follow-up checklist for a dispatch:
 * one item per dependency workstream, per upstream acceptance, and per
 * active barrier (barrier notes take precedence over the generic line).
 */
function buildRequiredFollowups(workstreamId, dependencyIds, upstreamAcceptances, activeBarriers, targetRepoId) {
  const followups = new Set();

  for (const dependencyId of dependencyIds) {
    followups.add(`Incorporate accepted output from dependency "${dependencyId}" before changing ${targetRepoId}.`);
  }

  for (const acceptance of upstreamAcceptances) {
    const suffix = acceptance.summary ? `: ${acceptance.summary}` : '';
    followups.add(`Review ${acceptance.repo_id} acceptance${suffix}`.trim());
  }

  for (const barrier of activeBarriers) {
    followups.add(barrier.notes || `Respect barrier "${barrier.barrier_id}" (${barrier.type}) before proceeding.`);
  }

  return [...followups];
}
|
|
87
|
+
|
|
88
|
+
/**
 * Render the human-readable Markdown companion to a context snapshot.
 * Sections: header metadata, upstream acceptances, active barriers,
 * required follow-ups; each list falls back to "- None" when empty.
 */
function renderContextMarkdown(snapshot) {
  const bulletsOrNone = (items, format) =>
    (items.length === 0 ? ['- None'] : items.map(format));

  const acceptanceBullet = (acceptance) => {
    const decisionText = acceptance.decisions.length > 0
      ? ` Decisions: ${acceptance.decisions.join(', ')}.`
      : '';
    return `- ${acceptance.repo_id} (${acceptance.workstream_id}): ${acceptance.summary || 'No summary recorded.'}${decisionText}`;
  };

  const lines = [
    '# Coordinator Context',
    '',
    `- Super Run: ${snapshot.super_run_id}`,
    `- Workstream: ${snapshot.workstream_id}`,
    `- Target Repo: ${snapshot.target_repo_id}`,
    `- Context Ref: ${snapshot.context_ref}`,
    `- Generated At: ${snapshot.generated_at}`,
    '',
    '## Upstream Acceptances',
    '',
    ...bulletsOrNone(snapshot.upstream_acceptances, acceptanceBullet),
    '',
    '## Active Barriers',
    '',
    ...bulletsOrNone(snapshot.active_barriers, (barrier) => `- ${barrier.barrier_id}: ${barrier.type} (${barrier.status})`),
    '',
    '## Required Follow-ups',
    '',
    ...bulletsOrNone(snapshot.required_followups, (followup) => `- ${followup}`),
    '',
  ];

  return lines.join('\n');
}
|
|
138
|
+
|
|
139
|
+
/**
 * Compute which previously generated cross-repo contexts are stale after a
 * turn is accepted in sourceRepoId.
 *
 * A `context_generated` event (for a repo other than the source) is flagged
 * when it either consumed projections from the source repo, or shares a
 * workstream with the newly accepted turn.
 *
 * Informational only — the coordinator regenerates context on next dispatch;
 * the signal is for external consumers (notifications, compliance checks).
 *
 * @returns {Array<{target_repo_id, context_ref, workstream_id, source_repo_id, files_changed, reason}>}
 */
export function computeContextInvalidations(workspacePath, sourceRepoId, workstreamId, filesChanged) {
  const history = readCoordinatorHistory(workspacePath);
  const invalidations = [];

  for (const event of history) {
    if (event?.type !== 'context_generated') continue;
    if (event.target_repo_id === sourceRepoId) continue;

    const usedSourceProjections = (event.upstream_repo_ids || []).includes(sourceRepoId);
    const sharesWorkstream = event.workstream_id === workstreamId ||
      (Array.isArray(event.relevant_workstream_ids) && event.relevant_workstream_ids.includes(workstreamId));

    if (!usedSourceProjections && !sharesWorkstream) continue;

    invalidations.push({
      target_repo_id: event.target_repo_id,
      context_ref: event.context_ref,
      workstream_id: event.workstream_id,
      source_repo_id: sourceRepoId,
      files_changed: Array.isArray(filesChanged) ? filesChanged : [],
      reason: usedSourceProjections
        ? `Context ${event.context_ref} included projections from ${sourceRepoId} which has a new accepted turn`
        : `Context ${event.context_ref} shares workstream "${workstreamId}" with newly accepted turn in ${sourceRepoId}`,
    });
  }

  return invalidations;
}
|
|
186
|
+
|
|
187
|
+
/**
 * Generate a cross-repo context bundle for a target repo within a workstream.
 *
 * Collects upstream acceptances and active barriers relevant to the target,
 * writes COORDINATOR_CONTEXT.json / .md into a per-ref directory, and appends
 * a `context_generated` event to the coordinator history (used later by
 * computeContextInvalidations).
 *
 * Fixes: the history append previously used a hard-coded full path and did
 * not ensure its directory existed — on a workspace where coordinator init
 * had not yet created `.agentxchain/multirepo/`, appendFileSync threw ENOENT
 * after the context files were already written. The directory is now created
 * on demand and the path is derived instead of duplicated inline.
 *
 * @param {string} workspacePath
 * @param {object} state - coordinator state (super_run_id is read)
 * @param {object} config - coordinator config (workstreams map is read)
 * @param {string} targetRepoId
 * @param {string} workstreamId
 * @returns {{ ok: true, contextRef, jsonPath, mdPath, snapshot } | { ok: false, error }}
 */
export function generateCrossRepoContext(workspacePath, state, config, targetRepoId, workstreamId) {
  const workstream = config.workstreams?.[workstreamId];
  if (!workstream) {
    return { ok: false, error: `Unknown workstream "${workstreamId}"` };
  }

  const contextRef = buildContextRef(workstreamId, targetRepoId);
  const generatedAt = new Date().toISOString();
  const history = readCoordinatorHistory(workspacePath);
  const barriers = readBarriers(workspacePath);
  // The target's own workstream plus every workstream it depends on.
  const dependencyIds = Array.isArray(workstream.depends_on) ? workstream.depends_on : [];
  const relevantWorkstreamIds = [workstreamId, ...dependencyIds];
  const upstreamAcceptances = collectUpstreamAcceptances(history, targetRepoId, relevantWorkstreamIds);
  const activeBarriers = collectActiveBarriers(barriers, relevantWorkstreamIds, targetRepoId);

  const snapshot = {
    schema_version: '0.1',
    super_run_id: state.super_run_id,
    workstream_id: workstreamId,
    target_repo_id: targetRepoId,
    context_ref: contextRef,
    generated_at: generatedAt,
    upstream_acceptances: upstreamAcceptances,
    active_barriers: activeBarriers,
    required_followups: buildRequiredFollowups(
      workstreamId,
      dependencyIds,
      upstreamAcceptances,
      activeBarriers,
      targetRepoId,
    ),
  };

  const contextDir = getContextDir(workspacePath, contextRef);
  const jsonPath = join(contextDir, 'COORDINATOR_CONTEXT.json');
  const mdPath = join(contextDir, 'COORDINATOR_CONTEXT.md');

  mkdirSync(contextDir, { recursive: true });
  writeFileSync(jsonPath, JSON.stringify(snapshot, null, 2) + '\n');
  writeFileSync(mdPath, renderContextMarkdown(snapshot));

  // Record context generation in coordinator history for invalidation tracking.
  // Ensure the multirepo directory exists so the append cannot fail with ENOENT.
  const multirepoDir = join(workspacePath, '.agentxchain/multirepo');
  mkdirSync(multirepoDir, { recursive: true });
  const historyFile = join(multirepoDir, 'history.jsonl');
  const contextEvent = {
    type: 'context_generated',
    timestamp: generatedAt,
    super_run_id: state.super_run_id,
    context_ref: contextRef,
    workstream_id: workstreamId,
    target_repo_id: targetRepoId,
    relevant_workstream_ids: relevantWorkstreamIds,
    upstream_repo_ids: [...new Set(upstreamAcceptances.map((a) => a.repo_id))],
  };
  appendFileSync(historyFile, JSON.stringify(contextEvent) + '\n');

  return {
    ok: true,
    contextRef,
    jsonPath,
    mdPath,
    snapshot,
  };
}