agentxchain 0.8.8 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. package/README.md +136 -136
  2. package/bin/agentxchain.js +186 -5
  3. package/dashboard/app.js +305 -0
  4. package/dashboard/components/blocked.js +145 -0
  5. package/dashboard/components/cross-repo.js +126 -0
  6. package/dashboard/components/gate.js +311 -0
  7. package/dashboard/components/hooks.js +177 -0
  8. package/dashboard/components/initiative.js +147 -0
  9. package/dashboard/components/ledger.js +165 -0
  10. package/dashboard/components/timeline.js +222 -0
  11. package/dashboard/index.html +352 -0
  12. package/package.json +14 -6
  13. package/scripts/live-api-proxy-preflight-smoke.sh +531 -0
  14. package/scripts/publish-from-tag.sh +88 -0
  15. package/scripts/release-postflight.sh +231 -0
  16. package/scripts/release-preflight.sh +167 -0
  17. package/src/commands/accept-turn.js +160 -0
  18. package/src/commands/approve-completion.js +80 -0
  19. package/src/commands/approve-transition.js +85 -0
  20. package/src/commands/dashboard.js +70 -0
  21. package/src/commands/init.js +516 -0
  22. package/src/commands/migrate.js +348 -0
  23. package/src/commands/multi.js +549 -0
  24. package/src/commands/plugin.js +157 -0
  25. package/src/commands/reject-turn.js +204 -0
  26. package/src/commands/resume.js +389 -0
  27. package/src/commands/status.js +196 -3
  28. package/src/commands/step.js +947 -0
  29. package/src/commands/template-list.js +33 -0
  30. package/src/commands/template-set.js +279 -0
  31. package/src/commands/validate.js +20 -11
  32. package/src/commands/verify.js +71 -0
  33. package/src/lib/adapters/api-proxy-adapter.js +1076 -0
  34. package/src/lib/adapters/local-cli-adapter.js +337 -0
  35. package/src/lib/adapters/manual-adapter.js +169 -0
  36. package/src/lib/blocked-state.js +94 -0
  37. package/src/lib/config.js +97 -1
  38. package/src/lib/context-compressor.js +121 -0
  39. package/src/lib/context-section-parser.js +220 -0
  40. package/src/lib/coordinator-acceptance.js +428 -0
  41. package/src/lib/coordinator-config.js +461 -0
  42. package/src/lib/coordinator-dispatch.js +276 -0
  43. package/src/lib/coordinator-gates.js +487 -0
  44. package/src/lib/coordinator-hooks.js +239 -0
  45. package/src/lib/coordinator-recovery.js +523 -0
  46. package/src/lib/coordinator-state.js +365 -0
  47. package/src/lib/cross-repo-context.js +247 -0
  48. package/src/lib/dashboard/bridge-server.js +284 -0
  49. package/src/lib/dashboard/file-watcher.js +93 -0
  50. package/src/lib/dashboard/state-reader.js +96 -0
  51. package/src/lib/dispatch-bundle.js +568 -0
  52. package/src/lib/dispatch-manifest.js +252 -0
  53. package/src/lib/gate-evaluator.js +285 -0
  54. package/src/lib/governed-state.js +2139 -0
  55. package/src/lib/governed-templates.js +145 -0
  56. package/src/lib/hook-runner.js +788 -0
  57. package/src/lib/normalized-config.js +539 -0
  58. package/src/lib/plugin-config-schema.js +192 -0
  59. package/src/lib/plugins.js +692 -0
  60. package/src/lib/protocol-conformance.js +291 -0
  61. package/src/lib/reference-conformance-adapter.js +858 -0
  62. package/src/lib/repo-observer.js +597 -0
  63. package/src/lib/repo.js +0 -31
  64. package/src/lib/schema.js +121 -0
  65. package/src/lib/schemas/turn-result.schema.json +205 -0
  66. package/src/lib/token-budget.js +206 -0
  67. package/src/lib/token-counter.js +27 -0
  68. package/src/lib/turn-paths.js +67 -0
  69. package/src/lib/turn-result-validator.js +496 -0
  70. package/src/lib/validation.js +137 -0
  71. package/src/templates/governed/api-service.json +31 -0
  72. package/src/templates/governed/cli-tool.json +30 -0
  73. package/src/templates/governed/generic.json +10 -0
  74. package/src/templates/governed/web-app.json +30 -0
@@ -0,0 +1,461 @@
1
+ import { existsSync, readFileSync, statSync } from 'node:fs';
2
+ import { join, resolve } from 'node:path';
3
+ import { loadNormalizedConfig } from './normalized-config.js';
4
+ import { safeParseJson } from './schema.js';
5
+
6
// Name of the multi-repo coordinator config file expected at the workspace root.
export const COORDINATOR_CONFIG_FILE = 'agentxchain-multi.json';

// Identifiers (repo ids, workstream ids, gate ids) must be lowercase
// alphanumerics plus hyphen/underscore.
const VALID_ID = /^[a-z0-9_-]+$/;
// The three lifecycle phases a workstream may declare.
const VALID_PHASES = new Set(['planning', 'implementation', 'qa']);
// Accepted values for a workstream's completion_barrier field.
const VALID_BARRIER_TYPES = new Set([
  'all_repos_accepted',
  'interface_alignment',
  'ordered_repo_sequence',
  'shared_human_gate',
]);
16
+
17
/**
 * Append a formatted "<code>: <message>" entry to the error accumulator.
 * @param {string[]} errors - list mutated in place
 * @param {string} code - machine-readable error code
 * @param {string} message - human-readable detail
 */
function pushError(errors, code, message) {
  const entry = `${code}: ${message}`;
  errors.push(entry);
}
20
+
21
/**
 * Validate the `project` section: it must be a plain object carrying
 * non-empty string `id` and `name`. Problems are appended to `errors`.
 */
function validateProject(raw, errors) {
  const project = raw.project;

  // A missing or non-object project makes field checks meaningless.
  if (!project || typeof project !== 'object' || Array.isArray(project)) {
    pushError(errors, 'project_invalid', 'project must be an object with id and name');
    return;
  }

  const idOk = typeof project.id === 'string' && project.id.trim();
  if (!idOk) {
    pushError(errors, 'project_id_invalid', 'project.id must be a non-empty string');
  }

  const nameOk = typeof project.name === 'string' && project.name.trim();
  if (!nameOk) {
    pushError(errors, 'project_name_invalid', 'project.name must be a non-empty string');
  }
}
35
+
36
/**
 * Validate the `repos` section and return the declared repo ids in
 * declaration order. Ids are collected even for repos that fail field
 * validation so later cross-reference checks can still run.
 */
function validateRepos(raw, errors) {
  const reposOk =
    raw.repos &&
    typeof raw.repos === 'object' &&
    !Array.isArray(raw.repos) &&
    Object.keys(raw.repos).length > 0;
  if (!reposOk) {
    pushError(errors, 'repos_invalid', 'repos must be a non-empty object');
    return [];
  }

  const repoIds = [];
  for (const repoId of Object.keys(raw.repos)) {
    const repo = raw.repos[repoId];
    repoIds.push(repoId);

    if (!VALID_ID.test(repoId)) {
      pushError(errors, 'repo_id_invalid', `repo id "${repoId}" must be lowercase alphanumeric, hyphen, or underscore`);
    }

    // A non-object entry cannot carry fields; skip the remaining checks.
    if (!repo || typeof repo !== 'object' || Array.isArray(repo)) {
      pushError(errors, 'repo_invalid', `repo "${repoId}" must be an object`);
      continue;
    }

    if (typeof repo.path !== 'string' || !repo.path.trim()) {
      pushError(errors, 'repo_path_invalid', `repo "${repoId}" path must be a non-empty string`);
    }

    // Optional fields are only validated when present.
    if ('default_branch' in repo && (typeof repo.default_branch !== 'string' || !repo.default_branch.trim())) {
      pushError(errors, 'repo_default_branch_invalid', `repo "${repoId}" default_branch must be a non-empty string when provided`);
    }

    if ('required' in repo && typeof repo.required !== 'boolean') {
      pushError(errors, 'repo_required_invalid', `repo "${repoId}" required must be a boolean when provided`);
    }
  }

  return repoIds;
}
70
+
71
/**
 * Validate the `workstreams` section and return the declared workstream ids.
 * Per workstream this checks: id format, object shape, phase, repo list
 * (entries must be unique, non-empty strings referencing declared repos),
 * entry_repo (must be a member of the repo list), depends_on (array of
 * declared workstream ids), and completion_barrier. Finally runs cycle
 * detection over the depends_on graph.
 */
function validateWorkstreams(raw, repoIds, errors) {
  const ws = raw.workstreams;
  if (!ws || typeof ws !== 'object' || Array.isArray(ws) || Object.keys(ws).length === 0) {
    pushError(errors, 'workstreams_invalid', 'workstreams must be a non-empty object');
    return [];
  }

  const knownRepos = new Set(repoIds);
  const workstreamIds = Object.keys(ws);
  const knownWorkstreams = new Set(workstreamIds);

  for (const workstreamId of workstreamIds) {
    const workstream = ws[workstreamId];

    if (!VALID_ID.test(workstreamId)) {
      pushError(errors, 'workstream_id_invalid', `workstream id "${workstreamId}" must be lowercase alphanumeric, hyphen, or underscore`);
    }

    // A non-object entry cannot carry fields; skip the remaining checks.
    if (!workstream || typeof workstream !== 'object' || Array.isArray(workstream)) {
      pushError(errors, 'workstream_invalid', `workstream "${workstreamId}" must be an object`);
      continue;
    }

    if (!VALID_PHASES.has(workstream.phase)) {
      pushError(
        errors,
        'workstream_phase_invalid',
        `workstream "${workstreamId}" phase must be one of: planning, implementation, qa`,
      );
    }

    if (!Array.isArray(workstream.repos) || workstream.repos.length === 0) {
      pushError(errors, 'workstream_repos_invalid', `workstream "${workstreamId}" repos must be a non-empty array`);
    } else {
      const seenRepos = new Set();
      for (const repoId of workstream.repos) {
        if (typeof repoId !== 'string' || !repoId.trim()) {
          pushError(errors, 'workstream_repo_invalid', `workstream "${workstreamId}" repos entries must be non-empty strings`);
        } else if (seenRepos.has(repoId)) {
          pushError(errors, 'workstream_repo_duplicate', `workstream "${workstreamId}" contains duplicate repo "${repoId}"`);
        } else {
          seenRepos.add(repoId);
          if (!knownRepos.has(repoId)) {
            pushError(errors, 'workstream_repo_unknown', `workstream "${workstreamId}" references undeclared repo "${repoId}"`);
          }
        }
      }
    }

    const entryRepo = workstream.entry_repo;
    if (typeof entryRepo !== 'string' || !entryRepo.trim()) {
      pushError(errors, 'workstream_entry_repo_invalid', `workstream "${workstreamId}" entry_repo must be a non-empty string`);
    } else if (!Array.isArray(workstream.repos) || !workstream.repos.includes(entryRepo)) {
      pushError(errors, 'workstream_entry_repo_mismatch', `workstream "${workstreamId}" entry_repo must also appear in workstream.repos`);
    }

    if (!Array.isArray(workstream.depends_on)) {
      pushError(errors, 'workstream_depends_on_invalid', `workstream "${workstreamId}" depends_on must be an array`);
    } else {
      for (const dependencyId of workstream.depends_on) {
        if (typeof dependencyId !== 'string' || !dependencyId.trim()) {
          pushError(errors, 'workstream_dependency_invalid', `workstream "${workstreamId}" depends_on entries must be non-empty strings`);
        } else if (!knownWorkstreams.has(dependencyId)) {
          pushError(errors, 'workstream_dependency_unknown', `workstream "${workstreamId}" depends_on undeclared workstream "${dependencyId}"`);
        }
      }
    }

    if (!VALID_BARRIER_TYPES.has(workstream.completion_barrier)) {
      pushError(
        errors,
        'workstream_completion_barrier_invalid',
        `workstream "${workstreamId}" completion_barrier must be one of: ${Array.from(VALID_BARRIER_TYPES).join(', ')}`,
      );
    }
  }

  detectWorkstreamCycles(ws, errors);
  return workstreamIds;
}
154
+
155
/**
 * Depth-first search over depends_on edges that reports circular
 * workstream dependencies as "a -> b -> a" paths. Edges to undeclared
 * workstreams are skipped here (they are reported by validateWorkstreams).
 */
function detectWorkstreamCycles(workstreams, errors) {
  const inProgress = new Set(); // nodes on the current DFS path
  const done = new Set();       // fully explored nodes

  const walk = (id, trail) => {
    if (inProgress.has(id)) {
      // Back-edge: render the cycle from the first occurrence of `id`.
      const start = trail.indexOf(id);
      const cyclePath = [...trail.slice(start), id].join(' -> ');
      pushError(errors, 'workstream_cycle', `circular workstream dependency detected: ${cyclePath}`);
      return;
    }

    if (done.has(id)) {
      return;
    }

    inProgress.add(id);
    const node = workstreams[id];
    const deps = Array.isArray(node?.depends_on) ? node.depends_on : [];
    for (const dep of deps) {
      if (workstreams[dep]) {
        walk(dep, [...trail, id]);
      }
    }
    inProgress.delete(id);
    done.add(id);
  };

  for (const id of Object.keys(workstreams)) {
    walk(id, []);
  }
}
186
+
187
/**
 * Validate the optional `routing` section, mapping a phase name to a route
 * whose optional entry_workstream must reference a declared workstream that
 * belongs to the same phase. An absent section is valid.
 */
function validateRouting(raw, workstreamIds, errors) {
  if (raw.routing === undefined) {
    return;
  }

  const routingIsObject = raw.routing && typeof raw.routing === 'object' && !Array.isArray(raw.routing);
  if (!routingIsObject) {
    pushError(errors, 'routing_invalid', 'routing must be an object when provided');
    return;
  }

  const declaredWorkstreams = new Set(workstreamIds);
  for (const phase of Object.keys(raw.routing)) {
    const route = raw.routing[phase];

    if (!VALID_PHASES.has(phase)) {
      pushError(errors, 'routing_phase_invalid', `routing phase "${phase}" must be one of: planning, implementation, qa`);
    }

    if (!route || typeof route !== 'object' || Array.isArray(route)) {
      pushError(errors, 'routing_entry_invalid', `routing "${phase}" must be an object`);
      continue;
    }

    // entry_workstream is optional; validate only when present.
    if (!('entry_workstream' in route)) {
      continue;
    }

    const entry = route.entry_workstream;
    if (typeof entry !== 'string' || !entry.trim()) {
      pushError(errors, 'routing_entry_workstream_invalid', `routing "${phase}" entry_workstream must be a non-empty string`);
    } else if (!declaredWorkstreams.has(entry)) {
      pushError(errors, 'routing_entry_workstream_unknown', `routing "${phase}" references undeclared workstream "${entry}"`);
    } else {
      const targetWorkstream = raw.workstreams?.[entry];
      if (targetWorkstream?.phase && targetWorkstream.phase !== phase) {
        pushError(errors, 'routing_phase_mismatch', `routing "${phase}" entry_workstream "${entry}" must belong to phase "${phase}"`);
      }
    }
  }
}
222
+
223
/**
 * Validate the optional `gates` section. Each gate may declare
 * requires_human_approval (boolean) and requires_repos (array of unique,
 * declared repo ids). An absent section is valid.
 */
function validateGates(raw, repoIds, errors) {
  if (raw.gates === undefined) {
    return;
  }

  const gatesIsObject = raw.gates && typeof raw.gates === 'object' && !Array.isArray(raw.gates);
  if (!gatesIsObject) {
    pushError(errors, 'gates_invalid', 'gates must be an object when provided');
    return;
  }

  const declaredRepos = new Set(repoIds);
  for (const gateId of Object.keys(raw.gates)) {
    const gate = raw.gates[gateId];

    if (!VALID_ID.test(gateId)) {
      pushError(errors, 'gate_id_invalid', `gate id "${gateId}" must be lowercase alphanumeric, hyphen, or underscore`);
    }

    // A non-object entry cannot carry fields; skip the remaining checks.
    if (!gate || typeof gate !== 'object' || Array.isArray(gate)) {
      pushError(errors, 'gate_invalid', `gate "${gateId}" must be an object`);
      continue;
    }

    if ('requires_human_approval' in gate && typeof gate.requires_human_approval !== 'boolean') {
      pushError(errors, 'gate_requires_human_approval_invalid', `gate "${gateId}" requires_human_approval must be a boolean when provided`);
    }

    if ('requires_repos' in gate) {
      if (!Array.isArray(gate.requires_repos)) {
        pushError(errors, 'gate_requires_repos_invalid', `gate "${gateId}" requires_repos must be an array`);
      } else {
        const seenRepos = new Set();
        for (const repoId of gate.requires_repos) {
          if (typeof repoId !== 'string' || !repoId.trim()) {
            pushError(errors, 'gate_requires_repo_invalid', `gate "${gateId}" requires_repos entries must be non-empty strings`);
          } else if (seenRepos.has(repoId)) {
            pushError(errors, 'gate_requires_repo_duplicate', `gate "${gateId}" contains duplicate required repo "${repoId}"`);
          } else {
            seenRepos.add(repoId);
            if (!declaredRepos.has(repoId)) {
              pushError(errors, 'gate_requires_repo_unknown', `gate "${gateId}" references undeclared repo "${repoId}"`);
            }
          }
        }
      }
    }
  }
}
273
+
274
/**
 * Normalize the optional routing section: every route is reduced to
 * `{ entry_workstream }`, defaulting to null when absent.
 */
function normalizeRouting(rawRouting = {}) {
  const routing = {};
  for (const phase of Object.keys(rawRouting)) {
    const route = rawRouting[phase];
    routing[phase] = { entry_workstream: route?.entry_workstream ?? null };
  }
  return routing;
}
284
+
285
/**
 * Normalize the optional gates section: requires_human_approval collapses
 * to a strict boolean (true only when explicitly true) and requires_repos
 * is deduplicated, defaulting to an empty array.
 */
function normalizeGates(rawGates = {}) {
  const gates = {};
  for (const gateId of Object.keys(rawGates)) {
    const gate = rawGates[gateId];
    const repos = Array.isArray(gate?.requires_repos) ? [...new Set(gate.requires_repos)] : [];
    gates[gateId] = {
      requires_human_approval: gate?.requires_human_approval === true,
      requires_repos: repos,
    };
  }
  return gates;
}
296
+
297
/**
 * Run full structural validation over a parsed agentxchain-multi.json
 * value. Returns { ok, errors }; each error is a "code: message" string.
 * Cross-reference checks (workstream repos, routing targets, gate repos)
 * run against the ids collected by the earlier section validators.
 */
export function validateCoordinatorConfig(raw) {
  // The root must be a plain JSON object before any section can be checked.
  if (!raw || typeof raw !== 'object' || Array.isArray(raw)) {
    return { ok: false, errors: ['config_invalid: agentxchain-multi.json must be a JSON object'] };
  }

  const errors = [];

  if (raw.schema_version !== '0.1') {
    pushError(errors, 'schema_version_invalid', 'schema_version must be "0.1"');
  }

  validateProject(raw, errors);
  const repoIds = validateRepos(raw, errors);
  const workstreamIds = validateWorkstreams(raw, repoIds, errors);
  validateRouting(raw, workstreamIds, errors);
  validateGates(raw, repoIds, errors);

  return { ok: errors.length === 0, errors };
}
316
+
317
/**
 * Convert a raw config that already passed validateCoordinatorConfig into
 * its canonical in-memory form: trimmed project fields, per-repo defaults
 * (default_branch falls back to 'main'; required defaults to true),
 * deduplicated workstream repos/depends_on, normalized routing and gates,
 * and declaration-order id lists (repo_order / workstream_order).
 */
export function normalizeCoordinatorConfig(raw) {
  const repos = {};
  for (const [repoId, repo] of Object.entries(raw.repos)) {
    const trimmedBranch = typeof repo.default_branch === 'string' ? repo.default_branch.trim() : '';
    repos[repoId] = {
      path: repo.path.trim(),
      default_branch: trimmedBranch || 'main',
      // Repos are required unless explicitly opted out with `required: false`.
      required: repo.required !== false,
    };
  }

  const workstreams = {};
  for (const [workstreamId, workstream] of Object.entries(raw.workstreams)) {
    workstreams[workstreamId] = {
      phase: workstream.phase,
      repos: [...new Set(workstream.repos)],
      entry_repo: workstream.entry_repo,
      depends_on: Array.isArray(workstream.depends_on) ? [...new Set(workstream.depends_on)] : [],
      completion_barrier: workstream.completion_barrier,
    };
  }

  return {
    schema_version: raw.schema_version,
    project: {
      id: raw.project.id.trim(),
      name: raw.project.name.trim(),
    },
    repo_order: Object.keys(repos),
    repos,
    workstream_order: Object.keys(workstreams),
    workstreams,
    routing: normalizeRouting(raw.routing),
    gates: normalizeGates(raw.gates),
    hooks: raw.hooks && typeof raw.hooks === 'object' ? raw.hooks : {},
  };
}
355
+
356
/**
 * Resolve every configured repo path against the workspace root and verify
 * each one is a usable governed repository. Checks run in order per repo:
 * path exists, is stat-able, is a directory, contains agentxchain.json,
 * that file parses as JSON, and the parsed config normalizes to
 * protocol_mode 'governed' (per loadNormalizedConfig — presumably the
 * single-repo config loader; verify against normalized-config.js).
 * Returns { ok, resolved, errors } where `resolved` maps repo id to its
 * absolute path — populated even for repos that fail later checks.
 */
export function resolveRepoPaths(config, workspacePath) {
  const errors = [];
  const resolved = {};
  const workspaceRoot = resolve(workspacePath);

  for (const [repoId, repo] of Object.entries(config.repos)) {
    // Relative repo paths are interpreted against the workspace root.
    const resolvedPath = resolve(workspaceRoot, repo.path);
    resolved[repoId] = resolvedPath;

    if (!existsSync(resolvedPath)) {
      pushError(errors, 'repo_path_missing', `repo "${repoId}" path does not exist: ${resolvedPath}`);
      continue;
    }

    let stats;
    try {
      stats = statSync(resolvedPath);
    } catch (err) {
      // existsSync can succeed while stat fails (e.g. permissions).
      pushError(errors, 'repo_path_unreadable', `repo "${repoId}" path could not be read: ${err.message}`);
      continue;
    }

    if (!stats.isDirectory()) {
      pushError(errors, 'repo_path_not_directory', `repo "${repoId}" path is not a directory: ${resolvedPath}`);
      continue;
    }

    // A governed repo must carry its own per-repo config file.
    const configPath = join(resolvedPath, 'agentxchain.json');
    if (!existsSync(configPath)) {
      pushError(errors, 'repo_not_governed', `repo "${repoId}" does not contain agentxchain.json`);
      continue;
    }

    let parsed;
    try {
      parsed = safeParseJson(readFileSync(configPath, 'utf8'));
    } catch (err) {
      pushError(errors, 'repo_not_governed', `repo "${repoId}" config could not be read: ${err.message}`);
      continue;
    }

    if (!parsed.ok) {
      pushError(errors, 'repo_not_governed', `repo "${repoId}" config is invalid JSON: ${parsed.errors.join(', ')}`);
      continue;
    }

    const normalized = loadNormalizedConfig(parsed.data, resolvedPath);
    if (!normalized?.ok || normalized.normalized?.protocol_mode !== 'governed') {
      // Include the loader's own errors, when any, for diagnosability.
      const detail = normalized?.errors?.length ? ` (${normalized.errors.join(', ')})` : '';
      pushError(errors, 'repo_not_governed', `repo "${repoId}" is not a governed project${detail}`);
    }
  }

  return { ok: errors.length === 0, resolved, errors };
}
411
+
412
/**
 * Load the coordinator config for a workspace: read agentxchain-multi.json
 * from the workspace root, parse it, validate its structure, normalize it,
 * and resolve every repo path. Returns { ok, config, errors }; `config` is
 * null on any failure. On success each repo entry gains `resolved_path`
 * and the config gains `workspace_path`.
 */
export function loadCoordinatorConfig(workspacePath) {
  const workspaceRoot = resolve(workspacePath);
  const configPath = join(workspaceRoot, COORDINATOR_CONFIG_FILE);

  if (!existsSync(configPath)) {
    return {
      ok: false,
      config: null,
      errors: [`config_missing: No ${COORDINATOR_CONFIG_FILE} found at ${workspaceRoot}`],
    };
  }

  const parsed = safeParseJson(readFileSync(configPath, 'utf8'));
  if (!parsed.ok) {
    return {
      ok: false,
      config: null,
      errors: parsed.errors.map((error) => `config_invalid: ${error}`),
    };
  }

  const validation = validateCoordinatorConfig(parsed.data);
  if (!validation.ok) {
    return { ok: false, config: null, errors: validation.errors };
  }

  const normalized = normalizeCoordinatorConfig(parsed.data);
  const resolution = resolveRepoPaths(normalized, workspaceRoot);
  if (!resolution.ok) {
    return { ok: false, config: null, errors: resolution.errors };
  }

  // Attach each repo's absolute path to the normalized entry.
  const reposWithPaths = {};
  for (const [repoId, repo] of Object.entries(normalized.repos)) {
    reposWithPaths[repoId] = { ...repo, resolved_path: resolution.resolved[repoId] };
  }

  return {
    ok: true,
    config: {
      ...normalized,
      workspace_path: workspaceRoot,
      repos: reposWithPaths,
    },
    errors: [],
  };
}