agentxchain 2.4.0 → 2.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/agentxchain.js +16 -1
- package/package.json +1 -1
- package/src/commands/export.js +63 -0
- package/src/commands/step.js +4 -3
- package/src/commands/verify.js +60 -0
- package/src/lib/adapters/mcp-adapter.js +81 -18
- package/src/lib/export-verifier.js +422 -0
- package/src/lib/export.js +303 -0
- package/src/lib/normalized-config.js +61 -4
package/bin/agentxchain.js
CHANGED
|
@@ -59,7 +59,7 @@ import { generateCommand } from '../src/commands/generate.js';
|
|
|
59
59
|
import { doctorCommand } from '../src/commands/doctor.js';
|
|
60
60
|
import { superviseCommand } from '../src/commands/supervise.js';
|
|
61
61
|
import { validateCommand } from '../src/commands/validate.js';
|
|
62
|
-
import { verifyProtocolCommand } from '../src/commands/verify.js';
|
|
62
|
+
import { verifyExportCommand, verifyProtocolCommand } from '../src/commands/verify.js';
|
|
63
63
|
import { kickoffCommand } from '../src/commands/kickoff.js';
|
|
64
64
|
import { rebindCommand } from '../src/commands/rebind.js';
|
|
65
65
|
import { branchCommand } from '../src/commands/branch.js';
|
|
@@ -71,6 +71,7 @@ import { stepCommand } from '../src/commands/step.js';
|
|
|
71
71
|
import { approveTransitionCommand } from '../src/commands/approve-transition.js';
|
|
72
72
|
import { approveCompletionCommand } from '../src/commands/approve-completion.js';
|
|
73
73
|
import { dashboardCommand } from '../src/commands/dashboard.js';
|
|
74
|
+
import { exportCommand } from '../src/commands/export.js';
|
|
74
75
|
import {
|
|
75
76
|
pluginInstallCommand,
|
|
76
77
|
pluginListCommand,
|
|
@@ -121,6 +122,13 @@ program
|
|
|
121
122
|
.option('-j, --json', 'Output as JSON')
|
|
122
123
|
.action(statusCommand);
|
|
123
124
|
|
|
125
|
+
program
|
|
126
|
+
.command('export')
|
|
127
|
+
.description('Export the governed run audit surface as a single artifact')
|
|
128
|
+
.option('--format <format>', 'Export format (json)', 'json')
|
|
129
|
+
.option('--output <path>', 'Write the export artifact to a file instead of stdout')
|
|
130
|
+
.action(exportCommand);
|
|
131
|
+
|
|
124
132
|
program
|
|
125
133
|
.command('start')
|
|
126
134
|
.description('Launch legacy v3 agents in your IDE')
|
|
@@ -231,6 +239,13 @@ verifyCmd
|
|
|
231
239
|
.option('--format <format>', 'Output format: text or json', 'text')
|
|
232
240
|
.action(verifyProtocolCommand);
|
|
233
241
|
|
|
242
|
+
verifyCmd
|
|
243
|
+
.command('export')
|
|
244
|
+
.description('Verify an AgentXchain export artifact against its embedded file bytes and summaries')
|
|
245
|
+
.option('--input <path>', 'Export artifact path, or "-" for stdin', '-')
|
|
246
|
+
.option('--format <format>', 'Output format: text or json', 'text')
|
|
247
|
+
.action(verifyExportCommand);
|
|
248
|
+
|
|
234
249
|
program
|
|
235
250
|
.command('migrate')
|
|
236
251
|
.description('Migrate a legacy v3 project to governed format')
|
package/package.json
CHANGED
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
import { existsSync } from 'node:fs';
|
|
2
|
+
import { join, resolve } from 'node:path';
|
|
3
|
+
|
|
4
|
+
import { buildRunExport, buildCoordinatorExport } from '../lib/export.js';
|
|
5
|
+
import { COORDINATOR_CONFIG_FILE } from '../lib/coordinator-config.js';
|
|
6
|
+
import { safeWriteJson } from '../lib/safe-write.js';
|
|
7
|
+
|
|
8
|
+
function detectExportKind(cwd) {
|
|
9
|
+
// Governed project takes priority (agentxchain.json)
|
|
10
|
+
if (existsSync(join(cwd, 'agentxchain.json'))) {
|
|
11
|
+
return 'governed';
|
|
12
|
+
}
|
|
13
|
+
// Coordinator workspace (agentxchain-multi.json)
|
|
14
|
+
if (existsSync(join(cwd, COORDINATOR_CONFIG_FILE))) {
|
|
15
|
+
return 'coordinator';
|
|
16
|
+
}
|
|
17
|
+
return null;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
export async function exportCommand(options) {
|
|
21
|
+
const format = options.format || 'json';
|
|
22
|
+
if (format !== 'json') {
|
|
23
|
+
console.error(`Unsupported export format "${format}". Only "json" is supported in this slice.`);
|
|
24
|
+
process.exitCode = 1;
|
|
25
|
+
return;
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
const cwd = process.cwd();
|
|
29
|
+
const kind = detectExportKind(cwd);
|
|
30
|
+
|
|
31
|
+
let result;
|
|
32
|
+
try {
|
|
33
|
+
if (kind === 'governed') {
|
|
34
|
+
result = buildRunExport(cwd);
|
|
35
|
+
} else if (kind === 'coordinator') {
|
|
36
|
+
result = buildCoordinatorExport(cwd);
|
|
37
|
+
} else {
|
|
38
|
+
result = {
|
|
39
|
+
ok: false,
|
|
40
|
+
error: 'No governed project or coordinator workspace found. Run this inside an AgentXchain governed project or coordinator workspace.',
|
|
41
|
+
};
|
|
42
|
+
}
|
|
43
|
+
} catch (error) {
|
|
44
|
+
console.error(error.message || String(error));
|
|
45
|
+
process.exitCode = 1;
|
|
46
|
+
return;
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
if (!result.ok) {
|
|
50
|
+
console.error(result.error);
|
|
51
|
+
process.exitCode = 1;
|
|
52
|
+
return;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
if (options.output) {
|
|
56
|
+
const outputPath = resolve(cwd, options.output);
|
|
57
|
+
safeWriteJson(outputPath, result.export);
|
|
58
|
+
console.log(`Exported ${kind === 'coordinator' ? 'coordinator workspace' : 'governed run'} audit to ${options.output}`);
|
|
59
|
+
return;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
console.log(JSON.stringify(result.export, null, 2));
|
|
63
|
+
}
|
package/src/commands/step.js
CHANGED
|
@@ -17,7 +17,7 @@
|
|
|
17
17
|
* - local_cli: implemented via subprocess dispatch + staged turn result
|
|
18
18
|
* - api_proxy: implemented for synchronous review-only turns and stages
|
|
19
19
|
* provider-backed JSON before validation/acceptance
|
|
20
|
-
* - mcp: implemented for synchronous MCP stdio tool dispatch
|
|
20
|
+
* - mcp: implemented for synchronous MCP stdio or streamable_http tool dispatch
|
|
21
21
|
*/
|
|
22
22
|
|
|
23
23
|
import chalk from 'chalk';
|
|
@@ -46,7 +46,7 @@ import {
|
|
|
46
46
|
saveDispatchLogs,
|
|
47
47
|
resolvePromptTransport,
|
|
48
48
|
} from '../lib/adapters/local-cli-adapter.js';
|
|
49
|
-
import { dispatchMcp } from '../lib/adapters/mcp-adapter.js';
|
|
49
|
+
import { describeMcpRuntimeTarget, dispatchMcp, resolveMcpTransport } from '../lib/adapters/mcp-adapter.js';
|
|
50
50
|
import {
|
|
51
51
|
getDispatchAssignmentPath,
|
|
52
52
|
getDispatchContextPath,
|
|
@@ -426,7 +426,8 @@ export async function stepCommand(opts) {
|
|
|
426
426
|
}
|
|
427
427
|
console.log('');
|
|
428
428
|
} else if (runtimeType === 'mcp') {
|
|
429
|
-
|
|
429
|
+
const mcpTransport = resolveMcpTransport(runtime);
|
|
430
|
+
console.log(chalk.cyan(`Dispatching to MCP ${mcpTransport}: ${describeMcpRuntimeTarget(runtime)}`));
|
|
430
431
|
console.log(chalk.dim(`Turn: ${turn.turn_id} Role: ${roleId} Phase: ${state.phase} Tool: ${runtime?.tool_name || 'agentxchain_turn'}`));
|
|
431
432
|
|
|
432
433
|
const mcpResult = await dispatchMcp(root, state, config, {
|
package/src/commands/verify.js
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import chalk from 'chalk';
|
|
2
2
|
import { resolve } from 'node:path';
|
|
3
|
+
import { loadExportArtifact, verifyExportArtifact } from '../lib/export-verifier.js';
|
|
3
4
|
import { verifyProtocolConformance } from '../lib/protocol-conformance.js';
|
|
4
5
|
|
|
5
6
|
export async function verifyProtocolCommand(opts) {
|
|
@@ -35,6 +36,38 @@ export async function verifyProtocolCommand(opts) {
|
|
|
35
36
|
process.exit(result.exitCode);
|
|
36
37
|
}
|
|
37
38
|
|
|
39
|
+
export async function verifyExportCommand(opts) {
|
|
40
|
+
const format = opts.format || 'text';
|
|
41
|
+
const loaded = loadExportArtifact(opts.input || '-', process.cwd());
|
|
42
|
+
|
|
43
|
+
if (!loaded.ok) {
|
|
44
|
+
if (format === 'json') {
|
|
45
|
+
console.log(JSON.stringify({
|
|
46
|
+
overall: 'error',
|
|
47
|
+
input: loaded.input,
|
|
48
|
+
message: loaded.error,
|
|
49
|
+
}, null, 2));
|
|
50
|
+
} else {
|
|
51
|
+
console.log(chalk.red(`Export verification failed: ${loaded.error}`));
|
|
52
|
+
}
|
|
53
|
+
process.exit(2);
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
const result = verifyExportArtifact(loaded.artifact);
|
|
57
|
+
const report = {
|
|
58
|
+
...result.report,
|
|
59
|
+
input: loaded.input,
|
|
60
|
+
};
|
|
61
|
+
|
|
62
|
+
if (format === 'json') {
|
|
63
|
+
console.log(JSON.stringify(report, null, 2));
|
|
64
|
+
} else {
|
|
65
|
+
printExportReport(report);
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
process.exit(result.ok ? 0 : 1);
|
|
69
|
+
}
|
|
70
|
+
|
|
38
71
|
function printProtocolReport(report) {
|
|
39
72
|
console.log('');
|
|
40
73
|
console.log(chalk.bold(' AgentXchain Protocol Conformance'));
|
|
@@ -74,3 +107,30 @@ function printProtocolReport(report) {
|
|
|
74
107
|
|
|
75
108
|
console.log('');
|
|
76
109
|
}
|
|
110
|
+
|
|
111
|
+
function printExportReport(report) {
|
|
112
|
+
console.log('');
|
|
113
|
+
console.log(chalk.bold(' AgentXchain Export Verification'));
|
|
114
|
+
console.log(chalk.dim(' ' + '─'.repeat(43)));
|
|
115
|
+
console.log(chalk.dim(` Input: ${report.input}`));
|
|
116
|
+
console.log(chalk.dim(` Export kind: ${report.export_kind || 'unknown'}`));
|
|
117
|
+
console.log(chalk.dim(` Schema: ${report.schema_version || 'unknown'}`));
|
|
118
|
+
console.log('');
|
|
119
|
+
|
|
120
|
+
const overallLabel = report.overall === 'pass'
|
|
121
|
+
? chalk.green('PASS')
|
|
122
|
+
: report.overall === 'fail'
|
|
123
|
+
? chalk.red('FAIL')
|
|
124
|
+
: chalk.red('ERROR');
|
|
125
|
+
console.log(` Overall: ${overallLabel}`);
|
|
126
|
+
console.log(chalk.dim(` Files verified: ${report.file_count}`));
|
|
127
|
+
if (report.repo_count) {
|
|
128
|
+
console.log(chalk.dim(` Embedded repos: ${report.repo_count}`));
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
for (const error of report.errors || []) {
|
|
132
|
+
console.log(chalk.red(` ✗ ${error}`));
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
console.log('');
|
|
136
|
+
}
|
|
@@ -2,6 +2,7 @@ import { mkdirSync, existsSync, readFileSync, writeFileSync } from 'fs';
|
|
|
2
2
|
import { join } from 'path';
|
|
3
3
|
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
|
|
4
4
|
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';
|
|
5
|
+
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js';
|
|
5
6
|
import {
|
|
6
7
|
getDispatchAssignmentPath,
|
|
7
8
|
getDispatchContextPath,
|
|
@@ -13,12 +14,13 @@ import {
|
|
|
13
14
|
import { verifyDispatchManifestForAdapter } from '../dispatch-manifest.js';
|
|
14
15
|
|
|
15
16
|
export const DEFAULT_MCP_TOOL_NAME = 'agentxchain_turn';
|
|
17
|
+
export const DEFAULT_MCP_TRANSPORT = 'stdio';
|
|
16
18
|
|
|
17
19
|
/**
|
|
18
|
-
* Dispatch a governed turn to an MCP server
|
|
20
|
+
* Dispatch a governed turn to an MCP server.
|
|
19
21
|
*
|
|
20
|
-
*
|
|
21
|
-
* - stdio transport
|
|
22
|
+
* Current scope:
|
|
23
|
+
* - stdio or streamable_http transport
|
|
22
24
|
* - single tool call per turn
|
|
23
25
|
* - required governed-turn tool contract
|
|
24
26
|
* - synchronous dispatch/wait flow (like api_proxy)
|
|
@@ -55,7 +57,8 @@ export async function dispatchMcp(root, state, config, options = {}) {
|
|
|
55
57
|
const prompt = readFileSync(promptPath, 'utf8');
|
|
56
58
|
const context = existsSync(contextPath) ? readFileSync(contextPath, 'utf8') : '';
|
|
57
59
|
const { command, args } = resolveMcpCommand(runtime);
|
|
58
|
-
|
|
60
|
+
const transportType = resolveMcpTransport(runtime);
|
|
61
|
+
if (transportType === 'stdio' && !command) {
|
|
59
62
|
return { ok: false, error: `Cannot resolve MCP command for runtime "${runtimeId}". Expected "command" field in runtime config.` };
|
|
60
63
|
}
|
|
61
64
|
|
|
@@ -68,20 +71,16 @@ export async function dispatchMcp(root, state, config, options = {}) {
|
|
|
68
71
|
const stagingDir = join(root, getTurnStagingDir(turn.turn_id));
|
|
69
72
|
mkdirSync(stagingDir, { recursive: true });
|
|
70
73
|
|
|
71
|
-
const transport =
|
|
74
|
+
const transport = buildMcpClientTransport({
|
|
75
|
+
root,
|
|
76
|
+
runtime,
|
|
72
77
|
command,
|
|
73
78
|
args,
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
stderr: 'pipe',
|
|
79
|
+
logs,
|
|
80
|
+
onStderr,
|
|
77
81
|
});
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
transport.stderr.on('data', (chunk) => {
|
|
81
|
-
const text = chunk.toString();
|
|
82
|
-
logs.push(`[stderr] ${text}`);
|
|
83
|
-
if (onStderr) onStderr(text);
|
|
84
|
-
});
|
|
82
|
+
if (!transport.ok) {
|
|
83
|
+
return { ok: false, error: transport.error, logs };
|
|
85
84
|
}
|
|
86
85
|
|
|
87
86
|
const client = new Client({
|
|
@@ -100,8 +99,8 @@ export async function dispatchMcp(root, state, config, options = {}) {
|
|
|
100
99
|
return { ok: false, aborted: true, logs };
|
|
101
100
|
}
|
|
102
101
|
|
|
103
|
-
onStatus?.(`Connecting to MCP
|
|
104
|
-
await client.connect(transport);
|
|
102
|
+
onStatus?.(`Connecting to MCP ${transportType} server (${describeMcpRuntimeTarget(runtime)})`);
|
|
103
|
+
await client.connect(transport.transport);
|
|
105
104
|
|
|
106
105
|
if (signal?.aborted) {
|
|
107
106
|
return { ok: false, aborted: true, logs };
|
|
@@ -182,7 +181,7 @@ export async function dispatchMcp(root, state, config, options = {}) {
|
|
|
182
181
|
logs,
|
|
183
182
|
};
|
|
184
183
|
} finally {
|
|
185
|
-
await safeCloseClient(client, transport);
|
|
184
|
+
await safeCloseClient(client, transport.transport);
|
|
186
185
|
}
|
|
187
186
|
}
|
|
188
187
|
|
|
@@ -206,6 +205,18 @@ export function resolveMcpToolName(runtime) {
|
|
|
206
205
|
: DEFAULT_MCP_TOOL_NAME;
|
|
207
206
|
}
|
|
208
207
|
|
|
208
|
+
export function resolveMcpTransport(runtime) {
|
|
209
|
+
return typeof runtime?.transport === 'string' && runtime.transport.trim()
|
|
210
|
+
? runtime.transport.trim()
|
|
211
|
+
: DEFAULT_MCP_TRANSPORT;
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
export function describeMcpRuntimeTarget(runtime) {
|
|
215
|
+
return resolveMcpTransport(runtime) === 'streamable_http'
|
|
216
|
+
? runtime?.url || '(unknown)'
|
|
217
|
+
: resolveMcpCommand(runtime).command || '(unknown)';
|
|
218
|
+
}
|
|
219
|
+
|
|
209
220
|
export function extractTurnResultFromMcpToolResult(toolResult) {
|
|
210
221
|
const directCandidates = [
|
|
211
222
|
toolResult?.structuredContent,
|
|
@@ -268,6 +279,58 @@ function buildTransportEnv(env) {
|
|
|
268
279
|
return result;
|
|
269
280
|
}
|
|
270
281
|
|
|
282
|
+
function buildMcpClientTransport({ root, runtime, command, args, logs, onStderr }) {
|
|
283
|
+
if (resolveMcpTransport(runtime) === 'streamable_http') {
|
|
284
|
+
try {
|
|
285
|
+
const requestHeaders = buildRequestHeaders(runtime?.headers);
|
|
286
|
+
return {
|
|
287
|
+
ok: true,
|
|
288
|
+
transport: new StreamableHTTPClientTransport(new URL(runtime.url), {
|
|
289
|
+
requestInit: requestHeaders ? { headers: requestHeaders } : undefined,
|
|
290
|
+
}),
|
|
291
|
+
};
|
|
292
|
+
} catch (error) {
|
|
293
|
+
logs.push(`[transport-error] ${error.message}`);
|
|
294
|
+
return {
|
|
295
|
+
ok: false,
|
|
296
|
+
error: `Cannot resolve MCP streamable_http runtime: ${error.message}`,
|
|
297
|
+
};
|
|
298
|
+
}
|
|
299
|
+
}
|
|
300
|
+
|
|
301
|
+
const transport = new StdioClientTransport({
|
|
302
|
+
command,
|
|
303
|
+
args,
|
|
304
|
+
cwd: runtime.cwd ? join(root, runtime.cwd) : root,
|
|
305
|
+
env: buildTransportEnv(process.env),
|
|
306
|
+
stderr: 'pipe',
|
|
307
|
+
});
|
|
308
|
+
|
|
309
|
+
if (transport.stderr) {
|
|
310
|
+
transport.stderr.on('data', (chunk) => {
|
|
311
|
+
const text = chunk.toString();
|
|
312
|
+
logs.push(`[stderr] ${text}`);
|
|
313
|
+
if (onStderr) onStderr(text);
|
|
314
|
+
});
|
|
315
|
+
}
|
|
316
|
+
|
|
317
|
+
return { ok: true, transport };
|
|
318
|
+
}
|
|
319
|
+
|
|
320
|
+
function buildRequestHeaders(headers) {
|
|
321
|
+
if (!headers || typeof headers !== 'object' || Array.isArray(headers)) {
|
|
322
|
+
return null;
|
|
323
|
+
}
|
|
324
|
+
|
|
325
|
+
const result = {};
|
|
326
|
+
for (const [key, value] of Object.entries(headers)) {
|
|
327
|
+
if (typeof value === 'string') {
|
|
328
|
+
result[key] = value;
|
|
329
|
+
}
|
|
330
|
+
}
|
|
331
|
+
return Object.keys(result).length > 0 ? result : null;
|
|
332
|
+
}
|
|
333
|
+
|
|
271
334
|
function isPlainObject(value) {
|
|
272
335
|
return !!value && typeof value === 'object' && !Array.isArray(value);
|
|
273
336
|
}
|
|
@@ -0,0 +1,422 @@
|
|
|
1
|
+
import { createHash } from 'node:crypto';
|
|
2
|
+
import { readFileSync } from 'node:fs';
|
|
3
|
+
import { resolve } from 'node:path';
|
|
4
|
+
import { isDeepStrictEqual } from 'node:util';
|
|
5
|
+
|
|
6
|
+
const SUPPORTED_EXPORT_SCHEMA_VERSION = '0.2';
|
|
7
|
+
const VALID_FILE_FORMATS = new Set(['json', 'jsonl', 'text']);
|
|
8
|
+
|
|
9
|
+
function sha256(buffer) {
|
|
10
|
+
return createHash('sha256').update(buffer).digest('hex');
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
function parseJsonl(raw, relPath) {
|
|
14
|
+
if (!raw.trim()) {
|
|
15
|
+
return [];
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
return raw
|
|
19
|
+
.split('\n')
|
|
20
|
+
.filter((line) => line.trim())
|
|
21
|
+
.map((line, index) => {
|
|
22
|
+
try {
|
|
23
|
+
return JSON.parse(line);
|
|
24
|
+
} catch (error) {
|
|
25
|
+
throw new Error(`${relPath}: invalid JSONL at line ${index + 1}: ${error.message}`);
|
|
26
|
+
}
|
|
27
|
+
});
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
function addError(errors, path, message) {
|
|
31
|
+
errors.push(`${path}: ${message}`);
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
function verifyFileEntry(relPath, entry, errors) {
|
|
35
|
+
const path = `files.${relPath}`;
|
|
36
|
+
if (!entry || typeof entry !== 'object' || Array.isArray(entry)) {
|
|
37
|
+
addError(errors, path, 'file entry must be an object');
|
|
38
|
+
return;
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
if (!VALID_FILE_FORMATS.has(entry.format)) {
|
|
42
|
+
addError(errors, path, `unsupported format "${entry.format}"`);
|
|
43
|
+
return;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
if (!Number.isInteger(entry.bytes) || entry.bytes < 0) {
|
|
47
|
+
addError(errors, path, 'bytes must be a non-negative integer');
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
if (typeof entry.sha256 !== 'string' || !/^[a-f0-9]{64}$/.test(entry.sha256)) {
|
|
51
|
+
addError(errors, path, 'sha256 must be a 64-character lowercase hex digest');
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
if (typeof entry.content_base64 !== 'string' || entry.content_base64.length === 0) {
|
|
55
|
+
addError(errors, path, 'content_base64 must be a non-empty string');
|
|
56
|
+
return;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
let buffer;
|
|
60
|
+
try {
|
|
61
|
+
buffer = Buffer.from(entry.content_base64, 'base64');
|
|
62
|
+
} catch (error) {
|
|
63
|
+
addError(errors, path, `content_base64 is not valid base64: ${error.message}`);
|
|
64
|
+
return;
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
if (buffer.byteLength !== entry.bytes) {
|
|
68
|
+
addError(errors, path, `bytes mismatch: expected ${entry.bytes}, got ${buffer.byteLength}`);
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
if (sha256(buffer) !== entry.sha256) {
|
|
72
|
+
addError(errors, path, 'sha256 does not match content_base64');
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
const raw = buffer.toString('utf8');
|
|
76
|
+
|
|
77
|
+
try {
|
|
78
|
+
if (entry.format === 'json') {
|
|
79
|
+
const parsed = JSON.parse(raw);
|
|
80
|
+
if (!isDeepStrictEqual(parsed, entry.data)) {
|
|
81
|
+
addError(errors, path, 'data does not match decoded JSON content');
|
|
82
|
+
}
|
|
83
|
+
return;
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
if (entry.format === 'jsonl') {
|
|
87
|
+
const parsed = parseJsonl(raw, relPath);
|
|
88
|
+
if (!isDeepStrictEqual(parsed, entry.data)) {
|
|
89
|
+
addError(errors, path, 'data does not match decoded JSONL content');
|
|
90
|
+
}
|
|
91
|
+
return;
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
if (raw !== entry.data) {
|
|
95
|
+
addError(errors, path, 'data does not match decoded text content');
|
|
96
|
+
}
|
|
97
|
+
} catch (error) {
|
|
98
|
+
addError(errors, path, error.message);
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
function verifyFilesMap(files, errors) {
|
|
103
|
+
if (!files || typeof files !== 'object' || Array.isArray(files)) {
|
|
104
|
+
addError(errors, 'files', 'must be an object keyed by relative path');
|
|
105
|
+
return;
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
for (const [relPath, entry] of Object.entries(files)) {
|
|
109
|
+
verifyFileEntry(relPath, entry, errors);
|
|
110
|
+
}
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
function countJsonl(files, relPath) {
|
|
114
|
+
return Array.isArray(files?.[relPath]?.data) ? files[relPath].data.length : 0;
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
function countDirectoryFiles(files, prefix) {
|
|
118
|
+
return Object.keys(files || {}).filter((path) => path.startsWith(`${prefix}/`)).length;
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
function verifyRunExport(artifact, errors) {
|
|
122
|
+
if (typeof artifact.project_root !== 'string' || artifact.project_root.length === 0) {
|
|
123
|
+
addError(errors, 'project_root', 'must be a non-empty string');
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
if (!artifact.project || typeof artifact.project !== 'object' || Array.isArray(artifact.project)) {
|
|
127
|
+
addError(errors, 'project', 'must be an object');
|
|
128
|
+
} else {
|
|
129
|
+
const expectedProtocolMode = artifact.config?.protocol_mode
|
|
130
|
+
|| artifact.state?.protocol_mode
|
|
131
|
+
|| 'governed';
|
|
132
|
+
if (artifact.project.id !== artifact.config?.project?.id) {
|
|
133
|
+
addError(errors, 'project.id', 'must match config.project.id');
|
|
134
|
+
}
|
|
135
|
+
if (artifact.project.name !== artifact.config?.project?.name) {
|
|
136
|
+
addError(errors, 'project.name', 'must match config.project.name');
|
|
137
|
+
}
|
|
138
|
+
if (artifact.project.template !== (artifact.config?.template || 'generic')) {
|
|
139
|
+
addError(errors, 'project.template', 'must match config.template or implicit generic');
|
|
140
|
+
}
|
|
141
|
+
if (artifact.project.protocol_mode !== expectedProtocolMode) {
|
|
142
|
+
addError(errors, 'project.protocol_mode', 'must match exported protocol mode');
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
if (!artifact.summary || typeof artifact.summary !== 'object' || Array.isArray(artifact.summary)) {
|
|
147
|
+
addError(errors, 'summary', 'must be an object');
|
|
148
|
+
return;
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
if (!artifact.state || typeof artifact.state !== 'object' || Array.isArray(artifact.state)) {
|
|
152
|
+
addError(errors, 'state', 'must be an object');
|
|
153
|
+
return;
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
if (!isDeepStrictEqual(artifact.config, artifact.files?.['agentxchain.json']?.data)) {
|
|
157
|
+
addError(errors, 'config', 'must match files.agentxchain.json.data');
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
if (!isDeepStrictEqual(artifact.state, artifact.files?.['.agentxchain/state.json']?.data)) {
|
|
161
|
+
addError(errors, 'state', 'must match files..agentxchain/state.json.data');
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
const activeTurnIds = Object.keys(artifact.state.active_turns || {}).sort((a, b) => a.localeCompare(b, 'en'));
|
|
165
|
+
const retainedTurnIds = Object.keys(artifact.state.retained_turns || {}).sort((a, b) => a.localeCompare(b, 'en'));
|
|
166
|
+
|
|
167
|
+
if (!isDeepStrictEqual(artifact.summary.active_turn_ids, activeTurnIds)) {
|
|
168
|
+
addError(errors, 'summary.active_turn_ids', 'must match sorted state.active_turns keys');
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
if (!isDeepStrictEqual(artifact.summary.retained_turn_ids, retainedTurnIds)) {
|
|
172
|
+
addError(errors, 'summary.retained_turn_ids', 'must match sorted state.retained_turns keys');
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
if (artifact.summary.run_id !== (artifact.state.run_id || null)) {
|
|
176
|
+
addError(errors, 'summary.run_id', 'must match state.run_id');
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
if (artifact.summary.status !== (artifact.state.status || null)) {
|
|
180
|
+
addError(errors, 'summary.status', 'must match state.status');
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
if (artifact.summary.phase !== (artifact.state.phase || null)) {
|
|
184
|
+
addError(errors, 'summary.phase', 'must match state.phase');
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
const expectedHistoryEntries = countJsonl(artifact.files, '.agentxchain/history.jsonl');
|
|
188
|
+
const expectedDecisionEntries = countJsonl(artifact.files, '.agentxchain/decision-ledger.jsonl');
|
|
189
|
+
const expectedHookAuditEntries = countJsonl(artifact.files, '.agentxchain/hook-audit.jsonl');
|
|
190
|
+
const expectedDispatchFiles = countDirectoryFiles(artifact.files, '.agentxchain/dispatch');
|
|
191
|
+
const expectedStagingFiles = countDirectoryFiles(artifact.files, '.agentxchain/staging');
|
|
192
|
+
const expectedIntakePresent = Object.keys(artifact.files).some((path) => path.startsWith('.agentxchain/intake/'));
|
|
193
|
+
const expectedCoordinatorPresent = Object.keys(artifact.files).some((path) => path.startsWith('.agentxchain/multirepo/'));
|
|
194
|
+
|
|
195
|
+
if (artifact.summary.history_entries !== expectedHistoryEntries) {
|
|
196
|
+
addError(errors, 'summary.history_entries', 'must match .agentxchain/history.jsonl entry count');
|
|
197
|
+
}
|
|
198
|
+
if (artifact.summary.decision_entries !== expectedDecisionEntries) {
|
|
199
|
+
addError(errors, 'summary.decision_entries', 'must match .agentxchain/decision-ledger.jsonl entry count');
|
|
200
|
+
}
|
|
201
|
+
if (artifact.summary.hook_audit_entries !== expectedHookAuditEntries) {
|
|
202
|
+
addError(errors, 'summary.hook_audit_entries', 'must match .agentxchain/hook-audit.jsonl entry count');
|
|
203
|
+
}
|
|
204
|
+
if (artifact.summary.dispatch_artifact_files !== expectedDispatchFiles) {
|
|
205
|
+
addError(errors, 'summary.dispatch_artifact_files', 'must match .agentxchain/dispatch file count');
|
|
206
|
+
}
|
|
207
|
+
if (artifact.summary.staging_artifact_files !== expectedStagingFiles) {
|
|
208
|
+
addError(errors, 'summary.staging_artifact_files', 'must match .agentxchain/staging file count');
|
|
209
|
+
}
|
|
210
|
+
if (artifact.summary.intake_present !== expectedIntakePresent) {
|
|
211
|
+
addError(errors, 'summary.intake_present', 'must match intake file presence');
|
|
212
|
+
}
|
|
213
|
+
if (artifact.summary.coordinator_present !== expectedCoordinatorPresent) {
|
|
214
|
+
addError(errors, 'summary.coordinator_present', 'must match multirepo file presence');
|
|
215
|
+
}
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
function verifyCoordinatorExport(artifact, errors) {
|
|
219
|
+
if (typeof artifact.workspace_root !== 'string' || artifact.workspace_root.length === 0) {
|
|
220
|
+
addError(errors, 'workspace_root', 'must be a non-empty string');
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
if (!artifact.coordinator || typeof artifact.coordinator !== 'object' || Array.isArray(artifact.coordinator)) {
|
|
224
|
+
addError(errors, 'coordinator', 'must be an object');
|
|
225
|
+
} else {
|
|
226
|
+
if (artifact.coordinator.project_id !== (artifact.config?.project?.id || null)) {
|
|
227
|
+
addError(errors, 'coordinator.project_id', 'must match config.project.id');
|
|
228
|
+
}
|
|
229
|
+
if (artifact.coordinator.project_name !== (artifact.config?.project?.name || null)) {
|
|
230
|
+
addError(errors, 'coordinator.project_name', 'must match config.project.name');
|
|
231
|
+
}
|
|
232
|
+
|
|
233
|
+
const expectedRepoCount = Object.keys(artifact.config?.repos || {}).length;
|
|
234
|
+
const expectedWorkstreamCount = Object.keys(artifact.config?.workstreams || {}).length;
|
|
235
|
+
if (artifact.coordinator.repo_count !== expectedRepoCount) {
|
|
236
|
+
addError(errors, 'coordinator.repo_count', 'must match config.repos size');
|
|
237
|
+
}
|
|
238
|
+
if (artifact.coordinator.workstream_count !== expectedWorkstreamCount) {
|
|
239
|
+
addError(errors, 'coordinator.workstream_count', 'must match config.workstreams size');
|
|
240
|
+
}
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
if (!artifact.summary || typeof artifact.summary !== 'object' || Array.isArray(artifact.summary)) {
|
|
244
|
+
addError(errors, 'summary', 'must be an object');
|
|
245
|
+
} else {
|
|
246
|
+
const coordinatorState = artifact.files?.['.agentxchain/multirepo/state.json']?.data || null;
|
|
247
|
+
const expectedStatuses = {};
|
|
248
|
+
if (coordinatorState?.repo_runs && typeof coordinatorState.repo_runs === 'object') {
|
|
249
|
+
for (const [repoId, repoRun] of Object.entries(coordinatorState.repo_runs)) {
|
|
250
|
+
expectedStatuses[repoId] = repoRun.status || 'unknown';
|
|
251
|
+
}
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
const barriers = artifact.files?.['.agentxchain/multirepo/barriers.json']?.data;
|
|
255
|
+
const expectedBarrierCount = barriers && typeof barriers === 'object' && !Array.isArray(barriers)
|
|
256
|
+
? Object.keys(barriers).length
|
|
257
|
+
: 0;
|
|
258
|
+
|
|
259
|
+
if (artifact.summary.super_run_id !== (coordinatorState?.super_run_id || null)) {
|
|
260
|
+
addError(errors, 'summary.super_run_id', 'must match coordinator state super_run_id');
|
|
261
|
+
}
|
|
262
|
+
if (artifact.summary.status !== (coordinatorState?.status || null)) {
|
|
263
|
+
addError(errors, 'summary.status', 'must match coordinator state status');
|
|
264
|
+
}
|
|
265
|
+
if (artifact.summary.phase !== (coordinatorState?.phase || null)) {
|
|
266
|
+
addError(errors, 'summary.phase', 'must match coordinator state phase');
|
|
267
|
+
}
|
|
268
|
+
if (!isDeepStrictEqual(artifact.summary.repo_run_statuses, expectedStatuses)) {
|
|
269
|
+
addError(errors, 'summary.repo_run_statuses', 'must match coordinator state repo run statuses');
|
|
270
|
+
}
|
|
271
|
+
if (artifact.summary.barrier_count !== expectedBarrierCount) {
|
|
272
|
+
addError(errors, 'summary.barrier_count', 'must match barriers.json object size');
|
|
273
|
+
}
|
|
274
|
+
if (artifact.summary.history_entries !== countJsonl(artifact.files, '.agentxchain/multirepo/history.jsonl')) {
|
|
275
|
+
addError(errors, 'summary.history_entries', 'must match multirepo history entry count');
|
|
276
|
+
}
|
|
277
|
+
if (artifact.summary.decision_entries !== countJsonl(artifact.files, '.agentxchain/multirepo/decision-ledger.jsonl')) {
|
|
278
|
+
addError(errors, 'summary.decision_entries', 'must match multirepo decision entry count');
|
|
279
|
+
}
|
|
280
|
+
}
|
|
281
|
+
|
|
282
|
+
if (!isDeepStrictEqual(artifact.config, artifact.files?.['agentxchain-multi.json']?.data)) {
|
|
283
|
+
addError(errors, 'config', 'must match files.agentxchain-multi.json.data');
|
|
284
|
+
}
|
|
285
|
+
|
|
286
|
+
if (!artifact.repos || typeof artifact.repos !== 'object' || Array.isArray(artifact.repos)) {
|
|
287
|
+
addError(errors, 'repos', 'must be an object');
|
|
288
|
+
return;
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
for (const [repoId, repoEntry] of Object.entries(artifact.repos)) {
|
|
292
|
+
const repoPath = `repos.${repoId}`;
|
|
293
|
+
if (!repoEntry || typeof repoEntry !== 'object' || Array.isArray(repoEntry)) {
|
|
294
|
+
addError(errors, repoPath, 'must be an object');
|
|
295
|
+
continue;
|
|
296
|
+
}
|
|
297
|
+
if (typeof repoEntry.path !== 'string' || repoEntry.path.length === 0) {
|
|
298
|
+
addError(errors, `${repoPath}.path`, 'must be a non-empty string');
|
|
299
|
+
}
|
|
300
|
+
if (typeof repoEntry.ok !== 'boolean') {
|
|
301
|
+
addError(errors, `${repoPath}.ok`, 'must be a boolean');
|
|
302
|
+
continue;
|
|
303
|
+
}
|
|
304
|
+
if (!repoEntry.ok) {
|
|
305
|
+
if (typeof repoEntry.error !== 'string' || repoEntry.error.length === 0) {
|
|
306
|
+
addError(errors, `${repoPath}.error`, 'must be a non-empty string when ok is false');
|
|
307
|
+
}
|
|
308
|
+
continue;
|
|
309
|
+
}
|
|
310
|
+
if (!repoEntry.export || typeof repoEntry.export !== 'object' || Array.isArray(repoEntry.export)) {
|
|
311
|
+
addError(errors, `${repoPath}.export`, 'must be an object when ok is true');
|
|
312
|
+
continue;
|
|
313
|
+
}
|
|
314
|
+
const nested = verifyExportArtifact(repoEntry.export);
|
|
315
|
+
for (const nestedError of nested.errors) {
|
|
316
|
+
addError(errors, `${repoPath}.export`, nestedError);
|
|
317
|
+
}
|
|
318
|
+
}
|
|
319
|
+
}
|
|
320
|
+
|
|
321
|
+
/**
 * Validate an export artifact (run or coordinator) against the supported schema.
 *
 * @param {unknown} artifact - Parsed JSON export artifact to verify.
 * @returns {{ok: boolean, errors: string[], report: object}} Verification result:
 *   `ok` is true only when no errors were recorded; `report` is a stable summary
 *   shape (overall/schema_version/export_kind/file_count/repo_count/errors)
 *   suitable for JSON output.
 */
export function verifyExportArtifact(artifact) {
  const errors = [];

  // Reject non-object payloads up front; nothing else can be checked.
  if (!artifact || typeof artifact !== 'object' || Array.isArray(artifact)) {
    addError(errors, 'artifact', 'must be a JSON object');
    return {
      ok: false,
      errors,
      report: {
        overall: 'fail',
        schema_version: null,
        export_kind: null,
        file_count: 0,
        // FIX: include repo_count so the failure report has the same shape
        // as the success-path report below.
        repo_count: 0,
        errors,
      },
    };
  }

  // Top-level envelope checks. Each failed check appends an error but does
  // not short-circuit, so callers get the full list in one pass.
  if (artifact.schema_version !== SUPPORTED_EXPORT_SCHEMA_VERSION) {
    addError(errors, 'schema_version', `must be "${SUPPORTED_EXPORT_SCHEMA_VERSION}"`);
  }

  if (typeof artifact.export_kind !== 'string') {
    addError(errors, 'export_kind', 'must be a string');
  }

  if (typeof artifact.exported_at !== 'string' || Number.isNaN(Date.parse(artifact.exported_at))) {
    addError(errors, 'exported_at', 'must be a valid ISO timestamp');
  }

  if (!artifact.config || typeof artifact.config !== 'object' || Array.isArray(artifact.config)) {
    addError(errors, 'config', 'must be an object');
  }

  verifyFilesMap(artifact.files, errors);

  // Kind-specific validation: run exports and coordinator exports carry
  // different summary/repos sections.
  if (artifact.export_kind === 'agentxchain_run_export') {
    verifyRunExport(artifact, errors);
  } else if (artifact.export_kind === 'agentxchain_coordinator_export') {
    verifyCoordinatorExport(artifact, errors);
  } else {
    addError(errors, 'export_kind', `unsupported export kind "${artifact.export_kind}"`);
  }

  return {
    ok: errors.length === 0,
    errors,
    report: {
      overall: errors.length === 0 ? 'pass' : 'fail',
      schema_version: artifact.schema_version || null,
      export_kind: artifact.export_kind || null,
      file_count: artifact.files && typeof artifact.files === 'object' && !Array.isArray(artifact.files)
        ? Object.keys(artifact.files).length
        : 0,
      repo_count: artifact.repos && typeof artifact.repos === 'object' && !Array.isArray(artifact.repos)
        ? Object.keys(artifact.repos).length
        : 0,
      errors,
    },
  };
}
|
|
382
|
+
|
|
383
|
+
/**
 * Load an export artifact from a file path or stdin ("-").
 *
 * @param {string} [input] - Path to the artifact file, or "-" (or empty) for stdin.
 * @param {string} [cwd] - Base directory for resolving relative paths.
 * @returns {{ok: true, input: string, artifact: object} |
 *           {ok: false, input: string, error: string}} Never throws; read and
 *   parse failures are surfaced as `{ok: false, error}` results.
 */
export function loadExportArtifact(input, cwd = process.cwd()) {
  const source = input || '-';
  const fromStdin = source === '-';
  const inputLabel = fromStdin ? 'stdin' : resolve(cwd, source);

  // Interactive terminal with no piped data: refuse rather than block on stdin.
  if (fromStdin && process.stdin.isTTY) {
    return {
      ok: false,
      input: 'stdin',
      error: 'No export input provided. Pass --input <path> or pipe JSON on stdin.',
    };
  }

  let raw;
  try {
    // fd 0 reads the whole of piped stdin synchronously.
    raw = fromStdin ? readFileSync(0, 'utf8') : readFileSync(inputLabel, 'utf8');
  } catch (error) {
    return { ok: false, input: inputLabel, error: error.message };
  }

  try {
    return { ok: true, input: inputLabel, artifact: JSON.parse(raw) };
  } catch (error) {
    return {
      ok: false,
      input: inputLabel,
      error: `Invalid JSON export artifact: ${error.message}`,
    };
  }
}
|
|
@@ -0,0 +1,303 @@
|
|
|
1
|
+
import { createHash } from 'node:crypto';
|
|
2
|
+
import { existsSync, readFileSync, readdirSync, statSync } from 'node:fs';
|
|
3
|
+
import { join, relative, resolve } from 'node:path';
|
|
4
|
+
|
|
5
|
+
import { loadProjectContext, loadProjectState } from './config.js';
|
|
6
|
+
import { loadCoordinatorConfig, COORDINATOR_CONFIG_FILE } from './coordinator-config.js';
|
|
7
|
+
import { loadCoordinatorState } from './coordinator-state.js';
|
|
8
|
+
|
|
9
|
+
// Version stamped into every export artifact's schema_version field; the
// verifier accepts exactly this value.
const EXPORT_SCHEMA_VERSION = '0.2';

// Workspace-level paths bundled into a coordinator (multi-repo) export:
// the coordinator config plus multirepo state and ledger files.
const COORDINATOR_INCLUDED_ROOTS = [
  'agentxchain-multi.json',
  '.agentxchain/multirepo/state.json',
  '.agentxchain/multirepo/history.jsonl',
  '.agentxchain/multirepo/barriers.json',
  '.agentxchain/multirepo/decision-ledger.jsonl',
  '.agentxchain/multirepo/barrier-ledger.jsonl',
];

// Project-level paths bundled into a single-repo run export. Entries may be
// individual files or whole directories (directories are walked recursively).
const INCLUDED_ROOTS = [
  'agentxchain.json',
  '.agentxchain/state.json',
  '.agentxchain/history.jsonl',
  '.agentxchain/decision-ledger.jsonl',
  '.agentxchain/hook-audit.jsonl',
  '.agentxchain/hook-annotations.jsonl',
  '.agentxchain/dispatch',
  '.agentxchain/staging',
  '.agentxchain/transactions/accept',
  '.agentxchain/intake',
  '.agentxchain/multirepo',
];
|
|
33
|
+
|
|
34
|
+
/**
 * Compute the hex-encoded SHA-256 digest of a buffer (or string).
 *
 * @param {Buffer|string} buffer - Bytes to hash.
 * @returns {string} Lowercase hex digest.
 */
function sha256(buffer) {
  const hasher = createHash('sha256');
  hasher.update(buffer);
  return hasher.digest('hex');
}
|
|
37
|
+
|
|
38
|
+
/**
 * Recursively collect file paths under `root`/`relPath`.
 *
 * @param {string} root - Absolute base directory.
 * @param {string} relPath - Path relative to `root`; may be a file or directory.
 * @returns {string[]} Relative file paths (using "/" separators), with each
 *   directory's entries in locale-sorted ("en") order. Missing paths and
 *   non-file/non-directory entries yield no results.
 */
function collectPaths(root, relPath) {
  const absPath = join(root, relPath);
  if (!existsSync(absPath)) {
    return [];
  }

  const stats = statSync(absPath);
  if (stats.isFile()) {
    return [relPath];
  }
  if (!stats.isDirectory()) {
    // Sockets, FIFOs, etc. are deliberately excluded.
    return [];
  }

  return readdirSync(absPath, { withFileTypes: true })
    .sort((left, right) => left.name.localeCompare(right.name, 'en'))
    .flatMap((entry) => {
      const childRel = `${relPath}/${entry.name}`;
      if (entry.isDirectory()) {
        return collectPaths(root, childRel);
      }
      return entry.isFile() ? [childRel] : [];
    });
}
|
|
67
|
+
|
|
68
|
+
/**
 * Parse a JSONL (one JSON value per line) document.
 *
 * @param {string} relPath - Path used only for error messages.
 * @param {string} raw - Raw file contents.
 * @returns {unknown[]} One parsed value per non-blank line; [] for blank input.
 * @throws {Error} On the first invalid line, reporting its 1-based position
 *   among the non-blank lines.
 */
function parseJsonl(relPath, raw) {
  if (!raw.trim()) {
    return [];
  }

  const records = [];
  // Counts only non-blank lines, matching the indexing of the parsed output.
  let entryIndex = 0;
  for (const line of raw.split('\n')) {
    if (!line.trim()) {
      continue;
    }
    entryIndex += 1;
    try {
      records.push(JSON.parse(line));
    } catch (error) {
      throw new Error(`${relPath}: invalid JSONL at line ${entryIndex}: ${error.message}`);
    }
  }
  return records;
}
|
|
84
|
+
|
|
85
|
+
/**
 * Read a file and build its export entry: format tag, size, SHA-256 digest,
 * base64 content, and parsed data (JSON object, JSONL array, or raw text).
 *
 * @param {string} root - Absolute base directory.
 * @param {string} relPath - File path relative to `root`; the extension
 *   (.json / .jsonl / other) selects the parse format.
 * @returns {{format: string, bytes: number, sha256: string,
 *            content_base64: string, data: unknown}}
 * @throws {Error} When a .json file has invalid JSON or a .jsonl file has an
 *   invalid line.
 */
function parseFile(root, relPath) {
  const buffer = readFileSync(join(root, relPath));
  const text = buffer.toString('utf8');

  let format;
  let data;
  if (relPath.endsWith('.json')) {
    format = 'json';
    try {
      data = JSON.parse(text);
    } catch (error) {
      throw new Error(`${relPath}: invalid JSON: ${error.message}`);
    }
  } else if (relPath.endsWith('.jsonl')) {
    format = 'jsonl';
    data = parseJsonl(relPath, text);
  } else {
    format = 'text';
    data = text;
  }

  return {
    format,
    bytes: buffer.byteLength,
    sha256: sha256(buffer),
    content_base64: buffer.toString('base64'),
    data,
  };
}
|
|
113
|
+
|
|
114
|
+
/**
 * Count parsed JSONL entries for a collected file, or 0 when the file is
 * absent or its data is not an array.
 *
 * @param {Record<string, {data?: unknown}>} files - Collected file map.
 * @param {string} relPath - Key of the JSONL file in `files`.
 * @returns {number}
 */
function countJsonl(files, relPath) {
  const entries = files[relPath]?.data;
  return Array.isArray(entries) ? entries.length : 0;
}
|
|
117
|
+
|
|
118
|
+
/**
 * Count collected files that live under a directory prefix.
 *
 * @param {Record<string, unknown>} files - Collected file map keyed by path.
 * @param {string} prefix - Directory path without trailing slash; only keys
 *   strictly inside `prefix/` are counted (a key equal to `prefix` is not).
 * @returns {number}
 */
function countDirectoryFiles(files, prefix) {
  const dirPrefix = `${prefix}/`;
  let total = 0;
  for (const path of Object.keys(files)) {
    if (path.startsWith(dirPrefix)) {
      total += 1;
    }
  }
  return total;
}
|
|
121
|
+
|
|
122
|
+
/**
 * Build a single-repo run export artifact for the governed project containing
 * `startDir`.
 *
 * @param {string} [startDir] - Directory to search from (defaults to cwd).
 * @returns {{ok: true, export: object} | {ok: false, error: string}}
 *   On success, `export` carries the schema envelope, project metadata,
 *   summary counts, collected files, raw config, and state. Fails with a
 *   message when no governed project is found or the project is not in
 *   governed protocol mode.
 *
 * NOTE: parseFile can throw on corrupt .json/.jsonl files; that propagates
 * to the caller rather than being converted to {ok: false}.
 */
export function buildRunExport(startDir = process.cwd()) {
  const context = loadProjectContext(startDir);
  if (!context) {
    return {
      ok: false,
      error: 'No governed project found. Run this inside an AgentXchain governed project.',
    };
  }

  // Only governed-mode projects are exportable in this slice.
  if (context.config.protocol_mode !== 'governed') {
    return {
      ok: false,
      error: 'Run export only supports governed projects in this slice.',
    };
  }

  const { root, rawConfig, config, version } = context;
  const state = loadProjectState(root, config);

  // Walk every included root, dedupe, and sort for deterministic output.
  const collectedPaths = [...new Set(INCLUDED_ROOTS.flatMap((relPath) => collectPaths(root, relPath)))]
    .sort((a, b) => a.localeCompare(b, 'en'));

  const files = {};
  for (const relPath of collectedPaths) {
    files[relPath] = parseFile(root, relPath);
  }

  // Turn IDs are sorted so repeated exports of the same state are stable.
  const activeTurns = Object.keys(state?.active_turns || {}).sort((a, b) => a.localeCompare(b, 'en'));
  const retainedTurns = Object.keys(state?.retained_turns || {}).sort((a, b) => a.localeCompare(b, 'en'));

  return {
    ok: true,
    export: {
      schema_version: EXPORT_SCHEMA_VERSION,
      export_kind: 'agentxchain_run_export',
      exported_at: new Date().toISOString(),
      // Relative to the invoking cwd; '.' when exporting from the root itself.
      project_root: relative(process.cwd(), root) || '.',
      project: {
        id: config.project.id,
        name: config.project.name,
        template: config.template || 'generic',
        protocol_mode: config.protocol_mode,
        schema_version: version,
      },
      summary: {
        run_id: state?.run_id || null,
        status: state?.status || null,
        phase: state?.phase || null,
        active_turn_ids: activeTurns,
        retained_turn_ids: retainedTurns,
        history_entries: countJsonl(files, '.agentxchain/history.jsonl'),
        decision_entries: countJsonl(files, '.agentxchain/decision-ledger.jsonl'),
        hook_audit_entries: countJsonl(files, '.agentxchain/hook-audit.jsonl'),
        dispatch_artifact_files: countDirectoryFiles(files, '.agentxchain/dispatch'),
        staging_artifact_files: countDirectoryFiles(files, '.agentxchain/staging'),
        intake_present: Object.keys(files).some((path) => path.startsWith('.agentxchain/intake/')),
        coordinator_present: Object.keys(files).some((path) => path.startsWith('.agentxchain/multirepo/')),
      },
      files,
      config: rawConfig,
      state,
    },
  };
}
|
|
186
|
+
|
|
187
|
+
/**
 * Build a coordinator (multi-repo) export artifact for the workspace at
 * `startDir`.
 *
 * @param {string} [startDir] - Workspace root containing the coordinator
 *   config file (defaults to cwd).
 * @returns {{ok: true, export: object} | {ok: false, error: string}}
 *   On success, `export` bundles workspace metadata, a summary derived from
 *   coordinator state, the collected multirepo files, the raw config, and a
 *   per-repo map of nested run exports (or per-repo errors).
 */
export function buildCoordinatorExport(startDir = process.cwd()) {
  const workspaceRoot = resolve(startDir);
  const configPath = join(workspaceRoot, COORDINATOR_CONFIG_FILE);

  if (!existsSync(configPath)) {
    return {
      ok: false,
      error: `No ${COORDINATOR_CONFIG_FILE} found at ${workspaceRoot}.`,
    };
  }

  let rawConfig;
  try {
    rawConfig = JSON.parse(readFileSync(configPath, 'utf8'));
  } catch (err) {
    return {
      ok: false,
      error: `Invalid JSON in ${COORDINATOR_CONFIG_FILE}: ${err.message}`,
    };
  }

  // NOTE(review): normalizedConfig is computed but never read below —
  // confirm whether the normalized form was meant to be included in the
  // export, or whether this call can be dropped.
  const configResult = loadCoordinatorConfig(workspaceRoot);
  const normalizedConfig = configResult.ok ? configResult.config : null;

  // Collect coordinator-level files, deduped and sorted for stable output.
  const collectedPaths = [...new Set(
    COORDINATOR_INCLUDED_ROOTS.flatMap((relPath) => collectPaths(workspaceRoot, relPath)),
  )].sort((a, b) => a.localeCompare(b, 'en'));

  const files = {};
  for (const relPath of collectedPaths) {
    files[relPath] = parseFile(workspaceRoot, relPath);
  }

  // Load coordinator state for summary fields (super_run_id, status, phase).
  const coordState = loadCoordinatorState(workspaceRoot);

  // Map each repo run to its status, defaulting missing statuses to 'unknown'.
  const repoRunStatuses = {};
  if (coordState?.repo_runs) {
    for (const [repoId, repoRun] of Object.entries(coordState.repo_runs)) {
      repoRunStatuses[repoId] = repoRun.status || 'unknown';
    }
  }

  // Count barriers from barriers.json (only when it parsed as JSON).
  let barrierCount = 0;
  const barriersKey = '.agentxchain/multirepo/barriers.json';
  if (files[barriersKey]?.format === 'json' && files[barriersKey]?.data) {
    barrierCount = Object.keys(files[barriersKey].data).length;
  }

  // Build one nested run export per configured repo. Per-repo failures are
  // captured as {ok: false, error} entries instead of aborting the export.
  const repos = {};
  const repoEntries = rawConfig.repos && typeof rawConfig.repos === 'object'
    ? Object.entries(rawConfig.repos)
    : [];

  for (const [repoId, repoDef] of repoEntries) {
    const repoPath = repoDef?.path || '';
    const resolvedPath = resolve(workspaceRoot, repoPath);

    try {
      const childExport = buildRunExport(resolvedPath);
      if (childExport.ok) {
        repos[repoId] = {
          ok: true,
          path: repoPath,
          export: childExport.export,
        };
      } else {
        repos[repoId] = {
          ok: false,
          path: repoPath,
          error: childExport.error,
        };
      }
    } catch (err) {
      repos[repoId] = {
        ok: false,
        path: repoPath,
        error: err.message || String(err),
      };
    }
  }

  return {
    ok: true,
    export: {
      schema_version: EXPORT_SCHEMA_VERSION,
      export_kind: 'agentxchain_coordinator_export',
      exported_at: new Date().toISOString(),
      // Relative to the invoking cwd; '.' when run from the workspace root.
      workspace_root: relative(process.cwd(), workspaceRoot) || '.',
      coordinator: {
        project_id: rawConfig.project?.id || null,
        project_name: rawConfig.project?.name || null,
        schema_version: rawConfig.schema_version || null,
        repo_count: repoEntries.length,
        workstream_count: rawConfig.workstreams
          ? Object.keys(rawConfig.workstreams).length
          : 0,
      },
      summary: {
        super_run_id: coordState?.super_run_id || null,
        status: coordState?.status || null,
        phase: coordState?.phase || null,
        repo_run_statuses: repoRunStatuses,
        barrier_count: barrierCount,
        history_entries: countJsonl(files, '.agentxchain/multirepo/history.jsonl'),
        decision_entries: countJsonl(files, '.agentxchain/multirepo/decision-ledger.jsonl'),
      },
      files,
      config: rawConfig,
      repos,
    },
  };
}
|
|
@@ -19,6 +19,7 @@ const VALID_WRITE_AUTHORITIES = ['authoritative', 'proposed', 'review_only'];
|
|
|
19
19
|
const VALID_RUNTIME_TYPES = ['manual', 'local_cli', 'api_proxy', 'mcp'];
|
|
20
20
|
const VALID_API_PROXY_PROVIDERS = ['anthropic', 'openai'];
|
|
21
21
|
const VALID_PROMPT_TRANSPORTS = ['argv', 'stdin', 'dispatch_bundle_only'];
|
|
22
|
+
const VALID_MCP_TRANSPORTS = ['stdio', 'streamable_http'];
|
|
22
23
|
const VALID_PHASES = ['planning', 'implementation', 'qa'];
|
|
23
24
|
const VALID_API_PROXY_RETRY_JITTER = ['none', 'full'];
|
|
24
25
|
const VALID_API_PROXY_RETRY_CLASSES = [
|
|
@@ -47,8 +48,60 @@ const VALID_API_PROXY_PREFLIGHT_FIELDS = [
|
|
|
47
48
|
];
|
|
48
49
|
|
|
49
50
|
function validateMcpRuntime(runtimeId, runtime, errors) {
|
|
51
|
+
const transport = typeof runtime?.transport === 'string' && runtime.transport.trim()
|
|
52
|
+
? runtime.transport.trim()
|
|
53
|
+
: 'stdio';
|
|
50
54
|
const command = runtime?.command;
|
|
51
55
|
|
|
56
|
+
if (!VALID_MCP_TRANSPORTS.includes(transport)) {
|
|
57
|
+
errors.push(`Runtime "${runtimeId}": mcp transport must be one of: ${VALID_MCP_TRANSPORTS.join(', ')}`);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
if ('tool_name' in runtime && (typeof runtime.tool_name !== 'string' || !runtime.tool_name.trim())) {
|
|
61
|
+
errors.push(`Runtime "${runtimeId}": mcp tool_name must be a non-empty string`);
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
if (transport === 'streamable_http') {
|
|
65
|
+
if (typeof runtime?.url !== 'string' || !runtime.url.trim()) {
|
|
66
|
+
errors.push(`Runtime "${runtimeId}": mcp streamable_http requires "url"`);
|
|
67
|
+
} else {
|
|
68
|
+
try {
|
|
69
|
+
const parsed = new URL(runtime.url);
|
|
70
|
+
if (parsed.protocol !== 'http:' && parsed.protocol !== 'https:') {
|
|
71
|
+
errors.push(`Runtime "${runtimeId}": mcp url must use http or https`);
|
|
72
|
+
}
|
|
73
|
+
} catch {
|
|
74
|
+
errors.push(`Runtime "${runtimeId}": mcp url must be a valid absolute URL`);
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
if ('headers' in runtime) {
|
|
79
|
+
if (!runtime.headers || typeof runtime.headers !== 'object' || Array.isArray(runtime.headers)) {
|
|
80
|
+
errors.push(`Runtime "${runtimeId}": mcp headers must be an object of string values`);
|
|
81
|
+
} else {
|
|
82
|
+
for (const [key, value] of Object.entries(runtime.headers)) {
|
|
83
|
+
if (typeof value !== 'string') {
|
|
84
|
+
errors.push(`Runtime "${runtimeId}": mcp headers["${key}"] must be a string`);
|
|
85
|
+
}
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
if ('command' in runtime) {
|
|
91
|
+
errors.push(`Runtime "${runtimeId}": mcp streamable_http does not accept "command"`);
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
if ('args' in runtime) {
|
|
95
|
+
errors.push(`Runtime "${runtimeId}": mcp streamable_http does not accept "args"`);
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
if ('cwd' in runtime) {
|
|
99
|
+
errors.push(`Runtime "${runtimeId}": mcp streamable_http does not accept "cwd"`);
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
return;
|
|
103
|
+
}
|
|
104
|
+
|
|
52
105
|
if (typeof command === 'string') {
|
|
53
106
|
if (!command.trim()) {
|
|
54
107
|
errors.push(`Runtime "${runtimeId}": mcp command must be a non-empty string`);
|
|
@@ -67,13 +120,17 @@ function validateMcpRuntime(runtimeId, runtime, errors) {
|
|
|
67
120
|
}
|
|
68
121
|
}
|
|
69
122
|
|
|
70
|
-
if ('tool_name' in runtime && (typeof runtime.tool_name !== 'string' || !runtime.tool_name.trim())) {
|
|
71
|
-
errors.push(`Runtime "${runtimeId}": mcp tool_name must be a non-empty string`);
|
|
72
|
-
}
|
|
73
|
-
|
|
74
123
|
if ('cwd' in runtime && (typeof runtime.cwd !== 'string' || !runtime.cwd.trim())) {
|
|
75
124
|
errors.push(`Runtime "${runtimeId}": mcp cwd must be a non-empty string`);
|
|
76
125
|
}
|
|
126
|
+
|
|
127
|
+
if ('url' in runtime) {
|
|
128
|
+
errors.push(`Runtime "${runtimeId}": mcp stdio does not accept "url"`);
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
if ('headers' in runtime) {
|
|
132
|
+
errors.push(`Runtime "${runtimeId}": mcp stdio does not accept "headers"`);
|
|
133
|
+
}
|
|
77
134
|
}
|
|
78
135
|
|
|
79
136
|
function validateApiProxyRetryPolicy(runtimeId, retryPolicy, errors) {
|