agentxchain 2.5.0 → 2.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/agentxchain.js +16 -1
- package/package.json +1 -1
- package/src/commands/export.js +63 -0
- package/src/commands/verify.js +60 -0
- package/src/lib/export-verifier.js +422 -0
- package/src/lib/export.js +303 -0
package/bin/agentxchain.js
CHANGED
|
@@ -59,7 +59,7 @@ import { generateCommand } from '../src/commands/generate.js';
|
|
|
59
59
|
import { doctorCommand } from '../src/commands/doctor.js';
|
|
60
60
|
import { superviseCommand } from '../src/commands/supervise.js';
|
|
61
61
|
import { validateCommand } from '../src/commands/validate.js';
|
|
62
|
-
import { verifyProtocolCommand } from '../src/commands/verify.js';
|
|
62
|
+
import { verifyExportCommand, verifyProtocolCommand } from '../src/commands/verify.js';
|
|
63
63
|
import { kickoffCommand } from '../src/commands/kickoff.js';
|
|
64
64
|
import { rebindCommand } from '../src/commands/rebind.js';
|
|
65
65
|
import { branchCommand } from '../src/commands/branch.js';
|
|
@@ -71,6 +71,7 @@ import { stepCommand } from '../src/commands/step.js';
|
|
|
71
71
|
import { approveTransitionCommand } from '../src/commands/approve-transition.js';
|
|
72
72
|
import { approveCompletionCommand } from '../src/commands/approve-completion.js';
|
|
73
73
|
import { dashboardCommand } from '../src/commands/dashboard.js';
|
|
74
|
+
import { exportCommand } from '../src/commands/export.js';
|
|
74
75
|
import {
|
|
75
76
|
pluginInstallCommand,
|
|
76
77
|
pluginListCommand,
|
|
@@ -121,6 +122,13 @@ program
|
|
|
121
122
|
.option('-j, --json', 'Output as JSON')
|
|
122
123
|
.action(statusCommand);
|
|
123
124
|
|
|
125
|
+
program
|
|
126
|
+
.command('export')
|
|
127
|
+
.description('Export the governed run audit surface as a single artifact')
|
|
128
|
+
.option('--format <format>', 'Export format (json)', 'json')
|
|
129
|
+
.option('--output <path>', 'Write the export artifact to a file instead of stdout')
|
|
130
|
+
.action(exportCommand);
|
|
131
|
+
|
|
124
132
|
program
|
|
125
133
|
.command('start')
|
|
126
134
|
.description('Launch legacy v3 agents in your IDE')
|
|
@@ -231,6 +239,13 @@ verifyCmd
|
|
|
231
239
|
.option('--format <format>', 'Output format: text or json', 'text')
|
|
232
240
|
.action(verifyProtocolCommand);
|
|
233
241
|
|
|
242
|
+
verifyCmd
|
|
243
|
+
.command('export')
|
|
244
|
+
.description('Verify an AgentXchain export artifact against its embedded file bytes and summaries')
|
|
245
|
+
.option('--input <path>', 'Export artifact path, or "-" for stdin', '-')
|
|
246
|
+
.option('--format <format>', 'Output format: text or json', 'text')
|
|
247
|
+
.action(verifyExportCommand);
|
|
248
|
+
|
|
234
249
|
program
|
|
235
250
|
.command('migrate')
|
|
236
251
|
.description('Migrate a legacy v3 project to governed format')
|
package/package.json
CHANGED
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
import { existsSync } from 'node:fs';
|
|
2
|
+
import { join, resolve } from 'node:path';
|
|
3
|
+
|
|
4
|
+
import { buildRunExport, buildCoordinatorExport } from '../lib/export.js';
|
|
5
|
+
import { COORDINATOR_CONFIG_FILE } from '../lib/coordinator-config.js';
|
|
6
|
+
import { safeWriteJson } from '../lib/safe-write.js';
|
|
7
|
+
|
|
8
|
+
/**
 * Classify the working directory for export purposes.
 *
 * @param {string} cwd - Directory to inspect.
 * @returns {'governed'|'coordinator'|null} 'governed' when agentxchain.json
 *   exists (takes priority), 'coordinator' when the coordinator config file
 *   exists, otherwise null.
 */
function detectExportKind(cwd) {
  // A governed project marker wins over a coordinator workspace marker.
  if (existsSync(join(cwd, 'agentxchain.json'))) {
    return 'governed';
  }
  return existsSync(join(cwd, COORDINATOR_CONFIG_FILE)) ? 'coordinator' : null;
}
|
|
19
|
+
|
|
20
|
+
/**
 * `agentxchain export` action: build the audit-surface export for the current
 * directory and either print it as JSON or write it to a file.
 *
 * @param {{format?: string, output?: string}} options - Commander options.
 * Side effects: writes to stdout/stderr; sets process.exitCode = 1 on failure
 * instead of throwing. Only the "json" format is implemented.
 */
export async function exportCommand(options) {
  const format = options.format || 'json';
  if (format !== 'json') {
    console.error(`Unsupported export format "${format}". Only "json" is supported in this slice.`);
    process.exitCode = 1;
    return;
  }

  const cwd = process.cwd();
  // 'governed' | 'coordinator' | null, based on which marker file exists.
  const kind = detectExportKind(cwd);

  let result;
  try {
    if (kind === 'governed') {
      result = buildRunExport(cwd);
    } else if (kind === 'coordinator') {
      result = buildCoordinatorExport(cwd);
    } else {
      // Neither marker found — synthesize the same {ok, error} shape the
      // builders return so the failure path below handles all three cases.
      result = {
        ok: false,
        error: 'No governed project or coordinator workspace found. Run this inside an AgentXchain governed project or coordinator workspace.',
      };
    }
  } catch (error) {
    console.error(error.message || String(error));
    process.exitCode = 1;
    return;
  }

  if (!result.ok) {
    console.error(result.error);
    process.exitCode = 1;
    return;
  }

  if (options.output) {
    // Write the artifact to disk; echo the user-supplied (unresolved) path.
    const outputPath = resolve(cwd, options.output);
    safeWriteJson(outputPath, result.export);
    console.log(`Exported ${kind === 'coordinator' ? 'coordinator workspace' : 'governed run'} audit to ${options.output}`);
    return;
  }

  // Default: pretty-printed artifact on stdout for piping.
  console.log(JSON.stringify(result.export, null, 2));
}
|
package/src/commands/verify.js
CHANGED
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import chalk from 'chalk';
|
|
2
2
|
import { resolve } from 'node:path';
|
|
3
|
+
import { loadExportArtifact, verifyExportArtifact } from '../lib/export-verifier.js';
|
|
3
4
|
import { verifyProtocolConformance } from '../lib/protocol-conformance.js';
|
|
4
5
|
|
|
5
6
|
export async function verifyProtocolCommand(opts) {
|
|
@@ -35,6 +36,38 @@ export async function verifyProtocolCommand(opts) {
|
|
|
35
36
|
process.exit(result.exitCode);
|
|
36
37
|
}
|
|
37
38
|
|
|
39
|
+
/**
 * `agentxchain verify export` action: load an export artifact (file or stdin)
 * and verify its embedded bytes/summaries.
 *
 * @param {{input?: string, format?: string}} opts - Commander options;
 *   input defaults to "-" (stdin), format to "text".
 * Exit codes: 0 verified, 1 verification failed, 2 artifact could not be
 * loaded/parsed. Never returns normally (always process.exit).
 */
export async function verifyExportCommand(opts) {
  const format = opts.format || 'text';
  const loaded = loadExportArtifact(opts.input || '-', process.cwd());

  if (!loaded.ok) {
    // Load failure (missing file, bad JSON, no stdin): report and exit 2.
    if (format === 'json') {
      console.log(JSON.stringify({
        overall: 'error',
        input: loaded.input,
        message: loaded.error,
      }, null, 2));
    } else {
      console.log(chalk.red(`Export verification failed: ${loaded.error}`));
    }
    process.exit(2);
  }

  const result = verifyExportArtifact(loaded.artifact);
  // Annotate the structured report with where the artifact came from.
  const report = {
    ...result.report,
    input: loaded.input,
  };

  if (format === 'json') {
    console.log(JSON.stringify(report, null, 2));
  } else {
    printExportReport(report);
  }

  process.exit(result.ok ? 0 : 1);
}
|
|
70
|
+
|
|
38
71
|
function printProtocolReport(report) {
|
|
39
72
|
console.log('');
|
|
40
73
|
console.log(chalk.bold(' AgentXchain Protocol Conformance'));
|
|
@@ -74,3 +107,30 @@ function printProtocolReport(report) {
|
|
|
74
107
|
|
|
75
108
|
console.log('');
|
|
76
109
|
}
|
|
110
|
+
|
|
111
|
+
/**
 * Render a human-readable export verification report to stdout.
 * Expects the report shape produced by verifyExportArtifact plus `input`.
 * Pure console output; no return value.
 */
function printExportReport(report) {
  console.log('');
  console.log(chalk.bold(' AgentXchain Export Verification'));
  console.log(chalk.dim(' ' + '─'.repeat(43)));
  console.log(chalk.dim(` Input: ${report.input}`));
  console.log(chalk.dim(` Export kind: ${report.export_kind || 'unknown'}`));
  console.log(chalk.dim(` Schema: ${report.schema_version || 'unknown'}`));
  console.log('');

  // 'pass' → green PASS; 'fail' → red FAIL; anything else → red ERROR.
  const overallLabel = report.overall === 'pass'
    ? chalk.green('PASS')
    : report.overall === 'fail'
      ? chalk.red('FAIL')
      : chalk.red('ERROR');
  console.log(` Overall: ${overallLabel}`);
  console.log(chalk.dim(` Files verified: ${report.file_count}`));
  // repo_count is only meaningful (and truthy) for coordinator exports.
  if (report.repo_count) {
    console.log(chalk.dim(` Embedded repos: ${report.repo_count}`));
  }

  for (const error of report.errors || []) {
    console.log(chalk.red(` ✗ ${error}`));
  }

  console.log('');
}
|
|
@@ -0,0 +1,422 @@
|
|
|
1
|
+
import { createHash } from 'node:crypto';
|
|
2
|
+
import { readFileSync } from 'node:fs';
|
|
3
|
+
import { resolve } from 'node:path';
|
|
4
|
+
import { isDeepStrictEqual } from 'node:util';
|
|
5
|
+
|
|
6
|
+
const SUPPORTED_EXPORT_SCHEMA_VERSION = '0.2';
|
|
7
|
+
const VALID_FILE_FORMATS = new Set(['json', 'jsonl', 'text']);
|
|
8
|
+
|
|
9
|
+
/** Return the lowercase hex SHA-256 digest of the given buffer. */
function sha256(buffer) {
  const hasher = createHash('sha256');
  hasher.update(buffer);
  return hasher.digest('hex');
}
|
|
12
|
+
|
|
13
|
+
/**
 * Parse JSONL text into an array of values, skipping blank lines.
 * Throws with a relPath-prefixed message on the first invalid line; the
 * reported line number counts non-blank lines only (matches export builder).
 */
function parseJsonl(raw, relPath) {
  if (!raw.trim()) {
    return [];
  }

  const nonBlank = raw.split('\n').filter((line) => line.trim());
  const records = [];
  for (let i = 0; i < nonBlank.length; i += 1) {
    try {
      records.push(JSON.parse(nonBlank[i]));
    } catch (error) {
      throw new Error(`${relPath}: invalid JSONL at line ${i + 1}: ${error.message}`);
    }
  }
  return records;
}
|
|
29
|
+
|
|
30
|
+
/** Append a namespaced validation message ("<path>: <message>") to errors. */
function addError(errors, path, message) {
  const formatted = `${path}: ${message}`;
  errors.push(formatted);
}
|
|
33
|
+
|
|
34
|
+
/**
 * Validate one embedded file entry: structural fields, byte length, SHA-256
 * digest, and agreement between the decoded bytes and the parsed `data`.
 * Appends "<path>: <message>" strings to `errors`; returns nothing.
 *
 * Fix: a zero-byte file (e.g. an empty .jsonl ledger) legitimately has
 * `content_base64 === ''` with `bytes === 0`; the previous non-empty-string
 * requirement rejected such entries even though the export builder produces
 * them, so the verifier failed its own artifacts.
 */
function verifyFileEntry(relPath, entry, errors) {
  const path = `files.${relPath}`;
  if (!entry || typeof entry !== 'object' || Array.isArray(entry)) {
    addError(errors, path, 'file entry must be an object');
    return;
  }

  if (!VALID_FILE_FORMATS.has(entry.format)) {
    addError(errors, path, `unsupported format "${entry.format}"`);
    return;
  }

  if (!Number.isInteger(entry.bytes) || entry.bytes < 0) {
    addError(errors, path, 'bytes must be a non-negative integer');
  }

  if (typeof entry.sha256 !== 'string' || !/^[a-f0-9]{64}$/.test(entry.sha256)) {
    addError(errors, path, 'sha256 must be a 64-character lowercase hex digest');
  }

  // Empty content is only acceptable for an explicitly zero-byte file.
  if (typeof entry.content_base64 !== 'string'
    || (entry.content_base64.length === 0 && entry.bytes !== 0)) {
    addError(errors, path, 'content_base64 must be a string (non-empty unless bytes is 0)');
    return;
  }

  let buffer;
  try {
    buffer = Buffer.from(entry.content_base64, 'base64');
  } catch (error) {
    addError(errors, path, `content_base64 is not valid base64: ${error.message}`);
    return;
  }

  if (buffer.byteLength !== entry.bytes) {
    addError(errors, path, `bytes mismatch: expected ${entry.bytes}, got ${buffer.byteLength}`);
  }

  if (sha256(buffer) !== entry.sha256) {
    addError(errors, path, 'sha256 does not match content_base64');
  }

  const raw = buffer.toString('utf8');

  // Cross-check the convenience `data` field against the decoded bytes,
  // honoring the declared format. parseJsonl may throw with a descriptive
  // message, which is recorded as an error rather than propagated.
  try {
    if (entry.format === 'json') {
      const parsed = JSON.parse(raw);
      if (!isDeepStrictEqual(parsed, entry.data)) {
        addError(errors, path, 'data does not match decoded JSON content');
      }
      return;
    }

    if (entry.format === 'jsonl') {
      const parsed = parseJsonl(raw, relPath);
      if (!isDeepStrictEqual(parsed, entry.data)) {
        addError(errors, path, 'data does not match decoded JSONL content');
      }
      return;
    }

    if (raw !== entry.data) {
      addError(errors, path, 'data does not match decoded text content');
    }
  } catch (error) {
    addError(errors, path, error.message);
  }
}
|
|
101
|
+
|
|
102
|
+
/**
 * Validate the whole `files` map (relative path → file entry), delegating
 * per-entry checks to verifyFileEntry. Records one error and stops early if
 * `files` is not a plain object.
 */
function verifyFilesMap(files, errors) {
  const isPlainObject = Boolean(files) && typeof files === 'object' && !Array.isArray(files);
  if (!isPlainObject) {
    addError(errors, 'files', 'must be an object keyed by relative path');
    return;
  }

  Object.entries(files).forEach(([relPath, entry]) => {
    verifyFileEntry(relPath, entry, errors);
  });
}
|
|
112
|
+
|
|
113
|
+
/** Number of parsed records in a JSONL file entry; 0 if absent or non-array. */
function countJsonl(files, relPath) {
  const records = files?.[relPath]?.data;
  return Array.isArray(records) ? records.length : 0;
}
|
|
116
|
+
|
|
117
|
+
/** Count file-map keys that live under `prefix/` (tolerates missing map). */
function countDirectoryFiles(files, prefix) {
  const needle = `${prefix}/`;
  let total = 0;
  for (const path of Object.keys(files || {})) {
    if (path.startsWith(needle)) {
      total += 1;
    }
  }
  return total;
}
|
|
120
|
+
|
|
121
|
+
/**
 * Validate a single-project (governed run) export: project metadata must
 * agree with the embedded config, and every `summary` field must be
 * re-derivable from the embedded files/state. Appends messages to `errors`.
 *
 * Fix: the intake/coordinator presence checks called
 * Object.keys(artifact.files) without a fallback; when `files` is missing or
 * malformed (verifyFilesMap records that error but does not abort), this
 * threw a TypeError and crashed the verifier instead of reporting failure.
 */
function verifyRunExport(artifact, errors) {
  if (typeof artifact.project_root !== 'string' || artifact.project_root.length === 0) {
    addError(errors, 'project_root', 'must be a non-empty string');
  }

  if (!artifact.project || typeof artifact.project !== 'object' || Array.isArray(artifact.project)) {
    addError(errors, 'project', 'must be an object');
  } else {
    // Protocol mode may come from config, state, or default to 'governed'.
    const expectedProtocolMode = artifact.config?.protocol_mode
      || artifact.state?.protocol_mode
      || 'governed';
    if (artifact.project.id !== artifact.config?.project?.id) {
      addError(errors, 'project.id', 'must match config.project.id');
    }
    if (artifact.project.name !== artifact.config?.project?.name) {
      addError(errors, 'project.name', 'must match config.project.name');
    }
    if (artifact.project.template !== (artifact.config?.template || 'generic')) {
      addError(errors, 'project.template', 'must match config.template or implicit generic');
    }
    if (artifact.project.protocol_mode !== expectedProtocolMode) {
      addError(errors, 'project.protocol_mode', 'must match exported protocol mode');
    }
  }

  // Summary and state are prerequisites for all derived checks below.
  if (!artifact.summary || typeof artifact.summary !== 'object' || Array.isArray(artifact.summary)) {
    addError(errors, 'summary', 'must be an object');
    return;
  }

  if (!artifact.state || typeof artifact.state !== 'object' || Array.isArray(artifact.state)) {
    addError(errors, 'state', 'must be an object');
    return;
  }

  // Top-level convenience copies must byte-match the embedded source files.
  if (!isDeepStrictEqual(artifact.config, artifact.files?.['agentxchain.json']?.data)) {
    addError(errors, 'config', 'must match files.agentxchain.json.data');
  }

  if (!isDeepStrictEqual(artifact.state, artifact.files?.['.agentxchain/state.json']?.data)) {
    addError(errors, 'state', 'must match files..agentxchain/state.json.data');
  }

  const activeTurnIds = Object.keys(artifact.state.active_turns || {}).sort((a, b) => a.localeCompare(b, 'en'));
  const retainedTurnIds = Object.keys(artifact.state.retained_turns || {}).sort((a, b) => a.localeCompare(b, 'en'));

  if (!isDeepStrictEqual(artifact.summary.active_turn_ids, activeTurnIds)) {
    addError(errors, 'summary.active_turn_ids', 'must match sorted state.active_turns keys');
  }

  if (!isDeepStrictEqual(artifact.summary.retained_turn_ids, retainedTurnIds)) {
    addError(errors, 'summary.retained_turn_ids', 'must match sorted state.retained_turns keys');
  }

  if (artifact.summary.run_id !== (artifact.state.run_id || null)) {
    addError(errors, 'summary.run_id', 'must match state.run_id');
  }

  if (artifact.summary.status !== (artifact.state.status || null)) {
    addError(errors, 'summary.status', 'must match state.status');
  }

  if (artifact.summary.phase !== (artifact.state.phase || null)) {
    addError(errors, 'summary.phase', 'must match state.phase');
  }

  // Re-derive every summary count from the embedded file map. Guard with
  // (artifact.files || {}) so a missing/invalid map yields mismatch errors
  // rather than an uncaught TypeError.
  const fileKeys = Object.keys(artifact.files || {});
  const expectedHistoryEntries = countJsonl(artifact.files, '.agentxchain/history.jsonl');
  const expectedDecisionEntries = countJsonl(artifact.files, '.agentxchain/decision-ledger.jsonl');
  const expectedHookAuditEntries = countJsonl(artifact.files, '.agentxchain/hook-audit.jsonl');
  const expectedDispatchFiles = countDirectoryFiles(artifact.files, '.agentxchain/dispatch');
  const expectedStagingFiles = countDirectoryFiles(artifact.files, '.agentxchain/staging');
  const expectedIntakePresent = fileKeys.some((path) => path.startsWith('.agentxchain/intake/'));
  const expectedCoordinatorPresent = fileKeys.some((path) => path.startsWith('.agentxchain/multirepo/'));

  if (artifact.summary.history_entries !== expectedHistoryEntries) {
    addError(errors, 'summary.history_entries', 'must match .agentxchain/history.jsonl entry count');
  }
  if (artifact.summary.decision_entries !== expectedDecisionEntries) {
    addError(errors, 'summary.decision_entries', 'must match .agentxchain/decision-ledger.jsonl entry count');
  }
  if (artifact.summary.hook_audit_entries !== expectedHookAuditEntries) {
    addError(errors, 'summary.hook_audit_entries', 'must match .agentxchain/hook-audit.jsonl entry count');
  }
  if (artifact.summary.dispatch_artifact_files !== expectedDispatchFiles) {
    addError(errors, 'summary.dispatch_artifact_files', 'must match .agentxchain/dispatch file count');
  }
  if (artifact.summary.staging_artifact_files !== expectedStagingFiles) {
    addError(errors, 'summary.staging_artifact_files', 'must match .agentxchain/staging file count');
  }
  if (artifact.summary.intake_present !== expectedIntakePresent) {
    addError(errors, 'summary.intake_present', 'must match intake file presence');
  }
  if (artifact.summary.coordinator_present !== expectedCoordinatorPresent) {
    addError(errors, 'summary.coordinator_present', 'must match multirepo file presence');
  }
}
|
|
217
|
+
|
|
218
|
+
/**
 * Validate a coordinator (multi-repo) export: coordinator metadata must agree
 * with the embedded config, summaries must be re-derivable from the embedded
 * multirepo files, and each embedded per-repo export is verified recursively.
 * Appends "<path>: <message>" strings to `errors`; returns nothing.
 */
function verifyCoordinatorExport(artifact, errors) {
  if (typeof artifact.workspace_root !== 'string' || artifact.workspace_root.length === 0) {
    addError(errors, 'workspace_root', 'must be a non-empty string');
  }

  if (!artifact.coordinator || typeof artifact.coordinator !== 'object' || Array.isArray(artifact.coordinator)) {
    addError(errors, 'coordinator', 'must be an object');
  } else {
    if (artifact.coordinator.project_id !== (artifact.config?.project?.id || null)) {
      addError(errors, 'coordinator.project_id', 'must match config.project.id');
    }
    if (artifact.coordinator.project_name !== (artifact.config?.project?.name || null)) {
      addError(errors, 'coordinator.project_name', 'must match config.project.name');
    }

    // Counts are re-derived from the embedded config, never trusted as-is.
    const expectedRepoCount = Object.keys(artifact.config?.repos || {}).length;
    const expectedWorkstreamCount = Object.keys(artifact.config?.workstreams || {}).length;
    if (artifact.coordinator.repo_count !== expectedRepoCount) {
      addError(errors, 'coordinator.repo_count', 'must match config.repos size');
    }
    if (artifact.coordinator.workstream_count !== expectedWorkstreamCount) {
      addError(errors, 'coordinator.workstream_count', 'must match config.workstreams size');
    }
  }

  if (!artifact.summary || typeof artifact.summary !== 'object' || Array.isArray(artifact.summary)) {
    addError(errors, 'summary', 'must be an object');
  } else {
    // Summary fields must match the embedded coordinator state file.
    const coordinatorState = artifact.files?.['.agentxchain/multirepo/state.json']?.data || null;
    const expectedStatuses = {};
    if (coordinatorState?.repo_runs && typeof coordinatorState.repo_runs === 'object') {
      for (const [repoId, repoRun] of Object.entries(coordinatorState.repo_runs)) {
        expectedStatuses[repoId] = repoRun.status || 'unknown';
      }
    }

    const barriers = artifact.files?.['.agentxchain/multirepo/barriers.json']?.data;
    const expectedBarrierCount = barriers && typeof barriers === 'object' && !Array.isArray(barriers)
      ? Object.keys(barriers).length
      : 0;

    if (artifact.summary.super_run_id !== (coordinatorState?.super_run_id || null)) {
      addError(errors, 'summary.super_run_id', 'must match coordinator state super_run_id');
    }
    if (artifact.summary.status !== (coordinatorState?.status || null)) {
      addError(errors, 'summary.status', 'must match coordinator state status');
    }
    if (artifact.summary.phase !== (coordinatorState?.phase || null)) {
      addError(errors, 'summary.phase', 'must match coordinator state phase');
    }
    if (!isDeepStrictEqual(artifact.summary.repo_run_statuses, expectedStatuses)) {
      addError(errors, 'summary.repo_run_statuses', 'must match coordinator state repo run statuses');
    }
    if (artifact.summary.barrier_count !== expectedBarrierCount) {
      addError(errors, 'summary.barrier_count', 'must match barriers.json object size');
    }
    if (artifact.summary.history_entries !== countJsonl(artifact.files, '.agentxchain/multirepo/history.jsonl')) {
      addError(errors, 'summary.history_entries', 'must match multirepo history entry count');
    }
    if (artifact.summary.decision_entries !== countJsonl(artifact.files, '.agentxchain/multirepo/decision-ledger.jsonl')) {
      addError(errors, 'summary.decision_entries', 'must match multirepo decision entry count');
    }
  }

  if (!isDeepStrictEqual(artifact.config, artifact.files?.['agentxchain-multi.json']?.data)) {
    addError(errors, 'config', 'must match files.agentxchain-multi.json.data');
  }

  if (!artifact.repos || typeof artifact.repos !== 'object' || Array.isArray(artifact.repos)) {
    addError(errors, 'repos', 'must be an object');
    return;
  }

  // Per-repo entries: failed captures need an error string; successful ones
  // embed a full run export that is verified recursively, with nested errors
  // re-namespaced under repos.<id>.export.
  for (const [repoId, repoEntry] of Object.entries(artifact.repos)) {
    const repoPath = `repos.${repoId}`;
    if (!repoEntry || typeof repoEntry !== 'object' || Array.isArray(repoEntry)) {
      addError(errors, repoPath, 'must be an object');
      continue;
    }
    if (typeof repoEntry.path !== 'string' || repoEntry.path.length === 0) {
      addError(errors, `${repoPath}.path`, 'must be a non-empty string');
    }
    if (typeof repoEntry.ok !== 'boolean') {
      addError(errors, `${repoPath}.ok`, 'must be a boolean');
      continue;
    }
    if (!repoEntry.ok) {
      if (typeof repoEntry.error !== 'string' || repoEntry.error.length === 0) {
        addError(errors, `${repoPath}.error`, 'must be a non-empty string when ok is false');
      }
      continue;
    }
    if (!repoEntry.export || typeof repoEntry.export !== 'object' || Array.isArray(repoEntry.export)) {
      addError(errors, `${repoPath}.export`, 'must be an object when ok is true');
      continue;
    }
    const nested = verifyExportArtifact(repoEntry.export);
    for (const nestedError of nested.errors) {
      addError(errors, `${repoPath}.export`, nestedError);
    }
  }
}
|
|
320
|
+
|
|
321
|
+
/**
 * Verify a parsed export artifact (run or coordinator kind).
 *
 * @param {unknown} artifact - Parsed JSON artifact.
 * @returns {{ok: boolean, errors: string[], report: object}} `report` is the
 *   structured result consumed by the CLI (overall pass/fail, schema/kind,
 *   file_count, repo_count, and the shared `errors` array).
 */
export function verifyExportArtifact(artifact) {
  const errors = [];

  // Non-object input short-circuits with a minimal failure report.
  if (!artifact || typeof artifact !== 'object' || Array.isArray(artifact)) {
    addError(errors, 'artifact', 'must be a JSON object');
    return {
      ok: false,
      errors,
      report: {
        overall: 'fail',
        schema_version: null,
        export_kind: null,
        file_count: 0,
        errors,
      },
    };
  }

  // Envelope checks: exact schema version, kind string, parseable timestamp,
  // and a config object. Errors accumulate; verification continues.
  if (artifact.schema_version !== SUPPORTED_EXPORT_SCHEMA_VERSION) {
    addError(errors, 'schema_version', `must be "${SUPPORTED_EXPORT_SCHEMA_VERSION}"`);
  }

  if (typeof artifact.export_kind !== 'string') {
    addError(errors, 'export_kind', 'must be a string');
  }

  if (typeof artifact.exported_at !== 'string' || Number.isNaN(Date.parse(artifact.exported_at))) {
    addError(errors, 'exported_at', 'must be a valid ISO timestamp');
  }

  if (!artifact.config || typeof artifact.config !== 'object' || Array.isArray(artifact.config)) {
    addError(errors, 'config', 'must be an object');
  }

  // Byte-level verification of every embedded file (hash, size, data).
  verifyFilesMap(artifact.files, errors);

  // Kind-specific summary/consistency checks.
  if (artifact.export_kind === 'agentxchain_run_export') {
    verifyRunExport(artifact, errors);
  } else if (artifact.export_kind === 'agentxchain_coordinator_export') {
    verifyCoordinatorExport(artifact, errors);
  } else {
    addError(errors, 'export_kind', `unsupported export kind "${artifact.export_kind}"`);
  }

  return {
    ok: errors.length === 0,
    errors,
    report: {
      overall: errors.length === 0 ? 'pass' : 'fail',
      schema_version: artifact.schema_version || null,
      export_kind: artifact.export_kind || null,
      file_count: artifact.files && typeof artifact.files === 'object' && !Array.isArray(artifact.files)
        ? Object.keys(artifact.files).length
        : 0,
      repo_count: artifact.repos && typeof artifact.repos === 'object' && !Array.isArray(artifact.repos)
        ? Object.keys(artifact.repos).length
        : 0,
      errors,
    },
  };
}
|
|
382
|
+
|
|
383
|
+
/**
 * Load an export artifact from a file path or stdin ("-") and parse it.
 *
 * @param {string} input - Path, or "-" for stdin; falsy defaults to "-".
 * @param {string} [cwd] - Base for resolving relative paths.
 * @returns {{ok: true, input: string, artifact: unknown}
 *         | {ok: false, input: string, error: string}} Never throws; read
 *   and parse failures are returned as {ok: false}.
 */
export function loadExportArtifact(input, cwd = process.cwd()) {
  const source = input || '-';
  const fromStdin = source === '-';
  // Label reported back to the caller for error/report messages.
  const inputLabel = fromStdin ? 'stdin' : resolve(cwd, source);

  let raw;
  try {
    if (fromStdin) {
      // An interactive terminal means nothing was piped in — fail fast
      // instead of blocking on a read that will never complete.
      if (process.stdin.isTTY) {
        return {
          ok: false,
          input: 'stdin',
          error: 'No export input provided. Pass --input <path> or pipe JSON on stdin.',
        };
      }
      raw = readFileSync(0, 'utf8');
    } else {
      raw = readFileSync(inputLabel, 'utf8');
    }
  } catch (error) {
    return { ok: false, input: inputLabel, error: error.message };
  }

  try {
    return { ok: true, input: inputLabel, artifact: JSON.parse(raw) };
  } catch (error) {
    return { ok: false, input: inputLabel, error: `Invalid JSON export artifact: ${error.message}` };
  }
}
|
|
@@ -0,0 +1,303 @@
|
|
|
1
|
+
import { createHash } from 'node:crypto';
|
|
2
|
+
import { existsSync, readFileSync, readdirSync, statSync } from 'node:fs';
|
|
3
|
+
import { join, relative, resolve } from 'node:path';
|
|
4
|
+
|
|
5
|
+
import { loadProjectContext, loadProjectState } from './config.js';
|
|
6
|
+
import { loadCoordinatorConfig, COORDINATOR_CONFIG_FILE } from './coordinator-config.js';
|
|
7
|
+
import { loadCoordinatorState } from './coordinator-state.js';
|
|
8
|
+
|
|
9
|
+
const EXPORT_SCHEMA_VERSION = '0.2';
|
|
10
|
+
|
|
11
|
+
const COORDINATOR_INCLUDED_ROOTS = [
|
|
12
|
+
'agentxchain-multi.json',
|
|
13
|
+
'.agentxchain/multirepo/state.json',
|
|
14
|
+
'.agentxchain/multirepo/history.jsonl',
|
|
15
|
+
'.agentxchain/multirepo/barriers.json',
|
|
16
|
+
'.agentxchain/multirepo/decision-ledger.jsonl',
|
|
17
|
+
'.agentxchain/multirepo/barrier-ledger.jsonl',
|
|
18
|
+
];
|
|
19
|
+
|
|
20
|
+
const INCLUDED_ROOTS = [
|
|
21
|
+
'agentxchain.json',
|
|
22
|
+
'.agentxchain/state.json',
|
|
23
|
+
'.agentxchain/history.jsonl',
|
|
24
|
+
'.agentxchain/decision-ledger.jsonl',
|
|
25
|
+
'.agentxchain/hook-audit.jsonl',
|
|
26
|
+
'.agentxchain/hook-annotations.jsonl',
|
|
27
|
+
'.agentxchain/dispatch',
|
|
28
|
+
'.agentxchain/staging',
|
|
29
|
+
'.agentxchain/transactions/accept',
|
|
30
|
+
'.agentxchain/intake',
|
|
31
|
+
'.agentxchain/multirepo',
|
|
32
|
+
];
|
|
33
|
+
|
|
34
|
+
/** Lowercase hex SHA-256 digest of a Buffer. */
function sha256(buffer) {
  const hash = createHash('sha256');
  return hash.update(buffer).digest('hex');
}
|
|
37
|
+
|
|
38
|
+
/**
 * Recursively list files under root/relPath as root-relative "/"-joined
 * paths, deterministically ordered (en-locale name sort per directory).
 * Missing paths and non-file/non-directory entries yield [].
 */
function collectPaths(root, relPath) {
  const absPath = join(root, relPath);
  if (!existsSync(absPath)) {
    return [];
  }

  const stats = statSync(absPath);
  if (stats.isFile()) {
    return [relPath];
  }
  if (!stats.isDirectory()) {
    return [];
  }

  return readdirSync(absPath, { withFileTypes: true })
    .sort((a, b) => a.name.localeCompare(b.name, 'en'))
    .flatMap((entry) => {
      const childRelPath = `${relPath}/${entry.name}`;
      if (entry.isDirectory()) {
        return collectPaths(root, childRelPath);
      }
      return entry.isFile() ? [childRelPath] : [];
    });
}
|
|
67
|
+
|
|
68
|
+
/**
 * Parse JSONL text into an array of values, ignoring blank lines.
 * Throws with a relPath-prefixed message on the first bad line; the line
 * number counts non-blank lines only (same convention as the verifier).
 */
function parseJsonl(relPath, raw) {
  if (!raw.trim()) {
    return [];
  }

  const parsed = [];
  let recordNumber = 0;
  for (const line of raw.split('\n')) {
    if (!line.trim()) {
      continue;
    }
    recordNumber += 1;
    try {
      parsed.push(JSON.parse(line));
    } catch (error) {
      throw new Error(`${relPath}: invalid JSONL at line ${recordNumber}: ${error.message}`);
    }
  }
  return parsed;
}
|
|
84
|
+
|
|
85
|
+
/**
 * Read one file and build its export entry: declared format (by extension),
 * byte count, SHA-256 digest, base64 content, and parsed `data`
 * (object for .json, array for .jsonl, raw string otherwise).
 * Throws with a relPath-prefixed message on unparseable JSON.
 */
function parseFile(root, relPath) {
  const buffer = readFileSync(join(root, relPath));
  const raw = buffer.toString('utf8');

  let format;
  let data;
  if (relPath.endsWith('.json')) {
    format = 'json';
    try {
      data = JSON.parse(raw);
    } catch (error) {
      throw new Error(`${relPath}: invalid JSON: ${error.message}`);
    }
  } else if (relPath.endsWith('.jsonl')) {
    format = 'jsonl';
    data = parseJsonl(relPath, raw);
  } else {
    format = 'text';
    data = raw;
  }

  return {
    format,
    bytes: buffer.byteLength,
    sha256: sha256(buffer),
    content_base64: buffer.toString('base64'),
    data,
  };
}
|
|
113
|
+
|
|
114
|
+
/**
 * Returns how many JSONL entries were parsed for `relPath`, or 0 when the
 * file is absent or its parsed data is not an array.
 *
 * @param {Object<string, {data: *}>} files - Collected file records.
 * @param {string} relPath - Key to look up.
 * @returns {number}
 */
function countJsonl(files, relPath) {
  const entries = files[relPath]?.data;
  if (!Array.isArray(entries)) {
    return 0;
  }
  return entries.length;
}
|
|
117
|
+
|
|
118
|
+
/**
 * Counts collected files whose path lives under the given directory prefix.
 * The trailing slash guard ensures `foo` does not match `foobar/…`.
 *
 * @param {Object<string, *>} files - Collected file records keyed by path.
 * @param {string} prefix - Directory prefix, without trailing slash.
 * @returns {number}
 */
function countDirectoryFiles(files, prefix) {
  const needle = `${prefix}/`;
  let count = 0;
  for (const path of Object.keys(files)) {
    if (path.startsWith(needle)) {
      count += 1;
    }
  }
  return count;
}
|
|
121
|
+
|
|
122
|
+
/**
 * Builds a portable export of the current governed project's run state.
 *
 * Walks INCLUDED_ROOTS under the project root, reads every file found
 * (raw bytes, sha256, base64, plus parsed JSON/JSONL where applicable via
 * parseFile), and wraps them with the project config, state, and a summary
 * into a single export envelope.
 *
 * NOTE(review): the key insertion order of the returned `export` object
 * appears deliberate — the companion verifier presumably hashes or compares
 * the serialized form, so do not reorder these fields casually; confirm
 * against export-verifier before changing.
 *
 * @param {string} [startDir=process.cwd()] - Directory to start the governed-project lookup from.
 * @returns {{ok: true, export: object} | {ok: false, error: string}} Error result when no
 *   governed project is found or the project is not in governed mode.
 */
export function buildRunExport(startDir = process.cwd()) {
  const context = loadProjectContext(startDir);
  if (!context) {
    return {
      ok: false,
      error: 'No governed project found. Run this inside an AgentXchain governed project.',
    };
  }

  // Only governed-mode projects are exportable in this slice.
  if (context.config.protocol_mode !== 'governed') {
    return {
      ok: false,
      error: 'Run export only supports governed projects in this slice.',
    };
  }

  const { root, rawConfig, config, version } = context;
  const state = loadProjectState(root, config);

  // Deduplicate and sort with a fixed locale so the file order is
  // deterministic across machines regardless of user locale.
  const collectedPaths = [...new Set(INCLUDED_ROOTS.flatMap((relPath) => collectPaths(root, relPath)))]
    .sort((a, b) => a.localeCompare(b, 'en'));

  const files = {};
  for (const relPath of collectedPaths) {
    files[relPath] = parseFile(root, relPath);
  }

  // Turn ids are sorted the same way, again for deterministic output.
  const activeTurns = Object.keys(state?.active_turns || {}).sort((a, b) => a.localeCompare(b, 'en'));
  const retainedTurns = Object.keys(state?.retained_turns || {}).sort((a, b) => a.localeCompare(b, 'en'));

  return {
    ok: true,
    export: {
      schema_version: EXPORT_SCHEMA_VERSION,
      export_kind: 'agentxchain_run_export',
      exported_at: new Date().toISOString(),
      // Root stored relative to the invoking cwd; '.' when exporting in place.
      project_root: relative(process.cwd(), root) || '.',
      project: {
        id: config.project.id,
        name: config.project.name,
        template: config.template || 'generic',
        protocol_mode: config.protocol_mode,
        schema_version: version,
      },
      // Summary counts are derived from the collected files so they always
      // agree with the embedded `files` map.
      summary: {
        run_id: state?.run_id || null,
        status: state?.status || null,
        phase: state?.phase || null,
        active_turn_ids: activeTurns,
        retained_turn_ids: retainedTurns,
        history_entries: countJsonl(files, '.agentxchain/history.jsonl'),
        decision_entries: countJsonl(files, '.agentxchain/decision-ledger.jsonl'),
        hook_audit_entries: countJsonl(files, '.agentxchain/hook-audit.jsonl'),
        dispatch_artifact_files: countDirectoryFiles(files, '.agentxchain/dispatch'),
        staging_artifact_files: countDirectoryFiles(files, '.agentxchain/staging'),
        intake_present: Object.keys(files).some((path) => path.startsWith('.agentxchain/intake/')),
        coordinator_present: Object.keys(files).some((path) => path.startsWith('.agentxchain/multirepo/')),
      },
      files,
      config: rawConfig,
      state,
    },
  };
}
|
|
186
|
+
|
|
187
|
+
/**
 * Builds a portable export for a multi-repo coordinator workspace.
 *
 * Reads the coordinator config, collects coordinator-level state files
 * from COORDINATOR_INCLUDED_ROOTS, and recursively embeds a full run
 * export (via buildRunExport) for every repo declared in the config.
 * Per-repo failures are captured as `{ ok: false, error }` entries rather
 * than aborting the whole export.
 *
 * NOTE(review): as with buildRunExport, the key insertion order of the
 * returned `export` object looks deliberate (serialized form is presumably
 * verified downstream) — confirm before reordering fields.
 *
 * @param {string} [startDir=process.cwd()] - Workspace root candidate.
 * @returns {{ok: true, export: object} | {ok: false, error: string}} Error result
 *   when the coordinator config file is missing or contains invalid JSON.
 */
export function buildCoordinatorExport(startDir = process.cwd()) {
  const workspaceRoot = resolve(startDir);
  const configPath = join(workspaceRoot, COORDINATOR_CONFIG_FILE);

  if (!existsSync(configPath)) {
    return {
      ok: false,
      error: `No ${COORDINATOR_CONFIG_FILE} found at ${workspaceRoot}.`,
    };
  }

  let rawConfig;
  try {
    rawConfig = JSON.parse(readFileSync(configPath, 'utf8'));
  } catch (err) {
    return {
      ok: false,
      error: `Invalid JSON in ${COORDINATOR_CONFIG_FILE}: ${err.message}`,
    };
  }

  // NOTE(review): normalizedConfig is computed but never read below.
  // Confirm whether loadCoordinatorConfig is kept for validation side
  // effects, or whether this pair of lines is dead code.
  const configResult = loadCoordinatorConfig(workspaceRoot);
  const normalizedConfig = configResult.ok ? configResult.config : null;

  // Collect coordinator-level files (deduplicated, fixed-locale sort so the
  // ordering is deterministic across machines).
  const collectedPaths = [...new Set(
    COORDINATOR_INCLUDED_ROOTS.flatMap((relPath) => collectPaths(workspaceRoot, relPath)),
  )].sort((a, b) => a.localeCompare(b, 'en'));

  const files = {};
  for (const relPath of collectedPaths) {
    files[relPath] = parseFile(workspaceRoot, relPath);
  }

  // Load coordinator state for summary
  const coordState = loadCoordinatorState(workspaceRoot);

  // Flatten coordinator state into a repoId -> status map.
  const repoRunStatuses = {};
  if (coordState?.repo_runs) {
    for (const [repoId, repoRun] of Object.entries(coordState.repo_runs)) {
      repoRunStatuses[repoId] = repoRun.status || 'unknown';
    }
  }

  // Count barriers from barriers.json (top-level keys, only when the file
  // was collected and parsed as JSON).
  let barrierCount = 0;
  const barriersKey = '.agentxchain/multirepo/barriers.json';
  if (files[barriersKey]?.format === 'json' && files[barriersKey]?.data) {
    barrierCount = Object.keys(files[barriersKey].data).length;
  }

  // Embed a child run export for every repo declared in the config.
  const repos = {};
  const repoEntries = rawConfig.repos && typeof rawConfig.repos === 'object'
    ? Object.entries(rawConfig.repos)
    : [];

  for (const [repoId, repoDef] of repoEntries) {
    const repoPath = repoDef?.path || '';
    const resolvedPath = resolve(workspaceRoot, repoPath);

    try {
      const childExport = buildRunExport(resolvedPath);
      if (childExport.ok) {
        repos[repoId] = {
          ok: true,
          path: repoPath,
          export: childExport.export,
        };
      } else {
        repos[repoId] = {
          ok: false,
          path: repoPath,
          error: childExport.error,
        };
      }
    } catch (err) {
      // A broken child repo must not abort the coordinator export.
      repos[repoId] = {
        ok: false,
        path: repoPath,
        error: err.message || String(err),
      };
    }
  }

  return {
    ok: true,
    export: {
      schema_version: EXPORT_SCHEMA_VERSION,
      export_kind: 'agentxchain_coordinator_export',
      exported_at: new Date().toISOString(),
      // Workspace root stored relative to the invoking cwd; '.' in place.
      workspace_root: relative(process.cwd(), workspaceRoot) || '.',
      coordinator: {
        project_id: rawConfig.project?.id || null,
        project_name: rawConfig.project?.name || null,
        schema_version: rawConfig.schema_version || null,
        repo_count: repoEntries.length,
        workstream_count: rawConfig.workstreams
          ? Object.keys(rawConfig.workstreams).length
          : 0,
      },
      summary: {
        super_run_id: coordState?.super_run_id || null,
        status: coordState?.status || null,
        phase: coordState?.phase || null,
        repo_run_statuses: repoRunStatuses,
        barrier_count: barrierCount,
        history_entries: countJsonl(files, '.agentxchain/multirepo/history.jsonl'),
        decision_entries: countJsonl(files, '.agentxchain/multirepo/decision-ledger.jsonl'),
      },
      files,
      config: rawConfig,
      repos,
    },
  };
}
|