agentxchain 2.47.0 → 2.49.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/agentxchain.js +42 -0
- package/package.json +1 -1
- package/src/commands/events.js +150 -0
- package/src/commands/run.js +97 -69
- package/src/commands/schedule.js +265 -0
- package/src/lib/export.js +4 -0
- package/src/lib/governed-state.js +107 -0
- package/src/lib/normalized-config.js +80 -0
- package/src/lib/repo-observer.js +3 -0
- package/src/lib/run-events.js +117 -0
- package/src/lib/run-schedule.js +160 -0
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* run-events.js — Repo-local structured event log for governed runs.
|
|
3
|
+
*
|
|
4
|
+
* Appends lifecycle events to `.agentxchain/events.jsonl` so operators
|
|
5
|
+
* can observe run progress without webhooks or dashboard.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { appendFileSync, readFileSync, existsSync, mkdirSync } from 'node:fs';
|
|
9
|
+
import { join, dirname } from 'node:path';
|
|
10
|
+
import { randomBytes } from 'node:crypto';
|
|
11
|
+
|
|
12
|
+
// Repo-relative path of the JSONL event log that emitRunEvent() appends to.
export const RUN_EVENTS_PATH = '.agentxchain/events.jsonl';

// Known lifecycle event types for governed runs.
// NOTE(review): emitRunEvent() does not validate eventType against this list;
// it appears intended for consumers/CLI filtering — confirm against callers.
export const VALID_RUN_EVENTS = [
  'run_started',
  'phase_entered',
  'turn_dispatched',
  'turn_accepted',
  'turn_rejected',
  'run_blocked',
  'run_completed',
  'escalation_raised',
  'escalation_resolved',
  'gate_pending',
  'gate_approved',
];
|
|
27
|
+
|
|
28
|
+
/**
 * Append a structured lifecycle event to the repo-local event log.
 *
 * Writes one JSON line to `.agentxchain/events.jsonl` under `root`, creating
 * the directory on first use. Logging is best-effort: failures never throw.
 *
 * @param {string} root - Project root directory
 * @param {string} eventType - One of VALID_RUN_EVENTS
 * @param {object} details - Event details
 * @param {string} [details.run_id] - Current run ID
 * @param {string} [details.phase] - Current phase
 * @param {string} [details.status] - Current run status
 * @param {object} [details.turn] - Turn context (turn_id, role_id, etc.)
 * @param {object} [details.payload] - Additional event-specific data
 * @returns {{ ok: boolean, event_id: string }}
 */
export function emitRunEvent(root, eventType, details = {}) {
  const eventId = `evt_${randomBytes(8).toString('hex')}`;
  const record = {
    event_id: eventId,
    event_type: eventType,
    timestamp: new Date().toISOString(),
    run_id: details.run_id || null,
    phase: details.phase || null,
    status: details.status || null,
    turn: details.turn || null,
    payload: details.payload || {},
  };

  try {
    const logPath = join(root, RUN_EVENTS_PATH);
    // Recursive mkdir is a no-op when the directory already exists.
    mkdirSync(dirname(logPath), { recursive: true });
    appendFileSync(logPath, `${JSON.stringify(record)}\n`);
    return { ok: true, event_id: eventId };
  } catch (err) {
    // Best-effort — never interrupt governed operations for event logging.
    if (process.env.AGENTXCHAIN_DEBUG) {
      process.stderr.write(`[run-events] write failed: ${err.message}\n`);
    }
    return { ok: false, event_id: eventId };
  }
}
|
|
70
|
+
|
|
71
|
+
/**
 * Read events from the local event log, newest-last.
 *
 * Malformed JSONL lines are silently skipped. Filters are applied in order:
 * type, since, then limit (taken from the end of the file).
 *
 * @param {string} root - Project root directory
 * @param {object} [opts] - Filter options
 * @param {string} [opts.type] - Comma-separated event types to include
 * @param {string} [opts.since] - ISO-8601 timestamp; only events after this
 * @param {number} [opts.limit] - Max events to return (from end of file)
 * @returns {object[]}
 */
export function readRunEvents(root, opts = {}) {
  const logPath = join(root, RUN_EVENTS_PATH);
  if (!existsSync(logPath)) return [];

  let events = [];
  for (const line of readFileSync(logPath, 'utf8').split('\n')) {
    if (!line) continue;
    try {
      events.push(JSON.parse(line));
    } catch {
      // Skip malformed lines.
    }
  }

  if (opts.type) {
    const wanted = new Set(opts.type.split(',').map((t) => t.trim()));
    events = events.filter((e) => wanted.has(e.event_type));
  }

  if (opts.since) {
    const cutoffMs = new Date(opts.since).getTime();
    // An unparsable `since` is ignored rather than filtering everything out.
    if (!Number.isNaN(cutoffMs)) {
      events = events.filter((e) => new Date(e.timestamp).getTime() > cutoffMs);
    }
  }

  if (opts.limit && opts.limit > 0 && events.length > opts.limit) {
    events = events.slice(-opts.limit);
  }

  return events;
}
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
import { existsSync, mkdirSync, readFileSync } from 'fs';
|
|
2
|
+
import { join, dirname } from 'path';
|
|
3
|
+
import { safeWriteJson } from './safe-write.js';
|
|
4
|
+
import { loadProjectState } from './config.js';
|
|
5
|
+
|
|
6
|
+
// Repo-relative path of the persisted schedule-state file.
export const SCHEDULE_STATE_PATH = '.agentxchain/schedule-state.json';
// Stamped into every state document written by writeScheduleState().
const SCHEDULE_STATE_SCHEMA_VERSION = '0.1';
|
|
8
|
+
|
|
9
|
+
/**
 * Parse an ISO-8601 timestamp string into epoch milliseconds.
 *
 * @param {*} value - Candidate timestamp
 * @returns {number|null} Epoch ms, or null for non-strings, blank strings,
 *   and unparsable dates.
 */
function parseIsoTime(value) {
  if (typeof value !== 'string') return null;
  if (!value.trim()) return null;
  const epochMs = Date.parse(value);
  if (!Number.isFinite(epochMs)) return null;
  return epochMs;
}
|
|
14
|
+
|
|
15
|
+
/**
 * Format an epoch-ms value as an ISO-8601 UTC string.
 *
 * @param {number} value - Epoch milliseconds
 * @returns {string} ISO-8601 timestamp (throws RangeError for invalid dates)
 */
function toIso(value) {
  const asDate = new Date(value);
  return asDate.toISOString();
}
|
|
18
|
+
|
|
19
|
+
/**
 * Coerce an arbitrary value into a well-formed per-schedule state record.
 *
 * Non-object inputs (including arrays) produce an all-null record; for
 * objects, each known field is kept only when it is a string.
 *
 * @param {*} value - Raw record from disk (may be anything)
 * @returns {object} Record with exactly the six known string-or-null fields
 */
function normalizeScheduleStateRecord(value) {
  const fields = [
    'last_started_at',
    'last_finished_at',
    'last_run_id',
    'last_status',
    'last_skip_at',
    'last_skip_reason',
  ];
  const isPlainObject =
    value !== null && typeof value === 'object' && !Array.isArray(value);
  const source = isPlainObject ? value : {};

  const record = {};
  for (const field of fields) {
    record[field] = typeof source[field] === 'string' ? source[field] : null;
  }
  return record;
}
|
|
40
|
+
|
|
41
|
+
/**
 * Build a well-formed schedule-state document from raw on-disk data.
 *
 * Only schedule IDs present in the config are kept; each record is
 * normalized independently. `|| {}` (not `??`) is deliberate so any
 * falsy/odd `config.schedules` value is treated as "no schedules".
 *
 * @param {*} value - Raw parsed state (may be null/corrupt)
 * @param {object} config - Project config providing `schedules`
 * @returns {{ schema_version: string, schedules: object }}
 */
function normalizeScheduleState(value, config) {
  const configuredIds = Object.keys(config?.schedules || {});
  const schedules = Object.fromEntries(
    configuredIds.map((id) => [id, normalizeScheduleStateRecord(value?.schedules?.[id])])
  );
  return {
    schema_version: SCHEDULE_STATE_SCHEMA_VERSION,
    schedules,
  };
}
|
|
53
|
+
|
|
54
|
+
/**
 * Load the persisted schedule state, always returning a normalized document.
 *
 * A missing, unreadable, or corrupt state file yields a pristine (all-null)
 * state for every configured schedule.
 *
 * @param {string} root - Project root directory
 * @param {object} config - Project config providing `schedules`
 * @returns {{ schema_version: string, schedules: object }}
 */
export function readScheduleState(root, config) {
  const statePath = join(root, SCHEDULE_STATE_PATH);

  let raw = null;
  if (existsSync(statePath)) {
    try {
      raw = JSON.parse(readFileSync(statePath, 'utf8'));
    } catch {
      raw = null; // corrupt file → fall back to pristine state
    }
  }
  return normalizeScheduleState(raw, config);
}
|
|
67
|
+
|
|
68
|
+
/**
 * Persist a schedule-state document, creating the state directory if needed.
 *
 * @param {string} root - Project root directory
 * @param {object} state - Normalized state document to write
 */
export function writeScheduleState(root, state) {
  const statePath = join(root, SCHEDULE_STATE_PATH);
  const stateDir = dirname(statePath);
  mkdirSync(stateDir, { recursive: true });
  safeWriteJson(statePath, state);
}
|
|
73
|
+
|
|
74
|
+
/**
 * Apply an updater to one schedule's record and persist the whole state.
 *
 * The updater receives a normalized copy of the current record; its return
 * value is normalized again before being written, so it may return a
 * partial or dirty object.
 *
 * @param {string} root - Project root directory
 * @param {object} config - Project config providing `schedules`
 * @param {string} scheduleId - Schedule to update
 * @param {Function} updater - (currentRecord) => nextRecord
 * @returns {object} The normalized record that was persisted
 */
export function updateScheduleState(root, config, scheduleId, updater) {
  const state = readScheduleState(root, config);
  const before = normalizeScheduleStateRecord(state.schedules[scheduleId]);
  const after = normalizeScheduleStateRecord(updater(before));
  state.schedules[scheduleId] = after;
  writeScheduleState(root, state);
  return after;
}
|
|
81
|
+
|
|
82
|
+
/**
 * Compute whether a schedule is due to launch.
 *
 * @param {object} schedule - Schedule config (`enabled`, `every_minutes`)
 * @param {object} record - Normalized per-schedule state record
 * @param {number} [now] - Evaluation time in epoch ms (defaults to Date.now())
 * @returns {{ due: boolean, next_due_at: string|null, due_reason: string }}
 */
export function computeScheduleStatus(schedule, record, now = Date.now()) {
  // Disabled schedules are never due.
  if (schedule.enabled === false) {
    return {
      due: false,
      next_due_at: null,
      due_reason: 'disabled',
    };
  }

  // A schedule that has never started is due immediately.
  const lastStartedAt = parseIsoTime(record.last_started_at);
  if (lastStartedAt === null) {
    return {
      due: true,
      next_due_at: toIso(now),
      due_reason: 'never_started',
    };
  }

  // Fix: a missing/non-numeric `every_minutes` used to produce NaN here,
  // making toIso(NaN) throw RangeError ("Invalid time value"). Report it
  // as not due instead. Zero/negative intervals keep their original
  // behavior (immediately due) for backward compatibility.
  const intervalMinutes = Number(schedule.every_minutes);
  if (!Number.isFinite(intervalMinutes)) {
    return {
      due: false,
      next_due_at: null,
      due_reason: 'invalid_interval',
    };
  }

  const nextDueTs = lastStartedAt + intervalMinutes * 60 * 1000;
  const isDue = now >= nextDueTs;
  return {
    due: isDue,
    next_due_at: toIso(nextDueTs),
    due_reason: isDue ? 'interval_elapsed' : 'waiting_interval',
  };
}
|
|
107
|
+
|
|
108
|
+
/**
 * List all configured schedules with due status and last-run information.
 *
 * @param {string} root - Project root directory
 * @param {object} config - Project config providing `schedules`
 * @param {object} [opts]
 * @param {string} [opts.at] - ISO-8601 time to evaluate "due" against;
 *   falls back to the current time when missing or unparsable.
 * @returns {object[]} One summary entry per configured schedule
 */
export function listSchedules(root, config, { at } = {}) {
  // Fix: an unparsable `at` used to propagate NaN into
  // computeScheduleStatus(), where toIso() throws RangeError.
  // Fall back to the current time instead.
  const requestedAt = at ? Date.parse(at) : NaN;
  const now = Number.isFinite(requestedAt) ? requestedAt : Date.now();

  const scheduleState = readScheduleState(root, config);
  const projectState = loadProjectState(root, config);
  const projectStatus = projectState?.status || 'missing';

  return Object.entries(config?.schedules || {}).map(([scheduleId, schedule]) => {
    // readScheduleState pre-normalizes configured IDs; the fallback guards
    // against a state document built elsewhere.
    const record = scheduleState.schedules[scheduleId] || normalizeScheduleStateRecord(null);
    const status = computeScheduleStatus(schedule, record, now);
    return {
      id: scheduleId,
      enabled: schedule.enabled !== false,
      every_minutes: schedule.every_minutes,
      auto_approve: schedule.auto_approve !== false,
      max_turns: schedule.max_turns ?? 50,
      initial_role: schedule.initial_role || null,
      trigger_reason: schedule.trigger_reason || `schedule:${scheduleId}`,
      due: status.due,
      due_reason: status.due_reason,
      next_due_at: status.next_due_at,
      project_status: projectStatus,
      last_started_at: record.last_started_at,
      last_finished_at: record.last_finished_at,
      last_run_id: record.last_run_id,
      last_status: record.last_status,
      last_skip_at: record.last_skip_at,
      last_skip_reason: record.last_skip_reason,
    };
  });
}
|
|
138
|
+
|
|
139
|
+
/**
 * Decide whether a scheduled run may be launched given the project status.
 *
 * Launchable statuses are `missing`, `idle`, and `completed`. Every other
 * status (blocked, active, paused, or anything unknown) is rejected with
 * reason `run_<status>` — the original per-status branches all produced
 * exactly that string.
 *
 * @param {string} root - Project root directory
 * @param {object} config - Project config
 * @returns {{ ok: boolean, status: string, reason?: string }}
 */
export function evaluateScheduleLaunchEligibility(root, config) {
  const projectState = loadProjectState(root, config);
  const status = projectState?.status || 'missing';

  const launchable = new Set(['missing', 'idle', 'completed']);
  if (launchable.has(status)) {
    return { ok: true, status };
  }

  return { ok: false, status, reason: `run_${status}` };
}
|