@renseiai/agentfactory-cli 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +123 -0
- package/dist/src/agent.d.ts +20 -0
- package/dist/src/agent.d.ts.map +1 -0
- package/dist/src/agent.js +109 -0
- package/dist/src/analyze-logs.d.ts +26 -0
- package/dist/src/analyze-logs.d.ts.map +1 -0
- package/dist/src/analyze-logs.js +152 -0
- package/dist/src/cleanup.d.ts +17 -0
- package/dist/src/cleanup.d.ts.map +1 -0
- package/dist/src/cleanup.js +111 -0
- package/dist/src/governor.d.ts +26 -0
- package/dist/src/governor.d.ts.map +1 -0
- package/dist/src/governor.js +305 -0
- package/dist/src/index.d.ts +10 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/index.js +76 -0
- package/dist/src/lib/agent-runner.d.ts +28 -0
- package/dist/src/lib/agent-runner.d.ts.map +1 -0
- package/dist/src/lib/agent-runner.js +272 -0
- package/dist/src/lib/analyze-logs-runner.d.ts +47 -0
- package/dist/src/lib/analyze-logs-runner.d.ts.map +1 -0
- package/dist/src/lib/analyze-logs-runner.js +216 -0
- package/dist/src/lib/auto-updater.d.ts +40 -0
- package/dist/src/lib/auto-updater.d.ts.map +1 -0
- package/dist/src/lib/auto-updater.js +109 -0
- package/dist/src/lib/cleanup-runner.d.ts +29 -0
- package/dist/src/lib/cleanup-runner.d.ts.map +1 -0
- package/dist/src/lib/cleanup-runner.js +295 -0
- package/dist/src/lib/governor-dependencies.d.ts +23 -0
- package/dist/src/lib/governor-dependencies.d.ts.map +1 -0
- package/dist/src/lib/governor-dependencies.js +361 -0
- package/dist/src/lib/governor-logger.d.ts +30 -0
- package/dist/src/lib/governor-logger.d.ts.map +1 -0
- package/dist/src/lib/governor-logger.js +210 -0
- package/dist/src/lib/governor-runner.d.ts +103 -0
- package/dist/src/lib/governor-runner.d.ts.map +1 -0
- package/dist/src/lib/governor-runner.js +210 -0
- package/dist/src/lib/linear-runner.d.ts +8 -0
- package/dist/src/lib/linear-runner.d.ts.map +1 -0
- package/dist/src/lib/linear-runner.js +7 -0
- package/dist/src/lib/orchestrator-runner.d.ts +51 -0
- package/dist/src/lib/orchestrator-runner.d.ts.map +1 -0
- package/dist/src/lib/orchestrator-runner.js +151 -0
- package/dist/src/lib/queue-admin-runner.d.ts +30 -0
- package/dist/src/lib/queue-admin-runner.d.ts.map +1 -0
- package/dist/src/lib/queue-admin-runner.js +378 -0
- package/dist/src/lib/sync-routes-runner.d.ts +28 -0
- package/dist/src/lib/sync-routes-runner.d.ts.map +1 -0
- package/dist/src/lib/sync-routes-runner.js +110 -0
- package/dist/src/lib/version.d.ts +35 -0
- package/dist/src/lib/version.d.ts.map +1 -0
- package/dist/src/lib/version.js +168 -0
- package/dist/src/lib/worker-fleet-runner.d.ts +32 -0
- package/dist/src/lib/worker-fleet-runner.d.ts.map +1 -0
- package/dist/src/lib/worker-fleet-runner.js +256 -0
- package/dist/src/lib/worker-runner.d.ts +33 -0
- package/dist/src/lib/worker-runner.d.ts.map +1 -0
- package/dist/src/lib/worker-runner.js +781 -0
- package/dist/src/linear.d.ts +37 -0
- package/dist/src/linear.d.ts.map +1 -0
- package/dist/src/linear.js +118 -0
- package/dist/src/orchestrator.d.ts +21 -0
- package/dist/src/orchestrator.d.ts.map +1 -0
- package/dist/src/orchestrator.js +190 -0
- package/dist/src/queue-admin.d.ts +25 -0
- package/dist/src/queue-admin.d.ts.map +1 -0
- package/dist/src/queue-admin.js +96 -0
- package/dist/src/sync-routes.d.ts +17 -0
- package/dist/src/sync-routes.d.ts.map +1 -0
- package/dist/src/sync-routes.js +100 -0
- package/dist/src/worker-fleet.d.ts +25 -0
- package/dist/src/worker-fleet.d.ts.map +1 -0
- package/dist/src/worker-fleet.js +140 -0
- package/dist/src/worker.d.ts +26 -0
- package/dist/src/worker.d.ts.map +1 -0
- package/dist/src/worker.js +135 -0
- package/package.json +175 -0
|
@@ -0,0 +1,378 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Queue Admin Runner -- Programmatic API for the queue admin CLI.
|
|
3
|
+
*
|
|
4
|
+
* Extracts ALL command handlers from the queue-admin bin script so they can be
|
|
5
|
+
* invoked programmatically (e.g. from a Next.js route handler or test) without
|
|
6
|
+
* process.exit / dotenv / argv coupling.
|
|
7
|
+
*/
|
|
8
|
+
import { getRedisClient, redisKeys, redisDel, redisGet, redisSet, redisZRangeByScore, redisZRem, redisHGetAll, disconnectRedis, } from '@renseiai/agentfactory-server';
|
|
9
|
+
// ---------------------------------------------------------------------------
|
|
10
|
+
// Redis key constants
|
|
11
|
+
// ---------------------------------------------------------------------------
|
|
12
|
+
// Sorted set of queued session ids (read via redisZRangeByScore below).
const WORK_QUEUE_KEY = 'work:queue';
// Hash of sessionId -> JSON-encoded work item (read via redisHGetAll below).
const WORK_ITEMS_KEY = 'work:items';
// Claim keys; removeSession matches them by sessionId substring — presumably
// the key embeds the session id. TODO confirm against the claim writer.
const WORK_CLAIM_PREFIX = 'work:claim:';
// Per-session records storing status, workerId, timestamps, etc.
const SESSION_KEY_PREFIX = 'agent:session:';
// Worker registration records (id, status, capacity, lastHeartbeat).
const WORKER_PREFIX = 'work:worker:';
|
|
17
|
+
// ---------------------------------------------------------------------------
|
|
18
|
+
// ANSI colors
|
|
19
|
+
// ---------------------------------------------------------------------------
|
|
20
|
+
// ANSI escape sequences used to colourise CLI output. Emit `reset`
// after any colour to return the terminal to its default style.
export const C = {
    reset: '\x1b[0m',
    red: '\x1b[31m',
    green: '\x1b[32m',
    yellow: '\x1b[33m',
    cyan: '\x1b[36m',
    gray: '\x1b[90m',
};
|
|
28
|
+
// ---------------------------------------------------------------------------
|
|
29
|
+
// Helpers
|
|
30
|
+
// ---------------------------------------------------------------------------
|
|
31
|
+
/**
 * Fail fast when REDIS_URL is not configured, then eagerly initialize
 * the shared Redis client so later helpers can assume a connection.
 */
function ensureRedis() {
    const redisUrl = process.env.REDIS_URL;
    if (!redisUrl) {
        throw new Error('REDIS_URL environment variable is not set');
    }
    getRedisClient();
}
|
|
38
|
+
// ---------------------------------------------------------------------------
|
|
39
|
+
// Command handlers
|
|
40
|
+
// ---------------------------------------------------------------------------
|
|
41
|
+
/**
 * Print every queued work item to stdout, then disconnect from Redis.
 * Reads both the sorted-set queue and the work-items hash so entries
 * present in either structure are counted.
 */
async function listQueue() {
    ensureRedis();
    // The sorted set carries ordering; the hash carries the payloads.
    const queuedSessionIds = await redisZRangeByScore(WORK_QUEUE_KEY, '-inf', '+inf');
    const workItems = await redisHGetAll(WORK_ITEMS_KEY);
    const itemEntries = Object.entries(workItems);
    const totalItems = Math.max(queuedSessionIds.length, itemEntries.length);
    console.log(`\n${C.cyan}Work Queue${C.reset} (${totalItems} items):`);
    console.log('='.repeat(60));
    if (itemEntries.length === 0 && queuedSessionIds.length === 0) {
        console.log('(empty)');
    }
    else {
        for (const [sessionId, itemJson] of itemEntries) {
            // The try covers the whole entry so a malformed payload
            // degrades to a single placeholder line.
            try {
                const work = JSON.parse(itemJson);
                console.log(`- ${work.issueIdentifier ?? sessionId.slice(0, 8)} (session: ${sessionId.slice(0, 8)}...)`);
                console.log(`  Priority: ${work.priority ?? 'none'}, WorkType: ${work.workType ?? 'development'}`);
                if (work.queuedAt) {
                    console.log(`  Queued: ${new Date(work.queuedAt).toISOString()}`);
                }
                if (work.providerSessionId) {
                    console.log(`  ${C.yellow}Has providerSessionId: ${work.providerSessionId.substring(0, 12)}${C.reset}`);
                }
                if (work.prompt) {
                    console.log(`  Prompt: "${work.prompt.slice(0, 50)}..."`);
                }
            }
            catch {
                console.log(`- [invalid JSON]: ${sessionId}`);
            }
        }
    }
    await disconnectRedis();
}
|
|
75
|
+
/**
 * Print every agent session stored in Redis, colourised by status,
 * then disconnect.
 */
async function listSessions() {
    ensureRedis();
    const keys = await redisKeys(`${SESSION_KEY_PREFIX}*`);
    console.log(`\n${C.cyan}Sessions${C.reset} (${keys.length} total):`);
    console.log('='.repeat(60));
    // Colour per status; unknown statuses render uncoloured.
    const statusColors = {
        pending: C.yellow,
        claimed: C.cyan,
        running: C.cyan,
        completed: C.green,
        failed: C.red,
        stopped: C.yellow,
    };
    if (keys.length === 0) {
        console.log('(none)');
    }
    else {
        for (const key of keys) {
            const session = await redisGet(key);
            if (!session) {
                continue;
            }
            const statusColor = statusColors[session.status] || '';
            console.log(`- ${session.issueIdentifier || session.issueId.slice(0, 8)} [${statusColor}${session.status}${C.reset}]`);
            console.log(`  Session: ${session.linearSessionId.slice(0, 12)}...`);
            if (session.workType) {
                console.log(`  WorkType: ${session.workType}`);
            }
            // updatedAt is stored in epoch seconds (multiplied to ms here).
            console.log(`  Updated: ${new Date(session.updatedAt * 1000).toISOString()}`);
            if (session.workerId) {
                console.log(`  Worker: ${session.workerId}`);
            }
        }
    }
    await disconnectRedis();
}
|
|
110
|
+
/**
 * Print every registered worker with status colour, capacity usage and
 * heartbeat age, then disconnect from Redis.
 */
async function listWorkersFn() {
    ensureRedis();
    const workerKeys = await redisKeys(`${WORKER_PREFIX}*`);
    console.log(`\n${C.cyan}Workers${C.reset} (${workerKeys.length} total):`);
    console.log('='.repeat(60));
    if (workerKeys.length === 0) {
        console.log('(none)');
        await disconnectRedis();
        return;
    }
    for (const workerKey of workerKeys) {
        const worker = await redisGet(workerKey);
        if (!worker) {
            continue;
        }
        const statusColor = worker.status === 'active' ? C.green : C.yellow;
        console.log(`- ${worker.id.slice(0, 12)} [${statusColor}${worker.status}${C.reset}]`);
        if (worker.hostname) {
            console.log(`  Hostname: ${worker.hostname}`);
        }
        console.log(`  Capacity: ${worker.activeCount ?? 0}/${worker.capacity ?? '?'}`);
        if (worker.lastHeartbeat) {
            // lastHeartbeat is compared against Date.now(), so it is
            // an epoch-milliseconds timestamp.
            const secondsAgo = Math.round((Date.now() - worker.lastHeartbeat) / 1000);
            console.log(`  Last heartbeat: ${secondsAgo}s ago`);
        }
    }
    await disconnectRedis();
}
|
|
137
|
+
/**
 * Delete every work claim key. Prints each deleted key and a final
 * count; returns early when there is nothing to clear.
 */
async function clearClaims() {
    ensureRedis();
    console.log('Clearing work claims...');
    const claimKeys = await redisKeys(`${WORK_CLAIM_PREFIX}*`);
    console.log(`Found ${claimKeys.length} claim(s)`);
    if (claimKeys.length === 0) {
        console.log('No claims to clear');
        await disconnectRedis();
        return;
    }
    let removedCount = 0;
    for (const claimKey of claimKeys) {
        // redisDel returns the number of keys actually removed.
        const delCount = await redisDel(claimKey);
        if (delCount > 0) {
            console.log(`  Deleted: ${claimKey}`);
            removedCount += 1;
        }
    }
    console.log(`\nCleared ${removedCount} claim(s)`);
    await disconnectRedis();
}
|
|
158
|
+
/**
 * Empty the work queue: logs each pending item for the operator, then
 * deletes the queue sorted set and the items hash wholesale.
 */
async function clearQueue() {
    ensureRedis();
    console.log('Clearing work queue...');
    const queuedSessionIds = await redisZRangeByScore(WORK_QUEUE_KEY, '-inf', '+inf');
    console.log(`Found ${queuedSessionIds.length} item(s) in queue sorted set`);
    const workItems = await redisHGetAll(WORK_ITEMS_KEY);
    const workItemCount = Object.keys(workItems).length;
    console.log(`Found ${workItemCount} item(s) in work items hash`);
    // Describe each item before it is destroyed.
    for (const [sessionId, itemJson] of Object.entries(workItems)) {
        try {
            const work = JSON.parse(itemJson);
            console.log(`  - ${work.issueIdentifier ?? sessionId.slice(0, 8)} (workType: ${work.workType || 'development'})`);
            if (work.providerSessionId) {
                console.log(`    ${C.yellow}Has providerSessionId: ${work.providerSessionId.substring(0, 12)}${C.reset}`);
            }
        }
        catch {
            console.log(`  - [invalid JSON]: ${sessionId}`);
        }
    }
    // Only delete structures that actually contain something.
    const keysToDelete = [];
    if (queuedSessionIds.length > 0) {
        keysToDelete.push(WORK_QUEUE_KEY);
    }
    if (workItemCount > 0) {
        keysToDelete.push(WORK_ITEMS_KEY);
    }
    for (const key of keysToDelete) {
        await redisDel(key);
    }
    const totalItems = Math.max(queuedSessionIds.length, workItemCount);
    if (keysToDelete.length > 0) {
        console.log(`\nCleared ${totalItems} item(s) from work queue`);
    }
    else {
        console.log('\nQueue was already empty');
    }
    await disconnectRedis();
}
|
|
197
|
+
/**
 * Delete every piece of AgentFactory state in Redis: the work queue,
 * the work-items hash, all sessions, all claims and all worker
 * registrations. Irreversible — intended for full resets.
 *
 * Fix: the session/claim/worker deletions were awaited one key at a
 * time (one round-trip per key). The deletions are independent, so
 * each group now runs in parallel via Promise.all; the log output and
 * its order are unchanged.
 */
async function clearAll() {
    ensureRedis();
    console.log('Clearing ALL state...\n');
    // Clear work queue (sorted set + items hash)
    const queuedSessionIds = await redisZRangeByScore(WORK_QUEUE_KEY, '-inf', '+inf');
    const workItems = await redisHGetAll(WORK_ITEMS_KEY);
    if (queuedSessionIds.length > 0)
        await redisDel(WORK_QUEUE_KEY);
    if (Object.keys(workItems).length > 0)
        await redisDel(WORK_ITEMS_KEY);
    console.log(`Cleared ${Math.max(queuedSessionIds.length, Object.keys(workItems).length)} queue items`);
    // Clear all sessions
    const sessionKeys = await redisKeys(`${SESSION_KEY_PREFIX}*`);
    await Promise.all(sessionKeys.map((key) => redisDel(key)));
    console.log(`Cleared ${sessionKeys.length} sessions`);
    // Clear all claims
    const claimKeys = await redisKeys(`${WORK_CLAIM_PREFIX}*`);
    await Promise.all(claimKeys.map((key) => redisDel(key)));
    console.log(`Cleared ${claimKeys.length} claims`);
    // Clear all workers
    const workerKeys = await redisKeys(`${WORKER_PREFIX}*`);
    await Promise.all(workerKeys.map((key) => redisDel(key)));
    console.log(`Cleared ${workerKeys.length} worker registrations`);
    console.log('\nAll cleared!');
    await disconnectRedis();
}
|
|
229
|
+
/**
 * Reset transient work state so queued work can run again:
 *   1. delete all claim keys,
 *   2. empty the queue sorted set and the items hash,
 *   3. flip 'running'/'claimed' sessions back to 'pending'
 *      (completed/failed/stopped sessions are left untouched).
 * Disconnects from Redis when done.
 */
async function resetWorkState() {
    ensureRedis();
    console.log('Resetting work state...');
    console.log('-'.repeat(60));
    // NOTE(review): totalCleared is accumulated below but never reported
    // — the summary prints claimKeys.length and the queue max instead.
    let totalCleared = 0;
    // 1. Clear work claims
    console.log('\nClearing work claims...');
    const claimKeys = await redisKeys(`${WORK_CLAIM_PREFIX}*`);
    console.log(`  Found ${claimKeys.length} claim(s)`);
    for (const key of claimKeys) {
        // redisDel returns the number of keys removed; only count real deletions.
        const result = await redisDel(key);
        if (result > 0) {
            console.log(`    Deleted: ${key}`);
            totalCleared++;
        }
    }
    // 2. Clear work queue
    console.log('\nClearing work queue...');
    const queuedSessionIds = await redisZRangeByScore(WORK_QUEUE_KEY, '-inf', '+inf');
    console.log(`  Found ${queuedSessionIds.length} queued item(s) in sorted set`);
    const workItems = await redisHGetAll(WORK_ITEMS_KEY);
    const workItemCount = Object.keys(workItems).length;
    console.log(`  Found ${workItemCount} item(s) in work items hash`);
    // Log what is about to be dropped; malformed payloads get a placeholder.
    for (const [sessionId, itemJson] of Object.entries(workItems)) {
        try {
            const work = JSON.parse(itemJson);
            console.log(`    - ${work.issueIdentifier ?? sessionId.slice(0, 8)} (workType: ${work.workType || 'development'})`);
        }
        catch {
            console.log(`    - [invalid item: ${sessionId}]`);
        }
    }
    if (queuedSessionIds.length > 0 || workItemCount > 0) {
        await redisDel(WORK_QUEUE_KEY);
        await redisDel(WORK_ITEMS_KEY);
        totalCleared += Math.max(queuedSessionIds.length, workItemCount);
        console.log(`  Cleared queue and items hash`);
    }
    // 3. Reset stuck sessions
    console.log('\nResetting stuck sessions...');
    const sessionKeys = await redisKeys(`${SESSION_KEY_PREFIX}*`);
    console.log(`  Found ${sessionKeys.length} session(s)`);
    let sessionsReset = 0;
    for (const key of sessionKeys) {
        const session = await redisGet(key);
        if (!session)
            continue;
        if (session.status === 'running' || session.status === 'claimed') {
            console.log(`  Resetting: ${session.issueIdentifier || session.linearSessionId}`);
            console.log(`    Status: ${session.status}, WorkerId: ${session.workerId || 'none'}`);
            // Drop worker/claim/provider bindings and mark pending again.
            const updated = {
                ...session,
                status: 'pending',
                workerId: undefined,
                claimedAt: undefined,
                providerSessionId: undefined,
                updatedAt: Math.floor(Date.now() / 1000),
            };
            // Rewritten with a 24h TTL — presumably matching the session
            // writer's lifetime. TODO confirm against the session writer.
            await redisSet(key, updated, 24 * 60 * 60);
            sessionsReset++;
            console.log(`    Reset to pending`);
        }
    }
    console.log('\n' + '-'.repeat(60));
    console.log(`\nReset complete:`);
    console.log(`  - Claims cleared: ${claimKeys.length}`);
    console.log(`  - Queue items cleared: ${Math.max(queuedSessionIds.length, workItemCount)}`);
    console.log(`  - Sessions reset: ${sessionsReset}`);
    await disconnectRedis();
}
|
|
299
|
+
/**
 * Remove every trace of a session matching the given (possibly partial)
 * sessionId: the session record, any queue entry (hash + sorted set)
 * and any claim. Logs each removal; reports when nothing matched.
 *
 * Fix: JSON.parse(itemJson) was the only unguarded parse in this file
 * — every other handler tolerates invalid payloads. A malformed item
 * would throw after the hash/zset deletes, aborting the command and
 * skipping disconnectRedis(). The parse is now guarded and falls back
 * to the session-id prefix as the label.
 */
async function removeSession(sessionId) {
    ensureRedis();
    let found = false;
    // Find session by partial ID match
    const keys = await redisKeys(`${SESSION_KEY_PREFIX}*`);
    for (const key of keys) {
        if (key.includes(sessionId)) {
            await redisDel(key);
            console.log(`Removed session: ${key.replace(SESSION_KEY_PREFIX, '')}`);
            found = true;
        }
    }
    // Also remove from queue if present
    const workItems = await redisHGetAll(WORK_ITEMS_KEY);
    for (const [sid, itemJson] of Object.entries(workItems)) {
        if (sid.includes(sessionId)) {
            // Remove from hash via direct Redis command
            const redis = getRedisClient();
            await redis.hdel(WORK_ITEMS_KEY, sid);
            // Remove from sorted set
            await redisZRem(WORK_QUEUE_KEY, sid);
            // Guarded parse: an invalid payload must not abort the removal.
            let label = sid.slice(0, 8);
            try {
                const work = JSON.parse(itemJson);
                label = work.issueIdentifier ?? label;
            }
            catch {
                // keep the session-id prefix label
            }
            console.log(`Removed from queue: ${label}`);
            found = true;
        }
    }
    // Remove claim if present
    const claimKeys = await redisKeys(`${WORK_CLAIM_PREFIX}*`);
    for (const key of claimKeys) {
        if (key.includes(sessionId)) {
            await redisDel(key);
            console.log(`Removed claim: ${key}`);
            found = true;
        }
    }
    if (!found) {
        console.log(`No session found matching: ${sessionId}`);
    }
    await disconnectRedis();
}
|
|
339
|
+
// ---------------------------------------------------------------------------
|
|
340
|
+
// Public entry point
|
|
341
|
+
// ---------------------------------------------------------------------------
|
|
342
|
+
/**
 * Run a queue admin command programmatically.
 *
 * Throws if REDIS_URL is not set, if the 'remove' command is called without
 * a sessionId, or if the command is not recognised.
 *
 * Fix: the switch had no default, so a mistyped command resolved
 * silently as a no-op; it now throws so callers see the mistake.
 */
export async function runQueueAdmin(config) {
    switch (config.command) {
        case 'list':
            await listQueue();
            break;
        case 'sessions':
            await listSessions();
            break;
        case 'workers':
            await listWorkersFn();
            break;
        case 'clear-claims':
            await clearClaims();
            break;
        case 'clear-queue':
            await clearQueue();
            break;
        case 'clear-all':
            await clearAll();
            break;
        case 'reset':
            await resetWorkState();
            break;
        case 'remove':
            if (!config.sessionId) {
                throw new Error('remove command requires a sessionId');
            }
            await removeSession(config.sessionId);
            break;
        default:
            throw new Error(`Unknown queue admin command: ${String(config.command)}`);
    }
}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Sync Routes Runner -- Programmatic API for the af-sync-routes CLI.
|
|
3
|
+
*
|
|
4
|
+
* Exports `runSyncRoutes()` so route syncing can be invoked from code
|
|
5
|
+
* without going through process.argv / process.env / process.exit.
|
|
6
|
+
*/
|
|
7
|
+
/** Options for runSyncRoutes(). All fields are optional. */
export interface SyncRoutesConfig {
    /** Preview what would be created without writing (default: false) */
    dryRun?: boolean;
    /** Also sync dashboard page.tsx files (default: false) */
    pages?: boolean;
    /** Custom app directory (default: "src/app") */
    appDir?: string;
    /** Project root directory (default: process.cwd()) */
    projectRoot?: string;
}
/** Summary of one sync run: counts plus collected errors/warnings. */
export interface SyncRoutesResult {
    /** Total manifest entries examined. */
    checked: number;
    /** Files newly written (or that would be written, under dryRun). */
    created: number;
    /** Entries skipped because the file already exists. */
    skipped: number;
    /** Per-file failures; the run continues past each one. */
    errors: Array<{
        path: string;
        error: string;
    }>;
    /** Non-fatal advisories (e.g. missing config file or dependency). */
    warnings: string[];
}
/** Sync route (and optionally page) files; existing files are never overwritten. */
export declare function runSyncRoutes(config?: SyncRoutesConfig): SyncRoutesResult;
//# sourceMappingURL=sync-routes-runner.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sync-routes-runner.d.ts","sourceRoot":"","sources":["../../../src/lib/sync-routes-runner.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAcH,MAAM,WAAW,gBAAgB;IAC/B,qEAAqE;IACrE,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,0DAA0D;IAC1D,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,gDAAgD;IAChD,MAAM,CAAC,EAAE,MAAM,CAAA;IACf,sDAAsD;IACtD,WAAW,CAAC,EAAE,MAAM,CAAA;CACrB;AAED,MAAM,WAAW,gBAAgB;IAC/B,OAAO,EAAE,MAAM,CAAA;IACf,OAAO,EAAE,MAAM,CAAA;IACf,OAAO,EAAE,MAAM,CAAA;IACf,MAAM,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC,CAAA;IAC9C,QAAQ,EAAE,MAAM,EAAE,CAAA;CACnB;AAMD,wBAAgB,aAAa,CAAC,MAAM,CAAC,EAAE,gBAAgB,GAAG,gBAAgB,CA6GzE"}
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Sync Routes Runner -- Programmatic API for the af-sync-routes CLI.
|
|
3
|
+
*
|
|
4
|
+
* Exports `runSyncRoutes()` so route syncing can be invoked from code
|
|
5
|
+
* without going through process.argv / process.env / process.exit.
|
|
6
|
+
*/
|
|
7
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
|
|
8
|
+
import { dirname, resolve } from 'path';
|
|
9
|
+
import { ROUTE_MANIFEST, generateRouteContent, generatePageContent, } from '@renseiai/agentfactory';
|
|
10
|
+
// ---------------------------------------------------------------------------
|
|
11
|
+
// Runner
|
|
12
|
+
// ---------------------------------------------------------------------------
|
|
13
|
+
/**
 * Sync generated route (and optionally dashboard page) files into a
 * Next.js project based on ROUTE_MANIFEST. Existing files are never
 * overwritten. Returns counts plus any errors/warnings collected;
 * never throws for per-file failures.
 *
 * Fix: the routes loop and the pages loop were verbatim duplicates
 * differing only in the entry list and the content generator; the
 * shared create-if-missing flow is extracted into syncManifestEntries.
 */
export function runSyncRoutes(config) {
    const projectRoot = config?.projectRoot ?? process.cwd();
    // NOTE(review): appDir is accepted but never used below — entry
    // paths come straight from ROUTE_MANIFEST. Confirm intended use.
    const appDir = config?.appDir ?? 'src/app';
    const dryRun = config?.dryRun ?? false;
    const syncPages = config?.pages ?? false;
    const result = {
        checked: 0,
        created: 0,
        skipped: 0,
        errors: [],
        warnings: [],
    };
    // Validate project structure
    const srcDir = resolve(projectRoot, 'src');
    if (!existsSync(srcDir)) {
        result.errors.push({ path: srcDir, error: 'src/ directory not found — is this a Next.js project?' });
        return result;
    }
    const configFile = resolve(projectRoot, 'src/lib/config.ts');
    if (!existsSync(configFile)) {
        result.warnings.push('src/lib/config.ts not found — route files import { routes } from this file');
    }
    // Check for dashboard dependency when syncing pages
    if (syncPages) {
        const pkgJsonPath = resolve(projectRoot, 'package.json');
        if (existsSync(pkgJsonPath)) {
            try {
                const pkg = JSON.parse(readFileSync(pkgJsonPath, 'utf-8'));
                const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
                if (!allDeps['@renseiai/agentfactory-dashboard']) {
                    result.warnings.push('@renseiai/agentfactory-dashboard not found in dependencies — page files require this package');
                }
            }
            catch {
                // Ignore JSON parse errors
            }
        }
    }
    // Sync route files, then page files (opt-in), via the shared flow.
    syncManifestEntries(ROUTE_MANIFEST.routes, generateRouteContent, projectRoot, dryRun, result);
    if (syncPages) {
        syncManifestEntries(ROUTE_MANIFEST.pages, generatePageContent, projectRoot, dryRun, result);
    }
    return result;
}
// Create each missing manifest entry on disk (or just report it when
// dryRun is set), accumulating counts and per-file errors into `result`.
function syncManifestEntries(entries, generate, projectRoot, dryRun, result) {
    for (const entry of entries) {
        result.checked++;
        const filePath = resolve(projectRoot, entry.path);
        if (existsSync(filePath)) {
            result.skipped++;
            if (dryRun) {
                console.log(`  exists  ${entry.path}`);
            }
            continue;
        }
        const content = generate(entry);
        if (dryRun) {
            console.log(`  create  ${entry.path}`);
            result.created++;
            continue;
        }
        try {
            mkdirSync(dirname(filePath), { recursive: true });
            writeFileSync(filePath, content, 'utf-8');
            console.log(`  created ${entry.path}`);
            result.created++;
        }
        catch (err) {
            const message = err instanceof Error ? err.message : String(err);
            result.errors.push({ path: entry.path, error: message });
        }
    }
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Shared version utilities for AgentFactory CLI commands.
|
|
3
|
+
*
|
|
4
|
+
* Provides current version detection and npm update checking with
|
|
5
|
+
* file-based caching to avoid excessive network requests.
|
|
6
|
+
*/
|
|
7
|
+
/**
 * Read the current package version from the CLI's package.json.
 */
export declare function getVersion(): string;
/** Outcome of an npm registry version check. */
export interface UpdateCheckResult {
    /** Version of the locally installed CLI. */
    currentVersion: string;
    /** Latest version found on the npm registry. */
    latestVersion: string;
    /** True when a newer version than currentVersion is available. */
    updateAvailable: boolean;
}
/**
 * Check whether a newer version is available on npm.
 *
 * Uses a file-based cache (4-hour TTL) to avoid hitting the registry
 * on every CLI invocation. Returns null if the check is skipped or fails.
 *
 * Disabled when:
 * - `AF_NO_UPDATE_CHECK=1` env var is set
 * - `--no-update-check` was passed
 * - Current version is 'unknown'
 */
export declare function checkForUpdate(opts?: {
    noUpdateCheck?: boolean;
}): Promise<UpdateCheckResult | null>;
/**
 * Print an update notification to stderr if a newer version is available.
 * Designed to be non-intrusive — just a single line after the startup banner.
 */
export declare function printUpdateNotification(result: UpdateCheckResult | null): void;
//# sourceMappingURL=version.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"version.d.ts","sourceRoot":"","sources":["../../../src/lib/version.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAaH;;GAEG;AACH,wBAAgB,UAAU,IAAI,MAAM,CAkBnC;AAMD,MAAM,WAAW,iBAAiB;IAChC,cAAc,EAAE,MAAM,CAAA;IACtB,aAAa,EAAE,MAAM,CAAA;IACrB,eAAe,EAAE,OAAO,CAAA;CACzB;AAoFD;;;;;;;;;;GAUG;AACH,wBAAsB,cAAc,CAAC,IAAI,CAAC,EAAE;IAC1C,aAAa,CAAC,EAAE,OAAO,CAAA;CACxB,GAAG,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,CA4BpC;AAeD;;;GAGG;AACH,wBAAgB,uBAAuB,CAAC,MAAM,EAAE,iBAAiB,GAAG,IAAI,GAAG,IAAI,CAO9E"}
|