@cuylabs/agent-runtime 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +242 -0
- package/dist/chunk-DBQSJ476.js +153 -0
- package/dist/chunk-H6BHDIUX.js +1466 -0
- package/dist/chunk-ZVISWF7S.js +108 -0
- package/dist/driver-KdyMlXQq.d.ts +127 -0
- package/dist/drivers/in-memory.d.ts +27 -0
- package/dist/drivers/in-memory.js +7 -0
- package/dist/index-CbLKNFMq.d.ts +266 -0
- package/dist/index.d.ts +44 -0
- package/dist/index.js +408 -0
- package/dist/orchestration/index.d.ts +2 -0
- package/dist/orchestration/index.js +13 -0
- package/package.json +66 -0
|
@@ -0,0 +1,1466 @@
|
|
|
1
|
+
import {
|
|
2
|
+
computeNextRunAtMs,
|
|
3
|
+
normalizeSchedule
|
|
4
|
+
} from "./chunk-ZVISWF7S.js";
|
|
5
|
+
|
|
6
|
+
// src/orchestration/types.ts
// Status → terminal flag. Non-terminal statuses are listed explicitly with an
// undefined value so the full status vocabulary is visible in one place.
var TERMINAL_STATUSES = {
  queued: undefined,
  running: undefined,
  completed: true,
  failed: true,
  cancelled: true
};
// True only for statuses explicitly flagged terminal (completed/failed/cancelled).
// The strict `=== true` comparison also guards against inherited Object.prototype
// keys (e.g. "toString") ever reading as terminal.
function isTerminalRunStatus(status) {
  const flag = TERMINAL_STATUSES[status];
  return flag === true;
}
|
|
17
|
+
|
|
18
|
+
// src/orchestration/stores/in-memory.ts
// Deep-copies a run record so callers can never mutate the store's internal state.
function cloneRun(run) {
  return structuredClone(run);
}
// Map-backed run store. Every value crossing the API boundary is cloned on the
// way in and on the way out, giving full isolation between caller and store.
var InMemoryOrchestratorRunStore = class {
  runs = /* @__PURE__ */ new Map();
  // Returns a defensive copy of the run, or undefined when absent.
  async get(runId) {
    const found = this.runs.get(runId);
    if (!found) {
      return void 0;
    }
    return cloneRun(found);
  }
  // Returns copies of all runs, ordered by createdAt (ISO strings sort lexically).
  async list() {
    const snapshots = [];
    for (const entry of this.runs.values()) {
      snapshots.push(cloneRun(entry));
    }
    snapshots.sort((left, right) => left.createdAt.localeCompare(right.createdAt));
    return snapshots;
  }
  // Stores a copy keyed by run.id and hands back a second, independent copy.
  async upsert(run) {
    const stored = cloneRun(run);
    this.runs.set(stored.id, stored);
    return cloneRun(stored);
  }
  // Deletes the run; true when an entry existed.
  async remove(runId) {
    return this.runs.delete(runId);
  }
};
|
|
40
|
+
|
|
41
|
+
// src/orchestration/service.ts
|
|
42
|
+
import { randomUUID as randomUUID2 } from "crypto";
|
|
43
|
+
|
|
44
|
+
// src/logger.ts
// Shared do-nothing sink for the silent logger.
function noop() {
}
// Logger that discards everything; used as the runtime's default.
var silentRuntimeLogger = {
  debug: noop,
  info: noop,
  warn: noop,
  error: noop
};
// Builds a console-backed logger. Each line is rendered as
// "[<prefix>] <message>" with JSON-serialized meta appended when present.
function createConsoleRuntimeLogger(prefix = "agent-runtime") {
  const render = (message, meta) => {
    const head = `[${prefix}] ${message}`;
    return meta ? `${head} ${JSON.stringify(meta)}` : head;
  };
  return {
    debug: (message, meta) => console.debug(render(message, meta)),
    info: (message, meta) => console.info(render(message, meta)),
    warn: (message, meta) => console.warn(render(message, meta)),
    error: (message, meta) => console.error(render(message, meta))
  };
}
|
|
72
|
+
|
|
73
|
+
// src/runtime.ts
import { randomUUID } from "crypto";
// Default cap on simultaneously executing jobs (AgentRuntime.maxConcurrentRuns).
var DEFAULT_MAX_CONCURRENT_RUNS = 4;
// Default per-attempt execution timeout: 15 minutes, in milliseconds.
var DEFAULT_EXECUTION_TIMEOUT_MS = 15 * 60 * 1e3;
// stop() aborts still-running jobs unless the caller opts out.
var DEFAULT_ABORT_IN_FLIGHT_ON_STOP = true;
// Validation limits enforced by normalizeJobId / normalizeJobName / validateMetadata.
var MAX_JOB_ID_LENGTH = 256;
var MAX_JOB_NAME_LENGTH = 256;
var MAX_METADATA_ENTRIES = 64;
var MAX_METADATA_KEY_LENGTH = 128;
var MAX_METADATA_VALUE_LENGTH = 2048;
|
|
83
|
+
// Normalizes the per-attempt execution timeout.
// - undefined        → package default (DEFAULT_EXECUTION_TIMEOUT_MS)
// - non-finite       → throws
// - <= 0 (floored)   → undefined, i.e. the timeout is disabled
// - otherwise        → the value floored to an integer number of ms
function normalizeExecutionTimeoutMs(value) {
  if (value === void 0) {
    return DEFAULT_EXECUTION_TIMEOUT_MS;
  }
  if (!Number.isFinite(value)) {
    throw new Error(`executionTimeoutMs must be a finite number. Received: ${String(value)}`);
  }
  const floored = Math.floor(value);
  return floored > 0 ? floored : void 0;
}
|
|
96
|
+
// Normalizes the optional dispatch-queue bound: undefined passes through
// (unbounded), non-finite values throw, everything else is floored and
// clamped to be non-negative.
function normalizeMaxQueuedDispatches(value) {
  if (value === void 0) {
    return void 0;
  }
  if (!Number.isFinite(value)) {
    throw new Error(`maxQueuedDispatches must be a finite number. Received: ${String(value)}`);
  }
  const floored = Math.floor(value);
  return floored < 0 ? 0 : floored;
}
|
|
105
|
+
// Trims a job id and validates it: rejects empty/whitespace-only ids and ids
// longer than MAX_JOB_ID_LENGTH. Returns the trimmed id.
function normalizeJobId(jobId) {
  const trimmed = jobId.trim();
  if (trimmed.length === 0) {
    throw new Error("Job id must not be empty");
  }
  if (trimmed.length > MAX_JOB_ID_LENGTH) {
    throw new Error(
      `Job id exceeds max length (${MAX_JOB_ID_LENGTH}). Received: ${trimmed.length}`
    );
  }
  return trimmed;
}
|
|
117
|
+
// Trims a job name and validates it: rejects empty/whitespace-only names and
// names longer than MAX_JOB_NAME_LENGTH. Returns the trimmed name.
function normalizeJobName(name) {
  const trimmed = name.trim();
  if (trimmed.length === 0) {
    throw new Error("Job name must not be empty");
  }
  if (trimmed.length > MAX_JOB_NAME_LENGTH) {
    throw new Error(
      `Job name exceeds max length (${MAX_JOB_NAME_LENGTH}). Received: ${trimmed.length}`
    );
  }
  return trimmed;
}
|
|
129
|
+
// Validates job metadata against the module limits: at most
// MAX_METADATA_ENTRIES entries, non-empty trimmed keys no longer than
// MAX_METADATA_KEY_LENGTH, values no longer than MAX_METADATA_VALUE_LENGTH.
// Absent metadata is valid. Throws on the first violation; does not mutate.
function validateMetadata(metadata) {
  if (!metadata) {
    return;
  }
  const entries = Object.entries(metadata);
  if (entries.length > MAX_METADATA_ENTRIES) {
    throw new Error(
      `Job metadata exceeds max entries (${MAX_METADATA_ENTRIES}). Received: ${entries.length}`
    );
  }
  for (const [rawKey, value] of entries) {
    const key = rawKey.trim();
    if (!key) {
      throw new Error("Job metadata keys must not be empty");
    }
    if (key.length > MAX_METADATA_KEY_LENGTH) {
      throw new Error(
        `Job metadata key exceeds max length (${MAX_METADATA_KEY_LENGTH}). Received: ${key.length}`
      );
    }
    if (value.length > MAX_METADATA_VALUE_LENGTH) {
      throw new Error(
        `Job metadata value exceeds max length (${MAX_METADATA_VALUE_LENGTH}) for key "${key}"`
      );
    }
  }
}
|
|
156
|
+
// Normalizes a retry policy:
// - falsy policy → undefined (no retries configured)
// - maxAttempts: must be finite; floored and clamped to >= 1
// - strategy: defaults to "fixed"; only "fixed"/"exponential" allowed
// - backoffMs / maxBackoffMs: optional, must be finite and >= 0, floored
// Returned object carries backoffMs/maxBackoffMs only when they were supplied.
function normalizeRetryPolicy(policy) {
  if (!policy) {
    return void 0;
  }
  if (!Number.isFinite(policy.maxAttempts)) {
    throw new Error(`retryPolicy.maxAttempts must be a finite number. Received: ${String(policy.maxAttempts)}`);
  }
  const floored = Math.floor(policy.maxAttempts);
  const maxAttempts = floored < 1 ? 1 : floored;
  const strategy = policy.strategy ?? "fixed";
  if (strategy !== "fixed" && strategy !== "exponential") {
    throw new Error(`retryPolicy.strategy must be "fixed" or "exponential". Received: ${String(policy.strategy)}`);
  }
  // Shared validation for the two optional delay fields.
  const normalizeDelay = (raw, field) => {
    if (raw === void 0) {
      return void 0;
    }
    if (!Number.isFinite(raw) || raw < 0) {
      throw new Error(`retryPolicy.${field} must be a non-negative finite number. Received: ${String(raw)}`);
    }
    return Math.floor(raw);
  };
  const backoffMs = normalizeDelay(policy.backoffMs, "backoffMs");
  const maxBackoffMs = normalizeDelay(policy.maxBackoffMs, "maxBackoffMs");
  const normalized = { maxAttempts };
  if (backoffMs !== void 0) {
    normalized.backoffMs = backoffMs;
  }
  normalized.strategy = strategy;
  if (maxBackoffMs !== void 0) {
    normalized.maxBackoffMs = maxBackoffMs;
  }
  return normalized;
}
|
|
191
|
+
// Computes the delay before the next retry. A missing or non-positive
// backoffMs means "retry immediately". For the exponential strategy the base
// delay doubles per completed attempt (attempt 1 → base, 2 → 2x, 3 → 4x, ...);
// maxBackoffMs, when set, caps the result for both strategies.
function computeRetryDelayMs(policy, attempt) {
  const base = policy.backoffMs ?? 0;
  if (base <= 0) {
    return 0;
  }
  let delay = base;
  if (policy.strategy === "exponential") {
    const exponent = attempt > 1 ? attempt - 1 : 0;
    delay = base * 2 ** exponent;
  }
  const cap = policy.maxBackoffMs;
  return cap === void 0 ? delay : Math.min(delay, cap);
}
|
|
202
|
+
function waitForSignalOrTimeout(ms, signal) {
|
|
203
|
+
if (ms <= 0) {
|
|
204
|
+
return Promise.resolve();
|
|
205
|
+
}
|
|
206
|
+
return new Promise((resolve, reject) => {
|
|
207
|
+
const timeout = setTimeout(() => {
|
|
208
|
+
signal.removeEventListener("abort", onAbort);
|
|
209
|
+
resolve();
|
|
210
|
+
}, ms);
|
|
211
|
+
timeout.unref?.();
|
|
212
|
+
const onAbort = () => {
|
|
213
|
+
clearTimeout(timeout);
|
|
214
|
+
reject(new Error("aborted"));
|
|
215
|
+
};
|
|
216
|
+
signal.addEventListener("abort", onAbort, { once: true });
|
|
217
|
+
});
|
|
218
|
+
}
|
|
219
|
+
// Core job runtime. Wraps a scheduling driver and executes jobs with:
// - a concurrency cap (maxConcurrentRuns) plus an optional bounded FIFO queue,
// - per-job retry policies with fixed/exponential backoff,
// - per-attempt timeouts and AbortController-based cancellation,
// - best-effort observer notifications and dead-letter reporting.
// NOTE(review): driver contract (start/stop/listJobs/getJob/upsertJob/removeJob)
// is declared elsewhere in the package — confirm against the .d.ts files.
var AgentRuntime = class {
  driver;
  executeJob;
  logger;
  maxConcurrentRuns;
  maxQueuedDispatches;
  createId;
  executionTimeoutMs;
  abortInFlightOnStop;
  onDeadLetter;
  observers;
  // True between a successful start() and stop().
  started = false;
  // Count of reserved (executing) runs; paired with inFlight.
  activeRuns = 0;
  // Job ids currently executing (one run per job id at a time).
  inFlight = /* @__PURE__ */ new Set();
  // jobId -> AbortController for the run, so stop() can cancel.
  runControllers = /* @__PURE__ */ new Map();
  // FIFO of dispatches waiting for a concurrency slot.
  pendingDispatches = [];
  // jobId -> pending entry; also used to dedupe queued dispatches per job.
  pendingDispatchByJobId = /* @__PURE__ */ new Map();
  constructor(options) {
    this.driver = options.driver;
    this.executeJob = options.execute;
    this.logger = options.logger ?? silentRuntimeLogger;
    // Concurrency cap is always at least 1.
    this.maxConcurrentRuns = Math.max(
      1,
      Math.floor(options.maxConcurrentRuns ?? DEFAULT_MAX_CONCURRENT_RUNS)
    );
    this.maxQueuedDispatches = normalizeMaxQueuedDispatches(options.maxQueuedDispatches);
    this.createId = options.createId ?? (() => randomUUID());
    this.executionTimeoutMs = normalizeExecutionTimeoutMs(options.executionTimeoutMs);
    this.abortInFlightOnStop = options.abortInFlightOnStop ?? DEFAULT_ABORT_IN_FLIGHT_ON_STOP;
    this.onDeadLetter = options.onDeadLetter;
    this.observers = options.observers ?? [];
  }
  // Invokes fn for every observer; observer failures are logged (with `label`
  // and `meta` for context) and never propagate to the runtime.
  async forEachObserver(label, meta, fn) {
    for (const observer of this.observers) {
      try {
        await fn(observer);
      } catch (error) {
        this.logger.error(`Runtime observer failed during ${label}`, {
          ...meta,
          error: String(error)
        });
      }
    }
  }
  // Starts the driver, wiring due-job callbacks back into dispatchDue.
  // Idempotent; rolls `started` back if the driver fails to start.
  async start() {
    if (this.started) {
      return;
    }
    this.started = true;
    try {
      await this.driver.start({
        now: () => Date.now(),
        logger: this.logger,
        onDue: async (jobId) => {
          return await this.dispatchDue(jobId);
        }
      });
      await this.forEachObserver("runtime start", { driver: this.driver.name }, async (observer) => {
        await observer.notifyRuntimeStart?.({
          driver: this.driver.name,
          at: Date.now()
        });
      });
      this.logger.info("Runtime started", { driver: this.driver.name });
    } catch (error) {
      this.started = false;
      throw error;
    }
  }
  // Stops the runtime: optionally aborts in-flight runs, rejects every queued
  // dispatch, then stops the driver and notifies observers. Idempotent.
  async stop() {
    if (!this.started) {
      return;
    }
    this.started = false;
    if (this.abortInFlightOnStop && this.runControllers.size > 0) {
      this.logger.warn("Aborting in-flight runtime jobs during stop", {
        count: this.runControllers.size
      });
      for (const controller of this.runControllers.values()) {
        controller.abort();
      }
    }
    if (this.pendingDispatches.length > 0) {
      const error = new Error("Runtime stopped before queued jobs could start");
      // splice(0) drains the queue in place before rejecting each waiter.
      for (const pending of this.pendingDispatches.splice(0)) {
        this.pendingDispatchByJobId.delete(pending.jobId);
        pending.reject(error);
      }
    }
    await this.driver.stop();
    await this.forEachObserver("runtime stop", { driver: this.driver.name }, async (observer) => {
      await observer.notifyRuntimeStop?.({
        driver: this.driver.name,
        at: Date.now()
      });
    });
    this.logger.info("Runtime stopped", { driver: this.driver.name });
  }
  // Snapshot of runtime state; maxQueuedDispatches is included only when bounded.
  status() {
    return {
      started: this.started,
      driver: this.driver.name,
      inFlightJobs: [...this.inFlight],
      queuedJobs: this.pendingDispatches.map((entry) => entry.jobId),
      maxConcurrentRuns: this.maxConcurrentRuns,
      ...this.maxQueuedDispatches !== void 0 ? { maxQueuedDispatches: this.maxQueuedDispatches } : {}
    };
  }
  async listJobs() {
    return await this.driver.listJobs();
  }
  // Uses the driver's getJob when available, otherwise scans listJobs().
  async getJob(jobId) {
    if (this.driver.getJob) {
      return await this.driver.getJob(jobId);
    }
    const jobs = await this.driver.listJobs();
    return jobs.find((entry) => entry.id === jobId);
  }
  // Creates or replaces a job record. Falls back to the existing record for
  // metadata/retryPolicy/enabled/createdAt when the input omits them. An
  // enabled one-shot ("at") schedule whose time cannot be computed (already
  // past) is persisted disabled with no next run.
  async schedule(input) {
    const now = Date.now();
    const existing = input.id ? await this.getJob(input.id) : void 0;
    const schedule = normalizeSchedule(input.schedule);
    const jobId = normalizeJobId(input.id ?? this.createId());
    const name = normalizeJobName(input.name);
    const metadata = input.metadata ?? existing?.metadata;
    const retryPolicy = normalizeRetryPolicy(input.retryPolicy ?? existing?.retryPolicy);
    validateMetadata(metadata);
    let enabled = input.enabled ?? existing?.enabled ?? true;
    let nextRunAtMs = enabled ? computeNextRunAtMs(schedule, now) : void 0;
    if (enabled && schedule.kind === "at" && nextRunAtMs === void 0) {
      enabled = false;
      nextRunAtMs = void 0;
    }
    const record = {
      id: jobId,
      name,
      payload: input.payload,
      schedule,
      ...retryPolicy ? { retryPolicy } : {},
      enabled,
      metadata,
      createdAt: existing?.createdAt ?? new Date(now).toISOString(),
      updatedAt: new Date(now).toISOString(),
      state: {
        ...existing?.state,
        nextRunAtMs
      }
    };
    return await this.driver.upsertJob(record);
  }
  // Partially updates an existing job (throws if missing). nextRunAtMs is
  // recomputed only when the schedule or enabled flag changed; the same
  // expired-"at" rule as schedule() applies.
  async update(jobId, patch) {
    const current = await this.requireJob(jobId);
    const now = Date.now();
    const name = normalizeJobName(patch.name ?? current.name);
    const nextSchedule = patch.schedule ? normalizeSchedule(patch.schedule) : current.schedule;
    const retryPolicy = normalizeRetryPolicy(patch.retryPolicy ?? current.retryPolicy);
    let enabled = patch.enabled ?? current.enabled;
    const metadata = patch.metadata ?? current.metadata;
    validateMetadata(metadata);
    let nextRunAtMs = current.state.nextRunAtMs;
    if (patch.schedule || patch.enabled !== void 0) {
      nextRunAtMs = enabled ? computeNextRunAtMs(nextSchedule, now) : void 0;
    }
    if (enabled && nextSchedule.kind === "at" && nextRunAtMs === void 0) {
      enabled = false;
      nextRunAtMs = void 0;
    }
    const nextRecord = {
      ...current,
      name,
      payload: patch.payload ?? current.payload,
      schedule: nextSchedule,
      retryPolicy,
      enabled,
      metadata,
      updatedAt: new Date(now).toISOString(),
      state: {
        ...current.state,
        nextRunAtMs
      }
    };
    return await this.driver.upsertJob(nextRecord);
  }
  // Convenience wrappers over update()/driver calls.
  async pause(jobId) {
    return await this.update(jobId, { enabled: false });
  }
  async resume(jobId) {
    return await this.update(jobId, { enabled: true });
  }
  async remove(jobId) {
    return await this.driver.removeJob(jobId);
  }
  async runNow(jobId) {
    return await this.dispatch({ jobId, trigger: "manual" });
  }
  async dispatch(input) {
    return await this.runJob(input.jobId, input.trigger);
  }
  // Entry point used by the driver when a job becomes due.
  async dispatchDue(jobId) {
    return await this.dispatch({ jobId, trigger: "due" });
  }
  // Routes a dispatch by reservation outcome: duplicate runs are skipped,
  // over-capacity dispatches are queued, otherwise the job executes now.
  async runJob(jobId, trigger) {
    const reservation = this.tryReserveRun(jobId);
    if (reservation === "already-running") {
      const startedAt = Date.now();
      return {
        jobId,
        trigger,
        startedAt,
        endedAt: startedAt,
        status: "skipped",
        error: "Job already running"
      };
    }
    if (reservation === "queued") {
      return await this.enqueuePendingDispatch(jobId, trigger);
    }
    return await this.executeReservedJob(jobId, trigger);
  }
  // getJob() that throws when the job does not exist.
  async requireJob(jobId) {
    const current = await this.getJob(jobId);
    if (!current) {
      throw new Error(`Job not found: ${jobId}`);
    }
    return current;
  }
  // Attempts to claim an execution slot for jobId.
  // "already-running": this job is in flight; "queued": at the concurrency
  // cap; "reserved": slot claimed (caller MUST call releaseRunReservation).
  tryReserveRun(jobId) {
    if (this.inFlight.has(jobId)) {
      return "already-running";
    }
    if (this.activeRuns >= this.maxConcurrentRuns) {
      return "queued";
    }
    this.inFlight.add(jobId);
    this.activeRuns += 1;
    return "reserved";
  }
  // Queues a dispatch for later execution. A second dispatch for a job that
  // is already queued coalesces into the existing entry (upgrading a "due"
  // trigger to "manual"). When the bounded queue is full the dispatch is
  // dropped with a "skipped" result and observers are told.
  async enqueuePendingDispatch(jobId, trigger) {
    const existing = this.pendingDispatchByJobId.get(jobId);
    if (existing) {
      if (existing.trigger === "due" && trigger === "manual") {
        existing.trigger = "manual";
      }
      return await existing.promise;
    }
    if (this.maxQueuedDispatches !== void 0 && this.pendingDispatches.length >= this.maxQueuedDispatches) {
      const startedAt = Date.now();
      await this.forEachObserver(
        "dispatch dropped",
        { jobId, trigger, queueLength: this.pendingDispatches.length },
        async (observer) => {
          await observer.notifyDispatchDropped?.({
            jobId,
            trigger,
            droppedAt: startedAt,
            queueLength: this.pendingDispatches.length,
            reason: `Runtime dispatch queue is full (${this.maxQueuedDispatches})`
          });
        }
      );
      return {
        jobId,
        trigger,
        startedAt,
        endedAt: startedAt,
        status: "skipped",
        error: `Runtime dispatch queue is full (${this.maxQueuedDispatches})`
      };
    }
    // Deferred-style promise: settled later by drainPendingDispatches()/stop().
    let resolvePending;
    let rejectPending;
    const promise = new Promise((resolve, reject) => {
      resolvePending = resolve;
      rejectPending = reject;
    });
    const pending = {
      jobId,
      trigger,
      enqueuedAt: Date.now(),
      promise,
      resolve: resolvePending,
      reject: rejectPending
    };
    this.pendingDispatches.push(pending);
    this.pendingDispatchByJobId.set(jobId, pending);
    await this.forEachObserver(
      "dispatch queued",
      { jobId, trigger, queueLength: this.pendingDispatches.length },
      async (observer) => {
        await observer.notifyDispatchQueued?.({
          jobId,
          trigger,
          queuedAt: pending.enqueuedAt,
          queueLength: this.pendingDispatches.length
        });
      }
    );
    this.logger.debug("Queued runtime job dispatch", {
      jobId,
      trigger,
      queueLength: this.pendingDispatches.length
    });
    return await promise;
  }
  // Runs a single attempt of the user execute callback with its own
  // AbortController. The attempt aborts when the run-level signal aborts or
  // when the per-attempt timeout fires; both paths are mapped to explicit
  // error messages. Returns { status, errorMessage? } and never throws.
  async executeJobAttempt(params) {
    const attemptStartedAt = Date.now();
    const attemptController = new AbortController();
    const onRunAbort = () => attemptController.abort();
    params.runSignal.addEventListener("abort", onRunAbort, { once: true });
    let timeoutHandle;
    let timedOut = false;
    const deadlineAt = this.executionTimeoutMs ? attemptStartedAt + this.executionTimeoutMs : void 0;
    if (this.executionTimeoutMs !== void 0) {
      timeoutHandle = setTimeout(() => {
        timedOut = true;
        attemptController.abort();
      }, this.executionTimeoutMs);
      // Don't keep the process alive for the watchdog timer.
      timeoutHandle.unref?.();
    }
    let status = "ok";
    let errorMessage;
    try {
      const result = await this.executeJob(params.job, {
        trigger: params.trigger,
        startedAt: attemptStartedAt,
        attempt: params.attempt,
        signal: attemptController.signal,
        deadlineAt
      });
      // The callback may return { status, error } to report its outcome.
      if (result && typeof result === "object") {
        status = result.status ?? status;
        errorMessage = result.error ?? errorMessage;
      }
      // Timeout/stop override whatever the callback reported.
      if (timedOut) {
        status = "error";
        errorMessage = `Execution timed out after ${this.executionTimeoutMs}ms`;
      } else if (params.runSignal.aborted && !this.started) {
        status = "error";
        errorMessage = "Execution aborted because runtime stopped";
      }
      if (status === "error" && !errorMessage) {
        errorMessage = "Execution returned error status";
      }
    } catch (error) {
      status = "error";
      if (timedOut) {
        errorMessage = `Execution timed out after ${this.executionTimeoutMs}ms`;
      } else if (params.runSignal.aborted && !this.started) {
        errorMessage = "Execution aborted because runtime stopped";
      } else {
        errorMessage = error instanceof Error ? error.message : String(error);
      }
    } finally {
      if (timeoutHandle) {
        clearTimeout(timeoutHandle);
      }
      params.runSignal.removeEventListener("abort", onRunAbort);
    }
    return {
      status,
      ...errorMessage ? { errorMessage } : {}
    };
  }
  // Delivers a dead-letter event to the configured handler (failures logged,
  // not rethrown) and then to observers. Without a handler, observers still run.
  async emitDeadLetter(event) {
    if (!this.onDeadLetter) {
      await this.forEachObserver("dead letter", { jobId: event.job.id }, async (observer) => {
        await observer.notifyDeadLetter?.(event);
      });
      return;
    }
    try {
      await this.onDeadLetter(event);
    } catch (error) {
      this.logger.error("Runtime dead-letter handler failed", {
        jobId: event.job.id,
        error: String(error)
      });
    }
    await this.forEachObserver("dead letter", { jobId: event.job.id }, async (observer) => {
      await observer.notifyDeadLetter?.(event);
    });
  }
  // Runs a job whose slot was already reserved by tryReserveRun(): loads the
  // record, runs attempts with retry/backoff, persists the final state, and
  // notifies observers. The reservation is always released in the finally.
  async executeReservedJob(jobId, trigger) {
    const startedAt = Date.now();
    try {
      const job = await this.getJob(jobId);
      if (!job) {
        return {
          jobId,
          trigger,
          startedAt,
          endedAt: startedAt,
          status: "skipped",
          error: "Job not found"
        };
      }
      // Scheduled triggers respect the enabled flag; manual runs do not.
      if (trigger === "due" && !job.enabled) {
        return {
          jobId,
          trigger,
          startedAt,
          endedAt: startedAt,
          status: "skipped",
          error: "Job is disabled"
        };
      }
      // Best-effort "running" marker; a persistence failure only warns.
      await this.driver.upsertJob({
        ...job,
        state: {
          ...job.state,
          running: true
        }
      }).catch((error) => {
        this.logger.warn("Failed to mark job as running", {
          jobId,
          error: String(error)
        });
      });
      await this.forEachObserver("run start", { jobId, trigger }, async (observer) => {
        await observer.notifyRunStart?.({
          job,
          trigger,
          startedAt
        });
      });
      const retryPolicy = normalizeRetryPolicy(job.retryPolicy);
      const maxAttempts = retryPolicy?.maxAttempts ?? 1;
      const abortController = new AbortController();
      this.runControllers.set(jobId, abortController);
      let status = "ok";
      let errorMessage;
      let attemptCount = 0;
      let deadLettered = false;
      try {
        // Attempt loop: stop on success, abort, or attempt exhaustion;
        // otherwise wait the backoff delay and try again.
        while (attemptCount < maxAttempts) {
          attemptCount += 1;
          const attemptResult = await this.executeJobAttempt({
            job,
            trigger,
            attempt: attemptCount,
            runSignal: abortController.signal
          });
          status = attemptResult.status;
          errorMessage = attemptResult.errorMessage;
          if (status !== "error" || abortController.signal.aborted || attemptCount >= maxAttempts) {
            break;
          }
          const delayMs = retryPolicy ? computeRetryDelayMs(retryPolicy, attemptCount) : 0;
          this.logger.warn("Retrying runtime job after failure", {
            jobId,
            trigger,
            attempt: attemptCount,
            maxAttempts,
            delayMs,
            error: errorMessage
          });
          await this.forEachObserver(
            "run retry",
            { jobId, trigger, attempt: attemptCount, nextAttempt: attemptCount + 1 },
            async (observer) => {
              await observer.notifyRunRetry?.({
                job,
                trigger,
                attempt: attemptCount,
                nextAttempt: attemptCount + 1,
                delayMs,
                ...errorMessage ? { error: errorMessage } : {}
              });
            }
          );
          try {
            await waitForSignalOrTimeout(delayMs, abortController.signal);
          } catch {
            // Backoff wait was aborted; give stop() a specific error message.
            if (abortController.signal.aborted && !this.started) {
              status = "error";
              errorMessage = "Execution aborted because runtime stopped";
            }
            break;
          }
        }
        // Dead-letter only genuine retry exhaustion: errored, not aborted,
        // retries were configured, all attempts used, and a message exists.
        deadLettered = status === "error" && !abortController.signal.aborted && maxAttempts > 1 && attemptCount >= maxAttempts && Boolean(errorMessage);
      } finally {
        this.runControllers.delete(jobId);
      }
      const endedAt = Date.now();
      // Re-read the record in case it was updated while running.
      const latest = await this.getJob(jobId) ?? job;
      let enabled = latest.enabled;
      let nextRunAtMs;
      if (!enabled) {
        nextRunAtMs = void 0;
      } else if (latest.schedule.kind === "at") {
        // One-shot jobs disable themselves after running.
        enabled = false;
        nextRunAtMs = void 0;
      } else {
        nextRunAtMs = computeNextRunAtMs(latest.schedule, endedAt);
      }
      const persisted = {
        ...latest,
        enabled,
        updatedAt: new Date(endedAt).toISOString(),
        state: {
          ...latest.state,
          running: false,
          lastRunAtMs: endedAt,
          lastStatus: status,
          lastError: status === "error" ? errorMessage : void 0,
          ...attemptCount > 0 ? { lastAttemptCount: attemptCount } : {},
          ...deadLettered ? {
            deadLetterCount: (latest.state.deadLetterCount ?? 0) + 1,
            lastDeadLetterAtMs: endedAt
          } : {},
          nextRunAtMs
        }
      };
      await this.driver.upsertJob(persisted);
      await this.forEachObserver(
        "run complete",
        { jobId, trigger, status },
        async (observer) => {
          await observer.notifyRunComplete?.({
            job: persisted,
            trigger,
            startedAt,
            endedAt,
            attempts: attemptCount,
            status,
            ...errorMessage ? { error: errorMessage } : {},
            ...deadLettered ? { deadLettered: true } : {}
          });
        }
      );
      if (deadLettered && errorMessage) {
        await this.emitDeadLetter({
          job: persisted,
          trigger,
          attempts: attemptCount,
          startedAt,
          endedAt,
          error: errorMessage
        });
      }
      return {
        jobId,
        trigger,
        status,
        startedAt,
        endedAt,
        ...errorMessage ? { error: errorMessage } : {}
      };
    } finally {
      this.releaseRunReservation(jobId);
    }
  }
  // Frees the slot claimed by tryReserveRun() and kicks the queue
  // (fire-and-forget; drain errors surface via the pending promises).
  releaseRunReservation(jobId) {
    this.inFlight.delete(jobId);
    this.activeRuns = Math.max(0, this.activeRuns - 1);
    void this.drainPendingDispatches();
  }
  // Starts queued dispatches while capacity remains. Stale entries (whose
  // map slot was superseded) are skipped; "queued" puts the entry back and
  // stops; "already-running" resolves the waiter with a skip result.
  async drainPendingDispatches() {
    if (!this.started) {
      return;
    }
    while (this.activeRuns < this.maxConcurrentRuns && this.pendingDispatches.length > 0) {
      const next = this.pendingDispatches.shift();
      if (!next) {
        return;
      }
      if (this.pendingDispatchByJobId.get(next.jobId) !== next) {
        continue;
      }
      const reservation = this.tryReserveRun(next.jobId);
      if (reservation === "queued") {
        this.pendingDispatches.unshift(next);
        return;
      }
      this.pendingDispatchByJobId.delete(next.jobId);
      if (reservation === "already-running") {
        const startedAt = Date.now();
        next.resolve({
          jobId: next.jobId,
          trigger: next.trigger,
          startedAt,
          endedAt: startedAt,
          status: "skipped",
          error: "Job already running"
        });
        continue;
      }
      // Execution is detached; the waiter's promise carries the outcome.
      void this.executeReservedJob(next.jobId, next.trigger).then((result) => {
        next.resolve(result);
      }).catch((error) => {
        next.reject(error);
      });
    }
  }
};
|
|
815
|
+
// Factory wrapper so consumers can construct a runtime without using `new`.
function createAgentRuntime(options) {
  const runtime = new AgentRuntime(options);
  return runtime;
}
|
|
818
|
+
|
|
819
|
+
// src/orchestration/service.ts
// Prefix for runtime job ids created on behalf of orchestrator runs
// (presumably joined with the run id — confirm against the service code).
var DEFAULT_JOB_ID_PREFIX = "orchestrator:run:";
var DEFAULT_RUN_LABEL_PREFIX = "orchestrator-run";
// Default scheduling look-ahead window: 60s (see normalizeScheduleAheadMs).
var DEFAULT_SCHEDULE_AHEAD_MS = 6e4;
// Default wait (10s) for in-flight work when closing the service —
// NOTE(review): exact close semantics live outside this chunk's visible code.
var DEFAULT_WAIT_TIMEOUT_ON_CLOSE_MS = 1e4;
// Internal sentinel reason marking a guide-triggered restart of a run.
var GUIDE_RESTART_REASON = "__guide-restart__";
|
|
825
|
+
function toErrorMessage(error) {
|
|
826
|
+
if (error instanceof Error && error.message) {
|
|
827
|
+
return error.message;
|
|
828
|
+
}
|
|
829
|
+
return String(error);
|
|
830
|
+
}
|
|
831
|
+
function normalizeNonEmpty(value, label) {
|
|
832
|
+
const normalized = value.trim();
|
|
833
|
+
if (!normalized) {
|
|
834
|
+
throw new Error(`${label} must not be empty`);
|
|
835
|
+
}
|
|
836
|
+
return normalized;
|
|
837
|
+
}
|
|
838
|
+
function normalizeId(value, label) {
|
|
839
|
+
return normalizeNonEmpty(value, label);
|
|
840
|
+
}
|
|
841
|
+
function normalizeLimit(value) {
|
|
842
|
+
if (value === void 0) {
|
|
843
|
+
return void 0;
|
|
844
|
+
}
|
|
845
|
+
if (!Number.isFinite(value)) {
|
|
846
|
+
throw new Error(`limit must be a finite number. Received: ${String(value)}`);
|
|
847
|
+
}
|
|
848
|
+
return Math.max(0, Math.floor(value));
|
|
849
|
+
}
|
|
850
|
+
function normalizeScheduleAheadMs(value) {
|
|
851
|
+
if (value === void 0) {
|
|
852
|
+
return DEFAULT_SCHEDULE_AHEAD_MS;
|
|
853
|
+
}
|
|
854
|
+
if (!Number.isFinite(value)) {
|
|
855
|
+
throw new Error(`scheduleAheadMs must be a finite number. Received: ${String(value)}`);
|
|
856
|
+
}
|
|
857
|
+
return Math.max(1, Math.floor(value));
|
|
858
|
+
}
|
|
859
|
+
function resolveGuidedInputFallback(params) {
|
|
860
|
+
if (typeof params.currentInput === "string") {
|
|
861
|
+
return `${params.currentInput}
|
|
862
|
+
|
|
863
|
+
[GUIDE]
|
|
864
|
+
${params.guideMessage}`;
|
|
865
|
+
}
|
|
866
|
+
if (params.currentInput && typeof params.currentInput === "object") {
|
|
867
|
+
const inputObject = params.currentInput;
|
|
868
|
+
if (typeof inputObject.message === "string") {
|
|
869
|
+
return {
|
|
870
|
+
...inputObject,
|
|
871
|
+
message: `${inputObject.message}
|
|
872
|
+
|
|
873
|
+
[GUIDE]
|
|
874
|
+
${params.guideMessage}`
|
|
875
|
+
};
|
|
876
|
+
}
|
|
877
|
+
}
|
|
878
|
+
throw new Error(
|
|
879
|
+
"Cannot derive guided input automatically for this payload shape. Provide `input` in guide(...) or configure `resolveGuidedInput`."
|
|
880
|
+
);
|
|
881
|
+
}
|
|
882
|
+
function nowIso(now = Date.now()) {
|
|
883
|
+
return new Date(now).toISOString();
|
|
884
|
+
}
|
|
885
|
+
var AgentOrchestrator = class {
|
|
886
|
+
runtime;
|
|
887
|
+
store;
|
|
888
|
+
executeRun;
|
|
889
|
+
logger;
|
|
890
|
+
createId;
|
|
891
|
+
jobIdPrefix;
|
|
892
|
+
runLabelPrefix;
|
|
893
|
+
scheduleAheadMs;
|
|
894
|
+
resolveGuidedInput;
|
|
895
|
+
waiters = /* @__PURE__ */ new Map();
|
|
896
|
+
runControllers = /* @__PURE__ */ new Map();
|
|
897
|
+
closeReasons = /* @__PURE__ */ new Map();
|
|
898
|
+
pendingGuides = /* @__PURE__ */ new Map();
|
|
899
|
+
constructor(options) {
|
|
900
|
+
this.executeRun = options.execute;
|
|
901
|
+
this.store = options.store ?? new InMemoryOrchestratorRunStore();
|
|
902
|
+
this.logger = options.logger ?? options.runtime?.logger ?? silentRuntimeLogger;
|
|
903
|
+
this.createId = options.createId ?? (() => randomUUID2());
|
|
904
|
+
this.jobIdPrefix = normalizeNonEmpty(options.jobIdPrefix ?? DEFAULT_JOB_ID_PREFIX, "jobIdPrefix");
|
|
905
|
+
this.runLabelPrefix = normalizeNonEmpty(
|
|
906
|
+
options.runLabelPrefix ?? DEFAULT_RUN_LABEL_PREFIX,
|
|
907
|
+
"runLabelPrefix"
|
|
908
|
+
);
|
|
909
|
+
this.scheduleAheadMs = normalizeScheduleAheadMs(options.scheduleAheadMs);
|
|
910
|
+
this.resolveGuidedInput = options.resolveGuidedInput ?? ((params) => resolveGuidedInputFallback({
|
|
911
|
+
currentInput: params.run.input,
|
|
912
|
+
guideMessage: params.guideMessage
|
|
913
|
+
}));
|
|
914
|
+
this.runtime = createAgentRuntime({
|
|
915
|
+
...options.runtime,
|
|
916
|
+
logger: options.runtime?.logger ?? this.logger,
|
|
917
|
+
driver: options.driver,
|
|
918
|
+
execute: async (job, context) => {
|
|
919
|
+
return await this.executeRuntimeJob(job.payload, context);
|
|
920
|
+
}
|
|
921
|
+
});
|
|
922
|
+
}
|
|
923
|
+
async start() {
|
|
924
|
+
if (this.runtime.status().started) {
|
|
925
|
+
return;
|
|
926
|
+
}
|
|
927
|
+
await this.store.start?.();
|
|
928
|
+
try {
|
|
929
|
+
await this.runtime.start();
|
|
930
|
+
} catch (error) {
|
|
931
|
+
await this.store.stop?.().catch(() => {
|
|
932
|
+
});
|
|
933
|
+
throw error;
|
|
934
|
+
}
|
|
935
|
+
}
|
|
936
|
+
async stop() {
|
|
937
|
+
let runtimeError;
|
|
938
|
+
try {
|
|
939
|
+
await this.runtime.stop();
|
|
940
|
+
} catch (error) {
|
|
941
|
+
runtimeError = error;
|
|
942
|
+
}
|
|
943
|
+
try {
|
|
944
|
+
await this.store.stop?.();
|
|
945
|
+
} catch (error) {
|
|
946
|
+
if (!runtimeError) {
|
|
947
|
+
throw error;
|
|
948
|
+
}
|
|
949
|
+
this.logger.error("Failed to stop orchestrator run store after runtime stop failure", {
|
|
950
|
+
error: toErrorMessage(error)
|
|
951
|
+
});
|
|
952
|
+
}
|
|
953
|
+
if (runtimeError) {
|
|
954
|
+
throw runtimeError;
|
|
955
|
+
}
|
|
956
|
+
}
|
|
957
|
+
async status() {
|
|
958
|
+
const runs = await this.store.list();
|
|
959
|
+
const counts = {
|
|
960
|
+
total: runs.length,
|
|
961
|
+
queued: 0,
|
|
962
|
+
running: 0,
|
|
963
|
+
completed: 0,
|
|
964
|
+
failed: 0,
|
|
965
|
+
cancelled: 0
|
|
966
|
+
};
|
|
967
|
+
for (const run of runs) {
|
|
968
|
+
counts[run.state.status] += 1;
|
|
969
|
+
}
|
|
970
|
+
return {
|
|
971
|
+
runtime: this.runtime.status(),
|
|
972
|
+
runs: counts
|
|
973
|
+
};
|
|
974
|
+
}
|
|
975
|
+
async invoke(input) {
|
|
976
|
+
this.ensureStarted();
|
|
977
|
+
const runId = normalizeId(input.id ?? this.createId(), "runId");
|
|
978
|
+
const existing = await this.store.get(runId);
|
|
979
|
+
if (existing) {
|
|
980
|
+
throw new Error(`Run already exists: ${runId}`);
|
|
981
|
+
}
|
|
982
|
+
const now = Date.now();
|
|
983
|
+
const run = {
|
|
984
|
+
id: runId,
|
|
985
|
+
label: normalizeNonEmpty(input.label ?? `${this.runLabelPrefix}:${runId}`, "label"),
|
|
986
|
+
input: input.input,
|
|
987
|
+
parentRunId: input.parentRunId?.trim() || void 0,
|
|
988
|
+
metadata: input.metadata,
|
|
989
|
+
createdAt: nowIso(now),
|
|
990
|
+
updatedAt: nowIso(now),
|
|
991
|
+
state: {
|
|
992
|
+
status: "queued",
|
|
993
|
+
running: false,
|
|
994
|
+
attempt: 0,
|
|
995
|
+
guideCount: 0
|
|
996
|
+
}
|
|
997
|
+
};
|
|
998
|
+
await this.store.upsert(run);
|
|
999
|
+
let jobId;
|
|
1000
|
+
try {
|
|
1001
|
+
jobId = await this.launchRun(run, {
|
|
1002
|
+
runId,
|
|
1003
|
+
input: run.input,
|
|
1004
|
+
mode: "invoke"
|
|
1005
|
+
});
|
|
1006
|
+
} catch (error) {
|
|
1007
|
+
await this.failRunLaunch(run, error, "Failed to schedule run");
|
|
1008
|
+
throw new Error(`Failed to schedule run "${runId}": ${toErrorMessage(error)}`);
|
|
1009
|
+
}
|
|
1010
|
+
return {
|
|
1011
|
+
run: await this.requireRun(runId),
|
|
1012
|
+
jobId
|
|
1013
|
+
};
|
|
1014
|
+
}
|
|
1015
|
+
async get(runId) {
|
|
1016
|
+
return await this.store.get(normalizeId(runId, "runId"));
|
|
1017
|
+
}
|
|
1018
|
+
async listInvocations(options = {}) {
|
|
1019
|
+
const records = await this.store.list();
|
|
1020
|
+
const statuses = this.resolveStatusFilter(options.status);
|
|
1021
|
+
const parentRunId = options.parentRunId?.trim();
|
|
1022
|
+
const limit = normalizeLimit(options.limit);
|
|
1023
|
+
const filtered = records.filter((record) => {
|
|
1024
|
+
if (statuses && !statuses.has(record.state.status)) {
|
|
1025
|
+
return false;
|
|
1026
|
+
}
|
|
1027
|
+
if (parentRunId && record.parentRunId !== parentRunId) {
|
|
1028
|
+
return false;
|
|
1029
|
+
}
|
|
1030
|
+
return true;
|
|
1031
|
+
});
|
|
1032
|
+
filtered.sort((a, b) => b.createdAt.localeCompare(a.createdAt));
|
|
1033
|
+
return limit === void 0 ? filtered : filtered.slice(0, limit);
|
|
1034
|
+
}
|
|
1035
|
+
async waitForInvocation(runId, options = {}) {
|
|
1036
|
+
const normalizedRunId = normalizeId(runId, "runId");
|
|
1037
|
+
if (options.signal?.aborted) {
|
|
1038
|
+
throw new Error(`Waiting for run "${normalizedRunId}" aborted`);
|
|
1039
|
+
}
|
|
1040
|
+
const current = await this.requireRun(normalizedRunId);
|
|
1041
|
+
if (isTerminalRunStatus(current.state.status)) {
|
|
1042
|
+
return current;
|
|
1043
|
+
}
|
|
1044
|
+
return await new Promise((resolve, reject) => {
|
|
1045
|
+
let timeout;
|
|
1046
|
+
let abortHandler;
|
|
1047
|
+
const cleanup = () => {
|
|
1048
|
+
if (timeout) {
|
|
1049
|
+
clearTimeout(timeout);
|
|
1050
|
+
}
|
|
1051
|
+
if (abortHandler && options.signal) {
|
|
1052
|
+
options.signal.removeEventListener("abort", abortHandler);
|
|
1053
|
+
}
|
|
1054
|
+
const currentGroup = this.waiters.get(normalizedRunId);
|
|
1055
|
+
currentGroup?.delete(waiter);
|
|
1056
|
+
if (currentGroup && currentGroup.size === 0) {
|
|
1057
|
+
this.waiters.delete(normalizedRunId);
|
|
1058
|
+
}
|
|
1059
|
+
};
|
|
1060
|
+
const waiter = {
|
|
1061
|
+
resolve: (record) => {
|
|
1062
|
+
cleanup();
|
|
1063
|
+
resolve(record);
|
|
1064
|
+
},
|
|
1065
|
+
reject: (error) => {
|
|
1066
|
+
cleanup();
|
|
1067
|
+
reject(error);
|
|
1068
|
+
}
|
|
1069
|
+
};
|
|
1070
|
+
const group = this.waiters.get(normalizedRunId) ?? /* @__PURE__ */ new Set();
|
|
1071
|
+
group.add(waiter);
|
|
1072
|
+
this.waiters.set(normalizedRunId, group);
|
|
1073
|
+
if (options.signal) {
|
|
1074
|
+
abortHandler = () => {
|
|
1075
|
+
waiter.reject(new Error(`Waiting for run "${normalizedRunId}" aborted`));
|
|
1076
|
+
};
|
|
1077
|
+
if (options.signal.aborted) {
|
|
1078
|
+
abortHandler();
|
|
1079
|
+
return;
|
|
1080
|
+
}
|
|
1081
|
+
options.signal.addEventListener("abort", abortHandler, { once: true });
|
|
1082
|
+
}
|
|
1083
|
+
if (options.timeoutMs === void 0) {
|
|
1084
|
+
return;
|
|
1085
|
+
}
|
|
1086
|
+
if (!Number.isFinite(options.timeoutMs) || options.timeoutMs <= 0) {
|
|
1087
|
+
waiter.reject(
|
|
1088
|
+
new Error(`timeoutMs must be greater than zero. Received: ${String(options.timeoutMs)}`)
|
|
1089
|
+
);
|
|
1090
|
+
return;
|
|
1091
|
+
}
|
|
1092
|
+
const timeoutMs = Math.floor(options.timeoutMs);
|
|
1093
|
+
timeout = setTimeout(() => {
|
|
1094
|
+
waiter.reject(new Error(`Timed out waiting for run "${normalizedRunId}" after ${timeoutMs}ms`));
|
|
1095
|
+
}, timeoutMs);
|
|
1096
|
+
timeout.unref?.();
|
|
1097
|
+
});
|
|
1098
|
+
}
|
|
1099
|
+
async close(runId, options = {}) {
|
|
1100
|
+
const normalizedRunId = normalizeId(runId, "runId");
|
|
1101
|
+
const run = await this.requireRun(normalizedRunId);
|
|
1102
|
+
if (isTerminalRunStatus(run.state.status)) {
|
|
1103
|
+
return run;
|
|
1104
|
+
}
|
|
1105
|
+
const reason = options.reason?.trim() || "Run cancelled";
|
|
1106
|
+
const controller = this.runControllers.get(normalizedRunId);
|
|
1107
|
+
if (controller) {
|
|
1108
|
+
this.closeReasons.set(normalizedRunId, reason);
|
|
1109
|
+
controller.abort();
|
|
1110
|
+
try {
|
|
1111
|
+
return await this.waitForInvocation(normalizedRunId, {
|
|
1112
|
+
timeoutMs: DEFAULT_WAIT_TIMEOUT_ON_CLOSE_MS
|
|
1113
|
+
});
|
|
1114
|
+
} catch {
|
|
1115
|
+
return await this.requireRun(normalizedRunId);
|
|
1116
|
+
}
|
|
1117
|
+
}
|
|
1118
|
+
await this.disableRunJob(normalizedRunId);
|
|
1119
|
+
const now = Date.now();
|
|
1120
|
+
const cancelled = {
|
|
1121
|
+
...run,
|
|
1122
|
+
updatedAt: nowIso(now),
|
|
1123
|
+
state: {
|
|
1124
|
+
...run.state,
|
|
1125
|
+
status: "cancelled",
|
|
1126
|
+
running: false,
|
|
1127
|
+
endedAtMs: now,
|
|
1128
|
+
error: reason,
|
|
1129
|
+
result: void 0
|
|
1130
|
+
}
|
|
1131
|
+
};
|
|
1132
|
+
await this.store.upsert(cancelled);
|
|
1133
|
+
this.notifyWaiters(cancelled);
|
|
1134
|
+
return cancelled;
|
|
1135
|
+
}
|
|
1136
|
+
async guide(input) {
|
|
1137
|
+
this.ensureStarted();
|
|
1138
|
+
const runId = normalizeId(input.runId, "runId");
|
|
1139
|
+
const guideMessage = normalizeNonEmpty(input.message, "message");
|
|
1140
|
+
const run = await this.requireRun(runId);
|
|
1141
|
+
if (isTerminalRunStatus(run.state.status)) {
|
|
1142
|
+
throw new Error(`Cannot guide terminal run "${runId}" (${run.state.status})`);
|
|
1143
|
+
}
|
|
1144
|
+
const nextInput = input.input ?? this.resolveGuidedInput({
|
|
1145
|
+
run,
|
|
1146
|
+
guideMessage
|
|
1147
|
+
});
|
|
1148
|
+
const now = Date.now();
|
|
1149
|
+
const next = {
|
|
1150
|
+
...run,
|
|
1151
|
+
input: nextInput,
|
|
1152
|
+
metadata: input.metadata ?? run.metadata,
|
|
1153
|
+
updatedAt: nowIso(now),
|
|
1154
|
+
state: {
|
|
1155
|
+
...run.state,
|
|
1156
|
+
guideCount: run.state.guideCount + 1,
|
|
1157
|
+
error: void 0,
|
|
1158
|
+
result: void 0
|
|
1159
|
+
}
|
|
1160
|
+
};
|
|
1161
|
+
await this.store.upsert(next);
|
|
1162
|
+
const runningController = this.runControllers.get(runId);
|
|
1163
|
+
if (runningController) {
|
|
1164
|
+
this.pendingGuides.set(runId, {
|
|
1165
|
+
message: guideMessage,
|
|
1166
|
+
input: nextInput,
|
|
1167
|
+
guideCount: next.state.guideCount,
|
|
1168
|
+
metadata: input.metadata ?? run.metadata
|
|
1169
|
+
});
|
|
1170
|
+
this.closeReasons.set(runId, GUIDE_RESTART_REASON);
|
|
1171
|
+
runningController.abort();
|
|
1172
|
+
return await this.requireRun(runId);
|
|
1173
|
+
}
|
|
1174
|
+
try {
|
|
1175
|
+
await this.launchRun(next, {
|
|
1176
|
+
runId,
|
|
1177
|
+
input: nextInput,
|
|
1178
|
+
mode: "guide",
|
|
1179
|
+
guideMessage
|
|
1180
|
+
});
|
|
1181
|
+
} catch (error) {
|
|
1182
|
+
await this.failRunLaunch(next, error, "Failed to relaunch guided run");
|
|
1183
|
+
throw new Error(`Failed to relaunch guided run "${runId}": ${toErrorMessage(error)}`);
|
|
1184
|
+
}
|
|
1185
|
+
return await this.requireRun(runId);
|
|
1186
|
+
}
|
|
1187
|
+
ensureStarted() {
|
|
1188
|
+
if (!this.runtime.status().started) {
|
|
1189
|
+
throw new Error("Orchestrator runtime is not started. Call start() before this operation.");
|
|
1190
|
+
}
|
|
1191
|
+
}
|
|
1192
|
+
toJobId(runId) {
|
|
1193
|
+
return `${this.jobIdPrefix}${runId}`;
|
|
1194
|
+
}
|
|
1195
|
+
resolveStatusFilter(status) {
|
|
1196
|
+
if (status === void 0) {
|
|
1197
|
+
return void 0;
|
|
1198
|
+
}
|
|
1199
|
+
const raw = Array.isArray(status) ? status : [status];
|
|
1200
|
+
const normalized = raw.filter(Boolean);
|
|
1201
|
+
if (normalized.length === 0) {
|
|
1202
|
+
return void 0;
|
|
1203
|
+
}
|
|
1204
|
+
return new Set(normalized);
|
|
1205
|
+
}
|
|
1206
|
+
async requireRun(runId) {
|
|
1207
|
+
const run = await this.store.get(runId);
|
|
1208
|
+
if (!run) {
|
|
1209
|
+
throw new Error(`Run not found: ${runId}`);
|
|
1210
|
+
}
|
|
1211
|
+
return run;
|
|
1212
|
+
}
|
|
1213
|
+
async failRunLaunch(run, error, prefix) {
|
|
1214
|
+
const endedAt = Date.now();
|
|
1215
|
+
const failed = {
|
|
1216
|
+
...run,
|
|
1217
|
+
updatedAt: nowIso(endedAt),
|
|
1218
|
+
state: {
|
|
1219
|
+
...run.state,
|
|
1220
|
+
status: "failed",
|
|
1221
|
+
running: false,
|
|
1222
|
+
endedAtMs: endedAt,
|
|
1223
|
+
error: `${prefix}: ${toErrorMessage(error)}`,
|
|
1224
|
+
result: void 0
|
|
1225
|
+
}
|
|
1226
|
+
};
|
|
1227
|
+
await this.store.upsert(failed);
|
|
1228
|
+
this.notifyWaiters(failed);
|
|
1229
|
+
return failed;
|
|
1230
|
+
}
|
|
1231
|
+
async failPendingRunLaunch(runId, error, prefix) {
|
|
1232
|
+
const run = await this.store.get(runId);
|
|
1233
|
+
if (!run || isTerminalRunStatus(run.state.status)) {
|
|
1234
|
+
return;
|
|
1235
|
+
}
|
|
1236
|
+
await this.failRunLaunch(run, error, prefix);
|
|
1237
|
+
}
|
|
1238
|
+
async disableRunJob(runId) {
|
|
1239
|
+
const jobId = this.toJobId(runId);
|
|
1240
|
+
try {
|
|
1241
|
+
await this.runtime.pause(jobId);
|
|
1242
|
+
} catch {
|
|
1243
|
+
}
|
|
1244
|
+
}
|
|
1245
|
+
async launchRun(run, payload) {
|
|
1246
|
+
this.ensureStarted();
|
|
1247
|
+
const jobId = this.toJobId(run.id);
|
|
1248
|
+
const scheduleAt = Date.now() + this.scheduleAheadMs;
|
|
1249
|
+
const existing = await this.runtime.getJob(jobId);
|
|
1250
|
+
if (existing) {
|
|
1251
|
+
await this.runtime.update(jobId, {
|
|
1252
|
+
name: run.label,
|
|
1253
|
+
payload,
|
|
1254
|
+
enabled: true,
|
|
1255
|
+
metadata: run.metadata,
|
|
1256
|
+
schedule: {
|
|
1257
|
+
kind: "at",
|
|
1258
|
+
at: scheduleAt
|
|
1259
|
+
}
|
|
1260
|
+
});
|
|
1261
|
+
} else {
|
|
1262
|
+
await this.runtime.schedule({
|
|
1263
|
+
id: jobId,
|
|
1264
|
+
name: run.label,
|
|
1265
|
+
payload,
|
|
1266
|
+
enabled: true,
|
|
1267
|
+
metadata: run.metadata,
|
|
1268
|
+
schedule: {
|
|
1269
|
+
kind: "at",
|
|
1270
|
+
at: scheduleAt
|
|
1271
|
+
}
|
|
1272
|
+
});
|
|
1273
|
+
}
|
|
1274
|
+
void this.runtime.runNow(jobId).then((result) => {
|
|
1275
|
+
if (result.status === "skipped") {
|
|
1276
|
+
this.logger.debug("Run dispatch skipped", {
|
|
1277
|
+
runId: run.id,
|
|
1278
|
+
jobId,
|
|
1279
|
+
reason: result.error
|
|
1280
|
+
});
|
|
1281
|
+
}
|
|
1282
|
+
}).catch((error) => {
|
|
1283
|
+
this.logger.error("Run dispatch failed", {
|
|
1284
|
+
runId: run.id,
|
|
1285
|
+
jobId,
|
|
1286
|
+
error: toErrorMessage(error)
|
|
1287
|
+
});
|
|
1288
|
+
void this.failPendingRunLaunch(run.id, error, "Run dispatch failed").catch((persistError) => {
|
|
1289
|
+
this.logger.error("Failed to persist run dispatch failure", {
|
|
1290
|
+
runId: run.id,
|
|
1291
|
+
jobId,
|
|
1292
|
+
error: toErrorMessage(persistError)
|
|
1293
|
+
});
|
|
1294
|
+
});
|
|
1295
|
+
});
|
|
1296
|
+
return jobId;
|
|
1297
|
+
}
|
|
1298
|
+
notifyWaiters(run) {
|
|
1299
|
+
if (!isTerminalRunStatus(run.state.status)) {
|
|
1300
|
+
return;
|
|
1301
|
+
}
|
|
1302
|
+
const group = this.waiters.get(run.id);
|
|
1303
|
+
if (!group || group.size === 0) {
|
|
1304
|
+
return;
|
|
1305
|
+
}
|
|
1306
|
+
this.waiters.delete(run.id);
|
|
1307
|
+
for (const waiter of group) {
|
|
1308
|
+
waiter.resolve(run);
|
|
1309
|
+
}
|
|
1310
|
+
}
|
|
1311
|
+
async executeRuntimeJob(payload, runtimeContext) {
|
|
1312
|
+
const run = await this.store.get(payload.runId);
|
|
1313
|
+
if (!run) {
|
|
1314
|
+
return {
|
|
1315
|
+
status: "error",
|
|
1316
|
+
error: `Run not found: ${payload.runId}`
|
|
1317
|
+
};
|
|
1318
|
+
}
|
|
1319
|
+
if (run.state.status === "cancelled") {
|
|
1320
|
+
return {
|
|
1321
|
+
status: "skipped",
|
|
1322
|
+
error: "Run already cancelled"
|
|
1323
|
+
};
|
|
1324
|
+
}
|
|
1325
|
+
const startedAt = Date.now();
|
|
1326
|
+
const running = {
|
|
1327
|
+
...run,
|
|
1328
|
+
input: payload.input,
|
|
1329
|
+
updatedAt: nowIso(startedAt),
|
|
1330
|
+
state: {
|
|
1331
|
+
...run.state,
|
|
1332
|
+
status: "running",
|
|
1333
|
+
running: true,
|
|
1334
|
+
attempt: run.state.attempt + 1,
|
|
1335
|
+
startedAtMs: startedAt,
|
|
1336
|
+
endedAtMs: void 0,
|
|
1337
|
+
error: void 0,
|
|
1338
|
+
result: void 0
|
|
1339
|
+
}
|
|
1340
|
+
};
|
|
1341
|
+
await this.store.upsert(running);
|
|
1342
|
+
const runController = new AbortController();
|
|
1343
|
+
const onRuntimeAbort = () => runController.abort();
|
|
1344
|
+
runtimeContext.signal.addEventListener("abort", onRuntimeAbort, { once: true });
|
|
1345
|
+
this.runControllers.set(run.id, runController);
|
|
1346
|
+
try {
|
|
1347
|
+
const result = await this.executeRun(running, {
|
|
1348
|
+
signal: runController.signal,
|
|
1349
|
+
trigger: runtimeContext.trigger,
|
|
1350
|
+
mode: payload.mode,
|
|
1351
|
+
attempt: running.state.attempt,
|
|
1352
|
+
startedAt,
|
|
1353
|
+
deadlineAt: runtimeContext.deadlineAt,
|
|
1354
|
+
guideMessage: payload.guideMessage
|
|
1355
|
+
});
|
|
1356
|
+
const endedAt = Date.now();
|
|
1357
|
+
const completed = {
|
|
1358
|
+
...running,
|
|
1359
|
+
updatedAt: nowIso(endedAt),
|
|
1360
|
+
state: {
|
|
1361
|
+
...running.state,
|
|
1362
|
+
status: "completed",
|
|
1363
|
+
running: false,
|
|
1364
|
+
endedAtMs: endedAt,
|
|
1365
|
+
result,
|
|
1366
|
+
error: void 0
|
|
1367
|
+
}
|
|
1368
|
+
};
|
|
1369
|
+
await this.store.upsert(completed);
|
|
1370
|
+
this.notifyWaiters(completed);
|
|
1371
|
+
return { status: "ok" };
|
|
1372
|
+
} catch (error) {
|
|
1373
|
+
const endedAt = Date.now();
|
|
1374
|
+
const reason = this.closeReasons.get(run.id);
|
|
1375
|
+
const wasAborted = runController.signal.aborted;
|
|
1376
|
+
if (wasAborted && reason === GUIDE_RESTART_REASON) {
|
|
1377
|
+
const pendingGuide = this.pendingGuides.get(run.id);
|
|
1378
|
+
if (pendingGuide) {
|
|
1379
|
+
this.pendingGuides.delete(run.id);
|
|
1380
|
+
this.closeReasons.delete(run.id);
|
|
1381
|
+
const queued = {
|
|
1382
|
+
...running,
|
|
1383
|
+
input: pendingGuide.input,
|
|
1384
|
+
metadata: pendingGuide.metadata ?? running.metadata,
|
|
1385
|
+
updatedAt: nowIso(endedAt),
|
|
1386
|
+
state: {
|
|
1387
|
+
...running.state,
|
|
1388
|
+
guideCount: pendingGuide.guideCount,
|
|
1389
|
+
status: "queued",
|
|
1390
|
+
running: false,
|
|
1391
|
+
endedAtMs: void 0,
|
|
1392
|
+
error: void 0,
|
|
1393
|
+
result: void 0
|
|
1394
|
+
}
|
|
1395
|
+
};
|
|
1396
|
+
await this.store.upsert(queued);
|
|
1397
|
+
const retryTimer = setTimeout(() => {
|
|
1398
|
+
void this.launchRun(queued, {
|
|
1399
|
+
runId: queued.id,
|
|
1400
|
+
input: pendingGuide.input,
|
|
1401
|
+
mode: "guide",
|
|
1402
|
+
guideMessage: pendingGuide.message
|
|
1403
|
+
}).catch((retryError) => {
|
|
1404
|
+
this.logger.error("Failed to relaunch guided run", {
|
|
1405
|
+
runId: queued.id,
|
|
1406
|
+
error: toErrorMessage(retryError)
|
|
1407
|
+
});
|
|
1408
|
+
void this.failRunLaunch(queued, retryError, "Failed to relaunch guided run").catch(
|
|
1409
|
+
(persistError) => {
|
|
1410
|
+
this.logger.error("Failed to persist guided relaunch failure", {
|
|
1411
|
+
runId: queued.id,
|
|
1412
|
+
error: toErrorMessage(persistError)
|
|
1413
|
+
});
|
|
1414
|
+
}
|
|
1415
|
+
);
|
|
1416
|
+
});
|
|
1417
|
+
}, 0);
|
|
1418
|
+
retryTimer.unref?.();
|
|
1419
|
+
return {
|
|
1420
|
+
status: "skipped",
|
|
1421
|
+
error: "Run restarted after guide"
|
|
1422
|
+
};
|
|
1423
|
+
}
|
|
1424
|
+
}
|
|
1425
|
+
const errorMessage = toErrorMessage(error);
|
|
1426
|
+
const cancellationMessage = reason && reason !== GUIDE_RESTART_REASON ? reason : "Run cancelled";
|
|
1427
|
+
const cancelled = wasAborted;
|
|
1428
|
+
const terminal = {
|
|
1429
|
+
...running,
|
|
1430
|
+
updatedAt: nowIso(endedAt),
|
|
1431
|
+
state: {
|
|
1432
|
+
...running.state,
|
|
1433
|
+
status: cancelled ? "cancelled" : "failed",
|
|
1434
|
+
running: false,
|
|
1435
|
+
endedAtMs: endedAt,
|
|
1436
|
+
error: cancelled ? cancellationMessage : errorMessage,
|
|
1437
|
+
result: void 0
|
|
1438
|
+
}
|
|
1439
|
+
};
|
|
1440
|
+
await this.store.upsert(terminal);
|
|
1441
|
+
this.notifyWaiters(terminal);
|
|
1442
|
+
return {
|
|
1443
|
+
status: "error",
|
|
1444
|
+
error: terminal.state.error
|
|
1445
|
+
};
|
|
1446
|
+
} finally {
|
|
1447
|
+
runtimeContext.signal.removeEventListener("abort", onRuntimeAbort);
|
|
1448
|
+
this.runControllers.delete(run.id);
|
|
1449
|
+
this.closeReasons.delete(run.id);
|
|
1450
|
+
}
|
|
1451
|
+
}
|
|
1452
|
+
};
|
|
1453
|
+
function createAgentOrchestrator(options) {
|
|
1454
|
+
return new AgentOrchestrator(options);
|
|
1455
|
+
}
|
|
1456
|
+
|
|
1457
|
+
export {
|
|
1458
|
+
silentRuntimeLogger,
|
|
1459
|
+
createConsoleRuntimeLogger,
|
|
1460
|
+
AgentRuntime,
|
|
1461
|
+
createAgentRuntime,
|
|
1462
|
+
isTerminalRunStatus,
|
|
1463
|
+
InMemoryOrchestratorRunStore,
|
|
1464
|
+
AgentOrchestrator,
|
|
1465
|
+
createAgentOrchestrator
|
|
1466
|
+
};
|