@kortyx/agent 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/dist/chat/create-agent.d.ts +16 -0
- package/dist/chat/create-agent.d.ts.map +1 -0
- package/dist/chat/create-agent.js +61 -0
- package/dist/chat/process-chat.d.ts +25 -0
- package/dist/chat/process-chat.d.ts.map +1 -0
- package/dist/chat/process-chat.js +69 -0
- package/dist/index.d.ts +12 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +16 -0
- package/dist/interrupt/resume-handler.d.ts +27 -0
- package/dist/interrupt/resume-handler.d.ts.map +1 -0
- package/dist/interrupt/resume-handler.js +89 -0
- package/dist/orchestrator.d.ts +20 -0
- package/dist/orchestrator.d.ts.map +1 -0
- package/dist/orchestrator.js +391 -0
- package/dist/stream/transform-graph-stream-for-ui.d.ts +10 -0
- package/dist/stream/transform-graph-stream-for-ui.d.ts.map +1 -0
- package/dist/stream/transform-graph-stream-for-ui.js +194 -0
- package/dist/types/chat-message.d.ts +8 -0
- package/dist/types/chat-message.d.ts.map +1 -0
- package/dist/types/chat-message.js +2 -0
- package/dist/utils/extract-latest-message.d.ts +3 -0
- package/dist/utils/extract-latest-message.d.ts.map +1 -0
- package/dist/utils/extract-latest-message.js +14 -0
- package/package.json +36 -0
- package/src/chat/create-agent.ts +97 -0
- package/src/chat/process-chat.ts +132 -0
- package/src/index.ts +22 -0
- package/src/interrupt/resume-handler.ts +146 -0
- package/src/orchestrator.ts +532 -0
- package/src/stream/transform-graph-stream-for-ui.ts +245 -0
- package/src/types/chat-message.ts +7 -0
- package/src/utils/extract-latest-message.ts +13 -0
- package/tsconfig.build.json +21 -0
- package/tsconfig.build.tsbuildinfo +1 -0
- package/tsconfig.json +16 -0
|
@@ -0,0 +1,532 @@
|
|
|
1
|
+
import type { GraphState, WorkflowDefinition, WorkflowId } from "@kortyx/core";
|
|
2
|
+
import {
|
|
3
|
+
type PendingRequestRecord,
|
|
4
|
+
savePendingRequest,
|
|
5
|
+
updatePendingRequest,
|
|
6
|
+
} from "@kortyx/memory";
|
|
7
|
+
import {
|
|
8
|
+
createLangGraph,
|
|
9
|
+
makeRequestId,
|
|
10
|
+
makeResumeToken,
|
|
11
|
+
} from "@kortyx/runtime";
|
|
12
|
+
import type { StreamChunk } from "@kortyx/stream";
|
|
13
|
+
import { Command } from "@langchain/langgraph";
|
|
14
|
+
import { PassThrough } from "stream";
|
|
15
|
+
import { transformGraphStreamForUI } from "./stream/transform-graph-stream-for-ui";
|
|
16
|
+
|
|
17
|
+
/**
 * Resolves a workflow id to its full definition. Invoked by the orchestrator
 * when a running graph requests a transition to another workflow.
 */
export type SelectWorkflowFn = (
  workflowId: string,
) => Promise<WorkflowDefinition<any, any>>;
|
|
20
|
+
|
|
21
|
+
/**
 * Persists a snapshot of the graph state for a session. Called
 * opportunistically while streaming and again after workflow transitions.
 */
export type SaveMemoryFn = (
  sessionId: string,
  state: GraphState,
) => Promise<void>;
|
|
25
|
+
|
|
26
|
+
/**
 * Minimal surface of a compiled LangGraph that the orchestrator needs:
 * a mutable config bag (the orchestrator injects its `emit` bridge into it)
 * and the `streamEvents` runtime-event iterator.
 */
export interface CompiledGraphLike {
  // Mutable runtime config; orchestrateGraphStream writes `emit` into it.
  config?: Record<string, unknown>;
  streamEvents: (
    state: GraphState,
    options?: { version?: string; configurable?: Record<string, unknown> },
  ) => AsyncIterable<unknown> | AsyncGenerator<unknown>;
}
|
|
33
|
+
|
|
34
|
+
/** Arguments accepted by {@link orchestrateGraphStream}. */
export interface OrchestrateArgs {
  // Session identifier used for checkpoint threads, memory snapshots and
  // pending human-input requests.
  sessionId?: string;
  graph: CompiledGraphLike; // minimal graph surface used here
  state: GraphState; // initial state
  config: Record<string, unknown>; // runtime config
  saveMemory?: SaveMemoryFn;
  selectWorkflow: SelectWorkflowFn;
}
|
|
42
|
+
|
|
43
|
+
/**
|
|
44
|
+
* Orchestrates LangGraph execution with mid-stream transitions.
|
|
45
|
+
* Works with both:
|
|
46
|
+
* - transition chunks emitted via transformGraphStreamForUI
|
|
47
|
+
* - transition events emitted via ctx.emit("transition", ...)
|
|
48
|
+
*/
|
|
49
|
+
export async function orchestrateGraphStream({
|
|
50
|
+
sessionId,
|
|
51
|
+
graph,
|
|
52
|
+
state,
|
|
53
|
+
config,
|
|
54
|
+
saveMemory,
|
|
55
|
+
selectWorkflow,
|
|
56
|
+
}: OrchestrateArgs): Promise<NodeJS.ReadableStream> {
|
|
57
|
+
const out = new PassThrough({ objectMode: true });
|
|
58
|
+
|
|
59
|
+
let currentGraph = graph;
|
|
60
|
+
let currentState: GraphState = state;
|
|
61
|
+
let finished = false;
|
|
62
|
+
|
|
63
|
+
// Announce session id to clients so they can persist it
|
|
64
|
+
try {
|
|
65
|
+
const sid = (config as any)?.session?.id as string | undefined;
|
|
66
|
+
if (sid && typeof sid === "string") {
|
|
67
|
+
out.write({ type: "session", sessionId: sid } as any);
|
|
68
|
+
}
|
|
69
|
+
} catch {}
|
|
70
|
+
|
|
71
|
+
// Pending transition captured from ctx.emit(...)
|
|
72
|
+
const pending: { to: string | null; payload: Record<string, unknown> } = {
|
|
73
|
+
to: null,
|
|
74
|
+
payload: {},
|
|
75
|
+
};
|
|
76
|
+
|
|
77
|
+
// Bridge internal graph emits to our stream AND capture transitions
|
|
78
|
+
// Track which nodes streamed text so we can suppress duplicate full messages
|
|
79
|
+
const streamedNodes = new Set<string>();
|
|
80
|
+
let lastStatusMsg = "";
|
|
81
|
+
let lastStatusAt = 0;
|
|
82
|
+
|
|
83
|
+
// No chunking needed for final ui.message; we forward a single "message" event now.
|
|
84
|
+
// Capture interrupt payloads to emit a final interrupt chunk after graph ends.
|
|
85
|
+
interface HumanInputPayload {
|
|
86
|
+
node?: string;
|
|
87
|
+
workflow?: string;
|
|
88
|
+
input?: {
|
|
89
|
+
kind?: string;
|
|
90
|
+
multiple?: boolean;
|
|
91
|
+
question?: string;
|
|
92
|
+
options?: Array<{
|
|
93
|
+
id: string;
|
|
94
|
+
label: string;
|
|
95
|
+
description?: string;
|
|
96
|
+
value?: unknown;
|
|
97
|
+
}>;
|
|
98
|
+
};
|
|
99
|
+
}
|
|
100
|
+
// Track latest interrupt token for updating stored snapshot at end
|
|
101
|
+
let pendingRecordToken: string | null = null;
|
|
102
|
+
// Track if current invocation is a resume, so we can de-dupe interrupt events
|
|
103
|
+
let activeIsResume = false;
|
|
104
|
+
// Avoid emitting duplicate interrupt chunks (e.g., both from forwardEmit and placeholder)
|
|
105
|
+
let wroteHumanInput = false;
|
|
106
|
+
|
|
107
|
+
const forwardEmit = (event: string, payload: unknown) => {
|
|
108
|
+
if (event === "error") {
|
|
109
|
+
const msg = String(
|
|
110
|
+
(payload as { message?: unknown })?.message ?? "Unexpected error",
|
|
111
|
+
);
|
|
112
|
+
out.write({ type: "error", message: msg });
|
|
113
|
+
out.write({ type: "done" });
|
|
114
|
+
finished = true;
|
|
115
|
+
out.end();
|
|
116
|
+
return;
|
|
117
|
+
}
|
|
118
|
+
if (event === "status") {
|
|
119
|
+
const msg = String((payload as { message?: unknown })?.message ?? "");
|
|
120
|
+
const now = Date.now();
|
|
121
|
+
if (msg && msg === lastStatusMsg && now - lastStatusAt < 250) return; // de-dupe rapid duplicates
|
|
122
|
+
lastStatusMsg = msg;
|
|
123
|
+
lastStatusAt = now;
|
|
124
|
+
out.write({ type: "status", message: msg });
|
|
125
|
+
return;
|
|
126
|
+
}
|
|
127
|
+
if (event === "text-start") {
|
|
128
|
+
const node = (payload as { node?: string })?.node;
|
|
129
|
+
if (!node) return;
|
|
130
|
+
out.write({ type: "text-start", node });
|
|
131
|
+
streamedNodes.add(node);
|
|
132
|
+
return;
|
|
133
|
+
}
|
|
134
|
+
if (event === "text-delta") {
|
|
135
|
+
const node = (payload as { node?: string })?.node;
|
|
136
|
+
const delta = String((payload as { delta?: unknown })?.delta ?? "");
|
|
137
|
+
if (!node || !delta) return;
|
|
138
|
+
out.write({ type: "text-delta", delta, node });
|
|
139
|
+
streamedNodes.add(node);
|
|
140
|
+
return;
|
|
141
|
+
}
|
|
142
|
+
if (event === "text-end") {
|
|
143
|
+
const node = (payload as { node?: string })?.node;
|
|
144
|
+
if (!node) return;
|
|
145
|
+
out.write({ type: "text-end", node });
|
|
146
|
+
return;
|
|
147
|
+
}
|
|
148
|
+
if (event === "message") {
|
|
149
|
+
const node = (payload as { node?: string })?.node;
|
|
150
|
+
const text = String((payload as { content?: unknown })?.content ?? "");
|
|
151
|
+
out.write({ type: "message", node, content: text });
|
|
152
|
+
return;
|
|
153
|
+
}
|
|
154
|
+
if (event === "structured_data") {
|
|
155
|
+
out.write({
|
|
156
|
+
type: "structured-data",
|
|
157
|
+
node: (payload as { node?: string })?.node,
|
|
158
|
+
dataType: (payload as { dataType?: string })?.dataType,
|
|
159
|
+
data: (payload as { data?: unknown })?.data,
|
|
160
|
+
});
|
|
161
|
+
return;
|
|
162
|
+
}
|
|
163
|
+
// legacy 'human_required' removed — dynamic interrupts are used instead
|
|
164
|
+
if (event === "transition") {
|
|
165
|
+
// 1) surface to the client (useful for dev tools)
|
|
166
|
+
out.write({
|
|
167
|
+
type: "transition",
|
|
168
|
+
transitionTo: (payload as { transitionTo?: string })?.transitionTo,
|
|
169
|
+
payload:
|
|
170
|
+
(payload as { payload?: Record<string, unknown> })?.payload ?? {},
|
|
171
|
+
});
|
|
172
|
+
// 2) capture for orchestration even if the uiStream never emits it
|
|
173
|
+
pending.to = (payload as { transitionTo?: string })?.transitionTo ?? null;
|
|
174
|
+
pending.payload =
|
|
175
|
+
(payload as { payload?: Record<string, unknown> })?.payload ?? {};
|
|
176
|
+
return;
|
|
177
|
+
}
|
|
178
|
+
if (event === "interrupt") {
|
|
179
|
+
// Ignore duplicate interrupt events on resume invocations to avoid double prompts
|
|
180
|
+
if (activeIsResume) return;
|
|
181
|
+
try {
|
|
182
|
+
const p = payload as any;
|
|
183
|
+
// eslint-disable-next-line no-console
|
|
184
|
+
console.log(
|
|
185
|
+
`[orchestrator] interrupt node=${p?.node} workflow=${p?.workflow} options=${
|
|
186
|
+
Array.isArray(p?.input?.options) ? p.input.options.length : 0
|
|
187
|
+
}`,
|
|
188
|
+
);
|
|
189
|
+
} catch {}
|
|
190
|
+
// Persist a snapshot and emit a typed chunk immediately
|
|
191
|
+
const p = payload as any;
|
|
192
|
+
const local: HumanInputPayload = {
|
|
193
|
+
node: p?.node,
|
|
194
|
+
workflow: p?.workflow,
|
|
195
|
+
input: p?.input,
|
|
196
|
+
};
|
|
197
|
+
|
|
198
|
+
const token = makeResumeToken();
|
|
199
|
+
const requestId = makeRequestId("human");
|
|
200
|
+
pendingRecordToken = token;
|
|
201
|
+
const options = Array.isArray(local.input?.options)
|
|
202
|
+
? (local.input!.options! as Array<any>)
|
|
203
|
+
: [];
|
|
204
|
+
|
|
205
|
+
const kind =
|
|
206
|
+
local.input?.kind ||
|
|
207
|
+
(local.input?.multiple ? "multi-choice" : "choice");
|
|
208
|
+
const isText = kind === "text";
|
|
209
|
+
|
|
210
|
+
const record: PendingRequestRecord = {
|
|
211
|
+
token,
|
|
212
|
+
requestId,
|
|
213
|
+
sessionId: sessionId,
|
|
214
|
+
workflow: local.workflow || (currentState.currentWorkflow as string),
|
|
215
|
+
node: local.node || "",
|
|
216
|
+
// Provide an immediate snapshot so resume can work even if user clicks fast
|
|
217
|
+
state: { ...(currentState as GraphState), awaitingHumanInput: true },
|
|
218
|
+
schema: isText
|
|
219
|
+
? {
|
|
220
|
+
kind: kind as any,
|
|
221
|
+
multiple: Boolean(local.input?.multiple),
|
|
222
|
+
...(local.input?.question
|
|
223
|
+
? { question: local.input.question }
|
|
224
|
+
: {}),
|
|
225
|
+
}
|
|
226
|
+
: {
|
|
227
|
+
kind: kind as any,
|
|
228
|
+
multiple: Boolean(local.input?.multiple),
|
|
229
|
+
question: String(
|
|
230
|
+
local.input?.question || "Please choose an option.",
|
|
231
|
+
),
|
|
232
|
+
},
|
|
233
|
+
options: options.map((o: any) => ({
|
|
234
|
+
id: String(o.id),
|
|
235
|
+
label: String(o.label),
|
|
236
|
+
description:
|
|
237
|
+
typeof o.description === "string" ? o.description : undefined,
|
|
238
|
+
value: (o as any).value,
|
|
239
|
+
})),
|
|
240
|
+
createdAt: Date.now(),
|
|
241
|
+
ttlMs: 15 * 60 * 1000,
|
|
242
|
+
};
|
|
243
|
+
savePendingRequest(record);
|
|
244
|
+
out.write({
|
|
245
|
+
type: "interrupt",
|
|
246
|
+
requestId: record.requestId,
|
|
247
|
+
resumeToken: record.token,
|
|
248
|
+
workflow: record.workflow,
|
|
249
|
+
node: record.node,
|
|
250
|
+
input: {
|
|
251
|
+
kind: record.schema.kind,
|
|
252
|
+
multiple: record.schema.multiple,
|
|
253
|
+
question: record.schema.question,
|
|
254
|
+
options: record.options.map((o) => ({
|
|
255
|
+
id: o.id,
|
|
256
|
+
label: o.label,
|
|
257
|
+
description: o.description,
|
|
258
|
+
})),
|
|
259
|
+
},
|
|
260
|
+
} as any);
|
|
261
|
+
wroteHumanInput = true;
|
|
262
|
+
return;
|
|
263
|
+
}
|
|
264
|
+
};
|
|
265
|
+
|
|
266
|
+
(async () => {
|
|
267
|
+
while (true) {
|
|
268
|
+
let workflowFinalState: GraphState | null = null;
|
|
269
|
+
|
|
270
|
+
// Ensure the compiled graph uses our forwardEmit
|
|
271
|
+
currentGraph.config = currentGraph.config || {};
|
|
272
|
+
currentGraph.config.emit = forwardEmit;
|
|
273
|
+
const threadId =
|
|
274
|
+
((currentGraph.config as any)?.session?.id as string | undefined) ||
|
|
275
|
+
sessionId ||
|
|
276
|
+
"anonymous-session";
|
|
277
|
+
out.write({
|
|
278
|
+
type: "status",
|
|
279
|
+
message: `🧵 thread_id=${threadId} workflow=${currentState.currentWorkflow}`,
|
|
280
|
+
});
|
|
281
|
+
|
|
282
|
+
// Stream runtime events (LLM deltas, node starts/ends, etc.)
|
|
283
|
+
const isResume = Boolean((currentGraph.config as any)?.resume);
|
|
284
|
+
activeIsResume = isResume;
|
|
285
|
+
// For static breakpoints, resume with null input; if a resumeUpdate was provided,
|
|
286
|
+
// use Command({ update }) to merge selection into state at resume time.
|
|
287
|
+
const resumeUpdate = (currentGraph.config as any)?.resumeUpdate as
|
|
288
|
+
| Record<string, unknown>
|
|
289
|
+
| undefined;
|
|
290
|
+
const resumeValue = (currentGraph.config as any)?.resumeValue as
|
|
291
|
+
| unknown
|
|
292
|
+
| undefined;
|
|
293
|
+
const invokeState = isResume
|
|
294
|
+
? resumeValue !== undefined
|
|
295
|
+
? (new Command({ resume: resumeValue }) as any)
|
|
296
|
+
: resumeUpdate
|
|
297
|
+
? (new Command({ update: resumeUpdate }) as any)
|
|
298
|
+
: (null as any)
|
|
299
|
+
: (currentState as any);
|
|
300
|
+
const runtimeStream = currentGraph.streamEvents(invokeState, {
|
|
301
|
+
version: "v2",
|
|
302
|
+
configurable: {
|
|
303
|
+
thread_id: threadId,
|
|
304
|
+
// Use a stable namespace so checkpoints survive across recompiles of same workflow
|
|
305
|
+
checkpoint_ns: String(currentState.currentWorkflow || "default"),
|
|
306
|
+
},
|
|
307
|
+
});
|
|
308
|
+
|
|
309
|
+
// Diagnostics for stream invocation shape
|
|
310
|
+
try {
|
|
311
|
+
out.write({
|
|
312
|
+
type: "status",
|
|
313
|
+
message: `▶️ streamEvents invoke: resume=${Boolean((currentGraph.config as any)?.resume)} thread_id=${threadId} ns=${String(currentState.currentWorkflow || "default")}`,
|
|
314
|
+
} as any);
|
|
315
|
+
} catch {}
|
|
316
|
+
|
|
317
|
+
const uiStream = transformGraphStreamForUI(runtimeStream as any, {
|
|
318
|
+
debug: Boolean((config as any)?.features?.tracing),
|
|
319
|
+
});
|
|
320
|
+
|
|
321
|
+
// Also allow transition detection via uiStream (if transformer emits it)
|
|
322
|
+
let loopTransitionTo: string | null = null;
|
|
323
|
+
let loopTransitionPayload: Record<string, unknown> = {};
|
|
324
|
+
|
|
325
|
+
for await (const chunk of uiStream as AsyncIterable<StreamChunk>) {
|
|
326
|
+
if (finished) break;
|
|
327
|
+
const node = (chunk as { node?: string }).node;
|
|
328
|
+
// Convert placeholder interrupt chunk (from transformer) into a real chunk with tokens + persist snapshot
|
|
329
|
+
if (
|
|
330
|
+
(chunk as any).type === "interrupt" &&
|
|
331
|
+
(!(chunk as any).resumeToken || !(chunk as any).requestId)
|
|
332
|
+
) {
|
|
333
|
+
if (wroteHumanInput) {
|
|
334
|
+
// Already emitted an interrupt for this pause; skip placeholder
|
|
335
|
+
continue;
|
|
336
|
+
}
|
|
337
|
+
const hi = chunk as any;
|
|
338
|
+
const token = makeResumeToken();
|
|
339
|
+
const requestId = makeRequestId("human");
|
|
340
|
+
pendingRecordToken = token;
|
|
341
|
+
const options = Array.isArray(hi.input?.options)
|
|
342
|
+
? hi.input.options
|
|
343
|
+
: [];
|
|
344
|
+
|
|
345
|
+
const kind =
|
|
346
|
+
hi.input?.kind || (hi.input?.multiple ? "multi-choice" : "choice");
|
|
347
|
+
const isText = kind === "text";
|
|
348
|
+
|
|
349
|
+
const record: PendingRequestRecord = {
|
|
350
|
+
token,
|
|
351
|
+
requestId,
|
|
352
|
+
sessionId: sessionId,
|
|
353
|
+
workflow: currentState.currentWorkflow as string,
|
|
354
|
+
node: node || "",
|
|
355
|
+
state: {
|
|
356
|
+
...(currentState as GraphState),
|
|
357
|
+
awaitingHumanInput: true,
|
|
358
|
+
},
|
|
359
|
+
schema: isText
|
|
360
|
+
? {
|
|
361
|
+
kind: kind as any,
|
|
362
|
+
multiple: Boolean(hi.input?.multiple),
|
|
363
|
+
...(hi.input?.question
|
|
364
|
+
? { question: hi.input.question }
|
|
365
|
+
: {}),
|
|
366
|
+
}
|
|
367
|
+
: {
|
|
368
|
+
kind: kind as any,
|
|
369
|
+
multiple: Boolean(hi.input?.multiple),
|
|
370
|
+
question: String(
|
|
371
|
+
hi.input?.question || "Please choose an option.",
|
|
372
|
+
),
|
|
373
|
+
},
|
|
374
|
+
options: options.map((o: any) => ({
|
|
375
|
+
id: String(o.id),
|
|
376
|
+
label: String(o.label),
|
|
377
|
+
description:
|
|
378
|
+
typeof o.description === "string" ? o.description : undefined,
|
|
379
|
+
value: (o as any).value,
|
|
380
|
+
})),
|
|
381
|
+
createdAt: Date.now(),
|
|
382
|
+
ttlMs: 15 * 60 * 1000,
|
|
383
|
+
};
|
|
384
|
+
savePendingRequest(record);
|
|
385
|
+
out.write({
|
|
386
|
+
type: "interrupt",
|
|
387
|
+
requestId,
|
|
388
|
+
resumeToken: token,
|
|
389
|
+
workflow: record.workflow,
|
|
390
|
+
node: record.node,
|
|
391
|
+
input: {
|
|
392
|
+
kind: record.schema.kind,
|
|
393
|
+
multiple: record.schema.multiple,
|
|
394
|
+
question: record.schema.question,
|
|
395
|
+
options: record.options.map((o) => ({
|
|
396
|
+
id: o.id,
|
|
397
|
+
label: o.label,
|
|
398
|
+
description: o.description,
|
|
399
|
+
})),
|
|
400
|
+
},
|
|
401
|
+
} as any);
|
|
402
|
+
wroteHumanInput = true;
|
|
403
|
+
continue;
|
|
404
|
+
}
|
|
405
|
+
if (chunk.type === "text-delta") {
|
|
406
|
+
if (typeof chunk.delta === "string" && chunk.delta.length > 60) {
|
|
407
|
+
const text = chunk.delta as string;
|
|
408
|
+
for (let i = 0; i < text.length; i += 60) {
|
|
409
|
+
out.write({
|
|
410
|
+
type: "text-delta",
|
|
411
|
+
delta: text.slice(i, i + 60),
|
|
412
|
+
node,
|
|
413
|
+
});
|
|
414
|
+
}
|
|
415
|
+
if (node) streamedNodes.add(node);
|
|
416
|
+
} else {
|
|
417
|
+
out.write(chunk);
|
|
418
|
+
if (node) streamedNodes.add(node);
|
|
419
|
+
}
|
|
420
|
+
} else {
|
|
421
|
+
out.write(chunk);
|
|
422
|
+
}
|
|
423
|
+
|
|
424
|
+
// Persist memory snapshots opportunistically
|
|
425
|
+
if (saveMemory && sessionId && chunk.type !== "status") {
|
|
426
|
+
await saveMemory(sessionId, { ...currentState });
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
// Transition surfaced by transformer
|
|
430
|
+
if (chunk.type === "transition") {
|
|
431
|
+
loopTransitionTo = String(chunk.transitionTo || "");
|
|
432
|
+
loopTransitionPayload = chunk.payload ?? {};
|
|
433
|
+
break; // stop current workflow, move to next
|
|
434
|
+
}
|
|
435
|
+
|
|
436
|
+
if (chunk.type === "done") {
|
|
437
|
+
workflowFinalState = (chunk.data as GraphState) ?? null;
|
|
438
|
+
break;
|
|
439
|
+
}
|
|
440
|
+
}
|
|
441
|
+
|
|
442
|
+
if (finished) return;
|
|
443
|
+
|
|
444
|
+
// Prefer transition detected from the uiStream; otherwise use pending from emit()
|
|
445
|
+
const transitionTo = loopTransitionTo || pending.to;
|
|
446
|
+
const transitionPayload = Object.keys(loopTransitionPayload).length
|
|
447
|
+
? loopTransitionPayload
|
|
448
|
+
: pending.payload;
|
|
449
|
+
|
|
450
|
+
// Reset pending so we don't carry it accidentally
|
|
451
|
+
pending.to = null;
|
|
452
|
+
pending.payload = {};
|
|
453
|
+
|
|
454
|
+
if (transitionTo) {
|
|
455
|
+
// 🔁 Handoff to the next workflow
|
|
456
|
+
try {
|
|
457
|
+
const nextWorkflow = await selectWorkflow(transitionTo);
|
|
458
|
+
const nextGraph = await createLangGraph(nextWorkflow, {
|
|
459
|
+
...(config as Record<string, unknown>),
|
|
460
|
+
emit: forwardEmit, // keep forwarding emits
|
|
461
|
+
});
|
|
462
|
+
|
|
463
|
+
// Merge data: prefer the final state's data if present, then add transition payload
|
|
464
|
+
const mergedData = {
|
|
465
|
+
...(workflowFinalState?.data ?? currentState.data ?? {}),
|
|
466
|
+
...(transitionPayload ?? {}),
|
|
467
|
+
};
|
|
468
|
+
|
|
469
|
+
const newInput: string =
|
|
470
|
+
typeof (transitionPayload as { rawInput?: unknown })?.rawInput ===
|
|
471
|
+
"string"
|
|
472
|
+
? ((transitionPayload as { rawInput?: unknown })
|
|
473
|
+
.rawInput as string)
|
|
474
|
+
: currentState.input;
|
|
475
|
+
|
|
476
|
+
currentState = {
|
|
477
|
+
...currentState,
|
|
478
|
+
currentWorkflow: transitionTo as WorkflowId,
|
|
479
|
+
input: newInput,
|
|
480
|
+
data: mergedData,
|
|
481
|
+
ui: {}, // reset UI layer on new graph
|
|
482
|
+
};
|
|
483
|
+
|
|
484
|
+
if (saveMemory && sessionId) {
|
|
485
|
+
await saveMemory(sessionId, currentState);
|
|
486
|
+
}
|
|
487
|
+
|
|
488
|
+
currentGraph = nextGraph;
|
|
489
|
+
continue; // run the next graph
|
|
490
|
+
} catch (err) {
|
|
491
|
+
out.write({
|
|
492
|
+
type: "status",
|
|
493
|
+
message: `⚠️ Transition failed to '${transitionTo}': ${
|
|
494
|
+
err instanceof Error ? err.message : String(err)
|
|
495
|
+
}`,
|
|
496
|
+
});
|
|
497
|
+
out.end();
|
|
498
|
+
return;
|
|
499
|
+
}
|
|
500
|
+
}
|
|
501
|
+
|
|
502
|
+
// No transition → either graph returned done or ended naturally
|
|
503
|
+
if (workflowFinalState) {
|
|
504
|
+
// If we paused for an interrupt, persist a pending request and emit an interrupt chunk
|
|
505
|
+
// Attach final state to pending record if we have one
|
|
506
|
+
if (workflowFinalState && pendingRecordToken) {
|
|
507
|
+
updatePendingRequest(pendingRecordToken, {
|
|
508
|
+
state: workflowFinalState,
|
|
509
|
+
});
|
|
510
|
+
}
|
|
511
|
+
|
|
512
|
+
finished = true;
|
|
513
|
+
out.write({ type: "done", data: workflowFinalState } as any);
|
|
514
|
+
out.end();
|
|
515
|
+
return;
|
|
516
|
+
}
|
|
517
|
+
|
|
518
|
+
// Natural end with no explicit "done" (defensive close)
|
|
519
|
+
if (!finished) {
|
|
520
|
+
out.write({ type: "done" });
|
|
521
|
+
out.end();
|
|
522
|
+
}
|
|
523
|
+
return;
|
|
524
|
+
}
|
|
525
|
+
})().catch((err) => {
|
|
526
|
+
console.error("[error:orchestrateGraphStream]", err);
|
|
527
|
+
out.write({ type: "status", message: `Error: ${err.message}` });
|
|
528
|
+
out.end();
|
|
529
|
+
});
|
|
530
|
+
|
|
531
|
+
return out;
|
|
532
|
+
}
|