@kortyx/agent 0.2.0

This diff reflects the content of publicly available package versions as they appear in their respective public registries and is provided for informational purposes only.
Files changed (37)
  1. package/CHANGELOG.md +8 -0
  2. package/dist/chat/create-agent.d.ts +16 -0
  3. package/dist/chat/create-agent.d.ts.map +1 -0
  4. package/dist/chat/create-agent.js +61 -0
  5. package/dist/chat/process-chat.d.ts +25 -0
  6. package/dist/chat/process-chat.d.ts.map +1 -0
  7. package/dist/chat/process-chat.js +69 -0
  8. package/dist/index.d.ts +12 -0
  9. package/dist/index.d.ts.map +1 -0
  10. package/dist/index.js +16 -0
  11. package/dist/interrupt/resume-handler.d.ts +27 -0
  12. package/dist/interrupt/resume-handler.d.ts.map +1 -0
  13. package/dist/interrupt/resume-handler.js +89 -0
  14. package/dist/orchestrator.d.ts +20 -0
  15. package/dist/orchestrator.d.ts.map +1 -0
  16. package/dist/orchestrator.js +391 -0
  17. package/dist/stream/transform-graph-stream-for-ui.d.ts +10 -0
  18. package/dist/stream/transform-graph-stream-for-ui.d.ts.map +1 -0
  19. package/dist/stream/transform-graph-stream-for-ui.js +194 -0
  20. package/dist/types/chat-message.d.ts +8 -0
  21. package/dist/types/chat-message.d.ts.map +1 -0
  22. package/dist/types/chat-message.js +2 -0
  23. package/dist/utils/extract-latest-message.d.ts +3 -0
  24. package/dist/utils/extract-latest-message.d.ts.map +1 -0
  25. package/dist/utils/extract-latest-message.js +14 -0
  26. package/package.json +36 -0
  27. package/src/chat/create-agent.ts +97 -0
  28. package/src/chat/process-chat.ts +132 -0
  29. package/src/index.ts +22 -0
  30. package/src/interrupt/resume-handler.ts +146 -0
  31. package/src/orchestrator.ts +532 -0
  32. package/src/stream/transform-graph-stream-for-ui.ts +245 -0
  33. package/src/types/chat-message.ts +7 -0
  34. package/src/utils/extract-latest-message.ts +13 -0
  35. package/tsconfig.build.json +21 -0
  36. package/tsconfig.build.tsbuildinfo +1 -0
  37. package/tsconfig.json +16 -0
package/dist/orchestrator.js ADDED
@@ -0,0 +1,391 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.orchestrateGraphStream = orchestrateGraphStream;
+ const memory_1 = require("@kortyx/memory");
+ const runtime_1 = require("@kortyx/runtime");
+ const langgraph_1 = require("@langchain/langgraph");
+ const stream_1 = require("stream");
+ const transform_graph_stream_for_ui_1 = require("./stream/transform-graph-stream-for-ui");
+ async function orchestrateGraphStream({ sessionId, graph, state, config, saveMemory, selectWorkflow, }) {
+ const out = new stream_1.PassThrough({ objectMode: true });
+ let currentGraph = graph;
+ let currentState = state;
+ let finished = false;
+ try {
+ const sid = config?.session?.id;
+ if (sid && typeof sid === "string") {
+ out.write({ type: "session", sessionId: sid });
+ }
+ }
+ catch { }
+ const pending = {
+ to: null,
+ payload: {},
+ };
+ const streamedNodes = new Set();
+ let lastStatusMsg = "";
+ let lastStatusAt = 0;
+ let pendingRecordToken = null;
+ let activeIsResume = false;
+ let wroteHumanInput = false;
+ const forwardEmit = (event, payload) => {
+ if (event === "error") {
+ const msg = String(payload?.message ?? "Unexpected error");
+ out.write({ type: "error", message: msg });
+ out.write({ type: "done" });
+ finished = true;
+ out.end();
+ return;
+ }
+ if (event === "status") {
+ const msg = String(payload?.message ?? "");
+ const now = Date.now();
+ if (msg && msg === lastStatusMsg && now - lastStatusAt < 250)
+ return;
+ lastStatusMsg = msg;
+ lastStatusAt = now;
+ out.write({ type: "status", message: msg });
+ return;
+ }
+ if (event === "text-start") {
+ const node = payload?.node;
+ if (!node)
+ return;
+ out.write({ type: "text-start", node });
+ streamedNodes.add(node);
+ return;
+ }
+ if (event === "text-delta") {
+ const node = payload?.node;
+ const delta = String(payload?.delta ?? "");
+ if (!node || !delta)
+ return;
+ out.write({ type: "text-delta", delta, node });
+ streamedNodes.add(node);
+ return;
+ }
+ if (event === "text-end") {
+ const node = payload?.node;
+ if (!node)
+ return;
+ out.write({ type: "text-end", node });
+ return;
+ }
+ if (event === "message") {
+ const node = payload?.node;
+ const text = String(payload?.content ?? "");
+ out.write({ type: "message", node, content: text });
+ return;
+ }
+ if (event === "structured_data") {
+ out.write({
+ type: "structured-data",
+ node: payload?.node,
+ dataType: payload?.dataType,
+ data: payload?.data,
+ });
+ return;
+ }
+ if (event === "transition") {
+ out.write({
+ type: "transition",
+ transitionTo: payload?.transitionTo,
+ payload: payload?.payload ?? {},
+ });
+ pending.to = payload?.transitionTo ?? null;
+ pending.payload =
+ payload?.payload ?? {};
+ return;
+ }
+ if (event === "interrupt") {
+ if (activeIsResume)
+ return;
+ try {
+ const p = payload;
+ console.log(`[orchestrator] interrupt node=${p?.node} workflow=${p?.workflow} options=${Array.isArray(p?.input?.options) ? p.input.options.length : 0}`);
+ }
+ catch { }
+ const p = payload;
+ const local = {
+ node: p?.node,
+ workflow: p?.workflow,
+ input: p?.input,
+ };
+ const token = (0, runtime_1.makeResumeToken)();
+ const requestId = (0, runtime_1.makeRequestId)("human");
+ pendingRecordToken = token;
+ const options = Array.isArray(local.input?.options)
+ ? local.input.options
+ : [];
+ const kind = local.input?.kind ||
+ (local.input?.multiple ? "multi-choice" : "choice");
+ const isText = kind === "text";
+ const record = {
+ token,
+ requestId,
+ sessionId: sessionId,
+ workflow: local.workflow || currentState.currentWorkflow,
+ node: local.node || "",
+ state: { ...currentState, awaitingHumanInput: true },
+ schema: isText
+ ? {
+ kind: kind,
+ multiple: Boolean(local.input?.multiple),
+ ...(local.input?.question
+ ? { question: local.input.question }
+ : {}),
+ }
+ : {
+ kind: kind,
+ multiple: Boolean(local.input?.multiple),
+ question: String(local.input?.question || "Please choose an option."),
+ },
+ options: options.map((o) => ({
+ id: String(o.id),
+ label: String(o.label),
+ description: typeof o.description === "string" ? o.description : undefined,
+ value: o.value,
+ })),
+ createdAt: Date.now(),
+ ttlMs: 15 * 60 * 1000,
+ };
+ (0, memory_1.savePendingRequest)(record);
+ out.write({
+ type: "interrupt",
+ requestId: record.requestId,
+ resumeToken: record.token,
+ workflow: record.workflow,
+ node: record.node,
+ input: {
+ kind: record.schema.kind,
+ multiple: record.schema.multiple,
+ question: record.schema.question,
+ options: record.options.map((o) => ({
+ id: o.id,
+ label: o.label,
+ description: o.description,
+ })),
+ },
+ });
+ wroteHumanInput = true;
+ return;
+ }
+ };
+ (async () => {
+ while (true) {
+ let workflowFinalState = null;
+ currentGraph.config = currentGraph.config || {};
+ currentGraph.config.emit = forwardEmit;
+ const threadId = currentGraph.config?.session?.id ||
+ sessionId ||
+ "anonymous-session";
+ out.write({
+ type: "status",
+ message: `🧵 thread_id=${threadId} workflow=${currentState.currentWorkflow}`,
+ });
+ const isResume = Boolean(currentGraph.config?.resume);
+ activeIsResume = isResume;
+ const resumeUpdate = currentGraph.config?.resumeUpdate;
+ const resumeValue = currentGraph.config?.resumeValue;
+ const invokeState = isResume
+ ? resumeValue !== undefined
+ ? new langgraph_1.Command({ resume: resumeValue })
+ : resumeUpdate
+ ? new langgraph_1.Command({ update: resumeUpdate })
+ : null
+ : currentState;
+ const runtimeStream = currentGraph.streamEvents(invokeState, {
+ version: "v2",
+ configurable: {
+ thread_id: threadId,
+ checkpoint_ns: String(currentState.currentWorkflow || "default"),
+ },
+ });
+ try {
+ out.write({
+ type: "status",
+ message: `▶️ streamEvents invoke: resume=${Boolean(currentGraph.config?.resume)} thread_id=${threadId} ns=${String(currentState.currentWorkflow || "default")}`,
+ });
+ }
+ catch { }
+ const uiStream = (0, transform_graph_stream_for_ui_1.transformGraphStreamForUI)(runtimeStream, {
+ debug: Boolean(config?.features?.tracing),
+ });
+ let loopTransitionTo = null;
+ let loopTransitionPayload = {};
+ for await (const chunk of uiStream) {
+ if (finished)
+ break;
+ const node = chunk.node;
+ if (chunk.type === "interrupt" &&
+ (!chunk.resumeToken || !chunk.requestId)) {
+ if (wroteHumanInput) {
+ continue;
+ }
+ const hi = chunk;
+ const token = (0, runtime_1.makeResumeToken)();
+ const requestId = (0, runtime_1.makeRequestId)("human");
+ pendingRecordToken = token;
+ const options = Array.isArray(hi.input?.options)
+ ? hi.input.options
+ : [];
+ const kind = hi.input?.kind || (hi.input?.multiple ? "multi-choice" : "choice");
+ const isText = kind === "text";
+ const record = {
+ token,
+ requestId,
+ sessionId: sessionId,
+ workflow: currentState.currentWorkflow,
+ node: node || "",
+ state: {
+ ...currentState,
+ awaitingHumanInput: true,
+ },
+ schema: isText
+ ? {
+ kind: kind,
+ multiple: Boolean(hi.input?.multiple),
+ ...(hi.input?.question
+ ? { question: hi.input.question }
+ : {}),
+ }
+ : {
+ kind: kind,
+ multiple: Boolean(hi.input?.multiple),
+ question: String(hi.input?.question || "Please choose an option."),
+ },
+ options: options.map((o) => ({
+ id: String(o.id),
+ label: String(o.label),
+ description: typeof o.description === "string" ? o.description : undefined,
+ value: o.value,
+ })),
+ createdAt: Date.now(),
+ ttlMs: 15 * 60 * 1000,
+ };
+ (0, memory_1.savePendingRequest)(record);
+ out.write({
+ type: "interrupt",
+ requestId,
+ resumeToken: token,
+ workflow: record.workflow,
+ node: record.node,
+ input: {
+ kind: record.schema.kind,
+ multiple: record.schema.multiple,
+ question: record.schema.question,
+ options: record.options.map((o) => ({
+ id: o.id,
+ label: o.label,
+ description: o.description,
+ })),
+ },
+ });
+ wroteHumanInput = true;
+ continue;
+ }
+ if (chunk.type === "text-delta") {
+ if (typeof chunk.delta === "string" && chunk.delta.length > 60) {
+ const text = chunk.delta;
+ for (let i = 0; i < text.length; i += 60) {
+ out.write({
+ type: "text-delta",
+ delta: text.slice(i, i + 60),
+ node,
+ });
+ }
+ if (node)
+ streamedNodes.add(node);
+ }
+ else {
+ out.write(chunk);
+ if (node)
+ streamedNodes.add(node);
+ }
+ }
+ else {
+ out.write(chunk);
+ }
+ if (saveMemory && sessionId && chunk.type !== "status") {
+ await saveMemory(sessionId, { ...currentState });
+ }
+ if (chunk.type === "transition") {
+ loopTransitionTo = String(chunk.transitionTo || "");
+ loopTransitionPayload = chunk.payload ?? {};
+ break;
+ }
+ if (chunk.type === "done") {
+ workflowFinalState = chunk.data ?? null;
+ break;
+ }
+ }
+ if (finished)
+ return;
+ const transitionTo = loopTransitionTo || pending.to;
+ const transitionPayload = Object.keys(loopTransitionPayload).length
+ ? loopTransitionPayload
+ : pending.payload;
+ pending.to = null;
+ pending.payload = {};
+ if (transitionTo) {
+ try {
+ const nextWorkflow = await selectWorkflow(transitionTo);
+ const nextGraph = await (0, runtime_1.createLangGraph)(nextWorkflow, {
+ ...config,
+ emit: forwardEmit,
+ });
+ const mergedData = {
+ ...(workflowFinalState?.data ?? currentState.data ?? {}),
+ ...(transitionPayload ?? {}),
+ };
+ const newInput = typeof transitionPayload?.rawInput ===
+ "string"
+ ? transitionPayload
+ .rawInput
+ : currentState.input;
+ currentState = {
+ ...currentState,
+ currentWorkflow: transitionTo,
+ input: newInput,
+ data: mergedData,
+ ui: {},
+ };
+ if (saveMemory && sessionId) {
+ await saveMemory(sessionId, currentState);
+ }
+ currentGraph = nextGraph;
+ continue;
+ }
+ catch (err) {
+ out.write({
+ type: "status",
+ message: `⚠️ Transition failed to '${transitionTo}': ${err instanceof Error ? err.message : String(err)}`,
+ });
+ out.end();
+ return;
+ }
+ }
+ if (workflowFinalState) {
+ if (workflowFinalState && pendingRecordToken) {
+ (0, memory_1.updatePendingRequest)(pendingRecordToken, {
+ state: workflowFinalState,
+ });
+ }
+ finished = true;
+ out.write({ type: "done", data: workflowFinalState });
+ out.end();
+ return;
+ }
+ if (!finished) {
+ out.write({ type: "done" });
+ out.end();
+ }
+ return;
+ }
+ })().catch((err) => {
+ console.error("[error:orchestrateGraphStream]", err);
+ out.write({ type: "status", message: `Error: ${err.message}` });
+ out.end();
+ });
+ return out;
+ }
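
Note: orchestrateGraphStream resolves to an object-mode PassThrough whose chunks carry a `type` discriminator (`session`, `status`, `text-start`/`text-delta`/`text-end`, `message`, `structured-data`, `transition`, `interrupt`, `error`, `done`). The sketch below is a minimal, hypothetical consumer; it assumes the package root re-exports orchestrateGraphStream (the index file is not shown in this hunk), and the caller-side names are illustrative only.

// Hypothetical consumer of the orchestrator stream (TypeScript sketch).
// An object-mode PassThrough is async-iterable, so the chunks written above
// can be read with for await...of.
import { orchestrateGraphStream } from "@kortyx/agent"; // assumed re-export

type OrchestratorChunk = { type: string; [key: string]: unknown };

async function printRun(args: Parameters<typeof orchestrateGraphStream>[0]): Promise<void> {
  const stream = await orchestrateGraphStream(args);
  for await (const chunk of stream as AsyncIterable<OrchestratorChunk>) {
    if (chunk.type === "text-delta") {
      process.stdout.write(String(chunk.delta)); // streamed assistant text
    } else if (chunk.type === "interrupt") {
      console.log("\nhuman input requested, resumeToken:", chunk.resumeToken);
    } else if (chunk.type === "done") {
      console.log("\nworkflow finished");
    }
  }
}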
package/dist/stream/transform-graph-stream-for-ui.d.ts ADDED
@@ -0,0 +1,10 @@
+ import type { StreamChunk } from "@kortyx/stream";
+ import type { StreamEvent } from "@langchain/core/tracers/log_stream";
+ interface TransformOptions {
+ debug?: boolean;
+ visibleNodes?: string[];
+ forwardModelStream?: boolean;
+ }
+ export declare function transformGraphStreamForUI(stream: AsyncIterable<StreamEvent>, options?: TransformOptions): AsyncGenerator<StreamChunk>;
+ export {};
+ //# sourceMappingURL=transform-graph-stream-for-ui.d.ts.map
package/dist/stream/transform-graph-stream-for-ui.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"transform-graph-stream-for-ui.d.ts","sourceRoot":"","sources":["../../src/stream/transform-graph-stream-for-ui.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAElD,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,oCAAoC,CAAC;AAEtE,UAAU,gBAAgB;IACxB,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,YAAY,CAAC,EAAE,MAAM,EAAE,CAAC;IACxB,kBAAkB,CAAC,EAAE,OAAO,CAAC;CAC9B;AASD,wBAAuB,yBAAyB,CAC9C,MAAM,EAAE,aAAa,CAAC,WAAW,CAAC,EAClC,OAAO,GAAE,gBAAqB,GAC7B,cAAc,CAAC,WAAW,CAAC,CAgO7B"}
package/dist/stream/transform-graph-stream-for-ui.js ADDED
@@ -0,0 +1,194 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.transformGraphStreamForUI = transformGraphStreamForUI;
+ const utils_1 = require("@kortyx/utils");
+ async function* transformGraphStreamForUI(stream, options = {}) {
+ const { debug = false, forwardModelStream = false } = options;
+ let currentNode = null;
+ const streamedTextByNode = new Map();
+ const startedNodes = new Set();
+ const endedNodes = new Set();
+ let sawInterrupt = false;
+ function findChoiceSchema(obj) {
+ try {
+ if (!obj || typeof obj !== "object")
+ return null;
+ if (typeof obj.kind === "string" &&
+ (obj.kind === "choice" || obj.kind === "multi-choice") &&
+ Array.isArray(obj.options)) {
+ return obj;
+ }
+ for (const v of Object.values(obj)) {
+ const inner = findChoiceSchema(v);
+ if (inner)
+ return inner;
+ }
+ }
+ catch { }
+ return null;
+ }
+ for await (const event of stream) {
+ const { event: type, name, data } = event ?? {};
+ if (debug)
+ console.log(`[debug:event]`, JSON.stringify(event, null, 2));
+ switch (type) {
+ case "on_chain_start":
+ if (name && !name.startsWith("ChannelWrite")) {
+ currentNode = name;
+ if (name === "__start__" || name === "__end__")
+ break;
+ if (startedNodes.has(name))
+ break;
+ startedNodes.add(name);
+ if (debug)
+ console.log(`[debug:start] node=${name}`);
+ yield { type: "status", message: `Processing node: ${name}` };
+ }
+ break;
+ case "on_graph_interrupt": {
+ sawInterrupt = true;
+ if (debug)
+ console.log(`[debug:on_graph_interrupt]`, JSON.stringify({ name, data }, null, 2));
+ const where = (typeof name === "string" && name) ||
+ data?.node ||
+ "unknown";
+ const schema = findChoiceSchema(data);
+ if (schema) {
+ const isText = schema.kind === "text";
+ const options = Array.isArray(schema.options)
+ ? schema.options
+ .map((o) => ({
+ id: String(o.id ?? ""),
+ label: String(o.label ?? ""),
+ ...(o.description
+ ? { description: String(o.description) }
+ : {}),
+ }))
+ .filter((o) => o.id && o.label)
+ : [];
+ yield {
+ type: "interrupt",
+ requestId: "",
+ resumeToken: "",
+ node: typeof where === "string" ? where : undefined,
+ input: {
+ kind: schema.kind,
+ multiple: Boolean(schema.multiple),
+ ...(isText
+ ? { question: schema.question }
+ : {
+ question: typeof schema.question === "string"
+ ? schema.question
+ : "Please choose",
+ }),
+ ...(options.length > 0 ? { options } : {}),
+ },
+ };
+ }
+ else {
+ yield { type: "status", message: `⏸️ Interrupted at: ${where}` };
+ }
+ break;
+ }
+ case "on_chat_model_stream":
+ if (forwardModelStream && data?.chunk?.content) {
+ if (currentNode && !streamedTextByNode.get(currentNode)) {
+ yield { type: "text-start", node: currentNode };
+ streamedTextByNode.set(currentNode, true);
+ }
+ const delta = (0, utils_1.contentToText)(data.chunk.content);
+ if (delta) {
+ yield {
+ type: "text-delta",
+ delta,
+ node: currentNode ?? "ai",
+ };
+ }
+ }
+ break;
+ case "on_chain_end": {
+ const nodeName = name;
+ const output = data?.output;
+ if (debug)
+ console.log(`[debug:on_chain_end:${nodeName}] output=`, JSON.stringify(output, null, 2));
+ if (!output || nodeName?.startsWith("ChannelWrite"))
+ break;
+ if (nodeName && streamedTextByNode.get(nodeName)) {
+ yield { type: "text-end", node: nodeName };
+ streamedTextByNode.delete(nodeName);
+ }
+ if (nodeName !== "__start__" && nodeName !== "__end__") {
+ if (!endedNodes.has(nodeName)) {
+ yield { type: "status", message: `✅ Completed node: ${nodeName}` };
+ endedNodes.add(nodeName);
+ }
+ }
+ currentNode = null;
+ break;
+ }
+ case "on_graph_end": {
+ if (debug)
+ console.log(`[debug:on_graph_end]`, JSON.stringify(data, null, 2));
+ const out = data?.output ?? null;
+ const interrupts = out && out.__interrupt__;
+ if (interrupts && Array.isArray(interrupts) && interrupts.length > 0) {
+ const first = interrupts[0];
+ const val = first?.value ?? first;
+ const schema = findChoiceSchema(val);
+ if (schema && Array.isArray(schema.options)) {
+ const isText = schema.kind === "text";
+ const options = Array.isArray(schema.options)
+ ? schema.options
+ .map((o) => ({
+ id: String(o.id ?? ""),
+ label: String(o.label ?? ""),
+ ...(o.description
+ ? { description: String(o.description) }
+ : {}),
+ }))
+ .filter((o) => o.id && o.label)
+ : [];
+ yield {
+ type: "interrupt",
+ requestId: "",
+ resumeToken: "",
+ input: {
+ kind: schema.kind,
+ multiple: Boolean(schema.multiple),
+ ...(isText
+ ? { question: schema.question }
+ : {
+ question: typeof schema.question === "string"
+ ? schema.question
+ : "Please choose",
+ }),
+ ...(options.length > 0 ? { options } : {}),
+ },
+ };
+ }
+ else {
+ yield { type: "status", message: "⏸️ Interrupt received" };
+ }
+ }
+ if (sawInterrupt && debug)
+ yield { type: "status", message: "🔚 Graph ended after interrupt" };
+ yield { type: "done", data: out };
+ break;
+ }
+ default:
+ if (typeof type === "string" && type.includes("interrupt")) {
+ sawInterrupt = true;
+ if (debug)
+ console.log(`[debug:interrupt_like]`, JSON.stringify({ type, name, data }, null, 2));
+ yield {
+ type: "status",
+ message: `⏸️ Interrupt event: ${type} ${name ?? ""}`.trim(),
+ };
+ }
+ else if (debug) {
+ console.warn(`[debug:unknown_event]`, type);
+ }
+ break;
+ }
+ }
+ }
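
Note: the transformer above consumes LangGraph `streamEvents` (version "v2") events and yields UI-level chunks. The following is a hedged wiring sketch only: `compiledGraph` and `initialState` are placeholders for whatever the caller has built, and the root import is assumed rather than confirmed by this diff.

// Sketch only: pipe v2 streamEvents through transformGraphStreamForUI and
// forward everything except progress statuses.
import { transformGraphStreamForUI } from "@kortyx/agent"; // assumed re-export

async function* uiChunks(
  compiledGraph: { streamEvents(state: unknown, opts: { version: "v2" }): AsyncIterable<any> },
  initialState: unknown,
) {
  const events = compiledGraph.streamEvents(initialState, { version: "v2" });
  for await (const chunk of transformGraphStreamForUI(events, { forwardModelStream: true })) {
    if ((chunk as { type: string }).type !== "status") yield chunk; // keep text, interrupt, done
  }
}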
package/dist/types/chat-message.d.ts ADDED
@@ -0,0 +1,8 @@
+ export type ChatMessage = {
+ role: "user" | "assistant" | "system";
+ content: string;
+ timestamp?: number | undefined;
+ id?: string | undefined;
+ metadata?: Record<string, unknown> | undefined;
+ };
+ //# sourceMappingURL=chat-message.d.ts.map
package/dist/types/chat-message.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"chat-message.d.ts","sourceRoot":"","sources":["../../src/types/chat-message.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,WAAW,GAAG;IACxB,IAAI,EAAE,MAAM,GAAG,WAAW,GAAG,QAAQ,CAAC;IACtC,OAAO,EAAE,MAAM,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAC/B,EAAE,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IACxB,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,SAAS,CAAC;CAChD,CAAC"}
package/dist/types/chat-message.js ADDED
@@ -0,0 +1,2 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
package/dist/utils/extract-latest-message.d.ts ADDED
@@ -0,0 +1,3 @@
+ import type { ChatMessage } from "../types/chat-message";
+ export declare function extractLatestUserMessage(messages: ChatMessage[]): string;
+ //# sourceMappingURL=extract-latest-message.d.ts.map
package/dist/utils/extract-latest-message.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"extract-latest-message.d.ts","sourceRoot":"","sources":["../../src/utils/extract-latest-message.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,uBAAuB,CAAC;AAEzD,wBAAgB,wBAAwB,CAAC,QAAQ,EAAE,WAAW,EAAE,GAAG,MAAM,CAUxE"}
package/dist/utils/extract-latest-message.js ADDED
@@ -0,0 +1,14 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.extractLatestUserMessage = extractLatestUserMessage;
+ function extractLatestUserMessage(messages) {
+ if (!messages || messages.length === 0)
+ return "";
+ for (let i = messages.length - 1; i >= 0; i -= 1) {
+ const msg = messages[i];
+ if (msg?.role === "user" && msg?.content?.trim()) {
+ return msg.content.trim();
+ }
+ }
+ return "";
+ }
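
Note: extractLatestUserMessage walks the history from newest to oldest and returns the first non-empty user turn, trimmed. A small usage sketch follows; it assumes ChatMessage and the helper are re-exported from the package entry point, which this diff does not show.

// Illustrative only: the most recent user message wins, whitespace is trimmed,
// and "" is returned when no user turn exists.
import { extractLatestUserMessage } from "@kortyx/agent"; // assumed re-export
import type { ChatMessage } from "@kortyx/agent"; // assumed re-export

const history: ChatMessage[] = [
  { role: "system", content: "You are a helpful assistant." },
  { role: "user", content: "First question" },
  { role: "assistant", content: "First answer" },
  { role: "user", content: "  Follow-up question  " },
];

console.log(extractLatestUserMessage(history)); // "Follow-up question"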
package/package.json ADDED
@@ -0,0 +1,36 @@
+ {
+ "name": "@kortyx/agent",
+ "version": "0.2.0",
+ "private": false,
+ "main": "./dist/index.js",
+ "types": "./src/index.ts",
+ "exports": {
+ ".": {
+ "types": "./src/index.ts",
+ "default": "./dist/index.js"
+ }
+ },
+ "dependencies": {
+ "@langchain/core": "^1.0.1",
+ "@langchain/langgraph": "^1.0.1",
+ "@kortyx/core": "0.2.0",
+ "@kortyx/providers": "0.2.0",
+ "@kortyx/runtime": "0.2.0",
+ "@kortyx/memory": "0.2.0",
+ "@kortyx/stream": "0.2.0",
+ "@kortyx/utils": "0.2.0"
+ },
+ "devDependencies": {
+ "turbo": "^2.5.4",
+ "typescript": "5.7.2"
+ },
+ "sideEffects": false,
+ "scripts": {
+ "build": "tsc -p tsconfig.build.json",
+ "predev": "tsc -p tsconfig.build.json",
+ "dev": "tsc --watch",
+ "lint": "echo 'Linting @kortyx/agent...' && echo 'Lint passed!'",
+ "test": "echo 'Testing @kortyx/agent...' && echo 'All tests passed!'",
+ "type-check": "tsc --noEmit"
+ }
+ }