@ai-sdk/langchain 2.0.0-beta.99 → 2.0.0

This diff compares the publicly released contents of the two package versions and reflects them exactly as they appear in their public registry. It is provided for informational purposes only.
package/dist/index.mjs CHANGED
@@ -1,75 +1,778 @@
- // src/stream-callbacks.ts
- function createCallbacksTransformer(callbacks = {}) {
- let aggregatedResponse = "";
- return new TransformStream({
- async start() {
- if (callbacks.onStart) await callbacks.onStart();
- },
- async transform(message, controller) {
- controller.enqueue(message);
- aggregatedResponse += message;
- if (callbacks.onToken) await callbacks.onToken(message);
- if (callbacks.onText && typeof message === "string") {
- await callbacks.onText(message);
- }
- },
- async flush() {
- if (callbacks.onFinal) {
- await callbacks.onFinal(aggregatedResponse);
- }
+ // src/adapter.ts
+ import {
+ SystemMessage
+ } from "@langchain/core/messages";
+ import {
+ convertToModelMessages
+ } from "ai";
+
+ // src/utils.ts
+ import {
+ AIMessage,
+ HumanMessage,
+ ToolMessage,
+ AIMessageChunk
+ } from "@langchain/core/messages";
+ function convertToolResultPart(block) {
+ const content = (() => {
+ if (block.output.type === "text" || block.output.type === "error-text") {
+ return block.output.value;
+ }
+ if (block.output.type === "json" || block.output.type === "error-json") {
+ return JSON.stringify(block.output.value);
+ }
+ if (block.output.type === "content") {
+ return block.output.value.map((outputBlock) => {
+ if (outputBlock.type === "text") {
+ return outputBlock.text;
+ }
+ return "";
+ }).join("");
  }
+ return "";
+ })();
+ return new ToolMessage({
+ tool_call_id: block.toolCallId,
+ content
  });
  }
-
- // src/langchain-adapter.ts
- function toUIMessageStream(stream, callbacks) {
- return stream.pipeThrough(
- new TransformStream({
- transform: async (value, controller) => {
- var _a;
- if (typeof value === "string") {
- controller.enqueue(value);
+ function convertAssistantContent(content) {
+ if (typeof content === "string") {
+ return new AIMessage({ content });
+ }
+ const textParts = [];
+ const toolCalls = [];
+ for (const part of content) {
+ if (part.type === "text") {
+ textParts.push(part.text);
+ } else if (part.type === "tool-call") {
+ toolCalls.push({
+ id: part.toolCallId,
+ name: part.toolName,
+ args: part.input
+ });
+ }
+ }
+ return new AIMessage({
+ content: textParts.join(""),
+ tool_calls: toolCalls.length > 0 ? toolCalls : void 0
+ });
+ }
+ function convertUserContent(content) {
+ if (typeof content === "string") {
+ return new HumanMessage({ content });
+ }
+ const textParts = content.filter(
+ (part) => part.type === "text"
+ ).map((part) => part.text);
+ return new HumanMessage({ content: textParts.join("") });
+ }
+ function isToolResultPart(item) {
+ return item != null && typeof item === "object" && "type" in item && item.type === "tool-result";
+ }
+ function processModelChunk(chunk, state, controller) {
+ if (chunk.id) {
+ state.messageId = chunk.id;
+ }
+ const reasoning = extractReasoningFromContentBlocks(chunk) || extractReasoningFromValuesMessage(chunk);
+ if (reasoning) {
+ if (!state.reasoningStarted) {
+ controller.enqueue({ type: "reasoning-start", id: state.messageId });
+ state.reasoningStarted = true;
+ state.started = true;
+ }
+ controller.enqueue({
+ type: "reasoning-delta",
+ delta: reasoning,
+ id: state.messageId
+ });
+ }
+ const text = typeof chunk.content === "string" ? chunk.content : Array.isArray(chunk.content) ? chunk.content.filter(
+ (c) => typeof c === "object" && c !== null && "type" in c && c.type === "text"
+ ).map((c) => c.text).join("") : "";
+ if (text) {
+ if (state.reasoningStarted && !state.textStarted) {
+ controller.enqueue({ type: "reasoning-end", id: state.messageId });
+ state.reasoningStarted = false;
+ }
+ if (!state.textStarted) {
+ controller.enqueue({ type: "text-start", id: state.messageId });
+ state.textStarted = true;
+ state.started = true;
+ }
+ controller.enqueue({
+ type: "text-delta",
+ delta: text,
+ id: state.messageId
+ });
+ }
+ }
+ function isPlainMessageObject(msg) {
+ if (msg == null || typeof msg !== "object") return false;
+ return typeof msg._getType !== "function";
+ }
+ function getMessageId(msg) {
+ if (msg == null || typeof msg !== "object") return void 0;
+ const msgObj = msg;
+ if (typeof msgObj.id === "string") {
+ return msgObj.id;
+ }
+ if (msgObj.type === "constructor" && msgObj.kwargs && typeof msgObj.kwargs === "object") {
+ const kwargs = msgObj.kwargs;
+ if (typeof kwargs.id === "string") {
+ return kwargs.id;
+ }
+ }
+ return void 0;
+ }
+ function isAIMessageChunk(msg) {
+ if (AIMessageChunk.isInstance(msg)) return true;
+ if (isPlainMessageObject(msg)) {
+ const obj = msg;
+ if ("type" in obj && obj.type === "ai") return true;
+ if (obj.type === "constructor" && Array.isArray(obj.id) && (obj.id.includes("AIMessageChunk") || obj.id.includes("AIMessage"))) {
+ return true;
+ }
+ }
+ return false;
+ }
+ function isToolMessageType(msg) {
+ if (ToolMessage.isInstance(msg)) return true;
+ if (isPlainMessageObject(msg)) {
+ const obj = msg;
+ if ("type" in obj && obj.type === "tool") return true;
+ if (obj.type === "constructor" && Array.isArray(obj.id) && obj.id.includes("ToolMessage")) {
+ return true;
+ }
+ }
+ return false;
+ }
+ function getMessageText(msg) {
+ var _a;
+ if (AIMessageChunk.isInstance(msg)) {
+ return (_a = msg.text) != null ? _a : "";
+ }
+ if (msg == null || typeof msg !== "object") return "";
+ const msgObj = msg;
+ const dataSource = msgObj.type === "constructor" && msgObj.kwargs && typeof msgObj.kwargs === "object" ? msgObj.kwargs : msgObj;
+ if ("content" in dataSource) {
+ const content = dataSource.content;
+ if (typeof content === "string") {
+ return content;
+ }
+ if (Array.isArray(content)) {
+ return content.filter(
+ (block) => block != null && typeof block === "object" && block.type === "text" && typeof block.text === "string"
+ ).map((block) => block.text).join("");
+ }
+ return "";
+ }
+ return "";
+ }
+ function isReasoningContentBlock(obj) {
+ return obj != null && typeof obj === "object" && "type" in obj && obj.type === "reasoning" && "reasoning" in obj && typeof obj.reasoning === "string";
+ }
+ function isThinkingContentBlock(obj) {
+ return obj != null && typeof obj === "object" && "type" in obj && obj.type === "thinking" && "thinking" in obj && typeof obj.thinking === "string";
+ }
+ function isGPT5ReasoningOutput(obj) {
+ return obj != null && typeof obj === "object" && "type" in obj && obj.type === "reasoning" && "summary" in obj && Array.isArray(obj.summary);
+ }
+ function extractReasoningId(msg) {
+ var _a;
+ if (msg == null || typeof msg !== "object") return void 0;
+ const msgObj = msg;
+ const kwargs = msgObj.kwargs && typeof msgObj.kwargs === "object" ? msgObj.kwargs : msgObj;
+ const additionalKwargs = kwargs.additional_kwargs;
+ if ((_a = additionalKwargs == null ? void 0 : additionalKwargs.reasoning) == null ? void 0 : _a.id) {
+ return additionalKwargs.reasoning.id;
+ }
+ const responseMetadata = kwargs.response_metadata;
+ if (responseMetadata && Array.isArray(responseMetadata.output)) {
+ for (const item of responseMetadata.output) {
+ if (isGPT5ReasoningOutput(item)) {
+ return item.id;
+ }
+ }
+ }
+ return void 0;
+ }
+ function extractReasoningFromContentBlocks(msg) {
+ if (msg == null || typeof msg !== "object") return void 0;
+ const msgObj = msg;
+ const kwargs = msgObj.kwargs && typeof msgObj.kwargs === "object" ? msgObj.kwargs : msgObj;
+ const contentBlocks = kwargs.contentBlocks;
+ if (Array.isArray(contentBlocks)) {
+ const reasoningParts = [];
+ for (const block of contentBlocks) {
+ if (isReasoningContentBlock(block)) {
+ reasoningParts.push(block.reasoning);
+ } else if (isThinkingContentBlock(block)) {
+ reasoningParts.push(block.thinking);
+ }
+ }
+ if (reasoningParts.length > 0) {
+ return reasoningParts.join("");
+ }
+ }
+ const additionalKwargs = kwargs.additional_kwargs;
+ if ((additionalKwargs == null ? void 0 : additionalKwargs.reasoning) && Array.isArray(additionalKwargs.reasoning.summary)) {
+ const reasoningParts = [];
+ for (const summaryItem of additionalKwargs.reasoning.summary) {
+ if (typeof summaryItem === "object" && summaryItem !== null && "text" in summaryItem && typeof summaryItem.text === "string") {
+ reasoningParts.push(summaryItem.text);
+ }
+ }
+ if (reasoningParts.length > 0) {
+ return reasoningParts.join("");
+ }
+ }
+ return void 0;
+ }
+ function extractReasoningFromValuesMessage(msg) {
+ if (msg == null || typeof msg !== "object") return void 0;
+ const msgObj = msg;
+ const kwargs = msgObj.kwargs && typeof msgObj.kwargs === "object" ? msgObj.kwargs : msgObj;
+ const responseMetadata = kwargs.response_metadata;
+ if (responseMetadata && Array.isArray(responseMetadata.output)) {
+ const reasoningParts = [];
+ for (const item of responseMetadata.output) {
+ if (isGPT5ReasoningOutput(item)) {
+ for (const summaryItem of item.summary) {
+ if (typeof summaryItem === "object" && summaryItem !== null) {
+ const text = summaryItem.text;
+ if (typeof text === "string" && text) {
+ reasoningParts.push(text);
+ }
+ }
+ }
+ }
+ }
+ if (reasoningParts.length > 0) {
+ return reasoningParts.join("");
+ }
+ }
+ const additionalKwargs = kwargs.additional_kwargs;
+ if ((additionalKwargs == null ? void 0 : additionalKwargs.reasoning) && Array.isArray(additionalKwargs.reasoning.summary)) {
+ const reasoningParts = [];
+ for (const summaryItem of additionalKwargs.reasoning.summary) {
+ if (typeof summaryItem === "object" && summaryItem !== null && "text" in summaryItem && typeof summaryItem.text === "string") {
+ reasoningParts.push(summaryItem.text);
+ }
+ }
+ if (reasoningParts.length > 0) {
+ return reasoningParts.join("");
+ }
+ }
+ return void 0;
+ }
+ function isImageGenerationOutput(obj) {
+ return obj != null && typeof obj === "object" && "type" in obj && obj.type === "image_generation_call";
+ }
+ function extractImageOutputs(additionalKwargs) {
+ if (!additionalKwargs) return [];
+ const toolOutputs = additionalKwargs.tool_outputs;
+ if (!Array.isArray(toolOutputs)) return [];
+ return toolOutputs.filter(isImageGenerationOutput);
+ }
+ function processLangGraphEvent(event, state, controller) {
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x;
+ const {
+ messageSeen,
+ messageConcat,
+ emittedToolCalls,
+ emittedImages,
+ emittedReasoningIds,
+ messageReasoningIds,
+ toolCallInfoByIndex,
+ emittedToolCallsByKey
+ } = state;
+ const [type, data] = event.length === 3 ? event.slice(1) : event;
+ switch (type) {
+ case "custom": {
+ let customTypeName = "custom";
+ let partId;
+ if (data != null && typeof data === "object" && !Array.isArray(data)) {
+ const dataObj = data;
+ if (typeof dataObj.type === "string" && dataObj.type) {
+ customTypeName = dataObj.type;
+ }
+ if (typeof dataObj.id === "string" && dataObj.id) {
+ partId = dataObj.id;
+ }
+ }
+ controller.enqueue({
+ type: `data-${customTypeName}`,
+ id: partId,
+ transient: partId == null,
+ data
+ });
+ break;
+ }
+ case "messages": {
+ const [rawMsg, metadata] = data;
+ const msg = rawMsg;
+ const msgId = getMessageId(msg);
+ if (!msgId) return;
+ const langgraphStep = typeof (metadata == null ? void 0 : metadata.langgraph_step) === "number" ? metadata.langgraph_step : null;
+ if (langgraphStep !== null && langgraphStep !== state.currentStep) {
+ if (state.currentStep !== null) {
+ controller.enqueue({ type: "finish-step" });
+ }
+ controller.enqueue({ type: "start-step" });
+ state.currentStep = langgraphStep;
+ }
+ if (AIMessageChunk.isInstance(msg)) {
+ if (messageConcat[msgId]) {
+ messageConcat[msgId] = messageConcat[msgId].concat(
+ msg
+ );
+ } else {
+ messageConcat[msgId] = msg;
+ }
+ }
+ if (isAIMessageChunk(msg)) {
+ const concatChunk = messageConcat[msgId];
+ const msgObj = msg;
+ const dataSource = msgObj.type === "constructor" && msgObj.kwargs && typeof msgObj.kwargs === "object" ? msgObj.kwargs : msgObj;
+ const additionalKwargs = dataSource.additional_kwargs;
+ const imageOutputs = extractImageOutputs(additionalKwargs);
+ for (const imageOutput of imageOutputs) {
+ if (imageOutput.result && !emittedImages.has(imageOutput.id)) {
+ emittedImages.add(imageOutput.id);
+ const mediaType = `image/${imageOutput.output_format || "png"}`;
+ controller.enqueue({
+ type: "file",
+ mediaType,
+ url: `data:${mediaType};base64,${imageOutput.result}`
+ });
+ }
+ }
+ const toolCallChunks = dataSource.tool_call_chunks;
+ if (toolCallChunks == null ? void 0 : toolCallChunks.length) {
+ for (const toolCallChunk of toolCallChunks) {
+ const idx = (_a = toolCallChunk.index) != null ? _a : 0;
+ if (toolCallChunk.id) {
+ (_b = toolCallInfoByIndex[msgId]) != null ? _b : toolCallInfoByIndex[msgId] = {};
+ toolCallInfoByIndex[msgId][idx] = {
+ id: toolCallChunk.id,
+ name: toolCallChunk.name || ((_d = (_c = concatChunk == null ? void 0 : concatChunk.tool_call_chunks) == null ? void 0 : _c[idx]) == null ? void 0 : _d.name) || "unknown"
+ };
+ }
+ const toolCallId = toolCallChunk.id || ((_f = (_e = toolCallInfoByIndex[msgId]) == null ? void 0 : _e[idx]) == null ? void 0 : _f.id) || ((_h = (_g = concatChunk == null ? void 0 : concatChunk.tool_call_chunks) == null ? void 0 : _g[idx]) == null ? void 0 : _h.id);
+ if (!toolCallId) {
+ continue;
+ }
+ const toolName = toolCallChunk.name || ((_j = (_i = toolCallInfoByIndex[msgId]) == null ? void 0 : _i[idx]) == null ? void 0 : _j.name) || ((_l = (_k = concatChunk == null ? void 0 : concatChunk.tool_call_chunks) == null ? void 0 : _k[idx]) == null ? void 0 : _l.name) || "unknown";
+ if (!((_n = (_m = messageSeen[msgId]) == null ? void 0 : _m.tool) == null ? void 0 : _n[toolCallId])) {
+ controller.enqueue({
+ type: "tool-input-start",
+ toolCallId,
+ toolName,
+ dynamic: true
+ });
+ (_o = messageSeen[msgId]) != null ? _o : messageSeen[msgId] = {};
+ (_q = (_p = messageSeen[msgId]).tool) != null ? _q : _p.tool = {};
+ messageSeen[msgId].tool[toolCallId] = true;
+ emittedToolCalls.add(toolCallId);
+ }
+ if (toolCallChunk.args) {
+ controller.enqueue({
+ type: "tool-input-delta",
+ toolCallId,
+ inputTextDelta: toolCallChunk.args
+ });
+ }
+ }
  return;
  }
- if ("event" in value) {
- if (value.event === "on_chat_model_stream") {
- forwardAIMessageChunk(
- (_a = value.data) == null ? void 0 : _a.chunk,
- controller
+ const chunkReasoningId = extractReasoningId(msg);
+ if (chunkReasoningId) {
+ if (!messageReasoningIds[msgId]) {
+ messageReasoningIds[msgId] = chunkReasoningId;
+ }
+ emittedReasoningIds.add(chunkReasoningId);
+ }
+ const reasoning = extractReasoningFromContentBlocks(msg);
+ if (reasoning) {
+ const reasoningId = (_s = (_r = messageReasoningIds[msgId]) != null ? _r : chunkReasoningId) != null ? _s : msgId;
+ if (!((_t = messageSeen[msgId]) == null ? void 0 : _t.reasoning)) {
+ controller.enqueue({ type: "reasoning-start", id: msgId });
+ (_u = messageSeen[msgId]) != null ? _u : messageSeen[msgId] = {};
+ messageSeen[msgId].reasoning = true;
+ }
+ controller.enqueue({
+ type: "reasoning-delta",
+ delta: reasoning,
+ id: msgId
+ });
+ emittedReasoningIds.add(reasoningId);
+ }
+ const text = getMessageText(msg);
+ if (text) {
+ if (!((_v = messageSeen[msgId]) == null ? void 0 : _v.text)) {
+ controller.enqueue({ type: "text-start", id: msgId });
+ (_w = messageSeen[msgId]) != null ? _w : messageSeen[msgId] = {};
+ messageSeen[msgId].text = true;
+ }
+ controller.enqueue({
+ type: "text-delta",
+ delta: text,
+ id: msgId
+ });
+ }
+ } else if (isToolMessageType(msg)) {
+ const msgObj = msg;
+ const dataSource = msgObj.type === "constructor" && msgObj.kwargs && typeof msgObj.kwargs === "object" ? msgObj.kwargs : msgObj;
+ const toolCallId = dataSource.tool_call_id;
+ const status = dataSource.status;
+ if (toolCallId) {
+ if (status === "error") {
+ controller.enqueue({
+ type: "tool-output-error",
+ toolCallId,
+ errorText: typeof dataSource.content === "string" ? dataSource.content : "Tool execution failed"
+ });
+ } else {
+ controller.enqueue({
+ type: "tool-output-available",
+ toolCallId,
+ output: dataSource.content
+ });
+ }
+ }
+ }
+ return;
+ }
+ case "values": {
+ for (const [id, seen] of Object.entries(messageSeen)) {
+ if (seen.text) controller.enqueue({ type: "text-end", id });
+ if (seen.tool) {
+ for (const [toolCallId, toolCallSeen] of Object.entries(seen.tool)) {
+ const concatMsg = messageConcat[id];
+ const toolCall = (_x = concatMsg == null ? void 0 : concatMsg.tool_calls) == null ? void 0 : _x.find(
+ (call) => call.id === toolCallId
  );
+ if (toolCallSeen && toolCall) {
+ emittedToolCalls.add(toolCallId);
+ const toolCallKey = `${toolCall.name}:${JSON.stringify(toolCall.args)}`;
+ emittedToolCallsByKey.set(toolCallKey, toolCallId);
+ controller.enqueue({
+ type: "tool-input-available",
+ toolCallId,
+ toolName: toolCall.name,
+ input: toolCall.args,
+ dynamic: true
+ });
+ }
  }
- return;
  }
- forwardAIMessageChunk(value, controller);
+ if (seen.reasoning) {
+ controller.enqueue({ type: "reasoning-end", id });
+ }
+ delete messageSeen[id];
+ delete messageConcat[id];
+ delete messageReasoningIds[id];
  }
- })
- ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
- new TransformStream({
- start: async (controller) => {
- controller.enqueue({ type: "text-start", id: "1" });
- },
- transform: async (chunk, controller) => {
- controller.enqueue({ type: "text-delta", delta: chunk, id: "1" });
- },
- flush: async (controller) => {
- controller.enqueue({ type: "text-end", id: "1" });
+ if (data != null && typeof data === "object" && "messages" in data) {
+ const messages = data.messages;
+ if (Array.isArray(messages)) {
+ for (const msg of messages) {
+ if (!msg || typeof msg !== "object") continue;
+ const msgId = getMessageId(msg);
+ if (!msgId) continue;
+ let toolCalls;
+ if (AIMessageChunk.isInstance(msg) || AIMessage.isInstance(msg)) {
+ toolCalls = msg.tool_calls;
+ } else if (isPlainMessageObject(msg)) {
+ const obj = msg;
+ const isSerializedFormat = obj.type === "constructor" && Array.isArray(obj.id) && (obj.id.includes("AIMessageChunk") || obj.id.includes("AIMessage"));
+ const dataSource = isSerializedFormat ? obj.kwargs : obj;
+ if (obj.type === "ai" || isSerializedFormat) {
+ if (Array.isArray(dataSource == null ? void 0 : dataSource.tool_calls)) {
+ toolCalls = dataSource.tool_calls;
+ } else if (
+ /**
+ * Fall back to additional_kwargs.tool_calls (OpenAI format)
+ */
+ (dataSource == null ? void 0 : dataSource.additional_kwargs) && typeof dataSource.additional_kwargs === "object"
+ ) {
+ const additionalKwargs = dataSource.additional_kwargs;
+ if (Array.isArray(additionalKwargs.tool_calls)) {
+ toolCalls = additionalKwargs.tool_calls.map((tc, idx) => {
+ const functionData = tc.function;
+ let args;
+ try {
+ args = (functionData == null ? void 0 : functionData.arguments) ? JSON.parse(functionData.arguments) : {};
+ } catch (e) {
+ args = {};
+ }
+ return {
+ id: tc.id || `call_${idx}`,
+ name: (functionData == null ? void 0 : functionData.name) || "unknown",
+ args
+ };
+ });
+ }
+ }
+ }
+ }
+ if (toolCalls && toolCalls.length > 0) {
+ for (const toolCall of toolCalls) {
+ if (toolCall.id && !emittedToolCalls.has(toolCall.id)) {
+ emittedToolCalls.add(toolCall.id);
+ const toolCallKey = `${toolCall.name}:${JSON.stringify(toolCall.args)}`;
+ emittedToolCallsByKey.set(toolCallKey, toolCall.id);
+ controller.enqueue({
+ type: "tool-input-start",
+ toolCallId: toolCall.id,
+ toolName: toolCall.name,
+ dynamic: true
+ });
+ controller.enqueue({
+ type: "tool-input-available",
+ toolCallId: toolCall.id,
+ toolName: toolCall.name,
+ input: toolCall.args,
+ dynamic: true
+ });
+ }
+ }
+ }
+ const reasoningId = extractReasoningId(msg);
+ const wasStreamedThisRequest = !!messageSeen[msgId];
+ const hasToolCalls = toolCalls && toolCalls.length > 0;
+ const shouldEmitReasoning = reasoningId && !emittedReasoningIds.has(reasoningId) && (wasStreamedThisRequest || !hasToolCalls);
+ if (shouldEmitReasoning) {
+ const reasoning = extractReasoningFromValuesMessage(msg);
+ if (reasoning) {
+ controller.enqueue({ type: "reasoning-start", id: msgId });
+ controller.enqueue({
+ type: "reasoning-delta",
+ delta: reasoning,
+ id: msgId
+ });
+ controller.enqueue({ type: "reasoning-end", id: msgId });
+ emittedReasoningIds.add(reasoningId);
+ }
+ }
+ }
+ }
+ }
+ if (data != null && typeof data === "object") {
+ const interrupt = data.__interrupt__;
+ if (Array.isArray(interrupt) && interrupt.length > 0) {
+ for (const interruptItem of interrupt) {
+ const interruptValue = interruptItem == null ? void 0 : interruptItem.value;
+ if (!interruptValue) continue;
+ const actionRequests = interruptValue.actionRequests || interruptValue.action_requests;
+ if (!Array.isArray(actionRequests)) continue;
+ for (const actionRequest of actionRequests) {
+ const toolName = actionRequest.name;
+ const input = actionRequest.args || actionRequest.arguments;
+ const toolCallKey = `${toolName}:${JSON.stringify(input)}`;
+ const toolCallId = emittedToolCallsByKey.get(toolCallKey) || actionRequest.id || `hitl-${toolName}-${Date.now()}`;
+ if (!emittedToolCalls.has(toolCallId)) {
+ emittedToolCalls.add(toolCallId);
+ emittedToolCallsByKey.set(toolCallKey, toolCallId);
+ controller.enqueue({
+ type: "tool-input-start",
+ toolCallId,
+ toolName,
+ dynamic: true
+ });
+ controller.enqueue({
+ type: "tool-input-available",
+ toolCallId,
+ toolName,
+ input,
+ dynamic: true
+ });
+ }
+ controller.enqueue({
+ type: "tool-approval-request",
+ approvalId: toolCallId,
+ toolCallId
+ });
+ }
+ }
+ }
  }
- })
- );
+ break;
+ }
+ }
+ }
+
+ // src/adapter.ts
+ async function toBaseMessages(messages) {
+ const modelMessages = await convertToModelMessages(messages);
+ return convertModelMessages(modelMessages);
  }
- function forwardAIMessageChunk(chunk, controller) {
- if (typeof chunk.content === "string") {
- controller.enqueue(chunk.content);
- } else {
- const content = chunk.content;
- for (const item of content) {
- if (item.type === "text") {
- controller.enqueue(item.text);
+ function convertModelMessages(modelMessages) {
+ const result = [];
+ for (const message of modelMessages) {
+ switch (message.role) {
+ case "tool": {
+ for (const item of message.content) {
+ if (isToolResultPart(item)) {
+ result.push(convertToolResultPart(item));
+ }
+ }
+ break;
+ }
+ case "assistant": {
+ result.push(convertAssistantContent(message.content));
+ break;
+ }
+ case "system": {
+ result.push(new SystemMessage({ content: message.content }));
+ break;
+ }
+ case "user": {
+ result.push(convertUserContent(message.content));
+ break;
  }
  }
  }
+ return result;
+ }
+ function toUIMessageStream(stream, callbacks) {
+ const textChunks = [];
+ const modelState = {
+ started: false,
+ messageId: "langchain-msg-1",
+ reasoningStarted: false,
+ textStarted: false
+ };
+ const langGraphState = {
+ messageSeen: {},
+ messageConcat: {},
+ emittedToolCalls: /* @__PURE__ */ new Set(),
+ emittedImages: /* @__PURE__ */ new Set(),
+ emittedReasoningIds: /* @__PURE__ */ new Set(),
+ messageReasoningIds: {},
+ toolCallInfoByIndex: {},
+ currentStep: null,
+ emittedToolCallsByKey: /* @__PURE__ */ new Map()
+ };
+ let streamType = null;
+ const getAsyncIterator = () => {
+ if (Symbol.asyncIterator in stream) {
+ return stream[Symbol.asyncIterator]();
+ }
+ const reader = stream.getReader();
+ return {
+ async next() {
+ const { done, value } = await reader.read();
+ return { done, value };
+ }
+ };
+ };
+ const iterator = getAsyncIterator();
+ const createCallbackController = (originalController) => {
+ return {
+ get desiredSize() {
+ return originalController.desiredSize;
+ },
+ close: () => originalController.close(),
+ error: (e) => originalController.error(e),
+ enqueue: (chunk) => {
+ var _a, _b;
+ if (callbacks && chunk.type === "text-delta" && chunk.delta) {
+ textChunks.push(chunk.delta);
+ (_a = callbacks.onToken) == null ? void 0 : _a.call(callbacks, chunk.delta);
+ (_b = callbacks.onText) == null ? void 0 : _b.call(callbacks, chunk.delta);
+ }
+ originalController.enqueue(chunk);
+ }
+ };
+ };
+ return new ReadableStream({
+ async start(controller) {
+ var _a, _b;
+ await ((_a = callbacks == null ? void 0 : callbacks.onStart) == null ? void 0 : _a.call(callbacks));
+ const wrappedController = createCallbackController(controller);
+ controller.enqueue({ type: "start" });
+ try {
+ while (true) {
+ const { done, value } = await iterator.next();
+ if (done) break;
+ if (streamType === null) {
+ if (Array.isArray(value)) {
+ streamType = "langgraph";
+ } else {
+ streamType = "model";
+ }
+ }
+ if (streamType === "model") {
+ processModelChunk(
+ value,
+ modelState,
+ wrappedController
+ );
+ } else {
+ processLangGraphEvent(
+ value,
+ langGraphState,
+ wrappedController
+ );
+ }
+ }
+ if (streamType === "model") {
+ if (modelState.reasoningStarted) {
+ controller.enqueue({
+ type: "reasoning-end",
+ id: modelState.messageId
+ });
+ }
+ if (modelState.textStarted) {
+ controller.enqueue({ type: "text-end", id: modelState.messageId });
+ }
+ controller.enqueue({ type: "finish" });
+ }
+ await ((_b = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _b.call(callbacks, textChunks.join("")));
+ } catch (error) {
+ controller.enqueue({
+ type: "error",
+ errorText: error instanceof Error ? error.message : "Unknown error"
+ });
+ } finally {
+ controller.close();
+ }
+ }
+ });
  }
+
+ // src/transport.ts
+ import {
+ RemoteGraph
+ } from "@langchain/langgraph/remote";
+ var LangSmithDeploymentTransport = class {
+ constructor(options) {
+ var _a;
+ this.graph = new RemoteGraph({
+ ...options,
+ graphId: (_a = options.graphId) != null ? _a : "agent"
+ });
+ }
+ async sendMessages(options) {
+ const baseMessages = await toBaseMessages(options.messages);
+ const stream = await this.graph.stream(
+ { messages: baseMessages },
+ { streamMode: ["values", "messages"] }
+ );
+ return toUIMessageStream(
+ stream
+ );
+ }
+ async reconnectToStream(_options) {
+ throw new Error("Method not implemented.");
+ }
+ };
  export {
+ LangSmithDeploymentTransport,
+ convertModelMessages,
+ toBaseMessages,
  toUIMessageStream
  };
  //# sourceMappingURL=index.mjs.map
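
For orientation, a minimal usage sketch of the new exports (toBaseMessages and toUIMessageStream) follows. This is not taken from the package's documentation: the route handler shape, the ChatOpenAI model, and the createUIMessageStreamResponse helper from the "ai" package are assumptions chosen for illustration.

// Hypothetical route handler; names outside the exports shown in this diff are illustrative.
import { toBaseMessages, toUIMessageStream } from "@ai-sdk/langchain";
import { createUIMessageStreamResponse, type UIMessage } from "ai";
import { ChatOpenAI } from "@langchain/openai";

export async function POST(req: Request) {
  const { messages }: { messages: UIMessage[] } = await req.json();
  // Convert AI SDK UI messages into LangChain BaseMessages (new export in 2.0.0).
  const baseMessages = await toBaseMessages(messages);
  // Stream a LangChain chat model and adapt its chunks to the UI message stream protocol.
  const model = new ChatOpenAI({ model: "gpt-4o-mini" });
  const stream = await model.stream(baseMessages);
  return createUIMessageStreamResponse({
    stream: toUIMessageStream(stream, {
      onFinal: (completion) => console.log("stream finished:", completion.length, "chars"),
    }),
  });
}

The LangSmithDeploymentTransport export follows the same pattern against a LangGraph deployment: it converts UI messages with toBaseMessages, streams a RemoteGraph with streamMode ["values", "messages"], and adapts the result with toUIMessageStream.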