@langchain/anthropic 0.1.13 → 0.1.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.ChatAnthropic = exports.ChatAnthropicMessages = void 0;
+ exports.ChatAnthropic = exports.ChatAnthropicMessages = exports._convertLangChainToolCallToAnthropic = void 0;
  const sdk_1 = require("@anthropic-ai/sdk");
  const messages_1 = require("@langchain/core/messages");
  const outputs_1 = require("@langchain/core/outputs");
@@ -36,14 +36,15 @@ function anthropicResponseToChatMessages(messages, additionalKwargs) {
  ];
  }
  else {
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const castMessage = messages;
+ const toolCalls = (0, output_parsers_js_1.extractToolCalls)(messages);
  const generations = [
  {
  text: "",
  message: new messages_1.AIMessage({
- content: castMessage,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ content: messages,
  additional_kwargs: additionalKwargs,
+ tool_calls: toolCalls,
  }),
  },
  ];
@@ -54,6 +55,180 @@ function anthropicResponseToChatMessages(messages, additionalKwargs) {
  function isAnthropicTool(tool) {
  return "input_schema" in tool;
  }
+ function _mergeMessages(messages) {
+ // Merge runs of human/tool messages into single human messages with content blocks.
+ const merged = [];
+ for (const message of messages) {
+ if (message._getType() === "tool") {
+ if (typeof message.content === "string") {
+ merged.push(new messages_1.HumanMessage({
+ content: [
+ {
+ type: "tool_result",
+ content: message.content,
+ tool_use_id: message.tool_call_id,
+ },
+ ],
+ }));
+ }
+ else {
+ merged.push(new messages_1.HumanMessage({ content: message.content }));
+ }
+ }
+ else {
+ const previousMessage = merged[merged.length - 1];
+ if (previousMessage?._getType() === "human" &&
+ message._getType() === "human") {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ let combinedContent;
+ if (typeof previousMessage.content === "string") {
+ combinedContent = [{ type: "text", text: previousMessage.content }];
+ }
+ else {
+ combinedContent = previousMessage.content;
+ }
+ if (typeof message.content === "string") {
+ combinedContent.push({ type: "text", text: message.content });
+ }
+ else {
+ combinedContent = combinedContent.concat(message.content);
+ }
+ previousMessage.content = combinedContent;
+ }
+ else {
+ merged.push(message);
+ }
+ }
+ }
+ return merged;
+ }
+ function _convertLangChainToolCallToAnthropic(toolCall) {
+ if (toolCall.id === undefined) {
+ throw new Error(`Anthropic requires all tool calls to have an "id".`);
+ }
+ return {
+ type: "tool_use",
+ id: toolCall.id,
+ name: toolCall.name,
+ input: toolCall.args,
+ };
+ }
+ exports._convertLangChainToolCallToAnthropic = _convertLangChainToolCallToAnthropic;
+ function _formatContent(content) {
+ if (typeof content === "string") {
+ return content;
+ }
+ else {
+ const contentBlocks = content.map((contentPart) => {
+ if (contentPart.type === "image_url") {
+ let source;
+ if (typeof contentPart.image_url === "string") {
+ source = _formatImage(contentPart.image_url);
+ }
+ else {
+ source = _formatImage(contentPart.image_url.url);
+ }
+ return {
+ type: "image",
+ source,
+ };
+ }
+ else if (contentPart.type === "text") {
+ // Assuming contentPart is of type MessageContentText here
+ return {
+ type: "text",
+ text: contentPart.text,
+ };
+ }
+ else if (contentPart.type === "tool_use" ||
+ contentPart.type === "tool_result") {
+ // TODO: Fix when SDK types are fixed
+ return {
+ ...contentPart,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ };
+ }
+ else {
+ throw new Error("Unsupported message content format");
+ }
+ });
+ return contentBlocks;
+ }
+ }
+ /**
+ * Formats messages as a prompt for the model.
+ * @param messages The base messages to format as a prompt.
+ * @returns The formatted prompt.
+ */
+ function _formatMessagesForAnthropic(messages) {
+ const mergedMessages = _mergeMessages(messages);
+ let system;
+ if (mergedMessages.length > 0 && mergedMessages[0]._getType() === "system") {
+ if (typeof messages[0].content !== "string") {
+ throw new Error("System message content must be a string.");
+ }
+ system = messages[0].content;
+ }
+ const conversationMessages = system !== undefined ? mergedMessages.slice(1) : mergedMessages;
+ const formattedMessages = conversationMessages.map((message) => {
+ let role;
+ if (message._getType() === "human") {
+ role = "user";
+ }
+ else if (message._getType() === "ai") {
+ role = "assistant";
+ }
+ else if (message._getType() === "tool") {
+ role = "user";
+ }
+ else if (message._getType() === "system") {
+ throw new Error("System messages are only permitted as the first passed message.");
+ }
+ else {
+ throw new Error(`Message type "${message._getType()}" is not supported.`);
+ }
+ if ((0, messages_1.isAIMessage)(message) && !!message.tool_calls?.length) {
+ if (typeof message.content === "string") {
+ if (message.content === "") {
+ return {
+ role,
+ content: message.tool_calls.map(_convertLangChainToolCallToAnthropic),
+ };
+ }
+ else {
+ return {
+ role,
+ content: [
+ { type: "text", text: message.content },
+ ...message.tool_calls.map(_convertLangChainToolCallToAnthropic),
+ ],
+ };
+ }
+ }
+ else {
+ const { content } = message;
+ const hasMismatchedToolCalls = !message.tool_calls.every((toolCall) => content.find((contentPart) => contentPart.type === "tool_use" && contentPart.id === toolCall.id));
+ if (hasMismatchedToolCalls) {
+ console.warn(`The "tool_calls" field on a message is only respected if content is a string.`);
+ }
+ return {
+ role,
+ content: _formatContent(message.content),
+ };
+ }
+ }
+ else {
+ return {
+ role,
+ content: _formatContent(message.content),
+ };
+ }
+ });
+ return {
+ messages: formattedMessages,
+ system,
+ };
+ }
  /**
  * Wrapper around Anthropic large language models.
  *
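
The hunk above replaces the old class method `formatMessagesForAnthropic` with module-level helpers: `_mergeMessages` folds tool results into `user` turns as `tool_result` content blocks, and `_formatMessagesForAnthropic` serializes AI tool calls as `tool_use` blocks. A rough sketch of the transformation, using message classes from `@langchain/core/messages` (the conversation, tool name, and ids are illustrative, and the commented output shapes are inferred from the code above rather than taken from package docs):

    import { AIMessage, HumanMessage, ToolMessage } from "@langchain/core/messages";

    // A hypothetical agent turn: the model requested a tool, the tool answered.
    const history = [
      new HumanMessage("What's the weather in SF?"),
      new AIMessage({
        content: "",
        tool_calls: [{ name: "get_weather", args: { location: "SF" }, id: "toolu_01" }],
      }),
      new ToolMessage({ content: "68 and sunny", tool_call_id: "toolu_01" }),
    ];

    // Per _mergeMessages/_formatMessagesForAnthropic, the AI turn should be sent as
    //   { role: "assistant", content: [{ type: "tool_use", id: "toolu_01", name: "get_weather", input: { location: "SF" } }] }
    // and the ToolMessage as a user turn carrying a tool_result block:
    //   { role: "user", content: [{ type: "tool_result", content: "68 and sunny", tool_use_id: "toolu_01" }] }
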
@@ -242,6 +417,12 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
  input_schema: (0, zod_to_json_schema_1.zodToJsonSchema)(tool.schema),
  }));
  }
+ bindTools(tools, kwargs) {
+ return this.bind({
+ tools: this.formatStructuredToolToAnthropic(tools),
+ ...kwargs,
+ });
+ }
  /**
  * Get the parameters used to invoke the model
  */
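
The new `bindTools` method pre-binds tool definitions so they are sent with every call. A minimal usage sketch, with a model and tool name of my own choosing (tools may be LangChain structured tools or Anthropic-format `{ name, description, input_schema }` objects, per `formatStructuredToolToAnthropic`):

    import { ChatAnthropic } from "@langchain/anthropic";

    const model = new ChatAnthropic({ modelName: "claude-3-sonnet-20240229" });

    // An Anthropic-format tool definition; a LangChain StructuredTool would also work.
    const weatherTool = {
      name: "get_weather",
      description: "Get the current weather for a city.",
      input_schema: {
        type: "object",
        properties: { city: { type: "string" } },
        required: ["city"],
      },
    };

    const modelWithTools = model.bindTools([weatherTool]);
    const result = await modelWithTools.invoke("What's the weather in Paris?");
    // With this release, parsed tool calls surface on the returned message, e.g.
    // [{ name: "get_weather", args: { city: "Paris" }, id: "toolu_..." }]
    console.log(result.tool_calls);
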
@@ -254,30 +435,10 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
  stop_sequences: options?.stop ?? this.stopSequences,
  stream: this.streaming,
  max_tokens: this.maxTokens,
+ tools: this.formatStructuredToolToAnthropic(options?.tools),
  ...this.invocationKwargs,
  };
  }
- invocationOptions(request, options) {
- const toolUseBetaHeader = {
- "anthropic-beta": "tools-2024-04-04",
- };
- const tools = this.formatStructuredToolToAnthropic(options?.tools);
- // If tools are present, populate the body with the message request params.
- // This is because Anthropic overwrites the message request params if a body
- // is passed.
- const body = tools
- ? {
- ...request,
- tools,
- }
- : undefined;
- const headers = tools ? toolUseBetaHeader : undefined;
- return {
- signal: options.signal,
- ...(body ? { body } : {}),
- ...(headers ? { headers } : {}),
- };
- }
  /** @ignore */
  _identifyingParams() {
  return {
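
With `invocationOptions` removed, tools supplied via call options now flow through `invocationParams` straight into the request body. A short sketch of passing tools for a single call, reusing the hypothetical `model` and `weatherTool` from the `bindTools` sketch above:

    // Per-call tools go through the "tools" call option and are folded into the
    // request body by invocationParams above.
    const reply = await model.invoke("What's the weather in Paris?", {
      tools: [weatherTool],
    });
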
@@ -296,22 +457,23 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
  }
  async *_streamResponseChunks(messages, options, runManager) {
  const params = this.invocationParams(options);
- const requestOptions = this.invocationOptions({
- ...params,
- stream: false,
- ...this.formatMessagesForAnthropic(messages),
- }, options);
+ const formattedMessages = _formatMessagesForAnthropic(messages);
  if (options.tools !== undefined && options.tools.length > 0) {
- const requestOptions = this.invocationOptions({
- ...params,
- stream: false,
- ...this.formatMessagesForAnthropic(messages),
- }, options);
- const generations = await this._generateNonStreaming(messages, params, requestOptions);
+ const generations = await this._generateNonStreaming(messages, params, {
+ signal: options.signal,
+ });
+ const result = generations[0].message;
+ const toolCallChunks = result.tool_calls?.map((toolCall, index) => ({
+ name: toolCall.name,
+ args: JSON.stringify(toolCall.args),
+ id: toolCall.id,
+ index,
+ }));
  yield new outputs_1.ChatGenerationChunk({
  message: new messages_1.AIMessageChunk({
- content: generations[0].message.content,
- additional_kwargs: generations[0].message.additional_kwargs,
+ content: result.content,
+ additional_kwargs: result.additional_kwargs,
+ tool_call_chunks: toolCallChunks,
  }),
  text: generations[0].text,
  });
@@ -319,9 +481,9 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
  else {
  const stream = await this.createStreamWithRetry({
  ...params,
- ...this.formatMessagesForAnthropic(messages),
+ ...formattedMessages,
  stream: true,
- }, requestOptions);
+ });
  let usageData = { input_tokens: 0, output_tokens: 0 };
  for await (const data of stream) {
  if (options.signal?.aborted) {
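
When tools are present, `_streamResponseChunks` now falls back to a single non-streaming generation and yields one `AIMessageChunk` whose `tool_call_chunks` carry stringified arguments. A consumption sketch, continuing the hypothetical `modelWithTools` from the earlier example:

    const stream = await modelWithTools.stream("What's the weather in Paris?");
    for await (const chunk of stream) {
      // With tools bound, expect a single chunk shaped roughly like
      // [{ name: "get_weather", args: '{"city":"Paris"}', id: "toolu_...", index: 0 }]
      console.log(chunk.tool_call_chunks);
    }
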
@@ -382,95 +544,22 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
  });
  }
  }
- /**
- * Formats messages as a prompt for the model.
- * @param messages The base messages to format as a prompt.
- * @returns The formatted prompt.
- */
- formatMessagesForAnthropic(messages) {
- let system;
- if (messages.length > 0 && messages[0]._getType() === "system") {
- if (typeof messages[0].content !== "string") {
- throw new Error("System message content must be a string.");
- }
- system = messages[0].content;
- }
- const conversationMessages = system !== undefined ? messages.slice(1) : messages;
- const formattedMessages = conversationMessages.map((message) => {
- let role;
- if (message._getType() === "human") {
- role = "user";
- }
- else if (message._getType() === "ai") {
- role = "assistant";
- }
- else if (message._getType() === "tool") {
- role = "user";
- }
- else if (message._getType() === "system") {
- throw new Error("System messages are only permitted as the first passed message.");
- }
- else {
- throw new Error(`Message type "${message._getType()}" is not supported.`);
- }
- if (typeof message.content === "string") {
- return {
- role,
- content: message.content,
- };
- }
- else {
- const contentBlocks = message.content.map((contentPart) => {
- if (contentPart.type === "image_url") {
- let source;
- if (typeof contentPart.image_url === "string") {
- source = _formatImage(contentPart.image_url);
- }
- else {
- source = _formatImage(contentPart.image_url.url);
- }
- return {
- type: "image",
- source,
- };
- }
- else if (contentPart.type === "text") {
- // Assuming contentPart is of type MessageContentText here
- return {
- type: "text",
- text: contentPart.text,
- };
- }
- else if (contentPart.type === "tool_use" ||
- contentPart.type === "tool_result") {
- // TODO: Fix when SDK types are fixed
- return {
- ...contentPart,
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- };
- }
- else {
- throw new Error("Unsupported message content format");
- }
- });
- return {
- role,
- content: contentBlocks,
- };
- }
- });
- return {
- messages: formattedMessages,
- system,
- };
- }
  /** @ignore */
  async _generateNonStreaming(messages, params, requestOptions) {
+ const options = params.tools !== undefined
+ ? {
+ ...requestOptions,
+ headers: {
+ ...requestOptions.headers,
+ "anthropic-beta": "tools-2024-04-04",
+ },
+ }
+ : requestOptions;
  const response = await this.completionWithRetry({
  ...params,
  stream: false,
- ...this.formatMessagesForAnthropic(messages),
- }, requestOptions);
+ ..._formatMessagesForAnthropic(messages),
+ }, options);
  const { content, ...additionalKwargs } = response;
  const generations = anthropicResponseToChatMessages(content, additionalKwargs);
  return generations;
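
The tools beta opt-in header is now merged into the request options inside `_generateNonStreaming` whenever the params include tools. For orientation, the equivalent raw SDK call would look roughly like the sketch below against `@anthropic-ai/sdk` (model name and body are illustrative; on SDK versions whose typings predate tools, the `tools` field may need a cast):

    import Anthropic from "@anthropic-ai/sdk";

    const client = new Anthropic();
    // The second argument carries per-request options; the beta header mirrors
    // the one merged in _generateNonStreaming above.
    const message = await client.messages.create(
      {
        model: "claude-3-sonnet-20240229",
        max_tokens: 1024,
        messages: [{ role: "user", content: "What's the weather in Paris?" }],
        tools: [weatherTool],
      },
      { headers: { "anthropic-beta": "tools-2024-04-04" } }
    );
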
@@ -505,12 +594,9 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
  };
  }
  else {
- const requestOptions = this.invocationOptions({
- ...params,
- stream: false,
- ...this.formatMessagesForAnthropic(messages),
- }, options);
- const generations = await this._generateNonStreaming(messages, params, requestOptions);
+ const generations = await this._generateNonStreaming(messages, params, {
+ signal: options.signal,
+ });
  return {
  generations,
  };

@@ -1,13 +1,15 @@
  import { Anthropic, type ClientOptions } from "@anthropic-ai/sdk";
  import type { Stream } from "@anthropic-ai/sdk/streaming";
  import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
- import { type BaseMessage } from "@langchain/core/messages";
+ import { AIMessageChunk, type BaseMessage } from "@langchain/core/messages";
  import { ChatGeneration, ChatGenerationChunk, type ChatResult } from "@langchain/core/outputs";
  import { BaseChatModel, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
  import { StructuredOutputMethodOptions, type BaseLanguageModelCallOptions, BaseLanguageModelInput } from "@langchain/core/language_models/base";
  import { StructuredToolInterface } from "@langchain/core/tools";
- import { Runnable } from "@langchain/core/runnables";
+ import { Runnable, RunnableInterface } from "@langchain/core/runnables";
+ import { ToolCall } from "@langchain/core/messages/tool";
  import { z } from "zod";
+ import { AnthropicToolResponse } from "./types.js";
  type AnthropicTool = {
  name: string;
  description: string;
@@ -16,13 +18,12 @@ type AnthropicTool = {
  */
  input_schema: Record<string, unknown>;
  };
- type AnthropicMessage = Anthropic.MessageParam;
  type AnthropicMessageCreateParams = Anthropic.MessageCreateParamsNonStreaming;
  type AnthropicStreamingMessageCreateParams = Anthropic.MessageCreateParamsStreaming;
  type AnthropicMessageStreamEvent = Anthropic.MessageStreamEvent;
  type AnthropicRequestOptions = Anthropic.RequestOptions;
  interface ChatAnthropicCallOptions extends BaseLanguageModelCallOptions {
- tools?: StructuredToolInterface[] | AnthropicTool[];
+ tools?: (StructuredToolInterface | AnthropicTool)[];
  }
  /**
  * Input to AnthropicChat class.
@@ -85,6 +86,7 @@ export interface AnthropicInput {
  * Anthropic API.
  */
  type Kwargs = Record<string, any>;
+ export declare function _convertLangChainToolCallToAnthropic(toolCall: ToolCall): AnthropicToolResponse;
  /**
  * Wrapper around Anthropic large language models.
  *
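
The declaration file now exports `_convertLangChainToolCallToAnthropic`, which maps a LangChain `ToolCall` to Anthropic's `tool_use` block. A usage sketch, assuming the package entry point re-exports the chat model module as in earlier releases (inputs are illustrative):

    import { _convertLangChainToolCallToAnthropic } from "@langchain/anthropic";

    // Tool calls without an "id" throw; with one, the call becomes a tool_use block.
    const block = _convertLangChainToolCallToAnthropic({
      name: "get_weather",
      args: { city: "Paris" },
      id: "toolu_01",
    });
    // => { type: "tool_use", id: "toolu_01", name: "get_weather", input: { city: "Paris" } }
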
@@ -108,7 +110,7 @@ type Kwargs = Record<string, any>;
  * console.log(res);
  * ```
  */
- export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCallOptions = ChatAnthropicCallOptions> extends BaseChatModel<CallOptions> implements AnthropicInput {
+ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCallOptions = ChatAnthropicCallOptions> extends BaseChatModel<CallOptions, AIMessageChunk> implements AnthropicInput {
  static lc_name(): string;
  get lc_secrets(): {
  [key: string]: string;
@@ -139,11 +141,11 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
  * @throws {Error} If a mix of AnthropicTools and StructuredTools are passed.
  */
  formatStructuredToolToAnthropic(tools: ChatAnthropicCallOptions["tools"]): AnthropicTool[] | undefined;
+ bindTools(tools: (AnthropicTool | StructuredToolInterface)[], kwargs?: Partial<CallOptions>): RunnableInterface<BaseLanguageModelInput, AIMessageChunk, CallOptions>;
  /**
  * Get the parameters used to invoke the model
  */
  invocationParams(options?: this["ParsedCallOptions"]): Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams, "messages"> & Kwargs;
- invocationOptions(request: Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams, "messages"> & Kwargs, options: this["ParsedCallOptions"]): AnthropicRequestOptions;
  /** @ignore */
  _identifyingParams(): {
  system?: string | undefined;
@@ -173,15 +175,6 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
  model_name: string;
  };
  _streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
- /**
- * Formats messages as a prompt for the model.
- * @param messages The base messages to format as a prompt.
- * @returns The formatted prompt.
- */
- protected formatMessagesForAnthropic(messages: BaseMessage[]): {
- system?: string;
- messages: AnthropicMessage[];
- };
  /** @ignore */
  _generateNonStreaming(messages: BaseMessage[], params: Omit<Anthropic.Messages.MessageCreateParamsNonStreaming | Anthropic.Messages.MessageCreateParamsStreaming, "messages"> & Kwargs, requestOptions: AnthropicRequestOptions): Promise<ChatGeneration[]>;
  /** @ignore */