@langchain/anthropic 0.1.13 → 0.1.14

This diff shows the contents of publicly available package versions as published to their respective public registries, and is provided for informational purposes only.
@@ -1,12 +1,12 @@
  import { Anthropic } from "@anthropic-ai/sdk";
- import { AIMessage, AIMessageChunk, } from "@langchain/core/messages";
+ import { AIMessage, AIMessageChunk, HumanMessage, isAIMessage, } from "@langchain/core/messages";
  import { ChatGenerationChunk, } from "@langchain/core/outputs";
  import { getEnvironmentVariable } from "@langchain/core/utils/env";
  import { BaseChatModel, } from "@langchain/core/language_models/chat_models";
  import { zodToJsonSchema } from "zod-to-json-schema";
  import { RunnablePassthrough, RunnableSequence, } from "@langchain/core/runnables";
  import { isZodSchema } from "@langchain/core/utils/types";
- import { AnthropicToolsOutputParser } from "./output_parsers.js";
+ import { AnthropicToolsOutputParser, extractToolCalls, } from "./output_parsers.js";
  function _formatImage(imageUrl) {
  const regex = /^data:(image\/.+);base64,(.+)$/;
  const match = imageUrl.match(regex);
@@ -33,14 +33,15 @@ function anthropicResponseToChatMessages(messages, additionalKwargs) {
  ];
  }
  else {
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const castMessage = messages;
+ const toolCalls = extractToolCalls(messages);
  const generations = [
  {
  text: "",
  message: new AIMessage({
- content: castMessage,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ content: messages,
  additional_kwargs: additionalKwargs,
+ tool_calls: toolCalls,
  }),
  },
  ];
@@ -51,6 +52,175 @@ function anthropicResponseToChatMessages(messages, additionalKwargs) {
  function isAnthropicTool(tool) {
  return "input_schema" in tool;
  }
+ function _mergeMessages(messages) {
+ // Merge runs of human/tool messages into single human messages with content blocks.
+ const merged = [];
+ for (const message of messages) {
+ if (message._getType() === "tool") {
+ if (typeof message.content === "string") {
+ merged.push(new HumanMessage({
+ content: [
+ {
+ type: "tool_result",
+ content: message.content,
+ tool_use_id: message.tool_call_id,
+ },
+ ],
+ }));
+ }
+ else {
+ merged.push(new HumanMessage({ content: message.content }));
+ }
+ }
+ else {
+ const previousMessage = merged[merged.length - 1];
+ if (previousMessage?._getType() === "human" &&
+ message._getType() === "human") {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ let combinedContent;
+ if (typeof previousMessage.content === "string") {
+ combinedContent = [{ type: "text", text: previousMessage.content }];
+ }
+ else {
+ combinedContent = previousMessage.content;
+ }
+ if (typeof message.content === "string") {
+ combinedContent.push({ type: "text", text: message.content });
+ }
+ else {
+ combinedContent = combinedContent.concat(message.content);
+ }
+ previousMessage.content = combinedContent;
+ }
+ else {
+ merged.push(message);
+ }
+ }
+ }
+ return merged;
+ }
+ export function _convertLangChainToolCallToAnthropic(toolCall) {
+ if (toolCall.id === undefined) {
+ throw new Error(`Anthropic requires all tool calls to have an "id".`);
+ }
+ return {
+ type: "tool_use",
+ id: toolCall.id,
+ name: toolCall.name,
+ input: toolCall.args,
+ };
+ }
+ function _formatContent(content) {
+ if (typeof content === "string") {
+ return content;
+ }
+ else {
+ const contentBlocks = content.map((contentPart) => {
+ if (contentPart.type === "image_url") {
+ let source;
+ if (typeof contentPart.image_url === "string") {
+ source = _formatImage(contentPart.image_url);
+ }
+ else {
+ source = _formatImage(contentPart.image_url.url);
+ }
+ return {
+ type: "image",
+ source,
+ };
+ }
+ else if (contentPart.type === "text") {
+ // Assuming contentPart is of type MessageContentText here
+ return {
+ type: "text",
+ text: contentPart.text,
+ };
+ }
+ else if (contentPart.type === "tool_use" ||
+ contentPart.type === "tool_result") {
+ // TODO: Fix when SDK types are fixed
+ return {
+ ...contentPart,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ };
+ }
+ else {
+ throw new Error("Unsupported message content format");
+ }
+ });
+ return contentBlocks;
+ }
+ }
+ /**
+ * Formats messages as a prompt for the model.
+ * @param messages The base messages to format as a prompt.
+ * @returns The formatted prompt.
+ */
+ function _formatMessagesForAnthropic(messages) {
+ const mergedMessages = _mergeMessages(messages);
+ let system;
+ if (mergedMessages.length > 0 && mergedMessages[0]._getType() === "system") {
+ if (typeof messages[0].content !== "string") {
+ throw new Error("System message content must be a string.");
+ }
+ system = messages[0].content;
+ }
+ const conversationMessages = system !== undefined ? mergedMessages.slice(1) : mergedMessages;
+ const formattedMessages = conversationMessages.map((message) => {
+ let role;
+ if (message._getType() === "human") {
+ role = "user";
+ }
+ else if (message._getType() === "ai") {
+ role = "assistant";
+ }
+ else if (message._getType() === "tool") {
+ role = "user";
+ }
+ else if (message._getType() === "system") {
+ throw new Error("System messages are only permitted as the first passed message.");
+ }
+ else {
+ throw new Error(`Message type "${message._getType()}" is not supported.`);
+ }
+ if (isAIMessage(message) && !!message.tool_calls?.length) {
+ if (message.content === "") {
+ return {
+ role,
+ content: message.tool_calls.map(_convertLangChainToolCallToAnthropic),
+ };
+ }
+ else if (typeof message.content === "string") {
+ console.warn(`The "tool_calls" field on a message is only respected if content is an empty string.`);
+ return {
+ role,
+ content: _formatContent(message.content),
+ };
+ }
+ else {
+ const { content } = message;
+ const hasMismatchedToolCalls = !message.tool_calls.every((toolCall) => content.find((contentPart) => contentPart.type === "tool_use" && contentPart.id === toolCall.id));
+ if (hasMismatchedToolCalls) {
+ console.warn(`The "tool_calls" field on a message is only respected if content is an empty string.`);
+ }
+ return {
+ role,
+ content: _formatContent(message.content),
+ };
+ }
+ }
+ else {
+ return {
+ role,
+ content: _formatContent(message.content),
+ };
+ }
+ });
+ return {
+ messages: formattedMessages,
+ system,
+ };
+ }
  /**
  * Wrapper around Anthropic large language models.
  *
@@ -239,6 +409,12 @@ export class ChatAnthropicMessages extends BaseChatModel {
  input_schema: zodToJsonSchema(tool.schema),
  }));
  }
+ bindTools(tools, kwargs) {
+ return this.bind({
+ tools: this.formatStructuredToolToAnthropic(tools),
+ ...kwargs,
+ });
+ }
  /**
  * Get the parameters used to invoke the model
  */
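For context, here is a minimal usage sketch of the `bindTools` method added above. It is not part of the published diff: the tool definition, tool name, and prompt are hypothetical, and the tool is supplied in Anthropic's native `input_schema` format, which `formatStructuredToolToAnthropic` passes through unchanged.

```ts
// Hypothetical sketch, not part of the package: bind an Anthropic-format tool
// and read the parsed tool calls that 0.1.14 now attaches to the AIMessage.
import { ChatAnthropic } from "@langchain/anthropic";
import { HumanMessage } from "@langchain/core/messages";

const model = new ChatAnthropic({
  modelName: "claude-3-sonnet-20240229", // model name borrowed from the test file in this diff
  temperature: 0,
});

// A tool already in Anthropic's format (i.e. with `input_schema`) is forwarded as-is.
const modelWithTools = model.bindTools([
  {
    name: "get_weather",
    description: "Get the current weather for a city.",
    input_schema: {
      type: "object",
      properties: { city: { type: "string" } },
      required: ["city"],
    },
  },
]);

const response = await modelWithTools.invoke([
  new HumanMessage("What is the weather in SF?"),
]);

// In 0.1.14 the returned AIMessage carries parsed tool calls, e.g.
// response.tool_calls === [{ name: "get_weather", args: { city: "SF" }, id: "toolu_..." }]
console.log(response);
```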
@@ -251,30 +427,10 @@ export class ChatAnthropicMessages extends BaseChatModel {
  stop_sequences: options?.stop ?? this.stopSequences,
  stream: this.streaming,
  max_tokens: this.maxTokens,
+ tools: this.formatStructuredToolToAnthropic(options?.tools),
  ...this.invocationKwargs,
  };
  }
- invocationOptions(request, options) {
- const toolUseBetaHeader = {
- "anthropic-beta": "tools-2024-04-04",
- };
- const tools = this.formatStructuredToolToAnthropic(options?.tools);
- // If tools are present, populate the body with the message request params.
- // This is because Anthropic overwrites the message request params if a body
- // is passed.
- const body = tools
- ? {
- ...request,
- tools,
- }
- : undefined;
- const headers = tools ? toolUseBetaHeader : undefined;
- return {
- signal: options.signal,
- ...(body ? { body } : {}),
- ...(headers ? { headers } : {}),
- };
- }
  /** @ignore */
  _identifyingParams() {
  return {
@@ -293,22 +449,23 @@ export class ChatAnthropicMessages extends BaseChatModel {
  }
  async *_streamResponseChunks(messages, options, runManager) {
  const params = this.invocationParams(options);
- const requestOptions = this.invocationOptions({
- ...params,
- stream: false,
- ...this.formatMessagesForAnthropic(messages),
- }, options);
+ const formattedMessages = _formatMessagesForAnthropic(messages);
  if (options.tools !== undefined && options.tools.length > 0) {
- const requestOptions = this.invocationOptions({
- ...params,
- stream: false,
- ...this.formatMessagesForAnthropic(messages),
- }, options);
- const generations = await this._generateNonStreaming(messages, params, requestOptions);
+ const generations = await this._generateNonStreaming(messages, params, {
+ signal: options.signal,
+ });
+ const result = generations[0].message;
+ const toolCallChunks = result.tool_calls?.map((toolCall, index) => ({
+ name: toolCall.name,
+ args: JSON.stringify(toolCall.args),
+ id: toolCall.id,
+ index,
+ }));
  yield new ChatGenerationChunk({
  message: new AIMessageChunk({
- content: generations[0].message.content,
- additional_kwargs: generations[0].message.additional_kwargs,
+ content: result.content,
+ additional_kwargs: result.additional_kwargs,
+ tool_call_chunks: toolCallChunks,
  }),
  text: generations[0].text,
  });
@@ -316,9 +473,9 @@ export class ChatAnthropicMessages extends BaseChatModel {
  else {
  const stream = await this.createStreamWithRetry({
  ...params,
- ...this.formatMessagesForAnthropic(messages),
+ ...formattedMessages,
  stream: true,
- }, requestOptions);
+ });
  let usageData = { input_tokens: 0, output_tokens: 0 };
  for await (const data of stream) {
  if (options.signal?.aborted) {
@@ -379,95 +536,22 @@ export class ChatAnthropicMessages extends BaseChatModel {
  });
  }
  }
- /**
- * Formats messages as a prompt for the model.
- * @param messages The base messages to format as a prompt.
- * @returns The formatted prompt.
- */
- formatMessagesForAnthropic(messages) {
- let system;
- if (messages.length > 0 && messages[0]._getType() === "system") {
- if (typeof messages[0].content !== "string") {
- throw new Error("System message content must be a string.");
- }
- system = messages[0].content;
- }
- const conversationMessages = system !== undefined ? messages.slice(1) : messages;
- const formattedMessages = conversationMessages.map((message) => {
- let role;
- if (message._getType() === "human") {
- role = "user";
- }
- else if (message._getType() === "ai") {
- role = "assistant";
- }
- else if (message._getType() === "tool") {
- role = "user";
- }
- else if (message._getType() === "system") {
- throw new Error("System messages are only permitted as the first passed message.");
- }
- else {
- throw new Error(`Message type "${message._getType()}" is not supported.`);
- }
- if (typeof message.content === "string") {
- return {
- role,
- content: message.content,
- };
- }
- else {
- const contentBlocks = message.content.map((contentPart) => {
- if (contentPart.type === "image_url") {
- let source;
- if (typeof contentPart.image_url === "string") {
- source = _formatImage(contentPart.image_url);
- }
- else {
- source = _formatImage(contentPart.image_url.url);
- }
- return {
- type: "image",
- source,
- };
- }
- else if (contentPart.type === "text") {
- // Assuming contentPart is of type MessageContentText here
- return {
- type: "text",
- text: contentPart.text,
- };
- }
- else if (contentPart.type === "tool_use" ||
- contentPart.type === "tool_result") {
- // TODO: Fix when SDK types are fixed
- return {
- ...contentPart,
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- };
- }
- else {
- throw new Error("Unsupported message content format");
- }
- });
- return {
- role,
- content: contentBlocks,
- };
- }
- });
- return {
- messages: formattedMessages,
- system,
- };
- }
  /** @ignore */
  async _generateNonStreaming(messages, params, requestOptions) {
+ const options = params.tools !== undefined
+ ? {
+ ...requestOptions,
+ headers: {
+ ...requestOptions.headers,
+ "anthropic-beta": "tools-2024-04-04",
+ },
+ }
+ : requestOptions;
  const response = await this.completionWithRetry({
  ...params,
  stream: false,
- ...this.formatMessagesForAnthropic(messages),
- }, requestOptions);
+ ..._formatMessagesForAnthropic(messages),
+ }, options);
  const { content, ...additionalKwargs } = response;
  const generations = anthropicResponseToChatMessages(content, additionalKwargs);
  return generations;
@@ -502,12 +586,9 @@ export class ChatAnthropicMessages extends BaseChatModel {
  };
  }
  else {
- const requestOptions = this.invocationOptions({
- ...params,
- stream: false,
- ...this.formatMessagesForAnthropic(messages),
- }, options);
- const generations = await this._generateNonStreaming(messages, params, requestOptions);
+ const generations = await this._generateNonStreaming(messages, params, {
+ signal: options.signal,
+ });
  return {
  generations,
  };
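To illustrate the round trip that the new `_mergeMessages` and `_formatMessagesForAnthropic` helpers enable, here is a hypothetical follow-up turn (not part of the diff; contents and IDs are invented). Per the code above, an AIMessage whose `content` is an empty string is serialized from its `tool_calls` via `_convertLangChainToolCallToAnthropic`, and a tool message is folded into a user message containing a `tool_result` block keyed by `tool_use_id`.

```ts
// Hypothetical conversation sketch, not part of the package.
import { AIMessage, HumanMessage, ToolMessage } from "@langchain/core/messages";

const history = [
  new HumanMessage("What is the weather in SF?"),
  // content: "" means the tool_calls below are converted to Anthropic
  // `tool_use` blocks by _convertLangChainToolCallToAnthropic.
  new AIMessage({
    content: "",
    tool_calls: [{ id: "toolu_123", name: "get_weather", args: { city: "SF" } }],
  }),
  // _mergeMessages turns this into a user message with a single
  // { type: "tool_result", content: "...", tool_use_id: "toolu_123" } block.
  new ToolMessage({ content: "68F and sunny", tool_call_id: "toolu_123" }),
];

// Passing `history` back to a tool-bound model continues the loop, e.g.:
// const followUp = await modelWithTools.invoke(history);
```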
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.AnthropicToolsOutputParser = void 0;
+ exports.extractToolCalls = exports.AnthropicToolsOutputParser = void 0;
  const output_parsers_1 = require("@langchain/core/output_parsers");
  class AnthropicToolsOutputParser extends output_parsers_1.BaseLLMOutputParser {
  static lc_name() {
@@ -56,25 +56,32 @@ class AnthropicToolsOutputParser extends output_parsers_1.BaseLLMOutputParser {
  throw new output_parsers_1.OutputParserException(`Failed to parse. Text: "${JSON.stringify(result, null, 2)}". Error: ${JSON.stringify(zodParsedResult.error.errors)}`, JSON.stringify(result, null, 2));
  }
  }
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
  async parseResult(generations) {
  const tools = generations.flatMap((generation) => {
  const { message } = generation;
- if (typeof message === "string") {
- return [];
- }
  if (!Array.isArray(message.content)) {
  return [];
  }
- const tool = message.content.find((item) => item.type === "tool_use");
+ const tool = extractToolCalls(message.content)[0];
  return tool;
  });
  if (tools[0] === undefined) {
  throw new Error("No parseable tool calls provided to AnthropicToolsOutputParser.");
  }
  const [tool] = tools;
- const validatedResult = await this._validateResult(tool.input);
+ const validatedResult = await this._validateResult(tool.args);
  return validatedResult;
  }
  }
  exports.AnthropicToolsOutputParser = AnthropicToolsOutputParser;
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ function extractToolCalls(content) {
+ const toolCalls = [];
+ for (const block of content) {
+ if (block.type === "tool_use") {
+ toolCalls.push({ name: block.name, args: block.input, id: block.id });
+ }
+ }
+ return toolCalls;
+ }
+ exports.extractToolCalls = extractToolCalls;
@@ -2,6 +2,7 @@ import { z } from "zod";
  import { BaseLLMOutputParser } from "@langchain/core/output_parsers";
  import { JsonOutputKeyToolsParserParams } from "@langchain/core/output_parsers/openai_tools";
  import { ChatGeneration } from "@langchain/core/outputs";
+ import { ToolCall } from "@langchain/core/messages/tool";
  interface AnthropicToolsOutputParserParams<T extends Record<string, any>> extends JsonOutputKeyToolsParserParams<T> {
  }
  export declare class AnthropicToolsOutputParser<T extends Record<string, any> = Record<string, any>> extends BaseLLMOutputParser<T> {
@@ -17,4 +18,5 @@ export declare class AnthropicToolsOutputParser<T extends Record<string, any> =
  protected _validateResult(result: unknown): Promise<T>;
  parseResult(generations: ChatGeneration[]): Promise<T>;
  }
+ export declare function extractToolCalls(content: Record<string, any>[]): ToolCall[];
  export {};
@@ -53,24 +53,30 @@ export class AnthropicToolsOutputParser extends BaseLLMOutputParser {
  throw new OutputParserException(`Failed to parse. Text: "${JSON.stringify(result, null, 2)}". Error: ${JSON.stringify(zodParsedResult.error.errors)}`, JSON.stringify(result, null, 2));
  }
  }
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
  async parseResult(generations) {
  const tools = generations.flatMap((generation) => {
  const { message } = generation;
- if (typeof message === "string") {
- return [];
- }
  if (!Array.isArray(message.content)) {
  return [];
  }
- const tool = message.content.find((item) => item.type === "tool_use");
+ const tool = extractToolCalls(message.content)[0];
  return tool;
  });
  if (tools[0] === undefined) {
  throw new Error("No parseable tool calls provided to AnthropicToolsOutputParser.");
  }
  const [tool] = tools;
- const validatedResult = await this._validateResult(tool.input);
+ const validatedResult = await this._validateResult(tool.args);
  return validatedResult;
  }
  }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ export function extractToolCalls(content) {
+ const toolCalls = [];
+ for (const block of content) {
+ if (block.type === "tool_use") {
+ toolCalls.push({ name: block.name, args: block.input, id: block.id });
+ }
+ }
+ return toolCalls;
+ }
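For reference, a minimal standalone sketch of the mapping that the new `extractToolCalls` helper performs: the function body mirrors the compiled source above, while the sample Anthropic content array, tool name, and IDs are invented for illustration.

```ts
// Standalone illustration; mirrors the extractToolCalls published in 0.1.14.
type ToolCall = { name: string; args: Record<string, unknown>; id: string };

function extractToolCalls(content: Record<string, any>[]): ToolCall[] {
  const toolCalls: ToolCall[] = [];
  for (const block of content) {
    if (block.type === "tool_use") {
      // Anthropic `tool_use` blocks carry { id, name, input }.
      toolCalls.push({ name: block.name, args: block.input, id: block.id });
    }
  }
  return toolCalls;
}

// Example Anthropic response content (shape assumed for illustration):
const content = [
  { type: "text", text: "Let me check the weather." },
  { type: "tool_use", id: "toolu_123", name: "get_weather", input: { city: "SF" } },
];

console.log(extractToolCalls(content));
// -> [{ name: "get_weather", args: { city: "SF" }, id: "toolu_123" }]
```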
@@ -0,0 +1 @@
+ export {};
@@ -0,0 +1,39 @@
+ // import { test, expect } from "@jest/globals";
+ // import { ChatPromptTemplate } from "@langchain/core/prompts";
+ // import { TavilySearchResults } from "@langchain/community/tools/tavily_search";
+ // import { AgentExecutor, createToolCallingAgent } from "langchain/agents";
+ // import { ChatAnthropic } from "../index.js";
+ // const tools = [new TavilySearchResults({ maxResults: 1 })];
+ // TODO: This test breaks CI build due to dependencies. Figure out a way around it.
+ test("createToolCallingAgent works", async () => {
+ // const prompt = ChatPromptTemplate.fromMessages([
+ // ["system", "You are a helpful assistant"],
+ // ["placeholder", "{chat_history}"],
+ // ["human", "{input}"],
+ // ["placeholder", "{agent_scratchpad}"],
+ // ]);
+ // const llm = new ChatAnthropic({
+ // modelName: "claude-3-sonnet-20240229",
+ // temperature: 0,
+ // });
+ // const agent = await createToolCallingAgent({
+ // llm,
+ // tools,
+ // prompt,
+ // });
+ // const agentExecutor = new AgentExecutor({
+ // agent,
+ // tools,
+ // });
+ // const input = "what is the current weather in SF?";
+ // const result = await agentExecutor.invoke({
+ // input,
+ // });
+ // console.log(result);
+ // expect(result.input).toBe(input);
+ // expect(typeof result.output).toBe("string");
+ // // Length greater than 10 because any less than that would warrant
+ // // an investigation into why such a short generation was returned.
+ // expect(result.output.length).toBeGreaterThan(10);
+ });
+ export {};
@@ -0,0 +1 @@
+ export {};