@langchain/anthropic 0.3.10 → 0.3.11

@@ -520,8 +520,8 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
  model: Anthropic.Messages.Model;
  max_tokens: number;
  tools?: Anthropic.Messages.Tool[] | undefined;
- tool_choice?: Anthropic.Messages.MessageCreateParams.ToolChoiceAuto | Anthropic.Messages.MessageCreateParams.ToolChoiceAny | Anthropic.Messages.MessageCreateParams.ToolChoiceTool | undefined;
- metadata?: Anthropic.Messages.MessageCreateParams.Metadata | undefined;
+ tool_choice?: Anthropic.Messages.ToolChoice | undefined;
+ metadata?: Anthropic.Messages.Metadata | undefined;
  temperature?: number | undefined;
  stream?: boolean | undefined;
  stop_sequences?: string[] | undefined;
@@ -537,8 +537,8 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
  model: Anthropic.Messages.Model;
  max_tokens: number;
  tools?: Anthropic.Messages.Tool[] | undefined;
- tool_choice?: Anthropic.Messages.MessageCreateParams.ToolChoiceAuto | Anthropic.Messages.MessageCreateParams.ToolChoiceAny | Anthropic.Messages.MessageCreateParams.ToolChoiceTool | undefined;
- metadata?: Anthropic.Messages.MessageCreateParams.Metadata | undefined;
+ tool_choice?: Anthropic.Messages.ToolChoice | undefined;
+ metadata?: Anthropic.Messages.Metadata | undefined;
  temperature?: number | undefined;
  stream?: boolean | undefined;
  stop_sequences?: string[] | undefined;
@@ -554,7 +554,6 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
  id: string;
  model: Anthropic.Messages.Model;
  stop_reason: "tool_use" | "stop_sequence" | "end_turn" | "max_tokens" | null;
- /** Anthropic API key */
  stop_sequence: string | null;
  usage: Anthropic.Messages.Usage;
  };
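
The two hunks above replace the hand-rolled three-member union with the SDK's own Anthropic.Messages.ToolChoice alias (and likewise Anthropic.Messages.Metadata), both available in the @anthropic-ai/sdk version pinned further down in this diff. A minimal sketch of the values such a field accepts, assuming the 0.32.x SDK definitions:

    import Anthropic from "@anthropic-ai/sdk";

    // Let the model decide whether to call a tool, or force a specific one.
    const autoChoice: Anthropic.Messages.ToolChoice = { type: "auto" };
    const forcedChoice: Anthropic.Messages.ToolChoice = {
      type: "tool",
      name: "get_weather", // hypothetical tool name, for illustration only
    };

    // Metadata currently carries an opaque end-user identifier.
    const requestMetadata: Anthropic.Messages.Metadata = { user_id: "user-1234" };
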
package/dist/types.d.ts CHANGED
@@ -18,3 +18,7 @@ export type AnthropicToolChoice = {
  name: string;
  } | "any" | "auto" | "none" | string;
  export type ChatAnthropicToolType = AnthropicTool | BindToolsInput;
+ export type AnthropicTextBlockParam = Anthropic.Messages.TextBlockParam;
+ export type AnthropicImageBlockParam = Anthropic.Messages.ImageBlockParam;
+ export type AnthropicToolUseBlockParam = Anthropic.Messages.ToolUseBlockParam;
+ export type AnthropicToolResultBlockParam = Anthropic.Messages.ToolResultBlockParam;
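
The four new aliases are straight re-exports of the SDK's content-block param types. A short sketch of the shapes they name, written against the SDK types directly (the ids and strings are invented for illustration):

    import Anthropic from "@anthropic-ai/sdk";

    // A plain text block, as aliased by AnthropicTextBlockParam.
    const textBlock: Anthropic.Messages.TextBlockParam = {
      type: "text",
      text: "The lookup succeeded.",
    };

    // A tool_result block, as aliased by AnthropicToolResultBlockParam.
    const toolResultBlock: Anthropic.Messages.ToolResultBlockParam = {
      type: "tool_result",
      tool_use_id: "toolu_abc123", // hypothetical id
      content: "72°F and sunny",
    };
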
@@ -218,8 +218,55 @@ function _convertMessagesToAnthropicPayload(messages) {
  }
  });
  return {
- messages: formattedMessages,
+ messages: mergeMessages(formattedMessages),
  system,
  };
  }
  exports._convertMessagesToAnthropicPayload = _convertMessagesToAnthropicPayload;
+ function mergeMessages(messages) {
+ if (!messages || messages.length <= 1) {
+ return messages;
+ }
+ const result = [];
+ let currentMessage = messages[0];
+ const normalizeContent = (content) => {
+ if (typeof content === "string") {
+ return [
+ {
+ type: "text",
+ text: content,
+ },
+ ];
+ }
+ return content;
+ };
+ const isToolResultMessage = (msg) => {
+ if (msg.role !== "user")
+ return false;
+ if (typeof msg.content === "string") {
+ return false;
+ }
+ return (Array.isArray(msg.content) &&
+ msg.content.every((item) => item.type === "tool_result"));
+ };
+ for (let i = 1; i < messages.length; i += 1) {
+ const nextMessage = messages[i];
+ if (isToolResultMessage(currentMessage) &&
+ isToolResultMessage(nextMessage)) {
+ // Merge the messages by combining their content arrays
+ currentMessage = {
+ ...currentMessage,
+ content: [
+ ...normalizeContent(currentMessage.content),
+ ...normalizeContent(nextMessage.content),
+ ],
+ };
+ }
+ else {
+ result.push(currentMessage);
+ currentMessage = nextMessage;
+ }
+ }
+ result.push(currentMessage);
+ return result;
+ }
@@ -214,7 +214,54 @@ export function _convertMessagesToAnthropicPayload(messages) {
  }
  });
  return {
- messages: formattedMessages,
+ messages: mergeMessages(formattedMessages),
  system,
  };
  }
+ function mergeMessages(messages) {
+ if (!messages || messages.length <= 1) {
+ return messages;
+ }
+ const result = [];
+ let currentMessage = messages[0];
+ const normalizeContent = (content) => {
+ if (typeof content === "string") {
+ return [
+ {
+ type: "text",
+ text: content,
+ },
+ ];
+ }
+ return content;
+ };
+ const isToolResultMessage = (msg) => {
+ if (msg.role !== "user")
+ return false;
+ if (typeof msg.content === "string") {
+ return false;
+ }
+ return (Array.isArray(msg.content) &&
+ msg.content.every((item) => item.type === "tool_result"));
+ };
+ for (let i = 1; i < messages.length; i += 1) {
+ const nextMessage = messages[i];
+ if (isToolResultMessage(currentMessage) &&
+ isToolResultMessage(nextMessage)) {
+ // Merge the messages by combining their content arrays
+ currentMessage = {
+ ...currentMessage,
+ content: [
+ ...normalizeContent(currentMessage.content),
+ ...normalizeContent(nextMessage.content),
+ ],
+ };
+ }
+ else {
+ result.push(currentMessage);
+ currentMessage = nextMessage;
+ }
+ }
+ result.push(currentMessage);
+ return result;
+ }
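
Both compiled builds (the CJS hunk and the ESM hunk above) add the same mergeMessages pass: consecutive user messages whose content consists entirely of tool_result blocks are collapsed into a single user message, matching what the Anthropic Messages API expects when several tool calls are answered in a row. A rough sketch of the effect on the formatted payload (ids and values are invented for illustration):

    // Two adjacent tool_result-only user messages in the formatted payload...
    const before = [
      { role: "user", content: [{ type: "tool_result", tool_use_id: "toolu_1", content: "72°F" }] },
      { role: "user", content: [{ type: "tool_result", tool_use_id: "toolu_2", content: "Sunny" }] },
    ];

    // ...come out of mergeMessages as a single user message carrying both blocks:
    const after = [
      {
        role: "user",
        content: [
          { type: "tool_result", tool_use_id: "toolu_1", content: "72°F" },
          { type: "tool_result", tool_use_id: "toolu_2", content: "Sunny" },
        ],
      },
    ];
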
@@ -14,11 +14,16 @@ function _makeMessageChunkFromAnthropicEvent(data, fields) {
  filteredAdditionalKwargs[key] = value;
  }
  }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const { input_tokens, output_tokens, ...rest } = usage ?? {};
  const usageMetadata = {
  input_tokens,
  output_tokens,
  total_tokens: input_tokens + output_tokens,
+ input_token_details: {
+ cache_creation: rest.cache_creation_input_tokens,
+ cache_read: rest.cache_read_input_tokens,
+ },
  };
  return {
  chunk: new messages_1.AIMessageChunk({
@@ -39,6 +44,12 @@ function _makeMessageChunkFromAnthropicEvent(data, fields) {
  input_tokens: 0,
  output_tokens: data.usage.output_tokens,
  total_tokens: data.usage.output_tokens,
+ input_token_details: {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ cache_creation: data.usage.cache_creation_input_tokens,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ cache_read: data.usage.cache_read_input_tokens,
+ },
  };
  return {
  chunk: new messages_1.AIMessageChunk({
@@ -145,6 +156,10 @@ function anthropicResponseToChatMessages(messages, additionalKwargs) {
  input_tokens: usage.input_tokens ?? 0,
  output_tokens: usage.output_tokens ?? 0,
  total_tokens: (usage.input_tokens ?? 0) + (usage.output_tokens ?? 0),
+ input_token_details: {
+ cache_creation: usage.cache_creation_input_tokens,
+ cache_read: usage.cache_read_input_tokens,
+ },
  }
  : undefined;
  if (messages.length === 1 && messages[0].type === "text") {
@@ -11,11 +11,16 @@ export function _makeMessageChunkFromAnthropicEvent(data, fields) {
  filteredAdditionalKwargs[key] = value;
  }
  }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const { input_tokens, output_tokens, ...rest } = usage ?? {};
  const usageMetadata = {
  input_tokens,
  output_tokens,
  total_tokens: input_tokens + output_tokens,
+ input_token_details: {
+ cache_creation: rest.cache_creation_input_tokens,
+ cache_read: rest.cache_read_input_tokens,
+ },
  };
  return {
  chunk: new AIMessageChunk({
@@ -36,6 +41,12 @@ export function _makeMessageChunkFromAnthropicEvent(data, fields) {
  input_tokens: 0,
  output_tokens: data.usage.output_tokens,
  total_tokens: data.usage.output_tokens,
+ input_token_details: {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ cache_creation: data.usage.cache_creation_input_tokens,
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ cache_read: data.usage.cache_read_input_tokens,
+ },
  };
  return {
  chunk: new AIMessageChunk({
@@ -141,6 +152,10 @@ export function anthropicResponseToChatMessages(messages, additionalKwargs) {
  input_tokens: usage.input_tokens ?? 0,
  output_tokens: usage.output_tokens ?? 0,
  total_tokens: (usage.input_tokens ?? 0) + (usage.output_tokens ?? 0),
+ input_token_details: {
+ cache_creation: usage.cache_creation_input_tokens,
+ cache_read: usage.cache_read_input_tokens,
+ },
  }
  : undefined;
  if (messages.length === 1 && messages[0].type === "text") {
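
With these hunks, streamed chunks and final responses both expose Anthropic's prompt-caching counters under input_token_details in the usage metadata. A hedged sketch of reading them (the model name is only an example; cache_creation and cache_read stay undefined unless the request actually used cache_control):

    import { ChatAnthropic } from "@langchain/anthropic";

    const model = new ChatAnthropic({ model: "claude-3-5-sonnet-20241022" });
    const response = await model.invoke("Hello!");

    // usage_metadata now carries the cache counters next to the totals, e.g.:
    // {
    //   input_tokens: 10,
    //   output_tokens: 25,
    //   total_tokens: 35,
    //   input_token_details: { cache_creation: 0, cache_read: 0 },
    // }
    console.log(response.usage_metadata);
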
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@langchain/anthropic",
- "version": "0.3.10",
+ "version": "0.3.11",
  "description": "Anthropic integrations for LangChain.js",
  "type": "module",
  "engines": {
@@ -35,7 +35,7 @@
  "author": "LangChain",
  "license": "MIT",
  "dependencies": {
- "@anthropic-ai/sdk": "^0.27.3",
+ "@anthropic-ai/sdk": "^0.32.1",
  "fast-xml-parser": "^4.4.1",
  "zod": "^3.22.4",
  "zod-to-json-schema": "^3.22.4"