@langchain/anthropic 0.3.10 → 0.3.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -520,8 +520,8 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
  model: Anthropic.Messages.Model;
  max_tokens: number;
  tools?: Anthropic.Messages.Tool[] | undefined;
- tool_choice?: Anthropic.Messages.MessageCreateParams.ToolChoiceAuto | Anthropic.Messages.MessageCreateParams.ToolChoiceAny | Anthropic.Messages.MessageCreateParams.ToolChoiceTool | undefined;
- metadata?: Anthropic.Messages.MessageCreateParams.Metadata | undefined;
+ tool_choice?: Anthropic.Messages.ToolChoice | undefined;
+ metadata?: Anthropic.Messages.Metadata | undefined;
  temperature?: number | undefined;
  stream?: boolean | undefined;
  stop_sequences?: string[] | undefined;
@@ -537,8 +537,8 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
  model: Anthropic.Messages.Model;
  max_tokens: number;
  tools?: Anthropic.Messages.Tool[] | undefined;
- tool_choice?: Anthropic.Messages.MessageCreateParams.ToolChoiceAuto | Anthropic.Messages.MessageCreateParams.ToolChoiceAny | Anthropic.Messages.MessageCreateParams.ToolChoiceTool | undefined;
- metadata?: Anthropic.Messages.MessageCreateParams.Metadata | undefined;
+ tool_choice?: Anthropic.Messages.ToolChoice | undefined;
+ metadata?: Anthropic.Messages.Metadata | undefined;
  temperature?: number | undefined;
  stream?: boolean | undefined;
  stop_sequences?: string[] | undefined;
@@ -554,7 +554,6 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
  id: string;
  model: Anthropic.Messages.Model;
  stop_reason: "tool_use" | "stop_sequence" | "end_turn" | "max_tokens" | null;
- /** Anthropic API key */
  stop_sequence: string | null;
  usage: Anthropic.Messages.Usage;
  };
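
With the SDK bump, the generated request params type `tool_choice` collapses to the consolidated `Anthropic.Messages.ToolChoice` union instead of the three separate `MessageCreateParams.ToolChoice*` interfaces, and `metadata` likewise moves to `Anthropic.Messages.Metadata`. A minimal sketch of how a caller might drive this field via `bindTools` (the tool, schema, and model name below are illustrative, not part of this diff):

import { ChatAnthropic } from "@langchain/anthropic";
import { tool } from "@langchain/core/tools";
import { z } from "zod";

// Illustrative tool for the sketch.
const getWeather = tool(async ({ city }: { city: string }) => `Sunny in ${city}`, {
  name: "get_weather",
  description: "Look up the weather for a city",
  schema: z.object({ city: z.string() }),
});

const model = new ChatAnthropic({ model: "claude-3-5-sonnet-20240620" });

// tool_choice accepts "auto", "any", "none", or a specific tool name;
// it is forwarded to the SDK as the Anthropic.Messages.ToolChoice union.
const forced = model.bindTools([getWeather], { tool_choice: "get_weather" });
const res = await forced.invoke("What's the weather in Tokyo?");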
package/dist/types.d.ts CHANGED
@@ -18,3 +18,7 @@ export type AnthropicToolChoice = {
  name: string;
  } | "any" | "auto" | "none" | string;
  export type ChatAnthropicToolType = AnthropicTool | BindToolsInput;
+ export type AnthropicTextBlockParam = Anthropic.Messages.TextBlockParam;
+ export type AnthropicImageBlockParam = Anthropic.Messages.ImageBlockParam;
+ export type AnthropicToolUseBlockParam = Anthropic.Messages.ToolUseBlockParam;
+ export type AnthropicToolResultBlockParam = Anthropic.Messages.ToolResultBlockParam;
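
The new aliases are straight re-exports of the SDK's content block param types. Assuming they are reachable from the package's types module (the import path below is a guess, not confirmed by this diff), they can be used to type raw content blocks:

// Hypothetical import path; the aliases are declared in dist/types.d.ts and
// map 1:1 onto Anthropic.Messages.* block param types from the SDK.
import type { AnthropicToolResultBlockParam } from "@langchain/anthropic/types";

const block: AnthropicToolResultBlockParam = {
  type: "tool_result",
  tool_use_id: "toolu_example",
  content: "42",
};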
@@ -105,6 +105,14 @@ function _formatContent(content) {
  ...(cacheControl ? { cache_control: cacheControl } : {}),
  };
  }
+ else if (contentPart.type === "document") {
+   // PDF
+   return {
+     type: "document",
+     source: contentPart.source,
+     ...(cacheControl ? { cache_control: cacheControl } : {}),
+   };
+ }
  else if (textTypes.find((t) => t === contentPart.type) &&
    "text" in contentPart) {
  // Assuming contentPart is of type MessageContentText here
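
`_formatContent` now forwards `document` content parts (e.g. base64-encoded PDFs) to the API unchanged, keeping any `cache_control`. A hedged sketch of what such a message could look like (the file path and model name are illustrative):

import { readFileSync } from "node:fs";
import { ChatAnthropic } from "@langchain/anthropic";
import { HumanMessage } from "@langchain/core/messages";

const model = new ChatAnthropic({ model: "claude-3-5-sonnet-20241022" });

// Illustrative: a base64-encoded PDF passed as a "document" content part,
// which _formatContent passes through as { type: "document", source }.
const pdfBase64 = readFileSync("./report.pdf").toString("base64");
const res = await model.invoke([
  new HumanMessage({
    content: [
      {
        type: "document",
        source: { type: "base64", media_type: "application/pdf", data: pdfBase64 },
      },
      { type: "text", text: "Summarize this document." },
    ],
  }),
]);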
@@ -218,8 +226,55 @@ function _convertMessagesToAnthropicPayload(messages) {
  }
  });
  return {
- messages: formattedMessages,
+ messages: mergeMessages(formattedMessages),
  system,
  };
  }
  exports._convertMessagesToAnthropicPayload = _convertMessagesToAnthropicPayload;
+ function mergeMessages(messages) {
+   if (!messages || messages.length <= 1) {
+     return messages;
+   }
+   const result = [];
+   let currentMessage = messages[0];
+   const normalizeContent = (content) => {
+     if (typeof content === "string") {
+       return [
+         {
+           type: "text",
+           text: content,
+         },
+       ];
+     }
+     return content;
+   };
+   const isToolResultMessage = (msg) => {
+     if (msg.role !== "user")
+       return false;
+     if (typeof msg.content === "string") {
+       return false;
+     }
+     return (Array.isArray(msg.content) &&
+       msg.content.every((item) => item.type === "tool_result"));
+   };
+   for (let i = 1; i < messages.length; i += 1) {
+     const nextMessage = messages[i];
+     if (isToolResultMessage(currentMessage) &&
+       isToolResultMessage(nextMessage)) {
+       // Merge the messages by combining their content arrays
+       currentMessage = {
+         ...currentMessage,
+         content: [
+           ...normalizeContent(currentMessage.content),
+           ...normalizeContent(nextMessage.content),
+         ],
+       };
+     }
+     else {
+       result.push(currentMessage);
+       currentMessage = nextMessage;
+     }
+   }
+   result.push(currentMessage);
+   return result;
+ }
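
The new `mergeMessages` helper collapses consecutive user messages whose content consists solely of `tool_result` blocks into a single user message before the payload is sent. Roughly, an input like the following (shapes abbreviated, values illustrative) is merged by concatenating the content arrays:

// Two adjacent tool-result-only user messages...
const before = [
  { role: "user", content: [{ type: "tool_result", tool_use_id: "toolu_1", content: "3" }] },
  { role: "user", content: [{ type: "tool_result", tool_use_id: "toolu_2", content: "7" }] },
];

// ...become one user message carrying both tool_result blocks:
const after = [
  {
    role: "user",
    content: [
      { type: "tool_result", tool_use_id: "toolu_1", content: "3" },
      { type: "tool_result", tool_use_id: "toolu_2", content: "7" },
    ],
  },
];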
@@ -101,6 +101,14 @@ function _formatContent(content) {
  ...(cacheControl ? { cache_control: cacheControl } : {}),
  };
  }
+ else if (contentPart.type === "document") {
+   // PDF
+   return {
+     type: "document",
+     source: contentPart.source,
+     ...(cacheControl ? { cache_control: cacheControl } : {}),
+   };
+ }
  else if (textTypes.find((t) => t === contentPart.type) &&
    "text" in contentPart) {
  // Assuming contentPart is of type MessageContentText here
@@ -214,7 +222,54 @@ export function _convertMessagesToAnthropicPayload(messages) {
  }
  });
  return {
- messages: formattedMessages,
+ messages: mergeMessages(formattedMessages),
  system,
  };
  }
+ function mergeMessages(messages) {
+   if (!messages || messages.length <= 1) {
+     return messages;
+   }
+   const result = [];
+   let currentMessage = messages[0];
+   const normalizeContent = (content) => {
+     if (typeof content === "string") {
+       return [
+         {
+           type: "text",
+           text: content,
+         },
+       ];
+     }
+     return content;
+   };
+   const isToolResultMessage = (msg) => {
+     if (msg.role !== "user")
+       return false;
+     if (typeof msg.content === "string") {
+       return false;
+     }
+     return (Array.isArray(msg.content) &&
+       msg.content.every((item) => item.type === "tool_result"));
+   };
+   for (let i = 1; i < messages.length; i += 1) {
+     const nextMessage = messages[i];
+     if (isToolResultMessage(currentMessage) &&
+       isToolResultMessage(nextMessage)) {
+       // Merge the messages by combining their content arrays
+       currentMessage = {
+         ...currentMessage,
+         content: [
+           ...normalizeContent(currentMessage.content),
+           ...normalizeContent(nextMessage.content),
+         ],
+       };
+     }
+     else {
+       result.push(currentMessage);
+       currentMessage = nextMessage;
+     }
+   }
+   result.push(currentMessage);
+   return result;
+ }
@@ -14,11 +14,16 @@ function _makeMessageChunkFromAnthropicEvent(data, fields) {
  filteredAdditionalKwargs[key] = value;
  }
  }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const { input_tokens, output_tokens, ...rest } = usage ?? {};
  const usageMetadata = {
  input_tokens,
  output_tokens,
  total_tokens: input_tokens + output_tokens,
+ input_token_details: {
+   cache_creation: rest.cache_creation_input_tokens,
+   cache_read: rest.cache_read_input_tokens,
+ },
  };
  return {
  chunk: new messages_1.AIMessageChunk({
@@ -39,6 +44,12 @@ function _makeMessageChunkFromAnthropicEvent(data, fields) {
  input_tokens: 0,
  output_tokens: data.usage.output_tokens,
  total_tokens: data.usage.output_tokens,
+ input_token_details: {
+   // eslint-disable-next-line @typescript-eslint/no-explicit-any
+   cache_creation: data.usage.cache_creation_input_tokens,
+   // eslint-disable-next-line @typescript-eslint/no-explicit-any
+   cache_read: data.usage.cache_read_input_tokens,
+ },
  };
  return {
  chunk: new messages_1.AIMessageChunk({
@@ -145,6 +156,10 @@ function anthropicResponseToChatMessages(messages, additionalKwargs) {
  input_tokens: usage.input_tokens ?? 0,
  output_tokens: usage.output_tokens ?? 0,
  total_tokens: (usage.input_tokens ?? 0) + (usage.output_tokens ?? 0),
+ input_token_details: {
+   cache_creation: usage.cache_creation_input_tokens,
+   cache_read: usage.cache_read_input_tokens,
+ },
  }
  : undefined;
  if (messages.length === 1 && messages[0].type === "text") {
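
Both the streaming and non-streaming paths now surface the prompt-caching counters reported by the API under `usage_metadata.input_token_details`. A rough sketch of reading them from a response (model name is illustrative; the cache fields are only populated when the API returns them):

import { ChatAnthropic } from "@langchain/anthropic";

const model = new ChatAnthropic({ model: "claude-3-5-sonnet-20241022" });
const res = await model.invoke("Hello");

// usage_metadata now carries cache_creation_input_tokens / cache_read_input_tokens
// from the API as input_token_details.cache_creation / .cache_read:
console.log(res.usage_metadata);
// e.g. { input_tokens, output_tokens, total_tokens,
//        input_token_details: { cache_creation, cache_read } }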
@@ -11,11 +11,16 @@ export function _makeMessageChunkFromAnthropicEvent(data, fields) {
  filteredAdditionalKwargs[key] = value;
  }
  }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const { input_tokens, output_tokens, ...rest } = usage ?? {};
  const usageMetadata = {
  input_tokens,
  output_tokens,
  total_tokens: input_tokens + output_tokens,
+ input_token_details: {
+   cache_creation: rest.cache_creation_input_tokens,
+   cache_read: rest.cache_read_input_tokens,
+ },
  };
  return {
  chunk: new AIMessageChunk({
@@ -36,6 +41,12 @@ export function _makeMessageChunkFromAnthropicEvent(data, fields) {
  input_tokens: 0,
  output_tokens: data.usage.output_tokens,
  total_tokens: data.usage.output_tokens,
+ input_token_details: {
+   // eslint-disable-next-line @typescript-eslint/no-explicit-any
+   cache_creation: data.usage.cache_creation_input_tokens,
+   // eslint-disable-next-line @typescript-eslint/no-explicit-any
+   cache_read: data.usage.cache_read_input_tokens,
+ },
  };
  return {
  chunk: new AIMessageChunk({
@@ -141,6 +152,10 @@ export function anthropicResponseToChatMessages(messages, additionalKwargs) {
  input_tokens: usage.input_tokens ?? 0,
  output_tokens: usage.output_tokens ?? 0,
  total_tokens: (usage.input_tokens ?? 0) + (usage.output_tokens ?? 0),
+ input_token_details: {
+   cache_creation: usage.cache_creation_input_tokens,
+   cache_read: usage.cache_read_input_tokens,
+ },
  }
  : undefined;
  if (messages.length === 1 && messages[0].type === "text") {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@langchain/anthropic",
- "version": "0.3.10",
+ "version": "0.3.12",
  "description": "Anthropic integrations for LangChain.js",
  "type": "module",
  "engines": {
@@ -35,7 +35,7 @@
  "author": "LangChain",
  "license": "MIT",
  "dependencies": {
- "@anthropic-ai/sdk": "^0.27.3",
+ "@anthropic-ai/sdk": "^0.32.1",
  "fast-xml-parser": "^4.4.1",
  "zod": "^3.22.4",
  "zod-to-json-schema": "^3.22.4"