@langchain/anthropic 0.3.9 → 0.3.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/chat_models.d.ts
CHANGED
@@ -520,8 +520,8 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
     model: Anthropic.Messages.Model;
     max_tokens: number;
     tools?: Anthropic.Messages.Tool[] | undefined;
-    tool_choice?: Anthropic.Messages.
-    metadata?: Anthropic.Messages.
+    tool_choice?: Anthropic.Messages.ToolChoice | undefined;
+    metadata?: Anthropic.Messages.Metadata | undefined;
     temperature?: number | undefined;
     stream?: boolean | undefined;
     stop_sequences?: string[] | undefined;

@@ -537,8 +537,8 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
     model: Anthropic.Messages.Model;
     max_tokens: number;
     tools?: Anthropic.Messages.Tool[] | undefined;
-    tool_choice?: Anthropic.Messages.
-    metadata?: Anthropic.Messages.
+    tool_choice?: Anthropic.Messages.ToolChoice | undefined;
+    metadata?: Anthropic.Messages.Metadata | undefined;
     temperature?: number | undefined;
     stream?: boolean | undefined;
     stop_sequences?: string[] | undefined;

@@ -554,7 +554,6 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
     id: string;
     model: Anthropic.Messages.Model;
     stop_reason: "tool_use" | "stop_sequence" | "end_turn" | "max_tokens" | null;
-    /** Anthropic API key */
     stop_sequence: string | null;
     usage: Anthropic.Messages.Usage;
 };
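The request-level tool_choice and metadata params are now typed against the Anthropic SDK (Anthropic.Messages.ToolChoice and Anthropic.Messages.Metadata) instead of being left open. A minimal sketch of forcing a specific tool from the LangChain side, assuming a hypothetical get_weather tool and an illustrative model name (neither is part of this diff); tool_choice accepts the AnthropicToolChoice union shown in types.d.ts below:

    import { ChatAnthropic } from "@langchain/anthropic";
    import { tool } from "@langchain/core/tools";
    import { z } from "zod";

    // Hypothetical tool, used only for illustration.
    const getWeather = tool(async ({ city }) => `Sunny in ${city}`, {
      name: "get_weather",
      description: "Get the weather for a city",
      schema: z.object({ city: z.string() }),
    });

    const model = new ChatAnthropic({ model: "claude-3-5-sonnet-latest" });

    // "auto" | "any" | "none" | a tool name are accepted; the request-level
    // tool_choice sent to Anthropic is now typed as Anthropic.Messages.ToolChoice.
    const forced = model.bindTools([getWeather], { tool_choice: "get_weather" });
    const reply = await forced.invoke("What is the weather in Berlin?");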
package/dist/types.d.ts
CHANGED
@@ -18,3 +18,7 @@ export type AnthropicToolChoice = {
     name: string;
 } | "any" | "auto" | "none" | string;
 export type ChatAnthropicToolType = AnthropicTool | BindToolsInput;
+export type AnthropicTextBlockParam = Anthropic.Messages.TextBlockParam;
+export type AnthropicImageBlockParam = Anthropic.Messages.ImageBlockParam;
+export type AnthropicToolUseBlockParam = Anthropic.Messages.ToolUseBlockParam;
+export type AnthropicToolResultBlockParam = Anthropic.Messages.ToolResultBlockParam;
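The four new aliases re-export the Anthropic SDK content-block param types under package-local names. For reference, a sketch of the shapes they stand for, written directly against the SDK types named above (the ids and strings are illustrative only):

    import type Anthropic from "@anthropic-ai/sdk";

    // Same shapes the new aliases (AnthropicTextBlockParam,
    // AnthropicToolResultBlockParam, ...) point at.
    const textBlock: Anthropic.Messages.TextBlockParam = {
      type: "text",
      text: "Hello",
    };

    const toolResultBlock: Anthropic.Messages.ToolResultBlockParam = {
      type: "tool_result",
      tool_use_id: "toolu_01", // illustrative id
      content: "Sunny, 68F",
    };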
@@ -218,8 +218,55 @@ function _convertMessagesToAnthropicPayload(messages) {
         }
     });
     return {
-        messages: formattedMessages,
+        messages: mergeMessages(formattedMessages),
         system,
     };
 }
 exports._convertMessagesToAnthropicPayload = _convertMessagesToAnthropicPayload;
+function mergeMessages(messages) {
+    if (!messages || messages.length <= 1) {
+        return messages;
+    }
+    const result = [];
+    let currentMessage = messages[0];
+    const normalizeContent = (content) => {
+        if (typeof content === "string") {
+            return [
+                {
+                    type: "text",
+                    text: content,
+                },
+            ];
+        }
+        return content;
+    };
+    const isToolResultMessage = (msg) => {
+        if (msg.role !== "user")
+            return false;
+        if (typeof msg.content === "string") {
+            return false;
+        }
+        return (Array.isArray(msg.content) &&
+            msg.content.every((item) => item.type === "tool_result"));
+    };
+    for (let i = 1; i < messages.length; i += 1) {
+        const nextMessage = messages[i];
+        if (isToolResultMessage(currentMessage) &&
+            isToolResultMessage(nextMessage)) {
+            // Merge the messages by combining their content arrays
+            currentMessage = {
+                ...currentMessage,
+                content: [
+                    ...normalizeContent(currentMessage.content),
+                    ...normalizeContent(nextMessage.content),
+                ],
+            };
+        }
+        else {
+            result.push(currentMessage);
+            currentMessage = nextMessage;
+        }
+    }
+    result.push(currentMessage);
+    return result;
+}
@@ -214,7 +214,54 @@ export function _convertMessagesToAnthropicPayload(messages) {
         }
     });
     return {
-        messages: formattedMessages,
+        messages: mergeMessages(formattedMessages),
         system,
     };
 }
+function mergeMessages(messages) {
+    if (!messages || messages.length <= 1) {
+        return messages;
+    }
+    const result = [];
+    let currentMessage = messages[0];
+    const normalizeContent = (content) => {
+        if (typeof content === "string") {
+            return [
+                {
+                    type: "text",
+                    text: content,
+                },
+            ];
+        }
+        return content;
+    };
+    const isToolResultMessage = (msg) => {
+        if (msg.role !== "user")
+            return false;
+        if (typeof msg.content === "string") {
+            return false;
+        }
+        return (Array.isArray(msg.content) &&
+            msg.content.every((item) => item.type === "tool_result"));
+    };
+    for (let i = 1; i < messages.length; i += 1) {
+        const nextMessage = messages[i];
+        if (isToolResultMessage(currentMessage) &&
+            isToolResultMessage(nextMessage)) {
+            // Merge the messages by combining their content arrays
+            currentMessage = {
+                ...currentMessage,
+                content: [
+                    ...normalizeContent(currentMessage.content),
+                    ...normalizeContent(nextMessage.content),
+                ],
+            };
+        }
+        else {
+            result.push(currentMessage);
+            currentMessage = nextMessage;
+        }
+    }
+    result.push(currentMessage);
+    return result;
+}
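The new mergeMessages helper (added to both the CJS and ESM builds above) collapses consecutive user messages that contain only tool_result blocks into a single user turn, which is what the Anthropic Messages API expects when several tool calls are answered at once. A rough sketch of the effect from the caller's side, assuming a hypothetical get_weather tool, illustrative tool-call ids, and an illustrative model name: each ToolMessage below becomes a tool_result-only user message, and the two adjacent ones are now merged into one message before the request is sent.

    import { ChatAnthropic } from "@langchain/anthropic";
    import { AIMessage, HumanMessage, ToolMessage } from "@langchain/core/messages";

    const model = new ChatAnthropic({ model: "claude-3-5-sonnet-latest" });

    const history = [
      new HumanMessage("Weather in SF and NYC?"),
      // Assistant turn with two parallel tool calls (illustrative ids).
      new AIMessage({
        content: "",
        tool_calls: [
          { id: "toolu_01", name: "get_weather", args: { city: "SF" } },
          { id: "toolu_02", name: "get_weather", args: { city: "NYC" } },
        ],
      }),
      // Two ToolMessages -> two adjacent tool_result-only user messages;
      // mergeMessages combines them into one user message with both blocks.
      new ToolMessage({ tool_call_id: "toolu_01", content: "Sunny, 68F" }),
      new ToolMessage({ tool_call_id: "toolu_02", content: "Rainy, 54F" }),
    ];

    const reply = await model.invoke(history);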
@@ -14,16 +14,27 @@ function _makeMessageChunkFromAnthropicEvent(data, fields) {
                 filteredAdditionalKwargs[key] = value;
             }
         }
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        const { input_tokens, output_tokens, ...rest } = usage ?? {};
         const usageMetadata = {
-            input_tokens
-            output_tokens
-            total_tokens:
+            input_tokens,
+            output_tokens,
+            total_tokens: input_tokens + output_tokens,
+            input_token_details: {
+                cache_creation: rest.cache_creation_input_tokens,
+                cache_read: rest.cache_read_input_tokens,
+            },
         };
         return {
             chunk: new messages_1.AIMessageChunk({
                 content: fields.coerceContentToString ? "" : [],
                 additional_kwargs: filteredAdditionalKwargs,
                 usage_metadata: fields.streamUsage ? usageMetadata : undefined,
+                response_metadata: {
+                    usage: {
+                        ...rest,
+                    },
+                },
                 id: data.message.id,
             }),
         };

@@ -33,6 +44,12 @@ function _makeMessageChunkFromAnthropicEvent(data, fields) {
             input_tokens: 0,
             output_tokens: data.usage.output_tokens,
             total_tokens: data.usage.output_tokens,
+            input_token_details: {
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                cache_creation: data.usage.cache_creation_input_tokens,
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                cache_read: data.usage.cache_read_input_tokens,
+            },
         };
         return {
             chunk: new messages_1.AIMessageChunk({

@@ -139,6 +156,10 @@ function anthropicResponseToChatMessages(messages, additionalKwargs) {
             input_tokens: usage.input_tokens ?? 0,
             output_tokens: usage.output_tokens ?? 0,
             total_tokens: (usage.input_tokens ?? 0) + (usage.output_tokens ?? 0),
+            input_token_details: {
+                cache_creation: usage.cache_creation_input_tokens,
+                cache_read: usage.cache_read_input_tokens,
+            },
         }
         : undefined;
     if (messages.length === 1 && messages[0].type === "text") {

@@ -11,16 +11,27 @@ export function _makeMessageChunkFromAnthropicEvent(data, fields) {
                 filteredAdditionalKwargs[key] = value;
             }
         }
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        const { input_tokens, output_tokens, ...rest } = usage ?? {};
         const usageMetadata = {
-            input_tokens
-            output_tokens
-            total_tokens:
+            input_tokens,
+            output_tokens,
+            total_tokens: input_tokens + output_tokens,
+            input_token_details: {
+                cache_creation: rest.cache_creation_input_tokens,
+                cache_read: rest.cache_read_input_tokens,
+            },
         };
         return {
             chunk: new AIMessageChunk({
                 content: fields.coerceContentToString ? "" : [],
                 additional_kwargs: filteredAdditionalKwargs,
                 usage_metadata: fields.streamUsage ? usageMetadata : undefined,
+                response_metadata: {
+                    usage: {
+                        ...rest,
+                    },
+                },
                 id: data.message.id,
             }),
         };

@@ -30,6 +41,12 @@ export function _makeMessageChunkFromAnthropicEvent(data, fields) {
             input_tokens: 0,
             output_tokens: data.usage.output_tokens,
             total_tokens: data.usage.output_tokens,
+            input_token_details: {
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                cache_creation: data.usage.cache_creation_input_tokens,
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                cache_read: data.usage.cache_read_input_tokens,
+            },
         };
         return {
             chunk: new AIMessageChunk({

@@ -135,6 +152,10 @@ export function anthropicResponseToChatMessages(messages, additionalKwargs) {
             input_tokens: usage.input_tokens ?? 0,
             output_tokens: usage.output_tokens ?? 0,
             total_tokens: (usage.input_tokens ?? 0) + (usage.output_tokens ?? 0),
+            input_token_details: {
+                cache_creation: usage.cache_creation_input_tokens,
+                cache_read: usage.cache_read_input_tokens,
+            },
         }
         : undefined;
     if (messages.length === 1 && messages[0].type === "text") {
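Cache-related token counts from Anthropic's usage block now surface under usage_metadata.input_token_details, and the remaining raw usage fields are carried on response_metadata for streamed chunks. A minimal sketch of reading them, assuming an illustrative model name; cache_creation and cache_read are only meaningful when prompt caching is in use:

    import { ChatAnthropic } from "@langchain/anthropic";

    const model = new ChatAnthropic({ model: "claude-3-5-sonnet-latest" });
    const result = await model.invoke("Summarize the cached system prompt.");

    // input_token_details is new in this release.
    console.log(result.usage_metadata);
    // {
    //   input_tokens: ...,
    //   output_tokens: ...,
    //   total_tokens: ...,
    //   input_token_details: { cache_creation: ..., cache_read: ... }
    // }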
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/anthropic",
-  "version": "0.3.9",
+  "version": "0.3.11",
   "description": "Anthropic integrations for LangChain.js",
   "type": "module",
   "engines": {

@@ -35,7 +35,7 @@
   "author": "LangChain",
   "license": "MIT",
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.
+    "@anthropic-ai/sdk": "^0.32.1",
     "fast-xml-parser": "^4.4.1",
     "zod": "^3.22.4",
     "zod-to-json-schema": "^3.22.4"