@langchain/anthropic 0.1.12 → 0.1.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -3
- package/dist/chat_models.cjs +240 -139
- package/dist/chat_models.d.ts +15 -16
- package/dist/chat_models.js +240 -140
- package/dist/output_parsers.cjs +14 -7
- package/dist/output_parsers.d.ts +2 -0
- package/dist/output_parsers.js +12 -6
- package/dist/tests/agent.int.test.d.ts +1 -0
- package/dist/tests/agent.int.test.js +39 -0
- package/dist/tests/chat_models-tools.int.test.d.ts +1 -0
- package/dist/tests/chat_models-tools.int.test.js +218 -0
- package/dist/tests/chat_models.int.test.js +1 -179
- package/package.json +2 -2
package/README.md
CHANGED
@@ -52,7 +52,7 @@ Then initialize
 import { ChatAnthropicMessages } from "@langchain/anthropic";

 const model = new ChatAnthropic({
-
+  apiKey: process.env.ANTHROPIC_API_KEY,
 });
 const response = await model.invoke(new HumanMessage("Hello world!"));
 ```
@@ -63,8 +63,8 @@ const response = await model.invoke(new HumanMessage("Hello world!"));
 import { ChatAnthropicMessages } from "@langchain/anthropic";

 const model = new ChatAnthropic({
-
-
+  apiKey: process.env.ANTHROPIC_API_KEY,
+  model: "claude-3-sonnet-20240229",
 });
 const response = await model.stream(new HumanMessage("Hello world!"));
 ```
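
Note: the README hunks above switch the example constructor options to `apiKey` and `model`; the older `anthropicApiKey`/`modelName` fields remain accepted for backwards compatibility (see the chat_models diff below). A minimal sketch of the updated initialization, assuming `ANTHROPIC_API_KEY` is set in the environment and using the model id shown in the diff:

import { ChatAnthropic } from "@langchain/anthropic";
import { HumanMessage } from "@langchain/core/messages";

// `apiKey` supersedes the older `anthropicApiKey` option, and `model`
// supersedes `modelName`; both old names are still read by the constructor.
const model = new ChatAnthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
  model: "claude-3-sonnet-20240229",
});

const response = await model.invoke([new HumanMessage("Hello world!")]);
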
package/dist/chat_models.cjs
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ChatAnthropic = exports.ChatAnthropicMessages = void 0;
+exports.ChatAnthropic = exports.ChatAnthropicMessages = exports._convertLangChainToolCallToAnthropic = void 0;
 const sdk_1 = require("@anthropic-ai/sdk");
 const messages_1 = require("@langchain/core/messages");
 const outputs_1 = require("@langchain/core/outputs");
@@ -36,14 +36,15 @@ function anthropicResponseToChatMessages(messages, additionalKwargs) {
         ];
     }
     else {
-
-        const castMessage = messages;
+        const toolCalls = (0, output_parsers_js_1.extractToolCalls)(messages);
         const generations = [
             {
                 text: "",
                 message: new messages_1.AIMessage({
-
+                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                    content: messages,
                     additional_kwargs: additionalKwargs,
+                    tool_calls: toolCalls,
                 }),
             },
         ];
@@ -54,6 +55,176 @@ function anthropicResponseToChatMessages(messages, additionalKwargs) {
 function isAnthropicTool(tool) {
     return "input_schema" in tool;
 }
+function _mergeMessages(messages) {
+    // Merge runs of human/tool messages into single human messages with content blocks.
+    const merged = [];
+    for (const message of messages) {
+        if (message._getType() === "tool") {
+            if (typeof message.content === "string") {
+                merged.push(new messages_1.HumanMessage({
+                    content: [
+                        {
+                            type: "tool_result",
+                            content: message.content,
+                            tool_use_id: message.tool_call_id,
+                        },
+                    ],
+                }));
+            }
+            else {
+                merged.push(new messages_1.HumanMessage({ content: message.content }));
+            }
+        }
+        else {
+            const previousMessage = merged[merged.length - 1];
+            if (previousMessage?._getType() === "human" &&
+                message._getType() === "human") {
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                let combinedContent;
+                if (typeof previousMessage.content === "string") {
+                    combinedContent = [{ type: "text", text: previousMessage.content }];
+                }
+                else {
+                    combinedContent = previousMessage.content;
+                }
+                if (typeof message.content === "string") {
+                    combinedContent.push({ type: "text", text: message.content });
+                }
+                else {
+                    combinedContent = combinedContent.concat(message.content);
+                }
+                previousMessage.content = combinedContent;
+            }
+            else {
+                merged.push(message);
+            }
+        }
+    }
+    return merged;
+}
+function _convertLangChainToolCallToAnthropic(toolCall) {
+    if (toolCall.id === undefined) {
+        throw new Error(`Anthropic requires all tool calls to have an "id".`);
+    }
+    return {
+        type: "tool_use",
+        id: toolCall.id,
+        name: toolCall.name,
+        input: toolCall.args,
+    };
+}
+exports._convertLangChainToolCallToAnthropic = _convertLangChainToolCallToAnthropic;
+function _formatContent(content) {
+    if (typeof content === "string") {
+        return content;
+    }
+    else {
+        const contentBlocks = content.map((contentPart) => {
+            if (contentPart.type === "image_url") {
+                let source;
+                if (typeof contentPart.image_url === "string") {
+                    source = _formatImage(contentPart.image_url);
+                }
+                else {
+                    source = _formatImage(contentPart.image_url.url);
+                }
+                return {
+                    type: "image",
+                    source,
+                };
+            }
+            else if (contentPart.type === "text") {
+                // Assuming contentPart is of type MessageContentText here
+                return {
+                    type: "text",
+                    text: contentPart.text,
+                };
+            }
+            else if (contentPart.type === "tool_use" ||
+                contentPart.type === "tool_result") {
+                // TODO: Fix when SDK types are fixed
+                return {
+                    ...contentPart,
+                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                };
+            }
+            else {
+                throw new Error("Unsupported message content format");
+            }
+        });
+        return contentBlocks;
+    }
+}
+/**
+ * Formats messages as a prompt for the model.
+ * @param messages The base messages to format as a prompt.
+ * @returns The formatted prompt.
+ */
+function _formatMessagesForAnthropic(messages) {
+    const mergedMessages = _mergeMessages(messages);
+    let system;
+    if (mergedMessages.length > 0 && mergedMessages[0]._getType() === "system") {
+        if (typeof messages[0].content !== "string") {
+            throw new Error("System message content must be a string.");
+        }
+        system = messages[0].content;
+    }
+    const conversationMessages = system !== undefined ? mergedMessages.slice(1) : mergedMessages;
+    const formattedMessages = conversationMessages.map((message) => {
+        let role;
+        if (message._getType() === "human") {
+            role = "user";
+        }
+        else if (message._getType() === "ai") {
+            role = "assistant";
+        }
+        else if (message._getType() === "tool") {
+            role = "user";
+        }
+        else if (message._getType() === "system") {
+            throw new Error("System messages are only permitted as the first passed message.");
+        }
+        else {
+            throw new Error(`Message type "${message._getType()}" is not supported.`);
+        }
+        if ((0, messages_1.isAIMessage)(message) && !!message.tool_calls?.length) {
+            if (message.content === "") {
+                return {
+                    role,
+                    content: message.tool_calls.map(_convertLangChainToolCallToAnthropic),
+                };
+            }
+            else if (typeof message.content === "string") {
+                console.warn(`The "tool_calls" field on a message is only respected if content is an empty string.`);
+                return {
+                    role,
+                    content: _formatContent(message.content),
+                };
+            }
+            else {
+                const { content } = message;
+                const hasMismatchedToolCalls = !message.tool_calls.every((toolCall) => content.find((contentPart) => contentPart.type === "tool_use" && contentPart.id === toolCall.id));
+                if (hasMismatchedToolCalls) {
+                    console.warn(`The "tool_calls" field on a message is only respected if content is an empty string.`);
+                }
+                return {
+                    role,
+                    content: _formatContent(message.content),
+                };
+            }
+        }
+        else {
+            return {
+                role,
+                content: _formatContent(message.content),
+            };
+        }
+    });
+    return {
+        messages: formattedMessages,
+        system,
+    };
+}
 /**
  * Wrapper around Anthropic large language models.
  *
@@ -71,7 +242,7 @@ function isAnthropicTool(tool) {
  *
  * const model = new ChatAnthropic({
  *   temperature: 0.9,
- *
+ *   apiKey: 'YOUR-API-KEY',
  * });
  * const res = await model.invoke({ input: 'Hello!' });
  * console.log(res);
@@ -84,6 +255,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
     get lc_secrets() {
         return {
             anthropicApiKey: "ANTHROPIC_API_KEY",
+            apiKey: "ANTHROPIC_API_KEY",
         };
     }
     get lc_aliases() {
@@ -105,6 +277,12 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             writable: true,
             value: void 0
         });
+        Object.defineProperty(this, "apiKey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
         Object.defineProperty(this, "apiUrl", {
             enumerable: true,
             configurable: true,
@@ -141,6 +319,12 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             writable: true,
             value: "claude-2.1"
         });
+        Object.defineProperty(this, "model", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "claude-2.1"
+        });
         Object.defineProperty(this, "invocationKwargs", {
             enumerable: true,
             configurable: true,
@@ -180,13 +364,19 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             value: void 0
         });
         this.anthropicApiKey =
-            fields?.
+            fields?.apiKey ??
+                fields?.anthropicApiKey ??
+                (0, env_1.getEnvironmentVariable)("ANTHROPIC_API_KEY");
         if (!this.anthropicApiKey) {
             throw new Error("Anthropic API key not found");
         }
+        /** Keep anthropicApiKey for backwards compatibility */
+        this.apiKey = this.anthropicApiKey;
         // Support overriding the default API URL (i.e., https://api.anthropic.com)
         this.apiUrl = fields?.anthropicApiUrl;
-
+        /** Keep modelName for backwards compatibility */
+        this.modelName = fields?.model ?? fields?.modelName ?? this.model;
+        this.model = this.modelName;
         this.invocationKwargs = fields?.invocationKwargs ?? {};
         this.temperature = fields?.temperature ?? this.temperature;
         this.topK = fields?.topK ?? this.topK;
@@ -223,46 +413,32 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             input_schema: (0, zod_to_json_schema_1.zodToJsonSchema)(tool.schema),
         }));
     }
+    bindTools(tools, kwargs) {
+        return this.bind({
+            tools: this.formatStructuredToolToAnthropic(tools),
+            ...kwargs,
+        });
+    }
     /**
      * Get the parameters used to invoke the model
      */
     invocationParams(options) {
         return {
-            model: this.
+            model: this.model,
             temperature: this.temperature,
             top_k: this.topK,
             top_p: this.topP,
             stop_sequences: options?.stop ?? this.stopSequences,
             stream: this.streaming,
             max_tokens: this.maxTokens,
+            tools: this.formatStructuredToolToAnthropic(options?.tools),
             ...this.invocationKwargs,
         };
     }
-    invocationOptions(request, options) {
-        const toolUseBetaHeader = {
-            "anthropic-beta": "tools-2024-04-04",
-        };
-        const tools = this.formatStructuredToolToAnthropic(options?.tools);
-        // If tools are present, populate the body with the message request params.
-        // This is because Anthropic overwrites the message request params if a body
-        // is passed.
-        const body = tools
-            ? {
-                ...request,
-                tools,
-            }
-            : undefined;
-        const headers = tools ? toolUseBetaHeader : undefined;
-        return {
-            signal: options.signal,
-            ...(body ? { body } : {}),
-            ...(headers ? { headers } : {}),
-        };
-    }
     /** @ignore */
     _identifyingParams() {
         return {
-            model_name: this.
+            model_name: this.model,
             ...this.invocationParams(),
         };
     }
@@ -271,28 +447,29 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
      */
     identifyingParams() {
         return {
-            model_name: this.
+            model_name: this.model,
             ...this.invocationParams(),
         };
     }
     async *_streamResponseChunks(messages, options, runManager) {
         const params = this.invocationParams(options);
-        const
-            ...params,
-            stream: false,
-            ...this.formatMessagesForAnthropic(messages),
-        }, options);
+        const formattedMessages = _formatMessagesForAnthropic(messages);
         if (options.tools !== undefined && options.tools.length > 0) {
-            const
-
-
-
-
-
+            const generations = await this._generateNonStreaming(messages, params, {
+                signal: options.signal,
+            });
+            const result = generations[0].message;
+            const toolCallChunks = result.tool_calls?.map((toolCall, index) => ({
+                name: toolCall.name,
+                args: JSON.stringify(toolCall.args),
+                id: toolCall.id,
+                index,
+            }));
             yield new outputs_1.ChatGenerationChunk({
                 message: new messages_1.AIMessageChunk({
-                    content:
-                    additional_kwargs:
+                    content: result.content,
+                    additional_kwargs: result.additional_kwargs,
+                    tool_call_chunks: toolCallChunks,
                 }),
                 text: generations[0].text,
             });
@@ -300,9 +477,9 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
         else {
             const stream = await this.createStreamWithRetry({
                 ...params,
-                ...
+                ...formattedMessages,
                 stream: true,
-            }
+            });
             let usageData = { input_tokens: 0, output_tokens: 0 };
             for await (const data of stream) {
                 if (options.signal?.aborted) {
@@ -363,95 +540,22 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             });
         }
     }
-    /**
-     * Formats messages as a prompt for the model.
-     * @param messages The base messages to format as a prompt.
-     * @returns The formatted prompt.
-     */
-    formatMessagesForAnthropic(messages) {
-        let system;
-        if (messages.length > 0 && messages[0]._getType() === "system") {
-            if (typeof messages[0].content !== "string") {
-                throw new Error("System message content must be a string.");
-            }
-            system = messages[0].content;
-        }
-        const conversationMessages = system !== undefined ? messages.slice(1) : messages;
-        const formattedMessages = conversationMessages.map((message) => {
-            let role;
-            if (message._getType() === "human") {
-                role = "user";
-            }
-            else if (message._getType() === "ai") {
-                role = "assistant";
-            }
-            else if (message._getType() === "tool") {
-                role = "user";
-            }
-            else if (message._getType() === "system") {
-                throw new Error("System messages are only permitted as the first passed message.");
-            }
-            else {
-                throw new Error(`Message type "${message._getType()}" is not supported.`);
-            }
-            if (typeof message.content === "string") {
-                return {
-                    role,
-                    content: message.content,
-                };
-            }
-            else {
-                const contentBlocks = message.content.map((contentPart) => {
-                    if (contentPart.type === "image_url") {
-                        let source;
-                        if (typeof contentPart.image_url === "string") {
-                            source = _formatImage(contentPart.image_url);
-                        }
-                        else {
-                            source = _formatImage(contentPart.image_url.url);
-                        }
-                        return {
-                            type: "image",
-                            source,
-                        };
-                    }
-                    else if (contentPart.type === "text") {
-                        // Assuming contentPart is of type MessageContentText here
-                        return {
-                            type: "text",
-                            text: contentPart.text,
-                        };
-                    }
-                    else if (contentPart.type === "tool_use" ||
-                        contentPart.type === "tool_result") {
-                        // TODO: Fix when SDK types are fixed
-                        return {
-                            ...contentPart,
-                            // eslint-disable-next-line @typescript-eslint/no-explicit-any
-                        };
-                    }
-                    else {
-                        throw new Error("Unsupported message content format");
-                    }
-                });
-                return {
-                    role,
-                    content: contentBlocks,
-                };
-            }
-        });
-        return {
-            messages: formattedMessages,
-            system,
-        };
-    }
     /** @ignore */
     async _generateNonStreaming(messages, params, requestOptions) {
+        const options = params.tools !== undefined
+            ? {
+                ...requestOptions,
+                headers: {
+                    ...requestOptions.headers,
+                    "anthropic-beta": "tools-2024-04-04",
+                },
+            }
+            : requestOptions;
         const response = await this.completionWithRetry({
             ...params,
             stream: false,
-            ...
-        },
+            ..._formatMessagesForAnthropic(messages),
+        }, options);
         const { content, ...additionalKwargs } = response;
         const generations = anthropicResponseToChatMessages(content, additionalKwargs);
         return generations;
@@ -486,12 +590,9 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             };
         }
         else {
-            const
-
-
-                ...this.formatMessagesForAnthropic(messages),
-            }, options);
-            const generations = await this._generateNonStreaming(messages, params, requestOptions);
+            const generations = await this._generateNonStreaming(messages, params, {
+                signal: options.signal,
+            });
             return {
                 generations,
             };
@@ -508,7 +609,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             this.streamingClient = new sdk_1.Anthropic({
                 ...this.clientOptions,
                 ...options_,
-                apiKey: this.
+                apiKey: this.apiKey,
                 // Prefer LangChain built-in retries
                 maxRetries: 0,
             });
@@ -522,7 +623,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
     }
     /** @ignore */
     async completionWithRetry(request, options) {
-        if (!this.
+        if (!this.apiKey) {
             throw new Error("Missing Anthropic API key.");
         }
         if (!this.batchClient) {
@@ -530,7 +631,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             this.batchClient = new sdk_1.Anthropic({
                 ...this.clientOptions,
                 ...options,
-                apiKey: this.
+                apiKey: this.apiKey,
                 maxRetries: 0,
             });
         }
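
Note: the chat_models diff above adds tool-calling support: `bindTools()` formats LangChain structured tools (or raw Anthropic tool definitions) and binds them to the model, `_formatMessagesForAnthropic()` merges tool results into `tool_result` content blocks, and `_generateNonStreaming()` sends the `anthropic-beta: tools-2024-04-04` header when tools are present. A minimal sketch of the new surface; the `get_weather` tool definition is hypothetical, and only the `{ name, description, input_schema }` shape comes from the `AnthropicTool` type in the diff:

import { ChatAnthropic } from "@langchain/anthropic";

const model = new ChatAnthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
  model: "claude-3-sonnet-20240229",
});

// bindTools() is a thin wrapper over this.bind({ tools: ... }), so the
// formatted tool definitions ride along on every subsequent call.
const modelWithTools = model.bindTools([
  {
    name: "get_weather", // hypothetical tool
    description: "Get the current weather for a city.",
    input_schema: {
      type: "object",
      properties: { city: { type: "string" } },
      required: ["city"],
    },
  },
]);

const result = await modelWithTools.invoke("What is the weather in Paris?");
// Tool calls extracted from the response content blocks (see extractToolCalls
// in output_parsers) are surfaced as `tool_calls` on the returned message.
console.log(result.tool_calls);
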
package/dist/chat_models.d.ts
CHANGED
@@ -1,13 +1,15 @@
 import { Anthropic, type ClientOptions } from "@anthropic-ai/sdk";
 import type { Stream } from "@anthropic-ai/sdk/streaming";
 import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
-import { type BaseMessage } from "@langchain/core/messages";
+import { AIMessageChunk, type BaseMessage } from "@langchain/core/messages";
 import { ChatGeneration, ChatGenerationChunk, type ChatResult } from "@langchain/core/outputs";
 import { BaseChatModel, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
 import { StructuredOutputMethodOptions, type BaseLanguageModelCallOptions, BaseLanguageModelInput } from "@langchain/core/language_models/base";
 import { StructuredToolInterface } from "@langchain/core/tools";
-import { Runnable } from "@langchain/core/runnables";
+import { Runnable, RunnableInterface } from "@langchain/core/runnables";
+import { ToolCall } from "@langchain/core/messages/tool";
 import { z } from "zod";
+import { AnthropicToolResponse } from "./types.js";
 type AnthropicTool = {
     name: string;
     description: string;
@@ -16,13 +18,12 @@ type AnthropicTool = {
      */
     input_schema: Record<string, unknown>;
 };
-type AnthropicMessage = Anthropic.MessageParam;
 type AnthropicMessageCreateParams = Anthropic.MessageCreateParamsNonStreaming;
 type AnthropicStreamingMessageCreateParams = Anthropic.MessageCreateParamsStreaming;
 type AnthropicMessageStreamEvent = Anthropic.MessageStreamEvent;
 type AnthropicRequestOptions = Anthropic.RequestOptions;
 interface ChatAnthropicCallOptions extends BaseLanguageModelCallOptions {
-    tools?: StructuredToolInterface
+    tools?: (StructuredToolInterface | AnthropicTool)[];
 }
 /**
  * Input to AnthropicChat class.
@@ -64,10 +65,14 @@ export interface AnthropicInput {
     streaming?: boolean;
     /** Anthropic API key */
     anthropicApiKey?: string;
+    /** Anthropic API key */
+    apiKey?: string;
     /** Anthropic API URL */
     anthropicApiUrl?: string;
     /** Model name to use */
     modelName: string;
+    /** Model name to use */
+    model: string;
     /** Overridable Anthropic ClientOptions */
     clientOptions: ClientOptions;
     /** Holds any additional parameters that are valid to pass to {@link
@@ -81,6 +86,7 @@ export interface AnthropicInput {
  * Anthropic API.
  */
 type Kwargs = Record<string, any>;
+export declare function _convertLangChainToolCallToAnthropic(toolCall: ToolCall): AnthropicToolResponse;
 /**
  * Wrapper around Anthropic large language models.
  *
@@ -98,13 +104,13 @@ type Kwargs = Record<string, any>;
  *
  * const model = new ChatAnthropic({
  *   temperature: 0.9,
- *
+ *   apiKey: 'YOUR-API-KEY',
  * });
  * const res = await model.invoke({ input: 'Hello!' });
  * console.log(res);
  * ```
  */
-export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCallOptions = ChatAnthropicCallOptions> extends BaseChatModel<CallOptions> implements AnthropicInput {
+export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCallOptions = ChatAnthropicCallOptions> extends BaseChatModel<CallOptions, AIMessageChunk> implements AnthropicInput {
     static lc_name(): string;
     get lc_secrets(): {
         [key: string]: string;
@@ -112,12 +118,14 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
     get lc_aliases(): Record<string, string>;
     lc_serializable: boolean;
     anthropicApiKey?: string;
+    apiKey?: string;
     apiUrl?: string;
     temperature: number;
     topK: number;
     topP: number;
     maxTokens: number;
     modelName: string;
+    model: string;
     invocationKwargs?: Kwargs;
     stopSequences?: string[];
     streaming: boolean;
@@ -133,11 +141,11 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
      * @throws {Error} If a mix of AnthropicTools and StructuredTools are passed.
      */
     formatStructuredToolToAnthropic(tools: ChatAnthropicCallOptions["tools"]): AnthropicTool[] | undefined;
+    bindTools(tools: (AnthropicTool | StructuredToolInterface)[], kwargs?: Partial<CallOptions>): RunnableInterface<BaseLanguageModelInput, AIMessageChunk, CallOptions>;
     /**
      * Get the parameters used to invoke the model
      */
     invocationParams(options?: this["ParsedCallOptions"]): Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams, "messages"> & Kwargs;
-    invocationOptions(request: Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams, "messages"> & Kwargs, options: this["ParsedCallOptions"]): AnthropicRequestOptions;
     /** @ignore */
     _identifyingParams(): {
         system?: string | undefined;
@@ -167,15 +175,6 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
         model_name: string;
     };
     _streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
-    /**
-     * Formats messages as a prompt for the model.
-     * @param messages The base messages to format as a prompt.
-     * @returns The formatted prompt.
-     */
-    protected formatMessagesForAnthropic(messages: BaseMessage[]): {
-        system?: string;
-        messages: AnthropicMessage[];
-    };
     /** @ignore */
     _generateNonStreaming(messages: BaseMessage[], params: Omit<Anthropic.Messages.MessageCreateParamsNonStreaming | Anthropic.Messages.MessageCreateParamsStreaming, "messages"> & Kwargs, requestOptions: AnthropicRequestOptions): Promise<ChatGeneration[]>;
     /** @ignore */