@langchain/anthropic 0.1.13 → 0.1.14
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
- package/dist/chat_models.cjs +212 -130
- package/dist/chat_models.d.ts +8 -15
- package/dist/chat_models.js +212 -131
- package/dist/output_parsers.cjs +14 -7
- package/dist/output_parsers.d.ts +2 -0
- package/dist/output_parsers.js +12 -6
- package/dist/tests/agent.int.test.d.ts +1 -0
- package/dist/tests/agent.int.test.js +39 -0
- package/dist/tests/chat_models-tools.int.test.d.ts +1 -0
- package/dist/tests/chat_models-tools.int.test.js +218 -0
- package/dist/tests/chat_models.int.test.js +0 -178
- package/package.json +2 -2
package/dist/chat_models.cjs
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ChatAnthropic = exports.ChatAnthropicMessages = void 0;
+exports.ChatAnthropic = exports.ChatAnthropicMessages = exports._convertLangChainToolCallToAnthropic = void 0;
 const sdk_1 = require("@anthropic-ai/sdk");
 const messages_1 = require("@langchain/core/messages");
 const outputs_1 = require("@langchain/core/outputs");
@@ -36,14 +36,15 @@ function anthropicResponseToChatMessages(messages, additionalKwargs) {
         ];
     }
     else {
-
-        const castMessage = messages;
+        const toolCalls = (0, output_parsers_js_1.extractToolCalls)(messages);
         const generations = [
             {
                 text: "",
                 message: new messages_1.AIMessage({
-
+                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                    content: messages,
                     additional_kwargs: additionalKwargs,
+                    tool_calls: toolCalls,
                 }),
             },
         ];
@@ -54,6 +55,176 @@ function anthropicResponseToChatMessages(messages, additionalKwargs) {
 function isAnthropicTool(tool) {
     return "input_schema" in tool;
 }
+function _mergeMessages(messages) {
+    // Merge runs of human/tool messages into single human messages with content blocks.
+    const merged = [];
+    for (const message of messages) {
+        if (message._getType() === "tool") {
+            if (typeof message.content === "string") {
+                merged.push(new messages_1.HumanMessage({
+                    content: [
+                        {
+                            type: "tool_result",
+                            content: message.content,
+                            tool_use_id: message.tool_call_id,
+                        },
+                    ],
+                }));
+            }
+            else {
+                merged.push(new messages_1.HumanMessage({ content: message.content }));
+            }
+        }
+        else {
+            const previousMessage = merged[merged.length - 1];
+            if (previousMessage?._getType() === "human" &&
+                message._getType() === "human") {
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                let combinedContent;
+                if (typeof previousMessage.content === "string") {
+                    combinedContent = [{ type: "text", text: previousMessage.content }];
+                }
+                else {
+                    combinedContent = previousMessage.content;
+                }
+                if (typeof message.content === "string") {
+                    combinedContent.push({ type: "text", text: message.content });
+                }
+                else {
+                    combinedContent = combinedContent.concat(message.content);
+                }
+                previousMessage.content = combinedContent;
+            }
+            else {
+                merged.push(message);
+            }
+        }
+    }
+    return merged;
+}
+function _convertLangChainToolCallToAnthropic(toolCall) {
+    if (toolCall.id === undefined) {
+        throw new Error(`Anthropic requires all tool calls to have an "id".`);
+    }
+    return {
+        type: "tool_use",
+        id: toolCall.id,
+        name: toolCall.name,
+        input: toolCall.args,
+    };
+}
+exports._convertLangChainToolCallToAnthropic = _convertLangChainToolCallToAnthropic;
+function _formatContent(content) {
+    if (typeof content === "string") {
+        return content;
+    }
+    else {
+        const contentBlocks = content.map((contentPart) => {
+            if (contentPart.type === "image_url") {
+                let source;
+                if (typeof contentPart.image_url === "string") {
+                    source = _formatImage(contentPart.image_url);
+                }
+                else {
+                    source = _formatImage(contentPart.image_url.url);
+                }
+                return {
+                    type: "image",
+                    source,
+                };
+            }
+            else if (contentPart.type === "text") {
+                // Assuming contentPart is of type MessageContentText here
+                return {
+                    type: "text",
+                    text: contentPart.text,
+                };
+            }
+            else if (contentPart.type === "tool_use" ||
+                contentPart.type === "tool_result") {
+                // TODO: Fix when SDK types are fixed
+                return {
+                    ...contentPart,
+                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                };
+            }
+            else {
+                throw new Error("Unsupported message content format");
+            }
+        });
+        return contentBlocks;
+    }
+}
+/**
+ * Formats messages as a prompt for the model.
+ * @param messages The base messages to format as a prompt.
+ * @returns The formatted prompt.
+ */
+function _formatMessagesForAnthropic(messages) {
+    const mergedMessages = _mergeMessages(messages);
+    let system;
+    if (mergedMessages.length > 0 && mergedMessages[0]._getType() === "system") {
+        if (typeof messages[0].content !== "string") {
+            throw new Error("System message content must be a string.");
+        }
+        system = messages[0].content;
+    }
+    const conversationMessages = system !== undefined ? mergedMessages.slice(1) : mergedMessages;
+    const formattedMessages = conversationMessages.map((message) => {
+        let role;
+        if (message._getType() === "human") {
+            role = "user";
+        }
+        else if (message._getType() === "ai") {
+            role = "assistant";
+        }
+        else if (message._getType() === "tool") {
+            role = "user";
+        }
+        else if (message._getType() === "system") {
+            throw new Error("System messages are only permitted as the first passed message.");
+        }
+        else {
+            throw new Error(`Message type "${message._getType()}" is not supported.`);
+        }
+        if ((0, messages_1.isAIMessage)(message) && !!message.tool_calls?.length) {
+            if (message.content === "") {
+                return {
+                    role,
+                    content: message.tool_calls.map(_convertLangChainToolCallToAnthropic),
+                };
+            }
+            else if (typeof message.content === "string") {
+                console.warn(`The "tool_calls" field on a message is only respected if content is an empty string.`);
+                return {
+                    role,
+                    content: _formatContent(message.content),
+                };
+            }
+            else {
+                const { content } = message;
+                const hasMismatchedToolCalls = !message.tool_calls.every((toolCall) => content.find((contentPart) => contentPart.type === "tool_use" && contentPart.id === toolCall.id));
+                if (hasMismatchedToolCalls) {
+                    console.warn(`The "tool_calls" field on a message is only respected if content is an empty string.`);
+                }
+                return {
+                    role,
+                    content: _formatContent(message.content),
+                };
+            }
+        }
+        else {
+            return {
+                role,
+                content: _formatContent(message.content),
+            };
+        }
+    });
+    return {
+        messages: formattedMessages,
+        system,
+    };
+}
 /**
  * Wrapper around Anthropic large language models.
  *
@@ -242,6 +413,12 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             input_schema: (0, zod_to_json_schema_1.zodToJsonSchema)(tool.schema),
         }));
     }
+    bindTools(tools, kwargs) {
+        return this.bind({
+            tools: this.formatStructuredToolToAnthropic(tools),
+            ...kwargs,
+        });
+    }
     /**
      * Get the parameters used to invoke the model
      */
@@ -254,30 +431,10 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             stop_sequences: options?.stop ?? this.stopSequences,
             stream: this.streaming,
             max_tokens: this.maxTokens,
+            tools: this.formatStructuredToolToAnthropic(options?.tools),
             ...this.invocationKwargs,
         };
     }
-    invocationOptions(request, options) {
-        const toolUseBetaHeader = {
-            "anthropic-beta": "tools-2024-04-04",
-        };
-        const tools = this.formatStructuredToolToAnthropic(options?.tools);
-        // If tools are present, populate the body with the message request params.
-        // This is because Anthropic overwrites the message request params if a body
-        // is passed.
-        const body = tools
-            ? {
-                ...request,
-                tools,
-            }
-            : undefined;
-        const headers = tools ? toolUseBetaHeader : undefined;
-        return {
-            signal: options.signal,
-            ...(body ? { body } : {}),
-            ...(headers ? { headers } : {}),
-        };
-    }
     /** @ignore */
     _identifyingParams() {
         return {
@@ -296,22 +453,23 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
     }
     async *_streamResponseChunks(messages, options, runManager) {
         const params = this.invocationParams(options);
-        const
-            ...params,
-            stream: false,
-            ...this.formatMessagesForAnthropic(messages),
-        }, options);
+        const formattedMessages = _formatMessagesForAnthropic(messages);
         if (options.tools !== undefined && options.tools.length > 0) {
-            const
-
-
-
-
-
+            const generations = await this._generateNonStreaming(messages, params, {
+                signal: options.signal,
+            });
+            const result = generations[0].message;
+            const toolCallChunks = result.tool_calls?.map((toolCall, index) => ({
+                name: toolCall.name,
+                args: JSON.stringify(toolCall.args),
+                id: toolCall.id,
+                index,
+            }));
             yield new outputs_1.ChatGenerationChunk({
                 message: new messages_1.AIMessageChunk({
-                    content:
-                    additional_kwargs:
+                    content: result.content,
+                    additional_kwargs: result.additional_kwargs,
+                    tool_call_chunks: toolCallChunks,
                 }),
                 text: generations[0].text,
             });
@@ -319,9 +477,9 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
         else {
             const stream = await this.createStreamWithRetry({
                 ...params,
-                ...
+                ...formattedMessages,
                 stream: true,
-            }
+            });
             let usageData = { input_tokens: 0, output_tokens: 0 };
             for await (const data of stream) {
                 if (options.signal?.aborted) {
@@ -382,95 +540,22 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             });
         }
     }
-    /**
-     * Formats messages as a prompt for the model.
-     * @param messages The base messages to format as a prompt.
-     * @returns The formatted prompt.
-     */
-    formatMessagesForAnthropic(messages) {
-        let system;
-        if (messages.length > 0 && messages[0]._getType() === "system") {
-            if (typeof messages[0].content !== "string") {
-                throw new Error("System message content must be a string.");
-            }
-            system = messages[0].content;
-        }
-        const conversationMessages = system !== undefined ? messages.slice(1) : messages;
-        const formattedMessages = conversationMessages.map((message) => {
-            let role;
-            if (message._getType() === "human") {
-                role = "user";
-            }
-            else if (message._getType() === "ai") {
-                role = "assistant";
-            }
-            else if (message._getType() === "tool") {
-                role = "user";
-            }
-            else if (message._getType() === "system") {
-                throw new Error("System messages are only permitted as the first passed message.");
-            }
-            else {
-                throw new Error(`Message type "${message._getType()}" is not supported.`);
-            }
-            if (typeof message.content === "string") {
-                return {
-                    role,
-                    content: message.content,
-                };
-            }
-            else {
-                const contentBlocks = message.content.map((contentPart) => {
-                    if (contentPart.type === "image_url") {
-                        let source;
-                        if (typeof contentPart.image_url === "string") {
-                            source = _formatImage(contentPart.image_url);
-                        }
-                        else {
-                            source = _formatImage(contentPart.image_url.url);
-                        }
-                        return {
-                            type: "image",
-                            source,
-                        };
-                    }
-                    else if (contentPart.type === "text") {
-                        // Assuming contentPart is of type MessageContentText here
-                        return {
-                            type: "text",
-                            text: contentPart.text,
-                        };
-                    }
-                    else if (contentPart.type === "tool_use" ||
-                        contentPart.type === "tool_result") {
-                        // TODO: Fix when SDK types are fixed
-                        return {
-                            ...contentPart,
-                            // eslint-disable-next-line @typescript-eslint/no-explicit-any
-                        };
-                    }
-                    else {
-                        throw new Error("Unsupported message content format");
-                    }
-                });
-                return {
-                    role,
-                    content: contentBlocks,
-                };
-            }
-        });
-        return {
-            messages: formattedMessages,
-            system,
-        };
-    }
     /** @ignore */
     async _generateNonStreaming(messages, params, requestOptions) {
+        const options = params.tools !== undefined
+            ? {
+                ...requestOptions,
+                headers: {
+                    ...requestOptions.headers,
+                    "anthropic-beta": "tools-2024-04-04",
+                },
+            }
+            : requestOptions;
         const response = await this.completionWithRetry({
             ...params,
             stream: false,
-            ...
-        },
+            ..._formatMessagesForAnthropic(messages),
+        }, options);
         const { content, ...additionalKwargs } = response;
         const generations = anthropicResponseToChatMessages(content, additionalKwargs);
         return generations;
@@ -505,12 +590,9 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             };
         }
         else {
-            const
-
-
-                ...this.formatMessagesForAnthropic(messages),
-            }, options);
-            const generations = await this._generateNonStreaming(messages, params, requestOptions);
+            const generations = await this._generateNonStreaming(messages, params, {
+                signal: options.signal,
+            });
             return {
                 generations,
             };
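
The headline additions in chat_models.cjs are the exported _convertLangChainToolCallToAnthropic helper and the new bindTools method, which attaches Anthropic-formatted tool definitions to the model so they are sent with every subsequent call (the removed invocationOptions body/header plumbing is replaced by the beta header set inside _generateNonStreaming). A minimal usage sketch follows; the get_weather tool, its schema, and the model id are illustrative assumptions, not part of this diff:

import { ChatAnthropic } from "@langchain/anthropic";
import { DynamicStructuredTool } from "@langchain/core/tools";
import { z } from "zod";

// Hypothetical example tool; only bindTools() itself is introduced by this release.
const weatherTool = new DynamicStructuredTool({
  name: "get_weather",
  description: "Return the current weather for a city",
  schema: z.object({ city: z.string() }),
  func: async ({ city }) => `It is sunny in ${city}.`,
});

// Assumes ANTHROPIC_API_KEY is set in the environment.
const model = new ChatAnthropic({ modelName: "claude-3-sonnet-20240229" });

// bindTools() calls this.bind({ tools: this.formatStructuredToolToAnthropic(tools), ...kwargs }),
// so the formatted tools ride along with every invoke()/stream() call.
const modelWithTools = model.bindTools([weatherTool]);

const response = await modelWithTools.invoke("What is the weather in Paris?");
// tool_calls is populated from the response's tool_use blocks via extractToolCalls().
console.log(response.tool_calls);
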
package/dist/chat_models.d.ts
CHANGED
@@ -1,13 +1,15 @@
 import { Anthropic, type ClientOptions } from "@anthropic-ai/sdk";
 import type { Stream } from "@anthropic-ai/sdk/streaming";
 import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
-import { type BaseMessage } from "@langchain/core/messages";
+import { AIMessageChunk, type BaseMessage } from "@langchain/core/messages";
 import { ChatGeneration, ChatGenerationChunk, type ChatResult } from "@langchain/core/outputs";
 import { BaseChatModel, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
 import { StructuredOutputMethodOptions, type BaseLanguageModelCallOptions, BaseLanguageModelInput } from "@langchain/core/language_models/base";
 import { StructuredToolInterface } from "@langchain/core/tools";
-import { Runnable } from "@langchain/core/runnables";
+import { Runnable, RunnableInterface } from "@langchain/core/runnables";
+import { ToolCall } from "@langchain/core/messages/tool";
 import { z } from "zod";
+import { AnthropicToolResponse } from "./types.js";
 type AnthropicTool = {
     name: string;
     description: string;
@@ -16,13 +18,12 @@ type AnthropicTool = {
      */
     input_schema: Record<string, unknown>;
 };
-type AnthropicMessage = Anthropic.MessageParam;
 type AnthropicMessageCreateParams = Anthropic.MessageCreateParamsNonStreaming;
 type AnthropicStreamingMessageCreateParams = Anthropic.MessageCreateParamsStreaming;
 type AnthropicMessageStreamEvent = Anthropic.MessageStreamEvent;
 type AnthropicRequestOptions = Anthropic.RequestOptions;
 interface ChatAnthropicCallOptions extends BaseLanguageModelCallOptions {
-    tools?: StructuredToolInterface
+    tools?: (StructuredToolInterface | AnthropicTool)[];
 }
 /**
  * Input to AnthropicChat class.
@@ -85,6 +86,7 @@ export interface AnthropicInput {
  * Anthropic API.
  */
 type Kwargs = Record<string, any>;
+export declare function _convertLangChainToolCallToAnthropic(toolCall: ToolCall): AnthropicToolResponse;
 /**
  * Wrapper around Anthropic large language models.
  *
@@ -108,7 +110,7 @@ type Kwargs = Record<string, any>;
  * console.log(res);
  * ```
  */
-export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCallOptions = ChatAnthropicCallOptions> extends BaseChatModel<CallOptions> implements AnthropicInput {
+export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCallOptions = ChatAnthropicCallOptions> extends BaseChatModel<CallOptions, AIMessageChunk> implements AnthropicInput {
     static lc_name(): string;
     get lc_secrets(): {
         [key: string]: string;
@@ -139,11 +141,11 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
      * @throws {Error} If a mix of AnthropicTools and StructuredTools are passed.
      */
     formatStructuredToolToAnthropic(tools: ChatAnthropicCallOptions["tools"]): AnthropicTool[] | undefined;
+    bindTools(tools: (AnthropicTool | StructuredToolInterface)[], kwargs?: Partial<CallOptions>): RunnableInterface<BaseLanguageModelInput, AIMessageChunk, CallOptions>;
     /**
      * Get the parameters used to invoke the model
      */
     invocationParams(options?: this["ParsedCallOptions"]): Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams, "messages"> & Kwargs;
-    invocationOptions(request: Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams, "messages"> & Kwargs, options: this["ParsedCallOptions"]): AnthropicRequestOptions;
     /** @ignore */
     _identifyingParams(): {
         system?: string | undefined;
@@ -173,15 +175,6 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
         model_name: string;
     };
     _streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
-    /**
-     * Formats messages as a prompt for the model.
-     * @param messages The base messages to format as a prompt.
-     * @returns The formatted prompt.
-     */
-    protected formatMessagesForAnthropic(messages: BaseMessage[]): {
-        system?: string;
-        messages: AnthropicMessage[];
-    };
     /** @ignore */
     _generateNonStreaming(messages: BaseMessage[], params: Omit<Anthropic.Messages.MessageCreateParamsNonStreaming | Anthropic.Messages.MessageCreateParamsStreaming, "messages"> & Kwargs, requestOptions: AnthropicRequestOptions): Promise<ChatGeneration[]>;
     /** @ignore */