@langchain/anthropic 0.1.18 → 0.1.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chat_models.cjs +29 -0
- package/dist/chat_models.d.ts +11 -1
- package/dist/chat_models.js +29 -0
- package/dist/tests/chat_models-tools.int.test.js +60 -0
- package/package.json +3 -3
package/dist/chat_models.cjs
CHANGED
@@ -391,6 +391,17 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
         this.streaming = fields?.streaming ?? false;
         this.clientOptions = fields?.clientOptions ?? {};
     }
+    getLsParams(options) {
+        const params = this.invocationParams(options);
+        return {
+            ls_provider: "openai",
+            ls_model_name: this.model,
+            ls_model_type: "chat",
+            ls_temperature: params.temperature ?? undefined,
+            ls_max_tokens: params.max_tokens ?? undefined,
+            ls_stop: options.stop,
+        };
+    }
     /**
      * Formats LangChain StructuredTools to AnthropicTools.
      *
@@ -427,6 +438,22 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
      * Get the parameters used to invoke the model
      */
     invocationParams(options) {
+        let tool_choice;
+        if (options?.tool_choice) {
+            if (options?.tool_choice === "any") {
+                tool_choice = {
+                    type: "any",
+                };
+            }
+            else if (options?.tool_choice === "auto") {
+                tool_choice = {
+                    type: "auto",
+                };
+            }
+            else {
+                tool_choice = options?.tool_choice;
+            }
+        }
         return {
             model: this.model,
             temperature: this.temperature,
@@ -436,6 +463,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             stream: this.streaming,
             max_tokens: this.maxTokens,
             tools: this.formatStructuredToolToAnthropic(options?.tools),
+            tool_choice,
             ...this.invocationKwargs,
         };
     }
@@ -697,6 +725,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
         }
         const llm = this.bind({
             tools,
+            tool_choice: "any",
         });
         if (!includeRaw) {
             return llm.pipe(outputParser).withConfig({
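For orientation, the new invocationParams logic above normalizes the three accepted tool_choice forms into the object shape the Anthropic Messages API expects. A minimal standalone TypeScript sketch of that mapping (the type names here mirror the union added in chat_models.d.ts below; they are not exported by the package):

// Sketch only: mirrors the normalization performed in invocationParams above.
type AnthropicToolChoice = { type: "tool"; name: string } | "any" | "auto";

type NormalizedToolChoice =
  | { type: "any" }                 // model must call some tool
  | { type: "auto" }                // model decides whether to call a tool
  | { type: "tool"; name: string }; // model must call this specific tool

function normalizeToolChoice(
  choice?: AnthropicToolChoice
): NormalizedToolChoice | undefined {
  if (!choice) return undefined;                  // option omitted: nothing is sent
  if (choice === "any") return { type: "any" };
  if (choice === "auto") return { type: "auto" };
  return choice;                                  // already the { type: "tool", name } form
}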
package/dist/chat_models.d.ts
CHANGED
@@ -3,7 +3,7 @@ import type { Stream } from "@anthropic-ai/sdk/streaming";
 import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
 import { AIMessageChunk, type BaseMessage } from "@langchain/core/messages";
 import { ChatGeneration, ChatGenerationChunk, type ChatResult } from "@langchain/core/outputs";
-import { BaseChatModel, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
+import { BaseChatModel, LangSmithParams, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
 import { StructuredOutputMethodOptions, type BaseLanguageModelCallOptions, BaseLanguageModelInput } from "@langchain/core/language_models/base";
 import { StructuredToolInterface } from "@langchain/core/tools";
 import { Runnable, RunnableInterface } from "@langchain/core/runnables";
@@ -22,8 +22,17 @@ type AnthropicMessageCreateParams = Anthropic.MessageCreateParamsNonStreaming;
 type AnthropicStreamingMessageCreateParams = Anthropic.MessageCreateParamsStreaming;
 type AnthropicMessageStreamEvent = Anthropic.MessageStreamEvent;
 type AnthropicRequestOptions = Anthropic.RequestOptions;
+type AnthropicToolChoice = {
+    type: "tool";
+    name: string;
+} | "any" | "auto";
 interface ChatAnthropicCallOptions extends BaseLanguageModelCallOptions {
     tools?: (StructuredToolInterface | AnthropicTool)[];
+    /**
+     * Whether or not to specify what tool the model should use
+     * @default "auto"
+     */
+    tool_choice?: AnthropicToolChoice;
 }
 /**
  * Input to AnthropicChat class.
@@ -133,6 +142,7 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
     protected batchClient: Anthropic;
     protected streamingClient: Anthropic;
     constructor(fields?: Partial<AnthropicInput> & BaseChatModelParams);
+    protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
     /**
      * Formats LangChain StructuredTools to AnthropicTools.
      *
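In practice, the new ChatAnthropicCallOptions.tool_choice option lets callers force tool use when binding tools. A hedged usage sketch (the model name and tool definition are illustrative, not taken from the package):

import { ChatAnthropic } from "@langchain/anthropic";

const model = new ChatAnthropic({ model: "claude-3-sonnet-20240229", temperature: 0 });

// Illustrative Anthropic-format tool definition.
const weatherTool = {
  name: "get_weather",
  description: "Get the current weather for a city.",
  input_schema: {
    type: "object",
    properties: { location: { type: "string" } },
    required: ["location"],
  },
};

// "auto": model decides; "any": model must call some tool;
// { type: "tool", name }: model must call that specific tool.
const forced = model.bindTools([weatherTool], {
  tool_choice: { type: "tool", name: "get_weather" },
});

const res = await forced.invoke("What's the weather in Paris?");
console.log(res.tool_calls?.[0]);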
package/dist/chat_models.js
CHANGED
@@ -387,6 +387,17 @@ export class ChatAnthropicMessages extends BaseChatModel {
         this.streaming = fields?.streaming ?? false;
         this.clientOptions = fields?.clientOptions ?? {};
     }
+    getLsParams(options) {
+        const params = this.invocationParams(options);
+        return {
+            ls_provider: "openai",
+            ls_model_name: this.model,
+            ls_model_type: "chat",
+            ls_temperature: params.temperature ?? undefined,
+            ls_max_tokens: params.max_tokens ?? undefined,
+            ls_stop: options.stop,
+        };
+    }
     /**
      * Formats LangChain StructuredTools to AnthropicTools.
      *
@@ -423,6 +434,22 @@ export class ChatAnthropicMessages extends BaseChatModel {
      * Get the parameters used to invoke the model
      */
     invocationParams(options) {
+        let tool_choice;
+        if (options?.tool_choice) {
+            if (options?.tool_choice === "any") {
+                tool_choice = {
+                    type: "any",
+                };
+            }
+            else if (options?.tool_choice === "auto") {
+                tool_choice = {
+                    type: "auto",
+                };
+            }
+            else {
+                tool_choice = options?.tool_choice;
+            }
+        }
         return {
             model: this.model,
             temperature: this.temperature,
@@ -432,6 +459,7 @@ export class ChatAnthropicMessages extends BaseChatModel {
             stream: this.streaming,
             max_tokens: this.maxTokens,
             tools: this.formatStructuredToolToAnthropic(options?.tools),
+            tool_choice,
             ...this.invocationKwargs,
         };
     }
@@ -693,6 +721,7 @@ export class ChatAnthropicMessages extends BaseChatModel {
         }
         const llm = this.bind({
             tools,
+            tool_choice: "any",
         });
         if (!includeRaw) {
             return llm.pipe(outputParser).withConfig({
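One behavioral consequence of the withStructuredOutput change above: the generated extraction tool is now bound with tool_choice: "any", so the model is required to answer with a tool call instead of free text. A hedged usage sketch (schema, prompt, and model name are illustrative):

import { z } from "zod";
import { ChatAnthropic } from "@langchain/anthropic";

const model = new ChatAnthropic({ model: "claude-3-sonnet-20240229", temperature: 0 });

const weatherSchema = z.object({
  location: z.string().describe("The city to report on"),
  unit: z.enum(["celsius", "fahrenheit"]).describe("Temperature unit"),
});

// Under the hood this binds a single extraction tool and, as of this release,
// also sets tool_choice: "any" so the model must emit a tool call.
const structured = model.withStructuredOutput(weatherSchema, { name: "weather_report" });

const out = await structured.invoke("How warm is it in Tokyo, in celsius?");
// out: { location: string; unit: "celsius" | "fahrenheit" }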
package/dist/tests/chat_models-tools.int.test.js
CHANGED

@@ -216,3 +216,63 @@ test("withStructuredOutput JSON Schema only", async () => {
     }, "withStructuredOutput JSON Schema only");
     expect(typeof result.location).toBe("string");
 });
+test("Can pass tool_choice", async () => {
+    const tool1 = {
+        name: "get_weather",
+        description: "Get the weather of a specific location and return the temperature in Celsius.",
+        input_schema: {
+            type: "object",
+            properties: {
+                location: {
+                    type: "string",
+                    description: "The name of city to get the weather for.",
+                },
+            },
+            required: ["location"],
+        },
+    };
+    const tool2 = {
+        name: "calculator",
+        description: "Calculate any math expression and return the result.",
+        input_schema: {
+            type: "object",
+            properties: {
+                expression: {
+                    type: "string",
+                    description: "The math expression to calculate.",
+                },
+            },
+            required: ["expression"],
+        },
+    };
+    const tools = [tool1, tool2];
+    const modelWithTools = model.bindTools(tools, {
+        tool_choice: {
+            type: "tool",
+            name: "get_weather",
+        },
+    });
+    const result = await modelWithTools.invoke("What is the sum of 272818 and 281818?");
+    console.log({
+        tool_calls: JSON.stringify(result.content, null, 2),
+    }, "Can bind & invoke StructuredTools");
+    expect(Array.isArray(result.content)).toBeTruthy();
+    if (!Array.isArray(result.content)) {
+        throw new Error("Content is not an array");
+    }
+    let toolCall;
+    result.content.forEach((item) => {
+        if (item.type === "tool_use") {
+            toolCall = item;
+        }
+    });
+    if (!toolCall) {
+        throw new Error("No tool call found");
+    }
+    expect(toolCall).toBeTruthy();
+    const { name, input } = toolCall;
+    expect(toolCall.input).toEqual(result.tool_calls?.[0].args);
+    expect(name).toBe("get_weather");
+    expect(input).toBeTruthy();
+    expect(input.location).toBeTruthy();
+});
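The integration test above checks the forced call in both places it surfaces: as a raw Anthropic content block of type "tool_use" on result.content, and as the parsed result.tool_calls array. A small hedged helper showing that relationship (the function name is illustrative):

import type { AIMessage } from "@langchain/core/messages";

// Returns the first raw "tool_use" content block from an Anthropic response, if any.
function firstToolUseBlock(msg: AIMessage) {
  if (!Array.isArray(msg.content)) return undefined;
  return msg.content.find((block) => block.type === "tool_use");
}

// For the forced get_weather call in the test above, this block's `input`
// should equal msg.tool_calls?.[0]?.args.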
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/anthropic",
-  "version": "0.1.18",
+  "version": "0.1.20",
   "description": "Anthropic integrations for LangChain.js",
   "type": "module",
   "engines": {
@@ -39,8 +39,8 @@
   "author": "LangChain",
   "license": "MIT",
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.
-    "@langchain/core": "
+    "@anthropic-ai/sdk": "^0.21.0",
+    "@langchain/core": ">0.1.56 <0.3.0",
     "fast-xml-parser": "^4.3.5",
     "zod": "^3.22.4",
     "zod-to-json-schema": "^3.22.4"