@langchain/anthropic 0.1.18 → 0.1.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chat_models.cjs +18 -0
- package/dist/chat_models.d.ts +13 -1
- package/dist/chat_models.js +18 -0
- package/dist/tests/chat_models-tools.int.test.js +60 -0
- package/package.json +2 -2
package/dist/chat_models.cjs
CHANGED
|
@@ -427,6 +427,22 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
|
|
|
427
427
|
* Get the parameters used to invoke the model
|
|
428
428
|
*/
|
|
429
429
|
invocationParams(options) {
|
|
430
|
+
let tool_choice;
|
|
431
|
+
if (options?.tool_choice) {
|
|
432
|
+
if (options?.tool_choice === "any") {
|
|
433
|
+
tool_choice = {
|
|
434
|
+
type: "any",
|
|
435
|
+
};
|
|
436
|
+
}
|
|
437
|
+
else if (options?.tool_choice === "auto") {
|
|
438
|
+
tool_choice = {
|
|
439
|
+
type: "auto",
|
|
440
|
+
};
|
|
441
|
+
}
|
|
442
|
+
else {
|
|
443
|
+
tool_choice = options?.tool_choice;
|
|
444
|
+
}
|
|
445
|
+
}
|
|
430
446
|
return {
|
|
431
447
|
model: this.model,
|
|
432
448
|
temperature: this.temperature,
|
|
@@ -436,6 +452,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
|
|
|
436
452
|
stream: this.streaming,
|
|
437
453
|
max_tokens: this.maxTokens,
|
|
438
454
|
tools: this.formatStructuredToolToAnthropic(options?.tools),
|
|
455
|
+
tool_choice,
|
|
439
456
|
...this.invocationKwargs,
|
|
440
457
|
};
|
|
441
458
|
}
|
|
@@ -697,6 +714,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
|
|
|
697
714
|
}
|
|
698
715
|
const llm = this.bind({
|
|
699
716
|
tools,
|
|
717
|
+
tool_choice: "any",
|
|
700
718
|
});
|
|
701
719
|
if (!includeRaw) {
|
|
702
720
|
return llm.pipe(outputParser).withConfig({
|
package/dist/chat_models.d.ts
CHANGED
|
@@ -22,8 +22,17 @@ type AnthropicMessageCreateParams = Anthropic.MessageCreateParamsNonStreaming;
|
|
|
22
22
|
type AnthropicStreamingMessageCreateParams = Anthropic.MessageCreateParamsStreaming;
|
|
23
23
|
type AnthropicMessageStreamEvent = Anthropic.MessageStreamEvent;
|
|
24
24
|
type AnthropicRequestOptions = Anthropic.RequestOptions;
|
|
25
|
+
type AnthropicToolChoice = {
|
|
26
|
+
type: "tool";
|
|
27
|
+
name: string;
|
|
28
|
+
} | "any" | "auto";
|
|
25
29
|
interface ChatAnthropicCallOptions extends BaseLanguageModelCallOptions {
|
|
26
30
|
tools?: (StructuredToolInterface | AnthropicTool)[];
|
|
31
|
+
/**
|
|
32
|
+
* Whether or not to specify what tool the model should use
|
|
33
|
+
* @default "auto"
|
|
34
|
+
*/
|
|
35
|
+
tool_choice?: AnthropicToolChoice;
|
|
27
36
|
}
|
|
28
37
|
/**
|
|
29
38
|
* Input to AnthropicChat class.
|
|
@@ -183,7 +192,10 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
|
|
|
183
192
|
model: string;
|
|
184
193
|
stop_reason: "max_tokens" | "stop_sequence" | "end_turn" | null;
|
|
185
194
|
stop_sequence: string | null;
|
|
186
|
-
usage: Anthropic.Messages.Usage;
|
|
195
|
+
usage: Anthropic.Messages.Usage; /** A list of strings upon which to stop generating.
|
|
196
|
+
* You probably want `["\n\nHuman:"]`, as that's the cue for
|
|
197
|
+
* the next turn in the dialog agent.
|
|
198
|
+
*/
|
|
187
199
|
};
|
|
188
200
|
}>;
|
|
189
201
|
/** @ignore */
|
package/dist/chat_models.js
CHANGED
|
@@ -423,6 +423,22 @@ export class ChatAnthropicMessages extends BaseChatModel {
|
|
|
423
423
|
* Get the parameters used to invoke the model
|
|
424
424
|
*/
|
|
425
425
|
invocationParams(options) {
|
|
426
|
+
let tool_choice;
|
|
427
|
+
if (options?.tool_choice) {
|
|
428
|
+
if (options?.tool_choice === "any") {
|
|
429
|
+
tool_choice = {
|
|
430
|
+
type: "any",
|
|
431
|
+
};
|
|
432
|
+
}
|
|
433
|
+
else if (options?.tool_choice === "auto") {
|
|
434
|
+
tool_choice = {
|
|
435
|
+
type: "auto",
|
|
436
|
+
};
|
|
437
|
+
}
|
|
438
|
+
else {
|
|
439
|
+
tool_choice = options?.tool_choice;
|
|
440
|
+
}
|
|
441
|
+
}
|
|
426
442
|
return {
|
|
427
443
|
model: this.model,
|
|
428
444
|
temperature: this.temperature,
|
|
@@ -432,6 +448,7 @@ export class ChatAnthropicMessages extends BaseChatModel {
|
|
|
432
448
|
stream: this.streaming,
|
|
433
449
|
max_tokens: this.maxTokens,
|
|
434
450
|
tools: this.formatStructuredToolToAnthropic(options?.tools),
|
|
451
|
+
tool_choice,
|
|
435
452
|
...this.invocationKwargs,
|
|
436
453
|
};
|
|
437
454
|
}
|
|
@@ -693,6 +710,7 @@ export class ChatAnthropicMessages extends BaseChatModel {
|
|
|
693
710
|
}
|
|
694
711
|
const llm = this.bind({
|
|
695
712
|
tools,
|
|
713
|
+
tool_choice: "any",
|
|
696
714
|
});
|
|
697
715
|
if (!includeRaw) {
|
|
698
716
|
return llm.pipe(outputParser).withConfig({
|
|
@@ -216,3 +216,63 @@ test("withStructuredOutput JSON Schema only", async () => {
|
|
|
216
216
|
}, "withStructuredOutput JSON Schema only");
|
|
217
217
|
expect(typeof result.location).toBe("string");
|
|
218
218
|
});
|
|
219
|
+
test("Can pass tool_choice", async () => {
|
|
220
|
+
const tool1 = {
|
|
221
|
+
name: "get_weather",
|
|
222
|
+
description: "Get the weather of a specific location and return the temperature in Celsius.",
|
|
223
|
+
input_schema: {
|
|
224
|
+
type: "object",
|
|
225
|
+
properties: {
|
|
226
|
+
location: {
|
|
227
|
+
type: "string",
|
|
228
|
+
description: "The name of city to get the weather for.",
|
|
229
|
+
},
|
|
230
|
+
},
|
|
231
|
+
required: ["location"],
|
|
232
|
+
},
|
|
233
|
+
};
|
|
234
|
+
const tool2 = {
|
|
235
|
+
name: "calculator",
|
|
236
|
+
description: "Calculate any math expression and return the result.",
|
|
237
|
+
input_schema: {
|
|
238
|
+
type: "object",
|
|
239
|
+
properties: {
|
|
240
|
+
expression: {
|
|
241
|
+
type: "string",
|
|
242
|
+
description: "The math expression to calculate.",
|
|
243
|
+
},
|
|
244
|
+
},
|
|
245
|
+
required: ["expression"],
|
|
246
|
+
},
|
|
247
|
+
};
|
|
248
|
+
const tools = [tool1, tool2];
|
|
249
|
+
const modelWithTools = model.bindTools(tools, {
|
|
250
|
+
tool_choice: {
|
|
251
|
+
type: "tool",
|
|
252
|
+
name: "get_weather",
|
|
253
|
+
},
|
|
254
|
+
});
|
|
255
|
+
const result = await modelWithTools.invoke("What is the sum of 272818 and 281818?");
|
|
256
|
+
console.log({
|
|
257
|
+
tool_calls: JSON.stringify(result.content, null, 2),
|
|
258
|
+
}, "Can bind & invoke StructuredTools");
|
|
259
|
+
expect(Array.isArray(result.content)).toBeTruthy();
|
|
260
|
+
if (!Array.isArray(result.content)) {
|
|
261
|
+
throw new Error("Content is not an array");
|
|
262
|
+
}
|
|
263
|
+
let toolCall;
|
|
264
|
+
result.content.forEach((item) => {
|
|
265
|
+
if (item.type === "tool_use") {
|
|
266
|
+
toolCall = item;
|
|
267
|
+
}
|
|
268
|
+
});
|
|
269
|
+
if (!toolCall) {
|
|
270
|
+
throw new Error("No tool call found");
|
|
271
|
+
}
|
|
272
|
+
expect(toolCall).toBeTruthy();
|
|
273
|
+
const { name, input } = toolCall;
|
|
274
|
+
expect(toolCall.input).toEqual(result.tool_calls?.[0].args);
|
|
275
|
+
expect(name).toBe("get_weather");
|
|
276
|
+
expect(input).toBeTruthy();
|
|
277
|
+
expect(input.location).toBeTruthy();
|
|
278
|
+
});
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@langchain/anthropic",
|
|
3
|
-
"version": "0.1.18",
|
|
3
|
+
"version": "0.1.19",
|
|
4
4
|
"description": "Anthropic integrations for LangChain.js",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"engines": {
|
|
@@ -39,7 +39,7 @@
|
|
|
39
39
|
"author": "LangChain",
|
|
40
40
|
"license": "MIT",
|
|
41
41
|
"dependencies": {
|
|
42
|
-
"@anthropic-ai/sdk": "^0.
|
|
42
|
+
"@anthropic-ai/sdk": "^0.21.0",
|
|
43
43
|
"@langchain/core": "<0.3.0 || >0.1.0",
|
|
44
44
|
"fast-xml-parser": "^4.3.5",
|
|
45
45
|
"zod": "^3.22.4",
|