@langchain/anthropic 0.1.0 → 0.1.2
This diff shows the changes between publicly released versions of the package as they appear in the public registry, and is provided for informational purposes only.
- package/README.md +1 -1
- package/dist/chat_models.cjs +46 -12
- package/dist/chat_models.d.ts +1 -3
- package/dist/chat_models.js +46 -12
- package/dist/experimental/index.cjs +17 -0
- package/dist/experimental/index.d.ts +1 -0
- package/dist/experimental/index.js +1 -0
- package/dist/experimental/tests/tool_calling.int.test.d.ts +1 -0
- package/dist/experimental/tests/tool_calling.int.test.js +204 -0
- package/dist/experimental/tool_calling.cjs +247 -0
- package/dist/experimental/tool_calling.d.ts +51 -0
- package/dist/experimental/tool_calling.js +243 -0
- package/dist/experimental/utils/tool_calling.cjs +52 -0
- package/dist/experimental/utils/tool_calling.d.ts +8 -0
- package/dist/experimental/utils/tool_calling.js +48 -0
- package/dist/tests/chat_models.int.test.js +41 -39
- package/experimental.cjs +1 -0
- package/experimental.d.cts +1 -0
- package/experimental.d.ts +1 -0
- package/experimental.js +1 -0
- package/package.json +21 -4
package/dist/experimental/tool_calling.cjs
@@ -0,0 +1,247 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatAnthropicTools = void 0;
const fast_xml_parser_1 = require("fast-xml-parser");
const messages_1 = require("@langchain/core/messages");
const chat_models_1 = require("@langchain/core/language_models/chat_models");
const runnables_1 = require("@langchain/core/runnables");
const openai_tools_1 = require("@langchain/core/output_parsers/openai_tools");
const zod_to_json_schema_1 = require("zod-to-json-schema");
const chat_models_js_1 = require("../chat_models.cjs");
const tool_calling_js_1 = require("./utils/tool_calling.cjs");
/**
 * Experimental wrapper over Anthropic chat models that adds support for
 * a function calling interface.
 */
class ChatAnthropicTools extends chat_models_1.BaseChatModel {
    static lc_name() {
        return "ChatAnthropicTools";
    }
    constructor(fields) {
        super(fields ?? {});
        Object.defineProperty(this, "llm", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "stopSequences", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "systemPromptTemplate", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "lc_namespace", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: ["langchain", "experimental", "chat_models"]
        });
        this.llm = fields?.llm ?? new chat_models_js_1.ChatAnthropic(fields);
        this.systemPromptTemplate =
            fields?.systemPromptTemplate ?? tool_calling_js_1.DEFAULT_TOOL_SYSTEM_PROMPT;
        this.stopSequences =
            fields?.stopSequences ?? this.llm.stopSequences;
    }
    invocationParams() {
        return this.llm.invocationParams();
    }
    /** @ignore */
    _identifyingParams() {
        return this.llm._identifyingParams();
    }
    async *_streamResponseChunks(messages, options, runManager) {
        yield* this.llm._streamResponseChunks(messages, options, runManager);
    }
    async _prepareAndParseToolCall({ messages, options, runManager, systemPromptTemplate = tool_calling_js_1.DEFAULT_TOOL_SYSTEM_PROMPT, stopSequences, }) {
        let promptMessages = messages;
        let forced = false;
        let toolCall;
        if (options.tools !== undefined && options.tools.length > 0) {
            const content = await systemPromptTemplate.format({
                tools: `<tools>\n${options.tools
                    .map(tool_calling_js_1.formatAsXMLRepresentation)
                    .join("\n\n")}</tools>`,
            });
            if (promptMessages.length && promptMessages[0]._getType() !== "system") {
                const systemMessage = new messages_1.SystemMessage({ content });
                promptMessages = [systemMessage].concat(promptMessages);
            }
            else {
                const systemMessage = new messages_1.SystemMessage({
                    content: `${content}\n\n${promptMessages[0].content}`,
                });
                promptMessages = [systemMessage].concat(promptMessages.slice(1));
            }
            // eslint-disable-next-line no-param-reassign
            options.stop = stopSequences.concat(["</function_calls>"]);
            if (options.tool_choice && options.tool_choice !== "auto") {
                toolCall = options.tool_choice.function.name;
                forced = true;
                const matchingFunction = options.tools.find(
                // eslint-disable-next-line @typescript-eslint/no-explicit-any
                (tool) => tool.function.name === toolCall);
                if (!matchingFunction) {
                    throw new Error(`No matching function found for passed "tool_choice"`);
                }
                promptMessages = promptMessages.concat([
                    new messages_1.AIMessage({
                        content: `<function_calls>\n<invoke><tool_name>${toolCall}</tool_name>`,
                    }),
                ]);
                // eslint-disable-next-line no-param-reassign
                delete options.tool_choice;
            }
            // eslint-disable-next-line no-param-reassign
            delete options.tools;
        }
        else if (options.tool_choice !== undefined) {
            throw new Error(`If "tool_choice" is provided, "tools" must also be.`);
        }
        const chatResult = await this.llm._generate(promptMessages, options, runManager);
        const chatGenerationContent = chatResult.generations[0].message.content;
        if (typeof chatGenerationContent !== "string") {
            throw new Error("AnthropicFunctions does not support non-string output.");
        }
        if (forced) {
            const parser = new fast_xml_parser_1.XMLParser();
            const result = parser.parse(`<function_calls>\n<invoke><tool_name>${toolCall}</tool_name>${chatGenerationContent}</function_calls>`);
            if (toolCall === undefined) {
                throw new Error(`Could not parse called function from model output.`);
            }
            const invocations = Array.isArray(result.function_calls?.invoke ?? [])
                ? result.function_calls.invoke
                : [result.function_calls.invoke];
            const responseMessageWithFunctions = new messages_1.AIMessage({
                content: "",
                additional_kwargs: {
                    tool_calls: invocations.map((toolInvocation, i) => ({
                        id: i.toString(),
                        type: "function",
                        function: {
                            name: toolInvocation.tool_name,
                            arguments: JSON.stringify(toolInvocation.parameters),
                        },
                    })),
                },
            });
            return {
                generations: [{ message: responseMessageWithFunctions, text: "" }],
            };
        }
        else if (chatGenerationContent.includes("<function_calls>")) {
            const parser = new fast_xml_parser_1.XMLParser();
            const result = parser.parse(`${chatGenerationContent}</function_calls>`);
            const invocations = Array.isArray(result.function_calls?.invoke ?? [])
                ? result.function_calls.invoke
                : [result.function_calls.invoke];
            const responseMessageWithFunctions = new messages_1.AIMessage({
                content: chatGenerationContent.split("<function_calls>")[0],
                additional_kwargs: {
                    tool_calls: invocations.map((toolInvocation, i) => ({
                        id: i.toString(),
                        type: "function",
                        function: {
                            name: toolInvocation.tool_name,
                            arguments: JSON.stringify(toolInvocation.parameters),
                        },
                    })),
                },
            });
            return {
                generations: [{ message: responseMessageWithFunctions, text: "" }],
            };
        }
        return chatResult;
    }
    async _generate(messages, options, _runManager) {
        return this._prepareAndParseToolCall({
            messages,
            options,
            systemPromptTemplate: this.systemPromptTemplate,
            stopSequences: this.stopSequences ?? [],
        });
    }
    _llmType() {
        return "anthropic_tool_calling";
    }
    withStructuredOutput({ schema, name, method, includeRaw, }) {
        if (method === "jsonMode") {
            throw new Error(`Anthropic only supports "functionCalling" as a method.`);
        }
        const functionName = name ?? "extract";
        const outputParser = new openai_tools_1.JsonOutputKeyToolsParser({
            returnSingle: true,
            keyName: functionName,
        });
        let tools;
        if (isZodSchema(schema)) {
            const jsonSchema = (0, zod_to_json_schema_1.zodToJsonSchema)(schema);
            tools = [
                {
                    type: "function",
                    function: {
                        name: functionName,
                        description: jsonSchema.description,
                        parameters: jsonSchema,
                    },
                },
            ];
        }
        else {
            tools = [
                {
                    type: "function",
                    function: {
                        name: functionName,
                        description: schema.description,
                        parameters: schema,
                    },
                },
            ];
        }
        const llm = this.bind({
            tools,
            tool_choice: {
                type: "function",
                function: {
                    name: functionName,
                },
            },
        });
        if (!includeRaw) {
            return llm.pipe(outputParser).withConfig({
                runName: "ChatAnthropicStructuredOutput",
            });
        }
        const parserAssign = runnables_1.RunnablePassthrough.assign({
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            parsed: (input, config) => outputParser.invoke(input.raw, config),
        });
        const parserNone = runnables_1.RunnablePassthrough.assign({
            parsed: () => null,
        });
        const parsedWithFallback = parserAssign.withFallbacks({
            fallbacks: [parserNone],
        });
        return runnables_1.RunnableSequence.from([
            {
                raw: llm,
            },
            parsedWithFallback,
        ]).withConfig({
            runName: "StructuredOutputRunnable",
        });
    }
}
exports.ChatAnthropicTools = ChatAnthropicTools;
function isZodSchema(input) {
    // Check for a characteristic method of Zod schemas
    return typeof input?.parse === "function";
}
package/dist/experimental/tool_calling.d.ts
@@ -0,0 +1,51 @@
import { BaseMessage } from "@langchain/core/messages";
import { ChatGenerationChunk, ChatResult } from "@langchain/core/outputs";
import { BaseChatModel, BaseChatModelParams } from "@langchain/core/language_models/chat_models";
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import { BasePromptTemplate } from "@langchain/core/prompts";
import { BaseLanguageModelCallOptions, BaseLanguageModelInput, StructuredOutputMethodParams, ToolDefinition } from "@langchain/core/language_models/base";
import { Runnable } from "@langchain/core/runnables";
import { type AnthropicInput } from "../chat_models.js";
export interface ChatAnthropicToolsCallOptions extends BaseLanguageModelCallOptions {
    tools?: ToolDefinition[];
    tool_choice?: "auto" | {
        function: {
            name: string;
        };
        type: "function";
    };
}
export type ChatAnthropicToolsInput = Partial<AnthropicInput> & BaseChatModelParams & {
    llm?: BaseChatModel;
    systemPromptTemplate?: BasePromptTemplate;
};
/**
 * Experimental wrapper over Anthropic chat models that adds support for
 * a function calling interface.
 */
export declare class ChatAnthropicTools extends BaseChatModel<ChatAnthropicToolsCallOptions> {
    llm: BaseChatModel;
    stopSequences?: string[];
    systemPromptTemplate: BasePromptTemplate;
    lc_namespace: string[];
    static lc_name(): string;
    constructor(fields?: ChatAnthropicToolsInput);
    invocationParams(): any;
    /** @ignore */
    _identifyingParams(): Record<string, any>;
    _streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
    _prepareAndParseToolCall({ messages, options, runManager, systemPromptTemplate, stopSequences, }: {
        messages: BaseMessage[];
        options: ChatAnthropicToolsCallOptions;
        runManager?: CallbackManagerForLLMRun;
        systemPromptTemplate?: BasePromptTemplate;
        stopSequences: string[];
    }): Promise<ChatResult>;
    _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun | undefined): Promise<ChatResult>;
    _llmType(): string;
    withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>({ schema, name, method, includeRaw, }: StructuredOutputMethodParams<RunOutput, false>): Runnable<BaseLanguageModelInput, RunOutput>;
    withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>({ schema, name, method, includeRaw, }: StructuredOutputMethodParams<RunOutput, true>): Runnable<BaseLanguageModelInput, {
        raw: BaseMessage;
        parsed: RunOutput;
    }>;
}
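Taken together, these declarations give `ChatAnthropicTools` an OpenAI-style tool calling surface. Below is a minimal usage sketch, not an official example from the package: it assumes the experimental entrypoint added in this release resolves as `@langchain/anthropic/experimental` (per the new `package/experimental.*` files) and that `ANTHROPIC_API_KEY` is set in the environment.

```ts
import { ChatAnthropicTools } from "@langchain/anthropic/experimental";
import { HumanMessage } from "@langchain/core/messages";

// Wrap an Anthropic chat model with the experimental XML-based tool calling layer.
const model = new ChatAnthropicTools({ temperature: 0 });

// Tools and tool_choice are per-call options (see ChatAnthropicToolsCallOptions above).
const response = await model.invoke([new HumanMessage("What's the weather in SF?")], {
  tools: [
    {
      type: "function",
      function: {
        name: "get_current_weather",
        description: "Get the current weather for a given location",
        parameters: {
          type: "object",
          properties: {
            location: { type: "string", description: "City and state" },
          },
          required: ["location"],
        },
      },
    },
  ],
  // Optionally force a specific tool instead of letting the model decide:
  // tool_choice: { type: "function", function: { name: "get_current_weather" } },
});

// Parsed invocations are surfaced in an OpenAI-compatible shape.
console.log(response.additional_kwargs.tool_calls);
```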
package/dist/experimental/tool_calling.js
@@ -0,0 +1,243 @@
import { XMLParser } from "fast-xml-parser";
import { AIMessage, SystemMessage, } from "@langchain/core/messages";
import { BaseChatModel, } from "@langchain/core/language_models/chat_models";
import { RunnablePassthrough, RunnableSequence, } from "@langchain/core/runnables";
import { JsonOutputKeyToolsParser } from "@langchain/core/output_parsers/openai_tools";
import { zodToJsonSchema } from "zod-to-json-schema";
import { ChatAnthropic } from "../chat_models.js";
import { DEFAULT_TOOL_SYSTEM_PROMPT, formatAsXMLRepresentation, } from "./utils/tool_calling.js";
/**
 * Experimental wrapper over Anthropic chat models that adds support for
 * a function calling interface.
 */
export class ChatAnthropicTools extends BaseChatModel {
    static lc_name() {
        return "ChatAnthropicTools";
    }
    constructor(fields) {
        super(fields ?? {});
        Object.defineProperty(this, "llm", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "stopSequences", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "systemPromptTemplate", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, "lc_namespace", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: ["langchain", "experimental", "chat_models"]
        });
        this.llm = fields?.llm ?? new ChatAnthropic(fields);
        this.systemPromptTemplate =
            fields?.systemPromptTemplate ?? DEFAULT_TOOL_SYSTEM_PROMPT;
        this.stopSequences =
            fields?.stopSequences ?? this.llm.stopSequences;
    }
    invocationParams() {
        return this.llm.invocationParams();
    }
    /** @ignore */
    _identifyingParams() {
        return this.llm._identifyingParams();
    }
    async *_streamResponseChunks(messages, options, runManager) {
        yield* this.llm._streamResponseChunks(messages, options, runManager);
    }
    async _prepareAndParseToolCall({ messages, options, runManager, systemPromptTemplate = DEFAULT_TOOL_SYSTEM_PROMPT, stopSequences, }) {
        let promptMessages = messages;
        let forced = false;
        let toolCall;
        if (options.tools !== undefined && options.tools.length > 0) {
            const content = await systemPromptTemplate.format({
                tools: `<tools>\n${options.tools
                    .map(formatAsXMLRepresentation)
                    .join("\n\n")}</tools>`,
            });
            if (promptMessages.length && promptMessages[0]._getType() !== "system") {
                const systemMessage = new SystemMessage({ content });
                promptMessages = [systemMessage].concat(promptMessages);
            }
            else {
                const systemMessage = new SystemMessage({
                    content: `${content}\n\n${promptMessages[0].content}`,
                });
                promptMessages = [systemMessage].concat(promptMessages.slice(1));
            }
            // eslint-disable-next-line no-param-reassign
            options.stop = stopSequences.concat(["</function_calls>"]);
            if (options.tool_choice && options.tool_choice !== "auto") {
                toolCall = options.tool_choice.function.name;
                forced = true;
                const matchingFunction = options.tools.find(
                // eslint-disable-next-line @typescript-eslint/no-explicit-any
                (tool) => tool.function.name === toolCall);
                if (!matchingFunction) {
                    throw new Error(`No matching function found for passed "tool_choice"`);
                }
                promptMessages = promptMessages.concat([
                    new AIMessage({
                        content: `<function_calls>\n<invoke><tool_name>${toolCall}</tool_name>`,
                    }),
                ]);
                // eslint-disable-next-line no-param-reassign
                delete options.tool_choice;
            }
            // eslint-disable-next-line no-param-reassign
            delete options.tools;
        }
        else if (options.tool_choice !== undefined) {
            throw new Error(`If "tool_choice" is provided, "tools" must also be.`);
        }
        const chatResult = await this.llm._generate(promptMessages, options, runManager);
        const chatGenerationContent = chatResult.generations[0].message.content;
        if (typeof chatGenerationContent !== "string") {
            throw new Error("AnthropicFunctions does not support non-string output.");
        }
        if (forced) {
            const parser = new XMLParser();
            const result = parser.parse(`<function_calls>\n<invoke><tool_name>${toolCall}</tool_name>${chatGenerationContent}</function_calls>`);
            if (toolCall === undefined) {
                throw new Error(`Could not parse called function from model output.`);
            }
            const invocations = Array.isArray(result.function_calls?.invoke ?? [])
                ? result.function_calls.invoke
                : [result.function_calls.invoke];
            const responseMessageWithFunctions = new AIMessage({
                content: "",
                additional_kwargs: {
                    tool_calls: invocations.map((toolInvocation, i) => ({
                        id: i.toString(),
                        type: "function",
                        function: {
                            name: toolInvocation.tool_name,
                            arguments: JSON.stringify(toolInvocation.parameters),
                        },
                    })),
                },
            });
            return {
                generations: [{ message: responseMessageWithFunctions, text: "" }],
            };
        }
        else if (chatGenerationContent.includes("<function_calls>")) {
            const parser = new XMLParser();
            const result = parser.parse(`${chatGenerationContent}</function_calls>`);
            const invocations = Array.isArray(result.function_calls?.invoke ?? [])
                ? result.function_calls.invoke
                : [result.function_calls.invoke];
            const responseMessageWithFunctions = new AIMessage({
                content: chatGenerationContent.split("<function_calls>")[0],
                additional_kwargs: {
                    tool_calls: invocations.map((toolInvocation, i) => ({
                        id: i.toString(),
                        type: "function",
                        function: {
                            name: toolInvocation.tool_name,
                            arguments: JSON.stringify(toolInvocation.parameters),
                        },
                    })),
                },
            });
            return {
                generations: [{ message: responseMessageWithFunctions, text: "" }],
            };
        }
        return chatResult;
    }
    async _generate(messages, options, _runManager) {
        return this._prepareAndParseToolCall({
            messages,
            options,
            systemPromptTemplate: this.systemPromptTemplate,
            stopSequences: this.stopSequences ?? [],
        });
    }
    _llmType() {
        return "anthropic_tool_calling";
    }
    withStructuredOutput({ schema, name, method, includeRaw, }) {
        if (method === "jsonMode") {
            throw new Error(`Anthropic only supports "functionCalling" as a method.`);
        }
        const functionName = name ?? "extract";
        const outputParser = new JsonOutputKeyToolsParser({
            returnSingle: true,
            keyName: functionName,
        });
        let tools;
        if (isZodSchema(schema)) {
            const jsonSchema = zodToJsonSchema(schema);
            tools = [
                {
                    type: "function",
                    function: {
                        name: functionName,
                        description: jsonSchema.description,
                        parameters: jsonSchema,
                    },
                },
            ];
        }
        else {
            tools = [
                {
                    type: "function",
                    function: {
                        name: functionName,
                        description: schema.description,
                        parameters: schema,
                    },
                },
            ];
        }
        const llm = this.bind({
            tools,
            tool_choice: {
                type: "function",
                function: {
                    name: functionName,
                },
            },
        });
        if (!includeRaw) {
            return llm.pipe(outputParser).withConfig({
                runName: "ChatAnthropicStructuredOutput",
            });
        }
        const parserAssign = RunnablePassthrough.assign({
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            parsed: (input, config) => outputParser.invoke(input.raw, config),
        });
        const parserNone = RunnablePassthrough.assign({
            parsed: () => null,
        });
        const parsedWithFallback = parserAssign.withFallbacks({
            fallbacks: [parserNone],
        });
        return RunnableSequence.from([
            {
                raw: llm,
            },
            parsedWithFallback,
        ]).withConfig({
            runName: "StructuredOutputRunnable",
        });
    }
}
function isZodSchema(input) {
    // Check for a characteristic method of Zod schemas
    return typeof input?.parse === "function";
}
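The `withStructuredOutput` method above binds a single forced tool and parses the arguments back out with `JsonOutputKeyToolsParser`. Here is a hedged sketch of calling it with a Zod schema; it makes the same `@langchain/anthropic/experimental` entrypoint assumption as the earlier sketch, and the schema, key name, and expected output are illustrative only.

```ts
import { z } from "zod";
import { ChatAnthropicTools } from "@langchain/anthropic/experimental";

const personSchema = z.object({
  name: z.string().describe("The person's name"),
  age: z.number().describe("The person's age"),
});

// Binds one "person" tool, forces it via tool_choice, and pipes the model
// output through JsonOutputKeyToolsParser keyed on that function name.
const extractor = new ChatAnthropicTools({ temperature: 0 }).withStructuredOutput({
  schema: personSchema,
  name: "person",
});

const person = await extractor.invoke("Alex is 42 years old.");
// e.g. { name: "Alex", age: 42 }
```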
package/dist/experimental/utils/tool_calling.cjs
@@ -0,0 +1,52 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.formatAsXMLRepresentation = exports.DEFAULT_TOOL_SYSTEM_PROMPT = void 0;
const fast_xml_parser_1 = require("fast-xml-parser");
const prompts_1 = require("@langchain/core/prompts");
exports.DEFAULT_TOOL_SYSTEM_PROMPT = 
/* #__PURE__ */ prompts_1.PromptTemplate.fromTemplate(`In this environment you have access to a set of tools you can use to answer the user's question.

You may call them like this:
<function_calls>
<invoke>
<tool_name>$TOOL_NAME</tool_name>
<parameters>
<$PARAMETER_NAME>$PARAMETER_VALUE</$PARAMETER_NAME>
...
</parameters>
</invoke>
</function_calls>

Here are the tools available:
{tools}`);
function formatAsXMLRepresentation(tool) {
    const builder = new fast_xml_parser_1.XMLBuilder();
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const toolParamProps = tool.function.parameters?.properties;
    const parameterXml = Object.keys(toolParamProps)
        .map((key) => {
        const parameterData = toolParamProps[key];
        let xml = `<parameter>
<name>${key}</name>
<type>${parameterData.type}</type>`;
        if (parameterData.description) {
            xml += `\n<description>${parameterData.description}</description>`;
        }
        if (parameterData.type === "array" && parameterData.items) {
            xml += `\n<items>${builder.build(parameterData.items.properties)}</items>`;
        }
        if (parameterData.properties) {
            xml += `\n<properties>\n${builder.build(parameterData.properties)}\n</properties>`;
        }
        return `${xml}\n</parameter>`;
    })
        .join("\n");
    return `<tool_description>
<tool_name>${tool.function.name}</tool_name>
<description>${tool.function.description}</description>
<parameters>
${parameterXml}
</parameters>
</tool_description>`;
}
exports.formatAsXMLRepresentation = formatAsXMLRepresentation;
package/dist/experimental/utils/tool_calling.d.ts
@@ -0,0 +1,8 @@
import { PromptTemplate } from "@langchain/core/prompts";
import { ToolDefinition } from "@langchain/core/language_models/base";
export declare const DEFAULT_TOOL_SYSTEM_PROMPT: PromptTemplate<import("@langchain/core/prompts").ParamsFromFString<"In this environment you have access to a set of tools you can use to answer the user's question.\n\nYou may call them like this:\n<function_calls>\n<invoke>\n<tool_name>$TOOL_NAME</tool_name>\n<parameters>\n<$PARAMETER_NAME>$PARAMETER_VALUE</$PARAMETER_NAME>\n...\n</parameters>\n</invoke>\n</function_calls>\n\nHere are the tools available:\n{tools}">, any>;
export type ToolInvocation = {
    tool_name: string;
    parameters: Record<string, unknown>;
};
export declare function formatAsXMLRepresentation(tool: ToolDefinition): string;
package/dist/experimental/utils/tool_calling.js
@@ -0,0 +1,48 @@
import { XMLBuilder } from "fast-xml-parser";
import { PromptTemplate } from "@langchain/core/prompts";
export const DEFAULT_TOOL_SYSTEM_PROMPT = 
/* #__PURE__ */ PromptTemplate.fromTemplate(`In this environment you have access to a set of tools you can use to answer the user's question.

You may call them like this:
<function_calls>
<invoke>
<tool_name>$TOOL_NAME</tool_name>
<parameters>
<$PARAMETER_NAME>$PARAMETER_VALUE</$PARAMETER_NAME>
...
</parameters>
</invoke>
</function_calls>

Here are the tools available:
{tools}`);
export function formatAsXMLRepresentation(tool) {
    const builder = new XMLBuilder();
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const toolParamProps = tool.function.parameters?.properties;
    const parameterXml = Object.keys(toolParamProps)
        .map((key) => {
        const parameterData = toolParamProps[key];
        let xml = `<parameter>
<name>${key}</name>
<type>${parameterData.type}</type>`;
        if (parameterData.description) {
            xml += `\n<description>${parameterData.description}</description>`;
        }
        if (parameterData.type === "array" && parameterData.items) {
            xml += `\n<items>${builder.build(parameterData.items.properties)}</items>`;
        }
        if (parameterData.properties) {
            xml += `\n<properties>\n${builder.build(parameterData.properties)}\n</properties>`;
        }
        return `${xml}\n</parameter>`;
    })
        .join("\n");
    return `<tool_description>
<tool_name>${tool.function.name}</tool_name>
<description>${tool.function.description}</description>
<parameters>
${parameterXml}
</parameters>
</tool_description>`;
}
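For reference, here is a sketch of the XML block `formatAsXMLRepresentation` builds for a simple tool definition. The deep import path is illustrative only (the function lives in the built file shown above), and the commented output approximates the template literals in that function.

```ts
import { formatAsXMLRepresentation } from "@langchain/anthropic/dist/experimental/utils/tool_calling.js";

// A minimal OpenAI-style ToolDefinition with one string parameter.
const xml = formatAsXMLRepresentation({
  type: "function",
  function: {
    name: "get_current_weather",
    description: "Get the current weather for a given location",
    parameters: {
      type: "object",
      properties: {
        location: { type: "string", description: "City and state" },
      },
      required: ["location"],
    },
  },
});

// Roughly:
// <tool_description>
// <tool_name>get_current_weather</tool_name>
// <description>Get the current weather for a given location</description>
// <parameters>
// <parameter>
// <name>location</name>
// <type>string</type>
// <description>City and state</description>
// </parameter>
// </parameters>
// </tool_description>
```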