@langchain/anthropic 0.1.9 → 0.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chat_models.cjs +289 -92
- package/dist/chat_models.d.ts +40 -11
- package/dist/chat_models.js +290 -93
- package/dist/experimental/tests/tool_calling.int.test.js +10 -10
- package/dist/experimental/tool_calling.cjs +1 -0
- package/dist/experimental/tool_calling.d.ts +1 -0
- package/dist/experimental/tool_calling.js +1 -0
- package/dist/output_parsers.cjs +80 -0
- package/dist/output_parsers.d.ts +20 -0
- package/dist/output_parsers.js +76 -0
- package/dist/tests/chat_models.int.test.js +178 -0
- package/dist/tests/chat_models.test.d.ts +1 -0
- package/dist/tests/chat_models.test.js +88 -0
- package/dist/types.cjs +2 -0
- package/dist/types.d.ts +6 -0
- package/dist/types.js +1 -0
- package/package.json +3 -3
package/dist/chat_models.cjs
CHANGED
@@ -6,6 +6,10 @@ const messages_1 = require("@langchain/core/messages");
 const outputs_1 = require("@langchain/core/outputs");
 const env_1 = require("@langchain/core/utils/env");
 const chat_models_1 = require("@langchain/core/language_models/chat_models");
+const zod_to_json_schema_1 = require("zod-to-json-schema");
+const runnables_1 = require("@langchain/core/runnables");
+const types_1 = require("@langchain/core/utils/types");
+const output_parsers_js_1 = require("./output_parsers.cjs");
 function _formatImage(imageUrl) {
     const regex = /^data:(image\/.+);base64,(.+)$/;
     const match = imageUrl.match(regex);
@@ -22,6 +26,34 @@ function _formatImage(imageUrl) {
         // eslint-disable-next-line @typescript-eslint/no-explicit-any
     };
 }
+function anthropicResponseToChatMessages(messages, additionalKwargs) {
+    if (messages.length === 1 && messages[0].type === "text") {
+        return [
+            {
+                text: messages[0].text,
+                message: new messages_1.AIMessage(messages[0].text, additionalKwargs),
+            },
+        ];
+    }
+    else {
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        const castMessage = messages;
+        const generations = [
+            {
+                text: "",
+                message: new messages_1.AIMessage({
+                    content: castMessage,
+                    additional_kwargs: additionalKwargs,
+                }),
+            },
+        ];
+        return generations;
+    }
+}
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function isAnthropicTool(tool) {
+    return "input_schema" in tool;
+}
 /**
  * Wrapper around Anthropic large language models.
  *
@@ -165,6 +197,32 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
         this.streaming = fields?.streaming ?? false;
         this.clientOptions = fields?.clientOptions ?? {};
     }
+    /**
+     * Formats LangChain StructuredTools to AnthropicTools.
+     *
+     * @param {ChatAnthropicCallOptions["tools"]} tools The tools to format
+     * @returns {AnthropicTool[] | undefined} The formatted tools, or undefined if none are passed.
+     * @throws {Error} If a mix of AnthropicTools and StructuredTools are passed.
+     */
+    formatStructuredToolToAnthropic(tools) {
+        if (!tools || !tools.length) {
+            return undefined;
+        }
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        if (tools.every((tool) => isAnthropicTool(tool))) {
+            // If the tool is already an anthropic tool, return it
+            return tools;
+        }
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        if (tools.some((tool) => isAnthropicTool(tool))) {
+            throw new Error(`Can not pass in a mix of AnthropicTools and StructuredTools`);
+        }
+        return tools.map((tool) => ({
+            name: tool.name,
+            description: tool.description,
+            input_schema: (0, zod_to_json_schema_1.zodToJsonSchema)(tool.schema),
+        }));
+    }
     /**
      * Get the parameters used to invoke the model
      */
@@ -180,6 +238,27 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             ...this.invocationKwargs,
         };
     }
+    invocationOptions(request, options) {
+        const toolUseBetaHeader = {
+            "anthropic-beta": "tools-2024-04-04",
+        };
+        const tools = this.formatStructuredToolToAnthropic(options?.tools);
+        // If tools are present, populate the body with the message request params.
+        // This is because Anthropic overwrites the message request params if a body
+        // is passed.
+        const body = tools
+            ? {
+                ...request,
+                tools,
+            }
+            : undefined;
+        const headers = tools ? toolUseBetaHeader : undefined;
+        return {
+            signal: options.signal,
+            ...(body ? { body } : {}),
+            ...(headers ? { headers } : {}),
+        };
+    }
     /** @ignore */
     _identifyingParams() {
         return {
@@ -198,69 +277,91 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
     }
     async *_streamResponseChunks(messages, options, runManager) {
         const params = this.invocationParams(options);
-        const
+        const requestOptions = this.invocationOptions({
             ...params,
+            stream: false,
             ...this.formatMessagesForAnthropic(messages),
- …
+        }, options);
+        if (options.tools !== undefined && options.tools.length > 0) {
+            const requestOptions = this.invocationOptions({
+                ...params,
+                stream: false,
+                ...this.formatMessagesForAnthropic(messages),
+            }, options);
+            const generations = await this._generateNonStreaming(messages, params, requestOptions);
+            yield new outputs_1.ChatGenerationChunk({
+                message: new messages_1.AIMessageChunk({
+                    content: generations[0].message.content,
+                    additional_kwargs: generations[0].message.additional_kwargs,
+                }),
+                text: generations[0].text,
+            });
+        }
+        else {
+            const stream = await this.createStreamWithRetry({
+                ...params,
+                ...this.formatMessagesForAnthropic(messages),
+                stream: true,
+            }, requestOptions);
+            let usageData = { input_tokens: 0, output_tokens: 0 };
+            for await (const data of stream) {
+                if (options.signal?.aborted) {
+                    stream.controller.abort();
+                    throw new Error("AbortError: User aborted the request.");
                 }
- …
-                usageData.output_tokens += data.usage.output_tokens;
+                if (data.type === "message_start") {
+                    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+                    const { content, usage, ...additionalKwargs } = data.message;
+                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                    const filteredAdditionalKwargs = {};
+                    for (const [key, value] of Object.entries(additionalKwargs)) {
+                        if (value !== undefined && value !== null) {
+                            filteredAdditionalKwargs[key] = value;
+                        }
+                    }
+                    usageData = usage;
+                    yield new outputs_1.ChatGenerationChunk({
+                        message: new messages_1.AIMessageChunk({
+                            content: "",
+                            additional_kwargs: filteredAdditionalKwargs,
+                        }),
+                        text: "",
+                    });
                 }
- …
-                else if (data.type === "content_block_delta") {
-                    const content = data.delta?.text;
-                    if (content !== undefined) {
+                else if (data.type === "message_delta") {
                     yield new outputs_1.ChatGenerationChunk({
                         message: new messages_1.AIMessageChunk({
-                            content,
-                            additional_kwargs: {},
+                            content: "",
+                            additional_kwargs: { ...data.delta },
                         }),
-                        text:
+                        text: "",
                     });
- …
+                    if (data?.usage !== undefined) {
+                        usageData.output_tokens += data.usage.output_tokens;
+                    }
+                }
+                else if (data.type === "content_block_delta") {
+                    const content = data.delta?.text;
+                    if (content !== undefined) {
+                        yield new outputs_1.ChatGenerationChunk({
+                            message: new messages_1.AIMessageChunk({
+                                content,
+                                additional_kwargs: {},
+                            }),
+                            text: content,
+                        });
+                        await runManager?.handleLLMNewToken(content);
+                    }
                 }
             }
+            yield new outputs_1.ChatGenerationChunk({
+                message: new messages_1.AIMessageChunk({
+                    content: "",
+                    additional_kwargs: { usage: usageData },
+                }),
+                text: "",
+            });
         }
-        yield new outputs_1.ChatGenerationChunk({
-            message: new messages_1.AIMessageChunk({
-                content: "",
-                additional_kwargs: { usage: usageData },
-            }),
-            text: "",
-        });
     }
     /**
      * Formats messages as a prompt for the model.
@@ -284,6 +385,9 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
             else if (message._getType() === "ai") {
                 role = "assistant";
             }
+            else if (message._getType() === "tool") {
+                role = "user";
+            }
             else if (message._getType() === "system") {
                 throw new Error("System messages are only permitted as the first passed message.");
             }
@@ -297,26 +401,44 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
                 };
             }
             else {
- …
-                if (contentPart.
- …
-                    if (typeof contentPart.image_url === "string") {
-                        source = _formatImage(contentPart.image_url);
-                    }
-                    else {
-                        source = _formatImage(contentPart.image_url.url);
-                    }
-                    return {
-                        type: "image",
-                        source,
-                    };
+                const contentBlocks = message.content.map((contentPart) => {
+                    if (contentPart.type === "image_url") {
+                        let source;
+                        if (typeof contentPart.image_url === "string") {
+                            source = _formatImage(contentPart.image_url);
                         }
                         else {
- …
+                            source = _formatImage(contentPart.image_url.url);
                         }
- …
+                        return {
+                            type: "image",
+                            source,
+                        };
+                    }
+                    else if (contentPart.type === "text") {
+                        // Assuming contentPart is of type MessageContentText here
+                        return {
+                            type: "text",
+                            text: contentPart.text,
+                        };
+                    }
+                    else if (contentPart.type === "tool_use") {
+                        // TODO: Fix when SDK types are fixed
+                        return {
+                            type: "tool_use",
+                            id: contentPart.id,
+                            name: contentPart.name,
+                            input: contentPart.input,
+                            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                        };
+                    }
+                    else {
+                        throw new Error("Unsupported message content format");
+                    }
+                });
+                return {
+                    role,
+                    content: contentBlocks,
                 };
             }
         });
@@ -326,6 +448,17 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
         };
     }
     /** @ignore */
+    async _generateNonStreaming(messages, params, requestOptions) {
+        const response = await this.completionWithRetry({
+            ...params,
+            stream: false,
+            ...this.formatMessagesForAnthropic(messages),
+        }, requestOptions);
+        const { content, ...additionalKwargs } = response;
+        const generations = anthropicResponseToChatMessages(content, additionalKwargs);
+        return generations;
+    }
+    /** @ignore */
     async _generate(messages, options, runManager) {
         if (this.stopSequences && options.stop) {
             throw new Error(`"stopSequence" parameter found in input and default params`);
@@ -333,7 +466,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
         const params = this.invocationParams(options);
         if (params.stream) {
             let finalChunk;
-            const stream =
+            const stream = this._streamResponseChunks(messages, options, runManager);
             for await (const chunk of stream) {
                 if (finalChunk === undefined) {
                     finalChunk = chunk;
@@ -355,26 +488,14 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
            };
         }
         else {
-            const
+            const requestOptions = this.invocationOptions({
                 ...params,
                 stream: false,
                 ...this.formatMessagesForAnthropic(messages),
-            },
-            const
-            if (!Array.isArray(content) || content.length !== 1) {
-                console.log(content);
-                throw new Error("Received multiple content parts in Anthropic response. Only single part messages are currently supported.");
-            }
+            }, options);
+            const generations = await this._generateNonStreaming(messages, params, requestOptions);
             return {
-                generations
-                    {
-                        text: content[0].text,
-                        message: new messages_1.AIMessage({
-                            content: content[0].text,
-                            additional_kwargs: additionalKwargs,
-                        }),
-                    },
-                ],
+                generations,
            };
         }
     }
@@ -383,12 +504,12 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
      * @param request The parameters for creating a completion.
      * @returns A streaming request.
      */
-    async createStreamWithRetry(request) {
+    async createStreamWithRetry(request, options) {
        if (!this.streamingClient) {
-            const
+            const options_ = this.apiUrl ? { baseURL: this.apiUrl } : undefined;
            this.streamingClient = new sdk_1.Anthropic({
                ...this.clientOptions,
-                ...
+                ...options_,
                apiKey: this.anthropicApiKey,
                // Prefer LangChain built-in retries
                maxRetries: 0,
@@ -398,7 +519,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
            ...request,
            ...this.invocationKwargs,
            stream: true,
-        });
+        }, options);
        return this.caller.call(makeCompletionRequest);
     }
     /** @ignore */
@@ -418,12 +539,88 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
        const makeCompletionRequest = async () => this.batchClient.messages.create({
            ...request,
            ...this.invocationKwargs,
-        });
-        return this.caller.callWithOptions({ signal: options.signal }, makeCompletionRequest);
+        }, options);
+        return this.caller.callWithOptions({ signal: options.signal ?? undefined }, makeCompletionRequest);
     }
     _llmType() {
        return "anthropic";
     }
+    withStructuredOutput(outputSchema, config) {
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        const schema = outputSchema;
+        const name = config?.name;
+        const method = config?.method;
+        const includeRaw = config?.includeRaw;
+        if (method === "jsonMode") {
+            throw new Error(`Anthropic only supports "functionCalling" as a method.`);
+        }
+        let functionName = name ?? "extract";
+        let outputParser;
+        let tools;
+        if ((0, types_1.isZodSchema)(schema)) {
+            const jsonSchema = (0, zod_to_json_schema_1.zodToJsonSchema)(schema);
+            tools = [
+                {
+                    name: functionName,
+                    description: jsonSchema.description ?? "A function available to call.",
+                    input_schema: jsonSchema,
+                },
+            ];
+            outputParser = new output_parsers_js_1.AnthropicToolsOutputParser({
+                returnSingle: true,
+                keyName: functionName,
+                zodSchema: schema,
+            });
+        }
+        else {
+            let anthropicTools;
+            if (typeof schema.name === "string" &&
+                typeof schema.description === "string" &&
+                typeof schema.input_schema === "object" &&
+                schema.input_schema != null) {
+                anthropicTools = schema;
+                functionName = schema.name;
+            }
+            else {
+                anthropicTools = {
+                    name: functionName,
+                    description: schema.description ?? "",
+                    input_schema: schema,
+                };
+            }
+            tools = [anthropicTools];
+            outputParser = new output_parsers_js_1.AnthropicToolsOutputParser({
+                returnSingle: true,
+                keyName: functionName,
+            });
+        }
+        const llm = this.bind({
+            tools,
+        });
+        if (!includeRaw) {
+            return llm.pipe(outputParser).withConfig({
+                runName: "ChatAnthropicStructuredOutput",
+            });
+        }
+        const parserAssign = runnables_1.RunnablePassthrough.assign({
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+            parsed: (input, config) => outputParser.invoke(input.raw, config),
+        });
+        const parserNone = runnables_1.RunnablePassthrough.assign({
+            parsed: () => null,
+        });
+        const parsedWithFallback = parserAssign.withFallbacks({
+            fallbacks: [parserNone],
+        });
+        return runnables_1.RunnableSequence.from([
+            {
+                raw: llm,
+            },
+            parsedWithFallback,
+        ]).withConfig({
+            runName: "StructuredOutputRunnable",
+        });
+    }
 }
 exports.ChatAnthropicMessages = ChatAnthropicMessages;
 class ChatAnthropic extends ChatAnthropicMessages {
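Taken together, the chat_models.cjs changes above let tools be passed as call options: formatStructuredToolToAnthropic converts LangChain StructuredTools (or passes AnthropicTools through unchanged), and invocationOptions attaches the request body plus the "anthropic-beta": "tools-2024-04-04" header. A minimal TypeScript sketch of that surface, assuming the package's public ChatAnthropic export; the get_weather tool, model name, and prompt are illustrative and not part of this diff:

import { ChatAnthropic } from "@langchain/anthropic";

// Hypothetical AnthropicTool-shaped definition (name / description / input_schema),
// which formatStructuredToolToAnthropic() passes through as-is.
const weatherTool = {
  name: "get_weather",
  description: "Get the current weather for a given location.",
  input_schema: {
    type: "object",
    properties: { location: { type: "string" } },
    required: ["location"],
  },
};

const model = new ChatAnthropic({
  modelName: "claude-3-sonnet-20240229",
  temperature: 0,
});

// Tools ride along as call options; invocationOptions() then builds the
// request body and adds the tools beta header.
const response = await model.invoke("What is the weather like in Paris?", {
  tools: [weatherTool],
});

// Per anthropicResponseToChatMessages(), a response with multiple or non-text
// blocks keeps the raw content blocks, so any tool_use block appears here.
console.log(response.content);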
package/dist/chat_models.d.ts
CHANGED
@@ -2,13 +2,28 @@ import { Anthropic, type ClientOptions } from "@anthropic-ai/sdk";
 import type { Stream } from "@anthropic-ai/sdk/streaming";
 import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
 import { type BaseMessage } from "@langchain/core/messages";
-import { ChatGenerationChunk, type ChatResult } from "@langchain/core/outputs";
+import { ChatGeneration, ChatGenerationChunk, type ChatResult } from "@langchain/core/outputs";
 import { BaseChatModel, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
-import { type BaseLanguageModelCallOptions } from "@langchain/core/language_models/base";
+import { StructuredOutputMethodOptions, type BaseLanguageModelCallOptions, BaseLanguageModelInput } from "@langchain/core/language_models/base";
+import { StructuredToolInterface } from "@langchain/core/tools";
+import { Runnable } from "@langchain/core/runnables";
+import { z } from "zod";
+type AnthropicTool = {
+    name: string;
+    description: string;
+    /**
+     * JSON schema.
+     */
+    input_schema: Record<string, unknown>;
+};
 type AnthropicMessage = Anthropic.MessageParam;
 type AnthropicMessageCreateParams = Anthropic.MessageCreateParamsNonStreaming;
 type AnthropicStreamingMessageCreateParams = Anthropic.MessageCreateParamsStreaming;
 type AnthropicMessageStreamEvent = Anthropic.MessageStreamEvent;
+type AnthropicRequestOptions = Anthropic.RequestOptions;
+interface ChatAnthropicCallOptions extends BaseLanguageModelCallOptions {
+    tools?: StructuredToolInterface[] | AnthropicTool[];
+}
 /**
  * Input to AnthropicChat class.
  */
@@ -89,7 +104,7 @@ type Kwargs = Record<string, any>;
  * console.log(res);
  * ```
  */
-export declare class ChatAnthropicMessages<CallOptions extends
+export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCallOptions = ChatAnthropicCallOptions> extends BaseChatModel<CallOptions> implements AnthropicInput {
     static lc_name(): string;
     get lc_secrets(): {
         [key: string]: string;
@@ -110,18 +125,27 @@ export declare class ChatAnthropicMessages<CallOptions extends BaseLanguageModel
     protected batchClient: Anthropic;
     protected streamingClient: Anthropic;
     constructor(fields?: Partial<AnthropicInput> & BaseChatModelParams);
+    /**
+     * Formats LangChain StructuredTools to AnthropicTools.
+     *
+     * @param {ChatAnthropicCallOptions["tools"]} tools The tools to format
+     * @returns {AnthropicTool[] | undefined} The formatted tools, or undefined if none are passed.
+     * @throws {Error} If a mix of AnthropicTools and StructuredTools are passed.
+     */
+    formatStructuredToolToAnthropic(tools: ChatAnthropicCallOptions["tools"]): AnthropicTool[] | undefined;
     /**
      * Get the parameters used to invoke the model
      */
     invocationParams(options?: this["ParsedCallOptions"]): Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams, "messages"> & Kwargs;
+    invocationOptions(request: Omit<AnthropicMessageCreateParams | AnthropicStreamingMessageCreateParams, "messages"> & Kwargs, options: this["ParsedCallOptions"]): AnthropicRequestOptions;
     /** @ignore */
     _identifyingParams(): {
+        system?: string | undefined;
         metadata?: Anthropic.Messages.MessageCreateParams.Metadata | undefined;
         stream?: boolean | undefined;
         max_tokens: number;
-        model: (string & {}) | "claude-3-opus-20240229" | "claude-3-sonnet-20240229" | "claude-
+        model: "claude-2.1" | (string & {}) | "claude-3-opus-20240229" | "claude-3-sonnet-20240229" | "claude-3-haiku-20240307" | "claude-2.0" | "claude-instant-1.2";
         stop_sequences?: string[] | undefined;
-        system?: string | undefined;
         temperature?: number | undefined;
         top_k?: number | undefined;
         top_p?: number | undefined;
@@ -131,12 +155,12 @@ export declare class ChatAnthropicMessages<CallOptions extends BaseLanguageModel
      * Get the identifying parameters for the model
      */
     identifyingParams(): {
+        system?: string | undefined;
         metadata?: Anthropic.Messages.MessageCreateParams.Metadata | undefined;
         stream?: boolean | undefined;
         max_tokens: number;
-        model: (string & {}) | "claude-3-opus-20240229" | "claude-3-sonnet-20240229" | "claude-
+        model: "claude-2.1" | (string & {}) | "claude-3-opus-20240229" | "claude-3-sonnet-20240229" | "claude-3-haiku-20240307" | "claude-2.0" | "claude-instant-1.2";
         stop_sequences?: string[] | undefined;
-        system?: string | undefined;
         temperature?: number | undefined;
         top_k?: number | undefined;
         top_p?: number | undefined;
@@ -153,18 +177,23 @@ export declare class ChatAnthropicMessages<CallOptions extends BaseLanguageModel
         messages: AnthropicMessage[];
     };
     /** @ignore */
+    _generateNonStreaming(messages: BaseMessage[], params: Omit<Anthropic.Messages.MessageCreateParamsNonStreaming | Anthropic.Messages.MessageCreateParamsStreaming, "messages"> & Kwargs, requestOptions: AnthropicRequestOptions): Promise<ChatGeneration[]>;
+    /** @ignore */
     _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
     /**
      * Creates a streaming request with retry.
      * @param request The parameters for creating a completion.
      * @returns A streaming request.
      */
-    protected createStreamWithRetry(request: AnthropicStreamingMessageCreateParams & Kwargs): Promise<Stream<AnthropicMessageStreamEvent>>;
+    protected createStreamWithRetry(request: AnthropicStreamingMessageCreateParams & Kwargs, options?: AnthropicRequestOptions): Promise<Stream<AnthropicMessageStreamEvent>>;
     /** @ignore */
-    protected completionWithRetry(request: AnthropicMessageCreateParams & Kwargs, options:
-        signal?: AbortSignal;
-    }): Promise<Anthropic.Message>;
+    protected completionWithRetry(request: AnthropicMessageCreateParams & Kwargs, options: AnthropicRequestOptions): Promise<Anthropic.Message>;
     _llmType(): string;
+    withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;
+    withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {
+        raw: BaseMessage;
+        parsed: RunOutput;
+    }>;
 }
 export declare class ChatAnthropic extends ChatAnthropicMessages {
 }
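The withStructuredOutput overloads declared above pair a schema with the AnthropicToolsOutputParser added in output_parsers. A minimal TypeScript sketch, again assuming the public ChatAnthropic export; the Zod schema, model name, and prompt are illustrative only:

import { z } from "zod";
import { ChatAnthropic } from "@langchain/anthropic";

// Illustrative schema; per the declaration, any z.ZodType<RunOutput>
// (or a raw JSON schema / AnthropicTool object) is accepted.
const joke = z.object({
  setup: z.string().describe("The setup of the joke"),
  punchline: z.string().describe("The punchline of the joke"),
});

const model = new ChatAnthropic({
  modelName: "claude-3-sonnet-20240229",
  temperature: 0,
});

// name is optional and defaults to "extract"; method: "jsonMode" throws,
// since only tool/function calling is supported by this implementation.
const structured = model.withStructuredOutput(joke, { name: "joke" });
const result = await structured.invoke("Tell me a joke about bears.");
// result is typed as { setup: string; punchline: string }

// With includeRaw: true the runnable yields { raw: BaseMessage, parsed: RunOutput };
// the .cjs wiring falls back to parsed = null if parsing fails.
const withRaw = model.withStructuredOutput(joke, { includeRaw: true });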