@langchain/anthropic 0.2.13 → 0.2.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -41,28 +41,388 @@ function extractToken(chunk) {
41
41
  return undefined;
42
42
  }
43
43
  /**
44
- * Wrapper around Anthropic large language models.
45
- *
46
- * To use this package, you should have an Anthropic API key set as an
47
- * environment variable named `ANTHROPIC_API_KEY` or passed
48
- * into the constructor.
49
- *
50
- * @remarks
51
- * Any parameters that are valid to be passed to {@link
52
- * https://console.anthropic.com/docs/api/reference |
53
- * `anthropic.messages`} can be passed through {@link invocationKwargs},
54
- * even if not explicitly available on this class.
55
- * @example
44
+ * Anthropic chat model integration.
45
+ *
46
+ * Setup:
47
+ * Install `@langchain/anthropic` and set the environment variable `ANTHROPIC_API_KEY`.
48
+ *
49
+ * ```bash
50
+ * npm install @langchain/anthropic
51
+ * export ANTHROPIC_API_KEY="your-api-key"
52
+ * ```
53
+ *
54
+ * ## [Constructor args](/classes/langchain_anthropic.ChatAnthropic.html#constructor)
55
+ *
56
+ * ## [Runtime args](/interfaces/langchain_anthropic.ChatAnthropicCallOptions.html)
57
+ *
58
+ * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
59
+ * They can also be passed via `.bind`, or the second arg in `.bindTools`, as shown in the examples below:
60
+ *
56
61
  * ```typescript
57
- * import { ChatAnthropic } from "@langchain/anthropic";
62
+ * // When calling `.bind`, call options should be passed via the first argument
63
+ * const llmWithArgsBound = llm.bind({
64
+ * stop: ["\n"],
65
+ * tools: [...],
66
+ * });
67
+ *
68
+ * // When calling `.bindTools`, call options should be passed via the second argument
69
+ * const llmWithTools = llm.bindTools(
70
+ * [...],
71
+ * {
72
+ * tool_choice: "auto",
73
+ * }
74
+ * );
75
+ * ```
76
+ *
77
+ * ## Examples
78
+ *
79
+ * <details open>
80
+ * <summary><strong>Instantiate</strong></summary>
58
81
  *
59
- * const model = new ChatAnthropic({
60
- * temperature: 0.9,
61
- * apiKey: 'YOUR-API-KEY',
82
+ * ```typescript
83
+ * import { ChatAnthropic } from '@langchain/anthropic';
84
+ *
85
+ * const llm = new ChatAnthropic({
86
+ * model: "claude-3-5-sonnet-20240620",
87
+ * temperature: 0,
88
+ * maxTokens: undefined,
89
+ * maxRetries: 2,
90
+ * // apiKey: "...",
91
+ * // baseUrl: "...",
92
+ * // other params...
62
93
  * });
63
- * const res = await model.invoke({ input: 'Hello!' });
64
- * console.log(res);
65
94
  * ```
95
+ * </details>
96
+ *
97
+ * <br />
98
+ *
99
+ * <details>
100
+ * <summary><strong>Invoking</strong></summary>
101
+ *
102
+ * ```typescript
103
+ * const messages = [
104
+ * {
105
+ * type: "system" as const,
106
+ * content: "You are a helpful translator. Translate the user sentence to French.",
107
+ * },
108
+ * {
109
+ * type: "human" as const,
110
+ * content: "I love programming.",
111
+ * },
112
+ * ];
113
+ * const result = await llm.invoke(messages);
114
+ * console.log(result);
115
+ * ```
116
+ *
117
+ * ```txt
118
+ * AIMessage {
119
+ * "id": "msg_01QDpd78JUHpRP6bRRNyzbW3",
120
+ * "content": "Here's the translation to French:\n\nJ'adore la programmation.",
121
+ * "response_metadata": {
122
+ * "id": "msg_01QDpd78JUHpRP6bRRNyzbW3",
123
+ * "model": "claude-3-5-sonnet-20240620",
124
+ * "stop_reason": "end_turn",
125
+ * "stop_sequence": null,
126
+ * "usage": {
127
+ * "input_tokens": 25,
128
+ * "output_tokens": 19
129
+ * },
130
+ * "type": "message",
131
+ * "role": "assistant"
132
+ * },
133
+ * "usage_metadata": {
134
+ * "input_tokens": 25,
135
+ * "output_tokens": 19,
136
+ * "total_tokens": 44
137
+ * }
138
+ * }
139
+ * ```
140
+ * </details>
141
+ *
142
+ * <br />
143
+ *
144
+ * <details>
145
+ * <summary><strong>Streaming Chunks</strong></summary>
146
+ *
147
+ * ```typescript
148
+ * for await (const chunk of await llm.stream(messages)) {
149
+ * console.log(chunk);
150
+ * }
151
+ * ```
152
+ *
153
+ * ```txt
154
+ * AIMessageChunk {
155
+ * "id": "msg_01N8MwoYxiKo9w4chE4gXUs4",
156
+ * "content": "",
157
+ * "additional_kwargs": {
158
+ * "id": "msg_01N8MwoYxiKo9w4chE4gXUs4",
159
+ * "type": "message",
160
+ * "role": "assistant",
161
+ * "model": "claude-3-5-sonnet-20240620"
162
+ * },
163
+ * "usage_metadata": {
164
+ * "input_tokens": 25,
165
+ * "output_tokens": 1,
166
+ * "total_tokens": 26
167
+ * }
168
+ * }
169
+ * AIMessageChunk {
170
+ * "content": "",
171
+ * }
172
+ * AIMessageChunk {
173
+ * "content": "Here",
174
+ * }
175
+ * AIMessageChunk {
176
+ * "content": "'s",
177
+ * }
178
+ * AIMessageChunk {
179
+ * "content": " the translation to",
180
+ * }
181
+ * AIMessageChunk {
182
+ * "content": " French:\n\nJ",
183
+ * }
184
+ * AIMessageChunk {
185
+ * "content": "'adore la programmation",
186
+ * }
187
+ * AIMessageChunk {
188
+ * "content": ".",
189
+ * }
190
+ * AIMessageChunk {
191
+ * "content": "",
192
+ * "additional_kwargs": {
193
+ * "stop_reason": "end_turn",
194
+ * "stop_sequence": null
195
+ * },
196
+ * "usage_metadata": {
197
+ * "input_tokens": 0,
198
+ * "output_tokens": 19,
199
+ * "total_tokens": 19
200
+ * }
201
+ * }
202
+ * ```
203
+ * </details>
204
+ *
205
+ * <br />
206
+ *
207
+ * <details>
208
+ * <summary><strong>Aggregate Streamed Chunks</strong></summary>
209
+ *
210
+ * ```typescript
211
+ * import { AIMessageChunk } from '@langchain/core/messages';
212
+ * import { concat } from '@langchain/core/utils/stream';
213
+ *
214
+ * const stream = await llm.stream(messages);
215
+ * let full: AIMessageChunk | undefined;
216
+ * for await (const chunk of stream) {
217
+ * full = !full ? chunk : concat(full, chunk);
218
+ * }
219
+ * console.log(full);
220
+ * ```
221
+ *
222
+ * ```txt
223
+ * AIMessageChunk {
224
+ * "id": "msg_01SBTb5zSGXfjUc7yQ8EKEEA",
225
+ * "content": "Here's the translation to French:\n\nJ'adore la programmation.",
226
+ * "additional_kwargs": {
227
+ * "id": "msg_01SBTb5zSGXfjUc7yQ8EKEEA",
228
+ * "type": "message",
229
+ * "role": "assistant",
230
+ * "model": "claude-3-5-sonnet-20240620",
231
+ * "stop_reason": "end_turn",
232
+ * "stop_sequence": null
233
+ * },
234
+ * "usage_metadata": {
235
+ * "input_tokens": 25,
236
+ * "output_tokens": 20,
237
+ * "total_tokens": 45
238
+ * }
239
+ * }
240
+ * ```
241
+ * </details>
242
+ *
243
+ * <br />
244
+ *
245
+ * <details>
246
+ * <summary><strong>Bind tools</strong></summary>
247
+ *
248
+ * ```typescript
249
+ * import { z } from 'zod';
250
+ *
251
+ * const GetWeather = {
252
+ * name: "GetWeather",
253
+ * description: "Get the current weather in a given location",
254
+ * schema: z.object({
255
+ * location: z.string().describe("The city and state, e.g. San Francisco, CA")
256
+ * }),
257
+ * }
258
+ *
259
+ * const GetPopulation = {
260
+ * name: "GetPopulation",
261
+ * description: "Get the current population in a given location",
262
+ * schema: z.object({
263
+ * location: z.string().describe("The city and state, e.g. San Francisco, CA")
264
+ * }),
265
+ * }
266
+ *
267
+ * const llmWithTools = llm.bindTools([GetWeather, GetPopulation]);
268
+ * const aiMsg = await llmWithTools.invoke(
269
+ * "Which city is hotter today and which is bigger: LA or NY?"
270
+ * );
271
+ * console.log(aiMsg.tool_calls);
272
+ * ```
273
+ *
274
+ * ```txt
275
+ * [
276
+ * {
277
+ * name: 'GetWeather',
278
+ * args: { location: 'Los Angeles, CA' },
279
+ * id: 'toolu_01WjW3Dann6BPJVtLhovdBD5',
280
+ * type: 'tool_call'
281
+ * },
282
+ * {
283
+ * name: 'GetWeather',
284
+ * args: { location: 'New York, NY' },
285
+ * id: 'toolu_01G6wfJgqi5zRmJomsmkyZXe',
286
+ * type: 'tool_call'
287
+ * },
288
+ * {
289
+ * name: 'GetPopulation',
290
+ * args: { location: 'Los Angeles, CA' },
291
+ * id: 'toolu_0165qYWBA2VFyUst5RA18zew',
292
+ * type: 'tool_call'
293
+ * },
294
+ * {
295
+ * name: 'GetPopulation',
296
+ * args: { location: 'New York, NY' },
297
+ * id: 'toolu_01PGNyP33vxr13tGqr7i3rDo',
298
+ * type: 'tool_call'
299
+ * }
300
+ * ]
301
+ * ```
302
+ * </details>
303
+ *
304
+ * <br />
305
+ *
306
+ * <details>
307
+ * <summary><strong>Structured Output</strong></summary>
308
+ *
309
+ * ```typescript
310
+ * import { z } from 'zod';
311
+ *
312
+ * const Joke = z.object({
313
+ * setup: z.string().describe("The setup of the joke"),
314
+ * punchline: z.string().describe("The punchline to the joke"),
315
+ * rating: z.number().optional().describe("How funny the joke is, from 1 to 10")
316
+ * }).describe('Joke to tell user.');
317
+ *
318
+ * const structuredLlm = llm.withStructuredOutput(Joke);
319
+ * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
320
+ * console.log(jokeResult);
321
+ * ```
322
+ *
323
+ * ```txt
324
+ * {
325
+ * setup: "Why don't cats play poker in the jungle?",
326
+ * punchline: 'Too many cheetahs!',
327
+ * rating: 7
328
+ * }
329
+ * ```
330
+ * </details>
331
+ *
332
+ * <br />
333
+ *
334
+ * <details>
335
+ * <summary><strong>Multimodal</strong></summary>
336
+ *
337
+ * ```typescript
338
+ * import { HumanMessage } from '@langchain/core/messages';
339
+ *
340
+ * const imageUrl = "https://example.com/image.jpg";
341
+ * const imageData = await fetch(imageUrl).then(res => res.arrayBuffer());
342
+ * const base64Image = Buffer.from(imageData).toString('base64');
343
+ *
344
+ * const message = new HumanMessage({
345
+ * content: [
346
+ * { type: "text", text: "describe the weather in this image" },
347
+ * {
348
+ * type: "image_url",
349
+ * image_url: { url: `data:image/jpeg;base64,${base64Image}` },
350
+ * },
351
+ * ]
352
+ * });
353
+ *
354
+ * const imageDescriptionAiMsg = await llm.invoke([message]);
355
+ * console.log(imageDescriptionAiMsg.content);
356
+ * ```
357
+ *
358
+ * ```txt
359
+ * The weather in this image appears to be beautiful and clear. The sky is a vibrant blue with scattered white clouds, suggesting a sunny and pleasant day. The clouds are wispy and light, indicating calm conditions without any signs of storms or heavy weather. The bright green grass on the rolling hills looks lush and well-watered, which could mean recent rainfall or good growing conditions. Overall, the scene depicts a perfect spring or early summer day with mild temperatures, plenty of sunshine, and gentle breezes - ideal weather for enjoying the outdoors or for plant growth.
360
+ * ```
361
+ * </details>
362
+ *
363
+ * <br />
364
+ *
365
+ * <details>
366
+ * <summary><strong>Usage Metadata</strong></summary>
367
+ *
368
+ * ```typescript
369
+ * const aiMsgForMetadata = await llm.invoke(messages);
370
+ * console.log(aiMsgForMetadata.usage_metadata);
371
+ * ```
372
+ *
373
+ * ```txt
374
+ * { input_tokens: 25, output_tokens: 19, total_tokens: 44 }
375
+ * ```
376
+ * </details>
377
+ *
378
+ * <br />
379
+ *
380
+ * <details>
381
+ * <summary><strong>Stream Usage Metadata</strong></summary>
382
+ *
383
+ * ```typescript
384
+ * const streamForMetadata = await llm.stream(
385
+ * messages,
386
+ * {
387
+ * streamUsage: true
388
+ * }
389
+ * );
390
+ * let fullForMetadata: AIMessageChunk | undefined;
391
+ * for await (const chunk of streamForMetadata) {
392
+ * fullForMetadata = !fullForMetadata ? chunk : concat(fullForMetadata, chunk);
393
+ * }
394
+ * console.log(fullForMetadata?.usage_metadata);
395
+ * ```
396
+ *
397
+ * ```txt
398
+ * { input_tokens: 25, output_tokens: 20, total_tokens: 45 }
399
+ * ```
400
+ * </details>
401
+ *
402
+ * <br />
403
+ *
404
+ * <details>
405
+ * <summary><strong>Response Metadata</strong></summary>
406
+ *
407
+ * ```typescript
408
+ * const aiMsgForResponseMetadata = await llm.invoke(messages);
409
+ * console.log(aiMsgForResponseMetadata.response_metadata);
410
+ * ```
411
+ *
412
+ * ```txt
413
+ * {
414
+ * id: 'msg_01STxeQxJmp4sCSpioD6vK3L',
415
+ * model: 'claude-3-5-sonnet-20240620',
416
+ * stop_reason: 'end_turn',
417
+ * stop_sequence: null,
418
+ * usage: { input_tokens: 25, output_tokens: 19 },
419
+ * type: 'message',
420
+ * role: 'assistant'
421
+ * }
422
+ * ```
423
+ * </details>
424
+ *
425
+ * <br />
66
426
  */
67
427
  class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
68
428
  static lc_name() {
@@ -349,20 +709,11 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
349
709
  }
350
710
  /** @ignore */
351
711
  async _generateNonStreaming(messages, params, requestOptions) {
352
- const options = params.tools !== undefined
353
- ? {
354
- ...requestOptions,
355
- headers: {
356
- ...requestOptions.headers,
357
- "anthropic-beta": "tools-2024-04-04",
358
- },
359
- }
360
- : requestOptions;
361
712
  const response = await this.completionWithRetry({
362
713
  ...params,
363
714
  stream: false,
364
715
  ...(0, message_inputs_js_1._formatMessagesForAnthropic)(messages),
365
- }, options);
716
+ }, requestOptions);
366
717
  const { content, ...additionalKwargs } = response;
367
718
  const generations = (0, message_outputs_js_1.anthropicResponseToChatMessages)(content, additionalKwargs);
368
719
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
@@ -89,28 +89,388 @@ export interface AnthropicInput {
89
89
  */
90
90
  type Kwargs = Record<string, any>;
91
91
  /**
92
- * Wrapper around Anthropic large language models.
93
- *
94
- * To use this package, you should have an Anthropic API key set as an
95
- * environment variable named `ANTHROPIC_API_KEY` or passed
96
- * into the constructor.
97
- *
98
- * @remarks
99
- * Any parameters that are valid to be passed to {@link
100
- * https://console.anthropic.com/docs/api/reference |
101
- * `anthropic.messages`} can be passed through {@link invocationKwargs},
102
- * even if not explicitly available on this class.
103
- * @example
92
+ * Anthropic chat model integration.
93
+ *
94
+ * Setup:
95
+ * Install `@langchain/anthropic` and set the environment variable `ANTHROPIC_API_KEY`.
96
+ *
97
+ * ```bash
98
+ * npm install @langchain/anthropic
99
+ * export ANTHROPIC_API_KEY="your-api-key"
100
+ * ```
101
+ *
102
+ * ## [Constructor args](/classes/langchain_anthropic.ChatAnthropic.html#constructor)
103
+ *
104
+ * ## [Runtime args](/interfaces/langchain_anthropic.ChatAnthropicCallOptions.html)
105
+ *
106
+ * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
107
+ * They can also be passed via `.bind`, or the second arg in `.bindTools`, as shown in the examples below:
108
+ *
109
+ * ```typescript
110
+ * // When calling `.bind`, call options should be passed via the first argument
111
+ * const llmWithArgsBound = llm.bind({
112
+ * stop: ["\n"],
113
+ * tools: [...],
114
+ * });
115
+ *
116
+ * // When calling `.bindTools`, call options should be passed via the second argument
117
+ * const llmWithTools = llm.bindTools(
118
+ * [...],
119
+ * {
120
+ * tool_choice: "auto",
121
+ * }
122
+ * );
123
+ * ```
124
+ *
125
+ * ## Examples
126
+ *
127
+ * <details open>
128
+ * <summary><strong>Instantiate</strong></summary>
129
+ *
130
+ * ```typescript
131
+ * import { ChatAnthropic } from '@langchain/anthropic';
132
+ *
133
+ * const llm = new ChatAnthropic({
134
+ * model: "claude-3-5-sonnet-20240620",
135
+ * temperature: 0,
136
+ * maxTokens: undefined,
137
+ * maxRetries: 2,
138
+ * // apiKey: "...",
139
+ * // baseUrl: "...",
140
+ * // other params...
141
+ * });
142
+ * ```
143
+ * </details>
144
+ *
145
+ * <br />
146
+ *
147
+ * <details>
148
+ * <summary><strong>Invoking</strong></summary>
149
+ *
150
+ * ```typescript
151
+ * const messages = [
152
+ * {
153
+ * type: "system" as const,
154
+ * content: "You are a helpful translator. Translate the user sentence to French.",
155
+ * },
156
+ * {
157
+ * type: "human" as const,
158
+ * content: "I love programming.",
159
+ * },
160
+ * ];
161
+ * const result = await llm.invoke(messages);
162
+ * console.log(result);
163
+ * ```
164
+ *
165
+ * ```txt
166
+ * AIMessage {
167
+ * "id": "msg_01QDpd78JUHpRP6bRRNyzbW3",
168
+ * "content": "Here's the translation to French:\n\nJ'adore la programmation.",
169
+ * "response_metadata": {
170
+ * "id": "msg_01QDpd78JUHpRP6bRRNyzbW3",
171
+ * "model": "claude-3-5-sonnet-20240620",
172
+ * "stop_reason": "end_turn",
173
+ * "stop_sequence": null,
174
+ * "usage": {
175
+ * "input_tokens": 25,
176
+ * "output_tokens": 19
177
+ * },
178
+ * "type": "message",
179
+ * "role": "assistant"
180
+ * },
181
+ * "usage_metadata": {
182
+ * "input_tokens": 25,
183
+ * "output_tokens": 19,
184
+ * "total_tokens": 44
185
+ * }
186
+ * }
187
+ * ```
188
+ * </details>
189
+ *
190
+ * <br />
191
+ *
192
+ * <details>
193
+ * <summary><strong>Streaming Chunks</strong></summary>
194
+ *
195
+ * ```typescript
196
+ * for await (const chunk of await llm.stream(messages)) {
197
+ * console.log(chunk);
198
+ * }
199
+ * ```
200
+ *
201
+ * ```txt
202
+ * AIMessageChunk {
203
+ * "id": "msg_01N8MwoYxiKo9w4chE4gXUs4",
204
+ * "content": "",
205
+ * "additional_kwargs": {
206
+ * "id": "msg_01N8MwoYxiKo9w4chE4gXUs4",
207
+ * "type": "message",
208
+ * "role": "assistant",
209
+ * "model": "claude-3-5-sonnet-20240620"
210
+ * },
211
+ * "usage_metadata": {
212
+ * "input_tokens": 25,
213
+ * "output_tokens": 1,
214
+ * "total_tokens": 26
215
+ * }
216
+ * }
217
+ * AIMessageChunk {
218
+ * "content": "",
219
+ * }
220
+ * AIMessageChunk {
221
+ * "content": "Here",
222
+ * }
223
+ * AIMessageChunk {
224
+ * "content": "'s",
225
+ * }
226
+ * AIMessageChunk {
227
+ * "content": " the translation to",
228
+ * }
229
+ * AIMessageChunk {
230
+ * "content": " French:\n\nJ",
231
+ * }
232
+ * AIMessageChunk {
233
+ * "content": "'adore la programmation",
234
+ * }
235
+ * AIMessageChunk {
236
+ * "content": ".",
237
+ * }
238
+ * AIMessageChunk {
239
+ * "content": "",
240
+ * "additional_kwargs": {
241
+ * "stop_reason": "end_turn",
242
+ * "stop_sequence": null
243
+ * },
244
+ * "usage_metadata": {
245
+ * "input_tokens": 0,
246
+ * "output_tokens": 19,
247
+ * "total_tokens": 19
248
+ * }
249
+ * }
250
+ * ```
251
+ * </details>
252
+ *
253
+ * <br />
254
+ *
255
+ * <details>
256
+ * <summary><strong>Aggregate Streamed Chunks</strong></summary>
257
+ *
258
+ * ```typescript
259
+ * import { AIMessageChunk } from '@langchain/core/messages';
260
+ * import { concat } from '@langchain/core/utils/stream';
261
+ *
262
+ * const stream = await llm.stream(messages);
263
+ * let full: AIMessageChunk | undefined;
264
+ * for await (const chunk of stream) {
265
+ * full = !full ? chunk : concat(full, chunk);
266
+ * }
267
+ * console.log(full);
268
+ * ```
269
+ *
270
+ * ```txt
271
+ * AIMessageChunk {
272
+ * "id": "msg_01SBTb5zSGXfjUc7yQ8EKEEA",
273
+ * "content": "Here's the translation to French:\n\nJ'adore la programmation.",
274
+ * "additional_kwargs": {
275
+ * "id": "msg_01SBTb5zSGXfjUc7yQ8EKEEA",
276
+ * "type": "message",
277
+ * "role": "assistant",
278
+ * "model": "claude-3-5-sonnet-20240620",
279
+ * "stop_reason": "end_turn",
280
+ * "stop_sequence": null
281
+ * },
282
+ * "usage_metadata": {
283
+ * "input_tokens": 25,
284
+ * "output_tokens": 20,
285
+ * "total_tokens": 45
286
+ * }
287
+ * }
288
+ * ```
289
+ * </details>
290
+ *
291
+ * <br />
292
+ *
293
+ * <details>
294
+ * <summary><strong>Bind tools</strong></summary>
295
+ *
296
+ * ```typescript
297
+ * import { z } from 'zod';
298
+ *
299
+ * const GetWeather = {
300
+ * name: "GetWeather",
301
+ * description: "Get the current weather in a given location",
302
+ * schema: z.object({
303
+ * location: z.string().describe("The city and state, e.g. San Francisco, CA")
304
+ * }),
305
+ * }
306
+ *
307
+ * const GetPopulation = {
308
+ * name: "GetPopulation",
309
+ * description: "Get the current population in a given location",
310
+ * schema: z.object({
311
+ * location: z.string().describe("The city and state, e.g. San Francisco, CA")
312
+ * }),
313
+ * }
314
+ *
315
+ * const llmWithTools = llm.bindTools([GetWeather, GetPopulation]);
316
+ * const aiMsg = await llmWithTools.invoke(
317
+ * "Which city is hotter today and which is bigger: LA or NY?"
318
+ * );
319
+ * console.log(aiMsg.tool_calls);
320
+ * ```
321
+ *
322
+ * ```txt
323
+ * [
324
+ * {
325
+ * name: 'GetWeather',
326
+ * args: { location: 'Los Angeles, CA' },
327
+ * id: 'toolu_01WjW3Dann6BPJVtLhovdBD5',
328
+ * type: 'tool_call'
329
+ * },
330
+ * {
331
+ * name: 'GetWeather',
332
+ * args: { location: 'New York, NY' },
333
+ * id: 'toolu_01G6wfJgqi5zRmJomsmkyZXe',
334
+ * type: 'tool_call'
335
+ * },
336
+ * {
337
+ * name: 'GetPopulation',
338
+ * args: { location: 'Los Angeles, CA' },
339
+ * id: 'toolu_0165qYWBA2VFyUst5RA18zew',
340
+ * type: 'tool_call'
341
+ * },
342
+ * {
343
+ * name: 'GetPopulation',
344
+ * args: { location: 'New York, NY' },
345
+ * id: 'toolu_01PGNyP33vxr13tGqr7i3rDo',
346
+ * type: 'tool_call'
347
+ * }
348
+ * ]
349
+ * ```
350
+ * </details>
351
+ *
352
+ * <br />
353
+ *
354
+ * <details>
355
+ * <summary><strong>Structured Output</strong></summary>
356
+ *
357
+ * ```typescript
358
+ * import { z } from 'zod';
359
+ *
360
+ * const Joke = z.object({
361
+ * setup: z.string().describe("The setup of the joke"),
362
+ * punchline: z.string().describe("The punchline to the joke"),
363
+ * rating: z.number().optional().describe("How funny the joke is, from 1 to 10")
364
+ * }).describe('Joke to tell user.');
365
+ *
366
+ * const structuredLlm = llm.withStructuredOutput(Joke);
367
+ * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
368
+ * console.log(jokeResult);
369
+ * ```
370
+ *
371
+ * ```txt
372
+ * {
373
+ * setup: "Why don't cats play poker in the jungle?",
374
+ * punchline: 'Too many cheetahs!',
375
+ * rating: 7
376
+ * }
377
+ * ```
378
+ * </details>
379
+ *
380
+ * <br />
381
+ *
382
+ * <details>
383
+ * <summary><strong>Multimodal</strong></summary>
384
+ *
104
385
  * ```typescript
105
- * import { ChatAnthropic } from "@langchain/anthropic";
386
+ * import { HumanMessage } from '@langchain/core/messages';
387
+ *
388
+ * const imageUrl = "https://example.com/image.jpg";
389
+ * const imageData = await fetch(imageUrl).then(res => res.arrayBuffer());
390
+ * const base64Image = Buffer.from(imageData).toString('base64');
106
391
  *
107
- * const model = new ChatAnthropic({
108
- * temperature: 0.9,
109
- * apiKey: 'YOUR-API-KEY',
392
+ * const message = new HumanMessage({
393
+ * content: [
394
+ * { type: "text", text: "describe the weather in this image" },
395
+ * {
396
+ * type: "image_url",
397
+ * image_url: { url: `data:image/jpeg;base64,${base64Image}` },
398
+ * },
399
+ * ]
110
400
  * });
111
- * const res = await model.invoke({ input: 'Hello!' });
112
- * console.log(res);
401
+ *
402
+ * const imageDescriptionAiMsg = await llm.invoke([message]);
403
+ * console.log(imageDescriptionAiMsg.content);
404
+ * ```
405
+ *
406
+ * ```txt
407
+ * The weather in this image appears to be beautiful and clear. The sky is a vibrant blue with scattered white clouds, suggesting a sunny and pleasant day. The clouds are wispy and light, indicating calm conditions without any signs of storms or heavy weather. The bright green grass on the rolling hills looks lush and well-watered, which could mean recent rainfall or good growing conditions. Overall, the scene depicts a perfect spring or early summer day with mild temperatures, plenty of sunshine, and gentle breezes - ideal weather for enjoying the outdoors or for plant growth.
113
408
  * ```
409
+ * </details>
410
+ *
411
+ * <br />
412
+ *
413
+ * <details>
414
+ * <summary><strong>Usage Metadata</strong></summary>
415
+ *
416
+ * ```typescript
417
+ * const aiMsgForMetadata = await llm.invoke(messages);
418
+ * console.log(aiMsgForMetadata.usage_metadata);
419
+ * ```
420
+ *
421
+ * ```txt
422
+ * { input_tokens: 25, output_tokens: 19, total_tokens: 44 }
423
+ * ```
424
+ * </details>
425
+ *
426
+ * <br />
427
+ *
428
+ * <details>
429
+ * <summary><strong>Stream Usage Metadata</strong></summary>
430
+ *
431
+ * ```typescript
432
+ * const streamForMetadata = await llm.stream(
433
+ * messages,
434
+ * {
435
+ * streamUsage: true
436
+ * }
437
+ * );
438
+ * let fullForMetadata: AIMessageChunk | undefined;
439
+ * for await (const chunk of streamForMetadata) {
440
+ * fullForMetadata = !fullForMetadata ? chunk : concat(fullForMetadata, chunk);
441
+ * }
442
+ * console.log(fullForMetadata?.usage_metadata);
443
+ * ```
444
+ *
445
+ * ```txt
446
+ * { input_tokens: 25, output_tokens: 20, total_tokens: 45 }
447
+ * ```
448
+ * </details>
449
+ *
450
+ * <br />
451
+ *
452
+ * <details>
453
+ * <summary><strong>Response Metadata</strong></summary>
454
+ *
455
+ * ```typescript
456
+ * const aiMsgForResponseMetadata = await llm.invoke(messages);
457
+ * console.log(aiMsgForResponseMetadata.response_metadata);
458
+ * ```
459
+ *
460
+ * ```txt
461
+ * {
462
+ * id: 'msg_01STxeQxJmp4sCSpioD6vK3L',
463
+ * model: 'claude-3-5-sonnet-20240620',
464
+ * stop_reason: 'end_turn',
465
+ * stop_sequence: null,
466
+ * usage: { input_tokens: 25, output_tokens: 19 },
467
+ * type: 'message',
468
+ * role: 'assistant'
469
+ * }
470
+ * ```
471
+ * </details>
472
+ *
473
+ * <br />
114
474
  */
115
475
  export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCallOptions = ChatAnthropicCallOptions> extends BaseChatModel<CallOptions, AIMessageChunk> implements AnthropicInput {
116
476
  static lc_name(): string;
@@ -38,28 +38,388 @@ function extractToken(chunk) {
38
38
  return undefined;
39
39
  }
40
40
  /**
41
- * Wrapper around Anthropic large language models.
42
- *
43
- * To use this package, you should have an Anthropic API key set as an
44
- * environment variable named `ANTHROPIC_API_KEY` or passed
45
- * into the constructor.
46
- *
47
- * @remarks
48
- * Any parameters that are valid to be passed to {@link
49
- * https://console.anthropic.com/docs/api/reference |
50
- * `anthropic.messages`} can be passed through {@link invocationKwargs},
51
- * even if not explicitly available on this class.
52
- * @example
41
+ * Anthropic chat model integration.
42
+ *
43
+ * Setup:
44
+ * Install `@langchain/anthropic` and set the environment variable `ANTHROPIC_API_KEY`.
45
+ *
46
+ * ```bash
47
+ * npm install @langchain/anthropic
48
+ * export ANTHROPIC_API_KEY="your-api-key"
49
+ * ```
50
+ *
51
+ * ## [Constructor args](/classes/langchain_anthropic.ChatAnthropic.html#constructor)
52
+ *
53
+ * ## [Runtime args](/interfaces/langchain_anthropic.ChatAnthropicCallOptions.html)
54
+ *
55
+ * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
56
+ * They can also be passed via `.bind`, or the second arg in `.bindTools`, as shown in the examples below:
57
+ *
53
58
  * ```typescript
54
- * import { ChatAnthropic } from "@langchain/anthropic";
59
+ * // When calling `.bind`, call options should be passed via the first argument
60
+ * const llmWithArgsBound = llm.bind({
61
+ * stop: ["\n"],
62
+ * tools: [...],
63
+ * });
64
+ *
65
+ * // When calling `.bindTools`, call options should be passed via the second argument
66
+ * const llmWithTools = llm.bindTools(
67
+ * [...],
68
+ * {
69
+ * tool_choice: "auto",
70
+ * }
71
+ * );
72
+ * ```
73
+ *
74
+ * ## Examples
75
+ *
76
+ * <details open>
77
+ * <summary><strong>Instantiate</strong></summary>
55
78
  *
56
- * const model = new ChatAnthropic({
57
- * temperature: 0.9,
58
- * apiKey: 'YOUR-API-KEY',
79
+ * ```typescript
80
+ * import { ChatAnthropic } from '@langchain/anthropic';
81
+ *
82
+ * const llm = new ChatAnthropic({
83
+ * model: "claude-3-5-sonnet-20240620",
84
+ * temperature: 0,
85
+ * maxTokens: undefined,
86
+ * maxRetries: 2,
87
+ * // apiKey: "...",
88
+ * // baseUrl: "...",
89
+ * // other params...
59
90
  * });
60
- * const res = await model.invoke({ input: 'Hello!' });
61
- * console.log(res);
62
91
  * ```
92
+ * </details>
93
+ *
94
+ * <br />
95
+ *
96
+ * <details>
97
+ * <summary><strong>Invoking</strong></summary>
98
+ *
99
+ * ```typescript
100
+ * const messages = [
101
+ * {
102
+ * type: "system" as const,
103
+ * content: "You are a helpful translator. Translate the user sentence to French.",
104
+ * },
105
+ * {
106
+ * type: "human" as const,
107
+ * content: "I love programming.",
108
+ * },
109
+ * ];
110
+ * const result = await llm.invoke(messages);
111
+ * console.log(result);
112
+ * ```
113
+ *
114
+ * ```txt
115
+ * AIMessage {
116
+ * "id": "msg_01QDpd78JUHpRP6bRRNyzbW3",
117
+ * "content": "Here's the translation to French:\n\nJ'adore la programmation.",
118
+ * "response_metadata": {
119
+ * "id": "msg_01QDpd78JUHpRP6bRRNyzbW3",
120
+ * "model": "claude-3-5-sonnet-20240620",
121
+ * "stop_reason": "end_turn",
122
+ * "stop_sequence": null,
123
+ * "usage": {
124
+ * "input_tokens": 25,
125
+ * "output_tokens": 19
126
+ * },
127
+ * "type": "message",
128
+ * "role": "assistant"
129
+ * },
130
+ * "usage_metadata": {
131
+ * "input_tokens": 25,
132
+ * "output_tokens": 19,
133
+ * "total_tokens": 44
134
+ * }
135
+ * }
136
+ * ```
137
+ * </details>
138
+ *
139
+ * <br />
140
+ *
141
+ * <details>
142
+ * <summary><strong>Streaming Chunks</strong></summary>
143
+ *
144
+ * ```typescript
145
+ * for await (const chunk of await llm.stream(messages)) {
146
+ * console.log(chunk);
147
+ * }
148
+ * ```
149
+ *
150
+ * ```txt
151
+ * AIMessageChunk {
152
+ * "id": "msg_01N8MwoYxiKo9w4chE4gXUs4",
153
+ * "content": "",
154
+ * "additional_kwargs": {
155
+ * "id": "msg_01N8MwoYxiKo9w4chE4gXUs4",
156
+ * "type": "message",
157
+ * "role": "assistant",
158
+ * "model": "claude-3-5-sonnet-20240620"
159
+ * },
160
+ * "usage_metadata": {
161
+ * "input_tokens": 25,
162
+ * "output_tokens": 1,
163
+ * "total_tokens": 26
164
+ * }
165
+ * }
166
+ * AIMessageChunk {
167
+ * "content": "",
168
+ * }
169
+ * AIMessageChunk {
170
+ * "content": "Here",
171
+ * }
172
+ * AIMessageChunk {
173
+ * "content": "'s",
174
+ * }
175
+ * AIMessageChunk {
176
+ * "content": " the translation to",
177
+ * }
178
+ * AIMessageChunk {
179
+ * "content": " French:\n\nJ",
180
+ * }
181
+ * AIMessageChunk {
182
+ * "content": "'adore la programmation",
183
+ * }
184
+ * AIMessageChunk {
185
+ * "content": ".",
186
+ * }
187
+ * AIMessageChunk {
188
+ * "content": "",
189
+ * "additional_kwargs": {
190
+ * "stop_reason": "end_turn",
191
+ * "stop_sequence": null
192
+ * },
193
+ * "usage_metadata": {
194
+ * "input_tokens": 0,
195
+ * "output_tokens": 19,
196
+ * "total_tokens": 19
197
+ * }
198
+ * }
199
+ * ```
200
+ * </details>
201
+ *
202
+ * <br />
203
+ *
204
+ * <details>
205
+ * <summary><strong>Aggregate Streamed Chunks</strong></summary>
206
+ *
207
+ * ```typescript
208
+ * import { AIMessageChunk } from '@langchain/core/messages';
209
+ * import { concat } from '@langchain/core/utils/stream';
210
+ *
211
+ * const stream = await llm.stream(messages);
212
+ * let full: AIMessageChunk | undefined;
213
+ * for await (const chunk of stream) {
214
+ * full = !full ? chunk : concat(full, chunk);
215
+ * }
216
+ * console.log(full);
217
+ * ```
218
+ *
219
+ * ```txt
220
+ * AIMessageChunk {
221
+ * "id": "msg_01SBTb5zSGXfjUc7yQ8EKEEA",
222
+ * "content": "Here's the translation to French:\n\nJ'adore la programmation.",
223
+ * "additional_kwargs": {
224
+ * "id": "msg_01SBTb5zSGXfjUc7yQ8EKEEA",
225
+ * "type": "message",
226
+ * "role": "assistant",
227
+ * "model": "claude-3-5-sonnet-20240620",
228
+ * "stop_reason": "end_turn",
229
+ * "stop_sequence": null
230
+ * },
231
+ * "usage_metadata": {
232
+ * "input_tokens": 25,
233
+ * "output_tokens": 20,
234
+ * "total_tokens": 45
235
+ * }
236
+ * }
237
+ * ```
238
+ * </details>
239
+ *
240
+ * <br />
241
+ *
242
+ * <details>
243
+ * <summary><strong>Bind tools</strong></summary>
244
+ *
245
+ * ```typescript
246
+ * import { z } from 'zod';
247
+ *
248
+ * const GetWeather = {
249
+ * name: "GetWeather",
250
+ * description: "Get the current weather in a given location",
251
+ * schema: z.object({
252
+ * location: z.string().describe("The city and state, e.g. San Francisco, CA")
253
+ * }),
254
+ * }
255
+ *
256
+ * const GetPopulation = {
257
+ * name: "GetPopulation",
258
+ * description: "Get the current population in a given location",
259
+ * schema: z.object({
260
+ * location: z.string().describe("The city and state, e.g. San Francisco, CA")
261
+ * }),
262
+ * }
263
+ *
264
+ * const llmWithTools = llm.bindTools([GetWeather, GetPopulation]);
265
+ * const aiMsg = await llmWithTools.invoke(
266
+ * "Which city is hotter today and which is bigger: LA or NY?"
267
+ * );
268
+ * console.log(aiMsg.tool_calls);
269
+ * ```
270
+ *
271
+ * ```txt
272
+ * [
273
+ * {
274
+ * name: 'GetWeather',
275
+ * args: { location: 'Los Angeles, CA' },
276
+ * id: 'toolu_01WjW3Dann6BPJVtLhovdBD5',
277
+ * type: 'tool_call'
278
+ * },
279
+ * {
280
+ * name: 'GetWeather',
281
+ * args: { location: 'New York, NY' },
282
+ * id: 'toolu_01G6wfJgqi5zRmJomsmkyZXe',
283
+ * type: 'tool_call'
284
+ * },
285
+ * {
286
+ * name: 'GetPopulation',
287
+ * args: { location: 'Los Angeles, CA' },
288
+ * id: 'toolu_0165qYWBA2VFyUst5RA18zew',
289
+ * type: 'tool_call'
290
+ * },
291
+ * {
292
+ * name: 'GetPopulation',
293
+ * args: { location: 'New York, NY' },
294
+ * id: 'toolu_01PGNyP33vxr13tGqr7i3rDo',
295
+ * type: 'tool_call'
296
+ * }
297
+ * ]
298
+ * ```
299
+ * </details>
300
+ *
301
+ * <br />
302
+ *
303
+ * <details>
304
+ * <summary><strong>Structured Output</strong></summary>
305
+ *
306
+ * ```typescript
307
+ * import { z } from 'zod';
308
+ *
309
+ * const Joke = z.object({
310
+ * setup: z.string().describe("The setup of the joke"),
311
+ * punchline: z.string().describe("The punchline to the joke"),
312
+ * rating: z.number().optional().describe("How funny the joke is, from 1 to 10")
313
+ * }).describe('Joke to tell user.');
314
+ *
315
+ * const structuredLlm = llm.withStructuredOutput(Joke);
316
+ * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
317
+ * console.log(jokeResult);
318
+ * ```
319
+ *
320
+ * ```txt
321
+ * {
322
+ * setup: "Why don't cats play poker in the jungle?",
323
+ * punchline: 'Too many cheetahs!',
324
+ * rating: 7
325
+ * }
326
+ * ```
327
+ * </details>
328
+ *
329
+ * <br />
330
+ *
331
+ * <details>
332
+ * <summary><strong>Multimodal</strong></summary>
333
+ *
334
+ * ```typescript
335
+ * import { HumanMessage } from '@langchain/core/messages';
336
+ *
337
+ * const imageUrl = "https://example.com/image.jpg";
338
+ * const imageData = await fetch(imageUrl).then(res => res.arrayBuffer());
339
+ * const base64Image = Buffer.from(imageData).toString('base64');
340
+ *
341
+ * const message = new HumanMessage({
342
+ * content: [
343
+ * { type: "text", text: "describe the weather in this image" },
344
+ * {
345
+ * type: "image_url",
346
+ * image_url: { url: `data:image/jpeg;base64,${base64Image}` },
347
+ * },
348
+ * ]
349
+ * });
350
+ *
351
+ * const imageDescriptionAiMsg = await llm.invoke([message]);
352
+ * console.log(imageDescriptionAiMsg.content);
353
+ * ```
354
+ *
355
+ * ```txt
356
+ * The weather in this image appears to be beautiful and clear. The sky is a vibrant blue with scattered white clouds, suggesting a sunny and pleasant day. The clouds are wispy and light, indicating calm conditions without any signs of storms or heavy weather. The bright green grass on the rolling hills looks lush and well-watered, which could mean recent rainfall or good growing conditions. Overall, the scene depicts a perfect spring or early summer day with mild temperatures, plenty of sunshine, and gentle breezes - ideal weather for enjoying the outdoors or for plant growth.
357
+ * ```
358
+ * </details>
359
+ *
360
+ * <br />
361
+ *
362
+ * <details>
363
+ * <summary><strong>Usage Metadata</strong></summary>
364
+ *
365
+ * ```typescript
366
+ * const aiMsgForMetadata = await llm.invoke(messages);
367
+ * console.log(aiMsgForMetadata.usage_metadata);
368
+ * ```
369
+ *
370
+ * ```txt
371
+ * { input_tokens: 25, output_tokens: 19, total_tokens: 44 }
372
+ * ```
373
+ * </details>
374
+ *
375
+ * <br />
376
+ *
377
+ * <details>
378
+ * <summary><strong>Stream Usage Metadata</strong></summary>
379
+ *
380
+ * ```typescript
381
+ * const streamForMetadata = await llm.stream(
382
+ * messages,
383
+ * {
384
+ * streamUsage: true
385
+ * }
386
+ * );
387
+ * let fullForMetadata: AIMessageChunk | undefined;
388
+ * for await (const chunk of streamForMetadata) {
389
+ * fullForMetadata = !fullForMetadata ? chunk : concat(fullForMetadata, chunk);
390
+ * }
391
+ * console.log(fullForMetadata?.usage_metadata);
392
+ * ```
393
+ *
394
+ * ```txt
395
+ * { input_tokens: 25, output_tokens: 20, total_tokens: 45 }
396
+ * ```
397
+ * </details>
398
+ *
399
+ * <br />
400
+ *
401
+ * <details>
402
+ * <summary><strong>Response Metadata</strong></summary>
403
+ *
404
+ * ```typescript
405
+ * const aiMsgForResponseMetadata = await llm.invoke(messages);
406
+ * console.log(aiMsgForResponseMetadata.response_metadata);
407
+ * ```
408
+ *
409
+ * ```txt
410
+ * {
411
+ * id: 'msg_01STxeQxJmp4sCSpioD6vK3L',
412
+ * model: 'claude-3-5-sonnet-20240620',
413
+ * stop_reason: 'end_turn',
414
+ * stop_sequence: null,
415
+ * usage: { input_tokens: 25, output_tokens: 19 },
416
+ * type: 'message',
417
+ * role: 'assistant'
418
+ * }
419
+ * ```
420
+ * </details>
421
+ *
422
+ * <br />
63
423
  */
64
424
  export class ChatAnthropicMessages extends BaseChatModel {
65
425
  static lc_name() {
@@ -346,20 +706,11 @@ export class ChatAnthropicMessages extends BaseChatModel {
346
706
  }
347
707
  /** @ignore */
348
708
  async _generateNonStreaming(messages, params, requestOptions) {
349
- const options = params.tools !== undefined
350
- ? {
351
- ...requestOptions,
352
- headers: {
353
- ...requestOptions.headers,
354
- "anthropic-beta": "tools-2024-04-04",
355
- },
356
- }
357
- : requestOptions;
358
709
  const response = await this.completionWithRetry({
359
710
  ...params,
360
711
  stream: false,
361
712
  ..._formatMessagesForAnthropic(messages),
362
- }, options);
713
+ }, requestOptions);
363
714
  const { content, ...additionalKwargs } = response;
364
715
  const generations = anthropicResponseToChatMessages(content, additionalKwargs);
365
716
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@langchain/anthropic",
3
- "version": "0.2.13",
3
+ "version": "0.2.14",
4
4
  "description": "Anthropic integrations for LangChain.js",
5
5
  "type": "module",
6
6
  "engines": {