@langchain/anthropic 0.1.19 → 0.1.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -391,6 +391,17 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
391
391
  this.streaming = fields?.streaming ?? false;
392
392
  this.clientOptions = fields?.clientOptions ?? {};
393
393
  }
394
+ getLsParams(options) {
395
+ const params = this.invocationParams(options);
396
+ return {
397
+ ls_provider: "anthropic",
398
+ ls_model_name: this.model,
399
+ ls_model_type: "chat",
400
+ ls_temperature: params.temperature ?? undefined,
401
+ ls_max_tokens: params.max_tokens ?? undefined,
402
+ ls_stop: options.stop,
403
+ };
404
+ }
394
405
  /**
395
406
  * Formats LangChain StructuredTools to AnthropicTools.
396
407
  *
@@ -3,7 +3,7 @@ import type { Stream } from "@anthropic-ai/sdk/streaming";
3
3
  import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
4
4
  import { AIMessageChunk, type BaseMessage } from "@langchain/core/messages";
5
5
  import { ChatGeneration, ChatGenerationChunk, type ChatResult } from "@langchain/core/outputs";
6
- import { BaseChatModel, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
6
+ import { BaseChatModel, LangSmithParams, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
7
7
  import { StructuredOutputMethodOptions, type BaseLanguageModelCallOptions, BaseLanguageModelInput } from "@langchain/core/language_models/base";
8
8
  import { StructuredToolInterface } from "@langchain/core/tools";
9
9
  import { Runnable, RunnableInterface } from "@langchain/core/runnables";
@@ -142,6 +142,7 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
142
142
  protected batchClient: Anthropic;
143
143
  protected streamingClient: Anthropic;
144
144
  constructor(fields?: Partial<AnthropicInput> & BaseChatModelParams);
145
+ protected getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
145
146
  /**
146
147
  * Formats LangChain StructuredTools to AnthropicTools.
147
148
  *
@@ -192,10 +193,7 @@ export declare class ChatAnthropicMessages<CallOptions extends ChatAnthropicCall
192
193
  model: string;
193
194
  stop_reason: "max_tokens" | "stop_sequence" | "end_turn" | null;
194
195
  stop_sequence: string | null;
195
- usage: Anthropic.Messages.Usage; /** A list of strings upon which to stop generating.
196
- * You probably want `["\n\nHuman:"]`, as that's the cue for
197
- * the next turn in the dialog agent.
198
- */
196
+ usage: Anthropic.Messages.Usage;
199
197
  };
200
198
  }>;
201
199
  /** @ignore */
@@ -387,6 +387,17 @@ export class ChatAnthropicMessages extends BaseChatModel {
387
387
  this.streaming = fields?.streaming ?? false;
388
388
  this.clientOptions = fields?.clientOptions ?? {};
389
389
  }
390
+ getLsParams(options) {
391
+ const params = this.invocationParams(options);
392
+ return {
393
+ ls_provider: "anthropic",
394
+ ls_model_name: this.model,
395
+ ls_model_type: "chat",
396
+ ls_temperature: params.temperature ?? undefined,
397
+ ls_max_tokens: params.max_tokens ?? undefined,
398
+ ls_stop: options.stop,
399
+ };
400
+ }
390
401
  /**
391
402
  * Formats LangChain StructuredTools to AnthropicTools.
392
403
  *
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@langchain/anthropic",
3
- "version": "0.1.19",
3
+ "version": "0.1.20",
4
4
  "description": "Anthropic integrations for LangChain.js",
5
5
  "type": "module",
6
6
  "engines": {
@@ -40,7 +40,7 @@
40
40
  "license": "MIT",
41
41
  "dependencies": {
42
42
  "@anthropic-ai/sdk": "^0.21.0",
43
- "@langchain/core": "<0.3.0 || >0.1.0",
43
+ "@langchain/core": ">0.1.56 <0.3.0",
44
44
  "fast-xml-parser": "^4.3.5",
45
45
  "zod": "^3.22.4",
46
46
  "zod-to-json-schema": "^3.22.4"