@langchain/anthropic 0.3.2 → 0.3.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/chat_models.cjs CHANGED
@@ -690,7 +690,7 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
  const newToolCallChunk = (0, tools_js_1.extractToolCallChunk)(chunk);
  // Extract the text content token for text field and runManager.
  const token = extractToken(chunk);
- yield new outputs_1.ChatGenerationChunk({
+ const generationChunk = new outputs_1.ChatGenerationChunk({
  message: new messages_1.AIMessageChunk({
  // Just yield chunk as it is and tool_use will be concat by BaseChatModel._generateUncached().
  content: chunk.content,
@@ -702,9 +702,8 @@ class ChatAnthropicMessages extends chat_models_1.BaseChatModel {
  }),
  text: token ?? "",
  });
- if (token) {
- await runManager?.handleLLMNewToken(token);
- }
+ yield generationChunk;
+ await runManager?.handleLLMNewToken(token ?? "", undefined, undefined, undefined, undefined, { chunk: generationChunk });
  }
  }
  /** @ignore */
package/dist/chat_models.js CHANGED
@@ -687,7 +687,7 @@ export class ChatAnthropicMessages extends BaseChatModel {
  const newToolCallChunk = extractToolCallChunk(chunk);
  // Extract the text content token for text field and runManager.
  const token = extractToken(chunk);
- yield new ChatGenerationChunk({
+ const generationChunk = new ChatGenerationChunk({
  message: new AIMessageChunk({
  // Just yield chunk as it is and tool_use will be concat by BaseChatModel._generateUncached().
  content: chunk.content,
@@ -699,9 +699,8 @@ export class ChatAnthropicMessages extends BaseChatModel {
  }),
  text: token ?? "",
  });
- if (token) {
- await runManager?.handleLLMNewToken(token);
- }
+ yield generationChunk;
+ await runManager?.handleLLMNewToken(token ?? "", undefined, undefined, undefined, undefined, { chunk: generationChunk });
  }
  }
  /** @ignore */
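The two files above (the CJS and ESM builds of `chat_models`) receive the same change: each streamed chunk is now built as a `ChatGenerationChunk`, yielded, and always forwarded to `runManager.handleLLMNewToken` with the chunk attached as the final `fields` argument, even when there is no text token (e.g. `tool_use` deltas), rather than only firing the callback when a text token was present. Below is a minimal sketch of how a callback can observe this; the model name is a placeholder, credentials are assumed to come from `ANTHROPIC_API_KEY`, and the handler signature is assumed to follow `@langchain/core`'s `handleLLMNewToken(token, idx, runId, parentRunId, tags, fields)`.

```ts
import { ChatAnthropic } from "@langchain/anthropic";

// Placeholder model name; the API key is read from ANTHROPIC_API_KEY.
const model = new ChatAnthropic({ model: "claude-3-5-sonnet-20240620" });

const stream = await model.stream("Tell me a joke about cats", {
  callbacks: [
    {
      // Assumption based on the hunks above: `token` may now be an empty
      // string for non-text chunks, and `fields?.chunk` carries the full
      // ChatGenerationChunk that was yielded.
      handleLLMNewToken(token, _idx, _runId, _parentRunId, _tags, fields) {
        console.log("token:", JSON.stringify(token), "chunk:", fields?.chunk);
      },
    },
  ],
});

for await (const chunk of stream) {
  // tool_use chunks are concatenated downstream by BaseChatModel, per the
  // inline comment in the diff.
  console.log(chunk.content);
}
```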
package/dist/index.cjs CHANGED
@@ -14,7 +14,7 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
  for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports._convertMessagesToAnthropicPayload = void 0;
+ exports.convertPromptToAnthropic = void 0;
  __exportStar(require("./chat_models.cjs"), exports);
- var message_inputs_js_1 = require("./utils/message_inputs.cjs");
- Object.defineProperty(exports, "_convertMessagesToAnthropicPayload", { enumerable: true, get: function () { return message_inputs_js_1._convertMessagesToAnthropicPayload; } });
+ var prompts_js_1 = require("./utils/prompts.cjs");
+ Object.defineProperty(exports, "convertPromptToAnthropic", { enumerable: true, get: function () { return prompts_js_1.convertPromptToAnthropic; } });
package/dist/index.d.ts CHANGED
@@ -1,2 +1,2 @@
  export * from "./chat_models.js";
- export { _convertMessagesToAnthropicPayload } from "./utils/message_inputs.js";
+ export { convertPromptToAnthropic } from "./utils/prompts.js";
package/dist/index.js CHANGED
@@ -1,2 +1,2 @@
  export * from "./chat_models.js";
- export { _convertMessagesToAnthropicPayload } from "./utils/message_inputs.js";
+ export { convertPromptToAnthropic } from "./utils/prompts.js";
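The entrypoints now stop re-exporting the internal `_convertMessagesToAnthropicPayload` helper and instead expose the new `convertPromptToAnthropic` from `./utils/prompts.js`, added below. A hedged usage sketch follows, using an inline `ChatPromptTemplate` rather than a hub prompt; the prompt text and topic are illustrative only.

```ts
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { convertPromptToAnthropic } from "@langchain/anthropic";

// Any formatted LangChain prompt works; a hub prompt (see the JSDoc in the
// new files below) is converted the same way.
const prompt = ChatPromptTemplate.fromMessages([
  ["system", "You are a comedian that tells short, clean jokes."],
  ["human", "Tell me a joke about {topic}"],
]);
const formattedPrompt = await prompt.invoke({ topic: "cats" });

// Returns a partial Anthropic Messages payload; callers still supply
// required fields such as `model` and `max_tokens` themselves.
const { system, messages } = convertPromptToAnthropic(formattedPrompt);
console.log(system, messages);
```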
package/dist/utils/prompts.cjs ADDED
@@ -0,0 +1,49 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.convertPromptToAnthropic = void 0;
+ const message_inputs_js_1 = require("./message_inputs.cjs");
+ /**
+ * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
+ * a format expected by Anthropic's JS SDK.
+ *
+ * Requires the "@langchain/anthropic" package to be installed in addition
+ * to the Anthropic SDK.
+ *
+ * @example
+ * ```ts
+ * import { convertPromptToAnthropic } from "langsmith/utils/hub/anthropic";
+ * import { pull } from "langchain/hub";
+ *
+ * import Anthropic from '@anthropic-ai/sdk';
+ *
+ * const prompt = await pull("jacob/joke-generator");
+ * const formattedPrompt = await prompt.invoke({
+ *   topic: "cats",
+ * });
+ *
+ * const { system, messages } = convertPromptToAnthropic(formattedPrompt);
+ *
+ * const anthropicClient = new Anthropic({
+ *   apiKey: 'your_api_key',
+ * });
+ *
+ * const anthropicResponse = await anthropicClient.messages.create({
+ *   model: "claude-3-5-sonnet-20240620",
+ *   max_tokens: 1024,
+ *   stream: false,
+ *   system,
+ *   messages,
+ * });
+ * ```
+ * @param formattedPrompt
+ * @returns A partial Anthropic payload.
+ */
+ function convertPromptToAnthropic(formattedPrompt) {
+ const messages = formattedPrompt.toChatMessages();
+ const anthropicBody = (0, message_inputs_js_1._convertMessagesToAnthropicPayload)(messages);
+ if (anthropicBody.messages === undefined) {
+ anthropicBody.messages = [];
+ }
+ return anthropicBody;
+ }
+ exports.convertPromptToAnthropic = convertPromptToAnthropic;
package/dist/utils/prompts.d.ts ADDED
@@ -0,0 +1,39 @@
+ import type { BasePromptValue } from "@langchain/core/prompt_values";
+ import Anthropic from "@anthropic-ai/sdk";
+ /**
+ * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
+ * a format expected by Anthropic's JS SDK.
+ *
+ * Requires the "@langchain/anthropic" package to be installed in addition
+ * to the Anthropic SDK.
+ *
+ * @example
+ * ```ts
+ * import { convertPromptToAnthropic } from "langsmith/utils/hub/anthropic";
+ * import { pull } from "langchain/hub";
+ *
+ * import Anthropic from '@anthropic-ai/sdk';
+ *
+ * const prompt = await pull("jacob/joke-generator");
+ * const formattedPrompt = await prompt.invoke({
+ *   topic: "cats",
+ * });
+ *
+ * const { system, messages } = convertPromptToAnthropic(formattedPrompt);
+ *
+ * const anthropicClient = new Anthropic({
+ *   apiKey: 'your_api_key',
+ * });
+ *
+ * const anthropicResponse = await anthropicClient.messages.create({
+ *   model: "claude-3-5-sonnet-20240620",
+ *   max_tokens: 1024,
+ *   stream: false,
+ *   system,
+ *   messages,
+ * });
+ * ```
+ * @param formattedPrompt
+ * @returns A partial Anthropic payload.
+ */
+ export declare function convertPromptToAnthropic(formattedPrompt: BasePromptValue): Anthropic.Messages.MessageCreateParams;
package/dist/utils/prompts.js ADDED
@@ -0,0 +1,45 @@
+ import { _convertMessagesToAnthropicPayload } from "./message_inputs.js";
+ /**
+ * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
+ * a format expected by Anthropic's JS SDK.
+ *
+ * Requires the "@langchain/anthropic" package to be installed in addition
+ * to the Anthropic SDK.
+ *
+ * @example
+ * ```ts
+ * import { convertPromptToAnthropic } from "langsmith/utils/hub/anthropic";
+ * import { pull } from "langchain/hub";
+ *
+ * import Anthropic from '@anthropic-ai/sdk';
+ *
+ * const prompt = await pull("jacob/joke-generator");
+ * const formattedPrompt = await prompt.invoke({
+ *   topic: "cats",
+ * });
+ *
+ * const { system, messages } = convertPromptToAnthropic(formattedPrompt);
+ *
+ * const anthropicClient = new Anthropic({
+ *   apiKey: 'your_api_key',
+ * });
+ *
+ * const anthropicResponse = await anthropicClient.messages.create({
+ *   model: "claude-3-5-sonnet-20240620",
+ *   max_tokens: 1024,
+ *   stream: false,
+ *   system,
+ *   messages,
+ * });
+ * ```
+ * @param formattedPrompt
+ * @returns A partial Anthropic payload.
+ */
+ export function convertPromptToAnthropic(formattedPrompt) {
+ const messages = formattedPrompt.toChatMessages();
+ const anthropicBody = _convertMessagesToAnthropicPayload(messages);
+ if (anthropicBody.messages === undefined) {
+ anthropicBody.messages = [];
+ }
+ return anthropicBody;
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@langchain/anthropic",
- "version": "0.3.2",
+ "version": "0.3.4",
  "description": "Anthropic integrations for LangChain.js",
  "type": "module",
  "engines": {