@librechat/agents 1.8.2 → 1.8.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,13 +3,14 @@
  var openai = require('@langchain/openai');
  var ollama = require('@langchain/ollama');
  var aws = require('@langchain/aws');
+ var anthropic = require('@langchain/anthropic');
  var mistralai = require('@langchain/mistralai');
  var googleVertexai = require('@langchain/google-vertexai');
  var web = require('@langchain/community/chat_models/bedrock/web');
  var _enum = require('../common/enum.cjs');
- var llm = require('./anthropic/llm.cjs');
 
  // src/llm/providers.ts
+ // import { CustomAnthropic } from '@/llm/anthropic/llm';
  const llmProviders = {
  [_enum.Providers.OPENAI]: openai.ChatOpenAI,
  [_enum.Providers.OLLAMA]: ollama.ChatOllama,
@@ -17,8 +18,8 @@ const llmProviders = {
  [_enum.Providers.BEDROCK_LEGACY]: web.BedrockChat,
  [_enum.Providers.MISTRALAI]: mistralai.ChatMistralAI,
  [_enum.Providers.BEDROCK]: aws.ChatBedrockConverse,
- [_enum.Providers.ANTHROPIC]: llm.CustomAnthropic,
  // [Providers.ANTHROPIC]: CustomAnthropic,
+ [_enum.Providers.ANTHROPIC]: anthropic.ChatAnthropic,
  };
  const manualToolStreamProviders = new Set([_enum.Providers.ANTHROPIC, _enum.Providers.BEDROCK, _enum.Providers.OLLAMA]);
  const getChatModelClass = (provider) => {
@@ -1 +1 @@
- {"version":3,"file":"providers.cjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOpenAI } from '@langchain/openai';\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatBedrockConverse } from '@langchain/aws';\n// import { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';\nimport { Providers } from '@/common';\nimport { CustomAnthropic } from '@/llm/anthropic/llm';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.BEDROCK]: ChatBedrockConverse,\n [Providers.ANTHROPIC]: CustomAnthropic,\n // [Providers.ANTHROPIC]: CustomAnthropic,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};"],"names":["Providers","ChatOpenAI","ChatOllama","ChatVertexAI","BedrockChat","ChatMistralAI","ChatBedrockConverse","CustomAnthropic"],"mappings":";;;;;;;;;;;AAAA;AAYa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAACA,eAAS,CAAC,MAAM,GAAGC,iBAAU;AAC9B,IAAA,CAACD,eAAS,CAAC,MAAM,GAAGE,iBAAU;AAC9B,IAAA,CAACF,eAAS,CAAC,QAAQ,GAAGG,2BAAY;AAClC,IAAA,CAACH,eAAS,CAAC,cAAc,GAAGI,eAAW;AACvC,IAAA,CAACJ,eAAS,CAAC,SAAS,GAAGK,uBAAa;AACpC,IAAA,CAACL,eAAS,CAAC,OAAO,GAAGM,uBAAmB;AACxC,IAAA,CAACN,eAAS,CAAC,SAAS,GAAGO,mBAAe;;EAEtC;MAEW,yBAAyB,GAAG,IAAI,GAAG,CAAqB,CAACP,eAAS,CAAC,SAAS,EAAEA,eAAS,CAAC,OAAO,EAAEA,eAAS,CAAC,MAAM,CAAC,EAAE;AAEpH,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;IAC9C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC,CAAC;KAC1D;AAED,IAAA,OAAO,cAAc,CAAC;AACxB;;;;;;"}
+ {"version":3,"file":"providers.cjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOpenAI } from '@langchain/openai';\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatBedrockConverse } from '@langchain/aws';\nimport { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';\nimport { Providers } from '@/common';\n// import { CustomAnthropic } from '@/llm/anthropic/llm';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.BEDROCK]: ChatBedrockConverse,\n // [Providers.ANTHROPIC]: CustomAnthropic,\n [Providers.ANTHROPIC]: ChatAnthropic,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};"],"names":["Providers","ChatOpenAI","ChatOllama","ChatVertexAI","BedrockChat","ChatMistralAI","ChatBedrockConverse","ChatAnthropic"],"mappings":";;;;;;;;;;;AAAA;AAUA;AAEa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAACA,eAAS,CAAC,MAAM,GAAGC,iBAAU;AAC9B,IAAA,CAACD,eAAS,CAAC,MAAM,GAAGE,iBAAU;AAC9B,IAAA,CAACF,eAAS,CAAC,QAAQ,GAAGG,2BAAY;AAClC,IAAA,CAACH,eAAS,CAAC,cAAc,GAAGI,eAAW;AACvC,IAAA,CAACJ,eAAS,CAAC,SAAS,GAAGK,uBAAa;AACpC,IAAA,CAACL,eAAS,CAAC,OAAO,GAAGM,uBAAmB;;AAExC,IAAA,CAACN,eAAS,CAAC,SAAS,GAAGO,uBAAa;EACpC;MAEW,yBAAyB,GAAG,IAAI,GAAG,CAAqB,CAACP,eAAS,CAAC,SAAS,EAAEA,eAAS,CAAC,OAAO,EAAEA,eAAS,CAAC,MAAM,CAAC,EAAE;AAEpH,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;IAC9C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC,CAAC;KAC1D;AAED,IAAA,OAAO,cAAc,CAAC;AACxB;;;;;;"}
@@ -1,13 +1,14 @@
  import { ChatOpenAI } from '@langchain/openai';
  import { ChatOllama } from '@langchain/ollama';
  import { ChatBedrockConverse } from '@langchain/aws';
+ import { ChatAnthropic } from '@langchain/anthropic';
  import { ChatMistralAI } from '@langchain/mistralai';
  import { ChatVertexAI } from '@langchain/google-vertexai';
  import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
  import { Providers } from '../common/enum.mjs';
- import { CustomAnthropic } from './anthropic/llm.mjs';
 
  // src/llm/providers.ts
+ // import { CustomAnthropic } from '@/llm/anthropic/llm';
  const llmProviders = {
  [Providers.OPENAI]: ChatOpenAI,
  [Providers.OLLAMA]: ChatOllama,
@@ -15,8 +16,8 @@ const llmProviders = {
  [Providers.BEDROCK_LEGACY]: BedrockChat,
  [Providers.MISTRALAI]: ChatMistralAI,
  [Providers.BEDROCK]: ChatBedrockConverse,
- [Providers.ANTHROPIC]: CustomAnthropic,
  // [Providers.ANTHROPIC]: CustomAnthropic,
+ [Providers.ANTHROPIC]: ChatAnthropic,
  };
  const manualToolStreamProviders = new Set([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);
  const getChatModelClass = (provider) => {
@@ -1 +1 @@
- {"version":3,"file":"providers.mjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOpenAI } from '@langchain/openai';\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatBedrockConverse } from '@langchain/aws';\n// import { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';\nimport { Providers } from '@/common';\nimport { CustomAnthropic } from '@/llm/anthropic/llm';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.BEDROCK]: ChatBedrockConverse,\n [Providers.ANTHROPIC]: CustomAnthropic,\n // [Providers.ANTHROPIC]: CustomAnthropic,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};"],"names":[],"mappings":";;;;;;;;;AAAA;AAYa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,cAAc,GAAG,WAAW;AACvC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,aAAa;AACpC,IAAA,CAAC,SAAS,CAAC,OAAO,GAAG,mBAAmB;AACxC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,eAAe;;EAEtC;MAEW,yBAAyB,GAAG,IAAI,GAAG,CAAqB,CAAC,SAAS,CAAC,SAAS,EAAE,SAAS,CAAC,OAAO,EAAE,SAAS,CAAC,MAAM,CAAC,EAAE;AAEpH,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;IAC9C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC,CAAC;KAC1D;AAED,IAAA,OAAO,cAAc,CAAC;AACxB;;;;"}
+ {"version":3,"file":"providers.mjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOpenAI } from '@langchain/openai';\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatBedrockConverse } from '@langchain/aws';\nimport { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';\nimport { Providers } from '@/common';\n// import { CustomAnthropic } from '@/llm/anthropic/llm';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.BEDROCK]: ChatBedrockConverse,\n // [Providers.ANTHROPIC]: CustomAnthropic,\n [Providers.ANTHROPIC]: ChatAnthropic,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};"],"names":[],"mappings":";;;;;;;;;AAAA;AAUA;AAEa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,cAAc,GAAG,WAAW;AACvC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,aAAa;AACpC,IAAA,CAAC,SAAS,CAAC,OAAO,GAAG,mBAAmB;;AAExC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,aAAa;EACpC;MAEW,yBAAyB,GAAG,IAAI,GAAG,CAAqB,CAAC,SAAS,CAAC,SAAS,EAAE,SAAS,CAAC,OAAO,EAAE,SAAS,CAAC,MAAM,CAAC,EAAE;AAEpH,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;IAC9C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC,CAAC;KAC1D;AAED,IAAA,OAAO,cAAc,CAAC;AACxB;;;;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@librechat/agents",
- "version": "1.8.2",
+ "version": "1.8.3",
  "main": "./dist/cjs/main.cjs",
  "module": "./dist/esm/main.mjs",
  "types": "./dist/types/index.d.ts",
@@ -2,13 +2,13 @@
  import { ChatOpenAI } from '@langchain/openai';
  import { ChatOllama } from '@langchain/ollama';
  import { ChatBedrockConverse } from '@langchain/aws';
- // import { ChatAnthropic } from '@langchain/anthropic';
+ import { ChatAnthropic } from '@langchain/anthropic';
  import { ChatMistralAI } from '@langchain/mistralai';
  import { ChatVertexAI } from '@langchain/google-vertexai';
  import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
  import type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';
  import { Providers } from '@/common';
- import { CustomAnthropic } from '@/llm/anthropic/llm';
+ // import { CustomAnthropic } from '@/llm/anthropic/llm';
 
  export const llmProviders: Partial<ChatModelConstructorMap> = {
  [Providers.OPENAI]: ChatOpenAI,
@@ -17,8 +17,8 @@ export const llmProviders: Partial<ChatModelConstructorMap> = {
  [Providers.BEDROCK_LEGACY]: BedrockChat,
  [Providers.MISTRALAI]: ChatMistralAI,
  [Providers.BEDROCK]: ChatBedrockConverse,
- [Providers.ANTHROPIC]: CustomAnthropic,
  // [Providers.ANTHROPIC]: CustomAnthropic,
+ [Providers.ANTHROPIC]: ChatAnthropic,
  };
 
  export const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);
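In practice this is the whole change: getChatModelClass(Providers.ANTHROPIC) now resolves to the stock ChatAnthropic class from @langchain/anthropic rather than the package's CustomAnthropic wrapper. A minimal sketch of how a consumer might exercise that mapping, assuming getChatModelClass and Providers are re-exported from the package entry point (the model name and API-key wiring below are illustrative, not taken from this diff):

// Sketch only: resolve the Anthropic chat model class via the provider map shown above.
import { getChatModelClass, Providers } from '@librechat/agents'; // assumed re-exports

const ChatModelClass = getChatModelClass(Providers.ANTHROPIC); // now ChatAnthropic (was CustomAnthropic)
const model = new ChatModelClass({
  model: 'claude-3-5-sonnet-latest',       // hypothetical model id
  apiKey: process.env.ANTHROPIC_API_KEY,   // hypothetical key wiring
});

// ChatAnthropic streams AIMessageChunk values through the standard Runnable interface.
for await (const chunk of await model.stream('Hello')) {
  if (typeof chunk.content === 'string') process.stdout.write(chunk.content);
}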
@@ -1,117 +0,0 @@
- 'use strict';
-
- var messages = require('@langchain/core/messages');
- var anthropic = require('@langchain/anthropic');
- var outputs = require('@langchain/core/outputs');
- var message_outputs = require('./utils/message_outputs.cjs');
- var message_inputs = require('./utils/message_inputs.cjs');
- var text = require('../text.cjs');
-
- function _toolsInParams(params) {
- return !!(params.tools && params.tools.length > 0);
- }
- function extractToken(chunk) {
- if (typeof chunk.content === 'string') {
- return [chunk.content, 'string'];
- }
- else if (Array.isArray(chunk.content) &&
- chunk.content.length >= 1 &&
- 'input' in chunk.content[0]) {
- return typeof chunk.content[0].input === 'string'
- ? [chunk.content[0].input, 'input']
- : [JSON.stringify(chunk.content[0].input), 'input'];
- }
- else if (Array.isArray(chunk.content) &&
- chunk.content.length >= 1 &&
- 'text' in chunk.content[0]) {
- return [chunk.content[0].text, 'content'];
- }
- return [undefined];
- }
- function cloneChunk(text, tokenType, chunk) {
- if (tokenType === 'string') {
- return new messages.AIMessageChunk(Object.assign({}, chunk, { content: text }));
- }
- else if (tokenType === 'input') {
- return chunk;
- }
- const content = chunk.content[0];
- if (tokenType === 'content' && content.type === 'text') {
- return new messages.AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));
- }
- else if (tokenType === 'content' && content.type === 'text_delta') {
- return new messages.AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));
- }
- return chunk;
- }
- class CustomAnthropic extends anthropic.ChatAnthropicMessages {
- _lc_stream_delay;
- constructor(fields) {
- super(fields);
- this._lc_stream_delay = fields._lc_stream_delay ?? 25;
- }
- async *_streamResponseChunks(messages$1, options, runManager) {
- const params = this.invocationParams(options);
- const formattedMessages = message_inputs._convertMessagesToAnthropicPayload(messages$1);
- const coerceContentToString = !_toolsInParams({
- ...params,
- ...formattedMessages,
- stream: false,
- });
- const stream = await this.createStreamWithRetry({
- ...params,
- ...formattedMessages,
- stream: true,
- }, {
- headers: options.headers,
- });
- for await (const data of stream) {
- if (options.signal?.aborted) {
- stream.controller.abort();
- throw new Error('AbortError: User aborted the request.');
- }
- const shouldStreamUsage = this.streamUsage ?? options.streamUsage;
- const result = message_outputs._makeMessageChunkFromAnthropicEvent(data, {
- streamUsage: shouldStreamUsage,
- coerceContentToString,
- });
- if (!result)
- continue;
- const { chunk } = result;
- // Extract the text content token for text field and runManager.
- const [token = '', tokenType] = extractToken(chunk);
- const createGenerationChunk = (text, incomingChunk) => {
- return new outputs.ChatGenerationChunk({
- message: new messages.AIMessageChunk({
- // Just yield chunk as it is and tool_use will be concat by BaseChatModel._generateUncached().
- content: incomingChunk.content,
- additional_kwargs: incomingChunk.additional_kwargs,
- tool_call_chunks: incomingChunk.tool_call_chunks,
- usage_metadata: shouldStreamUsage ? incomingChunk.usage_metadata : undefined,
- response_metadata: incomingChunk.response_metadata,
- id: incomingChunk.id,
- }),
- text,
- });
- };
- if (!tokenType || tokenType === 'input') {
- const generationChunk = createGenerationChunk(token, chunk);
- yield generationChunk;
- await runManager?.handleLLMNewToken(token, undefined, undefined, undefined, undefined, { chunk: generationChunk });
- continue;
- }
- const textStream = new text.TextStream(token, {
- delay: this._lc_stream_delay,
- });
- for await (const currentToken of textStream.generateText()) {
- const newChunk = cloneChunk(currentToken, tokenType, chunk);
- const generationChunk = createGenerationChunk(currentToken, newChunk);
- yield generationChunk;
- await runManager?.handleLLMNewToken(token, undefined, undefined, undefined, undefined, { chunk: generationChunk });
- }
- }
- }
- }
-
- exports.CustomAnthropic = CustomAnthropic;
- //# sourceMappingURL=llm.cjs.map
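Note what this deletion removes: CustomAnthropic re-chunked each streamed text token through a TextStream with a configurable delay (fields._lc_stream_delay, default 25 ms), so downstream consumers saw a smoothed stream rather than Anthropic's raw deltas. A rough sketch of that smoothing idea in isolation (illustrative only; not the package's implementation, which is the deleted code above):

// Illustrative only: re-emit a provider token in smaller pieces with a small delay,
// roughly what the deleted TextStream/_lc_stream_delay path did for text deltas.
async function* smooth(token: string, delayMs = 25): AsyncGenerator<string> {
  for (const piece of token) {
    await new Promise((resolve) => setTimeout(resolve, delayMs));
    yield piece;
  }
}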
@@ -1 +0,0 @@
- {"version":3,"file":"llm.cjs","sources":["../../../../src/llm/anthropic/llm.ts"],"sourcesContent":["import { AIMessageChunk } from '@langchain/core/messages';\nimport { ChatAnthropicMessages } from '@langchain/anthropic';\nimport { ChatGenerationChunk } from '@langchain/core/outputs';\nimport type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';\nimport type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';\nimport type { AnthropicInput } from '@langchain/anthropic';\nimport type { AnthropicMessageCreateParams } from '@/llm/anthropic/types';\nimport { _makeMessageChunkFromAnthropicEvent } from './utils/message_outputs';\nimport { _convertMessagesToAnthropicPayload } from './utils/message_inputs';\nimport { TextStream } from '@/llm/text';\n\nfunction _toolsInParams(params: AnthropicMessageCreateParams): boolean {\n return !!(params.tools && params.tools.length > 0);\n}\n\nfunction extractToken(chunk: AIMessageChunk): [string, 'string' | 'input' | 'content'] | [undefined] {\n if (typeof chunk.content === 'string') {\n return [chunk.content, 'string'];\n } else if (\n Array.isArray(chunk.content) &&\n chunk.content.length >= 1 &&\n 'input' in chunk.content[0]\n ) {\n return typeof chunk.content[0].input === 'string'\n ? [chunk.content[0].input, 'input']\n : [JSON.stringify(chunk.content[0].input), 'input'];\n } else if (\n Array.isArray(chunk.content) &&\n chunk.content.length >= 1 &&\n 'text' in chunk.content[0]\n ) {\n return [chunk.content[0].text, 'content'];\n }\n return [undefined];\n}\n\nfunction cloneChunk(text: string, tokenType: string, chunk: AIMessageChunk): AIMessageChunk {\n if (tokenType === 'string') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: text }));\n } else if (tokenType === 'input') {\n return chunk;\n }\n const content = chunk.content[0] as MessageContentComplex;\n if (tokenType === 'content' && content.type === 'text') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));\n } else if (tokenType === 'content' && content.type === 'text_delta') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));\n }\n\n return chunk;\n}\n\nexport type CustomAnthropicInput = AnthropicInput & { _lc_stream_delay?: number };\n\nexport class CustomAnthropic extends ChatAnthropicMessages {\n _lc_stream_delay: number;\n constructor(fields: CustomAnthropicInput) {\n super(fields);\n this._lc_stream_delay = fields._lc_stream_delay ?? 25;\n }\n\n async *_streamResponseChunks(\n messages: BaseMessage[],\n options: this['ParsedCallOptions'],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const params = this.invocationParams(options);\n const formattedMessages = _convertMessagesToAnthropicPayload(messages);\n const coerceContentToString = !_toolsInParams({\n ...params,\n ...formattedMessages,\n stream: false,\n });\n\n const stream = await this.createStreamWithRetry(\n {\n ...params,\n ...formattedMessages,\n stream: true,\n },\n {\n headers: options.headers,\n }\n );\n\n for await (const data of stream) {\n if (options.signal?.aborted) {\n stream.controller.abort();\n throw new Error('AbortError: User aborted the request.');\n }\n const shouldStreamUsage = this.streamUsage ?? 
options.streamUsage;\n const result = _makeMessageChunkFromAnthropicEvent(data, {\n streamUsage: shouldStreamUsage,\n coerceContentToString,\n });\n if (!result) continue;\n\n const { chunk } = result;\n\n // Extract the text content token for text field and runManager.\n const [token = '', tokenType] = extractToken(chunk);\n const createGenerationChunk = (text: string, incomingChunk: AIMessageChunk): ChatGenerationChunk => {\n return new ChatGenerationChunk({\n message: new AIMessageChunk({\n // Just yield chunk as it is and tool_use will be concat by BaseChatModel._generateUncached().\n content: incomingChunk.content,\n additional_kwargs: incomingChunk.additional_kwargs,\n tool_call_chunks: incomingChunk.tool_call_chunks,\n usage_metadata: shouldStreamUsage ? incomingChunk.usage_metadata : undefined,\n response_metadata: incomingChunk.response_metadata,\n id: incomingChunk.id,\n }),\n text,\n });\n };\n\n if (!tokenType || tokenType === 'input') {\n const generationChunk = createGenerationChunk(token, chunk);\n yield generationChunk;\n await runManager?.handleLLMNewToken(\n token,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n continue;\n }\n\n const textStream = new TextStream(token, {\n delay: this._lc_stream_delay,\n });\n for await (const currentToken of textStream.generateText()) {\n const newChunk = cloneChunk(currentToken, tokenType, chunk);\n const generationChunk = createGenerationChunk(currentToken, newChunk);\n yield generationChunk;\n\n await runManager?.handleLLMNewToken(\n token,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n }\n }\n }\n\n}\n"],"names":["AIMessageChunk","ChatAnthropicMessages","messages","_convertMessagesToAnthropicPayload","_makeMessageChunkFromAnthropicEvent","ChatGenerationChunk","TextStream"],"mappings":";;;;;;;;;AAWA,SAAS,cAAc,CAAC,MAAoC,EAAA;AAC1D,IAAA,OAAO,CAAC,EAAE,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,YAAY,CAAC,KAAqB,EAAA;AACzC,IAAA,IAAI,OAAO,KAAK,CAAC,OAAO,KAAK,QAAQ,EAAE;AACrC,QAAA,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;KAClC;AAAM,SAAA,IACL,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,QAAA,KAAK,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC;QACzB,OAAO,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAC3B;QACA,OAAO,OAAO,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,KAAK,QAAQ;AAC/C,cAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,OAAO,CAAC;AACnC,cAAE,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC,CAAC;KACvD;AAAM,SAAA,IACL,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,QAAA,KAAK,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC;QACzB,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAC1B;AACA,QAAA,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;KAC3C;IACD,OAAO,CAAC,SAAS,CAAC,CAAC;AACrB,CAAC;AAED,SAAS,UAAU,CAAC,IAAY,EAAE,SAAiB,EAAE,KAAqB,EAAA;AACxE,IAAA,IAAI,SAAS,KAAK,QAAQ,EAAE;AAC1B,QAAA,OAAO,IAAIA,uBAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;KACxE;AAAM,SAAA,IAAI,SAAS,KAAK,OAAO,EAAE;AAChC,QAAA,OAAO,KAAK,CAAC;KACd;IACD,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAA0B,CAAC;IAC1D,IAAI,SAAS,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE;AACtD,QAAA,OAAO,IAAIA,uBAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC1G;SAAM,IAAI,SAAS,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,YAAY,EAAE;AACnE,QAAA,OAAO,IAAIA,uBAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAA
M,CAAC,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC1G;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAIK,MAAO,eAAgB,SAAQC,+BAAqB,CAAA;AACxD,IAAA,gBAAgB,CAAS;AACzB,IAAA,WAAA,CAAY,MAA4B,EAAA;QACtC,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,IAAI,CAAC,gBAAgB,GAAG,MAAM,CAAC,gBAAgB,IAAI,EAAE,CAAC;KACvD;IAED,OAAO,qBAAqB,CAC1BC,UAAuB,EACvB,OAAkC,EAClC,UAAqC,EAAA;QAErC,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC;AAC9C,QAAA,MAAM,iBAAiB,GAAGC,iDAAkC,CAACD,UAAQ,CAAC,CAAC;AACvE,QAAA,MAAM,qBAAqB,GAAG,CAAC,cAAc,CAAC;AAC5C,YAAA,GAAG,MAAM;AACT,YAAA,GAAG,iBAAiB;AACpB,YAAA,MAAM,EAAE,KAAK;AACd,SAAA,CAAC,CAAC;AAEH,QAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,qBAAqB,CAC7C;AACE,YAAA,GAAG,MAAM;AACT,YAAA,GAAG,iBAAiB;AACpB,YAAA,MAAM,EAAE,IAAI;SACb,EACD;YACE,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,SAAA,CACF,CAAC;AAEF,QAAA,WAAW,MAAM,IAAI,IAAI,MAAM,EAAE;AAC/B,YAAA,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE;AAC3B,gBAAA,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;AAC1B,gBAAA,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,MAAM,iBAAiB,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC;AAClE,YAAA,MAAM,MAAM,GAAGE,mDAAmC,CAAC,IAAI,EAAE;AACvD,gBAAA,WAAW,EAAE,iBAAiB;gBAC9B,qBAAqB;AACtB,aAAA,CAAC,CAAC;AACH,YAAA,IAAI,CAAC,MAAM;gBAAE,SAAS;AAEtB,YAAA,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,CAAC;;AAGzB,YAAA,MAAM,CAAC,KAAK,GAAG,EAAE,EAAE,SAAS,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;AACpD,YAAA,MAAM,qBAAqB,GAAG,CAAC,IAAY,EAAE,aAA6B,KAAyB;gBACjG,OAAO,IAAIC,2BAAmB,CAAC;oBAC7B,OAAO,EAAE,IAAIL,uBAAc,CAAC;;wBAE1B,OAAO,EAAE,aAAa,CAAC,OAAO;wBAC9B,iBAAiB,EAAE,aAAa,CAAC,iBAAiB;wBAClD,gBAAgB,EAAE,aAAa,CAAC,gBAAgB;wBAChD,cAAc,EAAE,iBAAiB,GAAG,aAAa,CAAC,cAAc,GAAG,SAAS;wBAC5E,iBAAiB,EAAE,aAAa,CAAC,iBAAiB;wBAClD,EAAE,EAAE,aAAa,CAAC,EAAE;qBACrB,CAAC;oBACF,IAAI;AACL,iBAAA,CAAC,CAAC;AACL,aAAC,CAAC;AAEF,YAAA,IAAI,CAAC,SAAS,IAAI,SAAS,KAAK,OAAO,EAAE;gBACvC,MAAM,eAAe,GAAG,qBAAqB,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC5D,gBAAA,MAAM,eAAe,CAAC;gBACtB,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,EACL,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B,CAAC;gBACF,SAAS;aACV;AAED,YAAA,MAAM,UAAU,GAAG,IAAIM,eAAU,CAAC,KAAK,EAAE;gBACvC,KAAK,EAAE,IAAI,CAAC,gBAAgB;AAC7B,aAAA,CAAC,CAAC;YACH,WAAW,MAAM,YAAY,IAAI,UAAU,CAAC,YAAY,EAAE,EAAE;gBAC1D,MAAM,QAAQ,GAAG,UAAU,CAAC,YAAY,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC;gBAC5D,MAAM,eAAe,GAAG,qBAAqB,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;AACtE,gBAAA,MAAM,eAAe,CAAC;gBAEtB,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,EACL,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B,CAAC;aACH;SACF;KACF;AAEF;;;;"}
@@ -1,229 +0,0 @@
- 'use strict';
-
- var messages = require('@langchain/core/messages');
-
- /**
- * This util file contains functions for converting LangChain messages to Anthropic messages.
- */
- function _formatImage(imageUrl) {
- const regex = /^data:(image\/.+);base64,(.+)$/;
- const match = imageUrl.match(regex);
- if (match === null) {
- throw new Error([
- 'Anthropic only supports base64-encoded images currently.',
- 'Example: data:image/png;base64,/9j/4AAQSk...',
- ].join('\n\n'));
- }
- return {
- type: 'base64',
- media_type: match[1] ?? '',
- data: match[2] ?? '',
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- };
- }
- function _ensureMessageContents(messages$1) {
- // Merge runs of human/tool messages into single human messages with content blocks.
- const updatedMsgs = [];
- for (const message of messages$1) {
- if (message._getType() === 'tool') {
- if (typeof message.content === 'string') {
- const previousMessage = updatedMsgs[updatedMsgs.length - 1];
- if (previousMessage &&
- previousMessage._getType() === 'human' &&
- Array.isArray(previousMessage.content) &&
- 'type' in previousMessage.content[0] &&
- previousMessage.content[0].type === 'tool_result') {
- // If the previous message was a tool result, we merge this tool message into it.
- previousMessage.content.push({
- type: 'tool_result',
- content: message.content,
- tool_use_id: message.tool_call_id,
- });
- }
- else {
- // If not, we create a new human message with the tool result.
- updatedMsgs.push(new messages.HumanMessage({
- content: [
- {
- type: 'tool_result',
- content: message.content,
- tool_use_id: message.tool_call_id,
- },
- ],
- }));
- }
- }
- else {
- updatedMsgs.push(new messages.HumanMessage({
- content: [
- {
- type: 'tool_result',
- content: _formatContent(message.content),
- tool_use_id: message.tool_call_id,
- },
- ],
- }));
- }
- }
- else {
- updatedMsgs.push(message);
- }
- }
- return updatedMsgs;
- }
- function _convertLangChainToolCallToAnthropic(toolCall) {
- if (toolCall.id === undefined) {
- throw new Error('Anthropic requires all tool calls to have an "id".');
- }
- return {
- type: 'tool_use',
- id: toolCall.id,
- name: toolCall.name,
- input: toolCall.args,
- };
- }
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- function _formatContent(content) {
- const toolTypes = ['tool_use', 'tool_result', 'input_json_delta'];
- const textTypes = ['text', 'text_delta'];
- if (typeof content === 'string') {
- return content;
- }
- else {
- const contentBlocks = content.map((contentPart) => {
- const cacheControl = 'cache_control' in contentPart ? contentPart.cache_control : undefined;
- if (contentPart.type === 'image_url') {
- let source;
- if (typeof contentPart.image_url === 'string') {
- source = _formatImage(contentPart.image_url);
- }
- else {
- source = _formatImage(contentPart.image_url.url);
- }
- return {
- type: 'image', // Explicitly setting the type as "image"
- source,
- ...(cacheControl ? { cache_control: cacheControl } : {}),
- };
- }
- else if (textTypes.find((t) => t === contentPart.type) &&
- 'text' in contentPart) {
- // Assuming contentPart is of type MessageContentText here
- return {
- type: 'text', // Explicitly setting the type as "text"
- text: contentPart.text,
- ...(cacheControl ? { cache_control: cacheControl } : {}),
- };
- }
- else if (toolTypes.find((t) => t === contentPart.type)) {
- const contentPartCopy = { ...contentPart };
- if ('index' in contentPartCopy) {
- // Anthropic does not support passing the index field here, so we remove it.
- delete contentPartCopy.index;
- }
- if (contentPartCopy.type === 'input_json_delta') {
- // `input_json_delta` type only represents yielding partial tool inputs
- // and is not a valid type for Anthropic messages.
- contentPartCopy.type = 'tool_use';
- }
- if ('input' in contentPartCopy) {
- // Anthropic tool use inputs should be valid objects, when applicable.
- try {
- contentPartCopy.input = JSON.parse(contentPartCopy.input);
- }
- catch {
- // no-op
- }
- }
- // TODO: Fix when SDK types are fixed
- return {
- ...contentPartCopy,
- ...(cacheControl ? { cache_control: cacheControl } : {}),
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- };
- }
- else {
- throw new Error('Unsupported message content format');
- }
- });
- return contentBlocks;
- }
- }
- /**
- * Formats messages as a prompt for the model.
- * Used in LangSmith, export is important here.
- * @param messages The base messages to format as a prompt.
- * @returns The formatted prompt.
- */
- function _convertMessagesToAnthropicPayload(messages$1) {
- const mergedMessages = _ensureMessageContents(messages$1);
- let system;
- if (mergedMessages.length > 0 && mergedMessages[0]._getType() === 'system') {
- system = messages$1[0].content;
- }
- const conversationMessages = system !== undefined ? mergedMessages.slice(1) : mergedMessages;
- const formattedMessages = conversationMessages.map((message) => {
- let role;
- if (message._getType() === 'human') {
- role = 'user';
- }
- else if (message._getType() === 'ai') {
- role = 'assistant';
- }
- else if (message._getType() === 'tool') {
- role = 'user';
- }
- else if (message._getType() === 'system') {
- throw new Error('System messages are only permitted as the first passed message.');
- }
- else {
- throw new Error(`Message type "${message._getType()}" is not supported.`);
- }
- if (messages.isAIMessage(message) && !!message.tool_calls?.length) {
- if (typeof message.content === 'string') {
- if (message.content === '') {
- return {
- role,
- content: message.tool_calls.map(_convertLangChainToolCallToAnthropic),
- };
- }
- else {
- return {
- role,
- content: [
- { type: 'text', text: message.content },
- ...message.tool_calls.map(_convertLangChainToolCallToAnthropic),
- ],
- };
- }
- }
- else {
- const { content } = message;
- const hasMismatchedToolCalls = !message.tool_calls.every((toolCall) => content.find((contentPart) => (contentPart.type === 'tool_use' ||
- contentPart.type === 'input_json_delta') &&
- contentPart.id === toolCall.id));
- if (hasMismatchedToolCalls) {
- console.warn('The "tool_calls" field on a message is only respected if content is a string.');
- }
- return {
- role,
- content: _formatContent(message.content),
- };
- }
- }
- else {
- return {
- role,
- content: _formatContent(message.content),
- };
- }
- });
- return {
- messages: formattedMessages,
- system,
- };
- }
-
- exports._convertLangChainToolCallToAnthropic = _convertLangChainToolCallToAnthropic;
- exports._convertMessagesToAnthropicPayload = _convertMessagesToAnthropicPayload;
- //# sourceMappingURL=message_inputs.cjs.map
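For orientation, the deleted _convertMessagesToAnthropicPayload mapped LangChain messages onto the Anthropic Messages API shape: a leading system message becomes the top-level system field, human and tool messages become user turns (tool results as tool_result blocks), and AI messages become assistant turns with tool_calls expanded into tool_use blocks. A tiny illustrative input/output pair, assuming the deleted helper above (values invented for the example):

// Illustrative only: the kind of mapping the deleted helper performed.
import { SystemMessage, HumanMessage } from '@langchain/core/messages';

const input = [new SystemMessage('You are terse.'), new HumanMessage('Hi')];
// _convertMessagesToAnthropicPayload(input) returned roughly:
// { system: 'You are terse.', messages: [{ role: 'user', content: 'Hi' }] }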
@@ -1 +0,0 @@
- {"version":3,"file":"message_inputs.cjs","sources":["../../../../../src/llm/anthropic/utils/message_inputs.ts"],"sourcesContent":["/**\n * This util file contains functions for converting LangChain messages to Anthropic messages.\n */\nimport {\n BaseMessage,\n SystemMessage,\n HumanMessage,\n AIMessage,\n ToolMessage,\n MessageContent,\n isAIMessage,\n} from '@langchain/core/messages';\nimport { ToolCall } from '@langchain/core/messages/tool';\nimport type {\n AnthropicMessageCreateParams,\n AnthropicToolResponse,\n} from '@/llm/anthropic/types';\n\nfunction _formatImage(imageUrl: string): { type: string; media_type: string; data: string } {\n const regex = /^data:(image\\/.+);base64,(.+)$/;\n const match = imageUrl.match(regex);\n if (match === null) {\n throw new Error(\n [\n 'Anthropic only supports base64-encoded images currently.',\n 'Example: data:image/png;base64,/9j/4AAQSk...',\n ].join('\\n\\n')\n );\n }\n return {\n type: 'base64',\n media_type: match[1] ?? '',\n data: match[2] ?? '',\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } as any;\n}\n\nfunction _ensureMessageContents(\n messages: BaseMessage[]\n): (SystemMessage | HumanMessage | AIMessage)[] {\n // Merge runs of human/tool messages into single human messages with content blocks.\n const updatedMsgs = [];\n for (const message of messages) {\n if (message._getType() === 'tool') {\n if (typeof message.content === 'string') {\n const previousMessage = updatedMsgs[updatedMsgs.length - 1] as BaseMessage | undefined;\n if (\n previousMessage &&\n previousMessage._getType() === 'human' &&\n Array.isArray(previousMessage.content) &&\n 'type' in previousMessage.content[0] &&\n previousMessage.content[0].type === 'tool_result'\n ) {\n // If the previous message was a tool result, we merge this tool message into it.\n previousMessage.content.push({\n type: 'tool_result',\n content: message.content,\n tool_use_id: (message as ToolMessage).tool_call_id,\n });\n } else {\n // If not, we create a new human message with the tool result.\n updatedMsgs.push(\n new HumanMessage({\n content: [\n {\n type: 'tool_result',\n content: message.content,\n tool_use_id: (message as ToolMessage).tool_call_id,\n },\n ],\n })\n );\n }\n } else {\n updatedMsgs.push(\n new HumanMessage({\n content: [\n {\n type: 'tool_result',\n content: _formatContent(message.content),\n tool_use_id: (message as ToolMessage).tool_call_id,\n },\n ],\n })\n );\n }\n } else {\n updatedMsgs.push(message);\n }\n }\n return updatedMsgs;\n}\n\nexport function _convertLangChainToolCallToAnthropic(\n toolCall: ToolCall\n): AnthropicToolResponse {\n if (toolCall.id === undefined) {\n throw new Error('Anthropic requires all tool calls to have an \"id\".');\n }\n return {\n type: 'tool_use',\n id: toolCall.id,\n name: toolCall.name,\n input: toolCall.args,\n };\n}\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction _formatContent(content: MessageContent): string | Record<string, any>[] {\n const toolTypes = ['tool_use', 'tool_result', 'input_json_delta'];\n const textTypes = ['text', 'text_delta'];\n\n if (typeof content === 'string') {\n return content;\n } else {\n const contentBlocks = content.map((contentPart) => {\n const cacheControl =\n 'cache_control' in contentPart ? 
contentPart.cache_control : undefined;\n\n if (contentPart.type === 'image_url') {\n let source;\n if (typeof contentPart.image_url === 'string') {\n source = _formatImage(contentPart.image_url);\n } else {\n source = _formatImage(contentPart.image_url.url);\n }\n return {\n type: 'image' as const, // Explicitly setting the type as \"image\"\n source,\n ...(cacheControl ? { cache_control: cacheControl } : {}),\n };\n } else if (\n textTypes.find((t) => t === contentPart.type) &&\n 'text' in contentPart\n ) {\n // Assuming contentPart is of type MessageContentText here\n return {\n type: 'text' as const, // Explicitly setting the type as \"text\"\n text: contentPart.text,\n ...(cacheControl ? { cache_control: cacheControl } : {}),\n };\n } else if (toolTypes.find((t) => t === contentPart.type)) {\n const contentPartCopy = { ...contentPart };\n if ('index' in contentPartCopy) {\n // Anthropic does not support passing the index field here, so we remove it.\n delete contentPartCopy.index;\n }\n\n if (contentPartCopy.type === 'input_json_delta') {\n // `input_json_delta` type only represents yielding partial tool inputs\n // and is not a valid type for Anthropic messages.\n contentPartCopy.type = 'tool_use';\n }\n\n if ('input' in contentPartCopy) {\n // Anthropic tool use inputs should be valid objects, when applicable.\n try {\n contentPartCopy.input = JSON.parse(contentPartCopy.input);\n } catch {\n // no-op\n }\n }\n\n // TODO: Fix when SDK types are fixed\n return {\n ...contentPartCopy,\n ...(cacheControl ? { cache_control: cacheControl } : {}),\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } as any;\n } else {\n throw new Error('Unsupported message content format');\n }\n });\n return contentBlocks;\n }\n}\n\n/**\n * Formats messages as a prompt for the model.\n * Used in LangSmith, export is important here.\n * @param messages The base messages to format as a prompt.\n * @returns The formatted prompt.\n */\nexport function _convertMessagesToAnthropicPayload(\n messages: BaseMessage[]\n): AnthropicMessageCreateParams {\n const mergedMessages = _ensureMessageContents(messages);\n let system;\n if (mergedMessages.length > 0 && mergedMessages[0]._getType() === 'system') {\n system = messages[0].content;\n }\n const conversationMessages =\n system !== undefined ? 
mergedMessages.slice(1) : mergedMessages;\n const formattedMessages = conversationMessages.map((message) => {\n let role;\n if (message._getType() === 'human') {\n role = 'user' as const;\n } else if (message._getType() === 'ai') {\n role = 'assistant' as const;\n } else if (message._getType() === 'tool') {\n role = 'user' as const;\n } else if (message._getType() === 'system') {\n throw new Error(\n 'System messages are only permitted as the first passed message.'\n );\n } else {\n throw new Error(`Message type \"${message._getType()}\" is not supported.`);\n }\n if (isAIMessage(message) && !!message.tool_calls?.length) {\n if (typeof message.content === 'string') {\n if (message.content === '') {\n return {\n role,\n content: message.tool_calls.map(\n _convertLangChainToolCallToAnthropic\n ),\n };\n } else {\n return {\n role,\n content: [\n { type: 'text', text: message.content },\n ...message.tool_calls.map(_convertLangChainToolCallToAnthropic),\n ],\n };\n }\n } else {\n const { content } = message;\n const hasMismatchedToolCalls = !message.tool_calls.every((toolCall) =>\n content.find(\n (contentPart) =>\n (contentPart.type === 'tool_use' ||\n contentPart.type === 'input_json_delta') &&\n contentPart.id === toolCall.id\n )\n );\n if (hasMismatchedToolCalls) {\n console.warn(\n 'The \"tool_calls\" field on a message is only respected if content is a string.'\n );\n }\n return {\n role,\n content: _formatContent(message.content),\n };\n }\n } else {\n return {\n role,\n content: _formatContent(message.content),\n };\n }\n });\n return {\n messages: formattedMessages,\n system,\n } as AnthropicMessageCreateParams;\n}"],"names":["messages","HumanMessage","isAIMessage"],"mappings":";;;;AAAA;;AAEG;AAgBH,SAAS,YAAY,CAAC,QAAgB,EAAA;IACpC,MAAM,KAAK,GAAG,gCAAgC,CAAC;IAC/C,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AACpC,IAAA,IAAI,KAAK,KAAK,IAAI,EAAE;QAClB,MAAM,IAAI,KAAK,CACb;YACE,0DAA0D;YAC1D,8CAA8C;AAC/C,SAAA,CAAC,IAAI,CAAC,MAAM,CAAC,CACf,CAAC;KACH;IACD,OAAO;AACL,QAAA,IAAI,EAAE,QAAQ;AACd,QAAA,UAAU,EAAE,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE;AAC1B,QAAA,IAAI,EAAE,KAAK,CAAC,CAAC,CAAC,IAAI,EAAE;;KAEd,CAAC;AACX,CAAC;AAED,SAAS,sBAAsB,CAC7BA,UAAuB,EAAA;;IAGvB,MAAM,WAAW,GAAG,EAAE,CAAC;AACvB,IAAA,KAAK,MAAM,OAAO,IAAIA,UAAQ,EAAE;AAC9B,QAAA,IAAI,OAAO,CAAC,QAAQ,EAAE,KAAK,MAAM,EAAE;AACjC,YAAA,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE;gBACvC,MAAM,eAAe,GAAG,WAAW,CAAC,WAAW,CAAC,MAAM,GAAG,CAAC,CAA4B,CAAC;AACvF,gBAAA,IACE,eAAe;AACf,oBAAA,eAAe,CAAC,QAAQ,EAAE,KAAK,OAAO;AACtC,oBAAA,KAAK,CAAC,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC;AACtC,oBAAA,MAAM,IAAI,eAAe,CAAC,OAAO,CAAC,CAAC,CAAC;oBACpC,eAAe,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,aAAa,EACjD;;AAEA,oBAAA,eAAe,CAAC,OAAO,CAAC,IAAI,CAAC;AAC3B,wBAAA,IAAI,EAAE,aAAa;wBACnB,OAAO,EAAE,OAAO,CAAC,OAAO;wBACxB,WAAW,EAAG,OAAuB,CAAC,YAAY;AACnD,qBAAA,CAAC,CAAC;iBACJ;qBAAM;;AAEL,oBAAA,WAAW,CAAC,IAAI,CACd,IAAIC,qBAAY,CAAC;AACf,wBAAA,OAAO,EAAE;AACP,4BAAA;AACE,gCAAA,IAAI,EAAE,aAAa;gCACnB,OAAO,EAAE,OAAO,CAAC,OAAO;gCACxB,WAAW,EAAG,OAAuB,CAAC,YAAY;AACnD,6BAAA;AACF,yBAAA;AACF,qBAAA,CAAC,CACH,CAAC;iBACH;aACF;iBAAM;AACL,gBAAA,WAAW,CAAC,IAAI,CACd,IAAIA,qBAAY,CAAC;AACf,oBAAA,OAAO,EAAE;AACP,wBAAA;AACE,4BAAA,IAAI,EAAE,aAAa;AACnB,4BAAA,OAAO,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO,CAAC;4BACxC,WAAW,EAAG,OAAuB,CAAC,YAAY;AACnD,yBAAA;AACF,qBAAA;AACF,iBAAA,CAAC,CACH,CAAC;aACH;SACF;aAAM;AACL,YAAA,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;SAC3B;KACF;AACD,IAAA,OAAO,WAAW,CAAC;AACrB,CAAC;AAEK,SAAU,oCAAoC,CAClD,QAAkB,EAAA;AAElB,IAAA,IAAI,QAAQ,CAAC,EAAE,KAAK,SAAS,EAAE;AAC7B,QAAA,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;KACvE;IACD,OAAO;AACL,Q
AAA,IAAI,EAAE,UAAU;QAChB,EAAE,EAAE,QAAQ,CAAC,EAAE;QACf,IAAI,EAAE,QAAQ,CAAC,IAAI;QACnB,KAAK,EAAE,QAAQ,CAAC,IAAI;KACrB,CAAC;AACJ,CAAC;AAED;AACA,SAAS,cAAc,CAAC,OAAuB,EAAA;IAC7C,MAAM,SAAS,GAAG,CAAC,UAAU,EAAE,aAAa,EAAE,kBAAkB,CAAC,CAAC;AAClE,IAAA,MAAM,SAAS,GAAG,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;AAEzC,IAAA,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;AAC/B,QAAA,OAAO,OAAO,CAAC;KAChB;SAAM;QACL,MAAM,aAAa,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,WAAW,KAAI;AAChD,YAAA,MAAM,YAAY,GAChB,eAAe,IAAI,WAAW,GAAG,WAAW,CAAC,aAAa,GAAG,SAAS,CAAC;AAEzE,YAAA,IAAI,WAAW,CAAC,IAAI,KAAK,WAAW,EAAE;AACpC,gBAAA,IAAI,MAAM,CAAC;AACX,gBAAA,IAAI,OAAO,WAAW,CAAC,SAAS,KAAK,QAAQ,EAAE;AAC7C,oBAAA,MAAM,GAAG,YAAY,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;iBAC9C;qBAAM;oBACL,MAAM,GAAG,YAAY,CAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;iBAClD;gBACD,OAAO;oBACL,IAAI,EAAE,OAAgB;oBACtB,MAAM;AACN,oBAAA,IAAI,YAAY,GAAG,EAAE,aAAa,EAAE,YAAY,EAAE,GAAG,EAAE,CAAC;iBACzD,CAAC;aACH;AAAM,iBAAA,IACL,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,WAAW,CAAC,IAAI,CAAC;gBAC7C,MAAM,IAAI,WAAW,EACrB;;gBAEA,OAAO;oBACL,IAAI,EAAE,MAAe;oBACrB,IAAI,EAAE,WAAW,CAAC,IAAI;AACtB,oBAAA,IAAI,YAAY,GAAG,EAAE,aAAa,EAAE,YAAY,EAAE,GAAG,EAAE,CAAC;iBACzD,CAAC;aACH;AAAM,iBAAA,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,WAAW,CAAC,IAAI,CAAC,EAAE;AACxD,gBAAA,MAAM,eAAe,GAAG,EAAE,GAAG,WAAW,EAAE,CAAC;AAC3C,gBAAA,IAAI,OAAO,IAAI,eAAe,EAAE;;oBAE9B,OAAO,eAAe,CAAC,KAAK,CAAC;iBAC9B;AAED,gBAAA,IAAI,eAAe,CAAC,IAAI,KAAK,kBAAkB,EAAE;;;AAG/C,oBAAA,eAAe,CAAC,IAAI,GAAG,UAAU,CAAC;iBACnC;AAED,gBAAA,IAAI,OAAO,IAAI,eAAe,EAAE;;AAE9B,oBAAA,IAAI;wBACF,eAAe,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC;qBAC3D;AAAC,oBAAA,MAAM;;qBAEP;iBACF;;gBAGD,OAAO;AACL,oBAAA,GAAG,eAAe;AAClB,oBAAA,IAAI,YAAY,GAAG,EAAE,aAAa,EAAE,YAAY,EAAE,GAAG,EAAE,CAAC;;iBAElD,CAAC;aACV;iBAAM;AACL,gBAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;aACvD;AACH,SAAC,CAAC,CAAC;AACH,QAAA,OAAO,aAAa,CAAC;KACtB;AACH,CAAC;AAED;;;;;AAKG;AACG,SAAU,kCAAkC,CAChDD,UAAuB,EAAA;AAEvB,IAAA,MAAM,cAAc,GAAG,sBAAsB,CAACA,UAAQ,CAAC,CAAC;AACxD,IAAA,IAAI,MAAM,CAAC;AACX,IAAA,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC,IAAI,cAAc,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,KAAK,QAAQ,EAAE;AAC1E,QAAA,MAAM,GAAGA,UAAQ,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;KAC9B;AACD,IAAA,MAAM,oBAAoB,GACxB,MAAM,KAAK,SAAS,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,cAAc,CAAC;IAClE,MAAM,iBAAiB,GAAG,oBAAoB,CAAC,GAAG,CAAC,CAAC,OAAO,KAAI;AAC7D,QAAA,IAAI,IAAI,CAAC;AACT,QAAA,IAAI,OAAO,CAAC,QAAQ,EAAE,KAAK,OAAO,EAAE;YAClC,IAAI,GAAG,MAAe,CAAC;SACxB;AAAM,aAAA,IAAI,OAAO,CAAC,QAAQ,EAAE,KAAK,IAAI,EAAE;YACtC,IAAI,GAAG,WAAoB,CAAC;SAC7B;AAAM,aAAA,IAAI,OAAO,CAAC,QAAQ,EAAE,KAAK,MAAM,EAAE;YACxC,IAAI,GAAG,MAAe,CAAC;SACxB;AAAM,aAAA,IAAI,OAAO,CAAC,QAAQ,EAAE,KAAK,QAAQ,EAAE;AAC1C,YAAA,MAAM,IAAI,KAAK,CACb,iEAAiE,CAClE,CAAC;SACH;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,CAAiB,cAAA,EAAA,OAAO,CAAC,QAAQ,EAAE,CAAqB,mBAAA,CAAA,CAAC,CAAC;SAC3E;AACD,QAAA,IAAIE,oBAAW,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,MAAM,EAAE;AACxD,YAAA,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE;AACvC,gBAAA,IAAI,OAAO,CAAC,OAAO,KAAK,EAAE,EAAE;oBAC1B,OAAO;wBACL,IAAI;wBACJ,OAAO,EAAE,OAAO,CAAC,UAAU,CAAC,GAAG,CAC7B,oCAAoC,CACrC;qBACF,CAAC;iBACH;qBAAM;oBACL,OAAO;wBACL,IAAI;AACJ,wBAAA,OAAO,EAAE;4BACP,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,CAAC,OAAO,EAAE;AACvC,4BAAA,GAAG,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,oCAAoC,CAAC;AAChE,yBAAA;qBACF,CAAC;iBACH;aACF;iBAAM;AACL,gBAAA,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC;AAC5B,gBAAA,MAAM,sBAAsB,GAAG,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,QAAQ,KAChE,OAAO,CAAC,IAAI,CACV,CAAC,WAAW,KACV,CAAC,WAAW,CAAC,IAAI,KAAK,UAAU;AAC9B,oBAAA,WAAW,CAAC,IAAI,KAAK,kBAAkB;oBACzC,WAAW,CAAC,EAAE,KAAK,QAAQ,CAAC,EAAE,CACjC,CACF,CAAC;gBA
CF,IAAI,sBAAsB,EAAE;AAC1B,oBAAA,OAAO,CAAC,IAAI,CACV,+EAA+E,CAChF,CAAC;iBACH;gBACD,OAAO;oBACL,IAAI;AACJ,oBAAA,OAAO,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO,CAAC;iBACzC,CAAC;aACH;SACF;aAAM;YACL,OAAO;gBACL,IAAI;AACJ,gBAAA,OAAO,EAAE,cAAc,CAAC,OAAO,CAAC,OAAO,CAAC;aACzC,CAAC;SACH;AACH,KAAC,CAAC,CAAC;IACH,OAAO;AACL,QAAA,QAAQ,EAAE,iBAAiB;QAC3B,MAAM;KACyB,CAAC;AACpC;;;;;"}
@@ -1,135 +0,0 @@
- 'use strict';
-
- var messages = require('@langchain/core/messages');
-
- function _makeMessageChunkFromAnthropicEvent(data, fields) {
- if (data.type === 'message_start') {
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- const { content, usage, ...additionalKwargs } = data.message;
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const filteredAdditionalKwargs = {};
- for (const [key, value] of Object.entries(additionalKwargs)) {
- if (value !== undefined && value !== null) {
- filteredAdditionalKwargs[key] = value;
- }
- }
- const usageMetadata = {
- input_tokens: usage.input_tokens,
- output_tokens: usage.output_tokens,
- total_tokens: usage.input_tokens + usage.output_tokens,
- };
- return {
- chunk: new messages.AIMessageChunk({
- content: fields.coerceContentToString ? '' : [],
- additional_kwargs: filteredAdditionalKwargs,
- usage_metadata: fields.streamUsage ? usageMetadata : undefined,
- id: data.message.id,
- }),
- };
- }
- else if (data.type === 'message_delta') {
- const usageMetadata = {
- input_tokens: 0,
- output_tokens: data.usage.output_tokens,
- total_tokens: data.usage.output_tokens,
- };
- return {
- chunk: new messages.AIMessageChunk({
- content: fields.coerceContentToString ? '' : [],
- additional_kwargs: { ...data.delta },
- usage_metadata: fields.streamUsage ? usageMetadata : undefined,
- }),
- };
- }
- else if (data.type === 'content_block_start' &&
- data.content_block.type === 'tool_use') {
- const toolCallContentBlock = data.content_block;
- return {
- chunk: new messages.AIMessageChunk({
- content: fields.coerceContentToString
- ? ''
- : [
- {
- index: data.index,
- ...data.content_block,
- input: '',
- },
- ],
- additional_kwargs: {},
- tool_call_chunks: [
- {
- id: toolCallContentBlock.id,
- index: data.index,
- name: toolCallContentBlock.name,
- args: '',
- },
- ],
- }),
- };
- }
- else if (data.type === 'content_block_delta' &&
- data.delta.type === 'text_delta') {
- const content = data.delta.text;
- if (content !== undefined) {
- return {
- chunk: new messages.AIMessageChunk({
- content: fields.coerceContentToString
- ? content
- : [
- {
- index: data.index,
- ...data.delta,
- },
- ],
- additional_kwargs: {},
- }),
- };
- }
- }
- else if (data.type === 'content_block_delta' &&
- data.delta.type === 'input_json_delta') {
- return {
- chunk: new messages.AIMessageChunk({
- content: fields.coerceContentToString
- ? ''
- : [
- {
- index: data.index,
- input: data.delta.partial_json,
- type: data.delta.type,
- },
- ],
- additional_kwargs: {},
- tool_call_chunks: [
- {
- index: data.index,
- args: data.delta.partial_json,
- },
- ],
- }),
- };
- }
- else if (data.type === 'content_block_start' &&
- data.content_block.type === 'text') {
- const content = data.content_block.text;
- if (content !== undefined) {
- return {
- chunk: new messages.AIMessageChunk({
- content: fields.coerceContentToString
- ? content
- : [
- {
- index: data.index,
- ...data.content_block,
- },
- ],
- additional_kwargs: {},
- }),
- };
- }
- }
- return null;
- }
-
- exports._makeMessageChunkFromAnthropicEvent = _makeMessageChunkFromAnthropicEvent;
- //# sourceMappingURL=message_outputs.cjs.map
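Similarly, the deleted _makeMessageChunkFromAnthropicEvent translated raw Anthropic streaming events into LangChain AIMessageChunk values, coercing content to a plain string when no tools were in play. One illustrative case, based on the text_delta branch of the deleted code above (the event values are invented):

// Illustrative only: the text_delta branch of the deleted helper.
const event = { type: 'content_block_delta', index: 0, delta: { type: 'text_delta', text: 'Hello' } };
// With coerceContentToString = true it produced roughly:
//   new AIMessageChunk({ content: 'Hello', additional_kwargs: {} })
// and with coerceContentToString = false:
//   new AIMessageChunk({ content: [{ index: 0, type: 'text_delta', text: 'Hello' }], additional_kwargs: {} })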