@librechat/agents 1.8.1 → 1.8.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/llm/providers.cjs +3 -2
- package/dist/cjs/llm/providers.cjs.map +1 -1
- package/dist/esm/llm/providers.mjs +3 -2
- package/dist/esm/llm/providers.mjs.map +1 -1
- package/package.json +3 -3
- package/src/llm/anthropic/utils/message_inputs.ts +8 -29
- package/src/llm/providers.ts +3 -3
- package/src/proto/collab_design_v5.ts +1 -1
- package/dist/cjs/llm/anthropic/llm.cjs +0 -117
- package/dist/cjs/llm/anthropic/llm.cjs.map +0 -1
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +0 -251
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +0 -1
- package/dist/cjs/llm/anthropic/utils/message_outputs.cjs +0 -135
- package/dist/cjs/llm/anthropic/utils/message_outputs.cjs.map +0 -1
- package/dist/cjs/llm/text.cjs +0 -73
- package/dist/cjs/llm/text.cjs.map +0 -1
- package/dist/esm/llm/anthropic/llm.mjs +0 -115
- package/dist/esm/llm/anthropic/llm.mjs.map +0 -1
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs +0 -248
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +0 -1
- package/dist/esm/llm/anthropic/utils/message_outputs.mjs +0 -133
- package/dist/esm/llm/anthropic/utils/message_outputs.mjs.map +0 -1
- package/dist/esm/llm/text.mjs +0 -71
- package/dist/esm/llm/text.mjs.map +0 -1
package/dist/cjs/llm/providers.cjs
CHANGED
@@ -3,13 +3,14 @@
 var openai = require('@langchain/openai');
 var ollama = require('@langchain/ollama');
 var aws = require('@langchain/aws');
+var anthropic = require('@langchain/anthropic');
 var mistralai = require('@langchain/mistralai');
 var googleVertexai = require('@langchain/google-vertexai');
 var web = require('@langchain/community/chat_models/bedrock/web');
 var _enum = require('../common/enum.cjs');
-var llm = require('./anthropic/llm.cjs');

 // src/llm/providers.ts
+// import { CustomAnthropic } from '@/llm/anthropic/llm';
 const llmProviders = {
     [_enum.Providers.OPENAI]: openai.ChatOpenAI,
     [_enum.Providers.OLLAMA]: ollama.ChatOllama,
@@ -17,8 +18,8 @@ const llmProviders = {
     [_enum.Providers.BEDROCK_LEGACY]: web.BedrockChat,
     [_enum.Providers.MISTRALAI]: mistralai.ChatMistralAI,
     [_enum.Providers.BEDROCK]: aws.ChatBedrockConverse,
-    [_enum.Providers.ANTHROPIC]: llm.CustomAnthropic,
     // [Providers.ANTHROPIC]: CustomAnthropic,
+    [_enum.Providers.ANTHROPIC]: anthropic.ChatAnthropic,
 };
 const manualToolStreamProviders = new Set([_enum.Providers.ANTHROPIC, _enum.Providers.BEDROCK, _enum.Providers.OLLAMA]);
 const getChatModelClass = (provider) => {
package/dist/cjs/llm/providers.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"providers.cjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOpenAI } from '@langchain/openai';\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatBedrockConverse } from '@langchain/aws';\
+
{"version":3,"file":"providers.cjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOpenAI } from '@langchain/openai';\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatBedrockConverse } from '@langchain/aws';\nimport { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';\nimport { Providers } from '@/common';\n// import { CustomAnthropic } from '@/llm/anthropic/llm';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.BEDROCK]: ChatBedrockConverse,\n // [Providers.ANTHROPIC]: CustomAnthropic,\n [Providers.ANTHROPIC]: ChatAnthropic,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};"],"names":["Providers","ChatOpenAI","ChatOllama","ChatVertexAI","BedrockChat","ChatMistralAI","ChatBedrockConverse","ChatAnthropic"],"mappings":";;;;;;;;;;;AAAA;AAUA;AAEa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAACA,eAAS,CAAC,MAAM,GAAGC,iBAAU;AAC9B,IAAA,CAACD,eAAS,CAAC,MAAM,GAAGE,iBAAU;AAC9B,IAAA,CAACF,eAAS,CAAC,QAAQ,GAAGG,2BAAY;AAClC,IAAA,CAACH,eAAS,CAAC,cAAc,GAAGI,eAAW;AACvC,IAAA,CAACJ,eAAS,CAAC,SAAS,GAAGK,uBAAa;AACpC,IAAA,CAACL,eAAS,CAAC,OAAO,GAAGM,uBAAmB;;AAExC,IAAA,CAACN,eAAS,CAAC,SAAS,GAAGO,uBAAa;EACpC;MAEW,yBAAyB,GAAG,IAAI,GAAG,CAAqB,CAACP,eAAS,CAAC,SAAS,EAAEA,eAAS,CAAC,OAAO,EAAEA,eAAS,CAAC,MAAM,CAAC,EAAE;AAEpH,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;IAC9C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC,CAAC;KAC1D;AAED,IAAA,OAAO,cAAc,CAAC;AACxB;;;;;;"}
package/dist/esm/llm/providers.mjs
CHANGED
@@ -1,13 +1,14 @@
 import { ChatOpenAI } from '@langchain/openai';
 import { ChatOllama } from '@langchain/ollama';
 import { ChatBedrockConverse } from '@langchain/aws';
+import { ChatAnthropic } from '@langchain/anthropic';
 import { ChatMistralAI } from '@langchain/mistralai';
 import { ChatVertexAI } from '@langchain/google-vertexai';
 import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
 import { Providers } from '../common/enum.mjs';
-import { CustomAnthropic } from './anthropic/llm.mjs';

 // src/llm/providers.ts
+// import { CustomAnthropic } from '@/llm/anthropic/llm';
 const llmProviders = {
     [Providers.OPENAI]: ChatOpenAI,
     [Providers.OLLAMA]: ChatOllama,
@@ -15,8 +16,8 @@ const llmProviders = {
     [Providers.BEDROCK_LEGACY]: BedrockChat,
     [Providers.MISTRALAI]: ChatMistralAI,
     [Providers.BEDROCK]: ChatBedrockConverse,
-    [Providers.ANTHROPIC]: CustomAnthropic,
     // [Providers.ANTHROPIC]: CustomAnthropic,
+    [Providers.ANTHROPIC]: ChatAnthropic,
 };
 const manualToolStreamProviders = new Set([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);
 const getChatModelClass = (provider) => {
package/dist/esm/llm/providers.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"providers.mjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOpenAI } from '@langchain/openai';\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatBedrockConverse } from '@langchain/aws';\
+
{"version":3,"file":"providers.mjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOpenAI } from '@langchain/openai';\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatBedrockConverse } from '@langchain/aws';\nimport { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';\nimport { Providers } from '@/common';\n// import { CustomAnthropic } from '@/llm/anthropic/llm';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.BEDROCK]: ChatBedrockConverse,\n // [Providers.ANTHROPIC]: CustomAnthropic,\n [Providers.ANTHROPIC]: ChatAnthropic,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};"],"names":[],"mappings":";;;;;;;;;AAAA;AAUA;AAEa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,cAAc,GAAG,WAAW;AACvC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,aAAa;AACpC,IAAA,CAAC,SAAS,CAAC,OAAO,GAAG,mBAAmB;;AAExC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,aAAa;EACpC;MAEW,yBAAyB,GAAG,IAAI,GAAG,CAAqB,CAAC,SAAS,CAAC,SAAS,EAAE,SAAS,CAAC,OAAO,EAAE,SAAS,CAAC,MAAM,CAAC,EAAE;AAEpH,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;IAC9C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC,CAAC;KAC1D;AAED,IAAA,OAAO,cAAc,CAAC;AACxB;;;;"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@librechat/agents",
-  "version": "1.8.1",
+  "version": "1.8.3",
   "main": "./dist/cjs/main.cjs",
   "module": "./dist/esm/main.mjs",
   "types": "./dist/types/index.d.ts",
@@ -42,7 +42,7 @@
     "start:cli": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/cli.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
     "content": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/content.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
     "stream": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/stream.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
-    "code_exec": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec.ts --provider '
+    "code_exec": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
     "code_exec_simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec_simple.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
     "simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/simple.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
     "memory": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/memory.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
@@ -69,7 +69,7 @@
     "@aws-crypto/sha256-js": "^5.2.0",
     "@aws-sdk/credential-provider-node": "^3.613.0",
     "@aws-sdk/types": "^3.609.0",
-    "@langchain/anthropic": "^0.3.
+    "@langchain/anthropic": "^0.3.8",
     "@langchain/aws": "^0.1.1",
     "@langchain/community": "^0.3.14",
     "@langchain/core": "^0.3.18",
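With CustomAnthropic removed, the package now drives Anthropic through the stock ChatAnthropic class from @langchain/anthropic ^0.3.8. A minimal sketch of direct usage, assuming the standard ChatAnthropic constructor fields and an ANTHROPIC_API_KEY in the environment; the model id is a placeholder, not something this diff pins:

import { ChatAnthropic } from '@langchain/anthropic';

// Placeholder model id; the API key is read from ANTHROPIC_API_KEY by default.
const chat = new ChatAnthropic({ model: 'claude-3-5-sonnet-latest', temperature: 0 });

// Standard Runnable interface: invoke with (role, content) tuples.
const reply = await chat.invoke([
  ['system', 'You are a terse assistant.'],
  ['human', 'Say hello in five words or fewer.'],
]);
console.log(reply.content);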
package/src/llm/anthropic/utils/message_inputs.ts
CHANGED
@@ -35,15 +35,15 @@ function _formatImage(imageUrl: string): { type: string; media_type: string; dat
   } as any;
 }

-function
+function _ensureMessageContents(
   messages: BaseMessage[]
 ): (SystemMessage | HumanMessage | AIMessage)[] {
   // Merge runs of human/tool messages into single human messages with content blocks.
-  const
+  const updatedMsgs = [];
   for (const message of messages) {
     if (message._getType() === 'tool') {
       if (typeof message.content === 'string') {
-        const previousMessage =
+        const previousMessage = updatedMsgs[updatedMsgs.length - 1] as BaseMessage | undefined;
         if (
           previousMessage &&
           previousMessage._getType() === 'human' &&
@@ -59,7 +59,7 @@ function _mergeMessages(
           });
         } else {
           // If not, we create a new human message with the tool result.
-
+          updatedMsgs.push(
             new HumanMessage({
               content: [
                 {
@@ -72,7 +72,7 @@ function _mergeMessages(
           );
         }
       } else {
-
+        updatedMsgs.push(
           new HumanMessage({
             content: [
               {
@@ -85,31 +85,10 @@ function _mergeMessages(
         );
       }
     } else {
-
-      if (
-        previousMessage &&
-        previousMessage._getType() === 'human' &&
-        message._getType() === 'human'
-      ) {
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        let combinedContent: Record<string, any>[];
-        if (typeof previousMessage.content === 'string') {
-          combinedContent = [{ type: 'text', text: previousMessage.content }];
-        } else {
-          combinedContent = previousMessage.content;
-        }
-        if (typeof message.content === 'string') {
-          combinedContent.push({ type: 'text', text: message.content });
-        } else {
-          combinedContent = combinedContent.concat(message.content);
-        }
-        previousMessage.content = combinedContent;
-      } else {
-        merged.push(message);
-      }
+      updatedMsgs.push(message);
     }
   }
-  return
+  return updatedMsgs;
 }

 export function _convertLangChainToolCallToAnthropic(
@@ -205,7 +184,7 @@ function _formatContent(content: MessageContent): string | Record<string, any>[]
 export function _convertMessagesToAnthropicPayload(
   messages: BaseMessage[]
 ): AnthropicMessageCreateParams {
-  const mergedMessages =
+  const mergedMessages = _ensureMessageContents(messages);
   let system;
   if (mergedMessages.length > 0 && mergedMessages[0]._getType() === 'system') {
     system = messages[0].content;
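The practical effect of these hunks: _mergeMessages becomes _ensureMessageContents, string tool results are still wrapped as Anthropic tool_result blocks under a user turn, but adjacent human messages are no longer merged into one (the branch removed above). A rough sketch of how the exported _convertMessagesToAnthropicPayload behaves after the change, using the package's own path alias; the shape in the trailing comment is inferred from the code shown here, not from running it:

import { HumanMessage, AIMessage, ToolMessage } from '@langchain/core/messages';
import { _convertMessagesToAnthropicPayload } from '@/llm/anthropic/utils/message_inputs';

// A typical tool round-trip: the ToolMessage is folded into a user-role
// 'tool_result' block by _ensureMessageContents before hitting the API shape.
const payload = _convertMessagesToAnthropicPayload([
  new HumanMessage('What is the weather in Boston?'),
  new AIMessage({
    content: '',
    tool_calls: [{ id: 'toolu_01', name: 'get_weather', args: { city: 'Boston' } }],
  }),
  new ToolMessage({ content: '72°F and sunny', tool_call_id: 'toolu_01' }),
]);
// payload.messages ≈ [
//   { role: 'user', content: 'What is the weather in Boston?' },
//   { role: 'assistant', content: [{ type: 'tool_use', id: 'toolu_01', name: 'get_weather', input: { city: 'Boston' } }] },
//   { role: 'user', content: [{ type: 'tool_result', content: '72°F and sunny', tool_use_id: 'toolu_01' }] },
// ]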
package/src/llm/providers.ts
CHANGED
@@ -2,13 +2,13 @@
 import { ChatOpenAI } from '@langchain/openai';
 import { ChatOllama } from '@langchain/ollama';
 import { ChatBedrockConverse } from '@langchain/aws';
-
+import { ChatAnthropic } from '@langchain/anthropic';
 import { ChatMistralAI } from '@langchain/mistralai';
 import { ChatVertexAI } from '@langchain/google-vertexai';
 import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
 import type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';
 import { Providers } from '@/common';
-import { CustomAnthropic } from '@/llm/anthropic/llm';
+// import { CustomAnthropic } from '@/llm/anthropic/llm';

 export const llmProviders: Partial<ChatModelConstructorMap> = {
   [Providers.OPENAI]: ChatOpenAI,
@@ -17,8 +17,8 @@ export const llmProviders: Partial<ChatModelConstructorMap> = {
   [Providers.BEDROCK_LEGACY]: BedrockChat,
   [Providers.MISTRALAI]: ChatMistralAI,
   [Providers.BEDROCK]: ChatBedrockConverse,
-  [Providers.ANTHROPIC]: CustomAnthropic,
   // [Providers.ANTHROPIC]: CustomAnthropic,
+  [Providers.ANTHROPIC]: ChatAnthropic,
 };

 export const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);
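For consumers of getChatModelClass, the observable change is which constructor comes back for Providers.ANTHROPIC. A small sketch using the aliases from the file above; the config object is illustrative only, since the exact ProviderOptionsMap entry lives in the package's types:

import { Providers } from '@/common';
import { getChatModelClass } from '@/llm/providers';

// 1.8.1 returned the in-house CustomAnthropic wrapper; 1.8.3 returns
// ChatAnthropic from @langchain/anthropic directly.
const AnthropicChatModel = getChatModelClass(Providers.ANTHROPIC);
const chat = new AnthropicChatModel({ model: 'claude-3-5-sonnet-latest' }); // placeholder model id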
package/src/proto/collab_design_v5.ts
CHANGED
@@ -77,7 +77,7 @@ export class CollaborativeProcessor {
   }

   private async createGraph(): Promise<t.CompiledWorkflow> {
-    const agentStateChannels: StateGraphArgs['channels'] = {
+    const agentStateChannels: StateGraphArgs<AgentStateChannels>['channels'] = {
       messages: {
         value: (x?: BaseMessage[], y?: BaseMessage[]) => (x ?? []).concat(y ?? []),
         default: () => [],
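The only change in this hunk is the added generic parameter, which gives the compiler a concrete state shape to check each channel reducer against. A sketch of what that typing looks like, assuming StateGraphArgs comes from @langchain/langgraph and an AgentStateChannels shaped like the usual message accumulator; the package's real interface may differ:

import type { BaseMessage } from '@langchain/core/messages';
import type { StateGraphArgs } from '@langchain/langgraph';

// Assumed shape for illustration; the package defines its own AgentStateChannels.
interface AgentStateChannels {
  messages: BaseMessage[];
}

// With the generic supplied, a misspelled channel key or a reducer with the
// wrong element type becomes a compile-time error instead of passing unchecked.
const agentStateChannels: StateGraphArgs<AgentStateChannels>['channels'] = {
  messages: {
    value: (x?: BaseMessage[], y?: BaseMessage[]) => (x ?? []).concat(y ?? []),
    default: () => [],
  },
};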
package/dist/cjs/llm/anthropic/llm.cjs
DELETED
@@ -1,117 +0,0 @@
-'use strict';
-
-var messages = require('@langchain/core/messages');
-var anthropic = require('@langchain/anthropic');
-var outputs = require('@langchain/core/outputs');
-var message_outputs = require('./utils/message_outputs.cjs');
-var message_inputs = require('./utils/message_inputs.cjs');
-var text = require('../text.cjs');
-
-function _toolsInParams(params) {
-    return !!(params.tools && params.tools.length > 0);
-}
-function extractToken(chunk) {
-    if (typeof chunk.content === 'string') {
-        return [chunk.content, 'string'];
-    }
-    else if (Array.isArray(chunk.content) &&
-        chunk.content.length >= 1 &&
-        'input' in chunk.content[0]) {
-        return typeof chunk.content[0].input === 'string'
-            ? [chunk.content[0].input, 'input']
-            : [JSON.stringify(chunk.content[0].input), 'input'];
-    }
-    else if (Array.isArray(chunk.content) &&
-        chunk.content.length >= 1 &&
-        'text' in chunk.content[0]) {
-        return [chunk.content[0].text, 'content'];
-    }
-    return [undefined];
-}
-function cloneChunk(text, tokenType, chunk) {
-    if (tokenType === 'string') {
-        return new messages.AIMessageChunk(Object.assign({}, chunk, { content: text }));
-    }
-    else if (tokenType === 'input') {
-        return chunk;
-    }
-    const content = chunk.content[0];
-    if (tokenType === 'content' && content.type === 'text') {
-        return new messages.AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));
-    }
-    else if (tokenType === 'content' && content.type === 'text_delta') {
-        return new messages.AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));
-    }
-    return chunk;
-}
-class CustomAnthropic extends anthropic.ChatAnthropicMessages {
-    _lc_stream_delay;
-    constructor(fields) {
-        super(fields);
-        this._lc_stream_delay = fields._lc_stream_delay ?? 25;
-    }
-    async *_streamResponseChunks(messages$1, options, runManager) {
-        const params = this.invocationParams(options);
-        const formattedMessages = message_inputs._convertMessagesToAnthropicPayload(messages$1);
-        const coerceContentToString = !_toolsInParams({
-            ...params,
-            ...formattedMessages,
-            stream: false,
-        });
-        const stream = await this.createStreamWithRetry({
-            ...params,
-            ...formattedMessages,
-            stream: true,
-        }, {
-            headers: options.headers,
-        });
-        for await (const data of stream) {
-            if (options.signal?.aborted) {
-                stream.controller.abort();
-                throw new Error('AbortError: User aborted the request.');
-            }
-            const shouldStreamUsage = this.streamUsage ?? options.streamUsage;
-            const result = message_outputs._makeMessageChunkFromAnthropicEvent(data, {
-                streamUsage: shouldStreamUsage,
-                coerceContentToString,
-            });
-            if (!result)
-                continue;
-            const { chunk } = result;
-            // Extract the text content token for text field and runManager.
-            const [token = '', tokenType] = extractToken(chunk);
-            const createGenerationChunk = (text, incomingChunk) => {
-                return new outputs.ChatGenerationChunk({
-                    message: new messages.AIMessageChunk({
-                        // Just yield chunk as it is and tool_use will be concat by BaseChatModel._generateUncached().
-                        content: incomingChunk.content,
-                        additional_kwargs: incomingChunk.additional_kwargs,
-                        tool_call_chunks: incomingChunk.tool_call_chunks,
-                        usage_metadata: shouldStreamUsage ? incomingChunk.usage_metadata : undefined,
-                        response_metadata: incomingChunk.response_metadata,
-                        id: incomingChunk.id,
-                    }),
-                    text,
-                });
-            };
-            if (!tokenType || tokenType === 'input') {
-                const generationChunk = createGenerationChunk(token, chunk);
-                yield generationChunk;
-                await runManager?.handleLLMNewToken(token, undefined, undefined, undefined, undefined, { chunk: generationChunk });
-                continue;
-            }
-            const textStream = new text.TextStream(token, {
-                delay: this._lc_stream_delay,
-            });
-            for await (const currentToken of textStream.generateText()) {
-                const newChunk = cloneChunk(currentToken, tokenType, chunk);
-                const generationChunk = createGenerationChunk(currentToken, newChunk);
-                yield generationChunk;
-                await runManager?.handleLLMNewToken(token, undefined, undefined, undefined, undefined, { chunk: generationChunk });
-            }
-        }
-    }
-}
-
-exports.CustomAnthropic = CustomAnthropic;
-//# sourceMappingURL=llm.cjs.map
package/dist/cjs/llm/anthropic/llm.cjs.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"llm.cjs","sources":["../../../../src/llm/anthropic/llm.ts"],"sourcesContent":["import { AIMessageChunk } from '@langchain/core/messages';\nimport { ChatAnthropicMessages } from '@langchain/anthropic';\nimport { ChatGenerationChunk } from '@langchain/core/outputs';\nimport type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';\nimport type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';\nimport type { AnthropicInput } from '@langchain/anthropic';\nimport type { AnthropicMessageCreateParams } from '@/llm/anthropic/types';\nimport { _makeMessageChunkFromAnthropicEvent } from './utils/message_outputs';\nimport { _convertMessagesToAnthropicPayload } from './utils/message_inputs';\nimport { TextStream } from '@/llm/text';\n\nfunction _toolsInParams(params: AnthropicMessageCreateParams): boolean {\n return !!(params.tools && params.tools.length > 0);\n}\n\nfunction extractToken(chunk: AIMessageChunk): [string, 'string' | 'input' | 'content'] | [undefined] {\n if (typeof chunk.content === 'string') {\n return [chunk.content, 'string'];\n } else if (\n Array.isArray(chunk.content) &&\n chunk.content.length >= 1 &&\n 'input' in chunk.content[0]\n ) {\n return typeof chunk.content[0].input === 'string'\n ? [chunk.content[0].input, 'input']\n : [JSON.stringify(chunk.content[0].input), 'input'];\n } else if (\n Array.isArray(chunk.content) &&\n chunk.content.length >= 1 &&\n 'text' in chunk.content[0]\n ) {\n return [chunk.content[0].text, 'content'];\n }\n return [undefined];\n}\n\nfunction cloneChunk(text: string, tokenType: string, chunk: AIMessageChunk): AIMessageChunk {\n if (tokenType === 'string') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: text }));\n } else if (tokenType === 'input') {\n return chunk;\n }\n const content = chunk.content[0] as MessageContentComplex;\n if (tokenType === 'content' && content.type === 'text') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));\n } else if (tokenType === 'content' && content.type === 'text_delta') {\n return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));\n }\n\n return chunk;\n}\n\nexport type CustomAnthropicInput = AnthropicInput & { _lc_stream_delay?: number };\n\nexport class CustomAnthropic extends ChatAnthropicMessages {\n _lc_stream_delay: number;\n constructor(fields: CustomAnthropicInput) {\n super(fields);\n this._lc_stream_delay = fields._lc_stream_delay ?? 25;\n }\n\n async *_streamResponseChunks(\n messages: BaseMessage[],\n options: this['ParsedCallOptions'],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const params = this.invocationParams(options);\n const formattedMessages = _convertMessagesToAnthropicPayload(messages);\n const coerceContentToString = !_toolsInParams({\n ...params,\n ...formattedMessages,\n stream: false,\n });\n\n const stream = await this.createStreamWithRetry(\n {\n ...params,\n ...formattedMessages,\n stream: true,\n },\n {\n headers: options.headers,\n }\n );\n\n for await (const data of stream) {\n if (options.signal?.aborted) {\n stream.controller.abort();\n throw new Error('AbortError: User aborted the request.');\n }\n const shouldStreamUsage = this.streamUsage ?? 
options.streamUsage;\n const result = _makeMessageChunkFromAnthropicEvent(data, {\n streamUsage: shouldStreamUsage,\n coerceContentToString,\n });\n if (!result) continue;\n\n const { chunk } = result;\n\n // Extract the text content token for text field and runManager.\n const [token = '', tokenType] = extractToken(chunk);\n const createGenerationChunk = (text: string, incomingChunk: AIMessageChunk): ChatGenerationChunk => {\n return new ChatGenerationChunk({\n message: new AIMessageChunk({\n // Just yield chunk as it is and tool_use will be concat by BaseChatModel._generateUncached().\n content: incomingChunk.content,\n additional_kwargs: incomingChunk.additional_kwargs,\n tool_call_chunks: incomingChunk.tool_call_chunks,\n usage_metadata: shouldStreamUsage ? incomingChunk.usage_metadata : undefined,\n response_metadata: incomingChunk.response_metadata,\n id: incomingChunk.id,\n }),\n text,\n });\n };\n\n if (!tokenType || tokenType === 'input') {\n const generationChunk = createGenerationChunk(token, chunk);\n yield generationChunk;\n await runManager?.handleLLMNewToken(\n token,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n continue;\n }\n\n const textStream = new TextStream(token, {\n delay: this._lc_stream_delay,\n });\n for await (const currentToken of textStream.generateText()) {\n const newChunk = cloneChunk(currentToken, tokenType, chunk);\n const generationChunk = createGenerationChunk(currentToken, newChunk);\n yield generationChunk;\n\n await runManager?.handleLLMNewToken(\n token,\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n }\n }\n }\n\n}\n"],"names":["AIMessageChunk","ChatAnthropicMessages","messages","_convertMessagesToAnthropicPayload","_makeMessageChunkFromAnthropicEvent","ChatGenerationChunk","TextStream"],"mappings":";;;;;;;;;AAWA,SAAS,cAAc,CAAC,MAAoC,EAAA;AAC1D,IAAA,OAAO,CAAC,EAAE,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,YAAY,CAAC,KAAqB,EAAA;AACzC,IAAA,IAAI,OAAO,KAAK,CAAC,OAAO,KAAK,QAAQ,EAAE;AACrC,QAAA,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;KAClC;AAAM,SAAA,IACL,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,QAAA,KAAK,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC;QACzB,OAAO,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAC3B;QACA,OAAO,OAAO,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,KAAK,QAAQ;AAC/C,cAAE,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,EAAE,OAAO,CAAC;AACnC,cAAE,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC,CAAC;KACvD;AAAM,SAAA,IACL,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC;AAC5B,QAAA,KAAK,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC;QACzB,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,EAC1B;AACA,QAAA,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;KAC3C;IACD,OAAO,CAAC,SAAS,CAAC,CAAC;AACrB,CAAC;AAED,SAAS,UAAU,CAAC,IAAY,EAAE,SAAiB,EAAE,KAAqB,EAAA;AACxE,IAAA,IAAI,SAAS,KAAK,QAAQ,EAAE;AAC1B,QAAA,OAAO,IAAIA,uBAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;KACxE;AAAM,SAAA,IAAI,SAAS,KAAK,OAAO,EAAE;AAChC,QAAA,OAAO,KAAK,CAAC;KACd;IACD,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC,CAA0B,CAAC;IAC1D,IAAI,SAAS,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE;AACtD,QAAA,OAAO,IAAIA,uBAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC1G;SAAM,IAAI,SAAS,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,YAAY,EAAE;AACnE,QAAA,OAAO,IAAIA,uBAAc,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,CAAC,MAAM,CAAC,MAA
M,CAAC,EAAE,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;KAC1G;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAIK,MAAO,eAAgB,SAAQC,+BAAqB,CAAA;AACxD,IAAA,gBAAgB,CAAS;AACzB,IAAA,WAAA,CAAY,MAA4B,EAAA;QACtC,KAAK,CAAC,MAAM,CAAC,CAAC;QACd,IAAI,CAAC,gBAAgB,GAAG,MAAM,CAAC,gBAAgB,IAAI,EAAE,CAAC;KACvD;IAED,OAAO,qBAAqB,CAC1BC,UAAuB,EACvB,OAAkC,EAClC,UAAqC,EAAA;QAErC,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC;AAC9C,QAAA,MAAM,iBAAiB,GAAGC,iDAAkC,CAACD,UAAQ,CAAC,CAAC;AACvE,QAAA,MAAM,qBAAqB,GAAG,CAAC,cAAc,CAAC;AAC5C,YAAA,GAAG,MAAM;AACT,YAAA,GAAG,iBAAiB;AACpB,YAAA,MAAM,EAAE,KAAK;AACd,SAAA,CAAC,CAAC;AAEH,QAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,qBAAqB,CAC7C;AACE,YAAA,GAAG,MAAM;AACT,YAAA,GAAG,iBAAiB;AACpB,YAAA,MAAM,EAAE,IAAI;SACb,EACD;YACE,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,SAAA,CACF,CAAC;AAEF,QAAA,WAAW,MAAM,IAAI,IAAI,MAAM,EAAE;AAC/B,YAAA,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE;AAC3B,gBAAA,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC;AAC1B,gBAAA,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,MAAM,iBAAiB,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC;AAClE,YAAA,MAAM,MAAM,GAAGE,mDAAmC,CAAC,IAAI,EAAE;AACvD,gBAAA,WAAW,EAAE,iBAAiB;gBAC9B,qBAAqB;AACtB,aAAA,CAAC,CAAC;AACH,YAAA,IAAI,CAAC,MAAM;gBAAE,SAAS;AAEtB,YAAA,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,CAAC;;AAGzB,YAAA,MAAM,CAAC,KAAK,GAAG,EAAE,EAAE,SAAS,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;AACpD,YAAA,MAAM,qBAAqB,GAAG,CAAC,IAAY,EAAE,aAA6B,KAAyB;gBACjG,OAAO,IAAIC,2BAAmB,CAAC;oBAC7B,OAAO,EAAE,IAAIL,uBAAc,CAAC;;wBAE1B,OAAO,EAAE,aAAa,CAAC,OAAO;wBAC9B,iBAAiB,EAAE,aAAa,CAAC,iBAAiB;wBAClD,gBAAgB,EAAE,aAAa,CAAC,gBAAgB;wBAChD,cAAc,EAAE,iBAAiB,GAAG,aAAa,CAAC,cAAc,GAAG,SAAS;wBAC5E,iBAAiB,EAAE,aAAa,CAAC,iBAAiB;wBAClD,EAAE,EAAE,aAAa,CAAC,EAAE;qBACrB,CAAC;oBACF,IAAI;AACL,iBAAA,CAAC,CAAC;AACL,aAAC,CAAC;AAEF,YAAA,IAAI,CAAC,SAAS,IAAI,SAAS,KAAK,OAAO,EAAE;gBACvC,MAAM,eAAe,GAAG,qBAAqB,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC5D,gBAAA,MAAM,eAAe,CAAC;gBACtB,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,EACL,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B,CAAC;gBACF,SAAS;aACV;AAED,YAAA,MAAM,UAAU,GAAG,IAAIM,eAAU,CAAC,KAAK,EAAE;gBACvC,KAAK,EAAE,IAAI,CAAC,gBAAgB;AAC7B,aAAA,CAAC,CAAC;YACH,WAAW,MAAM,YAAY,IAAI,UAAU,CAAC,YAAY,EAAE,EAAE;gBAC1D,MAAM,QAAQ,GAAG,UAAU,CAAC,YAAY,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC;gBAC5D,MAAM,eAAe,GAAG,qBAAqB,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;AACtE,gBAAA,MAAM,eAAe,CAAC;gBAEtB,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,EACL,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B,CAAC;aACH;SACF;KACF;AAEF;;;;"}
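For context on what the deleted CustomAnthropic did: it re-split each incoming text delta through the package's internal TextStream, pacing the pieces by _lc_stream_delay (25 ms by default) before yielding each ChatGenerationChunk. A stand-in sketch of that re-chunking idea, not the package's actual TextStream implementation:

// Stand-in for the removed TextStream: re-emit a token in small pieces with a
// delay between them, which is roughly what _lc_stream_delay controlled.
async function* rechunk(token: string, delayMs = 25): AsyncGenerator<string> {
  for (const char of token) {
    await new Promise((resolve) => setTimeout(resolve, delayMs));
    yield char;
  }
}

// The removed _streamResponseChunks wrapped each text delta this way before
// building a ChatGenerationChunk; with stock ChatAnthropic, deltas are yielded
// as they arrive from the API instead.
for await (const piece of rechunk('Hello from the old streaming path')) {
  process.stdout.write(piece);
}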
package/dist/cjs/llm/anthropic/utils/message_inputs.cjs
DELETED
@@ -1,251 +0,0 @@
-'use strict';
-
-var messages = require('@langchain/core/messages');
-
-/**
- * This util file contains functions for converting LangChain messages to Anthropic messages.
- */
-function _formatImage(imageUrl) {
-    const regex = /^data:(image\/.+);base64,(.+)$/;
-    const match = imageUrl.match(regex);
-    if (match === null) {
-        throw new Error([
-            'Anthropic only supports base64-encoded images currently.',
-            'Example: data:image/png;base64,/9j/4AAQSk...',
-        ].join('\n\n'));
-    }
-    return {
-        type: 'base64',
-        media_type: match[1] ?? '',
-        data: match[2] ?? '',
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    };
-}
-function _mergeMessages(messages$1) {
-    // Merge runs of human/tool messages into single human messages with content blocks.
-    const merged = [];
-    for (const message of messages$1) {
-        if (message._getType() === 'tool') {
-            if (typeof message.content === 'string') {
-                const previousMessage = merged[merged.length - 1];
-                if (previousMessage &&
-                    previousMessage._getType() === 'human' &&
-                    Array.isArray(previousMessage.content) &&
-                    'type' in previousMessage.content[0] &&
-                    previousMessage.content[0].type === 'tool_result') {
-                    // If the previous message was a tool result, we merge this tool message into it.
-                    previousMessage.content.push({
-                        type: 'tool_result',
-                        content: message.content,
-                        tool_use_id: message.tool_call_id,
-                    });
-                }
-                else {
-                    // If not, we create a new human message with the tool result.
-                    merged.push(new messages.HumanMessage({
-                        content: [
-                            {
-                                type: 'tool_result',
-                                content: message.content,
-                                tool_use_id: message.tool_call_id,
-                            },
-                        ],
-                    }));
-                }
-            }
-            else {
-                merged.push(new messages.HumanMessage({
-                    content: [
-                        {
-                            type: 'tool_result',
-                            content: _formatContent(message.content),
-                            tool_use_id: message.tool_call_id,
-                        },
-                    ],
-                }));
-            }
-        }
-        else {
-            const previousMessage = merged[merged.length - 1];
-            if (previousMessage &&
-                previousMessage._getType() === 'human' &&
-                message._getType() === 'human') {
-                // eslint-disable-next-line @typescript-eslint/no-explicit-any
-                let combinedContent;
-                if (typeof previousMessage.content === 'string') {
-                    combinedContent = [{ type: 'text', text: previousMessage.content }];
-                }
-                else {
-                    combinedContent = previousMessage.content;
-                }
-                if (typeof message.content === 'string') {
-                    combinedContent.push({ type: 'text', text: message.content });
-                }
-                else {
-                    combinedContent = combinedContent.concat(message.content);
-                }
-                previousMessage.content = combinedContent;
-            }
-            else {
-                merged.push(message);
-            }
-        }
-    }
-    return merged;
-}
-function _convertLangChainToolCallToAnthropic(toolCall) {
-    if (toolCall.id === undefined) {
-        throw new Error('Anthropic requires all tool calls to have an "id".');
-    }
-    return {
-        type: 'tool_use',
-        id: toolCall.id,
-        name: toolCall.name,
-        input: toolCall.args,
-    };
-}
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function _formatContent(content) {
-    const toolTypes = ['tool_use', 'tool_result', 'input_json_delta'];
-    const textTypes = ['text', 'text_delta'];
-    if (typeof content === 'string') {
-        return content;
-    }
-    else {
-        const contentBlocks = content.map((contentPart) => {
-            const cacheControl = 'cache_control' in contentPart ? contentPart.cache_control : undefined;
-            if (contentPart.type === 'image_url') {
-                let source;
-                if (typeof contentPart.image_url === 'string') {
-                    source = _formatImage(contentPart.image_url);
-                }
-                else {
-                    source = _formatImage(contentPart.image_url.url);
-                }
-                return {
-                    type: 'image', // Explicitly setting the type as "image"
-                    source,
-                    ...(cacheControl ? { cache_control: cacheControl } : {}),
-                };
-            }
-            else if (textTypes.find((t) => t === contentPart.type) &&
-                'text' in contentPart) {
-                // Assuming contentPart is of type MessageContentText here
-                return {
-                    type: 'text', // Explicitly setting the type as "text"
-                    text: contentPart.text,
-                    ...(cacheControl ? { cache_control: cacheControl } : {}),
-                };
-            }
-            else if (toolTypes.find((t) => t === contentPart.type)) {
-                const contentPartCopy = { ...contentPart };
-                if ('index' in contentPartCopy) {
-                    // Anthropic does not support passing the index field here, so we remove it.
-                    delete contentPartCopy.index;
-                }
-                if (contentPartCopy.type === 'input_json_delta') {
-                    // `input_json_delta` type only represents yielding partial tool inputs
-                    // and is not a valid type for Anthropic messages.
-                    contentPartCopy.type = 'tool_use';
-                }
-                if ('input' in contentPartCopy) {
-                    // Anthropic tool use inputs should be valid objects, when applicable.
-                    try {
-                        contentPartCopy.input = JSON.parse(contentPartCopy.input);
-                    }
-                    catch {
-                        // no-op
-                    }
-                }
-                // TODO: Fix when SDK types are fixed
-                return {
-                    ...contentPartCopy,
-                    ...(cacheControl ? { cache_control: cacheControl } : {}),
-                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-                };
-            }
-            else {
-                throw new Error('Unsupported message content format');
-            }
-        });
-        return contentBlocks;
-    }
-}
-/**
- * Formats messages as a prompt for the model.
- * Used in LangSmith, export is important here.
- * @param messages The base messages to format as a prompt.
- * @returns The formatted prompt.
- */
-function _convertMessagesToAnthropicPayload(messages$1) {
-    const mergedMessages = _mergeMessages(messages$1);
-    let system;
-    if (mergedMessages.length > 0 && mergedMessages[0]._getType() === 'system') {
-        system = messages$1[0].content;
-    }
-    const conversationMessages = system !== undefined ? mergedMessages.slice(1) : mergedMessages;
-    const formattedMessages = conversationMessages.map((message) => {
-        let role;
-        if (message._getType() === 'human') {
-            role = 'user';
-        }
-        else if (message._getType() === 'ai') {
-            role = 'assistant';
-        }
-        else if (message._getType() === 'tool') {
-            role = 'user';
-        }
-        else if (message._getType() === 'system') {
-            throw new Error('System messages are only permitted as the first passed message.');
-        }
-        else {
-            throw new Error(`Message type "${message._getType()}" is not supported.`);
-        }
-        if (messages.isAIMessage(message) && !!message.tool_calls?.length) {
-            if (typeof message.content === 'string') {
-                if (message.content === '') {
-                    return {
-                        role,
-                        content: message.tool_calls.map(_convertLangChainToolCallToAnthropic),
-                    };
-                }
-                else {
-                    return {
-                        role,
-                        content: [
-                            { type: 'text', text: message.content },
-                            ...message.tool_calls.map(_convertLangChainToolCallToAnthropic),
-                        ],
-                    };
-                }
-            }
-            else {
-                const { content } = message;
-                const hasMismatchedToolCalls = !message.tool_calls.every((toolCall) => content.find((contentPart) => (contentPart.type === 'tool_use' ||
-                    contentPart.type === 'input_json_delta') &&
-                    contentPart.id === toolCall.id));
-                if (hasMismatchedToolCalls) {
-                    console.warn('The "tool_calls" field on a message is only respected if content is a string.');
-                }
-                return {
-                    role,
-                    content: _formatContent(message.content),
-                };
-            }
-        }
-        else {
-            return {
-                role,
-                content: _formatContent(message.content),
-            };
-        }
-    });
-    return {
-        messages: formattedMessages,
-        system,
-    };
-}
-
-exports._convertLangChainToolCallToAnthropic = _convertLangChainToolCallToAnthropic;
-exports._convertMessagesToAnthropicPayload = _convertMessagesToAnthropicPayload;
-//# sourceMappingURL=message_inputs.cjs.map