@librechat/agents 1.7.8 → 1.8.0

This diff represents the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (34)
  1. package/dist/cjs/llm/anthropic/llm.cjs +117 -0
  2. package/dist/cjs/llm/anthropic/llm.cjs.map +1 -0
  3. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +251 -0
  4. package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -0
  5. package/dist/cjs/llm/anthropic/utils/message_outputs.cjs +135 -0
  6. package/dist/cjs/llm/anthropic/utils/message_outputs.cjs.map +1 -0
  7. package/dist/cjs/llm/providers.cjs +3 -2
  8. package/dist/cjs/llm/providers.cjs.map +1 -1
  9. package/dist/cjs/llm/text.cjs +73 -0
  10. package/dist/cjs/llm/text.cjs.map +1 -0
  11. package/dist/esm/llm/anthropic/llm.mjs +115 -0
  12. package/dist/esm/llm/anthropic/llm.mjs.map +1 -0
  13. package/dist/esm/llm/anthropic/utils/message_inputs.mjs +248 -0
  14. package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -0
  15. package/dist/esm/llm/anthropic/utils/message_outputs.mjs +133 -0
  16. package/dist/esm/llm/anthropic/utils/message_outputs.mjs.map +1 -0
  17. package/dist/esm/llm/providers.mjs +3 -2
  18. package/dist/esm/llm/providers.mjs.map +1 -1
  19. package/dist/esm/llm/text.mjs +71 -0
  20. package/dist/esm/llm/text.mjs.map +1 -0
  21. package/dist/types/llm/anthropic/llm.d.ts +13 -0
  22. package/dist/types/llm/anthropic/types.d.ts +20 -0
  23. package/dist/types/llm/anthropic/utils/message_inputs.d.ts +14 -0
  24. package/dist/types/llm/anthropic/utils/message_outputs.d.ts +16 -0
  25. package/dist/types/llm/text.d.ts +21 -0
  26. package/package.json +6 -6
  27. package/src/llm/anthropic/llm.ts +151 -0
  28. package/src/llm/anthropic/types.ts +32 -0
  29. package/src/llm/anthropic/utils/message_inputs.ts +279 -0
  30. package/src/llm/anthropic/utils/message_outputs.ts +217 -0
  31. package/src/llm/providers.ts +4 -2
  32. package/src/llm/text.ts +90 -0
  33. package/src/scripts/code_exec.ts +1 -1
  34. package/src/scripts/code_exec_simple.ts +1 -1
package/dist/esm/llm/anthropic/utils/message_outputs.mjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"message_outputs.mjs","sources":["../../../../../src/llm/anthropic/utils/message_outputs.ts"],"sourcesContent":["/**\n * This util file contains functions for converting Anthropic messages to LangChain messages.\n */\nimport Anthropic from '@anthropic-ai/sdk';\nimport {\n AIMessage,\n UsageMetadata,\n AIMessageChunk,\n} from '@langchain/core/messages';\nimport { ToolCall } from '@langchain/core/messages/tool';\nimport { ChatGeneration } from '@langchain/core/outputs';\nimport { AnthropicMessageResponse } from '../types.js';\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport function extractToolCalls(content: Record<string, any>[]): ToolCall[] {\n const toolCalls: ToolCall[] = [];\n for (const block of content) {\n if (block.type === 'tool_use') {\n toolCalls.push({\n name: block.name,\n args: block.input,\n id: block.id,\n type: 'tool_call',\n });\n }\n }\n return toolCalls;\n}\n\nexport function _makeMessageChunkFromAnthropicEvent(\n data: Anthropic.Messages.RawMessageStreamEvent,\n fields: {\n streamUsage: boolean;\n coerceContentToString: boolean;\n }\n): {\n chunk: AIMessageChunk;\n} | null {\n if (data.type === 'message_start') {\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n const { content, usage, ...additionalKwargs } = data.message;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const filteredAdditionalKwargs: Record<string, any> = {};\n for (const [key, value] of Object.entries(additionalKwargs)) {\n if (value !== undefined && value !== null) {\n filteredAdditionalKwargs[key] = value;\n }\n }\n const usageMetadata: UsageMetadata = {\n input_tokens: usage.input_tokens,\n output_tokens: usage.output_tokens,\n total_tokens: usage.input_tokens + usage.output_tokens,\n };\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString ? '' : [],\n additional_kwargs: filteredAdditionalKwargs,\n usage_metadata: fields.streamUsage ? usageMetadata : undefined,\n id: data.message.id,\n }),\n };\n } else if (data.type === 'message_delta') {\n const usageMetadata: UsageMetadata = {\n input_tokens: 0,\n output_tokens: data.usage.output_tokens,\n total_tokens: data.usage.output_tokens,\n };\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString ? '' : [],\n additional_kwargs: { ...data.delta },\n usage_metadata: fields.streamUsage ? usageMetadata : undefined,\n }),\n };\n } else if (\n data.type === 'content_block_start' &&\n data.content_block.type === 'tool_use'\n ) {\n const toolCallContentBlock =\n data.content_block as Anthropic.Messages.ToolUseBlock;\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString\n ? ''\n : [\n {\n index: data.index,\n ...data.content_block,\n input: '',\n },\n ],\n additional_kwargs: {},\n tool_call_chunks: [\n {\n id: toolCallContentBlock.id,\n index: data.index,\n name: toolCallContentBlock.name,\n args: '',\n },\n ],\n }),\n };\n } else if (\n data.type === 'content_block_delta' &&\n data.delta.type === 'text_delta'\n ) {\n const content = data.delta.text;\n if (content !== undefined) {\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString\n ? content\n : [\n {\n index: data.index,\n ...data.delta,\n },\n ],\n additional_kwargs: {},\n }),\n };\n }\n } else if (\n data.type === 'content_block_delta' &&\n data.delta.type === 'input_json_delta'\n ) {\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString\n ? 
''\n : [\n {\n index: data.index,\n input: data.delta.partial_json,\n type: data.delta.type,\n },\n ],\n additional_kwargs: {},\n tool_call_chunks: [\n {\n index: data.index,\n args: data.delta.partial_json,\n },\n ],\n }),\n };\n } else if (\n data.type === 'content_block_start' &&\n data.content_block.type === 'text'\n ) {\n const content = data.content_block.text;\n if (content !== undefined) {\n return {\n chunk: new AIMessageChunk({\n content: fields.coerceContentToString\n ? content\n : [\n {\n index: data.index,\n ...data.content_block,\n },\n ],\n additional_kwargs: {},\n }),\n };\n }\n }\n\n return null;\n}\n\nexport function anthropicResponseToChatMessages(\n messages: AnthropicMessageResponse[],\n additionalKwargs: Record<string, unknown>\n): ChatGeneration[] {\n const usage: Record<string, number> | null | undefined =\n additionalKwargs.usage as Record<string, number> | null | undefined;\n const usageMetadata =\n usage != null\n ? {\n input_tokens: usage.input_tokens ?? 0,\n output_tokens: usage.output_tokens ?? 0,\n total_tokens: (usage.input_tokens ?? 0) + (usage.output_tokens ?? 0),\n }\n : undefined;\n if (messages.length === 1 && messages[0].type === 'text') {\n return [\n {\n text: messages[0].text,\n message: new AIMessage({\n content: messages[0].text,\n additional_kwargs: additionalKwargs,\n usage_metadata: usageMetadata,\n response_metadata: additionalKwargs,\n id: additionalKwargs.id as string,\n }),\n },\n ];\n } else {\n const toolCalls = extractToolCalls(messages);\n const generations: ChatGeneration[] = [\n {\n text: '',\n message: new AIMessage({\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n content: messages as any,\n additional_kwargs: additionalKwargs,\n tool_calls: toolCalls,\n usage_metadata: usageMetadata,\n response_metadata: additionalKwargs,\n id: additionalKwargs.id as string,\n }),\n },\n ];\n return generations;\n 
}\n}"],"names":[],"mappings":";;AA6BgB,SAAA,mCAAmC,CACjD,IAA8C,EAC9C,MAGC,EAAA;AAID,IAAA,IAAI,IAAI,CAAC,IAAI,KAAK,eAAe,EAAE;;AAEjC,QAAA,MAAM,EAAE,OAAO,EAAE,KAAK,EAAE,GAAG,gBAAgB,EAAE,GAAG,IAAI,CAAC,OAAO,CAAC;;QAE7D,MAAM,wBAAwB,GAAwB,EAAE,CAAC;AACzD,QAAA,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,gBAAgB,CAAC,EAAE;YAC3D,IAAI,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,EAAE;AACzC,gBAAA,wBAAwB,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;aACvC;SACF;AACD,QAAA,MAAM,aAAa,GAAkB;YACnC,YAAY,EAAE,KAAK,CAAC,YAAY;YAChC,aAAa,EAAE,KAAK,CAAC,aAAa;AAClC,YAAA,YAAY,EAAE,KAAK,CAAC,YAAY,GAAG,KAAK,CAAC,aAAa;SACvD,CAAC;QACF,OAAO;YACL,KAAK,EAAE,IAAI,cAAc,CAAC;gBACxB,OAAO,EAAE,MAAM,CAAC,qBAAqB,GAAG,EAAE,GAAG,EAAE;AAC/C,gBAAA,iBAAiB,EAAE,wBAAwB;gBAC3C,cAAc,EAAE,MAAM,CAAC,WAAW,GAAG,aAAa,GAAG,SAAS;AAC9D,gBAAA,EAAE,EAAE,IAAI,CAAC,OAAO,CAAC,EAAE;aACpB,CAAC;SACH,CAAC;KACH;AAAM,SAAA,IAAI,IAAI,CAAC,IAAI,KAAK,eAAe,EAAE;AACxC,QAAA,MAAM,aAAa,GAAkB;AACnC,YAAA,YAAY,EAAE,CAAC;AACf,YAAA,aAAa,EAAE,IAAI,CAAC,KAAK,CAAC,aAAa;AACvC,YAAA,YAAY,EAAE,IAAI,CAAC,KAAK,CAAC,aAAa;SACvC,CAAC;QACF,OAAO;YACL,KAAK,EAAE,IAAI,cAAc,CAAC;gBACxB,OAAO,EAAE,MAAM,CAAC,qBAAqB,GAAG,EAAE,GAAG,EAAE;AAC/C,gBAAA,iBAAiB,EAAE,EAAE,GAAG,IAAI,CAAC,KAAK,EAAE;gBACpC,cAAc,EAAE,MAAM,CAAC,WAAW,GAAG,aAAa,GAAG,SAAS;aAC/D,CAAC;SACH,CAAC;KACH;AAAM,SAAA,IACL,IAAI,CAAC,IAAI,KAAK,qBAAqB;AACnC,QAAA,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,UAAU,EACtC;AACA,QAAA,MAAM,oBAAoB,GACxB,IAAI,CAAC,aAAgD,CAAC;QACxD,OAAO;YACL,KAAK,EAAE,IAAI,cAAc,CAAC;gBACxB,OAAO,EAAE,MAAM,CAAC,qBAAqB;AACnC,sBAAE,EAAE;AACJ,sBAAE;AACA,wBAAA;4BACE,KAAK,EAAE,IAAI,CAAC,KAAK;4BACjB,GAAG,IAAI,CAAC,aAAa;AACrB,4BAAA,KAAK,EAAE,EAAE;AACV,yBAAA;AACF,qBAAA;AACH,gBAAA,iBAAiB,EAAE,EAAE;AACrB,gBAAA,gBAAgB,EAAE;AAChB,oBAAA;wBACE,EAAE,EAAE,oBAAoB,CAAC,EAAE;wBAC3B,KAAK,EAAE,IAAI,CAAC,KAAK;wBACjB,IAAI,EAAE,oBAAoB,CAAC,IAAI;AAC/B,wBAAA,IAAI,EAAE,EAAE;AACT,qBAAA;AACF,iBAAA;aACF,CAAC;SACH,CAAC;KACH;AAAM,SAAA,IACL,IAAI,CAAC,IAAI,KAAK,qBAAqB;AACnC,QAAA,IAAI,CAAC,KAAK,CAAC,IAAI,KAAK,YAAY,EAChC;AACA,QAAA,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC;AAChC,QAAA,IAAI,OAAO,KAAK,SAAS,EAAE;YACzB,OAAO;gBACL,KAAK,EAAE,IAAI,cAAc,CAAC;oBACxB,OAAO,EAAE,MAAM,CAAC,qBAAqB;AACnC,0BAAE,OAAO;AACT,0BAAE;AACA,4BAAA;gCACE,KAAK,EAAE,IAAI,CAAC,KAAK;gCACjB,GAAG,IAAI,CAAC,KAAK;AACd,6BAAA;AACF,yBAAA;AACH,oBAAA,iBAAiB,EAAE,EAAE;iBACtB,CAAC;aACH,CAAC;SACH;KACF;AAAM,SAAA,IACL,IAAI,CAAC,IAAI,KAAK,qBAAqB;AACnC,QAAA,IAAI,CAAC,KAAK,CAAC,IAAI,KAAK,kBAAkB,EACtC;QACA,OAAO;YACL,KAAK,EAAE,IAAI,cAAc,CAAC;gBACxB,OAAO,EAAE,MAAM,CAAC,qBAAqB;AACnC,sBAAE,EAAE;AACJ,sBAAE;AACA,wBAAA;4BACE,KAAK,EAAE,IAAI,CAAC,KAAK;AACjB,4BAAA,KAAK,EAAE,IAAI,CAAC,KAAK,CAAC,YAAY;AAC9B,4BAAA,IAAI,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI;AACtB,yBAAA;AACF,qBAAA;AACH,gBAAA,iBAAiB,EAAE,EAAE;AACrB,gBAAA,gBAAgB,EAAE;AAChB,oBAAA;wBACE,KAAK,EAAE,IAAI,CAAC,KAAK;AACjB,wBAAA,IAAI,EAAE,IAAI,CAAC,KAAK,CAAC,YAAY;AAC9B,qBAAA;AACF,iBAAA;aACF,CAAC;SACH,CAAC;KACH;AAAM,SAAA,IACL,IAAI,CAAC,IAAI,KAAK,qBAAqB;AACnC,QAAA,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,MAAM,EAClC;AACA,QAAA,MAAM,OAAO,GAAG,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC;AACxC,QAAA,IAAI,OAAO,KAAK,SAAS,EAAE;YACzB,OAAO;gBACL,KAAK,EAAE,IAAI,cAAc,CAAC;oBACxB,OAAO,EAAE,MAAM,CAAC,qBAAqB;AACnC,0BAAE,OAAO;AACT,0BAAE;AACA,4BAAA;gCACE,KAAK,EAAE,IAAI,CAAC,KAAK;gCACjB,GAAG,IAAI,CAAC,aAAa;AACtB,6BAAA;AACF,yBAAA;AACH,oBAAA,iBAAiB,EAAE,EAAE;iBACtB,CAAC;aACH,CAAC;SACH;KACF;AAED,IAAA,OAAO,IAAI,CAAC;AACd;;;;"}
package/dist/esm/llm/providers.mjs CHANGED
@@ -1,11 +1,11 @@
  import { ChatOpenAI } from '@langchain/openai';
  import { ChatOllama } from '@langchain/ollama';
  import { ChatBedrockConverse } from '@langchain/aws';
- import { ChatAnthropic } from '@langchain/anthropic';
  import { ChatMistralAI } from '@langchain/mistralai';
  import { ChatVertexAI } from '@langchain/google-vertexai';
  import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
  import { Providers } from '../common/enum.mjs';
+ import { CustomAnthropic } from './anthropic/llm.mjs';

  // src/llm/providers.ts
  const llmProviders = {
@@ -15,7 +15,8 @@ const llmProviders = {
  [Providers.BEDROCK_LEGACY]: BedrockChat,
  [Providers.MISTRALAI]: ChatMistralAI,
  [Providers.BEDROCK]: ChatBedrockConverse,
- [Providers.ANTHROPIC]: ChatAnthropic,
+ [Providers.ANTHROPIC]: CustomAnthropic,
+ // [Providers.ANTHROPIC]: CustomAnthropic,
  };
  const manualToolStreamProviders = new Set([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);
  const getChatModelClass = (provider) => {
package/dist/esm/llm/providers.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"providers.mjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOpenAI } from '@langchain/openai';\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatBedrockConverse } from '@langchain/aws';\nimport { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';\nimport { Providers } from '@/common';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.BEDROCK]: ChatBedrockConverse,\n [Providers.ANTHROPIC]: ChatAnthropic,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};"],"names":[],"mappings":";;;;;;;;;AAAA;AAWa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,cAAc,GAAG,WAAW;AACvC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,aAAa;AACpC,IAAA,CAAC,SAAS,CAAC,OAAO,GAAG,mBAAmB;AACxC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,aAAa;EACpC;MAEW,yBAAyB,GAAG,IAAI,GAAG,CAAqB,CAAC,SAAS,CAAC,SAAS,EAAE,SAAS,CAAC,OAAO,EAAE,SAAS,CAAC,MAAM,CAAC,EAAE;AAEpH,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;IAC9C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC,CAAC;KAC1D;AAED,IAAA,OAAO,cAAc,CAAC;AACxB;;;;"}
+ {"version":3,"file":"providers.mjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOpenAI } from '@langchain/openai';\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatBedrockConverse } from '@langchain/aws';\n// import { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';\nimport { Providers } from '@/common';\nimport { CustomAnthropic } from '@/llm/anthropic/llm';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.BEDROCK]: ChatBedrockConverse,\n [Providers.ANTHROPIC]: CustomAnthropic,\n // [Providers.ANTHROPIC]: CustomAnthropic,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};"],"names":[],"mappings":";;;;;;;;;AAAA;AAYa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,cAAc,GAAG,WAAW;AACvC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,aAAa;AACpC,IAAA,CAAC,SAAS,CAAC,OAAO,GAAG,mBAAmB;AACxC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,eAAe;;EAEtC;MAEW,yBAAyB,GAAG,IAAI,GAAG,CAAqB,CAAC,SAAS,CAAC,SAAS,EAAE,SAAS,CAAC,OAAO,EAAE,SAAS,CAAC,MAAM,CAAC,EAAE;AAEpH,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;IAC9C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC,CAAC;KAC1D;AAED,IAAA,OAAO,cAAc,CAAC;AACxB;;;;"}
package/dist/esm/llm/text.mjs ADDED
@@ -0,0 +1,71 @@
+ import { Readable } from 'stream';
+
+ /* eslint-disable no-console */
+ class TextStream extends Readable {
+ text;
+ currentIndex;
+ minChunkSize;
+ maxChunkSize;
+ delay;
+ constructor(text, options = {}) {
+ super(options);
+ this.text = text;
+ this.currentIndex = 0;
+ this.minChunkSize = options.minChunkSize ?? 2;
+ this.maxChunkSize = options.maxChunkSize ?? 4;
+ this.delay = options.delay ?? 20; // Time in milliseconds
+ }
+ _read() {
+ const { delay, minChunkSize, maxChunkSize } = this;
+ if (this.currentIndex < this.text.length) {
+ setTimeout(() => {
+ const remainingChars = this.text.length - this.currentIndex;
+ const chunkSize = Math.min(this.randomInt(minChunkSize, maxChunkSize + 1), remainingChars);
+ const chunk = this.text.slice(this.currentIndex, this.currentIndex + chunkSize);
+ this.push(chunk);
+ this.currentIndex += chunkSize;
+ }, delay);
+ }
+ else {
+ this.push(null); // signal end of data
+ }
+ }
+ randomInt(min, max) {
+ return Math.floor(Math.random() * (max - min)) + min;
+ }
+ async processTextStream(progressCallback) {
+ const streamPromise = new Promise((resolve, reject) => {
+ this.on('data', (chunk) => {
+ progressCallback(chunk.toString());
+ });
+ this.on('end', () => {
+ resolve();
+ });
+ this.on('error', (err) => {
+ reject(err);
+ });
+ });
+ try {
+ await streamPromise;
+ }
+ catch (err) {
+ console.error('[processTextStream] Error in text stream:', err);
+ // Handle the error appropriately, e.g., return an error message or throw an error
+ }
+ }
+ async *generateText(progressCallback) {
+ const { delay, minChunkSize, maxChunkSize } = this;
+ while (this.currentIndex < this.text.length) {
+ await new Promise(resolve => setTimeout(resolve, delay));
+ const remainingChars = this.text.length - this.currentIndex;
+ const chunkSize = Math.min(this.randomInt(minChunkSize, maxChunkSize + 1), remainingChars);
+ const chunk = this.text.slice(this.currentIndex, this.currentIndex + chunkSize);
+ progressCallback?.(chunk);
+ yield chunk;
+ this.currentIndex += chunkSize;
+ }
+ }
+ }
+
+ export { TextStream };
+ //# sourceMappingURL=text.mjs.map
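TextStream re-chunks a fixed string into random pieces (2-4 characters by default), pausing delay milliseconds between them; this is what later feeds the smoothed Anthropic streaming further down in this diff. A minimal usage sketch of its async generator follows; the sample text and option values are arbitrary:

// Sketch: consume TextStream via generateText(); values are illustrative.
import { TextStream } from '@/llm/text';

async function demo(): Promise<void> {
  const stream = new TextStream('Hello from the smoothed stream!', {
    minChunkSize: 2,
    maxChunkSize: 4,
    delay: 20, // milliseconds between chunks
  });
  for await (const chunk of stream.generateText()) {
    process.stdout.write(chunk); // emits the text a few characters at a time
  }
}

void demo();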
package/dist/esm/llm/text.mjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"text.mjs","sources":["../../../src/llm/text.ts"],"sourcesContent":["/* eslint-disable no-console */\nimport { Readable } from 'stream';\nimport type { ReadableOptions } from 'stream';\nexport interface TextStreamOptions extends ReadableOptions {\n minChunkSize?: number;\n maxChunkSize?: number;\n delay?: number;\n}\n\nexport type ProgressCallback = (chunk: string) => void;\nexport type PostChunkCallback = (chunk: string) => void;\n\nexport class TextStream extends Readable {\n private text: string;\n private currentIndex: number;\n private minChunkSize: number;\n private maxChunkSize: number;\n private delay: number;\n\n constructor(text: string, options: TextStreamOptions = {}) {\n super(options);\n this.text = text;\n this.currentIndex = 0;\n this.minChunkSize = options.minChunkSize ?? 2;\n this.maxChunkSize = options.maxChunkSize ?? 4;\n this.delay = options.delay ?? 20; // Time in milliseconds\n }\n\n _read(): void {\n const { delay, minChunkSize, maxChunkSize } = this;\n\n if (this.currentIndex < this.text.length) {\n setTimeout(() => {\n const remainingChars = this.text.length - this.currentIndex;\n const chunkSize = Math.min(this.randomInt(minChunkSize, maxChunkSize + 1), remainingChars);\n\n const chunk = this.text.slice(this.currentIndex, this.currentIndex + chunkSize);\n this.push(chunk);\n this.currentIndex += chunkSize;\n }, delay);\n } else {\n this.push(null); // signal end of data\n }\n }\n\n private randomInt(min: number, max: number): number {\n return Math.floor(Math.random() * (max - min)) + min;\n }\n\n async processTextStream(progressCallback: ProgressCallback): Promise<void> {\n const streamPromise = new Promise<void>((resolve, reject) => {\n this.on('data', (chunk) => {\n progressCallback(chunk.toString());\n });\n\n this.on('end', () => {\n resolve();\n });\n\n this.on('error', (err) => {\n reject(err);\n });\n });\n\n try {\n await streamPromise;\n } catch (err) {\n console.error('[processTextStream] Error in text stream:', err);\n // Handle the error appropriately, e.g., return an error message or throw an error\n }\n }\n\n async *generateText(progressCallback?: ProgressCallback): AsyncGenerator<string, void, unknown> {\n const { delay, minChunkSize, maxChunkSize } = this;\n\n while (this.currentIndex < this.text.length) {\n await new Promise(resolve => setTimeout(resolve, delay));\n\n const remainingChars = this.text.length - this.currentIndex;\n const chunkSize = Math.min(this.randomInt(minChunkSize, maxChunkSize + 1), remainingChars);\n\n const chunk = this.text.slice(this.currentIndex, this.currentIndex + chunkSize);\n\n progressCallback?.(chunk);\n\n yield chunk;\n this.currentIndex += chunkSize;\n }\n 
}\n}"],"names":[],"mappings":";;AAAA;AAYM,MAAO,UAAW,SAAQ,QAAQ,CAAA;AAC9B,IAAA,IAAI,CAAS;AACb,IAAA,YAAY,CAAS;AACrB,IAAA,YAAY,CAAS;AACrB,IAAA,YAAY,CAAS;AACrB,IAAA,KAAK,CAAS;IAEtB,WAAY,CAAA,IAAY,EAAE,OAAA,GAA6B,EAAE,EAAA;QACvD,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACjB,QAAA,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;QACtB,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC,YAAY,IAAI,CAAC,CAAC;QAC9C,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC,YAAY,IAAI,CAAC,CAAC;QAC9C,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,KAAK,IAAI,EAAE,CAAC;KAClC;IAED,KAAK,GAAA;QACH,MAAM,EAAE,KAAK,EAAE,YAAY,EAAE,YAAY,EAAE,GAAG,IAAI,CAAC;QAEnD,IAAI,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YACxC,UAAU,CAAC,MAAK;gBACd,MAAM,cAAc,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC;AAC5D,gBAAA,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,YAAY,EAAE,YAAY,GAAG,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;AAE3F,gBAAA,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,YAAY,GAAG,SAAS,CAAC,CAAC;AAChF,gBAAA,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACjB,gBAAA,IAAI,CAAC,YAAY,IAAI,SAAS,CAAC;aAChC,EAAE,KAAK,CAAC,CAAC;SACX;aAAM;AACL,YAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACjB;KACF;IAEO,SAAS,CAAC,GAAW,EAAE,GAAW,EAAA;AACxC,QAAA,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,GAAG,GAAG,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;KACtD;IAED,MAAM,iBAAiB,CAAC,gBAAkC,EAAA;QACxD,MAAM,aAAa,GAAG,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;YAC1D,IAAI,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAK,KAAI;AACxB,gBAAA,gBAAgB,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;AACrC,aAAC,CAAC,CAAC;AAEH,YAAA,IAAI,CAAC,EAAE,CAAC,KAAK,EAAE,MAAK;AAClB,gBAAA,OAAO,EAAE,CAAC;AACZ,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,KAAI;gBACvB,MAAM,CAAC,GAAG,CAAC,CAAC;AACd,aAAC,CAAC,CAAC;AACL,SAAC,CAAC,CAAC;AAEH,QAAA,IAAI;AACF,YAAA,MAAM,aAAa,CAAC;SACrB;QAAC,OAAO,GAAG,EAAE;AACZ,YAAA,OAAO,CAAC,KAAK,CAAC,2CAA2C,EAAE,GAAG,CAAC,CAAC;;SAEjE;KACF;AAED,IAAA,OAAO,YAAY,CAAC,gBAAmC,EAAA;QACrD,MAAM,EAAE,KAAK,EAAE,YAAY,EAAE,YAAY,EAAE,GAAG,IAAI,CAAC;QAEnD,OAAO,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAC3C,YAAA,MAAM,IAAI,OAAO,CAAC,OAAO,IAAI,UAAU,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC;YAEzD,MAAM,cAAc,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC;AAC5D,YAAA,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,YAAY,EAAE,YAAY,GAAG,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;AAE3F,YAAA,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,YAAY,GAAG,SAAS,CAAC,CAAC;AAEhF,YAAA,gBAAgB,GAAG,KAAK,CAAC,CAAC;AAE1B,YAAA,MAAM,KAAK,CAAC;AACZ,YAAA,IAAI,CAAC,YAAY,IAAI,SAAS,CAAC;SAChC;KACF;AACF;;;;"}
package/dist/types/llm/anthropic/llm.d.ts ADDED
@@ -0,0 +1,13 @@
+ import { ChatAnthropicMessages } from '@langchain/anthropic';
+ import { ChatGenerationChunk } from '@langchain/core/outputs';
+ import type { BaseMessage } from '@langchain/core/messages';
+ import type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
+ import type { AnthropicInput } from '@langchain/anthropic';
+ export type CustomAnthropicInput = AnthropicInput & {
+ _lc_stream_delay?: number;
+ };
+ export declare class CustomAnthropic extends ChatAnthropicMessages {
+ _lc_stream_delay: number;
+ constructor(fields: CustomAnthropicInput);
+ _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
+ }
package/dist/types/llm/anthropic/types.d.ts ADDED
@@ -0,0 +1,20 @@
+ import Anthropic from '@anthropic-ai/sdk';
+ import type { Tool as AnthropicTool } from '@anthropic-ai/sdk/resources';
+ import { BindToolsInput } from '@langchain/core/language_models/chat_models';
+ export type AnthropicToolResponse = {
+ type: 'tool_use';
+ id: string;
+ name: string;
+ input: Record<string, any>;
+ };
+ export type AnthropicMessageParam = Anthropic.MessageParam;
+ export type AnthropicMessageResponse = Anthropic.ContentBlock | AnthropicToolResponse;
+ export type AnthropicMessageCreateParams = Anthropic.MessageCreateParamsNonStreaming;
+ export type AnthropicStreamingMessageCreateParams = Anthropic.MessageCreateParamsStreaming;
+ export type AnthropicMessageStreamEvent = Anthropic.MessageStreamEvent;
+ export type AnthropicRequestOptions = Anthropic.RequestOptions;
+ export type AnthropicToolChoice = {
+ type: 'tool';
+ name: string;
+ } | 'any' | 'auto' | 'none' | string;
+ export type ChatAnthropicToolType = AnthropicTool | BindToolsInput;
package/dist/types/llm/anthropic/utils/message_inputs.d.ts ADDED
@@ -0,0 +1,14 @@
+ /**
+ * This util file contains functions for converting LangChain messages to Anthropic messages.
+ */
+ import { BaseMessage } from '@langchain/core/messages';
+ import { ToolCall } from '@langchain/core/messages/tool';
+ import type { AnthropicMessageCreateParams, AnthropicToolResponse } from '@/llm/anthropic/types';
+ export declare function _convertLangChainToolCallToAnthropic(toolCall: ToolCall): AnthropicToolResponse;
+ /**
+ * Formats messages as a prompt for the model.
+ * Used in LangSmith, export is important here.
+ * @param messages The base messages to format as a prompt.
+ * @returns The formatted prompt.
+ */
+ export declare function _convertMessagesToAnthropicPayload(messages: BaseMessage[]): AnthropicMessageCreateParams;
package/dist/types/llm/anthropic/utils/message_outputs.d.ts ADDED
@@ -0,0 +1,16 @@
+ /**
+ * This util file contains functions for converting Anthropic messages to LangChain messages.
+ */
+ import Anthropic from '@anthropic-ai/sdk';
+ import { AIMessageChunk } from '@langchain/core/messages';
+ import { ToolCall } from '@langchain/core/messages/tool';
+ import { ChatGeneration } from '@langchain/core/outputs';
+ import { AnthropicMessageResponse } from '../types.js';
+ export declare function extractToolCalls(content: Record<string, any>[]): ToolCall[];
+ export declare function _makeMessageChunkFromAnthropicEvent(data: Anthropic.Messages.RawMessageStreamEvent, fields: {
+ streamUsage: boolean;
+ coerceContentToString: boolean;
+ }): {
+ chunk: AIMessageChunk;
+ } | null;
+ export declare function anthropicResponseToChatMessages(messages: AnthropicMessageResponse[], additionalKwargs: Record<string, unknown>): ChatGeneration[];
package/dist/types/llm/text.d.ts ADDED
@@ -0,0 +1,21 @@
+ import { Readable } from 'stream';
+ import type { ReadableOptions } from 'stream';
+ export interface TextStreamOptions extends ReadableOptions {
+ minChunkSize?: number;
+ maxChunkSize?: number;
+ delay?: number;
+ }
+ export type ProgressCallback = (chunk: string) => void;
+ export type PostChunkCallback = (chunk: string) => void;
+ export declare class TextStream extends Readable {
+ private text;
+ private currentIndex;
+ private minChunkSize;
+ private maxChunkSize;
+ private delay;
+ constructor(text: string, options?: TextStreamOptions);
+ _read(): void;
+ private randomInt;
+ processTextStream(progressCallback: ProgressCallback): Promise<void>;
+ generateText(progressCallback?: ProgressCallback): AsyncGenerator<string, void, unknown>;
+ }
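The declaration above also exposes processTextStream, a callback-driven alternative to the generateText generator that resolves once the underlying Readable ends. A small sketch follows; the callback body and sample text are illustrative:

// Sketch: push-style consumption via processTextStream.
import { TextStream } from '@/llm/text';

async function echo(): Promise<void> {
  const stream = new TextStream('Callback-based consumption example.');
  await stream.processTextStream((chunk: string) => {
    process.stdout.write(chunk); // called for each emitted chunk
  });
}

void echo();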
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@librechat/agents",
- "version": "1.7.8",
+ "version": "1.8.0",
  "main": "./dist/cjs/main.cjs",
  "module": "./dist/esm/main.mjs",
  "types": "./dist/types/index.d.ts",
@@ -43,8 +43,8 @@
  "content": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/content.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
  "stream": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/stream.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
  "code_exec": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
- "code_exec_simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec_simple.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
- "simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/simple.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
+ "code_exec_simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/code_exec_simple.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
+ "simple": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/simple.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
  "memory": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/memory.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
  "tool-test": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/tools.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
  "abort": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/abort.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
@@ -71,9 +71,9 @@
  "@aws-sdk/types": "^3.609.0",
  "@langchain/anthropic": "^0.3.7",
  "@langchain/aws": "^0.1.1",
- "@langchain/community": "^0.3.11",
- "@langchain/core": "^0.3.16",
- "@langchain/google-vertexai": "^0.0.20",
+ "@langchain/community": "^0.3.14",
+ "@langchain/core": "^0.3.18",
+ "@langchain/google-vertexai": "^0.1.2",
  "@langchain/langgraph": "^0.2.19",
  "@langchain/mistralai": "^0.0.26",
  "@langchain/ollama": "^0.1.1",
package/src/llm/anthropic/llm.ts ADDED
@@ -0,0 +1,151 @@
+ import { AIMessageChunk } from '@langchain/core/messages';
+ import { ChatAnthropicMessages } from '@langchain/anthropic';
+ import { ChatGenerationChunk } from '@langchain/core/outputs';
+ import type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';
+ import type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
+ import type { AnthropicInput } from '@langchain/anthropic';
+ import type { AnthropicMessageCreateParams } from '@/llm/anthropic/types';
+ import { _makeMessageChunkFromAnthropicEvent } from './utils/message_outputs';
+ import { _convertMessagesToAnthropicPayload } from './utils/message_inputs';
+ import { TextStream } from '@/llm/text';
+
+ function _toolsInParams(params: AnthropicMessageCreateParams): boolean {
+ return !!(params.tools && params.tools.length > 0);
+ }
+
+ function extractToken(chunk: AIMessageChunk): [string, 'string' | 'input' | 'content'] | [undefined] {
+ if (typeof chunk.content === 'string') {
+ return [chunk.content, 'string'];
+ } else if (
+ Array.isArray(chunk.content) &&
+ chunk.content.length >= 1 &&
+ 'input' in chunk.content[0]
+ ) {
+ return typeof chunk.content[0].input === 'string'
+ ? [chunk.content[0].input, 'input']
+ : [JSON.stringify(chunk.content[0].input), 'input'];
+ } else if (
+ Array.isArray(chunk.content) &&
+ chunk.content.length >= 1 &&
+ 'text' in chunk.content[0]
+ ) {
+ return [chunk.content[0].text, 'content'];
+ }
+ return [undefined];
+ }
+
+ function cloneChunk(text: string, tokenType: string, chunk: AIMessageChunk): AIMessageChunk {
+ if (tokenType === 'string') {
+ return new AIMessageChunk(Object.assign({}, chunk, { content: text }));
+ } else if (tokenType === 'input') {
+ return chunk;
+ }
+ const content = chunk.content[0] as MessageContentComplex;
+ if (tokenType === 'content' && content.type === 'text') {
+ return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));
+ } else if (tokenType === 'content' && content.type === 'text_delta') {
+ return new AIMessageChunk(Object.assign({}, chunk, { content: [Object.assign({}, content, { text })] }));
+ }
+
+ return chunk;
+ }
+
+ export type CustomAnthropicInput = AnthropicInput & { _lc_stream_delay?: number };
+
+ export class CustomAnthropic extends ChatAnthropicMessages {
+ _lc_stream_delay: number;
+ constructor(fields: CustomAnthropicInput) {
+ super(fields);
+ this._lc_stream_delay = fields._lc_stream_delay ?? 25;
+ }
+
+ async *_streamResponseChunks(
+ messages: BaseMessage[],
+ options: this['ParsedCallOptions'],
+ runManager?: CallbackManagerForLLMRun
+ ): AsyncGenerator<ChatGenerationChunk> {
+ const params = this.invocationParams(options);
+ const formattedMessages = _convertMessagesToAnthropicPayload(messages);
+ const coerceContentToString = !_toolsInParams({
+ ...params,
+ ...formattedMessages,
+ stream: false,
+ });
+
+ const stream = await this.createStreamWithRetry(
+ {
+ ...params,
+ ...formattedMessages,
+ stream: true,
+ },
+ {
+ headers: options.headers,
+ }
+ );
+
+ for await (const data of stream) {
+ if (options.signal?.aborted) {
+ stream.controller.abort();
+ throw new Error('AbortError: User aborted the request.');
+ }
+ const shouldStreamUsage = this.streamUsage ?? options.streamUsage;
+ const result = _makeMessageChunkFromAnthropicEvent(data, {
+ streamUsage: shouldStreamUsage,
+ coerceContentToString,
+ });
+ if (!result) continue;
+
+ const { chunk } = result;
+
+ // Extract the text content token for text field and runManager.
+ const [token = '', tokenType] = extractToken(chunk);
+ const createGenerationChunk = (text: string, incomingChunk: AIMessageChunk): ChatGenerationChunk => {
+ return new ChatGenerationChunk({
+ message: new AIMessageChunk({
+ // Just yield chunk as it is and tool_use will be concat by BaseChatModel._generateUncached().
+ content: incomingChunk.content,
+ additional_kwargs: incomingChunk.additional_kwargs,
+ tool_call_chunks: incomingChunk.tool_call_chunks,
+ usage_metadata: shouldStreamUsage ? incomingChunk.usage_metadata : undefined,
+ response_metadata: incomingChunk.response_metadata,
+ id: incomingChunk.id,
+ }),
+ text,
+ });
+ };
+
+ if (!tokenType || tokenType === 'input') {
+ const generationChunk = createGenerationChunk(token, chunk);
+ yield generationChunk;
+ await runManager?.handleLLMNewToken(
+ token,
+ undefined,
+ undefined,
+ undefined,
+ undefined,
+ { chunk: generationChunk }
+ );
+ continue;
+ }
+
+ const textStream = new TextStream(token, {
+ delay: this._lc_stream_delay,
+ });
+ for await (const currentToken of textStream.generateText()) {
+ const newChunk = cloneChunk(currentToken, tokenType, chunk);
+ const generationChunk = createGenerationChunk(currentToken, newChunk);
+ yield generationChunk;
+
+ await runManager?.handleLLMNewToken(
+ token,
+ undefined,
+ undefined,
+ undefined,
+ undefined,
+ { chunk: generationChunk }
+ );
+ }
+ }
+ }
+
+ }
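CustomAnthropic overrides _streamResponseChunks so that each text delta from the Anthropic SDK stream is re-emitted through a TextStream, yielding smaller, evenly paced ChatGenerationChunks, while tool-call/input chunks are passed through unchanged. A hedged instantiation sketch follows; only _lc_stream_delay is introduced by this diff, and apiKey/model are assumed AnthropicInput fields:

// Sketch: stream with the new wrapper; option names other than
// _lc_stream_delay are assumptions, not defined in this diff.
import { CustomAnthropic } from '@/llm/anthropic/llm';

const llm = new CustomAnthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
  model: 'claude-3-5-sonnet-latest',
  _lc_stream_delay: 10, // ms between re-chunked text pieces (default 25)
});

async function run(): Promise<void> {
  const stream = await llm.stream('Write a haiku about streaming.');
  for await (const chunk of stream) {
    process.stdout.write(typeof chunk.content === 'string' ? chunk.content : '');
  }
}

void run();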
package/src/llm/anthropic/types.ts ADDED
@@ -0,0 +1,32 @@
+ // eslint-disable-next-line import/no-named-as-default
+ import Anthropic from '@anthropic-ai/sdk';
+ import type { Tool as AnthropicTool } from '@anthropic-ai/sdk/resources';
+ import { BindToolsInput } from '@langchain/core/language_models/chat_models';
+
+ export type AnthropicToolResponse = {
+ type: 'tool_use';
+ id: string;
+ name: string;
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ input: Record<string, any>;
+ };
+ export type AnthropicMessageParam = Anthropic.MessageParam;
+ export type AnthropicMessageResponse =
+ | Anthropic.ContentBlock
+ | AnthropicToolResponse;
+ export type AnthropicMessageCreateParams =
+ Anthropic.MessageCreateParamsNonStreaming;
+ export type AnthropicStreamingMessageCreateParams =
+ Anthropic.MessageCreateParamsStreaming;
+ export type AnthropicMessageStreamEvent = Anthropic.MessageStreamEvent;
+ export type AnthropicRequestOptions = Anthropic.RequestOptions;
+ export type AnthropicToolChoice =
+ | {
+ type: 'tool';
+ name: string;
+ }
+ | 'any'
+ | 'auto'
+ | 'none'
+ | string;
+ export type ChatAnthropicToolType = AnthropicTool | BindToolsInput;