@librechat/agents 3.0.0-rc1 → 3.0.0-rc11
This diff reflects the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- package/dist/cjs/common/enum.cjs +1 -0
- package/dist/cjs/common/enum.cjs.map +1 -1
- package/dist/cjs/graphs/Graph.cjs +7 -2
- package/dist/cjs/graphs/Graph.cjs.map +1 -1
- package/dist/cjs/graphs/MultiAgentGraph.cjs +229 -44
- package/dist/cjs/graphs/MultiAgentGraph.cjs.map +1 -1
- package/dist/cjs/llm/anthropic/index.cjs +21 -2
- package/dist/cjs/llm/anthropic/index.cjs.map +1 -1
- package/dist/cjs/llm/google/index.cjs +3 -0
- package/dist/cjs/llm/google/index.cjs.map +1 -1
- package/dist/cjs/llm/google/utils/common.cjs +13 -0
- package/dist/cjs/llm/google/utils/common.cjs.map +1 -1
- package/dist/cjs/llm/ollama/index.cjs +3 -0
- package/dist/cjs/llm/ollama/index.cjs.map +1 -1
- package/dist/cjs/llm/openai/index.cjs +53 -1
- package/dist/cjs/llm/openai/index.cjs.map +1 -1
- package/dist/cjs/llm/openai/utils/index.cjs +6 -1
- package/dist/cjs/llm/openai/utils/index.cjs.map +1 -1
- package/dist/cjs/llm/openrouter/index.cjs +5 -1
- package/dist/cjs/llm/openrouter/index.cjs.map +1 -1
- package/dist/cjs/llm/vertexai/index.cjs +1 -1
- package/dist/cjs/llm/vertexai/index.cjs.map +1 -1
- package/dist/cjs/main.cjs +3 -1
- package/dist/cjs/main.cjs.map +1 -1
- package/dist/cjs/messages/core.cjs +5 -1
- package/dist/cjs/messages/core.cjs.map +1 -1
- package/dist/cjs/messages/format.cjs +52 -34
- package/dist/cjs/messages/format.cjs.map +1 -1
- package/dist/cjs/messages/prune.cjs +28 -0
- package/dist/cjs/messages/prune.cjs.map +1 -1
- package/dist/cjs/run.cjs +28 -15
- package/dist/cjs/run.cjs.map +1 -1
- package/dist/cjs/stream.cjs +1 -1
- package/dist/cjs/stream.cjs.map +1 -1
- package/dist/cjs/tools/ToolNode.cjs +2 -0
- package/dist/cjs/tools/ToolNode.cjs.map +1 -1
- package/dist/cjs/tools/search/firecrawl.cjs +3 -1
- package/dist/cjs/tools/search/firecrawl.cjs.map +1 -1
- package/dist/cjs/tools/search/rerankers.cjs +8 -6
- package/dist/cjs/tools/search/rerankers.cjs.map +1 -1
- package/dist/cjs/tools/search/search.cjs +5 -5
- package/dist/cjs/tools/search/search.cjs.map +1 -1
- package/dist/cjs/tools/search/serper-scraper.cjs +132 -0
- package/dist/cjs/tools/search/serper-scraper.cjs.map +1 -0
- package/dist/cjs/tools/search/tool.cjs +46 -9
- package/dist/cjs/tools/search/tool.cjs.map +1 -1
- package/dist/cjs/utils/handlers.cjs +70 -0
- package/dist/cjs/utils/handlers.cjs.map +1 -0
- package/dist/esm/common/enum.mjs +1 -0
- package/dist/esm/common/enum.mjs.map +1 -1
- package/dist/esm/graphs/Graph.mjs +7 -2
- package/dist/esm/graphs/Graph.mjs.map +1 -1
- package/dist/esm/graphs/MultiAgentGraph.mjs +230 -45
- package/dist/esm/graphs/MultiAgentGraph.mjs.map +1 -1
- package/dist/esm/llm/anthropic/index.mjs +21 -2
- package/dist/esm/llm/anthropic/index.mjs.map +1 -1
- package/dist/esm/llm/google/index.mjs +3 -0
- package/dist/esm/llm/google/index.mjs.map +1 -1
- package/dist/esm/llm/google/utils/common.mjs +13 -0
- package/dist/esm/llm/google/utils/common.mjs.map +1 -1
- package/dist/esm/llm/ollama/index.mjs +3 -0
- package/dist/esm/llm/ollama/index.mjs.map +1 -1
- package/dist/esm/llm/openai/index.mjs +53 -1
- package/dist/esm/llm/openai/index.mjs.map +1 -1
- package/dist/esm/llm/openai/utils/index.mjs +6 -1
- package/dist/esm/llm/openai/utils/index.mjs.map +1 -1
- package/dist/esm/llm/openrouter/index.mjs +5 -1
- package/dist/esm/llm/openrouter/index.mjs.map +1 -1
- package/dist/esm/llm/vertexai/index.mjs +1 -1
- package/dist/esm/llm/vertexai/index.mjs.map +1 -1
- package/dist/esm/main.mjs +2 -1
- package/dist/esm/main.mjs.map +1 -1
- package/dist/esm/messages/core.mjs +5 -1
- package/dist/esm/messages/core.mjs.map +1 -1
- package/dist/esm/messages/format.mjs +52 -34
- package/dist/esm/messages/format.mjs.map +1 -1
- package/dist/esm/messages/prune.mjs +28 -0
- package/dist/esm/messages/prune.mjs.map +1 -1
- package/dist/esm/run.mjs +28 -15
- package/dist/esm/run.mjs.map +1 -1
- package/dist/esm/stream.mjs +1 -1
- package/dist/esm/stream.mjs.map +1 -1
- package/dist/esm/tools/ToolNode.mjs +2 -0
- package/dist/esm/tools/ToolNode.mjs.map +1 -1
- package/dist/esm/tools/search/firecrawl.mjs +3 -1
- package/dist/esm/tools/search/firecrawl.mjs.map +1 -1
- package/dist/esm/tools/search/rerankers.mjs +8 -6
- package/dist/esm/tools/search/rerankers.mjs.map +1 -1
- package/dist/esm/tools/search/search.mjs +5 -5
- package/dist/esm/tools/search/search.mjs.map +1 -1
- package/dist/esm/tools/search/serper-scraper.mjs +129 -0
- package/dist/esm/tools/search/serper-scraper.mjs.map +1 -0
- package/dist/esm/tools/search/tool.mjs +46 -9
- package/dist/esm/tools/search/tool.mjs.map +1 -1
- package/dist/esm/utils/handlers.mjs +68 -0
- package/dist/esm/utils/handlers.mjs.map +1 -0
- package/dist/types/common/enum.d.ts +2 -1
- package/dist/types/graphs/MultiAgentGraph.d.ts +12 -2
- package/dist/types/llm/anthropic/index.d.ts +3 -0
- package/dist/types/llm/google/index.d.ts +1 -0
- package/dist/types/llm/ollama/index.d.ts +1 -0
- package/dist/types/llm/openai/index.d.ts +14 -0
- package/dist/types/llm/openrouter/index.d.ts +4 -2
- package/dist/types/llm/vertexai/index.d.ts +1 -1
- package/dist/types/messages/format.d.ts +23 -20
- package/dist/types/run.d.ts +1 -1
- package/dist/types/tools/search/firecrawl.d.ts +2 -1
- package/dist/types/tools/search/rerankers.d.ts +4 -1
- package/dist/types/tools/search/search.d.ts +1 -2
- package/dist/types/tools/search/serper-scraper.d.ts +59 -0
- package/dist/types/tools/search/tool.d.ts +25 -4
- package/dist/types/tools/search/types.d.ts +31 -1
- package/dist/types/types/graph.d.ts +38 -4
- package/dist/types/types/llm.d.ts +1 -0
- package/dist/types/types/run.d.ts +5 -1
- package/dist/types/utils/handlers.d.ts +34 -0
- package/dist/types/utils/index.d.ts +1 -0
- package/package.json +11 -3
- package/src/common/enum.ts +1 -0
- package/src/graphs/Graph.ts +8 -2
- package/src/graphs/MultiAgentGraph.ts +267 -50
- package/src/llm/anthropic/index.ts +23 -2
- package/src/llm/google/index.ts +4 -0
- package/src/llm/google/utils/common.ts +14 -0
- package/src/llm/ollama/index.ts +3 -0
- package/src/llm/openai/index.ts +60 -1
- package/src/llm/openai/utils/index.ts +7 -1
- package/src/llm/openrouter/index.ts +15 -6
- package/src/llm/vertexai/index.ts +2 -2
- package/src/messages/core.ts +5 -2
- package/src/messages/format.ts +67 -39
- package/src/messages/formatMessage.test.ts +418 -2
- package/src/messages/prune.ts +51 -0
- package/src/run.ts +38 -27
- package/src/scripts/multi-agent-chain.ts +278 -0
- package/src/scripts/multi-agent-document-review-chain.ts +197 -0
- package/src/scripts/multi-agent-hybrid-flow.ts +310 -0
- package/src/scripts/multi-agent-parallel.ts +27 -23
- package/src/scripts/multi-agent-supervisor.ts +362 -0
- package/src/scripts/search.ts +5 -1
- package/src/scripts/test-custom-prompt-key.ts +145 -0
- package/src/scripts/test-handoff-input.ts +170 -0
- package/src/scripts/test-multi-agent-list-handoff.ts +261 -0
- package/src/scripts/test-tools-before-handoff.ts +233 -0
- package/src/scripts/tools.ts +4 -1
- package/src/stream.ts +4 -1
- package/src/tools/search/firecrawl.ts +5 -2
- package/src/tools/search/jina-reranker.test.ts +126 -0
- package/src/tools/search/rerankers.ts +11 -5
- package/src/tools/search/search.ts +6 -8
- package/src/tools/search/serper-scraper.ts +155 -0
- package/src/tools/search/tool.ts +49 -8
- package/src/tools/search/types.ts +46 -0
- package/src/types/graph.ts +51 -5
- package/src/types/llm.ts +1 -0
- package/src/types/run.ts +6 -1
- package/src/utils/handlers.ts +107 -0
- package/src/utils/index.ts +2 -1
- package/src/utils/llmConfig.ts +35 -1
- package/dist/types/scripts/abort.d.ts +0 -1
- package/dist/types/scripts/ant_web_search.d.ts +0 -1
- package/dist/types/scripts/args.d.ts +0 -7
- package/dist/types/scripts/caching.d.ts +0 -1
- package/dist/types/scripts/cli.d.ts +0 -1
- package/dist/types/scripts/cli2.d.ts +0 -1
- package/dist/types/scripts/cli3.d.ts +0 -1
- package/dist/types/scripts/cli4.d.ts +0 -1
- package/dist/types/scripts/cli5.d.ts +0 -1
- package/dist/types/scripts/code_exec.d.ts +0 -1
- package/dist/types/scripts/code_exec_files.d.ts +0 -1
- package/dist/types/scripts/code_exec_simple.d.ts +0 -1
- package/dist/types/scripts/content.d.ts +0 -1
- package/dist/types/scripts/empty_input.d.ts +0 -1
- package/dist/types/scripts/handoff-test.d.ts +0 -1
- package/dist/types/scripts/image.d.ts +0 -1
- package/dist/types/scripts/memory.d.ts +0 -1
- package/dist/types/scripts/multi-agent-conditional.d.ts +0 -1
- package/dist/types/scripts/multi-agent-parallel.d.ts +0 -1
- package/dist/types/scripts/multi-agent-sequence.d.ts +0 -1
- package/dist/types/scripts/multi-agent-test.d.ts +0 -1
- package/dist/types/scripts/search.d.ts +0 -1
- package/dist/types/scripts/simple.d.ts +0 -1
- package/dist/types/scripts/stream.d.ts +0 -1
- package/dist/types/scripts/thinking.d.ts +0 -1
- package/dist/types/scripts/tools.d.ts +0 -1
- package/dist/types/specs/spec.utils.d.ts +0 -1
- package/src/scripts/multi-agent-example-output.md +0 -110
package/dist/cjs/llm/anthropic/index.cjs.map:
@@ -1 +1 @@
The single-line sourcemap was regenerated. Its embedded sourcesContent for src/llm/anthropic/index.ts shows the substantive changes to CustomAnthropic in this release: a new top_k field, a setDirectFields(fields) helper called from the constructor, a static lc_name() override returning 'LibreChatAnthropic', and invocationParams reading this.top_k (instead of this.topK) both in the thinking-enabled guard and in the returned top_k parameter.
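The new sampling-parameter handling is easiest to read at the source level. The following excerpt is reproduced, with a brief comment added, from the updated src/llm/anthropic/index.ts embedded in the regenerated sourcemap; it is an excerpt of the CustomAnthropic class body, not a standalone module:

  static lc_name(): 'LibreChatAnthropic' {
    return 'LibreChatAnthropic';
  }

  // Applies constructor fields directly, treating -1 (and a temperature of 1) as "unset".
  setDirectFields(fields?: CustomAnthropicInput): void {
    this.temperature = fields?.temperature ?? undefined;
    this.topP = fields?.topP ?? undefined;
    this.top_k = fields?.topK;
    if (this.temperature === -1 || this.temperature === 1) {
      this.temperature = undefined;
    }
    if (this.topP === -1) {
      this.topP = undefined;
    }
    if (this.top_k === -1) {
      this.top_k = undefined;
    }
  }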
package/dist/cjs/llm/google/index.cjs:
@@ -72,6 +72,9 @@ class CustomChatGoogleGenerativeAI extends googleGenai.ChatGoogleGenerativeAI {
         });
         this.streamUsage = fields.streamUsage ?? this.streamUsage;
     }
+    static lc_name() {
+        return 'LibreChatGoogleGenerativeAI';
+    }
     invocationParams(options) {
         const params = super.invocationParams(options);
         if (this.thinkingConfig) {
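At the source level this corresponds to the 4-line addition listed for package/src/llm/google/index.ts; presumably it is the same static override in TypeScript, sketched below (the exact return type annotation is an assumption inferred from the compiled output above):

  static lc_name(): 'LibreChatGoogleGenerativeAI' {
    return 'LibreChatGoogleGenerativeAI';
  }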
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.cjs","sources":["../../../../src/llm/google/index.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/ban-ts-comment */\nimport { AIMessageChunk } from '@langchain/core/messages';\nimport { ChatGenerationChunk } from '@langchain/core/outputs';\nimport { ChatGoogleGenerativeAI } from '@langchain/google-genai';\nimport { getEnvironmentVariable } from '@langchain/core/utils/env';\nimport { GoogleGenerativeAI as GenerativeAI } from '@google/generative-ai';\nimport type {\n GenerateContentRequest,\n SafetySetting,\n} from '@google/generative-ai';\nimport type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';\nimport type { BaseMessage, UsageMetadata } from '@langchain/core/messages';\nimport type { GeminiGenerationConfig } from '@langchain/google-common';\nimport type { GeminiApiUsageMetadata } from './types';\nimport type { GoogleClientOptions } from '@/types';\nimport {\n convertResponseContentToChatGenerationChunk,\n convertBaseMessagesToContent,\n} from './utils/common';\n\nexport class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {\n thinkingConfig?: GeminiGenerationConfig['thinkingConfig'];\n constructor(fields: GoogleClientOptions) {\n super(fields);\n\n this.model = fields.model.replace(/^models\\//, '');\n\n this.maxOutputTokens = fields.maxOutputTokens ?? this.maxOutputTokens;\n\n if (this.maxOutputTokens != null && this.maxOutputTokens < 0) {\n throw new Error('`maxOutputTokens` must be a positive integer');\n }\n\n this.temperature = fields.temperature ?? this.temperature;\n if (\n this.temperature != null &&\n (this.temperature < 0 || this.temperature > 2)\n ) {\n throw new Error('`temperature` must be in the range of [0.0,2.0]');\n }\n\n this.topP = fields.topP ?? this.topP;\n if (this.topP != null && this.topP < 0) {\n throw new Error('`topP` must be a positive integer');\n }\n\n if (this.topP != null && this.topP > 1) {\n throw new Error('`topP` must be below 1.');\n }\n\n this.topK = fields.topK ?? this.topK;\n if (this.topK != null && this.topK < 0) {\n throw new Error('`topK` must be a positive integer');\n }\n\n this.stopSequences = fields.stopSequences ?? this.stopSequences;\n\n this.apiKey = fields.apiKey ?? getEnvironmentVariable('GOOGLE_API_KEY');\n if (this.apiKey == null || this.apiKey === '') {\n throw new Error(\n 'Please set an API key for Google GenerativeAI ' +\n 'in the environment variable GOOGLE_API_KEY ' +\n 'or in the `apiKey` field of the ' +\n 'ChatGoogleGenerativeAI constructor'\n );\n }\n\n this.safetySettings = fields.safetySettings ?? this.safetySettings;\n if (this.safetySettings && this.safetySettings.length > 0) {\n const safetySettingsSet = new Set(\n this.safetySettings.map((s) => s.category)\n );\n if (safetySettingsSet.size !== this.safetySettings.length) {\n throw new Error(\n 'The categories in `safetySettings` array must be unique'\n );\n }\n }\n\n this.thinkingConfig = fields.thinkingConfig ?? this.thinkingConfig;\n\n this.streaming = fields.streaming ?? this.streaming;\n this.json = fields.json;\n\n // @ts-ignore - Accessing private property from parent class\n this.client = new GenerativeAI(this.apiKey).getGenerativeModel(\n {\n model: this.model,\n safetySettings: this.safetySettings as SafetySetting[],\n generationConfig: {\n stopSequences: this.stopSequences,\n maxOutputTokens: this.maxOutputTokens,\n temperature: this.temperature,\n topP: this.topP,\n topK: this.topK,\n ...(this.json != null\n ? 
{ responseMimeType: 'application/json' }\n : {}),\n },\n },\n {\n apiVersion: fields.apiVersion,\n baseUrl: fields.baseUrl,\n customHeaders: fields.customHeaders,\n }\n );\n this.streamUsage = fields.streamUsage ?? this.streamUsage;\n }\n\n invocationParams(\n options?: this['ParsedCallOptions']\n ): Omit<GenerateContentRequest, 'contents'> {\n const params = super.invocationParams(options);\n if (this.thinkingConfig) {\n /** @ts-ignore */\n this.client.generationConfig = {\n /** @ts-ignore */\n ...this.client.generationConfig,\n /** @ts-ignore */\n thinkingConfig: this.thinkingConfig,\n };\n }\n return params;\n }\n\n async *_streamResponseChunks(\n messages: BaseMessage[],\n options: this['ParsedCallOptions'],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const prompt = convertBaseMessagesToContent(\n messages,\n this._isMultimodalModel,\n this.useSystemInstruction\n );\n let actualPrompt = prompt;\n if (prompt?.[0].role === 'system') {\n const [systemInstruction] = prompt;\n /** @ts-ignore */\n this.client.systemInstruction = systemInstruction;\n actualPrompt = prompt.slice(1);\n }\n const parameters = this.invocationParams(options);\n const request = {\n ...parameters,\n contents: actualPrompt,\n };\n const stream = await this.caller.callWithOptions(\n { signal: options.signal },\n async () => {\n /** @ts-ignore */\n const { stream } = await this.client.generateContentStream(request);\n return stream;\n }\n );\n\n let index = 0;\n let lastUsageMetadata: UsageMetadata | undefined;\n for await (const response of stream) {\n if (\n 'usageMetadata' in response &&\n this.streamUsage !== false &&\n options.streamUsage !== false\n ) {\n const genAIUsageMetadata = response.usageMetadata as\n | GeminiApiUsageMetadata\n | undefined;\n\n const output_tokens =\n (genAIUsageMetadata?.candidatesTokenCount ?? 0) +\n (genAIUsageMetadata?.thoughtsTokenCount ?? 0);\n lastUsageMetadata = {\n input_tokens: genAIUsageMetadata?.promptTokenCount ?? 0,\n output_tokens,\n total_tokens: genAIUsageMetadata?.totalTokenCount ?? 
0,\n };\n }\n\n const chunk = convertResponseContentToChatGenerationChunk(response, {\n usageMetadata: undefined,\n index,\n });\n index += 1;\n if (!chunk) {\n continue;\n }\n\n yield chunk;\n await runManager?.handleLLMNewToken(\n chunk.text || '',\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk }\n );\n }\n\n if (lastUsageMetadata) {\n const finalChunk = new ChatGenerationChunk({\n text: '',\n message: new AIMessageChunk({\n content: '',\n usage_metadata: lastUsageMetadata,\n }),\n });\n yield finalChunk;\n await runManager?.handleLLMNewToken(\n finalChunk.text || '',\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: finalChunk }\n );\n }\n }\n}\n"],"names":["ChatGoogleGenerativeAI","getEnvironmentVariable","GenerativeAI","messages","convertBaseMessagesToContent","convertResponseContentToChatGenerationChunk","ChatGenerationChunk","AIMessageChunk"],"mappings":";;;;;;;;;AAAA;AAoBM,MAAO,4BAA6B,SAAQA,kCAAsB,CAAA;AACtE,IAAA,cAAc;AACd,IAAA,WAAA,CAAY,MAA2B,EAAA;QACrC,KAAK,CAAC,MAAM,CAAC;AAEb,QAAA,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QAElD,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,eAAe,IAAI,IAAI,CAAC,eAAe;AAErE,QAAA,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,IAAI,IAAI,CAAC,eAAe,GAAG,CAAC,EAAE;AAC5D,YAAA,MAAM,IAAI,KAAK,CAAC,8CAA8C,CAAC;;QAGjE,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW;AACzD,QAAA,IACE,IAAI,CAAC,WAAW,IAAI,IAAI;AACxB,aAAC,IAAI,CAAC,WAAW,GAAG,CAAC,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,EAC9C;AACA,YAAA,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC;;QAGpE,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI;AACpC,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;AAGtD,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC;;QAG5C,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI;AACpC,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;QAGtD,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,IAAI,CAAC,aAAa;QAE/D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAIC,0BAAsB,CAAC,gBAAgB,CAAC;AACvE,QAAA,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,IAAI,IAAI,CAAC,MAAM,KAAK,EAAE,EAAE;YAC7C,MAAM,IAAI,KAAK,CACb,gDAAgD;gBAC9C,6CAA6C;gBAC7C,kCAAkC;AAClC,gBAAA,oCAAoC,CACvC;;QAGH,IAAI,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc;AAClE,QAAA,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE;YACzD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAC/B,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAC3C;YACD,IAAI,iBAAiB,CAAC,IAAI,KAAK,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE;AACzD,gBAAA,MAAM,IAAI,KAAK,CACb,yDAAyD,CAC1D;;;QAIL,IAAI,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc;QAElE,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS;AACnD,QAAA,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI;;AAGvB,QAAA,IAAI,CAAC,MAAM,GAAG,IAAIC,+BAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,kBAAkB,CAC5D;YACE,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,cAAc,EAAE,IAAI,CAAC,cAAiC;AACtD,YAAA,gBAAgB,EAAE;gBAChB,aAAa,EAAE,IAAI,CAAC,aAAa;gBACjC,eAAe,EAAE,IAAI,CAAC,eAAe;gBACrC,WAAW,EAAE,IAAI,CAAC,WAAW;gBAC7B,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,IAAI,EAAE,IAAI,CAAC,IAAI;AACf,gBAAA,IAAI,IAAI,CAAC,IAAI,IAAI;AACf,sBAAE,EAAE,gBAAgB,EAAE,kBAAkB;sBACtC,EAAE,CAAC;AACR,aAAA;SACF,EACD;YACE,UAAU,EAAE,MAAM,CAAC,UAAU;YAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;YACvB,aAAa,EAAE,MAAM,CAAC,aAAa;AACpC,SAAA,CACF;QACD,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW;;AAG3D,IAAA,gBAAgB,CACd,OAAmC,EAAA;QAEnC,MAAM,MAAM,GAAG,KAAK,CAAC,gBAAgB,CAAC,OAAO,CAAC;AAC9C,QAAA,IAAI,IAAI,CAAC,cAAc,EAAE;;AAEvB,YAAA,IAAI,
CAAC,MAAM,CAAC,gBAAgB,GAAG;;AAE7B,gBAAA,GAAG,IAAI,CAAC,MAAM,CAAC,gBAAgB;;gBAE/B,cAAc,EAAE,IAAI,CAAC,cAAc;aACpC;;AAEH,QAAA,OAAO,MAAM;;IAGf,OAAO,qBAAqB,CAC1BC,UAAuB,EACvB,OAAkC,EAClC,UAAqC,EAAA;AAErC,QAAA,MAAM,MAAM,GAAGC,mCAA4B,CACzCD,UAAQ,EACR,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,oBAAoB,CAC1B;QACD,IAAI,YAAY,GAAG,MAAM;QACzB,IAAI,MAAM,GAAG,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,EAAE;AACjC,YAAA,MAAM,CAAC,iBAAiB,CAAC,GAAG,MAAM;;AAElC,YAAA,IAAI,CAAC,MAAM,CAAC,iBAAiB,GAAG,iBAAiB;AACjD,YAAA,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;;QAEhC,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;AACjD,QAAA,MAAM,OAAO,GAAG;AACd,YAAA,GAAG,UAAU;AACb,YAAA,QAAQ,EAAE,YAAY;SACvB;AACD,QAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,eAAe,CAC9C,EAAE,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE,EAC1B,YAAW;;AAET,YAAA,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,qBAAqB,CAAC,OAAO,CAAC;AACnE,YAAA,OAAO,MAAM;AACf,SAAC,CACF;AAGD,QAAA,IAAI,iBAA4C;AAChD,QAAA,WAAW,MAAM,QAAQ,IAAI,MAAM,EAAE;YACnC,IACE,eAAe,IAAI,QAAQ;gBAC3B,IAAI,CAAC,WAAW,KAAK,KAAK;AAC1B,gBAAA,OAAO,CAAC,WAAW,KAAK,KAAK,EAC7B;AACA,gBAAA,MAAM,kBAAkB,GAAG,QAAQ,CAAC,aAEvB;gBAEb,MAAM,aAAa,GACjB,CAAC,kBAAkB,EAAE,oBAAoB,IAAI,CAAC;AAC9C,qBAAC,kBAAkB,EAAE,kBAAkB,IAAI,CAAC,CAAC;AAC/C,gBAAA,iBAAiB,GAAG;AAClB,oBAAA,YAAY,EAAE,kBAAkB,EAAE,gBAAgB,IAAI,CAAC;oBACvD,aAAa;AACb,oBAAA,YAAY,EAAE,kBAAkB,EAAE,eAAe,IAAI,CAAC;iBACvD;;AAGH,YAAA,MAAM,KAAK,GAAGE,kDAA2C,CAAC,QAAQ,EAAE;AAClE,gBAAA,aAAa,EAAE,SAEhB,CAAA,CAAC;YAEF,IAAI,CAAC,KAAK,EAAE;gBACV;;AAGF,YAAA,MAAM,KAAK;YACX,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,CAAC,IAAI,IAAI,EAAE,EAChB,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,CACV;;QAGH,IAAI,iBAAiB,EAAE;AACrB,YAAA,MAAM,UAAU,GAAG,IAAIC,2BAAmB,CAAC;AACzC,gBAAA,IAAI,EAAE,EAAE;gBACR,OAAO,EAAE,IAAIC,uBAAc,CAAC;AAC1B,oBAAA,OAAO,EAAE,EAAE;AACX,oBAAA,cAAc,EAAE,iBAAiB;iBAClC,CAAC;AACH,aAAA,CAAC;AACF,YAAA,MAAM,UAAU;YAChB,MAAM,UAAU,EAAE,iBAAiB,CACjC,UAAU,CAAC,IAAI,IAAI,EAAE,EACrB,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,UAAU,EAAE,CACtB;;;AAGN;;;;"}
+
{"version":3,"file":"index.cjs","sources":["../../../../src/llm/google/index.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/ban-ts-comment */\nimport { AIMessageChunk } from '@langchain/core/messages';\nimport { ChatGenerationChunk } from '@langchain/core/outputs';\nimport { ChatGoogleGenerativeAI } from '@langchain/google-genai';\nimport { getEnvironmentVariable } from '@langchain/core/utils/env';\nimport { GoogleGenerativeAI as GenerativeAI } from '@google/generative-ai';\nimport type {\n GenerateContentRequest,\n SafetySetting,\n} from '@google/generative-ai';\nimport type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';\nimport type { BaseMessage, UsageMetadata } from '@langchain/core/messages';\nimport type { GeminiGenerationConfig } from '@langchain/google-common';\nimport type { GeminiApiUsageMetadata } from './types';\nimport type { GoogleClientOptions } from '@/types';\nimport {\n convertResponseContentToChatGenerationChunk,\n convertBaseMessagesToContent,\n} from './utils/common';\n\nexport class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {\n thinkingConfig?: GeminiGenerationConfig['thinkingConfig'];\n constructor(fields: GoogleClientOptions) {\n super(fields);\n\n this.model = fields.model.replace(/^models\\//, '');\n\n this.maxOutputTokens = fields.maxOutputTokens ?? this.maxOutputTokens;\n\n if (this.maxOutputTokens != null && this.maxOutputTokens < 0) {\n throw new Error('`maxOutputTokens` must be a positive integer');\n }\n\n this.temperature = fields.temperature ?? this.temperature;\n if (\n this.temperature != null &&\n (this.temperature < 0 || this.temperature > 2)\n ) {\n throw new Error('`temperature` must be in the range of [0.0,2.0]');\n }\n\n this.topP = fields.topP ?? this.topP;\n if (this.topP != null && this.topP < 0) {\n throw new Error('`topP` must be a positive integer');\n }\n\n if (this.topP != null && this.topP > 1) {\n throw new Error('`topP` must be below 1.');\n }\n\n this.topK = fields.topK ?? this.topK;\n if (this.topK != null && this.topK < 0) {\n throw new Error('`topK` must be a positive integer');\n }\n\n this.stopSequences = fields.stopSequences ?? this.stopSequences;\n\n this.apiKey = fields.apiKey ?? getEnvironmentVariable('GOOGLE_API_KEY');\n if (this.apiKey == null || this.apiKey === '') {\n throw new Error(\n 'Please set an API key for Google GenerativeAI ' +\n 'in the environment variable GOOGLE_API_KEY ' +\n 'or in the `apiKey` field of the ' +\n 'ChatGoogleGenerativeAI constructor'\n );\n }\n\n this.safetySettings = fields.safetySettings ?? this.safetySettings;\n if (this.safetySettings && this.safetySettings.length > 0) {\n const safetySettingsSet = new Set(\n this.safetySettings.map((s) => s.category)\n );\n if (safetySettingsSet.size !== this.safetySettings.length) {\n throw new Error(\n 'The categories in `safetySettings` array must be unique'\n );\n }\n }\n\n this.thinkingConfig = fields.thinkingConfig ?? this.thinkingConfig;\n\n this.streaming = fields.streaming ?? this.streaming;\n this.json = fields.json;\n\n // @ts-ignore - Accessing private property from parent class\n this.client = new GenerativeAI(this.apiKey).getGenerativeModel(\n {\n model: this.model,\n safetySettings: this.safetySettings as SafetySetting[],\n generationConfig: {\n stopSequences: this.stopSequences,\n maxOutputTokens: this.maxOutputTokens,\n temperature: this.temperature,\n topP: this.topP,\n topK: this.topK,\n ...(this.json != null\n ? 
{ responseMimeType: 'application/json' }\n : {}),\n },\n },\n {\n apiVersion: fields.apiVersion,\n baseUrl: fields.baseUrl,\n customHeaders: fields.customHeaders,\n }\n );\n this.streamUsage = fields.streamUsage ?? this.streamUsage;\n }\n\n static lc_name(): 'LibreChatGoogleGenerativeAI' {\n return 'LibreChatGoogleGenerativeAI';\n }\n\n invocationParams(\n options?: this['ParsedCallOptions']\n ): Omit<GenerateContentRequest, 'contents'> {\n const params = super.invocationParams(options);\n if (this.thinkingConfig) {\n /** @ts-ignore */\n this.client.generationConfig = {\n /** @ts-ignore */\n ...this.client.generationConfig,\n /** @ts-ignore */\n thinkingConfig: this.thinkingConfig,\n };\n }\n return params;\n }\n\n async *_streamResponseChunks(\n messages: BaseMessage[],\n options: this['ParsedCallOptions'],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const prompt = convertBaseMessagesToContent(\n messages,\n this._isMultimodalModel,\n this.useSystemInstruction\n );\n let actualPrompt = prompt;\n if (prompt?.[0].role === 'system') {\n const [systemInstruction] = prompt;\n /** @ts-ignore */\n this.client.systemInstruction = systemInstruction;\n actualPrompt = prompt.slice(1);\n }\n const parameters = this.invocationParams(options);\n const request = {\n ...parameters,\n contents: actualPrompt,\n };\n const stream = await this.caller.callWithOptions(\n { signal: options.signal },\n async () => {\n /** @ts-ignore */\n const { stream } = await this.client.generateContentStream(request);\n return stream;\n }\n );\n\n let index = 0;\n let lastUsageMetadata: UsageMetadata | undefined;\n for await (const response of stream) {\n if (\n 'usageMetadata' in response &&\n this.streamUsage !== false &&\n options.streamUsage !== false\n ) {\n const genAIUsageMetadata = response.usageMetadata as\n | GeminiApiUsageMetadata\n | undefined;\n\n const output_tokens =\n (genAIUsageMetadata?.candidatesTokenCount ?? 0) +\n (genAIUsageMetadata?.thoughtsTokenCount ?? 0);\n lastUsageMetadata = {\n input_tokens: genAIUsageMetadata?.promptTokenCount ?? 0,\n output_tokens,\n total_tokens: genAIUsageMetadata?.totalTokenCount ?? 
0,\n };\n }\n\n const chunk = convertResponseContentToChatGenerationChunk(response, {\n usageMetadata: undefined,\n index,\n });\n index += 1;\n if (!chunk) {\n continue;\n }\n\n yield chunk;\n await runManager?.handleLLMNewToken(\n chunk.text || '',\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk }\n );\n }\n\n if (lastUsageMetadata) {\n const finalChunk = new ChatGenerationChunk({\n text: '',\n message: new AIMessageChunk({\n content: '',\n usage_metadata: lastUsageMetadata,\n }),\n });\n yield finalChunk;\n await runManager?.handleLLMNewToken(\n finalChunk.text || '',\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: finalChunk }\n );\n }\n }\n}\n"],"names":["ChatGoogleGenerativeAI","getEnvironmentVariable","GenerativeAI","messages","convertBaseMessagesToContent","convertResponseContentToChatGenerationChunk","ChatGenerationChunk","AIMessageChunk"],"mappings":";;;;;;;;;AAAA;AAoBM,MAAO,4BAA6B,SAAQA,kCAAsB,CAAA;AACtE,IAAA,cAAc;AACd,IAAA,WAAA,CAAY,MAA2B,EAAA;QACrC,KAAK,CAAC,MAAM,CAAC;AAEb,QAAA,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QAElD,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,eAAe,IAAI,IAAI,CAAC,eAAe;AAErE,QAAA,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,IAAI,IAAI,CAAC,eAAe,GAAG,CAAC,EAAE;AAC5D,YAAA,MAAM,IAAI,KAAK,CAAC,8CAA8C,CAAC;;QAGjE,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW;AACzD,QAAA,IACE,IAAI,CAAC,WAAW,IAAI,IAAI;AACxB,aAAC,IAAI,CAAC,WAAW,GAAG,CAAC,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,EAC9C;AACA,YAAA,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC;;QAGpE,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI;AACpC,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;AAGtD,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC;;QAG5C,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI;AACpC,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;QAGtD,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,IAAI,CAAC,aAAa;QAE/D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAIC,0BAAsB,CAAC,gBAAgB,CAAC;AACvE,QAAA,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,IAAI,IAAI,CAAC,MAAM,KAAK,EAAE,EAAE;YAC7C,MAAM,IAAI,KAAK,CACb,gDAAgD;gBAC9C,6CAA6C;gBAC7C,kCAAkC;AAClC,gBAAA,oCAAoC,CACvC;;QAGH,IAAI,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc;AAClE,QAAA,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE;YACzD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAC/B,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAC3C;YACD,IAAI,iBAAiB,CAAC,IAAI,KAAK,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE;AACzD,gBAAA,MAAM,IAAI,KAAK,CACb,yDAAyD,CAC1D;;;QAIL,IAAI,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc;QAElE,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS;AACnD,QAAA,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI;;AAGvB,QAAA,IAAI,CAAC,MAAM,GAAG,IAAIC,+BAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,kBAAkB,CAC5D;YACE,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,cAAc,EAAE,IAAI,CAAC,cAAiC;AACtD,YAAA,gBAAgB,EAAE;gBAChB,aAAa,EAAE,IAAI,CAAC,aAAa;gBACjC,eAAe,EAAE,IAAI,CAAC,eAAe;gBACrC,WAAW,EAAE,IAAI,CAAC,WAAW;gBAC7B,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,IAAI,EAAE,IAAI,CAAC,IAAI;AACf,gBAAA,IAAI,IAAI,CAAC,IAAI,IAAI;AACf,sBAAE,EAAE,gBAAgB,EAAE,kBAAkB;sBACtC,EAAE,CAAC;AACR,aAAA;SACF,EACD;YACE,UAAU,EAAE,MAAM,CAAC,UAAU;YAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;YACvB,aAAa,EAAE,MAAM,CAAC,aAAa;AACpC,SAAA,CACF;QACD,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW;;AAG3D,IAAA,OAAO,OAAO,GAAA;AACZ,QAAA,OAAO,6BAA6B;;AAGtC,IAAA,gBAAgB,CACd,OAAmC,EAAA;QAEnC,MAAM,MAAM,GAAG,KAAK,CAAC,gBAAgB,CAAC,OAAO,CAAC;AAC9
C,QAAA,IAAI,IAAI,CAAC,cAAc,EAAE;;AAEvB,YAAA,IAAI,CAAC,MAAM,CAAC,gBAAgB,GAAG;;AAE7B,gBAAA,GAAG,IAAI,CAAC,MAAM,CAAC,gBAAgB;;gBAE/B,cAAc,EAAE,IAAI,CAAC,cAAc;aACpC;;AAEH,QAAA,OAAO,MAAM;;IAGf,OAAO,qBAAqB,CAC1BC,UAAuB,EACvB,OAAkC,EAClC,UAAqC,EAAA;AAErC,QAAA,MAAM,MAAM,GAAGC,mCAA4B,CACzCD,UAAQ,EACR,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,oBAAoB,CAC1B;QACD,IAAI,YAAY,GAAG,MAAM;QACzB,IAAI,MAAM,GAAG,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,EAAE;AACjC,YAAA,MAAM,CAAC,iBAAiB,CAAC,GAAG,MAAM;;AAElC,YAAA,IAAI,CAAC,MAAM,CAAC,iBAAiB,GAAG,iBAAiB;AACjD,YAAA,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;;QAEhC,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;AACjD,QAAA,MAAM,OAAO,GAAG;AACd,YAAA,GAAG,UAAU;AACb,YAAA,QAAQ,EAAE,YAAY;SACvB;AACD,QAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,eAAe,CAC9C,EAAE,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE,EAC1B,YAAW;;AAET,YAAA,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,qBAAqB,CAAC,OAAO,CAAC;AACnE,YAAA,OAAO,MAAM;AACf,SAAC,CACF;AAGD,QAAA,IAAI,iBAA4C;AAChD,QAAA,WAAW,MAAM,QAAQ,IAAI,MAAM,EAAE;YACnC,IACE,eAAe,IAAI,QAAQ;gBAC3B,IAAI,CAAC,WAAW,KAAK,KAAK;AAC1B,gBAAA,OAAO,CAAC,WAAW,KAAK,KAAK,EAC7B;AACA,gBAAA,MAAM,kBAAkB,GAAG,QAAQ,CAAC,aAEvB;gBAEb,MAAM,aAAa,GACjB,CAAC,kBAAkB,EAAE,oBAAoB,IAAI,CAAC;AAC9C,qBAAC,kBAAkB,EAAE,kBAAkB,IAAI,CAAC,CAAC;AAC/C,gBAAA,iBAAiB,GAAG;AAClB,oBAAA,YAAY,EAAE,kBAAkB,EAAE,gBAAgB,IAAI,CAAC;oBACvD,aAAa;AACb,oBAAA,YAAY,EAAE,kBAAkB,EAAE,eAAe,IAAI,CAAC;iBACvD;;AAGH,YAAA,MAAM,KAAK,GAAGE,kDAA2C,CAAC,QAAQ,EAAE;AAClE,gBAAA,aAAa,EAAE,SAEhB,CAAA,CAAC;YAEF,IAAI,CAAC,KAAK,EAAE;gBACV;;AAGF,YAAA,MAAM,KAAK;YACX,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,CAAC,IAAI,IAAI,EAAE,EAChB,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,CACV;;QAGH,IAAI,iBAAiB,EAAE;AACrB,YAAA,MAAM,UAAU,GAAG,IAAIC,2BAAmB,CAAC;AACzC,gBAAA,IAAI,EAAE,EAAE;gBACR,OAAO,EAAE,IAAIC,uBAAc,CAAC;AAC1B,oBAAA,OAAO,EAAE,EAAE;AACX,oBAAA,cAAc,EAAE,iBAAiB;iBAClC,CAAC;AACH,aAAA,CAAC;AACF,YAAA,MAAM,UAAU;YAChB,MAAM,UAAU,EAAE,iBAAiB,CACjC,UAAU,CAAC,IAAI,IAAI,EAAE,EACrB,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,UAAU,EAAE,CACtB;;;AAGN;;;;"}
@@ -235,6 +235,19 @@ function _convertLangChainContentToPart(content, isMultimodalModel) {
             },
         };
     }
+    else if (content.type === 'document' ||
+        content.type === 'audio' ||
+        content.type === 'video') {
+        if (!isMultimodalModel) {
+            throw new Error(`This model does not support ${content.type}s`);
+        }
+        return {
+            inlineData: {
+                data: content.data,
+                mimeType: content.mimeType,
+            },
+        };
+    }
     else if (content.type === 'media') {
         return messageContentMedia(content);
     }
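The hunk above adds inline-data handling for `document`, `audio`, and `video` content parts to the compiled `_convertLangChainContentToPart` helper. As a minimal sketch of what the new branch does (not the package's exported API; the `MediaContent` and `toInlineDataPart` names and types here are assumptions for illustration), the behavior is roughly:

```ts
// Illustrative sketch of the branch added above; names and types are assumptions,
// not exports of @librechat/agents.
interface MediaContent {
  type: 'document' | 'audio' | 'video';
  data: string; // base64-encoded payload
  mimeType: string; // e.g. 'application/pdf', 'audio/mpeg', 'video/mp4'
}

interface InlineDataPart {
  inlineData: { data: string; mimeType: string };
}

function toInlineDataPart(
  content: MediaContent,
  isMultimodalModel: boolean
): InlineDataPart {
  if (!isMultimodalModel) {
    // Mirrors the error thrown by the added branch for non-multimodal models.
    throw new Error(`This model does not support ${content.type}s`);
  }
  // The part is passed through as inline data with its original MIME type.
  return {
    inlineData: {
      data: content.data,
      mimeType: content.mimeType,
    },
  };
}

// Example: a base64-encoded PDF passed as a `document` part.
const part = toInlineDataPart(
  { type: 'document', data: 'JVBERi0xLjQ=', mimeType: 'application/pdf' },
  true
);
console.log(part.inlineData.mimeType); // application/pdf
```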
@@ -1 +1 @@
-
{"version":3,"file":"common.cjs","sources":["../../../../../src/llm/google/utils/common.ts"],"sourcesContent":["import {\n POSSIBLE_ROLES,\n type Part,\n type Content,\n type TextPart,\n type FileDataPart,\n type InlineDataPart,\n type FunctionCallPart,\n type GenerateContentCandidate,\n type EnhancedGenerateContentResponse,\n type FunctionDeclaration as GenerativeAIFunctionDeclaration,\n type FunctionDeclarationsTool as GoogleGenerativeAIFunctionDeclarationsTool,\n} from '@google/generative-ai';\nimport {\n AIMessageChunk,\n BaseMessage,\n ChatMessage,\n ToolMessage,\n ToolMessageChunk,\n MessageContent,\n MessageContentComplex,\n UsageMetadata,\n isAIMessage,\n isBaseMessage,\n isToolMessage,\n StandardContentBlockConverter,\n parseBase64DataUrl,\n convertToProviderContentBlock,\n isDataContentBlock,\n} from '@langchain/core/messages';\nimport { ChatGenerationChunk } from '@langchain/core/outputs';\nimport type { ChatGeneration } from '@langchain/core/outputs';\nimport { isLangChainTool } from '@langchain/core/utils/function_calling';\nimport { isOpenAITool } from '@langchain/core/language_models/base';\nimport { ToolCallChunk } from '@langchain/core/messages/tool';\nimport { v4 as uuidv4 } from 'uuid';\nimport {\n jsonSchemaToGeminiParameters,\n schemaToGenerativeAIParameters,\n} from './zod_to_genai_parameters';\nimport { GoogleGenerativeAIToolType } from '../types';\n\nexport function getMessageAuthor(message: BaseMessage): string {\n const type = message._getType();\n if (ChatMessage.isInstance(message)) {\n return message.role;\n }\n if (type === 'tool') {\n return type;\n }\n return message.name ?? type;\n}\n\n/**\n * Maps a message type to a Google Generative AI chat author.\n * @param message The message to map.\n * @param model The model to use for mapping.\n * @returns The message type mapped to a Google Generative AI chat author.\n */\nexport function convertAuthorToRole(\n author: string\n): (typeof POSSIBLE_ROLES)[number] {\n switch (author) {\n /**\n * Note: Gemini currently is not supporting system messages\n * we will convert them to human messages and merge with following\n * */\n case 'supervisor':\n case 'ai':\n case 'model': // getMessageAuthor returns message.name. code ex.: return message.name ?? type;\n return 'model';\n case 'system':\n return 'system';\n case 'human':\n return 'user';\n case 'tool':\n case 'function':\n return 'function';\n default:\n throw new Error(`Unknown / unsupported author: ${author}`);\n }\n}\n\nfunction messageContentMedia(content: MessageContentComplex): Part {\n if ('mimeType' in content && 'data' in content) {\n return {\n inlineData: {\n mimeType: content.mimeType,\n data: content.data,\n },\n };\n }\n if ('mimeType' in content && 'fileUri' in content) {\n return {\n fileData: {\n mimeType: content.mimeType,\n fileUri: content.fileUri,\n },\n };\n }\n\n throw new Error('Invalid media content');\n}\n\nfunction inferToolNameFromPreviousMessages(\n message: ToolMessage | ToolMessageChunk,\n previousMessages: BaseMessage[]\n): string | undefined {\n return previousMessages\n .map((msg) => {\n if (isAIMessage(msg)) {\n return msg.tool_calls ?? 
[];\n }\n return [];\n })\n .flat()\n .find((toolCall) => {\n return toolCall.id === message.tool_call_id;\n })?.name;\n}\n\nfunction _getStandardContentBlockConverter(\n isMultimodalModel: boolean\n): StandardContentBlockConverter<{\n text: TextPart;\n image: FileDataPart | InlineDataPart;\n audio: FileDataPart | InlineDataPart;\n file: FileDataPart | InlineDataPart | TextPart;\n}> {\n const standardContentBlockConverter: StandardContentBlockConverter<{\n text: TextPart;\n image: FileDataPart | InlineDataPart;\n audio: FileDataPart | InlineDataPart;\n file: FileDataPart | InlineDataPart | TextPart;\n }> = {\n providerName: 'Google Gemini',\n\n fromStandardTextBlock(block) {\n return {\n text: block.text,\n };\n },\n\n fromStandardImageBlock(block): FileDataPart | InlineDataPart {\n if (!isMultimodalModel) {\n throw new Error('This model does not support images');\n }\n if (block.source_type === 'url') {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (data) {\n return {\n inlineData: {\n mimeType: data.mime_type,\n data: data.data,\n },\n };\n } else {\n return {\n fileData: {\n mimeType: block.mime_type ?? '',\n fileUri: block.url,\n },\n };\n }\n }\n\n if (block.source_type === 'base64') {\n return {\n inlineData: {\n mimeType: block.mime_type ?? '',\n data: block.data,\n },\n };\n }\n\n throw new Error(`Unsupported source type: ${block.source_type}`);\n },\n\n fromStandardAudioBlock(block): FileDataPart | InlineDataPart {\n if (!isMultimodalModel) {\n throw new Error('This model does not support audio');\n }\n if (block.source_type === 'url') {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (data) {\n return {\n inlineData: {\n mimeType: data.mime_type,\n data: data.data,\n },\n };\n } else {\n return {\n fileData: {\n mimeType: block.mime_type ?? '',\n fileUri: block.url,\n },\n };\n }\n }\n\n if (block.source_type === 'base64') {\n return {\n inlineData: {\n mimeType: block.mime_type ?? '',\n data: block.data,\n },\n };\n }\n\n throw new Error(`Unsupported source type: ${block.source_type}`);\n },\n\n fromStandardFileBlock(block): FileDataPart | InlineDataPart | TextPart {\n if (!isMultimodalModel) {\n throw new Error('This model does not support files');\n }\n if (block.source_type === 'text') {\n return {\n text: block.text,\n };\n }\n if (block.source_type === 'url') {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (data) {\n return {\n inlineData: {\n mimeType: data.mime_type,\n data: data.data,\n },\n };\n } else {\n return {\n fileData: {\n mimeType: block.mime_type ?? '',\n fileUri: block.url,\n },\n };\n }\n }\n\n if (block.source_type === 'base64') {\n return {\n inlineData: {\n mimeType: block.mime_type ?? 
'',\n data: block.data,\n },\n };\n }\n throw new Error(`Unsupported source type: ${block.source_type}`);\n },\n };\n return standardContentBlockConverter;\n}\n\nfunction _convertLangChainContentToPart(\n content: MessageContentComplex,\n isMultimodalModel: boolean\n): Part | undefined {\n if (isDataContentBlock(content)) {\n return convertToProviderContentBlock(\n content,\n _getStandardContentBlockConverter(isMultimodalModel)\n );\n }\n\n if (content.type === 'text') {\n return { text: content.text };\n } else if (content.type === 'executableCode') {\n return { executableCode: content.executableCode };\n } else if (content.type === 'codeExecutionResult') {\n return { codeExecutionResult: content.codeExecutionResult };\n } else if (content.type === 'image_url') {\n if (!isMultimodalModel) {\n throw new Error('This model does not support images');\n }\n let source: string;\n if (typeof content.image_url === 'string') {\n source = content.image_url;\n } else if (\n typeof content.image_url === 'object' &&\n 'url' in content.image_url\n ) {\n source = content.image_url.url;\n } else {\n throw new Error('Please provide image as base64 encoded data URL');\n }\n const [dm, data] = source.split(',');\n if (!dm.startsWith('data:')) {\n throw new Error('Please provide image as base64 encoded data URL');\n }\n\n const [mimeType, encoding] = dm.replace(/^data:/, '').split(';');\n if (encoding !== 'base64') {\n throw new Error('Please provide image as base64 encoded data URL');\n }\n\n return {\n inlineData: {\n data,\n mimeType,\n },\n };\n } else if (content.type === 'media') {\n return messageContentMedia(content);\n } else if (content.type === 'tool_use') {\n return {\n functionCall: {\n name: content.name,\n args: content.input,\n },\n };\n } else if (\n content.type?.includes('/') === true &&\n // Ensure it's a single slash.\n content.type.split('/').length === 2 &&\n 'data' in content &&\n typeof content.data === 'string'\n ) {\n return {\n inlineData: {\n mimeType: content.type,\n data: content.data,\n },\n };\n } else if ('functionCall' in content) {\n // No action needed here — function calls will be added later from message.tool_calls\n return undefined;\n } else {\n if ('type' in content) {\n throw new Error(`Unknown content type ${content.type}`);\n } else {\n throw new Error(`Unknown content ${JSON.stringify(content)}`);\n }\n }\n}\n\nexport function convertMessageContentToParts(\n message: BaseMessage,\n isMultimodalModel: boolean,\n previousMessages: BaseMessage[]\n): Part[] {\n if (isToolMessage(message)) {\n const messageName =\n message.name ??\n inferToolNameFromPreviousMessages(message, previousMessages);\n if (messageName === undefined) {\n throw new Error(\n `Google requires a tool name for each tool call response, and we could not infer a called tool name for ToolMessage \"${message.id}\" from your passed messages. Please populate a \"name\" field on that ToolMessage explicitly.`\n );\n }\n\n const result = Array.isArray(message.content)\n ? 
(message.content\n .map((c) => _convertLangChainContentToPart(c, isMultimodalModel))\n .filter((p) => p !== undefined) as Part[])\n : message.content;\n\n if (message.status === 'error') {\n return [\n {\n functionResponse: {\n name: messageName,\n // The API expects an object with an `error` field if the function call fails.\n // `error` must be a valid object (not a string or array), so we wrap `message.content` here\n response: { error: { details: result } },\n },\n },\n ];\n }\n\n return [\n {\n functionResponse: {\n name: messageName,\n // again, can't have a string or array value for `response`, so we wrap it as an object here\n response: { result },\n },\n },\n ];\n }\n\n let functionCalls: FunctionCallPart[] = [];\n const messageParts: Part[] = [];\n\n if (typeof message.content === 'string' && message.content) {\n messageParts.push({ text: message.content });\n }\n\n if (Array.isArray(message.content)) {\n messageParts.push(\n ...(message.content\n .map((c) => _convertLangChainContentToPart(c, isMultimodalModel))\n .filter((p) => p !== undefined) as Part[])\n );\n }\n\n if (isAIMessage(message) && message.tool_calls?.length != null) {\n functionCalls = message.tool_calls.map((tc) => {\n return {\n functionCall: {\n name: tc.name,\n args: tc.args,\n },\n };\n });\n }\n\n return [...messageParts, ...functionCalls];\n}\n\nexport function convertBaseMessagesToContent(\n messages: BaseMessage[],\n isMultimodalModel: boolean,\n convertSystemMessageToHumanContent: boolean = false\n): Content[] | undefined {\n return messages.reduce<{\n content: Content[] | undefined;\n mergeWithPreviousContent: boolean;\n }>(\n (acc, message, index) => {\n if (!isBaseMessage(message)) {\n throw new Error('Unsupported message input');\n }\n const author = getMessageAuthor(message);\n if (author === 'system' && index !== 0) {\n throw new Error('System message should be the first one');\n }\n const role = convertAuthorToRole(author);\n\n const prevContent = acc.content?.[acc.content.length];\n if (\n !acc.mergeWithPreviousContent &&\n prevContent &&\n prevContent.role === role\n ) {\n throw new Error(\n 'Google Generative AI requires alternate messages between authors'\n );\n }\n\n const parts = convertMessageContentToParts(\n message,\n isMultimodalModel,\n messages.slice(0, index)\n );\n\n if (acc.mergeWithPreviousContent) {\n const prevContent = acc.content?.[acc.content.length - 1];\n if (!prevContent) {\n throw new Error(\n 'There was a problem parsing your system message. Please try a prompt without one.'\n );\n }\n prevContent.parts.push(...parts);\n\n return {\n mergeWithPreviousContent: false,\n content: acc.content,\n };\n }\n let actualRole = role;\n if (\n actualRole === 'function' ||\n (actualRole === 'system' && !convertSystemMessageToHumanContent)\n ) {\n // GenerativeAI API will throw an error if the role is not \"user\" or \"model.\"\n actualRole = 'user';\n }\n const content: Content = {\n role: actualRole,\n parts,\n };\n return {\n mergeWithPreviousContent:\n author === 'system' && !convertSystemMessageToHumanContent,\n content: [...(acc.content ?? 
[]), content],\n };\n },\n { content: [], mergeWithPreviousContent: false }\n ).content;\n}\n\nexport function convertResponseContentToChatGenerationChunk(\n response: EnhancedGenerateContentResponse,\n extra: {\n usageMetadata?: UsageMetadata | undefined;\n index: number;\n }\n): ChatGenerationChunk | null {\n if (!response.candidates || response.candidates.length === 0) {\n return null;\n }\n const functionCalls = response.functionCalls();\n const [candidate] = response.candidates as [\n Partial<GenerateContentCandidate> | undefined,\n ];\n const { content: candidateContent, ...generationInfo } = candidate ?? {};\n let content: MessageContent | undefined;\n // Checks if some parts do not have text. If false, it means that the content is a string.\n const reasoningParts: string[] = [];\n if (\n candidateContent != null &&\n Array.isArray(candidateContent.parts) &&\n candidateContent.parts.every((p) => 'text' in p)\n ) {\n // content = candidateContent.parts.map((p) => p.text).join('');\n const textParts: string[] = [];\n for (const part of candidateContent.parts) {\n if ('thought' in part && part.thought === true) {\n reasoningParts.push(part.text ?? '');\n continue;\n }\n textParts.push(part.text ?? '');\n }\n content = textParts.join('');\n } else if (candidateContent && Array.isArray(candidateContent.parts)) {\n content = candidateContent.parts.map((p) => {\n if ('text' in p && 'thought' in p && p.thought === true) {\n reasoningParts.push(p.text ?? '');\n } else if ('text' in p) {\n return {\n type: 'text',\n text: p.text,\n };\n } else if ('executableCode' in p) {\n return {\n type: 'executableCode',\n executableCode: p.executableCode,\n };\n } else if ('codeExecutionResult' in p) {\n return {\n type: 'codeExecutionResult',\n codeExecutionResult: p.codeExecutionResult,\n };\n }\n return p;\n });\n } else {\n // no content returned - likely due to abnormal stop reason, e.g. malformed function call\n content = [];\n }\n\n let text = '';\n if (typeof content === 'string' && content) {\n text = content;\n } else if (Array.isArray(content)) {\n const block = content.find((b) => 'text' in b) as\n | { text: string }\n | undefined;\n text = block?.text ?? '';\n }\n\n const toolCallChunks: ToolCallChunk[] = [];\n if (functionCalls) {\n toolCallChunks.push(\n ...functionCalls.map((fc) => ({\n ...fc,\n args: JSON.stringify(fc.args),\n // Un-commenting this causes LangChain to incorrectly merge tool calls together\n // index: extra.index,\n type: 'tool_call_chunk' as const,\n id: 'id' in fc && typeof fc.id === 'string' ? fc.id : uuidv4(),\n }))\n );\n }\n\n const additional_kwargs: ChatGeneration['message']['additional_kwargs'] = {};\n if (reasoningParts.length > 0) {\n additional_kwargs.reasoning = reasoningParts.join('');\n }\n\n if (candidate?.groundingMetadata) {\n additional_kwargs.groundingMetadata = candidate.groundingMetadata;\n }\n\n const isFinalChunk =\n response.candidates[0]?.finishReason === 'STOP' ||\n response.candidates[0]?.finishReason === 'MAX_TOKENS' ||\n response.candidates[0]?.finishReason === 'SAFETY';\n\n return new ChatGenerationChunk({\n text,\n message: new AIMessageChunk({\n content: content,\n name: !candidateContent ? undefined : candidateContent.role,\n tool_call_chunks: toolCallChunks,\n // Each chunk can have unique \"generationInfo\", and merging strategy is unclear,\n // so leave blank for now.\n additional_kwargs,\n usage_metadata: isFinalChunk ? 
extra.usageMetadata : undefined,\n }),\n generationInfo,\n });\n}\n\nexport function convertToGenerativeAITools(\n tools: GoogleGenerativeAIToolType[]\n): GoogleGenerativeAIFunctionDeclarationsTool[] {\n if (\n tools.every(\n (tool) =>\n 'functionDeclarations' in tool &&\n Array.isArray(tool.functionDeclarations)\n )\n ) {\n return tools as GoogleGenerativeAIFunctionDeclarationsTool[];\n }\n return [\n {\n functionDeclarations: tools.map(\n (tool): GenerativeAIFunctionDeclaration => {\n if (isLangChainTool(tool)) {\n const jsonSchema = schemaToGenerativeAIParameters(tool.schema);\n if (\n jsonSchema.type === 'object' &&\n 'properties' in jsonSchema &&\n Object.keys(jsonSchema.properties).length === 0\n ) {\n return {\n name: tool.name,\n description: tool.description,\n };\n }\n return {\n name: tool.name,\n description: tool.description,\n parameters: jsonSchema,\n };\n }\n if (isOpenAITool(tool)) {\n return {\n name: tool.function.name,\n description:\n tool.function.description ?? 'A function available to call.',\n parameters: jsonSchemaToGeminiParameters(\n tool.function.parameters\n ),\n };\n }\n return tool as unknown as GenerativeAIFunctionDeclaration;\n }\n ),\n },\n ];\n}\n"],"names":["ChatMessage","isAIMessage","parseBase64DataUrl","isDataContentBlock","convertToProviderContentBlock","isToolMessage","messages","isBaseMessage","uuidv4","ChatGenerationChunk","AIMessageChunk"],"mappings":";;;;;;;;;;AA0CM,SAAU,gBAAgB,CAAC,OAAoB,EAAA;AACnD,IAAA,MAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE;AAC/B,IAAA,IAAIA,oBAAW,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACnC,OAAO,OAAO,CAAC,IAAI;;AAErB,IAAA,IAAI,IAAI,KAAK,MAAM,EAAE;AACnB,QAAA,OAAO,IAAI;;AAEb,IAAA,OAAO,OAAO,CAAC,IAAI,IAAI,IAAI;AAC7B;AAEA;;;;;AAKG;AACG,SAAU,mBAAmB,CACjC,MAAc,EAAA;IAEd,QAAQ,MAAM;AACd;;;AAGO;AACP,QAAA,KAAK,YAAY;AACjB,QAAA,KAAK,IAAI;QACT,KAAK,OAAO;AACV,YAAA,OAAO,OAAO;AAChB,QAAA,KAAK,QAAQ;AACX,YAAA,OAAO,QAAQ;AACjB,QAAA,KAAK,OAAO;AACV,YAAA,OAAO,MAAM;AACf,QAAA,KAAK,MAAM;AACX,QAAA,KAAK,UAAU;AACb,YAAA,OAAO,UAAU;AACnB,QAAA;AACE,YAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,MAAM,CAAA,CAAE,CAAC;;AAE9D;AAEA,SAAS,mBAAmB,CAAC,OAA8B,EAAA;IACzD,IAAI,UAAU,IAAI,OAAO,IAAI,MAAM,IAAI,OAAO,EAAE;QAC9C,OAAO;AACL,YAAA,UAAU,EAAE;gBACV,QAAQ,EAAE,OAAO,CAAC,QAAQ;gBAC1B,IAAI,EAAE,OAAO,CAAC,IAAI;AACnB,aAAA;SACF;;IAEH,IAAI,UAAU,IAAI,OAAO,IAAI,SAAS,IAAI,OAAO,EAAE;QACjD,OAAO;AACL,YAAA,QAAQ,EAAE;gBACR,QAAQ,EAAE,OAAO,CAAC,QAAQ;gBAC1B,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,aAAA;SACF;;AAGH,IAAA,MAAM,IAAI,KAAK,CAAC,uBAAuB,CAAC;AAC1C;AAEA,SAAS,iCAAiC,CACxC,OAAuC,EACvC,gBAA+B,EAAA;AAE/B,IAAA,OAAO;AACJ,SAAA,GAAG,CAAC,CAAC,GAAG,KAAI;AACX,QAAA,IAAIC,oBAAW,CAAC,GAAG,CAAC,EAAE;AACpB,YAAA,OAAO,GAAG,CAAC,UAAU,IAAI,EAAE;;AAE7B,QAAA,OAAO,EAAE;AACX,KAAC;AACA,SAAA,IAAI;AACJ,SAAA,IAAI,CAAC,CAAC,QAAQ,KAAI;AACjB,QAAA,OAAO,QAAQ,CAAC,EAAE,KAAK,OAAO,CAAC,YAAY;KAC5C,CAAC,EAAE,IAAI;AACZ;AAEA,SAAS,iCAAiC,CACxC,iBAA0B,EAAA;AAO1B,IAAA,MAAM,6BAA6B,GAK9B;AACH,QAAA,YAAY,EAAE,eAAe;AAE7B,QAAA,qBAAqB,CAAC,KAAK,EAAA;YACzB,OAAO;gBACL,IAAI,EAAE,KAAK,CAAC,IAAI;aACjB;SACF;AAED,QAAA,sBAAsB,CAAC,KAAK,EAAA;YAC1B,IAAI,CAAC,iBAAiB,EAAE;AACtB,gBAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC;;AAEvD,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,KAAK,EAAE;AAC/B,gBAAA,MAAM,IAAI,GAAGC,2BAAkB,CAAC,EAAE,OAAO,EAAE,KAAK,CAAC,GAAG,EAAE,CAAC;gBACvD,IAAI,IAAI,EAAE;oBACR,OAAO;AACL,wBAAA,UAAU,EAAE;4BACV,QAAQ,EAAE,IAAI,CAAC,SAAS;4BACxB,IAAI,EAAE,IAAI,CAAC,IAAI;AAChB,yBAAA;qBACF;;qBACI;oBACL,OAAO;AACL,wBAAA,QAAQ,EAAE;AACR,4BAAA,QAAQ,EAAE,KAAK,CAAC,SAAS,IAAI,EAAE;4BAC/B,OAAO,EAAE,KAAK,CAAC,GAAG;AACnB,yBAAA;qBACF;;;AAIL,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,QAAQ,EAAE;gBAClC,OAAO;AACL,oBAAA,UAAU,
EAAE;AACV,wBAAA,QAAQ,EAAE,KAAK,CAAC,SAAS,IAAI,EAAE;wBAC/B,IAAI,EAAE,KAAK,CAAC,IAAI;AACjB,qBAAA;iBACF;;YAGH,MAAM,IAAI,KAAK,CAAC,CAAA,yBAAA,EAA4B,KAAK,CAAC,WAAW,CAAE,CAAA,CAAC;SACjE;AAED,QAAA,sBAAsB,CAAC,KAAK,EAAA;YAC1B,IAAI,CAAC,iBAAiB,EAAE;AACtB,gBAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;AAEtD,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,KAAK,EAAE;AAC/B,gBAAA,MAAM,IAAI,GAAGA,2BAAkB,CAAC,EAAE,OAAO,EAAE,KAAK,CAAC,GAAG,EAAE,CAAC;gBACvD,IAAI,IAAI,EAAE;oBACR,OAAO;AACL,wBAAA,UAAU,EAAE;4BACV,QAAQ,EAAE,IAAI,CAAC,SAAS;4BACxB,IAAI,EAAE,IAAI,CAAC,IAAI;AAChB,yBAAA;qBACF;;qBACI;oBACL,OAAO;AACL,wBAAA,QAAQ,EAAE;AACR,4BAAA,QAAQ,EAAE,KAAK,CAAC,SAAS,IAAI,EAAE;4BAC/B,OAAO,EAAE,KAAK,CAAC,GAAG;AACnB,yBAAA;qBACF;;;AAIL,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,QAAQ,EAAE;gBAClC,OAAO;AACL,oBAAA,UAAU,EAAE;AACV,wBAAA,QAAQ,EAAE,KAAK,CAAC,SAAS,IAAI,EAAE;wBAC/B,IAAI,EAAE,KAAK,CAAC,IAAI;AACjB,qBAAA;iBACF;;YAGH,MAAM,IAAI,KAAK,CAAC,CAAA,yBAAA,EAA4B,KAAK,CAAC,WAAW,CAAE,CAAA,CAAC;SACjE;AAED,QAAA,qBAAqB,CAAC,KAAK,EAAA;YACzB,IAAI,CAAC,iBAAiB,EAAE;AACtB,gBAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;AAEtD,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,MAAM,EAAE;gBAChC,OAAO;oBACL,IAAI,EAAE,KAAK,CAAC,IAAI;iBACjB;;AAEH,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,KAAK,EAAE;AAC/B,gBAAA,MAAM,IAAI,GAAGA,2BAAkB,CAAC,EAAE,OAAO,EAAE,KAAK,CAAC,GAAG,EAAE,CAAC;gBACvD,IAAI,IAAI,EAAE;oBACR,OAAO;AACL,wBAAA,UAAU,EAAE;4BACV,QAAQ,EAAE,IAAI,CAAC,SAAS;4BACxB,IAAI,EAAE,IAAI,CAAC,IAAI;AAChB,yBAAA;qBACF;;qBACI;oBACL,OAAO;AACL,wBAAA,QAAQ,EAAE;AACR,4BAAA,QAAQ,EAAE,KAAK,CAAC,SAAS,IAAI,EAAE;4BAC/B,OAAO,EAAE,KAAK,CAAC,GAAG;AACnB,yBAAA;qBACF;;;AAIL,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,QAAQ,EAAE;gBAClC,OAAO;AACL,oBAAA,UAAU,EAAE;AACV,wBAAA,QAAQ,EAAE,KAAK,CAAC,SAAS,IAAI,EAAE;wBAC/B,IAAI,EAAE,KAAK,CAAC,IAAI;AACjB,qBAAA;iBACF;;YAEH,MAAM,IAAI,KAAK,CAAC,CAAA,yBAAA,EAA4B,KAAK,CAAC,WAAW,CAAE,CAAA,CAAC;SACjE;KACF;AACD,IAAA,OAAO,6BAA6B;AACtC;AAEA,SAAS,8BAA8B,CACrC,OAA8B,EAC9B,iBAA0B,EAAA;AAE1B,IAAA,IAAIC,2BAAkB,CAAC,OAAO,CAAC,EAAE;QAC/B,OAAOC,sCAA6B,CAClC,OAAO,EACP,iCAAiC,CAAC,iBAAiB,CAAC,CACrD;;AAGH,IAAA,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE;AAC3B,QAAA,OAAO,EAAE,IAAI,EAAE,OAAO,CAAC,IAAI,EAAE;;AACxB,SAAA,IAAI,OAAO,CAAC,IAAI,KAAK,gBAAgB,EAAE;AAC5C,QAAA,OAAO,EAAE,cAAc,EAAE,OAAO,CAAC,cAAc,EAAE;;AAC5C,SAAA,IAAI,OAAO,CAAC,IAAI,KAAK,qBAAqB,EAAE;AACjD,QAAA,OAAO,EAAE,mBAAmB,EAAE,OAAO,CAAC,mBAAmB,EAAE;;AACtD,SAAA,IAAI,OAAO,CAAC,IAAI,KAAK,WAAW,EAAE;QACvC,IAAI,CAAC,iBAAiB,EAAE;AACtB,YAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC;;AAEvD,QAAA,IAAI,MAAc;AAClB,QAAA,IAAI,OAAO,OAAO,CAAC,SAAS,KAAK,QAAQ,EAAE;AACzC,YAAA,MAAM,GAAG,OAAO,CAAC,SAAS;;AACrB,aAAA,IACL,OAAO,OAAO,CAAC,SAAS,KAAK,QAAQ;AACrC,YAAA,KAAK,IAAI,OAAO,CAAC,SAAS,EAC1B;AACA,YAAA,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,GAAG;;aACzB;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC;;AAEpE,QAAA,MAAM,CAAC,EAAE,EAAE,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC;QACpC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;AAC3B,YAAA,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC;;AAGpE,QAAA,MAAM,CAAC,QAAQ,EAAE,QAAQ,CAAC,GAAG,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;AAChE,QAAA,IAAI,QAAQ,KAAK,QAAQ,EAAE;AACzB,YAAA,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC;;QAGpE,OAAO;AACL,YAAA,UAAU,EAAE;gBACV,IAAI;gBACJ,QAAQ;AACT,aAAA;SACF;;AACI,SAAA,IAAI,OAAO,CAAC,IAAI,KAAK,OAAO,EAAE;AACnC,QAAA,OAAO,mBAAmB,CAAC,OAAO,CAAC;;AAC9B,SAAA,IAAI,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,OAAO;AACL,YAAA,YAAY,EAAE;gBACZ,IAAI,EAAE,OAAO,CAAC,IAAI;gBAClB,IAAI,EAAE,OAAO,CAAC,KAAK;AACpB,aAAA;SACF;;SACI,IACL,OAAO,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,CAAC,KAAK,IAAI;;QAEpC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,MAAM,KAAK,CAAC;AACpC,QAAA,MAAM,IAAI,OAAO;AACjB,QAAA,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,
EAChC;QACA,OAAO;AACL,YAAA,UAAU,EAAE;gBACV,QAAQ,EAAE,OAAO,CAAC,IAAI;gBACtB,IAAI,EAAE,OAAO,CAAC,IAAI;AACnB,aAAA;SACF;;AACI,SAAA,IAAI,cAAc,IAAI,OAAO,EAAE;;AAEpC,QAAA,OAAO,SAAS;;SACX;AACL,QAAA,IAAI,MAAM,IAAI,OAAO,EAAE;YACrB,MAAM,IAAI,KAAK,CAAC,CAAA,qBAAA,EAAwB,OAAO,CAAC,IAAI,CAAE,CAAA,CAAC;;aAClD;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,CAAA,gBAAA,EAAmB,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAE,CAAA,CAAC;;;AAGnE;SAEgB,4BAA4B,CAC1C,OAAoB,EACpB,iBAA0B,EAC1B,gBAA+B,EAAA;AAE/B,IAAA,IAAIC,sBAAa,CAAC,OAAO,CAAC,EAAE;AAC1B,QAAA,MAAM,WAAW,GACf,OAAO,CAAC,IAAI;AACZ,YAAA,iCAAiC,CAAC,OAAO,EAAE,gBAAgB,CAAC;AAC9D,QAAA,IAAI,WAAW,KAAK,SAAS,EAAE;YAC7B,MAAM,IAAI,KAAK,CACb,CAAA,oHAAA,EAAuH,OAAO,CAAC,EAAE,CAA6F,2FAAA,CAAA,CAC/N;;QAGH,MAAM,MAAM,GAAG,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO;cACvC,OAAO,CAAC;AACR,iBAAA,GAAG,CAAC,CAAC,CAAC,KAAK,8BAA8B,CAAC,CAAC,EAAE,iBAAiB,CAAC;iBAC/D,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,SAAS;AAChC,cAAE,OAAO,CAAC,OAAO;AAEnB,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;YAC9B,OAAO;AACL,gBAAA;AACE,oBAAA,gBAAgB,EAAE;AAChB,wBAAA,IAAI,EAAE,WAAW;;;wBAGjB,QAAQ,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,EAAE;AACzC,qBAAA;AACF,iBAAA;aACF;;QAGH,OAAO;AACL,YAAA;AACE,gBAAA,gBAAgB,EAAE;AAChB,oBAAA,IAAI,EAAE,WAAW;;oBAEjB,QAAQ,EAAE,EAAE,MAAM,EAAE;AACrB,iBAAA;AACF,aAAA;SACF;;IAGH,IAAI,aAAa,GAAuB,EAAE;IAC1C,MAAM,YAAY,GAAW,EAAE;IAE/B,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,IAAI,OAAO,CAAC,OAAO,EAAE;QAC1D,YAAY,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,OAAO,EAAE,CAAC;;IAG9C,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;AAClC,QAAA,YAAY,CAAC,IAAI,CACf,GAAI,OAAO,CAAC;AACT,aAAA,GAAG,CAAC,CAAC,CAAC,KAAK,8BAA8B,CAAC,CAAC,EAAE,iBAAiB,CAAC;aAC/D,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,SAAS,CAAY,CAC7C;;AAGH,IAAA,IAAIJ,oBAAW,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,UAAU,EAAE,MAAM,IAAI,IAAI,EAAE;QAC9D,aAAa,GAAG,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,EAAE,KAAI;YAC5C,OAAO;AACL,gBAAA,YAAY,EAAE;oBACZ,IAAI,EAAE,EAAE,CAAC,IAAI;oBACb,IAAI,EAAE,EAAE,CAAC,IAAI;AACd,iBAAA;aACF;AACH,SAAC,CAAC;;AAGJ,IAAA,OAAO,CAAC,GAAG,YAAY,EAAE,GAAG,aAAa,CAAC;AAC5C;AAEM,SAAU,4BAA4B,CAC1CK,UAAuB,EACvB,iBAA0B,EAC1B,qCAA8C,KAAK,EAAA;IAEnD,OAAOA,UAAQ,CAAC,MAAM,CAIpB,CAAC,GAAG,EAAE,OAAO,EAAE,KAAK,KAAI;AACtB,QAAA,IAAI,CAACC,sBAAa,CAAC,OAAO,CAAC,EAAE;AAC3B,YAAA,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC;;AAE9C,QAAA,MAAM,MAAM,GAAG,gBAAgB,CAAC,OAAO,CAAC;QACxC,IAAI,MAAM,KAAK,QAAQ,IAAI,KAAK,KAAK,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC;;AAE3D,QAAA,MAAM,IAAI,GAAG,mBAAmB,CAAC,MAAM,CAAC;AAExC,QAAA,MAAM,WAAW,GAAG,GAAG,CAAC,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC;QACrD,IACE,CAAC,GAAG,CAAC,wBAAwB;YAC7B,WAAW;AACX,YAAA,WAAW,CAAC,IAAI,KAAK,IAAI,EACzB;AACA,YAAA,MAAM,IAAI,KAAK,CACb,kEAAkE,CACnE;;AAGH,QAAA,MAAM,KAAK,GAAG,4BAA4B,CACxC,OAAO,EACP,iBAAiB,EACjBD,UAAQ,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CACzB;AAED,QAAA,IAAI,GAAG,CAAC,wBAAwB,EAAE;AAChC,YAAA,MAAM,WAAW,GAAG,GAAG,CAAC,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC;YACzD,IAAI,CAAC,WAAW,EAAE;AAChB,gBAAA,MAAM,IAAI,KAAK,CACb,mFAAmF,CACpF;;YAEH,WAAW,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC;YAEhC,OAAO;AACL,gBAAA,wBAAwB,EAAE,KAAK;gBAC/B,OAAO,EAAE,GAAG,CAAC,OAAO;aACrB;;QAEH,IAAI,UAAU,GAAG,IAAI;QACrB,IACE,UAAU,KAAK,UAAU;aACxB,UAAU,KAAK,QAAQ,IAAI,CAAC,kCAAkC,CAAC,EAChE;;YAEA,UAAU,GAAG,MAAM;;AAErB,QAAA,MAAM,OAAO,GAAY;AACvB,YAAA,IAAI,EAAE,UAAU;YAChB,KAAK;SACN;QACD,OAAO;AACL,YAAA,wBAAwB,EACtB,MAAM,KAAK,QAAQ,IAAI,CAAC,kCAAkC;AAC5D,YAAA,OAAO,EAAE,CAAC,IAAI,GAAG,CAAC,OAAO,IAAI,EAAE,CAAC,EAAE,OAAO,CAAC;SAC3C;AACH,KAAC,EACD,EAAE,OAAO,EAAE,EAAE,EAAE,wBAAwB,EAAE,KAAK,EAAE,CACjD,CAAC,OAAO;AACX;AAEgB,SAAA,2CAA2C,CACzD,QAAyC,EACzC,KAGC,EAAA;AAED,IAAA,IAAI,CAAC,QAAQ,CAAC,UAAU,IAAI,QAAQ,
CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;AAC5D,QAAA,OAAO,IAAI;;AAEb,IAAA,MAAM,aAAa,GAAG,QAAQ,CAAC,aAAa,EAAE;AAC9C,IAAA,MAAM,CAAC,SAAS,CAAC,GAAG,QAAQ,CAAC,UAE5B;AACD,IAAA,MAAM,EAAE,OAAO,EAAE,gBAAgB,EAAE,GAAG,cAAc,EAAE,GAAG,SAAS,IAAI,EAAE;AACxE,IAAA,IAAI,OAAmC;;IAEvC,MAAM,cAAc,GAAa,EAAE;IACnC,IACE,gBAAgB,IAAI,IAAI;AACxB,QAAA,KAAK,CAAC,OAAO,CAAC,gBAAgB,CAAC,KAAK,CAAC;AACrC,QAAA,gBAAgB,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,MAAM,IAAI,CAAC,CAAC,EAChD;;QAEA,MAAM,SAAS,GAAa,EAAE;AAC9B,QAAA,KAAK,MAAM,IAAI,IAAI,gBAAgB,CAAC,KAAK,EAAE;YACzC,IAAI,SAAS,IAAI,IAAI,IAAI,IAAI,CAAC,OAAO,KAAK,IAAI,EAAE;gBAC9C,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;gBACpC;;YAEF,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;;AAEjC,QAAA,OAAO,GAAG,SAAS,CAAC,IAAI,CAAC,EAAE,CAAC;;SACvB,IAAI,gBAAgB,IAAI,KAAK,CAAC,OAAO,CAAC,gBAAgB,CAAC,KAAK,CAAC,EAAE;QACpE,OAAO,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,KAAI;AACzC,YAAA,IAAI,MAAM,IAAI,CAAC,IAAI,SAAS,IAAI,CAAC,IAAI,CAAC,CAAC,OAAO,KAAK,IAAI,EAAE;gBACvD,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,IAAI,EAAE,CAAC;;AAC5B,iBAAA,IAAI,MAAM,IAAI,CAAC,EAAE;gBACtB,OAAO;AACL,oBAAA,IAAI,EAAE,MAAM;oBACZ,IAAI,EAAE,CAAC,CAAC,IAAI;iBACb;;AACI,iBAAA,IAAI,gBAAgB,IAAI,CAAC,EAAE;gBAChC,OAAO;AACL,oBAAA,IAAI,EAAE,gBAAgB;oBACtB,cAAc,EAAE,CAAC,CAAC,cAAc;iBACjC;;AACI,iBAAA,IAAI,qBAAqB,IAAI,CAAC,EAAE;gBACrC,OAAO;AACL,oBAAA,IAAI,EAAE,qBAAqB;oBAC3B,mBAAmB,EAAE,CAAC,CAAC,mBAAmB;iBAC3C;;AAEH,YAAA,OAAO,CAAC;AACV,SAAC,CAAC;;SACG;;QAEL,OAAO,GAAG,EAAE;;IAGd,IAAI,IAAI,GAAG,EAAE;AACb,IAAA,IAAI,OAAO,OAAO,KAAK,QAAQ,IAAI,OAAO,EAAE;QAC1C,IAAI,GAAG,OAAO;;AACT,SAAA,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;AACjC,QAAA,MAAM,KAAK,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,MAAM,IAAI,CAAC,CAEhC;AACb,QAAA,IAAI,GAAG,KAAK,EAAE,IAAI,IAAI,EAAE;;IAG1B,MAAM,cAAc,GAAoB,EAAE;IAC1C,IAAI,aAAa,EAAE;AACjB,QAAA,cAAc,CAAC,IAAI,CACjB,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM;AAC5B,YAAA,GAAG,EAAE;YACL,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC;;;AAG7B,YAAA,IAAI,EAAE,iBAA0B;YAChC,EAAE,EAAE,IAAI,IAAI,EAAE,IAAI,OAAO,EAAE,CAAC,EAAE,KAAK,QAAQ,GAAG,EAAE,CAAC,EAAE,GAAGE,OAAM,EAAE;SAC/D,CAAC,CAAC,CACJ;;IAGH,MAAM,iBAAiB,GAAmD,EAAE;AAC5E,IAAA,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE;QAC7B,iBAAiB,CAAC,SAAS,GAAG,cAAc,CAAC,IAAI,CAAC,EAAE,CAAC;;AAGvD,IAAA,IAAI,SAAS,EAAE,iBAAiB,EAAE;AAChC,QAAA,iBAAiB,CAAC,iBAAiB,GAAG,SAAS,CAAC,iBAAiB;;IAGnE,MAAM,YAAY,GAChB,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,YAAY,KAAK,MAAM;QAC/C,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,YAAY,KAAK,YAAY;QACrD,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,YAAY,KAAK,QAAQ;IAEnD,OAAO,IAAIC,2BAAmB,CAAC;QAC7B,IAAI;QACJ,OAAO,EAAE,IAAIC,uBAAc,CAAC;AAC1B,YAAA,OAAO,EAAE,OAAO;AAChB,YAAA,IAAI,EAAE,CAAC,gBAAgB,GAAG,SAAS,GAAG,gBAAgB,CAAC,IAAI;AAC3D,YAAA,gBAAgB,EAAE,cAAc;;;YAGhC,iBAAiB;YACjB,cAAc,EAAE,YAAY,GAAG,KAAK,CAAC,aAAa,GAAG,SAAS;SAC/D,CAAC;QACF,cAAc;AACf,KAAA,CAAC;AACJ;;;;;;;;"}
+
{"version":3,"file":"common.cjs","sources":["../../../../../src/llm/google/utils/common.ts"],"sourcesContent":["import {\n POSSIBLE_ROLES,\n type Part,\n type Content,\n type TextPart,\n type FileDataPart,\n type InlineDataPart,\n type FunctionCallPart,\n type GenerateContentCandidate,\n type EnhancedGenerateContentResponse,\n type FunctionDeclaration as GenerativeAIFunctionDeclaration,\n type FunctionDeclarationsTool as GoogleGenerativeAIFunctionDeclarationsTool,\n} from '@google/generative-ai';\nimport {\n AIMessageChunk,\n BaseMessage,\n ChatMessage,\n ToolMessage,\n ToolMessageChunk,\n MessageContent,\n MessageContentComplex,\n UsageMetadata,\n isAIMessage,\n isBaseMessage,\n isToolMessage,\n StandardContentBlockConverter,\n parseBase64DataUrl,\n convertToProviderContentBlock,\n isDataContentBlock,\n} from '@langchain/core/messages';\nimport { ChatGenerationChunk } from '@langchain/core/outputs';\nimport type { ChatGeneration } from '@langchain/core/outputs';\nimport { isLangChainTool } from '@langchain/core/utils/function_calling';\nimport { isOpenAITool } from '@langchain/core/language_models/base';\nimport { ToolCallChunk } from '@langchain/core/messages/tool';\nimport { v4 as uuidv4 } from 'uuid';\nimport {\n jsonSchemaToGeminiParameters,\n schemaToGenerativeAIParameters,\n} from './zod_to_genai_parameters';\nimport { GoogleGenerativeAIToolType } from '../types';\n\nexport function getMessageAuthor(message: BaseMessage): string {\n const type = message._getType();\n if (ChatMessage.isInstance(message)) {\n return message.role;\n }\n if (type === 'tool') {\n return type;\n }\n return message.name ?? type;\n}\n\n/**\n * Maps a message type to a Google Generative AI chat author.\n * @param message The message to map.\n * @param model The model to use for mapping.\n * @returns The message type mapped to a Google Generative AI chat author.\n */\nexport function convertAuthorToRole(\n author: string\n): (typeof POSSIBLE_ROLES)[number] {\n switch (author) {\n /**\n * Note: Gemini currently is not supporting system messages\n * we will convert them to human messages and merge with following\n * */\n case 'supervisor':\n case 'ai':\n case 'model': // getMessageAuthor returns message.name. code ex.: return message.name ?? type;\n return 'model';\n case 'system':\n return 'system';\n case 'human':\n return 'user';\n case 'tool':\n case 'function':\n return 'function';\n default:\n throw new Error(`Unknown / unsupported author: ${author}`);\n }\n}\n\nfunction messageContentMedia(content: MessageContentComplex): Part {\n if ('mimeType' in content && 'data' in content) {\n return {\n inlineData: {\n mimeType: content.mimeType,\n data: content.data,\n },\n };\n }\n if ('mimeType' in content && 'fileUri' in content) {\n return {\n fileData: {\n mimeType: content.mimeType,\n fileUri: content.fileUri,\n },\n };\n }\n\n throw new Error('Invalid media content');\n}\n\nfunction inferToolNameFromPreviousMessages(\n message: ToolMessage | ToolMessageChunk,\n previousMessages: BaseMessage[]\n): string | undefined {\n return previousMessages\n .map((msg) => {\n if (isAIMessage(msg)) {\n return msg.tool_calls ?? 
[];\n }\n return [];\n })\n .flat()\n .find((toolCall) => {\n return toolCall.id === message.tool_call_id;\n })?.name;\n}\n\nfunction _getStandardContentBlockConverter(\n isMultimodalModel: boolean\n): StandardContentBlockConverter<{\n text: TextPart;\n image: FileDataPart | InlineDataPart;\n audio: FileDataPart | InlineDataPart;\n file: FileDataPart | InlineDataPart | TextPart;\n}> {\n const standardContentBlockConverter: StandardContentBlockConverter<{\n text: TextPart;\n image: FileDataPart | InlineDataPart;\n audio: FileDataPart | InlineDataPart;\n file: FileDataPart | InlineDataPart | TextPart;\n }> = {\n providerName: 'Google Gemini',\n\n fromStandardTextBlock(block) {\n return {\n text: block.text,\n };\n },\n\n fromStandardImageBlock(block): FileDataPart | InlineDataPart {\n if (!isMultimodalModel) {\n throw new Error('This model does not support images');\n }\n if (block.source_type === 'url') {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (data) {\n return {\n inlineData: {\n mimeType: data.mime_type,\n data: data.data,\n },\n };\n } else {\n return {\n fileData: {\n mimeType: block.mime_type ?? '',\n fileUri: block.url,\n },\n };\n }\n }\n\n if (block.source_type === 'base64') {\n return {\n inlineData: {\n mimeType: block.mime_type ?? '',\n data: block.data,\n },\n };\n }\n\n throw new Error(`Unsupported source type: ${block.source_type}`);\n },\n\n fromStandardAudioBlock(block): FileDataPart | InlineDataPart {\n if (!isMultimodalModel) {\n throw new Error('This model does not support audio');\n }\n if (block.source_type === 'url') {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (data) {\n return {\n inlineData: {\n mimeType: data.mime_type,\n data: data.data,\n },\n };\n } else {\n return {\n fileData: {\n mimeType: block.mime_type ?? '',\n fileUri: block.url,\n },\n };\n }\n }\n\n if (block.source_type === 'base64') {\n return {\n inlineData: {\n mimeType: block.mime_type ?? '',\n data: block.data,\n },\n };\n }\n\n throw new Error(`Unsupported source type: ${block.source_type}`);\n },\n\n fromStandardFileBlock(block): FileDataPart | InlineDataPart | TextPart {\n if (!isMultimodalModel) {\n throw new Error('This model does not support files');\n }\n if (block.source_type === 'text') {\n return {\n text: block.text,\n };\n }\n if (block.source_type === 'url') {\n const data = parseBase64DataUrl({ dataUrl: block.url });\n if (data) {\n return {\n inlineData: {\n mimeType: data.mime_type,\n data: data.data,\n },\n };\n } else {\n return {\n fileData: {\n mimeType: block.mime_type ?? '',\n fileUri: block.url,\n },\n };\n }\n }\n\n if (block.source_type === 'base64') {\n return {\n inlineData: {\n mimeType: block.mime_type ?? 
'',\n data: block.data,\n },\n };\n }\n throw new Error(`Unsupported source type: ${block.source_type}`);\n },\n };\n return standardContentBlockConverter;\n}\n\nfunction _convertLangChainContentToPart(\n content: MessageContentComplex,\n isMultimodalModel: boolean\n): Part | undefined {\n if (isDataContentBlock(content)) {\n return convertToProviderContentBlock(\n content,\n _getStandardContentBlockConverter(isMultimodalModel)\n );\n }\n\n if (content.type === 'text') {\n return { text: content.text };\n } else if (content.type === 'executableCode') {\n return { executableCode: content.executableCode };\n } else if (content.type === 'codeExecutionResult') {\n return { codeExecutionResult: content.codeExecutionResult };\n } else if (content.type === 'image_url') {\n if (!isMultimodalModel) {\n throw new Error('This model does not support images');\n }\n let source: string;\n if (typeof content.image_url === 'string') {\n source = content.image_url;\n } else if (\n typeof content.image_url === 'object' &&\n 'url' in content.image_url\n ) {\n source = content.image_url.url;\n } else {\n throw new Error('Please provide image as base64 encoded data URL');\n }\n const [dm, data] = source.split(',');\n if (!dm.startsWith('data:')) {\n throw new Error('Please provide image as base64 encoded data URL');\n }\n\n const [mimeType, encoding] = dm.replace(/^data:/, '').split(';');\n if (encoding !== 'base64') {\n throw new Error('Please provide image as base64 encoded data URL');\n }\n\n return {\n inlineData: {\n data,\n mimeType,\n },\n };\n } else if (\n content.type === 'document' ||\n content.type === 'audio' ||\n content.type === 'video'\n ) {\n if (!isMultimodalModel) {\n throw new Error(`This model does not support ${content.type}s`);\n }\n return {\n inlineData: {\n data: content.data,\n mimeType: content.mimeType,\n },\n };\n } else if (content.type === 'media') {\n return messageContentMedia(content);\n } else if (content.type === 'tool_use') {\n return {\n functionCall: {\n name: content.name,\n args: content.input,\n },\n };\n } else if (\n content.type?.includes('/') === true &&\n // Ensure it's a single slash.\n content.type.split('/').length === 2 &&\n 'data' in content &&\n typeof content.data === 'string'\n ) {\n return {\n inlineData: {\n mimeType: content.type,\n data: content.data,\n },\n };\n } else if ('functionCall' in content) {\n // No action needed here — function calls will be added later from message.tool_calls\n return undefined;\n } else {\n if ('type' in content) {\n throw new Error(`Unknown content type ${content.type}`);\n } else {\n throw new Error(`Unknown content ${JSON.stringify(content)}`);\n }\n }\n}\n\nexport function convertMessageContentToParts(\n message: BaseMessage,\n isMultimodalModel: boolean,\n previousMessages: BaseMessage[]\n): Part[] {\n if (isToolMessage(message)) {\n const messageName =\n message.name ??\n inferToolNameFromPreviousMessages(message, previousMessages);\n if (messageName === undefined) {\n throw new Error(\n `Google requires a tool name for each tool call response, and we could not infer a called tool name for ToolMessage \"${message.id}\" from your passed messages. Please populate a \"name\" field on that ToolMessage explicitly.`\n );\n }\n\n const result = Array.isArray(message.content)\n ? 
(message.content\n .map((c) => _convertLangChainContentToPart(c, isMultimodalModel))\n .filter((p) => p !== undefined) as Part[])\n : message.content;\n\n if (message.status === 'error') {\n return [\n {\n functionResponse: {\n name: messageName,\n // The API expects an object with an `error` field if the function call fails.\n // `error` must be a valid object (not a string or array), so we wrap `message.content` here\n response: { error: { details: result } },\n },\n },\n ];\n }\n\n return [\n {\n functionResponse: {\n name: messageName,\n // again, can't have a string or array value for `response`, so we wrap it as an object here\n response: { result },\n },\n },\n ];\n }\n\n let functionCalls: FunctionCallPart[] = [];\n const messageParts: Part[] = [];\n\n if (typeof message.content === 'string' && message.content) {\n messageParts.push({ text: message.content });\n }\n\n if (Array.isArray(message.content)) {\n messageParts.push(\n ...(message.content\n .map((c) => _convertLangChainContentToPart(c, isMultimodalModel))\n .filter((p) => p !== undefined) as Part[])\n );\n }\n\n if (isAIMessage(message) && message.tool_calls?.length != null) {\n functionCalls = message.tool_calls.map((tc) => {\n return {\n functionCall: {\n name: tc.name,\n args: tc.args,\n },\n };\n });\n }\n\n return [...messageParts, ...functionCalls];\n}\n\nexport function convertBaseMessagesToContent(\n messages: BaseMessage[],\n isMultimodalModel: boolean,\n convertSystemMessageToHumanContent: boolean = false\n): Content[] | undefined {\n return messages.reduce<{\n content: Content[] | undefined;\n mergeWithPreviousContent: boolean;\n }>(\n (acc, message, index) => {\n if (!isBaseMessage(message)) {\n throw new Error('Unsupported message input');\n }\n const author = getMessageAuthor(message);\n if (author === 'system' && index !== 0) {\n throw new Error('System message should be the first one');\n }\n const role = convertAuthorToRole(author);\n\n const prevContent = acc.content?.[acc.content.length];\n if (\n !acc.mergeWithPreviousContent &&\n prevContent &&\n prevContent.role === role\n ) {\n throw new Error(\n 'Google Generative AI requires alternate messages between authors'\n );\n }\n\n const parts = convertMessageContentToParts(\n message,\n isMultimodalModel,\n messages.slice(0, index)\n );\n\n if (acc.mergeWithPreviousContent) {\n const prevContent = acc.content?.[acc.content.length - 1];\n if (!prevContent) {\n throw new Error(\n 'There was a problem parsing your system message. Please try a prompt without one.'\n );\n }\n prevContent.parts.push(...parts);\n\n return {\n mergeWithPreviousContent: false,\n content: acc.content,\n };\n }\n let actualRole = role;\n if (\n actualRole === 'function' ||\n (actualRole === 'system' && !convertSystemMessageToHumanContent)\n ) {\n // GenerativeAI API will throw an error if the role is not \"user\" or \"model.\"\n actualRole = 'user';\n }\n const content: Content = {\n role: actualRole,\n parts,\n };\n return {\n mergeWithPreviousContent:\n author === 'system' && !convertSystemMessageToHumanContent,\n content: [...(acc.content ?? 
[]), content],\n };\n },\n { content: [], mergeWithPreviousContent: false }\n ).content;\n}\n\nexport function convertResponseContentToChatGenerationChunk(\n response: EnhancedGenerateContentResponse,\n extra: {\n usageMetadata?: UsageMetadata | undefined;\n index: number;\n }\n): ChatGenerationChunk | null {\n if (!response.candidates || response.candidates.length === 0) {\n return null;\n }\n const functionCalls = response.functionCalls();\n const [candidate] = response.candidates as [\n Partial<GenerateContentCandidate> | undefined,\n ];\n const { content: candidateContent, ...generationInfo } = candidate ?? {};\n let content: MessageContent | undefined;\n // Checks if some parts do not have text. If false, it means that the content is a string.\n const reasoningParts: string[] = [];\n if (\n candidateContent != null &&\n Array.isArray(candidateContent.parts) &&\n candidateContent.parts.every((p) => 'text' in p)\n ) {\n // content = candidateContent.parts.map((p) => p.text).join('');\n const textParts: string[] = [];\n for (const part of candidateContent.parts) {\n if ('thought' in part && part.thought === true) {\n reasoningParts.push(part.text ?? '');\n continue;\n }\n textParts.push(part.text ?? '');\n }\n content = textParts.join('');\n } else if (candidateContent && Array.isArray(candidateContent.parts)) {\n content = candidateContent.parts.map((p) => {\n if ('text' in p && 'thought' in p && p.thought === true) {\n reasoningParts.push(p.text ?? '');\n } else if ('text' in p) {\n return {\n type: 'text',\n text: p.text,\n };\n } else if ('executableCode' in p) {\n return {\n type: 'executableCode',\n executableCode: p.executableCode,\n };\n } else if ('codeExecutionResult' in p) {\n return {\n type: 'codeExecutionResult',\n codeExecutionResult: p.codeExecutionResult,\n };\n }\n return p;\n });\n } else {\n // no content returned - likely due to abnormal stop reason, e.g. malformed function call\n content = [];\n }\n\n let text = '';\n if (typeof content === 'string' && content) {\n text = content;\n } else if (Array.isArray(content)) {\n const block = content.find((b) => 'text' in b) as\n | { text: string }\n | undefined;\n text = block?.text ?? '';\n }\n\n const toolCallChunks: ToolCallChunk[] = [];\n if (functionCalls) {\n toolCallChunks.push(\n ...functionCalls.map((fc) => ({\n ...fc,\n args: JSON.stringify(fc.args),\n // Un-commenting this causes LangChain to incorrectly merge tool calls together\n // index: extra.index,\n type: 'tool_call_chunk' as const,\n id: 'id' in fc && typeof fc.id === 'string' ? fc.id : uuidv4(),\n }))\n );\n }\n\n const additional_kwargs: ChatGeneration['message']['additional_kwargs'] = {};\n if (reasoningParts.length > 0) {\n additional_kwargs.reasoning = reasoningParts.join('');\n }\n\n if (candidate?.groundingMetadata) {\n additional_kwargs.groundingMetadata = candidate.groundingMetadata;\n }\n\n const isFinalChunk =\n response.candidates[0]?.finishReason === 'STOP' ||\n response.candidates[0]?.finishReason === 'MAX_TOKENS' ||\n response.candidates[0]?.finishReason === 'SAFETY';\n\n return new ChatGenerationChunk({\n text,\n message: new AIMessageChunk({\n content: content,\n name: !candidateContent ? undefined : candidateContent.role,\n tool_call_chunks: toolCallChunks,\n // Each chunk can have unique \"generationInfo\", and merging strategy is unclear,\n // so leave blank for now.\n additional_kwargs,\n usage_metadata: isFinalChunk ? 
extra.usageMetadata : undefined,\n }),\n generationInfo,\n });\n}\n\nexport function convertToGenerativeAITools(\n tools: GoogleGenerativeAIToolType[]\n): GoogleGenerativeAIFunctionDeclarationsTool[] {\n if (\n tools.every(\n (tool) =>\n 'functionDeclarations' in tool &&\n Array.isArray(tool.functionDeclarations)\n )\n ) {\n return tools as GoogleGenerativeAIFunctionDeclarationsTool[];\n }\n return [\n {\n functionDeclarations: tools.map(\n (tool): GenerativeAIFunctionDeclaration => {\n if (isLangChainTool(tool)) {\n const jsonSchema = schemaToGenerativeAIParameters(tool.schema);\n if (\n jsonSchema.type === 'object' &&\n 'properties' in jsonSchema &&\n Object.keys(jsonSchema.properties).length === 0\n ) {\n return {\n name: tool.name,\n description: tool.description,\n };\n }\n return {\n name: tool.name,\n description: tool.description,\n parameters: jsonSchema,\n };\n }\n if (isOpenAITool(tool)) {\n return {\n name: tool.function.name,\n description:\n tool.function.description ?? 'A function available to call.',\n parameters: jsonSchemaToGeminiParameters(\n tool.function.parameters\n ),\n };\n }\n return tool as unknown as GenerativeAIFunctionDeclaration;\n }\n ),\n },\n ];\n}\n"],"names":["ChatMessage","isAIMessage","parseBase64DataUrl","isDataContentBlock","convertToProviderContentBlock","isToolMessage","messages","isBaseMessage","uuidv4","ChatGenerationChunk","AIMessageChunk"],"mappings":";;;;;;;;;;AA0CM,SAAU,gBAAgB,CAAC,OAAoB,EAAA;AACnD,IAAA,MAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE;AAC/B,IAAA,IAAIA,oBAAW,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACnC,OAAO,OAAO,CAAC,IAAI;;AAErB,IAAA,IAAI,IAAI,KAAK,MAAM,EAAE;AACnB,QAAA,OAAO,IAAI;;AAEb,IAAA,OAAO,OAAO,CAAC,IAAI,IAAI,IAAI;AAC7B;AAEA;;;;;AAKG;AACG,SAAU,mBAAmB,CACjC,MAAc,EAAA;IAEd,QAAQ,MAAM;AACd;;;AAGO;AACP,QAAA,KAAK,YAAY;AACjB,QAAA,KAAK,IAAI;QACT,KAAK,OAAO;AACV,YAAA,OAAO,OAAO;AAChB,QAAA,KAAK,QAAQ;AACX,YAAA,OAAO,QAAQ;AACjB,QAAA,KAAK,OAAO;AACV,YAAA,OAAO,MAAM;AACf,QAAA,KAAK,MAAM;AACX,QAAA,KAAK,UAAU;AACb,YAAA,OAAO,UAAU;AACnB,QAAA;AACE,YAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,MAAM,CAAA,CAAE,CAAC;;AAE9D;AAEA,SAAS,mBAAmB,CAAC,OAA8B,EAAA;IACzD,IAAI,UAAU,IAAI,OAAO,IAAI,MAAM,IAAI,OAAO,EAAE;QAC9C,OAAO;AACL,YAAA,UAAU,EAAE;gBACV,QAAQ,EAAE,OAAO,CAAC,QAAQ;gBAC1B,IAAI,EAAE,OAAO,CAAC,IAAI;AACnB,aAAA;SACF;;IAEH,IAAI,UAAU,IAAI,OAAO,IAAI,SAAS,IAAI,OAAO,EAAE;QACjD,OAAO;AACL,YAAA,QAAQ,EAAE;gBACR,QAAQ,EAAE,OAAO,CAAC,QAAQ;gBAC1B,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,aAAA;SACF;;AAGH,IAAA,MAAM,IAAI,KAAK,CAAC,uBAAuB,CAAC;AAC1C;AAEA,SAAS,iCAAiC,CACxC,OAAuC,EACvC,gBAA+B,EAAA;AAE/B,IAAA,OAAO;AACJ,SAAA,GAAG,CAAC,CAAC,GAAG,KAAI;AACX,QAAA,IAAIC,oBAAW,CAAC,GAAG,CAAC,EAAE;AACpB,YAAA,OAAO,GAAG,CAAC,UAAU,IAAI,EAAE;;AAE7B,QAAA,OAAO,EAAE;AACX,KAAC;AACA,SAAA,IAAI;AACJ,SAAA,IAAI,CAAC,CAAC,QAAQ,KAAI;AACjB,QAAA,OAAO,QAAQ,CAAC,EAAE,KAAK,OAAO,CAAC,YAAY;KAC5C,CAAC,EAAE,IAAI;AACZ;AAEA,SAAS,iCAAiC,CACxC,iBAA0B,EAAA;AAO1B,IAAA,MAAM,6BAA6B,GAK9B;AACH,QAAA,YAAY,EAAE,eAAe;AAE7B,QAAA,qBAAqB,CAAC,KAAK,EAAA;YACzB,OAAO;gBACL,IAAI,EAAE,KAAK,CAAC,IAAI;aACjB;SACF;AAED,QAAA,sBAAsB,CAAC,KAAK,EAAA;YAC1B,IAAI,CAAC,iBAAiB,EAAE;AACtB,gBAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC;;AAEvD,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,KAAK,EAAE;AAC/B,gBAAA,MAAM,IAAI,GAAGC,2BAAkB,CAAC,EAAE,OAAO,EAAE,KAAK,CAAC,GAAG,EAAE,CAAC;gBACvD,IAAI,IAAI,EAAE;oBACR,OAAO;AACL,wBAAA,UAAU,EAAE;4BACV,QAAQ,EAAE,IAAI,CAAC,SAAS;4BACxB,IAAI,EAAE,IAAI,CAAC,IAAI;AAChB,yBAAA;qBACF;;qBACI;oBACL,OAAO;AACL,wBAAA,QAAQ,EAAE;AACR,4BAAA,QAAQ,EAAE,KAAK,CAAC,SAAS,IAAI,EAAE;4BAC/B,OAAO,EAAE,KAAK,CAAC,GAAG;AACnB,yBAAA;qBACF;;;AAIL,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,QAAQ,EAAE;gBAClC,OAAO;AACL,oBAAA,UAAU,
EAAE;AACV,wBAAA,QAAQ,EAAE,KAAK,CAAC,SAAS,IAAI,EAAE;wBAC/B,IAAI,EAAE,KAAK,CAAC,IAAI;AACjB,qBAAA;iBACF;;YAGH,MAAM,IAAI,KAAK,CAAC,CAAA,yBAAA,EAA4B,KAAK,CAAC,WAAW,CAAE,CAAA,CAAC;SACjE;AAED,QAAA,sBAAsB,CAAC,KAAK,EAAA;YAC1B,IAAI,CAAC,iBAAiB,EAAE;AACtB,gBAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;AAEtD,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,KAAK,EAAE;AAC/B,gBAAA,MAAM,IAAI,GAAGA,2BAAkB,CAAC,EAAE,OAAO,EAAE,KAAK,CAAC,GAAG,EAAE,CAAC;gBACvD,IAAI,IAAI,EAAE;oBACR,OAAO;AACL,wBAAA,UAAU,EAAE;4BACV,QAAQ,EAAE,IAAI,CAAC,SAAS;4BACxB,IAAI,EAAE,IAAI,CAAC,IAAI;AAChB,yBAAA;qBACF;;qBACI;oBACL,OAAO;AACL,wBAAA,QAAQ,EAAE;AACR,4BAAA,QAAQ,EAAE,KAAK,CAAC,SAAS,IAAI,EAAE;4BAC/B,OAAO,EAAE,KAAK,CAAC,GAAG;AACnB,yBAAA;qBACF;;;AAIL,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,QAAQ,EAAE;gBAClC,OAAO;AACL,oBAAA,UAAU,EAAE;AACV,wBAAA,QAAQ,EAAE,KAAK,CAAC,SAAS,IAAI,EAAE;wBAC/B,IAAI,EAAE,KAAK,CAAC,IAAI;AACjB,qBAAA;iBACF;;YAGH,MAAM,IAAI,KAAK,CAAC,CAAA,yBAAA,EAA4B,KAAK,CAAC,WAAW,CAAE,CAAA,CAAC;SACjE;AAED,QAAA,qBAAqB,CAAC,KAAK,EAAA;YACzB,IAAI,CAAC,iBAAiB,EAAE;AACtB,gBAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;AAEtD,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,MAAM,EAAE;gBAChC,OAAO;oBACL,IAAI,EAAE,KAAK,CAAC,IAAI;iBACjB;;AAEH,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,KAAK,EAAE;AAC/B,gBAAA,MAAM,IAAI,GAAGA,2BAAkB,CAAC,EAAE,OAAO,EAAE,KAAK,CAAC,GAAG,EAAE,CAAC;gBACvD,IAAI,IAAI,EAAE;oBACR,OAAO;AACL,wBAAA,UAAU,EAAE;4BACV,QAAQ,EAAE,IAAI,CAAC,SAAS;4BACxB,IAAI,EAAE,IAAI,CAAC,IAAI;AAChB,yBAAA;qBACF;;qBACI;oBACL,OAAO;AACL,wBAAA,QAAQ,EAAE;AACR,4BAAA,QAAQ,EAAE,KAAK,CAAC,SAAS,IAAI,EAAE;4BAC/B,OAAO,EAAE,KAAK,CAAC,GAAG;AACnB,yBAAA;qBACF;;;AAIL,YAAA,IAAI,KAAK,CAAC,WAAW,KAAK,QAAQ,EAAE;gBAClC,OAAO;AACL,oBAAA,UAAU,EAAE;AACV,wBAAA,QAAQ,EAAE,KAAK,CAAC,SAAS,IAAI,EAAE;wBAC/B,IAAI,EAAE,KAAK,CAAC,IAAI;AACjB,qBAAA;iBACF;;YAEH,MAAM,IAAI,KAAK,CAAC,CAAA,yBAAA,EAA4B,KAAK,CAAC,WAAW,CAAE,CAAA,CAAC;SACjE;KACF;AACD,IAAA,OAAO,6BAA6B;AACtC;AAEA,SAAS,8BAA8B,CACrC,OAA8B,EAC9B,iBAA0B,EAAA;AAE1B,IAAA,IAAIC,2BAAkB,CAAC,OAAO,CAAC,EAAE;QAC/B,OAAOC,sCAA6B,CAClC,OAAO,EACP,iCAAiC,CAAC,iBAAiB,CAAC,CACrD;;AAGH,IAAA,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,EAAE;AAC3B,QAAA,OAAO,EAAE,IAAI,EAAE,OAAO,CAAC,IAAI,EAAE;;AACxB,SAAA,IAAI,OAAO,CAAC,IAAI,KAAK,gBAAgB,EAAE;AAC5C,QAAA,OAAO,EAAE,cAAc,EAAE,OAAO,CAAC,cAAc,EAAE;;AAC5C,SAAA,IAAI,OAAO,CAAC,IAAI,KAAK,qBAAqB,EAAE;AACjD,QAAA,OAAO,EAAE,mBAAmB,EAAE,OAAO,CAAC,mBAAmB,EAAE;;AACtD,SAAA,IAAI,OAAO,CAAC,IAAI,KAAK,WAAW,EAAE;QACvC,IAAI,CAAC,iBAAiB,EAAE;AACtB,YAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC;;AAEvD,QAAA,IAAI,MAAc;AAClB,QAAA,IAAI,OAAO,OAAO,CAAC,SAAS,KAAK,QAAQ,EAAE;AACzC,YAAA,MAAM,GAAG,OAAO,CAAC,SAAS;;AACrB,aAAA,IACL,OAAO,OAAO,CAAC,SAAS,KAAK,QAAQ;AACrC,YAAA,KAAK,IAAI,OAAO,CAAC,SAAS,EAC1B;AACA,YAAA,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,GAAG;;aACzB;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC;;AAEpE,QAAA,MAAM,CAAC,EAAE,EAAE,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC;QACpC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;AAC3B,YAAA,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC;;AAGpE,QAAA,MAAM,CAAC,QAAQ,EAAE,QAAQ,CAAC,GAAG,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;AAChE,QAAA,IAAI,QAAQ,KAAK,QAAQ,EAAE;AACzB,YAAA,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC;;QAGpE,OAAO;AACL,YAAA,UAAU,EAAE;gBACV,IAAI;gBACJ,QAAQ;AACT,aAAA;SACF;;AACI,SAAA,IACL,OAAO,CAAC,IAAI,KAAK,UAAU;QAC3B,OAAO,CAAC,IAAI,KAAK,OAAO;AACxB,QAAA,OAAO,CAAC,IAAI,KAAK,OAAO,EACxB;QACA,IAAI,CAAC,iBAAiB,EAAE;YACtB,MAAM,IAAI,KAAK,CAAC,CAAA,4BAAA,EAA+B,OAAO,CAAC,IAAI,CAAG,CAAA,CAAA,CAAC;;QAEjE,OAAO;AACL,YAAA,UAAU,EAAE;gBACV,IAAI,EAAE,OAAO,CAAC,IAAI;gBAClB,QAAQ,EAAE,OAAO,CAAC,QAAQ;AAC3B,aAAA;SACF;;AACI,SAAA,IAAI,OAAO,CAAC,IAAI,KAAK,OAAO,EAAE;AACnC,QAAA,OAAO,mBAAmB,CAAC,OAAO,CAAC;;AAC9B,SAAA,IAAI,OAAO,
CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,OAAO;AACL,YAAA,YAAY,EAAE;gBACZ,IAAI,EAAE,OAAO,CAAC,IAAI;gBAClB,IAAI,EAAE,OAAO,CAAC,KAAK;AACpB,aAAA;SACF;;SACI,IACL,OAAO,CAAC,IAAI,EAAE,QAAQ,CAAC,GAAG,CAAC,KAAK,IAAI;;QAEpC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,MAAM,KAAK,CAAC;AACpC,QAAA,MAAM,IAAI,OAAO;AACjB,QAAA,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,EAChC;QACA,OAAO;AACL,YAAA,UAAU,EAAE;gBACV,QAAQ,EAAE,OAAO,CAAC,IAAI;gBACtB,IAAI,EAAE,OAAO,CAAC,IAAI;AACnB,aAAA;SACF;;AACI,SAAA,IAAI,cAAc,IAAI,OAAO,EAAE;;AAEpC,QAAA,OAAO,SAAS;;SACX;AACL,QAAA,IAAI,MAAM,IAAI,OAAO,EAAE;YACrB,MAAM,IAAI,KAAK,CAAC,CAAA,qBAAA,EAAwB,OAAO,CAAC,IAAI,CAAE,CAAA,CAAC;;aAClD;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,CAAA,gBAAA,EAAmB,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAE,CAAA,CAAC;;;AAGnE;SAEgB,4BAA4B,CAC1C,OAAoB,EACpB,iBAA0B,EAC1B,gBAA+B,EAAA;AAE/B,IAAA,IAAIC,sBAAa,CAAC,OAAO,CAAC,EAAE;AAC1B,QAAA,MAAM,WAAW,GACf,OAAO,CAAC,IAAI;AACZ,YAAA,iCAAiC,CAAC,OAAO,EAAE,gBAAgB,CAAC;AAC9D,QAAA,IAAI,WAAW,KAAK,SAAS,EAAE;YAC7B,MAAM,IAAI,KAAK,CACb,CAAA,oHAAA,EAAuH,OAAO,CAAC,EAAE,CAA6F,2FAAA,CAAA,CAC/N;;QAGH,MAAM,MAAM,GAAG,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO;cACvC,OAAO,CAAC;AACR,iBAAA,GAAG,CAAC,CAAC,CAAC,KAAK,8BAA8B,CAAC,CAAC,EAAE,iBAAiB,CAAC;iBAC/D,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,SAAS;AAChC,cAAE,OAAO,CAAC,OAAO;AAEnB,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,OAAO,EAAE;YAC9B,OAAO;AACL,gBAAA;AACE,oBAAA,gBAAgB,EAAE;AAChB,wBAAA,IAAI,EAAE,WAAW;;;wBAGjB,QAAQ,EAAE,EAAE,KAAK,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,EAAE;AACzC,qBAAA;AACF,iBAAA;aACF;;QAGH,OAAO;AACL,YAAA;AACE,gBAAA,gBAAgB,EAAE;AAChB,oBAAA,IAAI,EAAE,WAAW;;oBAEjB,QAAQ,EAAE,EAAE,MAAM,EAAE;AACrB,iBAAA;AACF,aAAA;SACF;;IAGH,IAAI,aAAa,GAAuB,EAAE;IAC1C,MAAM,YAAY,GAAW,EAAE;IAE/B,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,IAAI,OAAO,CAAC,OAAO,EAAE;QAC1D,YAAY,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,OAAO,EAAE,CAAC;;IAG9C,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;AAClC,QAAA,YAAY,CAAC,IAAI,CACf,GAAI,OAAO,CAAC;AACT,aAAA,GAAG,CAAC,CAAC,CAAC,KAAK,8BAA8B,CAAC,CAAC,EAAE,iBAAiB,CAAC;aAC/D,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,SAAS,CAAY,CAC7C;;AAGH,IAAA,IAAIJ,oBAAW,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,UAAU,EAAE,MAAM,IAAI,IAAI,EAAE;QAC9D,aAAa,GAAG,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,EAAE,KAAI;YAC5C,OAAO;AACL,gBAAA,YAAY,EAAE;oBACZ,IAAI,EAAE,EAAE,CAAC,IAAI;oBACb,IAAI,EAAE,EAAE,CAAC,IAAI;AACd,iBAAA;aACF;AACH,SAAC,CAAC;;AAGJ,IAAA,OAAO,CAAC,GAAG,YAAY,EAAE,GAAG,aAAa,CAAC;AAC5C;AAEM,SAAU,4BAA4B,CAC1CK,UAAuB,EACvB,iBAA0B,EAC1B,qCAA8C,KAAK,EAAA;IAEnD,OAAOA,UAAQ,CAAC,MAAM,CAIpB,CAAC,GAAG,EAAE,OAAO,EAAE,KAAK,KAAI;AACtB,QAAA,IAAI,CAACC,sBAAa,CAAC,OAAO,CAAC,EAAE;AAC3B,YAAA,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC;;AAE9C,QAAA,MAAM,MAAM,GAAG,gBAAgB,CAAC,OAAO,CAAC;QACxC,IAAI,MAAM,KAAK,QAAQ,IAAI,KAAK,KAAK,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC;;AAE3D,QAAA,MAAM,IAAI,GAAG,mBAAmB,CAAC,MAAM,CAAC;AAExC,QAAA,MAAM,WAAW,GAAG,GAAG,CAAC,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,MAAM,CAAC;QACrD,IACE,CAAC,GAAG,CAAC,wBAAwB;YAC7B,WAAW;AACX,YAAA,WAAW,CAAC,IAAI,KAAK,IAAI,EACzB;AACA,YAAA,MAAM,IAAI,KAAK,CACb,kEAAkE,CACnE;;AAGH,QAAA,MAAM,KAAK,GAAG,4BAA4B,CACxC,OAAO,EACP,iBAAiB,EACjBD,UAAQ,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CACzB;AAED,QAAA,IAAI,GAAG,CAAC,wBAAwB,EAAE;AAChC,YAAA,MAAM,WAAW,GAAG,GAAG,CAAC,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC;YACzD,IAAI,CAAC,WAAW,EAAE;AAChB,gBAAA,MAAM,IAAI,KAAK,CACb,mFAAmF,CACpF;;YAEH,WAAW,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC;YAEhC,OAAO;AACL,gBAAA,wBAAwB,EAAE,KAAK;gBAC/B,OAAO,EAAE,GAAG,CAAC,OAAO;aACrB;;QAEH,IAAI,UAAU,GAAG,IAAI;QACrB,IACE,UAAU,KAAK,UAAU;aACxB,UAAU,KAAK,QAAQ,IAAI,CAAC,kCAAkC,CAAC,EAChE;;YAEA,UAAU,GAAG,MAAM;;AAErB,QAAA,MAAM,OAAO,GAAY;AACvB,YAAA,IAAI,EAAE,UAAU;YAChB,K
AAK;SACN;QACD,OAAO;AACL,YAAA,wBAAwB,EACtB,MAAM,KAAK,QAAQ,IAAI,CAAC,kCAAkC;AAC5D,YAAA,OAAO,EAAE,CAAC,IAAI,GAAG,CAAC,OAAO,IAAI,EAAE,CAAC,EAAE,OAAO,CAAC;SAC3C;AACH,KAAC,EACD,EAAE,OAAO,EAAE,EAAE,EAAE,wBAAwB,EAAE,KAAK,EAAE,CACjD,CAAC,OAAO;AACX;AAEgB,SAAA,2CAA2C,CACzD,QAAyC,EACzC,KAGC,EAAA;AAED,IAAA,IAAI,CAAC,QAAQ,CAAC,UAAU,IAAI,QAAQ,CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;AAC5D,QAAA,OAAO,IAAI;;AAEb,IAAA,MAAM,aAAa,GAAG,QAAQ,CAAC,aAAa,EAAE;AAC9C,IAAA,MAAM,CAAC,SAAS,CAAC,GAAG,QAAQ,CAAC,UAE5B;AACD,IAAA,MAAM,EAAE,OAAO,EAAE,gBAAgB,EAAE,GAAG,cAAc,EAAE,GAAG,SAAS,IAAI,EAAE;AACxE,IAAA,IAAI,OAAmC;;IAEvC,MAAM,cAAc,GAAa,EAAE;IACnC,IACE,gBAAgB,IAAI,IAAI;AACxB,QAAA,KAAK,CAAC,OAAO,CAAC,gBAAgB,CAAC,KAAK,CAAC;AACrC,QAAA,gBAAgB,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,MAAM,IAAI,CAAC,CAAC,EAChD;;QAEA,MAAM,SAAS,GAAa,EAAE;AAC9B,QAAA,KAAK,MAAM,IAAI,IAAI,gBAAgB,CAAC,KAAK,EAAE;YACzC,IAAI,SAAS,IAAI,IAAI,IAAI,IAAI,CAAC,OAAO,KAAK,IAAI,EAAE;gBAC9C,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;gBACpC;;YAEF,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,EAAE,CAAC;;AAEjC,QAAA,OAAO,GAAG,SAAS,CAAC,IAAI,CAAC,EAAE,CAAC;;SACvB,IAAI,gBAAgB,IAAI,KAAK,CAAC,OAAO,CAAC,gBAAgB,CAAC,KAAK,CAAC,EAAE;QACpE,OAAO,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,KAAI;AACzC,YAAA,IAAI,MAAM,IAAI,CAAC,IAAI,SAAS,IAAI,CAAC,IAAI,CAAC,CAAC,OAAO,KAAK,IAAI,EAAE;gBACvD,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,IAAI,EAAE,CAAC;;AAC5B,iBAAA,IAAI,MAAM,IAAI,CAAC,EAAE;gBACtB,OAAO;AACL,oBAAA,IAAI,EAAE,MAAM;oBACZ,IAAI,EAAE,CAAC,CAAC,IAAI;iBACb;;AACI,iBAAA,IAAI,gBAAgB,IAAI,CAAC,EAAE;gBAChC,OAAO;AACL,oBAAA,IAAI,EAAE,gBAAgB;oBACtB,cAAc,EAAE,CAAC,CAAC,cAAc;iBACjC;;AACI,iBAAA,IAAI,qBAAqB,IAAI,CAAC,EAAE;gBACrC,OAAO;AACL,oBAAA,IAAI,EAAE,qBAAqB;oBAC3B,mBAAmB,EAAE,CAAC,CAAC,mBAAmB;iBAC3C;;AAEH,YAAA,OAAO,CAAC;AACV,SAAC,CAAC;;SACG;;QAEL,OAAO,GAAG,EAAE;;IAGd,IAAI,IAAI,GAAG,EAAE;AACb,IAAA,IAAI,OAAO,OAAO,KAAK,QAAQ,IAAI,OAAO,EAAE;QAC1C,IAAI,GAAG,OAAO;;AACT,SAAA,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;AACjC,QAAA,MAAM,KAAK,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,MAAM,IAAI,CAAC,CAEhC;AACb,QAAA,IAAI,GAAG,KAAK,EAAE,IAAI,IAAI,EAAE;;IAG1B,MAAM,cAAc,GAAoB,EAAE;IAC1C,IAAI,aAAa,EAAE;AACjB,QAAA,cAAc,CAAC,IAAI,CACjB,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM;AAC5B,YAAA,GAAG,EAAE;YACL,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC;;;AAG7B,YAAA,IAAI,EAAE,iBAA0B;YAChC,EAAE,EAAE,IAAI,IAAI,EAAE,IAAI,OAAO,EAAE,CAAC,EAAE,KAAK,QAAQ,GAAG,EAAE,CAAC,EAAE,GAAGE,OAAM,EAAE;SAC/D,CAAC,CAAC,CACJ;;IAGH,MAAM,iBAAiB,GAAmD,EAAE;AAC5E,IAAA,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE;QAC7B,iBAAiB,CAAC,SAAS,GAAG,cAAc,CAAC,IAAI,CAAC,EAAE,CAAC;;AAGvD,IAAA,IAAI,SAAS,EAAE,iBAAiB,EAAE;AAChC,QAAA,iBAAiB,CAAC,iBAAiB,GAAG,SAAS,CAAC,iBAAiB;;IAGnE,MAAM,YAAY,GAChB,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,YAAY,KAAK,MAAM;QAC/C,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,YAAY,KAAK,YAAY;QACrD,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,YAAY,KAAK,QAAQ;IAEnD,OAAO,IAAIC,2BAAmB,CAAC;QAC7B,IAAI;QACJ,OAAO,EAAE,IAAIC,uBAAc,CAAC;AAC1B,YAAA,OAAO,EAAE,OAAO;AAChB,YAAA,IAAI,EAAE,CAAC,gBAAgB,GAAG,SAAS,GAAG,gBAAgB,CAAC,IAAI;AAC3D,YAAA,gBAAgB,EAAE,cAAc;;;YAGhC,iBAAiB;YACjB,cAAc,EAAE,YAAY,GAAG,KAAK,CAAC,aAAa,GAAG,SAAS;SAC/D,CAAC;QACF,cAAc;AACf,KAAA,CAAC;AACJ;;;;;;;;"}
@@ -6,6 +6,9 @@ var ollama = require('@langchain/ollama');
 var utils = require('./utils.cjs');
 
 class ChatOllama extends ollama.ChatOllama {
+    static lc_name() {
+        return 'LibreChatOllama';
+    }
     async *_streamResponseChunks(messages$1, options, runManager) {
         if (this.checkOrPullModel) {
             // eslint-disable-next-line @typescript-eslint/ban-ts-comment
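Note: the hunk above only touches the compiled CJS output; the corresponding TypeScript change (visible in the updated source map below) is a `static lc_name()` override, which LangChain uses as the class's name for serialization and run metadata. A minimal sketch of that source-level shape, assuming nothing beyond what the source map shows:

```ts
import { ChatOllama as BaseChatOllama } from '@langchain/ollama';

export class ChatOllama extends BaseChatOllama {
  // Overriding lc_name() gives the subclass its own LangChain identifier,
  // so serialized configs and run events report 'LibreChatOllama' instead
  // of the upstream class name.
  static lc_name(): 'LibreChatOllama' {
    return 'LibreChatOllama';
  }
}
```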
@@ -1 +1 @@
-
{"version":3,"file":"index.cjs","sources":["../../../../src/llm/ollama/index.ts"],"sourcesContent":["import { AIMessageChunk } from '@langchain/core/messages';\nimport { ChatGenerationChunk } from '@langchain/core/outputs';\nimport { ChatOllama as BaseChatOllama } from '@langchain/ollama';\nimport { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';\nimport type {\n ChatResponse as OllamaChatResponse,\n Message as OllamaMessage,\n} from 'ollama';\nimport type { UsageMetadata, BaseMessage } from '@langchain/core/messages';\nimport {\n convertOllamaMessagesToLangChain,\n convertToOllamaMessages,\n} from './utils';\n\nexport class ChatOllama extends BaseChatOllama {\n async *_streamResponseChunks(\n messages: BaseMessage[],\n options: this['ParsedCallOptions'],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n if (this.checkOrPullModel) {\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n if (!((await this.checkModelExistsOnMachine(this.model)) as boolean)) {\n await this.pull(this.model, {\n logProgress: true,\n });\n }\n }\n\n const params = this.invocationParams(options);\n // TODO: remove cast after SDK adds support for tool calls\n const ollamaMessages = convertToOllamaMessages(messages) as OllamaMessage[];\n\n const usageMetadata: UsageMetadata = {\n input_tokens: 0,\n output_tokens: 0,\n total_tokens: 0,\n };\n\n const stream = await this.client.chat({\n ...params,\n messages: ollamaMessages,\n stream: true,\n });\n\n let lastMetadata: Omit<OllamaChatResponse, 'message'> | undefined;\n\n for await (const chunk of stream) {\n if (options.signal?.aborted === true) {\n this.client.abort();\n }\n const { message: responseMessage, ...rest } =\n chunk as Partial<OllamaChatResponse>;\n usageMetadata.input_tokens += rest.prompt_eval_count ?? 0;\n usageMetadata.output_tokens += rest.eval_count ?? 0;\n usageMetadata.total_tokens =\n usageMetadata.input_tokens + usageMetadata.output_tokens;\n lastMetadata = rest as Omit<OllamaChatResponse, 'message'>;\n if (!responseMessage) {\n continue;\n }\n const message = convertOllamaMessagesToLangChain(responseMessage);\n const generationChunk = new ChatGenerationChunk({\n text: responseMessage.content || '',\n message,\n });\n yield generationChunk;\n await runManager?.handleLLMNewToken(\n responseMessage.content || '',\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n }\n\n // Yield the `response_metadata` as the final chunk.\n yield new ChatGenerationChunk({\n text: '',\n message: new AIMessageChunk({\n content: '',\n response_metadata: lastMetadata,\n usage_metadata: usageMetadata,\n }),\n });\n }\n}\n"],"names":["BaseChatOllama","messages","convertToOllamaMessages","convertOllamaMessagesToLangChain","ChatGenerationChunk","AIMessageChunk"],"mappings":";;;;;;;AAcM,MAAO,UAAW,SAAQA,iBAAc,CAAA;
+
{"version":3,"file":"index.cjs","sources":["../../../../src/llm/ollama/index.ts"],"sourcesContent":["import { AIMessageChunk } from '@langchain/core/messages';\nimport { ChatGenerationChunk } from '@langchain/core/outputs';\nimport { ChatOllama as BaseChatOllama } from '@langchain/ollama';\nimport { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';\nimport type {\n ChatResponse as OllamaChatResponse,\n Message as OllamaMessage,\n} from 'ollama';\nimport type { UsageMetadata, BaseMessage } from '@langchain/core/messages';\nimport {\n convertOllamaMessagesToLangChain,\n convertToOllamaMessages,\n} from './utils';\n\nexport class ChatOllama extends BaseChatOllama {\n static lc_name(): 'LibreChatOllama' {\n return 'LibreChatOllama';\n }\n async *_streamResponseChunks(\n messages: BaseMessage[],\n options: this['ParsedCallOptions'],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n if (this.checkOrPullModel) {\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n if (!((await this.checkModelExistsOnMachine(this.model)) as boolean)) {\n await this.pull(this.model, {\n logProgress: true,\n });\n }\n }\n\n const params = this.invocationParams(options);\n // TODO: remove cast after SDK adds support for tool calls\n const ollamaMessages = convertToOllamaMessages(messages) as OllamaMessage[];\n\n const usageMetadata: UsageMetadata = {\n input_tokens: 0,\n output_tokens: 0,\n total_tokens: 0,\n };\n\n const stream = await this.client.chat({\n ...params,\n messages: ollamaMessages,\n stream: true,\n });\n\n let lastMetadata: Omit<OllamaChatResponse, 'message'> | undefined;\n\n for await (const chunk of stream) {\n if (options.signal?.aborted === true) {\n this.client.abort();\n }\n const { message: responseMessage, ...rest } =\n chunk as Partial<OllamaChatResponse>;\n usageMetadata.input_tokens += rest.prompt_eval_count ?? 0;\n usageMetadata.output_tokens += rest.eval_count ?? 
0;\n usageMetadata.total_tokens =\n usageMetadata.input_tokens + usageMetadata.output_tokens;\n lastMetadata = rest as Omit<OllamaChatResponse, 'message'>;\n if (!responseMessage) {\n continue;\n }\n const message = convertOllamaMessagesToLangChain(responseMessage);\n const generationChunk = new ChatGenerationChunk({\n text: responseMessage.content || '',\n message,\n });\n yield generationChunk;\n await runManager?.handleLLMNewToken(\n responseMessage.content || '',\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk: generationChunk }\n );\n }\n\n // Yield the `response_metadata` as the final chunk.\n yield new ChatGenerationChunk({\n text: '',\n message: new AIMessageChunk({\n content: '',\n response_metadata: lastMetadata,\n usage_metadata: usageMetadata,\n }),\n });\n }\n}\n"],"names":["BaseChatOllama","messages","convertToOllamaMessages","convertOllamaMessagesToLangChain","ChatGenerationChunk","AIMessageChunk"],"mappings":";;;;;;;AAcM,MAAO,UAAW,SAAQA,iBAAc,CAAA;AAC5C,IAAA,OAAO,OAAO,GAAA;AACZ,QAAA,OAAO,iBAAiB;;IAE1B,OAAO,qBAAqB,CAC1BC,UAAuB,EACvB,OAAkC,EAClC,UAAqC,EAAA;AAErC,QAAA,IAAI,IAAI,CAAC,gBAAgB,EAAE;;;AAGzB,YAAA,IAAI,EAAG,MAAM,IAAI,CAAC,yBAAyB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAa,EAAE;AACpE,gBAAA,MAAM,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AAC1B,oBAAA,WAAW,EAAE,IAAI;AAClB,iBAAA,CAAC;;;QAIN,MAAM,MAAM,GAAG,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;;AAE7C,QAAA,MAAM,cAAc,GAAGC,6BAAuB,CAACD,UAAQ,CAAoB;AAE3E,QAAA,MAAM,aAAa,GAAkB;AACnC,YAAA,YAAY,EAAE,CAAC;AACf,YAAA,aAAa,EAAE,CAAC;AAChB,YAAA,YAAY,EAAE,CAAC;SAChB;QAED,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC;AACpC,YAAA,GAAG,MAAM;AACT,YAAA,QAAQ,EAAE,cAAc;AACxB,YAAA,MAAM,EAAE,IAAI;AACb,SAAA,CAAC;AAEF,QAAA,IAAI,YAA6D;AAEjE,QAAA,WAAW,MAAM,KAAK,IAAI,MAAM,EAAE;YAChC,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,KAAK,IAAI,EAAE;AACpC,gBAAA,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE;;YAErB,MAAM,EAAE,OAAO,EAAE,eAAe,EAAE,GAAG,IAAI,EAAE,GACzC,KAAoC;YACtC,aAAa,CAAC,YAAY,IAAI,IAAI,CAAC,iBAAiB,IAAI,CAAC;YACzD,aAAa,CAAC,aAAa,IAAI,IAAI,CAAC,UAAU,IAAI,CAAC;AACnD,YAAA,aAAa,CAAC,YAAY;AACxB,gBAAA,aAAa,CAAC,YAAY,GAAG,aAAa,CAAC,aAAa;YAC1D,YAAY,GAAG,IAA2C;YAC1D,IAAI,CAAC,eAAe,EAAE;gBACpB;;AAEF,YAAA,MAAM,OAAO,GAAGE,sCAAgC,CAAC,eAAe,CAAC;AACjE,YAAA,MAAM,eAAe,GAAG,IAAIC,2BAAmB,CAAC;AAC9C,gBAAA,IAAI,EAAE,eAAe,CAAC,OAAO,IAAI,EAAE;gBACnC,OAAO;AACR,aAAA,CAAC;AACF,YAAA,MAAM,eAAe;YACrB,MAAM,UAAU,EAAE,iBAAiB,CACjC,eAAe,CAAC,OAAO,IAAI,EAAE,EAC7B,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,eAAe,EAAE,CAC3B;;;QAIH,MAAM,IAAIA,2BAAmB,CAAC;AAC5B,YAAA,IAAI,EAAE,EAAE;YACR,OAAO,EAAE,IAAIC,uBAAc,CAAC;AAC1B,gBAAA,OAAO,EAAE,EAAE;AACX,gBAAA,iBAAiB,EAAE,YAAY;AAC/B,gBAAA,cAAc,EAAE,aAAa;aAC9B,CAAC;AACH,SAAA,CAAC;;AAEL;;;;"}
@@ -8,6 +8,11 @@ require('@langchain/core/utils/function_calling');
 var deepseek = require('@langchain/deepseek');
 var openai = require('@langchain/openai');
 var index = require('./utils/index.cjs');
+require('../../common/enum.cjs');
+require('nanoid');
+require('../../messages/core.cjs');
+var run = require('../../utils/run.cjs');
+require('js-tiktoken/lite');
 
 // eslint-disable-next-line @typescript-eslint/explicit-function-return-type
 const iife = (fn) => fn();
|
@@ -106,9 +111,17 @@ class CustomAzureOpenAIClient extends openai$1.AzureOpenAI {
 }
 /** @ts-expect-error We are intentionally overriding `getReasoningParams` */
 class ChatOpenAI extends openai.ChatOpenAI {
+    _lc_stream_delay;
+    constructor(fields) {
+        super(fields);
+        this._lc_stream_delay = fields?._lc_stream_delay;
+    }
     get exposedClient() {
         return this.client;
     }
+    static lc_name() {
+        return 'LibreChatOpenAI';
+    }
     _getClientOptions(options) {
         if (!this.client) {
             const openAIEndpointConfig = {
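The pattern added here (and repeated for AzureChatOpenAI and ChatXAI in later hunks) stores an optional `_lc_stream_delay` off the constructor fields and registers a distinct `lc_name()`. A hedged TypeScript sketch of the same shape; the `DelayedFields` type is illustrative, since the public typings for this field are not part of this diff:

```ts
import { ChatOpenAI as BaseChatOpenAI } from '@langchain/openai';

// Illustrative field type: the base constructor's fields plus the extra key
// read in the compiled output above.
type DelayedFields = NonNullable<
  ConstructorParameters<typeof BaseChatOpenAI>[0]
> & { _lc_stream_delay?: number };

export class ChatOpenAI extends BaseChatOpenAI {
  // Optional per-chunk delay, consumed by the streaming generators (see the
  // `run.sleep(this._lc_stream_delay)` calls added in later hunks).
  _lc_stream_delay?: number;

  constructor(fields?: DelayedFields) {
    super(fields);
    this._lc_stream_delay = fields?._lc_stream_delay;
  }

  static lc_name(): 'LibreChatOpenAI' {
    return 'LibreChatOpenAI';
  }
}
```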
@@ -137,7 +150,8 @@ class ChatOpenAI extends openai.ChatOpenAI {
      * @internal
      */
     getReasoningParams(options) {
-
+        const lc_name = this.constructor.lc_name();
+        if (lc_name === 'LibreChatOpenAI' && !index.isReasoningModel(this.model)) {
             return;
         }
         // apply options in reverse order of importance -- newer options supersede older options
@@ -173,6 +187,9 @@ class ChatOpenAI extends openai.ChatOpenAI {
             if (chunk == null)
                 continue;
             yield chunk;
+            if (this._lc_stream_delay != null) {
+                await run.sleep(this._lc_stream_delay);
+            }
             await runManager?.handleLLMNewToken(chunk.text || '', undefined, undefined, undefined, undefined, { chunk });
         }
         return;
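The generator change above sleeps after each yielded chunk whenever `_lc_stream_delay` is set, which effectively throttles token streaming. A generic sketch of that pattern; the `withStreamDelay` wrapper and local `sleep` are illustrative stand-ins (the package's own helper is the `run.sleep` imported from `../../utils/run.cjs`):

```ts
// Stand-in for the `run.sleep` helper required above.
const sleep = (ms: number): Promise<void> =>
  new Promise((resolve) => setTimeout(resolve, ms));

// Wraps any async chunk stream so a fixed pause follows each yielded chunk,
// mirroring the `if (this._lc_stream_delay != null) { await run.sleep(...) }`
// blocks inserted into the streaming generators.
async function* withStreamDelay<T>(
  source: AsyncIterable<T>,
  delayMs?: number
): AsyncGenerator<T> {
  for await (const chunk of source) {
    yield chunk;
    if (delayMs != null) {
      await sleep(delayMs);
    }
  }
}
```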
@@ -208,6 +225,10 @@ class ChatOpenAI extends openai.ChatOpenAI {
             else if ('reasoning' in delta) {
                 chunk.additional_kwargs.reasoning_content = delta.reasoning;
             }
+            if ('provider_specific_fields' in delta) {
+                chunk.additional_kwargs.provider_specific_fields =
+                    delta.provider_specific_fields;
+            }
             defaultRole = delta.role ?? defaultRole;
             const newTokenIndices = {
                 prompt: options.promptIndex ?? 0,
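This hunk forwards any `provider_specific_fields` present on a streamed delta into the chunk's `additional_kwargs`. A hedged sketch of that copy with an illustrative delta type (the real delta shape comes from the OpenAI-compatible streaming response and is not fully shown in this diff):

```ts
// Illustrative delta shape; only the fields touched by this hunk are typed.
interface StreamDelta {
  role?: string;
  reasoning?: string;
  provider_specific_fields?: Record<string, unknown>;
}

// Mirrors the added block: copy the provider payload through unchanged so
// downstream consumers can read it from `additional_kwargs`.
function copyProviderSpecificFields(
  delta: StreamDelta,
  additional_kwargs: Record<string, unknown>
): void {
  if ('provider_specific_fields' in delta) {
    additional_kwargs.provider_specific_fields = delta.provider_specific_fields;
  }
}
```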
@@ -237,6 +258,9 @@ class ChatOpenAI extends openai.ChatOpenAI {
                 generationInfo,
             });
             yield generationChunk;
+            if (this._lc_stream_delay != null) {
+                await run.sleep(this._lc_stream_delay);
+            }
             await runManager?.handleLLMNewToken(generationChunk.text || '', newTokenIndices, undefined, undefined, undefined, { chunk: generationChunk });
         }
         if (usage) {
@@ -277,6 +301,9 @@ class ChatOpenAI extends openai.ChatOpenAI {
                 text: '',
             });
             yield generationChunk;
+            if (this._lc_stream_delay != null) {
+                await run.sleep(this._lc_stream_delay);
+            }
         }
         if (options.signal?.aborted === true) {
             throw new Error('AbortError');
@@ -285,9 +312,17 @@
 }
 /** @ts-expect-error We are intentionally overriding `getReasoningParams` */
 class AzureChatOpenAI extends openai.AzureChatOpenAI {
+    _lc_stream_delay;
+    constructor(fields) {
+        super(fields);
+        this._lc_stream_delay = fields?._lc_stream_delay;
+    }
     get exposedClient() {
         return this.client;
     }
+    static lc_name() {
+        return 'LibreChatAzureOpenAI';
+    }
     /**
      * Returns backwards compatible reasoning parameters from constructor params and call options
      * @internal
@@ -381,6 +416,9 @@ class AzureChatOpenAI extends openai.AzureChatOpenAI {
             if (chunk == null)
                 continue;
             yield chunk;
+            if (this._lc_stream_delay != null) {
+                await run.sleep(this._lc_stream_delay);
+            }
             await runManager?.handleLLMNewToken(chunk.text || '', undefined, undefined, undefined, undefined, { chunk });
         }
         return;
@@ -390,6 +428,9 @@ class ChatDeepSeek extends deepseek.ChatDeepSeek {
     get exposedClient() {
         return this.client;
     }
+    static lc_name() {
+        return 'LibreChatDeepSeek';
+    }
     _getClientOptions(options) {
         if (!this.client) {
             const openAIEndpointConfig = {
@@ -415,8 +456,10 @@ class ChatDeepSeek extends deepseek.ChatDeepSeek {
     }
 }
 class ChatXAI extends xai.ChatXAI {
+    _lc_stream_delay;
     constructor(fields) {
         super(fields);
+        this._lc_stream_delay = fields?._lc_stream_delay;
         const customBaseURL = fields?.configuration?.baseURL ?? fields?.clientConfig?.baseURL;
         if (customBaseURL != null && customBaseURL) {
             this.clientConfig = {
@@ -428,6 +471,9 @@ class ChatXAI extends xai.ChatXAI {
             this.client = undefined;
         }
     }
+    static lc_name() {
+        return 'LibreChatXAI';
+    }
     get exposedClient() {
         return this.client;
     }
@@ -518,6 +564,9 @@ class ChatXAI extends xai.ChatXAI {
                 generationInfo,
             });
             yield generationChunk;
+            if (this._lc_stream_delay != null) {
+                await run.sleep(this._lc_stream_delay);
+            }
             await runManager?.handleLLMNewToken(generationChunk.text || '', newTokenIndices, undefined, undefined, undefined, { chunk: generationChunk });
         }
         if (usage) {
@@ -582,6 +631,9 @@ class ChatXAI extends xai.ChatXAI {
                 text: '',
             });
             yield generationChunk;
+            if (this._lc_stream_delay != null) {
+                await run.sleep(this._lc_stream_delay);
+            }
         }
         if (options.signal?.aborted === true) {
             throw new Error('AbortError');