@librechat/agents 2.4.41 → 2.4.43

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62) hide show
  1. package/dist/cjs/common/enum.cjs +4 -2
  2. package/dist/cjs/common/enum.cjs.map +1 -1
  3. package/dist/cjs/graphs/Graph.cjs +5 -6
  4. package/dist/cjs/graphs/Graph.cjs.map +1 -1
  5. package/dist/cjs/llm/google/index.cjs +73 -1
  6. package/dist/cjs/llm/google/index.cjs.map +1 -1
  7. package/dist/cjs/llm/google/utils/common.cjs +469 -0
  8. package/dist/cjs/llm/google/utils/common.cjs.map +1 -0
  9. package/dist/cjs/run.cjs +4 -3
  10. package/dist/cjs/run.cjs.map +1 -1
  11. package/dist/cjs/stream.cjs +5 -2
  12. package/dist/cjs/stream.cjs.map +1 -1
  13. package/dist/cjs/utils/title.cjs +25 -20
  14. package/dist/cjs/utils/title.cjs.map +1 -1
  15. package/dist/esm/common/enum.mjs +4 -2
  16. package/dist/esm/common/enum.mjs.map +1 -1
  17. package/dist/esm/graphs/Graph.mjs +5 -6
  18. package/dist/esm/graphs/Graph.mjs.map +1 -1
  19. package/dist/esm/llm/google/index.mjs +73 -1
  20. package/dist/esm/llm/google/index.mjs.map +1 -1
  21. package/dist/esm/llm/google/utils/common.mjs +463 -0
  22. package/dist/esm/llm/google/utils/common.mjs.map +1 -0
  23. package/dist/esm/run.mjs +4 -3
  24. package/dist/esm/run.mjs.map +1 -1
  25. package/dist/esm/stream.mjs +5 -2
  26. package/dist/esm/stream.mjs.map +1 -1
  27. package/dist/esm/utils/title.mjs +25 -20
  28. package/dist/esm/utils/title.mjs.map +1 -1
  29. package/dist/types/common/enum.d.ts +5 -3
  30. package/dist/types/graphs/Graph.d.ts +3 -2
  31. package/dist/types/llm/google/index.d.ts +10 -5
  32. package/dist/types/llm/google/types.d.ts +32 -0
  33. package/dist/types/llm/google/utils/common.d.ts +19 -0
  34. package/dist/types/llm/google/utils/tools.d.ts +10 -0
  35. package/dist/types/llm/google/utils/zod_to_genai_parameters.d.ts +14 -0
  36. package/dist/types/run.d.ts +1 -1
  37. package/dist/types/scripts/args.d.ts +2 -1
  38. package/dist/types/types/llm.d.ts +2 -0
  39. package/dist/types/types/run.d.ts +1 -0
  40. package/dist/types/types/stream.d.ts +5 -0
  41. package/package.json +1 -1
  42. package/src/common/enum.ts +4 -2
  43. package/src/graphs/Graph.ts +16 -11
  44. package/src/llm/google/index.ts +118 -8
  45. package/src/llm/google/types.ts +43 -0
  46. package/src/llm/google/utils/common.ts +632 -0
  47. package/src/llm/google/utils/tools.ts +160 -0
  48. package/src/llm/google/utils/zod_to_genai_parameters.ts +88 -0
  49. package/src/run.ts +4 -2
  50. package/src/scripts/args.ts +12 -8
  51. package/src/scripts/code_exec.ts +49 -18
  52. package/src/scripts/code_exec_files.ts +48 -17
  53. package/src/scripts/image.ts +52 -20
  54. package/src/scripts/simple.ts +1 -0
  55. package/src/specs/anthropic.simple.test.ts +88 -31
  56. package/src/specs/openai.simple.test.ts +88 -31
  57. package/src/stream.ts +5 -2
  58. package/src/types/llm.ts +2 -0
  59. package/src/types/run.ts +1 -0
  60. package/src/types/stream.ts +6 -0
  61. package/src/utils/llmConfig.ts +2 -2
  62. package/src/utils/title.ts +44 -27
@@ -1,8 +1,11 @@
1
1
  import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
2
2
  import { getEnvironmentVariable } from '@langchain/core/utils/env';
3
3
  import { GoogleGenerativeAI } from '@google/generative-ai';
4
+ import { convertBaseMessagesToContent, convertResponseContentToChatGenerationChunk } from './utils/common.mjs';
4
5
 
6
+ /* eslint-disable @typescript-eslint/ban-ts-comment */
5
7
  class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
8
+ thinkingConfig;
6
9
  constructor(fields) {
7
10
  super(fields);
8
11
  this.model = fields.model.replace(/^models\//, '');
@@ -41,9 +44,9 @@ class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
41
44
  throw new Error('The categories in `safetySettings` array must be unique');
42
45
  }
43
46
  }
47
+ this.thinkingConfig = fields.thinkingConfig ?? this.thinkingConfig;
44
48
  this.streaming = fields.streaming ?? this.streaming;
45
49
  this.json = fields.json;
46
- // eslint-disable-next-line @typescript-eslint/ban-ts-comment
47
50
  // @ts-ignore - Accessing private property from parent class
48
51
  this.client = new GoogleGenerativeAI(this.apiKey).getGenerativeModel({
49
52
  model: this.model,
@@ -65,6 +68,75 @@ class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {
65
68
  });
66
69
  this.streamUsage = fields.streamUsage ?? this.streamUsage;
67
70
  }
71
+ invocationParams(options) {
72
+ const params = super.invocationParams(options);
73
+ return {
74
+ ...params,
75
+ generationConfig: {
76
+ ...params.generationConfig,
77
+ /** @ts-ignore */
78
+ thinkingConfig: this.thinkingConfig,
79
+ },
80
+ };
81
+ }
82
+ async *_streamResponseChunks(messages, options, runManager) {
83
+ const prompt = convertBaseMessagesToContent(messages, this._isMultimodalModel, this.useSystemInstruction);
84
+ let actualPrompt = prompt;
85
+ if (prompt[0].role === 'system') {
86
+ const [systemInstruction] = prompt;
87
+ /** @ts-ignore */
88
+ this.client.systemInstruction = systemInstruction;
89
+ actualPrompt = prompt.slice(1);
90
+ }
91
+ const parameters = this.invocationParams(options);
92
+ const request = {
93
+ ...parameters,
94
+ contents: actualPrompt,
95
+ };
96
+ const stream = await this.caller.callWithOptions({ signal: options.signal }, async () => {
97
+ /** @ts-ignore */
98
+ const { stream } = await this.client.generateContentStream(request);
99
+ return stream;
100
+ });
101
+ let usageMetadata;
102
+ let index = 0;
103
+ for await (const response of stream) {
104
+ if ('usageMetadata' in response &&
105
+ this.streamUsage !== false &&
106
+ options.streamUsage !== false) {
107
+ const genAIUsageMetadata = response.usageMetadata;
108
+ const output_tokens = (genAIUsageMetadata?.candidatesTokenCount ?? 0) +
109
+ (genAIUsageMetadata?.thoughtsTokenCount ?? 0);
110
+ if (!usageMetadata) {
111
+ usageMetadata = {
112
+ input_tokens: genAIUsageMetadata?.promptTokenCount ?? 0,
113
+ output_tokens,
114
+ total_tokens: genAIUsageMetadata?.totalTokenCount ?? 0,
115
+ };
116
+ }
117
+ else {
118
+ // Under the hood, LangChain combines the prompt tokens. Google returns the updated
119
+ // total each time, so we need to find the difference between the tokens.
120
+ const outputTokenDiff = output_tokens - usageMetadata.output_tokens;
121
+ usageMetadata = {
122
+ input_tokens: 0,
123
+ output_tokens: outputTokenDiff,
124
+ total_tokens: outputTokenDiff,
125
+ };
126
+ }
127
+ }
128
+ const chunk = convertResponseContentToChatGenerationChunk(response, {
129
+ usageMetadata,
130
+ index,
131
+ });
132
+ index += 1;
133
+ if (!chunk) {
134
+ continue;
135
+ }
136
+ yield chunk;
137
+ await runManager?.handleLLMNewToken(chunk.text || '', undefined, undefined, undefined, undefined, { chunk });
138
+ }
139
+ }
68
140
  }
69
141
 
70
142
  export { CustomChatGoogleGenerativeAI };
@@ -1 +1 @@
1
- {"version":3,"file":"index.mjs","sources":["../../../../src/llm/google/index.ts"],"sourcesContent":["import { ChatGoogleGenerativeAI } from '@langchain/google-genai';\nimport { getEnvironmentVariable } from '@langchain/core/utils/env';\nimport { GoogleGenerativeAI as GenerativeAI } from '@google/generative-ai';\nimport type { GoogleGenerativeAIChatInput } from '@langchain/google-genai';\nimport type { RequestOptions, SafetySetting } from '@google/generative-ai';\n\nexport class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {\n constructor(\n fields: GoogleGenerativeAIChatInput & {\n customHeaders?: RequestOptions['customHeaders'];\n }\n ) {\n super(fields);\n\n this.model = fields.model.replace(/^models\\//, '');\n\n this.maxOutputTokens = fields.maxOutputTokens ?? this.maxOutputTokens;\n\n if (this.maxOutputTokens != null && this.maxOutputTokens < 0) {\n throw new Error('`maxOutputTokens` must be a positive integer');\n }\n\n this.temperature = fields.temperature ?? this.temperature;\n if (\n this.temperature != null &&\n (this.temperature < 0 || this.temperature > 2)\n ) {\n throw new Error('`temperature` must be in the range of [0.0,2.0]');\n }\n\n this.topP = fields.topP ?? this.topP;\n if (this.topP != null && this.topP < 0) {\n throw new Error('`topP` must be a positive integer');\n }\n\n if (this.topP != null && this.topP > 1) {\n throw new Error('`topP` must be below 1.');\n }\n\n this.topK = fields.topK ?? this.topK;\n if (this.topK != null && this.topK < 0) {\n throw new Error('`topK` must be a positive integer');\n }\n\n this.stopSequences = fields.stopSequences ?? this.stopSequences;\n\n this.apiKey = fields.apiKey ?? 
getEnvironmentVariable('GOOGLE_API_KEY');\n if (this.apiKey == null || this.apiKey === '') {\n throw new Error(\n 'Please set an API key for Google GenerativeAI ' +\n 'in the environment variable GOOGLE_API_KEY ' +\n 'or in the `apiKey` field of the ' +\n 'ChatGoogleGenerativeAI constructor'\n );\n }\n\n this.safetySettings = fields.safetySettings ?? this.safetySettings;\n if (this.safetySettings && this.safetySettings.length > 0) {\n const safetySettingsSet = new Set(\n this.safetySettings.map((s) => s.category)\n );\n if (safetySettingsSet.size !== this.safetySettings.length) {\n throw new Error(\n 'The categories in `safetySettings` array must be unique'\n );\n }\n }\n\n this.streaming = fields.streaming ?? this.streaming;\n this.json = fields.json;\n\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore - Accessing private property from parent class\n this.client = new GenerativeAI(this.apiKey).getGenerativeModel(\n {\n model: this.model,\n safetySettings: this.safetySettings as SafetySetting[],\n generationConfig: {\n stopSequences: this.stopSequences,\n maxOutputTokens: this.maxOutputTokens,\n temperature: this.temperature,\n topP: this.topP,\n topK: this.topK,\n ...(this.json != null\n ? { responseMimeType: 'application/json' }\n : {}),\n },\n },\n {\n apiVersion: fields.apiVersion,\n baseUrl: fields.baseUrl,\n customHeaders: fields.customHeaders,\n }\n );\n this.streamUsage = fields.streamUsage ?? 
this.streamUsage;\n }\n}\n"],"names":["GenerativeAI"],"mappings":";;;;AAMM,MAAO,4BAA6B,SAAQ,sBAAsB,CAAA;AACtE,IAAA,WAAA,CACE,MAEC,EAAA;QAED,KAAK,CAAC,MAAM,CAAC;AAEb,QAAA,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QAElD,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,eAAe,IAAI,IAAI,CAAC,eAAe;AAErE,QAAA,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,IAAI,IAAI,CAAC,eAAe,GAAG,CAAC,EAAE;AAC5D,YAAA,MAAM,IAAI,KAAK,CAAC,8CAA8C,CAAC;;QAGjE,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW;AACzD,QAAA,IACE,IAAI,CAAC,WAAW,IAAI,IAAI;AACxB,aAAC,IAAI,CAAC,WAAW,GAAG,CAAC,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,EAC9C;AACA,YAAA,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC;;QAGpE,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI;AACpC,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;AAGtD,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC;;QAG5C,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI;AACpC,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;QAGtD,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,IAAI,CAAC,aAAa;QAE/D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,sBAAsB,CAAC,gBAAgB,CAAC;AACvE,QAAA,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,IAAI,IAAI,CAAC,MAAM,KAAK,EAAE,EAAE;YAC7C,MAAM,IAAI,KAAK,CACb,gDAAgD;gBAC9C,6CAA6C;gBAC7C,kCAAkC;AAClC,gBAAA,oCAAoC,CACvC;;QAGH,IAAI,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc;AAClE,QAAA,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE;YACzD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAC/B,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAC3C;YACD,IAAI,iBAAiB,CAAC,IAAI,KAAK,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE;AACzD,gBAAA,MAAM,IAAI,KAAK,CACb,yDAAyD,CAC1D;;;QAIL,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS;AACnD,QAAA,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI;;;AAIvB,QAAA,IAAI,CAAC,MAAM,GAAG,IAAIA,kBAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,kBAAkB,CAC5D;YACE,KAAK,EAAE,IAAI,CAAC,KAAK;YA
CjB,cAAc,EAAE,IAAI,CAAC,cAAiC;AACtD,YAAA,gBAAgB,EAAE;gBAChB,aAAa,EAAE,IAAI,CAAC,aAAa;gBACjC,eAAe,EAAE,IAAI,CAAC,eAAe;gBACrC,WAAW,EAAE,IAAI,CAAC,WAAW;gBAC7B,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,IAAI,EAAE,IAAI,CAAC,IAAI;AACf,gBAAA,IAAI,IAAI,CAAC,IAAI,IAAI;AACf,sBAAE,EAAE,gBAAgB,EAAE,kBAAkB;sBACtC,EAAE,CAAC;AACR,aAAA;SACF,EACD;YACE,UAAU,EAAE,MAAM,CAAC,UAAU;YAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;YACvB,aAAa,EAAE,MAAM,CAAC,aAAa;AACpC,SAAA,CACF;QACD,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW;;AAE5D;;;;"}
1
+ {"version":3,"file":"index.mjs","sources":["../../../../src/llm/google/index.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/ban-ts-comment */\nimport { ChatGoogleGenerativeAI } from '@langchain/google-genai';\nimport { getEnvironmentVariable } from '@langchain/core/utils/env';\nimport { GoogleGenerativeAI as GenerativeAI } from '@google/generative-ai';\nimport type {\n GenerateContentRequest,\n SafetySetting,\n} from '@google/generative-ai';\nimport type { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';\nimport type { BaseMessage, UsageMetadata } from '@langchain/core/messages';\nimport type { GeminiGenerationConfig } from '@langchain/google-common';\nimport type { ChatGenerationChunk } from '@langchain/core/outputs';\nimport type { GeminiApiUsageMetadata } from './types';\nimport type { GoogleClientOptions } from '@/types';\nimport {\n convertResponseContentToChatGenerationChunk,\n convertBaseMessagesToContent,\n} from './utils/common';\n\nexport class CustomChatGoogleGenerativeAI extends ChatGoogleGenerativeAI {\n thinkingConfig?: GeminiGenerationConfig['thinkingConfig'];\n constructor(fields: GoogleClientOptions) {\n super(fields);\n\n this.model = fields.model.replace(/^models\\//, '');\n\n this.maxOutputTokens = fields.maxOutputTokens ?? this.maxOutputTokens;\n\n if (this.maxOutputTokens != null && this.maxOutputTokens < 0) {\n throw new Error('`maxOutputTokens` must be a positive integer');\n }\n\n this.temperature = fields.temperature ?? this.temperature;\n if (\n this.temperature != null &&\n (this.temperature < 0 || this.temperature > 2)\n ) {\n throw new Error('`temperature` must be in the range of [0.0,2.0]');\n }\n\n this.topP = fields.topP ?? this.topP;\n if (this.topP != null && this.topP < 0) {\n throw new Error('`topP` must be a positive integer');\n }\n\n if (this.topP != null && this.topP > 1) {\n throw new Error('`topP` must be below 1.');\n }\n\n this.topK = fields.topK ?? 
this.topK;\n if (this.topK != null && this.topK < 0) {\n throw new Error('`topK` must be a positive integer');\n }\n\n this.stopSequences = fields.stopSequences ?? this.stopSequences;\n\n this.apiKey = fields.apiKey ?? getEnvironmentVariable('GOOGLE_API_KEY');\n if (this.apiKey == null || this.apiKey === '') {\n throw new Error(\n 'Please set an API key for Google GenerativeAI ' +\n 'in the environment variable GOOGLE_API_KEY ' +\n 'or in the `apiKey` field of the ' +\n 'ChatGoogleGenerativeAI constructor'\n );\n }\n\n this.safetySettings = fields.safetySettings ?? this.safetySettings;\n if (this.safetySettings && this.safetySettings.length > 0) {\n const safetySettingsSet = new Set(\n this.safetySettings.map((s) => s.category)\n );\n if (safetySettingsSet.size !== this.safetySettings.length) {\n throw new Error(\n 'The categories in `safetySettings` array must be unique'\n );\n }\n }\n\n this.thinkingConfig = fields.thinkingConfig ?? this.thinkingConfig;\n\n this.streaming = fields.streaming ?? this.streaming;\n this.json = fields.json;\n\n // @ts-ignore - Accessing private property from parent class\n this.client = new GenerativeAI(this.apiKey).getGenerativeModel(\n {\n model: this.model,\n safetySettings: this.safetySettings as SafetySetting[],\n generationConfig: {\n stopSequences: this.stopSequences,\n maxOutputTokens: this.maxOutputTokens,\n temperature: this.temperature,\n topP: this.topP,\n topK: this.topK,\n ...(this.json != null\n ? { responseMimeType: 'application/json' }\n : {}),\n },\n },\n {\n apiVersion: fields.apiVersion,\n baseUrl: fields.baseUrl,\n customHeaders: fields.customHeaders,\n }\n );\n this.streamUsage = fields.streamUsage ?? 
this.streamUsage;\n }\n\n invocationParams(\n options?: this['ParsedCallOptions']\n ): Omit<GenerateContentRequest, 'contents'> {\n const params = super.invocationParams(options);\n return {\n ...params,\n generationConfig: {\n ...params.generationConfig,\n\n /** @ts-ignore */\n thinkingConfig: this.thinkingConfig,\n },\n };\n }\n\n async *_streamResponseChunks(\n messages: BaseMessage[],\n options: this['ParsedCallOptions'],\n runManager?: CallbackManagerForLLMRun\n ): AsyncGenerator<ChatGenerationChunk> {\n const prompt = convertBaseMessagesToContent(\n messages,\n this._isMultimodalModel,\n this.useSystemInstruction\n );\n let actualPrompt = prompt;\n if (prompt[0].role === 'system') {\n const [systemInstruction] = prompt;\n /** @ts-ignore */\n this.client.systemInstruction = systemInstruction;\n actualPrompt = prompt.slice(1);\n }\n const parameters = this.invocationParams(options);\n const request = {\n ...parameters,\n contents: actualPrompt,\n };\n const stream = await this.caller.callWithOptions(\n { signal: options.signal },\n async () => {\n /** @ts-ignore */\n const { stream } = await this.client.generateContentStream(request);\n return stream;\n }\n );\n\n let usageMetadata: UsageMetadata | undefined;\n let index = 0;\n for await (const response of stream) {\n if (\n 'usageMetadata' in response &&\n this.streamUsage !== false &&\n options.streamUsage !== false\n ) {\n const genAIUsageMetadata = response.usageMetadata as\n | GeminiApiUsageMetadata\n | undefined;\n const output_tokens =\n (genAIUsageMetadata?.candidatesTokenCount ?? 0) +\n (genAIUsageMetadata?.thoughtsTokenCount ?? 0);\n if (!usageMetadata) {\n usageMetadata = {\n input_tokens: genAIUsageMetadata?.promptTokenCount ?? 0,\n output_tokens,\n total_tokens: genAIUsageMetadata?.totalTokenCount ?? 0,\n };\n } else {\n // Under the hood, LangChain combines the prompt tokens. 
Google returns the updated\n // total each time, so we need to find the difference between the tokens.\n const outputTokenDiff = output_tokens - usageMetadata.output_tokens;\n usageMetadata = {\n input_tokens: 0,\n output_tokens: outputTokenDiff,\n total_tokens: outputTokenDiff,\n };\n }\n }\n\n const chunk = convertResponseContentToChatGenerationChunk(response, {\n usageMetadata,\n index,\n });\n index += 1;\n if (!chunk) {\n continue;\n }\n\n yield chunk;\n await runManager?.handleLLMNewToken(\n chunk.text || '',\n undefined,\n undefined,\n undefined,\n undefined,\n { chunk }\n );\n }\n }\n}\n"],"names":["GenerativeAI"],"mappings":";;;;;AAAA;AAmBM,MAAO,4BAA6B,SAAQ,sBAAsB,CAAA;AACtE,IAAA,cAAc;AACd,IAAA,WAAA,CAAY,MAA2B,EAAA;QACrC,KAAK,CAAC,MAAM,CAAC;AAEb,QAAA,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QAElD,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,eAAe,IAAI,IAAI,CAAC,eAAe;AAErE,QAAA,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,IAAI,IAAI,CAAC,eAAe,GAAG,CAAC,EAAE;AAC5D,YAAA,MAAM,IAAI,KAAK,CAAC,8CAA8C,CAAC;;QAGjE,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW;AACzD,QAAA,IACE,IAAI,CAAC,WAAW,IAAI,IAAI;AACxB,aAAC,IAAI,CAAC,WAAW,GAAG,CAAC,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,EAC9C;AACA,YAAA,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC;;QAGpE,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI;AACpC,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;AAGtD,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC;;QAG5C,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI;AACpC,QAAA,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,EAAE;AACtC,YAAA,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC;;QAGtD,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,IAAI,CAAC,aAAa;QAE/D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,sBAAsB,CAAC,gBAAgB,CAAC;AACvE,QAAA,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,IAAI,IAAI,CAAC,MAAM,KAAK,EAAE,EAAE;YAC7C,MAAM,IAAI,KAAK,CACb,gDAAgD;gBAC9C,6CAA6C;gBAC7C,kCAAkC;AAClC,g
BAAA,oCAAoC,CACvC;;QAGH,IAAI,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc;AAClE,QAAA,IAAI,IAAI,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE;YACzD,MAAM,iBAAiB,GAAG,IAAI,GAAG,CAC/B,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,CAC3C;YACD,IAAI,iBAAiB,CAAC,IAAI,KAAK,IAAI,CAAC,cAAc,CAAC,MAAM,EAAE;AACzD,gBAAA,MAAM,IAAI,KAAK,CACb,yDAAyD,CAC1D;;;QAIL,IAAI,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,IAAI,IAAI,CAAC,cAAc;QAElE,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS;AACnD,QAAA,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI;;AAGvB,QAAA,IAAI,CAAC,MAAM,GAAG,IAAIA,kBAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,kBAAkB,CAC5D;YACE,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,cAAc,EAAE,IAAI,CAAC,cAAiC;AACtD,YAAA,gBAAgB,EAAE;gBAChB,aAAa,EAAE,IAAI,CAAC,aAAa;gBACjC,eAAe,EAAE,IAAI,CAAC,eAAe;gBACrC,WAAW,EAAE,IAAI,CAAC,WAAW;gBAC7B,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,IAAI,EAAE,IAAI,CAAC,IAAI;AACf,gBAAA,IAAI,IAAI,CAAC,IAAI,IAAI;AACf,sBAAE,EAAE,gBAAgB,EAAE,kBAAkB;sBACtC,EAAE,CAAC;AACR,aAAA;SACF,EACD;YACE,UAAU,EAAE,MAAM,CAAC,UAAU;YAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;YACvB,aAAa,EAAE,MAAM,CAAC,aAAa;AACpC,SAAA,CACF;QACD,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,IAAI,IAAI,CAAC,WAAW;;AAG3D,IAAA,gBAAgB,CACd,OAAmC,EAAA;QAEnC,MAAM,MAAM,GAAG,KAAK,CAAC,gBAAgB,CAAC,OAAO,CAAC;QAC9C,OAAO;AACL,YAAA,GAAG,MAAM;AACT,YAAA,gBAAgB,EAAE;gBAChB,GAAG,MAAM,CAAC,gBAAgB;;gBAG1B,cAAc,EAAE,IAAI,CAAC,cAAc;AACpC,aAAA;SACF;;IAGH,OAAO,qBAAqB,CAC1B,QAAuB,EACvB,OAAkC,EAClC,UAAqC,EAAA;AAErC,QAAA,MAAM,MAAM,GAAG,4BAA4B,CACzC,QAAQ,EACR,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,oBAAoB,CAC1B;QACD,IAAI,YAAY,GAAG,MAAM;QACzB,IAAI,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,EAAE;AAC/B,YAAA,MAAM,CAAC,iBAAiB,CAAC,GAAG,MAAM;;AAElC,YAAA,IAAI,CAAC,MAAM,CAAC,iBAAiB,GAAG,iBAAiB;AACjD,YAAA,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;;QAEhC,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;AACjD,QAAA,MAAM,OAAO,GAAG;AACd,YAAA,GAAG,UAAU;AACb,YAAA,QAAQ,EAAE,YAAY;SACvB;AACD,QAAA,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,eAAe,CAC9C,EAAE,MAAM,EAAE,OAAO,CAAC,MAAM,EAAE,EAC1B,YAAW;;AAET,YAAA,MAAM
,EAAE,MAAM,EAAE,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,qBAAqB,CAAC,OAAO,CAAC;AACnE,YAAA,OAAO,MAAM;AACf,SAAC,CACF;AAED,QAAA,IAAI,aAAwC;QAC5C,IAAI,KAAK,GAAG,CAAC;AACb,QAAA,WAAW,MAAM,QAAQ,IAAI,MAAM,EAAE;YACnC,IACE,eAAe,IAAI,QAAQ;gBAC3B,IAAI,CAAC,WAAW,KAAK,KAAK;AAC1B,gBAAA,OAAO,CAAC,WAAW,KAAK,KAAK,EAC7B;AACA,gBAAA,MAAM,kBAAkB,GAAG,QAAQ,CAAC,aAEvB;gBACb,MAAM,aAAa,GACjB,CAAC,kBAAkB,EAAE,oBAAoB,IAAI,CAAC;AAC9C,qBAAC,kBAAkB,EAAE,kBAAkB,IAAI,CAAC,CAAC;gBAC/C,IAAI,CAAC,aAAa,EAAE;AAClB,oBAAA,aAAa,GAAG;AACd,wBAAA,YAAY,EAAE,kBAAkB,EAAE,gBAAgB,IAAI,CAAC;wBACvD,aAAa;AACb,wBAAA,YAAY,EAAE,kBAAkB,EAAE,eAAe,IAAI,CAAC;qBACvD;;qBACI;;;AAGL,oBAAA,MAAM,eAAe,GAAG,aAAa,GAAG,aAAa,CAAC,aAAa;AACnE,oBAAA,aAAa,GAAG;AACd,wBAAA,YAAY,EAAE,CAAC;AACf,wBAAA,aAAa,EAAE,eAAe;AAC9B,wBAAA,YAAY,EAAE,eAAe;qBAC9B;;;AAIL,YAAA,MAAM,KAAK,GAAG,2CAA2C,CAAC,QAAQ,EAAE;gBAClE,aAAa;gBACb,KAAK;AACN,aAAA,CAAC;YACF,KAAK,IAAI,CAAC;YACV,IAAI,CAAC,KAAK,EAAE;gBACV;;AAGF,YAAA,MAAM,KAAK;YACX,MAAM,UAAU,EAAE,iBAAiB,CACjC,KAAK,CAAC,IAAI,IAAI,EAAE,EAChB,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,EAAE,KAAK,EAAE,CACV;;;AAGN;;;;"}
@@ -0,0 +1,463 @@
1
+ import { isBaseMessage, AIMessageChunk, ChatMessage, isToolMessage, isAIMessage, isDataContentBlock, convertToProviderContentBlock, parseBase64DataUrl } from '@langchain/core/messages';
2
+ import { ChatGenerationChunk } from '@langchain/core/outputs';
3
+ import '@langchain/core/utils/function_calling';
4
+ import '@langchain/core/language_models/base';
5
+ import { v4 } from 'uuid';
6
+ import '@langchain/core/utils/types';
7
+ import '@langchain/core/utils/json_schema';
8
+
9
+ function getMessageAuthor(message) {
10
+ const type = message._getType();
11
+ if (ChatMessage.isInstance(message)) {
12
+ return message.role;
13
+ }
14
+ if (type === 'tool') {
15
+ return type;
16
+ }
17
+ return message.name ?? type;
18
+ }
19
+ /**
20
+ * Maps a message type to a Google Generative AI chat author.
21
+ * @param message The message to map.
22
+ * @param model The model to use for mapping.
23
+ * @returns The message type mapped to a Google Generative AI chat author.
24
+ */
25
+ function convertAuthorToRole(author) {
26
+ switch (author) {
27
+ /**
28
+ * Note: Gemini currently is not supporting system messages
29
+ * we will convert them to human messages and merge with following
30
+ * */
31
+ case 'supervisor':
32
+ case 'ai':
33
+ case 'model': // getMessageAuthor returns message.name. code ex.: return message.name ?? type;
34
+ return 'model';
35
+ case 'system':
36
+ return 'system';
37
+ case 'human':
38
+ return 'user';
39
+ case 'tool':
40
+ case 'function':
41
+ return 'function';
42
+ default:
43
+ throw new Error(`Unknown / unsupported author: ${author}`);
44
+ }
45
+ }
46
+ function messageContentMedia(content) {
47
+ if ('mimeType' in content && 'data' in content) {
48
+ return {
49
+ inlineData: {
50
+ mimeType: content.mimeType,
51
+ data: content.data,
52
+ },
53
+ };
54
+ }
55
+ if ('mimeType' in content && 'fileUri' in content) {
56
+ return {
57
+ fileData: {
58
+ mimeType: content.mimeType,
59
+ fileUri: content.fileUri,
60
+ },
61
+ };
62
+ }
63
+ throw new Error('Invalid media content');
64
+ }
65
+ function inferToolNameFromPreviousMessages(message, previousMessages) {
66
+ return previousMessages
67
+ .map((msg) => {
68
+ if (isAIMessage(msg)) {
69
+ return msg.tool_calls ?? [];
70
+ }
71
+ return [];
72
+ })
73
+ .flat()
74
+ .find((toolCall) => {
75
+ return toolCall.id === message.tool_call_id;
76
+ })?.name;
77
+ }
78
+ function _getStandardContentBlockConverter(isMultimodalModel) {
79
+ const standardContentBlockConverter = {
80
+ providerName: 'Google Gemini',
81
+ fromStandardTextBlock(block) {
82
+ return {
83
+ text: block.text,
84
+ };
85
+ },
86
+ fromStandardImageBlock(block) {
87
+ if (!isMultimodalModel) {
88
+ throw new Error('This model does not support images');
89
+ }
90
+ if (block.source_type === 'url') {
91
+ const data = parseBase64DataUrl({ dataUrl: block.url });
92
+ if (data) {
93
+ return {
94
+ inlineData: {
95
+ mimeType: data.mime_type,
96
+ data: data.data,
97
+ },
98
+ };
99
+ }
100
+ else {
101
+ return {
102
+ fileData: {
103
+ mimeType: block.mime_type ?? '',
104
+ fileUri: block.url,
105
+ },
106
+ };
107
+ }
108
+ }
109
+ if (block.source_type === 'base64') {
110
+ return {
111
+ inlineData: {
112
+ mimeType: block.mime_type ?? '',
113
+ data: block.data,
114
+ },
115
+ };
116
+ }
117
+ throw new Error(`Unsupported source type: ${block.source_type}`);
118
+ },
119
+ fromStandardAudioBlock(block) {
120
+ if (!isMultimodalModel) {
121
+ throw new Error('This model does not support audio');
122
+ }
123
+ if (block.source_type === 'url') {
124
+ const data = parseBase64DataUrl({ dataUrl: block.url });
125
+ if (data) {
126
+ return {
127
+ inlineData: {
128
+ mimeType: data.mime_type,
129
+ data: data.data,
130
+ },
131
+ };
132
+ }
133
+ else {
134
+ return {
135
+ fileData: {
136
+ mimeType: block.mime_type ?? '',
137
+ fileUri: block.url,
138
+ },
139
+ };
140
+ }
141
+ }
142
+ if (block.source_type === 'base64') {
143
+ return {
144
+ inlineData: {
145
+ mimeType: block.mime_type ?? '',
146
+ data: block.data,
147
+ },
148
+ };
149
+ }
150
+ throw new Error(`Unsupported source type: ${block.source_type}`);
151
+ },
152
+ fromStandardFileBlock(block) {
153
+ if (!isMultimodalModel) {
154
+ throw new Error('This model does not support files');
155
+ }
156
+ if (block.source_type === 'text') {
157
+ return {
158
+ text: block.text,
159
+ };
160
+ }
161
+ if (block.source_type === 'url') {
162
+ const data = parseBase64DataUrl({ dataUrl: block.url });
163
+ if (data) {
164
+ return {
165
+ inlineData: {
166
+ mimeType: data.mime_type,
167
+ data: data.data,
168
+ },
169
+ };
170
+ }
171
+ else {
172
+ return {
173
+ fileData: {
174
+ mimeType: block.mime_type ?? '',
175
+ fileUri: block.url,
176
+ },
177
+ };
178
+ }
179
+ }
180
+ if (block.source_type === 'base64') {
181
+ return {
182
+ inlineData: {
183
+ mimeType: block.mime_type ?? '',
184
+ data: block.data,
185
+ },
186
+ };
187
+ }
188
+ throw new Error(`Unsupported source type: ${block.source_type}`);
189
+ },
190
+ };
191
+ return standardContentBlockConverter;
192
+ }
193
+ function _convertLangChainContentToPart(content, isMultimodalModel) {
194
+ if (isDataContentBlock(content)) {
195
+ return convertToProviderContentBlock(content, _getStandardContentBlockConverter(isMultimodalModel));
196
+ }
197
+ if (content.type === 'text') {
198
+ return { text: content.text };
199
+ }
200
+ else if (content.type === 'executableCode') {
201
+ return { executableCode: content.executableCode };
202
+ }
203
+ else if (content.type === 'codeExecutionResult') {
204
+ return { codeExecutionResult: content.codeExecutionResult };
205
+ }
206
+ else if (content.type === 'image_url') {
207
+ if (!isMultimodalModel) {
208
+ throw new Error('This model does not support images');
209
+ }
210
+ let source;
211
+ if (typeof content.image_url === 'string') {
212
+ source = content.image_url;
213
+ }
214
+ else if (typeof content.image_url === 'object' &&
215
+ 'url' in content.image_url) {
216
+ source = content.image_url.url;
217
+ }
218
+ else {
219
+ throw new Error('Please provide image as base64 encoded data URL');
220
+ }
221
+ const [dm, data] = source.split(',');
222
+ if (!dm.startsWith('data:')) {
223
+ throw new Error('Please provide image as base64 encoded data URL');
224
+ }
225
+ const [mimeType, encoding] = dm.replace(/^data:/, '').split(';');
226
+ if (encoding !== 'base64') {
227
+ throw new Error('Please provide image as base64 encoded data URL');
228
+ }
229
+ return {
230
+ inlineData: {
231
+ data,
232
+ mimeType,
233
+ },
234
+ };
235
+ }
236
+ else if (content.type === 'media') {
237
+ return messageContentMedia(content);
238
+ }
239
+ else if (content.type === 'tool_use') {
240
+ return {
241
+ functionCall: {
242
+ name: content.name,
243
+ args: content.input,
244
+ },
245
+ };
246
+ }
247
+ else if (content.type?.includes('/') === true &&
248
+ // Ensure it's a single slash.
249
+ content.type.split('/').length === 2 &&
250
+ 'data' in content &&
251
+ typeof content.data === 'string') {
252
+ return {
253
+ inlineData: {
254
+ mimeType: content.type,
255
+ data: content.data,
256
+ },
257
+ };
258
+ }
259
+ else if ('functionCall' in content) {
260
+ // No action needed here — function calls will be added later from message.tool_calls
261
+ return undefined;
262
+ }
263
+ else {
264
+ if ('type' in content) {
265
+ throw new Error(`Unknown content type ${content.type}`);
266
+ }
267
+ else {
268
+ throw new Error(`Unknown content ${JSON.stringify(content)}`);
269
+ }
270
+ }
271
+ }
272
+ function convertMessageContentToParts(message, isMultimodalModel, previousMessages) {
273
+ if (isToolMessage(message)) {
274
+ const messageName = message.name ??
275
+ inferToolNameFromPreviousMessages(message, previousMessages);
276
+ if (messageName === undefined) {
277
+ throw new Error(`Google requires a tool name for each tool call response, and we could not infer a called tool name for ToolMessage "${message.id}" from your passed messages. Please populate a "name" field on that ToolMessage explicitly.`);
278
+ }
279
+ const result = Array.isArray(message.content)
280
+ ? message.content
281
+ .map((c) => _convertLangChainContentToPart(c, isMultimodalModel))
282
+ .filter((p) => p !== undefined)
283
+ : message.content;
284
+ if (message.status === 'error') {
285
+ return [
286
+ {
287
+ functionResponse: {
288
+ name: messageName,
289
+ // The API expects an object with an `error` field if the function call fails.
290
+ // `error` must be a valid object (not a string or array), so we wrap `message.content` here
291
+ response: { error: { details: result } },
292
+ },
293
+ },
294
+ ];
295
+ }
296
+ return [
297
+ {
298
+ functionResponse: {
299
+ name: messageName,
300
+ // again, can't have a string or array value for `response`, so we wrap it as an object here
301
+ response: { result },
302
+ },
303
+ },
304
+ ];
305
+ }
306
+ let functionCalls = [];
307
+ const messageParts = [];
308
+ if (typeof message.content === 'string' && message.content) {
309
+ messageParts.push({ text: message.content });
310
+ }
311
+ if (Array.isArray(message.content)) {
312
+ messageParts.push(...message.content
313
+ .map((c) => _convertLangChainContentToPart(c, isMultimodalModel))
314
+ .filter((p) => p !== undefined));
315
+ }
316
+ if (isAIMessage(message) && message.tool_calls?.length != null) {
317
+ functionCalls = message.tool_calls.map((tc) => {
318
+ return {
319
+ functionCall: {
320
+ name: tc.name,
321
+ args: tc.args,
322
+ },
323
+ };
324
+ });
325
+ }
326
+ return [...messageParts, ...functionCalls];
327
+ }
328
/**
 * Converts a list of LangChain BaseMessages into Google GenAI `Content`
 * objects, enforcing that a system message may only appear first and that
 * authors alternate. A leading system message is either merged into the
 * following content (default) or emitted as human content when
 * `convertSystemMessageToHumanContent` is true.
 *
 * @param {import('@langchain/core/messages').BaseMessage[]} messages - Messages to convert.
 * @param {boolean} isMultimodalModel - Whether the target model accepts media parts.
 * @param {boolean} [convertSystemMessageToHumanContent=false] - Emit the system message as its own human content instead of merging it.
 * @returns {object[]} The converted GenAI contents.
 * @throws {Error} On non-BaseMessage input, a misplaced system message, or consecutive messages from the same author.
 */
function convertBaseMessagesToContent(messages, isMultimodalModel, convertSystemMessageToHumanContent = false) {
    return messages.reduce((acc, message, index) => {
        if (!isBaseMessage(message)) {
            throw new Error('Unsupported message input');
        }
        const author = getMessageAuthor(message);
        if (author === 'system' && index !== 0) {
            throw new Error('System message should be the first one');
        }
        const role = convertAuthorToRole(author);
        // Fix: previously indexed `acc.content[acc.content.length]`, which is
        // always `undefined`, so this alternation check could never fire and
        // same-role sequences slipped through to the API. Use the last element.
        const prevContent = acc.content[acc.content.length - 1];
        if (!acc.mergeWithPreviousContent &&
            prevContent &&
            prevContent.role === role) {
            throw new Error('Google Generative AI requires alternate messages between authors');
        }
        const parts = convertMessageContentToParts(message, isMultimodalModel, messages.slice(0, index));
        if (acc.mergeWithPreviousContent) {
            const prevContent = acc.content[acc.content.length - 1];
            if (!prevContent) {
                throw new Error('There was a problem parsing your system message. Please try a prompt without one.');
            }
            prevContent.parts.push(...parts);
            return {
                mergeWithPreviousContent: false,
                content: acc.content,
            };
        }
        let actualRole = role;
        if (actualRole === 'function' ||
            (actualRole === 'system' && !convertSystemMessageToHumanContent)) {
            // GenerativeAI API will throw an error if the role is not "user" or "model."
            actualRole = 'user';
        }
        const content = {
            role: actualRole,
            parts,
        };
        return {
            mergeWithPreviousContent: author === 'system' && !convertSystemMessageToHumanContent,
            content: [...acc.content, content],
        };
    }, { content: [], mergeWithPreviousContent: false }).content;
}
372
/**
 * Converts a streamed GenAI response into a `ChatGenerationChunk`.
 *
 * Text-only candidates collapse to a string; mixed candidates keep a block
 * array (`text` / `executableCode` / `codeExecutionResult`). Parts flagged
 * `thought: true` are diverted into `additional_kwargs.reasoning` instead of
 * the visible content. Function calls become `tool_call_chunk`s.
 *
 * @param {object} response - A GenAI streaming response (with `candidates` and a `functionCalls()` accessor).
 * @param {{ usageMetadata?: object, index: number }} extra - Usage metadata and the chunk index for tool-call chunks.
 * @returns {ChatGenerationChunk | null} The converted chunk, or null when the response has no candidates.
 */
function convertResponseContentToChatGenerationChunk(response, extra) {
    if (!response.candidates || response.candidates.length === 0) {
        return null;
    }
    const functionCalls = response.functionCalls();
    const [candidate] = response.candidates;
    const { content: candidateContent, ...generationInfo } = candidate;
    let content;
    const reasoningParts = [];
    // Checks if some parts do not have text. If false, it means that the content is a string.
    if (Array.isArray(candidateContent.parts) &&
        candidateContent.parts.every((p) => 'text' in p)) {
        const textParts = [];
        for (const part of candidateContent.parts) {
            if ('thought' in part && part.thought === true) {
                reasoningParts.push(part.text ?? '');
                continue;
            }
            textParts.push(part.text ?? '');
        }
        content = textParts.join('');
    }
    else if (Array.isArray(candidateContent.parts)) {
        // Fix: thought parts previously returned `undefined` from this map,
        // leaving holes in `content` that made the later
        // `content.find((b) => 'text' in b)` throw a TypeError. Filter them out.
        content = candidateContent.parts
            .map((p) => {
                if ('text' in p && 'thought' in p && p.thought === true) {
                    reasoningParts.push(p.text ?? '');
                    return undefined;
                }
                else if ('text' in p) {
                    return {
                        type: 'text',
                        text: p.text,
                    };
                }
                else if ('executableCode' in p) {
                    return {
                        type: 'executableCode',
                        executableCode: p.executableCode,
                    };
                }
                else if ('codeExecutionResult' in p) {
                    return {
                        type: 'codeExecutionResult',
                        codeExecutionResult: p.codeExecutionResult,
                    };
                }
                return p;
            })
            .filter((p) => p !== undefined);
    }
    else {
        // no content returned - likely due to abnormal stop reason, e.g. malformed function call
        content = [];
    }
    let text = '';
    if (typeof content === 'string' && content) {
        text = content;
    }
    else if (Array.isArray(content)) {
        const block = content.find((b) => 'text' in b);
        text = block?.text ?? '';
    }
    const toolCallChunks = [];
    if (functionCalls) {
        toolCallChunks.push(...functionCalls.map((fc) => ({
            ...fc,
            args: JSON.stringify(fc.args),
            index: extra.index,
            type: 'tool_call_chunk',
            id: 'id' in fc && typeof fc.id === 'string' ? fc.id : v4(),
        })));
    }
    const additional_kwargs = {};
    if (reasoningParts.length > 0) {
        additional_kwargs.reasoning = reasoningParts.join('');
    }
    return new ChatGenerationChunk({
        text,
        message: new AIMessageChunk({
            content: content || '',
            name: !candidateContent ? undefined : candidateContent.role,
            tool_call_chunks: toolCallChunks,
            // Each chunk can have unique "generationInfo", and merging strategy is unclear,
            // so leave blank for now.
            additional_kwargs,
            usage_metadata: extra.usageMetadata,
        }),
        generationInfo,
    });
}
461
+
462
+ export { convertAuthorToRole, convertBaseMessagesToContent, convertMessageContentToParts, convertResponseContentToChatGenerationChunk, getMessageAuthor };
463
+ //# sourceMappingURL=common.mjs.map