@posthog/ai 6.1.0 → 6.1.2

This diff shows the changes between these publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
@@ -1 +1 @@
- {"version":3,"file":"index.cjs","sources":["../../src/utils.ts","../../src/gemini/index.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport { Buffer } from 'buffer'\nimport OpenAIOrignal from 'openai'\nimport AnthropicOriginal from '@anthropic-ai/sdk'\nimport type { ChatCompletionTool } from 'openai/resources/chat/completions'\nimport type { Tool as GeminiTool } from '@google/genai'\nimport type { FormattedMessage, FormattedContent, TokenUsage } from './types'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams\ntype ResponseCreateParams = OpenAIOrignal.Responses.ResponseCreateParams\ntype AnthropicTool = AnthropicOriginal.Tool\n\n// limit large outputs by truncating to 200kb (approx 200k bytes)\nexport const MAX_OUTPUT_SIZE = 200000\nconst STRING_FORMAT = 'utf8'\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n posthogCaptureImmediate?: boolean\n}\n\nexport interface CostOverride {\n inputCost: number\n outputCost: number\n}\n\nexport const getModelParams = (\n params: ((ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams) | null\n): Record<string, any> => {\n if (!params) {\n return {}\n }\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): FormattedMessage[] => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n } else if (provider === 'gemini') {\n return formatResponseGemini(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n const content: FormattedContent = []\n\n for (const choice of response.content ?? 
[]) {\n if (choice?.type === 'text' && choice?.text) {\n content.push({ type: 'text', text: choice.text })\n } else if (choice?.type === 'tool_use' && choice?.name && choice?.id) {\n content.push({\n type: 'function',\n id: choice.id,\n function: {\n name: choice.name,\n arguments: choice.input || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.choices) {\n for (const choice of response.choices) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n if (choice.message) {\n if (choice.message.role) {\n role = choice.message.role\n }\n\n if (choice.message.content) {\n content.push({ type: 'text', text: choice.message.content })\n }\n\n if (choice.message.tool_calls) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'function',\n id: toolCall.id,\n function: {\n name: toolCall.function.name,\n arguments: toolCall.function.arguments,\n },\n })\n }\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n }\n\n // Handle Responses API format\n if (response.output) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n for (const item of response.output) {\n if (item.type === 'message') {\n role = item.role\n\n if (item.content && Array.isArray(item.content)) {\n for (const contentItem of item.content) {\n if (contentItem.type === 'output_text' && contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.type === 'input_image' && contentItem.image_url) {\n content.push({\n type: 'image',\n image: contentItem.image_url,\n })\n }\n }\n } else if (item.content) {\n content.push({ type: 'text', text: String(item.content) })\n }\n } else if (item.type === 'function_call') {\n content.push({\n type: 'function',\n id: item.call_id || item.id || '',\n function: {\n name: item.name,\n arguments: item.arguments || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n\n return output\n}\n\nexport const formatResponseGemini = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.candidates && Array.isArray(response.candidates)) {\n for (const candidate of response.candidates) {\n if (candidate.content && candidate.content.parts) {\n const content: FormattedContent = []\n\n for (const part of candidate.content.parts) {\n if (part.text) {\n content.push({ type: 'text', text: part.text })\n } else if (part.functionCall) {\n content.push({\n type: 'function',\n function: {\n name: part.functionCall.name,\n arguments: part.functionCall.args,\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n } else if (candidate.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: candidate.text }],\n })\n }\n }\n } else if (response.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: response.text }],\n })\n }\n\n return output\n}\n\nexport const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {\n if (provider == 'anthropic') {\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = 
(params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n }\n return params.messages\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? null : input\n}\n\nexport const truncate = (str: string): string => {\n try {\n const buffer = Buffer.from(str, STRING_FORMAT)\n if (buffer.length <= MAX_OUTPUT_SIZE) {\n return str\n }\n const truncatedBuffer = buffer.slice(0, MAX_OUTPUT_SIZE)\n return `${truncatedBuffer.toString(STRING_FORMAT)}... [truncated]`\n } catch (error) {\n console.error('Error truncating, likely not a string')\n return str\n }\n}\n\n/**\n * Extract available tool calls from the request parameters.\n * These are the tools provided to the LLM, not the tool calls in the response.\n */\nexport const extractAvailableToolCalls = (\n provider: string,\n params: any\n): ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null => {\n if (provider === 'anthropic') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'gemini') {\n if (params.config && params.config.tools) {\n return params.config.tools\n }\n\n return null\n } else if (provider === 'openai') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'vercel') {\n // Vercel AI SDK stores tools in params.mode.tools when mode type is 'regular'\n if (params.mode?.type === 'regular' && params.mode.tools) {\n return params.mode.tools\n }\n\n return null\n }\n\n return null\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: TokenUsage\n params: (ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams\n isError?: boolean\n error?: string\n tools?: ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null\n captureImmediate?: boolean\n}\n\nfunction sanitizeValues(obj: any): any {\n if (obj === undefined || obj === null) {\n return obj\n }\n const jsonSafe = JSON.parse(JSON.stringify(obj))\n if (typeof jsonSafe === 'string') {\n return Buffer.from(jsonSafe, STRING_FORMAT).toString(STRING_FORMAT)\n } else if (Array.isArray(jsonSafe)) {\n return jsonSafe.map(sanitizeValues)\n } else if (jsonSafe && typeof jsonSafe === 'object') {\n return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]))\n }\n return jsonSafe\n}\n\nexport const sendEventToPosthog = async ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n isError = false,\n error,\n tools,\n captureImmediate = false,\n}: SendEventToPosthogParams): Promise<void> => {\n if (!client.capture) {\n return Promise.resolve()\n }\n // sanitize input and output for UTF-8 validity\n const safeInput = sanitizeValues(input)\n const safeOutput = sanitizeValues(output)\n const safeError = sanitizeValues(error)\n\n let errorData = {}\n if (isError) {\n errorData = {\n $ai_is_error: true,\n $ai_error: safeError,\n }\n }\n let costOverrideData = {}\n if (params.posthogCostOverride) {\n const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0)\n const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 
0)\n costOverrideData = {\n $ai_input_cost_usd: inputCostUSD,\n $ai_output_cost_usd: outputCostUSD,\n $ai_total_cost_usd: inputCostUSD + outputCostUSD,\n }\n }\n\n const additionalTokenValues = {\n ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),\n ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),\n ...(usage.cacheCreationInputTokens ? { $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens } : {}),\n }\n\n const properties = {\n $ai_provider: params.posthogProviderOverride ?? provider,\n $ai_model: params.posthogModelOverride ?? model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n ...additionalTokenValues,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n ...(tools ? { $ai_tools: tools } : {}),\n ...errorData,\n ...costOverrideData,\n }\n\n const event = {\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties,\n groups: params.posthogGroups,\n }\n\n if (captureImmediate) {\n // await capture promise to send single event in serverless environments\n await client.captureImmediate(event)\n } else {\n client.capture(event)\n }\n}\n","import { GoogleGenAI } from '@google/genai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { MonitoringParams, sendEventToPosthog, extractAvailableToolCalls, formatResponseGemini } from '../utils'\nimport type { TokenUsage } from '../types'\n\n// Types from @google/genai\ntype GenerateContentRequest = {\n model: string\n contents: any\n config?: any\n [key: string]: any\n}\n\ntype GenerateContentResponse = {\n text?: string\n candidates?: any[]\n usageMetadata?: {\n promptTokenCount?: number\n candidatesTokenCount?: number\n totalTokenCount?: number\n thoughtsTokenCount?: number\n cachedContentTokenCount?: number\n }\n [key: string]: any\n}\n\ninterface MonitoringGeminiConfig {\n apiKey?: string\n vertexai?: boolean\n project?: string\n location?: string\n apiVersion?: string\n posthog: PostHog\n}\n\nexport class PostHogGoogleGenAI {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n public models: WrappedModels\n\n constructor(config: MonitoringGeminiConfig) {\n const { posthog, ...geminiConfig } = config\n this.phClient = posthog\n this.client = new GoogleGenAI(geminiConfig)\n this.models = new WrappedModels(this.client, this.phClient)\n }\n}\n\nexport class WrappedModels {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n\n constructor(client: GoogleGenAI, phClient: PostHog) {\n this.client = client\n this.phClient = phClient\n }\n\n public async generateContent(params: GenerateContentRequest & MonitoringParams): Promise<GenerateContentResponse> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? 
uuidv4()\n const startTime = Date.now()\n\n try {\n const response = await this.client.models.generateContent(geminiParams)\n const latency = (Date.now() - startTime) / 1000\n\n const availableTools = extractAvailableToolCalls('gemini', geminiParams)\n\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: formatResponseGemini(response),\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: 200,\n usage: {\n inputTokens: response.usageMetadata?.promptTokenCount ?? 0,\n outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,\n reasoningTokens: response.usageMetadata?.thoughtsTokenCount ?? 0,\n cacheReadInputTokens: response.usageMetadata?.cachedContentTokenCount ?? 0,\n },\n tools: availableTools,\n captureImmediate: posthogCaptureImmediate,\n })\n\n return response\n } catch (error: any) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: error?.status ?? 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n public async *generateContentStream(\n params: GenerateContentRequest & MonitoringParams\n ): AsyncGenerator<any, void, unknown> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n let accumulatedContent = ''\n let usage: TokenUsage = {\n inputTokens: 0,\n outputTokens: 0,\n }\n\n try {\n const stream = await this.client.models.generateContentStream(geminiParams)\n\n for await (const chunk of stream) {\n if (chunk.text) {\n accumulatedContent += chunk.text\n }\n if (chunk.usageMetadata) {\n usage = {\n inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,\n outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,\n reasoningTokens: chunk.usageMetadata.thoughtsTokenCount ?? 0,\n cacheReadInputTokens: chunk.usageMetadata.cachedContentTokenCount ?? 0,\n }\n }\n yield chunk\n }\n\n const latency = (Date.now() - startTime) / 1000\n\n const availableTools = extractAvailableToolCalls('gemini', geminiParams)\n\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: 200,\n usage,\n tools: availableTools,\n captureImmediate: posthogCaptureImmediate,\n })\n } catch (error: any) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: error?.status ?? 
500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n private formatInput(contents: any): Array<{ role: string; content: string }> {\n if (typeof contents === 'string') {\n return [{ role: 'user', content: contents }]\n }\n\n if (Array.isArray(contents)) {\n return contents.map((item) => {\n if (typeof item === 'string') {\n return { role: 'user', content: item }\n }\n if (item && typeof item === 'object') {\n if (item.text) {\n return { role: item.role || 'user', content: item.text }\n }\n if (item.content) {\n return { role: item.role || 'user', content: item.content }\n }\n }\n return { role: 'user', content: String(item) }\n })\n }\n\n if (contents && typeof contents === 'object') {\n if (contents.text) {\n return [{ role: 'user', content: contents.text }]\n }\n if (contents.content) {\n return [{ role: 'user', content: contents.content }]\n }\n }\n\n return [{ role: 'user', content: String(contents) }]\n }\n}\n\nexport default PostHogGoogleGenAI\nexport { PostHogGoogleGenAI as Gemini }\n"],"names":["STRING_FORMAT","getModelParams","params","modelParams","paramKeys","key","undefined","formatResponseGemini","response","output","candidates","Array","isArray","candidate","content","parts","part","text","push","type","functionCall","function","name","arguments","args","length","role","withPrivacyMode","client","privacyMode","input","privacy_mode","extractAvailableToolCalls","provider","config","tools","sanitizeValues","obj","jsonSafe","JSON","parse","stringify","Buffer","from","toString","map","Object","fromEntries","entries","k","v","sendEventToPosthog","distinctId","traceId","model","latency","baseURL","httpStatus","usage","isError","error","captureImmediate","capture","Promise","resolve","safeInput","safeOutput","safeError","errorData","$ai_is_error","$ai_error","costOverrideData","posthogCostOverride","inputCostUSD","inputCost","inputTokens","outputCostUSD","outputCost","outputTokens","$ai_input_cost_usd","$ai_output_cost_usd","$ai_total_cost_usd","additionalTokenValues","reasoningTokens","$ai_reasoning_tokens","cacheReadInputTokens","$ai_cache_read_input_tokens","cacheCreationInputTokens","$ai_cache_creation_input_tokens","properties","$ai_provider","posthogProviderOverride","$ai_model","posthogModelOverride","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","$ai_output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","$ai_tools","event","groups","posthogGroups","PostHogGoogleGenAI","constructor","posthog","geminiConfig","phClient","GoogleGenAI","models","WrappedModels","generateContent","posthogDistinctId","posthogTraceId","posthogCaptureImmediate","geminiParams","uuidv4","startTime","Date","now","availableTools","formatInput","contents","usageMetadata","promptTokenCount","candidatesTokenCount","thoughtsTokenCount","cachedContentTokenCount","status","generateContentStream","accumulatedContent","stream","chunk","item","String"],"mappings":";;;;;;;;AAeA,MAAMA,aAAa,GAAG,MAAM;AAmBrB,MAAMC,cAAc,GACzBC,MAAiH,IACzF;EACxB,IAAI,CAACA,MAAM,EAAE;AACX,IAAA,OAAO,EAAE;AACX,EAAA;EACA,MAAMC,WAAgC,GAAG,EAAE;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAM,CAASG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAA
A,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAM,CAASG,GAAG,CAAC;AACzC,IAAA;AACF,EAAA;AACA,EAAA,OAAOF,WAAW;AACpB,CAAC;AAwIM,MAAMI,oBAAoB,GAAIC,QAAa,IAAyB;EACzE,MAAMC,MAA0B,GAAG,EAAE;AAErC,EAAA,IAAID,QAAQ,CAACE,UAAU,IAAIC,KAAK,CAACC,OAAO,CAACJ,QAAQ,CAACE,UAAU,CAAC,EAAE;AAC7D,IAAA,KAAK,MAAMG,SAAS,IAAIL,QAAQ,CAACE,UAAU,EAAE;MAC3C,IAAIG,SAAS,CAACC,OAAO,IAAID,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;QAChD,MAAMD,OAAyB,GAAG,EAAE;QAEpC,KAAK,MAAME,IAAI,IAAIH,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;UAC1C,IAAIC,IAAI,CAACC,IAAI,EAAE;YACbH,OAAO,CAACI,IAAI,CAAC;AAAEC,cAAAA,IAAI,EAAE,MAAM;cAAEF,IAAI,EAAED,IAAI,CAACC;AAAK,aAAC,CAAC;AACjD,UAAA,CAAC,MAAM,IAAID,IAAI,CAACI,YAAY,EAAE;YAC5BN,OAAO,CAACI,IAAI,CAAC;AACXC,cAAAA,IAAI,EAAE,UAAU;AAChBE,cAAAA,QAAQ,EAAE;AACRC,gBAAAA,IAAI,EAAEN,IAAI,CAACI,YAAY,CAACE,IAAI;AAC5BC,gBAAAA,SAAS,EAAEP,IAAI,CAACI,YAAY,CAACI;AAC/B;AACF,aAAC,CAAC;AACJ,UAAA;AACF,QAAA;AAEA,QAAA,IAAIV,OAAO,CAACW,MAAM,GAAG,CAAC,EAAE;UACtBhB,MAAM,CAACS,IAAI,CAAC;AACVQ,YAAAA,IAAI,EAAE,WAAW;AACjBZ,YAAAA;AACF,WAAC,CAAC;AACJ,QAAA;AACF,MAAA,CAAC,MAAM,IAAID,SAAS,CAACI,IAAI,EAAE;QACzBR,MAAM,CAACS,IAAI,CAAC;AACVQ,UAAAA,IAAI,EAAE,WAAW;AACjBZ,UAAAA,OAAO,EAAE,CAAC;AAAEK,YAAAA,IAAI,EAAE,MAAM;YAAEF,IAAI,EAAEJ,SAAS,CAACI;WAAM;AAClD,SAAC,CAAC;AACJ,MAAA;AACF,IAAA;AACF,EAAA,CAAC,MAAM,IAAIT,QAAQ,CAACS,IAAI,EAAE;IACxBR,MAAM,CAACS,IAAI,CAAC;AACVQ,MAAAA,IAAI,EAAE,WAAW;AACjBZ,MAAAA,OAAO,EAAE,CAAC;AAAEK,QAAAA,IAAI,EAAE,MAAM;QAAEF,IAAI,EAAET,QAAQ,CAACS;OAAM;AACjD,KAAC,CAAC;AACJ,EAAA;AAEA,EAAA,OAAOR,MAAM;AACf,CAAC;AAcM,MAAMkB,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAU;EACzF,OAAQF,MAAM,CAASG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK;AACnE,CAAC;;AAgBD;AACA;AACA;AACA;AACO,MAAME,yBAAyB,GAAGA,CACvCC,QAAgB,EAChB/B,MAAW,KACsD;EAO/B;IAChC,IAAIA,MAAM,CAACgC,MAAM,IAAIhC,MAAM,CAACgC,MAAM,CAACC,KAAK,EAAE;AACxC,MAAA,OAAOjC,MAAM,CAACgC,MAAM,CAACC,KAAK;AAC5B,IAAA;AAEA,IAAA,OAAO,IAAI;AACb,EAAA;AAgBF,CAAC;AAqBD,SAASC,cAAcA,CAACC,GAAQ,EAAO;AACrC,EAAA,IAAIA,GAAG,KAAK/B,SAAS,IAAI+B,GAAG,KAAK,IAAI,EAAE;AACrC,IAAA,OAAOA,GAAG;AACZ,EAAA;AACA,EAAA,MAAMC,QAAQ,GAAGC,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACJ,GAAG,CAAC,CAAC;AAChD,EAAA,IAAI,OAAOC,QAAQ,KAAK,QAAQ,EAAE;AAChC,IAAA,OAAOI,aAAM,CAACC,IAAI,CAACL,QAAQ,EAAEtC,aAAa,CAAC,CAAC4C,QAAQ,CAAC5C,aAAa,CAAC;EACrE,CAAC,MAAM,IAAIW,KAAK,CAACC,OAAO,CAAC0B,QAAQ,CAAC,EAAE;AAClC,IAAA,OAAOA,QAAQ,CAACO,GAAG,CAACT,cAAc,CAAC;EACrC,CAAC,MAAM,IAAIE,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AACnD,IAAA,OAAOQ,MAAM,CAACC,WAAW,CAACD,MAAM,CAACE,OAAO,CAACV,QAAQ,CAAC,CAACO,GAAG,CAAC,CAAC,CAACI,CAAC,EAAEC,CAAC,CAAC,KAAK,CAACD,CAAC,EAAEb,cAAc,CAACc,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7F,EAAA;AACA,EAAA,OAAOZ,QAAQ;AACjB;AAEO,MAAMa,kBAAkB,GAAG,OAAO;EACvCvB,MAAM;EACNwB,UAAU;EACVC,OAAO;EACPC,KAAK;EACLrB,QAAQ;EACRH,KAAK;EACLrB,MAAM;EACN8C,OAAO;EACPC,OAAO;EACPtD,MAAM;AACNuD,EAAAA,UAAU,GAAG,GAAG;EAChBC,KAAK,GAAG,EAAE;AACVC,EAAAA,OAAO,GAAG,KAAK;EACfC,KAAK;EACLzB,KAAK;AACL0B,EAAAA,gBAAgB,GAAG;AACK,CAAC,KAAoB;AAC7C,EAAA,IAAI,CAACjC,MAAM,CAACkC,OAAO,EAAE;AACnB,IAAA,OAAOC,OAAO,CAACC,OAAO,EAAE;AAC1B,EAAA;AACA;AACA,EAAA,MAAMC,SAAS,GAAG7B,cAAc,CAACN,KAAK,CAAC;AACvC,EAAA,MAAMoC,UAAU,GAAG9B,cAAc,CAAC3B,MAAM,CAAC;AACzC,EAAA,MAAM0D,SAAS,GAAG/B,cAAc,CAACwB,KAAK,CAAC;EAEvC,IAAIQ,SAAS,GAAG,EAAE;AAClB,EAAA,IAAIT,OAAO,EAAE;AACXS,IAAAA,SAAS,GAAG;AACVC,MAAAA,YAAY,EAAE,IAAI;AAClBC,MAAAA,SAAS,EAAEH;KACZ;AACH,EAAA;EACA,IAAII,gBAAgB,GAAG,EAAE;EACzB,IAAIrE,MAAM,CAACsE,mBAAmB,EAAE;AAC9B,IAAA,MAAMC,YAAY,GAAG,CAACvE,MAAM,CAACsE,mBAAmB,CAACE,SAAS,IAAI,CAAC,KAAKhB,KAAK,CAACiB,WAAW,IAAI,CAAC,CAAC;AAC3F,IAAA,MAAMC,aAAa,GAAG,CAAC1E,MAAM,CAACsE,mBAAmB,CAACK,UAAU,IAAI,CAAC,KAAKnB,KAAK,CAACoB,YAAY,IAAI,CAAC,CAAC;AA
C9FP,IAAAA,gBAAgB,GAAG;AACjBQ,MAAAA,kBAAkB,EAAEN,YAAY;AAChCO,MAAAA,mBAAmB,EAAEJ,aAAa;MAClCK,kBAAkB,EAAER,YAAY,GAAGG;KACpC;AACH,EAAA;AAEA,EAAA,MAAMM,qBAAqB,GAAG;IAC5B,IAAIxB,KAAK,CAACyB,eAAe,GAAG;MAAEC,oBAAoB,EAAE1B,KAAK,CAACyB;KAAiB,GAAG,EAAE,CAAC;IACjF,IAAIzB,KAAK,CAAC2B,oBAAoB,GAAG;MAAEC,2BAA2B,EAAE5B,KAAK,CAAC2B;KAAsB,GAAG,EAAE,CAAC;IAClG,IAAI3B,KAAK,CAAC6B,wBAAwB,GAAG;MAAEC,+BAA+B,EAAE9B,KAAK,CAAC6B;KAA0B,GAAG,EAAE;GAC9G;AAED,EAAA,MAAME,UAAU,GAAG;AACjBC,IAAAA,YAAY,EAAExF,MAAM,CAACyF,uBAAuB,IAAI1D,QAAQ;AACxD2D,IAAAA,SAAS,EAAE1F,MAAM,CAAC2F,oBAAoB,IAAIvC,KAAK;AAC/CwC,IAAAA,oBAAoB,EAAE7F,cAAc,CAACC,MAAM,CAAC;AAC5C6F,IAAAA,SAAS,EAAEpE,eAAe,CAACC,MAAM,EAAE1B,MAAM,CAAC8F,kBAAkB,IAAI,KAAK,EAAE/B,SAAS,CAAC;AACjFgC,IAAAA,kBAAkB,EAAEtE,eAAe,CAACC,MAAM,EAAE1B,MAAM,CAAC8F,kBAAkB,IAAI,KAAK,EAAE9B,UAAU,CAAC;AAC3FgC,IAAAA,eAAe,EAAEzC,UAAU;AAC3B0C,IAAAA,gBAAgB,EAAEzC,KAAK,CAACiB,WAAW,IAAI,CAAC;AACxCyB,IAAAA,iBAAiB,EAAE1C,KAAK,CAACoB,YAAY,IAAI,CAAC;AAC1C,IAAA,GAAGI,qBAAqB;AACxBmB,IAAAA,WAAW,EAAE9C,OAAO;AACpB+C,IAAAA,YAAY,EAAEjD,OAAO;AACrBkD,IAAAA,YAAY,EAAE/C,OAAO;IACrB,GAAGtD,MAAM,CAACsG,iBAAiB;AAC3B,IAAA,IAAIpD,UAAU,GAAG,EAAE,GAAG;AAAEqD,MAAAA,uBAAuB,EAAE;AAAM,KAAC,CAAC;AACzD,IAAA,IAAItE,KAAK,GAAG;AAAEuE,MAAAA,SAAS,EAAEvE;KAAO,GAAG,EAAE,CAAC;AACtC,IAAA,GAAGiC,SAAS;IACZ,GAAGG;GACJ;AAED,EAAA,MAAMoC,KAAK,GAAG;IACZvD,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCsD,IAAAA,KAAK,EAAE,gBAAgB;IACvBlB,UAAU;IACVmB,MAAM,EAAE1G,MAAM,CAAC2G;GAChB;AAED,EAAA,IAAIhD,gBAAgB,EAAE;AACpB;AACA,IAAA,MAAMjC,MAAM,CAACiC,gBAAgB,CAAC8C,KAAK,CAAC;AACtC,EAAA,CAAC,MAAM;AACL/E,IAAAA,MAAM,CAACkC,OAAO,CAAC6C,KAAK,CAAC;AACvB,EAAA;AACF,CAAC;;ACpaD;;AA8BO,MAAMG,kBAAkB,CAAC;EAK9BC,WAAWA,CAAC7E,MAA8B,EAAE;IAC1C,MAAM;MAAE8E,OAAO;MAAE,GAAGC;AAAa,KAAC,GAAG/E,MAAM;IAC3C,IAAI,CAACgF,QAAQ,GAAGF,OAAO;AACvB,IAAA,IAAI,CAACpF,MAAM,GAAG,IAAIuF,iBAAW,CAACF,YAAY,CAAC;AAC3C,IAAA,IAAI,CAACG,MAAM,GAAG,IAAIC,aAAa,CAAC,IAAI,CAACzF,MAAM,EAAE,IAAI,CAACsF,QAAQ,CAAC;AAC7D,EAAA;AACF;AAEO,MAAMG,aAAa,CAAC;AAIzBN,EAAAA,WAAWA,CAACnF,MAAmB,EAAEsF,QAAiB,EAAE;IAClD,IAAI,CAACtF,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACsF,QAAQ,GAAGA,QAAQ;AAC1B,EAAA;EAEA,MAAaI,eAAeA,CAACpH,MAAiD,EAAoC;IAChH,MAAM;MACJqH,iBAAiB;MACjBC,cAAc;MACdhB,iBAAiB;MACjBK,aAAa;MACbY,uBAAuB;MACvB,GAAGC;AACL,KAAC,GAAGxH,MAAM;AAEV,IAAA,MAAMmD,OAAO,GAAGmE,cAAc,IAAIG,OAAM,EAAE;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;IAE5B,IAAI;AACF,MAAA,MAAMtH,QAAQ,GAAG,MAAM,IAAI,CAACoB,MAAM,CAACwF,MAAM,CAACE,eAAe,CAACI,YAAY,CAAC;MACvE,MAAMnE,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAE/C,MAAA,MAAMG,cAAc,GAAG/F,yBAAyB,CAAC,QAAQ,EAAE0F,YAAY,CAAC;AAExE,MAAA,MAAMvE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAEF,oBAAoB,CAACC,QAAQ,CAAC;QACtC+C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAE,GAAG;AACfC,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAEnE,QAAQ,CAAC0H,aAAa,EAAEC,gBAAgB,IAAI,CAAC;AAC1DrD,UAAAA,YAAY,EAAEtE,QAAQ,CAAC0H,aAAa,EAAEE,oBAAoB,IAAI,CAAC;AAC/DjD,UAAAA,eAAe,EAAE3E,QAAQ,CAAC0H,aAAa,EAAEG,kBAAkB,IAAI,CAAC;AAChEhD,UAAAA,oBAAoB,EAAE7E,QAAQ,CAAC0H,aAAa,EAAEI,uBAAuB,IAAI;SAC1E;AACDnG,QAAAA,KAAK,EAAE4F,cAAc;AACrBlE,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AAEF,MAAA,OAAOjH,QAAQ;IACjB,CAAC,CAAC,OAAOoD,KAAU,EAAE;MACnB,MAAML,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,MAAA,MAAMzE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,
EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,EAAE;QACV8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAEG,KAAK,EAAE2E,MAAM,IAAI,GAAG;AAChC7E,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE;SACf;AACDnB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAErB,IAAI,CAACE,SAAS,CAACmB,KAAK,CAAC;AAC5BC,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AACF,MAAA,MAAM7D,KAAK;AACb,IAAA;AACF,EAAA;EAEA,OAAc4E,qBAAqBA,CACjCtI,MAAiD,EACb;IACpC,MAAM;MACJqH,iBAAiB;MACjBC,cAAc;MACdhB,iBAAiB;MACjBK,aAAa;MACbY,uBAAuB;MACvB,GAAGC;AACL,KAAC,GAAGxH,MAAM;AAEV,IAAA,MAAMmD,OAAO,GAAGmE,cAAc,IAAIG,OAAM,EAAE;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;IAC5B,IAAIW,kBAAkB,GAAG,EAAE;AAC3B,IAAA,IAAI/E,KAAiB,GAAG;AACtBiB,MAAAA,WAAW,EAAE,CAAC;AACdG,MAAAA,YAAY,EAAE;KACf;IAED,IAAI;AACF,MAAA,MAAM4D,MAAM,GAAG,MAAM,IAAI,CAAC9G,MAAM,CAACwF,MAAM,CAACoB,qBAAqB,CAACd,YAAY,CAAC;AAE3E,MAAA,WAAW,MAAMiB,KAAK,IAAID,MAAM,EAAE;QAChC,IAAIC,KAAK,CAAC1H,IAAI,EAAE;UACdwH,kBAAkB,IAAIE,KAAK,CAAC1H,IAAI;AAClC,QAAA;QACA,IAAI0H,KAAK,CAACT,aAAa,EAAE;AACvBxE,UAAAA,KAAK,GAAG;AACNiB,YAAAA,WAAW,EAAEgE,KAAK,CAACT,aAAa,CAACC,gBAAgB,IAAI,CAAC;AACtDrD,YAAAA,YAAY,EAAE6D,KAAK,CAACT,aAAa,CAACE,oBAAoB,IAAI,CAAC;AAC3DjD,YAAAA,eAAe,EAAEwD,KAAK,CAACT,aAAa,CAACG,kBAAkB,IAAI,CAAC;AAC5DhD,YAAAA,oBAAoB,EAAEsD,KAAK,CAACT,aAAa,CAACI,uBAAuB,IAAI;WACtE;AACH,QAAA;AACA,QAAA,MAAMK,KAAK;AACb,MAAA;MAEA,MAAMpF,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAE/C,MAAA,MAAMG,cAAc,GAAG/F,yBAAyB,CAAC,QAAQ,EAAE0F,YAAY,CAAC;AAExE,MAAA,MAAMvE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,CAAC;AAAEK,UAAAA,OAAO,EAAE2H,kBAAkB;AAAE/G,UAAAA,IAAI,EAAE;AAAY,SAAC,CAAC;QAC5D6B,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAE,GAAG;QACfC,KAAK;AACLvB,QAAAA,KAAK,EAAE4F,cAAc;AACrBlE,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;IACJ,CAAC,CAAC,OAAO7D,KAAU,EAAE;MACnB,MAAML,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,MAAA,MAAMzE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,EAAE;QACV8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAEG,KAAK,EAAE2E,MAAM,IAAI,GAAG;AAChC7E,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE;SACf;AACDnB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAErB,IAAI,CAACE,SAAS,CAACmB,KAAK,CAAC;AAC5BC,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AACF,MAAA,MAAM7D,KAAK;AACb,IAAA;AACF,EAAA;EAEQoE,WAAWA,CAACC,QAAa,EAA4C;AAC3E,IAAA,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AAChC,MAAA,OAAO,CAAC;AAAEvG,QAAAA,IAAI,EAAE,MAAM;AAAEZ,QAAAA,OAAO,EAAEmH;AAAS,OAAC,CAAC;AAC9C,IAAA;AAEA,IAAA,IAAItH,KAAK,CAACC,OAAO,CAACqH,QAAQ,CAAC,EAAE;AAC3B,MAAA,OAAOA,QAAQ,CAACpF,GAAG,CAAE+F,IAAI,IAAK;AAC5B,QAAA,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UAC5B,OAAO;AAAElH,YAAAA,IAAI,EAAE,MAAM;AAAEZ,YAAAA,OAAO,EAAE8H;WAAM;AACxC,QAAA;AACA,QAAA,IAAIA,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UACpC,IAAIA,IAAI,CAAC3H,IAAI,EAAE;YACb,OAAO;AAAES,cAAAA,IAAI,EAAEkH,IAAI,CAAClH,IAAI,IAAI,MAAM;cAAEZ,OAAO,EAAE8H,IAAI,CAAC3H;aAAM;AAC1D,UAAA;UACA,IAAI2H,IAAI,CAAC9H,OAAO,EAAE;YAChB,OAAO;AAAEY,cAAAA,IAAI,EAAEkH,IAAI,CAAClH,IAAI,IAAI,MAAM;cAAEZ,OAAO
,EAAE8H,IAAI,CAAC9H;aAAS;AAC7D,UAAA;AACF,QAAA;QACA,OAAO;AAAEY,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAE+H,MAAM,CAACD,IAAI;SAAG;AAChD,MAAA,CAAC,CAAC;AACJ,IAAA;AAEA,IAAA,IAAIX,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;MAC5C,IAAIA,QAAQ,CAAChH,IAAI,EAAE;AACjB,QAAA,OAAO,CAAC;AAAES,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAEmH,QAAQ,CAAChH;AAAK,SAAC,CAAC;AACnD,MAAA;MACA,IAAIgH,QAAQ,CAACnH,OAAO,EAAE;AACpB,QAAA,OAAO,CAAC;AAAEY,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAEmH,QAAQ,CAACnH;AAAQ,SAAC,CAAC;AACtD,MAAA;AACF,IAAA;AAEA,IAAA,OAAO,CAAC;AAAEY,MAAAA,IAAI,EAAE,MAAM;MAAEZ,OAAO,EAAE+H,MAAM,CAACZ,QAAQ;AAAE,KAAC,CAAC;AACtD,EAAA;AACF;;;;;;;"}
+ {"version":3,"file":"index.cjs","sources":["../../src/utils.ts","../../src/typeGuards.ts","../../src/sanitization.ts","../../src/gemini/index.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport { Buffer } from 'buffer'\nimport OpenAIOrignal from 'openai'\nimport AnthropicOriginal from '@anthropic-ai/sdk'\nimport type { ChatCompletionTool } from 'openai/resources/chat/completions'\nimport type { Tool as GeminiTool } from '@google/genai'\nimport type { FormattedMessage, FormattedContent, TokenUsage } from './types'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams\ntype ResponseCreateParams = OpenAIOrignal.Responses.ResponseCreateParams\ntype AnthropicTool = AnthropicOriginal.Tool\n\n// limit large outputs by truncating to 200kb (approx 200k bytes)\nexport const MAX_OUTPUT_SIZE = 200000\nconst STRING_FORMAT = 'utf8'\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n posthogCaptureImmediate?: boolean\n}\n\nexport interface CostOverride {\n inputCost: number\n outputCost: number\n}\n\nexport const getModelParams = (\n params: ((ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams) | null\n): Record<string, any> => {\n if (!params) {\n return {}\n }\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): FormattedMessage[] => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n } else if (provider === 'gemini') {\n return formatResponseGemini(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n const content: FormattedContent = []\n\n for (const choice of response.content ?? 
[]) {\n if (choice?.type === 'text' && choice?.text) {\n content.push({ type: 'text', text: choice.text })\n } else if (choice?.type === 'tool_use' && choice?.name && choice?.id) {\n content.push({\n type: 'function',\n id: choice.id,\n function: {\n name: choice.name,\n arguments: choice.input || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.choices) {\n for (const choice of response.choices) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n if (choice.message) {\n if (choice.message.role) {\n role = choice.message.role\n }\n\n if (choice.message.content) {\n content.push({ type: 'text', text: choice.message.content })\n }\n\n if (choice.message.tool_calls) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'function',\n id: toolCall.id,\n function: {\n name: toolCall.function.name,\n arguments: toolCall.function.arguments,\n },\n })\n }\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n }\n\n // Handle Responses API format\n if (response.output) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n for (const item of response.output) {\n if (item.type === 'message') {\n role = item.role\n\n if (item.content && Array.isArray(item.content)) {\n for (const contentItem of item.content) {\n if (contentItem.type === 'output_text' && contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.type === 'input_image' && contentItem.image_url) {\n content.push({\n type: 'image',\n image: contentItem.image_url,\n })\n }\n }\n } else if (item.content) {\n content.push({ type: 'text', text: String(item.content) })\n }\n } else if (item.type === 'function_call') {\n content.push({\n type: 'function',\n id: item.call_id || item.id || '',\n function: {\n name: item.name,\n arguments: item.arguments || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n\n return output\n}\n\nexport const formatResponseGemini = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.candidates && Array.isArray(response.candidates)) {\n for (const candidate of response.candidates) {\n if (candidate.content && candidate.content.parts) {\n const content: FormattedContent = []\n\n for (const part of candidate.content.parts) {\n if (part.text) {\n content.push({ type: 'text', text: part.text })\n } else if (part.functionCall) {\n content.push({\n type: 'function',\n function: {\n name: part.functionCall.name,\n arguments: part.functionCall.args,\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n } else if (candidate.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: candidate.text }],\n })\n }\n }\n } else if (response.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: response.text }],\n })\n }\n\n return output\n}\n\nexport const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {\n if (provider == 'anthropic') {\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = 
(params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n }\n return params.messages\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? null : input\n}\n\nexport const truncate = (str: string): string => {\n try {\n const buffer = Buffer.from(str, STRING_FORMAT)\n if (buffer.length <= MAX_OUTPUT_SIZE) {\n return str\n }\n const truncatedBuffer = buffer.slice(0, MAX_OUTPUT_SIZE)\n return `${truncatedBuffer.toString(STRING_FORMAT)}... [truncated]`\n } catch (error) {\n console.error('Error truncating, likely not a string')\n return str\n }\n}\n\n/**\n * Extract available tool calls from the request parameters.\n * These are the tools provided to the LLM, not the tool calls in the response.\n */\nexport const extractAvailableToolCalls = (\n provider: string,\n params: any\n): ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null => {\n if (provider === 'anthropic') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'gemini') {\n if (params.config && params.config.tools) {\n return params.config.tools\n }\n\n return null\n } else if (provider === 'openai') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'vercel') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n }\n\n return null\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: TokenUsage\n params: (ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams\n isError?: boolean\n error?: string\n tools?: ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null\n captureImmediate?: boolean\n}\n\nfunction sanitizeValues(obj: any): any {\n if (obj === undefined || obj === null) {\n return obj\n }\n const jsonSafe = JSON.parse(JSON.stringify(obj))\n if (typeof jsonSafe === 'string') {\n return Buffer.from(jsonSafe, STRING_FORMAT).toString(STRING_FORMAT)\n } else if (Array.isArray(jsonSafe)) {\n return jsonSafe.map(sanitizeValues)\n } else if (jsonSafe && typeof jsonSafe === 'object') {\n return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]))\n }\n return jsonSafe\n}\n\nexport const sendEventToPosthog = async ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n isError = false,\n error,\n tools,\n captureImmediate = false,\n}: SendEventToPosthogParams): Promise<void> => {\n if (!client.capture) {\n return Promise.resolve()\n }\n // sanitize input and output for UTF-8 validity\n const safeInput = sanitizeValues(input)\n const safeOutput = sanitizeValues(output)\n const safeError = sanitizeValues(error)\n\n let errorData = {}\n if (isError) {\n errorData = {\n $ai_is_error: true,\n $ai_error: safeError,\n }\n }\n let costOverrideData = {}\n if (params.posthogCostOverride) {\n const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0)\n const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 
0)\n costOverrideData = {\n $ai_input_cost_usd: inputCostUSD,\n $ai_output_cost_usd: outputCostUSD,\n $ai_total_cost_usd: inputCostUSD + outputCostUSD,\n }\n }\n\n const additionalTokenValues = {\n ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),\n ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),\n ...(usage.cacheCreationInputTokens ? { $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens } : {}),\n }\n\n const properties = {\n $ai_provider: params.posthogProviderOverride ?? provider,\n $ai_model: params.posthogModelOverride ?? model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n ...additionalTokenValues,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n ...(tools ? { $ai_tools: tools } : {}),\n ...errorData,\n ...costOverrideData,\n }\n\n const event = {\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties,\n groups: params.posthogGroups,\n }\n\n if (captureImmediate) {\n // await capture promise to send single event in serverless environments\n await client.captureImmediate(event)\n } else {\n client.capture(event)\n }\n}\n","// Type guards for safer type checking\n\nexport const isString = (value: unknown): value is string => {\n return typeof value === 'string'\n}\n\nexport const isObject = (value: unknown): value is Record<string, unknown> => {\n return value !== null && typeof value === 'object' && !Array.isArray(value)\n}\n","import { isString, isObject } from './typeGuards'\n\nconst REDACTED_IMAGE_PLACEHOLDER = '[base64 image redacted]'\n\n// ============================================\n// Base64 Detection Helpers\n// ============================================\n\nconst isBase64DataUrl = (str: string): boolean => {\n return /^data:([^;]+);base64,/.test(str)\n}\n\nconst isValidUrl = (str: string): boolean => {\n try {\n new URL(str)\n return true\n } catch {\n // Not an absolute URL, check if it's a relative URL or path\n return str.startsWith('/') || str.startsWith('./') || str.startsWith('../')\n }\n}\n\nconst isRawBase64 = (str: string): boolean => {\n // Skip if it's a valid URL or path\n if (isValidUrl(str)) {\n return false\n }\n\n // Check if it's a valid base64 string\n // Base64 images are typically at least a few hundred chars, but we'll be conservative\n return str.length > 20 && /^[A-Za-z0-9+/]+=*$/.test(str)\n}\n\nexport function redactBase64DataUrl(str: string): string\nexport function redactBase64DataUrl(str: unknown): unknown\nexport function redactBase64DataUrl(str: unknown): unknown {\n if (!isString(str)) return str\n\n // Check for data URL format\n if (isBase64DataUrl(str)) {\n return REDACTED_IMAGE_PLACEHOLDER\n }\n\n // Check for raw base64 (Vercel sends raw base64 for inline images)\n if (isRawBase64(str)) {\n return REDACTED_IMAGE_PLACEHOLDER\n }\n\n return str\n}\n\n// ============================================\n// Common Message Processing\n// ============================================\n\ntype ContentTransformer = (item: unknown) => unknown\n\nconst processMessages = (messages: unknown, transformContent: 
ContentTransformer): unknown => {\n if (!messages) return messages\n\n const processContent = (content: unknown): unknown => {\n if (typeof content === 'string') return content\n\n if (!content) return content\n\n if (Array.isArray(content)) {\n return content.map(transformContent)\n }\n\n // Handle single object content\n return transformContent(content)\n }\n\n const processMessage = (msg: unknown): unknown => {\n if (!isObject(msg) || !('content' in msg)) return msg\n return { ...msg, content: processContent(msg.content) }\n }\n\n // Handle both arrays and single messages\n if (Array.isArray(messages)) {\n return messages.map(processMessage)\n }\n\n return processMessage(messages)\n}\n\n// ============================================\n// Provider-Specific Image Sanitizers\n// ============================================\n\nconst sanitizeOpenAIImage = (item: unknown): unknown => {\n if (!isObject(item)) return item\n\n // Handle image_url format\n if (item.type === 'image_url' && 'image_url' in item && isObject(item.image_url) && 'url' in item.image_url) {\n return {\n ...item,\n image_url: {\n ...item.image_url,\n url: redactBase64DataUrl(item.image_url.url),\n },\n }\n }\n\n return item\n}\n\nconst sanitizeOpenAIResponseImage = (item: unknown): unknown => {\n if (!isObject(item)) return item\n\n // Handle input_image format\n if (item.type === 'input_image' && 'image_url' in item) {\n return {\n ...item,\n image_url: redactBase64DataUrl(item.image_url),\n }\n }\n\n return item\n}\n\nconst sanitizeAnthropicImage = (item: unknown): unknown => {\n if (!isObject(item)) return item\n\n // Handle Anthropic's image format\n if (\n item.type === 'image' &&\n 'source' in item &&\n isObject(item.source) &&\n item.source.type === 'base64' &&\n 'data' in item.source\n ) {\n return {\n ...item,\n source: {\n ...item.source,\n data: REDACTED_IMAGE_PLACEHOLDER,\n },\n }\n }\n\n return item\n}\n\nconst sanitizeGeminiPart = (part: unknown): unknown => {\n if (!isObject(part)) return part\n\n // Handle Gemini's inline data format\n if ('inlineData' in part && isObject(part.inlineData) && 'data' in part.inlineData) {\n return {\n ...part,\n inlineData: {\n ...part.inlineData,\n data: REDACTED_IMAGE_PLACEHOLDER,\n },\n }\n }\n\n return part\n}\n\nconst processGeminiItem = (item: unknown): unknown => {\n if (!isObject(item)) return item\n\n // If it has parts, process them\n if ('parts' in item && item.parts) {\n const parts = Array.isArray(item.parts) ? 
item.parts.map(sanitizeGeminiPart) : sanitizeGeminiPart(item.parts)\n\n return { ...item, parts }\n }\n\n return item\n}\n\nconst sanitizeLangChainImage = (item: unknown): unknown => {\n if (!isObject(item)) return item\n\n // OpenAI style\n if (item.type === 'image_url' && 'image_url' in item && isObject(item.image_url) && 'url' in item.image_url) {\n return {\n ...item,\n image_url: {\n ...item.image_url,\n url: redactBase64DataUrl(item.image_url.url),\n },\n }\n }\n\n // Direct image with data field\n if (item.type === 'image' && 'data' in item) {\n return { ...item, data: redactBase64DataUrl(item.data) }\n }\n\n // Anthropic style\n if (item.type === 'image' && 'source' in item && isObject(item.source) && 'data' in item.source) {\n return {\n ...item,\n source: {\n ...item.source,\n data: redactBase64DataUrl(item.source.data),\n },\n }\n }\n\n // Google style\n if (item.type === 'media' && 'data' in item) {\n return { ...item, data: redactBase64DataUrl(item.data) }\n }\n\n return item\n}\n\n// Export individual sanitizers for tree-shaking\nexport const sanitizeOpenAI = (data: unknown): unknown => {\n return processMessages(data, sanitizeOpenAIImage)\n}\n\nexport const sanitizeOpenAIResponse = (data: unknown): unknown => {\n return processMessages(data, sanitizeOpenAIResponseImage)\n}\n\nexport const sanitizeAnthropic = (data: unknown): unknown => {\n return processMessages(data, sanitizeAnthropicImage)\n}\n\nexport const sanitizeGemini = (data: unknown): unknown => {\n // Gemini has a different structure with 'parts' directly on items instead of 'content'\n // So we need custom processing instead of using processMessages\n if (!data) return data\n\n if (Array.isArray(data)) {\n return data.map(processGeminiItem)\n }\n\n return processGeminiItem(data)\n}\n\nexport const sanitizeLangChain = (data: unknown): unknown => {\n return processMessages(data, sanitizeLangChainImage)\n}\n","import {\n GoogleGenAI,\n GenerateContentResponse as GeminiResponse,\n GenerateContentParameters,\n Part,\n GenerateContentResponseUsageMetadata,\n} from '@google/genai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { MonitoringParams, sendEventToPosthog, extractAvailableToolCalls, formatResponseGemini } from '../utils'\nimport { sanitizeGemini } from '../sanitization'\nimport type { TokenUsage, FormattedContent, FormattedContentItem, FormattedMessage } from '../types'\n\ninterface MonitoringGeminiConfig {\n apiKey?: string\n vertexai?: boolean\n project?: string\n location?: string\n apiVersion?: string\n posthog: PostHog\n}\n\nexport class PostHogGoogleGenAI {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n public models: WrappedModels\n\n constructor(config: MonitoringGeminiConfig) {\n const { posthog, ...geminiConfig } = config\n this.phClient = posthog\n this.client = new GoogleGenAI(geminiConfig)\n this.models = new WrappedModels(this.client, this.phClient)\n }\n}\n\nexport class WrappedModels {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n\n constructor(client: GoogleGenAI, phClient: PostHog) {\n this.client = client\n this.phClient = phClient\n }\n\n public async generateContent(params: GenerateContentParameters & MonitoringParams): Promise<GeminiResponse> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? 
uuidv4()\n const startTime = Date.now()\n\n try {\n const response = await this.client.models.generateContent(geminiParams as GenerateContentParameters)\n const latency = (Date.now() - startTime) / 1000\n\n const availableTools = extractAvailableToolCalls('gemini', geminiParams)\n\n const metadata = response.usageMetadata\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInputForPostHog(geminiParams.contents),\n output: formatResponseGemini(response),\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as GenerateContentParameters & MonitoringParams,\n httpStatus: 200,\n usage: {\n inputTokens: metadata?.promptTokenCount ?? 0,\n outputTokens: metadata?.candidatesTokenCount ?? 0,\n reasoningTokens:\n (metadata as GenerateContentResponseUsageMetadata & { thoughtsTokenCount?: number })?.thoughtsTokenCount ??\n 0,\n cacheReadInputTokens: metadata?.cachedContentTokenCount ?? 0,\n },\n tools: availableTools,\n captureImmediate: posthogCaptureImmediate,\n })\n\n return response\n } catch (error: unknown) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInputForPostHog(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as GenerateContentParameters & MonitoringParams,\n httpStatus: (error as { status?: number })?.status ?? 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n public async *generateContentStream(\n params: GenerateContentParameters & MonitoringParams\n ): AsyncGenerator<GeminiResponse, void, unknown> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? 
uuidv4()\n const startTime = Date.now()\n const accumulatedContent: FormattedContent = []\n let usage: TokenUsage = {\n inputTokens: 0,\n outputTokens: 0,\n }\n\n try {\n const stream = await this.client.models.generateContentStream(geminiParams as GenerateContentParameters)\n\n for await (const chunk of stream) {\n // Handle text content\n if (chunk.text) {\n // Find if we already have a text item to append to\n let lastTextItem: FormattedContentItem | undefined\n for (let i = accumulatedContent.length - 1; i >= 0; i--) {\n if (accumulatedContent[i].type === 'text') {\n lastTextItem = accumulatedContent[i]\n break\n }\n }\n\n if (lastTextItem && lastTextItem.type === 'text') {\n lastTextItem.text += chunk.text\n } else {\n accumulatedContent.push({ type: 'text', text: chunk.text })\n }\n }\n\n // Handle function calls from candidates\n if (chunk.candidates && Array.isArray(chunk.candidates)) {\n for (const candidate of chunk.candidates) {\n if (candidate.content && candidate.content.parts) {\n for (const part of candidate.content.parts) {\n // Type-safe check for functionCall\n if ('functionCall' in part) {\n const funcCall = (part as Part & { functionCall?: { name?: string; args?: unknown } }).functionCall\n if (funcCall?.name) {\n accumulatedContent.push({\n type: 'function',\n function: {\n name: funcCall.name,\n arguments: funcCall.args || {},\n },\n })\n }\n }\n }\n }\n }\n }\n\n // Update usage metadata - handle both old and new field names\n if (chunk.usageMetadata) {\n const metadata = chunk.usageMetadata as GenerateContentResponseUsageMetadata\n usage = {\n inputTokens: metadata.promptTokenCount ?? 0,\n outputTokens: metadata.candidatesTokenCount ?? 0,\n reasoningTokens:\n (metadata as GenerateContentResponseUsageMetadata & { thoughtsTokenCount?: number }).thoughtsTokenCount ??\n 0,\n cacheReadInputTokens: metadata.cachedContentTokenCount ?? 0,\n }\n }\n yield chunk\n }\n\n const latency = (Date.now() - startTime) / 1000\n\n const availableTools = extractAvailableToolCalls('gemini', geminiParams)\n\n // Format output similar to formatResponseGemini\n const output = accumulatedContent.length > 0 ? [{ role: 'assistant', content: accumulatedContent }] : []\n\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInputForPostHog(geminiParams.contents),\n output,\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as GenerateContentParameters & MonitoringParams,\n httpStatus: 200,\n usage,\n tools: availableTools,\n captureImmediate: posthogCaptureImmediate,\n })\n } catch (error: unknown) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInputForPostHog(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as GenerateContentParameters & MonitoringParams,\n httpStatus: (error as { status?: number })?.status ?? 
500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n private formatInput(contents: unknown): FormattedMessage[] {\n if (typeof contents === 'string') {\n return [{ role: 'user', content: contents }]\n }\n\n if (Array.isArray(contents)) {\n return contents.map((item) => {\n if (typeof item === 'string') {\n return { role: 'user', content: item }\n }\n\n if (item && typeof item === 'object') {\n const obj = item as Record<string, unknown>\n if ('text' in obj && obj.text) {\n return { role: (obj.role as string) || 'user', content: obj.text }\n }\n\n if ('content' in obj && obj.content) {\n return { role: (obj.role as string) || 'user', content: obj.content }\n }\n\n if ('parts' in obj && Array.isArray(obj.parts)) {\n return {\n role: (obj.role as string) || 'user',\n content: obj.parts.map((part: unknown) => {\n if (part && typeof part === 'object' && 'text' in part) {\n return (part as { text: unknown }).text\n }\n return part\n }),\n }\n }\n }\n\n return { role: 'user', content: String(item) }\n })\n }\n\n if (contents && typeof contents === 'object') {\n const obj = contents as Record<string, unknown>\n if ('text' in obj && obj.text) {\n return [{ role: 'user', content: obj.text }]\n }\n\n if ('content' in obj && obj.content) {\n return [{ role: 'user', content: obj.content }]\n }\n }\n\n return [{ role: 'user', content: String(contents) }]\n }\n\n private formatInputForPostHog(contents: unknown): unknown {\n const sanitized = sanitizeGemini(contents)\n return this.formatInput(sanitized)\n }\n}\n\nexport default PostHogGoogleGenAI\nexport { PostHogGoogleGenAI as Gemini }\n"],"names":["STRING_FORMAT","getModelParams","params","modelParams","paramKeys","key","undefined","formatResponseGemini","response","output","candidates","Array","isArray","candidate","content","parts","part","text","push","type","functionCall","function","name","arguments","args","length","role","withPrivacyMode","client","privacyMode","input","privacy_mode","extractAvailableToolCalls","provider","config","tools","sanitizeValues","obj","jsonSafe","JSON","parse","stringify","Buffer","from","toString","map","Object","fromEntries","entries","k","v","sendEventToPosthog","distinctId","traceId","model","latency","baseURL","httpStatus","usage","isError","error","captureImmediate","capture","Promise","resolve","safeInput","safeOutput","safeError","errorData","$ai_is_error","$ai_error","costOverrideData","posthogCostOverride","inputCostUSD","inputCost","inputTokens","outputCostUSD","outputCost","outputTokens","$ai_input_cost_usd","$ai_output_cost_usd","$ai_total_cost_usd","additionalTokenValues","reasoningTokens","$ai_reasoning_tokens","cacheReadInputTokens","$ai_cache_read_input_tokens","cacheCreationInputTokens","$ai_cache_creation_input_tokens","properties","$ai_provider","posthogProviderOverride","$ai_model","posthogModelOverride","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","$ai_output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","$ai_tools","event","groups","posthogGroups","isObject","value","REDACTED_IMAGE_PLACEHOLDER","sanitizeGeminiPart","inlineData","data","processGeminiItem","item","sanitizeGemini","PostHogGoogleGenAI","constructor","posthog","geminiConfig","phClient","GoogleGenAI","models","WrappedModels","generateContent","posthogDistinctId","posthogTraceId","posthogCapt
ureImmediate","geminiParams","uuidv4","startTime","Date","now","availableTools","metadata","usageMetadata","formatInputForPostHog","contents","promptTokenCount","candidatesTokenCount","thoughtsTokenCount","cachedContentTokenCount","status","generateContentStream","accumulatedContent","stream","chunk","lastTextItem","i","funcCall","formatInput","String","sanitized"],"mappings":";;;;;;;;AAeA,MAAMA,aAAa,GAAG,MAAM;AAmBrB,MAAMC,cAAc,GACzBC,MAAiH,IACzF;EACxB,IAAI,CAACA,MAAM,EAAE;AACX,IAAA,OAAO,EAAE;AACX,EAAA;EACA,MAAMC,WAAgC,GAAG,EAAE;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAM,CAASG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAM,CAASG,GAAG,CAAC;AACzC,IAAA;AACF,EAAA;AACA,EAAA,OAAOF,WAAW;AACpB,CAAC;AAwIM,MAAMI,oBAAoB,GAAIC,QAAa,IAAyB;EACzE,MAAMC,MAA0B,GAAG,EAAE;AAErC,EAAA,IAAID,QAAQ,CAACE,UAAU,IAAIC,KAAK,CAACC,OAAO,CAACJ,QAAQ,CAACE,UAAU,CAAC,EAAE;AAC7D,IAAA,KAAK,MAAMG,SAAS,IAAIL,QAAQ,CAACE,UAAU,EAAE;MAC3C,IAAIG,SAAS,CAACC,OAAO,IAAID,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;QAChD,MAAMD,OAAyB,GAAG,EAAE;QAEpC,KAAK,MAAME,IAAI,IAAIH,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;UAC1C,IAAIC,IAAI,CAACC,IAAI,EAAE;YACbH,OAAO,CAACI,IAAI,CAAC;AAAEC,cAAAA,IAAI,EAAE,MAAM;cAAEF,IAAI,EAAED,IAAI,CAACC;AAAK,aAAC,CAAC;AACjD,UAAA,CAAC,MAAM,IAAID,IAAI,CAACI,YAAY,EAAE;YAC5BN,OAAO,CAACI,IAAI,CAAC;AACXC,cAAAA,IAAI,EAAE,UAAU;AAChBE,cAAAA,QAAQ,EAAE;AACRC,gBAAAA,IAAI,EAAEN,IAAI,CAACI,YAAY,CAACE,IAAI;AAC5BC,gBAAAA,SAAS,EAAEP,IAAI,CAACI,YAAY,CAACI;AAC/B;AACF,aAAC,CAAC;AACJ,UAAA;AACF,QAAA;AAEA,QAAA,IAAIV,OAAO,CAACW,MAAM,GAAG,CAAC,EAAE;UACtBhB,MAAM,CAACS,IAAI,CAAC;AACVQ,YAAAA,IAAI,EAAE,WAAW;AACjBZ,YAAAA;AACF,WAAC,CAAC;AACJ,QAAA;AACF,MAAA,CAAC,MAAM,IAAID,SAAS,CAACI,IAAI,EAAE;QACzBR,MAAM,CAACS,IAAI,CAAC;AACVQ,UAAAA,IAAI,EAAE,WAAW;AACjBZ,UAAAA,OAAO,EAAE,CAAC;AAAEK,YAAAA,IAAI,EAAE,MAAM;YAAEF,IAAI,EAAEJ,SAAS,CAACI;WAAM;AAClD,SAAC,CAAC;AACJ,MAAA;AACF,IAAA;AACF,EAAA,CAAC,MAAM,IAAIT,QAAQ,CAACS,IAAI,EAAE;IACxBR,MAAM,CAACS,IAAI,CAAC;AACVQ,MAAAA,IAAI,EAAE,WAAW;AACjBZ,MAAAA,OAAO,EAAE,CAAC;AAAEK,QAAAA,IAAI,EAAE,MAAM;QAAEF,IAAI,EAAET,QAAQ,CAACS;OAAM;AACjD,KAAC,CAAC;AACJ,EAAA;AAEA,EAAA,OAAOR,MAAM;AACf,CAAC;AAcM,MAAMkB,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAU;EACzF,OAAQF,MAAM,CAASG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK;AACnE,CAAC;;AAgBD;AACA;AACA;AACA;AACO,MAAME,yBAAyB,GAAGA,CACvCC,QAAgB,EAChB/B,MAAW,KACsD;EAO/B;IAChC,IAAIA,MAAM,CAACgC,MAAM,IAAIhC,MAAM,CAACgC,MAAM,CAACC,KAAK,EAAE;AACxC,MAAA,OAAOjC,MAAM,CAACgC,MAAM,CAACC,KAAK;AAC5B,IAAA;AAEA,IAAA,OAAO,IAAI;AACb,EAAA;AAeF,CAAC;AAqBD,SAASC,cAAcA,CAACC,GAAQ,EAAO;AACrC,EAAA,IAAIA,GAAG,KAAK/B,SAAS,IAAI+B,GAAG,KAAK,IAAI,EAAE;AACrC,IAAA,OAAOA,GAAG;AACZ,EAAA;AACA,EAAA,MAAMC,QAAQ,GAAGC,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACJ,GAAG,CAAC,CAAC;AAChD,EAAA,IAAI,OAAOC,QAAQ,KAAK,QAAQ,EAAE;AAChC,IAAA,OAAOI,aAAM,CAACC,IAAI,CAACL,QAAQ,EAAEtC,aAAa,CAAC,CAAC4C,QAAQ,CAAC5C,aAAa,CAAC;EACrE,CAAC,MAAM,IAAIW,KAAK,CAACC,OAAO,CAAC0B,QAAQ,CAAC,EAAE;AAClC,IAAA,OAAOA,QAAQ,CAACO,GAAG,CAACT,cAAc,CAAC;EACrC,CAAC,MAAM,IAAIE,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AACnD,IAAA,OAAOQ,MAAM,CAACC,WAAW,CAACD,MAAM,CAACE,OAAO,CAACV,QAAQ,CAAC,CAACO,GAAG,CAAC,CAAC,CAACI,CAAC,EAAEC,CAAC,CAAC,KAAK,CAACD,CAAC,EAAEb,cAAc,CAACc,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7F,EAAA;AACA,EAAA,OAAOZ,QAAQ;AACjB;AAEO,MAAMa,kBAAkB,GAAG,OAAO;EACvCvB,MAAM;EACNwB,UAAU;EACVC,OAAO;EACPC,KAAK;EACLrB,QAAQ;EACRH,KAAK;EACLrB,MAAM;EACN8C,OAAO;EACPC,OAAO;EACPtD,MAAM;AACNuD,EAAAA,UAAU,GAAG,GAAG;EAChB
C,KAAK,GAAG,EAAE;AACVC,EAAAA,OAAO,GAAG,KAAK;EACfC,KAAK;EACLzB,KAAK;AACL0B,EAAAA,gBAAgB,GAAG;AACK,CAAC,KAAoB;AAC7C,EAAA,IAAI,CAACjC,MAAM,CAACkC,OAAO,EAAE;AACnB,IAAA,OAAOC,OAAO,CAACC,OAAO,EAAE;AAC1B,EAAA;AACA;AACA,EAAA,MAAMC,SAAS,GAAG7B,cAAc,CAACN,KAAK,CAAC;AACvC,EAAA,MAAMoC,UAAU,GAAG9B,cAAc,CAAC3B,MAAM,CAAC;AACzC,EAAA,MAAM0D,SAAS,GAAG/B,cAAc,CAACwB,KAAK,CAAC;EAEvC,IAAIQ,SAAS,GAAG,EAAE;AAClB,EAAA,IAAIT,OAAO,EAAE;AACXS,IAAAA,SAAS,GAAG;AACVC,MAAAA,YAAY,EAAE,IAAI;AAClBC,MAAAA,SAAS,EAAEH;KACZ;AACH,EAAA;EACA,IAAII,gBAAgB,GAAG,EAAE;EACzB,IAAIrE,MAAM,CAACsE,mBAAmB,EAAE;AAC9B,IAAA,MAAMC,YAAY,GAAG,CAACvE,MAAM,CAACsE,mBAAmB,CAACE,SAAS,IAAI,CAAC,KAAKhB,KAAK,CAACiB,WAAW,IAAI,CAAC,CAAC;AAC3F,IAAA,MAAMC,aAAa,GAAG,CAAC1E,MAAM,CAACsE,mBAAmB,CAACK,UAAU,IAAI,CAAC,KAAKnB,KAAK,CAACoB,YAAY,IAAI,CAAC,CAAC;AAC9FP,IAAAA,gBAAgB,GAAG;AACjBQ,MAAAA,kBAAkB,EAAEN,YAAY;AAChCO,MAAAA,mBAAmB,EAAEJ,aAAa;MAClCK,kBAAkB,EAAER,YAAY,GAAGG;KACpC;AACH,EAAA;AAEA,EAAA,MAAMM,qBAAqB,GAAG;IAC5B,IAAIxB,KAAK,CAACyB,eAAe,GAAG;MAAEC,oBAAoB,EAAE1B,KAAK,CAACyB;KAAiB,GAAG,EAAE,CAAC;IACjF,IAAIzB,KAAK,CAAC2B,oBAAoB,GAAG;MAAEC,2BAA2B,EAAE5B,KAAK,CAAC2B;KAAsB,GAAG,EAAE,CAAC;IAClG,IAAI3B,KAAK,CAAC6B,wBAAwB,GAAG;MAAEC,+BAA+B,EAAE9B,KAAK,CAAC6B;KAA0B,GAAG,EAAE;GAC9G;AAED,EAAA,MAAME,UAAU,GAAG;AACjBC,IAAAA,YAAY,EAAExF,MAAM,CAACyF,uBAAuB,IAAI1D,QAAQ;AACxD2D,IAAAA,SAAS,EAAE1F,MAAM,CAAC2F,oBAAoB,IAAIvC,KAAK;AAC/CwC,IAAAA,oBAAoB,EAAE7F,cAAc,CAACC,MAAM,CAAC;AAC5C6F,IAAAA,SAAS,EAAEpE,eAAe,CAACC,MAAM,EAAE1B,MAAM,CAAC8F,kBAAkB,IAAI,KAAK,EAAE/B,SAAS,CAAC;AACjFgC,IAAAA,kBAAkB,EAAEtE,eAAe,CAACC,MAAM,EAAE1B,MAAM,CAAC8F,kBAAkB,IAAI,KAAK,EAAE9B,UAAU,CAAC;AAC3FgC,IAAAA,eAAe,EAAEzC,UAAU;AAC3B0C,IAAAA,gBAAgB,EAAEzC,KAAK,CAACiB,WAAW,IAAI,CAAC;AACxCyB,IAAAA,iBAAiB,EAAE1C,KAAK,CAACoB,YAAY,IAAI,CAAC;AAC1C,IAAA,GAAGI,qBAAqB;AACxBmB,IAAAA,WAAW,EAAE9C,OAAO;AACpB+C,IAAAA,YAAY,EAAEjD,OAAO;AACrBkD,IAAAA,YAAY,EAAE/C,OAAO;IACrB,GAAGtD,MAAM,CAACsG,iBAAiB;AAC3B,IAAA,IAAIpD,UAAU,GAAG,EAAE,GAAG;AAAEqD,MAAAA,uBAAuB,EAAE;AAAM,KAAC,CAAC;AACzD,IAAA,IAAItE,KAAK,GAAG;AAAEuE,MAAAA,SAAS,EAAEvE;KAAO,GAAG,EAAE,CAAC;AACtC,IAAA,GAAGiC,SAAS;IACZ,GAAGG;GACJ;AAED,EAAA,MAAMoC,KAAK,GAAG;IACZvD,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCsD,IAAAA,KAAK,EAAE,gBAAgB;IACvBlB,UAAU;IACVmB,MAAM,EAAE1G,MAAM,CAAC2G;GAChB;AAED,EAAA,IAAIhD,gBAAgB,EAAE;AACpB;AACA,IAAA,MAAMjC,MAAM,CAACiC,gBAAgB,CAAC8C,KAAK,CAAC;AACtC,EAAA,CAAC,MAAM;AACL/E,IAAAA,MAAM,CAACkC,OAAO,CAAC6C,KAAK,CAAC;AACvB,EAAA;AACF,CAAC;;ACzaD;;AAMO,MAAMG,QAAQ,GAAIC,KAAc,IAAuC;AAC5E,EAAA,OAAOA,KAAK,KAAK,IAAI,IAAI,OAAOA,KAAK,KAAK,QAAQ,IAAI,CAACpG,KAAK,CAACC,OAAO,CAACmG,KAAK,CAAC;AAC7E,CAAC;;ACND,MAAMC,0BAA0B,GAAG,yBAAyB;AA8I5D,MAAMC,kBAAkB,GAAIjG,IAAa,IAAc;AACrD,EAAA,IAAI,CAAC8F,QAAQ,CAAC9F,IAAI,CAAC,EAAE,OAAOA,IAAI;;AAEhC;AACA,EAAA,IAAI,YAAY,IAAIA,IAAI,IAAI8F,QAAQ,CAAC9F,IAAI,CAACkG,UAAU,CAAC,IAAI,MAAM,IAAIlG,IAAI,CAACkG,UAAU,EAAE;IAClF,OAAO;AACL,MAAA,GAAGlG,IAAI;AACPkG,MAAAA,UAAU,EAAE;QACV,GAAGlG,IAAI,CAACkG,UAAU;AAClBC,QAAAA,IAAI,EAAEH;AACR;KACD;AACH,EAAA;AAEA,EAAA,OAAOhG,IAAI;AACb,CAAC;AAED,MAAMoG,iBAAiB,GAAIC,IAAa,IAAc;AACpD,EAAA,IAAI,CAACP,QAAQ,CAACO,IAAI,CAAC,EAAE,OAAOA,IAAI;;AAEhC;AACA,EAAA,IAAI,OAAO,IAAIA,IAAI,IAAIA,IAAI,CAACtG,KAAK,EAAE;IACjC,MAAMA,KAAK,GAAGJ,KAAK,CAACC,OAAO,CAACyG,IAAI,CAACtG,KAAK,CAAC,GAAGsG,IAAI,CAACtG,KAAK,CAAC8B,GAAG,CAACoE,kBAAkB,CAAC,GAAGA,kBAAkB,CAACI,IAAI,CAACtG,KAAK,CAAC;IAE7G,OAAO;AAAE,MAAA,GAAGsG,IAAI;AAAEtG,MAAAA;KAAO;AAC3B,EAAA;AAEA,EAAA,OAAOsG,IAAI;AACb,CAAC;AAqDM,MAAMC,cAAc,GAAIH,IAAa,IAAc;AACxD;AACA;AACA,EAAA,IAAI,CAACA,IAAI,EAAE,OAAOA,IAAI;AAEtB,EAAA,IAAIxG,KAAK,CAACC,OAAO,CAACuG,IAAI,CAAC,EAAE;AACvB,IAAA,OAAOA,IAAI,CAACtE,
GAAG,CAACuE,iBAAiB,CAAC;AACpC,EAAA;EAEA,OAAOA,iBAAiB,CAACD,IAAI,CAAC;AAChC,CAAC;;ACrNM,MAAMI,kBAAkB,CAAC;EAK9BC,WAAWA,CAACtF,MAA8B,EAAE;IAC1C,MAAM;MAAEuF,OAAO;MAAE,GAAGC;AAAa,KAAC,GAAGxF,MAAM;IAC3C,IAAI,CAACyF,QAAQ,GAAGF,OAAO;AACvB,IAAA,IAAI,CAAC7F,MAAM,GAAG,IAAIgG,iBAAW,CAACF,YAAY,CAAC;AAC3C,IAAA,IAAI,CAACG,MAAM,GAAG,IAAIC,aAAa,CAAC,IAAI,CAAClG,MAAM,EAAE,IAAI,CAAC+F,QAAQ,CAAC;AAC7D,EAAA;AACF;AAEO,MAAMG,aAAa,CAAC;AAIzBN,EAAAA,WAAWA,CAAC5F,MAAmB,EAAE+F,QAAiB,EAAE;IAClD,IAAI,CAAC/F,MAAM,GAAGA,MAAM;IACpB,IAAI,CAAC+F,QAAQ,GAAGA,QAAQ;AAC1B,EAAA;EAEA,MAAaI,eAAeA,CAAC7H,MAAoD,EAA2B;IAC1G,MAAM;MACJ8H,iBAAiB;MACjBC,cAAc;MACdzB,iBAAiB;MACjBK,aAAa;MACbqB,uBAAuB;MACvB,GAAGC;AACL,KAAC,GAAGjI,MAAM;AAEV,IAAA,MAAMmD,OAAO,GAAG4E,cAAc,IAAIG,OAAM,EAAE;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;IAE5B,IAAI;AACF,MAAA,MAAM/H,QAAQ,GAAG,MAAM,IAAI,CAACoB,MAAM,CAACiG,MAAM,CAACE,eAAe,CAACI,YAAyC,CAAC;MACpG,MAAM5E,OAAO,GAAG,CAAC+E,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAE/C,MAAA,MAAMG,cAAc,GAAGxG,yBAAyB,CAAC,QAAQ,EAAEmG,YAAY,CAAC;AAExE,MAAA,MAAMM,QAAQ,GAAGjI,QAAQ,CAACkI,aAAa;AACvC,MAAA,MAAMvF,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAAC+F,QAAQ;AACrBvE,QAAAA,UAAU,EAAE4E,iBAAiB;QAC7B3E,OAAO;QACPC,KAAK,EAAE6E,YAAY,CAAC7E,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAAC6G,qBAAqB,CAACR,YAAY,CAACS,QAAQ,CAAC;AACxDnI,QAAAA,MAAM,EAAEF,oBAAoB,CAACC,QAAQ,CAAC;QACtC+C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAsD;AAC9DuD,QAAAA,UAAU,EAAE,GAAG;AACfC,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE8D,QAAQ,EAAEI,gBAAgB,IAAI,CAAC;AAC5C/D,UAAAA,YAAY,EAAE2D,QAAQ,EAAEK,oBAAoB,IAAI,CAAC;AACjD3D,UAAAA,eAAe,EACZsD,QAAQ,EAA6EM,kBAAkB,IACxG,CAAC;AACH1D,UAAAA,oBAAoB,EAAEoD,QAAQ,EAAEO,uBAAuB,IAAI;SAC5D;AACD7G,QAAAA,KAAK,EAAEqG,cAAc;AACrB3E,QAAAA,gBAAgB,EAAEqE;AACpB,OAAC,CAAC;AAEF,MAAA,OAAO1H,QAAQ;IACjB,CAAC,CAAC,OAAOoD,KAAc,EAAE;MACvB,MAAML,OAAO,GAAG,CAAC+E,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,MAAA,MAAMlF,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAAC+F,QAAQ;AACrBvE,QAAAA,UAAU,EAAE4E,iBAAiB;QAC7B3E,OAAO;QACPC,KAAK,EAAE6E,YAAY,CAAC7E,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAAC6G,qBAAqB,CAACR,YAAY,CAACS,QAAQ,CAAC;AACxDnI,QAAAA,MAAM,EAAE,EAAE;QACV8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAsD;AAC9DuD,QAAAA,UAAU,EAAGG,KAAK,EAA0BqF,MAAM,IAAI,GAAG;AACzDvF,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE;SACf;AACDnB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAErB,IAAI,CAACE,SAAS,CAACmB,KAAK,CAAC;AAC5BC,QAAAA,gBAAgB,EAAEqE;AACpB,OAAC,CAAC;AACF,MAAA,MAAMtE,KAAK;AACb,IAAA;AACF,EAAA;EAEA,OAAcsF,qBAAqBA,CACjChJ,MAAoD,EACL;IAC/C,MAAM;MACJ8H,iBAAiB;MACjBC,cAAc;MACdzB,iBAAiB;MACjBK,aAAa;MACbqB,uBAAuB;MACvB,GAAGC;AACL,KAAC,GAAGjI,MAAM;AAEV,IAAA,MAAMmD,OAAO,GAAG4E,cAAc,IAAIG,OAAM,EAAE;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;IAC5B,MAAMY,kBAAoC,GAAG,EAAE;AAC/C,IAAA,IAAIzF,KAAiB,GAAG;AACtBiB,MAAAA,WAAW,EAAE,CAAC;AACdG,MAAAA,YAAY,EAAE;KACf;IAED,IAAI;AACF,MAAA,MAAMsE,MAAM,GAAG,MAAM,IAAI,CAACxH,MAAM,CAACiG,MAAM,CAACqB,qBAAqB,CAACf,YAAyC,CAAC;AAExG,MAAA,WAAW,MAAMkB,KAAK,IAAID,MAAM,EAAE;AAChC;QACA,IAAIC,KAAK,CAACpI,IAAI,EAAE;AACd;AACA,UAAA,IAAIqI,YAA8C;AAClD,UAAA,KAAK,IAAIC,CAAC,GAAGJ,kBAAkB,CAAC1H,MAAM,GAAG,CAAC,EAAE8H,CAAC,IAAI,CAAC,EAAEA,CAAC,EAAE,EAAE;YACvD,IAAIJ,kBAAkB,CAACI,CAAC,CAAC,CAACpI,IAAI,KAAK,MAAM,EAAE;AACzCmI,cAAAA,YAAY,GAAGH,kBAAkB,CAACI,CAAC,CAAC;AACpC,cAAA;AACF,YAAA;AACF,UAAA;AAEA,UAAA,IAAID,YAAY,IAAIA,YAAY,CAACnI,IAAI,KAAK,MAAM,EAAE;AAChDmI,YAAAA,YAAY,CAACrI,IAAI,IAAIoI,KAAK,CAACpI,IAAI;AACjC,UAAA,CAAC,MAAM;YACLkI,kBAAkB,CAACjI,IAAI,CAAC;AAAEC,cAAAA,IAAI,EAAE,MA
AM;cAAEF,IAAI,EAAEoI,KAAK,CAACpI;AAAK,aAAC,CAAC;AAC7D,UAAA;AACF,QAAA;;AAEA;AACA,QAAA,IAAIoI,KAAK,CAAC3I,UAAU,IAAIC,KAAK,CAACC,OAAO,CAACyI,KAAK,CAAC3I,UAAU,CAAC,EAAE;AACvD,UAAA,KAAK,MAAMG,SAAS,IAAIwI,KAAK,CAAC3I,UAAU,EAAE;YACxC,IAAIG,SAAS,CAACC,OAAO,IAAID,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;cAChD,KAAK,MAAMC,IAAI,IAAIH,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;AAC1C;gBACA,IAAI,cAAc,IAAIC,IAAI,EAAE;AAC1B,kBAAA,MAAMwI,QAAQ,GAAIxI,IAAI,CAAiEI,YAAY;kBACnG,IAAIoI,QAAQ,EAAElI,IAAI,EAAE;oBAClB6H,kBAAkB,CAACjI,IAAI,CAAC;AACtBC,sBAAAA,IAAI,EAAE,UAAU;AAChBE,sBAAAA,QAAQ,EAAE;wBACRC,IAAI,EAAEkI,QAAQ,CAAClI,IAAI;AACnBC,wBAAAA,SAAS,EAAEiI,QAAQ,CAAChI,IAAI,IAAI;AAC9B;AACF,qBAAC,CAAC;AACJ,kBAAA;AACF,gBAAA;AACF,cAAA;AACF,YAAA;AACF,UAAA;AACF,QAAA;;AAEA;QACA,IAAI6H,KAAK,CAACX,aAAa,EAAE;AACvB,UAAA,MAAMD,QAAQ,GAAGY,KAAK,CAACX,aAAqD;AAC5EhF,UAAAA,KAAK,GAAG;AACNiB,YAAAA,WAAW,EAAE8D,QAAQ,CAACI,gBAAgB,IAAI,CAAC;AAC3C/D,YAAAA,YAAY,EAAE2D,QAAQ,CAACK,oBAAoB,IAAI,CAAC;AAChD3D,YAAAA,eAAe,EACZsD,QAAQ,CAA4EM,kBAAkB,IACvG,CAAC;AACH1D,YAAAA,oBAAoB,EAAEoD,QAAQ,CAACO,uBAAuB,IAAI;WAC3D;AACH,QAAA;AACA,QAAA,MAAMK,KAAK;AACb,MAAA;MAEA,MAAM9F,OAAO,GAAG,CAAC+E,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAE/C,MAAA,MAAMG,cAAc,GAAGxG,yBAAyB,CAAC,QAAQ,EAAEmG,YAAY,CAAC;;AAExE;MACA,MAAM1H,MAAM,GAAG0I,kBAAkB,CAAC1H,MAAM,GAAG,CAAC,GAAG,CAAC;AAAEC,QAAAA,IAAI,EAAE,WAAW;AAAEZ,QAAAA,OAAO,EAAEqI;OAAoB,CAAC,GAAG,EAAE;AAExG,MAAA,MAAMhG,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAAC+F,QAAQ;AACrBvE,QAAAA,UAAU,EAAE4E,iBAAiB;QAC7B3E,OAAO;QACPC,KAAK,EAAE6E,YAAY,CAAC7E,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAAC6G,qBAAqB,CAACR,YAAY,CAACS,QAAQ,CAAC;QACxDnI,MAAM;QACN8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAsD;AAC9DuD,QAAAA,UAAU,EAAE,GAAG;QACfC,KAAK;AACLvB,QAAAA,KAAK,EAAEqG,cAAc;AACrB3E,QAAAA,gBAAgB,EAAEqE;AACpB,OAAC,CAAC;IACJ,CAAC,CAAC,OAAOtE,KAAc,EAAE;MACvB,MAAML,OAAO,GAAG,CAAC+E,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,MAAA,MAAMlF,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAAC+F,QAAQ;AACrBvE,QAAAA,UAAU,EAAE4E,iBAAiB;QAC7B3E,OAAO;QACPC,KAAK,EAAE6E,YAAY,CAAC7E,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAAC6G,qBAAqB,CAACR,YAAY,CAACS,QAAQ,CAAC;AACxDnI,QAAAA,MAAM,EAAE,EAAE;QACV8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAsD;AAC9DuD,QAAAA,UAAU,EAAGG,KAAK,EAA0BqF,MAAM,IAAI,GAAG;AACzDvF,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE;SACf;AACDnB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAErB,IAAI,CAACE,SAAS,CAACmB,KAAK,CAAC;AAC5BC,QAAAA,gBAAgB,EAAEqE;AACpB,OAAC,CAAC;AACF,MAAA,MAAMtE,KAAK;AACb,IAAA;AACF,EAAA;EAEQ6F,WAAWA,CAACb,QAAiB,EAAsB;AACzD,IAAA,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AAChC,MAAA,OAAO,CAAC;AAAElH,QAAAA,IAAI,EAAE,MAAM;AAAEZ,QAAAA,OAAO,EAAE8H;AAAS,OAAC,CAAC;AAC9C,IAAA;AAEA,IAAA,IAAIjI,KAAK,CAACC,OAAO,CAACgI,QAAQ,CAAC,EAAE;AAC3B,MAAA,OAAOA,QAAQ,CAAC/F,GAAG,CAAEwE,IAAI,IAAK;AAC5B,QAAA,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UAC5B,OAAO;AAAE3F,YAAAA,IAAI,EAAE,MAAM;AAAEZ,YAAAA,OAAO,EAAEuG;WAAM;AACxC,QAAA;AAEA,QAAA,IAAIA,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UACpC,MAAMhF,GAAG,GAAGgF,IAA+B;AAC3C,UAAA,IAAI,MAAM,IAAIhF,GAAG,IAAIA,GAAG,CAACpB,IAAI,EAAE;YAC7B,OAAO;AAAES,cAAAA,IAAI,EAAGW,GAAG,CAACX,IAAI,IAAe,MAAM;cAAEZ,OAAO,EAAEuB,GAAG,CAACpB;aAAM;AACpE,UAAA;AAEA,UAAA,IAAI,SAAS,IAAIoB,GAAG,IAAIA,GAAG,CAACvB,OAAO,EAAE;YACnC,OAAO;AAAEY,cAAAA,IAAI,EAAGW,GAAG,CAACX,IAAI,IAAe,MAAM;cAAEZ,OAAO,EAAEuB,GAAG,CAACvB;aAAS;AACvE,UAAA;AAEA,UAAA,IAAI,OAAO,IAAIuB,GAAG,IAAI1B,KAAK,CAACC,OAAO,CAACyB,GAAG,CAACtB,KAAK,CAAC,EAAE;YAC9C,OAAO;AACLW,cAAAA,IAAI,EAAGW,GAAG,CAACX,IAAI,IAAe,MAAM;cACpCZ,OAAO,EAAEuB,GAAG,CAACtB,KAAK,CAAC8B,
GAAG,CAAE7B,IAAa,IAAK;gBACxC,IAAIA,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,IAAI,MAAM,IAAIA,IAAI,EAAE;kBACtD,OAAQA,IAAI,CAAuBC,IAAI;AACzC,gBAAA;AACA,gBAAA,OAAOD,IAAI;cACb,CAAC;aACF;AACH,UAAA;AACF,QAAA;QAEA,OAAO;AAAEU,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAE4I,MAAM,CAACrC,IAAI;SAAG;AAChD,MAAA,CAAC,CAAC;AACJ,IAAA;AAEA,IAAA,IAAIuB,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;MAC5C,MAAMvG,GAAG,GAAGuG,QAAmC;AAC/C,MAAA,IAAI,MAAM,IAAIvG,GAAG,IAAIA,GAAG,CAACpB,IAAI,EAAE;AAC7B,QAAA,OAAO,CAAC;AAAES,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAEuB,GAAG,CAACpB;AAAK,SAAC,CAAC;AAC9C,MAAA;AAEA,MAAA,IAAI,SAAS,IAAIoB,GAAG,IAAIA,GAAG,CAACvB,OAAO,EAAE;AACnC,QAAA,OAAO,CAAC;AAAEY,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAEuB,GAAG,CAACvB;AAAQ,SAAC,CAAC;AACjD,MAAA;AACF,IAAA;AAEA,IAAA,OAAO,CAAC;AAAEY,MAAAA,IAAI,EAAE,MAAM;MAAEZ,OAAO,EAAE4I,MAAM,CAACd,QAAQ;AAAE,KAAC,CAAC;AACtD,EAAA;EAEQD,qBAAqBA,CAACC,QAAiB,EAAW;AACxD,IAAA,MAAMe,SAAS,GAAGrC,cAAc,CAACsB,QAAQ,CAAC;AAC1C,IAAA,OAAO,IAAI,CAACa,WAAW,CAACE,SAAS,CAAC;AACpC,EAAA;AACF;;;;;;;"}
@@ -1,4 +1,4 @@
1
- import { GoogleGenAI } from '@google/genai';
1
+ import { GoogleGenAI, GenerateContentParameters, GenerateContentResponse } from '@google/genai';
2
2
  import { PostHog } from 'posthog-node';
3
3
 
4
4
  interface MonitoringParams {
@@ -17,24 +17,6 @@ interface CostOverride {
17
17
  outputCost: number;
18
18
  }
19
19
 
20
- type GenerateContentRequest = {
21
- model: string;
22
- contents: any;
23
- config?: any;
24
- [key: string]: any;
25
- };
26
- type GenerateContentResponse = {
27
- text?: string;
28
- candidates?: any[];
29
- usageMetadata?: {
30
- promptTokenCount?: number;
31
- candidatesTokenCount?: number;
32
- totalTokenCount?: number;
33
- thoughtsTokenCount?: number;
34
- cachedContentTokenCount?: number;
35
- };
36
- [key: string]: any;
37
- };
38
20
  interface MonitoringGeminiConfig {
39
21
  apiKey?: string;
40
22
  vertexai?: boolean;
@@ -53,9 +35,10 @@ declare class WrappedModels {
53
35
  private readonly phClient;
54
36
  private readonly client;
55
37
  constructor(client: GoogleGenAI, phClient: PostHog);
56
- generateContent(params: GenerateContentRequest & MonitoringParams): Promise<GenerateContentResponse>;
57
- generateContentStream(params: GenerateContentRequest & MonitoringParams): AsyncGenerator<any, void, unknown>;
38
+ generateContent(params: GenerateContentParameters & MonitoringParams): Promise<GenerateContentResponse>;
39
+ generateContentStream(params: GenerateContentParameters & MonitoringParams): AsyncGenerator<GenerateContentResponse, void, unknown>;
58
40
  private formatInput;
41
+ private formatInputForPostHog;
59
42
  }
60
43
 
61
44
  export { PostHogGoogleGenAI as Gemini, PostHogGoogleGenAI, WrappedModels, PostHogGoogleGenAI as default };
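
With the hand-rolled request/response aliases removed, generateContent and generateContentStream are now typed against the official GenerateContentParameters and GenerateContentResponse from @google/genai, with the PostHog monitoring fields intersected on top. A minimal usage sketch under stated assumptions: the '@posthog/ai/gemini' entry point, the 'gemini-2.0-flash' model name, and the GEMINI_API_KEY variable are illustrative and not taken from this diff.

import { PostHog } from 'posthog-node'
import { Gemini } from '@posthog/ai/gemini' // assumed entry point

const phClient = new PostHog('<posthog-project-api-key>', { host: 'https://us.i.posthog.com' })

// Everything except `posthog` is forwarded to the GoogleGenAI constructor.
const client = new Gemini({ apiKey: process.env.GEMINI_API_KEY, posthog: phClient })

async function main(): Promise<void> {
  // params is GenerateContentParameters & MonitoringParams, so model/contents/config
  // follow the official SDK shapes while the posthog* fields stay optional.
  const response = await client.models.generateContent({
    model: 'gemini-2.0-flash', // illustrative model name
    contents: 'Write a haiku about observability.',
    posthogDistinctId: 'user_123',
    posthogProperties: { feature: 'release-notes-example' },
  })
  console.log(response.text)
  await phClient.shutdown()
}

void main()

The monitoring-only fields such as posthogDistinctId and posthogTraceId are destructured off before the remaining parameters are forwarded to GoogleGenAI's own models.generateContent.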
@@ -185,7 +185,50 @@ const sendEventToPosthog = async ({
185
185
  }
186
186
  };
187
187
 
188
- // Types from @google/genai
188
+ // Type guards for safer type checking
189
+
190
+ const isObject = value => {
191
+ return value !== null && typeof value === 'object' && !Array.isArray(value);
192
+ };
193
+
194
+ const REDACTED_IMAGE_PLACEHOLDER = '[base64 image redacted]';
195
+ const sanitizeGeminiPart = part => {
196
+ if (!isObject(part)) return part;
197
+
198
+ // Handle Gemini's inline data format
199
+ if ('inlineData' in part && isObject(part.inlineData) && 'data' in part.inlineData) {
200
+ return {
201
+ ...part,
202
+ inlineData: {
203
+ ...part.inlineData,
204
+ data: REDACTED_IMAGE_PLACEHOLDER
205
+ }
206
+ };
207
+ }
208
+ return part;
209
+ };
210
+ const processGeminiItem = item => {
211
+ if (!isObject(item)) return item;
212
+
213
+ // If it has parts, process them
214
+ if ('parts' in item && item.parts) {
215
+ const parts = Array.isArray(item.parts) ? item.parts.map(sanitizeGeminiPart) : sanitizeGeminiPart(item.parts);
216
+ return {
217
+ ...item,
218
+ parts
219
+ };
220
+ }
221
+ return item;
222
+ };
223
+ const sanitizeGemini = data => {
224
+ // Gemini has a different structure with 'parts' directly on items instead of 'content'
225
+ // So we need custom processing instead of using processMessages
226
+ if (!data) return data;
227
+ if (Array.isArray(data)) {
228
+ return data.map(processGeminiItem);
229
+ }
230
+ return processGeminiItem(data);
231
+ };
189
232
 
190
233
  class PostHogGoogleGenAI {
191
234
  constructor(config) {
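
The redaction added above keeps the request structure intact while replacing inline base64 payloads with a placeholder. A standalone sketch of that behaviour; the helpers are module-private, so this mirrors their logic rather than importing it.

type GeminiPart = { text?: string; inlineData?: { mimeType?: string; data?: string } }
type GeminiContent = { role?: string; parts?: GeminiPart[] }

const REDACTED_IMAGE_PLACEHOLDER = '[base64 image redacted]'

// Mirrors sanitizeGeminiPart: only parts carrying inlineData.data are rewritten.
const sanitizePart = (part: GeminiPart): GeminiPart =>
  part.inlineData && 'data' in part.inlineData
    ? { ...part, inlineData: { ...part.inlineData, data: REDACTED_IMAGE_PLACEHOLDER } }
    : part

// Mirrors processGeminiItem/sanitizeGemini for the common array-of-contents case.
const sanitizeContents = (contents: GeminiContent[]): GeminiContent[] =>
  contents.map((item) => (item.parts ? { ...item, parts: item.parts.map(sanitizePart) } : item))

const contents: GeminiContent[] = [
  {
    role: 'user',
    parts: [
      { text: 'What is in this picture?' },
      { inlineData: { mimeType: 'image/png', data: '<base64 payload>' } },
    ],
  },
]

console.log(JSON.stringify(sanitizeContents(contents), null, 2))
// -> same shape, but inlineData.data now reads "[base64 image redacted]"

Only the capture path sees the redacted copy; the original contents object is still what gets forwarded to Gemini.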
@@ -218,23 +261,24 @@ class WrappedModels {
218
261
  const response = await this.client.models.generateContent(geminiParams);
219
262
  const latency = (Date.now() - startTime) / 1000;
220
263
  const availableTools = extractAvailableToolCalls('gemini', geminiParams);
264
+ const metadata = response.usageMetadata;
221
265
  await sendEventToPosthog({
222
266
  client: this.phClient,
223
267
  distinctId: posthogDistinctId,
224
268
  traceId,
225
269
  model: geminiParams.model,
226
270
  provider: 'gemini',
227
- input: this.formatInput(geminiParams.contents),
271
+ input: this.formatInputForPostHog(geminiParams.contents),
228
272
  output: formatResponseGemini(response),
229
273
  latency,
230
274
  baseURL: 'https://generativelanguage.googleapis.com',
231
275
  params: params,
232
276
  httpStatus: 200,
233
277
  usage: {
234
- inputTokens: response.usageMetadata?.promptTokenCount ?? 0,
235
- outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,
236
- reasoningTokens: response.usageMetadata?.thoughtsTokenCount ?? 0,
237
- cacheReadInputTokens: response.usageMetadata?.cachedContentTokenCount ?? 0
278
+ inputTokens: metadata?.promptTokenCount ?? 0,
279
+ outputTokens: metadata?.candidatesTokenCount ?? 0,
280
+ reasoningTokens: metadata?.thoughtsTokenCount ?? 0,
281
+ cacheReadInputTokens: metadata?.cachedContentTokenCount ?? 0
238
282
  },
239
283
  tools: availableTools,
240
284
  captureImmediate: posthogCaptureImmediate
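
The usage block above is handed to sendEventToPosthog, which turns it into the $ai_* token properties on the captured event. A simplified sketch of that mapping, based on the implementation visible in the source map; reasoning and cache-read counts are only attached when non-zero.

interface GeminiUsageMetadata {
  promptTokenCount?: number
  candidatesTokenCount?: number
  thoughtsTokenCount?: number
  cachedContentTokenCount?: number
}

// Mirrors how sendEventToPosthog derives the token properties from the usage object.
const toEventTokenProperties = (metadata?: GeminiUsageMetadata) => ({
  $ai_input_tokens: metadata?.promptTokenCount ?? 0,
  $ai_output_tokens: metadata?.candidatesTokenCount ?? 0,
  ...(metadata?.thoughtsTokenCount ? { $ai_reasoning_tokens: metadata.thoughtsTokenCount } : {}),
  ...(metadata?.cachedContentTokenCount
    ? { $ai_cache_read_input_tokens: metadata.cachedContentTokenCount }
    : {}),
})

// e.g. a response with cached context and "thinking" tokens:
console.log(
  toEventTokenProperties({
    promptTokenCount: 1200,
    candidatesTokenCount: 300,
    thoughtsTokenCount: 150,
    cachedContentTokenCount: 800,
  })
)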
@@ -248,7 +292,7 @@ class WrappedModels {
248
292
  traceId,
249
293
  model: geminiParams.model,
250
294
  provider: 'gemini',
251
- input: this.formatInput(geminiParams.contents),
295
+ input: this.formatInputForPostHog(geminiParams.contents),
252
296
  output: [],
253
297
  latency,
254
298
  baseURL: 'https://generativelanguage.googleapis.com',
@@ -276,7 +320,7 @@ class WrappedModels {
276
320
  } = params;
277
321
  const traceId = posthogTraceId ?? v4();
278
322
  const startTime = Date.now();
279
- let accumulatedContent = '';
323
+ const accumulatedContent = [];
280
324
  let usage = {
281
325
  inputTokens: 0,
282
326
  outputTokens: 0
@@ -284,32 +328,77 @@ class WrappedModels {
284
328
  try {
285
329
  const stream = await this.client.models.generateContentStream(geminiParams);
286
330
  for await (const chunk of stream) {
331
+ // Handle text content
287
332
  if (chunk.text) {
288
- accumulatedContent += chunk.text;
333
+ // Find if we already have a text item to append to
334
+ let lastTextItem;
335
+ for (let i = accumulatedContent.length - 1; i >= 0; i--) {
336
+ if (accumulatedContent[i].type === 'text') {
337
+ lastTextItem = accumulatedContent[i];
338
+ break;
339
+ }
340
+ }
341
+ if (lastTextItem && lastTextItem.type === 'text') {
342
+ lastTextItem.text += chunk.text;
343
+ } else {
344
+ accumulatedContent.push({
345
+ type: 'text',
346
+ text: chunk.text
347
+ });
348
+ }
349
+ }
350
+
351
+ // Handle function calls from candidates
352
+ if (chunk.candidates && Array.isArray(chunk.candidates)) {
353
+ for (const candidate of chunk.candidates) {
354
+ if (candidate.content && candidate.content.parts) {
355
+ for (const part of candidate.content.parts) {
356
+ // Type-safe check for functionCall
357
+ if ('functionCall' in part) {
358
+ const funcCall = part.functionCall;
359
+ if (funcCall?.name) {
360
+ accumulatedContent.push({
361
+ type: 'function',
362
+ function: {
363
+ name: funcCall.name,
364
+ arguments: funcCall.args || {}
365
+ }
366
+ });
367
+ }
368
+ }
369
+ }
370
+ }
371
+ }
289
372
  }
373
+
374
+ // Update usage metadata - handle both old and new field names
290
375
  if (chunk.usageMetadata) {
376
+ const metadata = chunk.usageMetadata;
291
377
  usage = {
292
- inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,
293
- outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,
294
- reasoningTokens: chunk.usageMetadata.thoughtsTokenCount ?? 0,
295
- cacheReadInputTokens: chunk.usageMetadata.cachedContentTokenCount ?? 0
378
+ inputTokens: metadata.promptTokenCount ?? 0,
379
+ outputTokens: metadata.candidatesTokenCount ?? 0,
380
+ reasoningTokens: metadata.thoughtsTokenCount ?? 0,
381
+ cacheReadInputTokens: metadata.cachedContentTokenCount ?? 0
296
382
  };
297
383
  }
298
384
  yield chunk;
299
385
  }
300
386
  const latency = (Date.now() - startTime) / 1000;
301
387
  const availableTools = extractAvailableToolCalls('gemini', geminiParams);
388
+
389
+ // Format output similar to formatResponseGemini
390
+ const output = accumulatedContent.length > 0 ? [{
391
+ role: 'assistant',
392
+ content: accumulatedContent
393
+ }] : [];
302
394
  await sendEventToPosthog({
303
395
  client: this.phClient,
304
396
  distinctId: posthogDistinctId,
305
397
  traceId,
306
398
  model: geminiParams.model,
307
399
  provider: 'gemini',
308
- input: this.formatInput(geminiParams.contents),
309
- output: [{
310
- content: accumulatedContent,
311
- role: 'assistant'
312
- }],
400
+ input: this.formatInputForPostHog(geminiParams.contents),
401
+ output,
313
402
  latency,
314
403
  baseURL: 'https://generativelanguage.googleapis.com',
315
404
  params: params,
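
From the caller's perspective the streaming surface is unchanged: chunks are still yielded as raw GenerateContentResponse objects, and the structured accumulation above only affects what is reported to PostHog once the stream finishes. A minimal consumption sketch, with the same assumed entry point and model name as the earlier example.

import { Gemini } from '@posthog/ai/gemini' // assumed entry point

async function streamExample(client: Gemini): Promise<string> {
  const stream = await client.models.generateContentStream({
    model: 'gemini-2.0-flash', // illustrative model name
    contents: 'Stream a haiku about telemetry.',
    posthogDistinctId: 'user_123',
  })

  let text = ''
  for await (const chunk of stream) {
    if (chunk.text) {
      text += chunk.text // the wrapper accumulates the same text internally, plus any function calls
      process.stdout.write(chunk.text)
    }
  }
  // By this point the wrapper has captured a single event whose output is
  // [{ role: 'assistant', content: [...accumulated text/function items] }].
  return text
}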
@@ -326,7 +415,7 @@ class WrappedModels {
326
415
  traceId,
327
416
  model: geminiParams.model,
328
417
  provider: 'gemini',
329
- input: this.formatInput(geminiParams.contents),
418
+ input: this.formatInputForPostHog(geminiParams.contents),
330
419
  output: [],
331
420
  latency,
332
421
  baseURL: 'https://generativelanguage.googleapis.com',
@@ -359,16 +448,28 @@ class WrappedModels {
359
448
  };
360
449
  }
361
450
  if (item && typeof item === 'object') {
362
- if (item.text) {
451
+ const obj = item;
452
+ if ('text' in obj && obj.text) {
363
453
  return {
364
- role: item.role || 'user',
365
- content: item.text
454
+ role: obj.role || 'user',
455
+ content: obj.text
366
456
  };
367
457
  }
368
- if (item.content) {
458
+ if ('content' in obj && obj.content) {
369
459
  return {
370
- role: item.role || 'user',
371
- content: item.content
460
+ role: obj.role || 'user',
461
+ content: obj.content
462
+ };
463
+ }
464
+ if ('parts' in obj && Array.isArray(obj.parts)) {
465
+ return {
466
+ role: obj.role || 'user',
467
+ content: obj.parts.map(part => {
468
+ if (part && typeof part === 'object' && 'text' in part) {
469
+ return part.text;
470
+ }
471
+ return part;
472
+ })
372
473
  };
373
474
  }
374
475
  }
@@ -379,16 +480,17 @@ class WrappedModels {
379
480
  });
380
481
  }
381
482
  if (contents && typeof contents === 'object') {
382
- if (contents.text) {
483
+ const obj = contents;
484
+ if ('text' in obj && obj.text) {
383
485
  return [{
384
486
  role: 'user',
385
- content: contents.text
487
+ content: obj.text
386
488
  }];
387
489
  }
388
- if (contents.content) {
490
+ if ('content' in obj && obj.content) {
389
491
  return [{
390
492
  role: 'user',
391
- content: contents.content
493
+ content: obj.content
392
494
  }];
393
495
  }
394
496
  }
@@ -397,6 +499,10 @@ class WrappedModels {
397
499
  content: String(contents)
398
500
  }];
399
501
  }
502
+ formatInputForPostHog(contents) {
503
+ const sanitized = sanitizeGemini(contents);
504
+ return this.formatInput(sanitized);
505
+ }
400
506
  }
401
507
 
402
508
  export { PostHogGoogleGenAI as Gemini, PostHogGoogleGenAI, WrappedModels, PostHogGoogleGenAI as default };
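
formatInput and formatInputForPostHog are private, so the new parts handling cannot be called directly; the sketch below mirrors that branch to show how a parts-based content item is flattened before it becomes the captured input.

type Part = { text?: string; [key: string]: unknown }
interface ContentItem {
  role?: string
  parts?: Part[]
}

// Mirrors the `'parts' in obj` branch of formatInput: text parts collapse to
// their strings, anything else is passed through untouched.
const formatPartsItem = (item: ContentItem) => ({
  role: item.role ?? 'user',
  content: (item.parts ?? []).map((part) => ('text' in part ? part.text : part)),
})

console.log(
  formatPartsItem({
    role: 'user',
    parts: [{ text: 'Describe this image.' }, { inlineData: { mimeType: 'image/png', data: '<base64>' } }],
  })
)
// -> { role: 'user', content: ['Describe this image.', { inlineData: { ... } }] }

Combined with the sanitizeGemini step above, this determines the shape of the captured input for multimodal Gemini requests.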