@posthog/ai 6.0.1 → 6.1.0

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
@@ -236,7 +236,9 @@ class WrappedModels {
   httpStatus: 200,
   usage: {
     inputTokens: response.usageMetadata?.promptTokenCount ?? 0,
-    outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0
+    outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,
+    reasoningTokens: response.usageMetadata?.thoughtsTokenCount ?? 0,
+    cacheReadInputTokens: response.usageMetadata?.cachedContentTokenCount ?? 0
   },
   tools: availableTools,
   captureImmediate: posthogCaptureImmediate
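
Both new fields feed the usage object handed to sendEventToPosthog (whose source, from src/utils.ts, is embedded in the source map below). That helper only attaches the optional token properties when they are non-zero, mapping them to $ai_reasoning_tokens and $ai_cache_read_input_tokens on the $ai_generation event. A minimal TypeScript sketch of that mapping, using made-up token counts:

// Sketch only: mirrors the usage -> event-property mapping in sendEventToPosthog
// (src/utils.ts in this package). The token counts below are invented for illustration.
type GeminiUsageMetadata = {
  promptTokenCount?: number
  candidatesTokenCount?: number
  thoughtsTokenCount?: number
  cachedContentTokenCount?: number
}

const usageMetadata: GeminiUsageMetadata = {
  promptTokenCount: 1200,
  candidatesTokenCount: 350,
  thoughtsTokenCount: 80,         // reasoning ("thinking") tokens, new in 6.1.0
  cachedContentTokenCount: 1000,  // tokens read from cached content, new in 6.1.0
}

const usage = {
  inputTokens: usageMetadata.promptTokenCount ?? 0,
  outputTokens: usageMetadata.candidatesTokenCount ?? 0,
  reasoningTokens: usageMetadata.thoughtsTokenCount ?? 0,
  cacheReadInputTokens: usageMetadata.cachedContentTokenCount ?? 0,
}

// sendEventToPosthog only spreads the optional properties in when the counts are truthy:
const additionalTokenValues = {
  ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),
  ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),
}
// => { $ai_reasoning_tokens: 80, $ai_cache_read_input_tokens: 1000 }

inputTokens and outputTokens behave exactly as in 6.0.1; only the reasoning and cache-read fields are new.
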
@@ -292,7 +294,9 @@ class WrappedModels {
   if (chunk.usageMetadata) {
     usage = {
       inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,
-      outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0
+      outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,
+      reasoningTokens: chunk.usageMetadata.thoughtsTokenCount ?? 0,
+      cacheReadInputTokens: chunk.usageMetadata.cachedContentTokenCount ?? 0
     };
   }
   yield chunk;
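
On the streaming path the wrapper re-yields every chunk unchanged and keeps overwriting its local usage value with the latest usageMetadata, so the final chunk's thoughtsTokenCount and cachedContentTokenCount are what get reported once the stream is drained. A short usage sketch follows; the @posthog/ai/gemini import path, the model id, and the keys are placeholders and assumptions, while the class and method names come from the embedded src/gemini/index.ts source below.

import { PostHog } from 'posthog-node'
// Assumed subpath export; PostHogGoogleGenAI is also re-exported as `Gemini`.
import { PostHogGoogleGenAI } from '@posthog/ai/gemini'

const posthog = new PostHog('<ph_project_api_key>')            // placeholder key
const gemini = new PostHogGoogleGenAI({ apiKey: '<gemini_api_key>', posthog })

async function main() {
  const stream = gemini.models.generateContentStream({
    model: 'gemini-2.5-flash',            // placeholder model id
    contents: 'Write a haiku about token accounting',
    posthogDistinctId: 'user_123',        // optional monitoring params
  })

  for await (const chunk of stream) {
    if (chunk.text) process.stdout.write(chunk.text)
    // chunk.usageMetadata, when present, now also contributes reasoningTokens and
    // cacheReadInputTokens to the captured usage.
  }

  await posthog.shutdown()
}

main()

The $ai_generation event is sent after the loop inside the generator completes, once the whole stream has been consumed.
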
@@ -1 +1 @@
- {"version":3,"file":"index.cjs","sources":["../../src/utils.ts","../../src/gemini/index.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport { Buffer } from 'buffer'\nimport OpenAIOrignal from 'openai'\nimport AnthropicOriginal from '@anthropic-ai/sdk'\nimport type { ChatCompletionTool } from 'openai/resources/chat/completions'\nimport type { Tool as GeminiTool } from '@google/genai'\nimport type { FormattedMessage, FormattedContent, TokenUsage } from './types'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams\ntype ResponseCreateParams = OpenAIOrignal.Responses.ResponseCreateParams\ntype AnthropicTool = AnthropicOriginal.Tool\n\n// limit large outputs by truncating to 200kb (approx 200k bytes)\nexport const MAX_OUTPUT_SIZE = 200000\nconst STRING_FORMAT = 'utf8'\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n posthogCaptureImmediate?: boolean\n}\n\nexport interface CostOverride {\n inputCost: number\n outputCost: number\n}\n\nexport const getModelParams = (\n params: ((ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams) | null\n): Record<string, any> => {\n if (!params) {\n return {}\n }\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): FormattedMessage[] => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n } else if (provider === 'gemini') {\n return formatResponseGemini(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n const content: FormattedContent = []\n\n for (const choice of response.content ?? 
[]) {\n if (choice?.type === 'text' && choice?.text) {\n content.push({ type: 'text', text: choice.text })\n } else if (choice?.type === 'tool_use' && choice?.name && choice?.id) {\n content.push({\n type: 'function',\n id: choice.id,\n function: {\n name: choice.name,\n arguments: choice.input || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.choices) {\n for (const choice of response.choices) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n if (choice.message) {\n if (choice.message.role) {\n role = choice.message.role\n }\n\n if (choice.message.content) {\n content.push({ type: 'text', text: choice.message.content })\n }\n\n if (choice.message.tool_calls) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'function',\n id: toolCall.id,\n function: {\n name: toolCall.function.name,\n arguments: toolCall.function.arguments,\n },\n })\n }\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n }\n\n // Handle Responses API format\n if (response.output) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n for (const item of response.output) {\n if (item.type === 'message') {\n role = item.role\n\n if (item.content && Array.isArray(item.content)) {\n for (const contentItem of item.content) {\n if (contentItem.type === 'output_text' && contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.type === 'input_image' && contentItem.image_url) {\n content.push({\n type: 'image',\n image: contentItem.image_url,\n })\n }\n }\n } else if (item.content) {\n content.push({ type: 'text', text: String(item.content) })\n }\n } else if (item.type === 'function_call') {\n content.push({\n type: 'function',\n id: item.call_id || item.id || '',\n function: {\n name: item.name,\n arguments: item.arguments || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n\n return output\n}\n\nexport const formatResponseGemini = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.candidates && Array.isArray(response.candidates)) {\n for (const candidate of response.candidates) {\n if (candidate.content && candidate.content.parts) {\n const content: FormattedContent = []\n\n for (const part of candidate.content.parts) {\n if (part.text) {\n content.push({ type: 'text', text: part.text })\n } else if (part.functionCall) {\n content.push({\n type: 'function',\n function: {\n name: part.functionCall.name,\n arguments: part.functionCall.args,\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n } else if (candidate.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: candidate.text }],\n })\n }\n }\n } else if (response.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: response.text }],\n })\n }\n\n return output\n}\n\nexport const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {\n if (provider == 'anthropic') {\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = 
(params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n }\n return params.messages\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? null : input\n}\n\nexport const truncate = (str: string): string => {\n try {\n const buffer = Buffer.from(str, STRING_FORMAT)\n if (buffer.length <= MAX_OUTPUT_SIZE) {\n return str\n }\n const truncatedBuffer = buffer.slice(0, MAX_OUTPUT_SIZE)\n return `${truncatedBuffer.toString(STRING_FORMAT)}... [truncated]`\n } catch (error) {\n console.error('Error truncating, likely not a string')\n return str\n }\n}\n\n/**\n * Extract available tool calls from the request parameters.\n * These are the tools provided to the LLM, not the tool calls in the response.\n */\nexport const extractAvailableToolCalls = (\n provider: string,\n params: any\n): ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null => {\n if (provider === 'anthropic') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'gemini') {\n if (params.config && params.config.tools) {\n return params.config.tools\n }\n\n return null\n } else if (provider === 'openai') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'vercel') {\n // Vercel AI SDK stores tools in params.mode.tools when mode type is 'regular'\n if (params.mode?.type === 'regular' && params.mode.tools) {\n return params.mode.tools\n }\n\n return null\n }\n\n return null\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: TokenUsage\n params: (ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams\n isError?: boolean\n error?: string\n tools?: ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null\n captureImmediate?: boolean\n}\n\nfunction sanitizeValues(obj: any): any {\n if (obj === undefined || obj === null) {\n return obj\n }\n const jsonSafe = JSON.parse(JSON.stringify(obj))\n if (typeof jsonSafe === 'string') {\n return Buffer.from(jsonSafe, STRING_FORMAT).toString(STRING_FORMAT)\n } else if (Array.isArray(jsonSafe)) {\n return jsonSafe.map(sanitizeValues)\n } else if (jsonSafe && typeof jsonSafe === 'object') {\n return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]))\n }\n return jsonSafe\n}\n\nexport const sendEventToPosthog = async ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n isError = false,\n error,\n tools,\n captureImmediate = false,\n}: SendEventToPosthogParams): Promise<void> => {\n if (!client.capture) {\n return Promise.resolve()\n }\n // sanitize input and output for UTF-8 validity\n const safeInput = sanitizeValues(input)\n const safeOutput = sanitizeValues(output)\n const safeError = sanitizeValues(error)\n\n let errorData = {}\n if (isError) {\n errorData = {\n $ai_is_error: true,\n $ai_error: safeError,\n }\n }\n let costOverrideData = {}\n if (params.posthogCostOverride) {\n const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0)\n const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 
0)\n costOverrideData = {\n $ai_input_cost_usd: inputCostUSD,\n $ai_output_cost_usd: outputCostUSD,\n $ai_total_cost_usd: inputCostUSD + outputCostUSD,\n }\n }\n\n const additionalTokenValues = {\n ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),\n ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),\n ...(usage.cacheCreationInputTokens ? { $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens } : {}),\n }\n\n const properties = {\n $ai_provider: params.posthogProviderOverride ?? provider,\n $ai_model: params.posthogModelOverride ?? model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n ...additionalTokenValues,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n ...(tools ? { $ai_tools: tools } : {}),\n ...errorData,\n ...costOverrideData,\n }\n\n const event = {\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties,\n groups: params.posthogGroups,\n }\n\n if (captureImmediate) {\n // await capture promise to send single event in serverless environments\n await client.captureImmediate(event)\n } else {\n client.capture(event)\n }\n}\n","import { GoogleGenAI } from '@google/genai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { MonitoringParams, sendEventToPosthog, extractAvailableToolCalls, formatResponseGemini } from '../utils'\n\n// Types from @google/genai\ntype GenerateContentRequest = {\n model: string\n contents: any\n config?: any\n [key: string]: any\n}\n\ntype GenerateContentResponse = {\n text?: string\n candidates?: any[]\n usageMetadata?: {\n promptTokenCount?: number\n candidatesTokenCount?: number\n totalTokenCount?: number\n }\n [key: string]: any\n}\n\ninterface MonitoringGeminiConfig {\n apiKey?: string\n vertexai?: boolean\n project?: string\n location?: string\n apiVersion?: string\n posthog: PostHog\n}\n\nexport class PostHogGoogleGenAI {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n public models: WrappedModels\n\n constructor(config: MonitoringGeminiConfig) {\n const { posthog, ...geminiConfig } = config\n this.phClient = posthog\n this.client = new GoogleGenAI(geminiConfig)\n this.models = new WrappedModels(this.client, this.phClient)\n }\n}\n\nexport class WrappedModels {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n\n constructor(client: GoogleGenAI, phClient: PostHog) {\n this.client = client\n this.phClient = phClient\n }\n\n public async generateContent(params: GenerateContentRequest & MonitoringParams): Promise<GenerateContentResponse> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? 
uuidv4()\n const startTime = Date.now()\n\n try {\n const response = await this.client.models.generateContent(geminiParams)\n const latency = (Date.now() - startTime) / 1000\n\n const availableTools = extractAvailableToolCalls('gemini', geminiParams)\n\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: formatResponseGemini(response),\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: 200,\n usage: {\n inputTokens: response.usageMetadata?.promptTokenCount ?? 0,\n outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,\n },\n tools: availableTools,\n captureImmediate: posthogCaptureImmediate,\n })\n\n return response\n } catch (error: any) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: error?.status ?? 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n public async *generateContentStream(\n params: GenerateContentRequest & MonitoringParams\n ): AsyncGenerator<any, void, unknown> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n let accumulatedContent = ''\n let usage = {\n inputTokens: 0,\n outputTokens: 0,\n }\n\n try {\n const stream = await this.client.models.generateContentStream(geminiParams)\n\n for await (const chunk of stream) {\n if (chunk.text) {\n accumulatedContent += chunk.text\n }\n if (chunk.usageMetadata) {\n usage = {\n inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,\n outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,\n }\n }\n yield chunk\n }\n\n const latency = (Date.now() - startTime) / 1000\n\n const availableTools = extractAvailableToolCalls('gemini', geminiParams)\n\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: 200,\n usage,\n tools: availableTools,\n captureImmediate: posthogCaptureImmediate,\n })\n } catch (error: any) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: error?.status ?? 
500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n private formatInput(contents: any): Array<{ role: string; content: string }> {\n if (typeof contents === 'string') {\n return [{ role: 'user', content: contents }]\n }\n\n if (Array.isArray(contents)) {\n return contents.map((item) => {\n if (typeof item === 'string') {\n return { role: 'user', content: item }\n }\n if (item && typeof item === 'object') {\n if (item.text) {\n return { role: item.role || 'user', content: item.text }\n }\n if (item.content) {\n return { role: item.role || 'user', content: item.content }\n }\n }\n return { role: 'user', content: String(item) }\n })\n }\n\n if (contents && typeof contents === 'object') {\n if (contents.text) {\n return [{ role: 'user', content: contents.text }]\n }\n if (contents.content) {\n return [{ role: 'user', content: contents.content }]\n }\n }\n\n return [{ role: 'user', content: String(contents) }]\n }\n}\n\nexport default PostHogGoogleGenAI\nexport { PostHogGoogleGenAI as Gemini }\n"],"names":["STRING_FORMAT","getModelParams","params","modelParams","paramKeys","key","undefined","formatResponseGemini","response","output","candidates","Array","isArray","candidate","content","parts","part","text","push","type","functionCall","function","name","arguments","args","length","role","withPrivacyMode","client","privacyMode","input","privacy_mode","extractAvailableToolCalls","provider","config","tools","sanitizeValues","obj","jsonSafe","JSON","parse","stringify","Buffer","from","toString","map","Object","fromEntries","entries","k","v","sendEventToPosthog","distinctId","traceId","model","latency","baseURL","httpStatus","usage","isError","error","captureImmediate","capture","Promise","resolve","safeInput","safeOutput","safeError","errorData","$ai_is_error","$ai_error","costOverrideData","posthogCostOverride","inputCostUSD","inputCost","inputTokens","outputCostUSD","outputCost","outputTokens","$ai_input_cost_usd","$ai_output_cost_usd","$ai_total_cost_usd","additionalTokenValues","reasoningTokens","$ai_reasoning_tokens","cacheReadInputTokens","$ai_cache_read_input_tokens","cacheCreationInputTokens","$ai_cache_creation_input_tokens","properties","$ai_provider","posthogProviderOverride","$ai_model","posthogModelOverride","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","$ai_output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","$ai_tools","event","groups","posthogGroups","PostHogGoogleGenAI","constructor","posthog","geminiConfig","phClient","GoogleGenAI","models","WrappedModels","generateContent","posthogDistinctId","posthogTraceId","posthogCaptureImmediate","geminiParams","uuidv4","startTime","Date","now","availableTools","formatInput","contents","usageMetadata","promptTokenCount","candidatesTokenCount","status","generateContentStream","accumulatedContent","stream","chunk","item","String"],"mappings":";;;;;;;;AAeA,MAAMA,aAAa,GAAG,MAAM;AAmBrB,MAAMC,cAAc,GACzBC,MAAiH,IACzF;EACxB,IAAI,CAACA,MAAM,EAAE;AACX,IAAA,OAAO,EAAE;AACX,EAAA;EACA,MAAMC,WAAgC,GAAG,EAAE;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAM,CAASG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAM,CAASG,GAAG,CA
AC;AACzC,IAAA;AACF,EAAA;AACA,EAAA,OAAOF,WAAW;AACpB,CAAC;AAwIM,MAAMI,oBAAoB,GAAIC,QAAa,IAAyB;EACzE,MAAMC,MAA0B,GAAG,EAAE;AAErC,EAAA,IAAID,QAAQ,CAACE,UAAU,IAAIC,KAAK,CAACC,OAAO,CAACJ,QAAQ,CAACE,UAAU,CAAC,EAAE;AAC7D,IAAA,KAAK,MAAMG,SAAS,IAAIL,QAAQ,CAACE,UAAU,EAAE;MAC3C,IAAIG,SAAS,CAACC,OAAO,IAAID,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;QAChD,MAAMD,OAAyB,GAAG,EAAE;QAEpC,KAAK,MAAME,IAAI,IAAIH,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;UAC1C,IAAIC,IAAI,CAACC,IAAI,EAAE;YACbH,OAAO,CAACI,IAAI,CAAC;AAAEC,cAAAA,IAAI,EAAE,MAAM;cAAEF,IAAI,EAAED,IAAI,CAACC;AAAK,aAAC,CAAC;AACjD,UAAA,CAAC,MAAM,IAAID,IAAI,CAACI,YAAY,EAAE;YAC5BN,OAAO,CAACI,IAAI,CAAC;AACXC,cAAAA,IAAI,EAAE,UAAU;AAChBE,cAAAA,QAAQ,EAAE;AACRC,gBAAAA,IAAI,EAAEN,IAAI,CAACI,YAAY,CAACE,IAAI;AAC5BC,gBAAAA,SAAS,EAAEP,IAAI,CAACI,YAAY,CAACI;AAC/B;AACF,aAAC,CAAC;AACJ,UAAA;AACF,QAAA;AAEA,QAAA,IAAIV,OAAO,CAACW,MAAM,GAAG,CAAC,EAAE;UACtBhB,MAAM,CAACS,IAAI,CAAC;AACVQ,YAAAA,IAAI,EAAE,WAAW;AACjBZ,YAAAA;AACF,WAAC,CAAC;AACJ,QAAA;AACF,MAAA,CAAC,MAAM,IAAID,SAAS,CAACI,IAAI,EAAE;QACzBR,MAAM,CAACS,IAAI,CAAC;AACVQ,UAAAA,IAAI,EAAE,WAAW;AACjBZ,UAAAA,OAAO,EAAE,CAAC;AAAEK,YAAAA,IAAI,EAAE,MAAM;YAAEF,IAAI,EAAEJ,SAAS,CAACI;WAAM;AAClD,SAAC,CAAC;AACJ,MAAA;AACF,IAAA;AACF,EAAA,CAAC,MAAM,IAAIT,QAAQ,CAACS,IAAI,EAAE;IACxBR,MAAM,CAACS,IAAI,CAAC;AACVQ,MAAAA,IAAI,EAAE,WAAW;AACjBZ,MAAAA,OAAO,EAAE,CAAC;AAAEK,QAAAA,IAAI,EAAE,MAAM;QAAEF,IAAI,EAAET,QAAQ,CAACS;OAAM;AACjD,KAAC,CAAC;AACJ,EAAA;AAEA,EAAA,OAAOR,MAAM;AACf,CAAC;AAcM,MAAMkB,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAU;EACzF,OAAQF,MAAM,CAASG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK;AACnE,CAAC;;AAgBD;AACA;AACA;AACA;AACO,MAAME,yBAAyB,GAAGA,CACvCC,QAAgB,EAChB/B,MAAW,KACsD;EAO/B;IAChC,IAAIA,MAAM,CAACgC,MAAM,IAAIhC,MAAM,CAACgC,MAAM,CAACC,KAAK,EAAE;AACxC,MAAA,OAAOjC,MAAM,CAACgC,MAAM,CAACC,KAAK;AAC5B,IAAA;AAEA,IAAA,OAAO,IAAI;AACb,EAAA;AAgBF,CAAC;AAqBD,SAASC,cAAcA,CAACC,GAAQ,EAAO;AACrC,EAAA,IAAIA,GAAG,KAAK/B,SAAS,IAAI+B,GAAG,KAAK,IAAI,EAAE;AACrC,IAAA,OAAOA,GAAG;AACZ,EAAA;AACA,EAAA,MAAMC,QAAQ,GAAGC,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACJ,GAAG,CAAC,CAAC;AAChD,EAAA,IAAI,OAAOC,QAAQ,KAAK,QAAQ,EAAE;AAChC,IAAA,OAAOI,aAAM,CAACC,IAAI,CAACL,QAAQ,EAAEtC,aAAa,CAAC,CAAC4C,QAAQ,CAAC5C,aAAa,CAAC;EACrE,CAAC,MAAM,IAAIW,KAAK,CAACC,OAAO,CAAC0B,QAAQ,CAAC,EAAE;AAClC,IAAA,OAAOA,QAAQ,CAACO,GAAG,CAACT,cAAc,CAAC;EACrC,CAAC,MAAM,IAAIE,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AACnD,IAAA,OAAOQ,MAAM,CAACC,WAAW,CAACD,MAAM,CAACE,OAAO,CAACV,QAAQ,CAAC,CAACO,GAAG,CAAC,CAAC,CAACI,CAAC,EAAEC,CAAC,CAAC,KAAK,CAACD,CAAC,EAAEb,cAAc,CAACc,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7F,EAAA;AACA,EAAA,OAAOZ,QAAQ;AACjB;AAEO,MAAMa,kBAAkB,GAAG,OAAO;EACvCvB,MAAM;EACNwB,UAAU;EACVC,OAAO;EACPC,KAAK;EACLrB,QAAQ;EACRH,KAAK;EACLrB,MAAM;EACN8C,OAAO;EACPC,OAAO;EACPtD,MAAM;AACNuD,EAAAA,UAAU,GAAG,GAAG;EAChBC,KAAK,GAAG,EAAE;AACVC,EAAAA,OAAO,GAAG,KAAK;EACfC,KAAK;EACLzB,KAAK;AACL0B,EAAAA,gBAAgB,GAAG;AACK,CAAC,KAAoB;AAC7C,EAAA,IAAI,CAACjC,MAAM,CAACkC,OAAO,EAAE;AACnB,IAAA,OAAOC,OAAO,CAACC,OAAO,EAAE;AAC1B,EAAA;AACA;AACA,EAAA,MAAMC,SAAS,GAAG7B,cAAc,CAACN,KAAK,CAAC;AACvC,EAAA,MAAMoC,UAAU,GAAG9B,cAAc,CAAC3B,MAAM,CAAC;AACzC,EAAA,MAAM0D,SAAS,GAAG/B,cAAc,CAACwB,KAAK,CAAC;EAEvC,IAAIQ,SAAS,GAAG,EAAE;AAClB,EAAA,IAAIT,OAAO,EAAE;AACXS,IAAAA,SAAS,GAAG;AACVC,MAAAA,YAAY,EAAE,IAAI;AAClBC,MAAAA,SAAS,EAAEH;KACZ;AACH,EAAA;EACA,IAAII,gBAAgB,GAAG,EAAE;EACzB,IAAIrE,MAAM,CAACsE,mBAAmB,EAAE;AAC9B,IAAA,MAAMC,YAAY,GAAG,CAACvE,MAAM,CAACsE,mBAAmB,CAACE,SAAS,IAAI,CAAC,KAAKhB,KAAK,CAACiB,WAAW,IAAI,CAAC,CAAC;AAC3F,IAAA,MAAMC,aAAa,GAAG,CAAC1E,MAAM,CAACsE,mBAAmB,CAACK,UAAU,IAAI,CAAC,KAAKnB,KAAK,CAACoB,YAAY,IAAI,CAAC,CAAC;AAC9FP,IAAAA,gBAAgB,GAAG;AACjBQ,MAAAA,kBAAkB,EAAE
N,YAAY;AAChCO,MAAAA,mBAAmB,EAAEJ,aAAa;MAClCK,kBAAkB,EAAER,YAAY,GAAGG;KACpC;AACH,EAAA;AAEA,EAAA,MAAMM,qBAAqB,GAAG;IAC5B,IAAIxB,KAAK,CAACyB,eAAe,GAAG;MAAEC,oBAAoB,EAAE1B,KAAK,CAACyB;KAAiB,GAAG,EAAE,CAAC;IACjF,IAAIzB,KAAK,CAAC2B,oBAAoB,GAAG;MAAEC,2BAA2B,EAAE5B,KAAK,CAAC2B;KAAsB,GAAG,EAAE,CAAC;IAClG,IAAI3B,KAAK,CAAC6B,wBAAwB,GAAG;MAAEC,+BAA+B,EAAE9B,KAAK,CAAC6B;KAA0B,GAAG,EAAE;GAC9G;AAED,EAAA,MAAME,UAAU,GAAG;AACjBC,IAAAA,YAAY,EAAExF,MAAM,CAACyF,uBAAuB,IAAI1D,QAAQ;AACxD2D,IAAAA,SAAS,EAAE1F,MAAM,CAAC2F,oBAAoB,IAAIvC,KAAK;AAC/CwC,IAAAA,oBAAoB,EAAE7F,cAAc,CAACC,MAAM,CAAC;AAC5C6F,IAAAA,SAAS,EAAEpE,eAAe,CAACC,MAAM,EAAE1B,MAAM,CAAC8F,kBAAkB,IAAI,KAAK,EAAE/B,SAAS,CAAC;AACjFgC,IAAAA,kBAAkB,EAAEtE,eAAe,CAACC,MAAM,EAAE1B,MAAM,CAAC8F,kBAAkB,IAAI,KAAK,EAAE9B,UAAU,CAAC;AAC3FgC,IAAAA,eAAe,EAAEzC,UAAU;AAC3B0C,IAAAA,gBAAgB,EAAEzC,KAAK,CAACiB,WAAW,IAAI,CAAC;AACxCyB,IAAAA,iBAAiB,EAAE1C,KAAK,CAACoB,YAAY,IAAI,CAAC;AAC1C,IAAA,GAAGI,qBAAqB;AACxBmB,IAAAA,WAAW,EAAE9C,OAAO;AACpB+C,IAAAA,YAAY,EAAEjD,OAAO;AACrBkD,IAAAA,YAAY,EAAE/C,OAAO;IACrB,GAAGtD,MAAM,CAACsG,iBAAiB;AAC3B,IAAA,IAAIpD,UAAU,GAAG,EAAE,GAAG;AAAEqD,MAAAA,uBAAuB,EAAE;AAAM,KAAC,CAAC;AACzD,IAAA,IAAItE,KAAK,GAAG;AAAEuE,MAAAA,SAAS,EAAEvE;KAAO,GAAG,EAAE,CAAC;AACtC,IAAA,GAAGiC,SAAS;IACZ,GAAGG;GACJ;AAED,EAAA,MAAMoC,KAAK,GAAG;IACZvD,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCsD,IAAAA,KAAK,EAAE,gBAAgB;IACvBlB,UAAU;IACVmB,MAAM,EAAE1G,MAAM,CAAC2G;GAChB;AAED,EAAA,IAAIhD,gBAAgB,EAAE;AACpB;AACA,IAAA,MAAMjC,MAAM,CAACiC,gBAAgB,CAAC8C,KAAK,CAAC;AACtC,EAAA,CAAC,MAAM;AACL/E,IAAAA,MAAM,CAACkC,OAAO,CAAC6C,KAAK,CAAC;AACvB,EAAA;AACF,CAAC;;ACraD;;AA4BO,MAAMG,kBAAkB,CAAC;EAK9BC,WAAWA,CAAC7E,MAA8B,EAAE;IAC1C,MAAM;MAAE8E,OAAO;MAAE,GAAGC;AAAa,KAAC,GAAG/E,MAAM;IAC3C,IAAI,CAACgF,QAAQ,GAAGF,OAAO;AACvB,IAAA,IAAI,CAACpF,MAAM,GAAG,IAAIuF,iBAAW,CAACF,YAAY,CAAC;AAC3C,IAAA,IAAI,CAACG,MAAM,GAAG,IAAIC,aAAa,CAAC,IAAI,CAACzF,MAAM,EAAE,IAAI,CAACsF,QAAQ,CAAC;AAC7D,EAAA;AACF;AAEO,MAAMG,aAAa,CAAC;AAIzBN,EAAAA,WAAWA,CAACnF,MAAmB,EAAEsF,QAAiB,EAAE;IAClD,IAAI,CAACtF,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACsF,QAAQ,GAAGA,QAAQ;AAC1B,EAAA;EAEA,MAAaI,eAAeA,CAACpH,MAAiD,EAAoC;IAChH,MAAM;MACJqH,iBAAiB;MACjBC,cAAc;MACdhB,iBAAiB;MACjBK,aAAa;MACbY,uBAAuB;MACvB,GAAGC;AACL,KAAC,GAAGxH,MAAM;AAEV,IAAA,MAAMmD,OAAO,GAAGmE,cAAc,IAAIG,OAAM,EAAE;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;IAE5B,IAAI;AACF,MAAA,MAAMtH,QAAQ,GAAG,MAAM,IAAI,CAACoB,MAAM,CAACwF,MAAM,CAACE,eAAe,CAACI,YAAY,CAAC;MACvE,MAAMnE,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAE/C,MAAA,MAAMG,cAAc,GAAG/F,yBAAyB,CAAC,QAAQ,EAAE0F,YAAY,CAAC;AAExE,MAAA,MAAMvE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAEF,oBAAoB,CAACC,QAAQ,CAAC;QACtC+C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAE,GAAG;AACfC,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAEnE,QAAQ,CAAC0H,aAAa,EAAEC,gBAAgB,IAAI,CAAC;AAC1DrD,UAAAA,YAAY,EAAEtE,QAAQ,CAAC0H,aAAa,EAAEE,oBAAoB,IAAI;SAC/D;AACDjG,QAAAA,KAAK,EAAE4F,cAAc;AACrBlE,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AAEF,MAAA,OAAOjH,QAAQ;IACjB,CAAC,CAAC,OAAOoD,KAAU,EAAE;MACnB,MAAML,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,MAAA,MAAMzE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,EAAE;QACV8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA
2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAEG,KAAK,EAAEyE,MAAM,IAAI,GAAG;AAChC3E,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE;SACf;AACDnB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAErB,IAAI,CAACE,SAAS,CAACmB,KAAK,CAAC;AAC5BC,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AACF,MAAA,MAAM7D,KAAK;AACb,IAAA;AACF,EAAA;EAEA,OAAc0E,qBAAqBA,CACjCpI,MAAiD,EACb;IACpC,MAAM;MACJqH,iBAAiB;MACjBC,cAAc;MACdhB,iBAAiB;MACjBK,aAAa;MACbY,uBAAuB;MACvB,GAAGC;AACL,KAAC,GAAGxH,MAAM;AAEV,IAAA,MAAMmD,OAAO,GAAGmE,cAAc,IAAIG,OAAM,EAAE;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;IAC5B,IAAIS,kBAAkB,GAAG,EAAE;AAC3B,IAAA,IAAI7E,KAAK,GAAG;AACViB,MAAAA,WAAW,EAAE,CAAC;AACdG,MAAAA,YAAY,EAAE;KACf;IAED,IAAI;AACF,MAAA,MAAM0D,MAAM,GAAG,MAAM,IAAI,CAAC5G,MAAM,CAACwF,MAAM,CAACkB,qBAAqB,CAACZ,YAAY,CAAC;AAE3E,MAAA,WAAW,MAAMe,KAAK,IAAID,MAAM,EAAE;QAChC,IAAIC,KAAK,CAACxH,IAAI,EAAE;UACdsH,kBAAkB,IAAIE,KAAK,CAACxH,IAAI;AAClC,QAAA;QACA,IAAIwH,KAAK,CAACP,aAAa,EAAE;AACvBxE,UAAAA,KAAK,GAAG;AACNiB,YAAAA,WAAW,EAAE8D,KAAK,CAACP,aAAa,CAACC,gBAAgB,IAAI,CAAC;AACtDrD,YAAAA,YAAY,EAAE2D,KAAK,CAACP,aAAa,CAACE,oBAAoB,IAAI;WAC3D;AACH,QAAA;AACA,QAAA,MAAMK,KAAK;AACb,MAAA;MAEA,MAAMlF,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAE/C,MAAA,MAAMG,cAAc,GAAG/F,yBAAyB,CAAC,QAAQ,EAAE0F,YAAY,CAAC;AAExE,MAAA,MAAMvE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,CAAC;AAAEK,UAAAA,OAAO,EAAEyH,kBAAkB;AAAE7G,UAAAA,IAAI,EAAE;AAAY,SAAC,CAAC;QAC5D6B,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAE,GAAG;QACfC,KAAK;AACLvB,QAAAA,KAAK,EAAE4F,cAAc;AACrBlE,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;IACJ,CAAC,CAAC,OAAO7D,KAAU,EAAE;MACnB,MAAML,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,MAAA,MAAMzE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,EAAE;QACV8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAEG,KAAK,EAAEyE,MAAM,IAAI,GAAG;AAChC3E,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE;SACf;AACDnB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAErB,IAAI,CAACE,SAAS,CAACmB,KAAK,CAAC;AAC5BC,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AACF,MAAA,MAAM7D,KAAK;AACb,IAAA;AACF,EAAA;EAEQoE,WAAWA,CAACC,QAAa,EAA4C;AAC3E,IAAA,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AAChC,MAAA,OAAO,CAAC;AAAEvG,QAAAA,IAAI,EAAE,MAAM;AAAEZ,QAAAA,OAAO,EAAEmH;AAAS,OAAC,CAAC;AAC9C,IAAA;AAEA,IAAA,IAAItH,KAAK,CAACC,OAAO,CAACqH,QAAQ,CAAC,EAAE;AAC3B,MAAA,OAAOA,QAAQ,CAACpF,GAAG,CAAE6F,IAAI,IAAK;AAC5B,QAAA,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UAC5B,OAAO;AAAEhH,YAAAA,IAAI,EAAE,MAAM;AAAEZ,YAAAA,OAAO,EAAE4H;WAAM;AACxC,QAAA;AACA,QAAA,IAAIA,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UACpC,IAAIA,IAAI,CAACzH,IAAI,EAAE;YACb,OAAO;AAAES,cAAAA,IAAI,EAAEgH,IAAI,CAAChH,IAAI,IAAI,MAAM;cAAEZ,OAAO,EAAE4H,IAAI,CAACzH;aAAM;AAC1D,UAAA;UACA,IAAIyH,IAAI,CAAC5H,OAAO,EAAE;YAChB,OAAO;AAAEY,cAAAA,IAAI,EAAEgH,IAAI,CAAChH,IAAI,IAAI,MAAM;cAAEZ,OAAO,EAAE4H,IAAI,CAAC5H;aAAS;AAC7D,UAAA;AACF,QAAA;QACA,OAAO;AAAEY,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAE6H,MAAM,CAACD,IAAI;SAAG;AAChD,MAAA,CAAC,CAAC;AACJ,IAAA;AAEA,IAAA,IAAIT,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;MAC5C,IAAIA,QAAQ,CAAChH,IAAI,EAAE;AACjB,QAAA,OAAO,CAAC;AAAES,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAEmH,QAAQ,CAAChH
;AAAK,SAAC,CAAC;AACnD,MAAA;MACA,IAAIgH,QAAQ,CAACnH,OAAO,EAAE;AACpB,QAAA,OAAO,CAAC;AAAEY,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAEmH,QAAQ,CAACnH;AAAQ,SAAC,CAAC;AACtD,MAAA;AACF,IAAA;AAEA,IAAA,OAAO,CAAC;AAAEY,MAAAA,IAAI,EAAE,MAAM;MAAEZ,OAAO,EAAE6H,MAAM,CAACV,QAAQ;AAAE,KAAC,CAAC;AACtD,EAAA;AACF;;;;;;;"}
+ {"version":3,"file":"index.cjs","sources":["../../src/utils.ts","../../src/gemini/index.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport { Buffer } from 'buffer'\nimport OpenAIOrignal from 'openai'\nimport AnthropicOriginal from '@anthropic-ai/sdk'\nimport type { ChatCompletionTool } from 'openai/resources/chat/completions'\nimport type { Tool as GeminiTool } from '@google/genai'\nimport type { FormattedMessage, FormattedContent, TokenUsage } from './types'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams\ntype ResponseCreateParams = OpenAIOrignal.Responses.ResponseCreateParams\ntype AnthropicTool = AnthropicOriginal.Tool\n\n// limit large outputs by truncating to 200kb (approx 200k bytes)\nexport const MAX_OUTPUT_SIZE = 200000\nconst STRING_FORMAT = 'utf8'\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n posthogCaptureImmediate?: boolean\n}\n\nexport interface CostOverride {\n inputCost: number\n outputCost: number\n}\n\nexport const getModelParams = (\n params: ((ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams) | null\n): Record<string, any> => {\n if (!params) {\n return {}\n }\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): FormattedMessage[] => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n } else if (provider === 'gemini') {\n return formatResponseGemini(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n const content: FormattedContent = []\n\n for (const choice of response.content ?? 
[]) {\n if (choice?.type === 'text' && choice?.text) {\n content.push({ type: 'text', text: choice.text })\n } else if (choice?.type === 'tool_use' && choice?.name && choice?.id) {\n content.push({\n type: 'function',\n id: choice.id,\n function: {\n name: choice.name,\n arguments: choice.input || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.choices) {\n for (const choice of response.choices) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n if (choice.message) {\n if (choice.message.role) {\n role = choice.message.role\n }\n\n if (choice.message.content) {\n content.push({ type: 'text', text: choice.message.content })\n }\n\n if (choice.message.tool_calls) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'function',\n id: toolCall.id,\n function: {\n name: toolCall.function.name,\n arguments: toolCall.function.arguments,\n },\n })\n }\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n }\n\n // Handle Responses API format\n if (response.output) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n for (const item of response.output) {\n if (item.type === 'message') {\n role = item.role\n\n if (item.content && Array.isArray(item.content)) {\n for (const contentItem of item.content) {\n if (contentItem.type === 'output_text' && contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.type === 'input_image' && contentItem.image_url) {\n content.push({\n type: 'image',\n image: contentItem.image_url,\n })\n }\n }\n } else if (item.content) {\n content.push({ type: 'text', text: String(item.content) })\n }\n } else if (item.type === 'function_call') {\n content.push({\n type: 'function',\n id: item.call_id || item.id || '',\n function: {\n name: item.name,\n arguments: item.arguments || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n\n return output\n}\n\nexport const formatResponseGemini = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.candidates && Array.isArray(response.candidates)) {\n for (const candidate of response.candidates) {\n if (candidate.content && candidate.content.parts) {\n const content: FormattedContent = []\n\n for (const part of candidate.content.parts) {\n if (part.text) {\n content.push({ type: 'text', text: part.text })\n } else if (part.functionCall) {\n content.push({\n type: 'function',\n function: {\n name: part.functionCall.name,\n arguments: part.functionCall.args,\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n } else if (candidate.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: candidate.text }],\n })\n }\n }\n } else if (response.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: response.text }],\n })\n }\n\n return output\n}\n\nexport const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {\n if (provider == 'anthropic') {\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = 
(params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n }\n return params.messages\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? null : input\n}\n\nexport const truncate = (str: string): string => {\n try {\n const buffer = Buffer.from(str, STRING_FORMAT)\n if (buffer.length <= MAX_OUTPUT_SIZE) {\n return str\n }\n const truncatedBuffer = buffer.slice(0, MAX_OUTPUT_SIZE)\n return `${truncatedBuffer.toString(STRING_FORMAT)}... [truncated]`\n } catch (error) {\n console.error('Error truncating, likely not a string')\n return str\n }\n}\n\n/**\n * Extract available tool calls from the request parameters.\n * These are the tools provided to the LLM, not the tool calls in the response.\n */\nexport const extractAvailableToolCalls = (\n provider: string,\n params: any\n): ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null => {\n if (provider === 'anthropic') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'gemini') {\n if (params.config && params.config.tools) {\n return params.config.tools\n }\n\n return null\n } else if (provider === 'openai') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'vercel') {\n // Vercel AI SDK stores tools in params.mode.tools when mode type is 'regular'\n if (params.mode?.type === 'regular' && params.mode.tools) {\n return params.mode.tools\n }\n\n return null\n }\n\n return null\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: TokenUsage\n params: (ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams\n isError?: boolean\n error?: string\n tools?: ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null\n captureImmediate?: boolean\n}\n\nfunction sanitizeValues(obj: any): any {\n if (obj === undefined || obj === null) {\n return obj\n }\n const jsonSafe = JSON.parse(JSON.stringify(obj))\n if (typeof jsonSafe === 'string') {\n return Buffer.from(jsonSafe, STRING_FORMAT).toString(STRING_FORMAT)\n } else if (Array.isArray(jsonSafe)) {\n return jsonSafe.map(sanitizeValues)\n } else if (jsonSafe && typeof jsonSafe === 'object') {\n return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]))\n }\n return jsonSafe\n}\n\nexport const sendEventToPosthog = async ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n isError = false,\n error,\n tools,\n captureImmediate = false,\n}: SendEventToPosthogParams): Promise<void> => {\n if (!client.capture) {\n return Promise.resolve()\n }\n // sanitize input and output for UTF-8 validity\n const safeInput = sanitizeValues(input)\n const safeOutput = sanitizeValues(output)\n const safeError = sanitizeValues(error)\n\n let errorData = {}\n if (isError) {\n errorData = {\n $ai_is_error: true,\n $ai_error: safeError,\n }\n }\n let costOverrideData = {}\n if (params.posthogCostOverride) {\n const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0)\n const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 
0)\n costOverrideData = {\n $ai_input_cost_usd: inputCostUSD,\n $ai_output_cost_usd: outputCostUSD,\n $ai_total_cost_usd: inputCostUSD + outputCostUSD,\n }\n }\n\n const additionalTokenValues = {\n ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),\n ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),\n ...(usage.cacheCreationInputTokens ? { $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens } : {}),\n }\n\n const properties = {\n $ai_provider: params.posthogProviderOverride ?? provider,\n $ai_model: params.posthogModelOverride ?? model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n ...additionalTokenValues,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n ...(tools ? { $ai_tools: tools } : {}),\n ...errorData,\n ...costOverrideData,\n }\n\n const event = {\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties,\n groups: params.posthogGroups,\n }\n\n if (captureImmediate) {\n // await capture promise to send single event in serverless environments\n await client.captureImmediate(event)\n } else {\n client.capture(event)\n }\n}\n","import { GoogleGenAI } from '@google/genai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { MonitoringParams, sendEventToPosthog, extractAvailableToolCalls, formatResponseGemini } from '../utils'\nimport type { TokenUsage } from '../types'\n\n// Types from @google/genai\ntype GenerateContentRequest = {\n model: string\n contents: any\n config?: any\n [key: string]: any\n}\n\ntype GenerateContentResponse = {\n text?: string\n candidates?: any[]\n usageMetadata?: {\n promptTokenCount?: number\n candidatesTokenCount?: number\n totalTokenCount?: number\n thoughtsTokenCount?: number\n cachedContentTokenCount?: number\n }\n [key: string]: any\n}\n\ninterface MonitoringGeminiConfig {\n apiKey?: string\n vertexai?: boolean\n project?: string\n location?: string\n apiVersion?: string\n posthog: PostHog\n}\n\nexport class PostHogGoogleGenAI {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n public models: WrappedModels\n\n constructor(config: MonitoringGeminiConfig) {\n const { posthog, ...geminiConfig } = config\n this.phClient = posthog\n this.client = new GoogleGenAI(geminiConfig)\n this.models = new WrappedModels(this.client, this.phClient)\n }\n}\n\nexport class WrappedModels {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n\n constructor(client: GoogleGenAI, phClient: PostHog) {\n this.client = client\n this.phClient = phClient\n }\n\n public async generateContent(params: GenerateContentRequest & MonitoringParams): Promise<GenerateContentResponse> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? 
uuidv4()\n const startTime = Date.now()\n\n try {\n const response = await this.client.models.generateContent(geminiParams)\n const latency = (Date.now() - startTime) / 1000\n\n const availableTools = extractAvailableToolCalls('gemini', geminiParams)\n\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: formatResponseGemini(response),\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: 200,\n usage: {\n inputTokens: response.usageMetadata?.promptTokenCount ?? 0,\n outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,\n reasoningTokens: response.usageMetadata?.thoughtsTokenCount ?? 0,\n cacheReadInputTokens: response.usageMetadata?.cachedContentTokenCount ?? 0,\n },\n tools: availableTools,\n captureImmediate: posthogCaptureImmediate,\n })\n\n return response\n } catch (error: any) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: error?.status ?? 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n public async *generateContentStream(\n params: GenerateContentRequest & MonitoringParams\n ): AsyncGenerator<any, void, unknown> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n let accumulatedContent = ''\n let usage: TokenUsage = {\n inputTokens: 0,\n outputTokens: 0,\n }\n\n try {\n const stream = await this.client.models.generateContentStream(geminiParams)\n\n for await (const chunk of stream) {\n if (chunk.text) {\n accumulatedContent += chunk.text\n }\n if (chunk.usageMetadata) {\n usage = {\n inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,\n outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,\n reasoningTokens: chunk.usageMetadata.thoughtsTokenCount ?? 0,\n cacheReadInputTokens: chunk.usageMetadata.cachedContentTokenCount ?? 0,\n }\n }\n yield chunk\n }\n\n const latency = (Date.now() - startTime) / 1000\n\n const availableTools = extractAvailableToolCalls('gemini', geminiParams)\n\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: 200,\n usage,\n tools: availableTools,\n captureImmediate: posthogCaptureImmediate,\n })\n } catch (error: any) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: error?.status ?? 
500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n private formatInput(contents: any): Array<{ role: string; content: string }> {\n if (typeof contents === 'string') {\n return [{ role: 'user', content: contents }]\n }\n\n if (Array.isArray(contents)) {\n return contents.map((item) => {\n if (typeof item === 'string') {\n return { role: 'user', content: item }\n }\n if (item && typeof item === 'object') {\n if (item.text) {\n return { role: item.role || 'user', content: item.text }\n }\n if (item.content) {\n return { role: item.role || 'user', content: item.content }\n }\n }\n return { role: 'user', content: String(item) }\n })\n }\n\n if (contents && typeof contents === 'object') {\n if (contents.text) {\n return [{ role: 'user', content: contents.text }]\n }\n if (contents.content) {\n return [{ role: 'user', content: contents.content }]\n }\n }\n\n return [{ role: 'user', content: String(contents) }]\n }\n}\n\nexport default PostHogGoogleGenAI\nexport { PostHogGoogleGenAI as Gemini }\n"],"names":["STRING_FORMAT","getModelParams","params","modelParams","paramKeys","key","undefined","formatResponseGemini","response","output","candidates","Array","isArray","candidate","content","parts","part","text","push","type","functionCall","function","name","arguments","args","length","role","withPrivacyMode","client","privacyMode","input","privacy_mode","extractAvailableToolCalls","provider","config","tools","sanitizeValues","obj","jsonSafe","JSON","parse","stringify","Buffer","from","toString","map","Object","fromEntries","entries","k","v","sendEventToPosthog","distinctId","traceId","model","latency","baseURL","httpStatus","usage","isError","error","captureImmediate","capture","Promise","resolve","safeInput","safeOutput","safeError","errorData","$ai_is_error","$ai_error","costOverrideData","posthogCostOverride","inputCostUSD","inputCost","inputTokens","outputCostUSD","outputCost","outputTokens","$ai_input_cost_usd","$ai_output_cost_usd","$ai_total_cost_usd","additionalTokenValues","reasoningTokens","$ai_reasoning_tokens","cacheReadInputTokens","$ai_cache_read_input_tokens","cacheCreationInputTokens","$ai_cache_creation_input_tokens","properties","$ai_provider","posthogProviderOverride","$ai_model","posthogModelOverride","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","$ai_output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","$ai_tools","event","groups","posthogGroups","PostHogGoogleGenAI","constructor","posthog","geminiConfig","phClient","GoogleGenAI","models","WrappedModels","generateContent","posthogDistinctId","posthogTraceId","posthogCaptureImmediate","geminiParams","uuidv4","startTime","Date","now","availableTools","formatInput","contents","usageMetadata","promptTokenCount","candidatesTokenCount","thoughtsTokenCount","cachedContentTokenCount","status","generateContentStream","accumulatedContent","stream","chunk","item","String"],"mappings":";;;;;;;;AAeA,MAAMA,aAAa,GAAG,MAAM;AAmBrB,MAAMC,cAAc,GACzBC,MAAiH,IACzF;EACxB,IAAI,CAACA,MAAM,EAAE;AACX,IAAA,OAAO,EAAE;AACX,EAAA;EACA,MAAMC,WAAgC,GAAG,EAAE;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAM,CAASG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAA
A,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAM,CAASG,GAAG,CAAC;AACzC,IAAA;AACF,EAAA;AACA,EAAA,OAAOF,WAAW;AACpB,CAAC;AAwIM,MAAMI,oBAAoB,GAAIC,QAAa,IAAyB;EACzE,MAAMC,MAA0B,GAAG,EAAE;AAErC,EAAA,IAAID,QAAQ,CAACE,UAAU,IAAIC,KAAK,CAACC,OAAO,CAACJ,QAAQ,CAACE,UAAU,CAAC,EAAE;AAC7D,IAAA,KAAK,MAAMG,SAAS,IAAIL,QAAQ,CAACE,UAAU,EAAE;MAC3C,IAAIG,SAAS,CAACC,OAAO,IAAID,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;QAChD,MAAMD,OAAyB,GAAG,EAAE;QAEpC,KAAK,MAAME,IAAI,IAAIH,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;UAC1C,IAAIC,IAAI,CAACC,IAAI,EAAE;YACbH,OAAO,CAACI,IAAI,CAAC;AAAEC,cAAAA,IAAI,EAAE,MAAM;cAAEF,IAAI,EAAED,IAAI,CAACC;AAAK,aAAC,CAAC;AACjD,UAAA,CAAC,MAAM,IAAID,IAAI,CAACI,YAAY,EAAE;YAC5BN,OAAO,CAACI,IAAI,CAAC;AACXC,cAAAA,IAAI,EAAE,UAAU;AAChBE,cAAAA,QAAQ,EAAE;AACRC,gBAAAA,IAAI,EAAEN,IAAI,CAACI,YAAY,CAACE,IAAI;AAC5BC,gBAAAA,SAAS,EAAEP,IAAI,CAACI,YAAY,CAACI;AAC/B;AACF,aAAC,CAAC;AACJ,UAAA;AACF,QAAA;AAEA,QAAA,IAAIV,OAAO,CAACW,MAAM,GAAG,CAAC,EAAE;UACtBhB,MAAM,CAACS,IAAI,CAAC;AACVQ,YAAAA,IAAI,EAAE,WAAW;AACjBZ,YAAAA;AACF,WAAC,CAAC;AACJ,QAAA;AACF,MAAA,CAAC,MAAM,IAAID,SAAS,CAACI,IAAI,EAAE;QACzBR,MAAM,CAACS,IAAI,CAAC;AACVQ,UAAAA,IAAI,EAAE,WAAW;AACjBZ,UAAAA,OAAO,EAAE,CAAC;AAAEK,YAAAA,IAAI,EAAE,MAAM;YAAEF,IAAI,EAAEJ,SAAS,CAACI;WAAM;AAClD,SAAC,CAAC;AACJ,MAAA;AACF,IAAA;AACF,EAAA,CAAC,MAAM,IAAIT,QAAQ,CAACS,IAAI,EAAE;IACxBR,MAAM,CAACS,IAAI,CAAC;AACVQ,MAAAA,IAAI,EAAE,WAAW;AACjBZ,MAAAA,OAAO,EAAE,CAAC;AAAEK,QAAAA,IAAI,EAAE,MAAM;QAAEF,IAAI,EAAET,QAAQ,CAACS;OAAM;AACjD,KAAC,CAAC;AACJ,EAAA;AAEA,EAAA,OAAOR,MAAM;AACf,CAAC;AAcM,MAAMkB,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAU;EACzF,OAAQF,MAAM,CAASG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK;AACnE,CAAC;;AAgBD;AACA;AACA;AACA;AACO,MAAME,yBAAyB,GAAGA,CACvCC,QAAgB,EAChB/B,MAAW,KACsD;EAO/B;IAChC,IAAIA,MAAM,CAACgC,MAAM,IAAIhC,MAAM,CAACgC,MAAM,CAACC,KAAK,EAAE;AACxC,MAAA,OAAOjC,MAAM,CAACgC,MAAM,CAACC,KAAK;AAC5B,IAAA;AAEA,IAAA,OAAO,IAAI;AACb,EAAA;AAgBF,CAAC;AAqBD,SAASC,cAAcA,CAACC,GAAQ,EAAO;AACrC,EAAA,IAAIA,GAAG,KAAK/B,SAAS,IAAI+B,GAAG,KAAK,IAAI,EAAE;AACrC,IAAA,OAAOA,GAAG;AACZ,EAAA;AACA,EAAA,MAAMC,QAAQ,GAAGC,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACJ,GAAG,CAAC,CAAC;AAChD,EAAA,IAAI,OAAOC,QAAQ,KAAK,QAAQ,EAAE;AAChC,IAAA,OAAOI,aAAM,CAACC,IAAI,CAACL,QAAQ,EAAEtC,aAAa,CAAC,CAAC4C,QAAQ,CAAC5C,aAAa,CAAC;EACrE,CAAC,MAAM,IAAIW,KAAK,CAACC,OAAO,CAAC0B,QAAQ,CAAC,EAAE;AAClC,IAAA,OAAOA,QAAQ,CAACO,GAAG,CAACT,cAAc,CAAC;EACrC,CAAC,MAAM,IAAIE,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AACnD,IAAA,OAAOQ,MAAM,CAACC,WAAW,CAACD,MAAM,CAACE,OAAO,CAACV,QAAQ,CAAC,CAACO,GAAG,CAAC,CAAC,CAACI,CAAC,EAAEC,CAAC,CAAC,KAAK,CAACD,CAAC,EAAEb,cAAc,CAACc,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7F,EAAA;AACA,EAAA,OAAOZ,QAAQ;AACjB;AAEO,MAAMa,kBAAkB,GAAG,OAAO;EACvCvB,MAAM;EACNwB,UAAU;EACVC,OAAO;EACPC,KAAK;EACLrB,QAAQ;EACRH,KAAK;EACLrB,MAAM;EACN8C,OAAO;EACPC,OAAO;EACPtD,MAAM;AACNuD,EAAAA,UAAU,GAAG,GAAG;EAChBC,KAAK,GAAG,EAAE;AACVC,EAAAA,OAAO,GAAG,KAAK;EACfC,KAAK;EACLzB,KAAK;AACL0B,EAAAA,gBAAgB,GAAG;AACK,CAAC,KAAoB;AAC7C,EAAA,IAAI,CAACjC,MAAM,CAACkC,OAAO,EAAE;AACnB,IAAA,OAAOC,OAAO,CAACC,OAAO,EAAE;AAC1B,EAAA;AACA;AACA,EAAA,MAAMC,SAAS,GAAG7B,cAAc,CAACN,KAAK,CAAC;AACvC,EAAA,MAAMoC,UAAU,GAAG9B,cAAc,CAAC3B,MAAM,CAAC;AACzC,EAAA,MAAM0D,SAAS,GAAG/B,cAAc,CAACwB,KAAK,CAAC;EAEvC,IAAIQ,SAAS,GAAG,EAAE;AAClB,EAAA,IAAIT,OAAO,EAAE;AACXS,IAAAA,SAAS,GAAG;AACVC,MAAAA,YAAY,EAAE,IAAI;AAClBC,MAAAA,SAAS,EAAEH;KACZ;AACH,EAAA;EACA,IAAII,gBAAgB,GAAG,EAAE;EACzB,IAAIrE,MAAM,CAACsE,mBAAmB,EAAE;AAC9B,IAAA,MAAMC,YAAY,GAAG,CAACvE,MAAM,CAACsE,mBAAmB,CAACE,SAAS,IAAI,CAAC,KAAKhB,KAAK,CAACiB,WAAW,IAAI,CAAC,CAAC;AAC3F,IAAA,MAAMC,aAAa,GAAG,CAAC1E,MAAM,CAACsE,mBAAmB,CAACK,UAAU,IAAI,CAAC,KAAKnB,KAAK,CAACoB,YAAY,IAAI,CAAC,CAAC;AA
C9FP,IAAAA,gBAAgB,GAAG;AACjBQ,MAAAA,kBAAkB,EAAEN,YAAY;AAChCO,MAAAA,mBAAmB,EAAEJ,aAAa;MAClCK,kBAAkB,EAAER,YAAY,GAAGG;KACpC;AACH,EAAA;AAEA,EAAA,MAAMM,qBAAqB,GAAG;IAC5B,IAAIxB,KAAK,CAACyB,eAAe,GAAG;MAAEC,oBAAoB,EAAE1B,KAAK,CAACyB;KAAiB,GAAG,EAAE,CAAC;IACjF,IAAIzB,KAAK,CAAC2B,oBAAoB,GAAG;MAAEC,2BAA2B,EAAE5B,KAAK,CAAC2B;KAAsB,GAAG,EAAE,CAAC;IAClG,IAAI3B,KAAK,CAAC6B,wBAAwB,GAAG;MAAEC,+BAA+B,EAAE9B,KAAK,CAAC6B;KAA0B,GAAG,EAAE;GAC9G;AAED,EAAA,MAAME,UAAU,GAAG;AACjBC,IAAAA,YAAY,EAAExF,MAAM,CAACyF,uBAAuB,IAAI1D,QAAQ;AACxD2D,IAAAA,SAAS,EAAE1F,MAAM,CAAC2F,oBAAoB,IAAIvC,KAAK;AAC/CwC,IAAAA,oBAAoB,EAAE7F,cAAc,CAACC,MAAM,CAAC;AAC5C6F,IAAAA,SAAS,EAAEpE,eAAe,CAACC,MAAM,EAAE1B,MAAM,CAAC8F,kBAAkB,IAAI,KAAK,EAAE/B,SAAS,CAAC;AACjFgC,IAAAA,kBAAkB,EAAEtE,eAAe,CAACC,MAAM,EAAE1B,MAAM,CAAC8F,kBAAkB,IAAI,KAAK,EAAE9B,UAAU,CAAC;AAC3FgC,IAAAA,eAAe,EAAEzC,UAAU;AAC3B0C,IAAAA,gBAAgB,EAAEzC,KAAK,CAACiB,WAAW,IAAI,CAAC;AACxCyB,IAAAA,iBAAiB,EAAE1C,KAAK,CAACoB,YAAY,IAAI,CAAC;AAC1C,IAAA,GAAGI,qBAAqB;AACxBmB,IAAAA,WAAW,EAAE9C,OAAO;AACpB+C,IAAAA,YAAY,EAAEjD,OAAO;AACrBkD,IAAAA,YAAY,EAAE/C,OAAO;IACrB,GAAGtD,MAAM,CAACsG,iBAAiB;AAC3B,IAAA,IAAIpD,UAAU,GAAG,EAAE,GAAG;AAAEqD,MAAAA,uBAAuB,EAAE;AAAM,KAAC,CAAC;AACzD,IAAA,IAAItE,KAAK,GAAG;AAAEuE,MAAAA,SAAS,EAAEvE;KAAO,GAAG,EAAE,CAAC;AACtC,IAAA,GAAGiC,SAAS;IACZ,GAAGG;GACJ;AAED,EAAA,MAAMoC,KAAK,GAAG;IACZvD,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCsD,IAAAA,KAAK,EAAE,gBAAgB;IACvBlB,UAAU;IACVmB,MAAM,EAAE1G,MAAM,CAAC2G;GAChB;AAED,EAAA,IAAIhD,gBAAgB,EAAE;AACpB;AACA,IAAA,MAAMjC,MAAM,CAACiC,gBAAgB,CAAC8C,KAAK,CAAC;AACtC,EAAA,CAAC,MAAM;AACL/E,IAAAA,MAAM,CAACkC,OAAO,CAAC6C,KAAK,CAAC;AACvB,EAAA;AACF,CAAC;;ACpaD;;AA8BO,MAAMG,kBAAkB,CAAC;EAK9BC,WAAWA,CAAC7E,MAA8B,EAAE;IAC1C,MAAM;MAAE8E,OAAO;MAAE,GAAGC;AAAa,KAAC,GAAG/E,MAAM;IAC3C,IAAI,CAACgF,QAAQ,GAAGF,OAAO;AACvB,IAAA,IAAI,CAACpF,MAAM,GAAG,IAAIuF,iBAAW,CAACF,YAAY,CAAC;AAC3C,IAAA,IAAI,CAACG,MAAM,GAAG,IAAIC,aAAa,CAAC,IAAI,CAACzF,MAAM,EAAE,IAAI,CAACsF,QAAQ,CAAC;AAC7D,EAAA;AACF;AAEO,MAAMG,aAAa,CAAC;AAIzBN,EAAAA,WAAWA,CAACnF,MAAmB,EAAEsF,QAAiB,EAAE;IAClD,IAAI,CAACtF,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACsF,QAAQ,GAAGA,QAAQ;AAC1B,EAAA;EAEA,MAAaI,eAAeA,CAACpH,MAAiD,EAAoC;IAChH,MAAM;MACJqH,iBAAiB;MACjBC,cAAc;MACdhB,iBAAiB;MACjBK,aAAa;MACbY,uBAAuB;MACvB,GAAGC;AACL,KAAC,GAAGxH,MAAM;AAEV,IAAA,MAAMmD,OAAO,GAAGmE,cAAc,IAAIG,OAAM,EAAE;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;IAE5B,IAAI;AACF,MAAA,MAAMtH,QAAQ,GAAG,MAAM,IAAI,CAACoB,MAAM,CAACwF,MAAM,CAACE,eAAe,CAACI,YAAY,CAAC;MACvE,MAAMnE,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAE/C,MAAA,MAAMG,cAAc,GAAG/F,yBAAyB,CAAC,QAAQ,EAAE0F,YAAY,CAAC;AAExE,MAAA,MAAMvE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAEF,oBAAoB,CAACC,QAAQ,CAAC;QACtC+C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAE,GAAG;AACfC,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAEnE,QAAQ,CAAC0H,aAAa,EAAEC,gBAAgB,IAAI,CAAC;AAC1DrD,UAAAA,YAAY,EAAEtE,QAAQ,CAAC0H,aAAa,EAAEE,oBAAoB,IAAI,CAAC;AAC/DjD,UAAAA,eAAe,EAAE3E,QAAQ,CAAC0H,aAAa,EAAEG,kBAAkB,IAAI,CAAC;AAChEhD,UAAAA,oBAAoB,EAAE7E,QAAQ,CAAC0H,aAAa,EAAEI,uBAAuB,IAAI;SAC1E;AACDnG,QAAAA,KAAK,EAAE4F,cAAc;AACrBlE,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AAEF,MAAA,OAAOjH,QAAQ;IACjB,CAAC,CAAC,OAAOoD,KAAU,EAAE;MACnB,MAAML,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,MAAA,MAAMzE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,
EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,EAAE;QACV8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAEG,KAAK,EAAE2E,MAAM,IAAI,GAAG;AAChC7E,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE;SACf;AACDnB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAErB,IAAI,CAACE,SAAS,CAACmB,KAAK,CAAC;AAC5BC,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AACF,MAAA,MAAM7D,KAAK;AACb,IAAA;AACF,EAAA;EAEA,OAAc4E,qBAAqBA,CACjCtI,MAAiD,EACb;IACpC,MAAM;MACJqH,iBAAiB;MACjBC,cAAc;MACdhB,iBAAiB;MACjBK,aAAa;MACbY,uBAAuB;MACvB,GAAGC;AACL,KAAC,GAAGxH,MAAM;AAEV,IAAA,MAAMmD,OAAO,GAAGmE,cAAc,IAAIG,OAAM,EAAE;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;IAC5B,IAAIW,kBAAkB,GAAG,EAAE;AAC3B,IAAA,IAAI/E,KAAiB,GAAG;AACtBiB,MAAAA,WAAW,EAAE,CAAC;AACdG,MAAAA,YAAY,EAAE;KACf;IAED,IAAI;AACF,MAAA,MAAM4D,MAAM,GAAG,MAAM,IAAI,CAAC9G,MAAM,CAACwF,MAAM,CAACoB,qBAAqB,CAACd,YAAY,CAAC;AAE3E,MAAA,WAAW,MAAMiB,KAAK,IAAID,MAAM,EAAE;QAChC,IAAIC,KAAK,CAAC1H,IAAI,EAAE;UACdwH,kBAAkB,IAAIE,KAAK,CAAC1H,IAAI;AAClC,QAAA;QACA,IAAI0H,KAAK,CAACT,aAAa,EAAE;AACvBxE,UAAAA,KAAK,GAAG;AACNiB,YAAAA,WAAW,EAAEgE,KAAK,CAACT,aAAa,CAACC,gBAAgB,IAAI,CAAC;AACtDrD,YAAAA,YAAY,EAAE6D,KAAK,CAACT,aAAa,CAACE,oBAAoB,IAAI,CAAC;AAC3DjD,YAAAA,eAAe,EAAEwD,KAAK,CAACT,aAAa,CAACG,kBAAkB,IAAI,CAAC;AAC5DhD,YAAAA,oBAAoB,EAAEsD,KAAK,CAACT,aAAa,CAACI,uBAAuB,IAAI;WACtE;AACH,QAAA;AACA,QAAA,MAAMK,KAAK;AACb,MAAA;MAEA,MAAMpF,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAE/C,MAAA,MAAMG,cAAc,GAAG/F,yBAAyB,CAAC,QAAQ,EAAE0F,YAAY,CAAC;AAExE,MAAA,MAAMvE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,CAAC;AAAEK,UAAAA,OAAO,EAAE2H,kBAAkB;AAAE/G,UAAAA,IAAI,EAAE;AAAY,SAAC,CAAC;QAC5D6B,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAE,GAAG;QACfC,KAAK;AACLvB,QAAAA,KAAK,EAAE4F,cAAc;AACrBlE,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;IACJ,CAAC,CAAC,OAAO7D,KAAU,EAAE;MACnB,MAAML,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,MAAA,MAAMzE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,EAAE;QACV8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAEG,KAAK,EAAE2E,MAAM,IAAI,GAAG;AAChC7E,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE;SACf;AACDnB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAErB,IAAI,CAACE,SAAS,CAACmB,KAAK,CAAC;AAC5BC,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AACF,MAAA,MAAM7D,KAAK;AACb,IAAA;AACF,EAAA;EAEQoE,WAAWA,CAACC,QAAa,EAA4C;AAC3E,IAAA,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AAChC,MAAA,OAAO,CAAC;AAAEvG,QAAAA,IAAI,EAAE,MAAM;AAAEZ,QAAAA,OAAO,EAAEmH;AAAS,OAAC,CAAC;AAC9C,IAAA;AAEA,IAAA,IAAItH,KAAK,CAACC,OAAO,CAACqH,QAAQ,CAAC,EAAE;AAC3B,MAAA,OAAOA,QAAQ,CAACpF,GAAG,CAAE+F,IAAI,IAAK;AAC5B,QAAA,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UAC5B,OAAO;AAAElH,YAAAA,IAAI,EAAE,MAAM;AAAEZ,YAAAA,OAAO,EAAE8H;WAAM;AACxC,QAAA;AACA,QAAA,IAAIA,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UACpC,IAAIA,IAAI,CAAC3H,IAAI,EAAE;YACb,OAAO;AAAES,cAAAA,IAAI,EAAEkH,IAAI,CAAClH,IAAI,IAAI,MAAM;cAAEZ,OAAO,EAAE8H,IAAI,CAAC3H;aAAM;AAC1D,UAAA;UACA,IAAI2H,IAAI,CAAC9H,OAAO,EAAE;YAChB,OAAO;AAAEY,cAAAA,IAAI,EAAEkH,IAAI,CAAClH,IAAI,IAAI,MAAM;cAAEZ,OAAO
,EAAE8H,IAAI,CAAC9H;aAAS;AAC7D,UAAA;AACF,QAAA;QACA,OAAO;AAAEY,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAE+H,MAAM,CAACD,IAAI;SAAG;AAChD,MAAA,CAAC,CAAC;AACJ,IAAA;AAEA,IAAA,IAAIX,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;MAC5C,IAAIA,QAAQ,CAAChH,IAAI,EAAE;AACjB,QAAA,OAAO,CAAC;AAAES,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAEmH,QAAQ,CAAChH;AAAK,SAAC,CAAC;AACnD,MAAA;MACA,IAAIgH,QAAQ,CAACnH,OAAO,EAAE;AACpB,QAAA,OAAO,CAAC;AAAEY,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAEmH,QAAQ,CAACnH;AAAQ,SAAC,CAAC;AACtD,MAAA;AACF,IAAA;AAEA,IAAA,OAAO,CAAC;AAAEY,MAAAA,IAAI,EAAE,MAAM;MAAEZ,OAAO,EAAE+H,MAAM,CAACZ,QAAQ;AAAE,KAAC,CAAC;AACtD,EAAA;AACF;;;;;;;"}
@@ -30,6 +30,8 @@ type GenerateContentResponse = {
  promptTokenCount?: number;
  candidatesTokenCount?: number;
  totalTokenCount?: number;
+ thoughtsTokenCount?: number;
+ cachedContentTokenCount?: number;
  };
  [key: string]: any;
  };
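For reference, these two optional fields extend the wrapper's local GenerateContentResponse type. A minimal sketch of the resulting usageMetadata shape follows; the field names come from the diff above, while the GeminiUsageMetadata name and the per-field comments are only an interpretation of Gemini's usage counters, not text from the package:

type GeminiUsageMetadata = {
  promptTokenCount?: number        // input (prompt) tokens
  candidatesTokenCount?: number    // output tokens across candidates
  totalTokenCount?: number
  thoughtsTokenCount?: number      // new: reasoning ("thinking") tokens
  cachedContentTokenCount?: number // new: input tokens read from cached content
}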
@@ -232,7 +232,9 @@ class WrappedModels {
  httpStatus: 200,
  usage: {
  inputTokens: response.usageMetadata?.promptTokenCount ?? 0,
- outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0
+ outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,
+ reasoningTokens: response.usageMetadata?.thoughtsTokenCount ?? 0,
+ cacheReadInputTokens: response.usageMetadata?.cachedContentTokenCount ?? 0
  },
  tools: availableTools,
  captureImmediate: posthogCaptureImmediate
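In the non-streaming path the change is purely additive: the wrapper copies two extra counters from response.usageMetadata into the usage object it reports. A self-contained sketch of that mapping, assuming the metadata shape above (the toPostHogUsage helper name exists only for this sketch; the package performs the mapping inline):

function toPostHogUsage(m?: {
  promptTokenCount?: number
  candidatesTokenCount?: number
  thoughtsTokenCount?: number
  cachedContentTokenCount?: number
}) {
  return {
    inputTokens: m?.promptTokenCount ?? 0,
    outputTokens: m?.candidatesTokenCount ?? 0,
    // New in 6.1.0: surfaced as $ai_reasoning_tokens / $ai_cache_read_input_tokens
    reasoningTokens: m?.thoughtsTokenCount ?? 0,
    cacheReadInputTokens: m?.cachedContentTokenCount ?? 0,
  }
}

Per the bundled sendEventToPosthog source, reasoningTokens and cacheReadInputTokens are only added to the event properties (as $ai_reasoning_tokens and $ai_cache_read_input_tokens) when they are non-zero, so events that lack these counters keep their previous shape.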
@@ -288,7 +290,9 @@ class WrappedModels {
  if (chunk.usageMetadata) {
  usage = {
  inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,
- outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0
+ outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,
+ reasoningTokens: chunk.usageMetadata.thoughtsTokenCount ?? 0,
+ cacheReadInputTokens: chunk.usageMetadata.cachedContentTokenCount ?? 0
  };
  }
  yield chunk;
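The streaming path mirrors this: every chunk that carries usageMetadata overwrites the accumulated usage snapshot, so the last such chunk (normally the final one) determines the token counts on the captured $ai_generation event. A hedged usage sketch; the @posthog/ai/gemini import path, model name, and key handling are assumptions for illustration, while PostHogGoogleGenAI, posthogDistinctId, and generateContentStream come from the wrapper shown in this diff:

import { PostHog } from 'posthog-node'
import { PostHogGoogleGenAI } from '@posthog/ai/gemini' // assumed entry point

async function main() {
  const posthog = new PostHog('<ph_project_api_key>')
  const gemini = new PostHogGoogleGenAI({ apiKey: process.env.GEMINI_API_KEY, posthog })

  const stream = gemini.models.generateContentStream({
    model: 'gemini-2.5-flash', // assumed model name
    contents: 'Summarize the release notes for me.',
    posthogDistinctId: 'user-123',
  })

  for await (const chunk of stream) {
    // Chunks pass through unchanged; the wrapper records usage (now including
    // reasoningTokens and cacheReadInputTokens) from the last chunk that has
    // usageMetadata, then sends a single $ai_generation event.
    process.stdout.write(chunk.text ?? '')
  }

  await posthog.shutdown()
}

main()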
@@ -1 +1 @@
- {"version":3,"file":"index.mjs","sources":["../../src/utils.ts","../../src/gemini/index.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport { Buffer } from 'buffer'\nimport OpenAIOrignal from 'openai'\nimport AnthropicOriginal from '@anthropic-ai/sdk'\nimport type { ChatCompletionTool } from 'openai/resources/chat/completions'\nimport type { Tool as GeminiTool } from '@google/genai'\nimport type { FormattedMessage, FormattedContent, TokenUsage } from './types'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams\ntype ResponseCreateParams = OpenAIOrignal.Responses.ResponseCreateParams\ntype AnthropicTool = AnthropicOriginal.Tool\n\n// limit large outputs by truncating to 200kb (approx 200k bytes)\nexport const MAX_OUTPUT_SIZE = 200000\nconst STRING_FORMAT = 'utf8'\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n posthogCaptureImmediate?: boolean\n}\n\nexport interface CostOverride {\n inputCost: number\n outputCost: number\n}\n\nexport const getModelParams = (\n params: ((ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams) | null\n): Record<string, any> => {\n if (!params) {\n return {}\n }\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): FormattedMessage[] => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n } else if (provider === 'gemini') {\n return formatResponseGemini(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n const content: FormattedContent = []\n\n for (const choice of response.content ?? 
[]) {\n if (choice?.type === 'text' && choice?.text) {\n content.push({ type: 'text', text: choice.text })\n } else if (choice?.type === 'tool_use' && choice?.name && choice?.id) {\n content.push({\n type: 'function',\n id: choice.id,\n function: {\n name: choice.name,\n arguments: choice.input || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.choices) {\n for (const choice of response.choices) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n if (choice.message) {\n if (choice.message.role) {\n role = choice.message.role\n }\n\n if (choice.message.content) {\n content.push({ type: 'text', text: choice.message.content })\n }\n\n if (choice.message.tool_calls) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'function',\n id: toolCall.id,\n function: {\n name: toolCall.function.name,\n arguments: toolCall.function.arguments,\n },\n })\n }\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n }\n\n // Handle Responses API format\n if (response.output) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n for (const item of response.output) {\n if (item.type === 'message') {\n role = item.role\n\n if (item.content && Array.isArray(item.content)) {\n for (const contentItem of item.content) {\n if (contentItem.type === 'output_text' && contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.type === 'input_image' && contentItem.image_url) {\n content.push({\n type: 'image',\n image: contentItem.image_url,\n })\n }\n }\n } else if (item.content) {\n content.push({ type: 'text', text: String(item.content) })\n }\n } else if (item.type === 'function_call') {\n content.push({\n type: 'function',\n id: item.call_id || item.id || '',\n function: {\n name: item.name,\n arguments: item.arguments || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n\n return output\n}\n\nexport const formatResponseGemini = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.candidates && Array.isArray(response.candidates)) {\n for (const candidate of response.candidates) {\n if (candidate.content && candidate.content.parts) {\n const content: FormattedContent = []\n\n for (const part of candidate.content.parts) {\n if (part.text) {\n content.push({ type: 'text', text: part.text })\n } else if (part.functionCall) {\n content.push({\n type: 'function',\n function: {\n name: part.functionCall.name,\n arguments: part.functionCall.args,\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n } else if (candidate.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: candidate.text }],\n })\n }\n }\n } else if (response.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: response.text }],\n })\n }\n\n return output\n}\n\nexport const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {\n if (provider == 'anthropic') {\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = 
(params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n }\n return params.messages\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? null : input\n}\n\nexport const truncate = (str: string): string => {\n try {\n const buffer = Buffer.from(str, STRING_FORMAT)\n if (buffer.length <= MAX_OUTPUT_SIZE) {\n return str\n }\n const truncatedBuffer = buffer.slice(0, MAX_OUTPUT_SIZE)\n return `${truncatedBuffer.toString(STRING_FORMAT)}... [truncated]`\n } catch (error) {\n console.error('Error truncating, likely not a string')\n return str\n }\n}\n\n/**\n * Extract available tool calls from the request parameters.\n * These are the tools provided to the LLM, not the tool calls in the response.\n */\nexport const extractAvailableToolCalls = (\n provider: string,\n params: any\n): ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null => {\n if (provider === 'anthropic') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'gemini') {\n if (params.config && params.config.tools) {\n return params.config.tools\n }\n\n return null\n } else if (provider === 'openai') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'vercel') {\n // Vercel AI SDK stores tools in params.mode.tools when mode type is 'regular'\n if (params.mode?.type === 'regular' && params.mode.tools) {\n return params.mode.tools\n }\n\n return null\n }\n\n return null\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: TokenUsage\n params: (ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams\n isError?: boolean\n error?: string\n tools?: ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null\n captureImmediate?: boolean\n}\n\nfunction sanitizeValues(obj: any): any {\n if (obj === undefined || obj === null) {\n return obj\n }\n const jsonSafe = JSON.parse(JSON.stringify(obj))\n if (typeof jsonSafe === 'string') {\n return Buffer.from(jsonSafe, STRING_FORMAT).toString(STRING_FORMAT)\n } else if (Array.isArray(jsonSafe)) {\n return jsonSafe.map(sanitizeValues)\n } else if (jsonSafe && typeof jsonSafe === 'object') {\n return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]))\n }\n return jsonSafe\n}\n\nexport const sendEventToPosthog = async ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n isError = false,\n error,\n tools,\n captureImmediate = false,\n}: SendEventToPosthogParams): Promise<void> => {\n if (!client.capture) {\n return Promise.resolve()\n }\n // sanitize input and output for UTF-8 validity\n const safeInput = sanitizeValues(input)\n const safeOutput = sanitizeValues(output)\n const safeError = sanitizeValues(error)\n\n let errorData = {}\n if (isError) {\n errorData = {\n $ai_is_error: true,\n $ai_error: safeError,\n }\n }\n let costOverrideData = {}\n if (params.posthogCostOverride) {\n const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0)\n const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 
0)\n costOverrideData = {\n $ai_input_cost_usd: inputCostUSD,\n $ai_output_cost_usd: outputCostUSD,\n $ai_total_cost_usd: inputCostUSD + outputCostUSD,\n }\n }\n\n const additionalTokenValues = {\n ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),\n ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),\n ...(usage.cacheCreationInputTokens ? { $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens } : {}),\n }\n\n const properties = {\n $ai_provider: params.posthogProviderOverride ?? provider,\n $ai_model: params.posthogModelOverride ?? model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n ...additionalTokenValues,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n ...(tools ? { $ai_tools: tools } : {}),\n ...errorData,\n ...costOverrideData,\n }\n\n const event = {\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties,\n groups: params.posthogGroups,\n }\n\n if (captureImmediate) {\n // await capture promise to send single event in serverless environments\n await client.captureImmediate(event)\n } else {\n client.capture(event)\n }\n}\n","import { GoogleGenAI } from '@google/genai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { MonitoringParams, sendEventToPosthog, extractAvailableToolCalls, formatResponseGemini } from '../utils'\n\n// Types from @google/genai\ntype GenerateContentRequest = {\n model: string\n contents: any\n config?: any\n [key: string]: any\n}\n\ntype GenerateContentResponse = {\n text?: string\n candidates?: any[]\n usageMetadata?: {\n promptTokenCount?: number\n candidatesTokenCount?: number\n totalTokenCount?: number\n }\n [key: string]: any\n}\n\ninterface MonitoringGeminiConfig {\n apiKey?: string\n vertexai?: boolean\n project?: string\n location?: string\n apiVersion?: string\n posthog: PostHog\n}\n\nexport class PostHogGoogleGenAI {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n public models: WrappedModels\n\n constructor(config: MonitoringGeminiConfig) {\n const { posthog, ...geminiConfig } = config\n this.phClient = posthog\n this.client = new GoogleGenAI(geminiConfig)\n this.models = new WrappedModels(this.client, this.phClient)\n }\n}\n\nexport class WrappedModels {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n\n constructor(client: GoogleGenAI, phClient: PostHog) {\n this.client = client\n this.phClient = phClient\n }\n\n public async generateContent(params: GenerateContentRequest & MonitoringParams): Promise<GenerateContentResponse> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? 
uuidv4()\n const startTime = Date.now()\n\n try {\n const response = await this.client.models.generateContent(geminiParams)\n const latency = (Date.now() - startTime) / 1000\n\n const availableTools = extractAvailableToolCalls('gemini', geminiParams)\n\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: formatResponseGemini(response),\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: 200,\n usage: {\n inputTokens: response.usageMetadata?.promptTokenCount ?? 0,\n outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,\n },\n tools: availableTools,\n captureImmediate: posthogCaptureImmediate,\n })\n\n return response\n } catch (error: any) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: error?.status ?? 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n public async *generateContentStream(\n params: GenerateContentRequest & MonitoringParams\n ): AsyncGenerator<any, void, unknown> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n let accumulatedContent = ''\n let usage = {\n inputTokens: 0,\n outputTokens: 0,\n }\n\n try {\n const stream = await this.client.models.generateContentStream(geminiParams)\n\n for await (const chunk of stream) {\n if (chunk.text) {\n accumulatedContent += chunk.text\n }\n if (chunk.usageMetadata) {\n usage = {\n inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,\n outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,\n }\n }\n yield chunk\n }\n\n const latency = (Date.now() - startTime) / 1000\n\n const availableTools = extractAvailableToolCalls('gemini', geminiParams)\n\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: 200,\n usage,\n tools: availableTools,\n captureImmediate: posthogCaptureImmediate,\n })\n } catch (error: any) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: error?.status ?? 
500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n private formatInput(contents: any): Array<{ role: string; content: string }> {\n if (typeof contents === 'string') {\n return [{ role: 'user', content: contents }]\n }\n\n if (Array.isArray(contents)) {\n return contents.map((item) => {\n if (typeof item === 'string') {\n return { role: 'user', content: item }\n }\n if (item && typeof item === 'object') {\n if (item.text) {\n return { role: item.role || 'user', content: item.text }\n }\n if (item.content) {\n return { role: item.role || 'user', content: item.content }\n }\n }\n return { role: 'user', content: String(item) }\n })\n }\n\n if (contents && typeof contents === 'object') {\n if (contents.text) {\n return [{ role: 'user', content: contents.text }]\n }\n if (contents.content) {\n return [{ role: 'user', content: contents.content }]\n }\n }\n\n return [{ role: 'user', content: String(contents) }]\n }\n}\n\nexport default PostHogGoogleGenAI\nexport { PostHogGoogleGenAI as Gemini }\n"],"names":["STRING_FORMAT","getModelParams","params","modelParams","paramKeys","key","undefined","formatResponseGemini","response","output","candidates","Array","isArray","candidate","content","parts","part","text","push","type","functionCall","function","name","arguments","args","length","role","withPrivacyMode","client","privacyMode","input","privacy_mode","extractAvailableToolCalls","provider","config","tools","sanitizeValues","obj","jsonSafe","JSON","parse","stringify","Buffer","from","toString","map","Object","fromEntries","entries","k","v","sendEventToPosthog","distinctId","traceId","model","latency","baseURL","httpStatus","usage","isError","error","captureImmediate","capture","Promise","resolve","safeInput","safeOutput","safeError","errorData","$ai_is_error","$ai_error","costOverrideData","posthogCostOverride","inputCostUSD","inputCost","inputTokens","outputCostUSD","outputCost","outputTokens","$ai_input_cost_usd","$ai_output_cost_usd","$ai_total_cost_usd","additionalTokenValues","reasoningTokens","$ai_reasoning_tokens","cacheReadInputTokens","$ai_cache_read_input_tokens","cacheCreationInputTokens","$ai_cache_creation_input_tokens","properties","$ai_provider","posthogProviderOverride","$ai_model","posthogModelOverride","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","$ai_output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","$ai_tools","event","groups","posthogGroups","PostHogGoogleGenAI","constructor","posthog","geminiConfig","phClient","GoogleGenAI","models","WrappedModels","generateContent","posthogDistinctId","posthogTraceId","posthogCaptureImmediate","geminiParams","uuidv4","startTime","Date","now","availableTools","formatInput","contents","usageMetadata","promptTokenCount","candidatesTokenCount","status","generateContentStream","accumulatedContent","stream","chunk","item","String"],"mappings":";;;;AAeA,MAAMA,aAAa,GAAG,MAAM;AAmBrB,MAAMC,cAAc,GACzBC,MAAiH,IACzF;EACxB,IAAI,CAACA,MAAM,EAAE;AACX,IAAA,OAAO,EAAE;AACX,EAAA;EACA,MAAMC,WAAgC,GAAG,EAAE;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAM,CAASG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAM,CAASG,GAAG,CAAC;A
ACzC,IAAA;AACF,EAAA;AACA,EAAA,OAAOF,WAAW;AACpB,CAAC;AAwIM,MAAMI,oBAAoB,GAAIC,QAAa,IAAyB;EACzE,MAAMC,MAA0B,GAAG,EAAE;AAErC,EAAA,IAAID,QAAQ,CAACE,UAAU,IAAIC,KAAK,CAACC,OAAO,CAACJ,QAAQ,CAACE,UAAU,CAAC,EAAE;AAC7D,IAAA,KAAK,MAAMG,SAAS,IAAIL,QAAQ,CAACE,UAAU,EAAE;MAC3C,IAAIG,SAAS,CAACC,OAAO,IAAID,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;QAChD,MAAMD,OAAyB,GAAG,EAAE;QAEpC,KAAK,MAAME,IAAI,IAAIH,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;UAC1C,IAAIC,IAAI,CAACC,IAAI,EAAE;YACbH,OAAO,CAACI,IAAI,CAAC;AAAEC,cAAAA,IAAI,EAAE,MAAM;cAAEF,IAAI,EAAED,IAAI,CAACC;AAAK,aAAC,CAAC;AACjD,UAAA,CAAC,MAAM,IAAID,IAAI,CAACI,YAAY,EAAE;YAC5BN,OAAO,CAACI,IAAI,CAAC;AACXC,cAAAA,IAAI,EAAE,UAAU;AAChBE,cAAAA,QAAQ,EAAE;AACRC,gBAAAA,IAAI,EAAEN,IAAI,CAACI,YAAY,CAACE,IAAI;AAC5BC,gBAAAA,SAAS,EAAEP,IAAI,CAACI,YAAY,CAACI;AAC/B;AACF,aAAC,CAAC;AACJ,UAAA;AACF,QAAA;AAEA,QAAA,IAAIV,OAAO,CAACW,MAAM,GAAG,CAAC,EAAE;UACtBhB,MAAM,CAACS,IAAI,CAAC;AACVQ,YAAAA,IAAI,EAAE,WAAW;AACjBZ,YAAAA;AACF,WAAC,CAAC;AACJ,QAAA;AACF,MAAA,CAAC,MAAM,IAAID,SAAS,CAACI,IAAI,EAAE;QACzBR,MAAM,CAACS,IAAI,CAAC;AACVQ,UAAAA,IAAI,EAAE,WAAW;AACjBZ,UAAAA,OAAO,EAAE,CAAC;AAAEK,YAAAA,IAAI,EAAE,MAAM;YAAEF,IAAI,EAAEJ,SAAS,CAACI;WAAM;AAClD,SAAC,CAAC;AACJ,MAAA;AACF,IAAA;AACF,EAAA,CAAC,MAAM,IAAIT,QAAQ,CAACS,IAAI,EAAE;IACxBR,MAAM,CAACS,IAAI,CAAC;AACVQ,MAAAA,IAAI,EAAE,WAAW;AACjBZ,MAAAA,OAAO,EAAE,CAAC;AAAEK,QAAAA,IAAI,EAAE,MAAM;QAAEF,IAAI,EAAET,QAAQ,CAACS;OAAM;AACjD,KAAC,CAAC;AACJ,EAAA;AAEA,EAAA,OAAOR,MAAM;AACf,CAAC;AAcM,MAAMkB,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAU;EACzF,OAAQF,MAAM,CAASG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK;AACnE,CAAC;;AAgBD;AACA;AACA;AACA;AACO,MAAME,yBAAyB,GAAGA,CACvCC,QAAgB,EAChB/B,MAAW,KACsD;EAO/B;IAChC,IAAIA,MAAM,CAACgC,MAAM,IAAIhC,MAAM,CAACgC,MAAM,CAACC,KAAK,EAAE;AACxC,MAAA,OAAOjC,MAAM,CAACgC,MAAM,CAACC,KAAK;AAC5B,IAAA;AAEA,IAAA,OAAO,IAAI;AACb,EAAA;AAgBF,CAAC;AAqBD,SAASC,cAAcA,CAACC,GAAQ,EAAO;AACrC,EAAA,IAAIA,GAAG,KAAK/B,SAAS,IAAI+B,GAAG,KAAK,IAAI,EAAE;AACrC,IAAA,OAAOA,GAAG;AACZ,EAAA;AACA,EAAA,MAAMC,QAAQ,GAAGC,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACJ,GAAG,CAAC,CAAC;AAChD,EAAA,IAAI,OAAOC,QAAQ,KAAK,QAAQ,EAAE;AAChC,IAAA,OAAOI,MAAM,CAACC,IAAI,CAACL,QAAQ,EAAEtC,aAAa,CAAC,CAAC4C,QAAQ,CAAC5C,aAAa,CAAC;EACrE,CAAC,MAAM,IAAIW,KAAK,CAACC,OAAO,CAAC0B,QAAQ,CAAC,EAAE;AAClC,IAAA,OAAOA,QAAQ,CAACO,GAAG,CAACT,cAAc,CAAC;EACrC,CAAC,MAAM,IAAIE,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AACnD,IAAA,OAAOQ,MAAM,CAACC,WAAW,CAACD,MAAM,CAACE,OAAO,CAACV,QAAQ,CAAC,CAACO,GAAG,CAAC,CAAC,CAACI,CAAC,EAAEC,CAAC,CAAC,KAAK,CAACD,CAAC,EAAEb,cAAc,CAACc,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7F,EAAA;AACA,EAAA,OAAOZ,QAAQ;AACjB;AAEO,MAAMa,kBAAkB,GAAG,OAAO;EACvCvB,MAAM;EACNwB,UAAU;EACVC,OAAO;EACPC,KAAK;EACLrB,QAAQ;EACRH,KAAK;EACLrB,MAAM;EACN8C,OAAO;EACPC,OAAO;EACPtD,MAAM;AACNuD,EAAAA,UAAU,GAAG,GAAG;EAChBC,KAAK,GAAG,EAAE;AACVC,EAAAA,OAAO,GAAG,KAAK;EACfC,KAAK;EACLzB,KAAK;AACL0B,EAAAA,gBAAgB,GAAG;AACK,CAAC,KAAoB;AAC7C,EAAA,IAAI,CAACjC,MAAM,CAACkC,OAAO,EAAE;AACnB,IAAA,OAAOC,OAAO,CAACC,OAAO,EAAE;AAC1B,EAAA;AACA;AACA,EAAA,MAAMC,SAAS,GAAG7B,cAAc,CAACN,KAAK,CAAC;AACvC,EAAA,MAAMoC,UAAU,GAAG9B,cAAc,CAAC3B,MAAM,CAAC;AACzC,EAAA,MAAM0D,SAAS,GAAG/B,cAAc,CAACwB,KAAK,CAAC;EAEvC,IAAIQ,SAAS,GAAG,EAAE;AAClB,EAAA,IAAIT,OAAO,EAAE;AACXS,IAAAA,SAAS,GAAG;AACVC,MAAAA,YAAY,EAAE,IAAI;AAClBC,MAAAA,SAAS,EAAEH;KACZ;AACH,EAAA;EACA,IAAII,gBAAgB,GAAG,EAAE;EACzB,IAAIrE,MAAM,CAACsE,mBAAmB,EAAE;AAC9B,IAAA,MAAMC,YAAY,GAAG,CAACvE,MAAM,CAACsE,mBAAmB,CAACE,SAAS,IAAI,CAAC,KAAKhB,KAAK,CAACiB,WAAW,IAAI,CAAC,CAAC;AAC3F,IAAA,MAAMC,aAAa,GAAG,CAAC1E,MAAM,CAACsE,mBAAmB,CAACK,UAAU,IAAI,CAAC,KAAKnB,KAAK,CAACoB,YAAY,IAAI,CAAC,CAAC;AAC9FP,IAAAA,gBAAgB,GAAG;AACjBQ,MAAAA,kBAAkB,EAAEN,YA
AY;AAChCO,MAAAA,mBAAmB,EAAEJ,aAAa;MAClCK,kBAAkB,EAAER,YAAY,GAAGG;KACpC;AACH,EAAA;AAEA,EAAA,MAAMM,qBAAqB,GAAG;IAC5B,IAAIxB,KAAK,CAACyB,eAAe,GAAG;MAAEC,oBAAoB,EAAE1B,KAAK,CAACyB;KAAiB,GAAG,EAAE,CAAC;IACjF,IAAIzB,KAAK,CAAC2B,oBAAoB,GAAG;MAAEC,2BAA2B,EAAE5B,KAAK,CAAC2B;KAAsB,GAAG,EAAE,CAAC;IAClG,IAAI3B,KAAK,CAAC6B,wBAAwB,GAAG;MAAEC,+BAA+B,EAAE9B,KAAK,CAAC6B;KAA0B,GAAG,EAAE;GAC9G;AAED,EAAA,MAAME,UAAU,GAAG;AACjBC,IAAAA,YAAY,EAAExF,MAAM,CAACyF,uBAAuB,IAAI1D,QAAQ;AACxD2D,IAAAA,SAAS,EAAE1F,MAAM,CAAC2F,oBAAoB,IAAIvC,KAAK;AAC/CwC,IAAAA,oBAAoB,EAAE7F,cAAc,CAACC,MAAM,CAAC;AAC5C6F,IAAAA,SAAS,EAAEpE,eAAe,CAACC,MAAM,EAAE1B,MAAM,CAAC8F,kBAAkB,IAAI,KAAK,EAAE/B,SAAS,CAAC;AACjFgC,IAAAA,kBAAkB,EAAEtE,eAAe,CAACC,MAAM,EAAE1B,MAAM,CAAC8F,kBAAkB,IAAI,KAAK,EAAE9B,UAAU,CAAC;AAC3FgC,IAAAA,eAAe,EAAEzC,UAAU;AAC3B0C,IAAAA,gBAAgB,EAAEzC,KAAK,CAACiB,WAAW,IAAI,CAAC;AACxCyB,IAAAA,iBAAiB,EAAE1C,KAAK,CAACoB,YAAY,IAAI,CAAC;AAC1C,IAAA,GAAGI,qBAAqB;AACxBmB,IAAAA,WAAW,EAAE9C,OAAO;AACpB+C,IAAAA,YAAY,EAAEjD,OAAO;AACrBkD,IAAAA,YAAY,EAAE/C,OAAO;IACrB,GAAGtD,MAAM,CAACsG,iBAAiB;AAC3B,IAAA,IAAIpD,UAAU,GAAG,EAAE,GAAG;AAAEqD,MAAAA,uBAAuB,EAAE;AAAM,KAAC,CAAC;AACzD,IAAA,IAAItE,KAAK,GAAG;AAAEuE,MAAAA,SAAS,EAAEvE;KAAO,GAAG,EAAE,CAAC;AACtC,IAAA,GAAGiC,SAAS;IACZ,GAAGG;GACJ;AAED,EAAA,MAAMoC,KAAK,GAAG;IACZvD,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCsD,IAAAA,KAAK,EAAE,gBAAgB;IACvBlB,UAAU;IACVmB,MAAM,EAAE1G,MAAM,CAAC2G;GAChB;AAED,EAAA,IAAIhD,gBAAgB,EAAE;AACpB;AACA,IAAA,MAAMjC,MAAM,CAACiC,gBAAgB,CAAC8C,KAAK,CAAC;AACtC,EAAA,CAAC,MAAM;AACL/E,IAAAA,MAAM,CAACkC,OAAO,CAAC6C,KAAK,CAAC;AACvB,EAAA;AACF,CAAC;;ACraD;;AA4BO,MAAMG,kBAAkB,CAAC;EAK9BC,WAAWA,CAAC7E,MAA8B,EAAE;IAC1C,MAAM;MAAE8E,OAAO;MAAE,GAAGC;AAAa,KAAC,GAAG/E,MAAM;IAC3C,IAAI,CAACgF,QAAQ,GAAGF,OAAO;AACvB,IAAA,IAAI,CAACpF,MAAM,GAAG,IAAIuF,WAAW,CAACF,YAAY,CAAC;AAC3C,IAAA,IAAI,CAACG,MAAM,GAAG,IAAIC,aAAa,CAAC,IAAI,CAACzF,MAAM,EAAE,IAAI,CAACsF,QAAQ,CAAC;AAC7D,EAAA;AACF;AAEO,MAAMG,aAAa,CAAC;AAIzBN,EAAAA,WAAWA,CAACnF,MAAmB,EAAEsF,QAAiB,EAAE;IAClD,IAAI,CAACtF,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACsF,QAAQ,GAAGA,QAAQ;AAC1B,EAAA;EAEA,MAAaI,eAAeA,CAACpH,MAAiD,EAAoC;IAChH,MAAM;MACJqH,iBAAiB;MACjBC,cAAc;MACdhB,iBAAiB;MACjBK,aAAa;MACbY,uBAAuB;MACvB,GAAGC;AACL,KAAC,GAAGxH,MAAM;AAEV,IAAA,MAAMmD,OAAO,GAAGmE,cAAc,IAAIG,EAAM,EAAE;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;IAE5B,IAAI;AACF,MAAA,MAAMtH,QAAQ,GAAG,MAAM,IAAI,CAACoB,MAAM,CAACwF,MAAM,CAACE,eAAe,CAACI,YAAY,CAAC;MACvE,MAAMnE,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAE/C,MAAA,MAAMG,cAAc,GAAG/F,yBAAyB,CAAC,QAAQ,EAAE0F,YAAY,CAAC;AAExE,MAAA,MAAMvE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAEF,oBAAoB,CAACC,QAAQ,CAAC;QACtC+C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAE,GAAG;AACfC,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAEnE,QAAQ,CAAC0H,aAAa,EAAEC,gBAAgB,IAAI,CAAC;AAC1DrD,UAAAA,YAAY,EAAEtE,QAAQ,CAAC0H,aAAa,EAAEE,oBAAoB,IAAI;SAC/D;AACDjG,QAAAA,KAAK,EAAE4F,cAAc;AACrBlE,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AAEF,MAAA,OAAOjH,QAAQ;IACjB,CAAC,CAAC,OAAOoD,KAAU,EAAE;MACnB,MAAML,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,MAAA,MAAMzE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,EAAE;QACV8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AA
CpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAEG,KAAK,EAAEyE,MAAM,IAAI,GAAG;AAChC3E,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE;SACf;AACDnB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAErB,IAAI,CAACE,SAAS,CAACmB,KAAK,CAAC;AAC5BC,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AACF,MAAA,MAAM7D,KAAK;AACb,IAAA;AACF,EAAA;EAEA,OAAc0E,qBAAqBA,CACjCpI,MAAiD,EACb;IACpC,MAAM;MACJqH,iBAAiB;MACjBC,cAAc;MACdhB,iBAAiB;MACjBK,aAAa;MACbY,uBAAuB;MACvB,GAAGC;AACL,KAAC,GAAGxH,MAAM;AAEV,IAAA,MAAMmD,OAAO,GAAGmE,cAAc,IAAIG,EAAM,EAAE;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;IAC5B,IAAIS,kBAAkB,GAAG,EAAE;AAC3B,IAAA,IAAI7E,KAAK,GAAG;AACViB,MAAAA,WAAW,EAAE,CAAC;AACdG,MAAAA,YAAY,EAAE;KACf;IAED,IAAI;AACF,MAAA,MAAM0D,MAAM,GAAG,MAAM,IAAI,CAAC5G,MAAM,CAACwF,MAAM,CAACkB,qBAAqB,CAACZ,YAAY,CAAC;AAE3E,MAAA,WAAW,MAAMe,KAAK,IAAID,MAAM,EAAE;QAChC,IAAIC,KAAK,CAACxH,IAAI,EAAE;UACdsH,kBAAkB,IAAIE,KAAK,CAACxH,IAAI;AAClC,QAAA;QACA,IAAIwH,KAAK,CAACP,aAAa,EAAE;AACvBxE,UAAAA,KAAK,GAAG;AACNiB,YAAAA,WAAW,EAAE8D,KAAK,CAACP,aAAa,CAACC,gBAAgB,IAAI,CAAC;AACtDrD,YAAAA,YAAY,EAAE2D,KAAK,CAACP,aAAa,CAACE,oBAAoB,IAAI;WAC3D;AACH,QAAA;AACA,QAAA,MAAMK,KAAK;AACb,MAAA;MAEA,MAAMlF,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAE/C,MAAA,MAAMG,cAAc,GAAG/F,yBAAyB,CAAC,QAAQ,EAAE0F,YAAY,CAAC;AAExE,MAAA,MAAMvE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,CAAC;AAAEK,UAAAA,OAAO,EAAEyH,kBAAkB;AAAE7G,UAAAA,IAAI,EAAE;AAAY,SAAC,CAAC;QAC5D6B,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAE,GAAG;QACfC,KAAK;AACLvB,QAAAA,KAAK,EAAE4F,cAAc;AACrBlE,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;IACJ,CAAC,CAAC,OAAO7D,KAAU,EAAE;MACnB,MAAML,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,MAAA,MAAMzE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,EAAE;QACV8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAEG,KAAK,EAAEyE,MAAM,IAAI,GAAG;AAChC3E,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE;SACf;AACDnB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAErB,IAAI,CAACE,SAAS,CAACmB,KAAK,CAAC;AAC5BC,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AACF,MAAA,MAAM7D,KAAK;AACb,IAAA;AACF,EAAA;EAEQoE,WAAWA,CAACC,QAAa,EAA4C;AAC3E,IAAA,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AAChC,MAAA,OAAO,CAAC;AAAEvG,QAAAA,IAAI,EAAE,MAAM;AAAEZ,QAAAA,OAAO,EAAEmH;AAAS,OAAC,CAAC;AAC9C,IAAA;AAEA,IAAA,IAAItH,KAAK,CAACC,OAAO,CAACqH,QAAQ,CAAC,EAAE;AAC3B,MAAA,OAAOA,QAAQ,CAACpF,GAAG,CAAE6F,IAAI,IAAK;AAC5B,QAAA,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UAC5B,OAAO;AAAEhH,YAAAA,IAAI,EAAE,MAAM;AAAEZ,YAAAA,OAAO,EAAE4H;WAAM;AACxC,QAAA;AACA,QAAA,IAAIA,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UACpC,IAAIA,IAAI,CAACzH,IAAI,EAAE;YACb,OAAO;AAAES,cAAAA,IAAI,EAAEgH,IAAI,CAAChH,IAAI,IAAI,MAAM;cAAEZ,OAAO,EAAE4H,IAAI,CAACzH;aAAM;AAC1D,UAAA;UACA,IAAIyH,IAAI,CAAC5H,OAAO,EAAE;YAChB,OAAO;AAAEY,cAAAA,IAAI,EAAEgH,IAAI,CAAChH,IAAI,IAAI,MAAM;cAAEZ,OAAO,EAAE4H,IAAI,CAAC5H;aAAS;AAC7D,UAAA;AACF,QAAA;QACA,OAAO;AAAEY,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAE6H,MAAM,CAACD,IAAI;SAAG;AAChD,MAAA,CAAC,CAAC;AACJ,IAAA;AAEA,IAAA,IAAIT,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;MAC5C,IAAIA,QAAQ,CAAChH,IAAI,EAAE;AACjB,QAAA,OAAO,CAAC;AAAES,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAEmH,QAAQ,CAAChH;AAAK
,SAAC,CAAC;AACnD,MAAA;MACA,IAAIgH,QAAQ,CAACnH,OAAO,EAAE;AACpB,QAAA,OAAO,CAAC;AAAEY,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAEmH,QAAQ,CAACnH;AAAQ,SAAC,CAAC;AACtD,MAAA;AACF,IAAA;AAEA,IAAA,OAAO,CAAC;AAAEY,MAAAA,IAAI,EAAE,MAAM;MAAEZ,OAAO,EAAE6H,MAAM,CAACV,QAAQ;AAAE,KAAC,CAAC;AACtD,EAAA;AACF;;;;"}
+ {"version":3,"file":"index.mjs","sources":["../../src/utils.ts","../../src/gemini/index.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport { Buffer } from 'buffer'\nimport OpenAIOrignal from 'openai'\nimport AnthropicOriginal from '@anthropic-ai/sdk'\nimport type { ChatCompletionTool } from 'openai/resources/chat/completions'\nimport type { Tool as GeminiTool } from '@google/genai'\nimport type { FormattedMessage, FormattedContent, TokenUsage } from './types'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams\ntype ResponseCreateParams = OpenAIOrignal.Responses.ResponseCreateParams\ntype AnthropicTool = AnthropicOriginal.Tool\n\n// limit large outputs by truncating to 200kb (approx 200k bytes)\nexport const MAX_OUTPUT_SIZE = 200000\nconst STRING_FORMAT = 'utf8'\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n posthogCaptureImmediate?: boolean\n}\n\nexport interface CostOverride {\n inputCost: number\n outputCost: number\n}\n\nexport const getModelParams = (\n params: ((ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams) | null\n): Record<string, any> => {\n if (!params) {\n return {}\n }\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): FormattedMessage[] => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n } else if (provider === 'gemini') {\n return formatResponseGemini(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n const content: FormattedContent = []\n\n for (const choice of response.content ?? 
[]) {\n if (choice?.type === 'text' && choice?.text) {\n content.push({ type: 'text', text: choice.text })\n } else if (choice?.type === 'tool_use' && choice?.name && choice?.id) {\n content.push({\n type: 'function',\n id: choice.id,\n function: {\n name: choice.name,\n arguments: choice.input || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.choices) {\n for (const choice of response.choices) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n if (choice.message) {\n if (choice.message.role) {\n role = choice.message.role\n }\n\n if (choice.message.content) {\n content.push({ type: 'text', text: choice.message.content })\n }\n\n if (choice.message.tool_calls) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'function',\n id: toolCall.id,\n function: {\n name: toolCall.function.name,\n arguments: toolCall.function.arguments,\n },\n })\n }\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n }\n\n // Handle Responses API format\n if (response.output) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n for (const item of response.output) {\n if (item.type === 'message') {\n role = item.role\n\n if (item.content && Array.isArray(item.content)) {\n for (const contentItem of item.content) {\n if (contentItem.type === 'output_text' && contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.type === 'input_image' && contentItem.image_url) {\n content.push({\n type: 'image',\n image: contentItem.image_url,\n })\n }\n }\n } else if (item.content) {\n content.push({ type: 'text', text: String(item.content) })\n }\n } else if (item.type === 'function_call') {\n content.push({\n type: 'function',\n id: item.call_id || item.id || '',\n function: {\n name: item.name,\n arguments: item.arguments || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n\n return output\n}\n\nexport const formatResponseGemini = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.candidates && Array.isArray(response.candidates)) {\n for (const candidate of response.candidates) {\n if (candidate.content && candidate.content.parts) {\n const content: FormattedContent = []\n\n for (const part of candidate.content.parts) {\n if (part.text) {\n content.push({ type: 'text', text: part.text })\n } else if (part.functionCall) {\n content.push({\n type: 'function',\n function: {\n name: part.functionCall.name,\n arguments: part.functionCall.args,\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n } else if (candidate.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: candidate.text }],\n })\n }\n }\n } else if (response.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: response.text }],\n })\n }\n\n return output\n}\n\nexport const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {\n if (provider == 'anthropic') {\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = 
(params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n }\n return params.messages\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? null : input\n}\n\nexport const truncate = (str: string): string => {\n try {\n const buffer = Buffer.from(str, STRING_FORMAT)\n if (buffer.length <= MAX_OUTPUT_SIZE) {\n return str\n }\n const truncatedBuffer = buffer.slice(0, MAX_OUTPUT_SIZE)\n return `${truncatedBuffer.toString(STRING_FORMAT)}... [truncated]`\n } catch (error) {\n console.error('Error truncating, likely not a string')\n return str\n }\n}\n\n/**\n * Extract available tool calls from the request parameters.\n * These are the tools provided to the LLM, not the tool calls in the response.\n */\nexport const extractAvailableToolCalls = (\n provider: string,\n params: any\n): ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null => {\n if (provider === 'anthropic') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'gemini') {\n if (params.config && params.config.tools) {\n return params.config.tools\n }\n\n return null\n } else if (provider === 'openai') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'vercel') {\n // Vercel AI SDK stores tools in params.mode.tools when mode type is 'regular'\n if (params.mode?.type === 'regular' && params.mode.tools) {\n return params.mode.tools\n }\n\n return null\n }\n\n return null\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: TokenUsage\n params: (ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams\n isError?: boolean\n error?: string\n tools?: ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null\n captureImmediate?: boolean\n}\n\nfunction sanitizeValues(obj: any): any {\n if (obj === undefined || obj === null) {\n return obj\n }\n const jsonSafe = JSON.parse(JSON.stringify(obj))\n if (typeof jsonSafe === 'string') {\n return Buffer.from(jsonSafe, STRING_FORMAT).toString(STRING_FORMAT)\n } else if (Array.isArray(jsonSafe)) {\n return jsonSafe.map(sanitizeValues)\n } else if (jsonSafe && typeof jsonSafe === 'object') {\n return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]))\n }\n return jsonSafe\n}\n\nexport const sendEventToPosthog = async ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n isError = false,\n error,\n tools,\n captureImmediate = false,\n}: SendEventToPosthogParams): Promise<void> => {\n if (!client.capture) {\n return Promise.resolve()\n }\n // sanitize input and output for UTF-8 validity\n const safeInput = sanitizeValues(input)\n const safeOutput = sanitizeValues(output)\n const safeError = sanitizeValues(error)\n\n let errorData = {}\n if (isError) {\n errorData = {\n $ai_is_error: true,\n $ai_error: safeError,\n }\n }\n let costOverrideData = {}\n if (params.posthogCostOverride) {\n const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0)\n const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 
0)\n costOverrideData = {\n $ai_input_cost_usd: inputCostUSD,\n $ai_output_cost_usd: outputCostUSD,\n $ai_total_cost_usd: inputCostUSD + outputCostUSD,\n }\n }\n\n const additionalTokenValues = {\n ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),\n ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),\n ...(usage.cacheCreationInputTokens ? { $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens } : {}),\n }\n\n const properties = {\n $ai_provider: params.posthogProviderOverride ?? provider,\n $ai_model: params.posthogModelOverride ?? model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n ...additionalTokenValues,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n ...(tools ? { $ai_tools: tools } : {}),\n ...errorData,\n ...costOverrideData,\n }\n\n const event = {\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties,\n groups: params.posthogGroups,\n }\n\n if (captureImmediate) {\n // await capture promise to send single event in serverless environments\n await client.captureImmediate(event)\n } else {\n client.capture(event)\n }\n}\n","import { GoogleGenAI } from '@google/genai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { MonitoringParams, sendEventToPosthog, extractAvailableToolCalls, formatResponseGemini } from '../utils'\nimport type { TokenUsage } from '../types'\n\n// Types from @google/genai\ntype GenerateContentRequest = {\n model: string\n contents: any\n config?: any\n [key: string]: any\n}\n\ntype GenerateContentResponse = {\n text?: string\n candidates?: any[]\n usageMetadata?: {\n promptTokenCount?: number\n candidatesTokenCount?: number\n totalTokenCount?: number\n thoughtsTokenCount?: number\n cachedContentTokenCount?: number\n }\n [key: string]: any\n}\n\ninterface MonitoringGeminiConfig {\n apiKey?: string\n vertexai?: boolean\n project?: string\n location?: string\n apiVersion?: string\n posthog: PostHog\n}\n\nexport class PostHogGoogleGenAI {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n public models: WrappedModels\n\n constructor(config: MonitoringGeminiConfig) {\n const { posthog, ...geminiConfig } = config\n this.phClient = posthog\n this.client = new GoogleGenAI(geminiConfig)\n this.models = new WrappedModels(this.client, this.phClient)\n }\n}\n\nexport class WrappedModels {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n\n constructor(client: GoogleGenAI, phClient: PostHog) {\n this.client = client\n this.phClient = phClient\n }\n\n public async generateContent(params: GenerateContentRequest & MonitoringParams): Promise<GenerateContentResponse> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? 
uuidv4()\n const startTime = Date.now()\n\n try {\n const response = await this.client.models.generateContent(geminiParams)\n const latency = (Date.now() - startTime) / 1000\n\n const availableTools = extractAvailableToolCalls('gemini', geminiParams)\n\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: formatResponseGemini(response),\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: 200,\n usage: {\n inputTokens: response.usageMetadata?.promptTokenCount ?? 0,\n outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,\n reasoningTokens: response.usageMetadata?.thoughtsTokenCount ?? 0,\n cacheReadInputTokens: response.usageMetadata?.cachedContentTokenCount ?? 0,\n },\n tools: availableTools,\n captureImmediate: posthogCaptureImmediate,\n })\n\n return response\n } catch (error: any) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: error?.status ?? 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n public async *generateContentStream(\n params: GenerateContentRequest & MonitoringParams\n ): AsyncGenerator<any, void, unknown> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n let accumulatedContent = ''\n let usage: TokenUsage = {\n inputTokens: 0,\n outputTokens: 0,\n }\n\n try {\n const stream = await this.client.models.generateContentStream(geminiParams)\n\n for await (const chunk of stream) {\n if (chunk.text) {\n accumulatedContent += chunk.text\n }\n if (chunk.usageMetadata) {\n usage = {\n inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,\n outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,\n reasoningTokens: chunk.usageMetadata.thoughtsTokenCount ?? 0,\n cacheReadInputTokens: chunk.usageMetadata.cachedContentTokenCount ?? 0,\n }\n }\n yield chunk\n }\n\n const latency = (Date.now() - startTime) / 1000\n\n const availableTools = extractAvailableToolCalls('gemini', geminiParams)\n\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: 200,\n usage,\n tools: availableTools,\n captureImmediate: posthogCaptureImmediate,\n })\n } catch (error: any) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: error?.status ?? 
500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n private formatInput(contents: any): Array<{ role: string; content: string }> {\n if (typeof contents === 'string') {\n return [{ role: 'user', content: contents }]\n }\n\n if (Array.isArray(contents)) {\n return contents.map((item) => {\n if (typeof item === 'string') {\n return { role: 'user', content: item }\n }\n if (item && typeof item === 'object') {\n if (item.text) {\n return { role: item.role || 'user', content: item.text }\n }\n if (item.content) {\n return { role: item.role || 'user', content: item.content }\n }\n }\n return { role: 'user', content: String(item) }\n })\n }\n\n if (contents && typeof contents === 'object') {\n if (contents.text) {\n return [{ role: 'user', content: contents.text }]\n }\n if (contents.content) {\n return [{ role: 'user', content: contents.content }]\n }\n }\n\n return [{ role: 'user', content: String(contents) }]\n }\n}\n\nexport default PostHogGoogleGenAI\nexport { PostHogGoogleGenAI as Gemini }\n"],"names":["STRING_FORMAT","getModelParams","params","modelParams","paramKeys","key","undefined","formatResponseGemini","response","output","candidates","Array","isArray","candidate","content","parts","part","text","push","type","functionCall","function","name","arguments","args","length","role","withPrivacyMode","client","privacyMode","input","privacy_mode","extractAvailableToolCalls","provider","config","tools","sanitizeValues","obj","jsonSafe","JSON","parse","stringify","Buffer","from","toString","map","Object","fromEntries","entries","k","v","sendEventToPosthog","distinctId","traceId","model","latency","baseURL","httpStatus","usage","isError","error","captureImmediate","capture","Promise","resolve","safeInput","safeOutput","safeError","errorData","$ai_is_error","$ai_error","costOverrideData","posthogCostOverride","inputCostUSD","inputCost","inputTokens","outputCostUSD","outputCost","outputTokens","$ai_input_cost_usd","$ai_output_cost_usd","$ai_total_cost_usd","additionalTokenValues","reasoningTokens","$ai_reasoning_tokens","cacheReadInputTokens","$ai_cache_read_input_tokens","cacheCreationInputTokens","$ai_cache_creation_input_tokens","properties","$ai_provider","posthogProviderOverride","$ai_model","posthogModelOverride","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","$ai_output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","$ai_tools","event","groups","posthogGroups","PostHogGoogleGenAI","constructor","posthog","geminiConfig","phClient","GoogleGenAI","models","WrappedModels","generateContent","posthogDistinctId","posthogTraceId","posthogCaptureImmediate","geminiParams","uuidv4","startTime","Date","now","availableTools","formatInput","contents","usageMetadata","promptTokenCount","candidatesTokenCount","thoughtsTokenCount","cachedContentTokenCount","status","generateContentStream","accumulatedContent","stream","chunk","item","String"],"mappings":";;;;AAeA,MAAMA,aAAa,GAAG,MAAM;AAmBrB,MAAMC,cAAc,GACzBC,MAAiH,IACzF;EACxB,IAAI,CAACA,MAAM,EAAE;AACX,IAAA,OAAO,EAAE;AACX,EAAA;EACA,MAAMC,WAAgC,GAAG,EAAE;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAM,CAASG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WA
AW,CAACE,GAAG,CAAC,GAAIH,MAAM,CAASG,GAAG,CAAC;AACzC,IAAA;AACF,EAAA;AACA,EAAA,OAAOF,WAAW;AACpB,CAAC;AAwIM,MAAMI,oBAAoB,GAAIC,QAAa,IAAyB;EACzE,MAAMC,MAA0B,GAAG,EAAE;AAErC,EAAA,IAAID,QAAQ,CAACE,UAAU,IAAIC,KAAK,CAACC,OAAO,CAACJ,QAAQ,CAACE,UAAU,CAAC,EAAE;AAC7D,IAAA,KAAK,MAAMG,SAAS,IAAIL,QAAQ,CAACE,UAAU,EAAE;MAC3C,IAAIG,SAAS,CAACC,OAAO,IAAID,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;QAChD,MAAMD,OAAyB,GAAG,EAAE;QAEpC,KAAK,MAAME,IAAI,IAAIH,SAAS,CAACC,OAAO,CAACC,KAAK,EAAE;UAC1C,IAAIC,IAAI,CAACC,IAAI,EAAE;YACbH,OAAO,CAACI,IAAI,CAAC;AAAEC,cAAAA,IAAI,EAAE,MAAM;cAAEF,IAAI,EAAED,IAAI,CAACC;AAAK,aAAC,CAAC;AACjD,UAAA,CAAC,MAAM,IAAID,IAAI,CAACI,YAAY,EAAE;YAC5BN,OAAO,CAACI,IAAI,CAAC;AACXC,cAAAA,IAAI,EAAE,UAAU;AAChBE,cAAAA,QAAQ,EAAE;AACRC,gBAAAA,IAAI,EAAEN,IAAI,CAACI,YAAY,CAACE,IAAI;AAC5BC,gBAAAA,SAAS,EAAEP,IAAI,CAACI,YAAY,CAACI;AAC/B;AACF,aAAC,CAAC;AACJ,UAAA;AACF,QAAA;AAEA,QAAA,IAAIV,OAAO,CAACW,MAAM,GAAG,CAAC,EAAE;UACtBhB,MAAM,CAACS,IAAI,CAAC;AACVQ,YAAAA,IAAI,EAAE,WAAW;AACjBZ,YAAAA;AACF,WAAC,CAAC;AACJ,QAAA;AACF,MAAA,CAAC,MAAM,IAAID,SAAS,CAACI,IAAI,EAAE;QACzBR,MAAM,CAACS,IAAI,CAAC;AACVQ,UAAAA,IAAI,EAAE,WAAW;AACjBZ,UAAAA,OAAO,EAAE,CAAC;AAAEK,YAAAA,IAAI,EAAE,MAAM;YAAEF,IAAI,EAAEJ,SAAS,CAACI;WAAM;AAClD,SAAC,CAAC;AACJ,MAAA;AACF,IAAA;AACF,EAAA,CAAC,MAAM,IAAIT,QAAQ,CAACS,IAAI,EAAE;IACxBR,MAAM,CAACS,IAAI,CAAC;AACVQ,MAAAA,IAAI,EAAE,WAAW;AACjBZ,MAAAA,OAAO,EAAE,CAAC;AAAEK,QAAAA,IAAI,EAAE,MAAM;QAAEF,IAAI,EAAET,QAAQ,CAACS;OAAM;AACjD,KAAC,CAAC;AACJ,EAAA;AAEA,EAAA,OAAOR,MAAM;AACf,CAAC;AAcM,MAAMkB,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAU;EACzF,OAAQF,MAAM,CAASG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK;AACnE,CAAC;;AAgBD;AACA;AACA;AACA;AACO,MAAME,yBAAyB,GAAGA,CACvCC,QAAgB,EAChB/B,MAAW,KACsD;EAO/B;IAChC,IAAIA,MAAM,CAACgC,MAAM,IAAIhC,MAAM,CAACgC,MAAM,CAACC,KAAK,EAAE;AACxC,MAAA,OAAOjC,MAAM,CAACgC,MAAM,CAACC,KAAK;AAC5B,IAAA;AAEA,IAAA,OAAO,IAAI;AACb,EAAA;AAgBF,CAAC;AAqBD,SAASC,cAAcA,CAACC,GAAQ,EAAO;AACrC,EAAA,IAAIA,GAAG,KAAK/B,SAAS,IAAI+B,GAAG,KAAK,IAAI,EAAE;AACrC,IAAA,OAAOA,GAAG;AACZ,EAAA;AACA,EAAA,MAAMC,QAAQ,GAAGC,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACJ,GAAG,CAAC,CAAC;AAChD,EAAA,IAAI,OAAOC,QAAQ,KAAK,QAAQ,EAAE;AAChC,IAAA,OAAOI,MAAM,CAACC,IAAI,CAACL,QAAQ,EAAEtC,aAAa,CAAC,CAAC4C,QAAQ,CAAC5C,aAAa,CAAC;EACrE,CAAC,MAAM,IAAIW,KAAK,CAACC,OAAO,CAAC0B,QAAQ,CAAC,EAAE;AAClC,IAAA,OAAOA,QAAQ,CAACO,GAAG,CAACT,cAAc,CAAC;EACrC,CAAC,MAAM,IAAIE,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AACnD,IAAA,OAAOQ,MAAM,CAACC,WAAW,CAACD,MAAM,CAACE,OAAO,CAACV,QAAQ,CAAC,CAACO,GAAG,CAAC,CAAC,CAACI,CAAC,EAAEC,CAAC,CAAC,KAAK,CAACD,CAAC,EAAEb,cAAc,CAACc,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7F,EAAA;AACA,EAAA,OAAOZ,QAAQ;AACjB;AAEO,MAAMa,kBAAkB,GAAG,OAAO;EACvCvB,MAAM;EACNwB,UAAU;EACVC,OAAO;EACPC,KAAK;EACLrB,QAAQ;EACRH,KAAK;EACLrB,MAAM;EACN8C,OAAO;EACPC,OAAO;EACPtD,MAAM;AACNuD,EAAAA,UAAU,GAAG,GAAG;EAChBC,KAAK,GAAG,EAAE;AACVC,EAAAA,OAAO,GAAG,KAAK;EACfC,KAAK;EACLzB,KAAK;AACL0B,EAAAA,gBAAgB,GAAG;AACK,CAAC,KAAoB;AAC7C,EAAA,IAAI,CAACjC,MAAM,CAACkC,OAAO,EAAE;AACnB,IAAA,OAAOC,OAAO,CAACC,OAAO,EAAE;AAC1B,EAAA;AACA;AACA,EAAA,MAAMC,SAAS,GAAG7B,cAAc,CAACN,KAAK,CAAC;AACvC,EAAA,MAAMoC,UAAU,GAAG9B,cAAc,CAAC3B,MAAM,CAAC;AACzC,EAAA,MAAM0D,SAAS,GAAG/B,cAAc,CAACwB,KAAK,CAAC;EAEvC,IAAIQ,SAAS,GAAG,EAAE;AAClB,EAAA,IAAIT,OAAO,EAAE;AACXS,IAAAA,SAAS,GAAG;AACVC,MAAAA,YAAY,EAAE,IAAI;AAClBC,MAAAA,SAAS,EAAEH;KACZ;AACH,EAAA;EACA,IAAII,gBAAgB,GAAG,EAAE;EACzB,IAAIrE,MAAM,CAACsE,mBAAmB,EAAE;AAC9B,IAAA,MAAMC,YAAY,GAAG,CAACvE,MAAM,CAACsE,mBAAmB,CAACE,SAAS,IAAI,CAAC,KAAKhB,KAAK,CAACiB,WAAW,IAAI,CAAC,CAAC;AAC3F,IAAA,MAAMC,aAAa,GAAG,CAAC1E,MAAM,CAACsE,mBAAmB,CAACK,UAAU,IAAI,CAAC,KAAKnB,KAAK,CAACoB,YAAY,IAAI,CAAC,CAAC;AAC9FP
,IAAAA,gBAAgB,GAAG;AACjBQ,MAAAA,kBAAkB,EAAEN,YAAY;AAChCO,MAAAA,mBAAmB,EAAEJ,aAAa;MAClCK,kBAAkB,EAAER,YAAY,GAAGG;KACpC;AACH,EAAA;AAEA,EAAA,MAAMM,qBAAqB,GAAG;IAC5B,IAAIxB,KAAK,CAACyB,eAAe,GAAG;MAAEC,oBAAoB,EAAE1B,KAAK,CAACyB;KAAiB,GAAG,EAAE,CAAC;IACjF,IAAIzB,KAAK,CAAC2B,oBAAoB,GAAG;MAAEC,2BAA2B,EAAE5B,KAAK,CAAC2B;KAAsB,GAAG,EAAE,CAAC;IAClG,IAAI3B,KAAK,CAAC6B,wBAAwB,GAAG;MAAEC,+BAA+B,EAAE9B,KAAK,CAAC6B;KAA0B,GAAG,EAAE;GAC9G;AAED,EAAA,MAAME,UAAU,GAAG;AACjBC,IAAAA,YAAY,EAAExF,MAAM,CAACyF,uBAAuB,IAAI1D,QAAQ;AACxD2D,IAAAA,SAAS,EAAE1F,MAAM,CAAC2F,oBAAoB,IAAIvC,KAAK;AAC/CwC,IAAAA,oBAAoB,EAAE7F,cAAc,CAACC,MAAM,CAAC;AAC5C6F,IAAAA,SAAS,EAAEpE,eAAe,CAACC,MAAM,EAAE1B,MAAM,CAAC8F,kBAAkB,IAAI,KAAK,EAAE/B,SAAS,CAAC;AACjFgC,IAAAA,kBAAkB,EAAEtE,eAAe,CAACC,MAAM,EAAE1B,MAAM,CAAC8F,kBAAkB,IAAI,KAAK,EAAE9B,UAAU,CAAC;AAC3FgC,IAAAA,eAAe,EAAEzC,UAAU;AAC3B0C,IAAAA,gBAAgB,EAAEzC,KAAK,CAACiB,WAAW,IAAI,CAAC;AACxCyB,IAAAA,iBAAiB,EAAE1C,KAAK,CAACoB,YAAY,IAAI,CAAC;AAC1C,IAAA,GAAGI,qBAAqB;AACxBmB,IAAAA,WAAW,EAAE9C,OAAO;AACpB+C,IAAAA,YAAY,EAAEjD,OAAO;AACrBkD,IAAAA,YAAY,EAAE/C,OAAO;IACrB,GAAGtD,MAAM,CAACsG,iBAAiB;AAC3B,IAAA,IAAIpD,UAAU,GAAG,EAAE,GAAG;AAAEqD,MAAAA,uBAAuB,EAAE;AAAM,KAAC,CAAC;AACzD,IAAA,IAAItE,KAAK,GAAG;AAAEuE,MAAAA,SAAS,EAAEvE;KAAO,GAAG,EAAE,CAAC;AACtC,IAAA,GAAGiC,SAAS;IACZ,GAAGG;GACJ;AAED,EAAA,MAAMoC,KAAK,GAAG;IACZvD,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCsD,IAAAA,KAAK,EAAE,gBAAgB;IACvBlB,UAAU;IACVmB,MAAM,EAAE1G,MAAM,CAAC2G;GAChB;AAED,EAAA,IAAIhD,gBAAgB,EAAE;AACpB;AACA,IAAA,MAAMjC,MAAM,CAACiC,gBAAgB,CAAC8C,KAAK,CAAC;AACtC,EAAA,CAAC,MAAM;AACL/E,IAAAA,MAAM,CAACkC,OAAO,CAAC6C,KAAK,CAAC;AACvB,EAAA;AACF,CAAC;;ACpaD;;AA8BO,MAAMG,kBAAkB,CAAC;EAK9BC,WAAWA,CAAC7E,MAA8B,EAAE;IAC1C,MAAM;MAAE8E,OAAO;MAAE,GAAGC;AAAa,KAAC,GAAG/E,MAAM;IAC3C,IAAI,CAACgF,QAAQ,GAAGF,OAAO;AACvB,IAAA,IAAI,CAACpF,MAAM,GAAG,IAAIuF,WAAW,CAACF,YAAY,CAAC;AAC3C,IAAA,IAAI,CAACG,MAAM,GAAG,IAAIC,aAAa,CAAC,IAAI,CAACzF,MAAM,EAAE,IAAI,CAACsF,QAAQ,CAAC;AAC7D,EAAA;AACF;AAEO,MAAMG,aAAa,CAAC;AAIzBN,EAAAA,WAAWA,CAACnF,MAAmB,EAAEsF,QAAiB,EAAE;IAClD,IAAI,CAACtF,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACsF,QAAQ,GAAGA,QAAQ;AAC1B,EAAA;EAEA,MAAaI,eAAeA,CAACpH,MAAiD,EAAoC;IAChH,MAAM;MACJqH,iBAAiB;MACjBC,cAAc;MACdhB,iBAAiB;MACjBK,aAAa;MACbY,uBAAuB;MACvB,GAAGC;AACL,KAAC,GAAGxH,MAAM;AAEV,IAAA,MAAMmD,OAAO,GAAGmE,cAAc,IAAIG,EAAM,EAAE;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;IAE5B,IAAI;AACF,MAAA,MAAMtH,QAAQ,GAAG,MAAM,IAAI,CAACoB,MAAM,CAACwF,MAAM,CAACE,eAAe,CAACI,YAAY,CAAC;MACvE,MAAMnE,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAE/C,MAAA,MAAMG,cAAc,GAAG/F,yBAAyB,CAAC,QAAQ,EAAE0F,YAAY,CAAC;AAExE,MAAA,MAAMvE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAEF,oBAAoB,CAACC,QAAQ,CAAC;QACtC+C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAE,GAAG;AACfC,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAEnE,QAAQ,CAAC0H,aAAa,EAAEC,gBAAgB,IAAI,CAAC;AAC1DrD,UAAAA,YAAY,EAAEtE,QAAQ,CAAC0H,aAAa,EAAEE,oBAAoB,IAAI,CAAC;AAC/DjD,UAAAA,eAAe,EAAE3E,QAAQ,CAAC0H,aAAa,EAAEG,kBAAkB,IAAI,CAAC;AAChEhD,UAAAA,oBAAoB,EAAE7E,QAAQ,CAAC0H,aAAa,EAAEI,uBAAuB,IAAI;SAC1E;AACDnG,QAAAA,KAAK,EAAE4F,cAAc;AACrBlE,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AAEF,MAAA,OAAOjH,QAAQ;IACjB,CAAC,CAAC,OAAOoD,KAAU,EAAE;MACnB,MAAML,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,MAAA,MAAMzE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEo
E,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,EAAE;QACV8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAEG,KAAK,EAAE2E,MAAM,IAAI,GAAG;AAChC7E,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE;SACf;AACDnB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAErB,IAAI,CAACE,SAAS,CAACmB,KAAK,CAAC;AAC5BC,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AACF,MAAA,MAAM7D,KAAK;AACb,IAAA;AACF,EAAA;EAEA,OAAc4E,qBAAqBA,CACjCtI,MAAiD,EACb;IACpC,MAAM;MACJqH,iBAAiB;MACjBC,cAAc;MACdhB,iBAAiB;MACjBK,aAAa;MACbY,uBAAuB;MACvB,GAAGC;AACL,KAAC,GAAGxH,MAAM;AAEV,IAAA,MAAMmD,OAAO,GAAGmE,cAAc,IAAIG,EAAM,EAAE;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;IAC5B,IAAIW,kBAAkB,GAAG,EAAE;AAC3B,IAAA,IAAI/E,KAAiB,GAAG;AACtBiB,MAAAA,WAAW,EAAE,CAAC;AACdG,MAAAA,YAAY,EAAE;KACf;IAED,IAAI;AACF,MAAA,MAAM4D,MAAM,GAAG,MAAM,IAAI,CAAC9G,MAAM,CAACwF,MAAM,CAACoB,qBAAqB,CAACd,YAAY,CAAC;AAE3E,MAAA,WAAW,MAAMiB,KAAK,IAAID,MAAM,EAAE;QAChC,IAAIC,KAAK,CAAC1H,IAAI,EAAE;UACdwH,kBAAkB,IAAIE,KAAK,CAAC1H,IAAI;AAClC,QAAA;QACA,IAAI0H,KAAK,CAACT,aAAa,EAAE;AACvBxE,UAAAA,KAAK,GAAG;AACNiB,YAAAA,WAAW,EAAEgE,KAAK,CAACT,aAAa,CAACC,gBAAgB,IAAI,CAAC;AACtDrD,YAAAA,YAAY,EAAE6D,KAAK,CAACT,aAAa,CAACE,oBAAoB,IAAI,CAAC;AAC3DjD,YAAAA,eAAe,EAAEwD,KAAK,CAACT,aAAa,CAACG,kBAAkB,IAAI,CAAC;AAC5DhD,YAAAA,oBAAoB,EAAEsD,KAAK,CAACT,aAAa,CAACI,uBAAuB,IAAI;WACtE;AACH,QAAA;AACA,QAAA,MAAMK,KAAK;AACb,MAAA;MAEA,MAAMpF,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAE/C,MAAA,MAAMG,cAAc,GAAG/F,yBAAyB,CAAC,QAAQ,EAAE0F,YAAY,CAAC;AAExE,MAAA,MAAMvE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,CAAC;AAAEK,UAAAA,OAAO,EAAE2H,kBAAkB;AAAE/G,UAAAA,IAAI,EAAE;AAAY,SAAC,CAAC;QAC5D6B,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAE,GAAG;QACfC,KAAK;AACLvB,QAAAA,KAAK,EAAE4F,cAAc;AACrBlE,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;IACJ,CAAC,CAAC,OAAO7D,KAAU,EAAE;MACnB,MAAML,OAAO,GAAG,CAACsE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,MAAA,MAAMzE,kBAAkB,CAAC;QACvBvB,MAAM,EAAE,IAAI,CAACsF,QAAQ;AACrB9D,QAAAA,UAAU,EAAEmE,iBAAiB;QAC7BlE,OAAO;QACPC,KAAK,EAAEoE,YAAY,CAACpE,KAAK;AACzBrB,QAAAA,QAAQ,EAAE,QAAQ;QAClBH,KAAK,EAAE,IAAI,CAACkG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9CxH,QAAAA,MAAM,EAAE,EAAE;QACV8C,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDtD,QAAAA,MAAM,EAAEA,MAAa;AACrBuD,QAAAA,UAAU,EAAEG,KAAK,EAAE2E,MAAM,IAAI,GAAG;AAChC7E,QAAAA,KAAK,EAAE;AACLiB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE;SACf;AACDnB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAErB,IAAI,CAACE,SAAS,CAACmB,KAAK,CAAC;AAC5BC,QAAAA,gBAAgB,EAAE4D;AACpB,OAAC,CAAC;AACF,MAAA,MAAM7D,KAAK;AACb,IAAA;AACF,EAAA;EAEQoE,WAAWA,CAACC,QAAa,EAA4C;AAC3E,IAAA,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AAChC,MAAA,OAAO,CAAC;AAAEvG,QAAAA,IAAI,EAAE,MAAM;AAAEZ,QAAAA,OAAO,EAAEmH;AAAS,OAAC,CAAC;AAC9C,IAAA;AAEA,IAAA,IAAItH,KAAK,CAACC,OAAO,CAACqH,QAAQ,CAAC,EAAE;AAC3B,MAAA,OAAOA,QAAQ,CAACpF,GAAG,CAAE+F,IAAI,IAAK;AAC5B,QAAA,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UAC5B,OAAO;AAAElH,YAAAA,IAAI,EAAE,MAAM;AAAEZ,YAAAA,OAAO,EAAE8H;WAAM;AACxC,QAAA;AACA,QAAA,IAAIA,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UACpC,IAAIA,IAAI,CAAC3H,IAAI,EAAE;YACb,OAAO;AAAES,cAAAA,IAAI,EAAEkH,IAAI,CAAClH,IAAI,IAAI,MAAM;cAAEZ,OAAO,EAAE8H,IAAI,CAAC3H;aAAM;AAC1D,UAAA;UACA,IAAI2H,IAAI,CAAC9H,OAAO,EAAE;YAChB,OAAO;AAAEY,cAAAA,IAAI,EAAEkH,IAAI,CAAClH,IAAI,IAAI,MAAM;cAAEZ,OAAO,EAAE
8H,IAAI,CAAC9H;aAAS;AAC7D,UAAA;AACF,QAAA;QACA,OAAO;AAAEY,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAE+H,MAAM,CAACD,IAAI;SAAG;AAChD,MAAA,CAAC,CAAC;AACJ,IAAA;AAEA,IAAA,IAAIX,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;MAC5C,IAAIA,QAAQ,CAAChH,IAAI,EAAE;AACjB,QAAA,OAAO,CAAC;AAAES,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAEmH,QAAQ,CAAChH;AAAK,SAAC,CAAC;AACnD,MAAA;MACA,IAAIgH,QAAQ,CAACnH,OAAO,EAAE;AACpB,QAAA,OAAO,CAAC;AAAEY,UAAAA,IAAI,EAAE,MAAM;UAAEZ,OAAO,EAAEmH,QAAQ,CAACnH;AAAQ,SAAC,CAAC;AACtD,MAAA;AACF,IAAA;AAEA,IAAA,OAAO,CAAC;AAAEY,MAAAA,IAAI,EAAE,MAAM;MAAEZ,OAAO,EAAE+H,MAAM,CAACZ,QAAQ;AAAE,KAAC,CAAC;AACtD,EAAA;AACF;;;;"}
package/dist/index.cjs CHANGED
@@ -1339,17 +1339,17 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       const latency = (Date.now() - startTime) / 1000;
       const providerMetadata = result.providerMetadata;
       const additionalTokenValues = {
-        ...(providerMetadata?.openai?.reasoningTokens ? {
-          reasoningTokens: providerMetadata.openai.reasoningTokens
-        } : {}),
-        ...(providerMetadata?.openai?.cachedPromptTokens ? {
-          cacheReadInputTokens: providerMetadata.openai.cachedPromptTokens
-        } : {}),
         ...(providerMetadata?.anthropic ? {
-          cacheReadInputTokens: providerMetadata.anthropic.cacheReadInputTokens,
           cacheCreationInputTokens: providerMetadata.anthropic.cacheCreationInputTokens
         } : {})
       };
+      const usage = {
+        inputTokens: result.usage.inputTokens,
+        outputTokens: result.usage.outputTokens,
+        reasoningTokens: result.usage.reasoningTokens,
+        cacheReadInputTokens: result.usage.cachedInputTokens,
+        ...additionalTokenValues
+      };
       await sendEventToPosthog({
         client: phClient,
         distinctId: options.posthogDistinctId,
@@ -1362,11 +1362,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         baseURL,
         params: mergedParams,
         httpStatus: 200,
-        usage: {
-          inputTokens: result.usage.inputTokens,
-          outputTokens: result.usage.outputTokens,
-          ...additionalTokenValues
-        },
+        usage,
         tools: availableTools,
         captureImmediate: options.posthogCaptureImmediate
       });
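
Taken together, the two hunks above switch the non-streaming capture path from provider-specific metadata lookups to the token counts the AI SDK now reports directly on result.usage; only Anthropic's cache-write count still has to come from providerMetadata. A minimal TypeScript sketch of that mapping, assuming the result shape shown in this diff (buildUsage and the type aliases are illustrative, not part of the package):

    // Assumed shape of the normalized usage reported by the SDK (illustrative).
    type SdkUsage = {
      inputTokens?: number
      outputTokens?: number
      reasoningTokens?: number
      cachedInputTokens?: number
    }

    type SdkResult = {
      usage: SdkUsage
      providerMetadata?: { anthropic?: { cacheCreationInputTokens?: number } }
    }

    // Illustrative helper mirroring the non-streaming mapping above.
    function buildUsage(result: SdkResult) {
      return {
        // Normalized counters reported for every provider.
        inputTokens: result.usage.inputTokens,
        outputTokens: result.usage.outputTokens,
        reasoningTokens: result.usage.reasoningTokens,
        cacheReadInputTokens: result.usage.cachedInputTokens,
        // Cache writes are still only surfaced through Anthropic's provider metadata.
        ...(result.providerMetadata?.anthropic
          ? { cacheCreationInputTokens: result.providerMetadata.anthropic.cacheCreationInputTokens }
          : {}),
      }
    }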
@@ -1428,22 +1424,19 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
             reasoningText += chunk.delta; // New in v5
           }
           if (chunk.type === 'finish') {
+            const providerMetadata = chunk.providerMetadata;
+            const additionalTokenValues = {
+              ...(providerMetadata?.anthropic ? {
+                cacheCreationInputTokens: providerMetadata.anthropic.cacheCreationInputTokens
+              } : {})
+            };
             usage = {
               inputTokens: chunk.usage?.inputTokens,
-              outputTokens: chunk.usage?.outputTokens
+              outputTokens: chunk.usage?.outputTokens,
+              reasoningTokens: chunk.usage?.reasoningTokens,
+              cacheReadInputTokens: chunk.usage?.cachedInputTokens,
+              ...additionalTokenValues
             };
-            if (chunk.providerMetadata?.openai?.reasoningTokens) {
-              usage.reasoningTokens = chunk.providerMetadata.openai.reasoningTokens;
-            }
-            if (chunk.providerMetadata?.openai?.cachedPromptTokens) {
-              usage.cacheReadInputTokens = chunk.providerMetadata.openai.cachedPromptTokens;
-            }
-            if (chunk.providerMetadata?.anthropic?.cacheReadInputTokens) {
-              usage.cacheReadInputTokens = chunk.providerMetadata.anthropic.cacheReadInputTokens;
-            }
-            if (chunk.providerMetadata?.anthropic?.cacheCreationInputTokens) {
-              usage.cacheCreationInputTokens = chunk.providerMetadata.anthropic.cacheCreationInputTokens;
-            }
           }
           controller.enqueue(chunk);
         },
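
The streaming path applies the same consolidation once the 'finish' chunk arrives, replacing the four provider-specific if blocks with a single object literal. A condensed, illustrative sketch of that branch, assuming the chunk shape used in the hunk above (the names below are not exports of the package):

    // Assumed finish-chunk shape (illustrative).
    type FinishChunk = {
      type: string
      usage?: { inputTokens?: number; outputTokens?: number; reasoningTokens?: number; cachedInputTokens?: number }
      providerMetadata?: { anthropic?: { cacheCreationInputTokens?: number } }
    }

    function usageFromFinishChunk(chunk: FinishChunk) {
      if (chunk.type !== 'finish') return undefined
      return {
        inputTokens: chunk.usage?.inputTokens,
        outputTokens: chunk.usage?.outputTokens,
        reasoningTokens: chunk.usage?.reasoningTokens,
        cacheReadInputTokens: chunk.usage?.cachedInputTokens,
        // As in the non-streaming path, only the Anthropic cache-write count
        // still comes from provider metadata.
        ...(chunk.providerMetadata?.anthropic
          ? { cacheCreationInputTokens: chunk.providerMetadata.anthropic.cacheCreationInputTokens }
          : {}),
      }
    }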
@@ -1742,7 +1735,9 @@ class WrappedModels {
         httpStatus: 200,
         usage: {
           inputTokens: response.usageMetadata?.promptTokenCount ?? 0,
-          outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0
+          outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,
+          reasoningTokens: response.usageMetadata?.thoughtsTokenCount ?? 0,
+          cacheReadInputTokens: response.usageMetadata?.cachedContentTokenCount ?? 0
         },
         tools: availableTools,
         captureImmediate: posthogCaptureImmediate
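
For Gemini, the wrapper reads everything from response.usageMetadata, so thought (reasoning) tokens and cached-content tokens are now captured alongside prompt and candidate tokens. A small sketch of that mapping, assuming the usageMetadata fields referenced in the hunk above (mapGeminiUsage is an illustrative helper, not part of the package):

    // Assumed subset of Gemini's usageMetadata (illustrative).
    type GeminiUsageMetadata = {
      promptTokenCount?: number
      candidatesTokenCount?: number
      thoughtsTokenCount?: number
      cachedContentTokenCount?: number
    }

    // Maps Gemini token counters onto the captured usage keys.
    function mapGeminiUsage(usageMetadata?: GeminiUsageMetadata) {
      return {
        inputTokens: usageMetadata?.promptTokenCount ?? 0,
        outputTokens: usageMetadata?.candidatesTokenCount ?? 0,
        reasoningTokens: usageMetadata?.thoughtsTokenCount ?? 0,
        cacheReadInputTokens: usageMetadata?.cachedContentTokenCount ?? 0,
      }
    }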
@@ -1798,7 +1793,9 @@ class WrappedModels {
         if (chunk.usageMetadata) {
           usage = {
             inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,
-            outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0
+            outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,
+            reasoningTokens: chunk.usageMetadata.thoughtsTokenCount ?? 0,
+            cacheReadInputTokens: chunk.usageMetadata.cachedContentTokenCount ?? 0
           };
         }
         yield chunk;
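
In the streaming Gemini wrapper, usage is reassigned on every chunk that carries usageMetadata, so the counts captured at the end are whatever the last reporting chunk contained. An illustrative consumer-side sketch of that behavior, with an assumed chunk type:

    // Assumed chunk shape for the streamed response (illustrative).
    type GeminiChunk = {
      usageMetadata?: {
        promptTokenCount?: number
        candidatesTokenCount?: number
        thoughtsTokenCount?: number
        cachedContentTokenCount?: number
      }
    }

    // Keeps overwriting usage as chunks report it; the final chunk's counts win.
    async function collectStreamUsage(stream: AsyncIterable<GeminiChunk>) {
      let usage = { inputTokens: 0, outputTokens: 0, reasoningTokens: 0, cacheReadInputTokens: 0 }
      for await (const chunk of stream) {
        if (chunk.usageMetadata) {
          usage = {
            inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,
            outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,
            reasoningTokens: chunk.usageMetadata.thoughtsTokenCount ?? 0,
            cacheReadInputTokens: chunk.usageMetadata.cachedContentTokenCount ?? 0,
          }
        }
      }
      return usage
    }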