@posthog/ai 5.0.1 → 5.2.0

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
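The 5.0.1 → 5.2.0 range adds a compiled Google Gemini wrapper (exported as Gemini / PostHogGoogleGenAI) that proxies @google/genai calls and captures a $ai_generation event per request. Below is a minimal usage sketch based only on the code in this diff; the subpath import, PostHog host, API keys, and model name are illustrative assumptions, not documented values.

import { PostHog } from 'posthog-node'
import { Gemini } from '@posthog/ai/gemini' // subpath assumed; also exported as PostHogGoogleGenAI and as the default export

// Placeholder credentials and host -- substitute real values.
const phClient = new PostHog('<posthog_project_api_key>', { host: 'https://us.i.posthog.com' })

const gemini = new Gemini({
  apiKey: '<google_genai_api_key>', // forwarded unchanged to new GoogleGenAI(...)
  posthog: phClient,                // used to capture $ai_generation events
})

const response = await gemini.models.generateContent({
  model: 'gemini-2.0-flash',                      // illustrative model name
  contents: 'Write a haiku about observability',
  posthogDistinctId: 'user_123',                  // optional; the trace id is used when omitted
  posthogTraceId: 'trace_abc',                    // optional; a uuid v4 is generated otherwise
  posthogProperties: { feature: 'docs-example' },
})

console.log(response.text)
await phClient.shutdown()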
@@ -0,0 +1,364 @@
+ 'use strict';
+
+ Object.defineProperty(exports, '__esModule', { value: true });
+
+ var genai = require('@google/genai');
+ var uuid = require('uuid');
+ var buffer = require('buffer');
+
+ const STRING_FORMAT = 'utf8';
+ const getModelParams = params => {
+ if (!params) {
+ return {};
+ }
+ const modelParams = {};
+ const paramKeys = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p', 'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming'];
+ for (const key of paramKeys) {
+ if (key in params && params[key] !== undefined) {
+ modelParams[key] = params[key];
+ }
+ }
+ return modelParams;
+ };
+ const withPrivacyMode = (client, privacyMode, input) => {
+ return client.privacy_mode || privacyMode ? null : input;
+ };
+ function sanitizeValues(obj) {
+ if (obj === undefined || obj === null) {
+ return obj;
+ }
+ const jsonSafe = JSON.parse(JSON.stringify(obj));
+ if (typeof jsonSafe === 'string') {
+ return buffer.Buffer.from(jsonSafe, STRING_FORMAT).toString(STRING_FORMAT);
+ } else if (Array.isArray(jsonSafe)) {
+ return jsonSafe.map(sanitizeValues);
+ } else if (jsonSafe && typeof jsonSafe === 'object') {
+ return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]));
+ }
+ return jsonSafe;
+ }
40
+ const sendEventToPosthog = async ({
41
+ client,
42
+ distinctId,
43
+ traceId,
44
+ model,
45
+ provider,
46
+ input,
47
+ output,
48
+ latency,
49
+ baseURL,
50
+ params,
51
+ httpStatus = 200,
52
+ usage = {},
53
+ isError = false,
54
+ error,
55
+ tools,
56
+ captureImmediate = false
57
+ }) => {
58
+ if (!client.capture) {
59
+ return Promise.resolve();
60
+ }
61
+ // sanitize input and output for UTF-8 validity
62
+ const safeInput = sanitizeValues(input);
63
+ const safeOutput = sanitizeValues(output);
64
+ const safeError = sanitizeValues(error);
65
+ let errorData = {};
66
+ if (isError) {
67
+ errorData = {
68
+ $ai_is_error: true,
69
+ $ai_error: safeError
70
+ };
71
+ }
72
+ let costOverrideData = {};
73
+ if (params.posthogCostOverride) {
74
+ const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0);
75
+ const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 0);
76
+ costOverrideData = {
77
+ $ai_input_cost_usd: inputCostUSD,
78
+ $ai_output_cost_usd: outputCostUSD,
79
+ $ai_total_cost_usd: inputCostUSD + outputCostUSD
80
+ };
81
+ }
82
+ const additionalTokenValues = {
83
+ ...(usage.reasoningTokens ? {
84
+ $ai_reasoning_tokens: usage.reasoningTokens
85
+ } : {}),
86
+ ...(usage.cacheReadInputTokens ? {
87
+ $ai_cache_read_input_tokens: usage.cacheReadInputTokens
88
+ } : {}),
89
+ ...(usage.cacheCreationInputTokens ? {
90
+ $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens
91
+ } : {})
92
+ };
93
+ const properties = {
94
+ $ai_provider: params.posthogProviderOverride ?? provider,
95
+ $ai_model: params.posthogModelOverride ?? model,
96
+ $ai_model_parameters: getModelParams(params),
97
+ $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),
98
+ $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),
99
+ $ai_http_status: httpStatus,
100
+ $ai_input_tokens: usage.inputTokens ?? 0,
101
+ $ai_output_tokens: usage.outputTokens ?? 0,
102
+ ...additionalTokenValues,
103
+ $ai_latency: latency,
104
+ $ai_trace_id: traceId,
105
+ $ai_base_url: baseURL,
106
+ ...params.posthogProperties,
107
+ ...(distinctId ? {} : {
108
+ $process_person_profile: false
109
+ }),
110
+ ...(tools ? {
111
+ $ai_tools: tools
112
+ } : {}),
113
+ ...errorData,
114
+ ...costOverrideData
115
+ };
116
+ const event = {
117
+ distinctId: distinctId ?? traceId,
118
+ event: '$ai_generation',
119
+ properties,
120
+ groups: params.posthogGroups
121
+ };
122
+ if (captureImmediate) {
123
+ // await capture promise to send single event in serverless environments
124
+ await client.captureImmediate(event);
125
+ } else {
126
+ client.capture(event);
127
+ }
128
+ };
+
+ // Types from @google/genai
+
+ class PostHogGoogleGenAI {
+ constructor(config) {
+ const {
+ posthog,
+ ...geminiConfig
+ } = config;
+ this.phClient = posthog;
+ this.client = new genai.GoogleGenAI(geminiConfig);
+ this.models = new WrappedModels(this.client, this.phClient);
+ }
+ }
+ class WrappedModels {
+ constructor(client, phClient) {
+ this.client = client;
+ this.phClient = phClient;
+ }
+ async generateContent(params) {
+ const {
+ posthogDistinctId,
+ posthogTraceId,
+ posthogProperties,
+ posthogGroups,
+ posthogCaptureImmediate,
+ ...geminiParams
+ } = params;
+ const traceId = posthogTraceId ?? uuid.v4();
+ const startTime = Date.now();
+ try {
+ const response = await this.client.models.generateContent(geminiParams);
+ const latency = (Date.now() - startTime) / 1000;
+ await sendEventToPosthog({
+ client: this.phClient,
+ distinctId: posthogDistinctId,
+ traceId,
+ model: geminiParams.model,
+ provider: 'gemini',
+ input: this.formatInput(geminiParams.contents),
+ output: this.formatOutput(response),
+ latency,
+ baseURL: 'https://generativelanguage.googleapis.com',
+ params: params,
+ httpStatus: 200,
+ usage: {
+ inputTokens: response.usageMetadata?.promptTokenCount ?? 0,
+ outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0
+ },
+ captureImmediate: posthogCaptureImmediate
+ });
+ return response;
+ } catch (error) {
+ const latency = (Date.now() - startTime) / 1000;
+ await sendEventToPosthog({
+ client: this.phClient,
+ distinctId: posthogDistinctId,
+ traceId,
+ model: geminiParams.model,
+ provider: 'gemini',
+ input: this.formatInput(geminiParams.contents),
+ output: [],
+ latency,
+ baseURL: 'https://generativelanguage.googleapis.com',
+ params: params,
+ httpStatus: error?.status ?? 500,
+ usage: {
+ inputTokens: 0,
+ outputTokens: 0
+ },
+ isError: true,
+ error: JSON.stringify(error),
+ captureImmediate: posthogCaptureImmediate
+ });
+ throw error;
+ }
+ }
+ async *generateContentStream(params) {
+ const {
+ posthogDistinctId,
+ posthogTraceId,
+ posthogProperties,
+ posthogGroups,
+ posthogCaptureImmediate,
+ ...geminiParams
+ } = params;
+ const traceId = posthogTraceId ?? uuid.v4();
+ const startTime = Date.now();
+ let accumulatedContent = '';
+ let usage = {
+ inputTokens: 0,
+ outputTokens: 0
+ };
+ try {
+ const stream = await this.client.models.generateContentStream(geminiParams);
+ for await (const chunk of stream) {
+ if (chunk.text) {
+ accumulatedContent += chunk.text;
+ }
+ if (chunk.usageMetadata) {
+ usage = {
+ inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,
+ outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0
+ };
+ }
+ yield chunk;
+ }
+ const latency = (Date.now() - startTime) / 1000;
+ await sendEventToPosthog({
+ client: this.phClient,
+ distinctId: posthogDistinctId,
+ traceId,
+ model: geminiParams.model,
+ provider: 'gemini',
+ input: this.formatInput(geminiParams.contents),
+ output: [{
+ content: accumulatedContent,
+ role: 'assistant'
+ }],
+ latency,
+ baseURL: 'https://generativelanguage.googleapis.com',
+ params: params,
+ httpStatus: 200,
+ usage,
+ captureImmediate: posthogCaptureImmediate
+ });
+ } catch (error) {
+ const latency = (Date.now() - startTime) / 1000;
+ await sendEventToPosthog({
+ client: this.phClient,
+ distinctId: posthogDistinctId,
+ traceId,
+ model: geminiParams.model,
+ provider: 'gemini',
+ input: this.formatInput(geminiParams.contents),
+ output: [],
+ latency,
+ baseURL: 'https://generativelanguage.googleapis.com',
+ params: params,
+ httpStatus: error?.status ?? 500,
+ usage: {
+ inputTokens: 0,
+ outputTokens: 0
+ },
+ isError: true,
+ error: JSON.stringify(error),
+ captureImmediate: posthogCaptureImmediate
+ });
+ throw error;
+ }
+ }
+ formatInput(contents) {
+ if (typeof contents === 'string') {
+ return [{
+ role: 'user',
+ content: contents
+ }];
+ }
+ if (Array.isArray(contents)) {
+ return contents.map(item => {
+ if (typeof item === 'string') {
+ return {
+ role: 'user',
+ content: item
+ };
+ }
+ if (item && typeof item === 'object') {
+ if (item.text) {
+ return {
+ role: item.role || 'user',
+ content: item.text
+ };
+ }
+ if (item.content) {
+ return {
+ role: item.role || 'user',
+ content: item.content
+ };
+ }
+ }
+ return {
+ role: 'user',
+ content: String(item)
+ };
+ });
+ }
+ if (contents && typeof contents === 'object') {
+ if (contents.text) {
+ return [{
+ role: 'user',
+ content: contents.text
+ }];
+ }
+ if (contents.content) {
+ return [{
+ role: 'user',
+ content: contents.content
+ }];
+ }
+ }
+ return [{
+ role: 'user',
+ content: String(contents)
+ }];
+ }
+ formatOutput(response) {
+ if (response.text) {
+ return [{
+ role: 'assistant',
+ content: response.text
+ }];
+ }
+ if (response.candidates && Array.isArray(response.candidates)) {
+ return response.candidates.map(candidate => {
+ if (candidate.content && candidate.content.parts) {
+ const text = candidate.content.parts.filter(part => part.text).map(part => part.text).join('');
+ return {
+ role: 'assistant',
+ content: text
+ };
+ }
+ return {
+ role: 'assistant',
+ content: String(candidate)
+ };
+ });
+ }
+ return [];
+ }
+ }
+
+ exports.Gemini = PostHogGoogleGenAI;
+ exports.PostHogGoogleGenAI = PostHogGoogleGenAI;
+ exports.WrappedModels = WrappedModels;
+ exports["default"] = PostHogGoogleGenAI;
+ //# sourceMappingURL=index.cjs.map
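For reference, sendEventToPosthog above is where the per-call posthog* options land on the captured event. A hedged sketch of those knobs and the arithmetic they drive, reusing the hypothetical gemini client from the first sketch:

await gemini.models.generateContent({
  model: 'gemini-2.0-flash',
  contents: 'Summarise this support ticket',
  posthogPrivacyMode: true,                         // $ai_input and $ai_output_choices are sent as null
  posthogModelOverride: 'gemini-proxy',             // reported as $ai_model instead of the request model
  posthogProviderOverride: 'my-gateway',            // reported as $ai_provider instead of 'gemini'
  posthogGroups: { company: 'acme' },               // forwarded as the event's groups
  posthogCostOverride: { inputCost: 0.000001, outputCost: 0.000002 },
  posthogCaptureImmediate: true,                    // awaits client.captureImmediate() -- useful in serverless runtimes
})

// With that posthogCostOverride and a usage of 1000 input / 500 output tokens, the event carries:
//   $ai_input_cost_usd  = 0.000001 * 1000 = 0.001
//   $ai_output_cost_usd = 0.000002 * 500  = 0.001
//   $ai_total_cost_usd  = 0.001 + 0.001   = 0.002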
@@ -0,0 +1 @@
+ {"version":3,"file":"index.cjs","sources":["../../src/utils.ts","../../src/gemini/index.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport { Buffer } from 'buffer'\nimport OpenAIOrignal from 'openai'\nimport AnthropicOriginal from '@anthropic-ai/sdk'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams\ntype ResponseCreateParams = OpenAIOrignal.Responses.ResponseCreateParams\n\n// limit large outputs by truncating to 200kb (approx 200k bytes)\nexport const MAX_OUTPUT_SIZE = 200000\nconst STRING_FORMAT = 'utf8'\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n posthogCaptureImmediate?: boolean\n}\n\nexport interface CostOverride {\n inputCost: number\n outputCost: number\n}\n\nexport const getModelParams = (\n params: ((ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams) | null\n): Record<string, any> => {\n if (!params) {\n return {}\n }\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): Array<{ role: string; content: string }> => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n } else if (provider === 'gemini') {\n return formatResponseGemini(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): Array<{ role: string; content: string }> => {\n // Example approach if \"response.content\" holds array of text segments, etc.\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.content ?? []) {\n if (choice?.text) {\n output.push({\n role: 'assistant',\n content: choice.text,\n })\n }\n }\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): Array<{ role: string; content: string }> => {\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.choices ?? 
[]) {\n if (choice.message?.content) {\n output.push({\n role: choice.message.role,\n content: choice.message.content,\n })\n }\n }\n return output\n}\n\nexport const formatResponseGemini = (response: any): Array<{ role: string; content: string }> => {\n const output: Array<{ role: string; content: string }> = []\n\n if (response.text) {\n output.push({\n role: 'assistant',\n content: response.text,\n })\n return output\n }\n\n if (response.candidates && Array.isArray(response.candidates)) {\n for (const candidate of response.candidates) {\n if (candidate.content && candidate.content.parts) {\n const text = candidate.content.parts\n .filter((part: any) => part.text)\n .map((part: any) => part.text)\n .join('')\n if (text) {\n output.push({\n role: 'assistant',\n content: text,\n })\n }\n }\n }\n }\n\n return output\n}\n\nexport const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {\n if (provider == 'anthropic') {\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = (params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n }\n return params.messages\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? null : input\n}\n\nexport const truncate = (str: string): string => {\n try {\n const buffer = Buffer.from(str, STRING_FORMAT)\n if (buffer.length <= MAX_OUTPUT_SIZE) {\n return str\n }\n const truncatedBuffer = buffer.slice(0, MAX_OUTPUT_SIZE)\n return `${truncatedBuffer.toString(STRING_FORMAT)}... [truncated]`\n } catch (error) {\n console.error('Error truncating, likely not a string')\n return str\n }\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: {\n inputTokens?: number\n outputTokens?: number\n reasoningTokens?: any\n cacheReadInputTokens?: any\n cacheCreationInputTokens?: any\n }\n params: (ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams\n isError?: boolean\n error?: string\n tools?: any\n captureImmediate?: boolean\n}\n\nfunction sanitizeValues(obj: any): any {\n if (obj === undefined || obj === null) {\n return obj\n }\n const jsonSafe = JSON.parse(JSON.stringify(obj))\n if (typeof jsonSafe === 'string') {\n return Buffer.from(jsonSafe, STRING_FORMAT).toString(STRING_FORMAT)\n } else if (Array.isArray(jsonSafe)) {\n return jsonSafe.map(sanitizeValues)\n } else if (jsonSafe && typeof jsonSafe === 'object') {\n return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]))\n }\n return jsonSafe\n}\n\nexport const sendEventToPosthog = async ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n isError = false,\n error,\n tools,\n captureImmediate = false,\n}: SendEventToPosthogParams): Promise<void> => {\n if (!client.capture) {\n return Promise.resolve()\n }\n // sanitize input and output for UTF-8 validity\n const safeInput = sanitizeValues(input)\n const safeOutput = sanitizeValues(output)\n const safeError = sanitizeValues(error)\n\n let errorData = {}\n if (isError) {\n errorData = {\n $ai_is_error: true,\n $ai_error: safeError,\n }\n }\n let costOverrideData = {}\n if 
(params.posthogCostOverride) {\n const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0)\n const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 0)\n costOverrideData = {\n $ai_input_cost_usd: inputCostUSD,\n $ai_output_cost_usd: outputCostUSD,\n $ai_total_cost_usd: inputCostUSD + outputCostUSD,\n }\n }\n\n const additionalTokenValues = {\n ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),\n ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),\n ...(usage.cacheCreationInputTokens ? { $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens } : {}),\n }\n\n const properties = {\n $ai_provider: params.posthogProviderOverride ?? provider,\n $ai_model: params.posthogModelOverride ?? model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n ...additionalTokenValues,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n ...(tools ? { $ai_tools: tools } : {}),\n ...errorData,\n ...costOverrideData,\n }\n\n const event = {\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties,\n groups: params.posthogGroups,\n }\n\n if (captureImmediate) {\n // await capture promise to send single event in serverless environments\n await client.captureImmediate(event)\n } else {\n client.capture(event)\n }\n}\n","import { GoogleGenAI } from '@google/genai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { MonitoringParams, sendEventToPosthog } from '../utils'\n\n// Types from @google/genai\ntype GenerateContentRequest = {\n model: string\n contents: any\n config?: any\n [key: string]: any\n}\n\ntype GenerateContentResponse = {\n text?: string\n candidates?: any[]\n usageMetadata?: {\n promptTokenCount?: number\n candidatesTokenCount?: number\n totalTokenCount?: number\n }\n [key: string]: any\n}\n\ninterface MonitoringGeminiConfig {\n apiKey?: string\n vertexai?: boolean\n project?: string\n location?: string\n apiVersion?: string\n posthog: PostHog\n}\n\nexport class PostHogGoogleGenAI {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n public models: WrappedModels\n\n constructor(config: MonitoringGeminiConfig) {\n const { posthog, ...geminiConfig } = config\n this.phClient = posthog\n this.client = new GoogleGenAI(geminiConfig)\n this.models = new WrappedModels(this.client, this.phClient)\n }\n}\n\nexport class WrappedModels {\n private readonly phClient: PostHog\n private readonly client: GoogleGenAI\n\n constructor(client: GoogleGenAI, phClient: PostHog) {\n this.client = client\n this.phClient = phClient\n }\n\n public async generateContent(params: GenerateContentRequest & MonitoringParams): Promise<GenerateContentResponse> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? 
uuidv4()\n const startTime = Date.now()\n\n try {\n const response = await this.client.models.generateContent(geminiParams)\n const latency = (Date.now() - startTime) / 1000\n\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: this.formatOutput(response),\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: 200,\n usage: {\n inputTokens: response.usageMetadata?.promptTokenCount ?? 0,\n outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,\n },\n captureImmediate: posthogCaptureImmediate,\n })\n\n return response\n } catch (error: any) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: error?.status ?? 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n public async *generateContentStream(\n params: GenerateContentRequest & MonitoringParams\n ): AsyncGenerator<any, void, unknown> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogGroups,\n posthogCaptureImmediate,\n ...geminiParams\n } = params\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n let accumulatedContent = ''\n let usage = {\n inputTokens: 0,\n outputTokens: 0,\n }\n\n try {\n const stream = await this.client.models.generateContentStream(geminiParams)\n\n for await (const chunk of stream) {\n if (chunk.text) {\n accumulatedContent += chunk.text\n }\n if (chunk.usageMetadata) {\n usage = {\n inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,\n outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,\n }\n }\n yield chunk\n }\n\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: 200,\n usage,\n captureImmediate: posthogCaptureImmediate,\n })\n } catch (error: any) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: geminiParams.model,\n provider: 'gemini',\n input: this.formatInput(geminiParams.contents),\n output: [],\n latency,\n baseURL: 'https://generativelanguage.googleapis.com',\n params: params as any,\n httpStatus: error?.status ?? 
500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n }\n\n private formatInput(contents: any): Array<{ role: string; content: string }> {\n if (typeof contents === 'string') {\n return [{ role: 'user', content: contents }]\n }\n\n if (Array.isArray(contents)) {\n return contents.map((item) => {\n if (typeof item === 'string') {\n return { role: 'user', content: item }\n }\n if (item && typeof item === 'object') {\n if (item.text) {\n return { role: item.role || 'user', content: item.text }\n }\n if (item.content) {\n return { role: item.role || 'user', content: item.content }\n }\n }\n return { role: 'user', content: String(item) }\n })\n }\n\n if (contents && typeof contents === 'object') {\n if (contents.text) {\n return [{ role: 'user', content: contents.text }]\n }\n if (contents.content) {\n return [{ role: 'user', content: contents.content }]\n }\n }\n\n return [{ role: 'user', content: String(contents) }]\n }\n\n private formatOutput(response: GenerateContentResponse): Array<{ role: string; content: string }> {\n if (response.text) {\n return [{ role: 'assistant', content: response.text }]\n }\n\n if (response.candidates && Array.isArray(response.candidates)) {\n return response.candidates.map((candidate) => {\n if (candidate.content && candidate.content.parts) {\n const text = candidate.content.parts\n .filter((part: any) => part.text)\n .map((part: any) => part.text)\n .join('')\n return { role: 'assistant', content: text }\n }\n return { role: 'assistant', content: String(candidate) }\n })\n }\n\n return []\n }\n}\n\nexport default PostHogGoogleGenAI\nexport { PostHogGoogleGenAI as Gemini }\n"],"names":["STRING_FORMAT","getModelParams","params","modelParams","paramKeys","key","undefined","withPrivacyMode","client","privacyMode","input","privacy_mode","sanitizeValues","obj","jsonSafe","JSON","parse","stringify","Buffer","from","toString","Array","isArray","map","Object","fromEntries","entries","k","v","sendEventToPosthog","distinctId","traceId","model","provider","output","latency","baseURL","httpStatus","usage","isError","error","tools","captureImmediate","capture","Promise","resolve","safeInput","safeOutput","safeError","errorData","$ai_is_error","$ai_error","costOverrideData","posthogCostOverride","inputCostUSD","inputCost","inputTokens","outputCostUSD","outputCost","outputTokens","$ai_input_cost_usd","$ai_output_cost_usd","$ai_total_cost_usd","additionalTokenValues","reasoningTokens","$ai_reasoning_tokens","cacheReadInputTokens","$ai_cache_read_input_tokens","cacheCreationInputTokens","$ai_cache_creation_input_tokens","properties","$ai_provider","posthogProviderOverride","$ai_model","posthogModelOverride","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","$ai_output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","$ai_tools","event","groups","posthogGroups","PostHogGoogleGenAI","constructor","config","posthog","geminiConfig","phClient","GoogleGenAI","models","WrappedModels","generateContent","posthogDistinctId","posthogTraceId","posthogCaptureImmediate","geminiParams","uuidv4","startTime","Date","now","response","formatInput","contents","formatOutput","usageMetadata","promptTokenCount","candidatesTokenCount","status","generateContentStream","accumulatedContent","stream","chunk","text","content","role","item","String","candidates","candidate","p
arts","filter","part","join"],"mappings":";;;;;;;;AAWA,MAAMA,aAAa,GAAG,MAAM,CAAA;AAmBrB,MAAMC,cAAc,GACzBC,MAAiH,IACzF;EACxB,IAAI,CAACA,MAAM,EAAE;AACX,IAAA,OAAO,EAAE,CAAA;AACX,GAAA;EACA,MAAMC,WAAgC,GAAG,EAAE,CAAA;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH,CAAA;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAM,CAASG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAM,CAASG,GAAG,CAAC,CAAA;AACzC,KAAA;AACF,GAAA;AACA,EAAA,OAAOF,WAAW,CAAA;AACpB,CAAC,CAAA;AAyFM,MAAMI,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAU;EACzF,OAAQF,MAAM,CAASG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK,CAAA;AACnE,CAAC,CAAA;AAyCD,SAASE,cAAcA,CAACC,GAAQ,EAAO;AACrC,EAAA,IAAIA,GAAG,KAAKP,SAAS,IAAIO,GAAG,KAAK,IAAI,EAAE;AACrC,IAAA,OAAOA,GAAG,CAAA;AACZ,GAAA;AACA,EAAA,MAAMC,QAAQ,GAAGC,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACJ,GAAG,CAAC,CAAC,CAAA;AAChD,EAAA,IAAI,OAAOC,QAAQ,KAAK,QAAQ,EAAE;AAChC,IAAA,OAAOI,aAAM,CAACC,IAAI,CAACL,QAAQ,EAAEd,aAAa,CAAC,CAACoB,QAAQ,CAACpB,aAAa,CAAC,CAAA;GACpE,MAAM,IAAIqB,KAAK,CAACC,OAAO,CAACR,QAAQ,CAAC,EAAE;AAClC,IAAA,OAAOA,QAAQ,CAACS,GAAG,CAACX,cAAc,CAAC,CAAA;GACpC,MAAM,IAAIE,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AACnD,IAAA,OAAOU,MAAM,CAACC,WAAW,CAACD,MAAM,CAACE,OAAO,CAACZ,QAAQ,CAAC,CAACS,GAAG,CAAC,CAAC,CAACI,CAAC,EAAEC,CAAC,CAAC,KAAK,CAACD,CAAC,EAAEf,cAAc,CAACgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;AAC7F,GAAA;AACA,EAAA,OAAOd,QAAQ,CAAA;AACjB,CAAA;AAEO,MAAMe,kBAAkB,GAAG,OAAO;EACvCrB,MAAM;EACNsB,UAAU;EACVC,OAAO;EACPC,KAAK;EACLC,QAAQ;EACRvB,KAAK;EACLwB,MAAM;EACNC,OAAO;EACPC,OAAO;EACPlC,MAAM;AACNmC,EAAAA,UAAU,GAAG,GAAG;EAChBC,KAAK,GAAG,EAAE;AACVC,EAAAA,OAAO,GAAG,KAAK;EACfC,KAAK;EACLC,KAAK;AACLC,EAAAA,gBAAgB,GAAG,KAAA;AACK,CAAC,KAAoB;AAC7C,EAAA,IAAI,CAAClC,MAAM,CAACmC,OAAO,EAAE;AACnB,IAAA,OAAOC,OAAO,CAACC,OAAO,EAAE,CAAA;AAC1B,GAAA;AACA;AACA,EAAA,MAAMC,SAAS,GAAGlC,cAAc,CAACF,KAAK,CAAC,CAAA;AACvC,EAAA,MAAMqC,UAAU,GAAGnC,cAAc,CAACsB,MAAM,CAAC,CAAA;AACzC,EAAA,MAAMc,SAAS,GAAGpC,cAAc,CAAC4B,KAAK,CAAC,CAAA;EAEvC,IAAIS,SAAS,GAAG,EAAE,CAAA;AAClB,EAAA,IAAIV,OAAO,EAAE;AACXU,IAAAA,SAAS,GAAG;AACVC,MAAAA,YAAY,EAAE,IAAI;AAClBC,MAAAA,SAAS,EAAEH,SAAAA;KACZ,CAAA;AACH,GAAA;EACA,IAAII,gBAAgB,GAAG,EAAE,CAAA;EACzB,IAAIlD,MAAM,CAACmD,mBAAmB,EAAE;AAC9B,IAAA,MAAMC,YAAY,GAAG,CAACpD,MAAM,CAACmD,mBAAmB,CAACE,SAAS,IAAI,CAAC,KAAKjB,KAAK,CAACkB,WAAW,IAAI,CAAC,CAAC,CAAA;AAC3F,IAAA,MAAMC,aAAa,GAAG,CAACvD,MAAM,CAACmD,mBAAmB,CAACK,UAAU,IAAI,CAAC,KAAKpB,KAAK,CAACqB,YAAY,IAAI,CAAC,CAAC,CAAA;AAC9FP,IAAAA,gBAAgB,GAAG;AACjBQ,MAAAA,kBAAkB,EAAEN,YAAY;AAChCO,MAAAA,mBAAmB,EAAEJ,aAAa;MAClCK,kBAAkB,EAAER,YAAY,GAAGG,aAAAA;KACpC,CAAA;AACH,GAAA;AAEA,EAAA,MAAMM,qBAAqB,GAAG;IAC5B,IAAIzB,KAAK,CAAC0B,eAAe,GAAG;MAAEC,oBAAoB,EAAE3B,KAAK,CAAC0B,eAAAA;KAAiB,GAAG,EAAE,CAAC;IACjF,IAAI1B,KAAK,CAAC4B,oBAAoB,GAAG;MAAEC,2BAA2B,EAAE7B,KAAK,CAAC4B,oBAAAA;KAAsB,GAAG,EAAE,CAAC;IAClG,IAAI5B,KAAK,CAAC8B,wBAAwB,GAAG;MAAEC,+BAA+B,EAAE/B,KAAK,CAAC8B,wBAAAA;KAA0B,GAAG,EAAE,CAAA;GAC9G,CAAA;AAED,EAAA,MAAME,UAAU,GAAG;AACjBC,IAAAA,YAAY,EAAErE,MAAM,CAACsE,uBAAuB,IAAIvC,QAAQ;AACxDwC,IAAAA,SAAS,EAAEvE,MAAM,CAACwE,oBAAoB,IAAI1C,KAAK;AAC/C2C,IAAAA,oBAAoB,EAAE1E,cAAc,CAACC,MAAM,CAAC;AAC5C0E,IAAAA,SAAS,EAAErE,eAAe,CAACC,MAAM,EAAEN,MAAM,CAAC2E,kBAAkB,IAAI,KAAK,EAAE/B,SAAS,CAAC;AACjFgC,IAAAA,kBAAkB,EAAEvE,eAAe,CAACC,MAAM,EAAEN,MAAM,CAAC2E,kBAAkB,IAAI,KAAK,EAAE9B,UAAU,CAAC;AAC3FgC,IAAAA,eAAe,EAAE1C,UAAU;AAC3B2C,IAAAA,gBAAgB,EAAE1C,KAAK,CAACkB,WAAW,IAAI,CAAC;AACxCyB,IAAAA,iBAAiB,EAAE3C,KAAK,CAACqB,YAAY,IAAI,CAAC;AAC1C,IAAA,GAAGI,qBAAqB;AACxBmB,IAAAA,WAAW,EAAE/C,OA
AO;AACpBgD,IAAAA,YAAY,EAAEpD,OAAO;AACrBqD,IAAAA,YAAY,EAAEhD,OAAO;IACrB,GAAGlC,MAAM,CAACmF,iBAAiB;AAC3B,IAAA,IAAIvD,UAAU,GAAG,EAAE,GAAG;AAAEwD,MAAAA,uBAAuB,EAAE,KAAA;AAAM,KAAC,CAAC;AACzD,IAAA,IAAI7C,KAAK,GAAG;AAAE8C,MAAAA,SAAS,EAAE9C,KAAAA;KAAO,GAAG,EAAE,CAAC;AACtC,IAAA,GAAGQ,SAAS;IACZ,GAAGG,gBAAAA;GACJ,CAAA;AAED,EAAA,MAAMoC,KAAK,GAAG;IACZ1D,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCyD,IAAAA,KAAK,EAAE,gBAAgB;IACvBlB,UAAU;IACVmB,MAAM,EAAEvF,MAAM,CAACwF,aAAAA;GAChB,CAAA;AAED,EAAA,IAAIhD,gBAAgB,EAAE;AACpB;AACA,IAAA,MAAMlC,MAAM,CAACkC,gBAAgB,CAAC8C,KAAK,CAAC,CAAA;AACtC,GAAC,MAAM;AACLhF,IAAAA,MAAM,CAACmC,OAAO,CAAC6C,KAAK,CAAC,CAAA;AACvB,GAAA;AACF,CAAC;;ACzRD;;AA4BO,MAAMG,kBAAkB,CAAC;EAK9BC,WAAWA,CAACC,MAA8B,EAAE;IAC1C,MAAM;MAAEC,OAAO;MAAE,GAAGC,YAAAA;AAAa,KAAC,GAAGF,MAAM,CAAA;IAC3C,IAAI,CAACG,QAAQ,GAAGF,OAAO,CAAA;AACvB,IAAA,IAAI,CAACtF,MAAM,GAAG,IAAIyF,iBAAW,CAACF,YAAY,CAAC,CAAA;AAC3C,IAAA,IAAI,CAACG,MAAM,GAAG,IAAIC,aAAa,CAAC,IAAI,CAAC3F,MAAM,EAAE,IAAI,CAACwF,QAAQ,CAAC,CAAA;AAC7D,GAAA;AACF,CAAA;AAEO,MAAMG,aAAa,CAAC;AAIzBP,EAAAA,WAAWA,CAACpF,MAAmB,EAAEwF,QAAiB,EAAE;IAClD,IAAI,CAACxF,MAAM,GAAGA,MAAM,CAAA;IACpB,IAAI,CAACwF,QAAQ,GAAGA,QAAQ,CAAA;AAC1B,GAAA;EAEA,MAAaI,eAAeA,CAAClG,MAAiD,EAAoC;IAChH,MAAM;MACJmG,iBAAiB;MACjBC,cAAc;MACdjB,iBAAiB;MACjBK,aAAa;MACba,uBAAuB;MACvB,GAAGC,YAAAA;AACL,KAAC,GAAGtG,MAAM,CAAA;AAEV,IAAA,MAAM6B,OAAO,GAAGuE,cAAc,IAAIG,OAAM,EAAE,CAAA;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;IAE5B,IAAI;AACF,MAAA,MAAMC,QAAQ,GAAG,MAAM,IAAI,CAACrG,MAAM,CAAC0F,MAAM,CAACE,eAAe,CAACI,YAAY,CAAC,CAAA;MACvE,MAAMrE,OAAO,GAAG,CAACwE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAE/C,MAAA,MAAM7E,kBAAkB,CAAC;QACvBrB,MAAM,EAAE,IAAI,CAACwF,QAAQ;AACrBlE,QAAAA,UAAU,EAAEuE,iBAAiB;QAC7BtE,OAAO;QACPC,KAAK,EAAEwE,YAAY,CAACxE,KAAK;AACzBC,QAAAA,QAAQ,EAAE,QAAQ;QAClBvB,KAAK,EAAE,IAAI,CAACoG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9C7E,QAAAA,MAAM,EAAE,IAAI,CAAC8E,YAAY,CAACH,QAAQ,CAAC;QACnC1E,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDlC,QAAAA,MAAM,EAAEA,MAAa;AACrBmC,QAAAA,UAAU,EAAE,GAAG;AACfC,QAAAA,KAAK,EAAE;AACLkB,UAAAA,WAAW,EAAEqD,QAAQ,CAACI,aAAa,EAAEC,gBAAgB,IAAI,CAAC;AAC1DvD,UAAAA,YAAY,EAAEkD,QAAQ,CAACI,aAAa,EAAEE,oBAAoB,IAAI,CAAA;SAC/D;AACDzE,QAAAA,gBAAgB,EAAE6D,uBAAAA;AACpB,OAAC,CAAC,CAAA;AAEF,MAAA,OAAOM,QAAQ,CAAA;KAChB,CAAC,OAAOrE,KAAU,EAAE;MACnB,MAAML,OAAO,GAAG,CAACwE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/C,MAAA,MAAM7E,kBAAkB,CAAC;QACvBrB,MAAM,EAAE,IAAI,CAACwF,QAAQ;AACrBlE,QAAAA,UAAU,EAAEuE,iBAAiB;QAC7BtE,OAAO;QACPC,KAAK,EAAEwE,YAAY,CAACxE,KAAK;AACzBC,QAAAA,QAAQ,EAAE,QAAQ;QAClBvB,KAAK,EAAE,IAAI,CAACoG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9C7E,QAAAA,MAAM,EAAE,EAAE;QACVC,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDlC,QAAAA,MAAM,EAAEA,MAAa;AACrBmC,QAAAA,UAAU,EAAEG,KAAK,EAAE4E,MAAM,IAAI,GAAG;AAChC9E,QAAAA,KAAK,EAAE;AACLkB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE,CAAA;SACf;AACDpB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAEzB,IAAI,CAACE,SAAS,CAACuB,KAAK,CAAC;AAC5BE,QAAAA,gBAAgB,EAAE6D,uBAAAA;AACpB,OAAC,CAAC,CAAA;AACF,MAAA,MAAM/D,KAAK,CAAA;AACb,KAAA;AACF,GAAA;EAEA,OAAc6E,qBAAqBA,CACjCnH,MAAiD,EACb;IACpC,MAAM;MACJmG,iBAAiB;MACjBC,cAAc;MACdjB,iBAAiB;MACjBK,aAAa;MACba,uBAAuB;MACvB,GAAGC,YAAAA;AACL,KAAC,GAAGtG,MAAM,CAAA;AAEV,IAAA,MAAM6B,OAAO,GAAGuE,cAAc,IAAIG,OAAM,EAAE,CAAA;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;IAC5B,IAAIU,kBAAkB,GAAG,EAAE,CAAA;AAC3B,IAAA,IAAIhF,KAAK,GAAG;AACVkB,MAAAA,WAAW,EAAE,CAAC;AACdG,MAAAA,YAAY,EAAE,CAAA;KACf,CAAA;IAED,IAAI;AACF,MAAA,MAAM4D,MAAM,GAAG,MAAM,IAAI,CAAC/G,MAAM,CAAC0F,MAAM,CAACmB,qBAAqB,CAACb,YAAY,CAAC,CAAA;AAE3E,MAAA,WAAW,MAAMgB,KAAK,IAAID,MAAM,EAAE;QAChC,IAAIC,KAA
K,CAACC,IAAI,EAAE;UACdH,kBAAkB,IAAIE,KAAK,CAACC,IAAI,CAAA;AAClC,SAAA;QACA,IAAID,KAAK,CAACP,aAAa,EAAE;AACvB3E,UAAAA,KAAK,GAAG;AACNkB,YAAAA,WAAW,EAAEgE,KAAK,CAACP,aAAa,CAACC,gBAAgB,IAAI,CAAC;AACtDvD,YAAAA,YAAY,EAAE6D,KAAK,CAACP,aAAa,CAACE,oBAAoB,IAAI,CAAA;WAC3D,CAAA;AACH,SAAA;AACA,QAAA,MAAMK,KAAK,CAAA;AACb,OAAA;MAEA,MAAMrF,OAAO,GAAG,CAACwE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/C,MAAA,MAAM7E,kBAAkB,CAAC;QACvBrB,MAAM,EAAE,IAAI,CAACwF,QAAQ;AACrBlE,QAAAA,UAAU,EAAEuE,iBAAiB;QAC7BtE,OAAO;QACPC,KAAK,EAAEwE,YAAY,CAACxE,KAAK;AACzBC,QAAAA,QAAQ,EAAE,QAAQ;QAClBvB,KAAK,EAAE,IAAI,CAACoG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9C7E,QAAAA,MAAM,EAAE,CAAC;AAAEwF,UAAAA,OAAO,EAAEJ,kBAAkB;AAAEK,UAAAA,IAAI,EAAE,WAAA;AAAY,SAAC,CAAC;QAC5DxF,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDlC,QAAAA,MAAM,EAAEA,MAAa;AACrBmC,QAAAA,UAAU,EAAE,GAAG;QACfC,KAAK;AACLI,QAAAA,gBAAgB,EAAE6D,uBAAAA;AACpB,OAAC,CAAC,CAAA;KACH,CAAC,OAAO/D,KAAU,EAAE;MACnB,MAAML,OAAO,GAAG,CAACwE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/C,MAAA,MAAM7E,kBAAkB,CAAC;QACvBrB,MAAM,EAAE,IAAI,CAACwF,QAAQ;AACrBlE,QAAAA,UAAU,EAAEuE,iBAAiB;QAC7BtE,OAAO;QACPC,KAAK,EAAEwE,YAAY,CAACxE,KAAK;AACzBC,QAAAA,QAAQ,EAAE,QAAQ;QAClBvB,KAAK,EAAE,IAAI,CAACoG,WAAW,CAACN,YAAY,CAACO,QAAQ,CAAC;AAC9C7E,QAAAA,MAAM,EAAE,EAAE;QACVC,OAAO;AACPC,QAAAA,OAAO,EAAE,2CAA2C;AACpDlC,QAAAA,MAAM,EAAEA,MAAa;AACrBmC,QAAAA,UAAU,EAAEG,KAAK,EAAE4E,MAAM,IAAI,GAAG;AAChC9E,QAAAA,KAAK,EAAE;AACLkB,UAAAA,WAAW,EAAE,CAAC;AACdG,UAAAA,YAAY,EAAE,CAAA;SACf;AACDpB,QAAAA,OAAO,EAAE,IAAI;AACbC,QAAAA,KAAK,EAAEzB,IAAI,CAACE,SAAS,CAACuB,KAAK,CAAC;AAC5BE,QAAAA,gBAAgB,EAAE6D,uBAAAA;AACpB,OAAC,CAAC,CAAA;AACF,MAAA,MAAM/D,KAAK,CAAA;AACb,KAAA;AACF,GAAA;EAEQsE,WAAWA,CAACC,QAAa,EAA4C;AAC3E,IAAA,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AAChC,MAAA,OAAO,CAAC;AAAEY,QAAAA,IAAI,EAAE,MAAM;AAAED,QAAAA,OAAO,EAAEX,QAAAA;AAAS,OAAC,CAAC,CAAA;AAC9C,KAAA;AAEA,IAAA,IAAI1F,KAAK,CAACC,OAAO,CAACyF,QAAQ,CAAC,EAAE;AAC3B,MAAA,OAAOA,QAAQ,CAACxF,GAAG,CAAEqG,IAAI,IAAK;AAC5B,QAAA,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UAC5B,OAAO;AAAED,YAAAA,IAAI,EAAE,MAAM;AAAED,YAAAA,OAAO,EAAEE,IAAAA;WAAM,CAAA;AACxC,SAAA;AACA,QAAA,IAAIA,IAAI,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;UACpC,IAAIA,IAAI,CAACH,IAAI,EAAE;YACb,OAAO;AAAEE,cAAAA,IAAI,EAAEC,IAAI,CAACD,IAAI,IAAI,MAAM;cAAED,OAAO,EAAEE,IAAI,CAACH,IAAAA;aAAM,CAAA;AAC1D,WAAA;UACA,IAAIG,IAAI,CAACF,OAAO,EAAE;YAChB,OAAO;AAAEC,cAAAA,IAAI,EAAEC,IAAI,CAACD,IAAI,IAAI,MAAM;cAAED,OAAO,EAAEE,IAAI,CAACF,OAAAA;aAAS,CAAA;AAC7D,WAAA;AACF,SAAA;QACA,OAAO;AAAEC,UAAAA,IAAI,EAAE,MAAM;UAAED,OAAO,EAAEG,MAAM,CAACD,IAAI,CAAA;SAAG,CAAA;AAChD,OAAC,CAAC,CAAA;AACJ,KAAA;AAEA,IAAA,IAAIb,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;MAC5C,IAAIA,QAAQ,CAACU,IAAI,EAAE;AACjB,QAAA,OAAO,CAAC;AAAEE,UAAAA,IAAI,EAAE,MAAM;UAAED,OAAO,EAAEX,QAAQ,CAACU,IAAAA;AAAK,SAAC,CAAC,CAAA;AACnD,OAAA;MACA,IAAIV,QAAQ,CAACW,OAAO,EAAE;AACpB,QAAA,OAAO,CAAC;AAAEC,UAAAA,IAAI,EAAE,MAAM;UAAED,OAAO,EAAEX,QAAQ,CAACW,OAAAA;AAAQ,SAAC,CAAC,CAAA;AACtD,OAAA;AACF,KAAA;AAEA,IAAA,OAAO,CAAC;AAAEC,MAAAA,IAAI,EAAE,MAAM;MAAED,OAAO,EAAEG,MAAM,CAACd,QAAQ,CAAA;AAAE,KAAC,CAAC,CAAA;AACtD,GAAA;EAEQC,YAAYA,CAACH,QAAiC,EAA4C;IAChG,IAAIA,QAAQ,CAACY,IAAI,EAAE;AACjB,MAAA,OAAO,CAAC;AAAEE,QAAAA,IAAI,EAAE,WAAW;QAAED,OAAO,EAAEb,QAAQ,CAACY,IAAAA;AAAK,OAAC,CAAC,CAAA;AACxD,KAAA;AAEA,IAAA,IAAIZ,QAAQ,CAACiB,UAAU,IAAIzG,KAAK,CAACC,OAAO,CAACuF,QAAQ,CAACiB,UAAU,CAAC,EAAE;AAC7D,MAAA,OAAOjB,QAAQ,CAACiB,UAAU,CAACvG,GAAG,CAAEwG,SAAS,IAAK;QAC5C,IAAIA,SAAS,CAACL,OAAO,IAAIK,SAAS,CAACL,OAAO,CAACM,KAAK,EAAE;AAChD,UAAA,MAAMP,IAAI,GAAGM,SAAS,CAACL,OAAO,CAACM,KAAK,CACjCC,MAAM,CAAEC,IAAS,IAAKA,IAAI,CAACT,IAAI,CAAC,CAChClG,GAAG,CAAE2G,IAAS,IAAKA,IAAI,CAACT,IAA
I,CAAC,CAC7BU,IAAI,CAAC,EAAE,CAAC,CAAA;UACX,OAAO;AAAER,YAAAA,IAAI,EAAE,WAAW;AAAED,YAAAA,OAAO,EAAED,IAAAA;WAAM,CAAA;AAC7C,SAAA;QACA,OAAO;AAAEE,UAAAA,IAAI,EAAE,WAAW;UAAED,OAAO,EAAEG,MAAM,CAACE,SAAS,CAAA;SAAG,CAAA;AAC1D,OAAC,CAAC,CAAA;AACJ,KAAA;AAEA,IAAA,OAAO,EAAE,CAAA;AACX,GAAA;AACF;;;;;;;"}
@@ -0,0 +1,60 @@
+ import { GoogleGenAI } from '@google/genai';
+ import { PostHog } from 'posthog-node';
+
+ interface MonitoringParams {
+ posthogDistinctId?: string;
+ posthogTraceId?: string;
+ posthogProperties?: Record<string, any>;
+ posthogPrivacyMode?: boolean;
+ posthogGroups?: Record<string, any>;
+ posthogModelOverride?: string;
+ posthogProviderOverride?: string;
+ posthogCostOverride?: CostOverride;
+ posthogCaptureImmediate?: boolean;
+ }
+ interface CostOverride {
+ inputCost: number;
+ outputCost: number;
+ }
+
+ type GenerateContentRequest = {
+ model: string;
+ contents: any;
+ config?: any;
+ [key: string]: any;
+ };
+ type GenerateContentResponse = {
+ text?: string;
+ candidates?: any[];
+ usageMetadata?: {
+ promptTokenCount?: number;
+ candidatesTokenCount?: number;
+ totalTokenCount?: number;
+ };
+ [key: string]: any;
+ };
+ interface MonitoringGeminiConfig {
+ apiKey?: string;
+ vertexai?: boolean;
+ project?: string;
+ location?: string;
+ apiVersion?: string;
+ posthog: PostHog;
+ }
+ declare class PostHogGoogleGenAI {
+ private readonly phClient;
+ private readonly client;
+ models: WrappedModels;
+ constructor(config: MonitoringGeminiConfig);
+ }
+ declare class WrappedModels {
+ private readonly phClient;
+ private readonly client;
+ constructor(client: GoogleGenAI, phClient: PostHog);
+ generateContent(params: GenerateContentRequest & MonitoringParams): Promise<GenerateContentResponse>;
+ generateContentStream(params: GenerateContentRequest & MonitoringParams): AsyncGenerator<any, void, unknown>;
+ private formatInput;
+ private formatOutput;
+ }
+
+ export { PostHogGoogleGenAI as Gemini, PostHogGoogleGenAI, WrappedModels, PostHogGoogleGenAI as default };
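Because the declarations above type every call as GenerateContentRequest & MonitoringParams, the PostHog fields type-check alongside the model parameters. A small caller-side sketch leaning on that; the helper itself is hypothetical and not part of the package:

import type { PostHogGoogleGenAI } from '@posthog/ai/gemini' // subpath assumed, as in the earlier sketch

// Hypothetical convenience wrapper that pins default monitoring fields per user.
async function generateForUser(client: PostHogGoogleGenAI, distinctId: string, contents: string) {
  return client.models.generateContent({
    model: 'gemini-2.0-flash',                     // illustrative model name
    contents,
    posthogDistinctId: distinctId,
    posthogProperties: { surface: 'support-bot' }, // illustrative custom property
  })
}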