@posthog/ai 4.4.0 → 5.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@posthog/ai",
3
- "version": "4.4.0",
3
+ "version": "5.0.1",
4
4
  "description": "PostHog Node.js AI integrations",
5
5
  "repository": {
6
6
  "type": "git",
@@ -13,7 +13,7 @@
13
13
  "license": "MIT",
14
14
  "devDependencies": {
15
15
  "@types/jest": "^29.5.14",
16
- "@types/node": "^18.0.0",
16
+ "@types/node": "^20.0.0",
17
17
  "node-fetch": "^3.3.2",
18
18
  "typescript": "^4.7.4"
19
19
  },
@@ -22,11 +22,16 @@
22
22
  "ai",
23
23
  "openai",
24
24
  "anthropic",
25
+ "gemini",
25
26
  "llm",
26
27
  "observability"
27
28
  ],
29
+ "engines": {
30
+ "node": ">=20"
31
+ },
28
32
  "dependencies": {
29
33
  "@anthropic-ai/sdk": "^0.36.3",
34
+ "@google/genai": "^1.1.0",
30
35
  "@langchain/core": "^0.3.37",
31
36
  "ai": "^4.1.0",
32
37
  "langchain": "^0.3.15",
@@ -35,7 +40,7 @@
35
40
  "zod": "^3.24.1"
36
41
  },
37
42
  "peerDependencies": {
38
- "posthog-node": "^4.17.1"
43
+ "posthog-node": "^5.0.0"
39
44
  },
40
45
  "scripts": {
41
46
  "test": "jest",
@@ -52,6 +57,11 @@
52
57
  "import": "./lib/anthropic/index.mjs",
53
58
  "types": "./lib/anthropic/index.d.ts"
54
59
  },
60
+ "./gemini": {
61
+ "require": "./lib/gemini/index.cjs",
62
+ "import": "./lib/gemini/index.mjs",
63
+ "types": "./lib/gemini/index.d.ts"
64
+ },
55
65
  "./openai": {
56
66
  "require": "./lib/openai/index.cjs",
57
67
  "import": "./lib/openai/index.mjs",
@@ -0,0 +1,254 @@
1
import { GoogleGenAI } from '@google/genai'
import { PostHog } from 'posthog-node'
import { v4 as uuidv4 } from 'uuid'
import { MonitoringParams, sendEventToPosthog } from '../utils'

// Minimal structural types mirroring @google/genai's request/response
// shapes. Kept deliberately loose (`any` payloads + index signature) so
// this wrapper does not break when the SDK adds fields.
type GenerateContentRequest = {
  model: string
  contents: any
  config?: any
  [key: string]: any
}

type GenerateContentResponse = {
  // Convenience accessor with the concatenated candidate text.
  text?: string
  candidates?: any[]
  usageMetadata?: {
    promptTokenCount?: number
    candidatesTokenCount?: number
    totalTokenCount?: number
  }
  [key: string]: any
}

// Constructor options: everything except `posthog` is forwarded verbatim
// to the GoogleGenAI client (API-key mode or Vertex AI mode).
interface MonitoringGeminiConfig {
  apiKey?: string
  vertexai?: boolean
  project?: string
  location?: string
  apiVersion?: string
  posthog: PostHog
}
34
+ export class PostHogGoogleGenAI {
35
+ private readonly phClient: PostHog
36
+ private readonly client: GoogleGenAI
37
+ public models: WrappedModels
38
+
39
+ constructor(config: MonitoringGeminiConfig) {
40
+ const { posthog, ...geminiConfig } = config
41
+ this.phClient = posthog
42
+ this.client = new GoogleGenAI(geminiConfig)
43
+ this.models = new WrappedModels(this.client, this.phClient)
44
+ }
45
+ }
46
+
47
+ export class WrappedModels {
48
+ private readonly phClient: PostHog
49
+ private readonly client: GoogleGenAI
50
+
51
+ constructor(client: GoogleGenAI, phClient: PostHog) {
52
+ this.client = client
53
+ this.phClient = phClient
54
+ }
55
+
56
+ public async generateContent(params: GenerateContentRequest & MonitoringParams): Promise<GenerateContentResponse> {
57
+ const {
58
+ posthogDistinctId,
59
+ posthogTraceId,
60
+ posthogProperties,
61
+ posthogGroups,
62
+ posthogCaptureImmediate,
63
+ ...geminiParams
64
+ } = params
65
+
66
+ const traceId = posthogTraceId ?? uuidv4()
67
+ const startTime = Date.now()
68
+
69
+ try {
70
+ const response = await this.client.models.generateContent(geminiParams)
71
+ const latency = (Date.now() - startTime) / 1000
72
+
73
+ await sendEventToPosthog({
74
+ client: this.phClient,
75
+ distinctId: posthogDistinctId ?? traceId,
76
+ traceId,
77
+ model: geminiParams.model,
78
+ provider: 'gemini',
79
+ input: this.formatInput(geminiParams.contents),
80
+ output: this.formatOutput(response),
81
+ latency,
82
+ baseURL: 'https://generativelanguage.googleapis.com',
83
+ params: params as any,
84
+ httpStatus: 200,
85
+ usage: {
86
+ inputTokens: response.usageMetadata?.promptTokenCount ?? 0,
87
+ outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,
88
+ },
89
+ captureImmediate: posthogCaptureImmediate,
90
+ })
91
+
92
+ return response
93
+ } catch (error: any) {
94
+ const latency = (Date.now() - startTime) / 1000
95
+ await sendEventToPosthog({
96
+ client: this.phClient,
97
+ distinctId: posthogDistinctId ?? traceId,
98
+ traceId,
99
+ model: geminiParams.model,
100
+ provider: 'gemini',
101
+ input: this.formatInput(geminiParams.contents),
102
+ output: [],
103
+ latency,
104
+ baseURL: 'https://generativelanguage.googleapis.com',
105
+ params: params as any,
106
+ httpStatus: error?.status ?? 500,
107
+ usage: {
108
+ inputTokens: 0,
109
+ outputTokens: 0,
110
+ },
111
+ isError: true,
112
+ error: JSON.stringify(error),
113
+ captureImmediate: posthogCaptureImmediate,
114
+ })
115
+ throw error
116
+ }
117
+ }
118
+
119
+ public async *generateContentStream(
120
+ params: GenerateContentRequest & MonitoringParams
121
+ ): AsyncGenerator<any, void, unknown> {
122
+ const {
123
+ posthogDistinctId,
124
+ posthogTraceId,
125
+ posthogProperties,
126
+ posthogGroups,
127
+ posthogCaptureImmediate,
128
+ ...geminiParams
129
+ } = params
130
+
131
+ const traceId = posthogTraceId ?? uuidv4()
132
+ const startTime = Date.now()
133
+ let accumulatedContent = ''
134
+ let usage = {
135
+ inputTokens: 0,
136
+ outputTokens: 0,
137
+ }
138
+
139
+ try {
140
+ const stream = await this.client.models.generateContentStream(geminiParams)
141
+
142
+ for await (const chunk of stream) {
143
+ if (chunk.text) {
144
+ accumulatedContent += chunk.text
145
+ }
146
+ if (chunk.usageMetadata) {
147
+ usage = {
148
+ inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,
149
+ outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,
150
+ }
151
+ }
152
+ yield chunk
153
+ }
154
+
155
+ const latency = (Date.now() - startTime) / 1000
156
+ await sendEventToPosthog({
157
+ client: this.phClient,
158
+ distinctId: posthogDistinctId ?? traceId,
159
+ traceId,
160
+ model: geminiParams.model,
161
+ provider: 'gemini',
162
+ input: this.formatInput(geminiParams.contents),
163
+ output: [{ content: accumulatedContent, role: 'assistant' }],
164
+ latency,
165
+ baseURL: 'https://generativelanguage.googleapis.com',
166
+ params: params as any,
167
+ httpStatus: 200,
168
+ usage,
169
+ captureImmediate: posthogCaptureImmediate,
170
+ })
171
+ } catch (error: any) {
172
+ const latency = (Date.now() - startTime) / 1000
173
+ await sendEventToPosthog({
174
+ client: this.phClient,
175
+ distinctId: posthogDistinctId ?? traceId,
176
+ traceId,
177
+ model: geminiParams.model,
178
+ provider: 'gemini',
179
+ input: this.formatInput(geminiParams.contents),
180
+ output: [],
181
+ latency,
182
+ baseURL: 'https://generativelanguage.googleapis.com',
183
+ params: params as any,
184
+ httpStatus: error?.status ?? 500,
185
+ usage: {
186
+ inputTokens: 0,
187
+ outputTokens: 0,
188
+ },
189
+ isError: true,
190
+ error: JSON.stringify(error),
191
+ captureImmediate: posthogCaptureImmediate,
192
+ })
193
+ throw error
194
+ }
195
+ }
196
+
197
+ private formatInput(contents: any): Array<{ role: string; content: string }> {
198
+ if (typeof contents === 'string') {
199
+ return [{ role: 'user', content: contents }]
200
+ }
201
+
202
+ if (Array.isArray(contents)) {
203
+ return contents.map((item) => {
204
+ if (typeof item === 'string') {
205
+ return { role: 'user', content: item }
206
+ }
207
+ if (item && typeof item === 'object') {
208
+ if (item.text) {
209
+ return { role: item.role || 'user', content: item.text }
210
+ }
211
+ if (item.content) {
212
+ return { role: item.role || 'user', content: item.content }
213
+ }
214
+ }
215
+ return { role: 'user', content: String(item) }
216
+ })
217
+ }
218
+
219
+ if (contents && typeof contents === 'object') {
220
+ if (contents.text) {
221
+ return [{ role: 'user', content: contents.text }]
222
+ }
223
+ if (contents.content) {
224
+ return [{ role: 'user', content: contents.content }]
225
+ }
226
+ }
227
+
228
+ return [{ role: 'user', content: String(contents) }]
229
+ }
230
+
231
+ private formatOutput(response: GenerateContentResponse): Array<{ role: string; content: string }> {
232
+ if (response.text) {
233
+ return [{ role: 'assistant', content: response.text }]
234
+ }
235
+
236
+ if (response.candidates && Array.isArray(response.candidates)) {
237
+ return response.candidates.map((candidate) => {
238
+ if (candidate.content && candidate.content.parts) {
239
+ const text = candidate.content.parts
240
+ .filter((part: any) => part.text)
241
+ .map((part: any) => part.text)
242
+ .join('')
243
+ return { role: 'assistant', content: text }
244
+ }
245
+ return { role: 'assistant', content: String(candidate) }
246
+ })
247
+ }
248
+
249
+ return []
250
+ }
251
+ }
252
+
253
// Default export plus a `Gemini` alias for named imports.
export default PostHogGoogleGenAI
export { PostHogGoogleGenAI as Gemini }
package/src/index.ts CHANGED
@@ -2,10 +2,12 @@ import PostHogOpenAI from './openai'
2
2
  import PostHogAzureOpenAI from './openai/azure'
3
3
  import { wrapVercelLanguageModel } from './vercel/middleware'
4
4
  import PostHogAnthropic from './anthropic'
5
+ import PostHogGoogleGenAI from './gemini'
5
6
  import { LangChainCallbackHandler } from './langchain/callbacks'
6
7
 
7
8
  export { PostHogOpenAI as OpenAI }
8
9
  export { PostHogAzureOpenAI as AzureOpenAI }
9
10
  export { PostHogAnthropic as Anthropic }
11
+ export { PostHogGoogleGenAI as GoogleGenAI }
10
12
  export { wrapVercelLanguageModel as withTracing }
11
13
  export { LangChainCallbackHandler }
package/src/utils.ts CHANGED
@@ -66,6 +66,8 @@ export const formatResponse = (response: any, provider: string): Array<{ role: s
66
66
  return formatResponseAnthropic(response)
67
67
  } else if (provider === 'openai') {
68
68
  return formatResponseOpenAI(response)
69
+ } else if (provider === 'gemini') {
70
+ return formatResponseGemini(response)
69
71
  }
70
72
  return []
71
73
  }
@@ -97,6 +99,37 @@ export const formatResponseOpenAI = (response: any): Array<{ role: string; conte
97
99
  return output
98
100
  }
99
101
 
102
+ export const formatResponseGemini = (response: any): Array<{ role: string; content: string }> => {
103
+ const output: Array<{ role: string; content: string }> = []
104
+
105
+ if (response.text) {
106
+ output.push({
107
+ role: 'assistant',
108
+ content: response.text,
109
+ })
110
+ return output
111
+ }
112
+
113
+ if (response.candidates && Array.isArray(response.candidates)) {
114
+ for (const candidate of response.candidates) {
115
+ if (candidate.content && candidate.content.parts) {
116
+ const text = candidate.content.parts
117
+ .filter((part: any) => part.text)
118
+ .map((part: any) => part.text)
119
+ .join('')
120
+ if (text) {
121
+ output.push({
122
+ role: 'assistant',
123
+ content: text,
124
+ })
125
+ }
126
+ }
127
+ }
128
+ }
129
+
130
+ return output
131
+ }
132
+
100
133
  export const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {
101
134
  if (provider == 'anthropic') {
102
135
  const messages = params.messages || []
@@ -0,0 +1,313 @@
1
// Jest suite for the PostHog Gemini wrapper. Both the @google/genai SDK
// and the PostHog client are mocked, so no network calls are made.
import { PostHog } from 'posthog-node'
import PostHogGemini from '../src/gemini'

// Mutable fixtures reassigned in beforeEach so each test starts fresh.
let mockGeminiResponse: any = {}
let mockGeminiStreamResponse: any = {}

jest.mock('posthog-node', () => {
  return {
    PostHog: jest.fn().mockImplementation(() => {
      return {
        capture: jest.fn(),
        captureImmediate: jest.fn(),
        privacyMode: false,
      }
    }),
  }
})

jest.mock('@google/genai', () => {
  // Minimal stand-in exposing the two model methods the wrapper calls.
  class MockGoogleGenAI {
    models: any
    constructor() {
      this.models = {
        generateContent: jest.fn(),
        generateContentStream: jest.fn(),
      }
    }
  }

  return {
    GoogleGenAI: MockGoogleGenAI,
  }
})

describe('PostHogGemini - Jest test suite', () => {
  let mockPostHogClient: PostHog
  let client: PostHogGemini

  // NOTE(review): the suite is gated on GEMINI_API_KEY even though every
  // SDK call is mocked — presumably to mirror the other provider suites;
  // confirm whether the gate is still required.
  beforeAll(() => {
    if (!process.env.GEMINI_API_KEY) {
      console.warn('⚠️ Skipping Gemini tests: No GEMINI_API_KEY environment variable set')
    }
  })

  beforeEach(() => {
    // Skip all tests if no API key is present
    if (!process.env.GEMINI_API_KEY) {
      return
    }

    jest.clearAllMocks()

    // Reset the default mocks
    mockPostHogClient = new (PostHog as any)()
    client = new PostHogGemini({
      apiKey: process.env.GEMINI_API_KEY || '',
      posthog: mockPostHogClient as any,
    })

    // Some default generate content mock
    mockGeminiResponse = {
      text: 'Hello from Gemini!',
      candidates: [
        {
          content: {
            parts: [
              {
                text: 'Hello from Gemini!',
              },
            ],
          },
          finishReason: 'STOP',
        },
      ],
      usageMetadata: {
        promptTokenCount: 15,
        candidatesTokenCount: 8,
        totalTokenCount: 23,
      },
    }

    // Mock streaming response: three chunks whose usageMetadata is
    // cumulative — the wrapper keeps the counts from the last chunk.
    mockGeminiStreamResponse = [
      {
        text: 'Hello ',
        candidates: [
          {
            content: {
              parts: [{ text: 'Hello ' }],
            },
          },
        ],
        usageMetadata: {
          promptTokenCount: 15,
          candidatesTokenCount: 2,
        },
      },
      {
        text: 'from ',
        candidates: [
          {
            content: {
              parts: [{ text: 'from ' }],
            },
          },
        ],
        usageMetadata: {
          promptTokenCount: 15,
          candidatesTokenCount: 4,
        },
      },
      {
        text: 'Gemini!',
        candidates: [
          {
            content: {
              parts: [{ text: 'Gemini!' }],
            },
          },
        ],
        usageMetadata: {
          promptTokenCount: 15,
          candidatesTokenCount: 8,
        },
      },
    ]

    // Mock the generateContent method
    ;(client as any).client.models.generateContent = jest.fn().mockResolvedValue(mockGeminiResponse)

    // Mock the generateContentStream method
    ;(client as any).client.models.generateContentStream = jest.fn().mockImplementation(async function* () {
      for (const chunk of mockGeminiStreamResponse) {
        yield chunk
      }
    })
  })

  // Wrap each test with conditional skip
  const conditionalTest = process.env.GEMINI_API_KEY ? test : test.skip

  conditionalTest('basic content generation', async () => {
    const response = await client.models.generateContent({
      model: 'gemini-2.0-flash-001',
      contents: 'Hello',
      posthogDistinctId: 'test-id',
      posthogProperties: { foo: 'bar' },
    })

    expect(response).toEqual(mockGeminiResponse)
    // We expect 1 capture call
    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
    // Check the capture arguments
    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
    const { distinctId, event, properties } = captureArgs[0]

    expect(distinctId).toBe('test-id')
    expect(event).toBe('$ai_generation')
    expect(properties['$ai_provider']).toBe('gemini')
    expect(properties['$ai_model']).toBe('gemini-2.0-flash-001')
    expect(properties['$ai_input']).toEqual([{ role: 'user', content: 'Hello' }])
    expect(properties['$ai_output_choices']).toEqual([{ role: 'assistant', content: 'Hello from Gemini!' }])
    expect(properties['$ai_input_tokens']).toBe(15)
    expect(properties['$ai_output_tokens']).toBe(8)
    expect(properties['$ai_http_status']).toBe(200)
    expect(properties['foo']).toBe('bar')
    expect(typeof properties['$ai_latency']).toBe('number')
  })

  conditionalTest('streaming content generation', async () => {
    const stream = client.models.generateContentStream({
      model: 'gemini-2.0-flash-001',
      contents: 'Write a short poem',
      posthogDistinctId: 'test-id',
      posthogProperties: { foo: 'bar' },
    })

    let accumulatedText = ''
    for await (const chunk of stream) {
      if (chunk.text) {
        accumulatedText += chunk.text
      }
    }

    expect(accumulatedText).toBe('Hello from Gemini!')
    // We expect 1 capture call after streaming completes
    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)

    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
    const { distinctId, event, properties } = captureArgs[0]

    expect(distinctId).toBe('test-id')
    expect(event).toBe('$ai_generation')
    expect(properties['$ai_provider']).toBe('gemini')
    expect(properties['$ai_model']).toBe('gemini-2.0-flash-001')
    expect(properties['$ai_input']).toEqual([{ role: 'user', content: 'Write a short poem' }])
    expect(properties['$ai_output_choices']).toEqual([{ content: 'Hello from Gemini!', role: 'assistant' }])
    expect(properties['$ai_input_tokens']).toBe(15)
    expect(properties['$ai_output_tokens']).toBe(8)
    expect(properties['$ai_http_status']).toBe(200)
    expect(properties['foo']).toBe('bar')
    expect(typeof properties['$ai_latency']).toBe('number')
  })

  conditionalTest('groups', async () => {
    await client.models.generateContent({
      model: 'gemini-2.0-flash-001',
      contents: 'Hello',
      posthogDistinctId: 'test-id',
      posthogGroups: { team: 'ai-team' },
    })

    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
    const { groups } = captureArgs[0]

    expect(groups).toEqual({ team: 'ai-team' })
  })

  conditionalTest('privacy mode', async () => {
    await client.models.generateContent({
      model: 'gemini-2.0-flash-001',
      contents: 'Sensitive information',
      posthogDistinctId: 'test-id',
      posthogPrivacyMode: true,
    })

    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
    const { properties } = captureArgs[0]

    // Privacy mode nulls out input/output in the captured event
    expect(properties['$ai_input']).toBeNull()
    expect(properties['$ai_output_choices']).toBeNull()
  })

  conditionalTest('error handling', async () => {
    const error = new Error('API Error')
    ;(error as any).status = 400
    ;(client as any).client.models.generateContent = jest.fn().mockRejectedValue(error)

    await expect(
      client.models.generateContent({
        model: 'gemini-2.0-flash-001',
        contents: 'Hello',
        posthogDistinctId: 'test-id',
      })
    ).rejects.toThrow('API Error')

    // The failed call is still captured, with the error status and zero usage
    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
    const { properties } = captureArgs[0]

    expect(properties['$ai_is_error']).toBe(true)
    expect(properties['$ai_http_status']).toBe(400)
    expect(properties['$ai_input_tokens']).toBe(0)
    expect(properties['$ai_output_tokens']).toBe(0)
  })

  conditionalTest('array contents input', async () => {
    await client.models.generateContent({
      model: 'gemini-2.0-flash-001',
      contents: ['Hello', 'How are you?'],
      posthogDistinctId: 'test-id',
    })

    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
    const { properties } = captureArgs[0]

    expect(properties['$ai_input']).toEqual([
      { role: 'user', content: 'Hello' },
      { role: 'user', content: 'How are you?' },
    ])
  })

  conditionalTest('object contents input', async () => {
    await client.models.generateContent({
      model: 'gemini-2.0-flash-001',
      contents: { text: 'Hello world' },
      posthogDistinctId: 'test-id',
    })

    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
    const { properties } = captureArgs[0]

    expect(properties['$ai_input']).toEqual([{ role: 'user', content: 'Hello world' }])
  })

  conditionalTest('capture immediate', async () => {
    await client.models.generateContent({
      model: 'gemini-2.0-flash-001',
      contents: 'Hello',
      posthogDistinctId: 'test-id',
      posthogCaptureImmediate: true,
    })

    // captureImmediate replaces (not duplicates) the async capture path
    expect(mockPostHogClient.captureImmediate).toHaveBeenCalledTimes(1)
    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(0)
  })

  conditionalTest('vertex ai configuration', () => {
    const vertexClient = new PostHogGemini({
      vertexai: true,
      project: 'test-project',
      location: 'us-central1',
      posthog: mockPostHogClient as any,
    })

    expect(vertexClient).toBeInstanceOf(PostHogGemini)
    expect(vertexClient.models).toBeDefined()
  })
})