@posthog/ai 5.0.1 → 5.2.0

This diff compares publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
@@ -3,7 +3,7 @@ import PostHogOpenAI from '../src/openai'
 import openaiModule from 'openai'
 
 let mockOpenAiChatResponse: any = {}
-let mockOpenAiEmbeddingResponse: any = {}
+let mockOpenAiParsedResponse: any = {}
 
 jest.mock('posthog-node', () => {
   return {
@@ -11,7 +11,7 @@ jest.mock('posthog-node', () => {
       return {
         capture: jest.fn(),
         captureImmediate: jest.fn(),
-        privacyMode: false,
+        privacy_mode: false,
       }
     }),
  }
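
Note the rename in the mocked posthog-node client: `privacyMode` becomes `privacy_mode`, presumably to match the property name the wrapper now reads off the PostHog client. The snippet below is a hypothetical illustration of that kind of check, not code from this package or this diff; it only assumes that a truthy privacy flag makes the wrapper withhold prompt and completion payloads from the captured event.

// Hypothetical illustration only; the wrapper's real logic is not shown in this diff.
type PostHogClientLike = {
  privacy_mode?: boolean
  capture: (msg: { distinctId: string; event: string; properties: Record<string, unknown> }) => void
}

function captureGeneration(ph: PostHogClientLike, distinctId: string, input: unknown, output: unknown) {
  ph.capture({
    distinctId,
    event: '$ai_generation',
    properties: {
      // Assumed behavior: with privacy mode on, content is nulled out rather than sent.
      $ai_input: ph.privacy_mode ? null : input,
      $ai_output_choices: ph.privacy_mode ? null : output,
    },
  })
}
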
@@ -36,10 +36,20 @@ jest.mock('openai', () => {
     static Completions = MockCompletions
   }
 
+  // Mock Responses class
+  class MockResponses {
+    constructor() {}
+    create = jest.fn()
+  }
+
+  // Add parse to prototype instead of instance
+  ;(MockResponses.prototype as any).parse = jest.fn()
+
   // Mock OpenAI class
   class MockOpenAI {
     chat: any
     embeddings: any
+    responses: any
     constructor() {
       this.chat = {
         completions: {
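
The new `MockResponses` class keeps `create` as an instance field but attaches `parse` to the prototype. That matters for the setup later in this diff: `beforeEach` configures the mock through `openaiModule.Responses.prototype.parse.mockResolvedValue(...)`, and a prototype method is shared by every instance (and subclass) created from the mocked class, whereas a class field is a fresh function per instance. A minimal standalone sketch of the difference, using hypothetical class names:

import { jest } from '@jest/globals'

class FieldStyle {
  // A class field is re-created for each instance, so a mock configured on
  // FieldStyle.prototype after construction is not the function instances call.
  create = jest.fn()
}

class ProtoStyle {}
// One shared mock on the prototype; every instance (and subclass) sees it.
;(ProtoStyle.prototype as any).parse = jest.fn()

;(ProtoStyle.prototype as any).parse.mockResolvedValue({ status: 'completed' })
const a = new ProtoStyle() as any
const b = new ProtoStyle() as any
// Both a.parse() and b.parse() resolve to { status: 'completed' },
// which is what lets one line in beforeEach drive every wrapped client.
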
@@ -49,8 +59,12 @@ jest.mock('openai', () => {
       this.embeddings = {
         create: jest.fn(),
       }
+      this.responses = {
+        create: jest.fn(),
+      }
     }
     static Chat = MockChat
+    static Responses = MockResponses
   }
 
   return {
@@ -58,6 +72,7 @@ jest.mock('openai', () => {
     default: MockOpenAI,
     OpenAI: MockOpenAI,
     Chat: MockChat,
+    Responses: MockResponses,
   }
 })
 
@@ -109,25 +124,39 @@ describe('PostHogOpenAI - Jest test suite', () => {
       },
     }
 
-    // Some default embedding mock
-    mockOpenAiEmbeddingResponse = {
-      data: [
+    // Some default parsed response mock
+    mockOpenAiParsedResponse = {
+      id: 'test-parsed-response-id',
+      model: 'gpt-4o-2024-08-06',
+      object: 'response',
+      created_at: Date.now(),
+      status: 'completed',
+      output: [
         {
-          object: 'embedding',
-          index: 0,
-          embedding: [0.1, 0.2, 0.3],
+          type: 'output_text',
+          text: '{"name": "Science Fair", "date": "Friday", "participants": ["Alice", "Bob"]}',
         },
       ],
-      model: 'text-embedding-3-small',
-      object: 'list',
+      output_parsed: {
+        name: 'Science Fair',
+        date: 'Friday',
+        participants: ['Alice', 'Bob'],
+      },
       usage: {
-        prompt_tokens: 10,
-        total_tokens: 10,
+        input_tokens: 15,
+        output_tokens: 20,
+        input_tokens_details: { cached_tokens: 0 },
+        output_tokens_details: { reasoning_tokens: 5 },
+        total_tokens: 35,
       },
     }
 
    const ChatMock: any = openaiModule.Chat
    ;(ChatMock.Completions as any).prototype.create = jest.fn().mockResolvedValue(mockOpenAiChatResponse)
+
+    // Mock responses.parse using the same pattern as chat completions
+    const ResponsesMock: any = openaiModule.Responses
+    ResponsesMock.prototype.parse.mockResolvedValue(mockOpenAiParsedResponse)
   })
 
   // Wrap each test with conditional skip
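
The parsed-response mock mirrors the Responses API usage block rather than the chat-completions one (`input_tokens`/`output_tokens` plus the `*_tokens_details` objects), and the new test later in this diff expects those fields to surface as `$ai_input_tokens`, `$ai_output_tokens`, and `$ai_reasoning_tokens`, with `$ai_cache_read_input_tokens` left undefined when `cached_tokens` is 0. The helper below is a hypothetical sketch of that mapping, written only to make the correspondence explicit; it is not the package's implementation.

// Hypothetical mapping helper, not part of @posthog/ai.
type ResponsesUsage = {
  input_tokens: number
  output_tokens: number
  input_tokens_details?: { cached_tokens?: number }
  output_tokens_details?: { reasoning_tokens?: number }
}

function usageToAiProperties(usage: ResponsesUsage): Record<string, number | undefined> {
  return {
    $ai_input_tokens: usage.input_tokens, // 15 in the mock
    $ai_output_tokens: usage.output_tokens, // 20 in the mock
    $ai_reasoning_tokens: usage.output_tokens_details?.reasoning_tokens, // 5 in the mock
    // Assumed: a zero cached_tokens count is reported as undefined,
    // matching the toBeUndefined() assertion in the new test.
    $ai_cache_read_input_tokens: usage.input_tokens_details?.cached_tokens || undefined,
  }
}
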
@@ -163,39 +192,6 @@ describe('PostHogOpenAI - Jest test suite', () => {
     expect(typeof properties['$ai_latency']).toBe('number')
   })
 
-  conditionalTest('embeddings', async () => {
-    // Since embeddings calls are not implemented in the snippet by default,
-    // we'll demonstrate how you *would* do it if WrappedEmbeddings is used.
-    // Let's override the internal embeddings to return our mock.
-    const mockEmbeddingsCreate = jest.fn().mockResolvedValue(mockOpenAiEmbeddingResponse)
-    ;(client as any).embeddings = {
-      create: mockEmbeddingsCreate,
-    }
-
-    const response = await (client as any).embeddings.create({
-      model: 'text-embedding-3-small',
-      input: 'Hello world',
-      posthog_distinct_id: 'test-id',
-      posthog_properties: { foo: 'bar' },
-    })
-
-    expect(response).toEqual(mockOpenAiEmbeddingResponse)
-    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
-
-    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
-    const { distinctId, event, properties } = captureArgs[0]
-
-    expect(distinctId).toBe('test-id')
-    expect(event).toBe('$ai_embedding')
-    expect(properties['$ai_provider']).toBe('openai')
-    expect(properties['$ai_model']).toBe('text-embedding-3-small')
-    expect(properties['$ai_input']).toBe('Hello world')
-    expect(properties['$ai_input_tokens']).toBe(10)
-    expect(properties['$ai_http_status']).toBe(200)
-    expect(properties['foo']).toBe('bar')
-    expect(typeof properties['$ai_latency']).toBe('number')
-  })
-
   conditionalTest('groups', async () => {
     await client.chat.completions.create({
       model: 'gpt-4',
@@ -269,9 +265,6 @@ describe('PostHogOpenAI - Jest test suite', () => {
       max_completion_tokens: 100,
       stream: false,
     })
-    expect(properties['$ai_temperature']).toBe(0.5)
-    expect(properties['$ai_max_tokens']).toBe(100)
-    expect(properties['$ai_stream']).toBe(false)
     expect(properties['foo']).toBe('bar')
   })
 
@@ -324,4 +317,87 @@ describe('PostHogOpenAI - Jest test suite', () => {
     expect(mockPostHogClient.captureImmediate).toHaveBeenCalledTimes(1)
     expect(mockPostHogClient.capture).toHaveBeenCalledTimes(0)
   })
+
+  conditionalTest('responses parse', async () => {
+    const response = await client.responses.parse({
+      model: 'gpt-4o-2024-08-06',
+      input: [
+        { role: 'system', content: 'Extract the event information.' },
+        { role: 'user', content: 'Alice and Bob are going to a science fair on Friday.' },
+      ],
+      text: {
+        format: {
+          type: 'json_object',
+          json_schema: {
+            name: 'event',
+            schema: {
+              type: 'object',
+              properties: {
+                name: { type: 'string' },
+                date: { type: 'string' },
+                participants: { type: 'array', items: { type: 'string' } },
+              },
+              required: ['name', 'date', 'participants'],
+            },
+          },
+        },
+      },
+      posthogDistinctId: 'test-id',
+      posthogProperties: { foo: 'bar' },
+    })
+
+    expect(response).toEqual(mockOpenAiParsedResponse)
+    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
+
+    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
+    const { distinctId, event, properties } = captureArgs[0]
+
+    expect(distinctId).toBe('test-id')
+    expect(event).toBe('$ai_generation')
+    expect(properties['$ai_provider']).toBe('openai')
+    expect(properties['$ai_model']).toBe('gpt-4o-2024-08-06')
+    expect(properties['$ai_input']).toEqual([
+      { role: 'system', content: 'Extract the event information.' },
+      { role: 'user', content: 'Alice and Bob are going to a science fair on Friday.' },
+    ])
+    expect(properties['$ai_output_choices']).toEqual(mockOpenAiParsedResponse.output)
+    expect(properties['$ai_input_tokens']).toBe(15)
+    expect(properties['$ai_output_tokens']).toBe(20)
+    expect(properties['$ai_reasoning_tokens']).toBe(5)
+    expect(properties['$ai_cache_read_input_tokens']).toBeUndefined()
+    expect(properties['$ai_http_status']).toBe(200)
+    expect(properties['foo']).toBe('bar')
+    expect(typeof properties['$ai_latency']).toBe('number')
+  })
+
+  conditionalTest('anonymous user - $process_person_profile set to false', async () => {
+    await client.chat.completions.create({
+      model: 'gpt-4',
+      messages: [{ role: 'user', content: 'Hello' }],
+      posthogTraceId: 'trace-123',
+    })
+
+    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
+    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
+    const { distinctId, properties } = captureArgs[0]
+
+    expect(distinctId).toBe('trace-123')
+    expect(properties['$process_person_profile']).toBe(false)
+  })
+
+  conditionalTest('identified user - $process_person_profile not set', async () => {
+    await client.chat.completions.create({
+      model: 'gpt-4',
+      messages: [{ role: 'user', content: 'Hello' }],
+      posthogDistinctId: 'user-456',
+      posthogTraceId: 'trace-123',
+    })
+
+    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
+    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
+    const { distinctId, properties } = captureArgs[0]
+
+    expect(distinctId).toBe('user-456')
+    expect(properties['$process_person_profile']).toBeUndefined()
+  })
 })
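
Taken together, the new tests pin down the surface added in this release: `client.responses.parse(...)` accepts the same `posthogDistinctId`, `posthogTraceId`, and `posthogProperties` options as `chat.completions.create`, captures an `$ai_generation` event, and, when no distinct ID is given, falls back to the trace ID and sets `$process_person_profile: false`. A minimal usage sketch follows; the wrapper construction is assumed (it is not shown in this diff), and the call shape mirrors the new 'responses parse' test above.

import PostHogOpenAI from '../src/openai'
import { PostHog } from 'posthog-node'

async function main() {
  const posthog = new PostHog('<ph_project_api_key>')
  // Assumed constructor options; consult the package README for the real signature.
  const client = new PostHogOpenAI({ apiKey: process.env.OPENAI_API_KEY ?? '', posthog })

  const parsed = await client.responses.parse({
    model: 'gpt-4o-2024-08-06',
    input: [
      { role: 'system', content: 'Extract the event information.' },
      { role: 'user', content: 'Alice and Bob are going to a science fair on Friday.' },
    ],
    // Abbreviated; the full json_schema is spelled out in the test above.
    text: { format: { type: 'json_object', json_schema: { name: 'event', schema: { type: 'object' } } } },
    // Identified user: the capture uses this distinct ID and leaves
    // $process_person_profile unset. Omit it to capture against the
    // trace ID with $process_person_profile set to false.
    posthogDistinctId: 'user-456',
    posthogTraceId: 'trace-123',
    posthogProperties: { foo: 'bar' },
  })

  console.log(parsed.output_parsed)
  await posthog.shutdown()
}

main()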