@posthog/ai 5.0.1 → 5.1.0
- package/CHANGELOG.md +4 -0
- package/lib/anthropic/index.cjs.map +1 -1
- package/lib/anthropic/index.mjs.map +1 -1
- package/lib/index.cjs +424 -18
- package/lib/index.cjs.map +1 -1
- package/lib/index.d.ts +23 -11
- package/lib/index.mjs +424 -18
- package/lib/index.mjs.map +1 -1
- package/lib/langchain/index.cjs.map +1 -1
- package/lib/langchain/index.mjs.map +1 -1
- package/lib/openai/index.cjs +222 -0
- package/lib/openai/index.cjs.map +1 -1
- package/lib/openai/index.d.ts +16 -4
- package/lib/openai/index.mjs +222 -1
- package/lib/openai/index.mjs.map +1 -1
- package/lib/vercel/index.cjs.map +1 -1
- package/lib/vercel/index.mjs.map +1 -1
- package/package.json +2 -2
- package/src/openai/azure.ts +283 -29
- package/src/openai/index.ts +276 -2
- package/src/utils.ts +3 -2
- package/tests/openai.test.ts +93 -48
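The headline change in 5.1.0 is instrumentation for OpenAI's Responses API: `PostHogOpenAI` now exposes a wrapped `responses` namespace whose `create` and `parse` methods accept the same `posthog*` monitoring fields as the wrapped chat completions and report usage to PostHog (the new test below asserts a `$ai_generation` event). A minimal usage sketch follows; the keys, host, and model name are illustrative placeholders, not values from this diff.

```ts
import { PostHog } from 'posthog-node'
import { OpenAI } from '@posthog/ai'

// Placeholder credentials and host, for illustration only.
const phClient = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' })
const client = new OpenAI({ apiKey: '<openai_api_key>', posthog: phClient })

// The posthog* fields are stripped from the payload before it reaches
// OpenAI and are used to attribute the captured analytics event.
const response = await client.responses.create({
  model: 'gpt-4.1-mini', // illustrative model name
  input: 'Write a haiku about telemetry.',
  posthogDistinctId: 'user-123',
  posthogTraceId: 'trace-456',
  posthogProperties: { feature: 'haiku' },
})
```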
package/src/openai/index.ts
CHANGED
@@ -2,14 +2,18 @@ import OpenAIOrignal, { ClientOptions } from 'openai'
 import { PostHog } from 'posthog-node'
 import { v4 as uuidv4 } from 'uuid'
 import { formatResponseOpenAI, MonitoringParams, sendEventToPosthog } from '../utils'
+import type { APIPromise } from 'openai'
+import type { Stream } from 'openai/streaming'
+import type { ParsedResponse } from 'openai/resources/responses/responses'
 
 type ChatCompletion = OpenAIOrignal.ChatCompletion
 type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk
 type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams
 type ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming
 type ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming
-
-
+type ResponsesCreateParamsBase = OpenAIOrignal.Responses.ResponseCreateParams
+type ResponsesCreateParamsNonStreaming = OpenAIOrignal.Responses.ResponseCreateParamsNonStreaming
+type ResponsesCreateParamsStreaming = OpenAIOrignal.Responses.ResponseCreateParamsStreaming
 
 interface MonitoringOpenAIConfig extends ClientOptions {
   apiKey: string
@@ -17,15 +21,19 @@ interface MonitoringOpenAIConfig extends ClientOptions {
   baseURL?: string
 }
 
+type RequestOptions = Record<string, any>
+
 export class PostHogOpenAI extends OpenAIOrignal {
   private readonly phClient: PostHog
   public chat: WrappedChat
+  public responses: WrappedResponses
 
   constructor(config: MonitoringOpenAIConfig) {
     const { posthog, ...openAIConfig } = config
     super(openAIConfig)
     this.phClient = posthog
     this.chat = new WrappedChat(this, this.phClient)
+    this.responses = new WrappedResponses(this, this.phClient)
   }
 }
 
@@ -215,6 +223,272 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
   }
 }
 
+export class WrappedResponses extends OpenAIOrignal.Responses {
+  private readonly phClient: PostHog
+
+  constructor(client: OpenAIOrignal, phClient: PostHog) {
+    super(client)
+    this.phClient = phClient
+  }
+
+  // --- Overload #1: Non-streaming
+  public create(
+    body: ResponsesCreateParamsNonStreaming & MonitoringParams,
+    options?: RequestOptions
+  ): APIPromise<OpenAIOrignal.Responses.Response>
+
+  // --- Overload #2: Streaming
+  public create(
+    body: ResponsesCreateParamsStreaming & MonitoringParams,
+    options?: RequestOptions
+  ): APIPromise<Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>
+
+  // --- Overload #3: Generic base
+  public create(
+    body: ResponsesCreateParamsBase & MonitoringParams,
+    options?: RequestOptions
+  ): APIPromise<OpenAIOrignal.Responses.Response | Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>
+
+  // --- Implementation Signature
+  public create(
+    body: ResponsesCreateParamsBase & MonitoringParams,
+    options?: RequestOptions
+  ): APIPromise<OpenAIOrignal.Responses.Response | Stream<OpenAIOrignal.Responses.ResponseStreamEvent>> {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body
+
+    const traceId = posthogTraceId ?? uuidv4()
+    const startTime = Date.now()
+
+    const parentPromise = super.create(openAIParams, options)
+
+    if (openAIParams.stream) {
+      return parentPromise.then((value) => {
+        if ('tee' in value && typeof (value as any).tee === 'function') {
+          const [stream1, stream2] = (value as any).tee()
+          ;(async () => {
+            try {
+              let finalContent: any[] = []
+              let usage: {
+                inputTokens?: number
+                outputTokens?: number
+                reasoningTokens?: number
+                cacheReadInputTokens?: number
+              } = {
+                inputTokens: 0,
+                outputTokens: 0,
+              }
+
+              for await (const chunk of stream1) {
+                if (
+                  chunk.type === 'response.completed' &&
+                  'response' in chunk &&
+                  chunk.response?.output &&
+                  chunk.response.output.length > 0
+                ) {
+                  finalContent = chunk.response.output
+                }
+                if ('response' in chunk && chunk.response?.usage) {
+                  usage = {
+                    inputTokens: chunk.response.usage.input_tokens ?? 0,
+                    outputTokens: chunk.response.usage.output_tokens ?? 0,
+                    reasoningTokens: chunk.response.usage.output_tokens_details?.reasoning_tokens ?? 0,
+                    cacheReadInputTokens: chunk.response.usage.input_tokens_details?.cached_tokens ?? 0,
+                  }
+                }
+              }
+
+              const latency = (Date.now() - startTime) / 1000
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId ?? traceId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'openai',
+                input: openAIParams.input,
+                output: finalContent,
+                latency,
+                baseURL: (this as any).baseURL ?? '',
+                params: body,
+                httpStatus: 200,
+                usage,
+                captureImmediate: posthogCaptureImmediate,
+              })
+            } catch (error: any) {
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId ?? traceId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'openai',
+                input: openAIParams.input,
+                output: [],
+                latency: 0,
+                baseURL: (this as any).baseURL ?? '',
+                params: body,
+                httpStatus: error?.status ? error.status : 500,
+                usage: { inputTokens: 0, outputTokens: 0 },
+                isError: true,
+                error: JSON.stringify(error),
+                captureImmediate: posthogCaptureImmediate,
+              })
+            }
+          })()
+
+          return stream2
+        }
+        return value
+      }) as APIPromise<Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>
+    } else {
+      const wrappedPromise = parentPromise.then(
+        async (result) => {
+          if ('output' in result) {
+            const latency = (Date.now() - startTime) / 1000
+            await sendEventToPosthog({
+              client: this.phClient,
+              distinctId: posthogDistinctId ?? traceId,
+              traceId,
+              model: openAIParams.model,
+              provider: 'openai',
+              input: openAIParams.input,
+              output: result.output,
+              latency,
+              baseURL: (this as any).baseURL ?? '',
+              params: body,
+              httpStatus: 200,
+              usage: {
+                inputTokens: result.usage?.input_tokens ?? 0,
+                outputTokens: result.usage?.output_tokens ?? 0,
+                reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+                cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0,
+              },
+              captureImmediate: posthogCaptureImmediate,
+            })
+          }
+          return result
+        },
+        async (error: any) => {
+          await sendEventToPosthog({
+            client: this.phClient,
+            distinctId: posthogDistinctId ?? traceId,
+            traceId,
+            model: openAIParams.model,
+            provider: 'openai',
+            input: openAIParams.input,
+            output: [],
+            latency: 0,
+            baseURL: (this as any).baseURL ?? '',
+            params: body,
+            httpStatus: error?.status ? error.status : 500,
+            usage: {
+              inputTokens: 0,
+              outputTokens: 0,
+            },
+            isError: true,
+            error: JSON.stringify(error),
+            captureImmediate: posthogCaptureImmediate,
+          })
+          throw error
+        }
+      ) as APIPromise<OpenAIOrignal.Responses.Response>
+
+      return wrappedPromise
+    }
+  }
+
+  public parse<Params extends ResponsesCreateParamsBase, ParsedT = any>(
+    body: Params & MonitoringParams,
+    options?: RequestOptions
+  ): APIPromise<ParsedResponse<ParsedT>> {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body
+
+    const traceId = posthogTraceId ?? uuidv4()
+    const startTime = Date.now()
+
+    // Create a temporary instance that bypasses our wrapped create method
+    const originalCreate = super.create.bind(this)
+    const originalSelf = this as any
+    const tempCreate = originalSelf.create
+    originalSelf.create = originalCreate
+
+    try {
+      const parentPromise = super.parse(openAIParams, options)
+
+      const wrappedPromise = parentPromise.then(
+        async (result) => {
+          const latency = (Date.now() - startTime) / 1000
+          await sendEventToPosthog({
+            client: this.phClient,
+            distinctId: posthogDistinctId ?? traceId,
+            traceId,
+            model: openAIParams.model,
+            provider: 'openai',
+            input: openAIParams.input,
+            output: result.output,
+            latency,
+            baseURL: (this as any).baseURL ?? '',
+            params: body,
+            httpStatus: 200,
+            usage: {
+              inputTokens: result.usage?.input_tokens ?? 0,
+              outputTokens: result.usage?.output_tokens ?? 0,
+              reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+              cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0,
+            },
+            captureImmediate: posthogCaptureImmediate,
+          })
+          return result
+        },
+        async (error: any) => {
+          await sendEventToPosthog({
+            client: this.phClient,
+            distinctId: posthogDistinctId ?? traceId,
+            traceId,
+            model: openAIParams.model,
+            provider: 'openai',
+            input: openAIParams.input,
+            output: [],
+            latency: 0,
+            baseURL: (this as any).baseURL ?? '',
+            params: body,
+            httpStatus: error?.status ? error.status : 500,
+            usage: {
+              inputTokens: 0,
+              outputTokens: 0,
+            },
+            isError: true,
+            error: JSON.stringify(error),
+            captureImmediate: posthogCaptureImmediate,
+          })
+          throw error
+        }
+      )
+
+      return wrappedPromise as APIPromise<ParsedResponse<ParsedT>>
+    } finally {
+      // Restore our wrapped create method
+      originalSelf.create = tempCreate
+    }
+  }
+}
+
 export default PostHogOpenAI
 
 export { PostHogOpenAI as OpenAI }
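Note the streaming branch above: instead of consuming the caller's stream, the wrapper tees it, drains one branch in the background to accumulate the `response.completed` output and usage, and returns the other branch untouched. A stripped-down sketch of that pattern, with illustrative names (this is not the package's code):

```ts
// Generic shape of the tee pattern: one branch feeds monitoring,
// the other is handed back to the caller.
type TeeableStream<T> = AsyncIterable<T> & { tee(): [TeeableStream<T>, TeeableStream<T>] }

function observeStream<T>(
  stream: TeeableStream<T>,
  onComplete: (chunks: T[]) => Promise<void>
): TeeableStream<T> {
  const [monitored, passthrough] = stream.tee()
  ;(async () => {
    const chunks: T[] = []
    for await (const chunk of monitored) {
      chunks.push(chunk)
    }
    await onComplete(chunks)
  })().catch(() => {
    // Swallow monitoring failures so the caller's branch is never affected.
  })
  return passthrough
}
```

The OpenAI SDK's `Stream` exposes exactly such a `tee()` method, which is why the wrapper feature-detects it with `'tee' in value` before splitting.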
package/src/utils.ts
CHANGED
@@ -5,6 +5,7 @@ import AnthropicOriginal from '@anthropic-ai/sdk'
 
 type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams
 type MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams
+type ResponseCreateParams = OpenAIOrignal.Responses.ResponseCreateParams
 
 // limit large outputs by truncating to 200kb (approx 200k bytes)
 export const MAX_OUTPUT_SIZE = 200000
@@ -28,7 +29,7 @@ export interface CostOverride {
 }
 
 export const getModelParams = (
-  params: ((ChatCompletionCreateParamsBase | MessageCreateParams) & MonitoringParams) | null
+  params: ((ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams) | null
 ): Record<string, any> => {
   if (!params) {
     return {}
@@ -178,7 +179,7 @@ export type SendEventToPosthogParams = {
     cacheReadInputTokens?: any
     cacheCreationInputTokens?: any
   }
-  params: (ChatCompletionCreateParamsBase | MessageCreateParams) & MonitoringParams
+  params: (ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams
   isError?: boolean
   error?: string
   tools?: any
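The `utils.ts` change is pure type plumbing: the params union accepted by `getModelParams` and `SendEventToPosthogParams` is widened so Responses-API payloads flow through the same event pipeline as chat-completion and Anthropic message params. A small sketch of what now type-checks (illustrative values; `getModelParams` is an internal helper, imported here by source path as the tests do):

```ts
import { getModelParams } from '../src/utils'

// A Responses-API payload now satisfies the widened union, so model
// parameters are extracted uniformly across all three call shapes.
const params = getModelParams({
  model: 'gpt-4.1-mini', // illustrative model name
  input: 'Hello',
  stream: false,
})
```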
package/tests/openai.test.ts
CHANGED
@@ -3,7 +3,7 @@ import PostHogOpenAI from '../src/openai'
 import openaiModule from 'openai'
 
 let mockOpenAiChatResponse: any = {}
-let mockOpenAiEmbeddingResponse: any = {}
+let mockOpenAiParsedResponse: any = {}
 
 jest.mock('posthog-node', () => {
   return {
@@ -11,7 +11,7 @@ jest.mock('posthog-node', () => {
       return {
         capture: jest.fn(),
         captureImmediate: jest.fn(),
-
+        privacy_mode: false,
       }
     }),
   }
@@ -36,10 +36,20 @@ jest.mock('openai', () => {
     static Completions = MockCompletions
   }
 
+  // Mock Responses class
+  class MockResponses {
+    constructor() {}
+    create = jest.fn()
+  }
+
+  // Add parse to prototype instead of instance
+  ;(MockResponses.prototype as any).parse = jest.fn()
+
   // Mock OpenAI class
   class MockOpenAI {
     chat: any
     embeddings: any
+    responses: any
     constructor() {
       this.chat = {
         completions: {
@@ -49,8 +59,12 @@ jest.mock('openai', () => {
       this.embeddings = {
         create: jest.fn(),
       }
+      this.responses = {
+        create: jest.fn(),
+      }
     }
     static Chat = MockChat
+    static Responses = MockResponses
   }
 
   return {
@@ -58,6 +72,7 @@ jest.mock('openai', () => {
     default: MockOpenAI,
     OpenAI: MockOpenAI,
     Chat: MockChat,
+    Responses: MockResponses,
   }
 })
 
@@ -109,25 +124,39 @@ describe('PostHogOpenAI - Jest test suite', () => {
       },
     }
 
-    // Some default embedding response mock
-
-
+    // Some default parsed response mock
+    mockOpenAiParsedResponse = {
+      id: 'test-parsed-response-id',
+      model: 'gpt-4o-2024-08-06',
+      object: 'response',
+      created_at: Date.now(),
+      status: 'completed',
+      output: [
         {
-
-
-          embedding: [0.1, 0.2, 0.3],
+          type: 'output_text',
+          text: '{"name": "Science Fair", "date": "Friday", "participants": ["Alice", "Bob"]}',
         },
       ],
-
-
+      output_parsed: {
+        name: 'Science Fair',
+        date: 'Friday',
+        participants: ['Alice', 'Bob'],
+      },
       usage: {
-
-
+        input_tokens: 15,
+        output_tokens: 20,
+        input_tokens_details: { cached_tokens: 0 },
+        output_tokens_details: { reasoning_tokens: 5 },
+        total_tokens: 35,
       },
     }
 
    const ChatMock: any = openaiModule.Chat
    ;(ChatMock.Completions as any).prototype.create = jest.fn().mockResolvedValue(mockOpenAiChatResponse)
+
+    // Mock responses.parse using the same pattern as chat completions
+    const ResponsesMock: any = openaiModule.Responses
+    ResponsesMock.prototype.parse.mockResolvedValue(mockOpenAiParsedResponse)
  })
 
  // Wrap each test with conditional skip
@@ -163,39 +192,6 @@ describe('PostHogOpenAI - Jest test suite', () => {
     expect(typeof properties['$ai_latency']).toBe('number')
   })
 
-  conditionalTest('embeddings', async () => {
-    // Since embeddings calls are not implemented in the snippet by default,
-    // we'll demonstrate how you *would* do it if WrappedEmbeddings is used.
-    // Let's override the internal embeddings to return our mock.
-    const mockEmbeddingsCreate = jest.fn().mockResolvedValue(mockOpenAiEmbeddingResponse)
-    ;(client as any).embeddings = {
-      create: mockEmbeddingsCreate,
-    }
-
-    const response = await (client as any).embeddings.create({
-      model: 'text-embedding-3-small',
-      input: 'Hello world',
-      posthog_distinct_id: 'test-id',
-      posthog_properties: { foo: 'bar' },
-    })
-
-    expect(response).toEqual(mockOpenAiEmbeddingResponse)
-    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
-
-    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
-    const { distinctId, event, properties } = captureArgs[0]
-
-    expect(distinctId).toBe('test-id')
-    expect(event).toBe('$ai_embedding')
-    expect(properties['$ai_provider']).toBe('openai')
-    expect(properties['$ai_model']).toBe('text-embedding-3-small')
-    expect(properties['$ai_input']).toBe('Hello world')
-    expect(properties['$ai_input_tokens']).toBe(10)
-    expect(properties['$ai_http_status']).toBe(200)
-    expect(properties['foo']).toBe('bar')
-    expect(typeof properties['$ai_latency']).toBe('number')
-  })
-
   conditionalTest('groups', async () => {
     await client.chat.completions.create({
       model: 'gpt-4',
@@ -269,9 +265,6 @@ describe('PostHogOpenAI - Jest test suite', () => {
       max_completion_tokens: 100,
       stream: false,
     })
-    expect(properties['$ai_temperature']).toBe(0.5)
-    expect(properties['$ai_max_tokens']).toBe(100)
-    expect(properties['$ai_stream']).toBe(false)
     expect(properties['foo']).toBe('bar')
   })
 
@@ -324,4 +317,56 @@ describe('PostHogOpenAI - Jest test suite', () => {
     expect(mockPostHogClient.captureImmediate).toHaveBeenCalledTimes(1)
     expect(mockPostHogClient.capture).toHaveBeenCalledTimes(0)
   })
+
+  conditionalTest('responses parse', async () => {
+    const response = await client.responses.parse({
+      model: 'gpt-4o-2024-08-06',
+      input: [
+        { role: 'system', content: 'Extract the event information.' },
+        { role: 'user', content: 'Alice and Bob are going to a science fair on Friday.' },
+      ],
+      text: {
+        format: {
+          type: 'json_object',
+          json_schema: {
+            name: 'event',
+            schema: {
+              type: 'object',
+              properties: {
+                name: { type: 'string' },
+                date: { type: 'string' },
+                participants: { type: 'array', items: { type: 'string' } },
+              },
+              required: ['name', 'date', 'participants'],
+            },
+          },
+        },
+      },
+      posthogDistinctId: 'test-id',
+      posthogProperties: { foo: 'bar' },
+    })
+
+    expect(response).toEqual(mockOpenAiParsedResponse)
+    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
+
+    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
+    const { distinctId, event, properties } = captureArgs[0]
+
+    expect(distinctId).toBe('test-id')
+    expect(event).toBe('$ai_generation')
+    expect(properties['$ai_provider']).toBe('openai')
+    expect(properties['$ai_model']).toBe('gpt-4o-2024-08-06')
+    expect(properties['$ai_input']).toEqual([
+      { role: 'system', content: 'Extract the event information.' },
+      { role: 'user', content: 'Alice and Bob are going to a science fair on Friday.' },
+    ])
+    expect(properties['$ai_output_choices']).toEqual(mockOpenAiParsedResponse.output)
+    expect(properties['$ai_input_tokens']).toBe(15)
+    expect(properties['$ai_output_tokens']).toBe(20)
+    expect(properties['$ai_reasoning_tokens']).toBe(5)
+    expect(properties['$ai_cache_read_input_tokens']).toBeUndefined()
+    expect(properties['$ai_http_status']).toBe(200)
+    expect(properties['foo']).toBe('bar')
+    expect(typeof properties['$ai_latency']).toBe('number')
+  })
 })