@posthog/ai 5.2.2 → 6.0.0
- package/LICENSE +245 -0
- package/{lib → dist}/anthropic/index.cjs +44 -17
- package/dist/anthropic/index.cjs.map +1 -0
- package/{lib → dist}/anthropic/index.mjs +41 -10
- package/dist/anthropic/index.mjs.map +1 -0
- package/{lib → dist}/gemini/index.cjs +68 -26
- package/dist/gemini/index.cjs.map +1 -0
- package/{lib → dist}/gemini/index.d.ts +0 -1
- package/{lib → dist}/gemini/index.mjs +67 -25
- package/dist/gemini/index.mjs.map +1 -0
- package/{lib → dist}/index.cjs +875 -601
- package/dist/index.cjs.map +1 -0
- package/{lib → dist}/index.d.ts +3 -3
- package/{lib → dist}/index.mjs +859 -579
- package/dist/index.mjs.map +1 -0
- package/{lib → dist}/langchain/index.cjs +178 -118
- package/dist/langchain/index.cjs.map +1 -0
- package/{lib → dist}/langchain/index.d.ts +1 -0
- package/{lib → dist}/langchain/index.mjs +175 -112
- package/dist/langchain/index.mjs.map +1 -0
- package/{lib → dist}/openai/index.cjs +113 -6
- package/dist/openai/index.cjs.map +1 -0
- package/{lib → dist}/openai/index.mjs +112 -5
- package/dist/openai/index.mjs.map +1 -0
- package/{lib → dist}/vercel/index.cjs +117 -82
- package/dist/vercel/index.cjs.map +1 -0
- package/{lib → dist}/vercel/index.d.ts +2 -2
- package/{lib → dist}/vercel/index.mjs +118 -81
- package/dist/vercel/index.mjs.map +1 -0
- package/package.json +45 -35
- package/CHANGELOG.md +0 -89
- package/index.ts +0 -1
- package/lib/anthropic/index.cjs.map +0 -1
- package/lib/anthropic/index.mjs.map +0 -1
- package/lib/gemini/index.cjs.map +0 -1
- package/lib/gemini/index.mjs.map +0 -1
- package/lib/index.cjs.map +0 -1
- package/lib/index.mjs.map +0 -1
- package/lib/langchain/index.cjs.map +0 -1
- package/lib/langchain/index.mjs.map +0 -1
- package/lib/openai/index.cjs.map +0 -1
- package/lib/openai/index.mjs.map +0 -1
- package/lib/vercel/index.cjs.map +0 -1
- package/lib/vercel/index.mjs.map +0 -1
- package/src/anthropic/index.ts +0 -211
- package/src/gemini/index.ts +0 -254
- package/src/index.ts +0 -13
- package/src/langchain/callbacks.ts +0 -640
- package/src/langchain/index.ts +0 -1
- package/src/openai/azure.ts +0 -481
- package/src/openai/index.ts +0 -498
- package/src/utils.ts +0 -287
- package/src/vercel/index.ts +0 -1
- package/src/vercel/middleware.ts +0 -393
- package/tests/callbacks.test.ts +0 -48
- package/tests/gemini.test.ts +0 -344
- package/tests/openai.test.ts +0 -403
- package/tsconfig.json +0 -10
- package/{lib → dist}/anthropic/index.d.ts +0 -0
- package/{lib → dist}/openai/index.d.ts +0 -0
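Taken together, the listing shows the packaging changes behind the major version bump: compiled output moves from lib/ to dist/ and now ships with source maps, a LICENSE file is added, and the TypeScript sources, tests, and CHANGELOG are no longer published. Consumers using the package's documented entry points should be unaffected; only code that deep-imported compiled files by path would need updating. A hypothetical sketch (these deep-import paths are illustrative, not documented entry points):

  // 5.x: deep import into the published build output (hypothetical)
  import { PostHogOpenAI } from '@posthog/ai/lib/openai/index.mjs'
  // 6.x: the same file now lives under dist/ (hypothetical)
  import { PostHogOpenAI } from '@posthog/ai/dist/openai/index.mjs'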
package/src/openai/index.ts
DELETED
@@ -1,498 +0,0 @@
-import { OpenAI as OpenAIOrignal, ClientOptions } from 'openai'
-import { PostHog } from 'posthog-node'
-import { v4 as uuidv4 } from 'uuid'
-import { formatResponseOpenAI, MonitoringParams, sendEventToPosthog } from '../utils'
-import type { APIPromise } from 'openai'
-import type { Stream } from 'openai/streaming'
-import type { ParsedResponse } from 'openai/resources/responses/responses'
-
-const Chat = OpenAIOrignal.Chat
-const Completions = Chat.Completions
-const Responses = OpenAIOrignal.Responses
-
-type ChatCompletion = OpenAIOrignal.ChatCompletion
-type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk
-type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams
-type ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming
-type ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming
-type ResponsesCreateParamsBase = OpenAIOrignal.Responses.ResponseCreateParams
-type ResponsesCreateParamsNonStreaming = OpenAIOrignal.Responses.ResponseCreateParamsNonStreaming
-type ResponsesCreateParamsStreaming = OpenAIOrignal.Responses.ResponseCreateParamsStreaming
-
-interface MonitoringOpenAIConfig extends ClientOptions {
-  apiKey: string
-  posthog: PostHog
-  baseURL?: string
-}
-
-type RequestOptions = Record<string, any>
-
-export class PostHogOpenAI extends OpenAIOrignal {
-  private readonly phClient: PostHog
-  public chat: WrappedChat
-  public responses: WrappedResponses
-
-  constructor(config: MonitoringOpenAIConfig) {
-    const { posthog, ...openAIConfig } = config
-    super(openAIConfig)
-    this.phClient = posthog
-    this.chat = new WrappedChat(this, this.phClient)
-    this.responses = new WrappedResponses(this, this.phClient)
-  }
-}
-
-export class WrappedChat extends Chat {
-  constructor(parentClient: PostHogOpenAI, phClient: PostHog) {
-    super(parentClient)
-    this.completions = new WrappedCompletions(parentClient, phClient)
-  }
-
-  public completions: WrappedCompletions
-}
-
-export class WrappedCompletions extends Completions {
-  private readonly phClient: PostHog
-
-  constructor(client: OpenAIOrignal, phClient: PostHog) {
-    super(client)
-    this.phClient = phClient
-  }
-
-  // --- Overload #1: Non-streaming
-  public create(
-    body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<ChatCompletion>
-
-  // --- Overload #2: Streaming
-  public create(
-    body: ChatCompletionCreateParamsStreaming & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<Stream<ChatCompletionChunk>>
-
-  // --- Overload #3: Generic base
-  public create(
-    body: ChatCompletionCreateParamsBase & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>
-
-  // --- Implementation Signature
-  public create(
-    body: ChatCompletionCreateParamsBase & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {
-    const {
-      posthogDistinctId,
-      posthogTraceId,
-      posthogProperties,
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
-      posthogPrivacyMode = false,
-      posthogGroups,
-      posthogCaptureImmediate,
-      ...openAIParams
-    } = body
-
-    const traceId = posthogTraceId ?? uuidv4()
-    const startTime = Date.now()
-
-    const parentPromise = super.create(openAIParams, options)
-
-    if (openAIParams.stream) {
-      return parentPromise.then((value) => {
-        if ('tee' in value) {
-          const [stream1, stream2] = value.tee()
-          ;(async () => {
-            try {
-              let accumulatedContent = ''
-              let usage: {
-                inputTokens?: number
-                outputTokens?: number
-                reasoningTokens?: number
-                cacheReadInputTokens?: number
-              } = {
-                inputTokens: 0,
-                outputTokens: 0,
-              }
-
-              for await (const chunk of stream1) {
-                const delta = chunk?.choices?.[0]?.delta?.content ?? ''
-                accumulatedContent += delta
-                if (chunk.usage) {
-                  usage = {
-                    inputTokens: chunk.usage.prompt_tokens ?? 0,
-                    outputTokens: chunk.usage.completion_tokens ?? 0,
-                    reasoningTokens: chunk.usage.completion_tokens_details?.reasoning_tokens ?? 0,
-                    cacheReadInputTokens: chunk.usage.prompt_tokens_details?.cached_tokens ?? 0,
-                  }
-                }
-              }
-
-              const latency = (Date.now() - startTime) / 1000
-              await sendEventToPosthog({
-                client: this.phClient,
-                distinctId: posthogDistinctId,
-                traceId,
-                model: openAIParams.model,
-                provider: 'openai',
-                input: openAIParams.messages,
-                output: [{ content: accumulatedContent, role: 'assistant' }],
-                latency,
-                baseURL: (this as any).baseURL ?? '',
-                params: body,
-                httpStatus: 200,
-                usage,
-                captureImmediate: posthogCaptureImmediate,
-              })
-            } catch (error: any) {
-              await sendEventToPosthog({
-                client: this.phClient,
-                distinctId: posthogDistinctId,
-                traceId,
-                model: openAIParams.model,
-                provider: 'openai',
-                input: openAIParams.messages,
-                output: [],
-                latency: 0,
-                baseURL: (this as any).baseURL ?? '',
-                params: body,
-                httpStatus: error?.status ? error.status : 500,
-                usage: { inputTokens: 0, outputTokens: 0 },
-                isError: true,
-                error: JSON.stringify(error),
-                captureImmediate: posthogCaptureImmediate,
-              })
-            }
-          })()
-
-          // Return the other stream to the user
-          return stream2
-        }
-        return value
-      }) as APIPromise<Stream<ChatCompletionChunk>>
-    } else {
-      const wrappedPromise = parentPromise.then(
-        async (result) => {
-          if ('choices' in result) {
-            const latency = (Date.now() - startTime) / 1000
-            await sendEventToPosthog({
-              client: this.phClient,
-              distinctId: posthogDistinctId,
-              traceId,
-              model: openAIParams.model,
-              provider: 'openai',
-              input: openAIParams.messages,
-              output: formatResponseOpenAI(result),
-              latency,
-              baseURL: (this as any).baseURL ?? '',
-              params: body,
-              httpStatus: 200,
-              usage: {
-                inputTokens: result.usage?.prompt_tokens ?? 0,
-                outputTokens: result.usage?.completion_tokens ?? 0,
-                reasoningTokens: result.usage?.completion_tokens_details?.reasoning_tokens ?? 0,
-                cacheReadInputTokens: result.usage?.prompt_tokens_details?.cached_tokens ?? 0,
-              },
-              captureImmediate: posthogCaptureImmediate,
-            })
-          }
-          return result
-        },
-        async (error: any) => {
-          await sendEventToPosthog({
-            client: this.phClient,
-            distinctId: posthogDistinctId,
-            traceId,
-            model: openAIParams.model,
-            provider: 'openai',
-            input: openAIParams.messages,
-            output: [],
-            latency: 0,
-            baseURL: (this as any).baseURL ?? '',
-            params: body,
-            httpStatus: error?.status ? error.status : 500,
-            usage: {
-              inputTokens: 0,
-              outputTokens: 0,
-            },
-            isError: true,
-            error: JSON.stringify(error),
-            captureImmediate: posthogCaptureImmediate,
-          })
-          throw error
-        }
-      ) as APIPromise<ChatCompletion>
-
-      return wrappedPromise
-    }
-  }
-}
-
-export class WrappedResponses extends Responses {
-  private readonly phClient: PostHog
-
-  constructor(client: OpenAIOrignal, phClient: PostHog) {
-    super(client)
-    this.phClient = phClient
-  }
-
-  // --- Overload #1: Non-streaming
-  public create(
-    body: ResponsesCreateParamsNonStreaming & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<OpenAIOrignal.Responses.Response>
-
-  // --- Overload #2: Streaming
-  public create(
-    body: ResponsesCreateParamsStreaming & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>
-
-  // --- Overload #3: Generic base
-  public create(
-    body: ResponsesCreateParamsBase & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<OpenAIOrignal.Responses.Response | Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>
-
-  // --- Implementation Signature
-  public create(
-    body: ResponsesCreateParamsBase & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<OpenAIOrignal.Responses.Response | Stream<OpenAIOrignal.Responses.ResponseStreamEvent>> {
-    const {
-      posthogDistinctId,
-      posthogTraceId,
-      posthogProperties,
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
-      posthogPrivacyMode = false,
-      posthogGroups,
-      posthogCaptureImmediate,
-      ...openAIParams
-    } = body
-
-    const traceId = posthogTraceId ?? uuidv4()
-    const startTime = Date.now()
-
-    const parentPromise = super.create(openAIParams, options)
-
-    if (openAIParams.stream) {
-      return parentPromise.then((value) => {
-        if ('tee' in value && typeof (value as any).tee === 'function') {
-          const [stream1, stream2] = (value as any).tee()
-          ;(async () => {
-            try {
-              let finalContent: any[] = []
-              let usage: {
-                inputTokens?: number
-                outputTokens?: number
-                reasoningTokens?: number
-                cacheReadInputTokens?: number
-              } = {
-                inputTokens: 0,
-                outputTokens: 0,
-              }
-
-              for await (const chunk of stream1) {
-                if (
-                  chunk.type === 'response.completed' &&
-                  'response' in chunk &&
-                  chunk.response?.output &&
-                  chunk.response.output.length > 0
-                ) {
-                  finalContent = chunk.response.output
-                }
-                if ('response' in chunk && chunk.response?.usage) {
-                  usage = {
-                    inputTokens: chunk.response.usage.input_tokens ?? 0,
-                    outputTokens: chunk.response.usage.output_tokens ?? 0,
-                    reasoningTokens: chunk.response.usage.output_tokens_details?.reasoning_tokens ?? 0,
-                    cacheReadInputTokens: chunk.response.usage.input_tokens_details?.cached_tokens ?? 0,
-                  }
-                }
-              }
-
-              const latency = (Date.now() - startTime) / 1000
-              await sendEventToPosthog({
-                client: this.phClient,
-                distinctId: posthogDistinctId,
-                traceId,
-                model: openAIParams.model,
-                provider: 'openai',
-                input: openAIParams.input,
-                output: finalContent,
-                latency,
-                baseURL: (this as any).baseURL ?? '',
-                params: body,
-                httpStatus: 200,
-                usage,
-                captureImmediate: posthogCaptureImmediate,
-              })
-            } catch (error: any) {
-              await sendEventToPosthog({
-                client: this.phClient,
-                distinctId: posthogDistinctId,
-                traceId,
-                model: openAIParams.model,
-                provider: 'openai',
-                input: openAIParams.input,
-                output: [],
-                latency: 0,
-                baseURL: (this as any).baseURL ?? '',
-                params: body,
-                httpStatus: error?.status ? error.status : 500,
-                usage: { inputTokens: 0, outputTokens: 0 },
-                isError: true,
-                error: JSON.stringify(error),
-                captureImmediate: posthogCaptureImmediate,
-              })
-            }
-          })()
-
-          return stream2
-        }
-        return value
-      }) as APIPromise<Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>
-    } else {
-      const wrappedPromise = parentPromise.then(
-        async (result) => {
-          if ('output' in result) {
-            const latency = (Date.now() - startTime) / 1000
-            await sendEventToPosthog({
-              client: this.phClient,
-              distinctId: posthogDistinctId,
-              traceId,
-              model: openAIParams.model,
-              provider: 'openai',
-              input: openAIParams.input,
-              output: result.output,
-              latency,
-              baseURL: (this as any).baseURL ?? '',
-              params: body,
-              httpStatus: 200,
-              usage: {
-                inputTokens: result.usage?.input_tokens ?? 0,
-                outputTokens: result.usage?.output_tokens ?? 0,
-                reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
-                cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0,
-              },
-              captureImmediate: posthogCaptureImmediate,
-            })
-          }
-          return result
-        },
-        async (error: any) => {
-          await sendEventToPosthog({
-            client: this.phClient,
-            distinctId: posthogDistinctId,
-            traceId,
-            model: openAIParams.model,
-            provider: 'openai',
-            input: openAIParams.input,
-            output: [],
-            latency: 0,
-            baseURL: (this as any).baseURL ?? '',
-            params: body,
-            httpStatus: error?.status ? error.status : 500,
-            usage: {
-              inputTokens: 0,
-              outputTokens: 0,
-            },
-            isError: true,
-            error: JSON.stringify(error),
-            captureImmediate: posthogCaptureImmediate,
-          })
-          throw error
-        }
-      ) as APIPromise<OpenAIOrignal.Responses.Response>
-
-      return wrappedPromise
-    }
-  }
-
-  public parse<Params extends ResponsesCreateParamsBase, ParsedT = any>(
-    body: Params & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<ParsedResponse<ParsedT>> {
-    const {
-      posthogDistinctId,
-      posthogTraceId,
-      posthogProperties,
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
-      posthogPrivacyMode = false,
-      posthogGroups,
-      posthogCaptureImmediate,
-      ...openAIParams
-    } = body
-
-    const traceId = posthogTraceId ?? uuidv4()
-    const startTime = Date.now()
-
-    // Create a temporary instance that bypasses our wrapped create method
-    const originalCreate = super.create.bind(this)
-    const originalSelf = this as any
-    const tempCreate = originalSelf.create
-    originalSelf.create = originalCreate
-
-    try {
-      const parentPromise = super.parse(openAIParams, options)
-
-      const wrappedPromise = parentPromise.then(
-        async (result) => {
-          const latency = (Date.now() - startTime) / 1000
-          await sendEventToPosthog({
-            client: this.phClient,
-            distinctId: posthogDistinctId,
-            traceId,
-            model: openAIParams.model,
-            provider: 'openai',
-            input: openAIParams.input,
-            output: result.output,
-            latency,
-            baseURL: (this as any).baseURL ?? '',
-            params: body,
-            httpStatus: 200,
-            usage: {
-              inputTokens: result.usage?.input_tokens ?? 0,
-              outputTokens: result.usage?.output_tokens ?? 0,
-              reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
-              cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0,
-            },
-            captureImmediate: posthogCaptureImmediate,
-          })
-          return result
-        },
-        async (error: any) => {
-          await sendEventToPosthog({
-            client: this.phClient,
-            distinctId: posthogDistinctId,
-            traceId,
-            model: openAIParams.model,
-            provider: 'openai',
-            input: openAIParams.input,
-            output: [],
-            latency: 0,
-            baseURL: (this as any).baseURL ?? '',
-            params: body,
-            httpStatus: error?.status ? error.status : 500,
-            usage: {
-              inputTokens: 0,
-              outputTokens: 0,
-            },
-            isError: true,
-            error: JSON.stringify(error),
-            captureImmediate: posthogCaptureImmediate,
-          })
-          throw error
-        }
-      )
-
-      return wrappedPromise as APIPromise<ParsedResponse<ParsedT>>
-    } finally {
-      // Restore our wrapped create method
-      originalSelf.create = tempCreate
-    }
-  }
-}
-
-export default PostHogOpenAI
-
-export { PostHogOpenAI as OpenAI }