@posthog/ai 5.2.2 → 5.2.3
This diff shows the changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
- package/LICENSE +245 -0
- package/{lib → dist}/anthropic/index.cjs +7 -12
- package/{lib → dist}/anthropic/index.cjs.map +1 -1
- package/{lib → dist}/anthropic/index.mjs +4 -5
- package/{lib → dist}/anthropic/index.mjs.map +1 -1
- package/{lib → dist}/gemini/index.cjs +1 -1
- package/{lib → dist}/gemini/index.cjs.map +1 -1
- package/{lib → dist}/gemini/index.mjs.map +1 -1
- package/{lib → dist}/index.cjs +547 -479
- package/dist/index.cjs.map +1 -0
- package/{lib → dist}/index.mjs +530 -456
- package/dist/index.mjs.map +1 -0
- package/{lib → dist}/langchain/index.cjs +150 -110
- package/dist/langchain/index.cjs.map +1 -0
- package/{lib → dist}/langchain/index.mjs +147 -104
- package/dist/langchain/index.mjs.map +1 -0
- package/{lib → dist}/openai/index.cjs +7 -1
- package/dist/openai/index.cjs.map +1 -0
- package/{lib → dist}/openai/index.mjs +6 -0
- package/dist/openai/index.mjs.map +1 -0
- package/{lib → dist}/vercel/index.cjs +0 -2
- package/{lib → dist}/vercel/index.cjs.map +1 -1
- package/{lib → dist}/vercel/index.mjs.map +1 -1
- package/package.json +42 -33
- package/CHANGELOG.md +0 -89
- package/index.ts +0 -1
- package/lib/index.cjs.map +0 -1
- package/lib/index.mjs.map +0 -1
- package/lib/langchain/index.cjs.map +0 -1
- package/lib/langchain/index.mjs.map +0 -1
- package/lib/openai/index.cjs.map +0 -1
- package/lib/openai/index.mjs.map +0 -1
- package/src/anthropic/index.ts +0 -211
- package/src/gemini/index.ts +0 -254
- package/src/index.ts +0 -13
- package/src/langchain/callbacks.ts +0 -640
- package/src/langchain/index.ts +0 -1
- package/src/openai/azure.ts +0 -481
- package/src/openai/index.ts +0 -498
- package/src/utils.ts +0 -287
- package/src/vercel/index.ts +0 -1
- package/src/vercel/middleware.ts +0 -393
- package/tests/callbacks.test.ts +0 -48
- package/tests/gemini.test.ts +0 -344
- package/tests/openai.test.ts +0 -403
- package/tsconfig.json +0 -10
- package/{lib → dist}/anthropic/index.d.ts +0 -0
- package/{lib → dist}/gemini/index.d.ts +0 -0
- package/{lib → dist}/gemini/index.mjs +0 -0
- package/{lib → dist}/index.d.ts +0 -0
- package/{lib → dist}/langchain/index.d.ts +0 -0
- package/{lib → dist}/openai/index.d.ts +0 -0
- package/{lib → dist}/vercel/index.d.ts +0 -0
- package/{lib → dist}/vercel/index.mjs +0 -0
package/src/openai/azure.ts
DELETED
@@ -1,481 +0,0 @@
-import OpenAIOrignal, { AzureOpenAI } from 'openai'
-import { PostHog } from 'posthog-node'
-import { v4 as uuidv4 } from 'uuid'
-import { formatResponseOpenAI, MonitoringParams, sendEventToPosthog } from '../utils'
-import type { APIPromise } from 'openai'
-import type { Stream } from 'openai/streaming'
-import type { ParsedResponse } from 'openai/resources/responses/responses'
-
-type ChatCompletion = OpenAIOrignal.ChatCompletion
-type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk
-type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams
-type ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming
-type ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming
-type ResponsesCreateParamsBase = OpenAIOrignal.Responses.ResponseCreateParams
-type ResponsesCreateParamsNonStreaming = OpenAIOrignal.Responses.ResponseCreateParamsNonStreaming
-type ResponsesCreateParamsStreaming = OpenAIOrignal.Responses.ResponseCreateParamsStreaming
-
-interface MonitoringOpenAIConfig {
-  apiKey: string
-  posthog: PostHog
-  baseURL?: string
-}
-
-type RequestOptions = Record<string, any>
-
-export class PostHogAzureOpenAI extends AzureOpenAI {
-  private readonly phClient: PostHog
-  public chat: WrappedChat
-
-  constructor(config: MonitoringOpenAIConfig) {
-    const { posthog, ...openAIConfig } = config
-    super(openAIConfig)
-    this.phClient = posthog
-    this.chat = new WrappedChat(this, this.phClient)
-  }
-}
-
-export class WrappedChat extends AzureOpenAI.Chat {
-  constructor(parentClient: PostHogAzureOpenAI, phClient: PostHog) {
-    super(parentClient)
-    this.completions = new WrappedCompletions(parentClient, phClient)
-  }
-
-  public completions: WrappedCompletions
-}
-
-export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
-  private readonly phClient: PostHog
-
-  constructor(client: AzureOpenAI, phClient: PostHog) {
-    super(client)
-    this.phClient = phClient
-  }
-
-  // --- Overload #1: Non-streaming
-  public create(
-    body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<ChatCompletion>
-
-  // --- Overload #2: Streaming
-  public create(
-    body: ChatCompletionCreateParamsStreaming & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<Stream<ChatCompletionChunk>>
-
-  // --- Overload #3: Generic base
-  public create(
-    body: ChatCompletionCreateParamsBase & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>
-
-  // --- Implementation Signature
-  public create(
-    body: ChatCompletionCreateParamsBase & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {
-    const {
-      posthogDistinctId,
-      posthogTraceId,
-      posthogProperties,
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
-      posthogPrivacyMode = false,
-      posthogGroups,
-      posthogCaptureImmediate,
-      ...openAIParams
-    } = body
-
-    const traceId = posthogTraceId ?? uuidv4()
-    const startTime = Date.now()
-
-    const parentPromise = super.create(openAIParams, options)
-
-    if (openAIParams.stream) {
-      return parentPromise.then((value) => {
-        if ('tee' in value) {
-          const [stream1, stream2] = value.tee()
-          ;(async () => {
-            try {
-              let accumulatedContent = ''
-              let usage: {
-                inputTokens?: number
-                outputTokens?: number
-                reasoningTokens?: number
-                cacheReadInputTokens?: number
-              } = {
-                inputTokens: 0,
-                outputTokens: 0,
-              }
-
-              for await (const chunk of stream1) {
-                const delta = chunk?.choices?.[0]?.delta?.content ?? ''
-                accumulatedContent += delta
-                if (chunk.usage) {
-                  usage = {
-                    inputTokens: chunk.usage.prompt_tokens ?? 0,
-                    outputTokens: chunk.usage.completion_tokens ?? 0,
-                    reasoningTokens: chunk.usage.completion_tokens_details?.reasoning_tokens ?? 0,
-                    cacheReadInputTokens: chunk.usage.prompt_tokens_details?.cached_tokens ?? 0,
-                  }
-                }
-              }
-
-              const latency = (Date.now() - startTime) / 1000
-              await sendEventToPosthog({
-                client: this.phClient,
-                distinctId: posthogDistinctId,
-                traceId,
-                model: openAIParams.model,
-                provider: 'azure',
-                input: openAIParams.messages,
-                output: [{ content: accumulatedContent, role: 'assistant' }],
-                latency,
-                baseURL: (this as any).baseURL ?? '',
-                params: body,
-                httpStatus: 200,
-                usage,
-                captureImmediate: posthogCaptureImmediate,
-              })
-            } catch (error: any) {
-              await sendEventToPosthog({
-                client: this.phClient,
-                distinctId: posthogDistinctId,
-                traceId,
-                model: openAIParams.model,
-                provider: 'azure',
-                input: openAIParams.messages,
-                output: [],
-                latency: 0,
-                baseURL: (this as any).baseURL ?? '',
-                params: body,
-                httpStatus: error?.status ? error.status : 500,
-                usage: { inputTokens: 0, outputTokens: 0 },
-                isError: true,
-                error: JSON.stringify(error),
-                captureImmediate: posthogCaptureImmediate,
-              })
-            }
-          })()
-
-          // Return the other stream to the user
-          return stream2
-        }
-        return value
-      }) as APIPromise<Stream<ChatCompletionChunk>>
-    } else {
-      const wrappedPromise = parentPromise.then(
-        async (result) => {
-          if ('choices' in result) {
-            const latency = (Date.now() - startTime) / 1000
-            await sendEventToPosthog({
-              client: this.phClient,
-              distinctId: posthogDistinctId,
-              traceId,
-              model: openAIParams.model,
-              provider: 'azure',
-              input: openAIParams.messages,
-              output: formatResponseOpenAI(result),
-              latency,
-              baseURL: (this as any).baseURL ?? '',
-              params: body,
-              httpStatus: 200,
-              usage: {
-                inputTokens: result.usage?.prompt_tokens ?? 0,
-                outputTokens: result.usage?.completion_tokens ?? 0,
-                reasoningTokens: result.usage?.completion_tokens_details?.reasoning_tokens ?? 0,
-                cacheReadInputTokens: result.usage?.prompt_tokens_details?.cached_tokens ?? 0,
-              },
-              captureImmediate: posthogCaptureImmediate,
-            })
-          }
-          return result
-        },
-        async (error: any) => {
-          await sendEventToPosthog({
-            client: this.phClient,
-            distinctId: posthogDistinctId,
-            traceId,
-            model: openAIParams.model,
-            provider: 'azure',
-            input: openAIParams.messages,
-            output: [],
-            latency: 0,
-            baseURL: (this as any).baseURL ?? '',
-            params: body,
-            httpStatus: error?.status ? error.status : 500,
-            usage: {
-              inputTokens: 0,
-              outputTokens: 0,
-            },
-            isError: true,
-            error: JSON.stringify(error),
-            captureImmediate: posthogCaptureImmediate,
-          })
-          throw error
-        }
-      ) as APIPromise<ChatCompletion>
-
-      return wrappedPromise
-    }
-  }
-}
-
-export class WrappedResponses extends AzureOpenAI.Responses {
-  private readonly phClient: PostHog
-
-  constructor(client: AzureOpenAI, phClient: PostHog) {
-    super(client)
-    this.phClient = phClient
-  }
-
-  // --- Overload #1: Non-streaming
-  public create(
-    body: ResponsesCreateParamsNonStreaming & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<OpenAIOrignal.Responses.Response>
-
-  // --- Overload #2: Streaming
-  public create(
-    body: ResponsesCreateParamsStreaming & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>
-
-  // --- Overload #3: Generic base
-  public create(
-    body: ResponsesCreateParamsBase & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<OpenAIOrignal.Responses.Response | Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>
-
-  // --- Implementation Signature
-  public create(
-    body: ResponsesCreateParamsBase & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<OpenAIOrignal.Responses.Response | Stream<OpenAIOrignal.Responses.ResponseStreamEvent>> {
-    const {
-      posthogDistinctId,
-      posthogTraceId,
-      posthogProperties,
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
-      posthogPrivacyMode = false,
-      posthogGroups,
-      posthogCaptureImmediate,
-      ...openAIParams
-    } = body
-
-    const traceId = posthogTraceId ?? uuidv4()
-    const startTime = Date.now()
-
-    const parentPromise = super.create(openAIParams, options)
-
-    if (openAIParams.stream) {
-      return parentPromise.then((value) => {
-        if ('tee' in value && typeof (value as any).tee === 'function') {
-          const [stream1, stream2] = (value as any).tee()
-          ;(async () => {
-            try {
-              let finalContent: any[] = []
-              let usage: {
-                inputTokens?: number
-                outputTokens?: number
-                reasoningTokens?: number
-                cacheReadInputTokens?: number
-              } = {
-                inputTokens: 0,
-                outputTokens: 0,
-              }
-
-              for await (const chunk of stream1) {
-                if (
-                  chunk.type === 'response.completed' &&
-                  'response' in chunk &&
-                  chunk.response?.output &&
-                  chunk.response.output.length > 0
-                ) {
-                  finalContent = chunk.response.output
-                }
-                if ('usage' in chunk && chunk.usage) {
-                  usage = {
-                    inputTokens: chunk.usage.input_tokens ?? 0,
-                    outputTokens: chunk.usage.output_tokens ?? 0,
-                    reasoningTokens: chunk.usage.output_tokens_details?.reasoning_tokens ?? 0,
-                    cacheReadInputTokens: chunk.usage.input_tokens_details?.cached_tokens ?? 0,
-                  }
-                }
-              }
-
-              const latency = (Date.now() - startTime) / 1000
-              await sendEventToPosthog({
-                client: this.phClient,
-                distinctId: posthogDistinctId,
-                traceId,
-                model: openAIParams.model,
-                provider: 'azure',
-                input: openAIParams.input,
-                output: finalContent,
-                latency,
-                baseURL: (this as any).baseURL ?? '',
-                params: body,
-                httpStatus: 200,
-                usage,
-                captureImmediate: posthogCaptureImmediate,
-              })
-            } catch (error: any) {
-              await sendEventToPosthog({
-                client: this.phClient,
-                distinctId: posthogDistinctId,
-                traceId,
-                model: openAIParams.model,
-                provider: 'azure',
-                input: openAIParams.input,
-                output: [],
-                latency: 0,
-                baseURL: (this as any).baseURL ?? '',
-                params: body,
-                httpStatus: error?.status ? error.status : 500,
-                usage: { inputTokens: 0, outputTokens: 0 },
-                isError: true,
-                error: JSON.stringify(error),
-                captureImmediate: posthogCaptureImmediate,
-              })
-            }
-          })()
-
-          return stream2
-        }
-        return value
-      }) as APIPromise<Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>
-    } else {
-      const wrappedPromise = parentPromise.then(
-        async (result) => {
-          if ('output' in result) {
-            const latency = (Date.now() - startTime) / 1000
-            await sendEventToPosthog({
-              client: this.phClient,
-              distinctId: posthogDistinctId,
-              traceId,
-              model: openAIParams.model,
-              provider: 'azure',
-              input: openAIParams.input,
-              output: result.output,
-              latency,
-              baseURL: (this as any).baseURL ?? '',
-              params: body,
-              httpStatus: 200,
-              usage: {
-                inputTokens: result.usage?.input_tokens ?? 0,
-                outputTokens: result.usage?.output_tokens ?? 0,
-                reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
-                cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0,
-              },
-              captureImmediate: posthogCaptureImmediate,
-            })
-          }
-          return result
-        },
-        async (error: any) => {
-          await sendEventToPosthog({
-            client: this.phClient,
-            distinctId: posthogDistinctId,
-            traceId,
-            model: openAIParams.model,
-            provider: 'azure',
-            input: openAIParams.input,
-            output: [],
-            latency: 0,
-            baseURL: (this as any).baseURL ?? '',
-            params: body,
-            httpStatus: error?.status ? error.status : 500,
-            usage: {
-              inputTokens: 0,
-              outputTokens: 0,
-            },
-            isError: true,
-            error: JSON.stringify(error),
-            captureImmediate: posthogCaptureImmediate,
-          })
-          throw error
-        }
-      ) as APIPromise<OpenAIOrignal.Responses.Response>
-
-      return wrappedPromise
-    }
-  }
-
-  public parse<Params extends OpenAIOrignal.Responses.ResponseCreateParams, ParsedT = any>(
-    body: Params & MonitoringParams,
-    options?: RequestOptions
-  ): APIPromise<ParsedResponse<ParsedT>> {
-    const {
-      posthogDistinctId,
-      posthogTraceId,
-      posthogProperties,
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
-      posthogPrivacyMode = false,
-      posthogGroups,
-      posthogCaptureImmediate,
-      ...openAIParams
-    } = body
-
-    const traceId = posthogTraceId ?? uuidv4()
-    const startTime = Date.now()
-
-    const parentPromise = super.parse(openAIParams, options)
-
-    const wrappedPromise = parentPromise.then(
-      async (result) => {
-        const latency = (Date.now() - startTime) / 1000
-        await sendEventToPosthog({
-          client: this.phClient,
-          distinctId: posthogDistinctId,
-          traceId,
-          model: openAIParams.model,
-          provider: 'azure',
-          input: openAIParams.input,
-          output: result.output,
-          latency,
-          baseURL: (this as any).baseURL ?? '',
-          params: body,
-          httpStatus: 200,
-          usage: {
-            inputTokens: result.usage?.input_tokens ?? 0,
-            outputTokens: result.usage?.output_tokens ?? 0,
-            reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
-            cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0,
-          },
-          captureImmediate: posthogCaptureImmediate,
-        })
-        return result
-      },
-      async (error: any) => {
-        await sendEventToPosthog({
-          client: this.phClient,
-          distinctId: posthogDistinctId,
-          traceId,
-          model: openAIParams.model,
-          provider: 'azure',
-          input: openAIParams.input,
-          output: [],
-          latency: 0,
-          baseURL: (this as any).baseURL ?? '',
-          params: body,
-          httpStatus: error?.status ? error.status : 500,
-          usage: {
-            inputTokens: 0,
-            outputTokens: 0,
-          },
-          isError: true,
-          error: JSON.stringify(error),
-          captureImmediate: posthogCaptureImmediate,
-        })
-        throw error
-      }
-    )
-
-    return wrappedPromise as APIPromise<ParsedResponse<ParsedT>>
-  }
-}
-
-export default PostHogAzureOpenAI
-
-export { PostHogAzureOpenAI as OpenAI }