@posthog/ai 2.1.2 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/index.cjs.js +219 -30
- package/lib/index.cjs.js.map +1 -1
- package/lib/index.d.ts +37 -7
- package/lib/index.esm.js +220 -32
- package/lib/index.esm.js.map +1 -1
- package/lib/posthog-ai/src/index.d.ts +2 -0
- package/lib/posthog-ai/src/openai/azure.d.ts +32 -0
- package/lib/posthog-ai/src/utils.d.ts +3 -1
- package/lib/posthog-ai/src/vercel/middleware.d.ts +4 -0
- package/package.json +1 -1
- package/src/index.ts +2 -0
- package/src/openai/azure.ts +214 -0
- package/src/openai/index.ts +8 -4
- package/src/utils.ts +20 -8
- package/src/vercel/middleware.ts +22 -10
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
import OpenAIOrignal, { AzureOpenAI } from 'openai'
|
|
2
|
+
import { PostHog } from 'posthog-node'
|
|
3
|
+
import { v4 as uuidv4 } from 'uuid'
|
|
4
|
+
import { PassThrough } from 'stream'
|
|
5
|
+
import { mergeSystemPrompt, MonitoringParams, sendEventToPosthog } from '../utils'
|
|
6
|
+
|
|
7
|
+
// Convenience aliases for the OpenAI SDK's chat-completion request/response
// types, so the wrapper signatures below stay readable.
type ChatCompletion = OpenAIOrignal.ChatCompletion
type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk
type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams
type ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming
type ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming
// Type-only imports: erased at compile time, used purely for signatures.
import type { APIPromise, RequestOptions } from 'openai/core'
import type { Stream } from 'openai/streaming'
|
|
14
|
+
|
|
15
|
+
// Configuration for the PostHog-instrumented Azure OpenAI client.
// Everything except `posthog` is forwarded to the AzureOpenAI constructor.
interface MonitoringOpenAIConfig {
  // API key passed through to the underlying AzureOpenAI client.
  apiKey: string
  // PostHog client used to capture $ai_generation analytics events.
  posthog: PostHog
  // Optional base URL override for the Azure OpenAI endpoint.
  baseURL?: string
}
|
|
20
|
+
|
|
21
|
+
export class PostHogAzureOpenAI extends AzureOpenAI {
|
|
22
|
+
private readonly phClient: PostHog
|
|
23
|
+
public chat: WrappedChat
|
|
24
|
+
|
|
25
|
+
constructor(config: MonitoringOpenAIConfig) {
|
|
26
|
+
const { posthog, ...openAIConfig } = config
|
|
27
|
+
super(openAIConfig)
|
|
28
|
+
this.phClient = posthog
|
|
29
|
+
this.chat = new WrappedChat(this, this.phClient)
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
export class WrappedChat extends AzureOpenAI.Chat {
|
|
34
|
+
constructor(parentClient: PostHogAzureOpenAI, phClient: PostHog) {
|
|
35
|
+
super(parentClient)
|
|
36
|
+
this.completions = new WrappedCompletions(parentClient, phClient)
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
public completions: WrappedCompletions
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
|
|
43
|
+
private readonly phClient: PostHog
|
|
44
|
+
|
|
45
|
+
constructor(client: AzureOpenAI, phClient: PostHog) {
|
|
46
|
+
super(client)
|
|
47
|
+
this.phClient = phClient
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
// --- Overload #1: Non-streaming
|
|
51
|
+
public create(
|
|
52
|
+
body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,
|
|
53
|
+
options?: RequestOptions
|
|
54
|
+
): APIPromise<ChatCompletion>
|
|
55
|
+
|
|
56
|
+
// --- Overload #2: Streaming
|
|
57
|
+
public create(
|
|
58
|
+
body: ChatCompletionCreateParamsStreaming & MonitoringParams,
|
|
59
|
+
options?: RequestOptions
|
|
60
|
+
): APIPromise<Stream<ChatCompletionChunk>>
|
|
61
|
+
|
|
62
|
+
// --- Overload #3: Generic base
|
|
63
|
+
public create(
|
|
64
|
+
body: ChatCompletionCreateParamsBase & MonitoringParams,
|
|
65
|
+
options?: RequestOptions
|
|
66
|
+
): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>
|
|
67
|
+
|
|
68
|
+
// --- Implementation Signature
|
|
69
|
+
public create(
|
|
70
|
+
body: ChatCompletionCreateParamsBase & MonitoringParams,
|
|
71
|
+
options?: RequestOptions
|
|
72
|
+
): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {
|
|
73
|
+
const {
|
|
74
|
+
posthogDistinctId,
|
|
75
|
+
posthogTraceId,
|
|
76
|
+
posthogProperties,
|
|
77
|
+
posthogPrivacyMode = false,
|
|
78
|
+
posthogGroups,
|
|
79
|
+
...openAIParams
|
|
80
|
+
} = body
|
|
81
|
+
|
|
82
|
+
const traceId = posthogTraceId ?? uuidv4()
|
|
83
|
+
const startTime = Date.now()
|
|
84
|
+
const parentPromise = super.create(openAIParams, options)
|
|
85
|
+
|
|
86
|
+
if (openAIParams.stream) {
|
|
87
|
+
return parentPromise.then((value) => {
|
|
88
|
+
const passThroughStream = new PassThrough({ objectMode: true })
|
|
89
|
+
let accumulatedContent = ''
|
|
90
|
+
let usage: { inputTokens: number; outputTokens: number } = {
|
|
91
|
+
inputTokens: 0,
|
|
92
|
+
outputTokens: 0,
|
|
93
|
+
}
|
|
94
|
+
let model = openAIParams.model
|
|
95
|
+
if ('tee' in value) {
|
|
96
|
+
const openAIStream = value
|
|
97
|
+
;(async () => {
|
|
98
|
+
try {
|
|
99
|
+
for await (const chunk of openAIStream) {
|
|
100
|
+
const delta = chunk?.choices?.[0]?.delta?.content ?? ''
|
|
101
|
+
accumulatedContent += delta
|
|
102
|
+
if (chunk.usage) {
|
|
103
|
+
if (chunk.model != model) {
|
|
104
|
+
model = chunk.model
|
|
105
|
+
}
|
|
106
|
+
usage = {
|
|
107
|
+
inputTokens: chunk.usage.prompt_tokens ?? 0,
|
|
108
|
+
outputTokens: chunk.usage.completion_tokens ?? 0,
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
passThroughStream.write(chunk)
|
|
112
|
+
}
|
|
113
|
+
const latency = (Date.now() - startTime) / 1000
|
|
114
|
+
sendEventToPosthog({
|
|
115
|
+
client: this.phClient,
|
|
116
|
+
distinctId: posthogDistinctId ?? traceId,
|
|
117
|
+
traceId,
|
|
118
|
+
model,
|
|
119
|
+
provider: 'azure',
|
|
120
|
+
input: mergeSystemPrompt(openAIParams, 'azure'),
|
|
121
|
+
output: [{ content: accumulatedContent, role: 'assistant' }],
|
|
122
|
+
latency,
|
|
123
|
+
baseURL: (this as any).baseURL ?? '',
|
|
124
|
+
params: body,
|
|
125
|
+
httpStatus: 200,
|
|
126
|
+
usage,
|
|
127
|
+
})
|
|
128
|
+
passThroughStream.end()
|
|
129
|
+
} catch (error: any) {
|
|
130
|
+
// error handling
|
|
131
|
+
sendEventToPosthog({
|
|
132
|
+
client: this.phClient,
|
|
133
|
+
distinctId: posthogDistinctId ?? traceId,
|
|
134
|
+
traceId,
|
|
135
|
+
model,
|
|
136
|
+
provider: 'azure',
|
|
137
|
+
input: mergeSystemPrompt(openAIParams, 'azure'),
|
|
138
|
+
output: JSON.stringify(error),
|
|
139
|
+
latency: 0,
|
|
140
|
+
baseURL: (this as any).baseURL ?? '',
|
|
141
|
+
params: body,
|
|
142
|
+
httpStatus: error?.status ? error.status : 500,
|
|
143
|
+
usage: {
|
|
144
|
+
inputTokens: 0,
|
|
145
|
+
outputTokens: 0,
|
|
146
|
+
},
|
|
147
|
+
isError: true,
|
|
148
|
+
error: JSON.stringify(error),
|
|
149
|
+
})
|
|
150
|
+
passThroughStream.emit('error', error)
|
|
151
|
+
}
|
|
152
|
+
})()
|
|
153
|
+
}
|
|
154
|
+
return passThroughStream as unknown as Stream<ChatCompletionChunk>
|
|
155
|
+
}) as APIPromise<Stream<ChatCompletionChunk>>
|
|
156
|
+
} else {
|
|
157
|
+
const wrappedPromise = parentPromise.then(
|
|
158
|
+
(result) => {
|
|
159
|
+
if ('choices' in result) {
|
|
160
|
+
const latency = (Date.now() - startTime) / 1000
|
|
161
|
+
let model = openAIParams.model
|
|
162
|
+
if (result.model != model) {
|
|
163
|
+
model = result.model
|
|
164
|
+
}
|
|
165
|
+
sendEventToPosthog({
|
|
166
|
+
client: this.phClient,
|
|
167
|
+
distinctId: posthogDistinctId ?? traceId,
|
|
168
|
+
traceId,
|
|
169
|
+
model,
|
|
170
|
+
provider: '1234',
|
|
171
|
+
input: mergeSystemPrompt(openAIParams, 'azure'),
|
|
172
|
+
output: [{ content: result.choices[0].message.content, role: 'assistant' }],
|
|
173
|
+
latency,
|
|
174
|
+
baseURL: (this as any).baseURL ?? '',
|
|
175
|
+
params: body,
|
|
176
|
+
httpStatus: 200,
|
|
177
|
+
usage: {
|
|
178
|
+
inputTokens: result.usage?.prompt_tokens ?? 0,
|
|
179
|
+
outputTokens: result.usage?.completion_tokens ?? 0,
|
|
180
|
+
},
|
|
181
|
+
})
|
|
182
|
+
}
|
|
183
|
+
return result
|
|
184
|
+
},
|
|
185
|
+
(error: any) => {
|
|
186
|
+
sendEventToPosthog({
|
|
187
|
+
client: this.phClient,
|
|
188
|
+
distinctId: posthogDistinctId ?? traceId,
|
|
189
|
+
traceId,
|
|
190
|
+
model: openAIParams.model,
|
|
191
|
+
provider: 'azure',
|
|
192
|
+
input: mergeSystemPrompt(openAIParams, 'azure'),
|
|
193
|
+
output: [],
|
|
194
|
+
latency: 0,
|
|
195
|
+
baseURL: (this as any).baseURL ?? '',
|
|
196
|
+
params: body,
|
|
197
|
+
httpStatus: error?.status ? error.status : 500,
|
|
198
|
+
usage: {
|
|
199
|
+
inputTokens: 0,
|
|
200
|
+
outputTokens: 0,
|
|
201
|
+
},
|
|
202
|
+
isError: true,
|
|
203
|
+
error: JSON.stringify(error),
|
|
204
|
+
})
|
|
205
|
+
throw error
|
|
206
|
+
}
|
|
207
|
+
) as APIPromise<ChatCompletion>
|
|
208
|
+
|
|
209
|
+
return wrappedPromise
|
|
210
|
+
}
|
|
211
|
+
}
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
export default PostHogAzureOpenAI
|
package/src/openai/index.ts
CHANGED
|
@@ -123,7 +123,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
|
|
|
123
123
|
usage,
|
|
124
124
|
})
|
|
125
125
|
passThroughStream.end()
|
|
126
|
-
} catch (error) {
|
|
126
|
+
} catch (error: any) {
|
|
127
127
|
// error handling
|
|
128
128
|
sendEventToPosthog({
|
|
129
129
|
client: this.phClient,
|
|
@@ -136,11 +136,13 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
|
|
|
136
136
|
latency: 0,
|
|
137
137
|
baseURL: (this as any).baseURL ?? '',
|
|
138
138
|
params: body,
|
|
139
|
-
httpStatus: 500,
|
|
139
|
+
httpStatus: error?.status ? error.status : 500,
|
|
140
140
|
usage: {
|
|
141
141
|
inputTokens: 0,
|
|
142
142
|
outputTokens: 0,
|
|
143
143
|
},
|
|
144
|
+
isError: true,
|
|
145
|
+
error: JSON.stringify(error),
|
|
144
146
|
})
|
|
145
147
|
passThroughStream.emit('error', error)
|
|
146
148
|
}
|
|
@@ -173,7 +175,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
|
|
|
173
175
|
}
|
|
174
176
|
return result
|
|
175
177
|
},
|
|
176
|
-
(error) => {
|
|
178
|
+
(error: any) => {
|
|
177
179
|
sendEventToPosthog({
|
|
178
180
|
client: this.phClient,
|
|
179
181
|
distinctId: posthogDistinctId ?? traceId,
|
|
@@ -185,11 +187,13 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
|
|
|
185
187
|
latency: 0,
|
|
186
188
|
baseURL: (this as any).baseURL ?? '',
|
|
187
189
|
params: body,
|
|
188
|
-
httpStatus: 500,
|
|
190
|
+
httpStatus: error?.status ? error.status : 500,
|
|
189
191
|
usage: {
|
|
190
192
|
inputTokens: 0,
|
|
191
193
|
outputTokens: 0,
|
|
192
194
|
},
|
|
195
|
+
isError: true,
|
|
196
|
+
error: JSON.stringify(error),
|
|
193
197
|
})
|
|
194
198
|
throw error
|
|
195
199
|
}
|
package/src/utils.ts
CHANGED
|
@@ -77,15 +77,15 @@ export const formatResponseOpenAI = (response: any): Array<{ role: string; conte
|
|
|
77
77
|
}
|
|
78
78
|
|
|
79
79
|
export const mergeSystemPrompt = (params: ChatCompletionCreateParamsBase & MonitoringParams, provider: string): any => {
|
|
80
|
-
if (provider
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
80
|
+
if (provider == 'anthropic') {
|
|
81
|
+
const messages = params.messages || []
|
|
82
|
+
if (!(params as any).system) {
|
|
83
|
+
return messages
|
|
84
|
+
}
|
|
85
|
+
const systemMessage = (params as any).system
|
|
86
|
+
return [{ role: 'system', content: systemMessage }, ...messages]
|
|
86
87
|
}
|
|
87
|
-
|
|
88
|
-
return [{ role: 'system', content: systemMessage }, ...messages]
|
|
88
|
+
return params.messages
|
|
89
89
|
}
|
|
90
90
|
|
|
91
91
|
export const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {
|
|
@@ -105,6 +105,8 @@ export type SendEventToPosthogParams = {
|
|
|
105
105
|
httpStatus: number
|
|
106
106
|
usage?: { inputTokens?: number; outputTokens?: number }
|
|
107
107
|
params: ChatCompletionCreateParamsBase & MonitoringParams
|
|
108
|
+
isError?: boolean
|
|
109
|
+
error?: string
|
|
108
110
|
}
|
|
109
111
|
|
|
110
112
|
export const sendEventToPosthog = ({
|
|
@@ -120,8 +122,17 @@ export const sendEventToPosthog = ({
|
|
|
120
122
|
params,
|
|
121
123
|
httpStatus = 200,
|
|
122
124
|
usage = {},
|
|
125
|
+
isError = false,
|
|
126
|
+
error,
|
|
123
127
|
}: SendEventToPosthogParams): void => {
|
|
124
128
|
if (client.capture) {
|
|
129
|
+
let errorData = {}
|
|
130
|
+
if (isError) {
|
|
131
|
+
errorData = {
|
|
132
|
+
$ai_is_error: true,
|
|
133
|
+
$ai_error: error,
|
|
134
|
+
}
|
|
135
|
+
}
|
|
125
136
|
client.capture({
|
|
126
137
|
distinctId: distinctId ?? traceId,
|
|
127
138
|
event: '$ai_generation',
|
|
@@ -139,6 +150,7 @@ export const sendEventToPosthog = ({
|
|
|
139
150
|
$ai_base_url: baseURL,
|
|
140
151
|
...params.posthogProperties,
|
|
141
152
|
...(distinctId ? {} : { $process_person_profile: false }),
|
|
153
|
+
...errorData,
|
|
142
154
|
},
|
|
143
155
|
groups: params.posthogGroups,
|
|
144
156
|
})
|
package/src/vercel/middleware.ts
CHANGED
|
@@ -15,6 +15,8 @@ interface ClientOptions {
|
|
|
15
15
|
posthogProperties?: Record<string, any>
|
|
16
16
|
posthogPrivacyMode?: boolean
|
|
17
17
|
posthogGroups?: Record<string, any>
|
|
18
|
+
posthogModelOverride?: string
|
|
19
|
+
posthogProviderOverride?: string
|
|
18
20
|
}
|
|
19
21
|
|
|
20
22
|
interface CreateInstrumentationMiddlewareOptions {
|
|
@@ -23,6 +25,8 @@ interface CreateInstrumentationMiddlewareOptions {
|
|
|
23
25
|
posthogProperties?: Record<string, any>
|
|
24
26
|
posthogPrivacyMode?: boolean
|
|
25
27
|
posthogGroups?: Record<string, any>
|
|
28
|
+
posthogModelOverride?: string
|
|
29
|
+
posthogProviderOverride?: string
|
|
26
30
|
}
|
|
27
31
|
|
|
28
32
|
interface PostHogInput {
|
|
@@ -80,14 +84,15 @@ export const createInstrumentationMiddleware = (
|
|
|
80
84
|
const result = await doGenerate()
|
|
81
85
|
const latency = (Date.now() - startTime) / 1000
|
|
82
86
|
|
|
83
|
-
const modelId = result.response?.modelId ? result.response.modelId : model.modelId
|
|
87
|
+
const modelId = options.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId)
|
|
88
|
+
const provider = options.posthogProviderOverride ?? model.provider
|
|
84
89
|
|
|
85
90
|
sendEventToPosthog({
|
|
86
91
|
client: phClient,
|
|
87
92
|
distinctId: options.posthogDistinctId,
|
|
88
93
|
traceId: options.posthogTraceId,
|
|
89
94
|
model: modelId,
|
|
90
|
-
provider:
|
|
95
|
+
provider: provider,
|
|
91
96
|
input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
|
|
92
97
|
output: [{ content: result.text, role: 'assistant' }],
|
|
93
98
|
latency,
|
|
@@ -101,7 +106,7 @@ export const createInstrumentationMiddleware = (
|
|
|
101
106
|
})
|
|
102
107
|
|
|
103
108
|
return result
|
|
104
|
-
} catch (error) {
|
|
109
|
+
} catch (error: any) {
|
|
105
110
|
const modelId = model.modelId
|
|
106
111
|
sendEventToPosthog({
|
|
107
112
|
client: phClient,
|
|
@@ -114,11 +119,13 @@ export const createInstrumentationMiddleware = (
|
|
|
114
119
|
latency: 0,
|
|
115
120
|
baseURL: '',
|
|
116
121
|
params: mergedParams as any,
|
|
117
|
-
httpStatus: 500,
|
|
122
|
+
httpStatus: error?.status ? error.status : 500,
|
|
118
123
|
usage: {
|
|
119
124
|
inputTokens: 0,
|
|
120
125
|
outputTokens: 0,
|
|
121
126
|
},
|
|
127
|
+
isError: true,
|
|
128
|
+
error: JSON.stringify(error),
|
|
122
129
|
})
|
|
123
130
|
throw error
|
|
124
131
|
}
|
|
@@ -132,6 +139,9 @@ export const createInstrumentationMiddleware = (
|
|
|
132
139
|
...options,
|
|
133
140
|
...mapVercelParams(params),
|
|
134
141
|
}
|
|
142
|
+
|
|
143
|
+
const modelId = options.posthogModelOverride ?? model.modelId
|
|
144
|
+
const provider = options.posthogProviderOverride ?? model.provider
|
|
135
145
|
try {
|
|
136
146
|
const { stream, ...rest } = await doStream()
|
|
137
147
|
|
|
@@ -155,8 +165,8 @@ export const createInstrumentationMiddleware = (
|
|
|
155
165
|
client: phClient,
|
|
156
166
|
distinctId: options.posthogDistinctId,
|
|
157
167
|
traceId: options.posthogTraceId,
|
|
158
|
-
model:
|
|
159
|
-
provider:
|
|
168
|
+
model: modelId,
|
|
169
|
+
provider: provider,
|
|
160
170
|
input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
|
|
161
171
|
output: [{ content: generatedText, role: 'assistant' }],
|
|
162
172
|
latency,
|
|
@@ -172,23 +182,25 @@ export const createInstrumentationMiddleware = (
|
|
|
172
182
|
stream: stream.pipeThrough(transformStream),
|
|
173
183
|
...rest,
|
|
174
184
|
}
|
|
175
|
-
} catch (error) {
|
|
185
|
+
} catch (error: any) {
|
|
176
186
|
sendEventToPosthog({
|
|
177
187
|
client: phClient,
|
|
178
188
|
distinctId: options.posthogDistinctId,
|
|
179
189
|
traceId: options.posthogTraceId,
|
|
180
|
-
model:
|
|
181
|
-
provider:
|
|
190
|
+
model: modelId,
|
|
191
|
+
provider: provider,
|
|
182
192
|
input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
|
|
183
193
|
output: [],
|
|
184
194
|
latency: 0,
|
|
185
195
|
baseURL: '',
|
|
186
196
|
params: mergedParams as any,
|
|
187
|
-
httpStatus: 500,
|
|
197
|
+
httpStatus: error?.status ? error.status : 500,
|
|
188
198
|
usage: {
|
|
189
199
|
inputTokens: 0,
|
|
190
200
|
outputTokens: 0,
|
|
191
201
|
},
|
|
202
|
+
isError: true,
|
|
203
|
+
error: JSON.stringify(error),
|
|
192
204
|
})
|
|
193
205
|
throw error
|
|
194
206
|
}
|