@posthog/ai 1.2.0 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/package.json +1 -1
- package/src/index.ts +1 -1
- package/src/openai/index.ts +1 -1
- package/src/utils.ts +1 -1
- package/src/vercel/middleware.ts +144 -139
package/README.md CHANGED
package/package.json CHANGED
package/src/index.ts CHANGED
package/src/openai/index.ts CHANGED
@@ -2,7 +2,7 @@ import OpenAIOrignal from 'openai'
 import { PostHog } from 'posthog-node'
 import { v4 as uuidv4 } from 'uuid'
 import { PassThrough } from 'stream'
-import { mergeSystemPrompt,
+import { mergeSystemPrompt, MonitoringParams, sendEventToPosthog } from '../utils'

 type ChatCompletion = OpenAIOrignal.ChatCompletion
 type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk
package/src/utils.ts CHANGED
@@ -123,7 +123,7 @@ export type SendEventToPosthogParams = {
   latency: number
   baseURL: string
   httpStatus: number
-  usage
+  usage?: { input_tokens?: number; output_tokens?: number }
   params: ChatCompletionCreateParamsBase & MonitoringParams
 }
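
For context, a minimal TypeScript sketch of what the now-optional usage field permits. Only the shape of the type comes from the hunk above; the variable names and token counts are illustrative assumptions:

// Sketch only: mirrors the `usage` shape introduced above; values are made up.
type Usage = { input_tokens?: number; output_tokens?: number }

const fullUsage: Usage = { input_tokens: 12, output_tokens: 34 }
const partialUsage: Usage = { output_tokens: 34 }  // either count may be omitted
const noUsage: Usage | undefined = undefined       // the field itself is now optional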
package/src/vercel/middleware.ts CHANGED
@@ -1,157 +1,162 @@
-import { experimental_wrapLanguageModel as wrapLanguageModel } from 'ai'
+import { experimental_wrapLanguageModel as wrapLanguageModel } from 'ai'
 import type {
-} from 'ai'
-import { v4 as uuidv4 } from 'uuid'
-import type { PostHog } from 'posthog-node'
-import { sendEventToPosthog } from '../utils'
+  LanguageModelV1,
+  Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware,
+  LanguageModelV1StreamPart,
+} from 'ai'
+import { v4 as uuidv4 } from 'uuid'
+import type { PostHog } from 'posthog-node'
+import { sendEventToPosthog } from '../utils'

 interface CreateInstrumentationMiddlewareOptions {
+  posthog_distinct_id?: string
+  posthog_trace_id: string
+  posthog_properties?: Record<string, any>
+  posthog_privacy_mode?: boolean
+  posthog_groups?: string[]
 }

-export const createInstrumentationMiddleware = (
+export const createInstrumentationMiddleware = (
+  phClient: PostHog,
+  model: LanguageModelV1,
+  options: CreateInstrumentationMiddlewareOptions
+): LanguageModelV1Middleware => {
+  const middleware: LanguageModelV1Middleware = {
+    wrapGenerate: async ({ doGenerate, params }) => {
+      const startTime = Date.now()

+      try {
+        const result = await doGenerate()
+        const latency = (Date.now() - startTime) / 1000

+        sendEventToPosthog({
+          client: phClient,
+          distinctId: options.posthog_distinct_id,
+          traceId: options.posthog_trace_id,
+          model: model.modelId,
+          provider: 'vercel',
+          input: options.posthog_privacy_mode ? '' : params.prompt,
+          output: [{ content: result.text, role: 'assistant' }],
+          latency,
+          baseURL: '',
+          params: { posthog_properties: options } as any,
+          httpStatus: 200,
+          usage: {
+            input_tokens: result.usage.promptTokens,
+            output_tokens: result.usage.completionTokens,
+          },
+        })

+        return result
+      } catch (error) {
+        sendEventToPosthog({
+          client: phClient,
+          distinctId: options.posthog_distinct_id,
+          traceId: options.posthog_trace_id,
+          model: model.modelId,
+          provider: 'vercel',
+          input: options.posthog_privacy_mode ? '' : params.prompt,
+          output: [],
+          latency: 0,
+          baseURL: '',
+          params: { posthog_properties: options } as any,
+          httpStatus: 500,
+          usage: {
+            input_tokens: 0,
+            output_tokens: 0,
+          },
+        })
+        throw error
+      }
+    },

+    wrapStream: async ({ doStream, params }) => {
+      const startTime = Date.now()
+      let generatedText = ''
+      let usage: { input_tokens?: number; output_tokens?: number } = {}

+      try {
+        const { stream, ...rest } = await doStream()

+        const transformStream = new TransformStream<LanguageModelV1StreamPart, LanguageModelV1StreamPart>({
+          transform(chunk, controller) {
+            if (chunk.type === 'text-delta') {
+              generatedText += chunk.textDelta
+            }
+            if (chunk.type === 'finish') {
+              usage = {
+                input_tokens: chunk.usage?.promptTokens,
+                output_tokens: chunk.usage?.completionTokens,
+              }
+            }
+            controller.enqueue(chunk)
+          },

-});
-},
-});
+          flush() {
+            const latency = (Date.now() - startTime) / 1000
+            sendEventToPosthog({
+              client: phClient,
+              distinctId: options.posthog_distinct_id,
+              traceId: options.posthog_trace_id,
+              model: model.modelId,
+              provider: 'vercel',
+              input: options.posthog_privacy_mode ? '' : params.prompt,
+              output: [{ content: generatedText, role: 'assistant' }],
+              latency,
+              baseURL: '',
+              params: { posthog_properties: options } as any,
+              httpStatus: 200,
+              usage,
+            })
+          },
+        })

+        return {
+          stream: stream.pipeThrough(transformStream),
+          ...rest,
+        }
+      } catch (error) {
+        sendEventToPosthog({
+          client: phClient,
+          distinctId: options.posthog_distinct_id,
+          traceId: options.posthog_trace_id,
+          model: model.modelId,
+          provider: 'vercel',
+          input: options.posthog_privacy_mode ? '' : params.prompt,
+          output: [],
+          latency: 0,
+          baseURL: '',
+          params: { posthog_properties: options } as any,
+          httpStatus: 500,
+          usage: {
+            input_tokens: 0,
+            output_tokens: 0,
+          },
+        })
+        throw error
+      }
+    },
+  }

-}
+  return middleware
+}

 export const wrapVercelLanguageModel = (
-) => {
+  model: LanguageModelV1,
+  phClient: PostHog,
+  options: CreateInstrumentationMiddlewareOptions
+): LanguageModelV1 => {
+  const traceId = options.posthog_trace_id ?? uuidv4()
+  const middleware = createInstrumentationMiddleware(phClient, model, {
+    ...options,
+    posthog_trace_id: traceId,
+    posthog_distinct_id: options.posthog_distinct_id ?? traceId,
+  })

+  const wrappedModel = wrapLanguageModel({
+    model,
+    middleware,
+  })

-}
+  return wrappedModel
+}
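
For reference, a minimal sketch of how the new wrapVercelLanguageModel might be used with the Vercel AI SDK. The import path from '@posthog/ai', the model id, the API key placeholder, and the prompt are assumptions for illustration, not values taken from this diff:

// Sketch under assumptions: wrapVercelLanguageModel is re-exported from the
// package entry point, and an OpenAI model from @ai-sdk/openai is being wrapped.
import { openai } from '@ai-sdk/openai'
import { generateText } from 'ai'
import { PostHog } from 'posthog-node'
import { wrapVercelLanguageModel } from '@posthog/ai' // assumed export path

const phClient = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' })

// Wrap the model so wrapGenerate/wrapStream emit a PostHog event per call.
const model = wrapVercelLanguageModel(openai('gpt-4o-mini'), phClient, {
  posthog_trace_id: 'trace-123',   // required by CreateInstrumentationMiddlewareOptions
  posthog_distinct_id: 'user-123', // optional; falls back to the trace id
  posthog_privacy_mode: false,     // when true, the prompt is not captured as input
})

async function main() {
  const { text } = await generateText({
    model,
    prompt: 'Say hello to PostHog LLM observability',
  })
  console.log(text)
  await phClient.shutdown()
}

main()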