@posthog/ai 2.3.1 → 2.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/index.cjs.js +193 -17
- package/lib/index.cjs.js.map +1 -1
- package/lib/index.d.ts +28 -1
- package/lib/index.esm.js +192 -18
- package/lib/index.esm.js.map +1 -1
- package/lib/posthog-ai/src/anthropic/index.d.ts +28 -0
- package/lib/posthog-ai/src/index.d.ts +2 -0
- package/lib/posthog-ai/src/utils.d.ts +5 -3
- package/package.json +10 -8
- package/src/anthropic/index.ts +195 -0
- package/src/index.ts +2 -0
- package/src/openai/azure.ts +7 -7
- package/src/openai/index.ts +6 -6
- package/src/utils.ts +7 -3
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@posthog/ai",
-  "version": "2.3.1",
+  "version": "2.4.0",
   "description": "PostHog Node.js AI integrations",
   "repository": {
     "type": "git",
@@ -14,25 +14,27 @@
   "devDependencies": {
     "@types/jest": "^28.1.5",
     "@types/node": "^18.0.0",
+    "ai": "^4.0.0",
     "jest": "^29.0.0",
     "node-fetch": "^3.3.2",
+    "openai": "^4.0.0",
     "ts-jest": "^29.0.0",
-    "typescript": "^4.7.4"
-    "ai": "^4.0.0",
-    "openai": "^4.0.0"
+    "typescript": "^4.7.4"
   },
   "keywords": [
     "posthog",
     "ai",
     "openai",
     "anthropic",
-    "llm"
+    "llm",
+    "observability"
   ],
   "dependencies": {
-    "
-    "zod": "^3.24.1",
+    "@anthropic-ai/sdk": "^0.36.3",
     "ai": "^4.0.0",
-    "openai": "^4.0.0"
+    "openai": "^4.0.0",
+    "uuid": "^11.0.5",
+    "zod": "^3.24.1"
   },
   "scripts": {
     "test": "jest",
package/src/anthropic/index.ts
ADDED

@@ -0,0 +1,195 @@
+import AnthropicOriginal from '@anthropic-ai/sdk'
+import { PostHog } from 'posthog-node'
+import { v4 as uuidv4 } from 'uuid'
+import { PassThrough } from 'stream'
+import { formatResponseAnthropic, mergeSystemPrompt, MonitoringParams, sendEventToPosthog } from '../utils'
+
+type MessageCreateParamsNonStreaming = AnthropicOriginal.Messages.MessageCreateParamsNonStreaming
+type MessageCreateParamsStreaming = AnthropicOriginal.Messages.MessageCreateParamsStreaming
+type MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams
+type Message = AnthropicOriginal.Messages.Message
+type RawMessageStreamEvent = AnthropicOriginal.Messages.RawMessageStreamEvent
+type MessageCreateParamsBase = AnthropicOriginal.Messages.MessageCreateParams
+
+import type { APIPromise, RequestOptions } from '@anthropic-ai/sdk/core'
+import type { Stream } from '@anthropic-ai/sdk/streaming'
+
+interface MonitoringAnthropicConfig {
+  apiKey: string
+  posthog: PostHog
+  baseURL?: string
+}
+
+export class PostHogAnthropic extends AnthropicOriginal {
+  private readonly phClient: PostHog
+  public messages: WrappedMessages
+
+  constructor(config: MonitoringAnthropicConfig) {
+    const { posthog, ...anthropicConfig } = config
+    super(anthropicConfig)
+    this.phClient = posthog
+    this.messages = new WrappedMessages(this, this.phClient)
+  }
+}
+
+export class WrappedMessages extends AnthropicOriginal.Messages {
+  private readonly phClient: PostHog
+
+  constructor(parentClient: PostHogAnthropic, phClient: PostHog) {
+    super(parentClient)
+    this.phClient = phClient
+  }
+
+  public create(body: MessageCreateParamsNonStreaming, options?: RequestOptions): APIPromise<Message>
+  public create(
+    body: MessageCreateParamsStreaming & MonitoringParams,
+    options?: RequestOptions
+  ): APIPromise<Stream<RawMessageStreamEvent>>
+  public create(
+    body: MessageCreateParamsBase & MonitoringParams,
+    options?: RequestOptions
+  ): APIPromise<Stream<RawMessageStreamEvent> | Message>
+  public create(
+    body: MessageCreateParams & MonitoringParams,
+    options?: RequestOptions
+  ): APIPromise<Message> | APIPromise<Stream<RawMessageStreamEvent>> {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      ...anthropicParams
+    } = body
+
+    const traceId = posthogTraceId ?? uuidv4()
+    const startTime = Date.now()
+
+    const parentPromise = super.create(anthropicParams, options)
+
+    if (anthropicParams.stream) {
+      return parentPromise.then((value) => {
+        const passThroughStream = new PassThrough({ objectMode: true })
+        let accumulatedContent = ''
+        let usage: { inputTokens: number; outputTokens: number } = {
+          inputTokens: 0,
+          outputTokens: 0,
+        }
+        if ('tee' in value) {
+          const anthropicStream = value
+          ;(async () => {
+            try {
+              for await (const chunk of anthropicStream) {
+                if ('delta' in chunk) {
+                  if ('text' in chunk.delta) {
+                    const delta = chunk?.delta?.text ?? ''
+                    accumulatedContent += delta
+                  }
+                }
+                if (chunk.type == 'message_start') {
+                  usage.inputTokens = chunk.message.usage.input_tokens ?? 0
+                }
+                if ('usage' in chunk) {
+                  usage.outputTokens = chunk.usage.output_tokens ?? 0
+                }
+                passThroughStream.write(chunk)
+              }
+              const latency = (Date.now() - startTime) / 1000
+              sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId ?? traceId,
+                traceId,
+                model: anthropicParams.model,
+                provider: 'anthropic',
+                input: mergeSystemPrompt(anthropicParams, 'anthropic'),
+                output: [{ content: accumulatedContent, role: 'assistant' }],
+                latency,
+                baseURL: (this as any).baseURL ?? '',
+                params: body,
+                httpStatus: 200,
+                usage,
+              })
+              passThroughStream.end()
+            } catch (error: any) {
+              // error handling
+              sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId ?? traceId,
+                traceId,
+                model: anthropicParams.model,
+                provider: 'anthropic',
+                input: mergeSystemPrompt(anthropicParams, 'anthropic'),
+                output: [],
+                latency: 0,
+                baseURL: (this as any).baseURL ?? '',
+                params: body,
+                httpStatus: error?.status ? error.status : 500,
+                usage: {
+                  inputTokens: 0,
+                  outputTokens: 0,
+                },
+                isError: true,
+                error: JSON.stringify(error),
+              })
+              passThroughStream.emit('error', error)
+            }
+          })()
+        }
+        return passThroughStream as unknown as Stream<RawMessageStreamEvent>
+      }) as APIPromise<Stream<RawMessageStreamEvent>>
+    } else {
+      const wrappedPromise = parentPromise.then(
+        (result) => {
+          if ('content' in result) {
+            const latency = (Date.now() - startTime) / 1000
+            sendEventToPosthog({
+              client: this.phClient,
+              distinctId: posthogDistinctId ?? traceId,
+              traceId,
+              model: anthropicParams.model,
+              provider: 'anthropic',
+              input: mergeSystemPrompt(anthropicParams, 'anthropic'),
+              output: formatResponseAnthropic(result),
+              latency,
+              baseURL: (this as any).baseURL ?? '',
+              params: body,
+              httpStatus: 200,
+              usage: {
+                inputTokens: result.usage.input_tokens ?? 0,
+                outputTokens: result.usage.output_tokens ?? 0,
+              },
+            })
+          }
+          return result
+        },
+        (error: any) => {
+          sendEventToPosthog({
+            client: this.phClient,
+            distinctId: posthogDistinctId ?? traceId,
+            traceId,
+            model: anthropicParams.model,
+            provider: 'anthropic',
+            input: mergeSystemPrompt(anthropicParams, 'anthropic'),
+            output: [],
+            latency: 0,
+            baseURL: (this as any).baseURL ?? '',
+            params: body,
+            httpStatus: error?.status ? error.status : 500,
+            usage: {
+              inputTokens: 0,
+              outputTokens: 0,
+            },
+            isError: true,
+            error: JSON.stringify(error),
+          })
+          throw error
+        }
+      ) as APIPromise<Message>
+
+      return wrappedPromise
+    }
+  }
+}
+
+export default PostHogAnthropic
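For orientation, a minimal usage sketch of the wrapper added above. It assumes the package-root Anthropic export introduced in src/index.ts below; the API key, PostHog host, model name, and property values are placeholders. The posthog* fields are the MonitoringParams that create() strips from the body before forwarding the request:

import { PostHog } from 'posthog-node'
import { Anthropic } from '@posthog/ai'

// Placeholder credentials and host.
const phClient = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' })
const anthropic = new Anthropic({ apiKey: '<anthropic_api_key>', posthog: phClient })

const response = await anthropic.messages.create({
  model: 'claude-3-5-sonnet-latest', // placeholder model name
  max_tokens: 256,
  messages: [{ role: 'user', content: 'Hello' }],
  // Monitoring params: used for the captured PostHog event, not sent to Anthropic.
  posthogDistinctId: 'user_123',
  posthogProperties: { feature: 'example' },
})

if ('content' in response) {
  console.log(response.content)
}
await phClient.shutdown()

Passing stream: true instead returns the pass-through stream shown above; content and token usage are accumulated chunk by chunk and the PostHog event is captured once the stream ends.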
package/src/index.ts
CHANGED

@@ -1,7 +1,9 @@
 import PostHogOpenAI from './openai'
 import PostHogAzureOpenAI from './openai/azure'
 import { wrapVercelLanguageModel } from './vercel/middleware'
+import PostHogAnthropic from './anthropic'
 
 export { PostHogOpenAI as OpenAI }
 export { PostHogAzureOpenAI as AzureOpenAI }
+export { PostHogAnthropic as Anthropic }
 export { wrapVercelLanguageModel as withTracing }
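A consumer-side import sketch, using the names exported above:

import { OpenAI, AzureOpenAI, Anthropic, withTracing } from '@posthog/ai'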
package/src/openai/azure.ts
CHANGED

@@ -2,7 +2,7 @@ import OpenAIOrignal, { AzureOpenAI } from 'openai'
 import { PostHog } from 'posthog-node'
 import { v4 as uuidv4 } from 'uuid'
 import { PassThrough } from 'stream'
-import {
+import { formatResponseOpenAI, MonitoringParams, sendEventToPosthog } from '../utils'
 
 type ChatCompletion = OpenAIOrignal.ChatCompletion
 type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk
@@ -118,7 +118,7 @@ export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
           traceId,
           model,
           provider: 'azure',
-          input:
+          input: openAIParams.messages,
           output: [{ content: accumulatedContent, role: 'assistant' }],
           latency,
           baseURL: (this as any).baseURL ?? '',
@@ -135,7 +135,7 @@ export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
           traceId,
           model,
           provider: 'azure',
-          input:
+          input: openAIParams.messages,
           output: JSON.stringify(error),
           latency: 0,
           baseURL: (this as any).baseURL ?? '',
@@ -168,9 +168,9 @@ export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
             distinctId: posthogDistinctId ?? traceId,
             traceId,
             model,
-            provider: '
-            input:
-            output:
+            provider: 'azure',
+            input: openAIParams.messages,
+            output: formatResponseOpenAI(result),
             latency,
             baseURL: (this as any).baseURL ?? '',
             params: body,
@@ -190,7 +190,7 @@ export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
           traceId,
           model: openAIParams.model,
           provider: 'azure',
-          input:
+          input: openAIParams.messages,
           output: [],
           latency: 0,
           baseURL: (this as any).baseURL ?? '',
package/src/openai/index.ts
CHANGED

@@ -2,7 +2,7 @@ import OpenAIOrignal from 'openai'
 import { PostHog } from 'posthog-node'
 import { v4 as uuidv4 } from 'uuid'
 import { PassThrough } from 'stream'
-import {
+import { formatResponseOpenAI, MonitoringParams, sendEventToPosthog } from '../utils'
 
 type ChatCompletion = OpenAIOrignal.ChatCompletion
 type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk
@@ -115,7 +115,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
           traceId,
           model: openAIParams.model,
           provider: 'openai',
-          input:
+          input: openAIParams.messages,
           output: [{ content: accumulatedContent, role: 'assistant' }],
           latency,
           baseURL: (this as any).baseURL ?? '',
@@ -132,7 +132,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
           traceId,
           model: openAIParams.model,
           provider: 'openai',
-          input:
+          input: openAIParams.messages,
           output: [],
           latency: 0,
           baseURL: (this as any).baseURL ?? '',
@@ -162,8 +162,8 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
             traceId,
             model: openAIParams.model,
             provider: 'openai',
-            input:
-            output:
+            input: openAIParams.messages,
+            output: formatResponseOpenAI(result),
             latency,
             baseURL: (this as any).baseURL ?? '',
             params: body,
@@ -183,7 +183,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
           traceId,
           model: openAIParams.model,
           provider: 'openai',
-          input:
+          input: openAIParams.messages,
           output: [],
           latency: 0,
           baseURL: (this as any).baseURL ?? '',
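The input/output changes in both OpenAI wrappers mean the captured event now carries the caller's messages array as input and a normalized response array as output. An illustrative sketch of the resulting shapes (values are invented):

// Illustrative only: shapes passed to sendEventToPosthog after this change.
const input = [{ role: 'user', content: 'Hello' }] // openAIParams.messages
const output: Array<{ role: string; content: string }> = [
  { role: 'assistant', content: 'Hi there!' }, // formatResponseOpenAI(result)
]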
package/src/utils.ts
CHANGED

@@ -1,7 +1,9 @@
 import { PostHog } from 'posthog-node'
 import OpenAIOrignal from 'openai'
+import AnthropicOriginal from '@anthropic-ai/sdk'
 
 type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams
+type MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams
 
 export interface MonitoringParams {
   posthogDistinctId?: string
@@ -11,7 +13,9 @@ export interface MonitoringParams {
   posthogGroups?: Record<string, any>
 }
 
-export const getModelParams = (
+export const getModelParams = (
+  params: (ChatCompletionCreateParamsBase | MessageCreateParams) & MonitoringParams
+): Record<string, any> => {
   const modelParams: Record<string, any> = {}
   const paramKeys = [
     'temperature',
@@ -76,7 +80,7 @@ export const formatResponseOpenAI = (response: any): Array<{ role: string; conte
   return output
 }
 
-export const mergeSystemPrompt = (params:
+export const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {
   if (provider == 'anthropic') {
     const messages = params.messages || []
     if (!(params as any).system) {
@@ -104,7 +108,7 @@ export type SendEventToPosthogParams = {
   baseURL: string
   httpStatus: number
   usage?: { inputTokens?: number; outputTokens?: number }
-  params: ChatCompletionCreateParamsBase & MonitoringParams
+  params: (ChatCompletionCreateParamsBase | MessageCreateParams) & MonitoringParams
 }
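A small sketch of what the widened signature allows, calling the internal getModelParams helper from src/utils.ts with either provider's params. Model names and values are placeholders; the exact keys extracted are listed in paramKeys above:

import { getModelParams } from './utils'

// OpenAI chat-completion params plus monitoring fields.
const openaiModelParams = getModelParams({
  model: 'gpt-4o-mini', // placeholder
  messages: [{ role: 'user', content: 'Hi' }],
  temperature: 0.2,
  posthogDistinctId: 'user_123',
})

// Anthropic message params plus monitoring fields are now accepted too.
const anthropicModelParams = getModelParams({
  model: 'claude-3-5-sonnet-latest', // placeholder
  max_tokens: 256,
  messages: [{ role: 'user', content: 'Hi' }],
  temperature: 0.2,
  posthogTraceId: 'trace-abc',
})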