@posthog/ai 2.3.0 → 2.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/lib/index.cjs.js +207 -21
- package/lib/index.cjs.js.map +1 -1
- package/lib/index.d.ts +28 -1
- package/lib/index.esm.js +206 -22
- package/lib/index.esm.js.map +1 -1
- package/lib/posthog-ai/src/anthropic/index.d.ts +28 -0
- package/lib/posthog-ai/src/index.d.ts +2 -0
- package/lib/posthog-ai/src/utils.d.ts +5 -3
- package/package.json +10 -8
- package/src/anthropic/index.ts +195 -0
- package/src/index.ts +2 -0
- package/src/openai/azure.ts +8 -7
- package/src/openai/index.ts +7 -6
- package/src/utils.ts +7 -3
- package/src/vercel/middleware.ts +15 -7
package/lib/posthog-ai/src/anthropic/index.d.ts
ADDED
@@ -0,0 +1,28 @@
+import AnthropicOriginal from '@anthropic-ai/sdk';
+import { PostHog } from 'posthog-node';
+import { MonitoringParams } from '../utils';
+type MessageCreateParamsNonStreaming = AnthropicOriginal.Messages.MessageCreateParamsNonStreaming;
+type MessageCreateParamsStreaming = AnthropicOriginal.Messages.MessageCreateParamsStreaming;
+type Message = AnthropicOriginal.Messages.Message;
+type RawMessageStreamEvent = AnthropicOriginal.Messages.RawMessageStreamEvent;
+type MessageCreateParamsBase = AnthropicOriginal.Messages.MessageCreateParams;
+import type { APIPromise, RequestOptions } from '@anthropic-ai/sdk/core';
+import type { Stream } from '@anthropic-ai/sdk/streaming';
+interface MonitoringAnthropicConfig {
+    apiKey: string;
+    posthog: PostHog;
+    baseURL?: string;
+}
+export declare class PostHogAnthropic extends AnthropicOriginal {
+    private readonly phClient;
+    messages: WrappedMessages;
+    constructor(config: MonitoringAnthropicConfig);
+}
+export declare class WrappedMessages extends AnthropicOriginal.Messages {
+    private readonly phClient;
+    constructor(parentClient: PostHogAnthropic, phClient: PostHog);
+    create(body: MessageCreateParamsNonStreaming, options?: RequestOptions): APIPromise<Message>;
+    create(body: MessageCreateParamsStreaming & MonitoringParams, options?: RequestOptions): APIPromise<Stream<RawMessageStreamEvent>>;
+    create(body: MessageCreateParamsBase & MonitoringParams, options?: RequestOptions): APIPromise<Stream<RawMessageStreamEvent> | Message>;
+}
+export default PostHogAnthropic;
package/lib/posthog-ai/src/index.d.ts
CHANGED
@@ -1,6 +1,8 @@
 import PostHogOpenAI from './openai';
 import PostHogAzureOpenAI from './openai/azure';
 import { wrapVercelLanguageModel } from './vercel/middleware';
+import PostHogAnthropic from './anthropic';
 export { PostHogOpenAI as OpenAI };
 export { PostHogAzureOpenAI as AzureOpenAI };
+export { PostHogAnthropic as Anthropic };
 export { wrapVercelLanguageModel as withTracing };
package/lib/posthog-ai/src/utils.d.ts
CHANGED
@@ -1,6 +1,8 @@
 import { PostHog } from 'posthog-node';
 import OpenAIOrignal from 'openai';
+import AnthropicOriginal from '@anthropic-ai/sdk';
 type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams;
+type MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams;
 export interface MonitoringParams {
     posthogDistinctId?: string;
     posthogTraceId?: string;
@@ -8,7 +10,7 @@ export interface MonitoringParams {
     posthogPrivacyMode?: boolean;
     posthogGroups?: Record<string, any>;
 }
-export declare const getModelParams: (params: ChatCompletionCreateParamsBase & MonitoringParams) => Record<string, any>;
+export declare const getModelParams: (params: (ChatCompletionCreateParamsBase | MessageCreateParams) & MonitoringParams) => Record<string, any>;
 /**
  * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.
  */
@@ -24,7 +26,7 @@ export declare const formatResponseOpenAI: (response: any) => Array<{
     role: string;
     content: string;
 }>;
-export declare const mergeSystemPrompt: (params:
+export declare const mergeSystemPrompt: (params: MessageCreateParams & MonitoringParams, provider: string) => any;
 export declare const withPrivacyMode: (client: PostHog, privacyMode: boolean, input: any) => any;
 export type SendEventToPosthogParams = {
     client: PostHog;
@@ -41,7 +43,7 @@ export type SendEventToPosthogParams = {
         inputTokens?: number;
         outputTokens?: number;
     };
-    params: ChatCompletionCreateParamsBase & MonitoringParams;
+    params: (ChatCompletionCreateParamsBase | MessageCreateParams) & MonitoringParams;
    isError?: boolean;
    error?: string;
};
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@posthog/ai",
-  "version": "2.3.0",
+  "version": "2.4.0",
   "description": "PostHog Node.js AI integrations",
   "repository": {
     "type": "git",
@@ -14,25 +14,27 @@
   "devDependencies": {
     "@types/jest": "^28.1.5",
     "@types/node": "^18.0.0",
+    "ai": "^4.0.0",
     "jest": "^29.0.0",
     "node-fetch": "^3.3.2",
+    "openai": "^4.0.0",
     "ts-jest": "^29.0.0",
-    "typescript": "^4.7.4"
-    "ai": "^4.0.0",
-    "openai": "^4.0.0"
+    "typescript": "^4.7.4"
   },
   "keywords": [
     "posthog",
     "ai",
     "openai",
     "anthropic",
-    "llm"
+    "llm",
+    "observability"
   ],
   "dependencies": {
-    "
-    "zod": "^3.24.1",
+    "@anthropic-ai/sdk": "^0.36.3",
     "ai": "^4.0.0",
-    "openai": "^4.0.0"
+    "openai": "^4.0.0",
+    "uuid": "^11.0.5",
+    "zod": "^3.24.1"
   },
   "scripts": {
     "test": "jest",
package/src/anthropic/index.ts
ADDED
@@ -0,0 +1,195 @@
+import AnthropicOriginal from '@anthropic-ai/sdk'
+import { PostHog } from 'posthog-node'
+import { v4 as uuidv4 } from 'uuid'
+import { PassThrough } from 'stream'
+import { formatResponseAnthropic, mergeSystemPrompt, MonitoringParams, sendEventToPosthog } from '../utils'
+
+type MessageCreateParamsNonStreaming = AnthropicOriginal.Messages.MessageCreateParamsNonStreaming
+type MessageCreateParamsStreaming = AnthropicOriginal.Messages.MessageCreateParamsStreaming
+type MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams
+type Message = AnthropicOriginal.Messages.Message
+type RawMessageStreamEvent = AnthropicOriginal.Messages.RawMessageStreamEvent
+type MessageCreateParamsBase = AnthropicOriginal.Messages.MessageCreateParams
+
+import type { APIPromise, RequestOptions } from '@anthropic-ai/sdk/core'
+import type { Stream } from '@anthropic-ai/sdk/streaming'
+
+interface MonitoringAnthropicConfig {
+  apiKey: string
+  posthog: PostHog
+  baseURL?: string
+}
+
+export class PostHogAnthropic extends AnthropicOriginal {
+  private readonly phClient: PostHog
+  public messages: WrappedMessages
+
+  constructor(config: MonitoringAnthropicConfig) {
+    const { posthog, ...anthropicConfig } = config
+    super(anthropicConfig)
+    this.phClient = posthog
+    this.messages = new WrappedMessages(this, this.phClient)
+  }
+}
+
+export class WrappedMessages extends AnthropicOriginal.Messages {
+  private readonly phClient: PostHog
+
+  constructor(parentClient: PostHogAnthropic, phClient: PostHog) {
+    super(parentClient)
+    this.phClient = phClient
+  }
+
+  public create(body: MessageCreateParamsNonStreaming, options?: RequestOptions): APIPromise<Message>
+  public create(
+    body: MessageCreateParamsStreaming & MonitoringParams,
+    options?: RequestOptions
+  ): APIPromise<Stream<RawMessageStreamEvent>>
+  public create(
+    body: MessageCreateParamsBase & MonitoringParams,
+    options?: RequestOptions
+  ): APIPromise<Stream<RawMessageStreamEvent> | Message>
+  public create(
+    body: MessageCreateParams & MonitoringParams,
+    options?: RequestOptions
+  ): APIPromise<Message> | APIPromise<Stream<RawMessageStreamEvent>> {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      ...anthropicParams
+    } = body
+
+    const traceId = posthogTraceId ?? uuidv4()
+    const startTime = Date.now()
+
+    const parentPromise = super.create(anthropicParams, options)
+
+    if (anthropicParams.stream) {
+      return parentPromise.then((value) => {
+        const passThroughStream = new PassThrough({ objectMode: true })
+        let accumulatedContent = ''
+        let usage: { inputTokens: number; outputTokens: number } = {
+          inputTokens: 0,
+          outputTokens: 0,
+        }
+        if ('tee' in value) {
+          const anthropicStream = value
+          ;(async () => {
+            try {
+              for await (const chunk of anthropicStream) {
+                if ('delta' in chunk) {
+                  if ('text' in chunk.delta) {
+                    const delta = chunk?.delta?.text ?? ''
+                    accumulatedContent += delta
+                  }
+                }
+                if (chunk.type == 'message_start') {
+                  usage.inputTokens = chunk.message.usage.input_tokens ?? 0
+                }
+                if ('usage' in chunk) {
+                  usage.outputTokens = chunk.usage.output_tokens ?? 0
+                }
+                passThroughStream.write(chunk)
+              }
+              const latency = (Date.now() - startTime) / 1000
+              sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId ?? traceId,
+                traceId,
+                model: anthropicParams.model,
+                provider: 'anthropic',
+                input: mergeSystemPrompt(anthropicParams, 'anthropic'),
+                output: [{ content: accumulatedContent, role: 'assistant' }],
+                latency,
+                baseURL: (this as any).baseURL ?? '',
+                params: body,
+                httpStatus: 200,
+                usage,
+              })
+              passThroughStream.end()
+            } catch (error: any) {
+              // error handling
+              sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId ?? traceId,
+                traceId,
+                model: anthropicParams.model,
+                provider: 'anthropic',
+                input: mergeSystemPrompt(anthropicParams, 'anthropic'),
+                output: [],
+                latency: 0,
+                baseURL: (this as any).baseURL ?? '',
+                params: body,
+                httpStatus: error?.status ? error.status : 500,
+                usage: {
+                  inputTokens: 0,
+                  outputTokens: 0,
+                },
+                isError: true,
+                error: JSON.stringify(error),
+              })
+              passThroughStream.emit('error', error)
+            }
+          })()
+        }
+        return passThroughStream as unknown as Stream<RawMessageStreamEvent>
+      }) as APIPromise<Stream<RawMessageStreamEvent>>
+    } else {
+      const wrappedPromise = parentPromise.then(
+        (result) => {
+          if ('content' in result) {
+            const latency = (Date.now() - startTime) / 1000
+            sendEventToPosthog({
+              client: this.phClient,
+              distinctId: posthogDistinctId ?? traceId,
+              traceId,
+              model: anthropicParams.model,
+              provider: 'anthropic',
+              input: mergeSystemPrompt(anthropicParams, 'anthropic'),
+              output: formatResponseAnthropic(result),
+              latency,
+              baseURL: (this as any).baseURL ?? '',
+              params: body,
+              httpStatus: 200,
+              usage: {
+                inputTokens: result.usage.input_tokens ?? 0,
+                outputTokens: result.usage.output_tokens ?? 0,
+              },
+            })
+          }
+          return result
+        },
+        (error: any) => {
+          sendEventToPosthog({
+            client: this.phClient,
+            distinctId: posthogDistinctId ?? traceId,
+            traceId,
+            model: anthropicParams.model,
+            provider: 'anthropic',
+            input: mergeSystemPrompt(anthropicParams, 'anthropic'),
+            output: [],
+            latency: 0,
+            baseURL: (this as any).baseURL ?? '',
+            params: body,
+            httpStatus: error?.status ? error.status : 500,
+            usage: {
+              inputTokens: 0,
+              outputTokens: 0,
+            },
+            isError: true,
+            error: JSON.stringify(error),
+          })
+          throw error
+        }
+      ) as APIPromise<Message>
+
+      return wrappedPromise
+    }
+  }
+}
+
+export default PostHogAnthropic
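For context, a minimal usage sketch of the wrapper added above, based only on the `Anthropic` export and the `MonitoringParams` fields visible in this diff. The project key, host, and model name are placeholders, not values from the package.

```ts
import { PostHog } from 'posthog-node'
import { Anthropic } from '@posthog/ai'

// Placeholder PostHog project key and host.
const phClient = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' })

// The wrapper takes the usual Anthropic client config plus the PostHog client.
const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY ?? '', posthog: phClient })

async function main(): Promise<void> {
  // posthog* fields are the MonitoringParams; everything else is forwarded to Anthropic unchanged.
  const message = await anthropic.messages.create({
    model: 'claude-3-5-sonnet-latest', // placeholder model name
    max_tokens: 256,
    messages: [{ role: 'user', content: 'Say hello' }],
    posthogDistinctId: 'user_123',
    posthogTraceId: 'trace_123',
  })
  console.log(message)
  await phClient.shutdown()
}

void main()
```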
package/src/index.ts
CHANGED
@@ -1,7 +1,9 @@
 import PostHogOpenAI from './openai'
 import PostHogAzureOpenAI from './openai/azure'
 import { wrapVercelLanguageModel } from './vercel/middleware'
+import PostHogAnthropic from './anthropic'
 
 export { PostHogOpenAI as OpenAI }
 export { PostHogAzureOpenAI as AzureOpenAI }
+export { PostHogAnthropic as Anthropic }
 export { wrapVercelLanguageModel as withTracing }
package/src/openai/azure.ts
CHANGED
@@ -2,7 +2,7 @@ import OpenAIOrignal, { AzureOpenAI } from 'openai'
 import { PostHog } from 'posthog-node'
 import { v4 as uuidv4 } from 'uuid'
 import { PassThrough } from 'stream'
-import {
+import { formatResponseOpenAI, MonitoringParams, sendEventToPosthog } from '../utils'
 
 type ChatCompletion = OpenAIOrignal.ChatCompletion
 type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk
@@ -74,6 +74,7 @@ export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
       posthogDistinctId,
       posthogTraceId,
       posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
       posthogPrivacyMode = false,
       posthogGroups,
       ...openAIParams
@@ -117,7 +118,7 @@ export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
               traceId,
               model,
               provider: 'azure',
-              input:
+              input: openAIParams.messages,
               output: [{ content: accumulatedContent, role: 'assistant' }],
               latency,
               baseURL: (this as any).baseURL ?? '',
@@ -134,7 +135,7 @@ export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
               traceId,
               model,
               provider: 'azure',
-              input:
+              input: openAIParams.messages,
               output: JSON.stringify(error),
               latency: 0,
               baseURL: (this as any).baseURL ?? '',
@@ -167,9 +168,9 @@ export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
             distinctId: posthogDistinctId ?? traceId,
             traceId,
             model,
-            provider: '
-            input:
-            output:
+            provider: 'azure',
+            input: openAIParams.messages,
+            output: formatResponseOpenAI(result),
             latency,
             baseURL: (this as any).baseURL ?? '',
             params: body,
@@ -189,7 +190,7 @@ export class WrappedCompletions extends AzureOpenAI.Chat.Completions {
             traceId,
             model: openAIParams.model,
             provider: 'azure',
-            input:
+            input: openAIParams.messages,
             output: [],
             latency: 0,
             baseURL: (this as any).baseURL ?? '',
package/src/openai/index.ts
CHANGED
@@ -2,7 +2,7 @@ import OpenAIOrignal from 'openai'
 import { PostHog } from 'posthog-node'
 import { v4 as uuidv4 } from 'uuid'
 import { PassThrough } from 'stream'
-import {
+import { formatResponseOpenAI, MonitoringParams, sendEventToPosthog } from '../utils'
 
 type ChatCompletion = OpenAIOrignal.ChatCompletion
 type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk
@@ -74,6 +74,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
       posthogDistinctId,
       posthogTraceId,
       posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
      posthogPrivacyMode = false,
      posthogGroups,
      ...openAIParams
@@ -114,7 +115,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
              traceId,
              model: openAIParams.model,
              provider: 'openai',
-              input:
+              input: openAIParams.messages,
              output: [{ content: accumulatedContent, role: 'assistant' }],
              latency,
              baseURL: (this as any).baseURL ?? '',
@@ -131,7 +132,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
              traceId,
              model: openAIParams.model,
              provider: 'openai',
-              input:
+              input: openAIParams.messages,
              output: [],
              latency: 0,
              baseURL: (this as any).baseURL ?? '',
@@ -161,8 +162,8 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
            traceId,
            model: openAIParams.model,
            provider: 'openai',
-            input:
-            output:
+            input: openAIParams.messages,
+            output: formatResponseOpenAI(result),
            latency,
            baseURL: (this as any).baseURL ?? '',
            params: body,
@@ -182,7 +183,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
            traceId,
            model: openAIParams.model,
            provider: 'openai',
-            input:
+            input: openAIParams.messages,
            output: [],
            latency: 0,
            baseURL: (this as any).baseURL ?? '',
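The OpenAI wrapper touched in these hunks is used the same way as the Anthropic example earlier; the sketch below assumes its config shape mirrors `MonitoringAnthropicConfig` (`{ apiKey, posthog, baseURL? }`), which this diff does not show directly. Key and model name are placeholders.

```ts
import { PostHog } from 'posthog-node'
import { OpenAI } from '@posthog/ai'

const phClient = new PostHog('<ph_project_api_key>')
// Assumed config shape: API key plus the PostHog client used for capture.
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY ?? '', posthog: phClient })

async function main(): Promise<void> {
  const completion = await openai.chat.completions.create({
    model: 'gpt-4o-mini', // placeholder model name
    messages: [{ role: 'user', content: 'Say hello' }],
    posthogDistinctId: 'user_123', // attached to the captured generation event
  })
  console.log(completion.choices[0]?.message?.content)
  await phClient.shutdown()
}

void main()
```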
package/src/utils.ts
CHANGED
@@ -1,7 +1,9 @@
 import { PostHog } from 'posthog-node'
 import OpenAIOrignal from 'openai'
+import AnthropicOriginal from '@anthropic-ai/sdk'
 
 type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams
+type MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams
 
 export interface MonitoringParams {
   posthogDistinctId?: string
@@ -11,7 +13,9 @@ export interface MonitoringParams {
   posthogGroups?: Record<string, any>
 }
 
-export const getModelParams = (
+export const getModelParams = (
+  params: (ChatCompletionCreateParamsBase | MessageCreateParams) & MonitoringParams
+): Record<string, any> => {
   const modelParams: Record<string, any> = {}
   const paramKeys = [
     'temperature',
@@ -76,7 +80,7 @@ export const formatResponseOpenAI = (response: any): Array<{ role: string; conte
   return output
 }
 
-export const mergeSystemPrompt = (params:
+export const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {
   if (provider == 'anthropic') {
     const messages = params.messages || []
     if (!(params as any).system) {
@@ -104,7 +108,7 @@ export type SendEventToPosthogParams = {
   baseURL: string
   httpStatus: number
   usage?: { inputTokens?: number; outputTokens?: number }
-  params: ChatCompletionCreateParamsBase & MonitoringParams
+  params: (ChatCompletionCreateParamsBase | MessageCreateParams) & MonitoringParams
  isError?: boolean
  error?: string
 }
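The hunk above only shows the opening of the anthropic branch of `mergeSystemPrompt`. As an illustration of the general idea — folding Anthropic's top-level `system` field into the message list that gets captured as input — a sketch could look like the following. This is an assumption-labeled illustration, not the package's exact code.

```ts
// Illustrative sketch only: if no system prompt is set, pass messages through;
// otherwise prepend it as a system-role entry so the captured input holds the
// full conversation context.
type SimpleMessage = { role: string; content: unknown }

const mergeSystemPromptSketch = (params: { system?: unknown; messages?: SimpleMessage[] }): SimpleMessage[] => {
  const messages = params.messages || []
  if (!params.system) {
    return messages
  }
  return [{ role: 'system', content: params.system }, ...messages]
}
```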
package/src/vercel/middleware.ts
CHANGED
@@ -68,6 +68,13 @@ const mapVercelPrompt = (prompt: LanguageModelV1Prompt): PostHogInput[] => {
   })
 }
 
+const extractProvider = (model: LanguageModelV1): string => {
+  // vercel provider is in the format of provider.endpoint
+  const provider = model.provider.toLowerCase()
+  const providerName = provider.split('.')[0]
+  return providerName
+}
+
 export const createInstrumentationMiddleware = (
   phClient: PostHog,
   model: LanguageModelV1,
@@ -83,9 +90,10 @@ export const createInstrumentationMiddleware = (
       try {
         const result = await doGenerate()
         const latency = (Date.now() - startTime) / 1000
-
-
-        const provider = options.posthogProviderOverride ?? model
+        const modelId =
+          options.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId)
+        const provider = options.posthogProviderOverride ?? extractProvider(model)
+        const baseURL = '' // cannot currently get baseURL from vercel
 
         sendEventToPosthog({
           client: phClient,
@@ -96,7 +104,7 @@ export const createInstrumentationMiddleware = (
           input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
           output: [{ content: result.text, role: 'assistant' }],
           latency,
-          baseURL
+          baseURL,
           params: mergedParams as any,
           httpStatus: 200,
           usage: {
@@ -141,10 +149,10 @@ export const createInstrumentationMiddleware = (
       }
 
       const modelId = options.posthogModelOverride ?? model.modelId
-      const provider = options.posthogProviderOverride ?? model
+      const provider = options.posthogProviderOverride ?? extractProvider(model)
+      const baseURL = '' // cannot currently get baseURL from vercel
       try {
         const { stream, ...rest } = await doStream()
-
         const transformStream = new TransformStream<LanguageModelV1StreamPart, LanguageModelV1StreamPart>({
           transform(chunk, controller) {
             if (chunk.type === 'text-delta') {
@@ -170,7 +178,7 @@ export const createInstrumentationMiddleware = (
           input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
           output: [{ content: generatedText, role: 'assistant' }],
           latency,
-          baseURL
+          baseURL,
          params: mergedParams as any,
          httpStatus: 200,
          usage,
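As a quick illustration of the new `extractProvider` helper: Vercel AI SDK models report `provider` as `provider.endpoint`, so the helper keeps only the part before the first dot. The provider strings below are hypothetical examples, not values from this diff.

```ts
// Standalone copy of the helper for illustration; the real one takes a LanguageModelV1.
const extractProvider = (model: { provider: string }): string => {
  return model.provider.toLowerCase().split('.')[0]
}

console.log(extractProvider({ provider: 'anthropic.messages' })) // -> 'anthropic'
console.log(extractProvider({ provider: 'openai.chat' })) // -> 'openai'
```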