@posthog/ai 3.3.1 → 4.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/lib/anthropic/index.cjs.js +293 -0
- package/lib/anthropic/index.cjs.js.map +1 -0
- package/lib/anthropic/index.d.ts +45 -0
- package/lib/anthropic/index.esm.js +282 -0
- package/lib/anthropic/index.esm.js.map +1 -0
- package/lib/index.cjs.js +36 -8
- package/lib/index.cjs.js.map +1 -1
- package/lib/index.d.ts +2 -2
- package/lib/index.esm.js +36 -8
- package/lib/index.esm.js.map +1 -1
- package/lib/langchain/index.cjs.js +1003 -0
- package/lib/langchain/index.cjs.js.map +1 -0
- package/lib/langchain/index.d.ts +68 -0
- package/lib/langchain/index.esm.js +979 -0
- package/lib/langchain/index.esm.js.map +1 -0
- package/lib/openai/index.cjs.js +286 -0
- package/lib/openai/index.cjs.js.map +1 -0
- package/lib/openai/index.d.ts +49 -0
- package/lib/openai/index.esm.js +274 -0
- package/lib/openai/index.esm.js.map +1 -0
- package/lib/posthog-ai/src/anthropic/index.d.ts +1 -0
- package/lib/posthog-ai/src/langchain/index.d.ts +1 -0
- package/lib/posthog-ai/src/openai/index.d.ts +3 -2
- package/lib/posthog-ai/src/vercel/index.d.ts +1 -0
- package/lib/vercel/index.cjs.js +408 -0
- package/lib/vercel/index.cjs.js.map +1 -0
- package/lib/vercel/index.d.ts +21 -0
- package/lib/vercel/index.esm.js +404 -0
- package/lib/vercel/index.esm.js.map +1 -0
- package/package.json +28 -1
- package/src/anthropic/index.ts +2 -0
- package/src/langchain/callbacks.ts +39 -7
- package/src/langchain/index.ts +1 -0
- package/src/openai/index.ts +4 -2
- package/src/utils.ts +1 -1
- package/src/vercel/index.ts +1 -0
- package/src/vercel/middleware.ts +4 -4
- package/tsconfig.json +1 -0
|
@@ -0,0 +1,274 @@
|
|
|
1
|
+
import OpenAIOrignal from 'openai';
|
|
2
|
+
import { v4 } from 'uuid';
|
|
3
|
+
|
|
4
|
+
// Extracts the subset of recognised model parameters from a request object so
// they can be reported as `$ai_model_parameters`. Keys that are absent or
// explicitly `undefined` are skipped; returns `{}` for a nullish argument.
const getModelParams = params => {
  if (!params) {
    return {};
  }
  // `streaming` (LangChain spelling) is tracked alongside `stream` (OpenAI spelling).
  const trackedKeys = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p', 'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming'];
  const collected = {};
  trackedKeys.forEach(name => {
    if (name in params && params[name] !== undefined) {
      collected[name] = params[name];
    }
  });
  return collected;
};
|
|
17
|
+
// Converts a non-streaming OpenAI chat completion into the simplified
// `[{ role, content }]` list reported as `$ai_output_choices`.
// Choices whose message has no truthy textual content are skipped.
const formatResponseOpenAI = response => {
  const messages = [];
  (response.choices ?? []).forEach(choice => {
    const text = choice.message?.content;
    if (text) {
      messages.push({
        role: choice.message.role,
        content: text
      });
    }
  });
  return messages;
};
|
|
29
|
+
// Redacts `input` (returns `null`) when privacy mode is enabled either
// globally on the PostHog client (`privacy_mode`) or for this single call.
const withPrivacyMode = (client, privacyMode, input) => {
  if (client.privacy_mode || privacyMode) {
    return null;
  }
  return input;
};
|
|
32
|
+
// Builds and captures a `$ai_generation` event on the PostHog client.
// No-op when the client exposes no `capture` method. The spread order inside
// `properties` is significant: user-supplied `posthogProperties` can override
// the standard fields, and error/cost-override data override everything.
const sendEventToPosthog = ({
  client,
  distinctId,
  traceId,
  model,
  provider,
  input,
  output,
  latency,
  baseURL,
  params,
  httpStatus = 200,
  usage = {},
  isError = false,
  error,
  tools
}) => {
  if (!client.capture) {
    return;
  }
  // Error metadata — only attached for failed generations.
  const errorData = isError ? {
    $ai_is_error: true,
    $ai_error: error
  } : {};
  // Optional manual pricing: cost-per-token multiplied by token counts.
  let costOverrideData = {};
  const costOverride = params.posthogCostOverride;
  if (costOverride) {
    const inputCostUSD = (costOverride.inputCost ?? 0) * (usage.inputTokens ?? 0);
    const outputCostUSD = (costOverride.outputCost ?? 0) * (usage.outputTokens ?? 0);
    costOverrideData = {
      $ai_input_cost_usd: inputCostUSD,
      $ai_output_cost_usd: outputCostUSD,
      $ai_total_cost_usd: inputCostUSD + outputCostUSD
    };
  }
  // Extra token counters, attached only when truthy.
  const additionalTokenValues = {};
  if (usage.reasoningTokens) {
    additionalTokenValues.$ai_reasoning_tokens = usage.reasoningTokens;
  }
  if (usage.cacheReadInputTokens) {
    additionalTokenValues.$ai_cache_read_input_tokens = usage.cacheReadInputTokens;
  }
  if (usage.cacheCreationInputTokens) {
    additionalTokenValues.$ai_cache_creation_input_tokens = usage.cacheCreationInputTokens;
  }
  const privacyMode = params.posthogPrivacyMode ?? false;
  client.capture({
    distinctId: distinctId ?? traceId,
    event: '$ai_generation',
    properties: {
      $ai_provider: params.posthogProviderOverride ?? provider,
      $ai_model: params.posthogModelOverride ?? model,
      $ai_model_parameters: getModelParams(params),
      $ai_input: withPrivacyMode(client, privacyMode, input),
      $ai_output_choices: withPrivacyMode(client, privacyMode, output),
      $ai_http_status: httpStatus,
      $ai_input_tokens: usage.inputTokens ?? 0,
      $ai_output_tokens: usage.outputTokens ?? 0,
      ...additionalTokenValues,
      $ai_latency: latency,
      $ai_trace_id: traceId,
      $ai_base_url: baseURL,
      ...params.posthogProperties,
      // Without an explicit distinctId the event is anonymous.
      ...(distinctId ? {} : {
        $process_person_profile: false
      }),
      ...(tools ? {
        $ai_tools: tools
      } : {}),
      ...errorData,
      ...costOverrideData
    },
    groups: params.posthogGroups
  });
};
|
|
108
|
+
|
|
109
|
+
// OpenAI client wrapper that reports chat completions to PostHog.
// Accepts the regular OpenAI client options plus a `posthog` client instance.
class PostHogOpenAI extends OpenAIOrignal {
  constructor(config) {
    // Strip the `posthog` field before handing the rest to the OpenAI base class.
    const { posthog, ...openAIConfig } = config;
    super(openAIConfig);
    this.phClient = posthog;
    // Swap the stock `chat` namespace for the instrumented one.
    this.chat = new WrappedChat(this, this.phClient);
  }
}
|
|
120
|
+
// Chat namespace whose `completions` resource captures PostHog analytics
// while delegating all actual API work to the OpenAI SDK.
class WrappedChat extends OpenAIOrignal.Chat {
  constructor(parentClient, phClient) {
    super(parentClient);
    this.completions = new WrappedCompletions(parentClient, phClient);
  }
}
|
|
126
|
+
// Chat-completions resource that mirrors `openai`'s `chat.completions.create`
// while capturing a `$ai_generation` PostHog event for every call, in both
// streaming and non-streaming modes.
class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
  constructor(client, phClient) {
    super(client);
    this.phClient = phClient;
  }

  // Same call signature as the SDK's `create`; the extra `posthog*`
  // monitoring fields are stripped from the payload before hitting the API
  // (they still reach the analytics code via `params: body`).
  create(body, options) {
    const {
      posthogDistinctId,
      posthogTraceId,
      posthogProperties,
      // eslint-disable-next-line @typescript-eslint/no-unused-vars
      posthogPrivacyMode = false,
      posthogGroups,
      ...openAIParams
    } = body;
    const traceId = posthogTraceId ?? v4();
    const requestStartedAt = Date.now();
    const parentPromise = super.create(openAIParams, options);

    // Fields shared by every analytics event emitted from this call.
    // NOTE(review): distinctId already falls back to traceId here, so the
    // `$process_person_profile: false` branch in sendEventToPosthog never
    // fires for this code path — presumably intentional; verify upstream.
    const eventBase = {
      client: this.phClient,
      distinctId: posthogDistinctId ?? traceId,
      traceId,
      model: openAIParams.model,
      provider: 'openai',
      input: openAIParams.messages,
      baseURL: this.baseURL ?? '',
      params: body
    };

    if (!openAIParams.stream) {
      // Non-streaming: instrument both resolution and rejection.
      return parentPromise.then(result => {
        if ('choices' in result) {
          sendEventToPosthog({
            ...eventBase,
            output: formatResponseOpenAI(result),
            latency: (Date.now() - requestStartedAt) / 1000,
            httpStatus: 200,
            usage: {
              inputTokens: result.usage?.prompt_tokens ?? 0,
              outputTokens: result.usage?.completion_tokens ?? 0,
              reasoningTokens: result.usage?.completion_tokens_details?.reasoning_tokens ?? 0,
              cacheReadInputTokens: result.usage?.prompt_tokens_details?.cached_tokens ?? 0
            }
          });
        }
        return result;
      }, error => {
        sendEventToPosthog({
          ...eventBase,
          output: [],
          latency: 0,
          httpStatus: error?.status ? error.status : 500,
          usage: {
            inputTokens: 0,
            outputTokens: 0
          },
          isError: true,
          error: JSON.stringify(error)
        });
        throw error;
      });
    }

    // Streaming: tee the stream so one copy goes back to the caller and the
    // other is drained in the background to accumulate content and usage.
    return parentPromise.then(value => {
      if (!('tee' in value)) {
        return value;
      }
      const [monitoredStream, callerStream] = value.tee();
      (async () => {
        try {
          let accumulatedContent = '';
          let usage = {
            inputTokens: 0,
            outputTokens: 0
          };
          for await (const chunk of monitoredStream) {
            accumulatedContent += chunk?.choices?.[0]?.delta?.content ?? '';
            // Usage arrives on (at most) the final chunk; keep the latest seen.
            if (chunk.usage) {
              usage = {
                inputTokens: chunk.usage.prompt_tokens ?? 0,
                outputTokens: chunk.usage.completion_tokens ?? 0,
                reasoningTokens: chunk.usage.completion_tokens_details?.reasoning_tokens ?? 0,
                cacheReadInputTokens: chunk.usage.prompt_tokens_details?.cached_tokens ?? 0
              };
            }
          }
          sendEventToPosthog({
            ...eventBase,
            output: [{
              content: accumulatedContent,
              role: 'assistant'
            }],
            latency: (Date.now() - requestStartedAt) / 1000,
            httpStatus: 200,
            usage
          });
        } catch (error) {
          // Analytics failures for the monitored copy are reported, never thrown.
          sendEventToPosthog({
            ...eventBase,
            output: [],
            latency: 0,
            httpStatus: error?.status ? error.status : 500,
            usage: {
              inputTokens: 0,
              outputTokens: 0
            },
            isError: true,
            error: JSON.stringify(error)
          });
        }
      })();
      // Hand the untouched copy back to the user.
      return callerStream;
    });
  }
}
|
|
272
|
+
|
|
273
|
+
export { PostHogOpenAI as OpenAI, PostHogOpenAI, WrappedChat, WrappedCompletions, PostHogOpenAI as default };
|
|
274
|
+
//# sourceMappingURL=index.esm.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.esm.js","sources":["../../src/utils.ts","../../src/openai/index.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport OpenAIOrignal from 'openai'\nimport AnthropicOriginal from '@anthropic-ai/sdk'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n}\n\nexport interface CostOverride {\n inputCost: number\n outputCost: number\n}\n\nexport const getModelParams = (\n params: ((ChatCompletionCreateParamsBase | MessageCreateParams) & MonitoringParams) | null\n): Record<string, any> => {\n if (!params) {\n return {}\n }\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. 
anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): Array<{ role: string; content: string }> => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): Array<{ role: string; content: string }> => {\n // Example approach if \"response.content\" holds array of text segments, etc.\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.content ?? []) {\n if (choice?.text) {\n output.push({\n role: 'assistant',\n content: choice.text,\n })\n }\n }\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): Array<{ role: string; content: string }> => {\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.choices ?? []) {\n if (choice.message?.content) {\n output.push({\n role: choice.message.role,\n content: choice.message.content,\n })\n }\n }\n return output\n}\n\nexport const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {\n if (provider == 'anthropic') {\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = (params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n }\n return params.messages\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? 
null : input\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: {\n inputTokens?: number\n outputTokens?: number\n reasoningTokens?: any\n cacheReadInputTokens?: any\n cacheCreationInputTokens?: any\n }\n params: (ChatCompletionCreateParamsBase | MessageCreateParams) & MonitoringParams\n isError?: boolean\n error?: string\n tools?: any\n}\n\nexport const sendEventToPosthog = ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n isError = false,\n error,\n tools,\n}: SendEventToPosthogParams): void => {\n if (client.capture) {\n let errorData = {}\n if (isError) {\n errorData = {\n $ai_is_error: true,\n $ai_error: error,\n }\n }\n let costOverrideData = {}\n if (params.posthogCostOverride) {\n const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0)\n const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 0)\n costOverrideData = {\n $ai_input_cost_usd: inputCostUSD,\n $ai_output_cost_usd: outputCostUSD,\n $ai_total_cost_usd: inputCostUSD + outputCostUSD,\n }\n }\n\n const additionalTokenValues = {\n ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),\n ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),\n ...(usage.cacheCreationInputTokens ? { $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens } : {}),\n }\n\n client.capture({\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties: {\n $ai_provider: params.posthogProviderOverride ?? provider,\n $ai_model: params.posthogModelOverride ?? model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? 
false, input),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, output),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n ...additionalTokenValues,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n ...(tools ? { $ai_tools: tools } : {}),\n ...errorData,\n ...costOverrideData,\n },\n groups: params.posthogGroups,\n })\n }\n}\n","import OpenAIOrignal, { ClientOptions } from 'openai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { formatResponseOpenAI, MonitoringParams, sendEventToPosthog } from '../utils'\n\ntype ChatCompletion = OpenAIOrignal.ChatCompletion\ntype ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming\ntype ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming\nimport type { APIPromise, RequestOptions } from 'openai/core'\nimport type { Stream } from 'openai/streaming'\n\ninterface MonitoringOpenAIConfig extends ClientOptions {\n apiKey: string\n posthog: PostHog\n baseURL?: string\n}\n\nexport class PostHogOpenAI extends OpenAIOrignal {\n private readonly phClient: PostHog\n public chat: WrappedChat\n\n constructor(config: MonitoringOpenAIConfig) {\n const { posthog, ...openAIConfig } = config\n super(openAIConfig)\n this.phClient = posthog\n this.chat = new WrappedChat(this, this.phClient)\n }\n}\n\nexport class WrappedChat extends OpenAIOrignal.Chat {\n constructor(parentClient: PostHogOpenAI, phClient: PostHog) {\n super(parentClient)\n this.completions = new WrappedCompletions(parentClient, phClient)\n }\n\n public 
completions: WrappedCompletions\n}\n\nexport class WrappedCompletions extends OpenAIOrignal.Chat.Completions {\n private readonly phClient: PostHog\n\n constructor(client: OpenAIOrignal, phClient: PostHog) {\n super(client)\n this.phClient = phClient\n }\n\n // --- Overload #1: Non-streaming\n public create(\n body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion>\n\n // --- Overload #2: Streaming\n public create(\n body: ChatCompletionCreateParamsStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<Stream<ChatCompletionChunk>>\n\n // --- Overload #3: Generic base\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>\n\n // --- Implementation Signature\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n posthogPrivacyMode = false,\n posthogGroups,\n ...openAIParams\n } = body\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n\n const parentPromise = super.create(openAIParams, options)\n\n if (openAIParams.stream) {\n return parentPromise.then((value) => {\n if ('tee' in value) {\n const [stream1, stream2] = value.tee()\n ;(async () => {\n try {\n let accumulatedContent = ''\n let usage: {\n inputTokens?: number\n outputTokens?: number\n reasoningTokens?: number\n cacheReadInputTokens?: number\n } = {\n inputTokens: 0,\n outputTokens: 0,\n }\n\n for await (const chunk of stream1) {\n const delta = chunk?.choices?.[0]?.delta?.content ?? ''\n accumulatedContent += delta\n if (chunk.usage) {\n usage = {\n inputTokens: chunk.usage.prompt_tokens ?? 0,\n outputTokens: chunk.usage.completion_tokens ?? 
0,\n reasoningTokens: chunk.usage.completion_tokens_details?.reasoning_tokens ?? 0,\n cacheReadInputTokens: chunk.usage.prompt_tokens_details?.cached_tokens ?? 0,\n }\n }\n }\n\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.messages,\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage,\n })\n } catch (error: any) {\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.messages,\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: error?.status ? error.status : 500,\n usage: { inputTokens: 0, outputTokens: 0 },\n isError: true,\n error: JSON.stringify(error),\n })\n }\n })()\n\n // Return the other stream to the user\n return stream2\n }\n return value\n }) as APIPromise<Stream<ChatCompletionChunk>>\n } else {\n const wrappedPromise = parentPromise.then(\n (result) => {\n if ('choices' in result) {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.messages,\n output: formatResponseOpenAI(result),\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage: {\n inputTokens: result.usage?.prompt_tokens ?? 0,\n outputTokens: result.usage?.completion_tokens ?? 0,\n reasoningTokens: result.usage?.completion_tokens_details?.reasoning_tokens ?? 0,\n cacheReadInputTokens: result.usage?.prompt_tokens_details?.cached_tokens ?? 
0,\n },\n })\n }\n return result\n },\n (error: any) => {\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.messages,\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: error?.status ? error.status : 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n })\n throw error\n }\n ) as APIPromise<ChatCompletion>\n\n return wrappedPromise\n }\n }\n}\n\nexport default PostHogOpenAI\n\nexport { PostHogOpenAI as OpenAI }\n"],"names":["getModelParams","params","modelParams","paramKeys","key","undefined","formatResponseOpenAI","response","output","choice","choices","message","content","push","role","withPrivacyMode","client","privacyMode","input","privacy_mode","sendEventToPosthog","distinctId","traceId","model","provider","latency","baseURL","httpStatus","usage","isError","error","tools","capture","errorData","$ai_is_error","$ai_error","costOverrideData","posthogCostOverride","inputCostUSD","inputCost","inputTokens","outputCostUSD","outputCost","outputTokens","$ai_input_cost_usd","$ai_output_cost_usd","$ai_total_cost_usd","additionalTokenValues","reasoningTokens","$ai_reasoning_tokens","cacheReadInputTokens","$ai_cache_read_input_tokens","cacheCreationInputTokens","$ai_cache_creation_input_tokens","event","properties","$ai_provider","posthogProviderOverride","$ai_model","posthogModelOverride","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","$ai_output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","$ai_tools","groups","posthogGroups","PostHogOpenAI","OpenAIOrignal","constructor","config","posthog","openAIConfig","phClient","chat","WrappedChat","Chat","parentClient","completions","WrappedCompletions","Completions","create","body","options
","posthogDistinctId","posthogTraceId","openAIParams","uuidv4","startTime","Date","now","parentPromise","stream","then","value","stream1","stream2","tee","accumulatedContent","chunk","delta","prompt_tokens","completion_tokens","completion_tokens_details","reasoning_tokens","prompt_tokens_details","cached_tokens","messages","status","JSON","stringify","wrappedPromise","result"],"mappings":";;;AAuBO,MAAMA,cAAc,GACzBC,MAA0F,IAClE;EACxB,IAAI,CAACA,MAAM,EAAE;AACX,IAAA,OAAO,EAAE,CAAA;AACX,GAAA;EACA,MAAMC,WAAgC,GAAG,EAAE,CAAA;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH,CAAA;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAM,CAASG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAM,CAASG,GAAG,CAAC,CAAA;AACzC,KAAA;AACF,GAAA;AACA,EAAA,OAAOF,WAAW,CAAA;AACpB,CAAC,CAAA;AA+BM,MAAMI,oBAAoB,GAAIC,QAAa,IAA+C;EAC/F,MAAMC,MAAgD,GAAG,EAAE,CAAA;EAC3D,KAAK,MAAMC,MAAM,IAAIF,QAAQ,CAACG,OAAO,IAAI,EAAE,EAAE;AAC3C,IAAA,IAAID,MAAM,CAACE,OAAO,EAAEC,OAAO,EAAE;MAC3BJ,MAAM,CAACK,IAAI,CAAC;AACVC,QAAAA,IAAI,EAAEL,MAAM,CAACE,OAAO,CAACG,IAAI;AACzBF,QAAAA,OAAO,EAAEH,MAAM,CAACE,OAAO,CAACC,OAAAA;AAC1B,OAAC,CAAC,CAAA;AACJ,KAAA;AACF,GAAA;AACA,EAAA,OAAOJ,MAAM,CAAA;AACf,CAAC,CAAA;AAcM,MAAMO,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAU;EACzF,OAAQF,MAAM,CAASG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK,CAAA;AACnE,CAAC,CAAA;AA0BM,MAAME,kBAAkB,GAAGA,CAAC;EACjCJ,MAAM;EACNK,UAAU;EACVC,OAAO;EACPC,KAAK;EACLC,QAAQ;EACRN,KAAK;EACLV,MAAM;EACNiB,OAAO;EACPC,OAAO;EACPzB,MAAM;AACN0B,EAAAA,UAAU,GAAG,GAAG;EAChBC,KAAK,GAAG,EAAE;AACVC,EAAAA,OAAO,GAAG,KAAK;EACfC,KAAK;AACLC,EAAAA,KAAAA;AACwB,CAAC,KAAW;EACpC,IAAIf,MAAM,CAACgB,OAAO,EAAE;IAClB,IAAIC,SAAS,GAAG,EAAE,CAAA;AAClB,IAAA,IAAIJ,OAAO,EAAE;AACXI,MAAAA,SAAS,GAAG;AACVC,QAAAA,YAAY,EAAE,IAAI;AAClBC,QAAAA,SAAS,EAAEL,KAAAA;OACZ,CAAA;AACH,KAAA;IACA,IAAIM,gBAAgB,GAAG,EAAE,CAAA;IACzB,IAAInC,MAAM,CAACoC,mBAAmB,EAAE;AAC9B,MAAA,MAAMC,YAAY,GAAG,CAACrC
,MAAM,CAACoC,mBAAmB,CAACE,SAAS,IAAI,CAAC,KAAKX,KAAK,CAACY,WAAW,IAAI,CAAC,CAAC,CAAA;AAC3F,MAAA,MAAMC,aAAa,GAAG,CAACxC,MAAM,CAACoC,mBAAmB,CAACK,UAAU,IAAI,CAAC,KAAKd,KAAK,CAACe,YAAY,IAAI,CAAC,CAAC,CAAA;AAC9FP,MAAAA,gBAAgB,GAAG;AACjBQ,QAAAA,kBAAkB,EAAEN,YAAY;AAChCO,QAAAA,mBAAmB,EAAEJ,aAAa;QAClCK,kBAAkB,EAAER,YAAY,GAAGG,aAAAA;OACpC,CAAA;AACH,KAAA;AAEA,IAAA,MAAMM,qBAAqB,GAAG;MAC5B,IAAInB,KAAK,CAACoB,eAAe,GAAG;QAAEC,oBAAoB,EAAErB,KAAK,CAACoB,eAAAA;OAAiB,GAAG,EAAE,CAAC;MACjF,IAAIpB,KAAK,CAACsB,oBAAoB,GAAG;QAAEC,2BAA2B,EAAEvB,KAAK,CAACsB,oBAAAA;OAAsB,GAAG,EAAE,CAAC;MAClG,IAAItB,KAAK,CAACwB,wBAAwB,GAAG;QAAEC,+BAA+B,EAAEzB,KAAK,CAACwB,wBAAAA;OAA0B,GAAG,EAAE,CAAA;KAC9G,CAAA;IAEDpC,MAAM,CAACgB,OAAO,CAAC;MACbX,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCgC,MAAAA,KAAK,EAAE,gBAAgB;AACvBC,MAAAA,UAAU,EAAE;AACVC,QAAAA,YAAY,EAAEvD,MAAM,CAACwD,uBAAuB,IAAIjC,QAAQ;AACxDkC,QAAAA,SAAS,EAAEzD,MAAM,CAAC0D,oBAAoB,IAAIpC,KAAK;AAC/CqC,QAAAA,oBAAoB,EAAE5D,cAAc,CAACC,MAAM,CAAC;AAC5C4D,QAAAA,SAAS,EAAE9C,eAAe,CAACC,MAAM,EAAEf,MAAM,CAAC6D,kBAAkB,IAAI,KAAK,EAAE5C,KAAK,CAAC;AAC7E6C,QAAAA,kBAAkB,EAAEhD,eAAe,CAACC,MAAM,EAAEf,MAAM,CAAC6D,kBAAkB,IAAI,KAAK,EAAEtD,MAAM,CAAC;AACvFwD,QAAAA,eAAe,EAAErC,UAAU;AAC3BsC,QAAAA,gBAAgB,EAAErC,KAAK,CAACY,WAAW,IAAI,CAAC;AACxC0B,QAAAA,iBAAiB,EAAEtC,KAAK,CAACe,YAAY,IAAI,CAAC;AAC1C,QAAA,GAAGI,qBAAqB;AACxBoB,QAAAA,WAAW,EAAE1C,OAAO;AACpB2C,QAAAA,YAAY,EAAE9C,OAAO;AACrB+C,QAAAA,YAAY,EAAE3C,OAAO;QACrB,GAAGzB,MAAM,CAACqE,iBAAiB;AAC3B,QAAA,IAAIjD,UAAU,GAAG,EAAE,GAAG;AAAEkD,UAAAA,uBAAuB,EAAE,KAAA;AAAM,SAAC,CAAC;AACzD,QAAA,IAAIxC,KAAK,GAAG;AAAEyC,UAAAA,SAAS,EAAEzC,KAAAA;SAAO,GAAG,EAAE,CAAC;AACtC,QAAA,GAAGE,SAAS;QACZ,GAAGG,gBAAAA;OACJ;MACDqC,MAAM,EAAExE,MAAM,CAACyE,aAAAA;AACjB,KAAC,CAAC,CAAA;AACJ,GAAA;AACF,CAAC;;ACrLM,MAAMC,aAAa,SAASC,aAAa,CAAC;EAI/CC,WAAWA,CAACC,MAA8B,EAAE;IAC1C,MAAM;MAAEC,OAAO;MAAE,GAAGC,YAAAA;AAAa,KAAC,GAAGF,MAAM,CAAA;IAC3C,KAAK,CAACE,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,QAAQ,GAAGF,OAAO,CAAA;IACvB,IAAI,CAACG,IAAI,GAAG,IAAIC,WAAW,CAAC,IAAI,EAAE,IAAI,CAACF,QAAQ,CAAC,CAAA;AAClD,GAAA;
AACF,CAAA;AAEO,MAAME,WAAW,SAASP,aAAa,CAACQ,IAAI,CAAC;AAClDP,EAAAA,WAAWA,CAACQ,YAA2B,EAAEJ,QAAiB,EAAE;IAC1D,KAAK,CAACI,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,WAAW,GAAG,IAAIC,kBAAkB,CAACF,YAAY,EAAEJ,QAAQ,CAAC,CAAA;AACnE,GAAA;AAGF,CAAA;AAEO,MAAMM,kBAAkB,SAASX,aAAa,CAACQ,IAAI,CAACI,WAAW,CAAC;AAGrEX,EAAAA,WAAWA,CAAC7D,MAAqB,EAAEiE,QAAiB,EAAE;IACpD,KAAK,CAACjE,MAAM,CAAC,CAAA;IACb,IAAI,CAACiE,QAAQ,GAAGA,QAAQ,CAAA;AAC1B,GAAA;;AAEA;;AAMA;;AAMA;;AAMA;AACOQ,EAAAA,MAAMA,CACXC,IAAuD,EACvDC,OAAwB,EACkC;IAC1D,MAAM;MACJC,iBAAiB;MACjBC,cAAc;MACdvB,iBAAiB;AACjB;AACAR,MAAAA,kBAAkB,GAAG,KAAK;MAC1BY,aAAa;MACb,GAAGoB,YAAAA;AACL,KAAC,GAAGJ,IAAI,CAAA;AAER,IAAA,MAAMpE,OAAO,GAAGuE,cAAc,IAAIE,EAAM,EAAE,CAAA;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;IAE5B,MAAMC,aAAa,GAAG,KAAK,CAACV,MAAM,CAACK,YAAY,EAAEH,OAAO,CAAC,CAAA;IAEzD,IAAIG,YAAY,CAACM,MAAM,EAAE;AACvB,MAAA,OAAOD,aAAa,CAACE,IAAI,CAAEC,KAAK,IAAK;QACnC,IAAI,KAAK,IAAIA,KAAK,EAAE;UAClB,MAAM,CAACC,OAAO,EAAEC,OAAO,CAAC,GAAGF,KAAK,CAACG,GAAG,EAAE,CAAA;AACrC,UAAA,CAAC,YAAY;YACZ,IAAI;cACF,IAAIC,kBAAkB,GAAG,EAAE,CAAA;AAC3B,cAAA,IAAI9E,KAKH,GAAG;AACFY,gBAAAA,WAAW,EAAE,CAAC;AACdG,gBAAAA,YAAY,EAAE,CAAA;eACf,CAAA;AAED,cAAA,WAAW,MAAMgE,KAAK,IAAIJ,OAAO,EAAE;AACjC,gBAAA,MAAMK,KAAK,GAAGD,KAAK,EAAEjG,OAAO,GAAG,CAAC,CAAC,EAAEkG,KAAK,EAAEhG,OAAO,IAAI,EAAE,CAAA;AACvD8F,gBAAAA,kBAAkB,IAAIE,KAAK,CAAA;gBAC3B,IAAID,KAAK,CAAC/E,KAAK,EAAE;AACfA,kBAAAA,KAAK,GAAG;AACNY,oBAAAA,WAAW,EAAEmE,KAAK,CAAC/E,KAAK,CAACiF,aAAa,IAAI,CAAC;AAC3ClE,oBAAAA,YAAY,EAAEgE,KAAK,CAAC/E,KAAK,CAACkF,iBAAiB,IAAI,CAAC;oBAChD9D,eAAe,EAAE2D,KAAK,CAAC/E,KAAK,CAACmF,yBAAyB,EAAEC,gBAAgB,IAAI,CAAC;oBAC7E9D,oBAAoB,EAAEyD,KAAK,CAAC/E,KAAK,CAACqF,qBAAqB,EAAEC,aAAa,IAAI,CAAA;mBAC3E,CAAA;AACH,iBAAA;AACF,eAAA;cAEA,MAAMzF,OAAO,GAAG,CAACwE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/C5E,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAACiE,QAAQ;gBACrB5D,UAAU,EAAEuE,iBAAiB,IAAItE,OAAO;gBACxCA,OAAO;gBACPC,KAAK,EAAEuE,YAAY,CAACvE,KAAK;AACzBC,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBN,KAAK,EAAE4E,YAAY,CAACqB,QAAQ;AAC5B
3G,gBAAAA,MAAM,EAAE,CAAC;AAAEI,kBAAAA,OAAO,EAAE8F,kBAAkB;AAAE5F,kBAAAA,IAAI,EAAE,WAAA;AAAY,iBAAC,CAAC;gBAC5DW,OAAO;AACPC,gBAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpCzB,gBAAAA,MAAM,EAAEyF,IAAI;AACZ/D,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAAA;AACF,eAAC,CAAC,CAAA;aACH,CAAC,OAAOE,KAAU,EAAE;AACnBV,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAACiE,QAAQ;gBACrB5D,UAAU,EAAEuE,iBAAiB,IAAItE,OAAO;gBACxCA,OAAO;gBACPC,KAAK,EAAEuE,YAAY,CAACvE,KAAK;AACzBC,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBN,KAAK,EAAE4E,YAAY,CAACqB,QAAQ;AAC5B3G,gBAAAA,MAAM,EAAE,EAAE;AACViB,gBAAAA,OAAO,EAAE,CAAC;AACVC,gBAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpCzB,gBAAAA,MAAM,EAAEyF,IAAI;gBACZ/D,UAAU,EAAEG,KAAK,EAAEsF,MAAM,GAAGtF,KAAK,CAACsF,MAAM,GAAG,GAAG;AAC9CxF,gBAAAA,KAAK,EAAE;AAAEY,kBAAAA,WAAW,EAAE,CAAC;AAAEG,kBAAAA,YAAY,EAAE,CAAA;iBAAG;AAC1Cd,gBAAAA,OAAO,EAAE,IAAI;AACbC,gBAAAA,KAAK,EAAEuF,IAAI,CAACC,SAAS,CAACxF,KAAK,CAAA;AAC7B,eAAC,CAAC,CAAA;AACJ,aAAA;AACF,WAAC,GAAG,CAAA;;AAEJ;AACA,UAAA,OAAO0E,OAAO,CAAA;AAChB,SAAA;AACA,QAAA,OAAOF,KAAK,CAAA;AACd,OAAC,CAAC,CAAA;AACJ,KAAC,MAAM;AACL,MAAA,MAAMiB,cAAc,GAAGpB,aAAa,CAACE,IAAI,CACtCmB,MAAM,IAAK;QACV,IAAI,SAAS,IAAIA,MAAM,EAAE;UACvB,MAAM/F,OAAO,GAAG,CAACwE,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/C5E,UAAAA,kBAAkB,CAAC;YACjBJ,MAAM,EAAE,IAAI,CAACiE,QAAQ;YACrB5D,UAAU,EAAEuE,iBAAiB,IAAItE,OAAO;YACxCA,OAAO;YACPC,KAAK,EAAEuE,YAAY,CAACvE,KAAK;AACzBC,YAAAA,QAAQ,EAAE,QAAQ;YAClBN,KAAK,EAAE4E,YAAY,CAACqB,QAAQ;AAC5B3G,YAAAA,MAAM,EAAEF,oBAAoB,CAACkH,MAAM,CAAC;YACpC/F,OAAO;AACPC,YAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpCzB,YAAAA,MAAM,EAAEyF,IAAI;AACZ/D,YAAAA,UAAU,EAAE,GAAG;AACfC,YAAAA,KAAK,EAAE;AACLY,cAAAA,WAAW,EAAEgF,MAAM,CAAC5F,KAAK,EAAEiF,aAAa,IAAI,CAAC;AAC7ClE,cAAAA,YAAY,EAAE6E,MAAM,CAAC5F,KAAK,EAAEkF,iBAAiB,IAAI,CAAC;cAClD9D,eAAe,EAAEwE,MAAM,CAAC5F,KAAK,EAAEmF,yBAAyB,EAAEC,gBAAgB,IAAI,CAAC;cAC/E9D,oBAAoB,EAAEsE,MAAM,CAAC5F,KAAK,EAAEqF,qBAAqB,EAAEC,aAAa,IAAI,CAAA;AAC9E,aAAA;AACF,WAAC,CAAC,CAAA;AACJ,SAAA;AACA,QAAA,OAAOM,MAAM,CAAA;OACd,EACA1F,KAAU,IAAK;AACdV,QAAAA,kBAA
kB,CAAC;UACjBJ,MAAM,EAAE,IAAI,CAACiE,QAAQ;UACrB5D,UAAU,EAAEuE,iBAAiB,IAAItE,OAAO;UACxCA,OAAO;UACPC,KAAK,EAAEuE,YAAY,CAACvE,KAAK;AACzBC,UAAAA,QAAQ,EAAE,QAAQ;UAClBN,KAAK,EAAE4E,YAAY,CAACqB,QAAQ;AAC5B3G,UAAAA,MAAM,EAAE,EAAE;AACViB,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpCzB,UAAAA,MAAM,EAAEyF,IAAI;UACZ/D,UAAU,EAAEG,KAAK,EAAEsF,MAAM,GAAGtF,KAAK,CAACsF,MAAM,GAAG,GAAG;AAC9CxF,UAAAA,KAAK,EAAE;AACLY,YAAAA,WAAW,EAAE,CAAC;AACdG,YAAAA,YAAY,EAAE,CAAA;WACf;AACDd,UAAAA,OAAO,EAAE,IAAI;AACbC,UAAAA,KAAK,EAAEuF,IAAI,CAACC,SAAS,CAACxF,KAAK,CAAA;AAC7B,SAAC,CAAC,CAAA;AACF,QAAA,MAAMA,KAAK,CAAA;AACb,OACF,CAA+B,CAAA;AAE/B,MAAA,OAAOyF,cAAc,CAAA;AACvB,KAAA;AACF,GAAA;AACF;;;;"}
|
|
@@ -26,3 +26,4 @@ export declare class WrappedMessages extends AnthropicOriginal.Messages {
|
|
|
26
26
|
create(body: MessageCreateParamsBase & MonitoringParams, options?: RequestOptions): APIPromise<Stream<RawMessageStreamEvent> | Message>;
|
|
27
27
|
}
|
|
28
28
|
export default PostHogAnthropic;
|
|
29
|
+
export { PostHogAnthropic as Anthropic };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export * from './callbacks';
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import OpenAIOrignal from 'openai';
|
|
1
|
+
import OpenAIOrignal, { ClientOptions } from 'openai';
|
|
2
2
|
import { PostHog } from 'posthog-node';
|
|
3
3
|
import { MonitoringParams } from '../utils';
|
|
4
4
|
type ChatCompletion = OpenAIOrignal.ChatCompletion;
|
|
@@ -8,7 +8,7 @@ type ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.Cha
|
|
|
8
8
|
type ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming;
|
|
9
9
|
import type { APIPromise, RequestOptions } from 'openai/core';
|
|
10
10
|
import type { Stream } from 'openai/streaming';
|
|
11
|
-
interface MonitoringOpenAIConfig {
|
|
11
|
+
interface MonitoringOpenAIConfig extends ClientOptions {
|
|
12
12
|
apiKey: string;
|
|
13
13
|
posthog: PostHog;
|
|
14
14
|
baseURL?: string;
|
|
@@ -30,3 +30,4 @@ export declare class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
|
|
|
30
30
|
create(body: ChatCompletionCreateParamsBase & MonitoringParams, options?: RequestOptions): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>;
|
|
31
31
|
}
|
|
32
32
|
export default PostHogOpenAI;
|
|
33
|
+
export { PostHogOpenAI as OpenAI };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { wrapVercelLanguageModel as withTracing } from './middleware';
|