@posthog/ai 1.3.0 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/index.cjs.js +386 -0
- package/lib/index.cjs.js.map +1 -0
- package/lib/index.d.ts +52 -0
- package/lib/index.esm.js +377 -0
- package/lib/index.esm.js.map +1 -0
- package/lib/posthog-ai/index.d.ts +1 -0
- package/lib/posthog-ai/src/index.d.ts +4 -0
- package/lib/posthog-ai/src/openai/index.d.ts +32 -0
- package/lib/posthog-ai/src/utils.d.ts +51 -0
- package/lib/posthog-ai/src/vercel/middleware.d.ts +12 -0
- package/lib/posthog-ai/tests/openai.test.d.ts +1 -0
- package/package.json +4 -4
package/lib/index.cjs.js
ADDED
|
@@ -0,0 +1,386 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, '__esModule', { value: true });
|
|
4
|
+
|
|
5
|
+
var OpenAIOrignal = require('openai');
|
|
6
|
+
var uuid = require('uuid');
|
|
7
|
+
var stream = require('stream');
|
|
8
|
+
var ai = require('ai');
|
|
9
|
+
|
|
10
|
+
// CommonJS/ESM interop shim: a value that already looks like an ES module
// namespace (a non-null object carrying a `default` member) passes through
// unchanged; anything else is boxed under a `default` key.
function _interopDefaultLegacy(e) {
  const looksLikeNamespace = e && typeof e === 'object' && 'default' in e;
  if (looksLikeNamespace) {
    return e;
  }
  return { 'default': e };
}
|
|
11
|
+
|
|
12
|
+
var OpenAIOrignal__default = /*#__PURE__*/_interopDefaultLegacy(OpenAIOrignal);
|
|
13
|
+
|
|
14
|
+
/**
 * Pick out the request parameters that describe model behaviour (sampling,
 * penalties, token limits, streaming flags) for analytics reporting.
 * Keys that are absent, or present but explicitly `undefined`, are skipped.
 */
const getModelParams = params => {
  const trackedKeys = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p', 'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming'];
  const entries = trackedKeys
    .filter(key => key in params && params[key] !== undefined)
    .map(key => [key, params[key]]);
  return Object.fromEntries(entries);
};
|
|
24
|
+
/**
 * Normalise the message list across providers. OpenAI-style providers carry
 * the system prompt inside `messages` already, so those pass through
 * untouched; for Anthropic the separate `params.system` value (when set) is
 * prepended as a `system`-role message.
 */
const mergeSystemPrompt = (params, provider) => {
  if (provider !== 'anthropic') {
    return params.messages;
  }
  const baseMessages = params.messages || [];
  return params.system
    ? [{ role: 'system', content: params.system }, ...baseMessages]
    : baseMessages;
};
|
|
38
|
+
/**
 * Redact a value when privacy mode is active either globally on the PostHog
 * client (`privacy_mode`) or per-request (`privacyMode`); otherwise pass the
 * value through unchanged.
 */
const withPrivacyMode = (client, privacyMode, input) => {
  const redact = client.privacy_mode || privacyMode;
  if (redact) {
    return null;
  }
  return input;
};
|
|
41
|
+
/**
 * Emit a `$ai_generation` analytics event on the given PostHog client.
 * No-op when the client has no `capture` method. `distinctId` falls back to
 * `traceId`; in that fallback case `$process_person_profile: false` is added
 * so the synthetic id does not create a person profile. Input/output are
 * nulled by withPrivacyMode when privacy mode is on.
 */
const sendEventToPosthog = ({
  client,
  distinctId,
  traceId,
  model,
  provider,
  input,
  output,
  latency,
  baseURL,
  params,
  httpStatus = 200,
  usage = {}
}) => {
  if (!client.capture) {
    return;
  }
  const privacy = params.posthog_privacy_mode ?? false;
  const properties = {
    $ai_provider: provider,
    $ai_model: model,
    $ai_model_parameters: getModelParams(params),
    $ai_input: withPrivacyMode(client, privacy, input),
    $ai_output_choices: withPrivacyMode(client, privacy, output),
    $ai_http_status: httpStatus,
    $ai_input_tokens: usage.input_tokens ?? 0,
    $ai_output_tokens: usage.output_tokens ?? 0,
    $ai_latency: latency,
    $ai_trace_id: traceId,
    $ai_base_url: baseURL,
    ...params.posthog_properties
  };
  // Anonymous events (no explicit distinct id) should not create profiles.
  if (!distinctId) {
    properties.$process_person_profile = false;
  }
  client.capture({
    distinctId: distinctId ?? traceId,
    event: '$ai_generation',
    properties,
    groups: params.posthog_groups
  });
};
|
|
80
|
+
|
|
81
|
+
/**
 * Drop-in replacement for the OpenAI client that reports every chat
 * completion to PostHog. The `posthog` entry of the config is consumed here;
 * all remaining options are forwarded to the OpenAI constructor unchanged.
 */
class PostHogOpenAI extends OpenAIOrignal__default.default {
  constructor(configuration) {
    const { posthog: posthogClient, ...openAIConfig } = configuration;
    super(openAIConfig);
    this.phClient = posthogClient;
    // Swap in the instrumented chat namespace so completions are tracked.
    this.chat = new WrappedChat(this, this.phClient);
  }
}
|
|
92
|
+
/**
 * Chat namespace whose `completions` resource is replaced with an
 * instrumented variant that forwards usage events to PostHog.
 */
class WrappedChat extends OpenAIOrignal__default.default.Chat {
  constructor(openaiClient, posthogClient) {
    super(openaiClient);
    this.completions = new WrappedCompletions(openaiClient, posthogClient);
  }
}
|
|
98
|
+
/**
 * chat.completions resource that mirrors the upstream OpenAI implementation
 * but additionally captures a `$ai_generation` PostHog event on every call —
 * success and failure, streaming and non-streaming alike.
 */
class WrappedCompletions extends OpenAIOrignal__default["default"].Chat.Completions {
  constructor(client, phClient) {
    super(client);
    // PostHog client used for event capture; the OpenAI client is handled by super().
    this.phClient = phClient;
  }
  // --- Implementation Signature
  /**
   * Same contract as OpenAI's chat.completions.create. Optional posthog_*
   * monitoring fields are stripped from `body` before the request is
   * forwarded upstream; `body` itself (including those fields) is later
   * handed to sendEventToPosthog as `params`.
   */
  create(body, options) {
    const {
      posthog_distinct_id,
      posthog_trace_id,
      posthog_properties,
      posthog_privacy_mode = false,
      posthog_groups,
      ...openAIParams
    } = body;
    // Mint a trace id when the caller did not supply one; it also serves as
    // the distinct-id fallback below, so distinctId is always truthy here and
    // sendEventToPosthog's $process_person_profile branch never fires.
    const traceId = posthog_trace_id ?? uuid.v4();
    const startTime = Date.now();
    const parentPromise = super.create(openAIParams, options);
    if (openAIParams.stream) {
      return parentPromise.then(value => {
        // Chunks are re-emitted through an object-mode PassThrough so the
        // caller can consume the stream while content/usage are accumulated.
        const passThroughStream = new stream.PassThrough({
          objectMode: true
        });
        let accumulatedContent = '';
        let usage = {
          input_tokens: 0,
          output_tokens: 0
        };
        // 'tee' marks `value` as an OpenAI Stream; otherwise no pump is
        // started and the PassThrough is returned un-fed.
        if ('tee' in value) {
          const openAIStream = value;
          (async () => {
            try {
              for await (const chunk of openAIStream) {
                const delta = chunk?.choices?.[0]?.delta?.content ?? '';
                accumulatedContent += delta;
                // A chunk carrying `usage` overwrites the running totals.
                if (chunk.usage) {
                  usage = {
                    input_tokens: chunk.usage.prompt_tokens ?? 0,
                    output_tokens: chunk.usage.completion_tokens ?? 0
                  };
                }
                passThroughStream.write(chunk);
              }
              const latency = (Date.now() - startTime) / 1000;
              sendEventToPosthog({
                client: this.phClient,
                distinctId: posthog_distinct_id ?? traceId,
                traceId,
                model: openAIParams.model,
                provider: 'openai',
                // Privacy redaction is applied both here and again inside
                // sendEventToPosthog (via params.posthog_privacy_mode).
                input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
                output: [{
                  content: accumulatedContent,
                  role: 'assistant'
                }],
                latency,
                baseURL: this.baseURL ?? '',
                params: body,
                httpStatus: 200,
                usage
              });
              passThroughStream.end();
            } catch (error) {
              // error handling: report the failure (httpStatus 500, zero
              // usage) before surfacing it on the PassThrough.
              sendEventToPosthog({
                client: this.phClient,
                distinctId: posthog_distinct_id ?? traceId,
                traceId,
                model: openAIParams.model,
                provider: 'openai',
                input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
                output: [],
                latency: 0,
                baseURL: this.baseURL ?? '',
                params: body,
                httpStatus: 500,
                usage: {
                  input_tokens: 0,
                  output_tokens: 0
                }
              });
              passThroughStream.emit('error', error);
            }
          })();
        }
        return passThroughStream;
      });
    } else {
      const wrappedPromise = parentPromise.then(result => {
        // 'choices' distinguishes a ChatCompletion from other response shapes.
        if ('choices' in result) {
          const latency = (Date.now() - startTime) / 1000;
          sendEventToPosthog({
            client: this.phClient,
            distinctId: posthog_distinct_id ?? traceId,
            traceId,
            model: openAIParams.model,
            provider: 'openai',
            input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
            // NOTE(review): reads choices[0].message.content directly —
            // assumes at least one choice with a message; confirm upstream
            // guarantees this for non-streaming responses.
            output: [{
              content: result.choices[0].message.content,
              role: 'assistant'
            }],
            latency,
            baseURL: this.baseURL ?? '',
            params: body,
            httpStatus: 200,
            usage: {
              input_tokens: result.usage?.prompt_tokens ?? 0,
              output_tokens: result.usage?.completion_tokens ?? 0
            }
          });
        }
        return result;
      }, error => {
        // Rejection path: report the failure, then rethrow so callers still
        // observe the original error.
        sendEventToPosthog({
          client: this.phClient,
          distinctId: posthog_distinct_id ?? traceId,
          traceId,
          model: openAIParams.model,
          provider: 'openai',
          input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
          output: [],
          latency: 0,
          baseURL: this.baseURL ?? '',
          params: body,
          httpStatus: 500,
          usage: {
            input_tokens: 0,
            output_tokens: 0
          }
        });
        throw error;
      });
      return wrappedPromise;
    }
  }
}
|
|
235
|
+
|
|
236
|
+
/**
 * Build a Vercel AI SDK language-model middleware that reports each
 * generate/stream call to PostHog via sendEventToPosthog.
 *
 * `options` carries posthogDistinctId / posthogTraceId / posthogPrivacyMode
 * and is also forwarded wholesale as the event's `posthog_properties`.
 */
const createInstrumentationMiddleware = (phClient, model, options) => {
  const middleware = {
    wrapGenerate: async ({
      doGenerate,
      params
    }) => {
      const startTime = Date.now();
      try {
        const result = await doGenerate();
        const latency = (Date.now() - startTime) / 1000;
        sendEventToPosthog({
          client: phClient,
          distinctId: options.posthogDistinctId,
          traceId: options.posthogTraceId,
          model: model.modelId,
          provider: 'vercel',
          // Redact the prompt entirely when privacy mode is on.
          input: options.posthogPrivacyMode ? '' : params.prompt,
          output: [{
            content: result.text,
            role: 'assistant'
          }],
          latency,
          baseURL: '',
          params: {
            posthog_properties: options
          },
          httpStatus: 200,
          // NOTE(review): `result.usage` is dereferenced without optional
          // chaining here, unlike the stream path below which uses
          // `chunk.usage?.promptTokens` — this throws if usage is absent;
          // confirm the SDK always populates it.
          usage: {
            input_tokens: result.usage.promptTokens,
            output_tokens: result.usage.completionTokens
          }
        });
        return result;
      } catch (error) {
        // Failures are still reported (httpStatus 500, zero usage) before
        // rethrowing to the caller.
        sendEventToPosthog({
          client: phClient,
          distinctId: options.posthogDistinctId,
          traceId: options.posthogTraceId,
          model: model.modelId,
          provider: 'vercel',
          input: options.posthogPrivacyMode ? '' : params.prompt,
          output: [],
          latency: 0,
          baseURL: '',
          params: {
            posthog_properties: options
          },
          httpStatus: 500,
          usage: {
            input_tokens: 0,
            output_tokens: 0
          }
        });
        throw error;
      }
    },
    wrapStream: async ({
      doStream,
      params
    }) => {
      const startTime = Date.now();
      let generatedText = '';
      let usage = {};
      try {
        // NOTE: the destructured `stream` shadows the module-level `stream`
        // (node:stream) require inside this scope.
        const {
          stream,
          ...rest
        } = await doStream();
        // Observe chunks in flight: accumulate text deltas, pick usage off
        // the 'finish' chunk, and emit the event once the stream flushes.
        const transformStream = new TransformStream({
          transform(chunk, controller) {
            if (chunk.type === 'text-delta') {
              generatedText += chunk.textDelta;
            }
            if (chunk.type === 'finish') {
              usage = {
                input_tokens: chunk.usage?.promptTokens,
                output_tokens: chunk.usage?.completionTokens
              };
            }
            controller.enqueue(chunk);
          },
          flush() {
            const latency = (Date.now() - startTime) / 1000;
            sendEventToPosthog({
              client: phClient,
              distinctId: options.posthogDistinctId,
              traceId: options.posthogTraceId,
              model: model.modelId,
              provider: 'vercel',
              input: options.posthogPrivacyMode ? '' : params.prompt,
              output: [{
                content: generatedText,
                role: 'assistant'
              }],
              latency,
              baseURL: '',
              params: {
                posthog_properties: options
              },
              httpStatus: 200,
              usage
            });
          }
        });
        return {
          stream: stream.pipeThrough(transformStream),
          ...rest
        };
      } catch (error) {
        // Only errors thrown by doStream() itself land here; errors emitted
        // mid-stream bypass this catch.
        sendEventToPosthog({
          client: phClient,
          distinctId: options.posthogDistinctId,
          traceId: options.posthogTraceId,
          model: model.modelId,
          provider: 'vercel',
          input: options.posthogPrivacyMode ? '' : params.prompt,
          output: [],
          latency: 0,
          baseURL: '',
          params: {
            posthog_properties: options
          },
          httpStatus: 500,
          usage: {
            input_tokens: 0,
            output_tokens: 0
          }
        });
        throw error;
      }
    }
  };
  return middleware;
};
|
|
370
|
+
/**
 * Wrap a Vercel AI SDK language model with PostHog instrumentation. A trace
 * id is minted when the caller does not supply one, and it doubles as the
 * distinct-id fallback passed to the middleware.
 */
const wrapVercelLanguageModel = (model, phClient, options) => {
  const traceId = options.posthogTraceId ?? uuid.v4();
  const instrumentationOptions = {
    ...options,
    posthogTraceId: traceId,
    posthogDistinctId: options.posthogDistinctId ?? traceId
  };
  const middleware = createInstrumentationMiddleware(phClient, model, instrumentationOptions);
  return ai.experimental_wrapLanguageModel({ model, middleware });
};
|
|
383
|
+
|
|
384
|
+
// Public API: the instrumented OpenAI client, and the Vercel AI SDK model
// wrapper published under the name `withTracing`.
exports.OpenAI = PostHogOpenAI;
exports.withTracing = wrapVercelLanguageModel;
|
|
386
|
+
//# sourceMappingURL=index.cjs.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.cjs.js","sources":["../src/utils.ts","../src/openai/index.ts","../src/vercel/middleware.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport OpenAIOrignal from 'openai'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\n\nexport interface MonitoringParams {\n posthog_distinct_id?: string\n posthog_trace_id?: string\n posthog_properties?: Record<string, any>\n posthog_privacy_mode?: boolean\n posthog_groups?: Record<string, any>\n}\n\nexport const getModelParams = (params: ChatCompletionCreateParamsBase & MonitoringParams): Record<string, any> => {\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\nexport const getUsage = (response: any, provider: string): { input_tokens: number; output_tokens: number } => {\n if (!response?.usage) {\n return { input_tokens: 0, output_tokens: 0 }\n }\n\n if (provider === 'anthropic') {\n return {\n input_tokens: response.usage.input_tokens ?? 0,\n output_tokens: response.usage.output_tokens ?? 0,\n }\n } else if (provider === 'openai') {\n return {\n input_tokens: response.usage.prompt_tokens ?? 0,\n output_tokens: response.usage.completion_tokens ?? 0,\n }\n }\n\n return { input_tokens: 0, output_tokens: 0 }\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. 
anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): Array<{ role: string; content: string }> => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): Array<{ role: string; content: string }> => {\n // Example approach if \"response.content\" holds array of text segments, etc.\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.content ?? []) {\n if (choice?.text) {\n output.push({\n role: 'assistant',\n content: choice.text,\n })\n }\n }\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): Array<{ role: string; content: string }> => {\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.choices ?? []) {\n if (choice.message?.content) {\n output.push({\n role: choice.message.role,\n content: choice.message.content,\n })\n }\n }\n return output\n}\n\nexport const mergeSystemPrompt = (params: ChatCompletionCreateParamsBase & MonitoringParams, provider: string): any => {\n if (provider !== 'anthropic') {\n return params.messages\n }\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = (params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? 
null : input\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: { input_tokens?: number; output_tokens?: number }\n params: ChatCompletionCreateParamsBase & MonitoringParams\n}\n\nexport const sendEventToPosthog = ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n}: SendEventToPosthogParams): void => {\n if (client.capture) {\n client.capture({\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties: {\n $ai_provider: provider,\n $ai_model: model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthog_privacy_mode ?? false, input),\n $ai_output_choices: withPrivacyMode(client, params.posthog_privacy_mode ?? false, output),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.input_tokens ?? 0,\n $ai_output_tokens: usage.output_tokens ?? 0,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthog_properties,\n ...(distinctId ? 
{} : { $process_person_profile: false }),\n },\n groups: params.posthog_groups,\n })\n }\n}\n","import OpenAIOrignal from 'openai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { PassThrough } from 'stream'\nimport { mergeSystemPrompt, MonitoringParams, sendEventToPosthog } from '../utils'\n\ntype ChatCompletion = OpenAIOrignal.ChatCompletion\ntype ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming\ntype ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming\nimport type { APIPromise, RequestOptions } from 'openai/core'\nimport type { Stream } from 'openai/streaming'\n\ninterface MonitoringOpenAIConfig {\n apiKey: string\n posthog: PostHog\n baseURL?: string\n}\n\nexport class PostHogOpenAI extends OpenAIOrignal {\n private readonly phClient: PostHog\n\n constructor(config: MonitoringOpenAIConfig) {\n const { posthog, ...openAIConfig } = config\n super(openAIConfig)\n this.phClient = posthog\n this.chat = new WrappedChat(this, this.phClient)\n }\n\n public chat: WrappedChat\n}\n\nexport class WrappedChat extends OpenAIOrignal.Chat {\n constructor(parentClient: PostHogOpenAI, phClient: PostHog) {\n super(parentClient)\n this.completions = new WrappedCompletions(parentClient, phClient)\n }\n\n public completions: WrappedCompletions\n}\n\nexport class WrappedCompletions extends OpenAIOrignal.Chat.Completions {\n private readonly phClient: PostHog\n\n constructor(client: OpenAIOrignal, phClient: PostHog) {\n super(client)\n this.phClient = phClient\n }\n\n // --- Overload #1: Non-streaming\n public create(\n body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion>\n\n // --- Overload #2: Streaming\n 
public create(\n body: ChatCompletionCreateParamsStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<Stream<ChatCompletionChunk>>\n\n // --- Overload #3: Generic base\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>\n\n // --- Implementation Signature\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {\n const {\n posthog_distinct_id,\n posthog_trace_id,\n posthog_properties,\n posthog_privacy_mode = false,\n posthog_groups,\n ...openAIParams\n } = body\n\n const traceId = posthog_trace_id ?? uuidv4()\n const startTime = Date.now()\n\n const parentPromise = super.create(openAIParams, options)\n\n if (openAIParams.stream) {\n return parentPromise.then((value) => {\n const passThroughStream = new PassThrough({ objectMode: true })\n let accumulatedContent = ''\n let usage: { input_tokens: number; output_tokens: number } = {\n input_tokens: 0,\n output_tokens: 0,\n }\n if ('tee' in value) {\n const openAIStream = value\n ;(async () => {\n try {\n for await (const chunk of openAIStream) {\n const delta = chunk?.choices?.[0]?.delta?.content ?? ''\n accumulatedContent += delta\n if (chunk.usage) {\n usage = {\n input_tokens: chunk.usage.prompt_tokens ?? 0,\n output_tokens: chunk.usage.completion_tokens ?? 0,\n }\n }\n passThroughStream.write(chunk)\n }\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthog_distinct_id ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? 
'',\n params: body,\n httpStatus: 200,\n usage,\n })\n passThroughStream.end()\n } catch (error) {\n // error handling\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthog_distinct_id ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n passThroughStream.emit('error', error)\n }\n })()\n }\n return passThroughStream as unknown as Stream<ChatCompletionChunk>\n }) as APIPromise<Stream<ChatCompletionChunk>>\n } else {\n const wrappedPromise = parentPromise.then(\n (result) => {\n if ('choices' in result) {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthog_distinct_id ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [{ content: result.choices[0].message.content, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage: {\n input_tokens: result.usage?.prompt_tokens ?? 0,\n output_tokens: result.usage?.completion_tokens ?? 0,\n },\n })\n }\n return result\n },\n (error) => {\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthog_distinct_id ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? 
'',\n params: body,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n throw error\n }\n ) as APIPromise<ChatCompletion>\n\n return wrappedPromise\n }\n }\n}\n\nexport default PostHogOpenAI\n","import { experimental_wrapLanguageModel as wrapLanguageModel } from 'ai'\nimport type {\n LanguageModelV1,\n Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware,\n LanguageModelV1StreamPart,\n} from 'ai'\nimport { v4 as uuidv4 } from 'uuid'\nimport type { PostHog } from 'posthog-node'\nimport { sendEventToPosthog } from '../utils'\n\ninterface CreateInstrumentationMiddlewareOptions {\n posthogDistinctId?: string\n posthogTraceId: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: string[]\n}\n\nexport const createInstrumentationMiddleware = (\n phClient: PostHog,\n model: LanguageModelV1,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1Middleware => {\n const middleware: LanguageModelV1Middleware = {\n wrapGenerate: async ({ doGenerate, params }) => {\n const startTime = Date.now()\n\n try {\n const result = await doGenerate()\n const latency = (Date.now() - startTime) / 1000\n\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : params.prompt,\n output: [{ content: result.text, role: 'assistant' }],\n latency,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 200,\n usage: {\n input_tokens: result.usage.promptTokens,\n output_tokens: result.usage.completionTokens,\n },\n })\n\n return result\n } catch (error) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? 
'' : params.prompt,\n output: [],\n latency: 0,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n throw error\n }\n },\n\n wrapStream: async ({ doStream, params }) => {\n const startTime = Date.now()\n let generatedText = ''\n let usage: { input_tokens?: number; output_tokens?: number } = {}\n\n try {\n const { stream, ...rest } = await doStream()\n\n const transformStream = new TransformStream<LanguageModelV1StreamPart, LanguageModelV1StreamPart>({\n transform(chunk, controller) {\n if (chunk.type === 'text-delta') {\n generatedText += chunk.textDelta\n }\n if (chunk.type === 'finish') {\n usage = {\n input_tokens: chunk.usage?.promptTokens,\n output_tokens: chunk.usage?.completionTokens,\n }\n }\n controller.enqueue(chunk)\n },\n\n flush() {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : params.prompt,\n output: [{ content: generatedText, role: 'assistant' }],\n latency,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 200,\n usage,\n })\n },\n })\n\n return {\n stream: stream.pipeThrough(transformStream),\n ...rest,\n }\n } catch (error) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? 
'' : params.prompt,\n output: [],\n latency: 0,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n throw error\n }\n },\n }\n\n return middleware\n}\n\nexport const wrapVercelLanguageModel = (\n model: LanguageModelV1,\n phClient: PostHog,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1 => {\n const traceId = options.posthogTraceId ?? uuidv4()\n const middleware = createInstrumentationMiddleware(phClient, model, {\n ...options,\n posthogTraceId: traceId,\n posthogDistinctId: options.posthogDistinctId ?? traceId,\n })\n\n const wrappedModel = wrapLanguageModel({\n model,\n middleware,\n })\n\n return wrappedModel\n}\n"],"names":["getModelParams","params","modelParams","paramKeys","key","undefined","mergeSystemPrompt","provider","messages","system","systemMessage","role","content","withPrivacyMode","client","privacyMode","input","privacy_mode","sendEventToPosthog","distinctId","traceId","model","output","latency","baseURL","httpStatus","usage","capture","event","properties","$ai_provider","$ai_model","$ai_model_parameters","$ai_input","posthog_privacy_mode","$ai_output_choices","$ai_http_status","$ai_input_tokens","input_tokens","$ai_output_tokens","output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthog_properties","$process_person_profile","groups","posthog_groups","PostHogOpenAI","OpenAIOrignal","constructor","config","posthog","openAIConfig","phClient","chat","WrappedChat","Chat","parentClient","completions","WrappedCompletions","Completions","create","body","options","posthog_distinct_id","posthog_trace_id","openAIParams","uuidv4","startTime","Date","now","parentPromise","stream","then","value","passThroughStream","PassThrough","objectMode","accumulatedContent","openAIStream","chunk","delta","choices","prompt_tokens","completion_tokens","write","end","error","emit","wrappedPromise","result","message","createInstrumentationMiddleware","m
iddleware","wrapGenerate","doGenerate","posthogDistinctId","posthogTraceId","modelId","posthogPrivacyMode","prompt","text","promptTokens","completionTokens","wrapStream","doStream","generatedText","rest","transformStream","TransformStream","transform","controller","type","textDelta","enqueue","flush","pipeThrough","wrapVercelLanguageModel","wrappedModel","wrapLanguageModel"],"mappings":";;;;;;;;;;;;;AAaO,MAAMA,cAAc,GAAIC,MAAyD,IAAyB;EAC/G,MAAMC,WAAW,GAAwB,EAAE,CAAA;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH,CAAA;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAc,CAACG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAc,CAACG,GAAG,CAAC,CAAA;AACxC,KAAA;AACF,GAAA;AACD,EAAA,OAAOF,WAAW,CAAA;AACpB,CAAC,CAAA;AAgEM,MAAMI,iBAAiB,GAAGA,CAACL,MAAyD,EAAEM,QAAgB,KAAS;EACpH,IAAIA,QAAQ,KAAK,WAAW,EAAE;IAC5B,OAAON,MAAM,CAACO,QAAQ,CAAA;AACvB,GAAA;AACD,EAAA,MAAMA,QAAQ,GAAGP,MAAM,CAACO,QAAQ,IAAI,EAAE,CAAA;AACtC,EAAA,IAAI,CAAEP,MAAc,CAACQ,MAAM,EAAE;AAC3B,IAAA,OAAOD,QAAQ,CAAA;AAChB,GAAA;AACD,EAAA,MAAME,aAAa,GAAIT,MAAc,CAACQ,MAAM,CAAA;AAC5C,EAAA,OAAO,CAAC;AAAEE,IAAAA,IAAI,EAAE,QAAQ;AAAEC,IAAAA,OAAO,EAAEF,aAAAA;GAAe,EAAE,GAAGF,QAAQ,CAAC,CAAA;AAClE,CAAC,CAAA;AAEM,MAAMK,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAS;EACxF,OAAQF,MAAc,CAACG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK,CAAA;AACnE,CAAC,CAAA;AAiBM,MAAME,kBAAkB,GAAGA,CAAC;EACjCJ,MAAM;EACNK,UAAU;EACVC,OAAO;EACPC,KAAK;EACLd,QAAQ;EACRS,KAAK;EACLM,MAAM;EACNC,OAAO;EACPC,OAAO;EACPvB,MAAM;AACNwB,EAAAA,UAAU,GAAG,GAAG;AAChBC,EAAAA,KAAK,GAAG,EAAE;AAAA,CACe,KAAU;EACnC,IAAIZ,MAAM,CAACa,OAAO,EAAE;IAClBb,MAAM,CAACa,OAAO,CAAC;MACbR,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCQ,MAAAA,KAAK,EAAE,gBAAgB;AACvBC,MAAAA,UAAU,EAAE;AACVC,QAAAA,YAAY,EAAEvB,QAAQ;AACtBwB,QAAAA,SAAS,EAAEV,KAAK;AAChBW,QAAAA,oBAAoB,EAAEhC,cAAc,CAACC,MAAM,CAAC;AAC5CgC,QAAAA,SAAS,EAAEpB,eAAe,CAACC,MAAM,EAAEb,MAAM,CAACiC,oBAAoB,IAAI,KAAK,EAAElB,KAAK,CAAC;AA
C/EmB,QAAAA,kBAAkB,EAAEtB,eAAe,CAACC,MAAM,EAAEb,MAAM,CAACiC,oBAAoB,IAAI,KAAK,EAAEZ,MAAM,CAAC;AACzFc,QAAAA,eAAe,EAAEX,UAAU;AAC3BY,QAAAA,gBAAgB,EAAEX,KAAK,CAACY,YAAY,IAAI,CAAC;AACzCC,QAAAA,iBAAiB,EAAEb,KAAK,CAACc,aAAa,IAAI,CAAC;AAC3CC,QAAAA,WAAW,EAAElB,OAAO;AACpBmB,QAAAA,YAAY,EAAEtB,OAAO;AACrBuB,QAAAA,YAAY,EAAEnB,OAAO;QACrB,GAAGvB,MAAM,CAAC2C,kBAAkB;AAC5B,QAAA,IAAIzB,UAAU,GAAG,EAAE,GAAG;AAAE0B,UAAAA,uBAAuB,EAAE,KAAA;SAAO,CAAA;OACzD;MACDC,MAAM,EAAE7C,MAAM,CAAC8C,cAAAA;AAChB,KAAA,CAAC,CAAA;AACH,GAAA;AACH,CAAC;;ACjJK,MAAOC,aAAc,SAAQC,iCAAa,CAAA;EAG9CC,WAAAA,CAAYC,MAA8B,EAAA;IACxC,MAAM;MAAEC,OAAO;MAAE,GAAGC,YAAAA;AAAc,KAAA,GAAGF,MAAM,CAAA;IAC3C,KAAK,CAACE,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,QAAQ,GAAGF,OAAO,CAAA;IACvB,IAAI,CAACG,IAAI,GAAG,IAAIC,WAAW,CAAC,IAAI,EAAE,IAAI,CAACF,QAAQ,CAAC,CAAA;AAClD,GAAA;AAGD,CAAA;AAEY,MAAAE,WAAY,SAAQP,iCAAa,CAACQ,IAAI,CAAA;AACjDP,EAAAA,WAAYA,CAAAQ,YAA2B,EAAEJ,QAAiB,EAAA;IACxD,KAAK,CAACI,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,WAAW,GAAG,IAAIC,kBAAkB,CAACF,YAAY,EAAEJ,QAAQ,CAAC,CAAA;AACnE,GAAA;AAGD,CAAA;MAEYM,kBAAmB,SAAQX,iCAAa,CAACQ,IAAI,CAACI,WAAW,CAAA;AAGpEX,EAAAA,WAAYA,CAAApC,MAAqB,EAAEwC,QAAiB,EAAA;IAClD,KAAK,CAACxC,MAAM,CAAC,CAAA;IACb,IAAI,CAACwC,QAAQ,GAAGA,QAAQ,CAAA;AAC1B,GAAA;AAoBA;AACOQ,EAAAA,MAAMA,CACXC,IAAuD,EACvDC,OAAwB,EAAA;IAExB,MAAM;MACJC,mBAAmB;MACnBC,gBAAgB;MAChBtB,kBAAkB;AAClBV,MAAAA,oBAAoB,GAAG,KAAK;MAC5Ba,cAAc;MACd,GAAGoB,YAAAA;AACJ,KAAA,GAAGJ,IAAI,CAAA;AAER,IAAA,MAAM3C,OAAO,GAAG8C,gBAAgB,IAAIE,OAAM,EAAE,CAAA;AAC5C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;IAE5B,MAAMC,aAAa,GAAG,KAAK,CAACV,MAAM,CAACK,YAAY,EAAEH,OAAO,CAAC,CAAA;IAEzD,IAAIG,YAAY,CAACM,MAAM,EAAE;AACvB,MAAA,OAAOD,aAAa,CAACE,IAAI,CAAEC,KAAK,IAAI;AAClC,QAAA,MAAMC,iBAAiB,GAAG,IAAIC,kBAAW,CAAC;AAAEC,UAAAA,UAAU,EAAE,IAAA;AAAM,SAAA,CAAC,CAAA;QAC/D,IAAIC,kBAAkB,GAAG,EAAE,CAAA;AAC3B,QAAA,IAAIrD,KAAK,GAAoD;AAC3DY,UAAAA,YAAY,EAAE,CAAC;AACfE,UAAAA,aAAa,EAAE,CAAA;SAChB,CAAA;QACD,IAAI,KAAK,IAAImC,KAAK,EAAE;UAClB,MAAMK,YAAY,GAAGL,KAAK,CAAA;AACzB,UAAA,CAAC,YAAW;YACX,IAAI;AACF,cAAA,WAAW
,MAAMM,KAAK,IAAID,YAAY,EAAE;AACtC,gBAAA,MAAME,KAAK,GAAGD,KAAK,EAAEE,OAAO,GAAG,CAAC,CAAC,EAAED,KAAK,EAAEtE,OAAO,IAAI,EAAE,CAAA;AACvDmE,gBAAAA,kBAAkB,IAAIG,KAAK,CAAA;gBAC3B,IAAID,KAAK,CAACvD,KAAK,EAAE;AACfA,kBAAAA,KAAK,GAAG;AACNY,oBAAAA,YAAY,EAAE2C,KAAK,CAACvD,KAAK,CAAC0D,aAAa,IAAI,CAAC;AAC5C5C,oBAAAA,aAAa,EAAEyC,KAAK,CAACvD,KAAK,CAAC2D,iBAAiB,IAAI,CAAA;mBACjD,CAAA;AACF,iBAAA;AACDT,gBAAAA,iBAAiB,CAACU,KAAK,CAACL,KAAK,CAAC,CAAA;AAC/B,eAAA;cACD,MAAM1D,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CnD,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;gBACrBnC,UAAU,EAAE8C,mBAAmB,IAAI7C,OAAO;gBAC1CA,OAAO;gBACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBS,KAAK,EAAEkB,oBAAoB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC5E7C,gBAAAA,MAAM,EAAE,CAAC;AAAEV,kBAAAA,OAAO,EAAEmE,kBAAkB;AAAEpE,kBAAAA,IAAI,EAAE,WAAA;AAAW,iBAAE,CAAC;gBAC5DY,OAAO;AACPC,gBAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,gBAAAA,MAAM,EAAE8D,IAAI;AACZtC,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAAA;AACD,eAAA,CAAC,CAAA;cACFkD,iBAAiB,CAACW,GAAG,EAAE,CAAA;aACxB,CAAC,OAAOC,KAAK,EAAE;AACd;AACAtE,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;gBACrBnC,UAAU,EAAE8C,mBAAmB,IAAI7C,OAAO;gBAC1CA,OAAO;gBACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBS,KAAK,EAAEkB,oBAAoB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC5E7C,gBAAAA,MAAM,EAAE,EAAE;AACVC,gBAAAA,OAAO,EAAE,CAAC;AACVC,gBAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,gBAAAA,MAAM,EAAE8D,IAAI;AACZtC,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAK,EAAE;AACLY,kBAAAA,YAAY,EAAE,CAAC;AACfE,kBAAAA,aAAa,EAAE,CAAA;AAChB,iBAAA;AACF,eAAA,CAAC,CAAA;AACFoC,cAAAA,iBAAiB,CAACa,IAAI,CAAC,OAAO,EAAED,KAAK,CAAC,CAAA;AACvC,aAAA;AACH,WAAC,GAAG,CAAA;AACL,SAAA;AACD,QAAA,OAAOZ,iBAA2D,CAAA;AACpE,OAAC,CAA4C,CAAA;AAC9C,KAAA,MAAM;AACL,MAAA,MAAMc,cAAc,GAAGlB,aAAa,CAACE,IAAI,CACtCiB,MAAM,IAAI;QACT,IAAI,SAAS,IAAIA,MAAM,EAAE;UACvB,MAAMpE,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CnD,UAAAA,kBA
AkB,CAAC;YACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;YACrBnC,UAAU,EAAE8C,mBAAmB,IAAI7C,OAAO;YAC1CA,OAAO;YACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,YAAAA,QAAQ,EAAE,QAAQ;YAClBS,KAAK,EAAEkB,oBAAoB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC5E7C,YAAAA,MAAM,EAAE,CAAC;cAAEV,OAAO,EAAE+E,MAAM,CAACR,OAAO,CAAC,CAAC,CAAC,CAACS,OAAO,CAAChF,OAAO;AAAED,cAAAA,IAAI,EAAE,WAAA;aAAa,CAAC;YAC3EY,OAAO;AACPC,YAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,YAAAA,MAAM,EAAE8D,IAAI;AACZtC,YAAAA,UAAU,EAAE,GAAG;AACfC,YAAAA,KAAK,EAAE;AACLY,cAAAA,YAAY,EAAEqD,MAAM,CAACjE,KAAK,EAAE0D,aAAa,IAAI,CAAC;AAC9C5C,cAAAA,aAAa,EAAEmD,MAAM,CAACjE,KAAK,EAAE2D,iBAAiB,IAAI,CAAA;AACnD,aAAA;AACF,WAAA,CAAC,CAAA;AACH,SAAA;AACD,QAAA,OAAOM,MAAM,CAAA;OACd,EACAH,KAAK,IAAI;AACRtE,QAAAA,kBAAkB,CAAC;UACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;UACrBnC,UAAU,EAAE8C,mBAAmB,IAAI7C,OAAO;UAC1CA,OAAO;UACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEkB,oBAAoB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC5E7C,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,UAAAA,MAAM,EAAE8D,IAAI;AACZtC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAE,CAAC;AACfE,YAAAA,aAAa,EAAE,CAAA;AAChB,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACb,OAAC,CAC4B,CAAA;AAE/B,MAAA,OAAOE,cAAc,CAAA;AACtB,KAAA;AACH,GAAA;AACD;;ACvLM,MAAMG,+BAA+B,GAAGA,CAC7CvC,QAAiB,EACjBjC,KAAsB,EACtB2C,OAA+C,KAClB;AAC7B,EAAA,MAAM8B,UAAU,GAA8B;IAC5CC,YAAY,EAAE,OAAO;MAAEC,UAAU;AAAE/F,MAAAA,MAAAA;AAAQ,KAAA,KAAI;AAC7C,MAAA,MAAMoE,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;MAE5B,IAAI;AACF,QAAA,MAAMoB,MAAM,GAAG,MAAMK,UAAU,EAAE,CAAA;QACjC,MAAMzE,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAE/CnD,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAEwC,QAAQ;UAChBnC,UAAU,EAAE6C,OAAO,CAACiC,iBAAiB;UACrC7E,OAAO,EAAE4C,OAAO,CAACkC,cAAc;UAC/B7E,KAAK,EAAEA,KAAK,CAAC8E,OAAO;AACpB5F,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEgD,OAAO,CAACoC,kBAAkB,GAAG,EAAE,GAAGnG,MAAM,CAACoG,MAAM;AACtD/E,UAAAA,MAAM
,EAAE,CAAC;YAAEV,OAAO,EAAE+E,MAAM,CAACW,IAAI;AAAE3F,YAAAA,IAAI,EAAE,WAAA;WAAa,CAAC;UACrDY,OAAO;AACPC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAE;AAAE2C,YAAAA,kBAAkB,EAAEoB,OAAAA;WAAgB;AAC9CvC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAEqD,MAAM,CAACjE,KAAK,CAAC6E,YAAY;AACvC/D,YAAAA,aAAa,EAAEmD,MAAM,CAACjE,KAAK,CAAC8E,gBAAAA;AAC7B,WAAA;AACF,SAAA,CAAC,CAAA;AAEF,QAAA,OAAOb,MAAM,CAAA;OACd,CAAC,OAAOH,KAAK,EAAE;AACdtE,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAEwC,QAAQ;UAChBnC,UAAU,EAAE6C,OAAO,CAACiC,iBAAiB;UACrC7E,OAAO,EAAE4C,OAAO,CAACkC,cAAc;UAC/B7E,KAAK,EAAEA,KAAK,CAAC8E,OAAO;AACpB5F,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEgD,OAAO,CAACoC,kBAAkB,GAAG,EAAE,GAAGnG,MAAM,CAACoG,MAAM;AACtD/E,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAE;AAAE2C,YAAAA,kBAAkB,EAAEoB,OAAAA;WAAgB;AAC9CvC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAE,CAAC;AACfE,YAAAA,aAAa,EAAE,CAAA;AAChB,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACZ,OAAA;KACF;IAEDiB,UAAU,EAAE,OAAO;MAAEC,QAAQ;AAAEzG,MAAAA,MAAAA;AAAQ,KAAA,KAAI;AACzC,MAAA,MAAMoE,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;MAC5B,IAAIoC,aAAa,GAAG,EAAE,CAAA;MACtB,IAAIjF,KAAK,GAAsD,EAAE,CAAA;MAEjE,IAAI;QACF,MAAM;UAAE+C,MAAM;UAAE,GAAGmC,IAAAA;SAAM,GAAG,MAAMF,QAAQ,EAAE,CAAA;AAE5C,QAAA,MAAMG,eAAe,GAAG,IAAIC,eAAe,CAAuD;AAChGC,UAAAA,SAASA,CAAC9B,KAAK,EAAE+B,UAAU,EAAA;AACzB,YAAA,IAAI/B,KAAK,CAACgC,IAAI,KAAK,YAAY,EAAE;cAC/BN,aAAa,IAAI1B,KAAK,CAACiC,SAAS,CAAA;AACjC,aAAA;AACD,YAAA,IAAIjC,KAAK,CAACgC,IAAI,KAAK,QAAQ,EAAE;AAC3BvF,cAAAA,KAAK,GAAG;AACNY,gBAAAA,YAAY,EAAE2C,KAAK,CAACvD,KAAK,EAAE6E,YAAY;AACvC/D,gBAAAA,aAAa,EAAEyC,KAAK,CAACvD,KAAK,EAAE8E,gBAAAA;eAC7B,CAAA;AACF,aAAA;AACDQ,YAAAA,UAAU,CAACG,OAAO,CAAClC,KAAK,CAAC,CAAA;WAC1B;AAEDmC,UAAAA,KAAKA,GAAA;YACH,MAAM7F,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CnD,YAAAA,kBAAkB,CAAC;AACjBJ,cAAAA,MAAM,EAAEwC,QAAQ;cAChBnC,UAAU,EAAE6C,OAAO,CAACiC,iBAAiB;cACrC7E,OAAO,EAAE4C,OAAO,CAACkC,cAAc;cAC/B7E,KAAK,EAAEA,KAA
K,CAAC8E,OAAO;AACpB5F,cAAAA,QAAQ,EAAE,QAAQ;cAClBS,KAAK,EAAEgD,OAAO,CAACoC,kBAAkB,GAAG,EAAE,GAAGnG,MAAM,CAACoG,MAAM;AACtD/E,cAAAA,MAAM,EAAE,CAAC;AAAEV,gBAAAA,OAAO,EAAE+F,aAAa;AAAEhG,gBAAAA,IAAI,EAAE,WAAA;AAAW,eAAE,CAAC;cACvDY,OAAO;AACPC,cAAAA,OAAO,EAAE,EAAE;AACXvB,cAAAA,MAAM,EAAE;AAAE2C,gBAAAA,kBAAkB,EAAEoB,OAAAA;eAAgB;AAC9CvC,cAAAA,UAAU,EAAE,GAAG;AACfC,cAAAA,KAAAA;AACD,aAAA,CAAC,CAAA;AACJ,WAAA;AACD,SAAA,CAAC,CAAA;QAEF,OAAO;AACL+C,UAAAA,MAAM,EAAEA,MAAM,CAAC4C,WAAW,CAACR,eAAe,CAAC;UAC3C,GAAGD,IAAAA;SACJ,CAAA;OACF,CAAC,OAAOpB,KAAK,EAAE;AACdtE,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAEwC,QAAQ;UAChBnC,UAAU,EAAE6C,OAAO,CAACiC,iBAAiB;UACrC7E,OAAO,EAAE4C,OAAO,CAACkC,cAAc;UAC/B7E,KAAK,EAAEA,KAAK,CAAC8E,OAAO;AACpB5F,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEgD,OAAO,CAACoC,kBAAkB,GAAG,EAAE,GAAGnG,MAAM,CAACoG,MAAM;AACtD/E,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAE;AAAE2C,YAAAA,kBAAkB,EAAEoB,OAAAA;WAAgB;AAC9CvC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAE,CAAC;AACfE,YAAAA,aAAa,EAAE,CAAA;AAChB,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACZ,OAAA;AACH,KAAA;GACD,CAAA;AAED,EAAA,OAAOM,UAAU,CAAA;AACnB,CAAC,CAAA;AAEM,MAAMwB,uBAAuB,GAAGA,CACrCjG,KAAsB,EACtBiC,QAAiB,EACjBU,OAA+C,KAC5B;EACnB,MAAM5C,OAAO,GAAG4C,OAAO,CAACkC,cAAc,IAAI9B,OAAM,EAAE,CAAA;AAClD,EAAA,MAAM0B,UAAU,GAAGD,+BAA+B,CAACvC,QAAQ,EAAEjC,KAAK,EAAE;AAClE,IAAA,GAAG2C,OAAO;AACVkC,IAAAA,cAAc,EAAE9E,OAAO;AACvB6E,IAAAA,iBAAiB,EAAEjC,OAAO,CAACiC,iBAAiB,IAAI7E,OAAAA;AACjD,GAAA,CAAC,CAAA;EAEF,MAAMmG,YAAY,GAAGC,iCAAiB,CAAC;IACrCnG,KAAK;AACLyE,IAAAA,UAAAA;AACD,GAAA,CAAC,CAAA;AAEF,EAAA,OAAOyB,YAAY,CAAA;AACrB;;;;;"}
|
package/lib/index.d.ts
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import OpenAIOrignal from 'openai';
|
|
2
|
+
import { PostHog } from 'posthog-node';
|
|
3
|
+
import { RequestOptions, APIPromise } from 'openai/core';
|
|
4
|
+
import { Stream } from 'openai/streaming';
|
|
5
|
+
import { LanguageModelV1 } from 'ai';
|
|
6
|
+
|
|
7
|
+
interface MonitoringParams {
|
|
8
|
+
posthog_distinct_id?: string;
|
|
9
|
+
posthog_trace_id?: string;
|
|
10
|
+
posthog_properties?: Record<string, any>;
|
|
11
|
+
posthog_privacy_mode?: boolean;
|
|
12
|
+
posthog_groups?: Record<string, any>;
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
type ChatCompletion = OpenAIOrignal.ChatCompletion;
|
|
16
|
+
type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk;
|
|
17
|
+
type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams;
|
|
18
|
+
type ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming;
|
|
19
|
+
type ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming;
|
|
20
|
+
|
|
21
|
+
interface MonitoringOpenAIConfig {
|
|
22
|
+
apiKey: string;
|
|
23
|
+
posthog: PostHog;
|
|
24
|
+
baseURL?: string;
|
|
25
|
+
}
|
|
26
|
+
declare class PostHogOpenAI extends OpenAIOrignal {
|
|
27
|
+
private readonly phClient;
|
|
28
|
+
constructor(config: MonitoringOpenAIConfig);
|
|
29
|
+
chat: WrappedChat;
|
|
30
|
+
}
|
|
31
|
+
declare class WrappedChat extends OpenAIOrignal.Chat {
|
|
32
|
+
constructor(parentClient: PostHogOpenAI, phClient: PostHog);
|
|
33
|
+
completions: WrappedCompletions;
|
|
34
|
+
}
|
|
35
|
+
declare class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
|
|
36
|
+
private readonly phClient;
|
|
37
|
+
constructor(client: OpenAIOrignal, phClient: PostHog);
|
|
38
|
+
create(body: ChatCompletionCreateParamsNonStreaming & MonitoringParams, options?: RequestOptions): APIPromise<ChatCompletion>;
|
|
39
|
+
create(body: ChatCompletionCreateParamsStreaming & MonitoringParams, options?: RequestOptions): APIPromise<Stream<ChatCompletionChunk>>;
|
|
40
|
+
create(body: ChatCompletionCreateParamsBase & MonitoringParams, options?: RequestOptions): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>;
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
interface CreateInstrumentationMiddlewareOptions {
|
|
44
|
+
posthogDistinctId?: string;
|
|
45
|
+
posthogTraceId: string;
|
|
46
|
+
posthogProperties?: Record<string, any>;
|
|
47
|
+
posthogPrivacyMode?: boolean;
|
|
48
|
+
posthogGroups?: string[];
|
|
49
|
+
}
|
|
50
|
+
declare const wrapVercelLanguageModel: (model: LanguageModelV1, phClient: PostHog, options: CreateInstrumentationMiddlewareOptions) => LanguageModelV1;
|
|
51
|
+
|
|
52
|
+
export { PostHogOpenAI as OpenAI, wrapVercelLanguageModel as withTracing };
|
package/lib/index.esm.js
ADDED
|
@@ -0,0 +1,377 @@
|
|
|
1
|
+
import OpenAIOrignal from 'openai';
|
|
2
|
+
import { v4 } from 'uuid';
|
|
3
|
+
import { PassThrough } from 'stream';
|
|
4
|
+
import { experimental_wrapLanguageModel } from 'ai';
|
|
5
|
+
|
|
6
|
+
const getModelParams = params => {
|
|
7
|
+
const modelParams = {};
|
|
8
|
+
const paramKeys = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p', 'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming'];
|
|
9
|
+
for (const key of paramKeys) {
|
|
10
|
+
if (key in params && params[key] !== undefined) {
|
|
11
|
+
modelParams[key] = params[key];
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
return modelParams;
|
|
15
|
+
};
|
|
16
|
+
const mergeSystemPrompt = (params, provider) => {
|
|
17
|
+
if (provider !== 'anthropic') {
|
|
18
|
+
return params.messages;
|
|
19
|
+
}
|
|
20
|
+
const messages = params.messages || [];
|
|
21
|
+
if (!params.system) {
|
|
22
|
+
return messages;
|
|
23
|
+
}
|
|
24
|
+
const systemMessage = params.system;
|
|
25
|
+
return [{
|
|
26
|
+
role: 'system',
|
|
27
|
+
content: systemMessage
|
|
28
|
+
}, ...messages];
|
|
29
|
+
};
|
|
30
|
+
const withPrivacyMode = (client, privacyMode, input) => {
|
|
31
|
+
return client.privacy_mode || privacyMode ? null : input;
|
|
32
|
+
};
|
|
33
|
+
const sendEventToPosthog = ({
|
|
34
|
+
client,
|
|
35
|
+
distinctId,
|
|
36
|
+
traceId,
|
|
37
|
+
model,
|
|
38
|
+
provider,
|
|
39
|
+
input,
|
|
40
|
+
output,
|
|
41
|
+
latency,
|
|
42
|
+
baseURL,
|
|
43
|
+
params,
|
|
44
|
+
httpStatus = 200,
|
|
45
|
+
usage = {}
|
|
46
|
+
}) => {
|
|
47
|
+
if (client.capture) {
|
|
48
|
+
client.capture({
|
|
49
|
+
distinctId: distinctId ?? traceId,
|
|
50
|
+
event: '$ai_generation',
|
|
51
|
+
properties: {
|
|
52
|
+
$ai_provider: provider,
|
|
53
|
+
$ai_model: model,
|
|
54
|
+
$ai_model_parameters: getModelParams(params),
|
|
55
|
+
$ai_input: withPrivacyMode(client, params.posthog_privacy_mode ?? false, input),
|
|
56
|
+
$ai_output_choices: withPrivacyMode(client, params.posthog_privacy_mode ?? false, output),
|
|
57
|
+
$ai_http_status: httpStatus,
|
|
58
|
+
$ai_input_tokens: usage.input_tokens ?? 0,
|
|
59
|
+
$ai_output_tokens: usage.output_tokens ?? 0,
|
|
60
|
+
$ai_latency: latency,
|
|
61
|
+
$ai_trace_id: traceId,
|
|
62
|
+
$ai_base_url: baseURL,
|
|
63
|
+
...params.posthog_properties,
|
|
64
|
+
...(distinctId ? {} : {
|
|
65
|
+
$process_person_profile: false
|
|
66
|
+
})
|
|
67
|
+
},
|
|
68
|
+
groups: params.posthog_groups
|
|
69
|
+
});
|
|
70
|
+
}
|
|
71
|
+
};
|
|
72
|
+
|
|
73
|
+
class PostHogOpenAI extends OpenAIOrignal {
|
|
74
|
+
constructor(config) {
|
|
75
|
+
const {
|
|
76
|
+
posthog,
|
|
77
|
+
...openAIConfig
|
|
78
|
+
} = config;
|
|
79
|
+
super(openAIConfig);
|
|
80
|
+
this.phClient = posthog;
|
|
81
|
+
this.chat = new WrappedChat(this, this.phClient);
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
class WrappedChat extends OpenAIOrignal.Chat {
|
|
85
|
+
constructor(parentClient, phClient) {
|
|
86
|
+
super(parentClient);
|
|
87
|
+
this.completions = new WrappedCompletions(parentClient, phClient);
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
|
|
91
|
+
constructor(client, phClient) {
|
|
92
|
+
super(client);
|
|
93
|
+
this.phClient = phClient;
|
|
94
|
+
}
|
|
95
|
+
// --- Implementation Signature
|
|
96
|
+
create(body, options) {
|
|
97
|
+
const {
|
|
98
|
+
posthog_distinct_id,
|
|
99
|
+
posthog_trace_id,
|
|
100
|
+
posthog_properties,
|
|
101
|
+
posthog_privacy_mode = false,
|
|
102
|
+
posthog_groups,
|
|
103
|
+
...openAIParams
|
|
104
|
+
} = body;
|
|
105
|
+
const traceId = posthog_trace_id ?? v4();
|
|
106
|
+
const startTime = Date.now();
|
|
107
|
+
const parentPromise = super.create(openAIParams, options);
|
|
108
|
+
if (openAIParams.stream) {
|
|
109
|
+
return parentPromise.then(value => {
|
|
110
|
+
const passThroughStream = new PassThrough({
|
|
111
|
+
objectMode: true
|
|
112
|
+
});
|
|
113
|
+
let accumulatedContent = '';
|
|
114
|
+
let usage = {
|
|
115
|
+
input_tokens: 0,
|
|
116
|
+
output_tokens: 0
|
|
117
|
+
};
|
|
118
|
+
if ('tee' in value) {
|
|
119
|
+
const openAIStream = value;
|
|
120
|
+
(async () => {
|
|
121
|
+
try {
|
|
122
|
+
for await (const chunk of openAIStream) {
|
|
123
|
+
const delta = chunk?.choices?.[0]?.delta?.content ?? '';
|
|
124
|
+
accumulatedContent += delta;
|
|
125
|
+
if (chunk.usage) {
|
|
126
|
+
usage = {
|
|
127
|
+
input_tokens: chunk.usage.prompt_tokens ?? 0,
|
|
128
|
+
output_tokens: chunk.usage.completion_tokens ?? 0
|
|
129
|
+
};
|
|
130
|
+
}
|
|
131
|
+
passThroughStream.write(chunk);
|
|
132
|
+
}
|
|
133
|
+
const latency = (Date.now() - startTime) / 1000;
|
|
134
|
+
sendEventToPosthog({
|
|
135
|
+
client: this.phClient,
|
|
136
|
+
distinctId: posthog_distinct_id ?? traceId,
|
|
137
|
+
traceId,
|
|
138
|
+
model: openAIParams.model,
|
|
139
|
+
provider: 'openai',
|
|
140
|
+
input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
|
|
141
|
+
output: [{
|
|
142
|
+
content: accumulatedContent,
|
|
143
|
+
role: 'assistant'
|
|
144
|
+
}],
|
|
145
|
+
latency,
|
|
146
|
+
baseURL: this.baseURL ?? '',
|
|
147
|
+
params: body,
|
|
148
|
+
httpStatus: 200,
|
|
149
|
+
usage
|
|
150
|
+
});
|
|
151
|
+
passThroughStream.end();
|
|
152
|
+
} catch (error) {
|
|
153
|
+
// error handling
|
|
154
|
+
sendEventToPosthog({
|
|
155
|
+
client: this.phClient,
|
|
156
|
+
distinctId: posthog_distinct_id ?? traceId,
|
|
157
|
+
traceId,
|
|
158
|
+
model: openAIParams.model,
|
|
159
|
+
provider: 'openai',
|
|
160
|
+
input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
|
|
161
|
+
output: [],
|
|
162
|
+
latency: 0,
|
|
163
|
+
baseURL: this.baseURL ?? '',
|
|
164
|
+
params: body,
|
|
165
|
+
httpStatus: 500,
|
|
166
|
+
usage: {
|
|
167
|
+
input_tokens: 0,
|
|
168
|
+
output_tokens: 0
|
|
169
|
+
}
|
|
170
|
+
});
|
|
171
|
+
passThroughStream.emit('error', error);
|
|
172
|
+
}
|
|
173
|
+
})();
|
|
174
|
+
}
|
|
175
|
+
return passThroughStream;
|
|
176
|
+
});
|
|
177
|
+
} else {
|
|
178
|
+
const wrappedPromise = parentPromise.then(result => {
|
|
179
|
+
if ('choices' in result) {
|
|
180
|
+
const latency = (Date.now() - startTime) / 1000;
|
|
181
|
+
sendEventToPosthog({
|
|
182
|
+
client: this.phClient,
|
|
183
|
+
distinctId: posthog_distinct_id ?? traceId,
|
|
184
|
+
traceId,
|
|
185
|
+
model: openAIParams.model,
|
|
186
|
+
provider: 'openai',
|
|
187
|
+
input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
|
|
188
|
+
output: [{
|
|
189
|
+
content: result.choices[0].message.content,
|
|
190
|
+
role: 'assistant'
|
|
191
|
+
}],
|
|
192
|
+
latency,
|
|
193
|
+
baseURL: this.baseURL ?? '',
|
|
194
|
+
params: body,
|
|
195
|
+
httpStatus: 200,
|
|
196
|
+
usage: {
|
|
197
|
+
input_tokens: result.usage?.prompt_tokens ?? 0,
|
|
198
|
+
output_tokens: result.usage?.completion_tokens ?? 0
|
|
199
|
+
}
|
|
200
|
+
});
|
|
201
|
+
}
|
|
202
|
+
return result;
|
|
203
|
+
}, error => {
|
|
204
|
+
sendEventToPosthog({
|
|
205
|
+
client: this.phClient,
|
|
206
|
+
distinctId: posthog_distinct_id ?? traceId,
|
|
207
|
+
traceId,
|
|
208
|
+
model: openAIParams.model,
|
|
209
|
+
provider: 'openai',
|
|
210
|
+
input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
|
|
211
|
+
output: [],
|
|
212
|
+
latency: 0,
|
|
213
|
+
baseURL: this.baseURL ?? '',
|
|
214
|
+
params: body,
|
|
215
|
+
httpStatus: 500,
|
|
216
|
+
usage: {
|
|
217
|
+
input_tokens: 0,
|
|
218
|
+
output_tokens: 0
|
|
219
|
+
}
|
|
220
|
+
});
|
|
221
|
+
throw error;
|
|
222
|
+
});
|
|
223
|
+
return wrappedPromise;
|
|
224
|
+
}
|
|
225
|
+
}
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
const createInstrumentationMiddleware = (phClient, model, options) => {
|
|
229
|
+
const middleware = {
|
|
230
|
+
wrapGenerate: async ({
|
|
231
|
+
doGenerate,
|
|
232
|
+
params
|
|
233
|
+
}) => {
|
|
234
|
+
const startTime = Date.now();
|
|
235
|
+
try {
|
|
236
|
+
const result = await doGenerate();
|
|
237
|
+
const latency = (Date.now() - startTime) / 1000;
|
|
238
|
+
sendEventToPosthog({
|
|
239
|
+
client: phClient,
|
|
240
|
+
distinctId: options.posthogDistinctId,
|
|
241
|
+
traceId: options.posthogTraceId,
|
|
242
|
+
model: model.modelId,
|
|
243
|
+
provider: 'vercel',
|
|
244
|
+
input: options.posthogPrivacyMode ? '' : params.prompt,
|
|
245
|
+
output: [{
|
|
246
|
+
content: result.text,
|
|
247
|
+
role: 'assistant'
|
|
248
|
+
}],
|
|
249
|
+
latency,
|
|
250
|
+
baseURL: '',
|
|
251
|
+
params: {
|
|
252
|
+
posthog_properties: options
|
|
253
|
+
},
|
|
254
|
+
httpStatus: 200,
|
|
255
|
+
usage: {
|
|
256
|
+
input_tokens: result.usage.promptTokens,
|
|
257
|
+
output_tokens: result.usage.completionTokens
|
|
258
|
+
}
|
|
259
|
+
});
|
|
260
|
+
return result;
|
|
261
|
+
} catch (error) {
|
|
262
|
+
sendEventToPosthog({
|
|
263
|
+
client: phClient,
|
|
264
|
+
distinctId: options.posthogDistinctId,
|
|
265
|
+
traceId: options.posthogTraceId,
|
|
266
|
+
model: model.modelId,
|
|
267
|
+
provider: 'vercel',
|
|
268
|
+
input: options.posthogPrivacyMode ? '' : params.prompt,
|
|
269
|
+
output: [],
|
|
270
|
+
latency: 0,
|
|
271
|
+
baseURL: '',
|
|
272
|
+
params: {
|
|
273
|
+
posthog_properties: options
|
|
274
|
+
},
|
|
275
|
+
httpStatus: 500,
|
|
276
|
+
usage: {
|
|
277
|
+
input_tokens: 0,
|
|
278
|
+
output_tokens: 0
|
|
279
|
+
}
|
|
280
|
+
});
|
|
281
|
+
throw error;
|
|
282
|
+
}
|
|
283
|
+
},
|
|
284
|
+
wrapStream: async ({
|
|
285
|
+
doStream,
|
|
286
|
+
params
|
|
287
|
+
}) => {
|
|
288
|
+
const startTime = Date.now();
|
|
289
|
+
let generatedText = '';
|
|
290
|
+
let usage = {};
|
|
291
|
+
try {
|
|
292
|
+
const {
|
|
293
|
+
stream,
|
|
294
|
+
...rest
|
|
295
|
+
} = await doStream();
|
|
296
|
+
const transformStream = new TransformStream({
|
|
297
|
+
transform(chunk, controller) {
|
|
298
|
+
if (chunk.type === 'text-delta') {
|
|
299
|
+
generatedText += chunk.textDelta;
|
|
300
|
+
}
|
|
301
|
+
if (chunk.type === 'finish') {
|
|
302
|
+
usage = {
|
|
303
|
+
input_tokens: chunk.usage?.promptTokens,
|
|
304
|
+
output_tokens: chunk.usage?.completionTokens
|
|
305
|
+
};
|
|
306
|
+
}
|
|
307
|
+
controller.enqueue(chunk);
|
|
308
|
+
},
|
|
309
|
+
flush() {
|
|
310
|
+
const latency = (Date.now() - startTime) / 1000;
|
|
311
|
+
sendEventToPosthog({
|
|
312
|
+
client: phClient,
|
|
313
|
+
distinctId: options.posthogDistinctId,
|
|
314
|
+
traceId: options.posthogTraceId,
|
|
315
|
+
model: model.modelId,
|
|
316
|
+
provider: 'vercel',
|
|
317
|
+
input: options.posthogPrivacyMode ? '' : params.prompt,
|
|
318
|
+
output: [{
|
|
319
|
+
content: generatedText,
|
|
320
|
+
role: 'assistant'
|
|
321
|
+
}],
|
|
322
|
+
latency,
|
|
323
|
+
baseURL: '',
|
|
324
|
+
params: {
|
|
325
|
+
posthog_properties: options
|
|
326
|
+
},
|
|
327
|
+
httpStatus: 200,
|
|
328
|
+
usage
|
|
329
|
+
});
|
|
330
|
+
}
|
|
331
|
+
});
|
|
332
|
+
return {
|
|
333
|
+
stream: stream.pipeThrough(transformStream),
|
|
334
|
+
...rest
|
|
335
|
+
};
|
|
336
|
+
} catch (error) {
|
|
337
|
+
sendEventToPosthog({
|
|
338
|
+
client: phClient,
|
|
339
|
+
distinctId: options.posthogDistinctId,
|
|
340
|
+
traceId: options.posthogTraceId,
|
|
341
|
+
model: model.modelId,
|
|
342
|
+
provider: 'vercel',
|
|
343
|
+
input: options.posthogPrivacyMode ? '' : params.prompt,
|
|
344
|
+
output: [],
|
|
345
|
+
latency: 0,
|
|
346
|
+
baseURL: '',
|
|
347
|
+
params: {
|
|
348
|
+
posthog_properties: options
|
|
349
|
+
},
|
|
350
|
+
httpStatus: 500,
|
|
351
|
+
usage: {
|
|
352
|
+
input_tokens: 0,
|
|
353
|
+
output_tokens: 0
|
|
354
|
+
}
|
|
355
|
+
});
|
|
356
|
+
throw error;
|
|
357
|
+
}
|
|
358
|
+
}
|
|
359
|
+
};
|
|
360
|
+
return middleware;
|
|
361
|
+
};
|
|
362
|
+
const wrapVercelLanguageModel = (model, phClient, options) => {
|
|
363
|
+
const traceId = options.posthogTraceId ?? v4();
|
|
364
|
+
const middleware = createInstrumentationMiddleware(phClient, model, {
|
|
365
|
+
...options,
|
|
366
|
+
posthogTraceId: traceId,
|
|
367
|
+
posthogDistinctId: options.posthogDistinctId ?? traceId
|
|
368
|
+
});
|
|
369
|
+
const wrappedModel = experimental_wrapLanguageModel({
|
|
370
|
+
model,
|
|
371
|
+
middleware
|
|
372
|
+
});
|
|
373
|
+
return wrappedModel;
|
|
374
|
+
};
|
|
375
|
+
|
|
376
|
+
export { PostHogOpenAI as OpenAI, wrapVercelLanguageModel as withTracing };
|
|
377
|
+
//# sourceMappingURL=index.esm.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.esm.js","sources":["../src/utils.ts","../src/openai/index.ts","../src/vercel/middleware.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport OpenAIOrignal from 'openai'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\n\nexport interface MonitoringParams {\n posthog_distinct_id?: string\n posthog_trace_id?: string\n posthog_properties?: Record<string, any>\n posthog_privacy_mode?: boolean\n posthog_groups?: Record<string, any>\n}\n\nexport const getModelParams = (params: ChatCompletionCreateParamsBase & MonitoringParams): Record<string, any> => {\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\nexport const getUsage = (response: any, provider: string): { input_tokens: number; output_tokens: number } => {\n if (!response?.usage) {\n return { input_tokens: 0, output_tokens: 0 }\n }\n\n if (provider === 'anthropic') {\n return {\n input_tokens: response.usage.input_tokens ?? 0,\n output_tokens: response.usage.output_tokens ?? 0,\n }\n } else if (provider === 'openai') {\n return {\n input_tokens: response.usage.prompt_tokens ?? 0,\n output_tokens: response.usage.completion_tokens ?? 0,\n }\n }\n\n return { input_tokens: 0, output_tokens: 0 }\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. 
anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): Array<{ role: string; content: string }> => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): Array<{ role: string; content: string }> => {\n // Example approach if \"response.content\" holds array of text segments, etc.\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.content ?? []) {\n if (choice?.text) {\n output.push({\n role: 'assistant',\n content: choice.text,\n })\n }\n }\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): Array<{ role: string; content: string }> => {\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.choices ?? []) {\n if (choice.message?.content) {\n output.push({\n role: choice.message.role,\n content: choice.message.content,\n })\n }\n }\n return output\n}\n\nexport const mergeSystemPrompt = (params: ChatCompletionCreateParamsBase & MonitoringParams, provider: string): any => {\n if (provider !== 'anthropic') {\n return params.messages\n }\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = (params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? 
null : input\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: { input_tokens?: number; output_tokens?: number }\n params: ChatCompletionCreateParamsBase & MonitoringParams\n}\n\nexport const sendEventToPosthog = ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n}: SendEventToPosthogParams): void => {\n if (client.capture) {\n client.capture({\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties: {\n $ai_provider: provider,\n $ai_model: model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthog_privacy_mode ?? false, input),\n $ai_output_choices: withPrivacyMode(client, params.posthog_privacy_mode ?? false, output),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.input_tokens ?? 0,\n $ai_output_tokens: usage.output_tokens ?? 0,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthog_properties,\n ...(distinctId ? 
{} : { $process_person_profile: false }),\n },\n groups: params.posthog_groups,\n })\n }\n}\n","import OpenAIOrignal from 'openai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { PassThrough } from 'stream'\nimport { mergeSystemPrompt, MonitoringParams, sendEventToPosthog } from '../utils'\n\ntype ChatCompletion = OpenAIOrignal.ChatCompletion\ntype ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming\ntype ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming\nimport type { APIPromise, RequestOptions } from 'openai/core'\nimport type { Stream } from 'openai/streaming'\n\ninterface MonitoringOpenAIConfig {\n apiKey: string\n posthog: PostHog\n baseURL?: string\n}\n\nexport class PostHogOpenAI extends OpenAIOrignal {\n private readonly phClient: PostHog\n\n constructor(config: MonitoringOpenAIConfig) {\n const { posthog, ...openAIConfig } = config\n super(openAIConfig)\n this.phClient = posthog\n this.chat = new WrappedChat(this, this.phClient)\n }\n\n public chat: WrappedChat\n}\n\nexport class WrappedChat extends OpenAIOrignal.Chat {\n constructor(parentClient: PostHogOpenAI, phClient: PostHog) {\n super(parentClient)\n this.completions = new WrappedCompletions(parentClient, phClient)\n }\n\n public completions: WrappedCompletions\n}\n\nexport class WrappedCompletions extends OpenAIOrignal.Chat.Completions {\n private readonly phClient: PostHog\n\n constructor(client: OpenAIOrignal, phClient: PostHog) {\n super(client)\n this.phClient = phClient\n }\n\n // --- Overload #1: Non-streaming\n public create(\n body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion>\n\n // --- Overload #2: Streaming\n 
public create(\n body: ChatCompletionCreateParamsStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<Stream<ChatCompletionChunk>>\n\n // --- Overload #3: Generic base\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>\n\n // --- Implementation Signature\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {\n const {\n posthog_distinct_id,\n posthog_trace_id,\n posthog_properties,\n posthog_privacy_mode = false,\n posthog_groups,\n ...openAIParams\n } = body\n\n const traceId = posthog_trace_id ?? uuidv4()\n const startTime = Date.now()\n\n const parentPromise = super.create(openAIParams, options)\n\n if (openAIParams.stream) {\n return parentPromise.then((value) => {\n const passThroughStream = new PassThrough({ objectMode: true })\n let accumulatedContent = ''\n let usage: { input_tokens: number; output_tokens: number } = {\n input_tokens: 0,\n output_tokens: 0,\n }\n if ('tee' in value) {\n const openAIStream = value\n ;(async () => {\n try {\n for await (const chunk of openAIStream) {\n const delta = chunk?.choices?.[0]?.delta?.content ?? ''\n accumulatedContent += delta\n if (chunk.usage) {\n usage = {\n input_tokens: chunk.usage.prompt_tokens ?? 0,\n output_tokens: chunk.usage.completion_tokens ?? 0,\n }\n }\n passThroughStream.write(chunk)\n }\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthog_distinct_id ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? 
'',\n params: body,\n httpStatus: 200,\n usage,\n })\n passThroughStream.end()\n } catch (error) {\n // error handling\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthog_distinct_id ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n passThroughStream.emit('error', error)\n }\n })()\n }\n return passThroughStream as unknown as Stream<ChatCompletionChunk>\n }) as APIPromise<Stream<ChatCompletionChunk>>\n } else {\n const wrappedPromise = parentPromise.then(\n (result) => {\n if ('choices' in result) {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthog_distinct_id ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [{ content: result.choices[0].message.content, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage: {\n input_tokens: result.usage?.prompt_tokens ?? 0,\n output_tokens: result.usage?.completion_tokens ?? 0,\n },\n })\n }\n return result\n },\n (error) => {\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthog_distinct_id ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthog_privacy_mode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? 
'',\n params: body,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n throw error\n }\n ) as APIPromise<ChatCompletion>\n\n return wrappedPromise\n }\n }\n}\n\nexport default PostHogOpenAI\n","import { experimental_wrapLanguageModel as wrapLanguageModel } from 'ai'\nimport type {\n LanguageModelV1,\n Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware,\n LanguageModelV1StreamPart,\n} from 'ai'\nimport { v4 as uuidv4 } from 'uuid'\nimport type { PostHog } from 'posthog-node'\nimport { sendEventToPosthog } from '../utils'\n\ninterface CreateInstrumentationMiddlewareOptions {\n posthogDistinctId?: string\n posthogTraceId: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: string[]\n}\n\nexport const createInstrumentationMiddleware = (\n phClient: PostHog,\n model: LanguageModelV1,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1Middleware => {\n const middleware: LanguageModelV1Middleware = {\n wrapGenerate: async ({ doGenerate, params }) => {\n const startTime = Date.now()\n\n try {\n const result = await doGenerate()\n const latency = (Date.now() - startTime) / 1000\n\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : params.prompt,\n output: [{ content: result.text, role: 'assistant' }],\n latency,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 200,\n usage: {\n input_tokens: result.usage.promptTokens,\n output_tokens: result.usage.completionTokens,\n },\n })\n\n return result\n } catch (error) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? 
'' : params.prompt,\n output: [],\n latency: 0,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n throw error\n }\n },\n\n wrapStream: async ({ doStream, params }) => {\n const startTime = Date.now()\n let generatedText = ''\n let usage: { input_tokens?: number; output_tokens?: number } = {}\n\n try {\n const { stream, ...rest } = await doStream()\n\n const transformStream = new TransformStream<LanguageModelV1StreamPart, LanguageModelV1StreamPart>({\n transform(chunk, controller) {\n if (chunk.type === 'text-delta') {\n generatedText += chunk.textDelta\n }\n if (chunk.type === 'finish') {\n usage = {\n input_tokens: chunk.usage?.promptTokens,\n output_tokens: chunk.usage?.completionTokens,\n }\n }\n controller.enqueue(chunk)\n },\n\n flush() {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : params.prompt,\n output: [{ content: generatedText, role: 'assistant' }],\n latency,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 200,\n usage,\n })\n },\n })\n\n return {\n stream: stream.pipeThrough(transformStream),\n ...rest,\n }\n } catch (error) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? 
'' : params.prompt,\n output: [],\n latency: 0,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n throw error\n }\n },\n }\n\n return middleware\n}\n\nexport const wrapVercelLanguageModel = (\n model: LanguageModelV1,\n phClient: PostHog,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1 => {\n const traceId = options.posthogTraceId ?? uuidv4()\n const middleware = createInstrumentationMiddleware(phClient, model, {\n ...options,\n posthogTraceId: traceId,\n posthogDistinctId: options.posthogDistinctId ?? traceId,\n })\n\n const wrappedModel = wrapLanguageModel({\n model,\n middleware,\n })\n\n return wrappedModel\n}\n"],"names":["getModelParams","params","modelParams","paramKeys","key","undefined","mergeSystemPrompt","provider","messages","system","systemMessage","role","content","withPrivacyMode","client","privacyMode","input","privacy_mode","sendEventToPosthog","distinctId","traceId","model","output","latency","baseURL","httpStatus","usage","capture","event","properties","$ai_provider","$ai_model","$ai_model_parameters","$ai_input","posthog_privacy_mode","$ai_output_choices","$ai_http_status","$ai_input_tokens","input_tokens","$ai_output_tokens","output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthog_properties","$process_person_profile","groups","posthog_groups","PostHogOpenAI","OpenAIOrignal","constructor","config","posthog","openAIConfig","phClient","chat","WrappedChat","Chat","parentClient","completions","WrappedCompletions","Completions","create","body","options","posthog_distinct_id","posthog_trace_id","openAIParams","uuidv4","startTime","Date","now","parentPromise","stream","then","value","passThroughStream","PassThrough","objectMode","accumulatedContent","openAIStream","chunk","delta","choices","prompt_tokens","completion_tokens","write","end","error","emit","wrappedPromise","result","message","createInstrumentationMiddleware","m
iddleware","wrapGenerate","doGenerate","posthogDistinctId","posthogTraceId","modelId","posthogPrivacyMode","prompt","text","promptTokens","completionTokens","wrapStream","doStream","generatedText","rest","transformStream","TransformStream","transform","controller","type","textDelta","enqueue","flush","pipeThrough","wrapVercelLanguageModel","wrappedModel","wrapLanguageModel"],"mappings":";;;;;AAaO,MAAMA,cAAc,GAAIC,MAAyD,IAAyB;EAC/G,MAAMC,WAAW,GAAwB,EAAE,CAAA;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH,CAAA;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAc,CAACG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAc,CAACG,GAAG,CAAC,CAAA;AACxC,KAAA;AACF,GAAA;AACD,EAAA,OAAOF,WAAW,CAAA;AACpB,CAAC,CAAA;AAgEM,MAAMI,iBAAiB,GAAGA,CAACL,MAAyD,EAAEM,QAAgB,KAAS;EACpH,IAAIA,QAAQ,KAAK,WAAW,EAAE;IAC5B,OAAON,MAAM,CAACO,QAAQ,CAAA;AACvB,GAAA;AACD,EAAA,MAAMA,QAAQ,GAAGP,MAAM,CAACO,QAAQ,IAAI,EAAE,CAAA;AACtC,EAAA,IAAI,CAAEP,MAAc,CAACQ,MAAM,EAAE;AAC3B,IAAA,OAAOD,QAAQ,CAAA;AAChB,GAAA;AACD,EAAA,MAAME,aAAa,GAAIT,MAAc,CAACQ,MAAM,CAAA;AAC5C,EAAA,OAAO,CAAC;AAAEE,IAAAA,IAAI,EAAE,QAAQ;AAAEC,IAAAA,OAAO,EAAEF,aAAAA;GAAe,EAAE,GAAGF,QAAQ,CAAC,CAAA;AAClE,CAAC,CAAA;AAEM,MAAMK,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAS;EACxF,OAAQF,MAAc,CAACG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK,CAAA;AACnE,CAAC,CAAA;AAiBM,MAAME,kBAAkB,GAAGA,CAAC;EACjCJ,MAAM;EACNK,UAAU;EACVC,OAAO;EACPC,KAAK;EACLd,QAAQ;EACRS,KAAK;EACLM,MAAM;EACNC,OAAO;EACPC,OAAO;EACPvB,MAAM;AACNwB,EAAAA,UAAU,GAAG,GAAG;AAChBC,EAAAA,KAAK,GAAG,EAAE;AAAA,CACe,KAAU;EACnC,IAAIZ,MAAM,CAACa,OAAO,EAAE;IAClBb,MAAM,CAACa,OAAO,CAAC;MACbR,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCQ,MAAAA,KAAK,EAAE,gBAAgB;AACvBC,MAAAA,UAAU,EAAE;AACVC,QAAAA,YAAY,EAAEvB,QAAQ;AACtBwB,QAAAA,SAAS,EAAEV,KAAK;AAChBW,QAAAA,oBAAoB,EAAEhC,cAAc,CAACC,MAAM,CAAC;AAC5CgC,QAAAA,SAAS,EAAEpB,eAAe,CAACC,MAAM,EAAEb,MAAM,CAACiC,oBAAoB,IAAI,KAAK,EAAElB,KAAK,CAAC;AAC/EmB,QA
AAA,kBAAkB,EAAEtB,eAAe,CAACC,MAAM,EAAEb,MAAM,CAACiC,oBAAoB,IAAI,KAAK,EAAEZ,MAAM,CAAC;AACzFc,QAAAA,eAAe,EAAEX,UAAU;AAC3BY,QAAAA,gBAAgB,EAAEX,KAAK,CAACY,YAAY,IAAI,CAAC;AACzCC,QAAAA,iBAAiB,EAAEb,KAAK,CAACc,aAAa,IAAI,CAAC;AAC3CC,QAAAA,WAAW,EAAElB,OAAO;AACpBmB,QAAAA,YAAY,EAAEtB,OAAO;AACrBuB,QAAAA,YAAY,EAAEnB,OAAO;QACrB,GAAGvB,MAAM,CAAC2C,kBAAkB;AAC5B,QAAA,IAAIzB,UAAU,GAAG,EAAE,GAAG;AAAE0B,UAAAA,uBAAuB,EAAE,KAAA;SAAO,CAAA;OACzD;MACDC,MAAM,EAAE7C,MAAM,CAAC8C,cAAAA;AAChB,KAAA,CAAC,CAAA;AACH,GAAA;AACH,CAAC;;ACjJK,MAAOC,aAAc,SAAQC,aAAa,CAAA;EAG9CC,WAAAA,CAAYC,MAA8B,EAAA;IACxC,MAAM;MAAEC,OAAO;MAAE,GAAGC,YAAAA;AAAc,KAAA,GAAGF,MAAM,CAAA;IAC3C,KAAK,CAACE,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,QAAQ,GAAGF,OAAO,CAAA;IACvB,IAAI,CAACG,IAAI,GAAG,IAAIC,WAAW,CAAC,IAAI,EAAE,IAAI,CAACF,QAAQ,CAAC,CAAA;AAClD,GAAA;AAGD,CAAA;AAEY,MAAAE,WAAY,SAAQP,aAAa,CAACQ,IAAI,CAAA;AACjDP,EAAAA,WAAYA,CAAAQ,YAA2B,EAAEJ,QAAiB,EAAA;IACxD,KAAK,CAACI,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,WAAW,GAAG,IAAIC,kBAAkB,CAACF,YAAY,EAAEJ,QAAQ,CAAC,CAAA;AACnE,GAAA;AAGD,CAAA;MAEYM,kBAAmB,SAAQX,aAAa,CAACQ,IAAI,CAACI,WAAW,CAAA;AAGpEX,EAAAA,WAAYA,CAAApC,MAAqB,EAAEwC,QAAiB,EAAA;IAClD,KAAK,CAACxC,MAAM,CAAC,CAAA;IACb,IAAI,CAACwC,QAAQ,GAAGA,QAAQ,CAAA;AAC1B,GAAA;AAoBA;AACOQ,EAAAA,MAAMA,CACXC,IAAuD,EACvDC,OAAwB,EAAA;IAExB,MAAM;MACJC,mBAAmB;MACnBC,gBAAgB;MAChBtB,kBAAkB;AAClBV,MAAAA,oBAAoB,GAAG,KAAK;MAC5Ba,cAAc;MACd,GAAGoB,YAAAA;AACJ,KAAA,GAAGJ,IAAI,CAAA;AAER,IAAA,MAAM3C,OAAO,GAAG8C,gBAAgB,IAAIE,EAAM,EAAE,CAAA;AAC5C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;IAE5B,MAAMC,aAAa,GAAG,KAAK,CAACV,MAAM,CAACK,YAAY,EAAEH,OAAO,CAAC,CAAA;IAEzD,IAAIG,YAAY,CAACM,MAAM,EAAE;AACvB,MAAA,OAAOD,aAAa,CAACE,IAAI,CAAEC,KAAK,IAAI;AAClC,QAAA,MAAMC,iBAAiB,GAAG,IAAIC,WAAW,CAAC;AAAEC,UAAAA,UAAU,EAAE,IAAA;AAAM,SAAA,CAAC,CAAA;QAC/D,IAAIC,kBAAkB,GAAG,EAAE,CAAA;AAC3B,QAAA,IAAIrD,KAAK,GAAoD;AAC3DY,UAAAA,YAAY,EAAE,CAAC;AACfE,UAAAA,aAAa,EAAE,CAAA;SAChB,CAAA;QACD,IAAI,KAAK,IAAImC,KAAK,EAAE;UAClB,MAAMK,YAAY,GAAGL,KAAK,CAAA;AACzB,UAAA,CAAC,YAAW;YACX,IAAI;AACF,cAAA,WAAW,MAAMM,KAAK,
IAAID,YAAY,EAAE;AACtC,gBAAA,MAAME,KAAK,GAAGD,KAAK,EAAEE,OAAO,GAAG,CAAC,CAAC,EAAED,KAAK,EAAEtE,OAAO,IAAI,EAAE,CAAA;AACvDmE,gBAAAA,kBAAkB,IAAIG,KAAK,CAAA;gBAC3B,IAAID,KAAK,CAACvD,KAAK,EAAE;AACfA,kBAAAA,KAAK,GAAG;AACNY,oBAAAA,YAAY,EAAE2C,KAAK,CAACvD,KAAK,CAAC0D,aAAa,IAAI,CAAC;AAC5C5C,oBAAAA,aAAa,EAAEyC,KAAK,CAACvD,KAAK,CAAC2D,iBAAiB,IAAI,CAAA;mBACjD,CAAA;AACF,iBAAA;AACDT,gBAAAA,iBAAiB,CAACU,KAAK,CAACL,KAAK,CAAC,CAAA;AAC/B,eAAA;cACD,MAAM1D,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CnD,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;gBACrBnC,UAAU,EAAE8C,mBAAmB,IAAI7C,OAAO;gBAC1CA,OAAO;gBACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBS,KAAK,EAAEkB,oBAAoB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC5E7C,gBAAAA,MAAM,EAAE,CAAC;AAAEV,kBAAAA,OAAO,EAAEmE,kBAAkB;AAAEpE,kBAAAA,IAAI,EAAE,WAAA;AAAW,iBAAE,CAAC;gBAC5DY,OAAO;AACPC,gBAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,gBAAAA,MAAM,EAAE8D,IAAI;AACZtC,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAAA;AACD,eAAA,CAAC,CAAA;cACFkD,iBAAiB,CAACW,GAAG,EAAE,CAAA;aACxB,CAAC,OAAOC,KAAK,EAAE;AACd;AACAtE,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;gBACrBnC,UAAU,EAAE8C,mBAAmB,IAAI7C,OAAO;gBAC1CA,OAAO;gBACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBS,KAAK,EAAEkB,oBAAoB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC5E7C,gBAAAA,MAAM,EAAE,EAAE;AACVC,gBAAAA,OAAO,EAAE,CAAC;AACVC,gBAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,gBAAAA,MAAM,EAAE8D,IAAI;AACZtC,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAK,EAAE;AACLY,kBAAAA,YAAY,EAAE,CAAC;AACfE,kBAAAA,aAAa,EAAE,CAAA;AAChB,iBAAA;AACF,eAAA,CAAC,CAAA;AACFoC,cAAAA,iBAAiB,CAACa,IAAI,CAAC,OAAO,EAAED,KAAK,CAAC,CAAA;AACvC,aAAA;AACH,WAAC,GAAG,CAAA;AACL,SAAA;AACD,QAAA,OAAOZ,iBAA2D,CAAA;AACpE,OAAC,CAA4C,CAAA;AAC9C,KAAA,MAAM;AACL,MAAA,MAAMc,cAAc,GAAGlB,aAAa,CAACE,IAAI,CACtCiB,MAAM,IAAI;QACT,IAAI,SAAS,IAAIA,MAAM,EAAE;UACvB,MAAMpE,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CnD,UAAAA,kBAAkB,CAAC;YAC
jBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;YACrBnC,UAAU,EAAE8C,mBAAmB,IAAI7C,OAAO;YAC1CA,OAAO;YACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,YAAAA,QAAQ,EAAE,QAAQ;YAClBS,KAAK,EAAEkB,oBAAoB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC5E7C,YAAAA,MAAM,EAAE,CAAC;cAAEV,OAAO,EAAE+E,MAAM,CAACR,OAAO,CAAC,CAAC,CAAC,CAACS,OAAO,CAAChF,OAAO;AAAED,cAAAA,IAAI,EAAE,WAAA;aAAa,CAAC;YAC3EY,OAAO;AACPC,YAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,YAAAA,MAAM,EAAE8D,IAAI;AACZtC,YAAAA,UAAU,EAAE,GAAG;AACfC,YAAAA,KAAK,EAAE;AACLY,cAAAA,YAAY,EAAEqD,MAAM,CAACjE,KAAK,EAAE0D,aAAa,IAAI,CAAC;AAC9C5C,cAAAA,aAAa,EAAEmD,MAAM,CAACjE,KAAK,EAAE2D,iBAAiB,IAAI,CAAA;AACnD,aAAA;AACF,WAAA,CAAC,CAAA;AACH,SAAA;AACD,QAAA,OAAOM,MAAM,CAAA;OACd,EACAH,KAAK,IAAI;AACRtE,QAAAA,kBAAkB,CAAC;UACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;UACrBnC,UAAU,EAAE8C,mBAAmB,IAAI7C,OAAO;UAC1CA,OAAO;UACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEkB,oBAAoB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC5E7C,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,UAAAA,MAAM,EAAE8D,IAAI;AACZtC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAE,CAAC;AACfE,YAAAA,aAAa,EAAE,CAAA;AAChB,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACb,OAAC,CAC4B,CAAA;AAE/B,MAAA,OAAOE,cAAc,CAAA;AACtB,KAAA;AACH,GAAA;AACD;;ACvLM,MAAMG,+BAA+B,GAAGA,CAC7CvC,QAAiB,EACjBjC,KAAsB,EACtB2C,OAA+C,KAClB;AAC7B,EAAA,MAAM8B,UAAU,GAA8B;IAC5CC,YAAY,EAAE,OAAO;MAAEC,UAAU;AAAE/F,MAAAA,MAAAA;AAAQ,KAAA,KAAI;AAC7C,MAAA,MAAMoE,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;MAE5B,IAAI;AACF,QAAA,MAAMoB,MAAM,GAAG,MAAMK,UAAU,EAAE,CAAA;QACjC,MAAMzE,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAE/CnD,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAEwC,QAAQ;UAChBnC,UAAU,EAAE6C,OAAO,CAACiC,iBAAiB;UACrC7E,OAAO,EAAE4C,OAAO,CAACkC,cAAc;UAC/B7E,KAAK,EAAEA,KAAK,CAAC8E,OAAO;AACpB5F,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEgD,OAAO,CAACoC,kBAAkB,GAAG,EAAE,GAAGnG,MAAM,CAACoG,MAAM;AACtD/E,UAAAA,MAAM,EAAE,CAAC;Y
AAEV,OAAO,EAAE+E,MAAM,CAACW,IAAI;AAAE3F,YAAAA,IAAI,EAAE,WAAA;WAAa,CAAC;UACrDY,OAAO;AACPC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAE;AAAE2C,YAAAA,kBAAkB,EAAEoB,OAAAA;WAAgB;AAC9CvC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAEqD,MAAM,CAACjE,KAAK,CAAC6E,YAAY;AACvC/D,YAAAA,aAAa,EAAEmD,MAAM,CAACjE,KAAK,CAAC8E,gBAAAA;AAC7B,WAAA;AACF,SAAA,CAAC,CAAA;AAEF,QAAA,OAAOb,MAAM,CAAA;OACd,CAAC,OAAOH,KAAK,EAAE;AACdtE,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAEwC,QAAQ;UAChBnC,UAAU,EAAE6C,OAAO,CAACiC,iBAAiB;UACrC7E,OAAO,EAAE4C,OAAO,CAACkC,cAAc;UAC/B7E,KAAK,EAAEA,KAAK,CAAC8E,OAAO;AACpB5F,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEgD,OAAO,CAACoC,kBAAkB,GAAG,EAAE,GAAGnG,MAAM,CAACoG,MAAM;AACtD/E,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAE;AAAE2C,YAAAA,kBAAkB,EAAEoB,OAAAA;WAAgB;AAC9CvC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAE,CAAC;AACfE,YAAAA,aAAa,EAAE,CAAA;AAChB,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACZ,OAAA;KACF;IAEDiB,UAAU,EAAE,OAAO;MAAEC,QAAQ;AAAEzG,MAAAA,MAAAA;AAAQ,KAAA,KAAI;AACzC,MAAA,MAAMoE,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;MAC5B,IAAIoC,aAAa,GAAG,EAAE,CAAA;MACtB,IAAIjF,KAAK,GAAsD,EAAE,CAAA;MAEjE,IAAI;QACF,MAAM;UAAE+C,MAAM;UAAE,GAAGmC,IAAAA;SAAM,GAAG,MAAMF,QAAQ,EAAE,CAAA;AAE5C,QAAA,MAAMG,eAAe,GAAG,IAAIC,eAAe,CAAuD;AAChGC,UAAAA,SAASA,CAAC9B,KAAK,EAAE+B,UAAU,EAAA;AACzB,YAAA,IAAI/B,KAAK,CAACgC,IAAI,KAAK,YAAY,EAAE;cAC/BN,aAAa,IAAI1B,KAAK,CAACiC,SAAS,CAAA;AACjC,aAAA;AACD,YAAA,IAAIjC,KAAK,CAACgC,IAAI,KAAK,QAAQ,EAAE;AAC3BvF,cAAAA,KAAK,GAAG;AACNY,gBAAAA,YAAY,EAAE2C,KAAK,CAACvD,KAAK,EAAE6E,YAAY;AACvC/D,gBAAAA,aAAa,EAAEyC,KAAK,CAACvD,KAAK,EAAE8E,gBAAAA;eAC7B,CAAA;AACF,aAAA;AACDQ,YAAAA,UAAU,CAACG,OAAO,CAAClC,KAAK,CAAC,CAAA;WAC1B;AAEDmC,UAAAA,KAAKA,GAAA;YACH,MAAM7F,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CnD,YAAAA,kBAAkB,CAAC;AACjBJ,cAAAA,MAAM,EAAEwC,QAAQ;cAChBnC,UAAU,EAAE6C,OAAO,CAACiC,iBAAiB;cACrC7E,OAAO,EAAE4C,OAAO,CAACkC,cAAc;cAC/B7E,KAAK,EAAEA,KAAK,CAAC8E,OAA
O;AACpB5F,cAAAA,QAAQ,EAAE,QAAQ;cAClBS,KAAK,EAAEgD,OAAO,CAACoC,kBAAkB,GAAG,EAAE,GAAGnG,MAAM,CAACoG,MAAM;AACtD/E,cAAAA,MAAM,EAAE,CAAC;AAAEV,gBAAAA,OAAO,EAAE+F,aAAa;AAAEhG,gBAAAA,IAAI,EAAE,WAAA;AAAW,eAAE,CAAC;cACvDY,OAAO;AACPC,cAAAA,OAAO,EAAE,EAAE;AACXvB,cAAAA,MAAM,EAAE;AAAE2C,gBAAAA,kBAAkB,EAAEoB,OAAAA;eAAgB;AAC9CvC,cAAAA,UAAU,EAAE,GAAG;AACfC,cAAAA,KAAAA;AACD,aAAA,CAAC,CAAA;AACJ,WAAA;AACD,SAAA,CAAC,CAAA;QAEF,OAAO;AACL+C,UAAAA,MAAM,EAAEA,MAAM,CAAC4C,WAAW,CAACR,eAAe,CAAC;UAC3C,GAAGD,IAAAA;SACJ,CAAA;OACF,CAAC,OAAOpB,KAAK,EAAE;AACdtE,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAEwC,QAAQ;UAChBnC,UAAU,EAAE6C,OAAO,CAACiC,iBAAiB;UACrC7E,OAAO,EAAE4C,OAAO,CAACkC,cAAc;UAC/B7E,KAAK,EAAEA,KAAK,CAAC8E,OAAO;AACpB5F,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEgD,OAAO,CAACoC,kBAAkB,GAAG,EAAE,GAAGnG,MAAM,CAACoG,MAAM;AACtD/E,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAE;AAAE2C,YAAAA,kBAAkB,EAAEoB,OAAAA;WAAgB;AAC9CvC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAE,CAAC;AACfE,YAAAA,aAAa,EAAE,CAAA;AAChB,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACZ,OAAA;AACH,KAAA;GACD,CAAA;AAED,EAAA,OAAOM,UAAU,CAAA;AACnB,CAAC,CAAA;AAEM,MAAMwB,uBAAuB,GAAGA,CACrCjG,KAAsB,EACtBiC,QAAiB,EACjBU,OAA+C,KAC5B;EACnB,MAAM5C,OAAO,GAAG4C,OAAO,CAACkC,cAAc,IAAI9B,EAAM,EAAE,CAAA;AAClD,EAAA,MAAM0B,UAAU,GAAGD,+BAA+B,CAACvC,QAAQ,EAAEjC,KAAK,EAAE;AAClE,IAAA,GAAG2C,OAAO;AACVkC,IAAAA,cAAc,EAAE9E,OAAO;AACvB6E,IAAAA,iBAAiB,EAAEjC,OAAO,CAACiC,iBAAiB,IAAI7E,OAAAA;AACjD,GAAA,CAAC,CAAA;EAEF,MAAMmG,YAAY,GAAGC,8BAAiB,CAAC;IACrCnG,KAAK;AACLyE,IAAAA,UAAAA;AACD,GAAA,CAAC,CAAA;AAEF,EAAA,OAAOyB,YAAY,CAAA;AACrB;;;;"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export * from './src';
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import OpenAIOrignal from 'openai';
|
|
2
|
+
import { PostHog } from 'posthog-node';
|
|
3
|
+
import { MonitoringParams } from '../utils';
|
|
4
|
+
type ChatCompletion = OpenAIOrignal.ChatCompletion;
|
|
5
|
+
type ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk;
|
|
6
|
+
type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams;
|
|
7
|
+
type ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming;
|
|
8
|
+
type ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming;
|
|
9
|
+
import type { APIPromise, RequestOptions } from 'openai/core';
|
|
10
|
+
import type { Stream } from 'openai/streaming';
|
|
11
|
+
interface MonitoringOpenAIConfig {
|
|
12
|
+
apiKey: string;
|
|
13
|
+
posthog: PostHog;
|
|
14
|
+
baseURL?: string;
|
|
15
|
+
}
|
|
16
|
+
export declare class PostHogOpenAI extends OpenAIOrignal {
|
|
17
|
+
private readonly phClient;
|
|
18
|
+
constructor(config: MonitoringOpenAIConfig);
|
|
19
|
+
chat: WrappedChat;
|
|
20
|
+
}
|
|
21
|
+
export declare class WrappedChat extends OpenAIOrignal.Chat {
|
|
22
|
+
constructor(parentClient: PostHogOpenAI, phClient: PostHog);
|
|
23
|
+
completions: WrappedCompletions;
|
|
24
|
+
}
|
|
25
|
+
export declare class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
|
|
26
|
+
private readonly phClient;
|
|
27
|
+
constructor(client: OpenAIOrignal, phClient: PostHog);
|
|
28
|
+
create(body: ChatCompletionCreateParamsNonStreaming & MonitoringParams, options?: RequestOptions): APIPromise<ChatCompletion>;
|
|
29
|
+
create(body: ChatCompletionCreateParamsStreaming & MonitoringParams, options?: RequestOptions): APIPromise<Stream<ChatCompletionChunk>>;
|
|
30
|
+
create(body: ChatCompletionCreateParamsBase & MonitoringParams, options?: RequestOptions): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>;
|
|
31
|
+
}
|
|
32
|
+
export default PostHogOpenAI;
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { PostHog } from 'posthog-node';
|
|
2
|
+
import OpenAIOrignal from 'openai';
|
|
3
|
+
type ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams;
|
|
4
|
+
export interface MonitoringParams {
|
|
5
|
+
posthog_distinct_id?: string;
|
|
6
|
+
posthog_trace_id?: string;
|
|
7
|
+
posthog_properties?: Record<string, any>;
|
|
8
|
+
posthog_privacy_mode?: boolean;
|
|
9
|
+
posthog_groups?: Record<string, any>;
|
|
10
|
+
}
|
|
11
|
+
export declare const getModelParams: (params: ChatCompletionCreateParamsBase & MonitoringParams) => Record<string, any>;
|
|
12
|
+
export declare const getUsage: (response: any, provider: string) => {
|
|
13
|
+
input_tokens: number;
|
|
14
|
+
output_tokens: number;
|
|
15
|
+
};
|
|
16
|
+
/**
|
|
17
|
+
* Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.
|
|
18
|
+
*/
|
|
19
|
+
export declare const formatResponse: (response: any, provider: string) => Array<{
|
|
20
|
+
role: string;
|
|
21
|
+
content: string;
|
|
22
|
+
}>;
|
|
23
|
+
export declare const formatResponseAnthropic: (response: any) => Array<{
|
|
24
|
+
role: string;
|
|
25
|
+
content: string;
|
|
26
|
+
}>;
|
|
27
|
+
export declare const formatResponseOpenAI: (response: any) => Array<{
|
|
28
|
+
role: string;
|
|
29
|
+
content: string;
|
|
30
|
+
}>;
|
|
31
|
+
export declare const mergeSystemPrompt: (params: ChatCompletionCreateParamsBase & MonitoringParams, provider: string) => any;
|
|
32
|
+
export declare const withPrivacyMode: (client: PostHog, privacyMode: boolean, input: any) => any;
|
|
33
|
+
export type SendEventToPosthogParams = {
|
|
34
|
+
client: PostHog;
|
|
35
|
+
distinctId?: string;
|
|
36
|
+
traceId: string;
|
|
37
|
+
model: string;
|
|
38
|
+
provider: string;
|
|
39
|
+
input: any;
|
|
40
|
+
output: any;
|
|
41
|
+
latency: number;
|
|
42
|
+
baseURL: string;
|
|
43
|
+
httpStatus: number;
|
|
44
|
+
usage?: {
|
|
45
|
+
input_tokens?: number;
|
|
46
|
+
output_tokens?: number;
|
|
47
|
+
};
|
|
48
|
+
params: ChatCompletionCreateParamsBase & MonitoringParams;
|
|
49
|
+
};
|
|
50
|
+
export declare const sendEventToPosthog: ({ client, distinctId, traceId, model, provider, input, output, latency, baseURL, params, httpStatus, usage, }: SendEventToPosthogParams) => void;
|
|
51
|
+
export {};
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import type { LanguageModelV1, Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware } from 'ai';
|
|
2
|
+
import type { PostHog } from 'posthog-node';
|
|
3
|
+
interface CreateInstrumentationMiddlewareOptions {
|
|
4
|
+
posthogDistinctId?: string;
|
|
5
|
+
posthogTraceId: string;
|
|
6
|
+
posthogProperties?: Record<string, any>;
|
|
7
|
+
posthogPrivacyMode?: boolean;
|
|
8
|
+
posthogGroups?: string[];
|
|
9
|
+
}
|
|
10
|
+
export declare const createInstrumentationMiddleware: (phClient: PostHog, model: LanguageModelV1, options: CreateInstrumentationMiddlewareOptions) => LanguageModelV1Middleware;
|
|
11
|
+
export declare const wrapVercelLanguageModel: (model: LanguageModelV1, phClient: PostHog, options: CreateInstrumentationMiddlewareOptions) => LanguageModelV1;
|
|
12
|
+
export {};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
package/package.json
CHANGED
|
@@ -1,15 +1,15 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@posthog/ai",
|
|
3
|
-
"version": "1.
|
|
3
|
+
"version": "1.5.0",
|
|
4
4
|
"description": "PostHog Node.js AI integrations",
|
|
5
5
|
"repository": {
|
|
6
6
|
"type": "git",
|
|
7
7
|
"url": "git+https://github.com/PostHog/posthog-js-lite.git",
|
|
8
8
|
"directory": "posthog-ai"
|
|
9
9
|
},
|
|
10
|
-
"main": "
|
|
11
|
-
"module": "
|
|
12
|
-
"types": "
|
|
10
|
+
"main": "lib/index.cjs.js",
|
|
11
|
+
"module": "lib/index.esm.js",
|
|
12
|
+
"types": "lib/index.d.ts",
|
|
13
13
|
"license": "MIT",
|
|
14
14
|
"devDependencies": {
|
|
15
15
|
"@types/jest": "^28.1.5",
|