@posthog/ai 2.0.0 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -31
- package/lib/index.cjs.js +78 -36
- package/lib/index.cjs.js.map +1 -1
- package/lib/index.d.ts +1 -1
- package/lib/index.esm.js +78 -36
- package/lib/index.esm.js.map +1 -1
- package/lib/posthog-ai/src/utils.d.ts +2 -6
- package/lib/posthog-ai/src/vercel/middleware.d.ts +2 -2
- package/package.json +1 -1
- package/src/openai/index.ts +11 -11
- package/src/utils.ts +15 -23
- package/src/vercel/middleware.ts +67 -21
package/README.md
CHANGED
@@ -5,56 +5,40 @@ Initial Typescript SDK for LLM Observability
 ## Installation
 
 ```bash
-
+npm install @posthog/ai
 ```
 
 ## Usage
 
-### Before
-
-```typescript
-import { OpenAI } from 'openai'
-
-const client = new OpenAI({
-  apiKey: process.env.OPENAI_API_KEY || '',
-})
-
-await client.chat.completions.create({
-  model: 'gpt-4',
-  messages: [{ role: 'user', content: 'Hello, world!' }],
-})
-```
-
-### After
-
 ```typescript
 import { OpenAI } from '@posthog/ai'
 import { PostHog } from 'posthog-node'
 
-const phClient = new PostHog(
-  process.env.POSTHOG_API_KEY, {
-    host: process.env.POSTHOG_HOST || 'https://us.posthog.com',
-  }
-})
+const phClient = new PostHog('<YOUR_PROJECT_API_KEY>', { host: 'https://us.i.posthog.com' })
 
 const client = new OpenAI({
-  apiKey:
+  apiKey: '<YOUR_OPENAI_API_KEY>',
   posthog: phClient,
 })
 
-await client.chat.completions.create({
-  model: 'gpt-
-  messages: [{ role: 'user', content: '
-  posthogDistinctId: '
-
-
-}
+const completion = await client.chat.completions.create({
+  model: 'gpt-3.5-turbo',
+  messages: [{ role: 'user', content: 'Tell me a fun fact about hedgehogs' }],
+  posthogDistinctId: 'user_123', // optional
+  posthogTraceId: 'trace_123', // optional
+  posthogProperties: { conversation_id: 'abc123', paid: true }, //optional
+  posthogGroups: { company: 'company_id_in_your_db' }, // optional
+  posthogPrivacyMode: false, // optional
 })
 
+console.log(completion.choices[0].message.content)
+
 // YOU HAVE TO HAVE THIS OR THE CLIENT MAY NOT SEND EVENTS
 await phClient.shutdown()
 ```
 
+LLM Observability [docs](https://posthog.com/docs/ai-engineering/observability)
+
 Please see the main [PostHog docs](https://www.posthog.com/docs).
 
 ## Questions?
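The streaming path is also covered by this wrapper: the compiled diff below accumulates the assistant text and per-chunk token usage and captures a single `$ai_generation` event when the stream ends. A minimal sketch of consuming that path, assuming the same placeholder keys as the README snippet above and that the installed `openai` version supports `stream_options`; the model name and ids are illustrative:

```typescript
import { OpenAI } from '@posthog/ai'
import { PostHog } from 'posthog-node'

const phClient = new PostHog('<YOUR_PROJECT_API_KEY>', { host: 'https://us.i.posthog.com' })
const client = new OpenAI({ apiKey: '<YOUR_OPENAI_API_KEY>', posthog: phClient })

// The wrapper passes the request through and returns the usual chunk stream;
// content and token counts are accumulated for the $ai_generation event.
const stream = await client.chat.completions.create({
  model: 'gpt-3.5-turbo',
  messages: [{ role: 'user', content: 'Tell me a fun fact about hedgehogs' }],
  stream: true,
  stream_options: { include_usage: true }, // final chunk carries prompt/completion token counts
  posthogDistinctId: 'user_123',
})

for await (const chunk of stream) {
  process.stdout.write(chunk.choices[0]?.delta?.content ?? '')
}

await phClient.shutdown()
```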
package/lib/index.cjs.js
CHANGED
@@ -52,6 +52,18 @@ const sendEventToPosthog = ({
   httpStatus = 200,
   usage = {}
 }) => {
+  console.log('sendEventToPosthog', {
+    client,
+    distinctId,
+    traceId,
+    model,
+    provider,
+    input,
+    output,
+    latency,
+    baseURL,
+    params
+  });
   if (client.capture) {
     client.capture({
       distinctId: distinctId ?? traceId,
@@ -63,8 +75,8 @@ const sendEventToPosthog = ({
       $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, input),
       $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, output),
       $ai_http_status: httpStatus,
-      $ai_input_tokens: usage.input_tokens ?? 0,
-      $ai_output_tokens: usage.output_tokens ?? 0,
+      $ai_input_tokens: usage.inputTokens ?? 0,
+      $ai_output_tokens: usage.outputTokens ?? 0,
       $ai_latency: latency,
       $ai_trace_id: traceId,
       $ai_base_url: baseURL,
@@ -120,8 +132,8 @@ class WrappedCompletions extends OpenAIOrignal__default["default"].Chat.Completi
         });
         let accumulatedContent = '';
         let usage = {
-          input_tokens: 0,
-          output_tokens: 0
+          inputTokens: 0,
+          outputTokens: 0
         };
         if ('tee' in value) {
           const openAIStream = value;
@@ -132,8 +144,8 @@ class WrappedCompletions extends OpenAIOrignal__default["default"].Chat.Completi
                 accumulatedContent += delta;
                 if (chunk.usage) {
                   usage = {
-                    input_tokens: chunk.usage.prompt_tokens ?? 0,
-                    output_tokens: chunk.usage.completion_tokens ?? 0
+                    inputTokens: chunk.usage.prompt_tokens ?? 0,
+                    outputTokens: chunk.usage.completion_tokens ?? 0
                   };
                 }
                 passThroughStream.write(chunk);
@@ -172,8 +184,8 @@ class WrappedCompletions extends OpenAIOrignal__default["default"].Chat.Completi
                 params: body,
                 httpStatus: 500,
                 usage: {
-                  input_tokens: 0,
-                  output_tokens: 0
+                  inputTokens: 0,
+                  outputTokens: 0
                 }
               });
               passThroughStream.emit('error', error);
@@ -202,8 +214,8 @@ class WrappedCompletions extends OpenAIOrignal__default["default"].Chat.Completi
             params: body,
             httpStatus: 200,
             usage: {
-              input_tokens: result.usage?.prompt_tokens ?? 0,
-              output_tokens: result.usage?.completion_tokens ?? 0
+              inputTokens: result.usage?.prompt_tokens ?? 0,
+              outputTokens: result.usage?.completion_tokens ?? 0
             }
           });
         }
@@ -222,8 +234,8 @@ class WrappedCompletions extends OpenAIOrignal__default["default"].Chat.Completi
           params: body,
          httpStatus: 500,
           usage: {
-            input_tokens: 0,
-            output_tokens: 0
+            inputTokens: 0,
+            outputTokens: 0
           }
         });
         throw error;
@@ -233,6 +245,36 @@ class WrappedCompletions extends OpenAIOrignal__default["default"].Chat.Completi
   }
 }
 
+const mapVercelParams = params => {
+  return {
+    temperature: params.temperature,
+    max_tokens: params.maxTokens,
+    top_p: params.topP,
+    frequency_penalty: params.frequencyPenalty,
+    presence_penalty: params.presencePenalty,
+    stop: params.stopSequences,
+    stream: params.stream
+  };
+};
+const mapVercelPrompt = prompt => {
+  return prompt.map(p => {
+    let content = '';
+    if (Array.isArray(p.content)) {
+      content = p.content.map(c => {
+        if (c.type === 'text') {
+          return c.text;
+        }
+        return '';
+      }).join('');
+    } else {
+      content = p.content;
+    }
+    return {
+      role: p.role,
+      content
+    };
+  });
+};
 const createInstrumentationMiddleware = (phClient, model, options) => {
   const middleware = {
     wrapGenerate: async ({
@@ -240,6 +282,10 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       params
     }) => {
      const startTime = Date.now();
+      let mergedParams = {
+        ...options,
+        ...mapVercelParams(params)
+      };
       try {
         const result = await doGenerate();
         const latency = (Date.now() - startTime) / 1000;
@@ -249,20 +295,18 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           traceId: options.posthogTraceId,
           model: model.modelId,
           provider: 'vercel',
-          input: options.posthogPrivacyMode ? '' : params.prompt,
+          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
           output: [{
             content: result.text,
             role: 'assistant'
           }],
           latency,
           baseURL: '',
-          params: {
-            posthog_properties: options
-          },
+          params: mergedParams,
           httpStatus: 200,
           usage: {
-            input_tokens: result.usage.promptTokens,
-            output_tokens: result.usage.completionTokens
+            inputTokens: result.usage.promptTokens,
+            outputTokens: result.usage.completionTokens
           }
         });
         return result;
@@ -273,17 +317,15 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           traceId: options.posthogTraceId,
           model: model.modelId,
           provider: 'vercel',
-          input: options.posthogPrivacyMode ? '' : params.prompt,
+          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
           output: [],
           latency: 0,
           baseURL: '',
-          params: {
-            posthog_properties: options
-          },
+          params: mergedParams,
           httpStatus: 500,
           usage: {
-            input_tokens: 0,
-            output_tokens: 0
+            inputTokens: 0,
+            outputTokens: 0
           }
         });
         throw error;
@@ -296,6 +338,10 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       const startTime = Date.now();
       let generatedText = '';
       let usage = {};
+      let mergedParams = {
+        ...options,
+        ...mapVercelParams(params)
+      };
       try {
         const {
           stream,
@@ -308,8 +354,8 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
            }
            if (chunk.type === 'finish') {
              usage = {
-               input_tokens: chunk.usage?.promptTokens,
-               output_tokens: chunk.usage?.completionTokens
+               inputTokens: chunk.usage?.promptTokens,
+               outputTokens: chunk.usage?.completionTokens
              };
            }
            controller.enqueue(chunk);
@@ -322,16 +368,14 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
             traceId: options.posthogTraceId,
             model: model.modelId,
             provider: 'vercel',
-            input: options.posthogPrivacyMode ? '' : params.prompt,
+            input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
             output: [{
               content: generatedText,
               role: 'assistant'
             }],
             latency,
             baseURL: '',
-            params: {
-              posthog_properties: options
-            },
+            params: mergedParams,
             httpStatus: 200,
             usage
           });
@@ -348,17 +392,15 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           traceId: options.posthogTraceId,
           model: model.modelId,
           provider: 'vercel',
-          input: options.posthogPrivacyMode ? '' : params.prompt,
+          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
           output: [],
           latency: 0,
           baseURL: '',
-          params: {
-            posthog_properties: options
-          },
+          params: mergedParams,
           httpStatus: 500,
           usage: {
-            input_tokens: 0,
-            output_tokens: 0
+            inputTokens: 0,
+            outputTokens: 0
           }
         });
         throw error;
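For the Vercel AI SDK path touched above (`mapVercelParams`, `mapVercelPrompt`, and the merged `params`), a minimal usage sketch follows. It assumes the Vercel AI SDK packages of this era (`ai` with `experimental_wrapLanguageModel`, plus `@ai-sdk/openai`), that `wrapVercelLanguageModel` is exported from the package entry point, and an illustrative model id; the options mirror `CreateInstrumentationMiddlewareOptions` from the middleware source:

```typescript
import { generateText } from 'ai'
import { openai } from '@ai-sdk/openai'
import { PostHog } from 'posthog-node'
import { wrapVercelLanguageModel } from '@posthog/ai'

const phClient = new PostHog('<YOUR_PROJECT_API_KEY>', { host: 'https://us.i.posthog.com' })

// Wrap the model; posthogTraceId falls back to a generated uuid when omitted,
// and the mapped generation settings (temperature, maxTokens, ...) end up in
// the captured $ai_model_parameters via mergedParams.
const model = wrapVercelLanguageModel(openai('gpt-4o-mini'), phClient, {
  posthogDistinctId: 'user_123',
  posthogTraceId: 'trace_123',
  posthogProperties: { conversation_id: 'abc123' },
})

const { text } = await generateText({
  model,
  prompt: 'Tell me a fun fact about hedgehogs',
  temperature: 0.7,
})
console.log(text)

await phClient.shutdown()
```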
package/lib/index.cjs.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.cjs.js","sources":["../src/utils.ts","../src/openai/index.ts","../src/vercel/middleware.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport OpenAIOrignal from 'openai'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n}\n\nexport const getModelParams = (params: ChatCompletionCreateParamsBase & MonitoringParams): Record<string, any> => {\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\nexport const getUsage = (response: any, provider: string): { input_tokens: number; output_tokens: number } => {\n if (!response?.usage) {\n return { input_tokens: 0, output_tokens: 0 }\n }\n\n if (provider === 'anthropic') {\n return {\n input_tokens: response.usage.input_tokens ?? 0,\n output_tokens: response.usage.output_tokens ?? 0,\n }\n } else if (provider === 'openai') {\n return {\n input_tokens: response.usage.prompt_tokens ?? 0,\n output_tokens: response.usage.completion_tokens ?? 0,\n }\n }\n\n return { input_tokens: 0, output_tokens: 0 }\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): Array<{ role: string; content: string }> => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): Array<{ role: string; content: string }> => {\n // Example approach if \"response.content\" holds array of text segments, etc.\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.content ?? []) {\n if (choice?.text) {\n output.push({\n role: 'assistant',\n content: choice.text,\n })\n }\n }\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): Array<{ role: string; content: string }> => {\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.choices ?? []) {\n if (choice.message?.content) {\n output.push({\n role: choice.message.role,\n content: choice.message.content,\n })\n }\n }\n return output\n}\n\nexport const mergeSystemPrompt = (params: ChatCompletionCreateParamsBase & MonitoringParams, provider: string): any => {\n if (provider !== 'anthropic') {\n return params.messages\n }\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = (params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? 
null : input\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: { input_tokens?: number; output_tokens?: number }\n params: ChatCompletionCreateParamsBase & MonitoringParams\n}\n\nexport const sendEventToPosthog = ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n}: SendEventToPosthogParams): void => {\n if (client.capture) {\n client.capture({\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties: {\n $ai_provider: provider,\n $ai_model: model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, input),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, output),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.input_tokens ?? 0,\n $ai_output_tokens: usage.output_tokens ?? 0,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n },\n groups: params.posthogGroups,\n })\n }\n}\n","import OpenAIOrignal from 'openai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { PassThrough } from 'stream'\nimport { mergeSystemPrompt, MonitoringParams, sendEventToPosthog } from '../utils'\n\ntype ChatCompletion = OpenAIOrignal.ChatCompletion\ntype ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming\ntype ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming\nimport type { APIPromise, RequestOptions } from 'openai/core'\nimport type { Stream } from 'openai/streaming'\n\ninterface MonitoringOpenAIConfig {\n apiKey: string\n posthog: PostHog\n baseURL?: string\n}\n\nexport class PostHogOpenAI extends OpenAIOrignal {\n private readonly phClient: PostHog\n\n constructor(config: MonitoringOpenAIConfig) {\n const { posthog, ...openAIConfig } = config\n super(openAIConfig)\n this.phClient = posthog\n this.chat = new WrappedChat(this, this.phClient)\n }\n\n public chat: WrappedChat\n}\n\nexport class WrappedChat extends OpenAIOrignal.Chat {\n constructor(parentClient: PostHogOpenAI, phClient: PostHog) {\n super(parentClient)\n this.completions = new WrappedCompletions(parentClient, phClient)\n }\n\n public completions: WrappedCompletions\n}\n\nexport class WrappedCompletions extends OpenAIOrignal.Chat.Completions {\n private readonly phClient: PostHog\n\n constructor(client: OpenAIOrignal, phClient: PostHog) {\n super(client)\n this.phClient = phClient\n }\n\n // --- Overload #1: Non-streaming\n public create(\n body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion>\n\n // --- Overload #2: Streaming\n public create(\n body: ChatCompletionCreateParamsStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<Stream<ChatCompletionChunk>>\n\n // --- Overload #3: Generic base\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | 
Stream<ChatCompletionChunk>>\n\n // --- Implementation Signature\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogPrivacyMode = false,\n posthogGroups,\n ...openAIParams\n } = body\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n\n const parentPromise = super.create(openAIParams, options)\n\n if (openAIParams.stream) {\n return parentPromise.then((value) => {\n const passThroughStream = new PassThrough({ objectMode: true })\n let accumulatedContent = ''\n let usage: { input_tokens: number; output_tokens: number } = {\n input_tokens: 0,\n output_tokens: 0,\n }\n if ('tee' in value) {\n const openAIStream = value\n ;(async () => {\n try {\n for await (const chunk of openAIStream) {\n const delta = chunk?.choices?.[0]?.delta?.content ?? ''\n accumulatedContent += delta\n if (chunk.usage) {\n usage = {\n input_tokens: chunk.usage.prompt_tokens ?? 0,\n output_tokens: chunk.usage.completion_tokens ?? 0,\n }\n }\n passThroughStream.write(chunk)\n }\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage,\n })\n passThroughStream.end()\n } catch (error) {\n // error handling\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n passThroughStream.emit('error', error)\n }\n })()\n }\n return passThroughStream as unknown as Stream<ChatCompletionChunk>\n }) as APIPromise<Stream<ChatCompletionChunk>>\n } else {\n const wrappedPromise = parentPromise.then(\n (result) => {\n if ('choices' in result) {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [{ content: result.choices[0].message.content, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage: {\n input_tokens: result.usage?.prompt_tokens ?? 0,\n output_tokens: result.usage?.completion_tokens ?? 0,\n },\n })\n }\n return result\n },\n (error) => {\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? 
'',\n params: body,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n throw error\n }\n ) as APIPromise<ChatCompletion>\n\n return wrappedPromise\n }\n }\n}\n\nexport default PostHogOpenAI\n","import { experimental_wrapLanguageModel as wrapLanguageModel } from 'ai'\nimport type {\n LanguageModelV1,\n Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware,\n LanguageModelV1StreamPart,\n} from 'ai'\nimport { v4 as uuidv4 } from 'uuid'\nimport type { PostHog } from 'posthog-node'\nimport { sendEventToPosthog } from '../utils'\n\ninterface CreateInstrumentationMiddlewareOptions {\n posthogDistinctId?: string\n posthogTraceId: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: string[]\n}\n\nexport const createInstrumentationMiddleware = (\n phClient: PostHog,\n model: LanguageModelV1,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1Middleware => {\n const middleware: LanguageModelV1Middleware = {\n wrapGenerate: async ({ doGenerate, params }) => {\n const startTime = Date.now()\n\n try {\n const result = await doGenerate()\n const latency = (Date.now() - startTime) / 1000\n\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : params.prompt,\n output: [{ content: result.text, role: 'assistant' }],\n latency,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 200,\n usage: {\n input_tokens: result.usage.promptTokens,\n output_tokens: result.usage.completionTokens,\n },\n })\n\n return result\n } catch (error) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : params.prompt,\n output: [],\n latency: 0,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n throw error\n }\n },\n\n wrapStream: async ({ doStream, params }) => {\n const startTime = Date.now()\n let generatedText = ''\n let usage: { input_tokens?: number; output_tokens?: number } = {}\n\n try {\n const { stream, ...rest } = await doStream()\n\n const transformStream = new TransformStream<LanguageModelV1StreamPart, LanguageModelV1StreamPart>({\n transform(chunk, controller) {\n if (chunk.type === 'text-delta') {\n generatedText += chunk.textDelta\n }\n if (chunk.type === 'finish') {\n usage = {\n input_tokens: chunk.usage?.promptTokens,\n output_tokens: chunk.usage?.completionTokens,\n }\n }\n controller.enqueue(chunk)\n },\n\n flush() {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : params.prompt,\n output: [{ content: generatedText, role: 'assistant' }],\n latency,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 200,\n usage,\n })\n },\n })\n\n return {\n stream: stream.pipeThrough(transformStream),\n ...rest,\n }\n } catch (error) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? 
'' : params.prompt,\n output: [],\n latency: 0,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n throw error\n }\n },\n }\n\n return middleware\n}\n\nexport const wrapVercelLanguageModel = (\n model: LanguageModelV1,\n phClient: PostHog,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1 => {\n const traceId = options.posthogTraceId ?? uuidv4()\n const middleware = createInstrumentationMiddleware(phClient, model, {\n ...options,\n posthogTraceId: traceId,\n posthogDistinctId: options.posthogDistinctId ?? traceId,\n })\n\n const wrappedModel = wrapLanguageModel({\n model,\n middleware,\n })\n\n return wrappedModel\n}\n"],"names":["getModelParams","params","modelParams","paramKeys","key","undefined","mergeSystemPrompt","provider","messages","system","systemMessage","role","content","withPrivacyMode","client","privacyMode","input","privacy_mode","sendEventToPosthog","distinctId","traceId","model","output","latency","baseURL","httpStatus","usage","capture","event","properties","$ai_provider","$ai_model","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","input_tokens","$ai_output_tokens","output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","groups","posthogGroups","PostHogOpenAI","OpenAIOrignal","constructor","config","posthog","openAIConfig","phClient","chat","WrappedChat","Chat","parentClient","completions","WrappedCompletions","Completions","create","body","options","posthogDistinctId","posthogTraceId","openAIParams","uuidv4","startTime","Date","now","parentPromise","stream","then","value","passThroughStream","PassThrough","objectMode","accumulatedContent","openAIStream","chunk","delta","choices","prompt_tokens","completion_tokens","write","end","error","emit","wrappedPromise","result","message","createInstrumentationMiddleware","middleware","wrapGenerate","doGenerate","modelId","prompt","text","posthog_properties","promptTokens","completionTokens","wrapStream","doStream","generatedText","rest","transformStream","TransformStream","transform","controller","type","textDelta","enqueue","flush","pipeThrough","wrapVercelLanguageModel","wrappedModel","wrapLanguageModel"],"mappings":";;;;;;;;;;;;;AAaO,MAAMA,cAAc,GAAIC,MAAyD,IAAyB;EAC/G,MAAMC,WAAW,GAAwB,EAAE,CAAA;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH,CAAA;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAc,CAACG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAc,CAACG,GAAG,CAAC,CAAA;AACxC,KAAA;AACF,GAAA;AACD,EAAA,OAAOF,WAAW,CAAA;AACpB,CAAC,CAAA;AAgEM,MAAMI,iBAAiB,GAAGA,CAACL,MAAyD,EAAEM,QAAgB,KAAS;EACpH,IAAIA,QAAQ,KAAK,WAAW,EAAE;IAC5B,OAAON,MAAM,CAACO,QAAQ,CAAA;AACvB,GAAA;AACD,EAAA,MAAMA,QAAQ,GAAGP,MAAM,CAACO,QAAQ,IAAI,EAAE,CAAA;AACtC,EAAA,IAAI,CAAEP,MAAc,CAACQ,MAAM,EAAE;AAC3B,IAAA,OAAOD,QAAQ,CAAA;AAChB,GAAA;AACD,EAAA,MAAME,aAAa,GAAIT,MAAc,CAACQ,MAAM,CAAA;AAC5C,EAAA,OAAO,CAAC;AAAEE,IAAAA,IAAI,EAAE,QAAQ;AAAEC,IAAAA,OAAO,EAAEF,aAAAA;GAAe,EAAE,GAAGF,QAAQ,CAAC,CAAA;AAClE,CAAC,CAAA;AAEM,MAAMK,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAS;EACxF,OAAQF,MAAc,CAACG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK,CAAA;AACnE,CAAC,CAAA;AAiBM,MAAME,kBAAkB,GAAGA,CAAC;EACjCJ,MAAM;EACNK,UAAU;EACVC,OAAO;EACPC,KAAK;EACLd,QAAQ;EACRS,KAAK;EACLM,MAAM;EACNC,OAAO;EACPC,OAAO;EACPvB,MAAM;AACNwB,EAAAA,UAAU,GAA
G,GAAG;AAChBC,EAAAA,KAAK,GAAG,EAAE;AAAA,CACe,KAAU;EACnC,IAAIZ,MAAM,CAACa,OAAO,EAAE;IAClBb,MAAM,CAACa,OAAO,CAAC;MACbR,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCQ,MAAAA,KAAK,EAAE,gBAAgB;AACvBC,MAAAA,UAAU,EAAE;AACVC,QAAAA,YAAY,EAAEvB,QAAQ;AACtBwB,QAAAA,SAAS,EAAEV,KAAK;AAChBW,QAAAA,oBAAoB,EAAEhC,cAAc,CAACC,MAAM,CAAC;AAC5CgC,QAAAA,SAAS,EAAEpB,eAAe,CAACC,MAAM,EAAEb,MAAM,CAACiC,kBAAkB,IAAI,KAAK,EAAElB,KAAK,CAAC;AAC7EmB,QAAAA,kBAAkB,EAAEtB,eAAe,CAACC,MAAM,EAAEb,MAAM,CAACiC,kBAAkB,IAAI,KAAK,EAAEZ,MAAM,CAAC;AACvFc,QAAAA,eAAe,EAAEX,UAAU;AAC3BY,QAAAA,gBAAgB,EAAEX,KAAK,CAACY,YAAY,IAAI,CAAC;AACzCC,QAAAA,iBAAiB,EAAEb,KAAK,CAACc,aAAa,IAAI,CAAC;AAC3CC,QAAAA,WAAW,EAAElB,OAAO;AACpBmB,QAAAA,YAAY,EAAEtB,OAAO;AACrBuB,QAAAA,YAAY,EAAEnB,OAAO;QACrB,GAAGvB,MAAM,CAAC2C,iBAAiB;AAC3B,QAAA,IAAIzB,UAAU,GAAG,EAAE,GAAG;AAAE0B,UAAAA,uBAAuB,EAAE,KAAA;SAAO,CAAA;OACzD;MACDC,MAAM,EAAE7C,MAAM,CAAC8C,aAAAA;AAChB,KAAA,CAAC,CAAA;AACH,GAAA;AACH,CAAC;;ACjJK,MAAOC,aAAc,SAAQC,iCAAa,CAAA;EAG9CC,WAAAA,CAAYC,MAA8B,EAAA;IACxC,MAAM;MAAEC,OAAO;MAAE,GAAGC,YAAAA;AAAc,KAAA,GAAGF,MAAM,CAAA;IAC3C,KAAK,CAACE,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,QAAQ,GAAGF,OAAO,CAAA;IACvB,IAAI,CAACG,IAAI,GAAG,IAAIC,WAAW,CAAC,IAAI,EAAE,IAAI,CAACF,QAAQ,CAAC,CAAA;AAClD,GAAA;AAGD,CAAA;AAEY,MAAAE,WAAY,SAAQP,iCAAa,CAACQ,IAAI,CAAA;AACjDP,EAAAA,WAAYA,CAAAQ,YAA2B,EAAEJ,QAAiB,EAAA;IACxD,KAAK,CAACI,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,WAAW,GAAG,IAAIC,kBAAkB,CAACF,YAAY,EAAEJ,QAAQ,CAAC,CAAA;AACnE,GAAA;AAGD,CAAA;MAEYM,kBAAmB,SAAQX,iCAAa,CAACQ,IAAI,CAACI,WAAW,CAAA;AAGpEX,EAAAA,WAAYA,CAAApC,MAAqB,EAAEwC,QAAiB,EAAA;IAClD,KAAK,CAACxC,MAAM,CAAC,CAAA;IACb,IAAI,CAACwC,QAAQ,GAAGA,QAAQ,CAAA;AAC1B,GAAA;AAoBA;AACOQ,EAAAA,MAAMA,CACXC,IAAuD,EACvDC,OAAwB,EAAA;IAExB,MAAM;MACJC,iBAAiB;MACjBC,cAAc;MACdtB,iBAAiB;AACjBV,MAAAA,kBAAkB,GAAG,KAAK;MAC1Ba,aAAa;MACb,GAAGoB,YAAAA;AACJ,KAAA,GAAGJ,IAAI,CAAA;AAER,IAAA,MAAM3C,OAAO,GAAG8C,cAAc,IAAIE,OAAM,EAAE,CAAA;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;IAE5B,MAAMC,aAAa,GAAG,KAAK,CAACV,MAAM,CAACK,YAAY,EAAEH,OAAO,CAAC,CAAA;IAEzD,IAAIG,YAAY,CAACM,MAAM,EAAE;AACvB,MAAA,OAAOD,aAAa,CAACE,IAAI,CAAEC,KAAK,IAAI;AAClC,QAAA,MAAMC,iBAAiB,GAAG,IAAIC,kBAAW,CAAC;AAAEC,UAAAA,UAAU,EAAE,IAAA;AAAM,SAAA,CAAC,CAAA;QAC/D,IAAIC,kBAAkB,GAAG,EAAE,CAAA;AAC3B,QAAA,IAAIrD,KAAK,GAAoD;AAC3DY,UAAAA,YAAY,EAAE,CAAC;AACfE,UAAAA,aAAa,EAAE,CAAA;SAChB,CAAA;QACD,IAAI,KAAK,IAAImC,KAAK,EAAE;UAClB,MAAMK,YAAY,GAAGL,KAAK,CAAA;AACzB,UAAA,CAAC,YAAW;YACX,IAAI;AACF,cAAA,WAAW,MAAMM,KAAK,IAAID,YAAY,EAAE;AACtC,gBAAA,MAAME,KAAK,GAAGD,KAAK,EAAEE,OAAO,GAAG,CAAC,CAAC,EAAED,KAAK,EAAEtE,OAAO,IAAI,EAAE,CAAA;AACvDmE,gBAAAA,kBAAkB,IAAIG,KAAK,CAAA;gBAC3B,IAAID,KAAK,CAACvD,KAAK,EAAE;AACfA,kBAAAA,KAAK,GAAG;AACNY,oBAAAA,YAAY,EAAE2C,KAAK,CAACvD,KAAK,CAAC0D,aAAa,IAAI,CAAC;AAC5C5C,oBAAAA,aAAa,EAAEyC,KAAK,CAACvD,KAAK,CAAC2D,iBAAiB,IAAI,CAAA;mBACjD,CAAA;AACF,iBAAA;AACDT,gBAAAA,iBAAiB,CAACU,KAAK,CAACL,KAAK,CAAC,CAAA;AAC/B,eAAA;cACD,MAAM1D,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CnD,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;gBACrBnC,UAAU,EAAE8C,iBAAiB,IAAI7C,OAAO;gBACxCA,OAAO;gBACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBS,KAAK,EAAEkB,kBAAkB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC1E7C,gBAAAA,MAAM,EAAE,CAAC;AAAEV,kBAAAA,OAAO,EAAEmE,kBAAkB;AAAEpE,kBAAAA,IAAI,EAAE,WAAA;AAAW,iBAAE,CAAC;gBAC5DY,OAAO;AACPC,gBAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,gBAAAA,MAAM,EAAE8D,IAAI;AACZtC,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAAA;AACD,eAAA,CAAC,CAAA;cACFkD,iBAAiB,CAACW,GAAG,EAAE,CAAA;aACxB,CAAC,OAAOC,KAAK,EAAE;AACd;AACAtE,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAA
CwC,QAAQ;gBACrBnC,UAAU,EAAE8C,iBAAiB,IAAI7C,OAAO;gBACxCA,OAAO;gBACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBS,KAAK,EAAEkB,kBAAkB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC1E7C,gBAAAA,MAAM,EAAE,EAAE;AACVC,gBAAAA,OAAO,EAAE,CAAC;AACVC,gBAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,gBAAAA,MAAM,EAAE8D,IAAI;AACZtC,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAK,EAAE;AACLY,kBAAAA,YAAY,EAAE,CAAC;AACfE,kBAAAA,aAAa,EAAE,CAAA;AAChB,iBAAA;AACF,eAAA,CAAC,CAAA;AACFoC,cAAAA,iBAAiB,CAACa,IAAI,CAAC,OAAO,EAAED,KAAK,CAAC,CAAA;AACvC,aAAA;AACH,WAAC,GAAG,CAAA;AACL,SAAA;AACD,QAAA,OAAOZ,iBAA2D,CAAA;AACpE,OAAC,CAA4C,CAAA;AAC9C,KAAA,MAAM;AACL,MAAA,MAAMc,cAAc,GAAGlB,aAAa,CAACE,IAAI,CACtCiB,MAAM,IAAI;QACT,IAAI,SAAS,IAAIA,MAAM,EAAE;UACvB,MAAMpE,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CnD,UAAAA,kBAAkB,CAAC;YACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;YACrBnC,UAAU,EAAE8C,iBAAiB,IAAI7C,OAAO;YACxCA,OAAO;YACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,YAAAA,QAAQ,EAAE,QAAQ;YAClBS,KAAK,EAAEkB,kBAAkB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC1E7C,YAAAA,MAAM,EAAE,CAAC;cAAEV,OAAO,EAAE+E,MAAM,CAACR,OAAO,CAAC,CAAC,CAAC,CAACS,OAAO,CAAChF,OAAO;AAAED,cAAAA,IAAI,EAAE,WAAA;aAAa,CAAC;YAC3EY,OAAO;AACPC,YAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,YAAAA,MAAM,EAAE8D,IAAI;AACZtC,YAAAA,UAAU,EAAE,GAAG;AACfC,YAAAA,KAAK,EAAE;AACLY,cAAAA,YAAY,EAAEqD,MAAM,CAACjE,KAAK,EAAE0D,aAAa,IAAI,CAAC;AAC9C5C,cAAAA,aAAa,EAAEmD,MAAM,CAACjE,KAAK,EAAE2D,iBAAiB,IAAI,CAAA;AACnD,aAAA;AACF,WAAA,CAAC,CAAA;AACH,SAAA;AACD,QAAA,OAAOM,MAAM,CAAA;OACd,EACAH,KAAK,IAAI;AACRtE,QAAAA,kBAAkB,CAAC;UACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;UACrBnC,UAAU,EAAE8C,iBAAiB,IAAI7C,OAAO;UACxCA,OAAO;UACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEkB,kBAAkB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC1E7C,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,UAAAA,MAAM,EAAE8D,IAAI;AACZtC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAE,CAAC;AACfE,YAAAA,aAAa,EAAE,CAAA;AAChB,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACb,OAAC,CAC4B,CAAA;AAE/B,MAAA,OAAOE,cAAc,CAAA;AACtB,KAAA;AACH,GAAA;AACD;;ACvLM,MAAMG,+BAA+B,GAAGA,CAC7CvC,QAAiB,EACjBjC,KAAsB,EACtB2C,OAA+C,KAClB;AAC7B,EAAA,MAAM8B,UAAU,GAA8B;IAC5CC,YAAY,EAAE,OAAO;MAAEC,UAAU;AAAE/F,MAAAA,MAAAA;AAAQ,KAAA,KAAI;AAC7C,MAAA,MAAMoE,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;MAE5B,IAAI;AACF,QAAA,MAAMoB,MAAM,GAAG,MAAMK,UAAU,EAAE,CAAA;QACjC,MAAMzE,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAE/CnD,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAEwC,QAAQ;UAChBnC,UAAU,EAAE6C,OAAO,CAACC,iBAAiB;UACrC7C,OAAO,EAAE4C,OAAO,CAACE,cAAc;UAC/B7C,KAAK,EAAEA,KAAK,CAAC4E,OAAO;AACpB1F,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEgD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGjC,MAAM,CAACiG,MAAM;AACtD5E,UAAAA,MAAM,EAAE,CAAC;YAAEV,OAAO,EAAE+E,MAAM,CAACQ,IAAI;AAAExF,YAAAA,IAAI,EAAE,WAAA;WAAa,CAAC;UACrDY,OAAO;AACPC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAE;AAAEmG,YAAAA,kBAAkB,EAAEpC,OAAAA;WAAgB;AAC9CvC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAEqD,MAAM,CAACjE,KAAK,CAAC2E,YAAY;AACvC7D,YAAAA,aAAa,EAAEmD,MAAM,CAACjE,KAAK,CAAC4E,gBAAAA;AAC7B,WAAA;AACF,SAAA,CAAC,CAAA;AAEF,QAAA,OAAOX,MAAM,CAAA;OACd,CAAC,OAAOH,KAAK,EAAE;AACdtE,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAEwC,QAAQ;UAChBnC,UAAU,EAAE6C,OAAO,CAACC,iBAAiB;UACrC7C,OAAO,EAAE4C,OAAO,CAACE,cAAc;UAC/B7C,KAAK,EAAEA,KAAK,CAAC4E,OAAO;AACpB1F,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEgD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGjC,MAAM,CAACiG,MAAM;AACtD5E,UAAAA,MAAM,EAAE
,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAE;AAAEmG,YAAAA,kBAAkB,EAAEpC,OAAAA;WAAgB;AAC9CvC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAE,CAAC;AACfE,YAAAA,aAAa,EAAE,CAAA;AAChB,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACZ,OAAA;KACF;IAEDe,UAAU,EAAE,OAAO;MAAEC,QAAQ;AAAEvG,MAAAA,MAAAA;AAAQ,KAAA,KAAI;AACzC,MAAA,MAAMoE,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;MAC5B,IAAIkC,aAAa,GAAG,EAAE,CAAA;MACtB,IAAI/E,KAAK,GAAsD,EAAE,CAAA;MAEjE,IAAI;QACF,MAAM;UAAE+C,MAAM;UAAE,GAAGiC,IAAAA;SAAM,GAAG,MAAMF,QAAQ,EAAE,CAAA;AAE5C,QAAA,MAAMG,eAAe,GAAG,IAAIC,eAAe,CAAuD;AAChGC,UAAAA,SAASA,CAAC5B,KAAK,EAAE6B,UAAU,EAAA;AACzB,YAAA,IAAI7B,KAAK,CAAC8B,IAAI,KAAK,YAAY,EAAE;cAC/BN,aAAa,IAAIxB,KAAK,CAAC+B,SAAS,CAAA;AACjC,aAAA;AACD,YAAA,IAAI/B,KAAK,CAAC8B,IAAI,KAAK,QAAQ,EAAE;AAC3BrF,cAAAA,KAAK,GAAG;AACNY,gBAAAA,YAAY,EAAE2C,KAAK,CAACvD,KAAK,EAAE2E,YAAY;AACvC7D,gBAAAA,aAAa,EAAEyC,KAAK,CAACvD,KAAK,EAAE4E,gBAAAA;eAC7B,CAAA;AACF,aAAA;AACDQ,YAAAA,UAAU,CAACG,OAAO,CAAChC,KAAK,CAAC,CAAA;WAC1B;AAEDiC,UAAAA,KAAKA,GAAA;YACH,MAAM3F,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CnD,YAAAA,kBAAkB,CAAC;AACjBJ,cAAAA,MAAM,EAAEwC,QAAQ;cAChBnC,UAAU,EAAE6C,OAAO,CAACC,iBAAiB;cACrC7C,OAAO,EAAE4C,OAAO,CAACE,cAAc;cAC/B7C,KAAK,EAAEA,KAAK,CAAC4E,OAAO;AACpB1F,cAAAA,QAAQ,EAAE,QAAQ;cAClBS,KAAK,EAAEgD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGjC,MAAM,CAACiG,MAAM;AACtD5E,cAAAA,MAAM,EAAE,CAAC;AAAEV,gBAAAA,OAAO,EAAE6F,aAAa;AAAE9F,gBAAAA,IAAI,EAAE,WAAA;AAAW,eAAE,CAAC;cACvDY,OAAO;AACPC,cAAAA,OAAO,EAAE,EAAE;AACXvB,cAAAA,MAAM,EAAE;AAAEmG,gBAAAA,kBAAkB,EAAEpC,OAAAA;eAAgB;AAC9CvC,cAAAA,UAAU,EAAE,GAAG;AACfC,cAAAA,KAAAA;AACD,aAAA,CAAC,CAAA;AACJ,WAAA;AACD,SAAA,CAAC,CAAA;QAEF,OAAO;AACL+C,UAAAA,MAAM,EAAEA,MAAM,CAAC0C,WAAW,CAACR,eAAe,CAAC;UAC3C,GAAGD,IAAAA;SACJ,CAAA;OACF,CAAC,OAAOlB,KAAK,EAAE;AACdtE,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAEwC,QAAQ;UAChBnC,UAAU,EAAE6C,OAAO,CAACC,iBAAiB;UACrC7C,OAAO,EAAE4C,OAAO,CAACE,cAAc;UAC/B7C,KAAK,EAAEA,KAAK,CAAC4E,OAAO;AACpB1F,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEgD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGjC,MAAM,CAACiG,MAAM;AACtD5E,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAE;AAAEmG,YAAAA,kBAAkB,EAAEpC,OAAAA;WAAgB;AAC9CvC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAE,CAAC;AACfE,YAAAA,aAAa,EAAE,CAAA;AAChB,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACZ,OAAA;AACH,KAAA;GACD,CAAA;AAED,EAAA,OAAOM,UAAU,CAAA;AACnB,CAAC,CAAA;AAEM,MAAMsB,uBAAuB,GAAGA,CACrC/F,KAAsB,EACtBiC,QAAiB,EACjBU,OAA+C,KAC5B;EACnB,MAAM5C,OAAO,GAAG4C,OAAO,CAACE,cAAc,IAAIE,OAAM,EAAE,CAAA;AAClD,EAAA,MAAM0B,UAAU,GAAGD,+BAA+B,CAACvC,QAAQ,EAAEjC,KAAK,EAAE;AAClE,IAAA,GAAG2C,OAAO;AACVE,IAAAA,cAAc,EAAE9C,OAAO;AACvB6C,IAAAA,iBAAiB,EAAED,OAAO,CAACC,iBAAiB,IAAI7C,OAAAA;AACjD,GAAA,CAAC,CAAA;EAEF,MAAMiG,YAAY,GAAGC,iCAAiB,CAAC;IACrCjG,KAAK;AACLyE,IAAAA,UAAAA;AACD,GAAA,CAAC,CAAA;AAEF,EAAA,OAAOuB,YAAY,CAAA;AACrB;;;;;"}
+
{"version":3,"file":"index.cjs.js","sources":["../src/utils.ts","../src/openai/index.ts","../src/vercel/middleware.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport OpenAIOrignal from 'openai'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n}\n\nexport const getModelParams = (params: ChatCompletionCreateParamsBase & MonitoringParams): Record<string, any> => {\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): Array<{ role: string; content: string }> => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): Array<{ role: string; content: string }> => {\n // Example approach if \"response.content\" holds array of text segments, etc.\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.content ?? []) {\n if (choice?.text) {\n output.push({\n role: 'assistant',\n content: choice.text,\n })\n }\n }\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): Array<{ role: string; content: string }> => {\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.choices ?? []) {\n if (choice.message?.content) {\n output.push({\n role: choice.message.role,\n content: choice.message.content,\n })\n }\n }\n return output\n}\n\nexport const mergeSystemPrompt = (params: ChatCompletionCreateParamsBase & MonitoringParams, provider: string): any => {\n if (provider !== 'anthropic') {\n return params.messages\n }\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = (params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? 
null : input\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: { inputTokens?: number; outputTokens?: number }\n params: ChatCompletionCreateParamsBase & MonitoringParams\n}\n\nexport const sendEventToPosthog = ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n}: SendEventToPosthogParams): void => {\n console.log('sendEventToPosthog', {\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n })\n if (client.capture) {\n client.capture({\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties: {\n $ai_provider: provider,\n $ai_model: model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, input),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, output),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n },\n groups: params.posthogGroups,\n })\n }\n}\n","import OpenAIOrignal from 'openai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { PassThrough } from 'stream'\nimport { mergeSystemPrompt, MonitoringParams, sendEventToPosthog } from '../utils'\n\ntype ChatCompletion = OpenAIOrignal.ChatCompletion\ntype ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming\ntype ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming\nimport type { APIPromise, RequestOptions } from 'openai/core'\nimport type { Stream } from 'openai/streaming'\n\ninterface MonitoringOpenAIConfig {\n apiKey: string\n posthog: PostHog\n baseURL?: string\n}\n\nexport class PostHogOpenAI extends OpenAIOrignal {\n private readonly phClient: PostHog\n\n constructor(config: MonitoringOpenAIConfig) {\n const { posthog, ...openAIConfig } = config\n super(openAIConfig)\n this.phClient = posthog\n this.chat = new WrappedChat(this, this.phClient)\n }\n\n public chat: WrappedChat\n}\n\nexport class WrappedChat extends OpenAIOrignal.Chat {\n constructor(parentClient: PostHogOpenAI, phClient: PostHog) {\n super(parentClient)\n this.completions = new WrappedCompletions(parentClient, phClient)\n }\n\n public completions: WrappedCompletions\n}\n\nexport class WrappedCompletions extends OpenAIOrignal.Chat.Completions {\n private readonly phClient: PostHog\n\n constructor(client: OpenAIOrignal, phClient: PostHog) {\n super(client)\n this.phClient = phClient\n }\n\n // --- Overload #1: Non-streaming\n public create(\n body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion>\n\n // --- Overload #2: Streaming\n public create(\n body: ChatCompletionCreateParamsStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<Stream<ChatCompletionChunk>>\n\n // --- Overload #3: Generic base\n 
public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>\n\n // --- Implementation Signature\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogPrivacyMode = false,\n posthogGroups,\n ...openAIParams\n } = body\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n\n const parentPromise = super.create(openAIParams, options)\n\n if (openAIParams.stream) {\n return parentPromise.then((value) => {\n const passThroughStream = new PassThrough({ objectMode: true })\n let accumulatedContent = ''\n let usage: { inputTokens: number; outputTokens: number } = {\n inputTokens: 0,\n outputTokens: 0,\n }\n if ('tee' in value) {\n const openAIStream = value\n ;(async () => {\n try {\n for await (const chunk of openAIStream) {\n const delta = chunk?.choices?.[0]?.delta?.content ?? ''\n accumulatedContent += delta\n if (chunk.usage) {\n usage = {\n inputTokens: chunk.usage.prompt_tokens ?? 0,\n outputTokens: chunk.usage.completion_tokens ?? 0,\n }\n }\n passThroughStream.write(chunk)\n }\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage,\n })\n passThroughStream.end()\n } catch (error) {\n // error handling\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n })\n passThroughStream.emit('error', error)\n }\n })()\n }\n return passThroughStream as unknown as Stream<ChatCompletionChunk>\n }) as APIPromise<Stream<ChatCompletionChunk>>\n } else {\n const wrappedPromise = parentPromise.then(\n (result) => {\n if ('choices' in result) {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [{ content: result.choices[0].message.content, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage: {\n inputTokens: result.usage?.prompt_tokens ?? 0,\n outputTokens: result.usage?.completion_tokens ?? 0,\n },\n })\n }\n return result\n },\n (error) => {\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? 
'',\n params: body,\n httpStatus: 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n })\n throw error\n }\n ) as APIPromise<ChatCompletion>\n\n return wrappedPromise\n }\n }\n}\n\nexport default PostHogOpenAI\n","import { experimental_wrapLanguageModel as wrapLanguageModel } from 'ai'\nimport type {\n LanguageModelV1,\n Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware,\n LanguageModelV1Prompt,\n LanguageModelV1StreamPart,\n} from 'ai'\nimport { v4 as uuidv4 } from 'uuid'\nimport { PostHog } from 'posthog-node'\nimport { sendEventToPosthog } from '../utils'\n\ninterface CreateInstrumentationMiddlewareOptions {\n posthogDistinctId?: string\n posthogTraceId: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n}\n\ninterface PostHogInput {\n content: string\n role: string\n}\n\nconst mapVercelParams = (params: any): Record<string, any> => {\n return {\n temperature: params.temperature,\n max_tokens: params.maxTokens,\n top_p: params.topP,\n frequency_penalty: params.frequencyPenalty,\n presence_penalty: params.presencePenalty,\n stop: params.stopSequences,\n stream: params.stream,\n }\n}\n\nconst mapVercelPrompt = (prompt: LanguageModelV1Prompt): PostHogInput[] => {\n return prompt.map((p) => {\n let content = ''\n if (Array.isArray(p.content)) {\n content = p.content\n .map((c) => {\n if (c.type === 'text') {\n return c.text\n }\n return ''\n })\n .join('')\n } else {\n content = p.content\n }\n return {\n role: p.role,\n content,\n }\n })\n}\n\nexport const createInstrumentationMiddleware = (\n phClient: PostHog,\n model: LanguageModelV1,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1Middleware => {\n const middleware: LanguageModelV1Middleware = {\n wrapGenerate: async ({ doGenerate, params }) => {\n const startTime = Date.now()\n let mergedParams = {\n ...options,\n ...mapVercelParams(params),\n }\n try {\n const result = await doGenerate()\n const latency = (Date.now() - startTime) / 1000\n\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: [{ content: result.text, role: 'assistant' }],\n latency,\n baseURL: '',\n params: mergedParams as any,\n httpStatus: 200,\n usage: {\n inputTokens: result.usage.promptTokens,\n outputTokens: result.usage.completionTokens,\n },\n })\n\n return result\n } catch (error) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? 
'' : mapVercelPrompt(params.prompt),\n output: [],\n latency: 0,\n baseURL: '',\n params: mergedParams as any,\n httpStatus: 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n })\n throw error\n }\n },\n\n wrapStream: async ({ doStream, params }) => {\n const startTime = Date.now()\n let generatedText = ''\n let usage: { inputTokens?: number; outputTokens?: number } = {}\n let mergedParams = {\n ...options,\n ...mapVercelParams(params),\n }\n try {\n const { stream, ...rest } = await doStream()\n\n const transformStream = new TransformStream<LanguageModelV1StreamPart, LanguageModelV1StreamPart>({\n transform(chunk, controller) {\n if (chunk.type === 'text-delta') {\n generatedText += chunk.textDelta\n }\n if (chunk.type === 'finish') {\n usage = {\n inputTokens: chunk.usage?.promptTokens,\n outputTokens: chunk.usage?.completionTokens,\n }\n }\n controller.enqueue(chunk)\n },\n\n flush() {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: [{ content: generatedText, role: 'assistant' }],\n latency,\n baseURL: '',\n params: mergedParams as any,\n httpStatus: 200,\n usage,\n })\n },\n })\n\n return {\n stream: stream.pipeThrough(transformStream),\n ...rest,\n }\n } catch (error) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: [],\n latency: 0,\n baseURL: '',\n params: mergedParams as any,\n httpStatus: 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n })\n throw error\n }\n },\n }\n\n return middleware\n}\n\nexport const wrapVercelLanguageModel = (\n model: LanguageModelV1,\n phClient: PostHog,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1 => {\n const traceId = options.posthogTraceId ?? uuidv4()\n const middleware = createInstrumentationMiddleware(phClient, model, {\n ...options,\n posthogTraceId: traceId,\n posthogDistinctId: options.posthogDistinctId ?? 
traceId,\n })\n\n const wrappedModel = wrapLanguageModel({\n model,\n middleware,\n })\n\n return wrappedModel\n}\n"],"names":["getModelParams","params","modelParams","paramKeys","key","undefined","mergeSystemPrompt","provider","messages","system","systemMessage","role","content","withPrivacyMode","client","privacyMode","input","privacy_mode","sendEventToPosthog","distinctId","traceId","model","output","latency","baseURL","httpStatus","usage","console","log","capture","event","properties","$ai_provider","$ai_model","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","inputTokens","$ai_output_tokens","outputTokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","groups","posthogGroups","PostHogOpenAI","OpenAIOrignal","constructor","config","posthog","openAIConfig","phClient","chat","WrappedChat","Chat","parentClient","completions","WrappedCompletions","Completions","create","body","options","posthogDistinctId","posthogTraceId","openAIParams","uuidv4","startTime","Date","now","parentPromise","stream","then","value","passThroughStream","PassThrough","objectMode","accumulatedContent","openAIStream","chunk","delta","choices","prompt_tokens","completion_tokens","write","end","error","emit","wrappedPromise","result","message","mapVercelParams","temperature","max_tokens","maxTokens","top_p","topP","frequency_penalty","frequencyPenalty","presence_penalty","presencePenalty","stop","stopSequences","mapVercelPrompt","prompt","map","p","Array","isArray","c","type","text","join","createInstrumentationMiddleware","middleware","wrapGenerate","doGenerate","mergedParams","modelId","promptTokens","completionTokens","wrapStream","doStream","generatedText","rest","transformStream","TransformStream","transform","controller","textDelta","enqueue","flush","pipeThrough","wrapVercelLanguageModel","wrappedModel","wrapLanguageModel"],"mappings":";;;;;;;;;;;;;AAaO,MAAMA,cAAc,GAAIC,MAAyD,IAAyB;EAC/G,MAAMC,WAAW,GAAwB,EAAE,CAAA;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH,CAAA;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAc,CAACG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAc,CAACG,GAAG,CAAC,CAAA;AACxC,KAAA;AACF,GAAA;AACD,EAAA,OAAOF,WAAW,CAAA;AACpB,CAAC,CAAA;AA4CM,MAAMI,iBAAiB,GAAGA,CAACL,MAAyD,EAAEM,QAAgB,KAAS;EACpH,IAAIA,QAAQ,KAAK,WAAW,EAAE;IAC5B,OAAON,MAAM,CAACO,QAAQ,CAAA;AACvB,GAAA;AACD,EAAA,MAAMA,QAAQ,GAAGP,MAAM,CAACO,QAAQ,IAAI,EAAE,CAAA;AACtC,EAAA,IAAI,CAAEP,MAAc,CAACQ,MAAM,EAAE;AAC3B,IAAA,OAAOD,QAAQ,CAAA;AAChB,GAAA;AACD,EAAA,MAAME,aAAa,GAAIT,MAAc,CAACQ,MAAM,CAAA;AAC5C,EAAA,OAAO,CAAC;AAAEE,IAAAA,IAAI,EAAE,QAAQ;AAAEC,IAAAA,OAAO,EAAEF,aAAAA;GAAe,EAAE,GAAGF,QAAQ,CAAC,CAAA;AAClE,CAAC,CAAA;AAEM,MAAMK,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAS;EACxF,OAAQF,MAAc,CAACG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK,CAAA;AACnE,CAAC,CAAA;AAiBM,MAAME,kBAAkB,GAAGA,CAAC;EACjCJ,MAAM;EACNK,UAAU;EACVC,OAAO;EACPC,KAAK;EACLd,QAAQ;EACRS,KAAK;EACLM,MAAM;EACNC,OAAO;EACPC,OAAO;EACPvB,MAAM;AACNwB,EAAAA,UAAU,GAAG,GAAG;AAChBC,EAAAA,KAAK,GAAG,EAAE;AAAA,CACe,KAAU;AACnCC,EAAAA,OAAO,CAACC,GAAG,CAAC,oBAAoB,EAAE;IAChCd,MAAM;IACNK,UAAU;IACVC,OAAO;IACPC,KAAK;IACLd,QAAQ;IACRS,KAAK;IACLM,MAAM;IACNC,OAAO;IACPC,OAAO;AACPvB,IAAAA,MAAAA;AACD,GAAA,CAAC,CAAA;EACF,IAAIa,MAAM,CAACe,OAAO,EAAE;IAClBf,MAAM,CAACe,OAAO,CAAC;MACbV,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCU,MAAAA,KAAK,EAAE,gBAAgB;AACvBC,MAAAA,UAAU,EAAE;AACVC,QAA
AA,YAAY,EAAEzB,QAAQ;AACtB0B,QAAAA,SAAS,EAAEZ,KAAK;AAChBa,QAAAA,oBAAoB,EAAElC,cAAc,CAACC,MAAM,CAAC;AAC5CkC,QAAAA,SAAS,EAAEtB,eAAe,CAACC,MAAM,EAAEb,MAAM,CAACmC,kBAAkB,IAAI,KAAK,EAAEpB,KAAK,CAAC;AAC7EqB,QAAAA,kBAAkB,EAAExB,eAAe,CAACC,MAAM,EAAEb,MAAM,CAACmC,kBAAkB,IAAI,KAAK,EAAEd,MAAM,CAAC;AACvFgB,QAAAA,eAAe,EAAEb,UAAU;AAC3Bc,QAAAA,gBAAgB,EAAEb,KAAK,CAACc,WAAW,IAAI,CAAC;AACxCC,QAAAA,iBAAiB,EAAEf,KAAK,CAACgB,YAAY,IAAI,CAAC;AAC1CC,QAAAA,WAAW,EAAEpB,OAAO;AACpBqB,QAAAA,YAAY,EAAExB,OAAO;AACrByB,QAAAA,YAAY,EAAErB,OAAO;QACrB,GAAGvB,MAAM,CAAC6C,iBAAiB;AAC3B,QAAA,IAAI3B,UAAU,GAAG,EAAE,GAAG;AAAE4B,UAAAA,uBAAuB,EAAE,KAAA;SAAO,CAAA;OACzD;MACDC,MAAM,EAAE/C,MAAM,CAACgD,aAAAA;AAChB,KAAA,CAAC,CAAA;AACH,GAAA;AACH,CAAC;;ACzIK,MAAOC,aAAc,SAAQC,iCAAa,CAAA;EAG9CC,WAAAA,CAAYC,MAA8B,EAAA;IACxC,MAAM;MAAEC,OAAO;MAAE,GAAGC,YAAAA;AAAc,KAAA,GAAGF,MAAM,CAAA;IAC3C,KAAK,CAACE,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,QAAQ,GAAGF,OAAO,CAAA;IACvB,IAAI,CAACG,IAAI,GAAG,IAAIC,WAAW,CAAC,IAAI,EAAE,IAAI,CAACF,QAAQ,CAAC,CAAA;AAClD,GAAA;AAGD,CAAA;AAEY,MAAAE,WAAY,SAAQP,iCAAa,CAACQ,IAAI,CAAA;AACjDP,EAAAA,WAAYA,CAAAQ,YAA2B,EAAEJ,QAAiB,EAAA;IACxD,KAAK,CAACI,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,WAAW,GAAG,IAAIC,kBAAkB,CAACF,YAAY,EAAEJ,QAAQ,CAAC,CAAA;AACnE,GAAA;AAGD,CAAA;MAEYM,kBAAmB,SAAQX,iCAAa,CAACQ,IAAI,CAACI,WAAW,CAAA;AAGpEX,EAAAA,WAAYA,CAAAtC,MAAqB,EAAE0C,QAAiB,EAAA;IAClD,KAAK,CAAC1C,MAAM,CAAC,CAAA;IACb,IAAI,CAAC0C,QAAQ,GAAGA,QAAQ,CAAA;AAC1B,GAAA;AAoBA;AACOQ,EAAAA,MAAMA,CACXC,IAAuD,EACvDC,OAAwB,EAAA;IAExB,MAAM;MACJC,iBAAiB;MACjBC,cAAc;MACdtB,iBAAiB;AACjBV,MAAAA,kBAAkB,GAAG,KAAK;MAC1Ba,aAAa;MACb,GAAGoB,YAAAA;AACJ,KAAA,GAAGJ,IAAI,CAAA;AAER,IAAA,MAAM7C,OAAO,GAAGgD,cAAc,IAAIE,OAAM,EAAE,CAAA;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;IAE5B,MAAMC,aAAa,GAAG,KAAK,CAACV,MAAM,CAACK,YAAY,EAAEH,OAAO,CAAC,CAAA;IAEzD,IAAIG,YAAY,CAACM,MAAM,EAAE;AACvB,MAAA,OAAOD,aAAa,CAACE,IAAI,CAAEC,KAAK,IAAI;AAClC,QAAA,MAAMC,iBAAiB,GAAG,IAAIC,kBAAW,CAAC;AAAEC,UAAAA,UAAU,EAAE,IAAA;AAAM,SAAA,CAAC,CAAA;QAC/D,IAAIC,kBAAkB,GAAG,EAAE,CAAA;AAC3B,QAAA,IAAIvD,KAAK,GAAkD;AACzDc,UAAAA,WAAW,EAAE,CAAC;AACdE,UAAAA,YAAY,EAAE,CAAA;SACf,CAAA;QACD,IAAI,KAAK,IAAImC,KAAK,EAAE;UAClB,MAAMK,YAAY,GAAGL,KAAK,CAAA;AACzB,UAAA,CAAC,YAAW;YACX,IAAI;AACF,cAAA,WAAW,MAAMM,KAAK,IAAID,YAAY,EAAE;AACtC,gBAAA,MAAME,KAAK,GAAGD,KAAK,EAAEE,OAAO,GAAG,CAAC,CAAC,EAAED,KAAK,EAAExE,OAAO,IAAI,EAAE,CAAA;AACvDqE,gBAAAA,kBAAkB,IAAIG,KAAK,CAAA;gBAC3B,IAAID,KAAK,CAACzD,KAAK,EAAE;AACfA,kBAAAA,KAAK,GAAG;AACNc,oBAAAA,WAAW,EAAE2C,KAAK,CAACzD,KAAK,CAAC4D,aAAa,IAAI,CAAC;AAC3C5C,oBAAAA,YAAY,EAAEyC,KAAK,CAACzD,KAAK,CAAC6D,iBAAiB,IAAI,CAAA;mBAChD,CAAA;AACF,iBAAA;AACDT,gBAAAA,iBAAiB,CAACU,KAAK,CAACL,KAAK,CAAC,CAAA;AAC/B,eAAA;cACD,MAAM5D,OAAO,GAAG,CAACiD,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CrD,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAAC0C,QAAQ;gBACrBrC,UAAU,EAAEgD,iBAAiB,IAAI/C,OAAO;gBACxCA,OAAO;gBACPC,KAAK,EAAEgD,YAAY,CAAChD,KAAK;AACzBd,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBS,KAAK,EAAEoB,kBAAkB,GAAG,EAAE,GAAG9B,iBAAiB,CAAC+D,YAAY,EAAE,QAAQ,CAAC;AAC1E/C,gBAAAA,MAAM,EAAE,CAAC;AAAEV,kBAAAA,OAAO,EAAEqE,kBAAkB;AAAEtE,kBAAAA,IAAI,EAAE,WAAA;AAAW,iBAAE,CAAC;gBAC5DY,OAAO;AACPC,gBAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,gBAAAA,MAAM,EAAEgE,IAAI;AACZxC,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAAA;AACD,eAAA,CAAC,CAAA;cACFoD,iBAAiB,CAACW,GAAG,EAAE,CAAA;aACxB,CAAC,OAAOC,KAAK,EAAE;AACd;AACAxE,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAAC0C,QAAQ;gBACrBrC,UAAU,EAAEgD,iBAAiB,IAAI/C,OAAO;gBACxCA,OAAO;gBACPC,KAAK,EAAEgD,YAAY,CAAChD,KAAK;AACzBd,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBS,KAAK,EAAEoB,kBAAkB,GAAG,EAAE,GAAG9B,iBAAiB,CAAC+D,YAAY,EAAE,QAAQ,CAAC;
AAC1E/C,gBAAAA,MAAM,EAAE,EAAE;AACVC,gBAAAA,OAAO,EAAE,CAAC;AACVC,gBAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,gBAAAA,MAAM,EAAEgE,IAAI;AACZxC,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAK,EAAE;AACLc,kBAAAA,WAAW,EAAE,CAAC;AACdE,kBAAAA,YAAY,EAAE,CAAA;AACf,iBAAA;AACF,eAAA,CAAC,CAAA;AACFoC,cAAAA,iBAAiB,CAACa,IAAI,CAAC,OAAO,EAAED,KAAK,CAAC,CAAA;AACvC,aAAA;AACH,WAAC,GAAG,CAAA;AACL,SAAA;AACD,QAAA,OAAOZ,iBAA2D,CAAA;AACpE,OAAC,CAA4C,CAAA;AAC9C,KAAA,MAAM;AACL,MAAA,MAAMc,cAAc,GAAGlB,aAAa,CAACE,IAAI,CACtCiB,MAAM,IAAI;QACT,IAAI,SAAS,IAAIA,MAAM,EAAE;UACvB,MAAMtE,OAAO,GAAG,CAACiD,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CrD,UAAAA,kBAAkB,CAAC;YACjBJ,MAAM,EAAE,IAAI,CAAC0C,QAAQ;YACrBrC,UAAU,EAAEgD,iBAAiB,IAAI/C,OAAO;YACxCA,OAAO;YACPC,KAAK,EAAEgD,YAAY,CAAChD,KAAK;AACzBd,YAAAA,QAAQ,EAAE,QAAQ;YAClBS,KAAK,EAAEoB,kBAAkB,GAAG,EAAE,GAAG9B,iBAAiB,CAAC+D,YAAY,EAAE,QAAQ,CAAC;AAC1E/C,YAAAA,MAAM,EAAE,CAAC;cAAEV,OAAO,EAAEiF,MAAM,CAACR,OAAO,CAAC,CAAC,CAAC,CAACS,OAAO,CAAClF,OAAO;AAAED,cAAAA,IAAI,EAAE,WAAA;aAAa,CAAC;YAC3EY,OAAO;AACPC,YAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,YAAAA,MAAM,EAAEgE,IAAI;AACZxC,YAAAA,UAAU,EAAE,GAAG;AACfC,YAAAA,KAAK,EAAE;AACLc,cAAAA,WAAW,EAAEqD,MAAM,CAACnE,KAAK,EAAE4D,aAAa,IAAI,CAAC;AAC7C5C,cAAAA,YAAY,EAAEmD,MAAM,CAACnE,KAAK,EAAE6D,iBAAiB,IAAI,CAAA;AAClD,aAAA;AACF,WAAA,CAAC,CAAA;AACH,SAAA;AACD,QAAA,OAAOM,MAAM,CAAA;OACd,EACAH,KAAK,IAAI;AACRxE,QAAAA,kBAAkB,CAAC;UACjBJ,MAAM,EAAE,IAAI,CAAC0C,QAAQ;UACrBrC,UAAU,EAAEgD,iBAAiB,IAAI/C,OAAO;UACxCA,OAAO;UACPC,KAAK,EAAEgD,YAAY,CAAChD,KAAK;AACzBd,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEoB,kBAAkB,GAAG,EAAE,GAAG9B,iBAAiB,CAAC+D,YAAY,EAAE,QAAQ,CAAC;AAC1E/C,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,UAAAA,MAAM,EAAEgE,IAAI;AACZxC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLc,YAAAA,WAAW,EAAE,CAAC;AACdE,YAAAA,YAAY,EAAE,CAAA;AACf,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACb,OAAC,CAC4B,CAAA;AAE/B,MAAA,OAAOE,cAAc,CAAA;AACtB,KAAA;AACH,GAAA;AACD;;ACjLD,MAAMG,eAAe,GAAI9F,MAAW,IAAyB;EAC3D,OAAO;IACL+F,WAAW,EAAE/F,MAAM,CAAC+F,WAAW;IAC/BC,UAAU,EAAEhG,MAAM,CAACiG,SAAS;IAC5BC,KAAK,EAAElG,MAAM,CAACmG,IAAI;IAClBC,iBAAiB,EAAEpG,MAAM,CAACqG,gBAAgB;IAC1CC,gBAAgB,EAAEtG,MAAM,CAACuG,eAAe;IACxCC,IAAI,EAAExG,MAAM,CAACyG,aAAa;IAC1B/B,MAAM,EAAE1E,MAAM,CAAC0E,MAAAA;GAChB,CAAA;AACH,CAAC,CAAA;AAED,MAAMgC,eAAe,GAAIC,MAA6B,IAAoB;AACxE,EAAA,OAAOA,MAAM,CAACC,GAAG,CAAEC,CAAC,IAAI;IACtB,IAAIlG,OAAO,GAAG,EAAE,CAAA;IAChB,IAAImG,KAAK,CAACC,OAAO,CAACF,CAAC,CAAClG,OAAO,CAAC,EAAE;MAC5BA,OAAO,GAAGkG,CAAC,CAAClG,OAAO,CAChBiG,GAAG,CAAEI,CAAC,IAAI;AACT,QAAA,IAAIA,CAAC,CAACC,IAAI,KAAK,MAAM,EAAE;UACrB,OAAOD,CAAC,CAACE,IAAI,CAAA;AACd,SAAA;AACD,QAAA,OAAO,EAAE,CAAA;AACX,OAAC,CAAC,CACDC,IAAI,CAAC,EAAE,CAAC,CAAA;AACZ,KAAA,MAAM;MACLxG,OAAO,GAAGkG,CAAC,CAAClG,OAAO,CAAA;AACpB,KAAA;IACD,OAAO;MACLD,IAAI,EAAEmG,CAAC,CAACnG,IAAI;AACZC,MAAAA,OAAAA;KACD,CAAA;AACH,GAAC,CAAC,CAAA;AACJ,CAAC,CAAA;AAEM,MAAMyG,+BAA+B,GAAGA,CAC7C7D,QAAiB,EACjBnC,KAAsB,EACtB6C,OAA+C,KAClB;AAC7B,EAAA,MAAMoD,UAAU,GAA8B;IAC5CC,YAAY,EAAE,OAAO;MAAEC,UAAU;AAAEvH,MAAAA,MAAAA;AAAQ,KAAA,KAAI;AAC7C,MAAA,MAAMsE,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;AAC5B,MAAA,IAAIgD,YAAY,GAAG;AACjB,QAAA,GAAGvD,OAAO;QACV,GAAG6B,eAAe,CAAC9F,MAAM,CAAA;OAC1B,CAAA;MACD,IAAI;AACF,QAAA,MAAM4F,MAAM,GAAG,MAAM2B,UAAU,EAAE,CAAA;QACjC,MAAMjG,OAAO,GAAG,CAACiD,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAE/CrD,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAE0C,QAAQ;UAChBrC,UAAU,EAAE+C,OAAO,CAACC,iBAAiB;UACrC/C,OAAO,EAAE8C,OAAO,CAACE,cAAc;UAC/B/C,KAAK,EAAEA,KAAK,CAACqG,OAAO;AACpBnH,UAAAA,QAAQ,EAAE,QAAQ;AAClBS
,UAAAA,KAAK,EAAEkD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGuE,eAAe,CAAC1G,MAAM,CAAC2G,MAAM,CAAC;AACvEtF,UAAAA,MAAM,EAAE,CAAC;YAAEV,OAAO,EAAEiF,MAAM,CAACsB,IAAI;AAAExG,YAAAA,IAAI,EAAE,WAAA;WAAa,CAAC;UACrDY,OAAO;AACPC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAEwH,YAAmB;AAC3BhG,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLc,YAAAA,WAAW,EAAEqD,MAAM,CAACnE,KAAK,CAACiG,YAAY;AACtCjF,YAAAA,YAAY,EAAEmD,MAAM,CAACnE,KAAK,CAACkG,gBAAAA;AAC5B,WAAA;AACF,SAAA,CAAC,CAAA;AAEF,QAAA,OAAO/B,MAAM,CAAA;OACd,CAAC,OAAOH,KAAK,EAAE;AACdxE,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAE0C,QAAQ;UAChBrC,UAAU,EAAE+C,OAAO,CAACC,iBAAiB;UACrC/C,OAAO,EAAE8C,OAAO,CAACE,cAAc;UAC/B/C,KAAK,EAAEA,KAAK,CAACqG,OAAO;AACpBnH,UAAAA,QAAQ,EAAE,QAAQ;AAClBS,UAAAA,KAAK,EAAEkD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGuE,eAAe,CAAC1G,MAAM,CAAC2G,MAAM,CAAC;AACvEtF,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAEwH,YAAmB;AAC3BhG,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLc,YAAAA,WAAW,EAAE,CAAC;AACdE,YAAAA,YAAY,EAAE,CAAA;AACf,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACZ,OAAA;KACF;IAEDmC,UAAU,EAAE,OAAO;MAAEC,QAAQ;AAAE7H,MAAAA,MAAAA;AAAQ,KAAA,KAAI;AACzC,MAAA,MAAMsE,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;MAC5B,IAAIsD,aAAa,GAAG,EAAE,CAAA;MACtB,IAAIrG,KAAK,GAAoD,EAAE,CAAA;AAC/D,MAAA,IAAI+F,YAAY,GAAG;AACjB,QAAA,GAAGvD,OAAO;QACV,GAAG6B,eAAe,CAAC9F,MAAM,CAAA;OAC1B,CAAA;MACD,IAAI;QACF,MAAM;UAAE0E,MAAM;UAAE,GAAGqD,IAAAA;SAAM,GAAG,MAAMF,QAAQ,EAAE,CAAA;AAE5C,QAAA,MAAMG,eAAe,GAAG,IAAIC,eAAe,CAAuD;AAChGC,UAAAA,SAASA,CAAChD,KAAK,EAAEiD,UAAU,EAAA;AACzB,YAAA,IAAIjD,KAAK,CAAC+B,IAAI,KAAK,YAAY,EAAE;cAC/Ba,aAAa,IAAI5C,KAAK,CAACkD,SAAS,CAAA;AACjC,aAAA;AACD,YAAA,IAAIlD,KAAK,CAAC+B,IAAI,KAAK,QAAQ,EAAE;AAC3BxF,cAAAA,KAAK,GAAG;AACNc,gBAAAA,WAAW,EAAE2C,KAAK,CAACzD,KAAK,EAAEiG,YAAY;AACtCjF,gBAAAA,YAAY,EAAEyC,KAAK,CAACzD,KAAK,EAAEkG,gBAAAA;eAC5B,CAAA;AACF,aAAA;AACDQ,YAAAA,UAAU,CAACE,OAAO,CAACnD,KAAK,CAAC,CAAA;WAC1B;AAEDoD,UAAAA,KAAKA,GAAA;YACH,MAAMhH,OAAO,GAAG,CAACiD,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CrD,YAAAA,kBAAkB,CAAC;AACjBJ,cAAAA,MAAM,EAAE0C,QAAQ;cAChBrC,UAAU,EAAE+C,OAAO,CAACC,iBAAiB;cACrC/C,OAAO,EAAE8C,OAAO,CAACE,cAAc;cAC/B/C,KAAK,EAAEA,KAAK,CAACqG,OAAO;AACpBnH,cAAAA,QAAQ,EAAE,QAAQ;AAClBS,cAAAA,KAAK,EAAEkD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGuE,eAAe,CAAC1G,MAAM,CAAC2G,MAAM,CAAC;AACvEtF,cAAAA,MAAM,EAAE,CAAC;AAAEV,gBAAAA,OAAO,EAAEmH,aAAa;AAAEpH,gBAAAA,IAAI,EAAE,WAAA;AAAW,eAAE,CAAC;cACvDY,OAAO;AACPC,cAAAA,OAAO,EAAE,EAAE;AACXvB,cAAAA,MAAM,EAAEwH,YAAmB;AAC3BhG,cAAAA,UAAU,EAAE,GAAG;AACfC,cAAAA,KAAAA;AACD,aAAA,CAAC,CAAA;AACJ,WAAA;AACD,SAAA,CAAC,CAAA;QAEF,OAAO;AACLiD,UAAAA,MAAM,EAAEA,MAAM,CAAC6D,WAAW,CAACP,eAAe,CAAC;UAC3C,GAAGD,IAAAA;SACJ,CAAA;OACF,CAAC,OAAOtC,KAAK,EAAE;AACdxE,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAE0C,QAAQ;UAChBrC,UAAU,EAAE+C,OAAO,CAACC,iBAAiB;UACrC/C,OAAO,EAAE8C,OAAO,CAACE,cAAc;UAC/B/C,KAAK,EAAEA,KAAK,CAACqG,OAAO;AACpBnH,UAAAA,QAAQ,EAAE,QAAQ;AAClBS,UAAAA,KAAK,EAAEkD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGuE,eAAe,CAAC1G,MAAM,CAAC2G,MAAM,CAAC;AACvEtF,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAEwH,YAAmB;AAC3BhG,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLc,YAAAA,WAAW,EAAE,CAAC;AACdE,YAAAA,YAAY,EAAE,CAAA;AACf,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACZ,OAAA;AACH,KAAA;GACD,CAAA;AAED,EAAA,OAAO4B,UAAU,CAAA;AACnB,CAAC,CAAA;AAEM,MAAMmB,uBAAuB,GAAGA,CACrCpH,KAAsB,EACtBmC,QAAiB,EACjBU,OAA+C,KAC5B;EACnB,MAAM9C,OAAO,GAAG8C,OAAO,CAACE,cAAc,IAAIE,OAAM,EAAE,CAAA;AAClD,EAAA,MAAMgD,UAAU,GAAGD,+BAA+B,CAAC7D,QAAQ,EAAEnC,KAAK,EAAE;AAClE,IAAA,GAAG6C,OAAO;AACVE
,IAAAA,cAAc,EAAEhD,OAAO;AACvB+C,IAAAA,iBAAiB,EAAED,OAAO,CAACC,iBAAiB,IAAI/C,OAAAA;AACjD,GAAA,CAAC,CAAA;EAEF,MAAMsH,YAAY,GAAGC,iCAAiB,CAAC;IACrCtH,KAAK;AACLiG,IAAAA,UAAAA;AACD,GAAA,CAAC,CAAA;AAEF,EAAA,OAAOoB,YAAY,CAAA;AACrB;;;;;"}
package/lib/index.d.ts
CHANGED
@@ -45,7 +45,7 @@ interface CreateInstrumentationMiddlewareOptions {
     posthogTraceId: string;
     posthogProperties?: Record<string, any>;
     posthogPrivacyMode?: boolean;
-    posthogGroups?: string
+    posthogGroups?: Record<string, any>;
 }
 declare const wrapVercelLanguageModel: (model: LanguageModelV1, phClient: PostHog, options: CreateInstrumentationMiddlewareOptions) => LanguageModelV1;
 
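With `posthogGroups` now typed as a record, the Vercel middleware takes the same group map as the OpenAI wrapper. A minimal usage sketch under stated assumptions: the `@ai-sdk/openai` provider, the model name, and the group key are illustrative, and `wrapVercelLanguageModel` is assumed to be exported from the package root as the declaration above suggests.

```typescript
import { PostHog } from 'posthog-node'
import { generateText } from 'ai'
import { openai } from '@ai-sdk/openai' // assumed provider package, not part of @posthog/ai
import { wrapVercelLanguageModel } from '@posthog/ai'

const phClient = new PostHog('<YOUR_PROJECT_API_KEY>', { host: 'https://us.i.posthog.com' })

// posthogGroups is now a Record<string, any> rather than a string
const model = wrapVercelLanguageModel(openai('gpt-4o-mini'), phClient, {
  posthogDistinctId: 'user_123',
  posthogTraceId: 'trace_123',
  posthogGroups: { company: 'company_id_in_your_db' },
})

const { text } = await generateText({ model, prompt: 'Tell me a fun fact about hedgehogs' })
console.log(text)

await phClient.shutdown()
```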
package/lib/index.esm.js
CHANGED
@@ -44,6 +44,18 @@ const sendEventToPosthog = ({
   httpStatus = 200,
   usage = {}
 }) => {
+  console.log('sendEventToPosthog', {
+    client,
+    distinctId,
+    traceId,
+    model,
+    provider,
+    input,
+    output,
+    latency,
+    baseURL,
+    params
+  });
   if (client.capture) {
     client.capture({
       distinctId: distinctId ?? traceId,
@@ -55,8 +67,8 @@ const sendEventToPosthog = ({
         $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, input),
         $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, output),
         $ai_http_status: httpStatus,
-        $ai_input_tokens: usage.input_tokens ?? 0,
-        $ai_output_tokens: usage.output_tokens ?? 0,
+        $ai_input_tokens: usage.inputTokens ?? 0,
+        $ai_output_tokens: usage.outputTokens ?? 0,
         $ai_latency: latency,
         $ai_trace_id: traceId,
         $ai_base_url: baseURL,
@@ -112,8 +124,8 @@ class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
       });
       let accumulatedContent = '';
       let usage = {
-        input_tokens: 0,
-        output_tokens: 0
+        inputTokens: 0,
+        outputTokens: 0
       };
       if ('tee' in value) {
         const openAIStream = value;
@@ -124,8 +136,8 @@ class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
             accumulatedContent += delta;
             if (chunk.usage) {
               usage = {
-                input_tokens: chunk.usage.prompt_tokens ?? 0,
-                output_tokens: chunk.usage.completion_tokens ?? 0
+                inputTokens: chunk.usage.prompt_tokens ?? 0,
+                outputTokens: chunk.usage.completion_tokens ?? 0
               };
             }
             passThroughStream.write(chunk);
@@ -164,8 +176,8 @@ class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
             params: body,
             httpStatus: 500,
             usage: {
-              input_tokens: 0,
-              output_tokens: 0
+              inputTokens: 0,
+              outputTokens: 0
             }
           });
           passThroughStream.emit('error', error);
@@ -194,8 +206,8 @@ class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
           params: body,
           httpStatus: 200,
           usage: {
-            input_tokens: result.usage?.prompt_tokens ?? 0,
-            output_tokens: result.usage?.completion_tokens ?? 0
+            inputTokens: result.usage?.prompt_tokens ?? 0,
+            outputTokens: result.usage?.completion_tokens ?? 0
           }
         });
       }
@@ -214,8 +226,8 @@ class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
         params: body,
         httpStatus: 500,
         usage: {
-          input_tokens: 0,
-          output_tokens: 0
+          inputTokens: 0,
+          outputTokens: 0
         }
       });
       throw error;
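The hunks above switch the wrapper-internal `usage` object from `input_tokens`/`output_tokens` to `inputTokens`/`outputTokens`; the captured `$ai_input_tokens`/`$ai_output_tokens` properties keep their names. A small standalone sketch of the mapping now applied to an OpenAI chat completion response (`toWrapperUsage` is a hypothetical helper written for illustration, not part of the package):

```typescript
// OpenAI reports usage in snake_case; the wrapper now accumulates it in camelCase.
interface OpenAIUsage {
  prompt_tokens?: number
  completion_tokens?: number
}

const toWrapperUsage = (usage?: OpenAIUsage) => ({
  inputTokens: usage?.prompt_tokens ?? 0,
  outputTokens: usage?.completion_tokens ?? 0,
})

console.log(toWrapperUsage({ prompt_tokens: 12, completion_tokens: 48 }))
// -> { inputTokens: 12, outputTokens: 48 }, captured as $ai_input_tokens: 12 / $ai_output_tokens: 48
```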
@@ -225,6 +237,36 @@ class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
   }
 }
 
+const mapVercelParams = params => {
+  return {
+    temperature: params.temperature,
+    max_tokens: params.maxTokens,
+    top_p: params.topP,
+    frequency_penalty: params.frequencyPenalty,
+    presence_penalty: params.presencePenalty,
+    stop: params.stopSequences,
+    stream: params.stream
+  };
+};
+const mapVercelPrompt = prompt => {
+  return prompt.map(p => {
+    let content = '';
+    if (Array.isArray(p.content)) {
+      content = p.content.map(c => {
+        if (c.type === 'text') {
+          return c.text;
+        }
+        return '';
+      }).join('');
+    } else {
+      content = p.content;
+    }
+    return {
+      role: p.role,
+      content
+    };
+  });
+};
 const createInstrumentationMiddleware = (phClient, model, options) => {
   const middleware = {
     wrapGenerate: async ({
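The two helpers added above normalize Vercel AI SDK calls before capture: `mapVercelParams` renames sampling options to the snake_case keys the existing `getModelParams` whitelist understands, and `mapVercelPrompt` flattens multi-part message content to plain text (non-text parts become empty strings). An illustrative input/output sketch with made-up values:

```typescript
// A made-up call shaped like a Vercel AI SDK request (not taken from the package).
const params = {
  temperature: 0.7,
  maxTokens: 256,
  prompt: [
    { role: 'system', content: 'Be brief.' },
    {
      role: 'user',
      content: [
        { type: 'text', text: 'Fun fact about hedgehogs?' },
        { type: 'image', image: 'https://example.com/hedgehog.png' }, // non-text part, dropped
      ],
    },
  ],
}

// mapVercelParams(params)
// -> { temperature: 0.7, max_tokens: 256, top_p: undefined, ..., stream: undefined }

// mapVercelPrompt(params.prompt)
// -> [
//      { role: 'system', content: 'Be brief.' },
//      { role: 'user', content: 'Fun fact about hedgehogs?' },
//    ]
```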
@@ -232,6 +274,10 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       params
     }) => {
       const startTime = Date.now();
+      let mergedParams = {
+        ...options,
+        ...mapVercelParams(params)
+      };
       try {
         const result = await doGenerate();
         const latency = (Date.now() - startTime) / 1000;
@@ -241,20 +287,18 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           traceId: options.posthogTraceId,
           model: model.modelId,
           provider: 'vercel',
-          input: options.posthogPrivacyMode ? '' : params.prompt,
+          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
           output: [{
             content: result.text,
             role: 'assistant'
           }],
           latency,
           baseURL: '',
-          params: {
-            posthog_properties: options
-          },
+          params: mergedParams,
           httpStatus: 200,
           usage: {
-            input_tokens: result.usage.promptTokens,
-            output_tokens: result.usage.completionTokens
+            inputTokens: result.usage.promptTokens,
+            outputTokens: result.usage.completionTokens
           }
         });
         return result;
@@ -265,17 +309,15 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           traceId: options.posthogTraceId,
           model: model.modelId,
           provider: 'vercel',
-          input: options.posthogPrivacyMode ? '' : params.prompt,
+          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
           output: [],
           latency: 0,
           baseURL: '',
-          params: {
-            posthog_properties: options
-          },
+          params: mergedParams,
           httpStatus: 500,
           usage: {
-            input_tokens: 0,
-            output_tokens: 0
+            inputTokens: 0,
+            outputTokens: 0
           }
         });
         throw error;
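Passing `params: mergedParams` means the PostHog options and the mapped sampling settings now travel in a single object: `getModelParams` picks the defined, whitelisted keys into `$ai_model_parameters`, while `posthogProperties`, `posthogGroups`, and the privacy flag are read from the same object. Roughly, for a call with `temperature: 0.2` and `maxTokens: 100` (values illustrative, sketch only):

```typescript
// Shape of mergedParams inside wrapGenerate for that call (illustrative, not package code):
const mergedParams = {
  posthogTraceId: 'trace_123', // from the middleware options
  temperature: 0.2,            // from mapVercelParams(params)
  max_tokens: 100,
  top_p: undefined,
  frequency_penalty: undefined,
  presence_penalty: undefined,
  stop: undefined,
  stream: undefined,
}
// getModelParams(mergedParams) keeps only defined, whitelisted keys:
// -> $ai_model_parameters = { temperature: 0.2, max_tokens: 100 }
```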
@@ -288,6 +330,10 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       const startTime = Date.now();
       let generatedText = '';
       let usage = {};
+      let mergedParams = {
+        ...options,
+        ...mapVercelParams(params)
+      };
       try {
         const {
           stream,
@@ -300,8 +346,8 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
             }
             if (chunk.type === 'finish') {
               usage = {
-                input_tokens: chunk.usage?.promptTokens,
-                output_tokens: chunk.usage?.completionTokens
+                inputTokens: chunk.usage?.promptTokens,
+                outputTokens: chunk.usage?.completionTokens
               };
             }
             controller.enqueue(chunk);
@@ -314,16 +360,14 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
             traceId: options.posthogTraceId,
             model: model.modelId,
             provider: 'vercel',
-            input: options.posthogPrivacyMode ? '' : params.prompt,
+            input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
             output: [{
               content: generatedText,
               role: 'assistant'
             }],
             latency,
             baseURL: '',
-            params: {
-              posthog_properties: options
-            },
+            params: mergedParams,
             httpStatus: 200,
             usage
           });
@@ -340,17 +384,15 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           traceId: options.posthogTraceId,
           model: model.modelId,
           provider: 'vercel',
-          input: options.posthogPrivacyMode ? '' : params.prompt,
+          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
           output: [],
           latency: 0,
           baseURL: '',
-          params: {
-            posthog_properties: options
-          },
+          params: mergedParams,
           httpStatus: 500,
           usage: {
-            input_tokens: 0,
-            output_tokens: 0
+            inputTokens: 0,
+            outputTokens: 0
           }
         });
         throw error;
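`wrapStream` mirrors `wrapGenerate`: the flattened prompt and merged parameters are reported when the stream flushes, with token counts read from the `finish` chunk. A hedged end-to-end sketch using the Vercel AI SDK's `streamText`; the provider import and model name are assumptions outside this package:

```typescript
import { PostHog } from 'posthog-node'
import { streamText } from 'ai'
import { openai } from '@ai-sdk/openai' // assumed provider package, not part of @posthog/ai
import { wrapVercelLanguageModel } from '@posthog/ai'

const phClient = new PostHog('<YOUR_PROJECT_API_KEY>', { host: 'https://us.i.posthog.com' })

const model = wrapVercelLanguageModel(openai('gpt-4o-mini'), phClient, {
  posthogTraceId: 'trace_456',
  posthogPrivacyMode: false,
})

const { textStream } = await streamText({ model, prompt: 'Tell me a fun fact about hedgehogs' })
for await (const part of textStream) {
  process.stdout.write(part)
}
// The $ai_generation event (latency, prompt, $ai_input_tokens / $ai_output_tokens from the
// finish chunk) is captured when the wrapped stream completes.

await phClient.shutdown()
```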
package/lib/index.esm.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.esm.js","sources":["../src/utils.ts","../src/openai/index.ts","../src/vercel/middleware.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport OpenAIOrignal from 'openai'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n}\n\nexport const getModelParams = (params: ChatCompletionCreateParamsBase & MonitoringParams): Record<string, any> => {\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\nexport const getUsage = (response: any, provider: string): { input_tokens: number; output_tokens: number } => {\n if (!response?.usage) {\n return { input_tokens: 0, output_tokens: 0 }\n }\n\n if (provider === 'anthropic') {\n return {\n input_tokens: response.usage.input_tokens ?? 0,\n output_tokens: response.usage.output_tokens ?? 0,\n }\n } else if (provider === 'openai') {\n return {\n input_tokens: response.usage.prompt_tokens ?? 0,\n output_tokens: response.usage.completion_tokens ?? 0,\n }\n }\n\n return { input_tokens: 0, output_tokens: 0 }\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): Array<{ role: string; content: string }> => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): Array<{ role: string; content: string }> => {\n // Example approach if \"response.content\" holds array of text segments, etc.\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.content ?? []) {\n if (choice?.text) {\n output.push({\n role: 'assistant',\n content: choice.text,\n })\n }\n }\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): Array<{ role: string; content: string }> => {\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.choices ?? []) {\n if (choice.message?.content) {\n output.push({\n role: choice.message.role,\n content: choice.message.content,\n })\n }\n }\n return output\n}\n\nexport const mergeSystemPrompt = (params: ChatCompletionCreateParamsBase & MonitoringParams, provider: string): any => {\n if (provider !== 'anthropic') {\n return params.messages\n }\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = (params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? 
null : input\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: { input_tokens?: number; output_tokens?: number }\n params: ChatCompletionCreateParamsBase & MonitoringParams\n}\n\nexport const sendEventToPosthog = ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n}: SendEventToPosthogParams): void => {\n if (client.capture) {\n client.capture({\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties: {\n $ai_provider: provider,\n $ai_model: model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, input),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, output),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.input_tokens ?? 0,\n $ai_output_tokens: usage.output_tokens ?? 0,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n },\n groups: params.posthogGroups,\n })\n }\n}\n","import OpenAIOrignal from 'openai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { PassThrough } from 'stream'\nimport { mergeSystemPrompt, MonitoringParams, sendEventToPosthog } from '../utils'\n\ntype ChatCompletion = OpenAIOrignal.ChatCompletion\ntype ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming\ntype ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming\nimport type { APIPromise, RequestOptions } from 'openai/core'\nimport type { Stream } from 'openai/streaming'\n\ninterface MonitoringOpenAIConfig {\n apiKey: string\n posthog: PostHog\n baseURL?: string\n}\n\nexport class PostHogOpenAI extends OpenAIOrignal {\n private readonly phClient: PostHog\n\n constructor(config: MonitoringOpenAIConfig) {\n const { posthog, ...openAIConfig } = config\n super(openAIConfig)\n this.phClient = posthog\n this.chat = new WrappedChat(this, this.phClient)\n }\n\n public chat: WrappedChat\n}\n\nexport class WrappedChat extends OpenAIOrignal.Chat {\n constructor(parentClient: PostHogOpenAI, phClient: PostHog) {\n super(parentClient)\n this.completions = new WrappedCompletions(parentClient, phClient)\n }\n\n public completions: WrappedCompletions\n}\n\nexport class WrappedCompletions extends OpenAIOrignal.Chat.Completions {\n private readonly phClient: PostHog\n\n constructor(client: OpenAIOrignal, phClient: PostHog) {\n super(client)\n this.phClient = phClient\n }\n\n // --- Overload #1: Non-streaming\n public create(\n body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion>\n\n // --- Overload #2: Streaming\n public create(\n body: ChatCompletionCreateParamsStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<Stream<ChatCompletionChunk>>\n\n // --- Overload #3: Generic base\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | 
Stream<ChatCompletionChunk>>\n\n // --- Implementation Signature\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogPrivacyMode = false,\n posthogGroups,\n ...openAIParams\n } = body\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n\n const parentPromise = super.create(openAIParams, options)\n\n if (openAIParams.stream) {\n return parentPromise.then((value) => {\n const passThroughStream = new PassThrough({ objectMode: true })\n let accumulatedContent = ''\n let usage: { input_tokens: number; output_tokens: number } = {\n input_tokens: 0,\n output_tokens: 0,\n }\n if ('tee' in value) {\n const openAIStream = value\n ;(async () => {\n try {\n for await (const chunk of openAIStream) {\n const delta = chunk?.choices?.[0]?.delta?.content ?? ''\n accumulatedContent += delta\n if (chunk.usage) {\n usage = {\n input_tokens: chunk.usage.prompt_tokens ?? 0,\n output_tokens: chunk.usage.completion_tokens ?? 0,\n }\n }\n passThroughStream.write(chunk)\n }\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage,\n })\n passThroughStream.end()\n } catch (error) {\n // error handling\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n passThroughStream.emit('error', error)\n }\n })()\n }\n return passThroughStream as unknown as Stream<ChatCompletionChunk>\n }) as APIPromise<Stream<ChatCompletionChunk>>\n } else {\n const wrappedPromise = parentPromise.then(\n (result) => {\n if ('choices' in result) {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [{ content: result.choices[0].message.content, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage: {\n input_tokens: result.usage?.prompt_tokens ?? 0,\n output_tokens: result.usage?.completion_tokens ?? 0,\n },\n })\n }\n return result\n },\n (error) => {\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? 
'',\n params: body,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n throw error\n }\n ) as APIPromise<ChatCompletion>\n\n return wrappedPromise\n }\n }\n}\n\nexport default PostHogOpenAI\n","import { experimental_wrapLanguageModel as wrapLanguageModel } from 'ai'\nimport type {\n LanguageModelV1,\n Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware,\n LanguageModelV1StreamPart,\n} from 'ai'\nimport { v4 as uuidv4 } from 'uuid'\nimport type { PostHog } from 'posthog-node'\nimport { sendEventToPosthog } from '../utils'\n\ninterface CreateInstrumentationMiddlewareOptions {\n posthogDistinctId?: string\n posthogTraceId: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: string[]\n}\n\nexport const createInstrumentationMiddleware = (\n phClient: PostHog,\n model: LanguageModelV1,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1Middleware => {\n const middleware: LanguageModelV1Middleware = {\n wrapGenerate: async ({ doGenerate, params }) => {\n const startTime = Date.now()\n\n try {\n const result = await doGenerate()\n const latency = (Date.now() - startTime) / 1000\n\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : params.prompt,\n output: [{ content: result.text, role: 'assistant' }],\n latency,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 200,\n usage: {\n input_tokens: result.usage.promptTokens,\n output_tokens: result.usage.completionTokens,\n },\n })\n\n return result\n } catch (error) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : params.prompt,\n output: [],\n latency: 0,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n throw error\n }\n },\n\n wrapStream: async ({ doStream, params }) => {\n const startTime = Date.now()\n let generatedText = ''\n let usage: { input_tokens?: number; output_tokens?: number } = {}\n\n try {\n const { stream, ...rest } = await doStream()\n\n const transformStream = new TransformStream<LanguageModelV1StreamPart, LanguageModelV1StreamPart>({\n transform(chunk, controller) {\n if (chunk.type === 'text-delta') {\n generatedText += chunk.textDelta\n }\n if (chunk.type === 'finish') {\n usage = {\n input_tokens: chunk.usage?.promptTokens,\n output_tokens: chunk.usage?.completionTokens,\n }\n }\n controller.enqueue(chunk)\n },\n\n flush() {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : params.prompt,\n output: [{ content: generatedText, role: 'assistant' }],\n latency,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 200,\n usage,\n })\n },\n })\n\n return {\n stream: stream.pipeThrough(transformStream),\n ...rest,\n }\n } catch (error) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? 
'' : params.prompt,\n output: [],\n latency: 0,\n baseURL: '',\n params: { posthog_properties: options } as any,\n httpStatus: 500,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n })\n throw error\n }\n },\n }\n\n return middleware\n}\n\nexport const wrapVercelLanguageModel = (\n model: LanguageModelV1,\n phClient: PostHog,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1 => {\n const traceId = options.posthogTraceId ?? uuidv4()\n const middleware = createInstrumentationMiddleware(phClient, model, {\n ...options,\n posthogTraceId: traceId,\n posthogDistinctId: options.posthogDistinctId ?? traceId,\n })\n\n const wrappedModel = wrapLanguageModel({\n model,\n middleware,\n })\n\n return wrappedModel\n}\n"],"names":["getModelParams","params","modelParams","paramKeys","key","undefined","mergeSystemPrompt","provider","messages","system","systemMessage","role","content","withPrivacyMode","client","privacyMode","input","privacy_mode","sendEventToPosthog","distinctId","traceId","model","output","latency","baseURL","httpStatus","usage","capture","event","properties","$ai_provider","$ai_model","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","input_tokens","$ai_output_tokens","output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","groups","posthogGroups","PostHogOpenAI","OpenAIOrignal","constructor","config","posthog","openAIConfig","phClient","chat","WrappedChat","Chat","parentClient","completions","WrappedCompletions","Completions","create","body","options","posthogDistinctId","posthogTraceId","openAIParams","uuidv4","startTime","Date","now","parentPromise","stream","then","value","passThroughStream","PassThrough","objectMode","accumulatedContent","openAIStream","chunk","delta","choices","prompt_tokens","completion_tokens","write","end","error","emit","wrappedPromise","result","message","createInstrumentationMiddleware","middleware","wrapGenerate","doGenerate","modelId","prompt","text","posthog_properties","promptTokens","completionTokens","wrapStream","doStream","generatedText","rest","transformStream","TransformStream","transform","controller","type","textDelta","enqueue","flush","pipeThrough","wrapVercelLanguageModel","wrappedModel","wrapLanguageModel"],"mappings":";;;;;AAaO,MAAMA,cAAc,GAAIC,MAAyD,IAAyB;EAC/G,MAAMC,WAAW,GAAwB,EAAE,CAAA;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH,CAAA;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAc,CAACG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAc,CAACG,GAAG,CAAC,CAAA;AACxC,KAAA;AACF,GAAA;AACD,EAAA,OAAOF,WAAW,CAAA;AACpB,CAAC,CAAA;AAgEM,MAAMI,iBAAiB,GAAGA,CAACL,MAAyD,EAAEM,QAAgB,KAAS;EACpH,IAAIA,QAAQ,KAAK,WAAW,EAAE;IAC5B,OAAON,MAAM,CAACO,QAAQ,CAAA;AACvB,GAAA;AACD,EAAA,MAAMA,QAAQ,GAAGP,MAAM,CAACO,QAAQ,IAAI,EAAE,CAAA;AACtC,EAAA,IAAI,CAAEP,MAAc,CAACQ,MAAM,EAAE;AAC3B,IAAA,OAAOD,QAAQ,CAAA;AAChB,GAAA;AACD,EAAA,MAAME,aAAa,GAAIT,MAAc,CAACQ,MAAM,CAAA;AAC5C,EAAA,OAAO,CAAC;AAAEE,IAAAA,IAAI,EAAE,QAAQ;AAAEC,IAAAA,OAAO,EAAEF,aAAAA;GAAe,EAAE,GAAGF,QAAQ,CAAC,CAAA;AAClE,CAAC,CAAA;AAEM,MAAMK,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAS;EACxF,OAAQF,MAAc,CAACG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK,CAAA;AACnE,CAAC,CAAA;AAiBM,MAAME,kBAAkB,GAAGA,CAAC;EACjCJ,MAAM;EACNK,UAAU;EACVC,OAAO;EACPC,KAAK;EACLd,QAAQ;EACRS,KAAK;EACLM,MAAM;EACNC,OAAO;EACPC,OAAO;EACPvB,MAAM;AACNwB,EAAAA,UAAU,GAAG,GAAG;A
AChBC,EAAAA,KAAK,GAAG,EAAE;AAAA,CACe,KAAU;EACnC,IAAIZ,MAAM,CAACa,OAAO,EAAE;IAClBb,MAAM,CAACa,OAAO,CAAC;MACbR,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCQ,MAAAA,KAAK,EAAE,gBAAgB;AACvBC,MAAAA,UAAU,EAAE;AACVC,QAAAA,YAAY,EAAEvB,QAAQ;AACtBwB,QAAAA,SAAS,EAAEV,KAAK;AAChBW,QAAAA,oBAAoB,EAAEhC,cAAc,CAACC,MAAM,CAAC;AAC5CgC,QAAAA,SAAS,EAAEpB,eAAe,CAACC,MAAM,EAAEb,MAAM,CAACiC,kBAAkB,IAAI,KAAK,EAAElB,KAAK,CAAC;AAC7EmB,QAAAA,kBAAkB,EAAEtB,eAAe,CAACC,MAAM,EAAEb,MAAM,CAACiC,kBAAkB,IAAI,KAAK,EAAEZ,MAAM,CAAC;AACvFc,QAAAA,eAAe,EAAEX,UAAU;AAC3BY,QAAAA,gBAAgB,EAAEX,KAAK,CAACY,YAAY,IAAI,CAAC;AACzCC,QAAAA,iBAAiB,EAAEb,KAAK,CAACc,aAAa,IAAI,CAAC;AAC3CC,QAAAA,WAAW,EAAElB,OAAO;AACpBmB,QAAAA,YAAY,EAAEtB,OAAO;AACrBuB,QAAAA,YAAY,EAAEnB,OAAO;QACrB,GAAGvB,MAAM,CAAC2C,iBAAiB;AAC3B,QAAA,IAAIzB,UAAU,GAAG,EAAE,GAAG;AAAE0B,UAAAA,uBAAuB,EAAE,KAAA;SAAO,CAAA;OACzD;MACDC,MAAM,EAAE7C,MAAM,CAAC8C,aAAAA;AAChB,KAAA,CAAC,CAAA;AACH,GAAA;AACH,CAAC;;ACjJK,MAAOC,aAAc,SAAQC,aAAa,CAAA;EAG9CC,WAAAA,CAAYC,MAA8B,EAAA;IACxC,MAAM;MAAEC,OAAO;MAAE,GAAGC,YAAAA;AAAc,KAAA,GAAGF,MAAM,CAAA;IAC3C,KAAK,CAACE,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,QAAQ,GAAGF,OAAO,CAAA;IACvB,IAAI,CAACG,IAAI,GAAG,IAAIC,WAAW,CAAC,IAAI,EAAE,IAAI,CAACF,QAAQ,CAAC,CAAA;AAClD,GAAA;AAGD,CAAA;AAEY,MAAAE,WAAY,SAAQP,aAAa,CAACQ,IAAI,CAAA;AACjDP,EAAAA,WAAYA,CAAAQ,YAA2B,EAAEJ,QAAiB,EAAA;IACxD,KAAK,CAACI,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,WAAW,GAAG,IAAIC,kBAAkB,CAACF,YAAY,EAAEJ,QAAQ,CAAC,CAAA;AACnE,GAAA;AAGD,CAAA;MAEYM,kBAAmB,SAAQX,aAAa,CAACQ,IAAI,CAACI,WAAW,CAAA;AAGpEX,EAAAA,WAAYA,CAAApC,MAAqB,EAAEwC,QAAiB,EAAA;IAClD,KAAK,CAACxC,MAAM,CAAC,CAAA;IACb,IAAI,CAACwC,QAAQ,GAAGA,QAAQ,CAAA;AAC1B,GAAA;AAoBA;AACOQ,EAAAA,MAAMA,CACXC,IAAuD,EACvDC,OAAwB,EAAA;IAExB,MAAM;MACJC,iBAAiB;MACjBC,cAAc;MACdtB,iBAAiB;AACjBV,MAAAA,kBAAkB,GAAG,KAAK;MAC1Ba,aAAa;MACb,GAAGoB,YAAAA;AACJ,KAAA,GAAGJ,IAAI,CAAA;AAER,IAAA,MAAM3C,OAAO,GAAG8C,cAAc,IAAIE,EAAM,EAAE,CAAA;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;IAE5B,MAAMC,aAAa,GAAG,KAAK,CAACV,MAAM,CAACK,YAAY,EAAEH,OAAO,CAAC,CAAA;IAEzD,IAAIG,YAAY,CAACM,MAAM,EAAE;AACvB,MAAA,OAAOD,aAAa,CAACE,IAAI,CAAEC,KAAK,IAAI;AAClC,QAAA,MAAMC,iBAAiB,GAAG,IAAIC,WAAW,CAAC;AAAEC,UAAAA,UAAU,EAAE,IAAA;AAAM,SAAA,CAAC,CAAA;QAC/D,IAAIC,kBAAkB,GAAG,EAAE,CAAA;AAC3B,QAAA,IAAIrD,KAAK,GAAoD;AAC3DY,UAAAA,YAAY,EAAE,CAAC;AACfE,UAAAA,aAAa,EAAE,CAAA;SAChB,CAAA;QACD,IAAI,KAAK,IAAImC,KAAK,EAAE;UAClB,MAAMK,YAAY,GAAGL,KAAK,CAAA;AACzB,UAAA,CAAC,YAAW;YACX,IAAI;AACF,cAAA,WAAW,MAAMM,KAAK,IAAID,YAAY,EAAE;AACtC,gBAAA,MAAME,KAAK,GAAGD,KAAK,EAAEE,OAAO,GAAG,CAAC,CAAC,EAAED,KAAK,EAAEtE,OAAO,IAAI,EAAE,CAAA;AACvDmE,gBAAAA,kBAAkB,IAAIG,KAAK,CAAA;gBAC3B,IAAID,KAAK,CAACvD,KAAK,EAAE;AACfA,kBAAAA,KAAK,GAAG;AACNY,oBAAAA,YAAY,EAAE2C,KAAK,CAACvD,KAAK,CAAC0D,aAAa,IAAI,CAAC;AAC5C5C,oBAAAA,aAAa,EAAEyC,KAAK,CAACvD,KAAK,CAAC2D,iBAAiB,IAAI,CAAA;mBACjD,CAAA;AACF,iBAAA;AACDT,gBAAAA,iBAAiB,CAACU,KAAK,CAACL,KAAK,CAAC,CAAA;AAC/B,eAAA;cACD,MAAM1D,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CnD,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;gBACrBnC,UAAU,EAAE8C,iBAAiB,IAAI7C,OAAO;gBACxCA,OAAO;gBACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBS,KAAK,EAAEkB,kBAAkB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC1E7C,gBAAAA,MAAM,EAAE,CAAC;AAAEV,kBAAAA,OAAO,EAAEmE,kBAAkB;AAAEpE,kBAAAA,IAAI,EAAE,WAAA;AAAW,iBAAE,CAAC;gBAC5DY,OAAO;AACPC,gBAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,gBAAAA,MAAM,EAAE8D,IAAI;AACZtC,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAAA;AACD,eAAA,CAAC,CAAA;cACFkD,iBAAiB,CAACW,GAAG,EAAE,CAAA;aACxB,CAAC,OAAOC,KAAK,EAAE;AACd;AACAtE,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;gBA
CrBnC,UAAU,EAAE8C,iBAAiB,IAAI7C,OAAO;gBACxCA,OAAO;gBACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBS,KAAK,EAAEkB,kBAAkB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC1E7C,gBAAAA,MAAM,EAAE,EAAE;AACVC,gBAAAA,OAAO,EAAE,CAAC;AACVC,gBAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,gBAAAA,MAAM,EAAE8D,IAAI;AACZtC,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAK,EAAE;AACLY,kBAAAA,YAAY,EAAE,CAAC;AACfE,kBAAAA,aAAa,EAAE,CAAA;AAChB,iBAAA;AACF,eAAA,CAAC,CAAA;AACFoC,cAAAA,iBAAiB,CAACa,IAAI,CAAC,OAAO,EAAED,KAAK,CAAC,CAAA;AACvC,aAAA;AACH,WAAC,GAAG,CAAA;AACL,SAAA;AACD,QAAA,OAAOZ,iBAA2D,CAAA;AACpE,OAAC,CAA4C,CAAA;AAC9C,KAAA,MAAM;AACL,MAAA,MAAMc,cAAc,GAAGlB,aAAa,CAACE,IAAI,CACtCiB,MAAM,IAAI;QACT,IAAI,SAAS,IAAIA,MAAM,EAAE;UACvB,MAAMpE,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CnD,UAAAA,kBAAkB,CAAC;YACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;YACrBnC,UAAU,EAAE8C,iBAAiB,IAAI7C,OAAO;YACxCA,OAAO;YACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,YAAAA,QAAQ,EAAE,QAAQ;YAClBS,KAAK,EAAEkB,kBAAkB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC1E7C,YAAAA,MAAM,EAAE,CAAC;cAAEV,OAAO,EAAE+E,MAAM,CAACR,OAAO,CAAC,CAAC,CAAC,CAACS,OAAO,CAAChF,OAAO;AAAED,cAAAA,IAAI,EAAE,WAAA;aAAa,CAAC;YAC3EY,OAAO;AACPC,YAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,YAAAA,MAAM,EAAE8D,IAAI;AACZtC,YAAAA,UAAU,EAAE,GAAG;AACfC,YAAAA,KAAK,EAAE;AACLY,cAAAA,YAAY,EAAEqD,MAAM,CAACjE,KAAK,EAAE0D,aAAa,IAAI,CAAC;AAC9C5C,cAAAA,aAAa,EAAEmD,MAAM,CAACjE,KAAK,EAAE2D,iBAAiB,IAAI,CAAA;AACnD,aAAA;AACF,WAAA,CAAC,CAAA;AACH,SAAA;AACD,QAAA,OAAOM,MAAM,CAAA;OACd,EACAH,KAAK,IAAI;AACRtE,QAAAA,kBAAkB,CAAC;UACjBJ,MAAM,EAAE,IAAI,CAACwC,QAAQ;UACrBnC,UAAU,EAAE8C,iBAAiB,IAAI7C,OAAO;UACxCA,OAAO;UACPC,KAAK,EAAE8C,YAAY,CAAC9C,KAAK;AACzBd,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEkB,kBAAkB,GAAG,EAAE,GAAG5B,iBAAiB,CAAC6D,YAAY,EAAE,QAAQ,CAAC;AAC1E7C,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,UAAAA,MAAM,EAAE8D,IAAI;AACZtC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAE,CAAC;AACfE,YAAAA,aAAa,EAAE,CAAA;AAChB,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACb,OAAC,CAC4B,CAAA;AAE/B,MAAA,OAAOE,cAAc,CAAA;AACtB,KAAA;AACH,GAAA;AACD;;ACvLM,MAAMG,+BAA+B,GAAGA,CAC7CvC,QAAiB,EACjBjC,KAAsB,EACtB2C,OAA+C,KAClB;AAC7B,EAAA,MAAM8B,UAAU,GAA8B;IAC5CC,YAAY,EAAE,OAAO;MAAEC,UAAU;AAAE/F,MAAAA,MAAAA;AAAQ,KAAA,KAAI;AAC7C,MAAA,MAAMoE,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;MAE5B,IAAI;AACF,QAAA,MAAMoB,MAAM,GAAG,MAAMK,UAAU,EAAE,CAAA;QACjC,MAAMzE,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAE/CnD,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAEwC,QAAQ;UAChBnC,UAAU,EAAE6C,OAAO,CAACC,iBAAiB;UACrC7C,OAAO,EAAE4C,OAAO,CAACE,cAAc;UAC/B7C,KAAK,EAAEA,KAAK,CAAC4E,OAAO;AACpB1F,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEgD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGjC,MAAM,CAACiG,MAAM;AACtD5E,UAAAA,MAAM,EAAE,CAAC;YAAEV,OAAO,EAAE+E,MAAM,CAACQ,IAAI;AAAExF,YAAAA,IAAI,EAAE,WAAA;WAAa,CAAC;UACrDY,OAAO;AACPC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAE;AAAEmG,YAAAA,kBAAkB,EAAEpC,OAAAA;WAAgB;AAC9CvC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAEqD,MAAM,CAACjE,KAAK,CAAC2E,YAAY;AACvC7D,YAAAA,aAAa,EAAEmD,MAAM,CAACjE,KAAK,CAAC4E,gBAAAA;AAC7B,WAAA;AACF,SAAA,CAAC,CAAA;AAEF,QAAA,OAAOX,MAAM,CAAA;OACd,CAAC,OAAOH,KAAK,EAAE;AACdtE,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAEwC,QAAQ;UAChBnC,UAAU,EAAE6C,OAAO,CAACC,iBAAiB;UACrC7C,OAAO,EAAE4C,OAAO,CAACE,cAAc;UAC/B7C,KAAK,EAAEA,KAAK,CAAC4E,OAAO;AACpB1F,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEgD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGjC,MAAM,CAACiG,MAAM;AACtD5E,UAAAA,MAAM,EAAE,EAAE;AACVC,
UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAE;AAAEmG,YAAAA,kBAAkB,EAAEpC,OAAAA;WAAgB;AAC9CvC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAE,CAAC;AACfE,YAAAA,aAAa,EAAE,CAAA;AAChB,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACZ,OAAA;KACF;IAEDe,UAAU,EAAE,OAAO;MAAEC,QAAQ;AAAEvG,MAAAA,MAAAA;AAAQ,KAAA,KAAI;AACzC,MAAA,MAAMoE,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;MAC5B,IAAIkC,aAAa,GAAG,EAAE,CAAA;MACtB,IAAI/E,KAAK,GAAsD,EAAE,CAAA;MAEjE,IAAI;QACF,MAAM;UAAE+C,MAAM;UAAE,GAAGiC,IAAAA;SAAM,GAAG,MAAMF,QAAQ,EAAE,CAAA;AAE5C,QAAA,MAAMG,eAAe,GAAG,IAAIC,eAAe,CAAuD;AAChGC,UAAAA,SAASA,CAAC5B,KAAK,EAAE6B,UAAU,EAAA;AACzB,YAAA,IAAI7B,KAAK,CAAC8B,IAAI,KAAK,YAAY,EAAE;cAC/BN,aAAa,IAAIxB,KAAK,CAAC+B,SAAS,CAAA;AACjC,aAAA;AACD,YAAA,IAAI/B,KAAK,CAAC8B,IAAI,KAAK,QAAQ,EAAE;AAC3BrF,cAAAA,KAAK,GAAG;AACNY,gBAAAA,YAAY,EAAE2C,KAAK,CAACvD,KAAK,EAAE2E,YAAY;AACvC7D,gBAAAA,aAAa,EAAEyC,KAAK,CAACvD,KAAK,EAAE4E,gBAAAA;eAC7B,CAAA;AACF,aAAA;AACDQ,YAAAA,UAAU,CAACG,OAAO,CAAChC,KAAK,CAAC,CAAA;WAC1B;AAEDiC,UAAAA,KAAKA,GAAA;YACH,MAAM3F,OAAO,GAAG,CAAC+C,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CnD,YAAAA,kBAAkB,CAAC;AACjBJ,cAAAA,MAAM,EAAEwC,QAAQ;cAChBnC,UAAU,EAAE6C,OAAO,CAACC,iBAAiB;cACrC7C,OAAO,EAAE4C,OAAO,CAACE,cAAc;cAC/B7C,KAAK,EAAEA,KAAK,CAAC4E,OAAO;AACpB1F,cAAAA,QAAQ,EAAE,QAAQ;cAClBS,KAAK,EAAEgD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGjC,MAAM,CAACiG,MAAM;AACtD5E,cAAAA,MAAM,EAAE,CAAC;AAAEV,gBAAAA,OAAO,EAAE6F,aAAa;AAAE9F,gBAAAA,IAAI,EAAE,WAAA;AAAW,eAAE,CAAC;cACvDY,OAAO;AACPC,cAAAA,OAAO,EAAE,EAAE;AACXvB,cAAAA,MAAM,EAAE;AAAEmG,gBAAAA,kBAAkB,EAAEpC,OAAAA;eAAgB;AAC9CvC,cAAAA,UAAU,EAAE,GAAG;AACfC,cAAAA,KAAAA;AACD,aAAA,CAAC,CAAA;AACJ,WAAA;AACD,SAAA,CAAC,CAAA;QAEF,OAAO;AACL+C,UAAAA,MAAM,EAAEA,MAAM,CAAC0C,WAAW,CAACR,eAAe,CAAC;UAC3C,GAAGD,IAAAA;SACJ,CAAA;OACF,CAAC,OAAOlB,KAAK,EAAE;AACdtE,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAEwC,QAAQ;UAChBnC,UAAU,EAAE6C,OAAO,CAACC,iBAAiB;UACrC7C,OAAO,EAAE4C,OAAO,CAACE,cAAc;UAC/B7C,KAAK,EAAEA,KAAK,CAAC4E,OAAO;AACpB1F,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEgD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGjC,MAAM,CAACiG,MAAM;AACtD5E,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAE;AAAEmG,YAAAA,kBAAkB,EAAEpC,OAAAA;WAAgB;AAC9CvC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLY,YAAAA,YAAY,EAAE,CAAC;AACfE,YAAAA,aAAa,EAAE,CAAA;AAChB,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACZ,OAAA;AACH,KAAA;GACD,CAAA;AAED,EAAA,OAAOM,UAAU,CAAA;AACnB,CAAC,CAAA;AAEM,MAAMsB,uBAAuB,GAAGA,CACrC/F,KAAsB,EACtBiC,QAAiB,EACjBU,OAA+C,KAC5B;EACnB,MAAM5C,OAAO,GAAG4C,OAAO,CAACE,cAAc,IAAIE,EAAM,EAAE,CAAA;AAClD,EAAA,MAAM0B,UAAU,GAAGD,+BAA+B,CAACvC,QAAQ,EAAEjC,KAAK,EAAE;AAClE,IAAA,GAAG2C,OAAO;AACVE,IAAAA,cAAc,EAAE9C,OAAO;AACvB6C,IAAAA,iBAAiB,EAAED,OAAO,CAACC,iBAAiB,IAAI7C,OAAAA;AACjD,GAAA,CAAC,CAAA;EAEF,MAAMiG,YAAY,GAAGC,8BAAiB,CAAC;IACrCjG,KAAK;AACLyE,IAAAA,UAAAA;AACD,GAAA,CAAC,CAAA;AAEF,EAAA,OAAOuB,YAAY,CAAA;AACrB;;;;"}
+
{"version":3,"file":"index.esm.js","sources":["../src/utils.ts","../src/openai/index.ts","../src/vercel/middleware.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport OpenAIOrignal from 'openai'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n}\n\nexport const getModelParams = (params: ChatCompletionCreateParamsBase & MonitoringParams): Record<string, any> => {\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): Array<{ role: string; content: string }> => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): Array<{ role: string; content: string }> => {\n // Example approach if \"response.content\" holds array of text segments, etc.\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.content ?? []) {\n if (choice?.text) {\n output.push({\n role: 'assistant',\n content: choice.text,\n })\n }\n }\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): Array<{ role: string; content: string }> => {\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.choices ?? []) {\n if (choice.message?.content) {\n output.push({\n role: choice.message.role,\n content: choice.message.content,\n })\n }\n }\n return output\n}\n\nexport const mergeSystemPrompt = (params: ChatCompletionCreateParamsBase & MonitoringParams, provider: string): any => {\n if (provider !== 'anthropic') {\n return params.messages\n }\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = (params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? 
null : input\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: { inputTokens?: number; outputTokens?: number }\n params: ChatCompletionCreateParamsBase & MonitoringParams\n}\n\nexport const sendEventToPosthog = ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n}: SendEventToPosthogParams): void => {\n console.log('sendEventToPosthog', {\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n })\n if (client.capture) {\n client.capture({\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties: {\n $ai_provider: provider,\n $ai_model: model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, input),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, output),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n },\n groups: params.posthogGroups,\n })\n }\n}\n","import OpenAIOrignal from 'openai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { PassThrough } from 'stream'\nimport { mergeSystemPrompt, MonitoringParams, sendEventToPosthog } from '../utils'\n\ntype ChatCompletion = OpenAIOrignal.ChatCompletion\ntype ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming\ntype ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming\nimport type { APIPromise, RequestOptions } from 'openai/core'\nimport type { Stream } from 'openai/streaming'\n\ninterface MonitoringOpenAIConfig {\n apiKey: string\n posthog: PostHog\n baseURL?: string\n}\n\nexport class PostHogOpenAI extends OpenAIOrignal {\n private readonly phClient: PostHog\n\n constructor(config: MonitoringOpenAIConfig) {\n const { posthog, ...openAIConfig } = config\n super(openAIConfig)\n this.phClient = posthog\n this.chat = new WrappedChat(this, this.phClient)\n }\n\n public chat: WrappedChat\n}\n\nexport class WrappedChat extends OpenAIOrignal.Chat {\n constructor(parentClient: PostHogOpenAI, phClient: PostHog) {\n super(parentClient)\n this.completions = new WrappedCompletions(parentClient, phClient)\n }\n\n public completions: WrappedCompletions\n}\n\nexport class WrappedCompletions extends OpenAIOrignal.Chat.Completions {\n private readonly phClient: PostHog\n\n constructor(client: OpenAIOrignal, phClient: PostHog) {\n super(client)\n this.phClient = phClient\n }\n\n // --- Overload #1: Non-streaming\n public create(\n body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion>\n\n // --- Overload #2: Streaming\n public create(\n body: ChatCompletionCreateParamsStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<Stream<ChatCompletionChunk>>\n\n // --- Overload #3: Generic base\n 
public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>\n\n // --- Implementation Signature\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n posthogPrivacyMode = false,\n posthogGroups,\n ...openAIParams\n } = body\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n\n const parentPromise = super.create(openAIParams, options)\n\n if (openAIParams.stream) {\n return parentPromise.then((value) => {\n const passThroughStream = new PassThrough({ objectMode: true })\n let accumulatedContent = ''\n let usage: { inputTokens: number; outputTokens: number } = {\n inputTokens: 0,\n outputTokens: 0,\n }\n if ('tee' in value) {\n const openAIStream = value\n ;(async () => {\n try {\n for await (const chunk of openAIStream) {\n const delta = chunk?.choices?.[0]?.delta?.content ?? ''\n accumulatedContent += delta\n if (chunk.usage) {\n usage = {\n inputTokens: chunk.usage.prompt_tokens ?? 0,\n outputTokens: chunk.usage.completion_tokens ?? 0,\n }\n }\n passThroughStream.write(chunk)\n }\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage,\n })\n passThroughStream.end()\n } catch (error) {\n // error handling\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n })\n passThroughStream.emit('error', error)\n }\n })()\n }\n return passThroughStream as unknown as Stream<ChatCompletionChunk>\n }) as APIPromise<Stream<ChatCompletionChunk>>\n } else {\n const wrappedPromise = parentPromise.then(\n (result) => {\n if ('choices' in result) {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [{ content: result.choices[0].message.content, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage: {\n inputTokens: result.usage?.prompt_tokens ?? 0,\n outputTokens: result.usage?.completion_tokens ?? 0,\n },\n })\n }\n return result\n },\n (error) => {\n sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId ?? traceId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? 
'',\n params: body,\n httpStatus: 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n })\n throw error\n }\n ) as APIPromise<ChatCompletion>\n\n return wrappedPromise\n }\n }\n}\n\nexport default PostHogOpenAI\n","import { experimental_wrapLanguageModel as wrapLanguageModel } from 'ai'\nimport type {\n LanguageModelV1,\n Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware,\n LanguageModelV1Prompt,\n LanguageModelV1StreamPart,\n} from 'ai'\nimport { v4 as uuidv4 } from 'uuid'\nimport { PostHog } from 'posthog-node'\nimport { sendEventToPosthog } from '../utils'\n\ninterface CreateInstrumentationMiddlewareOptions {\n posthogDistinctId?: string\n posthogTraceId: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n}\n\ninterface PostHogInput {\n content: string\n role: string\n}\n\nconst mapVercelParams = (params: any): Record<string, any> => {\n return {\n temperature: params.temperature,\n max_tokens: params.maxTokens,\n top_p: params.topP,\n frequency_penalty: params.frequencyPenalty,\n presence_penalty: params.presencePenalty,\n stop: params.stopSequences,\n stream: params.stream,\n }\n}\n\nconst mapVercelPrompt = (prompt: LanguageModelV1Prompt): PostHogInput[] => {\n return prompt.map((p) => {\n let content = ''\n if (Array.isArray(p.content)) {\n content = p.content\n .map((c) => {\n if (c.type === 'text') {\n return c.text\n }\n return ''\n })\n .join('')\n } else {\n content = p.content\n }\n return {\n role: p.role,\n content,\n }\n })\n}\n\nexport const createInstrumentationMiddleware = (\n phClient: PostHog,\n model: LanguageModelV1,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1Middleware => {\n const middleware: LanguageModelV1Middleware = {\n wrapGenerate: async ({ doGenerate, params }) => {\n const startTime = Date.now()\n let mergedParams = {\n ...options,\n ...mapVercelParams(params),\n }\n try {\n const result = await doGenerate()\n const latency = (Date.now() - startTime) / 1000\n\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: [{ content: result.text, role: 'assistant' }],\n latency,\n baseURL: '',\n params: mergedParams as any,\n httpStatus: 200,\n usage: {\n inputTokens: result.usage.promptTokens,\n outputTokens: result.usage.completionTokens,\n },\n })\n\n return result\n } catch (error) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? 
'' : mapVercelPrompt(params.prompt),\n output: [],\n latency: 0,\n baseURL: '',\n params: mergedParams as any,\n httpStatus: 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n })\n throw error\n }\n },\n\n wrapStream: async ({ doStream, params }) => {\n const startTime = Date.now()\n let generatedText = ''\n let usage: { inputTokens?: number; outputTokens?: number } = {}\n let mergedParams = {\n ...options,\n ...mapVercelParams(params),\n }\n try {\n const { stream, ...rest } = await doStream()\n\n const transformStream = new TransformStream<LanguageModelV1StreamPart, LanguageModelV1StreamPart>({\n transform(chunk, controller) {\n if (chunk.type === 'text-delta') {\n generatedText += chunk.textDelta\n }\n if (chunk.type === 'finish') {\n usage = {\n inputTokens: chunk.usage?.promptTokens,\n outputTokens: chunk.usage?.completionTokens,\n }\n }\n controller.enqueue(chunk)\n },\n\n flush() {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: [{ content: generatedText, role: 'assistant' }],\n latency,\n baseURL: '',\n params: mergedParams as any,\n httpStatus: 200,\n usage,\n })\n },\n })\n\n return {\n stream: stream.pipeThrough(transformStream),\n ...rest,\n }\n } catch (error) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: model.modelId,\n provider: 'vercel',\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: [],\n latency: 0,\n baseURL: '',\n params: mergedParams as any,\n httpStatus: 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n })\n throw error\n }\n },\n }\n\n return middleware\n}\n\nexport const wrapVercelLanguageModel = (\n model: LanguageModelV1,\n phClient: PostHog,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1 => {\n const traceId = options.posthogTraceId ?? uuidv4()\n const middleware = createInstrumentationMiddleware(phClient, model, {\n ...options,\n posthogTraceId: traceId,\n posthogDistinctId: options.posthogDistinctId ?? 
traceId,\n })\n\n const wrappedModel = wrapLanguageModel({\n model,\n middleware,\n })\n\n return wrappedModel\n}\n"],"names":["getModelParams","params","modelParams","paramKeys","key","undefined","mergeSystemPrompt","provider","messages","system","systemMessage","role","content","withPrivacyMode","client","privacyMode","input","privacy_mode","sendEventToPosthog","distinctId","traceId","model","output","latency","baseURL","httpStatus","usage","console","log","capture","event","properties","$ai_provider","$ai_model","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","inputTokens","$ai_output_tokens","outputTokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","groups","posthogGroups","PostHogOpenAI","OpenAIOrignal","constructor","config","posthog","openAIConfig","phClient","chat","WrappedChat","Chat","parentClient","completions","WrappedCompletions","Completions","create","body","options","posthogDistinctId","posthogTraceId","openAIParams","uuidv4","startTime","Date","now","parentPromise","stream","then","value","passThroughStream","PassThrough","objectMode","accumulatedContent","openAIStream","chunk","delta","choices","prompt_tokens","completion_tokens","write","end","error","emit","wrappedPromise","result","message","mapVercelParams","temperature","max_tokens","maxTokens","top_p","topP","frequency_penalty","frequencyPenalty","presence_penalty","presencePenalty","stop","stopSequences","mapVercelPrompt","prompt","map","p","Array","isArray","c","type","text","join","createInstrumentationMiddleware","middleware","wrapGenerate","doGenerate","mergedParams","modelId","promptTokens","completionTokens","wrapStream","doStream","generatedText","rest","transformStream","TransformStream","transform","controller","textDelta","enqueue","flush","pipeThrough","wrapVercelLanguageModel","wrappedModel","wrapLanguageModel"],"mappings":";;;;;AAaO,MAAMA,cAAc,GAAIC,MAAyD,IAAyB;EAC/G,MAAMC,WAAW,GAAwB,EAAE,CAAA;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH,CAAA;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAc,CAACG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAc,CAACG,GAAG,CAAC,CAAA;AACxC,KAAA;AACF,GAAA;AACD,EAAA,OAAOF,WAAW,CAAA;AACpB,CAAC,CAAA;AA4CM,MAAMI,iBAAiB,GAAGA,CAACL,MAAyD,EAAEM,QAAgB,KAAS;EACpH,IAAIA,QAAQ,KAAK,WAAW,EAAE;IAC5B,OAAON,MAAM,CAACO,QAAQ,CAAA;AACvB,GAAA;AACD,EAAA,MAAMA,QAAQ,GAAGP,MAAM,CAACO,QAAQ,IAAI,EAAE,CAAA;AACtC,EAAA,IAAI,CAAEP,MAAc,CAACQ,MAAM,EAAE;AAC3B,IAAA,OAAOD,QAAQ,CAAA;AAChB,GAAA;AACD,EAAA,MAAME,aAAa,GAAIT,MAAc,CAACQ,MAAM,CAAA;AAC5C,EAAA,OAAO,CAAC;AAAEE,IAAAA,IAAI,EAAE,QAAQ;AAAEC,IAAAA,OAAO,EAAEF,aAAAA;GAAe,EAAE,GAAGF,QAAQ,CAAC,CAAA;AAClE,CAAC,CAAA;AAEM,MAAMK,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAS;EACxF,OAAQF,MAAc,CAACG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK,CAAA;AACnE,CAAC,CAAA;AAiBM,MAAME,kBAAkB,GAAGA,CAAC;EACjCJ,MAAM;EACNK,UAAU;EACVC,OAAO;EACPC,KAAK;EACLd,QAAQ;EACRS,KAAK;EACLM,MAAM;EACNC,OAAO;EACPC,OAAO;EACPvB,MAAM;AACNwB,EAAAA,UAAU,GAAG,GAAG;AAChBC,EAAAA,KAAK,GAAG,EAAE;AAAA,CACe,KAAU;AACnCC,EAAAA,OAAO,CAACC,GAAG,CAAC,oBAAoB,EAAE;IAChCd,MAAM;IACNK,UAAU;IACVC,OAAO;IACPC,KAAK;IACLd,QAAQ;IACRS,KAAK;IACLM,MAAM;IACNC,OAAO;IACPC,OAAO;AACPvB,IAAAA,MAAAA;AACD,GAAA,CAAC,CAAA;EACF,IAAIa,MAAM,CAACe,OAAO,EAAE;IAClBf,MAAM,CAACe,OAAO,CAAC;MACbV,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCU,MAAAA,KAAK,EAAE,gBAAgB;AACvBC,MAAAA,UAAU,EAAE;AACVC,QAAAA,YAAY,
EAAEzB,QAAQ;AACtB0B,QAAAA,SAAS,EAAEZ,KAAK;AAChBa,QAAAA,oBAAoB,EAAElC,cAAc,CAACC,MAAM,CAAC;AAC5CkC,QAAAA,SAAS,EAAEtB,eAAe,CAACC,MAAM,EAAEb,MAAM,CAACmC,kBAAkB,IAAI,KAAK,EAAEpB,KAAK,CAAC;AAC7EqB,QAAAA,kBAAkB,EAAExB,eAAe,CAACC,MAAM,EAAEb,MAAM,CAACmC,kBAAkB,IAAI,KAAK,EAAEd,MAAM,CAAC;AACvFgB,QAAAA,eAAe,EAAEb,UAAU;AAC3Bc,QAAAA,gBAAgB,EAAEb,KAAK,CAACc,WAAW,IAAI,CAAC;AACxCC,QAAAA,iBAAiB,EAAEf,KAAK,CAACgB,YAAY,IAAI,CAAC;AAC1CC,QAAAA,WAAW,EAAEpB,OAAO;AACpBqB,QAAAA,YAAY,EAAExB,OAAO;AACrByB,QAAAA,YAAY,EAAErB,OAAO;QACrB,GAAGvB,MAAM,CAAC6C,iBAAiB;AAC3B,QAAA,IAAI3B,UAAU,GAAG,EAAE,GAAG;AAAE4B,UAAAA,uBAAuB,EAAE,KAAA;SAAO,CAAA;OACzD;MACDC,MAAM,EAAE/C,MAAM,CAACgD,aAAAA;AAChB,KAAA,CAAC,CAAA;AACH,GAAA;AACH,CAAC;;ACzIK,MAAOC,aAAc,SAAQC,aAAa,CAAA;EAG9CC,WAAAA,CAAYC,MAA8B,EAAA;IACxC,MAAM;MAAEC,OAAO;MAAE,GAAGC,YAAAA;AAAc,KAAA,GAAGF,MAAM,CAAA;IAC3C,KAAK,CAACE,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,QAAQ,GAAGF,OAAO,CAAA;IACvB,IAAI,CAACG,IAAI,GAAG,IAAIC,WAAW,CAAC,IAAI,EAAE,IAAI,CAACF,QAAQ,CAAC,CAAA;AAClD,GAAA;AAGD,CAAA;AAEY,MAAAE,WAAY,SAAQP,aAAa,CAACQ,IAAI,CAAA;AACjDP,EAAAA,WAAYA,CAAAQ,YAA2B,EAAEJ,QAAiB,EAAA;IACxD,KAAK,CAACI,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,WAAW,GAAG,IAAIC,kBAAkB,CAACF,YAAY,EAAEJ,QAAQ,CAAC,CAAA;AACnE,GAAA;AAGD,CAAA;MAEYM,kBAAmB,SAAQX,aAAa,CAACQ,IAAI,CAACI,WAAW,CAAA;AAGpEX,EAAAA,WAAYA,CAAAtC,MAAqB,EAAE0C,QAAiB,EAAA;IAClD,KAAK,CAAC1C,MAAM,CAAC,CAAA;IACb,IAAI,CAAC0C,QAAQ,GAAGA,QAAQ,CAAA;AAC1B,GAAA;AAoBA;AACOQ,EAAAA,MAAMA,CACXC,IAAuD,EACvDC,OAAwB,EAAA;IAExB,MAAM;MACJC,iBAAiB;MACjBC,cAAc;MACdtB,iBAAiB;AACjBV,MAAAA,kBAAkB,GAAG,KAAK;MAC1Ba,aAAa;MACb,GAAGoB,YAAAA;AACJ,KAAA,GAAGJ,IAAI,CAAA;AAER,IAAA,MAAM7C,OAAO,GAAGgD,cAAc,IAAIE,EAAM,EAAE,CAAA;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;IAE5B,MAAMC,aAAa,GAAG,KAAK,CAACV,MAAM,CAACK,YAAY,EAAEH,OAAO,CAAC,CAAA;IAEzD,IAAIG,YAAY,CAACM,MAAM,EAAE;AACvB,MAAA,OAAOD,aAAa,CAACE,IAAI,CAAEC,KAAK,IAAI;AAClC,QAAA,MAAMC,iBAAiB,GAAG,IAAIC,WAAW,CAAC;AAAEC,UAAAA,UAAU,EAAE,IAAA;AAAM,SAAA,CAAC,CAAA;QAC/D,IAAIC,kBAAkB,GAAG,EAAE,CAAA;AAC3B,QAAA,IAAIvD,KAAK,GAAkD;AACzDc,UAAAA,WAAW,EAAE,CAAC;AACdE,UAAAA,YAAY,EAAE,CAAA;SACf,CAAA;QACD,IAAI,KAAK,IAAImC,KAAK,EAAE;UAClB,MAAMK,YAAY,GAAGL,KAAK,CAAA;AACzB,UAAA,CAAC,YAAW;YACX,IAAI;AACF,cAAA,WAAW,MAAMM,KAAK,IAAID,YAAY,EAAE;AACtC,gBAAA,MAAME,KAAK,GAAGD,KAAK,EAAEE,OAAO,GAAG,CAAC,CAAC,EAAED,KAAK,EAAExE,OAAO,IAAI,EAAE,CAAA;AACvDqE,gBAAAA,kBAAkB,IAAIG,KAAK,CAAA;gBAC3B,IAAID,KAAK,CAACzD,KAAK,EAAE;AACfA,kBAAAA,KAAK,GAAG;AACNc,oBAAAA,WAAW,EAAE2C,KAAK,CAACzD,KAAK,CAAC4D,aAAa,IAAI,CAAC;AAC3C5C,oBAAAA,YAAY,EAAEyC,KAAK,CAACzD,KAAK,CAAC6D,iBAAiB,IAAI,CAAA;mBAChD,CAAA;AACF,iBAAA;AACDT,gBAAAA,iBAAiB,CAACU,KAAK,CAACL,KAAK,CAAC,CAAA;AAC/B,eAAA;cACD,MAAM5D,OAAO,GAAG,CAACiD,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CrD,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAAC0C,QAAQ;gBACrBrC,UAAU,EAAEgD,iBAAiB,IAAI/C,OAAO;gBACxCA,OAAO;gBACPC,KAAK,EAAEgD,YAAY,CAAChD,KAAK;AACzBd,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBS,KAAK,EAAEoB,kBAAkB,GAAG,EAAE,GAAG9B,iBAAiB,CAAC+D,YAAY,EAAE,QAAQ,CAAC;AAC1E/C,gBAAAA,MAAM,EAAE,CAAC;AAAEV,kBAAAA,OAAO,EAAEqE,kBAAkB;AAAEtE,kBAAAA,IAAI,EAAE,WAAA;AAAW,iBAAE,CAAC;gBAC5DY,OAAO;AACPC,gBAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,gBAAAA,MAAM,EAAEgE,IAAI;AACZxC,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAAA;AACD,eAAA,CAAC,CAAA;cACFoD,iBAAiB,CAACW,GAAG,EAAE,CAAA;aACxB,CAAC,OAAOC,KAAK,EAAE;AACd;AACAxE,cAAAA,kBAAkB,CAAC;gBACjBJ,MAAM,EAAE,IAAI,CAAC0C,QAAQ;gBACrBrC,UAAU,EAAEgD,iBAAiB,IAAI/C,OAAO;gBACxCA,OAAO;gBACPC,KAAK,EAAEgD,YAAY,CAAChD,KAAK;AACzBd,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBS,KAAK,EAAEoB,kBAAkB,GAAG,EAAE,GAAG9B,iBAAiB,CAAC+D,YAAY,EAAE,QAAQ,CAAC;AAC1E/C,gBAA
AA,MAAM,EAAE,EAAE;AACVC,gBAAAA,OAAO,EAAE,CAAC;AACVC,gBAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,gBAAAA,MAAM,EAAEgE,IAAI;AACZxC,gBAAAA,UAAU,EAAE,GAAG;AACfC,gBAAAA,KAAK,EAAE;AACLc,kBAAAA,WAAW,EAAE,CAAC;AACdE,kBAAAA,YAAY,EAAE,CAAA;AACf,iBAAA;AACF,eAAA,CAAC,CAAA;AACFoC,cAAAA,iBAAiB,CAACa,IAAI,CAAC,OAAO,EAAED,KAAK,CAAC,CAAA;AACvC,aAAA;AACH,WAAC,GAAG,CAAA;AACL,SAAA;AACD,QAAA,OAAOZ,iBAA2D,CAAA;AACpE,OAAC,CAA4C,CAAA;AAC9C,KAAA,MAAM;AACL,MAAA,MAAMc,cAAc,GAAGlB,aAAa,CAACE,IAAI,CACtCiB,MAAM,IAAI;QACT,IAAI,SAAS,IAAIA,MAAM,EAAE;UACvB,MAAMtE,OAAO,GAAG,CAACiD,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CrD,UAAAA,kBAAkB,CAAC;YACjBJ,MAAM,EAAE,IAAI,CAAC0C,QAAQ;YACrBrC,UAAU,EAAEgD,iBAAiB,IAAI/C,OAAO;YACxCA,OAAO;YACPC,KAAK,EAAEgD,YAAY,CAAChD,KAAK;AACzBd,YAAAA,QAAQ,EAAE,QAAQ;YAClBS,KAAK,EAAEoB,kBAAkB,GAAG,EAAE,GAAG9B,iBAAiB,CAAC+D,YAAY,EAAE,QAAQ,CAAC;AAC1E/C,YAAAA,MAAM,EAAE,CAAC;cAAEV,OAAO,EAAEiF,MAAM,CAACR,OAAO,CAAC,CAAC,CAAC,CAACS,OAAO,CAAClF,OAAO;AAAED,cAAAA,IAAI,EAAE,WAAA;aAAa,CAAC;YAC3EY,OAAO;AACPC,YAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,YAAAA,MAAM,EAAEgE,IAAI;AACZxC,YAAAA,UAAU,EAAE,GAAG;AACfC,YAAAA,KAAK,EAAE;AACLc,cAAAA,WAAW,EAAEqD,MAAM,CAACnE,KAAK,EAAE4D,aAAa,IAAI,CAAC;AAC7C5C,cAAAA,YAAY,EAAEmD,MAAM,CAACnE,KAAK,EAAE6D,iBAAiB,IAAI,CAAA;AAClD,aAAA;AACF,WAAA,CAAC,CAAA;AACH,SAAA;AACD,QAAA,OAAOM,MAAM,CAAA;OACd,EACAH,KAAK,IAAI;AACRxE,QAAAA,kBAAkB,CAAC;UACjBJ,MAAM,EAAE,IAAI,CAAC0C,QAAQ;UACrBrC,UAAU,EAAEgD,iBAAiB,IAAI/C,OAAO;UACxCA,OAAO;UACPC,KAAK,EAAEgD,YAAY,CAAChD,KAAK;AACzBd,UAAAA,QAAQ,EAAE,QAAQ;UAClBS,KAAK,EAAEoB,kBAAkB,GAAG,EAAE,GAAG9B,iBAAiB,CAAC+D,YAAY,EAAE,QAAQ,CAAC;AAC1E/C,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAG,IAAY,CAACA,OAAO,IAAI,EAAE;AACpCvB,UAAAA,MAAM,EAAEgE,IAAI;AACZxC,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLc,YAAAA,WAAW,EAAE,CAAC;AACdE,YAAAA,YAAY,EAAE,CAAA;AACf,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACb,OAAC,CAC4B,CAAA;AAE/B,MAAA,OAAOE,cAAc,CAAA;AACtB,KAAA;AACH,GAAA;AACD;;ACjLD,MAAMG,eAAe,GAAI9F,MAAW,IAAyB;EAC3D,OAAO;IACL+F,WAAW,EAAE/F,MAAM,CAAC+F,WAAW;IAC/BC,UAAU,EAAEhG,MAAM,CAACiG,SAAS;IAC5BC,KAAK,EAAElG,MAAM,CAACmG,IAAI;IAClBC,iBAAiB,EAAEpG,MAAM,CAACqG,gBAAgB;IAC1CC,gBAAgB,EAAEtG,MAAM,CAACuG,eAAe;IACxCC,IAAI,EAAExG,MAAM,CAACyG,aAAa;IAC1B/B,MAAM,EAAE1E,MAAM,CAAC0E,MAAAA;GAChB,CAAA;AACH,CAAC,CAAA;AAED,MAAMgC,eAAe,GAAIC,MAA6B,IAAoB;AACxE,EAAA,OAAOA,MAAM,CAACC,GAAG,CAAEC,CAAC,IAAI;IACtB,IAAIlG,OAAO,GAAG,EAAE,CAAA;IAChB,IAAImG,KAAK,CAACC,OAAO,CAACF,CAAC,CAAClG,OAAO,CAAC,EAAE;MAC5BA,OAAO,GAAGkG,CAAC,CAAClG,OAAO,CAChBiG,GAAG,CAAEI,CAAC,IAAI;AACT,QAAA,IAAIA,CAAC,CAACC,IAAI,KAAK,MAAM,EAAE;UACrB,OAAOD,CAAC,CAACE,IAAI,CAAA;AACd,SAAA;AACD,QAAA,OAAO,EAAE,CAAA;AACX,OAAC,CAAC,CACDC,IAAI,CAAC,EAAE,CAAC,CAAA;AACZ,KAAA,MAAM;MACLxG,OAAO,GAAGkG,CAAC,CAAClG,OAAO,CAAA;AACpB,KAAA;IACD,OAAO;MACLD,IAAI,EAAEmG,CAAC,CAACnG,IAAI;AACZC,MAAAA,OAAAA;KACD,CAAA;AACH,GAAC,CAAC,CAAA;AACJ,CAAC,CAAA;AAEM,MAAMyG,+BAA+B,GAAGA,CAC7C7D,QAAiB,EACjBnC,KAAsB,EACtB6C,OAA+C,KAClB;AAC7B,EAAA,MAAMoD,UAAU,GAA8B;IAC5CC,YAAY,EAAE,OAAO;MAAEC,UAAU;AAAEvH,MAAAA,MAAAA;AAAQ,KAAA,KAAI;AAC7C,MAAA,MAAMsE,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;AAC5B,MAAA,IAAIgD,YAAY,GAAG;AACjB,QAAA,GAAGvD,OAAO;QACV,GAAG6B,eAAe,CAAC9F,MAAM,CAAA;OAC1B,CAAA;MACD,IAAI;AACF,QAAA,MAAM4F,MAAM,GAAG,MAAM2B,UAAU,EAAE,CAAA;QACjC,MAAMjG,OAAO,GAAG,CAACiD,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAE/CrD,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAE0C,QAAQ;UAChBrC,UAAU,EAAE+C,OAAO,CAACC,iBAAiB;UACrC/C,OAAO,EAAE8C,OAAO,CAACE,cAAc;UAC/B/C,KAAK,EAAEA,KAAK,CAACqG,OAAO;AACpBnH,UAAAA,QAAQ,EAAE,QAAQ;AAClBS,UAAAA,KAAK,
EAAEkD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGuE,eAAe,CAAC1G,MAAM,CAAC2G,MAAM,CAAC;AACvEtF,UAAAA,MAAM,EAAE,CAAC;YAAEV,OAAO,EAAEiF,MAAM,CAACsB,IAAI;AAAExG,YAAAA,IAAI,EAAE,WAAA;WAAa,CAAC;UACrDY,OAAO;AACPC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAEwH,YAAmB;AAC3BhG,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLc,YAAAA,WAAW,EAAEqD,MAAM,CAACnE,KAAK,CAACiG,YAAY;AACtCjF,YAAAA,YAAY,EAAEmD,MAAM,CAACnE,KAAK,CAACkG,gBAAAA;AAC5B,WAAA;AACF,SAAA,CAAC,CAAA;AAEF,QAAA,OAAO/B,MAAM,CAAA;OACd,CAAC,OAAOH,KAAK,EAAE;AACdxE,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAE0C,QAAQ;UAChBrC,UAAU,EAAE+C,OAAO,CAACC,iBAAiB;UACrC/C,OAAO,EAAE8C,OAAO,CAACE,cAAc;UAC/B/C,KAAK,EAAEA,KAAK,CAACqG,OAAO;AACpBnH,UAAAA,QAAQ,EAAE,QAAQ;AAClBS,UAAAA,KAAK,EAAEkD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGuE,eAAe,CAAC1G,MAAM,CAAC2G,MAAM,CAAC;AACvEtF,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAEwH,YAAmB;AAC3BhG,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLc,YAAAA,WAAW,EAAE,CAAC;AACdE,YAAAA,YAAY,EAAE,CAAA;AACf,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACZ,OAAA;KACF;IAEDmC,UAAU,EAAE,OAAO;MAAEC,QAAQ;AAAE7H,MAAAA,MAAAA;AAAQ,KAAA,KAAI;AACzC,MAAA,MAAMsE,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;MAC5B,IAAIsD,aAAa,GAAG,EAAE,CAAA;MACtB,IAAIrG,KAAK,GAAoD,EAAE,CAAA;AAC/D,MAAA,IAAI+F,YAAY,GAAG;AACjB,QAAA,GAAGvD,OAAO;QACV,GAAG6B,eAAe,CAAC9F,MAAM,CAAA;OAC1B,CAAA;MACD,IAAI;QACF,MAAM;UAAE0E,MAAM;UAAE,GAAGqD,IAAAA;SAAM,GAAG,MAAMF,QAAQ,EAAE,CAAA;AAE5C,QAAA,MAAMG,eAAe,GAAG,IAAIC,eAAe,CAAuD;AAChGC,UAAAA,SAASA,CAAChD,KAAK,EAAEiD,UAAU,EAAA;AACzB,YAAA,IAAIjD,KAAK,CAAC+B,IAAI,KAAK,YAAY,EAAE;cAC/Ba,aAAa,IAAI5C,KAAK,CAACkD,SAAS,CAAA;AACjC,aAAA;AACD,YAAA,IAAIlD,KAAK,CAAC+B,IAAI,KAAK,QAAQ,EAAE;AAC3BxF,cAAAA,KAAK,GAAG;AACNc,gBAAAA,WAAW,EAAE2C,KAAK,CAACzD,KAAK,EAAEiG,YAAY;AACtCjF,gBAAAA,YAAY,EAAEyC,KAAK,CAACzD,KAAK,EAAEkG,gBAAAA;eAC5B,CAAA;AACF,aAAA;AACDQ,YAAAA,UAAU,CAACE,OAAO,CAACnD,KAAK,CAAC,CAAA;WAC1B;AAEDoD,UAAAA,KAAKA,GAAA;YACH,MAAMhH,OAAO,GAAG,CAACiD,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/CrD,YAAAA,kBAAkB,CAAC;AACjBJ,cAAAA,MAAM,EAAE0C,QAAQ;cAChBrC,UAAU,EAAE+C,OAAO,CAACC,iBAAiB;cACrC/C,OAAO,EAAE8C,OAAO,CAACE,cAAc;cAC/B/C,KAAK,EAAEA,KAAK,CAACqG,OAAO;AACpBnH,cAAAA,QAAQ,EAAE,QAAQ;AAClBS,cAAAA,KAAK,EAAEkD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGuE,eAAe,CAAC1G,MAAM,CAAC2G,MAAM,CAAC;AACvEtF,cAAAA,MAAM,EAAE,CAAC;AAAEV,gBAAAA,OAAO,EAAEmH,aAAa;AAAEpH,gBAAAA,IAAI,EAAE,WAAA;AAAW,eAAE,CAAC;cACvDY,OAAO;AACPC,cAAAA,OAAO,EAAE,EAAE;AACXvB,cAAAA,MAAM,EAAEwH,YAAmB;AAC3BhG,cAAAA,UAAU,EAAE,GAAG;AACfC,cAAAA,KAAAA;AACD,aAAA,CAAC,CAAA;AACJ,WAAA;AACD,SAAA,CAAC,CAAA;QAEF,OAAO;AACLiD,UAAAA,MAAM,EAAEA,MAAM,CAAC6D,WAAW,CAACP,eAAe,CAAC;UAC3C,GAAGD,IAAAA;SACJ,CAAA;OACF,CAAC,OAAOtC,KAAK,EAAE;AACdxE,QAAAA,kBAAkB,CAAC;AACjBJ,UAAAA,MAAM,EAAE0C,QAAQ;UAChBrC,UAAU,EAAE+C,OAAO,CAACC,iBAAiB;UACrC/C,OAAO,EAAE8C,OAAO,CAACE,cAAc;UAC/B/C,KAAK,EAAEA,KAAK,CAACqG,OAAO;AACpBnH,UAAAA,QAAQ,EAAE,QAAQ;AAClBS,UAAAA,KAAK,EAAEkD,OAAO,CAAC9B,kBAAkB,GAAG,EAAE,GAAGuE,eAAe,CAAC1G,MAAM,CAAC2G,MAAM,CAAC;AACvEtF,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXvB,UAAAA,MAAM,EAAEwH,YAAmB;AAC3BhG,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLc,YAAAA,WAAW,EAAE,CAAC;AACdE,YAAAA,YAAY,EAAE,CAAA;AACf,WAAA;AACF,SAAA,CAAC,CAAA;AACF,QAAA,MAAMgD,KAAK,CAAA;AACZ,OAAA;AACH,KAAA;GACD,CAAA;AAED,EAAA,OAAO4B,UAAU,CAAA;AACnB,CAAC,CAAA;AAEM,MAAMmB,uBAAuB,GAAGA,CACrCpH,KAAsB,EACtBmC,QAAiB,EACjBU,OAA+C,KAC5B;EACnB,MAAM9C,OAAO,GAAG8C,OAAO,CAACE,cAAc,IAAIE,EAAM,EAAE,CAAA;AAClD,EAAA,MAAMgD,UAAU,GAAGD,+BAA+B,CAAC7D,QAAQ,EAAEnC,KAAK,EAAE;AAClE,IAAA,GAAG6C,OAAO;AACVE,IAAAA,cAAc,
EAAEhD,OAAO;AACvB+C,IAAAA,iBAAiB,EAAED,OAAO,CAACC,iBAAiB,IAAI/C,OAAAA;AACjD,GAAA,CAAC,CAAA;EAEF,MAAMsH,YAAY,GAAGC,8BAAiB,CAAC;IACrCtH,KAAK;AACLiG,IAAAA,UAAAA;AACD,GAAA,CAAC,CAAA;AAEF,EAAA,OAAOoB,YAAY,CAAA;AACrB;;;;"}
package/lib/posthog-ai/src/utils.d.ts
CHANGED

@@ -9,10 +9,6 @@ export interface MonitoringParams {
     posthogGroups?: Record<string, any>;
 }
 export declare const getModelParams: (params: ChatCompletionCreateParamsBase & MonitoringParams) => Record<string, any>;
-export declare const getUsage: (response: any, provider: string) => {
-    input_tokens: number;
-    output_tokens: number;
-};
 /**
  * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.
  */
@@ -42,8 +38,8 @@ export type SendEventToPosthogParams = {
     baseURL: string;
     httpStatus: number;
     usage?: {
-
-
+        inputTokens?: number;
+        outputTokens?: number;
     };
     params: ChatCompletionCreateParamsBase & MonitoringParams;
 };
package/lib/posthog-ai/src/vercel/middleware.d.ts
CHANGED

@@ -1,11 +1,11 @@
 import type { LanguageModelV1, Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware } from 'ai';
-import
+import { PostHog } from 'posthog-node';
 interface CreateInstrumentationMiddlewareOptions {
     posthogDistinctId?: string;
     posthogTraceId: string;
     posthogProperties?: Record<string, any>;
     posthogPrivacyMode?: boolean;
-    posthogGroups?: string
+    posthogGroups?: Record<string, any>;
 }
 export declare const createInstrumentationMiddleware: (phClient: PostHog, model: LanguageModelV1, options: CreateInstrumentationMiddlewareOptions) => LanguageModelV1Middleware;
 export declare const wrapVercelLanguageModel: (model: LanguageModelV1, phClient: PostHog, options: CreateInstrumentationMiddlewareOptions) => LanguageModelV1;
package/package.json
CHANGED
package/src/openai/index.ts
CHANGED
@@ -89,9 +89,9 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
       return parentPromise.then((value) => {
         const passThroughStream = new PassThrough({ objectMode: true })
         let accumulatedContent = ''
-        let usage: {
-
-
+        let usage: { inputTokens: number; outputTokens: number } = {
+          inputTokens: 0,
+          outputTokens: 0,
         }
         if ('tee' in value) {
           const openAIStream = value
@@ -102,8 +102,8 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
                 accumulatedContent += delta
                 if (chunk.usage) {
                   usage = {
-
-
+                    inputTokens: chunk.usage.prompt_tokens ?? 0,
+                    outputTokens: chunk.usage.completion_tokens ?? 0,
                   }
                 }
                 passThroughStream.write(chunk)
@@ -139,8 +139,8 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
                 params: body,
                 httpStatus: 500,
                 usage: {
-
-
+                  inputTokens: 0,
+                  outputTokens: 0,
                 },
               })
               passThroughStream.emit('error', error)
@@ -167,8 +167,8 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
               params: body,
               httpStatus: 200,
               usage: {
-
-
+                inputTokens: result.usage?.prompt_tokens ?? 0,
+                outputTokens: result.usage?.completion_tokens ?? 0,
               },
             })
           }
@@ -188,8 +188,8 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
             params: body,
             httpStatus: 500,
             usage: {
-
-
+              inputTokens: 0,
+              outputTokens: 0,
             },
           })
           throw error
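
For context, a minimal sketch (not part of the released package) of exercising the streaming path changed above: the wrapped client forwards chunks through a PassThrough and reports token usage as `inputTokens`/`outputTokens` on the captured event. The model id, env-var handling, and literal ids here are assumptions.

```typescript
import { OpenAI } from '@posthog/ai'
import { PostHog } from 'posthog-node'

const phClient = new PostHog(process.env.POSTHOG_API_KEY ?? '', { host: 'https://us.i.posthog.com' })
const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY ?? '', posthog: phClient })

async function main() {
  // stream: true hits the PassThrough branch shown in the hunks above
  const stream = await client.chat.completions.create({
    model: 'gpt-4o-mini', // assumed model id
    messages: [{ role: 'user', content: 'Say hi' }],
    stream: true,
    posthogDistinctId: 'user_123',
    posthogTraceId: 'trace_123',
  })
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content ?? '')
  }
  // a single $ai_generation event is captured once the stream finishes
  await phClient.shutdown()
}

main()
```
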
package/src/utils.ts
CHANGED
@@ -34,26 +34,6 @@ export const getModelParams = (params: ChatCompletionCreateParamsBase & Monitori
   return modelParams
 }
 
-export const getUsage = (response: any, provider: string): { input_tokens: number; output_tokens: number } => {
-  if (!response?.usage) {
-    return { input_tokens: 0, output_tokens: 0 }
-  }
-
-  if (provider === 'anthropic') {
-    return {
-      input_tokens: response.usage.input_tokens ?? 0,
-      output_tokens: response.usage.output_tokens ?? 0,
-    }
-  } else if (provider === 'openai') {
-    return {
-      input_tokens: response.usage.prompt_tokens ?? 0,
-      output_tokens: response.usage.completion_tokens ?? 0,
-    }
-  }
-
-  return { input_tokens: 0, output_tokens: 0 }
-}
-
 /**
  * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.
  */
@@ -123,7 +103,7 @@ export type SendEventToPosthogParams = {
   latency: number
   baseURL: string
   httpStatus: number
-  usage?: {
+  usage?: { inputTokens?: number; outputTokens?: number }
   params: ChatCompletionCreateParamsBase & MonitoringParams
 }
 
@@ -141,6 +121,18 @@ export const sendEventToPosthog = ({
   httpStatus = 200,
   usage = {},
 }: SendEventToPosthogParams): void => {
+  console.log('sendEventToPosthog', {
+    client,
+    distinctId,
+    traceId,
+    model,
+    provider,
+    input,
+    output,
+    latency,
+    baseURL,
+    params,
+  })
   if (client.capture) {
     client.capture({
       distinctId: distinctId ?? traceId,
@@ -152,8 +144,8 @@ export const sendEventToPosthog = ({
         $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, input),
         $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, output),
         $ai_http_status: httpStatus,
-        $ai_input_tokens: usage.
-        $ai_output_tokens: usage.
+        $ai_input_tokens: usage.inputTokens ?? 0,
+        $ai_output_tokens: usage.outputTokens ?? 0,
         $ai_latency: latency,
         $ai_trace_id: traceId,
         $ai_base_url: baseURL,
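
With `getUsage` removed, callers pass the camelCase usage shape directly. A hedged sketch of how the updated helper is invoked from inside the package (the internal import path and all literal values are placeholders, not documented API):

```typescript
import { PostHog } from 'posthog-node'
import { sendEventToPosthog } from './utils' // package-internal module, as the wrappers import it

const phClient = new PostHog('<YOUR_PROJECT_API_KEY>')

sendEventToPosthog({
  client: phClient,
  traceId: 'trace_123',
  model: 'gpt-4o-mini',
  provider: 'openai',
  input: [{ role: 'user', content: 'Hello' }],
  output: [{ content: 'Hi there', role: 'assistant' }],
  latency: 0.42,
  baseURL: 'https://api.openai.com/v1',
  httpStatus: 200,
  // camelCase keys feed $ai_input_tokens / $ai_output_tokens above
  usage: { inputTokens: 12, outputTokens: 5 },
  params: { model: 'gpt-4o-mini', messages: [] } as any,
})
```
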
package/src/vercel/middleware.ts
CHANGED
@@ -2,10 +2,11 @@ import { experimental_wrapLanguageModel as wrapLanguageModel } from 'ai'
 import type {
   LanguageModelV1,
   Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware,
+  LanguageModelV1Prompt,
   LanguageModelV1StreamPart,
 } from 'ai'
 import { v4 as uuidv4 } from 'uuid'
-import
+import { PostHog } from 'posthog-node'
 import { sendEventToPosthog } from '../utils'
 
 interface CreateInstrumentationMiddlewareOptions {
@@ -13,7 +14,46 @@ interface CreateInstrumentationMiddlewareOptions {
   posthogTraceId: string
   posthogProperties?: Record<string, any>
   posthogPrivacyMode?: boolean
-  posthogGroups?: string
+  posthogGroups?: Record<string, any>
+}
+
+interface PostHogInput {
+  content: string
+  role: string
+}
+
+const mapVercelParams = (params: any): Record<string, any> => {
+  return {
+    temperature: params.temperature,
+    max_tokens: params.maxTokens,
+    top_p: params.topP,
+    frequency_penalty: params.frequencyPenalty,
+    presence_penalty: params.presencePenalty,
+    stop: params.stopSequences,
+    stream: params.stream,
+  }
+}
+
+const mapVercelPrompt = (prompt: LanguageModelV1Prompt): PostHogInput[] => {
+  return prompt.map((p) => {
+    let content = ''
+    if (Array.isArray(p.content)) {
+      content = p.content
+        .map((c) => {
+          if (c.type === 'text') {
+            return c.text
+          }
+          return ''
+        })
+        .join('')
+    } else {
+      content = p.content
+    }
+    return {
+      role: p.role,
+      content,
+    }
+  })
 }
 
 export const createInstrumentationMiddleware = (
@@ -24,7 +64,10 @@ export const createInstrumentationMiddleware = (
   const middleware: LanguageModelV1Middleware = {
     wrapGenerate: async ({ doGenerate, params }) => {
       const startTime = Date.now()
-
+      let mergedParams = {
+        ...options,
+        ...mapVercelParams(params),
+      }
       try {
         const result = await doGenerate()
         const latency = (Date.now() - startTime) / 1000
@@ -35,15 +78,15 @@ export const createInstrumentationMiddleware = (
           traceId: options.posthogTraceId,
           model: model.modelId,
           provider: 'vercel',
-          input: options.posthogPrivacyMode ? '' : params.prompt,
+          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
           output: [{ content: result.text, role: 'assistant' }],
           latency,
           baseURL: '',
-          params:
+          params: mergedParams as any,
          httpStatus: 200,
          usage: {
-
-
+            inputTokens: result.usage.promptTokens,
+            outputTokens: result.usage.completionTokens,
          },
        })
 
@@ -55,15 +98,15 @@ export const createInstrumentationMiddleware = (
          traceId: options.posthogTraceId,
          model: model.modelId,
          provider: 'vercel',
-         input: options.posthogPrivacyMode ? '' : params.prompt,
+         input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
          output: [],
          latency: 0,
          baseURL: '',
-         params:
+         params: mergedParams as any,
          httpStatus: 500,
          usage: {
-
-
+           inputTokens: 0,
+           outputTokens: 0,
          },
        })
        throw error
@@ -73,8 +116,11 @@ export const createInstrumentationMiddleware = (
     wrapStream: async ({ doStream, params }) => {
       const startTime = Date.now()
       let generatedText = ''
-      let usage: {
-
+      let usage: { inputTokens?: number; outputTokens?: number } = {}
+      let mergedParams = {
+        ...options,
+        ...mapVercelParams(params),
+      }
       try {
         const { stream, ...rest } = await doStream()
 
@@ -85,8 +131,8 @@ export const createInstrumentationMiddleware = (
            }
            if (chunk.type === 'finish') {
              usage = {
-
-
+                inputTokens: chunk.usage?.promptTokens,
+                outputTokens: chunk.usage?.completionTokens,
              }
            }
            controller.enqueue(chunk)
@@ -100,11 +146,11 @@ export const createInstrumentationMiddleware = (
              traceId: options.posthogTraceId,
              model: model.modelId,
              provider: 'vercel',
-             input: options.posthogPrivacyMode ? '' : params.prompt,
+             input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
              output: [{ content: generatedText, role: 'assistant' }],
              latency,
              baseURL: '',
-             params:
+             params: mergedParams as any,
              httpStatus: 200,
              usage,
            })
@@ -122,15 +168,15 @@ export const createInstrumentationMiddleware = (
          traceId: options.posthogTraceId,
          model: model.modelId,
          provider: 'vercel',
-         input: options.posthogPrivacyMode ? '' : params.prompt,
+         input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
          output: [],
          latency: 0,
          baseURL: '',
-         params:
+         params: mergedParams as any,
          httpStatus: 500,
          usage: {
-
-
+           inputTokens: 0,
+           outputTokens: 0,
          },
        })
        throw error
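
A hedged sketch of using the new middleware end to end. `generateText` and `@ai-sdk/openai` are standard Vercel AI SDK packages; the root export path for `wrapVercelLanguageModel`, the model id, and the group values are assumptions, not taken from the package docs.

```typescript
import { generateText } from 'ai'
import { openai } from '@ai-sdk/openai'
import { PostHog } from 'posthog-node'
import { wrapVercelLanguageModel } from '@posthog/ai' // export path assumed

const phClient = new PostHog('<YOUR_PROJECT_API_KEY>', { host: 'https://us.i.posthog.com' })

async function main() {
  const model = wrapVercelLanguageModel(openai('gpt-4o-mini'), phClient, {
    posthogTraceId: 'trace_123',
    posthogDistinctId: 'user_123',
    posthogGroups: { company: 'acme_inc' }, // now Record<string, any> rather than string
    posthogPrivacyMode: false,
  })

  // prompts are flattened via mapVercelPrompt before being captured in PostHog
  const { text } = await generateText({ model, prompt: 'Tell me something interesting about observability' })
  console.log(text)

  await phClient.shutdown()
}

main()
```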