@posthog/ai 3.3.1 → 4.0.0

This diff shows the publicly released contents of two package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (39)
  1. package/CHANGELOG.md +8 -0
  2. package/lib/anthropic/index.cjs.js +293 -0
  3. package/lib/anthropic/index.cjs.js.map +1 -0
  4. package/lib/anthropic/index.d.ts +45 -0
  5. package/lib/anthropic/index.esm.js +282 -0
  6. package/lib/anthropic/index.esm.js.map +1 -0
  7. package/lib/index.cjs.js +36 -8
  8. package/lib/index.cjs.js.map +1 -1
  9. package/lib/index.d.ts +2 -2
  10. package/lib/index.esm.js +36 -8
  11. package/lib/index.esm.js.map +1 -1
  12. package/lib/langchain/index.cjs.js +1003 -0
  13. package/lib/langchain/index.cjs.js.map +1 -0
  14. package/lib/langchain/index.d.ts +68 -0
  15. package/lib/langchain/index.esm.js +979 -0
  16. package/lib/langchain/index.esm.js.map +1 -0
  17. package/lib/openai/index.cjs.js +286 -0
  18. package/lib/openai/index.cjs.js.map +1 -0
  19. package/lib/openai/index.d.ts +49 -0
  20. package/lib/openai/index.esm.js +274 -0
  21. package/lib/openai/index.esm.js.map +1 -0
  22. package/lib/posthog-ai/src/anthropic/index.d.ts +1 -0
  23. package/lib/posthog-ai/src/langchain/index.d.ts +1 -0
  24. package/lib/posthog-ai/src/openai/index.d.ts +3 -2
  25. package/lib/posthog-ai/src/vercel/index.d.ts +1 -0
  26. package/lib/vercel/index.cjs.js +408 -0
  27. package/lib/vercel/index.cjs.js.map +1 -0
  28. package/lib/vercel/index.d.ts +21 -0
  29. package/lib/vercel/index.esm.js +404 -0
  30. package/lib/vercel/index.esm.js.map +1 -0
  31. package/package.json +28 -1
  32. package/src/anthropic/index.ts +2 -0
  33. package/src/langchain/callbacks.ts +39 -7
  34. package/src/langchain/index.ts +1 -0
  35. package/src/openai/index.ts +4 -2
  36. package/src/utils.ts +1 -1
  37. package/src/vercel/index.ts +1 -0
  38. package/src/vercel/middleware.ts +4 -4
  39. package/tsconfig.json +1 -0
package/lib/vercel/index.esm.js ADDED
@@ -0,0 +1,404 @@
+ import { experimental_wrapLanguageModel } from 'ai';
+ import { v4 } from 'uuid';
+
+ const getModelParams = params => {
+   if (!params) {
+     return {};
+   }
+   const modelParams = {};
+   const paramKeys = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p', 'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming'];
+   for (const key of paramKeys) {
+     if (key in params && params[key] !== undefined) {
+       modelParams[key] = params[key];
+     }
+   }
+   return modelParams;
+ };
+ const withPrivacyMode = (client, privacyMode, input) => {
+   return client.privacy_mode || privacyMode ? null : input;
+ };
+ const sendEventToPosthog = ({
+   client,
+   distinctId,
+   traceId,
+   model,
+   provider,
+   input,
+   output,
+   latency,
+   baseURL,
+   params,
+   httpStatus = 200,
+   usage = {},
+   isError = false,
+   error,
+   tools
+ }) => {
+   if (client.capture) {
+     let errorData = {};
+     if (isError) {
+       errorData = {
+         $ai_is_error: true,
+         $ai_error: error
+       };
+     }
+     let costOverrideData = {};
+     if (params.posthogCostOverride) {
+       const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0);
+       const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 0);
+       costOverrideData = {
+         $ai_input_cost_usd: inputCostUSD,
+         $ai_output_cost_usd: outputCostUSD,
+         $ai_total_cost_usd: inputCostUSD + outputCostUSD
+       };
+     }
+     const additionalTokenValues = {
+       ...(usage.reasoningTokens ? {
+         $ai_reasoning_tokens: usage.reasoningTokens
+       } : {}),
+       ...(usage.cacheReadInputTokens ? {
+         $ai_cache_read_input_tokens: usage.cacheReadInputTokens
+       } : {}),
+       ...(usage.cacheCreationInputTokens ? {
+         $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens
+       } : {})
+     };
+     client.capture({
+       distinctId: distinctId ?? traceId,
+       event: '$ai_generation',
+       properties: {
+         $ai_provider: params.posthogProviderOverride ?? provider,
+         $ai_model: params.posthogModelOverride ?? model,
+         $ai_model_parameters: getModelParams(params),
+         $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, input),
+         $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, output),
+         $ai_http_status: httpStatus,
+         $ai_input_tokens: usage.inputTokens ?? 0,
+         $ai_output_tokens: usage.outputTokens ?? 0,
+         ...additionalTokenValues,
+         $ai_latency: latency,
+         $ai_trace_id: traceId,
+         $ai_base_url: baseURL,
+         ...params.posthogProperties,
+         ...(distinctId ? {} : {
+           $process_person_profile: false
+         }),
+         ...(tools ? {
+           $ai_tools: tools
+         } : {}),
+         ...errorData,
+         ...costOverrideData
+       },
+       groups: params.posthogGroups
+     });
+   }
+ };
+
+ const mapVercelParams = params => {
+   return {
+     temperature: params.temperature,
+     max_tokens: params.maxTokens,
+     top_p: params.topP,
+     frequency_penalty: params.frequencyPenalty,
+     presence_penalty: params.presencePenalty,
+     stop: params.stopSequences,
+     stream: params.stream
+   };
+ };
+ const mapVercelPrompt = prompt => {
+   return prompt.map(p => {
+     let content = {};
+     if (Array.isArray(p.content)) {
+       content = p.content.map(c => {
+         if (c.type === 'text') {
+           return {
+             type: 'text',
+             content: c.text
+           };
+         } else if (c.type === 'image') {
+           return {
+             type: 'image',
+             content: {
+               // if image is a url use it, or use "none supported"
+               image: c.image instanceof URL ? c.image.toString() : 'raw images not supported',
+               mimeType: c.mimeType
+             }
+           };
+         } else if (c.type === 'file') {
+           return {
+             type: 'file',
+             content: {
+               file: c.data instanceof URL ? c.data.toString() : 'raw files not supported',
+               mimeType: c.mimeType
+             }
+           };
+         } else if (c.type === 'tool-call') {
+           return {
+             type: 'tool-call',
+             content: {
+               toolCallId: c.toolCallId,
+               toolName: c.toolName,
+               args: c.args
+             }
+           };
+         } else if (c.type === 'tool-result') {
+           return {
+             type: 'tool-result',
+             content: {
+               toolCallId: c.toolCallId,
+               toolName: c.toolName,
+               result: c.result,
+               isError: c.isError
+             }
+           };
+         }
+         return {
+           content: ''
+         };
+       });
+     } else {
+       content = {
+         type: 'text',
+         text: p.content
+       };
+     }
+     return {
+       role: p.role,
+       content
+     };
+   });
+ };
+ const mapVercelOutput = result => {
+   const output = {
+     ...(result.text ? {
+       text: result.text
+     } : {}),
+     ...(result.object ? {
+       object: result.object
+     } : {}),
+     ...(result.reasoning ? {
+       reasoning: result.reasoning
+     } : {}),
+     ...(result.response ? {
+       response: result.response
+     } : {}),
+     ...(result.finishReason ? {
+       finishReason: result.finishReason
+     } : {}),
+     ...(result.usage ? {
+       usage: result.usage
+     } : {}),
+     ...(result.warnings ? {
+       warnings: result.warnings
+     } : {}),
+     ...(result.providerMetadata ? {
+       toolCalls: result.providerMetadata
+     } : {})
+   };
+   // if text and no object or reasoning, return text
+   if (output.text && !output.object && !output.reasoning) {
+     return [{
+       content: output.text,
+       role: 'assistant'
+     }];
+   }
+   return [{
+     content: JSON.stringify(output),
+     role: 'assistant'
+   }];
+ };
+ const extractProvider = model => {
+   // vercel provider is in the format of provider.endpoint
+   const provider = model.provider.toLowerCase();
+   const providerName = provider.split('.')[0];
+   return providerName;
+ };
+ const createInstrumentationMiddleware = (phClient, model, options) => {
+   const middleware = {
+     wrapGenerate: async ({
+       doGenerate,
+       params
+     }) => {
+       const startTime = Date.now();
+       const mergedParams = {
+         ...options,
+         ...mapVercelParams(params)
+       };
+       try {
+         const result = await doGenerate();
+         const latency = (Date.now() - startTime) / 1000;
+         const modelId = options.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId);
+         const provider = options.posthogProviderOverride ?? extractProvider(model);
+         const baseURL = ''; // cannot currently get baseURL from vercel
+         const content = mapVercelOutput(result);
+         // let tools = result.toolCalls
+         const providerMetadata = result.providerMetadata;
+         const additionalTokenValues = {
+           ...(providerMetadata?.openai?.reasoningTokens ? {
+             reasoningTokens: providerMetadata.openai.reasoningTokens
+           } : {}),
+           ...(providerMetadata?.openai?.cachedPromptTokens ? {
+             cacheReadInputTokens: providerMetadata.openai.cachedPromptTokens
+           } : {}),
+           ...(providerMetadata?.anthropic ? {
+             cacheReadInputTokens: providerMetadata.anthropic.cacheReadInputTokens,
+             cacheCreationInputTokens: providerMetadata.anthropic.cacheCreationInputTokens
+           } : {})
+         };
+         sendEventToPosthog({
+           client: phClient,
+           distinctId: options.posthogDistinctId,
+           traceId: options.posthogTraceId,
+           model: modelId,
+           provider: provider,
+           input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
+           output: [{
+             content,
+             role: 'assistant'
+           }],
+           latency,
+           baseURL,
+           params: mergedParams,
+           httpStatus: 200,
+           usage: {
+             inputTokens: result.usage.promptTokens,
+             outputTokens: result.usage.completionTokens,
+             ...additionalTokenValues
+           }
+         });
+         return result;
+       } catch (error) {
+         const modelId = model.modelId;
+         sendEventToPosthog({
+           client: phClient,
+           distinctId: options.posthogDistinctId,
+           traceId: options.posthogTraceId,
+           model: modelId,
+           provider: model.provider,
+           input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
+           output: [],
+           latency: 0,
+           baseURL: '',
+           params: mergedParams,
+           httpStatus: error?.status ? error.status : 500,
+           usage: {
+             inputTokens: 0,
+             outputTokens: 0
+           },
+           isError: true,
+           error: JSON.stringify(error)
+         });
+         throw error;
+       }
+     },
+     wrapStream: async ({
+       doStream,
+       params
+     }) => {
+       const startTime = Date.now();
+       let generatedText = '';
+       let usage = {};
+       const mergedParams = {
+         ...options,
+         ...mapVercelParams(params)
+       };
+       const modelId = options.posthogModelOverride ?? model.modelId;
+       const provider = options.posthogProviderOverride ?? extractProvider(model);
+       const baseURL = ''; // cannot currently get baseURL from vercel
+       try {
+         const {
+           stream,
+           ...rest
+         } = await doStream();
+         const transformStream = new TransformStream({
+           transform(chunk, controller) {
+             if (chunk.type === 'text-delta') {
+               generatedText += chunk.textDelta;
+             }
+             if (chunk.type === 'finish') {
+               usage = {
+                 inputTokens: chunk.usage?.promptTokens,
+                 outputTokens: chunk.usage?.completionTokens
+               };
+               if (chunk.providerMetadata?.openai?.reasoningTokens) {
+                 usage.reasoningTokens = chunk.providerMetadata.openai.reasoningTokens;
+               }
+               if (chunk.providerMetadata?.openai?.cachedPromptTokens) {
+                 usage.cacheReadInputTokens = chunk.providerMetadata.openai.cachedPromptTokens;
+               }
+               if (chunk.providerMetadata?.anthropic?.cacheReadInputTokens) {
+                 usage.cacheReadInputTokens = chunk.providerMetadata.anthropic.cacheReadInputTokens;
+               }
+               if (chunk.providerMetadata?.anthropic?.cacheCreationInputTokens) {
+                 usage.cacheCreationInputTokens = chunk.providerMetadata.anthropic.cacheCreationInputTokens;
+               }
+             }
+             controller.enqueue(chunk);
+           },
+           flush() {
+             const latency = (Date.now() - startTime) / 1000;
+             sendEventToPosthog({
+               client: phClient,
+               distinctId: options.posthogDistinctId,
+               traceId: options.posthogTraceId,
+               model: modelId,
+               provider: provider,
+               input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
+               output: [{
+                 content: generatedText,
+                 role: 'assistant'
+               }],
+               latency,
+               baseURL,
+               params: mergedParams,
+               httpStatus: 200,
+               usage
+             });
+           }
+         });
+         return {
+           stream: stream.pipeThrough(transformStream),
+           ...rest
+         };
+       } catch (error) {
+         sendEventToPosthog({
+           client: phClient,
+           distinctId: options.posthogDistinctId,
+           traceId: options.posthogTraceId,
+           model: modelId,
+           provider: provider,
+           input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
+           output: [],
+           latency: 0,
+           baseURL: '',
+           params: mergedParams,
+           httpStatus: error?.status ? error.status : 500,
+           usage: {
+             inputTokens: 0,
+             outputTokens: 0
+           },
+           isError: true,
+           error: JSON.stringify(error)
+         });
+         throw error;
+       }
+     }
+   };
+   return middleware;
+ };
+ const wrapVercelLanguageModel = (model, phClient, options) => {
+   const traceId = options.posthogTraceId ?? v4();
+   const middleware = createInstrumentationMiddleware(phClient, model, {
+     ...options,
+     posthogTraceId: traceId,
+     posthogDistinctId: options.posthogDistinctId ?? traceId
+   });
+   const wrappedModel = experimental_wrapLanguageModel({
+     model,
+     middleware
+   });
+   return wrappedModel;
+ };
+
+ export { wrapVercelLanguageModel as withTracing };
+ //# sourceMappingURL=index.esm.js.map
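
For orientation, here is a minimal sketch of how this new ./vercel entry point is consumed. The @ai-sdk/openai provider, model name, and property values are illustrative assumptions, not part of this diff:

  import { PostHog } from 'posthog-node'
  import { openai } from '@ai-sdk/openai' // assumed provider package
  import { generateText } from 'ai'
  import { withTracing } from '@posthog/ai/vercel'

  const phClient = new PostHog('<posthog-project-api-key>')

  // Wrap the model; the middleware above then emits an $ai_generation event
  // per generate/stream call. A uuid v4 trace id is generated when
  // posthogTraceId is omitted, and distinctId falls back to the trace id.
  const model = withTracing(openai('gpt-4o-mini'), phClient, {
    posthogDistinctId: 'user-123',
    posthogPrivacyMode: false,
    posthogProperties: { conversationId: 'abc' }, // illustrative
  })

  const { text } = await generateText({ model, prompt: 'Say hello' })
  await phClient.shutdown()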
package/lib/vercel/index.esm.js.map ADDED
@@ -0,0 +1 @@
+ [single-line version 3 source map for index.esm.js; sources: ../../src/utils.ts and ../../src/vercel/middleware.ts, with their TypeScript text embedded in sourcesContent]
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@posthog/ai",
-   "version": "3.3.1",
+   "version": "4.0.0",
    "description": "PostHog Node.js AI integrations",
    "repository": {
      "type": "git",
@@ -37,6 +37,33 @@
      "test": "jest",
      "prepublishOnly": "cd .. && yarn build"
    },
+   "exports": {
+     ".": {
+       "require": "./lib/index.cjs.js",
+       "import": "./lib/index.esm.js",
+       "types": "./lib/index.d.ts"
+     },
+     "./anthropic": {
+       "require": "./lib/anthropic/index.cjs.js",
+       "import": "./lib/anthropic/index.esm.js",
+       "types": "./lib/anthropic/index.d.ts"
+     },
+     "./openai": {
+       "require": "./lib/openai/index.cjs.js",
+       "import": "./lib/openai/index.esm.js",
+       "types": "./lib/openai/index.d.ts"
+     },
+     "./vercel": {
+       "require": "./lib/vercel/index.cjs.js",
+       "import": "./lib/vercel/index.esm.js",
+       "types": "./lib/vercel/index.d.ts"
+     },
+     "./langchain": {
+       "require": "./lib/langchain/index.cjs.js",
+       "import": "./lib/langchain/index.esm.js",
+       "types": "./lib/langchain/index.d.ts"
+     }
+   },
    "directories": {
      "lib": "lib",
      "test": "tests"
package/src/anthropic/index.ts CHANGED
@@ -202,3 +202,5 @@ export class WrappedMessages extends AnthropicOriginal.Messages {
  }

  export default PostHogAnthropic
+
+ export { PostHogAnthropic as Anthropic }
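
With the new named export, the Anthropic wrapper no longer requires the default import. A sketch, assuming its constructor takes the same { apiKey, posthog } shape as MonitoringOpenAIConfig shown further down; the model name is illustrative:

  import { Anthropic } from '@posthog/ai/anthropic'
  import { PostHog } from 'posthog-node'

  const phClient = new PostHog('<posthog-project-api-key>')
  const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY ?? '', posthog: phClient })

  // MonitoringParams such as posthogDistinctId ride along with the request
  const message = await anthropic.messages.create({
    model: 'claude-3-5-sonnet-latest', // illustrative
    max_tokens: 256,
    messages: [{ role: 'user', content: 'Hello' }],
    posthogDistinctId: 'user-123',
  })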
@@ -82,7 +82,6 @@ export class LangChainCallbackHandler extends BaseCallbackHandler {
82
82
  parentRunId?: string,
83
83
  tags?: string[],
84
84
  metadata?: Record<string, unknown>,
85
-
86
85
  runType?: string,
87
86
  runName?: string
88
87
  ): void {
@@ -432,10 +431,18 @@ export class LangChainCallbackHandler extends BaseCallbackHandler {
432
431
  eventProperties['$ai_is_error'] = true
433
432
  } else {
434
433
  // Handle token usage
435
- const [inputTokens, outputTokens] = this.parseUsage(output)
434
+ const [inputTokens, outputTokens, additionalTokenData] = this.parseUsage(output)
436
435
  eventProperties['$ai_input_tokens'] = inputTokens
437
436
  eventProperties['$ai_output_tokens'] = outputTokens
438
437
 
438
+ // Add additional token data to properties
439
+ if (additionalTokenData.cacheReadInputTokens) {
440
+ eventProperties['$ai_cache_read_tokens'] = additionalTokenData.cacheReadInputTokens
441
+ }
442
+ if (additionalTokenData.reasoningTokens) {
443
+ eventProperties['$ai_reasoning_tokens'] = additionalTokenData.reasoningTokens
444
+ }
445
+
439
446
  // Handle generations/completions
440
447
  let completions
441
448
  if (output.generations && Array.isArray(output.generations)) {
@@ -471,14 +478,17 @@ export class LangChainCallbackHandler extends BaseCallbackHandler {
471
478
  }
472
479
  }
473
480
 
474
- private _getLangchainRunName(serialized: any, ...args: any[]): string | undefined {
481
+ private _getLangchainRunName(serialized: any, ...args: any): string | undefined {
475
482
  if (args && args.length > 0) {
476
483
  for (const arg of args) {
477
484
  if (arg && typeof arg === 'object' && 'name' in arg) {
478
485
  return arg.name
486
+ } else if (arg && typeof arg === 'object' && 'runName' in arg) {
487
+ return arg.runName
479
488
  }
480
489
  }
481
490
  }
491
+
482
492
  if (serialized && serialized.name) {
483
493
  return serialized.name
484
494
  }
@@ -520,7 +530,7 @@ export class LangChainCallbackHandler extends BaseCallbackHandler {
520
530
  return messageDict
521
531
  }
522
532
 
523
- private _parseUsageModel(usage: any): [number, number] {
533
+ private _parseUsageModel(usage: any): [number, number, Record<string, any>] {
524
534
  const conversionList: Array<[string, 'input' | 'output']> = [
525
535
  ['promptTokens', 'input'],
526
536
  ['completionTokens', 'output'],
@@ -548,11 +558,32 @@ export class LangChainCallbackHandler extends BaseCallbackHandler {
548
558
  { input: 0, output: 0 }
549
559
  )
550
560
 
551
- return [parsedUsage.input, parsedUsage.output]
561
+ // Extract additional token details like cached tokens and reasoning tokens
562
+ const additionalTokenData: Record<string, any> = {}
563
+
564
+ // Check for cached tokens in various formats
565
+ if (usage.prompt_tokens_details?.cached_tokens != null) {
566
+ additionalTokenData.cacheReadInputTokens = usage.prompt_tokens_details.cached_tokens
567
+ } else if (usage.input_token_details?.cache_read != null) {
568
+ additionalTokenData.cacheReadInputTokens = usage.input_token_details.cache_read
569
+ } else if (usage.cachedPromptTokens != null) {
570
+ additionalTokenData.cacheReadInputTokens = usage.cachedPromptTokens
571
+ }
572
+
573
+ // Check for reasoning tokens in various formats
574
+ if (usage.completion_tokens_details?.reasoning_tokens != null) {
575
+ additionalTokenData.reasoningTokens = usage.completion_tokens_details.reasoning_tokens
576
+ } else if (usage.output_token_details?.reasoning != null) {
577
+ additionalTokenData.reasoningTokens = usage.output_token_details.reasoning
578
+ } else if (usage.reasoningTokens != null) {
579
+ additionalTokenData.reasoningTokens = usage.reasoningTokens
580
+ }
581
+
582
+ return [parsedUsage.input, parsedUsage.output, additionalTokenData]
552
583
  }
553
584
 
554
- private parseUsage(response: LLMResult): [number, number] {
555
- let llmUsage: [number, number] = [0, 0]
585
+ private parseUsage(response: LLMResult): [number, number, Record<string, any>] {
586
+ let llmUsage: [number, number, Record<string, any>] = [0, 0, {}]
556
587
  const llmUsageKeys = ['token_usage', 'usage', 'tokenUsage']
557
588
 
558
589
  if (response.llmOutput != null) {
@@ -566,6 +597,7 @@ export class LangChainCallbackHandler extends BaseCallbackHandler {
566
597
  if (llmUsage[0] === 0 && llmUsage[1] === 0 && response.generations) {
567
598
  for (const generation of response.generations) {
568
599
  for (const genChunk of generation) {
600
+ // Check other paths for usage information
569
601
  if (genChunk.generationInfo?.usage_metadata) {
570
602
  llmUsage = this._parseUsageModel(genChunk.generationInfo.usage_metadata)
571
603
  return llmUsage
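
To make the new three-element return value concrete, here is a hypothetical OpenAI-style usage payload and the tuple the updated parser would produce from it:

  // Hypothetical payload exercising the fallback chains above
  const usage = {
    promptTokens: 1200,
    completionTokens: 300,
    prompt_tokens_details: { cached_tokens: 1024 },
    completion_tokens_details: { reasoning_tokens: 128 },
  }

  // _parseUsageModel(usage) now returns
  //   [1200, 300, { cacheReadInputTokens: 1024, reasoningTokens: 128 }]
  // and the handler surfaces the third element as the
  // $ai_cache_read_tokens and $ai_reasoning_tokens event properties.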
package/src/langchain/index.ts ADDED
@@ -0,0 +1 @@
+ export * from './callbacks'
package/src/openai/index.ts CHANGED
@@ -1,4 +1,4 @@
- import OpenAIOrignal from 'openai'
+ import OpenAIOrignal, { ClientOptions } from 'openai'
  import { PostHog } from 'posthog-node'
  import { v4 as uuidv4 } from 'uuid'
  import { formatResponseOpenAI, MonitoringParams, sendEventToPosthog } from '../utils'
@@ -11,7 +11,7 @@ type ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCo
  import type { APIPromise, RequestOptions } from 'openai/core'
  import type { Stream } from 'openai/streaming'

- interface MonitoringOpenAIConfig {
+ interface MonitoringOpenAIConfig extends ClientOptions {
    apiKey: string
    posthog: PostHog
    baseURL?: string
@@ -211,3 +211,5 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
  }

  export default PostHogOpenAI
+
+ export { PostHogOpenAI as OpenAI }
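
Because MonitoringOpenAIConfig now extends the SDK's ClientOptions, standard OpenAI client options can be passed alongside the PostHog fields. A sketch (model name and option values are illustrative):

  import { OpenAI } from '@posthog/ai/openai'
  import { PostHog } from 'posthog-node'

  const phClient = new PostHog('<posthog-project-api-key>')

  const client = new OpenAI({
    apiKey: process.env.OPENAI_API_KEY ?? '',
    posthog: phClient,
    maxRetries: 2, // plain ClientOptions field, forwarded to the OpenAI SDK
  })

  const completion = await client.chat.completions.create({
    model: 'gpt-4o-mini', // illustrative
    messages: [{ role: 'user', content: 'Hello' }],
    posthogDistinctId: 'user-123', // MonitoringParams ride along with the request
  })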
package/src/utils.ts CHANGED
@@ -167,7 +167,7 @@ export const sendEventToPosthog = ({
    }
  }

- let additionalTokenValues = {
+ const additionalTokenValues = {
    ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),
    ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),
    ...(usage.cacheCreationInputTokens ? { $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens } : {}),
package/src/vercel/index.ts ADDED
@@ -0,0 +1 @@
+ export { wrapVercelLanguageModel as withTracing } from './middleware'