@posthog/ai 3.3.2 → 4.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,423 @@
1
+ import { experimental_wrapLanguageModel } from 'ai';
2
+ import { v4 } from 'uuid';
3
+ import { Buffer } from 'buffer';
4
+
5
/**
 * Picks the recognized LLM sampling/config parameters out of a call's
 * params object. Keys that are absent or explicitly `undefined` are
 * skipped; a nullish params yields an empty object.
 */
const getModelParams = params => {
  if (!params) {
    return {};
  }
  const tracked = ['temperature', 'max_tokens', 'max_completion_tokens', 'top_p', 'frequency_penalty', 'presence_penalty', 'n', 'stop', 'stream', 'streaming'];
  return Object.fromEntries(
    tracked
      .filter(name => name in params && params[name] !== undefined)
      .map(name => [name, params[name]])
  );
};
18
/**
 * Redacts `input` (returns null) when privacy mode is enabled either
 * globally on the PostHog client (`privacy_mode`) or per-call via
 * `privacyMode`; otherwise passes `input` through unchanged.
 */
const withPrivacyMode = (client, privacyMode, input) => {
  const redacted = client.privacy_mode || privacyMode;
  return redacted ? null : input;
};
21
/**
 * Deep-copies a value via a JSON round-trip (dropping anything that is
 * not JSON-serializable) and re-encodes every string through a UTF-8
 * Buffer round-trip so invalid sequences are replaced before the payload
 * is sent. `null`/`undefined` pass through untouched.
 */
function sanitizeValues(obj) {
  if (obj === null || obj === undefined) {
    return obj;
  }
  const reencode = value => {
    if (typeof value === 'string') {
      // Buffer round-trip replaces invalid UTF-8 sequences.
      return Buffer.from(value, 'utf8').toString('utf8');
    }
    if (Array.isArray(value)) {
      return value.map(reencode);
    }
    if (value && typeof value === 'object') {
      return Object.fromEntries(Object.entries(value).map(([key, inner]) => [key, reencode(inner)]));
    }
    return value;
  };
  return reencode(JSON.parse(JSON.stringify(obj)));
}
35
/**
 * Builds and captures a `$ai_generation` event on the PostHog client.
 *
 * Falls back to `traceId` as the distinct id (and disables person
 * profiles in that case). Input/output/error payloads are sanitized for
 * UTF-8 validity and redacted when privacy mode is on. Spread order in
 * `properties` is significant: `params.posthogProperties`, `errorData`
 * and `costOverrideData` are applied last so they can override the base
 * fields. No-op when the client has no `capture` method.
 */
const sendEventToPosthog = ({
  client,
  distinctId,
  traceId,
  model,
  provider,
  input,
  output,
  latency,
  baseURL,
  params,
  httpStatus = 200,
  usage = {},
  isError = false,
  error,
  tools
}) => {
  if (client.capture) {
    // sanitize input and output for UTF-8 validity
    const safeInput = sanitizeValues(input);
    const safeOutput = sanitizeValues(output);
    const safeError = sanitizeValues(error);
    // Only attach error fields when the caller flagged a failure.
    let errorData = {};
    if (isError) {
      errorData = {
        $ai_is_error: true,
        $ai_error: safeError
      };
    }
    // Optional per-call cost override: USD cost = per-token rate * token count.
    let costOverrideData = {};
    if (params.posthogCostOverride) {
      const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0);
      const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 0);
      costOverrideData = {
        $ai_input_cost_usd: inputCostUSD,
        $ai_output_cost_usd: outputCostUSD,
        $ai_total_cost_usd: inputCostUSD + outputCostUSD
      };
    }
    // Provider-specific token counters, included only when reported.
    const additionalTokenValues = {
      ...(usage.reasoningTokens ? {
        $ai_reasoning_tokens: usage.reasoningTokens
      } : {}),
      ...(usage.cacheReadInputTokens ? {
        $ai_cache_read_input_tokens: usage.cacheReadInputTokens
      } : {}),
      ...(usage.cacheCreationInputTokens ? {
        $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens
      } : {})
    };
    client.capture({
      distinctId: distinctId ?? traceId,
      event: '$ai_generation',
      properties: {
        // Per-call overrides take precedence over the detected values.
        $ai_provider: params.posthogProviderOverride ?? provider,
        $ai_model: params.posthogModelOverride ?? model,
        $ai_model_parameters: getModelParams(params),
        $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),
        $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),
        $ai_http_status: httpStatus,
        $ai_input_tokens: usage.inputTokens ?? 0,
        $ai_output_tokens: usage.outputTokens ?? 0,
        ...additionalTokenValues,
        $ai_latency: latency,
        $ai_trace_id: traceId,
        $ai_base_url: baseURL,
        ...params.posthogProperties,
        // Without an explicit distinct id, avoid creating person profiles.
        ...(distinctId ? {} : {
          $process_person_profile: false
        }),
        ...(tools ? {
          $ai_tools: tools
        } : {}),
        ...errorData,
        ...costOverrideData
      },
      groups: params.posthogGroups
    });
  }
};
115
+
116
/**
 * Translates Vercel AI SDK call settings (camelCase) into the
 * OpenAI-style snake_case parameter names used in analytics events.
 * Missing settings simply map to `undefined`.
 */
const mapVercelParams = params => ({
  temperature: params.temperature,
  max_tokens: params.maxTokens,
  top_p: params.topP,
  frequency_penalty: params.frequencyPenalty,
  presence_penalty: params.presencePenalty,
  stop: params.stopSequences,
  stream: params.stream
});
127
/**
 * Converts a Vercel AI SDK prompt (array of role/content messages) into
 * the PostHog input shape. A plain-string content becomes one text part;
 * array contents are mapped part-by-part. Non-URL image/file data is
 * replaced with a placeholder string, and unknown part types collapse to
 * an empty-content entry.
 */
const mapVercelPrompt = prompt => {
  const mapPart = part => {
    switch (part.type) {
      case 'text':
        return { type: 'text', content: part.text };
      case 'image':
        return {
          type: 'image',
          content: {
            // forward only URL images; raw image bytes are not supported
            image: part.image instanceof URL ? part.image.toString() : 'raw images not supported',
            mimeType: part.mimeType
          }
        };
      case 'file':
        return {
          type: 'file',
          content: {
            file: part.data instanceof URL ? part.data.toString() : 'raw files not supported',
            mimeType: part.mimeType
          }
        };
      case 'tool-call':
        return {
          type: 'tool-call',
          content: {
            toolCallId: part.toolCallId,
            toolName: part.toolName,
            args: part.args
          }
        };
      case 'tool-result':
        return {
          type: 'tool-result',
          content: {
            toolCallId: part.toolCallId,
            toolName: part.toolName,
            result: part.result,
            isError: part.isError
          }
        };
      default:
        return { content: '' };
    }
  };
  return prompt.map(message => ({
    role: message.role,
    content: Array.isArray(message.content)
      ? message.content.map(mapPart)
      : { type: 'text', text: message.content }
  }));
};
190
/**
 * Normalizes a Vercel generation result into PostHog output choices.
 * A plain-text result (no object/reasoning) is forwarded verbatim as a
 * single assistant message; anything richer is JSON-stringified. Key
 * insertion order matches the upstream shape so serialized output is
 * stable.
 *
 * NOTE(review): the `toolCalls` key is populated from
 * `result.providerMetadata`, not `result.toolCalls` — this looks like a
 * mislabel but is preserved as-is to keep the emitted payload unchanged.
 */
const mapVercelOutput = result => {
  const pick = key => (result[key] ? { [key]: result[key] } : {});
  const summary = {
    ...pick('text'),
    ...pick('object'),
    ...pick('reasoning'),
    ...pick('response'),
    ...pick('finishReason'),
    ...pick('usage'),
    ...pick('warnings'),
    ...(result.providerMetadata ? { toolCalls: result.providerMetadata } : {})
  };
  // if text and no object or reasoning, return text
  if (summary.text && !summary.object && !summary.reasoning) {
    return [{ content: summary.text, role: 'assistant' }];
  }
  return [{ content: JSON.stringify(summary), role: 'assistant' }];
};
229
/**
 * Derives a bare provider name from a Vercel model. The model's
 * `provider` field uses the "provider.endpoint" format, e.g.
 * "openai.chat" -> "openai". Case-insensitive.
 */
const extractProvider = model => {
  const [providerName] = model.provider.toLowerCase().split('.');
  return providerName;
};
235
/**
 * Creates a Vercel AI SDK middleware that instruments `doGenerate` and
 * `doStream`, capturing a `$ai_generation` PostHog event for every call.
 * Failures are reported with the error status (falling back to 500) and
 * then rethrown so callers still observe the original error.
 *
 * @param phClient PostHog client events are captured on.
 * @param model    The wrapped language model (source of modelId/provider).
 * @param options  Fully-resolved monitoring options (trace id, distinct
 *                 id, privacy mode, model/provider overrides, extras).
 * @returns Middleware suitable for `experimental_wrapLanguageModel`.
 */
const createInstrumentationMiddleware = (phClient, model, options) => {
  const middleware = {
    wrapGenerate: async ({
      doGenerate,
      params
    }) => {
      const startTime = Date.now();
      const mergedParams = {
        ...options,
        ...mapVercelParams(params)
      };
      try {
        const result = await doGenerate();
        const latency = (Date.now() - startTime) / 1000;
        // Prefer the model id echoed back in the provider's response.
        const modelId = options.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId);
        const provider = options.posthogProviderOverride ?? extractProvider(model);
        const baseURL = ''; // cannot currently get baseURL from vercel
        // mapVercelOutput already yields [{ content, role: 'assistant' }].
        // Pass it through directly — wrapping it in another assistant
        // message (as before) nested the whole array inside `content`.
        const content = mapVercelOutput(result);
        const providerMetadata = result.providerMetadata;
        // Provider-specific token counters (OpenAI reasoning/cache,
        // Anthropic cache) when the provider reports them.
        const additionalTokenValues = {
          ...(providerMetadata?.openai?.reasoningTokens ? {
            reasoningTokens: providerMetadata.openai.reasoningTokens
          } : {}),
          ...(providerMetadata?.openai?.cachedPromptTokens ? {
            cacheReadInputTokens: providerMetadata.openai.cachedPromptTokens
          } : {}),
          ...(providerMetadata?.anthropic ? {
            cacheReadInputTokens: providerMetadata.anthropic.cacheReadInputTokens,
            cacheCreationInputTokens: providerMetadata.anthropic.cacheCreationInputTokens
          } : {})
        };
        sendEventToPosthog({
          client: phClient,
          distinctId: options.posthogDistinctId,
          traceId: options.posthogTraceId,
          model: modelId,
          provider: provider,
          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
          output: content,
          latency,
          baseURL,
          params: mergedParams,
          httpStatus: 200,
          usage: {
            inputTokens: result.usage.promptTokens,
            outputTokens: result.usage.completionTokens,
            ...additionalTokenValues
          }
        });
        return result;
      } catch (error) {
        // Report the failed call, then rethrow the original error.
        sendEventToPosthog({
          client: phClient,
          distinctId: options.posthogDistinctId,
          traceId: options.posthogTraceId,
          model: model.modelId,
          // Resolve the provider the same way as the success path and
          // wrapStream, so error events carry a consistent $ai_provider.
          provider: options.posthogProviderOverride ?? extractProvider(model),
          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
          output: [],
          latency: 0,
          baseURL: '',
          params: mergedParams,
          httpStatus: error?.status ? error.status : 500,
          usage: {
            inputTokens: 0,
            outputTokens: 0
          },
          isError: true,
          error: JSON.stringify(error)
        });
        throw error;
      }
    },
    wrapStream: async ({
      doStream,
      params
    }) => {
      const startTime = Date.now();
      let generatedText = '';
      let usage = {};
      const mergedParams = {
        ...options,
        ...mapVercelParams(params)
      };
      const modelId = options.posthogModelOverride ?? model.modelId;
      const provider = options.posthogProviderOverride ?? extractProvider(model);
      const baseURL = ''; // cannot currently get baseURL from vercel
      try {
        const {
          stream,
          ...rest
        } = await doStream();
        // Tap the stream: accumulate text deltas and token usage as
        // chunks pass through, then emit the event once the stream ends.
        const transformStream = new TransformStream({
          transform(chunk, controller) {
            if (chunk.type === 'text-delta') {
              generatedText += chunk.textDelta;
            }
            if (chunk.type === 'finish') {
              usage = {
                inputTokens: chunk.usage?.promptTokens,
                outputTokens: chunk.usage?.completionTokens
              };
              if (chunk.providerMetadata?.openai?.reasoningTokens) {
                usage.reasoningTokens = chunk.providerMetadata.openai.reasoningTokens;
              }
              if (chunk.providerMetadata?.openai?.cachedPromptTokens) {
                usage.cacheReadInputTokens = chunk.providerMetadata.openai.cachedPromptTokens;
              }
              if (chunk.providerMetadata?.anthropic?.cacheReadInputTokens) {
                usage.cacheReadInputTokens = chunk.providerMetadata.anthropic.cacheReadInputTokens;
              }
              if (chunk.providerMetadata?.anthropic?.cacheCreationInputTokens) {
                usage.cacheCreationInputTokens = chunk.providerMetadata.anthropic.cacheCreationInputTokens;
              }
            }
            controller.enqueue(chunk);
          },
          flush() {
            const latency = (Date.now() - startTime) / 1000;
            sendEventToPosthog({
              client: phClient,
              distinctId: options.posthogDistinctId,
              traceId: options.posthogTraceId,
              model: modelId,
              provider: provider,
              input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
              output: [{
                content: generatedText,
                role: 'assistant'
              }],
              latency,
              baseURL,
              params: mergedParams,
              httpStatus: 200,
              usage
            });
          }
        });
        return {
          stream: stream.pipeThrough(transformStream),
          ...rest
        };
      } catch (error) {
        sendEventToPosthog({
          client: phClient,
          distinctId: options.posthogDistinctId,
          traceId: options.posthogTraceId,
          model: modelId,
          provider: provider,
          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
          output: [],
          latency: 0,
          baseURL: '',
          params: mergedParams,
          httpStatus: error?.status ? error.status : 500,
          usage: {
            inputTokens: 0,
            outputTokens: 0
          },
          isError: true,
          error: JSON.stringify(error)
        });
        throw error;
      }
    }
  };
  return middleware;
};
408
/**
 * Wraps a Vercel AI SDK language model so every call is traced in
 * PostHog. A trace id is generated when none is supplied, and it doubles
 * as the distinct id if the caller did not provide one.
 */
const wrapVercelLanguageModel = (model, phClient, options) => {
  const traceId = options.posthogTraceId ?? v4();
  const resolvedOptions = {
    ...options,
    posthogTraceId: traceId,
    posthogDistinctId: options.posthogDistinctId ?? traceId
  };
  return experimental_wrapLanguageModel({
    model,
    middleware: createInstrumentationMiddleware(phClient, model, resolvedOptions)
  });
};
421
+
422
+ export { wrapVercelLanguageModel as withTracing };
423
+ //# sourceMappingURL=index.esm.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.esm.js","sources":["../../src/utils.ts","../../src/vercel/middleware.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport { Buffer } from 'buffer'\nimport OpenAIOrignal from 'openai'\nimport AnthropicOriginal from '@anthropic-ai/sdk'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n}\n\nexport interface CostOverride {\n inputCost: number\n outputCost: number\n}\n\nexport const getModelParams = (\n params: ((ChatCompletionCreateParamsBase | MessageCreateParams) & MonitoringParams) | null\n): Record<string, any> => {\n if (!params) {\n return {}\n }\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. 
anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): Array<{ role: string; content: string }> => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): Array<{ role: string; content: string }> => {\n // Example approach if \"response.content\" holds array of text segments, etc.\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.content ?? []) {\n if (choice?.text) {\n output.push({\n role: 'assistant',\n content: choice.text,\n })\n }\n }\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): Array<{ role: string; content: string }> => {\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.choices ?? []) {\n if (choice.message?.content) {\n output.push({\n role: choice.message.role,\n content: choice.message.content,\n })\n }\n }\n return output\n}\n\nexport const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {\n if (provider == 'anthropic') {\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = (params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n }\n return params.messages\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? 
null : input\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: {\n inputTokens?: number\n outputTokens?: number\n reasoningTokens?: any\n cacheReadInputTokens?: any\n cacheCreationInputTokens?: any\n }\n params: (ChatCompletionCreateParamsBase | MessageCreateParams) & MonitoringParams\n isError?: boolean\n error?: string\n tools?: any\n}\n\nfunction sanitizeValues(obj: any): any {\n if (obj === undefined || obj === null) {\n return obj\n }\n const jsonSafe = JSON.parse(JSON.stringify(obj))\n if (typeof jsonSafe === 'string') {\n return Buffer.from(jsonSafe, 'utf8').toString('utf8')\n } else if (Array.isArray(jsonSafe)) {\n return jsonSafe.map(sanitizeValues)\n } else if (jsonSafe && typeof jsonSafe === 'object') {\n return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]))\n }\n return jsonSafe\n}\n\nexport const sendEventToPosthog = ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n isError = false,\n error,\n tools,\n}: SendEventToPosthogParams): void => {\n if (client.capture) {\n // sanitize input and output for UTF-8 validity\n const safeInput = sanitizeValues(input)\n const safeOutput = sanitizeValues(output)\n const safeError = sanitizeValues(error)\n\n let errorData = {}\n if (isError) {\n errorData = {\n $ai_is_error: true,\n $ai_error: safeError,\n }\n }\n let costOverrideData = {}\n if (params.posthogCostOverride) {\n const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0)\n const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 
0)\n costOverrideData = {\n $ai_input_cost_usd: inputCostUSD,\n $ai_output_cost_usd: outputCostUSD,\n $ai_total_cost_usd: inputCostUSD + outputCostUSD,\n }\n }\n\n const additionalTokenValues = {\n ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),\n ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),\n ...(usage.cacheCreationInputTokens ? { $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens } : {}),\n }\n\n client.capture({\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties: {\n $ai_provider: params.posthogProviderOverride ?? provider,\n $ai_model: params.posthogModelOverride ?? model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n ...additionalTokenValues,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n ...(tools ? 
{ $ai_tools: tools } : {}),\n ...errorData,\n ...costOverrideData,\n },\n groups: params.posthogGroups,\n })\n }\n}\n","import { experimental_wrapLanguageModel as wrapLanguageModel } from 'ai'\nimport type { LanguageModelV1, LanguageModelV1Middleware, LanguageModelV1Prompt, LanguageModelV1StreamPart } from 'ai'\nimport { v4 as uuidv4 } from 'uuid'\nimport { PostHog } from 'posthog-node'\nimport { CostOverride, sendEventToPosthog } from '../utils'\n\ninterface ClientOptions {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n}\n\ninterface CreateInstrumentationMiddlewareOptions {\n posthogDistinctId: string\n posthogTraceId: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n}\n\ninterface PostHogInput {\n role: string\n type?: string\n content?:\n | string\n | {\n [key: string]: any\n }\n}\n\nconst mapVercelParams = (params: any): Record<string, any> => {\n return {\n temperature: params.temperature,\n max_tokens: params.maxTokens,\n top_p: params.topP,\n frequency_penalty: params.frequencyPenalty,\n presence_penalty: params.presencePenalty,\n stop: params.stopSequences,\n stream: params.stream,\n }\n}\n\nconst mapVercelPrompt = (prompt: LanguageModelV1Prompt): PostHogInput[] => {\n return prompt.map((p) => {\n let content = {}\n if (Array.isArray(p.content)) {\n content = p.content.map((c) => {\n if (c.type === 'text') {\n return {\n type: 'text',\n content: c.text,\n }\n } else if (c.type === 'image') {\n return {\n type: 'image',\n content: {\n // if image is a url use it, or use \"none supported\"\n image: c.image instanceof URL ? 
c.image.toString() : 'raw images not supported',\n mimeType: c.mimeType,\n },\n }\n } else if (c.type === 'file') {\n return {\n type: 'file',\n content: {\n file: c.data instanceof URL ? c.data.toString() : 'raw files not supported',\n mimeType: c.mimeType,\n },\n }\n } else if (c.type === 'tool-call') {\n return {\n type: 'tool-call',\n content: {\n toolCallId: c.toolCallId,\n toolName: c.toolName,\n args: c.args,\n },\n }\n } else if (c.type === 'tool-result') {\n return {\n type: 'tool-result',\n content: {\n toolCallId: c.toolCallId,\n toolName: c.toolName,\n result: c.result,\n isError: c.isError,\n },\n }\n }\n return {\n content: '',\n }\n })\n } else {\n content = {\n type: 'text',\n text: p.content,\n }\n }\n return {\n role: p.role,\n content,\n }\n })\n}\n\nconst mapVercelOutput = (result: any): PostHogInput[] => {\n const output = {\n ...(result.text ? { text: result.text } : {}),\n ...(result.object ? { object: result.object } : {}),\n ...(result.reasoning ? { reasoning: result.reasoning } : {}),\n ...(result.response ? { response: result.response } : {}),\n ...(result.finishReason ? { finishReason: result.finishReason } : {}),\n ...(result.usage ? { usage: result.usage } : {}),\n ...(result.warnings ? { warnings: result.warnings } : {}),\n ...(result.providerMetadata ? 
{ toolCalls: result.providerMetadata } : {}),\n }\n // if text and no object or reasoning, return text\n if (output.text && !output.object && !output.reasoning) {\n return [{ content: output.text, role: 'assistant' }]\n }\n return [{ content: JSON.stringify(output), role: 'assistant' }]\n}\n\nconst extractProvider = (model: LanguageModelV1): string => {\n // vercel provider is in the format of provider.endpoint\n const provider = model.provider.toLowerCase()\n const providerName = provider.split('.')[0]\n return providerName\n}\n\nexport const createInstrumentationMiddleware = (\n phClient: PostHog,\n model: LanguageModelV1,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV1Middleware => {\n const middleware: LanguageModelV1Middleware = {\n wrapGenerate: async ({ doGenerate, params }) => {\n const startTime = Date.now()\n const mergedParams = {\n ...options,\n ...mapVercelParams(params),\n }\n try {\n const result = await doGenerate()\n const latency = (Date.now() - startTime) / 1000\n const modelId =\n options.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId)\n const provider = options.posthogProviderOverride ?? extractProvider(model)\n const baseURL = '' // cannot currently get baseURL from vercel\n const content = mapVercelOutput(result)\n // let tools = result.toolCalls\n const providerMetadata = result.providerMetadata\n const additionalTokenValues = {\n ...(providerMetadata?.openai?.reasoningTokens\n ? { reasoningTokens: providerMetadata.openai.reasoningTokens }\n : {}),\n ...(providerMetadata?.openai?.cachedPromptTokens\n ? { cacheReadInputTokens: providerMetadata.openai.cachedPromptTokens }\n : {}),\n ...(providerMetadata?.anthropic\n ? 
{\n cacheReadInputTokens: providerMetadata.anthropic.cacheReadInputTokens,\n cacheCreationInputTokens: providerMetadata.anthropic.cacheCreationInputTokens,\n }\n : {}),\n }\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: modelId,\n provider: provider,\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: [{ content, role: 'assistant' }],\n latency,\n baseURL,\n params: mergedParams as any,\n httpStatus: 200,\n usage: {\n inputTokens: result.usage.promptTokens,\n outputTokens: result.usage.completionTokens,\n ...additionalTokenValues,\n },\n })\n\n return result\n } catch (error: any) {\n const modelId = model.modelId\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: modelId,\n provider: model.provider,\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: [],\n latency: 0,\n baseURL: '',\n params: mergedParams as any,\n httpStatus: error?.status ? error.status : 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n })\n throw error\n }\n },\n\n wrapStream: async ({ doStream, params }) => {\n const startTime = Date.now()\n let generatedText = ''\n let usage: {\n inputTokens?: number\n outputTokens?: number\n reasoningTokens?: any\n cacheReadInputTokens?: any\n cacheCreationInputTokens?: any\n } = {}\n const mergedParams = {\n ...options,\n ...mapVercelParams(params),\n }\n\n const modelId = options.posthogModelOverride ?? model.modelId\n const provider = options.posthogProviderOverride ?? 
extractProvider(model)\n const baseURL = '' // cannot currently get baseURL from vercel\n try {\n const { stream, ...rest } = await doStream()\n const transformStream = new TransformStream<LanguageModelV1StreamPart, LanguageModelV1StreamPart>({\n transform(chunk, controller) {\n if (chunk.type === 'text-delta') {\n generatedText += chunk.textDelta\n }\n if (chunk.type === 'finish') {\n usage = {\n inputTokens: chunk.usage?.promptTokens,\n outputTokens: chunk.usage?.completionTokens,\n }\n if (chunk.providerMetadata?.openai?.reasoningTokens) {\n usage.reasoningTokens = chunk.providerMetadata.openai.reasoningTokens\n }\n if (chunk.providerMetadata?.openai?.cachedPromptTokens) {\n usage.cacheReadInputTokens = chunk.providerMetadata.openai.cachedPromptTokens\n }\n if (chunk.providerMetadata?.anthropic?.cacheReadInputTokens) {\n usage.cacheReadInputTokens = chunk.providerMetadata.anthropic.cacheReadInputTokens\n }\n if (chunk.providerMetadata?.anthropic?.cacheCreationInputTokens) {\n usage.cacheCreationInputTokens = chunk.providerMetadata.anthropic.cacheCreationInputTokens\n }\n }\n controller.enqueue(chunk)\n },\n\n flush() {\n const latency = (Date.now() - startTime) / 1000\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: modelId,\n provider: provider,\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: [{ content: generatedText, role: 'assistant' }],\n latency,\n baseURL,\n params: mergedParams as any,\n httpStatus: 200,\n usage,\n })\n },\n })\n\n return {\n stream: stream.pipeThrough(transformStream),\n ...rest,\n }\n } catch (error: any) {\n sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId,\n model: modelId,\n provider: provider,\n input: options.posthogPrivacyMode ? 
'' : mapVercelPrompt(params.prompt),\n output: [],\n latency: 0,\n baseURL: '',\n params: mergedParams as any,\n httpStatus: error?.status ? error.status : 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n })\n throw error\n }\n },\n }\n\n return middleware\n}\n\nexport const wrapVercelLanguageModel = (\n model: LanguageModelV1,\n phClient: PostHog,\n options: ClientOptions\n): LanguageModelV1 => {\n const traceId = options.posthogTraceId ?? uuidv4()\n const middleware = createInstrumentationMiddleware(phClient, model, {\n ...options,\n posthogTraceId: traceId,\n posthogDistinctId: options.posthogDistinctId ?? traceId,\n })\n\n const wrappedModel = wrapLanguageModel({\n model,\n middleware,\n })\n\n return wrappedModel\n}\n"],"names":["getModelParams","params","modelParams","paramKeys","key","undefined","withPrivacyMode","client","privacyMode","input","privacy_mode","sanitizeValues","obj","jsonSafe","JSON","parse","stringify","Buffer","from","toString","Array","isArray","map","Object","fromEntries","entries","k","v","sendEventToPosthog","distinctId","traceId","model","provider","output","latency","baseURL","httpStatus","usage","isError","error","tools","capture","safeInput","safeOutput","safeError","errorData","$ai_is_error","$ai_error","costOverrideData","posthogCostOverride","inputCostUSD","inputCost","inputTokens","outputCostUSD","outputCost","outputTokens","$ai_input_cost_usd","$ai_output_cost_usd","$ai_total_cost_usd","additionalTokenValues","reasoningTokens","$ai_reasoning_tokens","cacheReadInputTokens","$ai_cache_read_input_tokens","cacheCreationInputTokens","$ai_cache_creation_input_tokens","event","properties","$ai_provider","posthogProviderOverride","$ai_model","posthogModelOverride","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","$ai_output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_pro
file","$ai_tools","groups","posthogGroups","mapVercelParams","temperature","max_tokens","maxTokens","top_p","topP","frequency_penalty","frequencyPenalty","presence_penalty","presencePenalty","stop","stopSequences","stream","mapVercelPrompt","prompt","p","content","c","type","text","image","URL","mimeType","file","data","toolCallId","toolName","args","result","role","mapVercelOutput","object","reasoning","response","finishReason","warnings","providerMetadata","toolCalls","extractProvider","toLowerCase","providerName","split","createInstrumentationMiddleware","phClient","options","middleware","wrapGenerate","doGenerate","startTime","Date","now","mergedParams","modelId","openai","cachedPromptTokens","anthropic","posthogDistinctId","posthogTraceId","promptTokens","completionTokens","status","wrapStream","doStream","generatedText","rest","transformStream","TransformStream","transform","chunk","controller","textDelta","enqueue","flush","pipeThrough","wrapVercelLanguageModel","uuidv4","wrappedModel","wrapLanguageModel"],"mappings":";;;;AAwBO,MAAMA,cAAc,GACzBC,MAA0F,IAClE;EACxB,IAAI,CAACA,MAAM,EAAE;AACX,IAAA,OAAO,EAAE,CAAA;AACX,GAAA;EACA,MAAMC,WAAgC,GAAG,EAAE,CAAA;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH,CAAA;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAM,CAASG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAM,CAASG,GAAG,CAAC,CAAA;AACzC,KAAA;AACF,GAAA;AACA,EAAA,OAAOF,WAAW,CAAA;AACpB,CAAC,CAAA;AAwDM,MAAMI,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAU;EACzF,OAAQF,MAAM,CAASG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK,CAAA;AACnE,CAAC,CAAA;AA0BD,SAASE,cAAcA,CAACC,GAAQ,EAAO;AACrC,EAAA,IAAIA,GAAG,KAAKP,SAAS,IAAIO,GAAG,KAAK,IAAI,EAAE;AACrC,IAAA,OAAOA,GAAG,CAAA;AACZ,GAAA;AACA,EAAA,MAAMC,QAAQ,GAAGC,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACJ,GAAG,CAAC,CAAC,CAAA;AAChD,EAAA,IAAI,OAAOC,QAAQ,KAAK,QAAQ,EAAE;AAChC,IAAA,OAAOI,MAAM,CAACC,IAAI,CA
ACL,QAAQ,EAAE,MAAM,CAAC,CAACM,QAAQ,CAAC,MAAM,CAAC,CAAA;GACtD,MAAM,IAAIC,KAAK,CAACC,OAAO,CAACR,QAAQ,CAAC,EAAE;AAClC,IAAA,OAAOA,QAAQ,CAACS,GAAG,CAACX,cAAc,CAAC,CAAA;GACpC,MAAM,IAAIE,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AACnD,IAAA,OAAOU,MAAM,CAACC,WAAW,CAACD,MAAM,CAACE,OAAO,CAACZ,QAAQ,CAAC,CAACS,GAAG,CAAC,CAAC,CAACI,CAAC,EAAEC,CAAC,CAAC,KAAK,CAACD,CAAC,EAAEf,cAAc,CAACgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;AAC7F,GAAA;AACA,EAAA,OAAOd,QAAQ,CAAA;AACjB,CAAA;AAEO,MAAMe,kBAAkB,GAAGA,CAAC;EACjCrB,MAAM;EACNsB,UAAU;EACVC,OAAO;EACPC,KAAK;EACLC,QAAQ;EACRvB,KAAK;EACLwB,MAAM;EACNC,OAAO;EACPC,OAAO;EACPlC,MAAM;AACNmC,EAAAA,UAAU,GAAG,GAAG;EAChBC,KAAK,GAAG,EAAE;AACVC,EAAAA,OAAO,GAAG,KAAK;EACfC,KAAK;AACLC,EAAAA,KAAAA;AACwB,CAAC,KAAW;EACpC,IAAIjC,MAAM,CAACkC,OAAO,EAAE;AAClB;AACA,IAAA,MAAMC,SAAS,GAAG/B,cAAc,CAACF,KAAK,CAAC,CAAA;AACvC,IAAA,MAAMkC,UAAU,GAAGhC,cAAc,CAACsB,MAAM,CAAC,CAAA;AACzC,IAAA,MAAMW,SAAS,GAAGjC,cAAc,CAAC4B,KAAK,CAAC,CAAA;IAEvC,IAAIM,SAAS,GAAG,EAAE,CAAA;AAClB,IAAA,IAAIP,OAAO,EAAE;AACXO,MAAAA,SAAS,GAAG;AACVC,QAAAA,YAAY,EAAE,IAAI;AAClBC,QAAAA,SAAS,EAAEH,SAAAA;OACZ,CAAA;AACH,KAAA;IACA,IAAII,gBAAgB,GAAG,EAAE,CAAA;IACzB,IAAI/C,MAAM,CAACgD,mBAAmB,EAAE;AAC9B,MAAA,MAAMC,YAAY,GAAG,CAACjD,MAAM,CAACgD,mBAAmB,CAACE,SAAS,IAAI,CAAC,KAAKd,KAAK,CAACe,WAAW,IAAI,CAAC,CAAC,CAAA;AAC3F,MAAA,MAAMC,aAAa,GAAG,CAACpD,MAAM,CAACgD,mBAAmB,CAACK,UAAU,IAAI,CAAC,KAAKjB,KAAK,CAACkB,YAAY,IAAI,CAAC,CAAC,CAAA;AAC9FP,MAAAA,gBAAgB,GAAG;AACjBQ,QAAAA,kBAAkB,EAAEN,YAAY;AAChCO,QAAAA,mBAAmB,EAAEJ,aAAa;QAClCK,kBAAkB,EAAER,YAAY,GAAGG,aAAAA;OACpC,CAAA;AACH,KAAA;AAEA,IAAA,MAAMM,qBAAqB,GAAG;MAC5B,IAAItB,KAAK,CAACuB,eAAe,GAAG;QAAEC,oBAAoB,EAAExB,KAAK,CAACuB,eAAAA;OAAiB,GAAG,EAAE,CAAC;MACjF,IAAIvB,KAAK,CAACyB,oBAAoB,GAAG;QAAEC,2BAA2B,EAAE1B,KAAK,CAACyB,oBAAAA;OAAsB,GAAG,EAAE,CAAC;MAClG,IAAIzB,KAAK,CAAC2B,wBAAwB,GAAG;QAAEC,+BAA+B,EAAE5B,KAAK,CAAC2B,wBAAAA;OAA0B,GAAG,EAAE,CAAA;KAC9G,CAAA;IAEDzD,MAAM,CAACkC,OAAO,CAAC;MACbZ,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCoC,MAAAA,KAAK,EAAE,gBAAgB;AACvBC,MAAAA,UAAU,EAAE;AACVC,QAAAA,YAAY,EAAEnE,MAAM
,CAACoE,uBAAuB,IAAIrC,QAAQ;AACxDsC,QAAAA,SAAS,EAAErE,MAAM,CAACsE,oBAAoB,IAAIxC,KAAK;AAC/CyC,QAAAA,oBAAoB,EAAExE,cAAc,CAACC,MAAM,CAAC;AAC5CwE,QAAAA,SAAS,EAAEnE,eAAe,CAACC,MAAM,EAAEN,MAAM,CAACyE,kBAAkB,IAAI,KAAK,EAAEhC,SAAS,CAAC;AACjFiC,QAAAA,kBAAkB,EAAErE,eAAe,CAACC,MAAM,EAAEN,MAAM,CAACyE,kBAAkB,IAAI,KAAK,EAAE/B,UAAU,CAAC;AAC3FiC,QAAAA,eAAe,EAAExC,UAAU;AAC3ByC,QAAAA,gBAAgB,EAAExC,KAAK,CAACe,WAAW,IAAI,CAAC;AACxC0B,QAAAA,iBAAiB,EAAEzC,KAAK,CAACkB,YAAY,IAAI,CAAC;AAC1C,QAAA,GAAGI,qBAAqB;AACxBoB,QAAAA,WAAW,EAAE7C,OAAO;AACpB8C,QAAAA,YAAY,EAAElD,OAAO;AACrBmD,QAAAA,YAAY,EAAE9C,OAAO;QACrB,GAAGlC,MAAM,CAACiF,iBAAiB;AAC3B,QAAA,IAAIrD,UAAU,GAAG,EAAE,GAAG;AAAEsD,UAAAA,uBAAuB,EAAE,KAAA;AAAM,SAAC,CAAC;AACzD,QAAA,IAAI3C,KAAK,GAAG;AAAE4C,UAAAA,SAAS,EAAE5C,KAAAA;SAAO,GAAG,EAAE,CAAC;AACtC,QAAA,GAAGK,SAAS;QACZ,GAAGG,gBAAAA;OACJ;MACDqC,MAAM,EAAEpF,MAAM,CAACqF,aAAAA;AACjB,KAAC,CAAC,CAAA;AACJ,GAAA;AACF,CAAC;;ACvLD,MAAMC,eAAe,GAAItF,MAAW,IAA0B;EAC5D,OAAO;IACLuF,WAAW,EAAEvF,MAAM,CAACuF,WAAW;IAC/BC,UAAU,EAAExF,MAAM,CAACyF,SAAS;IAC5BC,KAAK,EAAE1F,MAAM,CAAC2F,IAAI;IAClBC,iBAAiB,EAAE5F,MAAM,CAAC6F,gBAAgB;IAC1CC,gBAAgB,EAAE9F,MAAM,CAAC+F,eAAe;IACxCC,IAAI,EAAEhG,MAAM,CAACiG,aAAa;IAC1BC,MAAM,EAAElG,MAAM,CAACkG,MAAAA;GAChB,CAAA;AACH,CAAC,CAAA;AAED,MAAMC,eAAe,GAAIC,MAA6B,IAAqB;AACzE,EAAA,OAAOA,MAAM,CAAC/E,GAAG,CAAEgF,CAAC,IAAK;IACvB,IAAIC,OAAO,GAAG,EAAE,CAAA;IAChB,IAAInF,KAAK,CAACC,OAAO,CAACiF,CAAC,CAACC,OAAO,CAAC,EAAE;MAC5BA,OAAO,GAAGD,CAAC,CAACC,OAAO,CAACjF,GAAG,CAAEkF,CAAC,IAAK;AAC7B,QAAA,IAAIA,CAAC,CAACC,IAAI,KAAK,MAAM,EAAE;UACrB,OAAO;AACLA,YAAAA,IAAI,EAAE,MAAM;YACZF,OAAO,EAAEC,CAAC,CAACE,IAAAA;WACZ,CAAA;AACH,SAAC,MAAM,IAAIF,CAAC,CAACC,IAAI,KAAK,OAAO,EAAE;UAC7B,OAAO;AACLA,YAAAA,IAAI,EAAE,OAAO;AACbF,YAAAA,OAAO,EAAE;AACP;AACAI,cAAAA,KAAK,EAAEH,CAAC,CAACG,KAAK,YAAYC,GAAG,GAAGJ,CAAC,CAACG,KAAK,CAACxF,QAAQ,EAAE,GAAG,0BAA0B;cAC/E0F,QAAQ,EAAEL,CAAC,CAACK,QAAAA;AACd,aAAA;WACD,CAAA;AACH,SAAC,MAAM,IAAIL,CAAC,CAACC,IAAI,KAAK,MAAM,EAAE;UAC5B,OAAO;AACLA,YAAAA,IAAI,EAAE,MAAM;AACZF,YAAAA,OAAO,EAAE;AACPO,cAAAA,I
AAI,EAAEN,CAAC,CAACO,IAAI,YAAYH,GAAG,GAAGJ,CAAC,CAACO,IAAI,CAAC5F,QAAQ,EAAE,GAAG,yBAAyB;cAC3E0F,QAAQ,EAAEL,CAAC,CAACK,QAAAA;AACd,aAAA;WACD,CAAA;AACH,SAAC,MAAM,IAAIL,CAAC,CAACC,IAAI,KAAK,WAAW,EAAE;UACjC,OAAO;AACLA,YAAAA,IAAI,EAAE,WAAW;AACjBF,YAAAA,OAAO,EAAE;cACPS,UAAU,EAAER,CAAC,CAACQ,UAAU;cACxBC,QAAQ,EAAET,CAAC,CAACS,QAAQ;cACpBC,IAAI,EAAEV,CAAC,CAACU,IAAAA;AACV,aAAA;WACD,CAAA;AACH,SAAC,MAAM,IAAIV,CAAC,CAACC,IAAI,KAAK,aAAa,EAAE;UACnC,OAAO;AACLA,YAAAA,IAAI,EAAE,aAAa;AACnBF,YAAAA,OAAO,EAAE;cACPS,UAAU,EAAER,CAAC,CAACQ,UAAU;cACxBC,QAAQ,EAAET,CAAC,CAACS,QAAQ;cACpBE,MAAM,EAAEX,CAAC,CAACW,MAAM;cAChB7E,OAAO,EAAEkE,CAAC,CAAClE,OAAAA;AACb,aAAA;WACD,CAAA;AACH,SAAA;QACA,OAAO;AACLiE,UAAAA,OAAO,EAAE,EAAA;SACV,CAAA;AACH,OAAC,CAAC,CAAA;AACJ,KAAC,MAAM;AACLA,MAAAA,OAAO,GAAG;AACRE,QAAAA,IAAI,EAAE,MAAM;QACZC,IAAI,EAAEJ,CAAC,CAACC,OAAAA;OACT,CAAA;AACH,KAAA;IACA,OAAO;MACLa,IAAI,EAAEd,CAAC,CAACc,IAAI;AACZb,MAAAA,OAAAA;KACD,CAAA;AACH,GAAC,CAAC,CAAA;AACJ,CAAC,CAAA;AAED,MAAMc,eAAe,GAAIF,MAAW,IAAqB;AACvD,EAAA,MAAMlF,MAAM,GAAG;IACb,IAAIkF,MAAM,CAACT,IAAI,GAAG;MAAEA,IAAI,EAAES,MAAM,CAACT,IAAAA;KAAM,GAAG,EAAE,CAAC;IAC7C,IAAIS,MAAM,CAACG,MAAM,GAAG;MAAEA,MAAM,EAAEH,MAAM,CAACG,MAAAA;KAAQ,GAAG,EAAE,CAAC;IACnD,IAAIH,MAAM,CAACI,SAAS,GAAG;MAAEA,SAAS,EAAEJ,MAAM,CAACI,SAAAA;KAAW,GAAG,EAAE,CAAC;IAC5D,IAAIJ,MAAM,CAACK,QAAQ,GAAG;MAAEA,QAAQ,EAAEL,MAAM,CAACK,QAAAA;KAAU,GAAG,EAAE,CAAC;IACzD,IAAIL,MAAM,CAACM,YAAY,GAAG;MAAEA,YAAY,EAAEN,MAAM,CAACM,YAAAA;KAAc,GAAG,EAAE,CAAC;IACrE,IAAIN,MAAM,CAAC9E,KAAK,GAAG;MAAEA,KAAK,EAAE8E,MAAM,CAAC9E,KAAAA;KAAO,GAAG,EAAE,CAAC;IAChD,IAAI8E,MAAM,CAACO,QAAQ,GAAG;MAAEA,QAAQ,EAAEP,MAAM,CAACO,QAAAA;KAAU,GAAG,EAAE,CAAC;IACzD,IAAIP,MAAM,CAACQ,gBAAgB,GAAG;MAAEC,SAAS,EAAET,MAAM,CAACQ,gBAAAA;KAAkB,GAAG,EAAE,CAAA;GAC1E,CAAA;AACD;AACA,EAAA,IAAI1F,MAAM,CAACyE,IAAI,IAAI,CAACzE,MAAM,CAACqF,MAAM,IAAI,CAACrF,MAAM,CAACsF,SAAS,EAAE;AACtD,IAAA,OAAO,CAAC;MAAEhB,OAAO,EAAEtE,MAAM,CAACyE,IAAI;AAAEU,MAAAA,IAAI,EAAE,WAAA;AAAY,KAAC,CAAC,CAAA;AACtD,GAAA;AACA,EAAA,OAAO,CAAC;AAAEb,IAAAA,OAAO,EAAEzF,IAAI,CAACE
,SAAS,CAACiB,MAAM,CAAC;AAAEmF,IAAAA,IAAI,EAAE,WAAA;AAAY,GAAC,CAAC,CAAA;AACjE,CAAC,CAAA;AAED,MAAMS,eAAe,GAAI9F,KAAsB,IAAa;AAC1D;EACA,MAAMC,QAAQ,GAAGD,KAAK,CAACC,QAAQ,CAAC8F,WAAW,EAAE,CAAA;EAC7C,MAAMC,YAAY,GAAG/F,QAAQ,CAACgG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;AAC3C,EAAA,OAAOD,YAAY,CAAA;AACrB,CAAC,CAAA;AAEM,MAAME,+BAA+B,GAAGA,CAC7CC,QAAiB,EACjBnG,KAAsB,EACtBoG,OAA+C,KACjB;AAC9B,EAAA,MAAMC,UAAqC,GAAG;IAC5CC,YAAY,EAAE,OAAO;MAAEC,UAAU;AAAErI,MAAAA,MAAAA;AAAO,KAAC,KAAK;AAC9C,MAAA,MAAMsI,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;AAC5B,MAAA,MAAMC,YAAY,GAAG;AACnB,QAAA,GAAGP,OAAO;QACV,GAAG5C,eAAe,CAACtF,MAAM,CAAA;OAC1B,CAAA;MACD,IAAI;AACF,QAAA,MAAMkH,MAAM,GAAG,MAAMmB,UAAU,EAAE,CAAA;QACjC,MAAMpG,OAAO,GAAG,CAACsG,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;QAC/C,MAAMI,OAAO,GACXR,OAAO,CAAC5D,oBAAoB,KAAK4C,MAAM,CAACK,QAAQ,EAAEmB,OAAO,GAAGxB,MAAM,CAACK,QAAQ,CAACmB,OAAO,GAAG5G,KAAK,CAAC4G,OAAO,CAAC,CAAA;QACtG,MAAM3G,QAAQ,GAAGmG,OAAO,CAAC9D,uBAAuB,IAAIwD,eAAe,CAAC9F,KAAK,CAAC,CAAA;QAC1E,MAAMI,OAAO,GAAG,EAAE,CAAC;AACnB,QAAA,MAAMoE,OAAO,GAAGc,eAAe,CAACF,MAAM,CAAC,CAAA;AACvC;AACA,QAAA,MAAMQ,gBAAgB,GAAGR,MAAM,CAACQ,gBAAgB,CAAA;AAChD,QAAA,MAAMhE,qBAAqB,GAAG;AAC5B,UAAA,IAAIgE,gBAAgB,EAAEiB,MAAM,EAAEhF,eAAe,GACzC;AAAEA,YAAAA,eAAe,EAAE+D,gBAAgB,CAACiB,MAAM,CAAChF,eAAAA;WAAiB,GAC5D,EAAE,CAAC;AACP,UAAA,IAAI+D,gBAAgB,EAAEiB,MAAM,EAAEC,kBAAkB,GAC5C;AAAE/E,YAAAA,oBAAoB,EAAE6D,gBAAgB,CAACiB,MAAM,CAACC,kBAAAA;WAAoB,GACpE,EAAE,CAAC;UACP,IAAIlB,gBAAgB,EAAEmB,SAAS,GAC3B;AACEhF,YAAAA,oBAAoB,EAAE6D,gBAAgB,CAACmB,SAAS,CAAChF,oBAAoB;AACrEE,YAAAA,wBAAwB,EAAE2D,gBAAgB,CAACmB,SAAS,CAAC9E,wBAAAA;WACtD,GACD,EAAE,CAAA;SACP,CAAA;AACDpC,QAAAA,kBAAkB,CAAC;AACjBrB,UAAAA,MAAM,EAAE2H,QAAQ;UAChBrG,UAAU,EAAEsG,OAAO,CAACY,iBAAiB;UACrCjH,OAAO,EAAEqG,OAAO,CAACa,cAAc;AAC/BjH,UAAAA,KAAK,EAAE4G,OAAO;AACd3G,UAAAA,QAAQ,EAAEA,QAAQ;AAClBvB,UAAAA,KAAK,EAAE0H,OAAO,CAACzD,kBAAkB,GAAG,EAAE,GAAG0B,eAAe,CAACnG,MAAM,CAACoG,MAAM,CAAC;AACvEpE,UAAAA,MAAM,EAAE,CAAC;YAAEsE,OAAO;AAAEa,YAAAA,IAAI,EAAE,WAAA;AAAY,WAAC,CAAC;UACxClF,OAAO;UACPC
,OAAO;AACPlC,UAAAA,MAAM,EAAEyI,YAAmB;AAC3BtG,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLe,YAAAA,WAAW,EAAE+D,MAAM,CAAC9E,KAAK,CAAC4G,YAAY;AACtC1F,YAAAA,YAAY,EAAE4D,MAAM,CAAC9E,KAAK,CAAC6G,gBAAgB;YAC3C,GAAGvF,qBAAAA;AACL,WAAA;AACF,SAAC,CAAC,CAAA;AAEF,QAAA,OAAOwD,MAAM,CAAA;OACd,CAAC,OAAO5E,KAAU,EAAE;AACnB,QAAA,MAAMoG,OAAO,GAAG5G,KAAK,CAAC4G,OAAO,CAAA;AAC7B/G,QAAAA,kBAAkB,CAAC;AACjBrB,UAAAA,MAAM,EAAE2H,QAAQ;UAChBrG,UAAU,EAAEsG,OAAO,CAACY,iBAAiB;UACrCjH,OAAO,EAAEqG,OAAO,CAACa,cAAc;AAC/BjH,UAAAA,KAAK,EAAE4G,OAAO;UACd3G,QAAQ,EAAED,KAAK,CAACC,QAAQ;AACxBvB,UAAAA,KAAK,EAAE0H,OAAO,CAACzD,kBAAkB,GAAG,EAAE,GAAG0B,eAAe,CAACnG,MAAM,CAACoG,MAAM,CAAC;AACvEpE,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXlC,UAAAA,MAAM,EAAEyI,YAAmB;UAC3BtG,UAAU,EAAEG,KAAK,EAAE4G,MAAM,GAAG5G,KAAK,CAAC4G,MAAM,GAAG,GAAG;AAC9C9G,UAAAA,KAAK,EAAE;AACLe,YAAAA,WAAW,EAAE,CAAC;AACdG,YAAAA,YAAY,EAAE,CAAA;WACf;AACDjB,UAAAA,OAAO,EAAE,IAAI;AACbC,UAAAA,KAAK,EAAEzB,IAAI,CAACE,SAAS,CAACuB,KAAK,CAAA;AAC7B,SAAC,CAAC,CAAA;AACF,QAAA,MAAMA,KAAK,CAAA;AACb,OAAA;KACD;IAED6G,UAAU,EAAE,OAAO;MAAEC,QAAQ;AAAEpJ,MAAAA,MAAAA;AAAO,KAAC,KAAK;AAC1C,MAAA,MAAMsI,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;MAC5B,IAAIa,aAAa,GAAG,EAAE,CAAA;MACtB,IAAIjH,KAMH,GAAG,EAAE,CAAA;AACN,MAAA,MAAMqG,YAAY,GAAG;AACnB,QAAA,GAAGP,OAAO;QACV,GAAG5C,eAAe,CAACtF,MAAM,CAAA;OAC1B,CAAA;MAED,MAAM0I,OAAO,GAAGR,OAAO,CAAC5D,oBAAoB,IAAIxC,KAAK,CAAC4G,OAAO,CAAA;MAC7D,MAAM3G,QAAQ,GAAGmG,OAAO,CAAC9D,uBAAuB,IAAIwD,eAAe,CAAC9F,KAAK,CAAC,CAAA;MAC1E,MAAMI,OAAO,GAAG,EAAE,CAAC;MACnB,IAAI;QACF,MAAM;UAAEgE,MAAM;UAAE,GAAGoD,IAAAA;AAAK,SAAC,GAAG,MAAMF,QAAQ,EAAE,CAAA;AAC5C,QAAA,MAAMG,eAAe,GAAG,IAAIC,eAAe,CAAuD;AAChGC,UAAAA,SAASA,CAACC,KAAK,EAAEC,UAAU,EAAE;AAC3B,YAAA,IAAID,KAAK,CAAClD,IAAI,KAAK,YAAY,EAAE;cAC/B6C,aAAa,IAAIK,KAAK,CAACE,SAAS,CAAA;AAClC,aAAA;AACA,YAAA,IAAIF,KAAK,CAAClD,IAAI,KAAK,QAAQ,EAAE;AAC3BpE,cAAAA,KAAK,GAAG;AACNe,gBAAAA,WAAW,EAAEuG,KAAK,CAACtH,KAAK,EAAE4G,YAAY;AACtC1F,gBAAAA,YAAY,EAAEoG,KAAK,CAACtH,KAAK,EAAE6G,gBAAAA;eAC5B,CAAA;AACD,c
AAA,IAAIS,KAAK,CAAChC,gBAAgB,EAAEiB,MAAM,EAAEhF,eAAe,EAAE;gBACnDvB,KAAK,CAACuB,eAAe,GAAG+F,KAAK,CAAChC,gBAAgB,CAACiB,MAAM,CAAChF,eAAe,CAAA;AACvE,eAAA;AACA,cAAA,IAAI+F,KAAK,CAAChC,gBAAgB,EAAEiB,MAAM,EAAEC,kBAAkB,EAAE;gBACtDxG,KAAK,CAACyB,oBAAoB,GAAG6F,KAAK,CAAChC,gBAAgB,CAACiB,MAAM,CAACC,kBAAkB,CAAA;AAC/E,eAAA;AACA,cAAA,IAAIc,KAAK,CAAChC,gBAAgB,EAAEmB,SAAS,EAAEhF,oBAAoB,EAAE;gBAC3DzB,KAAK,CAACyB,oBAAoB,GAAG6F,KAAK,CAAChC,gBAAgB,CAACmB,SAAS,CAAChF,oBAAoB,CAAA;AACpF,eAAA;AACA,cAAA,IAAI6F,KAAK,CAAChC,gBAAgB,EAAEmB,SAAS,EAAE9E,wBAAwB,EAAE;gBAC/D3B,KAAK,CAAC2B,wBAAwB,GAAG2F,KAAK,CAAChC,gBAAgB,CAACmB,SAAS,CAAC9E,wBAAwB,CAAA;AAC5F,eAAA;AACF,aAAA;AACA4F,YAAAA,UAAU,CAACE,OAAO,CAACH,KAAK,CAAC,CAAA;WAC1B;AAEDI,UAAAA,KAAKA,GAAG;YACN,MAAM7H,OAAO,GAAG,CAACsG,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/C3G,YAAAA,kBAAkB,CAAC;AACjBrB,cAAAA,MAAM,EAAE2H,QAAQ;cAChBrG,UAAU,EAAEsG,OAAO,CAACY,iBAAiB;cACrCjH,OAAO,EAAEqG,OAAO,CAACa,cAAc;AAC/BjH,cAAAA,KAAK,EAAE4G,OAAO;AACd3G,cAAAA,QAAQ,EAAEA,QAAQ;AAClBvB,cAAAA,KAAK,EAAE0H,OAAO,CAACzD,kBAAkB,GAAG,EAAE,GAAG0B,eAAe,CAACnG,MAAM,CAACoG,MAAM,CAAC;AACvEpE,cAAAA,MAAM,EAAE,CAAC;AAAEsE,gBAAAA,OAAO,EAAE+C,aAAa;AAAElC,gBAAAA,IAAI,EAAE,WAAA;AAAY,eAAC,CAAC;cACvDlF,OAAO;cACPC,OAAO;AACPlC,cAAAA,MAAM,EAAEyI,YAAmB;AAC3BtG,cAAAA,UAAU,EAAE,GAAG;AACfC,cAAAA,KAAAA;AACF,aAAC,CAAC,CAAA;AACJ,WAAA;AACF,SAAC,CAAC,CAAA;QAEF,OAAO;AACL8D,UAAAA,MAAM,EAAEA,MAAM,CAAC6D,WAAW,CAACR,eAAe,CAAC;UAC3C,GAAGD,IAAAA;SACJ,CAAA;OACF,CAAC,OAAOhH,KAAU,EAAE;AACnBX,QAAAA,kBAAkB,CAAC;AACjBrB,UAAAA,MAAM,EAAE2H,QAAQ;UAChBrG,UAAU,EAAEsG,OAAO,CAACY,iBAAiB;UACrCjH,OAAO,EAAEqG,OAAO,CAACa,cAAc;AAC/BjH,UAAAA,KAAK,EAAE4G,OAAO;AACd3G,UAAAA,QAAQ,EAAEA,QAAQ;AAClBvB,UAAAA,KAAK,EAAE0H,OAAO,CAACzD,kBAAkB,GAAG,EAAE,GAAG0B,eAAe,CAACnG,MAAM,CAACoG,MAAM,CAAC;AACvEpE,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACXlC,UAAAA,MAAM,EAAEyI,YAAmB;UAC3BtG,UAAU,EAAEG,KAAK,EAAE4G,MAAM,GAAG5G,KAAK,CAAC4G,MAAM,GAAG,GAAG;AAC9C9G,UAAAA,KAAK,EAAE;AACLe,YAAAA,WAAW,EAAE,CAAC;AACdG,YAAAA,Y
AAY,EAAE,CAAA;WACf;AACDjB,UAAAA,OAAO,EAAE,IAAI;AACbC,UAAAA,KAAK,EAAEzB,IAAI,CAACE,SAAS,CAACuB,KAAK,CAAA;AAC7B,SAAC,CAAC,CAAA;AACF,QAAA,MAAMA,KAAK,CAAA;AACb,OAAA;AACF,KAAA;GACD,CAAA;AAED,EAAA,OAAO6F,UAAU,CAAA;AACnB,CAAC,CAAA;AAEM,MAAM6B,uBAAuB,GAAGA,CACrClI,KAAsB,EACtBmG,QAAiB,EACjBC,OAAsB,KACF;EACpB,MAAMrG,OAAO,GAAGqG,OAAO,CAACa,cAAc,IAAIkB,EAAM,EAAE,CAAA;AAClD,EAAA,MAAM9B,UAAU,GAAGH,+BAA+B,CAACC,QAAQ,EAAEnG,KAAK,EAAE;AAClE,IAAA,GAAGoG,OAAO;AACVa,IAAAA,cAAc,EAAElH,OAAO;AACvBiH,IAAAA,iBAAiB,EAAEZ,OAAO,CAACY,iBAAiB,IAAIjH,OAAAA;AAClD,GAAC,CAAC,CAAA;EAEF,MAAMqI,YAAY,GAAGC,8BAAiB,CAAC;IACrCrI,KAAK;AACLqG,IAAAA,UAAAA;AACF,GAAC,CAAC,CAAA;AAEF,EAAA,OAAO+B,YAAY,CAAA;AACrB;;;;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@posthog/ai",
3
- "version": "3.3.2",
3
+ "version": "4.0.1",
4
4
  "description": "PostHog Node.js AI integrations",
5
5
  "repository": {
6
6
  "type": "git",
@@ -37,6 +37,33 @@
37
37
  "test": "jest",
38
38
  "prepublishOnly": "cd .. && yarn build"
39
39
  },
40
+ "exports": {
41
+ ".": {
42
+ "require": "./lib/index.cjs.js",
43
+ "import": "./lib/index.esm.js",
44
+ "types": "./lib/index.d.ts"
45
+ },
46
+ "./anthropic": {
47
+ "require": "./lib/anthropic/index.cjs.js",
48
+ "import": "./lib/anthropic/index.esm.js",
49
+ "types": "./lib/anthropic/index.d.ts"
50
+ },
51
+ "./openai": {
52
+ "require": "./lib/openai/index.cjs.js",
53
+ "import": "./lib/openai/index.esm.js",
54
+ "types": "./lib/openai/index.d.ts"
55
+ },
56
+ "./vercel": {
57
+ "require": "./lib/vercel/index.cjs.js",
58
+ "import": "./lib/vercel/index.esm.js",
59
+ "types": "./lib/vercel/index.d.ts"
60
+ },
61
+ "./langchain": {
62
+ "require": "./lib/langchain/index.cjs.js",
63
+ "import": "./lib/langchain/index.esm.js",
64
+ "types": "./lib/langchain/index.d.ts"
65
+ }
66
+ },
40
67
  "directories": {
41
68
  "lib": "lib",
42
69
  "test": "tests"
@@ -202,3 +202,5 @@ export class WrappedMessages extends AnthropicOriginal.Messages {
202
202
  }
203
203
 
204
204
  export default PostHogAnthropic
205
+
206
+ export { PostHogAnthropic as Anthropic }
@@ -0,0 +1 @@
1
+ export * from './callbacks'
@@ -1,4 +1,4 @@
1
- import OpenAIOrignal from 'openai'
1
+ import OpenAIOrignal, { ClientOptions } from 'openai'
2
2
  import { PostHog } from 'posthog-node'
3
3
  import { v4 as uuidv4 } from 'uuid'
4
4
  import { formatResponseOpenAI, MonitoringParams, sendEventToPosthog } from '../utils'
@@ -11,7 +11,7 @@ type ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCo
11
11
  import type { APIPromise, RequestOptions } from 'openai/core'
12
12
  import type { Stream } from 'openai/streaming'
13
13
 
14
- interface MonitoringOpenAIConfig {
14
+ interface MonitoringOpenAIConfig extends ClientOptions {
15
15
  apiKey: string
16
16
  posthog: PostHog
17
17
  baseURL?: string
@@ -211,3 +211,5 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
211
211
  }
212
212
 
213
213
  export default PostHogOpenAI
214
+
215
+ export { PostHogOpenAI as OpenAI }
package/src/utils.ts CHANGED
@@ -1,4 +1,5 @@
1
1
  import { PostHog } from 'posthog-node'
2
+ import { Buffer } from 'buffer'
2
3
  import OpenAIOrignal from 'openai'
3
4
  import AnthropicOriginal from '@anthropic-ai/sdk'
4
5
 
@@ -131,6 +132,21 @@ export type SendEventToPosthogParams = {
131
132
  tools?: any
132
133
  }
133
134
 
135
+ function sanitizeValues(obj: any): any {
136
+ if (obj === undefined || obj === null) {
137
+ return obj
138
+ }
139
+ const jsonSafe = JSON.parse(JSON.stringify(obj))
140
+ if (typeof jsonSafe === 'string') {
141
+ return Buffer.from(jsonSafe, 'utf8').toString('utf8')
142
+ } else if (Array.isArray(jsonSafe)) {
143
+ return jsonSafe.map(sanitizeValues)
144
+ } else if (jsonSafe && typeof jsonSafe === 'object') {
145
+ return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]))
146
+ }
147
+ return jsonSafe
148
+ }
149
+
134
150
  export const sendEventToPosthog = ({
135
151
  client,
136
152
  distinctId,
@@ -149,11 +165,16 @@ export const sendEventToPosthog = ({
149
165
  tools,
150
166
  }: SendEventToPosthogParams): void => {
151
167
  if (client.capture) {
168
+ // sanitize input and output for UTF-8 validity
169
+ const safeInput = sanitizeValues(input)
170
+ const safeOutput = sanitizeValues(output)
171
+ const safeError = sanitizeValues(error)
172
+
152
173
  let errorData = {}
153
174
  if (isError) {
154
175
  errorData = {
155
176
  $ai_is_error: true,
156
- $ai_error: error,
177
+ $ai_error: safeError,
157
178
  }
158
179
  }
159
180
  let costOverrideData = {}
@@ -180,8 +201,8 @@ export const sendEventToPosthog = ({
180
201
  $ai_provider: params.posthogProviderOverride ?? provider,
181
202
  $ai_model: params.posthogModelOverride ?? model,
182
203
  $ai_model_parameters: getModelParams(params),
183
- $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, input),
184
- $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, output),
204
+ $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),
205
+ $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),
185
206
  $ai_http_status: httpStatus,
186
207
  $ai_input_tokens: usage.inputTokens ?? 0,
187
208
  $ai_output_tokens: usage.outputTokens ?? 0,
@@ -0,0 +1 @@
1
+ export { wrapVercelLanguageModel as withTracing } from './middleware'
package/tsconfig.json CHANGED
@@ -1,6 +1,7 @@
1
1
  {
2
2
  "extends": "../tsconfig.json",
3
3
  "compilerOptions": {
4
+ "incremental": false,
4
5
  "types": ["node"],
5
6
  "typeRoots": ["./node_modules/@types", "../node_modules/@types"],
6
7
  "moduleResolution": "node",