@posthog/ai 6.1.0 → 6.1.2

This diff shows the contents of package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
@@ -226,6 +226,129 @@ const sendEventToPosthog = async ({
  }
  };
 
+ // Type guards for safer type checking
+
+ const isString = value => {
+ return typeof value === 'string';
+ };
+ const isObject = value => {
+ return value !== null && typeof value === 'object' && !Array.isArray(value);
+ };
+
+ const REDACTED_IMAGE_PLACEHOLDER = '[base64 image redacted]';
+
+ // ============================================
+ // Base64 Detection Helpers
+ // ============================================
+
+ const isBase64DataUrl = str => {
+ return /^data:([^;]+);base64,/.test(str);
+ };
+ const isValidUrl = str => {
+ try {
+ new URL(str);
+ return true;
+ } catch {
+ // Not an absolute URL, check if it's a relative URL or path
+ return str.startsWith('/') || str.startsWith('./') || str.startsWith('../');
+ }
+ };
+ const isRawBase64 = str => {
+ // Skip if it's a valid URL or path
+ if (isValidUrl(str)) {
+ return false;
+ }
+
+ // Check if it's a valid base64 string
+ // Base64 images are typically at least a few hundred chars, but we'll be conservative
+ return str.length > 20 && /^[A-Za-z0-9+/]+=*$/.test(str);
+ };
+ function redactBase64DataUrl(str) {
+ if (!isString(str)) return str;
+
+ // Check for data URL format
+ if (isBase64DataUrl(str)) {
+ return REDACTED_IMAGE_PLACEHOLDER;
+ }
+
+ // Check for raw base64 (Vercel sends raw base64 for inline images)
+ if (isRawBase64(str)) {
+ return REDACTED_IMAGE_PLACEHOLDER;
+ }
+ return str;
+ }
+
+ // ============================================
+ // Common Message Processing
+ // ============================================
+
+ const processMessages = (messages, transformContent) => {
+ if (!messages) return messages;
+ const processContent = content => {
+ if (typeof content === 'string') return content;
+ if (!content) return content;
+ if (Array.isArray(content)) {
+ return content.map(transformContent);
+ }
+
+ // Handle single object content
+ return transformContent(content);
+ };
+ const processMessage = msg => {
+ if (!isObject(msg) || !('content' in msg)) return msg;
+ return {
+ ...msg,
+ content: processContent(msg.content)
+ };
+ };
+
+ // Handle both arrays and single messages
+ if (Array.isArray(messages)) {
+ return messages.map(processMessage);
+ }
+ return processMessage(messages);
+ };
+
+ // ============================================
+ // Provider-Specific Image Sanitizers
+ // ============================================
+
+ const sanitizeOpenAIImage = item => {
+ if (!isObject(item)) return item;
+
+ // Handle image_url format
+ if (item.type === 'image_url' && 'image_url' in item && isObject(item.image_url) && 'url' in item.image_url) {
+ return {
+ ...item,
+ image_url: {
+ ...item.image_url,
+ url: redactBase64DataUrl(item.image_url.url)
+ }
+ };
+ }
+ return item;
+ };
+ const sanitizeOpenAIResponseImage = item => {
+ if (!isObject(item)) return item;
+
+ // Handle input_image format
+ if (item.type === 'input_image' && 'image_url' in item) {
+ return {
+ ...item,
+ image_url: redactBase64DataUrl(item.image_url)
+ };
+ }
+ return item;
+ };
+
+ // Export individual sanitizers for tree-shaking
+ const sanitizeOpenAI = data => {
+ return processMessages(data, sanitizeOpenAIImage);
+ };
+ const sanitizeOpenAIResponse = data => {
+ return processMessages(data, sanitizeOpenAIResponseImage);
+ };
+
  const Chat = OpenAI.Chat;
  const Completions = Chat.Completions;
  const Responses = OpenAI.Responses;
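
A minimal sketch of how the new sanitizers behave, assuming the module-level helpers added above and a hypothetical chat-message array: base64 data URLs are replaced with the placeholder, while plain text and ordinary URLs pass through unchanged.

// Hypothetical OpenAI chat messages; only the base64 data URL should be redacted.
const messages = [
  { role: 'user', content: 'Describe this image' },
  {
    role: 'user',
    content: [
      { type: 'text', text: 'What is in this picture?' },
      { type: 'image_url', image_url: { url: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUg...' } },
      { type: 'image_url', image_url: { url: 'https://example.com/cat.png' } }
    ]
  }
];

const sanitized = sanitizeOpenAI(messages);
// sanitized[1].content[1].image_url.url === '[base64 image redacted]'
// sanitized[1].content[2].image_url.url === 'https://example.com/cat.png' (regular URLs are left intact)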
@@ -280,14 +403,56 @@ class WrappedCompletions extends Completions {
  const [stream1, stream2] = value.tee();
  (async () => {
  try {
+ const contentBlocks = [];
  let accumulatedContent = '';
  let usage = {
  inputTokens: 0,
  outputTokens: 0
  };
+
+ // Map to track in-progress tool calls
+ const toolCallsInProgress = new Map();
  for await (const chunk of stream1) {
- const delta = chunk?.choices?.[0]?.delta?.content ?? '';
- accumulatedContent += delta;
+ const choice = chunk?.choices?.[0];
+
+ // Handle text content
+ const deltaContent = choice?.delta?.content;
+ if (deltaContent) {
+ accumulatedContent += deltaContent;
+ }
+
+ // Handle tool calls
+ const deltaToolCalls = choice?.delta?.tool_calls;
+ if (deltaToolCalls && Array.isArray(deltaToolCalls)) {
+ for (const toolCall of deltaToolCalls) {
+ const index = toolCall.index;
+ if (index !== undefined) {
+ if (!toolCallsInProgress.has(index)) {
+ // New tool call
+ toolCallsInProgress.set(index, {
+ id: toolCall.id || '',
+ name: toolCall.function?.name || '',
+ arguments: ''
+ });
+ }
+ const inProgressCall = toolCallsInProgress.get(index);
+ if (inProgressCall) {
+ // Update tool call data
+ if (toolCall.id) {
+ inProgressCall.id = toolCall.id;
+ }
+ if (toolCall.function?.name) {
+ inProgressCall.name = toolCall.function.name;
+ }
+ if (toolCall.function?.arguments) {
+ inProgressCall.arguments += toolCall.function.arguments;
+ }
+ }
+ }
+ }
+ }
+
+ // Handle usage information
  if (chunk.usage) {
  usage = {
  inputTokens: chunk.usage.prompt_tokens ?? 0,
@@ -297,6 +462,40 @@ class WrappedCompletions extends Completions {
  };
  }
  }
+
+ // Build final content blocks
+ if (accumulatedContent) {
+ contentBlocks.push({
+ type: 'text',
+ text: accumulatedContent
+ });
+ }
+
+ // Add completed tool calls to content blocks
+ for (const toolCall of toolCallsInProgress.values()) {
+ if (toolCall.name) {
+ contentBlocks.push({
+ type: 'function',
+ id: toolCall.id,
+ function: {
+ name: toolCall.name,
+ arguments: toolCall.arguments
+ }
+ });
+ }
+ }
+
+ // Format output to match non-streaming version
+ const formattedOutput = contentBlocks.length > 0 ? [{
+ role: 'assistant',
+ content: contentBlocks
+ }] : [{
+ role: 'assistant',
+ content: [{
+ type: 'text',
+ text: ''
+ }]
+ }];
  const latency = (Date.now() - startTime) / 1000;
  const availableTools = extractAvailableToolCalls('openai', openAIParams);
  await sendEventToPosthog({
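
The streaming branch now reconstructs tool calls from incremental deltas before reporting the event. A self-contained sketch of the same accumulation pattern, using hypothetical chunks in the shape OpenAI streams tool-call deltas rather than a live stream:

// Hypothetical streamed deltas for a single tool call split across three chunks.
const chunks = [
  { choices: [{ delta: { tool_calls: [{ index: 0, id: 'call_1', function: { name: 'get_weather', arguments: '' } }] } }] },
  { choices: [{ delta: { tool_calls: [{ index: 0, function: { arguments: '{"city":' } }] } }] },
  { choices: [{ delta: { tool_calls: [{ index: 0, function: { arguments: '"Paris"}' } }] } }] }
];

const toolCallsInProgress = new Map();
for (const chunk of chunks) {
  for (const toolCall of chunk.choices?.[0]?.delta?.tool_calls ?? []) {
    if (!toolCallsInProgress.has(toolCall.index)) {
      toolCallsInProgress.set(toolCall.index, { id: toolCall.id || '', name: toolCall.function?.name || '', arguments: '' });
    }
    const call = toolCallsInProgress.get(toolCall.index);
    if (toolCall.id) call.id = toolCall.id;
    if (toolCall.function?.name) call.name = toolCall.function.name;
    if (toolCall.function?.arguments) call.arguments += toolCall.function.arguments;
  }
}
// toolCallsInProgress.get(0) -> { id: 'call_1', name: 'get_weather', arguments: '{"city":"Paris"}' },
// which becomes a { type: 'function', id, function: { name, arguments } } content block in formattedOutput.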
@@ -305,11 +504,8 @@ class WrappedCompletions extends Completions {
  traceId,
  model: openAIParams.model,
  provider: 'openai',
- input: openAIParams.messages,
- output: [{
- content: accumulatedContent,
- role: 'assistant'
- }],
+ input: sanitizeOpenAI(openAIParams.messages),
+ output: formattedOutput,
  latency,
  baseURL: this.baseURL ?? '',
  params: body,
@@ -319,18 +515,19 @@ class WrappedCompletions extends Completions {
  captureImmediate: posthogCaptureImmediate
  });
  } catch (error) {
+ const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
  await sendEventToPosthog({
  client: this.phClient,
  distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'openai',
- input: openAIParams.messages,
+ input: sanitizeOpenAI(openAIParams.messages),
  output: [],
  latency: 0,
  baseURL: this.baseURL ?? '',
  params: body,
- httpStatus: error?.status ? error.status : 500,
+ httpStatus,
  usage: {
  inputTokens: 0,
  outputTokens: 0
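
The repeated httpStatus change swaps error?.status ? error.status : 500 for a type-guarded expression that only reads status when the rejected value is actually an object, falling back to 500 when status is null or undefined. A small sketch of the new expression, extracted as a helper for illustration only and fed hypothetical error values:

const statusOf = error =>
  error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;

statusOf({ status: 429, message: 'Rate limited' }); // 429
statusOf({ message: 'boom' });                      // 500 (no status property)
statusOf('network down');                           // 500 (not an object, no property access)
statusOf(undefined);                                // 500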
@@ -358,7 +555,7 @@ class WrappedCompletions extends Completions {
  traceId,
  model: openAIParams.model,
  provider: 'openai',
- input: openAIParams.messages,
+ input: sanitizeOpenAI(openAIParams.messages),
  output: formatResponseOpenAI(result),
  latency,
  baseURL: this.baseURL ?? '',
@@ -376,18 +573,19 @@ class WrappedCompletions extends Completions {
  }
  return result;
  }, async error => {
+ const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
  await sendEventToPosthog({
  client: this.phClient,
  distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'openai',
- input: openAIParams.messages,
+ input: sanitizeOpenAI(openAIParams.messages),
  output: [],
  latency: 0,
  baseURL: this.baseURL ?? '',
  params: body,
- httpStatus: error?.status ? error.status : 500,
+ httpStatus,
  usage: {
  inputTokens: 0,
  outputTokens: 0
@@ -462,7 +660,7 @@ class WrappedResponses extends Responses {
  //@ts-expect-error
  model: openAIParams.model,
  provider: 'openai',
- input: openAIParams.input,
+ input: sanitizeOpenAIResponse(openAIParams.input),
  output: finalContent,
  latency,
  baseURL: this.baseURL ?? '',
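
The Responses API wrapper gets the same treatment via sanitizeOpenAIResponse, which handles input_image items where the URL is a plain string rather than a nested object. A short sketch, assuming a hypothetical Responses input array:

// Hypothetical Responses API input; input_image carries the URL directly as a string.
const input = [
  {
    role: 'user',
    content: [
      { type: 'input_text', text: 'What is in this image?' },
      { type: 'input_image', image_url: 'data:image/jpeg;base64,/9j/4AAQSkZJRg...' }
    ]
  }
];

const sanitized = sanitizeOpenAIResponse(input);
// sanitized[0].content[1].image_url === '[base64 image redacted]'
// sanitized[0].content[0] is returned unchanged.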
@@ -473,6 +671,7 @@ class WrappedResponses extends Responses {
  captureImmediate: posthogCaptureImmediate
  });
  } catch (error) {
+ const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
  await sendEventToPosthog({
  client: this.phClient,
  distinctId: posthogDistinctId,
@@ -480,12 +679,12 @@ class WrappedResponses extends Responses {
  //@ts-expect-error
  model: openAIParams.model,
  provider: 'openai',
- input: openAIParams.input,
+ input: sanitizeOpenAIResponse(openAIParams.input),
  output: [],
  latency: 0,
  baseURL: this.baseURL ?? '',
  params: body,
- httpStatus: error?.status ? error.status : 500,
+ httpStatus,
  usage: {
  inputTokens: 0,
  outputTokens: 0
@@ -512,7 +711,7 @@ class WrappedResponses extends Responses {
  //@ts-expect-error
  model: openAIParams.model,
  provider: 'openai',
- input: openAIParams.input,
+ input: sanitizeOpenAIResponse(openAIParams.input),
  output: formatResponseOpenAI({
  output: result.output
  }),
@@ -532,6 +731,7 @@ class WrappedResponses extends Responses {
  }
  return result;
  }, async error => {
+ const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
  await sendEventToPosthog({
  client: this.phClient,
  distinctId: posthogDistinctId,
@@ -539,12 +739,12 @@ class WrappedResponses extends Responses {
  //@ts-expect-error
  model: openAIParams.model,
  provider: 'openai',
- input: openAIParams.input,
+ input: sanitizeOpenAIResponse(openAIParams.input),
  output: [],
  latency: 0,
  baseURL: this.baseURL ?? '',
  params: body,
- httpStatus: error?.status ? error.status : 500,
+ httpStatus,
  usage: {
  inputTokens: 0,
  outputTokens: 0
@@ -588,7 +788,7 @@ class WrappedResponses extends Responses {
  //@ts-expect-error
  model: openAIParams.model,
  provider: 'openai',
- input: openAIParams.input,
+ input: sanitizeOpenAIResponse(openAIParams.input),
  output: result.output,
  latency,
  baseURL: this.baseURL ?? '',
@@ -604,6 +804,7 @@ class WrappedResponses extends Responses {
  });
  return result;
  }, async error => {
+ const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
  await sendEventToPosthog({
  client: this.phClient,
  distinctId: posthogDistinctId,
@@ -611,12 +812,12 @@ class WrappedResponses extends Responses {
  //@ts-expect-error
  model: openAIParams.model,
  provider: 'openai',
- input: openAIParams.input,
+ input: sanitizeOpenAIResponse(openAIParams.input),
  output: [],
  latency: 0,
  baseURL: this.baseURL ?? '',
  params: body,
- httpStatus: error?.status ? error.status : 500,
+ httpStatus,
  usage: {
  inputTokens: 0,
  outputTokens: 0