@posthog/ai 5.0.1 → 5.1.0

This diff shows the changes between the publicly released contents of these two package versions, as published to their registry. It is provided for informational purposes only.
package/lib/index.cjs CHANGED
@@ -217,6 +217,7 @@ class PostHogOpenAI extends OpenAIOrignal__default["default"] {
     super(openAIConfig);
     this.phClient = posthog;
     this.chat = new WrappedChat$1(this, this.phClient);
+    this.responses = new WrappedResponses$1(this, this.phClient);
   }
 }
 class WrappedChat$1 extends OpenAIOrignal__default["default"].Chat {
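
With the `responses` property wired up, the wrapped OpenAI client routes Responses API calls through `WrappedResponses$1` (added in the next hunk), which strips the PostHog-specific fields, forwards the rest to the upstream SDK, and reports the call via `sendEventToPosthog`. A minimal usage sketch follows; the import path, client construction, and model name are assumptions based on the package README rather than on this diff, while the `posthog*` fields are exactly the ones the wrapper destructures:

// Sketch only: imports, API keys, and the model name are assumptions, not part of this diff.
const { PostHog } = require('posthog-node');
const { OpenAI } = require('@posthog/ai');

const phClient = new PostHog('<ph_project_api_key>');
const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY, posthog: phClient });

async function main() {
  // Handled by WrappedResponses$1.create(); the posthog* fields are removed
  // before the request reaches the OpenAI Responses API.
  const response = await client.responses.create({
    model: 'gpt-4.1-mini',
    input: 'Write one sentence about event analytics.',
    posthogDistinctId: 'user_123',
    posthogTraceId: 'trace_abc', // optional; uuid.v4() is used when omitted
    posthogCaptureImmediate: true,
  });
  console.log(response.output);
  await phClient.shutdown();
}

main();
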
@@ -368,6 +369,218 @@ class WrappedCompletions$1 extends OpenAIOrignal__default["default"].Chat.Comple
     }
   }
 }
+class WrappedResponses$1 extends OpenAIOrignal__default["default"].Responses {
+  constructor(client, phClient) {
+    super(client);
+    this.phClient = phClient;
+  }
+  // --- Implementation Signature
+  create(body, options) {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body;
+    const traceId = posthogTraceId ?? uuid.v4();
+    const startTime = Date.now();
+    const parentPromise = super.create(openAIParams, options);
+    if (openAIParams.stream) {
+      return parentPromise.then(value => {
+        if ('tee' in value && typeof value.tee === 'function') {
+          const [stream1, stream2] = value.tee();
+          (async () => {
+            try {
+              let finalContent = [];
+              let usage = {
+                inputTokens: 0,
+                outputTokens: 0
+              };
+              for await (const chunk of stream1) {
+                if (chunk.type === 'response.completed' && 'response' in chunk && chunk.response?.output && chunk.response.output.length > 0) {
+                  finalContent = chunk.response.output;
+                }
+                if ('response' in chunk && chunk.response?.usage) {
+                  usage = {
+                    inputTokens: chunk.response.usage.input_tokens ?? 0,
+                    outputTokens: chunk.response.usage.output_tokens ?? 0,
+                    reasoningTokens: chunk.response.usage.output_tokens_details?.reasoning_tokens ?? 0,
+                    cacheReadInputTokens: chunk.response.usage.input_tokens_details?.cached_tokens ?? 0
+                  };
+                }
+              }
+              const latency = (Date.now() - startTime) / 1000;
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId ?? traceId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'openai',
+                input: openAIParams.input,
+                output: finalContent,
+                latency,
+                baseURL: this.baseURL ?? '',
+                params: body,
+                httpStatus: 200,
+                usage,
+                captureImmediate: posthogCaptureImmediate
+              });
+            } catch (error) {
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId ?? traceId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'openai',
+                input: openAIParams.input,
+                output: [],
+                latency: 0,
+                baseURL: this.baseURL ?? '',
+                params: body,
+                httpStatus: error?.status ? error.status : 500,
+                usage: {
+                  inputTokens: 0,
+                  outputTokens: 0
+                },
+                isError: true,
+                error: JSON.stringify(error),
+                captureImmediate: posthogCaptureImmediate
+              });
+            }
+          })();
+          return stream2;
+        }
+        return value;
+      });
+    } else {
+      const wrappedPromise = parentPromise.then(async result => {
+        if ('output' in result) {
+          const latency = (Date.now() - startTime) / 1000;
+          await sendEventToPosthog({
+            client: this.phClient,
+            distinctId: posthogDistinctId ?? traceId,
+            traceId,
+            model: openAIParams.model,
+            provider: 'openai',
+            input: openAIParams.input,
+            output: result.output,
+            latency,
+            baseURL: this.baseURL ?? '',
+            params: body,
+            httpStatus: 200,
+            usage: {
+              inputTokens: result.usage?.input_tokens ?? 0,
+              outputTokens: result.usage?.output_tokens ?? 0,
+              reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+              cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
+            },
+            captureImmediate: posthogCaptureImmediate
+          });
+        }
+        return result;
+      }, async error => {
+        await sendEventToPosthog({
+          client: this.phClient,
+          distinctId: posthogDistinctId ?? traceId,
+          traceId,
+          model: openAIParams.model,
+          provider: 'openai',
+          input: openAIParams.input,
+          output: [],
+          latency: 0,
+          baseURL: this.baseURL ?? '',
+          params: body,
+          httpStatus: error?.status ? error.status : 500,
+          usage: {
+            inputTokens: 0,
+            outputTokens: 0
+          },
+          isError: true,
+          error: JSON.stringify(error),
+          captureImmediate: posthogCaptureImmediate
+        });
+        throw error;
+      });
+      return wrappedPromise;
+    }
+  }
+  parse(body, options) {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body;
+    const traceId = posthogTraceId ?? uuid.v4();
+    const startTime = Date.now();
+    // Create a temporary instance that bypasses our wrapped create method
+    const originalCreate = super.create.bind(this);
+    const originalSelf = this;
+    const tempCreate = originalSelf.create;
+    originalSelf.create = originalCreate;
+    try {
+      const parentPromise = super.parse(openAIParams, options);
+      const wrappedPromise = parentPromise.then(async result => {
+        const latency = (Date.now() - startTime) / 1000;
+        await sendEventToPosthog({
+          client: this.phClient,
+          distinctId: posthogDistinctId ?? traceId,
+          traceId,
+          model: openAIParams.model,
+          provider: 'openai',
+          input: openAIParams.input,
+          output: result.output,
+          latency,
+          baseURL: this.baseURL ?? '',
+          params: body,
+          httpStatus: 200,
+          usage: {
+            inputTokens: result.usage?.input_tokens ?? 0,
+            outputTokens: result.usage?.output_tokens ?? 0,
+            reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+            cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
+          },
+          captureImmediate: posthogCaptureImmediate
+        });
+        return result;
+      }, async error => {
+        await sendEventToPosthog({
+          client: this.phClient,
+          distinctId: posthogDistinctId ?? traceId,
+          traceId,
+          model: openAIParams.model,
+          provider: 'openai',
+          input: openAIParams.input,
+          output: [],
+          latency: 0,
+          baseURL: this.baseURL ?? '',
+          params: body,
+          httpStatus: error?.status ? error.status : 500,
+          usage: {
+            inputTokens: 0,
+            outputTokens: 0
+          },
+          isError: true,
+          error: JSON.stringify(error),
+          captureImmediate: posthogCaptureImmediate
+        });
+        throw error;
+      });
+      return wrappedPromise;
+    } finally {
+      // Restore our wrapped create method
+      originalSelf.create = tempCreate;
+    }
+  }
+}
 
 class PostHogAzureOpenAI extends OpenAIOrignal.AzureOpenAI {
   constructor(config) {
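
The `parse` method above temporarily rebinds `this.create` to the superclass implementation before calling `super.parse`. The apparent intent (an inference, not stated in the diff) is that the upstream SDK's `parse` helper goes back through `this.create` internally, so without the swap a single `parse` call would also hit the wrapped `create` and capture a duplicate PostHog event. A stripped-down illustration of the pattern, using hypothetical `Base`/`Wrapped` classes rather than the package's own:

// Hypothetical classes illustrating the create-swap used in parse(); not package code.
class Base {
  create() {
    return 'raw result';
  }
  parse() {
    // Like the SDK helper, parse() routes back through this.create().
    return this.create();
  }
}

class Wrapped extends Base {
  create() {
    console.log('capture analytics event'); // stands in for sendEventToPosthog
    return super.create();
  }
  parse() {
    const originalCreate = super.create.bind(this);
    const tempCreate = this.create;   // remember the wrapped method
    this.create = originalCreate;     // bypass it while super.parse() runs
    try {
      return super.parse();           // no duplicate capture from create()
    } finally {
      this.create = tempCreate;       // restore the wrapper afterwards
    }
  }
}

new Wrapped().parse(); // returns 'raw result' without logging a capture
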
@@ -408,23 +621,19 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
     const parentPromise = super.create(openAIParams, options);
     if (openAIParams.stream) {
       return parentPromise.then(value => {
-        let accumulatedContent = '';
-        let usage = {
-          inputTokens: 0,
-          outputTokens: 0
-        };
-        let model = openAIParams.model;
         if ('tee' in value) {
           const [stream1, stream2] = value.tee();
           (async () => {
             try {
+              let accumulatedContent = '';
+              let usage = {
+                inputTokens: 0,
+                outputTokens: 0
+              };
               for await (const chunk of stream1) {
                 const delta = chunk?.choices?.[0]?.delta?.content ?? '';
                 accumulatedContent += delta;
                 if (chunk.usage) {
-                  if (chunk.model != model) {
-                    model = chunk.model;
-                  }
                   usage = {
                     inputTokens: chunk.usage.prompt_tokens ?? 0,
                     outputTokens: chunk.usage.completion_tokens ?? 0,
@@ -438,7 +647,7 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
                 client: this.phClient,
                 distinctId: posthogDistinctId ?? traceId,
                 traceId,
-                model,
+                model: openAIParams.model,
                 provider: 'azure',
                 input: openAIParams.messages,
                 output: [{
@@ -453,15 +662,14 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
                 captureImmediate: posthogCaptureImmediate
               });
             } catch (error) {
-              // error handling
               await sendEventToPosthog({
                 client: this.phClient,
                 distinctId: posthogDistinctId ?? traceId,
                 traceId,
-                model,
+                model: openAIParams.model,
                 provider: 'azure',
                 input: openAIParams.messages,
-                output: JSON.stringify(error),
+                output: [],
                 latency: 0,
                 baseURL: this.baseURL ?? '',
                 params: body,
@@ -485,15 +693,11 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
       const wrappedPromise = parentPromise.then(async result => {
         if ('choices' in result) {
           const latency = (Date.now() - startTime) / 1000;
-          let model = openAIParams.model;
-          if (result.model != model) {
-            model = result.model;
-          }
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId ?? traceId,
             traceId,
-            model,
+            model: openAIParams.model,
             provider: 'azure',
             input: openAIParams.messages,
             output: formatResponseOpenAI(result),
@@ -538,6 +742,208 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
     }
   }
 }
+class WrappedResponses extends OpenAIOrignal.AzureOpenAI.Responses {
+  constructor(client, phClient) {
+    super(client);
+    this.phClient = phClient;
+  }
+  // --- Implementation Signature
+  create(body, options) {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body;
+    const traceId = posthogTraceId ?? uuid.v4();
+    const startTime = Date.now();
+    const parentPromise = super.create(openAIParams, options);
+    if (openAIParams.stream) {
+      return parentPromise.then(value => {
+        if ('tee' in value && typeof value.tee === 'function') {
+          const [stream1, stream2] = value.tee();
+          (async () => {
+            try {
+              let finalContent = [];
+              let usage = {
+                inputTokens: 0,
+                outputTokens: 0
+              };
+              for await (const chunk of stream1) {
+                if (chunk.type === 'response.completed' && 'response' in chunk && chunk.response?.output && chunk.response.output.length > 0) {
+                  finalContent = chunk.response.output;
+                }
+                if ('usage' in chunk && chunk.usage) {
+                  usage = {
+                    inputTokens: chunk.usage.input_tokens ?? 0,
+                    outputTokens: chunk.usage.output_tokens ?? 0,
+                    reasoningTokens: chunk.usage.output_tokens_details?.reasoning_tokens ?? 0,
+                    cacheReadInputTokens: chunk.usage.input_tokens_details?.cached_tokens ?? 0
+                  };
+                }
+              }
+              const latency = (Date.now() - startTime) / 1000;
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId ?? traceId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'azure',
+                input: openAIParams.input,
+                output: finalContent,
+                latency,
+                baseURL: this.baseURL ?? '',
+                params: body,
+                httpStatus: 200,
+                usage,
+                captureImmediate: posthogCaptureImmediate
+              });
+            } catch (error) {
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId ?? traceId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'azure',
+                input: openAIParams.input,
+                output: [],
+                latency: 0,
+                baseURL: this.baseURL ?? '',
+                params: body,
+                httpStatus: error?.status ? error.status : 500,
+                usage: {
+                  inputTokens: 0,
+                  outputTokens: 0
+                },
+                isError: true,
+                error: JSON.stringify(error),
+                captureImmediate: posthogCaptureImmediate
+              });
+            }
+          })();
+          return stream2;
+        }
+        return value;
+      });
+    } else {
+      const wrappedPromise = parentPromise.then(async result => {
+        if ('output' in result) {
+          const latency = (Date.now() - startTime) / 1000;
+          await sendEventToPosthog({
+            client: this.phClient,
+            distinctId: posthogDistinctId ?? traceId,
+            traceId,
+            model: openAIParams.model,
+            provider: 'azure',
+            input: openAIParams.input,
+            output: result.output,
+            latency,
+            baseURL: this.baseURL ?? '',
+            params: body,
+            httpStatus: 200,
+            usage: {
+              inputTokens: result.usage?.input_tokens ?? 0,
+              outputTokens: result.usage?.output_tokens ?? 0,
+              reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+              cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
+            },
+            captureImmediate: posthogCaptureImmediate
+          });
+        }
+        return result;
+      }, async error => {
+        await sendEventToPosthog({
+          client: this.phClient,
+          distinctId: posthogDistinctId ?? traceId,
+          traceId,
+          model: openAIParams.model,
+          provider: 'azure',
+          input: openAIParams.input,
+          output: [],
+          latency: 0,
+          baseURL: this.baseURL ?? '',
+          params: body,
+          httpStatus: error?.status ? error.status : 500,
+          usage: {
+            inputTokens: 0,
+            outputTokens: 0
+          },
+          isError: true,
+          error: JSON.stringify(error),
+          captureImmediate: posthogCaptureImmediate
+        });
+        throw error;
+      });
+      return wrappedPromise;
+    }
+  }
+  parse(body, options) {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body;
+    const traceId = posthogTraceId ?? uuid.v4();
+    const startTime = Date.now();
+    const parentPromise = super.parse(openAIParams, options);
+    const wrappedPromise = parentPromise.then(async result => {
+      const latency = (Date.now() - startTime) / 1000;
+      await sendEventToPosthog({
+        client: this.phClient,
+        distinctId: posthogDistinctId ?? traceId,
+        traceId,
+        model: openAIParams.model,
+        provider: 'azure',
+        input: openAIParams.input,
+        output: result.output,
+        latency,
+        baseURL: this.baseURL ?? '',
+        params: body,
+        httpStatus: 200,
+        usage: {
+          inputTokens: result.usage?.input_tokens ?? 0,
+          outputTokens: result.usage?.output_tokens ?? 0,
+          reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+          cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
+        },
+        captureImmediate: posthogCaptureImmediate
+      });
+      return result;
+    }, async error => {
+      await sendEventToPosthog({
+        client: this.phClient,
+        distinctId: posthogDistinctId ?? traceId,
+        traceId,
+        model: openAIParams.model,
+        provider: 'azure',
+        input: openAIParams.input,
+        output: [],
+        latency: 0,
+        baseURL: this.baseURL ?? '',
+        params: body,
+        httpStatus: error?.status ? error.status : 500,
+        usage: {
+          inputTokens: 0,
+          outputTokens: 0
+        },
+        isError: true,
+        error: JSON.stringify(error),
+        captureImmediate: posthogCaptureImmediate
+      });
+      throw error;
+    });
+    return wrappedPromise;
+  }
+}
 
 const mapVercelParams = params => {
   return {
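
Both Responses wrappers translate the Responses API usage object into the same event fields (`inputTokens`, `outputTokens`, `reasoningTokens`, `cacheReadInputTokens`). Extracted as a standalone helper purely for illustration (the package inlines this mapping at each call site rather than exporting it):

// Illustration only: the package inlines this mapping; no such helper is exported.
function mapResponsesUsage(usage) {
  return {
    inputTokens: usage?.input_tokens ?? 0,
    outputTokens: usage?.output_tokens ?? 0,
    reasoningTokens: usage?.output_tokens_details?.reasoning_tokens ?? 0,
    cacheReadInputTokens: usage?.input_tokens_details?.cached_tokens ?? 0
  };
}

// e.g. mapResponsesUsage(result.usage) in the non-streaming path,
// or mapResponsesUsage(chunk.response.usage) while iterating a stream.
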