@posthog/ai 7.2.2 → 7.3.0

```diff
@@ -2,7 +2,7 @@ import { OpenAI } from 'openai';
 import { Buffer } from 'buffer';
 import { v4 } from 'uuid';
 
-var version = "7.2.2";
+var version = "7.3.0";
 
 // Type guards for safer type checking
 
@@ -641,6 +641,7 @@ class WrappedCompletions extends Completions {
 try {
 const contentBlocks = [];
 let accumulatedContent = '';
+let modelFromResponse;
 let usage = {
 inputTokens: 0,
 outputTokens: 0,
@@ -650,6 +651,10 @@ class WrappedCompletions extends Completions {
 // Map to track in-progress tool calls
 const toolCallsInProgress = new Map();
 for await (const chunk of stream1) {
+// Extract model from chunk (Chat Completions chunks have model field)
+if (!modelFromResponse && chunk.model) {
+modelFromResponse = chunk.model;
+}
 const choice = chunk?.choices?.[0];
 const chunkWebSearchCount = calculateWebSearchCount(chunk);
 if (chunkWebSearchCount > 0 && chunkWebSearchCount > (usage.webSearchCount ?? 0)) {
@@ -743,7 +748,7 @@ class WrappedCompletions extends Completions {
 await sendEventToPosthog({
 client: this.phClient,
 ...posthogParams,
-model: openAIParams.model,
+model: openAIParams.model ?? modelFromResponse,
 provider: 'openai',
 input: sanitizeOpenAI(openAIParams.messages),
 output: formattedOutput,
```
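
In the streaming Chat Completions path, the changes above recover the model name from the stream itself: `modelFromResponse` remembers the first `model` field it sees on a chunk, and the captured event falls back to it whenever `openAIParams.model` is unset. Below is a minimal sketch of that fallback, using stand-in types instead of the real `openai` SDK so it runs on its own; `StubChunk`, `stubStream`, and `captureModel` are illustrative names, not part of `@posthog/ai`.

```ts
// Stand-in for the shape of a Chat Completions streaming chunk.
type StubChunk = { model?: string; choices?: Array<{ delta?: { content?: string } }> };

async function* stubStream(): AsyncGenerator<StubChunk> {
  // Per the comment in the diff, Chat Completions chunks carry a `model` field.
  yield { model: 'gpt-4o-mini-2024-07-18', choices: [{ delta: { content: 'Hel' } }] };
  yield { model: 'gpt-4o-mini-2024-07-18', choices: [{ delta: { content: 'lo' } }] };
}

async function captureModel(requestModel?: string): Promise<string | undefined> {
  let modelFromResponse: string | undefined;
  for await (const chunk of stubStream()) {
    // Same pattern as the diff: remember the first model the stream reports.
    if (!modelFromResponse && chunk.model) {
      modelFromResponse = chunk.model;
    }
  }
  // The captured event prefers the caller's value, then the stream's.
  return requestModel ?? modelFromResponse;
}

captureModel().then((model) => console.log(model)); // logs the model seen in the stream
```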
```diff
@@ -797,7 +802,7 @@ class WrappedCompletions extends Completions {
 await sendEventToPosthog({
 client: this.phClient,
 ...posthogParams,
-model: openAIParams.model,
+model: openAIParams.model ?? result.model,
 provider: 'openai',
 input: sanitizeOpenAI(openAIParams.messages),
 output: formattedOutput,
@@ -821,7 +826,7 @@ class WrappedCompletions extends Completions {
 await sendEventToPosthog({
 client: this.phClient,
 ...posthogParams,
-model: String(openAIParams.model ?? ''),
+model: openAIParams.model,
 provider: 'openai',
 input: sanitizeOpenAI(openAIParams.messages),
 output: [],
```
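
The non-streaming path applies the same idea through `result.model` on the returned completion, and the error path stops coercing a missing model into an empty string. A small sketch of what dropping that coercion changes; `params` here is a plain stand-in object, not the SDK's request type.

```ts
const params: { model?: string } = {};

const before = String(params.model ?? ''); // "" (the old code reported an empty-string model)
const after = params.model;                // undefined (the new code passes the value through as-is)

console.log(JSON.stringify({ before, after })); // {"before":""} since undefined properties are dropped
```

In short, a failed call with no model configured used to be reported with the literal `''`; it is now reported with whatever the caller supplied, including `undefined`.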
```diff
@@ -870,6 +875,7 @@ class WrappedResponses extends Responses {
 (async () => {
 try {
 let finalContent = [];
+let modelFromResponse;
 let usage = {
 inputTokens: 0,
 outputTokens: 0,
@@ -877,6 +883,10 @@ class WrappedResponses extends Responses {
 };
 for await (const chunk of stream1) {
 if ('response' in chunk && chunk.response) {
+// Extract model from response object in chunk (for stored prompts)
+if (!modelFromResponse && chunk.response.model) {
+modelFromResponse = chunk.response.model;
+}
 const chunkWebSearchCount = calculateWebSearchCount(chunk.response);
 if (chunkWebSearchCount > 0 && chunkWebSearchCount > (usage.webSearchCount ?? 0)) {
 usage.webSearchCount = chunkWebSearchCount;
@@ -900,8 +910,7 @@ class WrappedResponses extends Responses {
 await sendEventToPosthog({
 client: this.phClient,
 ...posthogParams,
-//@ts-expect-error
-model: openAIParams.model,
+model: openAIParams.model ?? modelFromResponse,
 provider: 'openai',
 input: formatOpenAIResponsesInput(openAIParams.input, openAIParams.instructions),
 output: finalContent,
```
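
The Responses API wrapper gets the same treatment: `modelFromResponse` is filled from the `response` object embedded in streaming events, which is where the model shows up when the request uses a stored prompt instead of an explicit `model`, and the `@ts-expect-error` suppressions in front of `model:` are removed. A self-contained sketch of that extraction; the event shapes and names below are simplified stand-ins, not the SDK's actual types.

```ts
// Stand-in for Responses API streaming events: some wrap a full response object.
type StubResponseEvent =
  | { type: 'response.created' | 'response.completed'; response: { model?: string } }
  | { type: 'response.output_text.delta'; delta: string };

async function* stubResponseStream(): AsyncGenerator<StubResponseEvent> {
  yield { type: 'response.created', response: { model: 'gpt-4.1' } };
  yield { type: 'response.output_text.delta', delta: 'Hi' };
  yield { type: 'response.completed', response: { model: 'gpt-4.1' } };
}

async function modelForEvent(requestModel?: string): Promise<string | undefined> {
  let modelFromResponse: string | undefined;
  for await (const chunk of stubResponseStream()) {
    // Mirrors the diff: only events that embed a response object expose the model.
    if ('response' in chunk && chunk.response && !modelFromResponse && chunk.response.model) {
      modelFromResponse = chunk.response.model;
    }
  }
  return requestModel ?? modelFromResponse;
}

modelForEvent().then((model) => console.log(model)); // logs the model taken from the stream
```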
```diff
@@ -923,7 +932,6 @@ class WrappedResponses extends Responses {
 await sendEventToPosthog({
 client: this.phClient,
 ...posthogParams,
-//@ts-expect-error
 model: openAIParams.model,
 provider: 'openai',
 input: formatOpenAIResponsesInput(openAIParams.input, openAIParams.instructions),
@@ -956,8 +964,7 @@ class WrappedResponses extends Responses {
 await sendEventToPosthog({
 client: this.phClient,
 ...posthogParams,
-//@ts-expect-error
-model: openAIParams.model,
+model: openAIParams.model ?? result.model,
 provider: 'openai',
 input: formatOpenAIResponsesInput(openAIParams.input, openAIParams.instructions),
 output: formattedOutput,
@@ -981,7 +988,7 @@ class WrappedResponses extends Responses {
 await sendEventToPosthog({
 client: this.phClient,
 ...posthogParams,
-model: String(openAIParams.model ?? ''),
+model: openAIParams.model,
 provider: 'openai',
 input: formatOpenAIResponsesInput(openAIParams.input, openAIParams.instructions),
 output: [],
@@ -1018,7 +1025,7 @@ class WrappedResponses extends Responses {
 await sendEventToPosthog({
 client: this.phClient,
 ...posthogParams,
-model: String(openAIParams.model ?? ''),
+model: openAIParams.model ?? result.model,
 provider: 'openai',
 input: formatOpenAIResponsesInput(openAIParams.input, openAIParams.instructions),
 output: result.output,
@@ -1039,7 +1046,7 @@ class WrappedResponses extends Responses {
 await sendEventToPosthog({
 client: this.phClient,
 ...posthogParams,
-model: String(openAIParams.model ?? ''),
+model: openAIParams.model,
 provider: 'openai',
 input: formatOpenAIResponsesInput(openAIParams.input, openAIParams.instructions),
 output: [],
@@ -1229,7 +1236,7 @@ class WrappedTranscriptions extends Transcriptions {
 await sendEventToPosthog({
 client: this.phClient,
 ...posthogParams,
-model: String(openAIParams.model ?? ''),
+model: openAIParams.model,
 provider: 'openai',
 input: openAIParams.prompt,
 output: result.text,
@@ -1249,7 +1256,7 @@ class WrappedTranscriptions extends Transcriptions {
 await sendEventToPosthog({
 client: this.phClient,
 ...posthogParams,
-model: String(openAIParams.model ?? ''),
+model: openAIParams.model,
 provider: 'openai',
 input: openAIParams.prompt,
 output: [],
```