@posthog/ai 6.1.2 → 6.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -22,6 +22,7 @@ interface CostOverride {
  declare const Chat: typeof OpenAI.Chat;
  declare const Completions: typeof OpenAI.Chat.Completions;
  declare const Responses: typeof OpenAI.Responses;
+ declare const Embeddings: typeof OpenAI.Embeddings;
  type ChatCompletion = OpenAI.ChatCompletion;
  type ChatCompletionChunk = OpenAI.ChatCompletionChunk;
  type ChatCompletionCreateParamsBase = OpenAI.Chat.Completions.ChatCompletionCreateParams;
@@ -30,6 +31,8 @@ type ChatCompletionCreateParamsStreaming = OpenAI.Chat.Completions.ChatCompletio
  type ResponsesCreateParamsBase = OpenAI.Responses.ResponseCreateParams;
  type ResponsesCreateParamsNonStreaming = OpenAI.Responses.ResponseCreateParamsNonStreaming;
  type ResponsesCreateParamsStreaming = OpenAI.Responses.ResponseCreateParamsStreaming;
+ type CreateEmbeddingResponse = OpenAI.CreateEmbeddingResponse;
+ type EmbeddingCreateParams = OpenAI.EmbeddingCreateParams;
  interface MonitoringOpenAIConfig extends ClientOptions {
  apiKey: string;
  posthog: PostHog;
@@ -40,6 +43,7 @@ declare class PostHogOpenAI extends OpenAI {
  private readonly phClient;
  chat: WrappedChat;
  responses: WrappedResponses;
+ embeddings: WrappedEmbeddings;
  constructor(config: MonitoringOpenAIConfig);
  }
  declare class WrappedChat extends Chat {
@@ -48,6 +52,7 @@ declare class WrappedChat extends Chat {
  }
  declare class WrappedCompletions extends Completions {
  private readonly phClient;
+ private readonly baseURL;
  constructor(client: OpenAI, phClient: PostHog);
  create(body: ChatCompletionCreateParamsNonStreaming & MonitoringParams, options?: RequestOptions): APIPromise<ChatCompletion>;
  create(body: ChatCompletionCreateParamsStreaming & MonitoringParams, options?: RequestOptions): APIPromise<Stream<ChatCompletionChunk>>;
@@ -55,11 +60,18 @@ declare class WrappedCompletions extends Completions {
  }
  declare class WrappedResponses extends Responses {
  private readonly phClient;
+ private readonly baseURL;
  constructor(client: OpenAI, phClient: PostHog);
  create(body: ResponsesCreateParamsNonStreaming & MonitoringParams, options?: RequestOptions): APIPromise<OpenAI.Responses.Response>;
  create(body: ResponsesCreateParamsStreaming & MonitoringParams, options?: RequestOptions): APIPromise<Stream<OpenAI.Responses.ResponseStreamEvent>>;
  create(body: ResponsesCreateParamsBase & MonitoringParams, options?: RequestOptions): APIPromise<OpenAI.Responses.Response | Stream<OpenAI.Responses.ResponseStreamEvent>>;
  parse<Params extends ResponsesCreateParamsBase, ParsedT = any>(body: Params & MonitoringParams, options?: RequestOptions): APIPromise<ParsedResponse<ParsedT>>;
  }
+ declare class WrappedEmbeddings extends Embeddings {
+ private readonly phClient;
+ private readonly baseURL;
+ constructor(client: OpenAI, phClient: PostHog);
+ create(body: EmbeddingCreateParams & MonitoringParams, options?: RequestOptions): APIPromise<CreateEmbeddingResponse>;
+ }

- export { PostHogOpenAI as OpenAI, PostHogOpenAI, WrappedChat, WrappedCompletions, WrappedResponses, PostHogOpenAI as default };
+ export { PostHogOpenAI as OpenAI, PostHogOpenAI, WrappedChat, WrappedCompletions, WrappedEmbeddings, WrappedResponses, PostHogOpenAI as default };
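For orientation, here is a minimal usage sketch of the new embeddings surface declared above. It assumes the wrapper is consumed through the package's exported OpenAI class together with a posthog-node client (as MonitoringOpenAIConfig requires); the model name, keys, and identifiers are placeholders, and posthogDistinctId/posthogTraceId are taken from the implementation further down in this diff.

    import { PostHog } from 'posthog-node';
    import { OpenAI } from '@posthog/ai';

    const posthog = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' });
    const openai = new OpenAI({ apiKey: '<openai_api_key>', posthog });

    // Embedding requests now go through WrappedEmbeddings and are captured as $ai_embedding events.
    const res = await openai.embeddings.create({
      model: 'text-embedding-3-small',
      input: 'PostHog LLM observability',
      posthogDistinctId: 'user_123', // optional; capture falls back to the trace id
      posthogTraceId: 'trace_456',   // optional; a UUID is generated when omitted
    });

    console.log(res.data[0].embedding.length);
    await posthog.shutdown();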
@@ -2,6 +2,8 @@ import { OpenAI } from 'openai';
  import { v4 } from 'uuid';
  import { Buffer } from 'buffer';

+ var version = "6.3.0";
+
  const STRING_FORMAT = 'utf8';
  const getModelParams = params => {
  if (!params) {
@@ -122,6 +124,11 @@ const extractAvailableToolCalls = (provider, params) => {
  return null;
  }
  };
+ let AIEvent = /*#__PURE__*/function (AIEvent) {
+ AIEvent["Generation"] = "$ai_generation";
+ AIEvent["Embedding"] = "$ai_embedding";
+ return AIEvent;
+ }({});
  function sanitizeValues(obj) {
  if (obj === undefined || obj === null) {
  return obj;
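The transpiled AIEvent helper above corresponds to a plain string enum in the TypeScript source (a sketch; the source file itself is not part of this diff):

    // Equivalent TypeScript declaration for the compiled AIEvent IIFE above.
    enum AIEvent {
      Generation = '$ai_generation',
      Embedding = '$ai_embedding',
    }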
@@ -138,6 +145,7 @@ function sanitizeValues(obj) {
  }
  const sendEventToPosthog = async ({
  client,
+ eventType = AIEvent.Generation,
  distinctId,
  traceId,
  model,
@@ -190,6 +198,8 @@ const sendEventToPosthog = async ({
  } : {})
  };
  const properties = {
+ $ai_lib: 'posthog-ai',
+ $ai_lib_version: version,
  $ai_provider: params.posthogProviderOverride ?? provider,
  $ai_model: params.posthogModelOverride ?? model,
  $ai_model_parameters: getModelParams(params),
@@ -197,7 +207,9 @@ const sendEventToPosthog = async ({
  $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),
  $ai_http_status: httpStatus,
  $ai_input_tokens: usage.inputTokens ?? 0,
- $ai_output_tokens: usage.outputTokens ?? 0,
+ ...(usage.outputTokens !== undefined ? {
+ $ai_output_tokens: usage.outputTokens
+ } : {}),
  ...additionalTokenValues,
  $ai_latency: latency,
  $ai_trace_id: traceId,
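One behavioural consequence of the conditional spread above: when a wrapper reports usage without outputTokens (the new embeddings path only reports prompt tokens), the $ai_output_tokens property is omitted instead of being captured as 0. A small sketch with an assumed usage object:

    // Assumed usage object, shaped like the one the embeddings wrapper builds below.
    const usage: { inputTokens: number; outputTokens?: number } = { inputTokens: 12 };

    const properties = {
      $ai_input_tokens: usage.inputTokens ?? 0,
      ...(usage.outputTokens !== undefined ? { $ai_output_tokens: usage.outputTokens } : {}),
    };
    // properties is { $ai_input_tokens: 12 }; $ai_output_tokens is absent rather than 0.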
@@ -214,7 +226,7 @@ const sendEventToPosthog = async ({
  };
  const event = {
  distinctId: distinctId ?? traceId,
- event: '$ai_generation',
+ event: eventType,
  properties,
  groups: params.posthogGroups
  };
@@ -352,6 +364,7 @@ const sanitizeOpenAIResponse = data => {
  const Chat = OpenAI.Chat;
  const Completions = Chat.Completions;
  const Responses = OpenAI.Responses;
+ const Embeddings = OpenAI.Embeddings;
  class PostHogOpenAI extends OpenAI {
  constructor(config) {
  const {
@@ -362,6 +375,7 @@ class PostHogOpenAI extends OpenAI {
  this.phClient = posthog;
  this.chat = new WrappedChat(this, this.phClient);
  this.responses = new WrappedResponses(this, this.phClient);
+ this.embeddings = new WrappedEmbeddings(this, this.phClient);
  }
  }
  class WrappedChat extends Chat {
@@ -374,6 +388,7 @@ class WrappedCompletions extends Completions {
  constructor(client, phClient) {
  super(client);
  this.phClient = phClient;
+ this.baseURL = client.baseURL;
  }

  // --- Overload #1: Non-streaming
@@ -387,10 +402,6 @@ class WrappedCompletions extends Completions {
  const {
  posthogDistinctId,
  posthogTraceId,
- posthogProperties,
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- posthogPrivacyMode = false,
- posthogGroups,
  posthogCaptureImmediate,
  ...openAIParams
  } = body;
@@ -507,7 +518,7 @@ class WrappedCompletions extends Completions {
  input: sanitizeOpenAI(openAIParams.messages),
  output: formattedOutput,
  latency,
- baseURL: this.baseURL ?? '',
+ baseURL: this.baseURL,
  params: body,
  httpStatus: 200,
  usage,
@@ -525,7 +536,7 @@ class WrappedCompletions extends Completions {
  input: sanitizeOpenAI(openAIParams.messages),
  output: [],
  latency: 0,
- baseURL: this.baseURL ?? '',
+ baseURL: this.baseURL,
  params: body,
  httpStatus,
  usage: {
@@ -558,7 +569,7 @@ class WrappedCompletions extends Completions {
  input: sanitizeOpenAI(openAIParams.messages),
  output: formatResponseOpenAI(result),
  latency,
- baseURL: this.baseURL ?? '',
+ baseURL: this.baseURL,
  params: body,
  httpStatus: 200,
  usage: {
@@ -583,7 +594,7 @@ class WrappedCompletions extends Completions {
  input: sanitizeOpenAI(openAIParams.messages),
  output: [],
  latency: 0,
- baseURL: this.baseURL ?? '',
+ baseURL: this.baseURL,
  params: body,
  httpStatus,
  usage: {
@@ -604,6 +615,7 @@ class WrappedResponses extends Responses {
  constructor(client, phClient) {
  super(client);
  this.phClient = phClient;
+ this.baseURL = client.baseURL;
  }

  // --- Overload #1: Non-streaming
@@ -617,10 +629,6 @@ class WrappedResponses extends Responses {
  const {
  posthogDistinctId,
  posthogTraceId,
- posthogProperties,
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- posthogPrivacyMode = false,
- posthogGroups,
  posthogCaptureImmediate,
  ...openAIParams
  } = body;
@@ -663,7 +671,7 @@ class WrappedResponses extends Responses {
  input: sanitizeOpenAIResponse(openAIParams.input),
  output: finalContent,
  latency,
- baseURL: this.baseURL ?? '',
+ baseURL: this.baseURL,
  params: body,
  httpStatus: 200,
  usage,
@@ -682,7 +690,7 @@ class WrappedResponses extends Responses {
  input: sanitizeOpenAIResponse(openAIParams.input),
  output: [],
  latency: 0,
- baseURL: this.baseURL ?? '',
+ baseURL: this.baseURL,
  params: body,
  httpStatus,
  usage: {
@@ -716,7 +724,7 @@ class WrappedResponses extends Responses {
  output: result.output
  }),
  latency,
- baseURL: this.baseURL ?? '',
+ baseURL: this.baseURL,
  params: body,
  httpStatus: 200,
  usage: {
@@ -742,7 +750,7 @@ class WrappedResponses extends Responses {
  input: sanitizeOpenAIResponse(openAIParams.input),
  output: [],
  latency: 0,
- baseURL: this.baseURL ?? '',
+ baseURL: this.baseURL,
  params: body,
  httpStatus,
  usage: {
@@ -762,10 +770,6 @@ class WrappedResponses extends Responses {
  const {
  posthogDistinctId,
  posthogTraceId,
- posthogProperties,
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- posthogPrivacyMode = false,
- posthogGroups,
  posthogCaptureImmediate,
  ...openAIParams
  } = body;
@@ -791,7 +795,7 @@ class WrappedResponses extends Responses {
  input: sanitizeOpenAIResponse(openAIParams.input),
  output: result.output,
  latency,
- baseURL: this.baseURL ?? '',
+ baseURL: this.baseURL,
  params: body,
  httpStatus: 200,
  usage: {
@@ -815,7 +819,7 @@ class WrappedResponses extends Responses {
  input: sanitizeOpenAIResponse(openAIParams.input),
  output: [],
  latency: 0,
- baseURL: this.baseURL ?? '',
+ baseURL: this.baseURL,
  params: body,
  httpStatus,
  usage: {
@@ -835,6 +839,73 @@ class WrappedResponses extends Responses {
  }
  }
  }
+ class WrappedEmbeddings extends Embeddings {
+ constructor(client, phClient) {
+ super(client);
+ this.phClient = phClient;
+ this.baseURL = client.baseURL;
+ }
+ create(body, options) {
+ const {
+ posthogDistinctId,
+ posthogTraceId,
+ posthogPrivacyMode = false,
+ posthogCaptureImmediate,
+ ...openAIParams
+ } = body;
+ const traceId = posthogTraceId ?? v4();
+ const startTime = Date.now();
+ const parentPromise = super.create(openAIParams, options);
+ const wrappedPromise = parentPromise.then(async result => {
+ const latency = (Date.now() - startTime) / 1000;
+ await sendEventToPosthog({
+ client: this.phClient,
+ eventType: AIEvent.Embedding,
+ distinctId: posthogDistinctId,
+ traceId,
+ model: openAIParams.model,
+ provider: 'openai',
+ input: withPrivacyMode(this.phClient, posthogPrivacyMode, openAIParams.input),
+ output: null,
+ // Embeddings don't have output content
+ latency,
+ baseURL: this.baseURL,
+ params: body,
+ httpStatus: 200,
+ usage: {
+ inputTokens: result.usage?.prompt_tokens ?? 0
+ },
+ captureImmediate: posthogCaptureImmediate
+ });
+ return result;
+ }, async error => {
+ const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
+ await sendEventToPosthog({
+ client: this.phClient,
+ eventType: AIEvent.Embedding,
+ distinctId: posthogDistinctId,
+ traceId,
+ model: openAIParams.model,
+ provider: 'openai',
+ input: withPrivacyMode(this.phClient, posthogPrivacyMode, openAIParams.input),
+ output: null,
+ // Embeddings don't have output content
+ latency: 0,
+ baseURL: this.baseURL,
+ params: body,
+ httpStatus,
+ usage: {
+ inputTokens: 0
+ },
+ isError: true,
+ error: JSON.stringify(error),
+ captureImmediate: posthogCaptureImmediate
+ });
+ throw error;
+ });
+ return wrappedPromise;
+ }
+ }

- export { PostHogOpenAI as OpenAI, PostHogOpenAI, WrappedChat, WrappedCompletions, WrappedResponses, PostHogOpenAI as default };
+ export { PostHogOpenAI as OpenAI, PostHogOpenAI, WrappedChat, WrappedCompletions, WrappedEmbeddings, WrappedResponses, PostHogOpenAI as default };
  //# sourceMappingURL=index.mjs.map
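Putting the pieces together, a successful call through the new embeddings wrapper would capture an event roughly shaped as below. This is a sketch assembled only from the properties visible in this diff; the values are illustrative, other standard $ai_* properties are omitted, and posthogPrivacyMode plus the posthog* overrides can change what is recorded.

    // Illustrative capture payload for a successful embeddings request.
    const capturedEvent = {
      distinctId: 'user_123',  // distinctId ?? traceId
      event: '$ai_embedding',  // AIEvent.Embedding instead of the default $ai_generation
      properties: {
        $ai_lib: 'posthog-ai',
        $ai_lib_version: '6.3.0',
        $ai_provider: 'openai',
        $ai_model: 'text-embedding-3-small',
        $ai_http_status: 200,
        $ai_input_tokens: 4,
        // no $ai_output_tokens: the embeddings wrapper reports only prompt_tokens
        $ai_latency: 0.21,
        $ai_trace_id: 'trace_456',
      },
    };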