@posthog/ai 5.1.0 → 5.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/index.d.ts CHANGED
@@ -1,4 +1,4 @@
- import OpenAIOrignal, { APIPromise, ClientOptions as ClientOptions$1, AzureOpenAI } from 'openai';
+ import OpenAIOrignal, { OpenAI, APIPromise, ClientOptions as ClientOptions$1, AzureOpenAI } from 'openai';
  import { PostHog } from 'posthog-node';
  import { Stream } from 'openai/streaming';
  import { ParsedResponse } from 'openai/resources/responses/responses';
@@ -31,43 +31,46 @@ interface CostOverride {
  outputCost: number;
  }

- type ChatCompletion$1 = OpenAIOrignal.ChatCompletion;
- type ChatCompletionChunk$1 = OpenAIOrignal.ChatCompletionChunk;
- type ChatCompletionCreateParamsBase$1 = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams;
- type ChatCompletionCreateParamsNonStreaming$1 = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming;
- type ChatCompletionCreateParamsStreaming$1 = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming;
- type ResponsesCreateParamsBase = OpenAIOrignal.Responses.ResponseCreateParams;
- type ResponsesCreateParamsNonStreaming = OpenAIOrignal.Responses.ResponseCreateParamsNonStreaming;
- type ResponsesCreateParamsStreaming = OpenAIOrignal.Responses.ResponseCreateParamsStreaming;
+ declare const Chat: typeof OpenAI.Chat;
+ declare const Completions: typeof OpenAI.Chat.Completions;
+ declare const Responses: typeof OpenAI.Responses;
+ type ChatCompletion$1 = OpenAI.ChatCompletion;
+ type ChatCompletionChunk$1 = OpenAI.ChatCompletionChunk;
+ type ChatCompletionCreateParamsBase$1 = OpenAI.Chat.Completions.ChatCompletionCreateParams;
+ type ChatCompletionCreateParamsNonStreaming$1 = OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming;
+ type ChatCompletionCreateParamsStreaming$1 = OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming;
+ type ResponsesCreateParamsBase = OpenAI.Responses.ResponseCreateParams;
+ type ResponsesCreateParamsNonStreaming = OpenAI.Responses.ResponseCreateParamsNonStreaming;
+ type ResponsesCreateParamsStreaming = OpenAI.Responses.ResponseCreateParamsStreaming;
  interface MonitoringOpenAIConfig$1 extends ClientOptions$1 {
  apiKey: string;
  posthog: PostHog;
  baseURL?: string;
  }
  type RequestOptions$1 = Record<string, any>;
- declare class PostHogOpenAI extends OpenAIOrignal {
+ declare class PostHogOpenAI extends OpenAI {
  private readonly phClient;
  chat: WrappedChat$1;
  responses: WrappedResponses;
  constructor(config: MonitoringOpenAIConfig$1);
  }
- declare class WrappedChat$1 extends OpenAIOrignal.Chat {
+ declare class WrappedChat$1 extends Chat {
  constructor(parentClient: PostHogOpenAI, phClient: PostHog);
  completions: WrappedCompletions$1;
  }
- declare class WrappedCompletions$1 extends OpenAIOrignal.Chat.Completions {
+ declare class WrappedCompletions$1 extends Completions {
  private readonly phClient;
- constructor(client: OpenAIOrignal, phClient: PostHog);
+ constructor(client: OpenAI, phClient: PostHog);
  create(body: ChatCompletionCreateParamsNonStreaming$1 & MonitoringParams, options?: RequestOptions$1): APIPromise<ChatCompletion$1>;
  create(body: ChatCompletionCreateParamsStreaming$1 & MonitoringParams, options?: RequestOptions$1): APIPromise<Stream<ChatCompletionChunk$1>>;
  create(body: ChatCompletionCreateParamsBase$1 & MonitoringParams, options?: RequestOptions$1): APIPromise<ChatCompletion$1 | Stream<ChatCompletionChunk$1>>;
  }
- declare class WrappedResponses extends OpenAIOrignal.Responses {
+ declare class WrappedResponses extends Responses {
  private readonly phClient;
- constructor(client: OpenAIOrignal, phClient: PostHog);
- create(body: ResponsesCreateParamsNonStreaming & MonitoringParams, options?: RequestOptions$1): APIPromise<OpenAIOrignal.Responses.Response>;
- create(body: ResponsesCreateParamsStreaming & MonitoringParams, options?: RequestOptions$1): APIPromise<Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>;
- create(body: ResponsesCreateParamsBase & MonitoringParams, options?: RequestOptions$1): APIPromise<OpenAIOrignal.Responses.Response | Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>;
+ constructor(client: OpenAI, phClient: PostHog);
+ create(body: ResponsesCreateParamsNonStreaming & MonitoringParams, options?: RequestOptions$1): APIPromise<OpenAI.Responses.Response>;
+ create(body: ResponsesCreateParamsStreaming & MonitoringParams, options?: RequestOptions$1): APIPromise<Stream<OpenAI.Responses.ResponseStreamEvent>>;
+ create(body: ResponsesCreateParamsBase & MonitoringParams, options?: RequestOptions$1): APIPromise<OpenAI.Responses.Response | Stream<OpenAI.Responses.ResponseStreamEvent>>;
  parse<Params extends ResponsesCreateParamsBase, ParsedT = any>(body: Params & MonitoringParams, options?: RequestOptions$1): APIPromise<ParsedResponse<ParsedT>>;
  }
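The index.d.ts changes above swap the wrapper's base types from the OpenAIOrignal default-export namespace to the named OpenAI export and hoist Chat, Completions and Responses as typed constants; the public surface (MonitoringOpenAIConfig$1 and the create overloads taking MonitoringParams) is unchanged. A minimal usage sketch against these declarations, assuming the wrapped client is re-exported as OpenAI from '@posthog/ai' (as in PostHog's LLM observability docs) and that MonitoringParams carries the posthogDistinctId / posthogTraceId fields referenced in the index.mjs diff below:

// Sketch, not taken from the package: names beyond what the diff shows are assumptions.
import { OpenAI } from '@posthog/ai';
import { PostHog } from 'posthog-node';

const phClient = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' });

// MonitoringOpenAIConfig: the usual OpenAI ClientOptions plus a posthog-node client.
const client = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY ?? '',
  posthog: phClient,
});

// create() accepts ChatCompletionCreateParams intersected with MonitoringParams.
const completion = await client.chat.completions.create({
  model: 'gpt-4o-mini',
  messages: [{ role: 'user', content: 'Hello' }],
  posthogDistinctId: 'user_123', // optional in 5.2.x; see the distinctId change in index.mjs below
  posthogTraceId: 'trace_abc',
});

await phClient.shutdown();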
 
package/lib/index.mjs CHANGED
@@ -1,4 +1,4 @@
- import OpenAIOrignal, { AzureOpenAI } from 'openai';
+ import { OpenAI, AzureOpenAI } from 'openai';
  import * as uuid from 'uuid';
  import { v4 } from 'uuid';
  import { Buffer } from 'buffer';
@@ -181,7 +181,10 @@ const sendEventToPosthog = async ({
  }
  };

- class PostHogOpenAI extends OpenAIOrignal {
+ const Chat = OpenAI.Chat;
+ const Completions = Chat.Completions;
+ const Responses = OpenAI.Responses;
+ class PostHogOpenAI extends OpenAI {
  constructor(config) {
  const {
  posthog,
@@ -193,13 +196,13 @@ class PostHogOpenAI extends OpenAIOrignal {
  this.responses = new WrappedResponses$1(this, this.phClient);
  }
  }
- class WrappedChat$1 extends OpenAIOrignal.Chat {
+ class WrappedChat$1 extends Chat {
  constructor(parentClient, phClient) {
  super(parentClient);
  this.completions = new WrappedCompletions$1(parentClient, phClient);
  }
  }
- class WrappedCompletions$1 extends OpenAIOrignal.Chat.Completions {
+ class WrappedCompletions$1 extends Completions {
  constructor(client, phClient) {
  super(client);
  this.phClient = phClient;
@@ -245,7 +248,7 @@ class WrappedCompletions$1 extends OpenAIOrignal.Chat.Completions {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'openai',
@@ -264,7 +267,7 @@ class WrappedCompletions$1 extends OpenAIOrignal.Chat.Completions {
  } catch (error) {
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'openai',
@@ -295,7 +298,7 @@ class WrappedCompletions$1 extends OpenAIOrignal.Chat.Completions {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'openai',
@@ -318,7 +321,7 @@ class WrappedCompletions$1 extends OpenAIOrignal.Chat.Completions {
  }, async error => {
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'openai',
@@ -342,7 +345,7 @@ class WrappedCompletions$1 extends OpenAIOrignal.Chat.Completions {
  }
  }
  }
- class WrappedResponses$1 extends OpenAIOrignal.Responses {
+ class WrappedResponses$1 extends Responses {
  constructor(client, phClient) {
  super(client);
  this.phClient = phClient;
@@ -389,7 +392,7 @@ class WrappedResponses$1 extends OpenAIOrignal.Responses {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'openai',
@@ -405,7 +408,7 @@ class WrappedResponses$1 extends OpenAIOrignal.Responses {
  } catch (error) {
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'openai',
@@ -435,7 +438,7 @@ class WrappedResponses$1 extends OpenAIOrignal.Responses {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'openai',
@@ -458,7 +461,7 @@ class WrappedResponses$1 extends OpenAIOrignal.Responses {
  }, async error => {
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'openai',
@@ -505,7 +508,7 @@ class WrappedResponses$1 extends OpenAIOrignal.Responses {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'openai',
@@ -527,7 +530,7 @@ class WrappedResponses$1 extends OpenAIOrignal.Responses {
  }, async error => {
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'openai',
@@ -618,7 +621,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'azure',
@@ -637,7 +640,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
  } catch (error) {
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'azure',
@@ -668,7 +671,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'azure',
@@ -691,7 +694,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
  }, async error => {
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'azure',
@@ -762,7 +765,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'azure',
@@ -778,7 +781,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
  } catch (error) {
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'azure',
@@ -808,7 +811,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'azure',
@@ -831,7 +834,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
  }, async error => {
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'azure',
@@ -872,7 +875,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'azure',
@@ -894,7 +897,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
  }, async error => {
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: openAIParams.model,
  provider: 'azure',
@@ -1127,7 +1130,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
  await sendEventToPosthog({
  client: phClient,
  distinctId: options.posthogDistinctId,
- traceId: options.posthogTraceId,
+ traceId: options.posthogTraceId ?? v4(),
  model: modelId,
  provider: provider,
  input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
@@ -1152,7 +1155,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
  await sendEventToPosthog({
  client: phClient,
  distinctId: options.posthogDistinctId,
- traceId: options.posthogTraceId,
+ traceId: options.posthogTraceId ?? v4(),
  model: modelId,
  provider: model.provider,
  input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
@@ -1221,7 +1224,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
  await sendEventToPosthog({
  client: phClient,
  distinctId: options.posthogDistinctId,
- traceId: options.posthogTraceId,
+ traceId: options.posthogTraceId ?? v4(),
  model: modelId,
  provider: provider,
  input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
@@ -1246,7 +1249,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
  await sendEventToPosthog({
  client: phClient,
  distinctId: options.posthogDistinctId,
- traceId: options.posthogTraceId,
+ traceId: options.posthogTraceId ?? v4(),
  model: modelId,
  provider: provider,
  input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
@@ -1274,7 +1277,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
  const middleware = createInstrumentationMiddleware(phClient, model, {
  ...options,
  posthogTraceId: traceId,
- posthogDistinctId: options.posthogDistinctId ?? traceId
+ posthogDistinctId: options.posthogDistinctId
  });
  const wrappedModel = experimental_wrapLanguageModel({
  model,
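The middleware hunks above change two defaults rather than the API: createInstrumentationMiddleware now falls back to a freshly generated v4() trace id when options.posthogTraceId is not supplied, and wrapVercelLanguageModel no longer reuses the trace id as the distinct id when options.posthogDistinctId is omitted. A minimal sketch of the Vercel AI SDK integration, assuming the public entry point is withTracing from '@posthog/ai' (wrapVercelLanguageModel is the internal wrapper shown here) and using @ai-sdk/openai purely as an example model:

// Sketch under the assumptions stated above.
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';
import { withTracing } from '@posthog/ai';
import { PostHog } from 'posthog-node';

const phClient = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' });

// In 5.2.x, omitting posthogTraceId means the middleware generates one via v4(),
// and omitting posthogDistinctId no longer attributes events to the trace id.
const model = withTracing(openai('gpt-4o-mini'), phClient, {
  posthogDistinctId: 'user_123', // optional
  posthogPrivacyMode: false,
});

const { text } = await generateText({ model, prompt: 'Hello' });
await phClient.shutdown();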
@@ -1345,7 +1348,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: anthropicParams.model,
  provider: 'anthropic',
@@ -1365,7 +1368,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
  // error handling
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: anthropicParams.model,
  provider: 'anthropic',
@@ -1396,7 +1399,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: anthropicParams.model,
  provider: 'anthropic',
@@ -1419,7 +1422,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
  }, async error => {
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: anthropicParams.model,
  provider: 'anthropic',
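The Anthropic hunks apply the same distinctId change inside WrappedMessages. A short usage sketch, assuming the wrapped client is exported as Anthropic from '@posthog/ai' (the export name is not shown in this diff):

// Sketch; export name and option fields beyond the diff are assumptions.
import { Anthropic } from '@posthog/ai';
import { PostHog } from 'posthog-node';

const phClient = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' });
const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY ?? '', posthog: phClient });

const message = await anthropic.messages.create({
  model: 'claude-3-5-sonnet-latest',
  max_tokens: 256,
  messages: [{ role: 'user', content: 'Hello' }],
  posthogDistinctId: 'user_123', // optional; omitted events are no longer keyed to the trace id
});

await phClient.shutdown();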
@@ -1476,7 +1479,7 @@ class WrappedModels {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: geminiParams.model,
  provider: 'gemini',
@@ -1497,7 +1500,7 @@ class WrappedModels {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: geminiParams.model,
  provider: 'gemini',
@@ -1551,7 +1554,7 @@ class WrappedModels {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: geminiParams.model,
  provider: 'gemini',
@@ -1571,7 +1574,7 @@ class WrappedModels {
  const latency = (Date.now() - startTime) / 1000;
  await sendEventToPosthog({
  client: this.phClient,
- distinctId: posthogDistinctId ?? traceId,
+ distinctId: posthogDistinctId,
  traceId,
  model: geminiParams.model,
  provider: 'gemini',