@posthog/ai 5.0.1 → 5.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -1
- package/lib/anthropic/index.cjs +4 -4
- package/lib/anthropic/index.cjs.map +1 -1
- package/lib/anthropic/index.mjs +4 -4
- package/lib/anthropic/index.mjs.map +1 -1
- package/lib/gemini/index.cjs +364 -0
- package/lib/gemini/index.cjs.map +1 -0
- package/lib/gemini/index.d.ts +60 -0
- package/lib/gemini/index.mjs +357 -0
- package/lib/gemini/index.mjs.map +1 -0
- package/lib/index.cjs +445 -39
- package/lib/index.cjs.map +1 -1
- package/lib/index.d.ts +23 -11
- package/lib/index.mjs +445 -39
- package/lib/index.mjs.map +1 -1
- package/lib/langchain/index.cjs.map +1 -1
- package/lib/langchain/index.mjs.map +1 -1
- package/lib/openai/index.cjs +226 -4
- package/lib/openai/index.cjs.map +1 -1
- package/lib/openai/index.d.ts +16 -4
- package/lib/openai/index.mjs +226 -5
- package/lib/openai/index.mjs.map +1 -1
- package/lib/vercel/index.cjs +5 -5
- package/lib/vercel/index.cjs.map +1 -1
- package/lib/vercel/index.mjs +5 -5
- package/lib/vercel/index.mjs.map +1 -1
- package/package.json +2 -2
- package/src/anthropic/index.ts +4 -4
- package/src/gemini/index.ts +4 -4
- package/src/openai/azure.ts +287 -33
- package/src/openai/index.ts +280 -6
- package/src/utils.ts +3 -2
- package/src/vercel/middleware.ts +7 -7
- package/tests/gemini.test.ts +31 -0
- package/tests/openai.test.ts +124 -48
package/lib/index.cjs
CHANGED
@@ -217,6 +217,7 @@ class PostHogOpenAI extends OpenAIOrignal__default["default"] {
     super(openAIConfig);
     this.phClient = posthog;
     this.chat = new WrappedChat$1(this, this.phClient);
+    this.responses = new WrappedResponses$1(this, this.phClient);
   }
 }
 class WrappedChat$1 extends OpenAIOrignal__default["default"].Chat {
@@ -271,7 +272,7 @@ class WrappedCompletions$1 extends OpenAIOrignal__default["default"].Chat.Comple
               const latency = (Date.now() - startTime) / 1000;
               await sendEventToPosthog({
                 client: this.phClient,
-                distinctId: posthogDistinctId
+                distinctId: posthogDistinctId,
                 traceId,
                 model: openAIParams.model,
                 provider: 'openai',
@@ -290,7 +291,7 @@ class WrappedCompletions$1 extends OpenAIOrignal__default["default"].Chat.Comple
             } catch (error) {
               await sendEventToPosthog({
                 client: this.phClient,
-                distinctId: posthogDistinctId
+                distinctId: posthogDistinctId,
                 traceId,
                 model: openAIParams.model,
                 provider: 'openai',
@@ -321,7 +322,7 @@ class WrappedCompletions$1 extends OpenAIOrignal__default["default"].Chat.Comple
           const latency = (Date.now() - startTime) / 1000;
           await sendEventToPosthog({
             client: this.phClient,
-            distinctId: posthogDistinctId
+            distinctId: posthogDistinctId,
             traceId,
             model: openAIParams.model,
             provider: 'openai',
@@ -344,7 +345,7 @@ class WrappedCompletions$1 extends OpenAIOrignal__default["default"].Chat.Comple
       }, async error => {
         await sendEventToPosthog({
           client: this.phClient,
-          distinctId: posthogDistinctId
+          distinctId: posthogDistinctId,
           traceId,
           model: openAIParams.model,
           provider: 'openai',
@@ -368,6 +369,218 @@ class WrappedCompletions$1 extends OpenAIOrignal__default["default"].Chat.Comple
     }
   }
 }
+class WrappedResponses$1 extends OpenAIOrignal__default["default"].Responses {
+  constructor(client, phClient) {
+    super(client);
+    this.phClient = phClient;
+  }
+  // --- Implementation Signature
+  create(body, options) {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body;
+    const traceId = posthogTraceId ?? uuid.v4();
+    const startTime = Date.now();
+    const parentPromise = super.create(openAIParams, options);
+    if (openAIParams.stream) {
+      return parentPromise.then(value => {
+        if ('tee' in value && typeof value.tee === 'function') {
+          const [stream1, stream2] = value.tee();
+          (async () => {
+            try {
+              let finalContent = [];
+              let usage = {
+                inputTokens: 0,
+                outputTokens: 0
+              };
+              for await (const chunk of stream1) {
+                if (chunk.type === 'response.completed' && 'response' in chunk && chunk.response?.output && chunk.response.output.length > 0) {
+                  finalContent = chunk.response.output;
+                }
+                if ('response' in chunk && chunk.response?.usage) {
+                  usage = {
+                    inputTokens: chunk.response.usage.input_tokens ?? 0,
+                    outputTokens: chunk.response.usage.output_tokens ?? 0,
+                    reasoningTokens: chunk.response.usage.output_tokens_details?.reasoning_tokens ?? 0,
+                    cacheReadInputTokens: chunk.response.usage.input_tokens_details?.cached_tokens ?? 0
+                  };
+                }
+              }
+              const latency = (Date.now() - startTime) / 1000;
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'openai',
+                input: openAIParams.input,
+                output: finalContent,
+                latency,
+                baseURL: this.baseURL ?? '',
+                params: body,
+                httpStatus: 200,
+                usage,
+                captureImmediate: posthogCaptureImmediate
+              });
+            } catch (error) {
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'openai',
+                input: openAIParams.input,
+                output: [],
+                latency: 0,
+                baseURL: this.baseURL ?? '',
+                params: body,
+                httpStatus: error?.status ? error.status : 500,
+                usage: {
+                  inputTokens: 0,
+                  outputTokens: 0
+                },
+                isError: true,
+                error: JSON.stringify(error),
+                captureImmediate: posthogCaptureImmediate
+              });
+            }
+          })();
+          return stream2;
+        }
+        return value;
+      });
+    } else {
+      const wrappedPromise = parentPromise.then(async result => {
+        if ('output' in result) {
+          const latency = (Date.now() - startTime) / 1000;
+          await sendEventToPosthog({
+            client: this.phClient,
+            distinctId: posthogDistinctId,
+            traceId,
+            model: openAIParams.model,
+            provider: 'openai',
+            input: openAIParams.input,
+            output: result.output,
+            latency,
+            baseURL: this.baseURL ?? '',
+            params: body,
+            httpStatus: 200,
+            usage: {
+              inputTokens: result.usage?.input_tokens ?? 0,
+              outputTokens: result.usage?.output_tokens ?? 0,
+              reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+              cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
+            },
+            captureImmediate: posthogCaptureImmediate
+          });
+        }
+        return result;
+      }, async error => {
+        await sendEventToPosthog({
+          client: this.phClient,
+          distinctId: posthogDistinctId,
+          traceId,
+          model: openAIParams.model,
+          provider: 'openai',
+          input: openAIParams.input,
+          output: [],
+          latency: 0,
+          baseURL: this.baseURL ?? '',
+          params: body,
+          httpStatus: error?.status ? error.status : 500,
+          usage: {
+            inputTokens: 0,
+            outputTokens: 0
+          },
+          isError: true,
+          error: JSON.stringify(error),
+          captureImmediate: posthogCaptureImmediate
+        });
+        throw error;
+      });
+      return wrappedPromise;
+    }
+  }
+  parse(body, options) {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body;
+    const traceId = posthogTraceId ?? uuid.v4();
+    const startTime = Date.now();
+    // Create a temporary instance that bypasses our wrapped create method
+    const originalCreate = super.create.bind(this);
+    const originalSelf = this;
+    const tempCreate = originalSelf.create;
+    originalSelf.create = originalCreate;
+    try {
+      const parentPromise = super.parse(openAIParams, options);
+      const wrappedPromise = parentPromise.then(async result => {
+        const latency = (Date.now() - startTime) / 1000;
+        await sendEventToPosthog({
+          client: this.phClient,
+          distinctId: posthogDistinctId,
+          traceId,
+          model: openAIParams.model,
+          provider: 'openai',
+          input: openAIParams.input,
+          output: result.output,
+          latency,
+          baseURL: this.baseURL ?? '',
+          params: body,
+          httpStatus: 200,
+          usage: {
+            inputTokens: result.usage?.input_tokens ?? 0,
+            outputTokens: result.usage?.output_tokens ?? 0,
+            reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+            cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
+          },
+          captureImmediate: posthogCaptureImmediate
+        });
+        return result;
+      }, async error => {
+        await sendEventToPosthog({
+          client: this.phClient,
+          distinctId: posthogDistinctId,
+          traceId,
+          model: openAIParams.model,
+          provider: 'openai',
+          input: openAIParams.input,
+          output: [],
+          latency: 0,
+          baseURL: this.baseURL ?? '',
+          params: body,
+          httpStatus: error?.status ? error.status : 500,
+          usage: {
+            inputTokens: 0,
+            outputTokens: 0
+          },
+          isError: true,
+          error: JSON.stringify(error),
+          captureImmediate: posthogCaptureImmediate
+        });
+        throw error;
+      });
+      return wrappedPromise;
+    } finally {
+      // Restore our wrapped create method
+      originalSelf.create = tempCreate;
+    }
+  }
+}
 
 class PostHogAzureOpenAI extends OpenAIOrignal.AzureOpenAI {
   constructor(config) {
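The hunk above is the bulk of what this version range adds to the compiled bundle: a WrappedResponses$1 class that mirrors the OpenAI Responses API (create and parse), strips the posthog* fields from the request body, forwards the rest to the upstream client, and captures an analytics event with latency and token usage for both streaming and non-streaming calls. A minimal consumer-side sketch in TypeScript, assuming the wrapped client is exported from @posthog/ai as OpenAI and takes a posthog-node client in its config (neither the export name nor the exact config shape is shown in this diff):

// Hypothetical usage sketch; the posthog* fields below are exactly the ones
// destructured by WrappedResponses$1.create above, everything else passes
// through to the upstream Responses API.
import { OpenAI } from '@posthog/ai'      // assumed export name
import { PostHog } from 'posthog-node'

const phClient = new PostHog('<ph_project_api_key>')
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY ?? '', posthog: phClient })

const response = await openai.responses.create({
  model: 'gpt-4.1-mini',
  input: 'Write one sentence about tracing.',
  posthogDistinctId: 'user_123',   // optional
  posthogTraceId: 'trace_456',     // optional; falls back to uuid.v4()
  posthogCaptureImmediate: true,   // optional
})
console.log(response.output)

await phClient.shutdown()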
@@ -408,23 +621,19 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
     const parentPromise = super.create(openAIParams, options);
     if (openAIParams.stream) {
       return parentPromise.then(value => {
-        let accumulatedContent = '';
-        let usage = {
-          inputTokens: 0,
-          outputTokens: 0
-        };
-        let model = openAIParams.model;
         if ('tee' in value) {
           const [stream1, stream2] = value.tee();
           (async () => {
             try {
+              let accumulatedContent = '';
+              let usage = {
+                inputTokens: 0,
+                outputTokens: 0
+              };
               for await (const chunk of stream1) {
                 const delta = chunk?.choices?.[0]?.delta?.content ?? '';
                 accumulatedContent += delta;
                 if (chunk.usage) {
-                  if (chunk.model != model) {
-                    model = chunk.model;
-                  }
                   usage = {
                     inputTokens: chunk.usage.prompt_tokens ?? 0,
                     outputTokens: chunk.usage.completion_tokens ?? 0,
@@ -436,9 +645,9 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
               const latency = (Date.now() - startTime) / 1000;
               await sendEventToPosthog({
                 client: this.phClient,
-                distinctId: posthogDistinctId
+                distinctId: posthogDistinctId,
                 traceId,
-                model,
+                model: openAIParams.model,
                 provider: 'azure',
                 input: openAIParams.messages,
                 output: [{
@@ -453,15 +662,14 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
                 captureImmediate: posthogCaptureImmediate
               });
             } catch (error) {
-              // error handling
               await sendEventToPosthog({
                 client: this.phClient,
-                distinctId: posthogDistinctId
+                distinctId: posthogDistinctId,
                 traceId,
-                model,
+                model: openAIParams.model,
                 provider: 'azure',
                 input: openAIParams.messages,
-                output:
+                output: [],
                 latency: 0,
                 baseURL: this.baseURL ?? '',
                 params: body,
@@ -485,15 +693,11 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
       const wrappedPromise = parentPromise.then(async result => {
         if ('choices' in result) {
           const latency = (Date.now() - startTime) / 1000;
-          let model = openAIParams.model;
-          if (result.model != model) {
-            model = result.model;
-          }
           await sendEventToPosthog({
             client: this.phClient,
-            distinctId: posthogDistinctId
+            distinctId: posthogDistinctId,
             traceId,
-            model,
+            model: openAIParams.model,
             provider: 'azure',
             input: openAIParams.messages,
             output: formatResponseOpenAI(result),
@@ -514,7 +718,7 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
       }, async error => {
         await sendEventToPosthog({
           client: this.phClient,
-          distinctId: posthogDistinctId
+          distinctId: posthogDistinctId,
           traceId,
           model: openAIParams.model,
           provider: 'azure',
@@ -538,6 +742,208 @@ class WrappedCompletions extends OpenAIOrignal.AzureOpenAI.Chat.Completions {
     }
   }
 }
+class WrappedResponses extends OpenAIOrignal.AzureOpenAI.Responses {
+  constructor(client, phClient) {
+    super(client);
+    this.phClient = phClient;
+  }
+  // --- Implementation Signature
+  create(body, options) {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body;
+    const traceId = posthogTraceId ?? uuid.v4();
+    const startTime = Date.now();
+    const parentPromise = super.create(openAIParams, options);
+    if (openAIParams.stream) {
+      return parentPromise.then(value => {
+        if ('tee' in value && typeof value.tee === 'function') {
+          const [stream1, stream2] = value.tee();
+          (async () => {
+            try {
+              let finalContent = [];
+              let usage = {
+                inputTokens: 0,
+                outputTokens: 0
+              };
+              for await (const chunk of stream1) {
+                if (chunk.type === 'response.completed' && 'response' in chunk && chunk.response?.output && chunk.response.output.length > 0) {
+                  finalContent = chunk.response.output;
+                }
+                if ('usage' in chunk && chunk.usage) {
+                  usage = {
+                    inputTokens: chunk.usage.input_tokens ?? 0,
+                    outputTokens: chunk.usage.output_tokens ?? 0,
+                    reasoningTokens: chunk.usage.output_tokens_details?.reasoning_tokens ?? 0,
+                    cacheReadInputTokens: chunk.usage.input_tokens_details?.cached_tokens ?? 0
+                  };
+                }
+              }
+              const latency = (Date.now() - startTime) / 1000;
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'azure',
+                input: openAIParams.input,
+                output: finalContent,
+                latency,
+                baseURL: this.baseURL ?? '',
+                params: body,
+                httpStatus: 200,
+                usage,
+                captureImmediate: posthogCaptureImmediate
+              });
+            } catch (error) {
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'azure',
+                input: openAIParams.input,
+                output: [],
+                latency: 0,
+                baseURL: this.baseURL ?? '',
+                params: body,
+                httpStatus: error?.status ? error.status : 500,
+                usage: {
+                  inputTokens: 0,
+                  outputTokens: 0
+                },
+                isError: true,
+                error: JSON.stringify(error),
+                captureImmediate: posthogCaptureImmediate
+              });
+            }
+          })();
+          return stream2;
+        }
+        return value;
+      });
+    } else {
+      const wrappedPromise = parentPromise.then(async result => {
+        if ('output' in result) {
+          const latency = (Date.now() - startTime) / 1000;
+          await sendEventToPosthog({
+            client: this.phClient,
+            distinctId: posthogDistinctId,
+            traceId,
+            model: openAIParams.model,
+            provider: 'azure',
+            input: openAIParams.input,
+            output: result.output,
+            latency,
+            baseURL: this.baseURL ?? '',
+            params: body,
+            httpStatus: 200,
+            usage: {
+              inputTokens: result.usage?.input_tokens ?? 0,
+              outputTokens: result.usage?.output_tokens ?? 0,
+              reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+              cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
+            },
+            captureImmediate: posthogCaptureImmediate
+          });
+        }
+        return result;
+      }, async error => {
+        await sendEventToPosthog({
+          client: this.phClient,
+          distinctId: posthogDistinctId,
+          traceId,
+          model: openAIParams.model,
+          provider: 'azure',
+          input: openAIParams.input,
+          output: [],
+          latency: 0,
+          baseURL: this.baseURL ?? '',
+          params: body,
+          httpStatus: error?.status ? error.status : 500,
+          usage: {
+            inputTokens: 0,
+            outputTokens: 0
+          },
+          isError: true,
+          error: JSON.stringify(error),
+          captureImmediate: posthogCaptureImmediate
+        });
+        throw error;
+      });
+      return wrappedPromise;
+    }
+  }
+  parse(body, options) {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body;
+    const traceId = posthogTraceId ?? uuid.v4();
+    const startTime = Date.now();
+    const parentPromise = super.parse(openAIParams, options);
+    const wrappedPromise = parentPromise.then(async result => {
+      const latency = (Date.now() - startTime) / 1000;
+      await sendEventToPosthog({
+        client: this.phClient,
+        distinctId: posthogDistinctId,
+        traceId,
+        model: openAIParams.model,
+        provider: 'azure',
+        input: openAIParams.input,
+        output: result.output,
+        latency,
+        baseURL: this.baseURL ?? '',
+        params: body,
+        httpStatus: 200,
+        usage: {
+          inputTokens: result.usage?.input_tokens ?? 0,
+          outputTokens: result.usage?.output_tokens ?? 0,
+          reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+          cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
+        },
+        captureImmediate: posthogCaptureImmediate
+      });
+      return result;
+    }, async error => {
+      await sendEventToPosthog({
+        client: this.phClient,
+        distinctId: posthogDistinctId,
+        traceId,
+        model: openAIParams.model,
+        provider: 'azure',
+        input: openAIParams.input,
+        output: [],
+        latency: 0,
+        baseURL: this.baseURL ?? '',
+        params: body,
+        httpStatus: error?.status ? error.status : 500,
+        usage: {
+          inputTokens: 0,
+          outputTokens: 0
+        },
+        isError: true,
+        error: JSON.stringify(error),
+        captureImmediate: posthogCaptureImmediate
+      });
+      throw error;
+    });
+    return wrappedPromise;
+  }
+}
 
 const mapVercelParams = params => {
   return {
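The Azure class added here is the same wrapper with provider: 'azure' and the streaming usage read from chunk.usage instead of chunk.response.usage. A sketch under the same assumptions, reusing phClient from the sketch above and assuming the wrapped Azure client is exported as AzureOpenAI (the export name and constructor fields are not shown in this diff):

// Assumed export name; the PostHog-specific fields behave exactly as in the
// non-Azure wrapper above.
import { AzureOpenAI } from '@posthog/ai'

const azure = new AzureOpenAI({
  endpoint: 'https://<resource>.openai.azure.com',
  apiKey: process.env.AZURE_OPENAI_API_KEY ?? '',
  apiVersion: '2024-10-21',
  posthog: phClient,
})

const res = await azure.responses.create({
  model: '<deployment-name>',
  input: 'Summarize this change.',
  posthogDistinctId: 'user_123',
})
console.log(res.output)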
@@ -748,7 +1154,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       await sendEventToPosthog({
         client: phClient,
         distinctId: options.posthogDistinctId,
-        traceId: options.posthogTraceId,
+        traceId: options.posthogTraceId ?? uuid.v4(),
         model: modelId,
         provider: provider,
         input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
@@ -773,7 +1179,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       await sendEventToPosthog({
         client: phClient,
         distinctId: options.posthogDistinctId,
-        traceId: options.posthogTraceId,
+        traceId: options.posthogTraceId ?? uuid.v4(),
         model: modelId,
         provider: model.provider,
         input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
@@ -842,7 +1248,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       await sendEventToPosthog({
         client: phClient,
         distinctId: options.posthogDistinctId,
-        traceId: options.posthogTraceId,
+        traceId: options.posthogTraceId ?? uuid.v4(),
         model: modelId,
         provider: provider,
         input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
@@ -867,7 +1273,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       await sendEventToPosthog({
         client: phClient,
         distinctId: options.posthogDistinctId,
-        traceId: options.posthogTraceId,
+        traceId: options.posthogTraceId ?? uuid.v4(),
         model: modelId,
         provider: provider,
         input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
@@ -895,7 +1301,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
   const middleware = createInstrumentationMiddleware(phClient, model, {
     ...options,
     posthogTraceId: traceId,
-    posthogDistinctId: options.posthogDistinctId
+    posthogDistinctId: options.posthogDistinctId
   });
   const wrappedModel = ai.experimental_wrapLanguageModel({
     model,
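The four middleware hunks above change the captured traceId from options.posthogTraceId (possibly undefined) to options.posthogTraceId ?? uuid.v4(), so Vercel AI SDK calls traced without an explicit trace id still get one. A sketch of the call shape, assuming wrapVercelLanguageModel is exposed under that name (only its internal identifier is visible in this compiled output, so the import path and name are assumptions):

// Hypothetical wiring for the Vercel AI SDK path; the options object carries the
// same posthog* fields read by createInstrumentationMiddleware above.
import { generateText } from 'ai'
import { openai } from '@ai-sdk/openai'
import { wrapVercelLanguageModel } from '@posthog/ai'   // assumed export

const tracedModel = wrapVercelLanguageModel(openai('gpt-4.1-mini'), phClient, {
  posthogDistinctId: 'user_123',
  posthogPrivacyMode: false,
  // posthogTraceId omitted on purpose: each capture now falls back to uuid.v4()
})

const { text } = await generateText({ model: tracedModel, prompt: 'Hello from the middleware' })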
@@ -966,7 +1372,7 @@ class WrappedMessages extends AnthropicOriginal__default["default"].Messages {
           const latency = (Date.now() - startTime) / 1000;
           await sendEventToPosthog({
             client: this.phClient,
-            distinctId: posthogDistinctId
+            distinctId: posthogDistinctId,
             traceId,
             model: anthropicParams.model,
             provider: 'anthropic',
@@ -986,7 +1392,7 @@ class WrappedMessages extends AnthropicOriginal__default["default"].Messages {
           // error handling
           await sendEventToPosthog({
             client: this.phClient,
-            distinctId: posthogDistinctId
+            distinctId: posthogDistinctId,
             traceId,
             model: anthropicParams.model,
             provider: 'anthropic',
@@ -1017,7 +1423,7 @@ class WrappedMessages extends AnthropicOriginal__default["default"].Messages {
         const latency = (Date.now() - startTime) / 1000;
         await sendEventToPosthog({
           client: this.phClient,
-          distinctId: posthogDistinctId
+          distinctId: posthogDistinctId,
           traceId,
           model: anthropicParams.model,
           provider: 'anthropic',
@@ -1040,7 +1446,7 @@ class WrappedMessages extends AnthropicOriginal__default["default"].Messages {
       }, async error => {
         await sendEventToPosthog({
           client: this.phClient,
-          distinctId: posthogDistinctId
+          distinctId: posthogDistinctId,
           traceId,
           model: anthropicParams.model,
           provider: 'anthropic',
@@ -1097,7 +1503,7 @@ class WrappedModels {
       const latency = (Date.now() - startTime) / 1000;
       await sendEventToPosthog({
         client: this.phClient,
-        distinctId: posthogDistinctId
+        distinctId: posthogDistinctId,
         traceId,
         model: geminiParams.model,
         provider: 'gemini',
@@ -1118,7 +1524,7 @@ class WrappedModels {
       const latency = (Date.now() - startTime) / 1000;
       await sendEventToPosthog({
         client: this.phClient,
-        distinctId: posthogDistinctId
+        distinctId: posthogDistinctId,
         traceId,
         model: geminiParams.model,
         provider: 'gemini',
@@ -1172,7 +1578,7 @@ class WrappedModels {
       const latency = (Date.now() - startTime) / 1000;
       await sendEventToPosthog({
         client: this.phClient,
-        distinctId: posthogDistinctId
+        distinctId: posthogDistinctId,
         traceId,
         model: geminiParams.model,
         provider: 'gemini',
@@ -1192,7 +1598,7 @@ class WrappedModels {
       const latency = (Date.now() - startTime) / 1000;
       await sendEventToPosthog({
         client: this.phClient,
-        distinctId: posthogDistinctId
+        distinctId: posthogDistinctId,
         traceId,
         model: geminiParams.model,
         provider: 'gemini',