@posthog/ai 5.0.1 → 5.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -1
- package/lib/anthropic/index.cjs +4 -4
- package/lib/anthropic/index.cjs.map +1 -1
- package/lib/anthropic/index.mjs +4 -4
- package/lib/anthropic/index.mjs.map +1 -1
- package/lib/gemini/index.cjs +364 -0
- package/lib/gemini/index.cjs.map +1 -0
- package/lib/gemini/index.d.ts +60 -0
- package/lib/gemini/index.mjs +357 -0
- package/lib/gemini/index.mjs.map +1 -0
- package/lib/index.cjs +445 -39
- package/lib/index.cjs.map +1 -1
- package/lib/index.d.ts +23 -11
- package/lib/index.mjs +445 -39
- package/lib/index.mjs.map +1 -1
- package/lib/langchain/index.cjs.map +1 -1
- package/lib/langchain/index.mjs.map +1 -1
- package/lib/openai/index.cjs +226 -4
- package/lib/openai/index.cjs.map +1 -1
- package/lib/openai/index.d.ts +16 -4
- package/lib/openai/index.mjs +226 -5
- package/lib/openai/index.mjs.map +1 -1
- package/lib/vercel/index.cjs +5 -5
- package/lib/vercel/index.cjs.map +1 -1
- package/lib/vercel/index.mjs +5 -5
- package/lib/vercel/index.mjs.map +1 -1
- package/package.json +2 -2
- package/src/anthropic/index.ts +4 -4
- package/src/gemini/index.ts +4 -4
- package/src/openai/azure.ts +287 -33
- package/src/openai/index.ts +280 -6
- package/src/utils.ts +3 -2
- package/src/vercel/middleware.ts +7 -7
- package/tests/gemini.test.ts +31 -0
- package/tests/openai.test.ts +124 -48
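The headline changes in this range are a newly built Gemini module under `package/lib/gemini/` and OpenAI Responses API instrumentation for both the plain and Azure OpenAI wrappers (the `WrappedResponses` classes in the diff below), alongside small adjustments to the `distinctId`, `model`, and `traceId` arguments passed to `sendEventToPosthog` across providers. As a rough illustration of how the new `responses` surface is meant to be called, here is a minimal sketch; the `posthog*` parameter names and the `responses.create` entry point come from the diff itself, while the client setup, host, and model name are assumptions rather than documented values.

```ts
// Minimal usage sketch (assumed setup, not taken from this package's docs).
// The posthog* fields are destructured off the request body by the wrapper
// and never reach OpenAI; everything else is forwarded via super.create().
import { PostHog } from 'posthog-node'
import { OpenAI } from '@posthog/ai'

const phClient = new PostHog('<ph_project_api_key>', {
  host: 'https://us.i.posthog.com', // assumed host
})

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY ?? '',
  posthog: phClient, // mirrors `this.phClient = posthog` in PostHogOpenAI's constructor
})

// The wrapped Responses API reports model, input, output, latency and token
// usage (including reasoning and cached tokens) to PostHog after the call.
const response = await openai.responses.create({
  model: 'gpt-4.1-mini', // assumed model name
  input: 'Write a haiku about observability.',
  posthogDistinctId: 'user_123',
  posthogTraceId: 'trace_456',
})

console.log(response.output)
await phClient.shutdown()
```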
package/lib/index.mjs
CHANGED
```diff
@@ -190,6 +190,7 @@ class PostHogOpenAI extends OpenAIOrignal {
     super(openAIConfig);
     this.phClient = posthog;
     this.chat = new WrappedChat$1(this, this.phClient);
+    this.responses = new WrappedResponses$1(this, this.phClient);
   }
 }
 class WrappedChat$1 extends OpenAIOrignal.Chat {
@@ -244,7 +245,7 @@ class WrappedCompletions$1 extends OpenAIOrignal.Chat.Completions {
               const latency = (Date.now() - startTime) / 1000;
               await sendEventToPosthog({
                 client: this.phClient,
-                distinctId: posthogDistinctId
+                distinctId: posthogDistinctId,
                 traceId,
                 model: openAIParams.model,
                 provider: 'openai',
@@ -263,7 +264,7 @@ class WrappedCompletions$1 extends OpenAIOrignal.Chat.Completions {
             } catch (error) {
               await sendEventToPosthog({
                 client: this.phClient,
-                distinctId: posthogDistinctId
+                distinctId: posthogDistinctId,
                 traceId,
                 model: openAIParams.model,
                 provider: 'openai',
@@ -294,7 +295,7 @@ class WrappedCompletions$1 extends OpenAIOrignal.Chat.Completions {
           const latency = (Date.now() - startTime) / 1000;
           await sendEventToPosthog({
             client: this.phClient,
-            distinctId: posthogDistinctId
+            distinctId: posthogDistinctId,
             traceId,
             model: openAIParams.model,
             provider: 'openai',
@@ -317,7 +318,7 @@ class WrappedCompletions$1 extends OpenAIOrignal.Chat.Completions {
       }, async error => {
         await sendEventToPosthog({
           client: this.phClient,
-          distinctId: posthogDistinctId
+          distinctId: posthogDistinctId,
           traceId,
           model: openAIParams.model,
           provider: 'openai',
@@ -341,6 +342,218 @@ class WrappedCompletions$1 extends OpenAIOrignal.Chat.Completions {
     }
   }
 }
+class WrappedResponses$1 extends OpenAIOrignal.Responses {
+  constructor(client, phClient) {
+    super(client);
+    this.phClient = phClient;
+  }
+  // --- Implementation Signature
+  create(body, options) {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body;
+    const traceId = posthogTraceId ?? v4();
+    const startTime = Date.now();
+    const parentPromise = super.create(openAIParams, options);
+    if (openAIParams.stream) {
+      return parentPromise.then(value => {
+        if ('tee' in value && typeof value.tee === 'function') {
+          const [stream1, stream2] = value.tee();
+          (async () => {
+            try {
+              let finalContent = [];
+              let usage = {
+                inputTokens: 0,
+                outputTokens: 0
+              };
+              for await (const chunk of stream1) {
+                if (chunk.type === 'response.completed' && 'response' in chunk && chunk.response?.output && chunk.response.output.length > 0) {
+                  finalContent = chunk.response.output;
+                }
+                if ('response' in chunk && chunk.response?.usage) {
+                  usage = {
+                    inputTokens: chunk.response.usage.input_tokens ?? 0,
+                    outputTokens: chunk.response.usage.output_tokens ?? 0,
+                    reasoningTokens: chunk.response.usage.output_tokens_details?.reasoning_tokens ?? 0,
+                    cacheReadInputTokens: chunk.response.usage.input_tokens_details?.cached_tokens ?? 0
+                  };
+                }
+              }
+              const latency = (Date.now() - startTime) / 1000;
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'openai',
+                input: openAIParams.input,
+                output: finalContent,
+                latency,
+                baseURL: this.baseURL ?? '',
+                params: body,
+                httpStatus: 200,
+                usage,
+                captureImmediate: posthogCaptureImmediate
+              });
+            } catch (error) {
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'openai',
+                input: openAIParams.input,
+                output: [],
+                latency: 0,
+                baseURL: this.baseURL ?? '',
+                params: body,
+                httpStatus: error?.status ? error.status : 500,
+                usage: {
+                  inputTokens: 0,
+                  outputTokens: 0
+                },
+                isError: true,
+                error: JSON.stringify(error),
+                captureImmediate: posthogCaptureImmediate
+              });
+            }
+          })();
+          return stream2;
+        }
+        return value;
+      });
+    } else {
+      const wrappedPromise = parentPromise.then(async result => {
+        if ('output' in result) {
+          const latency = (Date.now() - startTime) / 1000;
+          await sendEventToPosthog({
+            client: this.phClient,
+            distinctId: posthogDistinctId,
+            traceId,
+            model: openAIParams.model,
+            provider: 'openai',
+            input: openAIParams.input,
+            output: result.output,
+            latency,
+            baseURL: this.baseURL ?? '',
+            params: body,
+            httpStatus: 200,
+            usage: {
+              inputTokens: result.usage?.input_tokens ?? 0,
+              outputTokens: result.usage?.output_tokens ?? 0,
+              reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+              cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
+            },
+            captureImmediate: posthogCaptureImmediate
+          });
+        }
+        return result;
+      }, async error => {
+        await sendEventToPosthog({
+          client: this.phClient,
+          distinctId: posthogDistinctId,
+          traceId,
+          model: openAIParams.model,
+          provider: 'openai',
+          input: openAIParams.input,
+          output: [],
+          latency: 0,
+          baseURL: this.baseURL ?? '',
+          params: body,
+          httpStatus: error?.status ? error.status : 500,
+          usage: {
+            inputTokens: 0,
+            outputTokens: 0
+          },
+          isError: true,
+          error: JSON.stringify(error),
+          captureImmediate: posthogCaptureImmediate
+        });
+        throw error;
+      });
+      return wrappedPromise;
+    }
+  }
+  parse(body, options) {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body;
+    const traceId = posthogTraceId ?? v4();
+    const startTime = Date.now();
+    // Create a temporary instance that bypasses our wrapped create method
+    const originalCreate = super.create.bind(this);
+    const originalSelf = this;
+    const tempCreate = originalSelf.create;
+    originalSelf.create = originalCreate;
+    try {
+      const parentPromise = super.parse(openAIParams, options);
+      const wrappedPromise = parentPromise.then(async result => {
+        const latency = (Date.now() - startTime) / 1000;
+        await sendEventToPosthog({
+          client: this.phClient,
+          distinctId: posthogDistinctId,
+          traceId,
+          model: openAIParams.model,
+          provider: 'openai',
+          input: openAIParams.input,
+          output: result.output,
+          latency,
+          baseURL: this.baseURL ?? '',
+          params: body,
+          httpStatus: 200,
+          usage: {
+            inputTokens: result.usage?.input_tokens ?? 0,
+            outputTokens: result.usage?.output_tokens ?? 0,
+            reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+            cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
+          },
+          captureImmediate: posthogCaptureImmediate
+        });
+        return result;
+      }, async error => {
+        await sendEventToPosthog({
+          client: this.phClient,
+          distinctId: posthogDistinctId,
+          traceId,
+          model: openAIParams.model,
+          provider: 'openai',
+          input: openAIParams.input,
+          output: [],
+          latency: 0,
+          baseURL: this.baseURL ?? '',
+          params: body,
+          httpStatus: error?.status ? error.status : 500,
+          usage: {
+            inputTokens: 0,
+            outputTokens: 0
+          },
+          isError: true,
+          error: JSON.stringify(error),
+          captureImmediate: posthogCaptureImmediate
+        });
+        throw error;
+      });
+      return wrappedPromise;
+    } finally {
+      // Restore our wrapped create method
+      originalSelf.create = tempCreate;
+    }
+  }
+}
 
 class PostHogAzureOpenAI extends AzureOpenAI {
   constructor(config) {
@@ -381,23 +594,19 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
     const parentPromise = super.create(openAIParams, options);
     if (openAIParams.stream) {
       return parentPromise.then(value => {
-        let accumulatedContent = '';
-        let usage = {
-          inputTokens: 0,
-          outputTokens: 0
-        };
-        let model = openAIParams.model;
         if ('tee' in value) {
           const [stream1, stream2] = value.tee();
           (async () => {
             try {
+              let accumulatedContent = '';
+              let usage = {
+                inputTokens: 0,
+                outputTokens: 0
+              };
               for await (const chunk of stream1) {
                 const delta = chunk?.choices?.[0]?.delta?.content ?? '';
                 accumulatedContent += delta;
                 if (chunk.usage) {
-                  if (chunk.model != model) {
-                    model = chunk.model;
-                  }
                   usage = {
                     inputTokens: chunk.usage.prompt_tokens ?? 0,
                     outputTokens: chunk.usage.completion_tokens ?? 0,
@@ -409,9 +618,9 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
               const latency = (Date.now() - startTime) / 1000;
               await sendEventToPosthog({
                 client: this.phClient,
-                distinctId: posthogDistinctId
+                distinctId: posthogDistinctId,
                 traceId,
-                model,
+                model: openAIParams.model,
                 provider: 'azure',
                 input: openAIParams.messages,
                 output: [{
@@ -426,15 +635,14 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
                 captureImmediate: posthogCaptureImmediate
               });
             } catch (error) {
-              // error handling
               await sendEventToPosthog({
                 client: this.phClient,
-                distinctId: posthogDistinctId
+                distinctId: posthogDistinctId,
                 traceId,
-                model,
+                model: openAIParams.model,
                 provider: 'azure',
                 input: openAIParams.messages,
-                output:
+                output: [],
                 latency: 0,
                 baseURL: this.baseURL ?? '',
                 params: body,
@@ -458,15 +666,11 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
       const wrappedPromise = parentPromise.then(async result => {
         if ('choices' in result) {
           const latency = (Date.now() - startTime) / 1000;
-          let model = openAIParams.model;
-          if (result.model != model) {
-            model = result.model;
-          }
           await sendEventToPosthog({
             client: this.phClient,
-            distinctId: posthogDistinctId
+            distinctId: posthogDistinctId,
             traceId,
-            model,
+            model: openAIParams.model,
             provider: 'azure',
             input: openAIParams.messages,
             output: formatResponseOpenAI(result),
@@ -487,7 +691,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
       }, async error => {
         await sendEventToPosthog({
           client: this.phClient,
-          distinctId: posthogDistinctId
+          distinctId: posthogDistinctId,
           traceId,
           model: openAIParams.model,
           provider: 'azure',
@@ -511,6 +715,208 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
     }
   }
 }
+class WrappedResponses extends AzureOpenAI.Responses {
+  constructor(client, phClient) {
+    super(client);
+    this.phClient = phClient;
+  }
+  // --- Implementation Signature
+  create(body, options) {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body;
+    const traceId = posthogTraceId ?? v4();
+    const startTime = Date.now();
+    const parentPromise = super.create(openAIParams, options);
+    if (openAIParams.stream) {
+      return parentPromise.then(value => {
+        if ('tee' in value && typeof value.tee === 'function') {
+          const [stream1, stream2] = value.tee();
+          (async () => {
+            try {
+              let finalContent = [];
+              let usage = {
+                inputTokens: 0,
+                outputTokens: 0
+              };
+              for await (const chunk of stream1) {
+                if (chunk.type === 'response.completed' && 'response' in chunk && chunk.response?.output && chunk.response.output.length > 0) {
+                  finalContent = chunk.response.output;
+                }
+                if ('usage' in chunk && chunk.usage) {
+                  usage = {
+                    inputTokens: chunk.usage.input_tokens ?? 0,
+                    outputTokens: chunk.usage.output_tokens ?? 0,
+                    reasoningTokens: chunk.usage.output_tokens_details?.reasoning_tokens ?? 0,
+                    cacheReadInputTokens: chunk.usage.input_tokens_details?.cached_tokens ?? 0
+                  };
+                }
+              }
+              const latency = (Date.now() - startTime) / 1000;
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'azure',
+                input: openAIParams.input,
+                output: finalContent,
+                latency,
+                baseURL: this.baseURL ?? '',
+                params: body,
+                httpStatus: 200,
+                usage,
+                captureImmediate: posthogCaptureImmediate
+              });
+            } catch (error) {
+              await sendEventToPosthog({
+                client: this.phClient,
+                distinctId: posthogDistinctId,
+                traceId,
+                model: openAIParams.model,
+                provider: 'azure',
+                input: openAIParams.input,
+                output: [],
+                latency: 0,
+                baseURL: this.baseURL ?? '',
+                params: body,
+                httpStatus: error?.status ? error.status : 500,
+                usage: {
+                  inputTokens: 0,
+                  outputTokens: 0
+                },
+                isError: true,
+                error: JSON.stringify(error),
+                captureImmediate: posthogCaptureImmediate
+              });
+            }
+          })();
+          return stream2;
+        }
+        return value;
+      });
+    } else {
+      const wrappedPromise = parentPromise.then(async result => {
+        if ('output' in result) {
+          const latency = (Date.now() - startTime) / 1000;
+          await sendEventToPosthog({
+            client: this.phClient,
+            distinctId: posthogDistinctId,
+            traceId,
+            model: openAIParams.model,
+            provider: 'azure',
+            input: openAIParams.input,
+            output: result.output,
+            latency,
+            baseURL: this.baseURL ?? '',
+            params: body,
+            httpStatus: 200,
+            usage: {
+              inputTokens: result.usage?.input_tokens ?? 0,
+              outputTokens: result.usage?.output_tokens ?? 0,
+              reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+              cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
+            },
+            captureImmediate: posthogCaptureImmediate
+          });
+        }
+        return result;
+      }, async error => {
+        await sendEventToPosthog({
+          client: this.phClient,
+          distinctId: posthogDistinctId,
+          traceId,
+          model: openAIParams.model,
+          provider: 'azure',
+          input: openAIParams.input,
+          output: [],
+          latency: 0,
+          baseURL: this.baseURL ?? '',
+          params: body,
+          httpStatus: error?.status ? error.status : 500,
+          usage: {
+            inputTokens: 0,
+            outputTokens: 0
+          },
+          isError: true,
+          error: JSON.stringify(error),
+          captureImmediate: posthogCaptureImmediate
+        });
+        throw error;
+      });
+      return wrappedPromise;
+    }
+  }
+  parse(body, options) {
+    const {
+      posthogDistinctId,
+      posthogTraceId,
+      posthogProperties,
+      // eslint-disable-next-line @typescript-eslint/no-unused-vars
+      posthogPrivacyMode = false,
+      posthogGroups,
+      posthogCaptureImmediate,
+      ...openAIParams
+    } = body;
+    const traceId = posthogTraceId ?? v4();
+    const startTime = Date.now();
+    const parentPromise = super.parse(openAIParams, options);
+    const wrappedPromise = parentPromise.then(async result => {
+      const latency = (Date.now() - startTime) / 1000;
+      await sendEventToPosthog({
+        client: this.phClient,
+        distinctId: posthogDistinctId,
+        traceId,
+        model: openAIParams.model,
+        provider: 'azure',
+        input: openAIParams.input,
+        output: result.output,
+        latency,
+        baseURL: this.baseURL ?? '',
+        params: body,
+        httpStatus: 200,
+        usage: {
+          inputTokens: result.usage?.input_tokens ?? 0,
+          outputTokens: result.usage?.output_tokens ?? 0,
+          reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,
+          cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0
+        },
+        captureImmediate: posthogCaptureImmediate
+      });
+      return result;
+    }, async error => {
+      await sendEventToPosthog({
+        client: this.phClient,
+        distinctId: posthogDistinctId,
+        traceId,
+        model: openAIParams.model,
+        provider: 'azure',
+        input: openAIParams.input,
+        output: [],
+        latency: 0,
+        baseURL: this.baseURL ?? '',
+        params: body,
+        httpStatus: error?.status ? error.status : 500,
+        usage: {
+          inputTokens: 0,
+          outputTokens: 0
+        },
+        isError: true,
+        error: JSON.stringify(error),
+        captureImmediate: posthogCaptureImmediate
+      });
+      throw error;
+    });
+    return wrappedPromise;
+  }
+}
 
 const mapVercelParams = params => {
   return {
@@ -721,7 +1127,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         await sendEventToPosthog({
           client: phClient,
           distinctId: options.posthogDistinctId,
-          traceId: options.posthogTraceId,
+          traceId: options.posthogTraceId ?? v4(),
           model: modelId,
           provider: provider,
           input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
@@ -746,7 +1152,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         await sendEventToPosthog({
           client: phClient,
           distinctId: options.posthogDistinctId,
-          traceId: options.posthogTraceId,
+          traceId: options.posthogTraceId ?? v4(),
           model: modelId,
           provider: model.provider,
           input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
@@ -815,7 +1221,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         await sendEventToPosthog({
           client: phClient,
           distinctId: options.posthogDistinctId,
-          traceId: options.posthogTraceId,
+          traceId: options.posthogTraceId ?? v4(),
           model: modelId,
           provider: provider,
           input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
@@ -840,7 +1246,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         await sendEventToPosthog({
           client: phClient,
           distinctId: options.posthogDistinctId,
-          traceId: options.posthogTraceId,
+          traceId: options.posthogTraceId ?? v4(),
           model: modelId,
           provider: provider,
           input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
@@ -868,7 +1274,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
   const middleware = createInstrumentationMiddleware(phClient, model, {
     ...options,
     posthogTraceId: traceId,
-    posthogDistinctId: options.posthogDistinctId
+    posthogDistinctId: options.posthogDistinctId
   });
   const wrappedModel = experimental_wrapLanguageModel({
     model,
@@ -939,7 +1345,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
               const latency = (Date.now() - startTime) / 1000;
               await sendEventToPosthog({
                 client: this.phClient,
-                distinctId: posthogDistinctId
+                distinctId: posthogDistinctId,
                 traceId,
                 model: anthropicParams.model,
                 provider: 'anthropic',
@@ -959,7 +1365,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
               // error handling
               await sendEventToPosthog({
                 client: this.phClient,
-                distinctId: posthogDistinctId
+                distinctId: posthogDistinctId,
                 traceId,
                 model: anthropicParams.model,
                 provider: 'anthropic',
@@ -990,7 +1396,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
           const latency = (Date.now() - startTime) / 1000;
           await sendEventToPosthog({
             client: this.phClient,
-            distinctId: posthogDistinctId
+            distinctId: posthogDistinctId,
             traceId,
             model: anthropicParams.model,
             provider: 'anthropic',
@@ -1013,7 +1419,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
       }, async error => {
         await sendEventToPosthog({
           client: this.phClient,
-          distinctId: posthogDistinctId
+          distinctId: posthogDistinctId,
           traceId,
           model: anthropicParams.model,
           provider: 'anthropic',
@@ -1070,7 +1476,7 @@ class WrappedModels {
       const latency = (Date.now() - startTime) / 1000;
       await sendEventToPosthog({
         client: this.phClient,
-        distinctId: posthogDistinctId
+        distinctId: posthogDistinctId,
        traceId,
         model: geminiParams.model,
         provider: 'gemini',
@@ -1091,7 +1497,7 @@ class WrappedModels {
       const latency = (Date.now() - startTime) / 1000;
       await sendEventToPosthog({
         client: this.phClient,
-        distinctId: posthogDistinctId
+        distinctId: posthogDistinctId,
         traceId,
         model: geminiParams.model,
         provider: 'gemini',
@@ -1145,7 +1551,7 @@ class WrappedModels {
       const latency = (Date.now() - startTime) / 1000;
       await sendEventToPosthog({
         client: this.phClient,
-        distinctId: posthogDistinctId
+        distinctId: posthogDistinctId,
         traceId,
         model: geminiParams.model,
         provider: 'gemini',
@@ -1165,7 +1571,7 @@ class WrappedModels {
       const latency = (Date.now() - startTime) / 1000;
      await sendEventToPosthog({
         client: this.phClient,
-        distinctId: posthogDistinctId
+        distinctId: posthogDistinctId,
         traceId,
         model: geminiParams.model,
         provider: 'gemini',
```
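Both `WrappedResponses` classes handle `stream: true` the same way: the SDK stream's `tee()` produces two branches, one returned to the caller untouched and the other drained inside a detached async IIFE that accumulates the final output and usage before emitting a single analytics event. Below is a generic sketch of that pattern, with simplified placeholder types rather than this package's real ones.

```ts
// Generic tee-and-drain sketch (placeholder types, not this package's API):
// return one branch of a teeable stream to the caller and consume the other
// in the background to emit analytics once the stream finishes.
interface TeeableStream<T> extends AsyncIterable<T> {
  tee(): [TeeableStream<T>, TeeableStream<T>]
}

export function instrumentStream<T>(
  stream: TeeableStream<T>,
  report: (chunks: T[], latencySeconds: number) => Promise<void>,
): TeeableStream<T> {
  const startTime = Date.now()
  const [forAnalytics, forCaller] = stream.tee()
  void (async () => {
    const chunks: T[] = []
    try {
      for await (const chunk of forAnalytics) {
        chunks.push(chunk)
      }
      await report(chunks, (Date.now() - startTime) / 1000)
    } catch {
      // Analytics failures are swallowed so they can never break the
      // caller's stream, mirroring the try/catch in the diff above.
    }
  })()
  return forCaller
}
```

The non-Azure `parse()` implementation additionally points `this.create` back at `super.create` before delegating to `super.parse()` and restores the wrapped method in `finally`, presumably so the Responses SDK's internal call to `create` is not captured as a second event for the same request.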