@posthog/ai 6.1.0 → 6.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/anthropic/index.cjs +141 -8
- package/dist/anthropic/index.cjs.map +1 -1
- package/dist/anthropic/index.mjs +141 -8
- package/dist/anthropic/index.mjs.map +1 -1
- package/dist/gemini/index.cjs +135 -29
- package/dist/gemini/index.cjs.map +1 -1
- package/dist/gemini/index.d.ts +4 -21
- package/dist/gemini/index.mjs +135 -29
- package/dist/gemini/index.mjs.map +1 -1
- package/dist/index.cjs +600 -77
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +4 -21
- package/dist/index.mjs +600 -77
- package/dist/index.mjs.map +1 -1
- package/dist/langchain/index.cjs +132 -2
- package/dist/langchain/index.cjs.map +1 -1
- package/dist/langchain/index.mjs +132 -2
- package/dist/langchain/index.mjs.map +1 -1
- package/dist/openai/index.cjs +222 -21
- package/dist/openai/index.cjs.map +1 -1
- package/dist/openai/index.mjs +222 -21
- package/dist/openai/index.mjs.map +1 -1
- package/dist/vercel/index.cjs +114 -8
- package/dist/vercel/index.cjs.map +1 -1
- package/dist/vercel/index.mjs +114 -8
- package/dist/vercel/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.mjs
CHANGED
@@ -238,9 +238,8 @@ const extractAvailableToolCalls = (provider, params) => {
     }
     return null;
   } else if (provider === 'vercel') {
-
-
-    return params.mode.tools;
+    if (params.tools) {
+      return params.tools;
     }
     return null;
   }
@@ -350,6 +349,204 @@ const sendEventToPosthog = async ({
   }
 };

+// Type guards for safer type checking
+const isString = value => {
+  return typeof value === 'string';
+};
+const isObject = value => {
+  return value !== null && typeof value === 'object' && !Array.isArray(value);
+};
+
+const REDACTED_IMAGE_PLACEHOLDER = '[base64 image redacted]';
+// ============================================
+// Base64 Detection Helpers
+// ============================================
+const isBase64DataUrl = str => {
+  return /^data:([^;]+);base64,/.test(str);
+};
+const isValidUrl = str => {
+  try {
+    new URL(str);
+    return true;
+  } catch {
+    // Not an absolute URL, check if it's a relative URL or path
+    return str.startsWith('/') || str.startsWith('./') || str.startsWith('../');
+  }
+};
+const isRawBase64 = str => {
+  // Skip if it's a valid URL or path
+  if (isValidUrl(str)) {
+    return false;
+  }
+  // Check if it's a valid base64 string
+  // Base64 images are typically at least a few hundred chars, but we'll be conservative
+  return str.length > 20 && /^[A-Za-z0-9+/]+=*$/.test(str);
+};
+function redactBase64DataUrl(str) {
+  if (!isString(str)) return str;
+  // Check for data URL format
+  if (isBase64DataUrl(str)) {
+    return REDACTED_IMAGE_PLACEHOLDER;
+  }
+  // Check for raw base64 (Vercel sends raw base64 for inline images)
+  if (isRawBase64(str)) {
+    return REDACTED_IMAGE_PLACEHOLDER;
+  }
+  return str;
+}
+const processMessages = (messages, transformContent) => {
+  if (!messages) return messages;
+  const processContent = content => {
+    if (typeof content === 'string') return content;
+    if (!content) return content;
+    if (Array.isArray(content)) {
+      return content.map(transformContent);
+    }
+    // Handle single object content
+    return transformContent(content);
+  };
+  const processMessage = msg => {
+    if (!isObject(msg) || !('content' in msg)) return msg;
+    return {
+      ...msg,
+      content: processContent(msg.content)
+    };
+  };
+  // Handle both arrays and single messages
+  if (Array.isArray(messages)) {
+    return messages.map(processMessage);
+  }
+  return processMessage(messages);
+};
+// ============================================
+// Provider-Specific Image Sanitizers
+// ============================================
+const sanitizeOpenAIImage = item => {
+  if (!isObject(item)) return item;
+  // Handle image_url format
+  if (item.type === 'image_url' && 'image_url' in item && isObject(item.image_url) && 'url' in item.image_url) {
+    return {
+      ...item,
+      image_url: {
+        ...item.image_url,
+        url: redactBase64DataUrl(item.image_url.url)
+      }
+    };
+  }
+  return item;
+};
+const sanitizeOpenAIResponseImage = item => {
+  if (!isObject(item)) return item;
+  // Handle input_image format
+  if (item.type === 'input_image' && 'image_url' in item) {
+    return {
+      ...item,
+      image_url: redactBase64DataUrl(item.image_url)
+    };
+  }
+  return item;
+};
+const sanitizeAnthropicImage = item => {
+  if (!isObject(item)) return item;
+  // Handle Anthropic's image format
+  if (item.type === 'image' && 'source' in item && isObject(item.source) && item.source.type === 'base64' && 'data' in item.source) {
+    return {
+      ...item,
+      source: {
+        ...item.source,
+        data: REDACTED_IMAGE_PLACEHOLDER
+      }
+    };
+  }
+  return item;
+};
+const sanitizeGeminiPart = part => {
+  if (!isObject(part)) return part;
+  // Handle Gemini's inline data format
+  if ('inlineData' in part && isObject(part.inlineData) && 'data' in part.inlineData) {
+    return {
+      ...part,
+      inlineData: {
+        ...part.inlineData,
+        data: REDACTED_IMAGE_PLACEHOLDER
+      }
+    };
+  }
+  return part;
+};
+const processGeminiItem = item => {
+  if (!isObject(item)) return item;
+  // If it has parts, process them
+  if ('parts' in item && item.parts) {
+    const parts = Array.isArray(item.parts) ? item.parts.map(sanitizeGeminiPart) : sanitizeGeminiPart(item.parts);
+    return {
+      ...item,
+      parts
+    };
+  }
+  return item;
+};
+const sanitizeLangChainImage = item => {
+  if (!isObject(item)) return item;
+  // OpenAI style
+  if (item.type === 'image_url' && 'image_url' in item && isObject(item.image_url) && 'url' in item.image_url) {
+    return {
+      ...item,
+      image_url: {
+        ...item.image_url,
+        url: redactBase64DataUrl(item.image_url.url)
+      }
+    };
+  }
+  // Direct image with data field
+  if (item.type === 'image' && 'data' in item) {
+    return {
+      ...item,
+      data: redactBase64DataUrl(item.data)
+    };
+  }
+  // Anthropic style
+  if (item.type === 'image' && 'source' in item && isObject(item.source) && 'data' in item.source) {
+    return {
+      ...item,
+      source: {
+        ...item.source,
+        data: redactBase64DataUrl(item.source.data)
+      }
+    };
+  }
+  // Google style
+  if (item.type === 'media' && 'data' in item) {
+    return {
+      ...item,
+      data: redactBase64DataUrl(item.data)
+    };
+  }
+  return item;
+};
+// Export individual sanitizers for tree-shaking
+const sanitizeOpenAI = data => {
+  return processMessages(data, sanitizeOpenAIImage);
+};
+const sanitizeOpenAIResponse = data => {
+  return processMessages(data, sanitizeOpenAIResponseImage);
+};
+const sanitizeAnthropic = data => {
+  return processMessages(data, sanitizeAnthropicImage);
+};
+const sanitizeGemini = data => {
+  // Gemini has a different structure with 'parts' directly on items instead of 'content'
+  // So we need custom processing instead of using processMessages
+  if (!data) return data;
+  if (Array.isArray(data)) {
+    return data.map(processGeminiItem);
+  }
+  return processGeminiItem(data);
+};
+const sanitizeLangChain = data => {
+  return processMessages(data, sanitizeLangChainImage);
+};
+
 const Chat = OpenAI.Chat;
 const Completions = Chat.Completions;
 const Responses = OpenAI.Responses;
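The hunk above is the core of the release: base64 image payloads are replaced with a fixed placeholder before prompts are attached to analytics events. These helpers are internal to the bundle (they are not part of the package's public API), so the sketch below reproduces a minimal version of them purely to show the behaviour; it is illustrative, not the package's exported surface.

```js
// Minimal sketch of the redaction logic added in this hunk (bundle-internal in @posthog/ai).
const REDACTED_IMAGE_PLACEHOLDER = '[base64 image redacted]';

const isBase64DataUrl = str => /^data:([^;]+);base64,/.test(str);

const isValidUrl = str => {
  try {
    new URL(str);
    return true;
  } catch {
    return str.startsWith('/') || str.startsWith('./') || str.startsWith('../');
  }
};

const isRawBase64 = str =>
  !isValidUrl(str) && str.length > 20 && /^[A-Za-z0-9+/]+=*$/.test(str);

const redactBase64DataUrl = str => {
  if (typeof str !== 'string') return str;
  return isBase64DataUrl(str) || isRawBase64(str) ? REDACTED_IMAGE_PLACEHOLDER : str;
};

// A base64 data URL is replaced; a regular URL passes through untouched.
console.log(redactBase64DataUrl('data:image/png;base64,iVBORw0KGgoAAAANSUhEUg=='));
// -> '[base64 image redacted]'
console.log(redactBase64DataUrl('https://example.com/cat.png'));
// -> 'https://example.com/cat.png'
```

The placeholder string is what ends up in the captured event properties instead of the raw image bytes, which keeps multimodal prompts from producing oversized events.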
@@ -397,14 +594,52 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
       const [stream1, stream2] = value.tee();
       (async () => {
         try {
+          const contentBlocks = [];
           let accumulatedContent = '';
           let usage = {
             inputTokens: 0,
             outputTokens: 0
           };
+          // Map to track in-progress tool calls
+          const toolCallsInProgress = new Map();
           for await (const chunk of stream1) {
-            const
-
+            const choice = chunk?.choices?.[0];
+            // Handle text content
+            const deltaContent = choice?.delta?.content;
+            if (deltaContent) {
+              accumulatedContent += deltaContent;
+            }
+            // Handle tool calls
+            const deltaToolCalls = choice?.delta?.tool_calls;
+            if (deltaToolCalls && Array.isArray(deltaToolCalls)) {
+              for (const toolCall of deltaToolCalls) {
+                const index = toolCall.index;
+                if (index !== undefined) {
+                  if (!toolCallsInProgress.has(index)) {
+                    // New tool call
+                    toolCallsInProgress.set(index, {
+                      id: toolCall.id || '',
+                      name: toolCall.function?.name || '',
+                      arguments: ''
+                    });
+                  }
+                  const inProgressCall = toolCallsInProgress.get(index);
+                  if (inProgressCall) {
+                    // Update tool call data
+                    if (toolCall.id) {
+                      inProgressCall.id = toolCall.id;
+                    }
+                    if (toolCall.function?.name) {
+                      inProgressCall.name = toolCall.function.name;
+                    }
+                    if (toolCall.function?.arguments) {
+                      inProgressCall.arguments += toolCall.function.arguments;
+                    }
+                  }
+                }
+              }
+            }
+            // Handle usage information
             if (chunk.usage) {
               usage = {
                 inputTokens: chunk.usage.prompt_tokens ?? 0,
@@ -414,6 +649,37 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
               };
             }
           }
+          // Build final content blocks
+          if (accumulatedContent) {
+            contentBlocks.push({
+              type: 'text',
+              text: accumulatedContent
+            });
+          }
+          // Add completed tool calls to content blocks
+          for (const toolCall of toolCallsInProgress.values()) {
+            if (toolCall.name) {
+              contentBlocks.push({
+                type: 'function',
+                id: toolCall.id,
+                function: {
+                  name: toolCall.name,
+                  arguments: toolCall.arguments
+                }
+              });
+            }
+          }
+          // Format output to match non-streaming version
+          const formattedOutput = contentBlocks.length > 0 ? [{
+            role: 'assistant',
+            content: contentBlocks
+          }] : [{
+            role: 'assistant',
+            content: [{
+              type: 'text',
+              text: ''
+            }]
+          }];
           const latency = (Date.now() - startTime) / 1000;
           const availableTools = extractAvailableToolCalls('openai', openAIParams);
           await sendEventToPosthog({
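Taken together, the two hunks above make the streaming OpenAI path report the same content-block shape as the non-streaming path: text deltas and tool-call deltas are folded into one assistant message. A reduced sketch of that accumulation, run over hand-written chunks (the chunk objects are illustrative, not captured from a real API call):

```js
// Sketch of the delta-folding added above, driven by hypothetical streaming chunks.
const chunks = [
  { choices: [{ delta: { content: 'Checking the weather' } }] },
  { choices: [{ delta: { tool_calls: [{ index: 0, id: 'call_1', function: { name: 'get_weather', arguments: '{"city":' } }] } }] },
  { choices: [{ delta: { tool_calls: [{ index: 0, function: { arguments: '"Paris"}' } }] } }] }
];

let accumulatedContent = '';
const toolCallsInProgress = new Map();

for (const chunk of chunks) {
  const choice = chunk?.choices?.[0];
  if (choice?.delta?.content) accumulatedContent += choice.delta.content;
  for (const toolCall of choice?.delta?.tool_calls ?? []) {
    if (toolCall.index === undefined) continue;
    if (!toolCallsInProgress.has(toolCall.index)) {
      toolCallsInProgress.set(toolCall.index, { id: toolCall.id || '', name: toolCall.function?.name || '', arguments: '' });
    }
    const call = toolCallsInProgress.get(toolCall.index);
    if (toolCall.id) call.id = toolCall.id;
    if (toolCall.function?.name) call.name = toolCall.function.name;
    if (toolCall.function?.arguments) call.arguments += toolCall.function.arguments;
  }
}

const contentBlocks = [];
if (accumulatedContent) contentBlocks.push({ type: 'text', text: accumulatedContent });
for (const call of toolCallsInProgress.values()) {
  if (call.name) contentBlocks.push({ type: 'function', id: call.id, function: { name: call.name, arguments: call.arguments } });
}

console.log(JSON.stringify([{ role: 'assistant', content: contentBlocks }], null, 2));
// One assistant message containing a text block and a fully re-assembled tool call.
```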
@@ -422,11 +688,8 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
             traceId,
             model: openAIParams.model,
             provider: 'openai',
-            input: openAIParams.messages,
-            output: [{
-              content: accumulatedContent,
-              role: 'assistant'
-            }],
+            input: sanitizeOpenAI(openAIParams.messages),
+            output: formattedOutput,
             latency,
             baseURL: this.baseURL ?? '',
             params: body,
@@ -436,18 +699,19 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
             captureImmediate: posthogCaptureImmediate
           });
         } catch (error) {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
             traceId,
             model: openAIParams.model,
             provider: 'openai',
-            input: openAIParams.messages,
+            input: sanitizeOpenAI(openAIParams.messages),
             output: [],
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
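This is the first of many error paths in the diff that now derive the reported status from the thrown error, falling back to 500, rather than passing a bare `httpStatus` through. The guard is only a property check; a compact sketch (`statusFromError` is just a name for this sketch, the bundle inlines the expression):

```js
// Sketch of the status extraction repeated in every catch/reject path of this diff.
const statusFromError = error =>
  error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;

console.log(statusFromError({ status: 429, message: 'rate limited' })); // 429
console.log(statusFromError(new Error('network down')));                // 500
console.log(statusFromError(undefined));                                // 500
```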
@@ -474,7 +738,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
           traceId,
           model: openAIParams.model,
           provider: 'openai',
-          input: openAIParams.messages,
+          input: sanitizeOpenAI(openAIParams.messages),
           output: formatResponseOpenAI(result),
           latency,
           baseURL: this.baseURL ?? '',
@@ -492,18 +756,19 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
         }
         return result;
       }, async error => {
+        const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
         await sendEventToPosthog({
           client: this.phClient,
           distinctId: posthogDistinctId,
           traceId,
           model: openAIParams.model,
           provider: 'openai',
-          input: openAIParams.messages,
+          input: sanitizeOpenAI(openAIParams.messages),
           output: [],
           latency: 0,
           baseURL: this.baseURL ?? '',
           params: body,
-          httpStatus
+          httpStatus,
           usage: {
             inputTokens: 0,
             outputTokens: 0
@@ -571,7 +836,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
           //@ts-expect-error
           model: openAIParams.model,
           provider: 'openai',
-          input: openAIParams.input,
+          input: sanitizeOpenAIResponse(openAIParams.input),
           output: finalContent,
           latency,
           baseURL: this.baseURL ?? '',
@@ -582,6 +847,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
           captureImmediate: posthogCaptureImmediate
         });
       } catch (error) {
+        const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
         await sendEventToPosthog({
           client: this.phClient,
           distinctId: posthogDistinctId,
@@ -589,12 +855,12 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
           //@ts-expect-error
           model: openAIParams.model,
           provider: 'openai',
-          input: openAIParams.input,
+          input: sanitizeOpenAIResponse(openAIParams.input),
           output: [],
           latency: 0,
           baseURL: this.baseURL ?? '',
           params: body,
-          httpStatus
+          httpStatus,
           usage: {
             inputTokens: 0,
             outputTokens: 0
@@ -621,7 +887,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
           //@ts-expect-error
           model: openAIParams.model,
           provider: 'openai',
-          input: openAIParams.input,
+          input: sanitizeOpenAIResponse(openAIParams.input),
           output: formatResponseOpenAI({
             output: result.output
           }),
@@ -641,6 +907,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
         }
         return result;
       }, async error => {
+        const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
         await sendEventToPosthog({
           client: this.phClient,
           distinctId: posthogDistinctId,
@@ -648,12 +915,12 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
           //@ts-expect-error
           model: openAIParams.model,
           provider: 'openai',
-          input: openAIParams.input,
+          input: sanitizeOpenAIResponse(openAIParams.input),
           output: [],
           latency: 0,
           baseURL: this.baseURL ?? '',
           params: body,
-          httpStatus
+          httpStatus,
           usage: {
             inputTokens: 0,
             outputTokens: 0
@@ -696,7 +963,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
           //@ts-expect-error
           model: openAIParams.model,
           provider: 'openai',
-          input: openAIParams.input,
+          input: sanitizeOpenAIResponse(openAIParams.input),
           output: result.output,
           latency,
           baseURL: this.baseURL ?? '',
@@ -712,6 +979,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
         });
         return result;
       }, async error => {
+        const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
         await sendEventToPosthog({
           client: this.phClient,
           distinctId: posthogDistinctId,
@@ -719,12 +987,12 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
           //@ts-expect-error
           model: openAIParams.model,
           provider: 'openai',
-          input: openAIParams.input,
+          input: sanitizeOpenAIResponse(openAIParams.input),
           output: [],
           latency: 0,
           baseURL: this.baseURL ?? '',
           params: body,
-          httpStatus
+          httpStatus,
           usage: {
             inputTokens: 0,
             outputTokens: 0
@@ -786,14 +1054,52 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
       const [stream1, stream2] = value.tee();
       (async () => {
         try {
+          const contentBlocks = [];
           let accumulatedContent = '';
           let usage = {
             inputTokens: 0,
             outputTokens: 0
           };
+          // Map to track in-progress tool calls
+          const toolCallsInProgress = new Map();
           for await (const chunk of stream1) {
-            const
-
+            const choice = chunk?.choices?.[0];
+            // Handle text content
+            const deltaContent = choice?.delta?.content;
+            if (deltaContent) {
+              accumulatedContent += deltaContent;
+            }
+            // Handle tool calls
+            const deltaToolCalls = choice?.delta?.tool_calls;
+            if (deltaToolCalls && Array.isArray(deltaToolCalls)) {
+              for (const toolCall of deltaToolCalls) {
+                const index = toolCall.index;
+                if (index !== undefined) {
+                  if (!toolCallsInProgress.has(index)) {
+                    // New tool call
+                    toolCallsInProgress.set(index, {
+                      id: toolCall.id || '',
+                      name: toolCall.function?.name || '',
+                      arguments: ''
+                    });
+                  }
+                  const inProgressCall = toolCallsInProgress.get(index);
+                  if (inProgressCall) {
+                    // Update tool call data
+                    if (toolCall.id) {
+                      inProgressCall.id = toolCall.id;
+                    }
+                    if (toolCall.function?.name) {
+                      inProgressCall.name = toolCall.function.name;
+                    }
+                    if (toolCall.function?.arguments) {
+                      inProgressCall.arguments += toolCall.function.arguments;
+                    }
+                  }
+                }
+              }
+            }
+            // Handle usage information
             if (chunk.usage) {
               usage = {
                 inputTokens: chunk.usage.prompt_tokens ?? 0,
@@ -803,6 +1109,37 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
               };
             }
           }
+          // Build final content blocks
+          if (accumulatedContent) {
+            contentBlocks.push({
+              type: 'text',
+              text: accumulatedContent
+            });
+          }
+          // Add completed tool calls to content blocks
+          for (const toolCall of toolCallsInProgress.values()) {
+            if (toolCall.name) {
+              contentBlocks.push({
+                type: 'function',
+                id: toolCall.id,
+                function: {
+                  name: toolCall.name,
+                  arguments: toolCall.arguments
+                }
+              });
+            }
+          }
+          // Format output to match non-streaming version
+          const formattedOutput = contentBlocks.length > 0 ? [{
+            role: 'assistant',
+            content: contentBlocks
+          }] : [{
+            role: 'assistant',
+            content: [{
+              type: 'text',
+              text: ''
+            }]
+          }];
           const latency = (Date.now() - startTime) / 1000;
           await sendEventToPosthog({
             client: this.phClient,
@@ -811,10 +1148,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
             model: openAIParams.model,
             provider: 'azure',
             input: openAIParams.messages,
-            output: [{
-              content: accumulatedContent,
-              role: 'assistant'
-            }],
+            output: formattedOutput,
             latency,
             baseURL: this.baseURL ?? '',
             params: body,
@@ -823,6 +1157,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
             captureImmediate: posthogCaptureImmediate
           });
         } catch (error) {
+          const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
           await sendEventToPosthog({
             client: this.phClient,
             distinctId: posthogDistinctId,
@@ -834,7 +1169,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
             latency: 0,
             baseURL: this.baseURL ?? '',
             params: body,
-            httpStatus
+            httpStatus,
             usage: {
               inputTokens: 0,
               outputTokens: 0
@@ -877,6 +1212,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
         }
         return result;
       }, async error => {
+        const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
         await sendEventToPosthog({
           client: this.phClient,
           distinctId: posthogDistinctId,
@@ -888,7 +1224,7 @@ class WrappedCompletions extends AzureOpenAI.Chat.Completions {
           latency: 0,
           baseURL: this.baseURL ?? '',
           params: body,
-          httpStatus
+          httpStatus,
           usage: {
             inputTokens: 0,
             outputTokens: 0
@@ -965,6 +1301,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
           captureImmediate: posthogCaptureImmediate
         });
       } catch (error) {
+        const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
         await sendEventToPosthog({
           client: this.phClient,
           distinctId: posthogDistinctId,
@@ -977,7 +1314,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
           latency: 0,
           baseURL: this.baseURL ?? '',
           params: body,
-          httpStatus
+          httpStatus,
           usage: {
             inputTokens: 0,
             outputTokens: 0
@@ -1020,6 +1357,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
         }
         return result;
       }, async error => {
+        const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
         await sendEventToPosthog({
           client: this.phClient,
           distinctId: posthogDistinctId,
@@ -1032,7 +1370,7 @@ class WrappedResponses extends AzureOpenAI.Responses {
           latency: 0,
           baseURL: this.baseURL ?? '',
           params: body,
-          httpStatus
+          httpStatus,
           usage: {
             inputTokens: 0,
             outputTokens: 0
@@ -1143,9 +1481,20 @@ const mapVercelPrompt = messages => {
           text: truncate(c.text)
         };
       } else if (c.type === 'file') {
+        // For file type, check if it's a data URL and redact if needed
+        let fileData;
+        const contentData = c.data;
+        if (contentData instanceof URL) {
+          fileData = contentData.toString();
+        } else if (isString(contentData)) {
+          // Redact base64 data URLs and raw base64 to prevent oversized events
+          fileData = redactBase64DataUrl(contentData);
+        } else {
+          fileData = 'raw files not supported';
+        }
         return {
           type: 'file',
-          file:
+          file: fileData,
           mediaType: c.mediaType
         };
       } else if (c.type === 'reasoning') {
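In the Vercel AI SDK prompt mapper, `file` parts are now normalised before capture: `URL` instances are stringified, string data is run through the base64 redaction, and anything else is replaced with a short marker. A sketch of the three branches with hypothetical inputs (`mapFilePart` is a name invented for this sketch, and the redaction stand-in is simplified to data URLs only):

```js
// Sketch of the file-part handling added to mapVercelPrompt above.
const redactBase64DataUrl = str =>
  /^data:([^;]+);base64,/.test(str) ? '[base64 image redacted]' : str;

const mapFilePart = c => {
  let fileData;
  if (c.data instanceof URL) {
    fileData = c.data.toString();             // remote file: keep the URL
  } else if (typeof c.data === 'string') {
    fileData = redactBase64DataUrl(c.data);   // inline data: redact base64 payloads
  } else {
    fileData = 'raw files not supported';     // binary buffers: replace with a marker
  }
  return { type: 'file', file: fileData, mediaType: c.mediaType };
};

console.log(mapFilePart({ data: new URL('https://example.com/doc.pdf'), mediaType: 'application/pdf' }));
console.log(mapFilePart({ data: 'data:image/png;base64,iVBORw0KGgo=', mediaType: 'image/png' }));
console.log(mapFilePart({ data: new Uint8Array([1, 2, 3]), mediaType: 'image/png' }));
```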
@@ -1244,11 +1593,10 @@ const mapVercelOutput = result => {
       if (item.data instanceof URL) {
         fileData = item.data.toString();
       } else if (typeof item.data === 'string') {
-
-
+        fileData = redactBase64DataUrl(item.data);
+        // If not redacted and still large, replace with size indicator
+        if (fileData === item.data && item.data.length > 1000) {
           fileData = `[${item.mediaType} file - ${item.data.length} bytes]`;
-      } else {
-        fileData = item.data;
         }
       } else {
         fileData = `[binary ${item.mediaType} file]`;
@@ -1389,6 +1737,8 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       const provider = options.posthogProviderOverride ?? extractProvider(model);
       const availableTools = extractAvailableToolCalls('vercel', params);
       const baseURL = ''; // cannot currently get baseURL from vercel
+      // Map to track in-progress tool calls
+      const toolCallsInProgress = new Map();
       try {
         const {
           stream,
@@ -1403,6 +1753,34 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           if (chunk.type === 'reasoning-delta') {
             reasoningText += chunk.delta; // New in v5
           }
+          // Handle tool call chunks
+          if (chunk.type === 'tool-input-start') {
+            // Initialize a new tool call
+            toolCallsInProgress.set(chunk.id, {
+              toolCallId: chunk.id,
+              toolName: chunk.toolName,
+              input: ''
+            });
+          }
+          if (chunk.type === 'tool-input-delta') {
+            // Accumulate tool call arguments
+            const toolCall = toolCallsInProgress.get(chunk.id);
+            if (toolCall) {
+              toolCall.input += chunk.delta;
+            }
+          }
+          if (chunk.type === 'tool-input-end') {
+            // Tool call is complete, keep it in the map for final processing
+            // Nothing specific to do here, the tool call is already complete
+          }
+          if (chunk.type === 'tool-call') {
+            // Direct tool call chunk (complete tool call)
+            toolCallsInProgress.set(chunk.toolCallId, {
+              toolCallId: chunk.toolCallId,
+              toolName: chunk.toolName,
+              input: chunk.input
+            });
+          }
           if (chunk.type === 'finish') {
             const providerMetadata = chunk.providerMetadata;
             const additionalTokenValues = {
@@ -1436,6 +1814,19 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
               text: truncate(generatedText)
             });
           }
+          // Add completed tool calls to content
+          for (const toolCall of toolCallsInProgress.values()) {
+            if (toolCall.toolName) {
+              content.push({
+                type: 'tool-call',
+                id: toolCall.toolCallId,
+                function: {
+                  name: toolCall.toolName,
+                  arguments: toolCall.input
+                }
+              });
+            }
+          }
           // Structure output like mapVercelOutput does
           const output = content.length > 0 ? [{
             role: 'assistant',
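The Vercel middleware's stream handling now watches four chunk types: `tool-input-start` opens an entry, `tool-input-delta` appends argument text, `tool-input-end` is a no-op, and a complete `tool-call` chunk writes the entry wholesale; completed entries are then pushed into the captured output. A sketch driving that state machine with hand-written chunks (`handleChunk` is a name for this sketch only):

```js
// Sketch of the tool-call chunk handling added to the Vercel middleware above.
const toolCallsInProgress = new Map();

const handleChunk = chunk => {
  if (chunk.type === 'tool-input-start') {
    toolCallsInProgress.set(chunk.id, { toolCallId: chunk.id, toolName: chunk.toolName, input: '' });
  }
  if (chunk.type === 'tool-input-delta') {
    const call = toolCallsInProgress.get(chunk.id);
    if (call) call.input += chunk.delta;
  }
  // 'tool-input-end' needs no work: the entry is already complete.
  if (chunk.type === 'tool-call') {
    toolCallsInProgress.set(chunk.toolCallId, { toolCallId: chunk.toolCallId, toolName: chunk.toolName, input: chunk.input });
  }
};

[
  { type: 'tool-input-start', id: 'tc_1', toolName: 'get_weather' },
  { type: 'tool-input-delta', id: 'tc_1', delta: '{"city":"Paris"}' },
  { type: 'tool-input-end', id: 'tc_1' }
].forEach(handleChunk);

console.log([...toolCallsInProgress.values()]);
// -> [{ toolCallId: 'tc_1', toolName: 'get_weather', input: '{"city":"Paris"}' }]
```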
@@ -1538,6 +1929,9 @@ class WrappedMessages extends AnthropicOriginal.Messages {
       if (anthropicParams.stream) {
         return parentPromise.then(value => {
           let accumulatedContent = '';
+          const contentBlocks = [];
+          const toolsInProgress = new Map();
+          let currentTextBlock = null;
           const usage = {
             inputTokens: 0,
             outputTokens: 0,
@@ -1549,10 +1943,70 @@ class WrappedMessages extends AnthropicOriginal.Messages {
           (async () => {
             try {
               for await (const chunk of stream1) {
+                // Handle content block start events
+                if (chunk.type === 'content_block_start') {
+                  if (chunk.content_block?.type === 'text') {
+                    currentTextBlock = {
+                      type: 'text',
+                      text: ''
+                    };
+                    contentBlocks.push(currentTextBlock);
+                  } else if (chunk.content_block?.type === 'tool_use') {
+                    const toolBlock = {
+                      type: 'function',
+                      id: chunk.content_block.id,
+                      function: {
+                        name: chunk.content_block.name,
+                        arguments: {}
+                      }
+                    };
+                    contentBlocks.push(toolBlock);
+                    toolsInProgress.set(chunk.content_block.id, {
+                      block: toolBlock,
+                      inputString: ''
+                    });
+                    currentTextBlock = null;
+                  }
+                }
+                // Handle text delta events
                 if ('delta' in chunk) {
                   if ('text' in chunk.delta) {
                     const delta = chunk?.delta?.text ?? '';
                     accumulatedContent += delta;
+                    if (currentTextBlock) {
+                      currentTextBlock.text += delta;
+                    }
+                  }
+                }
+                // Handle tool input delta events
+                if (chunk.type === 'content_block_delta' && chunk.delta?.type === 'input_json_delta') {
+                  const block = chunk.index !== undefined ? contentBlocks[chunk.index] : undefined;
+                  const toolId = block?.type === 'function' ? block.id : undefined;
+                  if (toolId && toolsInProgress.has(toolId)) {
+                    const tool = toolsInProgress.get(toolId);
+                    if (tool) {
+                      tool.inputString += chunk.delta.partial_json || '';
+                    }
+                  }
+                }
+                // Handle content block stop events
+                if (chunk.type === 'content_block_stop') {
+                  currentTextBlock = null;
+                  // Parse accumulated tool input
+                  if (chunk.index !== undefined) {
+                    const block = contentBlocks[chunk.index];
+                    if (block?.type === 'function' && block.id && toolsInProgress.has(block.id)) {
+                      const tool = toolsInProgress.get(block.id);
+                      if (tool) {
+                        try {
+                          block.function.arguments = JSON.parse(tool.inputString);
+                        } catch (e) {
+                          // Keep empty object if parsing fails
+                          console.error('Error parsing tool input:', e);
+                        }
+                      }
+                      toolsInProgress.delete(block.id);
+                    }
                   }
                 }
                 if (chunk.type == 'message_start') {
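The Anthropic wrapper now rebuilds the full content-block list from the stream: `content_block_start` opens a text or tool block, text and `input_json_delta` events append to the current block, and `content_block_stop` parses the accumulated JSON arguments. A compact sketch over hand-written events in the same shape as the Anthropic stream events handled above:

```js
// Sketch of the Anthropic stream handling added above, run over illustrative events.
const contentBlocks = [];
const toolsInProgress = new Map();

const events = [
  { type: 'content_block_start', index: 0, content_block: { type: 'tool_use', id: 'toolu_1', name: 'get_weather' } },
  { type: 'content_block_delta', index: 0, delta: { type: 'input_json_delta', partial_json: '{"city":' } },
  { type: 'content_block_delta', index: 0, delta: { type: 'input_json_delta', partial_json: '"Paris"}' } },
  { type: 'content_block_stop', index: 0 }
];

for (const chunk of events) {
  if (chunk.type === 'content_block_start' && chunk.content_block?.type === 'tool_use') {
    const toolBlock = { type: 'function', id: chunk.content_block.id, function: { name: chunk.content_block.name, arguments: {} } };
    contentBlocks.push(toolBlock);
    toolsInProgress.set(chunk.content_block.id, { block: toolBlock, inputString: '' });
  }
  if (chunk.type === 'content_block_delta' && chunk.delta?.type === 'input_json_delta') {
    const block = contentBlocks[chunk.index];
    const tool = block?.type === 'function' ? toolsInProgress.get(block.id) : undefined;
    if (tool) tool.inputString += chunk.delta.partial_json || '';
  }
  if (chunk.type === 'content_block_stop') {
    const block = contentBlocks[chunk.index];
    if (block?.type === 'function' && toolsInProgress.has(block.id)) {
      try {
        block.function.arguments = JSON.parse(toolsInProgress.get(block.id).inputString);
      } catch {
        // leave arguments as {} if the accumulated JSON is incomplete
      }
      toolsInProgress.delete(block.id);
    }
  }
}

console.log(JSON.stringify(contentBlocks, null, 2));
// -> one function block whose arguments were parsed into { city: 'Paris' }
```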
@@ -1566,17 +2020,25 @@ class WrappedMessages extends AnthropicOriginal.Messages {
               }
               const latency = (Date.now() - startTime) / 1000;
               const availableTools = extractAvailableToolCalls('anthropic', anthropicParams);
+              // Format output to match non-streaming version
+              const formattedOutput = contentBlocks.length > 0 ? [{
+                role: 'assistant',
+                content: contentBlocks
+              }] : [{
+                role: 'assistant',
+                content: [{
+                  type: 'text',
+                  text: accumulatedContent
+                }]
+              }];
               await sendEventToPosthog({
                 client: this.phClient,
                 distinctId: posthogDistinctId,
                 traceId,
                 model: anthropicParams.model,
                 provider: 'anthropic',
-                input: mergeSystemPrompt(anthropicParams, 'anthropic'),
-                output: [{
-                  content: accumulatedContent,
-                  role: 'assistant'
-                }],
+                input: sanitizeAnthropic(mergeSystemPrompt(anthropicParams, 'anthropic')),
+                output: formattedOutput,
                 latency,
                 baseURL: this.baseURL ?? '',
                 params: body,
@@ -1593,7 +2055,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
                 traceId,
                 model: anthropicParams.model,
                 provider: 'anthropic',
-                input: mergeSystemPrompt(anthropicParams),
+                input: sanitizeAnthropic(mergeSystemPrompt(anthropicParams)),
                 output: [],
                 latency: 0,
                 baseURL: this.baseURL ?? '',
@@ -1625,7 +2087,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
           traceId,
           model: anthropicParams.model,
           provider: 'anthropic',
-          input: mergeSystemPrompt(anthropicParams),
+          input: sanitizeAnthropic(mergeSystemPrompt(anthropicParams)),
           output: formatResponseAnthropic(result),
           latency,
           baseURL: this.baseURL ?? '',
@@ -1649,7 +2111,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
           traceId,
           model: anthropicParams.model,
           provider: 'anthropic',
-          input: sanitizeAnthropic(mergeSystemPrompt(anthropicParams)),
+          input: sanitizeAnthropic(mergeSystemPrompt(anthropicParams)),
           output: [],
           latency: 0,
           baseURL: this.baseURL ?? '',
@@ -1701,23 +2163,24 @@ class WrappedModels {
       const response = await this.client.models.generateContent(geminiParams);
       const latency = (Date.now() - startTime) / 1000;
       const availableTools = extractAvailableToolCalls('gemini', geminiParams);
+      const metadata = response.usageMetadata;
       await sendEventToPosthog({
         client: this.phClient,
         distinctId: posthogDistinctId,
         traceId,
         model: geminiParams.model,
         provider: 'gemini',
-        input: this.
+        input: this.formatInputForPostHog(geminiParams.contents),
         output: formatResponseGemini(response),
         latency,
         baseURL: 'https://generativelanguage.googleapis.com',
         params: params,
         httpStatus: 200,
         usage: {
-          inputTokens:
-          outputTokens:
-          reasoningTokens:
-          cacheReadInputTokens:
+          inputTokens: metadata?.promptTokenCount ?? 0,
+          outputTokens: metadata?.candidatesTokenCount ?? 0,
+          reasoningTokens: metadata?.thoughtsTokenCount ?? 0,
+          cacheReadInputTokens: metadata?.cachedContentTokenCount ?? 0
         },
         tools: availableTools,
         captureImmediate: posthogCaptureImmediate
@@ -1731,7 +2194,7 @@ class WrappedModels {
         traceId,
         model: geminiParams.model,
         provider: 'gemini',
-        input: this.
+        input: this.formatInputForPostHog(geminiParams.contents),
         output: [],
         latency,
         baseURL: 'https://generativelanguage.googleapis.com',
@@ -1759,7 +2222,7 @@ class WrappedModels {
     } = params;
     const traceId = posthogTraceId ?? v4();
     const startTime = Date.now();
-
+    const accumulatedContent = [];
     let usage = {
       inputTokens: 0,
       outputTokens: 0
@@ -1767,32 +2230,74 @@ class WrappedModels {
     try {
       const stream = await this.client.models.generateContentStream(geminiParams);
       for await (const chunk of stream) {
+        // Handle text content
         if (chunk.text) {
-
+          // Find if we already have a text item to append to
+          let lastTextItem;
+          for (let i = accumulatedContent.length - 1; i >= 0; i--) {
+            if (accumulatedContent[i].type === 'text') {
+              lastTextItem = accumulatedContent[i];
+              break;
+            }
+          }
+          if (lastTextItem && lastTextItem.type === 'text') {
+            lastTextItem.text += chunk.text;
+          } else {
+            accumulatedContent.push({
+              type: 'text',
+              text: chunk.text
+            });
+          }
         }
+        // Handle function calls from candidates
+        if (chunk.candidates && Array.isArray(chunk.candidates)) {
+          for (const candidate of chunk.candidates) {
+            if (candidate.content && candidate.content.parts) {
+              for (const part of candidate.content.parts) {
+                // Type-safe check for functionCall
+                if ('functionCall' in part) {
+                  const funcCall = part.functionCall;
+                  if (funcCall?.name) {
+                    accumulatedContent.push({
+                      type: 'function',
+                      function: {
+                        name: funcCall.name,
+                        arguments: funcCall.args || {}
+                      }
+                    });
+                  }
+                }
+              }
+            }
+          }
+        }
+        // Update usage metadata - handle both old and new field names
         if (chunk.usageMetadata) {
+          const metadata = chunk.usageMetadata;
           usage = {
-            inputTokens:
-            outputTokens:
-            reasoningTokens:
-            cacheReadInputTokens:
+            inputTokens: metadata.promptTokenCount ?? 0,
+            outputTokens: metadata.candidatesTokenCount ?? 0,
+            reasoningTokens: metadata.thoughtsTokenCount ?? 0,
+            cacheReadInputTokens: metadata.cachedContentTokenCount ?? 0
           };
         }
         yield chunk;
       }
       const latency = (Date.now() - startTime) / 1000;
       const availableTools = extractAvailableToolCalls('gemini', geminiParams);
+      // Format output similar to formatResponseGemini
+      const output = accumulatedContent.length > 0 ? [{
+        role: 'assistant',
+        content: accumulatedContent
+      }] : [];
       await sendEventToPosthog({
         client: this.phClient,
         distinctId: posthogDistinctId,
         traceId,
         model: geminiParams.model,
         provider: 'gemini',
-        input: this.
-        output: [{
-          content: accumulatedContent,
-          role: 'assistant'
-        }],
+        input: this.formatInputForPostHog(geminiParams.contents),
+        output,
         latency,
         baseURL: 'https://generativelanguage.googleapis.com',
         params: params,
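Both the streaming and non-streaming Gemini paths now read token counts from `usageMetadata` with explicit field names and `?? 0` fallbacks. The mapping is small; a sketch with a hypothetical metadata object (`toUsage` is a name for this sketch, the bundle inlines the object literal):

```js
// Sketch of the usageMetadata → usage mapping used in both Gemini paths above.
const toUsage = metadata => ({
  inputTokens: metadata?.promptTokenCount ?? 0,
  outputTokens: metadata?.candidatesTokenCount ?? 0,
  reasoningTokens: metadata?.thoughtsTokenCount ?? 0,
  cacheReadInputTokens: metadata?.cachedContentTokenCount ?? 0
});

console.log(toUsage({ promptTokenCount: 12, candidatesTokenCount: 40 }));
// -> { inputTokens: 12, outputTokens: 40, reasoningTokens: 0, cacheReadInputTokens: 0 }
console.log(toUsage(undefined));
// -> all zeros
```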
@@ -1809,7 +2314,7 @@ class WrappedModels {
         traceId,
         model: geminiParams.model,
         provider: 'gemini',
-        input: this.
+        input: this.formatInputForPostHog(geminiParams.contents),
         output: [],
         latency,
         baseURL: 'https://generativelanguage.googleapis.com',
@@ -1842,16 +2347,28 @@ class WrappedModels {
         };
       }
       if (item && typeof item === 'object') {
-
+        const obj = item;
+        if ('text' in obj && obj.text) {
+          return {
+            role: obj.role || 'user',
+            content: obj.text
+          };
+        }
+        if ('content' in obj && obj.content) {
           return {
-          role:
-          content:
+            role: obj.role || 'user',
+            content: obj.content
           };
         }
-        if (
+        if ('parts' in obj && Array.isArray(obj.parts)) {
           return {
-          role:
-          content:
+            role: obj.role || 'user',
+            content: obj.parts.map(part => {
+              if (part && typeof part === 'object' && 'text' in part) {
+                return part.text;
+              }
+              return part;
+            })
           };
         }
       }
@@ -1862,16 +2379,17 @@ class WrappedModels {
       });
     }
     if (contents && typeof contents === 'object') {
-
+      const obj = contents;
+      if ('text' in obj && obj.text) {
         return [{
           role: 'user',
-          content:
+          content: obj.text
         }];
       }
-      if (
+      if ('content' in obj && obj.content) {
         return [{
           role: 'user',
-          content:
+          content: obj.content
         }];
       }
     }
@@ -1880,6 +2398,10 @@ class WrappedModels {
       content: String(contents)
     }];
   }
+  formatInputForPostHog(contents) {
+    const sanitized = sanitizeGemini(contents);
+    return this.formatInput(sanitized);
+  }
 }

 function getDefaultExportFromCjs (x) {
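`formatInputForPostHog` is a thin wrapper: it sanitizes the Gemini `contents` (redacting `inlineData` payloads) and then reuses the existing `formatInput` normalisation. A sketch of the effect on a single content item with an inline image part; `sanitizeGeminiPart` here is a simplified stand-in for the bundle-internal helper shown earlier in this diff:

```js
// Sketch of what the sanitization step inside formatInputForPostHog does to a Gemini content item.
const REDACTED_IMAGE_PLACEHOLDER = '[base64 image redacted]';
const sanitizeGeminiPart = part =>
  part && typeof part === 'object' && part.inlineData?.data
    ? { ...part, inlineData: { ...part.inlineData, data: REDACTED_IMAGE_PLACEHOLDER } }
    : part;

const item = {
  role: 'user',
  parts: [
    { text: 'What is in this picture?' },
    { inlineData: { mimeType: 'image/png', data: 'iVBORw0KGgoAAAANSUhEUg==' } }
  ]
};

const sanitized = { ...item, parts: item.parts.map(sanitizeGeminiPart) };
console.log(JSON.stringify(sanitized, null, 2));
// The text part is untouched; the inline image data is replaced with the placeholder.
```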
@@ -2571,7 +3093,7 @@ class LangChainCallbackHandler extends BaseCallbackHandler {
       }) || 'generation';
       const generation = {
         name: runNameFound,
-        input: messages,
+        input: sanitizeLangChain(messages),
         startTime: Date.now()
       };
       if (extraParams) {
@@ -2834,7 +3356,8 @@ class LangChainCallbackHandler extends BaseCallbackHandler {
         ...message.additional_kwargs
       };
     }
-
+    // Sanitize the message content to redact base64 images
+    return sanitizeLangChain(messageDict);
   }
   _parseUsageModel(usage) {
     const conversionList = [['promptTokens', 'input'], ['completionTokens', 'output'], ['input_tokens', 'input'], ['output_tokens', 'output'], ['prompt_token_count', 'input'], ['candidates_token_count', 'output'], ['inputTokenCount', 'input'], ['outputTokenCount', 'output'], ['input_token_count', 'input'], ['generated_token_count', 'output']];