@posthog/ai 6.1.0 → 6.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/anthropic/index.cjs +61 -4
- package/dist/anthropic/index.cjs.map +1 -1
- package/dist/anthropic/index.mjs +61 -4
- package/dist/anthropic/index.mjs.map +1 -1
- package/dist/gemini/index.cjs +59 -4
- package/dist/gemini/index.cjs.map +1 -1
- package/dist/gemini/index.d.ts +1 -0
- package/dist/gemini/index.mjs +59 -4
- package/dist/gemini/index.mjs.map +1 -1
- package/dist/index.cjs +244 -25
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +1 -0
- package/dist/index.mjs +244 -25
- package/dist/index.mjs.map +1 -1
- package/dist/langchain/index.cjs +132 -2
- package/dist/langchain/index.cjs.map +1 -1
- package/dist/langchain/index.mjs +132 -2
- package/dist/langchain/index.mjs.map +1 -1
- package/dist/openai/index.cjs +133 -10
- package/dist/openai/index.cjs.map +1 -1
- package/dist/openai/index.mjs +133 -10
- package/dist/openai/index.mjs.map +1 -1
- package/dist/vercel/index.cjs +65 -5
- package/dist/vercel/index.cjs.map +1 -1
- package/dist/vercel/index.mjs +65 -5
- package/dist/vercel/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs
CHANGED
|
@@ -350,6 +350,204 @@ const sendEventToPosthog = async ({
|
|
|
350
350
|
}
|
|
351
351
|
};
|
|
352
352
|
|
|
353
|
+
// Type guards for safer type checking

/** Returns true when `value` is a primitive string. */
const isString = (value) => typeof value === 'string';

/** Returns true for plain objects only — rejects null and arrays. */
const isObject = (value) =>
  value !== null && typeof value === 'object' && !Array.isArray(value);
|
|
360
|
+
|
|
361
|
+
const REDACTED_IMAGE_PLACEHOLDER = '[base64 image redacted]';
// ============================================
// Base64 Detection Helpers
// ============================================

/**
 * Returns true when `str` is a base64 data URL.
 * Accepts optional media-type parameters before the base64 marker
 * (e.g. `data:text/plain;charset=utf-8;base64,...`). The previous pattern
 * `/^data:([^;]+);base64,/` required `;base64,` immediately after the MIME
 * type, so parameterized data URLs were not detected here — and because
 * `data:` URLs parse as valid absolute URLs, `isRawBase64` skipped them
 * too, letting their payloads escape redaction entirely.
 */
const isBase64DataUrl = str => {
  return /^data:[^,]*;base64,/.test(str);
};

/**
 * Returns true when `str` parses as an absolute URL, or looks like a
 * relative URL / filesystem path (starts with `/`, `./`, or `../`).
 */
const isValidUrl = str => {
  try {
    new URL(str);
    return true;
  } catch {
    // Not an absolute URL, check if it's a relative URL or path
    return str.startsWith('/') || str.startsWith('./') || str.startsWith('../');
  }
};

/**
 * Heuristic for raw (un-prefixed) base64 payloads — some providers
 * (e.g. Vercel) send inline images as bare base64 strings.
 */
const isRawBase64 = str => {
  // Skip if it's a valid URL or path
  if (isValidUrl(str)) {
    return false;
  }
  // Check if it's a valid base64 string.
  // Base64 images are typically at least a few hundred chars, but we'll be
  // conservative: short strings are never treated as base64.
  return str.length > 20 && /^[A-Za-z0-9+/]+=*$/.test(str);
};
|
|
386
|
+
/**
 * Replaces a base64 image payload (data URL or raw base64) with the
 * redaction placeholder; all other values pass through untouched.
 */
function redactBase64DataUrl(str) {
  if (!isString(str)) {
    return str;
  }
  // Data URL format, or raw base64 (Vercel sends raw base64 for inline images)
  const shouldRedact = isBase64DataUrl(str) || isRawBase64(str);
  return shouldRedact ? REDACTED_IMAGE_PLACEHOLDER : str;
}
|
|
398
|
+
/**
 * Walks a message (or array of messages) and applies `transformContent`
 * to every content item, preserving the surrounding message structure.
 * Content may be a string (returned as-is), a single object, or an array.
 */
const processMessages = (messages, transformContent) => {
  if (!messages) return messages;

  const mapContent = (content) => {
    // Strings and falsy content are left untouched.
    if (typeof content === 'string' || !content) {
      return content;
    }
    return Array.isArray(content)
      ? content.map(transformContent)
      : transformContent(content); // single object content
  };

  const mapMessage = (msg) => {
    if (!isObject(msg) || !('content' in msg)) {
      return msg;
    }
    return { ...msg, content: mapContent(msg.content) };
  };

  // Handle both arrays and single messages
  return Array.isArray(messages) ? messages.map(mapMessage) : mapMessage(messages);
};
|
|
422
|
+
// ============================================
// Provider-Specific Image Sanitizers
// ============================================

/**
 * Redacts base64 payloads in OpenAI chat-completions image items
 * ({ type: 'image_url', image_url: { url } }). Other items pass through.
 */
const sanitizeOpenAIImage = (item) => {
  if (!isObject(item)) {
    return item;
  }
  // Handle image_url format
  const hasImageUrl =
    item.type === 'image_url' &&
    'image_url' in item &&
    isObject(item.image_url) &&
    'url' in item.image_url;
  if (!hasImageUrl) {
    return item;
  }
  const sanitizedImageUrl = {
    ...item.image_url,
    url: redactBase64DataUrl(item.image_url.url),
  };
  return { ...item, image_url: sanitizedImageUrl };
};
|
|
439
|
+
/**
 * Redacts base64 payloads in OpenAI Responses-API image items
 * ({ type: 'input_image', image_url: string }). Other items pass through.
 */
const sanitizeOpenAIResponseImage = (item) => {
  if (!isObject(item)) {
    return item;
  }
  // Handle input_image format
  if (item.type !== 'input_image' || !('image_url' in item)) {
    return item;
  }
  return { ...item, image_url: redactBase64DataUrl(item.image_url) };
};
|
|
450
|
+
/**
 * Redacts Anthropic-format inline images
 * ({ type: 'image', source: { type: 'base64', data } }).
 * Non-base64 sources and other items pass through unchanged.
 */
const sanitizeAnthropicImage = (item) => {
  if (!isObject(item)) {
    return item;
  }
  // Handle Anthropic's image format
  const isBase64ImageSource =
    item.type === 'image' &&
    'source' in item &&
    isObject(item.source) &&
    item.source.type === 'base64' &&
    'data' in item.source;
  if (!isBase64ImageSource) {
    return item;
  }
  return {
    ...item,
    source: { ...item.source, data: REDACTED_IMAGE_PLACEHOLDER },
  };
};
|
|
464
|
+
/**
 * Redacts Gemini inline data parts ({ inlineData: { data } }).
 * Parts without inline data pass through unchanged.
 */
const sanitizeGeminiPart = (part) => {
  if (!isObject(part)) {
    return part;
  }
  // Handle Gemini's inline data format
  const hasInlineData =
    'inlineData' in part && isObject(part.inlineData) && 'data' in part.inlineData;
  if (!hasInlineData) {
    return part;
  }
  return {
    ...part,
    inlineData: { ...part.inlineData, data: REDACTED_IMAGE_PLACEHOLDER },
  };
};
|
|
478
|
+
/**
 * Sanitizes a Gemini content item by running every entry of its `parts`
 * (array or single value) through sanitizeGeminiPart. Items without
 * parts pass through unchanged.
 */
const processGeminiItem = (item) => {
  if (!isObject(item)) {
    return item;
  }
  if (!('parts' in item) || !item.parts) {
    return item;
  }
  const sanitizedParts = Array.isArray(item.parts)
    ? item.parts.map(sanitizeGeminiPart)
    : sanitizeGeminiPart(item.parts);
  return { ...item, parts: sanitizedParts };
};
|
|
490
|
+
/**
 * Redacts base64 image payloads in LangChain content items, which may use
 * any of several provider formats. Dispatches on `item.type`; within
 * 'image', the direct `data` field is checked before the Anthropic-style
 * `source.data` form, preserving the original first-match order.
 */
const sanitizeLangChainImage = (item) => {
  if (!isObject(item)) {
    return item;
  }
  switch (item.type) {
    case 'image_url':
      // OpenAI style: { type: 'image_url', image_url: { url } }
      if ('image_url' in item && isObject(item.image_url) && 'url' in item.image_url) {
        return {
          ...item,
          image_url: {
            ...item.image_url,
            url: redactBase64DataUrl(item.image_url.url),
          },
        };
      }
      return item;
    case 'image':
      // Direct image with data field
      if ('data' in item) {
        return { ...item, data: redactBase64DataUrl(item.data) };
      }
      // Anthropic style: { type: 'image', source: { data } }
      if ('source' in item && isObject(item.source) && 'data' in item.source) {
        return {
          ...item,
          source: {
            ...item.source,
            data: redactBase64DataUrl(item.source.data),
          },
        };
      }
      return item;
    case 'media':
      // Google style: { type: 'media', data }
      if ('data' in item) {
        return { ...item, data: redactBase64DataUrl(item.data) };
      }
      return item;
    default:
      return item;
  }
};
|
|
528
|
+
// Export individual sanitizers for tree-shaking

/** Sanitizes OpenAI chat-completions messages. */
const sanitizeOpenAI = (data) => processMessages(data, sanitizeOpenAIImage);

/** Sanitizes OpenAI Responses-API input items. */
const sanitizeOpenAIResponse = (data) => processMessages(data, sanitizeOpenAIResponseImage);

/** Sanitizes Anthropic messages. */
const sanitizeAnthropic = (data) => processMessages(data, sanitizeAnthropicImage);

/**
 * Sanitizes Gemini contents. Gemini carries 'parts' directly on each item
 * instead of 'content', so it needs custom processing rather than
 * processMessages.
 */
const sanitizeGemini = (data) => {
  if (!data) return data;
  return Array.isArray(data) ? data.map(processGeminiItem) : processGeminiItem(data);
};

/** Sanitizes LangChain messages (multiple provider content formats). */
const sanitizeLangChain = (data) => processMessages(data, sanitizeLangChainImage);
|
|
550
|
+
|
|
353
551
|
const Chat = OpenAI.Chat;
|
|
354
552
|
const Completions = Chat.Completions;
|
|
355
553
|
const Responses = OpenAI.Responses;
|
|
@@ -422,7 +620,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
|
|
|
422
620
|
traceId,
|
|
423
621
|
model: openAIParams.model,
|
|
424
622
|
provider: 'openai',
|
|
425
|
-
input: openAIParams.messages,
|
|
623
|
+
input: sanitizeOpenAI(openAIParams.messages),
|
|
426
624
|
output: [{
|
|
427
625
|
content: accumulatedContent,
|
|
428
626
|
role: 'assistant'
|
|
@@ -442,7 +640,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
|
|
|
442
640
|
traceId,
|
|
443
641
|
model: openAIParams.model,
|
|
444
642
|
provider: 'openai',
|
|
445
|
-
input: openAIParams.messages,
|
|
643
|
+
input: sanitizeOpenAI(openAIParams.messages),
|
|
446
644
|
output: [],
|
|
447
645
|
latency: 0,
|
|
448
646
|
baseURL: this.baseURL ?? '',
|
|
@@ -474,7 +672,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
|
|
|
474
672
|
traceId,
|
|
475
673
|
model: openAIParams.model,
|
|
476
674
|
provider: 'openai',
|
|
477
|
-
input: openAIParams.messages,
|
|
675
|
+
input: sanitizeOpenAI(openAIParams.messages),
|
|
478
676
|
output: formatResponseOpenAI(result),
|
|
479
677
|
latency,
|
|
480
678
|
baseURL: this.baseURL ?? '',
|
|
@@ -498,7 +696,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
|
|
|
498
696
|
traceId,
|
|
499
697
|
model: openAIParams.model,
|
|
500
698
|
provider: 'openai',
|
|
501
|
-
input: openAIParams.messages,
|
|
699
|
+
input: sanitizeOpenAI(openAIParams.messages),
|
|
502
700
|
output: [],
|
|
503
701
|
latency: 0,
|
|
504
702
|
baseURL: this.baseURL ?? '',
|
|
@@ -571,7 +769,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
|
|
|
571
769
|
//@ts-expect-error
|
|
572
770
|
model: openAIParams.model,
|
|
573
771
|
provider: 'openai',
|
|
574
|
-
input: openAIParams.input,
|
|
772
|
+
input: sanitizeOpenAIResponse(openAIParams.input),
|
|
575
773
|
output: finalContent,
|
|
576
774
|
latency,
|
|
577
775
|
baseURL: this.baseURL ?? '',
|
|
@@ -589,7 +787,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
|
|
|
589
787
|
//@ts-expect-error
|
|
590
788
|
model: openAIParams.model,
|
|
591
789
|
provider: 'openai',
|
|
592
|
-
input: openAIParams.input,
|
|
790
|
+
input: sanitizeOpenAIResponse(openAIParams.input),
|
|
593
791
|
output: [],
|
|
594
792
|
latency: 0,
|
|
595
793
|
baseURL: this.baseURL ?? '',
|
|
@@ -621,7 +819,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
|
|
|
621
819
|
//@ts-expect-error
|
|
622
820
|
model: openAIParams.model,
|
|
623
821
|
provider: 'openai',
|
|
624
|
-
input: openAIParams.input,
|
|
822
|
+
input: sanitizeOpenAIResponse(openAIParams.input),
|
|
625
823
|
output: formatResponseOpenAI({
|
|
626
824
|
output: result.output
|
|
627
825
|
}),
|
|
@@ -648,7 +846,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
|
|
|
648
846
|
//@ts-expect-error
|
|
649
847
|
model: openAIParams.model,
|
|
650
848
|
provider: 'openai',
|
|
651
|
-
input: openAIParams.input,
|
|
849
|
+
input: sanitizeOpenAIResponse(openAIParams.input),
|
|
652
850
|
output: [],
|
|
653
851
|
latency: 0,
|
|
654
852
|
baseURL: this.baseURL ?? '',
|
|
@@ -696,7 +894,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
|
|
|
696
894
|
//@ts-expect-error
|
|
697
895
|
model: openAIParams.model,
|
|
698
896
|
provider: 'openai',
|
|
699
|
-
input: openAIParams.input,
|
|
897
|
+
input: sanitizeOpenAIResponse(openAIParams.input),
|
|
700
898
|
output: result.output,
|
|
701
899
|
latency,
|
|
702
900
|
baseURL: this.baseURL ?? '',
|
|
@@ -719,7 +917,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
|
|
|
719
917
|
//@ts-expect-error
|
|
720
918
|
model: openAIParams.model,
|
|
721
919
|
provider: 'openai',
|
|
722
|
-
input: openAIParams.input,
|
|
920
|
+
input: sanitizeOpenAIResponse(openAIParams.input),
|
|
723
921
|
output: [],
|
|
724
922
|
latency: 0,
|
|
725
923
|
baseURL: this.baseURL ?? '',
|
|
@@ -1143,9 +1341,20 @@ const mapVercelPrompt = messages => {
|
|
|
1143
1341
|
text: truncate(c.text)
|
|
1144
1342
|
};
|
|
1145
1343
|
} else if (c.type === 'file') {
|
|
1344
|
+
// For file type, check if it's a data URL and redact if needed
|
|
1345
|
+
let fileData;
|
|
1346
|
+
const contentData = c.data;
|
|
1347
|
+
if (contentData instanceof URL) {
|
|
1348
|
+
fileData = contentData.toString();
|
|
1349
|
+
} else if (isString(contentData)) {
|
|
1350
|
+
// Redact base64 data URLs and raw base64 to prevent oversized events
|
|
1351
|
+
fileData = redactBase64DataUrl(contentData);
|
|
1352
|
+
} else {
|
|
1353
|
+
fileData = 'raw files not supported';
|
|
1354
|
+
}
|
|
1146
1355
|
return {
|
|
1147
1356
|
type: 'file',
|
|
1148
|
-
file:
|
|
1357
|
+
file: fileData,
|
|
1149
1358
|
mediaType: c.mediaType
|
|
1150
1359
|
};
|
|
1151
1360
|
} else if (c.type === 'reasoning') {
|
|
@@ -1244,11 +1453,10 @@ const mapVercelOutput = result => {
|
|
|
1244
1453
|
if (item.data instanceof URL) {
|
|
1245
1454
|
fileData = item.data.toString();
|
|
1246
1455
|
} else if (typeof item.data === 'string') {
|
|
1247
|
-
|
|
1248
|
-
|
|
1456
|
+
fileData = redactBase64DataUrl(item.data);
|
|
1457
|
+
// If not redacted and still large, replace with size indicator
|
|
1458
|
+
if (fileData === item.data && item.data.length > 1000) {
|
|
1249
1459
|
fileData = `[${item.mediaType} file - ${item.data.length} bytes]`;
|
|
1250
|
-
} else {
|
|
1251
|
-
fileData = item.data;
|
|
1252
1460
|
}
|
|
1253
1461
|
} else {
|
|
1254
1462
|
fileData = `[binary ${item.mediaType} file]`;
|
|
@@ -1572,7 +1780,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
|
|
|
1572
1780
|
traceId,
|
|
1573
1781
|
model: anthropicParams.model,
|
|
1574
1782
|
provider: 'anthropic',
|
|
1575
|
-
input: mergeSystemPrompt(anthropicParams, 'anthropic'),
|
|
1783
|
+
input: sanitizeAnthropic(mergeSystemPrompt(anthropicParams, 'anthropic')),
|
|
1576
1784
|
output: [{
|
|
1577
1785
|
content: accumulatedContent,
|
|
1578
1786
|
role: 'assistant'
|
|
@@ -1593,7 +1801,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
|
|
|
1593
1801
|
traceId,
|
|
1594
1802
|
model: anthropicParams.model,
|
|
1595
1803
|
provider: 'anthropic',
|
|
1596
|
-
input: mergeSystemPrompt(anthropicParams),
|
|
1804
|
+
input: sanitizeAnthropic(mergeSystemPrompt(anthropicParams)),
|
|
1597
1805
|
output: [],
|
|
1598
1806
|
latency: 0,
|
|
1599
1807
|
baseURL: this.baseURL ?? '',
|
|
@@ -1625,7 +1833,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
|
|
|
1625
1833
|
traceId,
|
|
1626
1834
|
model: anthropicParams.model,
|
|
1627
1835
|
provider: 'anthropic',
|
|
1628
|
-
input: mergeSystemPrompt(anthropicParams),
|
|
1836
|
+
input: sanitizeAnthropic(mergeSystemPrompt(anthropicParams)),
|
|
1629
1837
|
output: formatResponseAnthropic(result),
|
|
1630
1838
|
latency,
|
|
1631
1839
|
baseURL: this.baseURL ?? '',
|
|
@@ -1649,7 +1857,7 @@ class WrappedMessages extends AnthropicOriginal.Messages {
|
|
|
1649
1857
|
traceId,
|
|
1650
1858
|
model: anthropicParams.model,
|
|
1651
1859
|
provider: 'anthropic',
|
|
1652
|
-
input: mergeSystemPrompt(anthropicParams),
|
|
1860
|
+
input: sanitizeAnthropic(mergeSystemPrompt(anthropicParams)),
|
|
1653
1861
|
output: [],
|
|
1654
1862
|
latency: 0,
|
|
1655
1863
|
baseURL: this.baseURL ?? '',
|
|
@@ -1707,7 +1915,7 @@ class WrappedModels {
|
|
|
1707
1915
|
traceId,
|
|
1708
1916
|
model: geminiParams.model,
|
|
1709
1917
|
provider: 'gemini',
|
|
1710
|
-
input: this.
|
|
1918
|
+
input: this.formatInputForPostHog(geminiParams.contents),
|
|
1711
1919
|
output: formatResponseGemini(response),
|
|
1712
1920
|
latency,
|
|
1713
1921
|
baseURL: 'https://generativelanguage.googleapis.com',
|
|
@@ -1731,7 +1939,7 @@ class WrappedModels {
|
|
|
1731
1939
|
traceId,
|
|
1732
1940
|
model: geminiParams.model,
|
|
1733
1941
|
provider: 'gemini',
|
|
1734
|
-
input: this.
|
|
1942
|
+
input: this.formatInputForPostHog(geminiParams.contents),
|
|
1735
1943
|
output: [],
|
|
1736
1944
|
latency,
|
|
1737
1945
|
baseURL: 'https://generativelanguage.googleapis.com',
|
|
@@ -1788,7 +1996,7 @@ class WrappedModels {
|
|
|
1788
1996
|
traceId,
|
|
1789
1997
|
model: geminiParams.model,
|
|
1790
1998
|
provider: 'gemini',
|
|
1791
|
-
input: this.
|
|
1999
|
+
input: this.formatInputForPostHog(geminiParams.contents),
|
|
1792
2000
|
output: [{
|
|
1793
2001
|
content: accumulatedContent,
|
|
1794
2002
|
role: 'assistant'
|
|
@@ -1809,7 +2017,7 @@ class WrappedModels {
|
|
|
1809
2017
|
traceId,
|
|
1810
2018
|
model: geminiParams.model,
|
|
1811
2019
|
provider: 'gemini',
|
|
1812
|
-
input: this.
|
|
2020
|
+
input: this.formatInputForPostHog(geminiParams.contents),
|
|
1813
2021
|
output: [],
|
|
1814
2022
|
latency,
|
|
1815
2023
|
baseURL: 'https://generativelanguage.googleapis.com',
|
|
@@ -1854,6 +2062,12 @@ class WrappedModels {
|
|
|
1854
2062
|
content: item.content
|
|
1855
2063
|
};
|
|
1856
2064
|
}
|
|
2065
|
+
if (item.parts) {
|
|
2066
|
+
return {
|
|
2067
|
+
role: item.role || 'user',
|
|
2068
|
+
content: item.parts.map(part => part.text ? part.text : part)
|
|
2069
|
+
};
|
|
2070
|
+
}
|
|
1857
2071
|
}
|
|
1858
2072
|
return {
|
|
1859
2073
|
role: 'user',
|
|
@@ -1880,6 +2094,10 @@ class WrappedModels {
|
|
|
1880
2094
|
content: String(contents)
|
|
1881
2095
|
}];
|
|
1882
2096
|
}
|
|
2097
|
+
formatInputForPostHog(contents) {
|
|
2098
|
+
const sanitized = sanitizeGemini(contents);
|
|
2099
|
+
return this.formatInput(sanitized);
|
|
2100
|
+
}
|
|
1883
2101
|
}
|
|
1884
2102
|
|
|
1885
2103
|
function getDefaultExportFromCjs (x) {
|
|
@@ -2571,7 +2789,7 @@ class LangChainCallbackHandler extends BaseCallbackHandler {
|
|
|
2571
2789
|
}) || 'generation';
|
|
2572
2790
|
const generation = {
|
|
2573
2791
|
name: runNameFound,
|
|
2574
|
-
input: messages,
|
|
2792
|
+
input: sanitizeLangChain(messages),
|
|
2575
2793
|
startTime: Date.now()
|
|
2576
2794
|
};
|
|
2577
2795
|
if (extraParams) {
|
|
@@ -2834,7 +3052,8 @@ class LangChainCallbackHandler extends BaseCallbackHandler {
|
|
|
2834
3052
|
...message.additional_kwargs
|
|
2835
3053
|
};
|
|
2836
3054
|
}
|
|
2837
|
-
|
|
3055
|
+
// Sanitize the message content to redact base64 images
|
|
3056
|
+
return sanitizeLangChain(messageDict);
|
|
2838
3057
|
}
|
|
2839
3058
|
_parseUsageModel(usage) {
|
|
2840
3059
|
const conversionList = [['promptTokens', 'input'], ['completionTokens', 'output'], ['input_tokens', 'input'], ['output_tokens', 'output'], ['prompt_token_count', 'input'], ['candidates_token_count', 'output'], ['inputTokenCount', 'input'], ['outputTokenCount', 'output'], ['input_token_count', 'input'], ['generated_token_count', 'output']];
|