@posthog/ai 4.0.1 → 4.2.0
- package/CHANGELOG.md +8 -0
- package/lib/anthropic/index.cjs.js +32 -25
- package/lib/anthropic/index.cjs.js.map +1 -1
- package/lib/anthropic/index.d.ts +1 -0
- package/lib/anthropic/index.esm.js +32 -25
- package/lib/anthropic/index.esm.js.map +1 -1
- package/lib/index.cjs.js +131 -55
- package/lib/index.cjs.js.map +1 -1
- package/lib/index.d.ts +2 -0
- package/lib/index.esm.js +131 -55
- package/lib/index.esm.js.map +1 -1
- package/lib/langchain/index.cjs.js.map +1 -1
- package/lib/langchain/index.esm.js.map +1 -1
- package/lib/openai/index.cjs.js +32 -25
- package/lib/openai/index.cjs.js.map +1 -1
- package/lib/openai/index.d.ts +1 -0
- package/lib/openai/index.esm.js +32 -25
- package/lib/openai/index.esm.js.map +1 -1
- package/lib/posthog-ai/src/utils.d.ts +5 -1
- package/lib/posthog-ai/src/vercel/middleware.d.ts +2 -0
- package/lib/vercel/index.cjs.js +132 -55
- package/lib/vercel/index.cjs.js.map +1 -1
- package/lib/vercel/index.d.ts +1 -0
- package/lib/vercel/index.esm.js +132 -55
- package/lib/vercel/index.esm.js.map +1 -1
- package/package.json +1 -1
- package/src/utils.ts +48 -20
- package/src/vercel/middleware.ts +68 -19
package/lib/index.d.ts
CHANGED
@@ -23,6 +23,7 @@ interface MonitoringParams {
     posthogModelOverride?: string;
     posthogProviderOverride?: string;
     posthogCostOverride?: CostOverride;
+    fullDebug?: boolean;
 }
 interface CostOverride {
     inputCost: number;
@@ -94,6 +95,7 @@ interface ClientOptions {
     posthogModelOverride?: string;
     posthogProviderOverride?: string;
     posthogCostOverride?: CostOverride;
+    fullDebug?: boolean;
 }
 declare const wrapVercelLanguageModel: (model: LanguageModelV1, phClient: PostHog, options: ClientOptions) => LanguageModelV1;
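The new optional `fullDebug` flag on `MonitoringParams` and `ClientOptions` makes the wrapper log the full event payload before it is captured (see the `sendEventToPosthog` changes below). A minimal usage sketch — the import paths, the PostHog host, and the `openai('gpt-4o-mini')` model are assumptions about a typical setup, not taken from this diff:

```ts
// Sketch only: entry point, host and model choice are assumed, not shown in this diff.
import { PostHog } from 'posthog-node'
import { openai } from '@ai-sdk/openai'
import { generateText } from 'ai'
import { wrapVercelLanguageModel } from '@posthog/ai' // declared in lib/index.d.ts above

const phClient = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' })

const model = wrapVercelLanguageModel(openai('gpt-4o-mini'), phClient, {
  fullDebug: true, // newly added flag: logs the $ai_generation properties before capture
})

const { text } = await generateText({ model, prompt: 'Hello!' })
console.log(text)
await phClient.shutdown()
```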
package/lib/index.esm.js
CHANGED
@@ -5,6 +5,9 @@ import { Buffer } from 'buffer';
 import { experimental_wrapLanguageModel } from 'ai';
 import AnthropicOriginal from '@anthropic-ai/sdk';
 
+// limit large outputs by truncating to 200kb (approx 200k bytes)
+const MAX_OUTPUT_SIZE = 200000;
+const STRING_FORMAT = 'utf8';
 const getModelParams = params => {
   if (!params) {
     return {};
@@ -60,13 +63,26 @@ const mergeSystemPrompt = (params, provider) => {
 const withPrivacyMode = (client, privacyMode, input) => {
   return client.privacy_mode || privacyMode ? null : input;
 };
+const truncate = str => {
+  try {
+    const buffer = Buffer.from(str, STRING_FORMAT);
+    if (buffer.length <= MAX_OUTPUT_SIZE) {
+      return str;
+    }
+    const truncatedBuffer = buffer.slice(0, MAX_OUTPUT_SIZE);
+    return `${truncatedBuffer.toString(STRING_FORMAT)}... [truncated]`;
+  } catch (error) {
+    console.error('Error truncating, likely not a string');
+    return str;
+  }
+};
 function sanitizeValues(obj) {
   if (obj === undefined || obj === null) {
     return obj;
   }
   const jsonSafe = JSON.parse(JSON.stringify(obj));
   if (typeof jsonSafe === 'string') {
-    return Buffer.from(jsonSafe,
+    return Buffer.from(jsonSafe, STRING_FORMAT).toString(STRING_FORMAT);
   } else if (Array.isArray(jsonSafe)) {
     return jsonSafe.map(sanitizeValues);
   } else if (jsonSafe && typeof jsonSafe === 'object') {
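The new `truncate` helper caps strings at `MAX_OUTPUT_SIZE` UTF-8 bytes rather than characters, appending a `... [truncated]` marker. A standalone sketch (mirroring the helper above, not importing it) of what that means for multi-byte text:

```ts
// Standalone sketch of the byte-based cap; Buffer lengths count UTF-8 bytes, not characters.
import { Buffer } from 'buffer'

const MAX_OUTPUT_SIZE = 200000

const truncate = (str: string): string => {
  const buffer = Buffer.from(str, 'utf8')
  if (buffer.length <= MAX_OUTPUT_SIZE) {
    return str
  }
  return `${buffer.subarray(0, MAX_OUTPUT_SIZE).toString('utf8')}... [truncated]`
}

const big = 'é'.repeat(300000) // 300,000 characters, 600,000 bytes
console.log(truncate(big).length) // ≈ 100,015: the first 100,000 'é' plus the suffix
```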
@@ -89,7 +105,8 @@ const sendEventToPosthog = ({
   usage = {},
   isError = false,
   error,
-  tools
+  tools,
+  fullDebug = false
 }) => {
   if (client.capture) {
     // sanitize input and output for UTF-8 validity
@@ -124,32 +141,37 @@ const sendEventToPosthog = ({
         $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens
       } : {})
     };
+    const properties = {
+      $ai_provider: params.posthogProviderOverride ?? provider,
+      $ai_model: params.posthogModelOverride ?? model,
+      $ai_model_parameters: getModelParams(params),
+      $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),
+      $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),
+      $ai_http_status: httpStatus,
+      $ai_input_tokens: usage.inputTokens ?? 0,
+      $ai_output_tokens: usage.outputTokens ?? 0,
+      ...additionalTokenValues,
+      $ai_latency: latency,
+      $ai_trace_id: traceId,
+      $ai_base_url: baseURL,
+      ...params.posthogProperties,
+      ...(distinctId ? {} : {
+        $process_person_profile: false
+      }),
+      ...(tools ? {
+        $ai_tools: tools
+      } : {}),
+      ...errorData,
+      ...costOverrideData
+    };
+    if (fullDebug) {
+      // @ts-ignore
+      console.log('Sending event to PostHog', properties);
+    }
     client.capture({
       distinctId: distinctId ?? traceId,
       event: '$ai_generation',
-      properties: {
-        $ai_provider: params.posthogProviderOverride ?? provider,
-        $ai_model: params.posthogModelOverride ?? model,
-        $ai_model_parameters: getModelParams(params),
-        $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),
-        $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),
-        $ai_http_status: httpStatus,
-        $ai_input_tokens: usage.inputTokens ?? 0,
-        $ai_output_tokens: usage.outputTokens ?? 0,
-        ...additionalTokenValues,
-        $ai_latency: latency,
-        $ai_trace_id: traceId,
-        $ai_base_url: baseURL,
-        ...params.posthogProperties,
-        ...(distinctId ? {} : {
-          $process_person_profile: false
-        }),
-        ...(tools ? {
-          $ai_tools: tools
-        } : {}),
-        ...errorData,
-        ...costOverrideData
-      },
+      properties,
       groups: params.posthogGroups
     });
   }
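With the payload now built as a named `properties` object, setting `fullDebug` logs exactly what will be sent. Roughly what a captured `$ai_generation` event looks like — the values below are made up for illustration; the keys come from the code above:

```ts
// Illustrative payload shape only; values are invented.
const examplePayload = {
  distinctId: 'user_123', // distinctId ?? traceId
  event: '$ai_generation',
  properties: {
    $ai_provider: 'openai',
    $ai_model: 'gpt-4o-mini',
    $ai_input_tokens: 42,
    $ai_output_tokens: 7,
    $ai_latency: 1.23,
    $ai_trace_id: 'trace_abc',
    // plus $ai_model_parameters, $ai_input, $ai_output_choices, $ai_http_status,
    // $ai_base_url, any posthogProperties, $ai_tools, error and cost-override data
  },
  groups: undefined, // params.posthogGroups when provided
}
console.log('Sending event to PostHog', examplePayload.properties) // what fullDebug prints
```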
@@ -488,14 +510,27 @@ const mapVercelParams = params => {
   };
 };
 const mapVercelPrompt = prompt => {
-
+  // normalize single inputs into an array of messages
+  let promptsArray;
+  if (typeof prompt === 'string') {
+    promptsArray = [{
+      role: 'user',
+      content: prompt
+    }];
+  } else if (!Array.isArray(prompt)) {
+    promptsArray = [prompt];
+  } else {
+    promptsArray = prompt;
+  }
+  // Map and truncate individual content
+  const inputs = promptsArray.map(p => {
     let content = {};
     if (Array.isArray(p.content)) {
       content = p.content.map(c => {
         if (c.type === 'text') {
           return {
             type: 'text',
-            content: c.text
+            content: truncate(c.text)
           };
         } else if (c.type === 'image') {
           return {
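`mapVercelPrompt` now accepts a bare string or a single message object in addition to an array of messages. A standalone sketch of that normalization step (the helper name here is mine, not the package's):

```ts
// Hypothetical helper name; the logic mirrors the normalization added above.
type VercelMessage = { role: string; content: unknown }

const toMessageArray = (prompt: string | VercelMessage | VercelMessage[]): VercelMessage[] => {
  if (typeof prompt === 'string') {
    return [{ role: 'user', content: prompt }]
  }
  if (!Array.isArray(prompt)) {
    return [prompt]
  }
  return prompt
}

console.log(toMessageArray('What is PostHog?'))
// → [ { role: 'user', content: 'What is PostHog?' } ]
```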
@@ -541,7 +576,7 @@ const mapVercelPrompt = prompt => {
     } else {
       content = {
         type: 'text',
-        text: p.content
+        text: truncate(p.content)
       };
     }
     return {
@@ -549,48 +584,85 @@ const mapVercelPrompt = prompt => {
       content
     };
   });
+  try {
+    // Trim the inputs array until its JSON size fits within MAX_OUTPUT_SIZE
+    let serialized = JSON.stringify(inputs);
+    while (Buffer.byteLength(serialized, 'utf8') > MAX_OUTPUT_SIZE && inputs.length > 0) {
+      // Remove oldest message
+      inputs.shift();
+      // add blank message to beginning of array
+      inputs.unshift({
+        role: 'assistant',
+        content: '[removed message due to size limit]'
+      });
+      serialized = JSON.stringify(inputs);
+    }
+  } catch (error) {
+    console.error('Error stringifying inputs');
+    return [{
+      role: 'posthog',
+      content: 'An error occurred while processing your request. Please try again.'
+    }];
+  }
+  return inputs;
 };
 const mapVercelOutput = result => {
+  // normalize string results to object
+  const normalizedResult = typeof result === 'string' ? {
+    text: result
+  } : result;
   const output = {
-    ...(
-      text:
+    ...(normalizedResult.text ? {
+      text: normalizedResult.text
     } : {}),
-    ...(
-      object:
+    ...(normalizedResult.object ? {
+      object: normalizedResult.object
    } : {}),
-    ...(
-      reasoning:
+    ...(normalizedResult.reasoning ? {
+      reasoning: normalizedResult.reasoning
     } : {}),
-    ...(
-      response:
+    ...(normalizedResult.response ? {
+      response: normalizedResult.response
     } : {}),
-    ...(
-      finishReason:
+    ...(normalizedResult.finishReason ? {
+      finishReason: normalizedResult.finishReason
     } : {}),
-    ...(
-      usage:
+    ...(normalizedResult.usage ? {
+      usage: normalizedResult.usage
     } : {}),
-    ...(
-      warnings:
+    ...(normalizedResult.warnings ? {
+      warnings: normalizedResult.warnings
     } : {}),
-    ...(
-      toolCalls:
+    ...(normalizedResult.providerMetadata ? {
+      toolCalls: normalizedResult.providerMetadata
+    } : {}),
+    ...(normalizedResult.files ? {
+      files: normalizedResult.files.map(file => ({
+        name: file.name,
+        size: file.size,
+        type: file.type
+      }))
     } : {})
   };
-  // if text and no object or reasoning, return text
   if (output.text && !output.object && !output.reasoning) {
     return [{
-      content: output.text,
+      content: truncate(output.text),
+      role: 'assistant'
+    }];
+  }
+  // otherwise stringify and truncate
+  try {
+    const jsonOutput = JSON.stringify(output);
+    return [{
+      content: truncate(jsonOutput),
       role: 'assistant'
     }];
+  } catch (error) {
+    console.error('Error stringifying output');
+    return [];
   }
-  return [{
-    content: JSON.stringify(output),
-    role: 'assistant'
-  }];
 };
 const extractProvider = model => {
-  // vercel provider is in the format of provider.endpoint
   const provider = model.provider.toLowerCase();
   const providerName = provider.split('.')[0];
   return providerName;
@@ -646,7 +718,8 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           inputTokens: result.usage.promptTokens,
           outputTokens: result.usage.completionTokens,
           ...additionalTokenValues
-        }
+        },
+        fullDebug: options.fullDebug
       });
       return result;
     } catch (error) {
@@ -668,7 +741,8 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           outputTokens: 0
         },
         isError: true,
-        error: JSON.stringify(error)
+        error: truncate(JSON.stringify(error)),
+        fullDebug: options.fullDebug
       });
       throw error;
     }
@@ -734,7 +808,8 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
             baseURL,
             params: mergedParams,
             httpStatus: 200,
-            usage
+            usage,
+            fullDebug: options.fullDebug
           });
         }
       });
@@ -760,7 +835,8 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           outputTokens: 0
         },
         isError: true,
-        error: JSON.stringify(error)
+        error: truncate(JSON.stringify(error)),
+        fullDebug: options.fullDebug
       });
       throw error;
     }
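Throughout `createInstrumentationMiddleware`, the error payload is now byte-capped (`truncate(JSON.stringify(error))`) and `options.fullDebug` is threaded through to `sendEventToPosthog`. For context, a self-contained sketch of how a `LanguageModelV1` middleware like this one is applied with the `experimental_wrapLanguageModel` helper imported at the top of the bundle — the timing middleware below is a stand-in, not the package's actual instrumentation:

```ts
// Stand-in middleware to show the wrapping mechanism; the package builds its own
// middleware via createInstrumentationMiddleware(phClient, model, options).
import { experimental_wrapLanguageModel, type LanguageModelV1 } from 'ai'

const withTiming = (model: LanguageModelV1): LanguageModelV1 =>
  experimental_wrapLanguageModel({
    model,
    middleware: {
      wrapGenerate: async ({ doGenerate }) => {
        const start = Date.now()
        const result = await doGenerate()
        console.log(`doGenerate finished in ${Date.now() - start}ms`) // stand-in for sendEventToPosthog
        return result
      },
    },
  })
```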