@posthog/ai 5.2.2 → 5.2.3
This diff reflects the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and shows the changes between the two published versions.
- package/LICENSE +245 -0
- package/{lib → dist}/anthropic/index.cjs +7 -12
- package/{lib → dist}/anthropic/index.cjs.map +1 -1
- package/{lib → dist}/anthropic/index.mjs +4 -5
- package/{lib → dist}/anthropic/index.mjs.map +1 -1
- package/{lib → dist}/gemini/index.cjs +1 -1
- package/{lib → dist}/gemini/index.cjs.map +1 -1
- package/{lib → dist}/gemini/index.mjs.map +1 -1
- package/{lib → dist}/index.cjs +547 -479
- package/dist/index.cjs.map +1 -0
- package/{lib → dist}/index.mjs +530 -456
- package/dist/index.mjs.map +1 -0
- package/{lib → dist}/langchain/index.cjs +150 -110
- package/dist/langchain/index.cjs.map +1 -0
- package/{lib → dist}/langchain/index.mjs +147 -104
- package/dist/langchain/index.mjs.map +1 -0
- package/{lib → dist}/openai/index.cjs +7 -1
- package/dist/openai/index.cjs.map +1 -0
- package/{lib → dist}/openai/index.mjs +6 -0
- package/dist/openai/index.mjs.map +1 -0
- package/{lib → dist}/vercel/index.cjs +0 -2
- package/{lib → dist}/vercel/index.cjs.map +1 -1
- package/{lib → dist}/vercel/index.mjs.map +1 -1
- package/package.json +42 -33
- package/CHANGELOG.md +0 -89
- package/index.ts +0 -1
- package/lib/index.cjs.map +0 -1
- package/lib/index.mjs.map +0 -1
- package/lib/langchain/index.cjs.map +0 -1
- package/lib/langchain/index.mjs.map +0 -1
- package/lib/openai/index.cjs.map +0 -1
- package/lib/openai/index.mjs.map +0 -1
- package/src/anthropic/index.ts +0 -211
- package/src/gemini/index.ts +0 -254
- package/src/index.ts +0 -13
- package/src/langchain/callbacks.ts +0 -640
- package/src/langchain/index.ts +0 -1
- package/src/openai/azure.ts +0 -481
- package/src/openai/index.ts +0 -498
- package/src/utils.ts +0 -287
- package/src/vercel/index.ts +0 -1
- package/src/vercel/middleware.ts +0 -393
- package/tests/callbacks.test.ts +0 -48
- package/tests/gemini.test.ts +0 -344
- package/tests/openai.test.ts +0 -403
- package/tsconfig.json +0 -10
- /package/{lib → dist}/anthropic/index.d.ts +0 -0
- /package/{lib → dist}/gemini/index.d.ts +0 -0
- /package/{lib → dist}/gemini/index.mjs +0 -0
- /package/{lib → dist}/index.d.ts +0 -0
- /package/{lib → dist}/langchain/index.d.ts +0 -0
- /package/{lib → dist}/openai/index.d.ts +0 -0
- /package/{lib → dist}/vercel/index.d.ts +0 -0
- /package/{lib → dist}/vercel/index.mjs +0 -0
package/tests/callbacks.test.ts
DELETED
@@ -1,48 +0,0 @@
-import { LangChainCallbackHandler } from '../src/langchain/callbacks'
-import { PostHog } from 'posthog-node'
-import { AIMessage } from '@langchain/core/messages'
-
-const mockPostHogClient = {
-  capture: jest.fn(),
-} as unknown as PostHog
-
-describe('LangChainCallbackHandler', () => {
-  let handler: LangChainCallbackHandler
-
-  beforeEach(() => {
-    handler = new LangChainCallbackHandler({
-      client: mockPostHogClient,
-    })
-    jest.clearAllMocks()
-  })
-
-  it('should convert AIMessage with tool calls to dict format', () => {
-    const toolCalls = [
-      {
-        id: 'call_123',
-        name: 'get_weather',
-        args: { city: 'San Francisco', units: 'celsius' },
-      },
-    ]
-
-    const aiMessage = new AIMessage({
-      content: "I'll check the weather for you.",
-      tool_calls: toolCalls,
-    })
-
-    const result = (handler as any)._convertMessageToDict(aiMessage)
-
-    expect(result.role).toBe('assistant')
-    expect(result.content).toBe("I'll check the weather for you.")
-    expect(result.tool_calls).toEqual([
-      {
-        type: 'function',
-        id: 'call_123',
-        function: {
-          name: 'get_weather',
-          arguments: '{"city":"San Francisco","units":"celsius"}',
-        },
-      },
-    ])
-  })
-})
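The removed test above pinned down one behavior: a LangChain-style tool call { id, name, args } is converted into an OpenAI-style tool_call entry whose arguments are JSON-stringified. A minimal standalone sketch of that mapping, using a hypothetical helper for illustration rather than the package's actual _convertMessageToDict, is:

// Illustrative sketch only; `convertToolCall` is a hypothetical helper mirroring
// the expectation asserted in the deleted test above.
type LangChainToolCall = { id: string; name: string; args: Record<string, unknown> }

function convertToolCall(call: LangChainToolCall) {
  return {
    type: 'function' as const,
    id: call.id,
    function: {
      name: call.name,
      // args object is serialized to a JSON string, matching the test's expectation
      arguments: JSON.stringify(call.args),
    },
  }
}

// convertToolCall({ id: 'call_123', name: 'get_weather', args: { city: 'San Francisco', units: 'celsius' } })
// -> { type: 'function', id: 'call_123', function: { name: 'get_weather', arguments: '{"city":"San Francisco","units":"celsius"}' } }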
package/tests/gemini.test.ts
DELETED
@@ -1,344 +0,0 @@
-import { PostHog } from 'posthog-node'
-import PostHogGemini from '../src/gemini'
-
-let mockGeminiResponse: any = {}
-let mockGeminiStreamResponse: any = {}
-
-jest.mock('posthog-node', () => {
-  return {
-    PostHog: jest.fn().mockImplementation(() => {
-      return {
-        capture: jest.fn(),
-        captureImmediate: jest.fn(),
-        privacyMode: false,
-      }
-    }),
-  }
-})
-
-jest.mock('@google/genai', () => {
-  class MockGoogleGenAI {
-    models: any
-    constructor() {
-      this.models = {
-        generateContent: jest.fn(),
-        generateContentStream: jest.fn(),
-      }
-    }
-  }
-
-  return {
-    GoogleGenAI: MockGoogleGenAI,
-  }
-})
-
-describe('PostHogGemini - Jest test suite', () => {
-  let mockPostHogClient: PostHog
-  let client: PostHogGemini
-
-  beforeAll(() => {
-    if (!process.env.GEMINI_API_KEY) {
-      console.warn('⚠️ Skipping Gemini tests: No GEMINI_API_KEY environment variable set')
-    }
-  })
-
-  beforeEach(() => {
-    // Skip all tests if no API key is present
-    if (!process.env.GEMINI_API_KEY) {
-      return
-    }
-
-    jest.clearAllMocks()
-
-    // Reset the default mocks
-    mockPostHogClient = new (PostHog as any)()
-    client = new PostHogGemini({
-      apiKey: process.env.GEMINI_API_KEY || '',
-      posthog: mockPostHogClient as any,
-    })
-
-    // Some default generate content mock
-    mockGeminiResponse = {
-      text: 'Hello from Gemini!',
-      candidates: [
-        {
-          content: {
-            parts: [
-              {
-                text: 'Hello from Gemini!',
-              },
-            ],
-          },
-          finishReason: 'STOP',
-        },
-      ],
-      usageMetadata: {
-        promptTokenCount: 15,
-        candidatesTokenCount: 8,
-        totalTokenCount: 23,
-      },
-    }
-
-    // Mock streaming response
-    mockGeminiStreamResponse = [
-      {
-        text: 'Hello ',
-        candidates: [
-          {
-            content: {
-              parts: [{ text: 'Hello ' }],
-            },
-          },
-        ],
-        usageMetadata: {
-          promptTokenCount: 15,
-          candidatesTokenCount: 2,
-        },
-      },
-      {
-        text: 'from ',
-        candidates: [
-          {
-            content: {
-              parts: [{ text: 'from ' }],
-            },
-          },
-        ],
-        usageMetadata: {
-          promptTokenCount: 15,
-          candidatesTokenCount: 4,
-        },
-      },
-      {
-        text: 'Gemini!',
-        candidates: [
-          {
-            content: {
-              parts: [{ text: 'Gemini!' }],
-            },
-          },
-        ],
-        usageMetadata: {
-          promptTokenCount: 15,
-          candidatesTokenCount: 8,
-        },
-      },
-    ]
-
-    // Mock the generateContent method
-    ;(client as any).client.models.generateContent = jest.fn().mockResolvedValue(mockGeminiResponse)
-
-    // Mock the generateContentStream method
-    ;(client as any).client.models.generateContentStream = jest.fn().mockImplementation(async function* () {
-      for (const chunk of mockGeminiStreamResponse) {
-        yield chunk
-      }
-    })
-  })
-
-  // Wrap each test with conditional skip
-  const conditionalTest = process.env.GEMINI_API_KEY ? test : test.skip
-
-  conditionalTest('basic content generation', async () => {
-    const response = await client.models.generateContent({
-      model: 'gemini-2.0-flash-001',
-      contents: 'Hello',
-      posthogDistinctId: 'test-id',
-      posthogProperties: { foo: 'bar' },
-    })
-
-    expect(response).toEqual(mockGeminiResponse)
-    // We expect 1 capture call
-    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
-    // Check the capture arguments
-    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
-    const { distinctId, event, properties } = captureArgs[0]
-
-    expect(distinctId).toBe('test-id')
-    expect(event).toBe('$ai_generation')
-    expect(properties['$ai_provider']).toBe('gemini')
-    expect(properties['$ai_model']).toBe('gemini-2.0-flash-001')
-    expect(properties['$ai_input']).toEqual([{ role: 'user', content: 'Hello' }])
-    expect(properties['$ai_output_choices']).toEqual([{ role: 'assistant', content: 'Hello from Gemini!' }])
-    expect(properties['$ai_input_tokens']).toBe(15)
-    expect(properties['$ai_output_tokens']).toBe(8)
-    expect(properties['$ai_http_status']).toBe(200)
-    expect(properties['foo']).toBe('bar')
-    expect(typeof properties['$ai_latency']).toBe('number')
-  })
-
-  conditionalTest('streaming content generation', async () => {
-    const stream = client.models.generateContentStream({
-      model: 'gemini-2.0-flash-001',
-      contents: 'Write a short poem',
-      posthogDistinctId: 'test-id',
-      posthogProperties: { foo: 'bar' },
-    })
-
-    let accumulatedText = ''
-    for await (const chunk of stream) {
-      if (chunk.text) {
-        accumulatedText += chunk.text
-      }
-    }
-
-    expect(accumulatedText).toBe('Hello from Gemini!')
-    // We expect 1 capture call after streaming completes
-    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
-
-    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
-    const { distinctId, event, properties } = captureArgs[0]
-
-    expect(distinctId).toBe('test-id')
-    expect(event).toBe('$ai_generation')
-    expect(properties['$ai_provider']).toBe('gemini')
-    expect(properties['$ai_model']).toBe('gemini-2.0-flash-001')
-    expect(properties['$ai_input']).toEqual([{ role: 'user', content: 'Write a short poem' }])
-    expect(properties['$ai_output_choices']).toEqual([{ content: 'Hello from Gemini!', role: 'assistant' }])
-    expect(properties['$ai_input_tokens']).toBe(15)
-    expect(properties['$ai_output_tokens']).toBe(8)
-    expect(properties['$ai_http_status']).toBe(200)
-    expect(properties['foo']).toBe('bar')
-    expect(typeof properties['$ai_latency']).toBe('number')
-  })
-
-  conditionalTest('groups', async () => {
-    await client.models.generateContent({
-      model: 'gemini-2.0-flash-001',
-      contents: 'Hello',
-      posthogDistinctId: 'test-id',
-      posthogGroups: { team: 'ai-team' },
-    })
-
-    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
-    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
-    const { groups } = captureArgs[0]
-
-    expect(groups).toEqual({ team: 'ai-team' })
-  })
-
-  conditionalTest('privacy mode', async () => {
-    await client.models.generateContent({
-      model: 'gemini-2.0-flash-001',
-      contents: 'Sensitive information',
-      posthogDistinctId: 'test-id',
-      posthogPrivacyMode: true,
-    })
-
-    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
-    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
-    const { properties } = captureArgs[0]
-
-    expect(properties['$ai_input']).toBeNull()
-    expect(properties['$ai_output_choices']).toBeNull()
-  })
-
-  conditionalTest('error handling', async () => {
-    const error = new Error('API Error')
-    ;(error as any).status = 400
-    ;(client as any).client.models.generateContent = jest.fn().mockRejectedValue(error)
-
-    await expect(
-      client.models.generateContent({
-        model: 'gemini-2.0-flash-001',
-        contents: 'Hello',
-        posthogDistinctId: 'test-id',
-      })
-    ).rejects.toThrow('API Error')
-
-    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
-    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
-    const { properties } = captureArgs[0]
-
-    expect(properties['$ai_is_error']).toBe(true)
-    expect(properties['$ai_http_status']).toBe(400)
-    expect(properties['$ai_input_tokens']).toBe(0)
-    expect(properties['$ai_output_tokens']).toBe(0)
-  })
-
-  conditionalTest('array contents input', async () => {
-    await client.models.generateContent({
-      model: 'gemini-2.0-flash-001',
-      contents: ['Hello', 'How are you?'],
-      posthogDistinctId: 'test-id',
-    })
-
-    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
-    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
-    const { properties } = captureArgs[0]
-
-    expect(properties['$ai_input']).toEqual([
-      { role: 'user', content: 'Hello' },
-      { role: 'user', content: 'How are you?' },
-    ])
-  })
-
-  conditionalTest('object contents input', async () => {
-    await client.models.generateContent({
-      model: 'gemini-2.0-flash-001',
-      contents: { text: 'Hello world' },
-      posthogDistinctId: 'test-id',
-    })
-
-    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
-    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
-    const { properties } = captureArgs[0]
-
-    expect(properties['$ai_input']).toEqual([{ role: 'user', content: 'Hello world' }])
-  })
-
-  conditionalTest('capture immediate', async () => {
-    await client.models.generateContent({
-      model: 'gemini-2.0-flash-001',
-      contents: 'Hello',
-      posthogDistinctId: 'test-id',
-      posthogCaptureImmediate: true,
-    })
-
-    expect(mockPostHogClient.captureImmediate).toHaveBeenCalledTimes(1)
-    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(0)
-  })
-
-  conditionalTest('vertex ai configuration', () => {
-    const vertexClient = new PostHogGemini({
-      vertexai: true,
-      project: 'test-project',
-      location: 'us-central1',
-      posthog: mockPostHogClient as any,
-    })
-
-    expect(vertexClient).toBeInstanceOf(PostHogGemini)
-    expect(vertexClient.models).toBeDefined()
-  })
-
-  conditionalTest('anonymous user - $process_person_profile set to false', async () => {
-    await client.models.generateContent({
-      model: 'gemini-2.0-flash-001',
-      contents: 'Hello',
-      posthogTraceId: 'trace-123',
-    })
-
-    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
-    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
-    const { distinctId, properties } = captureArgs[0]
-
-    expect(distinctId).toBe('trace-123')
-    expect(properties['$process_person_profile']).toBe(false)
-  })
-
-  conditionalTest('identified user - $process_person_profile not set', async () => {
-    await client.models.generateContent({
-      model: 'gemini-2.0-flash-001',
-      contents: 'Hello',
-      posthogDistinctId: 'user-456',
-      posthogTraceId: 'trace-123',
-    })
-
-    expect(mockPostHogClient.capture).toHaveBeenCalledTimes(1)
-    const [captureArgs] = (mockPostHogClient.capture as jest.Mock).mock.calls
-    const { distinctId, properties } = captureArgs[0]
-
-    expect(distinctId).toBe('user-456')
-    expect(properties['$process_person_profile']).toBeUndefined()
-  })
-})
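Taken together, the removed Gemini tests document how the wrapper is driven. A rough usage sketch reconstructed solely from those tests follows; the import path and PostHog initialization are assumptions, since neither appears in this diff.

// Sketch based only on the deleted tests above; '@posthog/ai/gemini' as the import
// path and the PostHog constructor argument are assumptions for illustration.
import { PostHog } from 'posthog-node'
import PostHogGemini from '@posthog/ai/gemini' // assumed subpath export

const posthog = new PostHog('<project-api-key>')
const gemini = new PostHogGemini({
  apiKey: process.env.GEMINI_API_KEY || '',
  posthog,
})

// Per the assertions above, each call emits one $ai_generation capture carrying
// $ai_provider, $ai_model, $ai_input, $ai_output_choices, token counts and latency.
const response = await gemini.models.generateContent({
  model: 'gemini-2.0-flash-001',
  contents: 'Hello',
  posthogDistinctId: 'user-456',      // omitted -> trace id is used and $process_person_profile is false
  posthogTraceId: 'trace-123',
  posthogProperties: { foo: 'bar' },  // merged into the capture's properties
  // posthogPrivacyMode: true,        // nulls $ai_input / $ai_output_choices
  // posthogCaptureImmediate: true,   // uses captureImmediate instead of capture
})
console.log(response.text)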