@posthog/ai 5.2.2 → 6.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +245 -0
- package/{lib → dist}/anthropic/index.cjs +44 -17
- package/dist/anthropic/index.cjs.map +1 -0
- package/{lib → dist}/anthropic/index.mjs +41 -10
- package/dist/anthropic/index.mjs.map +1 -0
- package/{lib → dist}/gemini/index.cjs +68 -26
- package/dist/gemini/index.cjs.map +1 -0
- package/{lib → dist}/gemini/index.d.ts +0 -1
- package/{lib → dist}/gemini/index.mjs +67 -25
- package/dist/gemini/index.mjs.map +1 -0
- package/{lib → dist}/index.cjs +875 -601
- package/dist/index.cjs.map +1 -0
- package/{lib → dist}/index.d.ts +3 -3
- package/{lib → dist}/index.mjs +859 -579
- package/dist/index.mjs.map +1 -0
- package/{lib → dist}/langchain/index.cjs +178 -118
- package/dist/langchain/index.cjs.map +1 -0
- package/{lib → dist}/langchain/index.d.ts +1 -0
- package/{lib → dist}/langchain/index.mjs +175 -112
- package/dist/langchain/index.mjs.map +1 -0
- package/{lib → dist}/openai/index.cjs +113 -6
- package/dist/openai/index.cjs.map +1 -0
- package/{lib → dist}/openai/index.mjs +112 -5
- package/dist/openai/index.mjs.map +1 -0
- package/{lib → dist}/vercel/index.cjs +117 -82
- package/dist/vercel/index.cjs.map +1 -0
- package/{lib → dist}/vercel/index.d.ts +2 -2
- package/{lib → dist}/vercel/index.mjs +118 -81
- package/dist/vercel/index.mjs.map +1 -0
- package/package.json +45 -35
- package/CHANGELOG.md +0 -89
- package/index.ts +0 -1
- package/lib/anthropic/index.cjs.map +0 -1
- package/lib/anthropic/index.mjs.map +0 -1
- package/lib/gemini/index.cjs.map +0 -1
- package/lib/gemini/index.mjs.map +0 -1
- package/lib/index.cjs.map +0 -1
- package/lib/index.mjs.map +0 -1
- package/lib/langchain/index.cjs.map +0 -1
- package/lib/langchain/index.mjs.map +0 -1
- package/lib/openai/index.cjs.map +0 -1
- package/lib/openai/index.mjs.map +0 -1
- package/lib/vercel/index.cjs.map +0 -1
- package/lib/vercel/index.mjs.map +0 -1
- package/src/anthropic/index.ts +0 -211
- package/src/gemini/index.ts +0 -254
- package/src/index.ts +0 -13
- package/src/langchain/callbacks.ts +0 -640
- package/src/langchain/index.ts +0 -1
- package/src/openai/azure.ts +0 -481
- package/src/openai/index.ts +0 -498
- package/src/utils.ts +0 -287
- package/src/vercel/index.ts +0 -1
- package/src/vercel/middleware.ts +0 -393
- package/tests/callbacks.test.ts +0 -48
- package/tests/gemini.test.ts +0 -344
- package/tests/openai.test.ts +0 -403
- package/tsconfig.json +0 -10
- /package/{lib → dist}/anthropic/index.d.ts +0 -0
- /package/{lib → dist}/openai/index.d.ts +0 -0

package/{lib → dist}/vercel/index.mjs
CHANGED

@@ -1,4 +1,4 @@
-import {
+import { wrapLanguageModel } from 'ai';
 import { v4 } from 'uuid';
 import { Buffer } from 'buffer';
 
@@ -34,6 +34,20 @@ const truncate = str => {
     return str;
   }
 };
+
+/**
+ * Extract available tool calls from the request parameters.
+ * These are the tools provided to the LLM, not the tool calls in the response.
+ */
+const extractAvailableToolCalls = (provider, params) => {
+  {
+    // Vercel AI SDK stores tools in params.mode.tools when mode type is 'regular'
+    if (params.mode?.type === 'regular' && params.mode.tools) {
+      return params.mode.tools;
+    }
+    return null;
+  }
+};
 function sanitizeValues(obj) {
   if (obj === undefined || obj === null) {
     return obj;
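
For context, a minimal sketch (not taken from the package) of a call-options object that would satisfy the new check; the exact shape of the Vercel AI SDK call options is assumed here, not verified against this release:

// Hypothetical params object, for illustration only.
const params = {
  mode: {
    type: 'regular' as const,
    tools: [
      {
        type: 'function',
        name: 'get_weather',
        description: 'Look up current weather for a city',
        parameters: { type: 'object', properties: { city: { type: 'string' } } },
      },
    ],
  },
};

// extractAvailableToolCalls('vercel', params) returns params.mode.tools here,
// and null whenever params.mode?.type !== 'regular' or no tools array is present.
const tools = params.mode?.type === 'regular' && params.mode.tools ? params.mode.tools : null;
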
@@ -141,7 +155,7 @@ const sendEventToPosthog = async ({
 const mapVercelParams = params => {
   return {
     temperature: params.temperature,
-
+    max_output_tokens: params.maxOutputTokens,
     top_p: params.topP,
     frequency_penalty: params.frequencyPenalty,
     presence_penalty: params.presencePenalty,
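
The parameter mapping now reads maxOutputTokens (the option name used by AI SDK v5 call options) and records it as max_output_tokens. A rough sketch, with assumed example values:

// Illustrative call options as the middleware receives them from the AI SDK.
const callOptions = { temperature: 0.2, maxOutputTokens: 512, topP: 0.9, stopSequences: ['END'] };

// mapVercelParams(callOptions) then yields roughly:
const mapped = {
  temperature: 0.2,
  max_output_tokens: 512,
  top_p: 0.9,
  stop: ['END'],
};
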
@@ -149,79 +163,68 @@ const mapVercelParams = params => {
     stream: params.stream
   };
 };
-const mapVercelPrompt =
-  // normalize single inputs into an array of messages
-  let promptsArray;
-  if (typeof prompt === 'string') {
-    promptsArray = [{
-      role: 'user',
-      content: prompt
-    }];
-  } else if (!Array.isArray(prompt)) {
-    promptsArray = [prompt];
-  } else {
-    promptsArray = prompt;
-  }
-
+const mapVercelPrompt = messages => {
   // Map and truncate individual content
-  const inputs =
-    let content
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      content: {
+  const inputs = messages.map(message => {
+    let content;
+
+    // Handle system role which has string content
+    if (message.role === 'system') {
+      content = [{
+        type: 'text',
+        text: truncate(String(message.content))
+      }];
+    } else {
+      // Handle other roles which have array content
+      if (Array.isArray(message.content)) {
+        content = message.content.map(c => {
+          if (c.type === 'text') {
+            return {
+              type: 'text',
+              text: truncate(c.text)
+            };
+          } else if (c.type === 'file') {
+            return {
+              type: 'file',
               file: c.data instanceof URL ? c.data.toString() : 'raw files not supported',
-
-}
-}
-
-
-
-
+              mediaType: c.mediaType
+            };
+          } else if (c.type === 'reasoning') {
+            return {
+              type: 'reasoning',
+              text: truncate(c.reasoning)
+            };
+          } else if (c.type === 'tool-call') {
+            return {
+              type: 'tool-call',
               toolCallId: c.toolCallId,
               toolName: c.toolName,
-
-}
-}
-
-
-type: 'tool-result',
-content: {
+              input: c.input
+            };
+          } else if (c.type === 'tool-result') {
+            return {
+              type: 'tool-result',
               toolCallId: c.toolCallId,
               toolName: c.toolName,
-
+              output: c.output,
               isError: c.isError
-}
+            };
+          }
+          return {
+            type: 'text',
+            text: ''
           };
-}
-
-
-
-
-
-
-
-text: truncate(p.content)
-};
+        });
+      } else {
+        // Fallback for non-array content
+        content = [{
+          type: 'text',
+          text: truncate(String(message.content))
+        }];
+      }
     }
     return {
-      role:
+      role: message.role,
       content
     };
   });
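
To illustrate the new mapping (example data assumed, not from the package): a LanguageModelV2 message with mixed content parts is mapped part-by-part into the captured input, with text truncated and raw file data replaced by a placeholder.

// Hypothetical prompt message, for illustration only.
const message = {
  role: 'user' as const,
  content: [
    { type: 'text' as const, text: 'Summarize the attached report.' },
    { type: 'file' as const, data: new URL('https://example.com/report.pdf'), mediaType: 'application/pdf' },
  ],
};

// mapVercelPrompt([message]) would produce approximately:
// [{
//   role: 'user',
//   content: [
//     { type: 'text', text: 'Summarize the attached report.' },
//     { type: 'file', file: 'https://example.com/report.pdf', mediaType: 'application/pdf' },
//   ],
// }]
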
@@ -253,7 +256,32 @@ const mapVercelPrompt = prompt => {
   return inputs;
 };
 const mapVercelOutput = result => {
-
+  const content = [];
+  if (result.text) {
+    content.push({
+      type: 'text',
+      text: truncate(result.text)
+    });
+  }
+  if (result.toolCalls && Array.isArray(result.toolCalls)) {
+    for (const toolCall of result.toolCalls) {
+      content.push({
+        type: 'function',
+        id: toolCall.toolCallId,
+        function: {
+          name: toolCall.toolName,
+          arguments: typeof toolCall.args === 'string' ? toolCall.args : JSON.stringify(toolCall.args)
+        }
+      });
+    }
+  }
+  if (content.length > 0) {
+    return [{
+      role: 'assistant',
+      content: content.length === 1 && content[0].type === 'text' ? content[0].text : content
+    }];
+  }
+  // Fallback to original behavior for other result types TODO: check if we can remove this
   const normalizedResult = typeof result === 'string' ? {
     text: result
   } : result;
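
The output mapping now surfaces tool calls: each entry in result.toolCalls becomes an OpenAI-style function item on the captured assistant message. A sketch with assumed example data:

// Hypothetical generate result, for illustration only.
const result = {
  text: '',
  toolCalls: [{ toolCallId: 'call_1', toolName: 'get_weather', args: { city: 'Berlin' } }],
};

// mapVercelOutput(result) would return approximately:
// [{
//   role: 'assistant',
//   content: [{
//     type: 'function',
//     id: 'call_1',
//     function: { name: 'get_weather', arguments: '{"city":"Berlin"}' },
//   }],
// }]
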
@@ -264,8 +292,8 @@ const mapVercelOutput = result => {
     ...(normalizedResult.object ? {
       object: normalizedResult.object
     } : {}),
-    ...(normalizedResult.
-      reasoning: normalizedResult.
+    ...(normalizedResult.reasoningText ? {
+      reasoning: normalizedResult.reasoningText
     } : {}),
     ...(normalizedResult.response ? {
       response: normalizedResult.response
@@ -324,14 +352,14 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         ...options,
         ...mapVercelParams(params)
       };
+      const availableTools = extractAvailableToolCalls('vercel', params);
       try {
         const result = await doGenerate();
-        const latency = (Date.now() - startTime) / 1000;
         const modelId = options.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId);
         const provider = options.posthogProviderOverride ?? extractProvider(model);
         const baseURL = ''; // cannot currently get baseURL from vercel
         const content = mapVercelOutput(result);
-
+        const latency = (Date.now() - startTime) / 1000;
         const providerMetadata = result.providerMetadata;
         const additionalTokenValues = {
           ...(providerMetadata?.openai?.reasoningTokens ? {
@@ -352,19 +380,17 @@
           model: modelId,
           provider: provider,
           input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
-          output:
-          content,
-          role: 'assistant'
-          }],
+          output: content,
           latency,
           baseURL,
           params: mergedParams,
           httpStatus: 200,
           usage: {
-            inputTokens: result.usage.
-            outputTokens: result.usage.
+            inputTokens: result.usage.inputTokens,
+            outputTokens: result.usage.outputTokens,
             ...additionalTokenValues
           },
+          tools: availableTools,
           captureImmediate: options.posthogCaptureImmediate
         });
         return result;
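
Token accounting in wrapGenerate now reads result.usage.inputTokens / result.usage.outputTokens directly and folds provider metadata (OpenAI reasoning and cached-prompt tokens, Anthropic cache tokens) into the same usage object. A sketch with assumed example values:

// Hypothetical values, for illustration only.
const resultUsage = { inputTokens: 120, outputTokens: 48 };
const providerMetadata = { openai: { reasoningTokens: 16, cachedPromptTokens: 64 } };

// The usage sent to PostHog then looks roughly like:
const usage = {
  inputTokens: resultUsage.inputTokens,
  outputTokens: resultUsage.outputTokens,
  reasoningTokens: providerMetadata.openai.reasoningTokens,         // via additionalTokenValues
  cacheReadInputTokens: providerMetadata.openai.cachedPromptTokens, // via additionalTokenValues
};
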
@@ -388,6 +414,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           },
           isError: true,
           error: truncate(JSON.stringify(error)),
+          tools: availableTools,
           captureImmediate: options.posthogCaptureImmediate
         });
         throw error;
@@ -399,6 +426,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
     }) => {
       const startTime = Date.now();
       let generatedText = '';
+      let reasoningText = '';
       let usage = {};
       const mergedParams = {
         ...options,
@@ -406,7 +434,9 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
       };
       const modelId = options.posthogModelOverride ?? model.modelId;
       const provider = options.posthogProviderOverride ?? extractProvider(model);
+      const availableTools = extractAvailableToolCalls('vercel', params);
       const baseURL = ''; // cannot currently get baseURL from vercel
+
       try {
         const {
           stream,
@@ -414,13 +444,17 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         } = await doStream();
         const transformStream = new TransformStream({
           transform(chunk, controller) {
+            // Handle new v5 streaming patterns
             if (chunk.type === 'text-delta') {
-              generatedText += chunk.
+              generatedText += chunk.delta;
+            }
+            if (chunk.type === 'reasoning-delta') {
+              reasoningText += chunk.delta; // New in v5
             }
             if (chunk.type === 'finish') {
               usage = {
-                inputTokens: chunk.usage?.
-                outputTokens: chunk.usage?.
+                inputTokens: chunk.usage?.inputTokens,
+                outputTokens: chunk.usage?.outputTokens
               };
               if (chunk.providerMetadata?.openai?.reasoningTokens) {
                 usage.reasoningTokens = chunk.providerMetadata.openai.reasoningTokens;
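
The stream transform now follows the v5 part names: text-delta and reasoning-delta chunks carry a delta field that is accumulated, and the finish chunk carries usage with inputTokens/outputTokens. Illustrative chunk shapes, showing only the fields the transform reads (other fields of the real stream parts are omitted and assumed):

// Hypothetical stream parts, for illustration only.
const textPart = { type: 'text-delta' as const, delta: 'Hel' };
const reasoningPart = { type: 'reasoning-delta' as const, delta: 'Considering…' };
const finishPart = {
  type: 'finish' as const,
  usage: { inputTokens: 12, outputTokens: 34 },
  providerMetadata: { openai: { reasoningTokens: 8 } },
};

// text-delta grows generatedText, reasoning-delta grows reasoningText, and finish
// snapshots usage; flush() then joins reasoningText and generatedText for the event.
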
@@ -439,6 +473,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           },
           flush: async () => {
             const latency = (Date.now() - startTime) / 1000;
+            const outputContent = reasoningText ? `${reasoningText}\n\n${generatedText}` : generatedText;
             await sendEventToPosthog({
               client: phClient,
               distinctId: options.posthogDistinctId,
@@ -447,7 +482,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
               provider: provider,
               input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
               output: [{
-                content:
+                content: outputContent,
                 role: 'assistant'
               }],
               latency,
@@ -455,6 +490,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
               params: mergedParams,
               httpStatus: 200,
               usage,
+              tools: availableTools,
               captureImmediate: options.posthogCaptureImmediate
             });
           }
@@ -482,6 +518,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
           },
           isError: true,
           error: truncate(JSON.stringify(error)),
+          tools: availableTools,
           captureImmediate: options.posthogCaptureImmediate
         });
         throw error;
@@ -497,7 +534,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
     posthogTraceId: traceId,
     posthogDistinctId: options.posthogDistinctId
   });
-  const wrappedModel =
+  const wrappedModel = wrapLanguageModel({
     model,
     middleware
   });
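
A minimal end-to-end usage sketch, assuming wrapVercelLanguageModel is re-exported from the package's ./vercel subpath (it may be exposed under a different public name) and that an @ai-sdk provider and a posthog-node client are already set up:

import { PostHog } from 'posthog-node';
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';
import { wrapVercelLanguageModel } from '@posthog/ai/vercel'; // assumed export path/name

const phClient = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' });

// wrapLanguageModel (used in the hunk above) attaches the instrumentation middleware.
const model = wrapVercelLanguageModel(openai('gpt-4o-mini'), phClient, {
  posthogDistinctId: 'user_123',
  posthogPrivacyMode: false,
});

const { text } = await generateText({ model, prompt: 'Say hello' });
await phClient.shutdown();
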

package/dist/vercel/index.mjs.map
ADDED

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":["../../src/utils.ts","../../src/vercel/middleware.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport { Buffer } from 'buffer'\nimport OpenAIOrignal from 'openai'\nimport AnthropicOriginal from '@anthropic-ai/sdk'\nimport type { ChatCompletionTool } from 'openai/resources/chat/completions'\nimport type { Tool as GeminiTool } from '@google/genai'\nimport type { FormattedMessage, FormattedContent, TokenUsage } from './types'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams\ntype ResponseCreateParams = OpenAIOrignal.Responses.ResponseCreateParams\ntype AnthropicTool = AnthropicOriginal.Tool\n\n// limit large outputs by truncating to 200kb (approx 200k bytes)\nexport const MAX_OUTPUT_SIZE = 200000\nconst STRING_FORMAT = 'utf8'\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n posthogCaptureImmediate?: boolean\n}\n\nexport interface CostOverride {\n inputCost: number\n outputCost: number\n}\n\nexport const getModelParams = (\n params: ((ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams) | null\n): Record<string, any> => {\n if (!params) {\n return {}\n }\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): FormattedMessage[] => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n } else if (provider === 'gemini') {\n return formatResponseGemini(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n const content: FormattedContent = []\n\n for (const choice of response.content ?? 
[]) {\n if (choice?.type === 'text' && choice?.text) {\n content.push({ type: 'text', text: choice.text })\n } else if (choice?.type === 'tool_use' && choice?.name && choice?.id) {\n content.push({\n type: 'function',\n id: choice.id,\n function: {\n name: choice.name,\n arguments: choice.input || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.choices) {\n for (const choice of response.choices) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n if (choice.message) {\n if (choice.message.role) {\n role = choice.message.role\n }\n\n if (choice.message.content) {\n content.push({ type: 'text', text: choice.message.content })\n }\n\n if (choice.message.tool_calls) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'function',\n id: toolCall.id,\n function: {\n name: toolCall.function.name,\n arguments: toolCall.function.arguments,\n },\n })\n }\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n }\n\n // Handle Responses API format\n if (response.output) {\n const content: FormattedContent = []\n let role = 'assistant'\n\n for (const item of response.output) {\n if (item.type === 'message') {\n role = item.role\n\n if (item.content && Array.isArray(item.content)) {\n for (const contentItem of item.content) {\n if (contentItem.type === 'output_text' && contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.text) {\n content.push({ type: 'text', text: contentItem.text })\n } else if (contentItem.type === 'input_image' && contentItem.image_url) {\n content.push({\n type: 'image',\n image: contentItem.image_url,\n })\n }\n }\n } else if (item.content) {\n content.push({ type: 'text', text: String(item.content) })\n }\n } else if (item.type === 'function_call') {\n content.push({\n type: 'function',\n id: item.call_id || item.id || '',\n function: {\n name: item.name,\n arguments: item.arguments || {},\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role,\n content,\n })\n }\n }\n\n return output\n}\n\nexport const formatResponseGemini = (response: any): FormattedMessage[] => {\n const output: FormattedMessage[] = []\n\n if (response.candidates && Array.isArray(response.candidates)) {\n for (const candidate of response.candidates) {\n if (candidate.content && candidate.content.parts) {\n const content: FormattedContent = []\n\n for (const part of candidate.content.parts) {\n if (part.text) {\n content.push({ type: 'text', text: part.text })\n } else if (part.functionCall) {\n content.push({\n type: 'function',\n function: {\n name: part.functionCall.name,\n arguments: part.functionCall.args,\n },\n })\n }\n }\n\n if (content.length > 0) {\n output.push({\n role: 'assistant',\n content,\n })\n }\n } else if (candidate.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: candidate.text }],\n })\n }\n }\n } else if (response.text) {\n output.push({\n role: 'assistant',\n content: [{ type: 'text', text: response.text }],\n })\n }\n\n return output\n}\n\nexport const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {\n if (provider == 'anthropic') {\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = 
(params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n }\n return params.messages\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? null : input\n}\n\nexport const truncate = (str: string): string => {\n try {\n const buffer = Buffer.from(str, STRING_FORMAT)\n if (buffer.length <= MAX_OUTPUT_SIZE) {\n return str\n }\n const truncatedBuffer = buffer.slice(0, MAX_OUTPUT_SIZE)\n return `${truncatedBuffer.toString(STRING_FORMAT)}... [truncated]`\n } catch (error) {\n console.error('Error truncating, likely not a string')\n return str\n }\n}\n\n/**\n * Extract available tool calls from the request parameters.\n * These are the tools provided to the LLM, not the tool calls in the response.\n */\nexport const extractAvailableToolCalls = (\n provider: string,\n params: any\n): ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null => {\n if (provider === 'anthropic') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'gemini') {\n if (params.config && params.config.tools) {\n return params.config.tools\n }\n\n return null\n } else if (provider === 'openai') {\n if (params.tools) {\n return params.tools\n }\n\n return null\n } else if (provider === 'vercel') {\n // Vercel AI SDK stores tools in params.mode.tools when mode type is 'regular'\n if (params.mode?.type === 'regular' && params.mode.tools) {\n return params.mode.tools\n }\n\n return null\n }\n\n return null\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: TokenUsage\n params: (ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams\n isError?: boolean\n error?: string\n tools?: ChatCompletionTool[] | AnthropicTool[] | GeminiTool[] | null\n captureImmediate?: boolean\n}\n\nfunction sanitizeValues(obj: any): any {\n if (obj === undefined || obj === null) {\n return obj\n }\n const jsonSafe = JSON.parse(JSON.stringify(obj))\n if (typeof jsonSafe === 'string') {\n return Buffer.from(jsonSafe, STRING_FORMAT).toString(STRING_FORMAT)\n } else if (Array.isArray(jsonSafe)) {\n return jsonSafe.map(sanitizeValues)\n } else if (jsonSafe && typeof jsonSafe === 'object') {\n return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]))\n }\n return jsonSafe\n}\n\nexport const sendEventToPosthog = async ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n isError = false,\n error,\n tools,\n captureImmediate = false,\n}: SendEventToPosthogParams): Promise<void> => {\n if (!client.capture) {\n return Promise.resolve()\n }\n // sanitize input and output for UTF-8 validity\n const safeInput = sanitizeValues(input)\n const safeOutput = sanitizeValues(output)\n const safeError = sanitizeValues(error)\n\n let errorData = {}\n if (isError) {\n errorData = {\n $ai_is_error: true,\n $ai_error: safeError,\n }\n }\n let costOverrideData = {}\n if (params.posthogCostOverride) {\n const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0)\n const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 
0)\n costOverrideData = {\n $ai_input_cost_usd: inputCostUSD,\n $ai_output_cost_usd: outputCostUSD,\n $ai_total_cost_usd: inputCostUSD + outputCostUSD,\n }\n }\n\n const additionalTokenValues = {\n ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),\n ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),\n ...(usage.cacheCreationInputTokens ? { $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens } : {}),\n }\n\n const properties = {\n $ai_provider: params.posthogProviderOverride ?? provider,\n $ai_model: params.posthogModelOverride ?? model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n ...additionalTokenValues,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n ...(tools ? { $ai_tools: tools } : {}),\n ...errorData,\n ...costOverrideData,\n }\n\n const event = {\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties,\n groups: params.posthogGroups,\n }\n\n if (captureImmediate) {\n // await capture promise to send single event in serverless environments\n await client.captureImmediate(event)\n } else {\n client.capture(event)\n }\n}\n","import { wrapLanguageModel } from 'ai'\nimport type {\n LanguageModelV2,\n LanguageModelV2Middleware,\n LanguageModelV2Prompt,\n LanguageModelV2StreamPart,\n} from '@ai-sdk/provider'\nimport { v4 as uuidv4 } from 'uuid'\nimport { PostHog } from 'posthog-node'\nimport { CostOverride, sendEventToPosthog, truncate, MAX_OUTPUT_SIZE, extractAvailableToolCalls } from '../utils'\nimport { Buffer } from 'buffer'\n\ninterface ClientOptions {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n posthogCaptureImmediate?: boolean\n}\n\ninterface CreateInstrumentationMiddlewareOptions {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n posthogCaptureImmediate?: boolean\n}\n\ninterface PostHogInput {\n role: string\n type?: string\n content?:\n | string\n | {\n [key: string]: any\n }\n}\n\nconst mapVercelParams = (params: any): Record<string, any> => {\n return {\n temperature: params.temperature,\n max_output_tokens: params.maxOutputTokens,\n top_p: params.topP,\n frequency_penalty: params.frequencyPenalty,\n presence_penalty: params.presencePenalty,\n stop: params.stopSequences,\n stream: params.stream,\n }\n}\n\nconst mapVercelPrompt = (messages: LanguageModelV2Prompt): PostHogInput[] => {\n // Map and truncate individual content\n const inputs: PostHogInput[] = messages.map((message) => {\n let content: any\n\n // Handle system role which has string content\n if (message.role === 'system') {\n content = [\n {\n type: 'text',\n text: truncate(String(message.content)),\n },\n ]\n } else {\n // Handle other roles 
which have array content\n if (Array.isArray(message.content)) {\n content = message.content.map((c: any) => {\n if (c.type === 'text') {\n return {\n type: 'text',\n text: truncate(c.text),\n }\n } else if (c.type === 'file') {\n return {\n type: 'file',\n file: c.data instanceof URL ? c.data.toString() : 'raw files not supported',\n mediaType: c.mediaType,\n }\n } else if (c.type === 'reasoning') {\n return {\n type: 'reasoning',\n text: truncate(c.reasoning),\n }\n } else if (c.type === 'tool-call') {\n return {\n type: 'tool-call',\n toolCallId: c.toolCallId,\n toolName: c.toolName,\n input: c.input,\n }\n } else if (c.type === 'tool-result') {\n return {\n type: 'tool-result',\n toolCallId: c.toolCallId,\n toolName: c.toolName,\n output: c.output,\n isError: c.isError,\n }\n }\n return {\n type: 'text',\n text: '',\n }\n })\n } else {\n // Fallback for non-array content\n content = [\n {\n type: 'text',\n text: truncate(String(message.content)),\n },\n ]\n }\n }\n\n return {\n role: message.role,\n content,\n }\n })\n\n try {\n // Trim the inputs array until its JSON size fits within MAX_OUTPUT_SIZE\n let serialized = JSON.stringify(inputs)\n let removedCount = 0\n // We need to keep track of the initial size of the inputs array because we're going to be mutating it\n const initialSize = inputs.length\n for (let i = 0; i < initialSize && Buffer.byteLength(serialized, 'utf8') > MAX_OUTPUT_SIZE; i++) {\n inputs.shift()\n removedCount++\n serialized = JSON.stringify(inputs)\n }\n if (removedCount > 0) {\n // Add one placeholder to indicate how many were removed\n inputs.unshift({\n role: 'posthog',\n content: `[${removedCount} message${removedCount === 1 ? '' : 's'} removed due to size limit]`,\n })\n }\n } catch (error) {\n console.error('Error stringifying inputs', error)\n return [{ role: 'posthog', content: 'An error occurred while processing your request. Please try again.' }]\n }\n return inputs\n}\n\nconst mapVercelOutput = (result: any): PostHogInput[] => {\n const content: any[] = []\n\n if (result.text) {\n content.push({ type: 'text', text: truncate(result.text) })\n }\n\n if (result.toolCalls && Array.isArray(result.toolCalls)) {\n for (const toolCall of result.toolCalls) {\n content.push({\n type: 'function',\n id: toolCall.toolCallId,\n function: {\n name: toolCall.toolName,\n arguments: typeof toolCall.args === 'string' ? toolCall.args : JSON.stringify(toolCall.args),\n },\n })\n }\n }\n\n if (content.length > 0) {\n return [\n {\n role: 'assistant',\n content: content.length === 1 && content[0].type === 'text' ? content[0].text : content,\n },\n ]\n }\n // Fallback to original behavior for other result types TODO: check if we can remove this\n const normalizedResult = typeof result === 'string' ? { text: result } : result\n const output = {\n ...(normalizedResult.text ? { text: normalizedResult.text } : {}),\n ...(normalizedResult.object ? { object: normalizedResult.object } : {}),\n ...(normalizedResult.reasoningText ? { reasoning: normalizedResult.reasoningText } : {}),\n ...(normalizedResult.response ? { response: normalizedResult.response } : {}),\n ...(normalizedResult.finishReason ? { finishReason: normalizedResult.finishReason } : {}),\n ...(normalizedResult.usage ? { usage: normalizedResult.usage } : {}),\n ...(normalizedResult.warnings ? { warnings: normalizedResult.warnings } : {}),\n ...(normalizedResult.providerMetadata ? { toolCalls: normalizedResult.providerMetadata } : {}),\n ...(normalizedResult.files\n ? 
{\n files: normalizedResult.files.map((file: any) => ({\n name: file.name,\n size: file.size,\n type: file.type,\n })),\n }\n : {}),\n }\n if (output.text && !output.object && !output.reasoning) {\n return [{ content: truncate(output.text as string), role: 'assistant' }]\n }\n // otherwise stringify and truncate\n try {\n const jsonOutput = JSON.stringify(output)\n return [{ content: truncate(jsonOutput), role: 'assistant' }]\n } catch (error) {\n console.error('Error stringifying output')\n return []\n }\n}\n\nconst extractProvider = (model: LanguageModelV2): string => {\n const provider = model.provider.toLowerCase()\n const providerName = provider.split('.')[0]\n return providerName\n}\n\nexport const createInstrumentationMiddleware = (\n phClient: PostHog,\n model: LanguageModelV2,\n options: CreateInstrumentationMiddlewareOptions\n): LanguageModelV2Middleware => {\n const middleware: LanguageModelV2Middleware = {\n wrapGenerate: async ({ doGenerate, params }) => {\n const startTime = Date.now()\n const mergedParams = {\n ...options,\n ...mapVercelParams(params),\n }\n const availableTools = extractAvailableToolCalls('vercel', params)\n\n try {\n const result = await doGenerate()\n const modelId =\n options.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId)\n const provider = options.posthogProviderOverride ?? extractProvider(model)\n const baseURL = '' // cannot currently get baseURL from vercel\n const content = mapVercelOutput(result)\n const latency = (Date.now() - startTime) / 1000\n const providerMetadata = result.providerMetadata\n const additionalTokenValues = {\n ...(providerMetadata?.openai?.reasoningTokens\n ? { reasoningTokens: providerMetadata.openai.reasoningTokens }\n : {}),\n ...(providerMetadata?.openai?.cachedPromptTokens\n ? { cacheReadInputTokens: providerMetadata.openai.cachedPromptTokens }\n : {}),\n ...(providerMetadata?.anthropic\n ? {\n cacheReadInputTokens: providerMetadata.anthropic.cacheReadInputTokens,\n cacheCreationInputTokens: providerMetadata.anthropic.cacheCreationInputTokens,\n }\n : {}),\n }\n await sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId ?? uuidv4(),\n model: modelId,\n provider: provider,\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: content,\n latency,\n baseURL,\n params: mergedParams as any,\n httpStatus: 200,\n usage: {\n inputTokens: result.usage.inputTokens,\n outputTokens: result.usage.outputTokens,\n ...additionalTokenValues,\n },\n tools: availableTools,\n captureImmediate: options.posthogCaptureImmediate,\n })\n\n return result\n } catch (error: any) {\n const modelId = model.modelId\n await sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId ?? uuidv4(),\n model: modelId,\n provider: model.provider,\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: [],\n latency: 0,\n baseURL: '',\n params: mergedParams as any,\n httpStatus: error?.status ? 
error.status : 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: truncate(JSON.stringify(error)),\n tools: availableTools,\n captureImmediate: options.posthogCaptureImmediate,\n })\n throw error\n }\n },\n\n wrapStream: async ({ doStream, params }) => {\n const startTime = Date.now()\n let generatedText = ''\n let reasoningText = ''\n let usage: {\n inputTokens?: number\n outputTokens?: number\n reasoningTokens?: any\n cacheReadInputTokens?: any\n cacheCreationInputTokens?: any\n } = {}\n const mergedParams = {\n ...options,\n ...mapVercelParams(params),\n }\n\n const modelId = options.posthogModelOverride ?? model.modelId\n const provider = options.posthogProviderOverride ?? extractProvider(model)\n const availableTools = extractAvailableToolCalls('vercel', params)\n const baseURL = '' // cannot currently get baseURL from vercel\n\n try {\n const { stream, ...rest } = await doStream()\n const transformStream = new TransformStream<LanguageModelV2StreamPart, LanguageModelV2StreamPart>({\n transform(chunk, controller) {\n // Handle new v5 streaming patterns\n if (chunk.type === 'text-delta') {\n generatedText += chunk.delta\n }\n if (chunk.type === 'reasoning-delta') {\n reasoningText += chunk.delta // New in v5\n }\n if (chunk.type === 'finish') {\n usage = {\n inputTokens: chunk.usage?.inputTokens,\n outputTokens: chunk.usage?.outputTokens,\n }\n if (chunk.providerMetadata?.openai?.reasoningTokens) {\n usage.reasoningTokens = chunk.providerMetadata.openai.reasoningTokens\n }\n if (chunk.providerMetadata?.openai?.cachedPromptTokens) {\n usage.cacheReadInputTokens = chunk.providerMetadata.openai.cachedPromptTokens\n }\n if (chunk.providerMetadata?.anthropic?.cacheReadInputTokens) {\n usage.cacheReadInputTokens = chunk.providerMetadata.anthropic.cacheReadInputTokens\n }\n if (chunk.providerMetadata?.anthropic?.cacheCreationInputTokens) {\n usage.cacheCreationInputTokens = chunk.providerMetadata.anthropic.cacheCreationInputTokens\n }\n }\n controller.enqueue(chunk)\n },\n\n flush: async () => {\n const latency = (Date.now() - startTime) / 1000\n const outputContent = reasoningText ? `${reasoningText}\\n\\n${generatedText}` : generatedText\n await sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId ?? uuidv4(),\n model: modelId,\n provider: provider,\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: [{ content: outputContent, role: 'assistant' }],\n latency,\n baseURL,\n params: mergedParams as any,\n httpStatus: 200,\n usage,\n tools: availableTools,\n captureImmediate: options.posthogCaptureImmediate,\n })\n },\n })\n\n return {\n stream: stream.pipeThrough(transformStream),\n ...rest,\n }\n } catch (error: any) {\n await sendEventToPosthog({\n client: phClient,\n distinctId: options.posthogDistinctId,\n traceId: options.posthogTraceId ?? uuidv4(),\n model: modelId,\n provider: provider,\n input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),\n output: [],\n latency: 0,\n baseURL: '',\n params: mergedParams as any,\n httpStatus: error?.status ? 
error.status : 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: truncate(JSON.stringify(error)),\n tools: availableTools,\n captureImmediate: options.posthogCaptureImmediate,\n })\n throw error\n }\n },\n }\n\n return middleware\n}\n\nexport const wrapVercelLanguageModel = (\n model: LanguageModelV2,\n phClient: PostHog,\n options: ClientOptions\n): LanguageModelV2 => {\n const traceId = options.posthogTraceId ?? uuidv4()\n const middleware = createInstrumentationMiddleware(phClient, model, {\n ...options,\n posthogTraceId: traceId,\n posthogDistinctId: options.posthogDistinctId,\n })\n\n const wrappedModel = wrapLanguageModel({\n model,\n middleware,\n })\n\n return wrappedModel\n}\n"],"names":["MAX_OUTPUT_SIZE","STRING_FORMAT","getModelParams","params","modelParams","paramKeys","key","undefined","withPrivacyMode","client","privacyMode","input","privacy_mode","truncate","str","buffer","Buffer","from","length","truncatedBuffer","slice","toString","error","console","extractAvailableToolCalls","provider","mode","type","tools","sanitizeValues","obj","jsonSafe","JSON","parse","stringify","Array","isArray","map","Object","fromEntries","entries","k","v","sendEventToPosthog","distinctId","traceId","model","output","latency","baseURL","httpStatus","usage","isError","captureImmediate","capture","Promise","resolve","safeInput","safeOutput","safeError","errorData","$ai_is_error","$ai_error","costOverrideData","posthogCostOverride","inputCostUSD","inputCost","inputTokens","outputCostUSD","outputCost","outputTokens","$ai_input_cost_usd","$ai_output_cost_usd","$ai_total_cost_usd","additionalTokenValues","reasoningTokens","$ai_reasoning_tokens","cacheReadInputTokens","$ai_cache_read_input_tokens","cacheCreationInputTokens","$ai_cache_creation_input_tokens","properties","$ai_provider","posthogProviderOverride","$ai_model","posthogModelOverride","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","$ai_output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","$ai_tools","event","groups","posthogGroups","mapVercelParams","temperature","max_output_tokens","maxOutputTokens","top_p","topP","frequency_penalty","frequencyPenalty","presence_penalty","presencePenalty","stop","stopSequences","stream","mapVercelPrompt","messages","inputs","message","content","role","text","String","c","file","data","URL","mediaType","reasoning","toolCallId","toolName","serialized","removedCount","initialSize","i","byteLength","shift","unshift","mapVercelOutput","result","push","toolCalls","toolCall","id","function","name","arguments","args","normalizedResult","object","reasoningText","response","finishReason","warnings","providerMetadata","files","size","jsonOutput","extractProvider","toLowerCase","providerName","split","createInstrumentationMiddleware","phClient","options","middleware","wrapGenerate","doGenerate","startTime","Date","now","mergedParams","availableTools","modelId","openai","cachedPromptTokens","anthropic","posthogDistinctId","posthogTraceId","uuidv4","prompt","posthogCaptureImmediate","status","wrapStream","doStream","generatedText","rest","transformStream","TransformStream","transform","chunk","controller","delta","enqueue","flush","outputContent","pipeThrough","wrapVercelLanguageModel","wrappedModel","wrapLanguageModel"],"mappings":";;;;AAaA;AACO,MAAMA,eAAe,GAAG,MAAM;AACrC,MAAMC,aAAa,GAAG,MAAM;AAmBrB,MAAMC,cAAc,GACzBC,MAAiH,IACzF;EACxB,IAAI,CAACA,MAAM,EAAE;AACX,IAAA,OAAO,EAA
E;AACX,EAAA;EACA,MAAMC,WAAgC,GAAG,EAAE;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAM,CAASG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAM,CAASG,GAAG,CAAC;AACzC,IAAA;AACF,EAAA;AACA,EAAA,OAAOF,WAAW;AACpB,CAAC;AAiMM,MAAMI,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAU;EACzF,OAAQF,MAAM,CAASG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK;AACnE,CAAC;AAEM,MAAME,QAAQ,GAAIC,GAAW,IAAa;EAC/C,IAAI;IACF,MAAMC,MAAM,GAAGC,MAAM,CAACC,IAAI,CAACH,GAAG,EAAEb,aAAa,CAAC;AAC9C,IAAA,IAAIc,MAAM,CAACG,MAAM,IAAIlB,eAAe,EAAE;AACpC,MAAA,OAAOc,GAAG;AACZ,IAAA;IACA,MAAMK,eAAe,GAAGJ,MAAM,CAACK,KAAK,CAAC,CAAC,EAAEpB,eAAe,CAAC;AACxD,IAAA,OAAO,GAAGmB,eAAe,CAACE,QAAQ,CAACpB,aAAa,CAAC,CAAA,eAAA,CAAiB;EACpE,CAAC,CAAC,OAAOqB,KAAK,EAAE;AACdC,IAAAA,OAAO,CAACD,KAAK,CAAC,uCAAuC,CAAC;AACtD,IAAA,OAAOR,GAAG;AACZ,EAAA;AACF,CAAC;;AAED;AACA;AACA;AACA;AACO,MAAMU,yBAAyB,GAAGA,CACvCC,QAAgB,EAChBtB,MAAW,KACsD;EAmB/B;AAChC;AACA,IAAA,IAAIA,MAAM,CAACuB,IAAI,EAAEC,IAAI,KAAK,SAAS,IAAIxB,MAAM,CAACuB,IAAI,CAACE,KAAK,EAAE;AACxD,MAAA,OAAOzB,MAAM,CAACuB,IAAI,CAACE,KAAK;AAC1B,IAAA;AAEA,IAAA,OAAO,IAAI;AACb,EAAA;AAGF,CAAC;AAqBD,SAASC,cAAcA,CAACC,GAAQ,EAAO;AACrC,EAAA,IAAIA,GAAG,KAAKvB,SAAS,IAAIuB,GAAG,KAAK,IAAI,EAAE;AACrC,IAAA,OAAOA,GAAG;AACZ,EAAA;AACA,EAAA,MAAMC,QAAQ,GAAGC,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACJ,GAAG,CAAC,CAAC;AAChD,EAAA,IAAI,OAAOC,QAAQ,KAAK,QAAQ,EAAE;AAChC,IAAA,OAAOf,MAAM,CAACC,IAAI,CAACc,QAAQ,EAAE9B,aAAa,CAAC,CAACoB,QAAQ,CAACpB,aAAa,CAAC;EACrE,CAAC,MAAM,IAAIkC,KAAK,CAACC,OAAO,CAACL,QAAQ,CAAC,EAAE;AAClC,IAAA,OAAOA,QAAQ,CAACM,GAAG,CAACR,cAAc,CAAC;EACrC,CAAC,MAAM,IAAIE,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AACnD,IAAA,OAAOO,MAAM,CAACC,WAAW,CAACD,MAAM,CAACE,OAAO,CAACT,QAAQ,CAAC,CAACM,GAAG,CAAC,CAAC,CAACI,CAAC,EAAEC,CAAC,CAAC,KAAK,CAACD,CAAC,EAAEZ,cAAc,CAACa,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7F,EAAA;AACA,EAAA,OAAOX,QAAQ;AACjB;AAEO,MAAMY,kBAAkB,GAAG,OAAO;EACvClC,MAAM;EACNmC,UAAU;EACVC,OAAO;EACPC,KAAK;EACLrB,QAAQ;EACRd,KAAK;EACLoC,MAAM;EACNC,OAAO;EACPC,OAAO;EACP9C,MAAM;AACN+C,EAAAA,UAAU,GAAG,GAAG;EAChBC,KAAK,GAAG,EAAE;AACVC,EAAAA,OAAO,GAAG,KAAK;EACf9B,KAAK;EACLM,KAAK;AACLyB,EAAAA,gBAAgB,GAAG;AACK,CAAC,KAAoB;AAC7C,EAAA,IAAI,CAAC5C,MAAM,CAAC6C,OAAO,EAAE;AACnB,IAAA,OAAOC,OAAO,CAACC,OAAO,EAAE;AAC1B,EAAA;AACA;AACA,EAAA,MAAMC,SAAS,GAAG5B,cAAc,CAAClB,KAAK,CAAC;AACvC,EAAA,MAAM+C,UAAU,GAAG7B,cAAc,CAACkB,MAAM,CAAC;AACzC,EAAA,MAAMY,SAAS,GAAG9B,cAAc,CAACP,KAAK,CAAC;EAEvC,IAAIsC,SAAS,GAAG,EAAE;AAClB,EAAA,IAAIR,OAAO,EAAE;AACXQ,IAAAA,SAAS,GAAG;AACVC,MAAAA,YAAY,EAAE,IAAI;AAClBC,MAAAA,SAAS,EAAEH;KACZ;AACH,EAAA;EACA,IAAII,gBAAgB,GAAG,EAAE;EACzB,IAAI5D,MAAM,CAAC6D,mBAAmB,EAAE;AAC9B,IAAA,MAAMC,YAAY,GAAG,CAAC9D,MAAM,CAAC6D,mBAAmB,CAACE,SAAS,IAAI,CAAC,KAAKf,KAAK,CAACgB,WAAW,IAAI,CAAC,CAAC;AAC3F,IAAA,MAAMC,aAAa,GAAG,CAACjE,MAAM,CAAC6D,mBAAmB,CAACK,UAAU,IAAI,CAAC,KAAKlB,KAAK,CAACmB,YAAY,IAAI,CAAC,CAAC;AAC9FP,IAAAA,gBAAgB,GAAG;AACjBQ,MAAAA,kBAAkB,EAAEN,YAAY;AAChCO,MAAAA,mBAAmB,EAAEJ,aAAa;MAClCK,kBAAkB,EAAER,YAAY,GAAGG;KACpC;AACH,EAAA;AAEA,EAAA,MAAMM,qBAAqB,GAAG;IAC5B,IAAIvB,KAAK,CAACwB,eAAe,GAAG;MAAEC,oBAAoB,EAAEzB,KAAK,CAACwB;KAAiB,GAAG,EAAE,CAAC;IACjF,IAAIxB,KAAK,CAAC0B,oBAAoB,GAAG;MAAEC,2BAA2B,EAAE3B,KAAK,CAAC0B;KAAsB,GAAG,EAAE,CAAC;IAClG,IAAI1B,KAAK,CAAC4B,wBAAwB,GAAG;MAAEC,+BAA+B,EAAE7B,KAAK,CAAC4B;KAA0B,GAAG,EAAE;GAC9G;AAED,EAAA,MAAME,UAAU,GAAG;AACjBC,IAAAA,YAAY,EAAE/E,MAAM,CAACgF,uBAAuB,IAAI1D,QAAQ;AACxD2D,IAAAA,SAAS,EAAEjF,MAAM,CAACkF,oBAAoB,IAAIvC,KAAK;AAC/CwC,IAAAA,oBAAoB,EAAE
pF,cAAc,CAACC,MAAM,CAAC;AAC5CoF,IAAAA,SAAS,EAAE/E,eAAe,CAACC,MAAM,EAAEN,MAAM,CAACqF,kBAAkB,IAAI,KAAK,EAAE/B,SAAS,CAAC;AACjFgC,IAAAA,kBAAkB,EAAEjF,eAAe,CAACC,MAAM,EAAEN,MAAM,CAACqF,kBAAkB,IAAI,KAAK,EAAE9B,UAAU,CAAC;AAC3FgC,IAAAA,eAAe,EAAExC,UAAU;AAC3ByC,IAAAA,gBAAgB,EAAExC,KAAK,CAACgB,WAAW,IAAI,CAAC;AACxCyB,IAAAA,iBAAiB,EAAEzC,KAAK,CAACmB,YAAY,IAAI,CAAC;AAC1C,IAAA,GAAGI,qBAAqB;AACxBmB,IAAAA,WAAW,EAAE7C,OAAO;AACpB8C,IAAAA,YAAY,EAAEjD,OAAO;AACrBkD,IAAAA,YAAY,EAAE9C,OAAO;IACrB,GAAG9C,MAAM,CAAC6F,iBAAiB;AAC3B,IAAA,IAAIpD,UAAU,GAAG,EAAE,GAAG;AAAEqD,MAAAA,uBAAuB,EAAE;AAAM,KAAC,CAAC;AACzD,IAAA,IAAIrE,KAAK,GAAG;AAAEsE,MAAAA,SAAS,EAAEtE;KAAO,GAAG,EAAE,CAAC;AACtC,IAAA,GAAGgC,SAAS;IACZ,GAAGG;GACJ;AAED,EAAA,MAAMoC,KAAK,GAAG;IACZvD,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCsD,IAAAA,KAAK,EAAE,gBAAgB;IACvBlB,UAAU;IACVmB,MAAM,EAAEjG,MAAM,CAACkG;GAChB;AAED,EAAA,IAAIhD,gBAAgB,EAAE;AACpB;AACA,IAAA,MAAM5C,MAAM,CAAC4C,gBAAgB,CAAC8C,KAAK,CAAC;AACtC,EAAA,CAAC,MAAM;AACL1F,IAAAA,MAAM,CAAC6C,OAAO,CAAC6C,KAAK,CAAC;AACvB,EAAA;AACF,CAAC;;AC5XD,MAAMG,eAAe,GAAInG,MAAW,IAA0B;EAC5D,OAAO;IACLoG,WAAW,EAAEpG,MAAM,CAACoG,WAAW;IAC/BC,iBAAiB,EAAErG,MAAM,CAACsG,eAAe;IACzCC,KAAK,EAAEvG,MAAM,CAACwG,IAAI;IAClBC,iBAAiB,EAAEzG,MAAM,CAAC0G,gBAAgB;IAC1CC,gBAAgB,EAAE3G,MAAM,CAAC4G,eAAe;IACxCC,IAAI,EAAE7G,MAAM,CAAC8G,aAAa;IAC1BC,MAAM,EAAE/G,MAAM,CAAC+G;GAChB;AACH,CAAC;AAED,MAAMC,eAAe,GAAIC,QAA+B,IAAqB;AAC3E;AACA,EAAA,MAAMC,MAAsB,GAAGD,QAAQ,CAAC/E,GAAG,CAAEiF,OAAO,IAAK;AACvD,IAAA,IAAIC,OAAY;;AAEhB;AACA,IAAA,IAAID,OAAO,CAACE,IAAI,KAAK,QAAQ,EAAE;AAC7BD,MAAAA,OAAO,GAAG,CACR;AACE5F,QAAAA,IAAI,EAAE,MAAM;QACZ8F,IAAI,EAAE5G,QAAQ,CAAC6G,MAAM,CAACJ,OAAO,CAACC,OAAO,CAAC;AACxC,OAAC,CACF;AACH,IAAA,CAAC,MAAM;AACL;MACA,IAAIpF,KAAK,CAACC,OAAO,CAACkF,OAAO,CAACC,OAAO,CAAC,EAAE;QAClCA,OAAO,GAAGD,OAAO,CAACC,OAAO,CAAClF,GAAG,CAAEsF,CAAM,IAAK;AACxC,UAAA,IAAIA,CAAC,CAAChG,IAAI,KAAK,MAAM,EAAE;YACrB,OAAO;AACLA,cAAAA,IAAI,EAAE,MAAM;AACZ8F,cAAAA,IAAI,EAAE5G,QAAQ,CAAC8G,CAAC,CAACF,IAAI;aACtB;AACH,UAAA,CAAC,MAAM,IAAIE,CAAC,CAAChG,IAAI,KAAK,MAAM,EAAE;YAC5B,OAAO;AACLA,cAAAA,IAAI,EAAE,MAAM;AACZiG,cAAAA,IAAI,EAAED,CAAC,CAACE,IAAI,YAAYC,GAAG,GAAGH,CAAC,CAACE,IAAI,CAACxG,QAAQ,EAAE,GAAG,yBAAyB;cAC3E0G,SAAS,EAAEJ,CAAC,CAACI;aACd;AACH,UAAA,CAAC,MAAM,IAAIJ,CAAC,CAAChG,IAAI,KAAK,WAAW,EAAE;YACjC,OAAO;AACLA,cAAAA,IAAI,EAAE,WAAW;AACjB8F,cAAAA,IAAI,EAAE5G,QAAQ,CAAC8G,CAAC,CAACK,SAAS;aAC3B;AACH,UAAA,CAAC,MAAM,IAAIL,CAAC,CAAChG,IAAI,KAAK,WAAW,EAAE;YACjC,OAAO;AACLA,cAAAA,IAAI,EAAE,WAAW;cACjBsG,UAAU,EAAEN,CAAC,CAACM,UAAU;cACxBC,QAAQ,EAAEP,CAAC,CAACO,QAAQ;cACpBvH,KAAK,EAAEgH,CAAC,CAAChH;aACV;AACH,UAAA,CAAC,MAAM,IAAIgH,CAAC,CAAChG,IAAI,KAAK,aAAa,EAAE;YACnC,OAAO;AACLA,cAAAA,IAAI,EAAE,aAAa;cACnBsG,UAAU,EAAEN,CAAC,CAACM,UAAU;cACxBC,QAAQ,EAAEP,CAAC,CAACO,QAAQ;cACpBnF,MAAM,EAAE4E,CAAC,CAAC5E,MAAM;cAChBK,OAAO,EAAEuE,CAAC,CAACvE;aACZ;AACH,UAAA;UACA,OAAO;AACLzB,YAAAA,IAAI,EAAE,MAAM;AACZ8F,YAAAA,IAAI,EAAE;WACP;AACH,QAAA,CAAC,CAAC;AACJ,MAAA,CAAC,MAAM;AACL;AACAF,QAAAA,OAAO,GAAG,CACR;AACE5F,UAAAA,IAAI,EAAE,MAAM;UACZ8F,IAAI,EAAE5G,QAAQ,CAAC6G,MAAM,CAACJ,OAAO,CAACC,OAAO,CAAC;AACxC,SAAC,CACF;AACH,MAAA;AACF,IAAA;IAEA,OAAO;MACLC,IAAI,EAAEF,OAAO,CAACE,IAAI;AAClBD,MAAAA;KACD;AACH,EAAA,CAAC,CAAC;EAEF,IAAI;AACF;AACA,IAAA,IAAIY,UAAU,GAAGnG,IAAI,CAACE,SAAS,CAACmF,MAAM,CAAC;IACvC,IAAIe,YAAY,GAAG,CAAC;AACpB;AACA,IAAA,MAAMC,WAAW,GAAGhB,MAAM,CAACnG,MAAM;IACjC,KAAK,IAAIoH,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGD,WAAW,IAAIrH,MAAM,CAACuH,UAAU,CAACJ,UAAU,EAAE,MAAM,CAAC,GAAGnI,eAAe,EAAEsI,CAAC,EAAE,EAAE;MAC/FjB,MAAM,CAACmB,KAAK,EAAE;AACdJ,MAAAA,YAAY,EAAE;AACdD,MAAAA,UAAU,GAAGnG,IAAI,CAACE,SAAS,CAACmF,MAAM,CAAC;AACrC,IAAA;IACA,IAAIe,YAAY,G
AAG,CAAC,EAAE;AACpB;MACAf,MAAM,CAACoB,OAAO,CAAC;AACbjB,QAAAA,IAAI,EAAE,SAAS;QACfD,OAAO,EAAE,CAAA,CAAA,EAAIa,YAAY,CAAA,QAAA,EAAWA,YAAY,KAAK,CAAC,GAAG,EAAE,GAAG,GAAG,CAAA,2BAAA;AACnE,OAAC,CAAC;AACJ,IAAA;EACF,CAAC,CAAC,OAAO9G,KAAK,EAAE;AACdC,IAAAA,OAAO,CAACD,KAAK,CAAC,2BAA2B,EAAEA,KAAK,CAAC;AACjD,IAAA,OAAO,CAAC;AAAEkG,MAAAA,IAAI,EAAE,SAAS;AAAED,MAAAA,OAAO,EAAE;AAAqE,KAAC,CAAC;AAC7G,EAAA;AACA,EAAA,OAAOF,MAAM;AACf,CAAC;AAED,MAAMqB,eAAe,GAAIC,MAAW,IAAqB;EACvD,MAAMpB,OAAc,GAAG,EAAE;EAEzB,IAAIoB,MAAM,CAAClB,IAAI,EAAE;IACfF,OAAO,CAACqB,IAAI,CAAC;AAAEjH,MAAAA,IAAI,EAAE,MAAM;AAAE8F,MAAAA,IAAI,EAAE5G,QAAQ,CAAC8H,MAAM,CAAClB,IAAI;AAAE,KAAC,CAAC;AAC7D,EAAA;AAEA,EAAA,IAAIkB,MAAM,CAACE,SAAS,IAAI1G,KAAK,CAACC,OAAO,CAACuG,MAAM,CAACE,SAAS,CAAC,EAAE;AACvD,IAAA,KAAK,MAAMC,QAAQ,IAAIH,MAAM,CAACE,SAAS,EAAE;MACvCtB,OAAO,CAACqB,IAAI,CAAC;AACXjH,QAAAA,IAAI,EAAE,UAAU;QAChBoH,EAAE,EAAED,QAAQ,CAACb,UAAU;AACvBe,QAAAA,QAAQ,EAAE;UACRC,IAAI,EAAEH,QAAQ,CAACZ,QAAQ;AACvBgB,UAAAA,SAAS,EAAE,OAAOJ,QAAQ,CAACK,IAAI,KAAK,QAAQ,GAAGL,QAAQ,CAACK,IAAI,GAAGnH,IAAI,CAACE,SAAS,CAAC4G,QAAQ,CAACK,IAAI;AAC7F;AACF,OAAC,CAAC;AACJ,IAAA;AACF,EAAA;AAEA,EAAA,IAAI5B,OAAO,CAACrG,MAAM,GAAG,CAAC,EAAE;AACtB,IAAA,OAAO,CACL;AACEsG,MAAAA,IAAI,EAAE,WAAW;MACjBD,OAAO,EAAEA,OAAO,CAACrG,MAAM,KAAK,CAAC,IAAIqG,OAAO,CAAC,CAAC,CAAC,CAAC5F,IAAI,KAAK,MAAM,GAAG4F,OAAO,CAAC,CAAC,CAAC,CAACE,IAAI,GAAGF;AAClF,KAAC,CACF;AACH,EAAA;AACA;AACA,EAAA,MAAM6B,gBAAgB,GAAG,OAAOT,MAAM,KAAK,QAAQ,GAAG;AAAElB,IAAAA,IAAI,EAAEkB;AAAO,GAAC,GAAGA,MAAM;AAC/E,EAAA,MAAM5F,MAAM,GAAG;IACb,IAAIqG,gBAAgB,CAAC3B,IAAI,GAAG;MAAEA,IAAI,EAAE2B,gBAAgB,CAAC3B;KAAM,GAAG,EAAE,CAAC;IACjE,IAAI2B,gBAAgB,CAACC,MAAM,GAAG;MAAEA,MAAM,EAAED,gBAAgB,CAACC;KAAQ,GAAG,EAAE,CAAC;IACvE,IAAID,gBAAgB,CAACE,aAAa,GAAG;MAAEtB,SAAS,EAAEoB,gBAAgB,CAACE;KAAe,GAAG,EAAE,CAAC;IACxF,IAAIF,gBAAgB,CAACG,QAAQ,GAAG;MAAEA,QAAQ,EAAEH,gBAAgB,CAACG;KAAU,GAAG,EAAE,CAAC;IAC7E,IAAIH,gBAAgB,CAACI,YAAY,GAAG;MAAEA,YAAY,EAAEJ,gBAAgB,CAACI;KAAc,GAAG,EAAE,CAAC;IACzF,IAAIJ,gBAAgB,CAACjG,KAAK,GAAG;MAAEA,KAAK,EAAEiG,gBAAgB,CAACjG;KAAO,GAAG,EAAE,CAAC;IACpE,IAAIiG,gBAAgB,CAACK,QAAQ,GAAG;MAAEA,QAAQ,EAAEL,gBAAgB,CAACK;KAAU,GAAG,EAAE,CAAC;IAC7E,IAAIL,gBAAgB,CAACM,gBAAgB,GAAG;MAAEb,SAAS,EAAEO,gBAAgB,CAACM;KAAkB,GAAG,EAAE,CAAC;IAC9F,IAAIN,gBAAgB,CAACO,KAAK,GACtB;MACEA,KAAK,EAAEP,gBAAgB,CAACO,KAAK,CAACtH,GAAG,CAAEuF,IAAS,KAAM;QAChDqB,IAAI,EAAErB,IAAI,CAACqB,IAAI;QACfW,IAAI,EAAEhC,IAAI,CAACgC,IAAI;QACfjI,IAAI,EAAEiG,IAAI,CAACjG;AACb,OAAC,CAAC;KACH,GACD,EAAE;GACP;AACD,EAAA,IAAIoB,MAAM,CAAC0E,IAAI,IAAI,CAAC1E,MAAM,CAACsG,MAAM,IAAI,CAACtG,MAAM,CAACiF,SAAS,EAAE;AACtD,IAAA,OAAO,CAAC;AAAET,MAAAA,OAAO,EAAE1G,QAAQ,CAACkC,MAAM,CAAC0E,IAAc,CAAC;AAAED,MAAAA,IAAI,EAAE;AAAY,KAAC,CAAC;AAC1E,EAAA;AACA;EACA,IAAI;AACF,IAAA,MAAMqC,UAAU,GAAG7H,IAAI,CAACE,SAAS,CAACa,MAAM,CAAC;AACzC,IAAA,OAAO,CAAC;AAAEwE,MAAAA,OAAO,EAAE1G,QAAQ,CAACgJ,UAAU,CAAC;AAAErC,MAAAA,IAAI,EAAE;AAAY,KAAC,CAAC;EAC/D,CAAC,CAAC,OAAOlG,KAAK,EAAE;AACdC,IAAAA,OAAO,CAACD,KAAK,CAAC,2BAA2B,CAAC;AAC1C,IAAA,OAAO,EAAE;AACX,EAAA;AACF,CAAC;AAED,MAAMwI,eAAe,GAAIhH,KAAsB,IAAa;EAC1D,MAAMrB,QAAQ,GAAGqB,KAAK,CAACrB,QAAQ,CAACsI,WAAW,EAAE;EAC7C,MAAMC,YAAY,GAAGvI,QAAQ,CAACwI,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAC3C,EAAA,OAAOD,YAAY;AACrB,CAAC;AAEM,MAAME,+BAA+B,GAAGA,CAC7CC,QAAiB,EACjBrH,KAAsB,EACtBsH,OAA+C,KACjB;AAC9B,EAAA,MAAMC,UAAqC,GAAG;IAC5CC,YAAY,EAAE,OAAO;MAAEC,UAAU;AAAEpK,MAAAA;AAAO,KAAC,KAAK;AAC9C,MAAA,MAAMqK,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;AAC5B,MAAA,MAAMC,YAAY,GAAG;AACnB,QAAA,GAAGP,OAAO;QACV,GAAG9D,eAAe,CAACnG,MAAM;OAC1B;AACD,MAAA,MAAMyK,cAAc,GAAGpJ,yBAAyB,CAAC,QAAQ,EAAErB,MAAM,CAAC;MAElE,IAAI;AACF,QAAA,MAAMwI,MAAM,GAAG,MA
AM4B,UAAU,EAAE;QACjC,MAAMM,OAAO,GACXT,OAAO,CAAC/E,oBAAoB,KAAKsD,MAAM,CAACY,QAAQ,EAAEsB,OAAO,GAAGlC,MAAM,CAACY,QAAQ,CAACsB,OAAO,GAAG/H,KAAK,CAAC+H,OAAO,CAAC;QACtG,MAAMpJ,QAAQ,GAAG2I,OAAO,CAACjF,uBAAuB,IAAI2E,eAAe,CAAChH,KAAK,CAAC;QAC1E,MAAMG,OAAO,GAAG,EAAE,CAAA;AAClB,QAAA,MAAMsE,OAAO,GAAGmB,eAAe,CAACC,MAAM,CAAC;QACvC,MAAM3F,OAAO,GAAG,CAACyH,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;AAC/C,QAAA,MAAMd,gBAAgB,GAAGf,MAAM,CAACe,gBAAgB;AAChD,QAAA,MAAMhF,qBAAqB,GAAG;AAC5B,UAAA,IAAIgF,gBAAgB,EAAEoB,MAAM,EAAEnG,eAAe,GACzC;AAAEA,YAAAA,eAAe,EAAE+E,gBAAgB,CAACoB,MAAM,CAACnG;WAAiB,GAC5D,EAAE,CAAC;AACP,UAAA,IAAI+E,gBAAgB,EAAEoB,MAAM,EAAEC,kBAAkB,GAC5C;AAAElG,YAAAA,oBAAoB,EAAE6E,gBAAgB,CAACoB,MAAM,CAACC;WAAoB,GACpE,EAAE,CAAC;UACP,IAAIrB,gBAAgB,EAAEsB,SAAS,GAC3B;AACEnG,YAAAA,oBAAoB,EAAE6E,gBAAgB,CAACsB,SAAS,CAACnG,oBAAoB;AACrEE,YAAAA,wBAAwB,EAAE2E,gBAAgB,CAACsB,SAAS,CAACjG;WACtD,GACD,EAAE;SACP;AACD,QAAA,MAAMpC,kBAAkB,CAAC;AACvBlC,UAAAA,MAAM,EAAE0J,QAAQ;UAChBvH,UAAU,EAAEwH,OAAO,CAACa,iBAAiB;AACrCpI,UAAAA,OAAO,EAAEuH,OAAO,CAACc,cAAc,IAAIC,EAAM,EAAE;AAC3CrI,UAAAA,KAAK,EAAE+H,OAAO;AACdpJ,UAAAA,QAAQ,EAAEA,QAAQ;AAClBd,UAAAA,KAAK,EAAEyJ,OAAO,CAAC5E,kBAAkB,GAAG,EAAE,GAAG2B,eAAe,CAAChH,MAAM,CAACiL,MAAM,CAAC;AACvErI,UAAAA,MAAM,EAAEwE,OAAO;UACfvE,OAAO;UACPC,OAAO;AACP9C,UAAAA,MAAM,EAAEwK,YAAmB;AAC3BzH,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLgB,YAAAA,WAAW,EAAEwE,MAAM,CAACxF,KAAK,CAACgB,WAAW;AACrCG,YAAAA,YAAY,EAAEqE,MAAM,CAACxF,KAAK,CAACmB,YAAY;YACvC,GAAGI;WACJ;AACD9C,UAAAA,KAAK,EAAEgJ,cAAc;UACrBvH,gBAAgB,EAAE+G,OAAO,CAACiB;AAC5B,SAAC,CAAC;AAEF,QAAA,OAAO1C,MAAM;MACf,CAAC,CAAC,OAAOrH,KAAU,EAAE;AACnB,QAAA,MAAMuJ,OAAO,GAAG/H,KAAK,CAAC+H,OAAO;AAC7B,QAAA,MAAMlI,kBAAkB,CAAC;AACvBlC,UAAAA,MAAM,EAAE0J,QAAQ;UAChBvH,UAAU,EAAEwH,OAAO,CAACa,iBAAiB;AACrCpI,UAAAA,OAAO,EAAEuH,OAAO,CAACc,cAAc,IAAIC,EAAM,EAAE;AAC3CrI,UAAAA,KAAK,EAAE+H,OAAO;UACdpJ,QAAQ,EAAEqB,KAAK,CAACrB,QAAQ;AACxBd,UAAAA,KAAK,EAAEyJ,OAAO,CAAC5E,kBAAkB,GAAG,EAAE,GAAG2B,eAAe,CAAChH,MAAM,CAACiL,MAAM,CAAC;AACvErI,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACX9C,UAAAA,MAAM,EAAEwK,YAAmB;UAC3BzH,UAAU,EAAE5B,KAAK,EAAEgK,MAAM,GAAGhK,KAAK,CAACgK,MAAM,GAAG,GAAG;AAC9CnI,UAAAA,KAAK,EAAE;AACLgB,YAAAA,WAAW,EAAE,CAAC;AACdG,YAAAA,YAAY,EAAE;WACf;AACDlB,UAAAA,OAAO,EAAE,IAAI;UACb9B,KAAK,EAAET,QAAQ,CAACmB,IAAI,CAACE,SAAS,CAACZ,KAAK,CAAC,CAAC;AACtCM,UAAAA,KAAK,EAAEgJ,cAAc;UACrBvH,gBAAgB,EAAE+G,OAAO,CAACiB;AAC5B,SAAC,CAAC;AACF,QAAA,MAAM/J,KAAK;AACb,MAAA;IACF,CAAC;IAEDiK,UAAU,EAAE,OAAO;MAAEC,QAAQ;AAAErL,MAAAA;AAAO,KAAC,KAAK;AAC1C,MAAA,MAAMqK,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE;MAC5B,IAAIe,aAAa,GAAG,EAAE;MACtB,IAAInC,aAAa,GAAG,EAAE;MACtB,IAAInG,KAMH,GAAG,EAAE;AACN,MAAA,MAAMwH,YAAY,GAAG;AACnB,QAAA,GAAGP,OAAO;QACV,GAAG9D,eAAe,CAACnG,MAAM;OAC1B;MAED,MAAM0K,OAAO,GAAGT,OAAO,CAAC/E,oBAAoB,IAAIvC,KAAK,CAAC+H,OAAO;MAC7D,MAAMpJ,QAAQ,GAAG2I,OAAO,CAACjF,uBAAuB,IAAI2E,eAAe,CAAChH,KAAK,CAAC;AAC1E,MAAA,MAAM8H,cAAc,GAAGpJ,yBAAyB,CAAC,QAAQ,EAAErB,MAAM,CAAC;MAClE,MAAM8C,OAAO,GAAG,EAAE,CAAA;;MAElB,IAAI;QACF,MAAM;UAAEiE,MAAM;UAAE,GAAGwE;AAAK,SAAC,GAAG,MAAMF,QAAQ,EAAE;AAC5C,QAAA,MAAMG,eAAe,GAAG,IAAIC,eAAe,CAAuD;AAChGC,UAAAA,SAASA,CAACC,KAAK,EAAEC,UAAU,EAAE;AAC3B;AACA,YAAA,IAAID,KAAK,CAACnK,IAAI,KAAK,YAAY,EAAE;cAC/B8J,aAAa,IAAIK,KAAK,CAACE,KAAK;AAC9B,YAAA;AACA,YAAA,IAAIF,KAAK,CAACnK,IAAI,KAAK,iBAAiB,EAAE;AACpC2H,cAAAA,aAAa,IAAIwC,KAAK,CAACE,KAAK,CAAA;AAC9B,YAAA;AACA,YAAA,IAAIF,KAAK,CAACnK,IAAI,KAAK,QAAQ,EAAE;AAC3BwB,cAAAA,KAAK,GAAG;AACNgB,gBAAAA,WAAW,EAAE2H,KAAK,CAAC3I,KAAK,EAAEgB,WAAW;AACrCG,gBAAAA,YAAY,EAAEwH,KAAK,CAAC3I,KAAK,EAAEmB;eAC5B;AACD,cAAA,IAAIwH,KAAK,CAACpC,gBAAgB,EAAEo
B,MAAM,EAAEnG,eAAe,EAAE;gBACnDxB,KAAK,CAACwB,eAAe,GAAGmH,KAAK,CAACpC,gBAAgB,CAACoB,MAAM,CAACnG,eAAe;AACvE,cAAA;AACA,cAAA,IAAImH,KAAK,CAACpC,gBAAgB,EAAEoB,MAAM,EAAEC,kBAAkB,EAAE;gBACtD5H,KAAK,CAAC0B,oBAAoB,GAAGiH,KAAK,CAACpC,gBAAgB,CAACoB,MAAM,CAACC,kBAAkB;AAC/E,cAAA;AACA,cAAA,IAAIe,KAAK,CAACpC,gBAAgB,EAAEsB,SAAS,EAAEnG,oBAAoB,EAAE;gBAC3D1B,KAAK,CAAC0B,oBAAoB,GAAGiH,KAAK,CAACpC,gBAAgB,CAACsB,SAAS,CAACnG,oBAAoB;AACpF,cAAA;AACA,cAAA,IAAIiH,KAAK,CAACpC,gBAAgB,EAAEsB,SAAS,EAAEjG,wBAAwB,EAAE;gBAC/D5B,KAAK,CAAC4B,wBAAwB,GAAG+G,KAAK,CAACpC,gBAAgB,CAACsB,SAAS,CAACjG,wBAAwB;AAC5F,cAAA;AACF,YAAA;AACAgH,YAAAA,UAAU,CAACE,OAAO,CAACH,KAAK,CAAC;UAC3B,CAAC;UAEDI,KAAK,EAAE,YAAY;YACjB,MAAMlJ,OAAO,GAAG,CAACyH,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI;YAC/C,MAAM2B,aAAa,GAAG7C,aAAa,GAAG,CAAA,EAAGA,aAAa,CAAA,IAAA,EAAOmC,aAAa,CAAA,CAAE,GAAGA,aAAa;AAC5F,YAAA,MAAM9I,kBAAkB,CAAC;AACvBlC,cAAAA,MAAM,EAAE0J,QAAQ;cAChBvH,UAAU,EAAEwH,OAAO,CAACa,iBAAiB;AACrCpI,cAAAA,OAAO,EAAEuH,OAAO,CAACc,cAAc,IAAIC,EAAM,EAAE;AAC3CrI,cAAAA,KAAK,EAAE+H,OAAO;AACdpJ,cAAAA,QAAQ,EAAEA,QAAQ;AAClBd,cAAAA,KAAK,EAAEyJ,OAAO,CAAC5E,kBAAkB,GAAG,EAAE,GAAG2B,eAAe,CAAChH,MAAM,CAACiL,MAAM,CAAC;AACvErI,cAAAA,MAAM,EAAE,CAAC;AAAEwE,gBAAAA,OAAO,EAAE4E,aAAa;AAAE3E,gBAAAA,IAAI,EAAE;AAAY,eAAC,CAAC;cACvDxE,OAAO;cACPC,OAAO;AACP9C,cAAAA,MAAM,EAAEwK,YAAmB;AAC3BzH,cAAAA,UAAU,EAAE,GAAG;cACfC,KAAK;AACLvB,cAAAA,KAAK,EAAEgJ,cAAc;cACrBvH,gBAAgB,EAAE+G,OAAO,CAACiB;AAC5B,aAAC,CAAC;AACJ,UAAA;AACF,SAAC,CAAC;QAEF,OAAO;AACLnE,UAAAA,MAAM,EAAEA,MAAM,CAACkF,WAAW,CAACT,eAAe,CAAC;UAC3C,GAAGD;SACJ;MACH,CAAC,CAAC,OAAOpK,KAAU,EAAE;AACnB,QAAA,MAAMqB,kBAAkB,CAAC;AACvBlC,UAAAA,MAAM,EAAE0J,QAAQ;UAChBvH,UAAU,EAAEwH,OAAO,CAACa,iBAAiB;AACrCpI,UAAAA,OAAO,EAAEuH,OAAO,CAACc,cAAc,IAAIC,EAAM,EAAE;AAC3CrI,UAAAA,KAAK,EAAE+H,OAAO;AACdpJ,UAAAA,QAAQ,EAAEA,QAAQ;AAClBd,UAAAA,KAAK,EAAEyJ,OAAO,CAAC5E,kBAAkB,GAAG,EAAE,GAAG2B,eAAe,CAAChH,MAAM,CAACiL,MAAM,CAAC;AACvErI,UAAAA,MAAM,EAAE,EAAE;AACVC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAE,EAAE;AACX9C,UAAAA,MAAM,EAAEwK,YAAmB;UAC3BzH,UAAU,EAAE5B,KAAK,EAAEgK,MAAM,GAAGhK,KAAK,CAACgK,MAAM,GAAG,GAAG;AAC9CnI,UAAAA,KAAK,EAAE;AACLgB,YAAAA,WAAW,EAAE,CAAC;AACdG,YAAAA,YAAY,EAAE;WACf;AACDlB,UAAAA,OAAO,EAAE,IAAI;UACb9B,KAAK,EAAET,QAAQ,CAACmB,IAAI,CAACE,SAAS,CAACZ,KAAK,CAAC,CAAC;AACtCM,UAAAA,KAAK,EAAEgJ,cAAc;UACrBvH,gBAAgB,EAAE+G,OAAO,CAACiB;AAC5B,SAAC,CAAC;AACF,QAAA,MAAM/J,KAAK;AACb,MAAA;AACF,IAAA;GACD;AAED,EAAA,OAAO+I,UAAU;AACnB,CAAC;AAEM,MAAMgC,uBAAuB,GAAGA,CACrCvJ,KAAsB,EACtBqH,QAAiB,EACjBC,OAAsB,KACF;EACpB,MAAMvH,OAAO,GAAGuH,OAAO,CAACc,cAAc,IAAIC,EAAM,EAAE;AAClD,EAAA,MAAMd,UAAU,GAAGH,+BAA+B,CAACC,QAAQ,EAAErH,KAAK,EAAE;AAClE,IAAA,GAAGsH,OAAO;AACVc,IAAAA,cAAc,EAAErI,OAAO;IACvBoI,iBAAiB,EAAEb,OAAO,CAACa;AAC7B,GAAC,CAAC;EAEF,MAAMqB,YAAY,GAAGC,iBAAiB,CAAC;IACrCzJ,KAAK;AACLuH,IAAAA;AACF,GAAC,CAAC;AAEF,EAAA,OAAOiC,YAAY;AACrB;;;;"}
package/package.json
CHANGED

@@ -1,21 +1,22 @@
 {
   "name": "@posthog/ai",
-  "version": "
+  "version": "6.0.0",
   "description": "PostHog Node.js AI integrations",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/PostHog/posthog-js
-    "directory": "
+    "url": "git+https://github.com/PostHog/posthog-js.git",
+    "directory": "packages/ai"
   },
-  "main": "
-  "module": "
-  "types": "
+  "main": "dist/index.cjs",
+  "module": "dist/index.mjs",
+  "types": "dist/index.d.ts",
   "license": "MIT",
   "devDependencies": {
-    "@types/jest": "^29.5.14",
-    "@types/node": "^20.0.0",
     "node-fetch": "^3.3.2",
-    "
+    "jest": "^29.7.0",
+    "@posthog-tooling/rollup-utils": "1.0.0",
+    "posthog-node": "5.6.0",
+    "@posthog-tooling/tsconfig-base": "1.0.0"
   },
   "keywords": [
     "posthog",
@@ -30,52 +31,52 @@
     "node": ">=20"
   },
   "dependencies": {
+    "@ai-sdk/provider": "^2.0.0",
     "@anthropic-ai/sdk": "^0.36.3",
     "@google/genai": "^1.1.0",
     "@langchain/core": "^0.3.37",
-    "ai": "^
+    "ai": "^5.0.4",
     "langchain": "^0.3.15",
     "openai": "^5.0.0",
     "uuid": "^11.0.5",
-    "zod": "^3.
+    "zod": "^3.25.0"
   },
   "peerDependencies": {
     "posthog-node": "^5.0.0"
   },
-  "
-  "
-
-  },
+  "files": [
+    "dist"
+  ],
   "exports": {
     ".": {
-      "require": "./
-      "import": "./
-      "types": "./
+      "require": "./dist/index.cjs",
+      "import": "./dist/index.mjs",
+      "types": "./dist/index.d.ts"
     },
     "./anthropic": {
-      "require": "./
-      "import": "./
-      "types": "./
+      "require": "./dist/anthropic/index.cjs",
+      "import": "./dist/anthropic/index.mjs",
+      "types": "./dist/anthropic/index.d.ts"
     },
     "./gemini": {
-      "require": "./
-      "import": "./
-      "types": "./
+      "require": "./dist/gemini/index.cjs",
+      "import": "./dist/gemini/index.mjs",
+      "types": "./dist/gemini/index.d.ts"
     },
     "./openai": {
-      "require": "./
-      "import": "./
-      "types": "./
+      "require": "./dist/openai/index.cjs",
+      "import": "./dist/openai/index.mjs",
+      "types": "./dist/openai/index.d.ts"
    },
     "./vercel": {
-      "require": "./
-      "import": "./
-      "types": "./
+      "require": "./dist/vercel/index.cjs",
+      "import": "./dist/vercel/index.mjs",
+      "types": "./dist/vercel/index.d.ts"
     },
     "./langchain": {
-      "require": "./
-      "import": "./
-      "types": "./
+      "require": "./dist/langchain/index.cjs",
+      "import": "./dist/langchain/index.mjs",
+      "types": "./dist/langchain/index.d.ts"
     }
   },
   "directories": {
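
Because only the export targets moved from lib/ to dist/, the public specifiers consumers import are unchanged. Illustrative only (the named export is assumed, as in the earlier sketch):

import { wrapVercelLanguageModel } from '@posthog/ai/vercel'; // "import" condition → ./dist/vercel/index.mjs
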
@@ -86,5 +87,14 @@
   "bugs": {
     "url": "https://github.com/PostHog/posthog-js-lite/issues"
   },
-  "homepage": "https://github.com/PostHog/posthog-js-lite#readme"
-
+  "homepage": "https://github.com/PostHog/posthog-js-lite#readme",
+  "scripts": {
+    "clean": "rimraf dist",
+    "test:unit": "jest",
+    "lint": "eslint src tests",
+    "lint:fix": "eslint src tests --fix",
+    "build": "rollup -c",
+    "dev": "rollup -c --watch",
+    "package": "pnpm pack --out $PACKAGE_DEST/%s.tgz"
+  }
+}