@posthog/ai 5.2.2 → 5.2.3
This diff shows the changes between published versions of this package as they appear in their public registry. It is provided for informational purposes only.
- package/LICENSE +245 -0
- package/{lib → dist}/anthropic/index.cjs +7 -12
- package/{lib → dist}/anthropic/index.cjs.map +1 -1
- package/{lib → dist}/anthropic/index.mjs +4 -5
- package/{lib → dist}/anthropic/index.mjs.map +1 -1
- package/{lib → dist}/gemini/index.cjs +1 -1
- package/{lib → dist}/gemini/index.cjs.map +1 -1
- package/{lib → dist}/gemini/index.mjs.map +1 -1
- package/{lib → dist}/index.cjs +547 -479
- package/dist/index.cjs.map +1 -0
- package/{lib → dist}/index.mjs +530 -456
- package/dist/index.mjs.map +1 -0
- package/{lib → dist}/langchain/index.cjs +150 -110
- package/dist/langchain/index.cjs.map +1 -0
- package/{lib → dist}/langchain/index.mjs +147 -104
- package/dist/langchain/index.mjs.map +1 -0
- package/{lib → dist}/openai/index.cjs +7 -1
- package/dist/openai/index.cjs.map +1 -0
- package/{lib → dist}/openai/index.mjs +6 -0
- package/dist/openai/index.mjs.map +1 -0
- package/{lib → dist}/vercel/index.cjs +0 -2
- package/{lib → dist}/vercel/index.cjs.map +1 -1
- package/{lib → dist}/vercel/index.mjs.map +1 -1
- package/package.json +42 -33
- package/CHANGELOG.md +0 -89
- package/index.ts +0 -1
- package/lib/index.cjs.map +0 -1
- package/lib/index.mjs.map +0 -1
- package/lib/langchain/index.cjs.map +0 -1
- package/lib/langchain/index.mjs.map +0 -1
- package/lib/openai/index.cjs.map +0 -1
- package/lib/openai/index.mjs.map +0 -1
- package/src/anthropic/index.ts +0 -211
- package/src/gemini/index.ts +0 -254
- package/src/index.ts +0 -13
- package/src/langchain/callbacks.ts +0 -640
- package/src/langchain/index.ts +0 -1
- package/src/openai/azure.ts +0 -481
- package/src/openai/index.ts +0 -498
- package/src/utils.ts +0 -287
- package/src/vercel/index.ts +0 -1
- package/src/vercel/middleware.ts +0 -393
- package/tests/callbacks.test.ts +0 -48
- package/tests/gemini.test.ts +0 -344
- package/tests/openai.test.ts +0 -403
- package/tsconfig.json +0 -10
- /package/{lib → dist}/anthropic/index.d.ts +0 -0
- /package/{lib → dist}/gemini/index.d.ts +0 -0
- /package/{lib → dist}/gemini/index.mjs +0 -0
- /package/{lib → dist}/index.d.ts +0 -0
- /package/{lib → dist}/langchain/index.d.ts +0 -0
- /package/{lib → dist}/openai/index.d.ts +0 -0
- /package/{lib → dist}/vercel/index.d.ts +0 -0
- /package/{lib → dist}/vercel/index.mjs +0 -0
package/lib/openai/index.mjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"index.mjs","sources":["../../src/utils.ts","../../src/openai/index.ts"],"sourcesContent":["import { PostHog } from 'posthog-node'\nimport { Buffer } from 'buffer'\nimport OpenAIOrignal from 'openai'\nimport AnthropicOriginal from '@anthropic-ai/sdk'\n\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams\ntype ResponseCreateParams = OpenAIOrignal.Responses.ResponseCreateParams\n\n// limit large outputs by truncating to 200kb (approx 200k bytes)\nexport const MAX_OUTPUT_SIZE = 200000\nconst STRING_FORMAT = 'utf8'\n\nexport interface MonitoringParams {\n posthogDistinctId?: string\n posthogTraceId?: string\n posthogProperties?: Record<string, any>\n posthogPrivacyMode?: boolean\n posthogGroups?: Record<string, any>\n posthogModelOverride?: string\n posthogProviderOverride?: string\n posthogCostOverride?: CostOverride\n posthogCaptureImmediate?: boolean\n}\n\nexport interface CostOverride {\n inputCost: number\n outputCost: number\n}\n\nexport const getModelParams = (\n params: ((ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams) | null\n): Record<string, any> => {\n if (!params) {\n return {}\n }\n const modelParams: Record<string, any> = {}\n const paramKeys = [\n 'temperature',\n 'max_tokens',\n 'max_completion_tokens',\n 'top_p',\n 'frequency_penalty',\n 'presence_penalty',\n 'n',\n 'stop',\n 'stream',\n 'streaming',\n ] as const\n\n for (const key of paramKeys) {\n if (key in params && (params as any)[key] !== undefined) {\n modelParams[key] = (params as any)[key]\n }\n }\n return modelParams\n}\n\n/**\n * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.\n */\nexport const formatResponse = (response: any, provider: string): Array<{ role: string; content: string }> => {\n if (!response) {\n return []\n }\n if (provider === 'anthropic') {\n return formatResponseAnthropic(response)\n } else if (provider === 'openai') {\n return formatResponseOpenAI(response)\n } else if (provider === 'gemini') {\n return formatResponseGemini(response)\n }\n return []\n}\n\nexport const formatResponseAnthropic = (response: any): Array<{ role: string; content: string }> => {\n // Example approach if \"response.content\" holds array of text segments, etc.\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.content ?? []) {\n if (choice?.text) {\n output.push({\n role: 'assistant',\n content: choice.text,\n })\n }\n }\n return output\n}\n\nexport const formatResponseOpenAI = (response: any): Array<{ role: string; content: string }> => {\n const output: Array<{ role: string; content: string }> = []\n for (const choice of response.choices ?? 
[]) {\n if (choice.message?.content) {\n output.push({\n role: choice.message.role,\n content: choice.message.content,\n })\n }\n }\n return output\n}\n\nexport const formatResponseGemini = (response: any): Array<{ role: string; content: string }> => {\n const output: Array<{ role: string; content: string }> = []\n\n if (response.text) {\n output.push({\n role: 'assistant',\n content: response.text,\n })\n return output\n }\n\n if (response.candidates && Array.isArray(response.candidates)) {\n for (const candidate of response.candidates) {\n if (candidate.content && candidate.content.parts) {\n const text = candidate.content.parts\n .filter((part: any) => part.text)\n .map((part: any) => part.text)\n .join('')\n if (text) {\n output.push({\n role: 'assistant',\n content: text,\n })\n }\n }\n }\n }\n\n return output\n}\n\nexport const mergeSystemPrompt = (params: MessageCreateParams & MonitoringParams, provider: string): any => {\n if (provider == 'anthropic') {\n const messages = params.messages || []\n if (!(params as any).system) {\n return messages\n }\n const systemMessage = (params as any).system\n return [{ role: 'system', content: systemMessage }, ...messages]\n }\n return params.messages\n}\n\nexport const withPrivacyMode = (client: PostHog, privacyMode: boolean, input: any): any => {\n return (client as any).privacy_mode || privacyMode ? null : input\n}\n\nexport const truncate = (str: string): string => {\n try {\n const buffer = Buffer.from(str, STRING_FORMAT)\n if (buffer.length <= MAX_OUTPUT_SIZE) {\n return str\n }\n const truncatedBuffer = buffer.slice(0, MAX_OUTPUT_SIZE)\n return `${truncatedBuffer.toString(STRING_FORMAT)}... [truncated]`\n } catch (error) {\n console.error('Error truncating, likely not a string')\n return str\n }\n}\n\nexport type SendEventToPosthogParams = {\n client: PostHog\n distinctId?: string\n traceId: string\n model: string\n provider: string\n input: any\n output: any\n latency: number\n baseURL: string\n httpStatus: number\n usage?: {\n inputTokens?: number\n outputTokens?: number\n reasoningTokens?: any\n cacheReadInputTokens?: any\n cacheCreationInputTokens?: any\n }\n params: (ChatCompletionCreateParamsBase | MessageCreateParams | ResponseCreateParams) & MonitoringParams\n isError?: boolean\n error?: string\n tools?: any\n captureImmediate?: boolean\n}\n\nfunction sanitizeValues(obj: any): any {\n if (obj === undefined || obj === null) {\n return obj\n }\n const jsonSafe = JSON.parse(JSON.stringify(obj))\n if (typeof jsonSafe === 'string') {\n return Buffer.from(jsonSafe, STRING_FORMAT).toString(STRING_FORMAT)\n } else if (Array.isArray(jsonSafe)) {\n return jsonSafe.map(sanitizeValues)\n } else if (jsonSafe && typeof jsonSafe === 'object') {\n return Object.fromEntries(Object.entries(jsonSafe).map(([k, v]) => [k, sanitizeValues(v)]))\n }\n return jsonSafe\n}\n\nexport const sendEventToPosthog = async ({\n client,\n distinctId,\n traceId,\n model,\n provider,\n input,\n output,\n latency,\n baseURL,\n params,\n httpStatus = 200,\n usage = {},\n isError = false,\n error,\n tools,\n captureImmediate = false,\n}: SendEventToPosthogParams): Promise<void> => {\n if (!client.capture) {\n return Promise.resolve()\n }\n // sanitize input and output for UTF-8 validity\n const safeInput = sanitizeValues(input)\n const safeOutput = sanitizeValues(output)\n const safeError = sanitizeValues(error)\n\n let errorData = {}\n if (isError) {\n errorData = {\n $ai_is_error: true,\n $ai_error: safeError,\n }\n }\n let costOverrideData = {}\n if 
(params.posthogCostOverride) {\n const inputCostUSD = (params.posthogCostOverride.inputCost ?? 0) * (usage.inputTokens ?? 0)\n const outputCostUSD = (params.posthogCostOverride.outputCost ?? 0) * (usage.outputTokens ?? 0)\n costOverrideData = {\n $ai_input_cost_usd: inputCostUSD,\n $ai_output_cost_usd: outputCostUSD,\n $ai_total_cost_usd: inputCostUSD + outputCostUSD,\n }\n }\n\n const additionalTokenValues = {\n ...(usage.reasoningTokens ? { $ai_reasoning_tokens: usage.reasoningTokens } : {}),\n ...(usage.cacheReadInputTokens ? { $ai_cache_read_input_tokens: usage.cacheReadInputTokens } : {}),\n ...(usage.cacheCreationInputTokens ? { $ai_cache_creation_input_tokens: usage.cacheCreationInputTokens } : {}),\n }\n\n const properties = {\n $ai_provider: params.posthogProviderOverride ?? provider,\n $ai_model: params.posthogModelOverride ?? model,\n $ai_model_parameters: getModelParams(params),\n $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeInput),\n $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, safeOutput),\n $ai_http_status: httpStatus,\n $ai_input_tokens: usage.inputTokens ?? 0,\n $ai_output_tokens: usage.outputTokens ?? 0,\n ...additionalTokenValues,\n $ai_latency: latency,\n $ai_trace_id: traceId,\n $ai_base_url: baseURL,\n ...params.posthogProperties,\n ...(distinctId ? {} : { $process_person_profile: false }),\n ...(tools ? { $ai_tools: tools } : {}),\n ...errorData,\n ...costOverrideData,\n }\n\n const event = {\n distinctId: distinctId ?? traceId,\n event: '$ai_generation',\n properties,\n groups: params.posthogGroups,\n }\n\n if (captureImmediate) {\n // await capture promise to send single event in serverless environments\n await client.captureImmediate(event)\n } else {\n client.capture(event)\n }\n}\n","import { OpenAI as OpenAIOrignal, ClientOptions } from 'openai'\nimport { PostHog } from 'posthog-node'\nimport { v4 as uuidv4 } from 'uuid'\nimport { formatResponseOpenAI, MonitoringParams, sendEventToPosthog } from '../utils'\nimport type { APIPromise } from 'openai'\nimport type { Stream } from 'openai/streaming'\nimport type { ParsedResponse } from 'openai/resources/responses/responses'\n\nconst Chat = OpenAIOrignal.Chat\nconst Completions = Chat.Completions\nconst Responses = OpenAIOrignal.Responses\n\ntype ChatCompletion = OpenAIOrignal.ChatCompletion\ntype ChatCompletionChunk = OpenAIOrignal.ChatCompletionChunk\ntype ChatCompletionCreateParamsBase = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParams\ntype ChatCompletionCreateParamsNonStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsNonStreaming\ntype ChatCompletionCreateParamsStreaming = OpenAIOrignal.Chat.Completions.ChatCompletionCreateParamsStreaming\ntype ResponsesCreateParamsBase = OpenAIOrignal.Responses.ResponseCreateParams\ntype ResponsesCreateParamsNonStreaming = OpenAIOrignal.Responses.ResponseCreateParamsNonStreaming\ntype ResponsesCreateParamsStreaming = OpenAIOrignal.Responses.ResponseCreateParamsStreaming\n\ninterface MonitoringOpenAIConfig extends ClientOptions {\n apiKey: string\n posthog: PostHog\n baseURL?: string\n}\n\ntype RequestOptions = Record<string, any>\n\nexport class PostHogOpenAI extends OpenAIOrignal {\n private readonly phClient: PostHog\n public chat: WrappedChat\n public responses: WrappedResponses\n\n constructor(config: MonitoringOpenAIConfig) {\n const { posthog, ...openAIConfig } = config\n super(openAIConfig)\n this.phClient = posthog\n this.chat = new WrappedChat(this, this.phClient)\n 
this.responses = new WrappedResponses(this, this.phClient)\n }\n}\n\nexport class WrappedChat extends Chat {\n constructor(parentClient: PostHogOpenAI, phClient: PostHog) {\n super(parentClient)\n this.completions = new WrappedCompletions(parentClient, phClient)\n }\n\n public completions: WrappedCompletions\n}\n\nexport class WrappedCompletions extends Completions {\n private readonly phClient: PostHog\n\n constructor(client: OpenAIOrignal, phClient: PostHog) {\n super(client)\n this.phClient = phClient\n }\n\n // --- Overload #1: Non-streaming\n public create(\n body: ChatCompletionCreateParamsNonStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion>\n\n // --- Overload #2: Streaming\n public create(\n body: ChatCompletionCreateParamsStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<Stream<ChatCompletionChunk>>\n\n // --- Overload #3: Generic base\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>>\n\n // --- Implementation Signature\n public create(\n body: ChatCompletionCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ChatCompletion | Stream<ChatCompletionChunk>> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n posthogPrivacyMode = false,\n posthogGroups,\n posthogCaptureImmediate,\n ...openAIParams\n } = body\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n\n const parentPromise = super.create(openAIParams, options)\n\n if (openAIParams.stream) {\n return parentPromise.then((value) => {\n if ('tee' in value) {\n const [stream1, stream2] = value.tee()\n ;(async () => {\n try {\n let accumulatedContent = ''\n let usage: {\n inputTokens?: number\n outputTokens?: number\n reasoningTokens?: number\n cacheReadInputTokens?: number\n } = {\n inputTokens: 0,\n outputTokens: 0,\n }\n\n for await (const chunk of stream1) {\n const delta = chunk?.choices?.[0]?.delta?.content ?? ''\n accumulatedContent += delta\n if (chunk.usage) {\n usage = {\n inputTokens: chunk.usage.prompt_tokens ?? 0,\n outputTokens: chunk.usage.completion_tokens ?? 0,\n reasoningTokens: chunk.usage.completion_tokens_details?.reasoning_tokens ?? 0,\n cacheReadInputTokens: chunk.usage.prompt_tokens_details?.cached_tokens ?? 0,\n }\n }\n }\n\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.messages,\n output: [{ content: accumulatedContent, role: 'assistant' }],\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage,\n captureImmediate: posthogCaptureImmediate,\n })\n } catch (error: any) {\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.messages,\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: error?.status ? 
error.status : 500,\n usage: { inputTokens: 0, outputTokens: 0 },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n }\n })()\n\n // Return the other stream to the user\n return stream2\n }\n return value\n }) as APIPromise<Stream<ChatCompletionChunk>>\n } else {\n const wrappedPromise = parentPromise.then(\n async (result) => {\n if ('choices' in result) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.messages,\n output: formatResponseOpenAI(result),\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage: {\n inputTokens: result.usage?.prompt_tokens ?? 0,\n outputTokens: result.usage?.completion_tokens ?? 0,\n reasoningTokens: result.usage?.completion_tokens_details?.reasoning_tokens ?? 0,\n cacheReadInputTokens: result.usage?.prompt_tokens_details?.cached_tokens ?? 0,\n },\n captureImmediate: posthogCaptureImmediate,\n })\n }\n return result\n },\n async (error: any) => {\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.messages,\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: error?.status ? error.status : 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n ) as APIPromise<ChatCompletion>\n\n return wrappedPromise\n }\n }\n}\n\nexport class WrappedResponses extends Responses {\n private readonly phClient: PostHog\n\n constructor(client: OpenAIOrignal, phClient: PostHog) {\n super(client)\n this.phClient = phClient\n }\n\n // --- Overload #1: Non-streaming\n public create(\n body: ResponsesCreateParamsNonStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<OpenAIOrignal.Responses.Response>\n\n // --- Overload #2: Streaming\n public create(\n body: ResponsesCreateParamsStreaming & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>\n\n // --- Overload #3: Generic base\n public create(\n body: ResponsesCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<OpenAIOrignal.Responses.Response | Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>\n\n // --- Implementation Signature\n public create(\n body: ResponsesCreateParamsBase & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<OpenAIOrignal.Responses.Response | Stream<OpenAIOrignal.Responses.ResponseStreamEvent>> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n posthogPrivacyMode = false,\n posthogGroups,\n posthogCaptureImmediate,\n ...openAIParams\n } = body\n\n const traceId = posthogTraceId ?? 
uuidv4()\n const startTime = Date.now()\n\n const parentPromise = super.create(openAIParams, options)\n\n if (openAIParams.stream) {\n return parentPromise.then((value) => {\n if ('tee' in value && typeof (value as any).tee === 'function') {\n const [stream1, stream2] = (value as any).tee()\n ;(async () => {\n try {\n let finalContent: any[] = []\n let usage: {\n inputTokens?: number\n outputTokens?: number\n reasoningTokens?: number\n cacheReadInputTokens?: number\n } = {\n inputTokens: 0,\n outputTokens: 0,\n }\n\n for await (const chunk of stream1) {\n if (\n chunk.type === 'response.completed' &&\n 'response' in chunk &&\n chunk.response?.output &&\n chunk.response.output.length > 0\n ) {\n finalContent = chunk.response.output\n }\n if ('response' in chunk && chunk.response?.usage) {\n usage = {\n inputTokens: chunk.response.usage.input_tokens ?? 0,\n outputTokens: chunk.response.usage.output_tokens ?? 0,\n reasoningTokens: chunk.response.usage.output_tokens_details?.reasoning_tokens ?? 0,\n cacheReadInputTokens: chunk.response.usage.input_tokens_details?.cached_tokens ?? 0,\n }\n }\n }\n\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.input,\n output: finalContent,\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage,\n captureImmediate: posthogCaptureImmediate,\n })\n } catch (error: any) {\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.input,\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: error?.status ? error.status : 500,\n usage: { inputTokens: 0, outputTokens: 0 },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n }\n })()\n\n return stream2\n }\n return value\n }) as APIPromise<Stream<OpenAIOrignal.Responses.ResponseStreamEvent>>\n } else {\n const wrappedPromise = parentPromise.then(\n async (result) => {\n if ('output' in result) {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.input,\n output: result.output,\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage: {\n inputTokens: result.usage?.input_tokens ?? 0,\n outputTokens: result.usage?.output_tokens ?? 0,\n reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,\n cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0,\n },\n captureImmediate: posthogCaptureImmediate,\n })\n }\n return result\n },\n async (error: any) => {\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.input,\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: error?.status ? 
error.status : 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n ) as APIPromise<OpenAIOrignal.Responses.Response>\n\n return wrappedPromise\n }\n }\n\n public parse<Params extends ResponsesCreateParamsBase, ParsedT = any>(\n body: Params & MonitoringParams,\n options?: RequestOptions\n ): APIPromise<ParsedResponse<ParsedT>> {\n const {\n posthogDistinctId,\n posthogTraceId,\n posthogProperties,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n posthogPrivacyMode = false,\n posthogGroups,\n posthogCaptureImmediate,\n ...openAIParams\n } = body\n\n const traceId = posthogTraceId ?? uuidv4()\n const startTime = Date.now()\n\n // Create a temporary instance that bypasses our wrapped create method\n const originalCreate = super.create.bind(this)\n const originalSelf = this as any\n const tempCreate = originalSelf.create\n originalSelf.create = originalCreate\n\n try {\n const parentPromise = super.parse(openAIParams, options)\n\n const wrappedPromise = parentPromise.then(\n async (result) => {\n const latency = (Date.now() - startTime) / 1000\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.input,\n output: result.output,\n latency,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: 200,\n usage: {\n inputTokens: result.usage?.input_tokens ?? 0,\n outputTokens: result.usage?.output_tokens ?? 0,\n reasoningTokens: result.usage?.output_tokens_details?.reasoning_tokens ?? 0,\n cacheReadInputTokens: result.usage?.input_tokens_details?.cached_tokens ?? 0,\n },\n captureImmediate: posthogCaptureImmediate,\n })\n return result\n },\n async (error: any) => {\n await sendEventToPosthog({\n client: this.phClient,\n distinctId: posthogDistinctId,\n traceId,\n model: openAIParams.model,\n provider: 'openai',\n input: openAIParams.input,\n output: [],\n latency: 0,\n baseURL: (this as any).baseURL ?? '',\n params: body,\n httpStatus: error?.status ? 
error.status : 500,\n usage: {\n inputTokens: 0,\n outputTokens: 0,\n },\n isError: true,\n error: JSON.stringify(error),\n captureImmediate: posthogCaptureImmediate,\n })\n throw error\n }\n )\n\n return wrappedPromise as APIPromise<ParsedResponse<ParsedT>>\n } finally {\n // Restore our wrapped create method\n originalSelf.create = tempCreate\n }\n }\n}\n\nexport default PostHogOpenAI\n\nexport { PostHogOpenAI as OpenAI }\n"],"names":["STRING_FORMAT","getModelParams","params","modelParams","paramKeys","key","undefined","formatResponseOpenAI","response","output","choice","choices","message","content","push","role","withPrivacyMode","client","privacyMode","input","privacy_mode","sanitizeValues","obj","jsonSafe","JSON","parse","stringify","Buffer","from","toString","Array","isArray","map","Object","fromEntries","entries","k","v","sendEventToPosthog","distinctId","traceId","model","provider","latency","baseURL","httpStatus","usage","isError","error","tools","captureImmediate","capture","Promise","resolve","safeInput","safeOutput","safeError","errorData","$ai_is_error","$ai_error","costOverrideData","posthogCostOverride","inputCostUSD","inputCost","inputTokens","outputCostUSD","outputCost","outputTokens","$ai_input_cost_usd","$ai_output_cost_usd","$ai_total_cost_usd","additionalTokenValues","reasoningTokens","$ai_reasoning_tokens","cacheReadInputTokens","$ai_cache_read_input_tokens","cacheCreationInputTokens","$ai_cache_creation_input_tokens","properties","$ai_provider","posthogProviderOverride","$ai_model","posthogModelOverride","$ai_model_parameters","$ai_input","posthogPrivacyMode","$ai_output_choices","$ai_http_status","$ai_input_tokens","$ai_output_tokens","$ai_latency","$ai_trace_id","$ai_base_url","posthogProperties","$process_person_profile","$ai_tools","event","groups","posthogGroups","Chat","OpenAIOrignal","Completions","Responses","PostHogOpenAI","constructor","config","posthog","openAIConfig","phClient","chat","WrappedChat","responses","WrappedResponses","parentClient","completions","WrappedCompletions","create","body","options","posthogDistinctId","posthogTraceId","posthogCaptureImmediate","openAIParams","uuidv4","startTime","Date","now","parentPromise","stream","then","value","stream1","stream2","tee","accumulatedContent","chunk","delta","prompt_tokens","completion_tokens","completion_tokens_details","reasoning_tokens","prompt_tokens_details","cached_tokens","messages","status","wrappedPromise","result","finalContent","type","length","input_tokens","output_tokens","output_tokens_details","input_tokens_details","originalCreate","bind","originalSelf","tempCreate"],"mappings":";;;;AAWA,MAAMA,aAAa,GAAG,MAAM,CAAA;AAmBrB,MAAMC,cAAc,GACzBC,MAAiH,IACzF;EACxB,IAAI,CAACA,MAAM,EAAE;AACX,IAAA,OAAO,EAAE,CAAA;AACX,GAAA;EACA,MAAMC,WAAgC,GAAG,EAAE,CAAA;EAC3C,MAAMC,SAAS,GAAG,CAChB,aAAa,EACb,YAAY,EACZ,uBAAuB,EACvB,OAAO,EACP,mBAAmB,EACnB,kBAAkB,EAClB,GAAG,EACH,MAAM,EACN,QAAQ,EACR,WAAW,CACH,CAAA;AAEV,EAAA,KAAK,MAAMC,GAAG,IAAID,SAAS,EAAE;IAC3B,IAAIC,GAAG,IAAIH,MAAM,IAAKA,MAAM,CAASG,GAAG,CAAC,KAAKC,SAAS,EAAE;AACvDH,MAAAA,WAAW,CAACE,GAAG,CAAC,GAAIH,MAAM,CAASG,GAAG,CAAC,CAAA;AACzC,KAAA;AACF,GAAA;AACA,EAAA,OAAOF,WAAW,CAAA;AACpB,CAAC,CAAA;AAiCM,MAAMI,oBAAoB,GAAIC,QAAa,IAA+C;EAC/F,MAAMC,MAAgD,GAAG,EAAE,CAAA;EAC3D,KAAK,MAAMC,MAAM,IAAIF,QAAQ,CAACG,OAAO,IAAI,EAAE,EAAE;AAC3C,IAAA,IAAID,MAAM,CAACE,OAAO,EAAEC,OAAO,EAAE;MAC3BJ,MAAM,CAACK,IAAI,CAAC;AACVC,QAAAA,IAAI,EAAEL,MAAM,CAACE,OAAO,CAACG,IAAI;AACzBF,QAAAA,OAAO,EAAEH,MAAM,CAACE,OAAO,CAACC,OAAAA;AAC1B,OAAC,CAAC,CAAA;AACJ,KAAA;AACF,GAAA;AACA,EAAA,OAAOJ,MAAM,CAAA
;AACf,CAAC,CAAA;AA6CM,MAAMO,eAAe,GAAGA,CAACC,MAAe,EAAEC,WAAoB,EAAEC,KAAU,KAAU;EACzF,OAAQF,MAAM,CAASG,YAAY,IAAIF,WAAW,GAAG,IAAI,GAAGC,KAAK,CAAA;AACnE,CAAC,CAAA;AAyCD,SAASE,cAAcA,CAACC,GAAQ,EAAO;AACrC,EAAA,IAAIA,GAAG,KAAKhB,SAAS,IAAIgB,GAAG,KAAK,IAAI,EAAE;AACrC,IAAA,OAAOA,GAAG,CAAA;AACZ,GAAA;AACA,EAAA,MAAMC,QAAQ,GAAGC,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACJ,GAAG,CAAC,CAAC,CAAA;AAChD,EAAA,IAAI,OAAOC,QAAQ,KAAK,QAAQ,EAAE;AAChC,IAAA,OAAOI,MAAM,CAACC,IAAI,CAACL,QAAQ,EAAEvB,aAAa,CAAC,CAAC6B,QAAQ,CAAC7B,aAAa,CAAC,CAAA;GACpE,MAAM,IAAI8B,KAAK,CAACC,OAAO,CAACR,QAAQ,CAAC,EAAE;AAClC,IAAA,OAAOA,QAAQ,CAACS,GAAG,CAACX,cAAc,CAAC,CAAA;GACpC,MAAM,IAAIE,QAAQ,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;AACnD,IAAA,OAAOU,MAAM,CAACC,WAAW,CAACD,MAAM,CAACE,OAAO,CAACZ,QAAQ,CAAC,CAACS,GAAG,CAAC,CAAC,CAACI,CAAC,EAAEC,CAAC,CAAC,KAAK,CAACD,CAAC,EAAEf,cAAc,CAACgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;AAC7F,GAAA;AACA,EAAA,OAAOd,QAAQ,CAAA;AACjB,CAAA;AAEO,MAAMe,kBAAkB,GAAG,OAAO;EACvCrB,MAAM;EACNsB,UAAU;EACVC,OAAO;EACPC,KAAK;EACLC,QAAQ;EACRvB,KAAK;EACLV,MAAM;EACNkC,OAAO;EACPC,OAAO;EACP1C,MAAM;AACN2C,EAAAA,UAAU,GAAG,GAAG;EAChBC,KAAK,GAAG,EAAE;AACVC,EAAAA,OAAO,GAAG,KAAK;EACfC,KAAK;EACLC,KAAK;AACLC,EAAAA,gBAAgB,GAAG,KAAA;AACK,CAAC,KAAoB;AAC7C,EAAA,IAAI,CAACjC,MAAM,CAACkC,OAAO,EAAE;AACnB,IAAA,OAAOC,OAAO,CAACC,OAAO,EAAE,CAAA;AAC1B,GAAA;AACA;AACA,EAAA,MAAMC,SAAS,GAAGjC,cAAc,CAACF,KAAK,CAAC,CAAA;AACvC,EAAA,MAAMoC,UAAU,GAAGlC,cAAc,CAACZ,MAAM,CAAC,CAAA;AACzC,EAAA,MAAM+C,SAAS,GAAGnC,cAAc,CAAC2B,KAAK,CAAC,CAAA;EAEvC,IAAIS,SAAS,GAAG,EAAE,CAAA;AAClB,EAAA,IAAIV,OAAO,EAAE;AACXU,IAAAA,SAAS,GAAG;AACVC,MAAAA,YAAY,EAAE,IAAI;AAClBC,MAAAA,SAAS,EAAEH,SAAAA;KACZ,CAAA;AACH,GAAA;EACA,IAAII,gBAAgB,GAAG,EAAE,CAAA;EACzB,IAAI1D,MAAM,CAAC2D,mBAAmB,EAAE;AAC9B,IAAA,MAAMC,YAAY,GAAG,CAAC5D,MAAM,CAAC2D,mBAAmB,CAACE,SAAS,IAAI,CAAC,KAAKjB,KAAK,CAACkB,WAAW,IAAI,CAAC,CAAC,CAAA;AAC3F,IAAA,MAAMC,aAAa,GAAG,CAAC/D,MAAM,CAAC2D,mBAAmB,CAACK,UAAU,IAAI,CAAC,KAAKpB,KAAK,CAACqB,YAAY,IAAI,CAAC,CAAC,CAAA;AAC9FP,IAAAA,gBAAgB,GAAG;AACjBQ,MAAAA,kBAAkB,EAAEN,YAAY;AAChCO,MAAAA,mBAAmB,EAAEJ,aAAa;MAClCK,kBAAkB,EAAER,YAAY,GAAGG,aAAAA;KACpC,CAAA;AACH,GAAA;AAEA,EAAA,MAAMM,qBAAqB,GAAG;IAC5B,IAAIzB,KAAK,CAAC0B,eAAe,GAAG;MAAEC,oBAAoB,EAAE3B,KAAK,CAAC0B,eAAAA;KAAiB,GAAG,EAAE,CAAC;IACjF,IAAI1B,KAAK,CAAC4B,oBAAoB,GAAG;MAAEC,2BAA2B,EAAE7B,KAAK,CAAC4B,oBAAAA;KAAsB,GAAG,EAAE,CAAC;IAClG,IAAI5B,KAAK,CAAC8B,wBAAwB,GAAG;MAAEC,+BAA+B,EAAE/B,KAAK,CAAC8B,wBAAAA;KAA0B,GAAG,EAAE,CAAA;GAC9G,CAAA;AAED,EAAA,MAAME,UAAU,GAAG;AACjBC,IAAAA,YAAY,EAAE7E,MAAM,CAAC8E,uBAAuB,IAAItC,QAAQ;AACxDuC,IAAAA,SAAS,EAAE/E,MAAM,CAACgF,oBAAoB,IAAIzC,KAAK;AAC/C0C,IAAAA,oBAAoB,EAAElF,cAAc,CAACC,MAAM,CAAC;AAC5CkF,IAAAA,SAAS,EAAEpE,eAAe,CAACC,MAAM,EAAEf,MAAM,CAACmF,kBAAkB,IAAI,KAAK,EAAE/B,SAAS,CAAC;AACjFgC,IAAAA,kBAAkB,EAAEtE,eAAe,CAACC,MAAM,EAAEf,MAAM,CAACmF,kBAAkB,IAAI,KAAK,EAAE9B,UAAU,CAAC;AAC3FgC,IAAAA,eAAe,EAAE1C,UAAU;AAC3B2C,IAAAA,gBAAgB,EAAE1C,KAAK,CAACkB,WAAW,IAAI,CAAC;AACxCyB,IAAAA,iBAAiB,EAAE3C,KAAK,CAACqB,YAAY,IAAI,CAAC;AAC1C,IAAA,GAAGI,qBAAqB;AACxBmB,IAAAA,WAAW,EAAE/C,OAAO;AACpBgD,IAAAA,YAAY,EAAEnD,OAAO;AACrBoD,IAAAA,YAAY,EAAEhD,OAAO;IACrB,GAAG1C,MAAM,CAAC2F,iBAAiB;AAC3B,IAAA,IAAItD,UAAU,GAAG,EAAE,GAAG;AAAEuD,MAAAA,uBAAuB,EAAE,KAAA;AAAM,KAAC,CAAC;AACzD,IAAA,IAAI7C,KAAK,GAAG;AAAE8C,MAAAA,SAAS,EAAE9C,KAAAA;KAAO,GAAG,EAAE,CAAC;AACtC,IAAA,GAAGQ,SAAS;IACZ,GAAGG,gBAAAA;GACJ,CAAA;AAED,EAAA,MAAMoC,KAAK,GAAG;IACZzD,UAAU,EAAEA,UAAU,IAAIC,OAAO;AACjCwD,IAAAA,KAAK,EAAE,gBAAgB;IACvBlB,UAAU;IACVmB,MAAM,EAAE/F,MAAM,CAACgG,aAAAA;GAChB,CAAA;AAED,EAAA,IAAIhD,gBAAgB,EAAE;AACpB;AACA,IAAA,MAAMjC,MAAM,CAACiC,gBAAgB,CAAC8C,KAAK,CAAC,CAAA;AACtC,GAAC,MAAM;AACL/E
,IAAAA,MAAM,CAACkC,OAAO,CAAC6C,KAAK,CAAC,CAAA;AACvB,GAAA;AACF,CAAC;;ACtRD,MAAMG,IAAI,GAAGC,MAAa,CAACD,IAAI,CAAA;AAC/B,MAAME,WAAW,GAAGF,IAAI,CAACE,WAAW,CAAA;AACpC,MAAMC,SAAS,GAAGF,MAAa,CAACE,SAAS,CAAA;AAmBlC,MAAMC,aAAa,SAASH,MAAa,CAAC;EAK/CI,WAAWA,CAACC,MAA8B,EAAE;IAC1C,MAAM;MAAEC,OAAO;MAAE,GAAGC,YAAAA;AAAa,KAAC,GAAGF,MAAM,CAAA;IAC3C,KAAK,CAACE,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,QAAQ,GAAGF,OAAO,CAAA;IACvB,IAAI,CAACG,IAAI,GAAG,IAAIC,WAAW,CAAC,IAAI,EAAE,IAAI,CAACF,QAAQ,CAAC,CAAA;IAChD,IAAI,CAACG,SAAS,GAAG,IAAIC,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAACJ,QAAQ,CAAC,CAAA;AAC5D,GAAA;AACF,CAAA;AAEO,MAAME,WAAW,SAASX,IAAI,CAAC;AACpCK,EAAAA,WAAWA,CAACS,YAA2B,EAAEL,QAAiB,EAAE;IAC1D,KAAK,CAACK,YAAY,CAAC,CAAA;IACnB,IAAI,CAACC,WAAW,GAAG,IAAIC,kBAAkB,CAACF,YAAY,EAAEL,QAAQ,CAAC,CAAA;AACnE,GAAA;AAGF,CAAA;AAEO,MAAMO,kBAAkB,SAASd,WAAW,CAAC;AAGlDG,EAAAA,WAAWA,CAACvF,MAAqB,EAAE2F,QAAiB,EAAE;IACpD,KAAK,CAAC3F,MAAM,CAAC,CAAA;IACb,IAAI,CAAC2F,QAAQ,GAAGA,QAAQ,CAAA;AAC1B,GAAA;;AAEA;;AAMA;;AAMA;;AAMA;AACOQ,EAAAA,MAAMA,CACXC,IAAuD,EACvDC,OAAwB,EACkC;IAC1D,MAAM;MACJC,iBAAiB;MACjBC,cAAc;MACd3B,iBAAiB;AACjB;AACAR,MAAAA,kBAAkB,GAAG,KAAK;MAC1Ba,aAAa;MACbuB,uBAAuB;MACvB,GAAGC,YAAAA;AACL,KAAC,GAAGL,IAAI,CAAA;AAER,IAAA,MAAM7E,OAAO,GAAGgF,cAAc,IAAIG,EAAM,EAAE,CAAA;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;IAE5B,MAAMC,aAAa,GAAG,KAAK,CAACX,MAAM,CAACM,YAAY,EAAEJ,OAAO,CAAC,CAAA;IAEzD,IAAII,YAAY,CAACM,MAAM,EAAE;AACvB,MAAA,OAAOD,aAAa,CAACE,IAAI,CAAEC,KAAK,IAAK;QACnC,IAAI,KAAK,IAAIA,KAAK,EAAE;UAClB,MAAM,CAACC,OAAO,EAAEC,OAAO,CAAC,GAAGF,KAAK,CAACG,GAAG,EAAE,CAAA;AACrC,UAAA,CAAC,YAAY;YACZ,IAAI;cACF,IAAIC,kBAAkB,GAAG,EAAE,CAAA;AAC3B,cAAA,IAAIxF,KAKH,GAAG;AACFkB,gBAAAA,WAAW,EAAE,CAAC;AACdG,gBAAAA,YAAY,EAAE,CAAA;eACf,CAAA;AAED,cAAA,WAAW,MAAMoE,KAAK,IAAIJ,OAAO,EAAE;AACjC,gBAAA,MAAMK,KAAK,GAAGD,KAAK,EAAE5H,OAAO,GAAG,CAAC,CAAC,EAAE6H,KAAK,EAAE3H,OAAO,IAAI,EAAE,CAAA;AACvDyH,gBAAAA,kBAAkB,IAAIE,KAAK,CAAA;gBAC3B,IAAID,KAAK,CAACzF,KAAK,EAAE;AACfA,kBAAAA,KAAK,GAAG;AACNkB,oBAAAA,WAAW,EAAEuE,KAAK,CAACzF,KAAK,CAAC2F,aAAa,IAAI,CAAC;AAC3CtE,oBAAAA,YAAY,EAAEoE,KAAK,CAACzF,KAAK,CAAC4F,iBAAiB,IAAI,CAAC;oBAChDlE,eAAe,EAAE+D,KAAK,CAACzF,KAAK,CAAC6F,yBAAyB,EAAEC,gBAAgB,IAAI,CAAC;oBAC7ElE,oBAAoB,EAAE6D,KAAK,CAACzF,KAAK,CAAC+F,qBAAqB,EAAEC,aAAa,IAAI,CAAA;mBAC3E,CAAA;AACH,iBAAA;AACF,eAAA;cAEA,MAAMnG,OAAO,GAAG,CAACkF,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/C,cAAA,MAAMtF,kBAAkB,CAAC;gBACvBrB,MAAM,EAAE,IAAI,CAAC2F,QAAQ;AACrBrE,gBAAAA,UAAU,EAAEgF,iBAAiB;gBAC7B/E,OAAO;gBACPC,KAAK,EAAEiF,YAAY,CAACjF,KAAK;AACzBC,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBvB,KAAK,EAAEuG,YAAY,CAACqB,QAAQ;AAC5BtI,gBAAAA,MAAM,EAAE,CAAC;AAAEI,kBAAAA,OAAO,EAAEyH,kBAAkB;AAAEvH,kBAAAA,IAAI,EAAE,WAAA;AAAY,iBAAC,CAAC;gBAC5D4B,OAAO;AACPC,gBAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpC1C,gBAAAA,MAAM,EAAEmH,IAAI;AACZxE,gBAAAA,UAAU,EAAE,GAAG;gBACfC,KAAK;AACLI,gBAAAA,gBAAgB,EAAEuE,uBAAAA;AACpB,eAAC,CAAC,CAAA;aACH,CAAC,OAAOzE,KAAU,EAAE;AACnB,cAAA,MAAMV,kBAAkB,CAAC;gBACvBrB,MAAM,EAAE,IAAI,CAAC2F,QAAQ;AACrBrE,gBAAAA,UAAU,EAAEgF,iBAAiB;gBAC7B/E,OAAO;gBACPC,KAAK,EAAEiF,YAAY,CAACjF,KAAK;AACzBC,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBvB,KAAK,EAAEuG,YAAY,CAACqB,QAAQ;AAC5BtI,gBAAAA,MAAM,EAAE,EAAE;AACVkC,gBAAAA,OAAO,EAAE,CAAC;AACVC,gBAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpC1C,gBAAAA,MAAM,EAAEmH,IAAI;gBACZxE,UAAU,EAAEG,KAAK,EAAEgG,MAAM,GAAGhG,KAAK,CAACgG,MAAM,GAAG,GAAG;AAC9ClG,gBAAAA,KAAK,EAAE;AAAEkB,kBAAAA,WAAW,EAAE,CAAC;AAAEG,kBAAAA,YAAY,EAAE,CAAA;iBAAG;AAC1CpB,gBAAAA,OAAO,EAAE,IAAI;AACbC,gBAAAA,KAAK,EAAExB,IAAI,CAACE,SAAS,CAACsB,KAAK,CAAC;AAC5BE,gBAAAA,gBAAgB,EAAEuE,uBAAAA;AACpB,eAAC,CAAC,CAAA;AACJ,aAAA;AACF,WAAC,GAA
G,CAAA;;AAEJ;AACA,UAAA,OAAOW,OAAO,CAAA;AAChB,SAAA;AACA,QAAA,OAAOF,KAAK,CAAA;AACd,OAAC,CAAC,CAAA;AACJ,KAAC,MAAM;MACL,MAAMe,cAAc,GAAGlB,aAAa,CAACE,IAAI,CACvC,MAAOiB,MAAM,IAAK;QAChB,IAAI,SAAS,IAAIA,MAAM,EAAE;UACvB,MAAMvG,OAAO,GAAG,CAACkF,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/C,UAAA,MAAMtF,kBAAkB,CAAC;YACvBrB,MAAM,EAAE,IAAI,CAAC2F,QAAQ;AACrBrE,YAAAA,UAAU,EAAEgF,iBAAiB;YAC7B/E,OAAO;YACPC,KAAK,EAAEiF,YAAY,CAACjF,KAAK;AACzBC,YAAAA,QAAQ,EAAE,QAAQ;YAClBvB,KAAK,EAAEuG,YAAY,CAACqB,QAAQ;AAC5BtI,YAAAA,MAAM,EAAEF,oBAAoB,CAAC2I,MAAM,CAAC;YACpCvG,OAAO;AACPC,YAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpC1C,YAAAA,MAAM,EAAEmH,IAAI;AACZxE,YAAAA,UAAU,EAAE,GAAG;AACfC,YAAAA,KAAK,EAAE;AACLkB,cAAAA,WAAW,EAAEkF,MAAM,CAACpG,KAAK,EAAE2F,aAAa,IAAI,CAAC;AAC7CtE,cAAAA,YAAY,EAAE+E,MAAM,CAACpG,KAAK,EAAE4F,iBAAiB,IAAI,CAAC;cAClDlE,eAAe,EAAE0E,MAAM,CAACpG,KAAK,EAAE6F,yBAAyB,EAAEC,gBAAgB,IAAI,CAAC;cAC/ElE,oBAAoB,EAAEwE,MAAM,CAACpG,KAAK,EAAE+F,qBAAqB,EAAEC,aAAa,IAAI,CAAA;aAC7E;AACD5F,YAAAA,gBAAgB,EAAEuE,uBAAAA;AACpB,WAAC,CAAC,CAAA;AACJ,SAAA;AACA,QAAA,OAAOyB,MAAM,CAAA;OACd,EACD,MAAOlG,KAAU,IAAK;AACpB,QAAA,MAAMV,kBAAkB,CAAC;UACvBrB,MAAM,EAAE,IAAI,CAAC2F,QAAQ;AACrBrE,UAAAA,UAAU,EAAEgF,iBAAiB;UAC7B/E,OAAO;UACPC,KAAK,EAAEiF,YAAY,CAACjF,KAAK;AACzBC,UAAAA,QAAQ,EAAE,QAAQ;UAClBvB,KAAK,EAAEuG,YAAY,CAACqB,QAAQ;AAC5BtI,UAAAA,MAAM,EAAE,EAAE;AACVkC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpC1C,UAAAA,MAAM,EAAEmH,IAAI;UACZxE,UAAU,EAAEG,KAAK,EAAEgG,MAAM,GAAGhG,KAAK,CAACgG,MAAM,GAAG,GAAG;AAC9ClG,UAAAA,KAAK,EAAE;AACLkB,YAAAA,WAAW,EAAE,CAAC;AACdG,YAAAA,YAAY,EAAE,CAAA;WACf;AACDpB,UAAAA,OAAO,EAAE,IAAI;AACbC,UAAAA,KAAK,EAAExB,IAAI,CAACE,SAAS,CAACsB,KAAK,CAAC;AAC5BE,UAAAA,gBAAgB,EAAEuE,uBAAAA;AACpB,SAAC,CAAC,CAAA;AACF,QAAA,MAAMzE,KAAK,CAAA;AACb,OACF,CAA+B,CAAA;AAE/B,MAAA,OAAOiG,cAAc,CAAA;AACvB,KAAA;AACF,GAAA;AACF,CAAA;AAEO,MAAMjC,gBAAgB,SAASV,SAAS,CAAC;AAG9CE,EAAAA,WAAWA,CAACvF,MAAqB,EAAE2F,QAAiB,EAAE;IACpD,KAAK,CAAC3F,MAAM,CAAC,CAAA;IACb,IAAI,CAAC2F,QAAQ,GAAGA,QAAQ,CAAA;AAC1B,GAAA;;AAEA;;AAMA;;AAMA;;AAMA;AACOQ,EAAAA,MAAMA,CACXC,IAAkD,EAClDC,OAAwB,EAC4E;IACpG,MAAM;MACJC,iBAAiB;MACjBC,cAAc;MACd3B,iBAAiB;AACjB;AACAR,MAAAA,kBAAkB,GAAG,KAAK;MAC1Ba,aAAa;MACbuB,uBAAuB;MACvB,GAAGC,YAAAA;AACL,KAAC,GAAGL,IAAI,CAAA;AAER,IAAA,MAAM7E,OAAO,GAAGgF,cAAc,IAAIG,EAAM,EAAE,CAAA;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;IAE5B,MAAMC,aAAa,GAAG,KAAK,CAACX,MAAM,CAACM,YAAY,EAAEJ,OAAO,CAAC,CAAA;IAEzD,IAAII,YAAY,CAACM,MAAM,EAAE;AACvB,MAAA,OAAOD,aAAa,CAACE,IAAI,CAAEC,KAAK,IAAK;QACnC,IAAI,KAAK,IAAIA,KAAK,IAAI,OAAQA,KAAK,CAASG,GAAG,KAAK,UAAU,EAAE;UAC9D,MAAM,CAACF,OAAO,EAAEC,OAAO,CAAC,GAAIF,KAAK,CAASG,GAAG,EAAE,CAAA;AAC9C,UAAA,CAAC,YAAY;YACZ,IAAI;cACF,IAAIc,YAAmB,GAAG,EAAE,CAAA;AAC5B,cAAA,IAAIrG,KAKH,GAAG;AACFkB,gBAAAA,WAAW,EAAE,CAAC;AACdG,gBAAAA,YAAY,EAAE,CAAA;eACf,CAAA;AAED,cAAA,WAAW,MAAMoE,KAAK,IAAIJ,OAAO,EAAE;gBACjC,IACEI,KAAK,CAACa,IAAI,KAAK,oBAAoB,IACnC,UAAU,IAAIb,KAAK,IACnBA,KAAK,CAAC/H,QAAQ,EAAEC,MAAM,IACtB8H,KAAK,CAAC/H,QAAQ,CAACC,MAAM,CAAC4I,MAAM,GAAG,CAAC,EAChC;AACAF,kBAAAA,YAAY,GAAGZ,KAAK,CAAC/H,QAAQ,CAACC,MAAM,CAAA;AACtC,iBAAA;gBACA,IAAI,UAAU,IAAI8H,KAAK,IAAIA,KAAK,CAAC/H,QAAQ,EAAEsC,KAAK,EAAE;AAChDA,kBAAAA,KAAK,GAAG;oBACNkB,WAAW,EAAEuE,KAAK,CAAC/H,QAAQ,CAACsC,KAAK,CAACwG,YAAY,IAAI,CAAC;oBACnDnF,YAAY,EAAEoE,KAAK,CAAC/H,QAAQ,CAACsC,KAAK,CAACyG,aAAa,IAAI,CAAC;oBACrD/E,eAAe,EAAE+D,KAAK,CAAC/H,QAAQ,CAACsC,KAAK,CAAC0G,qBAAqB,EAAEZ,gBAAgB,IAAI,CAAC;oBAClFlE,oBAAoB,EAAE6D,KAAK,CAAC/H,QAAQ,CAACsC,KAAK,CAAC2G,oBAAoB,EAAEX,aAAa,IAAI,CAAA;mBACnF,CAAA;AACH,iBAAA;AACF,eAAA;cAEA,MAAMnG,OAAO,GAAG,CAACkF,IAAI,CAACC,GAAG,EAAE,GA
AGF,SAAS,IAAI,IAAI,CAAA;AAC/C,cAAA,MAAMtF,kBAAkB,CAAC;gBACvBrB,MAAM,EAAE,IAAI,CAAC2F,QAAQ;AACrBrE,gBAAAA,UAAU,EAAEgF,iBAAiB;gBAC7B/E,OAAO;gBACPC,KAAK,EAAEiF,YAAY,CAACjF,KAAK;AACzBC,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBvB,KAAK,EAAEuG,YAAY,CAACvG,KAAK;AACzBV,gBAAAA,MAAM,EAAE0I,YAAY;gBACpBxG,OAAO;AACPC,gBAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpC1C,gBAAAA,MAAM,EAAEmH,IAAI;AACZxE,gBAAAA,UAAU,EAAE,GAAG;gBACfC,KAAK;AACLI,gBAAAA,gBAAgB,EAAEuE,uBAAAA;AACpB,eAAC,CAAC,CAAA;aACH,CAAC,OAAOzE,KAAU,EAAE;AACnB,cAAA,MAAMV,kBAAkB,CAAC;gBACvBrB,MAAM,EAAE,IAAI,CAAC2F,QAAQ;AACrBrE,gBAAAA,UAAU,EAAEgF,iBAAiB;gBAC7B/E,OAAO;gBACPC,KAAK,EAAEiF,YAAY,CAACjF,KAAK;AACzBC,gBAAAA,QAAQ,EAAE,QAAQ;gBAClBvB,KAAK,EAAEuG,YAAY,CAACvG,KAAK;AACzBV,gBAAAA,MAAM,EAAE,EAAE;AACVkC,gBAAAA,OAAO,EAAE,CAAC;AACVC,gBAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpC1C,gBAAAA,MAAM,EAAEmH,IAAI;gBACZxE,UAAU,EAAEG,KAAK,EAAEgG,MAAM,GAAGhG,KAAK,CAACgG,MAAM,GAAG,GAAG;AAC9ClG,gBAAAA,KAAK,EAAE;AAAEkB,kBAAAA,WAAW,EAAE,CAAC;AAAEG,kBAAAA,YAAY,EAAE,CAAA;iBAAG;AAC1CpB,gBAAAA,OAAO,EAAE,IAAI;AACbC,gBAAAA,KAAK,EAAExB,IAAI,CAACE,SAAS,CAACsB,KAAK,CAAC;AAC5BE,gBAAAA,gBAAgB,EAAEuE,uBAAAA;AACpB,eAAC,CAAC,CAAA;AACJ,aAAA;AACF,WAAC,GAAG,CAAA;AAEJ,UAAA,OAAOW,OAAO,CAAA;AAChB,SAAA;AACA,QAAA,OAAOF,KAAK,CAAA;AACd,OAAC,CAAC,CAAA;AACJ,KAAC,MAAM;MACL,MAAMe,cAAc,GAAGlB,aAAa,CAACE,IAAI,CACvC,MAAOiB,MAAM,IAAK;QAChB,IAAI,QAAQ,IAAIA,MAAM,EAAE;UACtB,MAAMvG,OAAO,GAAG,CAACkF,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/C,UAAA,MAAMtF,kBAAkB,CAAC;YACvBrB,MAAM,EAAE,IAAI,CAAC2F,QAAQ;AACrBrE,YAAAA,UAAU,EAAEgF,iBAAiB;YAC7B/E,OAAO;YACPC,KAAK,EAAEiF,YAAY,CAACjF,KAAK;AACzBC,YAAAA,QAAQ,EAAE,QAAQ;YAClBvB,KAAK,EAAEuG,YAAY,CAACvG,KAAK;YACzBV,MAAM,EAAEyI,MAAM,CAACzI,MAAM;YACrBkC,OAAO;AACPC,YAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpC1C,YAAAA,MAAM,EAAEmH,IAAI;AACZxE,YAAAA,UAAU,EAAE,GAAG;AACfC,YAAAA,KAAK,EAAE;AACLkB,cAAAA,WAAW,EAAEkF,MAAM,CAACpG,KAAK,EAAEwG,YAAY,IAAI,CAAC;AAC5CnF,cAAAA,YAAY,EAAE+E,MAAM,CAACpG,KAAK,EAAEyG,aAAa,IAAI,CAAC;cAC9C/E,eAAe,EAAE0E,MAAM,CAACpG,KAAK,EAAE0G,qBAAqB,EAAEZ,gBAAgB,IAAI,CAAC;cAC3ElE,oBAAoB,EAAEwE,MAAM,CAACpG,KAAK,EAAE2G,oBAAoB,EAAEX,aAAa,IAAI,CAAA;aAC5E;AACD5F,YAAAA,gBAAgB,EAAEuE,uBAAAA;AACpB,WAAC,CAAC,CAAA;AACJ,SAAA;AACA,QAAA,OAAOyB,MAAM,CAAA;OACd,EACD,MAAOlG,KAAU,IAAK;AACpB,QAAA,MAAMV,kBAAkB,CAAC;UACvBrB,MAAM,EAAE,IAAI,CAAC2F,QAAQ;AACrBrE,UAAAA,UAAU,EAAEgF,iBAAiB;UAC7B/E,OAAO;UACPC,KAAK,EAAEiF,YAAY,CAACjF,KAAK;AACzBC,UAAAA,QAAQ,EAAE,QAAQ;UAClBvB,KAAK,EAAEuG,YAAY,CAACvG,KAAK;AACzBV,UAAAA,MAAM,EAAE,EAAE;AACVkC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpC1C,UAAAA,MAAM,EAAEmH,IAAI;UACZxE,UAAU,EAAEG,KAAK,EAAEgG,MAAM,GAAGhG,KAAK,CAACgG,MAAM,GAAG,GAAG;AAC9ClG,UAAAA,KAAK,EAAE;AACLkB,YAAAA,WAAW,EAAE,CAAC;AACdG,YAAAA,YAAY,EAAE,CAAA;WACf;AACDpB,UAAAA,OAAO,EAAE,IAAI;AACbC,UAAAA,KAAK,EAAExB,IAAI,CAACE,SAAS,CAACsB,KAAK,CAAC;AAC5BE,UAAAA,gBAAgB,EAAEuE,uBAAAA;AACpB,SAAC,CAAC,CAAA;AACF,QAAA,MAAMzE,KAAK,CAAA;AACb,OACF,CAAiD,CAAA;AAEjD,MAAA,OAAOiG,cAAc,CAAA;AACvB,KAAA;AACF,GAAA;AAEOxH,EAAAA,KAAKA,CACV4F,IAA+B,EAC/BC,OAAwB,EACa;IACrC,MAAM;MACJC,iBAAiB;MACjBC,cAAc;MACd3B,iBAAiB;AACjB;AACAR,MAAAA,kBAAkB,GAAG,KAAK;MAC1Ba,aAAa;MACbuB,uBAAuB;MACvB,GAAGC,YAAAA;AACL,KAAC,GAAGL,IAAI,CAAA;AAER,IAAA,MAAM7E,OAAO,GAAGgF,cAAc,IAAIG,EAAM,EAAE,CAAA;AAC1C,IAAA,MAAMC,SAAS,GAAGC,IAAI,CAACC,GAAG,EAAE,CAAA;;AAE5B;IACA,MAAM4B,cAAc,GAAG,KAAK,CAACtC,MAAM,CAACuC,IAAI,CAAC,IAAI,CAAC,CAAA;IAC9C,MAAMC,YAAY,GAAG,IAAW,CAAA;AAChC,IAAA,MAAMC,UAAU,GAAGD,YAAY,CAACxC,MAAM,CAAA;IACtCwC,YAAY,CAACxC,MAAM,GAAGsC,cAAc,CAAA;IAEpC,IAAI;MACF,MAAM3B,aAAa,GAAG,KAAK,CAACtG,KAAK,CAACiG,YAAY,EAAEJ
,OAAO,CAAC,CAAA;MAExD,MAAM2B,cAAc,GAAGlB,aAAa,CAACE,IAAI,CACvC,MAAOiB,MAAM,IAAK;QAChB,MAAMvG,OAAO,GAAG,CAACkF,IAAI,CAACC,GAAG,EAAE,GAAGF,SAAS,IAAI,IAAI,CAAA;AAC/C,QAAA,MAAMtF,kBAAkB,CAAC;UACvBrB,MAAM,EAAE,IAAI,CAAC2F,QAAQ;AACrBrE,UAAAA,UAAU,EAAEgF,iBAAiB;UAC7B/E,OAAO;UACPC,KAAK,EAAEiF,YAAY,CAACjF,KAAK;AACzBC,UAAAA,QAAQ,EAAE,QAAQ;UAClBvB,KAAK,EAAEuG,YAAY,CAACvG,KAAK;UACzBV,MAAM,EAAEyI,MAAM,CAACzI,MAAM;UACrBkC,OAAO;AACPC,UAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpC1C,UAAAA,MAAM,EAAEmH,IAAI;AACZxE,UAAAA,UAAU,EAAE,GAAG;AACfC,UAAAA,KAAK,EAAE;AACLkB,YAAAA,WAAW,EAAEkF,MAAM,CAACpG,KAAK,EAAEwG,YAAY,IAAI,CAAC;AAC5CnF,YAAAA,YAAY,EAAE+E,MAAM,CAACpG,KAAK,EAAEyG,aAAa,IAAI,CAAC;YAC9C/E,eAAe,EAAE0E,MAAM,CAACpG,KAAK,EAAE0G,qBAAqB,EAAEZ,gBAAgB,IAAI,CAAC;YAC3ElE,oBAAoB,EAAEwE,MAAM,CAACpG,KAAK,EAAE2G,oBAAoB,EAAEX,aAAa,IAAI,CAAA;WAC5E;AACD5F,UAAAA,gBAAgB,EAAEuE,uBAAAA;AACpB,SAAC,CAAC,CAAA;AACF,QAAA,OAAOyB,MAAM,CAAA;OACd,EACD,MAAOlG,KAAU,IAAK;AACpB,QAAA,MAAMV,kBAAkB,CAAC;UACvBrB,MAAM,EAAE,IAAI,CAAC2F,QAAQ;AACrBrE,UAAAA,UAAU,EAAEgF,iBAAiB;UAC7B/E,OAAO;UACPC,KAAK,EAAEiF,YAAY,CAACjF,KAAK;AACzBC,UAAAA,QAAQ,EAAE,QAAQ;UAClBvB,KAAK,EAAEuG,YAAY,CAACvG,KAAK;AACzBV,UAAAA,MAAM,EAAE,EAAE;AACVkC,UAAAA,OAAO,EAAE,CAAC;AACVC,UAAAA,OAAO,EAAG,IAAI,CAASA,OAAO,IAAI,EAAE;AACpC1C,UAAAA,MAAM,EAAEmH,IAAI;UACZxE,UAAU,EAAEG,KAAK,EAAEgG,MAAM,GAAGhG,KAAK,CAACgG,MAAM,GAAG,GAAG;AAC9ClG,UAAAA,KAAK,EAAE;AACLkB,YAAAA,WAAW,EAAE,CAAC;AACdG,YAAAA,YAAY,EAAE,CAAA;WACf;AACDpB,UAAAA,OAAO,EAAE,IAAI;AACbC,UAAAA,KAAK,EAAExB,IAAI,CAACE,SAAS,CAACsB,KAAK,CAAC;AAC5BE,UAAAA,gBAAgB,EAAEuE,uBAAAA;AACpB,SAAC,CAAC,CAAA;AACF,QAAA,MAAMzE,KAAK,CAAA;AACb,OACF,CAAC,CAAA;AAED,MAAA,OAAOiG,cAAc,CAAA;AACvB,KAAC,SAAS;AACR;MACAW,YAAY,CAACxC,MAAM,GAAGyC,UAAU,CAAA;AAClC,KAAA;AACF,GAAA;AACF;;;;"}
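The `sourcesContent` embedded in the deleted source map above includes `sendEventToPosthog` from `src/utils.ts`, whose cost-override branch multiplies per-token prices by token counts. A small worked sketch of that math follows; the prices and token counts are made-up numbers, not values from the package:

```ts
// Worked example of the posthogCostOverride arithmetic visible in the embedded utils.ts:
// per-token price * token count, summed into a total.
const posthogCostOverride = { inputCost: 0.000003, outputCost: 0.000015 } // hypothetical USD per token
const usage = { inputTokens: 1200, outputTokens: 400 }                    // hypothetical counts

const inputCostUSD = posthogCostOverride.inputCost * usage.inputTokens    // ≈ 0.0036
const outputCostUSD = posthogCostOverride.outputCost * usage.outputTokens // ≈ 0.006
const totalCostUSD = inputCostUSD + outputCostUSD                         // ≈ 0.0096

// These values are attached to the $ai_generation event as $ai_input_cost_usd,
// $ai_output_cost_usd and $ai_total_cost_usd.
console.log({ inputCostUSD, outputCostUSD, totalCostUSD })
```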
package/src/anthropic/index.ts
DELETED
@@ -1,211 +0,0 @@
import AnthropicOriginal from '@anthropic-ai/sdk'
import { PostHog } from 'posthog-node'
import { v4 as uuidv4 } from 'uuid'
import { formatResponseAnthropic, mergeSystemPrompt, MonitoringParams, sendEventToPosthog } from '../utils'

type MessageCreateParamsNonStreaming = AnthropicOriginal.Messages.MessageCreateParamsNonStreaming
type MessageCreateParamsStreaming = AnthropicOriginal.Messages.MessageCreateParamsStreaming
type MessageCreateParams = AnthropicOriginal.Messages.MessageCreateParams
type Message = AnthropicOriginal.Messages.Message
type RawMessageStreamEvent = AnthropicOriginal.Messages.RawMessageStreamEvent
type MessageCreateParamsBase = AnthropicOriginal.Messages.MessageCreateParams

import type { APIPromise, RequestOptions } from '@anthropic-ai/sdk/core'
import type { Stream } from '@anthropic-ai/sdk/streaming'

interface MonitoringAnthropicConfig {
  apiKey: string
  posthog: PostHog
  baseURL?: string
}

export class PostHogAnthropic extends AnthropicOriginal {
  private readonly phClient: PostHog
  public messages: WrappedMessages

  constructor(config: MonitoringAnthropicConfig) {
    const { posthog, ...anthropicConfig } = config
    super(anthropicConfig)
    this.phClient = posthog
    this.messages = new WrappedMessages(this, this.phClient)
  }
}

export class WrappedMessages extends AnthropicOriginal.Messages {
  private readonly phClient: PostHog

  constructor(parentClient: PostHogAnthropic, phClient: PostHog) {
    super(parentClient)
    this.phClient = phClient
  }

  public create(body: MessageCreateParamsNonStreaming, options?: RequestOptions): APIPromise<Message>
  public create(
    body: MessageCreateParamsStreaming & MonitoringParams,
    options?: RequestOptions
  ): APIPromise<Stream<RawMessageStreamEvent>>
  public create(
    body: MessageCreateParamsBase & MonitoringParams,
    options?: RequestOptions
  ): APIPromise<Stream<RawMessageStreamEvent> | Message>
  public create(
    body: MessageCreateParams & MonitoringParams,
    options?: RequestOptions
  ): APIPromise<Message> | APIPromise<Stream<RawMessageStreamEvent>> {
    const {
      posthogDistinctId,
      posthogTraceId,
      posthogProperties,
      // eslint-disable-next-line @typescript-eslint/no-unused-vars
      posthogPrivacyMode = false,
      posthogGroups,
      posthogCaptureImmediate,
      ...anthropicParams
    } = body

    const traceId = posthogTraceId ?? uuidv4()
    const startTime = Date.now()

    const parentPromise = super.create(anthropicParams, options)

    if (anthropicParams.stream) {
      return parentPromise.then((value) => {
        let accumulatedContent = ''
        const usage: {
          inputTokens: number
          outputTokens: number
          cacheCreationInputTokens?: number
          cacheReadInputTokens?: number
        } = {
          inputTokens: 0,
          outputTokens: 0,
          cacheCreationInputTokens: 0,
          cacheReadInputTokens: 0,
        }
        if ('tee' in value) {
          const [stream1, stream2] = value.tee()
          ;(async () => {
            try {
              for await (const chunk of stream1) {
                if ('delta' in chunk) {
                  if ('text' in chunk.delta) {
                    const delta = chunk?.delta?.text ?? ''
                    accumulatedContent += delta
                  }
                }
                if (chunk.type == 'message_start') {
                  usage.inputTokens = chunk.message.usage.input_tokens ?? 0
                  usage.cacheCreationInputTokens = chunk.message.usage.cache_creation_input_tokens ?? 0
                  usage.cacheReadInputTokens = chunk.message.usage.cache_read_input_tokens ?? 0
                }
                if ('usage' in chunk) {
                  usage.outputTokens = chunk.usage.output_tokens ?? 0
                }
              }
              const latency = (Date.now() - startTime) / 1000
              await sendEventToPosthog({
                client: this.phClient,
                distinctId: posthogDistinctId,
                traceId,
                model: anthropicParams.model,
                provider: 'anthropic',
                input: mergeSystemPrompt(anthropicParams, 'anthropic'),
                output: [{ content: accumulatedContent, role: 'assistant' }],
                latency,
                baseURL: (this as any).baseURL ?? '',
                params: body,
                httpStatus: 200,
                usage,
                captureImmediate: posthogCaptureImmediate,
              })
            } catch (error: any) {
              // error handling
              await sendEventToPosthog({
                client: this.phClient,
                distinctId: posthogDistinctId,
                traceId,
                model: anthropicParams.model,
                provider: 'anthropic',
                input: mergeSystemPrompt(anthropicParams, 'anthropic'),
                output: [],
                latency: 0,
                baseURL: (this as any).baseURL ?? '',
                params: body,
                httpStatus: error?.status ? error.status : 500,
                usage: {
                  inputTokens: 0,
                  outputTokens: 0,
                },
                isError: true,
                error: JSON.stringify(error),
                captureImmediate: posthogCaptureImmediate,
              })
            }
          })()

          // Return the other stream to the user
          return stream2
        }
        return value
      }) as APIPromise<Stream<RawMessageStreamEvent>>
    } else {
      const wrappedPromise = parentPromise.then(
        async (result) => {
          if ('content' in result) {
            const latency = (Date.now() - startTime) / 1000
            await sendEventToPosthog({
              client: this.phClient,
              distinctId: posthogDistinctId,
              traceId,
              model: anthropicParams.model,
              provider: 'anthropic',
              input: mergeSystemPrompt(anthropicParams, 'anthropic'),
              output: formatResponseAnthropic(result),
              latency,
              baseURL: (this as any).baseURL ?? '',
              params: body,
              httpStatus: 200,
              usage: {
                inputTokens: result.usage.input_tokens ?? 0,
                outputTokens: result.usage.output_tokens ?? 0,
                cacheCreationInputTokens: result.usage.cache_creation_input_tokens ?? 0,
                cacheReadInputTokens: result.usage.cache_read_input_tokens ?? 0,
              },
              captureImmediate: posthogCaptureImmediate,
            })
          }
          return result
        },
        async (error: any) => {
          await sendEventToPosthog({
            client: this.phClient,
            distinctId: posthogDistinctId,
            traceId,
            model: anthropicParams.model,
            provider: 'anthropic',
            input: mergeSystemPrompt(anthropicParams, 'anthropic'),
            output: [],
            latency: 0,
            baseURL: (this as any).baseURL ?? '',
            params: body,
            httpStatus: error?.status ? error.status : 500,
            usage: {
              inputTokens: 0,
              outputTokens: 0,
            },
            isError: true,
            error: JSON.stringify(error),
            captureImmediate: posthogCaptureImmediate,
          })
          throw error
        }
      ) as APIPromise<Message>

      return wrappedPromise
    }
  }
}

export default PostHogAnthropic

export { PostHogAnthropic as Anthropic }
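For reference, a minimal usage sketch of the `PostHogAnthropic` wrapper whose source is removed above (the compiled build now ships under `dist/anthropic/`). The `@posthog/ai/anthropic` import path and the model id are assumptions for illustration; the constructor and `messages.create()` shape, including the `posthog*` monitoring fields, come from the deleted source.

```ts
// Minimal sketch, not the package's documented usage. The subpath import and
// model id are assumptions; only the constructor/create() shape is taken from
// the deleted source above.
import { PostHog } from 'posthog-node'
import { Anthropic } from '@posthog/ai/anthropic' // assumed entry point

async function main() {
  const posthog = new PostHog('<ph_project_api_key>')
  const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY ?? '', posthog })

  // Monitoring params are stripped before the request is forwarded to Anthropic;
  // the wrapper then captures an $ai_generation event with latency and token usage.
  const message = await anthropic.messages.create({
    model: 'claude-3-5-sonnet-latest', // example model id
    max_tokens: 256,
    messages: [{ role: 'user', content: 'Hello' }],
    posthogDistinctId: 'user_123',
    posthogTraceId: 'trace_abc',
    posthogProperties: { feature: 'greeting' },
  })
  console.log(message)

  await posthog.shutdown()
}

main()
```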
package/src/gemini/index.ts
DELETED
@@ -1,254 +0,0 @@
import { GoogleGenAI } from '@google/genai'
import { PostHog } from 'posthog-node'
import { v4 as uuidv4 } from 'uuid'
import { MonitoringParams, sendEventToPosthog } from '../utils'

// Types from @google/genai
type GenerateContentRequest = {
  model: string
  contents: any
  config?: any
  [key: string]: any
}

type GenerateContentResponse = {
  text?: string
  candidates?: any[]
  usageMetadata?: {
    promptTokenCount?: number
    candidatesTokenCount?: number
    totalTokenCount?: number
  }
  [key: string]: any
}

interface MonitoringGeminiConfig {
  apiKey?: string
  vertexai?: boolean
  project?: string
  location?: string
  apiVersion?: string
  posthog: PostHog
}

export class PostHogGoogleGenAI {
  private readonly phClient: PostHog
  private readonly client: GoogleGenAI
  public models: WrappedModels

  constructor(config: MonitoringGeminiConfig) {
    const { posthog, ...geminiConfig } = config
    this.phClient = posthog
    this.client = new GoogleGenAI(geminiConfig)
    this.models = new WrappedModels(this.client, this.phClient)
  }
}

export class WrappedModels {
  private readonly phClient: PostHog
  private readonly client: GoogleGenAI

  constructor(client: GoogleGenAI, phClient: PostHog) {
    this.client = client
    this.phClient = phClient
  }

  public async generateContent(params: GenerateContentRequest & MonitoringParams): Promise<GenerateContentResponse> {
    const {
      posthogDistinctId,
      posthogTraceId,
      posthogProperties,
      posthogGroups,
      posthogCaptureImmediate,
      ...geminiParams
    } = params

    const traceId = posthogTraceId ?? uuidv4()
    const startTime = Date.now()

    try {
      const response = await this.client.models.generateContent(geminiParams)
      const latency = (Date.now() - startTime) / 1000

      await sendEventToPosthog({
        client: this.phClient,
        distinctId: posthogDistinctId,
        traceId,
        model: geminiParams.model,
        provider: 'gemini',
        input: this.formatInput(geminiParams.contents),
        output: this.formatOutput(response),
        latency,
        baseURL: 'https://generativelanguage.googleapis.com',
        params: params as any,
        httpStatus: 200,
        usage: {
          inputTokens: response.usageMetadata?.promptTokenCount ?? 0,
          outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,
        },
        captureImmediate: posthogCaptureImmediate,
      })

      return response
    } catch (error: any) {
      const latency = (Date.now() - startTime) / 1000
      await sendEventToPosthog({
        client: this.phClient,
        distinctId: posthogDistinctId,
        traceId,
        model: geminiParams.model,
        provider: 'gemini',
        input: this.formatInput(geminiParams.contents),
        output: [],
        latency,
        baseURL: 'https://generativelanguage.googleapis.com',
        params: params as any,
        httpStatus: error?.status ?? 500,
        usage: {
          inputTokens: 0,
          outputTokens: 0,
        },
        isError: true,
        error: JSON.stringify(error),
        captureImmediate: posthogCaptureImmediate,
      })
      throw error
    }
  }

  public async *generateContentStream(
    params: GenerateContentRequest & MonitoringParams
  ): AsyncGenerator<any, void, unknown> {
    const {
      posthogDistinctId,
      posthogTraceId,
      posthogProperties,
      posthogGroups,
      posthogCaptureImmediate,
      ...geminiParams
    } = params

    const traceId = posthogTraceId ?? uuidv4()
    const startTime = Date.now()
    let accumulatedContent = ''
    let usage = {
      inputTokens: 0,
      outputTokens: 0,
    }

    try {
      const stream = await this.client.models.generateContentStream(geminiParams)

      for await (const chunk of stream) {
        if (chunk.text) {
          accumulatedContent += chunk.text
        }
        if (chunk.usageMetadata) {
          usage = {
            inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,
            outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,
          }
        }
        yield chunk
      }

      const latency = (Date.now() - startTime) / 1000
      await sendEventToPosthog({
        client: this.phClient,
        distinctId: posthogDistinctId,
        traceId,
        model: geminiParams.model,
        provider: 'gemini',
        input: this.formatInput(geminiParams.contents),
        output: [{ content: accumulatedContent, role: 'assistant' }],
        latency,
        baseURL: 'https://generativelanguage.googleapis.com',
        params: params as any,
        httpStatus: 200,
        usage,
        captureImmediate: posthogCaptureImmediate,
      })
    } catch (error: any) {
      const latency = (Date.now() - startTime) / 1000
      await sendEventToPosthog({
        client: this.phClient,
        distinctId: posthogDistinctId,
        traceId,
        model: geminiParams.model,
        provider: 'gemini',
        input: this.formatInput(geminiParams.contents),
        output: [],
        latency,
        baseURL: 'https://generativelanguage.googleapis.com',
        params: params as any,
        httpStatus: error?.status ?? 500,
        usage: {
          inputTokens: 0,
          outputTokens: 0,
        },
        isError: true,
        error: JSON.stringify(error),
        captureImmediate: posthogCaptureImmediate,
      })
      throw error
    }
  }

  private formatInput(contents: any): Array<{ role: string; content: string }> {
    if (typeof contents === 'string') {
      return [{ role: 'user', content: contents }]
    }

    if (Array.isArray(contents)) {
      return contents.map((item) => {
        if (typeof item === 'string') {
          return { role: 'user', content: item }
        }
        if (item && typeof item === 'object') {
          if (item.text) {
            return { role: item.role || 'user', content: item.text }
          }
          if (item.content) {
            return { role: item.role || 'user', content: item.content }
          }
        }
        return { role: 'user', content: String(item) }
      })
    }

    if (contents && typeof contents === 'object') {
      if (contents.text) {
        return [{ role: 'user', content: contents.text }]
      }
      if (contents.content) {
        return [{ role: 'user', content: contents.content }]
      }
    }

    return [{ role: 'user', content: String(contents) }]
  }

  private formatOutput(response: GenerateContentResponse): Array<{ role: string; content: string }> {
    if (response.text) {
      return [{ role: 'assistant', content: response.text }]
    }

    if (response.candidates && Array.isArray(response.candidates)) {
      return response.candidates.map((candidate) => {
        if (candidate.content && candidate.content.parts) {
          const text = candidate.content.parts
            .filter((part: any) => part.text)
            .map((part: any) => part.text)
            .join('')
          return { role: 'assistant', content: text }
        }
        return { role: 'assistant', content: String(candidate) }
      })
    }

    return []
  }
}

export default PostHogGoogleGenAI
export { PostHogGoogleGenAI as Gemini }
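Likewise, a minimal sketch of the `PostHogGoogleGenAI` wrapper removed above. The `@posthog/ai/gemini` import path and the model id are illustrative assumptions; the `generateContent` / `generateContentStream` call shapes follow the deleted source.

```ts
// Minimal sketch; the subpath import and model id are assumptions, the call
// shapes mirror WrappedModels.generateContent / generateContentStream above.
import { PostHog } from 'posthog-node'
import { Gemini } from '@posthog/ai/gemini' // assumed entry point

async function main() {
  const posthog = new PostHog('<ph_project_api_key>')
  const gemini = new Gemini({ apiKey: process.env.GEMINI_API_KEY, posthog })

  // Non-streaming: one $ai_generation event is captured with prompt/candidate token counts.
  const response = await gemini.models.generateContent({
    model: 'gemini-2.0-flash', // example model id
    contents: 'Write a haiku about observability',
    posthogDistinctId: 'user_123',
  })
  console.log(response.text)

  // Streaming: chunks are yielded through; the event is captured once the stream finishes.
  const stream = gemini.models.generateContentStream({
    model: 'gemini-2.0-flash',
    contents: 'Now stream one more',
    posthogTraceId: 'trace_xyz',
  })
  for await (const chunk of stream) {
    process.stdout.write(chunk.text ?? '')
  }

  await posthog.shutdown()
}

main()
```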
package/src/index.ts
DELETED
@@ -1,13 +0,0 @@
import PostHogOpenAI from './openai'
import PostHogAzureOpenAI from './openai/azure'
import { wrapVercelLanguageModel } from './vercel/middleware'
import PostHogAnthropic from './anthropic'
import PostHogGoogleGenAI from './gemini'
import { LangChainCallbackHandler } from './langchain/callbacks'

export { PostHogOpenAI as OpenAI }
export { PostHogAzureOpenAI as AzureOpenAI }
export { PostHogAnthropic as Anthropic }
export { PostHogGoogleGenAI as GoogleGenAI }
export { wrapVercelLanguageModel as withTracing }
export { LangChainCallbackHandler }
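The root entry point above only re-exported the individual integrations. A minimal sketch of consuming those exports: the wrapper constructors follow the deleted sources shown earlier in this diff, while `withTracing` and `LangChainCallbackHandler` are left as imports only, because their signatures live in files not reproduced here.

```ts
// Minimal sketch of the root exports listed above; wrapper construction follows
// the deleted sources, everything else is illustrative.
import { PostHog } from 'posthog-node'
import { OpenAI, Anthropic, GoogleGenAI, withTracing, LangChainCallbackHandler } from '@posthog/ai'

const posthog = new PostHog('<ph_project_api_key>')

// Each wrapper takes the provider's own client options plus a `posthog` client
// and captures $ai_generation events for calls made through it.
export const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY ?? '', posthog })
export const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY ?? '', posthog })
export const gemini = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY, posthog })

// withTracing (Vercel AI SDK middleware) and LangChainCallbackHandler are also
// re-exported; their options are defined in src/vercel/middleware.ts and
// src/langchain/callbacks.ts, which this release deletes as well.
export { withTracing, LangChainCallbackHandler }
```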