@posthog/ai 7.4.1 → 7.4.2

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -11,7 +11,7 @@ function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }

 var AnthropicOriginal__default = /*#__PURE__*/_interopDefault(AnthropicOriginal);

- var version = "7.4.1";
+ var version = "7.4.2";

 // Type guards for safer type checking

@@ -3,7 +3,7 @@ import { Buffer } from 'buffer';
 import { v4 } from 'uuid';
 import { uuidv7 } from '@posthog/core';

- var version = "7.4.1";
+ var version = "7.4.2";

 // Type guards for safer type checking

@@ -7,7 +7,7 @@ var buffer = require('buffer');
 var uuid = require('uuid');
 var core = require('@posthog/core');

- var version = "7.4.1";
+ var version = "7.4.2";

 // Type guards for safer type checking

@@ -3,7 +3,7 @@ import { Buffer } from 'buffer';
 import { v4 } from 'uuid';
 import { uuidv7 } from '@posthog/core';

- var version = "7.4.1";
+ var version = "7.4.2";

 // Type guards for safer type checking

package/dist/index.cjs CHANGED
@@ -30,7 +30,7 @@ function _interopNamespace(e) {
 var uuid__namespace = /*#__PURE__*/_interopNamespace(uuid);
 var AnthropicOriginal__default = /*#__PURE__*/_interopDefault(AnthropicOriginal);

- var version = "7.4.1";
+ var version = "7.4.2";

 // Type guards for safer type checking
 const isString = value => {
@@ -2248,236 +2248,246 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
 $ai_framework_version: model.specificationVersion === 'v3' ? '6' : '5'
 }
 };
- // Create wrapped model that preserves the original type
- const wrappedModel = {
- ...model,
- doGenerate: async params => {
- const startTime = Date.now();
- const mergedParams = {
- ...mergedOptions,
- ...mapVercelParams(params)
- };
- const availableTools = extractAvailableToolCalls('vercel', params);
- try {
- const result = await model.doGenerate(params);
- const modelId = mergedOptions.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId);
- const provider = mergedOptions.posthogProviderOverride ?? extractProvider(model);
- const baseURL = ''; // cannot currently get baseURL from vercel
- const content = mapVercelOutput(result.content);
- const latency = (Date.now() - startTime) / 1000;
- const providerMetadata = result.providerMetadata;
- const additionalTokenValues = extractAdditionalTokenValues(providerMetadata);
- const webSearchCount = extractWebSearchCount(providerMetadata, result.usage);
- // V2 usage has simple numbers, V3 has objects with .total - normalize both
- const usageObj = result.usage;
- const usage = {
- inputTokens: extractTokenCount(result.usage.inputTokens),
- outputTokens: extractTokenCount(result.usage.outputTokens),
- reasoningTokens: extractReasoningTokens(usageObj),
- cacheReadInputTokens: extractCacheReadTokens(usageObj),
- webSearchCount,
- ...additionalTokenValues
+ // Create wrapped model using Object.create to preserve the prototype chain
+ // This automatically inherits all properties (including getters) from the model
+ const wrappedModel = Object.create(model, {
+ doGenerate: {
+ value: async params => {
+ const startTime = Date.now();
+ const mergedParams = {
+ ...mergedOptions,
+ ...mapVercelParams(params)
 };
- adjustAnthropicV3CacheTokens(model, provider, usage);
- await sendEventToPosthog({
- client: phClient,
- distinctId: mergedOptions.posthogDistinctId,
- traceId: mergedOptions.posthogTraceId ?? uuid.v4(),
- model: modelId,
- provider: provider,
- input: mergedOptions.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
- output: content,
- latency,
- baseURL,
- params: mergedParams,
- httpStatus: 200,
- usage,
- tools: availableTools,
- captureImmediate: mergedOptions.posthogCaptureImmediate
- });
- return result;
- } catch (error) {
- const modelId = model.modelId;
- const enrichedError = await sendEventWithErrorToPosthog({
- client: phClient,
- distinctId: mergedOptions.posthogDistinctId,
- traceId: mergedOptions.posthogTraceId ?? uuid.v4(),
- model: modelId,
- provider: model.provider,
- input: mergedOptions.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
- output: [],
- latency: 0,
- baseURL: '',
- params: mergedParams,
- usage: {
- inputTokens: 0,
- outputTokens: 0
- },
- error: error,
- tools: availableTools,
- captureImmediate: mergedOptions.posthogCaptureImmediate
- });
- throw enrichedError;
- }
+ const availableTools = extractAvailableToolCalls('vercel', params);
+ try {
+ const result = await model.doGenerate(params);
+ const modelId = mergedOptions.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId);
+ const provider = mergedOptions.posthogProviderOverride ?? extractProvider(model);
+ const baseURL = ''; // cannot currently get baseURL from vercel
+ const content = mapVercelOutput(result.content);
+ const latency = (Date.now() - startTime) / 1000;
+ const providerMetadata = result.providerMetadata;
+ const additionalTokenValues = extractAdditionalTokenValues(providerMetadata);
+ const webSearchCount = extractWebSearchCount(providerMetadata, result.usage);
+ // V2 usage has simple numbers, V3 has objects with .total - normalize both
+ const usageObj = result.usage;
+ const usage = {
+ inputTokens: extractTokenCount(result.usage.inputTokens),
+ outputTokens: extractTokenCount(result.usage.outputTokens),
+ reasoningTokens: extractReasoningTokens(usageObj),
+ cacheReadInputTokens: extractCacheReadTokens(usageObj),
+ webSearchCount,
+ ...additionalTokenValues
+ };
+ adjustAnthropicV3CacheTokens(model, provider, usage);
+ await sendEventToPosthog({
+ client: phClient,
+ distinctId: mergedOptions.posthogDistinctId,
+ traceId: mergedOptions.posthogTraceId ?? uuid.v4(),
+ model: modelId,
+ provider: provider,
+ input: mergedOptions.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
+ output: content,
+ latency,
+ baseURL,
+ params: mergedParams,
+ httpStatus: 200,
+ usage,
+ tools: availableTools,
+ captureImmediate: mergedOptions.posthogCaptureImmediate
+ });
+ return result;
+ } catch (error) {
+ const modelId = model.modelId;
+ const enrichedError = await sendEventWithErrorToPosthog({
+ client: phClient,
+ distinctId: mergedOptions.posthogDistinctId,
+ traceId: mergedOptions.posthogTraceId ?? uuid.v4(),
+ model: modelId,
+ provider: model.provider,
+ input: mergedOptions.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
+ output: [],
+ latency: 0,
+ baseURL: '',
+ params: mergedParams,
+ usage: {
+ inputTokens: 0,
+ outputTokens: 0
+ },
+ error: error,
+ tools: availableTools,
+ captureImmediate: mergedOptions.posthogCaptureImmediate
+ });
+ throw enrichedError;
+ }
+ },
+ writable: true,
+ configurable: true,
+ enumerable: false
 },
- doStream: async params => {
- const startTime = Date.now();
- let generatedText = '';
- let reasoningText = '';
- let usage = {};
- let providerMetadata = undefined;
- const mergedParams = {
- ...mergedOptions,
- ...mapVercelParams(params)
- };
- const modelId = mergedOptions.posthogModelOverride ?? model.modelId;
- const provider = mergedOptions.posthogProviderOverride ?? extractProvider(model);
- const availableTools = extractAvailableToolCalls('vercel', params);
- const baseURL = ''; // cannot currently get baseURL from vercel
- // Map to track in-progress tool calls
- const toolCallsInProgress = new Map();
- try {
- const {
- stream,
- ...rest
- } = await model.doStream(params);
- const transformStream = new TransformStream({
- transform(chunk, controller) {
- // Handle streaming patterns - compatible with both V2 and V3
- if (chunk.type === 'text-delta') {
- generatedText += chunk.delta;
- }
- if (chunk.type === 'reasoning-delta') {
- reasoningText += chunk.delta;
- }
- // Handle tool call chunks
- if (chunk.type === 'tool-input-start') {
- // Initialize a new tool call
- toolCallsInProgress.set(chunk.id, {
- toolCallId: chunk.id,
- toolName: chunk.toolName,
- input: ''
- });
- }
- if (chunk.type === 'tool-input-delta') {
- // Accumulate tool call arguments
- const toolCall = toolCallsInProgress.get(chunk.id);
- if (toolCall) {
- toolCall.input += chunk.delta;
+ doStream: {
+ value: async params => {
+ const startTime = Date.now();
+ let generatedText = '';
+ let reasoningText = '';
+ let usage = {};
+ let providerMetadata = undefined;
+ const mergedParams = {
+ ...mergedOptions,
+ ...mapVercelParams(params)
+ };
+ const modelId = mergedOptions.posthogModelOverride ?? model.modelId;
+ const provider = mergedOptions.posthogProviderOverride ?? extractProvider(model);
+ const availableTools = extractAvailableToolCalls('vercel', params);
+ const baseURL = ''; // cannot currently get baseURL from vercel
+ // Map to track in-progress tool calls
+ const toolCallsInProgress = new Map();
+ try {
+ const {
+ stream,
+ ...rest
+ } = await model.doStream(params);
+ const transformStream = new TransformStream({
+ transform(chunk, controller) {
+ // Handle streaming patterns - compatible with both V2 and V3
+ if (chunk.type === 'text-delta') {
+ generatedText += chunk.delta;
 }
- }
- if (chunk.type === 'tool-input-end') {
- // Tool call is complete, keep it in the map for final processing
- }
- if (chunk.type === 'tool-call') {
- // Direct tool call chunk (complete tool call)
- toolCallsInProgress.set(chunk.toolCallId, {
- toolCallId: chunk.toolCallId,
- toolName: chunk.toolName,
- input: chunk.input
- });
- }
- if (chunk.type === 'finish') {
- providerMetadata = chunk.providerMetadata;
- const additionalTokenValues = extractAdditionalTokenValues(providerMetadata);
- const chunkUsage = chunk.usage || {};
- usage = {
- inputTokens: extractTokenCount(chunk.usage?.inputTokens),
- outputTokens: extractTokenCount(chunk.usage?.outputTokens),
- reasoningTokens: extractReasoningTokens(chunkUsage),
- cacheReadInputTokens: extractCacheReadTokens(chunkUsage),
- ...additionalTokenValues
- };
- }
- controller.enqueue(chunk);
- },
- flush: async () => {
- const latency = (Date.now() - startTime) / 1000;
- // Build content array similar to mapVercelOutput structure
- const content = [];
- if (reasoningText) {
- content.push({
- type: 'reasoning',
- text: truncate(reasoningText)
- });
- }
- if (generatedText) {
- content.push({
- type: 'text',
- text: truncate(generatedText)
- });
- }
- // Add completed tool calls to content
- for (const toolCall of toolCallsInProgress.values()) {
- if (toolCall.toolName) {
+ if (chunk.type === 'reasoning-delta') {
+ reasoningText += chunk.delta;
+ }
+ // Handle tool call chunks
+ if (chunk.type === 'tool-input-start') {
+ // Initialize a new tool call
+ toolCallsInProgress.set(chunk.id, {
+ toolCallId: chunk.id,
+ toolName: chunk.toolName,
+ input: ''
+ });
+ }
+ if (chunk.type === 'tool-input-delta') {
+ // Accumulate tool call arguments
+ const toolCall = toolCallsInProgress.get(chunk.id);
+ if (toolCall) {
+ toolCall.input += chunk.delta;
+ }
+ }
+ if (chunk.type === 'tool-input-end') {
+ // Tool call is complete, keep it in the map for final processing
+ }
+ if (chunk.type === 'tool-call') {
+ // Direct tool call chunk (complete tool call)
+ toolCallsInProgress.set(chunk.toolCallId, {
+ toolCallId: chunk.toolCallId,
+ toolName: chunk.toolName,
+ input: chunk.input
+ });
+ }
+ if (chunk.type === 'finish') {
+ providerMetadata = chunk.providerMetadata;
+ const additionalTokenValues = extractAdditionalTokenValues(providerMetadata);
+ const chunkUsage = chunk.usage || {};
+ usage = {
+ inputTokens: extractTokenCount(chunk.usage?.inputTokens),
+ outputTokens: extractTokenCount(chunk.usage?.outputTokens),
+ reasoningTokens: extractReasoningTokens(chunkUsage),
+ cacheReadInputTokens: extractCacheReadTokens(chunkUsage),
+ ...additionalTokenValues
+ };
+ }
+ controller.enqueue(chunk);
+ },
+ flush: async () => {
+ const latency = (Date.now() - startTime) / 1000;
+ // Build content array similar to mapVercelOutput structure
+ const content = [];
+ if (reasoningText) {
 content.push({
- type: 'tool-call',
- id: toolCall.toolCallId,
- function: {
- name: toolCall.toolName,
- arguments: toolCall.input
- }
+ type: 'reasoning',
+ text: truncate(reasoningText)
 });
 }
+ if (generatedText) {
+ content.push({
+ type: 'text',
+ text: truncate(generatedText)
+ });
+ }
+ // Add completed tool calls to content
+ for (const toolCall of toolCallsInProgress.values()) {
+ if (toolCall.toolName) {
+ content.push({
+ type: 'tool-call',
+ id: toolCall.toolCallId,
+ function: {
+ name: toolCall.toolName,
+ arguments: toolCall.input
+ }
+ });
+ }
+ }
+ // Structure output like mapVercelOutput does
+ const output = content.length > 0 ? [{
+ role: 'assistant',
+ content: content.length === 1 && content[0].type === 'text' ? content[0].text : content
+ }] : [];
+ const webSearchCount = extractWebSearchCount(providerMetadata, usage);
+ // Update usage with web search count
+ const finalUsage = {
+ ...usage,
+ webSearchCount
+ };
+ adjustAnthropicV3CacheTokens(model, provider, finalUsage);
+ await sendEventToPosthog({
+ client: phClient,
+ distinctId: mergedOptions.posthogDistinctId,
+ traceId: mergedOptions.posthogTraceId ?? uuid.v4(),
+ model: modelId,
+ provider: provider,
+ input: mergedOptions.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
+ output: output,
+ latency,
+ baseURL,
+ params: mergedParams,
+ httpStatus: 200,
+ usage: finalUsage,
+ tools: availableTools,
+ captureImmediate: mergedOptions.posthogCaptureImmediate
+ });
 }
- // Structure output like mapVercelOutput does
- const output = content.length > 0 ? [{
- role: 'assistant',
- content: content.length === 1 && content[0].type === 'text' ? content[0].text : content
- }] : [];
- const webSearchCount = extractWebSearchCount(providerMetadata, usage);
- // Update usage with web search count
- const finalUsage = {
- ...usage,
- webSearchCount
- };
- adjustAnthropicV3CacheTokens(model, provider, finalUsage);
- await sendEventToPosthog({
- client: phClient,
- distinctId: mergedOptions.posthogDistinctId,
- traceId: mergedOptions.posthogTraceId ?? uuid.v4(),
- model: modelId,
- provider: provider,
- input: mergedOptions.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
- output: output,
- latency,
- baseURL,
- params: mergedParams,
- httpStatus: 200,
- usage: finalUsage,
- tools: availableTools,
- captureImmediate: mergedOptions.posthogCaptureImmediate
- });
- }
- });
- return {
- stream: stream.pipeThrough(transformStream),
- ...rest
- };
- } catch (error) {
- const enrichedError = await sendEventWithErrorToPosthog({
- client: phClient,
- distinctId: mergedOptions.posthogDistinctId,
- traceId: mergedOptions.posthogTraceId ?? uuid.v4(),
- model: modelId,
- provider: provider,
- input: mergedOptions.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
- output: [],
- latency: 0,
- baseURL: '',
- params: mergedParams,
- usage: {
- inputTokens: 0,
- outputTokens: 0
- },
- error: error,
- tools: availableTools,
- captureImmediate: mergedOptions.posthogCaptureImmediate
- });
- throw enrichedError;
- }
+ });
+ return {
+ stream: stream.pipeThrough(transformStream),
+ ...rest
+ };
+ } catch (error) {
+ const enrichedError = await sendEventWithErrorToPosthog({
+ client: phClient,
+ distinctId: mergedOptions.posthogDistinctId,
+ traceId: mergedOptions.posthogTraceId ?? uuid.v4(),
+ model: modelId,
+ provider: provider,
+ input: mergedOptions.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
+ output: [],
+ latency: 0,
+ baseURL: '',
+ params: mergedParams,
+ usage: {
+ inputTokens: 0,
+ outputTokens: 0
+ },
+ error: error,
+ tools: availableTools,
+ captureImmediate: mergedOptions.posthogCaptureImmediate
+ });
+ throw enrichedError;
+ }
+ },
+ writable: true,
+ configurable: true,
+ enumerable: false
 }
- };
+ });
 return wrappedModel;
 };

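Beyond the version string bump, the substantive change in this diff is that the wrapped Vercel model is now built with Object.create(model, { ... }) instead of an object spread. A spread copies only the model's own enumerable properties, so methods and getters defined on the model's prototype are dropped, whereas Object.create keeps the original model on the prototype chain and layers non-enumerable doGenerate/doStream overrides on top. The sketch below illustrates that difference with a hypothetical FakeModel stand-in; it is not part of @posthog/ai or the Vercel AI SDK, only a minimal demonstration of the pattern used above.

// Illustrative sketch only: FakeModel is a hypothetical stand-in used to show
// why the Object.create pattern (7.4.2) preserves prototype members and live
// getters that the spread pattern (7.4.1) loses.
class FakeModel {
  constructor() {
    this.modelId = 'fake-model'; // own data property: visible to both approaches
    this._calls = 0;
  }
  // Prototype method: not an own enumerable property, so `{ ...model }` drops it.
  async doGenerate(params) {
    this._calls += 1;
    return { text: `generated for ${JSON.stringify(params)}` };
  }
  // Prototype getter: also dropped by a spread, but inherited via Object.create.
  get callCount() {
    return this._calls;
  }
}

const model = new FakeModel();

// 7.4.1-style wrapper: the spread copies own enumerable properties only.
const spreadWrapped = {
  ...model,
  doGenerate: async params => model.doGenerate(params)
};

// 7.4.2-style wrapper: the original model becomes the prototype and the override
// is defined as a non-enumerable descriptor, mirroring the shape in the diff.
const createdWrapped = Object.create(model, {
  doGenerate: {
    value: async params => {
      // the real wrapper captures analytics around this delegated call
      return model.doGenerate(params);
    },
    writable: true,
    configurable: true,
    enumerable: false
  }
});

(async () => {
  await createdWrapped.doGenerate({ prompt: 'hi' });
  console.log(createdWrapped.modelId);       // 'fake-model' (inherited from the model)
  console.log(createdWrapped.callCount);     // 1: the getter stays live via the prototype chain
  console.log('callCount' in spreadWrapped); // false: the prototype getter was lost by the spread
})();

Because the override descriptors are non-enumerable, they do not show up in Object.keys of the wrapper or in a later spread of it, while direct look-ups of doGenerate and doStream still resolve to the instrumented versions first.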