@posthog/ai 7.3.2 → 7.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -3,6 +3,7 @@
  var openai = require('openai');
  var buffer = require('buffer');
  var uuid = require('uuid');
+ var core = require('@posthog/core');
  var AnthropicOriginal = require('@anthropic-ai/sdk');
  var genai = require('@google/genai');
 
@@ -29,7 +30,7 @@ function _interopNamespace(e) {
  var uuid__namespace = /*#__PURE__*/_interopNamespace(uuid);
  var AnthropicOriginal__default = /*#__PURE__*/_interopDefault(AnthropicOriginal);

- var version = "7.3.2";
+ var version = "7.4.0";

  // Type guards for safer type checking
  const isString = value => {
@@ -497,6 +498,33 @@ function addDefaults(params) {
  traceId: params.traceId ?? uuid.v4()
  };
  }
+ const sendEventWithErrorToPosthog = async ({
+ client,
+ traceId,
+ error,
+ ...args
+ }) => {
+ const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
+ const properties = {
+ client,
+ traceId,
+ httpStatus,
+ error: JSON.stringify(error),
+ ...args
+ };
+ const enrichedError = error;
+ if (client.options?.enableExceptionAutocapture) {
+ // assign a uuid that can be used to link the trace and exception events
+ const exceptionId = core.uuidv7();
+ client.captureException(error, undefined, {
+ $ai_trace_id: traceId
+ }, exceptionId);
+ enrichedError.__posthog_previously_captured_error = true;
+ properties.exceptionId = exceptionId;
+ }
+ await sendEventToPosthog(properties);
+ return enrichedError;
+ };
  const sendEventToPosthog = async ({
  client,
  eventType = AIEvent.Generation,
@@ -511,8 +539,8 @@ const sendEventToPosthog = async ({
  params,
  httpStatus = 200,
  usage = {},
- isError = false,
  error,
+ exceptionId,
  tools,
  captureImmediate = false
  }) => {
@@ -524,10 +552,11 @@ const sendEventToPosthog = async ({
  const safeOutput = sanitizeValues(output);
  const safeError = sanitizeValues(error);
  let errorData = {};
- if (isError) {
+ if (error) {
  errorData = {
  $ai_is_error: true,
- $ai_error: safeError
+ $ai_error: safeError,
+ $exception_event_id: exceptionId
  };
  }
  let costOverrideData = {};
@@ -593,6 +622,7 @@ const sendEventToPosthog = async ({
  } else {
  client.capture(event);
  }
+ return Promise.resolve();
  };
  function formatOpenAIResponsesInput(input, instructions) {
  const messages = [];
@@ -1000,8 +1030,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
  tools: availableTools
  });
  } catch (error) {
- const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
- await sendEventToPosthog({
+ const enrichedError = await sendEventWithErrorToPosthog({
  client: this.phClient,
  ...posthogParams,
  model: openAIParams.model,
@@ -1011,14 +1040,13 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
  latency: 0,
  baseURL: this.baseURL,
  params: body,
- httpStatus,
  usage: {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
- error: JSON.stringify(error)
+ error
  });
+ throw enrichedError;
  }
  })();
  // Return the other stream to the user
@@ -1071,7 +1099,6 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
  error: JSON.stringify(error)
  });
  throw error;
@@ -1154,8 +1181,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
  tools: availableTools
  });
  } catch (error) {
- const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
- await sendEventToPosthog({
+ const enrichedError = await sendEventWithErrorToPosthog({
  client: this.phClient,
  ...posthogParams,
  model: openAIParams.model,
@@ -1165,14 +1191,13 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
  latency: 0,
  baseURL: this.baseURL,
  params: body,
- httpStatus,
  usage: {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
- error: JSON.stringify(error)
+ error: error
  });
+ throw enrichedError;
  }
  })();
  return stream2;
@@ -1226,7 +1251,6 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
  error: JSON.stringify(error)
  });
  throw error;
@@ -1268,8 +1292,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
  });
  return result;
  }, async error => {
- const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
- await sendEventToPosthog({
+ const enrichedError = await sendEventWithErrorToPosthog({
  client: this.phClient,
  ...posthogParams,
  model: openAIParams.model,
@@ -1279,15 +1302,13 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
  latency: 0,
  baseURL: this.baseURL,
  params: body,
- httpStatus,
  usage: {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
  error: JSON.stringify(error)
  });
- throw error;
+ throw enrichedError;
  });
  return wrappedPromise;
  } finally {
@@ -1347,7 +1368,6 @@ let WrappedEmbeddings$1 = class WrappedEmbeddings extends Embeddings {
  usage: {
  inputTokens: 0
  },
- isError: true,
  error: JSON.stringify(error)
  });
  throw error;
@@ -1415,8 +1435,7 @@ class WrappedTranscriptions extends Transcriptions {
  tools: availableTools
  });
  } catch (error) {
- const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
- await sendEventToPosthog({
+ const enrichedError = await sendEventWithErrorToPosthog({
  client: this.phClient,
  ...posthogParams,
  model: openAIParams.model,
@@ -1426,14 +1445,13 @@ class WrappedTranscriptions extends Transcriptions {
  latency: 0,
  baseURL: this.baseURL,
  params: body,
- httpStatus,
  usage: {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
- error: JSON.stringify(error)
+ error: error
  });
+ throw enrichedError;
  }
  })();
  return stream2;
@@ -1463,8 +1481,7 @@ class WrappedTranscriptions extends Transcriptions {
  return result;
  }
  }, async error => {
- const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
- await sendEventToPosthog({
+ const enrichedError = await sendEventWithErrorToPosthog({
  client: this.phClient,
  ...posthogParams,
  model: openAIParams.model,
@@ -1474,15 +1491,13 @@ class WrappedTranscriptions extends Transcriptions {
  latency: 0,
  baseURL: this.baseURL,
  params: body,
- httpStatus,
  usage: {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
- error: JSON.stringify(error)
+ error: error
  });
- throw error;
+ throw enrichedError;
  });
  return wrappedPromise;
  }
@@ -1633,8 +1648,7 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
  usage
  });
  } catch (error) {
- const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
- await sendEventToPosthog({
+ const enrichedError = await sendEventWithErrorToPosthog({
  client: this.phClient,
  ...posthogParams,
  model: openAIParams.model,
@@ -1644,14 +1658,13 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
  latency: 0,
  baseURL: this.baseURL,
  params: body,
- httpStatus,
  usage: {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
- error: JSON.stringify(error)
+ error: error
  });
+ throw enrichedError;
  }
  })();
  // Return the other stream to the user
@@ -1700,7 +1713,6 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
  error: JSON.stringify(error)
  });
  throw error;
@@ -1769,8 +1781,7 @@ class WrappedResponses extends openai.AzureOpenAI.Responses {
  usage
  });
  } catch (error) {
- const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
- await sendEventToPosthog({
+ const enrichedError = await sendEventWithErrorToPosthog({
  client: this.phClient,
  ...posthogParams,
  model: openAIParams.model,
@@ -1780,14 +1791,13 @@ class WrappedResponses extends openai.AzureOpenAI.Responses {
  latency: 0,
  baseURL: this.baseURL,
  params: body,
- httpStatus,
  usage: {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
- error: JSON.stringify(error)
+ error: error
  });
+ throw enrichedError;
  }
  })();
  return stream2;
@@ -1835,7 +1845,6 @@ class WrappedResponses extends openai.AzureOpenAI.Responses {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
  error: JSON.stringify(error)
  });
  throw error;
@@ -1887,7 +1896,6 @@ class WrappedResponses extends openai.AzureOpenAI.Responses {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
  error: JSON.stringify(error)
  });
  throw error;
@@ -1945,7 +1953,6 @@ class WrappedEmbeddings extends openai.AzureOpenAI.Embeddings {
  usage: {
  inputTokens: 0
  },
- isError: true,
  error: JSON.stringify(error)
  });
  throw error;
@@ -2273,7 +2280,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
  return result;
  } catch (error) {
  const modelId = model.modelId;
- await sendEventToPosthog({
+ const enrichedError = await sendEventWithErrorToPosthog({
  client: phClient,
  distinctId: mergedOptions.posthogDistinctId,
  traceId: mergedOptions.posthogTraceId ?? uuid.v4(),
@@ -2284,17 +2291,15 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
  latency: 0,
  baseURL: '',
  params: mergedParams,
- httpStatus: error?.status ? error.status : 500,
  usage: {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
- error: truncate(JSON.stringify(error)),
+ error: error,
  tools: availableTools,
  captureImmediate: mergedOptions.posthogCaptureImmediate
  });
- throw error;
+ throw enrichedError;
  }
  },
  doStream: async params => {
@@ -2431,7 +2436,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
  ...rest
  };
  } catch (error) {
- await sendEventToPosthog({
+ const enrichedError = await sendEventWithErrorToPosthog({
  client: phClient,
  distinctId: mergedOptions.posthogDistinctId,
  traceId: mergedOptions.posthogTraceId ?? uuid.v4(),
@@ -2442,17 +2447,15 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
  latency: 0,
  baseURL: '',
  params: mergedParams,
- httpStatus: error?.status ? error.status : 500,
  usage: {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
- error: truncate(JSON.stringify(error)),
+ error: error,
  tools: availableTools,
  captureImmediate: mergedOptions.posthogCaptureImmediate
  });
- throw error;
+ throw enrichedError;
  }
  }
  };
@@ -2609,8 +2612,7 @@ class WrappedMessages extends AnthropicOriginal__default.default.Messages {
  tools: availableTools
  });
  } catch (error) {
- // error handling
- await sendEventToPosthog({
+ const enrichedError = await sendEventWithErrorToPosthog({
  client: this.phClient,
  ...posthogParams,
  model: anthropicParams.model,
@@ -2620,14 +2622,13 @@ class WrappedMessages extends AnthropicOriginal__default.default.Messages {
  latency: 0,
  baseURL: this.baseURL,
  params: body,
- httpStatus: error?.status ? error.status : 500,
  usage: {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
- error: JSON.stringify(error)
+ error: error
  });
+ throw enrichedError;
  }
  })();
  // Return the other stream to the user
@@ -2678,7 +2679,6 @@ class WrappedMessages extends AnthropicOriginal__default.default.Messages {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
  error: JSON.stringify(error)
  });
  throw error;
@@ -2738,7 +2738,7 @@ class WrappedModels {
  return response;
  } catch (error) {
  const latency = (Date.now() - startTime) / 1000;
- await sendEventToPosthog({
+ const enrichedError = await sendEventWithErrorToPosthog({
  client: this.phClient,
  ...posthogParams,
  model: geminiParams.model,
@@ -2748,15 +2748,13 @@ class WrappedModels {
  latency,
  baseURL: 'https://generativelanguage.googleapis.com',
  params: params,
- httpStatus: error?.status ?? 500,
  usage: {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
- error: JSON.stringify(error)
+ error: error
  });
- throw error;
+ throw enrichedError;
  }
  }
  async *generateContentStream(params) {
@@ -2858,7 +2856,7 @@ class WrappedModels {
  });
  } catch (error) {
  const latency = (Date.now() - startTime) / 1000;
- await sendEventToPosthog({
+ const enrichedError = await sendEventWithErrorToPosthog({
  client: this.phClient,
  ...posthogParams,
  model: geminiParams.model,
@@ -2868,15 +2866,13 @@ class WrappedModels {
  latency,
  baseURL: 'https://generativelanguage.googleapis.com',
  params: params,
- httpStatus: error?.status ?? 500,
  usage: {
  inputTokens: 0,
  outputTokens: 0
  },
- isError: true,
- error: JSON.stringify(error)
+ error: error
  });
- throw error;
+ throw enrichedError;
  }
  }
  formatPartsAsContentBlocks(parts) {
@@ -3256,6 +3252,64 @@ function mapKeys(fields, mapper, map) {
  return mapped;
  }

+ //#region src/load/validation.ts
+ /**
+ * Sentinel key used to mark escaped user objects during serialization.
+ *
+ * When a plain object contains 'lc' key (which could be confused with LC objects),
+ * we wrap it as `{"__lc_escaped__": {...original...}}`.
+ */
+ const LC_ESCAPED_KEY = "__lc_escaped__";
+ /**
+ * Check if an object needs escaping to prevent confusion with LC objects.
+ *
+ * An object needs escaping if:
+ * 1. It has an `'lc'` key (could be confused with LC serialization format)
+ * 2. It has only the escape key (would be mistaken for an escaped object)
+ */
+ function needsEscaping(obj) {
+ return "lc" in obj || Object.keys(obj).length === 1 && LC_ESCAPED_KEY in obj;
+ }
+ /**
+ * Wrap an object in the escape marker.
+ *
+ * @example
+ * ```typescript
+ * {"key": "value"} // becomes {"__lc_escaped__": {"key": "value"}}
+ * ```
+ */
+ function escapeObject(obj) {
+ return { [LC_ESCAPED_KEY]: obj };
+ }
+ /**
+ * Check if an object looks like a Serializable instance (duck typing).
+ */
+ function isSerializableLike(obj) {
+ return obj !== null && typeof obj === "object" && "lc_serializable" in obj && typeof obj.toJSON === "function";
+ }
+ /**
+ * Escape a value if it needs escaping (contains `lc` key).
+ *
+ * This is a simpler version of `serializeValue` that doesn't handle Serializable
+ * objects - it's meant to be called on kwargs values that have already been
+ * processed by `toJSON()`.
+ *
+ * @param value - The value to potentially escape.
+ * @returns The value with any `lc`-containing objects wrapped in escape markers.
+ */
+ function escapeIfNeeded(value) {
+ if (value !== null && typeof value === "object" && !Array.isArray(value)) {
+ if (isSerializableLike(value)) return value;
+ const record = value;
+ if (needsEscaping(record)) return escapeObject(record);
+ const result = {};
+ for (const [key, val] of Object.entries(record)) result[key] = escapeIfNeeded(val);
+ return result;
+ }
+ if (Array.isArray(value)) return value.map((item) => escapeIfNeeded(item));
+ return value;
+ }
+
  //#region src/load/serializable.ts
  var serializable_exports = {};
  __export(serializable_exports, {
@@ -3377,11 +3431,15 @@ var Serializable = class Serializable {
  }
  if (last in read && read[last] !== void 0) write[last] = write[last] || read[last];
  });
+ const escapedKwargs = {};
+ for (const [key, value] of Object.entries(kwargs)) escapedKwargs[key] = escapeIfNeeded(value);
+ const kwargsWithSecrets = Object.keys(secrets).length ? replaceSecrets(escapedKwargs, secrets) : escapedKwargs;
+ const processedKwargs = mapKeys(kwargsWithSecrets, keyToJson, aliases);
  return {
  lc: 1,
  type: "constructor",
  id: this.lc_id,
- kwargs: mapKeys(Object.keys(secrets).length ? replaceSecrets(kwargs, secrets) : kwargs, keyToJson, aliases)
+ kwargs: processedKwargs
  };
  }
  toJSONNotImplemented() {
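
Usage note on the error-handling change above: when the PostHog client has exception autocapture enabled, the new sendEventWithErrorToPosthog helper captures the provider error as a linked exception (sharing $ai_trace_id), stamps the AI event with $ai_is_error, $ai_error and $exception_event_id, flags the rethrown error with __posthog_previously_captured_error, and rethrows it. The following is a minimal consumer-side sketch only; the constructor shapes, the posthogTraceId parameter, and the model/trace values shown are illustrative assumptions taken from the public @posthog/ai and posthog-node documentation, not from this diff.

// sketch.ts — illustrative, not part of the published package
import { PostHog } from 'posthog-node';
import { OpenAI } from '@posthog/ai';

async function main() {
  const phClient = new PostHog('<ph_project_api_key>', {
    host: 'https://us.i.posthog.com',
    // With this flag on, provider errors are also captured as exception events
    // and linked to the $ai_generation event (see sendEventWithErrorToPosthog above).
    enableExceptionAutocapture: true,
  });

  const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY ?? '', posthog: phClient });

  try {
    await client.chat.completions.create({
      model: 'gpt-4.1-mini',
      messages: [{ role: 'user', content: 'Hello' }],
      posthogTraceId: 'trace-123', // illustrative trace id; links the AI event and the exception
    });
  } catch (error) {
    // Per this diff, the rethrown error is marked so it is not captured twice,
    // and the AI event carries $exception_event_id pointing at the linked exception event.
    console.log((error as { __posthog_previously_captured_error?: boolean }).__posthog_previously_captured_error);
    throw error;
  } finally {
    await phClient.shutdown();
  }
}

main().catch(() => process.exit(1));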