@posthog/ai 7.3.2 → 7.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/anthropic/index.cjs +37 -10
- package/dist/anthropic/index.cjs.map +1 -1
- package/dist/anthropic/index.mjs +37 -10
- package/dist/anthropic/index.mjs.map +1 -1
- package/dist/gemini/index.cjs +40 -14
- package/dist/gemini/index.cjs.map +1 -1
- package/dist/gemini/index.mjs +40 -14
- package/dist/gemini/index.mjs.map +1 -1
- package/dist/index.cjs +151 -74
- package/dist/index.cjs.map +1 -1
- package/dist/index.mjs +151 -74
- package/dist/index.mjs.map +1 -1
- package/dist/langchain/index.cjs +65 -2
- package/dist/langchain/index.cjs.map +1 -1
- package/dist/langchain/index.mjs +65 -2
- package/dist/langchain/index.mjs.map +1 -1
- package/dist/openai/index.cjs +48 -33
- package/dist/openai/index.cjs.map +1 -1
- package/dist/openai/index.mjs +48 -33
- package/dist/openai/index.mjs.map +1 -1
- package/dist/vercel/index.cjs +63 -14
- package/dist/vercel/index.cjs.map +1 -1
- package/dist/vercel/index.mjs +63 -14
- package/dist/vercel/index.mjs.map +1 -1
- package/package.json +11 -10
package/dist/index.cjs
CHANGED
@@ -3,6 +3,7 @@
 var openai = require('openai');
 var buffer = require('buffer');
 var uuid = require('uuid');
+var core = require('@posthog/core');
 var AnthropicOriginal = require('@anthropic-ai/sdk');
 var genai = require('@google/genai');

@@ -29,7 +30,7 @@ function _interopNamespace(e) {
 var uuid__namespace = /*#__PURE__*/_interopNamespace(uuid);
 var AnthropicOriginal__default = /*#__PURE__*/_interopDefault(AnthropicOriginal);

-var version = "7.3.2";
+var version = "7.4.1";

 // Type guards for safer type checking
 const isString = value => {
@@ -497,6 +498,33 @@ function addDefaults(params) {
     traceId: params.traceId ?? uuid.v4()
   };
 }
+const sendEventWithErrorToPosthog = async ({
+  client,
+  traceId,
+  error,
+  ...args
+}) => {
+  const httpStatus = error && typeof error === 'object' && 'status' in error ? error.status ?? 500 : 500;
+  const properties = {
+    client,
+    traceId,
+    httpStatus,
+    error: JSON.stringify(error),
+    ...args
+  };
+  const enrichedError = error;
+  if (client.options?.enableExceptionAutocapture) {
+    // assign a uuid that can be used to link the trace and exception events
+    const exceptionId = core.uuidv7();
+    client.captureException(error, undefined, {
+      $ai_trace_id: traceId
+    }, exceptionId);
+    enrichedError.__posthog_previously_captured_error = true;
+    properties.exceptionId = exceptionId;
+  }
+  await sendEventToPosthog(properties);
+  return enrichedError;
+};
 const sendEventToPosthog = async ({
   client,
   eventType = AIEvent.Generation,
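The new sendEventWithErrorToPosthog helper centralizes error reporting for every wrapped provider: it derives an HTTP status from the thrown error, optionally captures the error as a PostHog exception event linked to the trace via $ai_trace_id, flags the error object as already captured, and returns it so callers can rethrow. A minimal caller-side sketch of that flow, assuming a wrapped client (phClient) and an existing traceId; the surrounding setup is illustrative, not taken from the diff:

try {
  return await completions.create(openAIParams);
} catch (error) {
  // Emits the generation event with $ai_is_error set and, when
  // enableExceptionAutocapture is on, a linked exception event.
  const enrichedError = await sendEventWithErrorToPosthog({
    client: phClient,
    traceId,
    error,
    model: openAIParams.model
  });
  throw enrichedError;
}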
@@ -511,8 +539,8 @@ const sendEventToPosthog = async ({
   params,
   httpStatus = 200,
   usage = {},
-  isError = false,
   error,
+  exceptionId,
   tools,
   captureImmediate = false
 }) => {
@@ -524,10 +552,11 @@ const sendEventToPosthog = async ({
   const safeOutput = sanitizeValues(output);
   const safeError = sanitizeValues(error);
   let errorData = {};
-  if (isError) {
+  if (error) {
     errorData = {
       $ai_is_error: true,
-      $ai_error: safeError
+      $ai_error: safeError,
+      $exception_event_id: exceptionId
     };
   }
   let costOverrideData = {};
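In short, sendEventToPosthog no longer takes an explicit isError flag: the presence of an error value alone switches the event into error mode, and the id minted by the helper above surfaces as $exception_event_id. A sketch of the resulting block (values are placeholders; when exception autocapture is off, exceptionId is simply undefined and the property is effectively absent):

errorData = {
  $ai_is_error: true,
  $ai_error: safeError,             // sanitized provider error
  $exception_event_id: exceptionId  // links to the captured exception event, if any
};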
@@ -593,6 +622,7 @@ const sendEventToPosthog = async ({
   } else {
     client.capture(event);
   }
+  return Promise.resolve();
 };
 function formatOpenAIResponsesInput(input, instructions) {
   const messages = [];
@@ -1000,8 +1030,7 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
        tools: availableTools
      });
    } catch (error) {
-      const
-      await sendEventToPosthog({
+      const enrichedError = await sendEventWithErrorToPosthog({
        client: this.phClient,
        ...posthogParams,
        model: openAIParams.model,
@@ -1011,14 +1040,13 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
        latency: 0,
        baseURL: this.baseURL,
        params: body,
-        httpStatus,
        usage: {
          inputTokens: 0,
          outputTokens: 0
        },
-        isError: true,
-        error: JSON.stringify(error)
+        error
      });
+      throw enrichedError;
    }
  })();
  // Return the other stream to the user
@@ -1071,7 +1099,6 @@ let WrappedCompletions$1 = class WrappedCompletions extends Completions {
        inputTokens: 0,
        outputTokens: 0
      },
-      isError: true,
      error: JSON.stringify(error)
    });
    throw error;
@@ -1154,8 +1181,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
        tools: availableTools
      });
    } catch (error) {
-      const
-      await sendEventToPosthog({
+      const enrichedError = await sendEventWithErrorToPosthog({
        client: this.phClient,
        ...posthogParams,
        model: openAIParams.model,
@@ -1165,14 +1191,13 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
        latency: 0,
        baseURL: this.baseURL,
        params: body,
-        httpStatus,
        usage: {
          inputTokens: 0,
          outputTokens: 0
        },
-        isError: true,
-        error: JSON.stringify(error)
+        error: error
      });
+      throw enrichedError;
    }
  })();
  return stream2;
@@ -1226,7 +1251,6 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
        inputTokens: 0,
        outputTokens: 0
      },
-      isError: true,
      error: JSON.stringify(error)
    });
    throw error;
@@ -1268,8 +1292,7 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
      });
      return result;
    }, async error => {
-      const
-      await sendEventToPosthog({
+      const enrichedError = await sendEventWithErrorToPosthog({
        client: this.phClient,
        ...posthogParams,
        model: openAIParams.model,
@@ -1279,15 +1302,13 @@ let WrappedResponses$1 = class WrappedResponses extends Responses {
        latency: 0,
        baseURL: this.baseURL,
        params: body,
-        httpStatus,
        usage: {
          inputTokens: 0,
          outputTokens: 0
        },
-        isError: true,
        error: JSON.stringify(error)
      });
-      throw error;
+      throw enrichedError;
    });
    return wrappedPromise;
  } finally {
@@ -1347,7 +1368,6 @@ let WrappedEmbeddings$1 = class WrappedEmbeddings extends Embeddings {
      usage: {
        inputTokens: 0
      },
-      isError: true,
      error: JSON.stringify(error)
    });
    throw error;
@@ -1415,8 +1435,7 @@ class WrappedTranscriptions extends Transcriptions {
        tools: availableTools
      });
    } catch (error) {
-      const
-      await sendEventToPosthog({
+      const enrichedError = await sendEventWithErrorToPosthog({
        client: this.phClient,
        ...posthogParams,
        model: openAIParams.model,
@@ -1426,14 +1445,13 @@ class WrappedTranscriptions extends Transcriptions {
        latency: 0,
        baseURL: this.baseURL,
        params: body,
-        httpStatus,
        usage: {
          inputTokens: 0,
          outputTokens: 0
        },
-        isError: true,
-        error: JSON.stringify(error)
+        error: error
      });
+      throw enrichedError;
    }
  })();
  return stream2;
@@ -1463,8 +1481,7 @@ class WrappedTranscriptions extends Transcriptions {
        return result;
      }
    }, async error => {
-      const
-      await sendEventToPosthog({
+      const enrichedError = await sendEventWithErrorToPosthog({
        client: this.phClient,
        ...posthogParams,
        model: openAIParams.model,
@@ -1474,15 +1491,13 @@ class WrappedTranscriptions extends Transcriptions {
        latency: 0,
        baseURL: this.baseURL,
        params: body,
-        httpStatus,
        usage: {
          inputTokens: 0,
          outputTokens: 0
        },
-        isError: true,
-        error: JSON.stringify(error)
+        error: error
      });
-      throw error;
+      throw enrichedError;
    });
    return wrappedPromise;
  }
@@ -1633,8 +1648,7 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
        usage
      });
    } catch (error) {
-      const
-      await sendEventToPosthog({
+      const enrichedError = await sendEventWithErrorToPosthog({
        client: this.phClient,
        ...posthogParams,
        model: openAIParams.model,
@@ -1644,14 +1658,13 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
        latency: 0,
        baseURL: this.baseURL,
        params: body,
-        httpStatus,
        usage: {
          inputTokens: 0,
          outputTokens: 0
        },
-        isError: true,
-        error: JSON.stringify(error)
+        error: error
      });
+      throw enrichedError;
    }
  })();
  // Return the other stream to the user
@@ -1700,7 +1713,6 @@ class WrappedCompletions extends openai.AzureOpenAI.Chat.Completions {
        inputTokens: 0,
        outputTokens: 0
      },
-      isError: true,
      error: JSON.stringify(error)
    });
    throw error;
@@ -1769,8 +1781,7 @@ class WrappedResponses extends openai.AzureOpenAI.Responses {
        usage
      });
    } catch (error) {
-      const
-      await sendEventToPosthog({
+      const enrichedError = await sendEventWithErrorToPosthog({
        client: this.phClient,
        ...posthogParams,
        model: openAIParams.model,
@@ -1780,14 +1791,13 @@ class WrappedResponses extends openai.AzureOpenAI.Responses {
        latency: 0,
        baseURL: this.baseURL,
        params: body,
-        httpStatus,
        usage: {
          inputTokens: 0,
          outputTokens: 0
        },
-        isError: true,
-        error: JSON.stringify(error)
+        error: error
      });
+      throw enrichedError;
    }
  })();
  return stream2;
@@ -1835,7 +1845,6 @@ class WrappedResponses extends openai.AzureOpenAI.Responses {
        inputTokens: 0,
        outputTokens: 0
      },
-      isError: true,
      error: JSON.stringify(error)
    });
    throw error;
@@ -1887,7 +1896,6 @@ class WrappedResponses extends openai.AzureOpenAI.Responses {
        inputTokens: 0,
        outputTokens: 0
      },
-      isError: true,
      error: JSON.stringify(error)
    });
    throw error;
@@ -1945,7 +1953,6 @@ class WrappedEmbeddings extends openai.AzureOpenAI.Embeddings {
      usage: {
        inputTokens: 0
      },
-      isError: true,
      error: JSON.stringify(error)
    });
    throw error;
@@ -1954,6 +1961,10 @@ class WrappedEmbeddings extends openai.AzureOpenAI.Embeddings {
  }
 }

+// Type guards
+function isV3Model(model) {
+  return model.specificationVersion === 'v3';
+}
 const mapVercelParams = params => {
   return {
     temperature: params.temperature,
@@ -2174,6 +2185,19 @@ const extractAdditionalTokenValues = providerMetadata => {
   }
   return {};
 };
+// For Anthropic providers in V3, inputTokens.total is the sum of all tokens (uncached + cache read + cache write).
+// Our cost calculation expects inputTokens to be only the uncached portion for Anthropic.
+// This helper subtracts cache tokens from inputTokens for Anthropic V3 models.
+const adjustAnthropicV3CacheTokens = (model, provider, usage) => {
+  if (isV3Model(model) && provider.toLowerCase().includes('anthropic')) {
+    const cacheReadTokens = usage.cacheReadInputTokens || 0;
+    const cacheWriteTokens = usage.cacheCreationInputTokens || 0;
+    const cacheTokens = cacheReadTokens + cacheWriteTokens;
+    if (usage.inputTokens && cacheTokens > 0) {
+      usage.inputTokens = Math.max(usage.inputTokens - cacheTokens, 0);
+    }
+  }
+};
 // Helper to extract numeric token value from V2 (number) or V3 (object with .total) usage formats
 const extractTokenCount = value => {
   if (typeof value === 'number') {
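The cache-token adjustment is easiest to see with numbers. Suppose an Anthropic V3 usage report of 1200 total input tokens, of which 800 were cache reads and 100 were cache writes; the helper rewrites inputTokens to the uncached remainder so the cost calculation can price the cached portions separately. A small sketch, assuming model.specificationVersion === 'v3' (the usage object is an invented illustration in the shape the helper expects):

const usage = { inputTokens: 1200, cacheReadInputTokens: 800, cacheCreationInputTokens: 100 };
adjustAnthropicV3CacheTokens(model, 'anthropic.messages', usage);
// usage.inputTokens is now 1200 - (800 + 100) = 300; the cache counts themselves are untouched.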
@@ -2254,6 +2278,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
        webSearchCount,
        ...additionalTokenValues
      };
+      adjustAnthropicV3CacheTokens(model, provider, usage);
      await sendEventToPosthog({
        client: phClient,
        distinctId: mergedOptions.posthogDistinctId,
@@ -2273,7 +2298,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
      return result;
    } catch (error) {
      const modelId = model.modelId;
-      await sendEventToPosthog({
+      const enrichedError = await sendEventWithErrorToPosthog({
        client: phClient,
        distinctId: mergedOptions.posthogDistinctId,
        traceId: mergedOptions.posthogTraceId ?? uuid.v4(),
@@ -2284,17 +2309,15 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
        latency: 0,
        baseURL: '',
        params: mergedParams,
-        httpStatus: error?.status ? error.status : 500,
        usage: {
          inputTokens: 0,
          outputTokens: 0
        },
-        isError: true,
-        error: truncate(JSON.stringify(error)),
+        error: error,
        tools: availableTools,
        captureImmediate: mergedOptions.posthogCaptureImmediate
      });
-      throw error;
+      throw enrichedError;
    }
  },
  doStream: async params => {
@@ -2408,6 +2431,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
        ...usage,
        webSearchCount
      };
+      adjustAnthropicV3CacheTokens(model, provider, finalUsage);
      await sendEventToPosthog({
        client: phClient,
        distinctId: mergedOptions.posthogDistinctId,
@@ -2431,7 +2455,7 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
        ...rest
      };
    } catch (error) {
-      await sendEventToPosthog({
+      const enrichedError = await sendEventWithErrorToPosthog({
        client: phClient,
        distinctId: mergedOptions.posthogDistinctId,
        traceId: mergedOptions.posthogTraceId ?? uuid.v4(),
@@ -2442,17 +2466,15 @@ const wrapVercelLanguageModel = (model, phClient, options) => {
        latency: 0,
        baseURL: '',
        params: mergedParams,
-        httpStatus: error?.status ? error.status : 500,
        usage: {
          inputTokens: 0,
          outputTokens: 0
        },
-        isError: true,
-        error: truncate(JSON.stringify(error)),
+        error: error,
        tools: availableTools,
        captureImmediate: mergedOptions.posthogCaptureImmediate
      });
-      throw error;
+      throw enrichedError;
    }
  }
 };
@@ -2609,8 +2631,7 @@ class WrappedMessages extends AnthropicOriginal__default.default.Messages {
        tools: availableTools
      });
    } catch (error) {
-
-      await sendEventToPosthog({
+      const enrichedError = await sendEventWithErrorToPosthog({
        client: this.phClient,
        ...posthogParams,
        model: anthropicParams.model,
@@ -2620,14 +2641,13 @@ class WrappedMessages extends AnthropicOriginal__default.default.Messages {
        latency: 0,
        baseURL: this.baseURL,
        params: body,
-        httpStatus: error?.status ? error.status : 500,
        usage: {
          inputTokens: 0,
          outputTokens: 0
        },
-        isError: true,
-        error: JSON.stringify(error)
+        error: error
      });
+      throw enrichedError;
    }
  })();
  // Return the other stream to the user
@@ -2678,7 +2698,6 @@ class WrappedMessages extends AnthropicOriginal__default.default.Messages {
        inputTokens: 0,
        outputTokens: 0
      },
-      isError: true,
      error: JSON.stringify(error)
    });
    throw error;
@@ -2738,7 +2757,7 @@ class WrappedModels {
      return response;
    } catch (error) {
      const latency = (Date.now() - startTime) / 1000;
-      await sendEventToPosthog({
+      const enrichedError = await sendEventWithErrorToPosthog({
        client: this.phClient,
        ...posthogParams,
        model: geminiParams.model,
@@ -2748,15 +2767,13 @@ class WrappedModels {
        latency,
        baseURL: 'https://generativelanguage.googleapis.com',
        params: params,
-        httpStatus: error?.status ?? 500,
        usage: {
          inputTokens: 0,
          outputTokens: 0
        },
-        isError: true,
-        error: JSON.stringify(error)
+        error: error
      });
-      throw error;
+      throw enrichedError;
    }
  }
  async *generateContentStream(params) {
@@ -2858,7 +2875,7 @@ class WrappedModels {
      });
    } catch (error) {
      const latency = (Date.now() - startTime) / 1000;
-      await sendEventToPosthog({
+      const enrichedError = await sendEventWithErrorToPosthog({
        client: this.phClient,
        ...posthogParams,
        model: geminiParams.model,
@@ -2868,15 +2885,13 @@ class WrappedModels {
        latency,
        baseURL: 'https://generativelanguage.googleapis.com',
        params: params,
-        httpStatus: error?.status ?? 500,
        usage: {
          inputTokens: 0,
          outputTokens: 0
        },
-        isError: true,
-        error: JSON.stringify(error)
+        error: error
      });
-      throw error;
+      throw enrichedError;
    }
  }
  formatPartsAsContentBlocks(parts) {
@@ -3256,6 +3271,64 @@ function mapKeys(fields, mapper, map) {
   return mapped;
 }

+//#region src/load/validation.ts
+/**
+ * Sentinel key used to mark escaped user objects during serialization.
+ *
+ * When a plain object contains 'lc' key (which could be confused with LC objects),
+ * we wrap it as `{"__lc_escaped__": {...original...}}`.
+ */
+const LC_ESCAPED_KEY = "__lc_escaped__";
+/**
+ * Check if an object needs escaping to prevent confusion with LC objects.
+ *
+ * An object needs escaping if:
+ * 1. It has an `'lc'` key (could be confused with LC serialization format)
+ * 2. It has only the escape key (would be mistaken for an escaped object)
+ */
+function needsEscaping(obj) {
+  return "lc" in obj || Object.keys(obj).length === 1 && LC_ESCAPED_KEY in obj;
+}
+/**
+ * Wrap an object in the escape marker.
+ *
+ * @example
+ * ```typescript
+ * {"key": "value"} // becomes {"__lc_escaped__": {"key": "value"}}
+ * ```
+ */
+function escapeObject(obj) {
+  return { [LC_ESCAPED_KEY]: obj };
+}
+/**
+ * Check if an object looks like a Serializable instance (duck typing).
+ */
+function isSerializableLike(obj) {
+  return obj !== null && typeof obj === "object" && "lc_serializable" in obj && typeof obj.toJSON === "function";
+}
+/**
+ * Escape a value if it needs escaping (contains `lc` key).
+ *
+ * This is a simpler version of `serializeValue` that doesn't handle Serializable
+ * objects - it's meant to be called on kwargs values that have already been
+ * processed by `toJSON()`.
+ *
+ * @param value - The value to potentially escape.
+ * @returns The value with any `lc`-containing objects wrapped in escape markers.
+ */
+function escapeIfNeeded(value) {
+  if (value !== null && typeof value === "object" && !Array.isArray(value)) {
+    if (isSerializableLike(value)) return value;
+    const record = value;
+    if (needsEscaping(record)) return escapeObject(record);
+    const result = {};
+    for (const [key, val] of Object.entries(record)) result[key] = escapeIfNeeded(val);
+    return result;
+  }
+  if (Array.isArray(value)) return value.map((item) => escapeIfNeeded(item));
+  return value;
+}
+
 //#region src/load/serializable.ts
 var serializable_exports = {};
 __export(serializable_exports, {
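The effect of these helpers is easiest to see on a value that could be mistaken for LangChain's serialized form. A short sketch of escapeIfNeeded applied to such a value (the input object is an invented illustration):

const kwargs = {
  safe: { answer: 42 },
  risky: { lc: 1, payload: 'user data' } // has an 'lc' key, so it must be escaped
};
const escaped = escapeIfNeeded(kwargs);
// escaped.safe  -> { answer: 42 }  (unchanged)
// escaped.risky -> { __lc_escaped__: { lc: 1, payload: 'user data' } }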
@@ -3377,11 +3450,15 @@ var Serializable = class Serializable {
      }
      if (last in read && read[last] !== void 0) write[last] = write[last] || read[last];
    });
+    const escapedKwargs = {};
+    for (const [key, value] of Object.entries(kwargs)) escapedKwargs[key] = escapeIfNeeded(value);
+    const kwargsWithSecrets = Object.keys(secrets).length ? replaceSecrets(escapedKwargs, secrets) : escapedKwargs;
+    const processedKwargs = mapKeys(kwargsWithSecrets, keyToJson, aliases);
    return {
      lc: 1,
      type: "constructor",
      id: this.lc_id,
-      kwargs:
+      kwargs: processedKwargs
    };
  }
  toJSONNotImplemented() {