@posthog/ai 6.0.0 → 6.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/gemini/index.cjs +6 -2
- package/dist/gemini/index.cjs.map +1 -1
- package/dist/gemini/index.d.ts +2 -0
- package/dist/gemini/index.mjs +6 -2
- package/dist/gemini/index.mjs.map +1 -1
- package/dist/index.cjs +103 -93
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +2 -0
- package/dist/index.mjs +103 -93
- package/dist/index.mjs.map +1 -1
- package/dist/vercel/index.cjs +98 -91
- package/dist/vercel/index.cjs.map +1 -1
- package/dist/vercel/index.mjs +98 -91
- package/dist/vercel/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -1235,77 +1235,75 @@ const mapVercelPrompt = messages => {
   return inputs;
 };
 const mapVercelOutput = result => {
-  const content =
-
-
-
-
-
-
-
-
-
-
-        id: toolCall.toolCallId,
+  const content = result.map(item => {
+    if (item.type === 'text') {
+      return {
+        type: 'text',
+        text: truncate(item.text)
+      };
+    }
+    if (item.type === 'tool-call') {
+      return {
+        type: 'tool-call',
+        id: item.toolCallId,
         function: {
-          name:
-          arguments:
+          name: item.toolName,
+          arguments: item.args || JSON.stringify(item.arguments || {})
         }
-      }
+      };
     }
-
+    if (item.type === 'reasoning') {
+      return {
+        type: 'reasoning',
+        text: truncate(item.text)
+      };
+    }
+    if (item.type === 'file') {
+      // Handle files similar to input mapping - avoid large base64 data
+      let fileData;
+      if (item.data instanceof URL) {
+        fileData = item.data.toString();
+      } else if (typeof item.data === 'string') {
+        // Check if it's base64 data and potentially large
+        if (item.data.startsWith('data:') || item.data.length > 1000) {
+          fileData = `[${item.mediaType} file - ${item.data.length} bytes]`;
+        } else {
+          fileData = item.data;
+        }
+      } else {
+        fileData = `[binary ${item.mediaType} file]`;
+      }
+      return {
+        type: 'file',
+        name: 'generated_file',
+        mediaType: item.mediaType,
+        data: fileData
+      };
+    }
+    if (item.type === 'source') {
+      return {
+        type: 'source',
+        sourceType: item.sourceType,
+        id: item.id,
+        url: item.url || '',
+        title: item.title || ''
+      };
+    }
+    // Fallback for unknown types - try to extract text if possible
+    return {
+      type: 'text',
+      text: truncate(JSON.stringify(item))
+    };
+  });
   if (content.length > 0) {
     return [{
       role: 'assistant',
       content: content.length === 1 && content[0].type === 'text' ? content[0].text : content
     }];
   }
-  // Fallback to original behavior for other result types TODO: check if we can remove this
-  const normalizedResult = typeof result === 'string' ? {
-    text: result
-  } : result;
-  const output = {
-    ...(normalizedResult.text ? {
-      text: normalizedResult.text
-    } : {}),
-    ...(normalizedResult.object ? {
-      object: normalizedResult.object
-    } : {}),
-    ...(normalizedResult.reasoningText ? {
-      reasoning: normalizedResult.reasoningText
-    } : {}),
-    ...(normalizedResult.response ? {
-      response: normalizedResult.response
-    } : {}),
-    ...(normalizedResult.finishReason ? {
-      finishReason: normalizedResult.finishReason
-    } : {}),
-    ...(normalizedResult.usage ? {
-      usage: normalizedResult.usage
-    } : {}),
-    ...(normalizedResult.warnings ? {
-      warnings: normalizedResult.warnings
-    } : {}),
-    ...(normalizedResult.providerMetadata ? {
-      toolCalls: normalizedResult.providerMetadata
-    } : {}),
-    ...(normalizedResult.files ? {
-      files: normalizedResult.files.map(file => ({
-        name: file.name,
-        size: file.size,
-        type: file.type
-      }))
-    } : {})
-  };
-  if (output.text && !output.object && !output.reasoning) {
-    return [{
-      content: truncate(output.text),
-      role: 'assistant'
-    }];
-  }
   // otherwise stringify and truncate
   try {
-    const jsonOutput = JSON.stringify(
+    const jsonOutput = JSON.stringify(result);
     return [{
       content: truncate(jsonOutput),
       role: 'assistant'
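Taken together, this hunk replaces the old result-object normalization with a direct walk over the AI SDK v5 content-part array. As a minimal sketch, assuming the part field names handled above (toolCallId, toolName, args) and purely illustrative values, mapVercelOutput would turn a two-part response into a single assistant message like this:

    // Hypothetical AI SDK v5 content parts, shaped like the cases handled in the hunk above.
    const parts = [
      { type: 'text', text: 'Berlin is currently 18°C.' },
      {
        type: 'tool-call',
        toolCallId: 'call_abc123',
        toolName: 'get_weather',
        args: '{"city":"Berlin"}',
      },
    ];

    // mapVercelOutput(parts) would then return one assistant message; because there is
    // more than one part, the content stays an array rather than collapsing to a string:
    // [{
    //   role: 'assistant',
    //   content: [
    //     { type: 'text', text: 'Berlin is currently 18°C.' },
    //     { type: 'tool-call', id: 'call_abc123',
    //       function: { name: 'get_weather', arguments: '{"city":"Berlin"}' } }
    //   ]
    // }]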
@@ -1337,21 +1335,21 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
   const modelId = options.posthogModelOverride ?? (result.response?.modelId ? result.response.modelId : model.modelId);
   const provider = options.posthogProviderOverride ?? extractProvider(model);
   const baseURL = ''; // cannot currently get baseURL from vercel
-  const content = mapVercelOutput(result);
+  const content = mapVercelOutput(result.content);
   const latency = (Date.now() - startTime) / 1000;
   const providerMetadata = result.providerMetadata;
   const additionalTokenValues = {
-    ...(providerMetadata?.openai?.reasoningTokens ? {
-      reasoningTokens: providerMetadata.openai.reasoningTokens
-    } : {}),
-    ...(providerMetadata?.openai?.cachedPromptTokens ? {
-      cacheReadInputTokens: providerMetadata.openai.cachedPromptTokens
-    } : {}),
     ...(providerMetadata?.anthropic ? {
-      cacheReadInputTokens: providerMetadata.anthropic.cacheReadInputTokens,
      cacheCreationInputTokens: providerMetadata.anthropic.cacheCreationInputTokens
    } : {})
  };
+  const usage = {
+    inputTokens: result.usage.inputTokens,
+    outputTokens: result.usage.outputTokens,
+    reasoningTokens: result.usage.reasoningTokens,
+    cacheReadInputTokens: result.usage.cachedInputTokens,
+    ...additionalTokenValues
+  };
   await sendEventToPosthog({
     client: phClient,
     distinctId: options.posthogDistinctId,
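The non-streaming path now passes result.content to mapVercelOutput and builds the token payload from the normalized result.usage fields, keeping only Anthropic's cacheCreationInputTokens from provider metadata. A standalone sketch of that mapping, assuming the field names visible in the hunk (cachedInputTokens on the SDK side, cacheReadInputTokens on the PostHog side); the helper name and interface are hypothetical, not exports of the package:

    interface VercelUsage {
      inputTokens?: number;
      outputTokens?: number;
      reasoningTokens?: number;
      cachedInputTokens?: number;
    }

    // Sketch of the mapping performed inline in the hunk above.
    function toPosthogUsage(
      usage: VercelUsage,
      providerMetadata?: { anthropic?: { cacheCreationInputTokens?: number } }
    ) {
      return {
        inputTokens: usage.inputTokens,
        outputTokens: usage.outputTokens,
        reasoningTokens: usage.reasoningTokens,
        cacheReadInputTokens: usage.cachedInputTokens,
        ...(providerMetadata?.anthropic
          ? { cacheCreationInputTokens: providerMetadata.anthropic.cacheCreationInputTokens }
          : {}),
      };
    }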
@@ -1364,11 +1362,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
     baseURL,
     params: mergedParams,
     httpStatus: 200,
-    usage
-      inputTokens: result.usage.inputTokens,
-      outputTokens: result.usage.outputTokens,
-      ...additionalTokenValues
-    },
+    usage,
     tools: availableTools,
     captureImmediate: options.posthogCaptureImmediate
   });
@@ -1430,28 +1424,43 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         reasoningText += chunk.delta; // New in v5
       }
       if (chunk.type === 'finish') {
+        const providerMetadata = chunk.providerMetadata;
+        const additionalTokenValues = {
+          ...(providerMetadata?.anthropic ? {
+            cacheCreationInputTokens: providerMetadata.anthropic.cacheCreationInputTokens
+          } : {})
+        };
         usage = {
           inputTokens: chunk.usage?.inputTokens,
-          outputTokens: chunk.usage?.outputTokens
+          outputTokens: chunk.usage?.outputTokens,
+          reasoningTokens: chunk.usage?.reasoningTokens,
+          cacheReadInputTokens: chunk.usage?.cachedInputTokens,
+          ...additionalTokenValues
         };
-        if (chunk.providerMetadata?.openai?.reasoningTokens) {
-          usage.reasoningTokens = chunk.providerMetadata.openai.reasoningTokens;
-        }
-        if (chunk.providerMetadata?.openai?.cachedPromptTokens) {
-          usage.cacheReadInputTokens = chunk.providerMetadata.openai.cachedPromptTokens;
-        }
-        if (chunk.providerMetadata?.anthropic?.cacheReadInputTokens) {
-          usage.cacheReadInputTokens = chunk.providerMetadata.anthropic.cacheReadInputTokens;
-        }
-        if (chunk.providerMetadata?.anthropic?.cacheCreationInputTokens) {
-          usage.cacheCreationInputTokens = chunk.providerMetadata.anthropic.cacheCreationInputTokens;
-        }
       }
       controller.enqueue(chunk);
     },
     flush: async () => {
       const latency = (Date.now() - startTime) / 1000;
-
+      // Build content array similar to mapVercelOutput structure
+      const content = [];
+      if (reasoningText) {
+        content.push({
+          type: 'reasoning',
+          text: truncate(reasoningText)
+        });
+      }
+      if (generatedText) {
+        content.push({
+          type: 'text',
+          text: truncate(generatedText)
+        });
+      }
+      // Structure output like mapVercelOutput does
+      const output = content.length > 0 ? [{
+        role: 'assistant',
+        content: content.length === 1 && content[0].type === 'text' ? content[0].text : content
+      }] : [];
       await sendEventToPosthog({
         client: phClient,
         distinctId: options.posthogDistinctId,
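In the streaming path, the finish chunk's usage is now copied field for field, and flush() emits the same [{ role, content }] shape that mapVercelOutput produces instead of a bare assistant text message. A small sketch of that assembly rule, assuming accumulated generatedText and reasoningText strings as in the hunk (truncation omitted for brevity; this is an illustration, not the package's code):

    // Illustrative: how the flush() in the hunk above assembles its output.
    function buildStreamOutput(generatedText: string, reasoningText: string) {
      const content: Array<{ type: string; text: string }> = [];
      if (reasoningText) content.push({ type: 'reasoning', text: reasoningText });
      if (generatedText) content.push({ type: 'text', text: generatedText });
      // A single text part collapses to a plain string, mirroring mapVercelOutput.
      return content.length > 0
        ? [{
            role: 'assistant',
            content: content.length === 1 && content[0].type === 'text' ? content[0].text : content,
          }]
        : [];
    }

    // buildStreamOutput('Hello!', '')          -> [{ role: 'assistant', content: 'Hello!' }]
    // buildStreamOutput('Hello!', 'thinking')  -> [{ role: 'assistant', content: [reasoning part, text part] }]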
@@ -1459,10 +1468,7 @@ const createInstrumentationMiddleware = (phClient, model, options) => {
         model: modelId,
         provider: provider,
         input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
-        output:
-          content: outputContent,
-          role: 'assistant'
-        }],
+        output: output,
         latency,
         baseURL,
         params: mergedParams,
@@ -1729,7 +1735,9 @@ class WrappedModels {
         httpStatus: 200,
         usage: {
           inputTokens: response.usageMetadata?.promptTokenCount ?? 0,
-          outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0
+          outputTokens: response.usageMetadata?.candidatesTokenCount ?? 0,
+          reasoningTokens: response.usageMetadata?.thoughtsTokenCount ?? 0,
+          cacheReadInputTokens: response.usageMetadata?.cachedContentTokenCount ?? 0
         },
         tools: availableTools,
         captureImmediate: posthogCaptureImmediate
@@ -1785,7 +1793,9 @@ class WrappedModels {
         if (chunk.usageMetadata) {
           usage = {
             inputTokens: chunk.usageMetadata.promptTokenCount ?? 0,
-            outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0
+            outputTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,
+            reasoningTokens: chunk.usageMetadata.thoughtsTokenCount ?? 0,
+            cacheReadInputTokens: chunk.usageMetadata.cachedContentTokenCount ?? 0
           };
         }
         yield chunk;