@mastra/observability 1.0.0-beta.3 → 1.0.0-beta.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +9 -0
- package/dist/index.cjs +74 -17
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +74 -17
- package/dist/index.js.map +1 -1
- package/dist/model-tracing.d.ts +4 -3
- package/dist/model-tracing.d.ts.map +1 -1
- package/dist/spans/base.d.ts.map +1 -1
- package/dist/usage.d.ts +21 -0
- package/dist/usage.d.ts.map +1 -0
- package/package.json +3 -3
package/dist/index.js
CHANGED
@@ -946,6 +946,58 @@ var TestExporter = class extends BaseExporter {
     this.logger.info("TestExporter shutdown");
   }
 };
+
+// src/usage.ts
+function extractUsageMetrics(usage, providerMetadata) {
+  if (!usage) {
+    return {};
+  }
+  const inputDetails = {};
+  const outputDetails = {};
+  let inputTokens = usage.inputTokens;
+  const outputTokens = usage.outputTokens;
+  if (usage.cachedInputTokens) {
+    inputDetails.cacheRead = usage.cachedInputTokens;
+  }
+  if (usage.reasoningTokens) {
+    outputDetails.reasoning = usage.reasoningTokens;
+  }
+  const anthropic = providerMetadata?.anthropic;
+  if (anthropic) {
+    if (anthropic.cacheReadInputTokens) {
+      inputDetails.cacheRead = anthropic.cacheReadInputTokens;
+    }
+    if (anthropic.cacheCreationInputTokens) {
+      inputDetails.cacheWrite = anthropic.cacheCreationInputTokens;
+    }
+    if (anthropic.cacheReadInputTokens || anthropic.cacheCreationInputTokens) {
+      inputDetails.text = usage.inputTokens;
+      inputTokens = (usage.inputTokens ?? 0) + (anthropic.cacheReadInputTokens ?? 0) + (anthropic.cacheCreationInputTokens ?? 0);
+    }
+  }
+  const google = providerMetadata?.google;
+  if (google?.usageMetadata) {
+    if (google.usageMetadata.cachedContentTokenCount) {
+      inputDetails.cacheRead = google.usageMetadata.cachedContentTokenCount;
+    }
+    if (google.usageMetadata.thoughtsTokenCount) {
+      outputDetails.reasoning = google.usageMetadata.thoughtsTokenCount;
+    }
+  }
+  const result = {
+    inputTokens,
+    outputTokens
+  };
+  if (Object.keys(inputDetails).length > 0) {
+    result.inputDetails = inputDetails;
+  }
+  if (Object.keys(outputDetails).length > 0) {
+    result.outputDetails = outputDetails;
+  }
+  return result;
+}
+
+// src/model-tracing.ts
 var ModelSpanTracker = class {
   #modelSpan;
   #currentStepSpan;
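
For orientation, a minimal sketch of how the new extractUsageMetrics helper behaves, based solely on the code added above; the usage and providerMetadata shapes are assumed AI SDK-style inputs, not part of this package's documented API:

// Hypothetical inputs: an AI SDK-style usage object plus Anthropic provider metadata.
const usage = { inputTokens: 120, outputTokens: 450, reasoningTokens: 200 };
const providerMetadata = {
  anthropic: { cacheReadInputTokens: 1000, cacheCreationInputTokens: 300 }
};

const stats = extractUsageMetrics(usage, providerMetadata);
// Following the logic above, stats should come out as:
// {
//   inputTokens: 1420,   // 120 + 1000 + 300: cache tokens folded back into the total
//   outputTokens: 450,
//   inputDetails: { cacheRead: 1000, cacheWrite: 300, text: 120 },
//   outputDetails: { reasoning: 200 }
// }
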
@@ -953,8 +1005,7 @@ var ModelSpanTracker = class {
   #accumulator = {};
   #stepIndex = 0;
   #chunkSequence = 0;
-
-  #completionStartTimeCaptured = false;
+  #completionStartTime;
   /** Tracks tool output accumulators by toolCallId for consolidating sub-agent streams */
   #toolOutputAccumulators = /* @__PURE__ */ new Map();
   /** Tracks toolCallIds that had streaming output (to skip redundant tool-result spans) */
@@ -964,18 +1015,12 @@ var ModelSpanTracker = class {
   }
   /**
    * Capture the completion start time (time to first token) when the first content chunk arrives.
-   * This is used by observability providers like Langfuse to calculate TTFT metrics.
    */
   #captureCompletionStartTime() {
-    if (this.#completionStartTimeCaptured) {
+    if (this.#completionStartTime) {
       return;
     }
-    this.#completionStartTimeCaptured = true;
-    this.#modelSpan.update({
-      attributes: {
-        completionStartTime: /* @__PURE__ */ new Date()
-      }
-    });
+    this.#completionStartTime = /* @__PURE__ */ new Date();
   }
   /**
    * Get the tracing context for creating child spans.
@@ -993,10 +1038,16 @@ var ModelSpanTracker = class {
     this.#modelSpan?.error(options);
   }
   /**
-   * End the generation span
+   * End the generation span with optional raw usage data.
+   * If usage is provided, it will be converted to UsageStats with cache token details.
    */
   endGeneration(options) {
-    this.#modelSpan?.end(options);
+    const { usage, providerMetadata, ...spanOptions } = options ?? {};
+    if (spanOptions.attributes) {
+      spanOptions.attributes.completionStartTime = this.#completionStartTime;
+      spanOptions.attributes.usage = extractUsageMetrics(usage, providerMetadata);
+    }
+    this.#modelSpan?.end(spanOptions);
   }
   /**
    * Update the generation span
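
Taken together, the two preceding hunks replace the old behavior (an immediate modelSpan.update() carrying completionStartTime) with a timestamp held in #completionStartTime and written onto the span only when the generation ends, alongside the normalized usage. A rough sketch of the resulting call, where tracker, finalText, and the attribute names other than completionStartTime and usage are illustrative assumptions:

// Hypothetical call site for a ModelSpanTracker instance.
tracker.endGeneration({
  attributes: { output: finalText },
  usage: { inputTokens: 120, outputTokens: 450 },
  providerMetadata: { anthropic: { cacheReadInputTokens: 1000 } }
});
// Per the diff, the span should end with attributes roughly like:
// {
//   output: finalText,
//   completionStartTime: <Date captured when the first content chunk arrived>,
//   usage: { inputTokens: 1120, outputTokens: 450, inputDetails: { cacheRead: 1000, text: 120 } }
// }
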
@@ -1026,9 +1077,10 @@ var ModelSpanTracker = class {
   #endStepSpan(payload) {
     if (!this.#currentStepSpan) return;
     const output = payload.output;
-    const { usage, ...otherOutput } = output;
+    const { usage: rawUsage, ...otherOutput } = output;
     const stepResult = payload.stepResult;
     const metadata = payload.metadata;
+    const usage = extractUsageMetrics(rawUsage, metadata?.providerMetadata);
     const cleanMetadata = metadata ? { ...metadata } : void 0;
     if (cleanMetadata?.request) {
       delete cleanMetadata.request;
@@ -1279,13 +1331,15 @@ var ModelSpanTracker = class {
    * create MODEL_STEP and MODEL_CHUNK spans for each semantic unit in the stream.
    */
   wrapStream(stream) {
-    let captureCompletionStartTime = false;
     return stream.pipeThrough(
       new TransformStream({
         transform: (chunk, controller) => {
-
-
-
+          switch (chunk.type) {
+            case "text-delta":
+            case "tool-call-delta":
+            case "reasoning-delta":
+              this.#captureCompletionStartTime();
+              break;
           }
           controller.enqueue(chunk);
           switch (chunk.type) {
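
The wrapStream change drops the local captureCompletionStartTime flag and instead calls #captureCompletionStartTime() on the first text-delta, tool-call-delta, or reasoning-delta chunk; the capture-once guard now lives in that method's early return. A distilled, standalone sketch of the same pattern (not this package's API):

// Capture-once TTFT pattern: record a timestamp when the first content chunk flows through.
let completionStartTime;
const tap = new TransformStream({
  transform(chunk, controller) {
    if (!completionStartTime &&
        (chunk.type === "text-delta" || chunk.type === "tool-call-delta" || chunk.type === "reasoning-delta")) {
      completionStartTime = new Date(); // time-to-first-token reference point
    }
    controller.enqueue(chunk); // chunks pass through unchanged
  }
});
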
@@ -1544,6 +1598,9 @@ function deepClean(value, options = {}, _seen = /* @__PURE__ */ new WeakSet(), _
     return "[Circular]";
   }
   _seen.add(value);
+  if (value instanceof Date) {
+    return value;
+  }
   if (Array.isArray(value)) {
     return value.map((item) => deepClean(item, options, _seen, _depth + 1));
   }