ai 4.3.16 → 4.3.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +21 -3
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +21 -3
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs
CHANGED
@@ -2813,6 +2813,9 @@ async function generateObject({
         "ai.response.id": responseData.id,
         "ai.response.model": responseData.modelId,
         "ai.response.timestamp": responseData.timestamp.toISOString(),
+        "ai.response.providerMetadata": JSON.stringify(
+          result2.providerMetadata
+        ),
         "ai.usage.promptTokens": result2.usage.promptTokens,
         "ai.usage.completionTokens": result2.usage.completionTokens,
         // standardized gen-ai llm span attributes:
@@ -2923,6 +2926,9 @@ async function generateObject({
         "ai.response.id": responseData.id,
         "ai.response.model": responseData.modelId,
         "ai.response.timestamp": responseData.timestamp.toISOString(),
+        "ai.response.providerMetadata": JSON.stringify(
+          result2.providerMetadata
+        ),
         "ai.usage.promptTokens": result2.usage.promptTokens,
         "ai.usage.completionTokens": result2.usage.completionTokens,
         // standardized gen-ai llm span attributes:
@@ -3626,6 +3632,7 @@ var DefaultStreamObjectResult = class {
         "ai.response.id": response.id,
         "ai.response.model": response.modelId,
         "ai.response.timestamp": response.timestamp.toISOString(),
+        "ai.response.providerMetadata": JSON.stringify(providerMetadata),
         "ai.usage.promptTokens": finalUsage.promptTokens,
         "ai.usage.completionTokens": finalUsage.completionTokens,
         // standardized gen-ai llm span attributes:
@@ -3646,7 +3653,8 @@ var DefaultStreamObjectResult = class {
         "ai.usage.completionTokens": finalUsage.completionTokens,
         "ai.response.object": {
           output: () => JSON.stringify(object2)
-        }
+        },
+        "ai.response.providerMetadata": JSON.stringify(providerMetadata)
       }
     })
   );
@@ -4315,6 +4323,9 @@ async function generateText({
         "ai.response.id": responseData.id,
         "ai.response.model": responseData.modelId,
         "ai.response.timestamp": responseData.timestamp.toISOString(),
+        "ai.response.providerMetadata": JSON.stringify(
+          result.providerMetadata
+        ),
         "ai.usage.promptTokens": result.usage.promptTokens,
         "ai.usage.completionTokens": result.usage.completionTokens,
         // standardized gen-ai llm span attributes:
@@ -4441,7 +4452,10 @@ async function generateText({
           output: () => JSON.stringify(currentModelResponse.toolCalls)
         },
         "ai.usage.promptTokens": currentModelResponse.usage.promptTokens,
-        "ai.usage.completionTokens": currentModelResponse.usage.completionTokens
+        "ai.usage.completionTokens": currentModelResponse.usage.completionTokens,
+        "ai.response.providerMetadata": JSON.stringify(
+          currentModelResponse.providerMetadata
+        )
       }
     })
   );
@@ -5509,7 +5523,10 @@ var DefaultStreamTextResult = class {
         }
       },
       "ai.usage.promptTokens": usage.promptTokens,
-      "ai.usage.completionTokens": usage.completionTokens
+      "ai.usage.completionTokens": usage.completionTokens,
+      "ai.response.providerMetadata": JSON.stringify(
+        lastStep.providerMetadata
+      )
     }
   })
 );
@@ -5886,6 +5903,7 @@ var DefaultStreamTextResult = class {
         "ai.response.id": stepResponse.id,
         "ai.response.model": stepResponse.modelId,
         "ai.response.timestamp": stepResponse.timestamp.toISOString(),
+        "ai.response.providerMetadata": JSON.stringify(stepProviderMetadata),
         "ai.usage.promptTokens": stepUsage.promptTokens,
         "ai.usage.completionTokens": stepUsage.completionTokens,
         // standardized gen-ai llm span attributes: