ai 3.4.28 → 3.4.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +25 -8
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +25 -8
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -3382,6 +3382,16 @@ async function generateText({
               "ai.prompt.messages": {
                 input: () => JSON.stringify(promptMessages)
               },
+              "ai.prompt.tools": {
+                // convert the language model level tools:
+                input: () => {
+                  var _a12;
+                  return (_a12 = mode.tools) == null ? void 0 : _a12.map((tool2) => JSON.stringify(tool2));
+                }
+              },
+              "ai.prompt.toolChoice": {
+                input: () => mode.toolChoice != null ? JSON.stringify(mode.toolChoice) : void 0
+              },
               // standardized gen-ai llm span attributes:
               "gen_ai.system": model.provider,
               "gen_ai.request.model": model.modelId,
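
The hunk above adds two telemetry span attributes alongside the existing "ai.prompt.messages": "ai.prompt.tools" records each provider-level tool definition as its own JSON string, and "ai.prompt.toolChoice" records the tool choice as a single JSON string (or nothing when it is unset). A minimal sketch of the resulting values, using a hypothetical mode object rather than anything taken from the package:

// Illustrative sketch only (not part of the package): the shape of the values
// the new attributes record, for a hypothetical mode object.
const mode = {
  type: "regular",
  tools: [
    {
      type: "function",
      name: "getWeather",
      parameters: { type: "object", properties: { city: { type: "string" } } }
    }
  ],
  toolChoice: { type: "auto" }
};

// "ai.prompt.tools": every tool is stringified separately, yielding an array of JSON strings.
const toolsAttribute = mode.tools == null ? undefined : mode.tools.map((tool) => JSON.stringify(tool));

// "ai.prompt.toolChoice": one JSON string, or undefined when no tool choice was given.
const toolChoiceAttribute = mode.toolChoice != null ? JSON.stringify(mode.toolChoice) : undefined;

console.log(toolsAttribute);      // [ '{"type":"function","name":"getWeather",...}' ]
console.log(toolChoiceAttribute); // '{"type":"auto"}'
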
@@ -4073,6 +4083,10 @@ async function streamText({
       modelSupportsImageUrls: model.supportsImageUrls,
       modelSupportsUrl: model.supportsUrl
     });
+    const mode = {
+      type: "regular",
+      ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
+    };
     const {
       result: { stream: stream2, warnings: warnings2, rawResponse: rawResponse2, request: request2 },
       doStreamSpan: doStreamSpan2,
@@ -4094,6 +4108,16 @@ async function streamText({
               "ai.prompt.messages": {
                 input: () => JSON.stringify(promptMessages)
               },
+              "ai.prompt.tools": {
+                // convert the language model level tools:
+                input: () => {
+                  var _a11;
+                  return (_a11 = mode.tools) == null ? void 0 : _a11.map((tool2) => JSON.stringify(tool2));
+                }
+              },
+              "ai.prompt.toolChoice": {
+                input: () => mode.toolChoice != null ? JSON.stringify(mode.toolChoice) : void 0
+              },
               // standardized gen-ai llm span attributes:
               "gen_ai.system": model.provider,
               "gen_ai.request.model": model.modelId,
@@ -4113,14 +4137,7 @@ async function streamText({
           // get before the call
           doStreamSpan: doStreamSpan3,
           result: await model.doStream({
-            mode: {
-              type: "regular",
-              ...prepareToolsAndToolChoice({
-                tools,
-                toolChoice,
-                activeTools
-              })
-            },
+            mode,
             ...prepareCallSettings(settings),
             inputFormat: promptFormat,
             prompt: promptMessages,
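
Taken together, the streamText changes hoist the mode object (type plus tool definitions and tool choice) so it is computed once and shared by both the new span attributes and the model.doStream call; the generateText hunk only needs the new attributes because a mode value is already in scope there. A hedged usage sketch of how the new attributes surface, assuming an OpenTelemetry tracer provider is already registered in the application (for example via @vercel/otel or the OpenTelemetry Node SDK) and using an illustrative model and tool that are not part of this diff:

// Usage sketch (assumptions: OTel tracing is already set up in the app;
// the model, prompt, and tool below are illustrative, not from the package).
import { streamText, tool } from "ai";
import { openai } from "@ai-sdk/openai";
import { z } from "zod";

const result = await streamText({
  model: openai("gpt-4o-mini"),
  prompt: "What is the weather in Berlin?",
  tools: {
    getWeather: tool({
      description: "Get the weather for a city",
      parameters: z.object({ city: z.string() }),
      execute: async ({ city }) => ({ city, temperatureCelsius: 21 })
    })
  },
  // With telemetry enabled, the doStream span attributes now include
  // "ai.prompt.tools" and "ai.prompt.toolChoice" next to "ai.prompt.messages".
  experimental_telemetry: { isEnabled: true }
});

for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}
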