@ai-sdk/openai 2.0.0-canary.3 → 2.0.0-canary.4
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +8 -0
- package/dist/index.js +19 -15
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +19 -15
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.js +19 -15
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +19 -15
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/internal/dist/index.js
CHANGED
@@ -633,9 +633,12 @@ var OpenAIChatLanguageModel = class {
         completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : NaN
       },
       rawCall: { rawPrompt, rawSettings },
-      rawResponse: { headers: responseHeaders, body: rawResponse },
       request: { body: JSON.stringify(body) },
-      response: getResponseMetadata(response),
+      response: {
+        ...getResponseMetadata(response),
+        headers: responseHeaders,
+        body: rawResponse
+      },
       warnings,
       logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
       providerMetadata
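Net effect of this hunk: the chat model's doGenerate result drops the separate rawResponse object, and the HTTP headers plus raw body move onto response next to the metadata spread in from getResponseMetadata. Below is a minimal sketch of reading the new shape from a result object; the helper name is hypothetical, and the metadata fields (id, timestamp, modelId) are assumed based on the Responses-model hunk further down.

```js
// Hypothetical helper: log the merged response info from a doGenerate
// result produced by 2.0.0-canary.4. Field names follow the added lines
// above; only `headers` and `body` are new on `response`.
function logResponseInfo(result) {
  const { headers, body, ...metadata } = result.response;
  console.log("metadata:", metadata); // assumed { id, timestamp, modelId } via getResponseMetadata
  console.log("headers:", headers);   // previously result.rawResponse.headers
  console.log("raw body:", body);     // previously result.rawResponse.body
}
```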
@@ -681,7 +684,7 @@ var OpenAIChatLanguageModel = class {
       return {
         stream: simulatedStream,
         rawCall: result.rawCall,
-        rawResponse: result.rawResponse,
+        response: result.response,
         warnings: result.warnings
       };
     }
@@ -891,7 +894,7 @@ var OpenAIChatLanguageModel = class {
         })
       ),
       rawCall: { rawPrompt, rawSettings },
-      rawResponse: { headers: responseHeaders },
+      response: { headers: responseHeaders },
       request: { body: JSON.stringify(body) },
       warnings
     };
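For the streaming path there is no buffered body to attach, so only the headers move: doStream results now expose them as response.headers rather than rawResponse.headers. A small illustration of the new access path (the accessor and the header name are just examples, not part of the package):

```js
// Example accessor for a doStream result under the canary.4 shape.
const getContentType = (streamResult) =>
  streamResult.response.headers?.["content-type"]; // was streamResult.rawResponse.headers
```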
@@ -1234,10 +1237,13 @@ var OpenAICompletionLanguageModel = class {
       finishReason: mapOpenAIFinishReason(choice.finish_reason),
       logprobs: mapOpenAICompletionLogProbs(choice.logprobs),
       rawCall: { rawPrompt, rawSettings },
-      rawResponse: { headers: responseHeaders, body: rawResponse },
-      response: getResponseMetadata(response),
-      warnings,
-      request: { body: JSON.stringify(args) }
+      request: { body: JSON.stringify(args) },
+      response: {
+        ...getResponseMetadata(response),
+        headers: responseHeaders,
+        body: rawResponse
+      },
+      warnings
     };
   }
   async doStream(options) {
@@ -1327,7 +1333,7 @@ var OpenAICompletionLanguageModel = class {
         })
       ),
       rawCall: { rawPrompt, rawSettings },
-      rawResponse: { headers: responseHeaders },
+      response: { headers: responseHeaders },
       warnings,
       request: { body: JSON.stringify(body) }
     };
@@ -2137,17 +2143,15 @@ var OpenAIResponsesLanguageModel = class {
         rawPrompt: void 0,
         rawSettings: {}
       },
-      rawResponse: {
-        headers: responseHeaders,
-        body: rawResponse
-      },
       request: {
         body: JSON.stringify(body)
       },
       response: {
         id: response.id,
         timestamp: new Date(response.created_at * 1e3),
-        modelId: response.model
+        modelId: response.model,
+        headers: responseHeaders,
+        body: rawResponse
       },
       providerMetadata: {
         openai: {
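For OpenAIResponsesLanguageModel the same consolidation happens in place: the standalone rawResponse block is removed, and headers and body are appended to the response object that already carried id, timestamp, and modelId. A before/after sketch of the response-related fields in the result, with placeholder values:

```js
// canary.3 (before): metadata and raw HTTP details were split.
const before = {
  rawResponse: { headers: { /* HTTP response headers */ }, body: { /* parsed body */ } },
  response: { id: "resp_123", timestamp: new Date(1700000000 * 1e3), modelId: "gpt-4o" },
};

// canary.4 (after): a single response object carries everything.
const after = {
  response: {
    id: "resp_123",                        // response.id
    timestamp: new Date(1700000000 * 1e3), // new Date(response.created_at * 1e3)
    modelId: "gpt-4o",                     // response.model
    headers: { /* HTTP response headers */ },
    body: { /* parsed body */ },
  },
};
```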
@@ -2289,8 +2293,8 @@ var OpenAIResponsesLanguageModel = class {
         rawPrompt: void 0,
         rawSettings: {}
       },
-      rawResponse: { headers: responseHeaders },
       request: { body: JSON.stringify(body) },
+      response: { headers: responseHeaders },
       warnings
     };
   }