@zenning/openai 1.4.1 → 1.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +3 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +24 -4
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +24 -4
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.d.mts +3 -0
- package/internal/dist/index.d.ts +3 -0
- package/internal/dist/index.js +24 -4
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +24 -4
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
|
@@ -2162,6 +2162,7 @@ var OpenAIResponsesLanguageModel = class {
       // provider options:
       metadata: openaiOptions == null ? void 0 : openaiOptions.metadata,
       parallel_tool_calls: openaiOptions == null ? void 0 : openaiOptions.parallelToolCalls,
+      include: openaiOptions == null ? void 0 : openaiOptions.include,
       previous_response_id: openaiOptions == null ? void 0 : openaiOptions.previousResponseId,
       store: openaiOptions == null ? void 0 : openaiOptions.store,
       user: openaiOptions == null ? void 0 : openaiOptions.user,
@@ -2181,7 +2182,6 @@ var OpenAIResponsesLanguageModel = class {
       truncation: "auto"
     }
   };
-  console.log("baseArgs", JSON.stringify(baseArgs));
   if (modelConfig.isReasoningModel) {
     if (baseArgs.temperature != null) {
       baseArgs.temperature = void 0;
@@ -2336,6 +2336,10 @@ var OpenAIResponsesLanguageModel = class {
     args: output.arguments
   }));
   const reasoningSummary = (_b = (_a = response.output.find((item) => item.type === "reasoning")) == null ? void 0 : _a.summary) != null ? _b : null;
+  console.log(JSON.stringify({
+    msg: "ai-sdk: content annotations",
+    annotations: outputTextElements.flatMap((content) => content.annotations)
+  }));
   return {
     text: outputTextElements.map((content) => content.text).join("\n"),
     sources: outputTextElements.flatMap(
@@ -2490,13 +2494,23 @@ var OpenAIResponsesLanguageModel = class {
         cachedPromptTokens = (_c = (_b = value.response.usage.input_tokens_details) == null ? void 0 : _b.cached_tokens) != null ? _c : cachedPromptTokens;
         reasoningTokens = (_e = (_d = value.response.usage.output_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : reasoningTokens;
       } else if (isResponseAnnotationAddedChunk(value)) {
+        console.log(JSON.stringify({
+          msg: "ai-sdk: source (stream)",
+          source: value.annotation
+        }));
         controller.enqueue({
           type: "source",
           source: {
             sourceType: "url",
             id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : generateId2(),
-
-
+            file: value.annotation.type && value.annotation.type === "file_citation" ? {
+              type: value.annotation.type,
+              file_id: value.annotation.file_id,
+              filename: value.annotation.filename,
+              index: value.annotation.index
+            } : void 0,
+            url: value.annotation.type && value.annotation.type === "url_citation" ? value.annotation.url : void 0,
+            title: value.annotation.type && value.annotation.type === "url_citation" ? value.annotation.title : void 0
           }
         });
       }
@@ -2599,7 +2613,12 @@ var responseAnnotationAddedSchema = z7.object({
     type: z7.literal("url_citation"),
     url: z7.string(),
     title: z7.string()
-  })
+  }).or(z7.object({
+    type: z7.literal("file_citation"),
+    file_id: z7.string(),
+    filename: z7.string(),
+    index: z7.number()
+  }))
 });
 var responseReasoningSummaryTextDeltaSchema = z7.object({
   type: z7.literal("response.reasoning_summary_text.delta"),
@@ -2668,6 +2687,7 @@ function getResponsesModelConfig(modelId) {
 var openaiResponsesProviderOptionsSchema = z7.object({
   metadata: z7.any().nullish(),
   parallelToolCalls: z7.boolean().nullish(),
+  include: z7.array(z7.string()).nullish(),
   previousResponseId: z7.string().nullish(),
   forceNoTemperature: z7.boolean().nullish(),
   store: z7.boolean().nullish(),