@zenning/openai 1.4.1 → 1.4.3

This diff shows the changes between publicly released versions of the package as they appear in their public registry and is provided for informational purposes only.
package/dist/index.d.mts CHANGED
@@ -432,6 +432,7 @@ declare const openai: OpenAIProvider;
  declare const openaiResponsesProviderOptionsSchema: z.ZodObject<{
  metadata: z.ZodOptional<z.ZodNullable<z.ZodAny>>;
  parallelToolCalls: z.ZodOptional<z.ZodNullable<z.ZodBoolean>>;
+ include: z.ZodOptional<z.ZodNullable<z.ZodArray<z.ZodString, "many">>>;
  previousResponseId: z.ZodOptional<z.ZodNullable<z.ZodString>>;
  forceNoTemperature: z.ZodOptional<z.ZodNullable<z.ZodBoolean>>;
  store: z.ZodOptional<z.ZodNullable<z.ZodBoolean>>;
@@ -446,6 +447,7 @@ declare const openaiResponsesProviderOptionsSchema: z.ZodObject<{
  store?: boolean | null | undefined;
  metadata?: any;
  reasoningEffort?: string | null | undefined;
+ include?: string[] | null | undefined;
  parallelToolCalls?: boolean | null | undefined;
  previousResponseId?: string | null | undefined;
  strictSchemas?: boolean | null | undefined;
@@ -457,6 +459,7 @@ declare const openaiResponsesProviderOptionsSchema: z.ZodObject<{
  store?: boolean | null | undefined;
  metadata?: any;
  reasoningEffort?: string | null | undefined;
+ include?: string[] | null | undefined;
  parallelToolCalls?: boolean | null | undefined;
  previousResponseId?: string | null | undefined;
  strictSchemas?: boolean | null | undefined;
package/dist/index.d.ts CHANGED
@@ -432,6 +432,7 @@ declare const openai: OpenAIProvider;
  declare const openaiResponsesProviderOptionsSchema: z.ZodObject<{
  metadata: z.ZodOptional<z.ZodNullable<z.ZodAny>>;
  parallelToolCalls: z.ZodOptional<z.ZodNullable<z.ZodBoolean>>;
+ include: z.ZodOptional<z.ZodNullable<z.ZodArray<z.ZodString, "many">>>;
  previousResponseId: z.ZodOptional<z.ZodNullable<z.ZodString>>;
  forceNoTemperature: z.ZodOptional<z.ZodNullable<z.ZodBoolean>>;
  store: z.ZodOptional<z.ZodNullable<z.ZodBoolean>>;
@@ -446,6 +447,7 @@ declare const openaiResponsesProviderOptionsSchema: z.ZodObject<{
  store?: boolean | null | undefined;
  metadata?: any;
  reasoningEffort?: string | null | undefined;
+ include?: string[] | null | undefined;
  parallelToolCalls?: boolean | null | undefined;
  previousResponseId?: string | null | undefined;
  strictSchemas?: boolean | null | undefined;
@@ -457,6 +459,7 @@ declare const openaiResponsesProviderOptionsSchema: z.ZodObject<{
  store?: boolean | null | undefined;
  metadata?: any;
  reasoningEffort?: string | null | undefined;
+ include?: string[] | null | undefined;
  parallelToolCalls?: boolean | null | undefined;
  previousResponseId?: string | null | undefined;
  strictSchemas?: boolean | null | undefined;
package/dist/index.js CHANGED
@@ -2135,6 +2135,7 @@ var OpenAIResponsesLanguageModel = class {
  // provider options:
  metadata: openaiOptions == null ? void 0 : openaiOptions.metadata,
  parallel_tool_calls: openaiOptions == null ? void 0 : openaiOptions.parallelToolCalls,
+ include: openaiOptions == null ? void 0 : openaiOptions.include,
  previous_response_id: openaiOptions == null ? void 0 : openaiOptions.previousResponseId,
  store: openaiOptions == null ? void 0 : openaiOptions.store,
  user: openaiOptions == null ? void 0 : openaiOptions.user,
@@ -2154,7 +2155,6 @@ var OpenAIResponsesLanguageModel = class {
  truncation: "auto"
  }
  };
- console.log("baseArgs", JSON.stringify(baseArgs));
  if (modelConfig.isReasoningModel) {
  if (baseArgs.temperature != null) {
  baseArgs.temperature = void 0;
@@ -2309,6 +2309,10 @@ var OpenAIResponsesLanguageModel = class {
  args: output.arguments
  }));
  const reasoningSummary = (_b = (_a = response.output.find((item) => item.type === "reasoning")) == null ? void 0 : _a.summary) != null ? _b : null;
+ console.log(JSON.stringify({
+ msg: "ai-sdk: content annotations",
+ annotations: outputTextElements.flatMap((content) => content.annotations)
+ }));
  return {
  text: outputTextElements.map((content) => content.text).join("\n"),
  sources: outputTextElements.flatMap(
@@ -2463,13 +2467,23 @@ var OpenAIResponsesLanguageModel = class {
  cachedPromptTokens = (_c = (_b = value.response.usage.input_tokens_details) == null ? void 0 : _b.cached_tokens) != null ? _c : cachedPromptTokens;
  reasoningTokens = (_e = (_d = value.response.usage.output_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : reasoningTokens;
  } else if (isResponseAnnotationAddedChunk(value)) {
+ console.log(JSON.stringify({
+ msg: "ai-sdk: source (stream)",
+ source: value.annotation
+ }));
  controller.enqueue({
  type: "source",
  source: {
  sourceType: "url",
  id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0, import_provider_utils9.generateId)(),
- url: value.annotation.url,
- title: value.annotation.title
+ file: value.annotation.type && value.annotation.type === "file_citation" ? {
+ type: value.annotation.type,
+ file_id: value.annotation.file_id,
+ filename: value.annotation.filename,
+ index: value.annotation.index
+ } : void 0,
+ url: value.annotation.type && value.annotation.type === "url_citation" ? value.annotation.url : void 0,
+ title: value.annotation.type && value.annotation.type === "url_citation" ? value.annotation.title : void 0
  }
  });
  }
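
The stream handler now emits two source shapes under the same sourceType "url": url_citation annotations keep url and title, while file_citation annotations move their metadata into a nested file object. Below is a hedged sketch of how consuming code might branch on this; the StreamSource type and describeSource helper are invented for illustration, and only the field names come from the hunk above.

// Invented types mirroring the two shapes enqueued above; not exported by the package.
interface FileCitationInfo {
  type: "file_citation";
  file_id: string;
  filename: string;
  index: number;
}

interface StreamSource {
  sourceType: "url";       // kept as "url" for both annotation kinds
  id: string;
  url?: string;            // set only for url_citation annotations
  title?: string;          // set only for url_citation annotations
  file?: FileCitationInfo; // set only for file_citation annotations
}

function describeSource(source: StreamSource): string {
  if (source.file) {
    // file_citation: metadata lives on the nested file object
    return `file ${source.file.filename} (${source.file.file_id}, index ${source.file.index})`;
  }
  // url_citation: plain url/title pair, as in 1.4.1
  return `${source.title ?? "untitled"} <${source.url ?? "no url"}>`;
}
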
@@ -2572,7 +2586,12 @@ var responseAnnotationAddedSchema = import_zod7.z.object({
  type: import_zod7.z.literal("url_citation"),
  url: import_zod7.z.string(),
  title: import_zod7.z.string()
- })
+ }).or(import_zod7.z.object({
+ type: import_zod7.z.literal("file_citation"),
+ file_id: import_zod7.z.string(),
+ filename: import_zod7.z.string(),
+ index: import_zod7.z.number()
+ }))
  });
  var responseReasoningSummaryTextDeltaSchema = import_zod7.z.object({
  type: import_zod7.z.literal("response.reasoning_summary_text.delta"),
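
Rebuilt on its own, the widened annotation schema is a plain zod union of the existing url_citation shape and the new file_citation shape. The two payloads below are invented for illustration; a file_citation payload would have failed validation against the 1.4.1 schema.

import { z } from "zod";

// Same union as the hunk above, without the surrounding
// responseAnnotationAddedSchema wrapper or the bundled import_zod7 alias.
const annotationSchema = z
  .object({
    type: z.literal("url_citation"),
    url: z.string(),
    title: z.string(),
  })
  .or(
    z.object({
      type: z.literal("file_citation"),
      file_id: z.string(),
      filename: z.string(),
      index: z.number(),
    })
  );

annotationSchema.parse({ type: "url_citation", url: "https://example.com", title: "Example" });
annotationSchema.parse({ type: "file_citation", file_id: "file_abc", filename: "notes.txt", index: 0 });
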
@@ -2641,6 +2660,7 @@ function getResponsesModelConfig(modelId) {
  var openaiResponsesProviderOptionsSchema = import_zod7.z.object({
  metadata: import_zod7.z.any().nullish(),
  parallelToolCalls: import_zod7.z.boolean().nullish(),
+ include: import_zod7.z.array(import_zod7.z.string()).nullish(),
  previousResponseId: import_zod7.z.string().nullish(),
  forceNoTemperature: import_zod7.z.boolean().nullish(),
  store: import_zod7.z.boolean().nullish(),